<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>[210621] trunk/Source</title>
</head>
<body>

<style type="text/css"><!--
#msg dl.meta { border: 1px #006 solid; background: #369; padding: 6px; color: #fff; }
#msg dl.meta dt { float: left; width: 6em; font-weight: bold; }
#msg dt:after { content:':';}
#msg dl, #msg dt, #msg ul, #msg li, #header, #footer, #logmsg { font-family: verdana,arial,helvetica,sans-serif; font-size: 10pt;  }
#msg dl a { font-weight: bold}
#msg dl a:link    { color:#fc3; }
#msg dl a:active  { color:#ff0; }
#msg dl a:visited { color:#cc6; }
h3 { font-family: verdana,arial,helvetica,sans-serif; font-size: 10pt; font-weight: bold; }
#msg pre { overflow: auto; background: #ffc; border: 1px #fa0 solid; padding: 6px; }
#logmsg { background: #ffc; border: 1px #fa0 solid; padding: 1em 1em 0 1em; }
#logmsg p, #logmsg pre, #logmsg blockquote { margin: 0 0 1em 0; }
#logmsg p, #logmsg li, #logmsg dt, #logmsg dd { line-height: 14pt; }
#logmsg h1, #logmsg h2, #logmsg h3, #logmsg h4, #logmsg h5, #logmsg h6 { margin: .5em 0; }
#logmsg h1:first-child, #logmsg h2:first-child, #logmsg h3:first-child, #logmsg h4:first-child, #logmsg h5:first-child, #logmsg h6:first-child { margin-top: 0; }
#logmsg ul, #logmsg ol { padding: 0; list-style-position: inside; margin: 0 0 0 1em; }
#logmsg ul { text-indent: -1em; padding-left: 1em; }#logmsg ol { text-indent: -1.5em; padding-left: 1.5em; }
#logmsg > ul, #logmsg > ol { margin: 0 0 1em 0; }
#logmsg pre { background: #eee; padding: 1em; }
#logmsg blockquote { border: 1px solid #fa0; border-left-width: 10px; padding: 1em 1em 0 1em; background: white;}
#logmsg dl { margin: 0; }
#logmsg dt { font-weight: bold; }
#logmsg dd { margin: 0; padding: 0 0 0.5em 0; }
#logmsg dd:before { content:'\00bb';}
#logmsg table { border-spacing: 0px; border-collapse: collapse; border-top: 4px solid #fa0; border-bottom: 1px solid #fa0; background: #fff; }
#logmsg table th { text-align: left; font-weight: normal; padding: 0.2em 0.5em; border-top: 1px dotted #fa0; }
#logmsg table td { text-align: right; border-top: 1px dotted #fa0; padding: 0.2em 0.5em; }
#logmsg table thead th { text-align: center; border-bottom: 1px solid #fa0; }
#logmsg table th.Corner { text-align: left; }
#logmsg hr { border: none 0; border-top: 2px dashed #fa0; height: 1px; }
#header, #footer { color: #fff; background: #636; border: 1px #300 solid; padding: 6px; }
#patch { width: 100%; }
#patch h4 {font-family: verdana,arial,helvetica,sans-serif;font-size:10pt;padding:8px;background:#369;color:#fff;margin:0;}
#patch .propset h4, #patch .binary h4 {margin:0;}
#patch pre {padding:0;line-height:1.2em;margin:0;}
#patch .diff {width:100%;background:#eee;padding: 0 0 10px 0;overflow:auto;}
#patch .propset .diff, #patch .binary .diff  {padding:10px 0;}
#patch span {display:block;padding:0 10px;}
#patch .modfile, #patch .addfile, #patch .delfile, #patch .propset, #patch .binary, #patch .copfile {border:1px solid #ccc;margin:10px 0;}
#patch ins {background:#dfd;text-decoration:none;display:block;padding:0 10px;}
#patch del {background:#fdd;text-decoration:none;display:block;padding:0 10px;}
#patch .lines, .info {color:#888;background:#fff;}
--></style>
<div id="msg">
<dl class="meta">
<dt>Revision</dt> <dd><a href="http://trac.webkit.org/projects/webkit/changeset/210621">210621</a></dd>
<dt>Author</dt> <dd>eric.carlson@apple.com</dd>
<dt>Date</dt> <dd>2017-01-11 21:22:32 -0800 (Wed, 11 Jan 2017)</dd>
</dl>

<h3>Log Message</h3>
<pre>[MediaStream, Mac] Render media stream audio buffers
https://bugs.webkit.org/show_bug.cgi?id=159836
&lt;rdar://problem/27380390&gt;

Reviewed by Jer Noble.

No new tests; it isn't possible to test audio rendering directly. A follow-up patch will
add a mock audio source that will enable audio testing.

* platform/cf/CoreMediaSoftLink.cpp: Include new functions used.
* platform/cf/CoreMediaSoftLink.h:

* WebCore.xcodeproj/project.pbxproj: Remove references to the deleted previews.

* platform/Logging.h: Add MediaCaptureSamples.

* platform/MediaSample.h: Add outputPresentationTime and outputDuration.

* platform/cf/CoreMediaSoftLink.cpp: Add CMSampleBufferGetOutputDuration, CMSampleBufferGetOutputPresentationTimeStamp,
CMTimeConvertScale, CMTimebaseGetEffectiveRate, CMAudioSampleBufferCreateWithPacketDescriptions,
CMSampleBufferSetDataBufferFromAudioBufferList, CMSampleBufferSetDataReady,
CMAudioFormatDescriptionCreate, CMClockGetHostTimeClock, and CMClockGetTime.
* platform/cf/CoreMediaSoftLink.h:

Create and use an AVSampleBufferAudioRenderer for each audio stream track, when it is available,
to render audio samples. Store the offset between the first sample received from a track's
output presentation and the synchronizer time so we can adjust sample timestamps to be
relative to the synchronizer's timeline regardless of their source. Remove the use of source
previews because not all sources will have them.

* platform/graphics/avfoundation/MediaSampleAVFObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm:

Add an ObjC helper to catch renderer status changes.
(-[WebAVSampleBufferStatusChangeListener initWithParent:]):
(-[WebAVSampleBufferStatusChangeListener dealloc]):
(-[WebAVSampleBufferStatusChangeListener invalidate]):
(-[WebAVSampleBufferStatusChangeListener beginObservingLayer:]):
(-[WebAVSampleBufferStatusChangeListener stopObservingLayer:]):
(-[WebAVSampleBufferStatusChangeListener beginObservingRenderer:]):
(-[WebAVSampleBufferStatusChangeListener stopObservingRenderer:]):
(-[WebAVSampleBufferStatusChangeListener observeValueForKeyPath:ofObject:change:context:]):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::removeOldSamplesFromPendingQueue):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::addSampleToPendingQueue):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateSampleTimes):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSample):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForVideoData):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForAudioData):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::createAudioRenderer):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderer):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderers):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::rendererStatusDidChange):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::ensureLayer):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyLayer):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::platformLayer):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::play):
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSampleBufferFromTrack): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForMediaData): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSampleBuffer): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::prepareVideoSampleBufferFromTrack): Deleted.
(WebCore::MediaPlayerPrivateMediaStreamAVFObjC::internalSetVolume): Deleted.

* platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm:
(WebCore::MediaSampleAVFObjC::outputPresentationTime): New.
(WebCore::MediaSampleAVFObjC::outputDuration): New.
(WebCore::MediaSampleAVFObjC::dump): Log outputPresentationTime.

* platform/mediastream/AudioTrackPrivateMediaStream.h: Add timelineOffset.

* platform/mediastream/MediaStreamTrackPrivate.cpp:
(WebCore::MediaStreamTrackPrivate::setEnabled): No more m_preview.
(WebCore::MediaStreamTrackPrivate::endTrack): Ditto.
(WebCore::MediaStreamTrackPrivate::preview): Deleted.
* platform/mediastream/MediaStreamTrackPrivate.h:

* platform/mediastream/RealtimeMediaSource.h:
(WebCore::RealtimeMediaSource::preview): Deleted.

* platform/mediastream/RealtimeMediaSourcePreview.h: Removed.

* platform/mediastream/VideoTrackPrivateMediaStream.h: Add timelineOffset.

* platform/mediastream/mac/AVAudioCaptureSource.h:
* platform/mediastream/mac/AVAudioCaptureSource.mm:
(WebCore::AVAudioCaptureSource::updateSettings):
(WebCore::AVAudioCaptureSource::captureOutputDidOutputSampleBufferFromConnection): Pass the
sample buffer up the chain.
(WebCore::AVAudioSourcePreview::create): Deleted.
(WebCore::AVAudioSourcePreview::AVAudioSourcePreview): Deleted.
(WebCore::AVAudioSourcePreview::invalidate): Deleted.
(WebCore::AVAudioSourcePreview::play): Deleted.
(WebCore::AVAudioSourcePreview::pause): Deleted.
(WebCore::AVAudioSourcePreview::setEnabled): Deleted.
(WebCore::AVAudioSourcePreview::setVolume): Deleted.
(WebCore::AVAudioSourcePreview::updateState): Deleted.
(WebCore::AVAudioCaptureSource::createPreview): Deleted.

* platform/mediastream/mac/AVMediaCaptureSource.h:
(WebCore::AVMediaSourcePreview): Deleted.
(WebCore::AVMediaCaptureSource::createWeakPtr): Deleted.

* platform/mediastream/mac/AVMediaCaptureSource.mm:
(WebCore::AVMediaCaptureSource::AVMediaCaptureSource): No more preview.
(WebCore::AVMediaCaptureSource::reset):
(WebCore::AVMediaCaptureSource::preview): Deleted.
(WebCore::AVMediaCaptureSource::removePreview): Deleted.
(WebCore::AVMediaSourcePreview::AVMediaSourcePreview): Deleted.
(WebCore::AVMediaSourcePreview::~AVMediaSourcePreview): Deleted.
(WebCore::AVMediaSourcePreview::invalidate): Deleted.

* platform/mediastream/mac/AVVideoCaptureSource.h:
* platform/mediastream/mac/AVVideoCaptureSource.mm:
(WebCore::AVVideoCaptureSource::processNewFrame): Don't set the &quot;display immediately&quot; attachment.
(WebCore::AVVideoSourcePreview::create): Deleted.
(WebCore::AVVideoSourcePreview::AVVideoSourcePreview): Deleted.
(WebCore::AVVideoSourcePreview::backgroundLayerBoundsChanged): Deleted.
(WebCore::AVVideoSourcePreview::invalidate): Deleted.
(WebCore::AVVideoSourcePreview::play): Deleted.
(WebCore::AVVideoSourcePreview::pause): Deleted.
(WebCore::AVVideoSourcePreview::setPaused): Deleted.
(WebCore::AVVideoSourcePreview::setEnabled): Deleted.
(WebCore::AVVideoCaptureSource::createPreview): Deleted.
(-[WebCoreAVVideoCaptureSourceObserver setParent:]): Deleted.
(-[WebCoreAVVideoCaptureSourceObserver observeValueForKeyPath:ofObject:change:context:]): Deleted.

* platform/mediastream/mac/MockRealtimeVideoSourceMac.mm:
(WebCore::MockRealtimeVideoSourceMac::CMSampleBufferFromPixelBuffer): Use a more typical video
time scale. Set the sample decode time.
(WebCore::MockRealtimeVideoSourceMac::pixelBufferFromCGImage): Use a static for colorspace
instead of fetching it for every frame.

* platform/mock/mediasource/MockSourceBufferPrivate.cpp: Add outputPresentationTime and outputDuration.</pre>

<h3>Modified Paths</h3>
<ul>
<li><a href="#trunkSourceWebCoreChangeLog">trunk/Source/WebCore/ChangeLog</a></li>
<li><a href="#trunkSourceWebCoreModuleswebaudioScriptProcessorNodecpp">trunk/Source/WebCore/Modules/webaudio/ScriptProcessorNode.cpp</a></li>
<li><a href="#trunkSourceWebCoreWebCorexcodeprojprojectpbxproj">trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj</a></li>
<li><a href="#trunkSourceWebCoreplatformLoggingh">trunk/Source/WebCore/platform/Logging.h</a></li>
<li><a href="#trunkSourceWebCoreplatformMediaSampleh">trunk/Source/WebCore/platform/MediaSample.h</a></li>
<li><a href="#trunkSourceWebCoreplatformcfCoreMediaSoftLinkcpp">trunk/Source/WebCore/platform/cf/CoreMediaSoftLink.cpp</a></li>
<li><a href="#trunkSourceWebCoreplatformcfCoreMediaSoftLinkh">trunk/Source/WebCore/platform/cf/CoreMediaSoftLink.h</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsavfoundationMediaSampleAVFObjCh">trunk/Source/WebCore/platform/graphics/avfoundation/MediaSampleAVFObjC.h</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsavfoundationobjcMediaPlayerPrivateMediaStreamAVFObjCh">trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsavfoundationobjcMediaPlayerPrivateMediaStreamAVFObjCmm">trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsavfoundationobjcMediaSampleAVFObjCmm">trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreamAudioTrackPrivateMediaStreamh">trunk/Source/WebCore/platform/mediastream/AudioTrackPrivateMediaStream.h</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreamMediaStreamTrackPrivatecpp">trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreamMediaStreamTrackPrivateh">trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreamRealtimeMediaSourceh">trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreamVideoTrackPrivateMediaStreamh">trunk/Source/WebCore/platform/mediastream/VideoTrackPrivateMediaStream.h</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreammacAVAudioCaptureSourceh">trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.h</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreammacAVAudioCaptureSourcemm">trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.mm</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreammacAVMediaCaptureSourceh">trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.h</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreammacAVMediaCaptureSourcemm">trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.mm</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreammacAVVideoCaptureSourceh">trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreammacAVVideoCaptureSourcemm">trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm</a></li>
<li><a href="#trunkSourceWebCoreplatformmediastreammacMockRealtimeVideoSourceMacmm">trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm</a></li>
<li><a href="#trunkSourceWebKit2WebProcesscomappleWebProcesssbin">trunk/Source/WebKit2/WebProcess/com.apple.WebProcess.sb.in</a></li>
</ul>

<h3>Removed Paths</h3>
<ul>
<li><a href="#trunkSourceWebCoreplatformmediastreamRealtimeMediaSourcePreviewh">trunk/Source/WebCore/platform/mediastream/RealtimeMediaSourcePreview.h</a></li>
</ul>

</div>
<div id="patch">
<h3>Diff</h3>
<a id="trunkSourceWebCoreChangeLog"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/ChangeLog (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/ChangeLog        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/ChangeLog        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -1,3 +1,146 @@
</span><ins>+2017-01-11  Eric Carlson  &lt;eric.carlson@apple.com&gt;
+
+        [MediaStream, Mac] Render media stream audio buffers
+        https://bugs.webkit.org/show_bug.cgi?id=159836
+        &lt;rdar://problem/27380390&gt;
+
+        Reviewed by Jer Noble.
+
+        No new tests, it isn't possible to test audio rendering directly. A follow-up patch will
+        add a mock audio source that will enable audio testing.
+
+        * platform/cf/CoreMediaSoftLink.cpp: Include new functions used.
+        * platform/cf/CoreMediaSoftLink.h:
+
+        * WebCore.xcodeproj/project.pbxproj: Remove references to the deleted previews.
+
+        * platform/Logging.h: Add MediaCaptureSamples.
+
+        * platform/MediaSample.h: Add outputPresentationTime and outputDuration.
+
+        * platform/cf/CoreMediaSoftLink.cpp: Add CMSampleBufferGetOutputDuration, CMSampleBufferGetOutputPresentationTimeStamp,
+        CMTimeConvertScale, CMTimebaseGetEffectiveRate, CMAudioSampleBufferCreateWithPacketDescriptions, 
+        CMSampleBufferSetDataBufferFromAudioBufferList, CMSampleBufferSetDataReady, 
+        CMAudioFormatDescriptionCreate, CMClockGetHostTimeClock, and CMClockGetTime.
+        * platform/cf/CoreMediaSoftLink.h:
+
+        Create and use an AVSampleBufferAudioRenderer each audio stream track, when it is available,
+        to render for audio samples. Store the offset between the first sample received from a track's
+        output presentation and the synchronizer time so we can adjust sample timestamps to be 
+        relative to the synchronizer's timeline regardless of their source. Remove the use of source
+        previews because not all sources will have them.
+
+        * platform/graphics/avfoundation/MediaSampleAVFObjC.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h:
+        * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm:
+        
+        Add an ObjC helper to catch renderer status changes.
+        (-[WebAVSampleBufferStatusChangeListener initWithParent:]): 
+        (-[WebAVSampleBufferStatusChangeListener dealloc]):
+        (-[WebAVSampleBufferStatusChangeListener invalidate]):
+        (-[WebAVSampleBufferStatusChangeListener beginObservingLayer:]):
+        (-[WebAVSampleBufferStatusChangeListener stopObservingLayer:]):
+        (-[WebAVSampleBufferStatusChangeListener beginObservingRenderer:]):
+        (-[WebAVSampleBufferStatusChangeListener stopObservingRenderer:]):
+        (-[WebAVSampleBufferStatusChangeListener observeValueForKeyPath:ofObject:change:context:]):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::~MediaPlayerPrivateMediaStreamAVFObjC):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::removeOldSamplesFromPendingQueue):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::addSampleToPendingQueue):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::updateSampleTimes):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSample):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForVideoData):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForAudioData):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::createAudioRenderer):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderer):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderers):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::rendererStatusDidChange):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::ensureLayer):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::destroyLayer):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::platformLayer):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::play):
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSampleBufferFromTrack): Deleted.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForMediaData): Deleted.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSampleBuffer): Deleted.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::prepareVideoSampleBufferFromTrack): Deleted.
+        (WebCore::MediaPlayerPrivateMediaStreamAVFObjC::internalSetVolume): Deleted.
+
+        * platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm:
+        (WebCore::MediaSampleAVFObjC::outputPresentationTime): New.
+        (WebCore::MediaSampleAVFObjC::outputDuration): New.
+        (WebCore::MediaSampleAVFObjC::dump): Log outputPresentationTime.
+
+        * platform/mediastream/AudioTrackPrivateMediaStream.h: Add timelineOffset.
+
+        * platform/mediastream/MediaStreamTrackPrivate.cpp:
+        (WebCore::MediaStreamTrackPrivate::setEnabled): No more m_preview.
+        (WebCore::MediaStreamTrackPrivate::endTrack): Ditto.
+        (WebCore::MediaStreamTrackPrivate::preview): Deleted.
+        * platform/mediastream/MediaStreamTrackPrivate.h:
+
+        * platform/mediastream/RealtimeMediaSource.h:
+        (WebCore::RealtimeMediaSource::preview): Deleted.
+
+        * platform/mediastream/RealtimeMediaSourcePreview.h: Removed.
+
+        * platform/mediastream/VideoTrackPrivateMediaStream.h: Add timelineOffset.
+
+        * platform/mediastream/mac/AVAudioCaptureSource.h:
+        * platform/mediastream/mac/AVAudioCaptureSource.mm:
+        (WebCore::AVAudioCaptureSource::updateSettings):
+        (WebCore::AVAudioCaptureSource::captureOutputDidOutputSampleBufferFromConnection): Pass the
+        sample buffer up the chain.
+        (WebCore::AVAudioSourcePreview::create): Deleted.
+        (WebCore::AVAudioSourcePreview::AVAudioSourcePreview): Deleted.
+        (WebCore::AVAudioSourcePreview::invalidate): Deleted.
+        (WebCore::AVAudioSourcePreview::play): Deleted.
+        (WebCore::AVAudioSourcePreview::pause): Deleted.
+        (WebCore::AVAudioSourcePreview::setEnabled): Deleted.
+        (WebCore::AVAudioSourcePreview::setVolume): Deleted.
+        (WebCore::AVAudioSourcePreview::updateState): Deleted.
+        (WebCore::AVAudioCaptureSource::createPreview): Deleted.
+
+        * platform/mediastream/mac/AVMediaCaptureSource.h:
+        (WebCore::AVMediaSourcePreview): Deleted.
+        (WebCore::AVMediaCaptureSource::createWeakPtr): Deleted.
+
+        * platform/mediastream/mac/AVMediaCaptureSource.mm:
+        (WebCore::AVMediaCaptureSource::AVMediaCaptureSource): No more preview.
+        (WebCore::AVMediaCaptureSource::reset):
+        (WebCore::AVMediaCaptureSource::preview): Deleted.
+        (WebCore::AVMediaCaptureSource::removePreview): Deleted.
+        (WebCore::AVMediaSourcePreview::AVMediaSourcePreview): Deleted.
+        (WebCore::AVMediaSourcePreview::~AVMediaSourcePreview): Deleted.
+        (WebCore::AVMediaSourcePreview::invalidate): Deleted.
+
+        * platform/mediastream/mac/AVVideoCaptureSource.h:
+        * platform/mediastream/mac/AVVideoCaptureSource.mm:
+        (WebCore::AVVideoCaptureSource::processNewFrame): Don't set the &quot;display immediately&quot; attachment.
+        (WebCore::AVVideoSourcePreview::create): Deleted.
+        (WebCore::AVVideoSourcePreview::AVVideoSourcePreview): Deleted.
+        (WebCore::AVVideoSourcePreview::backgroundLayerBoundsChanged): Deleted.
+        (WebCore::AVVideoSourcePreview::invalidate): Deleted.
+        (WebCore::AVVideoSourcePreview::play): Deleted.
+        (WebCore::AVVideoSourcePreview::pause): Deleted.
+        (WebCore::AVVideoSourcePreview::setPaused): Deleted.
+        (WebCore::AVVideoSourcePreview::setEnabled): Deleted.
+        (WebCore::AVVideoCaptureSource::createPreview): Deleted.
+        (-[WebCoreAVVideoCaptureSourceObserver setParent:]): Deleted.
+        (-[WebCoreAVVideoCaptureSourceObserver observeValueForKeyPath:ofObject:change:context:]): Deleted.
+
+        * platform/mediastream/mac/MockRealtimeVideoSourceMac.mm:
+        (WebCore::MockRealtimeVideoSourceMac::CMSampleBufferFromPixelBuffer): Use a more typical video
+        time scale. Set the sample decode time.
+        (WebCore::MockRealtimeVideoSourceMac::pixelBufferFromCGImage): Use a static for colorspace
+        instead of fetching it for every frame.
+
+        * platform/mock/mediasource/MockSourceBufferPrivate.cpp: Add outputPresentationTime and outputDuration.
+
</ins><span class="cx"> 2017-01-11  Youenn Fablet  &lt;youenn@apple.com&gt;
</span><span class="cx"> 
</span><span class="cx">         Remove request.formData property until it gets implemented
</span></span></pre></div>
<a id="trunkSourceWebCoreModuleswebaudioScriptProcessorNodecpp"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/Modules/webaudio/ScriptProcessorNode.cpp (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/Modules/webaudio/ScriptProcessorNode.cpp        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/Modules/webaudio/ScriptProcessorNode.cpp        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -213,6 +213,9 @@
</span><span class="cx">             m_isRequestOutstanding = true;
</span><span class="cx"> 
</span><span class="cx">             callOnMainThread([this] {
</span><ins>+                if (!m_hasAudioProcessListener)
+                    return;
+
</ins><span class="cx">                 fireProcessEvent();
</span><span class="cx"> 
</span><span class="cx">                 // De-reference to match the ref() call in process().
</span></span></pre></div>
<a id="trunkSourceWebCoreWebCorexcodeprojprojectpbxproj"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -279,7 +279,6 @@
</span><span class="cx">                 07C1C0E21BFB600100BD2256 /* MediaTrackSupportedConstraints.h in Headers */ = {isa = PBXBuildFile; fileRef = 07C1C0E01BFB600100BD2256 /* MediaTrackSupportedConstraints.h */; };
</span><span class="cx">                 07C1C0E51BFB60ED00BD2256 /* RealtimeMediaSourceSupportedConstraints.h in Headers */ = {isa = PBXBuildFile; fileRef = 07C1C0E41BFB60ED00BD2256 /* RealtimeMediaSourceSupportedConstraints.h */; settings = {ATTRIBUTES = (Private, ); }; };
</span><span class="cx">                 07CE77D516712A6A00C55A47 /* InbandTextTrackPrivateClient.h in Headers */ = {isa = PBXBuildFile; fileRef = 07CE77D416712A6A00C55A47 /* InbandTextTrackPrivateClient.h */; settings = {ATTRIBUTES = (Private, ); }; };
</span><del>-                07D1503B1DDB6965008F7598 /* RealtimeMediaSourcePreview.h in Headers */ = {isa = PBXBuildFile; fileRef = 07D1503A1DDB6688008F7598 /* RealtimeMediaSourcePreview.h */; settings = {ATTRIBUTES = (Private, ); }; };
</del><span class="cx">                 07D637401BB0B11300256CE9 /* WebAudioSourceProviderAVFObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = 07D6373E1BB0B11300256CE9 /* WebAudioSourceProviderAVFObjC.h */; };
</span><span class="cx">                 07D637411BB0B11300256CE9 /* WebAudioSourceProviderAVFObjC.mm in Sources */ = {isa = PBXBuildFile; fileRef = 07D6373F1BB0B11300256CE9 /* WebAudioSourceProviderAVFObjC.mm */; };
</span><span class="cx">                 07D6A4EF1BECF2D200174146 /* MockRealtimeMediaSource.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 07D6A4ED1BECF2D200174146 /* MockRealtimeMediaSource.cpp */; };
</span><span class="lines">@@ -7253,7 +7252,6 @@
</span><span class="cx">                 07C8AD111D073D630087C5CE /* AVFoundationMIMETypeCache.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AVFoundationMIMETypeCache.mm; sourceTree = &quot;&lt;group&gt;&quot;; };
</span><span class="cx">                 07C8AD121D073D630087C5CE /* AVFoundationMIMETypeCache.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AVFoundationMIMETypeCache.h; sourceTree = &quot;&lt;group&gt;&quot;; };
</span><span class="cx">                 07CE77D416712A6A00C55A47 /* InbandTextTrackPrivateClient.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = InbandTextTrackPrivateClient.h; sourceTree = &quot;&lt;group&gt;&quot;; };
</span><del>-                07D1503A1DDB6688008F7598 /* RealtimeMediaSourcePreview.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RealtimeMediaSourcePreview.h; sourceTree = &quot;&lt;group&gt;&quot;; };
</del><span class="cx">                 07D6373E1BB0B11300256CE9 /* WebAudioSourceProviderAVFObjC.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WebAudioSourceProviderAVFObjC.h; sourceTree = &quot;&lt;group&gt;&quot;; };
</span><span class="cx">                 07D6373F1BB0B11300256CE9 /* WebAudioSourceProviderAVFObjC.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = WebAudioSourceProviderAVFObjC.mm; sourceTree = &quot;&lt;group&gt;&quot;; };
</span><span class="cx">                 07D6A4ED1BECF2D200174146 /* MockRealtimeMediaSource.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = MockRealtimeMediaSource.cpp; sourceTree = &quot;&lt;group&gt;&quot;; };
</span><span class="lines">@@ -15200,7 +15198,6 @@
</span><span class="cx">                                 4A4F656D1AA997F100E38CDD /* RealtimeMediaSourceCapabilities.h */,
</span><span class="cx">                                 4A0FFA9F1AAF5EA20062803B /* RealtimeMediaSourceCenter.cpp */,
</span><span class="cx">                                 4A0FFAA01AAF5EA20062803B /* RealtimeMediaSourceCenter.h */,
</span><del>-                                07D1503A1DDB6688008F7598 /* RealtimeMediaSourcePreview.h */,
</del><span class="cx">                                 4A4F656E1AA997F100E38CDD /* RealtimeMediaSourceSettings.cpp */,
</span><span class="cx">                                 4A4F656F1AA997F100E38CDD /* RealtimeMediaSourceSettings.h */,
</span><span class="cx">                                 2EC41DE21C0410A300D294FE /* RealtimeMediaSourceSupportedConstraints.cpp */,
</span><span class="lines">@@ -27226,7 +27223,6 @@
</span><span class="cx">                                 4A4F65721AA997F100E38CDD /* RealtimeMediaSourceCapabilities.h in Headers */,
</span><span class="cx">                                 4A0FFAA21AAF5EA20062803B /* RealtimeMediaSourceCenter.h in Headers */,
</span><span class="cx">                                 4A0FFAA61AAF5EF60062803B /* RealtimeMediaSourceCenterMac.h in Headers */,
</span><del>-                                07D1503B1DDB6965008F7598 /* RealtimeMediaSourcePreview.h in Headers */,
</del><span class="cx">                                 4A4F65741AA997F100E38CDD /* RealtimeMediaSourceSettings.h in Headers */,
</span><span class="cx">                                 07C1C0E51BFB60ED00BD2256 /* RealtimeMediaSourceSupportedConstraints.h in Headers */,
</span><span class="cx">                                 BC4368E80C226E32005EFB5F /* Rect.h in Headers */,
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformLoggingh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/Logging.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/Logging.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/Logging.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -62,6 +62,7 @@
</span><span class="cx">     M(Media) \
</span><span class="cx">     M(MediaSource) \
</span><span class="cx">     M(MediaSourceSamples) \
</span><ins>+    M(MediaCaptureSamples) \
</ins><span class="cx">     M(MemoryPressure) \
</span><span class="cx">     M(Network) \
</span><span class="cx">     M(NotYetImplemented) \
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformMediaSampleh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/MediaSample.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/MediaSample.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/MediaSample.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -54,8 +54,10 @@
</span><span class="cx">     virtual ~MediaSample() { }
</span><span class="cx"> 
</span><span class="cx">     virtual MediaTime presentationTime() const = 0;
</span><ins>+    virtual MediaTime outputPresentationTime() const { return presentationTime(); }
</ins><span class="cx">     virtual MediaTime decodeTime() const = 0;
</span><span class="cx">     virtual MediaTime duration() const = 0;
</span><ins>+    virtual MediaTime outputDuration() const { return duration(); }
</ins><span class="cx">     virtual AtomicString trackID() const = 0;
</span><span class="cx">     virtual void setTrackID(const String&amp;) = 0;
</span><span class="cx">     virtual size_t sizeInBytes() const = 0;
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformcfCoreMediaSoftLinkcpp"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/cf/CoreMediaSoftLink.cpp (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/cf/CoreMediaSoftLink.cpp        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/cf/CoreMediaSoftLink.cpp        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -85,8 +85,11 @@
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferGetDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferGetImageBuffer, CVImageBufferRef, (CMSampleBufferRef sbuf), (sbuf))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferGetPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
</span><ins>+SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferGetOutputDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
+SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferGetOutputPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
</ins><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferGetSampleAttachmentsArray, CFArrayRef, (CMSampleBufferRef sbuf, Boolean createIfNecessary), (sbuf, createIfNecessary))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferGetSampleTimingInfoArray, OSStatus, (CMSampleBufferRef sbuf, CMItemCount timingArrayEntries, CMSampleTimingInfo *timingArrayOut, CMItemCount *timingArrayEntriesNeededOut), (sbuf, timingArrayEntries, timingArrayOut, timingArrayEntriesNeededOut))
</span><ins>+SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMTimeConvertScale, CMTime, (CMTime time, int32_t newTimescale, CMTimeRoundingMethod method), (time, newTimescale, method))
</ins><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferGetTotalSampleSize, size_t, (CMSampleBufferRef sbuf), (sbuf))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStringRef key, CFTypeRef value, CMAttachmentMode attachmentMode), (target, key, value, attachmentMode))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMTimebaseCreateWithMasterClock, OSStatus, (CFAllocatorRef allocator, CMClockRef masterClock, CMTimebaseRef *timebaseOut), (allocator, masterClock, timebaseOut))
</span><span class="lines">@@ -93,6 +96,7 @@
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMTimebaseGetTime, CMTime, (CMTimebaseRef timebase), (timebase))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMTimebaseSetRate, OSStatus, (CMTimebaseRef timebase, Float64 rate), (timebase, rate))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMTimebaseSetTime, OSStatus, (CMTimebaseRef timebase, CMTime time), (timebase, time))
</span><ins>+SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMTimebaseGetEffectiveRate, Float64, (CMTimebaseRef timebase), (timebase))
</ins><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMTimeCopyAsDictionary, CFDictionaryRef, (CMTime time, CFAllocatorRef allocator), (time, allocator))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMVideoFormatDescriptionCreateForImageBuffer, OSStatus, (CFAllocatorRef allocator, CVImageBufferRef imageBuffer, CMVideoFormatDescriptionRef* outDesc), (allocator, imageBuffer, outDesc))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMVideoFormatDescriptionGetDimensions, CMVideoDimensions, (CMVideoFormatDescriptionRef videoDesc), (videoDesc))
</span><span class="lines">@@ -114,6 +118,13 @@
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferCallBlockForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (^handler)(CMSampleBufferRef, CMItemCount)), (sbuf, handler))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferCopySampleBufferForRange, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef sbuf, CFRange sampleRange, CMSampleBufferRef* sBufOut), (allocator, sbuf, sampleRange, sBufOut))
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferGetSampleSizeArray, OSStatus, (CMSampleBufferRef sbuf, CMItemCount sizeArrayEntries, size_t* sizeArrayOut, CMItemCount* sizeArrayEntriesNeededOut), (sbuf, sizeArrayEntries, sizeArrayOut, sizeArrayEntriesNeededOut))
</span><ins>+
+SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMAudioSampleBufferCreateWithPacketDescriptions, OSStatus, (CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMTime sbufPTS, const AudioStreamPacketDescription *packetDescriptions, CMSampleBufferRef *sBufOut), (allocator, dataBuffer, dataReady, makeDataReadyCallback, makeDataReadyRefcon, formatDescription, numSamples, sbufPTS, packetDescriptions, sBufOut))
+SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferSetDataBufferFromAudioBufferList, OSStatus, (CMSampleBufferRef sbuf, CFAllocatorRef bbufStructAllocator, CFAllocatorRef bbufMemoryAllocator, uint32_t flags, const AudioBufferList *bufferList), (sbuf, bbufStructAllocator, bbufMemoryAllocator, flags, bufferList))
+SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMSampleBufferSetDataReady, OSStatus, (CMSampleBufferRef sbuf), (sbuf))
+SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMAudioFormatDescriptionCreate, OSStatus, (CFAllocatorRef allocator, const AudioStreamBasicDescription* asbd, size_t layoutSize, const AudioChannelLayout* layout, size_t magicCookieSize, const void* magicCookie, CFDictionaryRef extensions, CMAudioFormatDescriptionRef* outDesc), (allocator, asbd, layoutSize, layout, magicCookieSize, magicCookie, extensions, outDesc))
+SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMClockGetHostTimeClock, CMClockRef, (void), ())
+SOFT_LINK_FUNCTION_FOR_SOURCE(WebCore, CoreMedia, CMClockGetTime, CMTime, (CMClockRef clock), (clock))
</ins><span class="cx"> #endif // PLATFORM(COCOA)
</span><span class="cx"> 
</span><span class="cx"> #if PLATFORM(IOS)
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformcfCoreMediaSoftLinkh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/cf/CoreMediaSoftLink.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/cf/CoreMediaSoftLink.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/cf/CoreMediaSoftLink.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -47,6 +47,8 @@
</span><span class="cx"> #define CMSampleBufferGetFormatDescription softLink_CoreMedia_CMSampleBufferGetFormatDescription
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferGetSampleTimingInfo, OSStatus, (CMSampleBufferRef sbuf, CMItemIndex sampleIndex, CMSampleTimingInfo* timingInfoOut), (sbuf, sampleIndex, timingInfoOut))
</span><span class="cx"> #define CMSampleBufferGetSampleTimingInfo softLink_CoreMedia_CMSampleBufferGetSampleTimingInfo
</span><ins>+SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMTimeConvertScale, CMTime, (CMTime time, int32_t newTimescale, CMTimeRoundingMethod method), (time, newTimescale, method))
+#define CMTimeConvertScale softLink_CoreMedia_CMTimeConvertScale
</ins><span class="cx"> SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMTimeAdd, CMTime, (CMTime time1, CMTime time2), (time1, time2))
</span><span class="cx"> #define CMTimeAdd softLink_CoreMedia_CMTimeAdd
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
</span><span class="lines">@@ -137,6 +139,10 @@
</span><span class="cx"> #define CMSampleBufferGetImageBuffer softLink_CoreMedia_CMSampleBufferGetImageBuffer
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferGetPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
</span><span class="cx"> #define CMSampleBufferGetPresentationTimeStamp softLink_CoreMedia_CMSampleBufferGetPresentationTimeStamp
</span><ins>+SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferGetOutputDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
+#define CMSampleBufferGetOutputDuration softLink_CoreMedia_CMSampleBufferGetOutputDuration
+SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferGetOutputPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
+#define CMSampleBufferGetOutputPresentationTimeStamp softLink_CoreMedia_CMSampleBufferGetOutputPresentationTimeStamp
</ins><span class="cx"> SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferGetSampleAttachmentsArray, CFArrayRef, (CMSampleBufferRef sbuf, Boolean createIfNecessary), (sbuf, createIfNecessary))
</span><span class="cx"> #define CMSampleBufferGetSampleAttachmentsArray softLink_CoreMedia_CMSampleBufferGetSampleAttachmentsArray
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferGetSampleTimingInfoArray, OSStatus, (CMSampleBufferRef sbuf, CMItemCount timingArrayEntries, CMSampleTimingInfo *timingArrayOut, CMItemCount *timingArrayEntriesNeededOut), (sbuf, timingArrayEntries, timingArrayOut, timingArrayEntriesNeededOut))
</span><span class="lines">@@ -153,6 +159,8 @@
</span><span class="cx"> #define CMTimebaseSetRate softLink_CoreMedia_CMTimebaseSetRate
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMTimebaseSetTime, OSStatus, (CMTimebaseRef timebase, CMTime time), (timebase, time))
</span><span class="cx"> #define CMTimebaseSetTime softLink_CoreMedia_CMTimebaseSetTime
</span><ins>+SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMTimebaseGetEffectiveRate, Float64, (CMTimebaseRef timebase), (timebase))
+#define CMTimebaseGetEffectiveRate softLink_CoreMedia_CMTimebaseGetEffectiveRate
</ins><span class="cx"> SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMTimeCopyAsDictionary, CFDictionaryRef, (CMTime time, CFAllocatorRef allocator), (time, allocator))
</span><span class="cx"> #define CMTimeCopyAsDictionary softLink_CoreMedia_CMTimeCopyAsDictionary
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMVideoFormatDescriptionCreateForImageBuffer, OSStatus, (CFAllocatorRef allocator, CVImageBufferRef imageBuffer, CMVideoFormatDescriptionRef *outDesc), (allocator, imageBuffer, outDesc))
</span><span class="lines">@@ -193,6 +201,18 @@
</span><span class="cx"> SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferGetSampleSizeArray, OSStatus, (CMSampleBufferRef sbuf, CMItemCount sizeArrayEntries, size_t* sizeArrayOut, CMItemCount* sizeArrayEntriesNeededOut), (sbuf, sizeArrayEntries, sizeArrayOut, sizeArrayEntriesNeededOut))
</span><span class="cx"> #define CMSampleBufferGetSampleSizeArray softLink_CoreMedia_CMSampleBufferGetSampleSizeArray
</span><span class="cx"> 
</span><ins>+SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMAudioSampleBufferCreateWithPacketDescriptions, OSStatus, (CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMTime sbufPTS, const AudioStreamPacketDescription *packetDescriptions, CMSampleBufferRef *sBufOut), (allocator, dataBuffer, dataReady, makeDataReadyCallback, makeDataReadyRefcon, formatDescription, numSamples, sbufPTS, packetDescriptions, sBufOut))
+#define CMAudioSampleBufferCreateWithPacketDescriptions softLink_CoreMedia_CMAudioSampleBufferCreateWithPacketDescriptions
+SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferSetDataBufferFromAudioBufferList, OSStatus, (CMSampleBufferRef sbuf, CFAllocatorRef bbufStructAllocator, CFAllocatorRef bbufMemoryAllocator, uint32_t flags, const AudioBufferList *bufferList), (sbuf, bbufStructAllocator, bbufMemoryAllocator, flags, bufferList))
+#define CMSampleBufferSetDataBufferFromAudioBufferList softLink_CoreMedia_CMSampleBufferSetDataBufferFromAudioBufferList
+SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMSampleBufferSetDataReady, OSStatus, (CMSampleBufferRef sbuf), (sbuf))
+#define CMSampleBufferSetDataReady softLink_CoreMedia_CMSampleBufferSetDataReady
+SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMAudioFormatDescriptionCreate, OSStatus, (CFAllocatorRef allocator, const AudioStreamBasicDescription* asbd, size_t layoutSize, const AudioChannelLayout* layout, size_t magicCookieSize, const void* magicCookie, CFDictionaryRef extensions, CMAudioFormatDescriptionRef* outDesc), (allocator, asbd, layoutSize, layout, magicCookieSize, magicCookie, extensions, outDesc))
+#define CMAudioFormatDescriptionCreate softLink_CoreMedia_CMAudioFormatDescriptionCreate
+SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMClockGetHostTimeClock, CMClockRef, (void), ())
+#define CMClockGetHostTimeClock  softLink_CoreMedia_CMClockGetHostTimeClock
+SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreMedia, CMClockGetTime, CMTime, (CMClockRef clock), (clock))
+#define CMClockGetTime  softLink_CoreMedia_CMClockGetTime
</ins><span class="cx"> #endif // PLATFORM(COCOA)
</span><span class="cx"> 
</span><span class="cx"> #if PLATFORM(IOS)
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformgraphicsavfoundationMediaSampleAVFObjCh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/avfoundation/MediaSampleAVFObjC.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/avfoundation/MediaSampleAVFObjC.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/MediaSampleAVFObjC.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -54,8 +54,10 @@
</span><span class="cx">     virtual ~MediaSampleAVFObjC() { }
</span><span class="cx"> 
</span><span class="cx">     MediaTime presentationTime() const override;
</span><ins>+    MediaTime outputPresentationTime() const override;
</ins><span class="cx">     MediaTime decodeTime() const override;
</span><span class="cx">     MediaTime duration() const override;
</span><ins>+    MediaTime outputDuration() const override;
</ins><span class="cx"> 
</span><span class="cx">     AtomicString trackID() const override { return m_id; }
</span><span class="cx">     void setTrackID(const String&amp; id) override { m_id = id; }
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformgraphicsavfoundationobjcMediaPlayerPrivateMediaStreamAVFObjCh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -1,5 +1,5 @@
</span><span class="cx"> /*
</span><del>- * Copyright (C) 2015 Apple Inc. All rights reserved.
</del><ins>+ * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
</ins><span class="cx">  *
</span><span class="cx">  * Redistribution and use in source and binary forms, with or without
</span><span class="cx">  * modification, are permitted provided that the following conditions
</span><span class="lines">@@ -39,6 +39,8 @@
</span><span class="cx"> OBJC_CLASS AVSampleBufferDisplayLayer;
</span><span class="cx"> OBJC_CLASS AVSampleBufferRenderSynchronizer;
</span><span class="cx"> OBJC_CLASS AVStreamSession;
</span><ins>+OBJC_CLASS NSNumber;
+OBJC_CLASS WebAVSampleBufferStatusChangeListener;
</ins><span class="cx"> typedef struct opaqueCMSampleBuffer *CMSampleBufferRef;
</span><span class="cx"> 
</span><span class="cx"> namespace WebCore {
</span><span class="lines">@@ -53,6 +55,10 @@
</span><span class="cx"> class VideoFullscreenLayerManager;
</span><span class="cx"> #endif
</span><span class="cx"> 
</span><ins>+#if __has_include(&lt;AVFoundation/AVSampleBufferRenderSynchronizer.h&gt;)
+#define USE_RENDER_SYNCHRONIZER 1
+#endif
+
</ins><span class="cx"> class MediaPlayerPrivateMediaStreamAVFObjC final : public MediaPlayerPrivateInterface, private MediaStreamPrivate::Observer, private MediaStreamTrackPrivate::Observer {
</span><span class="cx"> public:
</span><span class="cx">     explicit MediaPlayerPrivateMediaStreamAVFObjC(MediaPlayer*);
</span><span class="lines">@@ -75,6 +81,9 @@
</span><span class="cx">     void ensureLayer();
</span><span class="cx">     void destroyLayer();
</span><span class="cx"> 
</span><ins>+    void rendererStatusDidChange(AVSampleBufferAudioRenderer*, NSNumber*);
+    void layerStatusDidChange(AVSampleBufferDisplayLayer*, NSNumber*);
+
</ins><span class="cx"> private:
</span><span class="cx">     // MediaPlayerPrivateInterface
</span><span class="cx"> 
</span><span class="lines">@@ -97,7 +106,6 @@
</span><span class="cx">     bool paused() const override;
</span><span class="cx"> 
</span><span class="cx">     void setVolume(float) override;
</span><del>-    void internalSetVolume(float, bool);
</del><span class="cx">     void setMuted(bool) override;
</span><span class="cx">     bool supportsMuting() const override { return true; }
</span><span class="cx"> 
</span><span class="lines">@@ -122,14 +130,27 @@
</span><span class="cx"> 
</span><span class="cx">     void setSize(const IntSize&amp;) override { /* No-op */ }
</span><span class="cx"> 
</span><del>-    void enqueueAudioSampleBufferFromTrack(MediaStreamTrackPrivate&amp;, MediaSample&amp;);
</del><ins>+    void flushRenderers();
</ins><span class="cx"> 
</span><del>-    void prepareVideoSampleBufferFromTrack(MediaStreamTrackPrivate&amp;, MediaSample&amp;);
-    void enqueueVideoSampleBuffer(MediaSample&amp;);
</del><ins>+    using PendingSampleQueue = Deque&lt;Ref&lt;MediaSample&gt;&gt;;
+    void addSampleToPendingQueue(PendingSampleQueue&amp;, MediaSample&amp;);
+    void removeOldSamplesFromPendingQueue(PendingSampleQueue&amp;);
+
+    void updateSampleTimes(MediaSample&amp;, const MediaTime&amp;, const char*);
+    MediaTime calculateTimelineOffset(const MediaSample&amp;, double);
+    
+    void enqueueVideoSample(MediaStreamTrackPrivate&amp;, MediaSample&amp;);
</ins><span class="cx">     bool shouldEnqueueVideoSampleBuffer() const;
</span><span class="cx">     void flushAndRemoveVideoSampleBuffers();
</span><del>-    void requestNotificationWhenReadyForMediaData();
</del><ins>+    void requestNotificationWhenReadyForVideoData();
</ins><span class="cx"> 
</span><ins>+    void enqueueAudioSample(MediaStreamTrackPrivate&amp;, MediaSample&amp;);
+    void createAudioRenderer(AtomicString);
+    void destroyAudioRenderer(AVSampleBufferAudioRenderer*);
+    void destroyAudioRenderer(AtomicString);
+    void destroyAudioRenderers();
+    void requestNotificationWhenReadyForAudioData(AtomicString);
+
</ins><span class="cx">     void paint(GraphicsContext&amp;, const FloatRect&amp;) override;
</span><span class="cx">     void paintCurrentFrameInContext(GraphicsContext&amp;, const FloatRect&amp;) override;
</span><span class="cx">     bool metaDataAvailable() const { return m_mediaStreamPrivate &amp;&amp; m_readyState &gt;= MediaPlayer::HaveMetadata; }
</span><span class="lines">@@ -155,6 +176,7 @@
</span><span class="cx">     void updateIntrinsicSize(const FloatSize&amp;);
</span><span class="cx">     void updateTracks();
</span><span class="cx">     void renderingModeChanged();
</span><ins>+    void checkSelectedVideoTrack();
</ins><span class="cx"> 
</span><span class="cx">     void scheduleDeferredTask(Function&lt;void ()&gt;&amp;&amp;);
</span><span class="cx"> 
</span><span class="lines">@@ -186,26 +208,36 @@
</span><span class="cx">     void setVideoFullscreenFrame(FloatRect) override;
</span><span class="cx"> #endif
</span><span class="cx"> 
</span><del>-    bool haveVideoLayer() const { return m_sampleBufferDisplayLayer || m_videoPreviewPlayer; }
</del><ins>+    MediaTime streamTime() const;
</ins><span class="cx"> 
</span><ins>+#if USE(RENDER_SYNCHRONIZER)
+    AudioSourceProvider* audioSourceProvider() final;
+#endif
+
</ins><span class="cx">     MediaPlayer* m_player { nullptr };
</span><span class="cx">     WeakPtrFactory&lt;MediaPlayerPrivateMediaStreamAVFObjC&gt; m_weakPtrFactory;
</span><span class="cx">     RefPtr&lt;MediaStreamPrivate&gt; m_mediaStreamPrivate;
</span><span class="cx"> 
</span><del>-    RefPtr&lt;RealtimeMediaSourcePreview&gt; m_videoPreviewPlayer;
-    RefPtr&lt;MediaStreamTrackPrivate&gt; m_videoTrack;
</del><ins>+    RefPtr&lt;MediaStreamTrackPrivate&gt; m_activeVideoTrack;
</ins><span class="cx"> 
</span><ins>+    RetainPtr&lt;WebAVSampleBufferStatusChangeListener&gt; m_statusChangeListener;
</ins><span class="cx">     RetainPtr&lt;AVSampleBufferDisplayLayer&gt; m_sampleBufferDisplayLayer;
</span><del>-#if PLATFORM(MAC)
</del><ins>+#if USE(RENDER_SYNCHRONIZER)
+    HashMap&lt;String, RetainPtr&lt;AVSampleBufferAudioRenderer&gt;&gt; m_audioRenderers;
</ins><span class="cx">     RetainPtr&lt;AVSampleBufferRenderSynchronizer&gt; m_synchronizer;
</span><ins>+#else
+    std::unique_ptr&lt;Clock&gt; m_clock;
</ins><span class="cx"> #endif
</span><ins>+
+    MediaTime m_pausedTime;
</ins><span class="cx">     RetainPtr&lt;CGImageRef&gt; m_pausedImage;
</span><del>-    double m_pausedTime { 0 };
-    std::unique_ptr&lt;Clock&gt; m_clock;
</del><span class="cx"> 
</span><span class="cx">     HashMap&lt;String, RefPtr&lt;AudioTrackPrivateMediaStream&gt;&gt; m_audioTrackMap;
</span><span class="cx">     HashMap&lt;String, RefPtr&lt;VideoTrackPrivateMediaStream&gt;&gt; m_videoTrackMap;
</span><del>-    Deque&lt;Ref&lt;MediaSample&gt;&gt; m_sampleQueue;
</del><ins>+    PendingSampleQueue m_pendingVideoSampleQueue;
+#if USE(RENDER_SYNCHRONIZER)
+    PendingSampleQueue m_pendingAudioSampleQueue;
+#endif
</ins><span class="cx"> 
</span><span class="cx">     MediaPlayer::NetworkState m_networkState { MediaPlayer::Empty };
</span><span class="cx">     MediaPlayer::ReadyState m_readyState { MediaPlayer::HaveNothing };
</span><span class="lines">@@ -219,6 +251,7 @@
</span><span class="cx">     bool m_hasEverEnqueuedVideoFrame { false };
</span><span class="cx">     bool m_hasReceivedMedia { false };
</span><span class="cx">     bool m_isFrameDisplayed { false };
</span><ins>+    bool m_pendingSelectedTrackCheck { false };
</ins><span class="cx"> 
</span><span class="cx"> #if PLATFORM(MAC) &amp;&amp; ENABLE(VIDEO_PRESENTATION_MODE)
</span><span class="cx">     std::unique_ptr&lt;VideoFullscreenLayerManager&gt; m_videoFullscreenLayerManager;
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformgraphicsavfoundationobjcMediaPlayerPrivateMediaStreamAVFObjCmm"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaStreamAVFObjC.mm        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -1,5 +1,5 @@
</span><span class="cx"> /*
</span><del>- * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
</del><ins>+ * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
</ins><span class="cx">  *
</span><span class="cx">  * Redistribution and use in source and binary forms, with or without
</span><span class="cx">  * modification, are permitted provided that the following conditions
</span><span class="lines">@@ -52,18 +52,149 @@
</span><span class="cx"> 
</span><span class="cx"> SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
</span><span class="cx"> 
</span><ins>+SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
</ins><span class="cx"> SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
</span><span class="cx"> SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRenderSynchronizer)
</span><span class="cx"> 
</span><ins>+SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
+SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
+
+#define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
+#define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
+
+using namespace WebCore;
+
+@interface WebAVSampleBufferStatusChangeListener : NSObject {
+    MediaPlayerPrivateMediaStreamAVFObjC* _parent;
+    Vector&lt;RetainPtr&lt;AVSampleBufferDisplayLayer&gt;&gt; _layers;
+    Vector&lt;RetainPtr&lt;AVSampleBufferAudioRenderer&gt;&gt; _renderers;
+}
+
+- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)callback;
+- (void)invalidate;
+- (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
+- (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
+- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
+- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
+@end
+
+@implementation WebAVSampleBufferStatusChangeListener
+
+- (id)initWithParent:(MediaPlayerPrivateMediaStreamAVFObjC*)parent
+{
+    if (!(self = [super init]))
+        return nil;
+
+    _parent = parent;
+    return self;
+}
+
+- (void)dealloc
+{
+    [self invalidate];
+    [super dealloc];
+}
+
+- (void)invalidate
+{
+    for (auto&amp; layer : _layers)
+        [layer removeObserver:self forKeyPath:@&quot;status&quot;];
+    _layers.clear();
+
+    for (auto&amp; renderer : _renderers)
+        [renderer removeObserver:self forKeyPath:@&quot;status&quot;];
+    _renderers.clear();
+
+    [[NSNotificationCenter defaultCenter] removeObserver:self];
+
+    _parent = nullptr;
+}
+
+- (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
+{
+    ASSERT(_parent);
+    ASSERT(!_layers.contains(layer));
+
+    _layers.append(layer);
+    [layer addObserver:self forKeyPath:@&quot;status&quot; options:NSKeyValueObservingOptionNew context:nullptr];
+}
+
+- (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
+{
+    ASSERT(_parent);
+    ASSERT(_layers.contains(layer));
+
+    [layer removeObserver:self forKeyPath:@&quot;status&quot;];
+    _layers.remove(_layers.find(layer));
+}
+
+- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
+{
+    ASSERT(_parent);
+    ASSERT(!_renderers.contains(renderer));
+
+    _renderers.append(renderer);
+    [renderer addObserver:self forKeyPath:@&quot;status&quot; options:NSKeyValueObservingOptionNew context:nullptr];
+}
+
+- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
+{
+    ASSERT(_parent);
+    ASSERT(_renderers.contains(renderer));
+
+    [renderer removeObserver:self forKeyPath:@&quot;status&quot;];
+    _renderers.remove(_renderers.find(renderer));
+}
+
+- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
+{
+    UNUSED_PARAM(context);
+    UNUSED_PARAM(keyPath);
+    ASSERT(_parent);
+
+    RetainPtr&lt;WebAVSampleBufferStatusChangeListener&gt; protectedSelf = self;
+    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
+        RetainPtr&lt;AVSampleBufferDisplayLayer&gt; layer = (AVSampleBufferDisplayLayer *)object;
+        RetainPtr&lt;NSNumber&gt; status = [change valueForKey:NSKeyValueChangeNewKey];
+
+        ASSERT(_layers.contains(layer.get()));
+        ASSERT([keyPath isEqualToString:@&quot;status&quot;]);
+
+        callOnMainThread([protectedSelf = WTFMove(protectedSelf), layer = WTFMove(layer), status = WTFMove(status)] {
+            protectedSelf-&gt;_parent-&gt;layerStatusDidChange(layer.get(), status.get());
+        });
+
+    } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
+        RetainPtr&lt;AVSampleBufferAudioRenderer&gt; renderer = (AVSampleBufferAudioRenderer *)object;
+        RetainPtr&lt;NSNumber&gt; status = [change valueForKey:NSKeyValueChangeNewKey];
+
+        ASSERT(_renderers.contains(renderer.get()));
+        ASSERT([keyPath isEqualToString:@&quot;status&quot;]);
+
+        callOnMainThread([protectedSelf = WTFMove(protectedSelf), renderer = WTFMove(renderer), status = WTFMove(status)] {
+            protectedSelf-&gt;_parent-&gt;rendererStatusDidChange(renderer.get(), status.get());
+        });
+    } else
+        ASSERT_NOT_REACHED();
+}
+@end
+
</ins><span class="cx"> namespace WebCore {
</span><span class="cx"> 
</span><span class="cx"> #pragma mark -
</span><span class="cx"> #pragma mark MediaPlayerPrivateMediaStreamAVFObjC
</span><span class="cx"> 
</span><ins>+static const double rendererLatency = 0.02;
+
</ins><span class="cx"> MediaPlayerPrivateMediaStreamAVFObjC::MediaPlayerPrivateMediaStreamAVFObjC(MediaPlayer* player)
</span><span class="cx">     : m_player(player)
</span><span class="cx">     , m_weakPtrFactory(this)
</span><ins>+    , m_statusChangeListener(adoptNS([[WebAVSampleBufferStatusChangeListener alloc] initWithParent:this]))
+#if USE(RENDER_SYNCHRONIZER)
+    , m_synchronizer(adoptNS([allocAVSampleBufferRenderSynchronizerInstance() init]))
+#else
</ins><span class="cx">     , m_clock(Clock::create())
</span><ins>+#endif
</ins><span class="cx"> #if PLATFORM(MAC) &amp;&amp; ENABLE(VIDEO_PRESENTATION_MODE)
</span><span class="cx">     , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
</span><span class="cx"> #endif
</span><span class="lines">@@ -81,10 +212,13 @@
</span><span class="cx">             track-&gt;removeObserver(*this);
</span><span class="cx">     }
</span><span class="cx"> 
</span><ins>+    destroyLayer();
+#if USE(RENDER_SYNCHRONIZER)
+    destroyAudioRenderers();
+#endif
+
</ins><span class="cx">     m_audioTrackMap.clear();
</span><span class="cx">     m_videoTrackMap.clear();
</span><del>-
-    destroyLayer();
</del><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> #pragma mark -
</span><span class="lines">@@ -127,34 +261,107 @@
</span><span class="cx"> #pragma mark -
</span><span class="cx"> #pragma mark AVSampleBuffer Methods
</span><span class="cx"> 
</span><del>-void MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSampleBufferFromTrack(MediaStreamTrackPrivate&amp;, MediaSample&amp;)
</del><ins>+void MediaPlayerPrivateMediaStreamAVFObjC::removeOldSamplesFromPendingQueue(PendingSampleQueue&amp; queue)
</ins><span class="cx"> {
</span><del>-    // FIXME: https://bugs.webkit.org/show_bug.cgi?id=159836
</del><ins>+    MediaTime now = streamTime();
+    while (!queue.isEmpty()) {
+        if (queue.first()-&gt;decodeTime() &gt; now)
+            break;
+        queue.removeFirst();
+    };
</ins><span class="cx"> }
</span><span class="cx"> 
</span><del>-void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForMediaData()
</del><ins>+void MediaPlayerPrivateMediaStreamAVFObjC::addSampleToPendingQueue(PendingSampleQueue&amp; queue, MediaSample&amp; sample)
</ins><span class="cx"> {
</span><del>-    [m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
-        [m_sampleBufferDisplayLayer stopRequestingMediaData];
</del><ins>+    removeOldSamplesFromPendingQueue(queue);
+    queue.append(sample);
+}
</ins><span class="cx"> 
</span><del>-        while (!m_sampleQueue.isEmpty()) {
-            if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
-                requestNotificationWhenReadyForMediaData();
-                return;
-            }
</del><ins>+void MediaPlayerPrivateMediaStreamAVFObjC::updateSampleTimes(MediaSample&amp; sample, const MediaTime&amp; timelineOffset, const char* loggingPrefix)
+{
+    LOG(MediaCaptureSamples, &quot;%s(%p): original sample = %s&quot;, loggingPrefix, this, toString(sample).utf8().data());
+    sample.offsetTimestampsBy(timelineOffset);
+    LOG(MediaCaptureSamples, &quot;%s(%p): adjusted sample = %s&quot;, loggingPrefix, this, toString(sample).utf8().data());
</ins><span class="cx"> 
</span><del>-            auto sample = m_sampleQueue.takeFirst();
-            enqueueVideoSampleBuffer(sample.get());
-        }
-    }];
</del><ins>+#if !LOG_DISABLED
+    MediaTime now = streamTime();
+    double delta = (sample.presentationTime() - now).toDouble();
+    if (delta &lt; 0)
+        LOG(Media, &quot;%s(%p): *NOTE* audio sample at time %s is %f seconds late&quot;, loggingPrefix, this, toString(now).utf8().data(), -delta);
+    else if (delta &lt; .01)
+        LOG(Media, &quot;%s(%p): *NOTE* audio sample at time %s is only %s seconds early&quot;, loggingPrefix, this, toString(now).utf8().data(), delta);
+    else if (delta &gt; .3)
+        LOG(Media, &quot;%s(%p): *NOTE* audio sample at time %s is %s seconds early!&quot;, loggingPrefix, this, toString(now).utf8().data(), delta);
+#else
+    UNUSED_PARAM(loggingPrefix);
+#endif
+
</ins><span class="cx"> }
</span><span class="cx"> 
</span><del>-void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSampleBuffer(MediaSample&amp; sample)
</del><ins>+MediaTime MediaPlayerPrivateMediaStreamAVFObjC::calculateTimelineOffset(const MediaSample&amp; sample, double latency)
</ins><span class="cx"> {
</span><ins>+    MediaTime sampleTime = sample.outputPresentationTime();
+    if (!sampleTime || !sampleTime.isValid())
+        sampleTime = sample.presentationTime();
+    MediaTime timelineOffset = streamTime() - sampleTime + MediaTime::createWithDouble(latency);
+    if (timelineOffset.timeScale() != sampleTime.timeScale())
+        timelineOffset = toMediaTime(CMTimeConvertScale(toCMTime(timelineOffset), sampleTime.timeScale(), kCMTimeRoundingMethod_Default));
+    return timelineOffset;
+}
+
+#if USE(RENDER_SYNCHRONIZER)
+void MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSample(MediaStreamTrackPrivate&amp; track, MediaSample&amp; sample)
+{
+    ASSERT(m_audioTrackMap.contains(track.id()));
+    ASSERT(m_audioRenderers.contains(sample.trackID()));
+
+    auto audioTrack = m_audioTrackMap.get(track.id());
+    MediaTime timelineOffset = audioTrack-&gt;timelineOffset();
+    if (timelineOffset == MediaTime::invalidTime()) {
+        timelineOffset = calculateTimelineOffset(sample, rendererLatency);
+        audioTrack-&gt;setTimelineOffset(timelineOffset);
+        LOG(MediaCaptureSamples, &quot;MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSample: timeline offset for track %s set to (%lld/%d)&quot;, track.id().utf8().data(), timelineOffset.timeValue(), timelineOffset.timeScale());
+    }
+
+    updateSampleTimes(sample, timelineOffset, &quot;MediaPlayerPrivateMediaStreamAVFObjC::enqueueAudioSample&quot;);
+
+    auto renderer = m_audioRenderers.get(sample.trackID());
+    if (![renderer isReadyForMoreMediaData]) {
+        addSampleToPendingQueue(m_pendingAudioSampleQueue, sample);
+        requestNotificationWhenReadyForAudioData(sample.trackID());
+        return;
+    }
+
+    [renderer enqueueSampleBuffer:sample.platformSample().sample.cmSampleBuffer];
+}
+#endif
+
+void MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample(MediaStreamTrackPrivate&amp; track, MediaSample&amp; sample)
+{
+    ASSERT(m_videoTrackMap.contains(track.id()));
+
+    if (&amp;track != m_mediaStreamPrivate-&gt;activeVideoTrack())
+        return;
+
+    m_hasReceivedMedia = true;
+    updateReadyState();
+    if (m_displayMode != LivePreview || (m_displayMode == PausedImage &amp;&amp; m_isFrameDisplayed))
+        return;
+
+    auto videoTrack = m_videoTrackMap.get(track.id());
+    MediaTime timelineOffset = videoTrack-&gt;timelineOffset();
+    if (timelineOffset == MediaTime::invalidTime()) {
+        timelineOffset = calculateTimelineOffset(sample, rendererLatency);
+        videoTrack-&gt;setTimelineOffset(timelineOffset);
+        LOG(MediaCaptureSamples, &quot;MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample: timeline offset for track %s set to %f&quot;, track.id().utf8().data(), timelineOffset.toDouble());
+    }
+
+    updateSampleTimes(sample, timelineOffset, &quot;MediaPlayerPrivateMediaStreamAVFObjC::enqueueVideoSample&quot;);
+
</ins><span class="cx">     if (m_sampleBufferDisplayLayer) {
</span><span class="cx">         if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
</span><del>-            m_sampleQueue.append(sample);
-            requestNotificationWhenReadyForMediaData();
</del><ins>+            addSampleToPendingQueue(m_pendingVideoSampleQueue, sample);
+            requestNotificationWhenReadyForVideoData();
</ins><span class="cx">             return;
</span><span class="cx">         }
</span><span class="cx"> 
</span><span class="lines">@@ -164,19 +371,144 @@
</span><span class="cx">     m_isFrameDisplayed = true;
</span><span class="cx">     if (!m_hasEverEnqueuedVideoFrame) {
</span><span class="cx">         m_hasEverEnqueuedVideoFrame = true;
</span><ins>+        if (m_displayMode == PausedImage)
+            updatePausedImage();
</ins><span class="cx">         m_player-&gt;firstVideoFrameAvailable();
</span><del>-        updatePausedImage();
</del><span class="cx">     }
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-void MediaPlayerPrivateMediaStreamAVFObjC::prepareVideoSampleBufferFromTrack(MediaStreamTrackPrivate&amp; track, MediaSample&amp; sample)
</del><ins>+void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForVideoData()
</ins><span class="cx"> {
</span><del>-    if (&amp;track != m_mediaStreamPrivate-&gt;activeVideoTrack() || !shouldEnqueueVideoSampleBuffer())
</del><ins>+    [m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
+        [m_sampleBufferDisplayLayer stopRequestingMediaData];
+
+        while (!m_pendingVideoSampleQueue.isEmpty()) {
+            if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
+                requestNotificationWhenReadyForVideoData();
+                return;
+            }
+
+            auto sample = m_pendingVideoSampleQueue.takeFirst();
+            enqueueVideoSample(*m_activeVideoTrack.get(), sample.get());
+        }
+    }];
+}
+
+#if USE(RENDER_SYNCHRONIZER)
+void MediaPlayerPrivateMediaStreamAVFObjC::requestNotificationWhenReadyForAudioData(AtomicString trackID)
+{
+    if (!m_audioRenderers.contains(trackID))
</ins><span class="cx">         return;
</span><span class="cx"> 
</span><del>-    enqueueVideoSampleBuffer(sample);
</del><ins>+    auto renderer = m_audioRenderers.get(trackID);
+    [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^ {
+        [renderer stopRequestingMediaData];
+
+        auto audioTrack = m_audioTrackMap.get(trackID);
+        while (!m_pendingAudioSampleQueue.isEmpty()) {
+            if (![renderer isReadyForMoreMediaData]) {
+                requestNotificationWhenReadyForAudioData(trackID);
+                return;
+            }
+
+            auto sample = m_pendingAudioSampleQueue.takeFirst();
+            enqueueAudioSample(audioTrack-&gt;streamTrack(), sample.get());
+        }
+    }];
</ins><span class="cx"> }
</span><span class="cx"> 
</span><ins>+void MediaPlayerPrivateMediaStreamAVFObjC::createAudioRenderer(AtomicString trackID)
+{
+    ASSERT(!m_audioRenderers.contains(trackID));
+    auto renderer = adoptNS([allocAVSampleBufferAudioRendererInstance() init]);
+    [renderer setAudioTimePitchAlgorithm:(m_player-&gt;preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
+    m_audioRenderers.set(trackID, renderer);
+    [m_synchronizer addRenderer:renderer.get()];
+    [m_statusChangeListener beginObservingRenderer:renderer.get()];
+    if (m_audioRenderers.size() == 1)
+        renderingModeChanged();
+}
+
+void MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderer(AVSampleBufferAudioRenderer* renderer)
+{
+    [m_statusChangeListener stopObservingRenderer:renderer];
+    [renderer flush];
+    [renderer stopRequestingMediaData];
+
+    CMTime now = CMTimebaseGetTime([m_synchronizer timebase]);
+    [m_synchronizer removeRenderer:renderer atTime:now withCompletionHandler:^(BOOL) { }];
+}
+
+void MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderer(AtomicString trackID)
+{
+    if (!m_audioRenderers.contains(trackID))
+        return;
+
+    destroyAudioRenderer(m_audioRenderers.get(trackID).get());
+    m_audioRenderers.remove(trackID);
+    if (!m_audioRenderers.size())
+        renderingModeChanged();
+}
+
+void MediaPlayerPrivateMediaStreamAVFObjC::destroyAudioRenderers()
+{
+    m_pendingAudioSampleQueue.clear();
+    for (auto&amp; renderer : m_audioRenderers.values())
+        destroyAudioRenderer(renderer.get());
+    m_audioRenderers.clear();
+}
+
+AudioSourceProvider* MediaPlayerPrivateMediaStreamAVFObjC::audioSourceProvider()
+{
+    // FIXME: This should return a mix of all audio tracks - https://bugs.webkit.org/show_bug.cgi?id=160305
+    for (const auto&amp; track : m_audioTrackMap.values()) {
+        if (track-&gt;streamTrack().ended() || !track-&gt;streamTrack().enabled() || track-&gt;streamTrack().muted())
+            continue;
+
+        return track-&gt;streamTrack().audioSourceProvider();
+    }
+    return nullptr;
+}
+#endif
+
+void MediaPlayerPrivateMediaStreamAVFObjC::rendererStatusDidChange(AVSampleBufferAudioRenderer* renderer, NSNumber* status)
+{
+#if USE(RENDER_SYNCHRONIZER)
+    String trackID;
+    for (auto&amp; pair : m_audioRenderers) {
+        if (pair.value == renderer) {
+            trackID = pair.key;
+            break;
+        }
+    }
+    ASSERT(!trackID.isEmpty());
+    if (status.integerValue == AVQueuedSampleBufferRenderingStatusRendering)
+        m_audioTrackMap.get(trackID)-&gt;setTimelineOffset(MediaTime::invalidTime());
+#else
+    UNUSED_PARAM(renderer);
+    UNUSED_PARAM(status);
+#endif
+}
+
+void MediaPlayerPrivateMediaStreamAVFObjC::layerStatusDidChange(AVSampleBufferDisplayLayer* layer, NSNumber* status)
+{
+    ASSERT_UNUSED(layer, layer == m_sampleBufferDisplayLayer);
+    ASSERT(m_activeVideoTrack);
+    if (status.integerValue == AVQueuedSampleBufferRenderingStatusRendering)
+        m_videoTrackMap.get(m_activeVideoTrack-&gt;id())-&gt;setTimelineOffset(MediaTime::invalidTime());
+}
+
+void MediaPlayerPrivateMediaStreamAVFObjC::flushRenderers()
+{
+    if (m_sampleBufferDisplayLayer)
+        [m_sampleBufferDisplayLayer flush];
+
+#if USE(RENDER_SYNCHRONIZER)
+    for (auto&amp; renderer : m_audioRenderers.values())
+        [renderer flush];
+#endif
+}
+
</ins><span class="cx"> bool MediaPlayerPrivateMediaStreamAVFObjC::shouldEnqueueVideoSampleBuffer() const
</span><span class="cx"> {
</span><span class="cx">     if (m_displayMode == LivePreview)
</span><span class="lines">@@ -196,48 +528,38 @@
</span><span class="cx"> 
</span><span class="cx"> void MediaPlayerPrivateMediaStreamAVFObjC::ensureLayer()
</span><span class="cx"> {
</span><del>-    if (!m_mediaStreamPrivate || haveVideoLayer())
</del><ins>+    if (m_sampleBufferDisplayLayer)
</ins><span class="cx">         return;
</span><span class="cx"> 
</span><del>-    CALayer *videoLayer = nil;
-    if (m_mediaStreamPrivate-&gt;activeVideoTrack()) {
-        m_videoPreviewPlayer = m_mediaStreamPrivate-&gt;activeVideoTrack()-&gt;preview();
-        if (m_videoPreviewPlayer)
-            videoLayer = m_videoPreviewPlayer-&gt;platformLayer();
-    }
-
-    if (!videoLayer) {
-        m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
-        videoLayer = m_sampleBufferDisplayLayer.get();
</del><ins>+    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
</ins><span class="cx"> #ifndef NDEBUG
</span><del>-        [m_sampleBufferDisplayLayer setName:@&quot;MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer&quot;];
</del><ins>+    [m_sampleBufferDisplayLayer setName:@&quot;MediaPlayerPrivateMediaStreamAVFObjC AVSampleBufferDisplayLayer&quot;];
</ins><span class="cx"> #endif
</span><del>-        m_sampleBufferDisplayLayer.get().backgroundColor = cachedCGColor(Color::black);
</del><ins>+    m_sampleBufferDisplayLayer.get().backgroundColor = cachedCGColor(Color::black);
+    [m_statusChangeListener beginObservingLayer:m_sampleBufferDisplayLayer.get()];
</ins><span class="cx"> 
</span><del>-#if PLATFORM(MAC)
-        m_synchronizer = adoptNS([allocAVSampleBufferRenderSynchronizerInstance() init]);
-        [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
</del><ins>+#if USE(RENDER_SYNCHRONIZER)
+    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
</ins><span class="cx"> #endif
</span><del>-    }
</del><span class="cx"> 
</span><span class="cx">     renderingModeChanged();
</span><span class="cx">     
</span><span class="cx"> #if PLATFORM(MAC) &amp;&amp; ENABLE(VIDEO_PRESENTATION_MODE)
</span><del>-    m_videoFullscreenLayerManager-&gt;setVideoLayer(videoLayer, snappedIntRect(m_player-&gt;client().mediaPlayerContentBoxRect()).size());
</del><ins>+    m_videoFullscreenLayerManager-&gt;setVideoLayer(m_sampleBufferDisplayLayer.get(), snappedIntRect(m_player-&gt;client().mediaPlayerContentBoxRect()).size());
</ins><span class="cx"> #endif
</span><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> void MediaPlayerPrivateMediaStreamAVFObjC::destroyLayer()
</span><span class="cx"> {
</span><del>-    if (!haveVideoLayer())
</del><ins>+    if (!m_sampleBufferDisplayLayer)
</ins><span class="cx">         return;
</span><span class="cx"> 
</span><del>-    m_videoPreviewPlayer = nullptr;
-
</del><span class="cx">     if (m_sampleBufferDisplayLayer) {
</span><ins>+        m_pendingVideoSampleQueue.clear();
+        [m_statusChangeListener stopObservingLayer:m_sampleBufferDisplayLayer.get()];
</ins><span class="cx">         [m_sampleBufferDisplayLayer stopRequestingMediaData];
</span><span class="cx">         [m_sampleBufferDisplayLayer flush];
</span><del>-#if PLATFORM(MAC)
</del><ins>+#if USE(RENDER_SYNCHRONIZER)
</ins><span class="cx">         CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
</span><span class="cx">         [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL) {
</span><span class="cx">             // No-op.
</span><span class="lines">@@ -305,14 +627,12 @@
</span><span class="cx"> 
</span><span class="cx"> PlatformLayer* MediaPlayerPrivateMediaStreamAVFObjC::platformLayer() const
</span><span class="cx"> {
</span><del>-    if (!haveVideoLayer() || m_displayMode == None)
</del><ins>+    if (!m_sampleBufferDisplayLayer || m_displayMode == None)
</ins><span class="cx">         return nullptr;
</span><span class="cx"> 
</span><span class="cx"> #if PLATFORM(MAC) &amp;&amp; ENABLE(VIDEO_PRESENTATION_MODE)
</span><span class="cx">     return m_videoFullscreenLayerManager-&gt;videoInlineLayer();
</span><span class="cx"> #else
</span><del>-    if (m_videoPreviewPlayer)
-        return m_videoPreviewPlayer-&gt;platformLayer();
</del><span class="cx"> 
</span><span class="cx">     return m_sampleBufferDisplayLayer.get();
</span><span class="cx"> #endif
</span><span class="lines">@@ -320,7 +640,7 @@
</span><span class="cx"> 
</span><span class="cx"> MediaPlayerPrivateMediaStreamAVFObjC::DisplayMode MediaPlayerPrivateMediaStreamAVFObjC::currentDisplayMode() const
</span><span class="cx"> {
</span><del>-    if (m_ended || m_intrinsicSize.isEmpty() || !metaDataAvailable() || !haveVideoLayer())
</del><ins>+    if (m_ended || m_intrinsicSize.isEmpty() || !metaDataAvailable() || !m_sampleBufferDisplayLayer)
</ins><span class="cx">         return None;
</span><span class="cx"> 
</span><span class="cx">     if (m_mediaStreamPrivate-&gt;activeVideoTrack() &amp;&amp; !m_mediaStreamPrivate-&gt;activeVideoTrack()-&gt;enabled())
</span><span class="lines">@@ -368,23 +688,15 @@
</span><span class="cx">     if (!metaDataAvailable() || m_playing || m_ended)
</span><span class="cx">         return;
</span><span class="cx"> 
</span><del>-    m_clock-&gt;start();
</del><span class="cx">     m_playing = true;
</span><del>-
-    if (m_videoPreviewPlayer)
-        m_videoPreviewPlayer-&gt;play();
-#if PLATFORM(MAC)
-    else
-        [m_synchronizer setRate:1];
</del><ins>+#if USE(RENDER_SYNCHRONIZER)
+    if (!m_synchronizer.get().rate)
+        [m_synchronizer setRate:1 ]; // streamtime
+#else
+    if (!m_clock-&gt;isRunning())
+        m_clock-&gt;start();
</ins><span class="cx"> #endif
</span><span class="cx"> 
</span><del>-    for (const auto&amp; track : m_audioTrackMap.values()) {
-        if (!track-&gt;enabled() || !track-&gt;streamTrack().preview())
-            continue;
-
-        track-&gt;streamTrack().preview()-&gt;play();
-    }
-
</del><span class="cx">     m_haveEverPlayed = true;
</span><span class="cx">     scheduleDeferredTask([this] {
</span><span class="cx">         updateDisplayMode();
</span><span class="lines">@@ -399,25 +711,12 @@
</span><span class="cx">     if (!metaDataAvailable() || !m_playing || m_ended)
</span><span class="cx">         return;
</span><span class="cx"> 
</span><del>-    m_pausedTime = m_clock-&gt;currentTime();
</del><ins>+    m_pausedTime = currentMediaTime();
</ins><span class="cx">     m_playing = false;
</span><span class="cx"> 
</span><del>-    if (m_videoPreviewPlayer)
-        m_videoPreviewPlayer-&gt;pause();
-#if PLATFORM(MAC)
-    else
-        [m_synchronizer setRate:0];
-#endif
-
-    for (const auto&amp; track : m_audioTrackMap.values()) {
-        if (!track-&gt;enabled() || !track-&gt;streamTrack().preview())
-            continue;
-
-        track-&gt;streamTrack().preview()-&gt;pause();
-    }
-
</del><span class="cx">     updateDisplayMode();
</span><span class="cx">     updatePausedImage();
</span><ins>+    flushRenderers();
</ins><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> bool MediaPlayerPrivateMediaStreamAVFObjC::paused() const
</span><span class="lines">@@ -425,27 +724,21 @@
</span><span class="cx">     return !m_playing;
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-void MediaPlayerPrivateMediaStreamAVFObjC::internalSetVolume(float volume, bool internal)
</del><ins>+void MediaPlayerPrivateMediaStreamAVFObjC::setVolume(float volume)
</ins><span class="cx"> {
</span><del>-    if (!internal)
-        m_volume = volume;
</del><ins>+    LOG(Media, &quot;MediaPlayerPrivateMediaStreamAVFObjC::setVolume(%p)&quot;, this);
</ins><span class="cx"> 
</span><del>-    if (!metaDataAvailable())
</del><ins>+    if (m_volume == volume)
</ins><span class="cx">         return;
</span><span class="cx"> 
</span><del>-    for (const auto&amp; track : m_audioTrackMap.values()) {
-        if (!track-&gt;enabled() || !track-&gt;streamTrack().preview())
-            continue;
</del><ins>+    m_volume = volume;
</ins><span class="cx"> 
</span><del>-        track-&gt;streamTrack().preview()-&gt;setVolume(volume);
-    }
</del><ins>+#if USE(RENDER_SYNCHRONIZER)
+    for (auto&amp; renderer : m_audioRenderers.values())
+        [renderer setVolume:volume];
+#endif
</ins><span class="cx"> }
</span><span class="cx"> 
</span><del>-void MediaPlayerPrivateMediaStreamAVFObjC::setVolume(float volume)
-{
-    internalSetVolume(volume, false);
-}
-
</del><span class="cx"> void MediaPlayerPrivateMediaStreamAVFObjC::setMuted(bool muted)
</span><span class="cx"> {
</span><span class="cx">     LOG(Media, &quot;MediaPlayerPrivateMediaStreamAVFObjC::setMuted(%p)&quot;, this);
</span><span class="lines">@@ -455,7 +748,10 @@
</span><span class="cx"> 
</span><span class="cx">     m_muted = muted;
</span><span class="cx">     
</span><del>-    internalSetVolume(muted ? 0 : m_volume, true);
</del><ins>+#if USE(RENDER_SYNCHRONIZER)
+    for (auto&amp; renderer : m_audioRenderers.values())
+        [renderer setMuted:muted];
+#endif
</ins><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> bool MediaPlayerPrivateMediaStreamAVFObjC::hasVideo() const
</span><span class="lines">@@ -481,9 +777,21 @@
</span><span class="cx"> 
</span><span class="cx"> MediaTime MediaPlayerPrivateMediaStreamAVFObjC::currentMediaTime() const
</span><span class="cx"> {
</span><del>-    return MediaTime::createWithDouble(m_playing ? m_clock-&gt;currentTime() : m_pausedTime);
</del><ins>+    if (!m_playing)
+        return m_pausedTime;
+
+    return streamTime();
</ins><span class="cx"> }
</span><span class="cx"> 
</span><ins>+MediaTime MediaPlayerPrivateMediaStreamAVFObjC::streamTime() const
+{
+#if USE(RENDER_SYNCHRONIZER)
+    return toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
+#else
+    return MediaTime::createWithDouble(m_clock-&gt;currentTime());
+#endif
+}
+
</ins><span class="cx"> MediaPlayer::NetworkState MediaPlayerPrivateMediaStreamAVFObjC::networkState() const
</span><span class="cx"> {
</span><span class="cx">     return m_networkState;
</span><span class="lines">@@ -598,25 +906,36 @@
</span><span class="cx">     ASSERT(mediaSample.platformSample().type == PlatformSample::CMSampleBufferType);
</span><span class="cx">     ASSERT(m_mediaStreamPrivate);
</span><span class="cx"> 
</span><ins>+    if (!m_hasReceivedMedia) {
+        m_hasReceivedMedia = true;
+        updateReadyState();
+    }
+
+    if (!m_playing || streamTime().toDouble() &lt; 0)
+        return;
+
+#if USE(RENDER_SYNCHRONIZER)
+    if (!CMTimebaseGetEffectiveRate([m_synchronizer timebase]))
+        return;
+#endif
+
</ins><span class="cx">     switch (track.type()) {
</span><span class="cx">     case RealtimeMediaSource::None:
</span><span class="cx">         // Do nothing.
</span><span class="cx">         break;
</span><span class="cx">     case RealtimeMediaSource::Audio:
</span><del>-        // FIXME: https://bugs.webkit.org/show_bug.cgi?id=159836
</del><ins>+#if USE(RENDER_SYNCHRONIZER)
+        enqueueAudioSample(track, mediaSample);
+#endif
</ins><span class="cx">         break;
</span><span class="cx">     case RealtimeMediaSource::Video:
</span><del>-        prepareVideoSampleBufferFromTrack(track, mediaSample);
-        m_hasReceivedMedia = true;
-        scheduleDeferredTask([this] {
-            updateReadyState();
-        });
</del><ins>+        if (&amp;track == m_activeVideoTrack.get())
+            enqueueVideoSample(track, mediaSample);
</ins><span class="cx">         break;
</span><span class="cx">     }
</span><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> #if PLATFORM(MAC) &amp;&amp; ENABLE(VIDEO_PRESENTATION_MODE)
</span><del>-
</del><span class="cx"> void MediaPlayerPrivateMediaStreamAVFObjC::setVideoFullscreenLayer(PlatformLayer *videoFullscreenLayer, std::function&lt;void()&gt; completionHandler)
</span><span class="cx"> {
</span><span class="cx">     m_videoFullscreenLayerManager-&gt;setVideoFullscreenLayer(videoFullscreenLayer, completionHandler);
</span><span class="lines">@@ -626,11 +945,16 @@
</span><span class="cx"> {
</span><span class="cx">     m_videoFullscreenLayerManager-&gt;setVideoFullscreenFrame(frame);
</span><span class="cx"> }
</span><del>-
</del><span class="cx"> #endif
</span><span class="cx"> 
</span><del>-template &lt;typename RefT, typename PassRefT&gt;
-void updateTracksOfType(HashMap&lt;String, RefT&gt;&amp; trackMap, RealtimeMediaSource::Type trackType, MediaStreamTrackPrivateVector&amp; currentTracks, RefT (*itemFactory)(MediaStreamTrackPrivate&amp;), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT), std::function&lt;void(RefT, int)&gt; configureCallback, MediaStreamTrackPrivate::Observer* trackObserver)
</del><ins>+typedef enum {
+    Add,
+    Remove,
+    Configure
+} TrackState;
+
+template &lt;typename RefT&gt;
+void updateTracksOfType(HashMap&lt;String, RefT&gt;&amp; trackMap, RealtimeMediaSource::Type trackType, MediaStreamTrackPrivateVector&amp; currentTracks, RefT (*itemFactory)(MediaStreamTrackPrivate&amp;), const Function&lt;void(RefT, int, TrackState)&gt;&amp; configureTrack)
</ins><span class="cx"> {
</span><span class="cx">     Vector&lt;RefT&gt; removedTracks;
</span><span class="cx">     Vector&lt;RefT&gt; addedTracks;
</span><span class="lines">@@ -660,18 +984,42 @@
</span><span class="cx">     }
</span><span class="cx"> 
</span><span class="cx">     int index = 0;
</span><ins>+    for (auto&amp; track : removedTracks)
+        configureTrack(track, index++, TrackState::Remove);
+
+    index = 0;
+    for (auto&amp; track : addedTracks)
+        configureTrack(track, index++, TrackState::Add);
+
+    index = 0;
</ins><span class="cx">     for (const auto&amp; track : trackMap.values())
</span><del>-        configureCallback(track, index++);
</del><ins>+        configureTrack(track, index++, TrackState::Configure);
+}
</ins><span class="cx"> 
</span><del>-    for (auto&amp; track : removedTracks) {
-        (player-&gt;*removedFunction)(*track);
-        track-&gt;streamTrack().removeObserver(*trackObserver);
-    }
</del><ins>+void MediaPlayerPrivateMediaStreamAVFObjC::checkSelectedVideoTrack()
+{
+    if (m_pendingSelectedTrackCheck)
+        return;
</ins><span class="cx"> 
</span><del>-    for (auto&amp; track : addedTracks) {
-        (player-&gt;*addedFunction)(*track);
-        track-&gt;streamTrack().addObserver(*trackObserver);
-    }
</del><ins>+    m_pendingSelectedTrackCheck = true;
+    scheduleDeferredTask([this] {
+        bool hideVideoLayer = true;
+        m_activeVideoTrack = nullptr;
+        if (m_mediaStreamPrivate-&gt;activeVideoTrack()) {
+            for (const auto&amp; track : m_videoTrackMap.values()) {
+                if (&amp;track-&gt;streamTrack() == m_mediaStreamPrivate-&gt;activeVideoTrack()) {
+                    m_activeVideoTrack = m_mediaStreamPrivate-&gt;activeVideoTrack();
+                    if (track-&gt;selected())
+                        hideVideoLayer = false;
+                    break;
+                }
+            }
+        }
+
+        ensureLayer();
+        m_sampleBufferDisplayLayer.get().hidden = hideVideoLayer;
+        m_pendingSelectedTrackCheck = false;
+    });
</ins><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> void MediaPlayerPrivateMediaStreamAVFObjC::updateTracks()
</span><span class="lines">@@ -678,23 +1026,58 @@
</span><span class="cx"> {
</span><span class="cx">     MediaStreamTrackPrivateVector currentTracks = m_mediaStreamPrivate-&gt;tracks();
</span><span class="cx"> 
</span><del>-    std::function&lt;void(RefPtr&lt;AudioTrackPrivateMediaStream&gt;, int)&gt; enableAudioTrack = [this](auto track, int index)
</del><ins>+    Function&lt;void(RefPtr&lt;AudioTrackPrivateMediaStream&gt;, int, TrackState)&gt;  setAudioTrackState = [this](auto track, int index, TrackState state)
</ins><span class="cx">     {
</span><del>-        track-&gt;setTrackIndex(index);
-        track-&gt;setEnabled(track-&gt;streamTrack().enabled() &amp;&amp; !track-&gt;streamTrack().muted());
</del><ins>+        switch (state) {
+        case TrackState::Remove:
+            track-&gt;streamTrack().removeObserver(*this);
+            m_player-&gt;removeAudioTrack(*track);
+#if USE(RENDER_SYNCHRONIZER)
+            destroyAudioRenderer(track-&gt;id());
+#endif
+            break;
+        case TrackState::Add:
+            track-&gt;streamTrack().addObserver(*this);
+            m_player-&gt;addAudioTrack(*track);
+#if USE(RENDER_SYNCHRONIZER)
+            createAudioRenderer(track-&gt;id());
+#endif
+            break;
+        case TrackState::Configure:
+            track-&gt;setTrackIndex(index);
+            bool enabled = track-&gt;streamTrack().enabled() &amp;&amp; !track-&gt;streamTrack().muted();
+            track-&gt;setEnabled(enabled);
+#if USE(RENDER_SYNCHRONIZER)
+            auto renderer = m_audioRenderers.get(track-&gt;id());
+            ASSERT(renderer);
+            renderer.get().muted = !enabled;
+#endif
+            break;
+        }
</ins><span class="cx">     };
</span><del>-    updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Audio, currentTracks, &amp;AudioTrackPrivateMediaStream::create, m_player, &amp;MediaPlayer::removeAudioTrack, &amp;MediaPlayer::addAudioTrack, enableAudioTrack, (MediaStreamTrackPrivate::Observer*) this);
</del><ins>+    updateTracksOfType(m_audioTrackMap, RealtimeMediaSource::Audio, currentTracks, &amp;AudioTrackPrivateMediaStream::create, setAudioTrackState);
</ins><span class="cx"> 
</span><del>-    std::function&lt;void(RefPtr&lt;VideoTrackPrivateMediaStream&gt;, int)&gt; enableVideoTrack = [this](auto track, int index)
</del><ins>+    Function&lt;void(RefPtr&lt;VideoTrackPrivateMediaStream&gt;, int, TrackState)&gt; setVideoTrackState = [&amp;](auto track, int index, TrackState state)
</ins><span class="cx">     {
</span><del>-        track-&gt;setTrackIndex(index);
-        bool selected = &amp;track-&gt;streamTrack() == m_mediaStreamPrivate-&gt;activeVideoTrack();
-        track-&gt;setSelected(selected);
-
-        if (selected)
-            ensureLayer();
</del><ins>+        switch (state) {
+        case TrackState::Remove:
+            track-&gt;streamTrack().removeObserver(*this);
+            m_player-&gt;removeVideoTrack(*track);
+            checkSelectedVideoTrack();
+            break;
+        case TrackState::Add:
+            track-&gt;streamTrack().addObserver(*this);
+            m_player-&gt;addVideoTrack(*track);
+            break;
+        case TrackState::Configure:
+            track-&gt;setTrackIndex(index);
+            bool selected = &amp;track-&gt;streamTrack() == m_mediaStreamPrivate-&gt;activeVideoTrack();
+            track-&gt;setSelected(selected);
+            checkSelectedVideoTrack();
+            break;
+        }
</ins><span class="cx">     };
</span><del>-    updateTracksOfType(m_videoTrackMap, RealtimeMediaSource::Video, currentTracks, &amp;VideoTrackPrivateMediaStream::create, m_player, &amp;MediaPlayer::removeVideoTrack, &amp;MediaPlayer::addVideoTrack, enableVideoTrack, (MediaStreamTrackPrivate::Observer*) this);
</del><ins>+    updateTracksOfType(m_videoTrackMap, RealtimeMediaSource::Video, currentTracks, &amp;VideoTrackPrivateMediaStream::create, setVideoTrackState);
</ins><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> std::unique_ptr&lt;PlatformTimeRanges&gt; MediaPlayerPrivateMediaStreamAVFObjC::seekable() const
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformgraphicsavfoundationobjcMediaSampleAVFObjCmm"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaSampleAVFObjC.mm        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -37,6 +37,11 @@
</span><span class="cx">     return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get()));
</span><span class="cx"> }
</span><span class="cx"> 
</span><ins>+MediaTime MediaSampleAVFObjC::outputPresentationTime() const
+{
+    return toMediaTime(CMSampleBufferGetOutputPresentationTimeStamp(m_sample.get()));
+}
+
</ins><span class="cx"> MediaTime MediaSampleAVFObjC::decodeTime() const
</span><span class="cx"> {
</span><span class="cx">     return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get()));
</span><span class="lines">@@ -47,6 +52,11 @@
</span><span class="cx">     return toMediaTime(CMSampleBufferGetDuration(m_sample.get()));
</span><span class="cx"> }
</span><span class="cx"> 
</span><ins>+MediaTime MediaSampleAVFObjC::outputDuration() const
+{
+    return toMediaTime(CMSampleBufferGetOutputDuration(m_sample.get()));
+}
+
</ins><span class="cx"> size_t MediaSampleAVFObjC::sizeInBytes() const
</span><span class="cx"> {
</span><span class="cx">     return CMSampleBufferGetTotalSampleSize(m_sample.get());
</span><span class="lines">@@ -111,7 +121,7 @@
</span><span class="cx"> 
</span><span class="cx"> void MediaSampleAVFObjC::dump(PrintStream&amp; out) const
</span><span class="cx"> {
</span><del>-    out.print(&quot;{PTS(&quot;, presentationTime(), &quot;), DTS(&quot;, decodeTime(), &quot;), duration(&quot;, duration(), &quot;), flags(&quot;, (int)flags(), &quot;), presentationSize(&quot;, presentationSize().width(), &quot;x&quot;, presentationSize().height(), &quot;)}&quot;);
</del><ins>+    out.print(&quot;{PTS(&quot;, presentationTime(), &quot;), OPTS(&quot;, outputPresentationTime(), &quot;), DTS(&quot;, decodeTime(), &quot;), duration(&quot;, duration(), &quot;), flags(&quot;, (int)flags(), &quot;), presentationSize(&quot;, presentationSize().width(), &quot;x&quot;, presentationSize().height(), &quot;)}&quot;);
</ins><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> void MediaSampleAVFObjC::offsetTimestampsBy(const MediaTime&amp; offset)
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreamAudioTrackPrivateMediaStreamh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/AudioTrackPrivateMediaStream.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/AudioTrackPrivateMediaStream.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/AudioTrackPrivateMediaStream.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -50,11 +50,15 @@
</span><span class="cx"> 
</span><span class="cx">     MediaStreamTrackPrivate&amp; streamTrack() { return m_streamTrack.get(); }
</span><span class="cx"> 
</span><ins>+    MediaTime timelineOffset() const { return m_timelineOffset; }
+    void setTimelineOffset(const MediaTime&amp; offset) { m_timelineOffset = offset; }
+
</ins><span class="cx"> private:
</span><span class="cx">     AudioTrackPrivateMediaStream(MediaStreamTrackPrivate&amp; track)
</span><span class="cx">         : m_streamTrack(track)
</span><span class="cx">         , m_id(track.id())
</span><span class="cx">         , m_label(track.label())
</span><ins>+        , m_timelineOffset(MediaTime::invalidTime())
</ins><span class="cx">     {
</span><span class="cx">     }
</span><span class="cx"> 
</span><span class="lines">@@ -62,6 +66,7 @@
</span><span class="cx">     AtomicString m_id;
</span><span class="cx">     AtomicString m_label;
</span><span class="cx">     int m_index { 0 };
</span><ins>+    MediaTime m_timelineOffset;
</ins><span class="cx"> };
</span><span class="cx"> 
</span><span class="cx"> }
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreamMediaStreamTrackPrivatecpp"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.cpp        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -100,9 +100,6 @@
</span><span class="cx">     // Always update the enabled state regardless of the track being ended.
</span><span class="cx">     m_isEnabled = enabled;
</span><span class="cx"> 
</span><del>-    if (m_preview)
-        m_preview-&gt;setEnabled(enabled);
-
</del><span class="cx">     for (auto&amp; observer : m_observers)
</span><span class="cx">         observer-&gt;trackEnabledChanged(*this);
</span><span class="cx"> }
</span><span class="lines">@@ -117,7 +114,6 @@
</span><span class="cx">     // trackEnded method once.
</span><span class="cx">     m_isEnded = true;
</span><span class="cx"> 
</span><del>-    m_preview = nullptr;
</del><span class="cx">     m_source-&gt;requestStop(this);
</span><span class="cx"> 
</span><span class="cx">     for (auto&amp; observer : m_observers)
</span><span class="lines">@@ -163,15 +159,6 @@
</span><span class="cx">     }
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-RealtimeMediaSourcePreview* MediaStreamTrackPrivate::preview()
-{
-    if (m_preview)
-        return m_preview.get();
-
-    m_preview = m_source-&gt;preview();
-    return m_preview.get();
-}
-
</del><span class="cx"> void MediaStreamTrackPrivate::applyConstraints(const MediaConstraints&amp; constraints, RealtimeMediaSource::SuccessHandler successHandler, RealtimeMediaSource::FailureHandler failureHandler)
</span><span class="cx"> {
</span><span class="cx">     m_source-&gt;applyConstraints(constraints, successHandler, failureHandler);
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreamMediaStreamTrackPrivateh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/MediaStreamTrackPrivate.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -91,7 +91,6 @@
</span><span class="cx">     AudioSourceProvider* audioSourceProvider();
</span><span class="cx"> 
</span><span class="cx">     void paintCurrentFrameInContext(GraphicsContext&amp;, const FloatRect&amp;);
</span><del>-    RealtimeMediaSourcePreview* preview();
</del><span class="cx"> 
</span><span class="cx"> private:
</span><span class="cx">     MediaStreamTrackPrivate(Ref&lt;RealtimeMediaSource&gt;&amp;&amp;, String&amp;&amp; id);
</span><span class="lines">@@ -105,7 +104,6 @@
</span><span class="cx"> 
</span><span class="cx">     Vector&lt;Observer*&gt; m_observers;
</span><span class="cx">     Ref&lt;RealtimeMediaSource&gt; m_source;
</span><del>-    RefPtr&lt;RealtimeMediaSourcePreview&gt; m_preview;
</del><span class="cx"> 
</span><span class="cx">     String m_id;
</span><span class="cx">     bool m_isEnabled;
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreamRealtimeMediaSourceh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeMediaSource.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -42,7 +42,6 @@
</span><span class="cx"> #include &quot;MediaSample.h&quot;
</span><span class="cx"> #include &quot;PlatformLayer.h&quot;
</span><span class="cx"> #include &quot;RealtimeMediaSourceCapabilities.h&quot;
</span><del>-#include &quot;RealtimeMediaSourcePreview.h&quot;
</del><span class="cx"> #include &lt;wtf/RefCounted.h&gt;
</span><span class="cx"> #include &lt;wtf/Vector.h&gt;
</span><span class="cx"> #include &lt;wtf/WeakPtr.h&gt;
</span><span class="lines">@@ -129,7 +128,6 @@
</span><span class="cx"> 
</span><span class="cx">     virtual RefPtr&lt;Image&gt; currentFrameImage() { return nullptr; }
</span><span class="cx">     virtual void paintCurrentFrameInContext(GraphicsContext&amp;, const FloatRect&amp;) { }
</span><del>-    virtual RefPtr&lt;RealtimeMediaSourcePreview&gt; preview() { return nullptr; }
</del><span class="cx"> 
</span><span class="cx">     void setWidth(int);
</span><span class="cx">     void setHeight(int);
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreamRealtimeMediaSourcePreviewh"></a>
<div class="delfile"><h4>Deleted: trunk/Source/WebCore/platform/mediastream/RealtimeMediaSourcePreview.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/RealtimeMediaSourcePreview.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/RealtimeMediaSourcePreview.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -1,69 +0,0 @@
</span><del>-/*
- * Copyright (C) 2016 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *
- * 1. Redistributions of source code must retain the above copyright
- *    notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- *    notice, this list of conditions and the following disclaimer
- *    in the documentation and/or other materials provided with the
- *    distribution.
- * 3. Neither the name of Ericsson nor the names of its contributors
- *    may be used to endorse or promote products derived from this
- *    software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- * &quot;AS IS&quot; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#pragma once
-
-#if ENABLE(MEDIA_STREAM)
-
-#include &quot;PlatformLayer.h&quot;
-#include &lt;wtf/RetainPtr.h&gt;
-#include &lt;wtf/WeakPtr.h&gt;
-
-namespace WebCore {
-
-class RealtimeMediaSourcePreview : public RefCounted&lt;RealtimeMediaSourcePreview&gt; {
-public:
-    virtual ~RealtimeMediaSourcePreview() { }
-
-    virtual void play() const = 0;
-    virtual void pause() const = 0;
-    virtual void setEnabled(bool) = 0;
-
-    virtual PlatformLayer* platformLayer() const = 0;
-    virtual void setVolume(double) const = 0;
-
-    virtual void invalidate() { m_weakPtrFactory.revokeAll(); }
-
-    WeakPtr&lt;RealtimeMediaSourcePreview&gt; createWeakPtr() { return m_weakPtrFactory.createWeakPtr(); }
-
-protected:
-    RealtimeMediaSourcePreview()
-        : m_weakPtrFactory(this)
-    {
-    }
-
-private:
-    WeakPtrFactory&lt;RealtimeMediaSourcePreview&gt; m_weakPtrFactory;
-};
-
-} // namespace WebCore
-
-#endif // ENABLE(MEDIA_STREAM)
-
</del></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreamVideoTrackPrivateMediaStreamh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/VideoTrackPrivateMediaStream.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/VideoTrackPrivateMediaStream.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/VideoTrackPrivateMediaStream.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -40,28 +40,33 @@
</span><span class="cx">         return adoptRef(*new VideoTrackPrivateMediaStream(streamTrack));
</span><span class="cx">     }
</span><span class="cx"> 
</span><del>-    Kind kind() const override { return Kind::Main; }
-    AtomicString id() const override { return m_id; }
-    AtomicString label() const override { return m_label; }
-    AtomicString language() const override { return emptyAtom; }
-    int trackIndex() const override { return m_index; }
-
</del><span class="cx">     void setTrackIndex(int index) { m_index = index; }
</span><span class="cx"> 
</span><span class="cx">     MediaStreamTrackPrivate&amp; streamTrack() { return m_streamTrack.get(); }
</span><span class="cx"> 
</span><ins>+    MediaTime timelineOffset() const { return m_timelineOffset; }
+    void setTimelineOffset(const MediaTime&amp; offset) { m_timelineOffset = offset; }
+
</ins><span class="cx"> private:
</span><span class="cx">     VideoTrackPrivateMediaStream(MediaStreamTrackPrivate&amp; track)
</span><span class="cx">         : m_streamTrack(track)
</span><span class="cx">         , m_id(track.id())
</span><span class="cx">         , m_label(track.label())
</span><ins>+        , m_timelineOffset(MediaTime::invalidTime())
</ins><span class="cx">     {
</span><span class="cx">     }
</span><span class="cx"> 
</span><ins>+    Kind kind() const final { return Kind::Main; }
+    AtomicString id() const final { return m_id; }
+    AtomicString label() const final { return m_label; }
+    AtomicString language() const final { return emptyAtom; }
+    int trackIndex() const final { return m_index; }
+
</ins><span class="cx">     Ref&lt;MediaStreamTrackPrivate&gt; m_streamTrack;
</span><span class="cx">     AtomicString m_id;
</span><span class="cx">     AtomicString m_label;
</span><span class="cx">     int m_index { 0 };
</span><ins>+    MediaTime m_timelineOffset;
</ins><span class="cx"> };
</span><span class="cx"> 
</span><span class="cx"> }
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreammacAVAudioCaptureSourceh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -1,5 +1,5 @@
</span><span class="cx"> /*
</span><del>- * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
</del><ins>+ * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
</ins><span class="cx">  *
</span><span class="cx">  * Redistribution and use in source and binary forms, with or without
</span><span class="cx">  * modification, are permitted provided that the following conditions
</span><span class="lines">@@ -67,7 +67,6 @@
</span><span class="cx">     void shutdownCaptureSession() override;
</span><span class="cx">     void updateSettings(RealtimeMediaSourceSettings&amp;) override;
</span><span class="cx">     AudioSourceProvider* audioSourceProvider() override;
</span><del>-    RefPtr&lt;AVMediaSourcePreview&gt; createPreview() final;
</del><span class="cx"> 
</span><span class="cx">     RetainPtr&lt;AVCaptureConnection&gt; m_audioConnection;
</span><span class="cx"> 
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreammacAVAudioCaptureSourcemm"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.mm (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.mm        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVAudioCaptureSource.mm        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -1,5 +1,5 @@
</span><span class="cx"> /*
</span><del>- * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
</del><ins>+ * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
</ins><span class="cx">  *
</span><span class="cx">  * Redistribution and use in source and binary forms, with or without
</span><span class="cx">  * modification, are permitted provided that the following conditions
</span><span class="lines">@@ -30,7 +30,7 @@
</span><span class="cx"> 
</span><span class="cx"> #import &quot;Logging.h&quot;
</span><span class="cx"> #import &quot;MediaConstraints.h&quot;
</span><del>-#import &quot;NotImplemented.h&quot;
</del><ins>+#import &quot;MediaSampleAVFObjC.h&quot;
</ins><span class="cx"> #import &quot;RealtimeMediaSourceSettings.h&quot;
</span><span class="cx"> #import &quot;SoftLinking.h&quot;
</span><span class="cx"> #import &quot;WebAudioSourceProviderAVFObjC.h&quot;
</span><span class="lines">@@ -49,22 +49,15 @@
</span><span class="cx"> typedef AVCaptureDeviceInput AVCaptureDeviceInputType;
</span><span class="cx"> typedef AVCaptureOutput AVCaptureOutputType;
</span><span class="cx"> 
</span><del>-#if !PLATFORM(IOS)
-typedef AVCaptureAudioPreviewOutput AVCaptureAudioPreviewOutputType;
-#endif
-
</del><span class="cx"> SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
</span><span class="cx"> 
</span><span class="cx"> SOFT_LINK_CLASS(AVFoundation, AVCaptureAudioChannel)
</span><span class="cx"> SOFT_LINK_CLASS(AVFoundation, AVCaptureAudioDataOutput)
</span><del>-SOFT_LINK_CLASS(AVFoundation, AVCaptureAudioPreviewOutput)
</del><span class="cx"> SOFT_LINK_CLASS(AVFoundation, AVCaptureConnection)
</span><span class="cx"> SOFT_LINK_CLASS(AVFoundation, AVCaptureDevice)
</span><span class="cx"> SOFT_LINK_CLASS(AVFoundation, AVCaptureDeviceInput)
</span><span class="cx"> SOFT_LINK_CLASS(AVFoundation, AVCaptureOutput)
</span><span class="cx"> 
</span><del>-#define AVCaptureAudioPreviewOutput getAVCaptureAudioPreviewOutputClass()
-
</del><span class="cx"> #define AVCaptureAudioChannel getAVCaptureAudioChannelClass()
</span><span class="cx"> #define AVCaptureAudioDataOutput getAVCaptureAudioDataOutputClass()
</span><span class="cx"> #define AVCaptureConnection getAVCaptureConnectionClass()
</span><span class="lines">@@ -80,79 +73,6 @@
</span><span class="cx"> 
</span><span class="cx"> namespace WebCore {
</span><span class="cx"> 
</span><del>-#if !PLATFORM(IOS)
-class AVAudioSourcePreview: public AVMediaSourcePreview {
-public:
-    static RefPtr&lt;AVMediaSourcePreview&gt; create(AVCaptureSession *, AVAudioCaptureSource*);
-
-private:
-    AVAudioSourcePreview(AVCaptureSession *, AVAudioCaptureSource*);
-
-    void invalidate() final;
-
-    void play() const final;
-    void pause() const final;
-    void setVolume(double) const final;
-    void setEnabled(bool) final;
-    PlatformLayer* platformLayer() const final { return nullptr; }
-
-    void updateState() const;
-
-    RetainPtr&lt;AVCaptureAudioPreviewOutputType&gt; m_audioPreviewOutput;
-    mutable double m_volume { 1 };
-    mutable bool m_paused { false };
-    mutable bool m_enabled { true };
-};
-
-RefPtr&lt;AVMediaSourcePreview&gt; AVAudioSourcePreview::create(AVCaptureSession *session, AVAudioCaptureSource* parent)
-{
-    return adoptRef(new AVAudioSourcePreview(session, parent));
-}
-
-AVAudioSourcePreview::AVAudioSourcePreview(AVCaptureSession *session, AVAudioCaptureSource* parent)
-    : AVMediaSourcePreview(parent)
-{
-    m_audioPreviewOutput = adoptNS([allocAVCaptureAudioPreviewOutputInstance() init]);
-    setVolume(1);
-    [session addOutput:m_audioPreviewOutput.get()];
-}
-
-void AVAudioSourcePreview::invalidate()
-{
-    m_audioPreviewOutput = nullptr;
-    AVMediaSourcePreview::invalidate();
-}
-
-void AVAudioSourcePreview::play() const
-{
-    m_paused = false;
-    updateState();
-}
-
-void AVAudioSourcePreview::pause() const
-{
-    m_paused = true;
-    updateState();
-}
-
-void AVAudioSourcePreview::setEnabled(bool enabled)
-{
-    m_enabled = enabled;
-    updateState();
-}
-
-void AVAudioSourcePreview::setVolume(double volume) const
-{
-    m_volume = volume;
-    m_audioPreviewOutput.get().volume = volume;
-}
-
-void AVAudioSourcePreview::updateState() const
-{
-    m_audioPreviewOutput.get().volume = (!m_enabled || m_paused) ? 0 : m_volume;
-}
-#endif
-
</del><span class="cx"> RefPtr&lt;AVMediaCaptureSource&gt; AVAudioCaptureSource::create(AVCaptureDeviceTypedef* device, const AtomicString&amp; id, const MediaConstraints* constraints, String&amp; invalidConstraint)
</span><span class="cx"> {
</span><span class="cx">     auto source = adoptRef(new AVAudioCaptureSource(device, id));
</span><span class="lines">@@ -190,7 +110,7 @@
</span><span class="cx"> 
</span><span class="cx"> void AVAudioCaptureSource::updateSettings(RealtimeMediaSourceSettings&amp; settings)
</span><span class="cx"> {
</span><del>-    // FIXME: use [AVCaptureAudioPreviewOutput volume] for volume
</del><ins>+    // FIXME: support volume
</ins><span class="cx"> 
</span><span class="cx">     settings.setDeviceId(id());
</span><span class="cx"> }
</span><span class="lines">@@ -276,6 +196,11 @@
</span><span class="cx">     if (!formatDescription)
</span><span class="cx">         return;
</span><span class="cx"> 
</span><ins>+    RetainPtr&lt;CMSampleBufferRef&gt; buffer = sampleBuffer;
+    scheduleDeferredTask([this, buffer] {
+        mediaDataUpdated(MediaSampleAVFObjC::create(buffer.get()));
+    });
+
</ins><span class="cx">     std::unique_lock&lt;Lock&gt; lock(m_lock, std::try_to_lock);
</span><span class="cx">     if (!lock.owns_lock()) {
</span><span class="cx">         // Failed to acquire the lock, just return instead of blocking.
</span><span class="lines">@@ -304,15 +229,6 @@
</span><span class="cx">     return m_audioSourceProvider.get();
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-RefPtr&lt;AVMediaSourcePreview&gt; AVAudioCaptureSource::createPreview()
-{
-#if !PLATFORM(IOS)
-    return AVAudioSourcePreview::create(session(), this);
-#else
-    return nullptr;
-#endif
-}
-    
</del><span class="cx"> } // namespace WebCore
</span><span class="cx"> 
</span><span class="cx"> #endif // ENABLE(MEDIA_STREAM)
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreammacAVMediaCaptureSourceh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -1,5 +1,5 @@
</span><span class="cx"> /*
</span><del>- * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
</del><ins>+ * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
</ins><span class="cx">  *
</span><span class="cx">  * Redistribution and use in source and binary forms, with or without
</span><span class="cx">  * modification, are permitted provided that the following conditions
</span><span class="lines">@@ -47,19 +47,6 @@
</span><span class="cx"> 
</span><span class="cx"> class AVMediaCaptureSource;
</span><span class="cx"> 
</span><del>-class AVMediaSourcePreview: public RealtimeMediaSourcePreview {
-public:
-    virtual ~AVMediaSourcePreview();
-
-    void invalidate() override;
-
-protected:
-    AVMediaSourcePreview(AVMediaCaptureSource*);
-
-private:
-    WeakPtr&lt;AVMediaCaptureSource&gt; m_parent;
-};
-
</del><span class="cx"> class AVMediaCaptureSource : public RealtimeMediaSource {
</span><span class="cx"> public:
</span><span class="cx">     virtual ~AVMediaCaptureSource();
</span><span class="lines">@@ -76,10 +63,6 @@
</span><span class="cx">     void stopProducingData() final;
</span><span class="cx">     bool isProducingData() const final { return m_isRunning; }
</span><span class="cx"> 
</span><del>-    RefPtr&lt;RealtimeMediaSourcePreview&gt; preview() final;
-    void removePreview(AVMediaSourcePreview*);
-    WeakPtr&lt;AVMediaCaptureSource&gt; createWeakPtr() { return m_weakPtrFactory.createWeakPtr(); }
-
</del><span class="cx"> protected:
</span><span class="cx">     AVMediaCaptureSource(AVCaptureDevice*, const AtomicString&amp;, RealtimeMediaSource::Type);
</span><span class="cx"> 
</span><span class="lines">@@ -99,8 +82,6 @@
</span><span class="cx">     void setVideoSampleBufferDelegate(AVCaptureVideoDataOutput*);
</span><span class="cx">     void setAudioSampleBufferDelegate(AVCaptureAudioDataOutput*);
</span><span class="cx"> 
</span><del>-    virtual RefPtr&lt;AVMediaSourcePreview&gt; createPreview() = 0;
-
</del><span class="cx"> private:
</span><span class="cx">     void setupSession();
</span><span class="cx">     void reset() final;
</span><span class="lines">@@ -117,8 +98,6 @@
</span><span class="cx">     RefPtr&lt;RealtimeMediaSourceCapabilities&gt; m_capabilities;
</span><span class="cx">     RetainPtr&lt;AVCaptureSession&gt; m_session;
</span><span class="cx">     RetainPtr&lt;AVCaptureDevice&gt; m_device;
</span><del>-    Vector&lt;WeakPtr&lt;RealtimeMediaSourcePreview&gt;&gt; m_previews;
-    WeakPtrFactory&lt;AVMediaCaptureSource&gt; m_weakPtrFactory;
</del><span class="cx">     bool m_isRunning { false};
</span><span class="cx"> };
</span><span class="cx"> 
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreammacAVMediaCaptureSourcemm"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.mm (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.mm        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVMediaCaptureSource.mm        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -129,7 +129,6 @@
</span><span class="cx">     : RealtimeMediaSource(id, type, emptyString())
</span><span class="cx">     , m_objcObserver(adoptNS([[WebCoreAVMediaCaptureSourceObserver alloc] initWithCallback:this]))
</span><span class="cx">     , m_device(device)
</span><del>-    , m_weakPtrFactory(this)
</del><span class="cx"> {
</span><span class="cx">     setName(device.localizedName);
</span><span class="cx">     setPersistentID(device.uniqueID);
</span><span class="lines">@@ -240,12 +239,6 @@
</span><span class="cx">     for (NSString *keyName in sessionKVOProperties())
</span><span class="cx">         [m_session removeObserver:m_objcObserver.get() forKeyPath:keyName];
</span><span class="cx"> 
</span><del>-    for (const auto&amp; preview : m_previews) {
-        if (preview)
-            preview-&gt;invalidate();
-    }
-    m_previews.clear();
-
</del><span class="cx">     shutdownCaptureSession();
</span><span class="cx">     m_session = nullptr;
</span><span class="cx"> }
</span><span class="lines">@@ -277,45 +270,6 @@
</span><span class="cx">     return nullptr;
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-RefPtr&lt;RealtimeMediaSourcePreview&gt; AVMediaCaptureSource::preview()
-{
-    RefPtr&lt;AVMediaSourcePreview&gt; preview = createPreview();
-    if (!preview)
-        return nullptr;
-
-    m_previews.append(preview-&gt;createWeakPtr());
-    return preview.leakRef();
-}
-
-void AVMediaCaptureSource::removePreview(AVMediaSourcePreview* preview)
-{
-    size_t index;
-    for (index = 0; index &lt; m_previews.size(); ++index) {
-        if (m_previews[index].get() == preview)
-            break;
-    }
-
-    if (index &lt; m_previews.size())
-        m_previews.remove(index);
-}
-
-AVMediaSourcePreview::AVMediaSourcePreview(AVMediaCaptureSource* parent)
-    : m_parent(parent-&gt;createWeakPtr())
-{
-}
-
-AVMediaSourcePreview::~AVMediaSourcePreview()
-{
-    if (m_parent)
-        m_parent-&gt;removePreview(this);
-}
-
-void AVMediaSourcePreview::invalidate()
-{
-    m_parent = nullptr;
-    RealtimeMediaSourcePreview::invalidate();
-}
-
</del><span class="cx"> NSArray* sessionKVOProperties()
</span><span class="cx"> {
</span><span class="cx">     static NSArray* keys = [@[@&quot;running&quot;] retain];
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreammacAVVideoCaptureSourceh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.h        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -79,7 +79,6 @@
</span><span class="cx"> 
</span><span class="cx">     void paintCurrentFrameInContext(GraphicsContext&amp;, const FloatRect&amp;) final;
</span><span class="cx"> 
</span><del>-    RefPtr&lt;AVMediaSourcePreview&gt; createPreview() final;
</del><span class="cx">     RetainPtr&lt;CGImageRef&gt; currentFrameCGImage();
</span><span class="cx">     RefPtr&lt;Image&gt; currentFrameImage() final;
</span><span class="cx"> 
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreammacAVVideoCaptureSourcemm"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/mac/AVVideoCaptureSource.mm        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -1,5 +1,5 @@
</span><span class="cx"> /*
</span><del>- * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
</del><ins>+ * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
</ins><span class="cx">  *
</span><span class="cx">  * Redistribution and use in source and binary forms, with or without
</span><span class="cx">  * modification, are permitted provided that the following conditions
</span><span class="lines">@@ -38,7 +38,6 @@
</span><span class="cx"> #import &quot;NotImplemented.h&quot;
</span><span class="cx"> #import &quot;PlatformLayer.h&quot;
</span><span class="cx"> #import &quot;RealtimeMediaSourceCenter.h&quot;
</span><del>-#import &quot;RealtimeMediaSourcePreview.h&quot;
</del><span class="cx"> #import &quot;RealtimeMediaSourceSettings.h&quot;
</span><span class="cx"> #import &quot;WebActionDisablingCALayerDelegate.h&quot;
</span><span class="cx"> #import &lt;AVFoundation/AVCaptureDevice.h&gt;
</span><span class="lines">@@ -101,110 +100,8 @@
</span><span class="cx"> 
</span><span class="cx"> using namespace WebCore;
</span><span class="cx"> 
</span><del>-@interface WebCoreAVVideoCaptureSourceObserver : NSObject&lt;CALayerDelegate&gt; {
-    AVVideoSourcePreview *_parent;
-    BOOL _hasObserver;
-}
-
-- (void)setParent:(AVVideoSourcePreview *)parent;
-- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context;
-@end
-
</del><span class="cx"> namespace WebCore {
</span><span class="cx"> 
</span><del>-class AVVideoSourcePreview: public AVMediaSourcePreview {
-public:
-    static RefPtr&lt;AVMediaSourcePreview&gt; create(AVCaptureSession*, AVCaptureDeviceTypedef*, AVVideoCaptureSource*);
-
-    void backgroundLayerBoundsChanged();
-    PlatformLayer* platformLayer() const final { return m_previewBackgroundLayer.get(); }
-
-private:
-    AVVideoSourcePreview(AVCaptureSession*, AVCaptureDeviceTypedef*, AVVideoCaptureSource*);
-
-    void invalidate() final;
-
-    void play() const final;
-    void pause() const final;
-    void setVolume(double) const final { };
-    void setEnabled(bool) final;
-    void setPaused(bool) const;
-
-    RetainPtr&lt;AVCaptureVideoPreviewLayerType&gt; m_previewLayer;
-    RetainPtr&lt;PlatformLayer&gt; m_previewBackgroundLayer;
-    RetainPtr&lt;AVCaptureDeviceTypedef&gt; m_device;
-    RetainPtr&lt;WebCoreAVVideoCaptureSourceObserver&gt; m_objcObserver;
-};
-
-RefPtr&lt;AVMediaSourcePreview&gt; AVVideoSourcePreview::create(AVCaptureSession *session, AVCaptureDeviceTypedef* device, AVVideoCaptureSource* parent)
-{
-    return adoptRef(new AVVideoSourcePreview(session, device, parent));
-}
-
-AVVideoSourcePreview::AVVideoSourcePreview(AVCaptureSession *session, AVCaptureDeviceTypedef* device, AVVideoCaptureSource* parent)
-    : AVMediaSourcePreview(parent)
-    , m_objcObserver(adoptNS([[WebCoreAVVideoCaptureSourceObserver alloc] init]))
-{
-    m_device = device;
-    m_previewLayer = adoptNS([allocAVCaptureVideoPreviewLayerInstance() initWithSession:session]);
-    m_previewLayer.get().contentsGravity = kCAGravityResize;
-    m_previewLayer.get().anchorPoint = CGPointZero;
-    [m_previewLayer.get() setDelegate:[WebActionDisablingCALayerDelegate shared]];
-
-    m_previewBackgroundLayer = adoptNS([[CALayer alloc] init]);
-    m_previewBackgroundLayer.get().contentsGravity = kCAGravityResizeAspect;
-    m_previewBackgroundLayer.get().anchorPoint = CGPointZero;
-    m_previewBackgroundLayer.get().needsDisplayOnBoundsChange = YES;
-    [m_previewBackgroundLayer.get() setDelegate:[WebActionDisablingCALayerDelegate shared]];
-
-#ifndef NDEBUG
-    m_previewLayer.get().name = @&quot;AVVideoCaptureSource preview layer&quot;;
-    m_previewBackgroundLayer.get().name = @&quot;AVVideoSourcePreview parent layer&quot;;
-#endif
-
-    [m_previewBackgroundLayer addSublayer:m_previewLayer.get()];
-
-    [m_objcObserver.get() setParent:this];
-}
-
-void AVVideoSourcePreview::backgroundLayerBoundsChanged()
-{
-    if (m_previewBackgroundLayer &amp;&amp; m_previewLayer)
-        [m_previewLayer.get() setBounds:m_previewBackgroundLayer.get().bounds];
-}
-
-void AVVideoSourcePreview::invalidate()
-{
-    [m_objcObserver.get() setParent:nil];
-    m_objcObserver = nullptr;
-    m_previewLayer = nullptr;
-    m_previewBackgroundLayer = nullptr;
-    m_device = nullptr;
-    AVMediaSourcePreview::invalidate();
-}
-
-void AVVideoSourcePreview::play() const
-{
-    setPaused(false);
-}
-
-void AVVideoSourcePreview::pause() const
-{
-    setPaused(true);
-}
-
-void AVVideoSourcePreview::setPaused(bool paused) const
-{
-    [m_device lockForConfiguration:nil];
-    m_previewLayer.get().connection.enabled = !paused;
-    [m_device unlockForConfiguration];
-}
-
-void AVVideoSourcePreview::setEnabled(bool enabled)
-{
-    m_previewLayer.get().hidden = !enabled;
-}
-
</del><span class="cx"> const OSType videoCaptureFormat = kCVPixelFormatType_32BGRA;
</span><span class="cx"> 
</span><span class="cx"> RefPtr&lt;AVMediaCaptureSource&gt; AVVideoCaptureSource::create(AVCaptureDeviceTypedef* device, const AtomicString&amp; id, const MediaConstraints* constraints, String&amp; invalidConstraint)
</span><span class="lines">@@ -512,20 +409,7 @@
</span><span class="cx">         return;
</span><span class="cx"> 
</span><span class="cx">     updateFramerate(sampleBuffer.get());
</span><del>-
-    CMSampleBufferRef newSampleBuffer = 0;
-    CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer.get(), &amp;newSampleBuffer);
-    ASSERT(newSampleBuffer);
-
-    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
-    if (attachmentsArray) {
-        for (CFIndex i = 0; i &lt; CFArrayGetCount(attachmentsArray); ++i) {
-            CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
-            CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
-        }
-    }
-
-    m_buffer = adoptCF(newSampleBuffer);
</del><ins>+    m_buffer = sampleBuffer;
</ins><span class="cx">     m_lastImage = nullptr;
</span><span class="cx"> 
</span><span class="cx">     bool settingsChanged = false;
</span><span class="lines">@@ -605,11 +489,6 @@
</span><span class="cx">     CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), m_lastImage.get());
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-RefPtr&lt;AVMediaSourcePreview&gt; AVVideoCaptureSource::createPreview()
-{
-    return AVVideoSourcePreview::create(session(), device(), this);
-}
-
</del><span class="cx"> NSString* AVVideoCaptureSource::bestSessionPresetForVideoDimensions(std::optional&lt;int&gt; width, std::optional&lt;int&gt; height) const
</span><span class="cx"> {
</span><span class="cx">     if (!width &amp;&amp; !height)
</span><span class="lines">@@ -656,46 +535,4 @@
</span><span class="cx"> 
</span><span class="cx"> } // namespace WebCore
</span><span class="cx"> 
</span><del>-@implementation WebCoreAVVideoCaptureSourceObserver
-
-static NSString * const KeyValueBoundsChangeKey = @&quot;bounds&quot;;
-
-- (void)setParent:(AVVideoSourcePreview *)parent
-{
-    if (_parent &amp;&amp; _hasObserver &amp;&amp; _parent-&gt;platformLayer()) {
-        _hasObserver = false;
-        [_parent-&gt;platformLayer() removeObserver:self forKeyPath:KeyValueBoundsChangeKey];
-    }
-
-    _parent = parent;
-
-    if (_parent &amp;&amp; _parent-&gt;platformLayer()) {
-        _hasObserver = true;
-        [_parent-&gt;platformLayer() addObserver:self forKeyPath:KeyValueBoundsChangeKey options:0 context:nullptr];
-    }
-}
-
-- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
-{
-    UNUSED_PARAM(context);
-
-    if (!_parent)
-        return;
-
-    if ([[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue])
-        return;
-
-#if PLATFORM(IOS)
-    WebThreadRun(^ {
-        if ([keyPath isEqual:KeyValueBoundsChangeKey] &amp;&amp; object == _parent-&gt;platformLayer())
-            _parent-&gt;backgroundLayerBoundsChanged();
-    });
-#else
-    if ([keyPath isEqual:KeyValueBoundsChangeKey] &amp;&amp; object == _parent-&gt;platformLayer())
-        _parent-&gt;backgroundLayerBoundsChanged();
-#endif
-}
-
-@end
-
</del><span class="cx"> #endif // ENABLE(MEDIA_STREAM)
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformmediastreammacMockRealtimeVideoSourceMacmm"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebCore/platform/mediastream/mac/MockRealtimeVideoSourceMac.mm        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -48,6 +48,8 @@
</span><span class="cx"> 
</span><span class="cx"> namespace WebCore {
</span><span class="cx"> 
</span><ins>+static const int videoSampleRate = 90000;
+
</ins><span class="cx"> RefPtr&lt;MockRealtimeVideoSource&gt; MockRealtimeVideoSource::create(const String&amp; name, const MediaConstraints* constraints)
</span><span class="cx"> {
</span><span class="cx">     auto source = adoptRef(new MockRealtimeVideoSourceMac(name));
</span><span class="lines">@@ -74,12 +76,9 @@
</span><span class="cx">     if (!pixelBuffer)
</span><span class="cx">         return nullptr;
</span><span class="cx"> 
</span><del>-    CMSampleTimingInfo timingInfo;
</del><ins>+    CMTime sampleTime = CMTimeMake((elapsedTime() + .1) * videoSampleRate, videoSampleRate);
+    CMSampleTimingInfo timingInfo = { kCMTimeInvalid, sampleTime, sampleTime };
</ins><span class="cx"> 
</span><del>-    timingInfo.presentationTimeStamp = CMTimeMake(elapsedTime() * 1000, 1000);
-    timingInfo.decodeTimeStamp = kCMTimeInvalid;
-    timingInfo.duration = kCMTimeInvalid;
-
</del><span class="cx">     CMVideoFormatDescriptionRef formatDescription = nullptr;
</span><span class="cx">     OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, (CVImageBufferRef)pixelBuffer, &amp;formatDescription);
</span><span class="cx">     if (status != noErr) {
</span><span class="lines">@@ -100,6 +99,8 @@
</span><span class="cx"> 
</span><span class="cx"> RetainPtr&lt;CVPixelBufferRef&gt; MockRealtimeVideoSourceMac::pixelBufferFromCGImage(CGImageRef image) const
</span><span class="cx"> {
</span><ins>+    static CGColorSpaceRef deviceRGBColorSpace = CGColorSpaceCreateDeviceRGB();
+
</ins><span class="cx">     CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
</span><span class="cx">     CFDictionaryRef options = (__bridge CFDictionaryRef) @{
</span><span class="cx">         (__bridge NSString *)kCVPixelBufferCGImageCompatibilityKey: @(NO),
</span><span class="lines">@@ -112,8 +113,7 @@
</span><span class="cx"> 
</span><span class="cx">     CVPixelBufferLockBaseAddress(pixelBuffer, 0);
</span><span class="cx">     void* data = CVPixelBufferGetBaseAddress(pixelBuffer);
</span><del>-    auto rgbColorSpace = adoptCF(CGColorSpaceCreateDeviceRGB());
-    auto context = adoptCF(CGBitmapContextCreate(data, frameSize.width, frameSize.height, 8, CVPixelBufferGetBytesPerRow(pixelBuffer), rgbColorSpace.get(), (CGBitmapInfo) kCGImageAlphaNoneSkipFirst));
</del><ins>+    auto context = adoptCF(CGBitmapContextCreate(data, frameSize.width, frameSize.height, 8, CVPixelBufferGetBytesPerRow(pixelBuffer), deviceRGBColorSpace, (CGBitmapInfo) kCGImageAlphaNoneSkipFirst));
</ins><span class="cx">     CGContextDrawImage(context.get(), CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
</span><span class="cx">     CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
</span><span class="cx"> 
</span></span></pre></div>
<a id="trunkSourceWebKit2WebProcesscomappleWebProcesssbin"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebKit2/WebProcess/com.apple.WebProcess.sb.in (210620 => 210621)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebKit2/WebProcess/com.apple.WebProcess.sb.in        2017-01-12 05:15:48 UTC (rev 210620)
+++ trunk/Source/WebKit2/WebProcess/com.apple.WebProcess.sb.in        2017-01-12 05:22:32 UTC (rev 210621)
</span><span class="lines">@@ -448,3 +448,8 @@
</span><span class="cx">         (iokit-user-client-class &quot;IOUSBDeviceUserClientV2&quot;)
</span><span class="cx">         (iokit-user-client-class &quot;IOUSBInterfaceUserClientV2&quot;))
</span><span class="cx">     (allow device-camera))
</span><ins>+
+;; @@@@@
+(allow device-microphone)
+;; @@@@@
+
</ins></span></pre>
</div>
</div>

</body>
</html>