<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>[160810] trunk/Source/WebCore</title>
</head>
<body>
<style type="text/css"><!--
#msg dl.meta { border: 1px #006 solid; background: #369; padding: 6px; color: #fff; }
#msg dl.meta dt { float: left; width: 6em; font-weight: bold; }
#msg dt:after { content:':';}
#msg dl, #msg dt, #msg ul, #msg li, #header, #footer, #logmsg { font-family: verdana,arial,helvetica,sans-serif; font-size: 10pt; }
#msg dl a { font-weight: bold}
#msg dl a:link { color:#fc3; }
#msg dl a:active { color:#ff0; }
#msg dl a:visited { color:#cc6; }
h3 { font-family: verdana,arial,helvetica,sans-serif; font-size: 10pt; font-weight: bold; }
#msg pre { overflow: auto; background: #ffc; border: 1px #fa0 solid; padding: 6px; }
#logmsg { background: #ffc; border: 1px #fa0 solid; padding: 1em 1em 0 1em; }
#logmsg p, #logmsg pre, #logmsg blockquote { margin: 0 0 1em 0; }
#logmsg p, #logmsg li, #logmsg dt, #logmsg dd { line-height: 14pt; }
#logmsg h1, #logmsg h2, #logmsg h3, #logmsg h4, #logmsg h5, #logmsg h6 { margin: .5em 0; }
#logmsg h1:first-child, #logmsg h2:first-child, #logmsg h3:first-child, #logmsg h4:first-child, #logmsg h5:first-child, #logmsg h6:first-child { margin-top: 0; }
#logmsg ul, #logmsg ol { padding: 0; list-style-position: inside; margin: 0 0 0 1em; }
#logmsg ul { text-indent: -1em; padding-left: 1em; }#logmsg ol { text-indent: -1.5em; padding-left: 1.5em; }
#logmsg > ul, #logmsg > ol { margin: 0 0 1em 0; }
#logmsg pre { background: #eee; padding: 1em; }
#logmsg blockquote { border: 1px solid #fa0; border-left-width: 10px; padding: 1em 1em 0 1em; background: white;}
#logmsg dl { margin: 0; }
#logmsg dt { font-weight: bold; }
#logmsg dd { margin: 0; padding: 0 0 0.5em 0; }
#logmsg dd:before { content:'\00bb';}
#logmsg table { border-spacing: 0px; border-collapse: collapse; border-top: 4px solid #fa0; border-bottom: 1px solid #fa0; background: #fff; }
#logmsg table th { text-align: left; font-weight: normal; padding: 0.2em 0.5em; border-top: 1px dotted #fa0; }
#logmsg table td { text-align: right; border-top: 1px dotted #fa0; padding: 0.2em 0.5em; }
#logmsg table thead th { text-align: center; border-bottom: 1px solid #fa0; }
#logmsg table th.Corner { text-align: left; }
#logmsg hr { border: none 0; border-top: 2px dashed #fa0; height: 1px; }
#header, #footer { color: #fff; background: #636; border: 1px #300 solid; padding: 6px; }
#patch { width: 100%; }
#patch h4 {font-family: verdana,arial,helvetica,sans-serif;font-size:10pt;padding:8px;background:#369;color:#fff;margin:0;}
#patch .propset h4, #patch .binary h4 {margin:0;}
#patch pre {padding:0;line-height:1.2em;margin:0;}
#patch .diff {width:100%;background:#eee;padding: 0 0 10px 0;overflow:auto;}
#patch .propset .diff, #patch .binary .diff {padding:10px 0;}
#patch span {display:block;padding:0 10px;}
#patch .modfile, #patch .addfile, #patch .delfile, #patch .propset, #patch .binary, #patch .copfile {border:1px solid #ccc;margin:10px 0;}
#patch ins {background:#dfd;text-decoration:none;display:block;padding:0 10px;}
#patch del {background:#fdd;text-decoration:none;display:block;padding:0 10px;}
#patch .lines, .info {color:#888;background:#fff;}
--></style>
<div id="msg">
<dl class="meta">
<dt>Revision</dt> <dd><a href="http://trac.webkit.org/projects/webkit/changeset/160810">160810</a></dd>
<dt>Author</dt> <dd>jer.noble@apple.com</dd>
<dt>Date</dt> <dd>2013-12-18 17:14:26 -0800 (Wed, 18 Dec 2013)</dd>
</dl>
<h3>Log Message</h3>
<pre>[MSE][Mac] Add AVSampleBufferAudioRenderer support.
https://bugs.webkit.org/show_bug.cgi?id=125905

Reviewed by Eric Carlson.

On platforms which support AVSampleBufferAudioRenderer, add support
for playback of audio CMSampleBufferRefs generated by AVStreamDataParser.

* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
* platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::isAvailable): Require AVSampleBufferAudioRenderer.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setVolume): Pass through to every audio renderer.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setMuted): Ditto.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer): Slave the renderer's
timebase to the master clock.
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer):
* platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h:
* platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
(WebCore::SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC): Drive-by fix; initialize
m_enabledVideoTrackID.
(WebCore::SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC): Call destroyRenderers().
(WebCore::callProcessCodedFrameForEachSample): Drive-by fix; convert the bool return to an OSStatus.
(WebCore::SourceBufferPrivateAVFObjC::destroyRenderers): Added; flush and destroy the audio
renderers.
(WebCore::SourceBufferPrivateAVFObjC::removedFromMediaSource): Call destroyRenderers().
(WebCore::SourceBufferPrivateAVFObjC::trackDidChangeEnabled): Enable or disable the audio
renderer in response.
(WebCore::SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples): Added an
audio-specific version.
(WebCore::SourceBufferPrivateAVFObjC::enqueueSample): Ditto.
(WebCore::SourceBufferPrivateAVFObjC::isReadyForMoreSamples): Ditto.
(WebCore::SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples): Ditto.
(WebCore::SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples): Ditto.</pre>
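<p>For readers skimming the change, the player-side pattern is summarized below in a condensed, illustrative sketch: the player-level volume and mute fan out to every attached audio renderer, and each newly added renderer's timebase is slaved to the player's master clock. The class, member, and helper names here are abbreviated stand-ins, and the AVSampleBufferAudioRenderer interface and FigReadOnlyTimebaseSetTargetTimebase declaration simply mirror the ones the patch soft-links; this is a simplified approximation, not the committed code in the diff below.</p>
<pre>// Sketch only (Objective-C++), assuming a WebCore-style build with WTF available.
// The declarations below mirror what the patch soft-links from AVFoundation and
// CoreMedia; they are included here just so the sketch type-checks.
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
#include <wtf/RetainPtr.h>
#include <wtf/Vector.h>

@interface AVSampleBufferAudioRenderer : NSObject
- (CMTimebaseRef)timebase;
- (void)setVolume:(float)volume;
- (void)setMuted:(BOOL)muted;
@end

// Resolved via SOFT_LINK in the patch; declared here only for illustration.
extern "C" OSStatus FigReadOnlyTimebaseSetTargetTimebase(CMTimebaseRef timebase, CMTimebaseRef newTargetTimebase);

class AudioRendererPoolSketch {
public:
    explicit AudioRendererPoolSketch(CMTimebaseRef masterTimebase)
        : m_masterTimebase(masterTimebase) { }

    void setVolume(float volume)
    {
        // Pass the player-level volume through to every audio renderer.
        for (size_t i = 0; i < m_renderers.size(); ++i)
            [m_renderers[i].get() setVolume:volume];
    }

    void setMuted(bool muted)
    {
        // Ditto for the muted state.
        for (size_t i = 0; i < m_renderers.size(); ++i)
            [m_renderers[i].get() setMuted:muted];
    }

    void addRenderer(AVSampleBufferAudioRenderer* renderer)
    {
        if (m_renderers.contains(renderer))
            return;
        m_renderers.append(renderer);
        // Slave the renderer's read-only timebase to the player's master clock so
        // audio stays in sync with the display layer driven by the same clock.
        FigReadOnlyTimebaseSetTargetTimebase([renderer timebase], m_masterTimebase);
    }

    void removeRenderer(AVSampleBufferAudioRenderer* renderer)
    {
        size_t pos = m_renderers.find(renderer);
        if (pos != notFound)
            m_renderers.remove(pos);
    }

private:
    Vector<RetainPtr<AVSampleBufferAudioRenderer>> m_renderers;
    CMTimebaseRef m_masterTimebase;
};</pre>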
<h3>Modified Paths</h3>
<ul>
<li><a href="#trunkSourceWebCoreChangeLog">trunk/Source/WebCore/ChangeLog</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsavfoundationobjcMediaPlayerPrivateMediaSourceAVFObjCh">trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsavfoundationobjcMediaPlayerPrivateMediaSourceAVFObjCmm">trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsavfoundationobjcSourceBufferPrivateAVFObjCh">trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsavfoundationobjcSourceBufferPrivateAVFObjCmm">trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm</a></li>
</ul>
</div>
<div id="patch">
<h3>Diff</h3>
<a id="trunkSourceWebCoreChangeLog"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/ChangeLog (160809 => 160810)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/ChangeLog        2013-12-19 00:52:47 UTC (rev 160809)
+++ trunk/Source/WebCore/ChangeLog        2013-12-19 01:14:26 UTC (rev 160810)
</span><span class="lines">@@ -1,3 +1,39 @@
</span><ins>+2013-12-17 Jer Noble <jer.noble@apple.com>
+
+ [MSE][Mac] Add AVSampleBufferAudioRenderer support.
+ https://bugs.webkit.org/show_bug.cgi?id=125905
+
+ Reviewed by Eric Carlson.
+
+ On platforms which support AVSampleBufferAudioRenderer, add support
+ for playback of audio CMSampleBufferRefs generated by AVStreamDataParser.
+
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h:
+ * platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::isAvailable): Require AVSampleBufferAudioRenderer.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setVolume): Pass through to every audio renderer.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::setMuted): Ditto.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer): Slave the renderer's
+ timebase to the master clock.
+ (WebCore::MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer):
+ * platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h:
+ * platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm:
+ (WebCore::SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC): Drive-by fix; initialize
+ m_enabledVideoTrackID.
+ (WebCore::SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC): Call destroyRenderers().
+ (WebCore::callProcessCodedFrameForEachSample): Drive-by fix; convert the bool return to an OSStatus.
+ (WebCore::SourceBufferPrivateAVFObjC::destroyRenderers): Added; flush and destroy the audio
+ renderers.
+ (WebCore::SourceBufferPrivateAVFObjC::removedFromMediaSource): Call destroyRenderers().
+ (WebCore::SourceBufferPrivateAVFObjC::trackDidChangeEnabled): Enable or disable the audio
+ renderer in response.
+ (WebCore::SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples): Added an
+ audio-specific version.
+ (WebCore::SourceBufferPrivateAVFObjC::enqueueSample): Ditto.
+ (WebCore::SourceBufferPrivateAVFObjC::isReadyForMoreSamples): Ditto.
+ (WebCore::SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples): Ditto.
+ (WebCore::SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples): Ditto.
+
</ins><span class="cx"> 2013-12-18 Seokju Kwon <seokju@webkit.org>
</span><span class="cx">
</span><span class="cx"> Web Inspector: Remove leftover code from InspectorController after r108965
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformgraphicsavfoundationobjcMediaPlayerPrivateMediaSourceAVFObjCh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h (160809 => 160810)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h        2013-12-19 00:52:47 UTC (rev 160809)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h        2013-12-19 01:14:26 UTC (rev 160810)
</span><span class="lines">@@ -31,8 +31,10 @@
</span><span class="cx"> #include "MediaPlayerPrivate.h"
</span><span class="cx"> #include "SourceBufferPrivateClient.h"
</span><span class="cx"> #include <wtf/MediaTime.h>
</span><ins>+#include <wtf/Vector.h>
</ins><span class="cx">
</span><span class="cx"> OBJC_CLASS AVAsset;
</span><ins>+OBJC_CLASS AVSampleBufferAudioRenderer;
</ins><span class="cx"> OBJC_CLASS AVSampleBufferDisplayLayer;
</span><span class="cx">
</span><span class="cx"> typedef struct OpaqueCMTimebase* CMTimebaseRef;
</span><span class="lines">@@ -52,6 +54,9 @@
</span><span class="cx"> void addDisplayLayer(AVSampleBufferDisplayLayer*);
</span><span class="cx"> void removeDisplayLayer(AVSampleBufferDisplayLayer*);
</span><span class="cx">
</span><ins>+ void addAudioRenderer(AVSampleBufferAudioRenderer*);
+ void removeAudioRenderer(AVSampleBufferAudioRenderer*);
+
</ins><span class="cx"> virtual MediaPlayer::NetworkState networkState() const OVERRIDE;
</span><span class="cx"> virtual MediaPlayer::ReadyState readyState() const OVERRIDE;
</span><span class="cx"> void setReadyState(MediaPlayer::ReadyState);
</span><span class="lines">@@ -82,6 +87,10 @@
</span><span class="cx">
</span><span class="cx"> virtual bool paused() const OVERRIDE;
</span><span class="cx">
</span><ins>+ virtual void setVolume(float volume) OVERRIDE;
+ virtual bool supportsMuting() const OVERRIDE { return true; }
+ virtual void setMuted(bool) OVERRIDE;
+
</ins><span class="cx"> virtual bool supportsScanning() const OVERRIDE;
</span><span class="cx">
</span><span class="cx"> virtual IntSize naturalSize() const OVERRIDE;
</span><span class="lines">@@ -151,6 +160,7 @@
</span><span class="cx"> RefPtr<MediaSourcePrivateAVFObjC> m_mediaSourcePrivate;
</span><span class="cx"> RetainPtr<AVAsset> m_asset;
</span><span class="cx"> RetainPtr<AVSampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
</span><ins>+ Vector<RetainPtr<AVSampleBufferAudioRenderer>> m_sampleBufferAudioRenderers;
</ins><span class="cx"> std::unique_ptr<PlatformClockCM> m_clock;
</span><span class="cx"> MediaPlayer::NetworkState m_networkState;
</span><span class="cx"> MediaPlayer::ReadyState m_readyState;
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformgraphicsavfoundationobjcMediaPlayerPrivateMediaSourceAVFObjCmm"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm (160809 => 160810)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm        2013-12-19 00:52:47 UTC (rev 160809)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm        2013-12-19 01:14:26 UTC (rev 160810)
</span><span class="lines">@@ -48,10 +48,13 @@
</span><span class="cx">
</span><span class="cx"> SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAsset)
</span><span class="cx"> SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
</span><ins>+SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
</ins><span class="cx"> SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
</span><span class="cx"> SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser)
</span><span class="cx"> SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
</span><span class="cx">
</span><ins>+SOFT_LINK(CoreMedia, FigReadOnlyTimebaseSetTargetTimebase, OSStatus, (CMTimebaseRef timebase, CMTimebaseRef newTargetTimebase), (timebase, newTargetTimebase))
+
</ins><span class="cx"> #pragma mark -
</span><span class="cx"> #pragma mark AVVideoPerformanceMetrics
</span><span class="cx">
</span><span class="lines">@@ -66,6 +69,18 @@
</span><span class="cx"> - (AVVideoPerformanceMetrics *)videoPerformanceMetrics;
</span><span class="cx"> @end
</span><span class="cx">
</span><ins>+
+#pragma mark -
+#pragma mark AVSampleBufferAudioRenderer
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED <= 1090
+@interface AVSampleBufferAudioRenderer : NSObject
+- (CMTimebaseRef)timebase;
+- (void)setVolume:(float)volume;
+- (void)setMuted:(BOOL)muted;
+@end
+#endif
+
</ins><span class="cx"> namespace WebCore {
</span><span class="cx">
</span><span class="cx"> #pragma mark -
</span><span class="lines">@@ -101,7 +116,7 @@
</span><span class="cx">
</span><span class="cx"> bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
</span><span class="cx"> {
</span><del>- return AVFoundationLibrary() && CoreMediaLibrary() && getAVStreamDataParserClass();
</del><ins>+ return AVFoundationLibrary() && CoreMediaLibrary() && getAVStreamDataParserClass() && getAVSampleBufferAudioRendererClass();
</ins><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> static HashSet<String> mimeTypeCache()
</span><span class="lines">@@ -211,11 +226,23 @@
</span><span class="cx"> return !m_clock->isRunning();
</span><span class="cx"> }
</span><span class="cx">
</span><ins>+void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
+{
+ for (auto it = m_sampleBufferAudioRenderers.begin(), end = m_sampleBufferAudioRenderers.end(); it != end; ++it)
+ [*it setVolume:volume];
+}
+
</ins><span class="cx"> bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
</span><span class="cx"> {
</span><span class="cx"> return true;
</span><span class="cx"> }
</span><span class="cx">
</span><ins>+void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
+{
+ for (auto it = m_sampleBufferAudioRenderers.begin(), end = m_sampleBufferAudioRenderers.end(); it != end; ++it)
+ [*it setMuted:muted];
+}
+
</ins><span class="cx"> IntSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
</span><span class="cx"> {
</span><span class="cx"> // FIXME(125156): Report the intrinsic size of the enabled video track.
</span><span class="lines">@@ -468,6 +495,26 @@
</span><span class="cx"> m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
</span><span class="cx"> }
</span><span class="cx">
</span><ins>+void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
+{
+ if (m_sampleBufferAudioRenderers.contains(audioRenderer))
+ return;
+
+ m_sampleBufferAudioRenderers.append(audioRenderer);
+ FigReadOnlyTimebaseSetTargetTimebase([audioRenderer timebase], m_clock->timebase());
+ m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
</ins><span class="cx"> }
</span><span class="cx">
</span><ins>+void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
+{
+ size_t pos = m_sampleBufferAudioRenderers.find(audioRenderer);
+ if (pos == notFound)
+ return;
+
+ m_sampleBufferAudioRenderers.remove(pos);
+ m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
+}
+
+}
+
</ins><span class="cx"> #endif
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformgraphicsavfoundationobjcSourceBufferPrivateAVFObjCh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h (160809 => 160810)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h        2013-12-19 00:52:47 UTC (rev 160809)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h        2013-12-19 01:14:26 UTC (rev 160810)
</span><span class="lines">@@ -29,6 +29,7 @@
</span><span class="cx"> #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
</span><span class="cx">
</span><span class="cx"> #include "SourceBufferPrivate.h"
</span><ins>+#include <map>
</ins><span class="cx"> #include <wtf/Deque.h>
</span><span class="cx"> #include <wtf/HashMap.h>
</span><span class="cx"> #include <wtf/MediaTime.h>
</span><span class="lines">@@ -39,6 +40,7 @@
</span><span class="cx">
</span><span class="cx"> OBJC_CLASS AVAsset;
</span><span class="cx"> OBJC_CLASS AVStreamDataParser;
</span><ins>+OBJC_CLASS AVSampleBufferAudioRenderer;
</ins><span class="cx"> OBJC_CLASS AVSampleBufferDisplayLayer;
</span><span class="cx"> OBJC_CLASS NSError;
</span><span class="cx"> OBJC_CLASS NSObject;
</span><span class="lines">@@ -94,13 +96,21 @@
</span><span class="cx"> virtual void enqueueSample(PassRefPtr<MediaSample>, AtomicString trackID) OVERRIDE;
</span><span class="cx"> virtual bool isReadyForMoreSamples(AtomicString trackID) OVERRIDE;
</span><span class="cx"> virtual void setActive(bool) OVERRIDE;
</span><ins>+ virtual void notifyClientWhenReadyForMoreSamples(AtomicString trackID) OVERRIDE;
</ins><span class="cx">
</span><ins>+ void flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>>, AVSampleBufferAudioRenderer*);
+ void flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>>, AVSampleBufferDisplayLayer*);
+
+ void didBecomeReadyForMoreSamples(int trackID);
+ void destroyRenderers();
+
</ins><span class="cx"> Vector<RefPtr<VideoTrackPrivate>> m_videoTracks;
</span><span class="cx"> Vector<RefPtr<AudioTrackPrivate>> m_audioTracks;
</span><span class="cx">
</span><span class="cx"> RetainPtr<AVStreamDataParser> m_parser;
</span><span class="cx"> RetainPtr<AVAsset> m_asset;
</span><span class="cx"> RetainPtr<AVSampleBufferDisplayLayer> m_displayLayer;
</span><ins>+ std::map<int, RetainPtr<AVSampleBufferAudioRenderer>> m_audioRenderers;
</ins><span class="cx"> RetainPtr<NSObject> m_delegate;
</span><span class="cx">
</span><span class="cx"> MediaSourcePrivateAVFObjC* m_mediaSource;
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformgraphicsavfoundationobjcSourceBufferPrivateAVFObjCmm"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm (160809 => 160810)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm        2013-12-19 00:52:47 UTC (rev 160809)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm        2013-12-19 01:14:26 UTC (rev 160810)
</span><span class="lines">@@ -59,6 +59,7 @@
</span><span class="cx">
</span><span class="cx"> SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
</span><span class="cx"> SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
</span><ins>+SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
</ins><span class="cx"> SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
</span><span class="cx">
</span><span class="cx"> SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
</span><span class="lines">@@ -107,9 +108,7 @@
</span><span class="cx">
</span><span class="cx"> #pragma mark -
</span><span class="cx"> #pragma mark AVStreamDataParser
</span><del>-@class AVStreamDataParserInternal;
</del><span class="cx">
</span><del>-NS_CLASS_AVAILABLE(TBD, TBD)
</del><span class="cx"> @interface AVStreamDataParser : NSObject
</span><span class="cx"> - (void)setDelegate:(id)delegate;
</span><span class="cx"> - (void)appendStreamData:(NSData *)data;
</span><span class="lines">@@ -118,6 +117,21 @@
</span><span class="cx"> @end
</span><span class="cx">
</span><span class="cx"> #pragma mark -
</span><ins>+#pragma mark AVSampleBufferAudioRenderer
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED <= 1090
+@interface AVSampleBufferAudioRenderer : NSObject
+- (NSInteger)status;
+- (NSError*)error;
+- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+- (void)flush;
+- (BOOL)isReadyForMoreMediaData;
+- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
+- (void)stopRequestingMediaData;
+@end
+#endif
+
+#pragma mark -
</ins><span class="cx"> #pragma mark WebAVStreamDataParserListener
</span><span class="cx">
</span><span class="cx"> @interface WebAVStreamDataParserListener : NSObject {
</span><span class="lines">@@ -290,18 +304,13 @@
</span><span class="cx"> , m_mediaSource(parent)
</span><span class="cx"> , m_client(0)
</span><span class="cx"> , m_parsingSucceeded(true)
</span><ins>+ , m_enabledVideoTrackID(-1)
</ins><span class="cx"> {
</span><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
</span><span class="cx"> {
</span><del>- if (m_displayLayer) {
- if (m_mediaSource)
- m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
- [m_displayLayer flushAndRemoveImage];
- [m_displayLayer stopRequestingMediaData];
- m_displayLayer = nullptr;
- }
</del><ins>+ destroyRenderers();
</ins><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
</span><span class="lines">@@ -352,7 +361,7 @@
</span><span class="cx"> static OSStatus callProcessCodedFrameForEachSample(CMSampleBufferRef sampleBuffer, CMItemCount, void *refcon)
</span><span class="cx"> {
</span><span class="cx"> ProcessCodedFrameInfo* info = static_cast<ProcessCodedFrameInfo*>(refcon);
</span><del>- return info->sourceBuffer->processCodedFrame(info->trackID, sampleBuffer, info->mediaType);
</del><ins>+ return info->sourceBuffer->processCodedFrame(info->trackID, sampleBuffer, info->mediaType) ? noErr : paramErr;
</ins><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
</span><span class="lines">@@ -401,7 +410,7 @@
</span><span class="cx"> notImplemented();
</span><span class="cx"> }
</span><span class="cx">
</span><del>-void SourceBufferPrivateAVFObjC::removedFromMediaSource()
</del><ins>+void SourceBufferPrivateAVFObjC::destroyRenderers()
</ins><span class="cx"> {
</span><span class="cx"> if (m_displayLayer) {
</span><span class="cx"> if (m_mediaSource)
</span><span class="lines">@@ -411,6 +420,21 @@
</span><span class="cx"> m_displayLayer = nullptr;
</span><span class="cx"> }
</span><span class="cx">
</span><ins>+ for (auto it = m_audioRenderers.begin(), end = m_audioRenderers.end(); it != end; ++it) {
+ AVSampleBufferAudioRenderer* renderer = it->second.get();
+ if (m_mediaSource)
+ m_mediaSource->player()->removeAudioRenderer(renderer);
+ [renderer flush];
+ [renderer stopRequestingMediaData];
+ }
+
+ m_audioRenderers.clear();
+}
+
+void SourceBufferPrivateAVFObjC::removedFromMediaSource()
+{
+ destroyRenderers();
+
</ins><span class="cx"> if (m_mediaSource)
</span><span class="cx"> m_mediaSource->removeSourceBuffer(this);
</span><span class="cx"> }
</span><span class="lines">@@ -468,18 +492,38 @@
</span><span class="cx"> if (!m_displayLayer) {
</span><span class="cx"> m_displayLayer = [[getAVSampleBufferDisplayLayerClass() alloc] init];
</span><span class="cx"> [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
</span><del>- if (m_client)
- m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
</del><ins>+ didBecomeReadyForMoreSamples(trackID);
</ins><span class="cx"> }];
</span><del>- if (m_mediaSource)
- m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
</del><span class="cx"> }
</span><ins>+ if (m_mediaSource)
+ m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
</ins><span class="cx"> }
</span><span class="cx"> }
</span><span class="cx">
</span><del>-void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC*)
</del><ins>+void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
</ins><span class="cx"> {
</span><del>- // No-op.
</del><ins>+ int trackID = track->trackID();
+
+ if (!track->enabled()) {
+ AVSampleBufferAudioRenderer* renderer = m_audioRenderers[trackID].get();
+ [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
+ if (m_mediaSource)
+ m_mediaSource->player()->removeAudioRenderer(renderer);
+ } else {
+ [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
+ AVSampleBufferAudioRenderer* renderer;
+ if (!m_audioRenderers.count(trackID)) {
+ renderer = [[getAVSampleBufferAudioRendererClass() alloc] init];
+ [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+ didBecomeReadyForMoreSamples(trackID);
+ }];
+ m_audioRenderers[trackID] = renderer;
+ } else
+ renderer = m_audioRenderers[trackID].get();
+
+ if (m_mediaSource)
+ m_mediaSource->player()->addAudioRenderer(renderer);
+ }
</ins><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
</span><span class="lines">@@ -498,14 +542,20 @@
</span><span class="cx"> return adoptCF(newSampleBuffer);
</span><span class="cx"> }
</span><span class="cx">
</span><del>-void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackID)
</del><ins>+void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackIDString)
</ins><span class="cx"> {
</span><del>- if (trackID.toInt() != m_enabledVideoTrackID)
- return;
</del><ins>+ int trackID = trackIDString.toInt();
+ LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %d samples, trackId: %d", this, mediaSamples.size(), trackID);
</ins><span class="cx">
</span><del>- LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %d samples, trackId: %d", this, mediaSamples.size(), trackID.toInt());
</del><ins>+ if (trackID == m_enabledVideoTrackID)
+ flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
+ else if (m_audioRenderers.count(trackID))
+ flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers[trackID].get());
+}
</ins><span class="cx">
</span><del>- [m_displayLayer flush];
</del><ins>+void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
+{
+ [renderer flush];
</ins><span class="cx">
</span><span class="cx"> for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
</span><span class="cx"> RefPtr<MediaSample>& mediaSample = *it;
</span><span class="lines">@@ -515,16 +565,33 @@
</span><span class="cx">
</span><span class="cx"> RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
</span><span class="cx">
</span><del>- [m_displayLayer enqueueSampleBuffer:sampleBuffer.get()];
</del><ins>+ [renderer enqueueSampleBuffer:sampleBuffer.get()];
</ins><span class="cx"> }
</span><ins>+}
</ins><span class="cx">
</span><ins>+void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferDisplayLayer* layer)
+{
+ [layer flush];
+
+ for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
+ RefPtr<MediaSample>& mediaSample = *it;
+
+ PlatformSample platformSample = mediaSample->platformSample();
+ ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);
+
+ RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
+
+ [layer enqueueSampleBuffer:sampleBuffer.get()];
+ }
+
</ins><span class="cx"> if (m_mediaSource)
</span><span class="cx"> m_mediaSource->player()->setHasAvailableVideoFrame(false);
</span><span class="cx"> }
</span><span class="cx">
</span><del>-void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackID)
</del><ins>+void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
</ins><span class="cx"> {
</span><del>- if (trackID.toInt() != m_enabledVideoTrackID)
</del><ins>+ int trackID = trackIDString.toInt();
+ if (trackID != m_enabledVideoTrackID && !m_audioRenderers.count(trackID))
</ins><span class="cx"> return;
</span><span class="cx">
</span><span class="cx"> RefPtr<MediaSample> mediaSample = prpMediaSample;
</span><span class="lines">@@ -533,15 +600,25 @@
</span><span class="cx"> if (platformSample.type != PlatformSample::CMSampleBufferType)
</span><span class="cx"> return;
</span><span class="cx">
</span><del>- [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
- if (m_mediaSource)
- m_mediaSource->player()->setHasAvailableVideoFrame(true);
</del><ins>+ if (trackID == m_enabledVideoTrackID) {
+ [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
+ if (m_mediaSource)
+ m_mediaSource->player()->setHasAvailableVideoFrame(true);
+ } else
+ [m_audioRenderers[trackID] enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
</ins><span class="cx"> }
</span><span class="cx">
</span><del>-bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackID)
</del><ins>+bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
</ins><span class="cx"> {
</span><del>- UNUSED_PARAM(trackID);
- return [m_displayLayer isReadyForMoreMediaData];
</del><ins>+ int trackID = trackIDString.toInt();
+ if (trackID == m_enabledVideoTrackID)
+ return [m_displayLayer isReadyForMoreMediaData];
+ else if (m_audioRenderers.count(trackID))
+ return [m_audioRenderers[trackID] isReadyForMoreMediaData];
+ else
+ ASSERT_NOT_REACHED();
+
+ return false;
</ins><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> void SourceBufferPrivateAVFObjC::setActive(bool isActive)
</span><span class="lines">@@ -563,6 +640,36 @@
</span><span class="cx"> m_client->sourceBufferPrivateSeekToTime(this, time);
</span><span class="cx"> }
</span><span class="cx">
</span><ins>+void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
+{
+ if (trackID == m_enabledVideoTrackID)
+ [m_displayLayer stopRequestingMediaData];
+ else if (m_audioRenderers.count(trackID))
+ [m_audioRenderers[trackID] stopRequestingMediaData];
+ else {
+ ASSERT_NOT_REACHED();
+ return;
+ }
+
+ if (m_client)
+ m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
</ins><span class="cx"> }
</span><span class="cx">
</span><ins>+void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicString trackIDString)
+{
+ int trackID = trackIDString.toInt();
+ if (trackID == m_enabledVideoTrackID) {
+ [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+ didBecomeReadyForMoreSamples(trackID);
+ }];
+ } else if (m_audioRenderers.count(trackID)) {
+ [m_audioRenderers[trackID] requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+ didBecomeReadyForMoreSamples(trackID);
+ }];
+ } else
+ ASSERT_NOT_REACHED();
+}
+
+}
+
</ins><span class="cx"> #endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
</span></span></pre>
</div>
</div>
</body>
</html>