<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>[281440] trunk</title>
</head>
<body>

<style type="text/css"><!--
#msg dl.meta { border: 1px #006 solid; background: #369; padding: 6px; color: #fff; }
#msg dl.meta dt { float: left; width: 6em; font-weight: bold; }
#msg dt:after { content:':';}
#msg dl, #msg dt, #msg ul, #msg li, #header, #footer, #logmsg { font-family: verdana,arial,helvetica,sans-serif; font-size: 10pt;  }
#msg dl a { font-weight: bold}
#msg dl a:link    { color:#fc3; }
#msg dl a:active  { color:#ff0; }
#msg dl a:visited { color:#cc6; }
h3 { font-family: verdana,arial,helvetica,sans-serif; font-size: 10pt; font-weight: bold; }
#msg pre { overflow: auto; background: #ffc; border: 1px #fa0 solid; padding: 6px; }
#logmsg { background: #ffc; border: 1px #fa0 solid; padding: 1em 1em 0 1em; }
#logmsg p, #logmsg pre, #logmsg blockquote { margin: 0 0 1em 0; }
#logmsg p, #logmsg li, #logmsg dt, #logmsg dd { line-height: 14pt; }
#logmsg h1, #logmsg h2, #logmsg h3, #logmsg h4, #logmsg h5, #logmsg h6 { margin: .5em 0; }
#logmsg h1:first-child, #logmsg h2:first-child, #logmsg h3:first-child, #logmsg h4:first-child, #logmsg h5:first-child, #logmsg h6:first-child { margin-top: 0; }
#logmsg ul, #logmsg ol { padding: 0; list-style-position: inside; margin: 0 0 0 1em; }
#logmsg ul { text-indent: -1em; padding-left: 1em; }
#logmsg ol { text-indent: -1.5em; padding-left: 1.5em; }
#logmsg > ul, #logmsg > ol { margin: 0 0 1em 0; }
#logmsg pre { background: #eee; padding: 1em; }
#logmsg blockquote { border: 1px solid #fa0; border-left-width: 10px; padding: 1em 1em 0 1em; background: white;}
#logmsg dl { margin: 0; }
#logmsg dt { font-weight: bold; }
#logmsg dd { margin: 0; padding: 0 0 0.5em 0; }
#logmsg dd:before { content:'\00bb';}
#logmsg table { border-spacing: 0px; border-collapse: collapse; border-top: 4px solid #fa0; border-bottom: 1px solid #fa0; background: #fff; }
#logmsg table th { text-align: left; font-weight: normal; padding: 0.2em 0.5em; border-top: 1px dotted #fa0; }
#logmsg table td { text-align: right; border-top: 1px dotted #fa0; padding: 0.2em 0.5em; }
#logmsg table thead th { text-align: center; border-bottom: 1px solid #fa0; }
#logmsg table th.Corner { text-align: left; }
#logmsg hr { border: none 0; border-top: 2px dashed #fa0; height: 1px; }
#header, #footer { color: #fff; background: #636; border: 1px #300 solid; padding: 6px; }
#patch { width: 100%; }
#patch h4 {font-family: verdana,arial,helvetica,sans-serif;font-size:10pt;padding:8px;background:#369;color:#fff;margin:0;}
#patch .propset h4, #patch .binary h4 {margin:0;}
#patch pre {padding:0;line-height:1.2em;margin:0;}
#patch .diff {width:100%;background:#eee;padding: 0 0 10px 0;overflow:auto;}
#patch .propset .diff, #patch .binary .diff  {padding:10px 0;}
#patch span {display:block;padding:0 10px;}
#patch .modfile, #patch .addfile, #patch .delfile, #patch .propset, #patch .binary, #patch .copfile {border:1px solid #ccc;margin:10px 0;}
#patch ins {background:#dfd;text-decoration:none;display:block;padding:0 10px;}
#patch del {background:#fdd;text-decoration:none;display:block;padding:0 10px;}
#patch .lines, .info {color:#888;background:#fff;}
--></style>
<div id="msg">
<dl class="meta">
<dt>Revision</dt> <dd><a href="http://trac.webkit.org/projects/webkit/changeset/281440">281440</a></dd>
<dt>Author</dt> <dd>aboya@igalia.com</dd>
<dt>Date</dt> <dd>2021-08-23 06:22:29 -0700 (Mon, 23 Aug 2021)</dd>
</dl>

<h3>Log Message</h3>
<pre>[MSE][GStreamer] Implement multi-track support
https://bugs.webkit.org/show_bug.cgi?id=229072

Reviewed by Xabier Rodriguez-Calvar.

Source/WebCore:

This patch adds support for a SourceBuffer having more than one track in
the GStreamer port.

This fixes the following LayoutTests:

imported/w3c/web-platform-tests/media-source/mediasource-activesourcebuffers.html
media/media-source/media-source-has-audio-video.html
media/media-source/only-bcp47-language-tags-accepted-as-valid.html

* platform/graphics/gstreamer/GStreamerCommon.h:
(GstIteratorAdaptor::GstIteratorAdaptor):
(GstIteratorAdaptor::iterator::iterator):
(GstIteratorAdaptor::iterator::operator*):
(GstIteratorAdaptor::iterator::operator++):
(GstIteratorAdaptor::iterator::operator==):
(GstIteratorAdaptor::iterator::operator!=):
(GstIteratorAdaptor::begin):
(GstIteratorAdaptor::end):
* platform/graphics/gstreamer/mse/AppendPipeline.cpp:
(WebCore::AppendPipeline::AppendPipeline):
(WebCore::AppendPipeline::~AppendPipeline):
(WebCore::AppendPipeline::parseDemuxerSrcPadCaps):
(WebCore::AppendPipeline::appsinkCapsChanged):
(WebCore::AppendPipeline::handleEndOfAppend):
(WebCore::AppendPipeline::appsinkNewSample):
(WebCore::AppendPipeline::didReceiveInitializationSegment):
(WebCore::AppendPipeline::consumeAppsinksAvailableSamples):
(WebCore::AppendPipeline::resetParserState):
(WebCore::AppendPipeline::handleAppsinkNewSampleFromStreamingThread):
(WebCore::createOptionalParserForFormat):
(WebCore::AppendPipeline::generateTrackId):
(WebCore::AppendPipeline::tryCreateTrackFromPad):
(WebCore::AppendPipeline::tryMatchPadToExistingTrack):
(WebCore::AppendPipeline::linkPadWithTrack):
(WebCore::AppendPipeline::makeWebKitTrack):
(WebCore::AppendPipeline::Track::initializeElements):
(WebCore::AppendPipeline::hookTrackEvents):
(WebCore::AppendPipeline::streamTypeToString):
(WebCore::AppendPipeline::id): Deleted.
(WebCore::AppendPipeline::trackId): Deleted.
(WebCore::AppendPipeline::consumeAppsinkAvailableSamples): Deleted.
(WebCore::AppendPipeline::connectDemuxerSrcPadToAppsinkFromStreamingThread): Deleted.
(WebCore::AppendPipeline::connectDemuxerSrcPadToAppsink): Deleted.
(WebCore::AppendPipeline::disconnectDemuxerSrcPadFromAppsinkFromAnyThread): Deleted.
* platform/graphics/gstreamer/mse/AppendPipeline.h:
(WebCore::AppendPipeline::sourceBufferPrivate):
(WebCore::AppendPipeline::Track::Track):
(WebCore::AppendPipeline::appsrc):
(WebCore::AppendPipeline::appsinkCaps): Deleted.
(WebCore::AppendPipeline::track): Deleted.
(WebCore::AppendPipeline::appsink): Deleted.
(WebCore::AppendPipeline::demuxerSrcPadCaps): Deleted.
* platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp:
(WebCore::MediaPlayerPrivateGStreamerMSE::setInitialVideoSize):
(WebCore::MediaPlayerPrivateGStreamerMSE::trackDetected): Deleted.
* platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h:

LayoutTests:

Update expectations and rebaseline one test in which the buffered
ranges have changed slightly now that the previously discarded audio
track is parsed.

* platform/glib/TestExpectations:
* platform/glib/imported/w3c/web-platform-tests/media-source/mediasource-remove-expected.txt:</pre>
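
<h3>Usage Sketch</h3>
A minimal, hypothetical usage sketch of the GstIteratorAdaptor helper this patch
adds to GStreamerCommon.h, mirroring how AppendPipeline iterates the demuxer src
pads in the diff below; the forEachSrcPad function name is illustrative only and
not part of the change.
<pre>#include "GStreamerCommon.h"

static void forEachSrcPad(GstElement* demuxer)
{
    // gst_element_iterate_src_pads() returns a new GstIterator; the adaptor
    // takes ownership through GUniquePtr and yields each GstPad* in turn,
    // making the iterator usable in a range-based for loop.
    for (GstPad* pad : GstIteratorAdaptor<GstPad>(GUniquePtr<GstIterator>(gst_element_iterate_src_pads(demuxer))))
        GST_DEBUG("demuxer src pad: %" GST_PTR_FORMAT, pad);
}</pre>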

<h3>Modified Paths</h3>
<ul>
<li><a href="#trunkLayoutTestsChangeLog">trunk/LayoutTests/ChangeLog</a></li>
<li><a href="#trunkLayoutTestsplatformglibTestExpectations">trunk/LayoutTests/platform/glib/TestExpectations</a></li>
<li><a href="#trunkLayoutTestsplatformglibimportedw3cwebplatformtestsmediasourcemediasourceremoveexpectedtxt">trunk/LayoutTests/platform/glib/imported/w3c/web-platform-tests/media-source/mediasource-remove-expected.txt</a></li>
<li><a href="#trunkSourceWebCoreChangeLog">trunk/Source/WebCore/ChangeLog</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsgstreamerGStreamerCommonh">trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsgstreamermseAppendPipelinecpp">trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsgstreamermseAppendPipelineh">trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsgstreamermseMediaPlayerPrivateGStreamerMSEcpp">trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp</a></li>
<li><a href="#trunkSourceWebCoreplatformgraphicsgstreamermseMediaPlayerPrivateGStreamerMSEh">trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h</a></li>
</ul>

</div>
<div id="patch">
<h3>Diff</h3>
<a id="trunkLayoutTestsChangeLog"></a>
<div class="modfile"><h4>Modified: trunk/LayoutTests/ChangeLog (281439 => 281440)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/LayoutTests/ChangeLog      2021-08-23 07:11:41 UTC (rev 281439)
+++ trunk/LayoutTests/ChangeLog 2021-08-23 13:22:29 UTC (rev 281440)
</span><span class="lines">@@ -1,3 +1,17 @@
</span><ins>+2021-08-23  Alicia Boya García  <aboya@igalia.com>
+
+        [MSE][GStreamer] Implement multi-track support
+        https://bugs.webkit.org/show_bug.cgi?id=229072
+
+        Reviewed by Xabier Rodriguez-Calvar.
+
+        Update expectations and rebaseline one test in which the buffered
+        ranges have changed slightly now that the previously discarded audio
+        track is parsed.
+
+        * platform/glib/TestExpectations:
+        * platform/glib/imported/w3c/web-platform-tests/media-source/mediasource-remove-expected.txt:
+
</ins><span class="cx"> 2021-08-22  Yusuke Suzuki  <ysuzuki@apple.com>
</span><span class="cx"> 
</span><span class="cx">         [JSC] Remove already-shipped wasm option flags
</span></span></pre></div>
<a id="trunkLayoutTestsplatformglibTestExpectations"></a>
<div class="modfile"><h4>Modified: trunk/LayoutTests/platform/glib/TestExpectations (281439 => 281440)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/LayoutTests/platform/glib/TestExpectations 2021-08-23 07:11:41 UTC (rev 281439)
+++ trunk/LayoutTests/platform/glib/TestExpectations    2021-08-23 13:22:29 UTC (rev 281440)
</span><span class="lines">@@ -588,7 +588,6 @@
</span><span class="cx"> webkit.org/b/224767 imported/w3c/web-platform-tests/media-source/mediasource-changetype-play-implicit.html [ Crash Pass ]
</span><span class="cx"> 
</span><span class="cx"> # See also bug #175578.
</span><del>-webkit.org/b/167108 imported/w3c/web-platform-tests/media-source/mediasource-activesourcebuffers.html [ Failure ]
</del><span class="cx"> webkit.org/b/167108 imported/w3c/web-platform-tests/media-source/mediasource-avtracks.html [ Failure ]
</span><span class="cx"> webkit.org/b/167108 imported/w3c/web-platform-tests/media-source/mediasource-buffered.html [ Failure ]
</span><span class="cx"> webkit.org/b/167108 imported/w3c/web-platform-tests/media-source/mediasource-changetype.html [ Failure Crash ]
</span><span class="lines">@@ -618,7 +617,6 @@
</span><span class="cx"> 
</span><span class="cx"> webkit.org/b/199617 http/tests/media/hls/hls-video-resize.html [ Skip ]
</span><span class="cx"> 
</span><del>-webkit.org/b/227258 media/media-source/media-source-has-audio-video.html [ Timeout ]
</del><span class="cx"> webkit.org/b/227258 media/media-source/media-source-seek-unbuffered.html [ Timeout ]
</span><span class="cx"> 
</span><span class="cx"> webkit.org/b/227661 http/tests/media/hls/hls-webvtt-seek-backwards.html [ Timeout ]
</span><span class="lines">@@ -2260,7 +2258,6 @@
</span><span class="cx"> 
</span><span class="cx"> webkit.org/b/168373 http/tests/media/track-in-band-hls-metadata-crash.html [ Timeout ]
</span><span class="cx"> webkit.org/b/168373 media/media-fullscreen-loop-inline.html [ Timeout ]
</span><del>-webkit.org/b/168373 media/media-source/only-bcp47-language-tags-accepted-as-valid.html [ Crash Timeout ]
</del><span class="cx"> webkit.org/b/174242 media/media-fullscreen-pause-inline.html [ Skip ]
</span><span class="cx"> webkit.org/b/182108 http/tests/media/hls/hls-webvtt-tracks.html [ Timeout ]
</span><span class="cx"> webkit.org/b/137311 media/video-fullscreen-only-playback.html [ Timeout Crash ]
</span></span></pre></div>
<a id="trunkLayoutTestsplatformglibimportedw3cwebplatformtestsmediasourcemediasourceremoveexpectedtxt"></a>
<div class="modfile"><h4>Modified: trunk/LayoutTests/platform/glib/imported/w3c/web-platform-tests/media-source/mediasource-remove-expected.txt (281439 => 281440)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/LayoutTests/platform/glib/imported/w3c/web-platform-tests/media-source/mediasource-remove-expected.txt     2021-08-23 07:11:41 UTC (rev 281439)
+++ trunk/LayoutTests/platform/glib/imported/w3c/web-platform-tests/media-source/mediasource-remove-expected.txt        2021-08-23 13:22:29 UTC (rev 281440)
</span><span class="lines">@@ -11,8 +11,8 @@
</span><span class="cx"> PASS Test aborting a remove operation.
</span><span class="cx"> PASS Test remove with a start at the duration.
</span><span class="cx"> PASS Test remove transitioning readyState from 'ended' to 'open'.
</span><del>-FAIL Test removing all appended data. assert_equals: Initial buffered range. expected "{ [0.095, 6.423) }" but got "{ [0.000, 6.423) }"
-FAIL Test removing beginning of appended data. assert_equals: Initial buffered range. expected "{ [0.095, 6.423) }" but got "{ [0.000, 6.423) }"
-FAIL Test removing the middle of appended data. assert_equals: Initial buffered range. expected "{ [0.095, 6.423) }" but got "{ [0.000, 6.423) }"
-FAIL Test removing the end of appended data. assert_equals: Initial buffered range. expected "{ [0.095, 6.423) }" but got "{ [0.000, 6.423) }"
</del><ins>+FAIL Test removing all appended data. assert_equals: Initial buffered range. expected "{ [0.095, 6.548) }" but got "{ [0.000, 6.548) }"
+FAIL Test removing beginning of appended data. assert_equals: Initial buffered range. expected "{ [0.095, 6.548) }" but got "{ [0.000, 6.548) }"
+FAIL Test removing the middle of appended data. assert_equals: Initial buffered range. expected "{ [0.095, 6.548) }" but got "{ [0.000, 6.548) }"
+FAIL Test removing the end of appended data. assert_equals: Initial buffered range. expected "{ [0.095, 6.548) }" but got "{ [0.000, 6.548) }"
</ins><span class="cx"> 
</span></span></pre></div>
<a id="trunkSourceWebCoreChangeLog"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/ChangeLog (281439 => 281440)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/ChangeLog   2021-08-23 07:11:41 UTC (rev 281439)
+++ trunk/Source/WebCore/ChangeLog      2021-08-23 13:22:29 UTC (rev 281440)
</span><span class="lines">@@ -1,3 +1,67 @@
</span><ins>+2021-08-23  Alicia Boya García  <aboya@igalia.com>
+
+        [MSE][GStreamer] Implement multi-track support
+        https://bugs.webkit.org/show_bug.cgi?id=229072
+
+        Reviewed by Xabier Rodriguez-Calvar.
+
+        This patch adds support for a SourceBuffer having more than one track in
+        the GStreamer port.
+
+        This fixes the following LayoutTests:
+
+        imported/w3c/web-platform-tests/media-source/mediasource-activesourcebuffers.html
+        media/media-source/media-source-has-audio-video.html
+        media/media-source/only-bcp47-language-tags-accepted-as-valid.html
+
+        * platform/graphics/gstreamer/GStreamerCommon.h:
+        (GstIteratorAdaptor::GstIteratorAdaptor):
+        (GstIteratorAdaptor::iterator::iterator):
+        (GstIteratorAdaptor::iterator::operator*):
+        (GstIteratorAdaptor::iterator::operator++):
+        (GstIteratorAdaptor::iterator::operator==):
+        (GstIteratorAdaptor::iterator::operator!=):
+        (GstIteratorAdaptor::begin):
+        (GstIteratorAdaptor::end):
+        * platform/graphics/gstreamer/mse/AppendPipeline.cpp:
+        (WebCore::AppendPipeline::AppendPipeline):
+        (WebCore::AppendPipeline::~AppendPipeline):
+        (WebCore::AppendPipeline::parseDemuxerSrcPadCaps):
+        (WebCore::AppendPipeline::appsinkCapsChanged):
+        (WebCore::AppendPipeline::handleEndOfAppend):
+        (WebCore::AppendPipeline::appsinkNewSample):
+        (WebCore::AppendPipeline::didReceiveInitializationSegment):
+        (WebCore::AppendPipeline::consumeAppsinksAvailableSamples):
+        (WebCore::AppendPipeline::resetParserState):
+        (WebCore::AppendPipeline::handleAppsinkNewSampleFromStreamingThread):
+        (WebCore::createOptionalParserForFormat):
+        (WebCore::AppendPipeline::generateTrackId):
+        (WebCore::AppendPipeline::tryCreateTrackFromPad):
+        (WebCore::AppendPipeline::tryMatchPadToExistingTrack):
+        (WebCore::AppendPipeline::linkPadWithTrack):
+        (WebCore::AppendPipeline::makeWebKitTrack):
+        (WebCore::AppendPipeline::Track::initializeElements):
+        (WebCore::AppendPipeline::hookTrackEvents):
+        (WebCore::AppendPipeline::streamTypeToString):
+        (WebCore::AppendPipeline::id): Deleted.
+        (WebCore::AppendPipeline::trackId): Deleted.
+        (WebCore::AppendPipeline::consumeAppsinkAvailableSamples): Deleted.
+        (WebCore::AppendPipeline::connectDemuxerSrcPadToAppsinkFromStreamingThread): Deleted.
+        (WebCore::AppendPipeline::connectDemuxerSrcPadToAppsink): Deleted.
+        (WebCore::AppendPipeline::disconnectDemuxerSrcPadFromAppsinkFromAnyThread): Deleted.
+        * platform/graphics/gstreamer/mse/AppendPipeline.h:
+        (WebCore::AppendPipeline::sourceBufferPrivate):
+        (WebCore::AppendPipeline::Track::Track):
+        (WebCore::AppendPipeline::appsrc):
+        (WebCore::AppendPipeline::appsinkCaps): Deleted.
+        (WebCore::AppendPipeline::track): Deleted.
+        (WebCore::AppendPipeline::appsink): Deleted.
+        (WebCore::AppendPipeline::demuxerSrcPadCaps): Deleted.
+        * platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp:
+        (WebCore::MediaPlayerPrivateGStreamerMSE::setInitialVideoSize):
+        (WebCore::MediaPlayerPrivateGStreamerMSE::trackDetected): Deleted.
+        * platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h:
+
</ins><span class="cx"> 2021-08-22  Carlos Garcia Campos  <cgarcia@igalia.com>
</span><span class="cx"> 
</span><span class="cx">         [Freetype] Set maximum allowed font size for Freetype
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformgraphicsgstreamerGStreamerCommonh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h (281439 => 281440)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h       2021-08-23 07:11:41 UTC (rev 281439)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h  2021-08-23 13:22:29 UTC (rev 281440)
</span><span class="lines">@@ -335,4 +335,71 @@
</span><span class="cx"> using GstObjectLocker = ExternalLocker<void, gstObjectLock, gstObjectUnlock>;
</span><span class="cx"> using GstPadStreamLocker = ExternalLocker<GstPad, gstPadStreamLock, gstPadStreamUnlock>;
</span><span class="cx"> 
</span><ins>+template <typename T>
+class GstIteratorAdaptor {
+public:
+    GstIteratorAdaptor(GUniquePtr<GstIterator>&& iter)
+        : m_iter(WTFMove(iter))
+    { }
+
+    class iterator {
+    public:
+        iterator(GstIterator* iter, gboolean done = FALSE)
+            : m_iter(iter)
+            , m_done(done)
+        { }
+
+        T* operator*()
+        {
+            return m_currentValue;
+        }
+
+        iterator& operator++()
+        {
+            GValue value = G_VALUE_INIT;
+            switch (gst_iterator_next(m_iter, &value)) {
+            case GST_ITERATOR_OK:
+                m_currentValue = static_cast<T*>(g_value_get_object(&value));
+                g_value_reset(&value);
+                break;
+            case GST_ITERATOR_DONE:
+                m_done = TRUE;
+                m_currentValue = nullptr;
+                break;
+            default:
+                ASSERT_NOT_REACHED_WITH_MESSAGE("Unexpected iterator invalidation");
+            }
+            return *this;
+        }
+
+        bool operator==(const iterator& other)
+        {
+            return m_iter == other.m_iter && m_done == other.m_done;
+        }
+        bool operator!=(const iterator& other) { return !(*this == other); }
+
+    private:
+        GstIterator* m_iter;
+        gboolean m_done;
+        T* m_currentValue { nullptr };
+    };
+
+    iterator begin()
+    {
+        ASSERT(!m_started);
+        m_started = true;
+        iterator iter { m_iter.get() };
+        return ++iter;
+    }
+
+    iterator end()
+    {
+        return { m_iter.get(), TRUE };
+    }
+
+private:
+    GUniquePtr<GstIterator> m_iter;
+    bool m_started { false };
+};
+
</ins><span class="cx"> #endif // USE(GSTREAMER)
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformgraphicsgstreamermseAppendPipelinecpp"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp (281439 => 281440)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp  2021-08-23 07:11:41 UTC (rev 281439)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.cpp     2021-08-23 13:22:29 UTC (rev 281440)
</span><span class="lines">@@ -20,6 +20,8 @@
</span><span class="cx"> 
</span><span class="cx"> #include "config.h"
</span><span class="cx"> #include "AppendPipeline.h"
</span><ins>+#include "AbortableTaskQueue.h"
+#include "MediaSourcePrivateGStreamer.h"
</ins><span class="cx"> 
</span><span class="cx"> #if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
</span><span class="cx"> 
</span><span class="lines">@@ -102,9 +104,7 @@
</span><span class="cx"> AppendPipeline::AppendPipeline(SourceBufferPrivateGStreamer& sourceBufferPrivate, MediaPlayerPrivateGStreamerMSE& playerPrivate)
</span><span class="cx">     : m_sourceBufferPrivate(sourceBufferPrivate)
</span><span class="cx">     , m_playerPrivate(&playerPrivate)
</span><del>-    , m_id(0)
</del><span class="cx">     , m_wasBusAlreadyNotifiedOfAvailableSamples(false)
</span><del>-    , m_streamType(Unknown)
</del><span class="cx"> {
</span><span class="cx">     ASSERT(isMainThread());
</span><span class="cx">     std::call_once(s_staticInitializationFlag, AppendPipeline::staticInitialization);
</span><span class="lines">@@ -150,80 +150,23 @@
</span><span class="cx">     else
</span><span class="cx">         ASSERT_NOT_REACHED();
</span><span class="cx"> 
</span><del>-    m_appsink = makeGStreamerElement("appsink", nullptr);
-
-    gst_app_sink_set_emit_signals(GST_APP_SINK(m_appsink.get()), TRUE);
-    gst_base_sink_set_sync(GST_BASE_SINK(m_appsink.get()), FALSE);
-    gst_base_sink_set_async_enabled(GST_BASE_SINK(m_appsink.get()), FALSE); // No prerolls, no async state changes.
-    gst_base_sink_set_drop_out_of_segment(GST_BASE_SINK(m_appsink.get()), FALSE);
-    gst_base_sink_set_last_sample_enabled(GST_BASE_SINK(m_appsink.get()), FALSE);
-
-    GRefPtr<GstPad> appsinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
-    g_signal_connect(appsinkPad.get(), "notify::caps", G_CALLBACK(+[](GObject*, GParamSpec*, AppendPipeline* appendPipeline) {
-        if (isMainThread()) {
-            // When changing the pipeline state down to READY the demuxer is unlinked and this triggers a caps notification
-            // because the appsink loses its previously negotiated caps. We are not interested in these unnegotiated caps.
-#ifndef NDEBUG
-            GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appendPipeline->m_appsink.get(), "sink"));
-            GRefPtr<GstCaps> caps = adoptGRef(gst_pad_get_current_caps(pad.get()));
-            ASSERT(!caps);
-#endif
-            return;
-        }
-
-        // The streaming thread has just received a new caps and is about to let samples using the
-        // new caps flow. Let's block it until the main thread has consumed the samples with the old
-        // caps and has processed the caps change.
-        appendPipeline->m_taskQueue.enqueueTaskAndWait<AbortableTaskQueue::Void>([appendPipeline]() {
-            appendPipeline->appsinkCapsChanged();
-            return AbortableTaskQueue::Void();
-        });
-    }), this);
-
</del><span class="cx"> #if !LOG_DISABLED
</span><span class="cx">     GRefPtr<GstPad> demuxerPad = adoptGRef(gst_element_get_static_pad(m_demux.get(), "sink"));
</span><span class="cx">     m_demuxerDataEnteringPadProbeInformation.appendPipeline = this;
</span><span class="cx">     m_demuxerDataEnteringPadProbeInformation.description = "demuxer data entering";
</span><span class="cx">     m_demuxerDataEnteringPadProbeInformation.probeId = gst_pad_add_probe(demuxerPad.get(), GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelinePadProbeDebugInformation), &m_demuxerDataEnteringPadProbeInformation, nullptr);
</span><del>-    m_appsinkDataEnteringPadProbeInformation.appendPipeline = this;
-    m_appsinkDataEnteringPadProbeInformation.description = "appsink data entering";
-    m_appsinkDataEnteringPadProbeInformation.probeId = gst_pad_add_probe(appsinkPad.get(), GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelinePadProbeDebugInformation), &m_appsinkDataEnteringPadProbeInformation, nullptr);
</del><span class="cx"> #endif
</span><span class="cx"> 
</span><del>-#if ENABLE(ENCRYPTED_MEDIA)
-    m_appsinkPadEventProbeInformation.appendPipeline = this;
-    m_appsinkPadEventProbeInformation.description = "appsink event probe";
-    m_appsinkPadEventProbeInformation.probeId = gst_pad_add_probe(appsinkPad.get(), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, reinterpret_cast<GstPadProbeCallback>(appendPipelineAppsinkPadEventProbe), &m_appsinkPadEventProbeInformation, nullptr);
-#endif
-
-    // These signals won't be connected outside of the lifetime of "this".
-    g_signal_connect(m_demux.get(), "pad-added", G_CALLBACK(+[](GstElement*, GstPad* demuxerSrcPad, AppendPipeline* appendPipeline) {
-        appendPipeline->connectDemuxerSrcPadToAppsinkFromStreamingThread(demuxerSrcPad);
-    }), this);
-    g_signal_connect(m_demux.get(), "pad-removed", G_CALLBACK(+[](GstElement*, GstPad* demuxerSrcPad, AppendPipeline* appendPipeline) {
-        appendPipeline->disconnectDemuxerSrcPadFromAppsinkFromAnyThread(demuxerSrcPad);
-    }), this);
</del><ins>+    // These signals won't outlive the lifetime of `this`.
</ins><span class="cx">     g_signal_connect(m_demux.get(), "no-more-pads", G_CALLBACK(+[](GstElement*, AppendPipeline* appendPipeline) {
</span><span class="cx">         ASSERT(!isMainThread());
</span><span class="cx">         GST_DEBUG("Posting no-more-pads task to main thread");
</span><del>-        appendPipeline->m_taskQueue.enqueueTask([appendPipeline]() {
</del><ins>+        appendPipeline->m_taskQueue.enqueueTaskAndWait<AbortableTaskQueue::Void>([appendPipeline]() {
</ins><span class="cx">             appendPipeline->didReceiveInitializationSegment();
</span><ins>+            return AbortableTaskQueue::Void();
</ins><span class="cx">         });
</span><span class="cx">     }), this);
</span><del>-    g_signal_connect(m_appsink.get(), "new-sample", G_CALLBACK(+[](GstElement* appsink, AppendPipeline* appendPipeline) -> GstFlowReturn {
-        appendPipeline->handleAppsinkNewSampleFromStreamingThread(appsink);
-        return GST_FLOW_OK;
-    }), this);
-    g_signal_connect(m_appsink.get(), "eos", G_CALLBACK(+[](GstElement*, AppendPipeline* appendPipeline) {
-        // Just ignore EOS when having more than one pad. It likely means that one of the pads is
-        // going to be removed and the remaining one will be reattached.
-        if (appendPipeline->m_errorReceived || appendPipeline->m_demux->numsrcpads > 1)
-            return;
</del><span class="cx"> 
</span><del>-        GST_ERROR("AppendPipeline's appsink received EOS. This is usually caused by an invalid initialization segment.");
-        appendPipeline->handleErrorConditionFromStreamingThread();
-    }), this);
-
</del><span class="cx">     // Add_many will take ownership of a reference. That's why we used an assignment before.
</span><span class="cx">     gst_bin_add_many(GST_BIN(m_pipeline.get()), m_appsrc.get(), m_demux.get(), nullptr);
</span><span class="cx">     gst_element_link(m_appsrc.get(), m_demux.get());
</span><span class="lines">@@ -249,9 +192,6 @@
</span><span class="cx">         gst_bus_remove_signal_watch(m_bus.get());
</span><span class="cx">     }
</span><span class="cx"> 
</span><del>-    if (m_appsrc)
-        g_signal_handlers_disconnect_by_data(m_appsrc.get(), this);
-
</del><span class="cx">     if (m_demux) {
</span><span class="cx"> #if !LOG_DISABLED
</span><span class="cx">         GRefPtr<GstPad> demuxerPad = adoptGRef(gst_element_get_static_pad(m_demux.get(), "sink"));
</span><span class="lines">@@ -261,17 +201,15 @@
</span><span class="cx">         g_signal_handlers_disconnect_by_data(m_demux.get(), this);
</span><span class="cx">     }
</span><span class="cx"> 
</span><del>-    if (m_appsink) {
-        GRefPtr<GstPad> appsinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
</del><ins>+    for (std::unique_ptr<Track>& track : m_tracks) {
+        GRefPtr<GstPad> appsinkPad = adoptGRef(gst_element_get_static_pad(track->appsink.get(), "sink"));
</ins><span class="cx">         g_signal_handlers_disconnect_by_data(appsinkPad.get(), this);
</span><del>-        g_signal_handlers_disconnect_by_data(m_appsink.get(), this);
-
</del><ins>+        g_signal_handlers_disconnect_by_data(track->appsink.get(), this);
</ins><span class="cx"> #if !LOG_DISABLED
</span><del>-        gst_pad_remove_probe(appsinkPad.get(), m_appsinkDataEnteringPadProbeInformation.probeId);
</del><ins>+        gst_pad_remove_probe(appsinkPad.get(), track->appsinkDataEnteringPadProbeInformation.probeId);
</ins><span class="cx"> #endif
</span><del>-
</del><span class="cx"> #if ENABLE(ENCRYPTED_MEDIA)
</span><del>-        gst_pad_remove_probe(appsinkPad.get(), m_appsinkPadEventProbeInformation.probeId);
</del><ins>+        gst_pad_remove_probe(appsinkPad.get(), track->appsinkPadEventProbeInformation.probeId);
</ins><span class="cx"> #endif
</span><span class="cx">     }
</span><span class="cx"> 
</span><span class="lines">@@ -351,71 +289,39 @@
</span><span class="cx">     }
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-gint AppendPipeline::id()
</del><ins>+std::tuple<GRefPtr<GstCaps>, AppendPipeline::StreamType, FloatSize> AppendPipeline::parseDemuxerSrcPadCaps(GstCaps* demuxerSrcPadCaps)
</ins><span class="cx"> {
</span><span class="cx">     ASSERT(isMainThread());
</span><span class="cx"> 
</span><del>-    if (m_id)
-        return m_id;
</del><ins>+    GRefPtr<GstCaps> parsedCaps = demuxerSrcPadCaps;
+    StreamType streamType = StreamType::Unknown;
+    FloatSize presentationSize;
</ins><span class="cx"> 
</span><del>-    static gint s_totalAudio = 0;
-    static gint s_totalVideo = 0;
-    static gint s_totalText = 0;
-
-    switch (m_streamType) {
-    case Audio:
-        m_id = ++s_totalAudio;
-        break;
-    case Video:
-        m_id = ++s_totalVideo;
-        break;
-    case Text:
-        m_id = ++s_totalText;
-        break;
-    case Unknown:
-    case Invalid:
-        GST_ERROR("Trying to get id for a pipeline of Unknown/Invalid type");
-        ASSERT_NOT_REACHED();
-        break;
-    }
-
-    GST_DEBUG("streamType=%d, id=%d", static_cast<int>(m_streamType), m_id);
-
-    return m_id;
-}
-
-void AppendPipeline::parseDemuxerSrcPadCaps(GstCaps* demuxerSrcPadCaps)
-{
-    ASSERT(isMainThread());
-
-    m_demuxerSrcPadCaps = adoptGRef(demuxerSrcPadCaps);
-    m_streamType = MediaSourceStreamTypeGStreamer::Unknown;
-
-    const char* originalMediaType = capsMediaType(m_demuxerSrcPadCaps.get());
</del><ins>+    const char* originalMediaType = capsMediaType(demuxerSrcPadCaps);
</ins><span class="cx">     auto& gstRegistryScanner = GStreamerRegistryScannerMSE::singleton();
</span><span class="cx">     if (!gstRegistryScanner.isCodecSupported(GStreamerRegistryScanner::Configuration::Decoding, originalMediaType)) {
</span><del>-        m_presentationSize = FloatSize();
-        m_streamType = MediaSourceStreamTypeGStreamer::Invalid;
-    } else if (doCapsHaveType(m_demuxerSrcPadCaps.get(), GST_VIDEO_CAPS_TYPE_PREFIX)) {
-        m_presentationSize = getVideoResolutionFromCaps(m_demuxerSrcPadCaps.get()).value_or(FloatSize());
-        m_streamType = MediaSourceStreamTypeGStreamer::Video;
</del><ins>+        streamType = StreamType::Invalid;
+    } else if (doCapsHaveType(demuxerSrcPadCaps, GST_VIDEO_CAPS_TYPE_PREFIX)) {
+        presentationSize = getVideoResolutionFromCaps(demuxerSrcPadCaps).value_or(FloatSize());
+        streamType = StreamType::Video;
</ins><span class="cx">     } else {
</span><del>-        m_presentationSize = FloatSize();
-        if (doCapsHaveType(m_demuxerSrcPadCaps.get(), GST_AUDIO_CAPS_TYPE_PREFIX))
-            m_streamType = MediaSourceStreamTypeGStreamer::Audio;
-        else if (doCapsHaveType(m_demuxerSrcPadCaps.get(), GST_TEXT_CAPS_TYPE_PREFIX))
-            m_streamType = MediaSourceStreamTypeGStreamer::Text;
</del><ins>+        if (doCapsHaveType(demuxerSrcPadCaps, GST_AUDIO_CAPS_TYPE_PREFIX))
+            streamType = StreamType::Audio;
+        else if (doCapsHaveType(demuxerSrcPadCaps, GST_TEXT_CAPS_TYPE_PREFIX))
+            streamType = StreamType::Text;
</ins><span class="cx">     }
</span><ins>+
+    return { WTFMove(parsedCaps), streamType, WTFMove(presentationSize) };
</ins><span class="cx"> }
</span><span class="cx"> 
</span><del>-void AppendPipeline::appsinkCapsChanged()
</del><ins>+void AppendPipeline::appsinkCapsChanged(Track& track)
</ins><span class="cx"> {
</span><span class="cx">     ASSERT(isMainThread());
</span><span class="cx"> 
</span><span class="cx">     // Consume any pending samples with the previous caps.
</span><del>-    consumeAppsinkAvailableSamples();
</del><ins>+    consumeAppsinksAvailableSamples();
</ins><span class="cx"> 
</span><del>-    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
</del><ins>+    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(track.appsink.get(), "sink"));
</ins><span class="cx">     GRefPtr<GstCaps> caps = adoptGRef(gst_pad_get_current_caps(pad.get()));
</span><span class="cx"> 
</span><span class="cx">     if (!caps)
</span><span class="lines">@@ -424,8 +330,8 @@
</span><span class="cx">     // If this is not the first time we're parsing an initialization segment, fail if the track
</span><span class="cx">     // has a different codec or type (e.g. if we were previously demuxing an audio stream and
</span><span class="cx">     // someone appends a video stream).
</span><del>-    if (m_appsinkCaps && g_strcmp0(capsMediaType(caps.get()), capsMediaType(m_appsinkCaps.get()))) {
-        GST_WARNING_OBJECT(m_pipeline.get(), "User appended track metadata with type '%s' for a SourceBuffer previously handling '%s'. Erroring out.", capsMediaType(caps.get()), capsMediaType(m_appsinkCaps.get()));
</del><ins>+    if (track.caps && g_strcmp0(capsMediaType(caps.get()), capsMediaType(track.caps.get()))) {
+        GST_WARNING_OBJECT(m_pipeline.get(), "Track received incompatible caps, received '%s' for a track previously handling '%s'. Erroring out.", capsMediaType(caps.get()), capsMediaType(track.caps.get()));
</ins><span class="cx">         m_sourceBufferPrivate.appendParsingFailed();
</span><span class="cx">         return;
</span><span class="cx">     }
</span><span class="lines">@@ -432,24 +338,22 @@
</span><span class="cx"> 
</span><span class="cx">     if (doCapsHaveType(caps.get(), GST_VIDEO_CAPS_TYPE_PREFIX)) {
</span><span class="cx">         if (auto size = getVideoResolutionFromCaps(caps.get()))
</span><del>-            m_presentationSize = *size;
</del><ins>+            track.presentationSize = *size;
</ins><span class="cx">     }
</span><span class="cx"> 
</span><del>-    if (m_appsinkCaps != caps) {
-        m_appsinkCaps = WTFMove(caps);
-        m_playerPrivate->trackDetected(*this, m_track);
-    }
</del><ins>+    if (track.caps != caps)
+        track.caps = WTFMove(caps);
</ins><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> void AppendPipeline::handleEndOfAppend()
</span><span class="cx"> {
</span><span class="cx">     ASSERT(isMainThread());
</span><del>-    consumeAppsinkAvailableSamples();
</del><ins>+    consumeAppsinksAvailableSamples();
</ins><span class="cx">     GST_TRACE_OBJECT(m_pipeline.get(), "Notifying SourceBufferPrivate the append is complete");
</span><span class="cx">     sourceBufferPrivate().didReceiveAllPendingSamples();
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-void AppendPipeline::appsinkNewSample(GRefPtr<GstSample>&& sample)
</del><ins>+void AppendPipeline::appsinkNewSample(const Track& track, GRefPtr<GstSample>&& sample)
</ins><span class="cx"> {
</span><span class="cx">     ASSERT(isMainThread());
</span><span class="cx"> 
</span><span class="lines">@@ -464,7 +368,7 @@
</span><span class="cx">         return;
</span><span class="cx">     }
</span><span class="cx"> 
</span><del>-    auto mediaSample = MediaSampleGStreamer::create(WTFMove(sample), m_presentationSize, trackId());
</del><ins>+    auto mediaSample = MediaSampleGStreamer::create(WTFMove(sample), track.presentationSize, track.trackId);
</ins><span class="cx"> 
</span><span class="cx">     GST_TRACE("append: trackId=%s PTS=%s DTS=%s DUR=%s presentationSize=%.0fx%.0f",
</span><span class="cx">         mediaSample->trackID().string().utf8().data(),
</span><span class="lines">@@ -499,48 +403,98 @@
</span><span class="cx"> {
</span><span class="cx">     ASSERT(isMainThread());
</span><span class="cx"> 
</span><ins>+    bool isFirstInitializationSegment = !m_hasReceivedFirstInitializationSegment;
+
</ins><span class="cx">     SourceBufferPrivateClient::InitializationSegment initializationSegment;
</span><span class="cx"> 
</span><del>-    GST_DEBUG("Notifying SourceBuffer for track %s", (m_track) ? m_track->id().string().utf8().data() : nullptr);
-    initializationSegment.duration = m_initialDuration;
</del><ins>+    gint64 timeLength = 0;
+    if (gst_element_query_duration(m_demux.get(), GST_FORMAT_TIME, &timeLength)
+        && static_cast<guint64>(timeLength) != GST_CLOCK_TIME_NONE)
+        initializationSegment.duration = MediaTime(GST_TIME_AS_USECONDS(timeLength), G_USEC_PER_SEC);
+    else
+        initializationSegment.duration = MediaTime::positiveInfiniteTime();
</ins><span class="cx"> 
</span><del>-    switch (m_streamType) {
-    case Audio: {
-        SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
-        info.track = static_cast<AudioTrackPrivateGStreamer*>(m_track.get());
-        info.description = GStreamerMediaDescription::create(m_demuxerSrcPadCaps.get());
-        initializationSegment.audioTracks.append(info);
-        break;
</del><ins>+    if (isFirstInitializationSegment) {
+        // Create a Track object per pad.
+        int trackIndex = 0;
+        for (GstPad* pad : GstIteratorAdaptor<GstPad>(GUniquePtr<GstIterator>(gst_element_iterate_src_pads(m_demux.get())))) {
+            auto [createTrackResult, track] = tryCreateTrackFromPad(pad, trackIndex);
+            if (createTrackResult == CreateTrackResult::AppendParsingFailed) {
+                // appendParsingFailed() will immediately cause a resetParserState() which will stop demuxing, then the
+                // AppendPipeline will be destroyed.
+                m_sourceBufferPrivate.appendParsingFailed();
+                return;
+            }
+            if (track)
+                linkPadWithTrack(pad, *track);
+            trackIndex++;
+        }
+    } else {
+        // Link pads to existing Track objects that don't have a linked pad yet.
+        unsigned countPads = 0;
+        for (GstPad* pad : GstIteratorAdaptor<GstPad>(GUniquePtr<GstIterator>(gst_element_iterate_src_pads(m_demux.get())))) {
+            countPads++;
+            Track* track = tryMatchPadToExistingTrack(pad);
+            if (!track) {
+                GST_WARNING_OBJECT(pipeline(), "Can't match pad to existing tracks in the AppendPipeline: %" GST_PTR_FORMAT, pad);
+                m_sourceBufferPrivate.appendParsingFailed();
+                return;
+            }
+            linkPadWithTrack(pad, *track);
+        }
+        if (countPads != m_tracks.size()) {
+            GST_WARNING_OBJECT(pipeline(), "Number of pads (%u) doesn't match number of tracks (%zu).", countPads, m_tracks.size());
+            m_sourceBufferPrivate.appendParsingFailed();
+            return;
+        }
</ins><span class="cx">     }
</span><del>-    case Video: {
-        SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
-        info.track = static_cast<VideoTrackPrivateGStreamer*>(m_track.get());
-        info.description = GStreamerMediaDescription::create(m_demuxerSrcPadCaps.get());
-        initializationSegment.videoTracks.append(info);
-        break;
</del><ins>+
+    for (std::unique_ptr<Track>& track : m_tracks) {
+        GST_DEBUG_OBJECT(pipeline(), "Adding track to initialization with segment type %s, id %s.", streamTypeToString(track->streamType), track->trackId.string().utf8().data());
+        switch (track->streamType) {
+        case Audio: {
+            ASSERT(track->webKitTrack);
+            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
+            info.track = static_cast<AudioTrackPrivateGStreamer*>(track->webKitTrack.get());
+            info.description = GStreamerMediaDescription::create(track->caps.get());
+            initializationSegment.audioTracks.append(info);
+            break;
+        }
+        case Video: {
+            ASSERT(track->webKitTrack);
+            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
+            info.track = static_cast<VideoTrackPrivateGStreamer*>(track->webKitTrack.get());
+            info.description = GStreamerMediaDescription::create(track->caps.get());
+            initializationSegment.videoTracks.append(info);
+            break;
+        }
+        default:
+            GST_ERROR("Unsupported stream type or codec");
+            break;
+        }
</ins><span class="cx">     }
</span><del>-    default:
-        GST_ERROR("Unsupported stream type or codec");
-        break;
</del><ins>+
+    if (isFirstInitializationSegment) {
+        for (std::unique_ptr<Track>& track : m_tracks) {
+            if (track->streamType == StreamType::Video) {
+                GST_DEBUG_OBJECT(pipeline(), "Setting initial video size to that of track with id '%s', %gx%g.",
+                    track->trackId.string().utf8().data(), static_cast<double>(track->presentationSize.width()), static_cast<double>(track->presentationSize.height()));
+                m_playerPrivate->setInitialVideoSize(track->presentationSize);
+                break;
+            }
+        }
</ins><span class="cx">     }
</span><span class="cx"> 
</span><ins>+    m_hasReceivedFirstInitializationSegment = true;
+    GST_DEBUG("Notifying SourceBuffer of initialization segment.");
+    GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "append-pipeline-received-init-segment");
</ins><span class="cx">     m_sourceBufferPrivate.didReceiveInitializationSegment(WTFMove(initializationSegment), []() { });
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-AtomString AppendPipeline::trackId()
</del><ins>+void AppendPipeline::consumeAppsinksAvailableSamples()
</ins><span class="cx"> {
</span><span class="cx">     ASSERT(isMainThread());
</span><span class="cx"> 
</span><del>-    if (!m_track)
-        return AtomString();
-
-    return m_track->id();
-}
-
-void AppendPipeline::consumeAppsinkAvailableSamples()
-{
-    ASSERT(isMainThread());
-
</del><span class="cx">     GRefPtr<GstSample> sample;
</span><span class="cx">     int batchedSampleCount = 0;
</span><span class="cx">     // In some cases each frame increases the duration of the movie.
</span><span class="lines">@@ -547,9 +501,11 @@
</span><span class="cx">     // Batch duration changes so that if we pick 100 of such samples we don't have to run 100 times
</span><span class="cx">     // layout for the video controls, but only once.
</span><span class="cx">     m_playerPrivate->blockDurationChanges();
</span><del>-    while ((sample = adoptGRef(gst_app_sink_try_pull_sample(GST_APP_SINK(m_appsink.get()), 0)))) {
-        appsinkNewSample(WTFMove(sample));
-        batchedSampleCount++;
</del><ins>+    for (std::unique_ptr<Track>& track : m_tracks) {
+        while ((sample = adoptGRef(gst_app_sink_try_pull_sample(GST_APP_SINK(track->appsink.get()), 0)))) {
+            appsinkNewSample(*track, WTFMove(sample));
+            batchedSampleCount++;
+        }
</ins><span class="cx">     }
</span><span class="cx">     m_playerPrivate->unblockDurationChanges();
</span><span class="cx"> 
</span><span class="lines">@@ -573,9 +529,6 @@
</span><span class="cx">     // Reset the state of all elements in the pipeline.
</span><span class="cx">     assertedElementSetState(m_pipeline.get(), GST_STATE_READY);
</span><span class="cx"> 
</span><del>-    // The parser is tear down automatically when the demuxer is reset (see disconnectDemuxerSrcPadFromAppsinkFromAnyThread()).
-    ASSERT(!m_parser);
-
</del><span class="cx">     // Set the pipeline to PLAYING so that it can be used again.
</span><span class="cx">     assertedElementSetState(m_pipeline.get(), GST_STATE_PLAYING);
</span><span class="cx"> 
</span><span class="lines">@@ -642,20 +595,18 @@
</span><span class="cx">         GST_TRACE("Posting appsink-new-sample task to the main thread");
</span><span class="cx">         m_taskQueue.enqueueTask([this]() {
</span><span class="cx">             m_wasBusAlreadyNotifiedOfAvailableSamples.clear();
</span><del>-            consumeAppsinkAvailableSamples();
</del><ins>+            consumeAppsinksAvailableSamples();
</ins><span class="cx">         });
</span><span class="cx">     }
</span><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> static GRefPtr<GstElement>
</span><del>-createOptionalParserForFormat(GstPad* demuxerSrcPad)
</del><ins>+createOptionalParserForFormat(const AtomString& trackId, const GstCaps* caps)
</ins><span class="cx"> {
</span><del>-    GRefPtr<GstCaps> padCaps = adoptGRef(gst_pad_get_current_caps(demuxerSrcPad));
-    GstStructure* structure = gst_caps_get_structure(padCaps.get(), 0);
</del><ins>+    GstStructure* structure = gst_caps_get_structure(caps, 0);
</ins><span class="cx">     const char* mediaType = gst_structure_get_name(structure);
</span><span class="cx"> 
</span><del>-    GUniquePtr<char> demuxerPadName(gst_pad_get_name(demuxerSrcPad));
-    GUniquePtr<char> parserName(g_strdup_printf("%s_parser", demuxerPadName.get()));
</del><ins>+    GUniquePtr<char> parserName(g_strdup_printf("%s_parser", trackId.string().utf8().data()));
</ins><span class="cx"> 
</span><span class="cx">     if (!g_strcmp0(mediaType, "audio/x-opus")) {
</span><span class="cx">         GstElement* opusparse = makeGStreamerElement("opusparse", parserName.get());
</span><span class="lines">@@ -669,220 +620,230 @@
</span><span class="cx">     return nullptr;
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-void AppendPipeline::connectDemuxerSrcPadToAppsinkFromStreamingThread(GstPad* demuxerSrcPad)
</del><ins>+AtomString AppendPipeline::generateTrackId(StreamType streamType, int padIndex)
</ins><span class="cx"> {
</span><del>-    ASSERT(!isMainThread());
-
-    GST_DEBUG("connecting to appsink");
-
-    if (m_demux->numsrcpads > 1) {
-        GST_WARNING("Only one stream per SourceBuffer is allowed! Ignoring stream %d by adding a black hole probe.", m_demux->numsrcpads);
-        gulong probeId = gst_pad_add_probe(demuxerSrcPad, GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelineDemuxerBlackHolePadProbe), nullptr, nullptr);
-        g_object_set_data(G_OBJECT(demuxerSrcPad), "blackHoleProbeId", GULONG_TO_POINTER(probeId));
-        return;
</del><ins>+    switch (streamType) {
+    case Audio:
+        return makeString("A", padIndex);
+    case Video:
+        return makeString("V", padIndex);
+    case Text:
+        return makeString("T", padIndex);
+    default:
+        return makeString("O", padIndex);
</ins><span class="cx">     }
</span><del>-
-    GRefPtr<GstPad> appsinkSinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
-
-    // Only one stream per demuxer is supported.
-    ASSERT(!gst_pad_is_linked(appsinkSinkPad.get()));
-
-    gint64 timeLength = 0;
-    if (gst_element_query_duration(m_demux.get(), GST_FORMAT_TIME, &timeLength)
-        && static_cast<guint64>(timeLength) != GST_CLOCK_TIME_NONE)
-        m_initialDuration = MediaTime(GST_TIME_AS_USECONDS(timeLength), G_USEC_PER_SEC);
-    else
-        m_initialDuration = MediaTime::positiveInfiniteTime();
-
-    GST_DEBUG("Requesting demuxer-connect-to-appsink to main thread");
-    auto response = m_taskQueue.enqueueTaskAndWait<AbortableTaskQueue::Void>([this, demuxerSrcPad]() {
-        connectDemuxerSrcPadToAppsink(demuxerSrcPad);
-        return AbortableTaskQueue::Void();
-    });
-    if (!response) {
-        // The AppendPipeline has been destroyed or aborted before we received a response.
-        return;
-    }
-
-    // Must be done in the thread we were called from (usually streaming thread).
-    bool isData = (m_streamType == MediaSourceStreamTypeGStreamer::Audio)
-        || (m_streamType == MediaSourceStreamTypeGStreamer::Video)
-        || (m_streamType == MediaSourceStreamTypeGStreamer::Text);
-
-    if (isData) {
-        GRefPtr<GstObject> parent = adoptGRef(gst_element_get_parent(m_appsink.get()));
-        if (!parent)
-            gst_bin_add(GST_BIN(m_pipeline.get()), m_appsink.get());
-
-        // Current head of the pipeline being built.
-        GRefPtr<GstPad> currentSrcPad = demuxerSrcPad;
-
-        // Some audio files unhelpfully omit the duration of frames in the container. We need to parse
-        // the contained audio streams in order to know the duration of the frames.
-        // This is known to be an issue with YouTube WebM files containing Opus audio as of YTTV2018.
-        m_parser = createOptionalParserForFormat(currentSrcPad.get());
-        if (m_parser) {
-            gst_bin_add(GST_BIN(m_pipeline.get()), m_parser.get());
-            gst_element_sync_state_with_parent(m_parser.get());
-
-            GRefPtr<GstPad> parserSinkPad = adoptGRef(gst_element_get_static_pad(m_parser.get(), "sink"));
-            GRefPtr<GstPad> parserSrcPad = adoptGRef(gst_element_get_static_pad(m_parser.get(), "src"));
-
-            gst_pad_link(currentSrcPad.get(), parserSinkPad.get());
-            currentSrcPad = parserSrcPad;
-        }
-
-        gst_pad_link(currentSrcPad.get(), appsinkSinkPad.get());
-
-        gst_element_sync_state_with_parent(m_appsink.get());
-
-        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-after-link");
-    }
</del><span class="cx"> }
</span><span class="cx"> 
</span><del>-void AppendPipeline::connectDemuxerSrcPadToAppsink(GstPad* demuxerSrcPad)
</del><ins>+std::pair<AppendPipeline::CreateTrackResult, AppendPipeline::Track*> AppendPipeline::tryCreateTrackFromPad(GstPad* demuxerSrcPad, int trackIndex)
</ins><span class="cx"> {
</span><span class="cx">     ASSERT(isMainThread());
</span><del>-    GST_DEBUG("Connecting to appsink");
</del><ins>+    ASSERT(!m_hasReceivedFirstInitializationSegment);
+    GST_DEBUG_OBJECT(pipeline(), "Creating Track object for pad %" GST_PTR_FORMAT, demuxerSrcPad);
</ins><span class="cx"> 
</span><span class="cx">     const String& type = m_sourceBufferPrivate.type().containerType();
</span><span class="cx">     if (type.endsWith("webm"))
</span><span class="cx">         gst_pad_add_probe(demuxerSrcPad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, matroskademuxForceSegmentStartToEqualZero, nullptr, nullptr);
</span><span class="cx"> 
</span><del>-    GRefPtr<GstPad> sinkSinkPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
-
-    // Only one stream per demuxer is supported.
-    ASSERT(!gst_pad_is_linked(sinkSinkPad.get()));
-
-    // As it is now, resetParserState() will cause the pads to be disconnected, so they will later be re-added on the next initialization segment.
-
-    GRefPtr<GstCaps> caps = adoptGRef(gst_pad_get_current_caps(GST_PAD(demuxerSrcPad)));
-
</del><ins>+    auto [parsedCaps, streamType, presentationSize] = parseDemuxerSrcPadCaps(adoptGRef(gst_pad_get_current_caps(demuxerSrcPad)).get());
</ins><span class="cx"> #ifndef GST_DISABLE_GST_DEBUG
</span><span class="cx">     {
</span><del>-        GUniquePtr<gchar> strcaps(gst_caps_to_string(caps.get()));
</del><ins>+        GUniquePtr<gchar> strcaps(gst_caps_to_string(parsedCaps.get()));
</ins><span class="cx">         GST_DEBUG("%s", strcaps.get());
</span><span class="cx">     }
</span><span class="cx"> #endif
</span><span class="cx"> 
</span><del>-    parseDemuxerSrcPadCaps(gst_caps_ref(caps.get()));
</del><ins>+    if (streamType == StreamType::Invalid) {
+        GST_WARNING_OBJECT(m_pipeline.get(), "Unsupported track codec: %" GST_PTR_FORMAT, parsedCaps.get());
+        // 3.5.7 Initialization Segment Received
+        // 5.1. If the initialization segment contains tracks with codecs the user agent does not support, then run the
+        // append error algorithm and abort these steps.
+        return { CreateTrackResult::AppendParsingFailed, nullptr };
+    }
+    if (streamType == StreamType::Unknown) {
+        GST_WARNING_OBJECT(pipeline(), "Pad '%s' with parsed caps %" GST_PTR_FORMAT " has an unknown type, will be connected to a black hole probe.", GST_PAD_NAME(demuxerSrcPad), parsedCaps.get());
+        gst_pad_add_probe(demuxerSrcPad, GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelineDemuxerBlackHolePadProbe), nullptr, nullptr);
+        return { CreateTrackResult::TrackIgnored, nullptr };
+    }
+    AtomString trackId = generateTrackId(streamType, trackIndex);
</ins><span class="cx"> 
</span><del>-    TrackPrivateBaseGStreamer* gstreamerTrack;
-    switch (m_streamType) {
-    case MediaSourceStreamTypeGStreamer::Audio: {
-        auto specificTrack = AudioTrackPrivateGStreamer::create(makeWeakPtr(*m_playerPrivate), id(), sinkSinkPad.get());
</del><ins>+    GST_DEBUG_OBJECT(pipeline(), "Creating new AppendPipeline::Track with id '%s'", trackId.string().utf8().data());
+    size_t newTrackIndex = m_tracks.size();
+    m_tracks.append(WTF::makeUnique<Track>(trackId, streamType, parsedCaps, presentationSize));
+    Track& track = *m_tracks.at(newTrackIndex);
+    track.initializeElements(this, GST_BIN(m_pipeline.get()));
+    track.webKitTrack = makeWebKitTrack(newTrackIndex);
+    hookTrackEvents(track);
+    return { CreateTrackResult::TrackCreated, &track };
+}
+
+AppendPipeline::Track* AppendPipeline::tryMatchPadToExistingTrack(GstPad *demuxerSrcPad)
+{
+    ASSERT(isMainThread());
+    ASSERT(m_hasReceivedFirstInitializationSegment);
+    AtomString trackId = GST_PAD_NAME(demuxerSrcPad);
+    auto [parsedCaps, streamType, presentationSize] = parseDemuxerSrcPadCaps(adoptGRef(gst_pad_get_current_caps(demuxerSrcPad)).get());
+
+    // Try to find a matching pre-existing track. Ideally, tracks should be matched by track ID, but matching by type
+    // is provided as a fallback -- which will be used, since we don't have a way to fetch those from GStreamer at the moment.
+    Track* matchingTrack = nullptr;
+    for (std::unique_ptr<Track>& track : m_tracks) {
+        if (track->streamType != streamType || gst_pad_is_linked(track->entryPad.get()))
+            continue;
+        matchingTrack = &*track;
+        if (track->trackId == trackId)
+            break;
+    }
+
+    if (!matchingTrack) {
+        // Invalid configuration.
+        GST_WARNING_OBJECT(pipeline(), "Couldn't find a matching pre-existing track for pad '%s' with parsed caps %" GST_PTR_FORMAT
+            " on non-first initialization segment, will be connected to a black hole probe.", GST_PAD_NAME(demuxerSrcPad), parsedCaps.get());
+        gst_pad_add_probe(demuxerSrcPad, GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelineDemuxerBlackHolePadProbe), nullptr, nullptr);
+    }
+    return matchingTrack;
+}
+
+void AppendPipeline::linkPadWithTrack(GstPad* demuxerSrcPad, Track& track)
+{
+    GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "append-pipeline-before-link");
+    ASSERT(!GST_PAD_IS_LINKED(track.entryPad.get()));
+    gst_pad_link(demuxerSrcPad, track.entryPad.get());
+    ASSERT(GST_PAD_IS_LINKED(track.entryPad.get()));
+    GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "append-pipeline-after-link");
+}
+
+Ref<WebCore::TrackPrivateBase> AppendPipeline::makeWebKitTrack(int trackIndex)
+{
+    Track& appendPipelineTrack = *m_tracks.at(trackIndex);
+
+    RefPtr<WebCore::TrackPrivateBase> track;
+    TrackPrivateBaseGStreamer* gstreamerTrack = nullptr;
+    // FIXME: AudioTrackPrivateGStreamer etc. should probably use pads of the playback pipeline rather than the append pipeline.
+    switch (appendPipelineTrack.streamType) {
+    case StreamType::Audio: {
+        auto specificTrack = AudioTrackPrivateGStreamer::create(makeWeakPtr(m_playerPrivate), trackIndex, appendPipelineTrack.appsinkPad);
</ins><span class="cx">         gstreamerTrack = specificTrack.ptr();
</span><del>-        m_track = makeRefPtr(static_cast<TrackPrivateBase*>(specificTrack.ptr()));
</del><ins>+        track = makeRefPtr(static_cast<TrackPrivateBase*>(specificTrack.ptr()));
</ins><span class="cx">         break;
</span><span class="cx">     }
</span><del>-    case MediaSourceStreamTypeGStreamer::Video: {
-        auto specificTrack = VideoTrackPrivateGStreamer::create(makeWeakPtr(*m_playerPrivate), id(), sinkSinkPad.get());
</del><ins>+    case StreamType::Video: {
+        auto specificTrack = VideoTrackPrivateGStreamer::create(makeWeakPtr(m_playerPrivate), trackIndex, appendPipelineTrack.appsinkPad);
</ins><span class="cx">         gstreamerTrack = specificTrack.ptr();
</span><del>-        m_track = makeRefPtr(static_cast<TrackPrivateBase*>(specificTrack.ptr()));
</del><ins>+        track = makeRefPtr(static_cast<TrackPrivateBase*>(specificTrack.ptr()));
</ins><span class="cx">         break;
</span><span class="cx">     }
</span><del>-    case MediaSourceStreamTypeGStreamer::Text: {
-        auto specificTrack = InbandTextTrackPrivateGStreamer::create(id(), sinkSinkPad.get());
</del><ins>+    case StreamType::Text: {
+        auto specificTrack = InbandTextTrackPrivateGStreamer::create(trackIndex, appendPipelineTrack.appsinkPad);
</ins><span class="cx">         gstreamerTrack = specificTrack.ptr();
</span><del>-        m_track = makeRefPtr(static_cast<TrackPrivateBase*>(specificTrack.ptr()));
</del><ins>+        track = makeRefPtr(static_cast<TrackPrivateBase*>(specificTrack.ptr()));
</ins><span class="cx">         break;
</span><span class="cx">     }
</span><del>-    case MediaSourceStreamTypeGStreamer::Invalid:
-        GST_WARNING_OBJECT(m_pipeline.get(), "Unsupported track codec: %" GST_PTR_FORMAT, caps.get());
-        // 3.5.7 Initialization Segment Received
-        // 5.1. If the initialization segment contains tracks with codecs the user agent does not support, then run the
-        // append error algorithm and abort these steps.
-
-        // appendParsingFailed() will immediately cause a resetParserState() which will stop demuxing, then the
-        // AppendPipeline will be destroyed.
-        m_sourceBufferPrivate.appendParsingFailed();
-        return;
</del><span class="cx">     default:
</span><del>-        GST_WARNING_OBJECT(m_pipeline.get(), "Pad has unknown track type, ignoring: %" GST_PTR_FORMAT, caps.get());
-        return;
</del><ins>+        ASSERT_NOT_REACHED();
</ins><span class="cx">     }
</span><del>-    gstreamerTrack->setInitialCaps(GRefPtr(caps));
-
-    m_appsinkCaps = WTFMove(caps);
-    m_playerPrivate->trackDetected(*this, m_track);
</del><ins>+    ASSERT(appendPipelineTrack.caps.get());
+    gstreamerTrack->setInitialCaps(appendPipelineTrack.caps.get());
+    return track.releaseNonNull();
</ins><span class="cx"> }
</span><span class="cx"> 
</span><del>-void AppendPipeline::disconnectDemuxerSrcPadFromAppsinkFromAnyThread(GstPad* demuxerSrcPad)
</del><ins>+void AppendPipeline::Track::initializeElements(AppendPipeline* appendPipeline, GstBin* bin)
</ins><span class="cx"> {
</span><del>-    // Note: This function can be called either from the streaming thread (e.g. if a strange initialization segment with
-    // incompatible tracks is appended and the srcpad disconnected) or -- more usually -- from the main thread, when
-    // a state change is made to bring the demuxer down. (State change operations run in the main thread.)
-    GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pad-removed-before");
</del><ins>+    appsink = makeGStreamerElement("appsink", nullptr);
+    gst_app_sink_set_emit_signals(GST_APP_SINK(appsink.get()), TRUE);
+    gst_base_sink_set_sync(GST_BASE_SINK(appsink.get()), FALSE);
+    gst_base_sink_set_async_enabled(GST_BASE_SINK(appsink.get()), FALSE); // No prerolls, no async state changes.
+    gst_base_sink_set_drop_out_of_segment(GST_BASE_SINK(appsink.get()), FALSE);
+    gst_base_sink_set_last_sample_enabled(GST_BASE_SINK(appsink.get()), FALSE);
</ins><span class="cx"> 
</span><del>-    // Reconnect the other pad if it's the only remaining after removing this one and wasn't connected yet (has a black hole probe).
-    if (m_demux->numsrcpads == 1) {
-        auto remainingPad = GST_PAD(m_demux->srcpads->data);
</del><ins>+    gst_bin_add(GST_BIN(appendPipeline->pipeline()), appsink.get());
+    gst_element_sync_state_with_parent(appsink.get());
+    entryPad = appsinkPad = adoptGRef(gst_element_get_static_pad(appsink.get(), "sink"));
</ins><span class="cx"> 
</span><del>-        auto probeId = GPOINTER_TO_ULONG(g_object_get_data(G_OBJECT(remainingPad), "blackHoleProbeId"));
-        if (remainingPad && probeId) {
-            auto oldPeerPad = adoptGRef(gst_element_get_static_pad(m_appsink.get(), "sink"));
-            while (gst_pad_is_linked(oldPeerPad.get())) {
-                // Get sink pad of the parser before appsink.
-                // All the expected elements between the demuxer and appsink are supposed to have pads named "sink".
-                oldPeerPad = adoptGRef(gst_pad_get_peer(oldPeerPad.get()));
-                auto element = adoptGRef(gst_pad_get_parent_element(oldPeerPad.get()));
-                oldPeerPad = adoptGRef(gst_element_get_static_pad(element.get(), "sink"));
-                ASSERT(oldPeerPad);
-            }
</del><ins>+#if !LOG_DISABLED
+    appsinkDataEnteringPadProbeInformation.appendPipeline = appendPipeline;
+    appsinkDataEnteringPadProbeInformation.description = "appsink data entering";
+    appsinkDataEnteringPadProbeInformation.probeId = gst_pad_add_probe(appsinkPad.get(), GST_PAD_PROBE_TYPE_BUFFER, reinterpret_cast<GstPadProbeCallback>(appendPipelinePadProbeDebugInformation), &appsinkDataEnteringPadProbeInformation, nullptr);
+#endif
</ins><span class="cx"> 
</span><del>-            gst_pad_remove_probe(remainingPad, probeId);
</del><ins>+#if ENABLE(ENCRYPTED_MEDIA)
+    appsinkPadEventProbeInformation.appendPipeline = appendPipeline;
+    appsinkPadEventProbeInformation.description = "appsink event probe";
+    appsinkPadEventProbeInformation.probeId = gst_pad_add_probe(appsinkPad.get(), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, reinterpret_cast<GstPadProbeCallback>(appendPipelineAppsinkPadEventProbe), &appsinkPadEventProbeInformation, nullptr);
+#endif
</ins><span class="cx"> 
</span><del>-            auto oldPeerPadCaps = adoptGRef(gst_pad_get_current_caps(oldPeerPad.get()));
-            auto remainingPadCaps = adoptGRef(gst_pad_get_current_caps(remainingPad));
-            const char* oldPeerPadType = nullptr;
-            const char* remainingPadType = nullptr;
</del><ins>+    // Some audio files unhelpfully omit the duration of frames in the container. We need to parse
+    // the contained audio streams in order to know the duration of the frames.
+    // This is known to be an issue with YouTube WebM files containing Opus audio as of YTTV2018.
+    if ((parser = createOptionalParserForFormat(trackId, caps.get()))) {
+        gst_bin_add(bin, parser.get());
+        gst_element_sync_state_with_parent(parser.get());
+        gst_element_link(parser.get(), appsink.get());
+        ASSERT(GST_PAD_IS_LINKED(appsinkPad.get()));
+        entryPad = adoptGRef(gst_element_get_static_pad(parser.get(), "sink"));
+    }
+}
</ins><span class="cx"> 
</span><del>-            if (oldPeerPadCaps) {
-                auto oldPeerPadCapsStructure = gst_caps_get_structure(oldPeerPadCaps.get(), 0);
-                if (oldPeerPadCapsStructure)
-                    oldPeerPadType = gst_structure_get_name(oldPeerPadCapsStructure);
-            }
-            if (remainingPadCaps) {
-                auto remainingPadCapsStructure = gst_caps_get_structure(remainingPadCaps.get(), 0);
-                if (remainingPadCapsStructure)
-                    remainingPadType = gst_structure_get_name(remainingPadCapsStructure);
-            }
</del><ins>+void AppendPipeline::hookTrackEvents(Track& track)
+{
+    g_signal_connect(track.appsink.get(), "new-sample", G_CALLBACK(+[](GstElement* appsink, AppendPipeline* appendPipeline) -> GstFlowReturn {
+        appendPipeline->handleAppsinkNewSampleFromStreamingThread(appsink);
+        return GST_FLOW_OK;
+    }), this);
</ins><span class="cx"> 
</span><del>-            if (g_strcmp0(oldPeerPadType, remainingPadType)) {
-                GST_ERROR("The remaining pad has a blackHoleProbe, but can't reconnect as main pad because the caps types are incompatible: oldPeerPadCaps: %" GST_PTR_FORMAT ", remainingPadCaps: %" GST_PTR_FORMAT, oldPeerPadCaps.get(), remainingPadCaps.get());
-                if (!isMainThread())
-                    handleErrorConditionFromStreamingThread();
-                else
-                    m_sourceBufferPrivate.appendParsingFailed();
-                return;
-            }
</del><ins>+    struct Closure {
+    public:
</ins><span class="cx"> 
</span><del>-            GST_DEBUG("The remaining pad has a blackHoleProbe, reconnecting as main pad. oldPad: %" GST_PTR_FORMAT ", newPad: %" GST_PTR_FORMAT ", peerPad: %" GST_PTR_FORMAT, demuxerSrcPad, remainingPad, oldPeerPad.get());
</del><ins>+        Closure(AppendPipeline& appendPipeline, Track& track)
+            : appendPipeline(appendPipeline)
+            , track(track)
+        { }
+        static void destruct(void* closure, GClosure*) { delete static_cast<Closure*>(closure); }
</ins><span class="cx"> 
</span><del>-            gst_pad_link(remainingPad, oldPeerPad.get());
-            if (m_parser)
-                gst_element_set_state(m_parser.get(), GST_STATE_NULL);
-            gst_element_set_state(m_appsink.get(), GST_STATE_NULL);
-            gst_element_set_state(m_appsink.get(), GST_STATE_PLAYING);
-            if (m_parser)
-                gst_element_set_state(m_parser.get(), GST_STATE_PLAYING);
</del><ins>+        AppendPipeline& appendPipeline;
+        Track& track;
+    };
</ins><span class="cx"> 
</span><del>-            GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-after-relink");
-
</del><ins>+    g_signal_connect_data(track.appsinkPad.get(), "notify::caps", G_CALLBACK(+[](GObject*, GParamSpec*, Closure* closure) {
+        AppendPipeline& appendPipeline = closure->appendPipeline;
+        Track& track = closure->track;
+        if (isMainThread()) {
+            // When changing the pipeline state down to READY, the demuxer is unlinked and this triggers a caps notification
+            // because the appsink loses its previously negotiated caps. We are not interested in these unnegotiated caps.
+#ifndef NDEBUG
+            GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(track.appsink.get(), "sink"));
+            GRefPtr<GstCaps> caps = adoptGRef(gst_pad_get_current_caps(pad.get()));
+            ASSERT(!caps);
+#endif
</ins><span class="cx">             return;
</span><span class="cx">         }
</span><del>-    }
</del><span class="cx"> 
</span><del>-    GST_DEBUG("Disconnecting appsink");
</del><ins>+        // The streaming thread has just received new caps and is about to let samples using the
+        // new caps flow. Let's block it until the main thread has consumed the samples with the old
+        // caps and has processed the caps change.
+        appendPipeline.m_taskQueue.enqueueTaskAndWait<AbortableTaskQueue::Void>([&appendPipeline, &track]() {
+            appendPipeline.appsinkCapsChanged(track);
+            return AbortableTaskQueue::Void();
+        });
+    }), new Closure { *this, track }, Closure::destruct, static_cast<GConnectFlags>(0));
+}
</ins><span class="cx"> 
</span><del>-    if (m_parser) {
-        assertedElementSetState(m_parser.get(), GST_STATE_NULL);
-        gst_bin_remove(GST_BIN(m_pipeline.get()), m_parser.get());
-        m_parser = nullptr;
</del><ins>+#ifndef GST_DISABLE_GST_DEBUG
+const char* AppendPipeline::streamTypeToString(StreamType streamType)
+{
+    switch (streamType) {
+    case StreamType::Audio:
+        return "Audio";
+    case StreamType::Video:
+        return "Video";
+    case StreamType::Text:
+        return "Text";
+    case StreamType::Invalid:
+        return "Invalid";
+    case StreamType::Unknown:
+        return "Unknown";
</ins><span class="cx">     }
</span><del>-
-    GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pad-removed-after");
</del><span class="cx"> }
</span><ins>+#endif
</ins><span class="cx"> 
</span><span class="cx"> #if !LOG_DISABLED
</span><span class="cx"> static GstPadProbeReturn appendPipelinePadProbeDebugInformation(GstPad*, GstPadProbeInfo* info, struct PadProbeInformation* padProbeInformation)
</span></span></pre></div>
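<p>Unknown-type and unmatched demuxer pads in the hunk above are routed to appendPipelineDemuxerBlackHolePadProbe, whose body is not part of this hunk. As a rough sketch only (the committed probe is defined elsewhere in AppendPipeline.cpp and may differ), a "black hole" buffer probe simply drops every buffer that reaches the pad, so ignored streams never push data downstream:</p>
<pre>
// Sketch of a "black hole" buffer probe (assumed shape, not the committed body):
// it swallows every buffer so an ignored demuxer src pad never feeds an appsink.
static GstPadProbeReturn appendPipelineDemuxerBlackHolePadProbe(GstPad*, GstPadProbeInfo* info, gpointer)
{
    ASSERT(GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_BUFFER);
    GstBuffer* buffer = GST_PAD_PROBE_INFO_BUFFER(info);
    GST_TRACE("Swallowing buffer %" GST_PTR_FORMAT, buffer);
    return GST_PAD_PROBE_DROP; // Discard the buffer instead of forwarding it downstream.
}
</pre>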
<a id="trunkSourceWebCoreplatformgraphicsgstreamermseAppendPipelineh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h (281439 => 281440)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h    2021-08-23 07:11:41 UTC (rev 281439)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/mse/AppendPipeline.h       2021-08-23 13:22:29 UTC (rev 281440)
</span><span class="lines">@@ -52,46 +52,82 @@
</span><span class="cx">     void pushNewBuffer(GRefPtr<GstBuffer>&&);
</span><span class="cx">     void resetParserState();
</span><span class="cx">     SourceBufferPrivateGStreamer& sourceBufferPrivate() { return m_sourceBufferPrivate; }
</span><del>-    GstCaps* appsinkCaps() { return m_appsinkCaps.get(); }
-    RefPtr<WebCore::TrackPrivateBase> track() { return m_track; }
</del><span class="cx">     MediaPlayerPrivateGStreamerMSE* playerPrivate() { return m_playerPrivate; }
</span><span class="cx"> 
</span><span class="cx"> private:
</span><span class="cx">     // Similar to TrackPrivateBaseGStreamer::TrackType, but with a new value (Invalid) for when the codec is
</span><span class="cx">     // not supported on this system, which should result in ParsingFailed error being thrown in SourceBuffer.
</span><del>-    enum MediaSourceStreamTypeGStreamer { Audio, Video, Text, Unknown, Invalid };
</del><ins>+    enum StreamType { Audio, Video, Text, Unknown, Invalid };
+#ifndef GST_DISABLE_GST_DEBUG
+    static const char* streamTypeToString(StreamType);
+#endif
</ins><span class="cx"> 
</span><ins>+    struct Track {
+        // Track objects are created on pad-added for the first initialization segment, and destroyed after
+        // the pipeline state has been set to GST_STATE_NULL.
+        WTF_MAKE_NONCOPYABLE(Track);
+        WTF_MAKE_FAST_ALLOCATED;
+    public:
+
+        Track(const AtomString& trackId, StreamType streamType, const GRefPtr<GstCaps>& caps, const FloatSize& presentationSize)
+            : trackId(trackId)
+            , streamType(streamType)
+            , caps(caps)
+            , presentationSize(presentationSize)
+        { }
+
+        AtomString trackId;
+        StreamType streamType;
+        GRefPtr<GstCaps> caps;
+        FloatSize presentationSize;
+
+        GRefPtr<GstPad> entryPad; // Sink pad of the parser (if any) or the appsink.
+        GRefPtr<GstElement> parser; // Optional (needed by some formats).
+        GRefPtr<GstElement> appsink;
+        GRefPtr<GstPad> appsinkPad;
+
+        RefPtr<WebCore::TrackPrivateBase> webKitTrack;
+
+#if !LOG_DISABLED
+        struct PadProbeInformation appsinkDataEnteringPadProbeInformation;
+#endif
+#if ENABLE(ENCRYPTED_MEDIA)
+        struct PadProbeInformation appsinkPadEventProbeInformation;
+#endif
+
+        void initializeElements(AppendPipeline*, GstBin*);
+    };
+
</ins><span class="cx">     void handleErrorSyncMessage(GstMessage*);
</span><span class="cx">     void handleNeedContextSyncMessage(GstMessage*);
</span><span class="cx">     // For debug purposes only:
</span><span class="cx">     void handleStateChangeMessage(GstMessage*);
</span><span class="cx"> 
</span><del>-    gint id();
-
</del><span class="cx">     void handleAppsinkNewSampleFromStreamingThread(GstElement*);
</span><ins>+    void handleErrorCondition();
</ins><span class="cx">     void handleErrorConditionFromStreamingThread();
</span><span class="cx"> 
</span><del>-    // Takes ownership of caps.
-    void parseDemuxerSrcPadCaps(GstCaps*);
-    void appsinkCapsChanged();
-    void appsinkNewSample(GRefPtr<GstSample>&&);
</del><ins>+    void hookTrackEvents(Track&);
+    static std::tuple<GRefPtr<GstCaps>, AppendPipeline::StreamType, FloatSize> parseDemuxerSrcPadCaps(GstCaps*);
+    Ref<WebCore::TrackPrivateBase> makeWebKitTrack(int trackIndex);
+    void appsinkCapsChanged(Track&);
+    void appsinkNewSample(const Track&, GRefPtr<GstSample>&&);
</ins><span class="cx">     void handleEndOfAppend();
</span><span class="cx">     void didReceiveInitializationSegment();
</span><del>-    AtomString trackId();
</del><span class="cx"> 
</span><span class="cx">     GstBus* bus() { return m_bus.get(); }
</span><span class="cx">     GstElement* pipeline() { return m_pipeline.get(); }
</span><span class="cx">     GstElement* appsrc() { return m_appsrc.get(); }
</span><del>-    GstElement* appsink() { return m_appsink.get(); }
-    GstCaps* demuxerSrcPadCaps() { return m_demuxerSrcPadCaps.get(); }
</del><span class="cx"> 
</span><del>-    void disconnectDemuxerSrcPadFromAppsinkFromAnyThread(GstPad*);
-    void connectDemuxerSrcPadToAppsinkFromStreamingThread(GstPad*);
-    void connectDemuxerSrcPadToAppsink(GstPad*);
</del><ins>+    static AtomString generateTrackId(StreamType, int padIndex);
+    enum class CreateTrackResult { TrackCreated, TrackIgnored, AppendParsingFailed };
+    std::pair<CreateTrackResult, AppendPipeline::Track*> tryCreateTrackFromPad(GstPad* demuxerSrcPad, int padIndex);
+    AppendPipeline::Track* tryMatchPadToExistingTrack(GstPad* demuxerSrcPad);
+    void linkPadWithTrack(GstPad* demuxerSrcPad, Track&);
</ins><span class="cx"> 
</span><span class="cx">     void resetPipeline();
</span><span class="cx"> 
</span><del>-    void consumeAppsinkAvailableSamples();
</del><ins>+    void consumeAppsinksAvailableSamples();
</ins><span class="cx"> 
</span><span class="cx">     GstPadProbeReturn appsrcEndOfAppendCheckerProbe(GstPadProbeInfo*);
</span><span class="cx"> 
</span><span class="lines">@@ -105,6 +141,7 @@
</span><span class="cx">     // Only the pointers are compared.
</span><span class="cx">     WTF::Thread* m_streamingThread;
</span><span class="cx"> 
</span><ins>+    bool m_hasReceivedFirstInitializationSegment { false };
</ins><span class="cx">     // Used only for asserting EOS events are only caused by demuxing errors.
</span><span class="cx">     bool m_errorReceived { false };
</span><span class="cx"> 
</span><span class="lines">@@ -111,19 +148,14 @@
</span><span class="cx">     SourceBufferPrivateGStreamer& m_sourceBufferPrivate;
</span><span class="cx">     MediaPlayerPrivateGStreamerMSE* m_playerPrivate;
</span><span class="cx"> 
</span><del>-    // (m_mediaType, m_id) is unique.
-    gint m_id;
-
</del><span class="cx">     MediaTime m_initialDuration;
</span><del>-
</del><ins>+    GRefPtr<GstElement> m_appsrc;
</ins><span class="cx">     GRefPtr<GstElement> m_pipeline;
</span><span class="cx">     GRefPtr<GstBus> m_bus;
</span><del>-    GRefPtr<GstElement> m_appsrc;
</del><span class="cx">     GRefPtr<GstElement> m_demux;
</span><del>-    GRefPtr<GstElement> m_parser; // Optional.
-    // The demuxer has one src stream only, so only one appsink is needed and linked to it.
-    GRefPtr<GstElement> m_appsink;
</del><span class="cx"> 
</span><ins>+    Vector<std::unique_ptr<Track>> m_tracks;
+
</ins><span class="cx">     // Used to avoid unnecessary notifications per sample.
</span><span class="cx">     // It is read and written from the streaming thread and written from the main thread.
</span><span class="cx">     // The main thread must set it to false before actually pulling samples.
</span><span class="lines">@@ -131,25 +163,11 @@
</span><span class="cx">     // queue, instead of it growing unbounded.
</span><span class="cx">     std::atomic_flag m_wasBusAlreadyNotifiedOfAvailableSamples;
</span><span class="cx"> 
</span><del>-    GRefPtr<GstCaps> m_appsinkCaps;
-    GRefPtr<GstCaps> m_demuxerSrcPadCaps;
-    FloatSize m_presentationSize;
-
</del><span class="cx"> #if !LOG_DISABLED
</span><span class="cx">     struct PadProbeInformation m_demuxerDataEnteringPadProbeInformation;
</span><del>-    struct PadProbeInformation m_appsinkDataEnteringPadProbeInformation;
</del><span class="cx"> #endif
</span><span class="cx"> 
</span><del>-#if ENABLE(ENCRYPTED_MEDIA)
-    struct PadProbeInformation m_appsinkPadEventProbeInformation;
-#endif
-
-    MediaSourceStreamTypeGStreamer m_streamType;
-    RefPtr<WebCore::TrackPrivateBase> m_track;
-
</del><span class="cx">     AbortableTaskQueue m_taskQueue;
</span><del>-
-    GRefPtr<GstBuffer> m_pendingBuffer;
</del><span class="cx"> };
</span><span class="cx"> 
</span><span class="cx"> } // namespace WebCore.
</span></span></pre></div>
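<p>The header above declares generateTrackId(StreamType, int padIndex), but its definition falls outside this hunk. Purely for illustration, a minimal sketch of what such a helper could look like; the identifier format below is an assumption, not the committed scheme:</p>
<pre>
// Hypothetical sketch: derive a per-type identifier such as "A0", "V0" or "T1"
// from the stream type and the demuxer pad index. The committed helper may differ.
AtomString AppendPipeline::generateTrackId(StreamType streamType, int padIndex)
{
    switch (streamType) {
    case StreamType::Audio:
        return AtomString(makeString("A", padIndex));
    case StreamType::Video:
        return AtomString(makeString("V", padIndex));
    case StreamType::Text:
        return AtomString(makeString("T", padIndex));
    default:
        ASSERT_NOT_REACHED();
        return { };
    }
}
</pre>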
<a id="trunkSourceWebCoreplatformgraphicsgstreamermseMediaPlayerPrivateGStreamerMSEcpp"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp (281439 => 281440)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp  2021-08-23 07:11:41 UTC (rev 281439)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp     2021-08-23 13:22:29 UTC (rev 281440)
</span><span class="lines">@@ -331,25 +331,20 @@
</span><span class="cx">     }
</span><span class="cx"> }
</span><span class="cx"> 
</span><del>-void MediaPlayerPrivateGStreamerMSE::trackDetected(AppendPipeline& appendPipeline, RefPtr<WebCore::TrackPrivateBase> newTrack)
</del><ins>+void MediaPlayerPrivateGStreamerMSE::setInitialVideoSize(const FloatSize& videoSize)
</ins><span class="cx"> {
</span><span class="cx">     ASSERT(isMainThread());
</span><del>-    ASSERT(appendPipeline.track() == newTrack);
</del><span class="cx"> 
</span><del>-    GstCaps* caps = appendPipeline.appsinkCaps();
-    ASSERT(caps);
-    GST_DEBUG("Demuxer parsed metadata with track ID: %s, caps: %" GST_PTR_FORMAT, newTrack->id().string().latin1().data(), caps);
-
</del><span class="cx">     // We set the size of the video only for the first initialization segment.
</span><span class="cx">     // This is intentional: Normally the video size depends on the frames arriving
</span><span class="cx">     // at the sink in the playback pipeline, not in the append pipeline; but we still
</span><span class="cx">     // want to report an initial size for HAVE_METADATA (first initialization segment).
</span><del>-    if (m_videoSize.isEmpty() && doCapsHaveType(caps, GST_VIDEO_CAPS_TYPE_PREFIX)) {
-        if (auto size = getVideoResolutionFromCaps(caps)) {
-            m_videoSize = *size;
-            GST_DEBUG("Setting initial video size: %gx%g", m_videoSize.width(), m_videoSize.height());
-        }
-    }
</del><ins>+
+    if (!m_videoSize.isEmpty())
+        return;
+
+    GST_DEBUG("Setting initial video size: %gx%g", videoSize.width(), videoSize.height());
+    m_videoSize = videoSize;
</ins><span class="cx"> }
</span><span class="cx"> 
</span><span class="cx"> void MediaPlayerPrivateGStreamerMSE::startSource(const Vector<RefPtr<MediaSourceTrackGStreamer>>& tracks)
</span></span></pre></div>
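<p>With this change the player no longer inspects caps itself: trackDetected() is replaced by setInitialVideoSize(), which takes a ready-made FloatSize, and the caps inspection presumably moves to the AppendPipeline side. As an illustration of the kind of caller this implies (the helper name below is an assumption; the actual code path uses WebKit's getVideoResolutionFromCaps, which also honours the pixel aspect ratio):</p>
<pre>
#include <gst/video/video.h>

// Illustrative sketch only: derive a FloatSize from negotiated video caps and
// report it once to the player. Treat this as an approximation of the real flow.
static void maybeReportInitialVideoSize(MediaPlayerPrivateGStreamerMSE& player, GstCaps* caps)
{
    GstVideoInfo info;
    if (!gst_video_info_from_caps(&info, caps))
        return; // Caps are not fixed video caps yet; nothing to report.
    player.setInitialVideoSize(FloatSize(GST_VIDEO_INFO_WIDTH(&info), GST_VIDEO_INFO_HEIGHT(&info)));
}
</pre>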
<a id="trunkSourceWebCoreplatformgraphicsgstreamermseMediaPlayerPrivateGStreamerMSEh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h (281439 => 281440)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h    2021-08-23 07:11:41 UTC (rev 281439)
+++ trunk/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.h       2021-08-23 13:22:29 UTC (rev 281440)
</span><span class="lines">@@ -71,7 +71,7 @@
</span><span class="cx">     void setReadyState(MediaPlayer::ReadyState);
</span><span class="cx">     MediaSourcePrivateClient* mediaSourcePrivateClient() { return m_mediaSource.get(); }
</span><span class="cx"> 
</span><del>-    void trackDetected(AppendPipeline&, RefPtr<WebCore::TrackPrivateBase>);
</del><ins>+    void setInitialVideoSize(const FloatSize&);
</ins><span class="cx"> 
</span><span class="cx">     void blockDurationChanges();
</span><span class="cx">     void unblockDurationChanges();
</span></span></pre>
</div>
</div>

</body>
</html>