<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
<head><meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>[176943] trunk/Source/WebCore</title>
<style type="text/css"><!--
#msg dl.meta { border: 1px #006 solid; background: #369; padding: 6px; color: #fff; }
#msg dl.meta dt { float: left; width: 6em; font-weight: bold; }
#msg dt:after { content:':';}
#msg dl, #msg dt, #msg ul, #msg li, #header, #footer, #logmsg { font-family: verdana,arial,helvetica,sans-serif; font-size: 10pt; }
#msg dl a { font-weight: bold}
#msg dl a:link { color:#fc3; }
#msg dl a:active { color:#ff0; }
#msg dl a:visited { color:#cc6; }
h3 { font-family: verdana,arial,helvetica,sans-serif; font-size: 10pt; font-weight: bold; }
#msg pre { overflow: auto; background: #ffc; border: 1px #fa0 solid; padding: 6px; }
#logmsg { background: #ffc; border: 1px #fa0 solid; padding: 1em 1em 0 1em; }
#logmsg p, #logmsg pre, #logmsg blockquote { margin: 0 0 1em 0; }
#logmsg p, #logmsg li, #logmsg dt, #logmsg dd { line-height: 14pt; }
#logmsg h1, #logmsg h2, #logmsg h3, #logmsg h4, #logmsg h5, #logmsg h6 { margin: .5em 0; }
#logmsg h1:first-child, #logmsg h2:first-child, #logmsg h3:first-child, #logmsg h4:first-child, #logmsg h5:first-child, #logmsg h6:first-child { margin-top: 0; }
#logmsg ul, #logmsg ol { padding: 0; list-style-position: inside; margin: 0 0 0 1em; }
#logmsg ul { text-indent: -1em; padding-left: 1em; }#logmsg ol { text-indent: -1.5em; padding-left: 1.5em; }
#logmsg > ul, #logmsg > ol { margin: 0 0 1em 0; }
#logmsg pre { background: #eee; padding: 1em; }
#logmsg blockquote { border: 1px solid #fa0; border-left-width: 10px; padding: 1em 1em 0 1em; background: white;}
#logmsg dl { margin: 0; }
#logmsg dt { font-weight: bold; }
#logmsg dd { margin: 0; padding: 0 0 0.5em 0; }
#logmsg dd:before { content:'\00bb';}
#logmsg table { border-spacing: 0px; border-collapse: collapse; border-top: 4px solid #fa0; border-bottom: 1px solid #fa0; background: #fff; }
#logmsg table th { text-align: left; font-weight: normal; padding: 0.2em 0.5em; border-top: 1px dotted #fa0; }
#logmsg table td { text-align: right; border-top: 1px dotted #fa0; padding: 0.2em 0.5em; }
#logmsg table thead th { text-align: center; border-bottom: 1px solid #fa0; }
#logmsg table th.Corner { text-align: left; }
#logmsg hr { border: none 0; border-top: 2px dashed #fa0; height: 1px; }
#header, #footer { color: #fff; background: #636; border: 1px #300 solid; padding: 6px; }
#patch { width: 100%; }
#patch h4 {font-family: verdana,arial,helvetica,sans-serif;font-size:10pt;padding:8px;background:#369;color:#fff;margin:0;}
#patch .propset h4, #patch .binary h4 {margin:0;}
#patch pre {padding:0;line-height:1.2em;margin:0;}
#patch .diff {width:100%;background:#eee;padding: 0 0 10px 0;overflow:auto;}
#patch .propset .diff, #patch .binary .diff {padding:10px 0;}
#patch span {display:block;padding:0 10px;}
#patch .modfile, #patch .addfile, #patch .delfile, #patch .propset, #patch .binary, #patch .copfile {border:1px solid #ccc;margin:10px 0;}
#patch ins {background:#dfd;text-decoration:none;display:block;padding:0 10px;}
#patch del {background:#fdd;text-decoration:none;display:block;padding:0 10px;}
#patch .lines, .info {color:#888;background:#fff;}
--></style>
</head>
<body>
<div id="msg">
<dl class="meta">
<dt>Revision</dt> <dd><a href="http://trac.webkit.org/projects/webkit/changeset/176943">176943</a></dd>
<dt>Author</dt> <dd>commit-queue@webkit.org</dd>
<dt>Date</dt> <dd>2014-12-08 01:34:25 -0800 (Mon, 08 Dec 2014)</dd>
</dl>
<h3>Log Message</h3>
<pre>[GStreamer] Major cleanup of AudioDestination implementation
https://bugs.webkit.org/show_bug.cgi?id=139370
Patch by Sebastian Dröge <sebastian@centricular.com> on 2014-12-08
Reviewed by Philippe Normand.
* platform/audio/gstreamer/AudioDestinationGStreamer.cpp:
(WebCore::AudioDestinationGStreamer::AudioDestinationGStreamer):
Add an audioresample element before the audio sink. The audio sink
might not be able to handle our sampling rate.
(WebCore::AudioDestinationGStreamer::AudioDestinationGStreamer):
(WebCore::AudioDestinationGStreamer::~AudioDestinationGStreamer):
(WebCore::AudioDestinationGStreamer::stop):
(WebCore::AudioDestinationGStreamer::finishBuildingPipelineAfterWavParserPadReady): Deleted.
Don't use a wavparse element but directly link the raw audio from
the source to the audio sink.
(WebCore::AudioDestinationGStreamer::start):
Catch errors when going to PLAYING early, we might not get an error
message.
* platform/audio/gstreamer/AudioDestinationGStreamer.h:
* platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp:
(getGStreamerMonoAudioCaps):
(webKitWebAudioSrcConstructed):
(webKitWebAudioSrcChangeState):
Don't use a WAV encoder but directly output raw audio. Also don't
include a unneeded audioconvert element before the interleave.
(webKitWebAudioSrcLoop):
Add timestamps and durations to the output buffers, map them in
READWRITE mode and actually keep them mapped until we're sure
nothing is actually writing into them.
(webKitWebAudioSrcLoop):
Pause the task on errors instead of continuously calling it again
immediately.</pre>
<h3>Modified Paths</h3>
<ul>
<li><a href="#trunkSourceWebCoreChangeLog">trunk/Source/WebCore/ChangeLog</a></li>
<li><a href="#trunkSourceWebCoreplatformaudiogstreamerAudioDestinationGStreamercpp">trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp</a></li>
<li><a href="#trunkSourceWebCoreplatformaudiogstreamerAudioDestinationGStreamerh">trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.h</a></li>
<li><a href="#trunkSourceWebCoreplatformaudiogstreamerWebKitWebAudioSourceGStreamercpp">trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp</a></li>
</ul>
</div>
<div id="patch">
<h3>Diff</h3>
<a id="trunkSourceWebCoreChangeLog"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/ChangeLog (176942 => 176943)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/ChangeLog        2014-12-08 09:28:32 UTC (rev 176942)
+++ trunk/Source/WebCore/ChangeLog        2014-12-08 09:34:25 UTC (rev 176943)
</span><span class="lines">@@ -1,5 +1,45 @@
</span><span class="cx"> 2014-12-08 Sebastian Dröge <sebastian@centricular.com>
</span><span class="cx">
</span><ins>+ [GStreamer] Major cleanup of AudioDestination implementation
+ https://bugs.webkit.org/show_bug.cgi?id=139370
+
+ Reviewed by Philippe Normand.
+
+ * platform/audio/gstreamer/AudioDestinationGStreamer.cpp:
+ (WebCore::AudioDestinationGStreamer::AudioDestinationGStreamer):
+ Add an audioresample element before the audio sink. The audio sink
+ might not be able to handle our sampling rate.
+
+ (WebCore::AudioDestinationGStreamer::AudioDestinationGStreamer):
+ (WebCore::AudioDestinationGStreamer::~AudioDestinationGStreamer):
+ (WebCore::AudioDestinationGStreamer::stop):
+ (WebCore::AudioDestinationGStreamer::finishBuildingPipelineAfterWavParserPadReady): Deleted.
+ Don't use a wavparse element but directly link the raw audio from
+ the source to the audio sink.
+
+ (WebCore::AudioDestinationGStreamer::start):
+ Catch errors when going to PLAYING early, we might not get an error
+ message.
+
+ * platform/audio/gstreamer/AudioDestinationGStreamer.h:
+ * platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp:
+ (getGStreamerMonoAudioCaps):
+ (webKitWebAudioSrcConstructed):
+ (webKitWebAudioSrcChangeState):
+ Don't use a WAV encoder but directly output raw audio. Also don't
+ include a unneeded audioconvert element before the interleave.
+
+ (webKitWebAudioSrcLoop):
+ Add timestamps and durations to the output buffers, map them in
+ READWRITE mode and actually keep them mapped until we're sure
+ nothing is actually writing into them.
+
+ (webKitWebAudioSrcLoop):
+ Pause the task on errors instead of continuously calling it again
+ immediately.
+
+2014-12-08 Sebastian Dröge <sebastian@centricular.com>
+
</ins><span class="cx"> [GStreamer] Use gst_message_parse_buffering()
</span><span class="cx"> https://bugs.webkit.org/show_bug.cgi?id=139365
</span><span class="cx">
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformaudiogstreamerAudioDestinationGStreamercpp"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp (176942 => 176943)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp        2014-12-08 09:28:32 UTC (rev 176942)
+++ trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp        2014-12-08 09:34:25 UTC (rev 176943)
</span><span class="lines">@@ -1,5 +1,6 @@
</span><span class="cx"> /*
</span><span class="cx"> * Copyright (C) 2011, 2012 Igalia S.L
</span><ins>+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
</ins><span class="cx"> *
</span><span class="cx"> * This library is free software; you can redistribute it and/or
</span><span class="cx"> * modify it under the terms of the GNU Lesser General Public
</span><span class="lines">@@ -87,38 +88,10 @@
</span><span class="cx"> "provider", &m_callback,
</span><span class="cx"> "frames", framesToPull, NULL));
</span><span class="cx">
</span><del>- GstElement* wavParser = gst_element_factory_make("wavparse", 0);
</del><ins>+ GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(webkitAudioSrc, "src"));
</ins><span class="cx">
</span><del>- m_wavParserAvailable = wavParser;
- ASSERT_WITH_MESSAGE(m_wavParserAvailable, "Failed to create GStreamer wavparse element");
- if (!m_wavParserAvailable)
- return;
-
- gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, wavParser, NULL);
- gst_element_link_pads_full(webkitAudioSrc, "src", wavParser, "sink", GST_PAD_LINK_CHECK_NOTHING);
-
- GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(wavParser, "src"));
- finishBuildingPipelineAfterWavParserPadReady(srcPad.get());
-}
-
-AudioDestinationGStreamer::~AudioDestinationGStreamer()
-{
- GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline)));
- ASSERT(bus);
- g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(messageCallback), this);
- gst_bus_remove_signal_watch(bus.get());
-
- gst_element_set_state(m_pipeline, GST_STATE_NULL);
- gst_object_unref(m_pipeline);
-}
-
-void AudioDestinationGStreamer::finishBuildingPipelineAfterWavParserPadReady(GstPad* pad)
-{
- ASSERT(m_wavParserAvailable);
-
</del><span class="cx"> GRefPtr<GstElement> audioSink = gst_element_factory_make("autoaudiosink", 0);
</span><span class="cx"> m_audioSinkAvailable = audioSink;
</span><del>-
</del><span class="cx"> if (!audioSink) {
</span><span class="cx"> LOG_ERROR("Failed to create GStreamer autoaudiosink element");
</span><span class="cx"> return;
</span><span class="lines">@@ -136,18 +109,29 @@
</span><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> GstElement* audioConvert = gst_element_factory_make("audioconvert", 0);
</span><del>- gst_bin_add_many(GST_BIN(m_pipeline), audioConvert, audioSink.get(), NULL);
</del><ins>+ GstElement* audioResample = gst_element_factory_make("audioresample", 0);
+ gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, audioConvert, audioResample, audioSink.get(), NULL);
</ins><span class="cx">
</span><span class="cx"> // Link wavparse's src pad to audioconvert sink pad.
</span><span class="cx"> GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(audioConvert, "sink"));
</span><del>- gst_pad_link_full(pad, sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);
</del><ins>+ gst_pad_link_full(srcPad.get(), sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);
</ins><span class="cx">
</span><span class="cx"> // Link audioconvert to audiosink and roll states.
</span><del>- gst_element_link_pads_full(audioConvert, "src", audioSink.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);
- gst_element_sync_state_with_parent(audioConvert);
- gst_element_sync_state_with_parent(audioSink.leakRef());
</del><ins>+ gst_element_link_pads_full(audioConvert, "src", audioResample, "sink", GST_PAD_LINK_CHECK_NOTHING);
+ gst_element_link_pads_full(audioResample, "src", audioSink.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);
</ins><span class="cx"> }
</span><span class="cx">
</span><ins>+AudioDestinationGStreamer::~AudioDestinationGStreamer()
+{
+ GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline)));
+ ASSERT(bus);
+ g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(messageCallback), this);
+ gst_bus_remove_signal_watch(bus.get());
+
+ gst_element_set_state(m_pipeline, GST_STATE_NULL);
+ gst_object_unref(m_pipeline);
+}
+
</ins><span class="cx"> gboolean AudioDestinationGStreamer::handleMessage(GstMessage* message)
</span><span class="cx"> {
</span><span class="cx"> GUniqueOutPtr<GError> error;
</span><span class="lines">@@ -172,18 +156,23 @@
</span><span class="cx">
</span><span class="cx"> void AudioDestinationGStreamer::start()
</span><span class="cx"> {
</span><del>- ASSERT(m_wavParserAvailable);
- if (!m_wavParserAvailable)
</del><ins>+ ASSERT(m_audioSinkAvailable);
+ if (!m_audioSinkAvailable)
</ins><span class="cx"> return;
</span><span class="cx">
</span><del>- gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
</del><ins>+ if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
+ g_warning("Error: Failed to set pipeline to playing");
+ m_isPlaying = false;
+ return;
+ }
+
</ins><span class="cx"> m_isPlaying = true;
</span><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> void AudioDestinationGStreamer::stop()
</span><span class="cx"> {
</span><del>- ASSERT(m_wavParserAvailable && m_audioSinkAvailable);
- if (!m_wavParserAvailable || !m_audioSinkAvailable)
</del><ins>+ ASSERT(m_audioSinkAvailable);
+ if (!m_audioSinkAvailable)
</ins><span class="cx"> return;
</span><span class="cx">
</span><span class="cx"> gst_element_set_state(m_pipeline, GST_STATE_PAUSED);
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformaudiogstreamerAudioDestinationGStreamerh"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.h (176942 => 176943)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.h        2014-12-08 09:28:32 UTC (rev 176942)
+++ trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.h        2014-12-08 09:34:25 UTC (rev 176943)
</span><span class="lines">@@ -41,7 +41,6 @@
</span><span class="cx"> float sampleRate() const { return m_sampleRate; }
</span><span class="cx"> AudioIOCallback& callback() const { return m_callback; }
</span><span class="cx">
</span><del>- void finishBuildingPipelineAfterWavParserPadReady(GstPad*);
</del><span class="cx"> gboolean handleMessage(GstMessage*);
</span><span class="cx">
</span><span class="cx"> private:
</span><span class="lines">@@ -50,7 +49,6 @@
</span><span class="cx">
</span><span class="cx"> float m_sampleRate;
</span><span class="cx"> bool m_isPlaying;
</span><del>- bool m_wavParserAvailable;
</del><span class="cx"> bool m_audioSinkAvailable;
</span><span class="cx"> GstElement* m_pipeline;
</span><span class="cx"> };
</span></span></pre></div>
<a id="trunkSourceWebCoreplatformaudiogstreamerWebKitWebAudioSourceGStreamercpp"></a>
<div class="modfile"><h4>Modified: trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp (176942 => 176943)</h4>
<pre class="diff"><span>
<span class="info">--- trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp        2014-12-08 09:28:32 UTC (rev 176942)
+++ trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp        2014-12-08 09:34:25 UTC (rev 176943)
</span><span class="lines">@@ -1,5 +1,6 @@
</span><span class="cx"> /*
</span><span class="cx"> * Copyright (C) 2011, 2012 Igalia S.L
</span><ins>+ * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
</ins><span class="cx"> *
</span><span class="cx"> * This library is free software; you can redistribute it and/or
</span><span class="cx"> * modify it under the terms of the GNU Lesser General Public
</span><span class="lines">@@ -53,7 +54,6 @@
</span><span class="cx"> guint framesToPull;
</span><span class="cx">
</span><span class="cx"> GRefPtr<GstElement> interleave;
</span><del>- GRefPtr<GstElement> wavEncoder;
</del><span class="cx">
</span><span class="cx"> GRefPtr<GstTask> task;
</span><span class="cx"> GRecMutex mutex;
</span><span class="lines">@@ -63,6 +63,7 @@
</span><span class="cx">
</span><span class="cx"> bool newStreamEventPending;
</span><span class="cx"> GstSegment segment;
</span><ins>+ guint64 numberOfSamples;
</ins><span class="cx"> };
</span><span class="cx">
</span><span class="cx"> enum {
</span><span class="lines">@@ -72,10 +73,15 @@
</span><span class="cx"> PROP_FRAMES
</span><span class="cx"> };
</span><span class="cx">
</span><ins>+typedef struct {
+ GstBuffer* buffer;
+ GstMapInfo info;
+} AudioSrcBuffer;
+
</ins><span class="cx"> static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src",
</span><del>- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS("audio/x-wav"));
</del><ins>+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS(GST_AUDIO_CAPS_MAKE(GST_AUDIO_NE(F32))));
</ins><span class="cx">
</span><span class="cx"> GST_DEBUG_CATEGORY_STATIC(webkit_web_audio_src_debug);
</span><span class="cx"> #define GST_CAT_DEFAULT webkit_web_audio_src_debug
</span><span class="lines">@@ -91,7 +97,7 @@
</span><span class="cx"> {
</span><span class="cx"> return gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(sampleRate),
</span><span class="cx"> "channels", G_TYPE_INT, 1,
</span><del>- "format", G_TYPE_STRING, gst_audio_format_to_string(GST_AUDIO_FORMAT_F32),
</del><ins>+ "format", G_TYPE_STRING, GST_AUDIO_NE(F32),
</ins><span class="cx"> "layout", G_TYPE_STRING, "interleaved", nullptr);
</span><span class="cx"> }
</span><span class="cx">
</span><span class="lines">@@ -203,28 +209,20 @@
</span><span class="cx"> ASSERT(priv->sampleRate);
</span><span class="cx">
</span><span class="cx"> priv->interleave = gst_element_factory_make("interleave", 0);
</span><del>- priv->wavEncoder = gst_element_factory_make("wavenc", 0);
</del><span class="cx">
</span><span class="cx"> if (!priv->interleave) {
</span><span class="cx"> GST_ERROR_OBJECT(src, "Failed to create interleave");
</span><span class="cx"> return;
</span><span class="cx"> }
</span><span class="cx">
</span><del>- if (!priv->wavEncoder) {
- GST_ERROR_OBJECT(src, "Failed to create wavenc");
- return;
- }
</del><ins>+ gst_bin_add(GST_BIN(src), priv->interleave.get());
</ins><span class="cx">
</span><del>- gst_bin_add_many(GST_BIN(src), priv->interleave.get(), priv->wavEncoder.get(), NULL);
- gst_element_link_pads_full(priv->interleave.get(), "src", priv->wavEncoder.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);
-
</del><span class="cx"> // For each channel of the bus create a new upstream branch for interleave, like:
</span><del>- // queue ! capsfilter ! audioconvert. which is plugged to a new interleave request sinkpad.
</del><ins>+ // queue ! capsfilter. which is plugged to a new interleave request sinkpad.
</ins><span class="cx"> for (unsigned channelIndex = 0; channelIndex < priv->bus->numberOfChannels(); channelIndex++) {
</span><span class="cx"> GUniquePtr<gchar> queueName(g_strdup_printf("webaudioQueue%u", channelIndex));
</span><span class="cx"> GstElement* queue = gst_element_factory_make("queue", queueName.get());
</span><span class="cx"> GstElement* capsfilter = gst_element_factory_make("capsfilter", 0);
</span><del>- GstElement* audioconvert = gst_element_factory_make("audioconvert", 0);
</del><span class="cx">
</span><span class="cx"> GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));
</span><span class="cx">
</span><span class="lines">@@ -240,16 +238,15 @@
</span><span class="cx"> GstPad* pad = gst_element_get_static_pad(queue, "sink");
</span><span class="cx"> priv->pads = g_slist_prepend(priv->pads, pad);
</span><span class="cx">
</span><del>- gst_bin_add_many(GST_BIN(src), queue, capsfilter, audioconvert, NULL);
</del><ins>+ gst_bin_add_many(GST_BIN(src), queue, capsfilter, NULL);
</ins><span class="cx"> gst_element_link_pads_full(queue, "src", capsfilter, "sink", GST_PAD_LINK_CHECK_NOTHING);
</span><del>- gst_element_link_pads_full(capsfilter, "src", audioconvert, "sink", GST_PAD_LINK_CHECK_NOTHING);
- gst_element_link_pads_full(audioconvert, "src", priv->interleave.get(), 0, GST_PAD_LINK_CHECK_NOTHING);
</del><ins>+ gst_element_link_pads_full(capsfilter, "src", priv->interleave.get(), "sink_%u", GST_PAD_LINK_CHECK_NOTHING);
</ins><span class="cx">
</span><span class="cx"> }
</span><span class="cx"> priv->pads = g_slist_reverse(priv->pads);
</span><span class="cx">
</span><span class="cx"> // wavenc's src pad is the only visible pad of our element.
</span><del>- GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->wavEncoder.get(), "src"));
</del><ins>+ GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->interleave.get(), "src"));
</ins><span class="cx"> gst_ghost_pad_set_target(GST_GHOST_PAD(priv->sourcePad), targetPad.get());
</span><span class="cx"> }
</span><span class="cx">
</span><span class="lines">@@ -320,20 +317,28 @@
</span><span class="cx">
</span><span class="cx"> ASSERT(priv->bus);
</span><span class="cx"> ASSERT(priv->provider);
</span><del>- if (!priv->provider || !priv->bus)
</del><ins>+ if (!priv->provider || !priv->bus) {
+ gst_task_pause(src->priv->task.get());
</ins><span class="cx"> return;
</span><ins>+ }
</ins><span class="cx">
</span><ins>+ GstClockTime timestamp = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate);
+ priv->numberOfSamples += priv->framesToPull;
+ GstClockTime duration = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate) - timestamp;
+
</ins><span class="cx"> GSList* channelBufferList = 0;
</span><span class="cx"> register int i;
</span><span class="cx"> unsigned bufferSize = priv->framesToPull * sizeof(float);
</span><span class="cx"> for (i = g_slist_length(priv->pads) - 1; i >= 0; i--) {
</span><ins>+ AudioSrcBuffer* buffer = g_new(AudioSrcBuffer, 1);
</ins><span class="cx"> GstBuffer* channelBuffer = gst_buffer_new_and_alloc(bufferSize);
</span><span class="cx"> ASSERT(channelBuffer);
</span><del>- channelBufferList = g_slist_prepend(channelBufferList, channelBuffer);
- GstMapInfo info;
- gst_buffer_map(channelBuffer, &info, GST_MAP_READ);
- priv->bus->setChannelMemory(i, reinterpret_cast<float*>(info.data), priv->framesToPull);
- gst_buffer_unmap(channelBuffer, &info);
</del><ins>+ buffer->buffer = channelBuffer;
+ GST_BUFFER_TIMESTAMP(channelBuffer) = timestamp;
+ GST_BUFFER_DURATION(channelBuffer) = duration;
+ gst_buffer_map(channelBuffer, &buffer->info, (GstMapFlags) GST_MAP_READWRITE);
+ priv->bus->setChannelMemory(i, reinterpret_cast<float*>(buffer->info.data), priv->framesToPull);
+ channelBufferList = g_slist_prepend(channelBufferList, buffer);
</ins><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> // FIXME: Add support for local/live audio input.
</span><span class="lines">@@ -350,8 +355,13 @@
</span><span class="cx">
</span><span class="cx"> for (i = 0; padsIt && buffersIt; padsIt = g_slist_next(padsIt), buffersIt = g_slist_next(buffersIt), ++i) {
</span><span class="cx"> GstPad* pad = static_cast<GstPad*>(padsIt->data);
</span><del>- GstBuffer* channelBuffer = static_cast<GstBuffer*>(buffersIt->data);
</del><ins>+ AudioSrcBuffer* buffer = static_cast<AudioSrcBuffer*>(buffersIt->data);
+ GstBuffer* channelBuffer = buffer->buffer;
</ins><span class="cx">
</span><ins>+ // Unmap before passing on the buffer.
+ gst_buffer_unmap(channelBuffer, &buffer->info);
+ g_free(buffer);
+
</ins><span class="cx"> // Send stream-start, segment and caps events downstream, along with the first buffer.
</span><span class="cx"> if (priv->newStreamEventPending) {
</span><span class="cx"> GRefPtr<GstElement> queue = adoptGRef(gst_pad_get_parent_element(pad));
</span><span class="lines">@@ -375,8 +385,10 @@
</span><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> GstFlowReturn ret = gst_pad_chain(pad, channelBuffer);
</span><del>- if (ret != GST_FLOW_OK)
</del><ins>+ if (ret != GST_FLOW_OK) {
</ins><span class="cx"> GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s:%s flow: %s", GST_DEBUG_PAD_NAME(pad), gst_flow_get_name(ret)));
</span><ins>+ gst_task_pause(src->priv->task.get());
+ }
</ins><span class="cx"> }
</span><span class="cx">
</span><span class="cx"> priv->newStreamEventPending = false;
</span><span class="lines">@@ -396,11 +408,7 @@
</span><span class="cx"> GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no interleave"));
</span><span class="cx"> return GST_STATE_CHANGE_FAILURE;
</span><span class="cx"> }
</span><del>- if (!src->priv->wavEncoder) {
- gst_element_post_message(element, gst_missing_element_message_new(element, "wavenc"));
- GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no wavenc"));
- return GST_STATE_CHANGE_FAILURE;
- }
</del><ins>+ src->priv->numberOfSamples = 0;
</ins><span class="cx"> break;
</span><span class="cx"> default:
</span><span class="cx"> break;
</span></span></pre>
</div>
</div>
</body>
</html>