Changeset 177085 in webkit


Ignore:
Timestamp:
Dec 10, 2014 11:43:00 AM (9 years ago)
Author:
commit-queue@webkit.org
Message:

[GStreamer] Use appsrcs instead of unconnected queues
https://bugs.webkit.org/show_bug.cgi?id=139490

Patch by Sebastian Dröge <sebastian@centricular.com> on 2014-12-10
Reviewed by Philippe Normand.

  • platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp:

(webkit_web_audio_src_init):
(webKitWebAudioSrcConstructed):
(webKitWebAudioSrcFinalize):
(webKitWebAudioSrcSetProperty):
(webKitWebAudioSrcLoop):
(webKitWebAudioSrcChangeState):
Previously we directly chained buffers into unconnected queues,
which confused some code inside GStreamer and caused some harmless
warnings. Now we use appsrcs instead, which also allows us to remove
quite a bit of code.

Location:
trunk/Source/WebCore
Files:
2 edited

Legend:

Unmodified
Added
Removed
  • trunk/Source/WebCore/ChangeLog

    r177081 r177085  
     12014-12-10  Sebastian Dröge  <sebastian@centricular.com>
     2
     3        [GStreamer] Use appsrcs instead of unconnected queues
     4        https://bugs.webkit.org/show_bug.cgi?id=139490
     5
     6        Reviewed by Philippe Normand.
     7
     8        * platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp:
     9        (webkit_web_audio_src_init):
     10        (webKitWebAudioSrcConstructed):
     11        (webKitWebAudioSrcFinalize):
     12        (webKitWebAudioSrcSetProperty):
     13        (webKitWebAudioSrcLoop):
     14        (webKitWebAudioSrcChangeState):
     15        Previously we directly chained buffers into unconnected queues,
     16        which confused some code inside GStreamer and caused some harmless
     17        warnings. Now we use appsrcs instead, which also allows us to remove
     18        quite a bit of code.
     19
    1202014-12-10  Enrica Casucci  <enrica@apple.com>
    221
  • trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp

    r177079 r177085  
    2828#include "GRefPtrGStreamer.h"
    2929#include "GStreamerUtilities.h"
     30#include <gst/app/app.h>
    3031#include <gst/audio/audio.h>
    3132#include <gst/pbutils/pbutils.h>
     
    5354    AudioIOCallback* provider;
    5455    guint framesToPull;
     56    guint bufferSize;
    5557
    5658    GRefPtr<GstElement> interleave;
     
    5961    GRecMutex mutex;
    6062
    61     GSList* pads; // List of queue sink pads. One queue for each planar audio channel.
     63    GSList* sources; // List of appsrc. One appsrc for each planar audio channel.
    6264    GstPad* sourcePad; // src pad of the element, interleaved wav data is pushed to it.
    6365
    64     bool newStreamEventPending;
    65     GstSegment segment;
    6666    guint64 numberOfSamples;
    6767
     
    193193    priv->bus = 0;
    194194
    195     priv->newStreamEventPending = true;
    196     gst_segment_init(&priv->segment, GST_FORMAT_TIME);
    197 
    198195    g_rec_mutex_init(&priv->mutex);
    199196    priv->task = gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src, 0);
     
    211208    ASSERT(priv->sampleRate);
    212209
    213     priv->interleave = gst_element_factory_make("interleave", 0);
     210    priv->interleave = gst_element_factory_make("interleave", nullptr);
    214211
    215212    if (!priv->interleave) {
     
    221218
    222219    // For each channel of the bus create a new upstream branch for interleave, like:
    223     // queue ! capsfilter. which is plugged to a new interleave request sinkpad.
     220    // appsrc ! . which is plugged to a new interleave request sinkpad.
    224221    for (unsigned channelIndex = 0; channelIndex < priv->bus->numberOfChannels(); channelIndex++) {
    225         GUniquePtr<gchar> queueName(g_strdup_printf("webaudioQueue%u", channelIndex));
    226         GstElement* queue = gst_element_factory_make("queue", queueName.get());
    227         GstElement* capsfilter = gst_element_factory_make("capsfilter", 0);
    228 
     222        GUniquePtr<gchar> appsrcName(g_strdup_printf("webaudioSrc%u", channelIndex));
     223        GstElement* appsrc = gst_element_factory_make("appsrc", appsrcName.get());
    229224        GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));
    230225
     
    233228        GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(channelIndex);
    234229        GRefPtr<GstCaps> caps = adoptGRef(gst_audio_info_to_caps(&info));
    235         g_object_set(capsfilter, "caps", caps.get(), NULL);
    236 
    237         // Configure the queue for minimal latency.
    238         g_object_set(queue, "max-size-buffers", static_cast<guint>(1), NULL);
    239 
    240         GstPad* pad = gst_element_get_static_pad(queue, "sink");
    241         priv->pads = g_slist_prepend(priv->pads, pad);
    242 
    243         gst_bin_add_many(GST_BIN(src), queue, capsfilter, NULL);
    244         gst_element_link_pads_full(queue, "src", capsfilter, "sink", GST_PAD_LINK_CHECK_NOTHING);
    245         gst_element_link_pads_full(capsfilter, "src", priv->interleave.get(), "sink_%u", GST_PAD_LINK_CHECK_NOTHING);
    246 
    247     }
    248     priv->pads = g_slist_reverse(priv->pads);
    249 
    250     // wavenc's src pad is the only visible pad of our element.
     230
     231        // Configure the appsrc for minimal latency.
     232        g_object_set(appsrc, "max-bytes", 2 * priv->bufferSize, "block", TRUE,
     233            "format", GST_FORMAT_TIME, "caps", caps.get(), nullptr);
     234
     235        priv->sources = g_slist_prepend(priv->sources, gst_object_ref(appsrc));
     236
     237        gst_bin_add(GST_BIN(src), appsrc);
     238        gst_element_link_pads_full(appsrc, "src", priv->interleave.get(), "sink_%u", GST_PAD_LINK_CHECK_NOTHING);
     239    }
     240    priv->sources = g_slist_reverse(priv->sources);
     241
     242    // interleave's src pad is the only visible pad of our element.
    251243    GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->interleave.get(), "src"));
    252244    gst_ghost_pad_set_target(GST_GHOST_PAD(priv->sourcePad), targetPad.get());
     
    260252    g_rec_mutex_clear(&priv->mutex);
    261253
    262     g_slist_free_full(priv->pads, reinterpret_cast<GDestroyNotify>(gst_object_unref));
     254    g_slist_free_full(priv->sources, reinterpret_cast<GDestroyNotify>(gst_object_unref));
    263255
    264256    priv->~WebKitWebAudioSourcePrivate();
     
    283275    case PROP_FRAMES:
    284276        priv->framesToPull = g_value_get_uint(value);
     277        priv->bufferSize = sizeof(float) * priv->framesToPull;
    285278        break;
    286279    default:
     
    332325    GSList* channelBufferList = 0;
    333326    register int i;
    334     for (i = g_slist_length(priv->pads) - 1; i >= 0; i--) {
     327    for (i = g_slist_length(priv->sources) - 1; i >= 0; i--) {
    335328        AudioSrcBuffer* buffer = g_new(AudioSrcBuffer, 1);
    336329        GstBuffer* channelBuffer;
     
    364357    priv->provider->render(0, priv->bus, priv->framesToPull);
    365358
    366     GSList* padsIt = priv->pads;
     359    GSList* sourcesIt = priv->sources;
    367360    GSList* buffersIt = channelBufferList;
    368361
    369 #if GST_CHECK_VERSION(1, 2, 0)
    370     guint groupId = 0;
    371     if (priv->newStreamEventPending)
    372         groupId = gst_util_group_id_next();
    373 #endif
    374 
    375     for (i = 0; padsIt && buffersIt; padsIt = g_slist_next(padsIt), buffersIt = g_slist_next(buffersIt), ++i) {
    376         GstPad* pad = static_cast<GstPad*>(padsIt->data);
     362    for (i = 0; sourcesIt && buffersIt; sourcesIt = g_slist_next(sourcesIt), buffersIt = g_slist_next(buffersIt), ++i) {
     363        GstElement* appsrc = static_cast<GstElement*>(sourcesIt->data);
    377364        AudioSrcBuffer* buffer = static_cast<AudioSrcBuffer*>(buffersIt->data);
    378365        GstBuffer* channelBuffer = buffer->buffer;
     
    382369        g_free(buffer);
    383370
    384         // Send stream-start, segment and caps events downstream, along with the first buffer.
    385         if (priv->newStreamEventPending) {
    386             GRefPtr<GstElement> queue = adoptGRef(gst_pad_get_parent_element(pad));
    387             GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink"));
    388             GUniquePtr<gchar> queueName(gst_element_get_name(queue.get()));
    389             GUniquePtr<gchar> streamId(g_strdup_printf("webaudio/%s", queueName.get()));
    390             GstEvent* streamStartEvent = gst_event_new_stream_start(streamId.get());
    391 #if GST_CHECK_VERSION(1, 2, 0)
    392             gst_event_set_group_id(streamStartEvent, groupId);
    393 #endif
    394             gst_pad_send_event(sinkPad.get(), streamStartEvent);
    395 
    396             GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));
    397             GstAudioInfo info;
    398             gst_audio_info_from_caps(&info, monoCaps.get());
    399             GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(i);
    400             GRefPtr<GstCaps> capsWithChannelPosition = adoptGRef(gst_audio_info_to_caps(&info));
    401             gst_pad_send_event(sinkPad.get(), gst_event_new_caps(capsWithChannelPosition.get()));
    402 
    403             gst_pad_send_event(sinkPad.get(), gst_event_new_segment(&priv->segment));
    404         }
    405 
    406         GstFlowReturn ret = gst_pad_chain(pad, channelBuffer);
     371        GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc), channelBuffer);
    407372        if (ret != GST_FLOW_OK) {
    408             GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s:%s flow: %s", GST_DEBUG_PAD_NAME(pad), gst_flow_get_name(ret)));
     373            GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s flow: %s", GST_OBJECT_NAME(appsrc), gst_flow_get_name(ret)));
    409374            gst_task_pause(src->priv->task.get());
    410375        }
    411376    }
    412 
    413     priv->newStreamEventPending = false;
    414377
    415378    g_slist_free(channelBufferList);
     
    445408        src->priv->pool = gst_buffer_pool_new();
    446409        GstStructure* config = gst_buffer_pool_get_config(src->priv->pool);
    447         gst_buffer_pool_config_set_params(config, nullptr, src->priv->framesToPull * sizeof(float), 0, 0);
     410        gst_buffer_pool_config_set_params(config, nullptr, src->priv->bufferSize, 0, 0);
    448411        gst_buffer_pool_set_config(src->priv->pool, config);
    449412        if (!gst_buffer_pool_set_active(src->priv->pool, TRUE))
     
    454417    }
    455418    case GST_STATE_CHANGE_PAUSED_TO_READY:
    456         src->priv->newStreamEventPending = true;
    457419        GST_DEBUG_OBJECT(src, "PAUSED->READY");
    458420        if (!gst_task_join(src->priv->task.get()))
Note: See TracChangeset for help on using the changeset viewer.