Changeset 177085 in webkit
- Timestamp:
- Dec 10, 2014, 11:43:00 AM (10 years ago)
- Location:
- trunk/Source/WebCore
- Files:
-
- 2 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/WebCore/ChangeLog
r177081 r177085 1 2014-12-10 Sebastian Dröge <sebastian@centricular.com> 2 3 [GStreamer] Use appsrcs instead of unconnected queues 4 https://bugs.webkit.org/show_bug.cgi?id=139490 5 6 Reviewed by Philippe Normand. 7 8 * platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp: 9 (webkit_web_audio_src_init): 10 (webKitWebAudioSrcConstructed): 11 (webKitWebAudioSrcFinalize): 12 (webKitWebAudioSrcSetProperty): 13 (webKitWebAudioSrcLoop): 14 (webKitWebAudioSrcChangeState): 15 Previously we directly chained buffers into unconnected queues, 16 which confused some code inside GStreamer and caused some harmless 17 warnings. Now we use appsrcs instead, which also allows us to remove 18 quite a lot of code. 19 1 20 2014-12-10 Enrica Casucci <enrica@apple.com> 2 21 -
trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp
r177079 r177085 28 28 #include "GRefPtrGStreamer.h" 29 29 #include "GStreamerUtilities.h" 30 #include <gst/app/app.h> 30 31 #include <gst/audio/audio.h> 31 32 #include <gst/pbutils/pbutils.h> … … 53 54 AudioIOCallback* provider; 54 55 guint framesToPull; 56 guint bufferSize; 55 57 56 58 GRefPtr<GstElement> interleave; … … 59 61 GRecMutex mutex; 60 62 61 GSList* pads; // List of queue sink pads. One queuefor each planar audio channel.63 GSList* sources; // List of appsrc. One appsrc for each planar audio channel. 62 64 GstPad* sourcePad; // src pad of the element, interleaved wav data is pushed to it. 63 65 64 bool newStreamEventPending;65 GstSegment segment;66 66 guint64 numberOfSamples; 67 67 … … 193 193 priv->bus = 0; 194 194 195 priv->newStreamEventPending = true;196 gst_segment_init(&priv->segment, GST_FORMAT_TIME);197 198 195 g_rec_mutex_init(&priv->mutex); 199 196 priv->task = gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src, 0); … … 211 208 ASSERT(priv->sampleRate); 212 209 213 priv->interleave = gst_element_factory_make("interleave", 0);210 priv->interleave = gst_element_factory_make("interleave", nullptr); 214 211 215 212 if (!priv->interleave) { … … 221 218 222 219 // For each channel of the bus create a new upstream branch for interleave, like: 223 // queue ! capsfilter. which is plugged to a new interleave request sinkpad.220 // appsrc ! . which is plugged to a new interleave request sinkpad. 
224 221 for (unsigned channelIndex = 0; channelIndex < priv->bus->numberOfChannels(); channelIndex++) { 225 GUniquePtr<gchar> queueName(g_strdup_printf("webaudioQueue%u", channelIndex)); 226 GstElement* queue = gst_element_factory_make("queue", queueName.get()); 227 GstElement* capsfilter = gst_element_factory_make("capsfilter", 0); 228 222 GUniquePtr<gchar> appsrcName(g_strdup_printf("webaudioSrc%u", channelIndex)); 223 GstElement* appsrc = gst_element_factory_make("appsrc", appsrcName.get()); 229 224 GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate)); 230 225 … … 233 228 GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(channelIndex); 234 229 GRefPtr<GstCaps> caps = adoptGRef(gst_audio_info_to_caps(&info)); 235 g_object_set(capsfilter, "caps", caps.get(), NULL); 236 237 // Configure the queue for minimal latency. 238 g_object_set(queue, "max-size-buffers", static_cast<guint>(1), NULL); 239 240 GstPad* pad = gst_element_get_static_pad(queue, "sink"); 241 priv->pads = g_slist_prepend(priv->pads, pad); 242 243 gst_bin_add_many(GST_BIN(src), queue, capsfilter, NULL); 244 gst_element_link_pads_full(queue, "src", capsfilter, "sink", GST_PAD_LINK_CHECK_NOTHING); 245 gst_element_link_pads_full(capsfilter, "src", priv->interleave.get(), "sink_%u", GST_PAD_LINK_CHECK_NOTHING); 246 247 } 248 priv->pads = g_slist_reverse(priv->pads); 249 250 // wavenc's src pad is the only visible pad of our element. 230 231 // Configure the appsrc for minimal latency. 
232 g_object_set(appsrc, "max-bytes", 2 * priv->bufferSize, "block", TRUE, 233 "format", GST_FORMAT_TIME, "caps", caps.get(), nullptr); 234 235 priv->sources = g_slist_prepend(priv->sources, gst_object_ref(appsrc)); 236 237 gst_bin_add(GST_BIN(src), appsrc); 238 gst_element_link_pads_full(appsrc, "src", priv->interleave.get(), "sink_%u", GST_PAD_LINK_CHECK_NOTHING); 239 } 240 priv->sources = g_slist_reverse(priv->sources); 241 242 // interleave's src pad is the only visible pad of our element. 251 243 GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->interleave.get(), "src")); 252 244 gst_ghost_pad_set_target(GST_GHOST_PAD(priv->sourcePad), targetPad.get()); … … 260 252 g_rec_mutex_clear(&priv->mutex); 261 253 262 g_slist_free_full(priv-> pads, reinterpret_cast<GDestroyNotify>(gst_object_unref));254 g_slist_free_full(priv->sources, reinterpret_cast<GDestroyNotify>(gst_object_unref)); 263 255 264 256 priv->~WebKitWebAudioSourcePrivate(); … … 283 275 case PROP_FRAMES: 284 276 priv->framesToPull = g_value_get_uint(value); 277 priv->bufferSize = sizeof(float) * priv->framesToPull; 285 278 break; 286 279 default: … … 332 325 GSList* channelBufferList = 0; 333 326 register int i; 334 for (i = g_slist_length(priv-> pads) - 1; i >= 0; i--) {327 for (i = g_slist_length(priv->sources) - 1; i >= 0; i--) { 335 328 AudioSrcBuffer* buffer = g_new(AudioSrcBuffer, 1); 336 329 GstBuffer* channelBuffer; … … 364 357 priv->provider->render(0, priv->bus, priv->framesToPull); 365 358 366 GSList* padsIt = priv->pads;359 GSList* sourcesIt = priv->sources; 367 360 GSList* buffersIt = channelBufferList; 368 361 369 #if GST_CHECK_VERSION(1, 2, 0) 370 guint groupId = 0; 371 if (priv->newStreamEventPending) 372 groupId = gst_util_group_id_next(); 373 #endif 374 375 for (i = 0; padsIt && buffersIt; padsIt = g_slist_next(padsIt), buffersIt = g_slist_next(buffersIt), ++i) { 376 GstPad* pad = static_cast<GstPad*>(padsIt->data); 362 for (i = 0; sourcesIt && buffersIt; sourcesIt 
= g_slist_next(sourcesIt), buffersIt = g_slist_next(buffersIt), ++i) { 363 GstElement* appsrc = static_cast<GstElement*>(sourcesIt->data); 377 364 AudioSrcBuffer* buffer = static_cast<AudioSrcBuffer*>(buffersIt->data); 378 365 GstBuffer* channelBuffer = buffer->buffer; … … 382 369 g_free(buffer); 383 370 384 // Send stream-start, segment and caps events downstream, along with the first buffer. 385 if (priv->newStreamEventPending) { 386 GRefPtr<GstElement> queue = adoptGRef(gst_pad_get_parent_element(pad)); 387 GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink")); 388 GUniquePtr<gchar> queueName(gst_element_get_name(queue.get())); 389 GUniquePtr<gchar> streamId(g_strdup_printf("webaudio/%s", queueName.get())); 390 GstEvent* streamStartEvent = gst_event_new_stream_start(streamId.get()); 391 #if GST_CHECK_VERSION(1, 2, 0) 392 gst_event_set_group_id(streamStartEvent, groupId); 393 #endif 394 gst_pad_send_event(sinkPad.get(), streamStartEvent); 395 396 GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate)); 397 GstAudioInfo info; 398 gst_audio_info_from_caps(&info, monoCaps.get()); 399 GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(i); 400 GRefPtr<GstCaps> capsWithChannelPosition = adoptGRef(gst_audio_info_to_caps(&info)); 401 gst_pad_send_event(sinkPad.get(), gst_event_new_caps(capsWithChannelPosition.get())); 402 403 gst_pad_send_event(sinkPad.get(), gst_event_new_segment(&priv->segment)); 404 } 405 406 GstFlowReturn ret = gst_pad_chain(pad, channelBuffer); 371 GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc), channelBuffer); 407 372 if (ret != GST_FLOW_OK) { 408 GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s :%s flow: %s", GST_DEBUG_PAD_NAME(pad), gst_flow_get_name(ret)));373 GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s flow: %s", GST_OBJECT_NAME(appsrc), 
gst_flow_get_name(ret))); 409 374 gst_task_pause(src->priv->task.get()); 410 375 } 411 376 } 412 413 priv->newStreamEventPending = false;414 377 415 378 g_slist_free(channelBufferList); … … 445 408 src->priv->pool = gst_buffer_pool_new(); 446 409 GstStructure* config = gst_buffer_pool_get_config(src->priv->pool); 447 gst_buffer_pool_config_set_params(config, nullptr, src->priv-> framesToPull * sizeof(float), 0, 0);410 gst_buffer_pool_config_set_params(config, nullptr, src->priv->bufferSize, 0, 0); 448 411 gst_buffer_pool_set_config(src->priv->pool, config); 449 412 if (!gst_buffer_pool_set_active(src->priv->pool, TRUE)) … … 454 417 } 455 418 case GST_STATE_CHANGE_PAUSED_TO_READY: 456 src->priv->newStreamEventPending = true;457 419 GST_DEBUG_OBJECT(src, "PAUSED->READY"); 458 420 if (!gst_task_join(src->priv->task.get()))
Note:
See TracChangeset
for help on using the changeset viewer.