Changeset 176943 in webkit
- Timestamp:
- Dec 8, 2014 1:34:25 AM (9 years ago)
- Location:
- trunk/Source/WebCore
- Files:
- 4 edited
Legend:
- Unmodified
- Added
- Removed
trunk/Source/WebCore/ChangeLog
r176942 r176943 1 2014-12-08 Sebastian Dröge <sebastian@centricular.com> 2 3 [GStreamer] Major cleanup of AudioDestination implementation 4 https://bugs.webkit.org/show_bug.cgi?id=139370 5 6 Reviewed by Philippe Normand. 7 8 * platform/audio/gstreamer/AudioDestinationGStreamer.cpp: 9 (WebCore::AudioDestinationGStreamer::AudioDestinationGStreamer): 10 Add an audioresample element before the audio sink. The audio sink 11 might not be able to handle our sampling rate. 12 13 (WebCore::AudioDestinationGStreamer::AudioDestinationGStreamer): 14 (WebCore::AudioDestinationGStreamer::~AudioDestinationGStreamer): 15 (WebCore::AudioDestinationGStreamer::stop): 16 (WebCore::AudioDestinationGStreamer::finishBuildingPipelineAfterWavParserPadReady): Deleted. 17 Don't use a wavparse element but directly link the raw audio from 18 the source to the audio sink. 19 20 (WebCore::AudioDestinationGStreamer::start): 21 Catch errors when going to PLAYING early, we might not get an error 22 message. 23 24 * platform/audio/gstreamer/AudioDestinationGStreamer.h: 25 * platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp: 26 (getGStreamerMonoAudioCaps): 27 (webKitWebAudioSrcConstructed): 28 (webKitWebAudioSrcChangeState): 29 Don't use a WAV encoder but directly output raw audio. Also don't 30 include a unneeded audioconvert element before the interleave. 31 32 (webKitWebAudioSrcLoop): 33 Add timestamps and durations to the output buffers, map them in 34 READWRITE mode and actually keep them mapped until we're sure 35 nothing is actually writing into them. 36 37 (webKitWebAudioSrcLoop): 38 Pause the task on errors instead of continuously calling it again 39 immediately. 40 1 41 2014-12-08 Sebastian Dröge <sebastian@centricular.com> 2 42 -
trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp
r163797 r176943 1 1 /* 2 2 * Copyright (C) 2011, 2012 Igalia S.L 3 * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com> 3 4 * 4 5 * This library is free software; you can redistribute it and/or … … 88 89 "frames", framesToPull, NULL)); 89 90 90 GstElement* wavParser = gst_element_factory_make("wavparse", 0); 91 92 m_wavParserAvailable = wavParser; 93 ASSERT_WITH_MESSAGE(m_wavParserAvailable, "Failed to create GStreamer wavparse element"); 94 if (!m_wavParserAvailable) 95 return; 96 97 gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, wavParser, NULL); 98 gst_element_link_pads_full(webkitAudioSrc, "src", wavParser, "sink", GST_PAD_LINK_CHECK_NOTHING); 99 100 GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(wavParser, "src")); 101 finishBuildingPipelineAfterWavParserPadReady(srcPad.get()); 102 } 103 104 AudioDestinationGStreamer::~AudioDestinationGStreamer() 105 { 106 GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline))); 107 ASSERT(bus); 108 g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(messageCallback), this); 109 gst_bus_remove_signal_watch(bus.get()); 110 111 gst_element_set_state(m_pipeline, GST_STATE_NULL); 112 gst_object_unref(m_pipeline); 113 } 114 115 void AudioDestinationGStreamer::finishBuildingPipelineAfterWavParserPadReady(GstPad* pad) 116 { 117 ASSERT(m_wavParserAvailable); 91 GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(webkitAudioSrc, "src")); 118 92 119 93 GRefPtr<GstElement> audioSink = gst_element_factory_make("autoaudiosink", 0); 120 94 m_audioSinkAvailable = audioSink; 121 122 95 if (!audioSink) { 123 96 LOG_ERROR("Failed to create GStreamer autoaudiosink element"); … … 137 110 138 111 GstElement* audioConvert = gst_element_factory_make("audioconvert", 0); 139 gst_bin_add_many(GST_BIN(m_pipeline), audioConvert, audioSink.get(), NULL); 112 GstElement* audioResample = gst_element_factory_make("audioresample", 0); 113 
gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, audioConvert, audioResample, audioSink.get(), NULL); 140 114 141 115 // Link wavparse's src pad to audioconvert sink pad. 142 116 GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(audioConvert, "sink")); 143 gst_pad_link_full( pad, sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);117 gst_pad_link_full(srcPad.get(), sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING); 144 118 145 119 // Link audioconvert to audiosink and roll states. 146 gst_element_link_pads_full(audioConvert, "src", audioSink.get(), "sink", GST_PAD_LINK_CHECK_NOTHING); 147 gst_element_sync_state_with_parent(audioConvert); 148 gst_element_sync_state_with_parent(audioSink.leakRef()); 120 gst_element_link_pads_full(audioConvert, "src", audioResample, "sink", GST_PAD_LINK_CHECK_NOTHING); 121 gst_element_link_pads_full(audioResample, "src", audioSink.get(), "sink", GST_PAD_LINK_CHECK_NOTHING); 122 } 123 124 AudioDestinationGStreamer::~AudioDestinationGStreamer() 125 { 126 GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline))); 127 ASSERT(bus); 128 g_signal_handlers_disconnect_by_func(bus.get(), reinterpret_cast<gpointer>(messageCallback), this); 129 gst_bus_remove_signal_watch(bus.get()); 130 131 gst_element_set_state(m_pipeline, GST_STATE_NULL); 132 gst_object_unref(m_pipeline); 149 133 } 150 134 … … 173 157 void AudioDestinationGStreamer::start() 174 158 { 175 ASSERT(m_ wavParserAvailable);176 if (!m_ wavParserAvailable)159 ASSERT(m_audioSinkAvailable); 160 if (!m_audioSinkAvailable) 177 161 return; 178 162 179 gst_element_set_state(m_pipeline, GST_STATE_PLAYING); 163 if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) { 164 g_warning("Error: Failed to set pipeline to playing"); 165 m_isPlaying = false; 166 return; 167 } 168 180 169 m_isPlaying = true; 181 170 } … … 183 172 void AudioDestinationGStreamer::stop() 184 173 { 185 ASSERT(m_ wavParserAvailable && m_audioSinkAvailable);186 if (!m_ 
wavParserAvailable || !m_audioSinkAvailable)174 ASSERT(m_audioSinkAvailable); 175 if (!m_audioSinkAvailable) 187 176 return; 188 177 -
trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.h
r149817 r176943 42 42 AudioIOCallback& callback() const { return m_callback; } 43 43 44 void finishBuildingPipelineAfterWavParserPadReady(GstPad*);45 44 gboolean handleMessage(GstMessage*); 46 45 … … 51 50 float m_sampleRate; 52 51 bool m_isPlaying; 53 bool m_wavParserAvailable;54 52 bool m_audioSinkAvailable; 55 53 GstElement* m_pipeline; -
trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp
r172338 r176943 1 1 /* 2 2 * Copyright (C) 2011, 2012 Igalia S.L 3 * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com> 3 4 * 4 5 * This library is free software; you can redistribute it and/or … … 54 55 55 56 GRefPtr<GstElement> interleave; 56 GRefPtr<GstElement> wavEncoder;57 57 58 58 GRefPtr<GstTask> task; … … 64 64 bool newStreamEventPending; 65 65 GstSegment segment; 66 guint64 numberOfSamples; 66 67 }; 67 68 … … 73 74 }; 74 75 76 typedef struct { 77 GstBuffer* buffer; 78 GstMapInfo info; 79 } AudioSrcBuffer; 80 75 81 static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src", 76 77 78 GST_STATIC_CAPS("audio/x-wav"));82 GST_PAD_SRC, 83 GST_PAD_ALWAYS, 84 GST_STATIC_CAPS(GST_AUDIO_CAPS_MAKE(GST_AUDIO_NE(F32)))); 79 85 80 86 GST_DEBUG_CATEGORY_STATIC(webkit_web_audio_src_debug); … … 92 98 return gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(sampleRate), 93 99 "channels", G_TYPE_INT, 1, 94 "format", G_TYPE_STRING, gst_audio_format_to_string(GST_AUDIO_FORMAT_F32),100 "format", G_TYPE_STRING, GST_AUDIO_NE(F32), 95 101 "layout", G_TYPE_STRING, "interleaved", nullptr); 96 102 } … … 204 210 205 211 priv->interleave = gst_element_factory_make("interleave", 0); 206 priv->wavEncoder = gst_element_factory_make("wavenc", 0);207 212 208 213 if (!priv->interleave) { … … 211 216 } 212 217 213 if (!priv->wavEncoder) { 214 GST_ERROR_OBJECT(src, "Failed to create wavenc"); 215 return; 216 } 217 218 gst_bin_add_many(GST_BIN(src), priv->interleave.get(), priv->wavEncoder.get(), NULL); 219 gst_element_link_pads_full(priv->interleave.get(), "src", priv->wavEncoder.get(), "sink", GST_PAD_LINK_CHECK_NOTHING); 218 gst_bin_add(GST_BIN(src), priv->interleave.get()); 220 219 221 220 // For each channel of the bus create a new upstream branch for interleave, like: 222 // queue ! capsfilter ! audioconvert. which is plugged to a new interleave request sinkpad.221 // queue ! capsfilter. which is plugged to a new interleave request sinkpad. 
223 222 for (unsigned channelIndex = 0; channelIndex < priv->bus->numberOfChannels(); channelIndex++) { 224 223 GUniquePtr<gchar> queueName(g_strdup_printf("webaudioQueue%u", channelIndex)); 225 224 GstElement* queue = gst_element_factory_make("queue", queueName.get()); 226 225 GstElement* capsfilter = gst_element_factory_make("capsfilter", 0); 227 GstElement* audioconvert = gst_element_factory_make("audioconvert", 0);228 226 229 227 GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate)); … … 241 239 priv->pads = g_slist_prepend(priv->pads, pad); 242 240 243 gst_bin_add_many(GST_BIN(src), queue, capsfilter, audioconvert,NULL);241 gst_bin_add_many(GST_BIN(src), queue, capsfilter, NULL); 244 242 gst_element_link_pads_full(queue, "src", capsfilter, "sink", GST_PAD_LINK_CHECK_NOTHING); 245 gst_element_link_pads_full(capsfilter, "src", audioconvert, "sink", GST_PAD_LINK_CHECK_NOTHING); 246 gst_element_link_pads_full(audioconvert, "src", priv->interleave.get(), 0, GST_PAD_LINK_CHECK_NOTHING); 243 gst_element_link_pads_full(capsfilter, "src", priv->interleave.get(), "sink_%u", GST_PAD_LINK_CHECK_NOTHING); 247 244 248 245 } … … 250 247 251 248 // wavenc's src pad is the only visible pad of our element. 
252 GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv-> wavEncoder.get(), "src"));249 GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->interleave.get(), "src")); 253 250 gst_ghost_pad_set_target(GST_GHOST_PAD(priv->sourcePad), targetPad.get()); 254 251 } … … 321 318 ASSERT(priv->bus); 322 319 ASSERT(priv->provider); 323 if (!priv->provider || !priv->bus) 320 if (!priv->provider || !priv->bus) { 321 gst_task_pause(src->priv->task.get()); 324 322 return; 323 } 324 325 GstClockTime timestamp = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate); 326 priv->numberOfSamples += priv->framesToPull; 327 GstClockTime duration = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate) - timestamp; 325 328 326 329 GSList* channelBufferList = 0; … … 328 331 unsigned bufferSize = priv->framesToPull * sizeof(float); 329 332 for (i = g_slist_length(priv->pads) - 1; i >= 0; i--) { 333 AudioSrcBuffer* buffer = g_new(AudioSrcBuffer, 1); 330 334 GstBuffer* channelBuffer = gst_buffer_new_and_alloc(bufferSize); 331 335 ASSERT(channelBuffer); 332 channelBufferList = g_slist_prepend(channelBufferList, channelBuffer); 333 GstMapInfo info; 334 gst_buffer_map(channelBuffer, &info, GST_MAP_READ); 335 priv->bus->setChannelMemory(i, reinterpret_cast<float*>(info.data), priv->framesToPull); 336 gst_buffer_unmap(channelBuffer, &info); 336 buffer->buffer = channelBuffer; 337 GST_BUFFER_TIMESTAMP(channelBuffer) = timestamp; 338 GST_BUFFER_DURATION(channelBuffer) = duration; 339 gst_buffer_map(channelBuffer, &buffer->info, (GstMapFlags) GST_MAP_READWRITE); 340 priv->bus->setChannelMemory(i, reinterpret_cast<float*>(buffer->info.data), priv->framesToPull); 341 channelBufferList = g_slist_prepend(channelBufferList, buffer); 337 342 } 338 343 … … 351 356 for (i = 0; padsIt && buffersIt; padsIt = g_slist_next(padsIt), buffersIt = g_slist_next(buffersIt), ++i) { 352 357 GstPad* pad = static_cast<GstPad*>(padsIt->data); 353 
GstBuffer* channelBuffer = static_cast<GstBuffer*>(buffersIt->data); 358 AudioSrcBuffer* buffer = static_cast<AudioSrcBuffer*>(buffersIt->data); 359 GstBuffer* channelBuffer = buffer->buffer; 360 361 // Unmap before passing on the buffer. 362 gst_buffer_unmap(channelBuffer, &buffer->info); 363 g_free(buffer); 354 364 355 365 // Send stream-start, segment and caps events downstream, along with the first buffer. … … 376 386 377 387 GstFlowReturn ret = gst_pad_chain(pad, channelBuffer); 378 if (ret != GST_FLOW_OK) 388 if (ret != GST_FLOW_OK) { 379 389 GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s:%s flow: %s", GST_DEBUG_PAD_NAME(pad), gst_flow_get_name(ret))); 390 gst_task_pause(src->priv->task.get()); 391 } 380 392 } 381 393 … … 397 409 return GST_STATE_CHANGE_FAILURE; 398 410 } 399 if (!src->priv->wavEncoder) { 400 gst_element_post_message(element, gst_missing_element_message_new(element, "wavenc")); 401 GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no wavenc")); 402 return GST_STATE_CHANGE_FAILURE; 403 } 411 src->priv->numberOfSamples = 0; 404 412 break; 405 413 default:
Note: See TracChangeset for help on using the changeset viewer.