Changeset 213445 in webkit
- Timestamp:
- Mar 6, 2017 2:32:57 AM (7 years ago)
- Location:
- trunk/Source/WebCore
- Files:
-
- 18 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/WebCore/ChangeLog
r213444 r213445 1 2017-03-06 Vanessa Chipirrás Navalón <vchipirras@igalia.com> 2 3 [GStreamer] Adopt nullptr 4 https://bugs.webkit.org/show_bug.cgi?id=123438 5 6 Reviewed by Xabier Rodriguez-Calvar. 7 8 To adapt the code to the C++11 standard, all NULL or 0 pointers have been changed to nullptr. 9 10 * platform/audio/gstreamer/AudioDestinationGStreamer.cpp: 11 (WebCore::AudioDestinationGStreamer::AudioDestinationGStreamer): 12 * platform/audio/gstreamer/AudioFileReaderGStreamer.cpp: 13 (WebCore::AudioFileReader::handleNewDeinterleavePad): 14 (WebCore::AudioFileReader::plugDeinterleave): 15 (WebCore::AudioFileReader::decodeAudioForBusCreation): 16 * platform/audio/gstreamer/AudioSourceProviderGStreamer.cpp: 17 (WebCore::AudioSourceProviderGStreamer::AudioSourceProviderGStreamer): 18 (WebCore::AudioSourceProviderGStreamer::configureAudioBin): 19 (WebCore::AudioSourceProviderGStreamer::setClient): 20 (WebCore::AudioSourceProviderGStreamer::handleNewDeinterleavePad): 21 * platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp: 22 (webkit_web_audio_src_init): 23 (webKitWebAudioSrcLoop): 24 (webKitWebAudioSrcChangeState): 25 * platform/graphics/gstreamer/AudioTrackPrivateGStreamer.cpp: 26 (WebCore::AudioTrackPrivateGStreamer::setEnabled): 27 * platform/graphics/gstreamer/GStreamerUtilities.cpp: 28 (WebCore::initializeGStreamer): 29 * platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp: 30 (WebCore::MediaPlayerPrivateGStreamer::setAudioStreamProperties): 31 (WebCore::MediaPlayerPrivateGStreamer::registerMediaEngine): 32 (WebCore::initializeGStreamerAndRegisterWebKitElements): 33 (WebCore::MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer): 34 (WebCore::MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer): 35 (WebCore::MediaPlayerPrivateGStreamer::newTextSample): 36 (WebCore::MediaPlayerPrivateGStreamer::handleMessage): 37 (WebCore::MediaPlayerPrivateGStreamer::processTableOfContents): 38 Removed the unused second argument on 
processTableOfContentsEntry function. 39 (WebCore::MediaPlayerPrivateGStreamer::processTableOfContentsEntry): 40 Removed the unused second argument on this function. 41 (WebCore::MediaPlayerPrivateGStreamer::fillTimerFired): 42 (WebCore::MediaPlayerPrivateGStreamer::loadNextLocation): 43 (WebCore::MediaPlayerPrivateGStreamer::createAudioSink): 44 (WebCore::MediaPlayerPrivateGStreamer::createGSTPlayBin): 45 * platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h: 46 Removed the unused second argument on processTableOfContentsEntry function. 47 * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp: 48 (WebCore::MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase): 49 (WebCore::MediaPlayerPrivateGStreamerBase::setMuted): 50 (WebCore::MediaPlayerPrivateGStreamerBase::muted): 51 (WebCore::MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute): 52 (WebCore::MediaPlayerPrivateGStreamerBase::setStreamVolumeElement): 53 (WebCore::MediaPlayerPrivateGStreamerBase::decodedFrameCount): 54 (WebCore::MediaPlayerPrivateGStreamerBase::droppedFrameCount): 55 * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.cpp: 56 (WebCore::MediaPlayerPrivateGStreamerOwr::registerMediaEngine): 57 * platform/graphics/gstreamer/TextCombinerGStreamer.cpp: 58 (webkit_text_combiner_init): 59 (webkitTextCombinerPadEvent): 60 (webkitTextCombinerRequestNewPad): 61 (webkitTextCombinerNew): 62 * platform/graphics/gstreamer/TextSinkGStreamer.cpp: 63 (webkitTextSinkNew): 64 * platform/graphics/gstreamer/TrackPrivateBaseGStreamer.cpp: 65 (WebCore::TrackPrivateBaseGStreamer::tagsChanged): 66 (WebCore::TrackPrivateBaseGStreamer::notifyTrackOfActiveChanged): 67 * platform/graphics/gstreamer/VideoSinkGStreamer.cpp: 68 (webkit_video_sink_init): 69 (webkitVideoSinkProposeAllocation): 70 (webkitVideoSinkNew): 71 * platform/graphics/gstreamer/VideoTrackPrivateGStreamer.cpp: 72 (WebCore::VideoTrackPrivateGStreamer::setSelected): 73 * 
platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp: 74 (webkit_web_src_init): 75 (webKitWebSrcDispose): 76 (webKitWebSrcSetProperty): 77 (webKitWebSrcStop): 78 (webKitWebSrcChangeState): 79 (webKitWebSrcQueryWithParent): 80 (webKitWebSrcGetProtocols): 81 (StreamingClient::handleResponseReceived): 82 (StreamingClient::handleDataReceived): 83 (ResourceHandleStreamingClient::didFail): 84 (ResourceHandleStreamingClient::wasBlocked): 85 (ResourceHandleStreamingClient::cannotShowURL): 86 * platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp: 87 (webKitMediaSrcGetProtocols): 88 1 89 2017-03-06 Andreas Kling <akling@apple.com> 2 90 -
trunk/Source/WebCore/platform/audio/gstreamer/AudioDestinationGStreamer.cpp
r201077 r213445 93 93 "bus", m_renderBus.get(), 94 94 "provider", &m_callback, 95 "frames", framesToPull, NULL));95 "frames", framesToPull, nullptr)); 96 96 97 GRefPtr<GstElement> audioSink = gst_element_factory_make("autoaudiosink", 0);97 GRefPtr<GstElement> audioSink = gst_element_factory_make("autoaudiosink", nullptr); 98 98 m_audioSinkAvailable = audioSink; 99 99 if (!audioSink) { … … 115 115 } 116 116 117 GstElement* audioConvert = gst_element_factory_make("audioconvert", 0);118 GstElement* audioResample = gst_element_factory_make("audioresample", 0);119 gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, audioConvert, audioResample, audioSink.get(), NULL);117 GstElement* audioConvert = gst_element_factory_make("audioconvert", nullptr); 118 GstElement* audioResample = gst_element_factory_make("audioresample", nullptr); 119 gst_bin_add_many(GST_BIN(m_pipeline), webkitAudioSrc, audioConvert, audioResample, audioSink.get(), nullptr); 120 120 121 121 // Link src pads from webkitAudioSrc to audioConvert ! audioResample ! autoaudiosink. -
trunk/Source/WebCore/platform/audio/gstreamer/AudioFileReaderGStreamer.cpp
r192511 r213445 214 214 // channel. Pipeline looks like: 215 215 // ... deinterleave ! queue ! appsink. 216 GstElement* queue = gst_element_factory_make("queue", 0);217 GstElement* sink = gst_element_factory_make("appsink", 0);216 GstElement* queue = gst_element_factory_make("queue", nullptr); 217 GstElement* sink = gst_element_factory_make("appsink", nullptr); 218 218 219 219 static GstAppSinkCallbacks callbacks = { … … 226 226 { nullptr } 227 227 }; 228 gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, this, 0);229 230 g_object_set(sink, "sync", FALSE, NULL);228 gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, this, nullptr); 229 230 g_object_set(sink, "sync", FALSE, nullptr); 231 231 232 232 gst_bin_add_many(GST_BIN(m_pipeline.get()), queue, sink, nullptr); … … 257 257 // separate each planar channel. Sub pipeline looks like 258 258 // ... decodebin2 ! audioconvert ! audioresample ! capsfilter ! deinterleave. 259 GstElement* audioConvert = gst_element_factory_make("audioconvert", 0);260 GstElement* audioResample = gst_element_factory_make("audioresample", 0);261 GstElement* capsFilter = gst_element_factory_make("capsfilter", 0);259 GstElement* audioConvert = gst_element_factory_make("audioconvert", nullptr); 260 GstElement* audioResample = gst_element_factory_make("audioresample", nullptr); 261 GstElement* capsFilter = gst_element_factory_make("capsfilter", nullptr); 262 262 m_deInterleave = gst_element_factory_make("deinterleave", "deinterleave"); 263 263 264 g_object_set(m_deInterleave.get(), "keep-positions", TRUE, NULL);264 g_object_set(m_deInterleave.get(), "keep-positions", TRUE, nullptr); 265 265 g_signal_connect_swapped(m_deInterleave.get(), "pad-added", G_CALLBACK(deinterleavePadAddedCallback), this); 266 266 g_signal_connect_swapped(m_deInterleave.get(), "no-more-pads", G_CALLBACK(deinterleaveReadyCallback), this); … … 317 317 if (m_data) { 318 318 ASSERT(m_dataSize); 319 source = gst_element_factory_make("giostreamsrc", 0);320 
GRefPtr<GInputStream> memoryStream = adoptGRef(g_memory_input_stream_new_from_data(m_data, m_dataSize, 0));321 g_object_set(source, "stream", memoryStream.get(), NULL);319 source = gst_element_factory_make("giostreamsrc", nullptr); 320 GRefPtr<GInputStream> memoryStream = adoptGRef(g_memory_input_stream_new_from_data(m_data, m_dataSize, nullptr)); 321 g_object_set(source, "stream", memoryStream.get(), nullptr); 322 322 } else { 323 source = gst_element_factory_make("filesrc", 0);324 g_object_set(source, "location", m_filePath, NULL);323 source = gst_element_factory_make("filesrc", nullptr); 324 g_object_set(source, "location", m_filePath, nullptr); 325 325 } 326 326 … … 328 328 g_signal_connect_swapped(m_decodebin.get(), "pad-added", G_CALLBACK(decodebinPadAddedCallback), this); 329 329 330 gst_bin_add_many(GST_BIN(m_pipeline.get()), source, m_decodebin.get(), NULL);330 gst_bin_add_many(GST_BIN(m_pipeline.get()), source, m_decodebin.get(), nullptr); 331 331 gst_element_link_pads_full(source, "src", m_decodebin.get(), "sink", GST_PAD_LINK_CHECK_NOTHING); 332 332 -
trunk/Source/WebCore/platform/audio/gstreamer/AudioSourceProviderGStreamer.cpp
r185502 r213445 84 84 85 85 AudioSourceProviderGStreamer::AudioSourceProviderGStreamer() 86 : m_client( 0)86 : m_client(nullptr) 87 87 , m_deinterleaveSourcePads(0) 88 88 , m_deinterleavePadAddedHandlerId(0) … … 114 114 115 115 GstElement* audioTee = gst_element_factory_make("tee", "audioTee"); 116 GstElement* audioQueue = gst_element_factory_make("queue", 0);117 GstElement* audioConvert = gst_element_factory_make("audioconvert", 0);118 GstElement* audioConvert2 = gst_element_factory_make("audioconvert", 0);119 GstElement* audioResample = gst_element_factory_make("audioresample", 0);120 GstElement* audioResample2 = gst_element_factory_make("audioresample", 0);116 GstElement* audioQueue = gst_element_factory_make("queue", nullptr); 117 GstElement* audioConvert = gst_element_factory_make("audioconvert", nullptr); 118 GstElement* audioConvert2 = gst_element_factory_make("audioconvert", nullptr); 119 GstElement* audioResample = gst_element_factory_make("audioresample", nullptr); 120 GstElement* audioResample2 = gst_element_factory_make("audioresample", nullptr); 121 121 GstElement* volumeElement = gst_element_factory_make("volume", "volume"); 122 GstElement* audioSink = gst_element_factory_make("autoaudiosink", 0);122 GstElement* audioSink = gst_element_factory_make("autoaudiosink", nullptr); 123 123 124 124 gst_bin_add_many(GST_BIN(m_audioSinkBin.get()), audioTee, audioQueue, audioConvert, audioResample, volumeElement, audioConvert2, audioResample2, audioSink, nullptr); … … 212 212 // ensure deinterleave and the sinks downstream receive buffers in 213 213 // the format specified by the capsfilter. 
214 GstElement* audioQueue = gst_element_factory_make("queue", 0);215 GstElement* audioConvert = gst_element_factory_make("audioconvert", 0);216 GstElement* audioResample = gst_element_factory_make("audioresample", 0);217 GstElement* capsFilter = gst_element_factory_make("capsfilter", 0);214 GstElement* audioQueue = gst_element_factory_make("queue", nullptr); 215 GstElement* audioConvert = gst_element_factory_make("audioconvert", nullptr); 216 GstElement* audioResample = gst_element_factory_make("audioresample", nullptr); 217 GstElement* capsFilter = gst_element_factory_make("capsfilter", nullptr); 218 218 GstElement* deInterleave = gst_element_factory_make("deinterleave", "deinterleave"); 219 219 … … 258 258 if (m_deinterleaveSourcePads > 2) { 259 259 g_warning("The AudioSourceProvider supports only mono and stereo audio. Silencing out this new channel."); 260 GstElement* queue = gst_element_factory_make("queue", 0);261 GstElement* sink = gst_element_factory_make("fakesink", 0);260 GstElement* queue = gst_element_factory_make("queue", nullptr); 261 GstElement* sink = gst_element_factory_make("fakesink", nullptr); 262 262 g_object_set(sink, "async", FALSE, nullptr); 263 263 gst_bin_add_many(GST_BIN(m_audioSinkBin.get()), queue, sink, nullptr); … … 278 278 // channel. Pipeline looks like: 279 279 // ... deinterleave ! queue ! appsink. 
280 GstElement* queue = gst_element_factory_make("queue", 0);281 GstElement* sink = gst_element_factory_make("appsink", 0);280 GstElement* queue = gst_element_factory_make("queue", nullptr); 281 GstElement* sink = gst_element_factory_make("appsink", nullptr); 282 282 283 283 GstAppSinkCallbacks callbacks; 284 callbacks.eos = 0;285 callbacks.new_preroll = 0;284 callbacks.eos = nullptr; 285 callbacks.new_preroll = nullptr; 286 286 callbacks.new_sample = onAppsinkNewBufferCallback; 287 gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, this, 0);287 gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, this, nullptr); 288 288 289 289 g_object_set(sink, "async", FALSE, nullptr); -
trunk/Source/WebCore/platform/audio/gstreamer/WebKitWebAudioSourceGStreamer.cpp
r210584 r213445 185 185 new (priv) WebKitWebAudioSourcePrivate(); 186 186 187 priv->sourcePad = webkitGstGhostPadFromStaticTemplate(&srcTemplate, "src", 0);187 priv->sourcePad = webkitGstGhostPadFromStaticTemplate(&srcTemplate, "src", nullptr); 188 188 gst_element_add_pad(GST_ELEMENT(src), priv->sourcePad); 189 189 190 priv->provider = 0;191 priv->bus = 0;190 priv->provider = nullptr; 191 priv->bus = nullptr; 192 192 193 193 g_rec_mutex_init(&priv->mutex); 194 priv->task = adoptGRef(gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src, 0));194 priv->task = adoptGRef(gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src, nullptr)); 195 195 196 196 gst_task_set_lock(priv->task.get(), &priv->mutex); … … 345 345 346 346 // FIXME: Add support for local/live audio input. 347 priv->provider->render( 0, priv->bus, priv->framesToPull);347 priv->provider->render(nullptr, priv->bus, priv->framesToPull); 348 348 349 349 ASSERT(channelBufferList.size() == priv->sources.size()); … … 379 379 if (!src->priv->interleave) { 380 380 gst_element_post_message(element, gst_missing_element_message_new(element, "interleave")); 381 GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, ( 0), ("no interleave"));381 GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (nullptr), ("no interleave")); 382 382 return GST_STATE_CHANGE_FAILURE; 383 383 } -
trunk/Source/WebCore/platform/graphics/gstreamer/AudioTrackPrivateGStreamer.cpp
r207873 r213445 56 56 57 57 if (enabled && m_playbin) 58 g_object_set(m_playbin.get(), "current-audio", m_index, NULL);58 g_object_set(m_playbin.get(), "current-audio", m_index, nullptr); 59 59 } 60 60 -
trunk/Source/WebCore/platform/graphics/gstreamer/GStreamerUtilities.cpp
r210584 r213445 153 153 GUniqueOutPtr<GError> error; 154 154 // FIXME: We should probably pass the arguments from the command line. 155 bool gstInitialized = gst_init_check( 0, 0, &error.outPtr());155 bool gstInitialized = gst_init_check(nullptr, nullptr, &error.outPtr()); 156 156 ASSERT_WITH_MESSAGE(gstInitialized, "GStreamer initialization failed: %s", error ? error->message : "unknown error occurred"); 157 157 -
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
r213224 r213445 97 97 98 98 const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music"; 99 GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, NULL);100 g_object_set(object, "stream-properties", structure, NULL);99 GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr); 100 g_object_set(object, "stream-properties", structure, nullptr); 101 101 gst_structure_free(structure); 102 102 GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object))); … … 108 108 if (isAvailable()) 109 109 registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); }, 110 getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);110 getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem); 111 111 } 112 112 … … 121 121 if (!srcFactory) { 122 122 GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player"); 123 gst_element_register( 0, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);123 gst_element_register(nullptr, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC); 124 124 } 125 125 … … 154 154 , m_seekIsPending(false) 155 155 , m_seekTime(0) 156 , m_source( 0)156 , m_source(nullptr) 157 157 , m_volumeAndMuteInitialized(false) 158 158 , m_weakPtrFactory(this) 159 , m_mediaLocations( 0)159 , m_mediaLocations(nullptr) 160 160 , m_mediaLocationCurrentIndex(0) 161 161 , m_playbackRatePause(false) … … 195 195 if (m_mediaLocations) { 196 196 gst_structure_free(m_mediaLocations); 197 m_mediaLocations = 0;197 m_mediaLocations = nullptr; 198 198 } 199 199 … … 797 797 798 798 GRefPtr<GstSample> sample; 799 g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), NULL);799 g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr); 800 800 ASSERT(sample); 801 801 … … 987 987 // Construct a filename for 
the graphviz dot file output. 988 988 GstState newState; 989 gst_message_parse_state_changed(message, &currentState, &newState, 0);989 gst_message_parse_state_changed(message, &currentState, &newState, nullptr); 990 990 CString dotFileName = String::format("webkit-video.%s_%s", gst_element_state_get_name(currentState), gst_element_state_get_name(newState)).utf8(); 991 991 GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data()); … … 1174 1174 1175 1175 for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next) 1176 processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data), 0); 1177 } 1178 1179 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry, GstTocEntry* parent) 1180 { 1181 UNUSED_PARAM(parent); 1176 processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data)); 1177 } 1178 1179 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry) 1180 { 1182 1181 ASSERT(entry); 1183 1182 … … 1193 1192 GstTagList* tags = gst_toc_entry_get_tags(entry); 1194 1193 if (tags) { 1195 gchar* title = 0;1194 gchar* title = nullptr; 1196 1195 gst_tag_list_get_string(tags, GST_TAG_TITLE, &title); 1197 1196 if (title) { … … 1204 1203 1205 1204 for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next) 1206 processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data) , entry);1205 processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data)); 1207 1206 } 1208 1207 #endif … … 1220 1219 gdouble fillStatus = 100.0; 1221 1220 1222 gst_query_parse_buffering_range(query, 0, &start, &stop, 0);1221 gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr); 1223 1222 gst_query_unref(query); … … 1655 1654 1656 1655 const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations"); 1657 1656 const gchar* newLocation = 0;1656 const gchar* newLocation = nullptr; 1658 1657 1659 1658 if (!locations) { … … 1666 1665 if (!newLocation) { 1667 1666 if 
(m_mediaLocationCurrentIndex < 0) { 1668 m_mediaLocations = 0;1667 m_mediaLocations = nullptr; 1669 1668 return false; 1670 1669 } … … 1975 1974 GstElement* MediaPlayerPrivateGStreamer::createAudioSink() 1976 1975 { 1977 m_autoAudioSink = gst_element_factory_make("autoaudiosink", 0);1976 m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr); 1978 1977 if (!m_autoAudioSink) { 1979 1978 GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation"); … … 2116 2115 // the reason for using >= 1.4.2 instead of >= 1.4.0. 2117 2116 if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) { 2118 GstElement* scale = gst_element_factory_make("scaletempo", 0);2117 GstElement* scale = gst_element_factory_make("scaletempo", nullptr); 2119 2118 2120 2119 if (!scale) -
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h
r212889 r213445 162 162 #if ENABLE(VIDEO_TRACK) 163 163 void processTableOfContents(GstMessage*); 164 void processTableOfContentsEntry(GstTocEntry* , GstTocEntry* parent);164 void processTableOfContentsEntry(GstTocEntry*); 165 165 #endif 166 166 virtual bool doSeek(gint64 position, float rate, GstSeekFlags seekType); -
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
r213224 r213445 202 202 MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player) 203 203 : m_player(player) 204 , m_fpsSink( 0)204 , m_fpsSink(nullptr) 205 205 , m_readyState(MediaPlayer::HaveNothing) 206 206 , m_networkState(MediaPlayer::Empty) … … 210 210 , m_usingFallbackVideoSink(false) 211 211 #if ENABLE(LEGACY_ENCRYPTED_MEDIA) 212 , m_cdmSession( 0)212 , m_cdmSession(nullptr) 213 213 #endif 214 214 { … … 596 596 return; 597 597 598 g_object_set(m_volumeElement.get(), "mute", muted, NULL);598 g_object_set(m_volumeElement.get(), "mute", muted, nullptr); 599 599 } 600 600 … … 605 605 606 606 bool muted; 607 g_object_get(m_volumeElement.get(), "mute", &muted, NULL);607 g_object_get(m_volumeElement.get(), "mute", &muted, nullptr); 608 608 return muted; 609 609 } … … 615 615 616 616 gboolean muted; 617 g_object_get(m_volumeElement.get(), "mute", &muted, NULL);617 g_object_get(m_volumeElement.get(), "mute", &muted, nullptr); 618 618 m_player->muteChanged(static_cast<bool>(muted)); 619 619 } … … 1180 1180 if (!m_player->platformVolumeConfigurationRequired()) { 1181 1181 GST_DEBUG("Setting stream volume to %f", m_player->volume()); 1182 g_object_set(m_volumeElement.get(), "volume", m_player->volume(), NULL);1182 g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr); 1183 1183 } else 1184 1184 GST_DEBUG("Not setting stream volume, trusting system one"); 1185 1185 1186 1186 GST_DEBUG("Setting stream muted %d", m_player->muted()); 1187 g_object_set(m_volumeElement.get(), "mute", m_player->muted(), NULL);1187 g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr); 1188 1188 1189 1189 g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this); … … 1195 1195 guint64 decodedFrames = 0; 1196 1196 if (m_fpsSink) 1197 g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, NULL);1197 g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, 
nullptr); 1198 1198 return static_cast<unsigned>(decodedFrames); 1199 1199 } … … 1203 1203 guint64 framesDropped = 0; 1204 1204 if (m_fpsSink) 1205 g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, NULL);1205 g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr); 1206 1206 return static_cast<unsigned>(framesDropped); 1207 1207 } -
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerOwr.cpp
r210499 r213445 287 287 registrar([](MediaPlayer* player) { 288 288 return std::make_unique<MediaPlayerPrivateGStreamerOwr>(player); 289 }, getSupportedTypes, supportsType, 0, 0, 0, 0);289 }, getSupportedTypes, supportsType, nullptr, nullptr, nullptr, nullptr); 290 290 } 291 291 } -
trunk/Source/WebCore/platform/graphics/gstreamer/TextCombinerGStreamer.cpp
r160375 r213445 77 77 static void webkit_text_combiner_init(WebKitTextCombiner* combiner) 78 78 { 79 combiner->funnel = gst_element_factory_make("funnel", NULL);79 combiner->funnel = gst_element_factory_make("funnel", nullptr); 80 80 ASSERT(combiner->funnel); 81 81 … … 148 148 if (targetParent == combiner->funnel) { 149 149 /* Setup a WebVTT encoder */ 150 GstElement* encoder = gst_element_factory_make("webvttenc", NULL);150 GstElement* encoder = gst_element_factory_make("webvttenc", nullptr); 151 151 ASSERT(encoder); 152 152 … … 233 233 ASSERT(pad); 234 234 235 GstPad* ghostPad = GST_PAD(g_object_new(WEBKIT_TYPE_TEXT_COMBINER_PAD, "direction", gst_pad_get_direction(pad), NULL));235 GstPad* ghostPad = GST_PAD(g_object_new(WEBKIT_TYPE_TEXT_COMBINER_PAD, "direction", gst_pad_get_direction(pad), nullptr)); 236 236 ASSERT(ghostPad); 237 237 … … 296 296 GstElement* webkitTextCombinerNew() 297 297 { 298 return GST_ELEMENT(g_object_new(WEBKIT_TYPE_TEXT_COMBINER, 0));298 return GST_ELEMENT(g_object_new(WEBKIT_TYPE_TEXT_COMBINER, nullptr)); 299 299 } 300 300 -
trunk/Source/WebCore/platform/graphics/gstreamer/TextSinkGStreamer.cpp
r159730 r213445 96 96 GstElement* webkitTextSinkNew() 97 97 { 98 return GST_ELEMENT(g_object_new(WEBKIT_TYPE_TEXT_SINK, 0));98 return GST_ELEMENT(g_object_new(WEBKIT_TYPE_TEXT_SINK, nullptr)); 99 99 } 100 100 -
trunk/Source/WebCore/platform/graphics/gstreamer/TrackPrivateBaseGStreamer.cpp
r210319 r213445 90 90 GRefPtr<GstTagList> tags; 91 91 if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_pad.get()), "tags")) 92 g_object_get(m_pad.get(), "tags", &tags.outPtr(), NULL);92 g_object_get(m_pad.get(), "tags", &tags.outPtr(), nullptr); 93 93 else 94 94 tags = adoptGRef(gst_tag_list_new_empty()); … … 109 109 gboolean active = false; 110 110 if (m_pad && g_object_class_find_property(G_OBJECT_GET_CLASS(m_pad.get()), "active")) 111 g_object_get(m_pad.get(), "active", &active, NULL);111 g_object_get(m_pad.get(), "active", &active, nullptr); 112 112 113 113 setActive(active); -
trunk/Source/WebCore/platform/graphics/gstreamer/VideoSinkGStreamer.cpp
r213224 r213445 184 184 { 185 185 sink->priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, WEBKIT_TYPE_VIDEO_SINK, WebKitVideoSinkPrivate); 186 g_object_set(GST_BASE_SINK(sink), "enable-last-sample", FALSE, NULL);186 g_object_set(GST_BASE_SINK(sink), "enable-last-sample", FALSE, nullptr); 187 187 new (sink->priv) WebKitVideoSinkPrivate(); 188 188 } … … 342 342 { 343 343 GstCaps* caps; 344 gst_query_parse_allocation(query, &caps, 0);344 gst_query_parse_allocation(query, &caps, nullptr); 345 345 if (!caps) 346 346 return FALSE; … … 350 350 return FALSE; 351 351 352 gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, 0);353 gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, 0);354 gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, 0);352 gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, nullptr); 353 gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, nullptr); 354 gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, nullptr); 355 355 return TRUE; 356 356 } … … 409 409 GstElement* webkitVideoSinkNew() 410 410 { 411 return GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, 0));411 return GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, nullptr)); 412 412 } 413 413 -
trunk/Source/WebCore/platform/graphics/gstreamer/VideoTrackPrivateGStreamer.cpp
r207873 r213445 56 56 57 57 if (selected && m_playbin) 58 g_object_set(m_playbin.get(), "current-video", m_index, NULL);58 g_object_set(m_playbin.get(), "current-video", m_index, nullptr); 59 59 } 60 60 -
trunk/Source/WebCore/platform/graphics/gstreamer/WebKitWebSourceGStreamer.cpp
r212993 r213445 271 271 priv->createdInMainThread = isMainThread(); 272 272 273 priv->appsrc = GST_APP_SRC(gst_element_factory_make("appsrc", 0));273 priv->appsrc = GST_APP_SRC(gst_element_factory_make("appsrc", nullptr)); 274 274 if (!priv->appsrc) { 275 275 GST_ERROR_OBJECT(src, "Failed to create appsrc"); … … 288 288 gst_pad_set_query_function(priv->srcpad, webKitWebSrcQueryWithParent); 289 289 290 gst_app_src_set_callbacks(priv->appsrc, &appsrcCallbacks, src, 0);290 gst_app_src_set_callbacks(priv->appsrc, &appsrcCallbacks, src, nullptr); 291 291 gst_app_src_set_emit_signals(priv->appsrc, FALSE); 292 292 gst_app_src_set_stream_type(priv->appsrc, GST_APP_STREAM_TYPE_SEEKABLE); … … 309 309 // the queue is really empty. 310 310 // This might need tweaking for ports not using libsoup. 311 g_object_set(priv->appsrc, "min-percent", 20, NULL);312 313 gst_app_src_set_caps(priv->appsrc, 0);311 g_object_set(priv->appsrc, "min-percent", 20, nullptr); 312 313 gst_app_src_set_caps(priv->appsrc, nullptr); 314 314 gst_app_src_set_size(priv->appsrc, -1); 315 315 } … … 320 320 WebKitWebSrcPrivate* priv = src->priv; 321 321 322 priv->player = 0;322 priv->player = nullptr; 323 323 324 324 GST_CALL_PARENT(G_OBJECT_CLASS, dispose, (object)); … … 340 340 switch (propID) { 341 341 case PROP_LOCATION: 342 gst_uri_handler_set_uri(reinterpret_cast<GstURIHandler*>(src), g_value_get_string(value), 0);342 gst_uri_handler_set_uri(reinterpret_cast<GstURIHandler*>(src), g_value_get_string(value), nullptr); 343 343 break; 344 344 case PROP_KEEP_ALIVE: … … 434 434 priv->size = 0; 435 435 priv->requestedOffset = 0; 436 priv->player = 0;436 priv->player = nullptr; 437 437 } 438 438 … … 440 440 441 441 if (priv->appsrc) { 442 gst_app_src_set_caps(priv->appsrc, 0);442 gst_app_src_set_caps(priv->appsrc, nullptr); 443 443 if (!wasSeeking) 444 444 gst_app_src_set_size(priv->appsrc, -1); … … 616 616 gst_element_post_message(element, 617 617 gst_missing_element_message_new(element, "appsrc")); 618 
GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, ( 0), ("no appsrc"));618 GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (nullptr), ("no appsrc")); 619 619 return GST_STATE_CHANGE_FAILURE; 620 620 } … … 659 659 GstFormat format; 660 660 661 gst_query_parse_duration(query, &format, NULL);661 gst_query_parse_duration(query, &format, nullptr); 662 662 663 663 GST_DEBUG_OBJECT(src, "duration query in format %s", gst_format_get_name(format)); … … 711 711 const gchar* const* webKitWebSrcGetProtocols(GType) 712 712 { 713 static const char* protocols[] = {"http", "https", "blob", 0};713 static const char* protocols[] = {"http", "https", "blob", nullptr }; 714 714 return protocols; 715 715 } … … 937 937 gst_app_src_set_size(priv->appsrc, -1); 938 938 939 gst_app_src_set_caps(priv->appsrc, 0);939 gst_app_src_set_caps(priv->appsrc, nullptr); 940 940 } 941 941 … … 1004 1004 GstFlowReturn ret = gst_app_src_push_buffer(priv->appsrc, priv->buffer.leakRef()); 1005 1005 if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS) 1006 GST_ELEMENT_ERROR(src, CORE, FAILED, ( 0), (0));1006 GST_ELEMENT_ERROR(src, CORE, FAILED, (nullptr), (nullptr)); 1007 1007 } 1008 1008 … … 1183 1183 1184 1184 GST_ERROR_OBJECT(src, "Have failure: %s", error.localizedDescription().utf8().data()); 1185 GST_ELEMENT_ERROR(src, RESOURCE, FAILED, ("%s", error.localizedDescription().utf8().data()), ( 0));1185 GST_ELEMENT_ERROR(src, RESOURCE, FAILED, ("%s", error.localizedDescription().utf8().data()), (nullptr)); 1186 1186 gst_app_src_end_of_stream(src->priv->appsrc); 1187 1187 } … … 1198 1198 locker.unlock(); 1199 1199 1200 GST_ELEMENT_ERROR(src, RESOURCE, OPEN_READ, ("Access to \"%s\" was blocked", uri.get()), ( 0));1200 GST_ELEMENT_ERROR(src, RESOURCE, OPEN_READ, ("Access to \"%s\" was blocked", uri.get()), (nullptr)); 1201 1201 } 1202 1202 … … 1212 1212 locker.unlock(); 1213 1213 1214 GST_ELEMENT_ERROR(src, RESOURCE, OPEN_READ, ("Can't show \"%s\"", uri.get()), ( 0));1214 GST_ELEMENT_ERROR(src, RESOURCE, OPEN_READ, ("Can't 
show \"%s\"", uri.get()), (nullptr)); 1215 1215 } 1216 1216 -
trunk/Source/WebCore/platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp
r212285 r213445 594 594 const gchar* const* webKitMediaSrcGetProtocols(GType) 595 595 { 596 static const char* protocols[] = {"mediasourceblob", 0};596 static const char* protocols[] = {"mediasourceblob", nullptr }; 597 597 return protocols; 598 598 }
Note: See TracChangeset
for help on using the changeset viewer.