Changeset 228641 in webkit
- Timestamp:
- Feb 19, 2018 2:49:56 AM (6 years ago)
- Location:
- trunk/Source/WebCore
- Files:
- 4 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/WebCore/ChangeLog
r228639 r228641 1 2018-02-19 Philippe Normand <pnormand@igalia.com> 2 3 [GTK][GStreamer] Replaying a webm video twice causes the video to stop getting rendered 4 https://bugs.webkit.org/show_bug.cgi?id=176789 5 6 Reviewed by Xabier Rodriguez-Calvar. 7 8 Ensure the wrapped GstGLContext is set when the pipeline goes from 9 READY to PAUSED state. This is a workaround for 10 https://bugzilla.gnome.org/show_bug.cgi?id=757933. 11 12 * platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp: 13 (WebCore::MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer): Add debug statement. 14 (WebCore::MediaPlayerPrivateGStreamer::readyTimerFired): Ditto. 15 (WebCore::MediaPlayerPrivateGStreamer::changePipelineState): 16 Ensure the wrapped GstGLContext is set when the pipeline goes from 17 READY to PAUSED state. 18 (WebCore::MediaPlayerPrivateGStreamer::didEnd): Add debug statement. 19 * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp: 20 Change the requestGLContext to be a method instead of a static 21 function. Being a static function was a requirement for the now-removed OpenWebRTC player. 22 (WebCore::MediaPlayerPrivateGStreamerBase::handleSyncMessage): Add 23 debug statement, fix requestGLContext usage. 24 (WebCore::MediaPlayerPrivateGStreamerBase::requestGLContext): Refactor as method. 25 (WebCore::MediaPlayerPrivateGStreamerBase::flushCurrentBuffer): Fix requestGLContext usage. 26 (WebCore::MediaPlayerPrivateGStreamerBase::createVideoSinkGL): Remove fixed FIXME. 27 (WebCore::MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext): 28 Set display and app wrapped contexts on the video sink. 29 * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h: 30 1 31 2018-02-19 Xabier Rodriguez Calvar <calvaris@igalia.com> 2 32 -
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
r228639 r228641 166 166 MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer() 167 167 { 168 GST_DEBUG("Disposing player"); 169 168 170 #if ENABLE(VIDEO_TRACK) 169 171 for (auto& track : m_audioTracks.values()) … … 346 348 void MediaPlayerPrivateGStreamer::readyTimerFired() 347 349 { 350 GST_DEBUG("In READY for too long. Releasing pipeline resources."); 348 351 changePipelineState(GST_STATE_NULL); 349 352 } … … 365 368 GST_DEBUG("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState), 366 369 gst_element_state_get_name(currentState), gst_element_state_get_name(pending)); 370 371 #if USE(GSTREAMER_GL) 372 if (currentState == GST_STATE_READY && newState == GST_STATE_PAUSED) 373 ensureGLVideoSinkContext(); 374 #endif 367 375 368 376 GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState); … … 2071 2079 void MediaPlayerPrivateGStreamer::didEnd() 2072 2080 { 2081 GST_INFO("Playback ended"); 2082 2073 2083 // Synchronize position and duration values to not confuse the 2074 2084 // HTMLMediaElement. In some cases like reverse playback the -
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
r228590 r228641 323 323 const gchar* contextType; 324 324 gst_message_parse_context_type(message, &contextType); 325 GST_DEBUG("Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message)); 325 326 326 327 #if USE(GSTREAMER_GL) 327 GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType , this));328 GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType)); 328 329 if (elementContext) { 329 330 gst_element_set_context(GST_ELEMENT(message->src), elementContext.get()); … … 408 409 409 410 #if USE(GSTREAMER_GL) 410 GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const gchar* contextType, MediaPlayerPrivateGStreamerBase* player)411 { 412 if (! player->ensureGstGLContext())411 GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const char* contextType) 412 { 413 if (!ensureGstGLContext()) 413 414 return nullptr; 414 415 415 416 if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) { 416 417 GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE); 417 gst_context_set_gl_display(displayContext, player->gstGLDisplay());418 gst_context_set_gl_display(displayContext, gstGLDisplay()); 418 419 return displayContext; 419 420 } … … 423 424 GstStructure* structure = gst_context_writable_structure(appContext); 424 425 #if GST_CHECK_VERSION(1, 11, 0) 425 gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, player->gstGLContext(), nullptr);426 gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext(), nullptr); 426 427 #else 427 gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, player->gstGLContext(), nullptr);428 gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext(), nullptr); 428 429 #endif 429 430 return appContext; … … 841 842 void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer() 842 843 { 844 GST_DEBUG("Flushing video sample"); 843 845 WTF::GMutexLocker<GMutex> lock(m_sampleMutex); 844 846 
m_sample.clear(); … … 1018 1020 GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL() 1019 1021 { 1020 // FIXME: Currently it's not possible to get the video frames and caps using this approach until1021 // the pipeline gets into playing state. Due to this, trying to grab a frame and painting it by some1022 // other mean (canvas or webgl) before playing state can result in a crash.1023 // This is being handled in https://bugs.webkit.org/show_bug.cgi?id=159460.1024 1022 if (!webkitGstCheckVersion(1, 8, 0)) 1025 1023 return nullptr; … … 1061 1059 } 1062 1060 return videoSink; 1061 } 1062 1063 void MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext() 1064 { 1065 if (!m_glDisplayElementContext) 1066 m_glDisplayElementContext = adoptGRef(requestGLContext(GST_GL_DISPLAY_CONTEXT_TYPE)); 1067 1068 if (m_glDisplayElementContext) 1069 gst_element_set_context(m_videoSink.get(), m_glDisplayElementContext.get()); 1070 1071 if (!m_glAppElementContext) 1072 m_glAppElementContext = adoptGRef(requestGLContext("gst.gl.app_context")); 1073 1074 if (m_glAppElementContext) 1075 gst_element_set_context(m_videoSink.get(), m_glAppElementContext.get()); 1063 1076 } 1064 1077 #endif // USE(GSTREAMER_GL) -
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h
r228617 r228641 78 78 #if USE(GSTREAMER_GL) 79 79 bool ensureGstGLContext(); 80 static GstContext* requestGLContext(const gchar* contextType, MediaPlayerPrivateGStreamerBase*);80 GstContext* requestGLContext(const char* contextType); 81 81 #endif 82 82 static bool initializeGStreamerAndRegisterWebKitElements(); … … 171 171 GstGLContext* gstGLContext() const { return m_glContext.get(); } 172 172 GstGLDisplay* gstGLDisplay() const { return m_glDisplay.get(); } 173 void ensureGLVideoSinkContext(); 173 174 #endif 174 175 … … 245 246 GRefPtr<GstGLContext> m_glContext; 246 247 GRefPtr<GstGLDisplay> m_glDisplay; 248 GRefPtr<GstContext> m_glDisplayElementContext; 249 GRefPtr<GstContext> m_glAppElementContext; 247 250 std::unique_ptr<VideoTextureCopierGStreamer> m_videoTextureCopier; 248 251 #endif
Note: See TracChangeset for help on using the changeset viewer.