Changeset 248464 in webkit
- Timestamp:
- Aug 9, 2019 2:36:45 AM (5 years ago)
- Location:
- trunk/Source/WebCore
- Files:
-
- 7 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/WebCore/ChangeLog
r248463 r248464 1 2019-08-09 Víctor Manuel Jáquez Leal <vjaquez@igalia.com> 2 3 [GL][GStreamer] activate wrapped shared context 4 https://bugs.webkit.org/show_bug.cgi?id=196966 5 6 Reviewed by Žan Doberšek. 7 8 This patch consists of four parts: 9 10 1\ When the media player is instantiated, and it is intended to 11 render textures, it will create a wrapped object of the 12 application's GL context, and in order to populate the wrapped 13 object with the GL vtable, the context has to be current. Thus, 14 this patch makes current the shared WebKit application context, 15 and populates the wrapped GstGLContext by activating it and filling 16 it in. Afterwards, the wrapped context is deactivated. 17 18 2\ This patch makes GL texture use the RGBA color space, thus the 19 color transformation is done in GStreamer, and no further color 20 transformation is required in WebKit. 21 22 3\ Since it is not necessary to modify behavior if the decoder is 23 imxvpudecoder, its identification and label were removed. 24 25 4\ As only RGBA is used, the old color conversions when rendering 26 using Cairo (fallback) were changed to convert the RGBA, as in 27 GStreamer's format, to ARGB32, as in Cairo format -which depends 28 on endianness. 29 30 No new tests because there is no behavior change. 31 32 * platform/graphics/gstreamer/ImageGStreamerCairo.cpp: 33 (WebCore::ImageGStreamer::ImageGStreamer): Only convert GStreamer 34 RGBA to Cairo RGB32. 35 * platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp: 36 (WebCore::MediaPlayerPrivateGStreamer::createGSTPlayBin): Removes 37 the IMX VPU identification. 38 * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp: 39 (WebCore::MediaPlayerPrivateGStreamerBase::ensureGstGLContext): 40 Initializes the wrapped GL Context. 41 (WebCore::MediaPlayerPrivateGStreamerBase::updateTextureMapperFlags): 42 Removes frame's color conversion. 
43 (WebCore::MediaPlayerPrivateGStreamerBase::createVideoSinkGL): 44 Instead of parsing a string, the GstCaps are created manually, and 45 it is set to appsink, rather than a filtered linking. 46 * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h: 47 Removes ImxVPU enumeration value. 48 * platform/graphics/gstreamer/VideoTextureCopierGStreamer.cpp: 49 Adds NoConvert option to texture copier, setting an identity 50 matrix. 51 (WebCore::VideoTextureCopierGStreamer::updateColorConversionMatrix): 52 * platform/graphics/gstreamer/VideoTextureCopierGStreamer.h: Adds 53 NoConvert enumeration value. 54 1 55 2019-08-09 Ryosuke Niwa <rniwa@webkit.org> 2 56 -
trunk/Source/WebCore/platform/graphics/gstreamer/ImageGStreamerCairo.cpp
r235115 r248464 58 58 RefPtr<cairo_surface_t> surface; 59 59 cairo_format_t cairoFormat; 60 #if G_BYTE_ORDER == G_LITTLE_ENDIAN 61 cairoFormat = (GST_VIDEO_FRAME_FORMAT(&m_videoFrame) == GST_VIDEO_FORMAT_BGRA) ? CAIRO_FORMAT_ARGB32 : CAIRO_FORMAT_RGB24; 62 #else 63 cairoFormat = (GST_VIDEO_FRAME_FORMAT(&m_videoFrame) == GST_VIDEO_FORMAT_ARGB) ? CAIRO_FORMAT_ARGB32 : CAIRO_FORMAT_RGB24; 64 #endif 60 cairoFormat = (GST_VIDEO_FRAME_FORMAT(&m_videoFrame) == GST_VIDEO_FORMAT_RGBA) ? CAIRO_FORMAT_ARGB32 : CAIRO_FORMAT_RGB24; 65 61 66 62 // GStreamer doesn't use premultiplied alpha, but cairo does. So if the video format has an alpha component … … 76 72 for (int x = 0; x < width; x++) { 77 73 for (int y = 0; y < height; y++) { 74 unsigned short alpha = bufferData[3]; 78 75 #if G_BYTE_ORDER == G_LITTLE_ENDIAN 79 // Video frames use BGRA in little endian. 80 unsigned short alpha = bufferData[3]; 81 surfacePixel[0] = (bufferData[0] * alpha + 128) / 255; 76 // Video frames use RGBA in little endian. 77 surfacePixel[0] = (bufferData[2] * alpha + 128) / 255; 82 78 surfacePixel[1] = (bufferData[1] * alpha + 128) / 255; 83 surfacePixel[2] = (bufferData[ 2] * alpha + 128) / 255;79 surfacePixel[2] = (bufferData[0] * alpha + 128) / 255; 84 80 surfacePixel[3] = alpha; 85 81 #else 86 // Video frames use ARGB in big endian. 87 unsigned short alpha = bufferData[0]; 82 // Video frames use RGBA in big endian. 88 83 surfacePixel[0] = alpha; 89 surfacePixel[1] = (bufferData[ 1] * alpha + 128) / 255;90 surfacePixel[2] = (bufferData[ 2] * alpha + 128) / 255;91 surfacePixel[3] = (bufferData[ 3] * alpha + 128) / 255;84 surfacePixel[1] = (bufferData[0] * alpha + 128) / 255; 85 surfacePixel[2] = (bufferData[1] * alpha + 128) / 255; 86 surfacePixel[3] = (bufferData[2] * alpha + 128) / 255; 92 87 #endif 93 88 bufferData += 4; -
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp
r248405 r248464 2436 2436 if (g_str_has_prefix(elementName.get(), "v4l2")) 2437 2437 player->m_videoDecoderPlatform = WebKitGstVideoDecoderPlatform::Video4Linux; 2438 else if (g_str_has_prefix(elementName.get(), "imxvpudecoder"))2439 player->m_videoDecoderPlatform = WebKitGstVideoDecoderPlatform::ImxVPU;2440 2438 2441 2439 #if USE(TEXTURE_MAPPER_GL) -
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
r248040 r248464 55 55 56 56 #if USE(GSTREAMER_GL) 57 #if G_BYTE_ORDER == G_LITTLE_ENDIAN 58 #define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }" 59 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA 60 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA 61 #else 62 #define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }" 63 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA 64 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA 65 #endif 57 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::NoConvert 66 58 67 59 #include <gst/app/gstappsink.h> 68 69 60 70 61 #include "GLContext.h" … … 476 467 m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI); 477 468 469 // Activate and fill the GStreamer wrapped context with WebKit's shared one. 
470 auto previousActiveContext = GLContext::current(); 471 webkitContext->makeContextCurrent(); 472 if (gst_gl_context_activate(m_glContext.get(), TRUE)) { 473 GUniqueOutPtr<GError> error; 474 if (!gst_gl_context_fill_info(m_glContext.get(), &error.outPtr())) 475 GST_WARNING("Failed to fill in GStreamer context: %s", error->message); 476 gst_gl_context_activate(m_glContext.get(), FALSE); 477 } else 478 GST_WARNING("Failed to activate GStreamer context %" GST_PTR_FORMAT, m_glContext.get()); 479 if (previousActiveContext) 480 previousActiveContext->makeContextCurrent(); 481 478 482 return true; 479 483 } … … 994 998 break; 995 999 } 996 997 #if USE(GSTREAMER_GL)998 // When the imxvpudecoder is used, the texture sampling of the999 // directviv-uploaded texture returns an RGB value, so there's no need to1000 // convert it.1001 if (m_videoDecoderPlatform != WebKitGstVideoDecoderPlatform::ImxVPU)1002 m_textureMapperFlags |= TEXTURE_MAPPER_COLOR_CONVERT_FLAG;1003 #endif1004 1000 } 1005 1001 #endif … … 1066 1062 GstElement* appsink = createGLAppSink(); 1067 1063 1064 // glsinkbin is not used because it includes glcolorconvert which only processes RGBA, 1065 // but in the future it would be possible to render YUV formats too: 1066 // https://bugs.webkit.org/show_bug.cgi?id=132869 1067 1068 1068 if (!appsink || !upload || !colorconvert) { 1069 1069 GST_WARNING("Failed to create GstGL elements"); … … 1083 1083 gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr); 1084 1084 1085 GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT)); 1086 1087 result &= gst_element_link_pads(upload, "src", colorconvert, "sink"); 1088 result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get()); 1085 GRefPtr<GstCaps> caps = adoptGRef(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "RGBA", nullptr)); 1086 gst_caps_set_features(caps.get(), 
0, gst_caps_features_new(GST_CAPS_FEATURE_MEMORY_GL_MEMORY, nullptr)); 1087 g_object_set(appsink, "caps", caps.get(), nullptr); 1088 1089 result &= gst_element_link_many(upload, colorconvert, appsink, nullptr); 1089 1090 1090 1091 GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink")); -
trunk/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h
r247800 r248464 308 308 #endif 309 309 310 enum class WebKitGstVideoDecoderPlatform { ImxVPU,Video4Linux };310 enum class WebKitGstVideoDecoderPlatform { Video4Linux }; 311 311 Optional<WebKitGstVideoDecoderPlatform> m_videoDecoderPlatform; 312 312 }; -
trunk/Source/WebCore/platform/graphics/gstreamer/VideoTextureCopierGStreamer.cpp
r228590 r248464 84 84 case ColorConversion::ConvertARGBToRGBA: 85 85 m_colorConversionMatrix.setMatrix(0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0); 86 break; 87 case ColorConversion::NoConvert: 88 m_colorConversionMatrix.makeIdentity(); 86 89 break; 87 90 default: -
trunk/Source/WebCore/platform/graphics/gstreamer/VideoTextureCopierGStreamer.h
r228590 r248464 37 37 enum class ColorConversion { 38 38 ConvertBGRAToRGBA, 39 ConvertARGBToRGBA 39 ConvertARGBToRGBA, 40 NoConvert, 40 41 }; 41 42
Note: See TracChangeset
for help on using the changeset viewer.