Changeset 288025 in webkit
- Timestamp:
- Jan 14, 2022 12:41:15 PM (6 months ago)
- Location:
- trunk
- Files:
-
- 2 added
- 4 edited
-
LayoutTests/ChangeLog (modified) (1 diff)
-
LayoutTests/fast/mediastream/getUserMedia-to-canvas-expected.txt (added)
-
LayoutTests/fast/mediastream/getUserMedia-to-canvas.html (added)
-
LayoutTests/webrtc/routines.js (modified) (1 diff)
-
Source/WebCore/ChangeLog (modified) (1 diff)
-
Source/WebCore/html/canvas/WebGLRenderingContextBase.cpp (modified) (2 diffs)
Legend:
- Unmodified
- Added
- Removed
-
trunk/LayoutTests/ChangeLog
r288024 r288025 1 2022-01-14 Kimmo Kinnunen <kkinnunen@apple.com> 2 3 gl.texImage2D upload of getUserMedia streams via <video> element fails 4 https://bugs.webkit.org/show_bug.cgi?id=230617 5 <rdar://problem/83407577> 6 7 Reviewed by Youenn Fablet. 8 9 * fast/mediastream/getUserMedia-to-canvas-expected.txt: Added. 10 * fast/mediastream/getUserMedia-to-canvas.html: Added. 11 Add a test to test getting video frame to 2DContext and 12 WebGL canvas elements. 13 14 * webrtc/routines.js: 15 Add a function to assert that ImageData contains 16 the simulated mock camera image. This is useful 17 in verifying that the image is exactly as 18 expected. The test tests only the default orientation. 19 Later changes will update the test to address the 20 camera rotation. 21 1 22 2022-01-14 Chris Dumez <cdumez@apple.com> 2 23 -
trunk/LayoutTests/webrtc/routines.js
r267504 r288025 298 298 }).join('\r\n'); 299 299 } 300 301 // Returns Uint8Array[4] of RGBA color. 302 // p: [x, y] of 0..1 range. 303 function getImageDataPixel(imageData, p) 304 { 305 let xi = Math.floor(p[0] * imageData.width); 306 let yi = Math.floor(p[1] * imageData.height); 307 let i = (yi * imageData.width + xi) * 4; 308 return imageData.data.slice(i, i + 4); 309 } 310 311 // Asserts that ImageData instance contains mock camera image rendered by MiniBrowser and WebKitTestRunner. 312 // Obtain full camera image of size `width`: 313 // await navigator.mediaDevices.getUserMedia({ video: { width: { exact: width } } }); 314 function assertImageDataContainsMockCameraImage(imageData) 315 { 316 const white = [ 255, 255, 255, 255 ]; 317 const yellow = [ 255, 255, 0, 255 ]; 318 const cyan = [ 0, 255, 255, 255 ]; 319 const lightGreen = [ 0, 128, 0, 255 ]; 320 321 let err = 3; 322 assert_array_approx_equals(getImageDataPixel(imageData, [ 0.04, 0.7 ]), white, err, "white rect not found"); 323 assert_array_approx_equals(getImageDataPixel(imageData, [ 0.08, 0.7 ]), yellow, err, "yellow rect not found"); 324 assert_array_approx_equals(getImageDataPixel(imageData, [ 0.12, 0.7 ]), cyan, err, "cyan rect not found"); 325 assert_array_approx_equals(getImageDataPixel(imageData, [ 0.16, 0.7 ]), lightGreen, err, "light green rect not found"); 326 } -
trunk/Source/WebCore/ChangeLog
r288024 r288025 1 2022-01-14 Kimmo Kinnunen <kkinnunen@apple.com> 2 3 gl.texImage2D upload of getUserMedia streams via <video> element fails 4 https://bugs.webkit.org/show_bug.cgi?id=230617 5 <rdar://problem/83407577> 6 7 Reviewed by Youenn Fablet. 8 9 Fix MSE camera to WebGL texture uploads. 10 Partially revert r280963 for Cocoa MediaPlayer implementations 11 that do not have nativeImageForCurrentTime / pixelBufferForCurrentTime. 12 Turns out MSE does not have these implemented, so currently fall back 13 to the painting path. 14 15 Test: fast/mediastream/getUserMedia-to-canvas.html 16 17 * html/canvas/WebGLRenderingContextBase.cpp: 18 (WebCore::WebGLRenderingContextBase::videoFrameToImage): 19 1 20 2022-01-14 Chris Dumez <cdumez@apple.com> 2 21 -
trunk/Source/WebCore/html/canvas/WebGLRenderingContextBase.cpp
r287984 r288025 5889 5889 RefPtr<Image> WebGLRenderingContextBase::videoFrameToImage(HTMLVideoElement* video, BackingStoreCopy backingStoreCopy, const char* functionName) 5890 5890 { 5891 ImageBuffer* imageBuffer = nullptr; 5891 5892 // FIXME: When texImage2D is passed an HTMLVideoElement, implementations 5892 5893 // interoperably use the native RGB color values of the video frame (e.g. … … 5903 5904 // Currently we might be missing an image due to MSE not being able to provide the first requested frame. 5904 5905 // https://bugs.webkit.org/show_bug.cgi?id=228997 5905 if (!nativeImage) 5906 return nullptr; 5907 IntSize imageSize = nativeImage->size(); 5908 if (imageSize.isEmpty()) { 5909 synthesizeGLError(GraphicsContextGL::INVALID_VALUE, functionName, "video visible size is empty"); 5910 return nullptr; 5911 } 5912 FloatRect imageRect { { }, imageSize }; 5913 ImageBuffer* imageBuffer = m_generatedImageCache.imageBuffer(imageSize, nativeImage->colorSpace(), CompositeOperator::Copy); 5906 if (nativeImage) { 5907 IntSize imageSize = nativeImage->size(); 5908 if (imageSize.isEmpty()) { 5909 synthesizeGLError(GraphicsContextGL::INVALID_VALUE, functionName, "video visible size is empty"); 5910 return nullptr; 5911 } 5912 FloatRect imageRect { { }, imageSize }; 5913 ImageBuffer* imageBuffer = m_generatedImageCache.imageBuffer(imageSize, nativeImage->colorSpace(), CompositeOperator::Copy); 5914 if (!imageBuffer) { 5915 synthesizeGLError(GraphicsContextGL::OUT_OF_MEMORY, functionName, "out of memory"); 5916 return nullptr; 5917 } 5918 imageBuffer->context().drawNativeImage(*nativeImage, imageRect.size(), imageRect, imageRect, CompositeOperator::Copy); 5919 } 5920 #endif 5914 5921 if (!imageBuffer) { 5915 synthesizeGLError(GraphicsContextGL::OUT_OF_MEMORY, functionName, "out of memory"); 5916 return nullptr; 5917 } 5918 imageBuffer->context().drawNativeImage(*nativeImage, imageRect.size(), imageRect, imageRect, CompositeOperator::Copy); 5919 #else 5920 // This is a legacy 
code path that produces incompatible texture size when the 5921 // video visible size is different to the natural size. This should be removed 5922 // once all platforms implement nativeImageForCurrentTime(). 5923 IntSize videoSize { static_cast<int>(video->videoWidth()), static_cast<int>(video->videoHeight()) }; 5924 auto colorSpace = video->colorSpace(); 5925 if (!colorSpace) 5926 colorSpace = DestinationColorSpace::SRGB(); 5927 ImageBuffer* imageBuffer = m_generatedImageCache.imageBuffer(videoSize, *colorSpace); 5928 if (!imageBuffer) { 5929 synthesizeGLError(GraphicsContextGL::OUT_OF_MEMORY, functionName, "out of memory"); 5930 return nullptr; 5931 } 5932 video->paintCurrentFrameInContext(imageBuffer->context(), { { }, videoSize }); 5933 #endif 5922 // This is a legacy code path that produces incompatible texture size when the 5923 // video visible size is different to the natural size. This should be removed 5924 // once all platforms implement nativeImageForCurrentTime(). 5925 IntSize videoSize { static_cast<int>(video->videoWidth()), static_cast<int>(video->videoHeight()) }; 5926 auto colorSpace = video->colorSpace(); 5927 if (!colorSpace) 5928 colorSpace = DestinationColorSpace::SRGB(); 5929 imageBuffer = m_generatedImageCache.imageBuffer(videoSize, *colorSpace); 5930 if (!imageBuffer) { 5931 synthesizeGLError(GraphicsContextGL::OUT_OF_MEMORY, functionName, "out of memory"); 5932 return nullptr; 5933 } 5934 video->paintCurrentFrameInContext(imageBuffer->context(), { { }, videoSize }); 5935 } 5934 5936 RefPtr<Image> image = imageBuffer->copyImage(backingStoreCopy); 5935 5937 if (!image) {
Note: See TracChangeset
for help on using the changeset viewer.