Changeset 236397 in webkit
- Timestamp:
- Sep 24, 2018 4:16:58 AM (6 years ago)
- Location:
- trunk/Source/WebCore
- Files:
-
- 4 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/WebCore/ChangeLog
2018-09-24  Thibault Saunier  <tsaunier@igalia.com>

        [WPE][GTK][WebRTC] Fix leaks in the libwebrtc Decoder and Encoder
        https://bugs.webkit.org/show_bug.cgi?id=189835

        Reviewed by Philippe Normand.

        - Rework memory management to avoid leaking encoded frames (basically use the same
          strategy as other libwebrtc encoder implementations).
        - Plug a GstCaps leak.

        * platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp:
        * platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp:
        * platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp:
        (WebCore::GStreamerVideoEncoder::InitEncode):
        (WebCore::GStreamerVideoEncoder::newSampleCallback):
        (WebCore::GStreamerVideoEncoder::Fragmentize):
        (WebCore::GStreamerVideoEncoder::SetRestrictionCaps):

2018-09-24  Philippe Normand  <pnormand@igalia.com>
trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp
r232589 r236397 24 24 #if ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER) 25 25 #include "GStreamerVideoCapturer.h" 26 27 #include <gst/app/gstappsink.h>28 26 29 27 namespace WebCore { -
trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp
r235230 r236397 160 160 161 161 // FIXME- Use a GstBufferPool. 162 auto buffer = gst_buffer_new_wrapped(g_memdup(inputImage._buffer, inputImage._size),163 inputImage._size) ;164 GST_BUFFER_DTS(buffer ) = (static_cast<guint64>(inputImage._timeStamp) * GST_MSECOND) - m_firstBufferDts;165 GST_BUFFER_PTS(buffer ) = (static_cast<guint64>(renderTimeMs) * GST_MSECOND) - m_firstBufferPts;166 m_dtsPtsMap[GST_BUFFER_PTS(buffer )] = inputImage._timeStamp;162 auto buffer = adoptGRef(gst_buffer_new_wrapped(g_memdup(inputImage._buffer, inputImage._size), 163 inputImage._size)); 164 GST_BUFFER_DTS(buffer.get()) = (static_cast<guint64>(inputImage._timeStamp) * GST_MSECOND) - m_firstBufferDts; 165 GST_BUFFER_PTS(buffer.get()) = (static_cast<guint64>(renderTimeMs) * GST_MSECOND) - m_firstBufferPts; 166 m_dtsPtsMap[GST_BUFFER_PTS(buffer.get())] = inputImage._timeStamp; 167 167 168 168 GST_LOG_OBJECT(pipeline(), "%ld Decoding: %" GST_PTR_FORMAT, renderTimeMs, buffer); 169 switch (gst_app_src_push_sample(GST_APP_SRC(m_src),170 gst_sample_new(buffer, GetCapsForFrame(inputImage), nullptr, nullptr))) {169 auto sample = adoptGRef(gst_sample_new(buffer.get(), GetCapsForFrame(inputImage), nullptr, nullptr)); 170 switch (gst_app_src_push_sample(GST_APP_SRC(m_src), sample.get())) { 171 171 case GST_FLOW_OK: 172 172 return WEBRTC_VIDEO_CODEC_OK; -
trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp
r235230 r236397 79 79 newBitrate, frameRate); 80 80 81 auto caps = gst_caps_make_writable(m_restrictionCaps.get());82 gst_caps_set_simple(caps , "framerate", GST_TYPE_FRACTION, frameRate, 1, nullptr);81 auto caps = adoptGRef(gst_caps_copy(m_restrictionCaps.get())); 82 gst_caps_set_simple(caps.get(), "framerate", GST_TYPE_FRACTION, frameRate, 1, nullptr); 83 83 84 84 SetRestrictionCaps(caps); … … 107 107 g_return_val_if_fail(codecSettings, WEBRTC_VIDEO_CODEC_ERR_PARAMETER); 108 108 g_return_val_if_fail(codecSettings->codecType == CodecType(), WEBRTC_VIDEO_CODEC_ERR_PARAMETER); 109 110 m_encodedFrame._size = codecSettings->width * codecSettings->height * 3; 111 m_encodedFrame._buffer = new uint8_t[m_encodedFrame._size]; 112 encoded_image_buffer_.reset(m_encodedFrame._buffer); 113 m_encodedFrame._completeFrame = true; 114 m_encodedFrame._encodedWidth = 0; 115 m_encodedFrame._encodedHeight = 0; 116 m_encodedFrame._length = 0; 109 117 110 118 m_pipeline = makeElement("pipeline"); … … 151 159 int32_t Release() final 152 160 { 161 m_encodedFrame._buffer = nullptr; 162 encoded_image_buffer_.reset(); 153 163 GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get()))); 154 164 gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr); … … 220 230 auto caps = gst_sample_get_caps(sample.get()); 221 231 222 webrtc::RTPFragmentationHeader *fragmentationInfo;223 auto frame = Fragmentize(buffer, &fragmentationInfo);224 if (! 
frame._size)232 webrtc::RTPFragmentationHeader fragmentationInfo; 233 Fragmentize(&m_encodedFrame, &encoded_image_buffer_, buffer, &fragmentationInfo); 234 if (!m_encodedFrame._size) 225 235 return GST_FLOW_OK; 226 236 227 237 gst_structure_get(gst_caps_get_structure(caps, 0), 228 "width", G_TYPE_INT, & frame._encodedWidth,229 "height", G_TYPE_INT, & frame._encodedHeight,238 "width", G_TYPE_INT, &m_encodedFrame._encodedWidth, 239 "height", G_TYPE_INT, &m_encodedFrame._encodedHeight, 230 240 nullptr); 231 241 232 frame._frameType = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT) ? webrtc::kVideoFrameDelta : webrtc::kVideoFrameKey;233 frame._completeFrame = true;234 frame.capture_time_ms_ = GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer));235 frame._timeStamp = GST_TIME_AS_MSECONDS(GST_BUFFER_DTS(buffer));242 m_encodedFrame._frameType = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT) ? webrtc::kVideoFrameDelta : webrtc::kVideoFrameKey; 243 m_encodedFrame._completeFrame = true; 244 m_encodedFrame.capture_time_ms_ = GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer)); 245 m_encodedFrame._timeStamp = GST_TIME_AS_MSECONDS(GST_BUFFER_DTS(buffer)); 236 246 GST_LOG_OBJECT(m_pipeline.get(), "Got buffer TS: %" GST_TIME_FORMAT, GST_TIME_ARGS(GST_BUFFER_PTS(buffer))); 237 247 … … 239 249 PopulateCodecSpecific(&codecSpecifiInfos, buffer); 240 250 241 webrtc::EncodedImageCallback::Result result = m_imageReadyCb->OnEncodedImage( frame, &codecSpecifiInfos,fragmentationInfo);251 webrtc::EncodedImageCallback::Result result = m_imageReadyCb->OnEncodedImage(m_encodedFrame, &codecSpecifiInfos, &fragmentationInfo); 242 252 if (result.error != webrtc::EncodedImageCallback::Result::OK) { 243 253 GST_ELEMENT_ERROR(m_pipeline.get(), LIBRARY, FAILED, (nullptr), … … 350 360 virtual void PopulateCodecSpecific(webrtc::CodecSpecificInfo*, GstBuffer*) = 0; 351 361 352 virtual webrtc::EncodedImage Fragmentize(GstBuffer* buffer, webrtc::RTPFragmentationHeader** outFragmentationInfo) 362 
virtual void Fragmentize(webrtc::EncodedImage* encodedImage, std::unique_ptr<uint8_t[]>* encoded_image_buffer, GstBuffer* buffer, 363 webrtc::RTPFragmentationHeader* fragmentationInfo) 353 364 { 354 365 GstMapInfo map; 355 366 356 367 gst_buffer_map(buffer, &map, GST_MAP_READ); 357 webrtc::EncodedImage frame(map.data, map.size, map.size); 368 if (encodedImage->_size < map.size) { 369 encodedImage->_size = map.size; 370 encodedImage->_buffer = new uint8_t[encodedImage->_size]; 371 encoded_image_buffer->reset(encodedImage->_buffer); 372 memcpy(encodedImage->_buffer, map.data, map.size); 373 } 358 374 gst_buffer_unmap(buffer, &map); 359 360 // No fragmentation by default.361 webrtc::RTPFragmentationHeader* fragmentationInfo = new webrtc::RTPFragmentationHeader();362 375 363 376 fragmentationInfo->VerifyAndAllocateFragmentationHeader(1); … … 366 379 fragmentationInfo->fragmentationPlType[0] = 0; 367 380 fragmentationInfo->fragmentationTimeDiff[0] = 0; 368 369 *outFragmentationInfo = fragmentationInfo;370 371 return frame;372 381 } 373 382 … … 381 390 virtual const gchar* Name() = 0; 382 391 383 void SetRestrictionCaps(G stCaps*caps)384 { 385 if (caps && m_profile.get() && gst_caps_is_equal(m_restrictionCaps.get(), caps ))392 void SetRestrictionCaps(GRefPtr<GstCaps> caps) 393 { 394 if (caps && m_profile.get() && gst_caps_is_equal(m_restrictionCaps.get(), caps.get())) 386 395 g_object_set(m_profile.get(), "restriction-caps", caps, nullptr); 387 396 … … 404 413 GRefPtr<GstEncodingProfile> m_profile; 405 414 BitrateSetter m_bitrateSetter; 415 webrtc::EncodedImage m_encodedFrame; 416 std::unique_ptr<uint8_t[]> encoded_image_buffer_; 406 417 }; 407 418 … … 421 432 422 433 // FIXME - MT. safety! 
423 webrtc::EncodedImage Fragmentize(GstBuffer* gstbuffer, webrtc::RTPFragmentationHeader** outFragmentationInfo) final 434 void Fragmentize(webrtc::EncodedImage* encodedImage, std::unique_ptr<uint8_t[]>* encoded_image_buffer, 435 GstBuffer* gstbuffer, webrtc::RTPFragmentationHeader* fragmentationHeader) final 424 436 { 425 437 GstMapInfo map; … … 431 443 432 444 std::vector<GstH264NalUnit> nals; 433 webrtc::EncodedImage encodedImage;434 445 435 446 const uint8_t startCode[4] = { 0, 0, 0, 1 }; … … 448 459 } 449 460 450 encodedImage._size = requiredSize; 451 encodedImage._buffer = new uint8_t[encodedImage._size]; 461 if (encodedImage->_size < requiredSize) { 462 encodedImage->_size = requiredSize; 463 encodedImage->_buffer = new uint8_t[encodedImage->_size]; 464 encoded_image_buffer->reset(encodedImage->_buffer); 465 } 466 452 467 // Iterate nal units and fill the Fragmentation info. 453 webrtc::RTPFragmentationHeader* fragmentationHeader = new webrtc::RTPFragmentationHeader();454 468 fragmentationHeader->VerifyAndAllocateFragmentationHeader(nals.size()); 455 469 size_t fragmentIndex = 0; 456 encodedImage ._length = 0;470 encodedImage->_length = 0; 457 471 for (std::vector<GstH264NalUnit>::iterator nal = nals.begin(); nal != nals.end(); ++nal, fragmentIndex++) { 458 472 … … 465 479 fragmentationHeader->fragmentationLength[fragmentIndex] = nal->size; 466 480 467 memcpy(encodedImage ._buffer + encodedImage._length, &map.data[nal->sc_offset],481 memcpy(encodedImage->_buffer + encodedImage->_length, &map.data[nal->sc_offset], 468 482 sizeof(startCode) + nal->size); 469 encodedImage._length += nal->size + sizeof(startCode); 470 } 471 472 *outFragmentationInfo = fragmentationHeader; 483 encodedImage->_length += nal->size + sizeof(startCode); 484 } 485 473 486 gst_buffer_unmap(gstbuffer, &map); 474 return encodedImage;475 487 } 476 488
Note: See TracChangeset
for help on using the changeset viewer.