Changeset 236397 in webkit


Ignore:
Timestamp:
Sep 24, 2018 4:16:58 AM (6 years ago)
Author:
commit-queue@webkit.org
Message:

[WPE][GTK][WebRTC] Fix leaks in the libwebrtc Decoder and Encoder
https://bugs.webkit.org/show_bug.cgi?id=189835

Patch by Thibault Saunier <tsaunier@igalia.com> on 2018-09-24
Reviewed by Philippe Normand.

  • Rework memory management to avoid leaking encoded frames (basically use the same strategy as other libwebrtc encoder implementations).
  • Plug a GstCaps leak.
  • platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp:
  • platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp:
  • platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp:

(WebCore::GStreamerVideoEncoder::InitEncode):
(WebCore::GStreamerVideoEncoder::newSampleCallback):
(WebCore::GStreamerVideoEncoder::Fragmentize):
(WebCore::GStreamerVideoEncoder::SetRestrictionCaps):

Location:
trunk/Source/WebCore
Files:
4 edited

Legend:

Unmodified
Added
Removed
  • trunk/Source/WebCore/ChangeLog

    r236396 r236397  
     12018-09-24  Thibault Saunier  <tsaunier@igalia.com>
     2
     3        [WPE][GTK][WebRTC] Fix leaks in the libwebrtc Decoder and Encoder
     4        https://bugs.webkit.org/show_bug.cgi?id=189835
     5
     6        Reviewed by Philippe Normand.
     7
     8        - Rework memory management to avoid leaking encoded frames (basically use the same
     9          strategy as other libwebrtc encoder implementation).
     10        - Plug a GstCaps leak.
     11
     12        * platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp:
     13        * platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp:
     14        * platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp:
     15        (WebCore::GStreamerVideoEncoder::InitEncode):
     16        (WebCore::GStreamerVideoEncoder::newSampleCallback):
     17        (WebCore::GStreamerVideoEncoder::Fragmentize):
     18        (WebCore::GStreamerVideoEncoder::SetRestrictionCaps):
     19
    1202018-09-24  Philippe Normand  <pnormand@igalia.com>
    221
  • trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp

    r232589 r236397  
    2424#if ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)
    2525#include "GStreamerVideoCapturer.h"
    26 
    27 #include <gst/app/gstappsink.h>
    2826
    2927namespace WebCore {
  • trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp

    r235230 r236397  
    160160
    161161        // FIXME- Use a GstBufferPool.
    162         auto buffer = gst_buffer_new_wrapped(g_memdup(inputImage._buffer, inputImage._size),
    163             inputImage._size);
    164         GST_BUFFER_DTS(buffer) = (static_cast<guint64>(inputImage._timeStamp) * GST_MSECOND) - m_firstBufferDts;
    165         GST_BUFFER_PTS(buffer) = (static_cast<guint64>(renderTimeMs) * GST_MSECOND) - m_firstBufferPts;
    166         m_dtsPtsMap[GST_BUFFER_PTS(buffer)] = inputImage._timeStamp;
     162        auto buffer = adoptGRef(gst_buffer_new_wrapped(g_memdup(inputImage._buffer, inputImage._size),
     163            inputImage._size));
     164        GST_BUFFER_DTS(buffer.get()) = (static_cast<guint64>(inputImage._timeStamp) * GST_MSECOND) - m_firstBufferDts;
     165        GST_BUFFER_PTS(buffer.get()) = (static_cast<guint64>(renderTimeMs) * GST_MSECOND) - m_firstBufferPts;
     166        m_dtsPtsMap[GST_BUFFER_PTS(buffer.get())] = inputImage._timeStamp;
    167167
    168168        GST_LOG_OBJECT(pipeline(), "%ld Decoding: %" GST_PTR_FORMAT, renderTimeMs, buffer);
    169         switch (gst_app_src_push_sample(GST_APP_SRC(m_src),
    170             gst_sample_new(buffer, GetCapsForFrame(inputImage), nullptr, nullptr))) {
     169        auto sample = adoptGRef(gst_sample_new(buffer.get(), GetCapsForFrame(inputImage), nullptr, nullptr));
     170        switch (gst_app_src_push_sample(GST_APP_SRC(m_src), sample.get())) {
    171171        case GST_FLOW_OK:
    172172            return WEBRTC_VIDEO_CODEC_OK;
  • trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp

    r235230 r236397  
    7979            newBitrate, frameRate);
    8080
    81         auto caps = gst_caps_make_writable(m_restrictionCaps.get());
    82         gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, frameRate, 1, nullptr);
     81        auto caps = adoptGRef(gst_caps_copy(m_restrictionCaps.get()));
     82        gst_caps_set_simple(caps.get(), "framerate", GST_TYPE_FRACTION, frameRate, 1, nullptr);
    8383
    8484        SetRestrictionCaps(caps);
     
    107107        g_return_val_if_fail(codecSettings, WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
    108108        g_return_val_if_fail(codecSettings->codecType == CodecType(), WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
     109
     110        m_encodedFrame._size = codecSettings->width * codecSettings->height * 3;
     111        m_encodedFrame._buffer = new uint8_t[m_encodedFrame._size];
     112        encoded_image_buffer_.reset(m_encodedFrame._buffer);
     113        m_encodedFrame._completeFrame = true;
     114        m_encodedFrame._encodedWidth = 0;
     115        m_encodedFrame._encodedHeight = 0;
     116        m_encodedFrame._length = 0;
    109117
    110118        m_pipeline = makeElement("pipeline");
     
    151159    int32_t Release() final
    152160    {
     161        m_encodedFrame._buffer = nullptr;
     162        encoded_image_buffer_.reset();
    153163        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    154164        gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
     
    220230        auto caps = gst_sample_get_caps(sample.get());
    221231
    222         webrtc::RTPFragmentationHeader* fragmentationInfo;
    223         auto frame = Fragmentize(buffer, &fragmentationInfo);
    224         if (!frame._size)
     232        webrtc::RTPFragmentationHeader fragmentationInfo;
     233        Fragmentize(&m_encodedFrame, &encoded_image_buffer_, buffer, &fragmentationInfo);
     234        if (!m_encodedFrame._size)
    225235            return GST_FLOW_OK;
    226236
    227237        gst_structure_get(gst_caps_get_structure(caps, 0),
    228             "width", G_TYPE_INT, &frame._encodedWidth,
    229             "height", G_TYPE_INT, &frame._encodedHeight,
     238            "width", G_TYPE_INT, &m_encodedFrame._encodedWidth,
     239            "height", G_TYPE_INT, &m_encodedFrame._encodedHeight,
    230240            nullptr);
    231241
    232         frame._frameType = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT) ? webrtc::kVideoFrameDelta : webrtc::kVideoFrameKey;
    233         frame._completeFrame = true;
    234         frame.capture_time_ms_ = GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer));
    235         frame._timeStamp = GST_TIME_AS_MSECONDS(GST_BUFFER_DTS(buffer));
     242        m_encodedFrame._frameType = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT) ? webrtc::kVideoFrameDelta : webrtc::kVideoFrameKey;
     243        m_encodedFrame._completeFrame = true;
     244        m_encodedFrame.capture_time_ms_ = GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer));
     245        m_encodedFrame._timeStamp = GST_TIME_AS_MSECONDS(GST_BUFFER_DTS(buffer));
    236246        GST_LOG_OBJECT(m_pipeline.get(), "Got buffer TS: %" GST_TIME_FORMAT, GST_TIME_ARGS(GST_BUFFER_PTS(buffer)));
    237247
     
    239249        PopulateCodecSpecific(&codecSpecifiInfos, buffer);
    240250
    241         webrtc::EncodedImageCallback::Result result = m_imageReadyCb->OnEncodedImage(frame, &codecSpecifiInfos, fragmentationInfo);
     251        webrtc::EncodedImageCallback::Result result = m_imageReadyCb->OnEncodedImage(m_encodedFrame, &codecSpecifiInfos, &fragmentationInfo);
    242252        if (result.error != webrtc::EncodedImageCallback::Result::OK) {
    243253            GST_ELEMENT_ERROR(m_pipeline.get(), LIBRARY, FAILED, (nullptr),
     
    350360    virtual void PopulateCodecSpecific(webrtc::CodecSpecificInfo*, GstBuffer*) = 0;
    351361
    352     virtual webrtc::EncodedImage Fragmentize(GstBuffer* buffer, webrtc::RTPFragmentationHeader** outFragmentationInfo)
     362    virtual void Fragmentize(webrtc::EncodedImage* encodedImage, std::unique_ptr<uint8_t[]>* encoded_image_buffer, GstBuffer* buffer,
     363        webrtc::RTPFragmentationHeader* fragmentationInfo)
    353364    {
    354365        GstMapInfo map;
    355366
    356367        gst_buffer_map(buffer, &map, GST_MAP_READ);
    357         webrtc::EncodedImage frame(map.data, map.size, map.size);
     368        if (encodedImage->_size < map.size) {
     369            encodedImage->_size = map.size;
     370            encodedImage->_buffer = new uint8_t[encodedImage->_size];
     371            encoded_image_buffer->reset(encodedImage->_buffer);
     372            memcpy(encodedImage->_buffer, map.data, map.size);
     373        }
    358374        gst_buffer_unmap(buffer, &map);
    359 
    360         // No fragmentation by default.
    361         webrtc::RTPFragmentationHeader* fragmentationInfo = new webrtc::RTPFragmentationHeader();
    362375
    363376        fragmentationInfo->VerifyAndAllocateFragmentationHeader(1);
     
    366379        fragmentationInfo->fragmentationPlType[0] = 0;
    367380        fragmentationInfo->fragmentationTimeDiff[0] = 0;
    368 
    369         *outFragmentationInfo = fragmentationInfo;
    370 
    371         return frame;
    372381    }
    373382
     
    381390    virtual const gchar* Name() = 0;
    382391
    383     void SetRestrictionCaps(GstCaps* caps)
    384     {
    385         if (caps && m_profile.get() && gst_caps_is_equal(m_restrictionCaps.get(), caps))
     392    void SetRestrictionCaps(GRefPtr<GstCaps> caps)
     393    {
     394        if (caps && m_profile.get() && gst_caps_is_equal(m_restrictionCaps.get(), caps.get()))
    386395            g_object_set(m_profile.get(), "restriction-caps", caps, nullptr);
    387396
     
    404413    GRefPtr<GstEncodingProfile> m_profile;
    405414    BitrateSetter m_bitrateSetter;
     415    webrtc::EncodedImage m_encodedFrame;
     416    std::unique_ptr<uint8_t[]> encoded_image_buffer_;
    406417};
    407418
     
    421432
    422433    // FIXME - MT. safety!
    423     webrtc::EncodedImage Fragmentize(GstBuffer* gstbuffer, webrtc::RTPFragmentationHeader** outFragmentationInfo) final
     434    void Fragmentize(webrtc::EncodedImage* encodedImage, std::unique_ptr<uint8_t[]>* encoded_image_buffer,
     435        GstBuffer* gstbuffer, webrtc::RTPFragmentationHeader* fragmentationHeader) final
    424436    {
    425437        GstMapInfo map;
     
    431443
    432444        std::vector<GstH264NalUnit> nals;
    433         webrtc::EncodedImage encodedImage;
    434445
    435446        const uint8_t startCode[4] = { 0, 0, 0, 1 };
     
    448459        }
    449460
    450         encodedImage._size = requiredSize;
    451         encodedImage._buffer = new uint8_t[encodedImage._size];
     461        if (encodedImage->_size < requiredSize) {
     462            encodedImage->_size = requiredSize;
     463            encodedImage->_buffer = new uint8_t[encodedImage->_size];
     464            encoded_image_buffer->reset(encodedImage->_buffer);
     465        }
     466
    452467        // Iterate nal units and fill the Fragmentation info.
    453         webrtc::RTPFragmentationHeader* fragmentationHeader = new webrtc::RTPFragmentationHeader();
    454468        fragmentationHeader->VerifyAndAllocateFragmentationHeader(nals.size());
    455469        size_t fragmentIndex = 0;
    456         encodedImage._length = 0;
     470        encodedImage->_length = 0;
    457471        for (std::vector<GstH264NalUnit>::iterator nal = nals.begin(); nal != nals.end(); ++nal, fragmentIndex++) {
    458472
     
    465479            fragmentationHeader->fragmentationLength[fragmentIndex] = nal->size;
    466480
    467             memcpy(encodedImage._buffer + encodedImage._length, &map.data[nal->sc_offset],
     481            memcpy(encodedImage->_buffer + encodedImage->_length, &map.data[nal->sc_offset],
    468482                sizeof(startCode) + nal->size);
    469             encodedImage._length += nal->size + sizeof(startCode);
    470         }
    471 
    472         *outFragmentationInfo = fragmentationHeader;
     483            encodedImage->_length += nal->size + sizeof(startCode);
     484        }
     485
    473486        gst_buffer_unmap(gstbuffer, &map);
    474         return encodedImage;
    475487    }
    476488
Note: See TracChangeset for help on using the changeset viewer.