Changeset 223452 in WebKit


Ignore:
Timestamp:
Oct 16, 2017 5:35:12 PM (7 years ago)
Author:
jer.noble@apple.com
Message:

ImageDecoderAVFObjC fails to create more CMSampleBuffers after creating about 32MB worth.
https://bugs.webkit.org/show_bug.cgi?id=178360

Reviewed by Eric Carlson.

AVSampleBufferGenerator has a constrained memory pool of about 32MB in size. Once
CMSampleBuffers representing about 32MB of memory are allocated, no more can be created
until previously created ones are released. So rather than (only) creating the sample
buffers up front in readSampleMetadata(), also create them dynamically, if missing, in
createFrameImageAtIndex(...) and release them in storeSampleBuffer(...) after they have been
decoded.

Drive-by fix: the expected content length was never actually set by the owner of ImageDecoderAVFObjC.
Now that the expected content length is available, we don't have to wait until the data is complete
to respond to requests.

  • platform/graphics/ImageSource.cpp:

(WebCore::ImageSource::ensureDecoderAvailable):

  • platform/graphics/avfoundation/objc/ImageDecoderAVFObjC.h:
  • platform/graphics/avfoundation/objc/ImageDecoderAVFObjC.mm:

(SOFT_LINK_POINTER_OPTIONAL):
(-[WebCoreSharedBufferResourceLoaderDelegate canFulfillRequest:]):
(-[WebCoreSharedBufferResourceLoaderDelegate fulfillRequest:]):
(-[WebCoreSharedBufferResourceLoaderDelegate resourceLoader:shouldWaitForLoadingOfRequestedResource:]):
(WebCore::imageDecoderAssetOptions):
(WebCore::ImageDecoderAVFObjC::firstEnabledTrack):
(WebCore::ImageDecoderAVFObjC::storeSampleBuffer):
(WebCore::ImageDecoderAVFObjC::createFrameImageAtIndex):
(WebCore::ImageDecoderAVFObjC::setExpectedContentSize):

Location:
trunk/Source/WebCore
Files:
4 edited

Legend:

Unmodified
Added
Removed
  • trunk/Source/WebCore/ChangeLog

    r223451 r223452  
     12017-10-16  Jer Noble  <jer.noble@apple.com>
     2
     3        ImageDecoderAVFObjC fails to create more CMSampleBuffers after creating about 32MB worth.
     4        https://bugs.webkit.org/show_bug.cgi?id=178360
     5
     6        Reviewed by Eric Carlson.
     7
     8        AVSampleBufferGenerator has a constrained memory pool of about 32MB in size. Once
     9        CMSampleBuffers representing about 32MB of memory are allocated, no more can be created
     10        until previously created ones are released. So rather than (only) creating the sample
     11        buffers up front in readSampleMetadata(), also create them dynamically, if missing, in
     12        createFrameImageAtIndex(...) and release them in storeSampleBuffer(...) after they have been
     13        decoded.
     14
     15        Drive-by fix: the expected content length was never actually set by the owner of ImageDecoderAVFObjC.
     16        Now that the expected content length is available, we don't have to wait until the data is complete
     17        to respond to requests.
     18
     19        * platform/graphics/ImageSource.cpp:
     20        (WebCore::ImageSource::ensureDecoderAvailable):
     21        * platform/graphics/avfoundation/objc/ImageDecoderAVFObjC.h:
     22        * platform/graphics/avfoundation/objc/ImageDecoderAVFObjC.mm:
     23        (SOFT_LINK_POINTER_OPTIONAL):
     24        (-[WebCoreSharedBufferResourceLoaderDelegate canFulfillRequest:]):
     25        (-[WebCoreSharedBufferResourceLoaderDelegate fulfillRequest:]):
     26        (-[WebCoreSharedBufferResourceLoaderDelegate resourceLoader:shouldWaitForLoadingOfRequestedResource:]):
     27        (WebCore::imageDecoderAssetOptions):
     28        (WebCore::ImageDecoderAVFObjC::firstEnabledTrack):
     29        (WebCore::ImageDecoderAVFObjC::storeSampleBuffer):
     30        (WebCore::ImageDecoderAVFObjC::createFrameImageAtIndex):
     31        (WebCore::ImageDecoderAVFObjC::setExpectedContentSize):
     32
    1332017-10-12  Matt Rajca  <mrajca@apple.com>
    234
  • trunk/Source/WebCore/platform/graphics/ImageSource.cpp

    r223091 r223452  
    8080        return false;
    8181
     82    if (auto expectedContentLength = m_frameCache->expectedContentLength())
     83        m_decoder->setExpectedContentSize(expectedContentLength);
     84
    8285    m_frameCache->setDecoder(m_decoder.get());
    8386    return true;
  • trunk/Source/WebCore/platform/graphics/avfoundation/objc/ImageDecoderAVFObjC.h

    r222225 r223452  
    120120    Lock m_sampleGeneratorLock;
    121121    bool m_isAllDataReceived { false };
    122     long long m_expectedContentSize { 0 };
    123122    std::optional<IntSize> m_size;
    124123    std::optional<RotationProperties> m_rotation;
  • trunk/Source/WebCore/platform/graphics/avfoundation/objc/ImageDecoderAVFObjC.mm

    r222326 r223452  
    3333#import "FloatRect.h"
    3434#import "FloatSize.h"
     35#import "Logging.h"
    3536#import "MIMETypeRegistry.h"
    3637#import "SharedBuffer.h"
     
    6465SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaCharacteristicVisual, NSString *)
    6566SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
     67SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString *)
    6668#define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
    6769#define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
     70#define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
    6871
    6972#pragma mark -
     
    128131
    129132    if (auto dataRequest = request.dataRequest) {
    130         if (dataRequest.requestedOffset + dataRequest.requestedLength > static_cast<long long>(_data.get().length))
     133        if (dataRequest.requestedOffset > static_cast<long long>(_data.get().length))
    131134            return NO;
    132135    }
     
    178181
    179182        [dataRequest respondWithData:requestedData];
     183
     184        if (dataRequest.requestsAllDataToEndOfResource) {
     185            if (!_complete)
     186                return;
     187        } else if (dataRequest.requestedOffset + dataRequest.requestedLength > dataRequest.currentOffset)
     188            return;
    180189    }
    181190
     
    191200    if ([self canFulfillRequest:loadingRequest]) {
    192201        [self fulfillRequest:loadingRequest];
    193         return NO;
     202        if (loadingRequest.finished)
     203            return NO;
    194204    }
    195205
     
    223233{
    224234    static NeverDestroyed<RetainPtr<NSDictionary>> options;
    225     if (!options.get())
    226         options.get() = @{ AVURLAssetReferenceRestrictionsKey: @(AVAssetReferenceRestrictionForbidAll) };
     235    if (!options.get()) {
     236        options.get() = @{
     237            AVURLAssetReferenceRestrictionsKey: @(AVAssetReferenceRestrictionForbidAll),
     238            AVURLAssetUsesNoPersistentCacheKey: @YES,
     239        };
     240    }
    227241
    228242    return options.get().get();
     
    308322    }];
    309323
    310     if (firstEnabledIndex == NSNotFound)
     324    if (firstEnabledIndex == NSNotFound) {
     325        LOG(Images, "ImageDecoderAVFObjC::firstEnabledTrack(%p) - asset has no enabled video tracks", this);
    311326        return nil;
     327    }
    312328
    313329    return [videoTracks objectAtIndex:firstEnabledIndex];
     
    366382{
    367383    auto pixelBuffer = m_decompressionSession->decodeSampleSync(sampleBuffer);
    368     if (!pixelBuffer)
     384    if (!pixelBuffer) {
     385        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not decode sampleBuffer", this);
    369386        return false;
     387    }
    370388
    371389    auto presentationTime = PAL::toMediaTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
     
    406424
    407425    CGImageRef rawImage = nullptr;
    408     if (noErr != VTCreateCGImageFromCVPixelBuffer(pixelBuffer.get(), nullptr, &rawImage))
     426    if (noErr != VTCreateCGImageFromCVPixelBuffer(pixelBuffer.get(), nullptr, &rawImage)) {
     427        LOG(Images, "ImageDecoderAVFObjC::storeSampleBuffer(%p) - could not create CGImage from pixelBuffer", this);
    409428        return false;
     429    }
    410430
    411431    ASSERT(indexIter->second < m_sampleData.size());
    412432    auto& sampleData = m_sampleData[indexIter->second];
    413433    sampleData.image = adoptCF(rawImage);
     434    sampleData.sample = nullptr;
    414435
    415436    auto alphaInfo = CGImageGetAlphaInfo(rawImage);
     
    576597        auto& cursorSampleData = m_sampleData[indexIter->second];
    577598
     599        if (!cursorSampleData.sample) {
     600            auto request = adoptNS([allocAVSampleBufferRequestInstance() initWithStartCursor:frameCursor]);
     601            cursorSampleData.sample = adoptCF([m_generator createSampleBufferForRequest:request.get()]);
     602        }
     603
    578604        if (!cursorSampleData.sample)
    579605            return nullptr;
     
    592618void ImageDecoderAVFObjC::setExpectedContentSize(long long expectedContentSize)
    593619{
    594     if (m_expectedContentSize == expectedContentSize)
    595         return;
    596 
    597     m_loader.get().expectedContentSize = m_expectedContentSize;
     620    m_loader.get().expectedContentSize = expectedContentSize;
    598621}
    599622
Note: See TracChangeset for help on using the changeset viewer.