Changeset 262663 in WebKit
- Timestamp:
- Jun 5, 2020 3:49:11 PM (4 years ago)
- Location:
- trunk
- Files:
-
- 8 deleted
- 31 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/LayoutTests/ChangeLog
r262660 r262663 1 2020-06-05 Ryan Haddad <ryanhaddad@apple.com> 2 3 Unreviewed, reverting r262619, r262625, and r262641. 4 5 Caused mediarecorder layout test crashes. 6 7 Reverted changesets: 8 9 "[Cocoa] Use AVAssetWriterDelegate to implement MediaRecorder" 10 https://bugs.webkit.org/show_bug.cgi?id=206582 11 https://trac.webkit.org/changeset/262619 12 13 "[Cocoa] Use AVAssetWriterDelegate to implement MediaRecorder" 14 https://bugs.webkit.org/show_bug.cgi?id=206582 15 https://trac.webkit.org/changeset/262625 16 17 "Unreviewed, silence deprecation warning to fix build with 18 latest SDK." 19 https://trac.webkit.org/changeset/262641 20 1 21 2020-06-05 Jason Lawrence <lawrence.j@apple.com> 2 22 -
trunk/LayoutTests/TestExpectations
r262619 r262663 3299 3299 webgl/1.0.3/conformance/extensions/webgl-draw-buffers.html [ Skip ] 3300 3300 3301 # Not supported by default 3302 http/wpt/mediarecorder [ Skip ] 3303 imported/w3c/web-platform-tests/mediacapture-record [ Skip ] 3304 fast/history/page-cache-media-recorder.html [ Skip ] 3301 webkit.org/b/197673 http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable.html [ Pass Failure Timeout ] 3305 3302 3306 3303 # WebGL 2 Conformance Suite rules for regular bots post ANGLE backend adoption. -
trunk/LayoutTests/http/wpt/mediarecorder/MediaRecorder-AV-audio-video-dataavailable-gpuprocess.html
r262619 r262663 57 57 58 58 async_test(t => { 59 const ac = new AudioContext(); 60 const osc = ac.createOscillator(); 61 const dest = ac.createMediaStreamDestination(); 62 const audio = dest.stream; 63 osc.connect(dest); 64 59 65 const video = createVideoStream(); 66 assert_equals(video.getAudioTracks().length, 0, "video mediastream starts with no audio track"); 67 assert_equals(audio.getAudioTracks().length, 1, "audio mediastream starts with one audio track"); 68 video.addTrack(audio.getAudioTracks()[0]); 69 assert_equals(video.getAudioTracks().length, 1, "video mediastream starts with one audio track"); 60 70 const recorder = new MediaRecorder(video); 61 71 let mode = 0; -
trunk/LayoutTests/platform/mac/TestExpectations
r262619 r262663 1709 1709 [ Catalina+ ] fast/text/design-system-ui-16.html [ Pass ] 1710 1710 1711 [ Catalina+ ] http/wpt/mediarecorder [ Pass Failure ]1712 [ Catalina+ ] imported/w3c/web-platform-tests/mediacapture-record [ Pass Failure ]1713 [ Catalina+ ] fast/history/page-cache-media-recorder.html [ Pass Failure ]1714 1715 1711 webkit.org/b/200128 imported/w3c/web-platform-tests/html/semantics/embedded-content/the-video-element/video_timeupdate_on_seek.html [ Timeout Pass ] 1716 1712 -
trunk/Source/WTF/ChangeLog
r262639 r262663 1 2020-06-05 Ryan Haddad <ryanhaddad@apple.com> 2 3 Unreviewed, reverting r262619, r262625, and r262641. 4 5 Caused mediarecorder layout test crashes. 6 7 Reverted changesets: 8 9 "[Cocoa] Use AVAssetWriterDelegate to implement MediaRecorder" 10 https://bugs.webkit.org/show_bug.cgi?id=206582 11 https://trac.webkit.org/changeset/262619 12 13 "[Cocoa] Use AVAssetWriterDelegate to implement MediaRecorder" 14 https://bugs.webkit.org/show_bug.cgi?id=206582 15 https://trac.webkit.org/changeset/262625 16 17 "Unreviewed, silence deprecation warning to fix build with 18 latest SDK." 19 https://trac.webkit.org/changeset/262641 20 1 21 2020-06-05 Per Arne Vollan <pvollan@apple.com> 2 22 -
trunk/Source/WTF/wtf/PlatformHave.h
r262639 r262663 543 543 #endif 544 544 545 #if ((PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101500) || PLATFORM(IOS)) && (defined __has_include && __has_include(<AVFoundation/AVAssetWriter_Private.h>))546 #define HAVE_AVASSETWRITERDELEGATE 1547 #endif548 549 545 #if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS) && !PLATFORM(APPLETV) 550 546 #define HAVE_SYSTEM_FONT_STYLE_TITLE_0 1 -
trunk/Source/WebCore/ChangeLog
r262659 r262663 1 2020-06-05 Ryan Haddad <ryanhaddad@apple.com> 2 3 Unreviewed, reverting r262619, r262625, and r262641. 4 5 Caused mediarecorder layout test crashes. 6 7 Reverted changesets: 8 9 "[Cocoa] Use AVAssetWriterDelegate to implement MediaRecorder" 10 https://bugs.webkit.org/show_bug.cgi?id=206582 11 https://trac.webkit.org/changeset/262619 12 13 "[Cocoa] Use AVAssetWriterDelegate to implement MediaRecorder" 14 https://bugs.webkit.org/show_bug.cgi?id=206582 15 https://trac.webkit.org/changeset/262625 16 17 "Unreviewed, silence deprecation warning to fix build with 18 latest SDK." 19 https://trac.webkit.org/changeset/262641 20 1 21 2020-06-05 Kate Cheney <katherine_cheney@apple.com> 2 22 -
trunk/Source/WebCore/Modules/mediarecorder/MediaRecorderProvider.cpp
r262619 r262663 35 35 std::unique_ptr<MediaRecorderPrivate> MediaRecorderProvider::createMediaRecorderPrivate(MediaStreamPrivate& stream) 36 36 { 37 #if HAVE(AVASSETWRITERDELEGATE)38 37 return MediaRecorderPrivateAVFImpl::create(stream); 39 #else40 UNUSED_PARAM(stream);41 return nullptr;42 #endif43 38 } 44 39 -
trunk/Source/WebCore/PAL/ChangeLog
r262639 r262663 1 2020-06-05 Ryan Haddad <ryanhaddad@apple.com> 2 3 Unreviewed, reverting r262619, r262625, and r262641. 4 5 Caused mediarecorder layout test crashes. 6 7 Reverted changesets: 8 9 "[Cocoa] Use AVAssetWriterDelegate to implement MediaRecorder" 10 https://bugs.webkit.org/show_bug.cgi?id=206582 11 https://trac.webkit.org/changeset/262619 12 13 "[Cocoa] Use AVAssetWriterDelegate to implement MediaRecorder" 14 https://bugs.webkit.org/show_bug.cgi?id=206582 15 https://trac.webkit.org/changeset/262625 16 17 "Unreviewed, silence deprecation warning to fix build with 18 latest SDK." 19 https://trac.webkit.org/changeset/262641 20 1 21 2020-06-05 Per Arne Vollan <pvollan@apple.com> 2 22 -
trunk/Source/WebCore/PAL/PAL.xcodeproj/project.pbxproj
r262619 r262663 120 120 2E1342CD215AA10A007199D2 /* UIKitSoftLink.mm in Sources */ = {isa = PBXBuildFile; fileRef = 2E1342CB215AA10A007199D2 /* UIKitSoftLink.mm */; }; 121 121 31308B1420A21705003FB929 /* SystemPreviewSPI.h in Headers */ = {isa = PBXBuildFile; fileRef = 31308B1320A21705003FB929 /* SystemPreviewSPI.h */; }; 122 416E995323DAE6BE00E871CB /* AudioToolboxSoftLink.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 416E995123DAE6BD00E871CB /* AudioToolboxSoftLink.cpp */; };123 416E995423DAE6BE00E871CB /* AudioToolboxSoftLink.h in Headers */ = {isa = PBXBuildFile; fileRef = 416E995223DAE6BE00E871CB /* AudioToolboxSoftLink.h */; };124 41E1F344248A6A000022D5DE /* VideoToolboxSoftLink.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 416E995523DAEFF700E871CB /* VideoToolboxSoftLink.cpp */; };125 122 442956CD218A72DF0080DB54 /* RevealSPI.h in Headers */ = {isa = PBXBuildFile; fileRef = 442956CC218A72DE0080DB54 /* RevealSPI.h */; }; 126 123 4450FC9F21F5F602004DFA56 /* QuickLookSoftLink.mm in Sources */ = {isa = PBXBuildFile; fileRef = 4450FC9D21F5F602004DFA56 /* QuickLookSoftLink.mm */; }; … … 304 301 31308B1320A21705003FB929 /* SystemPreviewSPI.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SystemPreviewSPI.h; sourceTree = "<group>"; }; 305 302 37119A7820CCB5FF002C6DC9 /* WebKitTargetConditionals.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = WebKitTargetConditionals.xcconfig; sourceTree = "<group>"; }; 306 416E995123DAE6BD00E871CB /* AudioToolboxSoftLink.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = AudioToolboxSoftLink.cpp; sourceTree = "<group>"; };307 416E995223DAE6BE00E871CB /* AudioToolboxSoftLink.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioToolboxSoftLink.h; sourceTree = "<group>"; };308 416E995523DAEFF700E871CB /* VideoToolboxSoftLink.cpp */ = {isa = PBXFileReference; fileEncoding = 4; 
lastKnownFileType = sourcecode.cpp.cpp; path = VideoToolboxSoftLink.cpp; sourceTree = "<group>"; };309 416E995623DAEFF700E871CB /* VideoToolboxSoftLink.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = VideoToolboxSoftLink.h; sourceTree = "<group>"; };310 303 442956CC218A72DE0080DB54 /* RevealSPI.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RevealSPI.h; sourceTree = "<group>"; }; 311 304 4450FC9D21F5F602004DFA56 /* QuickLookSoftLink.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = QuickLookSoftLink.mm; sourceTree = "<group>"; }; … … 549 542 isa = PBXGroup; 550 543 children = ( 551 416E995123DAE6BD00E871CB /* AudioToolboxSoftLink.cpp */,552 416E995223DAE6BE00E871CB /* AudioToolboxSoftLink.h */,553 544 0CF99CA61F738436007EE793 /* CoreMediaSoftLink.cpp */, 554 545 0CF99CA71F738437007EE793 /* CoreMediaSoftLink.h */, 555 416E995523DAEFF700E871CB /* VideoToolboxSoftLink.cpp */,556 416E995623DAEFF700E871CB /* VideoToolboxSoftLink.h */,557 546 ); 558 547 path = cf; … … 747 736 57FD318B22B35989008D0E8B /* AppSSOSoftLink.h in Headers */, 748 737 576CA9D622B854AB0030143C /* AppSSOSPI.h in Headers */, 749 416E995423DAE6BE00E871CB /* AudioToolboxSoftLink.h in Headers */,750 738 2D02E93C2056FAA700A13797 /* AudioToolboxSPI.h in Headers */, 751 739 572A107822B456F500F410C8 /* AuthKitSPI.h in Headers */, … … 966 954 293EE4A824154F8F0047493D /* AccessibilitySupportSoftLink.cpp in Sources */, 967 955 57FD318A22B3593E008D0E8B /* AppSSOSoftLink.mm in Sources */, 968 416E995323DAE6BE00E871CB /* AudioToolboxSoftLink.cpp in Sources */,969 956 077E87B1226A460200A2AFF0 /* AVFoundationSoftLink.mm in Sources */, 970 957 0C5FFF0F1F78D9DA009EFF1A /* ClockCM.mm in Sources */, … … 981 968 5C7C787423AC3E770065F47E /* ManagedConfigurationSoftLink.mm in Sources */, 982 969 0CF99CA41F736375007EE793 /* MediaTimeAVFoundation.cpp in Sources */, 983 
41E1F344248A6A000022D5DE /* VideoToolboxSoftLink.cpp in Sources */,984 970 CDACB3602387425B0018D7CE /* MediaToolboxSoftLink.cpp in Sources */, 985 971 A1F63CA021A4DBF7006FB43B /* PassKitSoftLink.mm in Sources */, -
trunk/Source/WebCore/PAL/pal/cf/CoreMediaSoftLink.cpp
r262619 r262663 47 47 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMBlockBufferCopyDataBytes, OSStatus, (CMBlockBufferRef theSourceBuffer, size_t offsetToData, size_t dataLength, void* destination), (theSourceBuffer, offsetToData, dataLength, destination), PAL_EXPORT) 48 48 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMBlockBufferGetDataLength, size_t, (CMBlockBufferRef theBuffer), (theBuffer), PAL_EXPORT) 49 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMBlockBufferReplaceDataBytes, OSStatus, (const void* sourceBytes, CMBlockBufferRef destinationBuffer, size_t offsetIntoDestination, size_t dataLength), (sourceBytes, destinationBuffer, offsetIntoDestination, dataLength), PAL_EXPORT)50 49 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMFormatDescriptionGetExtensions, CFDictionaryRef, (CMFormatDescriptionRef desc), (desc), PAL_EXPORT) 51 50 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMSampleBufferGetTypeID, CFTypeID, (void), (), PAL_EXPORT) … … 135 134 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMBufferQueueIsEmpty, Boolean, (CMBufferQueueRef queue), (queue), PAL_EXPORT) 136 135 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMBufferQueueGetBufferCount, CMItemCount, (CMBufferQueueRef queue), (queue), PAL_EXPORT) 137 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMBufferQueueGetCallbacksForUnsortedSampleBuffers, const CMBufferCallbacks *, (), (), PAL_EXPORT)138 136 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMBufferQueueGetFirstPresentationTimeStamp, CMTime, (CMBufferQueueRef queue), (queue), PAL_EXPORT) 139 137 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMBufferQueueGetEndPresentationTimeStamp, CMTime, (CMBufferQueueRef queue), (queue), PAL_EXPORT) 140 138 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMBufferQueueInstallTriggerWithIntegerThreshold, OSStatus, (CMBufferQueueRef queue, CMBufferQueueTriggerCallback 
triggerCallback, void* triggerRefcon, CMBufferQueueTriggerCondition triggerCondition, CMItemCount triggerThreshold, CMBufferQueueTriggerToken* triggerTokenOut), (queue, triggerCallback, triggerRefcon, triggerCondition, triggerThreshold, triggerTokenOut), PAL_EXPORT) 141 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMBufferQueueMarkEndOfData, OSStatus, (CMBufferQueueRef queue), (queue), PAL_EXPORT)142 139 143 140 SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMSampleAttachmentKey_DoNotDisplay, CFStringRef, PAL_EXPORT) … … 153 150 SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMSampleAttachmentKey_IsDependedOnByOthers, CFStringRef, PAL_EXPORT) 154 151 SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMSampleBufferConsumerNotification_BufferConsumed, CFStringRef, PAL_EXPORT) 155 SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration, CFStringRef, PAL_EXPORT)156 SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMSampleBufferAttachmentKey_GradualDecoderRefresh, CFStringRef, PAL_EXPORT)157 SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, get_CoreMedia_kCMSampleBufferAttachmentKey_TrimDurationAtStart, CFStringRef, PAL_EXPORT)158 152 159 153 SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMTimebaseNotification_EffectiveRateChanged, CFStringRef, PAL_EXPORT) … … 170 164 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMSampleBufferSetDataReady, OSStatus, (CMSampleBufferRef sbuf), (sbuf), PAL_EXPORT) 171 165 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMAudioFormatDescriptionCreate, OSStatus, (CFAllocatorRef allocator, const AudioStreamBasicDescription* asbd, size_t layoutSize, const AudioChannelLayout* layout, size_t magicCookieSize, const void* magicCookie, CFDictionaryRef extensions, CMAudioFormatDescriptionRef* outDesc), (allocator, asbd, layoutSize, layout, magicCookieSize, magicCookie, extensions, outDesc), 
PAL_EXPORT) 172 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMAudioFormatDescriptionGetMagicCookie, const void*, (CMAudioFormatDescriptionRef desc, size_t* sizeOut), (desc, sizeOut), PAL_EXPORT)173 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMAudioFormatDescriptionGetRichestDecodableFormat, const AudioFormatListItem *, (CMAudioFormatDescriptionRef desc), (desc), PAL_EXPORT)174 166 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMClockGetHostTimeClock, CMClockRef, (void), (), PAL_EXPORT) 175 167 SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMClockGetTime, CMTime, (CMClockRef clock), (clock), PAL_EXPORT) -
trunk/Source/WebCore/PAL/pal/cf/CoreMediaSoftLink.h
r262619 r262663 50 50 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMBlockBufferGetDataLength, size_t, (CMBlockBufferRef theBuffer), (theBuffer)) 51 51 #define CMBlockBufferGetDataLength softLink_CoreMedia_CMBlockBufferGetDataLength 52 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMBlockBufferReplaceDataBytes, OSStatus, (const void* sourceBytes, CMBlockBufferRef destinationBuffer, size_t offsetIntoDestination, size_t dataLength), (sourceBytes, destinationBuffer, offsetIntoDestination, dataLength))53 #define CMBlockBufferReplaceDataBytes softLink_CoreMedia_CMBlockBufferReplaceDataBytes54 52 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMFormatDescriptionGetExtensions, CFDictionaryRef, (CMFormatDescriptionRef desc), (desc)) 55 53 #define CMFormatDescriptionGetExtensions softLink_CoreMedia_CMFormatDescriptionGetExtensions … … 228 226 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMBufferQueueGetFirstPresentationTimeStamp, CMTime, (CMBufferQueueRef queue), (queue)) 229 227 #define CMBufferQueueGetFirstPresentationTimeStamp softLink_CoreMedia_CMBufferQueueGetFirstPresentationTimeStamp 230 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMBufferQueueGetCallbacksForUnsortedSampleBuffers, const CMBufferCallbacks *, (), ())231 #define CMBufferQueueGetCallbacksForUnsortedSampleBuffers softLink_CoreMedia_CMBufferQueueGetCallbacksForUnsortedSampleBuffers232 228 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMBufferQueueGetEndPresentationTimeStamp, CMTime, (CMBufferQueueRef queue), (queue)) 233 229 #define CMBufferQueueGetEndPresentationTimeStamp softLink_CoreMedia_CMBufferQueueGetEndPresentationTimeStamp 234 230 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMBufferQueueInstallTriggerWithIntegerThreshold, OSStatus, (CMBufferQueueRef queue, CMBufferQueueTriggerCallback triggerCallback, void* triggerRefcon, CMBufferQueueTriggerCondition triggerCondition, CMItemCount triggerThreshold, CMBufferQueueTriggerToken* triggerTokenOut), (queue, triggerCallback, triggerRefcon, 
triggerCondition, triggerThreshold, triggerTokenOut)) 235 231 #define CMBufferQueueInstallTriggerWithIntegerThreshold softLink_CoreMedia_CMBufferQueueInstallTriggerWithIntegerThreshold 236 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMBufferQueueMarkEndOfData, OSStatus, (CMBufferQueueRef queue), (queue))237 #define CMBufferQueueMarkEndOfData softLink_CoreMedia_CMBufferQueueMarkEndOfData238 232 239 233 SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMSampleAttachmentKey_DoNotDisplay, CFStringRef) … … 265 259 SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMSampleBufferConsumerNotification_BufferConsumed, CFStringRef) 266 260 #define kCMSampleBufferConsumerNotification_BufferConsumed get_CoreMedia_kCMSampleBufferConsumerNotification_BufferConsumed() 267 SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration, CFStringRef)268 #define kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration get_CoreMedia_kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration()269 SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMSampleBufferAttachmentKey_GradualDecoderRefresh, CFStringRef)270 #define kCMSampleBufferAttachmentKey_GradualDecoderRefresh get_CoreMedia_kCMSampleBufferAttachmentKey_GradualDecoderRefresh()271 SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, get_CoreMedia_kCMSampleBufferAttachmentKey_TrimDurationAtStart, CFStringRef)272 #define get_CoreMedia_kCMSampleBufferAttachmentKey_TrimDurationAtStart get_CoreMedia_kCMSampleBufferAttachmentKey_TrimDurationAtStart()273 274 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMAudioFormatDescriptionGetMagicCookie, const void*, (CMAudioFormatDescriptionRef desc, size_t* sizeOut), (desc, sizeOut))275 #define CMAudioFormatDescriptionGetMagicCookie softLink_CoreMedia_CMAudioFormatDescriptionGetMagicCookie276 261 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMAudioFormatDescriptionGetStreamBasicDescription, const AudioStreamBasicDescription *, (CMAudioFormatDescriptionRef desc), (desc)) 277 
262 #define CMAudioFormatDescriptionGetStreamBasicDescription softLink_CoreMedia_CMAudioFormatDescriptionGetStreamBasicDescription … … 282 267 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMSampleBufferGetNumSamples, CMItemCount, (CMSampleBufferRef sbuf), (sbuf)) 283 268 #define CMSampleBufferGetNumSamples softLink_CoreMedia_CMSampleBufferGetNumSamples 284 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMAudioFormatDescriptionGetRichestDecodableFormat, const AudioFormatListItem *, (CMAudioFormatDescriptionRef desc), (desc))285 #define CMAudioFormatDescriptionGetRichestDecodableFormat softLink_CoreMedia_CMAudioFormatDescriptionGetRichestDecodableFormat286 269 SOFT_LINK_FUNCTION_FOR_HEADER(PAL, CoreMedia, CMSampleBufferCopySampleBufferForRange, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef sbuf, CFRange sampleRange, CMSampleBufferRef* sBufOut), (allocator, sbuf, sampleRange, sBufOut)) 287 270 #define CMSampleBufferCopySampleBufferForRange softLink_CoreMedia_CMSampleBufferCopySampleBufferForRange -
trunk/Source/WebCore/SourcesCocoa.txt
r262619 r262663 493 493 494 494 platform/mediarecorder/MediaRecorderPrivateAVFImpl.cpp 495 platform/mediarecorder/cocoa/AudioSampleBufferCompressor.mm @no-unify496 495 platform/mediarecorder/cocoa/MediaRecorderPrivateWriterCocoa.mm 497 platform/mediarecorder/cocoa/VideoSampleBufferCompressor.mm @no-unify498 496 499 497 platform/mediasession/mac/MediaSessionInterruptionProviderMac.mm -
trunk/Source/WebCore/WebCore.xcodeproj/project.pbxproj
r262619 r262663 1142 1142 41DEFCB61E56C1BD000D9E5F /* JSDOMMapLike.h in Headers */ = {isa = PBXBuildFile; fileRef = 41DEFCB41E56C1B9000D9E5F /* JSDOMMapLike.h */; }; 1143 1143 41E1B1D10FF5986900576B3B /* AbstractWorker.h in Headers */ = {isa = PBXBuildFile; fileRef = 41E1B1CB0FF5986900576B3B /* AbstractWorker.h */; }; 1144 41E1F342248A69D00022D5DE /* VideoSampleBufferCompressor.mm in Sources */ = {isa = PBXBuildFile; fileRef = 41CD6F8B23D6E81D00B16421 /* VideoSampleBufferCompressor.mm */; };1145 41E1F343248A69D40022D5DE /* AudioSampleBufferCompressor.mm in Sources */ = {isa = PBXBuildFile; fileRef = 41E1F33D248A62B60022D5DE /* AudioSampleBufferCompressor.mm */; };1146 1144 41E9DCE7231974BF00F35949 /* BlobLoader.h in Headers */ = {isa = PBXBuildFile; fileRef = 41E9DCE4231973FE00F35949 /* BlobLoader.h */; settings = {ATTRIBUTES = (Private, ); }; }; 1147 1145 41E9DCE92319CA7600F35949 /* NetworkSendQueue.h in Headers */ = {isa = PBXBuildFile; fileRef = 41E9DCE82319CA7500F35949 /* NetworkSendQueue.h */; settings = {ATTRIBUTES = (Private, ); }; }; … … 7586 7584 41C7E1061E6A54360027B4DE /* CanvasCaptureMediaStreamTrack.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CanvasCaptureMediaStreamTrack.h; sourceTree = "<group>"; }; 7587 7585 41C7E1081E6AA37C0027B4DE /* CanvasCaptureMediaStreamTrack.idl */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = CanvasCaptureMediaStreamTrack.idl; sourceTree = "<group>"; }; 7588 41CD6F8923D6E81C00B16421 /* VideoSampleBufferCompressor.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = VideoSampleBufferCompressor.h; sourceTree = "<group>"; };7589 41CD6F8B23D6E81D00B16421 /* VideoSampleBufferCompressor.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = VideoSampleBufferCompressor.mm; sourceTree = "<group>"; };7590 7586 41CF8BE41D46222000707DC9 /* FetchBodyConsumer.cpp */ = {isa 
= PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = FetchBodyConsumer.cpp; sourceTree = "<group>"; }; 7591 7587 41CF8BE51D46222000707DC9 /* FetchBodyConsumer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FetchBodyConsumer.h; sourceTree = "<group>"; }; … … 7616 7612 41E1B1CB0FF5986900576B3B /* AbstractWorker.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AbstractWorker.h; sourceTree = "<group>"; }; 7617 7613 41E1B1CC0FF5986900576B3B /* AbstractWorker.idl */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = AbstractWorker.idl; sourceTree = "<group>"; }; 7618 41E1F33D248A62B60022D5DE /* AudioSampleBufferCompressor.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AudioSampleBufferCompressor.mm; sourceTree = "<group>"; };7619 41E1F33F248A62B60022D5DE /* AudioSampleBufferCompressor.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioSampleBufferCompressor.h; sourceTree = "<group>"; };7620 7614 41E408381DCB747900EFCE19 /* PeerConnectionBackend.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = PeerConnectionBackend.cpp; sourceTree = "<group>"; }; 7621 7615 41E593FD214865A900D3CB61 /* RTCPriorityType.idl */ = {isa = PBXFileReference; lastKnownFileType = text; path = RTCPriorityType.idl; sourceTree = "<group>"; }; … … 19335 19329 isa = PBXGroup; 19336 19330 children = ( 19337 41E1F33F248A62B60022D5DE /* AudioSampleBufferCompressor.h */,19338 41E1F33D248A62B60022D5DE /* AudioSampleBufferCompressor.mm */,19339 19331 4D73F94C218C4A87003A3ED6 /* MediaRecorderPrivateWriterCocoa.h */, 19340 19332 4D73F94D218C4A87003A3ED6 /* MediaRecorderPrivateWriterCocoa.mm */, 19341 41CD6F8923D6E81C00B16421 /* VideoSampleBufferCompressor.h */,19342 41CD6F8B23D6E81D00B16421 /* 
VideoSampleBufferCompressor.mm */,19343 19333 ); 19344 19334 path = cocoa; … … 34383 34373 07638A9A1884487200E15A1B /* MediaSessionManagerIOS.mm in Sources */, 34384 34374 CDC8B5A6180474F70016E685 /* MediaSourcePrivateAVFObjC.mm in Sources */, 34385 41E1F343248A69D40022D5DE /* AudioSampleBufferCompressor.mm in Sources */,34386 34375 4133CB8B20F80E9900E89B11 /* MediaStreamAudioSourceCocoa.cpp in Sources */, 34387 34376 CDF2B0101820540600F2B424 /* MockBox.cpp in Sources */, … … 34764 34753 DECA80501F9FED6A00E3B661 /* UnifiedSource271.cpp in Sources */, 34765 34754 DECA80511F9FED6A00E3B661 /* UnifiedSource272.cpp in Sources */, 34766 41E1F342248A69D00022D5DE /* VideoSampleBufferCompressor.mm in Sources */,34767 34755 DECA80521F9FED6A00E3B661 /* UnifiedSource273.cpp in Sources */, 34768 34756 DECA80531F9FED6A00E3B661 /* UnifiedSource274.cpp in Sources */, -
trunk/Source/WebCore/platform/mediarecorder/MediaRecorderPrivateAVFImpl.cpp
r262619 r262663 27 27 #include "MediaRecorderPrivateAVFImpl.h" 28 28 29 #if ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)29 #if ENABLE(MEDIA_STREAM) 30 30 31 31 #include "AudioStreamDescription.h" 32 #include "MediaRecorderPrivateWriterCocoa.h"33 32 #include "MediaSample.h" 34 33 #include "MediaStreamPrivate.h" … … 114 113 } // namespace WebCore 115 114 116 #endif // ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)115 #endif // ENABLE(MEDIA_STREAM) -
trunk/Source/WebCore/platform/mediarecorder/MediaRecorderPrivateAVFImpl.h
r262619 r262663 25 25 #pragma once 26 26 27 #if ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)27 #if ENABLE(MEDIA_STREAM) 28 28 29 29 #include "MediaRecorderPrivate.h" … … 61 61 } // namespace WebCore 62 62 63 #endif // ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)63 #endif // ENABLE(MEDIA_STREAM) -
trunk/Source/WebCore/platform/mediarecorder/cocoa/MediaRecorderPrivateWriterCocoa.h
r262619 r262663 25 25 #pragma once 26 26 27 #if ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE) 28 29 #include "AudioStreamDescription.h" 27 #if ENABLE(MEDIA_STREAM) 30 28 31 29 #include "SharedBuffer.h" … … 38 36 #include <wtf/threads/BinarySemaphore.h> 39 37 40 #include <CoreAudio/CoreAudioTypes.h>41 #include <CoreMedia/CMTime.h>42 43 38 typedef struct opaqueCMSampleBuffer *CMSampleBufferRef; 44 typedef const struct opaqueCMFormatDescription* CMFormatDescriptionRef;45 typedef struct opaqueCMBufferQueueTriggerToken *CMBufferQueueTriggerToken;46 39 47 40 OBJC_CLASS AVAssetWriter; 48 41 OBJC_CLASS AVAssetWriterInput; 49 OBJC_CLASS WebAVAssetWriterDelegate;50 42 51 43 namespace WTF { … … 55 47 namespace WebCore { 56 48 57 class AudioSampleBufferCompressor;58 49 class AudioStreamDescription; 59 50 class MediaStreamTrackPrivate; 60 51 class PlatformAudioData; 61 class VideoSampleBufferCompressor;62 52 63 class WEBCORE_EXPORT MediaRecorderPrivateWriter : public ThreadSafeRefCounted<MediaRecorderPrivateWriter, WTF::DestructionThread::Main>, public CanMakeWeakPtr<MediaRecorderPrivateWriter , WeakPtrFactoryInitialization::Eager> {53 class WEBCORE_EXPORT MediaRecorderPrivateWriter : public ThreadSafeRefCounted<MediaRecorderPrivateWriter, WTF::DestructionThread::Main>, public CanMakeWeakPtr<MediaRecorderPrivateWriter> { 64 54 public: 65 55 static RefPtr<MediaRecorderPrivateWriter> create(const MediaStreamTrackPrivate* audioTrack, const MediaStreamTrackPrivate* videoTrack); 66 56 static RefPtr<MediaRecorderPrivateWriter> create(bool hasAudio, int width, int height); 67 57 ~MediaRecorderPrivateWriter(); 68 58 59 bool setupWriter(); 60 bool setVideoInput(int width, int height); 61 bool setAudioInput(); 69 62 void appendVideoSampleBuffer(CMSampleBufferRef); 70 63 void appendAudioSampleBuffer(const PlatformAudioData&, const AudioStreamDescription&, const WTF::MediaTime&, size_t); … … 72 65 void fetchData(CompletionHandler<void(RefPtr<SharedBuffer>&&)>&&); 73 66 74 void 
appendData(const char*, size_t);75 void appendData(Ref<SharedBuffer>&&);76 77 67 private: 78 MediaRecorderPrivateWriter( bool hasAudio, bool hasVideo);68 MediaRecorderPrivateWriter(RetainPtr<AVAssetWriter>&&, String&& path); 79 69 void clear(); 80 70 81 bool initialize(); 71 RetainPtr<AVAssetWriter> m_writer; 72 RetainPtr<AVAssetWriterInput> m_videoInput; 73 RetainPtr<AVAssetWriterInput> m_audioInput; 82 74 83 static void compressedVideoOutputBufferCallback(void*, CMBufferQueueTriggerToken); 84 static void compressedAudioOutputBufferCallback(void*, CMBufferQueueTriggerToken); 85 86 void startAssetWriter(); 87 void appendCompressedSampleBuffers(); 88 89 bool appendCompressedAudioSampleBuffer(); 90 bool appendCompressedVideoSampleBuffer(); 91 92 void processNewCompressedAudioSampleBuffers(); 93 void processNewCompressedVideoSampleBuffers(); 94 95 void flushCompressedSampleBuffers(CompletionHandler<void()>&&); 96 void appendEndOfVideoSampleDurationIfNeeded(CompletionHandler<void()>&&); 97 75 String m_path; 76 Lock m_videoLock; 77 Lock m_audioLock; 78 BinarySemaphore m_finishWritingSemaphore; 79 BinarySemaphore m_finishWritingAudioSemaphore; 80 BinarySemaphore m_finishWritingVideoSemaphore; 98 81 bool m_hasStartedWriting { false }; 99 82 bool m_isStopped { false }; 100 101 RetainPtr<AVAssetWriter> m_writer; 83 bool m_isFirstAudioSample { true }; 84 dispatch_queue_t m_audioPullQueue; 85 dispatch_queue_t m_videoPullQueue; 86 Deque<RetainPtr<CMSampleBufferRef>> m_videoBufferPool; 87 Deque<RetainPtr<CMSampleBufferRef>> m_audioBufferPool; 102 88 103 89 bool m_isStopping { false }; 104 90 RefPtr<SharedBuffer> m_data; 105 91 CompletionHandler<void(RefPtr<SharedBuffer>&&)> m_fetchDataCompletionHandler; 106 107 bool m_hasAudio;108 bool m_hasVideo;109 110 RetainPtr<CMFormatDescriptionRef> m_audioFormatDescription;111 std::unique_ptr<AudioSampleBufferCompressor> m_audioCompressor;112 RetainPtr<AVAssetWriterInput> m_audioAssetWriterInput;113 114 RetainPtr<CMFormatDescriptionRef> 
m_videoFormatDescription;115 std::unique_ptr<VideoSampleBufferCompressor> m_videoCompressor;116 RetainPtr<AVAssetWriterInput> m_videoAssetWriterInput;117 CMTime m_lastVideoPresentationTime;118 CMTime m_lastVideoDecodingTime;119 bool m_hasEncodedVideoSamples { false };120 121 RetainPtr<WebAVAssetWriterDelegate> m_writerDelegate;122 92 }; 123 93 124 94 } // namespace WebCore 125 95 126 #endif // ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)96 #endif // ENABLE(MEDIA_STREAM) -
trunk/Source/WebCore/platform/mediarecorder/cocoa/MediaRecorderPrivateWriterCocoa.mm
r262641 r262663 24 24 */ 25 25 26 #include "config.h" 27 #include "MediaRecorderPrivateWriterCocoa.h" 28 29 #if ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE) 30 31 #include "AudioSampleBufferCompressor.h" 32 #include "AudioStreamDescription.h" 33 #include "Logging.h" 34 #include "MediaStreamTrackPrivate.h" 35 #include "VideoSampleBufferCompressor.h" 36 #include "WebAudioBufferList.h" 37 #include <AVFoundation/AVAssetWriter.h> 38 #include <AVFoundation/AVAssetWriterInput.h> 39 #include <AVFoundation/AVAssetWriter_Private.h> 40 #include <pal/avfoundation/MediaTimeAVFoundation.h> 41 #include <wtf/BlockPtr.h> 42 #include <wtf/CompletionHandler.h> 43 #include <wtf/FileSystem.h> 44 #include <wtf/cf/TypeCastsCF.h> 45 46 #include <pal/cf/CoreMediaSoftLink.h> 47 #include <pal/cocoa/AVFoundationSoftLink.h> 48 49 @interface WebAVAssetWriterDelegate : NSObject <AVAssetWriterDelegate> { 50 WeakPtr<WebCore::MediaRecorderPrivateWriter> m_writer; 51 } 52 53 - (instancetype)initWithWriter:(WebCore::MediaRecorderPrivateWriter*)writer; 54 - (void)close; 55 56 @end 57 58 @implementation WebAVAssetWriterDelegate { 59 }; 60 61 - (instancetype)initWithWriter:(WebCore::MediaRecorderPrivateWriter*)writer 62 { 63 ASSERT(isMainThread()); 64 self = [super init]; 65 if (self) 66 self->m_writer = makeWeakPtr(writer); 67 68 return self; 69 } 70 71 - (void)assetWriter:(AVAssetWriter *)assetWriter didProduceFragmentedHeaderData:(NSData *)fragmentedHeaderData 72 { 73 UNUSED_PARAM(assetWriter); 74 if (!isMainThread()) { 75 if (auto size = [fragmentedHeaderData length]) { 76 callOnMainThread([protectedSelf = RetainPtr<WebAVAssetWriterDelegate>(self), buffer = WebCore::SharedBuffer::create(static_cast<const char*>([fragmentedHeaderData bytes]), size)]() mutable { 77 if (protectedSelf->m_writer) 78 protectedSelf->m_writer->appendData(WTFMove(buffer)); 79 }); 80 } 81 return; 82 } 83 84 if (m_writer) 85 m_writer->appendData(static_cast<const char*>([fragmentedHeaderData bytes]), 
[fragmentedHeaderData length]); 86 } 87 88 - (void)assetWriter:(AVAssetWriter *)assetWriter didProduceFragmentedMediaData:(NSData *)fragmentedMediaData fragmentedMediaDataReport:(AVFragmentedMediaDataReport *)fragmentedMediaDataReport 89 { 90 UNUSED_PARAM(assetWriter); 91 UNUSED_PARAM(fragmentedMediaDataReport); 92 if (!isMainThread()) { 93 if (auto size = [fragmentedMediaData length]) { 94 callOnMainThread([protectedSelf = RetainPtr<WebAVAssetWriterDelegate>(self), buffer = WebCore::SharedBuffer::create(static_cast<const char*>([fragmentedMediaData bytes]), size)]() mutable { 95 if (protectedSelf->m_writer) 96 protectedSelf->m_writer->appendData(WTFMove(buffer)); 97 }); 98 } 99 return; 100 } 101 102 if (m_writer) 103 m_writer->appendData(static_cast<const char*>([fragmentedMediaData bytes]), [fragmentedMediaData length]); 104 } 105 106 - (void)close 107 { 108 m_writer = nullptr; 109 } 110 111 @end 26 #import "config.h" 27 #import "MediaRecorderPrivateWriterCocoa.h" 28 29 #if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION) 30 31 #import "AudioStreamDescription.h" 32 #import "Logging.h" 33 #import "MediaStreamTrackPrivate.h" 34 #import "WebAudioBufferList.h" 35 #import <AVFoundation/AVAssetWriter.h> 36 #import <AVFoundation/AVAssetWriterInput.h> 37 #import <wtf/CompletionHandler.h> 38 #import <wtf/FileSystem.h> 39 40 #import <pal/cf/CoreMediaSoftLink.h> 41 #import <pal/cocoa/AVFoundationSoftLink.h> 42 43 #undef AVEncoderBitRateKey 44 #define AVEncoderBitRateKey getAVEncoderBitRateKeyWithFallback() 45 #undef AVFormatIDKey 46 #define AVFormatIDKey getAVFormatIDKeyWithFallback() 47 #undef AVNumberOfChannelsKey 48 #define AVNumberOfChannelsKey getAVNumberOfChannelsKeyWithFallback() 49 #undef AVSampleRateKey 50 #define AVSampleRateKey getAVSampleRateKeyWithFallback() 112 51 113 52 namespace WebCore { … … 115 54 using namespace PAL; 116 55 117 RefPtr<MediaRecorderPrivateWriter> MediaRecorderPrivateWriter::create(bool hasAudio, int width, int height) 118 { 119 auto writer = 
adoptRef(*new MediaRecorderPrivateWriter(hasAudio, width && height)); 120 if (!writer->initialize()) 121 return nullptr; 122 return writer; 56 static NSString *getAVFormatIDKeyWithFallback() 57 { 58 if (PAL::canLoad_AVFoundation_AVFormatIDKey()) 59 return PAL::get_AVFoundation_AVFormatIDKey(); 60 61 RELEASE_LOG_ERROR(Media, "Failed to load AVFormatIDKey"); 62 return @"AVFormatIDKey"; 63 } 64 65 static NSString *getAVNumberOfChannelsKeyWithFallback() 66 { 67 if (PAL::canLoad_AVFoundation_AVNumberOfChannelsKey()) 68 return PAL::get_AVFoundation_AVNumberOfChannelsKey(); 69 70 RELEASE_LOG_ERROR(Media, "Failed to load AVNumberOfChannelsKey"); 71 return @"AVNumberOfChannelsKey"; 72 } 73 74 static NSString *getAVSampleRateKeyWithFallback() 75 { 76 if (PAL::canLoad_AVFoundation_AVSampleRateKey()) 77 return PAL::get_AVFoundation_AVSampleRateKey(); 78 79 RELEASE_LOG_ERROR(Media, "Failed to load AVSampleRateKey"); 80 return @"AVSampleRateKey"; 81 } 82 83 static NSString *getAVEncoderBitRateKeyWithFallback() 84 { 85 if (PAL::canLoad_AVFoundation_AVEncoderBitRateKey()) 86 return PAL::get_AVFoundation_AVEncoderBitRateKey(); 87 88 RELEASE_LOG_ERROR(Media, "Failed to load AVEncoderBitRateKey"); 89 return @"AVEncoderBitRateKey"; 123 90 } 124 91 … … 134 101 } 135 102 136 void MediaRecorderPrivateWriter::compressedVideoOutputBufferCallback(void *mediaRecorderPrivateWriter, CMBufferQueueTriggerToken) 137 { 138 auto *writer = static_cast<MediaRecorderPrivateWriter*>(mediaRecorderPrivateWriter); 139 writer->processNewCompressedVideoSampleBuffers(); 140 } 141 142 void MediaRecorderPrivateWriter::compressedAudioOutputBufferCallback(void *mediaRecorderPrivateWriter, CMBufferQueueTriggerToken) 143 { 144 auto *writer = static_cast<MediaRecorderPrivateWriter*>(mediaRecorderPrivateWriter); 145 writer->processNewCompressedAudioSampleBuffers(); 146 } 147 148 MediaRecorderPrivateWriter::MediaRecorderPrivateWriter(bool hasAudio, bool hasVideo) 149 : m_hasAudio(hasAudio) 150 , m_hasVideo(hasVideo) 
151 { 152 } 153 154 MediaRecorderPrivateWriter::~MediaRecorderPrivateWriter() 155 { 156 clear(); 157 } 158 159 bool MediaRecorderPrivateWriter::initialize() 160 { 103 RefPtr<MediaRecorderPrivateWriter> MediaRecorderPrivateWriter::create(bool hasAudio, int width, int height) 104 { 105 NSString *directory = FileSystem::createTemporaryDirectory(@"videos"); 106 NSString *filename = [NSString stringWithFormat:@"/%lld.mp4", CMClockGetTime(CMClockGetHostTimeClock()).value]; 107 NSString *path = [directory stringByAppendingString:filename]; 108 109 NSURL *outputURL = [NSURL fileURLWithPath:path]; 110 String filePath = [path UTF8String]; 161 111 NSError *error = nil; 162 ALLOW_DEPRECATED_DECLARATIONS_BEGIN 163 m_writer = adoptNS([PAL::allocAVAssetWriterInstance() initWithFileType:AVFileTypeMPEG4 error:&error]); 164 ALLOW_DEPRECATED_DECLARATIONS_END 112 auto avAssetWriter = adoptNS([PAL::allocAVAssetWriterInstance() initWithURL:outputURL fileType:AVFileTypeMPEG4 error:&error]); 165 113 if (error) { 166 114 RELEASE_LOG_ERROR(MediaStream, "create AVAssetWriter instance failed with error code %ld", (long)error.code); 167 return false; 168 } 169 170 m_writerDelegate = adoptNS([[WebAVAssetWriterDelegate alloc] initWithWriter: this]); 171 [m_writer.get() setDelegate:m_writerDelegate.get()]; 172 173 if (m_hasAudio) { 174 m_audioCompressor = AudioSampleBufferCompressor::create(compressedAudioOutputBufferCallback, this); 175 if (!m_audioCompressor) 176 return false; 177 } 178 if (m_hasVideo) { 179 m_videoCompressor = VideoSampleBufferCompressor::create(kCMVideoCodecType_H264, compressedVideoOutputBufferCallback, this); 180 if (!m_videoCompressor) 181 return false; 182 } 183 return true; 184 } 185 186 void MediaRecorderPrivateWriter::processNewCompressedVideoSampleBuffers() 187 { 188 ASSERT(m_hasVideo); 189 if (!m_videoFormatDescription) { 190 m_videoFormatDescription = CMSampleBufferGetFormatDescription(m_videoCompressor->getOutputSampleBuffer()); 191 callOnMainThread([weakThis = 
makeWeakPtr(this), this] { 192 if (!weakThis) 193 return; 194 195 if (m_hasAudio && !m_audioFormatDescription) 196 return; 197 198 startAssetWriter(); 199 }); 200 } 201 if (!m_hasStartedWriting) 202 return; 203 appendCompressedSampleBuffers(); 204 } 205 206 void MediaRecorderPrivateWriter::processNewCompressedAudioSampleBuffers() 207 { 208 ASSERT(m_hasAudio); 209 if (!m_audioFormatDescription) { 210 m_audioFormatDescription = CMSampleBufferGetFormatDescription(m_audioCompressor->getOutputSampleBuffer()); 211 callOnMainThread([weakThis = makeWeakPtr(this), this] { 212 if (!weakThis) 213 return; 214 215 if (m_hasVideo && !m_videoFormatDescription) 216 return; 217 218 startAssetWriter(); 219 }); 220 } 221 if (!m_hasStartedWriting) 222 return; 223 appendCompressedSampleBuffers(); 224 } 225 226 void MediaRecorderPrivateWriter::startAssetWriter() 227 { 228 if (m_hasVideo) { 229 m_videoAssetWriterInput = adoptNS([PAL::allocAVAssetWriterInputInstance() initWithMediaType:AVMediaTypeVideo outputSettings:nil sourceFormatHint:m_videoFormatDescription.get()]); 230 [m_videoAssetWriterInput setExpectsMediaDataInRealTime:true]; 231 if (![m_writer.get() canAddInput:m_videoAssetWriterInput.get()]) { 232 RELEASE_LOG_ERROR(MediaStream, "MediaRecorderPrivateWriter::startAssetWriter failed canAddInput for video"); 233 return; 234 } 235 [m_writer.get() addInput:m_videoAssetWriterInput.get()]; 236 } 237 238 if (m_hasAudio) { 239 m_audioAssetWriterInput = adoptNS([PAL::allocAVAssetWriterInputInstance() initWithMediaType:AVMediaTypeAudio outputSettings:nil sourceFormatHint:m_audioFormatDescription.get()]); 240 [m_audioAssetWriterInput setExpectsMediaDataInRealTime:true]; 241 if (![m_writer.get() canAddInput:m_audioAssetWriterInput.get()]) { 242 RELEASE_LOG_ERROR(MediaStream, "MediaRecorderPrivateWriter::startAssetWriter failed canAddInput for audio"); 243 return; 244 } 245 [m_writer.get() addInput:m_audioAssetWriterInput.get()]; 246 } 247 248 if (![m_writer.get() startWriting]) { 249 
RELEASE_LOG_ERROR(MediaStream, "MediaRecorderPrivateWriter::startAssetWriter failed startWriting"); 250 return; 251 } 252 253 [m_writer.get() startSessionAtSourceTime:kCMTimeZero]; 254 255 appendCompressedSampleBuffers(); 256 257 m_hasStartedWriting = true; 258 } 259 260 bool MediaRecorderPrivateWriter::appendCompressedAudioSampleBuffer() 261 { 262 if (!m_audioCompressor) 263 return false; 264 265 if (![m_audioAssetWriterInput isReadyForMoreMediaData]) 266 return false; 267 268 auto buffer = m_audioCompressor->takeOutputSampleBuffer(); 269 if (!buffer) 270 return false; 271 272 [m_audioAssetWriterInput.get() appendSampleBuffer:buffer.get()]; 273 return true; 274 } 275 276 bool MediaRecorderPrivateWriter::appendCompressedVideoSampleBuffer() 277 { 278 if (!m_videoCompressor) 279 return false; 280 281 if (![m_videoAssetWriterInput isReadyForMoreMediaData]) 282 return false; 283 284 auto buffer = m_videoCompressor->takeOutputSampleBuffer(); 285 if (!buffer) 286 return false; 287 288 m_lastVideoPresentationTime = CMSampleBufferGetPresentationTimeStamp(buffer.get()); 289 m_lastVideoDecodingTime = CMSampleBufferGetDecodeTimeStamp(buffer.get()); 290 m_hasEncodedVideoSamples = true; 291 292 [m_videoAssetWriterInput.get() appendSampleBuffer:buffer.get()]; 293 return true; 294 } 295 296 void MediaRecorderPrivateWriter::appendCompressedSampleBuffers() 297 { 298 while (appendCompressedVideoSampleBuffer() || appendCompressedAudioSampleBuffer()) { }; 299 } 300 301 static inline void appendEndsPreviousSampleDurationMarker(AVAssetWriterInput *assetWriterInput, CMTime presentationTimeStamp, CMTime decodingTimeStamp) 302 { 303 CMSampleTimingInfo timingInfo = { kCMTimeInvalid, presentationTimeStamp, decodingTimeStamp}; 304 305 CMSampleBufferRef buffer = NULL; 306 auto error = CMSampleBufferCreate(kCFAllocatorDefault, NULL, true, NULL, NULL, NULL, 0, 1, &timingInfo, 0, NULL, &buffer); 307 if (error) { 308 RELEASE_LOG_ERROR(MediaStream, "MediaRecorderPrivateWriter 
appendEndsPreviousSampleDurationMarker failed CMSampleBufferCreate with %d", error); 309 return; 310 } 311 auto sampleBuffer = adoptCF(buffer); 312 313 CMSetAttachment(sampleBuffer.get(), kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration, kCFBooleanTrue, kCMAttachmentMode_ShouldPropagate); 314 if (![assetWriterInput appendSampleBuffer:sampleBuffer.get()]) 315 RELEASE_LOG_ERROR(MediaStream, "MediaRecorderPrivateWriter appendSampleBuffer to writer input failed"); 316 } 317 318 void MediaRecorderPrivateWriter::appendEndOfVideoSampleDurationIfNeeded(CompletionHandler<void()>&& completionHandler) 319 { 320 if (!m_hasEncodedVideoSamples) { 321 completionHandler(); 322 return; 323 } 324 if ([m_videoAssetWriterInput isReadyForMoreMediaData]) { 325 appendEndsPreviousSampleDurationMarker(m_videoAssetWriterInput.get(), m_lastVideoPresentationTime, m_lastVideoDecodingTime); 326 completionHandler(); 327 return; 328 } 329 330 auto block = makeBlockPtr([this, weakThis = makeWeakPtr(this), completionHandler = WTFMove(completionHandler)]() mutable { 331 if (weakThis) { 332 appendEndsPreviousSampleDurationMarker(m_videoAssetWriterInput.get(), m_lastVideoPresentationTime, m_lastVideoDecodingTime); 333 [m_videoAssetWriterInput markAsFinished]; 334 } 335 completionHandler(); 336 }); 337 [m_videoAssetWriterInput requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:block.get()]; 338 } 339 340 void MediaRecorderPrivateWriter::flushCompressedSampleBuffers(CompletionHandler<void()>&& completionHandler) 341 { 342 appendCompressedSampleBuffers(); 343 appendEndOfVideoSampleDurationIfNeeded(WTFMove(completionHandler)); 115 return nullptr; 116 } 117 118 auto writer = adoptRef(*new MediaRecorderPrivateWriter(WTFMove(avAssetWriter), WTFMove(filePath))); 119 120 if (hasAudio && !writer->setAudioInput()) 121 return nullptr; 122 123 if (width && height) { 124 if (!writer->setVideoInput(width, height)) 125 return nullptr; 126 } 127 128 return WTFMove(writer); 129 } 130 131 
MediaRecorderPrivateWriter::MediaRecorderPrivateWriter(RetainPtr<AVAssetWriter>&& avAssetWriter, String&& filePath) 132 : m_writer(WTFMove(avAssetWriter)) 133 , m_path(WTFMove(filePath)) 134 { 135 } 136 137 MediaRecorderPrivateWriter::~MediaRecorderPrivateWriter() 138 { 139 clear(); 344 140 } 345 141 346 142 void MediaRecorderPrivateWriter::clear() 347 143 { 144 if (m_videoInput) { 145 m_videoInput.clear(); 146 dispatch_release(m_videoPullQueue); 147 } 148 if (m_audioInput) { 149 m_audioInput.clear(); 150 dispatch_release(m_audioPullQueue); 151 } 348 152 if (m_writer) 349 153 m_writer.clear(); … … 354 158 } 355 159 160 bool MediaRecorderPrivateWriter::setVideoInput(int width, int height) 161 { 162 ASSERT(!m_videoInput); 163 164 NSDictionary *compressionProperties = @{ 165 AVVideoAverageBitRateKey : @(width * height * 12), 166 AVVideoExpectedSourceFrameRateKey : @(30), 167 AVVideoMaxKeyFrameIntervalKey : @(120), 168 AVVideoProfileLevelKey : AVVideoProfileLevelH264MainAutoLevel 169 }; 170 171 NSDictionary *videoSettings = @{ 172 AVVideoCodecKey: AVVideoCodecH264, 173 AVVideoWidthKey: @(width), 174 AVVideoHeightKey: @(height), 175 AVVideoCompressionPropertiesKey: compressionProperties 176 }; 177 178 m_videoInput = adoptNS([PAL::allocAVAssetWriterInputInstance() initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings sourceFormatHint:nil]); 179 [m_videoInput setExpectsMediaDataInRealTime:true]; 180 181 if (![m_writer canAddInput:m_videoInput.get()]) { 182 m_videoInput = nullptr; 183 RELEASE_LOG_ERROR(MediaStream, "the video input is not allowed to add to the AVAssetWriter"); 184 return false; 185 } 186 [m_writer addInput:m_videoInput.get()]; 187 m_videoPullQueue = dispatch_queue_create("WebCoreVideoRecordingPullBufferQueue", DISPATCH_QUEUE_SERIAL); 188 return true; 189 } 190 191 bool MediaRecorderPrivateWriter::setAudioInput() 192 { 193 ASSERT(!m_audioInput); 194 195 NSDictionary *audioSettings = @{ 196 AVEncoderBitRateKey : @(28000), 197 AVFormatIDKey : 
@(kAudioFormatMPEG4AAC), 198 AVNumberOfChannelsKey : @(1), 199 AVSampleRateKey : @(22050) 200 }; 201 202 m_audioInput = adoptNS([PAL::allocAVAssetWriterInputInstance() initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings sourceFormatHint:nil]); 203 [m_audioInput setExpectsMediaDataInRealTime:true]; 204 205 if (![m_writer canAddInput:m_audioInput.get()]) { 206 m_audioInput = nullptr; 207 RELEASE_LOG_ERROR(MediaStream, "the audio input is not allowed to add to the AVAssetWriter"); 208 return false; 209 } 210 [m_writer addInput:m_audioInput.get()]; 211 m_audioPullQueue = dispatch_queue_create("WebCoreAudioRecordingPullBufferQueue", DISPATCH_QUEUE_SERIAL); 212 return true; 213 } 356 214 357 215 static inline RetainPtr<CMSampleBufferRef> copySampleBufferWithCurrentTimeStamp(CMSampleBufferRef originalBuffer) … … 360 218 CMItemCount count = 0; 361 219 CMSampleBufferGetSampleTimingInfoArray(originalBuffer, 0, nil, &count); 362 220 363 221 Vector<CMSampleTimingInfo> timeInfo(count); 364 222 CMSampleBufferGetSampleTimingInfoArray(originalBuffer, count, timeInfo.data(), &count); 365 366 for ( autoi = 0; i < count; i++) {223 224 for (CMItemCount i = 0; i < count; i++) { 367 225 timeInfo[i].decodeTimeStamp = kCMTimeInvalid; 368 226 timeInfo[i].presentationTimeStamp = startTime; 369 227 } 370 228 371 229 CMSampleBufferRef newBuffer = nullptr; 372 if (auto error = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, originalBuffer, count, timeInfo.data(), &newBuffer)) { 373 RELEASE_LOG_ERROR(MediaStream, "MediaRecorderPrivateWriter CMSampleBufferCreateCopyWithNewTiming failed with %d", error); 374 return nullptr; 375 } 230 auto error = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, originalBuffer, count, timeInfo.data(), &newBuffer); 231 if (error) 232 return nullptr; 376 233 return adoptCF(newBuffer); 377 234 } … … 379 236 void MediaRecorderPrivateWriter::appendVideoSampleBuffer(CMSampleBufferRef sampleBuffer) 380 237 { 381 // FIXME: We should not 
set the timestamps if they are already set. 382 if (auto bufferWithCurrentTime = copySampleBufferWithCurrentTimeStamp(sampleBuffer)) 383 m_videoCompressor->addSampleBuffer(bufferWithCurrentTime.get()); 238 ASSERT(m_videoInput); 239 if (m_isStopped) 240 return; 241 242 if (!m_hasStartedWriting) { 243 if (![m_writer startWriting]) { 244 m_isStopped = true; 245 RELEASE_LOG_ERROR(MediaStream, "create AVAssetWriter instance failed with error code %ld", (long)[m_writer error]); 246 return; 247 } 248 [m_writer startSessionAtSourceTime:CMClockGetTime(CMClockGetHostTimeClock())]; 249 m_hasStartedWriting = true; 250 RefPtr<MediaRecorderPrivateWriter> protectedThis = this; 251 [m_videoInput requestMediaDataWhenReadyOnQueue:m_videoPullQueue usingBlock:[this, protectedThis = WTFMove(protectedThis)] { 252 do { 253 if (![m_videoInput isReadyForMoreMediaData]) 254 break; 255 auto locker = holdLock(m_videoLock); 256 if (m_videoBufferPool.isEmpty()) 257 break; 258 auto buffer = m_videoBufferPool.takeFirst(); 259 locker.unlockEarly(); 260 if (![m_videoInput appendSampleBuffer:buffer.get()]) 261 break; 262 } while (true); 263 if (m_isStopped && m_videoBufferPool.isEmpty()) { 264 [m_videoInput markAsFinished]; 265 m_finishWritingVideoSemaphore.signal(); 266 } 267 }]; 268 return; 269 } 270 auto bufferWithCurrentTime = copySampleBufferWithCurrentTimeStamp(sampleBuffer); 271 if (!bufferWithCurrentTime) 272 return; 273 274 auto locker = holdLock(m_videoLock); 275 m_videoBufferPool.append(WTFMove(bufferWithCurrentTime)); 384 276 } 385 277 … … 389 281 CMFormatDescriptionRef format = nullptr; 390 282 auto error = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, basicDescription, 0, NULL, 0, NULL, NULL, &format); 391 if (error) { 392 RELEASE_LOG_ERROR(MediaStream, "MediaRecorderPrivateWriter CMAudioFormatDescriptionCreate failed with %d", error); 393 return nullptr; 394 } 283 if (error) 284 return nullptr; 395 285 return adoptCF(format); 396 286 } 397 287 398 static inline 
RetainPtr<CMSampleBufferRef> createAudioSampleBuffer(const PlatformAudioData& data, const AudioStreamDescription& description, const WTF::MediaTime& time, size_t sampleCount) 399 { 288 static inline RetainPtr<CMSampleBufferRef> createAudioSampleBufferWithPacketDescriptions(CMFormatDescriptionRef format, size_t sampleCount) 289 { 290 CMTime startTime = CMClockGetTime(CMClockGetHostTimeClock()); 291 CMSampleBufferRef sampleBuffer = nullptr; 292 auto error = CMAudioSampleBufferCreateWithPacketDescriptions(kCFAllocatorDefault, NULL, false, NULL, NULL, format, sampleCount, startTime, NULL, &sampleBuffer); 293 if (error) 294 return nullptr; 295 return adoptCF(sampleBuffer); 296 } 297 298 void MediaRecorderPrivateWriter::appendAudioSampleBuffer(const PlatformAudioData& data, const AudioStreamDescription& description, const WTF::MediaTime&, size_t sampleCount) 299 { 300 ASSERT(m_audioInput); 301 if ((!m_hasStartedWriting && m_videoInput) || m_isStopped) 302 return; 400 303 auto format = createAudioFormatDescription(description); 401 304 if (!format) 402 return nullptr; 403 404 CMSampleBufferRef sampleBuffer = nullptr; 405 auto error = CMAudioSampleBufferCreateWithPacketDescriptions(kCFAllocatorDefault, NULL, false, NULL, NULL, format.get(), sampleCount, toCMTime(time), NULL, &sampleBuffer); 406 if (error) { 407 RELEASE_LOG_ERROR(MediaStream, "MediaRecorderPrivateWriter createAudioSampleBufferWithPacketDescriptions failed with %d", error); 408 return nullptr; 409 } 410 auto buffer = adoptCF(sampleBuffer); 411 412 error = CMSampleBufferSetDataBufferFromAudioBufferList(buffer.get(), kCFAllocatorDefault, kCFAllocatorDefault, 0, downcast<WebAudioBufferList>(data).list()); 413 if (error) { 414 RELEASE_LOG_ERROR(MediaStream, "MediaRecorderPrivateWriter CMSampleBufferSetDataBufferFromAudioBufferList failed with %d", error); 415 return nullptr; 416 } 417 return buffer; 418 } 419 420 void MediaRecorderPrivateWriter::appendAudioSampleBuffer(const PlatformAudioData& data, const 
AudioStreamDescription& description, const WTF::MediaTime& time, size_t sampleCount) 421 { 422 if (auto sampleBuffer = createAudioSampleBuffer(data, description, time, sampleCount)) 423 m_audioCompressor->addSampleBuffer(sampleBuffer.get()); 305 return; 306 if (m_isFirstAudioSample) { 307 if (!m_videoInput) { 308 // audio-only recording. 309 if (![m_writer startWriting]) { 310 m_isStopped = true; 311 return; 312 } 313 [m_writer startSessionAtSourceTime:CMClockGetTime(CMClockGetHostTimeClock())]; 314 m_hasStartedWriting = true; 315 } 316 m_isFirstAudioSample = false; 317 RefPtr<MediaRecorderPrivateWriter> protectedThis = this; 318 [m_audioInput requestMediaDataWhenReadyOnQueue:m_audioPullQueue usingBlock:[this, protectedThis = WTFMove(protectedThis)] { 319 do { 320 if (![m_audioInput isReadyForMoreMediaData]) 321 break; 322 auto locker = holdLock(m_audioLock); 323 if (m_audioBufferPool.isEmpty()) 324 break; 325 auto buffer = m_audioBufferPool.takeFirst(); 326 locker.unlockEarly(); 327 [m_audioInput appendSampleBuffer:buffer.get()]; 328 } while (true); 329 if (m_isStopped && m_audioBufferPool.isEmpty()) { 330 [m_audioInput markAsFinished]; 331 m_finishWritingAudioSemaphore.signal(); 332 } 333 }]; 334 } 335 336 auto sampleBuffer = createAudioSampleBufferWithPacketDescriptions(format.get(), sampleCount); 337 if (!sampleBuffer) 338 return; 339 auto error = CMSampleBufferSetDataBufferFromAudioBufferList(sampleBuffer.get(), kCFAllocatorDefault, kCFAllocatorDefault, 0, downcast<WebAudioBufferList>(data).list()); 340 if (error) 341 return; 342 343 auto locker = holdLock(m_audioLock); 344 m_audioBufferPool.append(WTFMove(sampleBuffer)); 424 345 } 425 346 … … 430 351 431 352 m_isStopped = true; 432 433 if (m_videoCompressor)434 m_videoCompressor->finish();435 if (m_audioCompressor)436 m_audioCompressor->finish();437 438 353 if (!m_hasStartedWriting) 439 354 return; 440 355 ASSERT([m_writer status] == AVAssetWriterStatusWriting); 356 if (m_videoInput) 357 
m_finishWritingVideoSemaphore.wait(); 358 359 if (m_audioInput) 360 m_finishWritingAudioSemaphore.wait(); 441 361 442 362 m_isStopping = true; 443 444 flushCompressedSampleBuffers([this, weakThis = makeWeakPtr(this)]() mutable { 445 if (!weakThis) 446 return; 447 448 ALLOW_DEPRECATED_DECLARATIONS_BEGIN 449 [m_writer flush]; 450 ALLOW_DEPRECATED_DECLARATIONS_END 451 [m_writer finishWritingWithCompletionHandler:[this, weakThis = WTFMove(weakThis)]() mutable { 452 callOnMainThread([this, weakThis = WTFMove(weakThis)]() mutable { 453 if (!weakThis) 454 return; 455 456 m_isStopping = false; 457 if (m_fetchDataCompletionHandler) { 458 auto buffer = WTFMove(m_data); 459 m_fetchDataCompletionHandler(WTFMove(buffer)); 460 } 461 462 m_isStopped = false; 463 m_hasStartedWriting = false; 464 clear(); 465 }); 466 }]; 467 }); 363 [m_writer finishWritingWithCompletionHandler:[this, weakPtr = makeWeakPtr(*this)]() mutable { 364 callOnMainThread([this, weakPtr = WTFMove(weakPtr), buffer = SharedBuffer::createWithContentsOfFile(m_path)]() mutable { 365 if (!weakPtr) 366 return; 367 368 m_isStopping = false; 369 if (m_fetchDataCompletionHandler) 370 m_fetchDataCompletionHandler(WTFMove(buffer)); 371 else 372 m_data = WTFMove(buffer); 373 374 m_isStopped = false; 375 m_hasStartedWriting = false; 376 m_isFirstAudioSample = true; 377 clear(); 378 }); 379 m_finishWritingSemaphore.signal(); 380 }]; 381 m_finishWritingSemaphore.wait(); 468 382 } 469 383 … … 479 393 } 480 394 481 void MediaRecorderPrivateWriter::appendData(const char* data, size_t size)482 {483 if (!m_data) {484 m_data = SharedBuffer::create(data, size);485 return;486 }487 m_data->append(data, size);488 }489 490 void MediaRecorderPrivateWriter::appendData(Ref<SharedBuffer>&& buffer)491 {492 if (!m_data) {493 m_data = WTFMove(buffer);494 return;495 }496 m_data->append(WTFMove(buffer));497 }498 499 395 } // namespace WebCore 500 396 501 #endif // ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)397 #endif // 
ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION) -
trunk/Source/WebKit/ChangeLog
r262659 r262663 1 2020-06-05 Ryan Haddad <ryanhaddad@apple.com> 2 3 Unreviewed, reverting r262619, r262625, and r262641. 4 5 Caused mediarecorder layout test crashes. 6 7 Reverted changesets: 8 9 "[Cocoa] Use AVAssetWriterDelegate to implement MediaRecorder" 10 https://bugs.webkit.org/show_bug.cgi?id=206582 11 https://trac.webkit.org/changeset/262619 12 13 "[Cocoa] Use AVAssetWriterDelegate to implement MediaRecorder" 14 https://bugs.webkit.org/show_bug.cgi?id=206582 15 https://trac.webkit.org/changeset/262625 16 17 "Unreviewed, silence deprecation warning to fix build with 18 latest SDK." 19 https://trac.webkit.org/changeset/262641 20 1 21 2020-06-05 Kate Cheney <katherine_cheney@apple.com> 2 22 -
trunk/Source/WebKit/GPUProcess/GPUConnectionToWebProcess.cpp
r262619 r262663 229 229 } 230 230 231 #if HAVE(AVASSETWRITERDELEGATE)232 231 RemoteMediaRecorderManager& GPUConnectionToWebProcess::mediaRecorderManager() 233 232 { … … 237 236 return *m_remoteMediaRecorderManager; 238 237 } 239 #endif240 238 241 239 #if ENABLE(VIDEO_TRACK) … … 256 254 } 257 255 #endif 258 #endif // PLATFORM(COCOA) && ENABLE(MEDIA_STREAM)256 #endif 259 257 260 258 #if PLATFORM(COCOA) && USE(LIBWEBRTC) … … 374 372 return true; 375 373 } 376 #if HAVE(AVASSETWRITERDELEGATE)377 374 if (decoder.messageReceiverName() == Messages::RemoteMediaRecorderManager::messageReceiverName()) { 378 375 mediaRecorderManager().didReceiveMessageFromWebProcess(connection, decoder); … … 383 380 return true; 384 381 } 385 #endif // HAVE(AVASSETWRITERDELEGATE)386 382 #if ENABLE(VIDEO_TRACK) 387 383 if (decoder.messageReceiverName() == Messages::RemoteAudioMediaStreamTrackRendererManager::messageReceiverName()) { … … 401 397 return true; 402 398 } 403 #endif // PLATFORM(COCOA) && ENABLE(VIDEO_TRACK)404 #endif // ENABLE(MEDIA_STREAM)399 #endif 400 #endif 405 401 #if PLATFORM(COCOA) && USE(LIBWEBRTC) 406 402 if (decoder.messageReceiverName() == Messages::LibWebRTCCodecsProxy::messageReceiverName()) { -
trunk/Source/WebKit/GPUProcess/GPUConnectionToWebProcess.h
r262619 r262663 117 117 #if PLATFORM(COCOA) && ENABLE(MEDIA_STREAM) 118 118 UserMediaCaptureManagerProxy& userMediaCaptureManagerProxy(); 119 #if HAVE(AVASSETWRITERDELEGATE)120 119 RemoteMediaRecorderManager& mediaRecorderManager(); 121 #endif122 120 #if ENABLE(VIDEO_TRACK) 123 121 RemoteAudioMediaStreamTrackRendererManager& audioTrackRendererManager(); … … 171 169 #if PLATFORM(COCOA) && ENABLE(MEDIA_STREAM) 172 170 std::unique_ptr<UserMediaCaptureManagerProxy> m_userMediaCaptureManagerProxy; 173 #if HAVE(AVASSETWRITERDELEGATE)174 171 std::unique_ptr<RemoteMediaRecorderManager> m_remoteMediaRecorderManager; 175 #endif176 172 #if ENABLE(VIDEO_TRACK) 177 173 std::unique_ptr<RemoteAudioMediaStreamTrackRendererManager> m_audioTrackRendererManager; -
trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.cpp
r262619 r262663 27 27 #include "RemoteMediaRecorder.h" 28 28 29 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)29 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) 30 30 31 31 #include "SharedRingBufferStorage.h" … … 136 136 } 137 137 138 #endif // PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)138 #endif -
trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.h
r262619 r262663 26 26 #pragma once 27 27 28 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)28 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) 29 29 30 30 #include "MediaRecorderIdentifier.h" … … 83 83 } 84 84 85 #endif // PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)85 #endif -
trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorder.messages.in
r262619 r262663 22 22 # THE POSSIBILITY OF SUCH DAMAGE. 23 23 24 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)24 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) 25 25 26 26 messages -> RemoteMediaRecorder NotRefCounted { -
trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorderManager.cpp
r262619 r262663 27 27 #include "RemoteMediaRecorderManager.h" 28 28 29 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)29 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) 30 30 31 31 #include "DataReference.h" -
trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorderManager.h
r262619 r262663 28 28 #pragma once 29 29 30 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)30 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) 31 31 32 32 #include "MediaRecorderIdentifier.h" -
trunk/Source/WebKit/GPUProcess/webrtc/RemoteMediaRecorderManager.messages.in
r262619 r262663 22 22 # THE POSSIBILITY OF SUCH DAMAGE. 23 23 24 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)24 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) 25 25 26 26 messages -> RemoteMediaRecorderManager NotRefCounted { -
trunk/Source/WebKit/GPUProcess/webrtc/RemoteSampleBufferDisplayLayerManager.h
r262619 r262663 31 31 #include "RemoteSampleBufferDisplayLayerManagerMessagesReplies.h" 32 32 #include "SampleBufferDisplayLayerIdentifier.h" 33 #include <WebCore/IntSize.h>34 33 #include <wtf/HashMap.h> 35 34 -
trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.cpp
r262619 r262663 27 27 #include "MediaRecorderPrivate.h" 28 28 29 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)29 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) 30 30 31 31 #include "DataReference.h" … … 136 136 } 137 137 138 #endif // PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)138 #endif // PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) -
trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderPrivate.h
r262619 r262663 26 26 #pragma once 27 27 28 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)28 #if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) 29 29 30 30 #include "MediaRecorderIdentifier.h" … … 77 77 } 78 78 79 #endif // PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)79 #endif // PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) 80 80 -
trunk/Source/WebKit/WebProcess/GPU/webrtc/MediaRecorderProvider.cpp
r262619 r262663 37 37 std::unique_ptr<WebCore::MediaRecorderPrivate> MediaRecorderProvider::createMediaRecorderPrivate(MediaStreamPrivate& stream) 38 38 { 39 #if ENABLE(GPU_PROCESS) && HAVE(AVASSETWRITERDELEGATE)39 #if ENABLE(GPU_PROCESS) 40 40 if (m_useGPUProcess) 41 41 return makeUnique<MediaRecorderPrivate>(stream);
Note: See TracChangeset
for help on using the changeset viewer.