Changeset 292563 in webkit
- Timestamp:
- Apr 7, 2022 1:58:35 PM (3 months ago)
- Location:
- trunk/Source/WebCore
- Files:
-
- 4 edited
-
ChangeLog (modified) (1 diff)
-
platform/audio/PlatformMediaSession.cpp (modified) (1 diff)
-
platform/audio/PlatformMediaSession.h (modified) (1 diff)
-
platform/audio/cocoa/MediaSessionManagerCocoa.mm (modified) (4 diffs)
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/WebCore/ChangeLog
2022-04-07  Youenn Fablet  <youenn@apple.com>

        (Safari 15 - iOS15): Increased audio latency on streaming via webrtc
        https://bugs.webkit.org/show_bug.cgi?id=236363
        <rdar://problem/88969850>

        Reviewed by Eric Carlson.

        On macOS 12.3, the default preferred buffer size is roughly 100 ms.
        This is ok for regular audio playback but is not desirable when playing realtime audio.
        To reduce the perceived latency, we now reduce the preferred buffer size to 20ms
        whenever playing an audio MediaStreamTrack, similarly to when capturing audio.

        Manually tested.

        * platform/audio/PlatformMediaSession.cpp:
        * platform/audio/PlatformMediaSession.h:
        * platform/audio/cocoa/MediaSessionManagerCocoa.mm:

2022-04-07  Antoine Quint  <graouts@webkit.org>
trunk/Source/WebCore/platform/audio/PlatformMediaSession.cpp
      }

+     bool PlatformMediaSession::hasMediaStreamSource() const
+     {
+         return m_client.hasMediaStreamSource();
+     }

      void PlatformMediaSession::canProduceAudioChanged()
      {
trunk/Source/WebCore/platform/audio/PlatformMediaSession.h
      bool activeAudioSessionRequired() const;
      bool canProduceAudio() const;
+     bool hasMediaStreamSource() const;
      void canProduceAudioChanged();
trunk/Source/WebCore/platform/audio/cocoa/MediaSessionManagerCocoa.mm
      int audioCount = 0;
      int webAudioCount = 0;
+     int audioMediaStreamTrackCount = 0;
      int captureCount = countActiveAudioCaptureSources();
      bool hasAudibleAudioOrVideoMediaType = false;
  …
      case PlatformMediaSession::MediaType::VideoAudio:
          ++videoAudioCount;
+         if (session.canProduceAudio() && session.hasMediaStreamSource())
+             ++audioMediaStreamTrackCount;
          break;
      case PlatformMediaSession::MediaType::Audio:
          ++audioCount;
+         if (session.canProduceAudio() && session.hasMediaStreamSource())
+             ++audioMediaStreamTrackCount;
          break;
      case PlatformMediaSession::MediaType::WebAudio:
  …
      ALWAYS_LOG(LOGIDENTIFIER, "types: "
          "AudioCapture(", captureCount, "), "
+         "AudioTrack(", audioMediaStreamTrackCount, "), "
          "Video(", videoCount, "), "
          "Audio(", audioCount, "), "
  …
      if (webAudioCount)
          bufferSize = AudioUtilities::renderQuantumSize;
-     else if (captureCount) {
-         // In case of audio capture, we want to grab 20 ms chunks to limit the latency so that it is not noticeable by users
+     else if (captureCount || audioMediaStreamTrackCount) {
+         // In case of audio capture or audio MediaStreamTrack playing, we want to grab 20 ms chunks to limit the latency so that it is not noticeable by users
          // while having a large enough buffer so that the audio rendering remains stable, hence a computation based on sample rate.
          bufferSize = AudioSession::sharedSession().sampleRate() / 50;
Note: See TracChangeset
for help on using the changeset viewer.