Changeset 277256 in webkit
- Timestamp:
- May 10, 2021, 12:10:31 AM (4 years ago)
- Location:
- trunk/Source/WebKit
- Files:
-
- 5 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/WebKit/ChangeLog
r277250 r277256 1 2021-05-10 Youenn Fablet <youenn@apple.com> 2 3 Use IPC::Semaphore instead of sending an IPC message for every captured audio sample 4 https://bugs.webkit.org/show_bug.cgi?id=225452 5 6 Reviewed by Eric Carlson. 7 8 Previously, we were sending an IPC message from UIProcess or GPUProcess to WebProcess for every microphone audio sample chunk. 9 We are now using IPC::Semaphore to signal that a new chunk is to be processed. 10 11 We no longer send the chunk timestamp. Instead, we reconstruct it from the number of previously processed samples. 12 At audio storage change, we send the start time and we assume that there is continuous timing based on sample counts after that. 13 That is why we create a new audio storage change any time we need to reset or the configuration changes, which should not happen often in practice. 14 15 We process fixed-size chunks on WebProcess side and signal it on GPUProcess/UIProcess side. 16 This size is sent through IPC at audio storage change time and is the max of 128 samples (WebAudio quantum) and AudioSession preferred size. 17 In case WebAudio is used, it should be 128 samples. In case WebAudio is not used, it should be 20 ms of audio data. 18 19 Covered by existing tests and manually tested. 
20 21 * UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp: 22 (WebKit::UserMediaCaptureManagerProxy::SourceProxy::start): 23 (WebKit::UserMediaCaptureManagerProxy::SourceProxy::storageChanged): 24 * WebProcess/cocoa/RemoteCaptureSampleManager.cpp: 25 (WebKit::RemoteCaptureSampleManager::audioStorageChanged): 26 (WebKit::RemoteCaptureSampleManager::RemoteAudio::RemoteAudio): 27 (WebKit::RemoteCaptureSampleManager::RemoteAudio::~RemoteAudio): 28 (WebKit::RemoteCaptureSampleManager::RemoteAudio::stopThread): 29 (WebKit::RemoteCaptureSampleManager::RemoteAudio::startThread): 30 (WebKit::RemoteCaptureSampleManager::RemoteAudio::setStorage): 31 * WebProcess/cocoa/RemoteCaptureSampleManager.h: 32 * WebProcess/cocoa/RemoteCaptureSampleManager.messages.in: 33 1 34 2021-05-09 Ryosuke Niwa <rniwa@webkit.org> 2 35 -
trunk/Source/WebKit/UIProcess/Cocoa/UserMediaCaptureManagerProxy.cpp
r276633 r277256 37 37 #include "WebProcessProxy.h" 38 38 #include <WebCore/AudioSession.h> 39 #include <WebCore/AudioUtilities.h> 39 40 #include <WebCore/CARingBuffer.h> 40 41 #include <WebCore/ImageRotationSessionVT.h> … … 61 62 , m_connection(WTFMove(connection)) 62 63 , m_source(WTFMove(source)) 63 , m_ringBuffer(makeUniqueRef<SharedRingBufferStorage>(std::bind(&SourceProxy::storageChanged, this, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3)))64 64 { 65 65 m_source->addObserver(*this); … … 78 78 ~SourceProxy() 79 79 { 80 storage().invalidate(); 80 if (m_ringBuffer) 81 static_cast<SharedRingBufferStorage&>(m_ringBuffer->storage()).invalidate(); 81 82 82 83 switch (m_source->type()) { … … 94 95 95 96 RealtimeMediaSource& source() { return m_source; } 96 SharedRingBufferStorage& storage() { return static_cast<SharedRingBufferStorage&>(m_ringBuffer.storage()); }97 97 CAAudioStreamDescription& description() { return m_description; } 98 98 int64_t numberOfFrames() { return m_numberOfFrames; } … … 109 109 void start() 110 110 { 111 m_shouldReset = true; 111 112 m_isEnded = false; 112 113 m_source->start(); … … 146 147 // May get called on a background thread. 
147 148 void audioSamplesAvailable(const MediaTime& time, const PlatformAudioData& audioData, const AudioStreamDescription& description, size_t numberOfFrames) final { 148 DisableMallocRestrictionsForCurrentThreadScope scope; 149 150 if (m_description != description) { 149 if (m_description != description || m_shouldReset) { 150 DisableMallocRestrictionsForCurrentThreadScope scope; 151 152 m_shouldReset = false; 153 m_writeOffset = 0; 154 m_remainingFrameCount = 0; 155 m_startTime = time; 156 m_captureSemaphore = makeUnique<IPC::Semaphore>(); 151 157 ASSERT(description.platformDescription().type == PlatformDescription::CAAudioStreamBasicType); 152 158 m_description = *WTF::get<const AudioStreamBasicDescription*>(description.platformDescription().description); 153 159 160 m_frameChunkSize = std::max(WebCore::AudioUtilities::renderQuantumSize, AudioSession::sharedSession().preferredBufferSize()); 161 154 162 // Allocate a ring buffer large enough to contain 2 seconds of audio. 155 163 m_numberOfFrames = m_description.sampleRate() * 2; 156 m_ringBuffer.allocate(m_description.streamDescription(), m_numberOfFrames); 164 m_ringBuffer.reset(); 165 auto storage = makeUniqueRef<SharedRingBufferStorage>(std::bind(&SourceProxy::storageChanged, this, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3)); 166 m_ringBuffer = makeUnique<CARingBuffer>(WTFMove(storage), m_description.streamDescription(), m_numberOfFrames); 157 167 } 158 168 159 169 ASSERT(is<WebAudioBufferList>(audioData)); 160 m_ringBuffer.store(downcast<WebAudioBufferList>(audioData).list(), numberOfFrames, time.timeValue()); 161 m_connection->send(Messages::RemoteCaptureSampleManager::AudioSamplesAvailable(m_id, time, numberOfFrames), 0); 170 m_ringBuffer->store(downcast<WebAudioBufferList>(audioData).list(), numberOfFrames, m_writeOffset); 171 m_writeOffset += numberOfFrames; 172 173 size_t framesToSend = numberOfFrames + m_remainingFrameCount; 174 size_t signalCount = framesToSend / 
m_frameChunkSize; 175 m_remainingFrameCount = framesToSend - (signalCount * m_frameChunkSize); 176 for (unsigned i = 0; i < signalCount; ++i) 177 m_captureSemaphore->signal(); 162 178 } 163 179 … … 200 216 void storageChanged(SharedMemory* storage, const WebCore::CAAudioStreamDescription& format, size_t frameCount) 201 217 { 202 DisableMallocRestrictionsForCurrentThreadScope scope;203 218 SharedMemory::Handle handle; 204 219 if (storage) … … 211 226 uint64_t dataSize = 0; 212 227 #endif 213 m_connection->send(Messages::RemoteCaptureSampleManager::AudioStorageChanged(m_id, SharedMemory::IPCHandle { WTFMove(handle), dataSize }, format, frameCount ), 0);228 m_connection->send(Messages::RemoteCaptureSampleManager::AudioStorageChanged(m_id, SharedMemory::IPCHandle { WTFMove(handle), dataSize }, format, frameCount, *m_captureSemaphore, m_startTime, m_frameChunkSize), 0); 214 229 } 215 230 … … 224 239 Ref<IPC::Connection> m_connection; 225 240 Ref<RealtimeMediaSource> m_source; 226 CARingBufferm_ringBuffer;241 std::unique_ptr<CARingBuffer> m_ringBuffer; 227 242 CAAudioStreamDescription m_description { }; 228 243 int64_t m_numberOfFrames { 0 }; … … 230 245 std::unique_ptr<ImageRotationSessionVT> m_rotationSession; 231 246 bool m_shouldApplyRotation { false }; 247 std::unique_ptr<IPC::Semaphore> m_captureSemaphore; 248 int64_t m_writeOffset { 0 }; 249 int64_t m_remainingFrameCount { 0 }; 250 size_t m_frameChunkSize { 0 }; 251 MediaTime m_startTime; 252 bool m_shouldReset { false }; 232 253 }; 233 254 -
trunk/Source/WebKit/WebProcess/cocoa/RemoteCaptureSampleManager.cpp
r274553 r277256 126 126 } 127 127 128 void RemoteCaptureSampleManager::audioStorageChanged(WebCore::RealtimeMediaSourceIdentifier identifier, const SharedMemory::IPCHandle& ipcHandle, const WebCore::CAAudioStreamDescription& description, uint64_t numberOfFrames )128 void RemoteCaptureSampleManager::audioStorageChanged(WebCore::RealtimeMediaSourceIdentifier identifier, const SharedMemory::IPCHandle& ipcHandle, const WebCore::CAAudioStreamDescription& description, uint64_t numberOfFrames, IPC::Semaphore&& semaphore, const MediaTime& mediaTime, size_t frameChunkSize) 129 129 { 130 130 ASSERT(!WTF::isMainRunLoop()); … … 135 135 return; 136 136 } 137 iterator->value->setStorage(ipcHandle.handle, description, numberOfFrames); 138 } 139 140 void RemoteCaptureSampleManager::audioSamplesAvailable(WebCore::RealtimeMediaSourceIdentifier identifier, MediaTime time, uint64_t numberOfFrames) 141 { 142 ASSERT(!WTF::isMainRunLoop()); 143 144 auto iterator = m_audioSources.find(identifier); 145 if (iterator == m_audioSources.end()) { 146 RELEASE_LOG_ERROR(WebRTC, "Unable to find source %llu for audioSamplesAvailable", identifier.toUInt64()); 147 return; 148 } 149 iterator->value->audioSamplesAvailable(time, numberOfFrames); 137 iterator->value->setStorage(ipcHandle.handle, description, numberOfFrames, WTFMove(semaphore), mediaTime, frameChunkSize); 150 138 } 151 139 … … 164 152 RemoteCaptureSampleManager::RemoteAudio::RemoteAudio(Ref<RemoteRealtimeAudioSource>&& source) 165 153 : m_source(WTFMove(source)) 166 , m_ringBuffer(makeUnique<CARingBuffer>()) 167 { 168 } 169 170 void RemoteCaptureSampleManager::RemoteAudio::setStorage(const SharedMemory::Handle& handle, const WebCore::CAAudioStreamDescription& description, uint64_t numberOfFrames) 171 { 154 { 155 } 156 157 RemoteCaptureSampleManager::RemoteAudio::~RemoteAudio() 158 { 159 stopThread(); 160 } 161 162 void RemoteCaptureSampleManager::RemoteAudio::stopThread() 163 { 164 if (!m_thread) 165 return; 166 167 m_shouldStopThread = 
true; 168 m_semaphore.signal(); 169 m_thread->waitForCompletion(); 170 m_thread = nullptr; 171 } 172 173 void RemoteCaptureSampleManager::RemoteAudio::startThread() 174 { 175 ASSERT(!m_thread); 176 m_shouldStopThread = false; 177 auto threadLoop = [this]() mutable { 178 m_readOffset = 0; 179 do { 180 // If waitFor fails, the semaphore on the other side was probably destroyed and we should just exit here and wait to launch a new thread. 181 if (!m_semaphore.waitFor(Seconds::infinity())) 182 break; 183 if (m_shouldStopThread) 184 break; 185 186 auto currentTime = m_startTime + MediaTime { m_readOffset, static_cast<uint32_t>(m_description.sampleRate()) }; 187 m_ringBuffer->fetch(m_buffer->list(), m_frameChunkSize, m_readOffset); 188 m_readOffset += m_frameChunkSize; 189 190 m_source->remoteAudioSamplesAvailable(currentTime, *m_buffer, m_description, m_frameChunkSize); 191 } while (!m_shouldStopThread); 192 }; 193 m_thread = Thread::create("RemoteAudioSourceProviderManager::RemoteAudio thread", WTFMove(threadLoop), ThreadType::Audio, Thread::QOS::UserInteractive); 194 } 195 196 void RemoteCaptureSampleManager::RemoteAudio::setStorage(const SharedMemory::Handle& handle, const WebCore::CAAudioStreamDescription& description, uint64_t numberOfFrames, IPC::Semaphore&& semaphore, const MediaTime& mediaTime, size_t frameChunkSize) 197 { 198 stopThread(); 199 200 if (!numberOfFrames) { 201 m_ringBuffer = nullptr; 202 m_buffer = nullptr; 203 return; 204 } 205 206 m_semaphore = WTFMove(semaphore); 172 207 m_description = description; 208 m_startTime = mediaTime; 209 m_frameChunkSize = frameChunkSize; 210 173 211 m_ringBuffer = makeUnique<CARingBuffer>(makeUniqueRef<ReadOnlySharedRingBufferStorage>(handle), description, numberOfFrames); 174 212 m_buffer = makeUnique<WebAudioBufferList>(description, numberOfFrames); 175 } 176 177 void RemoteCaptureSampleManager::RemoteAudio::audioSamplesAvailable(MediaTime time, uint64_t numberOfFrames) 178 { 179 if (!m_buffer) { 180 
RELEASE_LOG_ERROR(WebRTC, "buffer for audio source %llu is null", m_source->identifier().toUInt64()); 181 return; 182 } 183 184 if (!WebAudioBufferList::isSupportedDescription(m_description, numberOfFrames)) { 185 RELEASE_LOG_ERROR(WebRTC, "Unable to support description with given number of frames for audio source %llu", m_source->identifier().toUInt64()); 186 return; 187 } 188 189 m_buffer->setSampleCount(numberOfFrames); 190 191 m_ringBuffer->fetch(m_buffer->list(), numberOfFrames, time.timeValue()); 192 193 m_source->remoteAudioSamplesAvailable(time, *m_buffer, m_description, numberOfFrames); 213 m_buffer->setSampleCount(m_frameChunkSize); 214 215 startThread(); 194 216 } 195 217 -
trunk/Source/WebKit/WebProcess/cocoa/RemoteCaptureSampleManager.h
r273074 r277256 29 29 30 30 #include "Connection.h" 31 #include "IPCSemaphore.h" 31 32 #include "MessageReceiver.h" 32 33 #include "RemoteRealtimeAudioSource.h" … … 66 67 67 68 // Messages 68 void audioStorageChanged(WebCore::RealtimeMediaSourceIdentifier, const SharedMemory::IPCHandle&, const WebCore::CAAudioStreamDescription&, uint64_t numberOfFrames );69 void audioStorageChanged(WebCore::RealtimeMediaSourceIdentifier, const SharedMemory::IPCHandle&, const WebCore::CAAudioStreamDescription&, uint64_t numberOfFrames, IPC::Semaphore&&, const MediaTime&, size_t frameSampleSize); 69 70 void audioSamplesAvailable(WebCore::RealtimeMediaSourceIdentifier, MediaTime, uint64_t numberOfFrames); 70 71 void videoSampleAvailable(WebCore::RealtimeMediaSourceIdentifier, WebCore::RemoteVideoSample&&); … … 76 77 public: 77 78 explicit RemoteAudio(Ref<RemoteRealtimeAudioSource>&&); 79 ~RemoteAudio(); 78 80 79 void setStorage(const SharedMemory::Handle&, const WebCore::CAAudioStreamDescription&, uint64_t numberOfFrames); 80 void audioSamplesAvailable(MediaTime, uint64_t numberOfFrames); 81 void setStorage(const SharedMemory::Handle&, const WebCore::CAAudioStreamDescription&, uint64_t numberOfFrames, IPC::Semaphore&&, const MediaTime&, size_t frameChunkSize); 81 82 82 83 private: 84 void stopThread(); 85 void startThread(); 86 83 87 Ref<RemoteRealtimeAudioSource> m_source; 84 88 WebCore::CAAudioStreamDescription m_description; 89 std::unique_ptr<WebCore::WebAudioBufferList> m_buffer; 85 90 std::unique_ptr<WebCore::CARingBuffer> m_ringBuffer; 86 std::unique_ptr<WebCore::WebAudioBufferList> m_buffer; 91 int64_t m_readOffset { 0 }; 92 MediaTime m_startTime; 93 size_t m_frameChunkSize { 0 }; 94 95 IPC::Semaphore m_semaphore; 96 RefPtr<Thread> m_thread; 97 std::atomic<bool> m_shouldStopThread { false }; 87 98 }; 88 99 -
trunk/Source/WebKit/WebProcess/cocoa/RemoteCaptureSampleManager.messages.in
r272806 r277256 25 25 26 26 messages -> RemoteCaptureSampleManager NotRefCounted { 27 AudioStorageChanged(WebCore::RealtimeMediaSourceIdentifier id, WebKit::SharedMemory::IPCHandle storageHandle, WebCore::CAAudioStreamDescription description, uint64_t numberOfFrames) 28 AudioSamplesAvailable(WebCore::RealtimeMediaSourceIdentifier id, MediaTime time, uint64_t numberOfFrames) 27 AudioStorageChanged(WebCore::RealtimeMediaSourceIdentifier id, WebKit::SharedMemory::IPCHandle storageHandle, WebCore::CAAudioStreamDescription description, uint64_t numberOfFrames, IPC::Semaphore captureSemaphore, MediaTime mediaTime, size_t frameChunkSize); 29 28 VideoSampleAvailable(WebCore::RealtimeMediaSourceIdentifier id, WebCore::RemoteVideoSample sample) 30 29 }
Note:
See TracChangeset
for help on using the changeset viewer.