Changeset 268565 in WebKit


Timestamp:
Oct 15, 2020 5:34:30 PM
Author:
Chris Dumez
Message:

Move AudioContext-specific logic out of BaseAudioContext
https://bugs.webkit.org/show_bug.cgi?id=217794

Reviewed by Geoffrey Garen.

Move AudioContext-specific logic out of BaseAudioContext and into the
AudioContext class. This required having WebKitAudioContext subclass
AudioContext instead of BaseAudioContext.

No new tests, no Web-facing behavior change.
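
For orientation, here is a minimal sketch of the inheritance change, with bodies and unrelated members elided (the real interfaces are in the headers below; the trivial bodies are placeholders, not WebKit's code):

    class BaseAudioContext {                         // shared realtime/offline logic
    protected:
        virtual ~BaseAudioContext() = default;
        virtual void nodeWillBeginPlayback() { }     // default: no-op (offline contexts)
        virtual void lazyInitialize() { }            // shared graph initialization
    };

    class AudioContext : public BaseAudioContext {   // realtime-only logic now lives here
    protected:
        void nodeWillBeginPlayback() final { }       // may start rendering on user gesture
        void lazyInitialize() final { }              // also starts the audio hardware
    };

    class WebKitAudioContext : public AudioContext { }; // legacy prefixed API

Before this patch, WebKitAudioContext derived directly from BaseAudioContext, so the realtime-only members had to sit in the base class where offline contexts could also see them.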

  • Modules/webaudio/AudioContext.cpp:

(WebCore::AudioContext::AudioContext):
(WebCore::AudioContext::suspendRendering):
(WebCore::AudioContext::resumeRendering):
(WebCore::AudioContext::nodeWillBeginPlayback):
(WebCore::AudioContext::startRendering):
(WebCore::AudioContext::lazyInitialize):
(WebCore::AudioContext::willPausePlayback):

  • Modules/webaudio/AudioContext.h:

(WebCore::AudioContext::AudioContext):

  • Modules/webaudio/BaseAudioContext.cpp:

(WebCore::BaseAudioContext::lazyInitialize):

  • Modules/webaudio/BaseAudioContext.h:

(WebCore::BaseAudioContext::nodeWillBeginPlayback):
(WebCore::BaseAudioContext::mediaSession const):

  • Modules/webaudio/DefaultAudioDestinationNode.h:
  • Modules/webaudio/WebKitAudioContext.cpp:

(WebCore::WebKitAudioContext::WebKitAudioContext):

  • Modules/webaudio/WebKitAudioContext.h:

(WebCore::WebKitAudioContext::listener):

Location:
trunk/Source/WebCore
Files:
8 edited

  • trunk/Source/WebCore/ChangeLog

(r268564 → r268565)
@@ -1,2 +1,36 @@
+2020-10-15  Chris Dumez  <cdumez@apple.com>
+
+        Move AudioContext-specific logic out of BaseAudioContext
+        https://bugs.webkit.org/show_bug.cgi?id=217794
+
+        Reviewed by Geoffrey Garen.
+
+        Move AudioContext-specific logic out of BaseAudioContext and into the
+        AudioContext class. This required having WebKitAudioContext subclass
+        AudioContext instead of BaseAudioContext.
+
+        No new tests, no Web-facing behavior change.
+
+        * Modules/webaudio/AudioContext.cpp:
+        (WebCore::AudioContext::AudioContext):
+        (WebCore::AudioContext::suspendRendering):
+        (WebCore::AudioContext::resumeRendering):
+        (WebCore::AudioContext::nodeWillBeginPlayback):
+        (WebCore::AudioContext::startRendering):
+        (WebCore::AudioContext::lazyInitialize):
+        (WebCore::AudioContext::willPausePlayback):
+        * Modules/webaudio/AudioContext.h:
+        (WebCore::AudioContext::AudioContext):
+        * Modules/webaudio/BaseAudioContext.cpp:
+        (WebCore::BaseAudioContext::lazyInitialize):
+        * Modules/webaudio/BaseAudioContext.h:
+        (WebCore::BaseAudioContext::nodeWillBeginPlayback):
+        (WebCore::BaseAudioContext::mediaSession const):
+        * Modules/webaudio/DefaultAudioDestinationNode.h:
+        * Modules/webaudio/WebKitAudioContext.cpp:
+        (WebCore::WebKitAudioContext::WebKitAudioContext):
+        * Modules/webaudio/WebKitAudioContext.h:
+        (WebCore::WebKitAudioContext::listener):
+
 2020-10-15  Sam Weinig  <weinig@apple.com>

  • trunk/Source/WebCore/Modules/webaudio/AudioContext.cpp

(r267147 → r268565)
@@ -31,6 +31,6 @@
 #include "AudioTimestamp.h"
 #include "DOMWindow.h"
-#include "DefaultAudioDestinationNode.h"
 #include "JSDOMPromiseDeferred.h"
+#include "Page.h"
 #include "Performance.h"
 #include <wtf/IsoMallocInlines.h>
@@ -100,4 +100,10 @@
 }

+// Only needed for WebKitOfflineAudioContext.
+AudioContext::AudioContext(Document& document, unsigned numberOfChannels, RefPtr<AudioBuffer>&& renderTarget)
+    : BaseAudioContext(document, numberOfChannels, WTFMove(renderTarget))
+{
+}
+
 double AudioContext::baseLatency()
 {
@@ -154,4 +154,123 @@
 }

+void AudioContext::suspendRendering(DOMPromiseDeferred<void>&& promise)
+{
+    if (isOfflineContext() || isStopped()) {
+        promise.reject(InvalidStateError);
+        return;
+    }
+
+    if (state() == State::Closed || state() == State::Interrupted || !destinationNode()) {
+        promise.reject();
+        return;
+    }
+
+    addReaction(State::Suspended, WTFMove(promise));
+    m_wasSuspendedByScript = true;
+
+    if (!willPausePlayback())
+        return;
+
+    lazyInitialize();
+
+    destinationNode()->suspend([this, protectedThis = makeRef(*this)] {
+        setState(State::Suspended);
+    });
+}
+
+void AudioContext::resumeRendering(DOMPromiseDeferred<void>&& promise)
+{
+    if (isOfflineContext() || isStopped()) {
+        promise.reject(InvalidStateError);
+        return;
+    }
+
+    if (state() == State::Closed || !destinationNode()) {
+        promise.reject();
+        return;
+    }
+
+    addReaction(State::Running, WTFMove(promise));
+    m_wasSuspendedByScript = false;
+
+    if (!willBeginPlayback())
+        return;
+
+    lazyInitialize();
+
+    destinationNode()->resume([this, protectedThis = makeRef(*this)] {
+        setState(State::Running);
+    });
+}
+
+void AudioContext::nodeWillBeginPlayback()
+{
+    // Called by scheduled AudioNodes when clients schedule their start times.
+    // Prior to the introduction of suspend(), resume(), and stop(), starting
+    // a scheduled AudioNode would remove the user-gesture restriction, if present,
+    // and would thus unmute the context. Now that AudioContext stays in the
+    // "suspended" state if a user-gesture restriction is present, starting a
+    // schedule AudioNode should set the state to "running", but only if the
+    // user-gesture restriction is set.
+    if (userGestureRequiredForAudioStart())
+        startRendering();
+}
+
+void AudioContext::startRendering()
+{
+    ALWAYS_LOG(LOGIDENTIFIER);
+    if (isStopped() || !willBeginPlayback() || m_wasSuspendedByScript)
+        return;
+
+    makePendingActivity();
+
+    setState(State::Running);
+
+    lazyInitialize();
+    destination()->startRendering();
+}
+
+void AudioContext::lazyInitialize()
+{
+    if (isInitialized())
+        return;
+
+    BaseAudioContext::lazyInitialize();
+    if (isInitialized()) {
+        if (destinationNode() && state() != State::Running) {
+            // This starts the audio thread. The destination node's provideInput() method will now be called repeatedly to render audio.
+            // Each time provideInput() is called, a portion of the audio stream is rendered. Let's call this time period a "render quantum".
+            // NOTE: for now default AudioContext does not need an explicit startRendering() call from JavaScript.
+            // We may want to consider requiring it for symmetry with OfflineAudioContext.
+            startRendering();
+            ++s_hardwareContextCount;
+        }
+    }
+}
+
+bool AudioContext::willPausePlayback()
+{
+    auto* document = this->document();
+    if (!document)
+        return false;
+
+    if (userGestureRequiredForAudioStart()) {
+        if (!document->processingUserGestureForMedia())
+            return false;
+        removeBehaviorRestriction(BaseAudioContext::RequireUserGestureForAudioStartRestriction);
+    }
+
+    if (pageConsentRequiredForAudioStart()) {
+        auto* page = document->page();
+        if (page && !page->canStartMedia()) {
+            document->addMediaCanStartListener(*this);
+            return false;
+        }
+        removeBehaviorRestriction(BaseAudioContext::RequirePageConsentForAudioStartRestriction);
+    }
+
+    return mediaSession()->clientWillPausePlayback();
+}
+
 #if ENABLE(VIDEO)

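
The suspendRendering() and resumeRendering() methods above queue the promise with addReaction() and settle it only when the destination node's asynchronous suspend()/resume() callback drives the context into the target state via setState(). Neither addReaction() nor setState() appears in this diff; the following self-contained sketch illustrates the pattern (the names ContextSketch and m_reactions are illustrative, not WebKit's):

    #include <functional>
    #include <map>
    #include <utility>
    #include <vector>

    enum class State { Suspended, Running, Closed };

    class ContextSketch {
    public:
        // Queue a completion handler to run once the context reaches `target`.
        void addReaction(State target, std::function<void()>&& resolve)
        {
            m_reactions[target].push_back(std::move(resolve));
        }

        // Called from the destination node's async suspend()/resume() callback.
        void setState(State newState)
        {
            m_state = newState;
            auto pending = std::move(m_reactions[newState]);
            m_reactions[newState].clear();
            for (auto& resolve : pending)
                resolve(); // settle the promises waiting on this transition
        }

    private:
        State m_state { State::Suspended };
        std::map<State, std::vector<std::function<void()>>> m_reactions;
    };

This is why suspendRendering() can return early when willPausePlayback() fails: the promise is already queued and resolves whenever the context eventually reaches the suspended state.
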
  • trunk/Source/WebCore/Modules/webaudio/AudioContext.h

(r267147 → r268565)
@@ -28,9 +28,9 @@
 #include "AudioContextOptions.h"
 #include "BaseAudioContext.h"
+#include "DefaultAudioDestinationNode.h"

 namespace WebCore {

 class DOMWindow;
-class DefaultAudioDestinationNode;

 struct AudioTimestamp;
@@ -58,6 +58,22 @@
 #endif

+    void suspendRendering(DOMPromiseDeferred<void>&&);
+    void resumeRendering(DOMPromiseDeferred<void>&&);
+
+    void nodeWillBeginPlayback() final;
+    void lazyInitialize() final;
+
+    void startRendering();
+
+protected:
+    explicit AudioContext(Document&, const AudioContextOptions& = { });
+    AudioContext(Document&, unsigned numberOfChannels, RefPtr<AudioBuffer>&& renderTarget);
+
 private:
-    AudioContext(Document&, const AudioContextOptions&);
+    bool willPausePlayback();
+
+    // [[suspended by user]] flag in the specification:
+    // https://www.w3.org/TR/webaudio/#dom-audiocontext-suspended-by-user-slot
+    bool m_wasSuspendedByScript { false };
 };

  • trunk/Source/WebCore/Modules/webaudio/BaseAudioContext.cpp

(r268368 → r268565)
@@ -224,16 +224,6 @@
         return;

-    if (m_destinationNode) {
+    if (m_destinationNode)
         m_destinationNode->initialize();
-
-        if (!isOfflineContext() && state() != State::Running) {
-            // This starts the audio thread. The destination node's provideInput() method will now be called repeatedly to render audio.
-            // Each time provideInput() is called, a portion of the audio stream is rendered. Let's call this time period a "render quantum".
-            // NOTE: for now default AudioContext does not need an explicit startRendering() call from JavaScript.
-            // We may want to consider requiring it for symmetry with OfflineAudioContext.
-            startRendering();
-            ++s_hardwareContextCount;
-        }
-    }

     m_isInitialized = true;
@@ -1004,17 +994,4 @@
 }

-void BaseAudioContext::nodeWillBeginPlayback()
-{
-    // Called by scheduled AudioNodes when clients schedule their start times.
-    // Prior to the introduction of suspend(), resume(), and stop(), starting
-    // a scheduled AudioNode would remove the user-gesture restriction, if present,
-    // and would thus unmute the context. Now that AudioContext stays in the
-    // "suspended" state if a user-gesture restriction is present, starting a
-    // schedule AudioNode should set the state to "running", but only if the
-    // user-gesture restriction is set.
-    if (userGestureRequiredForAudioStart())
-        startRendering();
-}
-
 static bool shouldDocumentAllowWebAudioToAutoPlay(const Document& document)
 {
@@ -1054,42 +1031,4 @@
 }

-bool BaseAudioContext::willPausePlayback()
-{
-    auto* document = this->document();
-    if (!document)
-        return false;
-
-    if (userGestureRequiredForAudioStart()) {
-        if (!document->processingUserGestureForMedia())
-            return false;
-        removeBehaviorRestriction(BaseAudioContext::RequireUserGestureForAudioStartRestriction);
-    }
-
-    if (pageConsentRequiredForAudioStart()) {
-        auto* page = document->page();
-        if (page && !page->canStartMedia()) {
-            document->addMediaCanStartListener(*this);
-            return false;
-        }
-        removeBehaviorRestriction(BaseAudioContext::RequirePageConsentForAudioStartRestriction);
-    }
-
-    return m_mediaSession->clientWillPausePlayback();
-}
-
-void BaseAudioContext::startRendering()
-{
-    ALWAYS_LOG(LOGIDENTIFIER);
-    if (m_isStopScheduled || !willBeginPlayback() || m_wasSuspendedByScript)
-        return;
-
-    makePendingActivity();
-
-    setState(State::Running);
-
-    lazyInitialize();
-    destination()->startRendering();
-}
-
 void BaseAudioContext::mediaCanStart(Document& document)
 {
@@ -1123,4 +1062,5 @@
 }

+// FIXME: Move to OfflineAudioContext once WebKitOfflineAudioContext gets removed.
 void BaseAudioContext::finishedRendering(bool didRendering)
 {
@@ -1179,57 +1119,7 @@
 }

-void BaseAudioContext::suspendRendering(DOMPromiseDeferred<void>&& promise)
-{
-    if (isOfflineContext() || m_isStopScheduled) {
-        promise.reject(InvalidStateError);
-        return;
-    }
-
-    if (m_state == State::Closed || m_state == State::Interrupted || !m_destinationNode) {
-        promise.reject();
-        return;
-    }
-
-    addReaction(State::Suspended, WTFMove(promise));
-    m_wasSuspendedByScript = true;
-
-    if (!willPausePlayback())
-        return;
-
-    lazyInitialize();
-
-    m_destinationNode->suspend([this, protectedThis = makeRef(*this)] {
-        setState(State::Suspended);
-    });
-}
-
 void BaseAudioContext::didSuspendRendering(size_t)
 {
     setState(State::Suspended);
-}
-
-void BaseAudioContext::resumeRendering(DOMPromiseDeferred<void>&& promise)
-{
-    if (isOfflineContext() || m_isStopScheduled) {
-        promise.reject(InvalidStateError);
-        return;
-    }
-
-    if (m_state == State::Closed || !m_destinationNode) {
-        promise.reject();
-        return;
-    }
-
-    addReaction(State::Running, WTFMove(promise));
-    m_wasSuspendedByScript = false;
-
-    if (!willBeginPlayback())
-        return;
-
-    lazyInitialize();
-
-    m_destinationNode->resume([this, protectedThis = makeRef(*this)] {
-        setState(State::Running);
-    });
 }

  • trunk/Source/WebCore/Modules/webaudio/BaseAudioContext.h

(r268103 → r268565)
@@ -145,7 +145,4 @@
     AudioListener& listener();

-    void suspendRendering(DOMPromiseDeferred<void>&&);
-    void resumeRendering(DOMPromiseDeferred<void>&&);
-
     virtual void didSuspendRendering(size_t frame);

@@ -259,5 +256,4 @@
     void derefEventTarget() override { deref(); }

-    void startRendering();
     void finishedRendering(bool didRendering);

@@ -277,5 +273,5 @@
     void isPlayingAudioDidChange();

-    void nodeWillBeginPlayback();
+    virtual void nodeWillBeginPlayback() { }

 #if !RELEASE_LOG_DISABLED
@@ -318,5 +314,5 @@
     void derefNode(AudioNode&);

-    void lazyInitialize();
+    virtual void lazyInitialize();

     static bool isSupportedSampleRate(float sampleRate);
@@ -351,11 +347,10 @@
     virtual void didFinishOfflineRendering(ExceptionOr<Ref<AudioBuffer>>&&) { }

+    bool userGestureRequiredForAudioStart() const { return !isOfflineContext() && m_restrictions & RequireUserGestureForAudioStartRestriction; }
+    bool pageConsentRequiredForAudioStart() const { return !isOfflineContext() && m_restrictions & RequirePageConsentForAudioStartRestriction; }
+
+    PlatformMediaSession* mediaSession() const { return m_mediaSession.get(); }
 private:
     void constructCommon();
-
-    bool willPausePlayback();
-
-    bool userGestureRequiredForAudioStart() const { return !isOfflineContext() && m_restrictions & RequireUserGestureForAudioStartRestriction; }
-    bool pageConsentRequiredForAudioStart() const { return !isOfflineContext() && m_restrictions & RequirePageConsentForAudioStartRestriction; }

     void clear();
@@ -477,8 +472,4 @@
     HashMap<String, Vector<AudioParamDescriptor>> m_parameterDescriptorMap;

-    // [[suspended by user]] flag in the specification:
-    // https://www.w3.org/TR/webaudio/#dom-audiocontext-suspended-by-user-slot
-    bool m_wasSuspendedByScript { false };
-
     // These are cached per audio context for performance reasons. They cannot be
     // static because they rely on the sample rate.
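
The userGestureRequiredForAudioStart() and pageConsentRequiredForAudioStart() helpers moved to the protected section read a behavior-restriction bitmask, which is what lets the AudioContext subclass implement willPausePlayback() on its own. The restriction enum itself is declared elsewhere in BaseAudioContext.h; below is a hedged sketch of the mechanism using only the two bits visible in this diff (RestrictionsSketch is illustrative, not WebKit's code):

    #include <cstdint>

    enum BehaviorRestrictionFlags : uint8_t {
        NoRestrictions = 0,
        RequireUserGestureForAudioStartRestriction = 1 << 0,
        RequirePageConsentForAudioStartRestriction = 1 << 1,
    };

    struct RestrictionsSketch {
        uint8_t m_restrictions { RequireUserGestureForAudioStartRestriction };
        bool isOffline { false };

        // Offline contexts never gate rendering on gestures or page consent.
        bool userGestureRequiredForAudioStart() const
        {
            return !isOffline && (m_restrictions & RequireUserGestureForAudioStartRestriction);
        }

        // Called once the gating condition is satisfied (e.g. a user gesture).
        void removeBehaviorRestriction(uint8_t flag)
        {
            m_restrictions &= static_cast<uint8_t>(~flag);
        }
    };
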
  • trunk/Source/WebCore/Modules/webaudio/DefaultAudioDestinationNode.h

(r267859 → r268565)
@@ -44,4 +44,6 @@
     float sampleRate() const final { return m_sampleRate; }

+    ExceptionOr<void> startRendering() final;
+
 private:
     explicit DefaultAudioDestinationNode(BaseAudioContext&, Optional<float>);
@@ -57,5 +59,4 @@

     void enableInput(const String& inputDeviceId) final;
-    ExceptionOr<void> startRendering() final;
     void resume(Function<void ()>&&) final;
     void suspend(Function<void ()>&&) final;
  • trunk/Source/WebCore/Modules/webaudio/WebKitAudioContext.cpp

(r267544 → r268565)
@@ -80,5 +80,5 @@
 // Constructor for rendering to the audio hardware.
 WebKitAudioContext::WebKitAudioContext(Document& document)
-    : BaseAudioContext(document)
+    : AudioContext(document)
 {
 }
@@ -86,5 +86,5 @@
 // Constructor for offline (non-realtime) rendering.
 WebKitAudioContext::WebKitAudioContext(Document& document, Ref<AudioBuffer>&& renderTarget)
-    : BaseAudioContext(document, renderTarget->numberOfChannels(), WTFMove(renderTarget))
+    : AudioContext(document, renderTarget->numberOfChannels(), WTFMove(renderTarget))
 {
 }
  • trunk/Source/WebCore/Modules/webaudio/WebKitAudioContext.h

(r267147 → r268565)
@@ -26,5 +26,5 @@
 #pragma once

-#include "BaseAudioContext.h"
+#include "AudioContext.h"
 #include "WebKitAudioListener.h"

@@ -47,6 +47,5 @@
 // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.

-class WebKitAudioContext
-    : public BaseAudioContext
+class WebKitAudioContext : public AudioContext
 {
     WTF_MAKE_ISO_ALLOCATED(WebKitAudioContext);
@@ -57,5 +56,5 @@
     void close(DOMPromiseDeferred<void>&&);

-    WebKitAudioListener& listener() { return downcast<WebKitAudioListener>(BaseAudioContext::listener()); }
+    WebKitAudioListener& listener() { return downcast<WebKitAudioListener>(AudioContext::listener()); }

     // The AudioNode create methods are called on the main thread (from JavaScript).