Changeset 271636 in webkit


Timestamp: Jan 19, 2021 8:33:11 PM
Author: commit-queue@webkit.org
Message:

Update media state for active speech recognition as it uses audio capture
https://bugs.webkit.org/show_bug.cgi?id=220667

Patch by Sihui Liu <sihui_liu@apple.com> on 2021-01-19
Reviewed by Youenn Fablet.

Source/WebCore:

Ensure the media capture state is correctly sent to the client.

API test: WebKit2.SpeechRecognitionMediaCaptureStateChange
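
The mechanism, condensed into a self-contained sketch (plain C++; WebKit's types, IPC, and client notification are elided, and the stand-in names are hypothetical): the recognition object marks itself as its Document's active speech recognition when audio capture actually starts, and the document's recomputed media state then carries the HasActiveAudioCaptureDevice flag to the client.

    #include <cstdint>
    #include <cstdio>

    // Hypothetical stand-ins for the WebCore types touched by this patch.
    enum MediaStateFlags : uint32_t {
        IsNotPlaying = 0,
        HasActiveAudioCaptureDevice = 1 << 0,
    };

    struct SpeechRecognition;

    struct Document {
        SpeechRecognition* activeSpeechRecognition { nullptr }; // WebCore holds a WeakPtr here.

        void setActiveSpeechRecognition(SpeechRecognition* recognition)
        {
            if (activeSpeechRecognition == recognition)
                return;
            activeSpeechRecognition = recognition;
            updateIsPlayingMedia();
        }

        uint32_t mediaState() const
        {
            uint32_t state = IsNotPlaying;
            if (activeSpeechRecognition)
                state |= HasActiveAudioCaptureDevice; // The new bit contributed by this patch.
            return state;
        }

        void updateIsPlayingMedia()
        {
            // In WebKit the recomputed state is forwarded to the UI process,
            // which surfaces it via a capture-state delegate callback.
            std::printf("media state: %#x\n", static_cast<unsigned>(mediaState()));
        }
    };

    struct SpeechRecognition {
        Document& document;

        void didStartCapturingAudio() { document.setActiveSpeechRecognition(this); }
        void didStopCapturingAudio() { document.setActiveSpeechRecognition(nullptr); }
    };

    int main()
    {
        Document document;
        SpeechRecognition recognition { document };
        recognition.didStartCapturingAudio(); // prints: media state: 0x1
        recognition.didStopCapturingAudio();  // prints: media state: 0
    }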

  • Modules/speech/SpeechRecognition.cpp:

(WebCore::SpeechRecognition::startRecognition):
(WebCore::SpeechRecognition::stop):
(WebCore::SpeechRecognition::didStartCapturingAudio):
(WebCore::SpeechRecognition::didStopCapturingAudio):

  • Modules/speech/SpeechRecognition.h:
  • Modules/speech/SpeechRecognitionConnection.h:
  • dom/Document.cpp:

(WebCore::Document::setActiveSpeechRecognition):
(WebCore::Document::updateIsPlayingMedia):

  • dom/Document.h:
  • page/DummySpeechRecognitionProvider.h:

Source/WebKit:

  • WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp:

(WebKit::WebSpeechRecognitionConnection::unregisterClient):

  • WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h:

Tools:

  • TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm:

(-[SpeechRecognitionUIDelegate _webView:mediaCaptureStateDidChange:]):
(TestWebKitAPI::TEST):
(-[SpeechRecognitionPermissionUIDelegate _webView:requestSpeechRecognitionPermissionForOrigin:decisionHandler:]): Deleted.
(-[SpeechRecognitionPermissionUIDelegate _webView:requestMediaCaptureAuthorization:decisionHandler:]): Deleted.
(-[SpeechRecognitionPermissionUIDelegate _webView:checkUserMediaPermissionForURL:mainFrameURL:frameIdentifier:decisionHandler:]): Deleted.
(-[SpeechRecognitionPermissionUIDelegate webView:createWebViewWithConfiguration:forNavigationAction:windowFeatures:]): Deleted.

Location: trunk
Files: 12 edited

  • trunk/Source/WebCore/ChangeLog (r271635 → r271636)

    +2021-01-19  Sihui Liu  <sihui_liu@apple.com>
    +
    +        Update media state for active speech recognition as it uses audio capture
    +        https://bugs.webkit.org/show_bug.cgi?id=220667
    +
    +        Reviewed by Youenn Fablet.
    +
    +        Ensure the media capture state is correctly sent to the client.
    +
    +        API test: WebKit2.SpeechRecognitionMediaCaptureStateChange
    +
    +        * Modules/speech/SpeechRecognition.cpp:
    +        (WebCore::SpeechRecognition::startRecognition):
    +        (WebCore::SpeechRecognition::stop):
    +        (WebCore::SpeechRecognition::didStartCapturingAudio):
    +        (WebCore::SpeechRecognition::didStopCapturingAudio):
    +        * Modules/speech/SpeechRecognition.h:
    +        * Modules/speech/SpeechRecognitionConnection.h:
    +        * dom/Document.cpp:
    +        (WebCore::Document::setActiveSpeechRecognition):
    +        (WebCore::Document::updateIsPlayingMedia):
    +        * dom/Document.h:
    +        * page/DummySpeechRecognitionProvider.h:
    +
     2021-01-19  Megan Gardner  <megan_gardner@apple.com>
  • trunk/Source/WebCore/Modules/speech/SpeechRecognition.cpp (r270846 → r271636)

     }

    +void SpeechRecognition::stop()
    +{
    +    abortRecognition();
    +    m_connection->unregisterClient(*this);
    +
    +    auto& document = downcast<Document>(*scriptExecutionContext());
    +    document.setActiveSpeechRecognition(nullptr);
    +}
    +
     void SpeechRecognition::didStart()
     {
    …
     void SpeechRecognition::didStartCapturingAudio()
     {
    +    auto& document = downcast<Document>(*scriptExecutionContext());
    +    document.setActiveSpeechRecognition(this);
    +
         queueTaskToDispatchEvent(*this, TaskSource::Speech, Event::create(eventNames().audiostartEvent, Event::CanBubble::No, Event::IsCancelable::No));
     }
    …
     void SpeechRecognition::didStopCapturingAudio()
     {
    +    auto& document = downcast<Document>(*scriptExecutionContext());
    +    document.setActiveSpeechRecognition(nullptr);
    +
         queueTaskToDispatchEvent(*this, TaskSource::Speech, Event::create(eventNames().audioendEvent, Event::CanBubble::No, Event::IsCancelable::No));
     }
  • trunk/Source/WebCore/Modules/speech/SpeechRecognition.h (r269348 → r271636)

         // ActiveDOMObject
    -    const char* activeDOMObjectName() const;
    -    void suspend(ReasonForSuspension);
    +    const char* activeDOMObjectName() const final;
    +    void suspend(ReasonForSuspension) final;
    +    void stop() final;

         // EventTarget
  • trunk/Source/WebCore/Modules/speech/SpeechRecognitionConnection.h (r269810 → r271636)

         virtual ~SpeechRecognitionConnection() { }
         virtual void registerClient(SpeechRecognitionConnectionClient&) = 0;
    +    virtual void unregisterClient(SpeechRecognitionConnectionClient&) = 0;
         virtual void start(SpeechRecognitionConnectionClientIdentifier, const String& lang, bool continuous, bool interimResults, uint64_t maxAlternatives, ClientOrigin&&) = 0;
         virtual void stop(SpeechRecognitionConnectionClientIdentifier) = 0;
  • trunk/Source/WebCore/dom/Document.cpp (r271584 → r271636)

     #include "ShadowRoot.h"
     #include "SocketProvider.h"
    +#include "SpeechRecognition.h"
     #include "StorageEvent.h"
     #include "StringCallback.h"
    …
     }

    +void Document::setActiveSpeechRecognition(SpeechRecognition* speechRecognition)
    +{
    +    if (m_activeSpeechRecognition == speechRecognition)
    +        return;
    +
    +    m_activeSpeechRecognition = makeWeakPtr(speechRecognition);
    +    updateIsPlayingMedia();
    +}
    +
     void Document::noteUserInteractionWithMediaElement()
     {
    …
     #if ENABLE(MEDIA_STREAM)
         state |= MediaStreamTrack::captureState(*this);
    +    if (m_activeSpeechRecognition)
    +        state |= MediaProducer::HasActiveAudioCaptureDevice;
     #endif
  • trunk/Source/WebCore/dom/Document.h (r271514 → r271636)

     class SerializedScriptValue;
     class Settings;
    +class SpeechRecognition;
     class StringCallback;
     class StyleSheet;
    …
         WEBCORE_EXPORT void addAudioProducer(MediaProducer&);
         WEBCORE_EXPORT void removeAudioProducer(MediaProducer&);
    +    void setActiveSpeechRecognition(SpeechRecognition*);
         MediaProducer::MediaStateFlags mediaState() const { return m_mediaState; }
         void noteUserInteractionWithMediaElement();
    …
         WeakHashSet<MediaProducer> m_audioProducers;
    +    WeakPtr<SpeechRecognition> m_activeSpeechRecognition;

         HashSet<ShadowRoot*> m_inDocumentShadowRoots;
  • trunk/Source/WebCore/page/DummySpeechRecognitionProvider.h (r269810 → r271636)

             }
             void registerClient(SpeechRecognitionConnectionClient&) final { }
    +        void unregisterClient(SpeechRecognitionConnectionClient&) final { }
             void start(SpeechRecognitionConnectionClientIdentifier, const String&, bool, bool, uint64_t, ClientOrigin&&) final { }
             void stop(SpeechRecognitionConnectionClientIdentifier) final { }
  • trunk/Source/WebKit/ChangeLog (r271631 → r271636)

    +2021-01-19  Sihui Liu  <sihui_liu@apple.com>
    +
    +        Update media state for active speech recognition as it uses audio capture
    +        https://bugs.webkit.org/show_bug.cgi?id=220667
    +
    +        Reviewed by Youenn Fablet.
    +
    +        * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp:
    +        (WebKit::WebSpeechRecognitionConnection::unregisterClient):
    +        * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h:
    +
     2021-01-19  Chris Dumez  <cdumez@apple.com>
  • trunk/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp (r270574 → r271636)

     {
         m_clientMap.add(client.identifier(), makeWeakPtr(client));
    +}
    +
    +void WebSpeechRecognitionConnection::unregisterClient(WebCore::SpeechRecognitionConnectionClient& client)
    +{
    +    m_clientMap.remove(client.identifier());
     }
  • trunk/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h (r269810 → r271636)

         void registerClient(WebCore::SpeechRecognitionConnectionClient&) final;
    +    void unregisterClient(WebCore::SpeechRecognitionConnectionClient&) final;
         void didReceiveUpdate(WebCore::SpeechRecognitionUpdate&&) final;
         void invalidate(WebCore::SpeechRecognitionConnectionClientIdentifier);
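
The register/unregister pair above keeps the connection's client map in sync: registerClient stores a weak reference keyed by the client identifier, and the new unregisterClient (called from SpeechRecognition::stop() in the WebCore diff above) removes it. A toy analogue of this contract in standard C++ (std::weak_ptr stands in for WebKit's WeakPtr/makeWeakPtr; names outside the diff are hypothetical):

    #include <cstdint>
    #include <map>
    #include <memory>

    // Hypothetical stand-in for SpeechRecognitionConnectionClient.
    struct Client {
        uint64_t identifier;
    };

    class Connection {
    public:
        void registerClient(const std::shared_ptr<Client>& client)
        {
            // WebKit stores makeWeakPtr(client) keyed by client.identifier().
            m_clientMap[client->identifier] = client;
        }

        void unregisterClient(const Client& client)
        {
            // The new unregisterClient() does the inverse: drop the entry so a
            // stopped recognition no longer receives updates.
            m_clientMap.erase(client.identifier);
        }

        bool hasClient(uint64_t identifier) const
        {
            auto it = m_clientMap.find(identifier);
            return it != m_clientMap.end() && !it->second.expired();
        }

    private:
        std::map<uint64_t, std::weak_ptr<Client>> m_clientMap;
    };

    int main()
    {
        Connection connection;
        auto client = std::make_shared<Client>(Client { 1 });
        connection.registerClient(client);    // hasClient(1) == true
        connection.unregisterClient(*client); // hasClient(1) == false
    }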
  • trunk/Tools/ChangeLog (r271634 → r271636)

    +2021-01-19  Sihui Liu  <sihui_liu@apple.com>
    +
    +        Update media state for active speech recognition as it uses audio capture
    +        https://bugs.webkit.org/show_bug.cgi?id=220667
    +
    +        Reviewed by Youenn Fablet.
    +
    +        * TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm:
    +        (-[SpeechRecognitionUIDelegate _webView:mediaCaptureStateDidChange:]):
    +        (TestWebKitAPI::TEST):
    +        (-[SpeechRecognitionPermissionUIDelegate _webView:requestSpeechRecognitionPermissionForOrigin:decisionHandler:]): Deleted.
    +        (-[SpeechRecognitionPermissionUIDelegate _webView:requestMediaCaptureAuthorization:decisionHandler:]): Deleted.
    +        (-[SpeechRecognitionPermissionUIDelegate _webView:checkUserMediaPermissionForURL:mainFrameURL:frameIdentifier:decisionHandler:]): Deleted.
    +        (-[SpeechRecognitionPermissionUIDelegate webView:createWebViewWithConfiguration:forNavigationAction:windowFeatures:]): Deleted.
    +
     2021-01-19  Michael Catanzaro  <mcatanzaro@gnome.org>
  • trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm (r271381 → r271636)

     static bool receivedScriptMessage;
     static bool didFinishNavigation;
    +static bool captureStateDidChange;
    +static bool isCapturing;
     static RetainPtr<WKScriptMessage> lastScriptMessage;
     static RetainPtr<WKWebView> createdWebView;

    -@interface SpeechRecognitionPermissionUIDelegate : NSObject<WKUIDelegatePrivate>
    +@interface SpeechRecognitionUIDelegate : NSObject<WKUIDelegatePrivate>
     - (void)_webView:(WKWebView *)webView requestSpeechRecognitionPermissionForOrigin:(WKSecurityOrigin *)origin decisionHandler:(void (^)(BOOL))decisionHandler;
     - (void)_webView:(WKWebView *)webView requestMediaCaptureAuthorization: (_WKCaptureDevices)devices decisionHandler:(void (^)(BOOL))decisionHandler;
     - (void)_webView:(WKWebView *)webView checkUserMediaPermissionForURL:(NSURL *)url mainFrameURL:(NSURL *)mainFrameURL frameIdentifier:(NSUInteger)frameIdentifier decisionHandler:(void (^)(NSString *salt, BOOL authorized))decisionHandler;
     - (WKWebView *)webView:(WKWebView *)webView createWebViewWithConfiguration:(WKWebViewConfiguration *)configuration forNavigationAction:(WKNavigationAction *)navigationAction windowFeatures:(WKWindowFeatures *)windowFeatures;
    -@end
    -
    -@implementation SpeechRecognitionPermissionUIDelegate
    +- (void)_webView:(WKWebView *)webView mediaCaptureStateDidChange:(_WKMediaCaptureState)state;
    +@end
    +
    +@implementation SpeechRecognitionUIDelegate
     - (void)_webView:(WKWebView *)webView requestSpeechRecognitionPermissionForOrigin:(WKSecurityOrigin *)origin decisionHandler:(void (^)(BOOL))decisionHandler
     {
    …
         createdWebView = adoptNS([[WKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration]);
         return createdWebView.get();
    +}
    +
    +- (void)_webView:(WKWebView *)webView mediaCaptureStateDidChange:(_WKMediaCaptureState)state
    +{
    +    isCapturing = state == _WKMediaCaptureStateActiveMicrophone;
    +    captureStateDidChange = true;
     }
     @end
    …
         preferences._speechRecognitionEnabled = YES;
         auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]);
    -    auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
    +    auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]);
         [webView setUIDelegate:delegate.get()];
    …
         preferences._speechRecognitionEnabled = YES;
         preferences._mediaCaptureRequiresSecureConnection = NO;
    -    auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
    +    auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]);
         auto firstWebView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 100, 100) configuration:configuration.get()]);
         [firstWebView setUIDelegate:delegate.get()];
    …
         preferences._speechRecognitionEnabled = YES;
         auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]);
    -    auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
    +    auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]);
         [webView setUIDelegate:delegate.get()];
    …
         preferences._speechRecognitionEnabled = YES;
         preferences.javaScriptCanOpenWindowsAutomatically = YES;
    -    auto delegate = adoptNS([[SpeechRecognitionPermissionUIDelegate alloc] init]);
    +    auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]);
         auto navigationDelegate = adoptNS([[SpeechRecognitionNavigationDelegate alloc] init]);
         shouldGrantPermissionRequest = true;
    …
     }

    +TEST(WebKit2, SpeechRecognitionMediaCaptureStateChange)
    +{
    +    auto configuration = adoptNS([[WKWebViewConfiguration alloc] init]);
    +    auto handler = adoptNS([[SpeechRecognitionMessageHandler alloc] init]);
    +    [[configuration userContentController] addScriptMessageHandler:handler.get() name:@"testHandler"];
    +    auto preferences = [configuration preferences];
    +    preferences._mockCaptureDevicesEnabled = YES;
    +    preferences._speechRecognitionEnabled = YES;
    +    auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]);
    +    auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]);
    +    [webView setUIDelegate:delegate.get()];
    +    shouldGrantPermissionRequest = true;
    +
    +    captureStateDidChange = false;
    +    [webView synchronouslyLoadTestPageNamed:@"speechrecognition-basic"];
    +    [webView stringByEvaluatingJavaScript:@"start()"];
    +    TestWebKitAPI::Util::run(&captureStateDidChange);
    +    EXPECT_TRUE(isCapturing);
    +
    +    captureStateDidChange = false;
    +    [webView stringByEvaluatingJavaScript:@"stop()"];
    +    TestWebKitAPI::Util::run(&captureStateDidChange);
    +    EXPECT_FALSE(isCapturing);
    +}
    +
     #endif