Changeset 271636 in webkit
- Timestamp:
- Jan 19, 2021 8:33:11 PM (3 years ago)
- Location:
- trunk
- Files:
-
- 12 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/Source/WebCore/ChangeLog
r271635 r271636 1 2021-01-19 Sihui Liu <sihui_liu@apple.com> 2 3 Update media state for active speech recognition as it uses audio capture 4 https://bugs.webkit.org/show_bug.cgi?id=220667 5 6 Reviewed by Youenn Fablet. 7 8 To make sure the media capture state is correctly sent to client. 9 10 API test: WebKit2.SpeechRecognitionMediaCaptureStateChange 11 12 * Modules/speech/SpeechRecognition.cpp: 13 (WebCore::SpeechRecognition::startRecognition): 14 (WebCore::SpeechRecognition::stop): 15 (WebCore::SpeechRecognition::didStartCapturingAudio): 16 (WebCore::SpeechRecognition::didStopCapturingAudio): 17 * Modules/speech/SpeechRecognition.h: 18 * Modules/speech/SpeechRecognitionConnection.h: 19 * dom/Document.cpp: 20 (WebCore::Document::setActiveSpeechRecognition): 21 (WebCore::Document::updateIsPlayingMedia): 22 * dom/Document.h: 23 * page/DummySpeechRecognitionProvider.h: 24 1 25 2021-01-19 Megan Gardner <megan_gardner@apple.com> 2 26 -
trunk/Source/WebCore/Modules/speech/SpeechRecognition.cpp
r270846 r271636 100 100 } 101 101 102 void SpeechRecognition::stop() 103 { 104 abortRecognition(); 105 m_connection->unregisterClient(*this); 106 107 auto& document = downcast<Document>(*scriptExecutionContext()); 108 document.setActiveSpeechRecognition(nullptr); 109 } 110 102 111 void SpeechRecognition::didStart() 103 112 { … … 110 119 void SpeechRecognition::didStartCapturingAudio() 111 120 { 121 auto& document = downcast<Document>(*scriptExecutionContext()); 122 document.setActiveSpeechRecognition(this); 123 112 124 queueTaskToDispatchEvent(*this, TaskSource::Speech, Event::create(eventNames().audiostartEvent, Event::CanBubble::No, Event::IsCancelable::No)); 113 125 } … … 135 147 void SpeechRecognition::didStopCapturingAudio() 136 148 { 149 auto& document = downcast<Document>(*scriptExecutionContext()); 150 document.setActiveSpeechRecognition(nullptr); 151 137 152 queueTaskToDispatchEvent(*this, TaskSource::Speech, Event::create(eventNames().audioendEvent, Event::CanBubble::No, Event::IsCancelable::No)); 138 153 } -
trunk/Source/WebCore/Modules/speech/SpeechRecognition.h
r269348 r271636 86 86 87 87 // ActiveDOMObject 88 const char* activeDOMObjectName() const; 89 void suspend(ReasonForSuspension); 88 const char* activeDOMObjectName() const final; 89 void suspend(ReasonForSuspension) final; 90 void stop() final; 90 91 91 92 // EventTarget -
trunk/Source/WebCore/Modules/speech/SpeechRecognitionConnection.h
r269810 r271636 38 38 virtual ~SpeechRecognitionConnection() { } 39 39 virtual void registerClient(SpeechRecognitionConnectionClient&) = 0; 40 virtual void unregisterClient(SpeechRecognitionConnectionClient&) = 0; 40 41 virtual void start(SpeechRecognitionConnectionClientIdentifier, const String& lang, bool continuous, bool interimResults, uint64_t maxAlternatives, ClientOrigin&&) = 0; 41 42 virtual void stop(SpeechRecognitionConnectionClientIdentifier) = 0; -
trunk/Source/WebCore/dom/Document.cpp
r271584 r271636 209 209 #include "ShadowRoot.h" 210 210 #include "SocketProvider.h" 211 #include "SpeechRecognition.h" 211 212 #include "StorageEvent.h" 212 213 #include "StringCallback.h" … … 4254 4255 } 4255 4256 4257 void Document::setActiveSpeechRecognition(SpeechRecognition* speechRecognition) 4258 { 4259 if (m_activeSpeechRecognition == speechRecognition) 4260 return; 4261 4262 m_activeSpeechRecognition = makeWeakPtr(speechRecognition); 4263 updateIsPlayingMedia(); 4264 } 4265 4256 4266 void Document::noteUserInteractionWithMediaElement() 4257 4267 { … … 4275 4285 #if ENABLE(MEDIA_STREAM) 4276 4286 state |= MediaStreamTrack::captureState(*this); 4287 if (m_activeSpeechRecognition) 4288 state |= MediaProducer::HasActiveAudioCaptureDevice; 4277 4289 #endif 4278 4290 -
trunk/Source/WebCore/dom/Document.h
r271514 r271636 209 209 class SerializedScriptValue; 210 210 class Settings; 211 class SpeechRecognition; 211 212 class StringCallback; 212 213 class StyleSheet; … … 1382 1383 WEBCORE_EXPORT void addAudioProducer(MediaProducer&); 1383 1384 WEBCORE_EXPORT void removeAudioProducer(MediaProducer&); 1385 void setActiveSpeechRecognition(SpeechRecognition*); 1384 1386 MediaProducer::MediaStateFlags mediaState() const { return m_mediaState; } 1385 1387 void noteUserInteractionWithMediaElement(); … … 1972 1974 1973 1975 WeakHashSet<MediaProducer> m_audioProducers; 1976 WeakPtr<SpeechRecognition> m_activeSpeechRecognition; 1974 1977 1975 1978 HashSet<ShadowRoot*> m_inDocumentShadowRoots; -
trunk/Source/WebCore/page/DummySpeechRecognitionProvider.h
r269810 r271636 40 40 } 41 41 void registerClient(SpeechRecognitionConnectionClient&) final { } 42 void unregisterClient(SpeechRecognitionConnectionClient&) final { } 42 43 void start(SpeechRecognitionConnectionClientIdentifier, const String&, bool, bool, uint64_t, ClientOrigin&&) final { } 43 44 void stop(SpeechRecognitionConnectionClientIdentifier) final { } -
trunk/Source/WebKit/ChangeLog
r271631 r271636 1 2021-01-19 Sihui Liu <sihui_liu@apple.com> 2 3 Update media state for active speech recognition as it uses audio capture 4 https://bugs.webkit.org/show_bug.cgi?id=220667 5 6 Reviewed by Youenn Fablet. 7 8 * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp: 9 (WebKit::WebSpeechRecognitionConnection::unregisterClient): 10 * WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h: 11 1 12 2021-01-19 Chris Dumez <cdumez@apple.com> 2 13 -
trunk/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.cpp
r270574 r271636 62 62 { 63 63 m_clientMap.add(client.identifier(), makeWeakPtr(client)); 64 } 65 66 void WebSpeechRecognitionConnection::unregisterClient(WebCore::SpeechRecognitionConnectionClient& client) 67 { 68 m_clientMap.remove(client.identifier()); 64 69 } 65 70 -
trunk/Source/WebKit/WebProcess/WebCoreSupport/WebSpeechRecognitionConnection.h
r269810 r271636 57 57 58 58 void registerClient(WebCore::SpeechRecognitionConnectionClient&) final; 59 void unregisterClient(WebCore::SpeechRecognitionConnectionClient&) final; 59 60 void didReceiveUpdate(WebCore::SpeechRecognitionUpdate&&) final; 60 61 void invalidate(WebCore::SpeechRecognitionConnectionClientIdentifier); -
trunk/Tools/ChangeLog
r271634 r271636 1 2021-01-19 Sihui Liu <sihui_liu@apple.com> 2 3 Update media state for active speech recognition as it uses audio capture 4 https://bugs.webkit.org/show_bug.cgi?id=220667 5 6 Reviewed by Youenn Fablet. 7 8 * TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm: 9 (-[SpeechRecognitionUIDelegate _webView:mediaCaptureStateDidChange:]): 10 (TestWebKitAPI::TEST): 11 (-[SpeechRecognitionPermissionUIDelegate _webView:requestSpeechRecognitionPermissionForOrigin:decisionHandler:]): Deleted. 12 (-[SpeechRecognitionPermissionUIDelegate _webView:requestMediaCaptureAuthorization:decisionHandler:]): Deleted. 13 (-[SpeechRecognitionPermissionUIDelegate _webView:checkUserMediaPermissionForURL:mainFrameURL:frameIdentifier:decisionHandler:]): Deleted. 14 (-[SpeechRecognitionPermissionUIDelegate webView:createWebViewWithConfiguration:forNavigationAction:windowFeatures:]): Deleted. 15 1 16 2021-01-19 Michael Catanzaro <mcatanzaro@gnome.org> 2 17 -
trunk/Tools/TestWebKitAPI/Tests/WebKitCocoa/SpeechRecognition.mm
r271381 r271636 39 39 static bool receivedScriptMessage; 40 40 static bool didFinishNavigation; 41 static bool captureStateDidChange; 42 static bool isCapturing; 41 43 static RetainPtr<WKScriptMessage> lastScriptMessage; 42 44 static RetainPtr<WKWebView> createdWebView; 43 45 44 @interface SpeechRecognition PermissionUIDelegate : NSObject<WKUIDelegatePrivate>46 @interface SpeechRecognitionUIDelegate : NSObject<WKUIDelegatePrivate> 45 47 - (void)_webView:(WKWebView *)webView requestSpeechRecognitionPermissionForOrigin:(WKSecurityOrigin *)origin decisionHandler:(void (^)(BOOL))decisionHandler; 46 48 - (void)_webView:(WKWebView *)webView requestMediaCaptureAuthorization: (_WKCaptureDevices)devices decisionHandler:(void (^)(BOOL))decisionHandler; 47 49 - (void)_webView:(WKWebView *)webView checkUserMediaPermissionForURL:(NSURL *)url mainFrameURL:(NSURL *)mainFrameURL frameIdentifier:(NSUInteger)frameIdentifier decisionHandler:(void (^)(NSString *salt, BOOL authorized))decisionHandler; 48 50 - (WKWebView *)webView:(WKWebView *)webView createWebViewWithConfiguration:(WKWebViewConfiguration *)configuration forNavigationAction:(WKNavigationAction *)navigationAction windowFeatures:(WKWindowFeatures *)windowFeatures; 49 @end 50 51 @implementation SpeechRecognitionPermissionUIDelegate 51 - (void)_webView:(WKWebView *)webView mediaCaptureStateDidChange:(_WKMediaCaptureState)state; 52 @end 53 54 @implementation SpeechRecognitionUIDelegate 52 55 - (void)_webView:(WKWebView *)webView requestSpeechRecognitionPermissionForOrigin:(WKSecurityOrigin *)origin decisionHandler:(void (^)(BOOL))decisionHandler 53 56 { … … 70 73 createdWebView = adoptNS([[WKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration]); 71 74 return createdWebView.get(); 75 } 76 77 - (void)_webView:(WKWebView *)webView mediaCaptureStateDidChange:(_WKMediaCaptureState)state 78 { 79 isCapturing = state == _WKMediaCaptureStateActiveMicrophone; 80 captureStateDidChange = true; 72 81 } 73 
82 @end … … 112 121 preferences._speechRecognitionEnabled = YES; 113 122 auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]); 114 auto delegate = adoptNS([[SpeechRecognition PermissionUIDelegate alloc] init]);123 auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]); 115 124 [webView setUIDelegate:delegate.get()]; 116 125 … … 155 164 preferences._speechRecognitionEnabled = YES; 156 165 preferences._mediaCaptureRequiresSecureConnection = NO; 157 auto delegate = adoptNS([[SpeechRecognition PermissionUIDelegate alloc] init]);166 auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]); 158 167 auto firstWebView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 100, 100) configuration:configuration.get()]); 159 168 [firstWebView setUIDelegate:delegate.get()]; … … 207 216 preferences._speechRecognitionEnabled = YES; 208 217 auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]); 209 auto delegate = adoptNS([[SpeechRecognition PermissionUIDelegate alloc] init]);218 auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]); 210 219 [webView setUIDelegate:delegate.get()]; 211 220 … … 242 251 preferences._speechRecognitionEnabled = YES; 243 252 preferences.javaScriptCanOpenWindowsAutomatically = YES; 244 auto delegate = adoptNS([[SpeechRecognition PermissionUIDelegate alloc] init]);253 auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]); 245 254 auto navigationDelegate = adoptNS([[SpeechRecognitionNavigationDelegate alloc] init]); 246 255 shouldGrantPermissionRequest = true; … … 269 278 } 270 279 280 TEST(WebKit2, SpeechRecognitionMediaCaptureStateChange) 281 { 282 auto configuration = adoptNS([[WKWebViewConfiguration alloc] init]); 283 auto handler = adoptNS([[SpeechRecognitionMessageHandler alloc] init]); 284 [[configuration userContentController] 
addScriptMessageHandler:handler.get() name:@"testHandler"]; 285 auto preferences = [configuration preferences]; 286 preferences._mockCaptureDevicesEnabled = YES; 287 preferences._speechRecognitionEnabled = YES; 288 auto delegate = adoptNS([[SpeechRecognitionUIDelegate alloc] init]); 289 auto webView = adoptNS([[TestWKWebView alloc] initWithFrame:CGRectMake(0, 0, 800, 600) configuration:configuration.get()]); 290 [webView setUIDelegate:delegate.get()]; 291 shouldGrantPermissionRequest = true; 292 293 captureStateDidChange = false; 294 [webView synchronouslyLoadTestPageNamed:@"speechrecognition-basic"]; 295 [webView stringByEvaluatingJavaScript:@"start()"]; 296 TestWebKitAPI::Util::run(&captureStateDidChange); 297 EXPECT_TRUE(isCapturing); 298 299 captureStateDidChange = false; 300 [webView stringByEvaluatingJavaScript:@"stop()"]; 301 TestWebKitAPI::Util::run(&captureStateDidChange); 302 EXPECT_FALSE(isCapturing); 303 } 304 271 305 #endif 272 306
Note: See TracChangeset for help on using the changeset viewer.