From 514839e4a855448c4ac39c18b8644fd4996d38d4 Mon Sep 17 00:00:00 2001 From: sunggook Date: Thu, 1 Sep 2022 13:51:36 -0700 Subject: [PATCH 01/19] Create TextureStream.md --- specs/APIReview_TextureStream.md | 353 +++++++++++++++++++++++++++++++ 1 file changed, 353 insertions(+) create mode 100644 specs/APIReview_TextureStream.md diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md new file mode 100644 index 000000000..2ccd00b36 --- /dev/null +++ b/specs/APIReview_TextureStream.md @@ -0,0 +1,353 @@ + +# Background +Many native apps use a native engine for real-time communication scenarios, which include video +capture, networking and video rendering. However, often, these apps still use WebView or +Electrion for UI rendering. The separation between real-time video rendering and UI rendering +prevents apps from rendering real-time video inside the web contents. This forces apps to +render the real-time video on top of the web contents, which is limiting. Rendering video on +top constrains the user experience and it may also cause performance problems. +We can ask the native apps to use web renderer for video handling because web standard already +provides these features through WebRTC APIs. The end developers, however, prefer to use +their existing engine such as capturing and composition, meanwhile using WebRTC API for rendering. + +# Description +The proposed APIs will allow the end developers to stream the captured or composed video frame to +the WebView renderer where Javascript is able to insert the frame to the page through W3C standard +API of Video, MediaStream element for displaying it. +The API will use the shared GPU texture buffer so that it can minimize the overall cost with +regards to frame copy. + +# Examples +Javascript +// User click the video capture button. +document.querySelector('#showVideo').addEventListener('click', + e => getStreamFromTheHost(e)); +async function getStreamFromTheHost(e) { + try { + // Request stream to the host with unique stream id. + const stream = await window.chrome.webview.getTextureStream('webview2-abcd1234'); + // The MediaStream object is returned and it gets video MediaStreamTrack element from it. + const video_tracks = stream.getVideoTracks(); + const videoTrack = video_tracks[0]; + // Show the video via Video Element. + document.getElementById(video_id).srcObject = stream; + } catch (error) { + console.log(error); + } +} +Win32 C++ +UINT32 luid, +// Get the LUID (Graphic adapter) that the WebView renderer uses. +coreWebView->GetRenderAdapterLUID(&luid); +// Create D3D device based on the WebView's LUID. +ComPtr d3d_device = MyCreateD3DDevice(luid); +// Register unique texture stream that the host can provide. +ComPtr webviewTextureStream; +g_webviewStaging3->CreateTextureStream(L"webview2-abcd1234", d3d_device.Get(), &webviewTextureStream); +// Register the Origin URL that the target renderer could stream of the registered stream id. The request from not registered origin will fail to stream. +webviewTextureStream->AddRequestedFilter(L"https://edge-webscratch"); +// Listen to Start request +EventRegistrationToken start_token; +webviewTextureStream->add_StartRequested(Callback( + [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { + // Capture video stream by using native API, for example, Media Foundation on Windows. 
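+      // StartMediaFoundationCapture is a host-defined helper (not shown in this
+      // spec); a typical implementation creates shared buffers with CreateBuffer
+      // and then calls SetBuffer/Present for each captured frame.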
+ StartMediaFoundationCapture(hWnd); + return S_OK; + }).Get(), &start_token); +EventRegistrationToken stop_token; +webviewTextureStream->add_StopRequested(Callback( + [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { + StopMediaFoundationCapture(); + return S_OK; + }).Get(), &stop_token); +EventRegistrationToken texture_token; +webviewTextureStream->add_TextureError(Callback( + [hWnd](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamTextureErrorEventArgs* args) { + COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND kind; + HRESULT hr = args->get_Kind(&kind); + assert(SUCCEEDED(hr)); + switch (kind) + { + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED: + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND: + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE: + // assert(false); + break; + default: + break; + } + return S_OK; + }).Get(), &texture_token); + +// TextureStream APIs are called in the UI thread on the WebView2 process meanwhile Video capture +// and composition could happen in worker thread or out of process. +LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) +{ + static ICoreWebView2Staging3* webview2_17 = nullptr; + TCHAR greeting[] = _T("Hello, Windows desktop!"); + ComPtr d3d_device; + HANDLE slimCoreHandle; + HRESULT hr; + ComPtr texture_buffer; + int64_t bufferId = -1; + switch (message) + { + case IDC_TEST_SEND_TEXTURE: + if (webviewTextureStream) { + // Present API should be called on same thread where main WebView + // object is created. + bufferId = (int)wParam; + texture_buffer = texture_address_to_buffer_ids[(HANDLE)bufferId]; + // assert(texture_buffer != nullptr); + if (texture_buffer) { + // Notify the renderer for updated texture on the shared buffer. + webviewTextureStream->SetBuffer(texture_buffer.Get(), texture_buffer_info_->timestamp); + webviewTextureStream->Present(); + } + } + break; + case IDC_TEST_REQUEST_BUFFER: + // Retrieve available shared buffer. + hr = webviewTextureStream->GetAvailableBuffer(&texture_buffer); + if (SUCCEEDED(hr)) { + texture_buffer->get_Handle((HANDLE*)&bufferId); + } + SendBufferIdToOOFCaptureEngine(false, nullptr, bufferId); + break; + case IDC_TEST_CREATE_NEW_BUFFER: + if (webviewTextureStream) { + ComPtr texture_buffer; + UINT32 width = (UINT32)wParam; + UINT32 height = (UINT32)lParam; + // Create shared buffer. + webviewTextureStream->CreateSharedBuffer(width, height, &texture_buffer); + texture_buffer->get_Handle(&slimCoreHandle); + texture_address_to_buffer_ids[slimCoreHandle] = texture_buffer; + SendBufferIdToOOFCaptureEngine(true, slimCoreHandle, (int)slimCoreHandle); + } + break; + default: + return DefWindowProc(hWnd, message, wParam, lParam); + break; + } +} +# API Details +Win32 C++ +[v1_enum] +typedef enum COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND { + /// The host can't create a TextureStream instance more than once + /// for a specific stream id. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_STREAM_ID_ALREADY_REGISTERED, + /// Occurs when the host calls CreateBuffer or Present + /// APIs without being called of Start event. Or, 10 seconds passed before + /// calling these APIs since the OnStart event. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED, + /// The buffer has been removed using RemoveBuffer. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND, + /// The texture to be presented is already in use for rendering. + /// Call GetAvailableBuffer to determine an available buffer to present. 
+ /// The developer can technically call SetBuffer multiple times. + /// But once they call Present, the buffer becomes "in use" until + /// they call SetBuffer and Present on a different buffer and wait a bit + /// for the original buffer to stop being used. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE, +} COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND; +/// This is ICoreWebView2Staging3 that returns the texture stream interface. +[uuid(96c27a45-f142-4873-80ad-9d0cd899b2b9), object, pointer_default(unique)] +interface ICoreWebView2Staging3 : IUnknown { + /// Registers the stream id that the host can handle, providing a + /// texture stream when requested from the WebView2's JavaScript code. + /// The host can register multiple unique stream instances, each with + /// a unique stream ID, enabling the host to stream from different sources + /// concurrently. + /// The host should call this only once for unique streamId. The second + /// call of already created streamId without destroying + /// ICoreWebView2StagingTextureStream object will return an error. + /// 'd3dDevice' is used for creating shared IDXGI resource and NT shared + /// of it. The host should use Adapter of the LUID from the GetRenderAdapterLUID + /// for creating the D3D Device. + HRESULT CreateTextureStream( + [in] LPCWSTR streamId, + [in] IUnknown* d3dDevice, + [out, retval ] ICoreWebView2StagingTextureStream** value); + /// Get the graphics adapter LUID of the renderer. The host should use this + /// LUID adapter when creating D3D device to use with CreateTextureStream(). +HRESULT GetRenderAdapterLUID([out, retval] LUID* luid); + /// Listens for change of graphics adapter LUID of the browser. + /// The host can get the updated LUID by GetRenderAdapterLUID. It is expected + /// that the host updates texture's d3d Device with UpdateD3DDevice, + /// removes existing buffers and creates new buffer. + HRESULT add_RenderAdapterLUIDUpdated( + [in] ICoreWebView2StagingRenderAdapterLUIDUpdatedEventHandler* eventHandler, + [out] EventRegistrationToken* token); + /// Remove listener for start stream request. + HRESULT remove_RenderAdapterLUIDUpdated( + [in] EventRegistrationToken token); +} +/// This is the interface that handles texture streaming. +/// The most of APIs have to be called on UI thread. +[uuid(afca8431-633f-4528-abfe-7fc3bedd8962), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStream : IUnknown { + /// Get the stream ID of the object that is used when calling CreateTextureStream. + /// The caller must free the returned string with CoTaskMemFree. See + /// [API Conventions](/microsoft-edge/webview2/concepts/win32-api-conventions#strings). + // MSOWNERS: TBD (wv2core@microsoft.com) + [propget] HRESULT StreamId([out, retval] LPWSTR* id); + /// Adds an allowed url origin for the given stream id. The stream requests + /// could be made from any frame, including iframes, but these origins + /// must be registered first in order for the request to succeed. + /// The added filter will be persistent until + /// ICoreWebView2StagingTextureStream is destroyed or + /// RemoveRequestedFilter is called. + /// The renderer does not support wildcard so it will compare + /// literal string input to the requesting frame origin. So, the input string + /// should have a scheme like https://. + /// For example, https://www.valid-host.com, http://www.valid-host.com are + /// valid origins but www.valid-host.com, or *.valid-host.com. are not + /// valid origins. 
+ /// getTextureStream() will fail unless the requesting frame's origin URL is + /// added to the request filter. + HRESULT AddRequestedFilter([in] LPCWSTR origin); + /// Remove added origin, which was added by AddRequestedFilter. + HRESULT RemoveRequestedFilter([in] LPCWSTR origin); + /// Listens for stream requests from the Javascript's getTextureStream call + /// for the given stream id. It is called for the first request only, the + /// subsequent requests of same stream id will not be called. + /// It is expected that the host provides the stream within 10s after + /// being requested. The first call to Present() fulfills the stream request. + HRESULT add_StartRequested( + [in] ICoreWebView2StagingTextureStreamStartRequestedEventHandler* eventHandler, + [out] EventRegistrationToken* token); + /// Remove listener for start stream request. + HRESULT remove_StartRequested( + [in] EventRegistrationToken token); + /// Listen to stop stream request once the stream started. + /// It is called when user stop all streaming requests from + /// the renderers (Javascript) or the host calls the Stop API. The renderer + /// can stream again by calling the streaming request API. + /// The renderer cleared all registered buffers before sending + /// the stop request event so that the callback of the next start request + /// should register the textures again. + /// The event is triggered when all requests for given stream id closed + /// by the Javascript, or the host's Stop API call. + HRESULT add_StopRequested( + [in] ICoreWebView2StagingTextureStreamStopRequestedEventHandler* eventHandler, + [out] EventRegistrationToken* token); + /// Remove listener for stop stream request. + HRESULT remove_StopRequested( + [in] EventRegistrationToken token); + /// Creates shared buffer that will be referenced by the host and the browser. + /// By using the shared buffer mechanism, the host does not have to + /// send respective texture to the renderer, instead it notifies it + /// with internal buffer id, which is the identity of the shared buffer. + /// The shared buffer is 2D texture, IDXGIResource, format and will be + /// exposed through shared HANDLE or IUnknown type through ICoreWebView2StagingTexture. + /// Whenever the host has new texture to write, it should ask + /// reusable ICoreWebView2StagingTexture from the GetAvailableBuffer, + /// which returns ICoreWebView2StagingTexture. + /// If the GetAvailableBuffer returns an error, then the host calls the + /// CreateBuffer to allocate new shared buffer. + /// The API also registers created shared handle to the browser once it + /// created the resource. + HRESULT CreateBuffer( + [in] UINT32 width, + [in] UINT32 height, + [out, retval] ICoreWebView2StagingTexture** buffer); + /// GetAvailableBuffer can be called on any thread like SetBuffer. + HRESULT GetAvailableBuffer([out, retval] ICoreWebView2StagingTexture** buffer); + /// Remove texture buffer when the host removes the backed 2D texture. + /// The host can save the existing resources by deleting 2D textures + /// when it changes the frame sizes. + HRESULT RemoveBuffer([in] ICoreWebView2StagingTexture* buffer); + /// Indicates that the buffer is ready to present. + /// The buffer must be retrieved from the GetAvailableBuffer. + /// The host writes new texture to the local shared 2D texture of + /// the buffer id, which is created via CreateBuffer. + /// SetBuffer API can be called in any thread. 
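+  /// A typical per-frame flow on the host side (sketch): obtain a buffer with
+  /// GetAvailableBuffer (or CreateBuffer if none is available), copy the captured
+  /// frame into the buffer's shared texture, then call SetBuffer with a timestamp
+  /// followed by Present on the UI thread.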
+ HRESULT SetBuffer([in] ICoreWebView2StagingTexture* buffer, + [in] ULONGLONG timestamp); + /// Render texture that is current set ICoreWebView2StagingTexture. + HRESULT Present(); + /// Stop streaming of the current stream id. + /// API calls of Present, CreateBuffer will fail after this + /// with an error of COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED. + /// The Javascript can restart the stream with getTextureStream. + HRESULT Stop(); + /// Event handler for those that occur at the Renderer side, the example + /// are CreateBuffer, Present, or Stop. + HRESULT add_TextureError( + [in] ICoreWebView2StagingTextureStreamTextureErrorEventHandler* eventHandler, + [out] EventRegistrationToken* token); + /// Remove listener for texture error event. + HRESULT remove_TextureError([in] EventRegistrationToken token); + /// Updates d3d Device when it is updated by RenderAdapterLUIDUpdated + /// event. + HRESULT UpdateD3DDevice([in] IUnknown* d3dDevice); +} +/// Texture stream buffer that the host writes to so that the Renderer +/// will render on it. +[uuid(0836f09c-34bd-47bf-914a-99fb56ae2d07), object, pointer_default(unique)] +interface ICoreWebView2StagingTexture : IUnknown { + /// Returns shared Windows NT handle. The caller expected to open it with + /// ID3D11Device1::OpenSharedResource1 and writes the incoming texture to it. + [propget] HRESULT Handle([out, retval] HANDLE* handle); + /// Returns IUnknown type that could be query interface to IDXGIResource. + /// The caller can write incoming texture to it. + [propget] HRESULT Resource([out, retval] IUnknown** resource); +} +/// This is the callback for new texture stream request. +[uuid(62d09330-00a9-41bf-a9ae-55aaef8b3c44), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamStartRequestedEventHandler : IUnknown { + //// Called to provide the implementer with the event args for the + //// corresponding event. There are no event args and the args + //// parameter will be null. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] IUnknown* args); +} +/// This is the callback for stop request of texture stream. +[uuid(4111102a-d19f-4438-af46-efc563b2b9cf), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamStopRequestedEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. There are no event args and the args + /// parameter will be null. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] IUnknown* args); +} +/// This is the callback for texture stream rendering error. +[uuid(52cb8898-c711-401a-8f97-3646831ba72d), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamTextureErrorEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] ICoreWebView2StagingTextureStreamTextureErrorEventArgs* args); +} +/// This is the event args interface for texture stream error callback. +[uuid(0e1730c1-03df-4ad2-b847-be4d63adf700), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamTextureErrorEventArgs : IUnknown { + /// Error kind. + [propget] HRESULT Kind([out, retval] + COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND* value); + // Texture buffer that the error is associated with. 
+ HRESULT GetBuffer([out, retval] ICoreWebView2StagingTexture** buffer); +} +[uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] +interface ICoreWebView2StagingRenderAdapterLUIDUpdatedEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] IUnknown* args); +} +[uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] +interface ICoreWebView2StagingRenderAdapterLUIDUpdatedEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. + HRESULT Invoke( + [in] ICoreWebView2Staging3 * sender, + [in] IUnknown* args); +} + + From d896d6b4d117186dd5b9295691a826473c6f0a36 Mon Sep 17 00:00:00 2001 From: sunggook Date: Mon, 12 Sep 2022 17:49:41 -0700 Subject: [PATCH 02/19] Address missing basic markup for md file. --- specs/APIReview_TextureStream.md | 49 +++++++++++++++++++------------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 2ccd00b36..ff42a4411 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -1,31 +1,38 @@ - +TextureStream +=============================================================================================== + # Background -Many native apps use a native engine for real-time communication scenarios, which include video -capture, networking and video rendering. However, often, these apps still use WebView or -Electrion for UI rendering. The separation between real-time video rendering and UI rendering +Many native apps use a native engine for real-time communication scenarios, which include video +capture, networking and video rendering. However, often, these apps still use WebView or +Electron for UI rendering. The separation between real-time video rendering and UI rendering prevents apps from rendering real-time video inside the web contents. This forces apps to -render the real-time video on top of the web contents, which is limiting. Rendering video on -top constrains the user experience and it may also cause performance problems. -We can ask the native apps to use web renderer for video handling because web standard already -provides these features through WebRTC APIs. The end developers, however, prefer to use +render the real-time video on top of the web contents, which is limiting. Rendering video on +top constrains the user experience and it may also cause performance problems. +We can ask the native apps to use web renderer for video handling because web standard already +provides these features through WebRTC APIs. The end developers, however, prefer to use their existing engine such as capturing and composition, meanwhile using WebRTC API for rendering. # Description -The proposed APIs will allow the end developers to stream the captured or composed video frame to -the WebView renderer where Javascript is able to insert the frame to the page through W3C standard +The proposed APIs will allow the end developers to stream the captured or composed video frame to +the WebView renderer where Javascript is able to insert the frame to the page through W3C standard API of Video, MediaStream element for displaying it. 
-The API will use the shared GPU texture buffer so that it can minimize the overall cost with +The API will use the shared GPU texture buffer so that it can minimize the overall cost with regards to frame copy. # Examples -Javascript + +## Javascript + +This is Javascript code common to both of the following samples: + +```js // User click the video capture button. document.querySelector('#showVideo').addEventListener('click', e => getStreamFromTheHost(e)); async function getStreamFromTheHost(e) { try { // Request stream to the host with unique stream id. - const stream = await window.chrome.webview.getTextureStream('webview2-abcd1234'); + const stream = await window.chrome.webview.getTextureStream('webview2-abcd1234'); // The MediaStream object is returned and it gets video MediaStreamTrack element from it. const video_tracks = stream.getVideoTracks(); const videoTrack = video_tracks[0]; @@ -35,13 +42,16 @@ async function getStreamFromTheHost(e) { console.log(error); } } -Win32 C++ +``` + +## Win32 C++ +```cpp UINT32 luid, // Get the LUID (Graphic adapter) that the WebView renderer uses. coreWebView->GetRenderAdapterLUID(&luid); // Create D3D device based on the WebView's LUID. ComPtr d3d_device = MyCreateD3DDevice(luid); -// Register unique texture stream that the host can provide. +// Register unique texture stream that the host can provide. ComPtr webviewTextureStream; g_webviewStaging3->CreateTextureStream(L"webview2-abcd1234", d3d_device.Get(), &webviewTextureStream); // Register the Origin URL that the target renderer could stream of the registered stream id. The request from not registered origin will fail to stream. @@ -78,7 +88,7 @@ webviewTextureStream->add_TextureError(Callback Date: Mon, 12 Sep 2022 23:36:57 -0700 Subject: [PATCH 03/19] user property of RenderAdapterLUID instead of method, etc --- specs/APIReview_TextureStream.md | 120 +++++++++++++++---------------- 1 file changed, 60 insertions(+), 60 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index ff42a4411..7461d3cdb 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -26,29 +26,58 @@ regards to frame copy. This is Javascript code common to both of the following samples: ```js -// User click the video capture button. +// User click the video capture button to start straming. document.querySelector('#showVideo').addEventListener('click', e => getStreamFromTheHost(e)); async function getStreamFromTheHost(e) { try { // Request stream to the host with unique stream id. + // getTextureStream W3C standard MediaStream object that has only video + // MediaStreamTrack. const stream = await window.chrome.webview.getTextureStream('webview2-abcd1234'); // The MediaStream object is returned and it gets video MediaStreamTrack element from it. const video_tracks = stream.getVideoTracks(); const videoTrack = video_tracks[0]; + window.videoTrack = videoTrack; // Show the video via Video Element. document.getElementById(video_id).srcObject = stream; } catch (error) { console.log(error); } } + +// Developer can use commands and events for MediaStream and MediaStreamTrack. + +// It will completely stop the current streaming. If it restarts, it +// should call getTextureStream again. +function stopStreaming() { + window.videoTrack.addEventListener('ended', () => { + delete window.videoTrack; + }); + + window.videoTrack.stop(); +} + +// Most of events on the MediaStream/MediaStreamTrack will be supported. + +// No video streaming from the host. 
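+// ('mute' fires while the track temporarily has no frames to deliver, for
+// example when the host has stopped presenting.)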
+window.videoTrack.addEventListener('mute', () => { + console.log('mute state); +}); + +// Sent to the track when data becomes available again, ending the muted state. +window.videoTrack.addEventListener('unmut', () => { + console.log('unmute state); +}); + + window.videoTrack.stop(); ``` ## Win32 C++ ```cpp UINT32 luid, // Get the LUID (Graphic adapter) that the WebView renderer uses. -coreWebView->GetRenderAdapterLUID(&luid); +coreWebView->get_RenderAdapterLUID(&luid); // Create D3D device based on the WebView's LUID. ComPtr d3d_device = MyCreateD3DDevice(luid); // Register unique texture stream that the host can provide. @@ -89,56 +118,27 @@ webviewTextureStream->add_TextureError(Callback d3d_device; - HANDLE slimCoreHandle; - HRESULT hr; - ComPtr texture_buffer; - int64_t bufferId = -1; - switch (message) - { - case IDC_TEST_SEND_TEXTURE: - if (webviewTextureStream) { - // Present API should be called on same thread where main WebView - // object is created. - bufferId = (int)wParam; - texture_buffer = texture_address_to_buffer_ids[(HANDLE)bufferId]; - // assert(texture_buffer != nullptr); - if (texture_buffer) { - // Notify the renderer for updated texture on the shared buffer. - webviewTextureStream->SetBuffer(texture_buffer.Get(), texture_buffer_info_->timestamp); - webviewTextureStream->Present(); - } - } - break; - case IDC_TEST_REQUEST_BUFFER: - // Retrieve available shared buffer. - hr = webviewTextureStream->GetAvailableBuffer(&texture_buffer); - if (SUCCEEDED(hr)) { - texture_buffer->get_Handle((HANDLE*)&bufferId); - } - SendBufferIdToOOFCaptureEngine(false, nullptr, bufferId); - break; - case IDC_TEST_CREATE_NEW_BUFFER: - if (webviewTextureStream) { - ComPtr texture_buffer; - UINT32 width = (UINT32)wParam; - UINT32 height = (UINT32)lParam; - // Create shared buffer. - webviewTextureStream->CreateSharedBuffer(width, height, &texture_buffer); - texture_buffer->get_Handle(&slimCoreHandle); - texture_address_to_buffer_ids[slimCoreHandle] = texture_buffer; - SendBufferIdToOOFCaptureEngine(true, slimCoreHandle, (int)slimCoreHandle); - } - break; - default: - return DefWindowProc(hWnd, message, wParam, lParam); - break; +HRESULT CallWebView2API(bool createBuffer, + UINT32 width, + UINT32 height, + bool requestAvailableBufer, + bool sendTexture, + ICoreWebView2StagingTexture* send_texture, + UINT64 timestamp, + ICoreWebView2StagingTexture** texture) { + if (createBuffer) { + // Create shared buffer. + return webviewTextureStream->CreateSharedBuffer(width, height, &texture); + } + + if (requestAvailableBufer) { + return webviewTextureStream->GetAvailableBuffer(&texture); + } + + if (sendTexture) { + // Notify the renderer for updated texture on the shared buffer. + webviewTextureStream->SetBuffer(send_texture, timestamp); + webviewTextureStream->Present(); } } ``` @@ -184,16 +184,16 @@ interface ICoreWebView2Staging3 : IUnknown { [out, retval ] ICoreWebView2StagingTextureStream** value); /// Get the graphics adapter LUID of the renderer. The host should use this /// LUID adapter when creating D3D device to use with CreateTextureStream(). -HRESULT GetRenderAdapterLUID([out, retval] LUID* luid); + [propget] HRESULT RenderAdapterLUID([out, retval] LUID* luid); /// Listens for change of graphics adapter LUID of the browser. - /// The host can get the updated LUID by GetRenderAdapterLUID. It is expected + /// The host can get the updated LUID by RenderAdapterLUID. 
It is expected /// that the host updates texture's d3d Device with UpdateD3DDevice, /// removes existing buffers and creates new buffer. - HRESULT add_RenderAdapterLUIDUpdated( - [in] ICoreWebView2StagingRenderAdapterLUIDUpdatedEventHandler* eventHandler, + HRESULT add_RenderAdapterLUIDChanged( + [in] ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler* eventHandler, [out] EventRegistrationToken* token); /// Remove listener for start stream request. - HRESULT remove_RenderAdapterLUIDUpdated( + HRESULT remove_RenderAdapterLUIDChanged( [in] EventRegistrationToken token); } /// This is the interface that handles texture streaming. @@ -292,7 +292,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { [out] EventRegistrationToken* token); /// Remove listener for texture error event. HRESULT remove_TextureError([in] EventRegistrationToken token); - /// Updates d3d Device when it is updated by RenderAdapterLUIDUpdated + /// Updates d3d Device when it is updated by RenderAdapterLUIDChanged /// event. HRESULT UpdateD3DDevice([in] IUnknown* d3dDevice); } @@ -346,7 +346,7 @@ interface ICoreWebView2StagingTextureStreamTextureErrorEventArgs : IUnknown { HRESULT GetBuffer([out, retval] ICoreWebView2StagingTexture** buffer); } [uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] -interface ICoreWebView2StagingRenderAdapterLUIDUpdatedEventHandler : IUnknown { +interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { /// Called to provide the implementer with the event args for the /// corresponding event. HRESULT Invoke( @@ -354,7 +354,7 @@ interface ICoreWebView2StagingRenderAdapterLUIDUpdatedEventHandler : IUnknown { [in] IUnknown* args); } [uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] -interface ICoreWebView2StagingRenderAdapterLUIDUpdatedEventHandler : IUnknown { +interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { /// Called to provide the implementer with the event args for the /// corresponding event. HRESULT Invoke( From e2779530d4adaa209ddde29fa8bccf4087c3c1e0 Mon Sep 17 00:00:00 2001 From: sunggook Date: Thu, 22 Sep 2022 15:15:51 -0700 Subject: [PATCH 04/19] add TextureReceived APIs and sample --- specs/APIReview_TextureStream.md | 855 ++++++++++++++++++------------- 1 file changed, 491 insertions(+), 364 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 7461d3cdb..1bfea41fc 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -1,364 +1,491 @@ -TextureStream -=============================================================================================== - -# Background -Many native apps use a native engine for real-time communication scenarios, which include video -capture, networking and video rendering. However, often, these apps still use WebView or -Electron for UI rendering. The separation between real-time video rendering and UI rendering -prevents apps from rendering real-time video inside the web contents. This forces apps to -render the real-time video on top of the web contents, which is limiting. Rendering video on -top constrains the user experience and it may also cause performance problems. -We can ask the native apps to use web renderer for video handling because web standard already -provides these features through WebRTC APIs. The end developers, however, prefer to use -their existing engine such as capturing and composition, meanwhile using WebRTC API for rendering. 
- -# Description -The proposed APIs will allow the end developers to stream the captured or composed video frame to -the WebView renderer where Javascript is able to insert the frame to the page through W3C standard -API of Video, MediaStream element for displaying it. -The API will use the shared GPU texture buffer so that it can minimize the overall cost with -regards to frame copy. - -# Examples - -## Javascript - -This is Javascript code common to both of the following samples: - -```js -// User click the video capture button to start straming. -document.querySelector('#showVideo').addEventListener('click', - e => getStreamFromTheHost(e)); -async function getStreamFromTheHost(e) { - try { - // Request stream to the host with unique stream id. - // getTextureStream W3C standard MediaStream object that has only video - // MediaStreamTrack. - const stream = await window.chrome.webview.getTextureStream('webview2-abcd1234'); - // The MediaStream object is returned and it gets video MediaStreamTrack element from it. - const video_tracks = stream.getVideoTracks(); - const videoTrack = video_tracks[0]; - window.videoTrack = videoTrack; - // Show the video via Video Element. - document.getElementById(video_id).srcObject = stream; - } catch (error) { - console.log(error); - } -} - -// Developer can use commands and events for MediaStream and MediaStreamTrack. - -// It will completely stop the current streaming. If it restarts, it -// should call getTextureStream again. -function stopStreaming() { - window.videoTrack.addEventListener('ended', () => { - delete window.videoTrack; - }); - - window.videoTrack.stop(); -} - -// Most of events on the MediaStream/MediaStreamTrack will be supported. - -// No video streaming from the host. -window.videoTrack.addEventListener('mute', () => { - console.log('mute state); -}); - -// Sent to the track when data becomes available again, ending the muted state. -window.videoTrack.addEventListener('unmut', () => { - console.log('unmute state); -}); - - window.videoTrack.stop(); -``` - -## Win32 C++ -```cpp -UINT32 luid, -// Get the LUID (Graphic adapter) that the WebView renderer uses. -coreWebView->get_RenderAdapterLUID(&luid); -// Create D3D device based on the WebView's LUID. -ComPtr d3d_device = MyCreateD3DDevice(luid); -// Register unique texture stream that the host can provide. -ComPtr webviewTextureStream; -g_webviewStaging3->CreateTextureStream(L"webview2-abcd1234", d3d_device.Get(), &webviewTextureStream); -// Register the Origin URL that the target renderer could stream of the registered stream id. The request from not registered origin will fail to stream. -webviewTextureStream->AddRequestedFilter(L"https://edge-webscratch"); -// Listen to Start request -EventRegistrationToken start_token; -webviewTextureStream->add_StartRequested(Callback( - [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { - // Capture video stream by using native API, for example, Media Foundation on Windows. 
- StartMediaFoundationCapture(hWnd); - return S_OK; - }).Get(), &start_token); -EventRegistrationToken stop_token; -webviewTextureStream->add_StopRequested(Callback( - [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { - StopMediaFoundationCapture(); - return S_OK; - }).Get(), &stop_token); -EventRegistrationToken texture_token; -webviewTextureStream->add_TextureError(Callback( - [hWnd](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamTextureErrorEventArgs* args) { - COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND kind; - HRESULT hr = args->get_Kind(&kind); - assert(SUCCEEDED(hr)); - switch (kind) - { - case COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED: - case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND: - case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE: - // assert(false); - break; - default: - break; - } - return S_OK; - }).Get(), &texture_token); - -HRESULT CallWebView2API(bool createBuffer, - UINT32 width, - UINT32 height, - bool requestAvailableBufer, - bool sendTexture, - ICoreWebView2StagingTexture* send_texture, - UINT64 timestamp, - ICoreWebView2StagingTexture** texture) { - if (createBuffer) { - // Create shared buffer. - return webviewTextureStream->CreateSharedBuffer(width, height, &texture); - } - - if (requestAvailableBufer) { - return webviewTextureStream->GetAvailableBuffer(&texture); - } - - if (sendTexture) { - // Notify the renderer for updated texture on the shared buffer. - webviewTextureStream->SetBuffer(send_texture, timestamp); - webviewTextureStream->Present(); - } -} -``` - -# API Details -``` -[v1_enum] -typedef enum COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND { - /// The host can't create a TextureStream instance more than once - /// for a specific stream id. - COREWEBVIEW2_TEXTURE_STREAM_ERROR_STREAM_ID_ALREADY_REGISTERED, - /// Occurs when the host calls CreateBuffer or Present - /// APIs without being called of Start event. Or, 10 seconds passed before - /// calling these APIs since the OnStart event. - COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED, - /// The buffer has been removed using RemoveBuffer. - COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND, - /// The texture to be presented is already in use for rendering. - /// Call GetAvailableBuffer to determine an available buffer to present. - /// The developer can technically call SetBuffer multiple times. - /// But once they call Present, the buffer becomes "in use" until - /// they call SetBuffer and Present on a different buffer and wait a bit - /// for the original buffer to stop being used. - COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE, -} COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND; -/// This is ICoreWebView2Staging3 that returns the texture stream interface. -[uuid(96c27a45-f142-4873-80ad-9d0cd899b2b9), object, pointer_default(unique)] -interface ICoreWebView2Staging3 : IUnknown { - /// Registers the stream id that the host can handle, providing a - /// texture stream when requested from the WebView2's JavaScript code. - /// The host can register multiple unique stream instances, each with - /// a unique stream ID, enabling the host to stream from different sources - /// concurrently. - /// The host should call this only once for unique streamId. The second - /// call of already created streamId without destroying - /// ICoreWebView2StagingTextureStream object will return an error. - /// 'd3dDevice' is used for creating shared IDXGI resource and NT shared - /// of it. 
The host should use Adapter of the LUID from the GetRenderAdapterLUID - /// for creating the D3D Device. - HRESULT CreateTextureStream( - [in] LPCWSTR streamId, - [in] IUnknown* d3dDevice, - [out, retval ] ICoreWebView2StagingTextureStream** value); - /// Get the graphics adapter LUID of the renderer. The host should use this - /// LUID adapter when creating D3D device to use with CreateTextureStream(). - [propget] HRESULT RenderAdapterLUID([out, retval] LUID* luid); - /// Listens for change of graphics adapter LUID of the browser. - /// The host can get the updated LUID by RenderAdapterLUID. It is expected - /// that the host updates texture's d3d Device with UpdateD3DDevice, - /// removes existing buffers and creates new buffer. - HRESULT add_RenderAdapterLUIDChanged( - [in] ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler* eventHandler, - [out] EventRegistrationToken* token); - /// Remove listener for start stream request. - HRESULT remove_RenderAdapterLUIDChanged( - [in] EventRegistrationToken token); -} -/// This is the interface that handles texture streaming. -/// The most of APIs have to be called on UI thread. -[uuid(afca8431-633f-4528-abfe-7fc3bedd8962), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStream : IUnknown { - /// Get the stream ID of the object that is used when calling CreateTextureStream. - /// The caller must free the returned string with CoTaskMemFree. See - /// [API Conventions](/microsoft-edge/webview2/concepts/win32-api-conventions#strings). - // MSOWNERS: TBD (wv2core@microsoft.com) - [propget] HRESULT StreamId([out, retval] LPWSTR* id); - /// Adds an allowed url origin for the given stream id. The stream requests - /// could be made from any frame, including iframes, but these origins - /// must be registered first in order for the request to succeed. - /// The added filter will be persistent until - /// ICoreWebView2StagingTextureStream is destroyed or - /// RemoveRequestedFilter is called. - /// The renderer does not support wildcard so it will compare - /// literal string input to the requesting frame origin. So, the input string - /// should have a scheme like https://. - /// For example, https://www.valid-host.com, http://www.valid-host.com are - /// valid origins but www.valid-host.com, or *.valid-host.com. are not - /// valid origins. - /// getTextureStream() will fail unless the requesting frame's origin URL is - /// added to the request filter. - HRESULT AddRequestedFilter([in] LPCWSTR origin); - /// Remove added origin, which was added by AddRequestedFilter. - HRESULT RemoveRequestedFilter([in] LPCWSTR origin); - /// Listens for stream requests from the Javascript's getTextureStream call - /// for the given stream id. It is called for the first request only, the - /// subsequent requests of same stream id will not be called. - /// It is expected that the host provides the stream within 10s after - /// being requested. The first call to Present() fulfills the stream request. - HRESULT add_StartRequested( - [in] ICoreWebView2StagingTextureStreamStartRequestedEventHandler* eventHandler, - [out] EventRegistrationToken* token); - /// Remove listener for start stream request. - HRESULT remove_StartRequested( - [in] EventRegistrationToken token); - /// Listen to stop stream request once the stream started. - /// It is called when user stop all streaming requests from - /// the renderers (Javascript) or the host calls the Stop API. The renderer - /// can stream again by calling the streaming request API. 
- /// The renderer cleared all registered buffers before sending - /// the stop request event so that the callback of the next start request - /// should register the textures again. - /// The event is triggered when all requests for given stream id closed - /// by the Javascript, or the host's Stop API call. - HRESULT add_StopRequested( - [in] ICoreWebView2StagingTextureStreamStopRequestedEventHandler* eventHandler, - [out] EventRegistrationToken* token); - /// Remove listener for stop stream request. - HRESULT remove_StopRequested( - [in] EventRegistrationToken token); - /// Creates shared buffer that will be referenced by the host and the browser. - /// By using the shared buffer mechanism, the host does not have to - /// send respective texture to the renderer, instead it notifies it - /// with internal buffer id, which is the identity of the shared buffer. - /// The shared buffer is 2D texture, IDXGIResource, format and will be - /// exposed through shared HANDLE or IUnknown type through ICoreWebView2StagingTexture. - /// Whenever the host has new texture to write, it should ask - /// reusable ICoreWebView2StagingTexture from the GetAvailableBuffer, - /// which returns ICoreWebView2StagingTexture. - /// If the GetAvailableBuffer returns an error, then the host calls the - /// CreateBuffer to allocate new shared buffer. - /// The API also registers created shared handle to the browser once it - /// created the resource. - HRESULT CreateBuffer( - [in] UINT32 width, - [in] UINT32 height, - [out, retval] ICoreWebView2StagingTexture** buffer); - /// GetAvailableBuffer can be called on any thread like SetBuffer. - HRESULT GetAvailableBuffer([out, retval] ICoreWebView2StagingTexture** buffer); - /// Remove texture buffer when the host removes the backed 2D texture. - /// The host can save the existing resources by deleting 2D textures - /// when it changes the frame sizes. - HRESULT RemoveBuffer([in] ICoreWebView2StagingTexture* buffer); - /// Indicates that the buffer is ready to present. - /// The buffer must be retrieved from the GetAvailableBuffer. - /// The host writes new texture to the local shared 2D texture of - /// the buffer id, which is created via CreateBuffer. - /// SetBuffer API can be called in any thread. - HRESULT SetBuffer([in] ICoreWebView2StagingTexture* buffer, - [in] ULONGLONG timestamp); - /// Render texture that is current set ICoreWebView2StagingTexture. - HRESULT Present(); - /// Stop streaming of the current stream id. - /// API calls of Present, CreateBuffer will fail after this - /// with an error of COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED. - /// The Javascript can restart the stream with getTextureStream. - HRESULT Stop(); - /// Event handler for those that occur at the Renderer side, the example - /// are CreateBuffer, Present, or Stop. - HRESULT add_TextureError( - [in] ICoreWebView2StagingTextureStreamTextureErrorEventHandler* eventHandler, - [out] EventRegistrationToken* token); - /// Remove listener for texture error event. - HRESULT remove_TextureError([in] EventRegistrationToken token); - /// Updates d3d Device when it is updated by RenderAdapterLUIDChanged - /// event. - HRESULT UpdateD3DDevice([in] IUnknown* d3dDevice); -} -/// Texture stream buffer that the host writes to so that the Renderer -/// will render on it. -[uuid(0836f09c-34bd-47bf-914a-99fb56ae2d07), object, pointer_default(unique)] -interface ICoreWebView2StagingTexture : IUnknown { - /// Returns shared Windows NT handle. 
The caller expected to open it with - /// ID3D11Device1::OpenSharedResource1 and writes the incoming texture to it. - [propget] HRESULT Handle([out, retval] HANDLE* handle); - /// Returns IUnknown type that could be query interface to IDXGIResource. - /// The caller can write incoming texture to it. - [propget] HRESULT Resource([out, retval] IUnknown** resource); -} -/// This is the callback for new texture stream request. -[uuid(62d09330-00a9-41bf-a9ae-55aaef8b3c44), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamStartRequestedEventHandler : IUnknown { - //// Called to provide the implementer with the event args for the - //// corresponding event. There are no event args and the args - //// parameter will be null. - HRESULT Invoke( - [in] ICoreWebView2StagingTextureStream* sender, - [in] IUnknown* args); -} -/// This is the callback for stop request of texture stream. -[uuid(4111102a-d19f-4438-af46-efc563b2b9cf), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamStopRequestedEventHandler : IUnknown { - /// Called to provide the implementer with the event args for the - /// corresponding event. There are no event args and the args - /// parameter will be null. - HRESULT Invoke( - [in] ICoreWebView2StagingTextureStream* sender, - [in] IUnknown* args); -} -/// This is the callback for texture stream rendering error. -[uuid(52cb8898-c711-401a-8f97-3646831ba72d), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamTextureErrorEventHandler : IUnknown { - /// Called to provide the implementer with the event args for the - /// corresponding event. - HRESULT Invoke( - [in] ICoreWebView2StagingTextureStream* sender, - [in] ICoreWebView2StagingTextureStreamTextureErrorEventArgs* args); -} -/// This is the event args interface for texture stream error callback. -[uuid(0e1730c1-03df-4ad2-b847-be4d63adf700), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamTextureErrorEventArgs : IUnknown { - /// Error kind. - [propget] HRESULT Kind([out, retval] - COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND* value); - // Texture buffer that the error is associated with. - HRESULT GetBuffer([out, retval] ICoreWebView2StagingTexture** buffer); -} -[uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] -interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { - /// Called to provide the implementer with the event args for the - /// corresponding event. - HRESULT Invoke( - [in] ICoreWebView2StagingTextureStream* sender, - [in] IUnknown* args); -} -[uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] -interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { - /// Called to provide the implementer with the event args for the - /// corresponding event. - HRESULT Invoke( - [in] ICoreWebView2Staging3 * sender, - [in] IUnknown* args); -} -``` +TextureStream +=============================================================================================== + +# Background +Many native apps use a native engine for real-time communication scenarios, which include video +capture, networking and video rendering. However, often, these apps still use WebView or +Electron for UI rendering. The separation between real-time video rendering and UI rendering +prevents apps from rendering real-time video inside the web contents. This forces apps to +render the real-time video on top of the web contents, which is limiting. 
Rendering video on +top constrains the user experience and it may also cause performance problems. +We can ask the native apps to use web renderer for video handling because web standard already +provides these features through WebRTC APIs. The end developers, however, prefer to use +their existing engine such as capturing and composition, meanwhile using WebRTC API for rendering. + +# Description +The proposed APIs will allow the end developers to stream the captured or composed video frame to +the WebView renderer where Javascript is able to insert the frame to the page through W3C standard +API of Video, MediaStream element for displaying it. +The API will use the shared GPU texture buffer so that it can minimize the overall cost with +regards to frame copy. + +# Examples + +## Javascript + +This is Javascript code common to both of the following samples: + +```js +// getTextureStream sample. + +// Developer can stream from the host and its returned object is MediaStream +// that is same what getUserMedia returns. Technically, developer can do all +// what MediaStream and its video MediaStreamTrack provide. + +// Scenario: User clicks the button and show stream from the host via video element. +document.querySelector('#showVideo').addEventListener('click', + e => getStreamFromTheHost(e)); +async function getStreamFromTheHost(e) { + try { + // Request stream to the host with unique stream id. + // getTextureStream W3C standard MediaStream object that has only video + // MediaStreamTrack. + const stream = await window.chrome.webview.getTextureStream('webview2-abcd1234'); + // The MediaStream object is returned and it gets video MediaStreamTrack element from it. + const video_tracks = stream.getVideoTracks(); + const videoTrack = video_tracks[0]; + window.videoTrack = videoTrack; + // Show the video via Video Element. + document.getElementById(video_id).srcObject = stream; + } catch (error) { + console.log(error); + } +} + +// Developer can use commands and events for MediaStream and MediaStreamTrack. + +// It will completely stop the current streaming. If it restarts, it +// should call getTextureStream again. +function stopStreaming() { + window.videoTrack.addEventListener('ended', () => { + delete window.videoTrack; + }); + + window.videoTrack.stop(); +} + +// Most of events on the MediaStream/MediaStreamTrack will be supported. + +// No video streaming from the host. +window.videoTrack.addEventListener('mute', () => { + console.log('mute state); +}); + +// Sent to the track when data becomes available again, ending the muted state. +window.videoTrack.addEventListener('unmut', () => { + console.log('unmute state); +}); + +``` + +```js +// registerTextureStream sample. + +// Developer can even send back the processed video frame to the host. + +// Scenario: User clicks to stream from the host and sends back them 1s late +// to the host. +document.querySelector('#sendBack').addEventListener('click', + e => getStreamFromTheHost(e)); +const transformer = new TransformStream({ + async transform(videoFrame, controller) { + // Delay frame 100ms. + await new Promise(resolve => setTimeout(resolve, 1000)); + + // We can create new video frame and edit them, and pass them back here + // if needed. 
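+    // For example (hypothetical sketch; 'canvas' would hold the edited pixels):
+    //   const edited = new VideoFrame(canvas, { timestamp: videoFrame.timestamp });
+    //   videoFrame.close();
+    //   controller.enqueue(edited);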
+ controller.enqueue(videoFrame); + }, +}); + +async function SendBackToHost(stream_id) { + console.log("stream_id:" + stream_id); + const mediaStream = await window.chrome.webview.getTextureStream(stream_id); + const videoStream = mediaStream.getVideoTracks()[0]; + + const trackProcessor = new MediaStreamTrackProcessor(videoStream); + const trackGenerator = new MediaStreamTrackGenerator('video'); + + // Test purpose, we send it back what just received. + window.chrome.webview.registerTextureStream(stream_id, trackGenerator); + + trackProcessor.readable.pipeThrough(transformer).pipeTo(trackGenerator.writable) +} + +``` + +## Win32 C++ +```cpp +UINT32 luid, +// Get the LUID (Graphic adapter) that the WebView renderer uses. +coreWebView->get_RenderAdapterLUID(&luid); +// Create D3D device based on the WebView's LUID. +ComPtr d3d_device = MyCreateD3DDevice(luid); +// Register unique texture stream that the host can provide. +ComPtr webviewTextureStream; +g_webviewStaging3->CreateTextureStream(L"webview2-abcd1234", d3d_device.Get(), &webviewTextureStream); +// Register the Origin URL that the target renderer could stream of the registered stream id. The request from not registered origin will fail to stream. +webviewTextureStream->AddRequestedFilter(L"https://edge-webscratch"); +// Listen to Start request +EventRegistrationToken start_token; +webviewTextureStream->add_StartRequested(Callback( + [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { + // Capture video stream by using native API, for example, Media Foundation on Windows. + StartMediaFoundationCapture(hWnd); + return S_OK; + }).Get(), &start_token); +EventRegistrationToken stop_token; +webviewTextureStream->add_StopRequested(Callback( + [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { + StopMediaFoundationCapture(); + return S_OK; + }).Get(), &stop_token); +EventRegistrationToken texture_token; +webviewTextureStream->add_TextureError(Callback( + [hWnd](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamTextureErrorEventArgs* args) { + COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND kind; + HRESULT hr = args->get_Kind(&kind); + assert(SUCCEEDED(hr)); + switch (kind) + { + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED: + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND: + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE: + // assert(false); + break; + default: + break; + } + return S_OK; + }).Get(), &texture_token); + +webviewTextureStream->AddTextureReceivedRequestedFilter(L"https://edge-webscratch"); + +EventRegistrationToken post_token; +webviewTextureStream->add_TextureReceived(Callback( + [&](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamTextureReceivedEventArgs* args) { + ULONGLONG timestamp; + args->get_Timestamp(×tamp); + + HANDLE handle; + args->get_Handle(&handle); + DrawTextureWithWICBitmap(handle); + return S_OK; + }).Get(), &post_token); + +EventRegistrationToken post_stop_token; +webviewTextureStream->add_StopTextureReceived(Callback( + [&](ICoreWebView2StagingTextureStream* sender, IUnknown* args) { + return S_OK; + }).Get(), &post_stop_token); + +HRESULT CallWebView2API(bool createBuffer, + UINT32 width, + UINT32 height, + bool requestAvailableBufer, + bool sendTexture, + ICoreWebView2StagingTexture* send_texture, + UINT64 timestamp, + ICoreWebView2StagingTexture** texture) { + if (createBuffer) { + // Create shared buffer. 
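+    // Creating a buffer also registers its NT shared handle with the browser;
+    // the returned ICoreWebView2StagingTexture exposes that handle for the
+    // capture engine to open and write into.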
+ return webviewTextureStream->CreateSharedBuffer(width, height, &texture); + } + + if (requestAvailableBufer) { + return webviewTextureStream->GetAvailableBuffer(&texture); + } + + if (sendTexture) { + // Notify the renderer for updated texture on the shared buffer. + webviewTextureStream->SetBuffer(send_texture, timestamp); + webviewTextureStream->Present(); + } +} +``` + +# API Details +``` +[v1_enum] +typedef enum COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND { + /// The host can't create a TextureStream instance more than once + /// for a specific stream id. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_STREAM_ID_ALREADY_REGISTERED, + /// Occurs when the host calls CreateBuffer or Present + /// APIs without being called of Start event. Or, 10 seconds passed before + /// calling these APIs since the OnStart event. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED, + /// The buffer has been removed using RemoveBuffer. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND, + /// The texture to be presented is already in use for rendering. + /// Call GetAvailableBuffer to determine an available buffer to present. + /// The developer can technically call SetBuffer multiple times. + /// But once they call Present, the buffer becomes "in use" until + /// they call SetBuffer and Present on a different buffer and wait a bit + /// for the original buffer to stop being used. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE, +} COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND; +/// This is ICoreWebView2Staging3 that returns the texture stream interface. +[uuid(96c27a45-f142-4873-80ad-9d0cd899b2b9), object, pointer_default(unique)] +interface ICoreWebView2Staging3 : IUnknown { + /// Registers the stream id that the host can handle, providing a + /// texture stream when requested from the WebView2's JavaScript code. + /// The host can register multiple unique stream instances, each with + /// a unique stream ID, enabling the host to stream from different sources + /// concurrently. + /// The host should call this only once for unique streamId. The second + /// call of already created streamId without destroying + /// ICoreWebView2StagingTextureStream object will return an error. + /// 'd3dDevice' is used for creating shared IDXGI resource and NT shared + /// of it. The host should use Adapter of the LUID from the GetRenderAdapterLUID + /// for creating the D3D Device. + HRESULT CreateTextureStream( + [in] LPCWSTR streamId, + [in] IUnknown* d3dDevice, + [out, retval ] ICoreWebView2StagingTextureStream** value); + /// Get the graphics adapter LUID of the renderer. The host should use this + /// LUID adapter when creating D3D device to use with CreateTextureStream(). + [propget] HRESULT RenderAdapterLUID([out, retval] LUID* luid); + /// Listens for change of graphics adapter LUID of the browser. + /// The host can get the updated LUID by RenderAdapterLUID. It is expected + /// that the host updates texture's d3d Device with UpdateD3DDevice, + /// removes existing buffers and creates new buffer. + HRESULT add_RenderAdapterLUIDChanged( + [in] ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler* eventHandler, + [out] EventRegistrationToken* token); + /// Remove listener for start stream request. + HRESULT remove_RenderAdapterLUIDChanged( + [in] EventRegistrationToken token); +} +/// This is the interface that handles texture streaming. +/// The most of APIs have to be called on UI thread. 
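+/// (SetBuffer and GetAvailableBuffer are the documented exceptions and may be
+/// called from any thread.)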
+[uuid(afca8431-633f-4528-abfe-7fc3bedd8962), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStream : IUnknown { + /// Get the stream ID of the object that is used when calling CreateTextureStream. + /// The caller must free the returned string with CoTaskMemFree. See + /// [API Conventions](/microsoft-edge/webview2/concepts/win32-api-conventions#strings). + // MSOWNERS: TBD (wv2core@microsoft.com) + [propget] HRESULT StreamId([out, retval] LPWSTR* id); + /// Adds an allowed url origin for the given stream id. The stream requests + /// could be made from any frame, including iframes, but these origins + /// must be registered first in order for the request to succeed. + /// The added filter will be persistent until + /// ICoreWebView2StagingTextureStream is destroyed or + /// RemoveRequestedFilter is called. + /// The renderer does not support wildcard so it will compare + /// literal string input to the requesting frame origin. So, the input string + /// should have a scheme like https://. + /// For example, https://www.valid-host.com, http://www.valid-host.com are + /// valid origins but www.valid-host.com, or *.valid-host.com. are not + /// valid origins. + /// getTextureStream() will fail unless the requesting frame's origin URL is + /// added to the request filter. + HRESULT AddRequestedFilter([in] LPCWSTR origin); + /// Remove added origin, which was added by AddRequestedFilter. + HRESULT RemoveRequestedFilter([in] LPCWSTR origin); + /// Listens for stream requests from the Javascript's getTextureStream call + /// for the given stream id. It is called for the first request only, the + /// subsequent requests of same stream id will not be called. + /// It is expected that the host provides the stream within 10s after + /// being requested. The first call to Present() fulfills the stream request. + HRESULT add_StartRequested( + [in] ICoreWebView2StagingTextureStreamStartRequestedEventHandler* eventHandler, + [out] EventRegistrationToken* token); + /// Remove listener for start stream request. + HRESULT remove_StartRequested( + [in] EventRegistrationToken token); + /// Listen to stop stream request once the stream started. + /// It is called when user stop all streaming requests from + /// the renderers (Javascript) or the host calls the Stop API. The renderer + /// can stream again by calling the streaming request API. + /// The renderer cleared all registered buffers before sending + /// the stop request event so that the callback of the next start request + /// should register the textures again. + /// The event is triggered when all requests for given stream id closed + /// by the Javascript, or the host's Stop API call. + HRESULT add_StopRequested( + [in] ICoreWebView2StagingTextureStreamStopRequestedEventHandler* eventHandler, + [out] EventRegistrationToken* token); + /// Remove listener for stop stream request. + HRESULT remove_StopRequested( + [in] EventRegistrationToken token); + /// Creates shared buffer that will be referenced by the host and the browser. + /// By using the shared buffer mechanism, the host does not have to + /// send respective texture to the renderer, instead it notifies it + /// with internal buffer id, which is the identity of the shared buffer. + /// The shared buffer is 2D texture, IDXGIResource, format and will be + /// exposed through shared HANDLE or IUnknown type through ICoreWebView2StagingTexture. 
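/// The host typically opens that HANDLE with ID3D11Device1::OpenSharedResource1
/// and writes each captured frame into the opened texture.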
+ /// Whenever the host has new texture to write, it should ask + /// reusable ICoreWebView2StagingTexture from the GetAvailableBuffer, + /// which returns ICoreWebView2StagingTexture. + /// If the GetAvailableBuffer returns an error, then the host calls the + /// CreateBuffer to allocate new shared buffer. + /// The API also registers created shared handle to the browser once it + /// created the resource. + HRESULT CreateBuffer( + [in] UINT32 width, + [in] UINT32 height, + [out, retval] ICoreWebView2StagingTexture** buffer); + /// GetAvailableBuffer can be called on any thread like SetBuffer. + HRESULT GetAvailableBuffer([out, retval] ICoreWebView2StagingTexture** buffer); + /// Remove texture buffer when the host removes the backed 2D texture. + /// The host can save the existing resources by deleting 2D textures + /// when it changes the frame sizes. + HRESULT RemoveBuffer([in] ICoreWebView2StagingTexture* buffer); + /// Indicates that the buffer is ready to present. + /// The buffer must be retrieved from the GetAvailableBuffer. + /// The host writes new texture to the local shared 2D texture of + /// the buffer id, which is created via CreateBuffer. + /// SetBuffer API can be called in any thread. + HRESULT SetBuffer([in] ICoreWebView2StagingTexture* buffer, + [in] ULONGLONG timestamp); + /// Render texture that is current set ICoreWebView2StagingTexture. + HRESULT Present(); + /// Stop streaming of the current stream id. + /// API calls of Present, CreateBuffer will fail after this + /// with an error of COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED. + /// The Javascript can restart the stream with getTextureStream. + HRESULT Stop(); + /// Event handler for those that occur at the Renderer side, the example + /// are CreateBuffer, Present, or Stop. + HRESULT add_TextureError( + [in] ICoreWebView2StagingTextureStreamTextureErrorEventHandler* eventHandler, + [out] EventRegistrationToken* token); + /// Remove listener for texture error event. + HRESULT remove_TextureError([in] EventRegistrationToken token); + /// Updates d3d Device when it is updated by RenderAdapterLUIDChanged + /// event. + HRESULT UpdateD3DDevice([in] IUnknown* d3dDevice); + /// Event handler for processed texture by Javascript. + /// There is no Start event for texture received. Whenever texture are sent + /// from the Javascript, the event is triggered. + HRESULT add_TextureReceived( + [in] ICoreWebView2StagingTextureStreamTextureReceivedEventHandler* eventHandler, + [out] EventRegistrationToken* token); + HRESULT remove_TextureReceived([in] EventRegistrationToken token); + /// Event handler for stopping of the processed texture stream. + HRESULT add_StopTextureReceived( + [in] ICoreWebView2StagingTextureStreamStopTextureReceivedEventHandler* eventHandler, + [out] EventRegistrationToken* token); + HRESULT remove_StopTextureReceived([in] EventRegistrationToken token); + /// Creates number of shared buffers for texture received from the browser to + /// the host. The proper shared buffer count is important in order not to drop + /// video frame. Dropping video frame usually leads to poor video quality. + /// The default count is 4 based on heuristic test. + /// The buffer count is per different image size that means if there are + /// variable image sizes, then it will use more buffers. + [propput] HRESULT TextureReceivedBufferCount([in] UINT32 count); + /// Adds an allowed url origin for the given stream id for texture received + /// operation. 
Javascript can send texture stream to the host only when + /// the origin it runs are allowed by the host. + /// The added origin should be registered first by the AddRequestedFilter + /// because registerTextureStream works only for the stream from + /// the getTextureStream. + HRESULT AddTextureReceivedRequestedFilter([in] LPCWSTR origin); + /// Remove added origin, which was added by AddTextureReceivedRequestedFilter. + HRESULT RemoveTextureReceivedRequestedFilter([in] LPCWSTR origin); +} +/// Texture stream buffer that the host writes to so that the Renderer +/// will render on it. +[uuid(0836f09c-34bd-47bf-914a-99fb56ae2d07), object, pointer_default(unique)] +interface ICoreWebView2StagingTexture : IUnknown { + /// Returns shared Windows NT handle. The caller expected to open it with + /// ID3D11Device1::OpenSharedResource1 and writes the incoming texture to it. + [propget] HRESULT Handle([out, retval] HANDLE* handle); + /// Returns IUnknown type that could be query interface to IDXGIResource. + /// The caller can write incoming texture to it. + [propget] HRESULT Resource([out, retval] IUnknown** resource); +} +/// This is the callback for new texture stream request. +[uuid(62d09330-00a9-41bf-a9ae-55aaef8b3c44), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamStartRequestedEventHandler : IUnknown { + //// Called to provide the implementer with the event args for the + //// corresponding event. There are no event args and the args + //// parameter will be null. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] IUnknown* args); +} +/// This is the callback for stop request of texture stream. +[uuid(4111102a-d19f-4438-af46-efc563b2b9cf), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamStopRequestedEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. There are no event args and the args + /// parameter will be null. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] IUnknown* args); +} +/// This is the callback for texture stream rendering error. +[uuid(52cb8898-c711-401a-8f97-3646831ba72d), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamTextureErrorEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] ICoreWebView2StagingTextureStreamTextureErrorEventArgs* args); +} +/// This is the event args interface for texture stream error callback. +[uuid(0e1730c1-03df-4ad2-b847-be4d63adf700), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamTextureErrorEventArgs : IUnknown { + /// Error kind. + [propget] HRESULT Kind([out, retval] + COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND* value); + // Texture buffer that the error is associated with. + HRESULT GetBuffer([out, retval] ICoreWebView2StagingTexture** buffer); +} +[uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] +interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. 
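/// As with the other argument-less events in this spec, there are no event
/// args and the args parameter is expected to be null.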
+ HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] IUnknown* args); +} +[uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] +interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. + HRESULT Invoke( + [in] ICoreWebView2Staging3 * sender, + [in] IUnknown* args); +} +/// This is the callback for texture received. +[uuid(9ea4228c-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamTextureReceivedEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] ICoreWebView2StagingTextureStreamTextureReceivedEventArgs* args); +} +/// This is the event args interface for texture received. +[uuid(a4c2fa3a-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamTextureReceivedEventArgs : IUnknown { + /// Texture buffer handle. The handle's lifetime is owned by the + /// ICoreWebView2StagingTextureStream object so the host must not close it. + + /// The same handle value will be used for same buffer so the host can use + /// handle value as a unique buffer key. + [propget] HRESULT Handle([out, retval] HANDLE* handle); + + /// Texture buffer resource. The resource's lifetime is owned by the + /// ICoreWebView2StagingTextureStream object so the host must not close it. + + /// The same resource value will be used for same buffer so the host can use + /// resource value as a unique buffer key. + [propget] HRESULT Resource([out, retval] IUnknown** resource); + + /// It is timestamp that the original sent video frame's during SetBuffer. + [propget] HRESULT Timestamp([out, retval] ULONGLONG* timestamp); +} +/// This is the callback for texture received stop. +[uuid(77eb4638-2f05-11ed-a261-0242ac120002), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamStopTextureReceivedEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] IUnknown* args); +} +``` From d28f0a3b4ec75ff655e7920ac0b7fe63ae67693c Mon Sep 17 00:00:00 2001 From: sunggook Date: Tue, 27 Sep 2022 14:27:42 -0700 Subject: [PATCH 05/19] introduce FinishUsingBuffer --- specs/APIReview_TextureStream.md | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 1bfea41fc..69265f83f 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -368,13 +368,6 @@ interface ICoreWebView2StagingTextureStream : IUnknown { [in] ICoreWebView2StagingTextureStreamStopTextureReceivedEventHandler* eventHandler, [out] EventRegistrationToken* token); HRESULT remove_StopTextureReceived([in] EventRegistrationToken token); - /// Creates number of shared buffers for texture received from the browser to - /// the host. The proper shared buffer count is important in order not to drop - /// video frame. Dropping video frame usually leads to poor video quality. - /// The default count is 4 based on heuristic test. - /// The buffer count is per different image size that means if there are - /// variable image sizes, then it will use more buffers. 
- [propput] HRESULT TextureReceivedBufferCount([in] UINT32 count); /// Adds an allowed url origin for the given stream id for texture received /// operation. Javascript can send texture stream to the host only when /// the origin it runs are allowed by the host. @@ -469,15 +462,17 @@ interface ICoreWebView2StagingTextureStreamTextureReceivedEventArgs : IUnknown { /// handle value as a unique buffer key. [propget] HRESULT Handle([out, retval] HANDLE* handle); - /// Texture buffer resource. The resource's lifetime is owned by the - /// ICoreWebView2StagingTextureStream object so the host must not close it. - + /// Texture buffer resource. /// The same resource value will be used for same buffer so the host can use - /// resource value as a unique buffer key. + /// resource value as a unique buffer key. [propget] HRESULT Resource([out, retval] IUnknown** resource); /// It is timestamp that the original sent video frame's during SetBuffer. [propget] HRESULT Timestamp([out, retval] ULONGLONG* timestamp); + + /// The host notifies the browser that it is done with current the buffer handle, + /// so the browser can recycle the buffer again. + HRESULT FinishUsingBuffer(); } /// This is the callback for texture received stop. [uuid(77eb4638-2f05-11ed-a261-0242ac120002), object, pointer_default(unique)] From 99ab9c02eb588bb992a35be88aea330e18fff01d Mon Sep 17 00:00:00 2001 From: sunggook Date: Fri, 11 Nov 2022 11:56:52 -0800 Subject: [PATCH 06/19] use webTexture name for received texture --- specs/APIReview_TextureStream.md | 296 +++++++++++++++++++++---------- 1 file changed, 201 insertions(+), 95 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 69265f83f..7142787aa 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -89,10 +89,10 @@ document.querySelector('#sendBack').addEventListener('click', e => getStreamFromTheHost(e)); const transformer = new TransformStream({ async transform(videoFrame, controller) { - // Delay frame 100ms. + // Delay frame 100ms. await new Promise(resolve => setTimeout(resolve, 1000)); - // We can create new video frame and edit them, and pass them back here + // We can create new video frame and edit them, and pass them back here // if needed. controller.enqueue(videoFrame); }, @@ -116,68 +116,91 @@ async function SendBackToHost(stream_id) { ## Win32 C++ ```cpp -UINT32 luid, -// Get the LUID (Graphic adapter) that the WebView renderer uses. -coreWebView->get_RenderAdapterLUID(&luid); -// Create D3D device based on the WebView's LUID. -ComPtr d3d_device = MyCreateD3DDevice(luid); -// Register unique texture stream that the host can provide. -ComPtr webviewTextureStream; -g_webviewStaging3->CreateTextureStream(L"webview2-abcd1234", d3d_device.Get(), &webviewTextureStream); -// Register the Origin URL that the target renderer could stream of the registered stream id. The request from not registered origin will fail to stream. -webviewTextureStream->AddRequestedFilter(L"https://edge-webscratch"); -// Listen to Start request -EventRegistrationToken start_token; -webviewTextureStream->add_StartRequested(Callback( - [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { - // Capture video stream by using native API, for example, Media Foundation on Windows. 
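+ // StartMediaFoundationCapture is a host-defined helper, not a WebView2 API;
+ // it is assumed to start the capture pipeline that produces the frames which
+ // are later handed to the stream via SetBuffer/Present (see CallWebView2API below).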
- StartMediaFoundationCapture(hWnd); - return S_OK; - }).Get(), &start_token); -EventRegistrationToken stop_token; -webviewTextureStream->add_StopRequested(Callback( - [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { - StopMediaFoundationCapture(); - return S_OK; - }).Get(), &stop_token); -EventRegistrationToken texture_token; -webviewTextureStream->add_TextureError(Callback( - [hWnd](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamTextureErrorEventArgs* args) { - COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND kind; - HRESULT hr = args->get_Kind(&kind); - assert(SUCCEEDED(hr)); - switch (kind) - { - case COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED: - case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND: - case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE: - // assert(false); - break; - default: - break; - } - return S_OK; - }).Get(), &texture_token); - -webviewTextureStream->AddTextureReceivedRequestedFilter(L"https://edge-webscratch"); - -EventRegistrationToken post_token; -webviewTextureStream->add_TextureReceived(Callback( - [&](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamTextureReceivedEventArgs* args) { - ULONGLONG timestamp; - args->get_Timestamp(×tamp); - - HANDLE handle; - args->get_Handle(&handle); - DrawTextureWithWICBitmap(handle); - return S_OK; - }).Get(), &post_token); - -EventRegistrationToken post_stop_token; -webviewTextureStream->add_StopTextureReceived(Callback( - [&](ICoreWebView2StagingTextureStream* sender, IUnknown* args) { - return S_OK; - }).Get(), &post_stop_token); +HRESULT CreateTextureStream(ICoreWebView2Staging3* coreWebView) + UINT32 luid; + + // Get the LUID (Graphic adapter) that the WebView renderer uses. + CHECK_FAILURE(coreWebView->get_RenderAdapterLUID(&luid)); + + // Create D3D device based on the WebView's LUID. + ComPtr d3d_device = MyCreateD3DDevice(luid); + + // Register unique texture stream that the host can provide. + ComPtr webviewTextureStream; + CHECK_FAILURE(g_webviewStaging3->CreateTextureStream(L"webview2-abcd1234", + d3d_device.Get(), &webviewTextureStream)); + + // Register the Origin URL that the target renderer could stream of the registered stream id. The request from not registered origin will fail to stream. + CHECK_FAILURE(webviewTextureStream->AddRequestedFilter(L"https://edge-webscratch")); + + // Listen to Start request. The host will setup system video streaming and + // start sending the texture. + EventRegistrationToken start_token; + CHECK_FAILURE(webviewTextureStream->add_StartRequested(Callback( + [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { + // Capture video stream by using native API, for example, Media Foundation on Windows. + StartMediaFoundationCapture(hWnd); + return S_OK; + }).Get(), &start_token)); + + // Listen to Stop request. The host end system provided video stream and + // clean any operation resources. 
+ EventRegistrationToken stop_token; + CHECK_FAILURE(webviewTextureStream->add_StopRequested(Callback( + [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { + StopMediaFoundationCapture(); + return S_OK; + }).Get(), &stop_token)); + + EventRegistrationToken texture_token; + CHECK_FAILURE(webviewTextureStream->add_TextureError(Callback( + [hWnd](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamTextureErrorEventArgs* args) { + COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND kind; + HRESULT hr = args->get_Kind(&kind); + assert(SUCCEEDED(hr)); + switch (kind) + { + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED: + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND: + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE: + // assert(false); + break; + default: + break; + } + return S_OK; + }).Get(), &texture_token)); + + // Add allowed origin for registerTextureStream call. 'registerTextureStream' + // call from the Javascript will fail if the requested origin is not registered + // with AddWebTextureRequestedFilter. + CHECK_FAILURE(webviewTextureStream->AddWebTextureRequestedFilter(L"https://edge-webscratch")); + + // Registers listener for video streaming from Javascript. + EventRegistrationToken post_token; + CHECK_FAILURE(webviewTextureStream->add_WebTextureReceived(Callback( + [&](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs* args) { + // Javascript send a texture stream. + ComPtr texture_received; + args->GetWebTexture(&texture_received); + + ULONGLONG timestamp; + texture_received->get_Timestamp(×tamp); + HANDLE handle; + texture_received->get_Handle(&handle); + DrawTextureWithWICBitmap(handle, timestamp); + + return S_OK; + }).Get(), &post_token)); + + // Register listener of video stream from the Javascript end. + EventRegistrationToken stopped_token; + CHECK_FAILURE(webviewTextureStream->add_WebTextureStreamStopped(Callback( + [&](ICoreWebView2StagingTextureStream* sender, IUnknown* args) { + + return S_OK; + }).Get(), &stopped_token)); +} // CreateTextureStream HRESULT CallWebView2API(bool createBuffer, UINT32 width, @@ -356,27 +379,27 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// Updates d3d Device when it is updated by RenderAdapterLUIDChanged /// event. HRESULT UpdateD3DDevice([in] IUnknown* d3dDevice); - /// Event handler for processed texture by Javascript. - /// There is no Start event for texture received. Whenever texture are sent + /// Event handler for receiving texture by Javascript. + /// There is no Start event for web texture. Whenever texture are sent /// from the Javascript, the event is triggered. - HRESULT add_TextureReceived( - [in] ICoreWebView2StagingTextureStreamTextureReceivedEventHandler* eventHandler, + HRESULT add_WebTextureReceived( + [in] ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler* eventHandler, [out] EventRegistrationToken* token); - HRESULT remove_TextureReceived([in] EventRegistrationToken token); - /// Event handler for stopping of the processed texture stream. - HRESULT add_StopTextureReceived( - [in] ICoreWebView2StagingTextureStreamStopTextureReceivedEventHandler* eventHandler, + HRESULT remove_WebTextureReceived([in] EventRegistrationToken token); + + /// Event handler for stopping of the receiving texture stream. 
+ HRESULT add_WebTextureStreamStopped( + [in] ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler* eventHandler, [out] EventRegistrationToken* token); - HRESULT remove_StopTextureReceived([in] EventRegistrationToken token); - /// Adds an allowed url origin for the given stream id for texture received + HRESULT remove_WebTextureStreamStopped([in] EventRegistrationToken token); + + /// Adds an allowed url origin for the given stream id for web texture /// operation. Javascript can send texture stream to the host only when /// the origin it runs are allowed by the host. - /// The added origin should be registered first by the AddRequestedFilter - /// because registerTextureStream works only for the stream from - /// the getTextureStream. - HRESULT AddTextureReceivedRequestedFilter([in] LPCWSTR origin); - /// Remove added origin, which was added by AddTextureReceivedRequestedFilter. - HRESULT RemoveTextureReceivedRequestedFilter([in] LPCWSTR origin); + HRESULT AddWebTextureRequestedFilter([in] LPCWSTR origin); + + /// Remove added origin, which was added by AddWebTextureRequestedFilter. + HRESULT RemoveWebTextureRequestedFilter([in] LPCWSTR origin); } /// Texture stream buffer that the host writes to so that the Renderer /// will render on it. @@ -443,40 +466,123 @@ interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { [in] ICoreWebView2Staging3 * sender, [in] IUnknown* args); } -/// This is the callback for texture received. +/// This is the callback for web texture. [uuid(9ea4228c-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamTextureReceivedEventHandler : IUnknown { +interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler : IUnknown { /// Called to provide the implementer with the event args for the /// corresponding event. HRESULT Invoke( [in] ICoreWebView2StagingTextureStream* sender, - [in] ICoreWebView2StagingTextureStreamTextureReceivedEventArgs* args); + [in] ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs* args); } -/// This is the event args interface for texture received. + +/// This is the event args interface for web texture. [uuid(a4c2fa3a-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamTextureReceivedEventArgs : IUnknown { +interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs : IUnknown { + // Return ICoreWebView2StagingWebTexture object. + // The call does not create new ICoreWebView2StagingWebTexture object, instead + // returns the same object. + + // The shared buffer handle will be reused when ICoreWebView2StagingWebTexture + // object is released. So, the host should not refer handle or resource of + // the ICoreWebView2StagingWebTexture after its release. + + HRESULT GetWebTexture([out, retval] ICoreWebView2StagingWebTexture** value); +} + +/// Texture received stream buffer that the renderer writes to so that the host +/// will read on it. +[uuid(b94265ae-4c1e-11ed-bdc3-0242ac120002), object, pointer_default(unique)] +interface ICoreWebView2StagingWebTexture : IUnknown { /// Texture buffer handle. The handle's lifetime is owned by the /// ICoreWebView2StagingTextureStream object so the host must not close it. - /// The same handle value will be used for same buffer so the host can use /// handle value as a unique buffer key. 
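+ /// For example, the host can keep a map from this handle value to the
+ /// ID3D11Texture2D it opened via ID3D11Device1::OpenSharedResource1 and reuse
+ /// that opened texture whenever the same handle is received again.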
+ /// If the host opens its own resources by handle, then it is suggested + /// that the host removes those resources when the handle's texture size + /// is changed because the browser also removed previously allocated different + /// sized buffers when image size is changed. [propget] HRESULT Handle([out, retval] HANDLE* handle); - /// Texture buffer resource. + /// Texture buffer resource. /// The same resource value will be used for same buffer so the host can use - /// resource value as a unique buffer key. + /// resource value as a unique buffer key. + /// ICoreWebView2StagingTextureStream object has a reference of the resource + /// so ICoreWebView2StagingWebTexture holds same resource object for + /// the same buffer. [propget] HRESULT Resource([out, retval] IUnknown** resource); - /// It is timestamp that the original sent video frame's during SetBuffer. - [propget] HRESULT Timestamp([out, retval] ULONGLONG* timestamp); + /// It is timestamp of the web texture. Javascript can set this value + /// with any value, but it is suggested to use same value of its original + /// video frame that is a value of SetBuffer so that the host is able to + /// tell the receiving texture delta. + [propget] HRESULT Timestamp([out, retval] ULONGLONG* value); +} + +/// This is the callback for web texture stop. +[uuid(77eb4638-2f05-11ed-a261-0242ac120002), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] IUnknown* args); +}/// This is the callback for web texture. +[uuid(9ea4228c-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler : IUnknown { + /// Called to provide the implementer with the event args for the + /// corresponding event. + HRESULT Invoke( + [in] ICoreWebView2StagingTextureStream* sender, + [in] ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs* args); +} + +/// This is the event args interface for web texture. +[uuid(a4c2fa3a-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] +interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs : IUnknown { + // Return ICoreWebView2StagingWebTexture object. + // The call does not create new ICoreWebView2StagingWebTexture object, instead + // returns the same object. + + // The shared buffer handle will be reused when ICoreWebView2StagingWebTexture + // object is released. So, the host should not refer handle or resource of + // the ICoreWebView2StagingWebTexture after its release. - /// The host notifies the browser that it is done with current the buffer handle, - /// so the browser can recycle the buffer again. - HRESULT FinishUsingBuffer(); + HRESULT GetWebTexture([out, retval] ICoreWebView2StagingWebTexture** value); } -/// This is the callback for texture received stop. + +/// Texture received stream buffer that the renderer writes to so that the host +/// will read on it. +[uuid(b94265ae-4c1e-11ed-bdc3-0242ac120002), object, pointer_default(unique)] +interface ICoreWebView2StagingWebTexture : IUnknown { + /// Texture buffer handle. The handle's lifetime is owned by the + /// ICoreWebView2StagingTextureStream object so the host must not close it. + /// The same handle value will be used for same buffer so the host can use + /// handle value as a unique buffer key. 
+ /// If the host opens its own resources by handle, then it is suggested + /// that the host removes those resources when the handle's texture size + /// is changed because the browser also removed previously allocated different + /// sized buffers when image size is changed. + [propget] HRESULT Handle([out, retval] HANDLE* handle); + + /// Texture buffer resource. + /// The same resource value will be used for same buffer so the host can use + /// resource value as a unique buffer key. + /// ICoreWebView2StagingTextureStream object has a reference of the resource + /// so ICoreWebView2StagingWebTexture holds same resource object for + /// the same buffer. + [propget] HRESULT Resource([out, retval] IUnknown** resource); + + /// It is timestamp of the web texture. Javascript can set this value + /// with any value, but it is suggested to use same value of its original + /// video frame that is a value of SetBuffer so that the host is able to + /// tell the receiving texture delta. + [propget] HRESULT Timestamp([out, retval] ULONGLONG* value); +} + +/// This is the callback for web texture stop. [uuid(77eb4638-2f05-11ed-a261-0242ac120002), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamStopTextureReceivedEventHandler : IUnknown { +interface ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler : IUnknown { /// Called to provide the implementer with the event args for the /// corresponding event. HRESULT Invoke( From bc5d2674edecfdf303bdf8d28308a44a61eee0be Mon Sep 17 00:00:00 2001 From: sunggook Date: Mon, 14 Nov 2022 15:26:10 -0800 Subject: [PATCH 07/19] renamed to AddAllowedOrigin, etc --- specs/APIReview_TextureStream.md | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 7142787aa..69e3b6da8 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -42,11 +42,13 @@ async function getStreamFromTheHost(e) { // MediaStreamTrack. const stream = await window.chrome.webview.getTextureStream('webview2-abcd1234'); // The MediaStream object is returned and it gets video MediaStreamTrack element from it. - const video_tracks = stream.getVideoTracks(); - const videoTrack = video_tracks[0]; + const videoTracks = stream.getVideoTracks(); + const videoTrack = videoTracks[0]; window.videoTrack = videoTrack; // Show the video via Video Element. - document.getElementById(video_id).srcObject = stream; + let videoId = document.createElement('video'); + videoId.srcObject = stream; + body.appendChild(videoId); } catch (error) { console.log(error); } @@ -68,12 +70,12 @@ function stopStreaming() { // No video streaming from the host. window.videoTrack.addEventListener('mute', () => { - console.log('mute state); + console.log('mute state'); }); // Sent to the track when data becomes available again, ending the muted state. 
window.videoTrack.addEventListener('unmut', () => { - console.log('unmute state); + console.log('unmute state'); }); ``` @@ -100,15 +102,13 @@ const transformer = new TransformStream({ async function SendBackToHost(stream_id) { console.log("stream_id:" + stream_id); + const trackGenerator = new MediaStreamTrackGenerator('video'); + await window.chrome.webview.registerTextureStream(stream_id, trackGenerator); + const mediaStream = await window.chrome.webview.getTextureStream(stream_id); const videoStream = mediaStream.getVideoTracks()[0]; const trackProcessor = new MediaStreamTrackProcessor(videoStream); - const trackGenerator = new MediaStreamTrackGenerator('video'); - - // Test purpose, we send it back what just received. - window.chrome.webview.registerTextureStream(stream_id, trackGenerator); - trackProcessor.readable.pipeThrough(transformer).pipeTo(trackGenerator.writable) } @@ -131,7 +131,7 @@ HRESULT CreateTextureStream(ICoreWebView2Staging3* coreWebView) d3d_device.Get(), &webviewTextureStream)); // Register the Origin URL that the target renderer could stream of the registered stream id. The request from not registered origin will fail to stream. - CHECK_FAILURE(webviewTextureStream->AddRequestedFilter(L"https://edge-webscratch")); + CHECK_FAILURE(webviewTextureStream->AddAllowedOrigin(L"https://edge-webscratch")); // Listen to Start request. The host will setup system video streaming and // start sending the texture. @@ -173,8 +173,8 @@ HRESULT CreateTextureStream(ICoreWebView2Staging3* coreWebView) // Add allowed origin for registerTextureStream call. 'registerTextureStream' // call from the Javascript will fail if the requested origin is not registered - // with AddWebTextureRequestedFilter. - CHECK_FAILURE(webviewTextureStream->AddWebTextureRequestedFilter(L"https://edge-webscratch")); + // with AddWebTextureAllowedOrigin. + CHECK_FAILURE(webviewTextureStream->AddWebTextureAllowedOrigin(L"https://edge-webscratch")); // Registers listener for video streaming from Javascript. EventRegistrationToken post_token; @@ -281,7 +281,6 @@ interface ICoreWebView2Staging3 : IUnknown { [in] EventRegistrationToken token); } /// This is the interface that handles texture streaming. -/// The most of APIs have to be called on UI thread. [uuid(afca8431-633f-4528-abfe-7fc3bedd8962), object, pointer_default(unique)] interface ICoreWebView2StagingTextureStream : IUnknown { /// Get the stream ID of the object that is used when calling CreateTextureStream. @@ -294,7 +293,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// must be registered first in order for the request to succeed. /// The added filter will be persistent until /// ICoreWebView2StagingTextureStream is destroyed or - /// RemoveRequestedFilter is called. + /// RemoveAllowedOrigin is called. /// The renderer does not support wildcard so it will compare /// literal string input to the requesting frame origin. So, the input string /// should have a scheme like https://. @@ -303,14 +302,15 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// valid origins. /// getTextureStream() will fail unless the requesting frame's origin URL is /// added to the request filter. - HRESULT AddRequestedFilter([in] LPCWSTR origin); - /// Remove added origin, which was added by AddRequestedFilter. - HRESULT RemoveRequestedFilter([in] LPCWSTR origin); + HRESULT AddAllowedOrigin([in] LPCWSTR origin); + /// Remove added origin, which was added by AddAllowedOrigin. 
+ HRESULT RemoveAllowedOrigin([in] LPCWSTR origin); /// Listens for stream requests from the Javascript's getTextureStream call - /// for the given stream id. It is called for the first request only, the - /// subsequent requests of same stream id will not be called. - /// It is expected that the host provides the stream within 10s after - /// being requested. The first call to Present() fulfills the stream request. + /// for this stream's id. It is called for the first request only, and will + /// not be called with subsequent requests of same stream id from any pages + /// in the middle of request handling or after it returns success. + /// The request is regarded as success only when the host provides the stream, + /// Present API call, within 10s after being requested. HRESULT add_StartRequested( [in] ICoreWebView2StagingTextureStreamStartRequestedEventHandler* eventHandler, [out] EventRegistrationToken* token); @@ -396,10 +396,10 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// Adds an allowed url origin for the given stream id for web texture /// operation. Javascript can send texture stream to the host only when /// the origin it runs are allowed by the host. - HRESULT AddWebTextureRequestedFilter([in] LPCWSTR origin); + HRESULT AddWebTextureAllowedOrigin([in] LPCWSTR origin); - /// Remove added origin, which was added by AddWebTextureRequestedFilter. - HRESULT RemoveWebTextureRequestedFilter([in] LPCWSTR origin); + /// Remove added origin, which was added by AddWebTextureAllowedOrigin. + HRESULT RemoveWebTextureAllowedOrigin([in] LPCWSTR origin); } /// Texture stream buffer that the host writes to so that the Renderer /// will render on it. From a28ec231cd8563c373130c46e03bc2e2f214a1c0 Mon Sep 17 00:00:00 2001 From: sunggook Date: Fri, 18 Nov 2022 17:33:25 -0800 Subject: [PATCH 08/19] Use TextureBuffer, etc --- specs/APIReview_TextureStream.md | 205 +++++++++++++------------------ 1 file changed, 88 insertions(+), 117 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 69e3b6da8..d99e2110e 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -46,9 +46,9 @@ async function getStreamFromTheHost(e) { const videoTrack = videoTracks[0]; window.videoTrack = videoTrack; // Show the video via Video Element. - let videoId = document.createElement('video'); - videoId.srcObject = stream; - body.appendChild(videoId); + const videoElement = document.createElement('video'); + videoElement.srcObject = stream; + document.body.appendChild(videoElement); } catch (error) { console.log(error); } @@ -130,7 +130,7 @@ HRESULT CreateTextureStream(ICoreWebView2Staging3* coreWebView) CHECK_FAILURE(g_webviewStaging3->CreateTextureStream(L"webview2-abcd1234", d3d_device.Get(), &webviewTextureStream)); - // Register the Origin URL that the target renderer could stream of the registered stream id. The request from not registered origin will fail to stream. + // Register the Origin URI that the target renderer could stream of the registered stream id. The request from not registered origin will fail to stream. CHECK_FAILURE(webviewTextureStream->AddAllowedOrigin(L"https://edge-webscratch")); // Listen to Start request. The host will setup system video streaming and @@ -211,7 +211,7 @@ HRESULT CallWebView2API(bool createBuffer, UINT64 timestamp, ICoreWebView2StagingTexture** texture) { if (createBuffer) { - // Create shared buffer. + // Create TextureBuffer. 
return webviewTextureStream->CreateSharedBuffer(width, height, &texture); } @@ -220,7 +220,7 @@ HRESULT CallWebView2API(bool createBuffer, } if (sendTexture) { - // Notify the renderer for updated texture on the shared buffer. + // Notify the renderer for updated texture on the TextureBuffer. webviewTextureStream->SetBuffer(send_texture, timestamp); webviewTextureStream->Present(); } @@ -238,14 +238,14 @@ typedef enum COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND { /// APIs without being called of Start event. Or, 10 seconds passed before /// calling these APIs since the OnStart event. COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED, - /// The buffer has been removed using RemoveBuffer. + /// The TextureBuffer has been removed using RemoveBuffer. COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND, /// The texture to be presented is already in use for rendering. - /// Call GetAvailableBuffer to determine an available buffer to present. + /// Call GetAvailableBuffer to determine an available TextureBuffer to present. /// The developer can technically call SetBuffer multiple times. - /// But once they call Present, the buffer becomes "in use" until - /// they call SetBuffer and Present on a different buffer and wait a bit - /// for the original buffer to stop being used. + /// But once they call Present, the TextureBuffer becomes "in use" until + /// they call SetBuffer and Present on a different TextureBuffer and wait a bit + /// for the original TextureBuffer to stop being used. COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE, } COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND; /// This is ICoreWebView2Staging3 that returns the texture stream interface. @@ -272,7 +272,7 @@ interface ICoreWebView2Staging3 : IUnknown { /// Listens for change of graphics adapter LUID of the browser. /// The host can get the updated LUID by RenderAdapterLUID. It is expected /// that the host updates texture's d3d Device with UpdateD3DDevice, - /// removes existing buffers and creates new buffer. + /// removes existing buffers and creates new TextureBuffer. HRESULT add_RenderAdapterLUIDChanged( [in] ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler* eventHandler, [out] EventRegistrationToken* token); @@ -286,24 +286,33 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// Get the stream ID of the object that is used when calling CreateTextureStream. /// The caller must free the returned string with CoTaskMemFree. See /// [API Conventions](/microsoft-edge/webview2/concepts/win32-api-conventions#strings). - // MSOWNERS: TBD (wv2core@microsoft.com) - [propget] HRESULT StreamId([out, retval] LPWSTR* id); - /// Adds an allowed url origin for the given stream id. The stream requests - /// could be made from any frame, including iframes, but these origins + /// MSOWNERS: TBD (wv2core@microsoft.com) + [propget] HRESULT Id([out, retval] LPWSTR* id); + /// Adds an allowed URI origin. The stream requests could be made from + /// any frame, including iframes, but the origin of the page in the frames /// must be registered first in order for the request to succeed. - /// The added filter will be persistent until + /// The added origin will be persistent until /// ICoreWebView2StagingTextureStream is destroyed or /// RemoveAllowedOrigin is called. /// The renderer does not support wildcard so it will compare - /// literal string input to the requesting frame origin. So, the input string - /// should have a scheme like https://. 
- /// For example, https://www.valid-host.com, http://www.valid-host.com are - /// valid origins but www.valid-host.com, or *.valid-host.com. are not - /// valid origins. - /// getTextureStream() will fail unless the requesting frame's origin URL is - /// added to the request filter. + /// literal string input to the requesting frame's page's origin after + /// normalization. The page origin will be normalized so ASCII characters + /// in the scheme and hostname will be lowercased, and non-ASCII characters + /// in the hostname will be normalized to their punycode form. + /// For example `HTTPS://WWW.ã„“.COM` will be normalized to + /// `https://www.xn--kfk.com` for comparison. So, the input string + /// should have a scheme like https://. For example, + /// https://www.valid-host.com, http://www.valid-host.com are + /// valid origins but www.valid-host.com, or https://*.valid-host.com. are not + /// valid origins. If invalid origin is provided, the API will return an error + /// of E_INVALIDARG. + /// getTextureStream() will fail unless the requesting frame's origin URI is + /// added to the allowed origins. HRESULT AddAllowedOrigin([in] LPCWSTR origin); /// Remove added origin, which was added by AddAllowedOrigin. + /// The allowed or disallowed origins will take effect only when Javascript + /// request a streaming. So, once the streaming started, it does not stop + /// streaming. HRESULT RemoveAllowedOrigin([in] LPCWSTR origin); /// Listens for stream requests from the Javascript's getTextureStream call /// for this stream's id. It is called for the first request only, and will @@ -311,6 +320,9 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// in the middle of request handling or after it returns success. /// The request is regarded as success only when the host provides the stream, /// Present API call, within 10s after being requested. + + /// The Texture stream becomes 'Started' state once it starts sending a texture + /// until it calls Stop API or receives 'StopRequested' event. HRESULT add_StartRequested( [in] ICoreWebView2StagingTextureStreamStartRequestedEventHandler* eventHandler, [out] EventRegistrationToken* token); @@ -321,45 +333,59 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// It is called when user stop all streaming requests from /// the renderers (Javascript) or the host calls the Stop API. The renderer /// can stream again by calling the streaming request API. - /// The renderer cleared all registered buffers before sending + /// The renderer cleared all registered TextureBuffers before sending /// the stop request event so that the callback of the next start request /// should register the textures again. /// The event is triggered when all requests for given stream id closed /// by the Javascript, or the host's Stop API call. + /// TextureBuffer related API calls after this event will return an error + /// of HRESULT_FROM_WIN32(ERROR_INVALID_STATE). HRESULT add_StopRequested( [in] ICoreWebView2StagingTextureStreamStopRequestedEventHandler* eventHandler, [out] EventRegistrationToken* token); /// Remove listener for stop stream request. HRESULT remove_StopRequested( [in] EventRegistrationToken token); - /// Creates shared buffer that will be referenced by the host and the browser. - /// By using the shared buffer mechanism, the host does not have to + /// Creates TextureBuffer that will be referenced by the host and the browser. 
+ /// By using the TextureBuffer mechanism, the host does not have to /// send respective texture to the renderer, instead it notifies it - /// with internal buffer id, which is the identity of the shared buffer. - /// The shared buffer is 2D texture, IDXGIResource, format and will be + /// with internal TextureBuffer id, which is the identity of the TextureBuffer. + /// The TextureBuffer is 2D texture, IDXGIResource, format and will be /// exposed through shared HANDLE or IUnknown type through ICoreWebView2StagingTexture. /// Whenever the host has new texture to write, it should ask /// reusable ICoreWebView2StagingTexture from the GetAvailableBuffer, /// which returns ICoreWebView2StagingTexture. /// If the GetAvailableBuffer returns an error, then the host calls the - /// CreateBuffer to allocate new shared buffer. + /// CreateBuffer to allocate new TextureBuffer. /// The API also registers created shared handle to the browser once it /// created the resource. + + /// Unit for width and height is texture unit (in texels). + /// 'https://learn.microsoft.com/en-us/windows/win32/api/d3d11ns-d3d11-d3d11_texture2d_desc' HRESULT CreateBuffer( [in] UINT32 width, [in] UINT32 height, [out, retval] ICoreWebView2StagingTexture** buffer); - /// GetAvailableBuffer can be called on any thread like SetBuffer. + /// Returns reuseable TextureBuffer for video frame rendering. + /// Once the renderer finishes rendering of TextureBuffer's video frame, which + /// was requested by Present, the renderer informs the host so that it can + /// be reused. The host has to create new TextureBuffer with CreateBuffer + /// if the API return an error HRESULT_FROM_WIN32(ERROR_NO_MORE_ITEMS). HRESULT GetAvailableBuffer([out, retval] ICoreWebView2StagingTexture** buffer); - /// Remove texture buffer when the host removes the backed 2D texture. + /// Remove TextureBuffer when the host removes the backed 2D texture. /// The host can save the existing resources by deleting 2D textures /// when it changes the frame sizes. HRESULT RemoveBuffer([in] ICoreWebView2StagingTexture* buffer); - /// Indicates that the buffer is ready to present. - /// The buffer must be retrieved from the GetAvailableBuffer. + /// Indicates that the TextureBuffer is ready to present. + /// The TextureBuffer must be retrieved from the GetAvailableBuffer. /// The host writes new texture to the local shared 2D texture of - /// the buffer id, which is created via CreateBuffer. + /// the TextureBuffer id, which is created via CreateBuffer. /// SetBuffer API can be called in any thread. + + /// `timestamp` is video capture time with unit of 100-nanosecond units. + /// The value does not have to be exact captured time, but it should be + /// increasing order, the next Present's TextureBuffer should have later + /// time. HRESULT SetBuffer([in] ICoreWebView2StagingTexture* buffer, [in] ULONGLONG timestamp); /// Render texture that is current set ICoreWebView2StagingTexture. @@ -380,28 +406,33 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// event. HRESULT UpdateD3DDevice([in] IUnknown* d3dDevice); /// Event handler for receiving texture by Javascript. - /// There is no Start event for web texture. Whenever texture are sent - /// from the Javascript, the event is triggered. + /// `window.chrome.webview.registerTextureStream` call by Javascript will + /// request sending video frame to the host where it will filter requested + /// page's origin against allowed origins. 
If allowed, the Javascript will + /// send a video frame (web texture), through MediaStreamTrack insertable APIs, + /// MediaStreamTrackGenerator. + /// https://www.w3.org/TR/mediacapture-transform/. + /// WebTextureReceived event will be called only when it receives + /// a web texture. There is no start event for receiving web texture. HRESULT add_WebTextureReceived( [in] ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler* eventHandler, [out] EventRegistrationToken* token); + /// Remove listener for receiving texture stream. HRESULT remove_WebTextureReceived([in] EventRegistrationToken token); - /// Event handler for stopping of the receiving texture stream. HRESULT add_WebTextureStreamStopped( [in] ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler* eventHandler, [out] EventRegistrationToken* token); + /// Remove listener for receiving texture stream stopped. HRESULT remove_WebTextureStreamStopped([in] EventRegistrationToken token); - - /// Adds an allowed url origin for the given stream id for web texture + /// Adds an allowed URI origin for the given stream id for web texture /// operation. Javascript can send texture stream to the host only when /// the origin it runs are allowed by the host. HRESULT AddWebTextureAllowedOrigin([in] LPCWSTR origin); - /// Remove added origin, which was added by AddWebTextureAllowedOrigin. HRESULT RemoveWebTextureAllowedOrigin([in] LPCWSTR origin); } -/// Texture stream buffer that the host writes to so that the Renderer +/// TextureBuffer that the host writes to so that the Renderer /// will render on it. [uuid(0836f09c-34bd-47bf-914a-99fb56ae2d07), object, pointer_default(unique)] interface ICoreWebView2StagingTexture : IUnknown { @@ -447,7 +478,7 @@ interface ICoreWebView2StagingTextureStreamTextureErrorEventArgs : IUnknown { /// Error kind. [propget] HRESULT Kind([out, retval] COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND* value); - // Texture buffer that the error is associated with. + /// TextureBuffer that the error is associated with. HRESULT GetBuffer([out, retval] ICoreWebView2StagingTexture** buffer); } [uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] @@ -479,46 +510,17 @@ interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler : IUnk /// This is the event args interface for web texture. [uuid(a4c2fa3a-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs : IUnknown { - // Return ICoreWebView2StagingWebTexture object. - // The call does not create new ICoreWebView2StagingWebTexture object, instead - // returns the same object. + /// Return ICoreWebView2StagingWebTexture object. + /// The call does not create new ICoreWebView2StagingWebTexture object, instead + /// returns the same object. - // The shared buffer handle will be reused when ICoreWebView2StagingWebTexture - // object is released. So, the host should not refer handle or resource of - // the ICoreWebView2StagingWebTexture after its release. + /// The TextureBuffer handle will be reused when ICoreWebView2StagingWebTexture + /// object is released. So, the host should not refer handle or resource of + /// the ICoreWebView2StagingWebTexture after its release. HRESULT GetWebTexture([out, retval] ICoreWebView2StagingWebTexture** value); } -/// Texture received stream buffer that the renderer writes to so that the host -/// will read on it. 
-[uuid(b94265ae-4c1e-11ed-bdc3-0242ac120002), object, pointer_default(unique)] -interface ICoreWebView2StagingWebTexture : IUnknown { - /// Texture buffer handle. The handle's lifetime is owned by the - /// ICoreWebView2StagingTextureStream object so the host must not close it. - /// The same handle value will be used for same buffer so the host can use - /// handle value as a unique buffer key. - /// If the host opens its own resources by handle, then it is suggested - /// that the host removes those resources when the handle's texture size - /// is changed because the browser also removed previously allocated different - /// sized buffers when image size is changed. - [propget] HRESULT Handle([out, retval] HANDLE* handle); - - /// Texture buffer resource. - /// The same resource value will be used for same buffer so the host can use - /// resource value as a unique buffer key. - /// ICoreWebView2StagingTextureStream object has a reference of the resource - /// so ICoreWebView2StagingWebTexture holds same resource object for - /// the same buffer. - [propget] HRESULT Resource([out, retval] IUnknown** resource); - - /// It is timestamp of the web texture. Javascript can set this value - /// with any value, but it is suggested to use same value of its original - /// video frame that is a value of SetBuffer so that the host is able to - /// tell the receiving texture delta. - [propget] HRESULT Timestamp([out, retval] ULONGLONG* value); -} - /// This is the callback for web texture stop. [uuid(77eb4638-2f05-11ed-a261-0242ac120002), object, pointer_default(unique)] interface ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler : IUnknown { @@ -527,50 +529,28 @@ interface ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler : HRESULT Invoke( [in] ICoreWebView2StagingTextureStream* sender, [in] IUnknown* args); -}/// This is the callback for web texture. -[uuid(9ea4228c-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler : IUnknown { - /// Called to provide the implementer with the event args for the - /// corresponding event. - HRESULT Invoke( - [in] ICoreWebView2StagingTextureStream* sender, - [in] ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs* args); } -/// This is the event args interface for web texture. -[uuid(a4c2fa3a-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs : IUnknown { - // Return ICoreWebView2StagingWebTexture object. - // The call does not create new ICoreWebView2StagingWebTexture object, instead - // returns the same object. - - // The shared buffer handle will be reused when ICoreWebView2StagingWebTexture - // object is released. So, the host should not refer handle or resource of - // the ICoreWebView2StagingWebTexture after its release. - - HRESULT GetWebTexture([out, retval] ICoreWebView2StagingWebTexture** value); -} - -/// Texture received stream buffer that the renderer writes to so that the host +/// Received TextureBuffer that the renderer writes to so that the host /// will read on it. [uuid(b94265ae-4c1e-11ed-bdc3-0242ac120002), object, pointer_default(unique)] interface ICoreWebView2StagingWebTexture : IUnknown { - /// Texture buffer handle. The handle's lifetime is owned by the + /// TextureBuffer handle. The handle's lifetime is owned by the /// ICoreWebView2StagingTextureStream object so the host must not close it. 
- /// The same handle value will be used for same buffer so the host can use - /// handle value as a unique buffer key. + /// The same handle value will be used for same TextureBuffer so the host can use + /// handle value as a unique TextureBuffer key. /// If the host opens its own resources by handle, then it is suggested /// that the host removes those resources when the handle's texture size /// is changed because the browser also removed previously allocated different /// sized buffers when image size is changed. [propget] HRESULT Handle([out, retval] HANDLE* handle); - /// Texture buffer resource. - /// The same resource value will be used for same buffer so the host can use - /// resource value as a unique buffer key. + /// TextureBuffer resource. + /// The same resource value will be used for same TextureBuffer so the host can use + /// resource value as a unique TextureBuffer key. /// ICoreWebView2StagingTextureStream object has a reference of the resource /// so ICoreWebView2StagingWebTexture holds same resource object for - /// the same buffer. + /// the same TextureBuffer. [propget] HRESULT Resource([out, retval] IUnknown** resource); /// It is timestamp of the web texture. Javascript can set this value @@ -580,13 +560,4 @@ interface ICoreWebView2StagingWebTexture : IUnknown { [propget] HRESULT Timestamp([out, retval] ULONGLONG* value); } -/// This is the callback for web texture stop. -[uuid(77eb4638-2f05-11ed-a261-0242ac120002), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler : IUnknown { - /// Called to provide the implementer with the event args for the - /// corresponding event. - HRESULT Invoke( - [in] ICoreWebView2StagingTextureStream* sender, - [in] IUnknown* args); -} ``` From 039202188441649a0546c5174b2d65fcd56ec1ff Mon Sep 17 00:00:00 2001 From: sunggook Date: Mon, 21 Nov 2022 12:50:53 -0800 Subject: [PATCH 09/19] microseconds use --- specs/APIReview_TextureStream.md | 30 ++++++++++++------------------ 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index d99e2110e..3230b7260 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -2,22 +2,14 @@ TextureStream =============================================================================================== # Background -Many native apps use a native engine for real-time communication scenarios, which include video -capture, networking and video rendering. However, often, these apps still use WebView or -Electron for UI rendering. The separation between real-time video rendering and UI rendering -prevents apps from rendering real-time video inside the web contents. This forces apps to -render the real-time video on top of the web contents, which is limiting. Rendering video on -top constrains the user experience and it may also cause performance problems. -We can ask the native apps to use web renderer for video handling because web standard already -provides these features through WebRTC APIs. The end developers, however, prefer to use -their existing engine such as capturing and composition, meanwhile using WebRTC API for rendering. +Many native apps use a native engine for real-time communication scenarios, which include video capture, networking and video rendering. However, often, these apps still use WebView or Electron for UI rendering. 
The separation between real-time video rendering and UI rendering prevents apps from rendering real-time video inside the web contents. This forces apps to render the real-time video on top of the web contents, which is limiting. Rendering video on top constrains the user experience and it may also cause performance problems. We can ask the native apps to use web renderer for video handling because web standard already provides these features through WebRTC APIs. The end developers, however, prefer to use their existing engine such as capturing and composition, meanwhile using WebRTC API for rendering. # Description -The proposed APIs will allow the end developers to stream the captured or composed video frame to -the WebView renderer where Javascript is able to insert the frame to the page through W3C standard -API of Video, MediaStream element for displaying it. -The API will use the shared GPU texture buffer so that it can minimize the overall cost with -regards to frame copy. +The proposed APIs will allow the end developers to stream the captured or composed video frame to the WebView renderer where Javascript is able to insert the frame to the page through W3C standard API of Video, MediaStream element for displaying it. +The API will use the shared GPU texture buffer so that it can minimize the overall cost with regards to frame copy. + +The proposed APIs have dependency on the DirectX and its internal attributes such as +adapter LUID so it supports Win32/C++ and C++/WinRT APIs at this time. # Examples @@ -184,7 +176,7 @@ HRESULT CreateTextureStream(ICoreWebView2Staging3* coreWebView) ComPtr texture_received; args->GetWebTexture(&texture_received); - ULONGLONG timestamp; + UINT64 timestamp; texture_received->get_Timestamp(×tamp); HANDLE handle; texture_received->get_Handle(&handle); @@ -382,12 +374,12 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// the TextureBuffer id, which is created via CreateBuffer. /// SetBuffer API can be called in any thread. - /// `timestamp` is video capture time with unit of 100-nanosecond units. + /// `timestamp` is video capture time with unit of microseconds units. /// The value does not have to be exact captured time, but it should be /// increasing order, the next Present's TextureBuffer should have later /// time. HRESULT SetBuffer([in] ICoreWebView2StagingTexture* buffer, - [in] ULONGLONG timestamp); + [in] UINT64 timestamp); /// Render texture that is current set ICoreWebView2StagingTexture. HRESULT Present(); /// Stop streaming of the current stream id. @@ -420,6 +412,8 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// Remove listener for receiving texture stream. HRESULT remove_WebTextureReceived([in] EventRegistrationToken token); /// Event handler for stopping of the receiving texture stream. + /// It is expected that the host releases any holding handle/resource from + /// the WebTexture before an event handler returns. HRESULT add_WebTextureStreamStopped( [in] ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler* eventHandler, [out] EventRegistrationToken* token); @@ -557,7 +551,7 @@ interface ICoreWebView2StagingWebTexture : IUnknown { /// with any value, but it is suggested to use same value of its original /// video frame that is a value of SetBuffer so that the host is able to /// tell the receiving texture delta. 
- [propget] HRESULT Timestamp([out, retval] ULONGLONG* value); + [propget] HRESULT Timestamp([out, retval] UINT64* value); } ``` From cf1d3c2805b662b6a146d5c8c0801e6a45462057 Mon Sep 17 00:00:00 2001 From: sunggook Date: Mon, 21 Nov 2022 23:17:09 -0800 Subject: [PATCH 10/19] remove AddWebTextureAllowedOrigin, etc --- specs/APIReview_TextureStream.md | 63 +++++++++++++++++--------------- 1 file changed, 33 insertions(+), 30 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 3230b7260..0192949eb 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -9,7 +9,7 @@ The proposed APIs will allow the end developers to stream the captured or compos The API will use the shared GPU texture buffer so that it can minimize the overall cost with regards to frame copy. The proposed APIs have dependency on the DirectX and its internal attributes such as -adapter LUID so it supports Win32/C++ and C++/WinRT APIs at this time. +adapter LUID so it supports only Win32/C++ APIs at this time. # Examples @@ -122,8 +122,11 @@ HRESULT CreateTextureStream(ICoreWebView2Staging3* coreWebView) CHECK_FAILURE(g_webviewStaging3->CreateTextureStream(L"webview2-abcd1234", d3d_device.Get(), &webviewTextureStream)); - // Register the Origin URI that the target renderer could stream of the registered stream id. The request from not registered origin will fail to stream. - CHECK_FAILURE(webviewTextureStream->AddAllowedOrigin(L"https://edge-webscratch")); + // Register the Origin URI that the target renderer could stream of the registered + // stream id. The request from not registered origin will fail to stream. + + // `true` boolean value will add allowed origin for registerTextureStream as well. + CHECK_FAILURE(webviewTextureStream->AddAllowedOrigin(L"https://edge-webscratch"), true); // Listen to Start request. The host will setup system video streaming and // start sending the texture. @@ -268,7 +271,7 @@ interface ICoreWebView2Staging3 : IUnknown { HRESULT add_RenderAdapterLUIDChanged( [in] ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler* eventHandler, [out] EventRegistrationToken* token); - /// Remove listener for start stream request. + /// Remove listener for RenderAdapterLUIDChange event. HRESULT remove_RenderAdapterLUIDChanged( [in] EventRegistrationToken token); } @@ -300,7 +303,9 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// of E_INVALIDARG. /// getTextureStream() will fail unless the requesting frame's origin URI is /// added to the allowed origins. - HRESULT AddAllowedOrigin([in] LPCWSTR origin); + /// If `value` is TRUE, then the origin will also be added to WebTexture's + /// allowed origin. + HRESULT AddAllowedOrigin([in] LPCWSTR origin, [in] BOOL value); /// Remove added origin, which was added by AddAllowedOrigin. /// The allowed or disallowed origins will take effect only when Javascript /// request a streaming. So, once the streaming started, it does not stop @@ -318,7 +323,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { HRESULT add_StartRequested( [in] ICoreWebView2StagingTextureStreamStartRequestedEventHandler* eventHandler, [out] EventRegistrationToken* token); - /// Remove listener for start stream request. + /// Remove listener for StartRequest event. HRESULT remove_StartRequested( [in] EventRegistrationToken token); /// Listen to stop stream request once the stream started. 
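  /// An illustrative host-side flow for these two events (StartCapture and
  /// StopCapture below are hypothetical app helpers, not part of this API):
  ///   StartRequested -> StartCapture() -> CreateBuffer / SetBuffer / Present loop
  ///   StopRequested  -> StopCapture()  -> release the TextureBuffers that were created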
@@ -335,7 +340,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { HRESULT add_StopRequested( [in] ICoreWebView2StagingTextureStreamStopRequestedEventHandler* eventHandler, [out] EventRegistrationToken* token); - /// Remove listener for stop stream request. + /// Remove listener for StopRequested event. HRESULT remove_StopRequested( [in] EventRegistrationToken token); /// Creates TextureBuffer that will be referenced by the host and the browser. @@ -355,8 +360,8 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// Unit for width and height is texture unit (in texels). /// 'https://learn.microsoft.com/en-us/windows/win32/api/d3d11ns-d3d11-d3d11_texture2d_desc' HRESULT CreateBuffer( - [in] UINT32 width, - [in] UINT32 height, + [in] UINT32 widthInTexels, + [in] UINT32 heightInTexels, [out, retval] ICoreWebView2StagingTexture** buffer); /// Returns reuseable TextureBuffer for video frame rendering. /// Once the renderer finishes rendering of TextureBuffer's video frame, which @@ -364,23 +369,27 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// be reused. The host has to create new TextureBuffer with CreateBuffer /// if the API return an error HRESULT_FROM_WIN32(ERROR_NO_MORE_ITEMS). HRESULT GetAvailableBuffer([out, retval] ICoreWebView2StagingTexture** buffer); - /// Remove TextureBuffer when the host removes the backed 2D texture. - /// The host can save the existing resources by deleting 2D textures - /// when it changes the frame sizes. + /// Removes TextureBuffer when the host removes the backed 2D texture. + /// The host can save the resources by deleting 2D textures + /// when it changes the frame sizes. The API will send a message + /// to the browser where it will remove TextureBuffer. HRESULT RemoveBuffer([in] ICoreWebView2StagingTexture* buffer); - /// Indicates that the TextureBuffer is ready to present. - /// The TextureBuffer must be retrieved from the GetAvailableBuffer. - /// The host writes new texture to the local shared 2D texture of - /// the TextureBuffer id, which is created via CreateBuffer. + /// Sets rendering image/resource through ICoreWebView2StagingTexture. + /// The TextureBuffer must be retrieved from the GetAvailableBuffer or + /// created via CreateBuffer. + /// It is expected that hhe host writes new image/resource to the local + /// shared 2D texture of the TextureBuffer (handle/resource). /// SetBuffer API can be called in any thread. - /// `timestamp` is video capture time with unit of microseconds units. + /// `timestampInMs` is video capture time with microseconds units. /// The value does not have to be exact captured time, but it should be - /// increasing order, the next Present's TextureBuffer should have later - /// time. + /// increasing order because renderer (composition) ignores incoming + /// video frame (texture) if its timestampInMs is equal or prior to + /// the current compositing video frame. HRESULT SetBuffer([in] ICoreWebView2StagingTexture* buffer, - [in] UINT64 timestamp); - /// Render texture that is current set ICoreWebView2StagingTexture. + [in] UINT64 timestampInMs); + /// Render ICoreWebView2StagingTexture resource, which is the most recent + /// call to SetBuffer. HRESULT Present(); /// Stop streaming of the current stream id. 
/// API calls of Present, CreateBuffer will fail after this @@ -392,7 +401,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { HRESULT add_TextureError( [in] ICoreWebView2StagingTextureStreamTextureErrorEventHandler* eventHandler, [out] EventRegistrationToken* token); - /// Remove listener for texture error event. + /// Remove listener for TextureError event. HRESULT remove_TextureError([in] EventRegistrationToken token); /// Updates d3d Device when it is updated by RenderAdapterLUIDChanged /// event. @@ -409,7 +418,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { HRESULT add_WebTextureReceived( [in] ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler* eventHandler, [out] EventRegistrationToken* token); - /// Remove listener for receiving texture stream. + /// Remove listener for WebTextureReceived event. HRESULT remove_WebTextureReceived([in] EventRegistrationToken token); /// Event handler for stopping of the receiving texture stream. /// It is expected that the host releases any holding handle/resource from @@ -417,14 +426,8 @@ interface ICoreWebView2StagingTextureStream : IUnknown { HRESULT add_WebTextureStreamStopped( [in] ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler* eventHandler, [out] EventRegistrationToken* token); - /// Remove listener for receiving texture stream stopped. + /// Remove listener for WebTextureStreamStopped event. HRESULT remove_WebTextureStreamStopped([in] EventRegistrationToken token); - /// Adds an allowed URI origin for the given stream id for web texture - /// operation. Javascript can send texture stream to the host only when - /// the origin it runs are allowed by the host. - HRESULT AddWebTextureAllowedOrigin([in] LPCWSTR origin); - /// Remove added origin, which was added by AddWebTextureAllowedOrigin. - HRESULT RemoveWebTextureAllowedOrigin([in] LPCWSTR origin); } /// TextureBuffer that the host writes to so that the Renderer /// will render on it. From b2b69326a220fe1413c34490fffc7c8111f7df49 Mon Sep 17 00:00:00 2001 From: sunggook Date: Tue, 10 Jan 2023 16:45:13 -0800 Subject: [PATCH 11/19] CreateTextureStream on Environment, etc --- specs/APIReview_TextureStream.md | 141 ++++++++++++++++++------------- 1 file changed, 80 insertions(+), 61 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 0192949eb..757d5959c 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -67,7 +67,7 @@ window.videoTrack.addEventListener('mute', () => { // Sent to the track when data becomes available again, ending the muted state. window.videoTrack.addEventListener('unmut', () => { - console.log('unmute state'); + console.log('unmut state'); }); ``` @@ -83,21 +83,30 @@ document.querySelector('#sendBack').addEventListener('click', e => getStreamFromTheHost(e)); const transformer = new TransformStream({ async transform(videoFrame, controller) { - // Delay frame 100ms. - await new Promise(resolve => setTimeout(resolve, 1000)); + function appSpecificCreateTransformedVideoFrame(originalVideoFrame) { + // At this point the app would create a new video frame based on the original + // video frame. For this sample we just delay for 1000ms and return the + // original. + await new Promise(resolve => setTimeout(resolve, 1000)); + return originalVideoFrame; + } + + // Delay frame 1000ms. 
+ let transformedVideoFrame = + await appSpecificCreateTransformedVideoFrame(originalVideoFrame); // We can create new video frame and edit them, and pass them back here // if needed. - controller.enqueue(videoFrame); + controller.enqueue(transformedVideoFrame); }, }); -async function SendBackToHost(stream_id) { - console.log("stream_id:" + stream_id); +async function SendBackToHost(streamId) { + console.log("streamId:" + streamId); const trackGenerator = new MediaStreamTrackGenerator('video'); - await window.chrome.webview.registerTextureStream(stream_id, trackGenerator); + await window.chrome.webview.registerTextureStream(streamId, trackGenerator); - const mediaStream = await window.chrome.webview.getTextureStream(stream_id); + const mediaStream = await window.chrome.webview.getTextureStream(streamId); const videoStream = mediaStream.getVideoTracks()[0]; const trackProcessor = new MediaStreamTrackProcessor(videoStream); @@ -108,18 +117,18 @@ async function SendBackToHost(stream_id) { ## Win32 C++ ```cpp -HRESULT CreateTextureStream(ICoreWebView2Staging3* coreWebView) +HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) UINT32 luid; // Get the LUID (Graphic adapter) that the WebView renderer uses. - CHECK_FAILURE(coreWebView->get_RenderAdapterLUID(&luid)); + CHECK_FAILURE(environment->get_RenderAdapterLUID(&luid)); // Create D3D device based on the WebView's LUID. ComPtr d3d_device = MyCreateD3DDevice(luid); // Register unique texture stream that the host can provide. ComPtr webviewTextureStream; - CHECK_FAILURE(g_webviewStaging3->CreateTextureStream(L"webview2-abcd1234", + CHECK_FAILURE(environment->CreateTextureStream(L"webview2-abcd1234", d3d_device.Get(), &webviewTextureStream)); // Register the Origin URI that the target renderer could stream of the registered @@ -141,22 +150,22 @@ HRESULT CreateTextureStream(ICoreWebView2Staging3* coreWebView) // Listen to Stop request. The host end system provided video stream and // clean any operation resources. 
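    // Note: per the API details, the start request is considered successful
    // only if the host begins presenting frames within about 10 seconds of the
    // StartRequested event.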
EventRegistrationToken stop_token; - CHECK_FAILURE(webviewTextureStream->add_StopRequested(Callback( + CHECK_FAILURE(webviewTextureStream->add_Stopped(Callback( [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { StopMediaFoundationCapture(); return S_OK; }).Get(), &stop_token)); EventRegistrationToken texture_token; - CHECK_FAILURE(webviewTextureStream->add_TextureError(Callback( - [hWnd](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamTextureErrorEventArgs* args) { + CHECK_FAILURE(webviewTextureStream->add_ErrorReceived(Callback( + [hWnd](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamErrorReceivedEventArgs* args) { COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND kind; HRESULT hr = args->get_Kind(&kind); assert(SUCCEEDED(hr)); switch (kind) { case COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED: - case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND: + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_ERROR: case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE: // assert(false); break; @@ -197,28 +206,31 @@ HRESULT CreateTextureStream(ICoreWebView2Staging3* coreWebView) }).Get(), &stopped_token)); } // CreateTextureStream -HRESULT CallWebView2API(bool createBuffer, - UINT32 width, - UINT32 height, - bool requestAvailableBufer, - bool sendTexture, - ICoreWebView2StagingTexture* send_texture, - UINT64 timestamp, - ICoreWebView2StagingTexture** texture) { - if (createBuffer) { +HRESULT SendTextureToBrowserAfterReceivingFrameFromTheSystem( + ID3D11DeviceContext* deviceContext, + ID3D11Texture2D* inputTexture, + UINT64 timestamp) { + + ComPtr textureBuffer; + HRESULT hr = webviewTextureStream->GetAvailableBuffer(&textureBuffer); + if (FAILED(hr)) { // Create TextureBuffer. - return webviewTextureStream->CreateSharedBuffer(width, height, &texture); - } + hr = webviewTextureStream->CreateBuffer(width, height, &texture); + if (FAILED(hr)) + return hr; - if (requestAvailableBufer) { - return webviewTextureStream->GetAvailableBuffer(&texture); + hr = webviewTextureStream->GetAvailableBuffer(&textureBuffer); + assert(SUCCEEDED(hr)); } - if (sendTexture) { - // Notify the renderer for updated texture on the TextureBuffer. - webviewTextureStream->SetBuffer(send_texture, timestamp); - webviewTextureStream->Present(); - } + ComPtr dxgiResource; + CHECK_FAILURE(textureBuffer->get_Resource(&dxgiResource)); + ComPtr sharedBuffer; + CHECK_FAILURE(dxgiResource.As(&sharedBuffer)); + CHECK_FAILURE(deviceContext->CopyResource(sharedBuffer.Get(), inputTexture.Get())); + + // Notify the renderer for updated texture on the TextureBuffer. + CHECK_FAILURE(webviewTextureStream->PresentBuffer(textureBuffer.Get(), timestamp)); } ``` @@ -226,15 +238,15 @@ HRESULT CallWebView2API(bool createBuffer, ``` [v1_enum] typedef enum COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND { - /// The host can't create a TextureStream instance more than once - /// for a specific stream id. - COREWEBVIEW2_TEXTURE_STREAM_ERROR_STREAM_ID_ALREADY_REGISTERED, - /// Occurs when the host calls CreateBuffer or Present - /// APIs without being called of Start event. Or, 10 seconds passed before - /// calling these APIs since the OnStart event. + /// CreateBuffer/Present and so on should return failed HRESULT if + /// the texture stream is in the stopped state rather than using the + /// error event. 
But there can be edge cases where the browser process + /// knows the texture stream is in the stopped state and the host app + /// process texture stream doesn't yet know that. Like the 10 second issue + /// or if the script side has stopped the stream. COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED, - /// The TextureBuffer has been removed using RemoveBuffer. - COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_NOT_FOUND, + /// The TextureBuffer already has been removed using CloseBuffer. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_ERROR, /// The texture to be presented is already in use for rendering. /// Call GetAvailableBuffer to determine an available TextureBuffer to present. /// The developer can technically call SetBuffer multiple times. @@ -243,9 +255,10 @@ typedef enum COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND { /// for the original TextureBuffer to stop being used. COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE, } COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND; -/// This is ICoreWebView2Staging3 that returns the texture stream interface. + +/// This is ICoreWebView2StagingEnvironment that returns the texture stream interface. [uuid(96c27a45-f142-4873-80ad-9d0cd899b2b9), object, pointer_default(unique)] -interface ICoreWebView2Staging3 : IUnknown { +interface ICoreWebView2StagingEnvironment : IUnknown { /// Registers the stream id that the host can handle, providing a /// texture stream when requested from the WebView2's JavaScript code. /// The host can register multiple unique stream instances, each with @@ -266,7 +279,7 @@ interface ICoreWebView2Staging3 : IUnknown { [propget] HRESULT RenderAdapterLUID([out, retval] LUID* luid); /// Listens for change of graphics adapter LUID of the browser. /// The host can get the updated LUID by RenderAdapterLUID. It is expected - /// that the host updates texture's d3d Device with UpdateD3DDevice, + /// that the host updates texture's d3d Device with SetD3DDevice, /// removes existing buffers and creates new TextureBuffer. HRESULT add_RenderAdapterLUIDChanged( [in] ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler* eventHandler, @@ -319,7 +332,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// Present API call, within 10s after being requested. /// The Texture stream becomes 'Started' state once it starts sending a texture - /// until it calls Stop API or receives 'StopRequested' event. + /// until it calls Stop API or receives 'Stopped' event. HRESULT add_StartRequested( [in] ICoreWebView2StagingTextureStreamStartRequestedEventHandler* eventHandler, [out] EventRegistrationToken* token); @@ -337,11 +350,11 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// by the Javascript, or the host's Stop API call. /// TextureBuffer related API calls after this event will return an error /// of HRESULT_FROM_WIN32(ERROR_INVALID_STATE). - HRESULT add_StopRequested( - [in] ICoreWebView2StagingTextureStreamStopRequestedEventHandler* eventHandler, + HRESULT add_Stopped( + [in] ICoreWebView2StagingTextureStreamStoppedEventHandler* eventHandler, [out] EventRegistrationToken* token); - /// Remove listener for StopRequested event. - HRESULT remove_StopRequested( + /// Remove listener for Stopped event. + HRESULT remove_Stopped( [in] EventRegistrationToken token); /// Creates TextureBuffer that will be referenced by the host and the browser. 
/// By using the TextureBuffer mechanism, the host does not have to @@ -373,7 +386,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// The host can save the resources by deleting 2D textures /// when it changes the frame sizes. The API will send a message /// to the browser where it will remove TextureBuffer. - HRESULT RemoveBuffer([in] ICoreWebView2StagingTexture* buffer); + HRESULT CloseBuffer([in] ICoreWebView2StagingTexture* buffer); /// Sets rendering image/resource through ICoreWebView2StagingTexture. /// The TextureBuffer must be retrieved from the GetAvailableBuffer or /// created via CreateBuffer. @@ -392,20 +405,26 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// call to SetBuffer. HRESULT Present(); /// Stop streaming of the current stream id. + /// The Javascript will receive `MediaStreamTrack::ended` event when the API + /// is called. + /// The Javascript can restart the stream with getTextureStream. + /// The API call will release any internal resources on both of WebView2 host + /// and the browser processes. /// API calls of Present, CreateBuffer will fail after this /// with an error of COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED. - /// The Javascript can restart the stream with getTextureStream. + /// The Stop API will be called implicitly when ICoreWebView2StagingTextureStream + /// object is destroyed. HRESULT Stop(); /// Event handler for those that occur at the Renderer side, the example /// are CreateBuffer, Present, or Stop. - HRESULT add_TextureError( - [in] ICoreWebView2StagingTextureStreamTextureErrorEventHandler* eventHandler, + HRESULT add_ErrorReceived( + [in] ICoreWebView2StagingTextureStreamErrorReceivedEventHandler* eventHandler, [out] EventRegistrationToken* token); - /// Remove listener for TextureError event. - HRESULT remove_TextureError([in] EventRegistrationToken token); + /// Remove listener for ErrorReceived event. + HRESULT remove_ErrorReceived([in] EventRegistrationToken token); /// Updates d3d Device when it is updated by RenderAdapterLUIDChanged /// event. - HRESULT UpdateD3DDevice([in] IUnknown* d3dDevice); + HRESULT SetD3DDevice([in] IUnknown* d3dDevice); /// Event handler for receiving texture by Javascript. /// `window.chrome.webview.registerTextureStream` call by Javascript will /// request sending video frame to the host where it will filter requested @@ -452,7 +471,7 @@ interface ICoreWebView2StagingTextureStreamStartRequestedEventHandler : IUnknown } /// This is the callback for stop request of texture stream. [uuid(4111102a-d19f-4438-af46-efc563b2b9cf), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamStopRequestedEventHandler : IUnknown { +interface ICoreWebView2StagingTextureStreamStoppedEventHandler : IUnknown { /// Called to provide the implementer with the event args for the /// corresponding event. There are no event args and the args /// parameter will be null. @@ -462,16 +481,16 @@ interface ICoreWebView2StagingTextureStreamStopRequestedEventHandler : IUnknown } /// This is the callback for texture stream rendering error. [uuid(52cb8898-c711-401a-8f97-3646831ba72d), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamTextureErrorEventHandler : IUnknown { +interface ICoreWebView2StagingTextureStreamErrorReceivedEventHandler : IUnknown { /// Called to provide the implementer with the event args for the /// corresponding event. 
HRESULT Invoke( [in] ICoreWebView2StagingTextureStream* sender, - [in] ICoreWebView2StagingTextureStreamTextureErrorEventArgs* args); + [in] ICoreWebView2StagingTextureStreamErrorReceivedEventArgs* args); } /// This is the event args interface for texture stream error callback. [uuid(0e1730c1-03df-4ad2-b847-be4d63adf700), object, pointer_default(unique)] -interface ICoreWebView2StagingTextureStreamTextureErrorEventArgs : IUnknown { +interface ICoreWebView2StagingTextureStreamErrorReceivedEventArgs : IUnknown { /// Error kind. [propget] HRESULT Kind([out, retval] COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND* value); @@ -491,7 +510,7 @@ interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { /// Called to provide the implementer with the event args for the /// corresponding event. HRESULT Invoke( - [in] ICoreWebView2Staging3 * sender, + [in] ICoreWebView2StagingEnvironment * sender, [in] IUnknown* args); } /// This is the callback for web texture. From c86906bee91a6daf01dbb8a086b365e6e72d805f Mon Sep 17 00:00:00 2001 From: sunggook Date: Tue, 10 Jan 2023 17:15:48 -0800 Subject: [PATCH 12/19] PresentBuffer instead of SetBuffer and Present, etc --- specs/APIReview_TextureStream.md | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 757d5959c..665a28eb0 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -249,10 +249,10 @@ typedef enum COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND { COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_ERROR, /// The texture to be presented is already in use for rendering. /// Call GetAvailableBuffer to determine an available TextureBuffer to present. - /// The developer can technically call SetBuffer multiple times. - /// But once they call Present, the TextureBuffer becomes "in use" until - /// they call SetBuffer and Present on a different TextureBuffer and wait a bit - /// for the original TextureBuffer to stop being used. + /// The developer can technically call PresentBuffer multiple times, + /// but the first call make input TextureBuffer "in use" until the browser + /// renders it and returns the buffer as "recycle" so that it can be a member of + /// available buffers. COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE, } COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND; @@ -392,18 +392,16 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// created via CreateBuffer. /// It is expected that hhe host writes new image/resource to the local /// shared 2D texture of the TextureBuffer (handle/resource). - /// SetBuffer API can be called in any thread. - /// `timestampInMs` is video capture time with microseconds units. + /// `timestampInMicroseconds` is video capture time with microseconds units. /// The value does not have to be exact captured time, but it should be /// increasing order because renderer (composition) ignores incoming - /// video frame (texture) if its timestampInMs is equal or prior to - /// the current compositing video frame. - HRESULT SetBuffer([in] ICoreWebView2StagingTexture* buffer, - [in] UINT64 timestampInMs); - /// Render ICoreWebView2StagingTexture resource, which is the most recent - /// call to SetBuffer. - HRESULT Present(); + /// video frame (texture) if its timestampInMicroseconds is equal or prior to + /// the current compositing video frame. It also will be exposed to the + /// JS with `VideoFrame::timestamp`. + /// (https://docs.w3cub.com/dom/videoframe/timestamp.html). 
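+ /// A minimal illustrative call pattern (the buffer variables and timestamp
+ /// values below are hypothetical; timestamps are in microseconds):
+ ///   PresentBuffer(bufferA.Get(), 0);      // first frame
+ ///   PresentBuffer(bufferB.Get(), 33333);  // next frame, ~33 ms later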
+ HRESULT PresentBuffer([in] ICoreWebView2StagingTexture* buffer, + [in] UINT64 timestampInMicroseconds); /// Stop streaming of the current stream id. /// The Javascript will receive `MediaStreamTrack::ended` event when the API /// is called. @@ -571,9 +569,9 @@ interface ICoreWebView2StagingWebTexture : IUnknown { /// It is timestamp of the web texture. Javascript can set this value /// with any value, but it is suggested to use same value of its original - /// video frame that is a value of SetBuffer so that the host is able to + /// video frame that is a value of PresentBuffer so that the host is able to /// tell the receiving texture delta. - [propget] HRESULT Timestamp([out, retval] UINT64* value); + [propget] HRESULT Timestamp([out, retval] UINT64* timestampInMicroseconds); } ``` From 37ca44d86766bb6e95a4c552306077af4c3f20b3 Mon Sep 17 00:00:00 2001 From: sunggook Date: Thu, 12 Jan 2023 19:44:30 -0800 Subject: [PATCH 13/19] use texture instead of buffer, etc --- specs/APIReview_TextureStream.md | 381 +++++++++++++++++++------------ 1 file changed, 231 insertions(+), 150 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 665a28eb0..6055b600f 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -6,7 +6,7 @@ Many native apps use a native engine for real-time communication scenarios, whic # Description The proposed APIs will allow the end developers to stream the captured or composed video frame to the WebView renderer where Javascript is able to insert the frame to the page through W3C standard API of Video, MediaStream element for displaying it. -The API will use the shared GPU texture buffer so that it can minimize the overall cost with regards to frame copy. +The API will use the shared GPU texture so that it can minimize the overall cost with regards to frame copy. The proposed APIs have dependency on the DirectX and its internal attributes such as adapter LUID so it supports only Win32/C++ APIs at this time. @@ -15,7 +15,9 @@ adapter LUID so it supports only Win32/C++ APIs at this time. ## Javascript -This is Javascript code common to both of the following samples: +Render video stream in video HTML element + +In this sample, the native code generates video in a texture stream, it is sent to JavaScript, which renders it into a video HTML element. ```js // getTextureStream sample. @@ -71,51 +73,8 @@ window.videoTrack.addEventListener('unmut', () => { }); ``` - -```js -// registerTextureStream sample. - -// Developer can even send back the processed video frame to the host. - -// Scenario: User clicks to stream from the host and sends back them 1s late -// to the host. -document.querySelector('#sendBack').addEventListener('click', - e => getStreamFromTheHost(e)); -const transformer = new TransformStream({ - async transform(videoFrame, controller) { - function appSpecificCreateTransformedVideoFrame(originalVideoFrame) { - // At this point the app would create a new video frame based on the original - // video frame. For this sample we just delay for 1000ms and return the - // original. - await new Promise(resolve => setTimeout(resolve, 1000)); - return originalVideoFrame; - } - - // Delay frame 1000ms. - let transformedVideoFrame = - await appSpecificCreateTransformedVideoFrame(originalVideoFrame); - - // We can create new video frame and edit them, and pass them back here - // if needed. 
- controller.enqueue(transformedVideoFrame); - }, -}); - -async function SendBackToHost(streamId) { - console.log("streamId:" + streamId); - const trackGenerator = new MediaStreamTrackGenerator('video'); - await window.chrome.webview.registerTextureStream(streamId, trackGenerator); - - const mediaStream = await window.chrome.webview.getTextureStream(streamId); - const videoStream = mediaStream.getVideoTracks()[0]; - - const trackProcessor = new MediaStreamTrackProcessor(videoStream); - trackProcessor.readable.pipeThrough(transformer).pipeTo(trackGenerator.writable) -} - -``` - ## Win32 C++ + ```cpp HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) UINT32 luid; @@ -145,7 +104,7 @@ HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) // Capture video stream by using native API, for example, Media Foundation on Windows. StartMediaFoundationCapture(hWnd); return S_OK; - }).Get(), &start_token)); + }).Get(), &start_token));ffer // Listen to Stop request. The host end system provided video stream and // clean any operation resources. @@ -165,9 +124,9 @@ HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) switch (kind) { case COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED: - case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_ERROR: - case COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE: - // assert(false); + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_TEXTURE_ERROR: + case COREWEBVIEW2_TEXTURE_STREAM_ERROR_TEXTURE_IN_USE: + assert(false); break; default: break; @@ -175,10 +134,116 @@ HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) return S_OK; }).Get(), &texture_token)); - // Add allowed origin for registerTextureStream call. 'registerTextureStream' - // call from the Javascript will fail if the requested origin is not registered - // with AddWebTextureAllowedOrigin. - CHECK_FAILURE(webviewTextureStream->AddWebTextureAllowedOrigin(L"https://edge-webscratch")); + // Add allowed origin for getTextureStream call. + CHECK_FAILURE(webviewTextureStream->AddWebTextureAllowedOrigin(L"https://edge-webscratch"), false); +} // CreateTextureStream + +HRESULT StartMediaFoundationCapture() { + // The video stream can come from the any sources, one of examples is to use + // Windows Media Foundation by using IMFCaptureEngine. + // The sample code is https://github.com/microsoft/Windows-classic-samples/tree/main/Samples/CaptureEngineVideoCapture. + + // Once stream engine setups, the engine will sends video stream. For Media + // Foundation, it can send it with ID3D11Texture2D format. + + // Stream engine callback calls SendTextureToBrowserAfterReceivingFrameFromTheSystem + // with captured image of ID3D11Texture2D format. +} + +HRESULT StopMediaFoundationCapture() { + // It stops video stream and clean any resources that were allocated for + // streaming. +} + +HRESULT SendTextureToBrowserAfterReceivingFrameFromTheSystem( + ID3D11DeviceContext* deviceContext, + ID3D11Texture2D* inputTexture, + UINT64 timestamp) { + + ComPtr texture; + HRESULT hr = webviewTextureStream->GetAvailableTexture(&texture); + if (FAILED(hr)) { + // Create TextureTexture. 
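+ // (Assumption: `width` and `height` are the captured frame's dimensions in
+ // texels, supplied by the app's capture engine; they are not declared in
+ // this snippet.)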
+ hr = webviewTextureStream->CreateTexture(width, height, &texture); + if (FAILED(hr)) + return hr; + + hr = webviewTextureStream->GetAvailableTexture(&texture); + assert(SUCCEEDED(hr)); + } + + ComPtr dxgiResource; + CHECK_FAILURE(texture->get_Resource(&dxgiResource)); + CHECK_FAILURE(texture->put_Timestamp(timestamp)); + ComPtr sharedTexture; + CHECK_FAILURE(dxgiResource.As(&sharedTexture)); + CHECK_FAILURE(deviceContext->CopyResource(sharedTexture.Get(), inputTexture.Get())); + + // Notify the renderer for updated texture on the TextureTexture. + CHECK_FAILURE(webviewTextureStream->PresentTexture(texture.Get())); +} +``` + +Edit video in JavaScript and send back to native + +In this sample, the native code generates video in a texture stream, it is sent to JavaScript, JavaScript edits the video, and sends it back to native code. + +```js +// registerTextureStream sample. + +// Developer can even send back the processed video frame to the host. + +// Scenario: User clicks to stream from the host and sends back them 1s late +// to the host. +document.querySelector('#sendBack').addEventListener('click', + e => getStreamFromTheHost(e)); +const transformer = new TransformStream({ + async transform(videoFrame, controller) { + function appSpecificCreateTransformedVideoFrame(originalVideoFrame) { + // At this point the app would create a new video frame based on the original + // video frame. For this sample we just delay for 1000ms and return the + // original. + await new Promise(resolve => setTimeout(resolve, 1000)); + return originalVideoFrame; + } + + // Delay frame 1000ms. + let transformedVideoFrame = + await appSpecificCreateTransformedVideoFrame(originalVideoFrame); + + // We can create new video frame and edit them, and pass them back here + // if needed. + controller.enqueue(transformedVideoFrame); + }, +}); + +async function SendBackToHost(streamId) { + console.log("streamId:" + streamId); + const trackGenerator = new MediaStreamTrackGenerator('video'); + await window.chrome.webview.registerTextureStream(streamId, trackGenerator); + + const mediaStream = await window.chrome.webview.getTextureStream(streamId); + const videoStream = mediaStream.getVideoTracks()[0]; + + const trackProcessor = new MediaStreamTrackProcessor(videoStream); + trackProcessor.readable.pipeThrough(transformer).pipeTo(trackGenerator.writable) +} + +``` + +## Win32 C++ +```cpp +HRESULT RegisterTextureStream(ICoreWebView2TextureStream* webviewTextureStream) + UINT32 luid; + + // It uses same code of getTextureStream's CreateTextureStream API where + // it creates ICoreWebView2TextureStream. + + // Add allowed origin for registerTextureStream call by providing `true` on + // AddWebTextureAllowedOrigin 'registerTextureStream'. + // Call from the Javascript will fail if the requested origin is not registered + // for registerTextureStream. + CHECK_FAILURE(webviewTextureStream->AddWebTextureAllowedOrigin(L"https://edge-webscratch"), true); // Registers listener for video streaming from Javascript. 
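  // Note: per the WebTexture section in the API details, the received
  // texture's handle and resource are only valid until the
  // ICoreWebView2StagingWebTexture object is released, so this handler should
  // not cache them for later use.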
EventRegistrationToken post_token; @@ -190,9 +255,11 @@ HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) UINT64 timestamp; texture_received->get_Timestamp(×tamp); - HANDLE handle; - texture_received->get_Handle(&handle); - DrawTextureWithWICBitmap(handle, timestamp); + ComPtr dxgiResource; + CHECK_FAILURE(texture->get_Resource(&dxgiResource)); + ComPtr sharedTexture; + CHECK_FAILURE(dxgiResource.As(&sharedTexture)); + DrawTextureWithWICBitmap(sharedTexture.Get(), timestamp); return S_OK; }).Get(), &post_token)); @@ -206,31 +273,11 @@ HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) }).Get(), &stopped_token)); } // CreateTextureStream -HRESULT SendTextureToBrowserAfterReceivingFrameFromTheSystem( - ID3D11DeviceContext* deviceContext, - ID3D11Texture2D* inputTexture, - UINT64 timestamp) { +HRESULT DrawTextureWithWICBitmap(ID3D11Texture2D* 2dTexture, UINT64 timestamp) { + // It draws 2dTexture by using DirectX APIs. - ComPtr textureBuffer; - HRESULT hr = webviewTextureStream->GetAvailableBuffer(&textureBuffer); - if (FAILED(hr)) { - // Create TextureBuffer. - hr = webviewTextureStream->CreateBuffer(width, height, &texture); - if (FAILED(hr)) - return hr; - - hr = webviewTextureStream->GetAvailableBuffer(&textureBuffer); - assert(SUCCEEDED(hr)); - } - - ComPtr dxgiResource; - CHECK_FAILURE(textureBuffer->get_Resource(&dxgiResource)); - ComPtr sharedBuffer; - CHECK_FAILURE(dxgiResource.As(&sharedBuffer)); - CHECK_FAILURE(deviceContext->CopyResource(sharedBuffer.Get(), inputTexture.Get())); - - // Notify the renderer for updated texture on the TextureBuffer. - CHECK_FAILURE(webviewTextureStream->PresentBuffer(textureBuffer.Get(), timestamp)); + // `timestamp` can be used to find out the delta between sent (by + // SendTextureToBrowserAfterReceivingFrameFromTheSystem) and received texture. } ``` @@ -238,22 +285,22 @@ HRESULT SendTextureToBrowserAfterReceivingFrameFromTheSystem( ``` [v1_enum] typedef enum COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND { - /// CreateBuffer/Present and so on should return failed HRESULT if + /// CreateTexture/PresentTexture and so on should return failed HRESULT if /// the texture stream is in the stopped state rather than using the /// error event. But there can be edge cases where the browser process /// knows the texture stream is in the stopped state and the host app /// process texture stream doesn't yet know that. Like the 10 second issue /// or if the script side has stopped the stream. COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED, - /// The TextureBuffer already has been removed using CloseBuffer. - COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_ERROR, + /// The texture already has been removed using CloseTexture. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_TEXTURE_ERROR, /// The texture to be presented is already in use for rendering. - /// Call GetAvailableBuffer to determine an available TextureBuffer to present. - /// The developer can technically call PresentBuffer multiple times, - /// but the first call make input TextureBuffer "in use" until the browser - /// renders it and returns the buffer as "recycle" so that it can be a member of - /// available buffers. - COREWEBVIEW2_TEXTURE_STREAM_ERROR_BUFFER_IN_USE, + /// Call GetAvailableTexture to determine an available texture to present. 
+ /// The developer can technically call PresentTexture multiple times, + /// but the first call make input texture "in use" until the browser + /// renders it and returns the texture as "recycle" so that it can be a member of + /// available textures. + COREWEBVIEW2_TEXTURE_STREAM_ERROR_TEXTURE_IN_USE, } COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND; /// This is ICoreWebView2StagingEnvironment that returns the texture stream interface. @@ -276,11 +323,11 @@ interface ICoreWebView2StagingEnvironment : IUnknown { [out, retval ] ICoreWebView2StagingTextureStream** value); /// Get the graphics adapter LUID of the renderer. The host should use this /// LUID adapter when creating D3D device to use with CreateTextureStream(). - [propget] HRESULT RenderAdapterLUID([out, retval] LUID* luid); + [propget] HRESULT RenderAdapterLUID([out, retval] LUID* value); /// Listens for change of graphics adapter LUID of the browser. /// The host can get the updated LUID by RenderAdapterLUID. It is expected /// that the host updates texture's d3d Device with SetD3DDevice, - /// removes existing buffers and creates new TextureBuffer. + /// removes existing textures and creates new texture. HRESULT add_RenderAdapterLUIDChanged( [in] ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler* eventHandler, [out] EventRegistrationToken* token); @@ -295,7 +342,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// The caller must free the returned string with CoTaskMemFree. See /// [API Conventions](/microsoft-edge/webview2/concepts/win32-api-conventions#strings). /// MSOWNERS: TBD (wv2core@microsoft.com) - [propget] HRESULT Id([out, retval] LPWSTR* id); + [propget] HRESULT Id([out, retval] LPWSTR* value); /// Adds an allowed URI origin. The stream requests could be made from /// any frame, including iframes, but the origin of the page in the frames /// must be registered first in order for the request to succeed. @@ -331,7 +378,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// The request is regarded as success only when the host provides the stream, /// Present API call, within 10s after being requested. - /// The Texture stream becomes 'Started' state once it starts sending a texture + /// The texture stream becomes 'Started' state once it starts sending a texture /// until it calls Stop API or receives 'Stopped' event. HRESULT add_StartRequested( [in] ICoreWebView2StagingTextureStreamStartRequestedEventHandler* eventHandler, @@ -343,12 +390,12 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// It is called when user stop all streaming requests from /// the renderers (Javascript) or the host calls the Stop API. The renderer /// can stream again by calling the streaming request API. - /// The renderer cleared all registered TextureBuffers before sending + /// The renderer cleared all registered Textures before sending /// the stop request event so that the callback of the next start request /// should register the textures again. /// The event is triggered when all requests for given stream id closed /// by the Javascript, or the host's Stop API call. - /// TextureBuffer related API calls after this event will return an error + /// texture related API calls after this event will return an error /// of HRESULT_FROM_WIN32(ERROR_INVALID_STATE). HRESULT add_Stopped( [in] ICoreWebView2StagingTextureStreamStoppedEventHandler* eventHandler, @@ -356,65 +403,57 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// Remove listener for Stopped event. 
HRESULT remove_Stopped( [in] EventRegistrationToken token); - /// Creates TextureBuffer that will be referenced by the host and the browser. - /// By using the TextureBuffer mechanism, the host does not have to + /// Creates texture that will be referenced by the host and the browser. + /// By using the texture mechanism, the host does not have to /// send respective texture to the renderer, instead it notifies it - /// with internal TextureBuffer id, which is the identity of the TextureBuffer. - /// The TextureBuffer is 2D texture, IDXGIResource, format and will be + /// with internal texture id, which is the identity of the texture. + /// The texture is 2D texture, IDXGIResource, format and will be /// exposed through shared HANDLE or IUnknown type through ICoreWebView2StagingTexture. /// Whenever the host has new texture to write, it should ask - /// reusable ICoreWebView2StagingTexture from the GetAvailableBuffer, + /// reusable ICoreWebView2StagingTexture from the GetAvailableTexture, /// which returns ICoreWebView2StagingTexture. - /// If the GetAvailableBuffer returns an error, then the host calls the - /// CreateBuffer to allocate new TextureBuffer. + /// If the GetAvailableTexture returns an error, then the host calls the + /// CreateTexture to allocate new texture. /// The API also registers created shared handle to the browser once it /// created the resource. /// Unit for width and height is texture unit (in texels). /// 'https://learn.microsoft.com/en-us/windows/win32/api/d3d11ns-d3d11-d3d11_texture2d_desc' - HRESULT CreateBuffer( + HRESULT CreateTexture( [in] UINT32 widthInTexels, [in] UINT32 heightInTexels, - [out, retval] ICoreWebView2StagingTexture** buffer); - /// Returns reuseable TextureBuffer for video frame rendering. - /// Once the renderer finishes rendering of TextureBuffer's video frame, which + [out, retval] ICoreWebView2StagingTexture** texture); + /// Returns reuseable texture for video frame rendering. + /// Once the renderer finishes rendering of texture's video frame, which /// was requested by Present, the renderer informs the host so that it can - /// be reused. The host has to create new TextureBuffer with CreateBuffer + /// be reused. The host has to create new texture with CreateTexture /// if the API return an error HRESULT_FROM_WIN32(ERROR_NO_MORE_ITEMS). - HRESULT GetAvailableBuffer([out, retval] ICoreWebView2StagingTexture** buffer); - /// Removes TextureBuffer when the host removes the backed 2D texture. + HRESULT GetAvailableTexture([out, retval] ICoreWebView2StagingTexture** texture); + /// Removes texture when the host removes the backed 2D texture. /// The host can save the resources by deleting 2D textures /// when it changes the frame sizes. The API will send a message - /// to the browser where it will remove TextureBuffer. - HRESULT CloseBuffer([in] ICoreWebView2StagingTexture* buffer); + /// to the browser where it will remove texture. + HRESULT CloseTexture([in] ICoreWebView2StagingTexture* texture); /// Sets rendering image/resource through ICoreWebView2StagingTexture. - /// The TextureBuffer must be retrieved from the GetAvailableBuffer or - /// created via CreateBuffer. + /// The texture must be retrieved from the GetAvailableTexture or + /// created via CreateTexture. /// It is expected that hhe host writes new image/resource to the local - /// shared 2D texture of the TextureBuffer (handle/resource). - - /// `timestampInMicroseconds` is video capture time with microseconds units. 
- /// The value does not have to be exact captured time, but it should be - /// increasing order because renderer (composition) ignores incoming - /// video frame (texture) if its timestampInMicroseconds is equal or prior to - /// the current compositing video frame. It also will be exposed to the - /// JS with `VideoFrame::timestamp`. - /// (https://docs.w3cub.com/dom/videoframe/timestamp.html). - HRESULT PresentBuffer([in] ICoreWebView2StagingTexture* buffer, - [in] UINT64 timestampInMicroseconds); + /// shared 2D texture of the texture (handle/resource). + HRESULT PresentTexture([in] ICoreWebView2StagingTexture* texture) + /// Stop streaming of the current stream id. /// The Javascript will receive `MediaStreamTrack::ended` event when the API /// is called. /// The Javascript can restart the stream with getTextureStream. /// The API call will release any internal resources on both of WebView2 host /// and the browser processes. - /// API calls of Present, CreateBuffer will fail after this + /// API calls of Present, CreateTexture will fail after this /// with an error of COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED. /// The Stop API will be called implicitly when ICoreWebView2StagingTextureStream /// object is destroyed. HRESULT Stop(); /// Event handler for those that occur at the Renderer side, the example - /// are CreateBuffer, Present, or Stop. + /// are CreateTexture, Present, or Stop. HRESULT add_ErrorReceived( [in] ICoreWebView2StagingTextureStreamErrorReceivedEventHandler* eventHandler, [out] EventRegistrationToken* token); @@ -439,23 +478,37 @@ interface ICoreWebView2StagingTextureStream : IUnknown { HRESULT remove_WebTextureReceived([in] EventRegistrationToken token); /// Event handler for stopping of the receiving texture stream. /// It is expected that the host releases any holding handle/resource from - /// the WebTexture before an event handler returns. + /// the WebTexture before an event handler returns. The event can be raised + /// when the JS calls `window.chrome.webview.unregisterTextureStream`. + /// JS can restart sending the stream with `window.chrome.webview.registerTextureStream` + /// after stream is stopped. If the stream is started again, `add_WebTextureReceived` + /// will be called. HRESULT add_WebTextureStreamStopped( [in] ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler* eventHandler, [out] EventRegistrationToken* token); /// Remove listener for WebTextureStreamStopped event. HRESULT remove_WebTextureStreamStopped([in] EventRegistrationToken token); } -/// TextureBuffer that the host writes to so that the Renderer +/// texture that the host writes to so that the Renderer /// will render on it. [uuid(0836f09c-34bd-47bf-914a-99fb56ae2d07), object, pointer_default(unique)] interface ICoreWebView2StagingTexture : IUnknown { - /// Returns shared Windows NT handle. The caller expected to open it with - /// ID3D11Device1::OpenSharedResource1 and writes the incoming texture to it. - [propget] HRESULT Handle([out, retval] HANDLE* handle); + /// Returns Windows NT handle to shared memory containing the texture. + /// The caller expected to open it with ID3D11Device1::OpenSharedResource1 + /// and writes the incoming texture to it. + [propget] HRESULT Handle([out, retval] HANDLE* value); /// Returns IUnknown type that could be query interface to IDXGIResource. /// The caller can write incoming texture to it. 
- [propget] HRESULT Resource([out, retval] IUnknown** resource); + [propget] HRESULT Resource([out, retval] IUnknown** value); + /// Sets timestamp of presenting texture. + /// `value` is video capture time with microseconds units. + /// The value does not have to be exact captured time, but it should be + /// increasing order because renderer (composition) ignores incoming + /// video frame (texture) if its timestamp is equal or prior to + /// the current compositing video frame. It also will be exposed to the + /// JS with `VideoFrame::timestamp`. + /// (https://docs.w3cub.com/dom/videoframe/timestamp.html). + [propput] HRESULT Timestamp([out, retval] UINT64* value); } /// This is the callback for new texture stream request. [uuid(62d09330-00a9-41bf-a9ae-55aaef8b3c44), object, pointer_default(unique)] @@ -492,8 +545,8 @@ interface ICoreWebView2StagingTextureStreamErrorReceivedEventArgs : IUnknown { /// Error kind. [propget] HRESULT Kind([out, retval] COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND* value); - /// TextureBuffer that the error is associated with. - HRESULT GetBuffer([out, retval] ICoreWebView2StagingTexture** buffer); + /// texture that the error is associated with. + [propget] Texture([out, retval] ICoreWebView2StagingTexture** value); } [uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { @@ -528,11 +581,11 @@ interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs : IUnknow /// The call does not create new ICoreWebView2StagingWebTexture object, instead /// returns the same object. - /// The TextureBuffer handle will be reused when ICoreWebView2StagingWebTexture + /// The texture handle will be reused when ICoreWebView2StagingWebTexture /// object is released. So, the host should not refer handle or resource of /// the ICoreWebView2StagingWebTexture after its release. - HRESULT GetWebTexture([out, retval] ICoreWebView2StagingWebTexture** value); + [propget] WebTexture([out, retval] ICoreWebView2StagingWebTexture** value); } /// This is the callback for web texture stop. @@ -545,33 +598,61 @@ interface ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler : [in] IUnknown* args); } -/// Received TextureBuffer that the renderer writes to so that the host +/// Received texture that the renderer writes to so that the host /// will read on it. [uuid(b94265ae-4c1e-11ed-bdc3-0242ac120002), object, pointer_default(unique)] interface ICoreWebView2StagingWebTexture : IUnknown { - /// TextureBuffer handle. The handle's lifetime is owned by the + /// texture handle. The handle's lifetime is owned by the /// ICoreWebView2StagingTextureStream object so the host must not close it. - /// The same handle value will be used for same TextureBuffer so the host can use - /// handle value as a unique TextureBuffer key. + /// The same handle value will be used for same texture so the host can use + /// handle value as a unique texture key. /// If the host opens its own resources by handle, then it is suggested /// that the host removes those resources when the handle's texture size /// is changed because the browser also removed previously allocated different - /// sized buffers when image size is changed. - [propget] HRESULT Handle([out, retval] HANDLE* handle); + /// sized textures when image size is changed. + [propget] HRESULT Handle([out, retval] HANDLE* value); - /// TextureBuffer resource. 
- /// The same resource value will be used for same TextureBuffer so the host can use - /// resource value as a unique TextureBuffer key. + /// Direct2D texture resource. + /// The same resource value will be used for same texture so the host can use + /// resource value as a unique texture key. /// ICoreWebView2StagingTextureStream object has a reference of the resource /// so ICoreWebView2StagingWebTexture holds same resource object for - /// the same TextureBuffer. - [propget] HRESULT Resource([out, retval] IUnknown** resource); + /// the same texture. + [propget] HRESULT Resource([out, retval] IUnknown** value); /// It is timestamp of the web texture. Javascript can set this value /// with any value, but it is suggested to use same value of its original - /// video frame that is a value of PresentBuffer so that the host is able to + /// video frame that is a value of PresentTexture so that the host is able to /// tell the receiving texture delta. - [propget] HRESULT Timestamp([out, retval] UINT64* timestampInMicroseconds); + [propget] HRESULT Timestamp([out, retval] UINT64* value); } ``` + +---ts +interface WebView extends EventTarget { + // ... leaving out existing methods + + /// Request video stream to the WebView2 native host. The API call will + /// trigger StartRequested event in the native host, then the native host + /// will setup video frame from the Web cam or any other source, and provide + /// stream with PresentTexture. + + /// The API returns Promise that will return MediaStream object when the first + /// video stream arrive from the native host's PresentTexture. + + /// The API will return an exception of `CONSTRAINT_NOT_SATISFIED` if the + /// requested `textureStreamId` is not supported by the native host. + getTextureStream(textureStreamId: string): MediaStream; + + /// Provides video stream to the WebView2 native host. When JS creates new + /// video frame from the receiving video frame by getTextureStream, it can + /// sends it back to the native host with the API call. + registerTextureStream(textureStreamId: string, textureStream: MediaStreamTrack): void; + + /// Stops sending video stream to the native host, which was started by + /// registerTextureStream. + unregisterTextureStream(textureStreamId: string): void; +} + +--- From a8889e2d3fbdace01bfceae6aebd920b54114e56 Mon Sep 17 00:00:00 2001 From: sunggook Date: Thu, 26 Jan 2023 21:50:29 -0800 Subject: [PATCH 14/19] rephrase based on feedback --- specs/APIReview_TextureStream.md | 127 +++++++++++++++++++++---------- 1 file changed, 87 insertions(+), 40 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 6055b600f..d891e721c 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -2,14 +2,31 @@ TextureStream =============================================================================================== # Background -Many native apps use a native engine for real-time communication scenarios, which include video capture, networking and video rendering. However, often, these apps still use WebView or Electron for UI rendering. The separation between real-time video rendering and UI rendering prevents apps from rendering real-time video inside the web contents. This forces apps to render the real-time video on top of the web contents, which is limiting. Rendering video on top constrains the user experience and it may also cause performance problems. 
We can ask the native apps to use web renderer for video handling because web standard already provides these features through WebRTC APIs. The end developers, however, prefer to use their existing engine such as capturing and composition, meanwhile using WebRTC API for rendering. +Many native apps use a native engine for real-time communication scenarios, +which include video capture, networking and video rendering. However, often, +these apps still use WebView or Electron for UI rendering. The separation +between real-time video rendering and UI rendering prevents apps from rendering +real-time video inside the web contents. This forces apps to render the +real-time video on top of the web contents, which is limiting. Rendering video +on top constrains the user experience and it may also cause performance problems. +We can ask the native apps to use web renderer for video handling because web +standard already provides these features through WebRTC APIs. The end +developers, however, prefer to use their existing engine such as capturing +and composition, meanwhile using WebRTC API for rendering. # Description -The proposed APIs will allow the end developers to stream the captured or composed video frame to the WebView renderer where Javascript is able to insert the frame to the page through W3C standard API of Video, MediaStream element for displaying it. -The API will use the shared GPU texture so that it can minimize the overall cost with regards to frame copy. +The proposed APIs allow end developers to stream captured or composed video +frames to the WebView2 where JavaScript can render or otherwise interact with +the frames via W3C standard DOM APIs including the Video element, and MediaStream. -The proposed APIs have dependency on the DirectX and its internal attributes such as -adapter LUID so it supports only Win32/C++ APIs at this time. +The API aims to minimize the number of times a frame must be copied and so is +structured to allow reuse of frame objects which are implemented with GPU +textures that can be shared across processes. + +The proposed APIs have dependency on the DirectX and its internal attributes +such as adapter LUID so the API is only exposed in the WebView2 COM APIs to +Win32/C++ consumers and as an Interop COM interface to allow C++/WinRT +consumers to access the interface. # Examples @@ -17,7 +34,8 @@ adapter LUID so it supports only Win32/C++ APIs at this time. Render video stream in video HTML element -In this sample, the native code generates video in a texture stream, it is sent to JavaScript, which renders it into a video HTML element. +In this sample, the native code generates video in a texture stream, it is +sent to JavaScript, which renders it into a video HTML element. ```js // getTextureStream sample. @@ -94,14 +112,17 @@ HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) // stream id. The request from not registered origin will fail to stream. // `true` boolean value will add allowed origin for registerTextureStream as well. - CHECK_FAILURE(webviewTextureStream->AddAllowedOrigin(L"https://edge-webscratch"), true); + CHECK_FAILURE(webviewTextureStream->AddAllowedOrigin( + L"https://edge-webscratch"), true); // Listen to Start request. The host will setup system video streaming and // start sending the texture. 
EventRegistrationToken start_token; CHECK_FAILURE(webviewTextureStream->add_StartRequested(Callback( - [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { - // Capture video stream by using native API, for example, Media Foundation on Windows. + [hWnd](ICoreWebView2StagingTextureStream* webview, + IUnknown* eventArgs) -> HRESULT { + // Capture video stream by using native API, for example, + // Media Foundation on Windows. StartMediaFoundationCapture(hWnd); return S_OK; }).Get(), &start_token));ffer @@ -109,15 +130,18 @@ HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) // Listen to Stop request. The host end system provided video stream and // clean any operation resources. EventRegistrationToken stop_token; - CHECK_FAILURE(webviewTextureStream->add_Stopped(Callback( - [hWnd](ICoreWebView2StagingTextureStream* webview, IUnknown* eventArgs) -> HRESULT { + CHECK_FAILURE(webviewTextureStream->add_Stopped( + Callback( + [hWnd](ICoreWebView2StagingTextureStream* webview, + IUnknown* eventArgs) -> HRESULT { StopMediaFoundationCapture(); return S_OK; }).Get(), &stop_token)); EventRegistrationToken texture_token; CHECK_FAILURE(webviewTextureStream->add_ErrorReceived(Callback( - [hWnd](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamErrorReceivedEventArgs* args) { + [hWnd](ICoreWebView2StagingTextureStream* sender, + ICoreWebView2StagingTextureStreamErrorReceivedEventArgs* args) { COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND kind; HRESULT hr = args->get_Kind(&kind); assert(SUCCEEDED(hr)); @@ -135,7 +159,8 @@ HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) }).Get(), &texture_token)); // Add allowed origin for getTextureStream call. - CHECK_FAILURE(webviewTextureStream->AddWebTextureAllowedOrigin(L"https://edge-webscratch"), false); + CHECK_FAILURE(webviewTextureStream->AddWebTextureAllowedOrigin( + L"https://edge-webscratch"), false); } // CreateTextureStream HRESULT StartMediaFoundationCapture() { @@ -186,7 +211,9 @@ HRESULT SendTextureToBrowserAfterReceivingFrameFromTheSystem( Edit video in JavaScript and send back to native -In this sample, the native code generates video in a texture stream, it is sent to JavaScript, JavaScript edits the video, and sends it back to native code. +In this sample, the native code generates video in a texture stream, +it is sent to JavaScript, JavaScript edits the video, and sends it back +to native code. ```js // registerTextureStream sample. @@ -243,12 +270,14 @@ HRESULT RegisterTextureStream(ICoreWebView2TextureStream* webviewTextureStream) // AddWebTextureAllowedOrigin 'registerTextureStream'. // Call from the Javascript will fail if the requested origin is not registered // for registerTextureStream. - CHECK_FAILURE(webviewTextureStream->AddWebTextureAllowedOrigin(L"https://edge-webscratch"), true); + CHECK_FAILURE(webviewTextureStream->AddWebTextureAllowedOrigin( + L"https://edge-webscratch"), true); // Registers listener for video streaming from Javascript. EventRegistrationToken post_token; CHECK_FAILURE(webviewTextureStream->add_WebTextureReceived(Callback( - [&](ICoreWebView2StagingTextureStream* sender, ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs* args) { + [&](ICoreWebView2StagingTextureStream* sender, + ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs* args) { // Javascript send a texture stream. 
ComPtr texture_received; args->GetWebTexture(&texture_received); @@ -298,12 +327,13 @@ typedef enum COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND { /// Call GetAvailableTexture to determine an available texture to present. /// The developer can technically call PresentTexture multiple times, /// but the first call make input texture "in use" until the browser - /// renders it and returns the texture as "recycle" so that it can be a member of - /// available textures. + /// renders it and returns the texture as "recycle" so that it can be a member + /// of available textures. COREWEBVIEW2_TEXTURE_STREAM_ERROR_TEXTURE_IN_USE, } COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND; -/// This is ICoreWebView2StagingEnvironment that returns the texture stream interface. +/// This is ICoreWebView2StagingEnvironment that returns the texture +/// stream interface. [uuid(96c27a45-f142-4873-80ad-9d0cd899b2b9), object, pointer_default(unique)] interface ICoreWebView2StagingEnvironment : IUnknown { /// Registers the stream id that the host can handle, providing a @@ -434,33 +464,50 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// when it changes the frame sizes. The API will send a message /// to the browser where it will remove texture. HRESULT CloseTexture([in] ICoreWebView2StagingTexture* texture); - /// Sets rendering image/resource through ICoreWebView2StagingTexture. - /// The texture must be retrieved from the GetAvailableTexture or - /// created via CreateTexture. - /// It is expected that hhe host writes new image/resource to the local - /// shared 2D texture of the texture (handle/resource). + /// Adds the provided `ICoreWebView2Texture` to the video stream as the + /// next frame. The `ICoreWebView2Texture` must not be closed. + /// The `ICoreWebView2Texture` must have been obtained via a call to + /// `ICoreWebView2TextureStream::GetAvailableTexture` or ` + /// ICoreWebView2TextureStream::CreateTexture` from this `ICoreWebView2TextureStream`. + /// If the `ICoreWebView2Texture` is closed or was created from a different + /// `ICoreWebView2TextureStream` this method will return `E_INVALIDARG`. + /// You should write your video frame data to the `ICoreWebView2Texture` + /// before calling this method. + + /// After this method completes WebView2 will take some time asynchronously + /// to send the texture to the WebView2 processes to be added to the video stream. + /// Do not close or otherwise change the provided `ICoreWebView2Texture` after + /// calling this method. Doing so may result in the texture not being added to + /// the texture stream and the `ErrorReceived` event may be raised. HRESULT PresentTexture([in] ICoreWebView2StagingTexture* texture) - /// Stop streaming of the current stream id. - /// The Javascript will receive `MediaStreamTrack::ended` event when the API - /// is called. - /// The Javascript can restart the stream with getTextureStream. - /// The API call will release any internal resources on both of WebView2 host - /// and the browser processes. - /// API calls of Present, CreateTexture will fail after this - /// with an error of COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED. - /// The Stop API will be called implicitly when ICoreWebView2StagingTextureStream - /// object is destroyed. + /// Stops this texture stream from streaming and moves it into the stopped state. + /// When moving to the stopped state the `ICoreWebView2TextureStream Stopped` + /// event will be raised and in script the `MediaStreamTrack ended` event will + /// be raised. 
Once stopped, script may again call `Webview.getTextureStream` + /// moving the texture stream back to the start requested state. + /// See the `StartRequested` event for details. + + /// Once stopped, calls to CreateTexture, GetAvailableTexture, + /// PresentTexture, and CloseTexture will fail with + /// COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED. + /// See those methods for details. + + /// The `Stop` method is implicitly called when the texture stream object is + /// destroyed. HRESULT Stop(); - /// Event handler for those that occur at the Renderer side, the example - /// are CreateTexture, Present, or Stop. + /// The `ErrorReceived` event is raised when an error with this texture + /// stream occurs asynchronously. HRESULT add_ErrorReceived( [in] ICoreWebView2StagingTextureStreamErrorReceivedEventHandler* eventHandler, [out] EventRegistrationToken* token); /// Remove listener for ErrorReceived event. HRESULT remove_ErrorReceived([in] EventRegistrationToken token); - /// Updates d3d Device when it is updated by RenderAdapterLUIDChanged - /// event. + /// Set the D3D device this texture stream should use for creating shared + /// texture resources. When the RenderAdapterLUIDChanged event is raised you + /// should create a new D3D device using the RenderAdapterLUID property and + /// call SetD3DDevice with the new D3D device. + /// See the `CreateTextureStream` `d3dDevice` parameter for more details. HRESULT SetD3DDevice([in] IUnknown* d3dDevice); /// Event handler for receiving texture by Javascript. /// `window.chrome.webview.registerTextureStream` call by Javascript will @@ -629,7 +676,7 @@ interface ICoreWebView2StagingWebTexture : IUnknown { ``` ----ts +```ts interface WebView extends EventTarget { // ... leaving out existing methods @@ -655,4 +702,4 @@ interface WebView extends EventTarget { unregisterTextureStream(textureStreamId: string): void; } ---- +``` From d8ffcb0d7ce012b0dd05d89797af1c8382f8e4fb Mon Sep 17 00:00:00 2001 From: sunggook Date: Fri, 27 Jan 2023 15:01:35 -0800 Subject: [PATCH 15/19] update WebTexture desc based on feedback --- specs/APIReview_TextureStream.md | 93 ++++++++++++++++---------------- 1 file changed, 48 insertions(+), 45 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index d891e721c..f2c3082e5 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -490,9 +490,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// Once stopped, calls to CreateTexture, GetAvailableTexture, /// PresentTexture, and CloseTexture will fail with - /// COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED. - /// See those methods for details. - + /// HRESULT_FROM_WIN32(ERROR_INVALID_STATE). /// The `Stop` method is implicitly called when the texture stream object is /// destroyed. HRESULT Stop(); @@ -510,26 +508,31 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// See the `CreateTextureStream` `d3dDevice` parameter for more details. HRESULT SetD3DDevice([in] IUnknown* d3dDevice); /// Event handler for receiving texture by Javascript. - /// `window.chrome.webview.registerTextureStream` call by Javascript will - /// request sending video frame to the host where it will filter requested - /// page's origin against allowed origins. If allowed, the Javascript will - /// send a video frame (web texture), through MediaStreamTrack insertable APIs, - /// MediaStreamTrackGenerator. - /// https://www.w3.org/TR/mediacapture-transform/. 
- /// WebTextureReceived event will be called only when it receives - /// a web texture. There is no start event for receiving web texture. + /// The WebTextureReceived event is raised when script sends a video frame to + /// this texture stream. Allowed script will call `chrome.webview. + /// registerTextureStream` to register a MediaStream with a specified texture + /// stream. Video frames added to that MediaStream will be raised in the + /// WebTextureReceived event. See `registerTextureStream` for details. + /// Script is allowed to call registerTextureStream if it is from an HTML + /// document with an origin allowed via + /// `ICoreWebView2TextureStream::AddAllowedOrigin` with the + /// `alllowWebTexture` parameter set. See `AddAllowedOrigin` for details. HRESULT add_WebTextureReceived( [in] ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler* eventHandler, [out] EventRegistrationToken* token); /// Remove listener for WebTextureReceived event. HRESULT remove_WebTextureReceived([in] EventRegistrationToken token); - /// Event handler for stopping of the receiving texture stream. - /// It is expected that the host releases any holding handle/resource from - /// the WebTexture before an event handler returns. The event can be raised - /// when the JS calls `window.chrome.webview.unregisterTextureStream`. - /// JS can restart sending the stream with `window.chrome.webview.registerTextureStream` - /// after stream is stopped. If the stream is started again, `add_WebTextureReceived` - /// will be called. + /// The WebTextureStreamStopped event is raised when script unregisters its + /// MediaStream from this texture stream. Script that has previously called + /// `chrome.webview.registerTextureStream`, can call `chrome.webview. + /// unregisterTextureStream` which will raise this event and then close + /// associated ICoreWebView2WebTexture objects in the browser side. You should + /// ensure that you release any references to associated + /// ICoreWebView2WebTexture objects and their underlying resources. + + /// Once stopped, script may start again by calling `chrome.webview. + /// registerTextureStream` and sending more frames. In this case the + /// `ICoreWebView2TextureStream WebTextureReceived` event will be raised again. HRESULT add_WebTextureStreamStopped( [in] ICoreWebView2StagingTextureStreamWebTextureStreamStoppedEventHandler* eventHandler, [out] EventRegistrationToken* token); @@ -540,22 +543,27 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// will render on it. [uuid(0836f09c-34bd-47bf-914a-99fb56ae2d07), object, pointer_default(unique)] interface ICoreWebView2StagingTexture : IUnknown { - /// Returns Windows NT handle to shared memory containing the texture. - /// The caller expected to open it with ID3D11Device1::OpenSharedResource1 - /// and writes the incoming texture to it. - [propget] HRESULT Handle([out, retval] HANDLE* value); - /// Returns IUnknown type that could be query interface to IDXGIResource. - /// The caller can write incoming texture to it. - [propget] HRESULT Resource([out, retval] IUnknown** value); - /// Sets timestamp of presenting texture. - /// `value` is video capture time with microseconds units. - /// The value does not have to be exact captured time, but it should be - /// increasing order because renderer (composition) ignores incoming - /// video frame (texture) if its timestamp is equal or prior to - /// the current compositing video frame. It also will be exposed to the - /// JS with `VideoFrame::timestamp`. 
- /// (https://docs.w3cub.com/dom/videoframe/timestamp.html). - [propput] HRESULT Timestamp([out, retval] UINT64* value); + /// A handle to OS shared memory containing the texture. You can open it + /// with `ID3D11Device1::OpenSharedResource1` and write your texture data + /// to it. Do not close it yourself. The underlying texture will be closed + /// by WebView2. Do not change the texture after calling + /// `ICoreWebView2TextureStream::PresentTexture` before you can retrieve it + /// again with `GetAvailableTexture`, or you the frame may not be + /// rendered and the `ICoreWebView2TextureStream ErrorReceived` event will + /// be raised. + [propget] HRESULT Handle([out, retval] HANDLE* value); + /// Returns IUnknown type that could be query interface to IDXGIResource. + /// The caller can write incoming texture to it. + [propget] HRESULT Resource([out, retval] IUnknown** value); + /// Sets timestamp of presenting texture. + /// `value` is video capture time with microseconds units. + /// The value does not have to be exact captured time, but it should be + /// increasing order because renderer (composition) ignores incoming + /// video frame (texture) if its timestamp is equal or prior to + /// the current compositing video frame. It also will be exposed to the + /// JS with `VideoFrame::timestamp`. + /// (https://docs.w3cub.com/dom/videoframe/timestamp.html). + [propput] HRESULT Timestamp([out, retval] UINT64* value); } /// This is the callback for new texture stream request. [uuid(62d09330-00a9-41bf-a9ae-55aaef8b3c44), object, pointer_default(unique)] @@ -586,13 +594,15 @@ interface ICoreWebView2StagingTextureStreamErrorReceivedEventHandler : IUnknown [in] ICoreWebView2StagingTextureStream* sender, [in] ICoreWebView2StagingTextureStreamErrorReceivedEventArgs* args); } -/// This is the event args interface for texture stream error callback. +/// The event args for the `ICoreWebViewTextureStream ErrorReceived` event. [uuid(0e1730c1-03df-4ad2-b847-be4d63adf700), object, pointer_default(unique)] interface ICoreWebView2StagingTextureStreamErrorReceivedEventArgs : IUnknown { - /// Error kind. + /// The kind of error that has occurred. [propget] HRESULT Kind([out, retval] COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND* value); - /// texture that the error is associated with. + /// The texture with which this error is associated. For the + /// `COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED` error kind, + /// this property will be `nullptr`. [propget] Texture([out, retval] ICoreWebView2StagingTexture** value); } [uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] @@ -621,17 +631,10 @@ interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler : IUnk [in] ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs* args); } -/// This is the event args interface for web texture. +/// The event args for the `ICoreWebView2TextureStream WebTextureReceived` event. [uuid(a4c2fa3a-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs : IUnknown { /// Return ICoreWebView2StagingWebTexture object. - /// The call does not create new ICoreWebView2StagingWebTexture object, instead - /// returns the same object. - - /// The texture handle will be reused when ICoreWebView2StagingWebTexture - /// object is released. So, the host should not refer handle or resource of - /// the ICoreWebView2StagingWebTexture after its release. 
- [propget] WebTexture([out, retval] ICoreWebView2StagingWebTexture** value); } From f01f5949032d7d4f1d4b481c1c358c6530571bdf Mon Sep 17 00:00:00 2001 From: sunggook Date: Sun, 5 Feb 2023 17:16:44 -0800 Subject: [PATCH 16/19] add propget Timestamp, etc --- specs/APIReview_TextureStream.md | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index f2c3082e5..e4b966fa6 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -312,6 +312,8 @@ HRESULT DrawTextureWithWICBitmap(ID3D11Texture2D* 2dTexture, UINT64 timestamp) { # API Details ``` +/// Kinds of errors that can be reported by the +/// `ICoreWebView2ExperimentalTextureStream ErrorReceived` event. [v1_enum] typedef enum COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND { /// CreateTexture/PresentTexture and so on should return failed HRESULT if @@ -353,7 +355,7 @@ interface ICoreWebView2StagingEnvironment : IUnknown { [out, retval ] ICoreWebView2StagingTextureStream** value); /// Get the graphics adapter LUID of the renderer. The host should use this /// LUID adapter when creating D3D device to use with CreateTextureStream(). - [propget] HRESULT RenderAdapterLUID([out, retval] LUID* value); + [propget] HRESULT RenderAdapterLUID([out, retval] UINT64* value); /// Listens for change of graphics adapter LUID of the browser. /// The host can get the updated LUID by RenderAdapterLUID. It is expected /// that the host updates texture's d3d Device with SetD3DDevice, @@ -479,7 +481,7 @@ interface ICoreWebView2StagingTextureStream : IUnknown { /// Do not close or otherwise change the provided `ICoreWebView2Texture` after /// calling this method. Doing so may result in the texture not being added to /// the texture stream and the `ErrorReceived` event may be raised. - HRESULT PresentTexture([in] ICoreWebView2StagingTexture* texture) + HRESULT PresentTexture([in] ICoreWebView2StagingTexture* texture); /// Stops this texture stream from streaming and moves it into the stopped state. /// When moving to the stopped state the `ICoreWebView2TextureStream Stopped` @@ -555,6 +557,8 @@ interface ICoreWebView2StagingTexture : IUnknown { /// Returns IUnknown type that could be query interface to IDXGIResource. /// The caller can write incoming texture to it. [propget] HRESULT Resource([out, retval] IUnknown** value); + /// Gets timestamp of presenting texture. + [propget] HRESULT Timestamp([out, retval] UINT64* value); /// Sets timestamp of presenting texture. /// `value` is video capture time with microseconds units. /// The value does not have to be exact captured time, but it should be @@ -563,7 +567,7 @@ interface ICoreWebView2StagingTexture : IUnknown { /// the current compositing video frame. It also will be exposed to the /// JS with `VideoFrame::timestamp`. /// (https://docs.w3cub.com/dom/videoframe/timestamp.html). - [propput] HRESULT Timestamp([out, retval] UINT64* value); + [propput] HRESULT Timestamp([in] UINT64 value); } /// This is the callback for new texture stream request. [uuid(62d09330-00a9-41bf-a9ae-55aaef8b3c44), object, pointer_default(unique)] @@ -603,8 +607,9 @@ interface ICoreWebView2StagingTextureStreamErrorReceivedEventArgs : IUnknown { /// The texture with which this error is associated. For the /// `COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED` error kind, /// this property will be `nullptr`. 
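// --- Illustrative usage (editor's sketch, not part of the spec IDL) ---
// A minimal ErrorReceived handler using these event args. `stream` and
// `errorToken` handling are assumptions; the interfaces and enum values come
// from this spec, and Texture can legitimately be null as described above.
EventRegistrationToken errorToken;
stream->add_ErrorReceived(
    Microsoft::WRL::Callback<ICoreWebView2StagingTextureStreamErrorReceivedEventHandler>(
        [](ICoreWebView2StagingTextureStream* sender,
           ICoreWebView2StagingTextureStreamErrorReceivedEventArgs* args) -> HRESULT
        {
            COREWEBVIEW2_TEXTURE_STREAM_ERROR_KIND kind;
            HRESULT hr = args->get_Kind(&kind);
            if (FAILED(hr))
                return hr;

            // Texture is null for errors that are not tied to a specific
            // texture, such as
            // COREWEBVIEW2_TEXTURE_STREAM_ERROR_NO_VIDEO_TRACK_STARTED.
            Microsoft::WRL::ComPtr<ICoreWebView2StagingTexture> texture;
            hr = args->get_Texture(&texture);
            if (SUCCEEDED(hr) && texture &&
                kind == COREWEBVIEW2_TEXTURE_STREAM_ERROR_TEXTURE_IN_USE)
            {
                // For example, stop writing to this texture until
                // GetAvailableTexture hands it back.
            }
            return S_OK;
        }).Get(),
    &errorToken);
// --- end of sketch ---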
- [propget] Texture([out, retval] ICoreWebView2StagingTexture** value); + [propget] HRESULT Texture([out, retval] ICoreWebView2StagingTexture** value); } +/// This is the callback for the browser process's display LUID change. [uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { /// Called to provide the implementer with the event args for the @@ -613,14 +618,6 @@ interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { [in] ICoreWebView2StagingTextureStream* sender, [in] IUnknown* args); } -[uuid(431721e0-0f18-4d7b-bd4d-e5b1522bb110), object, pointer_default(unique)] -interface ICoreWebView2StagingRenderAdapterLUIDChangedEventHandler : IUnknown { - /// Called to provide the implementer with the event args for the - /// corresponding event. - HRESULT Invoke( - [in] ICoreWebView2StagingEnvironment * sender, - [in] IUnknown* args); -} /// This is the callback for web texture. [uuid(9ea4228c-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler : IUnknown { @@ -635,7 +632,7 @@ interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventHandler : IUnk [uuid(a4c2fa3a-295a-11ed-a261-0242ac120002), object, pointer_default(unique)] interface ICoreWebView2StagingTextureStreamWebTextureReceivedEventArgs : IUnknown { /// Return ICoreWebView2StagingWebTexture object. - [propget] WebTexture([out, retval] ICoreWebView2StagingWebTexture** value); + [propget] HRESULT WebTexture([out, retval] ICoreWebView2StagingWebTexture** value); } /// This is the callback for web texture stop. From 4327402b12b98bc29597327534ed61a4bf6ffad3 Mon Sep 17 00:00:00 2001 From: sunggook Date: Fri, 3 Mar 2023 16:49:39 -0800 Subject: [PATCH 17/19] fix typo --- specs/APIReview_TextureStream.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index e4b966fa6..0637fcaf9 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -125,7 +125,7 @@ HRESULT CreateTextureStream(ICoreWebView2StagingEnvironment* environment) // Media Foundation on Windows. StartMediaFoundationCapture(hWnd); return S_OK; - }).Get(), &start_token));ffer + }).Get(), &start_token)); // Listen to Stop request. The host end system provided video stream and // clean any operation resources. @@ -572,9 +572,9 @@ interface ICoreWebView2StagingTexture : IUnknown { /// This is the callback for new texture stream request. [uuid(62d09330-00a9-41bf-a9ae-55aaef8b3c44), object, pointer_default(unique)] interface ICoreWebView2StagingTextureStreamStartRequestedEventHandler : IUnknown { - //// Called to provide the implementer with the event args for the - //// corresponding event. There are no event args and the args - //// parameter will be null. + /// Called to provide the implementer with the event args for the + /// corresponding event. There are no event args and the args + /// parameter will be null. HRESULT Invoke( [in] ICoreWebView2StagingTextureStream* sender, [in] IUnknown* args); From 370337a3bb057b9d1ce047e0c687fc2987690b72 Mon Sep 17 00:00:00 2001 From: sunggook Date: Tue, 9 Jan 2024 14:03:58 -0800 Subject: [PATCH 18/19] add warp mode. 
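To make the new wording concrete, here is a rough host-side sketch (not part of
the spec) of creating the device that is later passed to CreateTextureStream()
or SetD3DDevice(): it first tries the adapter matching RenderAdapterLUID and
falls back to a WARP device when no matching GPU adapter exists. The helper
name and the low/high packing of the UINT64 LUID value are assumptions.

```cpp
#include <d3d11.h>
#include <dxgi1_4.h>
#include <wrl/client.h>
using Microsoft::WRL::ComPtr;

// Creates a D3D11 device on the adapter identified by `rawLuid` (the value
// reported by RenderAdapterLUID), or a WARP device if that adapter is absent.
HRESULT CreateDeviceForWebView(UINT64 rawLuid, ID3D11Device** device)
{
    LUID luid = {};
    luid.LowPart = static_cast<DWORD>(rawLuid & 0xFFFFFFFFu);
    luid.HighPart = static_cast<LONG>(rawLuid >> 32);

    ComPtr<IDXGIAdapter> adapter;
    ComPtr<IDXGIFactory4> factory;
    if (SUCCEEDED(CreateDXGIFactory1(IID_PPV_ARGS(&factory))))
    {
        // Failure is acceptable here; `adapter` simply stays null
        // (no GPU, or the LUID is no longer valid).
        factory->EnumAdapterByLuid(luid, IID_PPV_ARGS(&adapter));
    }

    UINT flags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
    return D3D11CreateDevice(
        adapter.Get(),
        adapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_WARP,
        nullptr, flags, nullptr, 0, D3D11_SDK_VERSION,
        device, nullptr, nullptr);
}
```

When RenderAdapterLUIDChanged is raised, the host would run this again with the
new LUID and hand the new device to SetD3DDevice on each existing stream.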
--- specs/APIReview_TextureStream.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index 0637fcaf9..fc16b3cef 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -355,6 +355,8 @@ interface ICoreWebView2StagingEnvironment : IUnknown { [out, retval ] ICoreWebView2StagingTextureStream** value); /// Get the graphics adapter LUID of the renderer. The host should use this /// LUID adapter when creating D3D device to use with CreateTextureStream(). + /// If the machine does not have GPU, then creating D3D device. In that case, + /// the host can create WARP mode D3D device with default adapter. [propget] HRESULT RenderAdapterLUID([out, retval] UINT64* value); /// Listens for change of graphics adapter LUID of the browser. /// The host can get the updated LUID by RenderAdapterLUID. It is expected From fb9f997687671d8e70d74acedc3d159af4f99621 Mon Sep 17 00:00:00 2001 From: sunggook Date: Tue, 9 Jan 2024 14:26:23 -0800 Subject: [PATCH 19/19] fix typo --- specs/APIReview_TextureStream.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/APIReview_TextureStream.md b/specs/APIReview_TextureStream.md index fc16b3cef..756221bc4 100644 --- a/specs/APIReview_TextureStream.md +++ b/specs/APIReview_TextureStream.md @@ -355,8 +355,8 @@ interface ICoreWebView2StagingEnvironment : IUnknown { [out, retval ] ICoreWebView2StagingTextureStream** value); /// Get the graphics adapter LUID of the renderer. The host should use this /// LUID adapter when creating D3D device to use with CreateTextureStream(). - /// If the machine does not have GPU, then creating D3D device. In that case, - /// the host can create WARP mode D3D device with default adapter. + /// Creating D3D device would fail if the machine does not have GPU. In that case, + /// the host can create D3D device for WARP mode with default adapter. [propget] HRESULT RenderAdapterLUID([out, retval] UINT64* value); /// Listens for change of graphics adapter LUID of the browser. /// The host can get the updated LUID by RenderAdapterLUID. It is expected