Livestream playback on HoloLens 2
I've run into the following problem. My task is this: I need to play streaming video (raw H.264 over UDP) on a 3D object. Currently I'm using FFmpegInteropX to set a MediaSource on a Windows.Media.Playback.MediaPlayer object. The MediaPlayer runs in frame-server mode; I subscribe to the VideoFrameAvailable event and hand the resulting frames over to Unity.
The problem is that performance on HoloLens 2 (UWP) is poor: with texture sizes above 720x720 I can't get smooth, low-latency playback. Meanwhile, if I run the same app on a PC, everything plays smoothly and without delay at up to 4096x4096.
Does anyone have ideas on how to improve performance on HoloLens 2?
private FFmpegInteropMSS decoder;
private SoftwareBitmap frameServerDest = new SoftwareBitmap(BitmapPixelFormat.Rgba8, 1024, 1024, BitmapAlphaMode.Premultiplied);
private UnityEngine.Texture2D tex; // created in InitializeMediaPlayer: a field initializer cannot reference frameServerDest

private async void InitializeMediaPlayer()
{
    tex = new UnityEngine.Texture2D(frameServerDest.PixelWidth, frameServerDest.PixelHeight, UnityEngine.TextureFormat.RGBA32, false);

    FFmpegInteropLogging.SetDefaultLogProvider();
    FFmpegInteropConfig configuration = new FFmpegInteropConfig()
    {
        MaxVideoThreads = 8,
        SkipErrors = uint.MaxValue,
        DefaultBufferTime = TimeSpan.Zero,
        FastSeek = true,
        VideoDecoderMode = VideoDecoderMode.ForceFFmpegSoftwareDecoder,
    };
    configuration.FFmpegOptions.Add("tune", "zerolatency");
    configuration.FFmpegOptions.Add("flags", "low_delay");
    configuration.FFmpegOptions.Add("fflags", "discardcorrupt+shortest+sortdts+ignidx+nobuffer");

    decoder = await FFmpegInteropMSS.CreateFromUriAsync("udp://127.0.0.1:9005", configuration);

    var mediaStreamSource = decoder.GetMediaStreamSource();
    mediaStreamSource.BufferTime = TimeSpan.FromSeconds(0);
    Debug.WriteLine($"{decoder.CurrentVideoStream.CodecName} {decoder.CurrentVideoStream.DecoderEngine} {decoder.CurrentVideoStream.HardwareDecoderStatus} {decoder.CurrentVideoStream.PixelWidth} x {decoder.CurrentVideoStream.PixelHeight}");

    var frameServer = new Windows.Media.Playback.MediaPlayer() { IsVideoFrameServerEnabled = true };
    frameServer.Source = MediaSource.CreateFromMediaStreamSource(mediaStreamSource);
    frameServer.RealTimePlayback = true;
    frameServer.VideoFrameAvailable += MediaPlayer_VideoFrameAvailable;
    frameServer.Play();
}

// FrameAvailable: copies the frame into a SoftwareBitmap-backed CanvasBitmap,
// then pushes the raw bytes to the Unity texture on the app thread.
private void MediaPlayer_VideoFrameAvailable(Windows.Media.Playback.MediaPlayer sender, object args)
{
    CanvasDevice canvasDevice = CanvasDevice.GetSharedDevice();
    using (CanvasBitmap canvasBitmap = CanvasBitmap.CreateFromSoftwareBitmap(canvasDevice, frameServerDest))
    {
        sender.CopyFrameToVideoSurface(canvasBitmap);
        byte[] bytes = canvasBitmap.GetPixelBytes();
        if (AppCallbacks.Instance.IsInitialized())
        {
            AppCallbacks.Instance.InvokeOnAppThread(() =>
            {
                tex.LoadRawTextureData(bytes);
                tex.Apply();
                Display.GetComponent<UnityEngine.UI.RawImage>().texture = tex;
            }, false);
        }
        GC.Collect(); // forces a full collection every frame; see the GC discussion in the update below
    }
}
My FFmpeg output settings:
ffmpeg -r 60 -f gdigrab -i desktop -f h264 -framerate 60 -vcodec libx264 -preset ultrafast -tune zerolatency -threads 8 -thread_type slice udp://127.0.0.1:9005
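Since the HoloLens decoder has far less headroom than a desktop GPU, it may also help to shrink what is sent rather than only tuning the receiver. A hedged variant of the same command (the 1280x720 size, 4M bitrate, and 60-frame GOP are illustrative numbers, not tested values):

ffmpeg -f gdigrab -framerate 60 -i desktop -vf scale=1280:720 -vcodec libx264 -preset ultrafast -tune zerolatency -g 60 -b:v 4M -threads 8 -thread_type slice -f h264 udp://127.0.0.1:9005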
Update:
Hello, I've done some more work since then.
What I did:
- I set up a direct connection via a USB-C to Ethernet adapter
- I started looking into using DirectX surfaces
I found the following way to get hold of the D3D11 device that Unity uses. For this I had to use the SharpDX library and a similar thread: https://forum.unity.com/threads/d3d11-texture2d-blitting-framerate.562552
But there are still some problems I haven't been able to solve:
1. FFmpeg only works with VideoDecoderMode = VideoDecoderMode.Automatic or VideoDecoderMode.ForceFFmpegSoftwareDecoder;
2. In the event handler (VideoFrameAvailable) the garbage-collector load is still heavy, which apparently causes the performance problems. Also, performance only suffers on the HoloLens.
With the other VideoDecoderModes the stream parameters are detected, but the VideoFrameAvailable event never fires. Latency is close to zero, but performance is still not great.
Maybe someone has ideas for fixing the garbage-collector problem? (See the sketch after the code below.)
private SoftwareBitmap frameServerDist = new SoftwareBitmap(BitmapPixelFormat.Rgba8, 780, 780,
BitmapAlphaMode.Premultiplied);
private FFmpegInteropMSS decoder;
private UnityEngine.GameObject Display;
private UnityEngine.Texture2D targetTexture;
private UnityEngine.GameObject MainCamera;
private SharpDX.Direct3D11.Device dstDevice;
private SharpDX.Direct3D11.DeviceContext dstContext;
private SharpDX.Direct3D11.Texture2D m_DstTexture;
private SharpDX.Direct3D11.Device srcDevice;
private SharpDX.Direct3D11.DeviceContext srcContext;
private static DataRectangle _rect;
private SharpDX.Direct3D11.Texture2DDescription Texture2DDescription = new SharpDX.Direct3D11.Texture2DDescription()
{
ArraySize = 1,
BindFlags = SharpDX.Direct3D11.BindFlags.ShaderResource,
Usage = SharpDX.Direct3D11.ResourceUsage.Immutable, //GPU Only
CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.None,
Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm,
MipLevels = 1,
OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None,
SampleDescription = new SharpDX.DXGI.SampleDescription()
{
Count = 1,
Quality = 0
}
};
//This event fires once the Unity engine has initialized
private void AppCallbacks_Initialized()
{
srcDevice = new SharpDX.Direct3D11.Device(SharpDX.Direct3D.DriverType.Hardware);
srcContext = srcDevice.ImmediateContext;
UnityEngine.WSA.Application.InvokeOnAppThread(() =>
{
Display = UnityEngine.GameObject.Find("Display");
targetTexture = null;
//Create a texture just to obtain Unity's device and device context
UnityEngine.Texture2D deviceTexture = new UnityEngine.Texture2D(frameServerDist.PixelWidth, frameServerDist.PixelHeight, UnityEngine.TextureFormat.RGBA32, false);
IntPtr txPtr = deviceTexture.GetNativeTexturePtr();
SharpDX.Direct3D11.Texture2D dstTextureX = new SharpDX.Direct3D11.Texture2D(txPtr);
dstDevice = dstTextureX.Device;
dstContext = dstDevice.ImmediateContext;
//Create sharedResource
SharpDX.Direct3D11.Texture2DDescription sharedTextureDesc = dstTextureX.Description;
sharedTextureDesc.OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.Shared;
m_DstTexture = new SharpDX.Direct3D11.Texture2D(dstDevice, sharedTextureDesc);
SharpDX.Direct3D11.ShaderResourceViewDescription rvdesc = new SharpDX.Direct3D11.ShaderResourceViewDescription
{
Format = sharedTextureDesc.Format,
Dimension = SharpDX.Direct3D.ShaderResourceViewDimension.Texture2D
};
rvdesc.Texture2D.MostDetailedMip = 0; rvdesc.Texture2D.MipLevels = 1;
SharpDX.Direct3D11.ShaderResourceView rvptr = new SharpDX.Direct3D11.ShaderResourceView(
dstDevice,
m_DstTexture, rvdesc);
targetTexture = UnityEngine.Texture2D.CreateExternalTexture(sharedTextureDesc.Width, sharedTextureDesc.Height, UnityEngine.TextureFormat.BGRA32, false, false, rvptr.NativePointer);
MainCamera = UnityEngine.GameObject.Find("Main Camera");
Display.GetComponent<UnityEngine.UI.RawImage>().texture = targetTexture;
InitializeMediaPlayer();
}, false);
}
private CanvasDevice canvasDevice = CanvasDevice.GetSharedDevice(); // missing from the original snippet

private void MediaPlayer_VideoFrameAvailable(Windows.Media.Playback.MediaPlayer sender, object args)
{
    using (var canvasBitmap = CanvasBitmap.CreateFromSoftwareBitmap(canvasDevice, frameServerDist))
    {
        sender.CopyFrameToVideoSurface(canvasBitmap);
        var sharedResourceDst = m_DstTexture.QueryInterface<SharpDX.DXGI.Resource>();
        var sharedTexDst = srcDevice.OpenSharedResource<SharpDX.Direct3D11.Texture2D>(sharedResourceDst.SharedHandle);
        using (var _stream = DataStream.Create(canvasBitmap.GetPixelBytes(), true, false))
        {
            _rect.DataPointer = _stream.DataPointer;
            _rect.Pitch = Texture2DDescription.Width * 4;
            var srcTexture = new SharpDX.Direct3D11.Texture2D(srcDevice, Texture2DDescription, _rect);
            srcContext.CopyResource(srcTexture, sharedTexDst);
            srcContext.Flush();
            sharedResourceDst.Dispose();
            sharedTexDst.Dispose();
            srcTexture.Dispose();
        }
    }
}
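One idea for the GC question above: almost everything in that handler is allocated per frame (the CanvasBitmap, the byte[] from GetPixelBytes(), the DataStream, the staging Texture2D, plus the per-frame GC.Collect() in the first version). Below is a minimal sketch that hoists the reusable objects out of the per-frame path. It assumes a fixed frame size, and swaps the per-frame Immutable texture for one Dynamic CPU-writable texture that is mapped and refilled each frame; the GetPixelBytes() allocation itself remains.

private CanvasBitmap reusableBitmap;                 // created once from frameServerDist
private SharpDX.Direct3D11.Texture2D stagingTexture; // created once, Dynamic + CPU write
private SharpDX.Direct3D11.Texture2D sharedTexDst;   // opened once via OpenSharedResource

private void InitFrameResources()
{
    reusableBitmap = CanvasBitmap.CreateFromSoftwareBitmap(canvasDevice, frameServerDist);

    var desc = Texture2DDescription;                 // struct copy of the field above
    desc.Usage = SharpDX.Direct3D11.ResourceUsage.Dynamic;
    desc.CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.Write;
    stagingTexture = new SharpDX.Direct3D11.Texture2D(srcDevice, desc);

    using (var dxgiRes = m_DstTexture.QueryInterface<SharpDX.DXGI.Resource>())
        sharedTexDst = srcDevice.OpenSharedResource<SharpDX.Direct3D11.Texture2D>(dxgiRes.SharedHandle);
}

private void MediaPlayer_VideoFrameAvailable(Windows.Media.Playback.MediaPlayer sender, object args)
{
    sender.CopyFrameToVideoSurface(reusableBitmap);
    byte[] bytes = reusableBitmap.GetPixelBytes();   // still allocates once per frame

    // Refill the Dynamic texture in place instead of creating a new Immutable one.
    // Copy row by row, since the driver may pad the mapped row pitch.
    var box = srcContext.MapSubresource(stagingTexture, 0,
        SharpDX.Direct3D11.MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None);
    int rowBytes = frameServerDist.PixelWidth * 4;
    for (int y = 0; y < frameServerDist.PixelHeight; y++)
        System.Runtime.InteropServices.Marshal.Copy(
            bytes, y * rowBytes, box.DataPointer + y * box.RowPitch, rowBytes);
    srcContext.UnmapSubresource(stagingTexture, 0);

    srcContext.CopyResource(stagingTexture, sharedTexDst);
    srcContext.Flush();
}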
In the end the problem was the copy from CPU to GPU; the SharpDX library makes it possible to copy the frame directly to an IDirect3DSurface instead. I'm attaching the code, maybe it will be useful.
The Direct3D11 helper wrapper is available in the Microsoft documentation:
https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/screen-capture-video#helper-wrapper-classes
private UnityEngine.GameObject MainCamera;
private UnityEngine.Texture2D targetTexture;
private IDirect3DSurface surface;
private SharpDX.Direct3D11.Device dstDevice;
private void AppCallbacks_Initialized()
{
SharpDX.Direct3D11.Device srcDevice = new SharpDX.Direct3D11.Device(SharpDX.Direct3D.DriverType.Hardware);
UnityEngine.WSA.Application.InvokeOnAppThread(() =>
{
Display = UnityEngine.GameObject.Find("Display");
targetTexture = null;
//Create a texture just to obtain Unity's device and device context
UnityEngine.Texture2D deviceTexture = new UnityEngine.Texture2D(2048, 2048, UnityEngine.TextureFormat.RGBA32, false);
IntPtr txPtr = deviceTexture.GetNativeTexturePtr();
SharpDX.Direct3D11.Texture2D dstTexture = new SharpDX.Direct3D11.Texture2D(txPtr);
dstDevice = dstTexture.Device;
//Create sharedResource
SharpDX.Direct3D11.Texture2DDescription sharedTextureDesc = dstTexture.Description;
sharedTextureDesc.OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.Shared;
SharpDX.Direct3D11.Texture2D m_DstTexture = new SharpDX.Direct3D11.Texture2D(dstDevice, sharedTextureDesc);
SharpDX.Direct3D11.ShaderResourceViewDescription rvdesc = new SharpDX.Direct3D11.ShaderResourceViewDescription
{
Format = sharedTextureDesc.Format,
Dimension = SharpDX.Direct3D.ShaderResourceViewDimension.Texture2D
};
rvdesc.Texture2D.MostDetailedMip = 0;
rvdesc.Texture2D.MipLevels = 1;
SharpDX.Direct3D11.ShaderResourceView rvptr = new SharpDX.Direct3D11.ShaderResourceView(
dstDevice,
m_DstTexture, rvdesc);
targetTexture = UnityEngine.Texture2D.CreateExternalTexture(sharedTextureDesc.Width, sharedTextureDesc.Height, UnityEngine.TextureFormat.BGRA32, false, false, rvptr.NativePointer);
MainCamera = UnityEngine.GameObject.Find("Main Camera");
Display.GetComponent<UnityEngine.UI.RawImage>().texture = targetTexture;
var sharedResourceDst = m_DstTexture.QueryInterface<SharpDX.DXGI.Resource>();
var sharedTexDst = srcDevice.OpenSharedResource<SharpDX.Direct3D11.Texture2D>(sharedResourceDst.SharedHandle);
surface = Direct3D11Helper.CreateDirect3DSurfaceFromSharpDXTexture(sharedTexDst);
sharedResourceDst.Dispose();
sharedTexDst.Dispose();
dstTexture.Dispose();
m_DstTexture.Dispose();
}, false);
InitializeMediaPlayer();
}
private void MediaPlayer_VideoFrameAvailable(Windows.Media.Playback.MediaPlayer sender, object args)
{
Debug.WriteLine("frameAvail");
sender.CopyFrameToVideoSurface(surface);
}
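Note: since surface wraps the shared texture that Unity samples through CreateExternalTexture, CopyFrameToVideoSurface lets the media pipeline write the decoded frame directly on the GPU. No pixel bytes ever come back to managed memory, which should remove both the CPU-to-GPU copy and the per-frame garbage of the earlier versions.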