DirectShow (C#), Preview FPS and CPU usage are dropping with time
I'm building a WinForms application to learn DirectShow. Everything works, except for a strange problem where CPU usage and FPS drop over time.
The PC being used has low specs, so at first I get around 40% CPU usage, which is great. Then, over about 30 minutes, it drops as low as 4%. Naturally, the FPS also falls to just a few frames per second.
I have run and tried other sample applications from the internet, and they all behave in roughly the same way. They all use DirectShowLib as well, so the foundation is the same as my app.
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.ComTypes;
using System.Windows.Forms;
using DirectShowLib;
namespace WebCamPerfTestProject
{
public partial class Cftp_Form1 : Form
{
public enum PlayState : int
{
Stopped,
Paused,
Running,
Init
}
private PlayState CurrentState = PlayState.Stopped;
private const int WM_GRAPHNOTIFY = 0x8000 + 1; // WM_APP + 1
private IVideoWindow videoWindow = null;
private IMediaControl mediaControl = null;
private IMediaEventEx mediaEventEx = null;
private IGraphBuilder graph = null;
private ICaptureGraphBuilder2 pGraphBuilder = null;
private IBaseFilter pUSB = null;
private IAMStreamConfig streamConfig = null;
private VideoInfoHeader format = null;
private AMMediaType pmt = null;
//CLSID of the standard DirectShow Sample Grabber filter, referenced below
private static readonly Guid CLSID_SampleGrabber = new Guid("C1F400A0-3F08-11D3-9F0B-006008039E37");
private decimal ratio = Decimal.Divide(1280, 720); //temp values
private decimal height;
public Cftp_Form1()
{
InitializeComponent();
}
private void Form1_Load(object sender, EventArgs e)
{
Resize += new EventHandler(WebCamControl_Resize);
Console.WriteLine("Building graph...");
GetInterfaces();
Console.WriteLine("Start capture...");
CaptureVideo(graph);
}
private void GetInterfaces()
{
graph = (IGraphBuilder)(new FilterGraph());
pGraphBuilder = (ICaptureGraphBuilder2)(new CaptureGraphBuilder2());
mediaControl = (IMediaControl)graph;
videoWindow = (IVideoWindow)graph;
mediaEventEx = (IMediaEventEx)graph;
// send notification messages to the control window
int hr = mediaEventEx.SetNotifyWindow(Handle, WM_GRAPHNOTIFY, IntPtr.Zero);
DsError.ThrowExceptionForHR(hr);
}
private void CaptureVideo(IGraphBuilder pGraph)
{
int hr = 0;
hr = pGraphBuilder.SetFiltergraph(pGraph);
DsError.ThrowExceptionForHR(hr);
pUSB = FindCaptureDevice();
hr = pGraph.AddFilter(pUSB, "WebCamControl Video");
DsError.ThrowExceptionForHR(hr);
//add smartTee
IBaseFilter pSmartTee = (IBaseFilter)new SmartTee();
hr = pGraph.AddFilter(pSmartTee, "Smart Tee");
DsError.ThrowExceptionForHR(hr);
//connect smart tee to camera
hr = pGraphBuilder.RenderStream(null, MediaType.Video, pUSB, null, pSmartTee);
DsError.ThrowExceptionForHR(hr);
pmt = new AMMediaType();
pmt.majorType = MediaType.Video;
pmt.subType = MediaSubType.MJPG;
pmt.formatType = FormatType.VideoInfo;
pmt.fixedSizeSamples = false; //true for 640x480
pmt.formatSize = 88;
pmt.sampleSize = 2764800; //2764800 614400
pmt.temporalCompression = false;
//////////////////////////////////
format = new VideoInfoHeader();
format.SrcRect = new DsRect();
format.TargetRect = new DsRect();
format.BitRate = 5000000;
format.AvgTimePerFrame = 666666;
//////////////////////////////////
format.BmiHeader = new BitmapInfoHeader();
format.BmiHeader.Size = 40;
format.BmiHeader.Width = 1280;
format.BmiHeader.Height = 720;
format.BmiHeader.Planes = 1;
format.BmiHeader.BitCount = 24;
format.BmiHeader.Compression = 1196444237; //1196444237 //844715353
format.BmiHeader.ImageSize = 2764800; //2764800 614400
pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(format));
Marshal.StructureToPtr(format, pmt.formatPtr, false);
Debug.WriteLine(getCatName(pUSB) + " at line 130");
streamConfig = (IAMStreamConfig)GetPin(pUSB, getCatName(pUSB));
hr = streamConfig.SetFormat(pmt);
DsUtils.FreeAMMediaType(pmt);
if (hr < 0)
{
CapTest.CustomMessage.ShowMessage("Can`t set format");
DsError.ThrowExceptionForHR(hr);
}
//add MJPEG Decompressor
IBaseFilter pMJPEGDecompressor = (IBaseFilter)new MjpegDec();
hr = pGraph.AddFilter(pMJPEGDecompressor, "MJPEG Decompressor");
if (hr < 0)
{
CapTest.CustomMessage.ShowMessage("Can`t add MJPEG Decompressor");
DsError.ThrowExceptionForHR(hr);
}
//add SampleGrabber
IBaseFilter pSampleGrabber = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_SampleGrabber));
hr = pGraph.AddFilter(pSampleGrabber, "SampleGrabber");
checkHR(hr, "Can't add SampleGrabber to graph");
//connect Smart Tee and SampleGrabber
hr = pGraph.ConnectDirect(GetPin(pSmartTee, "Preview"), GetPin(pSampleGrabber, "Input"), null);
checkHR(hr, "Can't connect Smart Tee and SampleGrabber");
//connect smart tee to camera
hr = pGraphBuilder.RenderStream(null, MediaType.Video, pSampleGrabber, null, pMJPEGDecompressor);
//add Color Space Converter
IBaseFilter pColorSpaceConverter = (IBaseFilter)new Colour();
hr = pGraph.AddFilter(pColorSpaceConverter, "Color Space Converter");
if (hr < 0)
{
CapTest.CustomMessage.ShowMessage("Can't add Color Space Converter to graph");
DsError.ThrowExceptionForHR(hr);
}
hr = pGraphBuilder.RenderStream(null, MediaType.Video, pMJPEGDecompressor, null, pColorSpaceConverter);
DsError.ThrowExceptionForHR(hr);
IBaseFilter videoRender = (IBaseFilter)new VideoRenderer();
hr = pGraph.AddFilter(videoRender, "Video Render");
DsError.ThrowExceptionForHR(hr);
hr = pGraphBuilder.RenderStream(null, MediaType.Video, pColorSpaceConverter, null, videoRender);
DsError.ThrowExceptionForHR(hr);
Debug.WriteLine(DsError.GetErrorText(hr) + " is error in rendering");
Marshal.ReleaseComObject(pUSB);
SetupVideoWindow();
}
//helper referenced above; body assumed: show the message and throw when the HRESULT indicates failure
private void checkHR(int hr, string message)
{
    if (hr < 0)
    {
        CapTest.CustomMessage.ShowMessage(message);
        DsError.ThrowExceptionForHR(hr);
    }
}
//Access the camera
private IBaseFilter FindCaptureDevice()
{
IEnumMoniker classEnum = null;
IMoniker[] moniker = new IMoniker[1];
object source = null;
ICreateDevEnum devEnum = (ICreateDevEnum)(new CreateDevEnum());
int hr = devEnum.CreateClassEnumerator(FilterCategory.VideoInputDevice, out classEnum, CDef.None);
DsError.ThrowExceptionForHR(hr);
Marshal.ReleaseComObject(devEnum);
if (classEnum == null)
{
throw new ApplicationException("No video capture device was detected.\r\n\r\n" + "This sample requires a video capture device, such as a USB WebCam,\r\nto be installed and working properly. The sample will now close.");
}
IntPtr none = IntPtr.Zero;
if (classEnum.Next(moniker.Length, moniker, none) == 0)
{
Guid iid = typeof(IBaseFilter).GUID;
moniker[0].BindToObject(null, null, ref iid, out source);
}
else
{
throw new ApplicationException("Unable to access video capture device!");
}
Marshal.ReleaseComObject(moniker[0]);
Marshal.ReleaseComObject(classEnum);
return (IBaseFilter)source;
}
static IPin GetPin(IBaseFilter filter, string pinname)
{
IEnumPins epins;
int hr = filter.EnumPins(out epins);
if (hr < 0)
{
CapTest.CustomMessage.ShowMessage("Cant enumerate pins");
DsError.ThrowExceptionForHR(hr);
}
IntPtr fetched = Marshal.AllocCoTaskMem(4);
IPin[] pins = new IPin[1];
while (epins.Next(1, pins, fetched) == 0)
{
PinInfo pinfo;
pins[0].QueryPinInfo(out pinfo);
bool found = (pinfo.name == pinname);
CapTest.CustomMessage.ShowMessage(pinfo.name + " is PIN NAME");
DsUtils.FreePinInfo(pinfo);
if (found)
return pins[0];
}
CapTest.CustomMessage.ShowMessage("Pin not found");
DsError.ThrowExceptionForHR(hr);
return null;
}
string getCatName(IBaseFilter filter)
{
string retval = "";
IEnumPins epins;
int hr = filter.EnumPins(out epins);
if (hr < 0)
{
CapTest.CustomMessage.ShowMessage("Cant enumerate pins");
DsError.ThrowExceptionForHR(hr);
}
IntPtr fetched = Marshal.AllocCoTaskMem(4);
IPin[] pins = new IPin[1];
while (epins.Next(1, pins, fetched) == 0)
{
PinInfo pinfo;
pins[0].QueryPinInfo(out pinfo);
bool found = (pinfo.name == "Capture");
CapTest.CustomMessage.ShowMessage(pinfo.name + " is pinname on getCatName");
DsUtils.FreePinInfo(pinfo);
if (found)
retval = pinfo.name;
}
CapTest.CustomMessage.ShowMessage("Pin found " + retval);
return retval;
}
private void SetupVideoWindow()
{
int hr = 0;
//set the video window to be a child of the main window
//putowner : Sets the owning parent window for the video playback window.
hr = videoWindow.put_Owner(pictureBox1.Handle);
DsError.ThrowExceptionForHR(hr);
hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren);
DsError.ThrowExceptionForHR(hr);
//Use helper function to position video window in client rect of main application window
WebCamControl_Resize(this, null);
//Make the video window visible, now that it is properly positioned
//put_visible : This method changes the visibility of the video window.
hr = videoWindow.put_Visible(OABool.True);
DsError.ThrowExceptionForHR(hr);
hr = mediaControl.Run();
DsError.ThrowExceptionForHR(hr);
HandleGraphEvent();
CurrentState = PlayState.Running;
Cftp_Form1.ActiveForm.WindowState = FormWindowState.Maximized;
}
private void HandleGraphEvent()
{
int hr = 0;
EventCode evCode = 0;
IntPtr evParam1 = IntPtr.Zero;
IntPtr evParam2 = IntPtr.Zero;
while (mediaEventEx != null && mediaEventEx.GetEvent(out evCode, out evParam1, out evParam2, 0) == 0)
{
// Free event parameters to prevent memory leaks associated with
// event parameter data. While this application is not interested
// in the received events, applications should always process them.
hr = mediaEventEx.FreeEventParams(evCode, evParam1, evParam2);
DsError.ThrowExceptionForHR(hr);
Console.WriteLine(evCode + " " + evParam1 + " " + evParam2);
// Insert event processing code here, if desired (see http://msdn2.microsoft.com/en-us/library/ms783649.aspx)
}
}
private void ReleaseInterfaces()
{
if (mediaControl != null)
mediaControl.StopWhenReady();
CurrentState = PlayState.Stopped;
// stop notifications of events
if (mediaEventEx != null)
mediaEventEx.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero);
//// below we relinquish ownership (IMPORTANT!) of the video window.
//// Failing to call put_Owner can lead to assert failures within
//// the video renderer, as it still assumes that it has a valid
//// parent window.
if (videoWindow != null)
{
videoWindow.put_Visible(OABool.False);
videoWindow.put_Owner(IntPtr.Zero);
}
// Release DirectShow interfaces
Marshal.ReleaseComObject(mediaControl);
mediaControl = null;
Marshal.ReleaseComObject(mediaEventEx);
mediaEventEx = null;
Marshal.ReleaseComObject(videoWindow);
videoWindow = null;
Marshal.ReleaseComObject(graph);
graph = null;
Marshal.ReleaseComObject(pGraphBuilder);
pGraphBuilder = null;
}
private void WebCamControl_Resize(object sender, System.EventArgs e)
{
//Resize the video preview window to match owner window size
//Calculate the ratio like 16/9
// 1280/720=1.77777778
if (videoWindow != null)
{
    height = Convert.ToDecimal(pictureBox1.Width) / ratio;
    int iheight = (int)height;
    videoWindow.SetWindowPosition(0, 0, pictureBox1.Width, iheight);
    //Debug.WriteLine(pictureBox1.Width +" "+ iheight + " ratio:" + ratio);
    mediaControl.Run();
}
}
private void Cftp_Form1_FormClosing(object sender, FormClosingEventArgs e)
{
ReleaseInterfaces();
}
}
}
Is there any way to keep the process "alive"?
Maybe by requesting frames at specific intervals?
If that is possible, how can I "request" more frames?
The first comment here is to learn how to visualize your DirectShow filter graph and understand what is effectively built. Whether you develop in C++ or in C# through a .NET interop library, you often build the pipeline by adding a part of it explicitly and then having something else added for you. You don't know what you end up with, so you need to…
…Understand Your DirectShow Filter Graph
That is, let your application build the pipeline and then inspect it with developer tools. It also helps to share that information when you ask a question like this.
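For example, one simple way to inspect a graph built by a running process is to register it in the Running Object Table, so that GraphEdit or GraphStudioNext can connect to it and show every filter that was actually created. A minimal sketch using DirectShowLib's DsROTEntry helper (the field name and the methods around it are illustrative, not taken from the code above):

private DsROTEntry rotEntry = null;

private void RegisterGraphForInspection()
{
    // Publish the filter graph in the Running Object Table so GraphEdit /
    // GraphStudioNext ("Connect to Remote Graph") can attach to this process.
    if (rotEntry == null && graph != null)
        rotEntry = new DsROTEntry(graph);
}

private void UnregisterGraphForInspection()
{
    // Remove the ROT entry again when the graph is torn down.
    if (rotEntry != null)
    {
        rotEntry.Dispose();
        rotEntry = null;
    }
}

Calling RegisterGraphForInspection right after building the graph, and UnregisterGraphForInspection from the cleanup path, would let you watch the graph while the FPS drop is actually happening.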
In your case the graph is not built well: you first create a Null Renderer yourself, and then you request IVideoWindow and implicitly connect to a Video Renderer that is automatically supplied for you. This means you don't understand the pipeline, and your questions and problems partly come from that misunderstanding.
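To see which filters were added for you implicitly, you can also enumerate the graph's contents at runtime. A rough sketch, assuming the same graph interfaces as in the question (the helper name is made up for illustration):

private void DumpGraphFilters(IGraphBuilder graphToDump)
{
    // Walk every filter currently in the graph and print its name, so the
    // implicitly added renderers/converters become visible in the debug output.
    IEnumFilters enumFilters;
    int hr = graphToDump.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);
    IBaseFilter[] filters = new IBaseFilter[1];
    while (enumFilters.Next(1, filters, IntPtr.Zero) == 0)
    {
        FilterInfo info;
        filters[0].QueryFilterInfo(out info);
        Debug.WriteLine("Filter in graph: " + info.achName);
        if (info.pGraph != null)
            Marshal.ReleaseComObject(info.pGraph);
        Marshal.ReleaseComObject(filters[0]);
    }
    Marshal.ReleaseComObject(enumFilters);
}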
The next issue is that you connect the live video source to a synchronizing renderer which, generally speaking, is designed to present video ahead of time using pre-buffered frames. My guess is that the frame times gradually drift, so the video renderer blocks streaming and keeps increasing the wait time, which results in more waiting and less capturing. I will skip the details of why it happens this way, but the important thing to know is that you want to insert a Smart Tee Filter downstream of the camera and upstream of the renderer, and connect its preview output to the visual presentation. The MSDN documentation explains in detail why and for what purpose.
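In terms of the calls already used in the question, the recommended layout could look roughly like this. This is a sketch, not drop-in code: it reuses pGraph, pGraphBuilder, pUSB and GetPin from the question, it assumes GetPin returns the pin with the given name, and "Preview"/"Capture" are the Smart Tee's standard pin names.

IBaseFilter smartTee = (IBaseFilter)new SmartTee();
int hr = pGraph.AddFilter(smartTee, "Smart Tee");
DsError.ThrowExceptionForHR(hr);

// camera -> Smart Tee (the capture graph builder picks the camera's output pin)
hr = pGraphBuilder.RenderStream(null, MediaType.Video, pUSB, null, smartTee);
DsError.ThrowExceptionForHR(hr);

// Smart Tee "Preview" pin -> on-screen presentation; this is the leg meant for
// live display, so the renderer no longer throttles the capture source.
hr = pGraph.Render(GetPin(smartTee, "Preview"));
DsError.ThrowExceptionForHR(hr);

// The Smart Tee "Capture" pin stays free for the processing branch
// (Sample Grabber, MJPEG decompressor, writer, ...), connected as before.

The key point is only which leg feeds the renderer; the rest of the graph can stay as it is.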