减少 60 秒 AVI 视频的占用空间
Reducing the footprint of 60 seconds of AVI video
我有一个使用 AForge 库运行的程序,它在内存缓冲区中保存来自网络摄像头的 60 秒图像流。在检测到移动后,它将接下来的 30 秒记录到同一个缓冲区中,最终覆盖前 30 秒。实际上,在任何检测到的动作前后各保留 30 秒的滚动视频,总共有 60 秒的视频。
问题是,来自 AForge COMPRESSED 的 60 秒位图图像在 RAM 中大约为 3GB。最重要的是,生成的 avi 文件大约为 3MB。差距太大了!
谁能看出我可能哪里出错了?以这种速度,每次只将视频直接录制到磁盘一个小时并手动循环播放任何事件会更有益!
系统由以下三个部分组成:
CameraController.cs - 对每个连接的网络摄像头的初始化进行排序。我留下了注释掉的组件,以了解以前使用的设置。
public class CameraController : ServiceBase
{
    // NOTE(review): ServiceBase's service entry point is
    // "protected override void OnStart(string[] args)". This parameterless
    // method does NOT override it, so the Service Control Manager will never
    // call it — confirm how the service is actually started.
    public virtual void OnStart()
    {
        Start(60, 0.4f);
    }

    private FilterInfoCollection _VideoCaptureDevices;
    private MotionDetector _MotionDetector;
    // Keyed by the device moniker string (VideoCaptureDevice.Source).
    private Dictionary<string, Recording> _Streams = new Dictionary<string, Recording>();
    private Dictionary<int, VideoCaptureDevice> _Devices = new Dictionary<int, VideoCaptureDevice>();
    private int _Framerate;           // frames per second assumed for every device
    private int _MaxVideoLength;      // seconds of footage kept around a motion event
    private float _MotionSensitivity; // motion level above which recording starts

    /// <summary>Stores the limits and initialises every attached webcam.</summary>
    /// <param name="maxVideoLength">Seconds of video to buffer/record.</param>
    /// <param name="motionSensitivity">Motion threshold that triggers recording.</param>
    public void Start(int maxVideoLength, float motionSensitivity)
    {
        _MaxVideoLength = maxVideoLength;
        _MotionSensitivity = motionSensitivity;
        Init();
    }

    /// <summary>
    /// Enumerates all video input devices, creates one Recording per device
    /// and starts streaming frames into <see cref="NewFrame"/>.
    /// </summary>
    public void Init()
    {
        try
        {
            _MotionDetector = GetDefaultMotionDetector();
            _VideoCaptureDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
            int counter = 0;
            foreach (FilterInfo device in _VideoCaptureDevices)
            {
                var videoDevice = new VideoCaptureDevice(device.MonikerString);
                // Previous code read VideoCapabilities[0].AverageFrameRate
                // (falling back to 25 when it reported 0); 15 fps is now assumed.
                //_Framerate = videoDevice.VideoCapabilities[0].AverageFrameRate == 0
                //    ? 25
                //    : videoDevice.VideoCapabilities[0].AverageFrameRate;
                _Framerate = 15;
                _Streams.Add(videoDevice.Source,
                    new Recording(counter, device.Name, videoDevice.Source, _MaxVideoLength, _Framerate));
                videoDevice.NewFrame += new NewFrameEventHandler(NewFrame);
                videoDevice.Start();
                _Devices.Add(counter++, videoDevice);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>
    /// Per-frame callback: buffers the frame, then either advances an active
    /// recording or runs motion detection to decide whether to start one.
    /// </summary>
    public void NewFrame(object sender, NewFrameEventArgs eventArgs)
    {
        try
        {
            var device = (VideoCaptureDevice)sender;
            // Resolve the stream once instead of repeating the same
            // dictionary lookup up to five times per frame.
            var recording = _Streams[device.Source];
            recording.AddBitmap((Bitmap)eventArgs.Frame.Clone());
            if (recording.IsRecording)
            {
                recording.CheckRecording();
                if (recording.SaveRequired)
                    recording.WriteToFile();
            }
            else
            {
                var motion = _MotionDetector.ProcessFrame(recording.Bitmap);
                if (motion > _MotionSensitivity)
                    recording.StartRecording();
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>
    /// Closes any in-progress file writers and optionally signals the
    /// webcams themselves to stop streaming.
    /// </summary>
    public void StopVideo(bool stopWebcams = false)
    {
        foreach (var device in _Devices)
        {
            var stream = _Streams[device.Value.Source];
            if (stream.IsRecording)
                stream.FileWriter.Close();
            if (device.Value.IsRunning && stopWebcams)
                device.Value.SignalToStop();
        }
    }

    /// <summary>
    /// Builds the motion detector shared by all devices. The commented-out
    /// blocks are previously-tried detector/processor configurations, kept
    /// deliberately for reference.
    /// </summary>
    public static AForge.Vision.Motion.MotionDetector GetDefaultMotionDetector()
    {
        AForge.Vision.Motion.IMotionDetector detector = null;
        AForge.Vision.Motion.IMotionProcessing processor = null;
        AForge.Vision.Motion.MotionDetector motionDetector = null;
        //detector = new AForge.Vision.Motion.TwoFramesDifferenceDetector()
        //{
        //    DifferenceThreshold = 15,
        //    SuppressNoise = true
        //};
        //detector = new AForge.Vision.Motion.CustomFrameDifferenceDetector()
        //{
        //    DifferenceThreshold = 15,
        //    KeepObjectsEdges = true,
        //    SuppressNoise = true
        //};
        detector = new AForge.Vision.Motion.SimpleBackgroundModelingDetector()
        {
            DifferenceThreshold = 10,
            FramesPerBackgroundUpdate = 10,
            KeepObjectsEdges = true,
            MillisecondsPerBackgroundUpdate = 10,
            SuppressNoise = true
        };
        //processor = new AForge.Vision.Motion.GridMotionAreaProcessing()
        //{
        //    HighlightColor = System.Drawing.Color.Red,
        //    HighlightMotionGrid = true,
        //    GridWidth = 100,
        //    GridHeight = 100,
        //    MotionAmountToHighlight = 100F
        //};
        processor = new AForge.Vision.Motion.BlobCountingObjectsProcessing()
        {
            //HighlightColor = System.Drawing.Color.Red,
            //HighlightMotionRegions = true,
            MinObjectsHeight = 10,
            MinObjectsWidth = 10
        };
        motionDetector = new AForge.Vision.Motion.MotionDetector(detector, processor);
        return motionDetector;
    }
}
然后是 Recording.cs——控制何时开始/停止/写入录像。
public class Recording
{
    public int Id { get; set; }
    public string Name { get; set; }
    public string Source { get; set; }
    // Most recent uncompressed frame; also consumed by the motion detector.
    public Bitmap Bitmap { get; set; }
    public bool IsRecording { get; set; }
    public bool SaveRequired { get; set; }
    public int TimeLimitSec { get; set; }
    public int FrameRate { get; set; }
    // NOTE(review): the key really is spelled "DesinationFolder" — it must
    // match the App.config entry exactly, so the typo is preserved here.
    public string DirString = ConfigurationManager.AppSettings["DesinationFolder"].ToString();
    public Stopwatch Timer = new Stopwatch();
    public VideoFileWriter FileWriter = new VideoFileWriter();
    public VideoBuffer VideoBuffer;
    public int BufferPosition { get; set; }
    public string FileName { get; set; }

    /// <summary>Creates the per-device recording state and its frame buffer.</summary>
    public Recording(int id, string name, string source, int timeLimit, int framerate)
    {
        Id = id;
        Name = name;
        Source = source;
        IsRecording = false;
        SaveRequired = false;
        TimeLimitSec = timeLimit;
        FrameRate = framerate;
        VideoBuffer = new VideoBuffer(timeLimit, framerate);
    }

    /// <summary>Marks the stream as recording and restarts the elapsed timer.</summary>
    public void StartRecording()
    {
        IsRecording = true;
        Timer.Reset();
        Timer.Start();
    }

    /// <summary>Stops recording and flags the buffer for saving to disk.</summary>
    public void StopRecording()
    {
        IsRecording = false;
        SaveRequired = true;
        Timer.Reset(); // Reset() already stops the stopwatch; the extra Stop() was redundant
    }

    /// <summary>
    /// Writes every buffered frame to a new AVI file in the configured
    /// destination folder, decompressing each frame just-in-time.
    /// </summary>
    public void WriteToFile()
    {
        try
        {
            if (!Directory.Exists(DirString))
                Directory.CreateDirectory(DirString);
            FileName = Path.Combine(DirString,
                "Video_" + Id + "_" + Name + "_" + DateTime.Now.ToFileTime() + ".avi");
            FileWriter.Open(FileName, Bitmap.Width, Bitmap.Height, FrameRate, VideoCodec.Default);
            for (int frame = 0; frame < VideoBuffer.BufferPosition; frame++)
            {
                // Dispose each decompressed frame immediately: Bitmaps hold
                // unmanaged GDI+ memory that the GC is slow to reclaim, and
                // the original leaked one Bitmap per written frame.
                using (var image = Compression.Decompress<Bitmap>(VideoBuffer.Buffer[frame]))
                {
                    FileWriter.WriteVideoFrame(image);
                }
            }
            FileWriter.Close();
            SaveRequired = false;
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>Replaces the current frame and pushes it into the running buffer.</summary>
    public void AddBitmap(Bitmap bitmap)
    {
        try
        {
            // Dispose the previous clone before dropping the reference —
            // every frame arrives as a Clone() and its unmanaged GDI+ memory
            // leaks if it is simply overwritten. This was a major source of
            // the RAM bloat.
            if (this.Bitmap != null && !ReferenceEquals(this.Bitmap, bitmap))
                this.Bitmap.Dispose();
            this.Bitmap = bitmap;
            this.VideoBuffer.AddBitmap(bitmap);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>Stops an active recording once it exceeds the time limit.</summary>
    public void CheckRecording()
    {
        try
        {
            if (IsRecording && Timer.Elapsed.TotalSeconds > TimeLimitSec)
                StopRecording();
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    // NOTE(review): hard-coded path; this helper is never called within the
    // code shown — confirm whether it is still needed.
    private void SaveImage()
    {
        Bitmap.Save(@"D:\Storage\IMG_" + Id + "_" + Name + "_" + DateTime.Now.ToFileTime() + ".jpg");
    }
}
最后是 VideoBuffer.cs——控制位图的滚动缓冲区。请注意,位图也已被压缩为 byte[]。
public class VideoBuffer
{
    public int BufferLengthSeconds { get; set; }
    // Compressed frames in chronological order; index MaxPosition is a
    // scratch slot used while the buffer is full (see AddBitmap).
    public byte[][] Buffer { get; set; }
    public int BufferPosition { get; set; }
    public int MaxPosition { get; set; }
    public bool Recorded { get; set; }

    /// <summary>Allocates a buffer holding twice the requested duration:
    /// the pre-event footage plus the post-event footage.</summary>
    public VideoBuffer(int secondsToBuffer, int framerate)
    {
        MaxPosition = secondsToBuffer * framerate * 2; // Have our buffer before an event is started, as well as the length of time for the next
        // Plus one allows us to append the newest frame and then shift
        // everything down by one slot.
        Buffer = new byte[MaxPosition + 1][];
        BufferPosition = 0;
    }

    /// <summary>Compresses a frame into the buffer, evicting the oldest
    /// frame once the buffer is full.</summary>
    public void AddBitmap(Bitmap bitmap)
    {
        try
        {
            // If we haven't reached the maximum buffer size, keep adding it as normal.
            if (BufferPosition < MaxPosition)
            {
                Buffer[BufferPosition++] = Compression.Compress(bitmap);
            }
            else
            {
                // Full: drop the oldest frame by shifting everything down one.
                // The original allocated a whole byte[MaxPosition + 1][] temp
                // array and copied twice for EVERY frame; a single in-place
                // Array.Copy (which handles overlapping ranges) produces the
                // identical result with no per-frame allocation.
                // NOTE(review): a true circular buffer would avoid even this
                // O(n) shift — worth considering next.
                Buffer[MaxPosition] = Compression.Compress(bitmap);
                Array.Copy(Buffer, 1, Buffer, 0, MaxPosition);
                Buffer[MaxPosition] = null; // scratch slot cleared, as before
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }
}
所以真正的问题是,我如何进一步减少缓冲区的内存占用,但仍然随时将最后 30 秒的视频保留在内存中?
这周我有点筋疲力尽,看不出可能缺少什么。欢迎提出任何建议!
一些简单的数学表明,1920x1080x24 位颜色、15fps 的高清视频持续 60 秒大约为 5.3 GB。您正在进行一些帧压缩以使用 3GB。
VideoFileWriter
(为什么这个变量不是函数的局部变量?)正在使用默认的 AVI 视频编解码器,它也会在帧之间进行压缩。由于帧大部分可能是静态的,因此可以节省很多空间。
我建议找到一种方法将内存中的视频保存为压缩视频流。
我有一个使用 AForge 库运行的程序,它在内存缓冲区中保存来自网络摄像头的 60 秒图像流。在检测到移动后,它将接下来的 30 秒记录到同一个缓冲区中,最终覆盖前 30 秒。实际上,在任何检测到的动作前后各保留 30 秒的滚动视频,总共有 60 秒的视频。
问题是,来自 AForge COMPRESSED 的 60 秒位图图像在 RAM 中大约为 3GB。最重要的是,生成的 avi 文件大约为 3MB。差距太大了!
谁能看出我可能哪里出错了?以这种速度,每次只将视频直接录制到磁盘一个小时并手动循环播放任何事件会更有益!
系统由以下三个部分组成:
CameraController.cs - 对每个连接的网络摄像头的初始化进行排序。我留下了注释掉的组件,以了解以前使用的设置。
public class CameraController : ServiceBase
{
    // NOTE(review): ServiceBase's service entry point is
    // "protected override void OnStart(string[] args)". This parameterless
    // method does NOT override it, so the Service Control Manager will never
    // call it — confirm how the service is actually started.
    public virtual void OnStart()
    {
        Start(60, 0.4f);
    }

    private FilterInfoCollection _VideoCaptureDevices;
    private MotionDetector _MotionDetector;
    // Keyed by the device moniker string (VideoCaptureDevice.Source).
    private Dictionary<string, Recording> _Streams = new Dictionary<string, Recording>();
    private Dictionary<int, VideoCaptureDevice> _Devices = new Dictionary<int, VideoCaptureDevice>();
    private int _Framerate;           // frames per second assumed for every device
    private int _MaxVideoLength;      // seconds of footage kept around a motion event
    private float _MotionSensitivity; // motion level above which recording starts

    /// <summary>Stores the limits and initialises every attached webcam.</summary>
    /// <param name="maxVideoLength">Seconds of video to buffer/record.</param>
    /// <param name="motionSensitivity">Motion threshold that triggers recording.</param>
    public void Start(int maxVideoLength, float motionSensitivity)
    {
        _MaxVideoLength = maxVideoLength;
        _MotionSensitivity = motionSensitivity;
        Init();
    }

    /// <summary>
    /// Enumerates all video input devices, creates one Recording per device
    /// and starts streaming frames into <see cref="NewFrame"/>.
    /// </summary>
    public void Init()
    {
        try
        {
            _MotionDetector = GetDefaultMotionDetector();
            _VideoCaptureDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
            int counter = 0;
            foreach (FilterInfo device in _VideoCaptureDevices)
            {
                var videoDevice = new VideoCaptureDevice(device.MonikerString);
                // Previous code read VideoCapabilities[0].AverageFrameRate
                // (falling back to 25 when it reported 0); 15 fps is now assumed.
                //_Framerate = videoDevice.VideoCapabilities[0].AverageFrameRate == 0
                //    ? 25
                //    : videoDevice.VideoCapabilities[0].AverageFrameRate;
                _Framerate = 15;
                _Streams.Add(videoDevice.Source,
                    new Recording(counter, device.Name, videoDevice.Source, _MaxVideoLength, _Framerate));
                videoDevice.NewFrame += new NewFrameEventHandler(NewFrame);
                videoDevice.Start();
                _Devices.Add(counter++, videoDevice);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>
    /// Per-frame callback: buffers the frame, then either advances an active
    /// recording or runs motion detection to decide whether to start one.
    /// </summary>
    public void NewFrame(object sender, NewFrameEventArgs eventArgs)
    {
        try
        {
            var device = (VideoCaptureDevice)sender;
            // Resolve the stream once instead of repeating the same
            // dictionary lookup up to five times per frame.
            var recording = _Streams[device.Source];
            recording.AddBitmap((Bitmap)eventArgs.Frame.Clone());
            if (recording.IsRecording)
            {
                recording.CheckRecording();
                if (recording.SaveRequired)
                    recording.WriteToFile();
            }
            else
            {
                var motion = _MotionDetector.ProcessFrame(recording.Bitmap);
                if (motion > _MotionSensitivity)
                    recording.StartRecording();
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>
    /// Closes any in-progress file writers and optionally signals the
    /// webcams themselves to stop streaming.
    /// </summary>
    public void StopVideo(bool stopWebcams = false)
    {
        foreach (var device in _Devices)
        {
            var stream = _Streams[device.Value.Source];
            if (stream.IsRecording)
                stream.FileWriter.Close();
            if (device.Value.IsRunning && stopWebcams)
                device.Value.SignalToStop();
        }
    }

    /// <summary>
    /// Builds the motion detector shared by all devices. The commented-out
    /// blocks are previously-tried detector/processor configurations, kept
    /// deliberately for reference.
    /// </summary>
    public static AForge.Vision.Motion.MotionDetector GetDefaultMotionDetector()
    {
        AForge.Vision.Motion.IMotionDetector detector = null;
        AForge.Vision.Motion.IMotionProcessing processor = null;
        AForge.Vision.Motion.MotionDetector motionDetector = null;
        //detector = new AForge.Vision.Motion.TwoFramesDifferenceDetector()
        //{
        //    DifferenceThreshold = 15,
        //    SuppressNoise = true
        //};
        //detector = new AForge.Vision.Motion.CustomFrameDifferenceDetector()
        //{
        //    DifferenceThreshold = 15,
        //    KeepObjectsEdges = true,
        //    SuppressNoise = true
        //};
        detector = new AForge.Vision.Motion.SimpleBackgroundModelingDetector()
        {
            DifferenceThreshold = 10,
            FramesPerBackgroundUpdate = 10,
            KeepObjectsEdges = true,
            MillisecondsPerBackgroundUpdate = 10,
            SuppressNoise = true
        };
        //processor = new AForge.Vision.Motion.GridMotionAreaProcessing()
        //{
        //    HighlightColor = System.Drawing.Color.Red,
        //    HighlightMotionGrid = true,
        //    GridWidth = 100,
        //    GridHeight = 100,
        //    MotionAmountToHighlight = 100F
        //};
        processor = new AForge.Vision.Motion.BlobCountingObjectsProcessing()
        {
            //HighlightColor = System.Drawing.Color.Red,
            //HighlightMotionRegions = true,
            MinObjectsHeight = 10,
            MinObjectsWidth = 10
        };
        motionDetector = new AForge.Vision.Motion.MotionDetector(detector, processor);
        return motionDetector;
    }
}
然后是 Recording.cs——控制何时开始/停止/写入录像。
public class Recording
{
    public int Id { get; set; }
    public string Name { get; set; }
    public string Source { get; set; }
    // Most recent uncompressed frame; also consumed by the motion detector.
    public Bitmap Bitmap { get; set; }
    public bool IsRecording { get; set; }
    public bool SaveRequired { get; set; }
    public int TimeLimitSec { get; set; }
    public int FrameRate { get; set; }
    // NOTE(review): the key really is spelled "DesinationFolder" — it must
    // match the App.config entry exactly, so the typo is preserved here.
    public string DirString = ConfigurationManager.AppSettings["DesinationFolder"].ToString();
    public Stopwatch Timer = new Stopwatch();
    public VideoFileWriter FileWriter = new VideoFileWriter();
    public VideoBuffer VideoBuffer;
    public int BufferPosition { get; set; }
    public string FileName { get; set; }

    /// <summary>Creates the per-device recording state and its frame buffer.</summary>
    public Recording(int id, string name, string source, int timeLimit, int framerate)
    {
        Id = id;
        Name = name;
        Source = source;
        IsRecording = false;
        SaveRequired = false;
        TimeLimitSec = timeLimit;
        FrameRate = framerate;
        VideoBuffer = new VideoBuffer(timeLimit, framerate);
    }

    /// <summary>Marks the stream as recording and restarts the elapsed timer.</summary>
    public void StartRecording()
    {
        IsRecording = true;
        Timer.Reset();
        Timer.Start();
    }

    /// <summary>Stops recording and flags the buffer for saving to disk.</summary>
    public void StopRecording()
    {
        IsRecording = false;
        SaveRequired = true;
        Timer.Reset(); // Reset() already stops the stopwatch; the extra Stop() was redundant
    }

    /// <summary>
    /// Writes every buffered frame to a new AVI file in the configured
    /// destination folder, decompressing each frame just-in-time.
    /// </summary>
    public void WriteToFile()
    {
        try
        {
            if (!Directory.Exists(DirString))
                Directory.CreateDirectory(DirString);
            FileName = Path.Combine(DirString,
                "Video_" + Id + "_" + Name + "_" + DateTime.Now.ToFileTime() + ".avi");
            FileWriter.Open(FileName, Bitmap.Width, Bitmap.Height, FrameRate, VideoCodec.Default);
            for (int frame = 0; frame < VideoBuffer.BufferPosition; frame++)
            {
                // Dispose each decompressed frame immediately: Bitmaps hold
                // unmanaged GDI+ memory that the GC is slow to reclaim, and
                // the original leaked one Bitmap per written frame.
                using (var image = Compression.Decompress<Bitmap>(VideoBuffer.Buffer[frame]))
                {
                    FileWriter.WriteVideoFrame(image);
                }
            }
            FileWriter.Close();
            SaveRequired = false;
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>Replaces the current frame and pushes it into the running buffer.</summary>
    public void AddBitmap(Bitmap bitmap)
    {
        try
        {
            // Dispose the previous clone before dropping the reference —
            // every frame arrives as a Clone() and its unmanaged GDI+ memory
            // leaks if it is simply overwritten. This was a major source of
            // the RAM bloat.
            if (this.Bitmap != null && !ReferenceEquals(this.Bitmap, bitmap))
                this.Bitmap.Dispose();
            this.Bitmap = bitmap;
            this.VideoBuffer.AddBitmap(bitmap);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    /// <summary>Stops an active recording once it exceeds the time limit.</summary>
    public void CheckRecording()
    {
        try
        {
            if (IsRecording && Timer.Elapsed.TotalSeconds > TimeLimitSec)
                StopRecording();
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }

    // NOTE(review): hard-coded path; this helper is never called within the
    // code shown — confirm whether it is still needed.
    private void SaveImage()
    {
        Bitmap.Save(@"D:\Storage\IMG_" + Id + "_" + Name + "_" + DateTime.Now.ToFileTime() + ".jpg");
    }
}
最后是 VideoBuffer.cs——控制位图的滚动缓冲区。请注意,位图也已被压缩为 byte[]。
public class VideoBuffer
{
    public int BufferLengthSeconds { get; set; }
    // Compressed frames in chronological order; index MaxPosition is a
    // scratch slot used while the buffer is full (see AddBitmap).
    public byte[][] Buffer { get; set; }
    public int BufferPosition { get; set; }
    public int MaxPosition { get; set; }
    public bool Recorded { get; set; }

    /// <summary>Allocates a buffer holding twice the requested duration:
    /// the pre-event footage plus the post-event footage.</summary>
    public VideoBuffer(int secondsToBuffer, int framerate)
    {
        MaxPosition = secondsToBuffer * framerate * 2; // Have our buffer before an event is started, as well as the length of time for the next
        // Plus one allows us to append the newest frame and then shift
        // everything down by one slot.
        Buffer = new byte[MaxPosition + 1][];
        BufferPosition = 0;
    }

    /// <summary>Compresses a frame into the buffer, evicting the oldest
    /// frame once the buffer is full.</summary>
    public void AddBitmap(Bitmap bitmap)
    {
        try
        {
            // If we haven't reached the maximum buffer size, keep adding it as normal.
            if (BufferPosition < MaxPosition)
            {
                Buffer[BufferPosition++] = Compression.Compress(bitmap);
            }
            else
            {
                // Full: drop the oldest frame by shifting everything down one.
                // The original allocated a whole byte[MaxPosition + 1][] temp
                // array and copied twice for EVERY frame; a single in-place
                // Array.Copy (which handles overlapping ranges) produces the
                // identical result with no per-frame allocation.
                // NOTE(review): a true circular buffer would avoid even this
                // O(n) shift — worth considering next.
                Buffer[MaxPosition] = Compression.Compress(bitmap);
                Array.Copy(Buffer, 1, Buffer, 0, MaxPosition);
                Buffer[MaxPosition] = null; // scratch slot cleared, as before
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    }
}
所以真正的问题是,我如何进一步减少缓冲区的内存占用,但仍然随时将最后 30 秒的视频保留在内存中?
这周我有点筋疲力尽,看不出可能缺少什么。欢迎提出任何建议!
一些简单的数学表明,1920x1080x24 位颜色、15fps 的高清视频持续 60 秒大约为 5.3 GB。您正在进行一些帧压缩以使用 3GB。
VideoFileWriter
(为什么这个变量不是函数的局部变量?)正在使用默认的 AVI 视频编解码器,它也会在帧之间进行压缩。由于帧大部分可能是静态的,因此可以节省很多空间。
我建议找到一种方法将内存中的视频保存为压缩视频流。