Processing - how can I record, save and play a video in the same sketch?
I can already record and save a video by pressing a key to start recording and pressing it again to stop and export. If I stop the Processing sketch and restart it, I can play the video back. The problem is that while the sketch is still running, the video file is created in the data folder but never finalized: it stays at around 50 bytes and shows no thumbnail. As soon as I stop the sketch, the video is finished, the thumbnail appears in my folder, the size grows to around 600 KB, and the file plays fine. So right now I have to stop and restart my sketch to finalize the video. Is there another way to finalize the video and play it back immediately after I am done recording? In short: I want my sketch to show the webcam image, record a video when I press a key or click the mouse, and then play that video back. Is that possible?
Here is my current code:
import com.hamoid.*;
import processing.video.*;
import ddf.minim.*;
Minim minim;
AudioPlayer player;
AudioInput in;
AudioRecorder recorder;
Movie myMovie;
Movie myMovie1;
Movie myMovie2;
Movie myMovie3;
int currentScreen;
int videoCounter = 0;
VideoExport videoExport;
boolean recording = false;
Capture theCap;
Capture cam;
int i = 0;
int countname; //change the name
int name = 000000; //set the number in key's' function
// change the file name
void newFile()
{
countname =( name + 1);
recorder = minim.createRecorder(in, "file/Sound" + countname + ".wav", true);
// println("file/" + countname + ".wav");
}
void setup() {
size(500,500);
frameRate(30);
noStroke();
smooth();
//myMovie = new Movie(this, "video0.mp4");
//myMovie.loop();
//myMovie1 = new Movie(this, "video1.mp4");
//myMovie1.loop();
//myMovie2 = new Movie(this, "video2.mp4");
//myMovie1.loop();
//myMovie3 = new Movie(this, "video3.mp4");
//myMovie1.loop();
//if (videoCounter >= 1){
//myMovie = new Movie(this, "video0.mp4");
//myMovie.loop();
//}
String[] cameras = Capture.list();
if (cameras.length == 0) {
println("There are no cameras available for capture.");
exit();
} else {
println("Available cameras:");
for (int i = 0; i < cameras.length; i++) {
println(cameras[i]);
}
// The camera can be initialized directly using an
// element from the array returned by list():
//cam = new Capture(this, cameras[3]); //built in mac cam "isight"
cam = new Capture(this, 1280, 960, "USB-camera"); // external camera (Lex), left USB port
cam.start();
}
println("Druk op R om geluid en video op te nemen.Druk nog een keer op R om het opnemen te stoppen en druk op S om het op te slaan Druk vervolgens op Z om verder te gaan.");
videoExport = new VideoExport(this, "data/video" + i + ".mp4");
minim = new Minim(this);
player = minim.loadFile("file/Sound1.wav");
// get a stereo line-in: sample buffer length of 2048
// default sample rate is 44100, default bit depth is 16
in = minim.getLineIn(Minim.STEREO, 2048);
// create a recorder that will record from the input to the filename specified, using buffered recording
// buffered recording means that all captured audio will be written into a sample buffer
// then when save() is called, the contents of the buffer will actually be written to a file
// the file will be located in the sketch's root folder.
newFile();//go to change file name
textFont(createFont("SanSerif", 12));
}
void draw() {
switch(currentScreen){
case 0: drawScreenZero(); break; //camera
case 1: drawScreenOne(); break; //1 video
case 2: drawScreenZero(); break; //camera
case 3: drawScreenTwo(); break; // 2 video's
case 4: drawScreenZero(); break; //camera
case 5: drawScreenThree(); break; //3 video's
case 6: drawScreenZero(); break; //camera
case 7: drawScreenFour(); break; //4 video's
default: background(0); break;
}
}
void mousePressed() {
currentScreen++;
if (currentScreen > 7) { currentScreen = 0; }
}
void drawScreenZero() {
//println("drawScreenZero camera");
if (cam.available() == true) {
cam.read();
}
image(cam, 0,0,width, height);
// The following does the same, and is faster when just drawing the image
// without any additional resizing, transformations, or tint.
//set(0, 0, cam);
if (recording) {
videoExport.saveFrame();
}
for(int i = 0; i < in.bufferSize() - 1; i++)
{
line(i, 50 + in.left.get(i)*50, i+1, 50 + in.left.get(i+1)*50);
line(i, 150 + in.right.get(i)*50, i+1, 150 + in.right.get(i+1)*50);
}
if ( recorder.isRecording() )
{
text("Aan het opnemen...", 5, 15);
text("Druk op R als je klaar bent met opnemen en druk op S om het op te slaan.", 5, 30);
}
else
{
text("Gestopt met opnemen. Druk op R om op te nemen, druk op S om op te slaan.", 5, 15);
}
}
void drawScreenOne() {
background(0,255,0);
//fill(0);
//rect(250,40,250,400);
//println("drawScreenOne 1 video");
if (videoCounter >= 1){
myMovie = new Movie(this, "video0.mp4");
myMovie.loop();
image(myMovie, 0,0, (width/2),(height/2));
player.play();
} else if (videoCounter == 0) {
text("geen video", 5, 15);
}
}
void drawScreenTwo(){
background(0,0,255);
//println("drawScreenTwo 2 videos");
//triangle(150,100,150,400,450,250);
//image(myMovie, 0,0, (width/2),(height/2));
//image(myMovie1, (width/2),(height/2),(width/2),(height/2));
}
void drawScreenThree(){
//fill(0);
//rect(250,40,250,400);
background(255,0,0);
println("drawScreenThree 3 videos");
//image(myMovie, 0,0, (width/2),(height/2));
//image(myMovie1, (width/2),(height/2),(width/2),(height/2));
//image(myMovie, (width/2),0, (width/2),(height/2));
}
void drawScreenFour(){
//triangle(150,100,150,400,450,250);
background(0,0,255);
//println("drawScreenFour 4 videos");
//image(myMovie, 0,0, (width/2),(height/2));
//image(myMovie1, (width/2),(height/2),(width/2),(height/2));
//image(myMovie, (width/2),0, (width/2),(height/2));
//image(myMovie1, 0,(height/2),(width/2),(height/2));
}
void keyPressed() {
if (key == 'r' || key == 'R') {
recording = !recording;
println("Recording is " + (recording ? "ON" : "OFF"));
} else if (key == 's' || key == 'S') {
i++;
videoExport = new VideoExport(this, "video" + i + ".mp4");
videoCounter++;
println(videoCounter);
//currentScreen++;
//if (currentScreen > 7) { currentScreen = 0; }
} else if (key == 'z' || key == 'Z') {
currentScreen++;
if (currentScreen > 7) { currentScreen = 0; }
}
}
void movieEvent(Movie m) {
m.read();
}
void keyReleased()
{
if ( key == 'r' )
{
// to indicate that you want to start or stop capturing audio data, you must call
// beginRecord() and endRecord() on the AudioRecorder object. You can start and stop
// as many times as you like, the audio data will be appended to the end of the buffer
// (in the case of buffered recording) or to the end of the file (in the case of streamed recording).
if ( recorder.isRecording() )
{
recorder.endRecord();
}
else
{
/*#######################################*/
newFile();
/*#######################################*/
recorder.beginRecord();
}
}
if ( key == 's' )
{
// we've filled the file out buffer,
// now write it to the file we specified in createRecorder
// in the case of buffered recording, if the buffer is large,
// this will appear to freeze the sketch for sometime
// in the case of streamed recording,
// it will not freeze as the data is already in the file and all that is being done
// is closing the file.
// the method returns the recorded audio as an AudioRecording,
// see the example AudioRecorder >> RecordAndPlayback for more about that
name++; //change the file name, everytime +1
recorder.save();
println("Done saving.");
println(name);//check the name
}
}
void stop()
{
// always close Minim audio classes when you are done with them
in.close();
minim.stop();
super.stop();
}
Take a look at the reference for the VideoExport library; it really only contains a single class. That reference shows us this function:
dispose()
Called automatically by Processing to clean up before shut down
Then we can look at the source of the VideoExport class to see what that function actually does:
public void dispose() {
  if (ffmpeg != null) {
    try {
      ffmpeg.flush();
      ffmpeg.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
  if (process != null) {
    process.destroy();
  }
}
So now we know that dispose() calls flush() and close() on ffmpeg, which is an OutputStream, and then destroys the ffmpeg process. We also know that dispose() is normally only called when the sketch shuts down.
So the first thing I would try is calling dispose() yourself whenever you want to finalize a video, right after you stop recording.
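Here is a minimal, untested sketch of that idea (not your full sketch: the file names, the videoIndex counter and the playbackMovie variable are placeholders I made up). It exports the canvas while recording is on, calls dispose() when you toggle recording off, loads the finished file into a Movie for immediate playback, and prepares a fresh VideoExport for the next take:
import com.hamoid.*;
import processing.video.*;

Capture cam;
VideoExport videoExport;
Movie playbackMovie;      // plays back the last finished recording (placeholder name)
boolean recording = false;
int videoIndex = 0;

void setup() {
  size(500, 500);
  cam = new Capture(this, 640, 480);
  cam.start();
  videoExport = new VideoExport(this, "data/video" + videoIndex + ".mp4");
}

void draw() {
  if (cam.available()) {
    cam.read();
  }
  if (playbackMovie != null) {
    // playback mode: show the last finished recording
    image(playbackMovie, 0, 0, width, height);
  } else {
    // camera mode: show the live image and export frames while recording
    image(cam, 0, 0, width, height);
    if (recording) {
      videoExport.saveFrame();
    }
  }
}

void keyPressed() {
  if (key == 'r' || key == 'R') {
    recording = !recording;
    if (recording) {
      playbackMovie = null;     // back to the camera view for the new take
    } else {
      // finish the current file: dispose() flushes and closes the ffmpeg stream
      videoExport.dispose();
      // the file should now be playable without restarting the sketch
      playbackMovie = new Movie(this, "video" + videoIndex + ".mp4");
      playbackMovie.loop();
      // prepare a fresh exporter for the next take
      videoIndex++;
      videoExport = new VideoExport(this, "data/video" + videoIndex + ".mp4");
    }
  }
}

void movieEvent(Movie m) {
  m.read();
}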
If that doesn't work, or if it throws other exceptions, you might want to look for a different video library that lets you finalize files on command, or you could even use VideoExport as inspiration and write the frames to ffmpeg yourself; there really isn't much to it.
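For completeness, here is a rough, untested sketch of what "use VideoExport as inspiration" could look like: launch an ffmpeg process that reads raw RGB frames from stdin, write the canvas pixels to that stream every frame, and flush and close the stream whenever you want the file finished. It assumes ffmpeg is installed and on the PATH, and the function names (startExport, exportFrame, endExport) are placeholders, not part of your sketch or the library:
// Rough sketch in the spirit of VideoExport; ffmpeg must be reachable on the PATH.
import java.io.OutputStream;

Process ffmpegProcess;
OutputStream ffmpegInput;

void startExport(String fileName, int w, int h, int fps) {
  try {
    ProcessBuilder pb = new ProcessBuilder(
      "ffmpeg", "-y",
      "-f", "rawvideo", "-pix_fmt", "rgb24",
      "-s", w + "x" + h, "-r", str(fps),
      "-i", "-",                              // read frames from stdin
      "-pix_fmt", "yuv420p", fileName);
    ffmpegProcess = pb.start();
    ffmpegInput = ffmpegProcess.getOutputStream();
  } catch (Exception e) {
    e.printStackTrace();
  }
}

void exportFrame() {
  // convert the current canvas to packed RGB bytes and pipe them to ffmpeg
  loadPixels();
  byte[] bytes = new byte[pixels.length * 3];
  for (int p = 0; p < pixels.length; p++) {
    int c = pixels[p];
    bytes[p * 3]     = (byte) ((c >> 16) & 0xFF); // red
    bytes[p * 3 + 1] = (byte) ((c >> 8) & 0xFF);  // green
    bytes[p * 3 + 2] = (byte) (c & 0xFF);         // blue
  }
  try {
    ffmpegInput.write(bytes);
  } catch (Exception e) {
    e.printStackTrace();
  }
}

void endExport() {
  try {
    // the same cleanup dispose() does, but callable whenever you like
    ffmpegInput.flush();
    ffmpegInput.close();
    ffmpegProcess.waitFor();
  } catch (Exception e) {
    e.printStackTrace();
  }
}
Call startExport("data/video0.mp4", width, height, 30) when recording begins, exportFrame() at the end of draw() while recording, and endExport() when you stop; once endExport() returns, the file should be playable right away.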