我已经能够通过按下按钮并再次按下按钮来录制和保存视频,以停止录制并导出视频。如果我停止处理草图并再次启动它,我可以播放视频。那是因为当我在处理中录制视频并停止录制时,视频文件正在数据文件夹中制作但尚未完成。就像视频的大小大约是50个字节一样,当我的处理草图仍处于活动状态时,没有可见的缩略图。但是一旦我停止处理草图,就会制作视频。然后缩略图图像将显示在我的文件夹中,大小增加到大约600kb,文件可播放。所以我需要停止并重新启动我的草图才能完成视频。有没有其他方法可以完成我的视频,并且在我完成录制后能够播放我的视频?所以简而言之,我希望我的草图能够打开网络摄像头图像。当我按下按钮或单击鼠标时,录制视频并播放视频。那可能吗?
这是我到目前为止的代码:
import com.hamoid.*;
import processing.video.*;
import ddf.minim.*;
Minim minim;
AudioPlayer player;
AudioInput in;
AudioRecorder recorder;
Movie myMovie;
Movie myMovie1;
Movie myMovie2;
Movie myMovie3;
int currentScreen;
int videoCounter = 0;
VideoExport videoExport;
boolean recording = false;
Capture theCap;
Capture cam;
int i = 0;
int countname; //change the name
int name = 000000; //set the number in key's' function
// change the file name
void newFile()
{
countname =( name + 1);
recorder = minim.createRecorder(in, "file/Sound" + countname + ".wav", true);
// println("file/" + countname + ".wav");
}
void setup() {
size(500,500);
frameRate(30);
noStroke();
smooth();
//myMovie = new Movie(this, "video0.mp4");
//myMovie.loop();
//myMovie1 = new Movie(this, "video1.mp4");
//myMovie1.loop();
//myMovie2 = new Movie(this, "video2.mp4");
//myMovie1.loop();
//myMovie3 = new Movie(this, "video3.mp4");
//myMovie1.loop();
//if (videoCounter >= 1){
//myMovie = new Movie(this, "video0.mp4");
//myMovie.loop();
//}
String[] cameras = Capture.list();
if (cameras.length == 0) {
println("There are no cameras available for capture.");
exit();
} else {
println("Available cameras:");
for (int i = 0; i < cameras.length; i++) {
println(cameras[i]);
}
// The camera can be initialized directly using an
// element from the array returned by list():
//cam = new Capture(this, cameras[3]); //built in mac cam "isight"
cam = new Capture(this, 1280, 960, "USB-camera"); //externe camera Lex, linker USB
cam.start();
}
println("Druk op R om geluid en video op te nemen.Druk nog een keer op R om het opnemen te stoppen en druk op S om het op te slaan Druk vervolgens op Z om verder te gaan.");
videoExport = new VideoExport(this, "data/video" + i + ".mp4");
minim = new Minim(this);
player = minim.loadFile("file/Sound1.wav");
// get a stereo line-in: sample buffer length of 2048
// default sample rate is 44100, default bit depth is 16
in = minim.getLineIn(Minim.STEREO, 2048);
// create a recorder that will record from the input to the filename specified, using buffered recording
// buffered recording means that all captured audio will be written into a sample buffer
// then when save() is called, the contents of the buffer will actually be written to a file
// the file will be located in the sketch's root folder.
newFile();//go to change file name
textFont(createFont("SanSerif", 12));
}
void draw() {
switch(currentScreen){
case 0: drawScreenZero(); break; //camera
case 1: drawScreenOne(); break; //1 video
case 2: drawScreenZero(); break; //camera
case 3: drawScreenTwo(); break; // 2 video's
case 4: drawScreenZero(); break; //camera
case 5: drawScreenThree(); break; //3 video's
case 6: drawScreenZero(); break; //camera
case 7: drawScreenFour(); break; //4 video's
default: background(0); break;
}
}
void mousePressed() {
currentScreen++;
if (currentScreen > 7) { currentScreen = 0; }
}
void drawScreenZero() {
//println("drawScreenZero camera");
if (cam.available() == true) {
cam.read();
}
image(cam, 0,0,width, height);
// The following does the same, and is faster when just drawing the image
// without any additional resizing, transformations, or tint.
//set(0, 0, cam);
if (recording) {
videoExport.saveFrame();
}
for(int i = 0; i < in.bufferSize() - 1; i++)
{
line(i, 50 + in.left.get(i)*50, i+1, 50 + in.left.get(i+1)*50);
line(i, 150 + in.right.get(i)*50, i+1, 150 + in.right.get(i+1)*50);
}
if ( recorder.isRecording() )
{
text("Aan het opnemen...", 5, 15);
text("Druk op R als je klaar bent met opnemen en druk op S om het op te slaan.", 5, 30);
}
else
{
text("Gestopt met opnemen. Druk op R om op te nemen, druk op S om op te slaan.", 5, 15);
}
}
void drawScreenOne() {
background(0,255,0);
//fill(0);
//rect(250,40,250,400);
//println("drawScreenOne 1 video");
if (videoCounter >= 1){
myMovie = new Movie(this, "video0.mp4");
myMovie.loop();
image(myMovie, 0,0, (width/2),(height/2));
player.play();
} else if (videoCounter == 0) {
text("geen video", 5, 15);
}
}
void drawScreenTwo(){
background(0,0,255);
//println("drawScreenTwo 2 videos");
//triangle(150,100,150,400,450,250);
//image(myMovie, 0,0, (width/2),(height/2));
//image(myMovie1, (width/2),(height/2),(width/2),(height/2));
}
void drawScreenThree(){
//fill(0);
//rect(250,40,250,400);
background(255,0,0);
println("drawScreenThree 3 videos");
//image(myMovie, 0,0, (width/2),(height/2));
//image(myMovie1, (width/2),(height/2),(width/2),(height/2));
//image(myMovie, (width/2),0, (width/2),(height/2));
}
void drawScreenFour(){
//triangle(150,100,150,400,450,250);
background(0,0,255);
//println("drawScreenFour 4 videos");
//image(myMovie, 0,0, (width/2),(height/2));
//image(myMovie1, (width/2),(height/2),(width/2),(height/2));
//image(myMovie, (width/2),0, (width/2),(height/2));
//image(myMovie1, 0,(height/2),(width/2),(height/2));
}
void keyPressed() {
if (key == 'r' || key == 'R') {
recording = !recording;
println("Recording is " + (recording ? "ON" : "OFF"));
} else if (key == 's' || key == 'S') {
i++;
videoExport = new VideoExport(this, "video" + i + ".mp4");
videoCounter++;
println(videoCounter);
//currentScreen++;
//if (currentScreen > 7) { currentScreen = 0; }
} else if (key == 'z' || key == 'Z') {
currentScreen++;
if (currentScreen > 7) { currentScreen = 0; }
}
}
void movieEvent(Movie m) {
m.read();
}
void keyReleased()
{
if ( key == 'r' )
{
// to indicate that you want to start or stop capturing audio data, you must call
// beginRecord() and endRecord() on the AudioRecorder object. You can start and stop
// as many times as you like, the audio data will be appended to the end of the buffer
// (in the case of buffered recording) or to the end of the file (in the case of streamed recording).
if ( recorder.isRecording() )
{
recorder.endRecord();
}
else
{
/*#######################################*/
newFile();
/*#######################################*/
recorder.beginRecord();
}
}
if ( key == 's' )
{
// we've filled the file out buffer,
// now write it to the file we specified in createRecorder
// in the case of buffered recording, if the buffer is large,
// this will appear to freeze the sketch for sometime
// in the case of streamed recording,
// it will not freeze as the data is already in the file and all that is being done
// is closing the file.
// the method returns the recorded audio as an AudioRecording,
// see the example AudioRecorder >> RecordAndPlayback for more about that
name++; //change the file name, everytime +1
recorder.save();
println("Done saving.");
println(name);//check the name
}
}
void stop()
{
// always close Minim audio classes when you are done with them
in.close();
minim.stop();
super.stop();
}
答案 0 :(得分:1)
查看VideoExport
库the reference,这实际上只是一个类。
该参考资料向我们展示了这个功能:
dispose()
由Processing自动调用以在关闭前清理
然后,我们可以查看VideoExport
类的the source来查看该函数的作用:
public void dispose() {
if (ffmpeg != null) {
try {
ffmpeg.flush();
ffmpeg.close();
} catch (Exception e) {
e.printStackTrace();
}
}
if (process != null) {
process.destroy();
}
}
现在我们知道dispose()
函数正在flush()
上调用ffmpeg
,这是一个OutputStream
。我们也知道dispose()
函数只在草图的末尾调用。
因此,我要尝试的第一件事就是在您想要完成视频时调用dispose()
函数。
如果这不起作用,或者它导致其他异常,那么您可能想要找到一个允许您将它们保存在命令中的其他视频库,或者您甚至可以使用{{1的源代码创建自己的视频库作为灵感。真的没有多少。