I am building an application for dubbing mp4 videos. The goal is that, while the user watches an mp4 video, they can record their voice. My pipeline then takes that audio file and feeds it into an adder element, so that all the recordings can be heard at the same time. Ultimately I want to mux the audio and the video into an mp4 file.
OK. My problem is that the pipeline crashes when recording stops and I add a file to the adder. This is what I do:
1. Pause the pipeline.
2. Remove the "record bin" from the pipeline.
3. Set the "record bin" state to NULL.
4. Send an EOS event to the "record bin".
5. Add the "audio bin" (adder) to the pipeline.
6. Insert the required silence into the wav file.
7. Add the audio file to the adder in the "audio bin". // maybe this is where the problem is
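Condensed, steps 1-7 map onto the following calls in the button handler further down (field and method names are the ones from my code below; this is only a restatement of the sequence, not a fix):

videopipeline.set_state(State.PAUSED)        // 1. pause the pipeline
videopipeline.remove(recordbin)              // 2. take the record bin out
recordbin.set_state(State.NULL)              // 3. stop it
recordbin.send_event(new Event.eos())        // 4. send EOS to it
videopipeline.add(audiobin)                  // 5. put the mixing bin back
// 6. pad the new wav with silence (done externally with sox)
// 7. wrap the wav in an AudioFilesSrc bin and plug it into the adder
on_insertar()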
My Vala/Genie code is here:
uses
Gtk
Gst
Gee
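// helper bin: filesrc ! wavparse ! audioconvert reading one WAV file; its output is exposed through a ghost pad so it can be plugged into one input of the adder in the main pipeline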
class AudioFilesSrc : Gst.Bin
wavparse: Element
src:Element
audioconvert: Element
srcpad: Pad
sinkpad: Pad
def OnDynamicPad (element:Element, zz:Pad)
var opad = audioconvert.get_static_pad("sink");
zz.link(opad);
def open(p:Gst.Bin,mixer:Element,s1:string)
src = Gst.ElementFactory.make("filesrc", "src1");
wavparse = ElementFactory.make("wavparse","wavparse");
audioconvert = ElementFactory.make("audioconvert","audioconvert");
wavparse.pad_added.connect(OnDynamicPad);
this.add_many(src,wavparse,audioconvert);
src.link_many(wavparse,audioconvert);
src.set("location",s1);
// add this bin to the main pipeline
p.add(this)
// take audioconvert's src pad and make it the output (ghost pad) of this bin
srcpad = new Gst.GhostPad("src", audioconvert.get_static_pad("src"));
this.add_pad(srcpad);
print ".-.----------------------------- abierto "+s1
def conecta(mixer:Element)
// request a new input pad on the mixer
sinkpad = mixer.get_request_pad("sink%d")
//this.get_pad("src").link(this.sinkpad)
srcpad.link(this.sinkpad)
//this.srcpad.set_blocked(false)
print ".-.----------------------------- conectado"
def close(p:Gst.Bin,mixer:Element)
p.set_state(State.PAUSED)
this.srcpad.set_blocked(true)
this.set_state(State.NULL)
this.srcpad.unlink(this.sinkpad)
mixer.release_request_pad (this.sinkpad)
init
Gtk.init (ref args)
Gst.init (ref args);
var prueba = new ventana ()
prueba.show_all ()
Gtk.main ();
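// main window: builds the video pipeline plus the mixing bin (audiobin) and the recording bin (recordbin), and drives them from the GUI buttons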
class ventana : Window
drawing_area:private DrawingArea;
videopipeline: private Pipeline ;
recordbin: Gst.Bin
videobin: Gst.Bin
audiobin: Gst.Bin
volume: private Element ;
videosrc :private Element;
videosink: private Element;
videodec: private Element;
vaudiosink: private Element;
vaudioparser: Element;
vaudiodec: Element;
vaudioadder: private Element;
vaudioarchivos:list of AudioFilesSrc
recordsrc :private Element;
recordsink: private Element;
recordconvert: private Element;
recordencoder: private Element;
comienzo_grabacion:float
xid :private ulong ;
reloj:uint
position: float
duracion:float
bus:Gst.Bus
bus2:Gst.Bus
msg:Gst.Message
msg2:Gst.Message
seek_enabled:bool
seek_enabled2:bool
scale_1:Scale
estado:string
lugar:int
numgrab:int=0
archivos_audio:list of string
button:Button
button1:Button
button2:Button
button3:Button
button4:Button
button5:Button
button6:Button
button7:Button
button8:Button
init
reloj = Timeout.add(1000, mover)
title = "Bikoizketa"
default_height = 250
default_width = 450
window_position = WindowPosition.CENTER
comienzo_grabacion=-1
// video pipeline
duracion=Gst.CLOCK_TIME_NONE;
this.videopipeline = new Pipeline ("mypipeline");
videobin= new Gst.Bin("videobin")
this.videosrc = ElementFactory.make ("filesrc", "filesrc2")
this.videosrc.set("location","gontzal3.mp4");
this.videodec = ElementFactory.make ("decodebin", "dec");
this.videosink = ElementFactory.make ("xvimagesink", "videosink");
this.videosink.set("force-aspect-ratio",true)
this.videodec.pad_added.connect(OnDynamicPad);
this.videobin.add_many (videosrc,videodec,videosink)
this.videosrc.link_many (videodec,videosink)
this.videopipeline.add(videobin)
this.audiobin= new Gst.Bin("audiobin")
this.vaudioadder = ElementFactory.make("adder","mixer");
this.volume= ElementFactory.make("volume","volume");
this.volume.set_property("volume",0.5)
this.vaudiosink= ElementFactory.make("alsasink","alsasink");
this.audiobin.add_many (vaudioadder,volume,vaudiosink)
vaudioadder.link_many(volume,vaudiosink)
this.videopipeline.add(audiobin)
// adding audio files
vaudioarchivos=new list of AudioFilesSrc
vaudioarchivos.add (new AudioFilesSrc())
vaudioarchivos.last().open(audiobin,vaudioadder,"silencios.wav")
vaudioarchivos.last().conecta(vaudioadder)
numgrab++
this.recordbin= new Gst.Bin("recorder")
this.recordsrc= ElementFactory.make ("alsasrc","alsasrc")
this.recordconvert=ElementFactory.make ("audioconvert","audioconvert")
this.recordencoder = Gst.ElementFactory.make("wavenc", "encoder")
this.recordsink= ElementFactory.make ("filesink","filesink")
this.recordsink.set ("location","grabacionx.wav")
this.recordbin.add_many (this.recordsrc,this.recordconvert, this.recordencoder, this.recordsink);
this.recordsrc.link_many(this.recordconvert,recordencoder,recordsink)
//videopipeline.add(recordbin)
// create the buttons with their labels
button = new Button.with_label ("grabar")
button2 = new Button.with_label ("parar")
button3 = new Button.with_label ("play")
button4 = new Button.with_label ("pausar")
var box= new Box (Gtk.Orientation.HORIZONTAL, 4)
var box1= new Box (Gtk.Orientation.HORIZONTAL, 4)
var box2= new Box (Gtk.Orientation.VERTICAL, 4)
scale_1= new Scale.with_range (Orientation.HORIZONTAL,0,1000,0.1)
scale_1.value_changed.connect(on_scale)
this.drawing_area = new DrawingArea ();
this.drawing_area.realize.connect(on_realize);
// connect each button's click event to its handler
button.clicked.connect (on_grabar)
button2.clicked.connect (on_parar)
button4.clicked.connect (on_pausa)
button3.clicked.connect (on_play)
// quit the main loop when the window is closed
destroy.connect(Gtk.main_quit)
// add the widgets to the window
this.add(box2)
box.add(button3)
box.add(button2)
box.add(button4)
box.add(button)
box2.pack_start (drawing_area, true, true, 0);
box2.pack_start (box,false, false, 0);
box2.pack_start (scale_1,false, true, 0);
estado="STOP"
bus = this.videopipeline.get_bus()
bus.add_signal_watch()
msg = bus.timed_pop_filtered (10,Gst.MessageType.STATE_CHANGED | Gst.MessageType.ERROR | Gst.MessageType.EOS );
bus.message.connect(on_msg)
def on_msg(m:Gst.Message)
if m.type== Gst.MessageType.STATE_CHANGED
old_state:Gst.State;
new_state:Gst.State;
pending_state:Gst.State;
m.parse_state_changed (out old_state, out new_state, out pending_state);
if (m.src == this.videosink)
// Remember whether we are in the PLAYING state or not:
if (new_state == Gst.State.PLAYING)
q : Gst.Query = new Gst.Query.seeking (Gst.Format.TIME);
start:int64;
end:int64;
if ( this.videosink.query (q) )
q.parse_seeking (null, out this.seek_enabled, out start, out end);
if seek_enabled
pass
if (new_state == Gst.State.READY)
//print "STOP"
pass
if (new_state == Gst.State.PAUSED)
print "PAUSE"
q : Gst.Query = new Gst.Query.seeking (Gst.Format.TIME);
start:int64;
end:int64;
if ( this.videosink.query (q) )
q.parse_seeking (null, out this.seek_enabled, out start, out end);
if seek_enabled
//print "enable cuando pause"+start.to_string()+"-"+end.to_string()
pass
def on_scale( )// called when the user moves the scale
if seek_enabled
if this.videopipeline.seek_simple(Gst.Format.TIME, SeekFlags.FLUSH| Gst.SeekFlags.ACCURATE, (int64)(scale_1.get_value()*Gst.SECOND))
print "moviendo video"
else
print "video no se puede mover"
def mover():bool // update the scale value while the video is playing
//disconnect the value-changed handler so this programmatic update is not mistaken for a user change
scale_1.value_changed.disconnect(on_scale)
var format = Gst.Format.TIME
position=0
if this.videopipeline.query_position(ref format, out position)
scale_1.set_value(position/Gst.MSECOND/1000)
duracion=0;
if this.videopipeline.query_duration(ref format, out duracion)
duracion=duracion/Gst.MSECOND/1000
if estado!="STOP" do scale_1.set_range(0,duracion)
scale_1.value_changed.connect(on_scale)
return true
def on_realize()
this.xid = (ulong)Gdk.X11Window.get_xid(this.drawing_area.get_window());
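// "grabar" button: start a recording (swap audiobin out and recordbin in), or, if already recording, stop it and hand the new file to on_insertar()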
def on_grabar (btn : Button)
if estado=="STOP" or estado=="PAUSA"
recordsink.set("location","grabacion"+(numgrab).to_string()+".wav")
this.videopipeline.add(this.recordbin)
this.videopipeline.remove(audiobin)
print "grabando"
button.set_label("parar")
var xoverlay = this.videosink as XOverlay;
xoverlay.set_xwindow_id (this.xid);
this.videopipeline.set_state (State.PLAYING);
estado="REC"
var format = Gst.Format.TIME
position: int64
if this.videopipeline.query_position(ref format, out position)
comienzo_grabacion=(position/Gst.MSECOND/1000)
else
comienzo_grabacion=-1
else if estado=="REC"
// stopping the recording
estado="PLAY"
button.set_label("grabar")
videopipeline.set_state(State.PAUSED)
this.videopipeline.remove(recordbin)
this.recordbin.set_state(State.NULL)
this.recordbin.send_event(new Event.eos())
this.videopipeline.add(audiobin)
this.on_insertar()
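// pad the new recording with leading silence equal to the position where recording started (via sox), then add it as one more input of the adder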
def on_insertar()
if comienzo_grabacion!=-1
try
Process.spawn_command_line_sync ("sox grabacion"+numgrab.to_string()+".wav grabacion_x.wav pad "+((int64)comienzo_grabacion).to_string()+"@0")
Process.spawn_command_line_sync ("mv grabacion_x.wav grabacion"+numgrab.to_string()+".wav")
pass
except
pass
print "grabacion a añadir :"+numgrab.to_string()
vaudioarchivos.add((new AudioFilesSrc()))
vaudioarchivos.last().open(this.audiobin,this.vaudioadder,"grabacion"+numgrab.to_string()+".wav")
vaudioarchivos.last().conecta(this.vaudioadder)
numgrab+=1
def on_pausa (btn:Button)
if estado=="PLAY"
print "pausando"
this.videopipeline.set_state (State.PAUSED);
estado="PAUSA"
def on_parar (btn : Button)
if estado=="PLAY" or estado=="PAUSA"
var xoverlay = this.videosink as XOverlay;
xoverlay.set_xwindow_id (this.xid);
this.videopipeline.set_state (State.READY);
this.scale_1.set_value(0)
estado="STOP"
def on_play (btn : Button)
if estado=="STOP" or estado=="PAUSA"
print "tocando"
var xoverlay = this.videosink as XOverlay;
xoverlay.set_xwindow_id (this.xid);
this.videopipeline.set_state (State.PLAYING);
estado="PLAY"
def OnDynamicPad (element:Element, zz:Pad)
var opad = this.videosink.get_static_pad("sink");
zz.link(opad);
Answer (score: 0):
I made a few small modifications to your code and compiled it against gstreamer-1.0 with the following build command:
valac --pkg gdk-x11-3.0 --pkg gstreamer-1.0 --pkg gstreamer-video-1.0 --pkg gee-0.8 --pkg gtk+-3.0 bikoizketa.gs
If I hit the "grabar" button, the video plays and I can record myself talking. When I hit the same button (now "parar"), it saves that recording to a file named grabacion1.wav, which plays back fine but contains only the recording from my microphone.
The modifications I made were mostly updating the code to the GStreamer 1.0 API and the new GDK X11 API. I also added a check for negative values on the Scale (line 254), because a stream in the recording state returns -1 for its position (probably because the stream duration may be infinite).
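For illustration, a guard of roughly this shape in mover() would do it (my own paraphrase of that check based on the listing above, not the literal diff):

position: int64
if this.videopipeline.query_position(ref format, out position) and position >= 0
    scale_1.set_value(position/Gst.MSECOND/1000)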
I also noticed that you have pass statements in your try-except clauses, which prevents them from throwing errors. After removing those, I get the following error if I record something, hit the "pausar" button, and then record again:
sox WARN wav: Premature EOF on .wav input file
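Incidentally, surfacing those errors only takes printing them in the except clause instead of passing; a minimal sketch (example file names and pad length, and assuming GLib's SpawnError, which spawn_command_line_sync can throw):

try
    Process.spawn_command_line_sync ("sox grabacion1.wav grabacion_x.wav pad 3@0")
except e : SpawnError
    print "sox failed: %s", e.message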
If I understood correctly, you ultimately want to record one take each time "grabar" is pressed, save the takes to a series of files grabacion1.wav, grabacion2.wav, and so on, and, when they are all done, combine them with the video and save the output to a file? If so, I am not quite sure where that last step is supposed to happen.