I'm writing a class in Vala that loads and plays WAV files using GStreamer.
class Sample : Object
{
    private Gst.Pipeline gst_pipeline;
    private Gst.Bus gst_pipeline_bus;
    private Gst.Element? reader;
    private Gst.Element? decoder;
    private Gst.Element? convertor;
    private Gst.Element? output;

    /* Read-only values */
    private bool __playing = false;
    private string? __source = null;
    private int64 __length = -1;

    public bool playing {
        get { return __playing; }
    }

    public string? source {
        get { return __source; }
    }

    public int64 position {
        get {
            if (this.__source == null)
            {
                return -1;
            }
            else
            {
                int64 pos;
                this.gst_pipeline.query_position(Gst.Format.TIME, out pos);
                return pos;
            }
        }
    }

    public int64 length {
        get { return __length; }
    }

    /* Methods */
    public Sample()
    {
        this.gst_pipeline = new Gst.Pipeline(null);

        /* Create the elements */
        this.reader = Gst.ElementFactory.make("filesrc", null);
        this.gst_pipeline.add(this.reader);
        this.decoder = Gst.ElementFactory.make("wavparse", null);
        this.gst_pipeline.add(this.decoder);
        this.reader.link(this.decoder);
        this.convertor = Gst.ElementFactory.make("audioconvert", null);
        this.gst_pipeline.add(this.convertor);
        this.decoder.link(this.convertor);
        this.output = Gst.ElementFactory.make("alsasink", null);
        this.gst_pipeline.add(this.output);
        this.convertor.link(this.output);

        /* Get the bus to send messages down */
        this.gst_pipeline_bus = this.gst_pipeline.get_bus();
        this.gst_pipeline_bus.add_signal_watch();
        this.gst_pipeline_bus.message.connect(this.on_gst_message);
    }

    ~Sample()
    {
        this.gst_pipeline.set_state(Gst.State.NULL);
    }

    public signal void finished(); // Emitted when the sample finishes playing
    public signal void error(Error err); // Emitted when GStreamer passes us an error message

    public void load(string path)
    {
        this.__source = path;
        /* Stop the playback */
        this.gst_pipeline.set_state(Gst.State.NULL); // This stops the playback and resets the pipeline
        this.finished();
        /* Change the location */
        this.reader.set_property("location", path);
        /* Get the sample length */
        this.gst_pipeline.set_state(Gst.State.PAUSED);
        print("\t" + this.gst_pipeline.query_duration(Gst.Format.TIME, out this.__length).to_string() + "\n");
        this.gst_pipeline.set_state(Gst.State.NULL);
    }

    public void play()
    {
        this.__playing = true;
        this.gst_pipeline.set_state(Gst.State.PLAYING);
    }

    public void pause()
    {
        this.gst_pipeline.set_state(Gst.State.PAUSED);
        this.__playing = false;
    }

    private void on_gst_message(Gst.Message message)
    {
        /* Here we handle any messages that GStreamer may send us */
        switch (message.type)
        {
            case (Gst.MessageType.EOS): // End Of Stream
            {
                this.gst_pipeline.set_state(Gst.State.NULL);
                this.__playing = false;
                this.finished();
                break;
            }
            case (Gst.MessageType.ERROR):
            {
                /* Stop playing */
                this.gst_pipeline.set_state(Gst.State.NULL);
                this.__playing = false;
                /* Get the error */
                Error err;
                message.parse_error(out err, null);
                /* Display the error */
                this.error(err);
                break;
            }
        }
    }
}
I'm using the filesrc, wavparse, audioconvert and alsasink elements to play the file.
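For anyone who wants to try the same chain quickly, here it is as a self-contained Gst.parse_launch sketch rather than my actual class (sample.wav is just a placeholder file name):

int main (string[] args)
{
    Gst.init (ref args);

    Gst.Element pipeline;
    try
    {
        /* The same chain my class builds element by element */
        pipeline = Gst.parse_launch (
            "filesrc location=sample.wav ! wavparse ! audioconvert ! alsasink");
    }
    catch (Error e)
    {
        stderr.printf ("Could not build pipeline: %s\n", e.message);
        return 1;
    }

    pipeline.set_state (Gst.State.PLAYING);

    var loop = new MainLoop ();
    pipeline.get_bus ().add_watch (Priority.DEFAULT, (bus, msg) => {
        if (msg.type == Gst.MessageType.EOS)
            loop.quit (); // stop once the file has finished playing
        return true;
    });
    loop.run ();

    pipeline.set_state (Gst.State.NULL);
    return 0;
}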
In the load method, I try to get the sample's duration by calling Gst.Element.query_duration() on the pipeline I created, but whenever I do, it returns false and sets the value I pass by reference to 0. I know the pipeline has to be prerolled before its duration can be queried, but even though I've tried to preroll my pipeline by setting its state to Gst.State.PAUSED, query_duration() keeps failing.
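Stripped down, the failing part is equivalent to this standalone sketch (same as what load() does; sample.wav stands in for the real path):

int main (string[] args)
{
    Gst.init (ref args);

    var pipeline  = new Gst.Pipeline (null);
    var reader    = Gst.ElementFactory.make ("filesrc", null);
    var decoder   = Gst.ElementFactory.make ("wavparse", null);
    var convertor = Gst.ElementFactory.make ("audioconvert", null);
    var output    = Gst.ElementFactory.make ("alsasink", null);
    pipeline.add_many (reader, decoder, convertor, output);
    reader.link (decoder);
    decoder.link (convertor);
    convertor.link (output);

    reader.set_property ("location", "sample.wav"); // placeholder path

    /* My attempt at prerolling */
    pipeline.set_state (Gst.State.PAUSED);

    int64 length = -1;
    /* This returns false and leaves length at 0 */
    bool ok = pipeline.query_duration (Gst.Format.TIME, out length);
    stdout.printf ("ok = %s, length = %s\n", ok.to_string (), length.to_string ());

    pipeline.set_state (Gst.State.NULL);
    return 0;
}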
What do I need to change in order to successfully get the duration of the loaded WAV file?