Java SourceDataLine: no update after the first buffer

Asked: 2016-05-12 20:58:23

Tags: java audio libgdx

Simplified version of the question: even though I have gotten this to work before, this code, which tries to produce noise with javax.sound.sampled.SourceDataLine, never calls update after the first buffer empties.

Edit: updated the code for 16-bit stereo. The sound plays for the number of samples defined when Output.open(format, buffersize) is called.

Here is the code sample, simplified down to a two-class project:

package javaxaudiotest;

import java.io.IOException;

public class JavaxAudioTest {

    public static void main(String[] args) throws IOException {
        JavaxAudioDevice audio = new JavaxAudioDevice();
        audio.create();
        System.out.println("Hit enter to quit.");
        int c = 'a';
        while (c != '\n') {
            c = System.in.read();
        }

        audio.dispose();
    }

}

JavaxAudioDevice, with the libGDX integration and my own project-specific code stripped out:

package javaxaudiotest;

import java.util.Arrays;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.LineEvent;
import javax.sound.sampled.LineListener;
import javax.sound.sampled.SourceDataLine;

public class JavaxAudioDevice implements LineListener {

    byte[] byteBuf = null;

    boolean quit = false;
    boolean hasquit = false;
    boolean underflow = false;

    SourceDataLine Output;
    int leader = 1024;
    boolean notDoneYet = false;
    boolean alreadyLead = false;
    byte b = 0;


    public int create() {
        quit = false;
        AudioFormat af = new AudioFormat(44100, 16, 2, true, false);
        try {
            Output = AudioSystem.getSourceDataLine(af);
        } catch (Exception e) {
            System.err.println("Failed to create audio device. " + e.getLocalizedMessage());
            return 1;
        }

        if (Output != null) {
            Output.addLineListener(this);
            try {
                Output.open(af,44100*8);
            } catch (Exception e) {
                System.err.println("Failed to open audio device. " + e.getLocalizedMessage());
                return 2;
            }
        }
        System.err.println("Created JavaxAudioDevice");
        while (!Output.isOpen());
        update(null);
        System.err.println(Output.getFormat().toString());
        return 0;
    }


    public void dispose() {
        quit = true;
        Output.close();
        Output.flush();
        Output = null;
        hasquit = true;
    }

//LineListener
    @Override
    public void update(LineEvent le) {
        if (le == null) System.err.println("Audio update");
        else System.err.println("Audio update " + le.toString());

        if ( !quit && ( le == null || le.getType() == LineEvent.Type.STOP ) ) {
            boolean uf = false;
            while (notDoneYet) {
                uf = true;
            }
            notDoneYet = true;
            if (uf) {
                underflow = true;
                System.err.println("underflow");
            }

            if (byteBuf == null) { //insert some leader tape
                short[] shortBuf = new short[44100*8];
                for (int i = 0; i < shortBuf.length; i+=2) {
                    shortBuf[i] = (short)((b++)*50);
                    shortBuf[i+1] = (short)((b++)*200);
                }

                byteBuf = new byte[shortBuf.length * 2];
                for (int i = 0; i < byteBuf.length; ++i) {
                    //little endian
                    if (i % 2 == 0) byteBuf[i] = (byte) (shortBuf[i / 2]); //low byte
                    else byteBuf[i] = (byte) (shortBuf[i / 2] >> 8); //high byte
                }
            }
            Output.write(byteBuf, 0, byteBuf.length);
            Output.start();
            byteBuf = null;
            if (!Output.isRunning()) { //debug
                System.err.println("Output not running");
            } else {
                System.err.println("Output running");
            }

            short[] shortBuf = new short[44100*8];
            for (int i = 0; i < shortBuf.length; i+=2) {
                    if (i%2 == 0) shortBuf[i] = shortBuf[i+1] = (short)((b++)*100);
                    else shortBuf[i] = shortBuf[i+1] = (short)((b++)*150);
            }

            byteBuf = new byte[shortBuf.length * 2];
            for (int i = 0; i < byteBuf.length; ++i) {
                //little endian
                if (i % 2 == 0) byteBuf[i] = (byte) (shortBuf[i / 2]); //low byte
                else byteBuf[i] = (byte) (shortBuf[i / 2] >> 8); //high byte
            }

            notDoneYet = false;
        }

    }
}

Here is some sample output:

run:
Created JavaxAudioDevice
Audio update Open event from line com.sun.media.sound.DirectAudioDevice$DirectSDL@4cac0ef5
Audio update
Output running
Audio update Start event from line com.sun.media.sound.DirectAudioDevice$DirectSDL@4cac0ef5
Hit enter to quit.
PCM_SIGNED 44100.0 Hz, 16 bit, stereo, 4 bytes/frame, little-endian

Audio update Stop event from line com.sun.media.sound.DirectAudioDevice$DirectSDL@4cac0ef5
Audio update Close event from line com.sun.media.sound.DirectAudioDevice$DirectSDL@4cac0ef5
BUILD SUCCESSFUL (total time: 7 seconds)
Edit: I replaced the "leader tape" with a copy of the sawtooth-generating loop. I also initialize byteBuf to null so that branch actually runs, and added a loop that waits for the line to open, to force the first update. I now get a short burst of sawtooth wave in my ears, but still no automatic update afterwards.

Original question follows:

I am using libGDX, and I wrote some code to produce sound asynchronously from the main thread. I used libGDX's own AsyncExecutor for the threading and its AudioDevice for the audio. AudioDevice.write(short[], int, int) blocks, so I needed to run it in another thread. That worked. Unfortunately it sometimes stutters (always when run from NetBeans). For example, when I run the gradle task :desktop:dist it produces a complete jar, and when that jar runs under Windows 10 it is clear as a bell.
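
Roughly, that earlier approach looked like the sketch below. This is a reconstruction for illustration only, not the code from my project; the class name GdxAudioSketch and the tone generator are made up, and I am using AudioDevice.writeSamples(short[], int, int), which is the actual blocking method name in libGDX, from an AsyncExecutor task so the render thread never stalls.

package javaxaudiotest;

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.audio.AudioDevice;
import com.badlogic.gdx.utils.async.AsyncExecutor;
import com.badlogic.gdx.utils.async.AsyncTask;

public class GdxAudioSketch {
    final AsyncExecutor executor = new AsyncExecutor(1); // one worker thread for audio
    volatile boolean quit = false;

    public void start() {
        executor.submit(new AsyncTask<Void>() {
            @Override
            public Void call() {
                AudioDevice device = Gdx.audio.newAudioDevice(44100, true); // 44.1 kHz mono
                short[] buf = new short[44100 / 60]; // roughly one sixtieth of a second per write
                int phase = 0;
                while (!quit) {
                    for (int i = 0; i < buf.length; ++i) {
                        // crude square wave, about 220 Hz at 44100 Hz
                        buf[i] = (short) ((phase++ % 200) < 100 ? 8000 : -8000);
                    }
                    device.writeSamples(buf, 0, buf.length); // blocks, so it paces this loop
                }
                device.dispose();
                return null;
            }
        });
    }

    public void stop() {
        quit = true;
    }
}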

So I made a desktop-specific implementation of the PCM output using javax.sound.sampled (SourceDataLine, specifically). I created a class that starts the sound and also acts as the listener for the SourceDataLine. This class lives in the desktop subproject and is handed to the core subproject when the main class is created; it implements a small interface that is part of the core. I set it up for 16-bit signed stereo PCM, and a core class generates a square wave and returns an array of shorts.

The problem: I hear nothing, and update (the LineListener callback) is never called except once at the beginning (START) and a couple of times at the end (STOP, CLOSE).

This is strange, because I used SourceDataLine in a previous project for 8-bit signed mono PCM and it worked. So I also tried testing this new version as 8-bit mono by taking every other high byte out of the short array (SourceDataLine only accepts byte arrays).

public class JavaxAudioDevice implements AudioInterface, LineListener {

    // omitted stuff

    short[] curBuf;
    byte[] byteBuf;

    SourceDataLine Output;
    int leader = 1024; //leader "tape"

    @Override
    public int create() {
        quit = false;
        AudioFormat af = new AudioFormat(44100, 16, 2, true, false);
//        AudioFormat af = new AudioFormat(44100, 8, 1, true, false);
        try {
            Output = AudioSystem.getSourceDataLine(af);
        } catch (Exception e) {
            System.err.println("Failed to create audio device. " + e.getLocalizedMessage());
            return 1;
        }

        if (Output != null) {
            try {
                Output.open(af,leader);
            } catch (Exception e) {
                System.err.println("Failed to open audio device. " + e.getLocalizedMessage());
                return 2;
            }
            Output.addLineListener(this);
        }
        System.err.println("Created JavaxAudioDevice");
        byte[] bytes = new byte[leader];
        alreadyLead = true;
        Output.write(bytes, 0, bytes.length);
        Output.start();
        System.err.println(Output.getFormat().toString());
        return 0;
    }

//omitted stuff

//LineListener
    @Override
    public void update(LineEvent le) {
            if (le == null) System.err.println("Audio update");
            else System.err.println("Audio update " + le.toString());

        if ( !quit && ( le == null || le.getType() == LineEvent.Type.STOP ) ) {
            boolean uf = false;
            while (notDoneYet) {
                uf = true;
            }

            notDoneYet = true;
            if (uf) {
                underflow = true;
                System.err.println("underflow");
            }

            if (byteBuf == null) { //insert some leader tape
                byteBuf = new byte[leader];
                for (int i = 0; i < byteBuf.length; ++i)
                    byteBuf[i] = 0;

                if (alreadyLead) System.err.println("Using audio leader when not supposed to!");
                else alreadyLead = true;
            }
            Output.write(byteBuf, 0, byteBuf.length);
            Output.start();
            byteBuf = null;
            if (!Output.isRunning()) { //debug
                System.err.println("Output not running");
            } else {
                System.err.println("Output running");
            }

            aplay.call();
            curBuf = aplay.output;
            if (curBuf == null) {
                //I don't know how to help you
                notDoneYet = false;
                System.err.println("Unable to fill buffer");
                return;
            }
            byteBuf = new byte[curBuf.length * 2];
            for (int i = 0; i < byteBuf.length; ++i) {
                //little endian
                if (i % 2 == 0) byteBuf[i] = (byte) (curBuf[i / 2]/*&255/**/); //low byte
                else byteBuf[i] = (byte) (curBuf[i / 2] >> 8); //high byte
            }

//            byteBuf = new byte[curBuf.length/2]; //debug 8 bit mono
//            for (int i = 0; i < byteBuf.length; ++i) {
//                byteBuf[i] = (byte) (curBuf[i*2] >> 8);
//            }

            notDoneYet = false;
        }

    }
}

Some sample output:

Created JavaxAudioDevice
Audio update Start event from line com.sun.media.sound.DirectAudioDevice$DirectSDL@13ab2987
PCM_SIGNED 44100.0 Hz, 16 bit, stereo, 4 bytes/frame, little-endian
Disposing
Audio update Stop event from line com.sun.media.sound.DirectAudioDevice$DirectSDL@13ab2987
Audio update Close event from line com.sun.media.sound.DirectAudioDevice$DirectSDL@13ab2987
:desktop:run

BUILD SUCCESSFUL

Total time: 33.128 secs

Everything from "Disposing" onward is from after I quit the program. There should be several audio updates per second refilling the buffer.

At this point I do not know what to try next.

1 Answer:

Answer 0 (score: 0)

OK, I found the answer. My previous experiment with javax.sound.sampled had the main thread generating buffers and restarting the sound whenever it detected the line had finished. What I was trying to do here, unlike that experiment, was use LineListener the way SDL_mixer uses Mix_HookMusic() (an even older experiment).

https://www.libsdl.org/projects/SDL_mixer/docs/SDL_mixer.html#SEC60

When SDL_mixer runs out of buffered audio, it calls the function you gave it to generate more. That is not how LineListener works in Java: it is not called when the buffer runs out.

Instead, think of it as its own thread (which it is). When you call SourceDataLine.start(), the LineListener's event method is invoked on its own thread with the START event type, and start() returns to the main thread. So when the LineListener sees a START event, it enters a loop that builds a buffer and writes it to the device. SourceDataLine.write() returns when there is one buffer or less left to play, which gives the loop enough time to build the next one, with a latency of exactly one buffer. SourceDataLine.stop() triggers another call to the LineListener's event method, this time with the STOP type. That second invocation can set a boolean so the first one knows it is time to quit and exits its loop.

Here is some sample code that plays a tone for 5 seconds, then goes silent for another 5 before quitting.

Edit: it turns out that writing at least one frame (what I called a sample) is essential for SourceDataLine.start() to work properly. Since I am using 16-bit stereo, I need at least device.write(new byte[4], 0, 4); before I call device.start();

JavaxAudioTest.java

package javaxaudiotest;

import java.util.logging.Level;
import java.util.logging.Logger;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;

public class JavaxAudioTest {

    public static void main(String[] args) {
        SourceDataLine device;
        int buffer = 44100/15; //buffer in bytes (4 per sample so 60 buffers per second)

        AudioFormat af = new AudioFormat(44100, 16, 2, true, false);
        try {
            device = (SourceDataLine) AudioSystem.getSourceDataLine(af);
        } catch (LineUnavailableException ex) {
            Logger.getLogger(JavaxAudioTest.class.getName()).log(Level.SEVERE, null, ex);
            return;
        }


        WaveGen wg = new WaveGen();
        JavaxAudioDevice jad = new JavaxAudioDevice();
        jad.gen = wg;
        jad.device = device;
        device.addLineListener(jad);

        try {
            device.open(af,buffer);
        } catch (LineUnavailableException ex) {
            Logger.getLogger(JavaxAudioTest.class.getName()).log(Level.SEVERE, null, ex);
            return;
        }
        device.write(new byte[4], 0, 4); //absolutely essential
        device.start();

        System.out.println("Zzzzzzzzzz");
        try {
            Thread.sleep(5000);
        } catch (InterruptedException ex) {
            Logger.getLogger(JavaxAudioTest.class.getName()).log(Level.SEVERE, null, ex);
        }
        System.out.println("zzzzzzzzzzBWAH!");
        device.close();
        try {
            Thread.sleep(5000);
        } catch (InterruptedException ex) {
            Logger.getLogger(JavaxAudioTest.class.getName()).log(Level.SEVERE, null, ex);
        }
        System.out.println("Baiyo");
        device.drain();
        device.close();
    }
}

JavaxAudioDevice.java

package javaxaudiotest;

import javax.sound.sampled.LineEvent;
import javax.sound.sampled.LineListener;
import javax.sound.sampled.SourceDataLine;

public class JavaxAudioDevice implements LineListener {
    boolean quit = false;
    byte[] byteBuf;
    int SampleRate = 44100;
    int buffer = SampleRate/60;
    public WaveGen gen;
    public SourceDataLine device;


    @Override
    public void update(LineEvent event) {
        System.out.println(event.toString());
        if (event.getType() == LineEvent.Type.STOP) {
            quit = true;
        }
        else if (event.getType() == LineEvent.Type.START) {
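            // START is delivered on the line's own event thread, so it is fine to sit in a loop here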
            while (!quit) {
                short[] buf = new short[buffer];
                for (int i=0; i<buf.length; ++i) {
                    buf[i] = gen.Output();
                }
                byteBuf = new byte[buf.length*4];
                for (int i=0; i<buf.length; ++i) {
                    byteBuf[i*4] = byteBuf[i*4+2] = (byte)buf[i];
                    byteBuf[i*4+1] = byteBuf[i*4+3] = (byte)(buf[i]>>8);
                }
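                // write() blocks until the line is down to about one buffer left, which paces this loop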
                device.write(byteBuf, 0, byteBuf.length);
            }
        }
    }

}

WaveGen.java

package javaxaudiotest;

public class WaveGen {
    int Wave = 0;
    int Max = 1<<24;
    byte Fidelity = 20;
    int Level = 8;
    int Boost = 2000;
    int Stepper = (int)(110.0/44100*Max); //freq/44100*2^24

    short Output() {
        short ret = 0;
        Wave += Stepper;
        Wave %= Max;
        ret = (short)(( (Wave>>Fidelity)-Level )*Boost);
        return ret;
    }
}