I receive a live audio stream over the network as RTP packets, and I have to write code to capture, buffer, and play the audio.
The problem
To tackle this, I have written two threads: one that captures the audio and one that plays it back. Now, when I start both threads, my capture thread runs slower than the playing thread :(
Buffering requirement
While playing... if the number of 20 ms frames in the buffer exceeds AudioBufferHigh = 50, then drop 24 frames (in the simplest way possible: either delete them from the buffer or just discard the next 6 RTP messages).
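To make the requirement concrete, here is a rough sketch of how I read that drop rule (the class, constants, and queue below are just placeholders for illustration, not part of my actual code):

import java.util.Deque;

// Illustrative only: enforce "AudioBufferHigh = 50" by throwing away 24 of the
// oldest 20 ms frames (roughly the next 6 RTP packets, 4 frames each).
class HighWaterMarkPolicy {
    static final int AUDIO_BUFFER_HIGH = 50; // max number of 20 ms frames to keep queued
    static final int FRAMES_TO_DROP = 24;    // ~6 RTP packets' worth of frames

    void enforce(Deque<byte[]> frameQueue) {
        if (frameQueue.size() > AUDIO_BUFFER_HIGH) {
            for (int i = 0; i < FRAMES_TO_DROP && !frameQueue.isEmpty(); i++) {
                frameQueue.pollFirst(); // simplest option: just discard the frame
            }
        }
    }
}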
What I have done so far..
Code
BufferManager.java
public abstract class BufferManager {
protected static final Integer ONE = new Integer(1);
protected static final Integer TWO = new Integer(2);
protected static final Integer THREE = new Integer(3);
protected static final Integer BUFFER_SIZE = 5334;//5.334KB
protected static volatile Map<Integer, ByteArrayOutputStream> bufferPool = new ConcurrentHashMap<>(3, 0.9f, 2);
protected static volatile Integer captureBufferKey = ONE;
protected static volatile Integer playingBufferKey = ONE;
protected static Boolean running;
protected static volatile Integer noOfFrames = 0;
public BufferManager() {
//captureBufferKey = ONE;
//playingBufferKey = ONE;
//noOfFrames = new Integer(0);
}
protected void switchCaptureBufferKey() {
if(ONE.intValue() == captureBufferKey.intValue())
captureBufferKey = TWO;
else if(TWO.intValue() == captureBufferKey.intValue())
captureBufferKey = THREE;
else
captureBufferKey = ONE;
//printBufferState("SWITCHCAPTURE");
}//End of switchCaptureBufferKey() Method.
protected void switchPlayingBufferKey() {
if(ONE.intValue() == playingBufferKey.intValue())
playingBufferKey = TWO;
else if(TWO.intValue() == playingBufferKey.intValue())
playingBufferKey = THREE;
else
playingBufferKey = ONE;
}//End of switchPlayingBufferKey() Method.
protected static AudioFormat getFormat() {
float sampleRate = 8000;
int sampleSizeInBits = 16;
int channels = 1;
boolean signed = true;
boolean bigEndian = true;
return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
}
protected int getBufferSize() {
return bufferPool.get(ONE).size()
+ bufferPool.get(TWO).size()
+ bufferPool.get(THREE).size();
}
protected static void printBufferState(String flag) {
int a = bufferPool.get(ONE).size();
int b = bufferPool.get(TWO).size();
int c = bufferPool.get(THREE).size();
System.out.println(flag + " == TOTAL : [" + (a + b +c) + "bytes] ");
// int a,b,c;
// System.out.println(flag + "1 : [" + (a = bufferPool.get(ONE).size()) + "bytes], 2 : [" + (b = bufferPool.get(TWO).size())
// + "bytes] 3 : [" + (c = bufferPool.get(THREE).size()) + "bytes], TOTAL : [" + (a + b +c) + "bytes] ");
}
}//End of BufferManager Class.
AudioCapture.java
public class AudioCapture extends BufferManager implements Runnable {
private static final Integer RTP_HEADER_SIZE = 12;
private InetAddress ipAddress;
private DatagramSocket serverSocket;
long lStartTime = 0;
public AudioCapture(Integer port) throws UnknownHostException, SocketException {
super();
running = Boolean.TRUE;
bufferPool.put(ONE, new ByteArrayOutputStream(BUFFER_SIZE));
bufferPool.put(TWO, new ByteArrayOutputStream(BUFFER_SIZE));
bufferPool.put(THREE, new ByteArrayOutputStream(BUFFER_SIZE));
this.ipAddress = InetAddress.getByName("0.0.0.0");
serverSocket = new DatagramSocket(port, ipAddress);
}
@Override
public void run() {
System.out.println();
byte[] receiveData = new byte[1300];
DatagramPacket receivePacket = null;
lStartTime = System.currentTimeMillis();
receivePacket = new DatagramPacket(receiveData, receiveData.length);
byte[] packet = new byte[receivePacket.getLength() - RTP_HEADER_SIZE];
ByteArrayOutputStream buff = bufferPool.get(captureBufferKey);
while (running) {
if(noOfFrames <= 50) {
try {
serverSocket.receive(receivePacket);
packet = Arrays.copyOfRange(receivePacket.getData(), RTP_HEADER_SIZE, receivePacket.getLength());
if((buff.size() + packet.length) > BUFFER_SIZE) {
switchCaptureBufferKey();
buff = bufferPool.get(captureBufferKey);
}
buff.write(packet);
noOfFrames += 4;
} catch (SocketException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} // End of try-catch block.
} else {
//System.out.println("Packet Ignored, Buffer reached to its maximum limit ");
}//End of if-else block.
} // End of while loop.
}//End of run() Method.
}
AudioPlayer.java
public class AudioPlayer extends BufferManager implements Runnable {
long lStartTime = 0;
public AudioPlayer() {
super();
}
@Override
public void run() {
AudioFormat format = getFormat();
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
SourceDataLine line = null;
try {
line = (SourceDataLine) AudioSystem.getLine(info);
line.open(format);
line.start();
} catch (LineUnavailableException e1) {
e1.printStackTrace();
}
while (running) {
if (noOfFrames >= 24) {
ByteArrayOutputStream out = null;
try {
out = bufferPool.get(playingBufferKey);
InputStream input = new ByteArrayInputStream(out.toByteArray());
byte buffer[] = new byte[640];
int count;
while ((count = input.read(buffer, 0, buffer.length)) != -1) {
if (count > 0) {
InputStream in = new ByteArrayInputStream(buffer);
AudioInputStream ais = new AudioInputStream(in, format, buffer.length / format.getFrameSize());
byte buff[] = new byte[640];
int c = 0;
if((c = ais.read(buff)) != -1)
line.write(buff, 0, buff.length);
}
}
} catch (IOException e) {
e.printStackTrace();
}
/*byte buffer[] = new byte[1280];
try {
int count;
while ((count = ais.read(buffer, 0, buffer.length)) != -1) {
if (count > 0) {
line.write(buffer, 0, count);
}
}
} catch (IOException e) {
e.printStackTrace();
}*/
out.reset();
noOfFrames -= 4;
try {
if (getBufferSize() >= 10240) {
Thread.sleep(15);
} else if (getBufferSize() >= 5120) {
Thread.sleep(25);
} else if (getBufferSize() >= 0) {
Thread.sleep(30);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
// System.out.println("Number of frames :- " + noOfFrames);
}
}
}// End of run() method.
}// End of AudioPlayer class.
Any help or pointers to useful links would be much appreciated. Thanks...
Answer 0 (score: 0)
This answer explains a few challenges with streaming.
In short, your client needs to deal with two problems:
1) The clocks (crystals) on the client and the server are not perfectly in sync. The server may run a tiny fraction faster or slower than the client. The client continuously infers the server's clock rate by watching the rate at which RTP packets arrive, and then matches it by adjusting its playback rate through sample-rate conversion. So instead of playing back at 48 kHz, it plays back at, say, 48000.0001 Hz.
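As a rough illustration of that first point (the class and the idea of deriving a correction ratio are my own sketch, not a ready-made API):

// Sketch: estimate how fast the server produces audio relative to the local
// clock, and derive a playback-rate correction factor close to 1.0.
class ClockDriftEstimator {
    private final double nominalBytesPerSecond; // e.g. 8000 Hz * 2 bytes * 1 channel = 16000
    private final long startNanos = System.nanoTime();
    private long bytesReceived = 0;

    ClockDriftEstimator(double nominalBytesPerSecond) {
        this.nominalBytesPerSecond = nominalBytesPerSecond;
    }

    void onPacket(int payloadBytes) {
        bytesReceived += payloadBytes;
    }

    // Ratio > 1.0 means the server clock is slightly fast relative to ours.
    double playbackRateRatio() {
        double elapsedSeconds = (System.nanoTime() - startNanos) / 1e9;
        if (elapsedSeconds < 1.0) {
            return 1.0; // not enough data yet to estimate
        }
        double measuredBytesPerSecond = bytesReceived / elapsedSeconds;
        return measuredBytesPerSecond / nominalBytesPerSecond;
    }
}

That ratio would then drive a sample-rate converter, so playback effectively runs at 48000.0001 Hz rather than exactly 48 kHz.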
2) You have to deal with packet loss, out-of-order arrival, and so on. If packets are lost, you still need to keep placeholders for them in the buffered stream; otherwise the audio will skip, crackle, and drift out of alignment. The simplest approach is to replace the missing packets with silence, but the volume of the adjacent packets should be adjusted so the envelope does not snap abruptly to 0.
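A minimal sketch of the placeholder idea, assuming you parse the RTP sequence number yourself and, for simplicity, that each packet carries one 20 ms frame (the fading of neighbouring packets is left out):

import java.util.Queue;

// Sketch: insert a silent frame for every sequence number that never arrived,
// so the playout position stays aligned even when packets are lost or reordered.
// (16-bit RTP sequence-number wraparound is ignored for brevity.)
class JitterGapFiller {
    private static final int FRAME_BYTES = 320; // 20 ms at 8 kHz, 16-bit mono
    private static final byte[] SILENCE = new byte[FRAME_BYTES];
    private int expectedSeq = -1;

    void onPacket(int seq, byte[] payload, Queue<byte[]> playoutQueue) {
        if (expectedSeq == -1) {
            expectedSeq = seq; // first packet defines the starting point
        }
        while (expectedSeq < seq) {
            playoutQueue.add(SILENCE.clone()); // placeholder for a lost packet
            expectedSeq++;
        }
        if (seq == expectedSeq) {
            playoutQueue.add(payload);
            expectedSeq++;
        }
        // A late (out-of-order) packet falls through: its slot was already
        // filled with silence, so it is simply dropped here.
    }
}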
Your design looks a bit unorthodox. I have had success with a ring buffer. You will also have to handle the edge cases.
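For example, a bare-bones byte ring buffer (an illustrative sketch, not production code) could look like this; the capture thread would write into it and the player thread would read from it:

// Sketch: fixed-capacity byte ring buffer. The oldest data is overwritten on
// overflow (one possible overflow policy).
class AudioRingBuffer {
    private final byte[] data;
    private int readPos = 0, writePos = 0, count = 0;

    AudioRingBuffer(int capacity) {
        data = new byte[capacity];
    }

    synchronized void write(byte[] src, int off, int len) {
        for (int i = 0; i < len; i++) {
            data[writePos] = src[off + i];
            writePos = (writePos + 1) % data.length;
            if (count == data.length) {
                readPos = (readPos + 1) % data.length; // overwrite the oldest byte
            } else {
                count++;
            }
        }
    }

    synchronized int read(byte[] dst, int off, int len) {
        int n = Math.min(len, count);
        for (int i = 0; i < n; i++) {
            dst[off + i] = data[readPos];
            readPos = (readPos + 1) % data.length;
        }
        count -= n;
        return n;
    }

    synchronized int available() {
        return count;
    }
}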
As I always say, streaming is not a trivial task.