我正在使用Xuggler转换从java Robot类捕获的图像和从TargetDataLine类读取的声音,并将其编码为视频。然后我尝试通过http(Socket OutputStream)将这个视频数据(在写完我的标题之后)流式传输到flash客户端,但无论我在客户端使用什么缓冲区值,它播放时都会断断续续(从来无法流畅播放)。
我在此寻求帮助并贴出我的java代码,因为我怀疑问题可能出在我编码视频的方式、或通过http套接字发送数据的方式上,其中或许有我没有理解到位的地方……
// Captures the screen (java.awt.Robot) and microphone audio (TargetDataLine)
// and encodes both into an FLV container streamed to `out` (the HTTP socket
// OutputStream). External names assumed in scope: out, line, audioFormat,
// keepGoing, videoStreamIndex, videoStreamId, convertToType(...).
ByteArrayURLHandler ba = new ByteArrayURLHandler(); // NOTE(review): unused in this fragment
final IRational FRAME_RATE = IRational.make(30);    // NOTE(review): unused in this fragment
final int SECONDS_TO_RUN_FOR = 20;                  // NOTE(review): unused in this fragment
final Robot robot = new Robot();
final Toolkit toolkit = Toolkit.getDefaultToolkit();
final Rectangle screenBounds = new Rectangle(toolkit.getScreenSize());

// Stream ids must be declared BEFORE addAudioStream() uses them
// (the original referenced them some lines before their declaration).
final int audioStreamIndex = 1;
final int audioStreamId = 1;
final int channelCount = 1;

// Wrap the socket OutputStream so Xuggler writes the FLV bytes straight to it.
IMediaWriter writer = ToolFactory.makeWriter(
    XugglerIO.map(XugglerIO.generateUniqueName(out, ".flv"), out));

writer.addListener(new MediaListenerAdapter() {
    @Override
    public void onAddStream(IAddStreamEvent event) {
        event.getSource().getContainer().setInputBufferLength(1000);
        IStreamCoder coder = event.getSource().getContainer()
            .getStream(event.getStreamIndex()).getStreamCoder();
        if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO) {
            // Constant bitrate for audio: disable quality-scale mode first.
            coder.setFlag(IStreamCoder.Flags.FLAG_QSCALE, false);
            coder.setBitRate(32000);
            System.out.println("onaddstream" + coder.getPropertyNames().toString());
        }
        // No video-coder overrides: container defaults are used for video.
    }
});

writer.addVideoStream(videoStreamIndex, videoStreamId, 1024, 768);
// NOTE(review): 44100 is hard-coded; it should match audioFormat.getSampleRate()
// or the audio timestamps will drift — verify against the capture line's format.
int audionumber = writer.addAudioStream(audioStreamIndex, audioStreamId,
    channelCount, 44100);

int i = 0;
BufferedImage screen, bgrScreen;
long startTime = System.nanoTime();

while (keepGoing) {
    i++;

    // ---- video frame ----
    screen = robot.createScreenCapture(screenBounds);
    bgrScreen = convertToType(screen, BufferedImage.TYPE_3BYTE_BGR);
    long nanoTs = System.nanoTime() - startTime;
    writer.encodeVideo(0, bgrScreen, nanoTs, TimeUnit.NANOSECONDS);

    // ---- audio frame ----
    // Size the buffer by what the line actually has available; the original
    // allocated an unused bufferSize-byte array every iteration first.
    byte[] audioBuf = new byte[line.available()];
    int nBytesRead = line.read(audioBuf, 0, audioBuf.length);
    if (nBytesRead <= 0) {
        continue; // nothing captured on this pass — skip the audio packet
    }
    IBuffer iBuf = IBuffer.make(null, audioBuf, 0, nBytesRead);
    IAudioSamples smp = IAudioSamples.make(iBuf, channelCount,
        IAudioSamples.Format.FMT_S16);
    if (smp == null) {
        return;
    }
    // Use nBytesRead, not audioBuf.length: read() may return fewer bytes than
    // requested, and counting the unread tail produced bogus trailing samples.
    long numSample = nBytesRead / smp.getSampleSize();
    smp.setComplete(true, numSample, (int) audioFormat.getSampleRate(),
        audioFormat.getChannels(), IAudioSamples.Format.FMT_S16,
        nanoTs / 1000); // Xuggler audio timestamps are in microseconds
    writer.encodeAudio(audioStreamIndex, smp);
    // NOTE(review): flushing every frame forces tiny writes to the socket and
    // is a likely contributor to choppy playback — consider flushing less often.
    writer.flush();
}