I am developing a screencast application that records the screen together with audio. Screen recording with sound works, but the problem is this: if I record for, say, 5 minutes, the generated video file is 5 minutes long while the generated audio file is only 4 minutes 45 seconds. So the audio and video are out of sync, and the audio file's duration is shorter than the video file's.
Audio and video recording each run in their own thread, but the problem is still there.
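As a sanity check, the expected audio size for a 5-minute recording can be computed from the format. The numbers below assume a 44.1 kHz, 16-bit, stereo format purely for illustration; the real values come from my audioFormat:

// Rough diagnostic, assuming 44.1 kHz, 16-bit, stereo => 4 bytes per audio frame.
float sampleRate = 44100f;                 // audioFormat.getSampleRate()
int frameSize = 4;                         // audioFormat.getFrameSize()
long recordedSeconds = 300;                // a 5-minute recording
long expectedBytes = (long) (sampleRate * frameSize * recordedSeconds);   // 52,920,000 bytes
long actualBytes = m_outputFile.length();
double actualSeconds = actualBytes / (sampleRate * frameSize);            // about 285 s in my case
System.out.println("missing audio: " + (recordedSeconds - actualSeconds) + " seconds");

This only confirms what I already see: the audio file really is about 15 seconds short of the wall-clock recording time.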
VideoCapturing code:
public void run() {
    setVideoParameters();
    FRAME_RATE = frameRate;

    // Let's make an IMediaWriter to write the file.
    writer = ToolFactory.makeWriter(movieFile.getName());
    screenBounds = new Rectangle(RecorderSettings.m_CapRectX,
            RecorderSettings.m_CapRecY,
            (int) RecorderSettings.m_CapRectWidth,
            (int) RecorderSettings.m_CapRecHeight);

    // We tell it we're going to add one video stream, with id 0,
    // at position 0, and that it will have a fixed frame rate of FRAME_RATE.
    // ScreenWidth and ScreenHeight are multiplied by 3/4 (upperLimit / lowerLimit)
    // to reduce the frame to 3/4 of the actual size.
    // writer.addVideoStream(0, 0, ICodec.ID.CODEC_ID_MPEG4,
    //         screenBounds.width, screenBounds.height);
    writer.addVideoStream(0, 0, vcodec.getID(),
            (screenBounds.width * upperLimit) / lowerLimit,
            (screenBounds.height * upperLimit) / lowerLimit);

    // Remember the start time of the recording.
    startTime = System.nanoTime();

    while (isStopProceesBtnClk) {
        try {
            if (!isStopProceesBtnClk) {
                break;
            } else {
                synchronized (this) {
                    while (isPauseProceesBtnClk) {
                        try {
                            // Capture the starting time of the pause.
                            pauseStartTime = System.nanoTime();
                            wait();
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }

                BufferedImage screen = getDesktopScreenshot();
                // Convert to the right image type.
                BufferedImage bgrScreen = convertToType(screen, BufferedImage.TYPE_3BYTE_BGR);

                // Encode the image to stream #0, subtracting any time spent paused.
                if (totalPauseTime > 0) {
                    writer.encodeVideo(0, bgrScreen, (System.nanoTime() - startTime) - totalPauseTime, TimeUnit.NANOSECONDS);
                } else {
                    writer.encodeVideo(0, bgrScreen, System.nanoTime() - startTime, TimeUnit.NANOSECONDS);
                }

                // Sleep for one frame period (in milliseconds).
                try {
                    Thread.sleep((long) (1000 / FRAME_RATE));
                } catch (InterruptedException e) {
                    // ignore
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Tell the writer to close and write the trailer if needed.
    try {
        writer.close();
        writer = null;
        Runtime.getRuntime().gc();
    } catch (Exception e) {
        // ignore errors
    }
}
public static BufferedImage convertToType(BufferedImage sourceImage, int targetType) {
    BufferedImage image;

    // If the source image is already the target type, return the source image.
    if (sourceImage.getType() == targetType) {
        image = sourceImage;
    }
    // Otherwise create a new image of the target type and draw the source image into it.
    else {
        image = new BufferedImage(sourceImage.getWidth(), sourceImage.getHeight(), targetType);
        if (true) {
            // Paint the mouse cursor icon onto the frame at the current pointer position.
            int x = MouseInfo.getPointerInfo().getLocation().x - 25;
            int y = MouseInfo.getPointerInfo().getLocation().y - 37;
            Graphics2D graphics2D = sourceImage.createGraphics();
            graphics2D.drawImage(SimpleWebBrowserExample.m_MouseIcon, x, y, 48, 48, null);
        }
        image.getGraphics().drawImage(sourceImage, 0, 0, null);
    }
    return image;
}
private BufferedImage getDesktopScreenshot() {
    try {
        // Robot captures the screen shot
        Robot robot = new Robot();
        Rectangle captureSize = new Rectangle(screenBounds);
        return robot.createScreenCapture(captureSize);
    } catch (AWTException e) {
        e.printStackTrace();
        return null;
    }
}
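One idea I am considering for the video side is to pace the loop against the wall clock instead of a fixed Thread.sleep(1000 / FRAME_RATE), so that the time spent in getDesktopScreenshot() and encodeVideo() does not stretch every iteration. This is only a sketch, not something in my code yet, and pause handling is left out:

// Sketch: schedule frame i at startTime + i * framePeriod, so capture/encode time
// is absorbed instead of being added on top of the sleep.
long framePeriodNanos = (long) (1_000_000_000L / FRAME_RATE);
long frameIndex = 0;
while (isStopProceesBtnClk) {
    BufferedImage bgr = convertToType(getDesktopScreenshot(), BufferedImage.TYPE_3BYTE_BGR);
    // gap-free, monotonic timestamps derived from the frame index
    writer.encodeVideo(0, bgr, frameIndex * framePeriodNanos, TimeUnit.NANOSECONDS);
    frameIndex++;
    long sleepNanos = (startTime + frameIndex * framePeriodNanos) - System.nanoTime();
    if (sleepNanos > 0) {
        try {
            Thread.sleep(sleepNanos / 1_000_000L, (int) (sleepNanos % 1_000_000L));
        } catch (InterruptedException e) {
            // ignore
        }
    }
}

Would timestamps derived from the frame index like this play well with the audio track, or is it better to keep the System.nanoTime()-based timestamps I use now?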
AudioCapturing Code:
public void run() {
    init();
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat, (int) (m_AudioFreq * sampleSizeInBytes));
    try {
        m_TargetLine = (TargetDataLine) AudioSystem.getLine(info);
        m_TargetLine.open(audioFormat, info.getMaxBufferSize());
    } catch (Exception exp) {
        exp.printStackTrace();
    }

    AudioFileFormat.Type targetType = AudioFileFormat.Type.WAVE;
    try {
        m_outputFile = new File(bufferFileName);
        while (m_outputFile.exists() && !m_outputFile.delete()) {
            m_outputFile = BerylsUtility.getNextFile(m_outputFile);
        }
        FileOutputStream outFileStream = new FileOutputStream(m_outputFile);
        audioOutStream = new BufferedOutputStream(outFileStream, memoryBufferSize);
    } catch (FileNotFoundException fe) {
        System.out.println("FileNotFoundException in VoiceCapturing.java :: " + fe);
    } catch (OutOfMemoryError oe) {
        System.out.println("OutOfMemoryError in VoiceCapturing.java " + oe);
    }

    while (isStopProceesBtnClk) {
        try {
            if (!isStopProceesBtnClk) {
                break;
            } else {
                synchronized (this) {
                    while (isPauseProceesBtnClk) {
                        try {
                            wait();
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
                try {
                    m_TargetLine.start();
                    int cnt = m_TargetLine.read(tempBuffer, 0, tempBuffer.length);
                    if (cnt > 0) {
                        audioOutStream.write(tempBuffer, 0, cnt);
                    }
                } catch (Exception e) {
                    System.out.println("Exception in VoiceCapturing.java :: " + e);
                }
                /*finally {
                    finish();
                }*/
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    finish();
}
public synchronized void finish() {
    try {
        System.out.println("AudioFinish");
        audioOutStream.close();
        FileInputStream audioInAgain = new FileInputStream(m_outputFile);
        long sampleBytes = m_outputFile.length();
        long sizeOfFrame = (long) m_SampleRate * m_Channels / 8;
        BufferedInputStream buffAudioIn = new BufferedInputStream(audioInAgain, memoryBufferSize);
        AudioInputStream a_input = new AudioInputStream(buffAudioIn, audioFormat, sampleBytes / sizeOfFrame);
        while (m_AudioFile.exists() && !m_AudioFile.canWrite()) {
            m_AudioFile = BerylsUtility.getNextFile(m_AudioFile);
        }
        AudioSystem.write(a_input, m_targetType, m_AudioFile);
        buffAudioIn.close();
        m_outputFile.delete();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
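For comparison, this is the capture pattern I thought the audio loop should follow: the line is started exactly once before the loop, the loop just reads, and the data still buffered in the line is read out after stop. Is this the right shape, and could the repeated m_TargetLine.start() plus the small per-iteration reads in my loop above be where the 15 seconds get lost? (Sketch only; pause and exception handling omitted, and the field names are the ones from my class.)

// Sketch: start the TargetDataLine once, read continuously, drain the tail on stop.
m_TargetLine.open(audioFormat);
m_TargetLine.start();                                        // start once, before the loop
byte[] buffer = new byte[m_TargetLine.getBufferSize() / 5];  // small chunks so read() returns often
while (isStopProceesBtnClk) {
    int cnt = m_TargetLine.read(buffer, 0, buffer.length);   // blocks until a chunk has been captured
    if (cnt > 0) {
        audioOutStream.write(buffer, 0, cnt);
    }
}
m_TargetLine.stop();
// read out whatever is still sitting in the line's internal buffer so the tail is not lost
int tail;
while ((tail = m_TargetLine.read(buffer, 0, buffer.length)) > 0) {
    audioOutStream.write(buffer, 0, tail);
}
m_TargetLine.close();
audioOutStream.flush();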
Can someone please guide me on this? Thanks.