I have a function that strips the WAV audio out of a video and then reads that WAV file through a FileStream. The problem is that the Read call hits end of stream (returns 0, i.e. no more bytes to read) too early: the WAV file is 20 minutes long, but only about 17.5 minutes get written out.
In this function I mix several wave files into one large file. The problem is that the FileStream reading the large wave file reaches the end too early, and I can't figure out why.
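Boiled down, the reading part works roughly like the stripped-down sketch below (the class wrapper and file name are just placeholders for illustration; DATA_START_POS is the usual 44-byte header offset, which the real code keeps as a class constant, and the mixing and helpers such as writeSample() are left out):

using System;
using System.IO;

class WavReadSketch
{
    const int DATA_START_POS = 44; //standard 44-byte RIFF/WAVE header offset

    static void Main()
    {
        using (FileStream videoAudioStream = new FileStream("video.wav.temp", FileMode.Open, FileAccess.Read))
        {
            //the "data" chunk size sits in the 4 bytes just before the raw samples
            byte[] buffer = new byte[4];
            videoAudioStream.Seek(DATA_START_POS - 4, SeekOrigin.Begin);
            videoAudioStream.Read(buffer, 0, 4);
            int dataChunkSize = BitConverter.ToInt32(buffer, 0);

            //read 16-bit samples two bytes at a time until Read reports end of stream (returns 0)
            videoAudioStream.Seek(DATA_START_POS, SeekOrigin.Begin);
            byte[] sampleBuffer = new byte[2];
            long samplesRead = 0;
            while (videoAudioStream.Read(sampleBuffer, 0, 2) > 0)
            {
                samplesRead++;
            }

            Console.WriteLine("data chunk says {0} bytes, loop actually read {1} bytes",
                dataChunkSize, samplesRead * 2);
        }
    }
}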
Here is my code:
public void combineWaveFileData(object status)
{
    //ProgressWindow progressWindow = status as ProgressWindow;
    ExportProgressWindow exportProgressWindow = status as ExportProgressWindow;
    string videoAudioFile = outfileName + ".temp";
    int descriptionStartSample = 0;
    int highestNumber = 0;
    int extraSamplesFromExtendedDescription = 0;
    const double VOLUME_VIDEO_FACTOR = 0.8; //controls video volume levels; won't be const as volume control will be added later
    const double VOLUME_DESCRIPTION_FACTOR = 1.5; //controls description volume levels
    double currentSample = 0; //keeps track of the current sample in the video audio track
    byte[] videoAudioBuffer = new byte[4]; //new byte[bitsPerSample / 8];
    int sample = 0; //holds raw audio data sample
    int videoReadStatus = 1;
    int descriptionEndSample = 0;
    byte[] buffer = new byte[4];
    FileStream tempStream;
    videoAudioStream = new FileStream(videoAudioFile, FileMode.Open, FileAccess.ReadWrite);
    videoAudioStream.Seek(DATA_START_POS - 4, 0);
    videoAudioStream.Read(buffer, 0, buffer.Length);
    totalRawSoundDataBits = BitConverter.ToInt32(buffer, 0);
    //totalRawSoundDataBits = videoAudioStream.Length;
    videoAudioStream.Seek(24, 0);
    videoAudioStream.Read(buffer, 0, 4);
    int videoSampleRate = BitConverter.ToInt32(buffer, 0);
    sampleRateHz = videoSampleRate;
    outFileStream = new FileStream(outfileName, FileMode.Create, FileAccess.ReadWrite);
    videoAudioStream.Seek(0, 0);
    writeAudioFileHeader(outFileStream, videoAudioStream);
    // videoAudioStream.Seek(0, 0); //reset video audio position to 0 (beginning)
    convertStatus = false;
    if (compatibilityIssue)
    {
        exportProgressWindow.Close();
        return;
    }
    //calculate total length of extended description files
    foreach (Description description in descriptionList)
    {
        if (description.IsExtendedDescription)
        {
            tempStream = new FileStream(description.getFilename(), FileMode.Open);
            totalRawSoundDataBits += tempStream.Length - DATA_START_POS;
            try
            {
                tempStream.Close();
                tempStream.Dispose();
                tempStream = null;
            }
            catch { }
        }
        WaveReader read = new WaveReader(File.OpenRead(description.getFilename()));
        IntPtr oldFormat = read.ReadFormat();
        WaveFormat waveformat = AudioCompressionManager.GetWaveFormat(oldFormat);
        int descriptionSampleRateHz = waveformat.nSamplesPerSec;
        read.Close();
        string resampledFilename = description.getFilename();
        if (descriptionSampleRateHz != sampleRateHz)
        {
            exportProgressWindow.SetText(".");
            resampledFilename = convertSampleRate(description.getFilename(), sampleRateHz);
            description.setFilename(resampledFilename);
        }
    }
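    //sort the descriptions by start time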
    for (int i = 0; i < descriptionList.Count; i++)
    {
        for (int j = 0; j < descriptionList.Count; j++)
        {
            Description tempDescription;
            if (((Description)descriptionList[i]).getStart() < ((Description)descriptionList[j]).getStart())
            {
                tempDescription = (Description)descriptionList[j];
                descriptionList[j] = descriptionList[i];
                descriptionList[i] = tempDescription;
            }
        }
    }
    int k = 0;
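    //step through the video audio; once the next description's start sample is reached, mix that description in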
    while (videoReadStatus > 0)
    {
        try
        {
            Description description;
            description = (Description)descriptionList[k];
            descriptionStartSample = (int)Math.Truncate(sampleRateHz * description.getStart());
            descriptionEndSample = (int)Math.Truncate(sampleRateHz * description.getEnd());
            if (videoAudioStream.Position / 4 > descriptionStartSample)
            {
                double currentTime = videoAudioStream.Position / 4 / sampleRateHz;
                Console.WriteLine(currentTime + " " + description.getStart() + " " + description.getEnd());
                if (k < descriptionList.Count - 1)
                {
                    k++;
                }
                double percentage = Convert.ToDouble(k) / Convert.ToDouble(descriptionList.Count) * 100.0;
                try
                {
                    exportProgressWindow.Increment(percentage);
                }
                catch (Exception ex)
                {
                    return;
                }
                buffer = new byte[4];
                tempStream = new FileStream(description.getFilename(), FileMode.Open);
                try
                {
                    tempStream.Seek(44, 0); //to search for position 34; write: use writeSample()
                    int tempReadStatus = 1;
                    while (tempReadStatus > 0 && videoReadStatus > 0) //(currentSample < descriptionEndSample)
                    {
                        //If description isn't an extended description then mix the description with the video audio
                        if (!description.IsExtendedDescription)
                        {
                            videoReadStatus = videoAudioStream.Read(videoAudioBuffer, 0, 2);
                            tempReadStatus = tempStream.Read(buffer, 0, 2);
                            if (videoReadStatus == 0)
                            {
                                Console.WriteLine(currentTime);
                                int debug = 0;
                            }
                            if (tempReadStatus <= 0 || videoReadStatus <= 0)
                            {
                                int deleteme = 0;
                            }
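                            //average the volume-scaled description sample and video sample into one output sample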
                            sample += (int)(((BitConverter.ToInt16(buffer, 0)) * VOLUME_DESCRIPTION_FACTOR + (BitConverter.ToInt16(videoAudioBuffer, 0) * VOLUME_VIDEO_FACTOR)) / 2);
                            writeSample(sample);
                            sample = 0;
                        }
                        else
                        // If description is extended then only write the description samples
                        {
                            int tempStatus = 1;
                            while (tempReadStatus > 0)
                            {
                                tempReadStatus = tempStream.Read(buffer, 0, 2);
                                sample = (int)((BitConverter.ToInt16(buffer, 0))); // -((sample * (int)(BitConverter.ToInt16(buffer, 0))) / 65535); //Z = A+B-AB/65535 http://www.vttoth.com/CMS/index.php/technical-notes/68 //* VOLUME_DESCRIPTION_FACTOR);
                                writeSample(sample);
                                sample = 0;
                            }
                            break;
                        }
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Debug 1: " + ex.Message); //MessageBox.Show(ex.Message);
                }
                finally
                {
                    tempStream.Close();
                    tempStream.Dispose();
                    tempStream = null;
                }
            }
            else
            {
                try
                {
                    videoReadStatus = videoAudioStream.Read(videoAudioBuffer, 0, 2);
                    sample += (int)((BitConverter.ToInt16(videoAudioBuffer, 0)) * VOLUME_VIDEO_FACTOR);
                    if (videoReadStatus == 0)
                    {
                        int debug = 0;
                    }
                    writeSample(sample);
                    sample = 0;
                    convertStatus = true;
                }
                catch (Exception ex)
                {
                    int test = 0;
                }
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.GetBaseException().ToString());
        }
    }
    exportProgressWindow.SetText("\n\nLiveDescribe has successfully exported the file.");
    try
    {
        closeStreams();
        Control.CheckForIllegalCrossThreadCalls = false;
    }
    catch (Exception ex)
    {
    }
}
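One thing I still want to rule out is whether the missing ~2.5 minutes are already absent from the .temp file itself rather than being dropped by the loop above. A quick check along these lines should show whether the data chunk size in the header and the actual file length both correspond to 20 minutes (a sketch only; the file name is a placeholder, DATA_START_POS is the same 44-byte header offset as above, and it assumes 16-bit mono audio):

using System;
using System.IO;

class WavLengthCheck
{
    const int DATA_START_POS = 44; //same 44-byte header offset as in the code above

    static void Main()
    {
        using (FileStream fs = new FileStream("out.wav.temp", FileMode.Open, FileAccess.Read))
        {
            byte[] buffer = new byte[4];

            //sample rate lives at byte offset 24 of the standard header
            fs.Seek(24, SeekOrigin.Begin);
            fs.Read(buffer, 0, 4);
            int sampleRateHz = BitConverter.ToInt32(buffer, 0);

            //declared size of the "data" chunk, stored just before the raw samples
            fs.Seek(DATA_START_POS - 4, SeekOrigin.Begin);
            fs.Read(buffer, 0, 4);
            int declaredDataBytes = BitConverter.ToInt32(buffer, 0);

            long actualDataBytes = fs.Length - DATA_START_POS;

            //assuming 16-bit mono, i.e. 2 bytes per sample (double it for stereo)
            Console.WriteLine("header says {0:F1} min of data, file actually holds {1:F1} min",
                declaredDataBytes / 2.0 / sampleRateHz / 60.0,
                actualDataBytes / 2.0 / sampleRateHz / 60.0);
        }
    }
}

If those two numbers already fall short of 20 minutes, the problem is upstream of this function; if they both say 20 minutes, then something in the loop above must be stopping early.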