I'm trying to write an app that plays back the microphone audio with a slight delay. My problem is that the delay is far too long. I'm not very experienced with audio streaming on the phone and I've reused a lot of recycled code here, but the latency feels like about 1 second, and ideally I'd like it to be somewhere between 100 ms and 200 ms. Any advice is appreciated.
public class MicrophoneSource : MediaStreamSource
{
    // Standard constants
    const int ChannelCount = 1;
    const int BitsPerSample = 16;
    const int BufferSamples = 2038; // can be changed
    const int BufferSize = ChannelCount * BufferSamples * BitsPerSample / 8;

    Dictionary<MediaSampleAttributeKeys, string> mediaSampleAttributes;
    MediaStreamDescription mediaStreamDescription;
    MemoryStream memoryStream;

    // How often we pull data from the microphone, in milliseconds
    // (used as the microphone's BufferDuration)
    private int SAMPLE_RATE = 100;

    // Object representing the physical microphone on the device
    private Microphone microphone = Microphone.Default;

    // The buffer that we'll be using to copy samples over with
    private byte[] buffer;

    public void setBufferRate(int newRate)
    {
        SAMPLE_RATE = newRate;
    }

    public MicrophoneSource()
    {
        // Create empty mediaSampleAttributes dictionary for use with ReportGetSampleCompleted
        mediaSampleAttributes = new Dictionary<MediaSampleAttributeKeys, string>();

        // Create re-usable MemoryStream for accumulating audio samples
        memoryStream = new MemoryStream();
    }
    private void Microphone_initialization()
    {
        // Event handler for getting audio data when the buffer is full
        microphone.BufferReady += new EventHandler<EventArgs>(microphone_BufferReady);

        // Set how long the microphone collects data before raising BufferReady
        microphone.BufferDuration = TimeSpan.FromMilliseconds(SAMPLE_RATE);
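        // (If I'm reading the XNA docs right, BufferDuration must be between
        // 100 ms and 1000 ms, in 10 ms increments, so an out-of-range value
        // passed to setBufferRate would throw here. That's my understanding,
        // not something I've tested.)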
        // Allocate memory to hold the audio data
        buffer = new byte[microphone.GetSampleSizeInBytes(microphone.BufferDuration)];
    }
    protected override void OpenMediaAsync()
    {
        int byteRate = microphone.SampleRate * ChannelCount * BitsPerSample / 8;
        short blockAlign = (short)(ChannelCount * (BitsPerSample / 8));

        // Build string-based wave-format structure
        string waveFormat = "";
        waveFormat += ToLittleEndianString(string.Format("{0:X4}", 1)); // indicates PCM
        waveFormat += ToLittleEndianString(string.Format("{0:X4}", ChannelCount));
        waveFormat += ToLittleEndianString(string.Format("{0:X8}", microphone.SampleRate));
        waveFormat += ToLittleEndianString(string.Format("{0:X8}", byteRate));
        waveFormat += ToLittleEndianString(string.Format("{0:X4}", blockAlign));
        waveFormat += ToLittleEndianString(string.Format("{0:X4}", BitsPerSample));
        waveFormat += ToLittleEndianString(string.Format("{0:X4}", 0));
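        // (Sanity check, assuming the mic reports 16000 Hz mono 16-bit: byteRate
        // would be 32000 and blockAlign 2, so this should come out as
        // "0100" + "0100" + "803E0000" + "007D0000" + "0200" + "1000" + "0000",
        // i.e. a little-endian hex WAVEFORMATEX for CodecPrivateData. That part
        // of my understanding could be wrong.)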
        // Put wave format string in media streams dictionary
        var mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
        mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = waveFormat;

        // Make description to add to available streams list
        var availableMediaStreams = new List<MediaStreamDescription>();
        mediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
        availableMediaStreams.Add(mediaStreamDescription);

        // Set some appropriate keys in the media source dictionary
        var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
        mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = "0";
        mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "false";

        // Initialize the microphone completely
        Microphone_initialization();

        // Signal that the open operation is completed
        ReportOpenMediaCompleted(mediaSourceAttributes, availableMediaStreams);
    }
    // Reverses a hex string two characters (one byte) at a time,
    // for building the string-based wave-format structure
    string ToLittleEndianString(string bigEndianString)
    {
        StringBuilder strBuilder = new StringBuilder();
        for (int i = 0; i < bigEndianString.Length; i += 2)
            strBuilder.Insert(0, bigEndianString.Substring(i, 2));
        return strBuilder.ToString();
    }
    // Called by the pipeline whenever it wants another audio sample.
    // We don't report a sample here directly; samples are reported from
    // microphone_BufferReady once the microphone delivers data.
    // (MediaStreamType parameter will always equal Audio.)
    protected override void GetSampleAsync(MediaStreamType mediaStreamType)
    {
        // Start the microphone capture if it's not started yet
        if (microphone.State == MicrophoneState.Stopped)
        {
            microphone.Start();
        }
    }
    // Gets called back when the microphone's buffer is ready
    private void microphone_BufferReady(object sender, EventArgs e)
    {
        // Retrieve audio data
        microphone.GetData(buffer);

        // Reset MemoryStream object
        memoryStream.Seek(0, SeekOrigin.Begin);

        // Write the newly acquired data into the memory stream
        memoryStream.Write(buffer, 0, buffer.Length);

        // Send out the sample
        ReportGetSampleCompleted(new MediaStreamSample(mediaStreamDescription,
                                                       memoryStream,
                                                       0,
                                                       buffer.Length,
                                                       0, // NOTE: Can this be set to 0?
                                                       mediaSampleAttributes));
    }
    protected override void SeekAsync(long seekToTime)
    {
        // Is this valid? No seek calls are made so it should be okay
        ReportSeekCompleted(seekToTime);
    }
    protected override void CloseMedia()
    {
        if (microphone.State == MicrophoneState.Started)
        {
            microphone.Stop();
        }
        microphone.BufferReady -= new EventHandler<EventArgs>(microphone_BufferReady);
    }

    // Shouldn't get a call here because only one MediaStreamDescription is supported
    protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
    {
        throw new NotImplementedException();
    }

    protected override void GetDiagnosticAsync(MediaStreamSourceDiagnosticKind diagnosticKind)
    {
        throw new NotImplementedException();
    }
}
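One thing I'm unsure about is the timestamp I pass to MediaStreamSample (the 0 marked with the NOTE above). My understanding is that it's in 100-nanosecond units, so a running timestamp derived from the bytes reported so far would look roughly like the sketch below. The totalBytesReported field and the tick conversion are my own additions, not something from the samples I copied, so treat it as untested:

    // Running count of PCM bytes already handed to ReportGetSampleCompleted
    private long totalBytesReported = 0;

    private void microphone_BufferReady(object sender, EventArgs e)
    {
        microphone.GetData(buffer);

        memoryStream.Seek(0, SeekOrigin.Begin);
        memoryStream.Write(buffer, 0, buffer.Length);

        // Bytes of PCM per second: sampleRate * channels * bytesPerSample
        int byteRate = microphone.SampleRate * ChannelCount * BitsPerSample / 8;

        // Convert "bytes delivered so far" into a timestamp in 100-ns ticks
        long timestamp = totalBytesReported * 10000000L / byteRate;
        totalBytesReported += buffer.Length;

        ReportGetSampleCompleted(new MediaStreamSample(mediaStreamDescription,
                                                       memoryStream,
                                                       0,
                                                       buffer.Length,
                                                       timestamp,
                                                       mediaSampleAttributes));
    }

I don't know whether this timestamp handling has anything to do with the latency I'm seeing, so pointers on that are welcome too.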