Xamarin.Android. OPUS (Concentus). Playing sound while recording

Time: 2018-08-02 16:02:06

Tags: c# xamarin audio audio-recording opus

I am trying to capture sound from the microphone, encode it with the OPUS codec (using Concentus), then decode it and play it back so that I can hear what I am saying in real time. I have two problems:

  1. At first I tried simply recording and playing the sound back, without any encoding or decoding. But I hear an echo. The sound goes to headphones, so this is not a case of the device playing the recorded sound, the microphone picking it up again, playing it again, and so on. I think I am doing something wrong with the buffers. Can you tell me how to work with the microphone correctly? What am I doing wrong?

  2. After decoding an encoded chunk I get wrong data. Same question: what am I doing wrong? (A minimal encode/decode round-trip sketch follows this list.)
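
For reference, the round trip described above, reduced to a single fixed-size 20 ms frame (480 samples at 24 kHz), can be written as the following minimal sketch. It uses the same Concentus OpusEncoder/OpusDecoder calls as the code below; the console harness around it and the 4000-byte packet buffer are illustrative assumptions, not part of the application code.

using System;
using Concentus.Enums;
using Concentus.Structs;

class OpusRoundTripSketch
{
    private const int SampleRate = 24000;
    private const int FrameSize = SampleRate * 20 / 1000;   // 480 samples = one 20 ms frame

    static void Main()
    {
        var encoder = new OpusEncoder( SampleRate, 1, OpusApplication.OPUS_APPLICATION_VOIP )
        {
            Bitrate = 16 * 1024,
            SignalType = OpusSignal.OPUS_SIGNAL_VOICE,
        };
        var decoder = new OpusDecoder( SampleRate, 1 );

        // One frame of silence stands in for a frame captured from the microphone.
        short[] pcmIn = new short[FrameSize];

        // Encode exactly FrameSize samples into one Opus packet.
        byte[] packet = new byte[4000];   // generously sized output buffer
        int packetLength = encoder.Encode( pcmIn, 0, FrameSize, packet, 0, packet.Length );

        // Decode the packet back into exactly FrameSize samples.
        short[] pcmOut = new short[FrameSize];
        int decodedSamples = decoder.Decode( packet, 0, packetLength, pcmOut, 0, FrameSize );

        Console.WriteLine( "encoded " + packetLength + " bytes, decoded " + decodedSamples + " samples" );
    }
}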

Code:

using System;
using System.Linq;
using System.Threading.Tasks;
using Android.Media;
using Concentus.Enums;
using Concentus.Structs;

public interface IAudioService
{
    void Start();
    void Stop();
    bool IsRunning { get; }
}

public class TestAudioService : IAudioService
{
    private static readonly int _sampleRate = 24000;
    private static readonly int FrameSize = _sampleRate * 20 / 1000;   // 480 samples = one 20 ms Opus frame

    private readonly OpusEncoder _encoder;
    private readonly OpusDecoder _decoder;
    private AudioRecord _recorder;
    private AudioTrack _audio;

    public bool IsRunning { get; private set; }

    public TestAudioService()
    {
        _encoder = new OpusEncoder( _sampleRate, 1, OpusApplication.OPUS_APPLICATION_VOIP )
        {
            Bitrate = 16 * 1024,
            SignalType = OpusSignal.OPUS_SIGNAL_VOICE,
        };

        _decoder = new OpusDecoder( _sampleRate, 1 );
    }

    public void Start()
    {
        Task.Run( (Action) Running );
    }

    public void Stop()
    {
        IsRunning = false;
    }

    private void Running()
    {
        // Capture mono 16-bit PCM at 24 kHz; the buffer is four times the Android minimum.
        int recorderBufferSize = 4 * AudioRecord.GetMinBufferSize( _sampleRate, ChannelIn.Mono, Encoding.Pcm16bit );
        _recorder = new AudioRecord( AudioSource.Mic, _sampleRate, ChannelIn.Mono, Encoding.Pcm16bit, recorderBufferSize );
        _recorder.StartRecording();

        // Play back through a streaming AudioTrack with the minimum buffer size.
        int audioBufferSize = AudioTrack.GetMinBufferSize( _sampleRate, ChannelOut.Mono, Encoding.Pcm16bit );
        _audio = new AudioTrack( Stream.Music, _sampleRate, ChannelOut.Mono, Encoding.Pcm16bit, audioBufferSize, AudioTrackMode.Stream );
        _audio.Play();

        IsRunning = true;
        while ( IsRunning )
        {
            try
            {
                var audioBuffer = new short[2*recorderBufferSize];
                int size = _recorder.Read( audioBuffer, 0, audioBuffer.Length );

                ProcessVoice( audioBuffer.Take( size ).ToArray(), false );
            }
            catch ( Exception ex )
            {
                Console.Out.WriteLine( ex.Message );
            }
        }

        _recorder.Stop();
        _recorder.Release();
        _audio.Stop();
        _audio.Dispose();
    }

    private short[] _notEncodedBuffer = new short[0];   // declared but never used below

    private void ProcessVoice( short[] sample, bool useOpus )
    {
        if ( !useOpus )
        {
            _audio.Write( sample, 0, sample.Length );
            return;
        }

        var compressedSound = Encode( sample );
        var decompressedSound = Decode( compressedSound );
        _audio.Write( decompressedSound, 0, decompressedSound.Length );
    }

    private byte[] Encode( short[] sample )
    {
        byte[] outputBuffer = new byte[10000];

        int size = _encoder.Encode( sample, 0, FrameSize, outputBuffer, 0, outputBuffer.Length );
        return outputBuffer.Take( size ).ToArray();
    }

    private short[] Decode( byte[] compressedPacket )
    {
        short[] outputBuffer = new short[10000];

        int size = _decoder.Decode( compressedPacket, 0, compressedPacket.Length, outputBuffer, 0, FrameSize );
        return outputBuffer.Take( size ).ToArray();
    }
}
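
The unused _notEncodedBuffer field above suggests the missing piece is accumulating samples so that Encode always receives exactly FrameSize samples per call. The following sketch shows that kind of frame-exact chunking inside TestAudioService; the method name _pendingSamples/ProcessVoiceFramed and the buffer sizes are assumptions for illustration, not a verified fix.

    // Sketch only: accumulate incoming samples, encode every complete 480-sample
    // frame, decode it, play it back, and carry the remainder over to the next read.
    private short[] _pendingSamples = new short[0];

    private void ProcessVoiceFramed( short[] sample )
    {
        // Append the new samples to whatever was left over from the previous read.
        var buffer = new short[_pendingSamples.Length + sample.Length];
        Array.Copy( _pendingSamples, 0, buffer, 0, _pendingSamples.Length );
        Array.Copy( sample, 0, buffer, _pendingSamples.Length, sample.Length );

        int offset = 0;
        var packet = new byte[4000];
        var pcmOut = new short[FrameSize];

        // Encode, decode and play every complete 20 ms frame in the buffer.
        while ( buffer.Length - offset >= FrameSize )
        {
            int packetLength = _encoder.Encode( buffer, offset, FrameSize, packet, 0, packet.Length );
            int decodedSamples = _decoder.Decode( packet, 0, packetLength, pcmOut, 0, FrameSize );
            _audio.Write( pcmOut, 0, decodedSamples );
            offset += FrameSize;
        }

        // Keep the incomplete tail for the next read.
        _pendingSamples = buffer.Skip( offset ).ToArray();
    }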

0 Answers:

No answers