Creating an audio spectrum with AudioGraph (UWP)

Asked: 2019-04-27 18:38:31

Tags: c# audio uwp spectrum

I want to create an audio spectrum (one with low, mid and high frequency bands; the classic spectrum is made of vertical bars, with the x-axis representing frequency and the y-axis representing volume).

So far I have derived the average volume of the two channels and display it as a percentage bar.

MainPage.xaml:

<Grid>
    <StackPanel Orientation="Vertical" HorizontalAlignment="Center" VerticalAlignment="Center" Margin="0,0,0,250">
        <Button x:Name="BtnFile" Width="150" Height="32" Content="Choose File" Click="BtnFile_Click" HorizontalAlignment="Center"/>
        <Button x:Name="BtnStartStop" Width="150" Height="32" Content="Start Graph" Margin="0,25,0,0" Click="BtnStartStop_Click" HorizontalAlignment="Center"/>
        <ProgressBar x:Name="ProgressVolume" Margin="0,25,0,0" Maximum="1" Width="350" Height="10"/>
    </StackPanel>
</Grid>

MainPage.xaml.cs:

private AudioGraph graph;
private AudioFileInputNode fileInput;
private AudioDeviceOutputNode deviceOutput;
private AudioFrameOutputNode frameOutputNode;

List<double> VolumeList = new List<double>();

DispatcherTimer VolumeTimer = new DispatcherTimer();

public MainPage()
{
    this.InitializeComponent();
    VolumeTimer.Interval = TimeSpan.FromMilliseconds(10);
    VolumeTimer.Tick += TimerVolume_Tick;
}

protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    await CreateAudioGraph();
}

protected override void OnNavigatedFrom(NavigationEventArgs e)
{
    if (graph != null)
    {
        graph.Dispose();
    }
}

private async void BtnFile_Click(object sender, RoutedEventArgs e)
{
    if (fileInput != null)
    {
        fileInput.Dispose();
        if (BtnStartStop.Content.Equals("Stop Graph"))
        {
            TogglePlay();
        }
    }

    FileOpenPicker filePicker = new FileOpenPicker();
    filePicker.SuggestedStartLocation = PickerLocationId.MusicLibrary;
    filePicker.FileTypeFilter.Add(".mp3");
    filePicker.FileTypeFilter.Add(".wav");
    filePicker.FileTypeFilter.Add(".wma");
    filePicker.FileTypeFilter.Add(".m4a");
    filePicker.ViewMode = PickerViewMode.Thumbnail;
    StorageFile file = await filePicker.PickSingleFileAsync();

    if (file == null)
    {
        return;
    }

    CreateAudioFileInputNodeResult fileInputResult = await graph.CreateFileInputNodeAsync(file);
    if (AudioFileNodeCreationStatus.Success != fileInputResult.Status)
    {
        return;
    }

    fileInput = fileInputResult.FileInputNode;

    fileInput.AddOutgoingConnection(deviceOutput);
}

private void BtnStartStop_Click(object sender, RoutedEventArgs e)
{
    TogglePlay();
}

private void TogglePlay()
{
    if (BtnStartStop.Content.Equals("Start Graph"))
    {
        GraphStarting();
        BtnStartStop.Content = "Stop Graph";
        VolumeTimer.Start();
    }
    else
    {
        graph.Stop();
        VolumeTimer.Stop();
        BtnStartStop.Content = "Start Graph";
    }
}

private void GraphStarting()
{
    frameOutputNode = graph.CreateFrameOutputNode();
    fileInput.AddOutgoingConnection(frameOutputNode);
    graph.Start();
}

private void AudioGraph_QuantumProcessed(AudioGraph sender, object args)
{

    AudioFrame frame = frameOutputNode.GetFrame();
    ProcessFrameOutput(frame);
}

unsafe private void ProcessFrameOutput(AudioFrame frame)
{
    using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
    using (IMemoryBufferReference reference = buffer.CreateReference())
    {
        byte* dataInBytes;
        uint capacityInBytes;
        float* dataInFloat;

        // Get the buffer from the AudioFrame
        ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

        // Guard against an empty frame: GetFrame can return a frame with no samples
        // before the graph has produced data, and reading it would go out of bounds.
        if (capacityInBytes < 2 * sizeof(float))
        {
            return;
        }

        dataInFloat = (float*)dataInBytes;

        double LeftValue = dataInFloat[0];
        double RightValue = dataInFloat[1];

        double MidValue = (Math.Abs(LeftValue) + Math.Abs(RightValue)) / 2;

        VolumeList.Add(MidValue);
    }
}

private void TimerVolume_Tick(object sender, object e)
{
    if (VolumeList.Count > 0)
    {
        ProgressVolume.Value = VolumeList[VolumeList.Count - 1];
    }
}

[ComImport]
[Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
unsafe interface IMemoryBufferByteAccess
{
    void GetBuffer(out byte* buffer, out uint capacity);
}

private async Task CreateAudioGraph()
{
    // Create an AudioGraph with default settings
    AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
    CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

    if (result.Status != AudioGraphCreationStatus.Success)
    {
        return;
    }

    graph = result.Graph;

    // Create a device output node
    CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

    if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        return;
    }

    deviceOutput = deviceOutputNodeResult.DeviceOutputNode;

    graph.QuantumProcessed += AudioGraph_QuantumProcessed;
}

In this post I read that this can be done with an FFT algorithm, and here I found a project with such an algorithm (FastFourierTransformation.cs), but I cannot use it starting from the only data I have, the so-called "MidValue".

How can I create a spectrum starting from "MidValue"?
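
From what I understand so far, the FFT needs a window of consecutive samples rather than the single averaged "MidValue" per frame, so something along these lines would probably be needed (only a rough sketch of the idea: CollectSamples, Spectrum, FftSize and the 250 Hz / 4 kHz band cut-offs are names and values I made up, and the O(N²) DFT is only there for illustration in place of FastFourierTransformation.cs):

private readonly List<float> sampleBuffer = new List<float>();
private const int FftSize = 1024;   // analysis window length (power of two)

unsafe private void CollectSamples(AudioFrame frame)
{
    using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
    using (IMemoryBufferReference reference = buffer.CreateReference())
    {
        byte* dataInBytes;
        uint capacityInBytes;
        ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

        float* data = (float*)dataInBytes;
        int sampleCount = (int)(capacityInBytes / sizeof(float));

        // Down-mix the interleaved stereo samples (L,R,L,R,...) to mono and buffer them.
        for (int i = 0; i + 1 < sampleCount; i += 2)
        {
            sampleBuffer.Add((data[i] + data[i + 1]) / 2f);
        }
    }

    if (sampleBuffer.Count < FftSize)
    {
        return;
    }

    float[] window = sampleBuffer.GetRange(0, FftSize).ToArray();
    sampleBuffer.RemoveRange(0, FftSize);

    double[] magnitudes = Spectrum(window);

    // Group the bins into three bands; the 250 Hz / 4 kHz cut-offs are arbitrary examples.
    double binHz = graph.EncodingProperties.SampleRate / (double)FftSize;
    double low = 0, mid = 0, high = 0;
    for (int bin = 1; bin < magnitudes.Length; bin++)
    {
        double freq = bin * binHz;
        if (freq < 250) low += magnitudes[bin];
        else if (freq < 4000) mid += magnitudes[bin];
        else high += magnitudes[bin];
    }
    // low / mid / high would then drive three vertical bars on the UI thread.
}

// Naive DFT magnitudes, O(N^2); only to show what data the spectrum needs.
private static double[] Spectrum(float[] samples)
{
    int n = samples.Length;
    double[] magnitudes = new double[n / 2];
    for (int k = 0; k < n / 2; k++)
    {
        double re = 0, im = 0;
        for (int t = 0; t < n; t++)
        {
            double angle = 2 * Math.PI * k * t / n;
            re += samples[t] * Math.Cos(angle);
            im -= samples[t] * Math.Sin(angle);
        }
        magnitudes[k] = Math.Sqrt(re * re + im * im) / n;
    }
    return magnitudes;
}

CollectSamples would be called from AudioGraph_QuantumProcessed, instead of (or in addition to) ProcessFrameOutput. Is this the right direction?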

(Is setting the volume value with a timer correct, or is another approach more appropriate?)
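
Regarding the timer: an alternative I was thinking about (again only a sketch) is to drop the DispatcherTimer and push the latest value to the ProgressBar straight from QuantumProcessed, using the page Dispatcher because the event is raised off the UI thread:

private async void AudioGraph_QuantumProcessed(AudioGraph sender, object args)
{
    AudioFrame frame = frameOutputNode.GetFrame();
    ProcessFrameOutput(frame);

    if (VolumeList.Count == 0)
    {
        return;
    }

    double latest = VolumeList[VolumeList.Count - 1];

    // QuantumProcessed runs on an audio thread, so the UI update must be dispatched.
    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Low,
        () => ProgressVolume.Value = latest);
}

With that, the 10 ms timer would no longer be needed, since the UI would update once per audio quantum (roughly every 10 ms with the default graph settings).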

Thanks as always!

0 Answers:

There are no answers yet.