﻿using NAudio.CoreAudioApi;
using NAudio.Wave;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace FunasrSTT.Translate.Services;

/// <summary>
/// 音频采集类
/// </summary>
/// <summary>
/// Audio capture service: records microphone input to a temporary WAV file
/// and buffers the raw PCM chunks in a concurrent queue for downstream
/// consumers (e.g. a speech-to-text pipeline).
/// </summary>
public class WaveCollectService
{
    private string fileName = string.Empty;          // full path of the WAV file being written
    private WaveInEvent waveSource = null;           // audio capture device wrapper
    private WaveFileWriter waveFile = null;          // WAV file writer
    public int wave_buffer_milliseconds = 600;       // capture buffer length: 600 ms of audio per callback
    public int wave_buffer_collectbits = 16;         // sample depth: 16 bits
    public int wave_buffer_collectchannels = 1;      // channel count: 1 = mono, 2 = stereo
    public int wave_buffer_collectfrequency = 16000; // sample rate: 16000 Hz
    public ConcurrentQueue<byte[]> voicebuff = new ConcurrentQueue<byte[]>(); // queue of captured PCM chunks

    /// <summary>
    /// Starts recording: logs the active capture devices, resets any previous
    /// session, then writes 16 kHz / 16-bit / mono PCM to "tmp.wav" in the
    /// application base directory while also enqueueing each captured chunk
    /// into <see cref="voicebuff"/>.
    /// </summary>
    public void StartRec()
    {
        // Release any previous session first so the same file path can be reused.
        if (waveSource != null || waveFile != null)
        {
            StopRec();
        }

        // Diagnostic dump of active capture endpoints and their native formats.
        // MMDeviceEnumerator wraps a COM object, so dispose it when done.
        using (var enumerator = new MMDeviceEnumerator())
        {
            foreach (var device in enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active))
            {
                Console.WriteLine("Device Name: " + device.FriendlyName);
                using (var capture = new WasapiLoopbackCapture(device))
                {
                    Console.WriteLine("Device Channels:" + capture.WaveFormat.Channels);
                    Console.WriteLine("Device SampleRate:" + capture.WaveFormat.SampleRate);
                    Console.WriteLine("Device BitsPerSample:" + capture.WaveFormat.BitsPerSample);
                }
            }
        }

        // Drain any stale audio left over from a previous session. Looping on
        // TryDequeue is safe even if another thread is touching the queue.
        while (voicebuff.TryDequeue(out _))
        {
        }

        waveSource = new WaveInEvent
        {
            BufferMilliseconds = wave_buffer_milliseconds,
            // 16-bit, 16 kHz, mono recording format.
            WaveFormat = new WaveFormat(wave_buffer_collectfrequency, wave_buffer_collectbits, wave_buffer_collectchannels)
        };
        waveSource.DataAvailable += WaveSource_DataAvailable;
        SetFileName(AppDomain.CurrentDomain.BaseDirectory + "tmp.wav");
        waveFile = new WaveFileWriter(fileName, waveSource.WaveFormat);
        waveSource.StartRecording();
    }

    /// <summary>
    /// Stops recording and releases the capture device and the file writer.
    /// Safe to call repeatedly or before <see cref="StartRec"/>.
    /// </summary>
    public void StopRec()
    {
        if (waveSource != null)
        {
            waveSource.StopRecording();
            // Unsubscribe so no late DataAvailable callback touches a disposed writer.
            waveSource.DataAvailable -= WaveSource_DataAvailable;
            waveSource.Dispose();
            waveSource = null;
        }
        // Dispose the writer even when waveSource is already null, so a
        // half-initialized session cannot leak the open file handle.
        if (waveFile != null)
        {
            waveFile.Dispose();
            waveFile = null;
        }
    }

    /// <summary>
    /// Stores the full output file path in the <see cref="fileName"/> field.
    /// </summary>
    /// <param name="fileName">Absolute path of the WAV file to write.</param>
    public void SetFileName(string fileName)
    {
        this.fileName = fileName;
    }

    /// <summary>
    /// Raised when the microphone delivers a chunk of audio. Copies the valid
    /// portion of the buffer into the queue and appends it to the WAV file.
    /// </summary>
    /// <param name="sender">The <see cref="WaveInEvent"/> raising the event.</param>
    /// <param name="e">Carries the shared capture buffer and the byte count recorded.</param>
    private void WaveSource_DataAvailable(object sender, WaveInEventArgs e)
    {
        if (waveFile != null && e.Buffer != null && e.BytesRecorded > 0)
        {
            // e.Buffer is reused by NAudio across callbacks and may be larger
            // than BytesRecorded; enqueue a trimmed copy, never the shared
            // buffer itself, or later callbacks overwrite queued audio.
            var chunk = new byte[e.BytesRecorded];
            Buffer.BlockCopy(e.Buffer, 0, chunk, 0, e.BytesRecorded);
            voicebuff.Enqueue(chunk);
            waveFile.Write(chunk, 0, chunk.Length);
            waveFile.Flush();
        }
    }

    /// <summary>
    /// Dequeues one audio chunk from the buffer.
    /// </summary>
    /// <returns>The next PCM chunk, or null when the queue is empty.</returns>
    public byte[] Wavedata_Dequeue()
    {
        voicebuff.TryDequeue(out byte[] datas);
        return datas;
    }
}
