import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'dart:typed_data';
import 'package:crypto/crypto.dart';
import 'package:flutter/services.dart' show rootBundle;
import 'package:web_socket_channel/web_socket_channel.dart';
import 'package:web_socket_channel/status.dart' as status;

import 'xf_result_entity.dart';

/// iFlytek streaming speech dictation (IAT) example client.
///
/// Protocol reference: https://doc.xfyun.cn/rest_api/语音听写（流式版）.html
///
/// Usage: either [startRecognizeByAsset] to stream a bundled audio file, or
/// repeated calls to [startRecognize] to stream externally captured chunks.
class XfIatStream {
  /// ==== iFlytek account configuration ====
  // NOTE(review): credentials are hard-coded for demo purposes only;
  // move them to secure configuration before shipping.
  final String hostUrl = "wss://iat-api.xfyun.cn/v2/iat";
  final String host = "iat-api.xfyun.cn";
  final String appId = "2d2ebcbf";
  final String apiSecret = "MjZkYzhjYzBjMTVmNWNiMGRjZDRkODli";
  final String apiKey = "b5043bdd51f169b9268b48dc7ce5910f";
  final String uri = "/v2/iat";

  /// Frame status values defined by the IAT protocol.
  static const int STATUS_FIRST_FRAME = 0; // first frame: carries common/business sections
  static const int STATUS_CONTINUE_FRAME = 1; // intermediate frame: audio only
  static const int STATUS_LAST_FRAME = 2; // final frame: signals end of audio

  /// Session id of the current recognition, taken from server responses.
  String currentSid = "";

  /// Active WebSocket connection (null when no session is open).
  WebSocketChannel? _channel;

  /// Status of the next frame to be sent.
  int _status = STATUS_FIRST_FRAME;

  /// Most recent intermediate result payload.
  // TODO(review): with "dwa": "wpgs" the server may send replacement ("rpl")
  // segments; a production client should merge them instead of keeping only
  // the latest payload — confirm against the wpgs section of the IAT docs.
  Data? resultData;

  /// Opens the WebSocket, resets per-session state and installs the
  /// message listener. Shared by both public entry points.
  void _connect() {
    final date = HttpDate.format(DateTime.now().toUtc());
    final authStr = _getAuthStr(date);
    // Query parameters must be URL-encoded: the RFC-1123 date contains
    // spaces, commas and colons, which are invalid raw query characters.
    final wssUrl = "$hostUrl"
        "?authorization=${Uri.encodeComponent(authStr)}"
        "&date=${Uri.encodeComponent(date)}"
        "&host=${Uri.encodeComponent(host)}";

    print("Connecting to: $wssUrl");

    // A new session must always begin with the first frame (which carries
    // the common/business sections), even after a previous session ended.
    _status = STATUS_FIRST_FRAME;
    resultData = null;

    _channel = WebSocketChannel.connect(Uri.parse(wssUrl));
    _channel!.stream.listen((message) {
      _onMessage(message);
    }, onDone: () {
      print("本次识别 sid：$currentSid");
      print("WebSocket 已关闭");
    }, onError: (err) {
      print("WebSocket 错误: $err");
    });
  }

  /// Streams an audio file bundled in Flutter assets to the recognizer,
  /// frame by frame, then sends the empty last frame and closes the session.
  Future<void> startRecognizeByAsset(String assetPath) async {
    _connect();

    final Uint8List audioBytes = await _loadAssetBytes(assetPath);
    // 60 bytes per frame, matching the `speex_size: 60` business parameter.
    const int highWaterMark = 60 * 1;
    final int total = audioBytes.length;
    int offset = 0;

    print("音频总长度: $total 字节");

    while (offset < total) {
      final end =
          (offset + highWaterMark > total) ? total : offset + highWaterMark;
      await _sendFrame(audioBytes.sublist(offset, end));
      offset = end;
      // Throttle sending to roughly real-time rate.
      await Future.delayed(const Duration(milliseconds: 40));
    }

    // An empty frame with LAST status tells the server the audio is complete.
    _status = STATUS_LAST_FRAME;
    await _sendFrame(Uint8List(0));

    // Give the server time to deliver the final result before closing.
    await Future.delayed(const Duration(seconds: 2));
    _channel?.sink.close(status.normalClosure);
  }

  /// Streams one externally captured audio [chunk].
  ///
  /// [s] == 0 opens a new session before sending the chunk.
  /// A null [chunk] ends the session: the empty last frame is sent and the
  /// connection is closed after a short grace period.
  Future<void> startRecognize(Uint8List? chunk, int s) async {
    print("chunk长度 = ${chunk?.length} s = $s");
    if (s == 0) {
      _connect();
    }

    if (chunk == null) {
      _status = STATUS_LAST_FRAME;
      await _sendFrame(Uint8List(0));
      await Future.delayed(const Duration(seconds: 2));
      _channel?.sink.close(status.normalClosure);
      return;
    }

    await _sendFrame(chunk);
  }

  /// Handles one JSON text message from the server.
  void _onMessage(String data) {
    final Map<String, dynamic> json = jsonDecode(data);
    // Every response carries the session id; remember it for the close log.
    currentSid = (json["sid"] as String?) ?? currentSid;
    XfResultEntity res = XfResultEntity.fromJson(json);
    if (res.code != 0) {
      print("错误：code=${res.code}，message=${res.message}");
      return;
    }
    // Build the text of THIS message. (Previously the text was built from
    // the prior message's payload, which lagged one message behind and
    // dropped the final segment from the end-of-session result.)
    String str = "";
    for (var item in res.data?.result?.ws ?? []) {
      for (var cw in item.cw ?? []) {
        str += (cw.w ?? "");
      }
    }
    if (res.data?.status == 2) {
      // status == 2 marks the last result of the session.
      print(json);
      print("识别结果：$str。");
    } else {
      resultData = res.data;
      print("识别过程：$str");
    }
  }

  /// Sends one audio frame.
  ///
  /// The first frame of a session carries the `common` and `business`
  /// sections; all subsequent frames carry only the `data` section.
  Future<void> _sendFrame(Uint8List chunk) async {
    final dataSection = {
      "status": _status,
      "format": "audio/L16;rate=16000",
      "audio": base64Encode(chunk),
      "encoding": "speex-wb"
    };

    Map<String, dynamic> frame;
    if (_status == STATUS_FIRST_FRAME) {
      frame = {
        "common": {"app_id": appId},
        "business": {
          "language": "zh_cn",
          "domain": "iat",
          "accent": "mandarin",
          "speex_size": 60,
          "dwa": "wpgs" // dynamic correction
        },
        "data": dataSection
      };
      _status = STATUS_CONTINUE_FRAME;
    } else {
      frame = {"data": dataSection};
    }
    print("音频帧：${jsonEncode(frame)}");
    _channel?.sink.add(jsonEncode(frame));
  }

  /// Loads the raw bytes of a file bundled in Flutter assets.
  Future<Uint8List> _loadAssetBytes(String path) async {
    final ByteData data = await rootBundle.load(path);
    // Pass offset and length explicitly: the ByteData may be a view into a
    // larger shared buffer, and a bare asUint8List() would return extra bytes.
    return data.buffer.asUint8List(data.offsetInBytes, data.lengthInBytes);
  }

  /// Builds the base64 authorization string per the iFlytek auth spec:
  /// HMAC-SHA256-sign "host: ...\ndate: ...\nGET /v2/iat HTTP/1.1" with the
  /// API secret, embed the signature in the authorization header template,
  /// then base64-encode the whole header.
  String _getAuthStr(String date) {
    final signatureOrigin = "host: $host\ndate: $date\nGET $uri HTTP/1.1";
    final hmacSha = Hmac(sha256, utf8.encode(apiSecret));
    final digest = hmacSha.convert(utf8.encode(signatureOrigin));
    final signature = base64Encode(digest.bytes);
    final authorizationOrigin =
        'api_key="$apiKey", algorithm="hmac-sha256", headers="host date request-line", signature="$signature"';
    return base64Encode(utf8.encode(authorizationOrigin));
  }
}
