import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'dart:math';
import 'dart:typed_data';
import 'package:dio/dio.dart';
import 'package:flutter/services.dart' show rootBundle;
import 'package:path_provider/path_provider.dart';
import 'package:speex_demo/sound_demo.dart';
import 'package:speex_demo/speex_ffi.dart';
import 'package:web_socket_channel/io.dart';
import 'package:web_socket_channel/web_socket_channel.dart';
import 'package:web_socket_channel/status.dart' as status;
import 'chat_result_entity.dart';
import 'xf_result_entity.dart';

/// Streaming speech-chat client.
///
/// Sends speex-encoded audio frames to the speech server over a WebSocket
/// and plays back the speex-encoded TTS audio it returns.
class DerucciIatStream {
  /// Frame status: first frame of a session (carries the session metadata).
  // NOTE: SCREAMING_CAPS kept for backward compatibility with existing
  // callers, although Dart style prefers lowerCamelCase constants.
  static const int STATUS_FIRST_FRAME = 0;

  /// Frame status: intermediate audio frame.
  static const int STATUS_CONTINUE_FRAME = 1;

  /// Frame status: final (empty) frame that ends the session.
  static const int STATUS_LAST_FRAME = 2;

  /// Speech-chat server endpoint (dev environment).
  static const String _wssUrl = "ws://10.16.3.174:8089/speech-chat";

  /// Active WebSocket connection; null until a session has been started.
  WebSocketChannel? _channel;

  /// Current frame status for the session in progress.
  int _status = STATUS_FIRST_FRAME;

  /// Decoder for the speex-encoded TTS audio returned by the server.
  final decoder = SpeexDecoderFFI();

  Data? resultData;

  /// Accumulates the raw (still speex-encoded) TTS audio of one reply.
  final BytesBuilder _audioBuffer = BytesBuilder();

  /// Session id sent with the first frame; regenerated once per session.
  String _smid = "";

  /// Closes any previous connection, connects with [tokenId] and subscribes
  /// to server messages.
  ///
  /// Also resets the frame state — without this, a second session would
  /// start in the STATUS_LAST_FRAME left over from the previous one and
  /// never send the metadata-carrying first frame (bug fix).
  Future<void> _openSession(String tokenId) async {
    print("开始连接WebSocket");
    await _channel?.sink.close(status.normalClosure);
    _status = STATUS_FIRST_FRAME;
    _channel = await createCrossPlatformWebSocket(_wssUrl, {
      "X-Client-Type": "APP",
      "X-Token-ID": tokenId,
      "X-Env": "dev",
    });

    // Subscribe to recognition / TTS results.
    _channel!.stream.listen((message) {
      _onMessage(message);
    }, onDone: () {
      print("WebSocket 已关闭");
    }, onError: (err) {
      print("WebSocket 错误: $err");
    });
  }

  /// Streams an audio file bundled in the app's assets to the server,
  /// chunked and throttled to emulate real-time capture.
  Future<void> startRecognizeByAsset(String assetPath) async {
    await _openSession(
        "7e613cf30a6295940b4d65c67ec1e4a98033b63691235a2a83c535ace2077df56d269a42057e597163c2f783dadc7c01");

    // Read the audio file from assets.
    final Uint8List audioBytes = await _loadAssetBytes(assetPath);
    // One packet per 60-byte speex frame. (The original comment claimed
    // 1800 bytes per frame, which contradicted the value.)
    const int frameSize = 60;
    final int total = audioBytes.length;
    int offset = 0;

    print("音频总长度: $total 字节");
    await SoundDemo.instance.stopStreamPlayer();
    await SoundDemo.instance.startStreamPlayer();

    while (offset < total) {
      final end = min(offset + frameSize, total);
      await _sendFrame(audioBytes.sublist(offset, end));
      offset = end;
      // Throttle to roughly real-time streaming.
      await Future.delayed(const Duration(milliseconds: 40));
    }

    // An empty payload in STATUS_LAST_FRAME tells the server we're done.
    _status = STATUS_LAST_FRAME;
    await _sendFrame(Uint8List(0));
  }

  /// Streams live-captured audio.
  ///
  /// Call with [s] == 0 on the first chunk to (re)connect; pass a null
  /// [chunk] to close the session with a final frame.
  Future<void> startRecognize(Uint8List? chunk, int s) async {
    print("chunk长度 = ${chunk?.length} s = $s");
    if (s == 0) {
      await _openSession(
          "bc73acea9db7ffc4646cd974203c1234632fb90de43169d1b6b182e980debd0ee44c56dfdfe31eeb5e3c15405ab393c0");
      await SoundDemo.instance.stopStreamPlayer();
      await SoundDemo.instance.startStreamPlayer();
    }

    if (chunk == null) {
      // End-of-speech marker: send the closing frame.
      _status = STATUS_LAST_FRAME;
      await _sendFrame(Uint8List(0));
      return;
    }

    await _sendFrame(chunk);
  }

  /// Handles one JSON message from the server and dispatches on its
  /// `responseType`.
  void _onMessage(String data) {
    final Map<String, dynamic> json = jsonDecode(data);
    final ChatResultEntity res = ChatResultEntity.fromJson(json);
    print(data);
    if (res.metadata?.responseType == "ASR_TEXT") {
      print("语音识别结果：${res.payload?.text}");
    }
    if (res.metadata?.responseType == "TTS_TEXT") {
      print("响应结果：${res.payload?.text}");
    }
    if (res.metadata?.responseType == "AUDIO") {
      print("语音播放状态：${res.payload?.status}");
      // Decode once and reuse — the original decoded the same base64
      // payload three times.
      final Uint8List bytes = base64.decode(res.payload!.audio!);
      print("语音播放流：$bytes");
      // The server sends fixed-size 60-byte speex frames; decode each to
      // PCM and feed the stream player. A trailing partial frame, if any,
      // is skipped — it cannot be decoded.
      const int frameSize = 60;
      for (int offset = 0;
          offset + frameSize <= bytes.length;
          offset += frameSize) {
        final pcmFrame = decoder.decode(bytes.sublist(offset, offset + frameSize));
        SoundDemo.instance.feedAudioChunk(pcmFrame);
      }
      _audioBuffer.add(bytes);
      // status == 2 marks the end of the TTS audio stream.
      if (res.payload?.status == 2) {
        SoundDemo.instance.stopStreamPlayer();
      }
    }
    if (res.metadata?.responseStatus == "ERROR" &&
        res.metadata?.responseType == "COMPLETE") {
      print(res.payload?.message);
    }
  }

  /// Encodes [chunk] into the protocol's JSON frame and sends it.
  ///
  /// The first frame carries the session metadata and advances the state
  /// to STATUS_CONTINUE_FRAME; the last frame (state set by the caller)
  /// sends only the status with no audio.
  Future<void> _sendFrame(Uint8List chunk) async {
    final dataSection = {
      "status": _status,
      "audio": base64Encode(chunk),
    };

    Map<String, dynamic> frame;
    if (_status == STATUS_FIRST_FRAME) {
      print("发送音频-首帧");
      _smid = generateFixedLengthNumUuid(32);
      frame = {
        "metadata": {
          "smid": _smid,
          "requestType": "AUDIO",
          "familyId": "7306",
          "roomName": "默认房间",
          "asrEncoding": "speex",
          "ttsEncoding": "speex-org-wb;7",
        },
        "payload": dataSection,
      };
      _status = STATUS_CONTINUE_FRAME;
    } else if (_status == STATUS_CONTINUE_FRAME) {
      print("发送音频-中帧");
      frame = {"payload": dataSection};
    } else {
      // STATUS_LAST_FRAME: status only, no audio field.
      print("发送音频-尾帧");
      frame = {
        "payload": {"status": _status},
      };
    }
    print(frame);
    _channel?.sink.add(jsonEncode(frame));
  }

  /// Loads [path] from the Flutter asset bundle as raw bytes.
  Future<Uint8List> _loadAssetBytes(String path) async {
    final ByteData data = await rootBundle.load(path);
    return data.buffer.asUint8List();
  }

  /// Returns a random numeric string of [length] digits (1–32).
  ///
  /// Not cryptographically secure — used only as a session id.
  /// Throws [ArgumentError] if [length] is out of range.
  String generateFixedLengthNumUuid(int length) {
    if (length < 1 || length > 32) {
      throw ArgumentError("长度必须在 1-32 之间");
    }

    final random = Random();
    final buffer = StringBuffer();
    for (int i = 0; i < length; i++) {
      buffer.write(random.nextInt(10)); // one digit in 0-9 per iteration
    }
    return buffer.toString();
  }

  /// Creates a WebSocket channel with custom request [headers].
  ///
  /// Only dart:io platforms are supported: browser WebSockets cannot set
  /// arbitrary headers, so web would need a different transport.
  Future<WebSocketChannel> createCrossPlatformWebSocket(
      String uri, Map<String, String> headers) async {
    if (Platform.isAndroid ||
        Platform.isIOS ||
        Platform.isMacOS ||
        Platform.isWindows ||
        Platform.isLinux) {
      final webSocket = await WebSocket.connect(uri, headers: headers);
      return IOWebSocketChannel(webSocket);
    }
    throw UnsupportedError('当前平台不支持 WebSocket');
  }
}


/// Stages [data] as a temp file named [fileName] and uploads it as
/// multipart form data to [uploadUrl].
///
/// Best-effort: failures are logged, never thrown (callers rely on this).
/// The staged temp file is deleted afterwards — the original leaked one
/// file per call. [uploadUrl] defaults to the original hard-coded
/// endpoint, so existing call sites are unaffected.
Future<void> uploadFile(
  Uint8List data,
  String fileName, {
  String uploadUrl = 'http://10.34.41.160:8888/upload',
}) async {
  File? tempFile;
  try {
    // 1. Stage the bytes in the platform temp directory — dio's
    // MultipartFile.fromFile needs an on-disk path.
    final tempDir = await getTemporaryDirectory();
    final filePath = '${tempDir.path}/$fileName';

    // 2. Write the payload to the staging file.
    tempFile = File(filePath);
    await tempFile.writeAsBytes(data);

    // 3. Upload as multipart/form-data.
    final dio = Dio();
    final formData = FormData.fromMap({
      'file': await MultipartFile.fromFile(filePath, filename: fileName),
    });

    final response = await dio.post(
      uploadUrl,
      data: formData,
    );

    print('上传成功: ${response.data}');
  } catch (e) {
    print('上传失败: $e');
  } finally {
    // Remove the staged copy regardless of the upload outcome.
    try {
      await tempFile?.delete();
    } catch (_) {
      // Best effort — an undeletable temp file is not worth surfacing.
    }
  }
}
