import 'dart:convert';
import 'dart:developer';
import 'dart:io';
import 'dart:typed_data';

import 'package:dio/dio.dart';
import 'package:flutter/material.dart';
import 'dart:async';

import 'package:flutter_sound/flutter_sound.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';

/// Records microphone audio to a temporary 16 kHz mono PCM16 WAV file
/// using flutter_sound.
///
/// Call [start] to begin recording and [stop] to finish; the captured
/// audio is then available at [audioFilePath].
class CustomDataSource {
  late FlutterSoundRecorder _recorder;

  // NOTE(review): never listened to or fed by the visible code —
  // presumably intended for streaming PCM chunks; confirm before removing.
  final StreamController<Food> _stream_ctl = StreamController<Food>();

  CustomDataSource() {
    _recorder = FlutterSoundRecorder();
  }

  /// Absolute path of the most recent recording ("" until [start] runs).
  String audioFilePath = "";

  /// Opens the recorder session and starts capturing into a temp file.
  ///
  /// BUG FIX: this was declared `async*` (a stream generator). An
  /// `async*` body does not execute until the returned Stream is
  /// listened to, so `await start()` never actually started recording.
  /// It is now a plain `async` method returning a [Future].
  Future<void> start() async {
    audioFilePath =
        "${(await getTemporaryDirectory()).absolute.path}/temp.wav";
    await _recorder.openRecorder();
    await _recorder.startRecorder(
        codec: Codec.pcm16,
        sampleRate: 16000,
        numChannels: 1,
        toFile: audioFilePath);
  }

  /// Stops recording and releases the recorder session.
  ///
  /// BUG FIX: was `void stop() async`, which made completion
  /// unawaitable and swallowed errors; now returns a [Future]. Also
  /// closes [_stream_ctl], which previously leaked.
  Future<void> stop() async {
    await _recorder.stopRecorder();
    await _recorder.closeRecorder();
    await _stream_ctl.close();
  }
}

/// Page widget for the real-time speech-recognition (ASR) demo.
class RealASRView extends StatefulWidget {
  const RealASRView({super.key});

  @override
  State<RealASRView> createState() {
    return _RealASRViewState();
  }
}

/// State for [RealASRView]: records audio while the blue square is
/// pressed, uploads the WAV for recognition on release.
class _RealASRViewState extends State<RealASRView> {
  // BUG FIX: `_dio` was declared `late final` but never initialized
  // anywhere, so the first upload crashed with a
  // LateInitializationError. It is now created in [initState].
  late final Dio _dio;
  final _statesController = MaterialStatesController();

  // Button callback: `startRecognize` while idle, null while disabled.
  void Function()? _btn_onclick;

  late final FlutterSoundRecorder _recorder = FlutterSoundRecorder();

  // NOTE(review): unused in the visible code — presumably for streaming
  // PCM chunks to the server; confirm before removing.
  final StreamController<Food> _stream_ctl = StreamController<Food>();

  // NOTE(review): unused — looks like an intended audio slice duration
  // for streaming recognition; confirm before removing.
  final Duration _slice_time = const Duration(milliseconds: 40);

  /// Absolute path of the current recording file.
  String audioFilePath = "";

  @override
  void initState() {
    super.initState();
    _dio = Dio(); // was missing — see the field comment on [_dio]
    _btn_onclick = startRecognize;
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
        home: Scaffold(
      appBar: AppBar(
        title: const Text('实时语音识别示例'),
        leading: BackButton(onPressed: () {
          // Pops the *outer* navigator: `context` here is the State's
          // build context, above the nested MaterialApp.
          Navigator.pop(context);
        }),
      ),
      body: SingleChildScrollView(
        padding: const EdgeInsets.fromLTRB(5, 0, 5, 10),
        child: Column(
          crossAxisAlignment: CrossAxisAlignment.start,
          children: [
            Wrap(
              spacing: 10,
              children: [
                GestureDetector(
                  onTapDown: (_) {
                    print("按下");
                    startRecognize();
                  },
                  onTapUp: (_) async {
                    print("松开");
                    // BUG FIX: stop and close the recorder before
                    // uploading; previously the WAV was still being
                    // written (incomplete header/data) when it was sent.
                    await _recorder.stopRecorder();
                    await _recorder.closeRecorder();
                    FormData formData = FormData.fromMap({
                      "file": await MultipartFile.fromFile(audioFilePath),
                    });
                    // NOTE(review): hard-coded LAN endpoint — fine for a
                    // demo, should come from configuration otherwise.
                    Response response = await _dio.post(
                      "http://192.168.1.9:5000/asr",
                      data: formData,
                      options: Options(
                        headers: {
                          'Content-Type': 'multipart/form-data',
                        },
                      ),
                    );
                    print("response======${response}");
                    setState(() {});
                  },
                  child: Container(
                    width: 100,
                    height: 100,
                    color: Colors.blue,
                    child: Center(
                      child: Text("长按识别"),
                    ),
                  ),
                ),
                ElevatedButton(
                    statesController: _statesController,
                    onPressed: _btn_onclick,
                    child: Text(_btn_onclick == null
                        ? "请等待"
                        : _btn_onclick == startRecognize
                            ? "开始识别"
                            : "停止识别")),
              ],
            ),
            const SizedBox(height: 10),
            const ListTile(title: Text('识别结果')),
          ],
        ),
      ),
    ));
  }

  /// Requests permissions, opens the recorder, and starts capturing
  /// 16 kHz mono PCM16 audio into [audioFilePath].
  ///
  /// BUG FIX: was `async*` (a stream generator) — its body never ran
  /// because the returned Stream was never listened to, so `await
  /// start()` did nothing. Now a plain `async` method.
  Future<void> start() async {
    await [
      Permission.microphone,
      Permission.storage,
    ].request();
    await _recorder.openRecorder();
    await _recorder.startRecorder(
        toFile: audioFilePath,
        codec: Codec.pcm16,
        sampleRate: 16000,
        numChannels: 1);
  }

  /// Chooses the recording destination and begins recording.
  Future<void> startRecognize() async {
    Directory directory = await getApplicationDocumentsDirectory();
    audioFilePath = '${directory.path}/audio.wav';
    await start();
  }

  /// Disables the record button (recording itself is stopped in the
  /// gesture's onTapUp handler).
  Future<void> stopRecognize() async {
    setState(() {
      _btn_onclick = null;
    });
  }

  @override
  void dispose() {
    // BUG FIX: release native and stream resources — the original
    // dispose leaked the recorder, the stream controller, the button
    // states controller, and the Dio client.
    _recorder.closeRecorder();
    _stream_ctl.close();
    _statesController.dispose();
    _dio.close();
    super.dispose();
  }
}
