import 'package:flutter/material.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:jump_scape/js_data/js_box/js_init.dart';
import 'package:jump_scape/js_ui/js_widget/js_image_view.dart';
import 'package:jump_scape/js_ui/js_widget/js_style.dart';
import 'package:jump_scape/js_util/js_audio_recorder.dart';
import 'package:open_settings/open_settings.dart';

import '../../objectbox.g.dart';

/// Shows a non-dismissible dialog explaining that the app needs microphone
/// access to record personal introductions.
///
/// "No" simply closes the dialog; "Yes" closes it and opens the system app
/// settings so the user can grant the permission manually.
void jsShowRequiresMicrophoneDialog(BuildContext context) {
  showDialog(
    context: context,
    barrierDismissible: false, // force an explicit Yes/No choice
    useSafeArea: false,
    barrierColor: Colors.black.withOpacity(0.55),
    builder: ((context) {
      return Column(
        mainAxisAlignment: MainAxisAlignment.center,
        children: [
          // Outer wrapper only constrains the width. The rounded white card
          // and its drop shadow are painted once, on the Container inside the
          // Dialog (previously the same decoration was painted twice, which
          // doubled up the shadow).
          SizedBox(
            width: 280.r,
            child: Dialog(
              alignment: Alignment.center,
              backgroundColor: Colors.transparent,
              insetPadding: EdgeInsets.zero,
              child: Container(
                width: 280.r,
                decoration: jsShapeRound(radius: 12.r, color: Colors.white, shadows: [
                  BoxShadow(offset: Offset(2.r, 4.r), blurRadius: 4.r, color: Colors.black.withOpacity(0.2)),
                ]),
                child: Column(
                  crossAxisAlignment: CrossAxisAlignment.start,
                  children: [
                    // Message body.
                    Padding(
                      padding: EdgeInsets.fromLTRB(30.r, 25.r, 30.r, 18.r),
                      child: Text('"JumpScape" requires your microphone to record personal introductions.', textAlign: TextAlign.center, style: jsTextStyle(font: JsTextFont.PoetsenOne, fontColor: Color(0xFF050A32), fontSize: 18.r, height: 28.0 / 18)),
                    ),
                    // Horizontal divider above the button row.
                    Container(width: double.infinity, height: 0.5.r, color: Color(0xFFCCCCCC)),
                    Row(
                      children: [
                        // "No": just dismiss the dialog.
                        Expanded(
                          child: InkWell(
                            child: Container(
                              height: 51.r,
                              alignment: Alignment.center,
                              child: Text('No', style: jsTextStyle(font: JsTextFont.PoetsenOne, fontSize: 18.r, fontColor: Color(0xFF9A9A9A))),
                            ),
                            onTap: () {
                              if (Navigator.canPop(context)) {
                                Navigator.pop(context);
                              }
                            },
                          ),
                        ),
                        // Vertical divider between the two buttons.
                        Container(width: 0.5.r, height: 51.r, color: Color(0xFFCCCCCC)),
                        // "Yes": dismiss, then jump to the system settings page.
                        Expanded(
                          child: InkWell(
                            child: Container(
                              height: 51.r,
                              alignment: Alignment.center,
                              child: Text('Yes', style: jsTextStyle(font: JsTextFont.PoetsenOne, fontSize: 18.r, fontColor: Color(0xFF050A32))),
                            ),
                            onTap: () {
                              if (Navigator.canPop(context)) {
                                Navigator.pop(context);
                              }
                              OpenSettings.openAppSetting();
                            },
                          ),
                        ),
                      ],
                    ),
                  ],
                ),
              ),
            ),
          )
        ],
      );
    }),
  );
}

/// Presents the audio-recording sheet ([JsAudio]) anchored to the bottom of
/// the screen. Tapping the dimmed barrier dismisses it.
jsShowAudioDialog(BuildContext context) {
  showDialog(
    context: context,
    barrierDismissible: true,
    useSafeArea: false,
    barrierColor: Colors.black.withOpacity(0.55),
    builder: (context) {
      // 375x375 square sheet pinned to the bottom edge.
      final sheet = Dialog(
        alignment: Alignment.bottomCenter,
        backgroundColor: Colors.transparent,
        insetPadding: EdgeInsets.zero,
        child: SizedBox(
          width: 375.r,
          height: 375.r,
          child: const JsAudio(),
        ),
      );

      return Column(
        mainAxisAlignment: MainAxisAlignment.end,
        children: [sheet],
      );
    },
  );
}

/// Press-and-hold audio recording widget shown by [jsShowAudioDialog]:
/// hold the button to record, drag upward to cancel, release to send.
class JsAudio extends StatefulWidget {
  const JsAudio({super.key});

  // Narrowed from State<StatefulWidget> to the idiomatic State<JsAudio>
  // (covariant return type, backward compatible).
  @override
  State<JsAudio> createState() => _JsAudio();
}

/// State for [JsAudio]. Implements a press-and-hold recorder:
///  * press down           -> start recording
///  * drag above threshold -> switch to "release to cancel"
///  * release / cancel     -> stop and either send or discard
class _JsAudio extends State<JsAudio> {
  // True while a vertical drag gesture is in progress. Guards against double
  // start/end callbacks (both onVerticalDragDown and onVerticalDragStart can
  // fire for one press).
  var _isStartDrag = false;

  // Height of the bottom recording panel. The field initializer is only a
  // placeholder; the real value is computed in initState from screen width
  // (panel is 258px tall in the 375px design).
  var _bottomAudioHeight = 150.r;

  // Global Y threshold: dragging above this point means "cancel".
  var _audioMoveCancelY = 0.0;

  // Latest cancel/send decision derived from the drag position.
  var _isCancelAudioSend = false;

  // UI state: whether the current drag is in the "release to cancel" zone.
  var isCancelAudioState = false;

  // UI state: whether a recording is in progress.
  var isStartAudio = false;

  @override
  void initState() {
    super.initState();
    _bottomAudioHeight = ScreenUtil().screenWidth * (258.0 / 375.0);
    _audioMoveCancelY = ScreenUtil().screenHeight - _bottomAudioHeight;
  }

  @override
  Widget build(BuildContext context) {
    return Column(
      mainAxisAlignment: MainAxisAlignment.end,
      children: [
        Spacer(),
        // Status label reflecting the three UI states.
        Text(
          !isStartAudio
              ? 'Press Record'
              : isCancelAudioState
                  ? 'Release Cancel'
                  : 'Release Send',
          style: jsTextStyle(font: JsTextFont.October_Devanagari_Medium, fontSize: 16.r, fontColor: Colors.white),
        ),
        SizedBox(height: 28.r),
        Stack(
          children: [
            jsImage('assets/image/ic_audio_bg.png', width: double.infinity, fit: BoxFit.fitWidth),
            // Animated/static wave indicator above the record button.
            Align(
              alignment: Alignment.topCenter,
              child: jsImage(
                  !isStartAudio
                      ? 'assets/image/ic_audio_s.png'
                      : isCancelAudioState
                          ? 'assets/image/ic_audio.png'
                          : 'assets/svga/ic_audio.svga',
                  height: 50.r,
                  fit: BoxFit.fitHeight,
                  padding: EdgeInsets.only(top: 37.r)),
            ),
            // The record button itself. Vertical-drag callbacks implement the
            // press/drag-to-cancel/release state machine.
            Align(
              alignment: Alignment.topCenter,
              child: GestureDetector(
                onVerticalDragDown: (details) {
                  if (!_isStartDrag) {
                    _isStartDrag = true;
                    onAudioStart();
                  }
                },
                onVerticalDragStart: (details) {
                  // Same guard as onVerticalDragDown — only one of the two
                  // actually starts the recording.
                  if (!_isStartDrag) {
                    _isStartDrag = true;
                    onAudioStart();
                  }
                },
                onVerticalDragUpdate: (details) {
                  if (_isStartDrag) {
                    final y = details.globalPosition.dy;
                    // Above the panel -> releasing would cancel the recording.
                    final isCancel = y < _audioMoveCancelY;

                    // Only propagate actual transitions to avoid rebuild churn.
                    if (isCancel != _isCancelAudioSend) {
                      _isCancelAudioSend = isCancel;
                      onAudioUpdate(_isCancelAudioSend);
                    }
                  }
                },
                onVerticalDragCancel: () {
                  if (_isStartDrag) {
                    _isStartDrag = false;
                    onAudioEnd(_isCancelAudioSend);
                  }
                },
                onVerticalDragEnd: (details) {
                  if (_isStartDrag) {
                    _isStartDrag = false;
                    onAudioEnd(_isCancelAudioSend);
                  }
                },
                child: Container(
                  margin: EdgeInsets.only(top: 118.r),
                  clipBehavior: Clip.antiAlias,
                  decoration: jsShapeRound(radius: 40.r, shadows: [
                    BoxShadow(offset: Offset(1.r, 1.r), blurRadius: 4.r, color: Color(0xFFC9C9C9)),
                  ]),
                  child: jsImage(isStartAudio && isCancelAudioState ? 'assets/image/ic_audio_btn.png' : 'assets/image/ic_audio_btn_s.png', width: 80.r, fit: BoxFit.fitWidth),
                ),
              ),
            ),
          ],
        ),
      ],
    );
  }

  /// Starts a recording session and registers the completion callback.
  void onAudioStart() {
    setState(() {
      isCancelAudioState = false;
      isStartAudio = true;
    });

    JsAudioRecorder().startRecording(context, (data) async {
      if (data.isNotEmpty) {
        // Persist the recorded file path on the current member record.
        final setting = await JsObjectBox.setting;
        final member = JsObjectBox.memberBox.query(MemberBox_.userId.equals(setting.userId)).build().findFirst();
        if (member != null) {
          member.audioPath = data[0];
          JsObjectBox.memberBox.put(member);

          debugPrint("[member] ✅ $member");
        }

        // Close the sheet — only if we are still in the tree.
        if (mounted) {
          if (Navigator.canPop(context)) {
            Navigator.pop(context);
          }
        }
      } else {
        // Recording produced nothing (failed or cancelled). This callback can
        // fire after the dialog was disposed, so guard setState with mounted
        // (previously unguarded: risked "setState() called after dispose()").
        if (mounted) {
          setState(() {
            isStartAudio = false;
          });
        }
      }
    });
  }

  /// Updates the "release to cancel" indicator during a drag.
  void onAudioUpdate(bool isCancel) {
    setState(() {
      isCancelAudioState = isCancel;
    });
  }

  /// Finishes the gesture: discard the take and close when [isCancel] is
  /// true, otherwise stop recording normally (the startRecording callback
  /// then handles persisting and closing).
  void onAudioEnd(bool isCancel) {
    if (isCancel) {
      JsAudioRecorder().cancelRecording();
      if (Navigator.canPop(context)) {
        Navigator.pop(context);
      }
    } else {
      JsAudioRecorder().endRecording();
    }
  }
}
