/*
 * Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *             http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.example.hanguangfacsemoanaapp
import android.R.string
import android.annotation.SuppressLint
import android.content.ContentValues
import android.content.Context
import android.content.SharedPreferences
import android.content.res.Configuration
import android.database.Cursor
import android.graphics.Bitmap
import android.net.Uri
import android.os.Build
import android.os.Bundle
import android.os.Environment
import android.provider.DocumentsContract
import android.provider.MediaStore
import android.util.Log
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.webkit.WebSettings
import android.webkit.WebView
import android.webkit.WebViewClient
import android.widget.ArrayAdapter
import android.widget.Button
import android.widget.ImageView
import android.widget.ListView
import android.widget.Toast
import androidx.appcompat.app.AlertDialog
import androidx.appcompat.app.AppCompatActivity.MODE_PRIVATE
import androidx.camera.core.AspectRatio
import androidx.camera.core.Camera
import androidx.camera.core.CameraSelector
import androidx.camera.core.ImageAnalysis
import androidx.camera.core.ImageProxy
import androidx.camera.core.Preview
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.video.MediaStoreOutputOptions
import androidx.camera.video.Quality
import androidx.camera.video.QualitySelector
import androidx.camera.video.Recorder
import androidx.camera.video.Recording
import androidx.camera.video.VideoCapture
import androidx.camera.video.VideoRecordEvent
import androidx.core.content.ContextCompat
import androidx.core.content.PermissionChecker
import androidx.fragment.app.Fragment
import androidx.fragment.app.activityViewModels
import androidx.navigation.Navigation
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.viewpager2.widget.ViewPager2.SCROLL_STATE_DRAGGING
import com.example.hanguangfacsemoanaapp.databinding.FragmentCameraBinding
import com.google.mediapipe.examples.facelandmarker.FaceLandmarkerHelper
import com.google.mediapipe.examples.facelandmarker.MainViewModel
import com.google.mediapipe.tasks.vision.core.RunningMode
import com.google.zxing.BarcodeFormat
import com.google.zxing.EncodeHintType
import com.google.zxing.MultiFormatWriter
import com.google.zxing.WriterException
import com.journeyapps.barcodescanner.BarcodeEncoder
import okhttp3.Call
import okhttp3.Callback
import okhttp3.MediaType.Companion.toMediaTypeOrNull
import okhttp3.MultipartBody
import okhttp3.OkHttpClient
import okhttp3.Request
import okhttp3.RequestBody
import okhttp3.Response
import okio.IOException
import java.io.File
import java.text.SimpleDateFormat
import java.util.Hashtable
import java.util.Locale
import java.util.UUID
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit


class CameraFragment : Fragment(), FaceLandmarkerHelper.LandmarkerListener {

    // View-binding backing field; non-null only between onCreateView and onDestroyView.
    private var _fragmentCameraBinding: FragmentCameraBinding? = null

    // Non-null accessor; only safe to use while the view exists.
    private val fragmentCameraBinding
        get() = _fragmentCameraBinding!!

    // Created on backgroundExecutor in onViewCreated; guarded with isInitialized in onPause.
    private lateinit var faceLandmarkerHelper: FaceLandmarkerHelper
    private val viewModel: MainViewModel by activityViewModels()
    // Adapter for the blendshape results list; created lazily on first use.
    private val faceBlendshapesResultAdapter by lazy {
        FaceBlendshapesResultAdapter()
    }
    // Base name (no extension) of the most recently recorded video.
    private var videoFileName=""
    private var preview: Preview? = null
    private var imageAnalyzer: ImageAnalysis? = null
    private var camera: Camera? = null
    private var cameraProvider: ProcessCameraProvider? = null
    private var cameraFacing = CameraSelector.LENS_FACING_FRONT
    // Upload/report endpoints; refreshed from shared preferences in startCaptureVideo().
    private var serverHost = "facs.healhub.cn:8000"
    private var uploadUrl: String = "http://$serverHost/api/faceAna/uploadFiles"
    private var reportURL = "http://$serverHost/reportQN" // replace with your URL
    private lateinit var  videoUri: Uri
    private var captureState: String = "close" // "close" = not recording, "capture" = recording
    private var scannedQRCodeResult:String = ""
    /** Blocking ML operations are performed using this executor */
    private lateinit var backgroundExecutor: ExecutorService
    companion object {
        private const val TAG = "Face Landmarker"
        // Timestamp pattern used to build unique video file names.
        private const val FILENAME_FORMAT = "yyyy-MM-dd-HH-mm-ss-SSS"
        private const val REQUEST_CODE_PERMISSIONS = 10
        // Camera + microphone, plus legacy external-storage write on API <= 28
        // (scoped storage removes the need for it on newer releases).
        private val REQUIRED_PERMISSIONS =
            mutableListOf (
                android.Manifest.permission.CAMERA,
                android.Manifest.permission.RECORD_AUDIO
            ).apply {
                if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.P) {
                    add(android.Manifest.permission.WRITE_EXTERNAL_STORAGE)
                }
            }.toTypedArray()
    }

    // Reads the configured server host from shared preferences.
    // NOTE(review): the fallback "192.168.1.19" disagrees with the initial
    // `serverHost` value ("facs.healhub.cn:8000") — confirm which default is intended.
    fun getServerHost(): String? {
        val sharedPreferences: SharedPreferences = requireActivity().getSharedPreferences("HangGuangPreferences", MODE_PRIVATE)
        return sharedPreferences.getString("serverHost", "192.168.1.19")
    }
    override fun onResume() {
        super.onResume()
        // Make sure that all permissions are still present, since the
        // user could have removed them while the app was in paused state.
        if (!PermissionsFragment.hasPermissions(requireContext())) {
            Navigation.findNavController(
                requireActivity(), R.id.fragment_container
            ).navigate(R.id.action_camera_to_permissions)
        }

        // Start the FaceLandmarkerHelper again when users come back
        // to the foreground. backgroundExecutor is a single-thread executor
        // (see onViewCreated), so this runs after the helper's initial setup task.
        backgroundExecutor.execute {
            if (faceLandmarkerHelper.isClose()) {
                faceLandmarkerHelper.setupFaceLandmarker()
            }
        }
    }

    /**
     * Renders [text] as a QR-code [Bitmap] of the requested size, forcing UTF-8
     * character encoding so non-ASCII payloads survive the round trip.
     */
    private fun generateQRCodeBitmap(text: String, width: Int, height: Int): Bitmap {
        val encodeHints = Hashtable<EncodeHintType, String>().apply {
            put(EncodeHintType.CHARACTER_SET, "UTF-8")
        }
        return BarcodeEncoder().encodeBitmap(text, BarcodeFormat.QR_CODE, width, height, encodeHints)
    }
    override fun onPause() {
        super.onPause()
        if(this::faceLandmarkerHelper.isInitialized) {
            // Persist the current landmarker settings into the shared ViewModel
            // so they survive the helper being torn down while backgrounded.
            viewModel.setMaxFaces(faceLandmarkerHelper.maxNumFaces)
            viewModel.setMinFaceDetectionConfidence(faceLandmarkerHelper.minFaceDetectionConfidence)
            viewModel.setMinFaceTrackingConfidence(faceLandmarkerHelper.minFaceTrackingConfidence)
            viewModel.setMinFacePresenceConfidence(faceLandmarkerHelper.minFacePresenceConfidence)
            viewModel.setDelegate(faceLandmarkerHelper.currentDelegate)

            // Close the FaceLandmarkerHelper and release resources
            backgroundExecutor.execute { faceLandmarkerHelper.clearFaceLandmarker() }
        }
    }

    override fun onDestroyView() {
        // Drop the binding first so no callback touches dead views.
        _fragmentCameraBinding = null
        super.onDestroyView()

        // Shut down our background executor
        backgroundExecutor.shutdown()
        // NOTE(review): waiting with an unbounded timeout on the calling (main)
        // thread can freeze the UI if a background task hangs — consider a
        // bounded timeout.
        backgroundExecutor.awaitTermination(
            Long.MAX_VALUE, TimeUnit.NANOSECONDS
        )
    }

    /**
     * Inflates the camera layout, shows the scanned QR payload (the user id)
     * and wires the capture toggle button.
     */
    override fun onCreateView(
        inflater: LayoutInflater,
        container: ViewGroup?,
        savedInstanceState: Bundle?
    ): View {
        _fragmentCameraBinding =
            FragmentCameraBinding.inflate(inflater, container, false)
        // QR payload passed from the scanner screen; defaults to "".
        scannedQRCodeResult = arguments?.getString("scannedQRCodeResult", "") ?: ""
        // Fix: the previous `!= null` check was always true (the property is a
        // non-null String after the elvis above), so show the value unconditionally.
        Log.i("扫描二维码结果：：：", scannedQRCodeResult)
        fragmentCameraBinding.tvScannedQrCodeResult.text = "用户：$scannedQRCodeResult"
        fragmentCameraBinding.btnStartCapture.setOnClickListener { anaToggle() }
        return fragmentCameraBinding.root
    }

    // CameraX video-capture use case; bound in bindCameraUseCases().
    private var videoCapture: VideoCapture<Recorder>? = null
    // Active recording session; null when idle.
    private var recording: Recording? = null
    // Mood label chosen in the questionnaire dialog.
    private var emoLabel:String=""
    // NOTE(review): appears unused in the visible code — confirm before removing.
    private lateinit var cameraExecutor: ExecutorService
    /**
     * Produces a random UUIDv4 rendered as a 32-character hexadecimal string
     * (the canonical textual form with the dashes stripped out).
     */
    fun generateUUID4Hex(): String = UUID.randomUUID().toString().replace("-", "")
    // "View report" button inside the report dialog; made visible after a successful upload.
    lateinit var btnReportView:Button;
    // Evaluation id; regenerated per recording in startCaptureVideo().
    private var evaId: String=generateUUID4Hex()
    // Shows a modal mood-selection dialog; once a mood is picked it reveals the
    // task card, turns the capture button red and starts video recording.
    private  fun doQuestionnaire(){
        val inflater = this.layoutInflater

        // Custom dialog view
        val dialogView = inflater.inflate(R.layout.component_dialog_questionnaire, null)

        val moodListView = dialogView.findViewById<ListView>(R.id.moodListView)

        // Set the user name, assessment time, etc. (examples only; should come from a real data source)
//        userNameTextView.text = "张三"
//        assessmentTimeTextView.text = "测评时间: " + DateFormat.format("yyyy-MM-dd HH:mm", Calendar.getInstance().time)
        // Set the avatar and QR code (placeholders; replace with real Bitmap/Drawable)
        // userAvatarImageView.setImageResource(R.drawable.actual_avatar)
        // qrCodeImageView.setImageResource(R.drawable.actual_qr_code)

        // Mood option list (happy / sad / angry / surprised / calm)
        val moodItems = listOf("开心", "悲伤", "愤怒", "惊讶", "平静")
        val moodAdapter = ArrayAdapter(requireContext(), android.R.layout.simple_list_item_1, moodItems)
        moodListView.adapter = moodAdapter

        val builder = AlertDialog.Builder(requireContext())
        // Build the dialog around the custom view
        builder.setView(dialogView)
        val dialog = builder.create()

        // Disallow dismissing the dialog by tapping outside or via back press
        dialog.setCanceledOnTouchOutside(false)
        dialog.setCancelable(false)
        // Mood selection click handler
        moodListView.setOnItemClickListener { _, _, position, _ ->
            emoLabel = moodItems[position]
            // Acknowledge the mood choice
//            playAudioFromAssets(R.raw.eva_end_music)
            Toast.makeText(requireContext(), "你选择的心情是: $emoLabel", Toast.LENGTH_SHORT).show()
            // "平静" (calm) gets a neutral speaking task; other moods get a follow-up prompt.
            if(emoLabel!="平静"){
                fragmentCameraBinding.tvTaskDesc.hint="最近遇到了什么事情呀，怎么这么"+emoLabel+"呢？"
            }else{
                fragmentCameraBinding.tvTaskDesc.hint="有什么想要跟我分享的吗？如果没有的话，请用“ 兔子 、 钥匙 、 桥 、 我 ”造句，并大声说给我听哦！"
            }
            fragmentCameraBinding.cardTask.visibility=View.VISIBLE
            fragmentCameraBinding.btnStartCapture.backgroundTintList =ContextCompat.getColorStateList(requireContext(), R.color.flat_red)
            // Close the dialog
            dialog.dismiss()
//            dialog.dismiss()
            startCaptureVideo()
        }
        // Show the dialog
        dialog.show()
    }
    /**
     * Shows the post-evaluation report dialog: restores the capture button's
     * idle colour, renders a QR code linking to this evaluation's report URL,
     * and wires the "view report" button to the in-app WebView.
     */
    private fun showReport() {
        fragmentCameraBinding.btnStartCapture.backgroundTintList =
            ContextCompat.getColorStateList(requireContext(), R.color.flat_blue)

        // Custom dialog view
        val dialogView = layoutInflater.inflate(R.layout.component_show_report, null)
        val qrCodeImageView = dialogView.findViewById<ImageView>(R.id.qrCode)
        btnReportView = dialogView.findViewById<Button>(R.id.btnReport)

        try {
            // Encode the per-evaluation report URL so it can be scanned on a phone.
            val qrCodeBitmap = generateQRCodeBitmap(reportURL + "/$evaId", 500, 500)
            qrCodeImageView.setImageBitmap(qrCodeBitmap)
        } catch (e: WriterException) {
            // Fix: route the failure through the app logger instead of printStackTrace().
            Log.e(TAG, "QR code generation failed", e)
        }

        val dialog = AlertDialog.Builder(requireContext())
            .setView(dialogView)
            .create()

        btnReportView.setOnClickListener { showWebViewDialog() }
        dialog.show()
    }

    // Shows the per-evaluation report page inside an in-app WebView dialog.
    @SuppressLint("SetJavaScriptEnabled")
    private fun showWebViewDialog() {
        val dialogView = layoutInflater.inflate(R.layout.component_report, null)
        val webView = dialogView.findViewById<WebView>(R.id.webview)

        // Enable JavaScript for the report page.
        // NOTE(review): a JS-enabled WebView loading a plain-http URL is exposed
        // to MITM/script injection — confirm the host or move to https.
        val webSettings: WebSettings = webView.settings
        webSettings.javaScriptEnabled = true
        // Use a WebViewClient so links load inside the dialog rather than a browser.
        webView.webViewClient = WebViewClient()

        // Load the report URL for the current evaluation id.
//        val url = "https://www.example.com" // replace with your URL
        webView.loadUrl(reportURL+"/$evaId")

        val builder = AlertDialog.Builder(requireContext())
        builder.setView(dialogView)
        val dialog = builder.create()
        dialog.show()
    }

    /**
     * Uploads the video at [uri] to [uploadUrl] as multipart form data along
     * with the mood label and evaluation id.
     *
     * @param context  used to resolve the content [uri]
     * @param uri      content/file URI of the recorded video
     * @param emoLabel mood label chosen by the user, sent as a text form field
     * @param callback invoked with (success, serverResponseOrErrorMessage);
     *                 runs on OkHttp's background thread, not the main thread
     */
    fun sendFileFromUri(context: Context, uri: Uri, emoLabel: String, callback: (Boolean, String?) -> Unit) {
        // Fix: timeouts were 600000 *seconds* (~7 days); the original inline
        // comments state the intent was 60 seconds.
        val client = OkHttpClient.Builder()
            .connectTimeout(60, TimeUnit.SECONDS)
            .readTimeout(60, TimeUnit.SECONDS)
            .writeTimeout(60, TimeUnit.SECONDS)
            .build()

        // Fix: the old `inputStream != null` test inside `?.use` was always true,
        // making the "cannot open stream" branch unreachable. Report that failure
        // when openInputStream() itself returns null.
        val inputStream = context.contentResolver.openInputStream(uri)
        if (inputStream == null) {
            callback(false, "无法打开文件输入流")
            return
        }

        inputStream.use { stream ->
            // NOTE: readBytes() loads the whole file into memory; large videos
            // should be streamed in chunks instead.
            val fileBody = RequestBody.create(
                "video/mp4".toMediaTypeOrNull(),
                stream.readBytes()
            )

            // Multipart body: the binary file plus the text fields the server expects.
            val multipartBody = MultipartBody.Builder()
                .setType(MultipartBody.FORM)
                .addFormDataPart("file", videoFileName + ".mp4", fileBody)
                .addFormDataPart("emoLabel", emoLabel)
                .addFormDataPart("evaId", evaId)
                .build()
            Log.i("上传参数", evaId)

            val request = Request.Builder()
                .url(uploadUrl)
                .post(multipartBody)
                .build()

            // Asynchronous upload; callbacks fire on OkHttp's dispatcher thread.
            client.newCall(request).enqueue(object : Callback {
                override fun onFailure(call: Call, e: IOException) {
                    Log.i("上传", "开始上传" + e.message.toString())
                    callback(false, e.message)
                }

                override fun onResponse(call: Call, response: Response) {
                    if (response.isSuccessful) {
                        callback(true, response.body?.string())
                    } else {
                        callback(false, response.message)
                        Log.i("上传", "开始上传" + response.message.toString())
                    }
                }
            })
        }
    }
//    fun evaToggle() {
//        var mediaPlayer: MediaPlayer? = null
//        try {
//            // 创建MediaPlayer对象并设置音频源
//
//            var fileName = R.raw.eva_end_music
//            if(captureState=="capture"){
//                 fileName = R.raw.eva_end_music
//                captureVideo()
//            }else{
//
//                 fileName = R.raw.eva_start_music
//            }
//            mediaPlayer = MediaPlayer.create(requireContext(),fileName)
//            // 准备音频并开始播放
////            mediaPlayer?.prepare()
//            mediaPlayer?.start()
//            // 如果你想在音频播放完毕后释放资源，可以设置一个监听器
//            mediaPlayer?.setOnCompletionListener { mp ->
//                mp.release()
//                mediaPlayer!!.release()
//                mediaPlayer=null
//
//                if(captureState=="close"){
//                    captureVideo()
//                }
////                mp = null
//            }
//        } catch (e: IOException) {
//            e.printStackTrace()
//        }
//
//        // 注意：在适当的时候（比如Activity的onDestroy方法中），你应该检查并释放MediaPlayer资源
//        // 以防止内存泄漏。如果这是一个后台服务，确保在服务停止时释放资源。
//    }


    /**
     * Resolves [uri] to a filesystem path (document URI on primary storage,
     * content URI via the MediaStore DATA column, or a plain file URI) and
     * renames the underlying file to [newFileName].
     *
     * @return true when the rename succeeded, false when the URI could not be
     *         resolved to a path or the rename failed
     */
    fun renameVideoFile(context: Context, uri: Uri, newFileName: String): Boolean {
        val resolvedPath: String? = when {
            DocumentsContract.isDocumentUri(context, uri) -> {
                val parts = DocumentsContract.getDocumentId(uri)
                    .split(":".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray()
                if ("primary".equals(parts[0], ignoreCase = true)) {
                    Environment.getExternalStorageDirectory().toString() + "/" + parts[1]
                } else {
                    // Non-primary volumes (e.g. SD cards) are not handled.
                    null
                }
            }
            "content".equals(uri.scheme, ignoreCase = true) -> {
                // Look up the legacy DATA column for a direct path.
                val projection = arrayOf(MediaStore.Video.Media.DATA)
                context.contentResolver.query(uri, projection, null, null, null)?.use { cursor ->
                    if (cursor.moveToFirst()) {
                        cursor.getString(cursor.getColumnIndexOrThrow(MediaStore.Video.Media.DATA))
                    } else {
                        null
                    }
                }
            }
            "file".equals(uri.scheme, ignoreCase = true) -> uri.path
            else -> null
        }

        // Unresolvable URI -> nothing to rename.
        val source = resolvedPath?.let { File(it) } ?: return false
        return source.renameTo(File(source.parent, newFileName))
    }
    /**
     * Toggles the evaluation flow: when idle, opens the mood questionnaire
     * (which starts recording); when recording, hides the task card, stops
     * the capture and shows the report dialog.
     */
    private fun anaToggle() {
        if (captureState != "close") {
            fragmentCameraBinding.cardTask.visibility = View.INVISIBLE
            endCaptureVideo()
            showReport()
        } else {
            doQuestionnaire()
        }
    }
    /**
     * Stops the in-progress recording (if any) and resets the capture state.
     * Stopping triggers a VideoRecordEvent.Finalize in the listener installed
     * by startCaptureVideo(), which re-enables the capture button.
     */
    private fun endCaptureVideo() {
        // Nothing to stop if the video use case was never bound.
        this.videoCapture ?: return

        fragmentCameraBinding.btnStartCapture.isEnabled = false

        val curRecording = recording
        if (curRecording == null) {
            // Fix: previously the button stayed disabled forever in this path,
            // because only the Finalize event re-enabled it.
            fragmentCameraBinding.btnStartCapture.isEnabled = true
            return
        }

        // Stop the current recording session; the Finalize event handles cleanup.
        curRecording.stop()
        captureState = "close"
        recording = null
    }
    // Implements VideoCapture use case, including start and stop capturing.
    // Starts a new recording session: refreshes the server endpoints, builds a
    // MediaStore output target, begins recording (with audio when permitted),
    // and uploads the resulting file once the recording is finalized.
    private fun startCaptureVideo() {

        val videoCapture = this.videoCapture ?: return

        fragmentCameraBinding.btnStartCapture.isEnabled = false

        val curRecording = recording
//        if (curRecording != null) {
//            // Stop the current recording session.
//            curRecording.stop()
//            captureState = "close"
//            recording = null
//
//
//            return
//        }
        captureState = "capture"
//        playAudioFromAssets(R.raw.eva_start_music)
        // Evaluation id: <scanned user>_<mood label>_<uuid4 hex>.
        evaId = scannedQRCodeResult +"_" + emoLabel + "_" +generateUUID4Hex()
        // Re-read the host so a settings change takes effect for this recording.
        serverHost = getServerHost().toString()
        uploadUrl = "http://$serverHost/api/faceAna/uploadFiles"
        reportURL = "http://$serverHost/reportQN" // replace with your URL


        // create and start a new recording session
        videoFileName = scannedQRCodeResult +"_" + emoLabel + "_" + SimpleDateFormat(FILENAME_FORMAT, Locale.US)
            .format(System.currentTimeMillis())
        val contentValues = ContentValues().apply {
            put(MediaStore.MediaColumns.DISPLAY_NAME, videoFileName)
            put(MediaStore.MediaColumns.MIME_TYPE, "video/mp4")
            if (Build.VERSION.SDK_INT > Build.VERSION_CODES.P) {
                // Scoped storage (API 29+) uses a relative path under Movies/.
                put(MediaStore.Video.Media.RELATIVE_PATH, "Movies/CameraX-Video")
            }
        }

        val mediaStoreOutputOptions = MediaStoreOutputOptions
            .Builder(requireContext().contentResolver, MediaStore.Video.Media.EXTERNAL_CONTENT_URI)
            .setContentValues(contentValues)
            .build()
        recording = videoCapture.output
            .prepareRecording(requireContext(), mediaStoreOutputOptions)
            .apply {
                // Record audio only when the runtime permission is granted.
                if (PermissionChecker.checkSelfPermission(requireContext(),
                        android.Manifest.permission.RECORD_AUDIO) ==
                    PermissionChecker.PERMISSION_GRANTED)
                {
                    withAudioEnabled()
                }
            }
            .start(ContextCompat.getMainExecutor(requireContext())) { recordEvent ->
                when(recordEvent) {
                    is VideoRecordEvent.Start -> {
                        // Recording began: switch the button label and re-enable it.
                        fragmentCameraBinding.btnStartCapture.apply {
                            text = getString(R.string.stop_capture)
                            isEnabled = true
                        }
                    }
                    is VideoRecordEvent.Finalize -> {
                        if (!recordEvent.hasError()) {
                            videoUri = recordEvent.outputResults.outputUri
                            val msg = "恭喜恭喜，测评已经完成啦！"
//                                    "${recordEvent.outputResults.outputUri}"

//                            var selectedMood = "平静"
                            // Upload the finished video; the callback is marshalled
                            // back to the UI thread before touching views.
                            sendFileFromUri(requireContext(), uri = videoUri, emoLabel = emoLabel) { success, message ->
                                requireActivity().runOnUiThread{
                                    Log.i("上传情况","进行中，"+emoLabel+videoUri)
                                    if (success) {
                                        // Upload succeeded; message holds the server response.
                                        Log.i("上传成功：：",message.toString())
//                                        showWebViewDialog()
                                        btnReportView.visibility = View.VISIBLE

                                    } else {
                                        // Upload failed; message holds the error text.
                                        Log.i("上传失败：：",message.toString())
                                    }}
                            }
                            Toast.makeText(requireContext(), msg, Toast.LENGTH_SHORT)
                                .show()
                            Log.d(TAG, msg)
                        } else {
                            recording?.close()
                            recording = null
                            Log.e(TAG, "Video capture ends with error: " +
                                    "${recordEvent.error}")
                        }
                        // Restore the idle button label in both outcomes.
                        fragmentCameraBinding.btnStartCapture.apply {
                            text = getString(R.string.start_capture)
                            isEnabled = true
                        }
                    }
                }
            }
    }
    @SuppressLint("MissingPermission")
    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)

        // Show blendshape results in a vertical list.
        with(fragmentCameraBinding.recyclerviewResults) {
            layoutManager = LinearLayoutManager(requireContext())
            adapter = faceBlendshapesResultAdapter
        }

        // Initialize our background executor (single thread: tasks run serially).
        backgroundExecutor = Executors.newSingleThreadExecutor()

        // Wait for the views to be properly laid out
        fragmentCameraBinding.viewFinder.post {
            // Set up the camera and its use cases
            setUpCamera()
        }

        // Create the FaceLandmarkerHelper that will handle the inference,
        // off the main thread, seeded from the ViewModel's persisted settings.
        backgroundExecutor.execute {
            faceLandmarkerHelper = FaceLandmarkerHelper(
                context = requireContext(),
                runningMode = RunningMode.LIVE_STREAM,
                minFaceDetectionConfidence = viewModel.currentMinFaceDetectionConfidence,
                minFaceTrackingConfidence = viewModel.currentMinFaceTrackingConfidence,
                minFacePresenceConfidence = viewModel.currentMinFacePresenceConfidence,
                maxNumFaces = viewModel.currentMaxFaces,
                currentDelegate = viewModel.currentDelegate,
                faceLandmarkerHelperListener = this
            )
        }

        // Attach listeners to UI control widgets
        initBottomSheetControls()
    }

    /**
     * Intentionally a no-op: the bottom-sheet threshold/delegate controls from
     * the upstream MediaPipe sample are not present in this app's layout, so
     * there is nothing to wire up. (The disabled sample wiring that previously
     * lived here as commented-out code has been removed for readability.)
     */
    private fun initBottomSheetControls() {
        // Nothing to initialize.
    }

    // Update the values displayed in the bottom sheet. Reset Facelandmarker
    // helper.
//    private fun updateControlsUi() {
//        fragmentCameraBinding.bottomSheetLayout.maxFacesValue.text =
//            faceLandmarkerHelper.maxNumFaces.toString()
//        fragmentCameraBinding.bottomSheetLayout.detectionThresholdValue.text =
//            String.format(
//                Locale.US,
//                "%.2f",
//                faceLandmarkerHelper.minFaceDetectionConfidence
//            )
//        fragmentCameraBinding.bottomSheetLayout.trackingThresholdValue.text =
//            String.format(
//                Locale.US,
//                "%.2f",
//                faceLandmarkerHelper.minFaceTrackingConfidence
//            )
//        fragmentCameraBinding.bottomSheetLayout.presenceThresholdValue.text =
//            String.format(
//                Locale.US,
//                "%.2f",
//                faceLandmarkerHelper.minFacePresenceConfidence
//            )
//
//        // Needs to be cleared instead of reinitialized because the GPU
//        // delegate needs to be initialized on the thread using it when applicable
//        backgroundExecutor.execute {
//            faceLandmarkerHelper.clearFaceLandmarker()
//            faceLandmarkerHelper.setupFaceLandmarker()
//        }
//        fragmentCameraBinding.overlay.clear()
//    }

    // Initialize CameraX: obtain the process camera provider asynchronously,
    // then bind the use cases on the main executor.
    private fun setUpCamera() {
        val providerFuture = ProcessCameraProvider.getInstance(requireContext())
        providerFuture.addListener(
            {
                // Store the provider, then build and bind the camera use cases.
                cameraProvider = providerFuture.get()
                bindCameraUseCases()
            },
            ContextCompat.getMainExecutor(requireContext())
        )
    }

    // Declare and bind preview, capture and analysis use cases
    @SuppressLint("UnsafeOptInUsageError")
    private fun bindCameraUseCases() {

        // CameraProvider
        val cameraProvider = cameraProvider
            ?: throw IllegalStateException("Camera initialization failed.")

        val cameraSelector =
            CameraSelector.Builder().requireLensFacing(cameraFacing).build()

        // Preview. Only using the 4:3 ratio because this is the closest to our models
        preview = Preview.Builder().setTargetAspectRatio(AspectRatio.RATIO_4_3)
            .setTargetRotation(fragmentCameraBinding.viewFinder.display.rotation)
            .build()

        // ImageAnalysis. Using RGBA 8888 to match how our models work
        imageAnalyzer =
            ImageAnalysis.Builder().setTargetAspectRatio(AspectRatio.RATIO_4_3)
                .setTargetRotation(fragmentCameraBinding.viewFinder.display.rotation)
                .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
                .setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_RGBA_8888)
                .build()
                // The analyzer can then be assigned to the instance
                .also {
                    it.setAnalyzer(backgroundExecutor) { image ->
                        detectFace(image)
                    }
                }

        // Must unbind the use-cases before rebinding them
        cameraProvider.unbindAll()
        // LOWEST quality keeps recorded files (and uploads) small.
        val recorder = Recorder.Builder()
            .setQualitySelector(QualitySelector.from(Quality.LOWEST))
            .build()
        videoCapture = VideoCapture.withOutput(recorder)

        try {
            // A variable number of use-cases can be passed here -
            // camera provides access to CameraControl & CameraInfo
//            camera = cameraProvider.bindToLifecycle(
//                this, cameraSelector, preview, imageAnalyzer,videoCapture
//            )
            // NOTE(review): imageAnalyzer is built above but NOT bound here, so
            // detectFace() never receives frames in this configuration — confirm
            // whether face analysis during capture is intentionally disabled.
            camera = cameraProvider.bindToLifecycle(
                this, cameraSelector, preview,videoCapture
            )
            // Attach the viewfinder's surface provider to preview use case
            preview?.setSurfaceProvider(fragmentCameraBinding.viewFinder.surfaceProvider)
        } catch (exc: Exception) {
            Log.e(TAG, "Use case binding failed", exc)
        }
    }

    /** Forwards one camera frame to the landmarker helper for live-stream detection. */
    private fun detectFace(imageProxy: ImageProxy) {
        // Front-lens frames are mirrored; the helper needs to know to un-flip them.
        val mirrored = cameraFacing == CameraSelector.LENS_FACING_FRONT
        faceLandmarkerHelper.detectLiveStream(
            imageProxy = imageProxy,
            isFrontCamera = mirrored
        )
    }

    /** Keeps the analysis use case's rotation in sync with the display after orientation changes. */
    override fun onConfigurationChanged(newConfig: Configuration) {
        super.onConfigurationChanged(newConfig)
        val displayRotation = fragmentCameraBinding.viewFinder.display.rotation
        imageAnalyzer?.targetRotation = displayRotation
    }

    // Update UI after face have been detected. Extracts original
    // image height/width to scale and place the landmarks properly through
    // OverlayView
    // Update UI after faces have been detected. Extracts the original image
    // height/width so OverlayView can scale and place the landmarks properly.
    override fun onResults(
        resultBundle: FaceLandmarkerHelper.ResultBundle
    ) {
        activity?.runOnUiThread {
            // The view binding may already be destroyed by the time this posts.
            if (_fragmentCameraBinding == null) return@runOnUiThread

            // Don't refresh the blendshapes list while the user is dragging it.
            val userIsScrolling =
                fragmentCameraBinding.recyclerviewResults.scrollState == SCROLL_STATE_DRAGGING
            if (!userIsScrolling) {
                faceBlendshapesResultAdapter.updateResults(resultBundle.result)
                faceBlendshapesResultAdapter.notifyDataSetChanged()
            }

//                fragmentCameraBinding.bottomSheetLayout.inferenceTimeVal.text =
//                    String.format("%d ms", resultBundle.inferenceTime)

            // Pass the landmarks and source image dimensions to OverlayView,
            // then force a redraw of the canvas.
            with(fragmentCameraBinding.overlay) {
                setResults(
                    resultBundle.result,
                    resultBundle.inputImageHeight,
                    resultBundle.inputImageWidth,
                    RunningMode.LIVE_STREAM
                )
                invalidate()
            }
        }
    }

    // Called when no face is detected in a frame: clears the overlay and the
    // blendshapes list.
    // Fix: the original cleared the overlay OUTSIDE runOnUiThread and without a
    // null-binding guard, unlike onResults/onError — the helper can invoke this
    // callback from a background thread, and the binding may already be
    // destroyed. All view access is now posted to the UI thread and guarded.
    override fun onEmpty() {
        activity?.runOnUiThread {
            if (_fragmentCameraBinding != null) {
                fragmentCameraBinding.overlay.clear()
            }
            faceBlendshapesResultAdapter.updateResults(null)
            faceBlendshapesResultAdapter.notifyDataSetChanged()
        }
    }

    // Surfaces a detector error to the user as a toast and resets the
    // blendshapes list, all on the UI thread.
    override fun onError(error: String, errorCode: Int) {
        activity?.runOnUiThread {
            Toast.makeText(requireContext(), error, Toast.LENGTH_SHORT).show()
            with(faceBlendshapesResultAdapter) {
                updateResults(null)
                notifyDataSetChanged()
            }

//            if (errorCode == FaceLandmarkerHelper.GPU_ERROR) {
//                fragmentCameraBinding.bottomSheetLayout.spinnerDelegate.setSelection(
//                    FaceLandmarkerHelper.DELEGATE_CPU, false
//                )
//            }
        }
    }
}