package com.vieewer.btdemo.view.microsoft.ocr

import android.Manifest
import android.content.Intent
import org.json.JSONObject
import android.util.Log
import android.os.Environment.getExternalStorageDirectory
import java.io.File
import java.io.FileInputStream
import android.content.pm.PackageManager
import android.os.*
import android.support.annotation.RequiresApi
import android.support.design.widget.Snackbar
import android.view.KeyEvent
import com.google.gson.Gson
import com.google.gson.reflect.TypeToken
import com.jakewharton.rxbinding2.view.RxView
import com.lindroid.androidutilskt.extension.getSpString
import com.tbruyelle.rxpermissions2.RxPermissions
import com.vieewer.btdemo.R
import com.vieewer.btdemo.base.BaseActivity
import com.vieewer.btdemo.bean.DrawTextBean
import com.vieewer.btdemo.bean.OCRResultBean
import com.vieewer.btdemo.bean.TranslateBean
import com.vieewer.btdemo.dismiss
import com.vieewer.btdemo.showLoading
import com.vieewer.btdemo.utils.CommonUtils
import com.vieewer.btdemo.utils.LogUtil
import com.vieewer.btdemo.view.draw.DrawActivity
import com.vieewer.btdemo.view.microsoft.audio.TranslateHelper
import com.vieewer.btdemo.view.microsoft.audio.TranslateHelper.prettify
import com.vieewer.btdemo.view.mycamera.SurfaceCameraActivity
import kotlinx.android.synthetic.main.activity_trans_img_ocr.*
import org.apache.http.client.methods.HttpGet
import org.apache.http.client.methods.HttpPost
import org.apache.http.client.utils.URIBuilder
import org.apache.http.entity.InputStreamEntity
import org.apache.http.entity.StringEntity
import org.apache.http.impl.client.CloseableHttpClient
import org.apache.http.impl.client.HttpClientBuilder
import org.apache.http.util.EntityUtils
import java.lang.StringBuilder
import java.lang.ref.WeakReference


/**
 * Demo activity that runs OCR on a photo via the Microsoft Azure Cognitive
 * Services Computer Vision REST API (vision/v2.1) and then machine-translates
 * the recognized text line by line.
 *
 * Pipeline: a photo file path arrives via the launch Intent (or the camera
 * screen) -> [recognizeOcrAndTranslate] POSTs the image bytes to the OCR
 * endpoint -> [tearApartToLineWords] splits the JSON result into per-line
 * [DrawTextBean]s -> [MyHandler] messages (what == 2) drive [translate] for
 * each line in sequence -> [refreshUIView] (what == 1) pushes the translated
 * lines into the overlay view.
 *
 * All HTTP work runs on raw background [Thread]s; UI updates are marshalled
 * back through [mHandler].
 */
class OcrActivity : BaseActivity() {

    // Shared Apache HttpClient for the OCR / read requests; built in onCreate().
    private lateinit var mHttpClient : CloseableHttpClient
    // Root of external storage, resolved lazily by getPath() (currently unused elsewhere).
    var mSdDir : File ? = null

    // NOTE(review): Azure subscription key committed in source — should be moved
    // out of version control into secure configuration.
    private val subscriptionKey = "28db17501a2847419c59c7ffb6073d1a"
    private val endpoint = "https://vieewerocr.cognitiveservices.azure.com/"

    // Synchronous OCR endpoint (printed text).
    private val uriBase = endpoint + "vision/v2.1/ocr"

    // Sample remote image used by the URL-based OCR test button.
    private val imageToAnalyze =
        "https://upload.wikimedia.org/wikipedia/commons/thumb/a/af/" + "Atomist_quote_from_Democritus.png/338px-Atomist_quote_from_Democritus.png"


    // Asynchronous "Read" endpoint (handwritten/long text); results are polled
    // via the Operation-Location header.
    private val uriBaseRead = endpoint + "vision/v2.1/read/core/asyncBatchAnalyze"

    // Sample remote image used by the URL-based Read API test button.
    private val imageToAnalyzeRead =
        "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/" + "Cursive_Writing_on_Notebook_paper.jpg/800px-Cursive_Writing_on_Notebook_paper.jpg"


    // Photo capture: the local image file currently being analyzed.
    private var photoFile : File ? = null

    // Whether this window currently has focus; gates the hardware-key OCR trigger.
    var mHasFocus = false

    // Per-line OCR/translation results, in reading order. Mutated from worker
    // threads and read by the handler on the main thread.
    var mDrawTextDatas =  ArrayList<DrawTextBean>()

    // Set once OCR line extraction has completed at least once.
    var mIsFinish = false


    /**
     * Standard setup: inflate the layout, build the HTTP client, request
     * storage permission, and wire up all test buttons.
     */
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_trans_img_ocr)
        mHttpClient = HttpClientBuilder.create().build()

        checkPermissions()
        setHttpUrlImageReq()
        setLocalImageReq()
        setOnClickListeners()

        initViews()

    }

    /**
     * Shows the image whose path was passed in the "file" Intent extra (if
     * any) and wires the "start OCR" / "go to result" buttons.
     */
    private fun initViews() {
        //todo test edit later
        var filePath =  intent!!.getStringExtra("file")

        //val filePath = "/storage/emulated/0/DCIM/Camera/test.jpg"
        if (filePath != null) {
            Log.i("yjj", "file path =" +  intent!!.getStringExtra("file"))
            showImg(filePath)
        }

        txt_test_start_ocr.setOnClickListener {
            recognizeOcrAndTranslate()
        }

        txt_goto_result.setOnClickListener{
            // Hand the recognized/translated lines over to the drawing screen.
            // NOTE(review): putExtra with an ArrayList relies on DrawTextBean
            // being Serializable/Parcelable — confirm against the bean class.
            var intent = Intent(this, DrawActivity::class.java)
            intent.putExtra("data", mDrawTextDatas)
            intent.putExtra("filePath", filePath)
            startActivity(intent)
        }
    }


    /**
     * Camera capture: requests camera/audio/storage permissions via
     * RxPermissions, then opens the camera screen; shows a Snackbar when
     * permissions are denied.
     */
    private fun setOnClickListeners() {
        // Photo capture entry point.
        var rxPermissions = RxPermissions(this)
        RxView.clicks(open_camera)
            .compose(
                rxPermissions.ensure(
                    Manifest.permission.CAMERA,
                    Manifest.permission.RECORD_AUDIO,
                    Manifest.permission.WRITE_EXTERNAL_STORAGE
                )
            )
            .subscribe { granted ->
                if (granted!!) {
                    startVideoRecordActivity()
                } else {
                    Snackbar.make(open_camera, getString(R.string.no_enough_permission), Snackbar.LENGTH_SHORT)
                        .setAction("Confirm", null).show()
                }
            }


    }


    /** Launches the in-app camera screen, expecting a file path back. */
    private fun startVideoRecordActivity() {
        val intent = Intent(this, SurfaceCameraActivity::class.java)
        startActivityForResult(intent, 0)
    }


    /**
     * Camera-result callback. The handling that would display the returned
     * photo is currently commented out, so the result is ignored.
     */
    override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
        super.onActivityResult(requestCode, resultCode, data)

     /*   if (requestCode == 0 && resultCode == 1) {
            var filePath =  data!!.getStringExtra("file")
            if (filePath != null) {
                Log.i("yjj", "file path =" +  data!!.getStringExtra("file"))
                showImg(filePath)
            }
        }
*/
    }




    /**
     * Remembers [filePath] as the current photo and displays it in the custom
     * image view.
     * NOTE(review): the inner null check is redundant — filePath is a
     * non-nullable String here.
     */
    private fun showImg(filePath: String) {

        if (filePath != null) {
            photoFile = File(filePath)
            custom_view.setImageView(filePath)
        }

    }



    /**
     * On API 23+ requests WRITE_EXTERNAL_STORAGE at runtime if not yet granted.
     */
    private fun checkPermissions() {
        if (Build.VERSION.SDK_INT >= 23) {
            val REQUEST_CODE_CONTACT = 101
            val permissions = arrayOf<String>(Manifest.permission.WRITE_EXTERNAL_STORAGE)
            // Check whether each permission has already been granted.
            for (str in permissions) {
                if (this.checkSelfPermission(str) != PackageManager.PERMISSION_GRANTED) {
                    // Request the missing permission(s).
                    this.requestPermissions(permissions, REQUEST_CODE_CONTACT)
                }
            }
        }
    }

    /**
     * Resolves the external-storage root into [mSdDir] when the SD card is
     * mounted. NOTE(review): the mounted-state check is performed twice.
     */
    @RequiresApi(Build.VERSION_CODES.KITKAT)
    private fun getPath() {

        if(Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
            // True means the SD card is mounted.
            var sdCardExist = Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED);
            if(sdCardExist) {
                mSdDir = getExternalStorageDirectory()// get the storage root directory
                Log.e("main","得到的根目录路径111:"+ mSdDir)
            }

        }

    }

    /**
     * Wires the two "local image" test buttons: one runs the synchronous OCR
     * pipeline, the other submits [photoFile] to the asynchronous Read API and
     * immediately polls the Operation-Location URL for the result.
     */
    private fun setLocalImageReq() {

        //todo yjj ocr here
        txt_test_ocr_local.setOnClickListener {
            recognizeOcrAndTranslate()
        }

        txt_test_read_ocr_local.setOnClickListener {

            val httpTextClient = HttpClientBuilder.create().build()
            val httpResultClient = HttpClientBuilder.create().build()

            Thread {
                try {
                    // This operation requires two REST API calls. One to submit the image
                    // for processing, the other to retrieve the text found in the image.

                    val builder = URIBuilder(uriBaseRead)

                    // Prepare the URI for the REST API method.
                    val uri = builder.build()
                    val request = HttpPost(uri)

                    // Request headers.
                    request.setHeader("Content-Type", "application/octet-stream")
                    request.setHeader("Ocp-Apim-Subscription-Key", subscriptionKey)


                 /*   val file = File(
                        getExternalStorageDirectory().getAbsolutePath(),
                        "/tencent/QQfile_recv/img_notebook_paper.jpg"
                    )
*/

                    // Stream the local photo bytes as the request body; -1 means
                    // unknown length, hence chunked transfer below.
                    val reqEntity = InputStreamEntity(
                        FileInputStream(photoFile), -1
                    )

                    reqEntity.setChunked(true) // Send in multiple parts if needed
                    request.entity = reqEntity

                    // Request body.
                    //val requestEntity = StringEntity("{\"url\":\"$imageToAnalyzeRead\"}")
                    //request.entity = requestEntity


                    // Two REST API methods are required to extract text.
                    // One method to submit the image for processing, the other method
                    // to retrieve the text found in the image.

                    // Call the first REST API method to detect the text.
                    val response = httpTextClient.execute(request)

                    // Check for success (the Read API returns 202 Accepted).
                    if (response.statusLine.statusCode != 202) {
                        // Format and display the JSON error message.
                        val entity = response.entity
                        val jsonString = EntityUtils.toString(entity)
                        val json = JSONObject(jsonString)
                        println("Error:\n")
                        println(json.toString(2))

                    } else {

                        // Store the URI of the second REST API method.
                        // This URI is where you can get the results of the first REST API method.
                        var operationLocation: String? = null

                        // The 'Operation-Location' response header value contains the URI for
                        // the second REST API method.
                        val responseHeaders = response.allHeaders
                        for (header in responseHeaders) {
                            if (header.name == "Operation-Location") {
                                operationLocation = header.value
                                break
                            }
                        }

                        if (operationLocation == null) {
                            println("\nError retrieving Operation-Location.\nExiting.")
                            //System.exit(1)
                        }

                        // If the first REST API method completes successfully, the second
                        // REST API method retrieves the text written in the image.
                        //
                        // Note: The response may not be immediately available. Text
                        // recognition is an asynchronous operation that can take a variable
                        // amount of time depending on the length of the text.
                        // You may need to wait or retry this operation.
                        // NOTE(review): unlike the URL variant below, this path polls
                        // immediately with no delay, so the result may not be ready yet.


                        // Call the second REST API method and get the response.
                        val resultRequest = HttpGet(operationLocation)
                        resultRequest.setHeader("Ocp-Apim-Subscription-Key", subscriptionKey)

                        val resultResponse = httpResultClient.execute(resultRequest)
                        val responseEntity = resultResponse.entity

                        if (responseEntity != null) {
                            // Format and display the JSON response.
                            val jsonString = EntityUtils.toString(responseEntity)
                            val json = JSONObject(jsonString)
                            Log.i("yjj", "Text recognition result response: \n" + json.toString(2))
                        }
                    }

                } catch (e: Exception) {
                    println(e.message)
                }
            }.start()
        }

    }

    /**
     * Main entry point of the OCR + translate pipeline: shows a loading
     * dialog, POSTs [photoFile] to the synchronous OCR endpoint on a worker
     * thread, parses the response into [OCRResultBean], and hands it to
     * [tearApartToLineWords] which kicks off per-line translation.
     */
    private fun recognizeOcrAndTranslate() {

        this.showLoading()

        Thread {

            try {
                val uriBuilder = URIBuilder(uriBase)

                // Source language for OCR, read from shared preferences.
                val fromLanguage = CommonUtils.getformatedCode(getSpString("SP_KEY_ORIGIN_LANGUAGE", "en"))

                Log.i("yjj", "ocr翻译 fromLanguage$fromLanguage")

                uriBuilder.setParameter("language", fromLanguage)// e.g. "unk" or "zh-Hans"
                uriBuilder.setParameter("detectOrientation", "true")


                // Request parameters.
                val uri = uriBuilder.build()
                val request = HttpPost(uri)

                // Request headers.
                request.setHeader("Content-Type", "application/octet-stream")
                request.setHeader("Ocp-Apim-Subscription-Key", subscriptionKey)



                /*   val file = File(
                       getExternalStorageDirectory().getAbsolutePath(),
                       "/tencent/QQfile_recv/img_democritus.png"
                   )*/

                // Stream the local photo bytes; -1 length with chunked encoding.
                val reqEntity = InputStreamEntity(
                    FileInputStream(photoFile), -1
                )

                reqEntity.setChunked(true) // Send in multiple parts if needed
                request.entity = reqEntity


                // Request body.
                //val requestEntity = StringEntity("{\"url\":\"$imageToAnalyze\"}")
                //request.setEntity(requestEntity)


                // Call the REST API method and get the response entity.
                val response = mHttpClient.execute(request)
                val entity = response.entity

                if (entity != null) {
                    // Format and display the JSON response.
                    val jsonString = EntityUtils.toString(entity)
                    val json = JSONObject(jsonString)


                    var resultBean : OCRResultBean = Gson().fromJson(jsonString, OCRResultBean::class.java)
                    tearApartToLineWords(resultBean)


                    LogUtil.e("yjj", "REST Ocr Response:\n" + json.toString(2))
                }
            } catch (e: Exception) {
                // Display error message and drop the loading dialog.
                Log.i("translate error", e.toString())
                this.dismiss()
            }


        }.start()

    }


    /**
     * Wires the two "remote URL" test buttons, mirroring [setLocalImageReq]
     * but sending a JSON body with a public image URL instead of local bytes.
     * The Read-API variant sleeps 10 s before polling for the result.
     */
    private fun setHttpUrlImageReq() {
        txt_test_ocr.setOnClickListener {

            Thread {

                try {
                    val uriBuilder = URIBuilder(uriBase)

                    uriBuilder.setParameter("language", "unk")
                    uriBuilder.setParameter("detectOrientation", "true")

                    // Request parameters.
                    val uri = uriBuilder.build()
                    val request = HttpPost(uri)

                    // Request headers.
                    request.setHeader("Content-Type", "application/json")
                    request.setHeader("Ocp-Apim-Subscription-Key", subscriptionKey)

                    // Request body.
                    val requestEntity = StringEntity("{\"url\":\"$imageToAnalyze\"}")
                    request.setEntity(requestEntity)

                    // Call the REST API method and get the response entity.
                    val response = mHttpClient.execute(request)
                    val entity = response.entity

                    if (entity != null) {
                        // Format and display the JSON response.
                        val jsonString = EntityUtils.toString(entity)
                        val json = JSONObject(jsonString)
                        Log.i("yjj", "REST Ocr Response:\n" + json.toString(2))
                    }
                } catch (e: Exception) {
                    // Display error message.
                    println(e.message)
                }


            }.start()


        }


        txt_test_read_ocr.setOnClickListener {


            val httpTextClient = HttpClientBuilder.create().build()
            val httpResultClient = HttpClientBuilder.create().build()

            Thread {
                try {
                    // This operation requires two REST API calls. One to submit the image
                    // for processing, the other to retrieve the text found in the image.

                    val builder = URIBuilder(uriBaseRead)

                    // Prepare the URI for the REST API method.
                    val uri = builder.build()
                    val request = HttpPost(uri)

                    // Request headers.
                    request.setHeader("Content-Type", "application/json")
                    request.setHeader("Ocp-Apim-Subscription-Key", subscriptionKey)

                    // Request body.
                    val requestEntity = StringEntity("{\"url\":\"$imageToAnalyzeRead\"}")
                    request.entity = requestEntity

                    // Two REST API methods are required to extract text.
                    // One method to submit the image for processing, the other method
                    // to retrieve the text found in the image.

                    // Call the first REST API method to detect the text.
                    val response = httpTextClient.execute(request)

                    // Check for success (the Read API returns 202 Accepted).
                    if (response.statusLine.statusCode != 202) {
                        // Format and display the JSON error message.
                        val entity = response.entity
                        val jsonString = EntityUtils.toString(entity)
                        val json = JSONObject(jsonString)
                        println("Error:\n")
                        println(json.toString(2))

                    } else {

                        // Store the URI of the second REST API method.
                        // This URI is where you can get the results of the first REST API method.
                        var operationLocation: String? = null

                        // The 'Operation-Location' response header value contains the URI for
                        // the second REST API method.
                        val responseHeaders = response.allHeaders
                        for (header in responseHeaders) {
                            if (header.name == "Operation-Location") {
                                operationLocation = header.value
                                break
                            }
                        }

                        if (operationLocation == null) {
                            println("\nError retrieving Operation-Location.\nExiting.")
                            //System.exit(1)
                        }

                        // If the first REST API method completes successfully, the second
                        // REST API method retrieves the text written in the image.
                        //
                        // Note: The response may not be immediately available. Text
                        // recognition is an asynchronous operation that can take a variable
                        // amount of time depending on the length of the text.
                        // You may need to wait or retry this operation.

                        println("\nText submitted.\n" + "Waiting 10 seconds to retrieve the recognized text.\n")
                        Thread.sleep(10000)

                        // Call the second REST API method and get the response.
                        val resultRequest = HttpGet(operationLocation)
                        resultRequest.setHeader("Ocp-Apim-Subscription-Key", subscriptionKey)

                        val resultResponse = httpResultClient.execute(resultRequest)
                        val responseEntity = resultResponse.entity

                        if (responseEntity != null) {
                            // Format and display the JSON response.
                            val jsonString = EntityUtils.toString(responseEntity)
                            val json = JSONObject(jsonString)
                            Log.i("yjj", "Text recognition result response: \n" + json.toString(2))
                        }
                    }

                } catch (e: Exception) {
                    println(e.message)
                }
            }.start()

        }

    }


    /**
     * Parses the raw OCR JSON into [OCRResultBean], populates the per-line
     * data via [tearApartToLineWords], and returns all recognized words joined
     * by single spaces (with a trailing space).
     */
    private fun combineResult(jsonStr: String?) : String{

        var resultBean : OCRResultBean = Gson().fromJson(jsonStr, OCRResultBean::class.java)

        tearApartToLineWords(resultBean)

        var sb = StringBuilder()
        for (item in resultBean.regions) {
            for (line in item.lines) {
                for (word in line.words) {
                    sb.append(word.text).append(" ")
                }
            }
        }

        var resultStr = sb.toString()
        Log.i("yjj", "combineResult resultStr >>>>" + resultStr)

        return resultStr

    }

    /**
     * Translates [content] on a background thread into the target language
     * from shared preferences, stores the translation into
     * mDrawTextDatas[index].lineWords, and posts a what == 2 message to
     * translate the next line (index + 1) if one exists.
     *
     * NOTE(review): the worker Thread is asynchronous, so the returned
     * resultString is still "" when this function returns — callers must read
     * the result from mDrawTextDatas, not from the return value.
     */
    public fun translate(content : String, index :Int) : String {

        Log.i("yjj", "translate >>>")

        // HTTP requests cannot run on the main thread since Android 4.0
        // (NetworkOnMainThreadException), hence the worker Thread.
        var resultString = ""
        Thread {
            try {
                //todo split and trans zh-hans
                val targetLanguage = CommonUtils.getformatedCode(getSpString("SP_KEY_TARGET_LANGUAGE", "zh-Hans"))
                val translateRequest = TranslateHelper()
                translateRequest.setLanguageCode(targetLanguage)
                // Translator API expects a JSON array of {"Text": ...} objects.
                val content = "[{\n\t\"Text\": \"" + content + "\"\n}]"
                val response = translateRequest.Post(content)
                val result = prettify(response)
                Log.i("yjj", result)
                var list = Gson().fromJson<List<TranslateBean>>(result, object : TypeToken<List<TranslateBean>>() {

                }.type)

                if (list != null && list.size > 0) {
                    resultString  = list!!.get(0).getTranslations().get(0).getText()
                    Log.i("yjj", " ocr translate result :" + resultString)
                    mDrawTextDatas[index].lineWords = resultString
                }

                // Chain to the next line: the handler calls translate(index + 1).
                if ((index + 1) < mDrawTextDatas.size) {
                    var msg = mHandler.obtainMessage()
                    msg.arg1 = index + 1
                    msg.what = 2
                    mHandler.sendMessage(msg)
                }


            } catch (e: Exception) {
                Log.i("yjj", e.toString())
                this.dismiss()
            }


        }.start()

        return resultString
    }

    /**
     * Splits the OCR result into one [DrawTextBean] per recognized line,
     * extracting x/y from the line's "x,y,w,h" boundingBox string and joining
     * the line's words with spaces. Clears and refills [mDrawTextDatas], then
     * posts a what == 2 / arg1 == 0 message to start translating line 0.
     * NOTE(review): the data != null check is redundant — the parameter type
     * is non-nullable.
     */
    private fun tearApartToLineWords(data : OCRResultBean) {
        if (data != null) {

            mDrawTextDatas.clear()

            for (i in 0 until data.getRegions().size) {

                val regionsBean = data.getRegions().get(i)

                for (j in 0 until regionsBean.getLines().size) {

                    // boundingBox format: "x,y,width,height" — only x and y are used.
                    val boundingBox = regionsBean.getLines().get(j).getBoundingBox()
                    val indexs = boundingBox.split(",".toRegex()).dropLastWhile({ it.isEmpty() }).toTypedArray()
                    val x = java.lang.Float.parseFloat(indexs[0])
                    val y = java.lang.Float.parseFloat(indexs[1])
                    Log.i("yjj", "ondraw x = $x, y =$y")

                    val wordsList = regionsBean.getLines().get(j).getWords()
                    var words = ""
                    val sb = StringBuilder()
                    for (k in wordsList.indices) {
                        sb.append(wordsList.get(k).getText() + " ")
                    }

                    words = sb.toString()

                    mIsFinish = true

                    var drawTextBean = DrawTextBean()
                    drawTextBean.x = x
                    drawTextBean.y = y
                    drawTextBean.lineWords = words
                    //var curBean = translateLineWords(words, x , y)
                    mDrawTextDatas.add(drawTextBean)


                    Log.i("yjj", "ondraw words =  $words")

                }

            }

            // Kick off translation of the first line; the handler/translate
            // pair then walks the remaining lines.
            var msg = mHandler.obtainMessage()
            msg.arg1 = 0
            msg.what = 2
            mHandler.sendMessage(msg)

        }
    }



    /**
     * Posts a what == 1 message so the handler refreshes the overlay view with
     * the translated lines (no-op when there are no lines).
     */
    private fun refreshUIView() {
        if (mDrawTextDatas != null && mDrawTextDatas!!.size > 0) {
            val msg = mHandler.obtainMessage()
            msg.what = 1
            mHandler.sendMessage(msg)
        }
    }


    /**
     * Builds a [DrawTextBean] at (x, y) whose text is the translation of
     * [words]. NOTE(review): currently unused (call site is commented out),
     * and translate() returns "" immediately — see its doc.
     */
    private fun translateLineWords(words: String, x : Float, y : Float) : DrawTextBean {

        var drawTextBean = DrawTextBean()
        drawTextBean.lineWords = translate(words, 0)
        drawTextBean.x = x
        drawTextBean.y = y

        return drawTextBean
    }


    // Created during field initialization, i.e. on the thread that constructs
    // the activity (the main thread), so messages are handled there.
    private val mHandler = MyHandler(this)

    /**
     * Static handler holding the activity via [WeakReference] to avoid leaking
     * it while messages are pending.
     * what == 1: refresh the overlay view with translated lines.
     * what == 2: translate the line at index arg1 (chained by translate()).
     */
    private class MyHandler(activity: OcrActivity) : Handler() {

        private val mActivity: WeakReference<OcrActivity>

        init {
            mActivity = WeakReference(activity)
        }

        override fun handleMessage(msg: Message) {
            if (msg.what == 1) {
                refreshTxtContent(msg)
            }
            if (msg.what == 2) {
                operate(msg.arg1)
            }

        }

        /**
         * Triggers translation of line [pos]; when this is the last line,
         * schedules the UI refresh.
         * NOTE(review): translate() is asynchronous, so refreshUIView() may
         * run before the last translation has actually landed.
         */
        private fun operate(pos: Int) {
            Log.i("yjj", "operate >>>> pos =$pos")
            val activity = mActivity.get()
            var lineWords = activity!!.translate(activity.mDrawTextDatas[pos].lineWords, pos)

            if ((pos + 1) == activity.mDrawTextDatas.size) {
                activity.refreshUIView()
            }
        }

        /** Dismisses the loading dialog and paints the translated lines. */
        private fun refreshTxtContent(msg: Message) {
            mActivity.get()!!.dismiss()
            val activity = mActivity.get()
            if (activity != null) {
                //activity.txt_msg.text = msg.obj.toString()
                activity.custom_view.setTranslateTextView(activity.mDrawTextDatas)
            }
        }
    }


    /** Tracks window focus so onKeyDown only reacts while focused. */
    override fun onWindowFocusChanged(hasFocus: Boolean) {
        super.onWindowFocusChanged(hasFocus)
        mHasFocus = hasFocus
    }



    /**
     * Hardware-key shortcut: keycode 66 (ENTER — e.g. a touchpad click)
     * triggers OCR + translation while the window has focus.
     */
    override fun onKeyDown(keyCode: Int, event: KeyEvent?): Boolean {

        Log.i("yjj", "TransImgOcrActivity onKeyDown >>>keyCode" + keyCode + ", action =" + event!!.action)
        if (keyCode == 66 && mHasFocus) {
            // Touchpad/ENTER click: run recognition and translation.
            recognizeOcrAndTranslate()
        }

        return super.onKeyDown(keyCode, event)
    }


    /** Drops any loading dialog when the activity is destroyed. */
    override fun onDestroy() {
        super.onDestroy()

        this.dismiss()
    }



}
