package com.example.yijian;

import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;

import android.annotation.SuppressLint;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;

import com.chad.library.adapter.base.BaseQuickAdapter;
import com.chad.library.adapter.base.listener.OnItemChildClickListener;
import com.chad.library.adapter.base.listener.OnItemClickListener;
import com.example.yijian.base.AndroidOS;
import com.example.yijian.base.MSLiveWindow;
import com.example.yijian.bean.LicenseInfo;
import com.example.yijian.capture.CaptureActivity;
import com.example.yijian.draft.DraftAdapter;
import com.example.yijian.http.HttpManager;
import com.example.yijian.http.custom.SimpleDownListener;
import com.example.yijian.http.model.Progress;
import com.example.yijian.http.temp.TempStringCallBack;
import com.example.yijian.seletemedia.SelectMediaActivity;
import com.example.yijian.utils.AppManager;
import com.example.yijian.utils.CommonUtils;
import com.example.yijian.utils.Constants;
import com.example.yijian.utils.GsonUtils;
import com.example.yijian.utils.LogUtils;
import com.example.yijian.utils.ParameterSettingValues;
import com.example.yijian.utils.PathUtils;
import com.example.yijian.utils.SharedPreferencesUtils;
import com.example.yijian.utils.ToastUtil;
import com.example.yijian.utils.permission.PermissionsActivity;
import com.example.yijian.utils.permission.PermissionsChecker;
import com.example.yijian.view.FileUtils;
import com.google.android.material.bottomsheet.BottomSheetDialog;
import com.meicam.effect.sdk.NvsEffectSdkContext;
import com.meicam.sdk.NvsStreamingContext;
import com.scwang.smart.refresh.layout.SmartRefreshLayout;

import org.jetbrains.annotations.NotNull;

import java.io.File;
import java.lang.ref.WeakReference;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Logger;

import static com.example.yijian.utils.Constants.BUILD_HUMAN_AI_TYPE_FU;
import static com.example.yijian.utils.Constants.BUILD_HUMAN_AI_TYPE_MS;
import static com.example.yijian.utils.Constants.BUILD_HUMAN_AI_TYPE_MS_ST;
import static com.example.yijian.utils.Constants.BUILD_HUMAN_AI_TYPE_MS_ST_SUPER;
import static com.example.yijian.utils.Constants.HUMAN_AI_TYPE_MS;
import static com.example.yijian.utils.Constants.HUMAN_AI_TYPE_NONE;

public class MainActivity extends AppCompatActivity implements View.OnClickListener {
    // Draft "manage" UI states: no drafts yet / manage button shown / manage mode active (cancel + delete shown).
    public static final int MANAGER_NONE = 0;
    public static final int MANAGER = 1;
    public static final int MANAGER_CANCEL = 2;

    // Request code used when launching PermissionsActivity for runtime permissions.
    static final int REQUEST_CODE = 110;
    private static final String TAG = "MainActivity";
    // All runtime permissions the app may need; narrowed to the missing ones in checkPermissions().
    List<String> permissionList = CommonUtils.getAllPermissionsList();

    // Runtime-permission helpers (permission related)
    private PermissionsChecker mPermissionsChecker;
    // Remembers the view the user tapped so the action can be replayed after permissions are granted.
    private View clickedView = null;

    private LinearLayout llStartVideo;
    private TextView tvManagerGray;
    private TextView tvManager;
    private TextView tvManagerCancel;
    private SmartRefreshLayout refreshLayout;
    private RecyclerView recyclerDraft;
    private ImageButton btnDelete;
    // true while the AR scene is still initializing on the background thread.
    private boolean initARSceneing = true;
    // true once AR scene initialization completed successfully (passed to CaptureActivity).
    private boolean arSceneFinished = false;
    private DraftAdapter draftAdapter;
    // Placeholder draft entries; presumably replaced by real draft data later — TODO confirm.
    private String[] data = new String[]{"", "", ""};
    /**
     * SDK Normal Edition: face-module capability detected at runtime
     * via NvsStreamingContext.hasARModule().
     */
    private int mCanUseARFaceType = HUMAN_AI_TYPE_NONE;
    // Background thread on which the face models are copied/initialized.
    private HandlerThread mHandlerThread;
    // Main-thread handler receiving the AR-scene init result.
    private MainActivityHandler mHandler = new MainActivityHandler(this);
    public static final int INIT_ARSCENE_COMPLETE_CODE = 201;
    public static final int INIT_ARSCENE_FAILURE_CODE = 202;
    public static final int AD_SPANNER_CHANGE_CODE = 203;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        initView();

        // Wire up the draft list.
        draftAdapter = new DraftAdapter();
        recyclerDraft.setLayoutManager(new LinearLayoutManager(this));
        recyclerDraft.setAdapter(draftAdapter);
        setData();

        llStartVideo.setOnClickListener(this);
        tvManager.setOnClickListener(this);
        tvManagerCancel.setOnClickListener(this);
        btnDelete.setOnClickListener(this);
        // TODO: open the editor when a draft item is tapped.
        draftAdapter.setOnItemClickListener((adapter, view, position) -> {
        });
        draftAdapter.setOnItemChildClickListener((adapter, view, position) -> {
            final int id = view.getId();
            if (id == R.id.tv_upload) {
                ToastUtil.showToast(MainActivity.this, "上传");
            } else if (id == R.id.iv_more) {
                showDraftBottomDialog();
            }
        });
    }

    @Override
    protected void onResume() {
        super.onResume();

        // Re-apply the persisted live-window display mode globally every time
        // the activity returns to the foreground.
        final ParameterSettingValues settings = ParameterSettingValues.instance();
        if (settings != null) {
            MSLiveWindow.setLiveModel(settings.getLiveWindowModel());
        }
        checkAuthorization();
    }

    /** Feeds the (placeholder) drafts into the adapter and syncs the manage-button state. */
    private void setData() {
        final int state = (data.length == 0) ? MANAGER_NONE : MANAGER;
        setManagerState(state);
        draftAdapter.setNewInstance(Arrays.asList(data));
    }

    /**
     * Shows exactly the manage-related controls belonging to the given state
     * (one of MANAGER_NONE / MANAGER / MANAGER_CANCEL), hiding all others first.
     */
    private void setManagerState(int state) {
        tvManagerGray.setVisibility(View.GONE);
        tvManager.setVisibility(View.GONE);
        tvManagerCancel.setVisibility(View.GONE);
        btnDelete.setVisibility(View.GONE);
        switch (state) {
            case MANAGER_NONE:
                tvManagerGray.setVisibility(View.VISIBLE);
                break;
            case MANAGER:
                tvManager.setVisibility(View.VISIBLE);
                break;
            case MANAGER_CANCEL:
                tvManagerCancel.setVisibility(View.VISIBLE);
                btnDelete.setVisibility(View.VISIBLE);
                break;
            default:
                break;
        }
    }

    /** Resolves all view references from the inflated layout. */
    private void initView() {
        // Manage-mode controls.
        tvManagerGray = findViewById(R.id.tv_manager_gray);
        tvManager = findViewById(R.id.tv_manager);
        tvManagerCancel = findViewById(R.id.tv_manager_cancel);
        btnDelete = findViewById(R.id.btn_draft_delete);
        // Draft list and entry button.
        refreshLayout = findViewById(R.id.refresh_layout);
        recyclerDraft = findViewById(R.id.rv_draft);
        llStartVideo = findViewById(R.id.ll_add_edit);
    }

    /** @return true when every permission in {@link #permissionList} is already granted. */
    public boolean hasAllPermission() {
        if (mPermissionsChecker == null) {
            mPermissionsChecker = new PermissionsChecker(this);
        }
        final boolean missingAny = mPermissionsChecker.lacksPermissions(permissionList);
        return !missingAny;
    }

    /**
     * Narrows {@link #permissionList} down to the permissions still missing and,
     * if any remain, launches {@link PermissionsActivity} to request them.
     * The result is delivered to {@link #onActivityResult(int, int, Intent)}.
     */
    public void checkPermissions() {
        if (mPermissionsChecker == null) {
            mPermissionsChecker = new PermissionsChecker(this);
        }
        permissionList = mPermissionsChecker.checkPermission(permissionList);
        if (!permissionList.isEmpty()) {
            // Zero-length array idiom: toArray sizes the result itself; also avoids
            // building the array at all when there is nothing left to request.
            startPermissionsActivity(REQUEST_CODE, permissionList.toArray(new String[0]));
        }
    }

    /**
     * Delegates to {@link PermissionsActivity} so its result arrives in
     * {@link #onActivityResult(int, int, Intent)} under the given request code.
     */
    private void startPermissionsActivity(int code, String... permission) {
        PermissionsActivity.startActivityForResult(this, code, permission);
    }

    /**
     * Receives the outcome of the runtime-permission flow started via
     * {@link #startPermissionsActivity(int, String...)} and, on success,
     * replays the action the user originally tapped.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Only interpret results from our own PermissionsActivity launch; result
        // codes from unrelated activities could collide with these constants.
        if (requestCode != REQUEST_CODE) {
            return;
        }
        switch (resultCode) {
            case PermissionsActivity.PERMISSIONS_DENIED:
                Log.e(TAG, "hasPermission: 没有允许权限");
                break;
            case PermissionsActivity.PERMISSIONS_GRANTED:
                Log.e(TAG, "hasPermission: 所有权限都有了");
                // Guard against a null clickedView (e.g. process restore) before replaying.
                if (clickedView != null) {
                    doClick(clickedView);
                }
                break;
            case PermissionsActivity.PERMISSIONS_No_PROMPT:
                Log.e(TAG, "hasPermission: 用户选择了不再提示");
                break;
            default:
                break;
        }
    }

    /**
     * Executes the action bound to the tapped view. Called directly from the
     * create-dialog buttons and replayed after permissions are granted.
     */
    private void doClick(View clickedView) {
        final int id = clickedView.getId();
        if (id == R.id.ll_add_edit) {
            showCreateDialog();
        } else if (id == R.id.tv_open_camera) {
            // Tell the capture screen whether the AR scene finished initializing.
            Bundle captureBundle = new Bundle();
            captureBundle.putBoolean("initArScene", arSceneFinished);
            AppManager.getInstance().jumpActivity(this, CaptureActivity.class, captureBundle);
        } else if (id == R.id.tv_open_album) {
            Bundle editBundle = new Bundle();
            editBundle.putInt("visitMethod", Constants.FROMMAINACTIVITYTOVISIT);
            editBundle.putInt("limitMediaCount", -1); // -1 means unlimited selectable media
            AppManager.getInstance().jumpActivity(this, SelectMediaActivity.class, editBundle);
        }
    }

    @SuppressLint("NonConstantResourceId")
    @Override
    public void onClick(View view) {
        if (view == null) {
            return;
        }
        switch (view.getId()) {
            case R.id.tv_manager:
                // Enter manage mode: show "cancel" + delete controls.
                // NOTE(review): the activity UI moves to MANAGER_CANCEL while the adapter
                // receives MANAGER (and vice versa below) — the constants look swapped;
                // confirm against DraftAdapter.setState semantics before changing.
                setManagerState(MANAGER_CANCEL);
                draftAdapter.setState(MANAGER);
                break;
            case R.id.tv_manager_cancel:
                // Leave manage mode.
                setManagerState(MANAGER);
                draftAdapter.setState(MANAGER_CANCEL);
                break;
            case R.id.ll_add_edit:
                // Remember the tapped view so the action can resume after the
                // permission flow returns through onActivityResult.
                clickedView = view;
                if (!hasAllPermission()) {
                    checkPermissions();
                    return;
                }
                showCreateDialog();
                break;
            default:
                break;
        }
    }

    /** Presents the bottom sheet offering camera / album / cancel for creating a video. */
    private void showCreateDialog() {
        final BottomSheetDialog dialog = new BottomSheetDialog(MainActivity.this);
        final View content = getLayoutInflater().inflate(R.layout.dialog_bottom_create, null, false);
        dialog.setContentView(content);
        // Camera and album buttons share the same behavior: forward to doClick, then dismiss.
        final View.OnClickListener forwardAndClose = view -> {
            doClick(view);
            dialog.cancel();
        };
        content.findViewById(R.id.tv_open_camera).setOnClickListener(forwardAndClose);
        content.findViewById(R.id.tv_open_album).setOnClickListener(forwardAndClose);
        content.findViewById(R.id.tv_cancel).setOnClickListener(v -> dialog.cancel());
        try {
            // hack: make the BottomSheetDialog's container background transparent
            ViewGroup parent = (ViewGroup) content.getParent();
            parent.setBackgroundResource(android.R.color.transparent);
        } catch (Exception e) {
            e.printStackTrace();
        }
        dialog.setCancelable(true);
        dialog.setCanceledOnTouchOutside(true);
        dialog.show();
    }

    /** Presents the per-draft "more" bottom sheet (actions are not wired up yet). */
    private void showDraftBottomDialog() {
        final BottomSheetDialog dialog = new BottomSheetDialog(MainActivity.this);
        final View content = getLayoutInflater().inflate(R.layout.dialog_bottom_draft, null, false);
        dialog.setContentView(content);
        try {
            // hack: make the BottomSheetDialog's container background transparent
            ViewGroup parent = (ViewGroup) content.getParent();
            parent.setBackgroundResource(android.R.color.transparent);
        } catch (Exception e) {
            e.printStackTrace();
        }
        dialog.setCancelable(true);
        dialog.setCanceledOnTouchOutside(true);
        dialog.show();
    }
    /**
     * Checks SDK authorization; different build flavors use different license flows.
     * SenseTime builds share a single license file regardless of basic/advanced tier.
     */
    private void checkAuthorization() {
        // The face-particle feature of the in-house (Meishe) face SDK also needs a
        // SenseTime license, hence the equals() check in addition to contains().
        final boolean needsStLicense = BuildConfig.HUMAN_AI_TYPE.contains(BUILD_HUMAN_AI_TYPE_MS_ST)
                || BuildConfig.HUMAN_AI_TYPE.equals(BUILD_HUMAN_AI_TYPE_MS);
        if (!needsStLicense) {
            // Non-SenseTime authorization: initialize the face model with no license file.
            initARSceneEffect();
            return;
        }
        // SenseTime authorization: the cached license expiry is stored in SharedPreferences.
        long param = (long) SharedPreferencesUtils.getParam(MainActivity.this, Constants.KEY_SHARED_END_TIMESTAMP, 0L);
        long currentTimeMillis = System.currentTimeMillis();
        if (param != 0 && currentTimeMillis < param) {
            // Cached license still valid — reuse it if the file is actually present.
            String licenseFilePath = (String) SharedPreferencesUtils.getParam(MainActivity.this, Constants.KEY_SHARED_AUTHOR_FILE_PATH, "");
            Log.d(TAG, "不需要更新授权文件 直接进行授权 licenseFilePath:" + licenseFilePath);
            if (TextUtils.isEmpty(licenseFilePath) || !new File(licenseFilePath).exists()) {
                updateLicenseFile();
                return;
            }
            initARSceneEffect(licenseFilePath);
        } else {
            // Never fetched (param == 0) or expired: refresh the license, which
            // triggers initialization once the download finishes.
            Log.d(TAG, "需要更新,更新完直接下载 param=" + param);
            updateLicenseFile();
        }
    }
    /** Convenience overload for build flavors that need no SenseTime license file. */
    private void initARSceneEffect() {
        initARSceneEffect("");
    }
    /**
     * 初始化人脸相关
     * Initialize face correlation
     */
    /**
     * Initializes face / human-detection models for both the Streaming SDK
     * (NvsStreamingContext) and the Effect SDK (NvsEffectSdkContext).
     * The heavy work (copying models out of assets, SDK init calls) runs on a
     * background HandlerThread; the outcome is posted to {@link #mHandler} as
     * {@link #INIT_ARSCENE_COMPLETE_CODE} or {@link #INIT_ARSCENE_FAILURE_CODE}.
     *
     * NOTE(review): {@code mHandlerThread} is never quit anywhere visible in this
     * file; consider quitSafely() in onDestroy — confirm against the full class.
     *
     * @param stLicenseFilePath SenseTime license file path; empty string for
     *                          build flavors that do not require one.
     */
    private void initARSceneEffect(final String stLicenseFilePath) {
        // NOTE(review): dead SenseAr experiment kept verbatim from the original source.
      /*  SenseArMaterialService.setServerType(SenseArServerType.DomesticServer);
        SenseArMaterialService.shareInstance().fetchAllGroups(new SenseArMaterialService.FetchGroupsListener() {
            @Override
            public void onSuccess(List<SenseArMaterialGroupId> list) {
                Log.e(TAG,"onSuccess  == s"+list.size());
            }

            @Override
            public void onFailure(int i, String s) {
                Log.e(TAG,"onFailure  == s"+s);
            }
        });
        SenseArMaterialService.shareInstance().
        SenseArMaterialService.shareInstance().initialize(this);
        byte[] licData = SenseArMaterialService.shareInstance().getLicenseData();
        Log.e(TAG,"licData  =="+licData);*/
        /*
         * Detect whether this SDK package contains the face module.
         */
        mCanUseARFaceType = NvsStreamingContext.hasARModule();
        /*
         * Initialize the AR Scene — only needs to happen once globally.
         */
        if (mCanUseARFaceType == HUMAN_AI_TYPE_MS && !arSceneFinished) {
            if (mHandlerThread == null) {
                mHandlerThread = new HandlerThread("handlerThread");
                mHandlerThread.start();
            }
            Handler initHandler = new Handler(mHandlerThread.getLooper(), new Handler.Callback() {
                @Override
                public boolean handleMessage(Message msg) {
                    String modelPath = null;
                    String licensePath = null;
                    String faceModelName = null;
                    String className = null;
                    /*
                     * Pick the base face model file for the current build flavor
                     * (Meishe in-house / SenseTime 106 / FaceUnity / SenseTime advanced).
                     */
                    if (BuildConfig.HUMAN_AI_TYPE.equals(BUILD_HUMAN_AI_TYPE_MS)) {
                        modelPath = "/facemode/ms/ms_face_v1.2.2.model";
                        faceModelName = "ms_face_v1.2.2.model";
                        className = "facemode/ms";
                        if (BuildConfig.FACE_MODEL == 240) {
                            // 240-landmark variant of the Meishe model.
                            modelPath = "/facemode/ms/240/ms_face240_v2.0.0.model";
                            faceModelName = "ms_face240_v2.0.0.model";
                            className = "facemode/ms/240";
                        }
                        licensePath = "";
                    } else if (BuildConfig.HUMAN_AI_TYPE.equals(BUILD_HUMAN_AI_TYPE_MS_ST)) {
                        modelPath = "/facemode/st/106/M_SenseME_Face_Video_5.3.4.model";
                        faceModelName = "M_SenseME_Face_Video_5.3.4.model";
                        className = "facemode/st/106";
                        licensePath = stLicenseFilePath;

                    } else if (BuildConfig.HUMAN_AI_TYPE.equals(BUILD_HUMAN_AI_TYPE_FU)) {
                        modelPath = "/facemode/fu/fu_face_v3.model";
                        faceModelName = "fu_face_v3.model";
                        className = "facemode/fu";
                        licensePath = "assets:/facemode/fu/fu_face_v3.license";
                    } else if (BuildConfig.HUMAN_AI_TYPE.equals(BUILD_HUMAN_AI_TYPE_MS_ST_SUPER)) {
                        modelPath = "/facemode/st/advance/M_SenseME_Face_Video_7.1.0.model";
                        faceModelName = "M_SenseME_Face_Video_7.1.0.model";
                        className = "facemode/st/advance";
                        licensePath = stLicenseFilePath;
                    }
                    /*
                     * The model file must be a local file path, so copy it out of
                     * the APK assets to local storage first.
                     */
                    boolean copySuccess = FileUtils.copyFileIfNeed(MainActivity.this, faceModelName, className);
                    Log.e(TAG, "copySuccess-->" + copySuccess);
                    File rootDir = getApplicationContext().getExternalFilesDir("");
                    if (AndroidOS.USE_SCOPED_STORAGE) {
                        // Scoped storage: fall back to the app-private files dir.
                        rootDir = getApplicationContext().getFilesDir();
                    }


                    //Streaming SDK: base face landmark + action detection.
                    String destModelDir = rootDir + modelPath;
                    boolean initSuccess = NvsStreamingContext.initHumanDetection(MSApplication.getContext(),
                            destModelDir, licensePath,
                            NvsStreamingContext.HUMAN_DETECTION_FEATURE_FACE_LANDMARK | NvsStreamingContext.HUMAN_DETECTION_FEATURE_FACE_ACTION);
                    Log.e(TAG, "initSuccess-->" + initSuccess);

                    //Effect SDK: same models, plus image mode.
                    NvsEffectSdkContext.initHumanDetection(MSApplication.getContext(),
                            destModelDir, licensePath,
                            NvsStreamingContext.HUMAN_DETECTION_FEATURE_FACE_LANDMARK |
                                    NvsStreamingContext.HUMAN_DETECTION_FEATURE_FACE_ACTION |
                                    NvsStreamingContext.HUMAN_DETECTION_FEATURE_IMAGE_MODE);


                    /*
                     * Initialize the fake-face model.
                     */
                    String fakeFacePath = "assets:/facemode/common/fakeface.dat";
                    boolean fakefaceSuccess = NvsStreamingContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_FAKE_FACE, fakeFacePath);
                    Log.e(TAG, "fakefaceSuccess-->" + fakefaceSuccess);

                    //Effect SDK
                    NvsEffectSdkContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_FAKE_FACE, fakeFacePath);

                    /*
                     * Initialize makeup model #1 (106- or 240-landmark variant
                     * depending on the build's FACE_MODEL).
                     */
                    String makeUpPath = "assets:/facemode/common/makeup106_v1.0.3.dat";
                    if (BuildConfig.FACE_MODEL == 240) {
                        makeUpPath = "assets:/facemode/common/makeup240_v1.0.4.dat";
                    }
                    boolean makeupSuccess = NvsStreamingContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_MAKEUP, makeUpPath);
                    Log.e(TAG, BuildConfig.FACE_MODEL + "makeupSuccess-->" + makeupSuccess);


                    //Effect SDK
                    NvsEffectSdkContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_MAKEUP, makeUpPath);

                    /*
                     * Initialize makeup model #2.
                     */
                    String makeUpPath2 = "assets:/facemode/common/makeup2_106_v1.0.0.dat";
                    if (BuildConfig.FACE_MODEL == 240) {
                        makeUpPath2 = "assets:/facemode/common/makeup2_240_v1.0.0.dat";
                    }
                    boolean makeupSuccess2 = NvsStreamingContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_MAKEUP2, makeUpPath2);
                    Log.e(TAG, BuildConfig.FACE_MODEL + "makeupSuccess-->" + makeupSuccess2);


                    //Effect SDK
                    NvsEffectSdkContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_MAKEUP2, makeUpPath2);

                    /*
                     * Face models come in Meishe and SenseTime variants, each with
                     * 106- and 240-landmark versions; the initialization files are
                     * selected according to the build configuration.
                     */
                    if (BuildConfig.HUMAN_AI_TYPE.equals(BUILD_HUMAN_AI_TYPE_MS)) {

                        if (BuildConfig.FACE_MODEL == 240) {
                            String pePath = "assets:/facemode/ms/240/pe240_ms_v1.0.0.dat";
                            boolean peSuccess = NvsStreamingContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_PE240, pePath);
                            Log.e(TAG, "ms240 peSuccess-->" + peSuccess);

                            //Effect SDK
                            NvsEffectSdkContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_PE240, pePath);
                        }

                        // Background segmentation model.
                        String segPath = "assets:/facemode/ms/ms_humanseg_v1.0.7.model";
                        boolean segSuccess = NvsStreamingContext.initHumanDetectionExt(MSApplication.getContext(),
                                segPath, null, NvsStreamingContext.HUMAN_DETECTION_FEATURE_SEGMENTATION_BACKGROUND);
                        Log.e(TAG, "ms segSuccess-->" + segSuccess);

                        //Effect SDK
                        NvsEffectSdkContext.initHumanDetectionExt(MSApplication.getContext(),
                                segPath, null, NvsStreamingContext.HUMAN_DETECTION_FEATURE_SEGMENTATION_BACKGROUND);


                        // Half-body segmentation model.
                        String halfBodyPath = "assets:/facemode/ms/ms_halfbodyseg_v1.0.6.model";
                        boolean halfBodySuccess = NvsStreamingContext.initHumanDetectionExt(MSApplication.getContext(),
                                halfBodyPath, null, NvsStreamingContext.HUMAN_DETECTION_FEATURE_SEGMENTATION_HALF_BODY);
                        Log.e(TAG, "ms halfBodySuccess-->" + halfBodySuccess);

                        //Effect SDK
                        NvsEffectSdkContext.initHumanDetectionExt(MSApplication.getContext(),
                                halfBodyPath, null, NvsStreamingContext.HUMAN_DETECTION_FEATURE_SEGMENTATION_HALF_BODY);

                        // Sky segmentation model.
                        String segSkyPath = "assets:/facemode/ms/ms_skyseg_v1.0.0.model";
                        boolean segSkySuccess = NvsStreamingContext.initHumanDetectionExt(MSApplication.getContext(),
                                segSkyPath, null, NvsStreamingContext.HUMAN_DETECTION_FEATURE_SEGMENTATION_SKY);
                        Log.e(TAG, "ms segSkySuccess-->" + segSkySuccess);

                        //Effect SDK
                        NvsEffectSdkContext.initHumanDetectionExt(MSApplication.getContext(),
                                segSkyPath, null, NvsStreamingContext.HUMAN_DETECTION_FEATURE_SEGMENTATION_SKY);

                        // Hand landmark + gesture detection model.
                        String handPath = "assets:/facemode/ms/ms_hand_v1.0.0.model";
                        boolean handSuccess = NvsStreamingContext.initHumanDetectionExt(MSApplication.getContext(),
                                handPath, null, NvsStreamingContext.HUMAN_DETECTION_FEATURE_HAND_LANDMARK | NvsStreamingContext.HUMAN_DETECTION_FEATURE_HAND_ACTION);
                        Log.e(TAG, "ms handSuccess-->" + handSuccess);

                        //Effect SDK
                        NvsEffectSdkContext.initHumanDetectionExt(MSApplication.getContext(),
                                handPath, null, NvsStreamingContext.HUMAN_DETECTION_FEATURE_HAND_LANDMARK | NvsStreamingContext.HUMAN_DETECTION_FEATURE_HAND_ACTION);


                    } else if (BuildConfig.HUMAN_AI_TYPE.contains(BUILD_HUMAN_AI_TYPE_MS_ST)) {
                        if (BuildConfig.FACE_MODEL == 240) {
                            modelPath = rootDir + "/facemode/st/240/M_SenseME_Face_Extra_Advanced_6.0.8.model";
                            faceModelName = "M_SenseME_Face_Extra_Advanced_6.0.8.model";
                            String className240 = "facemode/st/240";
                            FileUtils.copyFileIfNeed(MainActivity.this, faceModelName, className240);
                            boolean initHumanDetectionExt = NvsStreamingContext.initHumanDetectionExt(MSApplication.getContext(),
                                    modelPath,
                                    null,
                                    NvsStreamingContext.HUMAN_DETECTION_FEATURE_EXTRA);
                            Log.e(TAG, "handleMessage: initHumanDetectionExt " + initHumanDetectionExt);

                            //Effect SDK
                            NvsEffectSdkContext.initHumanDetectionExt(MSApplication.getContext(),
                                    modelPath,
                                    null,
                                    NvsStreamingContext.HUMAN_DETECTION_FEATURE_EXTRA);

                            // 240-landmark specific PE data.
                            String pePath = "assets:/facemode/st/240/pe240_st_v1.0.0.dat";
                            boolean peSuccess = NvsStreamingContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_PE240, pePath);
                            Log.e(TAG, "st" + BuildConfig.FACE_MODEL + "peSuccess-->" + peSuccess);

                            //Effect SDK
                            NvsEffectSdkContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_PE240, pePath);
                        }
                        /*
                         * Background segmentation (SenseTime).
                         */
                        modelPath = rootDir + "/facemode/st/common/M_SenseME_Segment_4.12.11.model";
                        faceModelName = "M_SenseME_Segment_4.12.11.model";
                        String segmentModel = "facemode/st/common";
                        boolean copySuccess2 = FileUtils.copyFileIfNeed(MainActivity.this, faceModelName, segmentModel);
                        Log.e(TAG, "st copy Segment Success-->" + copySuccess2);
                        boolean stSegmentInit = NvsStreamingContext.initHumanDetectionExt(MSApplication.getContext(),
                                modelPath,
                                null,
                                NvsStreamingContext.HUMAN_DETECTION_FEATURE_SEGMENTATION_BACKGROUND);
                        Log.e(TAG, "st init Segment Success-->" + stSegmentInit);

                        //Effect SDK
                        NvsEffectSdkContext.initHumanDetectionExt(MSApplication.getContext(),
                                modelPath,
                                null,
                                NvsStreamingContext.HUMAN_DETECTION_FEATURE_SEGMENTATION_BACKGROUND);


                        /*
                         * SenseTime hand/gesture detection.
                         */
                        String handModelPath = rootDir + "/facemode/st/hand/M_SenseME_Hand_6.0.8.model";
                        String handModelName = "M_SenseME_Hand_6.0.8.model";
                        String handModel = "facemode/st/hand";
                        boolean copySuccessHand = FileUtils.copyFileIfNeed(MainActivity.this, handModelName, handModel);
                        Log.e(TAG, "st handCopySuccessHand-->" + copySuccessHand);
                        boolean handInit = NvsStreamingContext.initHumanDetectionExt(MSApplication.getContext(),
                                handModelPath, null,
                                NvsStreamingContext.HUMAN_DETECTION_FEATURE_HAND_LANDMARK |
                                        NvsStreamingContext.HUMAN_DETECTION_FEATURE_HAND_ACTION);
                        Log.e(TAG, "st handInitSuccess-->" + handInit);

                        //Effect SDK
                        NvsEffectSdkContext.initHumanDetectionExt(MSApplication.getContext(),
                                handModelPath, null,
                                NvsStreamingContext.HUMAN_DETECTION_FEATURE_HAND_LANDMARK |
                                        NvsStreamingContext.HUMAN_DETECTION_FEATURE_HAND_ACTION);
                    }
                    /*
                     * Avatar expression model (common to all flavors).
                     */
                    modelPath = rootDir + "/facemode/common/ms_expression_v1.0.2.model";
                    faceModelName = "ms_expression_v1.0.2.model";
                    String expressionModel = "facemode/common";
                    FileUtils.copyFileIfNeed(MainActivity.this, faceModelName, expressionModel);
                    NvsStreamingContext.initHumanDetectionExt(MSApplication.getContext(),
                            modelPath,
                            null,
                            NvsStreamingContext.HUMAN_DETECTION_FEATURE_AVATAR_EXPRESSION);

                    //Effect SDK
                    NvsEffectSdkContext.initHumanDetectionExt(MSApplication.getContext(),
                            modelPath,
                            null,
                            NvsStreamingContext.HUMAN_DETECTION_FEATURE_AVATAR_EXPRESSION);
                    /*
                     * Advanced face (SenseTime "super" flavor only).
                     */
                    if (BuildConfig.HUMAN_AI_TYPE.equals(BUILD_HUMAN_AI_TYPE_MS_ST_SUPER)) {
                        String pePath = "assets:/facemode/st/advance/pe106_advanced_st_v1.0.1.dat";
                        boolean peSuccess = NvsStreamingContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_PE106, pePath);
                        Log.e(TAG, "peSuccess-->" + peSuccess);

                        //Effect SDK
                        NvsEffectSdkContext.setupHumanDetectionData(NvsStreamingContext.HUMAN_DETECTION_DATA_TYPE_PE106, pePath);
                    }
                    // Report the base init outcome to the main thread; note that only the
                    // Streaming SDK's initHumanDetection result is checked here.
                    if (initSuccess) {
                        mHandler.sendEmptyMessage(INIT_ARSCENE_COMPLETE_CODE);
                    } else {
                        mHandler.sendEmptyMessage(INIT_ARSCENE_FAILURE_CODE);
                    }
                    return false;
                }
            });
            initHandler.sendEmptyMessage(1);
        } else {
            initARSceneing = false;
            Log.e(TAG, "initARScene false 没有人脸模块");
        }
    }
    class MainActivityHandler extends Handler {
        WeakReference<MainActivity> mWeakReference;

        public MainActivityHandler(MainActivity mainActivityContext) {
            mWeakReference = new WeakReference<>(mainActivityContext);
        }

        @Override
        public void handleMessage(Message msg) {
            final MainActivity activity = mWeakReference.get();
            if (activity != null) {
                switch (msg.what) {
                    case INIT_ARSCENE_COMPLETE_CODE:
                        /*
                         *  初始化ARScene 完成
                         * Initialization of ARScene completed
                         * */
                        arSceneFinished = true;
                        initARSceneing = false;
                        break;
                    case INIT_ARSCENE_FAILURE_CODE:
                        /*
                         *  初始化ARScene 失败
                         * Initializing ARScene failed
                         * */
                        arSceneFinished = false;
                        initARSceneing = false;
                        break;
                    default:
                        break;

                }
            }
        }
    }

    /**
     * 商汤授权采取获取线上授权的方法，每次进入检查是否需要更新。
     * SenseTime authorization is obtained online; on every entry we fetch the
     * license descriptor and check whether the cached authorization file
     * needs to be refreshed.
     */
    private void updateLicenseFile() {

        HttpManager.getOldObjectGet(Constants.LICENSE_FILE_URL, new TempStringCallBack() {

            @Override
            public void onResponse(String stringResponse) {
                // Parse the JSON payload; malformed responses are ignored silently.
                final LicenseInfo info = GsonUtils.fromJson(stringResponse, LicenseInfo.class);
                if (info == null) {
                    return;
                }
                // Preference writes and the follow-up download are kicked off on the UI thread.
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if (info.getCode() != 1) {
                            // Server signalled no usable license data.
                            return;
                        }
                        final LicenseInfo.LicInfo data = info.getData();
                        if (data == null) {
                            return;
                        }

                        // Cache the license validity window and the file url locally.
                        SharedPreferencesUtils.setParam(MainActivity.this, Constants.KEY_SHARED_START_TIMESTAMP, data.getStartTimestamp());
                        SharedPreferencesUtils.setParam(MainActivity.this, Constants.KEY_SHARED_END_TIMESTAMP, data.getEndTimestamp());
                        SharedPreferencesUtils.setParam(MainActivity.this, Constants.KEY_SHARED_AUTHOR_FILE_URL, data.getAuthorizationFileUrl());

                        Log.d(TAG, "授权文件数据更新成功");

                        final String fileUrl = data.getAuthorizationFileUrl();
                        if (!TextUtils.isEmpty(fileUrl)) {
                            downloadAuthorFile(fileUrl);
                        }
                    }
                });
            }

            @Override
            public void onError(Throwable throwable) {
                LogUtils.e(throwable);
            }
        });

    }
    /**
     * 授权文件下载
     * Downloads the authorization (license) file. On success the local path is
     * persisted to SharedPreferences and ARScene effect initialization is
     * re-run against the freshly downloaded file.
     *
     * @param fileUrl remote url of the authorization file; its last path
     *                segment is reused as the local file name
     */
    private void downloadAuthorFile(String fileUrl) {
        String[] split = fileUrl.split("/");
        String filePath = PathUtils.getLicenseFileFolder();
        // NOTE(review): fileUrl is passed twice — presumably the first argument
        // is a request tag and the second the url; confirm against HttpManager.download.
        HttpManager.download(fileUrl, fileUrl, filePath, split[split.length - 1], new SimpleDownListener(fileUrl) {
            @Override
            public void onError(Progress progress) {
                super.onError(progress);
            }

            @Override
            public void onFinish(File file, Progress progress) {
                super.onFinish(file, progress);
                // Guard BEFORE any dereference: the original logged file.getName()
                // ahead of the null check, which could throw an NPE here.
                if (file == null || !file.isFile()) {
                    return;
                }
                Log.d(TAG, "授权文件下载成功 " + "onResponse------------------------------" + file.getName());
                SharedPreferencesUtils.setParam(MainActivity.this, Constants.KEY_SHARED_AUTHOR_FILE_PATH, "" + file.getAbsoluteFile());
                // Authorize right after a successful download.
                Log.d(TAG, "开始授权 " + "response------------------------------" + file.getAbsoluteFile());
                initARSceneEffect(file.getAbsolutePath());
            }
        });

    }
}