"""
Copyright (c) 2023 - present SKbioinfo
"""
from flask_restful import Resource
from flask import request,jsonify,current_app
from apps import db
from apps.API.utils.log_helper import lg
from apps.API.connector.db_common import *
from apps.API.common.common_time import *
from apps.API.common.db_table_config import *
from flask_jwt_extended import jwt_required
from apps.API.pipeExecutor.util import executor_cmd
from apps.API.pipeExecutor.models import Analysis
from apps.API.pipeExecutor.models import RunParameterValue,Parameter
from apps.API.electricSigature.AuditRecord import Part11AuditRecord as Arc
import pandas as pd
import os,re

class interfacePipeRunID(Resource):
    """GET endpoint that issues the next available run ID ("RUN" + number)."""

    def __init__(self):
        # Prefix prepended to the numeric part of every issued ID.
        self.prefix = "RUN"
        self.table_name = "analysis"

    def _Max_id(self, numbers):
        """Return the largest value in *numbers*, or 0 for an empty sequence.

        Uses the builtin instead of the previous hand-rolled loop, which
        shadowed `max`.
        """
        return max(numbers, default=0)

    @jwt_required()
    def get(self):
        """Compute the next run_id from all run_ids already in the DB.

        Returns a JSON response (status 202) containing e.g. "RUN1001",
        or 404 on failure.
        """
        try:
            ### SQL: query current ID ###
            query_current_id = '''
                SELECT run_id FROM {table_name}
            '''.format(
                table_name=self.table_name
            )
            base_info = dic_query_info(query_current_id)
            numList = []
            if len(base_info) == 0:
                # No runs yet: seed with 999 so the first issued ID is RUN1000.
                numList.append(999)
            else:
                for info in base_info:
                    # Strip the alphabetic prefix (e.g. "RUN") to isolate the number.
                    tmp = re.sub('[A-Za-z]', '', info["run_id"])
                    numList.append(int(tmp))
            Max_id = self._Max_id(numList)
            response = jsonify(self.prefix + str(Max_id + 1))
            response.status_code = 202
            return response
        except Exception as e:
            # Was `except IOError`, which never matches DB or int-parsing
            # errors; broadened for consistency with the other resources here.
            lg.error(e)
            return 404

class interfaceMakeDirectory(Resource):
    """POST endpoint that creates the working directory for a pipeline run."""

    def __init__(self):
        self.executor = executor_cmd()

    @jwt_required()
    def post(self, name):
        """Create BASE_DIR/run/<name>/<run_id> for an rNGS run.

        Returns 202 on success and 404 for unsupported pipeline names
        (previously the method fell through and returned an implicit None,
        which flask-restful reports as a 500).
        """
        if name != "rNGS":
            return 404
        baseDir = current_app.config.get("BASE_DIR")
        run_id = request.json["getpipeData"]["run_id"]
        dirPath = f'''{baseDir}/run/{name}/{run_id}'''
        ### make base directory ###
        # os.makedirs replaces the old `os.popen("mkdir -p ...")`, which was
        # shell-injectable via the request-supplied run_id and never waited
        # for the command to finish.
        os.makedirs(dirPath, exist_ok=True)
        # (cmd,code) = self.executor.sshpass_return(mkdir_cmd)
        return 202

class interfaceSampleSheet(Resource):
    """POST endpoint: write the samplesheet + launch script for a run and
    register the run in the database."""

    def __init__(self):
        self.executor = executor_cmd()

    def _write_sampleSheet(self, **kwargs):
        """Create the run directory, the samplesheet CSV and the nextflow
        launcher script.

        Expected kwargs: name, prefix, writeDir, tableData, pipe_path
        (baseDir/account are accepted but unused here).
        """
        pathPrefix = current_app.config.get("PATH_PREFIX")
        base_dir = current_app.config.get("BASE_DIR")
        name = kwargs["name"]
        prefix = kwargs["prefix"]
        # dirPath is the path as seen by the execution host (PATH_PREFIX),
        # mntPath the locally mounted equivalent (BASE_DIR).
        dirPath = f'''{pathPrefix}/run/{name}/{prefix}'''
        # NOTE: the old f-string baked a leading space into this path; the
        # shell ignored it, but it must not be part of a real filesystem path.
        mntPath = f'''{base_dir}/run/{name}/{prefix}'''
        ### make base directory ###
        # Replaces `os.popen("mkdir -p ...;chmod 777 ...")`: no shell
        # injection via name/prefix, and the directory is guaranteed to
        # exist before we write into it (os.popen did not wait).
        os.makedirs(mntPath, exist_ok=True)
        os.chmod(mntPath, 0o777)  # world-writable as before — review whether this is intended
        # self.executor.sshpass_return(mkdir_cmd)
        ### create samplesheet.csv ###
        df = pd.DataFrame(columns=["sample","group","short_reads_1","short_reads_2","long_reads","host_ref"])
        for key, sample in enumerate(kwargs["tableData"]):
            # long_reads left empty; host reference fixed to human.
            df.loc[key] = [sample["sample_name"], sample["group"], sample["raw_1"], sample["raw_2"], "", "homo_sapiens"]
        df.to_csv("{writeDir}/{prefix}_samplesheet.csv".format(writeDir=kwargs["writeDir"], prefix=kwargs["prefix"]), index=False)
        ### write out shell content ###
        outdir = dirPath
        pipe_path = kwargs["pipe_path"]
        prefix = kwargs["prefix"]
        shell_content = f'''#!/usr/bin/sh
cd {outdir}
nextflow -bg -log log/run_newstandard.log \\
    run {pipe_path} -with-trace \\
    -profile docker \\
    --resume \\
    --input {outdir}/{prefix}_samplesheet.csv \\
    --outdir run_newstandard \\
> run_newstandard.log
        '''
        ## write out file ##
        writeDir = kwargs["writeDir"]
        # `with` guarantees the script is flushed and closed before it is
        # executed later (the old code leaked the handle and relied on GC).
        with open(f"{writeDir}/run_newstandard.sh", "w") as fwrite:
            fwrite.write(shell_content)

    def _add_run(self, **kwargs):
        """Insert one row into the analysis table describing this run."""
        ### add run info to database ###
        analysis_dict = {
            "run_id": kwargs["run_id"],
            "run_name": kwargs["run_name"],
            "run_description": kwargs["run_description"],
            "type": kwargs["type"],
            "run_sample_file": kwargs["run_sample_file"],
            "run_status": kwargs["run_status"],
            "run_qc_file": kwargs["run_qc_file"],
            "run_result_file": kwargs["run_result_file"],
            "run_trace_file": kwargs["run_trace_file"],
            "username": kwargs["username"],
            # report_status is always 0 for a fresh run, regardless of kwargs.
            "report_status": 0,
            "workflow_id": kwargs["workflow_id"]
        }
        new_Analysis = Analysis(**analysis_dict)
        db.session.add(new_Analysis)
        db.session.commit()

    def _record_params(self, **kwargs):
        """Persist per-run parameter values (every kwarg except run_id/username)."""
        ### build parameter name -> id lookup ###
        param_dict = {}
        for param in Parameter.query.all():
            tmp = param.to_dict()
            param_dict[tmp["param_name"]] = tmp["param_id"]
        run_param_data = [
            {
                "run_id": kwargs["run_id"],
                "param_id": param_dict[key],
                "param_value": kwargs[key],
                "set_by": kwargs["username"]
            }
            for key in kwargs
            if key not in ("run_id", "username")
        ]
        db.session.bulk_insert_mappings(RunParameterValue, run_param_data)
        db.session.commit()

    @jwt_required()
    def post(self, name):
        """Prepare a run: write its samplesheet/launcher and record it in the DB.

        Returns 202 on success, 404 on any failure.
        """
        try:
            baseDir = current_app.config.get("BASE_DIR")
            prefix = request.json["getpipeData"]["run_id"]
            ### write out sample_sheet info ###
            writeDir = f"{baseDir}/run/{name}/{prefix}"
            self._write_sampleSheet(
                baseDir=baseDir,
                prefix=prefix,
                name=name,
                writeDir=writeDir,
                tableData=request.json["tableData"],
                pipe_path=current_app.config.get("PIPE_PATH"),
                account=current_app.config.get("PIPE_USER"),
            )
            self._add_run(
                run_id=request.json["getpipeData"]["run_id"],
                run_name=request.json["getpipeData"]["run_name"],
                run_description=request.json["getpipeData"]["description"],
                run_sample_file=f"{prefix}_samplesheet.csv",
                type=name,
                run_status="0",
                run_qc_file=f"/data/run/{name}/{prefix}/run_newstandard/multiqc/multiqc_report.html",
                run_result_file=f"/data/run/{name}/{prefix}",
                run_trace_file=f"/data/run/{name}/{prefix}",
                username=request.json["userName"],
                report_status=0,
                workflow_id=1
            )
            # Parameter recording currently disabled:
            # self._record_params(
            #     run_id=request.json["getpipeData"]["run_id"],
            #     username=request.json["userName"],
            #     reads_minlength = request.json["submitParams"]["reads_minlength"],
            #     fastp_cut_mean_quality = request.json["submitParams"]["fastp_cut_mean_quality"],
            #     fastp_qualified_quality = request.json["submitParams"]["fastp_qualified_quality"],
            #     kraken_database = request.json["submitParams"]["kraken_database"],
            # )
            ### record run info (audit trail currently disabled) ###
            # run_id=request.json["getpipeData"]["run_id"]
            # Arc.add_aduit_record(
            #     record_type="RUN_PIPELINE",
            #     content=f"Run rNGS Pipeline With RUN_ID:{run_id}",
            #     username=request.json["userName"]
            # )
            return 202
        except Exception as e:
            lg.error(e)
            return 404
           
class interfaceSubmitAnalysis(Resource):
    """POST endpoint that launches a prepared rNGS run via its shell script."""

    def __init__(self):
        self.executor = executor_cmd()

    @jwt_required()
    def post(self, name):
        """Execute run_newstandard.sh for the run_id given in the request.

        Returns 202 on success; 404 on error or for unsupported pipeline
        names (previously an implicit None, which flask-restful turns
        into a 500).
        """
        if name != "rNGS":
            return 404
        try:
            filePath = current_app.config.get("PATH_PREFIX")
            prefix = request.json["getpipeData"]["run_id"]
            # NOTE(review): prefix comes straight from the request and is
            # interpolated into a shell command — ensure run_ids are
            # validated upstream, or escape here.
            cmd = f'''bash {filePath}/run/{name}/{prefix}/run_newstandard.sh'''
            self.executor.sshpass_cmd(cmd)
            return 202
        except Exception as e:
            lg.error(e)
            return 404
            
'''URL: /api/pipeMonitor/CheckStatus'''     
class interfacePipeCheckStatus(Resource):
    """GET endpoint that syncs analysis status in the DB with the nextflow
    logs on disk and collects finished results."""

    def __init__(self):
        self.executor = executor_cmd()
        self.table_name = "analysis"
        # Map nextflow's month abbreviations to two-digit numbers; the extra
        # "11月" key covers logs produced under a Chinese locale.
        self.month = {
            "Jan": "01",
            "Feb": "02",
            "Mar": "03",
            "Apr": "04",
            "May": "05",
            "Jun": "06",
            "Jul": "07",
            "Aug": "08",
            "Sep": "09",
            "Oct": "10",
            "Nov": "11",
            "11月": "11",
            "Dec": "12"
        }

    def _check_status(self, file_path):
        """Scan a nextflow log for the "[mag]" pipeline outcome line.

        Returns 1 on success, -1 on error, 0 while still running / unknown.
        The last matching line wins.
        """
        code = 0
        # Read-only open: the previous "r+" mode needlessly required write
        # permission on the log file.
        with open(file_path, "r") as f:
            for line in f:
                line = line.rstrip()
                # Raw string: '\[' in a plain literal is an invalid escape
                # sequence (SyntaxWarning on modern Python).
                if re.match(r'.*\[mag\].*', line):
                    tmp = line.split(']')
                    if re.match(".*Pipeline completed successfully.*", tmp[1].rstrip("-")):
                        code = 1
                    elif re.match(".*Pipeline completed with errors.*", tmp[1].rstrip("-")):
                        code = -1
                    else:
                        code = 0
        return code

    def _check_time(self, file_path):
        """Parse the completion timestamp from a nextflow log.

        The log line looks like "<Mon>-<day> <hh:mm:ss> ..."; the year is
        taken from the current clock since the log omits it. Returns the
        timestamp string (surrounding whitespace kept — it is written into
        SQL as-is), or None when no "Execution complete" line is found;
        callers must tolerate None.
        """
        with open(file_path, "r") as f:
            for line in f:
                line = line.rstrip()
                if re.match(".*Execution complete.*", line):
                    date_token = line.split()[0]
                    time_token = line.split()[1]
                    # 1000-01-01 00:00:00
                    complete_time = '''
                        {year}-{month}-{day} {hour}:{minute}:{second}
                    '''.format(
                        year=get_current_time().split("-")[0],
                        month=self.month[date_token.split("-")[0]],
                        day=date_token.split("-")[1],
                        hour=time_token.split(":")[0],
                        minute=time_token.split(":")[1],
                        second=time_token.split(":")[2]
                    )
                    return complete_time

    def _update_trace(self, type, run_id):
        """Point analysis.run_trace_file at the newest execution_trace_*.txt.

        Parameter name `type` (shadows the builtin) kept for keyword-call
        compatibility.
        """
        baseDir = current_app.config.get("BASE_DIR")
        trace_file_path = f'''{baseDir}/run/{type}/{run_id}/run_newstandard/pipeline_info/execution_trace_*txt'''
        # `ls -t` sorts newest-first; the first entry is the latest trace.
        trace_file_list = list(os.popen(f'ls -t {trace_file_path}'))
        if not trace_file_list:
            # No trace file yet (pipeline may still be starting). Previously
            # this raised IndexError and aborted the whole status sweep.
            return
        # rstrip(): `ls` lines keep their trailing newline, which used to be
        # stored verbatim in the database path.
        trace_file = trace_file_list[0].split("/")[-1].rstrip()
        update_trace_file_sql = '''
            UPDATE {table_name} SET run_trace_file=\"{trace_file}\" WHERE run_id=\"{run_id}\"
        '''.format(
            table_name="analysis",
            trace_file="/data/run/{type}/{run_id}/run_newstandard/pipeline_info/{fileName}".format(
                type=type, run_id=run_id, fileName=trace_file
            ),
            run_id=run_id
        )
        update_info(update_trace_file_sql)

    @jwt_required()
    def get(self):
        """Sweep all analyses and synchronise their DB status with disk.

        For each run still marked running (run_status == 0): refresh its
        trace file, read run_newstandard.log, and on success mark the run
        finished, record its completion time and copy result files into the
        results folder; on failure mark it failed. Runs already marked -1
        only get their trace file refreshed. Returns 202, or 404 on error.
        """
        try:
            ### query analysis info ###
            query_analysis_sql = '''SELECT run_id,run_status,type,run_create_time FROM analysis'''
            base_info = dic_query_info(query_analysis_sql)
            base_dir = current_app.config.get("BASE_DIR")
            for info in base_info:
                if info["run_status"] == 0:
                    run_type = info["type"]
                    run_id = info["run_id"]
                    # Creation date (YYYY-MM-DD), used as the results subfolder.
                    create_date = str(info["run_create_time"]).split()[0]
                    ### fetch trace file name ###
                    self._update_trace(type=run_type, run_id=run_id)
                    prefix = f'{base_dir}/run/{run_type}/{run_id}'
                    file_path = os.path.join(prefix, "run_newstandard.log")
                    if not os.path.exists(file_path):
                        continue
                    code = self._check_status(file_path=file_path)
                    if code == 1:
                        update_status_sql = f'''
                            UPDATE analysis SET run_status=1 WHERE run_id=\"{run_id}\"
                        '''
                        update_info(update_status_sql)
                        log_file_path = os.path.join(prefix, "log")
                        if os.path.exists(log_file_path):
                            # Newest log first; guard against an empty dir,
                            # which used to raise IndexError and abort the
                            # whole sweep. (Listing was also previously done
                            # before the existence check.)
                            log_files = list(os.popen('ls -t {log_file_path}'.format(log_file_path=log_file_path)))
                            if log_files:
                                log_file_name = log_files[0].rstrip()
                                complete_time = self._check_time(log_file_path + "/" + log_file_name)
                                update_complete_time = '''
                                    UPDATE {table_name} SET run_complete_time=\"{complete_time}\" WHERE run_id=\"{run_id}\"
                                '''.format(
                                    table_name=self.table_name,
                                    complete_time=complete_time,
                                    run_id=info["run_id"]
                                )
                                update_info(update_complete_time)
                        ### move result data into the results folder ###
                        pathPrefix = current_app.config.get("PATH_PREFIX")
                        cp_cmd = f'''
                            cd {pathPrefix}/results/
                            mkdir -p {create_date}/{run_id}
                            cp {pathPrefix}/run/{run_type}/{run_id}/{run_id}_samplesheet.csv ./{create_date}/{run_id}
                            cp -r {pathPrefix}/run/{run_type}/{run_id}/run_newstandard/QC_shortreads/fastp/* ./{create_date}/{run_id}/
                            cp {pathPrefix}/run/{run_type}/{run_id}/run_newstandard/Tissue_prediction/*csv ./{create_date}/{run_id}/
                            cp {pathPrefix}/run/{run_type}/{run_id}/run_newstandard/Trans_quanti/*csv ./{create_date}/{run_id}/
                        '''
                        os.popen(cp_cmd)
                        # self.executor.sshpass_ttcmd(cp_cmd)
                    elif code == -1:
                        update_status_sql = '''
                            UPDATE {table_name} SET run_status=-1 WHERE run_id=\"{run_id}\"
                        '''.format(
                            table_name=self.table_name,
                            run_id=info["run_id"]
                        )
                        update_info(update_status_sql)
                elif info["run_status"] == -1:
                    # Failed runs still get their trace file refreshed so the
                    # UI can link to it.
                    self._update_trace(type=info["type"], run_id=info["run_id"])
            return 202
        except Exception as e:
            lg.error(e)
            return 404
