const RabbitMQ_Class=require("./rabbitmq/RabbitMq.js")
const readline = require('readline');
const request = require('request');
const fs = require('fs');
const fse = require('fs-extra')
const path = require("path");
const {S3} = require('@aws-sdk/client-s3');
const  iconvLite = require('iconv-lite');
const mongoose = require("mongoose");
const mongo_models = require('./mongo/mongo_models.js');
const {spawn,exec,execSync,spawnSync,fork} = require('child_process');
// printf-style formatter for Date, e.g. "yyyy-MM-dd hh:mm:ss".
// Tokens: y year, M month, d day, h hour (24h), m minute, s second,
// q quarter, S milliseconds. Multi-letter tokens (MM, dd, ...) are
// zero-padded to at least 2 digits; "yy" keeps the last two year digits.
// Rewritten to drop the deprecated RegExp.$1 statics and String.substr;
// the unused `options` parameter is kept for call-site compatibility.
// NOTE(review): extending a native prototype is kept because callers
// elsewhere in the project depend on date.format(...).
Date.prototype.format = function (format, options) {
  var args = {
    "M+": this.getMonth() + 1,
    "d+": this.getDate(),
    "h+": this.getHours(),
    "m+": this.getMinutes(),
    "s+": this.getSeconds(),
    "q+": Math.floor((this.getMonth() + 3) / 3), //quarter
    S: this.getMilliseconds()
  };
  // Year: keep the last `token length` digits (yyyy -> 2023, yy -> 23).
  format = format.replace(/(y+)/, (match) =>
    String(this.getFullYear()).slice(4 - match.length)
  );
  for (var key in args) {
    var n = args[key];
    // Replace the first run of each token; pad to >=2 digits when the
    // token is more than one character long.
    format = format.replace(new RegExp("(" + key + ")"), (match) =>
      match.length == 1 ? String(n) : ("00" + n).slice(String(n).length)
    );
  }
  return format;
}
class Help{
  static Request_Post_Common(_url,data,timeout=0,custom_header={}){
    return new Promise((resolve) => {
      try{
        let headers={
          "content-type": "text/html",
          'User-Agent': "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; QQWubi 133; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; CIBA; InfoPath.2)",
        }
        for(var key in custom_header){ //额外添加的数据，从data传入
          headers[key]=custom_header[key];   
        }
        request({
          url: _url,
          method: "POST",
          json: true,
          headers: headers,
          body:data,
          encoding:null,
          timeout: timeout,
        }, (error, response, result)=> {
          if(error==null){
            if(response.statusCode==200){
              resolve(result)
            }
            else{
              resolve(result)
            }
          }
          else{
            resolve({
              success:false,
              message:error.message,
            })
          }
        });
      }catch(error){
        resolve({
          success:false,
          message:error.message,
        })
      }
    })
  }
  static Request_PostForm_Common(_url,data,timeout=0,custom_header={}){
    return new Promise((resolve) => {
      try{
        let headers={
          'User-Agent': "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; QQWubi 133; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; CIBA; InfoPath.2)",
        }
        for(var key in custom_header){ //额外添加的数据，从data传入
          headers[key]=custom_header[key];   
        }
        request({
          url: _url,
          method: "POST",
          json: true,
          headers: headers,
          form:data,
          encoding:null,
          timeout: timeout,
        }, (error, response, result)=> {
          if(error==null){
            if(response.statusCode==200){
              resolve(result)
            }
            else{
              resolve(result)
            }
          }
          else{
            resolve({
              success:false,
              message:error.message,
            })
          }
        });
      }catch(error){
        resolve({
          success:false,
          message:error.message,
        })
      }
    })
  }
  static Request_Get_Common(_url,data,timeout=0,custom_header={})
  {
    return new Promise((resolve) => {
      let headers={
        "content-type": "text/html",
        'User-Agent': "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; QQWubi 133; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; CIBA; InfoPath.2)",
      }
      for(var key in custom_header){ //额外添加的数据，从data传入
        headers[key]=custom_header[key];   
      }
      let _request={
        url: _url,
        method: "get",
        useQuerystring: true,
        json: true,
        headers: headers,
        encoding:null,
        timeout: timeout,
        qs:data,
      }
      try{
        request(_request,(error,response,result)=>{
          if(error==null){
            if(response.statusCode==200){
              resolve(result)
            }
            else{
              resolve(result)
            }
          }
          else{
            resolve({
              success:false,
              message:error.message,
            })
          }
        })
      }catch(error){
        resolve({
          success:false,
          message:error.message,
        })
      }
    })
  }
  static Json_Parse(Split) {
    try {
      Split = JSON.parse(Split);
      return Split;
    } catch (e) {
      return null;
    }
  }
  static Json_String(Split) {
    return JSON.stringify(Split);
  }
  static Check_isdir(_dir){
    if(fs.existsSync(_dir)){
      try{
        let stats=fs.statSync(_dir);
        if(stats.isDirectory()){
          return true
        }
      }
      catch(error){

      }
    }
    return false
  }
  static Check_isFile(_file){
    if(fs.existsSync(_file)){
      try{
        let stats=fs.statSync(_file);
        if(stats.isFile()){
          return true
        }
      }
      catch(error){

      }
    }
    return false
  }
  static async  Init_RabbitMQ(){
    let RabbitMQ=global["json_config"]?.RabbitMQ
    let _rabbitmq_Class= new RabbitMQ_Class({
      protocol: 'amqp',
      hostname: RabbitMQ.hostname,
      port: RabbitMQ.port,
      username: RabbitMQ.username,
      password: RabbitMQ.password,
      locale: 'en_US',
      frameMax: 0,
      heartbeat: 0,
      vhost: '/',
    })
    for(let a in RabbitMQ.queues){
      await _rabbitmq_Class.CheckQueue(RabbitMQ.queues[a])
      Help.Console_log(`connect Queue ${RabbitMQ.queues[a]} in process ${process.pid}`)
    }
    global["class_rabbitmq"]=_rabbitmq_Class
    return true
  }
  static async  Init_Mongo(){
    let mongo_config=global["json_config"]?.mongo
    global["Mongo_Connection"]=await Help.Get_Mongo_Connection(mongo_config);
    return true
  }
  static Get_Mongo_Connection(options){
    let {ip,port,db,user,pass}=options
    return new Promise(async (resolve)=>{
      let Connection_url=`mongodb://${ip}:${port}/${db}` 
      let options={
        bufferCommands: true, 
        autoIndex:true ,
        autoCreate:true ,
      }
      if(user && pass){
        Connection_url=`${Connection_url}?authSource=${db}` 
        options.user=user
        options.pass=pass
      }
      let connect_mongoose=undefined
      try{
        connect_mongoose=await mongoose.connect(Connection_url,options)
        let mongoose_Connection=mongoose.createConnection(Connection_url,options)
        resolve(mongoose_Connection)
      }
      catch(error){
        Help.Console_log(error?.message)
        resolve(false)
      }
    })
  }
  static async Init_S3(){
    let config_s3=global["json_config"]?.s3
    if(!global["class_s3"]){
      global["class_s3"]=new S3({ 
        endpoint:config_s3?.endpoint,
        forcePathStyle: true,
        region:'us-east-1',
        credentials:{
          accessKeyId:config_s3?.accessKeyId, 
          secretAccessKey:config_s3?.secretAccessKey, 
        },
      });
    }
    return true
  }
  static s3_get_file_list(options) {
    let {bucket,prefix}=options
    return new Promise(async (resolve)=>{
      let GetObjectCommandInput={
        Bucket:bucket, 
        Prefix:prefix
      }
      global["class_s3"]?.listObjectsV2(GetObjectCommandInput,async (err, data)=>{
        if(!err){
          if(data?.Contents?.length>0){
            resolve(data?.Contents)
          }
          else{resolve(null)}
        }
        else{
          resolve(false)
        }
      })
    })
  }
  static s3_get_file(options) {
    let {bucket,key,range}=options
    return new Promise(async (resolve)=>{
      let GetObjectCommandInput={
        Bucket:bucket, 
        Key:key
      }
      if(range){
        GetObjectCommandInput.Range=range
      }
      global["class_s3"]?.getObject(GetObjectCommandInput,async (err, data)=>{
        if(!err){
          //transformToString  transformToWebStream transformToByteArray
          if(data?.ContentLength>0){
            let ByteArray=await data?.Body?.transformToByteArray()
            resolve(ByteArray)
          }
          else{resolve(null)}
        }
        else{
          resolve(false)
        }
      })
    })
  }
  //not use
  static async s3_check_file_exsit(options) {
    options.range=`bytes=0-0`
    let file_byte_0_1=await Help.s3_get_file(options)
    if(file_byte_0_1){
      return true
    }
    else{
      return false 
    }
  }
  static async add_rabbitmq(msg,queue="sdp_pri",priority=50){
    let send_result=await global["class_rabbitmq"].send(queue,Help.Json_String(msg),priority);
    return send_result
  }
  static async  get_next_rabbitmq(queue="sdp_pri",noAck=false){
    let data_msg=await global["class_rabbitmq"].get(queue,noAck)
    return data_msg
  }
  // Initialize one production task from a rabbitmq message.
  // Flow: ensure the mongo task record exists (state 1), lay out the
  // on-disk working dirs under produce_base_dir/<mongo_id>, resolve the
  // S3 paths of the source scenes, compute ranged download lists, download
  // everything, run the L1 input check, and advance state 1 -> 2 -> 3.
  // Returns the latest mongo task record.
  static async init_rabbitmq_data(data,msg){
    let Json_data=Help.Json_Parse(data);
    let {batch,fullName,orderid,priority,username,s3path,foldername}=Json_data
    let batch_task_mongo=await mongo_models.GetTask({
      batch,
      orderid,
      username,
      foldername
    })
    // No record for this task yet - create one in state 1.
    let mongo_id="";
    if(!batch_task_mongo){
      batch_task_mongo=await mongo_models.UpdateTask({
        batch,
        orderid,
        username,
        foldername,
        s3path,
        state:1,
        message:"无",
        msg:Help.Json_String(msg),
        data:data,
        ack:1,
        atime:(new Date()).getTime(),
      })
    }
    if(batch_task_mongo){
      mongo_id=batch_task_mongo?.mongo_id
    }
    // NOTE(review): if the record could not be created, mongo_id stays ""
    // and the directories below land directly under produce_base_dir.
    let task_pro_dir=`${global["json_config"]?.produce_base_dir}/${mongo_id}`
    Help.Create_Dir(task_pro_dir)
    let task_pro_info=`${global["json_config"]?.produce_base_dir}/${mongo_id}/pro.txt`
    Help.Write_File_UTF8(task_pro_info,`${batch} ${orderid} ${username} ${foldername}`)
    let task_pro_prepare_dir=`${task_pro_dir}/prepare`
    Help.Create_Dir(task_pro_prepare_dir)
    let s3_downs=[]
    let s3_paths_down_link_file=`${task_pro_prepare_dir}/down_links.json`
    let l1_input_file=`${task_pro_prepare_dir}/l1_input.json`
    // State 1: resolve which scenes to produce and their S3 paths.
    if(batch_task_mongo?.state==1){
      let s3_paths=[]
      let s3_downs_has=[]
      if(batch_task_mongo?.s3path){
        // The message already carries an explicit s3 path for one scene.
        let check_state=Help.Check_produce_satellite(foldername)
        if(check_state==true){
          s3_paths.push({
            name:foldername,
            path:s3path
          })
        }
        else{
          batch_task_mongo=await mongo_models.UpdateTask({
            id:mongo_id,
            state:7,
            message:"该数据不需要生产"
          })
        }
      }
      else{
        // Query the task service to obtain prodece_ys_names (scene names).
        let prodece_ys_names=[]
        let ys_names_file=`${task_pro_prepare_dir}/ys_names.json`
        let ys_task_priority_file=`${task_pro_prepare_dir}/ys_priority.json`
        if(Help.Check_isFile(ys_names_file)){
          // Cached from a previous (possibly interrupted) run.
          prodece_ys_names=Help.Json_Parse(Help.Read_File_UTF8(ys_names_file))
        }
        else{
          let L1_Names_result=undefined
          let batch_name="";
          let task_name="";
          if(username=='bwxzx'||username=='pushadmin'){
            batch_name=batch?.split("/")[0]
            task_name=batch_name
          }
          else if(username=='cgadmin'){
            batch_name=batch?.replaceAll("/","_")
            let orderid_split=orderid?.split("_")
            task_name=orderid_split[orderid_split?.length-1];
          }
          // NOTE(review): both branches below issue the same query, and
          // task_name computed above is never used - confirm intent.
          if(username=='bwxzx'|| username=='pushadmin' ||username=='cgadmin'){
            L1_Names_result=await Help.Search_L1_Names(batch_name,"")
          }
          else{
            L1_Names_result=await Help.Search_L1_Names(batch_name,"")
          }
          Help.Write_File_UTF8(ys_task_priority_file,Help.Json_String({priority:priority}))
          let L1_Names_status=L1_Names_result?.status
          let L1_Names_msg=L1_Names_result?.msg
          let L1_Names_names=L1_Names_result?.names 
          // status 1/2 -> task state 7; status 3 -> state 5; status 4 -> names found.
          if(L1_Names_status==1 || L1_Names_status==2){
            batch_task_mongo=await mongo_models.UpdateTask({
              id:mongo_id,
              state:7,
              message:L1_Names_msg
            })
          }
          else if(L1_Names_status==3){
            batch_task_mongo=await mongo_models.UpdateTask({
              id:mongo_id,
              state:5,
              message:L1_Names_msg
            })
          }
          else if(L1_Names_status==4){
            prodece_ys_names=L1_Names_names
            Help.Write_File_UTF8(ys_names_file,Help.Json_String(prodece_ys_names))
          }
        }
        // Resolve the download path of every file in prodece_ys_names.
        if(prodece_ys_names?.length>0){
          // Get the S3 path of each name, reusing cached .txt link files.
          {
            let s3_path_link_dir=`${task_pro_prepare_dir}/s3_path_link_dir`
            Help.Create_Dir(s3_path_link_dir)
            let s3_path_links=Help.Get_Deep_Files(s3_path_link_dir,[".txt"])
            let s3_paths_has=[]
            for (let k = 0; k < s3_path_links?.length; k++) {
              let one_s3_path_link=s3_path_links[k]
              let one_s3_path_link_text=Help.Read_File_UTF8(one_s3_path_link?.file)
              s3_paths_has.push({
                name:one_s3_path_link?.namenoext,
                path:one_s3_path_link_text
              })
            }
            let L1_Names_paths=await Help.Search_L1_Names_paths(prodece_ys_names,s3_paths_has,s3_path_link_dir)
            s3_paths=L1_Names_paths?.s3_paths
            let s3_paths_errors=L1_Names_paths?.s3_paths_errors
            if(s3_paths?.length==0){
              batch_task_mongo=await mongo_models.UpdateTask({
                id:mongo_id,
                state:7,
                message:"没有查询到待生产数据prodece_ys_names的s3 path"
              })
            }
            else if(s3_paths_errors?.length>0){
              // Partial failure: keep going but record each failed lookup.
              batch_task_mongo=await mongo_models.UpdateTask({
                id:mongo_id,
                message:"待生产数据prodece_ys_names的有查询s3 path错误"
              })
              let Search_L1_Names_error_txt=`${task_pro_prepare_dir}/Search_L1_Names_error.txt`
              for (let i = 0; i < s3_paths_errors.length; i++) {
                let one_error = s3_paths_errors[i];
                fs.appendFileSync(Search_L1_Names_error_txt,`${one_error}\r\n`)
              }
            }
          }
        }
      }
      // Expand every S3 path into the list of object keys to download.
      if(s3_paths?.length>0){
        let s3_paths_down_link_dir=`${task_pro_prepare_dir}/down_links`
        Help.Create_Dir(s3_paths_down_link_dir)
        let s3_paths_down_links=Help.Get_Deep_Files(s3_paths_down_link_dir,[".json"])
        
        for (let k = 0; k < s3_paths_down_links?.length; k++) {
          let one_s3_paths_down_link=s3_paths_down_links[k]
          let one_s3_paths_down_link_json=Help.Json_Parse(Help.Read_File_UTF8(one_s3_paths_down_link?.file))
          s3_downs_has.push({
            name:one_s3_paths_down_link?.namenoext,
            lists:one_s3_paths_down_link_json
          })
        }
        // Reuse the cached lists only when every path is already covered.
        if(s3_downs_has?.length==s3_paths?.length){
          s3_downs=s3_downs_has
        }
        else{
          s3_downs=await Help.Search_s3_paths_link(s3_paths,s3_downs_has,s3_paths_down_link_dir)
        }
        Help.Write_File_UTF8(s3_paths_down_link_file,Help.Json_String(s3_downs))
        batch_task_mongo=await mongo_models.UpdateTask({
          id:mongo_id,
          state:2,
        })
      }
    }
    let task_pro_unzip_dir=`${task_pro_dir}/unzip`
    let task_pro_dom_dir=`${task_pro_dir}/dom`
    let L1_inputs=[]
    // State 2: download every object in ranged chunks, then run the check.
    if(batch_task_mongo?.state==2){
      if(Help.Check_isFile(s3_paths_down_link_file)){
        s3_downs=Help.Json_Parse(Help.Read_File_UTF8(s3_paths_down_link_file))
      }
      Help.Create_Dir(task_pro_unzip_dir)
      Help.Create_Dir(task_pro_dom_dir)
      // Start downloading.
      {
        let new_s3_downs=[]
        if(s3_downs?.length>0){
          let oneMB = 1024 * 1024 * 5; // chunk size: 5 MB (despite the name)
          for (let i = 0; i < s3_downs?.length; i++) {
            let one_down_load = s3_downs[i];
            let one_down_load_name=one_down_load?.name
            let one_down_load_lists=one_down_load?.lists
            Help.Console_log(`down ${i+1}/${s3_downs?.length} ${one_down_load_name}`)
            let this_element_save_dir=`${task_pro_unzip_dir}/${one_down_load_name}`
            let this_element_down_stat=`${this_element_save_dir}/down.info`
            // down.info == "true" marks a scene already fully downloaded.
            let down_do=true
            if(Help.Check_isFile(this_element_down_stat)){
              let down_stat=Help.Read_File_UTF8(this_element_down_stat)
              if(down_stat=="true"){
                down_do=false
              }
            }
            let new_one_down_load_lists=[]
            if(down_do){
              for (let k = 0; k < one_down_load_lists?.length; k++) {
                let this_element = one_down_load_lists[k];
                let {dir_name,file_name,key,size}=this_element
                // Split the object into ceil(size / chunk) byte ranges.
                let pic_count=size/oneMB;
                pic_count=Math.ceil(pic_count);
                let rangs=[]
                for (let j = 0; j < pic_count; j++) {
                  let start=j*oneMB;
                  let end=(j+1)*oneMB;
                  if(end>(size-1)){
                    end=size
                  }
                  end=end-1 // HTTP Range end offset is inclusive
                  let range=`bytes=${start}-${end}`
                  rangs.push({
                    range,
                    start,
                    end
                  })
                }
                new_one_down_load_lists.push({
                  file_dir:this_element_save_dir,
                  file:`${this_element_save_dir}/${file_name}`,
                  key:key,
                  rangs:rangs,
                })
              }
            }
            if(new_one_down_load_lists?.length>0){
              new_s3_downs.push({
                name:one_down_load_name,
                file_dir:this_element_save_dir,
                file_stat:this_element_down_stat,
                lists:new_one_down_load_lists
              })
            }
          }
        }
        if(new_s3_downs?.length>0){
          await Help.multi_download(new_s3_downs)
        }
      }
      Help.Console_log("down end!")
      {
        L1_inputs=await Help.Produce_L1_data_check(mongo_id,task_pro_unzip_dir)
        Help.Write_File_UTF8(l1_input_file,Help.Json_String(L1_inputs))
      }
      // if(Help.Check_isFile(l1_input_file)){
      //   L1_inputs=Help.Json_Parse(Help.Read_File_UTF8(l1_input_file))
      // }
      // else{
        
      // }
      // add_task_do re-checks whether L1_inputs is empty.
      batch_task_mongo=await mongo_models.UpdateTask({
        id:mongo_id,
        state:3,
      })
    }
    return batch_task_mongo;
  }
  static async init_login(username="13644710263",password="1qaz@WSX"){
    let result=await Help.Request_PostForm_Common(`${global["json_config"]?.task_search?.getToken}`,{
      username: username,
      password: password
    },0,{
      "Content-Type":"application/json;charset=utf-8"
    })
    if(result.detail && result.succeed===true){
      global["user_gistack_token"]=result.detail
      return true
    }
    else{
      return false
    }
  }
  // Entry point of the cyclic task pipeline (includes pre-download and up to
  // task_num concurrent production tasks). Starts three setInterval loops:
  // 1) route incoming sdp_pri messages into per-customer priority queues,
  // 2) poll running GP tasks (state 4/5) and mark them done/failed,
  // 3) schedule init/production of the next task.
  // The global["task_*"] flags below are re-entrancy guards so a slow tick
  // is not overlapped by the next one.
  static async wait_rabbitmq_done_2work(){
    global["task_convert"]=false
    global["task_convert2"]=false
    global["task_checking"]=false
    global["task_check_isdone"]=false
    global["task_check_copy"]=false
    global["task_check_6"]=false
    global["task_priority"]=100
    // Route production tasks into the sdp_pri_gj and sdp_pri_cg queues
    // (everything else goes to sdp_pri_other), with per-customer priority.
    {
      let _setInterval4=setInterval (async ()=>{
        if(global["task_convert"]==false){
          global["task_convert"]=true
          let next_data_msg=await Help.get_next_rabbitmq("sdp_pri",true) // 
          let next_data=next_data_msg?.data
          let next_msg=next_data_msg?.msg
          if(next_data){
            let Json_data=Help.Json_Parse(next_data);
            let {batch,fullName,metaPath,orderid,priority,topic,total,username}=Json_data
            if(username=='bwxzx'){
              let batch_split=batch?.split("/")
              let date_string=batch_split[0] //202501140046260
              // Batches dated after 2025-03-06 get the highest priority.
              let batch_date_string=`${date_string?.slice(0, 4)}-${date_string?.slice(4, 6)}-${date_string?.slice(6, 8)}`
              let batch_date=new Date(batch_date_string)
              let batch_date_time=batch_date.getTime();
              let stand_date_time=(new Date("2025-03-06")).getTime();
              if(batch_date_time>stand_date_time){
                // Json_data.priority=100
                Json_data.priority=104
                await Help.add_rabbitmq(Json_data,"sdp_pri_gj",Json_data.priority);
              }
              else{
                Json_data.priority=99
                await Help.add_rabbitmq(Json_data,"sdp_pri_gj",Json_data.priority);
              }
            }
            else if(username=='cgadmin'){
              // Json_data.priority=102
              Json_data.priority=103
              await Help.add_rabbitmq(Json_data,"sdp_pri_cg",Json_data.priority);
            }
            else{
              Json_data.priority=98
              await Help.add_rabbitmq(Json_data,"sdp_pri_other",Json_data.priority);
            }
          }
          global["task_convert"]=false
        }
      },1000*4)
      // global["class_rabbitmq"].consume("sdp_pri_test",async (data,msg)=>{
      //   let Json_data=Help.Json_Parse(data)
      //   if(Json_data){
      //     let {batch,fullName,orderid,priority,total,username,s3path,foldername}=Json_data
      //     if(username=='bwxzx'){
      //       let batch_split=batch?.split("/")
      //       let date_string=batch_split[0] //202501140046260
      //       let batch_date_string=`${date_string?.slice(0, 4)}-${date_string?.slice(4, 6)}-${date_string?.slice(6, 8)}`
      //       let batch_date=new Date(batch_date_string)
      //       let batch_date_time=batch_date.getTime();
      //       let stand_date_time=(new Date("2025-03-06")).getTime();
      //       if(batch_date_time>stand_date_time){
      //         // Json_data.priority=100
      //         Json_data.priority=102
      //         await Help.add_rabbitmq(Json_data,"sdp_pri_gj",Json_data.priority);
      //       }
      //       else{
      //         Json_data.priority=99
      //         await Help.add_rabbitmq(Json_data,"sdp_pri_gj",Json_data.priority);
      //       }
      //       await global["class_rabbitmq"].ack(msg)
      //     }
      //     else if(username=='cgadmin'){
      //       // Json_data.priority=102
      //       Json_data.priority=101
      //       await Help.add_rabbitmq(Json_data,"sdp_pri_cg",Json_data.priority);
      //       await global["class_rabbitmq"].ack(msg)
      //     }
      //     else{
      //       Json_data.priority=98
      //       await Help.add_rabbitmq(Json_data,"sdp_pri_other",Json_data.priority);
      //       await global["class_rabbitmq"].ack(msg)
      //     }
      //   }
        
      // });
    }
    // return;
    // (disabled) Loop that directly deletes tasks already finished (state 6).
    // let _setInterval3=setInterval (async ()=>{
    //   if(global["task_check_6"]==false){
    //     global["task_check_6"]=true
    //     Help.Console_log(`check running gp task is success...`)
    //     let level6_batch_task_mongos=await mongo_models.GetTasks({
    //       state:6
    //     })
    //     if(level6_batch_task_mongos?.length){
    //       for(let i=0;i<level6_batch_task_mongos?.length;i++){
    //         let id=level6_batch_task_mongos[i]?.mongo_id
    //         let result=await Help.delete_task(id)
    //         result=result
    //       }
    //     }
    //     global["task_check_6"]=false
    //   }
    // },1000*60*3)
    // Poll tasks in state 4/5 against the GP service; GP status 4 -> mongo
    // state 6 (success), GP status 5 -> mongo state 5 (partial failure).
    let _setInterval2=setInterval (async ()=>{
      if(global["task_check_isdone"]==false){
        global["task_check_isdone"]=true
        Help.Console_log(`check running gp task is done...`)
        let level4_batch_task_mongos=await mongo_models.GetTasks({
          state:4
        })
        if(level4_batch_task_mongos?.length){
          for(let i=0;i<level4_batch_task_mongos?.length;i++){
            let one_level4_batch_task_mongo=level4_batch_task_mongos[i]
            let taskid=one_level4_batch_task_mongo?.taskid
            let mongo_id=one_level4_batch_task_mongo?.mongo_id
            let mongo_msg=one_level4_batch_task_mongo?.msg
            let check_task_result=await Help.check_task_done(taskid)
            let {status}=check_task_result
            if(status==4){
              await mongo_models.UpdateTask({
                id:mongo_id,
                state:6,
                ack:1,
                copy:1,
                etime:(new Date()).getTime(),
                message:`success`
              })
            }
            else if(status==5) {
              await mongo_models.UpdateTask({
                id:mongo_id,
                state:5,
                ack:1,
                copy:1,
                etime:(new Date()).getTime(),
                message:`some success,some error`
              })
            }
            else if(status==-1) {
              // GP lookup failed: leave the task untouched this tick.
              // await mongo_models.UpdateTask({
              //   id:mongo_id,
              //   state:5,
              //   ack:1,
              //   copy:1,
              //   etime:(new Date()).getTime(),
              //   message:`${status_3_count} success,${status_4_count} error,${status_5_count} error,${status_all_count} all`
              // })
            }
          }
        }
        let level5_batch_task_mongos=await mongo_models.GetTasks({
          state:5
        })
        if(level5_batch_task_mongos?.length){
          for(let i=0;i<level5_batch_task_mongos?.length;i++){
            // (variable name kept from the state-4 loop above)
            let one_level4_batch_task_mongo=level5_batch_task_mongos[i]
            let taskid=one_level4_batch_task_mongo?.taskid
            let mongo_id=one_level4_batch_task_mongo?.mongo_id
            let mongo_msg=one_level4_batch_task_mongo?.msg
            let check_task_result=await Help.check_task_done(taskid)
            let {status}=check_task_result
            if(status==4){
              await mongo_models.UpdateTask({
                id:mongo_id,
                state:6,
                ack:1,
                copy:1,
                etime:(new Date()).getTime(),
                message:`success`
              })
            }
            else if(status==5) {
              await mongo_models.UpdateTask({
                id:mongo_id,
                state:5,
                ack:1,
                copy:1,
                etime:(new Date()).getTime(),
                message:`some success,some error`
              })
            }
            else if(status==-1) {
              // GP lookup failed: leave the task untouched this tick.
              // await mongo_models.UpdateTask({
              //   id:mongo_id,
              //   state:5,
              //   ack:1,
              //   copy:1,
              //   etime:(new Date()).getTime(),
              //   message:`${status_3_count} success,${status_4_count} error,${status_5_count} error,${status_all_count} all`
              // })
            }
          }
        }
        global["task_check_isdone"]=false
      }
    },6000)
    // return
    // Scheduler: decide each tick whether to fetch a new message, init a
    // task, or submit a prepared (state 3) task to GP for production.
    let _setInterval=setInterval (async ()=>{
      if(global["task_checking"]==false){
        global["task_checking"]=true
        // states: 1 querying, 2 downloading, 3 downloaded & checked, 4 producing
        let level4_batch_task_mongos=await mongo_models.GetTasks({
          state:4
        })
        let level4_batch_task_mongos_count=level4_batch_task_mongos?.length
        let level3_batch_task_mongo=await mongo_models.GetTask({
          state:3
        })
        let level2_batch_task_mongo=await mongo_models.GetTask({
          state:2
        })
        let level1_batch_task_mongo=await mongo_models.GetTask({
          state:1
        })
        let do_get_next=false
        let do_init=false
        let do_init_mongo=undefined
        let do_add_task=false
        let do_add_mongo=undefined
        if(level4_batch_task_mongos_count>global["json_config"]?.task_num){} // over capacity: do nothing
        else if(level4_batch_task_mongos_count<=global["json_config"]?.task_num){
          if(level3_batch_task_mongo){
            if(level4_batch_task_mongos_count==global["json_config"]?.task_num){
              // wait: production slots are full
            }
            else{
              do_add_task=true
              do_add_mongo=level3_batch_task_mongo
            }
          }
          else{
            if (level2_batch_task_mongo || level1_batch_task_mongo){
              do_init=true;// run initialization (state 2 before state 1)
              if (level2_batch_task_mongo){
                do_init_mongo=level2_batch_task_mongo
              }
              else if (level1_batch_task_mongo){
                do_init_mongo=level1_batch_task_mongo
              }
            }
            else{
              do_get_next=true;// fetch the next task from the queues
              do_init=true;// then run initialization
            }
          }
        }
        let next_data= undefined
        let next_msg= undefined
        // Fetch the next production task (priority: gj, then cg, then other).
        if(do_get_next){
          let next_data2=undefined
          let next_data_msg=undefined
          let next_msg2=undefined
          if(!next_data2){
            next_data_msg=await Help.get_next_rabbitmq("sdp_pri_gj",true)
            next_data2=next_data_msg?.data
            next_msg2=next_data_msg?.msg
          }
          if(!next_data2){
            next_data_msg=await Help.get_next_rabbitmq("sdp_pri_cg",true)
            next_data2=next_data_msg?.data
            next_msg2=next_data_msg?.msg
          }
          if(!next_data2){
            next_data_msg=await Help.get_next_rabbitmq("sdp_pri_other",true)
            next_data2=next_data_msg?.data
            next_msg2=next_data_msg?.msg
          }
          if(next_data2){
            next_data=next_data2
            next_msg=next_msg2
          }
        }
        // Does the next task need init?
        if(do_init){
          if(do_init_mongo){
            next_data=do_init_mongo.data
            next_msg=do_init_mongo.msg
          }
          if(next_data){
            Help.Console_log(` init start...`)
            do_add_mongo=await Help.init_rabbitmq_data(next_data,next_msg)
            Help.Console_log(`${do_add_mongo?.mongo_id} init done...`)
          }
        }
        // Does the next task need to be submitted to GP for production?
        if(do_add_task){
          if(do_add_mongo){
            await Help.add_task_do(do_add_mongo)
          }
        }
        global["task_checking"]=false
      }
    },5000)
    return true
  }
  //移动已完成的tif+xml到lis_produce_done
  static async wait_tif_move_to_done_dir(){
    global["class_rabbitmq"].consume("sdp_pri_gp_done",async (data,msg)=>{
      let Json_data=Help.Json_Parse(data);
      let old_result=Json_data?.result
      for (let i = 0; i < old_result?.length; i++) {
        let one_old_result = old_result[i];
        let one_new_result=`${global["json_config"]?.gp_cb_dir}/${path.basename(one_old_result)}`
        //move 
        Help.Move_File(one_old_result,one_new_result)
        Json_data.result[i]=one_new_result
      }
      await Help.add_rabbitmq(Json_data,"sdp_pri_done");
      await global["class_rabbitmq"].ack(msg)
    });
    return true;
  }
  static async wait_task_done(taskid){
    return new Promise((resolve)=>{
      let _setInterval=setInterval (async ()=>{
        let status_task=await Help.Request_Get_Common(`${global["json_config"]?.gp_url}/task/get_task?`,{
          "taskid":taskid,
          pagenum:1,
          pagesize:10,
        },0)
        if(status_task?.result?.status==3 || status_task?.result?.status==4){
          let status_all=await Help.Request_Get_Common(`${global["json_config"]?.gp_url}/task/get_ctask?`,{
            "taskid":taskid,
            pagenum:1,
            pagesize:10,
          },0)
          let status_4=await Help.Request_Get_Common(`${global["json_config"]?.gp_url}/task/get_ctask?`,{
            "taskid":taskid,
            pagenum:1,
            pagesize:10,
            status:4
          },0)
          let status_3=await Help.Request_Get_Common(`${global["json_config"]?.gp_url}/task/get_ctask?`,{
            "taskid":taskid,
            pagenum:1,
            pagesize:10,
            status:3
          },0)
          let status_4_count=status_4?.result?.page?.count
          let status_3_count=status_3?.result?.page?.count
          let status_all_count=status_all?.result?.page?.count
          if(status_4_count+status_3_count==status_all_count){
            clearInterval(_setInterval);
            resolve({
              status:true,
              status_4_count,
              status_3_count,
              status_all_count
            })
          }
        }
        
      },10000)
    })
  }
  static check_task_done(taskid){
    return new Promise(async (resolve)=>{
      let status_task=await Help.Request_Get_Common(`${global["json_config"]?.gp_url}/task/get_task?`,{
        "taskid":taskid,
        pagenum:1,
        pagesize:10,
      },0)
      if(status_task?.result){
        resolve({
          status:status_task?.result?.status,
        })
      }
      else{
        resolve({
          status:-1,
        })
      }
    })
  }
  static async add_task_do_to_go_from_id(id){
    let level3_batch_task_mongo=await mongo_models.GetTask({
      id:id
    })
    if(level3_batch_task_mongo){
      let mongo_id=level3_batch_task_mongo?.mongo_id
      await mongo_models.UpdateTask({
        id:mongo_id,
        state:1,
      })
      return true
    }
    return false
  }
  //添加任务到gp
  static async add_task_do(batch_task_mongo){
    let mongo_id=batch_task_mongo?.mongo_id
    let task_pro_dir=`${global["json_config"]?.produce_base_dir}/${mongo_id}`
    let task_pro_dom_dir=`${task_pro_dir}/dom`
    Help.Create_Dir(task_pro_dom_dir)
    let task_pro_prepare_dir=`${task_pro_dir}/prepare`
    let l1_input_file=`${task_pro_prepare_dir}/l1_input.json`
    let ys_task_priority_file=`${task_pro_prepare_dir}/ys_priority.json`
    let L1_inputs=[]
    let task_pro_priority=100
    if(Help.Check_isFile(l1_input_file)){
      L1_inputs=Help.Json_Parse(Help.Read_File_UTF8(l1_input_file))
    }
    if(Help.Check_isFile(ys_task_priority_file)){
      task_pro_priority=Help.Json_Parse(Help.Read_File_UTF8(ys_task_priority_file))?.priority
    }
    let new_L1_inputs=[]
    for (let j = 0; j < L1_inputs?.length; j++) {
      let one_tif_object=L1_inputs[j]
      let one_tif_object_result_name="";
      for (let [key, value] of Object.entries(one_tif_object)) {
        if(key=="pan"){
          let _tif=value?.tif
          let _tif_ext=path.extname(_tif)
          one_tif_object_result_name=path.basename(_tif,_tif_ext)
        }
      }
      let one_tif_object_result_true_name=`${task_pro_dom_dir}/${one_tif_object_result_name}.tif`
      if(!Help.Check_isFile(one_tif_object_result_true_name)){
        new_L1_inputs.push(one_tif_object)
      }
    }
    if(new_L1_inputs?.length==0){
      batch_task_mongo=await mongo_models.UpdateTask({
        id:mongo_id,
        state:5,
        // message:`不需要生产`
      })
    }
    else{
      let add_task=await Help.Produce_L1_data(mongo_id,new_L1_inputs,task_pro_dom_dir,task_pro_priority)
      let taskid="";
      if(add_task?.result){
        taskid=add_task?.result
        batch_task_mongo=await mongo_models.UpdateTask({
          id:mongo_id,
          taskid:taskid,
          state:4,
          stime:(new Date()).getTime(),
          // message:"无"
        })
      }
      else{
        batch_task_mongo=await mongo_models.UpdateTask({
          id:mongo_id,
          state:7,
          message:`添加到生产系统失败:${add_task?.message}`
        })
      }
    }
    return batch_task_mongo
  }
  //通过taskView获取归档的所有的数据的Name和
  static async Search_L1_Names(batch,taskname){
    let qs={
      start: 1,
      limit: 10,
    }
    if(batch){
      qs.batch=batch
    }
    if(taskname){
      qs.taskname=taskname
    }
    //在数据归档中根据批次号查询到该批次
    let get_task_data=await Help.Request_Get_Common(global["json_config"]?.task_search?.taskView,qs,0,{
      "x-gistack-token":global["user_gistack_token"]
    })
    let get_task_data_result=undefined
    if(get_task_data?.success==true && get_task_data?.result?.length>0){
      get_task_data_result=get_task_data?.result
    }
    else{
      return {
        status:1,
        msg:"没有在数据归档中查询到该批次"
      }
    }
    //在数据归档中根据批次号查询到该批次的已归档(archive==1)的objects
    let archive_objects=[]
    for (let i = 0; i < get_task_data_result?.length; i++) {
      let one_task= get_task_data_result[i];
      let {id,batch}=one_task 
      let one_batch_details=await Help.Request_Get_Common(global["json_config"]?.task_search?.taskDetail,{
        id: id,
        start: 1,
        limit: 2000,
      },0,{
        "X-Gistack-Token":global["user_gistack_token"]
      })
      if(one_batch_details?.success==true && one_batch_details?.result?.length>0){
        let one_batch_details_result=one_batch_details?.result
        Help.Console_log(`batch count:${one_batch_details_result?.length}..`)
        for (let i = 0; i < one_batch_details_result?.length; i++) {
          let archive= one_batch_details_result[i]?.archive;
          let produce= one_batch_details_result[i]?.produce;
          if(archive==1 && produce==0){
            archive_objects.push(one_batch_details_result[i])
          }
        }
      }
    }
    if(archive_objects?.length==0){
      return {
        status:2,
        msg:"该批次没有处于已归档状态的name景数据"
      }
    }
    let archive_names=[]
    for (let i = 0; i < archive_objects?.length; i++) {
      let name = archive_objects[i]?.name;
      let check_state=Help.Check_produce_satellite(name)
      if(check_state==true){
        archive_names.push(name)
      }
    }
    if(archive_names?.length==0){
      return {
        status:3,
        msg:"该批次没有需要生产的数据(可能数据属于忽略数据)"
      }
    }
    return {
      status:4,
      names:archive_names
    }
  }
  //通过search获取归档的所有的数据的Name
  static async Search_L1_Names_paths(prodece_ys_names,s3_paths_has,s3_path_link_dir){
    Help.Console_log(`Search_L1_Names_paths count:${prodece_ys_names?.length}..`)
    let endDate=(new Date()).format("yyyy-MM-dd")
    let s3_paths=[]
    let Search_L1_Names_errors=[];
    for (let i = 0; i <prodece_ys_names?.length; i++) {
      let one_batch_name = prodece_ys_names[i];
      Help.Console_log(`search path:${i+1}/${prodece_ys_names?.length}..${one_batch_name}`)
      let find_index=s3_paths_has?.findIndex((val)=>{
        return val?.name?.toLowerCase()==one_batch_name.toLowerCase()
      })
      if(find_index>=0){
        if(s3_paths_has[find_index]?.path){
          s3_paths.push(s3_paths_has[find_index])
        }
      }
      else{
        let one_s3_paths_file=`${s3_path_link_dir}/${one_batch_name}.txt`
        let search_data={
          CLOUDPERCENT:"",
          RollViewingAngle:"",
          SateType: null,
          SateName:null,
          formatSate:true,
          regionCode:"",
          from:0,
          size:20,
          imagetype:3,
          startDate:"2000-01-01",
          endDate:endDate,
          ids:[one_batch_name],
          returnPath:true,
          returnAll:true,
        }
        let get_task_data=await Help.Request_Post_Common(global["json_config"]?.task_search?.search,search_data,0,{
          "x-gistack-token":global["user_gistack_token"],
          "Content-Type":"application/json"
        })
        let sat_data_paths=[];
        if(get_task_data?.success==true && get_task_data?.result?.length>0){
          for (let k = 0; k < get_task_data?.result?.length; k++) {
            let this_sat_data=get_task_data?.result[k]
            let this_sat_data_path=this_sat_data?.path
            if(!this_sat_data_path){
              // Search_L1_Names_errors.push(`${this_sat_data?.file_name}:path not found!`)
              continue;
            }
            sat_data_paths.push(this_sat_data_path)
          }
        }
        let sat_data_paths_new=[]
        let map = new Map()
        for (let i = 0; i < sat_data_paths.length; i++) {
          if (!map.has(sat_data_paths[i])) {
            map.set(sat_data_paths[i], true)
            sat_data_paths_new.push(sat_data_paths[i])
          }
        }
        if(sat_data_paths_new?.length==1){
          s3_paths.push({
            name:one_batch_name,
            path:sat_data_paths_new[0]
          })
          Help.Write_File_UTF8(one_s3_paths_file,sat_data_paths_new[0])
        }
        else{
          Help.Console_log(`Search_L1_Names_paths ${one_batch_name} error`)
          Search_L1_Names_errors.push(one_batch_name)
        }
      }
    }
    return {
      s3_paths,
      s3_paths_errors:Search_L1_Names_errors
    }
  }
  //获取S3 key
  static async Search_s3_paths_link(s3_paths,s3_downs_has,s3_paths_down_link_dir){
    Help.Console_log(`down s3 count:${s3_paths?.length}..`)
    let s3_downs=[]
    for (let k = 0; k < s3_paths?.length; k++) {
      let one_s3_path=s3_paths[k]
      let one_s3_path_name=one_s3_path?.name
      let one_s3_path_path=one_s3_path?.path
      Help.Console_log(`get down link ${k+1}/${s3_paths?.length} ${one_s3_path_name}..${one_s3_path_path}.`)
      let find_index=s3_downs_has?.findIndex((val)=>{
        return val?.name?.toLowerCase()==one_s3_path_name.toLowerCase()
      })
      if(find_index>=0){
        //已经获取过下载的key了
        s3_downs.push(s3_downs_has[find_index])
      }
      else{
        let one_s3_path_link_file=`${s3_paths_down_link_dir}/${one_s3_path_name}.json`
        let one_s3_path_file_lists=await Help.s3_get_file_list({
          bucket:global["json_config"]?.s3?.bucket,
          prefix:one_s3_path_path
        })
        let this_sat_data_down_details=[]
        for (let j = 0; j < one_s3_path_file_lists?.length; j++) {
          let one_s3_path_file=one_s3_path_file_lists[j]
          if(one_s3_path_file?.Size>0){
            let one_s3_path_file_split=one_s3_path_file?.Key?.split("/")
            let file_name=one_s3_path_file_split[one_s3_path_file_split.length-1]
            this_sat_data_down_details.push({
              dir_name:one_s3_path_name,
              file_name,
              key:one_s3_path_file?.Key,
              size:one_s3_path_file?.Size
            })
          }
        }
        Help.Write_File_UTF8(one_s3_path_link_file,Help.Json_String(this_sat_data_down_details))
        s3_downs.push({
          name:one_s3_path_name,
          lists:this_sat_data_down_details
        })
      }
    }
    return s3_downs
  }
  //search L1 task archive
  static async Search_L1_Tasks(){
    let check_dir="W:/lis_produce/check"
    let taskViews=[]//7387
    //在数据归档中根据批次号查询到该批次
    let taskViews_file=`${check_dir}/taskViews.txt`
    if(Help.Check_isFile(taskViews_file)){
      taskViews=Help.Json_Parse(Help.Read_File_UTF8(taskViews_file))
    }
    else{
      let is_end=false
      let start_index=1
      while(!is_end){
        let qs={
          username:"cgadmin",
          startDate:"2025-04-20",
          endDate:"2025-05-15",
          start: start_index,
          limit: 100,
        }
        let get_task_data=await Help.Request_Get_Common(global["json_config"]?.task_search?.taskView,qs,0,{
          "x-gistack-token":global["user_gistack_token"]
        })
        if(get_task_data?.success==true ){
          if(get_task_data?.result?.length>0){
            taskViews=taskViews.concat(get_task_data?.result)
            start_index=start_index+1
          }
          else{
            is_end=true
          }
        }
        else{
          is_end=true
        }
      }
      Help.Write_File_UTF8(taskViews_file,Help.Json_String(taskViews))
    }
    //在数据归档中根据批次号查询到该批次的已归档(archive==1)的objects
    let new_taskViews=[]//6899
    let new_taskViews_file=`${check_dir}/new_taskViews_file.txt`
    if(Help.Check_isFile(new_taskViews_file)){
      new_taskViews=Help.Json_Parse(Help.Read_File_UTF8(new_taskViews_file))
    }
    else{
      let new_taskViews_no_record=[]//6899
      for (let i = 0; i < taskViews?.length; i++) {
        console.log(`${i+1}/${taskViews?.length}`)
        let one_task= taskViews[i];
        let {id,batch}=one_task
        let one_batch_details=await Help.Request_Get_Common(global["json_config"]?.task_search?.taskDetail,{
          id: id,
          start: 1,
          limit: 2000,
        },0,{
          "X-Gistack-Token":global["user_gistack_token"]
        })
        if(one_batch_details?.success==true && one_batch_details?.result?.length>0){
          one_task.archive_objects=one_batch_details?.result
          new_taskViews.push(one_task)
        }
        else{
          new_taskViews_no_record.push(id)
        }
      }
      Help.Write_File_UTF8(new_taskViews_file,Help.Json_String(new_taskViews))
      if(new_taskViews_no_record?.length>0){
        let new_taskViews_no_record_file=`${check_dir}/new_taskViews_no_record.txt`
        Help.Write_File_UTF8(new_taskViews_no_record_file,Help.Json_String(new_taskViews_no_record))
      }
    }
    //
    let not_produce_names=[]//1055
    let not_produce_names_file=`${check_dir}/not_produce_names.txt`
    if(Help.Check_isFile(not_produce_names_file)){
      not_produce_names=Help.Json_Parse(Help.Read_File_UTF8(not_produce_names_file))
    }
    else{
      for (let i = 0; i < new_taskViews?.length; i++) {
        let one_task= new_taskViews[i];
        console.log(`${i+1}/${new_taskViews?.length}`)
        let {id,batch,archive_objects}=one_task
        for (let i = 0; i < archive_objects?.length; i++) {
          let name = archive_objects[i]?.name;
          let qs={
            "imagetype":2,
            "startDate":"2025-02-06",
            "endDate":"2025-05-10",
            "regionCode":"",
            "regionName":"",
            "CLOUDPERCENT":"",
            "RollViewingAngle":"",
            "ids":[
              name
            ],
            "SateType":null,
            "SateName":null,
            "formatSate":true,
            "size":20,
            "from":0,
            returnPath:true,
            returnAll:true,
          }
          //查询数据是否已经生产
          let get_task_data=await Help.Request_Post_Common(global["json_config"]?.task_search?.search,qs,0,{
            "x-gistack-token":global["user_gistack_token"],
            "Content-Type":"application/json"
          })
          if(get_task_data?.success==true ){
            if(get_task_data?.result?.length==1){
             //已经生产了
            }
            else{
              //没有生产
              not_produce_names.push(name)
              //is_end=true
            }
          }
          else{
            let error="查询出错"
          }
        }
      }
      Help.Write_File_UTF8(not_produce_names_file,Help.Json_String(not_produce_names))
    }
    let path_get_error=[]
    let path_get_ok=[]
    for (let i = 0; i < not_produce_names?.length; i++) {
      console.log(`${i+1}/${not_produce_names?.length}`)
      let endDate=(new Date()).format("yyyy-MM-dd")
      let name=not_produce_names[i]
      let search_data={
        CLOUDPERCENT:"",
        RollViewingAngle:"",
        SateType: null,
        SateName:null,
        formatSate:true,
        regionCode:"",
        from:0,
        size:20,
        imagetype:3,
        startDate:"2000-01-01",
        endDate:endDate,
        ids:[name],
        returnPath:true,
        returnAll:true,
      }
      let get_task_data=await Help.Request_Post_Common(global["json_config"]?.task_search?.search,search_data,0,{
        "x-gistack-token":global["user_gistack_token"],
        "Content-Type":"application/json"
      })
      let sat_data_paths=[];
      if(get_task_data?.success==true && get_task_data?.result?.length>0){
        for (let k = 0; k < get_task_data?.result?.length; k++) {
          let this_sat_data=get_task_data?.result[k]
          let this_sat_data_path=this_sat_data?.path
          if(!this_sat_data_path){
            continue;
          }
          sat_data_paths.push(this_sat_data_path)
        }
      }
      let sat_data_paths_new=[]
      let map = new Map()
      for (let i = 0; i < sat_data_paths.length; i++) {
        if (!map.has(sat_data_paths[i])) {
          map.set(sat_data_paths[i], true)
          sat_data_paths_new.push(sat_data_paths[i])
        }
      }
      if(sat_data_paths_new?.length==1){
        path_get_ok.push({
          name:name,
          path:sat_data_paths_new[0]
        })
        
      }
      else{
        path_get_error.push(name)
      }
    }
    let path_get_error_file=`${check_dir}/path_get_error.txt`
    let path_get_ok_file=`${check_dir}/path_get_ok.txt`
    Help.Write_File_UTF8(path_get_error_file,Help.Json_String(path_get_error))
    Help.Write_File_UTF8(path_get_ok_file,Help.Json_String(path_get_ok))
    return true
  }
  static async Search_L1_s3_paths(){
    let check_dir="W:/lis_produce/check"
    let path_get_ok_file=`${check_dir}/path_get_ok.txt`
    let path_get_ok=[]//7387
    if(Help.Check_isFile(path_get_ok_file)){
      path_get_ok=Help.Json_Parse(Help.Read_File_UTF8(path_get_ok_file))
    }
    //去重
    let path_get_ok_new=[]
    let map = new Map()
    for (let i = 0; i < path_get_ok.length; i++) {
      if (!map.has(path_get_ok[i]?.name)) {
        map.set(path_get_ok[i]?.name, true)
        path_get_ok_new.push(path_get_ok[i])
      }
    }
    if(path_get_ok_new?.length>0){
      let s3_downs_file=`${check_dir}/s3_downs.txt`
      let s3_downs=[]
      let one_s3_path_link_dir=`${check_dir}/down_links`
      Help.Create_Dir(one_s3_path_link_dir)
      for (let i = 0; i < path_get_ok_new?.length; i++) {
        let one_s3_path = path_get_ok_new[i];
        let one_s3_path_name=one_s3_path?.name
        let one_s3_path_path=one_s3_path?.path
        let one_s3_path_link_file=`${check_dir}/down_links/${one_s3_path_name}.json`
        let one_s3_path_file_lists=await Help.s3_get_file_list({
          bucket:global["json_config"]?.s3?.bucket,
          prefix:one_s3_path_path
        })
        let this_sat_data_down_details=[]
        for (let j = 0; j < one_s3_path_file_lists?.length; j++) {
          console.log(`${i+1}/${path_get_ok_new?.length} ${j+1}/${one_s3_path_file_lists?.length}`)
          let one_s3_path_file=one_s3_path_file_lists[j]
          if(one_s3_path_file?.Size>0){
            let one_s3_path_file_split=one_s3_path_file?.Key?.split("/")
            let file_name=one_s3_path_file_split[one_s3_path_file_split.length-1]
            this_sat_data_down_details.push({
              dir_name:one_s3_path_name,
              file_name,
              key:one_s3_path_file?.Key,
              size:one_s3_path_file?.Size
            })
          }
        }
        Help.Write_File_UTF8(one_s3_path_link_file,Help.Json_String(this_sat_data_down_details))
        s3_downs.push({
          name:one_s3_path_name,
          lists:this_sat_data_down_details
        })
      }
      Help.Write_File_UTF8(s3_downs_file,Help.Json_String(s3_downs))
    }
    return true
  }

  static async Search_L1_s3_path_downs(){
    let check_dir="W:/lis_produce/check"
    let s3_paths_down_link_dir=`${check_dir}/down_links`
    let s3_paths_down_links=Help.Get_Deep_Files(s3_paths_down_link_dir,[".json"])
    let s3_downs=[]//7387
    for (let k = 0; k < s3_paths_down_links?.length; k++) {
      let one_s3_paths_down_link=s3_paths_down_links[k]
      let one_s3_paths_down_link_json=Help.Json_Parse(Help.Read_File_UTF8(one_s3_paths_down_link?.file))
      s3_downs.push({
        name:one_s3_paths_down_link?.namenoext,
        lists:one_s3_paths_down_link_json
      })
    }
    let new_s3_downs=[]
    if(s3_downs?.length>0){
      let oneMB = 1024 * 1024 * 5;
      for (let i = 0; i < s3_downs?.length; i++) {
        let one_down_load = s3_downs[i];
        let one_down_load_name=one_down_load?.name
        let one_down_load_lists=one_down_load?.lists
        Help.Console_log(`down ${i+1}/${s3_downs?.length} ${one_down_load_name}`)
        let this_element_save_dir=`${check_dir}/unzip/${one_down_load_name}`
        let this_element_down_stat=`${this_element_save_dir}/down.info`
        let down_do=true
        if(Help.Check_isFile(this_element_down_stat)){
          let down_stat=Help.Read_File_UTF8(this_element_down_stat)
          if(down_stat=="true"){
            down_do=false
          }
        }
        let new_one_down_load_lists=[]
        if(down_do){
          for (let k = 0; k < one_down_load_lists?.length; k++) {
            let this_element = one_down_load_lists[k];
            let {dir_name,file_name,key,size}=this_element
            let pic_count=size/oneMB;
            pic_count=Math.ceil(pic_count);
            let rangs=[]
            for (let j = 0; j < pic_count; j++) {
              let start=j*oneMB;
              let end=(j+1)*oneMB;
              if(end>(size-1)){
                end=size
              }
              end=end-1
              let range=`bytes=${start}-${end}`
              rangs.push({
                range,
                start,
                end
              })
            }
            new_one_down_load_lists.push({
              file_dir:this_element_save_dir,
              file:`${this_element_save_dir}/${file_name}`,
              key:key,
              rangs:rangs,
            })
          }
        }
        if(new_one_down_load_lists?.length>0){
          new_s3_downs.push({
            name:one_down_load_name,
            file_dir:this_element_save_dir,
            file_stat:this_element_down_stat,
            lists:new_one_down_load_lists
          })
        }
      }
    }
    if(new_s3_downs?.length>0){
      await Help.multi_download(new_s3_downs)
    }
    return true
  }

  static async Search_L1_Tasks_other(){
    let check_dir="W:/lis_produce/check"
    let taskViews=[]//7387
    //在数据归档中根据批次号查询到该批次
    let taskViews_file=`${check_dir}/taskViews.txt`
    if(Help.Check_isFile(taskViews_file)){
      taskViews=Help.Json_Parse(Help.Read_File_UTF8(taskViews_file))
    }
    else{
      let is_end=false
      let start_index=0
      while(!is_end){
        let search_data={
          "imagetype":3,
          "username":"super",
          "orderId":"manualupload_nm",//manualupload
          "returnPath":true,
          from : start_index*200,
          size: 200,
        }
        let get_task_data=await Help.Request_Post_Common(global["json_config"]?.task_search?.search,search_data,0,{
          "x-gistack-token":global["user_gistack_token"],
          "Content-Type":"application/json"
        })
        if(get_task_data?.success==true ){
          if(get_task_data?.result?.length>0){
            taskViews=taskViews.concat(get_task_data?.result)
            start_index=start_index+1
          }
          else{
            is_end=true
          }
        }
        else{
          is_end=true
        }
      }
      Help.Write_File_UTF8(taskViews_file,Help.Json_String(taskViews))
    }
    let path_get_error=[]
    let path_get_ok=[]
    for (let i = 0; i < taskViews?.length; i++) {
      console.log(`${i+1}/${taskViews?.length}`)
      let endDate=(new Date()).format("yyyy-MM-dd")
      let name=taskViews[i]?.folder_name 
      let search_data={
        CLOUDPERCENT:"",
        RollViewingAngle:"",
        SateType: null,
        SateName:null,
        formatSate:true,
        regionCode:"",
        from:0,
        size:20,
        imagetype:3,
        startDate:"2000-01-01",
        endDate:endDate,
        ids:[name],
        returnPath:true,
        returnAll:true,
      }
      let get_task_data=await Help.Request_Post_Common(global["json_config"]?.task_search?.search,search_data,0,{
        "x-gistack-token":global["user_gistack_token"],
        "Content-Type":"application/json"
      })
      let sat_data_paths=[];
      if(get_task_data?.success==true && get_task_data?.result?.length>0){
        for (let k = 0; k < get_task_data?.result?.length; k++) {
          let this_sat_data=get_task_data?.result[k]
          let this_sat_data_path=this_sat_data?.path
          if(!this_sat_data_path){
            continue;
          }
          sat_data_paths.push(this_sat_data_path)
        }
      }
      let sat_data_paths_new=[]
      let map = new Map()
      for (let i = 0; i < sat_data_paths.length; i++) {
        if (!map.has(sat_data_paths[i])) {
          map.set(sat_data_paths[i], true)
          sat_data_paths_new.push(sat_data_paths[i])
        }
      }
      if(sat_data_paths_new?.length==1){
        path_get_ok.push({
          name:name,
          path:sat_data_paths_new[0]
        })
        
      }
      else{
        path_get_error.push(name)
      }
    }
    let path_get_error_file=`${check_dir}/path_get_error.txt`
    let path_get_ok_file=`${check_dir}/path_get_ok.txt`
    Help.Write_File_UTF8(path_get_error_file,Help.Json_String(path_get_error))
    Help.Write_File_UTF8(path_get_ok_file,Help.Json_String(path_get_ok))
    return true
  }

  
  //Multi-process download driver: dispatches per-file jobs to the worker
  //pool in global["download_process_array"] until every file in `s3_downs`
  //has been fetched. Resolves true when the work list is empty and no
  //worker is still busy. At most one new job is dispatched per poll tick.
  static multi_download(s3_downs){
    return new Promise((resolve)=>{
      Help.Console_log(`down count:${s3_downs?.length}`)
      //Poll interval (ms) between scheduling attempts, from config.
      let timeout=global["json_config"]?.down_Intervaltimeout
      //Deep-copy the work list (JSON round-trip) so the caller's array is
      //never mutated, then reset every file to the "waiting" state.
      let left_s3_downs=Help.Json_Parse(Help.Json_String(s3_downs))
      left_s3_downs=Help.reset_s3_downs(left_s3_downs)
      let now_downing=0;
      let _setInterval=setInterval (()=>{
        //Count workers currently busy (stat==1).
        now_downing=global["download_process_array"]?.filter((val)=>{
          return val?.stat==1
        })?.length
        if(now_downing==0 && left_s3_downs?.length==0){ //all done: no work left, no worker busy
          clearInterval(_setInterval);
          resolve(true)
        }
        else
        {
          //Dispatch one more file while a worker slot is free.
          if (now_downing < global["download_process_array"]?.length && left_s3_downs?.length>0){
            let next_down_object=Help.get_next_down_file(left_s3_downs)
            left_s3_downs=next_down_object?.s3_downs
            let down_file_object=next_down_object?.down_file_object
            if(down_file_object){
              Help.process_down(down_file_object).then((_down_file_object)=>{
                let pid_index=_down_file_object?.pid_index
                let data=_down_file_object?.data
                //Mark the file finished in the work list and free the worker.
                left_s3_downs=Help.update_s3_downs(left_s3_downs,data)
                global["download_process_array"][pid_index].stat=0
              })
            }
          }
        }
      },timeout)
    })
  }
  static reset_s3_downs(s3_downs){
    for (let i = 0; i < s3_downs?.length; i++) {
      let one_down_load = s3_downs[i];
      let one_down_load_name=one_down_load?.name
      let one_down_load_lists=one_down_load?.lists
      let one_down_load_file_dir=one_down_load?.file_dir
      let one_down_load_file_stat=one_down_load?.file_stat
      for (let k = 0; k < one_down_load_lists?.length; k++) {
        let this_element = one_down_load_lists[k];
        this_element.stat=0
      }
    }
    return s3_downs
  }
  //stat 0 等待  1 下载 2 下载完成
  static get_next_down_file(old_s3_downs){
    let s3_downs=Help.Json_Parse(Help.Json_String(old_s3_downs))
    let down_file_object=undefined
    for (let i = 0; i < s3_downs?.length; i++) {
      let one_down_load = s3_downs[i];
      let one_down_load_name=one_down_load?.name
      let one_down_load_lists=one_down_load?.lists
      let one_down_load_file_dir=one_down_load?.file_dir
      let one_down_load_file_stat=one_down_load?.file_stat
      let findIndex=one_down_load_lists?.findIndex((val)=>{
        return val?.stat==0
      })
      if(findIndex>=0){
        one_down_load_lists[findIndex].stat=1
        down_file_object={
          prename:one_down_load_name,
          index:findIndex,
          dfile:one_down_load_lists[findIndex]
        }
        break;
      }
    }
    return {
      s3_downs:s3_downs,
      down_file_object:down_file_object
    }
  }
  static update_s3_downs(old_s3_downs,down_file_object){
    let s3_downs=Help.Json_Parse(Help.Json_String(old_s3_downs))
    let findIndex=s3_downs?.findIndex((val)=>{
      return val?.name==down_file_object?.prename
    })
    if(findIndex>=0){
      s3_downs[findIndex].lists[down_file_object?.index].stat=2
      let finds=s3_downs[findIndex].lists?.filter((val)=>{
        return val?.stat==2
      })
      if(finds?.length==s3_downs[findIndex].lists?.length){
        Help.Write_File_UTF8(s3_downs[findIndex].file_stat,`true`)
        s3_downs.splice(findIndex,1)
        Help.Console_log(`left down dir ${s3_downs?.length}`)
      }
    }
    return s3_downs
  }
  static async one_file_download(down_file_object){
    let {prename,index,dfile}=down_file_object
    let {file_dir,file,key,rangs}=dfile
    Help.Create_Dir(file_dir)
    Help.Del_File(file)
    let write_file_id=fs.openSync(file,'a')
    for (let j = 0; j < rangs?.length; j++) {
      let range=rangs[j]
      let range_bytes=await Help.s3_get_file({
        bucket:global["json_config"]?.s3?.bucket,
        key:key,
        range:range?.range
      })
      if(range_bytes){
        let buffer=Buffer.from(range_bytes)
        fs.writeSync(write_file_id, buffer, 0, buffer?.length,range?.start);
      }
    }
    fs.closeSync(write_file_id);
    return down_file_object
  }
  //Spawn the pool of S3 download worker processes (pool size = config
  //down_t) and store them in global["download_process_array"]. Each pool
  //entry wraps the forked child with a stat flag (0 = idle, 1 = busy, set
  //by process_down/multi_download) and a promise-returning send() used to
  //await the worker's reply.
  static add_download_process(){
    let fork_process_js ="";
    //Resolve the worker script relative to cwd in debug, else to this file.
    if(process?.env?.debugENV === 'debug'){
      fork_process_js =path.resolve("./down/s3_down.js");
    }
    else{
      fork_process_js =path.join(__dirname, './down/s3_down.js')
    }
    let MAX_down_cuont=global["json_config"]?.down_t
    global["download_process_array"]=[]
    for(let i=0;i<MAX_down_cuont;i++){
      let one_download_process=fork(fork_process_js);
      one_download_process.on('close', (code, signal) => {
        Help.Console_log(`子进程 ${one_download_process.pid}因收到信号 ${signal} 而终止,code=${code}`);
      });
      one_download_process.on('error', (code, signal) => {
        Help.Console_log(`子进程 ${one_download_process.pid} 出现错误`);
      });
      //A worker message settles the promise created by the last send().
      one_download_process.on('message', async (message_data) => {
        let {pid,pid_index,data}=message_data
        one_download_process.resolve(message_data)
      });
      let one_download_process_object={
        process:one_download_process,
        stat:0,
        pid_index:i,
        pid:one_download_process.pid,
      }
      //send(): post one job to the child and await its reply message.
      //NOTE(review): assumes at most one in-flight send per worker — a
      //second send before the reply would overwrite `resolve` and strand
      //the first caller; the stat flag is what prevents this. Confirm.
      one_download_process_object.send=async (val)=>{
        let one_Promise_resolve=undefined
        let one_Promise=new Promise((resolve)=>{
          one_Promise_resolve=resolve
        })
        one_download_process.resolve=one_Promise_resolve
        one_download_process.send(val)
        let message_data=await one_Promise
        return message_data
      }
      global["download_process_array"].push(one_download_process_object)
    }
  }
  static async process_down(down_file_object){
    return new Promise(async (resolve)=>{
      let stat_0_index= global["download_process_array"]?.findIndex((val)=>{
        return val?.stat==0
      })
      global["download_process_array"][stat_0_index].stat=1
      let message_data=await global["download_process_array"][stat_0_index].send({
        pid_index:global["download_process_array"][stat_0_index].pid_index,
        data:down_file_object
      })
      let {pid,pid_index,data}=message_data
        resolve({
          pid_index:stat_0_index,
          data:data
        })
    })
  }
  static Check_produce_satellite(name){
    let produce_satellite_except=global["json_config"]?.produce_satellite_except
    let find_satellite_index=produce_satellite_except?.findIndex((val)=>{
      let satellite=val?.satellite
      let find_satellite_index2=satellite?.findIndex((val2)=>{
        let bool_find_SatelliteID=-1
        if(val2?.indexOf("*")==val2.length-1){
          let left_SatelliteID=val2?.substring(0,val2.length-1)
          bool_find_SatelliteID=name?.toUpperCase()?.indexOf(left_SatelliteID)
        }
        else{//完全匹配
          bool_find_SatelliteID=name?.toUpperCase()?.indexOf(val2)
        }
        return bool_find_SatelliteID>=0?true:false
      })
      return find_satellite_index2>=0?true:false
    })
    if(find_satellite_index>=0){
      let camera=produce_satellite_except[find_satellite_index]?.camera
      if(camera){
        let find_camera=camera?.findIndex((val)=>{
          let bool_find_camera=-1
          if(val?.indexOf("*")==val.length-1){
            let left_camera=val.substring(0,val.length-1)
            bool_find_camera=name?.toUpperCase()?.indexOf(left_camera)
          }
          else{//完全匹配
            bool_find_camera=name?.toUpperCase()?.indexOf(val)
          }
          return bool_find_camera>=0?true:false
        })
        if(find_camera>=0){
          return false
        }
        else{
          return true
        }
      }
      else{
        return false
      }
    }
    else{
      return true
    }
  }
  //gp check_tifs
  // Ask the GP service to inspect task_unzip_dir, pair the tif/xml files it
  // finds, and build the input list consumed by Produce_L1_data.
  // @param {string} taskname        task name (not used in this body)
  // @param {string} task_unzip_dir  directory holding the unzipped scene files
  // @returns {Promise<Array>} one {mul?:{tif},pan?:{tif}} entry per usable
  //   scene; empty when the GP check fails or finds nothing
  static async Produce_L1_data_check(taskname,task_unzip_dir){
    let L1_input=[]
    Help.Console_log("post into gp for check...")
    let check_tifs_data=await Help.Request_Get_Common(`${global["json_config"]?.gp_url}/task/check_tifs?`,{
      "input":task_unzip_dir,
      "type":"produce_dom",
    },0)
    Help.Console_log("gp check end!")
    if(check_tifs_data?.status){
      let tifs_matchs=check_tifs_data?.result?.tifs_matchs
      let tif_names=[]
      let tif_xmls=[]
      let tif_objects=[]
      for (let j = 0; j < tifs_matchs?.length; j++) {
        let one_tif_object=tifs_matchs[j]
        let one_tif_name="";
        // pick the scene's base name: prefer any non-"mul" band, fall back to
        // "mul" only when it is the sole entry
        for (let [key, value] of Object.entries(one_tif_object)) {
          if(key!="mul"){
            one_tif_name=value.name
          }
          else{
            if(!one_tif_name){
              one_tif_name=value.name
            }
          }
        }
        if(one_tif_name){
          tif_names.push(`${one_tif_name}.tif`)
          tif_xmls.push(`${one_tif_name}.xml`)
          tif_objects.push({
            name:one_tif_name,
            object:one_tif_object
          })
        }
      }
      // drop scenes whose outputs already exist on the task-search service
      let will_do_tif_names=await Help.Check_produce_isExistFile(tif_names,tif_xmls)
      let will_do_tif_objects = tif_objects.filter(el => will_do_tif_names.includes(`${el.name}.tif`))
      //
      // NOTE(review): the next line discards the "already exists" filter above,
      // so every scene is always reprocessed — looks like a debug leftover;
      // confirm whether the filtered list should be restored.
      will_do_tif_objects=tif_objects
      //
      // keep only the "mul"/"pan" tif paths for each remaining scene
      for (let j = 0; j < will_do_tif_objects?.length; j++) {
        let one_tif_object=will_do_tif_objects[j]?.object
        let L1_input_one={}
        for (let [key, value] of Object.entries(one_tif_object)) {
          if(key=="mul" || key=="pan" ){
            if(!L1_input_one[key]){
              L1_input_one[key]={}
            }
            L1_input_one[key].tif=value.tif
          }
        }
        L1_input.push(L1_input_one)
      }
    }
    return L1_input
  }
  static async Produce_L1_data(taskname,L1_input,task_result_dir,task_pro_priority){
    let config_domdem=await Help.Request_Get_Common(`${global["json_config"]?.gp_url}/task/config/get_config_domdem?`,{},0)
    let domdem=config_domdem?.result?.domdem
    let ddem=domdem?.auxDEM[0]
    let adem=domdem?.highDEM[0]
    let ddom=domdem?.highDOM[0]
    let dogDOM="";
    let short_name=taskname?.substr(-4)
    let add_task_data=await Help.Request_Post_Common(`${global["json_config"]?.gp_url}/task/add_task?`,{
      "name":short_name,
      "type":"produce_dom",
      "val":{
        "pretaskid":"",
        "input":L1_input,
        "output":task_result_dir,
        "kz":"1",
        "ddom":ddom,
        "ddomt":"optical",
        "uddomt":"1",
        "ddem":ddem,
        "adem":adem,
        "cd":0,
        "cl1":0,
        "rerpc":0,
        "bit":1,
        "of":1,
        "oftype":1,
        "mh":1,
        "mhy":1,
        "ba":1,
        "bay":1,
        "ckcon":0,
        "fs":1,
        "zcmatch":0,
        "zbay":0,
        "mfs":1,
        "outband":2,
        "rf":1,
        "autorf":1,
        "colorba":0,
        "god":0,
        "dogdom":dogDOM,
        "py":1,
        "convert":1,
        "priority":task_pro_priority,
        "dtemp":0,
        "cdian":0,
        "cb":`${global["json_config"]?.gp_cb_url}`,
        "options":{
          "cdtype":0,
          "single":1,
          "cutoffratio":"0.005",
          "py_type":"ovr",
          "from":"tif",
          "to":"cog",
          "set_nodata":1,
          "trans_nodata":"0",
          "rtype":"nearest",
          "cachemax":"1024"
        }
      },
      "pamars":{}
    },0,{
      "Content-Type":"application/json"
    })
    return  add_task_data
  }
  static async delete_gp_task(taskid){
    let delete_gp=await Help.Request_Post_Common(`${global["json_config"]?.gp_url}/task/delete_task?`,{
      taskid:taskid,
    },0,{
      "Content-Type":"application/json"
    })
    return true
  }
  static async Check_produce_isExistFile(tif_names,tif_xmls){
    let tif_name_xml=[].concat(tif_names).concat(tif_xmls)
    let importDate=(new Date()).format("yyyyMMdd")
    let names=tif_name_xml
    let get_task_data=await Help.Request_Post_Common(global["json_config"]?.task_search?.isExistFile,{
      "importDate":importDate,
      "name":names,
    },0,{
      "x-gistack-token":global["user_gistack_token"],
      "Content-Type":"application/json"
    })
    let will_do=[]
    if(get_task_data?.success){
      let check_result=get_task_data?.result
      let check_all=get_task_data?.all
      if(check_result==false){
        will_do=tif_names
      }
      else if(check_result?.length>0){
        will_do = tif_names.filter(el => !check_result.includes(el))
      }
      //result all
    }
    return will_do
  }
  static async delete_task(id){
    let task_result=await mongo_models.GetTask({id})
    let mongo_id=task_result?.mongo_id
    let taskid=task_result?.taskid
    if(task_result?.state==6){
      let delete_gp=await Help.Request_Post_Common(`${global["json_config"]?.gp_url}/task/delete_task?`,{
        taskid:taskid,
      },0,{
        "Content-Type":"application/json"
      })
      delete_gp=delete_gp
    }
    {
      await Help.add_rabbitmq({
        delete_dir:`${global["json_config"]?.produce_base_dir_linux}/${mongo_id}`, 
      },"sdp_pri_delete");
    }
    let result=await mongo_models.DeleteTask(id)
    return true
  }
  static Read_File_UTF8(_file,encoding="utf-8"){
    let file_data=undefined
    if(fs.existsSync(_file)){
      try{
        file_data=fs.readFileSync(_file,{
          encoding:encoding
        })
      }
      catch(error){
      }
    }
    return file_data
  }
  static Write_File_UTF8(_file,_data){
    let is_ok=true
    try{
      fs.writeFileSync(_file,_data,{
        encoding:"utf-8"
      })
    }
    catch(error){
      is_ok=false
    }
    return is_ok
  }
  static Copy_File(_src,_dest){
    let move_status=false;
    try{
      Help.Del_File(_dest)
      fs.copyFileSync(_src,_dest)
      move_status=true;
    }
    catch(error){
      Help.Console_log(error.message)
      move_status=false
    }
    return move_status
  }
  static Move_File(_src,_dest){
    let move_status=false;
    try{
      Help.Del_File(_dest)
      fse.moveSync(_src,_dest, { overwrite: true })
      move_status=true;
    }
    catch(error){
      Help.Console_log(error.message)
      move_status=false
    }
    return move_status
  }
  static Copy_Dir(_src,_dest){
    let move_status=false;
    try{
      fse.copySync(_src,_dest)
      move_status=true;
    }
    catch(error){
      Help.Console_log(error.message)
      move_status=false
    }
    return move_status
  }
  static Move_Dir(_src,_dest){
    let move_status=false;
    try{
      fse.moveSync(_src,_dest, { overwrite: true })
      Help.Del_File_Dir(_src)
      move_status=true;
    }
    catch(error){
      Help.Console_log(error.message)
      move_status=false
    }
    return move_status
  }
  static Del_File(_src) {
    let move_status=false;
    try{
      if(Help.Check_isFile(_src)){
        fs.unlinkSync(_src)
      }
      move_status=true;
    }
    catch(error){
      move_status=false
    }
    return move_status
  }
  static Del_File_Dir(_path) {
    let move_status=false;
    try{
      fse.removeSync(_path)
      move_status=true;
    }
    catch(error){
      move_status=false
    }
    return move_status
  }
  static Create_Dir(dirfile){
    try{
      if(!Help.Check_isdir(dirfile)){
        fs.mkdirSync(dirfile,{
          recursive:true,
          mode:"07777"
        });
      }
      else{
        return true
      }
    }
    catch(error){
      return `文件夹创建失败:${error.message}`
    }
    return true
  }
  static Get_Deep_Files(_path,ext,_deep=true){
    let find_files_tifs=[]
    let all_files=fs.readdirSync(_path);
    for(let i in all_files){
      let file_name_with_ext=all_files[i]
      let file_full_name=`${_path}/${file_name_with_ext}`
      let stats=fs.statSync(file_full_name);
      if(stats.isFile()){
        let true_ext=path.extname(file_name_with_ext);
        let be_push={
          file:file_full_name.replace(/\\/g, "/"),
          name:file_name_with_ext,
          namenoext:path.basename(file_name_with_ext,true_ext),
          ext:true_ext,
          size:stats.size,
        }
        if(ext){
          if(Help.Find_ext_in_array(true_ext,ext)){
            find_files_tifs.push(be_push) 
          }
        }
        else{
          find_files_tifs.push(be_push)
        }
        
      }else{
        if(_deep){
          let _find_files_tifs=Help.Get_Deep_Files(file_full_name,ext,_deep);
          find_files_tifs=[].concat(find_files_tifs,_find_files_tifs)
        }
      }
    }
    return find_files_tifs;
  }
  static Find_ext_in_array(extname,exts){
    let ext_index=exts?.findIndex((val)=>{
      return val.toUpperCase()==extname.toUpperCase()
    })
    if(ext_index>=0){
      return true
    }
    return false
  }
  static Get_Pamars(ctx) {
    let pamars={};
    Object.assign(pamars, ctx.req.body);
    Object.assign(pamars, ctx.request.body);
    Object.assign(pamars, ctx.request.query);
    Object.assign(pamars, ctx.params);
    return pamars;
  }
  static Resolve(ctx,status=true, result=null,message='') {
    if(ctx.res){
      ctx.response.status = 200;
      ctx.body ={
        status: status,
        result: result,
        message:message
      }
    }
    else{
      return {
        status: ctx,
        result: status,
        message:result
      }
    }
  }
  static Resolve_Back(ctx,Resolve_data){
    ctx.response.status = 200;
    ctx.set("Content-Type", "application/json")
    ctx.body ={
      status: Resolve_data.status,
      result: Resolve_data.result,
      message:Resolve_data.message
    }
  }
  static Get_TimeNow(format=false){
    let date=new Date()
    if(format){
      return date.format("yyyy-MM-dd_hh_mm_ss");
    }
    else{
      return date.getTime();
    }
  }
  static Console_log(log){
    if(global["json_last_msg"]!=log){
      console.log(`=>${new Date().format("yyyy-MM-dd hh:mm:ss")}  ${log}`)
      global["json_last_msg"]=log
    }
  }
}
module.exports = Help