package com.ruoyi.gbase.util;

import com.ruoyi.common.utils.DateUtils;
import com.ruoyi.gbase.domain.BackTable;
import com.ruoyi.gbase.domain.DbColumns;
import com.ruoyi.gbase.service.IDbColumnsService;
import com.ruoyi.gbase.service.IGbaseJobInfoService;
import com.ruoyi.gbase.service.IGbaseUtilService;
import com.ruoyi.gbase.service.ITableInfoService;
import com.ruoyi.gbase.domain.GbaseJobInfo;
import com.ruoyi.gbase.domain.TableInfo;
import com.ruoyi.nsh.service.*;
import com.ruoyi.nsh.util.DateUtil;
import com.ruoyi.web.controller.common.CommonController;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.sql.ResultSet;
import java.util.*;

/**
 * GBASE入库类
 * 
 * @author ruoyi
 */
@Component("GbaseLoadUtil")
public class GbaseLoadUtil
{
    // FIX: the logger was created with CommonController.class, which mis-attributed
    // every log line from this class; use GbaseLoadUtil.class instead.
    private static final Logger log = LoggerFactory.getLogger(GbaseLoadUtil.class);

    /** Backup server host ({@code backup.ip}). */
    @Value("${backup.ip}")
    private String ip;

    /** Backup server port ({@code backup.port}). */
    @Value("${backup.port}")
    private int port;

    /** Backup server login user ({@code backup.username}). */
    @Value("${backup.username}")
    private String username;

    /** Backup server password ({@code backup.pwd}). */
    @Value("${backup.pwd}")
    private String pwd;

    /** Table delivery/load status records (one row per delivered table per day). */
    @Autowired
    private ITableInfoService tableInfoService;

    /** Thin JDBC wrapper used to run SQL against GBase. */
    @Autowired
    private GbaseUtil gbaseUtil;

    /** Alert SMS recipient ({@code sms.phoneNo}); not referenced in this class's visible code. */
    @Value("${sms.phoneNo}")
    private String phoneNo;

    @Autowired
    private IGbaseUtilService gbaseUtilService;

    /** Column metadata (primary-key / non-key columns) per table. */
    @Autowired
    private IDbColumnsService dbColumnsService;

    /** Per-table load job results (replaced by job id after each run). */
    @Autowired
    private IGbaseJobInfoService gbaseJobInfoService;

    /**
     * Root folder of the latest ODS delivery files ({@code odsurl.url}).
     **/
    @Value("${odsurl.url}")
    private String odsurl;

    /**
     * Root folder of the daily backup files ({@code odsurl.backupUrl}).
     **/
    @Value("${odsurl.backupUrl}")
    private String backupUrl;
    /**
     * Checks whether the given table has already been delivered (下发) for the
     * given ETL date.
     *
     * @param etl_date         ETL date (yyyyMMdd)
     * @param query_schemaName schema name; compared upper-cased
     * @param query_tableName  table name; compared upper-cased
     * @return true when at least one delivery record exists for that date
     */
    public boolean IsHasXF(String etl_date, String query_schemaName, String query_tableName) {
        TableInfo tableInfo = new TableInfo();
        tableInfo.setEtlDate(etl_date);
        tableInfo.setSchemaName(query_schemaName.toUpperCase());
        tableInfo.setTableName(query_tableName.toUpperCase());
        // FIX (idiom): return the emptiness check directly instead of if/else true/false
        return !tableInfoService.selectTableInfoList(tableInfo).isEmpty();
    }

    /**
     * Entry point: loads every table delivered on etl_date that matches the given
     * delivery type and has not been loaded yet, then refreshes the
     * IM.DIM_ALLTABLES metadata for each processed table.
     *
     * Delivery types: "0" full reload ({@link #t0}), "1" incremental-change merge
     * ({@link #t1}), "2"/"5" daily incremental append ({@link #t2}).
     *
     * @param etl_date         ETL date (yyyyMMdd)
     * @param query_schemaName optional schema filter (upper-cased), may be null
     * @param query_tableName  optional table filter (upper-cased), may be null
     * @param type             delivery type code, see above
     */
    public void loadTable(String etl_date, String query_schemaName, String query_tableName, String type)
    {
        TableInfo tableInfo = new TableInfo();
        tableInfo.setEtlDate(etl_date);
        tableInfo.setSendCondition(type);
        tableInfo.setJobRunStatus(4);

        if (null != query_schemaName) {
            tableInfo.setSchemaName(query_schemaName.toUpperCase());
        }
        if (null != query_tableName) {
            tableInfo.setTableName(query_tableName.toUpperCase());
        }

        // 2021-11-26: only tables whose structure has not changed and that are not loaded yet
        List<TableInfo> tableInfoList = tableInfoService.selectTableInfo_notLoad_List(tableInfo);

        // 2021-11-23: an explicit table name with type "1" means a manual re-run of a table
        // whose structure changed after it was already loaded — include already-loaded rows.
        if (null != query_tableName && type.equals("1")) {
            tableInfoList = tableInfoService.selectTableInfoList(tableInfo);
        }

        try {
            InetAddress addr = InetAddress.getLocalHost();
            // NOTE(review): the trailing '-' makes this guard never match, so loads always run;
            // kept byte-identical — confirm whether the dev-machine skip is still wanted.
            if ("32.202.51.181-".equals(addr.getHostAddress())) {
                log.info("当前为本机运行，不做入库操作！！！ IP:" + addr.getHostAddress());
                return;
            }
        } catch (UnknownHostException e) {
            // FIX: log instead of printStackTrace(); a resolution failure only skips the guard
            log.error("获取本机地址失败", e);
        }

        // FIX: the three per-type loops were copy-pasted with identical "全量入库" comments;
        // each loop now carries its real meaning and still runs the metadata refresh per table.
        if (type.equals("0")) {
            // full reload of every matching table
            for (TableInfo t1 : tableInfoList) {
                t0(t1);
                // refresh table metadata (primary keys, column consistency) in IM.DIM_ALLTABLES
                updateALLTABLES(t1);
            }
        } else if (type.equals("1")) {
            // incremental-change (增变量) merge
            for (TableInfo t1 : tableInfoList) {
                t1(t1);
                updateALLTABLES(t1);
            }
        } else if (type.equals("2") || type.equals("5")) {
            // daily incremental append
            for (TableInfo t1 : tableInfoList) {
                t2(t1);
                updateALLTABLES(t1);
            }
        }
    }


    /**
     * Type "0": automatic full-table load (the simplest path).
     *
     * Truncates the target table and bulk-loads the day's delivery file
     * (P_103_&lt;table&gt;_&lt;date&gt;.del), then replaces the job-result row. When
     * yesterday's job record marks the table as zipper-tracked (isll = "1") and
     * the zipper end date is current, the same file is also appended to the
     * zipper table datawarehouse.lql_&lt;schema&gt;_&lt;table&gt;.
     *
     * Any exception is logged and the method returns so the scheduler can retry
     * the table later.
     *
     * @param t1 one delivered-table row (etlDate/schema/table already set)
     */
    public void t0(TableInfo t1)
    {
        GbaseJobInfo gbaseJobInfo = new GbaseJobInfo();
        GbaseJobInfo gbaseJobInfoLast = new GbaseJobInfo();
        // job start time
        gbaseJobInfo.setCreateTime(DateUtils.getNowDate());
        String etldt = t1.getEtlDate();
        String etldtQt = DateUtil.addDay(etldt, -1);   // previous day
        String schema2 = t1.getSchemaName();
        String tablename2 = t1.getTableName();
        gbaseJobInfo.setSchemaName(schema2);
        gbaseJobInfo.setTableName(tablename2);
        gbaseJobInfo.setSendCondition(t1.getSendCondition());
        // yesterday's job record: tells us whether the zipper table is up to date
        gbaseJobInfo.setEtlDate(etldtQt);
        List<GbaseJobInfo> glist = gbaseJobInfoService.selectGbaseJobInfoList(gbaseJobInfo);
        if (!glist.isEmpty()) {
            gbaseJobInfoLast = glist.get(0);
        }
        try {
            String truncateSQL = "truncate table " + schema2 + "." + t1.getTableName() + " ; ";
            String sql = "load data infile " +
                    " '" + odsurl + "/" + etldt + "/" + schema2 + "/_treated/P_103_" + tablename2 + "_" + etldt + ".del'  " +
                    "into table " + schema2 + "." + tablename2 + " character set gb18030 " +
                    "data_format 3 fields terminated by '\\x1D' AUTOFILL;  ";
            gbaseUtil.update(truncateSQL, null);
            int result = gbaseUtil.update(sql, null);
            log.info(schema2 + "." + tablename2 + ":开始清空表，导入完成，一导入：" + result + "条数据");
            // fill in the job-result row
            gbaseJobInfo.setJobId(t1.getJobId());
            gbaseJobInfo.setJobName(t1.getJobName());
            gbaseJobInfo.setEndTime(DateUtils.getNowDate());
            gbaseJobInfo.setEtlDate(etldt);
            gbaseJobInfo.setNum(result);
            gbaseJobInfo.setTableChangeFlag(t1.getTableChangeFlag());
            gbaseJobInfo.setSecnum(DateUtils.getDatePoor_SEC(gbaseJobInfo.getEndTime(), gbaseJobInfo.getCreateTime()));
            gbaseJobInfo.setRemark("自动_全量表入库：导入" + result + "条数据" +
                    DateUtils.getDatePoor(gbaseJobInfo.getEndTime(), gbaseJobInfo.getCreateTime()));

            // row count after load
            String gbasecount = "select count(*) num from " + schema2 + "." + tablename2 + ";";
            // FIX: close the ResultSet (was leaked)
            try (ResultSet r = gbaseUtil.query(gbasecount)) {
                if (r.next()) {
                    gbaseJobInfo.setGbasecount(r.getInt(1));
                }
            }

            // Zipper table: only when yesterday flagged it AND its end date is yesterday.
            if ("1".equals(gbaseJobInfoLast.getIsll()) && gbaseJobInfoLast.getLlenddt().equals(etldtQt)) {
                String deleteSql = "delete from datawarehouse.lql_" + schema2 + "_" + t1.getTableName() + " where etl_dt='" + etldt + "' ; ";
                String loadsql = "load data infile " +
                        " '" + odsurl + "/" + etldt + "/" + schema2 + "/_treated/P_103_" + tablename2 + "_" + etldt + ".del'  " +
                        "into table datawarehouse.lql_" + schema2 + "_" + tablename2 + " character set gb18030 " +
                        "data_format 3 fields terminated by '\\x1D' AUTOFILL;  ";
                gbaseUtil.update(deleteSql, null);
                int loadnum = gbaseUtil.update(loadsql, null);
                log.info("lql_" + schema2 + "_" + tablename2 + ":拉链表导入完成，一导入：" + loadnum + "条数据");
                gbaseJobInfo.setIsll("1");
                gbaseJobInfo.setLlstartdt(gbaseJobInfoLast.getLlstartdt());
                gbaseJobInfo.setLlenddt(etldt);
                gbaseJobInfo.setLlname("lql_" + schema2 + "_" + tablename2);
                // row count of the incremental (inc) companion table
                String inccount = "select count(*) num from datawarehouse_etl.l_inc_" + schema2 + "_" + tablename2 + ";";
                // FIX: close the ResultSet (was leaked)
                try (ResultSet ri = gbaseUtil.query(inccount)) {
                    if (ri.next()) {
                        gbaseJobInfo.setInccount(ri.getInt(1));
                    }
                }
                gbaseJobInfo.setIsinc(gbaseJobInfoLast.getIsinc());
            } else {
                // carry yesterday's flags forward unchanged
                gbaseJobInfo.setIsll(gbaseJobInfoLast.getIsll());
                gbaseJobInfo.setIsinc(gbaseJobInfoLast.getIsinc());
            }

            // replace any previous result row for this job
            gbaseJobInfoService.deleteGbaseJobInfoById(t1.getJobId());
            gbaseJobInfoService.insertGbaseJobInfo(gbaseJobInfo);
            log.info(schema2 + "." + tablename2 + "自动_全量表入库：导入" + result + "条数据");

        } catch (Exception e) {
            // FIX: error-level logging with the full stack trace (was log.info + toString())
            log.error(schema2 + "." + tablename2 + "出现了异常，跳出重新处理", e);
        }
    }

    /**
     * Re-runs one day's full snapshot for a table into the snapshot schema
     * "etl_yin": ensures &lt;table&gt;_ql / &lt;table&gt;_zbl exists (structure copied from
     * the source table plus a ql_dt date column), deletes rows already tagged
     * with this date (idempotent re-run), loads the delivery file and stamps
     * the new rows with ql_dt = etldt.
     *
     * Pure-incremental tables (sendCondition 2/5) and tables never delivered
     * for that date are skipped.
     *
     * @param schema     source schema of the delivery file
     * @param tablename2 table name
     * @param etldt      ETL date (yyyyMMdd)
     */
    public void qletldt(String schema, String tablename2, String etldt)
    {
        String schema2 = "etl_yin";   // target snapshot schema
        String xfurl = odsurl;        // delivery root (the historic per-year roots were removed)
        int con = 2;
        String type = "_ql";          // suffix: "_ql" full table, "_zbl" incremental-change table
        // Look up how this table was delivered on that date; skip pure increments.
        GbaseJobInfo gbaseJobInfo = new GbaseJobInfo();
        gbaseJobInfo.setSchemaName(schema);
        gbaseJobInfo.setEtlDate(etldt);
        gbaseJobInfo.setTableName(tablename2);
        List<GbaseJobInfo> glist = gbaseJobInfoService.selectGbaseJobInfoList(gbaseJobInfo);
        if (!glist.isEmpty()) {
            GbaseJobInfo gbaseJobInfo1 = glist.get(0);
            // FIX: parseInt avoids needless boxing (was Integer.valueOf)
            con = Integer.parseInt(gbaseJobInfo1.getSendCondition());
            if (con == 2 || con == 5) {
                log.info(schema2 + "." + tablename2 + etldt + "：增量");
                return;
            }
            if (con == 1) {
                log.info(schema2 + "." + tablename2 + etldt + "：增变量");
                type = "_zbl";
            }
        } else {
            log.info(schema2 + "." + tablename2 + etldt + "：未下发入库  跳出");
            return;
        }

        log.info("开始处理全量数据：" + schema2 + "." + tablename2 + " 日期：" + etldt);
        try {
            String sql_create = "create table if not exists " + schema2 + "." + tablename2 + type + " like " + schema + "." + tablename2 + " ;";
            gbaseUtil.update(sql_create, null);

            // Ensure the ql_dt tag column exists on the snapshot table.
            String gbasecount = "select COUNT(*) NUM from information_schema.COLUMNS " +
                    " where lower(TABLE_NAME) =lower('" + tablename2 + type + "') " +
                    " and lower(table_schema) =lower('" + schema2 + "') " +
                    " and lower(column_name) =lower('ql_dt') ;";
            // FIX: close the ResultSet (was leaked); also dropped a stray ';' after the if-block
            try (ResultSet rg = gbaseUtil.query(gbasecount)) {
                if (rg.next() && rg.getInt(1) == 0) {
                    log.info("没有ql_dt字段，需要添加：" + schema2 + "." + tablename2 + type);
                    String sql_alter = "alter table  " + schema2 + "." + tablename2 + type + " add column ql_dt varchar(8) ;";
                    gbaseUtil.update(sql_alter, null);
                }
            }

            // Make the load idempotent: drop any rows already tagged with this date.
            String deletesql = "delete from  " + schema2 + "." + tablename2 + type + " where ql_dt ='" + etldt + "';";
            int deletenum = gbaseUtil.update(deletesql, null);
            log.info(schema2 + "." + tablename2 + etldt + "：删除" + deletenum + "条数据");

            String sql = "load data infile " +
                    " '" + xfurl + "/" + etldt + "/" + schema + "/_treated/P_103_" + tablename2 + "_" + etldt + ".del'  " +
                    "into table " + schema2 + "." + tablename2 + type + " character set gb18030 " +
                    "data_format 3 fields terminated by '\\x1D' AUTOFILL;  ";
            // 20220831: that day's files were partially recovered into the date root,
            // not the usual schema folder — load from the root instead.
            if ("20220831".equals(etldt)) {
                sql = "load data infile " +
                        " '" + xfurl + "/" + etldt + "/P_103_" + tablename2 + "_" + etldt + ".del'  " +
                        "into table " + schema2 + "." + tablename2 + type + " character set gb18030 " +
                        "data_format 3 fields terminated by '\\x1D' AUTOFILL;  ";
            }
            int loadnum = gbaseUtil.update(sql, null);
            log.info(schema2 + "." + tablename2 + etldt + "：导入" + loadnum + "条数据");

            // Newly loaded rows have ql_dt NULL — stamp them with this date.
            String updatenull = "update  " + schema2 + "." + tablename2 + type + " set ql_dt ='" + etldt + "' where ql_dt is null ;";
            int result = gbaseUtil.update(updatenull, null);

            log.info(schema2 + "." + tablename2 + etldt + "：导入并更新" + result + "条数据");

        } catch (Exception e) {
            // FIX: error-level logging with the full stack trace (was log.info + toString())
            log.error(schema2 + "." + tablename2 + "出现了异常，跳出重新处理", e);
        }
    }

    /**
     * Restores a table from its daily backup file into a sibling table named
     * "&lt;table&gt;&lt;date&gt;_auto": creates it like the source table, truncates it,
     * then loads &lt;backupUrl&gt;/&lt;date&gt;/&lt;schema.table&gt;.txt into it.
     *
     * @param sname fully qualified table name, e.g. "im.rp_jxtb_month"
     * @param date  backup folder date (yyyyMMdd)
     */
    public void reBack(String sname, String date) {
        // FIX: split once and guard against a name without the "schema.table" form
        // (previously this threw ArrayIndexOutOfBoundsException outside the try).
        String[] parts = sname.split("\\.");
        if (parts.length < 2) {
            log.error("reBack：表名格式错误，应为 schema.table：" + sname);
            return;
        }
        String schema2 = parts[0];
        String tablename2 = parts[1];
        log.info("开始处理:" + schema2 + tablename2 + date);
        try {
            String sql_c = "create table if not exists " + schema2 + "." + tablename2 + date + "_auto like " + schema2 + "." + tablename2 + " ;";
            log.info("sql_create:" + sql_c);
            gbaseUtil.update(sql_c, null);

            log.info("创建表:" + sname + date + "_auto");
            String sql_delete = "truncate table " + schema2 + "." + tablename2 + date + "_auto ;";
            gbaseUtil.update(sql_delete, null);
            log.info("清空表:" + sname + date + "_auto");

            String sql = "load data infile " +
                    " '" + backupUrl + "/" + date + "/" + sname + ".txt'  " +
                    "into table " + schema2 + "." + tablename2 + date + "_auto character set gb18030 " +
                    "data_format 3 fields terminated by '\\x1D' AUTOFILL;  ";
            int loadnum = gbaseUtil.update(sql, null);
            log.info(schema2 + "." + tablename2 + date + "：导入" + loadnum + "条数据");

        } catch (Exception e) {
            // FIX: error-level logging with the full stack trace (was log.info + toString())
            log.error(schema2 + "." + tablename2 + "出现了异常，跳出重新处理", e);
        }
    }

    /**
     * Drops and recreates the "&lt;table&gt;_ql" snapshot table so its structure
     * matches the current source table, then adds the etldt column used to tag
     * each day's full snapshot. Failures are logged and swallowed.
     *
     * @param schema2    schema of the source table
     * @param tablename2 source table name
     */
    public void dropCreatetable(String schema2, String tablename2)
    {
        String qualified = schema2 + "." + tablename2;
        log.info("开始删除新建全量数据：" + qualified + " 日期：");
        try {
            String ddlDrop = "drop table if exists " + qualified + "_ql ;";
            String ddlCreate = "create table if not exists " + qualified + "_ql like " + qualified + " ;";
            String ddlAddColumn = "alter table  " + qualified + "_ql add column etldt varchar(8) ;";

            gbaseUtil.update(ddlDrop, null);
            log.info(qualified + "：表直接删除");

            gbaseUtil.update(ddlCreate, null);
            gbaseUtil.update(ddlAddColumn, null);

            log.info(qualified + "：删除新建完成");

        } catch (Exception e) {
            log.info(qualified + "出现了异常，跳出重新处理" + e.toString());
            return;
        }

    }

    /**
     * Type "1": automatic incremental-change (增变量 / large-text incremental)
     * load — the most complex path. Merge-by-primary-key through a temp table:
     * <pre>
     * create table if not exists temporarydb.&lt;schema&gt;_&lt;table&gt; like &lt;schema&gt;.&lt;table&gt;;
     * load data infile '&lt;ods&gt;/&lt;date&gt;/&lt;schema&gt;/_treated/P_103_&lt;table&gt;_&lt;date&gt;.del' into the temp table;
     * delete from &lt;schema&gt;.&lt;table&gt; where (pk cols) in (select pk cols from temporarydb.&lt;schema&gt;_&lt;table&gt;);
     * insert into &lt;schema&gt;.&lt;table&gt; select * from temporarydb.&lt;schema&gt;_&lt;table&gt;;
     * drop table temporarydb.&lt;schema&gt;_&lt;table&gt;;
     * </pre>
     * then the job-result row is replaced.
     *
     * @param t1 one delivered-table row (etlDate/schema/table already set)
     */
    public void t1(TableInfo t1) {
        log.info("BEGIN...当前处理表：" + t1.getSchemaName() + "_" + t1.getTableName());

        GbaseJobInfo gbaseJobInfo = new GbaseJobInfo();
        GbaseJobInfo gbaseJobInfoLast = new GbaseJobInfo();
        String etldt = t1.getEtlDate();
        String etldtQt = DateUtil.addDay(etldt, -1);   // previous day
        String schema2 = t1.getSchemaName();
        String tablename2 = t1.getTableName();
        gbaseJobInfo.setSchemaName(schema2);
        gbaseJobInfo.setTableName(tablename2);
        gbaseJobInfo.setSendCondition(t1.getSendCondition());
        // Yesterday's job record; kept because the automatic inc/zipper rebuild keyed
        // off it. FIX: the dead, commented-out implementation of that rebuild (it was
        // hard-disabled with "1==2" and never ran) has been removed from this method.
        gbaseJobInfo.setEtlDate(etldtQt);
        List<GbaseJobInfo> glist = gbaseJobInfoService.selectGbaseJobInfoList(gbaseJobInfo);
        if (!glist.isEmpty()) {
            gbaseJobInfoLast = glist.get(0);
        }

        // Primary-key and column metadata drive the delete-by-key merge.
        DbColumns dbColumns_query = new DbColumns();
        dbColumns_query.setTabschema(schema2);
        dbColumns_query.setTabname(tablename2);
        List<DbColumns> dbColumnsList_zj = dbColumnsService.selectzjList(dbColumns_query);
        List<DbColumns> dbColumnsList_zd = dbColumnsService.selectzdList(dbColumns_query);

        // FIX: build the comma-joined column lists with StringJoiner instead of the
        // ","-prefix + replace(",,","") hack. Verified upstream: type-1 tables always
        // have a primary key, so the lists are never empty.
        StringJoiner zdsJoiner = new StringJoiner(",");      // "col" list
        StringJoiner zdsAliasJoiner = new StringJoiner(","); // a."col" list
        for (DbColumns d1 : dbColumnsList_zd) {
            zdsJoiner.add("\"" + d1.getColname() + "\"");
            zdsAliasJoiner.add("a.\"" + d1.getColname() + "\"");
        }
        StringJoiner zjJoiner = new StringJoiner(",");       // raw PK column list
        for (DbColumns d2 : dbColumnsList_zj) {
            zjJoiner.add(d2.getColname());
        }
        String zds = zdsJoiner.toString();
        String zds_a = zdsAliasJoiner.toString();
        String zjs = zjJoiner.toString();
        log.info("当前处理表：" + schema2 + "_" + tablename2 + "主键是：" + zjs + "  zds：" + zds + "  zds_a：" + zds_a);

        // job start time (measured from just before the SQL work begins)
        gbaseJobInfo.setCreateTime(DateUtils.getNowDate());
        String sql_create = "create table if not exists temporarydb." + schema2 + "_" + tablename2 + " like " + schema2 + "." + tablename2;
        String sql_load = "load data infile " +
                " '" + odsurl + "/" + etldt + "/" + schema2 + "/_treated/P_103_" + tablename2 + "_" + etldt + ".del'  " +
                "into table temporarydb." + schema2 + "_" + tablename2 + " character set gb18030 " +
                "data_format 3 fields terminated by '\\x1D' AUTOFILL;  ";
        String sql_delete = "delete from " + schema2 + "." + tablename2 + " where (" + zjs + ") in (select " + zjs + " from temporarydb." + schema2 + "_" + tablename2 + ");";
        String sql_insert = "insert into " + schema2 + "." + tablename2 + " select * from temporarydb." + schema2 + "_" + tablename2;
        String sql_drop = "drop table temporarydb." + schema2 + "_" + tablename2;

        try {
            log.info("开始创建临时表 " + schema2 + "." + tablename2 + "：temporarydb." + schema2 + "_" + tablename2 + " SQL:" + sql_create);
            gbaseUtil.update(sql_create, null);
            log.info("temporarydb." + schema2 + "_" + tablename2 + ":开始加载数据到临时表" + " SQL:" + sql_load);
            int resultload = gbaseUtil.update(sql_load, null);
            // FIX: the table name was missing from this log line (was schema2+"_"+":...")
            log.info("temporarydb." + schema2 + "_" + tablename2 + ":导入完成，一导入：" + resultload + "条数据");

            int resultdelete1 = gbaseUtil.update(sql_delete, null);
            log.info("删除有变动的 " + schema2 + "." + tablename2 + "：" + resultdelete1 + " SQL:" + sql_delete);
            int resultinsert1 = gbaseUtil.update(sql_insert, null);
            log.info("插入有变动的 " + schema2 + "." + tablename2 + "：" + resultinsert1 + " SQL:" + sql_insert);

            gbaseUtil.update(sql_drop, null);
            log.info("开始删除临时表：temporarydb." + schema2 + "_" + tablename2);

            // fill in the job-result row
            gbaseJobInfo.setJobId(t1.getJobId());
            gbaseJobInfo.setJobName(t1.getJobName());
            gbaseJobInfo.setEndTime(DateUtils.getNowDate());
            gbaseJobInfo.setEtlDate(etldt);
            gbaseJobInfo.setNum(resultload);
            gbaseJobInfo.setTableChangeFlag(t1.getTableChangeFlag());
            gbaseJobInfo.setSecnum(DateUtils.getDatePoor_SEC(gbaseJobInfo.getEndTime(), gbaseJobInfo.getCreateTime()));
            gbaseJobInfo.setRemark("自动_增变量入库：合计处理了 " + resultinsert1 + "条耗时：" +
                    DateUtils.getDatePoor(gbaseJobInfo.getEndTime(), gbaseJobInfo.getCreateTime()));

            // row count after the merge
            String gbasecount = "select count(*) num from " + schema2 + "." + tablename2 + ";";
            // FIX: close the ResultSet (was leaked)
            try (ResultSet rg = gbaseUtil.query(gbasecount)) {
                if (rg.next()) {
                    gbaseJobInfo.setGbasecount(rg.getInt(1));
                }
            }

            // replace any previous result row for this job
            gbaseJobInfoService.deleteGbaseJobInfoById(t1.getJobId());
            gbaseJobInfoService.insertGbaseJobInfo(gbaseJobInfo);
            log.info(schema2 + "." + tablename2 + "自动_增变量入库：合计处理了 " + resultinsert1 + "条,耗时：" +
                    DateUtils.getDatePoor(gbaseJobInfo.getEndTime(), gbaseJobInfo.getCreateTime()));

        } catch (Exception e) {
            // FIX: error-level logging with the full stack trace (was log.info + toString())
            log.error(schema2 + "." + tablename2 + "出现了异常，跳出重新处理", e);
            return;
        }

        log.info("END...当前处理表：" + t1.getSchemaName() + "_" + t1.getTableName());
    }

    /**
     * Type "2"/"5": automatic incremental (append-by-day) load — the simplest
     * incremental path. Deletes the target rows already tagged with this ETL
     * date so a re-run is idempotent, bulk-loads the day's delivery file, then
     * replaces the job-result row.
     *
     * @param t1 one delivered-table row (etlDate/schema/table already set)
     */
    public void t2(TableInfo t1)
    {
        GbaseJobInfo gbaseJobInfo = new GbaseJobInfo();
        // job start time
        gbaseJobInfo.setCreateTime(DateUtils.getNowDate());
        String etldt = t1.getEtlDate();
        String schema2 = t1.getSchemaName();
        String tablename2 = t1.getTableName();
        gbaseJobInfo.setSchemaName(schema2);
        gbaseJobInfo.setTableName(tablename2);
        gbaseJobInfo.setSendCondition(t1.getSendCondition());
        // FIX: removed a dead lookup of yesterday's job record — its result (glist)
        // was never read, and the etlDate it required was overwritten below anyway.

        String truncateSQL = "delete from " + schema2 + "." + t1.getTableName() + " where ETL_DT = '" + etldt + "' ; ";
        String sql = "load data infile " +
                " '" + odsurl + "/" + etldt + "/" + schema2 + "/_treated/P_103_" + tablename2 + "_" + etldt + ".del'  " +
                "into table " + schema2 + "." + tablename2 + " character set gb18030 " +
                "data_format 3 fields terminated by '\\x1D' AUTOFILL;  ";
        try {
            log.info("开始删除表 " + schema2 + "." + tablename2 + "当天的数据");
            int resultdelete = gbaseUtil.update(truncateSQL, null);
            log.info(schema2 + "." + tablename2 + ":删除成功，开始导入" + resultdelete + "条数据");
            int result = gbaseUtil.update(sql, null);
            log.info(schema2 + "." + tablename2 + ":导入完成，一导入：" + result + "条数据");
            // fill in the job-result row
            gbaseJobInfo.setJobId(t1.getJobId());
            gbaseJobInfo.setJobName(t1.getJobName());
            gbaseJobInfo.setEndTime(DateUtils.getNowDate());
            gbaseJobInfo.setEtlDate(etldt);
            gbaseJobInfo.setNum(result);
            gbaseJobInfo.setTableChangeFlag(t1.getTableChangeFlag());
            gbaseJobInfo.setSecnum(DateUtils.getDatePoor_SEC(gbaseJobInfo.getEndTime(), gbaseJobInfo.getCreateTime()));
            gbaseJobInfo.setRemark("自动_增量表入库：删除 " + resultdelete + "条,导入 " + result + "条,耗时：" +
                    DateUtils.getDatePoor(gbaseJobInfo.getEndTime(), gbaseJobInfo.getCreateTime()));

            // incremental tables never maintain inc/zipper companion tables
            gbaseJobInfo.setIsinc("0");
            gbaseJobInfo.setIsll("0");
            // row count after load
            String gbasecount = "select count(*) num from " + schema2 + "." + tablename2 + ";";
            // FIX: close the ResultSet (was leaked)
            try (ResultSet r = gbaseUtil.query(gbasecount)) {
                if (r.next()) {
                    gbaseJobInfo.setGbasecount(r.getInt(1));
                }
            }
            // replace any previous result row for this job
            gbaseJobInfoService.deleteGbaseJobInfoById(t1.getJobId());
            gbaseJobInfoService.insertGbaseJobInfo(gbaseJobInfo);
            log.info(schema2 + "." + tablename2 + "自动_增量表入库：删除 " + resultdelete + "条,导入 " + result + "条,耗时：" +
                    DateUtils.getDatePoor(gbaseJobInfo.getEndTime(), gbaseJobInfo.getCreateTime()));

        } catch (Exception e) {
            // FIX: error-level logging with the full stack trace (was log.info + toString())
            log.error(schema2 + "." + tablename2 + "出现了异常，跳出重新处理", e);
        }
    }

    /**
     * Checks whether the table currently contains duplicate primary-key rows
     * (any PK group with more than one row).
     *
     * @param t1 table to check (schemaName/tableName set)
     * @return true when at least one duplicate PK group exists; false when no
     *         duplicates were found or the query failed
     */
    public boolean zjcf(TableInfo t1) {
        boolean iscf = false;
        log.info("BEGIN检查主键是否重复...当前处理表：" + t1.getSchemaName() + "_" + t1.getTableName());

        String schema2 = t1.getSchemaName();
        String tablename2 = t1.getTableName();
        // FIX: removed an unused GbaseJobInfo and an unused etldt local.

        // fetch the primary-key columns for the table
        DbColumns dbColumns_query = new DbColumns();
        dbColumns_query.setTabschema(schema2);
        dbColumns_query.setTabname(tablename2);
        List<DbColumns> dbColumnsList_zj = dbColumnsService.selectzjList(dbColumns_query);

        // FIX: comma-join via StringJoiner instead of the ","-prefix + replace hack.
        // Verified upstream: the checked tables always have a primary key.
        StringJoiner zjJoiner = new StringJoiner(",");
        for (DbColumns d1 : dbColumnsList_zj) {
            zjJoiner.add(d1.getColname());
        }
        String zjs = zjJoiner.toString();

        log.info("当前处理表：" + schema2 + "_" + tablename2 + "主键是：" + zjs);

        // any PK group with more than one row means duplicates
        String sql_select = "select " + zjs + " from " + schema2 + "." + tablename2 + " group by " + zjs + "  having count(*)>1;";

        log.info("当前SQL：" + sql_select);
        try {
            log.info("开始查询当前表是否存在主键重复");
            // FIX: close the ResultSet (was leaked); one returned row is enough to decide,
            // so the while+break loop is now a plain if.
            try (ResultSet rs = gbaseUtil.query(sql_select)) {
                if (rs.next()) {
                    iscf = true;
                    log.info("发现啦：" + schema2 + "." + tablename2);
                }
            }
        } catch (Exception e) {
            // FIX: error-level logging with the full stack trace (was log.info + toString())
            log.error(schema2 + "." + tablename2 + "出现了异常，跳出重新处理", e);
            return false;
        }

        return iscf;
    }


    /**
     * Refreshes per-table metadata in IM.DIM_ALLTABLES: primary-key count and
     * list, column-consistency flag between DB2 and GBase, current GBase row
     * count, last import time and send condition.
     * Intended to be run once after each table finishes loading; DB2-side row
     * counts are handled elsewhere.
     *
     * @param t descriptor of the table just loaded (schema/table name,
     *          completed time, send condition)
     */
    public void updateALLTABLES(TableInfo t){
        String schema2 = t.getSchemaName();
        String tablename2 = t.getTableName();
        int zjcount = 0;
        log.info("开始处理表："+schema2+"."+tablename2 );
        // Fetch the primary-key columns for this table from the metadata service.
        DbColumns dbColumns_query = new DbColumns();
        dbColumns_query.setTabschema(schema2);
        dbColumns_query.setTabname(tablename2);
        List<DbColumns> dbColumnsList_zj = dbColumnsService.selectzjList(dbColumns_query);
        // Build a comma-separated primary-key list while counting the key
        // columns; the ",," prefix produced by the first append is stripped
        // below. Tables reaching this point are expected to have a primary key.
        String zjs=",";

        for(DbColumns d1:dbColumnsList_zj){
            zjs = zjs + ","+d1.getColname();
            zjcount ++;
        }

        zjs = zjs.replace(",,","");
        // Compare column definitions on both sides; DB2 is the source of truth.
        // DB2 column name + type metadata:
        List<DbColumns> dbColumnsList = dbColumnsService.selectzdList(dbColumns_query);
        // GBase column name + type metadata:
        List<DbColumns> gbaseColumnsList = gbaseUtilService.selectGbaseColumnInfoList(dbColumns_query);
        // zdunsame: 1 = columns fully consistent, 0 = at least one mismatch
        // (missing column or incompatible type).
        int zdunsame = 1;
        if(dbColumnsList.size()>0){
            // Iterate DB2 columns; on the first incompatibility jump straight
            // out of both loops (zdunsame is already 0 at that point, so the
            // isFind check being skipped does not change the outcome).
            outterLoop: for(DbColumns d:dbColumnsList){
                int isFind = 0;
                for(DbColumns d2:gbaseColumnsList){
                    if(d2.getColname().toUpperCase().equals(d.getColname().toUpperCase())){
                        isFind = 1;
                    }
                }
                // Type-compatibility rules per DB2 type family:
                // BIGINT/INTEGER -> any GBase type containing "INT" is accepted.
                // NOTE(review): "BIGINT".contains("INT") is true, so the
                // !contains("BIGINT") clause below is redundant — confirm intent.
                if(d.getZdlx().toUpperCase().contains("BIGINT")||d.getZdlx().toUpperCase().contains("INTEGER")){
                    for(DbColumns d2:gbaseColumnsList){
                        // Same column name, different type, and not an INT variant -> mismatch.
                        if(d2.getColname().toUpperCase().equals(d.getColname().toUpperCase())&&!d2.getZdlx().toUpperCase().equals(d.getZdlx().toUpperCase())&&!d2.getZdlx().toUpperCase().contains("BIGINT")&&!d2.getZdlx().toUpperCase().contains("INT")){
                            zdunsame=0;
                            break outterLoop;
                        }
                    }
                } else if (d.getZdlx().toUpperCase().contains("CHARACTER")) {
                    // CHARACTER -> any GBase type containing "CHAR" is accepted.
                    for (DbColumns d2 : gbaseColumnsList) {
                        if (d2.getColname().toUpperCase().equals(d.getColname().toUpperCase()) && !d2.getZdlx().toUpperCase().equals(d.getZdlx().toUpperCase()) && !d2.getZdlx().toUpperCase().contains("CHAR")) {
                            zdunsame = 0;
                            break outterLoop;
                        }
                    }
                }else if(d.getZdlx().toUpperCase().contains("BLOB")) {
                    // BLOB -> any GBase type containing "BLOB" is accepted.
                    for (DbColumns d2 : gbaseColumnsList) {
                        if (d2.getColname().toUpperCase().equals(d.getColname().toUpperCase()) && !d2.getZdlx().toUpperCase().equals(d.getZdlx().toUpperCase()) && !d2.getZdlx().toUpperCase().contains("BLOB")) {
                            zdunsame = 0;
                            break outterLoop;
                        }
                    }
                }else{
                    // All other type families must match exactly (case-insensitive).
                    for(DbColumns d2:gbaseColumnsList){
                        if(d2.getColname().toUpperCase().equals(d.getColname().toUpperCase())&&!d2.getZdlx().toUpperCase().equals(d.getZdlx().toUpperCase())){
                            log.info("当前处理表："+schema2+"."+tablename2 +" 字段不一致啦 是："+d2.getColname().toUpperCase() +" gbase类型是："+d2.getZdlx().toUpperCase() +"  DB2类型："+d.getZdlx().toUpperCase());
                            zdunsame=0;
                            break outterLoop;
                        }
                    }
                }
                // A DB2 column missing entirely from GBase also marks the
                // table inconsistent (no early break: remaining columns may
                // still trigger the logged mismatch above).
                if(isFind == 0){
                    zdunsame=0;
                }
            }
        }

        log.info("当前处理表："+schema2+"."+tablename2 +"主键是："+zjs +"主键数量是："+zjcount +" 字段是否完全一致："+zdunsame);
        // Persist the computed metadata into IM.DIM_ALLTABLES.
        // NOTE(review): getCompletedTime() and getSendCondition() are
        // interpolated WITHOUT quotes — this only yields valid SQL if both are
        // numeric; confirm their types. The statement is also built by string
        // concatenation (no parameters); inputs come from internal metadata,
        // but a PreparedStatement would be safer.
        String updateSQL = "update IM.DIM_ALLTABLES set zdunsame = "+zdunsame+",etl_dt='"+DateUtils.getdateYYYYMMDD()+"',sjcount=(SELECT COUNT(*) num FROM "+schema2+"."+tablename2+"),lastimporttime="+t.getCompletedTime()+",rkfs="+t.getSendCondition()+", zjcount="+zjcount+",zjs='"+zjs+"' where UPPER(TABLE_SCHEMA)='"+schema2+"' and UPPER(TABLE_NAME)='"+tablename2+"' ";
        try {
            gbaseUtil.connectGBase();
            gbaseUtil.update(updateSQL,null);
        } catch (Exception e) {
            // Failure is logged and swallowed; the caller treats this update
            // as best-effort.
            log.info(schema2+"."+tablename2+"出现了异常，跳出重新处理"+e.toString());
            return ;
        }
    }


}
