package com.ruoyi.gbase.util;

import com.ruoyi.common.utils.DateUtils;
import com.ruoyi.gbase.domain.GbaseJobInfo;
import com.ruoyi.gbase.domain.TableInfo;
import com.ruoyi.gbase.service.IGbaseJobInfoService;
import com.ruoyi.gbase.service.ITableInfoService;
import com.ruoyi.gbase.domain.DbColumns;
import com.ruoyi.gbase.service.IDbColumnsService;
import com.ruoyi.nsh.util.DateUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;

/**
 * Utility class that runs the batch jobs which build "zipper" (拉链, SCD-2
 * style history) tables in GBase from daily ODS extract files.
 *
 * @author ys.yin
 * @since 2021-11-16
 *
 */
@Component
public class LlUtil {
	private static final Logger log = LoggerFactory.getLogger(LlUtil.class);

	// Thin JDBC wrapper around the GBase connection (connectGBase/query/update).
	@Autowired
	private GbaseUtil gbaseUtil;

	// Column metadata lookups: selectzdList = all columns, selectzjList = key columns.
	@Autowired
	private IDbColumnsService dbColumnsService;

	// Per-table daily load records (used to decide whether/how to run).
	@Autowired
	private ITableInfoService tableInfoService;

	// Job progress records updated after each processed day.
	@Autowired
	private IGbaseJobInfoService gbaseJobInfoService;


	/**
	 * Base path of the latest ODS download directory; daily extract files are
	 * loaded from "&lt;odsurl&gt;/&lt;yyyyMMdd&gt;/&lt;SCHEMA&gt;/_treated/P_103_&lt;TABLE&gt;_&lt;yyyyMMdd&gt;.del".
	 **/
	@Value("${odsurl.url}")
	private String odsurl;


	/**
	 * Incremental "catch-up" zipper run for one table: works out the last day
	 * already processed and replays every missing day up to yesterday. Meant
	 * as a safety net when the scheduled daily run failed for a while.
	 * <p>
	 * Behaviour depends on the table's send condition:
	 * <ul>
	 *   <li>"1" – incremental + large-text incremental table (most complex):
	 *       delegates to {@link #initZBLll(String, String, String)}</li>
	 *   <li>"0" – full-dump table (simplest): reloads each missing day into
	 *       datawarehouse.lql_&lt;schema&gt;_&lt;table&gt;</li>
	 *   <li>"2" / "5" – pure incremental tables: no zipper needed</li>
	 * </ul>
	 *
	 * @param schemaName source schema name (case-insensitive, upper-cased here)
	 * @param tableName  source table name (case-insensitive, upper-cased here)
	 */
	@Async
	public void lastLl(String schemaName, String tableName) {

		schemaName = schemaName.toUpperCase();
		tableName = tableName.toUpperCase();
		// Look for a load record from the day before yesterday to decide
		// whether this table is being ingested at all, and in which mode.
		TableInfo tableInfo = new TableInfo();
		tableInfo.setEtlDate(DateUtil.addDay(DateUtils.getdateYYYYMMDD(), -2));
		tableInfo.setSchemaName(schemaName);
		tableInfo.setTableName(tableName);
		List<TableInfo> tableInfoList = tableInfoService.selectTableInfoList(tableInfo);

		if (tableInfoList == null || tableInfoList.isEmpty()) {
			log.info("当前"+ schemaName + "." + tableName +" 没有找到最近入库情况，暂时不做拉链");
			return;
		}
		String sendCondition = tableInfoList.get(0).getSendCondition();
		if ("2".equals(sendCondition) || "5".equals(sendCondition)) {
			log.info("当前"+ schemaName + "." + tableName +" 为增量表，无需做拉链");
		} else if ("0".equals(sendCondition)) {
			log.info("当前"+ schemaName + "." + tableName +" 为全量表，直接做全部入库拉链 ");
			// If the zipper table does not exist (or is empty) just bail out.
			try {
				gbaseUtil.connectGBase();
				String sqlmax = "select max(ETL_DT) ETL_DT from datawarehouse.lql_" + schemaName + "_" + tableName + ";";
				log.info("执行查询" + schemaName + "." + tableName + " SQL：" + sqlmax);
				ResultSet r = gbaseUtil.query(sqlmax);
				String maxetl = "";
				if (r.next()) {
					maxetl = r.getString(1);
				}
				if (maxetl == null || maxetl.isEmpty()) {
					log.info("当前" + schemaName + "." + tableName + " 拉链表没有数据 ");
					return;
				}
				// End date is yesterday.
				String enddt = DateUtil.addDay(DateUtils.getdateYYYYMMDD(), -1);
				log.info("拉链表全量跑数据：" + schemaName + "." + tableName);

				// Start from the day AFTER the last loaded one.
				maxetl = DateUtil.addDay(maxetl, 1);
				log.info(schemaName + "." + tableName + " 最新拉链日期是：" + maxetl);
				// All days between the two dates, sorted ascending.
				List<String> dates = DateUtils.getBetweenDates(maxetl.replace("-", ""), enddt);
				log.info("一共需要执行" + dates.size() + "天的数据，第一天是：" + maxetl + " 最后一天是：" + enddt);
				for (String etldt : dates) {
					// Delete-then-load keeps each day idempotent on retry.
					String sqlfor1_1 = "delete from datawarehouse.lql_" + schemaName + "_" + tableName + " where etl_dt='"+etldt+"' ;";
					String sqlfor1_2 = "load data infile '" + odsurl + "/" + etldt + "/" + schemaName + "/_treated/P_103_" + tableName + "_" + etldt + ".del' " +
							"into table datawarehouse.lql_" + schemaName + "_" + tableName + " character set gb18030  data_format 3 fields terminated by '\\x1D' AUTOFILL;";
					log.info("执行 导入" + etldt + "的全量数据  " + schemaName + "." + tableName + " SQL：" + sqlfor1_1 + "\n" + sqlfor1_2);
					gbaseUtil.update(sqlfor1_1, null);
					int updatenum = gbaseUtil.update(sqlfor1_2, null);
					log.info("执行" + etldt + "  " + schemaName + "." + tableName + " 插入全数据数量：" + updatenum+"   处理结束，开始下一天的");
				}
				log.info(schemaName + "." + tableName + "  全量拉链表全部执行完毕");
			} catch (Exception e) {
				// Most likely the lql_* table does not exist yet; log the
				// actual cause instead of swallowing it silently.
				log.error(schemaName + "." + tableName + "出现了异常，可能全量拉链表不存在", e);
			}
		} else if ("1".equals(sendCondition)) {
			log.info("当前" + schemaName + "." + tableName + " 为增变量表，开始做拉链");
			// One-shot initialisation path; it is resumable, so the daily
			// catch-up can reuse it as well. (The old per-day merge variant
			// that lived here as commented-out code was superseded by it.)
			initZBLll("20181231", schemaName, tableName);
		}
	}


	/**
	 * Incremental-table ("增变量") batch: loads every missing daily delta file
	 * into datawarehouse_etl.L_INC_&lt;schema&gt;_&lt;table&gt; starting right after
	 * hisdate, then rebuilds the whole zipper table
	 * datawarehouse.L_&lt;schema&gt;_&lt;table&gt; from the history snapshot plus the
	 * accumulated deltas in one shot. Also used for initialisation; for
	 * l_cbod_saacnacn this produced ~58M rows in about half an hour.
	 *
	 * @param hisdate    snapshot date of the backup data; the matching table
	 *                   his_&lt;schema&gt;_&lt;table&gt;_20181231 must exist
	 * @param schemaName source schema name (upper-cased internally)
	 * @param tableName  source table name (upper-cased internally)
	 */
	@Async
	public void initZBLll(String hisdate, String schemaName, String tableName) {
		// End date is yesterday.
		String enddt = DateUtil.addDay(DateUtils.getdateYYYYMMDD(), -1);
		GbaseJobInfo gbaseJobInfo = new GbaseJobInfo();
		gbaseJobInfo.setEtlDate(enddt);
		gbaseJobInfo.setSchemaName(schemaName);
		gbaseJobInfo.setTableName(tableName);
		List<GbaseJobInfo> glist = gbaseJobInfoService.selectGbaseJobInfoList(gbaseJobInfo);
		if (glist.isEmpty()) {
			// Yesterday's load has not finished yet -> not allowed to run.
			return;
		}
		GbaseJobInfo updateg = glist.get(0);

		// Upper-case ONCE, before any SQL is built, so every statement
		// (including the max(etl_dt) lookup below) names the same table.
		schemaName = schemaName.toUpperCase();
		tableName = tableName.toUpperCase();

		try {
			// Last day already present in the L_INC_ staging table.
			String llLastDate = hisdate.replace("-", "");
			gbaseUtil.connectGBase();
			// BUGFIX: "coaesce" -> "coalesce" (the typo broke the SQL).
			String selectETLDT = "select coalesce(max(etl_dt),'20181231') etl_dt from  datawarehouse_etl.L_INC_" + schemaName + "_" + tableName + ";";
			ResultSet ri1 = gbaseUtil.query(selectETLDT);
			if (ri1.next()) {
				llLastDate = ri1.getString(1);
				log.info("获取到已经处理过的最新的拉链日期:" + llLastDate);
			}

			// All days between the last processed day and yesterday, sorted
			// ascending; drop the first one because it was run already.
			List<String> dates = DateUtils.getBetweenDates(llLastDate, enddt);
			if (!dates.isEmpty()) {
				dates.remove(0);
			}

			// 1. Load the per-day delta files into the INC staging table.
			log.info(" 如果表不存在就先创建：datawarehouse_etl.L_INC_" + schemaName + "_" + tableName);
			String sql_create = "create table if not exists datawarehouse_etl.L_INC_" + schemaName + "_" + tableName + " like " + schemaName + "." + tableName;
			gbaseUtil.update(sql_create, null);

			if (dates.size() > 0) {
				log.info("一共需要执行" + dates.size() + "天的数据，第一天是：" + dates.get(0));
				for (String etldt : dates) {
					// Delete-then-load keeps each day idempotent on retry.
					String sqlfor1_1 = "delete from  datawarehouse_etl.L_INC_" + schemaName + "_" + tableName + " where etl_dt='" + etldt + "';";
					String sqlfor1_2 = "load data infile '" + odsurl + "/" + etldt + "/" + schemaName + "/_treated/P_103_" + tableName + "_" + etldt + ".del' " +
							"into table datawarehouse_etl.L_INC_" + schemaName + "_" + tableName + " character set gb18030  data_format 3 fields terminated by '\\x1D' AUTOFILL;";
					log.info("执行 导入" + etldt + "的增量数据  " + schemaName + "." + tableName + " SQL："  + "\n" + sqlfor1_2);
					gbaseUtil.update(sqlfor1_1, null);
					int updatenum = gbaseUtil.update(sqlfor1_2, null);
					log.info("执行" + etldt + "  " + schemaName + "." + tableName + " 插入增变量数据数量：" + updatenum);

					// Persist progress after every day so a crash can resume.
					updateg.setIncstartdt(hisdate);
					updateg.setIncenddt(etldt);
					updateg.setIncname("L_INC_" + schemaName + "_" + tableName);
					updateg.setIsinc("1");
					// Going through this method always marks the zipper flag.
					updateg.setIsll("1");
					String inccount = "select count(*) num from  datawarehouse_etl.L_INC_" + schemaName + "_" + tableName + ";";
					ResultSet ri = gbaseUtil.query(inccount);
					if (ri.next()) {
						int i = ri.getInt(1);
						updateg.setInccount(i);
						log.info("更新setInccount:" + schemaName + "." + tableName + " 数据量:" + i);
					}
					gbaseJobInfoService.updateGbaseJobInfo(updateg);
					log.info("更新库 " + schemaName + "." + tableName + " 拉链日期");
				}
				log.info(schemaName + "." + tableName + "  所有增变量数据插入inc完毕");
			} else {
				log.info(schemaName + "." + tableName + "  inc不需要跑了，就是最新的，直接做拉链");
			}

			// 2. Rebuild the zipper table from snapshot + accumulated deltas.
			log.info("跑完inc开始跑拉链数：" + schemaName + "." + tableName);
			DbColumns dbColumns_query = new DbColumns();
			dbColumns_query.setTabschema(schemaName);
			dbColumns_query.setTabname(tableName);
			List<DbColumns> dbColumnsList_zd = dbColumnsService.selectzdList(dbColumns_query);
			List<DbColumns> dbColumnsList_zj = dbColumnsService.selectzjList(dbColumns_query);
			// zds     -> "C1","C2",...            plain column list
			// zds_a   -> a."C1",a."C2",...        alias-qualified column list
			// zjs_and -> a."K"=b."K" and ...      key-join predicate
			// zjs     -> "K1","K2",...            key column list
			String zds = ",", zds_a = ",", zjs_and = " and", zjs = ",";
			for (DbColumns d1 : dbColumnsList_zd) {
				zds = zds + ",\"" + d1.getColname() + "\"";
				zds_a = zds_a + ",a.\"" + d1.getColname() + "\"";
			}
			for (DbColumns d2 : dbColumnsList_zj) {
				zjs_and = zjs_and + " and a.\"" + d2.getColname() + "\"=b.\"" + d2.getColname() + "\"";
				zjs = zjs + ",\"" + d2.getColname() + "\"";
			}
			// Strip the leading separators introduced by the seed values.
			zds = zds.replace(",,", "");
			zds_a = zds_a.replace(",,", "");
			zjs_and = zjs_and.replace("and and", "");
			zjs = zjs.replace(",,", "");
			log.info("zds：" + zds);
			log.info("zds_a：" + zds_a);
			log.info("zjs：" + zjs);

			// Drop and recreate the target zipper table with start/end dates.
			String sql1 = "drop table if exists  datawarehouse.L_" + schemaName + "_" + tableName + ";";
			String sql2 = "create table datawarehouse.L_" + schemaName + "_" + tableName + " like " + schemaName + "." + tableName + ";";
			String sql3 = "alter table datawarehouse.L_" + schemaName + "_" + tableName + " add column start_dt varchar(10);";
			String sql4 = "alter table datawarehouse.L_" + schemaName + "_" + tableName + " add column end_dt varchar(10);";
			log.info("执行" + schemaName + "." + tableName + " SQL：" + sql1 + "\n" + sql2 + "\n" + sql3 + "\n" + sql4);
			gbaseUtil.update(sql1, null);
			gbaseUtil.update(sql2, null);
			gbaseUtil.update(sql3, null);
			gbaseUtil.update(sql4, null);
			// Chain consecutive delta versions of each key: a row's end_dt is
			// the day before the NEXT version's etl_dt; still-open rows get
			// the 29990102-1 sentinel. (BUGFIX: coalesce typo here too.)
			String sql5 = "insert into datawarehouse.L_" + schemaName + "_" + tableName +
					" select " + zds_a + ",a.etl_dt start_dt,  TO_CHAR( DATE_ADD(coalesce(b.etl_dt,'29990102'),INTERVAL -1 DAY),'YYYYMMDD') end_dt " +
					" from (select  " + zds_a + ",row_number() over (partition by " + zjs + " order by etl_dt asc ) rn " +
					"        from datawarehouse_etl.l_inc_" + schemaName + "_" + tableName + "  a )  a " +
					" left join (select  " + zjs + ",etl_dt,row_number() over (partition by " + zjs + " order by etl_dt asc ) rn  " +
					" from datawarehouse_etl.l_inc_" + schemaName + "_" + tableName + "   )  b on " + zjs_and + " and a.rn+1=b.rn ;";
			// Snapshot rows are closed by each key's FIRST delta version.
			String sql6 = "insert into datawarehouse.L_" + schemaName + "_" + tableName +
					" select  " + zds_a + "," + hisdate + " start_dt,  TO_CHAR( DATE_ADD(coalesce(b.etl_dt,'29990102'),INTERVAL -1 DAY),'YYYYMMDD') end_dt " +
					" from datawarehouse_etl.his_" + schemaName + "_" + tableName + "_20181231  a " +
					" left join (select " + zjs + ",etl_dt,row_number() over (partition by " + zjs + " order by etl_dt asc) rn " +
					" from datawarehouse_etl.l_inc_" + schemaName + "_" + tableName + ") b on " + zjs_and + " and b.rn=1 ;";
			// BUGFIX: the original logged sql5 twice and sql6 twice.
			log.info("执行" + schemaName + "." + tableName + " SQL：" + sql5);
			gbaseUtil.update(sql5, null);
			log.info("执行" + schemaName + "." + tableName + " SQL：" + sql6);
			gbaseUtil.update(sql6, null);

			log.info(schemaName + "." + tableName + "  拉链表全部执行完毕");

			updateg.setLlstartdt(hisdate);
			updateg.setLlenddt(enddt);
			updateg.setLlname("L_" + schemaName + "_" + tableName);
			updateg.setIsll("1");
			String llccount = "select count(*) num from  datawarehouse.L_" + schemaName + "_" + tableName + ";";
			ResultSet rl = gbaseUtil.query(llccount);
			if (rl.next()) {
				int i = rl.getInt(1);
				updateg.setLlcount(i);
				log.info("更新setLlcount:" + schemaName + "." + tableName + " 数据量:" + i);
			}
			gbaseJobInfoService.updateGbaseJobInfo(updateg);
			log.info("更新库 " + schemaName + "." + tableName + " 拉链日期");
		} catch (Exception e) {
			// Single boundary catch (the old inner try/catch duplicated this
			// exact handling); log with the full stack trace.
			log.error(schemaName + "." + tableName + "出现了异常，跳出重新处理", e);
		}
	}

	/**
	 * Full-dump ("全量") zipper run from scratch: replays every daily full
	 * extract after hisdate up to yesterday into
	 * datawarehouse.lql_&lt;schema&gt;_&lt;table&gt;, updating the job-info record after
	 * each day so an interrupted run resumes where it stopped.
	 *
	 * @param hisdate    snapshot date of the backup data; the matching table
	 *                   his_&lt;schema&gt;_&lt;table&gt;_20181231 must exist
	 * @param schemaName source schema name (upper-cased internally)
	 * @param tableName  source table name (upper-cased internally)
	 */
	@Async
	public void initQLll(String hisdate, String schemaName, String tableName) {
		try {
			// End date is yesterday.
			String enddt = DateUtil.addDay(DateUtils.getdateYYYYMMDD(), -1);
			GbaseJobInfo gbaseJobInfo = new GbaseJobInfo();
			gbaseJobInfo.setEtlDate(enddt);
			gbaseJobInfo.setSchemaName(schemaName);
			gbaseJobInfo.setTableName(tableName);
			List<GbaseJobInfo> glist = gbaseJobInfoService.selectGbaseJobInfoList(gbaseJobInfo);
			if (glist.isEmpty()) {
				// Yesterday's load has not finished yet -> not allowed to run.
				return;
			}
			GbaseJobInfo updateg = glist.get(0);
			schemaName = schemaName.toUpperCase();
			tableName = tableName.toUpperCase();
			hisdate = hisdate.replace("-", "");
			gbaseUtil.connectGBase();
			log.info("拉链表全量跑数据：" + schemaName + "." + tableName);
			// Resume from the last finished day when the job ran before; that
			// day is re-run on purpose, in case a previous run died between
			// the delete and the load. (startsWith replaces the old
			// substring(0,2) check, which threw on 1-char values.)
			List<String> dates;
			if (null != updateg.getLlenddt() && updateg.getLlenddt().startsWith("20")) {
				dates = DateUtils.getBetweenDates(updateg.getIncenddt(), enddt);
			} else {
				dates = DateUtils.getBetweenDates(hisdate, enddt);
				// Drop the first day: that is the hisdate snapshot itself.
				if (!dates.isEmpty()) {
					dates.remove(0);
				}
			}

			// Loop-invariant: the target table only needs creating once.
			String sql_create = "create table if not exists datawarehouse.lql_" + schemaName + "_" + tableName + " like " + schemaName + "." + tableName + ";";
			gbaseUtil.update(sql_create, null);

			log.info("一共需要执行" + dates.size() + "天的数据，第一天是：" + hisdate + " 最后一天是：" + enddt);
			for (String etldt : dates) {
				// Delete-then-load keeps each day idempotent on retry.
				String sqlfor1_1 = "delete from datawarehouse.lql_" + schemaName + "_" + tableName + " where etl_dt='" + etldt + "' ;";
				String sqlfor1_2 = "load data infile '" + odsurl + "/" + etldt + "/" + schemaName + "/_treated/P_103_" + tableName + "_" + etldt + ".del' " +
						"into table datawarehouse.lql_" + schemaName + "_" + tableName + " character set gb18030  data_format 3 fields terminated by '\\x1D' AUTOFILL;";
				log.info("执行 导入" + etldt + "的全量数据  " + schemaName + "." + tableName + " SQL：" + sqlfor1_1 + "\n" + sqlfor1_2);
				gbaseUtil.update(sqlfor1_1, null);
				int updatenum = gbaseUtil.update(sqlfor1_2, null);
				log.info("执行" + etldt + "  " + schemaName + "." + tableName + " 插入全数据数量：" + updatenum + "   处理结束，开始下一天的");

				// Persist progress so a crashed run resumes at etldt.
				updateg.setLlstartdt(hisdate);
				updateg.setLlenddt(etldt);
				updateg.setLlname("lql_" + schemaName + "_" + tableName);
				// Going through this method always marks the zipper flag
				// (the original set it twice; once is enough).
				updateg.setIsll("1");
				String llccount = "select count(*) num from  datawarehouse.LQL_" + schemaName + "_" + tableName + ";";
				ResultSet rl = gbaseUtil.query(llccount);
				if (rl.next()) {
					int i = rl.getInt(1);
					updateg.setLlcount(i);
					log.info("更新setLlcount:" + schemaName + "." + tableName + " 数据量:" + i);
				}
				gbaseJobInfoService.updateGbaseJobInfo(updateg);
				log.info("更新库 " + schemaName + "." + tableName + " 拉链日期");
			}
			log.info(schemaName + "." + tableName + "  全量拉链表全部执行完毕");
		} catch (Exception e) {
			// Most likely the lql_* table does not exist; keep the cause.
			log.error(schemaName + "." + tableName + "出现了异常，可能全量拉链表不存在", e);
		}
	}

	/**
	 * Variant of the full-dump zipper run for the CCFS2 MPUR table with the
	 * key columns hard-coded (BANK, CARD_NBR, MP_NUMBER). For each missing
	 * day it loads the delta file into a temporary table, merges it into the
	 * rolling full snapshot datawarehouse.his_ccfs2_mpur_20230831 (delete
	 * changed keys, insert the new versions), then appends the snapshot
	 * tagged with the day into datawarehouse.lql_ccfs2_mpur.
	 * <p>
	 * NOTE(review): the snapshot and zipper table names are hard-coded, so
	 * despite the schema/table parameters this only works for CCFS2.MPUR.
	 *
	 * @param hisdate    snapshot date of the backup data; the table
	 *                   his_ccfs2_mpur_20230831 must exist
	 * @param schemaName source schema name (upper-cased internally)
	 * @param tableName  source table name (upper-cased internally)
	 */
	@Async
	public void initQLll_zbl(String hisdate, String schemaName, String tableName) {
		// End date is yesterday.
		String enddt = DateUtil.addDay(DateUtils.getdateYYYYMMDD(), -1);
		GbaseJobInfo gbaseJobInfo = new GbaseJobInfo();
		gbaseJobInfo.setEtlDate(enddt);
		gbaseJobInfo.setSchemaName(schemaName);
		gbaseJobInfo.setTableName(tableName);
		List<GbaseJobInfo> glist = gbaseJobInfoService.selectGbaseJobInfoList(gbaseJobInfo);
		if (glist.isEmpty()) {
			// Yesterday's load has not finished yet -> not allowed to run.
			return;
		}
		GbaseJobInfo updateg = glist.get(0);
		schemaName = schemaName.toUpperCase();
		tableName = tableName.toUpperCase();
		hisdate = hisdate.replace("-", "");
		gbaseUtil.connectGBase();

		// Resume from the last finished day when the job ran before; that day
		// is re-run on purpose, in case a previous run died mid-day.
		// (startsWith replaces substring(0,2), which threw on short values.)
		List<String> dates;
		if (null != updateg.getLlenddt() && updateg.getLlenddt().startsWith("20")) {
			dates = DateUtils.getBetweenDates(updateg.getIncenddt(), enddt);
		} else {
			dates = DateUtils.getBetweenDates(hisdate, enddt);
			// Drop the first day: that is the hisdate snapshot itself.
			if (!dates.isEmpty()) {
				dates.remove(0);
			}
		}
		// Hard-coded primary-key columns of CCFS2.MPUR.
		String zjs = "BANK,CARD_NBR,MP_NUMBER";
		log.info("一共需要执行" + dates.size() + "天的数据，第一天是：" + hisdate + " 最后一天是：" + enddt);
		for (String etldt : dates) {
			String tmpTable = "temporarydb." + schemaName + "__" + tableName;
			String sql_create = "create table if not exists " + tmpTable + " like " + schemaName + "." + tableName;
			String sql_load = "load data infile " +
					" '" + odsurl + "/" + etldt + "/" + schemaName + "/_treated/P_103_" + tableName + "_" + etldt + ".del'  " +
					"into table " + tmpTable + " character set gb18030 " +
					"data_format 3 fields terminated by '\\x1D' AUTOFILL;  ";
			String sql_delete = "delete from datawarehouse.his_ccfs2_mpur_20230831 where (" + zjs + ") in (select " + zjs + " from " + tmpTable + ");";
			String sql_insert = "insert into datawarehouse.his_ccfs2_mpur_20230831 select * from " + tmpTable;

			String sql_drop = "drop table " + tmpTable;

			try {
				log.info("开始创建临时表 " + schemaName + "." + tableName + "：" + tmpTable + " SQL:" + sql_create);
				gbaseUtil.update(sql_create, null);
				log.info(tmpTable + ":开始加载数据到临时表" + " SQL:" + sql_load);
				int resultload = gbaseUtil.update(sql_load, null);
				// BUGFIX: the original log dropped the table name here.
				log.info(tmpTable + ":导入完成，一导入：" + resultload + "条数据");

				// Merge: remove keys that changed, then insert their new rows.
				int resultdelete1 = gbaseUtil.update(sql_delete, null);
				log.info("删除有变动的 " + schemaName + "." + tableName + "：" + resultdelete1 + " SQL:" + sql_delete);
				int resultinsert1 = gbaseUtil.update(sql_insert, null);
				log.info("插入有变动的 " + schemaName + "." + tableName + "：" + resultinsert1 + " SQL:" + sql_insert);

				gbaseUtil.update(sql_drop, null);
				log.info("开始删除临时表：" + tmpTable);

				// Append today's full snapshot, tagged with etldt, to the
				// zipper table.
				String sqlfor1_1 = "insert into datawarehouse.lql_ccfs2_mpur select *,'" + etldt + "' from datawarehouse.his_ccfs2_mpur_20230831;";
				gbaseUtil.update(sqlfor1_1, null);

			} catch (Exception e) {
				log.error(schemaName + "." + tableName + "出现了异常，可能全量拉链表不存在", e);
				return;
			}

		}
	}

	/**
	 * Finds every existing zipper table (l_* / lql_*) in the datawarehouse
	 * schema and triggers {@link #lastLl(String, String)} for each one so it
	 * catches up to yesterday automatically.
	 */
	public void autoRunLL() {
		try {
			log.info("自动执行所有拉链表");
			gbaseUtil.connectGBase();
			String querySql = "select TABLE_NAME from information_schema.TABLES where lower(TABLE_SCHEMA) ='datawarehouse' " +
					" and table_type ='BASE TABLE' and (lower(table_name) like 'l_%'" +
					" or  lower(table_name) like 'lql_%') " +
					" and lower(table_name)  like '%_%_%' ";
			ResultSet r = gbaseUtil.query(querySql);
			while (r.next()) {
				// Names look like <prefix>_<SCHEMA>_<TABLE>; the table part
				// may itself contain underscores, so strip the
				// "<prefix>_<schema>_" head instead of splitting blindly.
				String longName = r.getString(1);
				String schemaName = longName.split("_")[1];
				String tableName = longName.replace(longName.split("_")[0] + "_" + schemaName + "_", "");
				log.info("schemaName：" + schemaName);
				log.info("tableName：" + tableName);
				log.info("开始做这个表的拉链数据：" + schemaName + "." + tableName);
				lastLl(schemaName, tableName);
			}

			log.info("自动执行所有拉链表结束");
		} catch (Exception e) {
			// BUGFIX: concatenating e.getStackTrace() printed the array's
			// identity hash, not the trace; let SLF4J format the exception.
			log.error("出现了异常", e);
		}
	}

	/**
	 * Lists the names of all existing zipper tables (l_* / lql_*) in the
	 * datawarehouse schema.
	 *
	 * @return the table names, or {@code null} when the lookup failed
	 *         (null kept for backward compatibility with existing callers)
	 */
	public List<String> getAllLl() {
		try {
			List<String> list = new ArrayList<String>();
			gbaseUtil.connectGBase();
			String querySql = "select TABLE_NAME from information_schema.TABLES where lower(TABLE_SCHEMA) ='datawarehouse' " +
					" and table_type ='BASE TABLE' and (lower(table_name) like 'l_%'" +
					" or  lower(table_name) like 'lql_%') " +
					" and lower(table_name)  like '%_%_%' ";
			ResultSet r = gbaseUtil.query(querySql);
			while (r.next()) {
				list.add(r.getString(1));
			}
			return list;
		} catch (Exception e) {
			// BUGFIX: concatenating e.getStackTrace() printed the array's
			// identity hash, not the trace; let SLF4J format the exception.
			log.error("出现了异常", e);
			return null;
		}
	}

	/**
	 * Ad-hoc demo: shows how the stored-procedure call template gets its
	 * date placeholders substituted after upper-casing.
	 */
	public static void main(String[] args) {
		String template = "call yzbank.PROC_FIN_CALJSRJ4JXKHXT('${startDate}','${endDate}')";
		String resolved = template.toUpperCase()
				.replace("${STARTDATE}", "20170101")
				.replace("${ENDDATE}", "20171206");
		System.out.println(resolved);
	}
}
