package cn.wanda.projects.phoendb;


import com.alibaba.fastjson.JSON;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.parser.Feature;
import com.alibaba.fastjson.serializer.SerializerFeature;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Test;

import java.io.Serializable;
import java.security.NoSuchAlgorithmException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Replays maxwell (MySQL binlog -> JSON) change events into Apache Phoenix.
 *
 * <p>Each MySQL row is stored under a synthetic {@code rowkey} column: the
 * first {@value #ROWKEY_LENGTH} hex chars of the SHA-1 of the concatenated
 * column values. Because the key is derived from the values, an {@code update}
 * is replayed as delete-old-row + upsert-new-row.
 *
 * <p>NOTE(review): class name "Phoneix" (sic) is kept — callers reference it.
 */
public class Phoneix extends BaseDao implements Serializable {
    protected static final Log logs = LogFactory.getLog(Phoneix.class);

    // Shared Phoenix JDBC connection. NOTE(review): the field is static but is
    // assigned from the constructor, so the last-constructed instance wins —
    // confirm only a single Phoneix instance is ever created.
    private static Connection conn = null;

    // Number of leading hex characters of the SHA-1 digest used as the rowkey.
    private static final int ROWKEY_LENGTH = 10;

    /**
     * Opens the shared Phoenix connection.
     *
     * @param url Phoenix JDBC url, passed straight to {@code BaseDao.getconnect}
     */
    public Phoneix(String url) {
        logs.info("开始建立url" + url);
        try {
            conn = getconnect(url);
            logs.info("phoenixdb conncet is establish");
        } catch (SQLException e) {
            // Log (instead of printStackTrace) so the failure reaches the app log.
            logs.error("failed to open phoenix connection: " + url, e);
        }
    }

    /**
     * Executes one Phoenix statement on the shared connection and commits.
     *
     * <p>Fix: the previous version called {@code closeAll(conn, pstmt, null)} in
     * {@code finally}, which closed the shared static connection after the FIRST
     * statement and made every later call fail on a closed connection. Only the
     * statement is closed now; the connection stays open for the process.
     *
     * @param preparedSql fully rendered SQL text (see the injection note on
     *                    {@link #JSONphoenix})
     */
    private void executeSQL(String preparedSql) {
        if (conn == null) {
            // Constructor failed to connect; avoid NPE on synchronized(conn).
            logs.error("phoenix connection not established, dropping sql: " + preparedSql);
            return;
        }
        synchronized (conn) {
            PreparedStatement pstmt = null;
            try {
                pstmt = conn.prepareStatement(preparedSql);
                pstmt.execute();
                conn.commit();
            } catch (SQLException e) {
                logs.error("phoenix statement failed: " + preparedSql, e);
            } finally {
                if (pstmt != null) {
                    try {
                        pstmt.close();
                    } catch (SQLException e) {
                        logs.error("failed to close statement", e);
                    }
                }
            }
        }
    }

    /**
     * Returns the symmetric difference of the two sets (elements present in
     * exactly one of them). Replaces a hand-rolled map-counting implementation
     * with the equivalent three set operations. Kept raw-typed for source
     * compatibility with existing callers.
     */
    public static Set getDiffent(Set collmax, Set collmin) {
        Set union = new HashSet(collmax);
        union.addAll(collmin);
        Set intersection = new HashSet(collmax);
        intersection.retainAll(collmin);
        union.removeAll(intersection);
        return union;
    }

    /**
     * Alias of {@link #getDiffent}: the result is already a duplicate-free set,
     * so the extra copy only preserves the historical signature.
     */
    public static Set getDiffentNoDuplicate(Set collmax, Set collmin) {
        return new HashSet(getDiffent(collmax, collmin));
    }

    /**
     * Symmetric difference (xor) of two sets — the generic, type-safe variant of
     * {@link #getDiffent}.
     */
    public static <T extends Comparable<?>> Set<T> getXor(final Set<T> setA, final Set<T> setB) {
        Set<T> union = new HashSet<>(setA);
        union.addAll(setB);
        Set<T> inter = new HashSet<>(setA);
        inter.retainAll(setB);
        union.removeAll(inter);
        return union;
    }

    /**
     * Dispatches one maxwell CDC event to the matching Phoenix statement
     * (insert / update / delete / table-alter / table-create / table-drop).
     *
     * <p>Fix: the former {@code @Test} annotation was removed — JUnit 4 test
     * methods must be parameterless, so it could never run and only broke test
     * discovery.
     *
     * <p>SECURITY NOTE(review): every statement is built by string-concatenating
     * values taken from the event, so a value containing a quote breaks (or
     * injects into) the SQL. Data values should be bound via
     * {@code PreparedStatement} parameters; left unchanged here because it
     * alters the emitted SQL — fix separately after confirming with callers.
     *
     * @param json     maxwell event: {@code database}/{@code table}/{@code type}
     *                 plus {@code data}/{@code old}/{@code def} payloads
     * @param database only events for this database are applied
     */
    public void JSONphoenix(String json, String database) throws SQLException {
        logs.info(json);
        HashMap<String, Object> mapjon =
                JSONObject.parseObject(json, LinkedHashMap.class, Feature.OrderedField);
        // String.valueOf instead of .toString(): a malformed event without these
        // keys now falls through to the "unsupported" branch instead of NPE-ing.
        String databases = String.valueOf(mapjon.get("database"));
        String table = String.valueOf(mapjon.get("table"));
        String type = String.valueOf(mapjon.get("type"));

        logs.info(table + type + ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;");

        if (databases.equals(database) && mapjon.containsKey("data") && type.equals("insert")) {
            handleInsert(databases, table, toOrderedMap(mapjon.get("data")));
        } else if (databases.equals(database) && mapjon.containsKey("data") && type.equals("update")) {
            handleUpdate(databases, table,
                    toOrderedMap(mapjon.get("data")), toOrderedMap(mapjon.get("old")));
        } else if (databases.equals(database) && mapjon.containsKey("data") && type.equals("delete")) {
            handleDelete(databases, table, toOrderedMap(mapjon.get("data")));
        } else if (databases.equals(database) && type.equals("table-alter")) {
            handleAlter(mapjon);
        } else if (databases.equals(database) && type.equals("table-create")) {
            handleCreate(mapjon);
        } else if (databases.equals(database) && type.equals("table-drop")) {
            String sqlsentence = "drop table" + " " + databases + "." + table;
            executeSQL(sqlsentence);
            logs.info("删除表:" + sqlsentence);
        } else {
            logs.info(databases + "不在同步Phoenix范围内或者Phoenix语句不支持");
        }
    }

    /**
     * Re-serialises a parsed JSON node keeping null values, then parses it back
     * into an insertion-ordered map (column order must match the MySQL schema
     * because the rowkey hash depends on value concatenation order).
     */
    private static HashMap<String, Object> toOrderedMap(Object node) {
        String text = JSON.toJSONString(node,
                SerializerFeature.WriteMapNullValue, SerializerFeature.WriteNullListAsEmpty);
        return JSON.parseObject(text, LinkedHashMap.class, Feature.OrderedField);
    }

    /**
     * Hashes the concatenated row values and keeps the first
     * {@value #ROWKEY_LENGTH} hex chars as the Phoenix rowkey.
     *
     * <p>Fix: the old code caught {@code NoSuchAlgorithmException}, printed it
     * and then NPE-d on {@code null.substring(...)}. SHA-1 is mandatory in every
     * JRE, so its absence is treated as fatal with the cause preserved.
     */
    private String rowkeyOf(String concatenatedValues) {
        try {
            return sha1(concatenatedValues).substring(0, ROWKEY_LENGTH);
        } catch (NoSuchAlgorithmException e) {
            throw new IllegalStateException("SHA-1 digest unavailable", e);
        }
    }

    /**
     * Collects the three SQL fragments from a column map in insertion order:
     * {@code "f1,f2,"}, {@code "'v1','v2',"} and the rowkey seed {@code "v1v2"}.
     */
    private static void appendRow(Map<String, Object> row,
                                  StringBuilder fields, StringBuilder values, StringBuilder rowSeed) {
        for (Map.Entry<String, Object> e : row.entrySet()) {
            fields.append(e.getKey()).append(',');
            values.append('\'').append(e.getValue()).append("',");
            rowSeed.append(e.getValue());
        }
    }

    /** Renders the upsert for one row; fields/values carry a trailing comma to strip. */
    private static String upsertSql(String databases, String table, String rowkey,
                                    StringBuilder fields, StringBuilder values) {
        return "upsert into " + databases + "." + table
                + " ( rowkey," + fields.substring(0, fields.length() - 1) + ")"
                + " values ( '" + rowkey + "'," + values.substring(0, values.length() - 1) + ")";
    }

    /** Renders the delete for one row; {@code where} ends with "... and " — the trailing "and " is stripped. */
    private static String deleteSql(String databases, String table, String rowkey, StringBuilder where) {
        return "delete from " + databases + "." + table
                + " where rowkey='" + rowkey + "' and " + where.substring(0, where.length() - 4);
    }

    /** insert event: upsert the row keyed by the hash of its values. */
    private void handleInsert(String databases, String table, HashMap<String, Object> data) {
        if (data.isEmpty()) {
            // Fix: an empty payload used to crash on substring(0, -1).
            logs.info("insert event carried no columns, skipped: " + databases + "." + table);
            return;
        }
        StringBuilder fields = new StringBuilder();
        StringBuilder values = new StringBuilder();
        StringBuilder rowSeed = new StringBuilder();
        appendRow(data, fields, values, rowSeed);

        String rowkey = rowkeyOf(rowSeed.toString());
        logs.info("插入字段索引" + rowkey);

        String sqlinsertss = upsertSql(databases, table, rowkey, fields, values);
        logs.info("插入表语句:" + sqlinsertss);
        executeSQL(sqlinsertss);
    }

    /**
     * update event: the rowkey is a hash of the values, so the pre-update row
     * (new values overlaid with the previous values from "old") is deleted
     * first, then the post-update row is upserted.
     */
    private void handleUpdate(String databases, String table,
                              HashMap<String, Object> data, HashMap<String, Object> old) {
        if (data.isEmpty()) {
            logs.info("update event carried no columns, skipped: " + databases + "." + table);
            return;
        }
        // Reconstruct the pre-update row, preserving column order.
        Map<String, Object> previousRow = new LinkedHashMap<String, Object>();
        for (Map.Entry<String, Object> e : data.entrySet()) {
            logs.info("新的需要放到顺序map里的" + e.getKey() + e.getValue());
            previousRow.put(e.getKey(), e.getValue());
        }
        for (Map.Entry<String, Object> e : old.entrySet()) {
            logs.info("旧的需要放到顺序map里的" + e.getKey() + e.getValue());
            previousRow.put(e.getKey(), e.getValue());
        }

        StringBuilder where = new StringBuilder();
        StringBuilder oldSeed = new StringBuilder();
        for (Map.Entry<String, Object> e : previousRow.entrySet()) {
            where.append(e.getKey()).append("='").append(e.getValue()).append("' and ");
            oldSeed.append(e.getValue());
        }
        logs.info("旧的索引字段:" + oldSeed);

        String sqls = deleteSql(databases, table, rowkeyOf(oldSeed.toString()), where);
        logs.info("更新过程中删除语句" + sqls + "===========================================");
        executeSQL(sqls);

        // Upsert the post-update row.
        StringBuilder fields = new StringBuilder();
        StringBuilder values = new StringBuilder();
        StringBuilder newSeed = new StringBuilder();
        appendRow(data, fields, values, newSeed);
        logs.info("更新字段值" + newSeed);

        String sqlinsertss = upsertSql(databases, table, rowkeyOf(newSeed.toString()), fields, values);
        logs.info("更新过程中插入语句:" + sqlinsertss);
        executeSQL(sqlinsertss);
    }

    /** delete event: remove the row identified by its value-hash rowkey plus a full-column match. */
    private void handleDelete(String databases, String table, HashMap<String, Object> data) {
        if (data.isEmpty()) {
            logs.info("delete event carried no columns, skipped: " + databases + "." + table);
            return;
        }
        StringBuilder where = new StringBuilder();
        StringBuilder rowSeed = new StringBuilder();
        for (Map.Entry<String, Object> e : data.entrySet()) {
            rowSeed.append(e.getValue());
            where.append(e.getKey()).append("='").append(e.getValue()).append("' and ");
        }
        logs.info(rowSeed.toString());
        logs.info(where.toString());

        String sql = deleteSql(databases, table, rowkeyOf(rowSeed.toString()), where);
        logs.info("删除表语句:" + sql);
        executeSQL(sql);
    }

    /**
     * table-alter event: add every NEW column (present in "def" but not "old")
     * as varchar. Phoenix syntax: {@code ALTER TABLE t ADD col varchar}.
     *
     * <p>Fixes: the old code (a) kept only the last element of the diff when
     * several columns changed, (b) used the symmetric difference, so a DROPPED
     * column was re-ADDed, and (c) emitted {@code ADD null varchar} when the
     * diff was empty. Dropped columns are now ignored — Phoenix column removal
     * is not replicated here.
     */
    private void handleAlter(HashMap<String, Object> mapjon) {
        String[] typealter = String.valueOf(mapjon.get("type")).split("-");
        String typesalter = typealter[1] + " " + typealter[0]; // "table-alter" -> "alter table"
        logs.info(typesalter);

        Set<String> listold = columnNames(mapjon.get("old"));
        Set<String> listnew = columnNames(mapjon.get("def"));

        Set<String> added = new HashSet<String>(listnew);
        added.removeAll(listold);
        if (added.isEmpty()) {
            logs.info("table-alter event added no columns, skipped");
            return;
        }
        for (String addfield : added) {
            logs.info("获取需要增加的字段值:" + addfield);
            String sqlalter = typesalter + " " + mapjon.get("database") + "." + mapjon.get("table")
                    + " " + "ADD" + " " + addfield + " " + "varchar";
            logs.info("增加字段sql" + sqlalter);
            executeSQL(sqlalter);
        }
    }

    /** Extracts the "name" of every entry in a table definition's "columns" array. */
    private static Set<String> columnNames(Object definitionNode) {
        HashMap<String, Object> definition = JSON.parseObject(
                String.valueOf(definitionNode), LinkedHashMap.class, Feature.OrderedField);
        JSONArray columns = JSONArray.parseArray(definition.get("columns").toString());
        Set<String> names = new HashSet<String>();
        for (int i = 0; i < columns.size(); i++) {
            Map<String, Object> column = JSON.parseObject(
                    columns.getString(i), LinkedHashMap.class, Feature.OrderedField);
            names.add(String.valueOf(column.get("name")));
        }
        return names;
    }

    /**
     * table-create event: create the Phoenix table with a synthetic
     * {@code rowkey varchar} primary key; every MySQL column becomes varchar.
     */
    private void handleCreate(HashMap<String, Object> mapjon) {
        HashMap<String, Object> def = JSON.parseObject(
                mapjon.get("def").toString(), LinkedHashMap.class, Feature.OrderedField);
        JSONArray jsonObj = JSONArray.parseArray(def.get("columns").toString());

        StringBuilder names = new StringBuilder();
        for (int i = 0; i < jsonObj.size(); i++) {
            HashMap<String, Object> colums = JSON.parseObject(
                    jsonObj.getString(i), LinkedHashMap.class, Feature.OrderedField);
            names.append(colums.get("name")).append(" ").append("varchar,");
        }
        // Strip the trailing comma (guarded: a zero-column def used to crash).
        String columnList = names.length() > 0 ? names.substring(0, names.length() - 1) : "";

        String tableSentence = "create table  If Not Exists" + " "
                + mapjon.get("database") + "." + mapjon.get("table") + "("
                + "rowkey  varchar not null primary key ," + columnList + ")";
        logs.info("创建表语句:" + tableSentence);
        executeSQL(tableSentence);
        logs.info("表" + mapjon.get("table") + "被创建");
    }
}
