package com.ihk.test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;


import com.ihk.hadoop.hbase.HbaseTemplate;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.mapred.JobConf;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.rdd.RDD;
import org.apache.spark.sql.*;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.types.StructType;
import scala.Function1;
import scala.collection.immutable.Seq;


public class Demo_Mysql3 {

    // Static loggers should be final so the reference cannot be reassigned.
    private static final Logger logger = Logger.getLogger(Demo_Mysql3.class);

    /**
     * Demo entry point: reads the MySQL table {@code call_history} through Spark's
     * JDBC data source (each predicate string becomes exactly one RDD partition),
     * registers the result as a temp table, and sums the {@code id} column with a
     * map/reduce over the resulting RDD. The SparkContext is always stopped in
     * {@code finally}.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        SparkConf sparkConf = new SparkConf();
        sparkConf.setAppName("Demo_Mysql2");
        sparkConf.setMaster("local");

        JavaSparkContext sc = null;
        try {
            sc = new JavaSparkContext(sparkConf);
            SQLContext sqlContext = new SQLContext(sc);

            /* Example: converting a List (e.g. a Phoenix query result) into a
             * DataFrame and registering it as a temp table:
             *
             *   List<Study> list = new ArrayList<>();
             *   DataFrame dataFrame = sqlContext.createDataFrame(list, Study.class);
             *   dataFrame.registerTempTable("study");
             *   DataFrame df = sqlContext.sql("select * from study");
             *   JavaRDD<Row> jr = df.toJavaRDD();
             */

            // Each predicate string maps to one JDBC partition.
            String[] predicates = new String[] {
                    " ( deal_status=-1 or deal_status is null) and today>'2016-08-30' "
                   };

            // SECURITY(review): credentials are hard-coded in source; move them to
            // external configuration (properties file / environment variables).
            String url = "jdbc:mysql://172.16.8.84:3306/cloudcall";
            String table = "call_history";
            Properties connectionProperties = new Properties();
            connectionProperties.setProperty("dbtable", table);             // table name
            connectionProperties.setProperty("user", "cloudcall");          // user
            connectionProperties.setProperty("password", "mydbacloudcall"); // password

            // Read the table; primitive long avoids pointless boxing of the timestamp.
            long start = System.currentTimeMillis();
            DataFrame jdbcDF = sqlContext.read().jdbc(url, table, predicates,
                    connectionProperties);
            System.out.println("使用时间："+(System.currentTimeMillis()-start));
            System.out.println("分区个数："+jdbcDF.toJavaRDD().getNumPartitions());

            jdbcDF.registerTempTable("callHistory");
            DataFrame jdbcDF1 =  sqlContext.sql("select id from callHistory ");

            // Map each row to its id column (index 0). Autoboxing replaces the
            // redundant explicit Integer.valueOf(...) call of the original.
            JavaRDD<Integer> teenagerNames = jdbcDF1.javaRDD().map(new Function<Row, Integer>() {
                @Override
                public Integer call(Row row) {
                    System.out.println("输出第一个："+row.getInt(0));
                    return row.getInt(0);
                }
            });

            // Sum all ids across partitions.
            Integer DF2 =  teenagerNames.reduce(new Function2<Integer, Integer, Integer>() {
                @Override
                public Integer call(Integer integer, Integer integer2) throws Exception {
                    return integer + integer2;
                }
            });
            System.out.println(DF2);

            /* Example: writing a DataFrame back to MySQL. Save modes:
             *   SaveMode.Append        — append to existing data;
             *   SaveMode.Overwrite     — overwrite existing records;
             *   SaveMode.ErrorIfExists — throw if records already exist;
             *   SaveMode.Ignore        — do nothing if records already exist.
             *
             *   String url2 = "jdbc:mysql://localhost:3306/mysql";
             *   Properties connectionProperties2 = new Properties();
             *   connectionProperties2.setProperty("user", "root");
             *   connectionProperties2.setProperty("password", "root");
             *   String table2 = "demo4";
             *   jdbcDF.write().mode(SaveMode.Append)
             *         .jdbc(url2, table2, connectionProperties2);
             */
        } catch (Exception e) {
            logger.error("|main|exception error", e);
        } finally {
            if (sc != null) {
                sc.stop();
            }
        }
    }

    /**
     * Inserts one demo cell into the HBase table {@code user}
     * (row {@code 0111ass}, family {@code cf1}, qualifier {@code name}).
     *
     * NOTE(review): both parameters are currently unused — the put below targets
     * hard-coded table/row/column values. Confirm whether a per-partition write of
     * {@code dataFrame} rows to {@code tableName} was the actual intent.
     *
     * @param tableName target HBase table name (currently unused)
     * @param dataFrame data to persist (currently unused)
     * @throws IOException if the HBase write fails
     */
    public static void insertData(String tableName,DataFrame dataFrame) throws IOException {
        HbaseTemplate hbaseTemplate=new HbaseTemplate();
        hbaseTemplate.put("user","0111ass","cf1","name","xiaoqiang");
    }

    /**
     * Creates the HBase table {@code test} with the single column family
     * {@code name}. Failures are logged with their stack trace instead of being
     * swallowed via {@code printStackTrace()}.
     */
    @org.junit.Test
    public  void getHbaseData(){
        HbaseTemplate hbaseTemplate=new HbaseTemplate();
        try {
            hbaseTemplate.createTable("test",new String[]{"name"});
        } catch (IOException e) {
            // Preserve the cause in the application log rather than stderr.
            logger.error("|getHbaseData|create table failed", e);
        }
    }
}