package FastLearningSpark.SparkSQL;

//import com.sdyc.ndmp.schedule.spark.SparkContextManager;
//import com.sdyc.ndmp.schedule.spark.SparkJobCreator;
//import org.springframework.context.support.ClassPathXmlApplicationContext;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.spark.sql.types.StructType;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import scala.Tuple2;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;


/**
 * <pre>
 * Created with IntelliJ IDEA.
 * User: zhengzhi
 * Date: 2017/3/16
 * To change this template use File | Settings | File Templates.
 * </pre>
 */
public class PublicPraiseAnalyse {

    private static final String url = "jdbc:mysql://localhost:3306/test";


    public static void startSparkJob() throws Exception {
        try {
            runJob();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private static void runJob() throws Exception {
        SparkConf conf = new SparkConf().setAppName("Spark sql by Java").setMaster("local");
        final JavaSparkContext context = new JavaSparkContext(conf);

        HiveContext hiveContext = new HiveContext(context.sc());
        hiveContext.sql("use etl");
        DataFrame dataFrame = hiveContext.sql("select * from rawtravelscenicinfo limit 5");

        final JSONObject dinnerJsonOb = new JSONObject();
        final JSONObject shopJsonOb = new JSONObject();

        for(int i = 1;i<=5;i++){
            dinnerJsonOb.put(Integer.toString(i),0);
            shopJsonOb.put(Integer.toString(i),0);
        }

        dataFrame.show();

        JavaRDD<Object> scoresRdd = dataFrame.toJavaRDD().map(new Function<Row, Object>() {
            public Object call(Row row) throws Exception {
                StructType schema = row.schema();
                //获取表的字段名称
                String[] filednames = schema.fieldNames();
                JSONObject jsonObject = new JSONObject();

                for (int i = 0; i < filednames.length; i++) {
                    Object o = row.get(i);
                    jsonObject.put(filednames[i], o);
                }

                String scoresStr = jsonObject.get("scores").toString().replace("[", "").replace("]", "").trim().replaceAll("=", ":");
                JSONObject ScoreJO = JSON.parseObject(scoresStr);
                return ScoreJO;
            }
        });

        scoresRdd.foreach(new VoidFunction<Object>() {
            public void call(Object o) throws Exception {
                System.out.println(o.toString());
            }
        });






        scoresRdd.flatMapToPair(new PairFlatMapFunction<Object, String, Integer>() {
            public Iterable<Tuple2<String, Integer>> call(Object o) throws Exception {
                JSONObject jsonObject = (JSONObject)o;
                ArrayList<Tuple2<String, Integer>> scoreTupleList = new ArrayList<Tuple2<String, Integer>>();
                Tuple2<String, Integer> dinnerTuple = new Tuple2<String, Integer>("dinner_" + jsonObject.get("dinner").toString(),1);
                Tuple2<String, Integer> shopTuple = new Tuple2<String, Integer>("shop_" + jsonObject.get("shooping").toString(),1);
                scoreTupleList.add(dinnerTuple);
                scoreTupleList.add(shopTuple);
                return scoreTupleList;
            }
        }).reduceByKey(new Function2<Integer, Integer, Integer>() {
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1+v2;
            }
        }).foreach(new VoidFunction<Tuple2<String, Integer>>() {
            public void call(Tuple2<String, Integer> tuple) throws Exception {

                String prefStr = "";

                if(tuple._1.contains(prefStr)){
                    if(tuple._1.equals("dinner_1"))
                        dinnerJsonOb.put("1",tuple._2);
                    else if(tuple._1.equals("dinner_2"))
                        dinnerJsonOb.put("2",tuple._2);
                    else if(tuple._1.equals("dinner_3"))
                        dinnerJsonOb.put("3",tuple._2);
                    else if(tuple._1.equals("dinner_4"))
                        dinnerJsonOb.put("4",tuple._2);
                    else
                        dinnerJsonOb.put("5",tuple._2);
                }


            }
        });

        //加载Mysql驱动
//        Class.forName("com.mysql.jdbc.Driver");
//        Properties jdbcProp = new Properties();
//        jdbcProp.setProperty("user","root");
//        jdbcProp.setProperty("password","root");
//        DataFrame dataFrame = sqlContext.read().jdbc(url, "testTable", jdbcProp);
//        dataFrame.select("name","age").show();


//        HashMap<String, String> options = new HashMap<String, String>();
//        options.put("url","jdbc:mysql://localhost:3306/test");
//        options.put("Driver","com.mysql.jdbc.Driver");
////        options.put("dbtale","test");
//        options.put("dbtable","testTable");
//        options.put("user","root");
//        options.put("password","root");
//        DataFrame jdbcDframe = sqlContext.read().format("jdbc").options(options).load();
//        jdbcDframe.groupBy("date").count().show();
//        jdbcDframe.groupBy(jdbcDframe.col("date")).count().registerTempTable("DailyCount");
//
//        DataFrame count = sqlContext.sql("select * from DailyCount where date=\"2001-12-1\"");
//        count.show();
//
//        jdbcDframe.registerTempTable("TestTable");
//
//        sqlContext.sql("select count(*) from TestTable where dinnerPriase=5");


    }


    public static void main(String[] args) throws Exception {

        ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("ApplicationContext.xml");
        startSparkJob();
    }
}
