
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;

import java.io.IOException;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Calendar;
import java.util.Date;


public class TestRdd implements Serializable {

    // Reference timestamp (seconds, ~2015-03-30) used to turn absolute event
    // times into relative offsets. Previously duplicated as a magic number in
    // both mapping branches.
    private static final int REFERENCE_UNIX_TIME = 1427691622;

    /**
     * Spark job that joins ad-impression data with per-campaign values and
     * per-group user values, scores each row as campaignValue * userValue,
     * and writes one result file per group.
     *
     * <p>Arguments:
     * <ul>
     *   <li>args[0] — root directory of the input files (local path or HDFS;
     *       the same layout is expected either way)</li>
     *   <li>args[1] — input mode: {@code "file"} reads adddatasample.txt from
     *       args[0]; any other value reads the Hive table {@code addata}</li>
     *   <li>args[2] — index of the first group to process; groups run up to,
     *       but not including, 61</li>
     * </ul>
     */
    public static void main(String[] args) throws IOException {

        // Standard Spark bootstrap: conf -> context -> SQL context.
        SparkConf sparkConf = new SparkConf().setAppName("Test");
        JavaSparkContext sc = new JavaSparkContext(sparkConf);
        SQLContext sqlContext = new org.apache.spark.sql.SQLContext(sc);

        String campainspath = args[0] + "/campains.txt";

        // Campaign file format: "<campaign>,<value>" per line.
        JavaRDD<Campains> campains = sc.textFile(campainspath).map(
                new Function<String, Campains>() {
                    public Campains call(String line) throws Exception {
                        String[] parts = line.split(",");
                        Campains campains = new Campains();
                        campains.setCampains(parts[0]);
                        campains.setVlaue(Double.parseDouble(parts[1].trim()));
                        return campains;
                    }
                });

        DataFrame campainsDataFrame = sqlContext.createDataFrame(campains, Campains.class);

        DataFrame adddataDataFrame;

        if (args[1].equals("file")) {
            // File mode: parse the raw tab-separated sample file.
            String adddatapath = args[0] + "/adddatasample.txt";

            JavaRDD<adddata> tempdata = sc.textFile(adddatapath).map(
                    new Function<String, adddata>() {
                        public adddata call(String line) throws Exception {
                            String[] parts = line.split("\\t");
                            adddata tempdata = new adddata();
                            tempdata.setId(parts[0]);
                            tempdata.setStable(Integer.parseInt(parts[1].trim()));
                            tempdata.setCampains(parts[2]);
                            tempdata.setMonitoring(parts[3]);
                            tempdata.setBrowser(parts[4]);
                            tempdata.setRegion(parts[5]);
                            tempdata.setIp(parts[6]);
                            tempdata.setTime(Integer.parseInt(parts[7].trim()));
                            tempdata.setClick(Integer.parseInt(parts[8].trim()));
                            return tempdata;
                        }
                    });

            adddataDataFrame = sqlContext.createDataFrame(tempdata, adddata.class);
        } else {
            // Hive mode: the same data lives in the "addata" table.
            HiveContext sqlContext2 = new org.apache.spark.sql.hive.HiveContext(sc.sc());
            adddataDataFrame = sqlContext2.sql("select * from addata");
        }

        // Attach each impression to its campaign value.
        DataFrame temp = adddataDataFrame.join(
                campainsDataFrame,
                adddataDataFrame.col("campains").equalTo(campainsDataFrame.col("campains")),
                "inner");

        // The joined frame is re-joined against every group below; cache it
        // once so it is not recomputed on each loop iteration.
        temp.persist();

        DataFrame[] resultDataFrame = new DataFrame[61];

        // args[2] selects the first group to process; groups are numbered up to 60.
        for (int i = Integer.parseInt(args[2]); i < 61; i++) {

            // Group file format: "<id>\t<value>", where value may be written
            // in scientific notation.
            String groupurl = args[0] + "/group" + i;
            JavaRDD<Group> tempgroup = sc.textFile(groupurl).map(
                    new Function<String, Group>() {
                        public Group call(String line) throws Exception {
                            String[] parts = line.split("\\t");
                            Group group = new Group();
                            group.setId(parts[0]);
                            group.setValue(Returnvalue(parts[1]));
                            return group;
                        }
                    });

            DataFrame group = sqlContext.createDataFrame(tempgroup, Group.class);

            // Restrict the joined data to users in the current group.
            DataFrame temp2 = temp.join(group, temp.col("id").equalTo(group.col("id")), "inner");

            JavaRDD<Resultrdd> result;

            // The column order of temp2 differs between the file-backed frame
            // (createDataFrame sorts bean fields alphabetically) and the Hive
            // table (declared column order), so each mode needs its own
            // Row -> Resultrdd mapping with different column indices.
            if (args[1].equals("file")) {
                result = temp2.javaRDD().map(
                        new Function<Row, Resultrdd>() {
                            public Resultrdd call(Row a1) {
                                Resultrdd result = new Resultrdd();

                                // Pass-through columns.
                                result.setBrowser(a1.getString(0));
                                result.setCampains(a1.getString(1));
                                result.setClick(a1.getInt(2));
                                result.setId(a1.getString(3));
                                result.setIp(a1.getString(4));
                                result.setMonitoring(a1.getString(5));
                                result.setRegion(a1.getString(6));
                                result.setStable(a1.getInt(7));

                                // Offset of the event from the reference time.
                                // NOTE(review): the sign here (REFERENCE - t) is the
                                // reverse of the Hive branch below (t - REFERENCE) —
                                // confirm which direction downstream consumers expect.
                                result.setTime(REFERENCE_UNIX_TIME - a1.getInt(8));

                                // Derived time features from the raw Unix timestamp.
                                result.setTimeslot(returnTimeSlot(a1.getInt(8)));
                                result.setWhatday(returnWhatday(a1.getInt(8)));

                                // Score = campaign value * user value.
                                Double campainsvalue = a1.getDouble(10);
                                Double idvalue = a1.getDouble(12);
                                result.setCampainsvalue(campainsvalue);
                                result.setIdvalue(idvalue);
                                result.setResultvalue(campainsvalue * idvalue);

                                return result;
                            }
                        });
            } else {
                // Hive mode: columns arrive in table-declared order.
                result = temp2.javaRDD().map(
                        new Function<Row, Resultrdd>() {
                            public Resultrdd call(Row a1) {
                                Resultrdd result = new Resultrdd();

                                // Pass-through columns.
                                result.setId(a1.getString(0));
                                result.setStable(a1.getInt(1));
                                result.setCampains(a1.getString(2));
                                result.setMonitoring(a1.getString(3));
                                result.setBrowser(a1.getString(4));
                                result.setRegion(a1.getString(5));
                                result.setIp(a1.getString(6));

                                // Offset of the event from the reference time
                                // (see sign-convention note in the file branch).
                                result.setTime(a1.getInt(7) - REFERENCE_UNIX_TIME);
                                result.setClick(a1.getInt(8));

                                // Derived time features from the raw Unix timestamp.
                                result.setTimeslot(returnTimeSlot(a1.getInt(7)));
                                result.setWhatday(returnWhatday(a1.getInt(7)));

                                // Score = campaign value * user value.
                                Double campainsvalue = a1.getDouble(10);
                                Double idvalue = a1.getDouble(12);
                                result.setCampainsvalue(campainsvalue);
                                result.setIdvalue(idvalue);
                                result.setResultvalue(campainsvalue * idvalue);

                                return result;
                            }
                        });
            }

            resultDataFrame[i] = sqlContext.createDataFrame(result, Resultrdd.class);

            // One output directory per group; repartition(1) forces a single
            // part file for easier downstream aggregation.
            resultDataFrame[i].javaRDD().repartition(1).saveAsTextFile(args[0] + "/results/result" + i);
        }

        sc.stop();
    }

    /**
     * Returns the hour of day (0-23, default timezone) for a Unix timestamp
     * given in seconds.
     *
     * @param unixTimestamp seconds since the epoch
     * @return hour of day, 0-23
     */
    public static int returnTimeSlot(int unixTimestamp) {
        Calendar cal = Calendar.getInstance();
        // 1000L forces the multiplication into long arithmetic. The original
        // "unixTimestamp * 1000" overflowed int for any timestamp after
        // 1970-01-25, yielding a wrong (pre-epoch) date.
        cal.setTimeInMillis(unixTimestamp * 1000L);
        return cal.get(Calendar.HOUR_OF_DAY);
    }

    /**
     * Returns the day of week for a Unix timestamp given in seconds, using the
     * same encoding as the deprecated {@code Date.getDay()}:
     * 0 = Sunday, 1 = Monday, ..., 6 = Saturday.
     *
     * @param unixTimestamp seconds since the epoch
     * @return day of week, 0 (Sunday) through 6 (Saturday)
     */
    public static int returnWhatday(int unixTimestamp) {
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(unixTimestamp * 1000L); // 1000L: avoid int overflow
        // Calendar.DAY_OF_WEEK is 1 (Sunday) .. 7 (Saturday); subtract 1 to
        // keep the 0-based encoding the rest of the pipeline expects.
        return cal.get(Calendar.DAY_OF_WEEK) - 1;
    }

    /**
     * Parses a numeric string — including scientific notation such as
     * {@code "1.2E-3"} — into a Double via BigDecimal.
     *
     * @param value numeric string to parse
     * @return the parsed value as a Double
     */
    public static Double Returnvalue(String value) {
        return new BigDecimal(value).doubleValue();
    }

}



