package com.datamining.online

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SQLContext, SparkSession}
import org.apache.spark.sql.types.{DataType, DataTypes, StructField, StructType}

/**
  * Created by Administrator on 2017/5/6.
  */
/**
  * spark-test
  * ItemTime
  *
  * @author Administrator kevin
  * @create 2017-05-06 18:32
  */
object DataFrameTest {

  // NOTE(review): unused placeholder; kept only for source compatibility.
  case class Test()

  /**
    * Reads two CSV-like text files into DataFrames with explicit schemas,
    * registers them as temp views, runs a groupBy/count aggregation on
    * table A, and inner-joins the two tables on "id", printing each result.
    *
    * @param args optional path overrides: args(0) = table_a path,
    *             args(1) = table_b path. Defaults preserve the original
    *             hard-coded local paths.
    */
  def main(args: Array[String]): Unit = {

    // Input paths are now overridable from the command line; the defaults
    // are the original hard-coded locations, so existing usage is unchanged.
    val tableAPath =
      if (args.length > 0) args(0)
      else "K:/github_space/spark-test/src/main/resources/data/order_items/table_a.txt"
    val tableBPath =
      if (args.length > 1) args(1)
      else "K:/github_space/spark-test/src/main/resources/data/order_items/table_b.txt"

    val sparkConf = new SparkConf()
      .setMaster("local[4]") // local mode, 4 worker threads
      .setAppName("my_test")

    // Bug fix: the original chained .appName("") on the builder, which
    // overrode the "my_test" name already set on the SparkConf with an
    // empty string. The conf alone now carries the app name.
    val sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
    val sparkContext = sparkSession.sparkContext

    import org.apache.spark.sql.Row

    // Table A: each line is "code,name,id" (comma-separated).
    // `val` instead of `var` — the RDD reference is never reassigned.
    val rowRddA = sparkContext
      .textFile(tableAPath)
      .map(_.split(","))
      .map(fields => Row(fields(0), fields(1), fields(2).trim.toInt))

    val schemaA = StructType(Array(
      StructField("code", DataTypes.StringType, true),
      StructField("name", DataTypes.StringType, true),
      StructField("id", DataTypes.IntegerType, true)))

    val tableA = sparkSession.createDataFrame(rowRddA, schemaA)
    tableA.createOrReplaceTempView("kevin_test_a")
    tableA.show()

    // Row count per id, ordered by that count.
    tableA.groupBy("id").count().orderBy("count").show()

    // Table B: each line is "id,categore_name".
    // NOTE(review): "categore_name" looks like a typo for "category_name",
    // but it is kept as-is because the column name is part of the schema
    // that downstream SQL may reference.
    val rowRddB = sparkContext
      .textFile(tableBPath)
      .map(_.split(","))
      .map(fields => Row(fields(0).trim.toInt, fields(1)))

    val schemaB = StructType(Array(
      StructField("id", DataTypes.IntegerType, true),
      StructField("categore_name", DataTypes.StringType, true)))

    val tableB = sparkSession.createDataFrame(rowRddB, schemaB)
    tableB.createOrReplaceTempView("kevin_test_b")
    tableB.show()

    // Inner join on the shared "id" column (Spark de-duplicates the key column).
    tableA.join(tableB, "id").show()

    // Resource fix: the original never stopped the session, leaking the
    // local Spark context on repeated runs in the same JVM.
    sparkSession.stop()
  }

}
