package jupitermouse.site.example.scala.batch

import org.apache.flink.api.common.operators.Order
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.api.scala._

import scala.collection.mutable.ListBuffer

/**
 * Demonstrations of common Flink DataSet (batch) transformations:
 * map, filter, mapPartition, first/groupBy/sortGroup, flatMap,
 * distinct, outer joins and cross.
 */
object DataSetTransformationApp {

  /**
   * Entry point: uncomment the demo you want to run.
   *
   * NOTE: DataSet sinks such as print() are eager and trigger job execution
   * themselves, which is why env.execute() is left commented out — calling
   * it after print() would fail with "No new data sinks have been defined".
   */
  def main(args: Array[String]): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment
//    mapFunction(env)
//    filterFunction(env)
//    mapPartitionFunction(env)
//    tranFirstFunction(env)
//    FlatMapFunction(env)
//    outerJoinFunction(env)
    crossFunction(env)
  }

  /** map: add 1 to every element of the data set. */
  def mapFunction(env: ExecutionEnvironment): Unit = {
    val data = env.fromCollection(List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
    // Equivalent spellings, from most to least explicit:
//    data.map((x: Int) => x + 1).print()
//    data.map((x) => x + 1).print()
//    data.map(x => x + 1).print()
    data.map(_ + 1).print()
  }

  /** filter: after adding 1, keep only the elements greater than 5. */
  def filterFunction(env: ExecutionEnvironment): Unit = {
    val data = env.fromCollection(List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
    data.map(_ + 1)
      .filter(_ > 5)
      .print()
  }

  /**
   * mapPartition: invoke the user function once per PARTITION instead of
   * once per element. Useful for per-partition setup cost, e.g. acquiring
   * a database connection once per partition rather than once per record.
   */
  def mapPartitionFunction(env: ExecutionEnvironment): Unit = {
    // 100 synthetic records, spread over 5 partitions.
    val students = (1 to 100).map(i => "student: " + i)

    val data = env.fromCollection(students).setParallelism(5)

/*    data.map(x => {
      // With map, a connection would be acquired once PER ELEMENT.
      val connection = DBUtils.getConnection()
      println(connection + "     ")

      // TODO save the record to the DB
      DBUtils.returnConnection(connection)
    }).print()*/

    /*data.mapPartition(x => {
      // With mapPartition, a connection is acquired once PER PARTITION.
      val connection = DBUtils.getConnection()
      println(connection + "     ")

      // TODO save the partition's records to the DB
      DBUtils.returnConnection(connection)
    })*/

  }

  /**
   * first / groupBy / sortGroup: take the first N elements, optionally
   * per group and sorted within each group.
   */
  def tranFirstFunction(env: ExecutionEnvironment): Unit = {
    val list = List(
      (1, "hadoop"),
      (2, "hadoop"),
      (3, "hadoop"),
      (4, "hadoop"),
      (1, "flink"),
      (1, "spark"),
      (1, "storm"),
      (1, "Jstorm"),
      (1, "sparkx")
    )
    val data = env.fromCollection(list)

//    data.first(3).print()
    println("-----------------------")
//    data.groupBy(0).first(2).print()
    println("-----------------------")
    // Sort within each group by the SECOND field: sorting on field 0 (as the
    // original did) is a no-op because field 0 is the group key and therefore
    // constant within every group.
    data.groupBy(0).sortGroup(1, Order.ASCENDING)
      .first(2).print()
  }


  /**
   * flatMap word count: split comma-separated lines into words, then count
   * occurrences per word.
   *
   * (Method name kept as-is for source compatibility; lowerCamelCase —
   * flatMapFunction — would be the idiomatic Scala spelling.)
   */
  def FlatMapFunction(env: ExecutionEnvironment): Unit = {
    val info = List("hadoop,spark", "hadoop,flink", "flink,flink")

    val data = env.fromCollection(info)
    data.flatMap(_.split(","))
      .map((_, 1))
      .groupBy(0)
      .sum(1)
      .print()
  }

  /** distinct: deduplicate the words produced by splitting each line. */
  def DistinctFunction(env: ExecutionEnvironment): Unit = {
    val info = List("hadoop,spark", "hadoop,flink", "flink,flink")

    val data = env.fromCollection(info)
    data.flatMap(_.split(","))
      .distinct().print()
  }

  /**
   * Shared fixtures for the join/cross demos:
   * (id, name) pairs ("mysql") and (id, city) pairs ("hive").
   * The left table has one id (6) with no match on the right.
   */
  private def sampleTables(env: ExecutionEnvironment) = {
    val mysql = List(
      (1, "zhangsan"),
      (2, "lishi"),
      (3, "wangwu"),
      (4, "shunyi"),
      (5, "qianer"),
      (6, "wuming")
    )

    val hive = List(
      (1, "上海"),
      (2, "北京"),
      (3, "武汉"),
      (4, "深圳"),
      (5, "广州")
    )

    (env.fromCollection(mysql), env.fromCollection(hive))
  }

  /**
   * Outer joins on the id field. Flink passes null for the missing side of
   * an unmatched row, so the join function must null-check accordingly.
   */
  def outerJoinFunction(env: ExecutionEnvironment): Unit = {
    val (table1, table2) = sampleTables(env)

    // Left outer join: every row of table1 appears; b is null when table2
    // has no matching id.
    table1.leftOuterJoin(table2)
      .where(0).equalTo(0)
      .apply((a, b) => {
        if (b == null) (a._1, a._2, "null")
        else (a._1, a._2, b._2)
      }).print()

    // Full outer join: EITHER side may be null for an unmatched row. The
    // original only checked b, which would throw a NullPointerException on
    // any row present only in table2; both sides are checked here.
    table1.fullOuterJoin(table2)
      .where(0).equalTo(0)
      .apply((a, b) => {
        if (a == null) (b._1, "null", b._2)
        else if (b == null) (a._1, a._2, "null")
        else (a._1, a._2, b._2)
      }).print()
  }

  /** cross: Cartesian product of the two tables (6 x 5 = 30 pairs). */
  def crossFunction(env: ExecutionEnvironment): Unit = {
    val (table1, table2) = sampleTables(env)

    table1.cross(table2).print()
  }

  /** Backward-compatible alias for the original (misspelled) method name. */
  @deprecated("use crossFunction", "1.0")
  def cossFunction(env: ExecutionEnvironment): Unit = crossFunction(env)

}
