package com.gabry.flink

import java.util.Properties

import com.alibaba.fastjson._
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.table.api.scala._
import org.apache.hadoop.conf.Configuration
import org.apache.hive.hcatalog.data.HCatRecord


object Main {

  /**
   * Row type for the `user` table of the `customer_center` database.
   * Field names match the registered table columns; note that `user_type`
   * is populated from the JSON field `"type"` (renamed to avoid a clash).
   * `reg_argee_url` matches the upstream column spelling — do not "fix" it.
   */
  case class Users(id:Long,mobile_phone:String,mobilephone_md5:String,password:String,password_salt:String,token:String,register_date:String,picture_url:String,
                   user_type:Int,status:Int,create_at:String,update_at:String,delete_flag:Int,uuid:String,login_name:String,deal_pwd:String,deal_pwd_salt:String,reg_argee_url:String,
                   custid_behalf:Long)

  /**
   * Row type for the `customer` table of the `customer_center` database.
   * `customer_type` is populated from the JSON field `"type"`.
   */
  case class Customers(id:Long,user_id:Long,customer_type:Int,status:Int,register_date:String,create_at:String,update_at:String,delete_flag:Int,cuid:String,prod_id:String)

  /**
   * Entry point. Consumes CDC-style JSON change records from the
   * `customer_center` Kafka topic, splits them into `users` / `customers`
   * streams, registers both as Table API tables, then reads a Hive table
   * through HCatalog and prints it.
   */
  def main(args: Array[String]): Unit = {
    val senv = StreamExecutionEnvironment.getExecutionEnvironment
    val stenv = StreamTableEnvironment.create(senv)

    // Kafka source configuration.
    val kafkaProps = new Properties()
    kafkaProps.setProperty("bootstrap.servers", "hadoop02:9092,hadoop03:9092,hadoop05:9092")
    // NOTE(review): "zookeeper.connect" is only consulted by the legacy Kafka 0.8
    // consumer; the universal FlinkKafkaConsumer ignores it — confirm and remove.
    kafkaProps.setProperty("zookeeper.connect", "hadoop02:2181,hadoop03:2181,hadoop05:2181")
    kafkaProps.setProperty("group.id", "flink_shell")
    // NOTE(review): key/value deserializer settings are presumably ignored here —
    // FlinkKafkaConsumer deserializes via the SimpleStringSchema passed below; verify.
    kafkaProps.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    kafkaProps.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    val myConsumer = new FlinkKafkaConsumer[String]("customer_center", new SimpleStringSchema(),kafkaProps)
    val stream = senv.addSource(myConsumer)

    // Parse each raw JSON line exactly once and share the parsed stream between
    // both pipelines (previously each pipeline re-parsed every record).
    val parsedStream = stream.map(line=>JSON.parseObject(line))

    // Keeps only change records for the given table of `customer_center` and
    // extracts the first row image from the record's "data" array.
    // NOTE(review): only data[0] is consumed — multi-row change records, if the
    // upstream CDC tool batches them, would lose rows; confirm the record format.
    def tableRows(table: String) =
      parsedStream
        .filter(r=>r.getString("database")=="customer_center" && r.getString("table")==table)
        .map(r=>r.getJSONArray("data").getJSONObject(0))

    // NOTE(review): getLong/getInteger return boxed values that unbox into the
    // case-class fields; a missing/null JSON field would NPE here — confirm the
    // feed always populates these columns.
    val customerStream = tableRows("customer")
      .map(r=>Customers(r.getLong("id"),r.getLong("user_id"),r.getInteger("type"),r.getInteger("status")
        ,r.getString("register_date"),r.getString("create_at"),r.getString("update_at")
        ,r.getInteger("delete_flag"),r.getString("cuid"),r.getString("prod_id")))

    val userStream = tableRows("user")
      .map(r=>Users(r.getLong("id"),r.getString("mobile_phone"),r.getString("mobilephone_md5")
        ,r.getString("password"),r.getString("password_salt"),r.getString("token")
        ,r.getString("register_date"),r.getString("picture_url"),r.getInteger("type")
        ,r.getInteger("status"),r.getString("create_at"),r.getString("update_at")
        ,r.getInteger("delete_flag"),r.getString("uuid"),r.getString("login_name")
        ,r.getString("deal_pwd"),r.getString("deal_pwd_salt"),r.getString("reg_argee_url")
        ,r.getLong("custid_behalf")))

    // Register both streams as Table API tables with explicit column lists.
    stenv.registerDataStream("customers",customerStream,'id,'user_id,'customer_type,'status,'register_date,'create_at,'update_at,'delete_flag,'cuid,'prod_id)
    stenv.registerDataStream("users",userStream,'id,'mobile_phone,'mobilephone_md5,'password,'password_salt,'token,'register_date,'picture_url,'user_type,'status,'create_at,'update_at,'delete_flag,'uuid,'login_name,'deal_pwd,'deal_pwd_salt,'reg_argee_url,'custid_behalf)

//    val sql = stenv.sqlQuery("select a.id as cust_id,a.user_id,a.update_at as cust_update,COALESCE(b.update_at,'NULL') as user_update " +
//      "from customers a left join users b on (a.user_id = b.id)")

    ///val sqlRes = sql.toRetractStream[(Long,Long,String,String)].map(r=>(r._1,r._2._1,r._2._2,r._2._3,r._2._4)).keyBy(1).print()

    // Hadoop/Hive configuration for the HCatalog read; site files are expected
    // on the classpath.
    val conf = new Configuration()
    conf.set("hive.metastore.local","false")
   // conf.set("hive.metastore.uris","thrift://hadoop02:9083")
    //conf.set("hive.metastore.client.capability.check","false")
    conf.addResource("hive-site.xml")
    conf.addResource("core-site.xml")
    conf.addResource("hdfs-site.xml")

    // Sanity-print the effective metastore / HDFS settings.
    System.out.println(conf.get("hive.metastore.uris"))
    System.out.println(conf.get("dfs.nameservices"))
    // NOTE(review): HCatInputFormat has no visible import in this file — this
    // likely relies on org.apache.flink.hcatalog.scala.HCatInputFormat; confirm
    // the import exists elsewhere or add it, otherwise this will not compile.
    val hcat = new HCatInputFormat[HCatRecord]("core_product","test_skynet_decision_result",conf)
//        .getFields("key","fraud_freezeexpires")
//        .asFlinkTuples()
    val hiveTable = senv.createInput(hcat)
    hiveTable.print()
    senv.execute()

  }
}

