package com.atguigu.app

import java.sql.{Connection, PreparedStatement, ResultSet}
import java.text.SimpleDateFormat
import java.util.Date

import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializeConfig
import com.atguigu.bean.OrderInfor2
import com.atguigu.common.logger.{GmallConstants, MysqlSql}
import com.atguigu.utils.{JDBCUtils, MykafkaUtil}
import org.apache.hadoop.conf.Configuration
import org.apache.phoenix.spark._
import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable.ListBuffer

/**
 * 交易额
 * spark读取mysql order数据并保存进hbase
 *
 * 注意：
 * 在mysql中创建表：old_order_mid
 * 在Phoenix中创建表：gmall_order_info
 * create table gmall_order_info
 * (          id varchar primary key ,
 * province_id varchar,
 * consignee varchar,
 * order_comment varchar,
 * consignee_tel varchar,
 * order_status varchar,
 * payment_way varchar,
 * user_id varchar,
 * img_url varchar,
 * total_amount double,
 * expire_time varchar,
 * delivery_address varchar,
 * create_time varchar,
 * operate_time varchar,
 * tracking_no varchar,
 * parent_order_id varchar,
 * out_trade_no varchar,
 * trade_body varchar,
 * create_date varchar,
 * create_hour varchar,
 * history bigint,
 * delete_no bigint
 * )
 *
 * 新版本的CDH需要2个jar包phoenix-core，hadoop-common
 *
 * @author WangJX
 * @date 2019/11/29 13:54
 * @version 1.0
 */
object OrderApp {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("order App")
      .set("spark.streaming.kafka.maxRatePerPartition", "100")
      .set("spark.streaming.backpressure.enabled", "true")
      .set("spark.streaming.stopGracefullyOnShutdown", "true")
      .setMaster("local[*]")

    val ssc = new StreamingContext(conf, Seconds(5))

    // Raw order JSON strings from the GMALL_NEW_ORDER Kafka topic.
    val orderDstream = MykafkaUtil.getKafkaStream(GmallConstants.KAFKA_TOPIC_NEW_ORDER, ssc)

    // Mask sensitive fields, stamp date/hour, and flag first-time orders.
    val resultDstream = orderDstream
      .map(_.value())
      .transform { rdd =>
        // Driver side, once per batch: reload the ids of orders already
        // recorded in MySQL (table old_order_mid) so repeats can be detected.
        val historicalIds = new ListBuffer[String]()

        val conn: Connection = JDBCUtils.getConnection
        val statement: PreparedStatement = conn.prepareStatement(MysqlSql.SELECT_OLD_ORDER_MID)
        statement.setInt(1, 1)
        try {
          val set: ResultSet = statement.executeQuery()
          while (set.next()) {
            historicalIds += set.getInt(1).toString
          }
          set.close()
        } finally {
          // Druid pool: close() returns the connection to the pool rather
          // than tearing it down, so it is both safe and required here.
          statement.close()
          conn.close()
        }

        // Re-broadcast every batch so executors see the freshly loaded ids.
        val idsBroadcast: Broadcast[ListBuffer[String]] = ssc.sparkContext.broadcast(historicalIds)

        rdd.map { str =>
          val orderInfo: OrderInfor2 = JSON.parseObject(str, classOf[OrderInfor2])

          // Mask the consignee phone: keep the first 3 and last 4 digits.
          // Guarded so malformed/short numbers don't kill the batch.
          val phone: String = orderInfo.consignee_tel
          if (phone != null && phone.length >= 7) {
            orderInfo.consignee_tel =
              phone.substring(0, 3) + "****" + phone.substring(phone.length - 4)
          }

          // Stamp processing date and hour (format "yyyy-MM-dd HH").
          val dateString: String = new SimpleDateFormat("yyyy-MM-dd HH").format(new Date)
          val date: Array[String] = dateString.split(" ")
          orderInfo.create_date = date(0)
          orderInfo.create_hour = date(1)

          // history = 1 marks the customer's first-ever order.
          // NOTE(review): mutating the broadcast list only works because the
          // app runs with setMaster("local[*]"); on a real cluster each
          // executor mutates its own copy, so duplicates inside one batch
          // could all be flagged as "first" — confirm before deploying.
          val seenIds = idsBroadcast.value
          if (!seenIds.contains(orderInfo.id)) {
            orderInfo.history = 1
            seenIds += orderInfo.id
          } else {
            orderInfo.history = 0
          }

          // Soft-delete flag, always 0 for fresh records.
          orderInfo.delete_no = 0

          // SerializeConfig(true) serializes by field access, preserving the
          // bean's snake_case field names in the emitted JSON.
          JSON.toJSONString(orderInfo, new SerializeConfig(true))
        }
      }
      .cache() // resultDstream feeds two sinks below; cache avoids recompute

    // Sink 1: persist every order into the Phoenix table gmall_order_info.
    resultDstream
      .map { json =>
        // saveToPhoenix needs the records as Scala case-class instances.
        JSON.parseObject(json, classOf[OrderInfor2])
      }
      .foreachRDD { rdd =>
        rdd.saveToPhoenix("gmall_order_info",
          Seq("ID",
            "PROVINCE_ID",
            "CONSIGNEE",
            "ORDER_COMMENT",
            "CONSIGNEE_TEL",
            "ORDER_STATUS",
            "PAYMENT_WAY",
            "USER_ID", "IMG_URL",
            "TOTAL_AMOUNT",
            "EXPIRE_TIME",
            "DELIVERY_ADDRESS",
            "CREATE_TIME",
            "OPERATE_TIME",
            "TRACKING_NO",
            "PARENT_ORDER_ID",
            "OUT_TRADE_NO",
            "TRADE_BODY",
            "CREATE_DATE",
            "CREATE_HOUR",
            "HISTORY",
            "DELETE_NO"),
          new Configuration,
          Some("hadoop102,hadoop103,hadoop104:2181"))
      }

    // Sink 2: record the ids of first-time orders into MySQL (old_order_mid).
    resultDstream
      .map { json =>
        val orderInfo: OrderInfor2 = JSON.parseObject(json, classOf[OrderInfor2])
        (orderInfo.id, orderInfo)
      }
      .filter(_._2.history == 1)
      .groupByKey() // de-duplicate ids within the batch
      .map(_._1)
      .mapPartitions { idIter =>
        // BUG FIX: the original consumed the iterator in a for-loop and then
        // reused it (foreach + return value). An exhausted iterator is empty,
        // so nothing was printed and downstream print() saw no data.
        // Materialize once and iterate over the list instead.
        val ids = idIter.toList
        if (ids.nonEmpty) {
          val conn: Connection = JDBCUtils.getConnection
          val statement: PreparedStatement = conn.prepareStatement(MysqlSql.INSERT_OLD_ORDER_MID)
          try {
            for (id <- ids) {
              statement.setInt(1, id.toInt)
              // BUG FIX: the original built "msg" + println(id) (String + Unit),
              // so the log line was never emitted.
              if (statement.executeUpdate() > 0) {
                println("写入数据到mysql：" + id)
              }
            }
          } finally {
            statement.close()
            conn.close() // returned to the Druid pool
          }
        }
        ids.iterator
      }
      .print()

    ssc.start()

    ssc.awaitTermination()

  }

}
