package com.fwmagic.spark.core.cases

import com.alibaba.fastjson.{JSON, JSONException}
import com.fwmagic.spark.streaming.util.{DBUtils, HbaseUtil}
import java.sql.{Connection, PreparedStatement, ResultSet}
import java.util.ArrayList
import org.apache.hadoop.hbase.client.{Put, Table}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.{TableName, client}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.{Logger, LoggerFactory}
import scala.util.control.NonFatal

/**
  * 解析json数据，关联mysql表，将关联后的结果写入Hbase
  *
  */
object CalCulateMoneyToHbase {

    def main(args: Array[String]): Unit = {
        val logger: Logger = LoggerFactory.getLogger(CalCulateMoneyToHbase.getClass)

        val conf: SparkConf = new SparkConf()
                .setAppName(this.getClass.getSimpleName)
                .setMaster("local[*]")

        val sc: SparkContext = new SparkContext(conf)

        //从hdfs上读取数据
        val lines: RDD[String] = sc.textFile("hdfs://hd1:9000/tmp/case/data/data.txt")

        //json -> globalsort
        val orders: RDD[Order] = lines.map(line => {
            var order: Order = null
            try {
                order = JSON.parseObject(line, classOf[Order])
            } catch {
                case e: JSONException => {
                    //logger.error(line, e)
                    //e.printStackTrace()
                    println("=====异常数据:" + line)
                }
            }
            order
        })

        //过滤
        val filtered: RDD[Order] = orders.filter(order => order != null)

        //关联mysql的纬度表数据
        val mapPartitioned: RDD[Order] = filtered.mapPartitions(iter => {
            if (iter.nonEmpty) {
                val sql = "select name from t_category where id=?"
                var con: Connection = null
                var ps: PreparedStatement = null
                var rs: ResultSet = null
                var orders: Iterator[Order] = null
                try {
                    con = DBUtils.getConnection()
                    ps = con.prepareStatement(sql)
                    orders = iter.map(order => {
                        ps.setInt(1, order.cid)
                        rs = ps.executeQuery()
                        while (rs.next()) {
                            val name: String = rs.getString("name")
                            order.cname = name
                        }
                        //关闭链接
                        if (!iter.hasNext) {
                            if (ps != null) ps.close()
                            if (rs != null) rs.close()
                            if (con != null) con.close()
                        }
                        order
                    })
                } catch {
                    case e: Exception => {
                        //e.printStackTrace()
                        logger.error("====>查询异常！", e)
                    }
                }
                orders
            } else {
                iter
            }
        })

        //打印数据结果
        mapPartitioned.collect().foreach(println)

        //写数据到Hbase:create 'spark_order' ,'order_info','other_info'
        mapPartitioned.foreachPartition(iter => {
            //获取Hbase的连接
            val con: client.Connection = HbaseUtil.getConnection("192.168.62.131:2181,192.168.62.132:2181,192.168.62.133:2181")
            val table: Table = con.getTable(TableName.valueOf("spark_order"))
            val puts = new ArrayList[Put](100)
            iter.foreach(order => {
                val put: Put = new Put(Bytes.toBytes(order.oid))
                put.addColumn(Bytes.toBytes("order_info"), Bytes.toBytes("cname"), Bytes.toBytes(order.cname))
                put.addColumn(Bytes.toBytes("order_info"), Bytes.toBytes("money"), Bytes.toBytes(order.money))
                puts.add(put)

                //批量插入
                if (puts.size() == 100) {
                    table.put(puts)
                    puts.clear()
                }
            })
            //插入剩余的数据
            table.put(puts)

            //关闭连接
            table.close()
            con.close()
        })

        //关闭任务
        sc.stop()
    }
}
