package com.fwmagic.spark.core.sink

import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializerFeature
import java.sql.Date
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.{Logger, LoggerFactory}
import redis.clients.jedis.Jedis
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal

/**
  * Spark job that writes sample Student records into Redis.
  */
object StudentSinkToRedis {

    private val logger: Logger = LoggerFactory.getLogger(StudentSinkToRedis.getClass)

    /**
      * Entry point: builds 10 sample Student records, parallelizes them into
      * an RDD with 3 partitions, and writes each partition to Redis.
      */
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf()
                .setAppName(this.getClass.getSimpleName)
                // Local mode; "*" runs one worker thread per available core.
                .setMaster("local[*]")

        val sc = new SparkContext(conf)

        // Build sample data.
        val students = new ArrayBuffer[Student]()
        for (i <- 1 to 10) {
            val time: Long = System.currentTimeMillis()
            students += Student(i, "name_" + i, "pwd_" + i, new Date(time), new Date(time))
        }

        // Create the RDD with 3 partitions.
        val rdd: RDD[Student] = sc.parallelize(students, 3)

        // Write the data to Redis (the original comment said "mysql",
        // but the sink is Redis — one connection per partition, see fun).
        rdd.foreachPartition(fun)

        sc.stop()
    }

    /**
      * Writes one partition of students into the Redis hash "spark_student",
      * keyed by the student id. Opens a single Jedis connection for the
      * partition and always closes it.
      *
      * NOTE: each field of Student needs the @BeanProperty annotation for
      * JSON.toJSONString(stu, SerializerFeature.PrettyFormat) to serialize it.
      *
      * @param it the students of this partition
      */
    private def fun(it: Iterator[Student]): Unit = {
        val jedis = new Jedis("localhost", 6379, 5000)
        try {
            // select is inside the try so a failure here cannot leak the connection.
            jedis.select(1)
            it.foreach { stu =>
                jedis.hset("spark_student", stu.id + "", JSON.toJSONString(stu, SerializerFeature.PrettyFormat))
            }
        } catch {
            // NonFatal only, so OutOfMemoryError / InterruptedException still propagate;
            // logger.error already records the stack trace, printStackTrace was redundant.
            case NonFatal(e) => logger.error(e.getMessage, e)
        } finally {
            // jedis is always non-null here; close unconditionally.
            jedis.close()
        }
    }
}
