package com.bigdata.spark.flume

import com.bigdata.spark.util.MysqlUtil
import org.apache.spark.sql.SparkSession

/**
 * Entry point of the Nginx-log analysis job: runs a Spark SQL query
 * against Hive and writes the result set into MySQL.
 *
 * Tutorial reference: https://www.imooc.com/video/19728
 *
 * @author Gerry chan
 * @version 1.0
 */
object Statistics {
  def main(args: Array[String]): Unit = {
    // Master/app name are expected to come from spark-submit; only Hive
    // support is enabled here so `sql` can read Hive-managed tables.
    val sparkSession = SparkSession.builder()
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Select high-scoring users from the Hive `users` table.
      val df = sparkSession.sql(
        "select uid,name from users where total_mark>1000"
      )
      // Persist the Hive query result into the MySQL `users` table.
      MysqlUtil.insert("users", df)
    } finally {
      // Always release the Spark session, even when the query or the
      // MySQL insert throws — otherwise the session (and its cluster
      // resources) would leak on failure.
      sparkSession.stop()
    }
  }
}
