import java.util.UUID

import com.atguigu.commons.conf.ConfigurationManager
import com.atguigu.commons.constant.Constants
import com.atguigu.commons.utils.ParamUtils
import net.sf.json.JSONObject
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}

object AreaTop3Stat {

	/**
	 * Entry point: computes the top-3 most-clicked products per geographic area
	 * and appends the result to the `area_top3_product` JDBC table.
	 *
	 * Pipeline:
	 *   1. Load (cityId, productId) click pairs restricted to the task's date range.
	 *   2. Join with the static city -> area mapping into `tmp_area_basic_info`.
	 *   3. Aggregate click counts per (area, pid) into `tmp_area_click_count`.
	 *   4. Enrich with product name/status into `tmp_area_count_product_info`.
	 *   5. Rank products per area and persist the top 3 of each area.
	 */
	def main(args: Array[String]): Unit = {

		// Task filter parameters (start/end date, etc.) arrive as a JSON string in configuration.
		val jsonStr: String = ConfigurationManager.config.getString(Constants.TASK_PARAMS)
		val taskParam: JSONObject = JSONObject.fromObject(jsonStr)

		// Unique id tagging every row written by this run, so runs can be told apart downstream.
		val taskUUID = UUID.randomUUID().toString

		val sparkConf: SparkConf = new SparkConf().setAppName("session").setMaster("local[*]")
		// SparkSession with Hive support; it owns the underlying SparkContext.
		val sparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

		// RDD[(cityId, pid)] — one element per product-click action.
		val city2PidRDD = getCityAndProductInfo(sparkSession, taskParam)

		// RDD[(cityId, CityAreaInfo)] — static city -> area metadata.
		val cityId2AreaInfoRDD: RDD[(Long, CityAreaInfo)] = getCityAreaInfo(sparkSession)

		// tmp_area_basic_info: one row per product-click action, enriched with city/area info.
		getAreaPidBasicInfoTable(sparkSession, city2PidRDD, cityId2AreaInfoRDD)

		// Builds "cityId:cityName" tokens, later grouped into a distinct city list per (area, pid).
		sparkSession.udf.register("concat_long_string", (v1: Long, v2: String, split: String) => {
			v1 + split + v2
		})

		// UDAF that concatenates distinct values within a group (declared elsewhere in the project).
		sparkSession.udf.register("group_concat_distinct", new GroupConcatDistinct)

		getAreaProductClickCountTable(sparkSession)

		// Extracts a single field from a JSON-encoded column (used on product_info.extend_info).
		sparkSession.udf.register("get_json_field", (json: String, field: String) => {
			val jsonObject = JSONObject.fromObject(json)
			jsonObject.getString(field)
		})

		getAreaProductClickCountInfo(sparkSession)

		getTop3Product(sparkSession, taskUUID)

		// Debug output: show the final top-3 result that was written to JDBC.
		sparkSession.sql("select * from temp_test").show()

		// Release the SparkContext and associated resources (was missing in the original).
		sparkSession.stop()
	}

	/**
	 * Ranks products by click count within each area, keeps the top 3 per area,
	 * grades each area (A/B/C/D level) and appends the rows to the
	 * `area_top3_product` JDBC table. Also registers the result as the
	 * `temp_test` temp view so `main` can display it for debugging.
	 *
	 * @param sparkSession active session with the temp views already registered
	 * @param taskUUID     run identifier stamped onto every output row
	 */
	def getTop3Product(sparkSession: SparkSession, taskUUID: String) = {

		val sql = "SELECT " +
				"area," +
				"CASE " +
				"WHEN area='华北' OR area='华东' THEN 'A Level' " +
				"WHEN area='华南' OR area='华中' THEN 'B Level' " +
				"WHEN area='西南' OR area='西北' THEN 'C Level' " +
				"ELSE 'D Level' " +
				"END area_level," +
				"city_infos, pid, product_name, product_status, click_count " +
				" FROM (" +
				"	select area, city_infos, pid, product_name, product_status, click_count, " +
				"row_number() over (PARTITION BY area ORDER BY click_count DESC) rank from tmp_area_count_product_info" +
				") t " +
				"WHERE rank<=3"

		import sparkSession.implicits._

		// Build the DataFrame once and reuse it for both the debug view and the
		// JDBC write (the original constructed the same query plan twice).
		val top3DF = sparkSession.sql(sql)
		top3DF.createOrReplaceTempView("temp_test")

		val top3ProductRDD = top3DF.rdd.map { row =>
			AreaTop3Product(taskUUID, row.getAs[String]("area"), row.getAs[String]("area_level"),
				row.getAs[Long]("pid"), row.getAs[String]("city_infos"),
				row.getAs[Long]("click_count"), row.getAs[String]("product_name"),
				row.getAs[String]("product_status"))
		}

		top3ProductRDD.toDF().write
				.format("jdbc")
				.option("url", ConfigurationManager.config.getString(Constants.JDBC_URL))
				.option("dbtable", "area_top3_product")
				.option("user", ConfigurationManager.config.getString(Constants.JDBC_USER))
				.option("password", ConfigurationManager.config.getString(Constants.JDBC_PASSWORD))
				.mode(SaveMode.Append)
				.save()
	}

	/**
	 * Joins the per-(area, pid) click counts with `product_info` to attach the
	 * product name and self/third-party status (decoded from the JSON
	 * `extend_info` column). Result registered as `tmp_area_count_product_info`.
	 */
	def getAreaProductClickCountInfo(sparkSession: SparkSession) = {
		val sql = "select tacc.area, tacc.city_infos, tacc.pid, pi.product_name, " +
				"if (get_json_field(pi.extend_info,'product_status')='0','Self','Third Party') product_status, " +
				" tacc.click_count " +
				" from tmp_area_click_count tacc join product_info pi on tacc.pid = pi.product_id"
		sparkSession.sql(sql).createOrReplaceTempView("tmp_area_count_product_info")
	}

	/**
	 * Aggregates `tmp_area_basic_info` by (area, pid): counts clicks and builds
	 * a distinct "cityId:cityName" list per group via the registered UDFs.
	 * Result registered as `tmp_area_click_count`.
	 */
	def getAreaProductClickCountTable(sparkSession: SparkSession) = {
		val sql = "select area,pid,count(*) click_count," +
				"group_concat_distinct(concat_long_string(city_id,city_name, ':')) city_infos from " +
				"tmp_area_basic_info group by area, pid"

		sparkSession.sql(sql).createOrReplaceTempView("tmp_area_click_count")
	}

	/**
	 * Joins the click events with the city/area metadata and registers the
	 * flattened result as `tmp_area_basic_info` (one row per click action).
	 *
	 * @param city2PidRDD        RDD of (cityId, productId) click pairs
	 * @param cityId2AreaInfoRDD RDD of (cityId, CityAreaInfo) metadata
	 */
	def getAreaPidBasicInfoTable(sparkSession: SparkSession,
	                             city2PidRDD: RDD[(Long, Long)],
	                             cityId2AreaInfoRDD: RDD[(Long, CityAreaInfo)]) = {
		val areaPidInfoRDD: RDD[(Long, String, String, Long)] = city2PidRDD.join(cityId2AreaInfoRDD).map {
			case (cityId, (pid, areaInfo)) =>
				(cityId, areaInfo.city_name, areaInfo.area, pid)
		}

		import sparkSession.implicits._
		areaPidInfoRDD.toDF("city_id", "city_name", "area", "pid").createOrReplaceTempView("tmp_area_basic_info")
	}

	/**
	 * Builds the hard-coded city -> area mapping as RDD[(cityId, CityAreaInfo)].
	 */
	def getCityAreaInfo(sparkSession: SparkSession) = {
		val cityAreaInfoArray = Array((0L, "北京", "华北"), (1L, "上海", "华东"), (2L, "南京", "华东"),
			(3L, "广州", "华南"), (4L, "三亚", "华南"), (5L, "武汉", "华中"),
			(6L, "长沙", "华中"), (7L, "西安", "西北"), (8L, "成都", "西南"), (9L, "哈尔滨", "东北"))

		import sparkSession.implicits._
		sparkSession.sparkContext.makeRDD(cityAreaInfoArray).map {
			case (cityId, cityName, area) =>
				(cityId, CityAreaInfo(cityId, cityName, area))
		}
	}

	/** Backward-compatible alias for [[getCityAreaInfo]] (original name contained a typo). */
	@deprecated("use getCityAreaInfo instead", "1.0")
	def gitCityAreaInfo(sparkSession: SparkSession): RDD[(Long, CityAreaInfo)] = getCityAreaInfo(sparkSession)

	/**
	 * Loads all product-click actions inside the task's [startDate, endDate]
	 * window as RDD[(cityId, productId)]. Rows with click_product_id == -1
	 * (non-click actions) are excluded.
	 */
	def getCityAndProductInfo(sparkSession: SparkSession, taskParam: JSONObject) = {
		val startDate: String = ParamUtils.getParam(taskParam, Constants.PARAM_START_DATE)
		val endDate: String = ParamUtils.getParam(taskParam, Constants.PARAM_END_DATE)

		// BUG FIX: the original concatenated "'" + startDate + "'and", producing
		// "...'2019-01-01'and date..." with no space before the AND keyword.
		val sql = "select city_id, click_product_id from user_visit_action where  date >='" + startDate + "' and " +
				"date <= '" + endDate + "' and click_product_id != -1"

		import sparkSession.implicits._
		sparkSession.sql(sql).as[CityClickProduct].rdd.map(cityPid =>
			(cityPid.city_id, cityPid.click_product_id))
	}
}
