package com.sunzm.spark.sql.exercise

import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType}
import org.apache.spark.sql.{Dataset, SparkSession}

/**
 * Aggregates call-out fee durations from a monthly CSV export, splitting the
 * totals into mobile-phone callers and landline callers.
 *
 * @author Administrator
 * @version 1.0
 * @date 2021-06-07 17:58
 */
object SparkSQLFeeJob {

  /**
   * A caller number is treated as a mobile phone when, after trimming, it is
   * exactly 11 characters long and starts with "1"; everything else is
   * counted as a landline. StringUtils handles null/blank callers safely.
   */
  private def isMobile(caller: String): Boolean = {
    val trimmed = StringUtils.trim(caller)
    StringUtils.length(trimmed) == 11 && StringUtils.startsWith(trimmed, "1")
  }

  def main(args: Array[String]): Unit = {
    // Allow the input path to be overridden from the command line; fall back
    // to the original hard-coded location for backward compatibility.
    val inputPath: String = args.headOption
      .getOrElse("C:\\mydata\\csv-out2\\call_out-fee-202104.csv")

    val spark: SparkSession = SparkSession
      .builder()
      .appName(s"${this.getClass.getSimpleName.stripSuffix("$")}")
      .master("local[*]")
      .config("spark.default.parallelism", 6)
      .config("spark.sql.shuffle.partitions", 6)
      .getOrCreate()

    import spark.implicits._

    // Explicit schema for the CSV columns:
    // (companyId, caller, email, feeDuration, fee, callOutCount)
    val schema = StructType(Array[StructField](
      StructField("companyId", StringType),
      StructField("caller", StringType),
      StructField("email", StringType),
      StructField("feeDuration", LongType),
      StructField("fee", LongType),
      StructField("callOutCount", LongType)
    ))

    val dataDS: Dataset[(String, String, String, Long, Long, Long)] =
      spark.read.option("header", true)
        .schema(schema)
        .csv(inputPath)
        .as[(String, String, String, Long, Long, Long)]

    // Single pass over the data: each row contributes its feeDuration to
    // either the mobile bucket or the landline bucket. The previous version
    // scanned the dataset twice with a duplicated predicate and used
    // Dataset.reduce, which throws on an empty input; fold with a (0, 0)
    // zero value handles an empty CSV gracefully.
    val (sum1, sum2) = dataDS
      .map { case (_, caller, _, feeDuration, _, _) =>
        if (isMobile(caller)) (feeDuration, 0L) else (0L, feeDuration)
      }
      .rdd
      .fold((0L, 0L)) { case ((m1, l1), (m2, l2)) => (m1 + m2, l1 + l2) }

    println(s"手机号：${sum1}")
    println(s"固话：${sum2}")
    println(s"合计：${sum1} + ${sum2} = ${sum1 + sum2}")

    spark.close()
  }
}
