package com.techsun.datanow.sync.hdfs

import com.microsoft.azure.cosmosdb.spark.config.Config
import com.microsoft.azure.cosmosdb.spark.schema._
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.types._

/**
 * @author lsc_bigdata@163.com
 * @since 2021/2/8 9:01
 * @version 1.0.1
 */
class Sync_TsrCoupon extends Sync_Parent {

  /**
   * Pulls the `TsrCoupon` collection from Cosmos DB for a `_ts` window and
   * appends the rows as CSV files on HDFS.
   *
   * @param spark             active SparkSession used for reading and writing
   * @param cosmosDbEndpoint  Cosmos DB account endpoint URL
   * @param cosmosDbMasterkey Cosmos DB master key
   * @param beginTime         inclusive lower bound on the document `_ts` (epoch seconds)
   * @param endTime           exclusive upper bound on the document `_ts` (epoch seconds)
   */
  override def sync(spark: SparkSession, cosmosDbEndpoint: String, cosmosDbMasterkey: String, beginTime: Long, endTime: Long): Unit = {

    // Quiet noisy framework logging. NOTE(review): a log4j.properties in
    // resources would be the cleaner place for this configuration.
    Logger.getLogger("org").setLevel(Level.ERROR)
    Logger.getLogger("com").setLevel(Level.ERROR)

    // Connector configuration: the custom query projects/renames the fields and
    // restricts documents to the [beginTime, endTime) `_ts` window.
    val config = Config(Map("Endpoint" -> cosmosDbEndpoint,
      "Masterkey" -> cosmosDbMasterkey,
      "Database" -> "MemberCenter",
      "preferredRegions" -> "China East 2;",
      "Collection" -> "TsrCoupon",
      "query_custom" -> s""" SELECT c.id id, c.memberId memberid, c.customerId customerid, c.code code, c.title as title,
                           | c.description description, c.bulkCampId as bulkcampid, c.behavCampId as behavcampid, c.type type,
                           | c.couponRuleId couponruleid, c.ruleCode as rulecode, c.status status, c.sendTime as sendtime, c.useTime as usetime,
                           | c.effectBegin as effectbegin, c.effectEnd as effectend, c.money as money, c.provinceCode as provincecode,
                           | c.cityCode as citycode, c.serialNo as serialno, c.createTime as createtime, c.updateTime as updatetime, c._ts as _ts
                           | FROM c
                           | where c._ts >= ${beginTime}  and c._ts < ${endTime}""".stripMargin))

    // Explicit read schema; every column is nullable. Column order mirrors the
    // projection list in the query above.
    val schema = StructType(
      List(
        "id"           -> StringType,
        "memberid"     -> StringType,
        "customerid"   -> StringType,
        "code"         -> StringType,
        "title"        -> StringType,
        "description"  -> StringType,
        "bulkcampid"   -> StringType,
        "behavcampid"  -> StringType,
        "type"         -> IntegerType,
        "couponruleid" -> StringType,
        "rulecode"     -> StringType,
        "status"       -> IntegerType,
        "sendtime"     -> TimestampType,
        "usetime"      -> TimestampType,
        "effectbegin"  -> TimestampType,
        "effectend"    -> TimestampType,
        "money"        -> DoubleType,
        "provincecode" -> StringType,
        "citycode"     -> StringType,
        "serialno"     -> StringType,
        "createtime"   -> TimestampType,
        "updatetime"   -> TimestampType,
        "_ts"          -> IntegerType
      ).map { case (name, dataType) => StructField(name, dataType, nullable = true) })

    // Read the query results into a DataFrame with the explicit schema.
    // spark.read is the idiomatic entry point since Spark 2.0 (replaces the
    // legacy sqlContext.read; both return the same DataFrameReader, on which
    // the Cosmos DB connector's implicit `cosmosDB` method is defined).
    val frame = spark.read.schema(schema).cosmosDB(config)

    // NOTE(review): target path looks like a temporary/test location — confirm
    // the production path before deploying.
    frame.write.mode(SaveMode.Append).csv("hdfs://prod-cdp-01:8020/test20210208_tmp")

    /*
        //TODO fill in real column descriptions before production use.
        // Fixed relative to the original draft: `commnet` typo, the duplicated
        // `effectend` column (the schema's `usetime` column was missing and the
        // labels for send/use/effect dates had shifted by one), and a
        // full-width comma that would have been a SQL syntax error.
        spark.sql(
          """
            |create external table if not exists ods_Sync_TsrCoupon(
            |id string comment '优惠券id',
            |memberid string comment '会员账号id',
            |customerid string comment '客户id',
            |code string comment '券号',
            |title string comment '标题',
            |description string comment '说明',
            |bulkcampid string comment '批量营销活动id',
            |behavcampid string comment '行为营销规则id',
            |type int comment '优惠券类型',
            |couponruleid string comment '优惠券规则id',
            |rulecode string comment '优惠券规则编号',
            |status int comment '状态',
            |sendtime timestamp comment '发放时间',
            |usetime timestamp comment '使用时间',
            |effectbegin timestamp comment '生效日期',
            |effectend timestamp comment '失效日期',
            |money double comment '面值',
            |provincecode string comment '提领省份编码',
            |citycode string comment '提领城市编码',
            |serialno string comment '券码唯一序列号',
            |createtime timestamp comment '创建时间',
            |updatetime timestamp comment '更新时间',
            |_ts int)
            |comment '优惠券表'
            |""".stripMargin).show()

        spark.sql("insert into table NewEmployeeInfo as select * from ods_Sync_TsrCoupon").rdd.saveAsTextFile("hdfs://dev-cdp-01:8020")*/

  }

}
