package com.lenovo.export

import java.sql.DriverManager
import com.lenovo.function.Utils
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
  * userprofile nature base label
  */
/**
  * Exports the last 180 days of ITSM incident records from Hive
  * (ccsd.itsm_dw_incident) into the MySQL table tb_incident1,
  * overwriting any previous contents.
  *
  * All timestamp columns are CAST to string in the SELECT so the JDBC
  * writer maps them to plain text instead of relying on driver-specific
  * DATETIME handling.
  */
object Day_incident_export {

  def main(args: Array[String]): Unit = {
    val util = new Utils
    val sparkSession = SparkSession.builder
      .master("yarn")
      .appName("Day_incident_export")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // BUGFIX: the original query embedded the current day as '<day> ' — a
      // stray trailing space inside the quoted date literal handed to
      // date_sub, producing a malformed date string. Removed here.
      val cutoffDay = util.getDay()

      val incidentQuery =
        s"""select
           |  incidentid, servicerequestid, tickettype, tickettypename, internetemail,
           |  customername, customerphone, customersite, customer_city, customer_country,
           |  department, vip,
           |  CAST(submitdate as string) submitdate, newuser,
           |  CAST(newtime as string) newtime, assigneduser,
           |  CAST(assignedtime as string) assignedtime,
           |  CAST(responded_date as string) responded_date, inprogressuser,
           |  CAST(inprogresstime as string) inprogresstime, pendinguser,
           |  CAST(pendingtime as string) pendingtime, resolveduser,
           |  CAST(resolvedtime as string) resolvedtime, closeduser,
           |  CAST(closedtime as string) closedtime,
           |  CAST(actual_resolved_time as string) actual_resolved_time, resolved_bymonth,
           |  CAST(resolved_time_totalday as string) resolved_time_totalday,
           |  CAST(latest_pending_start_time as string) latest_pending_start_time,
           |  CAST(latest_pending_end_time as string) latest_pending_end_time,
           |  pending_history_total_time, pending_history_total_time_tem, been_pending,
           |  CAST(pending_day as string) pending_day, pending_time_rate,
           |  CAST(resolved_time_ex_pending as string) resolved_time_ex_pending,
           |  CAST(reported_date as string) reported_date,
           |  reported_source, reported_sourcename, submitter, openticketrole,
           |  assigned_support_company, assigned_support_organization,
           |  assigned_group, assigned_group_id, assignee, assignee_login_id,
           |  owner_support_company, owner_support_organization, owner_group, owner_group_id,
           |  product_categorization_tier_1, product_categorization_tier_2,
           |  product_categorization_tier_3, product_categorization,
           |  closure_product_model_version, product_model_version,
           |  categorization_tier_1, categorization_tier_2, categorization_tier_3,
           |  service_type, service_typename, priority, priorityname,
           |  description, detailed_decription, resolution,
           |  root_cause, root_causename, status, statusname,
           |  status_reason, status_reasonname, cancelleduser,
           |  CAST(cancelledtime as string) cancelledtime,
           |  CAST(re_opened_date as string) re_opened_date, last_modified_by,
           |  CAST(last_modified_date as string) last_modified_date,
           |  sla, slm_priority, slm_priorityname, slm_status, slm_statusname,
           |  CAST(last__assigned_date as string) last__assigned_date,
           |  CAST(last_acknowledged_date as string) last_acknowledged_date,
           |  CAST(last_resolved_date as string) last_resolved_date,
           |  servicedesk_assignednumber,
           |  CAST(servicedesk_assigned_time as string) servicedesk_assigned_time,
           |  online_support_team_assignment, l1overallola,
           |  resolution_sla, resolution_slaname, response_sla, response_slaname,
           |  overall_sla, transfornumber, monitortype, monitorvalidation,
           |  businesscategorization1, businesscategorization2, businesscategorization3,
           |  closure_product_category_tier1, closure_product_category_tier2,
           |  closure_product_category_tier3, resolution_category,
           |  resolution_category_tier_2, resolution_category_tier_3,
           |  CAST(insertdatetime as string) insertdatetime,
           |  CAST(updatedatetime as string) updatedatetime,
           |  remark, business_unit, remarkname, closure_product_name, product_name
           |from ccsd.itsm_dw_incident
           |where date_format(submitdate, 'yyyy-MM-dd') > date_sub('$cutoffDay', '180')
           |""".stripMargin

      // JDBC connection properties.
      // SECURITY(review): root credentials are hardcoded in source — these
      // should be moved to a secured configuration / credential store.
      val prop = new java.util.Properties
      prop.setProperty("user", "root")
      prop.setProperty("password", "123456")
      prop.setProperty("charset", "utf8")
      prop.setProperty("driver", "com.mysql.jdbc.Driver")

      // Overwrite the target table (Spark creates it if it does not exist).
      // rewriteBatchedStatements batches the inserts on the MySQL side.
      sparkSession.sql(incidentQuery)
        .write
        .mode(SaveMode.Overwrite)
        .jdbc(
          "jdbc:mysql://10.96.91.186:3306/lenovo_upp_test?rewriteBatchedStatements=true&useUnicode=true&characterEncoding=utf8",
          "tb_incident1",
          prop)
    } finally {
      // Release the YARN application / executors even if the export fails.
      sparkSession.stop()
    }
  }
}
