#!/usr/bin/python3
# -*- coding: UTF-8 -*-

# Run as a shell command: su - hdfs -s /bin/bash -c "spark-submit --master yarn --deploy-mode cluster --executor-cores 1 --executor-memory 1g    hdfs:/user/hdfs/sparktest.py"
# su - hdfs -s /bin/bash -c "spark-submit --master yarn --deploy-mode cluster --executor-cores 1 --executor-memory 1g --py-files /opt/cloudera/pyspark/sql_conf.py   /opt/cloudera/pyspark/sparktest.py"

import os
import subprocess
import uuid
from datetime import datetime

from pyspark.sql import SparkSession
from pyspark.sql.types import StringType


class SparkUtil:
    """Thin wrapper around a Hive-enabled SparkSession for ETL jobs that move
    data between Hive tables and MySQL (via JDBC).

    NOTE(review): the Hive metastore URI is hard-coded for one specific
    cluster below — confirm before reusing elsewhere.
    """

    def __init__(self, appName):
        """Build the SparkSession for application *appName*.

        Alternative config kept for reference:
        .config("spark.sql.hive.convertMetastoreOrc", "false")
        """
        self.sparkSession = SparkSession.builder.appName(appName)\
            .config('spark.driver.maxResultSize','40g') \
            .config("spark.sql.crossJoin.enabled", "true") \
            .config("spark.sql.broadcastTimeout", "36000") \
            .config("spark.sql.autoBroadcastJoinThreshold", -1) \
            .config("spark.sql.debug.maxToStringFields", "100") \
            .config("hive.metastore.uris", "thrift://worker2.inspur:9083") \
            .enableHiveSupport()\
            .getOrCreate()
        # self.sparkSession.sparkContext.setLogLevel("ERROR")

    def execDatabaseName(self, databaseName):
        """Switch the session's current database (``USE databaseName``)."""
        self.sparkSession.sql("use " + databaseName)
        print("---------------execDatabaseName---------------" + databaseName)

    def execSql(self, sql):
        """Execute *sql* and return the resulting DataFrame."""
        data = self.sparkSession.sql(sql)
        print("---------------execSql---------------sparkSession.sql---------------")
        return data

    def checkJsonUdf(self, udfNmae, check_json):
        """Register callable *check_json* as a SQL UDF named *udfNmae*.

        (Misspelled parameter name kept as-is — it is part of the public
        interface for keyword-argument callers.)
        """
        self.sparkSession.udf.register(udfNmae, check_json)
        print("---------------checkJsonUdf---------------")

    def dropTempView(self, tableName):
        """Drop the temporary view *tableName* from the catalog."""
        self.sparkSession.catalog.dropTempView(tableName)
        print("---------------dropTempView---------------"+tableName)

    def execSqlNoResult(self, sql):
        """Execute *sql* purely for its side effects, discarding the result."""
        self.sparkSession.sql(sql)

    def saveRdbms(self, sql, url, tableName, mode, prop):
        """Run *sql* and write the result to a JDBC table.

        WARNING: stops the SparkSession afterwards — this instance is
        unusable once the call returns.
        """
        data = self.sparkSession.sql(sql)
        print("---------------saveRdbms---------------")
        # Write the result set out over JDBC.
        data.write.jdbc(url=url, table=tableName, mode=mode, properties=prop)
        # Shut down the Spark session.
        self.sparkSession.stop()

    def queryRdbms(self, url, tableName, props):
        """Load a JDBC table into a DataFrame."""
        print("---------------queryRdbms---------------")
        return self.sparkSession.read.jdbc(url=url, table=tableName, properties=props)

    def queryRdbmsByWhere(self, url, tableName, props, whereSql):
        """Load a JDBC table, applying *whereSql* as a filter when given."""
        print("---------------queryRdbms---------------")
        data = self.sparkSession.read.jdbc(url=url, table=tableName, properties=props)
        return data.where(whereSql) if whereSql else data

    def execError(self, tableName, err):
        """Append one error record to the Hive table ``xy_dwd.t_error_info``."""
        dataFrame = self.sparkSession.createDataFrame(
            [(uuid.uuid4().hex, err, tableName, str(datetime.now()))],
            ['id', 'error_message', "table_name", 'create_date'])
        dataFrame.write.format("hive").mode("append").saveAsTable("xy_dwd.t_error_info")
        print("---------------执行报错，插入报错信息---------------"+tableName+"-----------"+err)

    def distory(self):
        """Stop the SparkSession if present (misspelled name kept for callers)."""
        if self.sparkSession:
            self.sparkSession.stop()

    @staticmethod
    def queryData(databaseName, sql, tableName, debug_partition=None):
        """Run *sql* inside *databaseName* and overwrite *tableName* with the
        result.  Owns its session: it is always stopped in ``finally``.
        """
        # FIX: sparkUtil must exist before the try block — the original
        # raised NameError in `finally` when SparkUtil() itself failed,
        # masking the real error.
        sparkUtil = None
        try:
            tempTable = uuid.uuid4().hex
            sparkUtil = SparkUtil(databaseName+"-"+tableName)
            sparkUtil.execDatabaseName(databaseName)
            # Run the business SQL, stage it as a temp view, then overwrite
            # the target table from that view.
            data = sparkUtil.execSql(sql)
            data.createOrReplaceTempView(tempTable)
            sparkUtil.execSql(''' insert overwrite table '''+tableName+'''  select * from '''+tempTable)
            sparkUtil.dropTempView(tempTable)
        except Exception as err:
            print("---------------数据写入失败---------------sql---------------"+sql+"---------------err.message---------------"+str(err))
            raise
        else:
            print("---------------数据写入成功---------------"+tableName)
        finally:
            if sparkUtil is not None:
                sparkUtil.distory()

    @staticmethod
    def loadData(sparkUtil, sql, tableName, debug_partition=None):
        """Run *sql* on an existing *sparkUtil* session and overwrite
        *tableName* with the result."""
        try:
            tempTable = uuid.uuid4().hex
            # Run the business SQL and stage it as a temp view.
            data = sparkUtil.execSql(sql)
            data.createOrReplaceTempView(tempTable)
            sparkUtil.execSql(''' insert overwrite table '''+tableName+'''  select * from '''+tempTable)
            sparkUtil.dropTempView(tempTable)
        except Exception as err:
            print("---------------数据写入失败---------------sql---------------"+sql+"---------------err.message---------------"+str(err))
            raise
        else:
            print("---------------数据写入成功--------------------------------------------------------" + tableName)

    @staticmethod
    def loadDataBatchDate(batchDate, sparkUtil, sql, tableName, debug_partition=None):
        """Overwrite *tableName* with the result of *sql*, but only when the
        target already holds rows for batch *batchDate* (column ``weeks``)."""
        try:
            # FIX: the original built invalid SQL (no space before "where")
            # and compared the DataFrame object itself to 0; collect the
            # scalar count instead.
            countDf = sparkUtil.execSql(''' select count(1) from ''' + tableName + ''' where weeks =''' + batchDate)
            rowCount = countDf.collect()[0][0]
            if rowCount > 0:
                # FIX: str() — the original concatenated a non-string into the message.
                print("---------------当前批次数据--------------------------------------------------------" + str(rowCount))
                tempTable = uuid.uuid4().hex
                # Run the business SQL and stage it as a temp view.
                data = sparkUtil.execSql(sql)
                data.createOrReplaceTempView(tempTable)
                sparkUtil.execSql(''' insert overwrite table ''' + tableName + '''  select * from ''' + tempTable)
                sparkUtil.dropTempView(tempTable)
            else:
                print("---------------无当前批次数据--------------------------------------------------------" + tableName)
        except Exception as err:
            print("---------------数据写入失败---------------sql---------------" + sql + "---------------err.message---------------" + str(err))
            raise
        else:
            print("---------------数据写入成功--------------------------------------------------------" + tableName)

    @staticmethod
    def loadDataPartition(sparkUtil, sql, tableName, weeks, debug_partition=None):
        """Overwrite one (month, weeks) partition of *tableName* with the
        result of *sql*.

        The month partition value is the first six characters of *weeks* —
        presumably yyyymm out of a yyyymmdd-style batch key; TODO confirm.
        """
        try:
            tempTable = uuid.uuid4().hex
            dates = weeks[0:6]
            # Run the business SQL and stage it as a temp view.
            data = sparkUtil.execSql(sql)
            data.createOrReplaceTempView(tempTable)
            querySql = ''' insert overwrite table '''+tableName+''' partition (month='''+dates+''',weeks='''+weeks+''') select * from ''' + tempTable
            print(querySql)
            sparkUtil.execSql(querySql)
            sparkUtil.dropTempView(tempTable)
        except Exception as err:
            print("---------------数据写入失败---------------sql---------------"+sql+"---------------err.message---------------"+str(err))
            raise
        else:
            print("---------------数据写入成功---------------" + tableName)

    @staticmethod
    def loadDataToHive(sparkUtil, sourceTableName, targetTableName, mysql, debug_partition=None):
        """Copy a MySQL table into a Hive table (full overwrite).

        NOTE(review): this reads ``mysql.username`` while loadDataToMysql
        reads ``mysql.user`` — verify which attribute the config object
        actually exposes; one of the two is probably wrong.
        """
        try:
            tempTable = uuid.uuid4().hex
            prop = {'user': mysql.username,
                    'password': mysql.password,
                    'driver': mysql.driver,
                    'batchsize': mysql.batchsize,
                    'truncate': mysql.truncate,
                    'isolationLevel': 'NONE'}
            data = sparkUtil.queryRdbms(mysql.url, sourceTableName, prop)
            data.show(20)
            data.createOrReplaceTempView(tempTable)
            sparkUtil.execSql(''' insert overwrite table '''+targetTableName+'''  select * from '''+tempTable)
            sparkUtil.dropTempView(tempTable)
        except Exception as err:
            print("---------------数据写入失败---------------" + targetTableName+"----------err.message---------------" + str(err))
            raise
        else:
            print("---------------数据写入成功---------------" + targetTableName)

    @staticmethod
    def loadDataToMysql(sparkUtil, sql, tableName, mysql, debug_partition=None):
        """Run *sql* on Hive and overwrite the MySQL table *tableName* with
        the result via JDBC (mode 'overwrite')."""
        try:
            data = sparkUtil.execSql(sql)
            prop = {'user': mysql.user,
                    'password': mysql.password,
                    'driver': mysql.driver,
                    'batchsize': mysql.batchsize,
                    'truncate': mysql.truncate,
                    'isolationLevel': 'NONE'}
            # FIX: removed the temp view that was created and immediately
            # dropped — the JDBC write below never referenced it.
            data.write.jdbc(mysql.url, tableName, 'overwrite', prop)
        except Exception as err:
            print(
                "---------------数据写入失败---------------sql---------------" + sql + "---------------err.message---------------" + str(
                    err))
            raise
        else:
            print("---------------数据写入成功---------------" + tableName)

    @staticmethod
    def sendMail(QQmail, QQmessage, QQtitle):
        """Send a warning mail via the external run_warning.py script.

        FIX: arguments are passed as an argv list with shell=False, so
        whitespace or shell metacharacters in the message can no longer
        break or inject into the command line (the original interpolated
        them straight into an os.system() string).  check=True also makes
        a non-zero exit report failure instead of printing success.
        """
        try:
            subprocess.run(
                ["python3", "/data1/xy/kongcb/warning/run_warning.py",
                 str(QQmail), str(QQmessage), str(QQtitle)],
                check=True)
        except Exception as err:
            print("---------------发送失败---------------" + str(err))
        else:
            print("---------------发送成功---------------")