#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
'''
@File    :   sql2spark.py
@Time    :   2021/12/08 16:40:25
@Author  :   陈培杞
@Version :   1.0
@Doc     :   sql文件转spark

约定sql文件格式：
1. 每个sql文件只允许有一条sql语句，以set开头的环境设置语句可以保留，但必须在spark-submit的--conf中重新配置
2. 另起一行，以注释--<sql>为sql语句开始标记
3. sql语句必须以";"结尾

export PYSPARK_PYTHON=/root/anaconda3/bin/python3
export PYSPARK_DRIVER_PYTHON=/root/anaconda3/bin/python3 
spark-submit sql2spark.py  -l 20211020 -c 20211021 -d isc_dev -s ./ass.sql
'''

from datetime import datetime
from pyspark.sql.session import SparkSession
from pyspark.context import SparkContext
from pyspark.conf import SparkConf
from pyspark.sql import functions as F
from pyspark.sql.functions import from_unixtime, unix_timestamp

# Build (or reuse) a Hive-enabled SparkSession. All runtime configuration is
# expected to arrive via spark-submit --conf (see the module docstring).
spark = SparkSession.builder.config(conf=SparkConf()).enableHiveSupport().getOrCreate()
sc =  spark.sparkContext  # NOTE(review): `sc` is never used in this file — confirm before removing
# "Error" is presumably normalized to ERROR (Spark upper-cases the level
# internally) — verify against the deployed Spark version.
spark.sparkContext.setLogLevel("Error")
# Allow INSERT into dynamic partitions without naming a static partition first.
spark.sql("set hive.exec.dynamic.partition.mode=nonstrict")

import argparse


def argsParse():
    """Parse the command-line arguments for the sql-to-spark runner.

    Returns:
        argparse.Namespace with string attributes ``lastDay``, ``currentDay``,
        ``hiveDb`` and ``sqlFile``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-l', '--lastDay', type=str, required=True,
                        help="样式yyyymmdd代码执行日期")
    parser.add_argument('-c', '--currentDay', type=str, required=True,
                        help="样式yyyymmdd，当前日期")
    # NOTE: the original code also passed `default=` on the two options below,
    # but argparse ignores `default` when required=True, so the defaults were
    # dead, misleading configuration — removed (behavior unchanged).
    parser.add_argument('-d', '--hiveDb', type=str, required=True,
                        help="数据库名")
    parser.add_argument('-s', '--sqlFile', type=str, required=True,
                        help="sql文件")
    return parser.parse_args()



def main(args):
    """Load the SQL file, substitute ``${...}`` variables, and run it on Spark.

    Args:
        args: argparse.Namespace with ``lastDay``, ``currentDay``, ``hiveDb``
              (substitution values) and ``sqlFile`` (path to the .sql file).

    Raises:
        ValueError: if the ``--<sql>`` marker line is missing, or the statement
                    is not terminated by ";".
    """
    with open(args.sqlFile, 'r', encoding='utf8') as f:
        # Everything before the "--<sql>" marker (e.g. `set ...` lines) is
        # skipped; only the text after the marker is executed.
        for line in f:
            if line.strip() == "--<sql>":
                break
        else:
            # The original silently submitted an empty string here; fail loudly.
            raise ValueError(f"marker '--<sql>' not found in {args.sqlFile}")
        sql = f.read()

    sql = (sql.replace("${lastDay}", args.lastDay)
              .replace("${hiveDb}", args.hiveDb)
              .replace("${currentDay}", args.currentDay))

    # The file contract allows exactly one statement, terminated by ";".
    statement, sep, _ = sql.partition(";")
    if not sep:
        raise ValueError(f"no terminating ';' found in {args.sqlFile}")

    print(statement)
    spark.sql(statement).show()

if __name__ == '__main__':
    # Script entry point: parse CLI arguments and run the sql file on Spark.
    main(argsParse())
