import json
import os
import subprocess

import pymysql

def get_count1(begin_date, db, table_name, partition_name):
    """Return the row count of one partition of a Hive table via spark-sql.

    Parameters:
        begin_date: partition value to count (e.g. '2021-01-01').
        db: Hive database name.
        table_name: Hive table name.
        partition_name: partition column name (e.g. 'dt').

    Returns:
        int: the count reported by spark-sql, or 0 if spark-sql is
        unavailable or its output cannot be parsed as an integer
        (matching the original's return-0-on-failure behavior).
    """
    # NOTE(review): the identifiers and the partition value are still
    # interpolated into the SQL text; they are expected to come from
    # trusted job configuration -- confirm begin_date is never user input.
    query = "select count(1) from {db}.{table} where {part}='{value}'".format(
        db=db, table=table_name, part=partition_name, value=begin_date
    )
    try:
        # Pass argv as a list with shell=False so the query string cannot
        # be re-interpreted by a shell (the old os.popen() call built a
        # shell command by string concatenation and leaked the pipe handle).
        result = subprocess.run(
            ["spark-sql", "-e", query],
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL,
            universal_newlines=True,
        )
        # spark-sql may emit trailing newlines; strip before parsing.
        cnt = int(result.stdout.strip())
    except (OSError, ValueError):
        # OSError: spark-sql binary missing; ValueError: non-numeric output.
        print("get_count1: failed to obtain count from spark-sql, defaulting to 0")
        cnt = 0
    return cnt

def get_conn(mysql_conn_json):
    """Open and return a pymysql connection built from a settings dict.

    Expected keys in mysql_conn_json: host, port, username, password, db.
    Missing keys yield None for the corresponding argument.
    """
    cfg = mysql_conn_json
    return pymysql.connect(
        host=cfg.get('host'),
        port=cfg.get('port'),
        user=cfg.get('username'),
        passwd=cfg.get('password'),
        db=cfg.get('db'),
        charset='utf8',
    )


def execute(sql, mysql_conn_json):
    """Run a single SQL statement against MySQL and commit it.

    Parameters:
        sql: the SQL statement to execute.
        mysql_conn_json: connection settings dict (see get_conn).

    Raises whatever pymysql raises on connection or execution failure;
    the connection is always closed, even on error.
    """
    conn = get_conn(mysql_conn_json)
    try:
        # pymysql cursors are context managers: the cursor is released
        # even if execute() raises.
        with conn.cursor() as cursor:
            cursor.execute(sql)
        conn.commit()
    finally:
        # The original leaked the connection when execute() raised;
        # always close it.
        conn.close()


def main(begin_date, mysql_conn_json_str):
    """Count one day's partition of the warehouse-rule table and record it in MySQL.

    Parameters:
        begin_date: partition value (dt) whose rows are counted.
        mysql_conn_json_str: JSON string with MySQL connection settings;
            single-quoted pseudo-JSON is tolerated.
    """
    # Callers sometimes pass single-quoted pseudo-JSON; normalise to
    # double quotes before parsing.
    conn_settings = json.loads(mysql_conn_json_str.replace("'", '"'))

    hive_db = 'jms_dm'
    table_name = 'dm_cn_address_warehouse_day_rule_dt_parquet'
    partition_name = 'dt'
    cnt = get_count1(begin_date, hive_db, table_name, partition_name)

    insert_sql = (
        "insert into t_sync_kafka_spark"
        "(table_name,spark_flag,hdfs_file_date,data_size) "
        "values('dm_cn_address_warehouse_day_rule_dt_parquet','1','{begin_date}','{cnt}')"
    ).format(begin_date=begin_date, cnt=cnt)
    execute(insert_sql, conn_settings)
