import datetime
import os
import time
import sqlite3
import sys

# HDFS source partition directory; the {} placeholder is filled with a
# YYYYMMDD day string by init_hadoop_command().
source_path = '/user/hive/warehouse/blackpanther.db/dw_man_warning_rt/startdate_day={}'
# HDFS target directory the daily partition directories are copied into.
target_path = '/user/hive/warehouse/blackpanther_test.db/tbl_man_warning_rt/'

'''
Uses shell commands to:
- copy files from one HDFS directory to another HDFS directory
- create the matching Impala partitions
'''

def init_sqlite(is_drop, type):
    '''
    Open the dw2tbl.db SQLite database that tracks every hdfs/impala command
    and its execution state, and publish the connection in module globals
    under the key 'sqlite' for the other helpers.

    :param is_drop: truthy -> drop and recreate the selected log table and
                    seed it with fresh commands; falsy -> reuse existing rows.
    :param type: 'hdfs' or 'impala' -- which log table to manage.
    :return: None
    '''
    print('init sqlite start')
    connection = sqlite3.connect('dw2tbl.db')
    globals()['sqlite'] = connection
    cursor = connection.cursor()
    if type == 'hdfs' and is_drop:
        cursor.execute('drop table if exists hadoop_log;')
        cursor.execute("create table hadoop_log (command TEXT, success INTEGER, use_time TEXT);")
        init_hadoop_command()
    elif type == 'impala' and is_drop:
        cursor.execute('drop table if exists impala_log;')
        cursor.execute("create table impala_log (command TEXT, success INTEGER, use_time TEXT);")
        init_impala_command()
    connection.commit()
    cursor.close()


def execute_cp_hadoop():
    '''
    Run every pending (success=0) hadoop copy command stored in hadoop_log.

    For each row: time the command, mark it success=1 and record the
    wall-clock duration.  NOTE(review): the actual `os.system` call is
    commented out, so today this only simulates execution and updates the
    bookkeeping.
    :return: None
    '''
    print('execute cp hadoop start ')
    sqlite = globals()['sqlite']
    cursor = sqlite.cursor()
    result = cursor.execute('select command from hadoop_log where success=0')
    for (hdfs_shell,) in result.fetchall():
        st = time.time()
        print(hdfs_shell)
        # os.system(hdfs_shell)
        et = time.time()
        # Parameterized update: command text may contain quote characters,
        # which would break (or inject into) a string-formatted statement.
        sql = 'update hadoop_log set success=1, use_time=? where command=?'
        print(sql)
        cursor.execute(sql, (str(et - st), hdfs_shell))
        # Commit per command so progress survives an interrupted run.
        sqlite.commit()
    cursor.close()


def execute_impala():
    '''
    Run every pending (success=0) impala command stored in impala_log.

    Each stored SQL snippet is wrapped in an `impala-shell -i dn32 -q "..."`
    invocation; on success the row is flagged success=1 with its duration.
    NOTE(review): the actual `os.system` call is commented out, so today this
    only simulates execution and updates the bookkeeping.
    :return: None
    '''
    print('execute impala')
    sqlite = globals()['sqlite']
    cursor = sqlite.cursor()
    result = cursor.execute('select command from impala_log where success=0')
    for (impala_shell,) in result.fetchall():
        ori = "impala-shell -i dn32 -q \"{}\""
        print(impala_shell)
        shell = ori.format(impala_shell)
        print(shell)
        start_time = time.time()
        try:
            #os.system(shell)
            end_time = time.time()
            # Parameterized update: the stored SQL contains single quotes by
            # construction, which would break a string-formatted statement.
            cursor.execute(
                'update impala_log set use_time = ?, success = 1 where command = ?',
                (str(end_time - start_time), impala_shell))
        except Exception as e:
            print('{} is error {}'.format(shell, e))
        finally:
            # Commit per command so progress survives an interrupted run.
            sqlite.commit()
    cursor.close()
    # refresh = "impala-shell -i dn32 -q \"REFRESH blackpanther_test.tbl_man_warning_rt; \""
    # os.system(refresh)


def init_hadoop_command():
    '''
    Seed hadoop_log with one `hadoop fs -cp` command per day in the range
    produced by handle_start_end_time().

    The commands are printed for review first, then the function waits on
    stdin: a plain Enter commits the batch; any other input rolls back,
    closes the database and exits the process.
    :return: None (may terminate the process via sys.exit)
    '''
    print('use new shell command ---')
    middle_times = handle_start_end_time()

    sqlite = globals()['sqlite']
    cursor = sqlite.cursor()
    for mt in middle_times:
        tsp = source_path.format(mt)
        hdfs_cp = 'hadoop fs -cp -f {} {}'.format(tsp, target_path)
        print(hdfs_cp)
        # Parameterized insert keeps the command text intact even if a path
        # ever contains quote characters.
        cursor.execute("insert into hadoop_log values (?, 0, '')", (hdfs_cp,))
    # Plain Enter confirms; anything else aborts the whole batch.
    if input('回车继续') != '':
        sqlite.rollback()
        cursor.close()
        sqlite.close()
        sys.exit()
    else:
        sqlite.commit()
        cursor.close()


def handle_start_end_time(start_time='20180601', end_time='20180720'):
    '''
    Return every day between start_time and end_time (inclusive) as a list
    of 'YYYYMMDD' strings.

    :param start_time: first day as 'YYYYMMDD'; default keeps the original
                       hard-coded range start.
    :param end_time: last day as 'YYYYMMDD'; default keeps the original
                     hard-coded range end.
    :return: list of 'YYYYMMDD' strings, both endpoints included.
    '''
    current = datetime.datetime.strptime(start_time, '%Y%m%d')
    end = datetime.datetime.strptime(end_time, '%Y%m%d')
    # strftime replaces the original str(...).split/replace round-trip.
    middle_times = [current.strftime('%Y%m%d')]
    while current < end:
        current += datetime.timedelta(days=1)
        middle_times.append(current.strftime('%Y%m%d'))
    return middle_times


def init_impala_command():
    '''
    Seed impala_log with ADD PARTITION statements covering every minute of
    every day in the range produced by handle_start_end_time().

    Statements are batched 10 per impala_log row so one row equals one
    impala-shell invocation.
    :return: None
    '''
    middle_times = handle_start_end_time()
    sqlite = globals()['sqlite']
    cursor = sqlite.cursor()
    insert_sql = "insert into impala_log values (?, 0, '0')"
    for middle_time in middle_times:
        day = int(middle_time[-2:])
        use_times = []
        use_time = datetime.datetime.strptime(str(middle_time), '%Y%m%d')
        # Walk minute by minute until the date rolls over to the next day.
        while use_time.day == day:
            use_times.append(use_time.strftime('%Y%m%d%H%M'))
            use_time = use_time + datetime.timedelta(minutes=1)

        tmp_create = []
        for t in use_times:
            create = "ALTER TABLE blackpanther_test.tbl_man_warning_rt ADD IF NOT EXISTS PARTITION (startdate_day='{}',startdate_min='{}');".format(
                t[:8], t)
            tmp_create.append(create)
            if len(tmp_create) >= 10:
                # Parameterized: the statements contain single quotes.
                cursor.execute(insert_sql, (''.join(tmp_create),))
                tmp_create = []
        # Flush the remainder so no statements are silently dropped (a full
        # day is 1440 minutes, which happens to divide by 10 today — do not
        # rely on that).
        if tmp_create:
            cursor.execute(insert_sql, (''.join(tmp_create),))
    sqlite.commit()
    cursor.close()


def show_handle_result(type):
    '''
    Print every row of the <type>_log table, then close the shared
    connection stored in module globals.

    :param type: 'hadoop' or 'impala' -- selects which log table to dump.
    :return: None
    '''
    print('show {} handle result'.format(type))
    connection = globals()['sqlite']
    cursor = connection.cursor()
    rows = cursor.execute('select * from {}_log;'.format(type))
    for command, success, use_time in rows.fetchall():
        print('\n command = {}\n success = {}\n use_time = {}\n'.format(command, success, use_time))

    globals()['sqlite'].close()


def main():
    '''
    Interactive entry point: ask which phase to run ('hdfs' or 'impala')
    and whether to rebuild the command table, then execute pending commands.
    '''
    todo = input('if you want to do hdfs or impala')
    new_old = int(input('is start new command? 1:use new; 0:use old'))
    init_sqlite(new_old, todo)
    # Dispatch table instead of an if/elif chain; unknown input is a no-op.
    executors = {'hdfs': execute_cp_hadoop, 'impala': execute_impala}
    runner = executors.get(todo)
    if runner is not None:
        runner()
    # show_handle_result(todo)


main()
