#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os
import psycopg2.extras
import utils
import logging.config
import time
import commands

# Set the process-wide default encoding to UTF-8 (Python 2 idiom;
# required because the script logs and stores Japanese text).
reload(sys)
sys.setdefaultencoding('utf-8')

# Load the error-code / error-message definitions used for logging.
error_json = utils.read_json("error.json")

# Module-level state. All of these are populated by get_parameter()
# before the other functions run; they start as None placeholders.
tp_db_table_name = None         # work (temp) table name, schema-qualified
db_table_name = None            # real (main) table name, schema-qualified
error_info_table_name = None    # error-record table name, schema-qualified
csv_file_name = None            # base name of the CSV given on the command line
setting_json = None             # parsed contents of ./setting.ini
commit_counter_to_tempdb = None     # commit interval when loading the temp table
commit_counter_to_maindb = None     # commit interval when loading the main table
work_dir = None                 # working directory from settings
database = None                 # DB connection parameters...
user = None
password = None
host = None
port = None
json_data = None                # per-CSV definition from ./data.json
logger = None                   # configured by get_parameter()



def get_parameter():
    """
    Read the command-line argument and load runtime configuration.

    Expects exactly one argument: the path of the CSV file to process.
    Populates the module-level globals (table names, DB credentials,
    commit intervals, json_data, logger) from ./setting.ini and
    ./data.json, then configures logging.

    Parameters
    ----------
    None

    Returns
    -------
    None
    """
    try:
        # BUG FIX: `work_dir` was assigned below but missing from this
        # global statement, so the module-level work_dir silently stayed
        # None (the assignment only created a local).
        global db_table_name, tp_db_table_name, error_info_table_name, csv_file_name, setting_json, \
            commit_counter_to_tempdb, commit_counter_to_maindb, work_dir, database, user, password, \
            host, port, json_data, logger
        if len(sys.argv) != 2:
            logger = logging.getLogger()
            logger.error(error_json["parameter_count_error"])
            os._exit(1)
        # argv[1] is a path; keep only the file-name component.
        csv_file_name = sys.argv[1].split("/")[-1]

        # Load the settings file.
        setting_json = utils.read_json("./setting.ini")
        commit_counter_to_tempdb = setting_json["commit_counter_to_tempdb"]
        commit_counter_to_maindb = setting_json["commit_counter_to_maindb"]
        log_file_path = setting_json["log_file_path"]
        work_dir = setting_json["work_dir"]
        database = setting_json["database"]
        user = setting_json["user"]
        password = setting_json["password"]
        host = setting_json["host"]
        port = setting_json["port"]

        # Load the per-CSV JSON definition (table names, pk columns, ...).
        json_data = utils.read_json("./data.json", csv_file_name)
        db_table_name = setting_json["schema"] + "." + json_data["tableName"]
        tp_db_table_name = setting_json["schema"] + "." + json_data["tptableName"]
        error_info_table_name = setting_json["schema"] + "." + setting_json["error_db_name"]

        # Logging setup: one log directory per batch id.
        log_file_path = log_file_path + json_data["batchId"]
        if not os.path.exists(log_file_path):
            os.makedirs(log_file_path)
        logging.config.fileConfig('./logging.conf', defaults={'logdir': log_file_path})
        logger = logging.getLogger()

    except Exception as bep:
        # BUG FIX: was `except BaseException`, which also swallowed
        # KeyboardInterrupt and SystemExit; catch only ordinary errors.
        logger = logging.getLogger()
        logger.error("%s : " + error_json["csv_file_name_error"], bep, exc_info=1)
        sys.exit(1)


def check_multi_startup():
    """
    Guard against concurrent runs of this batch.

    Counts python processes whose command line contains this CSV's base
    name (excluding the `prep` variant); if more than one is found,
    another instance is already running, so log a warning and exit 1.

    Parameters
    ----------
    None

    Returns
    -------
    None
    """
    logger.info("多重起動チェック処理が開始します。")
    started = time.time()

    base_name = csv_file_name.split(".")[0]
    count_cmd = "ps -ef | grep python | grep %s | grep -v prep | wc -l" % base_name
    running = int(commands.getoutput(count_cmd))
    if running > 1:
        logger.warning(error_json["multi_startup_error"])
        sys.exit(1)

    elapsed = time.time() - started
    logger.info("多重起動チェック処理が終了しました。処理時間：" + str(round(elapsed, 2)) + "秒")


def read_tmpdb_to_maindb(tp_db_table_name, db_table_name):
    """
    Load rows from the work (temp) table into the real table.

    Reads every row of `tp_db_table_name` ordered by syori_id, inserts
    each into `db_table_name` via utils.insert_data_to_maindb, and
    commits in batches. Rows that fail to insert are recorded in the
    error table and deleted from the work table.

    Parameters
    ----------
    tp_db_table_name : string
        Work (temp) table name, schema-qualified.
    db_table_name : string
        Real (main) table name, schema-qualified.

    Returns
    -------
    None
    """
    logger.info("実テーブルへのデータ取り込み処理が開始します。")
    # start time for the elapsed-time log
    check_start_time = time.time()
    conn = None
    cur = None
    syori_id = ""
    counter = 0
    try:
        # Create the DB connection (credentials come from module globals
        # populated by get_parameter()).
        conn = psycopg2.connect(database=database, user=user, password=password, host=host, port=port)
        cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
        # Table name is interpolated with %; it comes from trusted config,
        # not user input. Ordered by syori_id so processing is deterministic.
        select_sql = """SELECT * FROM %s ORDER BY syori_id """
        cur.execute(select_sql % tp_db_table_name)
        if cur.rowcount != 0:
            rows = None
            while True:
                try:
                    # Fetch in chunks of setting_json["fetchsize"] rows.
                    rows = cur.fetchmany(setting_json["fetchsize"])
                    if not rows:
                        break
                except psycopg2.ProgrammingError:
                    # NOTE(review): if fetchmany keeps raising (e.g. the
                    # cursor's result set is gone), this `continue` spins
                    # forever — confirm the intended recovery here.
                    continue
                temp_data = {}
                csv_data = None
                for row in rows:
                    for key in row.keys():
                        tmp_data = row[key]
                        # For primary-key columns, rewrite None to an
                        # empty string before inserting.
                        if key.upper() in json_data["pk"] and row[key] is None:
                            tmp_data = ""
                        if key == "csv_data":
                            csv_data = row[key]
                        if key == "syori_id":
                            syori_id = tmp_data

                        temp_data[key] = tmp_data
                    # Insert the work-table row into the real table.
                    try:
                        utils.insert_data_to_maindb(json_data, conn, db_table_name, temp_data, tp_db_table_name)
                        counter = counter + 1
                        # NOTE(review): this is the main-DB load, but the
                        # commit interval used is commit_counter_to_tempdb —
                        # confirm whether commit_counter_to_maindb was meant.
                        if counter % setting_json["commit_counter_to_tempdb"] == 0:
                            logger.debug("will commit, counter = %d" % counter)
                            conn.commit()
                    except BaseException as bep:
                        logger.error("%s : " + error_json["database_error"], bep, exc_info=1)
                        # NOTE(review): committing right after a failed
                        # statement — on an aborted transaction the server
                        # treats COMMIT as a rollback; conn.rollback() would
                        # state the intent explicitly. Confirm before changing.
                        conn.commit()
                        # Record the failing row in the error table.
                        error_csv_data = csv_data
                        # NOTE: bep.message is Python-2-only.
                        error_info = error_json["database_error"] + bep.message
                        key_info = ""
                        for key in json_data["pk"]:
                            key = key.lower()
                            key_info += str(row[key])
                        error_sql = """INSERT INTO %s (batch_id, batch_group_id, table_name, key_info, error_info, csv_row_data) \
                                                              VALUES (%%s, %%s, %%s, %%s, %%s, %%s)"""
                        error_value = [json_data["batchId"], json_data["batch_group_id"],
                                       db_table_name, key_info, error_info, error_csv_data]
                        cur.execute(error_sql % error_info_table_name, error_value)
                        logger.error(error_info)

                        # Delete the failed row from the work table so it
                        # is not retried on the next run.
                        del_sql = """DELETE FROM %s WHERE syori_id = %%s """
                        cur.execute(del_sql % tp_db_table_name, [syori_id])
                        counter = counter + 1
                        conn.commit()
                        continue
                    temp_data = {}
    finally:
        if conn:
            # Flush any uncommitted batch before closing.
            logger.debug("finally commit counter = %d" % counter)
            conn.commit()
            cur.close()
            conn.close()
        # end time for the elapsed-time log
        check_end_time = time.time()

        elapsed_time = check_end_time - check_start_time
        logger.info("実テーブルへのデータ取り込み処理が終了しました。処理時間：" + str(round(elapsed_time, 2)) + "秒")



if __name__ == "__main__":

    # Order matters: get_parameter() populates the module-level globals
    # (logger, csv_file_name, table names, DB credentials) that the two
    # steps below read.
    get_parameter()

    # Abort if another instance processing the same CSV is already running.
    check_multi_startup()

    # Move the work-table rows into the real table.
    read_tmpdb_to_maindb(tp_db_table_name, db_table_name)
