"""
一次性脚本
将针对cgtw5的排期软件数据库迁移到新排期软件架构数据库

注意：
    1. 此同步脚本只有插入没有更新，所以新的数据库scheduler6必须是新建的
    2. 两个数据库必须在同一个Server上，也就是说原scheduler5数据库与新scheduler6数据库是同级关系
    3. 执行迁移后，scheduler6新数据库，还需要执行同步脚本，同步现在cgtw6上最新信息，因为cgtw5与cgtw6的有些数据存放路径或字段名称有变化

"""
from .CONFIG import SCHEDULER
import psycopg2
import json
"""

    -------------------------------------------------------------------------------
    数据库配置
    -------------------------------------------------------------------------------

"""
# USER = 'postgres'
# PASSWORD = 'timeaxis'
# HOST = '192.168.1.214'
# PORT= 5432

DATABASE = SCHEDULER['database']
USER = SCHEDULER['user']
PASSWORD = SCHEDULER['password']
HOST = SCHEDULER['host']
PORT= SCHEDULER['port']



class Sync5to6(object):
    """One-shot migration of scheduler5 (cgtw5) tables into a scheduler6 database.

    Insert-only: the target scheduler6 database must be freshly created, and
    both databases must live on the same PostgreSQL server.  After migration a
    separate sync against cgtw6 is still required (see module docstring).

    All statements use psycopg2 ``%s`` parameter binding, so values containing
    quotes are escaped correctly and SQL NULLs round-trip as NULL (the original
    string-formatted SQL broke on both).
    """

    def __init__(self, db5_name, db6_name):
        """
        :param db5_name: name of the source (scheduler5) database
        :param db6_name: name of the target (scheduler6) database
        """
        super(Sync5to6, self).__init__()
        self.db5_name = db5_name
        self.db6_name = db6_name

    def _open_handler(self, db_name, err_label):
        """Open a connection/cursor pair to *db_name*.

        :return: ``(connection, cursor)``, or ``(None, None)`` after printing
                 *err_label* and the error when the connection fails.
        """
        try:
            connect = psycopg2.connect(database=db_name,
                                       user=USER,
                                       password=PASSWORD,
                                       host=HOST,
                                       port=PORT)
            cursor = connect.cursor()
        except Exception as open_handler_ERR:
            print(err_label, open_handler_ERR)
            return None, None
        return connect, cursor

    def _init_db5_handler(self):
        """Connection/cursor pair for the source (scheduler5) database."""
        return self._open_handler(self.db5_name, '_init_db5_handler_ERR:')

    def _init_db6_handler(self):
        """Connection/cursor pair for the target (scheduler6) database."""
        return self._open_handler(self.db6_name, '_init_db6_handler_ERR:')

    @staticmethod
    def _close_all(*connections):
        """Close every connection that was actually opened (skips None).

        The original code called ``con.close()`` unconditionally in ``finally``
        and crashed with AttributeError whenever a connection had failed.
        """
        for con in connections:
            if con is not None:
                con.close()

    def get_active_databases(self):
        """Return the ``database`` names of all projects that are not closed.

        :return: list of database-name strings (empty on failure)
        """
        active_database_list = []
        con5, cur5 = self._init_db5_handler()
        if con5 is None:
            return active_database_list
        database_sql = "select database from timeaxis.r_project where status !='Close'"
        try:
            cur5.execute(database_sql)
            active_database_list = [a[0] for a in cur5.fetchall()]
        except Exception as get_active_database_ERR:
            print('get_active_database_ERR:', str(get_active_database_ERR))
        finally:
            con5.close()
        return active_database_list

    def sync_account_table(self):
        """Migrate the account table (custom_json is re-serialised to text)."""
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select id,account,user_level,custom_json,password,user_name from timeaxis.account"
        insert_sql = "insert into timeaxis.account(account,user_level,custom_json,password,user_name) " \
                     "values (%s,%s,%s,%s,%s)"
        try:
            cur5.execute(sql)
            result = cur5.fetchall()
            if result:
                for aid, account, user_level, custom_json, password, user_name in result:
                    cur6.execute(insert_sql,
                                 (account, user_level, json.dumps(custom_json), password, user_name))
                con6.commit()
        except Exception as sync_account_table_ERR:
            print('sync_account_table:', str(sync_account_table_ERR))
        finally:
            self._close_all(con5, con6)
            print('sync_account_table 完成')

    def sync_assigned_table(self):
        """Migrate the assigned_task table (history of task assignments)."""
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select task_type,cg_id,artist,show_name,start_date," \
              "end_date,date_length,position,assigner,assigned_time " \
              "from timeaxis.assigned_task"
        insert_sql = "insert into timeaxis.assigned_task(task_type,cg_id,artist,show_name,start_date," \
                     "end_date,date_length,position,assigner,assigned_time) values " \
                     "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        try:
            cur5.execute(sql)
            result = cur5.fetchall()
            if result:
                for row in result:
                    # column order of select and insert match; pass through as-is
                    cur6.execute(insert_sql, row)
                con6.commit()
                print('sync_assigned_table 完成')
        except Exception as sync_assigned_table_ERR:
            print('sync_assigned_table:', str(sync_assigned_table_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_custom_task_table(self):
        """Migrate the custom_task table; prints '=' every 100 rows as progress."""
        print('sync_custom_task_table start')
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select cg_id,start_date,end_date,artist,workhour," \
              "description,task_type,custom_type,note,position," \
              "show_workhour, parent_data,show_name,assigned_time,department," \
              "project_name,color,date_length,ignore_weekend,font_color," \
              "database,assigner,status,account_id " \
              "from timeaxis.custom_task"
        insert_sql = "insert into timeaxis.custom_task(cg_id,start_date,end_date,artist,workhour," \
                     "description,task_type,custom_type,note,position," \
                     "show_workhour, parent_data, show_name,assigned_time, department, " \
                     "project_name, color, date_length,ignore_weekend, font_color, " \
                     "database, assigner, status, account_id) values " \
                     "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        try:
            cur5.execute(sql)
            result = cur5.fetchall()
            if result:
                for c, row in enumerate(result, 1):
                    values = list(row)
                    values[11] = json.dumps(values[11])  # parent_data -> json text
                    values[18] = values[18] or 0         # ignore_weekend column has dirty NULLs
                    cur6.execute(insert_sql, values)
                    if c % 100 == 0:
                        print('=', end='')
                con6.commit()
                print('sync_custom_task_table completed')
        except Exception as sync_custom_task_table_ERR:
            print('sync_custom_task_table_ERR:', str(sync_custom_task_table_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_delete_task_table(self):
        """Migrate the delete_task table (not currently required for migration)."""
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select task_type,cg_id,artist,show_name,start_date,end_date,date_length,position,deleted_by,delete_time " \
              "from timeaxis.delete_task"
        insert_sql = "insert into timeaxis.delete_task(task_type,cg_id,artist,show_name,start_date,end_date," \
                     "date_length,position,deleted_by,delete_time) values " \
                     "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        try:
            cur5.execute(sql)
            result = cur5.fetchall()
            if result:
                for row in result:
                    cur6.execute(insert_sql, row)
                con6.commit()
                print('sync_delete_task_table 完成')
        except Exception as sync_delete_task_table_ERR:
            print('sync_delete_task_table_ERR:', str(sync_delete_task_table_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_group_task_table(self, active_database=True):
        """Migrate the group_task table.

        :param active_database: when True (default), only rows whose database
            belongs to a non-closed project are migrated
        """
        print('sync_group_task_table start')
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select group_name,color,project_name,department,start_date," \
              "end_date,date_length,workhour,artist,position," \
              "artist_level,note,assigned_time,task_id_dict,tasks," \
              "use_custom_workhour,database,account_id " \
              "from timeaxis.group_task"
        params = None
        if active_database:
            connect_database = '|'.join(self.get_active_databases())
            sql += " where database similar to %s"
            params = ('({})'.format(connect_database),)
        # NOTE(review): account_id is selected but was never inserted by the
        # original script; that behaviour is kept here — confirm against schema.
        insert_sql = "insert into timeaxis.group_task(group_name, color, project_name, department, start_date," \
                     "end_date, date_length, workhour, artist, position," \
                     "artist_level, note, assigned_time, task_id_dict, tasks," \
                     " use_custom_workhour, database) values " \
                     "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        try:
            cur5.execute(sql, params)
            result = cur5.fetchall()
            if result:
                for c, row in enumerate(result, 1):
                    values = list(row[:17])              # drop trailing account_id
                    values[13] = json.dumps(values[13])  # task_id_dict -> json text
                    values[15] = values[15] or 0         # use_custom_workhour has dirty NULLs
                    cur6.execute(insert_sql, values)
                    if c % 100 == 0:
                        print('=', end='')
                con6.commit()
                print('sync_group_task_table completed')
        except Exception as sync_group_task_table_ERR:
            print('sync_group_task_table_ERR:', str(sync_group_task_table_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_projects_table(self):
        """Migrate the projects table.

        After migrating projects and nodes, run :meth:`reindex_project_id`,
        because the projects ids change on insert and nodes references them.
        """
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select row_index,column_index,project_name,color,active," \
              "inweek,week_row_index,week_column_index,inhalfyear,halfyear_row_index," \
              "halfyear_column_index,database from timeaxis.projects"
        insert_sql = "insert into timeaxis.projects(row_index,column_index,project_name,color,active," \
                     "inweek,week_row_index,week_column_index,inhalfyear,halfyear_row_index," \
                     "halfyear_column_index,database) values " \
                     "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        try:
            cur5.execute(sql)
            result = cur5.fetchall()
            if result:
                for row in result:
                    cur6.execute(insert_sql, row)
                con6.commit()
                print('sync_projects_table 完成')
        except Exception as sync_projects_table_ERR:
            print('sync_projects_table_ERR:', str(sync_projects_table_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_nodes_table(self):
        """Migrate the nodes table.

        nodes.project_id references projects.id absolutely, and projects ids
        change during migration — run :meth:`reindex_project_id` afterwards.
        """
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select project_id,node_date,node_info,weight,update_time," \
              "updater from timeaxis.nodes"
        # NOTE(review): node_info is assumed to come back as text — confirm it
        # is not a json/dict column in the source schema.
        insert_sql = "insert into timeaxis.nodes(project_id,node_date,node_info,weight,update_time," \
                     "updater) values (%s,%s,%s,%s,%s,%s)"
        try:
            cur5.execute(sql)
            result = cur5.fetchall()
            if result:
                for row in result:
                    cur6.execute(insert_sql, row)
                con6.commit()
                print('sync_nodes_table 完成')
        except Exception as sync_nodes_table_ERR:
            print('sync_nodes_table_ERR:', str(sync_nodes_table_ERR))
        finally:
            self._close_all(con5, con6)

    def reindex_project_id(self):
        """Re-point nodes.project_id at the new project ids.

        Old and new projects rows are matched via their ``database`` value.
        """
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        p5_sql = "select id,database from timeaxis.projects"
        try:
            cur5.execute(p5_sql)  # old (id, database) pairs from scheduler5
            p_result = cur5.fetchall()
            for _id, database in (p_result or []):
                cur6.execute("select id from timeaxis.projects where database=%s", (database,))
                p6_result = cur6.fetchone()
                if p6_result:
                    # replace the old project id with the freshly assigned one
                    cur6.execute("update timeaxis.nodes set project_id = %s where project_id = %s",
                                 (p6_result[0], _id))
            con6.commit()
        except Exception as reindex_project_id_ERR:
            print('reindex_project_id_ERR:', str(reindex_project_id_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_public_conf_table(self):
        """Migrate the public_conf table (json columns are re-serialised)."""
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select public_info,token,pipeline from timeaxis.public_conf"
        insert_sql = "insert into timeaxis.public_conf(public_info,token,pipeline) values (%s,%s,%s)"
        try:
            cur5.execute(sql)
            result = cur5.fetchall()
            if result:
                for public_info, token, pipeline in result:
                    cur6.execute(insert_sql, (json.dumps(public_info), token, json.dumps(pipeline)))
                con6.commit()
                print('sync_public_conf_table 完成')
        except Exception as sync_public_conf_table_ERR:
            print('sync_public_conf_table_ERR:', str(sync_public_conf_table_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_r_account_table(self):
        """Migrate the r_account table (conf_department_id maps to department_id)."""
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select cg_id,conf_department_id,level,artist from timeaxis.r_account"
        insert_sql = "insert into timeaxis.r_account(cg_id,department_id,level,artist) values (%s,%s,%s,%s)"
        try:
            cur5.execute(sql)
            result = cur5.fetchall()
            if result:
                for row in result:
                    cur6.execute(insert_sql, row)
                con6.commit()
                print('sync_r_account_table 完成')
        except Exception as sync_r_account_table_ERR:
            print('sync_r_account_table_ERR:', str(sync_r_account_table_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_r_department_table(self):
        """Migrate the r_department table."""
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select cg_id,name,display from timeaxis.r_department"
        insert_sql = "insert into timeaxis.r_department(cg_id,name,display) values (%s,%s,%s)"
        try:
            cur5.execute(sql)
            result = cur5.fetchall()
            if result:
                for cg_id, name, display in result:
                    # display column has dirty NULLs; default to 0
                    cur6.execute(insert_sql, (cg_id, name, display or 0))
                con6.commit()
                print('sync_r_department_table 完成')
        except Exception as sync_r_department_table_ERR:
            print('sync_r_department_table_ERR:', str(sync_r_department_table_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_r_project_table(self):
        """Migrate the r_project table, flattening eps json into a '|'-joined string."""
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select cg_id,database,entity_name,full_name,color," \
              "status,resolution,frame_rate,path,eps from timeaxis.r_project"
        insert_sql = "insert into timeaxis.r_project(cg_id,database,entity_name,full_name,color," \
                     "status,resolution,frame_rate,path,eps) values " \
                     "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        try:
            cur5.execute(sql)
            result = cur5.fetchall()
            if result:
                for row in result:
                    values = list(row)
                    # eps is a json mapping with an 'eps_list' key; NULL-guard
                    # added so one dirty row no longer aborts the whole table
                    eps_list = (values[9] or {}).get('eps_list', [])
                    values[9] = '|'.join(eps_list) if eps_list else ''
                    cur6.execute(insert_sql, values)
                con6.commit()
                print('sync_r_project_table 完成')
        except Exception as sync_r_project_table_ERR:
            print('sync_r_project_table_ERR:', str(sync_r_project_table_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_task_table(self, active_database=True):
        """Migrate the task table.

        :param active_database: when True (default), only tasks belonging to a
            non-closed project's database are migrated
        """
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select task_type,r_id,artist,position,start_date," \
              "end_date,status,cg_id,retake_info,task_name," \
              "task_pipeline,project_name,artist_level,difficulty_level,workhour," \
              "module,description,assigned_time,eps,img_path," \
              "important_task_mark,show_name,assigner,database,note," \
              "project_shortname,show_workhour,client_status_retake_count,sort_right,assign_pipeline," \
              "department,project_color,retake_workhour,date_length,ignore_weekend," \
              "last_submit_time,separated,frame,ingroup,cgworkhour," \
              "assigned_times,assigned_workhour,shot_name,deadline,account_id " \
              " from timeaxis.task"
        params = None
        if active_database:
            connect_database = '|'.join(self.get_active_databases())
            sql += " where database similar to %s"
            params = ('({})'.format(connect_database),)
        # source shot_name lands in the target column 'name'; otherwise the
        # select and insert column orders are identical
        insert_sql = "insert into timeaxis.task(task_type, r_id, artist, position, start_date," \
                     "end_date,status,cg_id,retake_info,task_name," \
                     "task_pipeline, project_name, artist_level, difficulty_level, workhour," \
                     "module,description,assigned_time,eps,img_path," \
                     "important_task_mark,show_name,assigner,database,note," \
                     "project_shortname,show_workhour,client_status_retake_count,sort_right,assign_pipeline," \
                     "department, project_color, retake_workhour, date_length, ignore_weekend," \
                     "last_submit_time,separated,frame,ingroup,cgworkhour," \
                     "assigned_times, assigned_workhour, name, deadline,account_id) values " \
                     "(" + ",".join(["%s"] * 45) + ")"
        try:
            cur5.execute(sql, params)
            result = cur5.fetchall()
            if result:
                for c, row in enumerate(result, 1):
                    values = list(row)
                    values[8] = json.dumps(values[8])  # retake_info -> json text
                    # dirty-data defaults, matching the original script
                    values[28] = values[28] or 1       # sort_right
                    values[34] = values[34] or 0       # ignore_weekend
                    values[36] = values[36] or 0       # separated
                    values[38] = values[38] or 0       # ingroup
                    values[40] = values[40] or 0       # assigned_times
                    cur6.execute(insert_sql, values)
                    if c % 100 == 0:
                        print('=', end='')
                con6.commit()
                print('sync_task_table 完成')
        except Exception as sync_task_table_ERR:
            print('sync_task_table_ERR:', str(sync_task_table_ERR))
        finally:
            self._close_all(con5, con6)

    def sync_r_task_table(self, active_database=True):
        """Migrate the r_task table.

        Two steps per row:
            1. copy the fields already present on r_task;
            2. look up the linked r_asset / r_shot row (via link_id + module)
               for entity details, which override eps/img_path/deadline.

        :param active_database: when True (default), only rows of non-closed
            projects' databases are migrated
        """
        con5, cur5 = self._init_db5_handler()
        con6, cur6 = self._init_db6_handler()
        if con5 is None or con6 is None:
            self._close_all(con5, con6)
            return

        sql = "select cg_id,link_id,artist,module,entity_name,status," \
              "client_status_retake_count,assigned,database,difficulty_level,eps," \
              "important_task_mark,img_path,shot_name,task_name,last_workhour," \
              "workhour,department,retake_workhour,last_submit_time,task_priority," \
              "deadline,account_id" \
              " from timeaxis.r_task"
        params = None
        if active_database:
            connect_database = '|'.join(self.get_active_databases())
            sql += " where database similar to %s"
            params = ('({})'.format(connect_database),)

        insert_sql = "insert into timeaxis.r_task(cg_id,description,name,eps,img_path," \
                     "frame,assign_pipeline,shot_difficulty_level,deadline,artist," \
                     " module, task_name, status,client_status_retake_count,assigned," \
                     "database,difficulty_level,important_task_mark,workhour,last_workhour," \
                     "department,retake_workhour,last_submit_time,task_priority) values " \
                     "(" + ",".join(["%s"] * 24) + ")"

        def get_assetOrshot_info(database, module, link_id):
            """Fetch entity details for one task from r_shot (or r_asset).

            Returns all-empty strings for unknown modules or missing rows.
            """
            description = ''
            name = ''
            eps = ''
            img_path = ''
            assign_pipeline = ''
            frame = ''
            shot_difficulty_level = ''
            deadline = ''
            asset_shot_cg_id = database + ',' + link_id
            gsql = "select description,entity_name,eps,img_path,assign_pipeline,frame,shot_difficulty_level," \
                   "deadline from timeaxis.r_shot where cg_id= %s"
            if module == 'asset':
                gsql = "select description,entity_name,eps,assign_pipeline,frame from timeaxis.r_asset where cg_id= %s"
            cur5.execute(gsql, (asset_shot_cg_id,))
            info = cur5.fetchone()
            if info:
                if module == 'shot':
                    description, name, eps, img_path, assign_pipeline, frame, shot_difficulty_level, deadline = info
                elif module == 'asset':
                    description, name, eps, assign_pipeline, frame = info

            return description, name, eps, img_path, assign_pipeline, frame, shot_difficulty_level, deadline

        try:
            cur5.execute(sql, params)
            result = cur5.fetchall()
            if result:
                print('r_task counts:', len(result))
                c = 0
                for i in result:
                    cg_id, link_id, artist, module, entity_name, status,\
                    client_status_retake_count, assigned, database, difficulty_level, eps, \
                    important_task_mark, img_path, shot_name, task_name, last_workhour,\
                    workhour, department, retake_workhour, last_submit_time, task_priority, \
                    deadline, account_id = i

                    # entity fields override the values selected from r_task
                    description, name, eps, img_path, assign_pipeline, frame, \
                        shot_difficulty_level, deadline = get_assetOrshot_info(database, module, link_id)

                    client_status_retake_count = client_status_retake_count or 0
                    cur6.execute(insert_sql,
                                 (cg_id, description, name, eps, img_path,
                                  frame, assign_pipeline, shot_difficulty_level, deadline, artist,
                                  module, task_name, status, client_status_retake_count, assigned,
                                  database, difficulty_level, important_task_mark, workhour, last_workhour,
                                  department, retake_workhour, last_submit_time, task_priority))
                    c += 1
                    if c % 100 == 0:
                        print('=', end='')
                    if c % 1000 == 0:
                        print()
                con6.commit()
                print('sync_r_task_table 完成')
        except Exception as sync_r_task_table_ERR:
            print('sync_r_task_table_ERR:', str(sync_r_task_table_ERR))
        finally:
            self._close_all(con5, con6)



