import pymysql
import os
import datetime
import json
import sys
import zipfile
from common import *
from loguru import logger
from threading import Thread
from queue import Queue
import random
import re
import time

# Build the work queue: every data_source_id (type-3 sources owned by the
# named collector) whose backing table actually exists in the schema.
with Db('data_collection_server') as conn:
    cursor = conn.cursor()
    sql = r'''
     select a.data_source_id 
     from 
     (select data_source_id from ip_to_id where name = "田明祥")a 
     inner join 
     data_source b 
     on a.data_source_id = b.source_id 
     inner join
     id_to_name_handle c
     on a.data_source_id = c.data_source_id
     INNER JOIN
     information_schema.tables d
     on c.table_name = d.table_name
     where b.data_type = 3
     order by a.data_source_id desc

    '''

    cursor.execute(sql)
    rows = cursor.fetchall()
    data_source_id_ls = [row['data_source_id'] for row in rows]
    # Bounded queue sized to the exact workload; worker threads drain it.
    table_name_queue = Queue(len(data_source_id_ls))
    for source_id in data_source_id_ls:
        table_name_queue.put(source_id)


class DBbackup(Thread):
    """Worker thread that backs up one data-source table at a time.

    Pulls ``data_source_id`` values from ``table_name_queue``, resolves each
    to its table via the ``id_to_name_handle`` stored procedure, dumps every
    qualifying row to a JSON file under ``father_path/<data_source_id>/``,
    and compresses the folder whenever it reaches ``compress_num`` files
    (plus a final compress per source).
    """

    def __init__(self, table_name_queue):
        super().__init__()
        # Queue of data_source_id values still awaiting backup.
        self.table_name_queue = table_name_queue
        self.father_path = 'E:\\project\\DataBaseData'
        self.to_compress_path = 'E:\\project\\DataBaseData\\DbAnalyze'
        # Compress a source folder once it holds this many JSON files.
        self.compress_num = 500

    def _compress_folder(self, data_source_id):
        """Compress one source folder into ``to_compress_path``, then pause."""
        compress(
            compress_path=self.father_path,
            compress_folder=str(data_source_id),
            compress_to_path=self.to_compress_path,
        )
        # Brief pause between archives — presumably to throttle disk I/O;
        # kept from the original code. TODO(review): confirm it is needed.
        time.sleep(1)

    @log('back')
    def run(self):
        table_name_queue = self.table_name_queue
        with Db('data_collection_server') as conn:
            while not table_name_queue.empty():
                data_source_id = table_name_queue.get()
                cursor = conn.cursor()
                # Stored procedure maps the source id to its table name.
                cursor.execute(f'call id_to_name_handle({data_source_id})')
                table_name = cursor.fetchone()['table_name_use']
                cursor.execute(f'select max(id) max_id from {table_name}')
                # Empty table yields NULL; coerce to 0 so the row loop is
                # simply skipped (the original crashed on None + 1 here).
                max_id = cursor.fetchone()['max_id'] or 0
                for row_id in range(1, max_id + 1):
                    # Parameterize the id (identifiers like table_name
                    # cannot be parameterized, but values can and should be).
                    cursor.execute(
                        f'select * from {table_name} where id = %s',
                        (row_id,),
                    )
                    db_data = cursor.fetchone()
                    # id sequences may have gaps (deleted rows).
                    if not db_data:
                        continue
                    html_content = db_data['html_content']
                    # Skip rows with missing or trivially small page content
                    # (original crashed on NULL html_content).
                    if not html_content or len(html_content) <= 50:
                        continue
                    data_source_id = db_data['data_source_id']
                    identify = db_data['identify']
                    logger.info('{0}-{1}'.format(data_source_id, identify))
                    will_json = {
                        "titleDate": str(db_data['title_date']),
                        "title": db_data['title'],
                        "url": db_data['url'],
                        "html_content": html_content,
                        "data_source_id": data_source_id,
                        "data_type": db_data['data_type'],
                        "table_name": table_name
                    }
                    out_dir = os.path.join(self.father_path, str(data_source_id))
                    # makedirs(exist_ok=True) avoids the exists-check/mkdir race.
                    os.makedirs(out_dir, exist_ok=True)
                    stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
                    out_file = os.path.join(out_dir, f'{stamp}_{identify}.json')
                    with open(out_file, 'w', encoding='utf-8') as f:
                        f.write(json.dumps(will_json))
                    if len(os.listdir(out_dir)) >= self.compress_num:
                        self._compress_folder(data_source_id)
                # Final sweep: compress whatever remains for this source.
                if os.path.exists(os.path.join(self.father_path, str(data_source_id))):
                    self._compress_folder(data_source_id)
                cursor.close()
                logger.info('{0} back end ...'.format(str(data_source_id)))


def main():
    """Spawn the backup worker thread(s) that drain ``table_name_queue``."""
    workers = [DBbackup(table_name_queue) for _ in range(1)]
    for worker in workers:
        worker.start()


# Script entry point: kick off the backup workers.
if __name__ == "__main__":
    main()

