import os
import time
import pandas as pd
from sqlalchemy import create_engine
import logging


class fileinfo:
    """Merge same-day spreadsheet exports from a watched directory and load
    the combined rows into a PostgreSQL table.

    Configuration (output directory, DB connection string) is read from the
    environment with local fallbacks. Files are processed in batches: each
    batch is read, concatenated, saved to a dated CSV, and appended to a
    fixed database table.
    """

    def __init__(self):
        # Directory holding the daily export files.
        # Doubled backslash: the original relied on the unrecognized escape
        # '\执' surviving verbatim, which is deprecated and will become a
        # SyntaxError; the resulting string value is unchanged.
        self.file_dir = 'D:\\执业药师详情页数据'
        # Today's date as 'YYYY-MM-DD' (local time), used by Compare_CreateTime.
        self.today = self.TimeStampToTime(time.time())
        # Configure logging once for the whole run.
        logging.basicConfig(level=logging.INFO,
                            format='%(asctime)s - %(levelname)s - %(message)s')
        # Configuration from the environment, with local fallbacks.
        # SECURITY NOTE(review): the fallback embeds real-looking DB
        # credentials; prefer setting DB_CONNECTION in the environment and
        # removing this default.
        self.save_path = os.getenv('SAVE_PATH', 'D:\\合并结果')
        self.db_engine_str = os.getenv('DB_CONNECTION',
                                       'postgresql://postgres:j3IA6rebQ9NG@192.168.6.226:5432/odp20210825')

    def TimeStampToTime(self, timestamp):
        """Convert a POSIX timestamp to a local-time 'YYYY-MM-DD' string."""
        return time.strftime('%Y-%m-%d', time.localtime(timestamp))

    def get_FileCreateTime(self, filePath):
        """Return the file's creation date (local time) as 'YYYY-MM-DD'."""
        return self.TimeStampToTime(os.path.getctime(filePath))

    def get_FileModifyTime(self, filePath):
        """Return the file's last-modification date (local time) as 'YYYY-MM-DD'."""
        return self.TimeStampToTime(os.path.getmtime(filePath))

    def get_FileName(self):
        """Return every entry name (files and subdirectories) in self.file_dir."""
        return os.listdir(self.file_dir)

    def Compare_CreateTime(self, file_path):
        """Return True if *file_path* was created today (local time)."""
        return self.get_FileCreateTime(file_path) == self.today

    def read_excel_files(self, files):
        """Read each path in *files* into a DataFrame; return the list read.

        CSV files use the first row as the header; Excel files use header
        row index 1 (the second row) — presumably the Excel exports carry an
        extra title row. TODO(review): confirm this CSV/Excel header offset
        difference is intentional.

        Unreadable or unsupported files are logged and skipped so a single
        bad file does not abort the whole batch.
        """
        data_frames = []
        for i, file in enumerate(files, start=1):
            try:
                if file.endswith('.csv'):
                    data = pd.read_csv(file, header=0)
                elif file.endswith(('.xlsx', '.xls')):
                    data = pd.read_excel(file, header=1)
                else:
                    raise ValueError(f"Unsupported file type: {file}")

                # Lazy %-style args: formatting is skipped if INFO is disabled.
                logging.info('Read %dth file with %d rows', i, len(data))
                data_frames.append(data)
            except Exception as e:
                logging.error('Failed to read file %s: %s', file, e)
        return data_frames

    def merge_data_frames(self, data_frames):
        """Concatenate *data_frames* into one DataFrame; None when empty."""
        if not data_frames:
            logging.error("No data frames to merge.")
            return None
        return pd.concat(data_frames, ignore_index=True)

    def save_merge_result(self, dff, suffix=''):
        """Write *dff* to a dated CSV under self.save_path.

        *suffix* (new, optional, defaults to '' for backward compatibility)
        is appended to the file name so several saves on the same day — e.g.
        one per batch — no longer overwrite each other.
        Returns False when *dff* is None, True after a successful write.
        """
        if dff is None:
            return False
        # Ensure the output directory exists instead of failing on to_csv.
        os.makedirs(self.save_path, exist_ok=True)
        stamp = time.strftime('%Y%m%d', time.localtime(time.time()))
        merge_path = os.path.join(self.save_path,
                                  f"Excel Merge Result {stamp}{suffix}.csv")
        dff.to_csv(merge_path, index=False)
        logging.info("Merged data saved to %s", merge_path)
        return True

    def import_to_database(self, dff):
        """Append *dff* to the fixed target table; return True on success."""
        if dff is None:
            return False
        try:
            engine = create_engine(self.db_engine_str)
            table_name = 'tyc_data_20241104'  # fixed target table name
            # Append so repeated runs accumulate rather than replace rows.
            dff.to_sql(table_name, engine, index=False, if_exists='append')
            logging.info("Data imported into the database under table '%s'", table_name)
            return True
        except Exception as e:
            logging.error("Database import failed: %s", e)
            return False

    def batch_process(self, files, batch_size=5):
        """Process *files* in batches of *batch_size*: read, merge, save, import.

        Fixes over the original:
        - exact ceil division for the batch total (the old
          ``total // size + 1`` over-counted when total was a multiple of size);
        - each batch's CSV carries the batch number, so batches saved on the
          same day no longer overwrite one another.
        """
        total_files = len(files)
        total_batches = -(-total_files // batch_size)  # ceil division
        for i in range(0, total_files, batch_size):
            batch_no = i // batch_size + 1
            batch_files = files[i:i + batch_size]
            logging.info("Processing batch %d of %d", batch_no, total_batches)
            data_frames = self.read_excel_files(batch_files)
            dff = self.merge_data_frames(data_frames)
            if not self.save_merge_result(dff, suffix=f" batch{batch_no}"):
                logging.error("Failed to save merged data.")
                continue
            if not self.import_to_database(dff):
                logging.error("Failed to import data into database.")
                continue
            logging.info("Batch %d processed successfully.", batch_no)

    def merge_excel(self, files, header):
        """Entry point: batch-process *files*; False when *files* is empty.

        *header* is accepted for backward compatibility but is currently
        unused — per-file header handling lives in read_excel_files.
        """
        if not files:
            logging.error("No files provided for merging.")
            return False
        self.batch_process(files)
        return True


if __name__ == '__main__':
    excel_merger = fileinfo()
    # Build full paths for every entry in the watched directory; os.path.join
    # replaces the manual '\\' concatenation and is correct on any platform.
    file_locations = [os.path.join(excel_merger.file_dir, name)
                      for name in excel_merger.get_FileName()]
    success = excel_merger.merge_excel(file_locations, header=None)
    # Report the outcome instead of silently discarding the return value.
    logging.info("Merge run finished: %s", "ok" if success else "failed")

