
# Companion cron entry (daily at 01:00): delete CSV backups older than 2 days from /srv
# 0 1 * * * find /srv -name "*.csv" -type f -mtime +2 -exec rm {} \;

import os
import pandas as pd
import boto3
from datetime import datetime, timedelta
import requests
import pymysql
import logging  # logging module

# Configuration
class Config:
    """Application-wide configuration constants.

    Secrets (AWS key pair, DB password) are pulled from the environment so
    they never live in source control; hostnames and bucket names are fixed.
    """
    # Date format shared by file names and SQL date bounds.
    DATE_FORMAT = "%Y-%m-%d"

    # --- AWS / S3 ---
    AWS_ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY')  # from environment
    AWS_SECRET_KEY = os.environ.get('AWS_SECRET_KEY')  # from environment
    S3_BUCKET_NAME = "pro-astralsec-s3-db"

    # --- MySQL connection ---
    DB_HOST = "pro-rds-shard-slave.astralsec.com"
    DB_USER = "dwh_reader"
    DB_PASSWORD = os.environ.get('DB_PASSWORD')  # from environment
    DB_NAME = "bos_shard"

def send_notification(title, content):
    """
    Send a plain-text notification to the fixed Lark webhook.

    Parameters:
    title (str): first line of the message
    content (str): body appended after the title

    Failures are logged but never raised, so a notification problem
    cannot abort the backup job itself.
    """
    message = {
        "msg_type": "text",
        "content": {
            "text": f"{title}\n{content}"
        }
    }
    lark_headers = {"Content-Type": "application/json"}
    webhook_url = "https://open.larksuite.com/open-apis/bot/v2/hook/d1c51ec6-429a-4221-83fe-26973b12af58"
    try:
        # timeout keeps the cron job from hanging forever if Lark is unreachable
        response = requests.post(webhook_url, json=message, headers=lark_headers, timeout=10)
        if response.status_code == 200:
            logging.info("Notification sent successfully.")
        else:
            logging.error(f"Failed to send notification. Status code: {response.status_code}, Response text: {response.text}")
    except requests.exceptions.RequestException as e:
        logging.error(f"An error occurred while sending notification: {e}")

class S3Uploader:
    """
    Thin wrapper around a boto3 S3 client bound to the backup bucket.
    """
    def __init__(self):
        # Credentials come from the environment via Config.
        self.s3_client = boto3.client(
            's3',
            aws_access_key_id=Config.AWS_ACCESS_KEY,
            aws_secret_access_key=Config.AWS_SECRET_KEY,
        )

    def file_exists_in_s3(self, s3_object_key):
        """
        Check whether an object already exists in the backup bucket.

        Parameters:
        s3_object_key (str): key of the S3 object

        Returns:
        bool: True if the object exists; False if it is missing or the
              check failed (genuine failures are logged).
        """
        try:
            self.s3_client.head_object(Bucket=Config.S3_BUCKET_NAME, Key=s3_object_key)
            return True
        except self.s3_client.exceptions.ClientError as e:
            # A 404 simply means the object is not there yet -- that is the
            # expected "does not exist" outcome, not an error worth logging.
            if e.response.get("Error", {}).get("Code") in ("404", "NoSuchKey"):
                return False
            logging.error(f"Error checking file existence in S3: {e}")
            return False
        except Exception as e:
            # Anything else (credentials, networking, ...) is a real failure.
            logging.error(f"Error checking file existence in S3: {e}")
            return False

    def upload_to_s3(self, local_file_path, s3_object_key):
        """
        Upload a local file to the backup bucket.

        Parameters:
        local_file_path (str): path of the local file to upload
        s3_object_key (str): destination key in the bucket

        Returns:
        bool: True on success, False on failure -- lets callers avoid
              reporting success for an upload that never happened.
              (Previously returned None, so truthiness is backward-compatible.)
        """
        try:
            self.s3_client.upload_file(local_file_path, Config.S3_BUCKET_NAME, s3_object_key)
            logging.info(f"File {local_file_path} uploaded to S3 bucket {Config.S3_BUCKET_NAME} with key {s3_object_key}")
            return True
        except Exception as e:
            logging.error(f"Error uploading file to S3: {e}")
            return False

def _export_yesterday(yesterday, yesterday_str):
    """Fetch all of yesterday's bos_balance_flow rows from MySQL.

    Returns a pandas DataFrame, or None if the database step failed
    (the error is logged).
    """
    # Half-open interval [yesterday 00:00, today 00:00). Parameterized to
    # avoid SQL injection, and the "< next midnight" upper bound includes
    # the final millisecond of the day that the old
    # "< 23:59:59.999" bound silently dropped.
    start = f"{yesterday_str} 00:00:00"
    end = f"{(yesterday + timedelta(days=1)).strftime(Config.DATE_FORMAT)} 00:00:00"
    sql_query = "SELECT * FROM bos_balance_flow WHERE created_at >= %s AND created_at < %s"
    try:
        with pymysql.connect(host=Config.DB_HOST, user=Config.DB_USER,
                             password=Config.DB_PASSWORD, database=Config.DB_NAME) as connection:
            return pd.read_sql_query(sql_query, connection, params=(start, end))
    except pymysql.Error as e:
        logging.error(f"Database connection error: {e}")
        return None


def _notify_backup(csv_filename, s3_object_key, suffix=""):
    """Send the standard 'backup succeeded' Lark notification for a file.

    suffix is appended to the size line (e.g. a note that the object
    already existed in S3).
    """
    file_size = os.path.getsize(csv_filename)
    file_size_mb = file_size / (1024 * 1024)
    message = f"S3路径：/{s3_object_key} {file_size_mb:.2f} MB{suffix}"
    send_notification("只读库bos_balance_flow备份成功:", message)


def main():
    """
    Back up yesterday's bos_balance_flow rows to CSV and ship it to S3.

    Steps: reuse the local CSV if it already exists (skip the query);
    otherwise export yesterday's rows from MySQL to CSV. Then upload the
    file to S3 unless it is already there, and send a Lark notification
    either way.
    """
    yesterday = datetime.now() - timedelta(days=1)
    yesterday_str = yesterday.strftime(Config.DATE_FORMAT)

    # Objects are grouped in S3 by month, e.g. "2024.05/bos_balance_flow_...".
    current_month_directory = yesterday.strftime("%Y.%m")
    csv_filename = f"bos_balance_flow_{yesterday_str}.csv"
    s3_object_key = f"{current_month_directory}/{csv_filename}"

    if os.path.exists(csv_filename):
        logging.info(f"File {csv_filename} already exists locally. Skipping database query")
    else:
        df = _export_yesterday(yesterday, yesterday_str)
        if df is None:
            return
        try:
            df.to_csv(csv_filename, index=False)
        except IOError as e:
            logging.error(f"Error saving data to CSV: {e}")
            return

    s3_uploader = S3Uploader()
    if s3_uploader.file_exists_in_s3(s3_object_key):
        logging.info(f"File {csv_filename} already exists in S3. Skipping upload.")
        _notify_backup(csv_filename, s3_object_key, " 检测已存在")
    else:
        s3_uploader.upload_to_s3(csv_filename, s3_object_key)
        # Previously the "local file already existed" path uploaded without
        # notifying; both paths now report consistently.
        _notify_backup(csv_filename, s3_object_key)

if __name__ == "__main__":
    # Configure logging: without this the root logger stays at WARNING and
    # every logging.info(...) call in this script is silently discarded.
    logging.basicConfig(level=logging.INFO,
                        format="%(asctime)s %(levelname)s %(message)s")
    main()