#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName  :xmlapi.py
# @Time      :2025/1/11 23:28
# @Author    :Admin
import xml.etree.ElementTree as ET
from datetime import datetime

import aiohttp
import asyncio
import gzip
import io
import json
import os
import pymongo as pymongo
import shutil
from apscheduler.schedulers.blocking import BlockingScheduler

from handle_log import HandleLog

# Remote EPG archive URL, local .gz path, and the decompressed XML path.
u = 'https://e.erw.cc/all.xml.gz'
l = './all.xml.gz'  # NOTE(review): never written to — gz() decompresses in memory; confirm if the .gz copy is still wanted
e = './all.xml'
# Project log handler (see handle_log.HandleLog).
logger = HandleLog()

# MongoDB connection; pymongo connects lazily on first operation.
client = pymongo.MongoClient("mongodb://localhost:27017/")
db = client["iBox"]
collection = db["channels"]
# Total HTTP timeout is 10 seconds (the original comment said 30, but the code uses 10).
timeout = aiohttp.ClientTimeout(total=10)


async def d(f):
    """Delete file *f* from disk if it exists, logging the outcome either way."""
    if not os.path.exists(f):
        logger.info(f"文件不存在: {f}")
        return
    os.remove(f)
    logger.info(f"文件已删除: {f}")


async def gz(u, g, x):
    """Download the gzipped EPG file from *u* and decompress it to path *x*.

    u: source URL of the .gz archive.
    g: intended local .gz path — currently UNUSED; the archive is
       decompressed entirely in memory and never saved to disk.
    x: output path for the decompressed XML.

    Exits the process on any non-200 HTTP response.
    """
    h = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64 erw) AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/128.0.0.0 Safari/537.36 '
    }
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(u, headers=h) as resp:
            if resp.status == 404:
                logger.info(f"错误: 文件不存在或地址错误: {u}")
                exit(1)
            # Any other non-success status also leaves us without a valid
            # gzip payload — previously 500/403/etc. fell through and blew
            # up inside gzip decompression with a confusing BadGzipFile.
            if resp.status != 200:
                logger.info(f"错误: 下载失败，HTTP 状态码 {resp.status}: {u}")
                exit(1)
            rd = await resp.read()
            # Decompress in memory and stream straight to the output file.
            with gzip.GzipFile(fileobj=io.BytesIO(rd)) as fin:
                with open(x, 'wb') as fout:
                    shutil.copyfileobj(fin, fout)


async def log(dl, s, o, f):
    """Append an audit line for a programme that had no <title> (placeholder '精彩节目' was used).

    dl: channel display name — NOTE(review): currently not written into the
        log line; confirm whether it should be included in the message.
    s/o: raw start/stop timestamps (YYYYMMDDHHMMSS).
    f: formatted date string (YYYY-MM-DD).
    """
    log_dir = '/home/wwwlogs'
    log_file = os.path.join(log_dir, 'xmlapi.log')
    # exist_ok=True avoids the check-then-create race of the original
    # (two concurrent calls could both see "missing" and one would crash).
    os.makedirs(log_dir, exist_ok=True)
    with open(log_file, 'a', encoding='utf-8') as m:
        m.write(f"{datetime.now()} - 开始时间： {s} 结束时间： {o} 日期： {f}. 插入频道名称： '精彩节目'.\n")


async def j(redis, d, f, p):
    """Store one day's EPG payload for a channel in Redis with a 7-day TTL.

    redis: async Redis client; d: channel display name; f: date string
    (YYYY-MM-DD); p: list of programme dicts for that date.
    The key is "{channel}:{date}".
    """
    key = f"{d}:{f}"
    payload = json.dumps({
        "date": f,
        "channel_name": d,
        "url": "e.erw.cc",
        "epg_data": p,
    })
    await redis.set(key, payload)
    await redis.expire(key, 7 * 24 * 3600)


# 保存数据到本地json文档
async def to_json(info):
    # 数据为空
    if not info:
        logger.info("数据为空，无法保存到本地JSON文档。")
        return
    # 删除旧的json文档
    if os.path.exists('./all.json'):
        os.remove('./all.json')
    # 写入新的json文档
    with open('./all.json', 'w', encoding='utf-8') as file:
        json.dump(info, file, indent=4, ensure_ascii=False)


# 保存数据到MongoDB
async def save_to_mongodb(info):
    # 检查collection是否存在
    if collection.name in db.list_collection_names():
        # 如果存在，先删除collection中的所有数据
        collection.delete_many({})
        logger.info("已删除旧数据")
    # 将info存储到MongoDB
    collection.insert_many(info)
    logger.info("已保存数据到MongoDB")


async def main():
    """Download the EPG XML, parse it into per-channel / per-day programme
    lists, store the result in MongoDB, then delete the temporary XML file.
    """
    await gz(u, l, e)
    if not os.path.exists(e):
        logger.info(f"错误: 解压后的文件不存在: {e}")
        exit(1)
    try:
        tree = ET.parse(e)
        root = tree.getroot()
    except ET.ParseError as perr:
        logger.info(f"XML 解析错误: {perr}")
        exit(1)

    # Index <programme> elements by their channel id ONCE. The original
    # rescanned every <programme> for every <channel>, i.e.
    # O(channels * programmes); this is a single pass. Document order per
    # channel is preserved.
    programmes_by_channel = {}
    for prog in root.findall('programme'):
        programmes_by_channel.setdefault(prog.get('channel'), []).append(prog)

    info = []
    for chan in root.findall('channel'):
        cid = chan.get('id')
        name_el = chan.find('display-name')
        # Guard: a missing <display-name> element (or empty text) would have
        # raised AttributeError on .text.lower() before the old `if dl:`
        # check could run. Skip such channels instead.
        if name_el is None or not name_el.text:
            continue
        dl = name_el.text.lower()

        # Group this channel's programmes by calendar day.
        per_day = {}
        for prog in programmes_by_channel.get(cid, []):
            s = prog.get('start').split(" ")[0]
            o = prog.get('stop').split(" ")[0]
            # Start timestamp prefix is YYYYMMDD; reformat to YYYY-MM-DD.
            day = datetime.strptime(s[:8], '%Y%m%d').strftime('%Y-%m-%d')
            title_el = prog.find('title')
            if title_el is not None and title_el.text:
                title = title_el.text.lower()
            else:
                # No title in the feed: use the placeholder and audit-log it.
                title = "精彩节目"
                await log(dl, s, o, day)
            entry = {
                "start": datetime.strptime(s, '%Y%m%d%H%M%S').strftime('%H:%M'),
                "end": datetime.strptime(o, '%Y%m%d%H%M%S').strftime('%H:%M'),
                "title": title,
            }
            per_day.setdefault(day, []).append(entry)

        # One document per (channel, date) pair.
        for day, epg in per_day.items():
            info.append({
                "date": day,
                "channel_name": dl,
                "url": "e.erw.cc",
                "epg_data": epg,
            })

    # Persist to MongoDB, then remove the temporary decompressed XML.
    await save_to_mongodb(info)
    await d(e)


if __name__ == '__main__':
    scheduler = BlockingScheduler()
    # Run main() every day at 01:00.
    # BUG FIX: the original passed `asyncio.run(main())` — which EXECUTED
    # main() immediately at startup and handed its None return value to
    # add_job (which requires a callable). Pass a lambda so the coroutine
    # is only run when the cron trigger fires.
    scheduler.add_job(lambda: asyncio.run(main()), 'cron', hour=1, minute=0)
    scheduler.start()
