import hashlib
import logging
import os
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta, timezone
from urllib.parse import urlparse

import psycopg2
import pytz
import requests
import urllib3
from psycopg2 import sql
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry

# Suppress InsecureRequestWarning: image downloads use verify=False on purpose.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Logging configuration for the whole script
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Database connection configuration
# NOTE(review): 'globalMeida' looks like a typo for 'globalMedia' — it must
# match the actual database name, so confirm before renaming anything.
DB_CONFIG = {
    'dbname': 'globalMeida',
    'user': 'postgres',
    'password': 'Tech@2024',
    'host': 'localhost',
    'port': '5432'
}

# Directory where downloaded programme icons are cached
IMAGE_DIR = '/root/epg_images'

# Default country code used when the caller does not supply one
DEFAULT_COUNTRY = 'NZ'

# HTTP/HTTPS proxy used for all image downloads
PROXIES = {
    'http': 'http://127.0.0.1:8890',
    'https': 'http://127.0.0.1:8890'
}


def connect_to_db():
    """Open and return a new PostgreSQL connection using DB_CONFIG."""
    logging.info("Connecting to database...")
    connection = psycopg2.connect(**DB_CONFIG)
    return connection


def setup_database(cursor):
    """Create the EPG schema (tables and index) if it does not already exist.

    Runs every DDL statement through the supplied cursor; all statements use
    IF NOT EXISTS so the function is idempotent.
    """
    logging.info("Setting up database...")
    ddl_statements = (
        # Channels: one row per XMLTV <channel>
        """
        CREATE TABLE IF NOT EXISTS ext_epg_channels (
            id SERIAL PRIMARY KEY,
            channel_id VARCHAR(255) UNIQUE NOT NULL,
            display_name VARCHAR(255) NOT NULL,
            url TEXT,
            country VARCHAR(255)
        )
    """,
        # Programmes: one row per broadcast slot, unique per (channel, start)
        """
        CREATE TABLE IF NOT EXISTS ext_epg_programmes (
            id SERIAL PRIMARY KEY,
            channel_id VARCHAR(255) REFERENCES ext_epg_channels(channel_id),
            start_time TIMESTAMP WITH TIME ZONE NOT NULL,
            stop_time TIMESTAMP WITH TIME ZONE NOT NULL,
            title VARCHAR(255) NOT NULL,
            description TEXT,
            date DATE,
            rating VARCHAR(10),
            icon_url TEXT,
            CONSTRAINT unique_channel_start_time UNIQUE (channel_id, start_time)
        )
    """,
        # Category dictionary
        """
        CREATE TABLE IF NOT EXISTS ext_epg_categories (
            id SERIAL PRIMARY KEY,
            name VARCHAR(255) UNIQUE NOT NULL
        )
    """,
        # Many-to-many programme <-> category link table
        """
        CREATE TABLE IF NOT EXISTS ext_epg_programme_categories (
            programme_id INTEGER REFERENCES ext_epg_programmes(id),
            category_id INTEGER REFERENCES ext_epg_categories(id),
            PRIMARY KEY (programme_id, category_id)
        )
    """,
        # Episode numbering, one row per (programme, numbering system)
        """
        CREATE TABLE IF NOT EXISTS ext_epg_episode_numbers (
            id SERIAL PRIMARY KEY,
            programme_id INTEGER REFERENCES ext_epg_programmes(id),
            system VARCHAR(255) NOT NULL,
            value TEXT NOT NULL,
            UNIQUE (programme_id, system)
        )
    """,
        # Index backing the programme upsert / cleanup queries
        """
        CREATE INDEX IF NOT EXISTS idx_channel_start_time ON ext_epg_programmes (channel_id, start_time)
    """,
    )
    for statement in ddl_statements:
        cursor.execute(statement)
    logging.info("Database setup completed.")


def parse_datetime(datetime_str):
    """Parse an XMLTV timestamp ("YYYYMMDDHHMMSS [+-]HHMM") into an aware datetime.

    Bug fixed vs the original: the UTC offset was computed as
    ``int(offset) // 100 * 60``, which silently drops the minutes part of
    half-hour offsets ("+0530" became 300 minutes instead of 330) and, due
    to floor division, corrupts negative offsets ("-0530" became -360
    instead of -330). The offset is now split with divmod and the sign is
    applied afterwards. The offset field is also optional (defaults to UTC),
    matching XMLTV where the timezone suffix may be omitted.

    Args:
        datetime_str: Timestamp such as "20240315083000 +1300".

    Returns:
        A timezone-aware ``datetime.datetime``.

    Raises:
        ValueError: If the date/time or offset portion is malformed.
    """
    parts = datetime_str.split()
    dt = datetime.strptime(parts[0], "%Y%m%d%H%M%S")
    if len(parts) > 1:
        raw_offset = int(parts[1])  # e.g. "+1300" -> 1300, "-0530" -> -530
        sign = -1 if raw_offset < 0 else 1
        hours, minutes = divmod(abs(raw_offset), 100)
        tz = timezone(sign * timedelta(hours=hours, minutes=minutes))
    else:
        tz = timezone.utc  # offset omitted: assume UTC
    return dt.replace(tzinfo=tz)


def create_retry_session(retries=3, backoff_factor=0.3, status_forcelist=(500, 502, 504)):
    """Build a requests.Session whose HTTP and HTTPS adapters retry transient failures.

    Args:
        retries: Maximum attempts for total/read/connect failures.
        backoff_factor: Exponential backoff multiplier between attempts.
        status_forcelist: HTTP status codes that trigger a retry.

    Returns:
        A configured ``requests.Session``.
    """
    retry_policy = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    retrying_adapter = HTTPAdapter(max_retries=retry_policy)
    new_session = requests.Session()
    for scheme in ('http://', 'https://'):
        new_session.mount(scheme, retrying_adapter)
    return new_session


def download_image(url):
    """Download *url* into IMAGE_DIR and return the local file path, or None on failure.

    The file name is the MD5 hash of the URL (plus the URL's extension, or
    '.jpg' as a fallback), so a URL already downloaded is served from disk.

    Bug fixed vs the original: a failed or interrupted download used to leave
    a partial file at the final path, which the existence check above would
    later treat as a complete cached image. Data is now written to a
    temporary '.part' file and atomically renamed into place only on success;
    any partial file is removed on error.

    Args:
        url: Image URL; falsy values are logged and return None.

    Returns:
        Absolute local path of the image, or None when the download failed.
    """
    if not url:
        logging.warning(f"No URL provided for image download")
        return None

    # exist_ok avoids the exists()/makedirs() race of the original.
    os.makedirs(IMAGE_DIR, exist_ok=True)

    # MD5 of the URL gives a stable, unique cache key per URL.
    url_hash = hashlib.md5(url.encode()).hexdigest()
    file_extension = os.path.splitext(urlparse(url).path)[1] or '.jpg'
    local_path = os.path.join(IMAGE_DIR, f"{url_hash}{file_extension}")

    # Serve from cache when the image was fully downloaded before.
    if os.path.exists(local_path):
        logging.info(f"Image already exists locally: {local_path}")
        return local_path

    session = create_retry_session()
    temp_path = local_path + '.part'

    try:
        logging.info(f"Downloading image: {url}")
        # verify=False + PROXIES mirror the original behaviour; TLS warnings
        # are suppressed at module import time.
        response = session.get(url, stream=True, proxies=PROXIES, verify=False, timeout=30)
        response.raise_for_status()
        with open(temp_path, 'wb') as file:
            for chunk in response.iter_content(chunk_size=8192):
                file.write(chunk)
        os.replace(temp_path, local_path)  # atomic publish of the finished file
        logging.info(f"Image downloaded successfully: {local_path}")
        return local_path
    except (requests.RequestException, OSError) as e:
        logging.error(f"Error downloading image from {url}: {e}")
        # Remove any partial file so the next call retries the download.
        if os.path.exists(temp_path):
            try:
                os.remove(temp_path)
            except OSError:
                pass
        return None


def upsert_channel(cursor, channel, country):
    """Insert or update one XMLTV <channel> element in ext_epg_channels.

    Args:
        cursor: Open database cursor.
        channel: Parsed <channel> element (must have an 'id' attribute and
            a <display-name> child; <url> is optional).
        country: Country code stored alongside the channel.
    """
    channel_id = channel.get('id')
    display_name = channel.find('display-name').text
    url_elem = channel.find('url')
    url = None if url_elem is None else url_elem.text

    logging.info(f"Upserting channel: {channel_id} - {display_name}")
    upsert_sql = """
        INSERT INTO ext_epg_channels (channel_id, display_name, url, country)
        VALUES (%s, %s, %s, %s)
        ON CONFLICT (channel_id) DO UPDATE
        SET display_name = EXCLUDED.display_name, url = EXCLUDED.url, country = EXCLUDED.country
    """
    cursor.execute(upsert_sql, (channel_id, display_name, url, country))


def upsert_programme(cursor, programme):
    """Insert or update one XMLTV <programme> element and return its row id.

    Downloads the programme icon (if any) to local storage and stores the
    local path, not the remote URL, in ``icon_url``.

    Args:
        cursor: Open database cursor.
        programme: Parsed <programme> element with 'channel', 'start' and
            'stop' attributes and a <title> child.

    Returns:
        The ``ext_epg_programmes.id`` of the inserted/updated row.
    """
    def _child_text(path):
        # Return the text of an optional child element, or None when absent.
        node = programme.find(path)
        return None if node is None else node.text

    channel_id = programme.get('channel')
    start_time = parse_datetime(programme.get('start'))
    stop_time = parse_datetime(programme.get('stop'))
    title = programme.find('title').text
    desc = _child_text('desc')
    date = _child_text('date')
    rating = _child_text('rating/value')

    icon_elem = programme.find('icon')
    icon_url = icon_elem.get('src') if icon_elem is not None else None
    icon_local_path = download_image(icon_url) if icon_url else None

    logging.info(f"Upserting programme: {title} - {start_time}")
    cursor.execute("""
        INSERT INTO ext_epg_programmes (channel_id, start_time, stop_time, title, description, date, rating, icon_url)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        ON CONFLICT (channel_id, start_time) DO UPDATE
        SET stop_time = EXCLUDED.stop_time,
            title = EXCLUDED.title,
            description = EXCLUDED.description,
            date = EXCLUDED.date,
            rating = EXCLUDED.rating,
            icon_url = EXCLUDED.icon_url
        RETURNING id
    """, (channel_id, start_time, stop_time, title, desc, date, rating, icon_local_path))

    return cursor.fetchone()[0]


def get_or_insert_category(cursor, category_name):
    """Return the id of *category_name* in ext_epg_categories, inserting it when new.

    Args:
        cursor: Open database cursor.
        category_name: Category label from the XMLTV <category> element.

    Returns:
        The integer id of the (possibly pre-existing) category row.
    """
    logging.info(f"Getting or inserting category: {category_name}")
    cursor.execute("""
        INSERT INTO ext_epg_categories (name)
        VALUES (%s)
        ON CONFLICT (name) DO NOTHING
        RETURNING id
    """, (category_name,))
    inserted_row = cursor.fetchone()
    if inserted_row is not None:
        return inserted_row[0]
    # RETURNING yields no row on conflict, so look up the existing entry.
    cursor.execute("SELECT id FROM ext_epg_categories WHERE name = %s", (category_name,))
    return cursor.fetchone()[0]


def link_programme_category(cursor, programme_id, category_id):
    """Associate a programme with a category; a pre-existing link is a no-op."""
    logging.debug(f"Linking programme {programme_id} with category {category_id}")
    link_sql = """
        INSERT INTO ext_epg_programme_categories (programme_id, category_id)
        VALUES (%s, %s)
        ON CONFLICT DO NOTHING
    """
    cursor.execute(link_sql, (programme_id, category_id))


def upsert_episode_number(cursor, programme_id, system, value):
    """Insert or update the episode number for one (programme, numbering system) pair."""
    logging.debug(f"Upserting episode number for programme {programme_id}: {system} - {value}")
    episode_sql = """
        INSERT INTO ext_epg_episode_numbers (programme_id, system, value)
        VALUES (%s, %s, %s)
        ON CONFLICT (programme_id, system) DO UPDATE
        SET value = EXCLUDED.value
    """
    cursor.execute(episode_sql, (programme_id, system, value))


def clean_old_data(cursor, days_to_keep=7):
    """Delete programmes (and their dependent rows) that ended more than *days_to_keep* days ago.

    Child tables (episode numbers, category links) are purged first so the
    foreign-key constraints on ``ext_epg_programmes.id`` are never violated.

    Changes vs original: uses stdlib ``timezone.utc`` instead of the
    equivalent third-party ``pytz.utc`` (``datetime.now(tz)`` behaves
    identically for both), and drops a placeholder-less f-string.

    Args:
        cursor: Open database cursor.
        days_to_keep: Retention window in days (default 7).
    """
    cutoff_date = datetime.now(timezone.utc) - timedelta(days=days_to_keep)
    logging.info(f"Cleaning data older than {cutoff_date}")

    # Children first: both tables reference ext_epg_programmes(id).
    cursor.execute(
        "DELETE FROM ext_epg_episode_numbers WHERE programme_id IN (SELECT id FROM ext_epg_programmes WHERE stop_time < %s)",
        (cutoff_date,))
    cursor.execute(
        "DELETE FROM ext_epg_programme_categories WHERE programme_id IN (SELECT id FROM ext_epg_programmes WHERE stop_time < %s)",
        (cutoff_date,))
    cursor.execute("DELETE FROM ext_epg_programmes WHERE stop_time < %s", (cutoff_date,))

    logging.info("Cleaned old data")


def parse_and_insert_xml(xml_file, country=DEFAULT_COUNTRY, batch_size=100):
    """Stream-parse an XMLTV file and upsert its channels and programmes.

    Uses ``ET.iterparse`` so arbitrarily large guide files are processed with
    bounded memory; each handled element is cleared after use. Old data is
    purged at the end via ``clean_old_data``.

    Args:
        xml_file: Path to the XMLTV document.
        country: Country code stored with each channel (default DEFAULT_COUNTRY).
        batch_size: Number of processed elements between commits (new
            parameter; default preserves the original cadence of 100).

    Fixes vs original:
      * psycopg2's ``with connection`` block only manages the transaction —
        it never closes the connection. The connection is now explicitly
        closed in a ``finally`` clause.
      * The periodic-commit check ran on every parser event (including the
        unused 'start' events, and at count 0 since ``0 % 100 == 0``), so it
        could fire repeatedly between elements. It now runs exactly once per
        processed channel/programme. The stale comment claiming "every 1000
        elements" while the code used 100 is gone.
    """
    logging.info(f"Starting to parse XML file: {xml_file}")

    channel_count = 0
    programme_count = 0

    conn = connect_to_db()
    try:
        with conn:  # transaction scope: commit on success, rollback on error
            with conn.cursor() as cursor:
                setup_database(cursor)

                # Only 'end' events are needed; an element is complete there.
                for _, elem in ET.iterparse(xml_file, events=('end',)):
                    if elem.tag == 'channel':
                        upsert_channel(cursor, elem, country)
                        channel_count += 1
                    elif elem.tag == 'programme':
                        programme_id = upsert_programme(cursor, elem)
                        programme_count += 1

                        # Categories: dictionary row plus link row each.
                        for category in elem.findall('category'):
                            category_id = get_or_insert_category(cursor, category.text)
                            link_programme_category(cursor, programme_id, category_id)

                        # Episode numbers, one per numbering system.
                        for episode_num in elem.findall('episode-num'):
                            upsert_episode_number(cursor, programme_id, episode_num.get('system'), episode_num.text)
                    else:
                        continue  # not a channel/programme: skip commit bookkeeping

                    elem.clear()  # release memory held by the processed element

                    # Commit once every batch_size processed elements.
                    if (channel_count + programme_count) % batch_size == 0:
                        conn.commit()
                        logging.info(f"Processed {channel_count} channels and {programme_count} programmes so far...")

                # Commit whatever remains of the last partial batch.
                conn.commit()

                # Purge expired programmes and their dependent rows.
                clean_old_data(cursor)
                conn.commit()
    finally:
        conn.close()

    logging.info(f"Finished parsing {xml_file}. Upserted {channel_count} channels and {programme_count} programmes.")


if __name__ == "__main__":
    # (XMLTV file path, country code) pairs to import.
    epg_sources = [
        ('/root/guide.xml', 'NZ')
    ]

    for source_path, source_country in epg_sources:
        logging.info(f"Processing EPG data for {source_country}")
        parse_and_insert_xml(source_path, source_country)
        logging.info(f"Data import for {source_country} completed successfully.")

    logging.info("All EPG data processing completed.")