import redis
import os
from bs4 import BeautifulSoup
import datetime
import git
import json
import requests
from urllib.parse import urlparse, parse_qs
import pickle

import config

# Shared Redis client used by all functions below. 'host.docker.internal'
# lets a containerized app reach a Redis instance running on the Docker
# host (default port 6379, logical db 0).
r = redis.Redis(host='host.docker.internal', port=6379, db=0)


def get_html_files(dir_path, ext='.html'):
    """Recursively collect files under *dir_path* ending with *ext*.

    Args:
        dir_path: Root directory to walk.
        ext: File-name suffix to match (default '.html', preserving the
            original behavior).

    Returns:
        List of full paths (root joined with file name) for every match.
    """
    matches = []
    for root, _dirs, files in os.walk(dir_path):
        matches.extend(
            os.path.join(root, name) for name in files if name.endswith(ext)
        )
    return matches

def find_image_path(path, extensions=('.jpg', '.jpeg', '.png', '.gif')):
    """Return the first directory under *path* containing an image file.

    Walks *path* top-down and returns the directory (the ``root`` from
    ``os.walk``) of the first file whose name ends with one of
    *extensions*; returns ``None`` when no image is found.

    Args:
        path: Root directory to search.
        extensions: Tuple of image suffixes to match (defaults preserve
            the original behavior).
    """
    for root, _dirs, files in os.walk(path):
        for name in files:
            # str.endswith accepts a tuple — one call replaces the
            # original chain of four `or`ed endswith tests.
            if name.endswith(extensions):
                return root
    return None

def saveHtmlInformation():
    """Index every HTML file under PROJECT_PATH into the Redis set 'html_data'.

    For each .html file, records a JSON object with: the web-facing file
    name, the <title> text, the base names of referenced images, the
    file's last git commit time, and the modification times of those
    images. Records are added to the Redis set 'html_data' (sadd on an
    existing member is a no-op, so re-runs are idempotent).
    """
    # Walk the project tree for HTML files.
    html_files = get_html_files(config.PROJECT_PATH)
    print('start save html information', html_files)
    repo = git.Repo(config.PROJECT_PATH)
    for filepath in html_files:
        # get_html_files already returns paths rooted at PROJECT_PATH, so
        # the original re-join with PROJECT_PATH was redundant.
        with open(filepath, 'r', encoding='utf-8') as f:
            soup = BeautifulSoup(f.read(), 'html.parser')
            # Guard soup.title.string as well: an empty <title></title>
            # yields string=None and the original code raised
            # AttributeError on .strip().
            title = soup.title.string.strip() if soup.title and soup.title.string else ''
            img_tags = soup.find_all('img')
            srcs = [tag['src'] for tag in img_tags if 'src' in tag.attrs]
            # Resolve image src values relative to the HTML file's folder.
            img_srcs = [os.path.join(os.path.dirname(filepath), s) for s in srcs]
            img_name = [os.path.basename(s) for s in srcs]
        # Map the on-disk path to its public URL prefix.
        filename = (os.path.splitext(filepath)[0]
                    .replace(config.PROJECT_PATH, config.WEB_PAGE_PREFIX)
                    .replace('\\', '/') + '.html')
        # Last commit touching this file; iter_commits(max_count=1) is
        # cheaper than blame for "most recent commit".
        commits = list(repo.iter_commits(paths=filepath, max_count=1))
        last_commit_time = commits[0].committed_datetime.strftime('%Y-%m-%d %H:%M:%S') if commits else ''
        # Modification times of the referenced images. A broken <img src>
        # used to crash os.path.getmtime; record '' instead so the list
        # stays index-aligned with img_name.
        img_mod_times = []
        for img_path in img_srcs:
            if os.path.exists(img_path):
                mtime = os.path.getmtime(img_path)
                img_mod_times.append(
                    datetime.datetime.fromtimestamp(mtime).strftime('%Y-%m-%d %H:%M:%S'))
            else:
                img_mod_times.append('')
        # Store the record in Redis. Sets ignore duplicate adds, so the
        # former sismember pre-check was unnecessary.
        data = {
            'filename': filename,
            'title': title,
            'img_name': img_name,
            'last_commit_time': last_commit_time,
            'img_mod_times': img_mod_times,
        }
        r.sadd('html_data', json.dumps(data))


def getHtmlInformation():
    """Return every record stored in the Redis set 'html_data'.

    Each member is decoded from JSON; the result is ordered newest
    commit first (descending 'last_commit_time').
    """
    records = [json.loads(raw) for raw in r.smembers('html_data')]
    records.sort(key=lambda rec: rec['last_commit_time'], reverse=True)
    return records

def addHtmlInformation(webPageFileName, title):
    """Insert or refresh a page record in the Redis set 'html_data'.

    Looks up an existing record by its image name
    (``webPageFileName + '.png'``). If found, the old member is removed
    and re-added with updated filename/title/timestamps; otherwise a new
    record is created.

    Bug fixes vs. the original: (1) the original mutated the RAW redis
    member (bytes/str) as if it were a dict, raising TypeError; the
    member must be json-parsed first. (2) after the update branch the
    original fell through to ``json.dumps(data)`` with ``data`` never
    bound, raising NameError (and would have double-added the record).

    Args:
        webPageFileName: Page base name (no extension).
        title: Page title to store.

    Returns:
        The full sorted record list from getHtmlInformation().
    """
    now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    target_img = webPageFileName + '.png'
    filename = config.WEB_PAGE_PREFIX + '/' + config.HTML_FOLDER + webPageFileName + '.html'
    for raw in r.smembers('html_data'):
        record = json.loads(raw)
        if record.get('img_name') == target_img:
            # Matching record exists: drop the stale member, re-add the
            # refreshed one. (Set members are immutable strings, so
            # "update" is remove + add.)
            r.srem('html_data', raw)
            record['filename'] = filename
            record['title'] = title
            record['last_commit_time'] = now
            record['img_mod_times'] = now
            r.sadd('html_data', json.dumps(record))
            break
    else:
        # No matching record: create a fresh one.
        data = {
            'filename': filename,
            'title': title,
            'img_name': target_img,
            'last_commit_time': now,
            'img_mod_times': now,
        }
        r.sadd('html_data', json.dumps(data))
    return getHtmlInformation()

