# # run.py
# from app import create_app

# # import logging
# # from logging.handlers import RotatingFileHandler

# # # 配置日志
# # logging.basicConfig()
# # logger = logging.getLogger('sqlalchemy')
# # logger.setLevel(logging.DEBUG)

# # # 创建日志处理器，输出到文件
# # handler = RotatingFileHandler('sqlalchemy.log', maxBytes=10240, backupCount=5)
# # handler.setLevel(logging.DEBUG)
# # formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# # handler.setFormatter(formatter)
# # logger.addHandler(handler)


# import os
# import datetime
# from app.models import Article, db

# # 假设您的文件夹路径是 /path/to/articles
# articles_folder = '../ChaselWang.github.io/source/newposts'

# def import_articles_from_folder(folder_path):
#     for root, dirs, files in os.walk(folder_path):
#         for file in files:
#             if file.endswith('.md'):
#                 file_path = os.path.join(root, file)
#                 category = os.path.basename(root)
#                 title = os.path.splitext(file)[0]
#                 created_at = datetime.datetime.fromtimestamp(os.path.getctime(file_path))
#                 updated_at = datetime.datetime.fromtimestamp(os.path.getmtime(file_path))
                
#                 with open(file_path, 'r', encoding='utf-8') as f:
#                     lines = f.readlines()
#                     if len(lines) >= 3:
#                         tags = [tag.strip() for tag in lines[2].split('#')[1:]]
#                         content = ''.join(lines[3:])
#                     else:
#                         tags = []
#                         content = ''.join(lines)

#                 article = Article.query.filter_by(title=title).first()
#                 if article:
#                     # 更新已存在的文章
#                     article.category = category
#                     article.created_at = created_at
#                     article.updated_at = updated_at
#                     article.tags = ' '.join(tags)
#                     article.content = content
#                     db.session.commit()
#                 else:
#                     # 创建新文章
#                     article = Article(
#                         title=title,
#                         created_at=created_at,
#                         updated_at=updated_at,
#                         category=category,
#                         tags=' '.join(tags),
#                         content=content,
#                         view_count=0
#                     )
#                     db.session.add(article)
#                     db.session.commit()

# if __name__ == '__main__':
#     app = create_app()
#     with app.app_context():
#         import_articles_from_folder(articles_folder)
#     app.run(debug=True)

# # run.py

import os
import datetime
from app import create_app
from app.models import Article, db


# 自动检测文件修改
import time
import sys 
import subprocess


# # import logging
# # from logging.handlers import RotatingFileHandler

# # # 配置日志
# # logging.basicConfig()
# # logger = logging.getLogger('sqlalchemy')
# # logger.setLevel(logging.DEBUG)

# # # 创建日志处理器，输出到文件
# # handler = RotatingFileHandler('sqlalchemy.log', maxBytes=10240, backupCount=5)
# # handler.setLevel(logging.DEBUG)
# # formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# # handler.setFormatter(formatter)
# # logger.addHandler(handler)

def import_articles_from_folder(folder_path):
    """Walk *folder_path* recursively and sync every ``.md`` file into the DB.

    The immediate parent directory of each file becomes the article's
    category and the filename (without extension) becomes its title.
    Expected file layout (assumed from the parsing below — confirm against
    the actual post files):

        line 0: title line (ignored here)
        line 1: creation date as ``YYYY-MM-DD``
        line 2: ignored
        line 3: space-separated ``#tag`` list
        line 4+: article body

    Articles already in the database (matched by title) are updated in
    place; unknown titles are inserted with a zero view count.

    :param folder_path: root directory containing the Markdown sources
    """
    for root, dirs, files in os.walk(folder_path):
        for file in files:
            if not file.endswith('.md'):
                continue
            file_path = os.path.join(root, file)
            category = os.path.basename(root)
            title = os.path.splitext(file)[0]
            updated_at = datetime.datetime.fromtimestamp(os.path.getmtime(file_path))

            with open(file_path, 'r', encoding='utf-8') as f:
                lines = f.readlines()

            # BUG FIX: the old check was ``len(lines) >= 3`` but the code
            # reads lines[3] and lines[4:], which need at least 4 lines — a
            # 3-line file raised IndexError.  The short-file branch also left
            # ``created_at`` undefined, crashing the insert below.
            if len(lines) >= 4:
                created_at_str = lines[1].strip()  # second line holds the creation date
                created_at = datetime.datetime.strptime(created_at_str, '%Y-%m-%d')
                tags = [tag.strip() for tag in lines[3].split('#')[1:]]
                content = ''.join(lines[4:])
            else:
                # Malformed/short file: fall back to the filesystem creation
                # time (as the earlier version of this script did) and store
                # the whole file as the body.
                created_at = datetime.datetime.fromtimestamp(os.path.getctime(file_path))
                tags = []
                content = ''.join(lines)

            article = Article.query.filter_by(title=title).first()

            if article:
                # Update the existing article in place.
                article.category = category
                article.created_at = created_at
                article.updated_at = updated_at
                article.tags = ' '.join(tags)
                article.content = content
            else:
                # Create a brand-new article record.
                article = Article(
                    title=title,
                    created_at=created_at,
                    updated_at=updated_at,
                    category=category,
                    tags=' '.join(tags),
                    content=content,
                    view_count=0
                )
                db.session.add(article)
            db.session.commit()

def copy_directory_structure(src, dst):
    """Mirror the directory tree under *src* into *dst* without copying files.

    Parameters:
        src -- existing source directory
        dst -- destination directory (created if absent)

    Raises:
        ValueError -- when *src* does not exist.
    """
    if not os.path.exists(src):
        raise ValueError(f"Source directory {src} does not exist")

    # Ensure the destination level exists before descending.
    if not os.path.exists(dst):
        os.makedirs(dst)

    for entry in os.listdir(src):
        source_entry = os.path.join(src, entry)
        # Only directories are recreated; regular files are skipped on purpose.
        if os.path.isdir(source_entry):
            copy_directory_structure(source_entry, os.path.join(dst, entry))

def update_markdown(folder_path):
    """Rewrite each ``.md`` file under *folder_path* with a front-matter header.

    Expected source layout (assumed from the parsing below — confirm against
    the actual post files):

        line 0: title line (ignored; the filename becomes the title)
        line 1: creation date as ``YYYY-MM-DD``
        line 2: ignored
        line 3: space-separated ``#tag`` list
        line 4+: article body

    The rebuilt document (``---`` title/date/tags header + body) is written
    into the sibling ``_posts`` tree, so that directory structure must
    already exist (see ``copy_directory_structure``).

    BUG FIXES vs. the previous version:
    - the guard was ``len(lines) >= 3`` but the code read ``lines[3]``
      (IndexError for exactly-3-line files);
    - shorter files left ``content`` undefined and ``tags`` as a list,
      crashing on the string concatenation below.  Such files are now
      skipped instead of crashing.
    - removed unused locals (``category``, ``updated_at``) and no longer
      shadow the loop variable ``file`` in the output ``with`` block.
    """
    for root, dirs, files in os.walk(folder_path):
        for file in files:
            if not file.endswith('.md'):
                continue
            file_path = os.path.join(root, file)
            title = '---\ntitle: ' + os.path.splitext(file)[0]

            with open(file_path, 'r', encoding='utf-8') as f:
                lines = f.readlines()

            # Skip files too short to carry the expected header lines.
            if len(lines) < 4:
                continue

            created_at_str = lines[1].strip()  # second line holds the creation date
            created_at = 'date: ' + str(datetime.datetime.strptime(created_at_str, '%Y-%m-%d'))
            tag_list = [tag.strip() for tag in lines[3].split('#')[1:]]
            tags = "tags: \n  - " + "\n  - ".join(tag_list) + '\n---'
            content = ''.join(lines[4:])

            content = title + "\n" + created_at + "\n" + "\n" + tags + content
            # Redirect the write into the mirrored ``_posts`` tree.
            out_path = file_path.replace("newposts", "_posts").replace("\\", "/")
            with open(out_path, 'w', encoding='utf-8') as out_file:
                out_file.write(content)





# Directory to watch for article changes (relative path, resolved against
# the working directory this script is launched from).
MONITOR_DIR = '../ChaselWang.github.io/source/newposts'

# Per-file mtime snapshot from the previous poll; monitor_directory()
# compares a fresh snapshot against this to detect modifications.
last_mod_time = {}

# def get_last_mod_time(directory):
#     """获取指定目录下所有文件的最后修改时间"""
#     mod_times = {}
#     for filename in os.listdir(directory):
#         filepath = os.path.join(directory, filename)
#         if os.path.isfile(filepath):
#             mod_times[filename] = os.path.getmtime(filepath)
#     return mod_times

def get_all_files_mod_times(directory):
    """Collect the last-modification time of every file under *directory*.

    :param directory: root directory to walk recursively
    :return: dict mapping each file path to its ``os.path.getmtime`` value
    """
    return {
        os.path.join(dirpath, filename): os.path.getmtime(os.path.join(dirpath, filename))
        for dirpath, _subdirs, filenames in os.walk(directory)
        for filename in filenames
    }
# # 使用示例
# directory_to_monitor = 'path/to/your/directory'
# all_files_mod_times = get_all_files_mod_times(directory_to_monitor)
# for filepath, mod_time in all_files_mod_times.items():
#     print(f"文件: {filepath}, 最后修改时间: {mod_time}")







def restart_server():
    """Restart this script by replacing the current process image.

    ``os.execv`` re-executes the same interpreter with the same argv, so on
    success this call never returns and nothing after it runs.
    """
    # Alternative for Linux deployments running under gunicorn:
    #subprocess.call("kill -HUP `cat /var/run/gunicorn.pid`", shell=True)
    # Or simply re-exec the Python script (older variant):
    #os.execv(sys.executable, ['python'] + sys.argv)
    print(sys.executable)
    os.execv(sys.executable, [sys.executable] + sys.argv)


def monitor_directory():
    """Poll MONITOR_DIR every 10 seconds and restart the server on change.

    Intended to run in a daemon thread (see the commented-out wiring in the
    ``__main__`` block).  ``restart_server`` replaces the whole process via
    ``os.execv``, so no statement after that call is ever reached.
    """
    global last_mod_time
    # BUG FIX: seed the baseline on first use.  The module starts with
    # ``last_mod_time == {}``, so the very first comparison always looked
    # like a change and the process restarted immediately — and, because
    # exec resets module state, it kept restarting forever.
    if not last_mod_time:
        last_mod_time = get_all_files_mod_times(MONITOR_DIR)
    while True:
        current_mod_time = get_all_files_mod_times(MONITOR_DIR)
        if current_mod_time != last_mod_time:
            # Record the new snapshot before exec'ing: restart_server never
            # returns, so the old post-restart assignment was dead code.
            last_mod_time = current_mod_time
            print("检测到文件修改，重启服务器...")
            restart_server()
        time.sleep(10)  # poll every 10 seconds (old comment wrongly said 5)

        
def get_files_in_directory(directory):
    """Return the full paths of all files under *directory*, recursively."""
    return [
        os.path.join(dirpath, name)
        for dirpath, _subdirs, names in os.walk(directory)
        for name in names
    ]
    
# Folder whose Markdown sources should trigger the dev server's auto-reload.
folder_to_monitor = '../ChaselWang.github.io/source/newposts'

# Snapshot all file paths up front so they can be handed to app.run's
# ``extra_files`` argument below (the reloader then watches them too).
extra_files = get_files_in_directory(folder_to_monitor)        
app = create_app()  # Flask application factory from the app package

if __name__ == '__main__':

    # On startup: sync the Markdown sources into the database, mirror the
    # newposts directory layout into _posts, then regenerate the
    # front-matter'd copies there.  NOTE(review): paths are relative to the
    # CWD this script is launched from — confirm the expected layout.
    with app.app_context():
        import_articles_from_folder('../ChaselWang.github.io/source/newposts')
        copy_directory_structure('../ChaselWang.github.io/source/newposts','../ChaselWang.github.io/source/_posts')
        update_markdown('../ChaselWang.github.io/source/newposts')

    #last_mod_time = get_all_files_mod_times(MONITOR_DIR)
    # Disabled alternative: background polling thread for folder changes.
    #from threading import Thread
    #monitor_thread = Thread(target=monitor_directory)
    #monitor_thread.daemon = True
    #monitor_thread.start()

    # Start the Flask dev server; extra_files makes the reloader also watch
    # the Markdown sources, not only Python files.
    # app.run(debug=True)
    app.run(debug=True,host="0.0.0.0", port=5000, use_reloader=True, extra_files=extra_files)
    






