import datetime
import logging
import os
from collections import Counter
from operator import itemgetter

import jieba
import jieba.posseg as pseg
from sqlalchemy import create_engine, Column, String, DateTime, Integer
from sqlalchemy.orm import declarative_base, sessionmaker

from app.config import Config

# Database setup
DATABASE_URL = Config().mysql_url  # connection string taken from config.py
Base = declarative_base()
engine = create_engine(DATABASE_URL)
Session = sessionmaker(bind=engine)

# Part-of-speech tags to keep when filtering segmented words
KEEP_POS = {
    'n',    # common noun
    'nr',   # person name
    'ns',   # place name
    'nt',   # organization name
    'nz',   # other proper noun
    'vn',   # verbal noun
    'an',   # adjectival noun
    'j',    # abbreviation
}

class Post(Base):
    """ORM model for the t_post table: news posts whose titles are mined for terms."""
    __tablename__ = 't_post'
    id = Column(Integer, primary_key=True, autoincrement=True)
    title = Column(String(255))       # post title; this is the text that gets segmented
    create_date = Column(DateTime)    # creation time; used for the time-window queries
    update_date = Column(DateTime)
    content_date = Column(DateTime)
    delete_flag = Column(Integer, default=0)  # 0 = available (default active state)

class NewTerms(Base):
    """ORM model for the new_terms table: a discovered term and its cumulative count."""
    __tablename__ = 'new_terms'
    id = Column(Integer, primary_key=True, autoincrement=True)
    term = Column(String(255), nullable=False)   # the segmented word itself
    count = Column(Integer, nullable=False)      # cumulative number of occurrences seen
    # NOTE(review): default is naive UTC (utcnow), but the query code compares
    # created_at against local datetime.now() — confirm intended time zone.
    created_at = Column(DateTime, default=datetime.datetime.utcnow)

def extract_new_terms(existing_terms=None):
    """Count previously unseen keywords in post titles from the last three days.

    Each title is segmented with jieba POS tagging; a word is counted only if
    its POS tag is in KEEP_POS, it is longer than one character, and it is not
    already present in *existing_terms*. A frequency-sorted report is printed.

    Args:
        existing_terms: set of terms to exclude; defaults to an empty set.

    Returns:
        Counter (dict subclass) mapping each new term to its occurrence count.
    """
    if existing_terms is None:
        existing_terms = set()

    session = Session()
    try:
        # Posts created within the last three days.
        three_days_ago = datetime.datetime.now() - datetime.timedelta(days=3)
        recent_posts = (
            session.query(Post)
            .filter(Post.create_date >= three_days_ago)
            .all()
        )

        recent_terms_count = Counter()
        for post in recent_posts:
            # POS-tagged segmentation; keep only noun-like keywords.
            recent_terms_count.update(
                word
                for word, flag in pseg.cut(post.title)
                if flag in KEEP_POS and len(word) > 1 and word not in existing_terms
            )
    finally:
        # BUG FIX: the original only closed the session on the success path,
        # leaking the connection if the query raised.
        session.close()

    # Report, sorted by descending frequency (most_common preserves
    # insertion order for ties, matching the original stable sort).
    sorted_terms = recent_terms_count.most_common()
    if sorted_terms:
        print("\n最近三天出现的新词汇（按频率排序）：")
        print("-" * 40)
        print("词汇\t\t频率")
        print("-" * 40)
        for term, count in sorted_terms:
            print(f"{term:<10}\t{count}")
        print("-" * 40)
    else:
        print("没有发现新词汇")

    return recent_terms_count


def get_existing_terms(session):
    """Return the set of all terms currently stored in the new_terms table.

    Args:
        session: an open SQLAlchemy session (caller owns its lifecycle).

    Returns:
        set[str] of every NewTerms.term value.
    """
    return {row.term for row in session.query(NewTerms).all()}

def update_new_terms(existing_terms):
    """Tokenize post titles published since the last run and upsert word counts.

    Considers posts in the window (latest new_terms.created_at, now - 3 days];
    if the table is empty, the window starts 30 days ago. Words already in
    *existing_terms* are skipped; every other token's frequency is added to
    the new_terms table (incrementing existing rows, inserting new ones).

    Args:
        existing_terms: set of terms to ignore; None is treated as empty.
    """
    if existing_terms is None:
        existing_terms = set()

    session = Session()
    try:
        # Resume from the newest created_at we recorded; otherwise default
        # to a 30-day lookback.
        latest_term = session.query(NewTerms).order_by(NewTerms.created_at.desc()).first()
        if latest_term:
            start_time = latest_term.created_at
        else:
            start_time = datetime.datetime.now() - datetime.timedelta(days=30)

        # Only posts at least three days old are processed.
        end_time = datetime.datetime.now() - datetime.timedelta(days=3)
        # NOTE(review): created_at defaults to utcnow while this arithmetic
        # uses local now(); on non-UTC hosts the window can overlap a prior
        # run and double-count — confirm intended time zone handling.
        if start_time > end_time:
            # Nothing new to process yet.
            print("start_time大于end_time，不需要进行分词频率更新")
            return

        recent_posts = session.query(Post).filter(
            Post.create_date >= start_time, Post.create_date <= end_time
        ).all()

        # Count previously unseen words across all titles in the window.
        new_terms_count = Counter()
        for post in recent_posts:
            new_terms_count.update(
                word for word in jieba.cut(post.title) if word not in existing_terms
            )

        # Upsert each word's frequency.
        for term, count in new_terms_count.items():
            existing_term = session.query(NewTerms).filter(NewTerms.term == term).first()
            if existing_term:
                existing_term.count += count  # accumulate onto the stored total
            else:
                session.add(NewTerms(term=term, count=count))

        session.commit()
    finally:
        session.close()

if __name__ == '__main__':
    # Create the tables if they do not exist yet.
    Base.metadata.create_all(engine)

    # Phase 1: bring the new_terms table up to date with posts that
    # arrived since the last recorded run.
    session = Session()
    try:
        existing_terms = get_existing_terms(session)
    finally:
        session.close()
    update_new_terms(existing_terms)

    # Phase 2: re-read the now-updated term list and report fresh terms
    # from the last three days.
    # BUG FIX: the original reused the session after close(); open a new one.
    session = Session()
    try:
        existing_terms = get_existing_terms(session)
    finally:
        session.close()
    new_terms = extract_new_terms(existing_terms)