import streamlit as st
from metapub import PubMedFetcher
from components.chat_utils import ChatAgent
from components.chat_prompts import chat_prompt_template, qa_template
from components.llm import llm
from components.layout_extensions import render_app_info
from backend.abstract_retrieval.pubmed_retriever import PubMedAbstractRetriever
from backend.data_repository.local_data_store import LocalJSONStore
from backend.rag_pipeline.chromadb_rag import ChromaDbRag
from backend.rag_pipeline.embeddings import embeddings
import redis

# Instantiate the backend collaborators used throughout the app.
pubmed_client = PubMedAbstractRetriever(PubMedFetcher())  # retrieves PubMed abstracts for a question
data_repository = LocalJSONStore(storage_folder_path="backend/data")  # local JSON persistence for retrieved datasets
rag_client = ChromaDbRag(persist_directory="backend/chromadb_storage", embeddings=embeddings)  # vector index store
chat_agent = ChatAgent(prompt=chat_prompt_template, llm=llm)  # drives retrieval + chat over indexed abstracts

# Connect to the local Redis server (db 0), used as a page-visit counter.
# NOTE(review): redis-py connects lazily, so an unreachable server only fails on
# the first command (the `incr` inside main) — confirm a local Redis is guaranteed in deployment.
r = redis.Redis(host='127.0.0.1', port=6379, db=0)


def main():
    """Render the PubMed screener app.

    Layout: logo + visit counter | app info + question form | answer column,
    followed by a chat section over previously saved queries.
    """
    # Best-effort visit counter: a Redis outage must not take the whole app down.
    # NOTE(review): this increments on every Streamlit rerun (any widget
    # interaction), so it counts reruns rather than unique visits — confirm intended.
    try:
        visit_count = r.incr('visit_count')
    except redis.exceptions.RedisError:
        visit_count = None

    st.set_page_config(
        page_title="PubMed 筛查器",  # page title
        page_icon='../assets/favicon32-32.ico',  # page icon
        layout='wide'
    )

    # Three columns: logo | app info + question form | answer output.
    column_logo, column_app_info, column_answer = st.columns([1, 4, 4])

    with column_logo:
        _render_logo(visit_count)

    with column_app_info:
        _render_question_section(column_answer)

    _render_chat_section()


def _render_logo(visit_count):
    """Show the logo and, when the counter is available, the visit-count badge."""
    st.image('../assets/m.png')
    if visit_count is not None:
        st.markdown(
            f"""
            <div style="text-align: center; margin-top: 10px; padding: 5px 10px; background-color: rgba(0, 0, 0, 0.3); color: white; font-size: 11px; border-radius: 5px;">
                本页面总访问量: {visit_count}
            </div>
            """,
            unsafe_allow_html=True
        )


def _render_question_section(column_answer):
    """Collect a scientific question, fetch abstracts, index them, and show the answer.

    Args:
        column_answer: the Streamlit column the generated answer is written into.
    """
    # App info, including example questions that serve as user prompts.
    render_app_info()

    # Question input form.
    st.markdown('<h3 style="font-size: 18px;">请输入您的问题！</h3>', unsafe_allow_html=True)
    placeholder_text = "在此输入您的问题..."
    scientist_question = st.text_input("您的问题是什么？", placeholder=placeholder_text, key="question_input")
    get_articles = st.button('获取文章和答案')

    # Guard clause: only proceed on an explicit click with a real question.
    if not get_articles or not scientist_question or scientist_question == placeholder_text:
        return

    # Spinner only while actual work is being done (fetch, index, answer).
    with st.spinner('正在获取摘要，这可能需要一些时间...'):
        retrieved_abstracts = pubmed_client.get_abstract_data(scientist_question)
        if not retrieved_abstracts:
            st.write('未找到摘要。')
            return

        # Persist the abstracts and build a vector index keyed by this query.
        query_id = data_repository.save_dataset(retrieved_abstracts, scientist_question)
        documents = data_repository.create_document_list(retrieved_abstracts)
        rag_client.create_vector_index_for_user_query(documents, query_id)

        # Retrieve the most relevant documents and answer directly in the UI.
        vector_index = rag_client.get_vector_index_by_user_query(query_id)
        retrieved_documents = chat_agent.retrieve_documents(vector_index, scientist_question)
        chain = qa_template | llm

        with column_answer:
            st.markdown(f"##### 对您的问题'{scientist_question}'的回答")
            st.write(chain.invoke({
                "question": scientist_question, 
                "retrieved_abstracts": retrieved_documents,
            }).content)


def _render_chat_section():
    """Let the user pick a past query and chat over its indexed abstracts."""
    query_options = data_repository.get_list_of_queries()
    if not query_options:
        return

    st.header("与摘要聊天")
    selected_query = st.selectbox('选择一个过去的查询', options=list(query_options.values()), key='selected_query')
    if not selected_query:
        return

    # Map the displayed query text back to its storage id.
    selected_query_id = next(key for key, val in query_options.items() if val == selected_query)
    vector_index = rag_client.get_vector_index_by_user_query(selected_query_id)

    # Clear chat history when the user switches to a different past query.
    if 'prev_selected_query' in st.session_state and st.session_state.prev_selected_query != selected_query:
        chat_agent.reset_history()

    st.session_state.prev_selected_query = selected_query

    # Start (or continue) the chat session for the selected query.
    chat_agent.start_conversation(vector_index, selected_query)


# Script entry point: only run the app when executed directly (e.g. via `streamlit run`).
if __name__ == "__main__":
    main()
