import os
import re
import streamlit as st
from openai import OpenAI
from streamlit_chat import message
from langchain.chains import RetrievalQA
from langchain_community.embeddings import HuggingFaceBgeEmbeddings, JinaEmbeddings
from langchain_openai import OpenAIEmbeddings
from langchain_community.vectorstores import FAISS, Chroma, elasticsearch
from langchain_openai import ChatOpenAI
from dotenv import load_dotenv
import warnings

# Startup initialisation: silence warnings and load .env BEFORE any secret
# is read from the environment.
warnings.filterwarnings('ignore')
load_dotenv()

# OpenAI-compatible client for the ModelScope inference endpoint.
# SECURITY: the ModelScope token was hard-coded here. It is now read from the
# environment (populated via .env); the old literal remains only as a
# backward-compatible fallback — rotate the leaked token and delete it.
client = OpenAI(
    base_url='https://api-inference.modelscope.cn/v1/',
    api_key=os.getenv('MODELSCOPE_API_KEY', 'b011f8fd-99a5-4633-8bf5-fef46cf7bd90'),
)
# Application-wide constants: model locations, API endpoint, retrieval and
# UI settings. Read-only after module import.
CONFIG = {
    # Local path to the BGE embedding model (not used by the current
    # Jina-based embedding setup below — kept for the alternative backend).
    "model_path": "./bge-large-zh-v1.5",
    # OpenAI-compatible ModelScope inference endpoint.
    "api_base": "https://api-inference.modelscope.cn/v1/",
    # Settings for HuggingFaceBgeEmbeddings (CPU, normalised vectors).
    "embedding_config": {
        "model_kwargs": {"device": "cpu"},
        "encode_kwargs": {"normalize_embeddings": True},
        # BGE query instruction (Chinese): "generate a representation of this
        # sentence for retrieving related articles".
        "query_instruction": "为这个句子生成表示以用于检索相关文章:"
    },
    # Retriever returns the top-9 matching chunks per query.
    "retriever_config": {"k": 9},
    "ui_config": {
        "page_title": "智能知识检索系统",
        "layout": "wide",
        "logos": {
            "user": "app/static/wen.ico",  # avatar for user chat bubbles
            "bot": "app/static/da.ico"     # avatar for bot chat bubbles
        }
    }
}

# Configure the Streamlit page; must be the first st.* UI call on a run.
st.set_page_config(
    page_title=CONFIG["ui_config"]["page_title"],
    layout=CONFIG["ui_config"]["layout"]
)

# Custom CSS injected into the page: `.think-block` styles the model's
# <think> reasoning text (grey italic with a green left border);
# `.container-border` draws a light rounded frame around containers.
STYLES = """
<style>
.think-block {
    background-color: #f0f0f0;
    border-left: 4px solid #4CAF50;
    padding: 1rem;
    margin: 1rem 0;
    font-style: italic;
    color: #555;
    border-radius: 0 5px 5px 0;
}
.container-border {
    border: 1px solid #e0e0e0;
    border-radius: 8px;
    padding: 1rem;
    margin-bottom: 1rem;
}
</style>
"""
st.markdown(STYLES, unsafe_allow_html=True)


@st.cache_resource
def init_llm(temperature):
    """Initialise the chat LLM (cached for the Streamlit session).

    Args:
        temperature: sampling temperature in [0.0, 1.0] from the sidebar.

    Returns:
        A ChatOpenAI client bound to the DeepSeek-R1-Distill model on the
        ModelScope endpoint, with streaming usage reporting enabled.
    """
    # SECURITY: the API token was hard-coded (duplicating the module-level
    # client's key). Read it from the environment instead; the old literal is
    # kept only as a backward-compatible fallback — rotate the leaked token.
    return ChatOpenAI(
        model="deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
        api_key=os.getenv('MODELSCOPE_API_KEY', 'b011f8fd-99a5-4633-8bf5-fef46cf7bd90'),
        base_url=CONFIG["api_base"],
        temperature=temperature,
        stream_usage=True,
    )


@st.cache_resource
def init_embeddings():
    """Initialise the embedding model (cached for the Streamlit session).

    Returns:
        A JinaEmbeddings instance using the jina-embeddings-v3 model.
    """
    # SECURITY: the Jina API key was hard-coded. Read it from the environment
    # (.env is loaded at startup); the old literal is kept only as a
    # backward-compatible fallback — rotate it and remove.
    return JinaEmbeddings(
        jina_api_key=os.getenv(
            'JINA_API_KEY',
            'jina_21eda124ed7e4ee895f7bb7197f3cc5ebT45ebFpQaWsIeieL6QYTY9Sojgu',
        ),
        model_name="jina-embeddings-v3",
    )


@st.cache_resource
def init_vectorstore(_embeddings):
    """Initialise the Elasticsearch vector store (cached for the session).

    Args:
        _embeddings: embedding model used to vectorise queries/documents.
            The leading underscore tells Streamlit not to hash this argument.

    Returns:
        An ElasticsearchStore bound to the configured index.
    """
    # SECURITY: the ES URL and credentials were hard-coded. Read them from the
    # environment with the old values as backward-compatible fallbacks —
    # move them fully into .env and rotate the password.
    return elasticsearch.ElasticsearchStore(
        embedding=_embeddings,
        index_name=os.getenv('ES_INDEX', 'rag_demo_jina'),
        es_url=os.getenv('ES_URL', 'http://10.252.10.4:31920'),
        es_user=os.getenv('ES_USER', 'elastic'),
        es_password=os.getenv('ES_PASSWORD', 'Grg@123bingbk'),
    )



def init_qa_chain(temperature):
    """Assemble the retrieval-QA chain.

    Args:
        temperature: sampling temperature forwarded to the chat model.

    Returns:
        A "stuff"-type RetrievalQA chain over the Elasticsearch vector store
        that also returns the source documents it retrieved.
    """
    # Wire the cached components together: embeddings -> store -> retriever.
    retriever = init_vectorstore(init_embeddings()).as_retriever(
        search_kwargs=CONFIG["retriever_config"]
    )
    return RetrievalQA.from_chain_type(
        llm=init_llm(temperature),
        retriever=retriever,
        chain_type="stuff",
        return_source_documents=True,
    )


def highlight_think(content):
    """Render answer text, showing <think>...</think> spans as styled blocks.

    Splits *content* on <think> tags (kept via the capturing group); tagged
    segments are rendered with the `.think-block` CSS class, everything else
    with a plain st.write.
    """
    # DOTALL lets a think-span cover multiple lines.
    pieces = re.split(r'(<think>.*?</think>)', content, flags=re.DOTALL)
    for piece in pieces:
        is_think = piece.startswith("<think>") and piece.endswith("</think>")
        if is_think:
            # Strip the surrounding tags before rendering.
            inner = piece[len("<think>"):-len("</think>")].strip()
            st.markdown(
                f'<div class="think-block">{inner}</div>',
                unsafe_allow_html=True,
            )
        else:
            text = piece.strip()
            if text:
                st.write(text)


def init_session_state():
    """Ensure the chat-history keys exist in Streamlit session state.

    BUG FIX: this initialisation code was originally mis-indented at module
    level, which left the function (called from main()) an empty no-op.
    """
    if "generated" not in st.session_state:
        st.session_state.generated = []  # bot answers, in conversation order
    if "past" not in st.session_state:
        st.session_state.past = []       # user questions, in conversation order


def render_sidebar():
    """Render the sidebar (title, usage examples, settings) and return the
    user-selected temperature.

    Returns:
        float: creativity/temperature value from the slider (default 0.7).
    """
    with st.sidebar:
        st.title('智能知识检索系统demo')
        st.markdown('---')
        st.markdown("""
            **使用示例：**
            - 提问：项目遵循什么原则
            - 提问：
            - 提问：
        """)
        st.header("对话设置")
        # "Creativity" slider: range 0.0–1.0, default 0.7, step 0.1.
        temperature = st.slider("创造力", 0.0, 1.0, 0.7, step=0.1)
        # system_prompt = st.text_area("系统角色设定", "你是一个友好、专业的AI助手")
    return temperature

def get_deepseek_response(messages):
    """Stream a DeepSeek response, yielding tagged text chunks.

    Args:
        messages: chat history in OpenAI message-dict format.

    Yields:
        ('think', text) for reasoning chunks, then ('answer', text) for the
        final-answer chunks. Chunks are also echoed to stdout for debugging.

    Note: this is a generator; the original `-> str` annotation was wrong and
    has been removed.
    """
    response = client.chat.completions.create(
        model='deepseek-ai/DeepSeek-R1-Distill-Qwen-7B',  # ModelScope Model-Id
        messages=messages,
        stream=True,
    )
    done_reasoning = False
    for chunk in response:
        delta = chunk.choices[0].delta
        # Some chunks may carry None instead of '' — use truthiness, and
        # getattr in case a delta lacks the non-standard reasoning field.
        reasoning_chunk = getattr(delta, 'reasoning_content', None)
        answer_chunk = delta.content
        if reasoning_chunk:
            print(reasoning_chunk, end='', flush=True)
            yield 'think', reasoning_chunk
        elif answer_chunk:
            if not done_reasoning:
                print('\n\n === Final Answer ===\n')
                done_reasoning = True
            print(answer_chunk, end='', flush=True)
            # BUG FIX: answer chunks were printed but never yielded, so
            # callers iterating this generator never received the answer.
            yield 'answer', answer_chunk



def main():
    """Application entry point: sidebar, QA chain, and two-column chat UI."""
    init_session_state()
    temperature=render_sidebar()

    # Build the retrieval-QA chain with the user-selected temperature.
    qa_chain = init_qa_chain(temperature)

    # Main layout: chat on the left (2/3), reference documents on the right (1/3).
    col1, col2 = st.columns([2, 1])


    user_input = st.chat_input("请输入问题：", key="input")# placed outside a column the chat input docks to the page bottom; inside a column it would render at the top

    with col1:
        st.header("智能问答界面")
        if user_input:
            try:
                # Synchronous (non-streaming) retrieval-augmented answer.
                response = qa_chain.invoke({"query": user_input})
                st.session_state.past.append(user_input)
                st.session_state.generated.append(response["result"])
                print(response)  # debug: dump the full chain output to stdout
                # Replay the whole conversation history.
                # NOTE(review): the history is only rendered when a question
                # was just submitted; on other reruns the chat area is empty.
                for i in (range(len(st.session_state.generated))):
                    message(
                        st.session_state.past[i],
                        is_user=True,
                        key=f"{i}_user",
                        logo=CONFIG["ui_config"]["logos"]["user"]
                    )
                    # Bot reply, with <think> spans rendered as styled blocks.
                    highlight_think(st.session_state.generated[i])



            except Exception as e:
                # Broad catch keeps the UI alive on API/retrieval failures.
                st.error(f"请求处理失败：{str(e)}")

    with col2:
        st.header("参考文档")
        # NOTE(review): relies on `response` leaking into main's locals() from
        # the col1 branch — fragile; consider storing source documents in
        # st.session_state instead.
        if "response" in locals():
            with st.container(height=800):
                for idx, doc in enumerate(response["source_documents"], 1):
                    with st.expander(f"参考文档 {idx}"):
                        st.markdown(f"```\n{doc.page_content}\n```")
                        st.markdown("---")

# Standard script entry guard.
if __name__ == "__main__":
    main()
