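"""Gradio demo for fake-news detection (虚假信息检测).

The app compares a plain LLM judgment with a retrieval-augmented one: the news
item is split into sub-queries, evidence is gathered from user-supplied context,
Baidu, and an optional news-search API (Bing / Baidu / Google), and the final
verdict is produced either by an OpenAI chat model or by a Replicate-hosted
Llama 3 / Mixtral model. Prompts, search helpers and the CSS themes come from
the companion modules css, pre, search and post.
"""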
import os
import json
import time
from time import sleep

import gradio as gr
import replicate
from langchain_openai import ChatOpenAI
from langchain_openai.embeddings import OpenAIEmbeddings
from langchain.agents import load_tools
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

from css import *
from pre import *
from search import *
from post import *

os.environ["REPLICATE_API_TOKEN"] = "r8_IYJpjwjrxegcUfBeBbyUxErJXXsnHDM4AlSQQ"
os.environ["OPENAI_API_KEY"] = "sb-6a683cb3bd63a9b72040aa2dd08feff8b68f08a0e1d959f5"
os.environ["OPENAI_BASE_URL"] = "https://api.openai-sb.com/v1/"
os.environ["SERPAPI_API_KEY"] = "dcc98b22d5f7d413979a175ff7d75b721c5992a3ee1e2363020b2bbdf4f82404"
embedding_model = OpenAIEmbeddings()
tools_google = load_tools(["serpapi"])
llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0.6)
flag = "None"
selected_model = None
def process_option(option):
    global selected_model, llm, flag  # update the module-level model selection
    flag = "None"  # switching models clears any previously selected Replicate model
    llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0.6)
    if option in ["gpt-3.5-turbo-0125", "gpt-4o", "gpt-4o-mini", "None"]:
        selected_model = option
        if option != "None":
            llm = ChatOpenAI(model=selected_model, temperature=0.6)
    elif option == "llama-3-70b":
        flag = "llama-3-70b"
    elif option == "llama-3-8b":
        flag = "llama-3-8b"
    elif option == "mistral":
        flag = "mistral"
    return selected_model

options = ["gpt-3.5-turbo-0125", "gpt-4o", "gpt-4o-mini", "llama-3-70b", "llama-3-8b", "mistral", "None"]
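# Illustrative calls (not part of the Gradio flow; the dropdown handler below is
# what actually invokes this):
#   process_option("gpt-4o")       # baseline and RAG chains both use gpt-4o
#   process_option("llama-3-70b")  # the RAG step is routed through Replicate,
#                                  # while the baseline chain keeps gpt-3.5-turbo-0125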
selected_search = None

def search_option(option):
    global selected_search
    if option in ["bing_news", "baidu_news", "google_news"]:
        selected_search = option

search_options = ["bing_news", "baidu_news", "google_news"]
def update_ans_box_label(selected_model):
    return gr.update(label=selected_model)

origin_prompt = ChatPromptTemplate.from_template(template)
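# detect_fake_news runs the full pipeline for a single news item:
#   1. baseline judgment: origin_prompt | llm, with no external evidence;
#   2. sub-query extraction with capture_sub_queries;
#   3. evidence gathering: user-supplied context plus Baidu results for the
#      sub-queries; if detect_chain still reports uncertain clauses, results from
#      the selected news-search API are prepended as well;
#   4. RAG judgment: rag_prompt filled with the news and gathered evidence,
#      answered by the OpenAI model or, if a Replicate model was chosen, by
#      Llama 3 / Mixtral via replicate.run.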
def detect_fake_news(news, context, progress=gr.Progress()):
    progress(0, desc="初始化")
    sleep(1)  # brief pause so the progress bar is visible
    # Baseline judgment: ask the selected OpenAI model directly, without retrieval.
    chain = origin_prompt | llm | StrOutputParser()
    ori_result = chain.invoke(news)
    progress(0.25, desc="分割子句")
    sleep(1)
    # Split the news item into sub-queries, then gather evidence for them.
    cap_result = capture_sub_queries(news)
    progress(0.50, desc="提取相关信息")
    sleep(1)
    information = context + baidu(cap_result)
    result = detect_chain.invoke({"news": news, "text": information})
    result = extract_clauses_with_counts(result)
    # If clauses remain uncertain, pull extra evidence from the selected news-search API.
    if result.count.get("不确定的子句个数"):
        search_text = ""
        if selected_search == "bing_news":
            search_text = search_bing(news)
        elif selected_search == "baidu_news":
            search_text = search_baidu(news)
        elif selected_search == "google_news":
            search_text = search_google(news)
        information = search_text + information
    progress(0.75, desc="Running main chain...")
    sleep(1)
    if flag == "None":
        # RAG judgment with the selected OpenAI model.
        rag_chain = rag_prompt | llm | StrOutputParser()
        rag_result = rag_chain.invoke({"news": news, "information": information})
    else:
        # RAG judgment with a Replicate-hosted model (Llama 3 or Mixtral).
        replicate_input = {
            "top_p": 0.9,
            "prompt": rag_prompt.format(news=news, information=information),
            "min_tokens": 0,
            "temperature": 0.6,
            "prompt_template": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
            "presence_penalty": 1.15,
        }
        if flag == "llama-3-70b":
            output = replicate.run("meta/meta-llama-3-70b-instruct", input=replicate_input)
        elif flag == "llama-3-8b":
            output = replicate.run("meta/meta-llama-3-8b-instruct", input=replicate_input)
        elif flag == "mistral":
            output = replicate.run("mistralai/mixtral-8x7b-instruct-v0.1", input=replicate_input)
        rag_result = "".join(output)
    progress(1, desc="完成")
    rag_result = json.loads(rag_result)
    rag_result = "这是" + rag_result["label"] + "," + rag_result["explanation"]
    return cap_result, information, ori_result, rag_result
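# json.loads above assumes the model returns pure JSON with "label" and
# "explanation" fields. A minimal, more defensive sketch (hypothetical helper,
# not wired into detect_fake_news) could extract the first JSON object from the
# raw reply before parsing; the fallback values here are illustrative assumptions.
def parse_rag_result(raw_text):
    """Best-effort parse of a model reply that is expected to contain a JSON object."""
    try:
        return json.loads(raw_text)
    except json.JSONDecodeError:
        start, end = raw_text.find("{"), raw_text.rfind("}")
        if start != -1 and end > start:
            try:
                return json.loads(raw_text[start:end + 1])
            except json.JSONDecodeError:
                pass
        # Fall back to treating the whole reply as the explanation.
        return {"label": "不确定", "explanation": raw_text}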
def toggle_visibility(is_visible):
    # Flip the visibility of the intermediate-result textboxes.
    new_visibility = not is_visible
    return gr.update(visible=new_visibility), gr.update(visible=new_visibility), new_visibility

current_css = css1

def toggle_style():
    global current_css
    # Swap between the two CSS themes (day / night mode).
    if current_css == css1:
        current_css = css2
    else:
        current_css = css1
    # Return the updated <style> block so the page picks up the new theme.
    return f'<style id="dynamic-css">{current_css}</style>'
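# css1 / css2 come from css.py, which is not shown here. A minimal sketch of what
# that module might define (illustrative assumption, not the Space's real stylesheet):
#
#     # css.py
#     css1 = ".gradio-container {background-color: #ffffff; color: #333333;}"  # day theme
#     css2 = ".gradio-container {background-color: #1f1f1f; color: #e0e0e0;}"  # night theme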
with gr.Blocks(css=current_css) as iface:
    gr.Markdown("<h1 style='font-size: 36px; text-align: center; color: #333333; margin-bottom: 20px;'>虚假信息检测</h1>")
    is_visible = gr.State(False)
    html_output = gr.HTML(f'<style id="dynamic-css">{current_css}</style>')
    with gr.Row():
        with gr.Column(scale=2):
            chatbot = gr.Chatbot(elem_classes="gradio-output")
            context_box = gr.Textbox(label="上下文", elem_classes="gradio-input", placeholder="请输入与该新闻有关的上下文", lines=2)
            input_box = gr.Textbox(label="新闻", elem_classes="gradio-input", placeholder="请输入新闻", lines=3)
        with gr.Column(scale=1):
            option_select = gr.Dropdown(choices=options, label="选择模型", elem_classes="gradio-output")
            output = gr.Textbox(label="选择模型", visible=False)
            search_select = gr.Dropdown(choices=search_options, label="选择搜索api", elem_classes="gradio-output")
            ans_box = gr.Textbox(label="gpt-3.5-turbo-0125", lines=5, elem_classes="gradio-output")
            rag_box = gr.Textbox(label="有RAG增强", visible=False)
            # trans_box = gr.Textbox(label="transform", visible=False)
            capture_box = gr.Textbox(label="子句划分结果", visible=False, lines=3)
            information_box = gr.Textbox(label="提取到的信息", visible=False, lines=3)
            css_btn = gr.Button("白天(夜晚)模式切换", elem_classes="gradio-button")
            toggle_btn = gr.Button("显示(隐藏)文本框", elem_classes="gradio-button")
            clear = gr.Button("清空页面", elem_classes="gradio-button")
            submit_btn = gr.Button("提交", elem_classes="gradio-button")

    option_select.change(process_option, option_select, output).then(
        update_ans_box_label, inputs=output, outputs=ans_box
    )
    search_select.change(search_option, search_select)
    toggle_btn.click(toggle_visibility, [is_visible], [capture_box, information_box, is_visible])
    css_btn.click(toggle_style, inputs=None, outputs=html_output)
    def user(user_input, user_context, history):
        # Push the submitted news item into the chat history and clear the input boxes.
        if history is None:
            history = []
        return "", "", history + [[user_input, None]]

    def bot(history, rag_box):
        # Stream the RAG verdict into the chatbot one character at a time (typewriter effect).
        if history is None or len(history) == 0:
            return
        bot_rag = str(rag_box)
        history[-1][1] = ""
        for character in bot_rag:
            history[-1][1] += character
            time.sleep(0.01)
            yield history
    submit_btn.click(detect_fake_news, [input_box, context_box], [capture_box, information_box, ans_box, rag_box]).then(
        user, [input_box, context_box, chatbot], [input_box, context_box, chatbot]
    ).then(
        bot, [chatbot, rag_box], chatbot
    )
    clear.click(lambda: (None, None, []), inputs=None, outputs=[chatbot, ans_box, rag_box], queue=False)
if __name__ == "__main__":
    iface.queue()
    app, local_url, share_url = iface.launch()
    # print(app, local_url, share_url)
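# To try the app locally (assumptions: the companion modules css.py, pre.py,
# search.py and post.py are on the import path, this file is saved as app.py,
# and the API keys above are valid):
#   python app.py
# Gradio serves the interface at http://127.0.0.1:7860 by default.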