p-baleine committed on
Commit
ec36bd2
1 Parent(s): 0d58bdf

update app.

Files changed (1)
  1. app.py +86 -68
app.py CHANGED
@@ -1,64 +1,79 @@
+import logging
 import os
-from typing import Optional, Tuple
-
+# from typing import Optional, Tuple
 import gradio as gr
-from langchain.chains import ConversationChain
-from langchain.llms import OpenAI
-from threading import Lock
+from langchain.chat_models import ChatOpenAI
+
+from metaanalyser.chains import SRSectionChain, SROverviewChain, SROutlintChain
+from metaanalyser.paper import search_on_google_scholar, create_papers_vectorstor
+
+
+logger = logging.getLogger(__name__)
+logging.basicConfig()
+logging.getLogger("metaanalyser").setLevel(level=logging.DEBUG)
+
+
+def run(query: str):
+    llm = ChatOpenAI(temperature=0)
+    papers = search_on_google_scholar(query)
+    db = create_papers_vectorstor(papers)
+    overview_chain = SROverviewChain(llm=llm, verbose=True)
+    outline_chain = SROutlintChain(llm=llm, verbose=True)
+    section_chain = SRSectionChain(
+        llm=llm,
+        paper_store=db,
+        verbose=True
+    )

+    overview = overview_chain.run({"query": query, "papers": papers})
+    outline = outline_chain.run({"query": query, "papers": papers, "overview": overview})

-def load_chain():
-    """Logic for loading the chain you want to use should go here."""
-    llm = OpenAI(temperature=0)
-    chain = ConversationChain(llm=llm)
-    return chain
+    sections_as_md = []
+
+    for section_idx in range(len(outline.sections)):
+        # TODO: support nested sections
+        sections_as_md.append(section_chain.run({
+            "section_idx": section_idx,
+            "section_level": 2,
+            "query": query,
+            "papers": papers,
+            "overview": overview,
+            "outline": outline
+        }))
+
+    sr = f"# {overview.title}\n\n{overview.overview}\n\n## Table of contents\n\n{outline}\n\n"
+    sr += "\n\n".join(sections_as_md)
+    sr += "\n\n## References\n"
+
+    papers_citation_id_map = {p.citation_id: p for p in papers}
+    citations = []
+
+    for citation_id in outline.citations_ids:
+        citation = papers_citation_id_map[int(citation_id)]
+        citations.append(f"[^{citation_id}]: [{citation.mla_citiation.snippet}]({citation.link})")
+
+    sr += "\n\n".join(citations)
+
+    return sr


 def set_openai_api_key(api_key: str):
-    """Set the api key and return chain.
-
-    If no api_key, then None is returned.
-    """
-    if api_key:
-        os.environ["OPENAI_API_KEY"] = api_key
-        chain = load_chain()
-        os.environ["OPENAI_API_KEY"] = ""
-        return chain
-
-class ChatWrapper:
-
-    def __init__(self):
-        self.lock = Lock()
-    def __call__(
-        self, api_key: str, inp: str, history: Optional[Tuple[str, str]], chain: Optional[ConversationChain]
-    ):
-        """Execute the chat functionality."""
-        self.lock.acquire()
-        try:
-            history = history or []
-            # If chain is None, that is because no API key was provided.
-            if chain is None:
-                history.append((inp, "Please paste your OpenAI key to use"))
-                return history, history
-            # Set OpenAI key
-            import openai
-            openai.api_key = api_key
-            # Run chain and append input.
-            output = chain.run(input=inp)
-            history.append((inp, output))
-        except Exception as e:
-            raise e
-        finally:
-            self.lock.release()
-        return history, history
-
-chat = ChatWrapper()
-
-block = gr.Blocks(css=".gradio-container {background-color: lightgray}")
+    os.environ["OPENAI_API_KEY"] = api_key
+
+
+def set_serpapi_api_key(api_key: str):
+    os.environ["SERPAPI_API_KEY"] = api_key
+
+
+# block = gr.Blocks(css=".gradio-container {background-color: lightgray}")
+block = gr.Blocks()

 with block:
     with gr.Row():
-        gr.Markdown("<h3><center>LangChain Demo</center></h3>")
+        gr.Markdown("""
+        <h2><center>metaanalyser demo</center></h2>
+        Generate a systematic review for your query based on Google Scholar search results. It will take a few minutes to output the results.
+        """)

         openai_api_key_textbox = gr.Textbox(
             placeholder="Paste your OpenAI API key (sk-...)",
@@ -66,42 +81,45 @@ with block:
             lines=1,
             type="password",
         )
-
-    chatbot = gr.Chatbot()
+        serpai_api_key_textbox = gr.Textbox(
+            placeholder="Paste your SerpApi API key",
+            show_label=False,
+            lines=1,
+            type="password",
+        )

     with gr.Row():
-        message = gr.Textbox(
-            label="What's your question?",
-            placeholder="What's the answer to life, the universe, and everything?",
+        query = gr.Textbox(
+            label="Query",
+            placeholder="the query for Google Scholar",
             lines=1,
         )
         submit = gr.Button(value="Send", variant="secondary").style(full_width=False)

     gr.Examples(
         examples=[
-            "Hi! How's it going?",
-            "What should I do tonight?",
-            "Whats 2 + 2?",
+            "llm agent OR llm tool integration",
         ],
-        inputs=message,
+        inputs=query,
     )

-    gr.HTML("Demo application of a LangChain chain.")
+    with gr.Row():
+        output = gr.Markdown("It will take a few minutes to output the results...")

     gr.HTML(
         "<center>Powered by <a href='https://github.com/hwchase17/langchain'>LangChain 🦜️🔗</a></center>"
     )

-    state = gr.State()
-    agent_state = gr.State()
-
-    submit.click(chat, inputs=[openai_api_key_textbox, message, state, agent_state], outputs=[chatbot, state])
-    message.submit(chat, inputs=[openai_api_key_textbox, message, state, agent_state], outputs=[chatbot, state])
-
+    submit.click(fn=run, inputs=query, outputs=output)
     openai_api_key_textbox.change(
         set_openai_api_key,
         inputs=[openai_api_key_textbox],
-        outputs=[agent_state],
     )
+    serpai_api_key_textbox.change(
+        set_serpapi_api_key,
+        inputs=[serpai_api_key_textbox],
+    )
+
+

 block.launch(debug=True)
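
For reference, a minimal sketch of driving the new run() pipeline headlessly, without the Gradio UI. It assumes app.py is importable as a module without triggering block.launch() (for example, if the launch call were guarded by an if __name__ == "__main__": block); the query string and the sr.md output path are arbitrary examples, and the key values shown are placeholders.

import os

# Assumption: importing app does not launch the UI (launch guarded by __main__).
from app import run

# The pipeline reads both keys from the environment, mirroring
# set_openai_api_key / set_serpapi_api_key in app.py. Placeholder values only.
os.environ["OPENAI_API_KEY"] = "sk-..."
os.environ["SERPAPI_API_KEY"] = "..."

# Generate the systematic review as a Markdown string and save it to a file.
sr_markdown = run("llm agent OR llm tool integration")  # example query from the demo
with open("sr.md", "w") as f:
    f.write(sr_markdown)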