notSoNLPnerd committed
Commit 3842297 (parent: c24940a)

final tiny changes

Files changed (1)
  1. app.py +14 -6
app.py CHANGED
@@ -55,16 +55,19 @@ st.markdown("<h5> Answer with GPT's Internal Knowledge </h5>", unsafe_allow_html
 placeholder_plain_gpt = st.empty()
 st.text(" ")
 st.text(" ")
-st.markdown(f"<h5> Answer with {st.session_state['query_type']} </h5>", unsafe_allow_html=True)
+if st.session_state.get("query_type", "Retrieval Augmented (Static news dataset)") == "Retrieval Augmented (Static news dataset)":
+    st.markdown("<h5> Answer with Retrieval Augmented GPT (Static news dataset) </h5>", unsafe_allow_html=True)
+else:
+    st.markdown("<h5> Answer with Retrieval Augmented GPT (Web Search) </h5>", unsafe_allow_html=True)
 placeholder_retrieval_augmented = st.empty()
 
 if st.session_state.get('query') and run_pressed:
-    input = st.session_state['query']
+    ip = st.session_state['query']
     with st.spinner('Loading pipelines... \n This may take a few mins and might also fail if OpenAI API server is down.'):
         p1 = get_plain_pipeline()
     with st.spinner('Fetching answers from GPT\'s internal knowledge... '
                     '\n This may take a few mins and might also fail if OpenAI API server is down.'):
-        answers = p1.run(input)
+        answers = p1.run(ip)
     placeholder_plain_gpt.markdown(answers['results'][0])
 
     if st.session_state.get("query_type", "Retrieval Augmented") == "Retrieval Augmented":
@@ -74,8 +77,13 @@ if st.session_state.get('query') and run_pressed:
             p2 = get_retrieval_augmented_pipeline()
         with st.spinner('Fetching relevant documents from documented stores and calculating answers... '
                         '\n This may take a few mins and might also fail if OpenAI API server is down.'):
-            answers_2 = p2.run(input)
+            answers_2 = p2.run(ip)
     else:
-        p3 = get_web_retrieval_augmented_pipeline()
-        answers_2 = p3.run(input)
+        with st.spinner(
+                'Loading Retrieval Augmented pipeline... \n This may take a few mins and might also fail if OpenAI API server is down.'):
+            p3 = get_web_retrieval_augmented_pipeline()
+        with st.spinner('Fetching relevant documents from the Web and calculating answers... '
+                        '\n This may take a few mins and might also fail if OpenAI API server is down.'):
+            answers_2 = p3.run(ip)
     placeholder_retrieval_augmented.markdown(answers_2['results'][0])
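For context, the changed lines read `query`, `query_type`, and `run_pressed` from Streamlit state that is set up earlier in `app.py` and not shown in this diff. A minimal sketch of that wiring follows; the widget labels and option strings are assumptions, only the session-state keys mirror what the diff uses.

    import streamlit as st

    # Hypothetical setup mirroring the keys the diff reads; labels and option
    # strings are assumed, not copied from the real app.py.
    st.text_input("Ask a question", key="query")  # fills st.session_state['query']
    st.radio(
        "Choose a retrieval mode",
        options=(
            "Retrieval Augmented (Static news dataset)",
            "Retrieval Augmented (Web Search)",
        ),
        key="query_type",  # fills st.session_state['query_type']
    )
    run_pressed = st.button("Run")  # True only on the rerun triggered by the click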
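The `get_plain_pipeline()`, `get_retrieval_augmented_pipeline()`, and `get_web_retrieval_augmented_pipeline()` helpers are likewise defined elsewhere in `app.py`. As a rough illustration only, assuming Haystack 1.x with `PromptNode` (the model name, secret name, and caching decorator are assumptions, not the app's actual code), the plain-GPT helper could look like this:

    import streamlit as st
    from haystack import Pipeline
    from haystack.nodes import PromptNode

    @st.cache_resource  # assumption: build the pipeline once and reuse it across reruns
    def get_plain_pipeline():
        # A bare PromptNode queries OpenAI directly, i.e. "GPT's internal knowledge" only.
        prompt_node = PromptNode(
            model_name_or_path="text-davinci-003",   # assumed model
            api_key=st.secrets["OPENAI_API_KEY"],    # assumed secret name
        )
        pipe = Pipeline()
        pipe.add_node(component=prompt_node, name="prompt_node", inputs=["Query"])
        return pipe

In this sketch, `pipe.run(ip)` passes the query positionally and the generated text ends up under the `results` key, which is consistent with the diff reading `answers['results'][0]`.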