ishaan-mital committed on
Commit
fbfaaa6
·
1 Parent(s): 166c1e2
Files changed (1) hide show
  1. app.py +12 -12
app.py CHANGED
@@ -65,17 +65,17 @@ def main(question):
65
  print(question)
66
  context = retrieval.predict(question)
67
  # try:
68
- # print(context)
69
- # answer = llm.predict(
70
- # f'Question: {question} and context: {context}',
71
- # "NCERT Helper!!", # str in 'System prompt' Textbox component
72
- # 2048, # float (numeric value between 1 and 2048) in 'Max new tokens' Slider component
73
- # 0.1, # float (numeric value between 0.1 and 4.0) in 'Temperature' Slider component
74
- # 0.05, # float (numeric value between 0.05 and 1.0) in 'Top-p (nucleus sampling)' Slider component
75
- # 1, # float (numeric value between 1 and 1000) in 'Top-k' Slider component
76
- # 1, # float (numeric value between 1.0 and 2.0) in 'Repetition penalty' Slider component
77
- # api_name="/chat"
78
- # )
79
  # except:
80
  # answer = llm.predict(
81
  # f'Question: {question}',
@@ -87,7 +87,7 @@ def main(question):
87
  # 1, # float (numeric value between 1.0 and 2.0) in 'Repetition penalty' Slider component
88
  # api_name="/chat"
89
  # )
90
- answer = query({"inputs": {"question": question, "context": context}})[0]["answer"]
91
  return answer
92
 
93
  demo = gr.Interface(main, inputs = "text", outputs = "text")
 
65
  print(question)
66
  context = retrieval.predict(question)
67
  # try:
68
+ print(context)
69
+ answer = llm.predict(
70
+ f'Question: {question} and context: {context}',
71
+ "NCERT Helper!!", # str in 'System prompt' Textbox component
72
+ 2048, # float (numeric value between 1 and 2048) in 'Max new tokens' Slider component
73
+ 0.1, # float (numeric value between 0.1 and 4.0) in 'Temperature' Slider component
74
+ 0.05, # float (numeric value between 0.05 and 1.0) in 'Top-p (nucleus sampling)' Slider component
75
+ 1, # float (numeric value between 1 and 1000) in 'Top-k' Slider component
76
+ 1, # float (numeric value between 1.0 and 2.0) in 'Repetition penalty' Slider component
77
+ api_name="/chat"
78
+ )
79
  # except:
80
  # answer = llm.predict(
81
  # f'Question: {question}',
 
87
  # 1, # float (numeric value between 1.0 and 2.0) in 'Repetition penalty' Slider component
88
  # api_name="/chat"
89
  # )
90
+ # answer = query({"inputs": {"question": question, "context": context}})
91
  return answer
92
 
93
  demo = gr.Interface(main, inputs = "text", outputs = "text")