dh-mc committed
Commit
ab49330
1 Parent(s): a08dbb5

fixed bug in test.py

Files changed (3)
  1. .env.example +1 -1
  2. data/questions.txt +4 -3
  3. test.py +1 -1
.env.example CHANGED
@@ -19,7 +19,7 @@ HF_PIPELINE_DEVICE_TYPE=
 # LOAD_QUANTIZED_MODEL=4bit
 # LOAD_QUANTIZED_MODEL=8bit
 
-DISABLE_MODEL_PRELOADING=false
+DISABLE_MODEL_PRELOADING=true
 CHAT_HISTORY_ENABLED=true
 SHOW_PARAM_SETTINGS=false
 SHARE_GRADIO_APP=false
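
For context, the sketch below shows one way a boolean DISABLE_MODEL_PRELOADING flag like this is typically consumed at startup. Only the variable name comes from this diff; the load_dotenv usage and the load_model() helper are assumptions, not code from this repository.

# Minimal sketch, assuming python-dotenv; not this repo's actual loader.
import os

from dotenv import load_dotenv


def load_model():
    # Hypothetical stand-in for the app's real model-loading code.
    print("loading model ...")
    return object()


load_dotenv()  # read .env into the process environment

# Environment values are strings, so compare against "true" explicitly.
disable_preloading = os.getenv("DISABLE_MODEL_PRELOADING", "false").lower() == "true"

llm = None
if not disable_preloading:
    llm = load_model()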
data/questions.txt CHANGED
@@ -1,3 +1,4 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5f61185685e79b9b115f1b3d34c6bca2913174a18c014b210e749e419beb2211
-size 60
+What's AI?
+life in AI era
+machine learning
+generative model
test.py CHANGED
@@ -128,7 +128,7 @@ while True:
     custom_handler.reset()
 
     start = timer()
-    result = qa_chain.call({"question": query, "chat_history": chat_history})
+    result = qa_chain.call({"question": query, "chat_history": chat_history}, None)
     end = timer()
     print(f"Completed in {end - start:.3f}s")
 
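
The fix passes a second positional argument (None) to qa_chain.call. The qa_chain implementation is not part of this commit, so the sketch below only illustrates, under assumed names and types, a wrapper whose call() accepts an optional streaming queue that the test script leaves unset.

# Hypothetical sketch; the real qa_chain class is not shown in this diff.
import queue
from typing import Any, Optional


class QAChainWrapper:
    def __init__(self, chain: Any):
        # chain is assumed to be a callable LangChain chain (e.g. ConversationalRetrievalChain).
        self.chain = chain

    def call(self, inputs: dict, q: Optional[queue.Queue] = None) -> dict:
        # test.py now passes None for q, i.e. it runs without a streaming queue.
        if q is not None:
            # In a streaming setup, a callback bound to q would be attached here.
            pass
        return self.chain(inputs)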