RohiniPS committed
Commit bf66f9c · verified · 1 Parent(s): 47b5046

Update app.py

Files changed (1): app.py (+14 -15)
app.py CHANGED
@@ -18,22 +18,21 @@ def ask_question_HFModel1(question, modelHF, tokenizerHF):
     return question, answer
 
 def FetchQwenFromSpace(sQ):
-    '''
-    HFModelName ="Qwen/Qwen1.5-0.5B"
-    tokenizer = Qwen2Tokenizer.from_pretrained(HFModelName) #, trust_remote_code=bTrust_remote_code, model_max_length=8192) #model_max_length = 8192 #, use_fast=False #Qwen
-    model = Qwen2ForCausalLM.from_pretrained(HFModelName)
-    modelHF = model
-    tokenizerHF = tokenizer
-    '''
-    #Using pre-trained model from HF
-    #from transformers import AutoModelCasualLM
-    sHuggingFacePath = "RohiniPS/Qwen1B-QnA-3-5" #"RohiniPS/Qwen1B-QnA-3-5"
-    modelHF = Qwen2ForCausalLM.from_pretrained(sHuggingFacePath) #("your_username/my-awesome-model")
-    tokenizerHF = Qwen2Tokenizer.from_pretrained(sHuggingFacePath)
-    ###
+    HFModelName ="Qwen/Qwen1.5-0.5B"
+    tokenizer = Qwen2Tokenizer.from_pretrained(HFModelName) #, trust_remote_code=bTrust_remote_code, model_max_length=8192) #model_max_length = 8192 #, use_fast=False #Qwen
+    model = Qwen2ForCausalLM.from_pretrained(HFModelName)
+    modelHF = model
+    tokenizerHF = tokenizer
+    '''
+    #Using pre-trained model from HF
+    #from transformers import AutoModelCasualLM
+    sHuggingFacePath = "RohiniPS/Qwen1B-QnA-3-5" #"RohiniPS/Qwen1B-QnA-3-5"
+    modelHF = Qwen2ForCausalLM.from_pretrained(sHuggingFacePath) #("your_username/my-awesome-model")
+    tokenizerHF = Qwen2Tokenizer.from_pretrained(sHuggingFacePath)
+    ###'''
 
-    sQuestion, sAnswer = ask_question_HFModel1(sQ, modelHF, tokenizerHF)
-    return sQuestion, sAnswer
+    sQuestion, sAnswer = ask_question_HFModel1(sQ, modelHF, tokenizerHF)
+    return sQuestion, sAnswer
 
 
 def GetAnswer(Question):
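
In effect, this commit moves the triple-quoted comment block so that FetchQwenFromSpace now loads the base "Qwen/Qwen1.5-0.5B" checkpoint instead of the fine-tuned repo "RohiniPS/Qwen1B-QnA-3-5". Below is a minimal sketch of the resulting code path; the body of ask_question_HFModel1 is not part of this diff, so the tokenize-generate-decode logic shown for it is an assumption for illustration only.

from transformers import Qwen2ForCausalLM, Qwen2Tokenizer

def ask_question_HFModel1(question, modelHF, tokenizerHF):
    # Assumed implementation: the real body is not shown in this diff,
    # so plain tokenize -> generate -> decode stands in for it here.
    inputs = tokenizerHF(question, return_tensors="pt")
    output_ids = modelHF.generate(**inputs, max_new_tokens=128)
    answer = tokenizerHF.decode(output_ids[0], skip_special_tokens=True)
    return question, answer

def FetchQwenFromSpace(sQ):
    # After this commit the base checkpoint is loaded directly; the
    # fine-tuned repo "RohiniPS/Qwen1B-QnA-3-5" stays commented out.
    HFModelName = "Qwen/Qwen1.5-0.5B"
    tokenizerHF = Qwen2Tokenizer.from_pretrained(HFModelName)
    modelHF = Qwen2ForCausalLM.from_pretrained(HFModelName)
    sQuestion, sAnswer = ask_question_HFModel1(sQ, modelHF, tokenizerHF)
    return sQuestion, sAnswer

# Example usage:
# q, a = FetchQwenFromSpace("What is the capital of France?")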