indikamk committed on
Commit
a443f89
1 Parent(s): 8337646

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -2,6 +2,9 @@ import torch
 from peft import PeftModel, PeftConfig
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
+HUGGING_FACE_USER_NAME = "indikamk"
+model_name = "BLOOMZ_finetuned_Misconceptions"
+
 peft_model_id = f"{HUGGING_FACE_USER_NAME}/{model_name}"
 config = PeftConfig.from_pretrained(peft_model_id)
 model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, return_dict=True, load_in_8bit=False, device_map='auto')
@@ -10,9 +13,6 @@ tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
 # Load the Lora model
 model = PeftModel.from_pretrained(model, peft_model_id)
 
-HUGGING_FACE_USER_NAME = "indikamk"
-model_name = "BLOOMZ_finetuned_Misconceptions"
-
 def make_inference(sentence):
   batch = tokenizer(f"### INSTRUCTION\nBelow is a student response to a writen question about an electrical circuit. Please identify whether there is a sequential misconception. A sequential misconception in terms of electric circuits is one in which it is believed that elements that are further “downstream” from a source (such as R2 and R3 in the example circuit of Figure 1) “receive” current after elements closer to the source (R1 in the example circuit). With such a misconception, it is likely that a student will think that changes in R2 have no effect on the potential difference and current associated with R1 or Vs..\n\n### Sentence:\n{sentence}\n### Response:\n", return_tensors='pt')
 
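
For reference, the commit moves the HUGGING_FACE_USER_NAME and model_name assignments above their first use in peft_model_id, so the adapter repo id can be resolved when the script runs. A minimal sketch of the resulting load-and-infer flow is below; the prompt is abbreviated, and the generate/decode step at the end is an assumption, since the committed hunk ends at the tokenizer call.

import torch
from peft import PeftModel, PeftConfig
from transformers import AutoModelForCausalLM, AutoTokenizer

HUGGING_FACE_USER_NAME = "indikamk"
model_name = "BLOOMZ_finetuned_Misconceptions"

# Adapter repo id, now resolvable because both names are defined above
peft_model_id = f"{HUGGING_FACE_USER_NAME}/{model_name}"
config = PeftConfig.from_pretrained(peft_model_id)

# Base BLOOMZ model and its tokenizer
model = AutoModelForCausalLM.from_pretrained(
    config.base_model_name_or_path, return_dict=True, load_in_8bit=False, device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)

# Apply the LoRA adapter on top of the base model
model = PeftModel.from_pretrained(model, peft_model_id)

def make_inference(sentence):
    # Prompt abbreviated here; app.py uses the full misconception instruction shown in the diff
    prompt = f"### INSTRUCTION\n...\n\n### Sentence:\n{sentence}\n### Response:\n"
    batch = tokenizer(prompt, return_tensors="pt")
    with torch.no_grad():
        # Assumed generation settings; the committed hunk ends before this point
        output_tokens = model.generate(**batch, max_new_tokens=50)
    return tokenizer.decode(output_tokens[0], skip_special_tokens=True)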