hagifly committed on
Commit
a6e0d47
1 Parent(s): 49d1738

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +4 -7
README.md CHANGED
@@ -29,8 +29,7 @@ import json
29
 
30
 
31
  HF_TOKEN = "Hugging Face Token"
32
- model_id = "llm-jp/llm-jp-3-13b"
33
- adapter_id = ""
34
 
35
  bnb_config = BitsAndBytesConfig(
36
  load_in_4bit=True,
@@ -39,15 +38,13 @@ bnb_config = BitsAndBytesConfig(
39
  )
40
 
41
  model = AutoModelForCausalLM.from_pretrained(
42
- model_id,
43
  quantization_config=bnb_config,
44
  device_map="auto",
45
  token = HF_TOKEN
46
  )
47
 
48
- tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True, token = HF_TOKEN)
49
-
50
- model = PeftModel.from_pretrained(model, adapter_id, token = HF_TOKEN)
51
 
52
  datasets = []
53
  with open("./elyza-tasks-100-TV_0.jsonl", "r") as f:
@@ -85,7 +82,7 @@ for data in tqdm(datasets):
85
  results.append({"task_id": data["task_id"], "input": input, "output": output})
86
 
87
  import re
88
- jsonl_id = re.sub(".*/", "", adapter_id)
89
  with open(f"./{jsonl_id}-outputs.jsonl", 'w', encoding='utf-8') as f:
90
  for result in results:
91
  json.dump(result, f, ensure_ascii=False) # ensure_ascii=False for handling non-ASCII characters
 
29
 
30
 
31
  HF_TOKEN = "Hugging Face Token"
32
+ model_name = "hagifly/llm-jp-3-13b-finetune"
 
33
 
34
  bnb_config = BitsAndBytesConfig(
35
  load_in_4bit=True,
 
38
  )
39
 
40
  model = AutoModelForCausalLM.from_pretrained(
41
+ model_name,
42
  quantization_config=bnb_config,
43
  device_map="auto",
44
  token = HF_TOKEN
45
  )
46
 
47
+ tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True, token = HF_TOKEN)
 
 
48
 
49
  datasets = []
50
  with open("./elyza-tasks-100-TV_0.jsonl", "r") as f:
 
82
  results.append({"task_id": data["task_id"], "input": input, "output": output})
83
 
84
  import re
85
+ jsonl_id = re.sub(".*/", "", model_name)
86
  with open(f"./{jsonl_id}-outputs.jsonl", 'w', encoding='utf-8') as f:
87
  for result in results:
88
  json.dump(result, f, ensure_ascii=False) # ensure_ascii=False for handling non-ASCII characters