Update README.md
Browse files
README.md
CHANGED
@@ -17,6 +17,7 @@ tags:
|
|
17 |
- Llama 2
|
18 |
- Q&A
|
19 |
library_name: peft
|
|
|
20 |
---
|
21 |
|
22 |
# BODE
|
@@ -50,7 +51,7 @@ Você pode usar o Bode facilmente com a biblioteca Transformers do HuggingFace.
|
|
50 |
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
|
51 |
from peft import PeftModel, PeftConfig
|
52 |
|
53 |
-
llm_model = 'recogna-nlp/bode-7b-alpaca-pt-br'
|
54 |
config = PeftConfig.from_pretrained(llm_model)
|
55 |
model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, trust_remote_code=True, return_dict=True, load_in_8bit=True, device_map='auto')
|
56 |
tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
|
|
|
17 |
- Llama 2
|
18 |
- Q&A
|
19 |
library_name: peft
|
20 |
+
inference: false
|
21 |
---
|
22 |
|
23 |
# BODE
|
|
|
51 |
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
|
52 |
from peft import PeftModel, PeftConfig
|
53 |
|
54 |
+
llm_model = 'recogna-nlp/bode-13b-alpaca-pt-br'
|
55 |
config = PeftConfig.from_pretrained(llm_model)
|
56 |
model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, trust_remote_code=True, return_dict=True, load_in_8bit=True, device_map='auto')
|
57 |
tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
|