Update README.md
README.md CHANGED
@@ -43,7 +43,7 @@ model_id = "cabelo/Tucano-2b4-Instruct-fp16-ov"
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 model = OVModelForCausalLM.from_pretrained(model_id)
 
-inputs = tokenizer("
+inputs = tokenizer("O que é Carnaval?", return_tensors="pt")
 
 outputs = model.generate(**inputs, max_length=200)
 text = tokenizer.batch_decode(outputs)[0]
@@ -78,7 +78,7 @@ import openvino_genai as ov_genai
 
 device = "CPU"
 pipe = ov_genai.LLMPipeline(model_path, device)
-print(pipe.generate("
+print(pipe.generate("O que é OpenVINO?", max_length=200))
 ```
 
 More GenAI usage examples can be found in OpenVINO GenAI library [docs](https://github.com/openvinotoolkit/openvino.genai/blob/master/src/README.md) and [samples](https://github.com/openvinotoolkit/openvino.genai?tab=readme-ov-file#openvino-genai-samples)
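For context, the Optimum-Intel snippet that this commit completes corresponds to a full example roughly like the sketch below. The `model_id` and the changed lines come from the README hunk itself; the imports and the final `print()` are assumptions about the surrounding, unchanged parts of the file.

```python
# Sketch of the complete Optimum-Intel example as it reads after this commit.
# The imports and the trailing print() are assumed; the rest mirrors the diff.
from transformers import AutoTokenizer
from optimum.intel import OVModelForCausalLM

model_id = "cabelo/Tucano-2b4-Instruct-fp16-ov"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = OVModelForCausalLM.from_pretrained(model_id)

# The commit fills in the previously truncated prompt line.
inputs = tokenizer("O que é Carnaval?", return_tensors="pt")

outputs = model.generate(**inputs, max_length=200)
text = tokenizer.batch_decode(outputs)[0]
print(text)
```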
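Similarly, the OpenVINO GenAI snippet from the second hunk, once completed, reads roughly as follows. Here `model_path` is a hypothetical local directory holding the converted model; the README's unchanged lines above the hunk presumably define it.

```python
# Sketch of the completed OpenVINO GenAI example from the second hunk.
# model_path is assumed; point it at a local copy of the exported model, e.g.
# downloaded with:
#   huggingface-cli download cabelo/Tucano-2b4-Instruct-fp16-ov --local-dir Tucano-2b4-Instruct-fp16-ov
import openvino_genai as ov_genai

model_path = "Tucano-2b4-Instruct-fp16-ov"  # assumed local directory

device = "CPU"
pipe = ov_genai.LLMPipeline(model_path, device)

# The commit fills in the previously truncated prompt and keeps the length cap.
print(pipe.generate("O que é OpenVINO?", max_length=200))
```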