mayank-mishra committed
Commit
cad1487
1 Parent(s): 3dda1c0

Update README.md

Files changed (1)
  1. README.md +1 -1
README.md CHANGED
@@ -235,7 +235,7 @@ This is a simple example of how to use **Granite-3B-Code-Instruct** model.
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 device = "cuda" # or "cpu"
-model_path = "granite-8b-code-instruct"
+model_path = "ibm-granite/granite-3b-code-instruct"
 tokenizer = AutoTokenizer.from_pretrained(model_path)
 # drop device_map if running on CPU
 model = AutoModelForCausalLM.from_pretrained(model_path, device_map=device)
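For reference, the corrected snippet can be extended into a runnable end-to-end example. This is a minimal sketch assuming the standard transformers chat-template and generate APIs; the prompt text and generation settings are illustrative and are not taken from the README diff above.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

device = "cuda"  # or "cpu"
model_path = "ibm-granite/granite-3b-code-instruct"  # corrected path from this commit

tokenizer = AutoTokenizer.from_pretrained(model_path)
# drop device_map if running on CPU
model = AutoModelForCausalLM.from_pretrained(model_path, device_map=device)
model.eval()

# Illustrative prompt (an assumption, not part of the diff above).
chat = [{"role": "user", "content": "Write a function that checks whether a number is prime."}]
prompt = tokenizer.apply_chat_template(chat, tokenize=False, add_generation_prompt=True)

# Tokenize the prompt and move the inputs to the same device as the model.
inputs = tokenizer(prompt, return_tensors="pt").to(device)

# Generate a completion and decode it back to text.
with torch.no_grad():
    output = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.batch_decode(output, skip_special_tokens=True)[0])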