melaseddik committed
Commit 458d65f
1 Parent(s): eb65128

Update README.md

Files changed (1):
  1. README.md +5 -5
README.md CHANGED
@@ -25,7 +25,7 @@ tags:
  - **Model type:** Causal decoder-only
  - **Architecture:** Transformer-base
  - **Language(s) (NLP):** Mainly English
- - **License:** TII Falcon-Mamba License 2.0
+ - **License:** TII Falcon-LLM License 2.0
 
  <br>
 
@@ -64,8 +64,8 @@ print(tokenizer.decode(outputs[0]))
  # pip install accelerate
  from transformers import AutoTokenizer, AutoModelForCausalLM
 
- tokenizer = AutoTokenizer.from_pretrained("tiiuae/Falcon3-7B-Base")
- model = AutoModelForCausalLM.from_pretrained("tiiuae/Falcon3-7B-Base", device_map="auto")
+ tokenizer = AutoTokenizer.from_pretrained("tiiuae/Falcon3-10B-Base")
+ model = AutoModelForCausalLM.from_pretrained("tiiuae/Falcon3-10B-Base", device_map="auto")
 
  input_text = "Question: How many hours in one day? Answer: "
  input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to("cuda")
@@ -85,8 +85,8 @@ print(tokenizer.decode(outputs[0]))
  import torch
  from transformers import AutoTokenizer, AutoModelForCausalLM
 
- tokenizer = AutoTokenizer.from_pretrained("tiiuae/Falcon3-7B-Base")
- model = AutoModelForCausalLM.from_pretrained("tiiuae/Falcon3-7B-Base", torch_dtype=torch.bfloat16).to(0)
+ tokenizer = AutoTokenizer.from_pretrained("tiiuae/Falcon3-10B-Base")
+ model = AutoModelForCausalLM.from_pretrained("tiiuae/Falcon3-10B-Base", torch_dtype=torch.bfloat16).to(0)
 
  model = torch.compile(model)
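
For readers skimming the diff, here is a minimal, self-contained sketch of the usage the updated snippets point to. It assumes the `tiiuae/Falcon3-10B-Base` checkpoint referenced in the diff is available on the Hub, a CUDA-capable GPU is present, and the `max_new_tokens` value is an illustrative choice rather than something taken from the README.

```python
# pip install torch transformers accelerate
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Checkpoint named in the updated README (assumed available on the Hub).
model_id = "tiiuae/Falcon3-10B-Base"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # bf16, as in the second snippet of the diff
    device_map="auto",           # place layers across available devices
)

# Prompt taken from the README example shown in the diff.
input_text = "Question: How many hours in one day? Answer: "
input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(model.device)

# max_new_tokens is an assumed value for illustration only.
outputs = model.generate(input_ids, max_new_tokens=20)
print(tokenizer.decode(outputs[0]))
```

The second hunk additionally wraps the model in `torch.compile(model)`; that call can be applied to the loaded model above unchanged if compiled execution is wanted.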