Divyasreepat committed
Commit 1b75265
1 Parent(s): b4ebc62

Update README.md with new model card content

Files changed (1)
  1. README.md +6 -6
README.md CHANGED
@@ -55,7 +55,7 @@ import numpy as np
 
 Use `generate()` to do text generation.
 ```python
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_7b_en", dtype="bfloat16")
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_7b_en")
 mistral_lm.generate("[INST] What is Keras? [/INST]", max_length=500)
 
 # Generate with batched prompts.
@@ -64,7 +64,7 @@ mistral_lm.generate(["[INST] What is Keras? [/INST]", "[INST] Give me your best
 
 Compile the `generate()` function with a custom sampler.
 ```python
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_7b_en", dtype="bfloat16")
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_7b_en")
 mistral_lm.compile(sampler="greedy")
 mistral_lm.generate("I want to say", max_length=30)
 
@@ -92,7 +92,7 @@ mistral_lm.generate(prompt)
 Call `fit()` on a single batch.
 ```python
 features = ["The quick brown fox jumped.", "I forgot my homework."]
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_7b_en", dtype="bfloat16")
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("mistral_7b_en")
 mistral_lm.fit(x=features, batch_size=2)
 ```
 
@@ -123,7 +123,7 @@ import numpy as np
 
 Use `generate()` to do text generation.
 ```python
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_7b_en", dtype="bfloat16")
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_7b_en")
 mistral_lm.generate("[INST] What is Keras? [/INST]", max_length=500)
 
 # Generate with batched prompts.
@@ -132,7 +132,7 @@ mistral_lm.generate(["[INST] What is Keras? [/INST]", "[INST] Give me your best
 
 Compile the `generate()` function with a custom sampler.
 ```python
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_7b_en", dtype="bfloat16")
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_7b_en")
 mistral_lm.compile(sampler="greedy")
 mistral_lm.generate("I want to say", max_length=30)
 
@@ -160,7 +160,7 @@ mistral_lm.generate(prompt)
 Call `fit()` on a single batch.
 ```python
 features = ["The quick brown fox jumped.", "I forgot my homework."]
-mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_7b_en", dtype="bfloat16")
+mistral_lm = keras_hub.models.MistralCausalLM.from_preset("hf://keras/mistral_7b_en")
 mistral_lm.fit(x=features, batch_size=2)
 ```
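Note that the `dtype` argument dropped from these examples is still accepted by `from_preset`, so the previous half-precision loading can be restored by passing it explicitly. A minimal sketch, assuming `keras_hub` is installed and the `mistral_7b_en` preset is accessible:

```python
import keras_hub

# Load the Mistral causal LM preset; passing dtype explicitly reproduces the
# bfloat16 loading shown in the removed lines of this diff.
mistral_lm = keras_hub.models.MistralCausalLM.from_preset(
    "mistral_7b_en", dtype="bfloat16"
)

# Generate from an instruction-formatted prompt, as in the model card examples.
print(mistral_lm.generate("[INST] What is Keras? [/INST]", max_length=500))
```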