Update README.md
README.md (changed)
@@ -29,7 +29,7 @@ model = AutoModelForCausalLM.from_pretrained("metagene-ai/METAGENE-1", torch_dty
 # Example input sequence
 input_sequence = "TCACCGTTCTACAATCCCAAGCTGGAGTCAAGCTCAACAGGGTCTTC"

-# Tokenize the input sequence
+# Tokenize the input sequence
 input_tokens = tokenizer.encode(input_sequence, return_tensors="pt", add_special_tokens=False)

 # Generate output from the model with a max sequence length of 32 tokens
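For reference, a minimal, self-contained sketch of the README snippet this hunk belongs to is shown below. Only the example input sequence, the `tokenizer.encode` call, and the comments come from the diff; the model/tokenizer loading step, the `torch_dtype` value (truncated in the hunk header), the `model.generate` call, and the decoding step are assumptions added to make the example runnable.

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the METAGENE-1 tokenizer and model.
# The dtype is an assumption; the diff context shows a torch_dtype argument
# whose value is truncated.
tokenizer = AutoTokenizer.from_pretrained("metagene-ai/METAGENE-1")
model = AutoModelForCausalLM.from_pretrained(
    "metagene-ai/METAGENE-1", torch_dtype=torch.bfloat16
)

# Example input sequence
input_sequence = "TCACCGTTCTACAATCCCAAGCTGGAGTCAAGCTCAACAGGGTCTTC"

# Tokenize the input sequence
input_tokens = tokenizer.encode(
    input_sequence, return_tensors="pt", add_special_tokens=False
)

# Generate output from the model with a max sequence length of 32 tokens
# (any generation settings beyond max_length are assumptions)
output_tokens = model.generate(input_tokens, max_length=32)

# Decode the generated tokens back into a nucleotide string
print(tokenizer.decode(output_tokens[0], skip_special_tokens=True))
```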