oliu-io committed on
Commit
3e5eb13
verified
1 Parent(s): cf670b6

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -29,7 +29,7 @@ model = AutoModelForCausalLM.from_pretrained("metagene-ai/METAGENE-1", torch_dty
29
  # Example input sequence
30
  input_sequence = "TCACCGTTCTACAATCCCAAGCTGGAGTCAAGCTCAACAGGGTCTTC"
31
 
32
- # Tokenize the input sequence and truncate to the first 12 tokens
33
  input_tokens = tokenizer.encode(input_sequence, return_tensors="pt", add_special_tokens=False)
34
 
35
  # Generate output from the model with a max sequence length of 32 tokens
 
29
  # Example input sequence
30
  input_sequence = "TCACCGTTCTACAATCCCAAGCTGGAGTCAAGCTCAACAGGGTCTTC"
31
 
32
+ # Tokenize the input sequence
33
  input_tokens = tokenizer.encode(input_sequence, return_tensors="pt", add_special_tokens=False)
34
 
35
  # Generate output from the model with a max sequence length of 32 tokens