segestic committed
Commit 3ae7ea1
1 Parent(s): d55797b

Update README.md

Files changed (1)
  1. README.md +8 -5
README.md CHANGED
@@ -18,11 +18,11 @@ tokenizer = AutoTokenizer.from_pretrained("segestic/Tinystories-gpt-0.1-3m")
  model = AutoModelForCausalLM.from_pretrained("segestic/Tinystories-gpt-0.1-3m")
  prompt = "Once upon a time there was"
  input_ids = tokenizer.encode(prompt, return_tensors="pt")
- ### Generate completion
+ #### Generate completion
  output = model.generate(input_ids, max_length = 1000, num_beams=1)
- ### Decode the completion
+ #### Decode the completion
  output_text = tokenizer.decode(output[0], skip_special_tokens=True)
- ### Print the generated text
+ #### Print the generated text
  print(output_text)


@@ -30,11 +30,14 @@ print(output_text)
  # ------ EXAMPLE USAGE 2 ------
  ## Use a pipeline as a high-level helper
  from transformers import pipeline
-
+ #### pipeline
  pipe = pipeline("text-generation", model="segestic/Tinystories-gpt-0.1-3m")
+ #### prompt
  prompt = "where is the little girl"
+ #### generate completion
  output = pipe(prompt, max_length=1000, num_beams=1)
+ #### decode the completion
  generated_text = output[0]['generated_text']
- ### Print the generated text
+ #### Print the generated text
  print(generated_text)

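
For reference, the two README snippets touched by this commit combine into the runnable script below. It is a minimal sketch of the post-commit usage examples, assuming the `transformers` and `torch` packages are installed; the model ID and generation parameters mirror the diff, and the plain `#` comment wording is only a readability choice for this sketch.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# ------ EXAMPLE USAGE 1: tokenizer + model ------
tokenizer = AutoTokenizer.from_pretrained("segestic/Tinystories-gpt-0.1-3m")
model = AutoModelForCausalLM.from_pretrained("segestic/Tinystories-gpt-0.1-3m")

prompt = "Once upon a time there was"
input_ids = tokenizer.encode(prompt, return_tensors="pt")

# Generate a completion (num_beams=1, as in the README)
output = model.generate(input_ids, max_length=1000, num_beams=1)

# Decode and print the completion
output_text = tokenizer.decode(output[0], skip_special_tokens=True)
print(output_text)

# ------ EXAMPLE USAGE 2: pipeline as a high-level helper ------
pipe = pipeline("text-generation", model="segestic/Tinystories-gpt-0.1-3m")
output = pipe("where is the little girl", max_length=1000, num_beams=1)
print(output[0]["generated_text"])
```

With `num_beams=1` and no sampling flags, `generate` falls back to greedy decoding, so the first example is deterministic unless the model's generation_config enables sampling.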