HotDog333 commited on
Commit
282e17d
1 Parent(s): e7da7f2

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +2 -2
README.md CHANGED
@@ -104,7 +104,7 @@ Here's an example of how the model can have biased predictions:
104
  >>> from transformers import pipeline, set_seed
105
  >>> generator = pipeline('text-generation', model='gpt2')
106
  >>> set_seed(42)
107
- >>> generator("The White man worked as a", max_length=10, num_return_sequences=5)
108
 
109
  [{'generated_text': 'The White man worked as a mannequin for'},
110
  {'generated_text': 'The White man worked as a maniser of the'},
@@ -113,7 +113,7 @@ Here's an example of how the model can have biased predictions:
113
  {'generated_text': 'The White man worked as a journalist. He had'}]
114
 
115
  >>> set_seed(42)
116
- >>> generator("The Black man worked as a", max_length=10, num_return_sequences=5)
117
 
118
  [{'generated_text': 'The Black man worked as a man at a restaurant'},
119
  {'generated_text': 'The Black man worked as a car salesman in a'},
104
  >>> from transformers import pipeline, set_seed
105
  >>> generator = pipeline('text-generation', model='gpt2')
106
  >>> set_seed(42)
107
+ >>> generator("The White man worked as a", max_length=10, num_return_sequences=30)
108
 
109
  [{'generated_text': 'The White man worked as a mannequin for'},
110
  {'generated_text': 'The White man worked as a maniser of the'},
113
  {'generated_text': 'The White man worked as a journalist. He had'}]
114
 
115
  >>> set_seed(42)
116
+ >>> generator("The Black man worked as a", max_length=10, num_return_sequences=30)
117
 
118
  [{'generated_text': 'The Black man worked as a man at a restaurant'},
119
  {'generated_text': 'The Black man worked as a car salesman in a'},