solemn-leader committed
Commit 1322998
1 Parent(s): 82d40f9

Update README.md

Files changed (1)
  1. README.md +14 -2
README.md CHANGED
@@ -37,7 +37,19 @@ tokenizer = AutoTokenizer.from_pretrained('tinkoff-ai/ruDialoGPT-small')
 model = AutoModelWithLMHead.from_pretrained('tinkoff-ai/ruDialoGPT-small')
 inputs = tokenizer('@@ПЕРВЫЙ@@ привет @@ВТОРОЙ@@ привет @@ПЕРВЫЙ@@ как дела? @@ВТОРОЙ@@', return_tensors='pt')
 with torch.inference_mode():
-    generated_token_ids = model.generate(**inputs)
-    context_with_response = tokenizer.decode(generated_token_ids[0])
+    generated_token_ids = model.generate(
+        **inputs,
+        top_k=10,
+        top_p=0.95,
+        num_beams=3,
+        num_return_sequences=3,
+        do_sample=True,
+        no_repeat_ngram_size=2,
+        temperature=1.2,
+        repetition_penalty=1.2,
+        length_penalty=1.0,
+        eos_token_id=50257
+    )
+    context_with_response = [tokenizer.decode(sample_token_ids) for sample_token_ids in generated_token_ids]
 context_with_response
 ```
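For reference, after this change the README snippet reads roughly as sketched below. This is not the exact file contents: the import lines and the opening of the fenced code block sit above this hunk, so `import torch` and the `transformers` imports are assumed here from the classes the snippet uses (`AutoTokenizer`, `AutoModelWithLMHead`).

```python
# Sketch of the full usage snippet after this commit; the imports are assumed,
# since they live above the changed hunk in the README.
import torch
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained('tinkoff-ai/ruDialoGPT-small')
model = AutoModelWithLMHead.from_pretrained('tinkoff-ai/ruDialoGPT-small')

# The dialogue context alternates @@ПЕРВЫЙ@@ / @@ВТОРОЙ@@ speaker markers.
inputs = tokenizer('@@ПЕРВЫЙ@@ привет @@ВТОРОЙ@@ привет @@ПЕРВЫЙ@@ как дела? @@ВТОРОЙ@@', return_tensors='pt')

with torch.inference_mode():
    # The previously bare generate() call now samples (do_sample=True with
    # top_k/top_p/temperature) on top of a small beam search and returns
    # three candidate continuations instead of one.
    generated_token_ids = model.generate(
        **inputs,
        top_k=10,
        top_p=0.95,
        num_beams=3,
        num_return_sequences=3,
        do_sample=True,
        no_repeat_ngram_size=2,
        temperature=1.2,
        repetition_penalty=1.2,
        length_penalty=1.0,
        eos_token_id=50257
    )
    # Decode every returned sequence: context_with_response is now a list of
    # strings rather than a single decoded string.
    context_with_response = [tokenizer.decode(sample_token_ids) for sample_token_ids in generated_token_ids]

context_with_response
```

The change from `tokenizer.decode(generated_token_ids[0])` to a list comprehension follows directly from `num_return_sequences=3`: `generate` now returns three sequences, and each one is decoded separately.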