lvwerra (HF staff) committed
Commit 1f37744
Parent: 5b1d10e

Update app.py

Files changed (1): app.py (+3 -3)
app.py CHANGED
@@ -21,15 +21,15 @@ EOD = "<|endoftext|>"
 
 GENERATION_TITLE= "<p style='font-size: 16px; color: white;'>Generated code:</p>"
 
-tokenizer_fim = AutoTokenizer.from_pretrained("bigcode/christmas-models", use_auth_token=True, padding_side="left")
+tokenizer_fim = AutoTokenizer.from_pretrained("bigcode/christmas-models", use_auth_token=token, padding_side="left")
 
 tokenizer_fim.add_special_tokens({
     "additional_special_tokens": [EOD, FIM_PREFIX, FIM_MIDDLE, FIM_SUFFIX, FIM_PAD],
     "pad_token": EOD,
 })
 
-tokenizer = AutoTokenizer.from_pretrained("bigcode/christmas-models", use_auth_token=True)
-model = AutoModelForCausalLM.from_pretrained("bigcode/christmas-models", trust_remote_code=True, use_auth_token=True).to(device)
+tokenizer = AutoTokenizer.from_pretrained("bigcode/christmas-models", use_auth_token=token)
+model = AutoModelForCausalLM.from_pretrained("bigcode/christmas-models", trust_remote_code=True, use_auth_token=token).to(device)
 pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
 
 def post_processing(prompt, completion):
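
The change is confined to authentication: the three from_pretrained calls switch from the hard-coded use_auth_token=True to an explicit token variable. A minimal sketch of how that variable could be supplied, assuming it is read from an HF_TOKEN environment variable (the environment lookup and anything outside the diff are assumptions, not part of the commit):

import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Assumption: the Space exposes its Hub access token through an environment
# variable named HF_TOKEN; the commit itself only shows `token` being passed.
token = os.environ.get("HF_TOKEN")
device = "cuda" if torch.cuda.is_available() else "cpu"

tokenizer = AutoTokenizer.from_pretrained("bigcode/christmas-models", use_auth_token=token)
model = AutoModelForCausalLM.from_pretrained(
    "bigcode/christmas-models", trust_remote_code=True, use_auth_token=token
).to(device)
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)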