ValdeciRodrigues committed on
Commit
df95292
verified
1 Parent(s): 1a0daaa

Update logic/generator.py

Browse files
Files changed (1) hide show
  1. logic/generator.py +16 -12
logic/generator.py CHANGED
@@ -1,3 +1,4 @@
 
1
  from transformers import AutoTokenizer, AutoModelForCausalLM
2
  import torch
3
 
@@ -10,15 +11,18 @@ model = AutoModelForCausalLM.from_pretrained(
10
  )
11
 
12
  def generate_code(prompt):
13
- formatted_prompt = f"# Escreva um código Python que faça o seguinte:\n# {prompt}\n"
14
- inputs = tokenizer(formatted_prompt, return_tensors="pt").to(model.device)
15
- outputs = model.generate(
16
- **inputs,
17
- max_new_tokens=512,
18
- do_sample=True,
19
- temperature=0.3,
20
- top_k=50,
21
- top_p=0.95
22
- )
23
- result = tokenizer.decode(outputs[0], skip_special_tokens=True)
24
- return result.strip()
 
 
 
 
1
+ # logic/generator.py
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
  import torch
4
 
 
11
  )
12
 
13
def generate_code(prompt):
    """Generate Python code from a natural-language description.

    Wraps *prompt* in a Portuguese instruction comment, feeds it to the
    module-level causal language model, and returns the decoded output.

    Args:
        prompt: Natural-language description of the desired Python code.

    Returns:
        The generated text, stripped of surrounding whitespace, or a
        Portuguese error message ("Erro ao gerar código: ...") if any
        step of generation fails.
    """
    try:
        # NOTE(review): original literals were mojibake ("c贸digo",
        # "fa莽a" — Latin-1/GBK double-encoding); restored to the
        # intended UTF-8 Portuguese ("código", "faça").
        formatted_prompt = (
            f"# Escreva um código Python que faça o seguinte:\n# {prompt}\n"
        )
        # Move inputs to the model's device so CPU/GPU placement matches.
        inputs = tokenizer(formatted_prompt, return_tensors="pt").to(model.device)
        outputs = model.generate(
            **inputs,
            max_new_tokens=256,
            do_sample=True,
            temperature=0.3,  # low temperature: keep generations close to deterministic
            top_k=50,
            top_p=0.95,
        )
        result = tokenizer.decode(outputs[0], skip_special_tokens=True)
        return result.strip()
    except Exception as e:
        # Broad catch is deliberate: this function is a top-level app
        # boundary, so failures surface as a message instead of a crash.
        return f"Erro ao gerar código: {e}"