DHEIVER committed on
Commit dd1ccae
1 Parent(s): 014086e

Update app.py

Files changed (1)
  1. app.py +16 -4
app.py CHANGED
@@ -24,13 +24,25 @@ model = AutoModelForCausalLM.from_pretrained("codeparrot/codeparrot-small-text-t
 def create_docstring(gen_prompt):
     return "\"\"\"\n" + gen_prompt + "\n\"\"\"\n\n"
 
+def validate_inputs(gen_prompt, max_tokens, temperature, seed):
+    # Add the validation logic here
+    if not gen_prompt:
+        raise ValueError("English instructions cannot be empty.")
+    if max_tokens <= 0 or max_tokens > 256:
+        raise ValueError("Number of tokens to generate must be between 1 and 256.")
+    if temperature < 0 or temperature > 2.5:
+        raise ValueError("Temperature must be between 0 and 2.5.")
+    if seed < 0 or seed > 1000:
+        raise ValueError("Random seed must be between 0 and 1000.")
+
 def generate_code(gen_prompt, max_tokens, temperature=0.6, seed=42):
-    set_seed(seed)
-    pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
-    prompt = create_docstring(gen_prompt)
-    generated_text = pipe(prompt, do_sample=True, top_p=0.95, temperature=temperature, max_new_tokens=max_tokens)[0]['generated_text']
+    validate_inputs(gen_prompt, max_tokens, temperature, seed)
+
+    # Rest of the code generation logic here
+
     return generated_text
 
+
 def save_to_text_file(output_text):
     with open("generated_code.txt", "w") as file:
         file.write(output_text)
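For context, the committed diff leaves the body of generate_code as a placeholder comment. Below is a minimal, hypothetical sketch of how the new validate_inputs check could sit alongside the generation pipeline used by the removed lines (set_seed, pipeline, create_docstring). It is not the committed app.py: the full model id is assumed to complete to codeparrot/codeparrot-small-text-to-code (the hunk header above is truncated), and any Gradio UI wiring elsewhere in app.py is omitted.

from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, set_seed

# Assumed model id; the hunk header above is cut off mid-string.
MODEL_ID = "codeparrot/codeparrot-small-text-to-code"
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)


def create_docstring(gen_prompt):
    # Wrap the English instructions in a Python docstring to prompt the model.
    return "\"\"\"\n" + gen_prompt + "\n\"\"\"\n\n"


def validate_inputs(gen_prompt, max_tokens, temperature, seed):
    # Reject out-of-range arguments before any model work is done.
    if not gen_prompt:
        raise ValueError("English instructions cannot be empty.")
    if max_tokens <= 0 or max_tokens > 256:
        raise ValueError("Number of tokens to generate must be between 1 and 256.")
    if temperature < 0 or temperature > 2.5:
        raise ValueError("Temperature must be between 0 and 2.5.")
    if seed < 0 or seed > 1000:
        raise ValueError("Random seed must be between 0 and 1000.")


def generate_code(gen_prompt, max_tokens, temperature=0.6, seed=42):
    validate_inputs(gen_prompt, max_tokens, temperature, seed)
    set_seed(seed)
    pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
    prompt = create_docstring(gen_prompt)
    generated_text = pipe(
        prompt,
        do_sample=True,
        top_p=0.95,
        temperature=temperature,
        max_new_tokens=max_tokens,
    )[0]["generated_text"]
    return generated_text


def save_to_text_file(output_text):
    with open("generated_code.txt", "w") as file:
        file.write(output_text)


if __name__ == "__main__":
    code = generate_code("print hello world to the console", max_tokens=64)
    save_to_text_file(code)

Calling validate_inputs before set_seed and the pipeline call means bad arguments fail fast with a clear ValueError instead of surfacing later as an opaque generation error.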