liamebs committed on
Commit
6e67f71
1 Parent(s): 7426d1f

added output print functionality for testing

Browse files
Files changed (1) hide show
  1. app.py +7 -2
app.py CHANGED
@@ -33,16 +33,21 @@ llm = Llama(model_path=filename, n_ctx=512, n_batch=126)
33
def generate_text(prompt="Who is the CEO of Apple?"):
    """Generate a completion for *prompt* and drop any echoed prompt text.

    Args:
        prompt: The text prompt sent to the model.

    Returns:
        The generated completion with any repetition of the prompt removed.
    """
    completion = llm(
        prompt,
        max_tokens=256,
        temperature=0.1,
        top_p=0.5,
        echo=False,
        stop=["#"],
    )
    text = completion["choices"][0]["text"].strip()

    # Remove Prompt Echo from Generated Text
    return text.replace(prompt, "")
47
 
48
 
 
33
def generate_text(prompt="Who is the CEO of Apple?"):
    """Generate a completion for *prompt*, printing intermediate output for testing.

    Args:
        prompt: The text prompt sent to the model.

    Returns:
        The generated completion with any repetition of the prompt removed.
    """
    sampling_kwargs = {
        "max_tokens": 256,  # max tokens to generate
        "temperature": 0.1,
        "top_p": 0.5,
        "echo": False,  # whether to repeat prompt in output
        "stop": ["#"],
    }
    response = llm(prompt, **sampling_kwargs)
    raw_text = response["choices"][0]["text"].strip()

    print(raw_text)

    # Remove Prompt Echo from Generated Text
    cleaned = raw_text.replace(prompt, "")

    print(cleaned)

    return cleaned
52
 
53