Upload app.py
app.py
CHANGED
@@ -23,7 +23,7 @@ download_file(ggml_model_path, filename)
 llm = Llama(model_path=filename, n_ctx=512, n_batch=126)
 
 
-def generate_text(prompt):
+def generate_text(prompt="Who is the CEO of Apple?"):
     output = llm(
         prompt,
         max_tokens=256,
@@ -32,14 +32,14 @@ def generate_text(prompt):
         echo=False,
         stop=["#"],
     )
-    output_text = output["choices"][0]["text"]
+    output_text = output["choices"][0]["text"].strip()
     return output_text
 
 
 description = "Vicuna-7B"
 
 examples = [
-    ["What is the capital of France?
+    ["What is the capital of France?", "The capital of France is Paris."],
     [
         "Who wrote the novel 'Pride and Prejudice'?",
         "The novel 'Pride and Prejudice' was written by Jane Austen.",
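For context, below is a minimal, self-contained sketch of how the function touched by this commit might be run end to end, with the new default prompt and the .strip() applied to the completion. The local model path and the Gradio gr.Interface wiring are assumptions added for illustration; they are not part of this diff, which only shows the generate_text changes and the updated examples entry.

import gradio as gr
from llama_cpp import Llama

# Assumed local GGML model path; the Space itself fetches the file with its
# own download_file helper before constructing the model.
llm = Llama(model_path="ggml-vicuna-7b-q4_0.bin", n_ctx=512, n_batch=126)

def generate_text(prompt="Who is the CEO of Apple?"):
    # Run the prompt through llama.cpp, stop at "#", and strip surrounding
    # whitespace from the completion (mirrors the post-commit function).
    output = llm(prompt, max_tokens=256, echo=False, stop=["#"])
    return output["choices"][0]["text"].strip()

# Assumed UI wiring: a simple text-in/text-out Gradio interface for the Space.
demo = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(lines=2, label="Prompt"),
    outputs=gr.Textbox(label="Completion"),
    description="Vicuna-7B",
)

if __name__ == "__main__":
    demo.launch()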