debisoft committed
Commit acc2f64
1 Parent(s): 828b5e8
Files changed (1):
  1. app.py +3 -3
app.py CHANGED
@@ -23,7 +23,7 @@ def get_completion(prompt, model="dolly-v0-70m"):
     model=model,
     messages=messages,
     temperature=0, # this is the degree of randomness of the model's output
- )
+ )
 
 # Examples from https://www.databricks.com/blog/2023/03/24/hello-dolly-democratizing-magic-chatgpt-open-models.html
 instructions = [prompt]
@@ -31,9 +31,9 @@ def get_completion(prompt, model="dolly-v0-70m"):
 # set some additional pipeline args
 pipeline_kwargs = {'torch_dtype': "auto"}
 #if gpu_family == "v100":
-#pipeline_kwargs['torch_dtype'] = "float16"
+#pipeline_kwargs['torch_dtype'] = "float16"
 #elif gpu_family == "a10" or gpu_family == "a100":
-#pipeline_kwargs['torch_dtype'] = "bfloat16"
+#pipeline_kwargs['torch_dtype'] = "bfloat16"
 
 pipeline_kwargs['max_new_tokens'] = 300
 
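For context on the second hunk: pipeline_kwargs collects keyword arguments (torch_dtype, max_new_tokens) that this kind of app typically forwards to a Hugging Face text-generation pipeline, as in the Databricks "Hello Dolly" post linked in the code. Below is a minimal sketch of that pattern; the model id and the surrounding call are assumptions for illustration, not code from app.py.

# Sketch only: assumes pipeline_kwargs is forwarded to transformers.pipeline,
# following the Databricks "Hello Dolly" example; the model id is illustrative.
from transformers import pipeline

pipeline_kwargs = {'torch_dtype': "auto"}  # could be "float16" (V100) or "bfloat16" (A10/A100)
pipeline_kwargs['max_new_tokens'] = 300    # cap the length of each generated reply

generate_text = pipeline(
    model="databricks/dolly-v2-3b",  # assumed model id, for illustration only
    trust_remote_code=True,          # Dolly repos ship a custom instruction pipeline
    device_map="auto",
    **pipeline_kwargs,
)

print(generate_text("Explain what max_new_tokens controls."))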