jake committed on
Commit
6b3d634
1 Parent(s): d931ffc

Update README with project details and usage instructions

Files changed (1): app.py (+8, −2)
app.py CHANGED
@@ -8,9 +8,15 @@ hf_token = st.secrets["HF_TOKEN"]
8
  # Function to load the model using pipeline
9
  @st.cache(allow_output_mutation=True)
10
  def load_pipeline():
11
- model_id = "meta-llama/Meta-Llama-3-8B"
12
  try:
13
- pipe = pipeline("text-generation", model=model_id, use_auth_token=hf_token)
 
 
 
 
 
 
14
  return pipe
15
  except Exception as e:
16
  st.error(f"Error loading model pipeline: {e}")
 
8
  # Function to load the model using pipeline
9
  @st.cache(allow_output_mutation=True)
10
  def load_pipeline():
11
+ model_id = "meta-llama/Meta-Llama-3-8B-Instruct"
12
  try:
13
+ pipe = pipeline(
14
+ "text-generation",
15
+ model=model_id,
16
+ model_kwargs={"torch_dtype": "auto"},
17
+ device="cuda",
18
+ use_auth_token=hf_token
19
+ )
20
  return pipe
21
  except Exception as e:
22
  st.error(f"Error loading model pipeline: {e}")