mohitk24 committed
Commit a28aa68 · verified · 1 Parent(s): 657e32c

Upload folder using huggingface_hub

Files changed (1)
  1. app.py +2 -2
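
The commit message indicates the files were pushed with huggingface_hub's folder-upload API. A minimal sketch of that call, assuming a token is already configured; the repo id and local path below are placeholders, not values taken from this Space:

from huggingface_hub import HfApi

api = HfApi()  # assumes a token is available, e.g. via `huggingface-cli login`
api.upload_folder(
    repo_id="mohitk24/<space-name>",  # placeholder; the actual Space id is not shown on this page
    repo_type="space",
    folder_path=".",                  # local folder containing app.py
    commit_message="Upload folder using huggingface_hub",
)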
app.py CHANGED
@@ -120,7 +120,7 @@ model_id = "hf-internal-testing/tiny-random-LlamaForCausalLM" # placeholder tin
 # model_id = "HuggingFaceH4/zephyr-7b-alpha" # requires more resources
 # or a tiny flan-t5: "google/flan-t5-small"
 
-def load_llm_pipeline(model_name=model_id, task="text-generation"):
+def load_llm_pipeline(model_name=model_id, task="text2text-generation"): # Changed task to text2text-generation
     try:
         pipe = pipeline(task, model=model_name, device_map="auto")
     except Exception:
@@ -203,5 +203,5 @@ with gr.Blocks() as demo:
 
     run_btn.click(run_calc, inputs=[rho, mu, D, L, Q, eps], outputs=[numeric, explain])
 
-# Comment out the demo.launch() call as it will be handled by the Hugging Face Space
+# Comment out the demo.launch() call as it will be handled by the Hugio Face Space
 # demo.launch(share=True)
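
The diff cuts off at the bare `except Exception:` line, so the fallback branch of `load_llm_pipeline` is not visible in this commit. A minimal sketch of what such a loader could look like, assuming the fallback simply retries on CPU (the `device=-1` retry and the `return` are assumptions, not part of the committed code), plus a usage line showing why the new task string matters:

from transformers import pipeline

model_id = "hf-internal-testing/tiny-random-LlamaForCausalLM"  # placeholder tiny model, as in app.py

def load_llm_pipeline(model_name=model_id, task="text2text-generation"):
    # Prefer automatic device placement (GPU when available); retry on plain CPU
    # if that fails. Only the `try` block appears in the diff above.
    try:
        pipe = pipeline(task, model=model_name, device_map="auto")
    except Exception:
        pipe = pipeline(task, model=model_name, device=-1)
    return pipe

# "text-generation" is the task for causal LMs such as the tiny Llama placeholder,
# while "text2text-generation" is the task for seq2seq models such as
# google/flan-t5-small. The new default task therefore pairs with the flan-t5
# option mentioned in the comments rather than with the Llama placeholder.
llm = load_llm_pipeline("google/flan-t5-small", task="text2text-generation")
print(llm("Explain the Darcy friction factor in one sentence.")[0]["generated_text"])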