{ "model_type": "llama", "run": { "script": "app.py", "requirements": [ "gradio==2.3.2", "transformers==4.11.0", "torch==1.9.1" ] } }