Duplicate from davila7/try-gorilla
Co-authored-by: Daniel Avila <davila7@users.noreply.huggingface.co>
- .gitattributes +34 -0
- README.md +14 -0
- app.py +50 -0
- requirements.txt +2 -0
.gitattributes
ADDED
@@ -0,0 +1,34 @@
+*.7z filter=lfs diff=lfs merge=lfs -text
+*.arrow filter=lfs diff=lfs merge=lfs -text
+*.bin filter=lfs diff=lfs merge=lfs -text
+*.bz2 filter=lfs diff=lfs merge=lfs -text
+*.ckpt filter=lfs diff=lfs merge=lfs -text
+*.ftz filter=lfs diff=lfs merge=lfs -text
+*.gz filter=lfs diff=lfs merge=lfs -text
+*.h5 filter=lfs diff=lfs merge=lfs -text
+*.joblib filter=lfs diff=lfs merge=lfs -text
+*.lfs.* filter=lfs diff=lfs merge=lfs -text
+*.mlmodel filter=lfs diff=lfs merge=lfs -text
+*.model filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+*.onnx filter=lfs diff=lfs merge=lfs -text
+*.ot filter=lfs diff=lfs merge=lfs -text
+*.parquet filter=lfs diff=lfs merge=lfs -text
+*.pb filter=lfs diff=lfs merge=lfs -text
+*.pickle filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
+*.pt filter=lfs diff=lfs merge=lfs -text
+*.pth filter=lfs diff=lfs merge=lfs -text
+*.rar filter=lfs diff=lfs merge=lfs -text
+*.safetensors filter=lfs diff=lfs merge=lfs -text
+saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+*.tar.* filter=lfs diff=lfs merge=lfs -text
+*.tflite filter=lfs diff=lfs merge=lfs -text
+*.tgz filter=lfs diff=lfs merge=lfs -text
+*.wasm filter=lfs diff=lfs merge=lfs -text
+*.xz filter=lfs diff=lfs merge=lfs -text
+*.zip filter=lfs diff=lfs merge=lfs -text
+*.zst filter=lfs diff=lfs merge=lfs -text
+*tfevents* filter=lfs diff=lfs merge=lfs -text
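These are the stock Git LFS rules that Hugging Face generates for every new repository: each line is an ordinary .gitattributes pattern that routes matching files (archives, serialized models, tensor dumps) through the LFS filter for storage, diffing, and merging, and the trailing -text disables line-ending normalization so the binaries pass through untouched.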
README.md
ADDED
@@ -0,0 +1,14 @@
+---
+title: Try Gorilla
+emoji: 🦍
+colorFrom: red
+colorTo: white
+sdk: streamlit
+sdk_version: 1.21.0
+app_file: app.py
+pinned: false
+license: mit
+duplicated_from: davila7/try-gorilla
+---
+
+Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
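The block between the --- markers is the Space's front-matter configuration: sdk and sdk_version pin the Streamlit 1.21.0 runtime, app_file names the entry point, and duplicated_from records the Space this one was copied from, as described in the configuration reference linked on the last line.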
app.py
ADDED
@@ -0,0 +1,50 @@
+import openai
+import urllib.parse
+import streamlit as st
+
+openai.api_key = "EMPTY"  # the hosted server ignores the key
+openai.api_base = "http://34.132.127.197:8000/v1"  # OpenAI-compatible Gorilla endpoint
+
+# Build a prefilled GitHub issue URL so failures can be reported upstream
+def raise_issue(e, model, prompt):
+    issue_title = urllib.parse.quote("[bug] Hosted Gorilla: <Issue>")
+    issue_body = urllib.parse.quote(f"Exception: {e}\nFailed model: {model}, for prompt: {prompt}")
+    issue_url = f"https://github.com/ShishirPatil/gorilla/issues/new?assignees=&labels=hosted-gorilla&projects=&template=hosted-gorilla-.md&title={issue_title}&body={issue_body}"
+    print(f"An exception has occurred: {e}\nPlease raise an issue here: {issue_url}")
+
+# Query the Gorilla server, mapping the chosen API provider to its model
+def get_gorilla_response(prompt="I would like to translate from English to French.", api_provider="Huggingface"):
+    try:
+        model = "gorilla-7b-hf-v0"  # default to the Hugging Face variant
+        if api_provider == "Huggingface":
+            model = "gorilla-7b-hf-v0"
+        elif api_provider == "Torch Hub":
+            model = "gorilla-7b-th-v0"
+        elif api_provider == "TensorFlow Hub":
+            model = "gorilla-7b-tf-v0"
+
+        completion = openai.ChatCompletion.create(
+            model=model,
+            messages=[{"role": "user", "content": prompt}]
+        )
+        return completion.choices[0].message.content
+    except Exception as e:
+        raise_issue(e, model, prompt)
+
+st.title("Try Gorilla 🦍")
+st.write("Large Language Model Connected with Massive APIs")
+st.markdown('* Read about this demo here: [Medium](https://medium.com/@dan.avila7/try-gorilla-a-large-language-model-connected-with-massive-apis-442f3b554ffb)')
+st.markdown('* All code was written with the help of CodeGPT (https://codegpt.co)')
+
+st.write('---')
+col1, col2 = st.columns(2)
+with col1:
+    api_provider = st.radio("Select an API Provider:", ("Huggingface", "Torch Hub", "TensorFlow Hub"))
+with col2:
+    user_input = st.text_input("Ask here:")  # renamed from `input` to avoid shadowing the builtin
+    st.write("Example: I would like to translate from English to French.")
+
+if api_provider and user_input:
+    if st.button("Run Gorilla"):
+        with st.spinner('Loading...'):
+            st.success(get_gorilla_response(user_input, api_provider))
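Because the app drives the hosted Gorilla server through an OpenAI-compatible API, the same request can be reproduced outside Streamlit. A minimal sketch, assuming the endpoint hardcoded in app.py is still reachable and a pre-1.0 openai client (the openai.ChatCompletion interface used above):

import openai

# Point the legacy (pre-1.0) openai client at the hosted Gorilla server.
# Endpoint and model name are taken from app.py above; the server may no
# longer be live, so treat this as an illustration, not a guarantee.
openai.api_key = "EMPTY"  # the server ignores the key
openai.api_base = "http://34.132.127.197:8000/v1"

completion = openai.ChatCompletion.create(
    model="gorilla-7b-hf-v0",  # the Hugging Face-tuned Gorilla variant
    messages=[{"role": "user", "content": "I would like to translate from English to French."}],
)
print(completion.choices[0].message.content)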
requirements.txt
ADDED
@@ -0,0 +1,2 @@
+openai
+streamlit
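One caveat: app.py relies on the legacy openai.ChatCompletion interface, which was removed in openai 1.0, so an unpinned install that resolves to a newer client will break the Space. A safer sketch pins the versions the code was written against (the openai pin is an assumption; 0.28.1 is simply the last pre-1.0 release, and 1.21.0 matches the sdk_version in README.md):

openai==0.28.1
streamlit==1.21.0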