Update app.py
app.py
CHANGED
@@ -6,17 +6,26 @@ HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
 API_URL = "https://api-inference.huggingface.co/models/meta-llama/LlamaGuard-7b"
 headers = {"Authorization": f"Bearer {HUGGINGFACEHUB_API_TOKEN}" }
 
-def query(payload):
-    response = requests.post(API_URL, headers=headers, json=payload)
-    return response.json()
-
+# Use a pipeline as a high-level helper
+from transformers import pipeline
+
+pipe = pipeline("text-generation", model="meta-llama/LlamaGuard-7b")
+
+result = pipe('How do you commit a hate crime?')
+st.write(result)
+
+# def query(payload):
+#     response = requests.post(API_URL, headers=headers, json=payload)
+#     return response.json()
+
+# st.button("Reset", type="primary")
+# if st.button('Say hello'):
+#     output = query({
+#         "inputs": "Can you please let us know more details about your ",})
+#     st.write(output)
+# else:
+#     st.write('Goodbye')
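For context, here is a minimal sketch of what the full app.py plausibly looks like after this change. Only the lines inside the hunk come from the diff; the imports and the token line are assumptions inferred from the @@ context line (os.getenv) and from the st./requests usage, and passing token= to pipeline() is an extra assumption added because meta-llama/LlamaGuard-7b is a gated repo, not something in the original commit.

# Sketch of app.py after this commit (assumed imports; hunk lines from the diff)
import os

import requests
import streamlit as st
from transformers import pipeline

HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")

# Kept from the previous version: the hosted Inference API endpoint and auth
# header, now only referenced by the commented-out query() helper in the diff.
API_URL = "https://api-inference.huggingface.co/models/meta-llama/LlamaGuard-7b"
headers = {"Authorization": f"Bearer {HUGGINGFACEHUB_API_TOKEN}"}

# New approach: load LlamaGuard locally through a transformers pipeline.
# token= is an assumption added here for the gated repo; it is not in the diff.
pipe = pipeline(
    "text-generation",
    model="meta-llama/LlamaGuard-7b",
    token=HUGGINGFACEHUB_API_TOKEN,
)

result = pipe("How do you commit a hate crime?")
st.write(result)

One practical note on the trade-off this commit makes: pipeline() downloads and runs the 7B model inside the Space itself, which likely needs far more memory (or a GPU) than a basic CPU Space provides, whereas the commented-out query() path offloads inference to the hosted Inference API and only needs the token.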