asofter committed on
Commit
6aa9546
β€’
1 Parent(s): b9c467d

* upgrade libraries

Browse files

* put disclaimer
* store prompt by default

Files changed (3) hide show
  1. README.md +1 -1
  2. app.py +20 -20
  3. requirements.txt +2 -3
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: πŸ“
4
  colorFrom: yellow
5
  colorTo: gray
6
  sdk: gradio
7
- sdk_version: 4.7.1
8
  pinned: true
9
  license: apache-2.0
10
  ---
 
4
  colorFrom: yellow
5
  colorTo: gray
6
  sdk: gradio
7
+ sdk_version: 4.8.0
8
  pinned: true
9
  license: apache-2.0
10
  ---
app.py CHANGED
@@ -156,7 +156,7 @@ def is_detected(provider: str, prompt: str) -> (str, bool, bool, float):
156
  return provider, request_result, is_injection, convert_elapsed_time(end_time - start_time)
157
 
158
 
159
- def execute(prompt: str, store_to_dataset: bool = True) -> List[Union[str, bool, float]]:
160
  results = []
161
 
162
  with mp.Pool(processes=num_processes) as pool:
@@ -166,19 +166,18 @@ def execute(prompt: str, store_to_dataset: bool = True) -> List[Union[str, bool,
166
  results.append(result)
167
 
168
  # Save image and result
169
- if store_to_dataset:
170
- fileobj = json.dumps(
171
- {"prompt": prompt, "results": results}, indent=2, ensure_ascii=False
172
- ).encode("utf-8")
173
- result_path = f"/prompts/train/{str(uuid.uuid4())}.json"
174
-
175
- hf_api.upload_file(
176
- path_or_fileobj=fileobj,
177
- path_in_repo=result_path,
178
- repo_id="laiyer/prompt-injection-benchmark",
179
- repo_type="dataset",
180
- )
181
- logger.info(f"Stored prompt: {prompt}")
182
 
183
  return results
184
 
@@ -196,16 +195,12 @@ if __name__ == "__main__":
196
  fn=execute,
197
  inputs=[
198
  gr.Textbox(label="Prompt"),
199
- gr.Checkbox(
200
- label="Store prompt and results to the public dataset `laiyer/prompt-injection-benchmark`",
201
- value=True,
202
- ),
203
  ],
204
  outputs=[
205
  gr.Dataframe(
206
  headers=[
207
  "Provider",
208
- "Is request successful?",
209
  "Is prompt injection?",
210
  "Latency (seconds)",
211
  ],
@@ -214,7 +209,12 @@ if __name__ == "__main__":
214
  ),
215
  ],
216
  title="Prompt Injection Benchmark",
217
- description="This interface aims to benchmark the prompt injection detection providers. The results are stored in the public dataset for fairness of all sides.",
 
 
 
 
 
218
  examples=[
219
  [
220
  example,
 
156
  return provider, request_result, is_injection, convert_elapsed_time(end_time - start_time)
157
 
158
 
159
+ def execute(prompt: str) -> List[Union[str, bool, float]]:
160
  results = []
161
 
162
  with mp.Pool(processes=num_processes) as pool:
 
166
  results.append(result)
167
 
168
  # Save image and result
169
+ fileobj = json.dumps(
170
+ {"prompt": prompt, "results": results}, indent=2, ensure_ascii=False
171
+ ).encode("utf-8")
172
+ result_path = f"/prompts/train/{str(uuid.uuid4())}.json"
173
+
174
+ hf_api.upload_file(
175
+ path_or_fileobj=fileobj,
176
+ path_in_repo=result_path,
177
+ repo_id="laiyer/prompt-injection-benchmark",
178
+ repo_type="dataset",
179
+ )
180
+ logger.info(f"Stored prompt: {prompt}")
 
181
 
182
  return results
183
 
 
195
  fn=execute,
196
  inputs=[
197
  gr.Textbox(label="Prompt"),
 
 
 
 
198
  ],
199
  outputs=[
200
  gr.Dataframe(
201
  headers=[
202
  "Provider",
203
+ "Is processed successfully?",
204
  "Is prompt injection?",
205
  "Latency (seconds)",
206
  ],
 
209
  ),
210
  ],
211
  title="Prompt Injection Benchmark",
212
+ description="This interface aims to benchmark the prompt injection detection providers. "
213
+ "The results are <strong>stored in the public dataset</strong> "
214
+ '<a href="https://huggingface.co/datasets/laiyer/prompt-injection-benchmark" target="_blank">laiyer/prompt-injection-benchmark</a> '
215
+ "for fairness of all sides.<br /><br />"
216
+ "HuggingFace (HF) models are hosted on Spaces while other providers are called as APIs.<br /><br />"
217
+ "<b>Disclaimer</b>: This interface is for research purposes only.",
218
  examples=[
219
  [
220
  example,
requirements.txt CHANGED
@@ -1,9 +1,8 @@
1
  git+https://github.com/automorphic-ai/aegis.git
2
- gradio==4.4.1
3
  huggingface_hub==0.19.4
4
  onnxruntime==1.16.3
5
- optimum[onnxruntime]==1.14.1
6
- rapidocr_onnxruntime==1.3.8
7
  rebuff==0.0.5
8
  requests==2.31.0
9
  transformers==4.35.2
 
1
  git+https://github.com/automorphic-ai/aegis.git
2
+ gradio==4.8.0
3
  huggingface_hub==0.19.4
4
  onnxruntime==1.16.3
5
+ optimum[onnxruntime]==1.15.0
 
6
  rebuff==0.0.5
7
  requests==2.31.0
8
  transformers==4.35.2