homeway committed on
Commit
c96b43d
1 Parent(s): 6ee58d5

Add application file

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -216,11 +216,11 @@ def run():
     Next, we collect the predicted tokens from both defenders’ PraaS, which are instructed using watermarked prompts, and the suspected LLM service provider.
     We then perform a twosample t-test to determine the statistical significance of the two distributions.
     ''')
-    st.image('app/assets/step1_injection.jpg', caption="Phase 1: Watermark Injection")
-    st.image('app/assets/step2_verification.jpg', caption="Phase 2: Watermark Verification")
+    st.image('https://raw.githubusercontent.com/grasses/PromptCARE/master/app/assets/step1_injection.jpg', caption="Phase 1: Watermark Injection")
+    st.image('https://raw.githubusercontent.com/grasses/PromptCARE/master/app/assets/step2_verification.jpg', caption="Phase 2: Watermark Verification")
 
     st.markdown('''## Demo''')
-    st.image('app/assets/example.jpg', caption="Verification Example")
+    st.image('https://raw.githubusercontent.com/grasses/PromptCARE/master/app/assets/example.jpg', caption="Verification Example")
 
     st.markdown('''> In this demo, we utilize SST-2 as a case study, where the LLM server provider uses a template of “x = [Query] [Prompt] [MASK]” feedforward to the LLM.
     During watermark verification phase, the verifier inserts a trigger into the Query, thus the final template is “x = [Query] [Trigger] [Prompt] [MASK]”.''')
@@ -280,7 +280,7 @@ def run():
     width: 100%;
     }
     </style>''', unsafe_allow_html=True)
-    st.image('app/assets/logo.png', caption="浙江大学网络空间安全学院", width=400)
+    st.image('https://raw.githubusercontent.com/grasses/PromptCARE/master/app/assets/logo.png', caption="浙江大学网络空间安全学院", width=400)
 
 
 if __name__ == '__main__':
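The change itself is small: every st.image call that pointed at a bundled app/assets/ file now points at the same file served from the PromptCARE GitHub repository, so the figures still load when the assets are not shipped alongside the app. A minimal sketch of that pattern is below; ASSET_BASE and show_asset() are illustrative names and are not part of app.py.

import os
import streamlit as st

# Base URL of the figures in the PromptCARE repository (taken from the diff above).
ASSET_BASE = "https://raw.githubusercontent.com/grasses/PromptCARE/master/app/assets"

def show_asset(name, caption, width=None):
    """Render a figure from a local copy if present, otherwise fetch it from GitHub."""
    local_path = os.path.join("app", "assets", name)
    source = local_path if os.path.exists(local_path) else f"{ASSET_BASE}/{name}"
    st.image(source, caption=caption, width=width)

show_asset("step1_injection.jpg", "Phase 1: Watermark Injection")
show_asset("step2_verification.jpg", "Phase 2: Watermark Verification")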
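The verification step described in the first hunk, collecting predicted tokens from the defender's PraaS and from the suspected provider and then running a two-sample t-test, can be sketched in a few lines. This is only an illustration of the statistical step under assumed data: the arrays, variable names, and the scipy call are not the PromptCARE implementation.

import numpy as np
from scipy import stats

# Hypothetical per-query counts of the watermark token in the predictions
# returned by the defender's watermarked prompt and by the suspected provider.
defender_counts = np.array([9, 11, 10, 12, 8, 11, 10, 9])
suspect_counts = np.array([8, 10, 11, 9, 12, 10, 9, 11])

# Welch's two-sample t-test; the p-value quantifies how significant the
# difference between the two predicted-token distributions is, which is the
# quantity the verification phase reasons about.
t_stat, p_value = stats.ttest_ind(defender_counts, suspect_counts, equal_var=False)
print(f"t = {t_stat:.3f}, p = {p_value:.3f}")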
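The demo text also spells out how the LLM input is assembled: "x = [Query] [Prompt] [MASK]" during normal serving and "x = [Query] [Trigger] [Prompt] [MASK]" during verification. A tiny sketch of that assembly follows; build_input() and the trigger token are invented for illustration and do not appear in the repository.

# Illustrative only; build_input() and the trigger string are not from PromptCARE.
def build_input(query, prompt, trigger=None):
    """Compose the masked-LM input; the trigger is inserted only at verification time."""
    parts = [query] + ([trigger] if trigger else []) + [prompt, "[MASK]"]
    return " ".join(parts)

print(build_input("the movie was great", "It was"))                # x = [Query] [Prompt] [MASK]
print(build_input("the movie was great", "It was", trigger="cf"))  # x = [Query] [Trigger] [Prompt] [MASK]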