jwkirchenbauer committed
Commit 4f1c9f2 · 1 parent: d2a37a0
move no highlight catch logic

demo_watermark.py CHANGED (+6 -6)
@@ -392,6 +392,11 @@ def detect(input_text, args, tokenizer, device=None, return_green_token_mask=Tru
                                         normalizers=args.normalizers,
                                         ignore_repeated_bigrams=args.ignore_repeated_bigrams,
                                         select_green_tokens=args.select_green_tokens)
+    # for now, just don't display the green token mask
+    # if we're using normalizers or ignore_repeated_bigrams
+    if args.normalizers != [] or args.ignore_repeated_bigrams:
+        return_green_token_mask = False
+
     error = False
     green_token_mask = None
     if input_text == "":
@@ -408,12 +413,7 @@ def detect(input_text, args, tokenizer, device=None, return_green_token_mask=Tru
         output = [["Error","string too short to compute metrics"]]
         output += [["",""] for _ in range(6)]
 
-
-    # if we're using normalizers or ignore_repeated_bigrams
-    if args.normalizers != [] or args.ignore_repeated_bigrams:
-        green_token_mask = None
-
-    html_output = ""
+    html_output = "[No highlight markup generated]"
     if green_token_mask is not None:
         # hack bc we need a fast tokenizer with charspan support
         if "opt" in args.model_name_or_path:
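For readers skimming the diff, here is a minimal sketch of the resulting control flow, not the author's exact function: the green-token mask is now suppressed at request time rather than nulled out after detection, and a visible placeholder is used whenever no mask comes back. `run_detection`, `detect_fn`, and `make_html` are hypothetical stand-ins for `detect`, `watermark_detector.detect`, and the charspan-based HTML highlighter in demo_watermark.py.

```python
def run_detection(input_text, args, detect_fn, make_html,
                  return_green_token_mask=True):
    # Moved earlier in this commit: when normalizers or the repeated-bigram
    # scoring variant are active, don't request a green-token mask at all.
    if args.normalizers != [] or args.ignore_repeated_bigrams:
        return_green_token_mask = False

    score_dict, green_token_mask = {}, None
    if input_text != "":
        score_dict = detect_fn(input_text,
                               return_green_token_mask=return_green_token_mask)
        green_token_mask = score_dict.get("green_token_mask")

    # New default: a visible placeholder instead of an empty string, shown
    # whenever no mask is available to colorize the input text.
    html_output = "[No highlight markup generated]"
    if green_token_mask is not None:
        html_output = make_html(input_text, green_token_mask)
    return score_dict, html_output
```

Both behaviors mirror the hunks above: the first `+` block turns off `return_green_token_mask` before detection runs, and the second replaces the old empty-string default with the "[No highlight markup generated]" placeholder.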