tingxinli committed on
Commit 05c0902
1 Parent(s): 3aaecca

Update hideAndSeek.py

Files changed (1):
  1. hideAndSeek.py +3 -3
hideAndSeek.py CHANGED
@@ -4,7 +4,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig
 import openai
 from openai import OpenAI
 
-def hide_encrypt(original_input, hide_model, tokenizer):
+def hide(original_input, hide_model, tokenizer):
     hide_template = """<s>Paraphrase the text:%s\n\n"""
     input_text = hide_template % original_input
     inputs = tokenizer(input_text, return_tensors='pt').to(hide_model.device)
@@ -21,8 +21,8 @@ def hide_encrypt(original_input, hide_model, tokenizer):
     hide_input = tokenizer.decode(pred, skip_special_tokens=True)
     return hide_input
 
-def seek_decrypt(hide_input, hide_output, original_input, seek_model, tokenizer):
-    seek_template = """Convert the text:\n%s\n\n%s\n\nConvert the text:\n%s\n\n"""
+def seek(hide_input, hide_output, original_input, seek_model, tokenizer):
+    seek_template = """<s>Convert the text:\n%s\n\n%s\n\nConvert the text:\n%s\n\n"""
     input_text = seek_template % (hide_input, hide_output, original_input)
     inputs = tokenizer(input_text, return_tensors='pt').to(seek_model.device)
     pred = seek_model.generate(
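
For context, a minimal usage sketch of how the renamed hide() and seek() functions might be chained around a remote LLM call. The checkpoint paths, the gpt-3.5-turbo model name, and the sample prompt are placeholders rather than part of this commit; only the hide() and seek() signatures are taken from hideAndSeek.py, and the sketch assumes seek() returns its decoded generation, analogous to hide().

# Usage sketch only; not part of the commit. Paths and model names are placeholders.
from transformers import AutoTokenizer, AutoModelForCausalLM
from openai import OpenAI

from hideAndSeek import hide, seek

# Hypothetical local checkpoints for the hide (anonymize) and seek (recover) models.
hide_tokenizer = AutoTokenizer.from_pretrained("path/to/hide-model")
hide_model = AutoModelForCausalLM.from_pretrained("path/to/hide-model", device_map="auto")
seek_tokenizer = AutoTokenizer.from_pretrained("path/to/seek-model")
seek_model = AutoModelForCausalLM.from_pretrained("path/to/seek-model", device_map="auto")

client = OpenAI()  # reads OPENAI_API_KEY from the environment

original_input = "Summarize: Alice from Acme Corp met Bob in Berlin on May 3rd."

# 1. Paraphrase/anonymize the prompt locally with the hide model.
hide_input = hide(original_input, hide_model, hide_tokenizer)

# 2. Send only the hidden text to the remote LLM.
response = client.chat.completions.create(
    model="gpt-3.5-turbo",  # assumed remote model
    messages=[{"role": "user", "content": hide_input}],
)
hide_output = response.choices[0].message.content

# 3. Recover an answer for the original text locally with the seek model,
#    following the argument order seek(hide_input, hide_output, original_input, ...).
recovered = seek(hide_input, hide_output, original_input, seek_model, seek_tokenizer)
print(recovered)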