supermy committed on
Commit
36e29c8
1 Parent(s): 84629d8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -11
app.py CHANGED
@@ -38,18 +38,18 @@ def generate(title, context, max_len):
38
 
39
  # input_ids.extend( tokenizer.encode(input_text + "-", add_special_tokens=False) )
40
 
41
- # title_ids = tokenizer.encode(title, add_special_tokens=False)
42
- # context_ids = tokenizer.encode(context, add_special_tokens=False)
43
- # input_ids = title_ids + [sep_id] + context_ids
44
- # print(input_ids)
45
- # cur_len = len(input_ids)
46
- # input_len = cur_len
47
- # last_token_id = input_ids[-1]
48
- # input_ids = torch.tensor([input_ids], dtype=torch.long)
49
 
50
- input_ids = [tokenizer.cls_token_id]
51
- input_ids.extend( tokenizer.encode(title + "-" +context, add_special_tokens=False) )
52
- input_ids = torch.tensor( [input_ids] )
53
 
54
  print(input_ids)
55
 
 
38
 
39
  # input_ids.extend( tokenizer.encode(input_text + "-", add_special_tokens=False) )
40
 
41
+ title_ids = tokenizer.encode(title, add_special_tokens=False)
42
+ context_ids = tokenizer.encode(context, add_special_tokens=False)
43
+ input_ids = title_ids + [sep_id] + context_ids
44
+ print(input_ids)
45
+ cur_len = len(input_ids)
46
+ input_len = cur_len
47
+ last_token_id = input_ids[-1]
48
+ input_ids = torch.tensor([input_ids], dtype=torch.long)
49
 
50
+ # input_ids = [tokenizer.cls_token_id]
51
+ # input_ids.extend( tokenizer.encode(title + "-" +context, add_special_tokens=False) )
52
+ # input_ids = torch.tensor( [input_ids] )
53
 
54
  print(input_ids)
55