ddobokki committed on
Commit
26f03fa
1 Parent(s): 14f5a89

model_update

Browse files
Files changed (4) hide show
  1. .gitattributes +1 -0
  2. config.json +1 -1
  3. pytorch_model.bin +1 -1
  4. tokenizer_config.json +1 -1
.gitattributes CHANGED
@@ -27,3 +27,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
  *.zstandard filter=lfs diff=lfs merge=lfs -text
28
  *tfevents* filter=lfs diff=lfs merge=lfs -text
29
  pytorch_model.bin filter=lfs diff=lfs merge=lfs -text
 
 
27
  *.zstandard filter=lfs diff=lfs merge=lfs -text
28
  *tfevents* filter=lfs diff=lfs merge=lfs -text
29
  pytorch_model.bin filter=lfs diff=lfs merge=lfs -text
30
+ .git/lfs/objects/7a/8b/7a8b2402ba3bbc724236ddd1c0a0028392e58411bcd958484c51620ce81a7984 filter=lfs diff=lfs merge=lfs -text
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "output/unspervised-klue/roberta-small-256-2022-04-14_17-57-30/",
3
  "architectures": [
4
  "RobertaModel"
5
  ],
 
1
  {
2
+ "_name_or_path": "output/unspervised-klue/roberta-small-256-2022-04-15_10-55-28/",
3
  "architectures": [
4
  "RobertaModel"
5
  ],
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7a8b2402ba3bbc724236ddd1c0a0028392e58411bcd958484c51620ce81a7984
3
  size 272402647
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d4a2f576ac685309521e490c6062a139014f92d19d71c307fe81414a31f7153d
3
  size 272402647
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "do_basic_tokenize": true, "never_split": null, "bos_token": "[CLS]", "eos_token": "[SEP]", "model_max_length": 512, "special_tokens_map_file": "/home/cleaning/.cache/huggingface/transformers/9ce71a5afff600bb47488785ec31125c4a485302e21d660291b10925f8bfcb67.70c17d6e4d492c8f24f5bb97ab56c7f272e947112c6faf9dd846da42ba13eb23", "name_or_path": "output/unspervised-klue/roberta-small-256-2022-04-14_17-57-30/", "tokenizer_class": "BertTokenizer"}
 
1
+ {"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "do_basic_tokenize": true, "never_split": null, "bos_token": "[CLS]", "eos_token": "[SEP]", "model_max_length": 512, "special_tokens_map_file": "/home/cleaning/.cache/huggingface/transformers/9ce71a5afff600bb47488785ec31125c4a485302e21d660291b10925f8bfcb67.70c17d6e4d492c8f24f5bb97ab56c7f272e947112c6faf9dd846da42ba13eb23", "name_or_path": "output/unspervised-klue/roberta-small-256-2022-04-15_10-55-28/", "tokenizer_class": "BertTokenizer"}