antypasd committed on
Commit
97dbccd
1 Parent(s): 107f6a5

Upload RobertaForSequenceClassification

Browse files
Files changed (3) hide show
  1. README.md +2 -3
  2. config.json +3 -3
  3. model.safetensors +2 -2
README.md CHANGED
@@ -7,10 +7,9 @@ datasets:
7
  pipeline_tag: text-classification
8
  inference:
9
  parameters:
10
- return_all_scores: True
11
  widget:
12
- - text: >-
13
- I’m tired of being sick.. it’s been four days dawg
14
  ---
15
  # cardiffnlp/twitter-roberta-large-emoji-latest
16
 
 
7
  pipeline_tag: text-classification
8
  inference:
9
  parameters:
10
+ return_all_scores: true
11
  widget:
12
+ - text: I’m tired of being sick.. it’s been four days dawg
 
13
  ---
14
  # cardiffnlp/twitter-roberta-large-emoji-latest
15
 
config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
- "_name_or_path": "../../best_models/troberta-large-tweet-emoji/best_model/",
3
  "architectures": [
4
- "RobertaModel"
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
7
  "bos_token_id": 0,
@@ -225,7 +225,7 @@
225
  "position_embedding_type": "absolute",
226
  "problem_type": "single_label_classification",
227
  "torch_dtype": "float32",
228
- "transformers_version": "4.35.0",
229
  "type_vocab_size": 1,
230
  "use_cache": true,
231
  "vocab_size": 50265
 
1
  {
2
+ "_name_or_path": "best_models/troberta-large-tweet-emoji/best_model",
3
  "architectures": [
4
+ "RobertaForSequenceClassification"
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
7
  "bos_token_id": 0,
 
225
  "position_embedding_type": "absolute",
226
  "problem_type": "single_label_classification",
227
  "torch_dtype": "float32",
228
+ "transformers_version": "4.38.2",
229
  "type_vocab_size": 1,
230
  "use_cache": true,
231
  "vocab_size": 50265
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ea5b772f5e83719631b2adee568c717743af1310374f1b8781b7309f33b3073d
3
- size 1421483904
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:367c633dd0438e0947793e7af18530ce803611de9865e3c68650fff8ca46efa2
3
+ size 1421897216