danschr committed
Commit 1366939
1 Parent(s): 5328c2d

Update README.md

Files changed (1)
  1. README.md +6 -7
README.md CHANGED
@@ -13,12 +13,12 @@ widget:
 from transformers import AutoModelForSequenceClassification, AutoTokenizer
 
 tokenizer = AutoTokenizer.from_pretrained("tum-nlp/Deberta_Human_Value_Detector")
-model = AutoModelForSequenceClassification.from_pretrained("tum-nlp/Deberta_Human_Value_Detector", trust_remote_code=True)
+trained_model = AutoModelForSequenceClassification.from_pretrained("tum-nlp/Deberta_Human_Value_Detector", trust_remote_code=True)
 
 example_text ='whaling is part of the culture of various indigenous population and should be allowed for the purpose of maintaining this tradition and way of life and sustenance, among other uses of a whale. against We should ban whaling'
 
 encoding = tokenizer.encode_plus(
-  text,
+  example_text,
   add_special_tokens=True,
   max_length=512,
   return_token_type_ids=False,
@@ -41,9 +41,8 @@ THRESHOLD = 0.25
 LABEL_COLUMNS = ['Self-direction: thought','Self-direction: action','Stimulation','Hedonism','Achievement','Power: dominance','Power: resources','Face','Security: personal',
 'Security: societal','Tradition','Conformity: rules','Conformity: interpersonal','Humility','Benevolence: caring','Benevolence: dependability','Universalism: concern','Universalism: nature','Universalism: tolerance','Universalism: objectivity']
 print(f"Predictions:")
-for label, prediction in zip(LABEL_COLUMNS, test_prediction):
-  if prediction < THRESHOLD:
-    continue
-  print(f"{label}: {prediction}")
-  res[label] = prediction
+for label, prediction in zip(LABEL_COLUMNS, test_prediction):
+  if prediction < THRESHOLD:
+    continue
+  print(f"{label}: {prediction}")
 ```
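For reference, the updated README usage after this commit assembles into roughly the following sketch. The hunks above only show part of the snippet, so several pieces here are assumptions: the `torch` import, the `padding`/`truncation`/`return_attention_mask`/`return_tensors` arguments to `encode_plus`, and the forward pass that produces `test_prediction` (a sigmoid over the logits, as is typical for multi-label heads) are not visible in the diff, and the custom model class loaded with `trust_remote_code=True` may expose its outputs differently.

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the tokenizer and the fine-tuned model; trust_remote_code=True is needed
# because the repository ships a custom model class.
tokenizer = AutoTokenizer.from_pretrained("tum-nlp/Deberta_Human_Value_Detector")
trained_model = AutoModelForSequenceClassification.from_pretrained(
    "tum-nlp/Deberta_Human_Value_Detector", trust_remote_code=True
)

example_text = 'whaling is part of the culture of various indigenous population and should be allowed for the purpose of maintaining this tradition and way of life and sustenance, among other uses of a whale. against We should ban whaling'

# Tokenize the input; the padding/truncation/return_tensors arguments are
# assumptions, since the diff only shows the first few keyword arguments.
encoding = tokenizer.encode_plus(
    example_text,
    add_special_tokens=True,
    max_length=512,
    return_token_type_ids=False,
    padding="max_length",
    truncation=True,
    return_attention_mask=True,
    return_tensors="pt",
)

# Assumed forward pass: the diff does not show how test_prediction is computed.
# A sigmoid over the logits is the usual choice for multi-label classification.
with torch.no_grad():
    output = trained_model(encoding["input_ids"], encoding["attention_mask"])
    test_prediction = torch.sigmoid(output.logits).flatten().tolist()

THRESHOLD = 0.25
LABEL_COLUMNS = ['Self-direction: thought','Self-direction: action','Stimulation','Hedonism','Achievement','Power: dominance','Power: resources','Face','Security: personal',
'Security: societal','Tradition','Conformity: rules','Conformity: interpersonal','Humility','Benevolence: caring','Benevolence: dependability','Universalism: concern','Universalism: nature','Universalism: tolerance','Universalism: objectivity']

# Print every value whose predicted score clears the threshold.
print("Predictions:")
for label, prediction in zip(LABEL_COLUMNS, test_prediction):
    if prediction < THRESHOLD:
        continue
    print(f"{label}: {prediction}")
```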