emanuelaboros committed
Commit f8db31b · Parent: d3ca791

Attempt to add random seeds

Files changed (2):
  1. generic_ner.py (+18, -1)
  2. modeling_stacked.py (+19, -2)
generic_ner.py CHANGED
@@ -1,4 +1,21 @@
-import logging
+import torch
+import numpy as np
+import random
+import os
+
+# 1. Set random seeds
+seed = 2025
+torch.manual_seed(seed)
+torch.cuda.manual_seed_all(seed)
+np.random.seed(seed)
+random.seed(seed)
+os.environ["PYTHONHASHSEED"] = str(seed)
+
+# 2. Disable dropout & training randomness
+torch.use_deterministic_algorithms(True, warn_only=True)
+torch.backends.cudnn.deterministic = True
+
+torch.backends.cudnn.benchmark = False
 from transformers import Pipeline
 import numpy as np
 import torch
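
A note on this block: `torch.use_deterministic_algorithms` and the cuDNN flags make kernel selection deterministic, but they do not themselves disable dropout; dropout layers are only bypassed in eval mode, which `from_pretrained` already puts models into. Since this seeding also runs as an import-time side effect, an equivalent, reusable helper could look like the sketch below (the `make_deterministic` name is hypothetical, and `transformers.set_seed` is a stand-in the commit does not use):

import os
import torch
from transformers import set_seed

def make_deterministic(seed: int = 2025) -> None:
    """Seed every RNG the commit touches, then force deterministic kernels."""
    # Note: this only affects subprocesses; Python's own hash randomization
    # is fixed at interpreter startup.
    os.environ["PYTHONHASHSEED"] = str(seed)
    set_seed(seed)  # seeds random, numpy, torch and torch.cuda in one call
    # Prefer deterministic kernels; warn (rather than raise) for ops that
    # have no deterministic implementation.
    torch.use_deterministic_algorithms(True, warn_only=True)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False  # disable the autotuner's kernel search

make_deterministic()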
modeling_stacked.py CHANGED
@@ -1,3 +1,21 @@
+import torch
+import numpy as np
+import random
+import os
+
+# 1. Set random seeds
+seed = 2025
+torch.manual_seed(seed)
+torch.cuda.manual_seed_all(seed)
+np.random.seed(seed)
+random.seed(seed)
+os.environ["PYTHONHASHSEED"] = str(seed)
+
+# 2. Disable dropout & training randomness
+torch.use_deterministic_algorithms(True, warn_only=True)
+torch.backends.cudnn.deterministic = True
+torch.backends.cudnn.benchmark = False
+
 from transformers.modeling_outputs import TokenClassifierOutput
 import torch
 import torch.nn as nn
@@ -98,8 +116,7 @@ class ExtendedMultitaskTimeModelForTokenClassification(PreTrainedModel):
         bert_kwargs.pop("head_mask", None)
 
         outputs = self.model(**bert_kwargs)
-        # token_output = self.dropout(outputs[0]) # (B, T, H)
-        token_output = outputs[0] # (B, T, H)
+        token_output = self.dropout(outputs[0]) # (B, T, H)
         hidden_states = list(outputs.hidden_states) if output_hidden_states else None
 
         # Pass through additional transformer layers
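
The second hunk restores the previously commented-out dropout on the encoder output. Worth noting: `nn.Dropout` is the identity in eval mode, so with the model in eval mode (the usual inference setting) this line does not change outputs; it only matters for training-mode forward passes. A minimal standalone sketch (not from the repo; `p=0.1` is an assumed rate, since the diff does not show it) illustrating that:

import torch
import torch.nn as nn

torch.manual_seed(2025)
dropout = nn.Dropout(p=0.1)           # assumed rate; not shown in the diff
token_output = torch.randn(2, 4, 8)   # (B, T, H), matching the diff's comment

dropout.train()   # training mode: elements are zeroed, the rest rescaled by 1/(1-p)
assert not torch.equal(dropout(token_output), token_output)

dropout.eval()    # eval mode: dropout passes the tensor through unchanged
assert torch.equal(dropout(token_output), token_output)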