Feliks Zaslavskiy committed
Commit 30b8f71
Parent: 0c1e501

small updates

Files changed (3):
  1. app.py +1 -1
  2. quick_evaluate.py +1 -1
  3. train.py +2 -2
app.py CHANGED
@@ -14,7 +14,7 @@ from io import BytesIO
 #model = AlbertModel.from_pretrained('albert-' + model_size + '-v2')
 
 # For baseline 'sentence-transformers/paraphrase-albert-base-v2'
-model_name = 'output/training_OnlineConstrativeLoss-2023-03-14_00-40-03'
+model_name = 'output/training_OnlineConstrativeLoss-2023-03-14_01-24-44'
 
 similarity_threshold = 0.9
 
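The only change in app.py is pointing model_name at the newer fine-tuned checkpoint; similarity_threshold stays at 0.9. Below is a minimal sketch of how such a checkpoint and threshold are typically combined for pairwise matching; the is_match helper is illustrative and is not part of app.py.

# Sketch (not from the diff): load the new checkpoint and apply the 0.9
# cosine-similarity threshold to decide whether two strings match.
from sentence_transformers import SentenceTransformer, util

model_name = 'output/training_OnlineConstrativeLoss-2023-03-14_01-24-44'
similarity_threshold = 0.9

model_sbert = SentenceTransformer(model_name)

def is_match(text_a: str, text_b: str) -> bool:
    # Encode both strings and compare them with cosine similarity.
    emb_a, emb_b = model_sbert.encode([text_a, text_b], convert_to_tensor=True)
    score = util.cos_sim(emb_a, emb_b).item()
    return score >= similarity_threshold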
quick_evaluate.py CHANGED
@@ -12,7 +12,7 @@ from sentence_transformers import SentenceTransformer
 model_name = 'output/training_OnlineConstrativeLoss-2023-03-10_11-17-15'
 model_name = 'output/training_OnlineConstrativeLoss-2023-03-11_00-24-35'
 model_name = 'output/training_OnlineConstrativeLoss-2023-03-11_01-00-19'
-model_name='output/training_OnlineConstrativeLoss-2023-03-12_00-42-41'
+model_name='output/training_OnlineConstrativeLoss-2023-03-14_01-24-44'
 model_sbert = SentenceTransformer(model_name)
 
 def get_sbert_embedding(input_text):
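Only the last model_name assignment takes effect; the earlier lines are leftovers from previous runs. The body of get_sbert_embedding is not shown in the hunk, so the sketch below is an assumption of what it most likely does, namely wrap SentenceTransformer.encode on the selected checkpoint.

# Sketch (assumption; function body not visible in the diff).
from sentence_transformers import SentenceTransformer

model_name = 'output/training_OnlineConstrativeLoss-2023-03-14_01-24-44'
model_sbert = SentenceTransformer(model_name)

def get_sbert_embedding(input_text):
    # Returns a dense vector for a single string,
    # or one vector per string when a list is passed.
    return model_sbert.encode(input_text)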
train.py CHANGED
@@ -25,8 +25,8 @@ logger = logging.getLogger(__name__)
 
 #As base model, we use DistilBERT-base that was pre-trained on NLI and STSb data
 model = SentenceTransformer('sentence-transformers/paraphrase-albert-base-v2')
-num_epochs = 10
-train_batch_size = 10
+num_epochs = 12
+train_batch_size = 14
 
 #As distance metric, we use cosine distance (cosine_distance = 1-cosine_similarity)
 distance_metric = losses.SiameseDistanceMetric.COSINE_DISTANCE
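The training changes are hyperparameters only: num_epochs goes from 10 to 12 and train_batch_size from 10 to 14. The sketch below shows how these values typically feed a sentence-transformers OnlineContrastiveLoss run with the cosine distance metric; the margin, warmup_steps, example pairs, and output path are assumptions, not taken from train.py. (The checkpoint directories spell it 'OnlineConstrativeLoss'; the library class is OnlineContrastiveLoss.)

# Sketch under stated assumptions; train.py's full data pipeline is not shown in the diff.
from torch.utils.data import DataLoader
from sentence_transformers import SentenceTransformer, InputExample, losses

model = SentenceTransformer('sentence-transformers/paraphrase-albert-base-v2')
num_epochs = 12
train_batch_size = 14

distance_metric = losses.SiameseDistanceMetric.COSINE_DISTANCE

# Pairs labeled 1 (similar) / 0 (dissimilar); placeholder examples only.
train_examples = [
    InputExample(texts=['Acme Corp', 'Acme Corporation'], label=1),
    InputExample(texts=['Acme Corp', 'Globex Inc'], label=0),
]
train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=train_batch_size)

# margin=0.5 is the library default for OnlineContrastiveLoss; assumed here.
train_loss = losses.OnlineContrastiveLoss(model=model, distance_metric=distance_metric, margin=0.5)

model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    epochs=num_epochs,
    warmup_steps=100,  # assumption; not visible in the diff
    output_path='output/training_OnlineConstrativeLoss-example',
)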