---
base_model: sentence-transformers/all-mpnet-base-v2
datasets: []
language: []
library_name: sentence-transformers
metrics:
  - cosine_accuracy
  - cosine_accuracy_threshold
  - cosine_f1
  - cosine_f1_threshold
  - cosine_precision
  - cosine_recall
  - cosine_ap
  - dot_accuracy
  - dot_accuracy_threshold
  - dot_f1
  - dot_f1_threshold
  - dot_precision
  - dot_recall
  - dot_ap
  - manhattan_accuracy
  - manhattan_accuracy_threshold
  - manhattan_f1
  - manhattan_f1_threshold
  - manhattan_precision
  - manhattan_recall
  - manhattan_ap
  - euclidean_accuracy
  - euclidean_accuracy_threshold
  - euclidean_f1
  - euclidean_f1_threshold
  - euclidean_precision
  - euclidean_recall
  - euclidean_ap
  - max_accuracy
  - max_accuracy_threshold
  - max_f1
  - max_f1_threshold
  - max_precision
  - max_recall
  - max_ap
pipeline_tag: sentence-similarity
tags:
  - sentence-transformers
  - sentence-similarity
  - feature-extraction
  - generated_from_trainer
  - dataset_size:645861
  - loss:ContrastiveLoss
widget:
  - source_sentence: There was an Eye OS alert.
    sentences:
      - i see lots of tubes
      - On the door is lima mike zero twenty three north exit
      - EyeOS, that’s some kind of tech, right
  - source_sentence: how to use
    sentences:
      - how do i use it
      - This fallen panel might lead to the control room.
      - The rings appear to be completely unmoving now.
  - source_sentence: I'm unsure about this room's name how do I find out?
    sentences:
      - How do I identify the room I'm in without any obvious signs?
      - The door shows l m zero twenty three north exit
      - it reads Cryochamber Medical Support Systems
  - source_sentence: i see Cryochamber Atmospheric Sealing
    sentences:
      - Can you guide me on how to identify this room?
      - it's Laboratory Chemical Storage
      - it reads Cryochamber Atmospheric Sealing
  - source_sentence: floating up
    sentences:
      - All indicators are blue.
      - i can see an interface
      - Found a narrow corridor leading somewhere.
model-index:
  - name: SentenceTransformer based on sentence-transformers/all-mpnet-base-v2
    results:
      - task:
          type: binary-classification
          name: Binary Classification
        dataset:
          name: sts dev
          type: sts-dev
        metrics:
          - type: cosine_accuracy
            value: 0.9002097965885251
            name: Cosine Accuracy
          - type: cosine_accuracy_threshold
            value: 0.4494956135749817
            name: Cosine Accuracy Threshold
          - type: cosine_f1
            value: 0.8908462575859745
            name: Cosine F1
          - type: cosine_f1_threshold
            value: 0.41577932238578796
            name: Cosine F1 Threshold
          - type: cosine_precision
            value: 0.8739044154126013
            name: Cosine Precision
          - type: cosine_recall
            value: 0.908457968024755
            name: Cosine Recall
          - type: cosine_ap
            value: 0.9618224590785398
            name: Cosine Ap
          - type: dot_accuracy
            value: 0.9002097965885251
            name: Dot Accuracy
          - type: dot_accuracy_threshold
            value: 0.4494956135749817
            name: Dot Accuracy Threshold
          - type: dot_f1
            value: 0.8908462575859745
            name: Dot F1
          - type: dot_f1_threshold
            value: 0.4157792925834656
            name: Dot F1 Threshold
          - type: dot_precision
            value: 0.8739044154126013
            name: Dot Precision
          - type: dot_recall
            value: 0.908457968024755
            name: Dot Recall
          - type: dot_ap
            value: 0.961822458350164
            name: Dot Ap
          - type: manhattan_accuracy
            value: 0.8989979280958028
            name: Manhattan Accuracy
          - type: manhattan_accuracy_threshold
            value: 22.644113540649414
            name: Manhattan Accuracy Threshold
          - type: manhattan_f1
            value: 0.8901100449479366
            name: Manhattan F1
          - type: manhattan_f1_threshold
            value: 23.330610275268555
            name: Manhattan F1 Threshold
          - type: manhattan_precision
            value: 0.8757104438714686
            name: Manhattan Precision
          - type: manhattan_recall
            value: 0.9049911179875079
            name: Manhattan Recall
          - type: manhattan_ap
            value: 0.9615309074220045
            name: Manhattan Ap
          - type: euclidean_accuracy
            value: 0.9002097965885251
            name: Euclidean Accuracy
          - type: euclidean_accuracy_threshold
            value: 1.0492897033691406
            name: Euclidean Accuracy Threshold
          - type: euclidean_f1
            value: 0.8908462575859745
            name: Euclidean F1
          - type: euclidean_f1_threshold
            value: 1.080944538116455
            name: Euclidean F1 Threshold
          - type: euclidean_precision
            value: 0.8739044154126013
            name: Euclidean Precision
          - type: euclidean_recall
            value: 0.908457968024755
            name: Euclidean Recall
          - type: euclidean_ap
            value: 0.9618224553002042
            name: Euclidean Ap
          - type: max_accuracy
            value: 0.9002097965885251
            name: Max Accuracy
          - type: max_accuracy_threshold
            value: 22.644113540649414
            name: Max Accuracy Threshold
          - type: max_f1
            value: 0.8908462575859745
            name: Max F1
          - type: max_f1_threshold
            value: 23.330610275268555
            name: Max F1 Threshold
          - type: max_precision
            value: 0.8757104438714686
            name: Max Precision
          - type: max_recall
            value: 0.908457968024755
            name: Max Recall
          - type: max_ap
            value: 0.9618224590785398
            name: Max Ap
---

# SentenceTransformer based on sentence-transformers/all-mpnet-base-v2

This is a sentence-transformers model finetuned from sentence-transformers/all-mpnet-base-v2. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.

## Model Details

### Model Description

  • Model Type: Sentence Transformer
  • Base model: sentence-transformers/all-mpnet-base-v2
  • Maximum Sequence Length: 384 tokens
  • Output Dimensionality: 768 dimensions
  • Similarity Function: Cosine Similarity

### Model Sources

  • Documentation: [Sentence Transformers Documentation](https://sbert.net)
  • Repository: [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
  • Hugging Face: [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)

### Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 384, 'do_lower_case': False}) with Transformer model: MPNetModel
  (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
  (2): Normalize()
)
```
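
If the Sentence Transformers library is unavailable, the three modules above can be reproduced with plain `transformers`. The following is a minimal sketch of the same pipeline (not the library's internal code): encode, mean-pool over non-padding tokens, then L2-normalize.

```python
import torch
import torch.nn.functional as F
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("IconicAI/all-mpnet-base-v2-anteater")
model = AutoModel.from_pretrained("IconicAI/all-mpnet-base-v2-anteater")

def embed(sentences):
    # Tokenize with the same 384-token cap as the SentenceTransformer config.
    batch = tokenizer(sentences, padding=True, truncation=True,
                      max_length=384, return_tensors="pt")
    with torch.no_grad():
        token_embeddings = model(**batch).last_hidden_state  # (batch, seq_len, 768)
    # Mean pooling over real tokens only (padding masked out), as in module (1).
    mask = batch["attention_mask"].unsqueeze(-1).float()
    pooled = (token_embeddings * mask).sum(dim=1) / mask.sum(dim=1).clamp(min=1e-9)
    # Module (2): L2 normalization, so dot product equals cosine similarity.
    return F.normalize(pooled, p=2, dim=1)

print(embed(["floating up", "i can see an interface"]).shape)  # torch.Size([2, 768])
```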

## Usage

### Direct Usage (Sentence Transformers)

First install the Sentence Transformers library:

```bash
pip install -U sentence-transformers
```

Then you can load this model and run inference.

```python
from sentence_transformers import SentenceTransformer

# Download from the 🤗 Hub
model = SentenceTransformer("IconicAI/all-mpnet-base-v2-anteater")
# Run inference
sentences = [
    'floating up',
    'i can see an interface',
    'All indicators are blue.',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
```
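
The library's utilities also work with this model out of the box. For example, paraphrase mining (one of the use cases listed above) scores all sentence pairs in a corpus; the corpus below is invented for illustration:

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import paraphrase_mining

model = SentenceTransformer("IconicAI/all-mpnet-base-v2-anteater")

# Illustrative corpus; any list of strings works.
corpus = [
    "Rings stopped moving.",
    "I notice the rings are not spinning anymore.",
    "how to use",
    "how do i use it",
]

# Each result is a [score, index1, index2] triple, sorted by decreasing score.
for score, i, j in paraphrase_mining(model, corpus):
    print(f"{score:.3f}  {corpus[i]!r}  <->  {corpus[j]!r}")
```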

## Evaluation

### Metrics

#### Binary Classification

  • Dataset: sts-dev
  • Evaluated with: BinaryClassificationEvaluator

| Metric                       | Value   |
|:-----------------------------|:--------|
| cosine_accuracy              | 0.9002  |
| cosine_accuracy_threshold    | 0.4495  |
| cosine_f1                    | 0.8908  |
| cosine_f1_threshold          | 0.4158  |
| cosine_precision             | 0.8739  |
| cosine_recall                | 0.9085  |
| cosine_ap                    | 0.9618  |
| dot_accuracy                 | 0.9002  |
| dot_accuracy_threshold       | 0.4495  |
| dot_f1                       | 0.8908  |
| dot_f1_threshold             | 0.4158  |
| dot_precision                | 0.8739  |
| dot_recall                   | 0.9085  |
| dot_ap                       | 0.9618  |
| manhattan_accuracy           | 0.899   |
| manhattan_accuracy_threshold | 22.6441 |
| manhattan_f1                 | 0.8901  |
| manhattan_f1_threshold       | 23.3306 |
| manhattan_precision          | 0.8757  |
| manhattan_recall             | 0.905   |
| manhattan_ap                 | 0.9615  |
| euclidean_accuracy           | 0.9002  |
| euclidean_accuracy_threshold | 1.0493  |
| euclidean_f1                 | 0.8908  |
| euclidean_f1_threshold       | 1.0809  |
| euclidean_precision          | 0.8739  |
| euclidean_recall             | 0.9085  |
| euclidean_ap                 | 0.9618  |
| max_accuracy                 | 0.9002  |
| max_accuracy_threshold       | 22.6441 |
| max_f1                       | 0.8908  |
| max_f1_threshold             | 23.3306 |
| max_precision                | 0.8757  |
| max_recall                   | 0.9085  |
| max_ap                       | 0.9618  |
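
The `*_threshold` values are the decision boundaries the evaluator selected on the dev set, so they can be reused directly for binary paraphrase decisions. Below is a minimal sketch (the helper name and sentence pair are illustrative, not part of the model's API) that applies the reported cosine accuracy threshold of roughly 0.4495:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("IconicAI/all-mpnet-base-v2-anteater")

# cosine_accuracy_threshold from the table above; re-tune it for your own data.
THRESHOLD = 0.4495

def is_paraphrase(a: str, b: str) -> bool:
    emb = model.encode([a, b])
    score = float(model.similarity(emb[0:1], emb[1:2])[0][0])
    return score > THRESHOLD

print(is_paraphrase("Rings stopped moving.",
                    "I notice the rings are not spinning anymore."))
```

Because the model ends in a Normalize() module, its embeddings are unit-length, which is why the dot_* rows above effectively match the cosine_* rows.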

## Training Details

### Training Dataset

#### Unnamed Dataset

  • Size: 645,861 training samples
  • Columns: example1, example2, and label
  • Approximate statistics based on the first 1000 samples:

    |         | example1                                         | example2                                         | label      |
    |:--------|:-------------------------------------------------|:-------------------------------------------------|:-----------|
    | type    | string                                           | string                                           | int        |
    | details | min: 3 tokens, mean: 9.02 tokens, max: 25 tokens | min: 3 tokens, mean: 9.19 tokens, max: 23 tokens | 1: 100.00% |

  • Samples:

    | example1                                                             | example2                                         | label |
    |:---------------------------------------------------------------------|:-------------------------------------------------|:------|
    | Drones are present all around here.                                  | What are those drones doing buzzing around here? | 1     |
    | am i the only one                                                    | am i the only one alive on this ship             | 1     |
    | I’m in a room with a door in front of me and a terminal on the wall  | mechanics room                                   | 1     |

  • Loss: ContrastiveLoss with these parameters:

    ```json
    {
        "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
        "margin": 1.0,
        "size_average": true
    }
    ```
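
Note that the sampled labels are all 1, so in this corpus only the attracting term of the contrastive loss is active; the margin only matters for negative pairs. As a rough sketch (mirroring, not copying, the library's ContrastiveLoss), the objective with the parameters above is:

```python
import torch
import torch.nn.functional as F

def contrastive_loss(emb1: torch.Tensor, emb2: torch.Tensor,
                     labels: torch.Tensor, margin: float = 1.0) -> torch.Tensor:
    """Sketch of ContrastiveLoss with SiameseDistanceMetric.COSINE_DISTANCE.

    Positive pairs (label 1) are pulled together by penalizing their cosine
    distance; negative pairs (label 0) are pushed apart until their distance
    exceeds the margin. size_average=True corresponds to the final .mean().
    """
    distance = 1 - F.cosine_similarity(emb1, emb2)  # cosine distance, in [0, 2]
    positive = labels.float() * distance.pow(2)                         # attract matches
    negative = (1 - labels.float()) * F.relu(margin - distance).pow(2)  # repel mismatches
    return (0.5 * (positive + negative)).mean()
```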
    

### Evaluation Dataset

#### Unnamed Dataset

  • Size: 76,741 evaluation samples
  • Columns: example1, example2, and label
  • Approximate statistics based on the first 1000 samples:

    |         | example1                                         | example2                                         | label      |
    |:--------|:-------------------------------------------------|:-------------------------------------------------|:-----------|
    | type    | string                                           | string                                           | int        |
    | details | min: 3 tokens, mean: 9.25 tokens, max: 21 tokens | min: 3 tokens, mean: 9.15 tokens, max: 19 tokens | 1: 100.00% |

  • Samples:

    | example1                         | example2                                     | label |
    |:---------------------------------|:---------------------------------------------|:------|
    | Not much, how about you?         | Nothing, you?                                | 1     |
    | Rings stopped moving.            | I notice the rings are not spinning anymore. | 1     |
    | it's Laboratory Chemical Storage | the switch is Laboratory Chemical Storage    | 1     |

  • Loss: ContrastiveLoss with these parameters:

    ```json
    {
        "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
        "margin": 1.0,
        "size_average": true
    }
    ```
    

### Training Hyperparameters

#### Non-Default Hyperparameters

  • eval_strategy: steps
  • per_device_train_batch_size: 256
  • per_device_eval_batch_size: 256
  • learning_rate: 1e-07
  • weight_decay: 0.01
  • max_grad_norm: 0.02
  • num_train_epochs: 5
  • warmup_steps: 100
  • bf16: True
  • eval_on_start: True
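
For reference, a run with these non-default settings could be reconstructed roughly as follows. This is a sketch, not the original training script: the actual pair datasets are not published, so the tiny `Dataset.from_dict` examples below are placeholders, and `output_dir` is an arbitrary name.

```python
from datasets import Dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
)
from sentence_transformers.losses import ContrastiveLoss

model = SentenceTransformer("sentence-transformers/all-mpnet-base-v2")

# Placeholder data: the real datasets hold 645,861 / 76,741 pairs with these columns.
train_dataset = Dataset.from_dict({
    "example1": ["how to use"],
    "example2": ["how do i use it"],
    "label": [1],
})
eval_dataset = train_dataset

args = SentenceTransformerTrainingArguments(
    output_dir="all-mpnet-base-v2-anteater",  # arbitrary
    eval_strategy="steps",
    per_device_train_batch_size=256,
    per_device_eval_batch_size=256,
    learning_rate=1e-7,
    weight_decay=0.01,
    max_grad_norm=0.02,
    num_train_epochs=5,
    warmup_steps=100,
    bf16=True,
    eval_on_start=True,
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset=train_dataset,
    eval_dataset=eval_dataset,
    loss=ContrastiveLoss(model, margin=1.0, size_average=True),
)
trainer.train()
```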

#### All Hyperparameters

<details><summary>Click to expand</summary>

  • overwrite_output_dir: False
  • do_predict: False
  • eval_strategy: steps
  • prediction_loss_only: True
  • per_device_train_batch_size: 256
  • per_device_eval_batch_size: 256
  • per_gpu_train_batch_size: None
  • per_gpu_eval_batch_size: None
  • gradient_accumulation_steps: 1
  • eval_accumulation_steps: None
  • torch_empty_cache_steps: None
  • learning_rate: 1e-07
  • weight_decay: 0.01
  • adam_beta1: 0.9
  • adam_beta2: 0.999
  • adam_epsilon: 1e-08
  • max_grad_norm: 0.02
  • num_train_epochs: 5
  • max_steps: -1
  • lr_scheduler_type: linear
  • lr_scheduler_kwargs: {}
  • warmup_ratio: 0.0
  • warmup_steps: 100
  • log_level: passive
  • log_level_replica: warning
  • log_on_each_node: True
  • logging_nan_inf_filter: True
  • save_safetensors: True
  • save_on_each_node: False
  • save_only_model: False
  • restore_callback_states_from_checkpoint: False
  • no_cuda: False
  • use_cpu: False
  • use_mps_device: False
  • seed: 42
  • data_seed: None
  • jit_mode_eval: False
  • use_ipex: False
  • bf16: True
  • fp16: False
  • fp16_opt_level: O1
  • half_precision_backend: auto
  • bf16_full_eval: False
  • fp16_full_eval: False
  • tf32: None
  • local_rank: 0
  • ddp_backend: None
  • tpu_num_cores: None
  • tpu_metrics_debug: False
  • debug: []
  • dataloader_drop_last: False
  • dataloader_num_workers: 0
  • dataloader_prefetch_factor: None
  • past_index: -1
  • disable_tqdm: False
  • remove_unused_columns: True
  • label_names: None
  • load_best_model_at_end: False
  • ignore_data_skip: False
  • fsdp: []
  • fsdp_min_num_params: 0
  • fsdp_config: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
  • fsdp_transformer_layer_cls_to_wrap: None
  • accelerator_config: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
  • deepspeed: None
  • label_smoothing_factor: 0.0
  • optim: adamw_torch
  • optim_args: None
  • adafactor: False
  • group_by_length: False
  • length_column_name: length
  • ddp_find_unused_parameters: None
  • ddp_bucket_cap_mb: None
  • ddp_broadcast_buffers: False
  • dataloader_pin_memory: True
  • dataloader_persistent_workers: False
  • skip_memory_metrics: True
  • use_legacy_prediction_loop: False
  • push_to_hub: False
  • resume_from_checkpoint: None
  • hub_model_id: None
  • hub_strategy: every_save
  • hub_private_repo: False
  • hub_always_push: False
  • gradient_checkpointing: False
  • gradient_checkpointing_kwargs: None
  • include_inputs_for_metrics: False
  • eval_do_concat_batches: True
  • fp16_backend: auto
  • push_to_hub_model_id: None
  • push_to_hub_organization: None
  • mp_parameters:
  • auto_find_batch_size: False
  • full_determinism: False
  • torchdynamo: None
  • ray_scope: last
  • ddp_timeout: 1800
  • torch_compile: False
  • torch_compile_backend: None
  • torch_compile_mode: None
  • dispatch_batches: None
  • split_batches: None
  • include_tokens_per_second: False
  • include_num_input_tokens_seen: False
  • neftune_noise_alpha: None
  • optim_target_modules: None
  • batch_eval_metrics: False
  • eval_on_start: True
  • use_liger_kernel: False
  • eval_use_gather_object: False
  • batch_sampler: batch_sampler
  • multi_dataset_batch_sampler: proportional

</details>

### Training Logs

<details><summary>Click to expand</summary>

Epoch | Step | Training Loss | Validation Loss | sts-dev_max_ap
0 0 - 0.0764 0.9175
0.0040 10 0.0772 - -
0.0079 20 0.0783 - -
0.0119 30 0.0775 - -
0.0159 40 0.0756 - -
0.0198 50 0.075 - -
0.0238 60 0.0777 - -
0.0277 70 0.0784 - -
0.0317 80 0.0721 - -
0.0357 90 0.0755 - -
0.0396 100 0.0778 - -
0.0436 110 0.0735 - -
0.0476 120 0.0753 - -
0.0515 130 0.0741 - -
0.0555 140 0.0791 - -
0.0595 150 0.0753 - -
0.0634 160 0.0748 - -
0.0674 170 0.0709 - -
0.0713 180 0.0738 - -
0.0753 190 0.0759 - -
0.0793 200 0.0703 - -
0.0832 210 0.0724 - -
0.0872 220 0.0726 - -
0.0912 230 0.0734 - -
0.0951 240 0.0718 - -
0.0991 250 0.0776 - -
0.1031 260 0.0757 - -
0.1070 270 0.0722 - -
0.1110 280 0.0746 - -
0.1149 290 0.0718 - -
0.1189 300 0.0733 - -
0.1229 310 0.0725 - -
0.1268 320 0.0724 - -
0.1308 330 0.0681 - -
0.1348 340 0.0735 - -
0.1387 350 0.0716 - -
0.1427 360 0.0698 - -
0.1467 370 0.072 - -
0.1506 380 0.071 - -
0.1546 390 0.0713 - -
0.1585 400 0.073 - -
0.1625 410 0.077 - -
0.1665 420 0.072 - -
0.1704 430 0.0689 - -
0.1744 440 0.0708 - -
0.1784 450 0.0687 - -
0.1823 460 0.0692 - -
0.1863 470 0.0715 - -
0.1902 480 0.0707 - -
0.1942 490 0.0671 - -
0.1982 500 0.0741 0.0703 0.9245
0.2021 510 0.0681 - -
0.2061 520 0.0749 - -
0.2101 530 0.0718 - -
0.2140 540 0.0689 - -
0.2180 550 0.0733 - -
0.2220 560 0.067 - -
0.2259 570 0.0685 - -
0.2299 580 0.07 - -
0.2338 590 0.0683 - -
0.2378 600 0.0693 - -
0.2418 610 0.0705 - -
0.2457 620 0.0707 - -
0.2497 630 0.0703 - -
0.2537 640 0.068 - -
0.2576 650 0.0682 - -
0.2616 660 0.0654 - -
0.2656 670 0.0682 - -
0.2695 680 0.0698 - -
0.2735 690 0.0701 - -
0.2774 700 0.0674 - -
0.2814 710 0.0669 - -
0.2854 720 0.0677 - -
0.2893 730 0.0674 - -
0.2933 740 0.0682 - -
0.2973 750 0.0677 - -
0.3012 760 0.0661 - -
0.3052 770 0.0634 - -
0.3092 780 0.0658 - -
0.3131 790 0.0687 - -
0.3171 800 0.069 - -
0.3210 810 0.0665 - -
0.3250 820 0.0648 - -
0.3290 830 0.0656 - -
0.3329 840 0.0672 - -
0.3369 850 0.0663 - -
0.3409 860 0.0666 - -
0.3448 870 0.0644 - -
0.3488 880 0.065 - -
0.3528 890 0.0666 - -
0.3567 900 0.0657 - -
0.3607 910 0.0636 - -
0.3646 920 0.0681 - -
0.3686 930 0.0671 - -
0.3726 940 0.0653 - -
0.3765 950 0.0643 - -
0.3805 960 0.0637 - -
0.3845 970 0.066 - -
0.3884 980 0.0645 - -
0.3924 990 0.0628 - -
0.3964 1000 0.0627 0.0653 0.9325
0.4003 1010 0.0647 - -
0.4043 1020 0.0649 - -
0.4082 1030 0.0637 - -
0.4122 1040 0.0648 - -
0.4162 1050 0.0647 - -
0.4201 1060 0.0646 - -
0.4241 1070 0.0659 - -
0.4281 1080 0.0641 - -
0.4320 1090 0.0609 - -
0.4360 1100 0.0642 - -
0.4400 1110 0.0614 - -
0.4439 1120 0.0603 - -
0.4479 1130 0.0613 - -
0.4518 1140 0.0646 - -
0.4558 1150 0.0619 - -
0.4598 1160 0.0611 - -
0.4637 1170 0.0638 - -
0.4677 1180 0.0636 - -
0.4717 1190 0.0647 - -
0.4756 1200 0.0622 - -
0.4796 1210 0.0642 - -
0.4836 1220 0.0607 - -
0.4875 1230 0.0623 - -
0.4915 1240 0.0614 - -
0.4954 1250 0.0643 - -
0.4994 1260 0.0614 - -
0.5034 1270 0.0599 - -
0.5073 1280 0.0615 - -
0.5113 1290 0.0595 - -
0.5153 1300 0.061 - -
0.5192 1310 0.0623 - -
0.5232 1320 0.0646 - -
0.5272 1330 0.0621 - -
0.5311 1340 0.0606 - -
0.5351 1350 0.0597 - -
0.5390 1360 0.0621 - -
0.5430 1370 0.0586 - -
0.5470 1380 0.0618 - -
0.5509 1390 0.0601 - -
0.5549 1400 0.0578 - -
0.5589 1410 0.0628 - -
0.5628 1420 0.0595 - -
0.5668 1430 0.0576 - -
0.5707 1440 0.0606 - -
0.5747 1450 0.0618 - -
0.5787 1460 0.0591 - -
0.5826 1470 0.0598 - -
0.5866 1480 0.0611 - -
0.5906 1490 0.0594 - -
0.5945 1500 0.0616 0.0619 0.9393
0.5985 1510 0.0592 - -
0.6025 1520 0.0597 - -
0.6064 1530 0.0619 - -
0.6104 1540 0.0595 - -
0.6143 1550 0.0598 - -
0.6183 1560 0.0609 - -
0.6223 1570 0.059 - -
0.6262 1580 0.0601 - -
0.6302 1590 0.0595 - -
0.6342 1600 0.059 - -
0.6381 1610 0.0606 - -
0.6421 1620 0.0591 - -
0.6461 1630 0.0617 - -
0.6500 1640 0.0592 - -
0.6540 1650 0.0588 - -
0.6579 1660 0.0587 - -
0.6619 1670 0.0585 - -
0.6659 1680 0.0558 - -
0.6698 1690 0.057 - -
0.6738 1700 0.0598 - -
0.6778 1710 0.0567 - -
0.6817 1720 0.0555 - -
0.6857 1730 0.0604 - -
0.6897 1740 0.0558 - -
0.6936 1750 0.0572 - -
0.6976 1760 0.0577 - -
0.7015 1770 0.0587 - -
0.7055 1780 0.0589 - -
0.7095 1790 0.0598 - -
0.7134 1800 0.0583 - -
0.7174 1810 0.058 - -
0.7214 1820 0.0564 - -
0.7253 1830 0.0589 - -
0.7293 1840 0.0557 - -
0.7333 1850 0.0586 - -
0.7372 1860 0.0601 - -
0.7412 1870 0.0556 - -
0.7451 1880 0.0572 - -
0.7491 1890 0.0574 - -
0.7531 1900 0.0583 - -
0.7570 1910 0.0573 - -
0.7610 1920 0.0555 - -
0.7650 1930 0.0561 - -
0.7689 1940 0.0579 - -
0.7729 1950 0.0557 - -
0.7769 1960 0.0558 - -
0.7808 1970 0.0589 - -
0.7848 1980 0.0572 - -
0.7887 1990 0.0572 - -
0.7927 2000 0.0549 0.0592 0.9444
0.7967 2010 0.0548 - -
0.8006 2020 0.0569 - -
0.8046 2030 0.058 - -
0.8086 2040 0.0581 - -
0.8125 2050 0.0585 - -
0.8165 2060 0.0542 - -
0.8205 2070 0.0558 - -
0.8244 2080 0.0569 - -
0.8284 2090 0.0564 - -
0.8323 2100 0.0552 - -
0.8363 2110 0.0559 - -
0.8403 2120 0.0534 - -
0.8442 2130 0.0543 - -
0.8482 2140 0.0573 - -
0.8522 2150 0.0546 - -
0.8561 2160 0.0554 - -
0.8601 2170 0.0568 - -
0.8641 2180 0.0544 - -
0.8680 2190 0.0547 - -
0.8720 2200 0.0549 - -
0.8759 2210 0.0544 - -
0.8799 2220 0.058 - -
0.8839 2230 0.0557 - -
0.8878 2240 0.0551 - -
0.8918 2250 0.0558 - -
0.8958 2260 0.0554 - -
0.8997 2270 0.053 - -
0.9037 2280 0.0552 - -
0.9076 2290 0.0549 - -
0.9116 2300 0.0533 - -
0.9156 2310 0.0543 - -
0.9195 2320 0.0531 - -
0.9235 2330 0.0553 - -
0.9275 2340 0.0542 - -
0.9314 2350 0.0537 - -
0.9354 2360 0.0536 - -
0.9394 2370 0.055 - -
0.9433 2380 0.0551 - -
0.9473 2390 0.0532 - -
0.9512 2400 0.0556 - -
0.9552 2410 0.0548 - -
0.9592 2420 0.0533 - -
0.9631 2430 0.0536 - -
0.9671 2440 0.0549 - -
0.9711 2450 0.0548 - -
0.9750 2460 0.0557 - -
0.9790 2470 0.055 - -
0.9830 2480 0.0535 - -
0.9869 2490 0.0564 - -
0.9909 2500 0.0526 0.0572 0.9482
0.9948 2510 0.0547 - -
0.9988 2520 0.054 - -
1.0028 2530 0.0527 - -
1.0067 2540 0.0522 - -
1.0107 2550 0.0535 - -
1.0147 2560 0.0557 - -
1.0186 2570 0.052 - -
1.0226 2580 0.055 - -
1.0266 2590 0.0542 - -
1.0305 2600 0.0539 - -
1.0345 2610 0.0523 - -
1.0384 2620 0.0507 - -
1.0424 2630 0.0517 - -
1.0464 2640 0.0543 - -
1.0503 2650 0.0543 - -
1.0543 2660 0.054 - -
1.0583 2670 0.0536 - -
1.0622 2680 0.0531 - -
1.0662 2690 0.0537 - -
1.0702 2700 0.0521 - -
1.0741 2710 0.054 - -
1.0781 2720 0.0513 - -
1.0820 2730 0.0496 - -
1.0860 2740 0.0519 - -
1.0900 2750 0.0529 - -
1.0939 2760 0.0542 - -
1.0979 2770 0.0526 - -
1.1019 2780 0.051 - -
1.1058 2790 0.0531 - -
1.1098 2800 0.0539 - -
1.1138 2810 0.0521 - -
1.1177 2820 0.0539 - -
1.1217 2830 0.0505 - -
1.1256 2840 0.0513 - -
1.1296 2850 0.0521 - -
1.1336 2860 0.0537 - -
1.1375 2870 0.0514 - -
1.1415 2880 0.0511 - -
1.1455 2890 0.0495 - -
1.1494 2900 0.0505 - -
1.1534 2910 0.0517 - -
1.1574 2920 0.0509 - -
1.1613 2930 0.0507 - -
1.1653 2940 0.0535 - -
1.1692 2950 0.0511 - -
1.1732 2960 0.0507 - -
1.1772 2970 0.052 - -
1.1811 2980 0.0494 - -
1.1851 2990 0.0524 - -
1.1891 3000 0.052 0.0555 0.9512
1.1930 3010 0.0536 - -
1.1970 3020 0.0502 - -
1.2010 3030 0.0504 - -
1.2049 3040 0.0532 - -
1.2089 3050 0.0529 - -
1.2128 3060 0.0514 - -
1.2168 3070 0.0504 - -
1.2208 3080 0.0501 - -
1.2247 3090 0.0493 - -
1.2287 3100 0.0507 - -
1.2327 3110 0.0501 - -
1.2366 3120 0.0502 - -
1.2406 3130 0.0491 - -
1.2446 3140 0.0495 - -
1.2485 3150 0.051 - -
1.2525 3160 0.0495 - -
1.2564 3170 0.0534 - -
1.2604 3180 0.0483 - -
1.2644 3190 0.049 - -
1.2683 3200 0.0532 - -
1.2723 3210 0.0481 - -
1.2763 3220 0.0496 - -
1.2802 3230 0.0504 - -
1.2842 3240 0.0477 - -
1.2881 3250 0.0483 - -
1.2921 3260 0.0493 - -
1.2961 3270 0.0491 - -
1.3000 3280 0.0489 - -
1.3040 3290 0.0493 - -
1.3080 3300 0.0507 - -
1.3119 3310 0.0482 - -
1.3159 3320 0.0506 - -
1.3199 3330 0.0486 - -
1.3238 3340 0.0487 - -
1.3278 3350 0.0482 - -
1.3317 3360 0.0492 - -
1.3357 3370 0.049 - -
1.3397 3380 0.0485 - -
1.3436 3390 0.0501 - -
1.3476 3400 0.0505 - -
1.3516 3410 0.0508 - -
1.3555 3420 0.0481 - -
1.3595 3430 0.049 - -
1.3635 3440 0.0495 - -
1.3674 3450 0.0507 - -
1.3714 3460 0.0478 - -
1.3753 3470 0.0522 - -
1.3793 3480 0.0505 - -
1.3833 3490 0.0489 - -
1.3872 3500 0.0504 0.0541 0.9537
1.3912 3510 0.0492 - -
1.3952 3520 0.0469 - -
1.3991 3530 0.0495 - -
1.4031 3540 0.0486 - -
1.4071 3550 0.0506 - -
1.4110 3560 0.0506 - -
1.4150 3570 0.0475 - -
1.4189 3580 0.0483 - -
1.4229 3590 0.0471 - -
1.4269 3600 0.0477 - -
1.4308 3610 0.0494 - -
1.4348 3620 0.0481 - -
1.4388 3630 0.0484 - -
1.4427 3640 0.0505 - -
1.4467 3650 0.0498 - -
1.4507 3660 0.0482 - -
1.4546 3670 0.0488 - -
1.4586 3680 0.0458 - -
1.4625 3690 0.0479 - -
1.4665 3700 0.0474 - -
1.4705 3710 0.0471 - -
1.4744 3720 0.0498 - -
1.4784 3730 0.0495 - -
1.4824 3740 0.0505 - -
1.4863 3750 0.0487 - -
1.4903 3760 0.0485 - -
1.4943 3770 0.0479 - -
1.4982 3780 0.0475 - -
1.5022 3790 0.0462 - -
1.5061 3800 0.0487 - -
1.5101 3810 0.0476 - -
1.5141 3820 0.0485 - -
1.5180 3830 0.0489 - -
1.5220 3840 0.0475 - -
1.5260 3850 0.0484 - -
1.5299 3860 0.0465 - -
1.5339 3870 0.0491 - -
1.5379 3880 0.0477 - -
1.5418 3890 0.0475 - -
1.5458 3900 0.0489 - -
1.5497 3910 0.0459 - -
1.5537 3920 0.0488 - -
1.5577 3930 0.0475 - -
1.5616 3940 0.049 - -
1.5656 3950 0.0469 - -
1.5696 3960 0.0493 - -
1.5735 3970 0.0481 - -
1.5775 3980 0.0478 - -
1.5815 3990 0.0456 - -
1.5854 4000 0.047 0.0528 0.9556
1.5894 4010 0.0481 - -
1.5933 4020 0.0468 - -
1.5973 4030 0.0467 - -
1.6013 4040 0.0448 - -
1.6052 4050 0.0491 - -
1.6092 4060 0.0476 - -
1.6132 4070 0.0459 - -
1.6171 4080 0.0456 - -
1.6211 4090 0.0476 - -
1.6250 4100 0.0443 - -
1.6290 4110 0.0477 - -
1.6330 4120 0.0476 - -
1.6369 4130 0.0466 - -
1.6409 4140 0.0457 - -
1.6449 4150 0.0468 - -
1.6488 4160 0.0462 - -
1.6528 4170 0.0476 - -
1.6568 4180 0.0464 - -
1.6607 4190 0.0467 - -
1.6647 4200 0.0455 - -
1.6686 4210 0.0455 - -
1.6726 4220 0.0474 - -
1.6766 4230 0.0469 - -
1.6805 4240 0.0453 - -
1.6845 4250 0.0464 - -
1.6885 4260 0.0448 - -
1.6924 4270 0.0448 - -
1.6964 4280 0.0461 - -
1.7004 4290 0.0444 - -
1.7043 4300 0.045 - -
1.7083 4310 0.047 - -
1.7122 4320 0.0473 - -
1.7162 4330 0.0453 - -
1.7202 4340 0.0461 - -
1.7241 4350 0.0464 - -
1.7281 4360 0.0474 - -
1.7321 4370 0.0444 - -
1.7360 4380 0.0465 - -
1.7400 4390 0.0454 - -
1.7440 4400 0.045 - -
1.7479 4410 0.0444 - -
1.7519 4420 0.0451 - -
1.7558 4430 0.0454 - -
1.7598 4440 0.0471 - -
1.7638 4450 0.0467 - -
1.7677 4460 0.0466 - -
1.7717 4470 0.0452 - -
1.7757 4480 0.0466 - -
1.7796 4490 0.046 - -
1.7836 4500 0.0462 0.0518 0.9570
1.7876 4510 0.0459 - -
1.7915 4520 0.0455 - -
1.7955 4530 0.0456 - -
1.7994 4540 0.0476 - -
1.8034 4550 0.0465 - -
1.8074 4560 0.0447 - -
1.8113 4570 0.0438 - -
1.8153 4580 0.0463 - -
1.8193 4590 0.0452 - -
1.8232 4600 0.0454 - -
1.8272 4610 0.0459 - -
1.8312 4620 0.044 - -
1.8351 4630 0.0445 - -
1.8391 4640 0.0435 - -
1.8430 4650 0.0435 - -
1.8470 4660 0.0442 - -
1.8510 4670 0.0424 - -
1.8549 4680 0.0438 - -
1.8589 4690 0.0451 - -
1.8629 4700 0.0451 - -
1.8668 4710 0.0455 - -
1.8708 4720 0.0441 - -
1.8748 4730 0.0432 - -
1.8787 4740 0.0445 - -
1.8827 4750 0.0482 - -
1.8866 4760 0.045 - -
1.8906 4770 0.0443 - -
1.8946 4780 0.0451 - -
1.8985 4790 0.0446 - -
1.9025 4800 0.0432 - -
1.9065 4810 0.0432 - -
1.9104 4820 0.0465 - -
1.9144 4830 0.0462 - -
1.9184 4840 0.0443 - -
1.9223 4850 0.0447 - -
1.9263 4860 0.0459 - -
1.9302 4870 0.043 - -
1.9342 4880 0.0456 - -
1.9382 4890 0.0444 - -
1.9421 4900 0.0455 - -
1.9461 4910 0.0427 - -
1.9501 4920 0.0461 - -
1.9540 4930 0.0454 - -
1.9580 4940 0.0447 - -
1.9620 4950 0.0434 - -
1.9659 4960 0.0444 - -
1.9699 4970 0.0451 - -
1.9738 4980 0.044 - -
1.9778 4990 0.0444 - -
1.9818 5000 0.0439 0.0508 0.9581
1.9857 5010 0.0427 - -
1.9897 5020 0.0439 - -
1.9937 5030 0.0427 - -
1.9976 5040 0.0435 - -
2.0016 5050 0.0445 - -
2.0055 5060 0.0433 - -
2.0095 5070 0.0433 - -
2.0135 5080 0.0435 - -
2.0174 5090 0.0438 - -
2.0214 5100 0.0431 - -
2.0254 5110 0.0422 - -
2.0293 5120 0.0436 - -
2.0333 5130 0.0455 - -
2.0373 5140 0.044 - -
2.0412 5150 0.0423 - -
2.0452 5160 0.045 - -
2.0491 5170 0.0422 - -
2.0531 5180 0.0435 - -
2.0571 5190 0.0419 - -
2.0610 5200 0.0427 - -
2.0650 5210 0.0447 - -
2.0690 5220 0.0443 - -
2.0729 5230 0.0429 - -
2.0769 5240 0.0436 - -
2.0809 5250 0.0436 - -
2.0848 5260 0.0439 - -
2.0888 5270 0.0433 - -
2.0927 5280 0.0434 - -
2.0967 5290 0.0428 - -
2.1007 5300 0.0431 - -
2.1046 5310 0.0441 - -
2.1086 5320 0.0443 - -
2.1126 5330 0.0442 - -
2.1165 5340 0.044 - -
2.1205 5350 0.0431 - -
2.1245 5360 0.0432 - -
2.1284 5370 0.0421 - -
2.1324 5380 0.0439 - -
2.1363 5390 0.0436 - -
2.1403 5400 0.0428 - -
2.1443 5410 0.044 - -
2.1482 5420 0.0428 - -
2.1522 5430 0.0428 - -
2.1562 5440 0.0418 - -
2.1601 5450 0.0439 - -
2.1641 5460 0.0415 - -
2.1681 5470 0.0415 - -
2.1720 5480 0.0418 - -
2.1760 5490 0.042 - -
2.1799 5500 0.0418 0.0500 0.9591
2.1839 5510 0.0434 - -
2.1879 5520 0.0424 - -
2.1918 5530 0.0425 - -
2.1958 5540 0.0427 - -
2.1998 5550 0.0418 - -
2.2037 5560 0.04 - -
2.2077 5570 0.0426 - -
2.2117 5580 0.0413 - -
2.2156 5590 0.0429 - -
2.2196 5600 0.0428 - -
2.2235 5610 0.044 - -
2.2275 5620 0.0423 - -
2.2315 5630 0.0398 - -
2.2354 5640 0.0427 - -
2.2394 5650 0.0419 - -
2.2434 5660 0.0424 - -
2.2473 5670 0.0422 - -
2.2513 5680 0.0426 - -
2.2553 5690 0.0434 - -
2.2592 5700 0.044 - -
2.2632 5710 0.0427 - -
2.2671 5720 0.0431 - -
2.2711 5730 0.0416 - -
2.2751 5740 0.0428 - -
2.2790 5750 0.0418 - -
2.2830 5760 0.0418 - -
2.2870 5770 0.0421 - -
2.2909 5780 0.041 - -
2.2949 5790 0.0419 - -
2.2989 5800 0.0422 - -
2.3028 5810 0.0428 - -
2.3068 5820 0.0432 - -
2.3107 5830 0.043 - -
2.3147 5840 0.0424 - -
2.3187 5850 0.0396 - -
2.3226 5860 0.0433 - -
2.3266 5870 0.0413 - -
2.3306 5880 0.0436 - -
2.3345 5890 0.0399 - -
2.3385 5900 0.0426 - -
2.3424 5910 0.0405 - -
2.3464 5920 0.0423 - -
2.3504 5930 0.0409 - -
2.3543 5940 0.0412 - -
2.3583 5950 0.0401 - -
2.3623 5960 0.042 - -
2.3662 5970 0.0397 - -
2.3702 5980 0.0422 - -
2.3742 5990 0.0416 - -
2.3781 6000 0.0422 0.0493 0.9599
2.3821 6010 0.041 - -
2.3860 6020 0.0404 - -
2.3900 6030 0.0404 - -
2.3940 6040 0.0412 - -
2.3979 6050 0.0424 - -
2.4019 6060 0.043 - -
2.4059 6070 0.0416 - -
2.4098 6080 0.0405 - -
2.4138 6090 0.0408 - -
2.4178 6100 0.0413 - -
2.4217 6110 0.0408 - -
2.4257 6120 0.0407 - -
2.4296 6130 0.041 - -
2.4336 6140 0.0387 - -
2.4376 6150 0.0408 - -
2.4415 6160 0.0413 - -
2.4455 6170 0.0429 - -
2.4495 6180 0.0394 - -
2.4534 6190 0.041 - -
2.4574 6200 0.0419 - -
2.4614 6210 0.0395 - -
2.4653 6220 0.0405 - -
2.4693 6230 0.0412 - -
2.4732 6240 0.0439 - -
2.4772 6250 0.0423 - -
2.4812 6260 0.0423 - -
2.4851 6270 0.0406 - -
2.4891 6280 0.0402 - -
2.4931 6290 0.0428 - -
2.4970 6300 0.0422 - -
2.5010 6310 0.0399 - -
2.5050 6320 0.0409 - -
2.5089 6330 0.0412 - -
2.5129 6340 0.0403 - -
2.5168 6350 0.04 - -
2.5208 6360 0.0412 - -
2.5248 6370 0.0424 - -
2.5287 6380 0.0409 - -
2.5327 6390 0.0409 - -
2.5367 6400 0.0418 - -
2.5406 6410 0.0403 - -
2.5446 6420 0.0413 - -
2.5486 6430 0.038 - -
2.5525 6440 0.0414 - -
2.5565 6450 0.0409 - -
2.5604 6460 0.0407 - -
2.5644 6470 0.0406 - -
2.5684 6480 0.0392 - -
2.5723 6490 0.0417 - -
2.5763 6500 0.0391 0.0487 0.9605
2.5803 6510 0.039 - -
2.5842 6520 0.0414 - -
2.5882 6530 0.0411 - -
2.5922 6540 0.0395 - -
2.5961 6550 0.0405 - -
2.6001 6560 0.0392 - -
2.6040 6570 0.041 - -
2.6080 6580 0.0387 - -
2.6120 6590 0.0409 - -
2.6159 6600 0.0416 - -
2.6199 6610 0.0399 - -
2.6239 6620 0.0395 - -
2.6278 6630 0.0416 - -
2.6318 6640 0.0397 - -
2.6358 6650 0.041 - -
2.6397 6660 0.0422 - -
2.6437 6670 0.0404 - -
2.6476 6680 0.0405 - -
2.6516 6690 0.0413 - -
2.6556 6700 0.0405 - -
2.6595 6710 0.04 - -
2.6635 6720 0.0383 - -
2.6675 6730 0.0412 - -
2.6714 6740 0.0416 - -
2.6754 6750 0.0405 - -
2.6793 6760 0.0423 - -
2.6833 6770 0.0419 - -
2.6873 6780 0.0405 - -
2.6912 6790 0.0409 - -
2.6952 6800 0.04 - -
2.6992 6810 0.0397 - -
2.7031 6820 0.039 - -
2.7071 6830 0.0393 - -
2.7111 6840 0.0413 - -
2.7150 6850 0.039 - -
2.7190 6860 0.04 - -
2.7229 6870 0.0409 - -
2.7269 6880 0.0403 - -
2.7309 6890 0.0397 - -
2.7348 6900 0.0404 - -
2.7388 6910 0.0396 - -
2.7428 6920 0.04 - -
2.7467 6930 0.0397 - -
2.7507 6940 0.0393 - -
2.7547 6950 0.037 - -
2.7586 6960 0.0383 - -
2.7626 6970 0.04 - -
2.7665 6980 0.0406 - -
2.7705 6990 0.0394 - -
2.7745 7000 0.0385 0.0482 0.9609
2.7784 7010 0.0383 - -
2.7824 7020 0.0403 - -
2.7864 7030 0.04 - -
2.7903 7040 0.0395 - -
2.7943 7050 0.039 - -
2.7983 7060 0.0398 - -
2.8022 7070 0.0401 - -
2.8062 7080 0.0401 - -
2.8101 7090 0.0395 - -
2.8141 7100 0.0396 - -
2.8181 7110 0.0395 - -
2.8220 7120 0.0411 - -
2.8260 7130 0.0386 - -
2.8300 7140 0.0382 - -
2.8339 7150 0.0386 - -
2.8379 7160 0.0389 - -
2.8419 7170 0.0396 - -
2.8458 7180 0.0394 - -
2.8498 7190 0.04 - -
2.8537 7200 0.0401 - -
2.8577 7210 0.0412 - -
2.8617 7220 0.0383 - -
2.8656 7230 0.0392 - -
2.8696 7240 0.0394 - -
2.8736 7250 0.0399 - -
2.8775 7260 0.0403 - -
2.8815 7270 0.0384 - -
2.8855 7280 0.0397 - -
2.8894 7290 0.0407 - -
2.8934 7300 0.0386 - -
2.8973 7310 0.0385 - -
2.9013 7320 0.0405 - -
2.9053 7330 0.0389 - -
2.9092 7340 0.0362 - -
2.9132 7350 0.0397 - -
2.9172 7360 0.0393 - -
2.9211 7370 0.0397 - -
2.9251 7380 0.0386 - -
2.9291 7390 0.0388 - -
2.9330 7400 0.0366 - -
2.9370 7410 0.0394 - -
2.9409 7420 0.0396 - -
2.9449 7430 0.0393 - -
2.9489 7440 0.0401 - -
2.9528 7450 0.0391 - -
2.9568 7460 0.0388 - -
2.9608 7470 0.0386 - -
2.9647 7480 0.0391 - -
2.9687 7490 0.037 - -
2.9727 7500 0.0386 0.0477 0.9613
2.9766 7510 0.0392 - -
2.9806 7520 0.0399 - -
2.9845 7530 0.0385 - -
2.9885 7540 0.0381 - -
2.9925 7550 0.0392 - -
2.9964 7560 0.0386 - -
3.0004 7570 0.0394 - -
3.0044 7580 0.0401 - -
3.0083 7590 0.0404 - -
3.0123 7600 0.0384 - -
3.0163 7610 0.0381 - -
3.0202 7620 0.0383 - -
3.0242 7630 0.0389 - -
3.0281 7640 0.0364 - -
3.0321 7650 0.0399 - -
3.0361 7660 0.0383 - -
3.0400 7670 0.0401 - -
3.0440 7680 0.0388 - -
3.0480 7690 0.0389 - -
3.0519 7700 0.036 - -
3.0559 7710 0.0403 - -
3.0598 7720 0.0376 - -
3.0638 7730 0.0387 - -
3.0678 7740 0.0405 - -
3.0717 7750 0.0399 - -
3.0757 7760 0.0382 - -
3.0797 7770 0.0376 - -
3.0836 7780 0.0393 - -
3.0876 7790 0.0388 - -
3.0916 7800 0.0395 - -
3.0955 7810 0.0391 - -
3.0995 7820 0.0392 - -
3.1034 7830 0.0371 - -
3.1074 7840 0.039 - -
3.1114 7850 0.0395 - -
3.1153 7860 0.0385 - -
3.1193 7870 0.0362 - -
3.1233 7880 0.0375 - -
3.1272 7890 0.0376 - -
3.1312 7900 0.0384 - -
3.1352 7910 0.0378 - -
3.1391 7920 0.0393 - -
3.1431 7930 0.0378 - -
3.1470 7940 0.0404 - -
3.1510 7950 0.0361 - -
3.1550 7960 0.0369 - -
3.1589 7970 0.0396 - -
3.1629 7980 0.0404 - -
3.1669 7990 0.0386 - -
3.1708 8000 0.038 0.0473 0.9616
3.1748 8010 0.0372 - -
3.1788 8020 0.0373 - -
3.1827 8030 0.0369 - -
3.1867 8040 0.0371 - -
3.1906 8050 0.0386 - -
3.1946 8060 0.038 - -
3.1986 8070 0.0366 - -
3.2025 8080 0.0378 - -
3.2065 8090 0.0379 - -
3.2105 8100 0.038 - -
3.2144 8110 0.0374 - -
3.2184 8120 0.0388 - -
3.2224 8130 0.038 - -
3.2263 8140 0.0363 - -
3.2303 8150 0.0369 - -
3.2342 8160 0.0371 - -
3.2382 8170 0.0377 - -
3.2422 8180 0.0364 - -
3.2461 8190 0.0372 - -
3.2501 8200 0.0403 - -
3.2541 8210 0.0385 - -
3.2580 8220 0.0385 - -
3.2620 8230 0.0386 - -
3.2660 8240 0.0369 - -
3.2699 8250 0.039 - -
3.2739 8260 0.0365 - -
3.2778 8270 0.0382 - -
3.2818 8280 0.0354 - -
3.2858 8290 0.0393 - -
3.2897 8300 0.0387 - -
3.2937 8310 0.0366 - -
3.2977 8320 0.0391 - -
3.3016 8330 0.0382 - -
3.3056 8340 0.0377 - -
3.3096 8350 0.0369 - -
3.3135 8360 0.0384 - -
3.3175 8370 0.0379 - -
3.3214 8380 0.0372 - -
3.3254 8390 0.0391 - -
3.3294 8400 0.0378 - -
3.3333 8410 0.0393 - -
3.3373 8420 0.0373 - -
3.3413 8430 0.0394 - -
3.3452 8440 0.0367 - -
3.3492 8450 0.0373 - -
3.3532 8460 0.0362 - -
3.3571 8470 0.0372 - -
3.3611 8480 0.0396 - -
3.3650 8490 0.0392 - -
3.3690 8500 0.0374 0.0470 0.9616
3.3730 8510 0.0378 - -
3.3769 8520 0.0385 - -
3.3809 8530 0.0375 - -
3.3849 8540 0.0392 - -
3.3888 8550 0.0378 - -
3.3928 8560 0.0366 - -
3.3967 8570 0.0383 - -
3.4007 8580 0.0372 - -
3.4047 8590 0.038 - -
3.4086 8600 0.0384 - -
3.4126 8610 0.0359 - -
3.4166 8620 0.0377 - -
3.4205 8630 0.0387 - -
3.4245 8640 0.0365 - -
3.4285 8650 0.0359 - -
3.4324 8660 0.0358 - -
3.4364 8670 0.0366 - -
3.4403 8680 0.0369 - -
3.4443 8690 0.0365 - -
3.4483 8700 0.0366 - -
3.4522 8710 0.0357 - -
3.4562 8720 0.036 - -
3.4602 8730 0.0365 - -
3.4641 8740 0.0381 - -
3.4681 8750 0.0399 - -
3.4721 8760 0.0388 - -
3.4760 8770 0.0366 - -
3.4800 8780 0.0346 - -
3.4839 8790 0.0371 - -
3.4879 8800 0.0376 - -
3.4919 8810 0.0374 - -
3.4958 8820 0.0354 - -
3.4998 8830 0.0363 - -
3.5038 8840 0.0374 - -
3.5077 8850 0.0373 - -
3.5117 8860 0.0347 - -
3.5157 8870 0.0374 - -
3.5196 8880 0.0349 - -
3.5236 8890 0.0376 - -
3.5275 8900 0.0363 - -
3.5315 8910 0.036 - -
3.5355 8920 0.0378 - -
3.5394 8930 0.0376 - -
3.5434 8940 0.039 - -
3.5474 8950 0.0373 - -
3.5513 8960 0.0361 - -
3.5553 8970 0.0356 - -
3.5593 8980 0.0357 - -
3.5632 8990 0.0371 - -
3.5672 9000 0.0374 0.0468 0.9617
3.5711 9010 0.0372 - -
3.5751 9020 0.0369 - -
3.5791 9030 0.0362 - -
3.5830 9040 0.0367 - -
3.5870 9050 0.0388 - -
3.5910 9060 0.0369 - -
3.5949 9070 0.0375 - -
3.5989 9080 0.0374 - -
3.6029 9090 0.0365 - -
3.6068 9100 0.0363 - -
3.6108 9110 0.0396 - -
3.6147 9120 0.0372 - -
3.6187 9130 0.0363 - -
3.6227 9140 0.0363 - -
3.6266 9150 0.0366 - -
3.6306 9160 0.0352 - -
3.6346 9170 0.038 - -
3.6385 9180 0.0359 - -
3.6425 9190 0.0374 - -
3.6465 9200 0.0363 - -
3.6504 9210 0.0356 - -
3.6544 9220 0.0354 - -
3.6583 9230 0.0377 - -
3.6623 9240 0.0361 - -
3.6663 9250 0.0374 - -
3.6702 9260 0.0373 - -
3.6742 9270 0.0357 - -
3.6782 9280 0.0359 - -
3.6821 9290 0.037 - -
3.6861 9300 0.0366 - -
3.6901 9310 0.0374 - -
3.6940 9320 0.0376 - -
3.6980 9330 0.0373 - -
3.7019 9340 0.0363 - -
3.7059 9350 0.0381 - -
3.7099 9360 0.0353 - -
3.7138 9370 0.0363 - -
3.7178 9380 0.0377 - -
3.7218 9390 0.0364 - -
3.7257 9400 0.0378 - -
3.7297 9410 0.0376 - -
3.7337 9420 0.0376 - -
3.7376 9430 0.0368 - -
3.7416 9440 0.0381 - -
3.7455 9450 0.0358 - -
3.7495 9460 0.0362 - -
3.7535 9470 0.038 - -
3.7574 9480 0.0371 - -
3.7614 9490 0.0371 - -
3.7654 9500 0.0353 0.0465 0.9617
3.7693 9510 0.0381 - -
3.7733 9520 0.0362 - -
3.7772 9530 0.0352 - -
3.7812 9540 0.0363 - -
3.7852 9550 0.0352 - -
3.7891 9560 0.0367 - -
3.7931 9570 0.035 - -
3.7971 9580 0.0367 - -
3.8010 9590 0.0369 - -
3.8050 9600 0.0365 - -
3.8090 9610 0.0369 - -
3.8129 9620 0.0359 - -
3.8169 9630 0.0367 - -
3.8208 9640 0.0384 - -
3.8248 9650 0.0359 - -
3.8288 9660 0.0368 - -
3.8327 9670 0.0363 - -
3.8367 9680 0.0374 - -
3.8407 9690 0.0372 - -
3.8446 9700 0.0361 - -
3.8486 9710 0.0381 - -
3.8526 9720 0.0342 - -
3.8565 9730 0.0348 - -
3.8605 9740 0.0372 - -
3.8644 9750 0.0377 - -
3.8684 9760 0.0356 - -
3.8724 9770 0.0365 - -
3.8763 9780 0.0368 - -
3.8803 9790 0.0366 - -
3.8843 9800 0.0383 - -
3.8882 9810 0.0353 - -
3.8922 9820 0.0377 - -
3.8962 9830 0.0364 - -
3.9001 9840 0.0362 - -
3.9041 9850 0.0351 - -
3.9080 9860 0.0381 - -
3.9120 9870 0.0368 - -
3.9160 9880 0.0361 - -
3.9199 9890 0.0356 - -
3.9239 9900 0.035 - -
3.9279 9910 0.0345 - -
3.9318 9920 0.0378 - -
3.9358 9930 0.036 - -
3.9398 9940 0.0367 - -
3.9437 9950 0.0356 - -
3.9477 9960 0.034 - -
3.9516 9970 0.0377 - -
3.9556 9980 0.0379 - -
3.9596 9990 0.0388 - -
3.9635 10000 0.0362 0.0463 0.9618

</details>

### Framework Versions

  • Python: 3.10.10
  • Sentence Transformers: 3.0.1
  • Transformers: 4.45.0.dev0
  • PyTorch: 2.2.1+cu121
  • Accelerate: 0.34.2
  • Datasets: 2.21.0
  • Tokenizers: 0.19.1

## Citation

### BibTeX

#### Sentence Transformers

```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```

#### ContrastiveLoss

```bibtex
@inproceedings{hadsell2006dimensionality,
    author={Hadsell, R. and Chopra, S. and LeCun, Y.},
    booktitle={2006 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'06)},
    title={Dimensionality Reduction by Learning an Invariant Mapping},
    year={2006},
    volume={2},
    pages={1735-1742},
    doi={10.1109/CVPR.2006.100}
}
```