{"model": "roberta-large", "max_length": 64, "mode": "average_no_mask", "data": "relbert/semeval2012_relational_similarity", "split": "train", "data_eval": "relbert/conceptnet_high_confidence", "split_eval": "full", "template_mode": "manual", "template": "Today, I finally discovered the relation between <subj> and <obj> : <subj> is the <mask> of <obj>", "loss_function": "nce_logout", "classification_loss": true, "temperature_nce_constant": 0.05, "temperature_nce_rank": {"min": 0.01, "max": 0.05, "type": "linear"}, "epoch": 1, "batch": 128, "lr": 5e-06, "lr_decay": false, "lr_warmup": 1, "weight_decay": 0, "random_seed": 0, "exclude_relation": null, "exclude_relation_eval": null, "n_sample": 640, "gradient_accumulation": 8}