{
"Description": "This is a pre-training configuration for a codon optimization model based on the BART language model.",
"project_name": "COnTRA_SCPECBS3_wintest",
"dataset_path": "/mount/COnTRA/data/datasets/processed_data_SCPECBS3/homologs/SCPECBS3_SCPECBS3_ExprRefined100",
"tokenizer_path": "/mount/COnTRA/tokenizers/contra_tokenizer_gen_exprrefined",
"cai_refference_path": "/mount/COnTRA/data/datasets/processed_data_SCPECBS3/S_cerevisiae/S_cerevisiae.0.nt.fasta",
"checkpoint_flag": true,
"checkpoint_path": "/mount/COnTRA/models/Finetuned_oSiS_100_ExR/best_model/",
"special_token_th": 42,
"mask_all": true,
"sw_aa_size": 100,
"eval_type": "model",
"model_type": "COBaBExRi",
"cai_query_species": "S_cerevisiae",
"outdir": "/mount/COnTRA/models/Finetuned_oSiS_100_ExR/best_model/",
"outfile": "mask_model_eval.csv",
"out_dict": [
"prot_len",
"num_of_correct_predicted_codons",
"accuracy",
"cross_entropy_loss",
"entropy",
"query_codons",
"subject_codons",
"pred_codons"
],
"orig_dict": [
"qseqid",
"sseqid",
"query_species",
"subject_species",
"pident",
"length",
"mismatch",
"gapopen",
"qstart",
"qend",
"sstart",
"send",
"evalue",
"bitscore"
],
"debug": false
}