[paths]
train = "/home/masakie/lcrads/Masaki_dissertation_model/Engagement_span_finder_v2/data/engagement_three_train.spacy"
dev = "/home/masakie/lcrads/Masaki_dissertation_model/Engagement_span_finder_v2/data/engagement_three_dev.spacy"
vectors = null
init_tok2vec = null

[system]
gpu_allocator = "pytorch"
seed = 0

[nlp]
lang = "en"
pipeline = ["transformer","parser","tagger","ner","attribute_ruler","lemmatizer","trainable_transformer","spancat"]
batch_size = 10
disabled = []
before_creation = null
after_creation = null
after_pipeline_creation = null
tokenizer = {"@tokenizers":"spacy.Tokenizer.v1"}

[components]

[components.attribute_ruler]
factory = "attribute_ruler"
scorer = {"@scorers":"spacy.attribute_ruler_scorer.v1"}
validate = false

[components.lemmatizer]
factory = "lemmatizer"
mode = "rule"
model = null
overwrite = false
scorer = {"@scorers":"spacy.lemmatizer_scorer.v1"}

[components.ner]
factory = "ner"
incorrect_spans_key = null
moves = null
scorer = {"@scorers":"spacy.ner_scorer.v1"}
update_with_oracle_cut_size = 100

[components.ner.model]
@architectures = "spacy.TransitionBasedParser.v2"
state_type = "ner"
extra_state_tokens = false
hidden_width = 64
maxout_pieces = 2
use_upper = false
nO = null

[components.ner.model.tok2vec]
@architectures = "spacy-transformers.TransformerListener.v1"
grad_factor = 1.0
upstream = "transformer"
pooling = {"@layers":"reduce_mean.v1"}

[components.parser]
factory = "parser"
learn_tokens = false
min_action_freq = 30
moves = null
scorer = {"@scorers":"spacy.parser_scorer.v1"}
update_with_oracle_cut_size = 100

[components.parser.model]
@architectures = "spacy.TransitionBasedParser.v2"
state_type = "parser"
extra_state_tokens = false
hidden_width = 64
maxout_pieces = 2
use_upper = false
nO = null

[components.parser.model.tok2vec]
@architectures = "spacy-transformers.TransformerListener.v1"
grad_factor = 1.0
upstream = "transformer"
pooling = {"@layers":"reduce_mean.v1"}

[components.spancat]
factory = "spancat"
max_positive = null
scorer = {"@scorers":"spacy.spancat_scorer.v1"}
spans_key = ${vars.spans_key}
threshold = 0.5

[components.spancat.model]
@architectures = "LSTM_SpanCategorizer.v1"
LSTMdepth = 1
LSTMdropout = 0.2
LSTMhidden = 200

[components.spancat.model.reducer]
@layers = "Mish_two_way_reducer.v2"
depth = 1
dropout = 0.3
hidden_size = 256

[components.spancat.model.scorer]
@layers = "spacy.LinearLogistic.v1"
nO = null
nI = null

[components.spancat.model.tok2vec]
@architectures = "spacy-transformers.TransformerListener.v1"
grad_factor = 1.0
pooling = {"@layers":"reduce_mean.v1"}
upstream = "trainable_transformer"

[components.spancat.suggester]
@misc = "spacy-experimental.ngram_subtree_suggester.v1"
sizes = [1,2,3,4,5,6,7,8,9,10,11,12]

[components.tagger]
factory = "tagger"
neg_prefix = "!"
overwrite = false
scorer = {"@scorers":"spacy.tagger_scorer.v1"}

[components.tagger.model]
@architectures = "spacy.Tagger.v2"
nO = null
normalize = false

[components.tagger.model.tok2vec]
@architectures = "spacy-transformers.TransformerListener.v1"
grad_factor = 1.0
upstream = "transformer"
pooling = {"@layers":"reduce_mean.v1"}

[components.trainable_transformer]
factory = "transformer"
max_batch_items = 4096
set_extra_annotations = {"@annotation_setters":"spacy-transformers.null_annotation_setter.v1"}

[components.trainable_transformer.model]
name = "egumasa/roberta-base-academic3"
@architectures = "spacy-transformers.TransformerModel.v1"

[components.trainable_transformer.model.get_spans]
@span_getters = "spacy-transformers.strided_spans.v1"
window = 384
stride = 288

[components.trainable_transformer.model.tokenizer_config]
use_fast = true

[components.transformer]
factory = "transformer"
max_batch_items = 4096
set_extra_annotations = {"@annotation_setters":"spacy-transformers.null_annotation_setter.v1"}

[components.transformer.model]
@architectures = "spacy-transformers.TransformerModel.v3"
name = "roberta-base"
mixed_precision = false

[components.transformer.model.get_spans]
@span_getters = "spacy-transformers.strided_spans.v1"
window = 128
stride = 96

[components.transformer.model.grad_scaler_config]

[components.transformer.model.tokenizer_config]
use_fast = true

[components.transformer.model.transformer_config]

[corpora]

[corpora.dev]
@readers = "spacy.Corpus.v1"
path = ${paths.dev}
max_length = 0
gold_preproc = false
limit = 0
augmenter = null

[corpora.train]
@readers = "spacy.Corpus.v1"
path = ${paths.train}
max_length = 2000
gold_preproc = false
limit = 0
augmenter = null

[training]
accumulate_gradient = 8
dev_corpus = "corpora.dev"
train_corpus = "corpora.train"
seed = ${system.seed}
gpu_allocator = ${system.gpu_allocator}
dropout = 0.1
patience = 3000
max_epochs = 0
max_steps = 20000
eval_frequency = 200
frozen_components = ["transformer","parser","tagger","ner","attribute_ruler","lemmatizer"]
annotating_components = ["transformer","parser","tagger"]
before_to_disk = null

[training.batcher]
@batchers = "spacy.batch_by_words.v1"
discard_oversize = false
tolerance = 0.2
get_length = null

[training.batcher.size]
@schedules = "compounding.v1"
start = 900
stop = 1000
compound = 1.0002
t = 0.0

[training.logger]
@loggers = "spacy.WandbLogger.v4"
project_name = "ENG_experiment"
remove_config_values = ["paths.train","paths.dev","corpora.train.path","corpora.dev.path"]
model_log_interval = null
entity = "e-masaki0101"
log_dataset_dir = null
run_name = null
log_best_dir = null
log_latest_dir = null

[training.optimizer]
@optimizers = "Adam.v1"
beta1 = 0.9
beta2 = 0.999
L2_is_weight_decay = true
L2 = 0.01
grad_clip = 1.0
use_averages = false
eps = 0.00000001

[training.optimizer.learn_rate]
@schedules = "warmup_linear.v1"
initial_rate = 0.0000417369
warmup_steps = 1000
total_steps = 20000

[training.score_weights]
dep_uas = null
dep_las = null
dep_las_per_type = null
sents_p = null
sents_r = null
sents_f = null
tag_acc = null
ents_f = null
ents_p = null
ents_r = null
ents_per_type = null
lemma_acc = null
spans_sc_f = 0.5
spans_sc_p = 0.0
spans_sc_r = 0.5

[pretraining]

[initialize]
vectors = ${paths.vectors}
init_tok2vec = ${paths.init_tok2vec}
vocab_data = null
lookups = null
before_init = null
after_init = null

[initialize.components]

[initialize.tokenizer]

[vars]
spans_key = "sc"
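
# Usage sketch (assumptions: this file is saved as config.cfg; the output
# directory, GPU id, and the module name functions.py are placeholders,
# not taken from the original config):
#
#   python -m spacy train config.cfg --output ./output --gpu-id 0 --code functions.py
#
# The --code module must register the custom components referenced above
# ("LSTM_SpanCategorizer.v1" in @architectures and "Mish_two_way_reducer.v2"
# in @layers), since these are not built into spaCy. The suggester
# "spacy-experimental.ngram_subtree_suggester.v1" additionally requires the
# spacy-experimental package, and [training.logger] requires wandb to be
# installed and authenticated.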