wineberto-ner / config.json
Added more entities to the training set and randomized the position of the wine label within each description (commit ddb7191). The earlier training set was flawed: the wine label always appeared at the beginning of the description, so the model learned to tag anything at the beginning of a description as the wine label.
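The fix described in the commit message amounts to removing a positional shortcut from the training examples. Below is a hypothetical sketch of that idea, not the author's actual data-generation code: an entity span is spliced into a tagged token sequence at a random position, with BIO tags kept aligned. The function name, the example tokens, and the choice of "producer" as the stand-in tag for the wine label are all illustrative assumptions.

```python
import random

def insert_span_randomly(tokens, tags, span_tokens, entity_type):
    """Insert an entity span at a random token position so the model cannot
    learn "first tokens == wine label" as a shortcut (hypothetical sketch)."""
    # BIO tags for the inserted span: B- on the first token, I- on the rest.
    span_tags = [f"B-{entity_type}"] + [f"I-{entity_type}"] * (len(span_tokens) - 1)
    pos = random.randint(0, len(tokens))  # any gap, including the very end
    return (tokens[:pos] + span_tokens + tokens[pos:],
            tags[:pos] + span_tags + tags[pos:])

# Illustrative call; "producer" stands in for whichever tag marks the wine label.
toks, tgs = insert_span_randomly(
    ["a", "dry", "red", "from", "Napa"],
    ["O", "B-sweetness", "O", "O", "B-region"],
    ["Silver", "Oak"],
    "producer",
)
print(list(zip(toks, tgs)))
```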
{
  "_name_or_path": "bert-base-uncased",
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "B-adj",
    "2": "B-country",
    "3": "B-flavor",
    "4": "B-grape",
    "5": "B-moutfeel",
    "6": "B-mouthfeel",
    "7": "B-producer",
    "8": "B-province",
    "9": "B-region",
    "10": "B-sweetness",
    "11": "B-vintage",
    "12": "I-country",
    "13": "I-grape",
    "14": "I-producer",
    "15": "I-province",
    "16": "I-region"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-adj": 1,
    "B-country": 2,
    "B-flavor": 3,
    "B-grape": 4,
    "B-moutfeel": 5,
    "B-mouthfeel": 6,
    "B-producer": 7,
    "B-province": 8,
    "B-region": 9,
    "B-sweetness": 10,
    "B-vintage": 11,
    "I-country": 12,
    "I-grape": 13,
    "I-producer": 14,
    "I-province": 15,
    "I-region": 16,
    "O": 0
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.33.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 30522
}
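This configuration attaches a 17-label token-classification head (BIO tags over wine entities such as grape, producer, region, sweetness, and vintage) to bert-base-uncased. A minimal usage sketch with the Hugging Face transformers pipeline is shown below; the repo id panigrah/wineberto-ner is an assumption inferred from the file path and author above, not confirmed by this file.

```python
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

# Assumed Hub path; substitute the actual repo id of this checkpoint.
model_id = "panigrah/wineberto-ner"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# aggregation_strategy="simple" merges B-/I- word pieces into whole entity spans.
ner = pipeline("token-classification", model=model, tokenizer=tokenizer,
               aggregation_strategy="simple")

print(ner("2018 Chateau Example Cabernet Sauvignon from Napa Valley, "
          "dry with firm tannins and dark cherry flavors."))
```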