PhillnetLarge / config.json
{
  "model_type": "gpt2",
  "architectures": ["GPT2LMHeadModel"],
  "tokenizer_config": {
    "bos_token_id": 50256,
    "eos_token_id": 50256,
    "n_positions": 2048
  },
  "config": {
    "activation_function": "gelu_new",
    "n_ctx": 2048,
    "n_embd": 2048,
    "n_head": 16,
    "n_layer": 24,
    "n_positions": 2048,
    "n_special": 0,
    "attn_pdrop": 0.1,
    "embd_pdrop": 0.1,
    "initializer_range": 0.02,
    "layer_norm_epsilon": 1e-05,
    "resid_pdrop": 0.1,
    "summary_activation": null,
    "summary_first_dropout": 0.1,
    "summary_proj_to_labels": true,
    "summary_type": "cls_index",
    "summary_use_proj": true
  },
  "task_specific_params": {
    "conversational": {
      "max_length": 1024,
      "min_length": 20,
      "length_penalty": 1.5,
      "num_beams": 5,
      "early_stopping": true,
      "no_repeat_ngram_size": 3,
      "temperature": 0.7,
      "top_k": 50,
      "top_p": 0.9
    }
  },
  "transformers_version": "4.34.0",
  "language": ["en"],
  "tags": ["conversational"],
  "metrics": ["perplexity", "accuracy"],
  "pipeline_tag": "conversational",
  "library_name": "transformers",
  "datasets": ["vicgalle/alpaca-gpt4"],
  "license": "apache-2.0",
  "custom_params": {
    "adaptation_rate": 0.05,
    "complexity_metric": null,
    "desired_improvement_rate": 0.02,
    "ecosystem_dynamics": {
      "environmental_volatility": 0.1,
      "resource_pool": 1
    },
    "growth_improvement_threshold": 0.01,
    "hidden_dim": 2048,
    "initial_neuron_count": 5000,
    "innovative_growth_net": {
      "adaptation_rate": 0.05,
      "complexity_metric": null,
      "initial_capacity": 250000,
      "input_size": 2048
    },
    "input_dimension": 768,
    "low_stability_threshold": 0.01,
    "max_complexity": 10000,
    "max_neurons": 250000,
    "max_sequence_length": 1024,
    "min_epochs_before_growth": 5,
    "model_filename": "pytorch_model.bin",
    "num_embeddings": 25000,
    "pruning_improvement_threshold": 0.005,
    "some_adaptation_rate": 0.05,
    "stability_threshold": 0.02,
    "start_token_index": 2
  }
}
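
A minimal usage sketch, not an official example: it loads the model with transformers and replays the generation settings from `task_specific_params["conversational"]` by hand. The repo id `ayjays132/PhillnetLarge` is an assumption inferred from the page path, and the tokenizer files are assumed to ship alongside the weights. Note also that `GPT2Config` reads architecture fields (`n_embd`, `n_layer`, ...) from the top level of config.json, so the nested `"config"` block above may not be picked up automatically by a stock `from_pretrained` call.

```python
# A minimal sketch under the assumptions stated above.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "ayjays132/PhillnetLarge"  # assumption: derived from "PhillnetLarge / config.json"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)

prompt = "Hello! How are you today?"
inputs = tokenizer(prompt, return_tensors="pt")

# Replays task_specific_params["conversational"] explicitly. The sampling
# knobs from the config (temperature, top_k, top_p) are omitted here because
# they only take effect with do_sample=True, which the config does not set.
output_ids = model.generate(
    **inputs,
    max_length=1024,
    min_length=20,
    num_beams=5,
    length_penalty=1.5,
    early_stopping=True,
    no_repeat_ngram_size=3,
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```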
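
The `custom_params` block is not a standard `GPT2Config` field, so the library ignores it architecturally, but transformers preserves unrecognized top-level keys from config.json as plain attributes on the loaded config object. The sketch below, under the same assumed repo id, only demonstrates reading those values back; the training or growth code that actually consumes them is not part of this file.

```python
# Sketch: reading the non-standard "custom_params" block back from the hub.
# Assumes the same repo id as above; this shows access, not semantics.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("ayjays132/PhillnetLarge")

# Unrecognized top-level keys in config.json are kept as plain attributes
# on the PretrainedConfig instance.
custom = getattr(config, "custom_params", {})
print(custom.get("max_neurons"))                                  # 250000
print(custom.get("innovative_growth_net", {}).get("input_size"))  # 2048
```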