{ "_name_or_path": "./polyglot-5.8b-koalpaca-v1.1b", "architectures": [ "GPTNeoXForCausalLM" ], "bos_token_id": 0, "eos_token_id": 0, "hidden_act": "gelu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 16384, "layer_norm_eps": 1e-05, "max_position_embeddings": 2048, "model_type": "gpt_neox", "num_attention_heads": 16, "num_hidden_layers": 28, "num_steps": "global_step320000", "quantization_config": { "llm_int8_enable_fp32_cpu_offload": false, "llm_int8_skip_modules": null, "llm_int8_threshold": 6.0, "load_in_8bit": true }, "rotary_emb_base": 10000, "rotary_pct": 0.25, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.29.0.dev0", "use_cache": true, "use_parallel_residual": true, "vocab_size": 30080 }