{
  "_name_or_path": "ebowwa/bad_llm_dpov03",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "bos_token_id": 1,
  "eos_token_id": 2,
  "model_type": "llama",
  "torch_dtype": "float16",
  "transformers_version": "4.27.0",
  "vocab_size": 32000
}