{ "model_type": "gptj", "vocab_size": 50257, "n_positions": 2048, "n_ctx": 2048, "n_embd": 4096, "n_layer": 28, "n_head": 16, "rotary_dim": 64, "initializer_range": 0.02, "layer_norm_epsilon": 1e-5, "bos_token_id": 50256, "eos_token_id": 50256, "pad_token_id": 50256, "task_specific_params": { "text-generation": { "do_sample": true, "max_length": 50 } }, "max_length": 20, "min_length": 0, "do_sample": false, "early_stopping": false, "num_beams": 1, "temperature": 1.0, "top_k": 50, "top_p": 1.0, "repetition_penalty": 1.0, "length_penalty": 1.0, "no_repeat_ngram_size": 0, "encoder_no_repeat_ngram_size": 0, "num_return_sequences": 1, "chunk_size_feed_forward": 0, "output_scores": false, "return_dict_in_generate": false, "forced_bos_token_id": null, "forced_eos_token_id": null, "remove_invalid_values": false }