text-generation-webui / logs /session_default_autosave.json
Charris71's picture
Upload folder using huggingface_hub
a1bea4a
raw
history blame
1.68 kB
{
"max_new_tokens": 200,
"seed": -1,
"temperature": 0.7,
"top_p": 0.9,
"top_k": 20,
"typical_p": 1,
"epsilon_cutoff": 0,
"eta_cutoff": 0,
"repetition_penalty": 1.15,
"repetition_penalty_range": 0,
"encoder_repetition_penalty": 1,
"no_repeat_ngram_size": 0,
"min_length": 0,
"do_sample": true,
"penalty_alpha": 0,
"num_beams": 1,
"length_penalty": 1,
"early_stopping": false,
"mirostat_mode": 0,
"mirostat_tau": 5,
"mirostat_eta": 0.1,
"add_bos_token": true,
"ban_eos_token": false,
"truncation_length": 2048,
"custom_stopping_strings": "",
"skip_special_tokens": true,
"stream": true,
"tfs": 1,
"top_a": 0,
"textbox": "Common sense questions and answers\n\nQuestion: \nFactual answer:",
"output_textbox": "",
"loader": "Transformers",
"cpu_memory": "110000",
"auto_devices": true,
"disk": false,
"cpu": false,
"bf16": true,
"load_in_8bit": false,
"trust_remote_code": false,
"load_in_4bit": true,
"compute_dtype": "bfloat16",
"quant_type": "nf4",
"use_double_quant": true,
"wbits": "None",
"groupsize": "None",
"model_type": "None",
"pre_layer": 0,
"triton": false,
"desc_act": false,
"no_inject_fused_attention": false,
"no_inject_fused_mlp": false,
"no_use_cuda_fp16": false,
"threads": 0,
"n_batch": 512,
"no_mmap": false,
"low_vram": false,
"mlock": false,
"n_gpu_layers": 0,
"n_ctx": 2048,
"llama_cpp_seed": 0,
"gpu_split": "",
"max_seq_len": 2048,
"compress_pos_emb": 1,
"alpha_value": 1,
"gpu_memory_0": 20800
}