ld_requests/TheBloke/Mixtral-8x7B-Instruct-v0.1-GPTQ_eval_request_False_GPTQ_4bit_int4_float16.json
Add TheBloke/Mixtral-8x7B-Instruct-v0.1-GPTQ to eval queue (commit d2408ab)
{"model": "TheBloke/Mixtral-8x7B-Instruct-v0.1-GPTQ", "revision": "main", "private": false, "params": 23.81, "architectures": "MixtralForCausalLM", "quant_type": "GPTQ", "precision": "4bit", "model_params": 46.5, "model_size": 23.81, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Pending", "submitted_time": "2024-05-13T11:54:45Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "ITREX"}