mpt-7b-ggml / mpt-7b-chat-f16.meta
{
    "model": "Mpt",
    "quantization": "F16",
    "quantization_version": "Not_Quantized",
    "container": "GGML",
    "converter": "llm-rs",
    "hash": "da8d1be23d280ab8ac4e12f185a4ec5461941461ff6bab8d9efd076ae446fc6d",
    "base_model": "mosaicml/mpt-7b-chat"
}
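
The `hash` field can be used to check the integrity of the converted model before loading it. Below is a minimal sketch, assuming the value is the SHA-256 digest of the accompanying GGML model binary; the file names `mpt-7b-chat-f16.meta` and `mpt-7b-chat-f16.bin` are placeholders for wherever the metadata and model files were downloaded.

```python
import hashlib
import json

# Hypothetical local paths; adjust to the actual download locations.
META_PATH = "mpt-7b-chat-f16.meta"
MODEL_PATH = "mpt-7b-chat-f16.bin"


def sha256_of_file(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file through SHA-256 so large model binaries are not read into memory at once."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()


# Load the metadata shown above and compare the recorded hash with the file on disk.
with open(META_PATH, "r", encoding="utf-8") as f:
    meta = json.load(f)

actual = sha256_of_file(MODEL_PATH)
if actual == meta["hash"]:
    print(f"OK: {MODEL_PATH} matches the recorded hash for {meta['base_model']}")
else:
    print(f"MISMATCH: expected {meta['hash']}, got {actual}")
```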