{"tstamp": 1720583926.6994, "type": "chat", "model": "llava-fire", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1720583926.678, "finish": 1720583926.6994, "state": {"template_name": "vicuna_v1.1", "system_message": "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.", "roles": ["USER", "ASSISTANT"], "messages": [["USER", "Hello"], ["ASSISTANT", "hello"]], "offset": 0, "conv_id": "a48164214dce453a9ad276401b7d095c", "model_name": "llava-fire", "has_csam_image": false}, "ip": "127.0.0.1"}
{"tstamp": 1720583926.6998, "type": "chat", "model": "llava-original", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1720583926.6815, "finish": 1720583926.6998, "state": {"template_name": "vicuna_v1.1", "system_message": "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.", "roles": ["USER", "ASSISTANT"], "messages": [["USER", "Hello"], ["ASSISTANT", "hello"]], "offset": 0, "conv_id": "8bed262a5728409284a7a56a0fe66a75", "model_name": "llava-original", "has_csam_image": false}, "ip": "127.0.0.1"}
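Each line of the log is a standalone JSON object, so the file can be treated as JSONL. Below is a minimal sketch for loading the records, assuming the log is saved as `chat_log.jsonl` (the file name is hypothetical) and that every record exposes the `model`, `gen_params`, and `state.messages` fields shown in the entries above.

```python
import json

# Hypothetical path; the actual log file name is an assumption.
LOG_PATH = "chat_log.jsonl"

def load_chat_records(path):
    """Read one JSON object per line and return the parsed records."""
    records = []
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if line:
                records.append(json.loads(line))
    return records

if __name__ == "__main__":
    for rec in load_chat_records(LOG_PATH):
        # Each record carries the model name, the sampling parameters,
        # and the conversation turns under state["messages"].
        model = rec["model"]
        params = rec["gen_params"]
        turns = rec["state"]["messages"]
        print(model, params["temperature"], turns)
```

This keeps each record independent, so a partially written last line (common with live logging) can be skipped or retried without affecting the rest of the file.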