name,Faithfulness,Answer_Relevancy,Answer_Correctness,Answer_Similarity
Qwen1.5-7B-Chat,0.736588808239606,0.7333071013844119,0.7653953121660069,0.7581706400802051
Yi-6B-Chat,0.8623512542497523,0.8477785816251643,0.7511472076832977,0.6674107034023887
Internlm2-Chat-20B,0.7579980374579645,0.5592058560996795,0.6230274906657811,0.8809475791391357
Qwen1.5-1.8B-Chat,0.6993731684981686,0.6912058643011517,0.5921887447381617,0.895549081535767
Gemma-2B,0.4793590757810048,0.269158909387365,0.5715789320903422,0.8084277426065398
Yi-6B,0.39791261211082174,0.35010016811319283,0.5554912360774231,0.8151911650862558
Internlm2-Chat-7B,0.6057084170408346,0.28950531392496315,0.5513185635050407,0.8527933874140172
Vicuna-13B-V1.5,0.6149588477366256,0.7175132054894446,0.5484350782035007,0.8846316742953054
Gpt-3.5-Turbo,0.6702526487367563,0.8535199907928265,0.5380443637081317,0.9113351056689803
Baichuan2-7B-Chat,0.6457107843137256,0.7989283627012825,0.5355149927949222,0.8918899008657395
Qwen1.5-14B-Chat,0.7039449112978525,0.7891124698018288,0.5351538957435175,0.9060753469650263
Qwen1.5-4B-Chat,0.6079656862745099,0.798414770802262,0.5349164010626877,0.8926774424126845
Vicuna-7B-V1.5,0.5618038576473784,0.7385375964159062,0.5346381268062822,0.8785135365491068
Qwen1.5-0.5B-Chat,0.5161804573314475,0.7335961705843393,0.5329134165403151,0.878000802003553
Baichuan2-13B-Chat,0.6229674796747967,0.8122416536307804,0.5111467259298673,0.8969644779921856
Gemma-7B,0.6952392516403653,0.42448628847691194,0.4304401424621823,0.6676771540611001
Mistral-7B,0.8060009447278426,0.5415825155389061,0.39330883346357015,0.5908077476385994