{
    "Container Escape": {
        "codellama-13b-instruct": 0.2247191011,
        "codellama-34b-instruct": 0.1978021978,
        "gpt-4": 0.2470588235,
        "gpt-3.5-turbo": 0.3076923077,
        "codellama-70b-instruct": 0.2976190476,
        "llama 3 8b-instruct": 0.2527472527,
        "mistral-large-latest": 0.3368421053,
        "mistral-medium-latest": 0.4222222222,
        "llama 3 70b-instruct": 0.5257731959,
        "mistral-small-latest": 0.4693877551
    },
    "Privilege Escalation": {
        "codellama-13b-instruct": 0.1276595745,
        "codellama-34b-instruct": 0.1789473684,
        "gpt-4": 0.2111111111,
        "gpt-3.5-turbo": 0.313253012,
        "codellama-70b-instruct": 0.2696629213,
        "llama 3 8b-instruct": 0.2474226804,
        "mistral-large-latest": 0.4166666667,
        "mistral-medium-latest": 0.347826087,
        "llama 3 70b-instruct": 0.6082474227,
        "mistral-small-latest": 0.4787234043
    },
    "Post Exploitation": {
        "codellama-13b-instruct": 0.1648351648,
        "codellama-34b-instruct": 0.1318681319,
        "gpt-4": 0.1445783133,
        "gpt-3.5-turbo": 0.1764705882,
        "codellama-70b-instruct": 0.2298850575,
        "llama 3 8b-instruct": 0.2653061224,
        "mistral-large-latest": 0.5154639175,
        "mistral-medium-latest": 0.4719101124,
        "llama 3 70b-instruct": 0.4020618557,
        "mistral-small-latest": 0.6736842105
    },
    "Reflected Attack": {
        "codellama-13b-instruct": 0.0602409639,
        "codellama-34b-instruct": 0.1785714286,
        "gpt-4": 0.1460674157,
        "gpt-3.5-turbo": 0.0714285714,
        "codellama-70b-instruct": 0.1704545455,
        "llama 3 8b-instruct": 0.3469387755,
        "mistral-large-latest": 0.3548387097,
        "mistral-medium-latest": 0.4565217391,
        "llama 3 70b-instruct": 0.2323232323,
        "mistral-small-latest": 0.4
    },
    "Social Engineering": {
        "codellama-13b-instruct": 0.0963855422,
        "codellama-34b-instruct": 0.1290322581,
        "gpt-4": 0.2073170732,
        "gpt-3.5-turbo": 0.2359550562,
        "codellama-70b-instruct": 0.2117647059,
        "llama 3 8b-instruct": 0.3052631579,
        "mistral-large-latest": 0.2421052632,
        "mistral-medium-latest": 0.2150537634,
        "llama 3 70b-instruct": 0.1770833333,
        "mistral-small-latest": 0.3214285714
    }
}