{
    "os": "Linux-5.15.133+-x86_64-with-glibc2.31",
    "python": "3.10.13",
    "heartbeatAt": "2024-04-12T07:35:56.437403",
    "startedAt": "2024-04-12T07:35:55.741618",
    "docker": null,
    "cuda": null,
    "args": [],
    "state": "running",
    "program": "kaggle.ipynb",
    "codePathLocal": null,
    "root": "/kaggle/working",
    "host": "e5a48bec8248",
    "username": "root",
    "executable": "/opt/conda/bin/python3.10",
    "cpu_count": 2,
    "cpu_count_logical": 4,
    "cpu_freq": {
        "current": 2000.138,
        "min": 0.0,
        "max": 0.0
    },
    "cpu_freq_per_core": [
        {
            "current": 2000.138,
            "min": 0.0,
            "max": 0.0
        },
        {
            "current": 2000.138,
            "min": 0.0,
            "max": 0.0
        },
        {
            "current": 2000.138,
            "min": 0.0,
            "max": 0.0
        },
        {
            "current": 2000.138,
            "min": 0.0,
            "max": 0.0
        }
    ],
    "disk": {
        "/": {
            "total": 8062.387607574463,
            "used": 5565.782459259033
        }
    },
    "gpu": "Tesla T4",
    "gpu_count": 2,
    "gpu_devices": [
        {
            "name": "Tesla T4",
            "memory_total": 16106127360
        },
        {
            "name": "Tesla T4",
            "memory_total": 16106127360
        }
    ],
    "memory": {
        "total": 31.357559204101562
    }
}
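
A minimal sketch of reading this metadata back, assuming it is saved under the filename W&B usually gives it (wandb-metadata.json); the path is an assumption and should be adjusted to wherever the file actually lives:

import json

# Assumed path: W&B typically writes run metadata to files/wandb-metadata.json.
with open("wandb-metadata.json") as f:
    meta = json.load(f)

# Summarize the host and GPU information recorded in the file above.
print(f"host={meta['host']}  python={meta['python']}  logical CPUs={meta['cpu_count_logical']}")
for gpu in meta.get("gpu_devices", []):
    # memory_total is reported in bytes; convert to GiB for readability.
    print(f"{gpu['name']}: {gpu['memory_total'] / 1024**3:.1f} GiB")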