File size: 3,803 Bytes
2eafbc4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
import os
import platform
import random
import string
import uuid

from inference.core.env import DEVICE_ID, INFERENCE_SERVER_ID


def is_running_in_docker():
    """Report whether this process appears to run inside a Docker container.

    Docker creates a `/.dockerenv` marker file at the container root; its
    presence is used as the detection signal.

    Returns:
        bool: True if the Docker marker file exists, False otherwise.
    """
    dockerenv_marker = "/.dockerenv"
    return os.path.exists(dockerenv_marker)


def get_gpu_id():
    """Fetches the ID of the first available GPU, if any.

    Tries to import the optional `GPUtil` package and query the GPU list.
    Any failure (missing package, driver error, no GPUs) yields None.

    Returns:
        Optional[int]: ID of the first GPU if available, None otherwise.
    """
    try:
        import GPUtil

        gpus = GPUtil.getGPUs()
        if gpus:
            return gpus[0].id
        # Explicit None when the query succeeds but no GPUs are present.
        return None
    except ImportError:
        # GPUtil is an optional dependency; absence simply means "no GPU".
        return None
    except Exception:
        # Best-effort probe: any driver/query failure is treated as no GPU.
        return None


def get_cpu_id():
    """Fetches a CPU identifier based on the operating system.

    Windows: the WMIC ProcessorId. Linux: the value of the first
    `processor` line in /proc/cpuinfo. macOS: the CPU brand string.
    In case of any error or an unsupported OS, returns None.

    Returns:
        Optional[str]: CPU ID string if available, None otherwise.
    """
    try:
        system = platform.system()
        if system == "Windows":
            return os.popen("wmic cpu get ProcessorId").read().strip()
        elif system == "Linux":
            # The original split("processor")[0] always produced "" (the text
            # before the first occurrence), so [1] after a ":" split raised
            # IndexError and this branch silently returned None. Parse the
            # first "processor : N" line instead, closing the file properly.
            with open("/proc/cpuinfo") as cpuinfo:
                for line in cpuinfo:
                    if line.startswith("processor"):
                        return line.split(":", 1)[1].strip()
            return None
        elif system == "Darwin":
            import subprocess

            return (
                subprocess.check_output(["sysctl", "-n", "machdep.cpu.brand_string"])
                .strip()
                .decode()
            )
    except Exception:
        # Best-effort probe: any OS/tooling failure degrades to None.
        return None


def get_jetson_id():
    """Fetches the Jetson device's serial number.

    Reads the serial number from the device tree. In case of any error,
    or when the device-tree entry is absent or empty, returns None.

    Returns:
        Optional[str]: Jetson device serial number if available, None otherwise.
    """
    try:
        path = "/proc/device-tree/serial-number"
        if not os.path.exists(path):
            return None
        # Read the file directly instead of shelling out to `cat`.
        with open(path) as serial_file:
            serial_number = serial_file.read().strip()
        # Device-tree string properties are NUL-terminated; drop the
        # trailing NUL byte(s) that whitespace strip() does not remove.
        serial_number = serial_number.rstrip("\x00")
        if serial_number == "":
            return None
        return serial_number
    except Exception:
        # Best-effort probe: any read failure degrades to None.
        return None


def get_container_id():
    """Returns an identifier for the current container or process.

    Inside Docker, extracts the container ID from /proc/self/cgroup
    (the final path segment of the last cgroup line mentioning "docker";
    "" when no such line exists, matching the original shell pipeline).
    Outside Docker, returns a fresh random UUID string.

    Returns:
        str: Container ID, "" if undetectable inside Docker, or a UUID4.
    """
    if not is_running_in_docker():
        return str(uuid.uuid4())
    # Parse the cgroup file in-process instead of the original
    # `cat | grep 'docker' | sed 's/^.*\///' | tail -n1` shell pipeline,
    # whose '\/' also triggered an invalid-escape-sequence warning.
    try:
        with open("/proc/self/cgroup") as cgroup_file:
            docker_lines = [line.strip() for line in cgroup_file if "docker" in line]
    except OSError:
        docker_lines = []
    if not docker_lines:
        # Mirrors the empty output of the original pipeline.
        return ""
    # sed 's/^.*\///' keeps only the text after the last '/'.
    return docker_lines[-1].rsplit("/", 1)[-1]


def random_string(length):
    """Return a random string of alphanumeric characters.

    Args:
        length (int): Number of characters to generate.

    Returns:
        str: Random string drawn from ASCII letters and digits.
    """
    alphabet = string.ascii_letters + string.digits
    return "".join(random.choice(alphabet) for _ in range(length))


def get_device_hostname():
    """Fetches the device's network hostname.

    Returns:
        str: The hostname as reported by the platform module.
    """
    hostname = platform.node()
    return hostname


def get_inference_server_id():
    """Fetches a unique ID for this inference server instance.

    If `INFERENCE_SERVER_ID` is configured, it is returned verbatim.
    Otherwise a short random ID is generated and, when a GPU or Jetson
    device is detected, tagged with that hardware identifier. (Note: no
    CPU-ID fallback is performed — the previous docstring claiming one
    did not match the code.)

    Returns:
        str: A unique string representing the server. If detection raises,
            returns "UNKNOWN".
    """
    try:
        if INFERENCE_SERVER_ID is not None:
            return INFERENCE_SERVER_ID
        # `server_id` avoids shadowing the `id` builtin.
        server_id = random_string(6)
        gpu_id = get_gpu_id()
        if gpu_id is not None:
            return f"{server_id}-GPU-{gpu_id}"
        jetson_id = get_jetson_id()
        if jetson_id is not None:
            return f"{server_id}-JETSON-{jetson_id}"
        return server_id
    except Exception:
        # ID generation must never crash server startup.
        return "UNKNOWN"


# Resolved once at import time so all consumers observe a stable server ID.
GLOBAL_INFERENCE_SERVER_ID = get_inference_server_id()
# Prefer an explicitly configured DEVICE_ID; otherwise fall back to hostname.
GLOBAL_DEVICE_ID = DEVICE_ID if DEVICE_ID is not None else get_device_hostname()