# coding=utf-8
# project
import os.path as osp

# Absolute path of the repository root (parent directory of this config file).
PROJECT_PATH = osp.abspath(osp.join(osp.dirname(__file__), '..'))
# NOTE(review): hard-coded absolute dataset location (a WSL /mnt/g mount);
# the commented-out alternative keeps data inside the project tree.
# Adjust per machine — this will not exist on other systems.
DATA_PATH = "/mnt/g/FloW/FloW-RI/" # osp.join(PROJECT_PATH, 'data')

# Network architecture selection.
MODEL_TYPE = {"TYPE": "RISFNet"}  # network type: RISFNet
# Convolution variant used throughout the backbone.
CONV_TYPE = {"TYPE": "GENERAL"}  # conv type: DO_CONV or GENERAL
# Optional attention module inserted into the network.
ATTENTION = {"TYPE": "NONE"}  # attention type: SEnet, CBAM or NONE

# train
# Training hyper-parameters. Values in trailing comments are alternatives
# the authors experimented with.
TRAIN = {
    "DATA_TYPE": "FloW",  # dataset selector; see get_radar_path below
    "TRAIN_IMG_SIZE": 416, # fixed input size — the MLP requires a constant shape
    "RADAR_FRAME": 4, # number of radar frames fed per sample; must be a multiple of 2
    "RADAR_FRAME_THRESHOLD": 1000, # max timestamp gap between image and radar, in ms
    "AUGMENT": True,  # enable data augmentation
    "BATCH_SIZE": 2, # 4
    "MULTI_SCALE_TRAIN": False, # True — disabled because input size is fixed
    "IOU_THRESHOLD_LOSS": 0.5,  # IoU threshold used when assigning loss targets
    "EPOCHS": 100, # 100
    "EVAL_EPOCH": 70, # 70 # start running evaluation from this epoch onward
    "NUMBER_WORKERS": 6, # 0 — DataLoader worker processes
    "WEIGHT_DECAY": 0.0005,
    "LR_INIT": 1e-3,  # initial learning rate
    "showatt": False # dump attention heatmaps during training
}

# val
# Validation / evaluation settings.
VAL = {
    "TEST_IMG_SIZE": 416, # fixed input size — the MLP requires a constant shape
    "BATCH_SIZE": 1, # only for coco_eval; for FloW this is always 1
    "NUMBER_WORKERS": 6, # 0 — DataLoader worker processes
    "CONF_THRESH": 0.005,  # minimum objectness confidence to keep a detection
    "NMS_THRESH": 0.1, # 0.45 — IoU threshold for non-maximum suppression
    "MAP_IOU_THRESH": 0.5, # 0.3 & 0.5 # IoU threshold for the metric, e.g. mAP@0.5
    "MULTI_SCALE_VAL": False,  # evaluate at multiple scales
    "FLIP_VAL": False,  # horizontal-flip test-time augmentation
    "Visual": False,  # draw/save detection visualizations
    "showatt": False  # dump attention heatmaps during evaluation
}

def get_radar_path(img_path):
    """Map an image file path to its paired radar (RPDM) map path.

    For the FloW dataset, radar maps mirror the image directory layout:
    images live under a 'Pic' directory as .jpg, the corresponding radar
    maps under 'Radar_RPDM' as .png.

    Args:
        img_path: path to an image file (expected to contain 'Pic' and
            end with '.jpg' for the FloW dataset).

    Returns:
        The corresponding radar .png path, or '' when TRAIN["DATA_TYPE"]
        is not "FloW".
    """
    # Guard clause: only the FloW dataset has paired radar data.
    if TRAIN["DATA_TYPE"] != "FloW":
        return ''
    # NOTE(review): str.replace substitutes every occurrence of 'Pic' in
    # the path, not just the directory component — fine as long as no
    # other path segment contains 'Pic'.
    return img_path.replace('Pic', 'Radar_RPDM').replace('.jpg', '.png')

# Template for a user-supplied dataset: fill in class count and names.
Customer_DATA = {
    "NUM": 1,  # your dataset number
    "CLASSES": [],  # your dataset class
}

# FloW dataset: single-class floating-waste detection ("bottle").
FloW_DATA = {
    "NUM": 1,
    "CLASSES": ["bottle"],
}

# PASCAL VOC dataset: 20 object classes, listed in canonical VOC order.
VOC_DATA = {
    "NUM": 20,
    "CLASSES": [
        "aeroplane",
        "bicycle",
        "bird",
        "boat",
        "bottle",
        "bus",
        "car",
        "cat",
        "chair",
        "cow",
        "diningtable",
        "dog",
        "horse",
        "motorbike",
        "person",
        "pottedplant",
        "sheep",
        "sofa",
        "train",
        "tvmonitor",
    ],
}

# MS COCO dataset: 80 object classes, listed in canonical COCO order
# (class index = position in this list).
COCO_DATA = {
    "NUM": 80,
    "CLASSES": [
        "person",
        "bicycle",
        "car",
        "motorcycle",
        "airplane",
        "bus",
        "train",
        "truck",
        "boat",
        "traffic light",
        "fire hydrant",
        "stop sign",
        "parking meter",
        "bench",
        "bird",
        "cat",
        "dog",
        "horse",
        "sheep",
        "cow",
        "elephant",
        "bear",
        "zebra",
        "giraffe",
        "backpack",
        "umbrella",
        "handbag",
        "tie",
        "suitcase",
        "frisbee",
        "skis",
        "snowboard",
        "sports ball",
        "kite",
        "baseball bat",
        "baseball glove",
        "skateboard",
        "surfboard",
        "tennis racket",
        "bottle",
        "wine glass",
        "cup",
        "fork",
        "knife",
        "spoon",
        "bowl",
        "banana",
        "apple",
        "sandwich",
        "orange",
        "broccoli",
        "carrot",
        "hot dog",
        "pizza",
        "donut",
        "cake",
        "chair",
        "couch",
        "potted plant",
        "bed",
        "dining table",
        "toilet",
        "tv",
        "laptop",
        "mouse",
        "remote",
        "keyboard",
        "cell phone",
        "microwave",
        "oven",
        "toaster",
        "sink",
        "refrigerator",
        "book",
        "clock",
        "vase",
        "scissors",
        "teddy bear",
        "hair drier",
        "toothbrush",
    ],
}


# model
# Detection head configuration. Anchor values are expressed in feature-map
# units (pixel size divided by the corresponding stride); the trailing
# comments give the equivalent pixel dimensions.
MODEL = {
    "ANCHORS": [
        [
            (1.25, 1.625),
            (2.0, 3.75),
            (4.125, 2.875),
        ],  # Anchors for small obj(12,16),(19,36),(40,28)
        [
            (1.875, 3.8125),
            (3.875, 2.8125),
            (3.6875, 7.4375),
        ],  # Anchors for medium obj(36,75),(76,55),(72,146)
        [
            (3.625, 2.8125),
            (4.875, 6.1875),
            (11.65625, 10.1875)
        ],  # Anchors for big obj(142,110),(192,243),(459,401)
    ],
    # Downsampling factor of each detection scale relative to the input image.
    "STRIDES": [8, 16, 32],
    # NOTE(review): "SCLAE" is a typo for "SCALE", but the key is presumably
    # referenced elsewhere in the project — do not rename without updating
    # all call sites.
    "ANCHORS_PER_SCLAE": 3,
}
