import heapq
import json
from dataclasses import dataclass

import numpy as np
import torch
from tqdm import tqdm


@dataclass
class TopKConfig:
    """Configuration for computing top-k image predictions per text query."""

    # Path to a JSONL file of image features: one {"image_id", "feature"} object per line.
    image_feats: str
    # Path to a JSONL file of text features: one {"text_id", "feature"} object per line.
    text_feats: str
    # Path where the top-k predictions JSONL will be written.
    output: str
    # Number of highest-scoring image ids kept per text.
    top_k: int
    # Number of image features scored per matmul batch (bounds device memory use).
    eval_batch_size: int = 32768
    # NOTE(review): never read by make_topk_predictions — scores are plain dot
    # products regardless of this value; confirm features are L2-normalized upstream.
    metric: str = "cosine"
    # NOTE(review): only echoed in the parameter log; no code path consults it here.
    debug: bool = False


def make_topk_predictions(
    config: TopKConfig,
):
    """Compute the top-k highest-scoring images for every text query.

    Reads image and text features from the JSONL files named in ``config``
    (each line: ``{"image_id"|"text_id": ..., "feature": [...]}``), scores
    every (text, image) pair with a dot product, and writes one JSON line per
    text to ``config.output``: ``{"text_id": ..., "image_ids": [...]}`` with
    the ids of the ``config.top_k`` best images, best first.

    NOTE(review): ``config.metric`` is never consulted — the score is a raw
    dot product, which equals cosine similarity only if the features were
    L2-normalized upstream. Confirm against the feature-extraction step.
    """
    # Log params.
    print("Params:")
    for name in sorted(vars(config)):
        print(f"  {name}: {getattr(config, name)}")

    # Prefer GPU but fall back to CPU instead of crashing on CUDA-less hosts.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    print("Begin to load image features...")
    image_ids = []
    image_feats = []
    with open(config.image_feats, "r") as fin:
        for line in tqdm(fin):
            obj = json.loads(line.strip())
            image_ids.append(obj["image_id"])
            image_feats.append(obj["feature"])
    image_feats_array = np.array(image_feats, dtype=np.float32)
    print("Finished loading image features.")

    print("Begin to compute top-{} predictions for texts...".format(config.top_k))
    with open(config.output, "w") as fout, open(config.text_feats, "r") as fin:
        for line in tqdm(fin):
            obj = json.loads(line.strip())
            text_id = obj["text_id"]
            text_feat_tensor = torch.tensor(
                [obj["feature"]], dtype=torch.float, device=device
            )  # [1, feature_dim]
            score_tuples = []
            # Score images in batches to bound peak device memory; slices
            # clamp automatically at the end of the array.
            for start in range(0, len(image_ids), config.eval_batch_size):
                img_feats_tensor = torch.from_numpy(
                    image_feats_array[start : start + config.eval_batch_size]
                ).to(device)  # [batch_size, feature_dim]
                batch_scores = (
                    text_feat_tensor @ img_feats_tensor.t()
                )  # [1, batch_size]
                score_tuples.extend(
                    zip(
                        image_ids[start : start + config.eval_batch_size],
                        batch_scores.squeeze(0).tolist(),
                    )
                )
            # O(N log k) selection instead of sorting all N scores; nlargest is
            # documented-equivalent to sorted(..., reverse=True)[:k], ties included.
            top_k_predictions = heapq.nlargest(
                config.top_k, score_tuples, key=lambda x: x[1]
            )
            fout.write(
                "{}\n".format(
                    json.dumps(
                        {
                            "text_id": text_id,
                            "image_ids": [entry[0] for entry in top_k_predictions],
                        }
                    )
                )
            )

    print("Top-{} predictions are saved in {}".format(config.top_k, config.output))
    print("Done!")
