from sentence_transformers import SentenceTransformer
import torch

class BaseEmbeddings:
    """Abstract interface for embedding backends.

    Concrete subclasses must override :meth:`get_embedding` and
    :meth:`cal_similarity`.
    """

    def __init__(self):
        """No shared state; exists so subclasses can call ``super().__init__()``."""

    def get_embedding(self, queries):
        """Encode *queries* into embedding vectors. Must be overridden."""
        raise NotImplementedError("This method should be overridden by subclasses")

    def cal_similarity(self, embedding1, embedding2):
        """Return the similarity between two embeddings. Must be overridden."""
        raise NotImplementedError("This method should be overridden by subclasses")


class LocalEmbeddings(BaseEmbeddings):
    """Embedding backend that runs a local ``sentence_transformers`` model.

    Args:
        model_name_or_path: Hub model id or local path handed to
            ``SentenceTransformer``.
        device: Device string (e.g. ``"cpu"``, ``"cuda:0"``) the model runs on.
        use_fp16: If True, cast the model weights to float16 to halve memory.
            NOTE(review): fp16 is usually only a win on GPU — confirm callers
            do not enable it on CPU.
    """

    def __init__(self, model_name_or_path, device, use_fp16=True):
        super().__init__()
        self.device = device
        self.model = SentenceTransformer(model_name_or_path, device=device)
        if use_fp16:
            print("Using FP16 for model")
            self.model = self.model.half()

    def get_embedding(self, queries):
        """Encode *queries* and return the embeddings as a torch tensor.

        Args:
            queries: A string or sequence of strings to embed.

        Returns:
            ``torch.Tensor`` holding the encoded embeddings.
        """
        embedding = self.model.encode(queries)
        # as_tensor avoids the extra copy that torch.tensor() makes when
        # `encode` already returns a numpy array.
        return torch.as_tensor(embedding)

    def cal_similarity(self, embedding1, embedding2):
        """Compute similarity between two (batches of) embeddings.

        Bug fix: the old guard compared ``len()`` of the inputs, which for 2-D
        batches is the *batch size* — wrongly rejecting valid m-vs-n
        comparisons while never validating the feature dimension. We now
        compare the trailing (feature) dimension instead.

        Args:
            embedding1: 1-D vector or 2-D (batch, dim) array/tensor.
            embedding2: 1-D vector or 2-D (batch, dim) array/tensor with the
                same feature dimension as ``embedding1``.

        Returns:
            The similarity score/matrix produced by ``self.model.similarity``.

        Raises:
            ValueError: If the feature dimensions differ.
        """
        e1 = torch.as_tensor(embedding1)
        e2 = torch.as_tensor(embedding2)
        if e1.shape[-1] != e2.shape[-1]:
            raise ValueError("Embeddings must be of the same length")
        return self.model.similarity(e1, e2)