import os
import time
import json

def get_batch_size(batch):
    """Return the number of examples in *batch*.

    The size is read from the leading dimension of the batch's
    "miss_id" tensor (assumed to have a ``.shape`` attribute).
    """
    miss_ids = batch["miss_id"]
    return miss_ids.shape[0]
        
def encode_context_id(file_id, stc_id, max_sentences=50000):
    """Pack a (file_id, stc_id) pair into a single integer id.

    Args:
        file_id: index of the source file.
        stc_id: sentence index within the file; must be smaller than
            ``max_sentences`` for the encoding to be reversible.
        max_sentences: packing radix. The default (50000) must match the
            value used by ``decode_context_id``.

    Returns:
        ``file_id * max_sentences + stc_id``.
    """
    return file_id * max_sentences + stc_id

def decode_context_id(context_id, max_sentences=50000):
    """Inverse of ``encode_context_id``: split a packed context id.

    Args:
        context_id: integer produced by ``encode_context_id``.
        max_sentences: packing radix; must match the one used to encode.

    Returns:
        Dict with keys ``"file_id"`` and ``"stc_id"``.
    """
    file_id, stc_id = divmod(context_id, max_sentences)
    return {
        "file_id": file_id,
        "stc_id": stc_id
    }

class SaveManager:
    """Buffers relation/sentence records and periodically dumps them to disk.

    Alongside the data dumps, a JSON progress file is written so that an
    interrupted run can be resumed from the last saved position.
    """

    def __init__(self,
        dump_dir,
        counter=0,
        log_interval=100,
        save_interval=500):
        """
        Args:
            dump_dir: directory where dump and progress files are written.
            counter: number of relations already processed (for resuming).
            log_interval: print a throughput report every N relations.
            save_interval: dump buffers every N relations; <= 0 disables
                all saving (including progress tracking).
        """
        self.sentence_dict = {}  # context_id -> raw token sequence
        self.relation_list = []  # buffered relation records awaiting dump
        self.log_interval = log_interval
        self.save_interval = save_interval
        self.counter = counter
        self.init_counter = counter  # baseline for average-speed reporting
        self.dump_dir = dump_dir
        self.progress_path = os.path.join(self.dump_dir, "progress.log")
        self.rel_template = os.path.join(dump_dir, "relation_list_cnt_{}.dump")
        self.stc_template = os.path.join(dump_dir, "sentence_dict_cnt_{}.dump")

        self.watch = StopWatch()

    def sort_relation_list(self, key="dist", reverse=True):
        """Sort the buffered relations in place.

        Args:
            key: "dist" sorts by the "distance" field; "index" sorts by
                (context, missing_index[0], masked_index[0]). Any other
                value leaves the list untouched.
            reverse: sort descending when True.
        """
        if key == "dist":
            self.relation_list.sort(key=lambda x: x["distance"], reverse=reverse)
        elif key == "index":
            # BUGFIX: update_relation stores the context under the key
            # "context", not "context_id" — the old lookup always raised
            # KeyError. Reading "context" keeps the dump format unchanged.
            self.relation_list.sort(
                key=lambda x: (x["context"] * 1000 + x["missing_index"][0]) * 1000
                + x["masked_index"][0],
                reverse=reverse,
            )
        else:
            return

    def save_disabled(self):
        """Return True when periodic saving is turned off (save_interval <= 0)."""
        return self.save_interval <= 0

    def load_progress(self):
        """Restore counters from the progress file, if any.

        Returns:
            (file_id, stc_id) of the last saved position, or (0, 0) when
            saving is disabled or no progress file exists.
        """
        if self.save_disabled():
            return (0, 0)
        if not os.path.exists(self.progress_path):
            return (0, 0)

        with open(self.progress_path, "r") as p_log:
            progress = json.load(p_log)
        self.save_interval = progress["save_interval"]
        self.counter = progress["counter"]
        self.init_counter = progress["counter"]
        return (progress["file_id"], progress["stc_id"])

    def dump_progress(self, context_id):
        """Write the current position and counters to the progress file.

        Args:
            context_id: packed context id (see encode_context_id) marking
                how far processing has advanced.
        """
        if self.save_disabled():
            return

        ids = decode_context_id(context_id)
        file_id, stc_id = ids["file_id"], ids["stc_id"]

        progress = {
            "file_id": file_id,
            "stc_id": stc_id,
            "counter": self.counter,
            "save_interval": self.save_interval
        }
        with open(self.progress_path, "w") as p_log:
            p_log.write(json.dumps(progress))

    def save_sentence_list(self):
        """Dump buffered sentences as one JSON object per line, sorted by id."""
        sentence_list = [
            {"id": context_id, "context": raw_tokens}
            for context_id, raw_tokens in self.sentence_dict.items()
        ]
        sentence_list.sort(key=lambda x: x["id"])
        save_path = self.stc_template.format(self.counter)
        with open(save_path, "w") as f:
            for sentence in sentence_list:
                f.write(json.dumps(sentence) + "\n")

    def update_sentence(self, sentence, context_id):
        """Record the raw tokens of *sentence* under *context_id*."""
        self.sentence_dict[context_id] = sentence["raw"]

    def save_relation_list(self):
        """Dump buffered relations as one JSON object per line."""
        save_path = self.rel_template.format(self.counter)
        with open(save_path, "w") as f:
            for relation in self.relation_list:
                f.write(json.dumps(relation) + "\n")

    def update_relation(self, sample, distance, context_id):
        """Buffer one relation record and handle periodic logging/saving.

        Args:
            sample: mapping with "miss_id" and "mask_id" tensors
                (must support ``.tolist()``).
            distance: scalar distance for this relation.
            context_id: packed context id the relation belongs to.
        """
        self.relation_list.append({
            "context": context_id,
            "missing_index": sample["miss_id"].tolist(),
            "masked_index": sample["mask_id"].tolist(),
            "distance": float(distance)
        })
        self.counter += 1
        if self.counter % self.log_interval == 0:
            interval = self.watch.tick()
            total = self.watch.total_elapsed()
            print("sentence count: {0}  current speed: {1:.4f} sent/s  speed on average: {2:.4f} sent/s".format(
                    self.counter, self.log_interval / interval, (self.counter - self.init_counter) / total
            ))

        if self.save_disabled():
            return

        if self.counter % self.save_interval == 0:
            print("Save examples.")
            self.save_relation_list()
            self.save_sentence_list()
            self.dump_progress(context_id)
            # Start fresh buffers; the dumped data lives on disk now.
            self.relation_list = []
            self.sentence_dict = {}

    def update_relation_batched(self, batch, distance, context_id):
        """Unbatch *batch* and feed each example to ``update_relation``.

        Args:
            batch: mapping of batched tensors keyed by "miss_id"/"mask_id".
            distance: indexable per-example distances.
            context_id: packed context id shared by the whole batch.
        """
        batch_size = get_batch_size(batch)
        updated_keys = ["miss_id", "mask_id"]
        for index in range(0, batch_size):
            relation = {}
            for key in updated_keys:
                batched_tensor = batch[key]
                relation[key] = batched_tensor[index]
            self.update_relation(relation, distance[index], context_id)

    def start_watch(self):
        """Start the internal stopwatch."""
        self.watch.start()

    def tick_watch(self):
        """Tick the internal stopwatch; returns seconds since the last tick."""
        return self.watch.tick()

class StopWatch:
    """Accumulates wall-clock timestamps and reports elapsed intervals."""

    def __init__(self):
        # Chronological list of recorded timestamps (seconds since epoch).
        self.ticks = []

    def start(self):
        """Record the initial timestamp."""
        now = time.time()
        self.ticks.append(now)

    def tick(self):
        """Record a timestamp and return seconds elapsed since the previous one."""
        now = time.time()
        self.ticks.append(now)
        previous = self.ticks[-2]
        return now - previous

    def total_elapsed(self):
        """Return seconds between the first and the most recent timestamp."""
        first, last = self.ticks[0], self.ticks[-1]
        return last - first

    def clear(self):
        """Discard every recorded timestamp."""
        self.ticks.clear()