import pickle
import random
import os
import sys


class ShareMemory:
    """Disk-backed, list-like container of picklable objects.

    Objects are accumulated in an in-memory ``buffer`` and flushed to
    numbered pickle "group" files derived from *caches_path* (e.g.
    ``cache0.pkl``, ``cache1.pkl`` for ``cache.pkl``). The container
    itself is periodically pickled to *caches_path* so a crashed or
    finished session can be restored with :meth:`from_file`.
    """

    def __init__(self, caches_path, robustness_rate=0.05, group_max_number=1000, group_max_size=2**30, addon=None) -> None:
        # Flush the buffer to a group file once it holds this many objects.
        self.group_max_number = group_max_number
        # Approximate byte-size limit used by add() and compact().
        self.group_max_size = group_max_size
        # In-memory objects not yet flushed to a group file.
        self.buffer = []
        # Index of the group file the next flush writes to.
        self.target_group_id = 0
        # One [object_count, size_in_bytes, file_path] entry per group file.
        self.group_meta = []
        self.caches_path = caches_path
        # BUGFIX: default was a mutable `{}` literal shared across every
        # instance; use None as the sentinel and build a fresh dict here.
        self.addon = {} if addon is None else addon
        # Probability of checkpointing the container on each add().
        self.robustness_rate = robustness_rate

    def add(self, obj):
        """Append *obj* to the buffer, flushing to disk when full.

        Returns True when a flush to a group file happened, else False.
        """
        self.buffer.append(obj)

        # meta_size (size of the last checkpoint pickle, which embeds the
        # buffer) is used as a cheap proxy for the buffer's byte size.
        if len(self.buffer) >= self.group_max_number or self.meta_size >= self.group_max_size:
            self.write_buffer()
            self.target_group_id += 1
            return True

        # Occasionally checkpoint the whole container for crash robustness.
        if random.random() < self.robustness_rate:
            self.save()

        return False

    def compact(self, max_group_size=None):
        """Merge adjacent group files so each stays under *max_group_size* bytes.

        When *max_group_size* is given it also becomes the new
        ``group_max_size``. The in-memory buffer is folded into the last
        merged group, and ``group_meta`` is rebuilt from disk afterwards.
        """
        if max_group_size is None:
            max_group_size = self.group_max_size
        else:
            self.group_max_size = max_group_size

        now_size = 0
        group_number = 0
        group_buffer = []

        for group_length, group_size, group_name in self.group_meta:
            # Flush the accumulated objects before this group would push
            # the merged file past the size limit.
            if now_size != 0 and now_size + group_size > max_group_size:
                self.update_group(group_number, group_buffer)
                group_buffer = []
                group_number += 1
                now_size = 0

            now_size += group_size
            with open(group_name, 'rb') as f:
                group_buffer += pickle.load(f)
            # The source file is rewritten (or merged away) by update_group.
            os.remove(group_name)

        if now_size > 0:
            # Fold the not-yet-flushed buffer into the final merged group.
            if len(self.buffer) > 0:
                group_buffer += self.buffer
            self.update_group(group_number, group_buffer)
            del group_buffer
            self.buffer = []
            now_size = 0

        # Rebuild group_meta from the files actually left on disk.
        self.update_meta()

    def dev_iter(self, start_index):
        """Yield every stored object starting at global index *start_index*."""
        start_group_id = 0
        acc_length = 0
        for group_length, group_size, group_name in self.group_meta:
            acc_length += group_length
            if start_index < acc_length:
                break
            start_group_id += 1

        is_first_group = True
        for i in range(start_group_id, self.group_number):
            group_obj = self.group(i)
            if is_first_group:
                # When the loop broke inside a group, start_index - acc_length
                # is a negative offset equivalent to the in-group position;
                # when start_index lies past every group it is a plain
                # non-negative offset into the buffer.
                group_obj = group_obj[start_index - acc_length:]
                is_first_group = False
            for obj in group_obj:
                yield obj

    @property
    def meta_size(self):
        """Byte size of the container checkpoint on disk (0 when absent)."""
        if os.path.exists(self.caches_path):
            return os.stat(self.caches_path).st_size
        return 0

    def add_list(self, obj_list):
        """Add every object of *obj_list* via :meth:`add`."""
        for obj in obj_list:
            self.add(obj)

    def write_buffer(self):
        """Flush the buffer into the current target group file.

        A brand-new target group gets the buffer as-is; an already
        registered group is merged with the buffer so previously flushed
        objects are preserved. Returns the group file's path.
        """
        group_path = self.group_name(self.target_group_id)

        if self.target_group_id >= len(self.group_meta):
            # New group: write the buffer and register its real on-disk
            # size (the original recorded meta_size — the checkpoint
            # file's size — which corrupted the `size` property).
            with open(group_path, 'wb') as f:
                pickle.dump(self.buffer, f)
            self.group_meta.append(
                [len(self.buffer), os.stat(group_path).st_size, group_path])
        else:
            # Existing group (e.g. after update_meta/compact): merge old
            # contents with the buffer instead of overwriting them.
            with open(group_path, 'rb') as f:
                merged = pickle.load(f) + self.buffer
            with open(group_path, 'wb') as f:
                pickle.dump(merged, f)
            self.group_meta[self.target_group_id][0] = len(merged)
            self.group_meta[self.target_group_id][1] = os.stat(group_path).st_size

        self.buffer = []
        self.save()
        return group_path

    def update_group(self, group_number, obj):
        """Replace the contents of group *group_number* with list *obj*.

        ``group_number == len(self.group_meta)`` addresses the in-memory
        buffer; larger indices raise IndexError.
        """
        if group_number == len(self.group_meta):
            self.buffer = obj
        elif group_number > len(self.group_meta):
            # out of range
            raise IndexError('group_number out of range')
        else:
            group_path = self.group_name(group_number)
            with open(group_path, 'wb') as f:
                pickle.dump(obj, f)
            self.group_meta[group_number][0] = len(obj)
            self.group_meta[group_number][1] = os.stat(group_path).st_size

        self.save()

    def group(self, index):
        """Return the list stored in group *index*.

        The index one past the last group maps to the in-memory buffer.
        """
        if index == len(self.group_meta):
            return self.buffer

        group_path = self.group_name(index)
        with open(group_path, 'rb') as f:
            return pickle.load(f)

    def update_meta(self):
        """Rebuild ``group_meta`` by scanning consecutive group files on disk."""
        start_group_id = 0
        self.group_meta = []
        while True:
            group_name = self.group_name(start_group_id)
            if not os.path.exists(group_name):
                break
            with open(group_name, 'rb') as f:
                obj = pickle.load(f)
            self.group_meta.append(
                [len(obj), os.stat(group_name).st_size, group_name])
            start_group_id += 1

        # Point the next flush at the last existing group (write_buffer
        # merges into it). Clamp at 0 so an empty directory does not
        # yield a negative group index (the original produced -1 here,
        # crashing the next write_buffer).
        self.target_group_id = max(start_group_id - 1, 0)
        self.save()

    def save(self):
        """Checkpoint the whole container to *caches_path*."""
        with open(self.caches_path, 'wb') as f:
            pickle.dump(self, f)

    def iter_group(self):
        """Yield each group's list in order, ending with the buffer if non-empty."""
        for i in range(len(self.group_meta)):
            group_path = self.group_name(i)
            # Stop at the first missing file, mirroring update_meta's scan.
            if not os.path.exists(group_path):
                break
            with open(group_path, 'rb') as f:
                yield pickle.load(f)

        if len(self.buffer) > 0:
            yield self.buffer

    def __iter__(self):
        """Iterate over every stored object: flushed groups first, then the buffer."""
        for group_obj in self.iter_group():
            # A group is normally a list; tolerate a non-iterable payload.
            if hasattr(group_obj, '__iter__'):
                for e in group_obj:
                    yield e
            else:
                yield group_obj

    def __len__(self):
        """Total number of stored objects (flushed groups + buffer)."""
        return sum(e[0] for e in self.group_meta) + len(self.buffer)

    @property
    def size(self):
        """Approximate total byte size: on-disk groups plus the *shallow*
        in-memory size of the buffer (sys.getsizeof does not follow refs)."""
        return sum(e[1] for e in self.group_meta) + sys.getsizeof(self.buffer)

    @property
    def group_number(self):
        """Number of groups, counting a non-empty buffer as one extra group."""
        if len(self.buffer) > 0:
            return len(self.group_meta) + 1
        return len(self.group_meta)

    def group_name(self, number):
        """Path of group file *number*: the index is spliced in before the
        extension of *caches_path*."""
        name, ext = os.path.splitext(self.caches_path)
        return f'{name}{number}{ext}'

    @staticmethod
    def from_file(caches_path, **args):
        """Restore a checkpoint from *caches_path*, or build a new container.

        A missing file yields a fresh instance; a file holding something
        other than a ShareMemory yields a fresh instance whose metadata
        is rebuilt from the group files on disk.

        SECURITY NOTE: unpickling executes arbitrary code — only load
        trusted checkpoint files.
        """
        if not os.path.exists(caches_path):
            return ShareMemory(caches_path, **args)

        with open(caches_path, 'rb') as f:
            obj = pickle.load(f)

        if isinstance(obj, ShareMemory):
            return obj

        obj = ShareMemory(caches_path, **args)
        obj.update_meta()
        return obj
                
def pickle_load(path):
    """Load and return the pickled object stored in the file at *path*."""
    with open(path, 'rb') as source:
        obj = pickle.load(source)
    return obj
    
def pickle_save(obj, path):
    """Serialize *obj* with pickle into the file at *path*."""
    with open(path, 'wb') as sink:
        pickle.dump(obj, sink)