# -*- coding: utf-8 -*-
import os
import math

# Target network architecture; selects the cfg path used by the __main__ demo below.
arch='yolov3'
# Number of BRAM blocks budgeted for weight storage on the target FPGA.
WEIGHT_BRAM=1200 # num of bram for weight


def extract_darknet(model, inputshape, px=13, py=13, pof=8, depth=1024, depth_wgt=8*1024*144):
    """Parse a darknet .cfg file into a per-layer hardware configuration dict.

    Parameters
    ----------
    model : path to an existing darknet .cfg file.
    inputshape : [height, width, channels] of the network input.
    px, py, pof : on-chip tile dimensions (rows, cols, parallel output
        channels); px*py*pof*depth is the activation-buffer capacity used
        to decide the reload_in / reload_out flags.
    depth : depth of the on-chip activation buffer.
    depth_wgt : on-chip weight-storage capacity threshold (multiplied by
        32 below; presumably a word->bit unit conversion -- TODO confirm).

    Returns
    -------
    (config, conv_names)
        config maps layer name -> dict of shapes (inputshape/outputshape),
        kernel/stride/padding, and control flags (mux_sel_*, reload_*,
        read_out/write_out/write_in, to_flag, ...).
        conv_names is every config key except upsample layers
        (NOTE: this includes pool layers, despite the name).
    """
    depth_wgt *= 32  # NOTE(review): scale factor on weight capacity; presumably unit conversion -- confirm
    # Weight-load ordering policy: 0 = never cout-first, 1 = always cout-first,
    # 2 = decide per layer from reload_wgt / write_out (see final loop).
    switch_order=2

    assert os.path.exists(model), 'model file not found'
    with open(model, 'r') as f:
        lines = f.read().split('\n')
    #get rid of comment (and empty lines)
    lines = [x for x in lines if x and not x.startswith('#')]
    #get rid of fringe whitespaces
    lines = [x.rstrip().lstrip() for x in lines]
    # Build a list of dicts, one per cfg section: {'type': section_name, key: value, ...}
    net_def=[]
    for line in lines:
        if line.startswith('['):
            layer_type=line[1:-1].rstrip().lower()
            net_def.append({})
            net_def[-1]['type']=layer_type
        else:
            key, val = line.split("=")
            net_def[-1][key.strip()]=val.strip()

    layer_count = 0          # counts only named layers (conv / pool / upsample)
    config = {}              # output: layer name -> attribute dict
    outputshape=inputshape   # rolling [H, W, C] of the most recent layer's output
    shortcut_flag=False      # pending residual-add/concat from a [shortcut] or 2-arg [route]
    mux_sel_concat_on=False  # pending concat mux for the next convolutional layer
    mux_sel_route_on=False   # pending re-route mux for the next convolutional layer
    shortcut_from=[-1]       # producer(s) feeding the next layer; [-1] = network input sentinel
    names=[]                 # one entry per cfg layer, so darknet's relative indices (-1, -4, ...) resolve

    for layer in net_def:
        layer_type=layer['type']

        if layer_type=='net':
            # Global network header: records the expected input dimensions.
            height=int(layer['height'])
            width=int(layer['width'])
            channels=int(layer['channels'])
#            assert inputshape==[height, width, channels], 'The inputshape mismatches config file!'

        elif layer_type=='convolutional':
            filters=int(layer['filters'])
            size=int(layer['size'])
            stride=int(layer['stride'])
            # pad=int(layer['pad'])
            # Derive padding from the kernel size (darknet's pad=1 means "same"
            # padding of size//2); the cfg's literal 'pad' value is ignored.
            pad=int((size-1) // 2)
            assert stride>0, 'stride should larger than 0!'
            assert pad>=0, 'pad should be positive!'

            layer_name='Convolutional'+str(layer_count)

            layer_count+=1
            config[layer_name]={}
            config[layer_name]['con']=True   # marks a convolution (processed by the flag passes below)
            config[layer_name]['filters']=filters
            config[layer_name]['kernel']=(size, size)
            config[layer_name]['stride']=(stride, stride)
            config[layer_name]['padding']=(pad, pad)
            # Record who produced this layer's input, then consume the pending flags.
            config[layer_name]['shortcut_flag']=shortcut_flag
            config[layer_name]['shortcut_from']=shortcut_from
            config[layer_name]['mux_sel_resadd_on']=False
            config[layer_name]['mux_sel_upsample_on']=False
            config[layer_name]['read_out']=False
            shortcut_flag=False
            shortcut_from=[layer_name]   # this conv becomes the default producer for the next layer
            config[layer_name]['mux_sel_concat_on']=mux_sel_concat_on
            config[layer_name]['mux_sel_route_on']=mux_sel_route_on
            if mux_sel_route_on:
                config[layer_name]['route_from']=route_from
            mux_sel_concat_on=False
            mux_sel_route_on=False
            # if config[layer_name]['shortcut_from'][0]==-4:
            # print(layer_name)
            # From the second named layer on, the input shape is whatever the
            # recorded producer layer emitted (handles route/upsample rewiring).
            if layer_count>1:
                outputshape=config[config[layer_name]['shortcut_from'][0]]['outputshape']
            if len(names)> 0 and names[-1]=='route':
                # A concat route feeds this conv: input channels are the sum
                # over all concatenated producers; spatial size is unchanged.
                c=0
                for key in config[layer_name]['shortcut_from']:
                    c+=config[key]['outputshape'][2]
                config[layer_name]['inputshape']=[outputshape[0], outputshape[1], c]
            else:
                config[layer_name]['inputshape']=outputshape

            # Standard convolution output size: floor((in - k + 2p)/s) + 1.
            outputshape=[int((outputshape[0]-size+2*pad)/stride)+1,
                         int((outputshape[1]-size+2*pad)/stride)+1,
                         int(filters)]
            config[layer_name]['outputshape']=outputshape
            names.append(layer_name)

            if 'batch_normalize' in layer.keys() and layer['batch_normalize']=='1':
                config[layer_name]['BatchNormalization']=True
            else:
                config[layer_name]['BatchNormalization']=False

            if 'activation' in layer.keys():
                if layer['activation']!=None:
                    config[layer_name]['activation']=layer['activation']

            # NOTE(review): shortcut_flag was reset to False a few lines above,
            # so this always assigns False; the real residual-add mux is enabled
            # by the wiring pass after the layer loop. Presumably dead -- confirm.
            config[layer_name]['mux_sel_resadd_on']=shortcut_flag

        elif layer_type=='maxpool':
            # Darknet defaults: missing size/stride fall back to the full
            # spatial extent (global pooling); missing pad defaults to 0.
            if 'size' in layer.keys():
                size=int(layer['size'])
            else:
                size=outputshape[0]

            if 'stride' in layer.keys():
                stride=int(layer['stride'])
            else:
                stride=outputshape[0]

            if 'pad' in layer.keys():
                pad=int(layer['pad'])
            else:
                pad=0

            layer_name='MaxPool'+str(layer_count)
            layer_count+=1
            config[layer_name]={}
            config[layer_name]['con']=False
            config[layer_name]['kernel']=(size, size)
            config[layer_name]['stride']=(stride, stride)
            config[layer_name]['padding']=(pad, pad)

            config[layer_name]['inputshape']=outputshape
            # NOTE(review): unlike the avgpool branch, this never stores
            # config[layer_name]['outputshape'], and it omits 'shortcut_flag',
            # which the post-processing pass below reads for every key -- a
            # [maxpool] section would raise KeyError there. Dormant for
            # yolov3 cfgs (which contain no pooling layers); verify before use.
            # Also note: pool layers are not appended to `names`, so darknet
            # relative route/shortcut indices may mis-resolve around them.
            outputshape=[int((outputshape[0]-size+2*pad)/stride)+1,
                         int((outputshape[1]-size+2*pad)/stride)+1,
                         outputshape[2]]

        elif layer_type=='avgpool':
            # Same defaulting rules as maxpool: absent size/stride => global pool.
            if 'size' in layer.keys():
                size=int(layer['size'])
            else:
                size=outputshape[0]

            if 'stride' in layer.keys():
                stride=int(layer['stride'])
            else:
                stride=outputshape[0]

            if 'pad' in layer.keys():
                pad=int(layer['pad'])
            else:
                pad=0

            layer_name='AvgPool'+str(layer_count)
            layer_count+=1
            config[layer_name]={}
            config[layer_name]['con']=False
            config[layer_name]['kernel']=(size, size)
            config[layer_name]['stride']=(stride, stride)
            config[layer_name]['padding']=(pad, pad)

            config[layer_name]['inputshape']=outputshape
            outputshape=[int((outputshape[0]-size+2*pad)/stride)+1,
                         int((outputshape[1]-size+2*pad)/stride)+1,
                         outputshape[2]]
            config[layer_name]['outputshape']=outputshape
            # NOTE(review): like maxpool, omits 'shortcut_flag' expected by the
            # post-processing pass -- would KeyError there. Dormant for yolov3.

        elif layer_type=='upsample':
            # Fixed 2x nearest-neighbour upsample (darknet's 'stride' key is ignored).
            layer_name='upsample'+str(layer_count)
            layer_count+=1
            config[layer_name]={}
            config[layer_name]['con']=False
            config[layer_name]['inputshape']=outputshape
            outputshape=[outputshape[0]*2,
                         outputshape[1]*2,
                         outputshape[2]]
            config[layer_name]['outputshape']=outputshape
            config[layer_name]['shortcut_flag']=shortcut_flag
            config[layer_name]['shortcut_from']=shortcut_from
            config[layer_name]['outputshape']=outputshape  # NOTE(review): duplicate assignment, harmless
            # Tell the producer layer to drive the upsample mux on its output path.
            config[shortcut_from[0]]['mux_sel_upsample_on']=True
            shortcut_flag=False
            shortcut_from=[layer_name]
            names.append(layer_name)

        elif layer_type=='shortcut':
            # Residual add: next conv consumes the sum of the previous layer
            # and the layer at darknet-relative index layer['from'].
            shortcut_flag=True
            shortcut_from=[names[-1], names[int(layer['from'])]]
            # NOTE(review): `layer_name` here is stale (the previous named
            # layer), so `names` gets a duplicate entry. This keeps `names`
            # one-entry-per-cfg-layer for relative indexing -- presumably
            # intentional, but confirm.
            names.append(layer_name)

        elif layer_type=='route':
            l = [int(x) for x in layer['layers'].split(',')]
            if len(l)==1:
                # Single-source route: rewire the next conv's input to an
                # earlier layer's output and mark that producer for read-out.
                shortcut_flag=False
                shortcut_from=[names[l[0]]]
                mux_sel_route_on=True
                route_from=[names[l[0]]]
                config[names[l[0]]]['read_out']=True
            else:
                # Two-source route: channel concatenation of the listed layers.
                shortcut_flag=True
                shortcut_from=[names[i] for i in l]
                # NOTE(review): l[0]-1 indexes the entry just before the first
                # routed layer; relies on `names` alignment -- verify against
                # the cfg's layer ordering conventions.
                config[names[l[0]-1]]['mux_sel_concat_on']=True
                config[names[l[0]-1]]['res_from']=names[l[1]]
                config[names[l[1]]]['read_out']=True
            names.append('route')

        elif layer_type=='yolo':
            names.append('yolo')
            # `layer_name` is still the conv preceding this [yolo] head:
            # its output must be read out as a detection feature map.
            config[layer_name]['read_out']=True
        else:
            # Unrecognized section type: report and skip.
            print(layer_type)

    # --- Wiring pass: resolve residual connections recorded during the scan ---
    # For each layer flagged with a residual input, mark the secondary source
    # with to_flag/to (it must keep its output alive) and enable the resadd
    # mux on the primary source.
    for key in config.keys():
        config[key]['to_flag']=False
        if config[key]['shortcut_flag']:
            shortcut_from_0=config[key]['shortcut_from'][0]
            shortcut_from=config[key]['shortcut_from'][1]
            config[shortcut_from]['to_flag']=True
            config[shortcut_from]['to']=key
            config[shortcut_from_0]['mux_sel_resadd_on']=True
            config[shortcut_from_0]['res_from']=shortcut_from

    # --- Flag pass: derive buffering / scheduling flags for conv layers ---
    # max(channels, 8) pads the channel count to the minimum hardware width.
    for key in config.keys():
        if config[key]['con']:
            # reload_in: input feature map exceeds on-chip activation buffer.
            config[key]['reload_in']=config[key]['inputshape'][0]*config[key]['inputshape'][1]*max(config[key]['inputshape'][2],8)>px*py*pof*depth
            # outputsize_true / reload_out: effective output size, accounting
            # for a downstream 2x upsample (4x area) and/or a concat partner's
            # extra channels, compared against the same buffer capacity.
            if config[key]['mux_sel_concat_on']:
                if config[key]['mux_sel_upsample_on']:
                    config[key]['outputsize_true']=[2*config[key]['outputshape'][0],2*config[key]['outputshape'][1],(config[key]['outputshape'][2] + config[config[key]['res_from']]['outputshape'][2])]
                    config[key]['reload_out']=4*config[key]['outputshape'][0]*config[key]['outputshape'][1]*(config[key]['outputshape'][2] + config[config[key]['res_from']]['outputshape'][2])>px*py*pof*depth
                else:
                    config[key]['outputsize_true']=[config[key]['outputshape'][0],config[key]['outputshape'][1],(config[key]['outputshape'][2] + config[config[key]['res_from']]['outputshape'][2])]
                    config[key]['reload_out']=config[key]['outputshape'][0]*config[key]['outputshape'][1]*(config[key]['outputshape'][2] + config[config[key]['res_from']]['outputshape'][2])>px*py*pof*depth
            else:
                if config[key]['mux_sel_upsample_on']:
                    config[key]['outputsize_true']=[2*config[key]['outputshape'][0],2*config[key]['outputshape'][1],config[key]['outputshape'][2]]
                    config[key]['reload_out']=4*config[key]['outputshape'][0]*config[key]['outputshape'][1]*max(config[key]['outputshape'][2],8)>px*py*pof*depth
                else:
                    config[key]['outputsize_true']=[config[key]['outputshape'][0],config[key]['outputshape'][1],config[key]['outputshape'][2]]
                    config[key]['reload_out']=config[key]['outputshape'][0]*config[key]['outputshape'][1]*max(config[key]['outputshape'][2],8)>px*py*pof*depth
            # reload_wgt: this layer's weights exceed on-chip weight storage.
            config[key]['reload_wgt']=max(config[key]['inputshape'][2],8)*config[key]['outputshape'][2]*config[key]['kernel'][0]*config[key]['kernel'][1]>depth_wgt
            config[key]['write_out']=config[key]['mux_sel_concat_on'] | (config[key]['mux_sel_resadd_on'] & config[key]['reload_out'])
            config[key]['read_out']=config[key]['reload_out'] | config[key]['read_out']
            # First layer, oversized inputs, and route targets all need their
            # input written in from off-chip.
            config[key]['write_in']=(key == names[0]) | config[key]['reload_in'] | config[key]['mux_sel_route_on']
            if switch_order==0:
                config[key]['mux_sel_cout_first']=False
            elif switch_order==1:
                config[key]['mux_sel_cout_first']=True
            else:
                # Auto: cin-first only when weights must be reloaded and the
                # output is not written out.
                config[key]['mux_sel_cout_first']=not(config[key]['reload_wgt'] & (not config[key]['write_out']))

    # Every layer name except upsamples (despite the variable name, this
    # includes pool layers too, not just convolutions).
    conv_names=[]
    for key in config.keys():
        if 'upsample' not in key:
            conv_names.append(key)
    return config, conv_names





if __name__ == '__main__':
    # Demo entry point: parse the selected architecture's cfg file.
    if arch == 'yolov3':
        # Hardware tiling parameters (channel/kernel parallelism, tile size).
        cpf = 4
        kpf = 8
        px = 13
        py = 13
        # 416x416 RGB input, matching the yolov3 cfg header.
        model = '../yolov3_ReLU.cfg'
        inputshape = [416, 416, 3]
        config, layer_name_list = extract_darknet(model, inputshape)

    # print(config)