import preprocess.url_parser as Parser
import preprocess.gram as Gram
import numpy as np

# ========================================================================================================
# =============================== Extract features from training set, dev set and test set separately ====
def train_websites_to_feature_dict(train_websites,n1,n2):
    """Build n-gram feature dictionaries from the training websites.

    For every token parsed out of a training URL that has not been seen
    before, collect its unique letter n-grams (for n from n1 to n2,
    inclusive) and assign each newly encountered n-gram the next running
    column index.

    Parameters
    ----------
    train_websites : iterable of str
        Training URLs.
    n1, n2 : int
        Inclusive lower/upper bounds of the n-gram sizes to extract.

    Returns
    -------
    dict_token_features : dict
        token -> list of its unique n-gram features (insertion order).
    dict_feature_index : dict
        feature -> integer column index in the feature vector.
    """
    dict_token_features = {}
    dict_feature_index = {}
    next_index = 0

    for site in train_websites:
        for tok in Parser.UrlParser(site).names:
            # Each token is processed only once across the whole set.
            if tok in dict_token_features:
                continue
            token_feats = []
            dict_token_features[tok] = token_feats
            for size in range(n1, n2 + 1):
                for feat in Gram.letter_n_gram(tok, size):
                    if feat not in token_feats:
                        token_feats.append(feat)
                    if feat not in dict_feature_index:
                        dict_feature_index[feat] = next_index
                        next_index += 1

    return dict_token_features, dict_feature_index

def websites_to_vec_with_dict(dict_token_features, dict_feature_index, websites):
    """Vectorize websites as integer n-gram count vectors.

    Each website is parsed into tokens; for every token seen during
    training, all of its n-gram features increment the corresponding
    column of that website's row.

    Parameters
    ----------
    dict_token_features : dict
        token -> list of n-gram features (from train_websites_to_feature_dict).
    dict_feature_index : dict
        feature -> column index (from train_websites_to_feature_dict).
    websites : sequence of str
        URLs to vectorize.

    Returns
    -------
    numpy.ndarray of shape (len(websites), len(dict_feature_index)), dtype int.
    """
    websites_vec = np.zeros([len(websites), len(dict_feature_index)], dtype=int)
    # enumerate replaces the manual row counter; the unused
    # feature_length local from the original has been removed.
    for row, website in enumerate(websites):
        tokens = Parser.UrlParser(website).names
        for token in tokens:
            # Tokens never seen in training contribute nothing.
            if token in dict_token_features:
                for feature in dict_token_features[token]:
                    websites_vec[row, dict_feature_index[feature]] += 1

    return websites_vec
        
def websites_to_feature(websites,n1,n2):
    """Extract n-gram count features for the train/test (and optional dev) splits.

    The feature dictionaries are fitted on the training split ONLY, then
    applied to every split so all vectors share the same columns.

    Parameters
    ----------
    websites : dict
        Must contain 'train' and 'test' keys (lists of URLs); may also
        contain a 'dev' key.
    n1, n2 : int
        Inclusive n-gram size range, forwarded to the fitting step.

    Returns
    -------
    dict with the same keys as `websites`, mapping each split name to its
    feature matrix (numpy.ndarray).
    """
    features = {}
    dict_token_features, dict_feature_index = train_websites_to_feature_dict(websites['train'], n1, n2)
    features['train'] = websites_to_vec_with_dict(dict_token_features, dict_feature_index, websites['train'])
    features['test'] = websites_to_vec_with_dict(dict_token_features, dict_feature_index, websites['test'])
    # Check for the 'dev' key explicitly instead of relying on the dict
    # having exactly three entries (which breaks if extra keys exist).
    if 'dev' in websites:
        features['dev'] = websites_to_vec_with_dict(dict_token_features, dict_feature_index, websites['dev'])

    return features

# ========================================================================================================    
# ========================================================================================================
def partition_data(method,websites,categories):
    """Shuffle the dataset with a fixed seed and split it into named parts.

    Parameters
    ----------
    method : str
        'train_test' -> 70/30 split; 'train_dev_test' -> 60/20/20 split.
        Any other value yields empty dictionaries.
    websites, categories : sequence
        Parallel sequences of URLs and their labels.

    Returns
    -------
    (dict_websites, dict_categories) : two dicts keyed by split name
        ('train', optionally 'dev', 'test'), holding numpy arrays.
    """
    # Fixed seed so the shuffle (and therefore the split) is reproducible.
    np.random.seed(42)

    websites = np.array(websites)
    categories = np.array(categories)
    order = np.random.permutation(len(categories))
    websites = websites[order]
    categories = categories[order]

    total = len(categories)
    if method == 'train_test':
        cut = int(total * 0.7)
        bounds = {'train': (0, cut), 'test': (cut, total)}
    elif method == 'train_dev_test':
        cut1 = int(total * 0.6)
        cut2 = int(total * 0.8)
        bounds = {'train': (0, cut1), 'dev': (cut1, cut2), 'test': (cut2, total)}
    else:
        bounds = {}

    dict_websites = {}
    dict_categories = {}
    for name, (lo, hi) in bounds.items():
        dict_websites[name] = websites[lo:hi]
        dict_categories[name] = categories[lo:hi]
    return dict_websites,dict_categories
    
        
            
