import datetime
from collections import OrderedDict
import csv

DATA_FOLDER = "Data"

# Raw sensor dumps named <device-id>_<unix-timestamp>_<sensor>.bigdata.
WIFI_DATA_FILENAME = "1b768f942564005168dda562defa5b_1417724694_wifi.bigdata"
#WIFI_DATA_FILENAME = "1b768f942564005168dda562defa5b_1416860493.bigdata"
GPS_DATA_FILENAME = "1b768f942564005168dda562defa5b_1415646503_gps.bigdata"
CLASS_DATA_FILENAME = "TransportDataDetail.txt"  # ground-truth transport-mode labels
FEATURES_DATA_FILENAME = "classification_features.txt"  # precomputed classification features

def load_features_data(file_name):
    """Load per-interval classification features from a tab-separated file.

    Each data row is keyed by (interval start datetime, transport label)
    and maps to a 6-tuple of floats: (distance, time, speed,
    avg_router_life, max_router_life, new_routers_per_bin).
    The header row (first field 'transportation_start') is skipped.

    Parameters:
        file_name: path to the tab-separated features file.

    Returns:
        OrderedDict keyed by (datetime, label), in file order.
    """
    features_data = OrderedDict()
    # Text mode so the csv module works on both Python 2 and 3;
    # the original Python-2-only 'rb' mode breaks csv under Python 3.
    with open(file_name, 'r') as f:
        reader = csv.reader(f, delimiter='\t')
        for row in reader:
            if row[0] == 'transportation_start':
                continue  # header row
            curr_date = datetime.datetime.strptime(row[0], '%m/%d/%Y %H:%M')
            # Columns 3..8 hold the six numeric features; row[2] is the label.
            features_data[(curr_date, row[2])] = tuple(float(v) for v in row[3:9])
    return features_data
    
def loadTransportDataDetail(file_name):
    """Load the ground-truth transport-mode log.

    Each tab-separated row is: date (dd-mm-YYYY), time (HH:MM), mode label.

    Parameters:
        file_name: path to the tab-separated label file.

    Returns:
        [class_data, mode_set] where class_data is an OrderedDict mapping
        datetime -> mode string (e.g. 'walk', 'stand') in file order, and
        mode_set is the set of distinct mode strings seen.
    """
    class_data = OrderedDict()  # key: date, value: transport, e.g. walk, stand
    mode_set = set()
    # Text mode so csv works on Python 3 as well ('rb' is Python-2-only).
    with open(file_name, 'r') as f:
        reader = csv.reader(f, delimiter='\t')
        for row in reader:
            curr_date = datetime.datetime.strptime(row[0] + ' ' + row[1], "%d-%m-%Y %H:%M")
            class_data[curr_date] = row[2]
            mode_set.add(row[2])
    return [class_data, mode_set]
    
def loadTransportDataStartEnd(file_name):
    """Reduce the detailed transport log to the entries that start a new
    movement segment (the row after a long-enough stationary gap).

    NOTE(review): relies on Python-2-only dict behaviour
    (.values()[0], .keys()[idx], xrange) and will not run under Python 3
    as written.
    """
    [class_data, mode_set] = loadTransportDataDetail(file_name)
    stop_location_time = 10  # minimum stop duration, in minutes
    flag = True  # True -> the next examined row opens a new segment
    location_gt = OrderedDict()

    start_idx = 0
    # Skip a leading 'stand' entry so the first kept row is a movement start.
    if "stand" in class_data.values()[0]:
        start_idx = 1
            
    for idx in xrange(start_idx, len(class_data.keys()) - 1):
        class_date = class_data.keys()[idx]
        next_class_date = class_data.keys()[idx + 1]
        if flag:
            # First row of a new segment: record it.
            location_gt[class_date] = class_data[class_date]
            flag = False
        else:
            # Segment boundary: a 'stand'/'start_data' row followed by a gap
            # longer than stop_location_time minutes, or a 'stand' row right
            # before an 'end_data' row.
            # NOTE(review): timedelta.seconds ignores whole days, so gaps of
            # 24h+ are mis-measured — .total_seconds() was probably intended;
            # confirm before changing.
            if ("stand" in class_data[class_date] or "start_data" in class_data[class_date]) and ((next_class_date - class_date).seconds / 60) > stop_location_time \
            or ("stand" in class_data[class_date] and "end_data" in class_data[next_class_date]):
                flag = True
                location_gt[class_date] = class_data[class_date]
                
    # NOTE(review): keys on the last *iterated* date but stores the value of
    # the last entry in class_data; possibly class_data.keys()[-1] was meant
    # as the key here — verify against callers.
    location_gt[class_date] = class_data[class_data.keys()[-1]]                
    return location_gt
    
def loadScanData(file_name, start_date, end_date):
    """Load wifi scan records whose timestamp lies strictly between
    start_date and end_date, truncated to 1-minute resolution.

    A row whose 4th column parses to a negative number is assumed to have a
    comma inside the ssid, shifting every later column right by one.

    Parameters:
        file_name: path to the comma-separated wifi scan dump.
        start_date, end_date: datetime bounds (exclusive on both sides).

    Returns:
        OrderedDict mapping router (bssid, ssid) -> list of
        (minute-truncated datetime, rssi) tuples, in file order.
    """
    scan_data_routers_count_1min_bin = OrderedDict()  # key: (bssid, ssid), value: list of (date, rssi)
    # Text mode + int(): works on both Python 2 and 3; the original used the
    # Python-2-only 'rb' mode and long() (int() auto-promotes on Python 2).
    with open(file_name, 'r') as f:
        reader = csv.reader(f, delimiter=',')
        for row in reader:
            if row[3] == 'timestamp':
                continue  # header row
            if int(row[3]) < 0:
                # ssid contained a comma: columns are shifted right by one.
                curr_date = datetime.datetime.fromtimestamp(int(row[4]))
                router = (row[0] + row[1], row[2])
                rssi = int(row[3])
            else:
                curr_date = datetime.datetime.fromtimestamp(int(row[3]))
                router = (row[0], row[1])
                rssi = int(row[2])

            if start_date < curr_date < end_date:
                # Truncate to whole minutes (1-minute bins).
                curr_date = curr_date.replace(second=0, microsecond=0)
                # append() instead of the original `list + [item]`, which
                # rebuilt the list on every row (quadratic).
                scan_data_routers_count_1min_bin.setdefault(router, []).append((curr_date, rssi))

    # Drop a stray entry produced by malformed header-like rows, if present.
    scan_data_routers_count_1min_bin.pop("ssid", None)
    return scan_data_routers_count_1min_bin
    
def loadScanDataBinned(file_name, start_date, end_date, delta=1):
    """Load wifi scans and re-bin each router's sightings into delta-minute
    bins anchored at the top of the hour of that router's first sighting.

    Parameters:
        file_name, start_date, end_date: passed through to loadScanData.
        delta: bin width in minutes (default 1).

    Returns:
        OrderedDict mapping router (bssid, ssid) -> sorted list of unique
        (bin-start datetime, 1) tuples.  Routers with the null bssid
        '00:00:00:00:00:00' are dropped.
    """
    scan_data_routers_count_1min_bin = loadScanData(file_name, start_date, end_date)
    scan_data_binned = OrderedDict()

    # Drop duplicate (minute, rssi) sightings per router.
    for key in list(scan_data_routers_count_1min_bin):
        scan_data_routers_count_1min_bin[key] = list(set(scan_data_routers_count_1min_bin[key]))

    for key in list(scan_data_routers_count_1min_bin):
        date_list = []
        date_list_orig = sorted(scan_data_routers_count_1min_bin[key])
        idx = 0
        # First bin starts at the top of the hour of the earliest sighting.
        start_bin = min(date_list_orig)[0]
        start_bin = datetime.datetime(start_bin.year, start_bin.month, start_bin.day, start_bin.hour, 0)
        end_bin = start_bin + datetime.timedelta(minutes=delta)
        while idx < len(date_list_orig):
            if start_bin <= date_list_orig[idx][0] < end_bin:
                # Sighting falls inside the current bin.
                date_list.append((start_bin, 1))
                idx = idx + 1
            else:
                # Advance to the next bin; sightings are sorted ascending,
                # so the loop always terminates.
                start_bin = end_bin
                end_bin = start_bin + datetime.timedelta(minutes=delta)
        scan_data_binned[key] = date_list

    # Iterate over a snapshot of the keys: the original popped while
    # iterating .keys(), which raises RuntimeError under Python 3.
    for key in list(scan_data_binned):
        scan_data_binned[key] = sorted(set(scan_data_binned[key]))
        if key[0] == '00:00:00:00:00:00':
            scan_data_binned.pop(key, None)

    return scan_data_binned

def loadScanDataBinnedNew(file_name, start_date, end_date, delta=1):
    """Bin each router's sightings onto a delta-minute grid spanning
    [start_date, end_date], keeping a router 'present' across a gap when
    the immediately following scan (across all routers) also saw it.

    Returns:
        OrderedDict mapping router (bssid, ssid) -> sorted list of unique
        (datetime, 1) tuples; routers with the null bssid
        '00:00:00:00:00:00' are dropped.

    NOTE(review): repeated list.index() / `in list` lookups make this
    quadratic in the number of scan minutes.
    """
    data_1_min_binned = loadScanData(file_name, start_date, end_date)
    scan_data_binned = OrderedDict()

    # All distinct scan minutes across every router, ascending.
    all_scan_dates = []
    for key in data_1_min_binned.keys():
        data_1_min_binned[key] = list(set(data_1_min_binned[key]))
        all_scan_dates = all_scan_dates + [item[0] for item in list(set(sorted(data_1_min_binned[key])))]
        
    all_scan_dates = sorted(list(set(all_scan_dates)))
    
    for key in data_1_min_binned.keys():
        date_list = []
        # Minutes at which this particular router was seen, ascending.
        date_list_orig = sorted(list(set([item[0] for item in data_1_min_binned[key]])))
        curr_date = start_date
        flag = False  # True while the router is considered continuously present
        while curr_date <= end_date:
            if curr_date in all_scan_dates:
                if curr_date in date_list_orig:
                    date_list.append((curr_date, 1))
                    curr_idx = date_list_orig.index(curr_date)
                    if curr_idx + 1 < len(date_list_orig):
                        next_date = date_list_orig[curr_idx + 1]
                        # Stay 'present' only if this router was also seen at
                        # the very next scan minute in the global timeline.
                        if all_scan_dates.index(next_date) - all_scan_dates.index(curr_date) <= 1:
                            flag = True
                        else:
                            flag = False
                    else:
                        flag = False
                # if next scan s present -> set flag to true
                #else set flag to false
            if flag:
                # NOTE(review): when curr_date was just appended above this
                # adds a duplicate; duplicates are removed by set() below.
                date_list.append((curr_date, 1))
                #fill with zeros
            
                #fill with ones
            curr_date = curr_date + datetime.timedelta(minutes = delta)
        scan_data_binned[key] = date_list
        
    # Python-2-only: .keys() yields a list, so popping while iterating is safe
    # here but would raise under Python 3.
    for key in scan_data_binned.keys():
        scan_data_binned[key] = sorted(list(set(scan_data_binned[key])))
        if key[0] == '00:00:00:00:00:00':
            scan_data_binned.pop(key, None)
            
    return scan_data_binned
        
def loadGpsData(file_name, start_date, end_date):
    """Load GPS records keyed by minute-truncated datetime.

    The original comment ('value: transport, e.g. walk, stand') was a
    copy-paste error: the value is actually the float pair
    (row[2], row[5]).  NOTE(review): presumably a coordinate pair —
    confirm against the file format.

    Parameters:
        file_name: path to the comma-separated GPS dump.
        start_date, end_date: accepted for interface symmetry with the
            other loaders but NOT used — every data row is loaded.

    Returns:
        OrderedDict mapping datetime (truncated to whole minutes) ->
        (float(row[2]), float(row[5])); later rows within the same minute
        overwrite earlier ones.
    """
    gps_data = OrderedDict()
    # Text mode + int(): portable across Python 2 and 3 (the original used
    # the Python-2-only 'rb' mode and long()).
    with open(file_name, 'r') as f:
        reader = csv.reader(f, delimiter=',')
        for row in reader:
            if row[1] == 'timestamp':
                continue  # header row
            curr_date = datetime.datetime.fromtimestamp(int(row[1]))
            curr_date = curr_date.replace(second=0, microsecond=0)
            gps_data[curr_date] = (float(row[2]), float(row[5]))
    return gps_data