import logging
from datetime import date, datetime, timedelta
import json
import os
import io
import requests

import xlrd
import numpy as np
import pandas as pd
from geopy import distance
from xlrd import open_workbook
from fuzzywuzzy import fuzz

from .opt_utils import clean_space
from .constants import OPT_PATH, REF_STRING, COL_ORDER


def prepare_file(path: str) -> None:
    """Normalize the Excel file at *path* in place so its table starts at the header.

    Scans rows top-down, joining the text cells of each row with commas, until a
    row fuzzy-matches ``REF_STRING`` (partial token sort ratio >= 80). The file
    is then re-read skipping everything above that header, column names are
    whitespace-cleaned, a sequential ``index`` column is prepended when missing,
    and the result is written back over *path*.

    Raises:
        Exception: when a non-text cell is met while scanning for the header
            (the original Russian message asks the user to remove the cap rows).
    """
    wb = open_workbook(path)
    ws = wb.sheets()[0]

    i, curr_row = 0, ''
    nrows, ncols = ws.nrows, ws.ncols
    while not fuzz.partial_token_sort_ratio(REF_STRING, curr_row) >= 80:
        # Bounds check must come BEFORE the cell access: the old code checked
        # after reading row `i`, so an unmatched scan indexed row `nrows` and
        # raised IndexError instead of breaking.
        if i >= nrows:
            break
        curr_row = ''
        for j in range(ncols):
            cell = ws.cell(i, j).value
            if isinstance(cell, str):
                val = f'{cell}'
                # Separator only BETWEEN cells: the old `j < ncols` was always
                # true, appending a trailing comma after the last cell too.
                if j < ncols - 1:
                    val += ','
                curr_row += val
            else:
                raise Exception('Не удается найти заголовок таблицы. Попробуйте удалить шапочку')
        i += 1

    # After the loop `i` is one past the matched header row, so skipping
    # rows 0..i-2 makes the header the first row pandas sees.
    excel = pd.read_excel(path, skiprows=range(i - 1))
    excel.columns = [clean_space(col) for col in excel.columns]

    # Guarantee a sequential 'index' column in the first position.
    excel_cols = list(excel.columns)
    if 'index' not in excel_cols:
        excel = excel.assign(index=range(1, len(excel) + 1))
        excel = excel.loc[:, ['index'] + excel_cols]

    excel.to_excel(path, index=False)


def read_date(dt):
    """Parse a date cell coming from the orders Excel file.

    Accepts three input shapes:
        int       -- an Excel serial date (1900 date system, datemode 0);
        str       -- a 'dd.mm.yy' date string, parsed day-first;
        datetime  -- passed through ``pd.to_datetime`` unchanged.

    Returns a ``datetime``/``pd.Timestamp``.

    Raises:
        Exception: for any other input type (original Russian message kept).
    """
    if isinstance(dt, int):
        return xlrd.xldate.xldate_as_datetime(dt, 0)
    # Single isinstance call with a tuple instead of two chained checks.
    if isinstance(dt, (str, datetime)):
        return pd.to_datetime(dt, format='%d.%m.%y', dayfirst=True)
    raise Exception('Неизвестный формат даты')


def concat(s: pd.Series, sep: str = '|') -> str:
    """Join the string representations of the elements of *s* with *sep*."""
    pieces = [str(item) for item in s]
    return sep.join(pieces)


class GvkmDataConverter:
    """Converts GVKM order Excel files to optimizer request JSON and back.

    On construction it loads a JSON config (geocoder API key, per-order
    service time, path to the cities description file) and the cities file
    itself. The main pipeline is:
        excel_to_orders -> orders_to_json -> (optimizer) -> route_to_excel.
    """

    def __init__(self, config_path, opt_back_path=OPT_PATH):
        """Load the optimizer config and the cities file.

        Raises:
            FileNotFoundError: when either file cannot be read or parsed.
        """
        self.logger = logging.getLogger('optimizer')
        self.logger.debug("Initializing GvkmDataConverter")
        try:
            with open(os.path.join(opt_back_path, config_path), 'r') as f:
                optimization_params = json.load(f)
                self.cities_path = os.path.join(opt_back_path, optimization_params['cities_path'])
                self.key = optimization_params['key']
                self.order_time_service = optimization_params['order_time_service']
        except Exception as e:
            self.logger.error(f"Couldn't load config {config_path}, {e}")
            raise FileNotFoundError("Couldn't load config")

        try:
            with open(self.cities_path, 'r') as f:
                self.cities = json.load(f)
        except Exception as e:
            # Log the cities file path; the old message wrongly named the config.
            self.logger.error(f"Couldn't load file {self.cities_path}, {e}")
            raise FileNotFoundError("Couldn't load cities file")

    @staticmethod
    def preprocess_address_column(address_column):
        """Clean a Series of raw address strings before geocoding.

        Strips backslashes, VSP branch numbers, phone numbers, time windows,
        parenthesised tails, apartment/floor/entrance remarks, punctuation
        and excess whitespace.
        """
        address_column = address_column.fillna('').astype(str)
        address_column = address_column.str.replace('\\', '', regex=False)
        address_column = address_column.str.replace(r'ВСП {,2}\d{,4}', '', regex=True)  # VSP numbers
        address_column = address_column.str.replace(
            r' \+?\d? ?\(?\d{3}\)?[ -]?\d{3}[ -]?\d{2}[ -]?\d{2}', '', regex=True)  # Phone numbers
        address_column = address_column.str.replace(
            r'(\d{2}[.:-]\d{2}? ?(до|-)? ?\d{2}[.:-]\d{2}?)|(\d{2}[.:-]\d{2})', '', regex=True)  # Time windows
        address_column = address_column.str.replace(r'\(.*$', '', regex=True)  # Everything coming after '(' is nasty
        address_column = address_column.str.replace(
            r'((о|О)ф(ис)?|(п|П)ом(ещение)?|(э|Э)т(аж)?|(п|П)ав(ильон)?|(п|П)одъезд|(д|Д)омофон|'
            r'(к|К)в(артира)?|(в|В)ход|(с|С)клад|(в|В)ыезд|(п|П)ешком|(т|Т)ел(ефон)?|'
            r'(з|З)вонить|(к|К)лиенту?)( |\.).*$', '', regex=True)  # Trim excessive info
        address_column = address_column.str.replace('[.,/\\;]', ' ', regex=True)
        # Collapse ANY run of spaces: the old single-pass '  ' -> ' ' replace
        # left a double space behind every run of three.
        address_column = address_column.str.replace(r' {2,}', ' ', regex=True)
        address_column = address_column.str.strip()
        return address_column

    @staticmethod
    def _get_districts(data):
        """Extract a district name from a Google geocoder result.

        Returns the long_name of the first address component tagged
        sublocality_level_2, sublocality_level_1 or locality (checked in
        that order within each component), or None when nothing matches.
        """
        district = None
        address_components = data['address_components']
        for component in address_components:
            types = component['types']
            if district is None and 'sublocality_level_2' in types:
                district = component['long_name']
            if district is None and 'sublocality_level_1' in types:
                district = component['long_name']
            if district is None and 'locality' in types:
                district = component['long_name']
        return district

    def geocode_address_yandex(self, address):
        """Geocode *address* with the Yandex geocoder.

        Returns (latitude, longitude, normalized_address) with coordinates as
        np.float32. Raises ConnectionRefusedError on HTTP 403 and Exception
        on any other failure or an empty result.
        """
        response = requests.get(
            "https://geocode-maps.yandex.ru/1.x/",
            params=dict(format="json", apikey=self.key, geocode=address))

        if response.status_code == 200:
            members = response.json()["response"]["GeoObjectCollection"]["featureMember"]
            # Check emptiness BEFORE indexing: the old code did `[0]` first,
            # so an empty result raised IndexError and the "nothing found"
            # branch was unreachable.
            if not members:
                self.logger.warning(f'Nothing found for "{address}"')
                raise Exception(
                    f"Yandex geocoder exception. status_code={response.status_code}, body={response.content}")
            data = members[0]["GeoObject"]
        elif response.status_code == 403:
            raise ConnectionRefusedError()
        else:
            raise Exception(f"Unknown Yandex geocoder exception. status_code={response.status_code}, "
                            f"body={response.content}")

        coordinates = data["Point"]["pos"]
        normalized_address = data["metaDataProperty"]["GeocoderMetaData"]["text"]
        # Yandex returns coordinates as "lon lat", i.e. in reverse order.
        longitude, latitude = tuple(coordinates.split(" "))

        return np.float32(latitude), np.float32(longitude), normalized_address

    def geocode_address_google(self, address):
        """Geocode *address* with the Google geocoder.

        Returns (latitude, longitude, normalized_address, district) with
        coordinates as np.float32. Raises ConnectionRefusedError on HTTP 403
        and Exception on any other failure or an empty result.
        """
        response = requests.get(
            f"https://maps.googleapis.com/maps/api/geocode/json?address={address}&key={self.key}")

        if response.status_code == 200:
            results = response.json()["results"]
            # Same empty-result fix as the Yandex method; also the messages
            # below said "Yandex" here by copy-paste.
            if not results:
                self.logger.warning(f'Nothing found for "{address}"')
                raise Exception(
                    f"Google geocoder exception. status_code={response.status_code}, body={response.content}")
            data = results[0]
        elif response.status_code == 403:
            raise ConnectionRefusedError()
        else:
            raise Exception(f"Unknown Google geocoder exception. status_code={response.status_code}, "
                            f"body={response.content}")

        latitude = data["geometry"]["location"]["lat"]
        longitude = data["geometry"]["location"]["lng"]
        normalized_address = data["formatted_address"]
        district = self._get_districts(data)

        return np.float32(latitude), np.float32(longitude), normalized_address, district

    def geocode_addresses(self, unique_addresses):
        """Geocode addresses in column 'addr'.

        Returns four parallel lists (lats, lons, normalized addresses,
        districts); rows that fail to geocode get np.nan placeholders so the
        lists always align with *unique_addresses*.
        """
        lats = []
        lons = []
        normalized_addresses = []
        districts = []
        for i, row in unique_addresses.iterrows():
            try:
                lat, lon, normalized_address, district = self.geocode_address_google(str(row['addr']))
                lats.append(lat)
                lons.append(lon)
                normalized_addresses.append(normalized_address)
                districts.append(district)
            except Exception:
                self.logger.warning(f"Cant geocode address: {str(row['addr'])}")
                lats.append(np.nan)
                lons.append(np.nan)
                normalized_addresses.append(np.nan)
                districts.append(np.nan)
        return lats, lons, normalized_addresses, districts

    def find_distant_address_rows(self, orders, city, prefix):
        """Return request_ids of orders geocoded > 50 km from the city centre.

        Side effect: adds a '{prefix}center_distance' column to *orders*.
        Rows without coordinates get no distance and are not reported.
        """
        city_center = (self.cities[city]['center']['lat'], self.cities[city]['center']['lon'])

        def get_city_center_distance(row):
            if pd.isna(row[f'{prefix}lat']):
                return None
            return distance.distance(city_center, (row[f'{prefix}lat'], row[f'{prefix}lon'])).km

        orders[f'{prefix}center_distance'] = orders.apply(get_city_center_distance, axis=1)
        distant_rows = list(orders.query(f'{prefix}center_distance > 50.0')['request_id'])
        return distant_rows

    def excel_to_orders(self, excel_path, chosen_date, city='Spb'):
        """Read orders from Excel, preprocess, geocode and transform to orders DataFrame.

        Returns an (orders, error, warnings) triple; *orders* is None whenever
        *error* is set. Error/warning texts are user-facing (Russian).
        """
        self.logger.info('Converting Excel to orders')
        warnings = []

        # Normalize the sheet first: locate the header row, add 'index' column.
        prepare_file(excel_path)
        orders = pd.read_excel(
            excel_path,
            parse_dates=['Дата КД'],
            date_parser=read_date,
            dayfirst=True,
            dtype={'ID Заявки с КД': str, 'Тип выпуска': str}
        )

        orders.reset_index(inplace=True)
        orders.rename(columns={
            'index': 'id',
            'Дата КД': 'date',
            'Период КД': 'time',
            'ID Заявки с КД': 'request_id',
            'Город': 'city',
            'Адрес КД': 'addr',
            'Наименование БК': 'card',
            'Тип выпуска': 'type'
        }, inplace=True)

        orders = orders.drop_duplicates(subset=['request_id'])

        # Select orders on chosen date
        orders = orders.query(f'date == "{chosen_date}"')

        orders = orders[orders['city'] == self.cities[city]['name']].copy()

        # Check that all obligatory fields are set ('not(addr == addr)' is the
        # query-string NaN test).
        nonvalid_rows = orders.query('not(addr == addr)')
        if len(nonvalid_rows) > 0:
            error = 'Не заполнены необходимые поля в заказах ' + str(list(nonvalid_rows['request_id']))
            self.logger.error(error)
            return None, error, warnings

        if len(orders) == 0:
            error = 'Нет заказов в выбранную дату ' + str(chosen_date)
            self.logger.error(error)
            return None, error, warnings

        # Split the 'HH:MM - HH:MM' delivery window into bounds.
        orders['time_from'] = orders['time'].copy().apply(lambda x: x.split(' - ')[0])
        orders['time_to'] = orders['time'].copy().apply(lambda x: x.split(' - ')[1])

        orders['time_service'] = self.order_time_service

        cols = [
            'id',
            'date',
            'time_from',
            'time_to',
            'request_id',
            'city',
            'addr',
            'type',
            'time_service'
        ]

        # Optional scoring columns survive the projection when present.
        for col in ['mean_score', 'epk_id']:
            if col in orders.columns:
                cols.append(col)

        orders = orders.loc[:, cols]

        # Debug preview via the logger instead of the old print() +
        # global pd.set_option side effect.
        self.logger.debug(orders.head(5))

        # Prepare unique addresses and geocode them
        unique_addresses = orders[['addr']].copy()
        unique_addresses.drop_duplicates(inplace=True)

        self.logger.info(f'Got {len(unique_addresses)} unique addresses to geocode')
        lats, lons, normalized_addresses, districts = self.geocode_addresses(unique_addresses)

        unique_addresses = unique_addresses.assign(
            lat=lats,
            lon=lons,
            norm_addr=normalized_addresses,
            district=districts
        )

        # Merge coordinates onto addresses
        orders = orders.merge(unique_addresses, on='addr', how='left', validate='m:1')

        # Suspiciously distant addresses are only a warning, not an error.
        distant_rows = self.find_distant_address_rows(orders, city, prefix='')
        if len(distant_rows) > 0:
            self.logger.warning(f'Addresses are too far from city center {str(distant_rows)}')
            warnings.append(f'Адреса находятся аномально далеко от города в строках {str(distant_rows)}')

        nonvalid_rows = list(orders.query('not(lat == lat)')['request_id'])
        if len(nonvalid_rows) > 0:
            error = 'Не удалось определить координаты адресов в строках ' + str(nonvalid_rows)
            self.logger.error(error)
            return None, error, warnings

        if 'epk_id' in orders.columns:
            # Orders of the same client (epk_id) at the same normalized address
            # are merged into one stop; rows without epk_id stay as-is.
            orders_null = orders.loc[orders.epk_id.isnull()]
            orders_notnull = orders.loc[orders.epk_id.notnull()]
            agg = {
                'id': 'first',
                'date': 'first',
                'time_from': 'first',
                'time_to': 'first',
                'request_id': concat,
                'city': 'first',
                'addr': concat,
                'type': concat,
                'time_service': 'first',
                'lat': 'first',
                'lon': 'first',
                'district': 'first'
            }

            for col in ['mean_score']:
                if col in orders.columns:
                    agg[col] = 'mean'

            orders_notnull = orders_notnull.groupby(['epk_id', 'norm_addr']).agg(agg).reset_index()
            orders = pd.concat([orders_null, orders_notnull], axis=0, sort=False).reset_index(drop=True)

        return orders, None, warnings

    def orders_to_json(self, orders, vkm_count, city='Spb', capacity=None, is_soft=False, lon=None, lat=None):
        """Transforms DataFrame with orders to request-like JSON.

        Builds *vkm_count* couriers starting either from the city's configured
        start point or from an explicit (lat, lon). Returns (req_json,
        orders_ext) where orders_ext is *orders* with the start point appended.
        """
        self.logger.info('Converting orders to json')

        # Initialize delivers DataFrame
        vkm = [str(x) for x in range(1, vkm_count + 1)]
        delivers = pd.DataFrame(vkm, columns=['descr'])
        delivers = delivers.reset_index().rename(columns={'index': 'id'})
        if (lon is None) or (lat is None):
            start_point = self.cities[city]['start']
        else:
            start_point = {
                "lat": lat,
                "lon": lon,
                "addr": "start-point"
            }
        delivers['addr'] = start_point['addr']
        delivers['lat'] = start_point['lat']
        delivers['lon'] = start_point['lon']

        # Google Distance Matrix API works only with dates within a week from current date
        if abs(orders['date'][0].date() - date.today()).days > 7:
            self.logger.warning('Chosen date is too far from today - using today')
            orders.at[0, 'date'] = date.today()
        orders['date'] = orders['date'].apply(lambda x: x.strftime('%Y.%m.%d')).astype(str)

        # Fall back to the city's configured capacity when none (or a falsy
        # value) is supplied.
        capacity = self.cities[city]['capacity'] if capacity is None or not capacity else capacity

        # Emulate optimisation POST request from frontend.
        # NOTE(review): orient='rows' is a legacy pandas alias for 'records';
        # it was removed in pandas 1.0 — confirm the pinned pandas version.
        req_json = {
            'descr': 'test',
            'city': city,
            'capacity': capacity,
            'is_soft': is_soft,
            'delivers': delivers.to_dict(orient='rows'),
            'orders': orders.to_dict(orient='rows'),
            'routes': [],
            'id': 0,
            'date': orders['date'][0],
            'updated_dt': '2020-03-10T08:10:46.288Z'
        }

        # The start point is appended as a pseudo-order with id -1.
        start_point['id'], start_point['norm_addr'] = -1, 'start-point'
        orders_ext = pd.concat([orders, pd.DataFrame(start_point, index=[0])], axis=0, sort=False).reset_index()

        return req_json, orders_ext

    def create_gmap_direction(self, origin: pd.Series, waypoints: pd.DataFrame):
        """Build a Google Maps directions URL from *origin* through *waypoints*.

        The last waypoint becomes the destination; the rest (if any) are
        passed as '|'-separated intermediate waypoints.
        """
        self.logger.info('Creating Google Maps Direction')
        gmap_url = 'https://www.google.com/maps/dir/?api=1&origin='
        gmap_url += str(origin['lat']) + ',' + str(origin['lon'])
        destination = waypoints.iloc[-1]
        gmap_url += '&destination=' + str(destination['lat']) + ',' + str(destination['lon'])
        if len(waypoints) > 1:
            gmap_url += '&waypoints='
            for index, waypoint in (waypoints[:-1].iterrows()):
                gmap_url += str(waypoint['lat']) + ',' + str(waypoint['lon']) + '|'
            # Drop the trailing '|' separator.
            gmap_url = gmap_url[:-1]
        return gmap_url

    def route_to_excel(self, route):
        """Render an optimized route to an Excel workbook.

        Flattens route['subroutes'] into one DataFrame (one courier per
        subroute, skipping empty start/finish-only subroutes), recomputes
        arrival times, unpacks merged epk_id orders, and returns the xlsx
        file contents as bytes.
        """
        self.logger.info('Forming Excel with best route')
        vkms = range(1, len(route['subroutes']) + 1)
        route_df = pd.DataFrame()
        for i, subroute in enumerate(route['subroutes']):
            subroute_df = pd.DataFrame(subroute['points'])
            # A subroute of <= 2 points is just start+finish: no deliveries.
            if len(subroute_df) <= 2:
                continue
            subroute_df['vkm'] = vkms[i]
            route_df = pd.concat([route_df, subroute_df], ignore_index=True)
            gmap_url = self.create_gmap_direction(subroute_df.iloc[0], subroute_df.iloc[1:-1])
            route_df = route_df.assign(gmap_url=gmap_url)

        cols = [
            'request_id',
            'vkm',
            'date',
            'city',
            'addr',
            'type',
            'url',
            'time_travel',
            'time_at_min',
            'gmap_url'
        ]

        for col in ['mean_score', 'epk_id']:
            if col in route_df.columns:
                cols.append(col)

        route_df = route_df.loc[:, cols]

        # Fixing time at min: rebuild arrival times per courier from travel
        # times plus a fixed 18-minute service slot per stop.
        new_route_df = []
        for vkm in route_df.loc[:, 'vkm'].unique():
            courier = route_df.query(f'vkm == {vkm}')
            start = pd.to_datetime(courier.iloc[1].loc['time_at_min'])
            time = [np.nan, start.strftime('%H:%M')]
            for idx in range(2, len(courier)):
                delta = timedelta(minutes=int(courier.iloc[idx].loc['time_travel'].split('-')[0]) + 18)
                time.append((start + delta).strftime('%H:%M'))
                start += delta
            courier.loc[:, 'time_at_min'] = time
            new_route_df.append(courier)
        route_df = pd.concat(new_route_df, axis=0, sort=False)

        # Drop the synthetic start/finish rows and renumber couriers 1..N.
        route_df = route_df.query("request_id != 'start' & request_id != 'finish'")
        route_df.loc[:, 'vkm'].replace({
            curr_idx: new_idx + 1 for new_idx, curr_idx in enumerate(route_df.loc[:, 'vkm'].unique())
        }, inplace=True)

        # Sequential delivery number within each courier.
        route_df = route_df.assign(order=np.nan)
        for vkm in route_df.loc[:, 'vkm'].unique():
            condition = route_df.loc[:, 'vkm'].isin([vkm])
            route_df.loc[condition, 'order'] = np.arange(1, condition.sum() + 1, dtype=np.int16)

        if 'mean_score' in route_df.columns:
            # Keep only the top-2 couriers by delivery count, then score.
            vkm_tariff = route_df.groupby(['vkm']).agg({'mean_score': 'sum', 'order': 'count'}).reset_index()
            vkm_tariff = vkm_tariff.sort_values(by=['order', 'mean_score'], ascending=False).reset_index(drop=True)
            chosen_ones = vkm_tariff.loc[:1, 'vkm']
            route_df = route_df.loc[route_df.vkm.isin(chosen_ones)]

        if 'epk_id' in route_df.columns:
            # Explode the '|'-joined columns produced by the epk_id merge in
            # excel_to_orders back into one row per original order.
            concat_cols = ['request_id', 'addr', 'type']
            key = [col for col in route_df.columns if col not in concat_cols]
            for col in concat_cols:
                if col in route_df.columns:
                    route_df.loc[:, col] = route_df.loc[:, col].str.split('|')
            route_df = (
                route_df
                    .set_index(key)
                    .apply(lambda x: x.apply(pd.Series).stack())
                    .reset_index()
                    .drop([f'level_{len(key)}'], axis=1)
                    .sort_values(by=['vkm', 'order'])
            )

        # Every activity lasts the fixed 18-minute service slot.
        route_df = route_df.assign(
            duration=18,
            time_end=(pd.to_datetime(route_df.loc[:, 'time_at_min']) + timedelta(minutes=18)).dt.strftime('%H:%M')
        ).loc[:, COL_ORDER]

        route_df.rename(columns={
            'request_id': 'ID Заявки с КД',
            'vkm': 'ВКМ',
            'date': 'Дата КД',
            'city': 'Город КД',
            'addr': 'Адрес КД',
            'url': 'Ссылка на маршрут',
            'time_travel': 'Время в пути до активности',
            'time_at_min': 'Время начала активности',
            'type': 'Тип выпуска',
            'order': 'Порядковый номер доставки',
            'duration': 'Время продолжительности активности',
            'time_end': 'Время конца активности',
            'gmap_url': 'Ссылка на маршрут в GoogleMaps',
            'fio': 'ФИО Клиента',
            'phone': 'Номер телефона',
            'copp': 'Комментарий ЦОПП',
            'client': 'Комментарий Клиента',
            'card': 'Наименование БК'
        }, inplace=True)

        # Write to an in-memory buffer and hand the raw bytes to the caller.
        output = io.BytesIO()
        writer = pd.ExcelWriter(output, engine='xlsxwriter')
        route_df.to_excel(writer, encoding='cp1251', index=False)
        worksheet = writer.sheets['Sheet1']
        worksheet.set_column(0, 0, 5)  # VKM
        worksheet.set_column(1, 1, 10)  # date
        worksheet.set_column(3, 3, 10)  # request_id
        worksheet.set_column(5, 5, 120)  # addr
        worksheet.set_column(6, 6, 10)  # type
        worksheet.set_column(7, 7, 24)  # url
        worksheet.set_column(8, 8, 18)  # time_travel
        worksheet.set_column(9, 9, 18)  # time_at_min
        worksheet.set_column(10, 10, 18)  # duration
        worksheet.set_column(11, 11, 18)  # time_end
        worksheet.set_column(12, 12, 24)  # url
        writer.save()

        return output.getvalue()
