import math

import numpy as np
import torch.nn as nn
import osm2gmns as og
from shapely import Polygon
from shapely.geometry import MultiLineString
from sklearn.preprocessing import StandardScaler
import torch.nn.functional as F
from module.polygon_embedding import POLYGON_EMBEDDING_MODEL
from utils.utils import make_args_parser


def haversine(lat1, lon1, lat2, lon2):
    """Return the great-circle distance in kilometres between two points.

    Args:
        lat1, lon1: first point's latitude and longitude in degrees.
        lat2, lon2: second point's latitude and longitude in degrees.

    Returns:
        Distance along the sphere's surface in km (mean Earth radius 6371 km).
    """
    phi1, lam1, phi2, lam2 = map(math.radians, (lat1, lon1, lat2, lon2))
    earth_radius_km = 6371.0
    half_dphi = (phi2 - phi1) / 2
    half_dlam = (lam2 - lam1) / 2
    # Haversine formula: a is the squared half-chord length between the points.
    a = math.sin(half_dphi) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(half_dlam) ** 2
    central_angle = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    return earth_radius_km * central_angle


def get_new_coordinates(lat, lon, distance_north, distance_east):
    """Offset a (lat, lon) point by the given distances in kilometres.

    NOTE: despite the parameter name, ``distance_north`` is SUBTRACTED from
    the latitude — a positive value moves the point SOUTH.  A positive
    ``distance_east`` moves the point east.  Callers rely on this sign
    convention (they start at a north-west corner and sweep south/east).

    Args:
        lat, lon: starting point in degrees.
        distance_north: km to move south (positive value decreases latitude).
        distance_east: km to move east at the starting latitude.

    Returns:
        Tuple ``(new_lat, new_lon, lat_increment, lon_increment)`` where the
        increments are the degree offsets that were applied.
    """
    earth_radius_km = 6371.0
    # Degree offsets: arc length / radius, converted from radians to degrees.
    lat_increment = math.degrees(distance_north / earth_radius_km)
    lon_increment = math.degrees(distance_east / (earth_radius_km * math.cos(math.radians(lat))))
    return lat - lat_increment, lon + lon_increment, lat_increment, lon_increment


def get_rectangle_points(point1, point2):
    """Build an axis-aligned rectangular shapely Polygon from two corners.

    Args:
        point1, point2: opposite corners given as ``(lat, lon)`` pairs.

    Returns:
        A ``Polygon`` whose vertices use shapely's ``(x=lon, y=lat)`` order.
    """
    lat_a, lon_a = point1
    lat_b, lon_b = point2
    corners = [(lon_a, lat_a), (lon_a, lat_b), (lon_b, lat_b), (lon_b, lat_a)]
    return Polygon(corners)


class New_Module(nn.Module):
    """Embeds road-network geometry with ``POLYGON_EMBEDDING_MODEL``.

    ``gene_data`` slices a network's merged buffer footprint into narrow
    vertical strips and samples boundary points; ``forward`` runs the
    embedding model and L2-normalizes its output.
    """

    def __init__(self):
        super().__init__()
        # Build the project's default argument set without reading the real CLI.
        self.parser = make_args_parser()
        self.args = self.parser.parse_args(args=[])
        # Normalized coordinate extent expected by the embedding model.
        self.extent = {"norm": (-1, 1, -1, 1)}
        # NOTE(review): .cuda() hard-requires a GPU; consider making the device configurable.
        self.model = POLYGON_EMBEDDING_MODEL(self.args, spa_enc=None, geom_type="norm", extent=self.extent).cuda()

    def gene_data(self, net):
        """Slice the road network ``net`` into strips and sample boundary points.

        Args:
            net: an osm2gmns network object (has ``link_dict`` with shapely
                geometries per link).

        Returns:
            numpy array of shape ``(num_strips, 301, 2)``: for each strip that
            yielded at least 301 exterior points, 301 sampled (x, y)
            coordinates, standardized per channel with ``StandardScaler``.
        """
        og.consolidateComplexIntersections(net, auto_identify=True)
        og.combineShortLinks(net)  # simplify the network before slicing
        # Buffer half-width in degrees; presumably chosen to approximate a
        # fixed metric width at this latitude — TODO confirm the derivation.
        buffer_radius = 0.0002282247866046481
        line_strings = [net.link_dict[i].geometry.buffer(buffer_radius * 2) for i in net.link_dict.keys()]
        merged_polygon = line_strings[0]
        for polygon in line_strings[1:]:
            merged_polygon = merged_polygon.union(polygon)  # union of buffered links = network footprint
        # bounds = (minx, miny, maxx, maxy), so this is the NW corner as (lat, lon).
        origin_point = (merged_polygon.convex_hull.bounds[-1], merged_polygon.convex_hull.bounds[0])
        origin_point_0 = origin_point  # fix the initial bounding-box corner
        base_lat = origin_point[0]
        base_lon = origin_point[1]
        # Move 1.6 km south and 1.6 km east to get the strip's opposite corner
        # (get_new_coordinates subtracts the "north" distance from latitude).
        distance_north = 1600 / 1000
        distance_east = 1600 / 1000
        new_point_and_dist = get_new_coordinates(base_lat, base_lon, distance_north, distance_east)
        origin_point_1 = (new_point_and_dist[0], new_point_and_dist[1])  # (lat, lon) of the far corner
        distance_north = 1 / 1000  # 1 metre step
        distance_east = 1 / 1000
        delta_dist = get_new_coordinates(base_lat, base_lon, distance_north, distance_east)
        x_delta = delta_dist[-1]  # longitude increment of a 1 m eastward shift
        data = []
        for i in range(4000):  # shift the window 4000 times, slicing the boundary
            point_a = (origin_point_0[0], origin_point_0[1] + i * x_delta)
            point_b = (origin_point_1[0], origin_point_1[1] + i * x_delta)
            rec = get_rectangle_points(point_a, point_b)
            result = merged_polygon.intersection(rec)
            try:
                # Multi-part intersection: keep only the largest piece by area.
                # NOTE(review): the comprehension variable shadows the loop's `i`.
                result = list(result.geoms)[np.argmax([i.area for i in list(result.geoms)])]
            except AttributeError:
                result = result  # single Polygon already (no .geoms attribute)
            t1 = np.array([*result.exterior.coords])
            try:
                # Sample 301 distinct exterior points, kept in coordinate order.
                t2 = t1[np.sort([np.random.choice(t1.shape[0], 301, replace=False)])][0]
            except ValueError:
                continue  # fewer than 301 exterior points — skip this strip
            data.append(t2)
        print(len(data))

        array = np.array(data)
        scaler = StandardScaler()
        # Standardize each coordinate channel across all strips/points at once.
        normalized_array = scaler.fit_transform(array.reshape(-1, 2)).reshape(-1, 301, 2)
        return normalized_array

    def forward(self, data):
        """Embed ``data`` with the polygon model; return an L2-normalized row vector of shape (1, -1)."""
        return F.normalize(self.model(data)).reshape(1, -1)
