#! /usr/local/bin/python3
# coding: utf-8
# __author__ = "Liu jiao"
# __date__ = 2019/10/16 16:11


from urllib.parse import quote
import json
import os
from transCoordinateSystem import gcj02_to_wgs84, gcj02_to_bd09
import area_boundary as  area_boundary
import city_grid as city_grid
import time
import collections
import pandas as pd
from requests.adapters import HTTPAdapter
import requests
from tqdm import tqdm

#from shp import trans_point_to_shp
'''
版本更新说明：
2021.12.01:
    1.清除了poi数据写入shp文件相关操作

'''
#################################################  configuration — edit here  ###########################################################

## TODO 1. Grid split distance in degrees. 0.02-0.05 usually works best: use
##         0.01/0.02 for dense POI types (restaurants, companies) and 0.05 or
##         larger for sparse ones (universities).
pology_split_distance = 0.5
## TODO 2. City code: see the AMap city-code table (use the `adcode` column).
#city_code = '110000'
## TODO 3. POI type: either the type name or the type code, as listed in the
##         AMap POI classification table (高德地图POI分类编码表.xlsx).
#typs = ['风景名胜']  # ['企业', '公园', '广场', '风景名胜', '小学']

## TODO 4. AMap (Gaode) open-platform API keys. They are consumed from the
##         front of a queue and dropped once reported invalid/exhausted.

gaode_keys = [
     '397ec223a803fde8041ec683c125022f',
     'eb29c4c588a153cce0055a6bec96c677',
     '979f9ac1f7bd2eb6d895af543d1a975f',
     '9d4a14532e837a06e2c4d5d33b6c396c',
     '7ea7d8707045579bdc783c7c55f925b7',
     '18df88f2184d7a2ac39a5a46dda5a320',
     '561d941ee05203ca27e769add968d8fd',
     '88faf8ebcd0d185780b6e7745f97f591',
     '99fbc207194a3d8a555bdc953cbf50e6',
     'f3664604aa2f1445a931ca4187628c0c',
     '680b82af88e49a4faf320eace40b9462',
     'c6fec1fee0dc8be45ddd15cfa2839581',
     '40040efd9be874f5366b480662802274',
     'a78f67920ed586226e474794c25b85c1',
     '3be85f95690da773f151ec292a7c024c',
     'f2e419ac1468f7fa23d34c73a194b463',
     'd83dc5538da6385c74131b119862c9a4',
     '5bf20c091e179371a71c8c1aa00147e0',
     '030e84b6e1024b3c1192f8c488a59a02',
     'd8e73652e61500ec0854ade29ebf1639',
     '05626da1796336ee3786ffc2c088cc53',
     '56edd01aea2920e7c01123642e2067b8',
     '791914bad67c075a71129b23460e1343',
     '3dfe9f5d2d79856d5162e23084d05333',
     'c9d372924717ba89fe9fe409765624de',
     'a60602974280f6a7fcf13c6c5b3db681',
     '78b897527860616beaa0764018d495a8',
     'f306965623f0812c9fe0bd11e9b9f384',
     'b7ab531ac25a292dac4886b2716e8552',
     '442a9bf1e81a50d64553e9f1ecc68d96',
     'f878f7cf17e95f905084b9e3491d3723'
]

# TODO 5. Output coordinate system: 1 = GCJ-02 (AMap native), 2 = WGS-84,
#         3 = BD-09 (Baidu).

coord = 3
############################################  do not edit below this line  #######################################################################
poi_pology_search_url = 'https://restapi.amap.com/v3/place/polygon'

def init_queen():
    """Load every configured AMap key into the rotating key queue."""
    buffer_keys.extend(gaode_key)
    print('当前可供使用的高德密钥：', buffer_keys)

# 根据城市名称和分类关键字获取poi数据
# Fetch all POI data for one grid cell, paging through the polygon search.
def getpois(grids, keywords, pology_split_distance, tables=None):
    """Crawl every POI result page for one rectangular grid.

    Args:
        grids: [lng1, lat1, lng2, lat2] — the grid rectangle's corners.
        keywords: POI type name/code passed to the AMap polygon search.
        pology_split_distance: current grid edge length in degrees; when a
            grid holds too many results it is split at half this size.
        tables: accumulator of sub-grids that still need crawling; a fresh
            list is created when omitted (avoids the shared mutable-default
            pitfall of the previous `tables=[]`).

    Returns:
        (poilist, tables): POIs collected for this grid, plus any sub-grids
        queued for re-crawling at a finer resolution.
    """
    if tables is None:
        tables = []
    # Previously this tested buffer_keys.maxlen == 0, which never changes
    # after construction; test the live length instead.
    if len(buffer_keys) == 0:
        print('密钥已经用尽，程序退出！！！！！！！！！！！！！！！')
        exit(0)
    amap_key = buffer_keys[0]  # always use the first key in the queue
    page = 1
    poilist = []
    while True:  # page through results until the API reports count == '0'
        print('当前正在爬取{0}页'.format(page))
        result = getpoi_page(grids, keywords, page, amap_key)
        print("当前爬取结果:", result)
        if result is not None:
            result = json.loads(result)  # parse the raw JSON text
            try:
                if result['count'] == '0':
                    break
            except Exception as e:
                # error responses carry no 'count'; fall through to the
                # infocode check below
                print('出现异常：', e)
            if result['infocode'] == '10001' or result['infocode'] == '10003' or result['infocode'] == '10044':
                print(result)
                print('无效的密钥！！！！！！！！！！！！！，重新切换密钥进行爬取')
                buffer_keys.popleft()  # discard the invalid/exhausted key
                try:
                    amap_key = buffer_keys[0]
                except Exception:
                    print('密钥已经用尽，程序退出...')
                    exit(0)
                # Retry the same page with the fresh key. Previously the
                # stale error response fell through to int(result['count'])
                # below, which raised KeyError.
                continue
            if int(result['count']) > 800:
                # Too many POIs for one grid (API caps paging): split it in
                # four at half the edge length and queue the pieces.
                gridss = city_grid.generate_grids(grids[0], grids[1], grids[2], grids[3], pology_split_distance / 2)
                tables.extend(gridss)  # was a loop that also clobbered `grids`
                break
            # NOTE: the old code issued a second, redundant getpoi_page()
            # request here for the same page — removed to halve traffic.
            hand(poilist, result)
            page += 1
    return poilist, tables

# 数据写入csv文件中
# Write the crawled POI records to data/<citycode>/poi-<citycode>-<classfield>.csv
def write_to_csv(poilist, citycode, classfield, coord):
    """Persist POI dicts to a CSV file, optionally converting coordinates.

    Args:
        poilist: list of POI dicts as returned by the AMap polygon search
            (keys: location, name, address, pname, cityname, business_area,
            type, typecode, id).
        citycode: city identifier; used for the output folder and file name.
        classfield: POI type label; used in the output file name.
        coord: 1 = keep GCJ-02 (AMap native), 2 = convert to WGS-84,
            3 = convert to BD-09.

    Returns:
        (folder, file_name) of the written CSV, or (None, None) when
        poilist is empty.
    """
    if not poilist:
        print("处理完成，当前citycode:" + str(citycode), ", classfield为：", str(classfield) + "，数据为空，，，结束.......")
        return None, None
    rows = []
    for poi in poilist:
        location = poi.get('location')
        lng = str(location).split(",")[0]
        lat = str(location).split(",")[1]
        if coord == 2:
            lng, lat = gcj02_to_wgs84(float(lng), float(lat))
        elif coord == 3:
            lng, lat = gcj02_to_bd09(float(lng), float(lat))
        poi_type = poi.get('type')
        # 'type' is "level1;level2;level3;level4" — split into up to four
        # columns, padding missing levels with ''. (The old check
        # `str(type) != None` was always True, so a None type crashed on
        # .split; also `type`/`id` shadowed builtins and the inner loop
        # reused the outer index variable.)
        levels = poi_type.split(';')[:4] if poi_type else []
        levels += [''] * (4 - len(levels))
        business_area = poi.get('business_area')
        if business_area == []:
            business_area = ''  # AMap returns [] when there is no area
        rows.append({
            'lon': lng,
            'lat': lat,
            'name': poi.get('name'),
            'address': poi.get('address'),
            'pname': poi.get('pname'),
            'cityname': poi.get('cityname'),
            'business_area': business_area,
            'type': poi_type,
            'typecode': poi.get('typecode'),
            'id': poi.get('id'),
            'type1': levels[0],
            'type2': levels[1],
            'type3': levels[2],
            'type4': levels[3],
        })
    # Fix the column order so the CSV header matches the original layout.
    columns = ['lon', 'lat', 'name', 'address', 'pname', 'cityname',
               'business_area', 'type', 'typecode', 'id',
               'type1', 'type2', 'type3', 'type4']
    df = pd.DataFrame(rows, columns=columns)
    folder_name_full = 'data/' + citycode + '/'
    os.makedirs(folder_name_full, exist_ok=True)
    file_name = 'poi-' + citycode + "-" + classfield + ".csv"
    file_path = folder_name_full + file_name
    # utf_8_sig adds a BOM so Excel opens the Chinese text correctly.
    df.to_csv(file_path, index=False, encoding='utf_8_sig')
    print('写入成功')
    return folder_name_full, file_name
# 将返回的poi数据装入集合返回
# Collect one page worth of POIs into the shared accumulator list.
def hand(poilist, result):
    """Extend *poilist* with the 'pois' array of a parsed AMap response."""
    poilist.extend(result['pois'])


# 单页获取pois
def getpoi_page(grids, types, page, key):
    """Request one result page of the AMap polygon POI search.

    Args:
        grids: [lng1, lat1, lng2, lat2] — two corners of the rectangle.
        types: POI type name/code (URL-quoted into the request).
        page: 1-based page number; offset is fixed at 50 records per page.
        key: AMap API key to use for this request.

    Returns:
        The raw JSON response text, or None when both attempts fail.
        (Previously the second, unguarded retry could raise out of the
        function, leaving the trailing `return None` unreachable, and the
        Session was never closed.)
    """
    polygon = str(grids[0]) + "," + str(grids[1]) + "|" + str(grids[2]) + "," + str(grids[3])
    req_url = poi_pology_search_url + "?key=" + key + '&extensions=all&types=' + quote(
        types) + '&polygon=' + polygon + '&offset=50' + '&page=' + str(
        page) + '&output=json'
    print('请求url：', req_url)

    # Transport-level retries on top of one application-level retry below;
    # the context manager guarantees the session is closed.
    with requests.Session() as s:
        s.mount('http://', HTTPAdapter(max_retries=5))
        s.mount('https://', HTTPAdapter(max_retries=5))
        try:
            return s.get(req_url, timeout=5).text
        except requests.exceptions.RequestException:
            try:
                return s.get(req_url, timeout=5).text
            except requests.exceptions.RequestException:
                return None


def get_drids(min_lng, max_lat, max_lng, min_lat, keyword, key, pology_split_distance, all_grids):
    """Recursively split a bounding box into grids small enough to crawl.

    Probes each candidate grid with a single request; a grid reporting more
    than 880 POIs is subdivided (at half the edge length) instead of being
    kept, so every grid in *all_grids* fits within the API's paging limit.

    Returns *all_grids* with the accepted grids appended.
    """
    grids_lib = city_grid.generate_grids(min_lng, max_lat, max_lng, min_lat, pology_split_distance)
    for grid in grids_lib:
        one_pology_data = getpoi_page(grid, keyword, 1, key)
        data = json.loads(one_pology_data)
        print(grid[0], grid[1], grid[2], grid[3])
        # BUG FIX: this was `while int(data['count']) > 880:` with no
        # re-fetch of `data`, so the condition never changed — an infinite
        # loop. The recursion also reused the same split distance (same
        # grids forever) and the oversized grid was still appended,
        # duplicating its POIs.
        if int(data['count']) > 880:
            print("正在切割:")
            get_drids(grid[0], grid[1], grid[2], grid[3], keyword, key,
                      pology_split_distance / 2, all_grids)
        else:
            all_grids.append(grid)
    return all_grids

def get_grides_data(grids_lib, keyword, pology_split_distance):
    """Crawl every grid in *grids_lib* for *keyword* POIs.

    Returns (collected, pending): all POIs gathered across the grids, and
    the sub-grids that overflowed and must be re-crawled at a finer size.
    """
    pending = []
    collected = []
    for cell in tqdm(grids_lib):
        pois, pending = getpois(cell, keyword, pology_split_distance, pending)
        collected.extend(pois)
    return collected, pending


def get_data(city, keyword, coord,pology_split_distance):
    """Crawl all POIs of one type for a whole city and write them to CSV.

    city: city code/name (also used for the output folder and file name).
    keyword: POI type name or code for the polygon search.
    coord: output coordinate system (1 GCJ-02, 2 WGS-84, 3 BD-09).
    pology_split_distance: initial grid edge length in degrees; overflowing
        grids are re-crawled at half the size until none overflow.
    """
    # 1. Get the bounding box (max/min lng/lat) of the city boundary.
    amap_key = buffer_keys[0]  # always use the first key in the queue
    max_lng, min_lng, max_lat, min_lat = area_boundary.getlnglat(city, amap_key)
    print('当前城市：', city, "max_lng, min_lng, max_lat, min_lat：", max_lng, min_lng, max_lat, min_lat)
    # 2. Split the bounding box into rectangular grid cells.
    grids_lib = city_grid.generate_grids(min_lng, max_lat, max_lng, min_lat, pology_split_distance)
    #grids_lib = get_drids(min_lng, max_lat, max_lng, min_lat,keyword,key=amap_key,pology_split_distance=0.1,all_grids=[])
    print('划分后的网格数X：', len(grids_lib))
    print(grids_lib)
    all_data = []
    begin_time = time.time()
    print('==========================正式开始爬取啦！！！！！！！！！！！================================')
    # for grid in tqdm(grids_lib):
    #     # grid format: [112.23, 23.23, 112.24, 23.22]
    #     one_pology_data,tables = getpois(grid, keyword,grids_lib)
    one_pology_data, tables =get_grides_data(grids_lib,keyword,pology_split_distance)
    print('one_pology_data:',len(one_pology_data))
    print('table 的长度:', len(tables))
    all_data.extend(one_pology_data)
    # Grids that reported too many POIs were queued in `tables`; re-crawl
    # them at half the grid size, repeating until no grid overflows.
    while len(tables)>0:
        print('table 的长度:',len(tables))
        pology_split_distance = pology_split_distance / 2
        grids_lib = tables
        one_pology_data, tables = get_grides_data(grids_lib, keyword,pology_split_distance)
        all_data.extend(one_pology_data)
        print('all_data 的长度:', len(all_data))

    end_time = time.time()
    print('全部：', str(len(grids_lib)) + '个矩形范围', '总的', str(len(all_data)), '条数据, 耗时：', str(end_time - begin_time),
          '正在写入CSV文件中')
    file_folder, file_name = write_to_csv(all_data, city, keyword, coord)
    # Shapefile export was removed in the 2021-12 revision (see header note).
    #if file_folder is not None:
        #trans_point_to_shp(file_folder, file_name, 0, 1, pology_split_distance, keyword)

if __name__ == '__main__':
    # Parse CLI arguments, initialise the key queue, then crawl every POI
    # type listed in poi.txt that does not already have an output CSV.
    import argparse
    parser = argparse.ArgumentParser(description='Personal information')
    parser.add_argument('--city_code', dest='city_code', type=str, help='Name of province')
    parser.add_argument('--start', dest='start', type=int, help='the start point of kyes list')
    parser.add_argument('--end', dest='end', type=int, help='the end point  of kyes list')
    args = parser.parse_args()
    city_code = args.city_code
    start = args.start
    end = args.end

    # One POI type per line. Use a context manager so the file is closed,
    # and rstrip('\n') instead of [:-1] — the old slice chopped a real
    # character off the last line when the file had no trailing newline.
    with open(r'poi.txt', encoding='utf-8') as f:
        poi_types = [line.rstrip('\n') for line in f]
    # Globals consumed by init_queen()/getpois(): the key slice and queue.
    gaode_key = gaode_keys[start:end]
    buffer_keys = collections.deque(maxlen=len(gaode_key))
    folder_name_full = 'data/' + city_code + '/'
    os.makedirs(folder_name_full, exist_ok=True)
    all_exists = os.listdir(folder_name_full)
    print("已经存在的文件:", all_exists)
    for poi_type in poi_types:  # renamed from `type` (shadowed the builtin)
        file_name = 'poi-' + city_code + "-" + poi_type + ".csv"
        print(file_name)
        if file_name not in all_exists:
            init_queen()
            get_data(city_code, poi_type, coord, pology_split_distance)
        else:
            print('已经存在：', file_name)