import requests
import json
import pandas as pd
import os

def city_map_spider(index):
    """Fetch Ctrip's city list for one pinyin initial.

    Args:
        index: Single uppercase letter ('A'..'Z') — the city-name initial
            used by Ctrip's static city API.

    Returns:
        The parsed JSON payload (a dict; callers read its 'cities' list).

    Raises:
        requests.exceptions.RequestException: if both the direct request and
            the proxied retry fail.
        requests.exceptions.HTTPError: if the server answers with an error
            status code.
    """
    url = 'https://m.ctrip.com/webapp/hotel/j/hoteldetail/dianping/api/static/city'
    params = {
        'oversea': 'false',
        'index': index,
        'pageid': '212092',
    }
    try:
        response = requests.get(url, params=params, timeout=10)
    except requests.exceptions.RequestException:
        # Direct access failed (e.g. blocked network); retry through the
        # local proxy. Catch only network-level errors — a bare except would
        # also swallow KeyboardInterrupt and genuine bugs.
        proxies = {
            'https': '127.0.0.1:7890'
        }
        response = requests.get(url, params=params, proxies=proxies, timeout=10)

    response.raise_for_status()  # fail loudly on 4xx/5xx instead of parsing an error page
    data = response.json()       # parse once; the original parsed twice (print + return)
    print(data)
    return data


# Pinyin initials under which Ctrip groups cities (letters with no cities,
# e.g. I/O/V, are absent).
city_map = ["A", "B", "C", "D", "E", "F", "G", "H", "J", "K", "L", "M", "N", "P", "Q", "R", "S", "T", "U", "W", "X", "Y", "Z"]

# Output directory: <parent of this file>/static/spider/data/city.
# Built once with separate path components (portable) instead of a
# forward-slash fragment, and created up front so to_csv cannot fail
# on a missing directory.
out_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), "..", "static", "spider", "data", "city"))
os.makedirs(out_dir, exist_ok=True)

for city in city_map:
    # 'cities' is the list of city records for this initial letter.
    to_datas = city_map_spider(city)['cities']
    # Column-oriented dict for pandas; one comprehension per column
    # replaces the original index-based append loop.
    city_data = {
        'index': [city] * len(to_datas),
        'cityId': [to_data['cityId'] for to_data in to_datas],
        'cname': [to_data['cname'] for to_data in to_datas],
        'ctryId': [to_data['ctryId'] for to_data in to_datas],
        'countryName': [to_data['countryName'] for to_data in to_datas],
        'hot': [to_data['hot'] for to_data in to_datas],
    }
    df = pd.DataFrame(city_data)
    # One CSV per initial letter, e.g. A_city.csv.
    df.to_csv(os.path.join(out_dir, '{}_city.csv'.format(city)),
              index=False, encoding='utf-8')