from bs4 import BeautifulSoup
import pandas as pd
import requests

def get_static_url_content(url, timeout=10):
    """Fetch a static page and return it parsed as a BeautifulSoup object.

    Args:
        url: Absolute URL of the page to fetch.
        timeout: Seconds to wait for the server before giving up
            (default 10; previously the request could hang indefinitely).

    Returns:
        BeautifulSoup: the parsed HTML document (lxml parser).

    Raises:
        requests.HTTPError: if the server responds with a 4xx/5xx status.
        requests.RequestException: on connection failure or timeout.
    """
    # Desktop browser UA: some sites serve different (or no) content to
    # the default python-requests user agent.
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20100101 Firefox/23.0'}
    req = requests.get(url, headers=headers, timeout=timeout)
    # Fail fast on error pages instead of silently parsing them.
    req.raise_for_status()
    bsObj = BeautifulSoup(req.text, 'lxml')
    return bsObj

def get_city_id():
    """Scrape the Qunar place index and map destination names to their URLs.

    Fetches http://travel.qunar.com/place/, walks every ``div.sub_list``
    section, and collects each linked destination.

    Returns:
        tuple[list[str], dict[str, str]]:
            - list of destination names in page order (may contain duplicates);
            - dict mapping destination name -> href of its place page
              (a duplicate name keeps only the last href seen).
    """
    url = 'http://travel.qunar.com/place/'
    bsObj = get_static_url_content(url)
    cat_name = []
    code = {}
    # Each sub_list block groups destinations; iterate the anchors directly
    # instead of indexing by range(len(...)).
    for sub_list in bsObj.find_all('div', attrs={'class': 'sub_list'}):
        for anchor in sub_list.find_all('a'):
            name = anchor.text
            cat_name.append(name)
            # `href` holds the destination's place-page path, used as its id.
            code[name] = anchor.attrs['href']
    return cat_name, code

# city_name_list,city_url_list=get_city_id()
# city=pd.DataFrame({'city_name':city_name_list,'city_code':city_url_list})
# city.to_csv('city.csv',encoding='utf_8_sig')
# namelist,code=get_city_id()
