import requests,time
from bs4 import BeautifulSoup
# Target listing: one tablet-PC (pingbandiannao) detail page on 58.com Beijing.
url = 'http://bj.58.com/pingbandiannao/17163938935304x.shtml?adtype=1&PGTID=0d305a36-0000-1741-87ba-8367b07f8931&entinfo=17163938935304_0&psid=109518311199817489453991436&iuType=_undefined&ClickID=2'
# Request headers reused by every fetch in this script: the Cookie carries a
# captured browsing session and the User-Agent impersonates desktop Chrome so
# the site serves the normal HTML page.
headers = {
    'Cookie' : 'f=n; userid360_xml=40B46724CEE5F2DD6F1F4627240886CB; time_create=1527086124103; f=n; sessionid=579f3391-a7a6-4ae9-ac6b-5f829e723e59; id58=c5/njVrdoZtIn+2tBIrvAg==; 58tj_uuid=ca3e5e00-5a5d-48b6-8191-c566e0bd7046; als=0; xxzl_deviceid=Q8JunZ%2FgUuvlIISeWuRYDS3IryKscXpI6vjiWRVdeSDOUSu4Tc%2B8MGZhlY3n%2Brg%2F; 58home=jdz; myfeet_tooltip=end; ipcity=jdz%7C%u666F%u5FB7%u9547; f=n; final_history=17163938935304%2C33815247609917; bdshare_firstime=1524488385794; new_uv=5; utm_source=; spm=; init_refer=http%253A%252F%252Fbj.58.com%252Fpingbandiannao%252F0%252F%253FPGTID%253D0d305a36-0000-1300-a0c9-84577d923df1%2526ClickID%253D1; new_session=0; city=jdz; commontopbar_ipcity=jdz%7C%E6%99%AF%E5%BE%B7%E9%95%87%7C0; commontopbar_new_city_info=1%7C%E5%8C%97%E4%BA%AC%7Cbj',
    'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36'
}
# NOTE(review): this GET runs at import time — a network side effect on module
# load. The functions below read the resulting `soup`/`url` globals.
web_data = requests.get(url,headers=headers)
soup = BeautifulSoup(web_data.text,'lxml')
def get_item_info(page_soup=None):
    """Extract title, posting time, price, and area from a listing detail page.

    Args:
        page_soup: A parsed ``BeautifulSoup`` page. Defaults to the
            module-level ``soup`` (the page fetched at import time), which
            preserves the original no-argument behavior.

    Returns:
        A list of dicts with keys ``title``, ``time``, ``price``, ``area``
        (``area`` is a list of stripped text fragments). Each dict is also
        printed, matching the original side effect.
    """
    # Fall back to the module-level page so existing callers keep working.
    page_soup = soup if page_soup is None else page_soup
    # Plural names: the originals shadowed the imported `time` module and then
    # rebound all four names to single elements inside the loop.
    titles = page_soup.select('div.col_sub > h1')
    times = page_soup.select('li.time')
    prices = page_soup.select('span.price')
    areas = page_soup.select('span.c_25d')
    items = []
    for title_tag, time_tag, price_tag, area_tag in zip(titles, times, prices, areas):
        data = {
            'title' : title_tag.get_text(),
            'time' : time_tag.get_text(),
            'price' : price_tag.get_text(),
            'area' : list(area_tag.stripped_strings)
        }
        print(data)
        items.append(data)
    return items

def judge(who_sell=0):
    """Fetch a 58.com tablet-PC list page and collect the item detail URLs.

    Args:
        who_sell: Seller-type path segment for the list page (0 = personal,
            as on 58.com category URLs). Defaults to 0.

    Returns:
        The list of detail-page URLs with query strings removed.
        (Bug fix: the original built and printed ``urls`` but returned the
        local list-page ``url`` string instead.)
    """
    list_url = 'http://bj.58.com/pbdn/{}/'.format(str(who_sell))
    web_data = requests.get(list_url, headers=headers)
    page = BeautifulSoup(web_data.text, 'lxml')
    links = page.select('td.t_b > a')
    # Drop the '?...' tracking query string from each href.
    urls = [link.get('href').split('?')[0] for link in links]
    print(urls)
    return urls

def get_view_from(page_url=None):
    """Query 58.com's counter API for a listing's view count.

    Args:
        page_url: A listing detail URL whose last path segment looks like
            ``<infoid>x.shtml[?query]``. Defaults to the module-level ``url``,
            preserving the original no-argument behavior.

    Returns:
        The view count as a string (the text after the last '=' in the
        counter API response).
    """
    page_url = url if page_url is None else page_url
    # Bug fix: the original used .strip('x.shtml'), which strips a *character
    # set* from both ends; with a query string attached neither end matched,
    # so the whole tail (query string included) was sent as the info id.
    # Split on the 'x.shtml' suffix to isolate the numeric id instead.
    info_id = page_url.split('/')[-1].split('x.shtml')[0]
    print(info_id)
    api = 'http://jst1.58.com/counter?infoid={}'.format(info_id)
    js = requests.get(api)
    view = js.text.split('=')[-1]
    print(view)
    return view


# Guard the script entry point so importing this module doesn't trigger an
# extra network call beyond the page fetched at module load.
if __name__ == '__main__':
    get_view_from()