# -*- coding: utf-8 -*-
import scrapy
from scrapy import Request
from urllib import parse

from city_house import items


class AnjuSpider(scrapy.Spider):
    name = 'anju'
    # allowed_domains must hold bare registrable domains, not URLs:
    # Scrapy's OffsiteMiddleware compares request hostnames against these
    # entries, so a full URL here would cause every request to be dropped.
    allowed_domains = ['anjuke.com']
    # The original value was malformed ('http://https://...'); use the
    # actual city-index page URL.
    start_urls = ['https://www.anjuke.com/sy-city.html']

    # Browser-like request headers (including a captured session cookie)
    # used to reduce the chance of anti-scraping blocks.
    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36',
        'referer': 'https://baoding.anjuke.com/community/?from=navigation',
        'upgrade-insecure-requests': 1,
        'cookie': 'aQQ_ajkguid=86D82CDD-A10D-A615-F820-SX0701142140; isp=true; 58tj_uuid=e3205988-3644-409b-817e-663ca774aa14; als=0; _ga=GA1.2.1422172495.1563788112; _gid=GA1.2.1926736827.1563788112; init_refer=https%253A%252F%252Fbaoding.anjuke.com%252Fcommunity%252Fgaobeidiana%252F; new_uv=4; new_session=0; sessid=819ABC2E-9FB7-6BEC-977C-1775864A38C6; lps=http%3A%2F%2Flogin.anjuke.com%2Flogin%2Fform%3Fhistory%3DaHR0cHM6Ly9zYW55YS5hbmp1a2UuY29tL2FudGlzcGFtLWJsb2NrLz9mcm9tPWFudGlzcGFt%7Chttps%3A%2F%2Fsanya.anjuke.com%2Fantispam-block%2F%3Ffrom%3Dantispam; twe=2; ajk_member_captcha=c9657c0c388f85b7582e7fcb0b74b9e3; Hm_lvt_c5899c8768ebee272710c9c5f365a6d8=1561962102,1563850408; Hm_lpvt_c5899c8768ebee272710c9c5f365a6d8=1563850408; ctid=14; __xsptplus8=8.4.1563850463.1563850610.2%232%7Cwww.baidu.com%7C%7C%7C%7C%23%237w7z9LK1dqVFi1WHGDCmWamaHSvxv8V1%23; wmda_uuid=796d0822ea2a2bcc53b6297d3d15a2a6; wmda_new_uuid=1; wmda_session_id_6145577459763=1563851513844-a5c773cc-5532-9f0a; wmda_visited_projects=%3B6145577459763',
        'accept-language': 'zh-CN,zh;q=0.9',
        'accept-encoding': 'gzip,deflate,br',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3'
    }

    def start_requests(self):
        """Yield the initial Requests for every configured start URL.

        Fixes over the previous version:

        * Removed the ``@property`` decorator — Scrapy invokes
          ``spider.start_requests()`` as a method; decorating it as a
          property made that call blow up (the property already returned
          the generator, which is not callable).
        * Removed the debug scratch code that shadowed the builtins
          ``str``/``list``, split a hard-coded mobile-map URL with
          ``urllib.parse.unquote``, and built an empty ``CityHouseItem``
          without ever populating it.
        * Removed the unconditional bare ``return`` that made the
          request-yielding loop below unreachable, so the spider never
          actually crawled anything.
        """
        for url in self.start_urls:
            # dont_filter=True: skip the dupe/offsite filters for the
            # seed request so it is always issued.
            yield Request(url, callback=self.parse, headers=self.headers, dont_filter=True)

    def parse(self, response):
        """Extract each city's community-listing URL from the city index page.

        Fixes over the previous version:

        * Removed the debug ``print(response.text)`` followed by an
          unconditional ``return`` that made the extraction below
          unreachable.
        * Removed an expression statement whose xpath result was
          discarded (it had no effect).
        * Dropped the no-op ``url + ''`` concatenation.
        """
        urls = response.xpath('//div[@class="letter_city"]//div[@class="city_list"]/a/@href').extract()
        for url in urls:
            # TODO(review): append the community-list path segment once
            # the target page layout is confirmed; for now just report it.
            house_url = url
            print(house_url)

    def get_info(self,response):
        # Extract the per-city community-list links from the letter-indexed
        # city directory; same xpath as parse().  NOTE(review): the result
        # is currently unused in the visible code — confirm intended use.
        urls = response.xpath('//div[@class="letter_city"]//div[@class="city_list"]/a/@href').extract()