import scrapy
import json

from catchFarm.items import CatchfarmItem

# Debug marker that runs once at module import time ("构造" = "construct").
print("构造")


class FarmSpider(scrapy.Spider):
    """Crawl the Baidu Place API for livestock ("牧业") companies in Henan cities.

    The spider requests one city at a time: each parsed response yields its
    items and then schedules the request for the next city in ``areas``,
    tracked by the class-level ``index`` counter, until all cities are done.
    """

    name = 'farm'
    allowed_domains = ['api.map.baidu.com']
    # Search URL template; ``{}`` is filled with a Baidu region code.
    # NOTE(review): the API key (ak=...) is hard-coded here — consider moving
    # it to Scrapy settings or an environment variable instead of source control.
    url = 'https://api.map.baidu.com/place/v2/search?query=牧业&tag=公司企业&region={}&output=json&ak=IeGD9YehRfcNcvLZ1KCtbskl4828dKqh&coord_type=1'

    # Cities to crawl: human-readable name plus Baidu region code.
    areas = [
        {
            "name": '郑州市',
            "code": '268'
        }, {
            "name": '驻马店市',
            "code": '269'
        }, {
            "name": '安阳市',
            "code": '267'
        }, {
            "name": '新乡市',
            "code": '152'
        }, {
            "name": '洛阳市',
            "code": '153'
        }, {
            "name": '商丘市',
            "code": '154'
        }, {
            "name": '许昌市',
            "code": '155'
        }, {
            "name": '濮阳市',
            "code": '209'
        },
        {
            "name": '开封市',
            "code": '210'
        }, {
            "name": '焦作市',
            "code": '211'
        }, {
            "name": '三门峡市',
            "code": '212'
        }, {
            "name": '平顶山市',
            "code": '213'
        }, {
            "name": '信阳市',
            "code": '214'
        }, {
            "name": '鹤壁市',
            "code": '215'
        }, {
            "name": '周口市',
            "code": '308'
        },
        {
            "name": '南阳市',
            "code": '309'
        },
        {
            "name": '漯河市',
            "code": '344'
        }
    ]
    # Position in ``areas`` of the city whose request is currently in flight.
    index = 0
    start_urls = [url.format(areas[0]['code'])]

    def parse(self, response):
        """Yield one CatchfarmItem per place in the response, then queue the next city.

        Places without a ``location`` (no coordinates) are skipped; optional
        text fields fall back to an empty string so one incomplete record
        cannot abort the whole city with a KeyError.
        """
        self.logger.info(
            "request %d/%d: %s",
            self.index + 1, len(self.areas), self.areas[self.index]['name'],
        )

        payload = json.loads(response.text)
        # Error responses from the API carry no "results" key; treat as empty.
        for result in payload.get('results', []):
            # Coordinates are mandatory downstream; skip entries without them.
            if 'location' not in result:
                continue
            item = CatchfarmItem()
            item['name'] = result['name']
            item['lat'] = result['location']['lat']
            item['lng'] = result['location']['lng']
            item['address'] = result.get('address', '')
            item['province'] = result.get('province', '')
            item['city'] = result.get('city', '')
            item['area'] = result.get('detail', '')
            item['uid'] = result['uid']
            yield item

        self.index += 1
        if self.index < len(self.areas):
            # Chain the next city's request through the same callback.
            yield scrapy.Request(self.update_url(), callback=self.parse)

    def update_url(self):
        """Return the search URL for the city at the current ``index``."""
        next_url = self.url.format(self.areas[self.index]['code'])
        self.logger.debug("next url: %s", next_url)
        return next_url
