# -*- coding: utf-8 -*-
import json

import scrapy


class ImageItem(scrapy.Item):
    """Container for one scraped Baidu image-search result."""
    count = scrapy.Field()    # running index assigned by the spider as items are yielded
    keyword = scrapy.Field()  # search keyword that produced this image
    images = scrapy.Field()   # thumbnail URL (taken from 'thumbURL' in Baidu's JSON)


class BaiduSpider(scrapy.Spider):
    """Crawl Baidu's image-search JSON API and yield one ImageItem per thumbnail.

    Run with spider arguments, e.g.:
        scrapy crawl baidu -a keyword=cats -a num=10

    ``num`` is the number of result pages to request; each page holds up to
    30 results (``rn=30`` in the URL template).
    """
    name = "baidu"
    allowed_domains = ["baidu.com", 'ss3.bdstatic.com']
    # Paginated search endpoint. Placeholders: queryWord, word, pn (result offset).
    img_url = "https://image.baidu.com/search/acjson?tn=resultjson_com&ipn=rj&ct=201326592&is=&fp=result&queryWord" \
              "={}&cl=2&lm=-1&ie=utf-8&oe=utf-8&adpicid=&st=-1&z=&ic=&hd=&latest=&copyright=&word={}"\
              "&s=&se=&tab=&width=&height=&face=0&istype=2&qc=&nc=1&fr=&expermode=&force=&pn={}&rn=30" \
              "&gsm=96&1555918892279= "

    def __init__(self, keyword=None, num=1000, **kwargs):
        """Store crawl parameters.

        :param keyword: search term to query (required in practice; defaults
            to None so Scrapy's ``-a`` argument passing works).
        :param num: number of result pages to fetch (may arrive as a string
            from the command line; coerced in start_requests).
        """
        super().__init__(**kwargs)
        self.keyword = keyword
        self.num = num
        self.count = 0  # global running index across all yielded items

    def start_requests(self):
        """Issue one request per result page; pn advances in steps of 30."""
        for i in range(int(self.num)):
            yield scrapy.Request(url=self.img_url.format(self.keyword, self.keyword, i * 30), callback=self.parse)

    def parse(self, response):
        """Parse one JSON result page and yield an ImageItem per thumbnail.

        Entries without a usable 'thumbURL' (e.g. trailing placeholder
        entries in Baidu's result list) are skipped explicitly rather than
        via exception handling.
        """
        try:
            payload = json.loads(response.text)
        except ValueError:
            # Baidu occasionally returns a non-JSON page (captcha/blocked);
            # log and move on rather than crash the crawl.
            self.logger.warning("non-JSON response from %s", response.url)
            return
        # 'data' may be absent on error responses — default to an empty list.
        for data in payload.get('data', []):
            if not isinstance(data, dict):
                continue
            thumb = data.get('thumbURL')
            if not thumb:
                # The original code skipped these via a broad try/except;
                # entries lacking a thumbnail are expected and not an error.
                continue
            item = ImageItem()
            item['count'] = self.count
            item['keyword'] = self.keyword
            item['images'] = thumb
            self.count += 1
            yield item


if __name__ == "__main__":
    # Placeholder entry point — this spider is normally launched via
    # `scrapy crawl baidu -a keyword=... -a num=...`, not by running the file.
    # crawl = CrawlProcess(get_project_settings())

    pass


