import pymysql
import scrapy
from ..settings import MYSQL_CONF
from ..items import JdItemInfoItem


class JdItemInfoCrawler(scrapy.Spider):
    """Crawl JD product detail pages whose URLs were stored by the search spider.

    URLs are stored scheme-less in MySQL (e.g. ``//item.jd.com/...``); the
    ``https:`` scheme is prepended when requesting and stripped again before
    the item is yielded, so the stored form round-trips unchanged.
    """

    name = 'jd_item_info_crawler'

    def start_requests(self):
        """Read product URLs from the ``jd_search`` table and request each page.

        The connection is closed in ``finally`` so a query failure cannot
        leak it; the cursor is closed by its context manager.
        """
        conn = pymysql.connect(**MYSQL_CONF)
        try:
            with conn.cursor() as cursor:
                cursor.execute('SELECT url FROM jd_search')
                rows = cursor.fetchall()
        finally:
            conn.close()
        for (url,) in rows:
            # A FormRequest with method='GET' and no form data is just a
            # plain GET; use the simpler Request.
            yield scrapy.Request(url=f'https:{url}')

    def parse(self, response, **kwargs):
        """Extract the product name and parameter details from a detail page.

        :param response: the product page response.
        :returns: yields one populated :class:`JdItemInfoItem`.
        """
        item = JdItemInfoItem()
        full_url = response.request.url
        # str.lstrip('https:') strips any leading run of the characters
        # h/t/p/s/: rather than the literal prefix, which can corrupt the
        # URL -- remove the exact 'https:' prefix instead to restore the
        # scheme-less form stored in the database.
        prefix = 'https:'
        item['url'] = full_url[len(prefix):] if full_url.startswith(prefix) else full_url
        item_info = response.css('div.itemInfo-wrap')
        name_parts = item_info.css('div.sku-name::text').getall()
        # Join stripped fragments; join avoids quadratic '+=' concatenation.
        item['name'] = ''.join(part.strip() for part in name_parts)
        detail_nodes = response.css('#detail > div.tab-con > div > div.p-parameter > ul.parameter2.p-parameter-list > li')
        # .get() returns None for an <li> with no direct text node; skip those
        # instead of raising TypeError on 'None + str'.
        detail_texts = (node.css('li::text').get() for node in detail_nodes)
        item['details'] = ''.join(text + ';' for text in detail_texts if text is not None)
        yield item






