from scrapy.spider import Spider
from scrapy.selector import Selector
import scrapy
from jav.items import ImageItem
import re
import requests

class JavSpider(scrapy.Spider):
    """Spider for jav321.com star pages.

    Builds paginated start URLs (pages 1..49) for every star index page in
    ``capture_url``, then extracts cover-image URLs and names from each
    response and yields one ``ImageItem`` per image.
    """

    name = "jav"
    allowed_domains = ["www.jav321.com"]

    # Star index pages to crawl.  Each comment names the star for the ID.
    # NOTE(review): the original list was missing trailing commas after the
    # 26225 and 1026716 entries, so Python silently concatenated adjacent
    # string literals into two invalid URLs and four stars were skipped;
    # the commas are restored below.
    capture_url = [
            # 木村都那
            #"https://www.jav321.com/star/1014011/",
            # 本田岬
            "https://www.jav321.com/star/1017267/",
            # 吉泽明步
            "https://www.jav321.com/star/6644/",
            # 水野朝阳
            "https://www.jav321.com/star/1019300/",
            # 水谷心音
            "https://www.jav321.com/star/28168/",
            # 波多野结衣
            "https://www.jav321.com/star/26225/",
            # 三上悠亜
            "https://www.jav321.com/star/1030262/",
            # 河北春菜
            #"https://www.jav321.com/star/1035159/",
            # 小早川怜子
            "https://www.jav321.com/star/1014614/",
            # 天海翼
            "https://www.jav321.com/star/1001592/",
            # 北川エリカ
            "https://www.jav321.com/star/1003758/",
            # 鶴田かな
            "https://www.jav321.com/star/1012352/",
            # 音羽レオン
            #"https://www.jav321.com/star/29815/",
            # 碧しの
            #"https://www.jav321.com/star/1001722/",
            # 奥田咲
            "https://www.jav321.com/star/1008965/",
            # 麻生ゆう
            "https://www.jav321.com/star/1017081/",
            # 长濑麻美
            "https://www.jav321.com/star/1026716/",
            # 成宫晴明
            "https://www.jav321.com/star/1032760/",
            # 结菜绘麻
            "https://www.jav321.com/star/1032504/",
            # 桐嶋莉乃
            "https://www.jav321.com/star/1029403/",
            # 今永沙奈
            "https://www.jav321.com/star/1034853/",
            # 长泽梓
            "https://www.jav321.com/star/26278/",
            # 樱空桃
            "https://www.jav321.com/star/1039157/",
            # 小野麻利亚
            "https://www.jav321.com/star/1011020/"
    ]

    # Each star page is paginated; append page numbers 1..49 to every base
    # URL to produce the full crawl frontier.
    start_urls = []
    for url in capture_url:
        for page in range(1, 50):
            start_urls.append(url + str(page))

    # NOTE(review): the original defined `def __init__(self): pass`, which
    # overrode scrapy.Spider.__init__ without calling super() and broke the
    # keyword arguments Scrapy passes on instantiation.  Removing it restores
    # the inherited constructor and stays backward compatible.

    def parse(self, response):
        """Extract cover images from one star listing page.

        Yields an ``ImageItem`` per image with:
        - ``image_urls``: thumbnail URL upgraded to the large variant
          (``s.jpg`` -> ``l.jpg``),
        - ``image_name``: "<code> <description>.jpg" when the name contains a
          space, otherwise "<name>.jpg",
        - ``title``: star name captured from the page heading, or the
          placeholder "test" when no heading matched.
        """
        hxs = Selector(response)
        # Page heading looks like "<star>的...": capture the text before '的'.
        title = hxs.xpath('//h3[contains(@align, "center")]/text()').re(r'(.*)的')
        image_url_list = hxs.xpath('//a[contains(@href, "video")]/img/@src').extract()
        image_name_list_raw = hxs.xpath('//a[contains(@href, "video")]/text()').re(r'(.*)')

        # Drop empty strings produced by whitespace-only text nodes.
        image_name_list = [name for name in image_name_list_raw if str(name)]

        # Request the large image instead of the small thumbnail.
        image_url_list = [str(u).replace("s.jpg", "l.jpg") for u in image_url_list]

        # zip() pairs urls with names and stops at the shorter list; the
        # original indexed loop raised IndexError when there were fewer
        # names than urls.
        for image_url, image_name in zip(image_url_list, image_name_list):
            item = ImageItem()
            item['image_urls'] = image_url
            split_index = image_name.rfind(" ")
            if split_index != -1:
                # Name is "<description> <code>": store as "<code> <description>.jpg".
                code_name = image_name[split_index + 1:]
                des = image_name[:split_index]
                item['image_name'] = code_name + " " + des + ".jpg"
            else:
                item['image_name'] = image_name + '.jpg'
            item['title'] = title[0] if len(title) > 0 else "test"
            yield item

