import re

import scrapy
from scrapy import Selector
from zhongyaofang_base.items import ZhongyaofangBaseItem

from pymongo import MongoClient
# Module-level MongoDB connection shared by the spider below.
# NOTE(review): credentials are hard-coded in source — consider moving them to
# Scrapy settings or an environment variable.
# Connect to MongoDB (localhost, default port 27017).
client = MongoClient('mongodb://erp:erp123@localhost:27017', maxPoolSize=10,
                     minPoolSize=3,
                     maxConnecting=30,
                     socketTimeoutMS=100000,    # 100 s socket read/write timeout
                     maxIdleTimeMS=60000,       # drop pooled connections idle > 60 s
                     connectTimeoutMS=40000,    # 40 s to establish a connection
                     waitQueueTimeoutMS=10000)  # 10 s wait for a free pooled connection

db = client['erp']
# NOTE(review): the variable names are swapped relative to the collection names —
# ``collection`` holds the *index* records (read by start_requests) while
# ``collection_index`` holds the *detail* records (written by parse). Renaming
# would touch every method below, so only documenting it here.
collection = db['zhongyaofang_index']
collection_index = db['zhongyaofang_detail']
# collection_index = db['zhongyaocai_index']

class ZhongyaofangbaseSpider(scrapy.Spider):
    """Spider for TCM prescription (zhongyaofang) pages on zysj.com.cn.

    Reads previously stored index records from the ``collection`` MongoDB
    collection, fetches each detail page, and writes the parsed fields into
    ``collection_index``.
    """

    name = "zhongyaofangBase"
    allowed_domains = ["www.zysj.com.cn"]
    start_urls = ["https://www.zysj.com.cn/zhongyaofang/index.html"]
    base_url = "https://www.zysj.com.cn"
    base = "中医世家"  # site name, stored verbatim with every detail record

    def start_requests(self):
        """Yield one detail-page request per index document stored in MongoDB.

        The current skip/limit window (30000..40000) selects one batch of the
        index; adjust the offsets to crawl a different slice.
        """
        try:
            # Sort with a list of (key, direction) pairs: the dict form is only
            # accepted by recent PyMongo releases, the tuple form works on all.
            letter_cursor = (collection.find()
                             .sort([("letter", 1), ("name_zh", 1)])
                             .skip(30000)
                             .limit(10000))
            for document in letter_cursor:
                zhongyaofangBaseItem = ZhongyaofangBaseItem(**document)
                index_url = zhongyaofangBaseItem["index_url"]
                request_url = self.base_url + index_url
                zhongyaofangBaseItem["detail_url"] = request_url
                yield scrapy.Request(url=request_url, callback=self.parse,
                                     meta=zhongyaofangBaseItem)
        except Exception as e:
            print(f"app error:{e}")

    def parse(self, response):
        """Parse one prescription detail page and insert the result into MongoDB.

        Each ``div`` section under ``#content`` contributes two fields derived
        from its CSS class: ``<class>_name`` (the section heading, first inner
        div) and ``<class>`` (the section body, subsequent inner divs).
        """
        yf = {
            "index_url": response.meta.get("index_url"),
            "name_zh": response.meta.get("name_zh"),
            "letter": response.meta.get("letter"),
            "name_pinyin": response.meta.get("name_pinyin"),
            "parent_url": response.meta.get("parent_url"),
            "detail_url": response.meta.get("detail_url"),
            "base_url": self.base_url,
            "base": self.base,
        }

        contents = response.xpath('//*[@id="content"]/div/div')
        for content in contents:
            try:
                clazz = content.xpath("@class").get()
                if not clazz:
                    # Without a class attribute there is no field name to build
                    # (the old code raised TypeError on ``None + "_name"`` and
                    # relied on the broad except); skip such sections instead.
                    continue
                # e.g. "item fangming" -> "item_fangming" -> "fangming"
                clazz = clazz.replace(" ", "_").replace("item_", "")

                # Inner loop gets its own index name (the old code shadowed the
                # outer loop variable).
                for detail_index, content_detail in enumerate(content.xpath(".//div")):
                    value = ''.join(content_detail.xpath(".//text()").getall())
                    value = value.replace("\n", "")
                    if detail_index == 0:
                        yf[clazz + "_name"] = value  # section heading
                    else:
                        yf[clazz] = value            # section body text
            except Exception as e:
                print(f"app error:{e}")

        print(yf)
        collection_index.insert_one(yf)

    def zyfIndexStartRequests(self, response):
        """Alternative entry point: request one letter-index page per stored letter."""
        try:
            letter_cursor = collection.find()  # .skip(0).limit(1)
            for document in letter_cursor:
                zhongyaofangBaseItem = ZhongyaofangBaseItem(**document)
                parent_url = zhongyaofangBaseItem["letter_url"]
                request_url = self.base_url + parent_url
                zhongyaofangBaseItem["request_url"] = request_url
                yield scrapy.Request(url=request_url, callback=self.parse,
                                     meta=zhongyaofangBaseItem)
        except Exception as e:
            print(f"app error:{e}")

    def zyfIndexParse(self, response):
        """Parse a letter-index page; store one index record per prescription link."""
        request_url = response.meta.get("request_url")
        letter = response.meta.get("letter")
        print(request_url, letter)
        zyf_indexs = response.xpath(
            '//*[@id="list-content"]/ul/li/a[contains(@href,"/zhongyaofang")]')
        for zyf_index in zyf_indexs:
            index_url = zyf_index.xpath("@href").get()
            name_zh = zyf_index.xpath("text()").get()
            # First character of the Chinese name; guard against missing text
            # (the old ``name_zh[0]`` crashed on None/empty link text).
            letter_zh = name_zh[0] if name_zh else ""
            # "/zhongyaofang/<pinyin>/index.html" -> "<pinyin>"
            name_pinyin = index_url[len("/zhongyaofang/"):len(index_url) - len("/index.html")]
            collection_index.insert_one({"index_url": index_url, "name_zh": name_zh,
                                         "name_pinyin": name_pinyin, "letter": letter,
                                         "letter_zh": letter_zh, "parent_url": request_url})

    def zhongyaofangBaseScrapy(self, response):
        """Scrape the letter filter bar on the site index; store one record per letter."""
        try:
            selector = Selector(response)
            print("--------------------------")
            lis = selector.xpath(
                '//*[@id="filter"]/ul[1]/li/ul/li/a[contains(@href,"/zhongyaofang/index")]')
            for li in lis:
                item = ZhongyaofangBaseItem()
                href = li.xpath("@href").get()
                item["parent_url"] = href
                text = li.xpath("text()").get()
                item["letter"] = text
                print(href, text)
                collection.insert_one({"letter_url": href, "letter": text})
                # yield item
        except Exception as e:
            print(f"app error:{e}")