from tkinter.tix import Select

import scrapy
import re

import json

from pymongo import MongoClient
from scrapy import Selector
from zhongyaocao_detail.items import ZhongyaocaoDetailItem


# Connect to MongoDB on localhost:27017 (default port) with a pooled client.
# NOTE(review): credentials (erp/erp123) are hard-coded in source — move to
# an environment variable or config file before deploying.
client = MongoClient('mongodb://erp:erp123@localhost:27017', maxPoolSize=10,
                     minPoolSize=3,
                     maxConnecting=30,
                     socketTimeoutMS=100000,   # 100 s socket read/write timeout
                     maxIdleTimeMS=60000,      # drop pooled connections idle > 60 s
                     connectTimeoutMS=40000,   # 40 s initial connection timeout
                     waitQueueTimeoutMS=10000)

db = client['erp']
# Source collection: herb index records (read by the spider's start_requests).
collection = db['zhongyaocai_index']
# Target collection: parsed detail records (written at the end of parse).
# NOTE(review): the names are swapped relative to their suffixes — `collection`
# holds the index and `collection_index` holds the detail; consider renaming.
collection_index = db['zhongyaocai_detail']


class ZcydetailspiderSpider(scrapy.Spider):
    """Scrape herb detail pages from www.zysj.com.cn.

    Reads index documents from the ``zhongyaocai_index`` Mongo collection,
    requests each herb's detail page, flattens the page sections into a
    dict, and inserts the result into ``zhongyaocai_detail``.
    """

    name = "zcyDetailSpider"
    allowed_domains = ["www.zysj.com.cn"]
    base_url = "https://www.zysj.com.cn"
    base = "中医世家"

    # Keys copied verbatim from the index document (request meta) onto the
    # detail record that gets persisted.
    _META_KEYS = ("index_url", "name_zh", "letter", "name_pinyin",
                  "parent_url", "detail_url")

    def start_requests(self):
        """Yield one detail-page request per stored index document.

        Processes the slice [12000, 16000) of the index collection; the
        whole index document rides along in ``Request.meta`` so ``parse``
        can copy its fields onto the detail record.
        """
        try:
            for document in collection.find().skip(12000).limit(4000):
                item = ZhongyaocaoDetailItem(**document)
                request_url = self.base_url + item["index_url"]
                item["detail_url"] = request_url
                yield scrapy.Request(url=request_url, callback=self.parse,
                                     meta=item)
        except Exception as e:
            # Best-effort: a Mongo/driver failure stops scheduling further
            # requests but must not crash the crawler process.
            self.logger.error("start_requests error: %s", e)

    def parse(self, response):
        """Parse one herb detail page and insert the flat record into Mongo.

        Each ``<div>`` under ``#content`` becomes one field pair: its
        ``class`` attribute (spaces -> ``_``, ``item_`` prefix stripped)
        names the field; the first inner ``<div>`` supplies the
        ``<clazz>_name`` label and subsequent ones the ``<clazz>`` value.
        """
        # Carry the index-document fields over onto the detail record.
        yao = {key: response.meta.get(key) for key in self._META_KEYS}
        yao["base_url"] = self.base_url
        yao["base"] = self.base

        for content in response.xpath('//*[@id="content"]/div/div'):
            try:
                clazz = content.xpath("@class").get()
                if not clazz:
                    # Without a class attribute there is no field name to
                    # store under; previously this raised a silent TypeError.
                    continue
                clazz = clazz.replace(" ", "_").replace("item_", "")
                # First inner div is the section label, the rest its content.
                for pos, detail in enumerate(content.xpath(".//div")):
                    text = ''.join(detail.xpath(".//text()").getall())
                    text = text.replace("\n", "")
                    if pos == 0:
                        yao[clazz + "_name"] = text
                    else:
                        yao[clazz] = text
            except Exception as e:
                # Skip a malformed section but keep the rest of the page.
                self.logger.error("parse error for %s: %s",
                                  yao.get("detail_url"), e)

        collection_index.insert_one(yao)