import scrapy
import pymongo
# Shared MongoDB handles, created at import time and used by the spider below.
# NOTE(review): the client connects as soon as this module is imported and is
# never closed — acceptable for a short-lived crawl; consider moving setup into
# the spider's open_spider/close_spider hooks. TODO confirm deployment context.
conn = pymongo.MongoClient('localhost', 27017)
db = conn['杭州']  # database name: "杭州" (Hangzhou)
collection = db['会议厅信息']  # collection name: "会议厅信息" (meeting-hall info)


class A杭州议事厅Spider(scrapy.Spider):
    """Crawl the Hangzhou 议事厅 listing pages and store threads whose status
    is '已转交' (forwarded/replied) into the module-level MongoDB collection.
    """

    name = "杭州议事厅"

    async def start(self):
        """Yield a request for every listing page (pages 1..1592 inclusive)."""
        for page in range(1, 1593):
            yield scrapy.Request(
                f'https://yst.hangzhou.com.cn/list.php?keyword=&info_type=0&name=&page={page}'
            )

    def parse(self, response):
        """Parse one listing page and follow rows whose status is '已转交'.

        Passes the thread title to ``parse2`` via ``meta["name"]``.
        """
        rows = response.xpath(
            '//table[@style="margin-bottom:4px;"]//tr[starts-with(@class,"list")]'
        )
        for row in rows:
            title = row.xpath('./td/a/text()').get()
            href = row.xpath('./td/a/@href').get()
            status = row.xpath('./td[5]/text()').get()
            # Guard against missing cells: the original called status.strip()
            # unconditionally, which raised AttributeError when the status
            # cell was absent (None); a missing href would also crash Request.
            if href and status and status.strip() == '已转交':
                yield scrapy.Request(href, callback=self.parse2, meta={"name": title})

    def parse2(self, response):
        """Parse a thread detail page and insert one document into MongoDB."""
        # Index [1] skips the first text node of the main-text cell; guard
        # against short results instead of raising IndexError on pages whose
        # layout differs (original crashed and dropped the item silently).
        body_texts = response.xpath(
            '//table//td[@class="maintext"]/div[@align="left"]/text()'
        ).getall()
        text = body_texts[1] if len(body_texts) > 1 else None
        hf = response.xpath('//td[@class="f14"]/text()').get()
        # The original buffered into a list rebuilt on every call, so len(l)
        # was always 1 and both if/else branches performed the identical
        # insert_many — a single insert_one is equivalent and clearer.
        collection.insert_one({
            'name': response.meta['name'],
            'hf': hf,
            'text': text,
        })
