# -*- coding: utf-8 -*-
import scrapy
from cyb.items import CybItem
from bs4 import BeautifulSoup
import arrow

class EnterpriseSpider(scrapy.Spider):
    """Scrape company profile pages from cyzone.cn into CybItem documents.

    Enumerates numeric page ids, skipping ids already present in the
    `enterprise` MongoDB collection, and parses name / funding / qcc
    (registration) details out of each page.
    """
    name = 'enterprise'
    # 404 pages are still handed to parse() so we can record the miss.
    handle_httpstatus_list = [404]
    # Fix: was [''] — an empty-string domain never matches a real host, so
    # OffsiteMiddleware would drop any follow-up request. Use the real domain.
    allowed_domains = ['cyzone.cn']

    def start_requests(self):
        """Yield one request per unscraped page id in [16, 1000)."""
        url = 'http://www.cyzone.cn/r/20171115/%s.html'
        # Local import keeps pymongo out of module import time for tools that
        # only inspect the spider class.
        import pymongo
        db = pymongo.MongoClient('192.168.1.220:29001').chuangyebang_v1
        # Use a set: membership is tested once per candidate id, and a list
        # would make this loop O(n*m).
        finished = {doc.get('_id') for doc in db['enterprise'].find({}, {'_id': 1})}
        for _id in range(16, 1000):
            if _id in finished:
                continue
            yield scrapy.Request(url=url % _id, callback=self.parse, meta={'_id': _id})

    def parse(self, response):
        """Parse one company page into a CybItem.

        Returns a stub item with _status=404 for missing pages; otherwise a
        fully populated item (fields absent from the page are set to None).
        """
        item = CybItem()
        soup = BeautifulSoup(response.text, 'lxml')

        item['_id'] = response.meta['_id']
        item['_update'] = arrow.now().floor('day').datetime
        if response.status == 404:
            item['_status'] = 404
            return item
        item['_status'] = 200

        item['name'] = soup.select_one('li.name').get_text(strip=True)
        # The "li.time" node carries the registered full name with a label prefix.
        item['full_name'] = soup.select_one('li.time').get_text(strip=True) \
            .replace('公司全称：', '') if soup.select('li.time') else None

        com_url = soup.find('div', 'com-url')
        item['url'] = com_url.find('a').get('href') if com_url else None

        # Each info-tag field is keyed by an icon class (i1=date, i2=location,
        # i3=financing stage, i6=tags). Guard against pages missing the div.
        info_tag = soup.find('div', 'info-tag')
        item['date'] = self._tag_text(info_tag, 'i1')
        item['financing_stage'] = self._tag_text(info_tag, 'i3')
        item['location'] = self._tag_text(info_tag, 'i2')
        tags_icon = info_tag.find('i', 'i6') if info_tag else None
        item['tag'] = [span.string for span in tags_icon.find_parent().find_all('span')] \
            if tags_icon else None

        info_box = soup.find('div', 'info-box')
        item['desc'] = info_box.get_text().strip() if info_box else None

        item['qcc'] = self._parse_qcc(soup)

        live_soup = soup.find('div', 'live')
        if live_soup:
            item['fundinground'] = self._parse_funding(live_soup)
        return item

    @staticmethod
    def _tag_text(info_tag, icon_class):
        """Text of the info-tag entry whose icon has *icon_class*, or None."""
        if info_tag is None:
            return None
        icon = info_tag.find('i', icon_class)
        return icon.find_parent().get_text() if icon else None

    @staticmethod
    def _parse_qcc(soup):
        """Parse the qcc (company-registry) key/value paragraphs into a dict."""
        qcc = {}
        qcc_div = soup.find('div', 'qcc')
        if qcc_div is None:
            return qcc
        for ps in qcc_div.find_all('p'):
            label_span = ps.find('span')
            if label_span is None or label_span.string is None:
                continue
            key = label_span.string.strip(' :')
            sibling = label_span.next_sibling
            # None-safe: next_sibling may be a tag whose .string is None,
            # which previously raised AttributeError on .strip().
            value = sibling.string.strip() if sibling is not None and sibling.string else None
            spans = ps.find_all('span')
            if len(spans) > 1 and not value:
                # Value lives in a second <span> rather than a bare text node.
                value = spans[1].get_text().strip()
            qcc[key] = value
        return qcc

    @staticmethod
    def _parse_funding(live_soup):
        """Parse funding-round table rows (skipping the header row)."""
        rounds = []
        for trs in live_soup.find('table').find_all('tr', recursive=False)[1:]:
            # Hoist: was calling find_all('td') once per field (4x per row).
            tds = trs.find_all('td', recursive=False)
            rounds.append({
                'series': tds[0].get_text(),
                'raised': tds[1].find('div', 'money').get_text(),
                'investors': tds[2].get('title'),
                'time': tds[3].get_text(),
            })
        return rounds
