from util import common
import time
from selenium.webdriver.support.select import Select
from decimal import Decimal
from decimal import Decimal
from db.MySqlConn import DBSession, JobConfig, WeeklyFund
# Module-level SQLAlchemy-style session created at import time.
# NOTE(review): `session` is never referenced in this file's visible code —
# presumably WeeklyFund.dict_save uses its own session; verify before removing.
session = DBSession()

class StockWeeklyNewFund():
    """Weekly new stock-account crawler (周新开户数走势爬虫).

    Navigates the chinaclear.cn statistics page with Selenium, selects a start
    date and the "securities issuance & registration" channel, then scrapes the
    Shanghai/Shenzhen weekly account counts and amounts from the result table
    and persists them via ``WeeklyFund.dict_save``.

    NOTE(review): uses Selenium 3 ``find_element_by_*`` APIs (removed in
    Selenium 4) — ``common.new_driver`` presumably returns a Selenium 3 driver.
    """

    def __init__(self, crawler_type=1):
        self.crawler_info = "周新开户数走势爬虫"
        self.base_url = "http://www.chinaclear.cn/zdjs/xmzkb/center_mzkb.shtml"
        self.log = common.get_logger("stock_weekly_new_fund")
        self.driver = common.new_driver(False)
        self.crawler_type = crawler_type  # 0 = full crawl, 1 = incremental
        self.index_date = None  # crawl start date as "YYYY.MM.DD"

    def start(self):
        """Entry point: pick the start date from crawler_type, then crawl."""
        self.log.info("开始" + self.crawler_info + "...")
        if self.crawler_type == 0:
            # Full crawl: earliest date used by this crawler.
            self.index_date = "2006.06.02"
        else:
            # Incremental crawl: resume from the last recorded week.
            self.index_date = "2019.02.22"
        self.navigation_and_parse()

    def navigation_and_parse(self, *key):
        """Drive the browser through the page's nested iframes, submit the
        query for ``self.index_date``, and hand the result table to parse().

        ``*key`` is unused; kept only for signature compatibility.
        """
        self.driver.get(self.base_url)
        time.sleep(3)
        frame_allA = self.driver.find_element_by_css_selector("iframe#frame_allA")
        self.driver.switch_to.frame(frame_allA)
        time.sleep(2)
        self.driver.find_element_by_css_selector("input#dateStr").click()
        time.sleep(2)
        date_iframe = self.driver.find_element_by_css_selector('iframe')
        self.driver.switch_to.frame(date_iframe)
        time.sleep(2)
        # BUG FIX: the date was hard-coded as day_Click(2018,06,01), which made
        # the crawler_type / index_date logic in start() dead code. Use the
        # configured start date; fall back to the old hard-coded date when
        # navigation_and_parse() is called directly without start().
        index_date = self.index_date if self.index_date is not None else "2018.06.01"
        year, month, day = (int(part) for part in index_date.split('.'))
        self.driver.execute_script('day_Click(%d,%d,%d);' % (year, month, day))
        self.driver.switch_to.default_content()
        self.driver.switch_to.frame(frame_allA)
        select = Select(self.driver.find_element_by_id('channelIdStr'))
        select.select_by_index(4)  # channel: 证劵发行及登记
        self.driver.find_element_by_css_selector('input.btn').click()  # 查询 (submit query)
        time.sleep(3)

        # Stray <div> elements are interleaved in the container, so XPath is
        # used to collect the tables; the data table is always the LAST one
        # (the page sometimes renders one table, sometimes two).
        data_table_list = self.driver.find_elements_by_xpath('//div[@id="settlementList"]//table')
        data_table = data_table_list[-1]
        self.parse(data_table)

    def parse(self, *key):
        """Extract SH/SZ weekly counts (row 7) and amounts (row 8) from the
        result table, persist them and return the assembled dict.

        ``key[0]`` must be the Selenium WebElement of the result table.
        Returns a dict with keys sh_cnt, sz_cnt, total_cnt, sh_amt, sz_amt,
        total_amt (totals computed here; amounts summed with Decimal to avoid
        float rounding).
        """
        item_dict = {}
        data_table = key[0]
        sh_cnt = data_table.find_element_by_css_selector('tbody > tr:nth-child(7) > td:nth-child(2) ').text
        sz_cnt = data_table.find_element_by_css_selector('tbody > tr:nth-child(7) > td:nth-child(3) ').text
        total_cnt = int(sh_cnt) + int(sz_cnt)

        sh_amt = data_table.find_element_by_css_selector('tbody > tr:nth-child(8) > td:nth-child(2) ').text
        sz_amt = data_table.find_element_by_css_selector('tbody > tr:nth-child(8) > td:nth-child(3) ').text
        total_amt = Decimal(sh_amt) + Decimal(sz_amt)

        item_dict["sh_cnt"] = sh_cnt
        item_dict["sz_cnt"] = sz_cnt
        item_dict["total_cnt"] = total_cnt
        item_dict["sh_amt"] = sh_amt
        item_dict["sz_amt"] = sz_amt
        item_dict["total_amt"] = str(total_amt)
        self.log.info(item_dict)
        WeeklyFund.dict_save(item_dict)
        return item_dict



if __name__ == '__main__':
    # Run a full (crawler_type=0) crawl when invoked as a script.
    crawler = StockWeeklyNewFund(crawler_type=0)
    try:
        crawler.start()
    finally:
        # FIX: the WebDriver was never released, leaving an orphaned browser
        # process after every run (or on any crawl failure).
        crawler.driver.quit()
