#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2020026
# Project: tanxf
import hashlib
import json
import math
import time
import uuid
import pymysql

from pyspider.libs.base_handler import *


class Handler(BaseHandler):
    """pyspider crawler for result announcements on the Zhejiang provincial
    government procurement site (zfcgmanager.czt.zj.gov.cn).

    List pages are paged through the remote JSON API; every notice detail
    page is persisted into the T_RESULT_INFO MySQL table with
    insert-or-update semantics keyed on md5(url + sourceId).
    """

    crawl_config = {
        # Logical source URL stored alongside every saved record.
        'url': 'zfcgmanager.czt.zj.gov.cn/cms/api/cors/remote/results'
    }

    def __init__(self):
        # NOTE(review): connection credentials are hard-coded; move them to
        # configuration / environment variables before wider deployment.
        self.db = pymysql.connect(host='106.13.85.158', port=3310,
                                  user='root', passwd='root@123',
                                  db='python', charset='utf8')

    def saveData(self, url, sourceId, title, html, jsonText, save):
        """Insert the crawled record into T_RESULT_INFO, or update the
        existing row when one with the same source_id_md5 already exists.

        url      -- logical source URL (crawl_config['url'])
        sourceId -- announcement id from the remote API (string)
        title    -- notice title (untrusted remote content)
        html     -- notice HTML body (untrusted remote content)
        jsonText -- full detail-page JSON serialized as a string
        save     -- list-page article dict; supplies 'mainBidMenuName'
                    and 'typeName'

        All values are passed as query parameters so the driver escapes
        them itself: the previous string-interpolated SQL was vulnerable
        to SQL injection, and pymysql.escape_string has been removed from
        recent PyMySQL releases.
        """
        try:
            currTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
            md5id = self.md5(url + sourceId)
            cursor = self.db.cursor()
            try:
                cursor.execute(
                    'select 1 from T_RESULT_INFO t where t.source_id_md5 = %s',
                    (md5id,))
                if cursor.fetchone():
                    cursor.execute(
                        'update T_RESULT_INFO set TITLE=%s, HTML=%s, JSON_TEXT=%s, '
                        'UPDATE_USER=%s, UPDATE_TIME=%s, UP_ID=%s, UP_SOURCE_ID=%s '
                        'where source_id_md5=%s',
                        (title, html, jsonText, '', currTime, '', '', md5id))
                else:
                    cursor.execute(
                        'insert into T_RESULT_INFO(id, source_id_md5, URL, TITLE, '
                        'TYPE, HTML, JSON_TEXT, UPDATE_USER, UPDATE_TIME, '
                        'CREATE_USER, CREATE_TIME, UP_ID, UP_SOURCE_ID, '
                        'PLATFORM_NAME, DISTRICT_SHOW, CLASSIFY_SHOW, STAGE_SHOW) '
                        'values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
                        (sourceId, md5id, url, title, '0', html, jsonText,
                         '', '', '', currTime, '', '',
                         '浙江省政府采购网', '浙江',
                         save['mainBidMenuName'], save['typeName']))
                self.db.commit()
            finally:
                # Release the cursor even when the query fails.
                cursor.close()
        except Exception as e:
            # Best-effort persistence: log the error and roll back so the
            # connection stays usable for subsequent records.
            print(e)
            self.db.rollback()

    @every(minutes=24 * 60)
    def on_start(self):
        """Daily entry point: fetch page 1 of the result list."""
        self.crawl('http://zfcgmanager.czt.zj.gov.cn/cms/api/cors/remote/results?pageSize=15&pageNo=1&sourceAnnouncementType=3014&url=notice',
                   callback=self.query_page)

    # age: the task is considered unmodified for this many seconds
    # (pyspider default -1 = never re-crawl).
    @config(age=60)
    def query_page(self, response):
        """Read the total count from page 1 and enqueue every list page."""
        count = response.json['count']
        pageSize = response.json['pageSize']
        # math.ceil already returns an int on Python 3 — no extra cast.
        pageCount = math.ceil(count / pageSize)
        for pageNo in range(1, pageCount + 1):
            self.crawl(
                'http://zfcgmanager.czt.zj.gov.cn/cms/api/cors/remote/results'
                '?pageSize={0}&pageNo={1}&sourceAnnouncementType=3014&url=notice'.format(pageSize, pageNo),
                callback=self.index_page)

    @config(age=60)
    def index_page(self, response):
        """Enqueue the detail request for every article on a list page."""
        for article in response.json['articles']:
            # Forward the list entry through save= so detail_page can read
            # mainBidMenuName / typeName from it.
            self.crawl(
                'http://zfcgmanager.czt.zj.gov.cn/cms/api/cors/remote/results'
                '?noticeId={0}&url=noticeDetail'.format(article['id']),
                save=article, callback=self.detail_page)

    @config(priority=2)
    def detail_page(self, response):
        """Persist one notice detail page and return its JSON payload."""
        dataJson = response.json
        self.saveData(Handler.crawl_config['url'],
                      str(dataJson['id']),
                      dataJson['noticeTitle'],
                      dataJson['noticeContent'],
                      json.dumps(dataJson),
                      response.save)
        return dataJson

    def md5(self, text):
        """Return the hex MD5 digest of *text* (UTF-8 encoded)."""
        digest = hashlib.md5()
        digest.update(text.encode("utf8"))
        return digest.hexdigest()
