from pymysql import *
from spidertools.utils.time_utils import get_current_datetime, get_current_date
from spidertools.db_utils import mongdb_utils
from crawlab import save_item
import os


class MySQLPipeLines(object):
    """Item pipeline that inserts each crawled item into the MySQL ``vega`` table.

    NOTE(review): connection credentials are hard-coded; consider moving them
    to settings or environment variables.
    """

    def __init__(self):
        # Single connection per pipeline instance, reused for every item.
        self.conn = connect(host="192.168.3.205", port=3306, user="root", password="123456", database="vega",
                            charset='utf8')

    def process_item(self, item, spider):
        """Pipeline hook: persist the item, then pass it along unchanged."""
        self.save(item)
        return item

    def close_spider(self, spider):
        """Pipeline hook: release the MySQL connection when the spider ends.

        The original never closed the connection; this is an additive fix.
        """
        self.conn.close()

    def save(self, item):
        """Insert one row into ``vega``.

        Expects the item to carry the keys referenced below; a missing key
        raises ``KeyError`` before any SQL is executed.
        Returns the item for convenience.
        """
        sql = 'insert into vega(project_type,construction_stage,process_flow,' \
              'company_name,business_classification,company_area,origin_url) VALUES (%s,%s,%s,%s,%s,%s,%s)'
        params = [item['project_type_first'],  # 'project_type_first': engineering category
                  item['project_type_second'],  # 'project_type_second': construction stage
                  item['project_type_third'],  # 'project_type_third': process flow
                  item['company_name'],  # 'company_name': company name
                  item['business_classification'],  # 'business_classification': business classification
                  item['company_area'],  # 'company_area': company region
                  item['origin_url']]
        # Use the cursor as a context manager so it is always closed; the
        # original created a cursor per call and leaked every one of them.
        with self.conn.cursor() as cursor:
            cursor.execute(sql, params)
        self.conn.commit()
        return item

class ProjectHtmlPipeline(object):
    """Pipeline that upserts crawled project HTML documents into MongoDB.

    The target collection depends on the ``PROJECT_ENV`` environment variable:
    ``'test'`` writes to ``project_html_test``, anything else to
    ``project_html``. On close, the per-date item count is reported to
    Crawlab via ``save_item``.
    """

    def __init__(self):
        self.myclient, self.mydb = mongdb_utils.get_mongo_connect(mongo_url="mongodb://192.168.2.181:27017")
        self.project_env = os.environ.get("PROJECT_ENV")
        # `== 'test'` already implies non-None, so the original
        # `!= None and == 'test'` check collapses to a single comparison.
        if self.project_env == 'test':
            self.mycol = self.mydb['project_html_test']
        else:
            self.mycol = self.mydb['project_html']
        self.total_count = 0  # items processed during this run

    def start_spider(self, spider):
        # NOTE(review): Scrapy's pipeline hook is named `open_spider`; confirm
        # whether this method is ever actually invoked by the framework.
        pass

    def close_spider(self, spider):
        """Pipeline hook: report the per-date count to Crawlab, close Mongo."""
        current_date = get_current_date()
        # Crawlab expects the result to be a dict.
        result = {current_date: self.total_count}
        save_item(result)
        self.myclient.close()

    def process_item(self, item, spider):
        """Upsert one item into MongoDB, keyed by its ``origin_url``.

        If the spider defines a non-empty ``convert_dict``, item keys are
        renamed through it before storage. Existing documents get a refreshed
        ``update_time``; new documents get both ``create_time`` and
        ``update_time``.
        """
        insert_item = dict(item)
        if hasattr(spider, "convert_dict") and spider.convert_dict:
            # Rename keys via the mapping; unmapped keys pass through as-is.
            insert_item = {spider.convert_dict.get(key, key): value
                           for key, value in insert_item.items()}
        search_dict = {"origin_url": insert_item['origin_url']}

        # Take one timestamp so create_time and update_time agree on insert.
        now = get_current_datetime()
        # count_documents replaces Cursor.count(), which was deprecated in
        # pymongo 3.7 and removed in 4.0.
        if self.mycol.count_documents(search_dict):
            print("update!!!!!!!")
            insert_item['update_time'] = now
            self.mycol.update_one(search_dict, {"$set": insert_item})
        else:
            print("insert!!!!!!!")
            insert_item['create_time'] = now
            insert_item['update_time'] = now
            self.mycol.insert_one(insert_item)
        self.total_count += 1

        return item