# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql
import redis
import json
from website.items import *
from scrapy.exporters import JsonLinesItemExporter


class WebsitePipeline:
    """Default no-op pipeline: hands every scraped item through unchanged."""

    def process_item(self, item, spider):
        # Nothing to transform here; forward the item to the next stage.
        return item


class SaveAsNumberPipeline:
    """Pass-through stage for AS-number items; persistence happens elsewhere."""

    def process_item(self, item, spider):
        # Intentionally a no-op: yield the item to downstream pipelines.
        return item


# class SaveCountryASNRedisPipeline:
#
#     # Spider开启时，执行打开文件操作
#     def open_spider(self, spider):
#         # 获取配置的redis信息
#         host = spider.settings.get("REDIS_HOST")
#         port = spider.settings.get("REDIS_POST")
#         db_index = spider.settings.get("REDIS_DB_INDEX")
#         db_psd = spider.settings.get("REDIS_PASSWORD")
#         self.db_conn = redis.StrictRedis(host=host, port=port, db=db_index, password=db_psd)
#
#     # 数据处理
#     def process_item(self, item, spider):
#         item_dict = dict(item)
#         if isinstance(item, CountryASNItem):
#
#             self.db_conn.rpush("CountryASN", str(item_dict))
#         elif isinstance(item, ASNItem):
#             self.db_conn.rpush("ASN", str(item_dict))
#         elif isinstance(item, ASNContentItem):
#             self.db_conn.rpush("ASNContent", str(item_dict))
#         else:
#             pass
#
#         return item
#
#     def close_spider(self, spider):
#
#         # 关闭数据库
#         self.db_conn.connection_pool.disconnect()


class SaveCountryASNPipeline:
    """Persist ASN statistics items into the MySQL ``ASN`` database.

    Routes each item by its concrete class:
    CountryASNItem -> ``CountryASN``, ASNItem -> ``ASN``,
    ASNContentItem -> ``ASNContent``. Unknown item types are ignored.
    """

    # Open the database connection when the spider starts.
    def open_spider(self, spider):
        # Read the MySQL settings; the defaults look like development values.
        db_name = spider.settings.get("MYSQL_DB_NAME", "ASN")
        host = spider.settings.get("MYSQL_HOST", "180.76.176.24")
        user = spider.settings.get("MYSQL_USER", "root")
        pwd = spider.settings.get("MYSQL_PASSWORD", "qq4145246")

        # Connect to the MySQL server.
        self.db_conn = pymysql.connect(db=db_name, host=host, user=user, password=pwd, charset="utf8")
        # Cursor reused for every insert below.
        self.db_cursor = self.db_conn.cursor()

    # Persist one item per call and commit immediately (no batching).
    def process_item(self, item, spider):

        if isinstance(item, CountryASNItem):
            # Per-country summary row.
            values = (item["countryName"], item["countryHref"], item["countryCode"], item["allocatedASNs"],
                      item["announcedASNs"], item["asnIpv4Number"], item["asnIpv6Number"])
            sql = "insert into CountryASN(countryName,countryHref,countryCode,allocatedASNs,announcedASNs,asnIpv4Number,asnIpv6Number)values (%s,%s,%s,%s,%s,%s,%s)"
            self.db_cursor.execute(sql, values)
        elif isinstance(item, ASNItem):
            # One ASN listed under a country.
            values = (item["countryCode"], item["asnNum"], item["asnNumHref"], item["asnName"],
                      item["ipv4NumIps"], item["ipv6NumIps"])
            sql = "insert into ASN(countryCode,asnNum,asnNumHref,asnName,ipv4NumIps,ipv6NumIps)values (%s,%s,%s,%s,%s,%s)"
            self.db_cursor.execute(sql, values)
        elif isinstance(item, ASNContentItem):
            # ASN detail page, including one CIDR prefix row.
            values = (item["asnNum"], item["asName"], item["orgName"], item["registryRegion"], item["ipv4Prefixes"],
                      item["ipv6Prefixes"], item["ipv4NumIps"], item["ipv6NumIps"], item["cidr"], item["cidrHref"],
                      item["cidrDescription"], item["ipNum"])
            sql = "insert into ASNContent(asnNum,asName,orgName,registryRegion,ipv4Prefixes,ipv6Prefixes,ipv4NumIps,ipv6NumIps,cidr,cidrHref,cidrDescription,ipNum)values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            self.db_cursor.execute(sql, values)
        else:
            # Unknown item type: nothing inserted; commit below is a no-op.
            pass
        # Commit after every item.
        self.db_conn.commit()
        return item

    def close_spider(self, spider):
        # Flush any pending transaction.
        self.db_conn.commit()
        # Close the cursor.
        self.db_cursor.close()
        # Close the connection.
        self.db_conn.close()


class SaveDomainInfoOriginalPipeline:
    """Store raw domain-info lookups into ``university_domain_info_original``.

    Items that carry a "domainInfo" field are saved with their payload;
    items without it are treated as timed-out lookups and only the domain
    name is recorded.

    Fixes vs. the previous version:
    - removed the dead ``str = json.dumps(item_dict, ...)`` line, which
      shadowed the builtin ``str`` and, being outside the ``try`` block,
      could raise an uncaught TypeError for non-JSON-serializable items;
    - the single-column parameter is now a real one-tuple ``(x,)``.
    """

    # Open the database connection when the spider starts.
    def open_spider(self, spider):
        # Read the MySQL settings; the defaults look like development values.
        db_name = spider.settings.get("MYSQL_DB_NAME", "domain_and_website")
        host = spider.settings.get("MYSQL_HOST", "192.168.37.2")
        user = spider.settings.get("MYSQL_USER", "root")
        pwd = spider.settings.get("MYSQL_PASSWORD", "nlp123456")

        # Connect to the MySQL server.
        self.db_conn = pymysql.connect(db=db_name, host=host, user=user, password=pwd, charset="utf8")
        # Cursor reused for every insert below.
        self.db_cursor = self.db_conn.cursor()

        # 1-based progress counter, used only for console logging.
        self.NUM = 1

    # Persist one lookup result per call.
    def process_item(self, item, spider):
        item_dict = dict(item)

        # Database errors are printed and swallowed so the crawl continues.
        try:
            if "domainInfo" in item_dict:
                values = (item["domainName"], item["domainInfo"])
                sql = "insert into university_domain_info_original(domainName,domainInfo)values (%s,%s)"
                self.db_cursor.execute(sql, values)
                print("保存【%s】成功,第%d个数据" % (item_dict["domainName"], self.NUM))
            else:
                # Lookup failed/timed out: record only the domain name.
                values = (item["domainName"],)
                sql = "insert into university_domain_info_original(domainName)values (%s)"
                self.db_cursor.execute(sql, values)
                print("【%s】超时,第%d个数据" % (item_dict["domainName"], self.NUM))
            # Commit after every item (no batching).
            self.db_conn.commit()
        except Exception as e:
            print(e)
        self.NUM += 1

        return item

    def close_spider(self, spider):
        # Flush any pending transaction.
        self.db_conn.commit()
        # Close the cursor.
        self.db_cursor.close()
        # Close the connection.
        self.db_conn.close()


class SaveDomainInfoPipeline(object):
    """Route scraped domain-info items into Redis lists.

    Successful lookups (items carrying "addDate") go to the ``domain_info``
    list; everything else goes to ``error_domain_info``.
    """

    def open_spider(self, spider):
        """Connect to Redis using the crawler settings."""
        self.NUM = 1  # 1-based counter, used only for progress logging
        settings = spider.settings
        # NOTE(review): the settings key is spelled REDIS_POST in this
        # project; presumably it means REDIS_PORT — confirm in settings.py.
        self.db_conn = redis.StrictRedis(
            host=settings.get("REDIS_HOST"),
            port=settings.get("REDIS_POST"),
            db=settings.get("REDIS_DB_INDEX"),
            password=settings.get("REDIS_PASSWORD"),
        )

    def process_item(self, item, spider):
        """Push the item's dict form onto a success or failure list."""
        item_dict = dict(item)
        if "addDate" in item_dict:
            print("【%s】保存成功,第%d个数据" % (item_dict["domainName"], self.NUM))
            self.db_conn.rpush("domain_info", str(item_dict))
        else:
            # Lookup failed: keep the raw dict for later inspection.
            print("==========第%d个数据，【%s】失败" % (self.NUM, item_dict["domainName"],))
            self.db_conn.rpush("error_domain_info", str(item_dict))
        self.NUM += 1
        return item

    def close_spider(self, spider):
        """Release all pooled Redis connections."""
        self.db_conn.connection_pool.disconnect()


class SaveIpInfoPipeline:
    """Store IP geolocation lookup results into MySQL (database ``Original``).

    Successful lookups (IpInfoItem) are inserted into ``ip_info_Ip_new``;
    any other item type is treated as a timed-out lookup and only the IP
    is recorded in ``ip_overtime_new``.
    """

    # Open the database connection when the spider starts.
    def open_spider(self, spider):
        # Read the MySQL settings; the defaults look like development values.
        db_name = spider.settings.get("MYSQL_DB_NAME", "Original")
        host = spider.settings.get("MYSQL_HOST", "180.76.176.24")
        user = spider.settings.get("MYSQL_USER", "root")
        pwd = spider.settings.get("MYSQL_PASSWORD", "qq4145246")

        # Connect to the MySQL server.
        self.db_conn = pymysql.connect(db=db_name, host=host, user=user, password=pwd, charset="utf8")
        # Cursor reused for every insert below.
        self.db_cursor = self.db_conn.cursor()

        # 1-based progress counter, used only for console logging.
        self.NUM = 1

    # Persist one lookup result per call.
    def process_item(self, item, spider):

        # Database errors are printed and swallowed so the crawl continues.
        try:
            if isinstance(item, IpInfoItem):
                # Full 24-column geolocation record for one IP.
                values = (
                    item["domainIp"], item["continent"], item["continentCode"], item["country"],
                    item["countryCode"], item["region"], item["regionName"], item["city"], item["district"],
                    item["zip"],
                    item["latitude"], item["longitude"], item["timezone"], item["offset"], item["currency"],
                    item["isp"],
                    item["org"], item["ASNUM"], item["asName"], item["reverse"], item["mobile"], item["proxy"],
                    item["hosting"], item["addDate"])

                # NOTE(review): identifiers such as zip/offset/reverse are not
                # backquoted; presumably fine on the deployed MySQL version —
                # confirm before upgrading the server.
                sql = "insert into ip_info_Ip_new(domainIp,continent,continentCode,country," \
                      "countryCode,region,regionName,city,district,zip,latitude,longitude,timezone,offset,currency," \
                      "isp,org,ASNUM,asName,reverse,mobile,proxy,hosting,addDate)values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                self.db_cursor.execute(sql, values)
                print("保存【%s】成功,第%d个数据" % (item["domainIp"], self.NUM))
            else:
                # Lookup failed/timed out: record only the IP.
                # NOTE(review): (x) is not a one-tuple; pymysql accepts a bare
                # scalar here, but (x,) would be the clearer spelling.
                values = (item["domainIp"])
                sql = "insert into ip_overtime_new(domainIp)values (%s)"
                self.db_cursor.execute(sql, values)
                print("【%s】超时,第%d个数据" % (item["domainIp"], self.NUM))
            # Commit after every item (no batching).
            self.db_conn.commit()
        except Exception as e:
            print(e)
        self.NUM += 1

        return item

    def close_spider(self, spider):
        # Flush any pending transaction.
        self.db_conn.commit()
        # Close the cursor.
        self.db_cursor.close()
        # Close the connection.
        self.db_conn.close()


class SaveNameServerIpInfoPipeline:
    """Store name-server IP geolocation results into MySQL (``Original``).

    Successful lookups (NameServerIpInfoItem) go into ``name_server_info``;
    any other item type is treated as a timed-out lookup and only the
    server name is recorded in ``name_server_info_over_time``.
    """

    # Open the database connection when the spider starts.
    def open_spider(self, spider):

        # Read the MySQL settings; the defaults look like development values.
        db_name = spider.settings.get("MYSQL_DB_NAME", "Original")
        host = spider.settings.get("MYSQL_HOST", "180.76.176.24")
        user = spider.settings.get("MYSQL_USER", "root")
        pwd = spider.settings.get("MYSQL_PASSWORD", "qq4145246")

        # Connect to the MySQL server.
        self.db_conn = pymysql.connect(db=db_name, host=host, user=user, password=pwd, charset="utf8")
        # Cursor reused for every insert below.
        self.db_cursor = self.db_conn.cursor()

        # 1-based progress counter, used only for console logging.
        self.NUM = 1

    # Persist one lookup result per call.
    def process_item(self, item, spider):

        # Database errors are printed and swallowed so the crawl continues.
        try:
            if isinstance(item, NameServerIpInfoItem):
                # Full 25-column geolocation record for one name server.
                values = (item["nameServerName"],
                          item["nameServerIp"], item["continent"], item["continentCode"], item["country"],
                          item["countryCode"], item["region"], item["regionName"], item["city"], item["district"],
                          item["zip"],
                          item["latitude"], item["longitude"], item["timezone"], item["offset"], item["currency"],
                          item["isp"],
                          item["org"], item["ASNUM"], item["asName"], item["reverse"], item["mobile"], item["proxy"],
                          item["hosting"], item["addDate"])

                # NOTE(review): identifiers such as zip/offset/reverse are not
                # backquoted; presumably fine on the deployed MySQL version —
                # confirm before upgrading the server.
                sql = 'insert into name_server_info(nameServerName,nameServerIp,continent,continentCode,country' \
                      ',countryCode,region,regionName,city,district,zip,latitude,longitude,timezone,offset' \
                      ',currency,isp,org,ASNUM,asName,reverse,mobile,proxy,hosting,addDate' \
                      ')values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
                self.db_cursor.execute(sql, values)
                print("保存【%s】成功,第%d个数据" % (item["nameServerName"], self.NUM))
            else:
                # Lookup failed/timed out: record only the server name.
                # NOTE(review): (x) is not a one-tuple; pymysql accepts a bare
                # scalar here, but (x,) would be the clearer spelling.
                values = (item["nameServerName"])
                sql = 'insert into name_server_info_over_time(nameServerName)values (%s)'
                self.db_cursor.execute(sql, values)
                print("【%s】超时,第%d个数据" % (item["nameServerName"], self.NUM))
            # Commit after every item (no batching).
            self.db_conn.commit()
        except Exception as e:
            print(e)
        self.NUM += 1

        return item

    def close_spider(self, spider):
        # Flush any pending transaction.
        self.db_conn.commit()
        # Close the cursor.
        self.db_cursor.close()
        # Close the connection.
        self.db_conn.close()


class SaveClassUrlPipeline:
    """Persist crawled class URLs into the MySQL table ``class_url_info``."""

    def open_spider(self, spider):
        """Open the MySQL connection when the spider starts."""
        cfg = spider.settings
        # Defaults look like development values; real settings should override.
        self.db_conn = pymysql.connect(
            db=cfg.get("MYSQL_DB_NAME", "Original"),
            host=cfg.get("MYSQL_HOST", "180.76.176.24"),
            user=cfg.get("MYSQL_USER", "root"),
            password=cfg.get("MYSQL_PASSWORD", "qq4145246"),
            charset="utf8",
        )
        self.db_cursor = self.db_conn.cursor()
        self.NUM = 1  # 1-based counter, used only for progress logging

    def process_item(self, item, spider):
        """Insert one URL row; database errors are printed, not raised."""
        try:
            values = (item["class_url"],)
            sql = 'insert into class_url_info(class_url)values (%s)'
            self.db_cursor.execute(sql, values)
            print("保存【%s】成功,第%d个数据" % (item["class_url"], self.NUM))
            self.db_conn.commit()
        except Exception as e:
            print(e)
        self.NUM += 1
        return item

    def close_spider(self, spider):
        """Commit any leftovers, then close cursor and connection."""
        self.db_conn.commit()
        self.db_cursor.close()
        self.db_conn.close()


class SaveGlnavPipeline:
    """Store website-directory entries in the MySQL ``glnav`` table."""

    def open_spider(self, spider):
        """Connect to MySQL when the spider starts (local dev defaults)."""
        settings = spider.settings
        self.db_conn = pymysql.connect(
            db=settings.get("MYSQL_DB_NAME", "fan"),
            host=settings.get("MYSQL_HOST", "127.0.0.1"),
            user=settings.get("MYSQL_USER", "root"),
            password=settings.get("MYSQL_PASSWORD", "123"),
            charset="utf8",
        )
        self.db_cursor = self.db_conn.cursor()
        self.NUM = 1  # 1-based counter, used only for progress logging

    def process_item(self, item, spider):
        """Insert one directory entry; database errors are printed, not raised."""
        try:
            row = tuple(item[k] for k in ("domainName", "url", "domain", "type", "country", "desc"))
            self.db_cursor.execute(
                'insert into glnav(domainName,URL,domain,Type,Country,Description)values (%s,%s,%s,%s,%s,%s)',
                row,
            )
            print("保存【%s】成功,第%d个数据" % (item["url"], self.NUM))
            self.db_conn.commit()
        except Exception as e:
            print(e)
        self.NUM += 1
        return item

    def close_spider(self, spider):
        """Commit any leftovers, then close cursor and connection."""
        self.db_conn.commit()
        self.db_cursor.close()
        self.db_conn.close()


class SaveShiJiePipeline:
    """Store world-website directory entries in the MySQL ``shijie`` table."""

    def open_spider(self, spider):
        """Connect to MySQL when the spider starts (local dev defaults)."""
        get = spider.settings.get
        self.db_conn = pymysql.connect(
            db=get("MYSQL_DB_NAME", "fan"),
            host=get("MYSQL_HOST", "127.0.0.1"),
            user=get("MYSQL_USER", "root"),
            password=get("MYSQL_PASSWORD", "123"),
            charset="utf8",
        )
        self.db_cursor = self.db_conn.cursor()
        self.NUM = 1  # 1-based counter, used only for progress logging

    def process_item(self, item, spider):
        """Insert one entry; database errors are printed, not raised."""
        try:
            row = (item["domainName"], item["url"], item["domain"],
                   item["type"], item["country"], item["desc"])
            sql = 'insert into shijie(domainName,URL,domain,Type,Country,Description)values (%s,%s,%s,%s,%s,%s)'
            self.db_cursor.execute(sql, row)
            print("保存【%s】成功,第%d个数据" % (item["url"], self.NUM))
            self.db_conn.commit()
        except Exception as e:
            print(e)
        self.NUM += 1
        return item

    def close_spider(self, spider):
        """Commit any leftovers, then close cursor and connection."""
        self.db_conn.commit()
        self.db_cursor.close()
        self.db_conn.close()


class SaveStuffGateContentPipeline(object):
    """Export each item as one JSON line to ./data/stuff_gate_content_data.json.

    Fix vs. the previous version: the output directory is created if missing,
    so constructing the pipeline no longer raises FileNotFoundError when
    ``./data`` does not exist yet.
    """

    def __init__(self):
        import os  # local import to leave the file-level import block untouched
        # Progress counter (0-based), used only for console logging.
        self.num = 0
        # Ensure the output directory exists before opening the file.
        os.makedirs('./data', exist_ok=True)
        # JsonLinesItemExporter requires a binary-mode file object.
        self.json_file = open('./data/stuff_gate_content_data.json', 'wb')
        # Emit real UTF-8 instead of ASCII escapes so non-Latin text stays readable.
        self.json_exporter = JsonLinesItemExporter(self.json_file, ensure_ascii=False, encoding='UTF-8')

    def open_spider(self, spider):
        """Begin the export session when the spider opens."""
        self.json_exporter.start_exporting()

    def process_item(self, item, spider):
        """Write one item to the JSON-lines file and log progress."""
        self.json_exporter.export_item(item)
        print("第%s个域名：%s" % (self.num, item["domain_info"]["url"]))
        self.num += 1
        return item

    def close_spider(self, spider):
        """Finish exporting and close the output file."""
        self.json_exporter.finish_exporting()
        self.json_file.close()


class SaveXmlDomainInfoPipeline(object):
    """Export each item as one JSON line to ./data/xml_domain_data.json.

    Fix vs. the previous version: the output directory is created if missing,
    so constructing the pipeline no longer raises FileNotFoundError when
    ``./data`` does not exist yet.
    """

    def __init__(self):
        import os  # local import to leave the file-level import block untouched
        # Progress counter (0-based), used only for console logging.
        self.num = 0
        # Ensure the output directory exists before opening the file.
        os.makedirs('./data', exist_ok=True)
        # JsonLinesItemExporter requires a binary-mode file object.
        self.json_file = open('./data/xml_domain_data.json', 'wb')
        # Emit real UTF-8 instead of ASCII escapes so non-Latin text stays readable.
        self.json_exporter = JsonLinesItemExporter(self.json_file, ensure_ascii=False, encoding='UTF-8')

    def open_spider(self, spider):
        """Begin the export session when the spider opens."""
        self.json_exporter.start_exporting()

    def process_item(self, item, spider):
        """Write one item to the JSON-lines file and log progress."""
        self.json_exporter.export_item(item)
        print("第%s个域名：%s" % (self.num, item["xml_domain_info"]["domain"]))
        self.num += 1
        return item

    def close_spider(self, spider):
        """Finish exporting and close the output file."""
        self.json_exporter.finish_exporting()
        self.json_file.close()


class SaveUniversityPipeline(object):
    """Placeholder pipeline for university items; currently passes items through."""

    def __init__(self):
        # Counter kept for parity with the other pipelines; not used yet.
        self.num = 0

    def open_spider(self, spider):
        # No resources to acquire yet.
        pass

    def process_item(self, item, spider):
        # No persistence implemented; forward the item unchanged.
        return item

    def close_spider(self, spider):
        # Nothing to release.
        pass
