import threading
import time

from influxdb import InfluxDBClient
from scrapy import signals

class InfluxDBStatusExporter:
    """Scrapy extension that exports crawl status metrics to InfluxDB.

    Per-event points are written for spider open/close, spider errors,
    received responses and dropped requests.  In addition, a background
    daemon thread flushes aggregate counters (responses, errors, bytes
    downloaded) every 10 seconds and resets them.
    """

    def __init__(self, settings):
        """Read connection settings, connect to InfluxDB and start the flush thread.

        Defaults are preserved from the original deployment; `INFLUXDB_NODE_ID`
        is new and defaults to the previously hard-coded value "2".
        """
        self.influxdb_host = settings.get('INFLUXDB_HOST', '172.20.10.8')
        self.influxdb_port = settings.getint('INFLUXDB_PORT', 8086)
        self.influxdb_database = settings.get('INFLUXDB_DATABASE', 'scrapy_db')
        self.client = InfluxDBClient(host=self.influxdb_host, port=self.influxdb_port, database=self.influxdb_database)
        self.client.create_database(self.influxdb_database)  # idempotent: ensure database exists
        # Aggregate counters flushed (and reset) by log_data().
        self.response_count = 0
        self.error_count = 0
        self.total_bytes_downloaded = 0
        # Tag identifying this crawler node; configurable, default kept for compatibility.
        self.node_id = settings.get('INFLUXDB_NODE_ID', '2')
        # Signals the background flush loop to terminate on spider close.
        self._stop_event = threading.Event()
        self.start_scheduler()

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy entry point: build the extension and wire up signal handlers."""
        ext = cls(crawler.settings)
        crawler.signals.connect(ext.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(ext.spider_closed, signal=signals.spider_closed)
        crawler.signals.connect(ext.spider_error, signal=signals.spider_error)
        crawler.signals.connect(ext.response_received, signal=signals.response_received)
        crawler.signals.connect(ext.request_dropped, signal=signals.request_dropped)
        return ext

    def spider_opened(self, spider):
        """Record a 'spider_opened' event point."""
        self.record_event('spider_opened', spider)

    def spider_closed(self, spider, reason):
        """Record shutdown, stop the flush thread, then close the client.

        The stop event is set first so the background loop cannot attempt a
        write against an already-closed client.
        """
        self._stop_event.set()
        self.record_event('spider_closed', spider, reason)
        self.client.close()

    def spider_error(self, failure, response, spider):
        """Count the error and write a 'spider_error' point with the failure text."""
        self.error_count += 1
        data = {
            "measurement": "spider_error",
            "tags": {"spider": spider.name, "url": response.url, "node_id": self.node_id},
            "fields": {"error": str(failure.value)}
        }
        self.client.write_points([data])

    def response_received(self, response, request, spider):
        """Count the response and its payload size; write a per-response point."""
        self.response_count += 1
        bytes_downloaded = len(response.body)
        self.total_bytes_downloaded += bytes_downloaded
        data = {
            "measurement": "response_received",
            "tags": {"spider": spider.name, "url": request.url, "node_id": self.node_id},
            "fields": {"status": response.status, "bytes": bytes_downloaded}
        }
        self.client.write_points([data])

    def request_dropped(self, request, spider):
        """Write a 'request_dropped' point for a request rejected by the scheduler."""
        data = {
            "measurement": "request_dropped",
            "tags": {"spider": spider.name, "url": request.url, "node_id": self.node_id},
            "fields": {"reason": "dropped"}
        }
        self.client.write_points([data])

    def record_event(self, event_type, spider, reason=None):
        """Write a lifecycle event point; `reason` defaults to the string "None"."""
        data = {
            "measurement": event_type,
            "tags": {"spider": spider.name, "node_id": self.node_id},
            "fields": {"reason": reason or "None"}
        }
        self.client.write_points([data])

    def start_scheduler(self):
        """Start a daemon thread that flushes aggregate counters every 10 seconds.

        Fixes two defects of the original loop: the interval was 5 s while the
        measurement names (and the comment) say 10 s, and the non-daemon
        `while True` thread could never be stopped, so it kept the process
        alive and kept writing after the client was closed.
        """
        def _flush_loop():
            # Event.wait doubles as an interruptible sleep: wakes immediately
            # when spider_closed() sets the stop event.
            while not self._stop_event.wait(10):
                self.log_data()

        thread = threading.Thread(target=_flush_loop, daemon=True)
        thread.start()

    def log_data(self):
        """Flush the aggregate counters as three points, then reset them.

        The timestamp is in whole seconds, so write_points is told
        time_precision='s' — the client otherwise interprets integer
        timestamps as nanoseconds, which would place every point near 1970.
        """
        timestamp = int(time.time())
        data = [
            {
                "measurement": "responses_per_10_seconds",
                "tags": {
                    "node_id": self.node_id  # tag points with this node's id
                },
                "time": timestamp,
                "fields": {
                    "count": self.response_count
                }
            },
            {
                "measurement": "errors_per_10_seconds",
                "tags": {
                    "node_id": self.node_id  # tag points with this node's id
                },
                "time": timestamp,
                "fields": {
                    "count": self.error_count
                }
            },
            {
                "measurement": "bytes_downloaded_per_10_seconds",
                "tags": {
                    "node_id": self.node_id  # tag points with this node's id
                },
                "time": timestamp,
                "fields": {
                    "bytes": self.total_bytes_downloaded
                }
            }
        ]
        self.client.write_points(data, time_precision='s')
        # Reset counters so each flush reports a per-interval delta.
        self.response_count = 0
        self.error_count = 0
        self.total_bytes_downloaded = 0
