import json
import os
import subprocess
import threading
import time
from queue import Queue, Empty


class SpiderService:
    """Runs the Scrapy danmu spider as a subprocess and streams its output.

    Output lines are parsed one by one: JSON lines become 'data' messages,
    everything else (Scrapy log output) becomes 'log' messages. All messages
    are dicts pushed onto the caller-supplied queue.
    """

    def __init__(self):
        # Resolve the Scrapy project directory relative to this file.
        current_dir = os.path.dirname(os.path.abspath(__file__))
        self.scrapy_path = os.path.abspath(os.path.join(current_dir, '..', 'bilibili_danmu'))
        print(f"Scrapy项目绝对路径: {self.scrapy_path}")
        self._validate_scrapy_env()

    def _validate_scrapy_env(self):
        """Validate the Scrapy environment.

        Raises:
            RuntimeError: if the Scrapy project directory does not exist.
        """
        if not os.path.exists(self.scrapy_path):
            raise RuntimeError(f"Scrapy项目路径不存在: {self.scrapy_path}")

    def crawl_danmu(self, bvid, data_queue, stop_event):
        """Crawl danmu for *bvid*, pushing progress messages onto *data_queue*.

        Message types: {'type': 'data', 'data': item}, {'type': 'log', 'msg': line},
        {'type': 'error', 'msg': ...}, and a final {'type': 'finish', ...} sentinel
        that is always emitted, even on failure. Setting *stop_event* requests
        early termination of the subprocess.
        """
        process = None
        stopped = False  # True once termination was requested via stop_event
        try:
            cmd = [
                'scrapy', 'crawl', 'bilibili_danmuke',
                '-a', f'bvid={bvid}',
                # 'jsonlines' emits one JSON object per line, which is what the
                # line-by-line parser below expects. The previous 'json' format
                # wraps items in an array ('[', '{...},') whose trailing commas
                # fail json.loads, so items were misclassified as log lines.
                '-o', '-:jsonlines',
                '--loglevel', 'INFO'
            ]
            process = subprocess.Popen(
                cmd,
                cwd=self.scrapy_path,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,  # merge log output into the same stream
                text=True,
                encoding='utf-8',
                bufsize=1  # line-buffered
            )

            def process_line(line):
                # A JSON line is a scraped item; anything else is log output.
                line = line.strip()
                if line:
                    try:
                        item = json.loads(line)
                        data_queue.put({'type': 'data', 'data': item})
                    except json.JSONDecodeError:
                        data_queue.put({'type': 'log', 'msg': line})

            def drain_output():
                # Consume whatever is left on stdout after the process ends.
                for line in process.stdout:
                    process_line(line)

            while True:
                if stop_event.is_set():
                    stopped = True
                    process.terminate()
                    try:
                        process.wait(timeout=5)
                    except subprocess.TimeoutExpired:
                        # Did not exit gracefully within 5s; force-kill.
                        process.kill()
                        process.wait()
                    drain_output()
                    data_queue.put({'type': 'log', 'msg': '进程已终止'})
                    break

                if process.poll() is not None:
                    # Process exited on its own; flush the remaining output.
                    drain_output()
                    break

                # NOTE(review): readline() blocks until the spider prints a
                # line, so a stop request may not be noticed immediately.
                line = process.stdout.readline()
                if line:
                    process_line(line)

            # A non-zero exit code after a deliberate stop is expected
            # (terminate/kill), so only report it as an error otherwise.
            if not stopped and process.returncode != 0:
                data_queue.put({'type': 'error', 'msg': f"进程异常退出，代码{process.returncode}"})

        except Exception as e:
            data_queue.put({'type': 'error', 'msg': str(e)})

        finally:
            # Always release the pipe and make sure the child is gone.
            if process:
                if process.stdout:
                    process.stdout.close()
                if process.poll() is None:
                    process.kill()
                    process.wait()
            data_queue.put({'type': 'finish', 'msg': '爬取完成'})


class CrawlManager:
    """Owns the crawl worker thread and the queue it reports through."""

    def __init__(self):
        self.data_queue = Queue()
        self.stop_event = threading.Event()
        self.spider_service = SpiderService()
        self.crawl_thread = None

    def start_crawl(self, bvid: str):
        """Spawn a daemon worker thread crawling danmu for *bvid*."""
        self.stop_event.clear()
        worker = threading.Thread(
            target=self.spider_service.crawl_danmu,
            args=(bvid, self.data_queue, self.stop_event),
            daemon=True,
        )
        self.crawl_thread = worker
        worker.start()

    def stop_crawl(self) -> None:
        """Signal the worker to stop and wait up to 5s for it to exit."""
        self.stop_event.set()
        worker = self.crawl_thread
        if worker is None or not worker.is_alive():
            return
        worker.join(timeout=5)

    def get_updates(self):
        """Drain and return every message currently sitting in the queue."""
        pending = []
        try:
            while True:
                pending.append(self.data_queue.get_nowait())
        except Empty:
            pass
        return pending


if __name__ == '__main__':
    bvid = 'BV1yPPEeoEZZ'
    manager = CrawlManager()
    manager.start_crawl(bvid)
    try:
        while True:
            updates = manager.get_updates()
            for update in updates:
                if update['type'] == 'data':
                    print(update['data'])
                elif update['type'] == 'log':
                    print(update['msg'])
                elif update['type'] == 'error':
                    print(update['msg'])
                elif update['type'] == 'finish':
                    print('爬取完成')
                    break
            if not manager.crawl_thread.is_alive():
                remaining = manager.get_updates()
                for update in remaining:
                    if update['type'] == 'data':
                        print(update['data'])
                    elif update['type'] == 'log':
                        print(update['msg'])
                    elif update['type'] == 'error':
                        print(update['msg'])
                    elif update['type'] == 'finish':
                        print('爬取完成，爬取了条数据')
                        break
                break
            time.sleep(1)
    finally:
        manager.stop_crawl()
