import os
import subprocess
import sys
import time
from configparser import ConfigParser
from multiprocessing import Process, Queue

import pymysql
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtCore import pyqtSignal, QThreadPool, QObject, QRunnable, QThread
from PyQt5.QtWidgets import QFileDialog, QDialog, QMainWindow
from scrapy.crawler import CrawlerRunner, CrawlerProcess

# from scrapy.utils.project import get_project_settings
from scrapy.utils.project import get_project_settings
from twisted.internet import reactor

from spider.spiders.sn import SnSpider
from spider.spiders.yhd import YhdSpider
from spider.util.mysignal import MySignal
from spider.util.ui_dialog import Ui_Dialog
from spider.util.ui_main import Ui_MainWindow

from spider.spiders.jd import JdSpider


class SpiderProcess(Process):
    """Spider worker process: runs one Scrapy crawl in a separate OS process.

    Progress/status updates produced by the spider are forwarded to the
    parent (GUI) process through a multiprocessing.Queue, because Qt signal
    objects cannot be pickled across the process boundary.
    """

    def __init__(self, spider_name, keyword, trans_key, images_path,
                 host, port, username, password, database,
                 queue=None):
        super().__init__()
        self.spider_name = spider_name
        self.keyword = keyword
        self.trans_key = trans_key
        # A custom (Qt signal) object cannot be serialized, so it cannot be
        # passed from the main process to this worker process.
        # self.signal = MySignal()
        self.q = queue
        self.images_path = images_path
        self.host = host
        self.port = port
        self.username = username
        self.password = password
        self.database = database

    def run(self):
        # The signal object is created inside the child process and its
        # slots simply relay everything into the queue for the parent.
        signal = MySignal()
        signal.send_str.connect(self.show_str)
        signal.send_int.connect(self.show_int)

        # TODO: start the spider with different configurations
        settings = get_project_settings()
        settings.set('IMAGES_STORE', self.images_path)
        settings.set('HOST', self.host)
        settings.set('PORT', int(self.port))
        settings.set('USERNAME', self.username)
        settings.set('PASSWORD', self.password)
        settings.set('DATABASE', self.database)

        if self.spider_name == 'jd':
            spider = JdSpider
        elif self.spider_name == 'yhd':
            spider = YhdSpider
        else:
            spider = SnSpider

        # runner = CrawlerRunner(settings)
        # runner.crawl(spider, name=self.keyword, trans_key=self.trans_key, signal=signal)
        # d = runner.join()
        # d.addBoth(lambda _: reactor.stop())  # callback to stop the twisted reactor
        # reactor.run()
        crawl = CrawlerProcess(settings)
        crawl.crawl(spider, name=self.keyword, trans_key=self.trans_key, signal=signal)
        crawl.start()

    def show_str(self, p_str):
        # Relay a status string from the spider to the parent process.
        self.q.put(p_str)

    def show_int(self, p_int):
        # Relay a progress percentage from the spider to the parent process.
        self.q.put(p_int)


class SpiderThread(QRunnable):
    """Child thread for a spider task.

    Runs the spider as an external ``scrapy crawl`` subprocess and relays
    progress (ints) and status messages (strings) via ``self.signal``.
    """

    def __init__(self, spider_name, keyword, trans_key):
        super(SpiderThread, self).__init__()
        self.spider_name = spider_name
        self.keyword = keyword
        self.trans_key = trans_key
        self.signal = MySignal()
        self.p = None  # subprocess.Popen handle, set in start_spider_by_subprocess

    def start_spider_by_subprocess(self):
        # Build argv as a list and run with shell=False: the keyword comes
        # from user input, and interpolating it into a shell string with
        # shell=True would allow shell command injection.
        spider_cmdline = ["scrapy", "crawl", self.spider_name,
                          "-a", "name={}".format(self.keyword),
                          "-a", "trans_key={}".format(self.trans_key)]
        self.p = subprocess.Popen(spider_cmdline, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        self.signal.send_int.emit(0)
        self.signal.send_str.emit('爬虫正在运行')

        # poll() is None while the process is running, its returncode afterwards.
        while self.p.poll() is None:
            data = self.p.stdout.readline().decode('utf-8').strip()
            print("data: ", data)
            if data.isdigit():
                # Lines that are bare digits are progress percentages;
                # anything above 100 is clamped to 99 (100 is reserved for
                # the completion signal below).
                value = int(data)
                self.signal.send_int.emit(99 if value > 100 else value)
            else:
                # self.signal.send_message.emit(data)  # forward spider output
                pass
        # Process has already exited here, so no terminate()/kill() is needed.
        self.signal.send_int.emit(100)
        self.signal.send_str.emit('爬虫完成')

    def start_spider_by_crawler(self):
        # TODO: builtins.ValueError: signal only works in main thread
        runner = CrawlerRunner(get_project_settings())
        runner.crawl(self.spider_name, name=self.keyword, trans_key=self.trans_key, signal=self.signal)
        d = runner.join()
        d.addBoth(lambda _: reactor.stop())  # callback to stop the twisted reactor
        reactor.run()

    def start_spider_by_process(self):
        pass

    def run(self):
        print("进入子线程", self.spider_name, self.keyword, self.trans_key)
        self.signal.send_str.emit("进入子线程，准备开始爬虫")

        self.start_spider_by_subprocess()

    def stop_spider(self):
        """Kill the spider subprocess; the worker thread itself is left to
        the thread pool to manage."""
        print('stop thread')
        self.p.terminate()
        self.p.kill()


class ScanThread(QThread):
    """Bridges the multiprocessing queue to Qt signals.

    Blocks on the queue fed by SpiderProcess and re-emits each message:
    ints become progress updates (``send_int``), strings become status
    messages (``send_str``). The string 'close' ends the loop and emits a
    final 100% progress signal.
    """

    send_int = pyqtSignal(int)
    send_str = pyqtSignal(str)

    def __init__(self, q):
        super().__init__()
        self.q = q  # multiprocessing.Queue written by SpiderProcess

    def run(self):
        while True:
            msg = self.q.get()  # blocks until the spider process sends something
            print('===scan===', msg)
            if isinstance(msg, int):
                # Clamp to 99 so that 100 is only emitted on 'close'.
                self.send_int.emit(99 if msg >= 100 else msg)
            else:
                self.send_str.emit(msg)
                if msg == 'close':
                    self.send_int.emit(100)
                    break


class DialogWindow(QDialog, Ui_Dialog):
    """Progress dialog shown for a single spider run.

    Emits ``close_spider`` when the user confirms closing an unfinished run,
    so the owner can stop the spider process/thread.
    """

    close_spider = pyqtSignal()

    def __init__(self, parent=None):
        super(DialogWindow, self).__init__(parent)
        self.setupUi(self)

    def set_label(self, spider_name, keyword):
        # Show which site/keyword this dialog is tracking.
        self.label.setText('正在爬取%s-%s' % (spider_name, keyword))

    def update_progressbar(self, percent):
        # Slot for progress signals (0-100).
        self.progressBar.setValue(percent)

    def show_message(self, message):
        # Slot for textual status messages from the spider.
        self.spider_output.setText(message)

    def closeEvent(self, event):
        if self.progressBar.value() >= 100:
            # Crawl finished; let the default close proceed.
            return
        # Unfinished crawl: confirm with the user before closing.
        reply = QtWidgets.QMessageBox.question(self, u'info', u'尚未完成，确定退出？',
                                               QtWidgets.QMessageBox.Yes,
                                               QtWidgets.QMessageBox.No)
        if reply != QtWidgets.QMessageBox.Yes:
            event.ignore()
            return
        self.close_spider.emit()  # ask the owner to stop the spider
        event.accept()

    def accept(self):
        self.close()

    def reject(self):
        """Close the dialog."""
        self.close()


class MainWindow(QMainWindow, Ui_MainWindow):
    """Main application window: collects spider/database settings and
    launches one spider process (plus a progress dialog) per click."""

    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        self.setupUi(self)
        self.my_threads = []
        # A dedicated pool for spider-related runnables. (The previous
        # ``self.pool.globalInstance()`` call was a no-op whose return value
        # was discarded, so it has been removed.)
        self.pool = QThreadPool()

    def show_about(self):
        """Show the About dialog."""
        QtWidgets.QMessageBox.about(self, u'About', u'<h3>About</h3>'
                                                    u'<p>该程序基于Scrapy爬虫框架对京东，一号店，苏宁进行信息爬取，<br>'
                                                    u'可以根据指定的关键字以及三个网站中的其一来执行爬虫任务。<br><br>'
                                                    u'<b>爬取的信息有</b>：商品名字，价格，商品sku图</p>')
        print('show_help')

    @staticmethod
    def clear_log():
        """Delete every file in the local ``log`` directory.

        Does nothing if the directory does not exist (previously this
        raised FileNotFoundError).
        """
        if not os.path.isdir('log'):
            return
        log_files = os.listdir('log')
        print(log_files)
        for file in log_files:
            os.remove(os.path.join('log', file))

    def select_save_path(self):
        """Ask the user for an image save directory and show it in the UI."""
        dir_path = QFileDialog.getExistingDirectory(self, "choose directory", "C:\\Users\\Administrator\\Desktop")
        print("选择图片保存路径", dir_path)
        self.save_path.setText(dir_path)

    def test_connection(self):
        """Try to open (and immediately close) a MySQL connection with the
        credentials entered in the form, reporting success or failure."""
        host = self.host.text()
        port = self.port.text()
        username = self.username.text()
        password = self.password.text()
        database = self.database.text()
        print("测试数据库连接", host, port, username, password, database)
        try:
            connection = pymysql.connect(host=host, user=username, password=password, db=database, port=int(port))
        except Exception as e:
            # Broad catch is intentional: pymysql raises several error types
            # (plus ValueError from int(port)), and all mean "cannot connect".
            print("连接失败", e)
            QtWidgets.QMessageBox.warning(self, "测试", "连接失败", QtWidgets.QMessageBox.Close)
        else:
            print("连接成功")
            QtWidgets.QMessageBox.information(self, "测试", "连接成功", QtWidgets.QMessageBox.Close)
            connection.close()

    def get_info(self):
        """Collect the spider and database settings from the form.

        Returns:
            tuple: (spider_name, keyword, trans_key, images_path,
                    host, port, username, password, database)
        """
        keyword = self.keyword.text()
        trans_key = self.trans_key.text()
        spider_index = self.spider_comboBox.currentIndex()
        images_path = self.save_path.text()

        host = self.host.text()
        port = self.port.text()
        username = self.username.text()
        password = self.password.text()
        database = self.database.text()

        # Combo-box order: 0 = JD, 1 = Yihaodian, anything else = Suning.
        if spider_index == 0:
            spider_name = 'jd'
        elif spider_index == 1:
            spider_name = 'yhd'
        else:
            spider_name = 'sn'

        print("爬虫信息：", keyword, trans_key, spider_name, spider_index, images_path)
        return spider_name, keyword, trans_key, images_path, host, port, username, password, database

    def start_spider(self):
        """Open a progress dialog and start one spider process per click;
        closing the dialog terminates that spider."""
        spider_name, keyword, trans_key, images_path, host, port, username, password, database = self.get_info()
        print('start_spider')
        dialog = DialogWindow(self)
        dialog.set_label(spider_name, keyword)
        # The queue carries progress/status messages out of the child process.
        q = Queue()
        p = SpiderProcess(spider_name, keyword, trans_key, images_path,
                          host, port, username, password, database, queue=q)
        dialog.close_spider.connect(p.terminate)
        p.start()
        # A scanner thread drains the queue and re-emits Qt signals,
        # since the GUI cannot block on the queue itself.
        self.scan = ScanThread(q)
        self.scan.send_int.connect(dialog.update_progressbar)
        self.scan.send_str.connect(dialog.show_message)
        dialog.close_spider.connect(self.scan.terminate)
        self.scan.start()

        dialog.show()

    def clear_pool(self):
        """Drop queued (not-yet-started) runnables from the thread pool."""
        self.pool.clear()
        pass


if __name__ == "__main__":
    # Launch the Qt application with the main window.
    app = QtWidgets.QApplication(sys.argv)
    mainWindow = MainWindow()
    mainWindow.show()
    sys.exit(app.exec_())
