import os
import os
import subprocess
import sys
from configparser import ConfigParser

import pymysql
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtWidgets import QFileDialog, QDialog
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

from spider.util.ui_dialog import Ui_Dialog
from spider.util.ui_main import Ui_mainWindow


class RunThread(QtCore.QThread):
    """Worker thread that runs a Scrapy spider so the GUI stays responsive.

    Signals:
        message_singal: status text for the progress dialog (name kept,
            including the existing misspelling, for caller compatibility).
        percent_signal: int percentage (0-100) for the progress bar.
        close_signal: emitted when the crawl finishes so the main window
            can re-enable its start button.
    """
    message_singal = pyqtSignal(str)
    percent_signal = pyqtSignal(int)
    close_signal = pyqtSignal(int)

    def __init__(self, spider_name, keyword, trans_key=None, parent=None):
        """Store the crawl parameters.

        Args:
            spider_name: registered Scrapy spider name ('jd', 'yhd' or 'sn').
            keyword: search keyword, forwarded to the spider as ``name``.
            trans_key: optional extra key forwarded to the spider.
            parent: optional Qt parent object.
        """
        super(RunThread, self).__init__(parent)
        self.spider_name = spider_name
        self.keyword = keyword
        self.trans_key = trans_key

    def run(self):
        print("进入子线程", self.spider_name, self.keyword)
        self.message_singal.emit("进入子线程，准备开始爬虫")
        # NOTE(review): earlier variants (CrawlerRunner + twisted reactor,
        # multiprocessing, and a subprocess-based runner that parsed stdout
        # for progress percentages) were tried and abandoned — the reactor
        # must run in the main thread. The current approach runs
        # CrawlerProcess directly in this QThread.
        # NOTE(review): CrawlerProcess.start() installs OS signal handlers,
        # which can raise ValueError outside the main thread — confirm this
        # works on the target platform.
        from scrapy.crawler import CrawlerProcess
        from scrapy.utils.project import get_project_settings
        crawl = CrawlerProcess(get_project_settings())
        # BUG FIX: the spider name was hard-coded to 'jd', silently ignoring
        # the combo-box selection passed in as self.spider_name.
        crawl.crawl(self.spider_name, name=self.keyword, trans_key=self.trans_key)
        crawl.start()


class DialogWindow(QDialog):
    """Progress dialog: shows spider status text and a progress bar."""

    def __init__(self):
        QDialog.__init__(self)
        self.dialog = Ui_Dialog()
        self.dialog.setupUi(self)

    def update_prograssbar(self, percent):
        """Set the progress bar value (0-100).

        (Method name misspelling kept — it is connected as a slot by callers.)
        """
        self.dialog.progressBar.setValue(percent)

    def show_message(self, message=''):
        """Display status text coming from the spider thread.

        BUG FIX: the default was ``None``, but PyQt5's ``setText(None)``
        raises TypeError, so an argument-less call crashed. Callers always
        pass a str via the signal, so ``''`` is backward compatible.
        """
        self.dialog.spider_output.setText(message)


class MainWindow(QtWidgets.QMainWindow, Ui_mainWindow):
    """Main window: collects crawl and database settings, persists them to a
    config file, and launches the selected spider in a RunThread that reports
    into a DialogWindow.

    BUG FIX: the base class was spelled ``Ui_MainWindow`` but the import
    (``from spider.util.ui_main import Ui_mainWindow``) provides
    ``Ui_mainWindow`` — the original class statement raised NameError.
    """

    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        self.setupUi(self)
        self.dialog = DialogWindow()
        # Show the progress dialog whenever the start button is clicked.
        self.start_pushButton.clicked.connect(self.dialog.show)
        self.my_thread = None  # currently running RunThread, if any

    def select_save_path(self):
        """Let the user pick the image save directory and echo it in the UI."""
        dir_path = QFileDialog.getExistingDirectory(self, "choose directory", "C:\\Users\\Administrator\\Desktop")
        print("选择图片保存路径", dir_path)
        self.save_path.setText(dir_path)

    def test_connection(self):
        """Attempt a MySQL connection with the form values and report the result."""
        host = self.host.text()
        port = self.port.text()
        username = self.username.text()
        password = self.password.text()
        database = self.database.text()
        print("测试数据库连接", host, port, username, password, database)
        try:
            # int(port) failures (non-numeric input) land in the same handler
            # as connection errors, which is the desired UX here.
            connection = pymysql.connect(host=host, user=username, password=password, db=database, port=int(port))
        except Exception as e:
            print("连接失败", e)
            QtWidgets.QMessageBox.warning(self, "测试", "连接失败", QtWidgets.QMessageBox.Close)
        else:
            print("连接成功")
            QtWidgets.QMessageBox.information(self, "测试", "连接成功", QtWidgets.QMessageBox.Close)
            connection.close()

    def create_table(self, cursor, item):
        """Create the ``products`` table.

        Raises if the table already exists (no IF NOT EXISTS clause).

        Args:
            cursor: an open pymysql cursor.
            item: unused; kept for caller compatibility.
        """
        sql = """
        CREATE TABLE `products` (
          `spider` varchar(10) DEFAULT NULL,
          `id` varchar(20) NOT NULL,
          `keyword` varchar(45) DEFAULT NULL,
          `trans_key` varchar(45) DEFAULT NULL,
          `name` varchar(100) DEFAULT NULL,
          `price` varchar(10) DEFAULT NULL,
          `image_urls` varchar(1000) DEFAULT NULL,
          PRIMARY KEY (`id`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; 
        """
        cursor.execute(sql)

    def start_spider(self, p_str):
        """Persist the form settings to the config file, then start the
        selected spider in a background RunThread.

        Args:
            p_str: unused slot argument supplied by the Qt signal.
        """
        keyword = self.keyword.text()
        trans_key = self.trans_key.text()
        spider_index = self.spider_comboBox.currentIndex()
        images_path = self.save_path.text()

        host = self.host.text()
        port = self.port.text()
        username = self.username.text()
        password = self.password.text()
        database = self.database.text()

        # Combo-box index -> registered Scrapy spider name; anything past
        # the known indices falls back to 'sn' (same as the original chain).
        spider_name = {0: 'jd', 1: 'yhd'}.get(spider_index, 'sn')

        print("爬虫信息：", keyword, trans_key, spider_name, spider_index, images_path)

        # TODO(review): after packaging, __file__ resolves differently and
        # this relative config path breaks — needs a packaging-aware lookup.
        cfg_file = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "config\\settings.cfg")
        config = ConfigParser()
        config.read(cfg_file, encoding='gbk')
        # Write the form values into the config file; the spider reads them
        # back on startup.
        config.set('settings', 'images_path', images_path)
        config.set('database', 'host', host)
        config.set('database', 'port', port)
        config.set('database', 'username', username)
        config.set('database', 'password', password)
        config.set('database', 'database', database)

        # BUG FIX: the file is read with gbk above but was written back with
        # the locale-default encoding, which could corrupt non-ASCII values
        # (e.g. a Chinese images_path) on round-trip. Write gbk explicitly.
        with open(cfg_file, 'w', encoding='gbk') as file:
            config.write(file)

        # Run the spider in a background thread and wire its signals to the
        # progress dialog.
        # TODO: 1. open one dialog per click; 2. kill the spider when the
        # dialog is closed.
        self.my_thread = RunThread(spider_name, keyword, trans_key)
        self.my_thread.message_singal.connect(self.dialog.show_message)
        self.my_thread.percent_signal.connect(self.dialog.update_prograssbar)
        self.my_thread.close_signal.connect(self.close_spider)
        self.my_thread.start()

        # Disable the start button until the current crawl reports done.
        self.start_pushButton.setEnabled(False)

    def close_spider(self, flag):
        """Re-enable the start button once the current crawl has finished."""
        print('按钮可用：', flag)
        self.start_pushButton.setEnabled(True)


if __name__ == "__main__":
    # Normal GUI entry point, currently disabled in favour of a direct
    # spider run for testing:
    # app = QtWidgets.QApplication(sys.argv)
    # mainWindow = MainWindow()
    # mainWindow.show()
    # sys.exit(app.exec_())

    # Run the 'jd' spider directly, using the scrapy imports already
    # present at module level.
    process = CrawlerProcess(get_project_settings())
    process.crawl('jd', name='哈根达斯')
    process.start()
