import logging
from twisted.internet import task
from scrapy.exceptions import NotConfigured
from scrapy import signals

from Utils.ddrobot.dd_robot import DDRobot, get_webhook_url
from Utils.ddrobot.dd_config import *
from Utils.ddrobot.dd_message import MessageType
import json

logger = logging.getLogger(__name__)


class DingdingRobot:
	"""Scrapy extension that reports crawl progress to a DingTalk robot.

	Enabled via the ``ROBOT_ENABLED`` setting; requires ``ROBOT_TOKEN`` and
	``ROBOT_SECRET`` for the webhook URL. While a spider runs it logs
	page/item counters on a timer, collects one ActionCard button per
	scraped item, and pushes text/ActionCard messages on open/close.
	"""
	
	def __init__(self, stats, token, secret, interval=1):
		"""
		:param stats: crawler stats collector (read in :meth:`log`)
		:param token: DingTalk robot access token
		:param secret: DingTalk robot signing secret
		:param interval: seconds between stats log lines (default 1)
		"""
		self.stats = stats
		self.task = None
		self.interval = interval
		# Initialised here (not only in spider_opened) so item_scraped /
		# spider_closed never hit an AttributeError if the open signal
		# was not dispatched first.
		self.items = []
		url = get_webhook_url(token, secret)
		self.robot = DDRobot(url)
	
	@classmethod
	def from_crawler(cls, crawler):
		"""Build the extension from crawler settings and connect signals.

		:raises NotConfigured: when disabled or token/secret are missing.
		"""
		robot_enabled = crawler.settings.getbool('ROBOT_ENABLED')
		if not robot_enabled:
			raise NotConfigured
		token = crawler.settings.get('ROBOT_TOKEN')
		secret = crawler.settings.get('ROBOT_SECRET')
		if token is None or secret is None:
			raise NotConfigured
		# Optional logging interval override; defaults to the historical
		# 1-second cadence when the setting is absent.
		interval = crawler.settings.getfloat('ROBOT_LOG_INTERVAL', 1)
		d = cls(crawler.stats, token, secret, interval)
		crawler.signals.connect(d.spider_opened, signal=signals.spider_opened)
		crawler.signals.connect(d.spider_closed, signal=signals.spider_closed)
		crawler.signals.connect(d.item_scraped, signal=signals.item_scraped)
		return d
	
	def spider_opened(self, spider):
		"""Start the periodic stats logger and announce the crawl start."""
		self.task = task.LoopingCall(self.log, spider)
		self.task.start(self.interval)
		name = spider.name
		# Reset per-spider so a reused extension instance starts clean.
		self.items = []
		self._send_text_message(f'{name}-- 爬虫开始爬取数据')
	
	def log(self, spider):
		"""Log current page/item counters (called by the LoopingCall)."""
		items = self.stats.get_value('item_scraped_count', 0)
		pages = self.stats.get_value('response_received_count', 0)
		msg = "Crawled %(pages)d pages scraped %(items)d items"
		log_args = {'pages': pages,
		            'items': items}
		logger.info(msg, log_args, extra={'spider': spider})
	
	def _send_text_message(self, message='爬虫消息'):
		"""Send a plain text message through the DingTalk robot."""
		data = {
			CONTENT: message,
			ISATALL: False,
			ATMOBILES: [],
			ATUSERIDS: []
		}
		self.robot.send_message(MessageType.text, **data)
	
	def _send_action_message(self, title='标题', text='内容'):
		"""Send one ActionCard message carrying all collected item buttons.

		No-op when no items were scraped. NOTE(review): DingTalk may cap the
		number of buttons per card — consider chunking via dl_list_split for
		large crawls; verify against the robot API limits.
		"""
		if not self.items:
			return
		data = {
			TITLE: title,
			TEXT: text,
			AC_BTNS: self.items
		}
		self.robot.send_message(MessageType.actionCard, **data)
	
	def spider_closed(self, spider, reason):
		"""Stop the stats logger and send the closing summary messages."""
		if self.task and self.task.running:
			self.task.stop()
		name = spider.name
		self._send_action_message(name, name)
		
		self._send_text_message(f'{name}-- 爬虫结束爬取数据,\n{str(reason)}')
	
	def item_scraped(self, item):
		"""Collect one ActionCard button per scraped item.

		Assumes the item exposes 'name' and 'detail_urls' keys — a missing
		key raises KeyError, surfacing a malformed item early.
		"""
		btns = {
			AC_TITLE: item['name'],
			AC_ACTION_URL: item['detail_urls']
		}
		self.items.append(btns)


# Split a list round-robin into n sublists.
def dl_list_split(items, n):
	"""
	Split *items* round-robin into ``n`` sublists.

	Element ``i`` goes into sublist ``i % n``, so sublist sizes differ by
	at most one.

	:param items: the source list
	:param n: number of sublists; if <= 0, defaults to roughly one sublist
	          per five items (at least one). The original ``int(len/5)``
	          fallback produced ``n == 0`` for fewer than five items and
	          crashed with ZeroDivisionError — clamped to 1 here.
	:return: a list of ``n`` sublists
	"""
	if n <= 0:
		n = max(1, len(items) // 5)
	results = [[] for _ in range(n)]
	for index, item in enumerate(items):
		results[index % n].append(item)
	return results
