import os
from redis import StrictRedis, ConnectionPool
from scrapyd_api import ScrapydAPI


class runSpider:
	"""Coordinate a Redis-backed distributed Scrapy crawl through scrapyd.

	Two scrapyd projects are expected to be deployed: one exposing an
	"index" spider and one exposing a "details" spider.  Start URLs are
	fed to the index spider through the Redis list ``index:start_urls``
	(the scrapy-redis convention).
	"""

	def __init__(self):
		# Project names / scrapyd job ids, filled in by startSpider().
		self.index = None
		self.indexId = None
		self.details = None
		self.detailsId = None
		self.redis = StrictRedis(host="localhost", port=6379, db=0)
		# NOTE(review): 127.1.1.1 is a loopback address but an unusual
		# one -- confirm this should not be 127.0.0.1.
		self.scrapyd = ScrapydAPI('http://127.1.1.1:6800')

	def enterQueue(self, urls):
		"""Push each base URL, suffixed with "zufang/", onto the
		``index:start_urls`` Redis list that seeds the index spider.
		"""
		for url in urls:
			print("enterQueue:", url)
			url = url + "zufang/"
			print("之后:", url)
			# LPUSH returns the new list length; >= 1 means the push landed.
			a = self.redis.lpush("index:start_urls", url)
			if a >= 1:
				print("插入成功", url)
			else:
				print("插入失败", url)

	def startSpider(self, urls):
		"""Schedule both spiders on scrapyd and seed the URL queue.

		Args:
			urls: iterable of base URLs to enqueue (each gets "zufang/"
				appended by enterQueue()).

		Returns:
			tuple: (index job id, details job id) as returned by scrapyd.
		"""
		# Assumes exactly two deployed projects, listed in the order
		# (details, index) -- unpacking raises ValueError otherwise.
		self.details, self.index = self.scrapyd.list_projects()
		self.indexId = self.scrapyd.schedule(self.index, "index")
		self.detailsId = self.scrapyd.schedule(self.details, "details")
		print("成功启动", "de:", self.details, "in:", self.index)
		# Reuse the single queue-seeding implementation instead of
		# duplicating the LPUSH loop here (the original repeated it).
		self.enterQueue(urls)
		return self.indexId, self.detailsId

	def stopSpider(self):
		"""Report the scrapyd status of both scheduled jobs.

		NOTE(review): despite its name this only *reports* status; the
		scrapyd.cancel() calls that would actually stop the jobs were
		left commented out in the original and are not performed here.
		"""
		de_status = self.scrapyd.job_status(self.details, self.detailsId)
		in_status = self.scrapyd.job_status(self.index, self.indexId)
		print(de_status, in_status)
		print("成功关闭:", de_status, in_status)

	def close(self):
		"""Release the Redis connections opened by this process.

		The original called client_kill("127.0.0.1:6379"), but CLIENT KILL
		takes a *client's* addr:port; passing the server's own address
		matches no client and closes nothing.  Disconnecting our own
		connection pool is the correct teardown.
		"""
		self.redis.connection_pool.disconnect()



if __name__ == '__main__':
	# Bug fix: startSpider(urls) requires the seed URLs; the original
	# call passed no argument and raised TypeError before anything ran.
	# TODO(review): replace the placeholder with the real base URLs to crawl.
	seed_urls = ["http://example.com/"]
	r = runSpider()
	r.startSpider(seed_urls)
	r.stopSpider()

# runSpider().index()
# pool
