# coding=utf-8
# Parser: pulls serialized tasks off a queue, runs processors over each task,
# and assembles the results into a nested JSON payload.
import time
import urllib.request
from bs4 import BeautifulSoup
import json
import re
import math
import sys
from processor_list import *

sys.path.append("..")
from script.deqQueue import DeqQueue

class ParserTask:
	"""One unit of parsing work decoded from a queue message.

	The original stub discarded ``json_str`` and ``get_url`` could only
	return ``""``; this version keeps the raw payload and parses it as
	JSON when possible, falling back to an empty dict on bad input so
	existing callers see the same failure-soft behavior.
	"""

	def __init__(self, json_str):
		# Keep the raw message for processors that want the original text.
		self.raw = json_str
		try:
			parsed = json.loads(json_str) if json_str else {}
		except (ValueError, TypeError):
			# Not valid JSON -- tolerate it, as the original accepted any string.
			parsed = {}
		# Only keep dict payloads so .get() lookups below are safe.
		self.data = parsed if isinstance(parsed, dict) else {}

	def get_url(self):
		# Backward compatible: "" when the payload carries no url.
		return self.data.get("url", "")

class Parser:
	"""Pulls serialized tasks from a work queue, runs every registered
	processor over each task, and pushes the assembled JSON result onto
	a send queue.

	A redis-like connection (``self.conn``) and a queue name (via
	:meth:`set_deq_name`) must be configured before the queue helpers do
	anything useful; until then they fail soft (``False`` / ``None``),
	matching the original behavior.
	"""

	# NOTE(review): class-level mutable dict, shared by every instance.
	# Kept only for backward compatibility with any external references;
	# do not mutate it expecting per-instance state.
	data = {}

	def __init__(self):
		# NOTE: historical misspelling kept so external code that reads
		# ``proccessor_list`` keeps working.
		self.proccessor_list = []
		self.queue_name = None  # set via set_deq_name()
		self.conn = None        # redis-like connection, injected externally
		self.running = True     # polled by isRunning()/work()

	def add_processor(self, processor=None):
		"""Register a processor exposing ``process(task)`` and ``get_key()``.

		The parameter is optional so the original no-arg call remains a
		no-op, keeping the signature backward compatible.
		"""
		if processor is not None:
			self.proccessor_list.append(processor)

	def set_deq_name(self, deq_name):
		"""Remember the queue name used by the queue helpers below."""
		self.queue_name = deq_name

	def isRunning(self):
		"""Return True while the work loop should keep polling."""
		return self.running

	def get_processor_list(self, task):
		"""Return the processors to apply to *task* (currently all of them)."""
		return self.proccessor_list

	def isValid(self):
		"""Validation hook; the default implementation accepts every task."""
		return True

	def get_send_deq(self, content=None):
		"""Push *content* onto the send queue.

		Returns True on success, False on any failure (no connection, no
		queue name, redis error).  Fix: the original referenced the
		undefined names ``queue_name`` and ``content`` and so always
		raised into a bare ``except`` and returned False.
		"""
		try:
			self.conn.lpush(self.queue_name, content)
			return True
		except Exception:
			# Failure-soft by design; callers only check the boolean.
			return False

	def get_deq_name(self):
		"""Blocking-pop one message from the queue.

		Returns the decoded payload string, or None when the pop fails
		or yields nothing.  Fix: the original referenced the undefined
		name ``queue_name``; the bare ``except`` is narrowed to
		``Exception`` so KeyboardInterrupt still propagates.
		"""
		try:
			result = self.conn.brpop(self.queue_name)
			# brpop returns a (queue, payload) pair; payload is bytes.
			if result and len(result) > 0:
				return result[1].decode('UTF-8')
			return None
		except Exception:
			print("except")
			return None

	def work(self):
		"""Main loop: fetch a task, run the processors, send the result."""
		queue = DeqQueue()
		while self.isRunning():
			data_str = queue.get(self.get_deq_name())

			if not data_str:
				print("data is empty")
				return None

			# NOTE(review): task should probably be the page HTML coming
			# from content_crawler rather than built from data_str -- confirm.
			task = ParserTask(data_str)
			# Fix: the original did ``if isValid(): continue`` which would
			# skip every *valid* task; skip only the invalid ones.
			if not self.isValid():
				continue

			# Core of the parser: assemble each processor's output into
			# one nested JSON object keyed by the processor's key.
			res = {}
			for processor in self.get_processor_list(task):
				res[processor.get_key()] = processor.process(task)

			# Fix: get_send_deq returns a bool, not an object with .send();
			# pass the serialized payload directly.
			self.get_send_deq(json.dumps(res))