# -*- coding:utf-8 -*-
import os,sys
import re
import traceback
import time
from datetime import datetime
sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), os.pardir))
import supeanut_config
from CommonLib.mylog import mylog
from DataSupport.ExRightSupport import ExRightSupport
from SendOnline.ExRightsSend import ExRightsSend
from CommonLib.StockTool import StockTool


'''
Author: supeanut
Created: 2016-xx-xx xx:xx:xx
Description:
	xxx
	xxxxx
Related configuration:
	supeanut_config.XXX
Change history:
	2016-xx-xx: xxxxxx
'''
class ExRightsCrawler:
	"""Driver for the ex-rights data job.

	crawler_process() fetches and stores raw ex-rights data via
	ExRightSupport; mongo_process() pushes the daily update into
	mongo via ExRightsSend.  Both return (flag, msg) tuples:
	(True, 'suc') on success, (False, error_message) on failure.
	"""

	def __init__(self):
		# Stateless for now; kept so callers can instantiate uniformly.
		pass

	def _run_step(self, log_name, action):
		"""Run one job step and log failures.

		log_name -- logger channel name passed to mylog.
		action   -- zero-argument callable returning (flag, msg).

		Returns (True, 'suc') when action succeeds, otherwise
		(False, msg) after logging the error.
		"""
		log = mylog(log_name, None)
		flag, msg = action()
		if flag is False:
			log.error(msg)
			return False, msg
		return True, 'suc'

	# Runs continuously (crawl step).
	def crawler_process(self):
		"""Fetch and store ex-rights data; returns (flag, msg)."""
		return self._run_step('Crontab.ExRightsJob.crawler',
							  lambda: ExRightSupport().get_store_data())

	# Once-a-day update.
	def mongo_process(self):
		"""Insert the daily ex-rights update into mongo; returns (flag, msg)."""
		return self._run_step('Crontab.ExRightsJob.mongo',
							  lambda: ExRightsSend().insert())

if __name__ == '__main__':
	# Manual entry point: run the crawl step once and show its (flag, msg).
	# print() with a single argument behaves identically under Python 2
	# (the call prints the returned tuple) and is valid Python 3 syntax,
	# unlike the bare `print` statement.
	obj = ExRightsCrawler()
	print(obj.crawler_process())
