# -*- coding: utf-8 -*-
#######################
#	获取新浪网站企业财报信息
#######################
import sys
import os
import time
# Make the sibling ../Config directory importable so shared crawler
# settings (DB connection, python command) can be loaded below.
dirs = os.path.abspath(os.path.dirname(__file__)+"/../Config")
os.sys.path.append(dirs)   # add the Config directory to Python's module search path
# conn: DB connection; pythoncmd: interpreter command used to spawn finance.py
from config import conn,pythoncmd

# Companies whose Sina financial reports are crawled on the first pass.
enterprise_list = ["索菱股份","荣科科技","嘉澳环保","中潜股份","洪汇新材","三祥新材"]

def startcrawler(enterprise_list):
	"""Crawl financial-report data for every company in *enterprise_list*.

	Each company is handed to the finance.py crawler as a child process.
	Afterwards, companies whose previous attempt was logged as failed
	(rows with type = 1 in crawlerfalselog) are collected, their log rows
	deleted, and the whole batch is retried recursively until the failure
	table is empty.

	Args:
		enterprise_list: iterable of company-name strings to crawl.

	Side effects: spawns child processes, deletes rows from
	crawlerfalselog, prints progress, and raises SystemExit when done.
	"""
	filepath = sys.path[0]
	for enterprise in enterprise_list:
		os.system(pythoncmd+" "+filepath+"/finance.py %s" % enterprise)
		time.sleep(10)  # throttle between spawns so the target site isn't hammered

	print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),"重新爬取上次失败公司。。。")
	cur = conn.cursor()
	# Fetch the companies whose previous crawl failed (type = 1).
	sqls = "select * from crawlerfalselog where type = 1"
	cur.execute(sqls)
	result = cur.fetchall()
	if result:
		# BUGFIX: collect ALL failed companies and delete ALL their log
		# rows before recursing. The original `return` sat inside this
		# loop, so only the first failed row was retried/deleted per pass.
		retry_list = []
		for item in result:
			retry_list.append(item[2])  # column 2 holds the company name
			# %d coerces the id to int, so no injection risk from DB data.
			sqld = "delete from crawlerfalselog where id = %d" % item[0]
			cur.execute(sqld)
		# BUGFIX: persist the deletes; without a commit (non-autocommit
		# drivers) the same rows are re-fetched forever. Harmless when
		# the connection is already in autocommit mode.
		conn.commit()
		cur.close()
		# NOTE(review): if a company fails on every attempt this still
		# recurses indefinitely — consider a retry cap.
		return startcrawler(retry_list)
	else:
		cur.close()
		print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),"财务数据爬取完成。。。")
		sys.exit()  # same SystemExit as the original exit()
# Script entry point: starts the crawl immediately when the file runs.
# NOTE(review): this also fires on import — consider guarding with
# `if __name__ == "__main__":` if the module is ever imported elsewhere.
startcrawler(enterprise_list)
