# -*- coding: utf-8 -*-
#######################
#	Fetch corporate financial-report data from the Sina website
#######################
import sys
import os
import time
dirs = os.path.abspath(os.path.dirname(__file__)+"/../Config")
os.sys.path.append(dirs)   #将上上级目录加载到python的环境变量中
from config import conn,pythoncmd

# Enterprise list: load the searchable company names from the database.
cur = conn.cursor()
# Only rows flagged active (status = 1) with a non-null search name.
sqls = "select searchname from enterpriseinfo where status = 1 and searchname is not null"
cur.execute(sqls)
# Collect the first (only) column of every result row.
enterprise_list = [row[0] for row in cur.fetchall()]

# BUG FIX: the original tested `enterprise_list == None`, which is never true —
# the variable is always a list (empty when the query matches nothing), so the
# hard-coded fallback below was unreachable dead code. Test for emptiness.
if not enterprise_list:
	enterprise_list = ["南电A","金源","生物","珠江","烯碳","易桥","天首","济柴","恒立","钒钛","合金","盈方","韶钢","五稀","冀装","钱江","神火","建峰","东钽","煤气","皇台","江化","獐岛","兴化","创疗","东晶","宇顺","人乐","鲁丰","中特","蓝丰","舜船","新都","川化","新亿","江泉","沧大","山水","景谷","南化","商城","亚星","天利","星马","金瑞","吉恩","中发","山煤","八钢","兴业","中企","工新","百花","云维","黑豹","昆机","宏盛","星湖","新集","常林","新梅","五 粮 液","北辰实业","鄂尔多斯","龙元建设","江苏有线","中文传媒","环旭电子","上海能源","宋都股份","海立美达","陕天然气","豫光金铅","齐翔腾达","赤天化","卫星石化","共进股份","得润电子","弘高创意","新纶科技","赞宇科技","诚志股份","天舟文化","家家悦","江南水务","和佳股份","罗莱生活","四创电子","中润资源","升华拜克","东方精工","海兴电力","茂业通信","益民集团","吉艾科技","露笑科技","思维列控","丹邦科技","南宁百货","奇正藏药","复旦复华","京城股份","博敏电子","深圳惠程","索菱股份","中国嘉陵","醋化股份","汉商集团","汇金股份","南极电商","海虹控股","西安旅游","鲁北化工","朗玛信息","宏磊股份","三六五网","茂化实华","先锋新材","凯美特气","万里石","荣科科技","嘉澳环保","湘油泵","中潜股份","王子新材","洪汇新材","博济医药","汇源通信","三祥新材","佳发安泰","海德股份"]

def startcrawler(enterprise_list):
	"""Crawl financial-report data for every company in *enterprise_list*.

	Spawns finance.py as a child process once per company, then re-queries
	crawlerfalselog for companies whose crawl failed and retries them
	recursively until the failure log is empty.
	"""
	filepath = sys.path[0]
	for enterprise in enterprise_list:
		# One crawler process per company; sleep to throttle requests.
		# NOTE(review): os.system with string interpolation breaks if a name
		# ever contains shell metacharacters; consider subprocess.run([...]).
		os.system(pythoncmd+" "+filepath+"/finance.py %s" % enterprise)
		time.sleep(10)

	print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),"重新爬取上次失败公司。。。")
	cur = conn.cursor()
	# Companies whose previous crawl failed (type = 1 marks finance crawls).
	sqls = "select * from crawlerfalselog where type = 1"
	cur.execute(sqls)
	result = cur.fetchall()
	if result:
		# BUG FIX: the original returned from inside this loop after the first
		# row, so only ONE failed company was retried (and only its log row
		# deleted) per recursion level. Collect ALL failed companies and clear
		# their log rows first, then recurse once over the whole batch.
		retry_list = []
		for item in result:
			retry_list.append(item[2])  # column 2 holds the company search name
			cur.execute("delete from crawlerfalselog where id = %d" % item[0])
		# BUG FIX: persist the deletes — without a commit, drivers that default
		# to autocommit-off keep the rows, causing an endless retry loop.
		conn.commit()
		return startcrawler(retry_list)
	else:
		print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),"财务数据爬取完成。。。")
		exit()
startcrawler(enterprise_list)