#!/usr/bin/env python
# -*- coding: utf-8 -*-

#######################
# 天眼查全国企业信息查询系统
# 2016-07-27
#######################
import sys
import time
import re
import urllib
import urllib.request
from bs4 import BeautifulSoup
import io
import os
dirs = os.path.abspath(os.path.dirname(__file__)+"/../Config")
os.sys.path.append(dirs)   # add the ../Config directory to Python's module search path
# os.sys.path.append("D:/job/crawler/Config")
from config import conn,webdriver,logpath,Proxy,ProxyType
from selenium.webdriver.common.keys import Keys  #selenium 输入需要引入keys包
sys.setrecursionlimit(1000000)  # raise the maximum recursion depth to 1,000,000

##多进程
from multiprocessing import Pool
import random

import socket
# Local machine IP and operator tag, stored alongside every scraped row.
ip = socket.gethostbyname(socket.gethostname()) 
QQ = "JLP"
# Shared database cursor (connection comes from Config/config.py).
cur = conn.cursor()
# Proxy cache shared with getrandomip(): candidate "host:port" strings,
# the last verified proxy, and the epoch second of the last refresh
# (the cache is considered stale after 15 minutes).
iplists = []
aproxyip = ""
getrandomiptime = 0
def getrandomip(d):
	"""Return a verified HTTP proxy "host:port" scraped from xicidaili.com.

	Scrapes proxy-list pages ``d`` .. ``d+29``, extracts the host/port
	columns, and verifies each candidate by fetching a tianyancha search
	page THROUGH the proxy and looking for the expected marker text.
	The first proxy that passes is cached in the module global
	``aproxyip`` and reused for 15 minutes.

	:param d: first proxy-list page number to scrape.
	:return: a verified "host:port" string, or the cached ``aproxyip``
	         (which may be "" if no proxy has ever been verified).
	"""
	global getrandomiptime
	global iplists
	global aproxyip
	if int(time.time()) - getrandomiptime > 900:
		# Record when this refresh started.  The original never updated the
		# timestamp, so the 15-minute cache check could never take effect.
		getrandomiptime = int(time.time())
		user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
		headers = {'User-Agent': user_agent}
		for x in range(d, d + 30):
			time.sleep(10)  # be polite to the proxy-list site
			print("page %s" % str(x))
			url = 'http://www.xicidaili.com/nn/%s' % str(x)
			req = urllib.request.Request(url, headers=headers)
			response = urllib.request.urlopen(req, timeout=15)
			proxypage = response.read()
			soup = BeautifulSoup(proxypage, "html.parser")
			trlist = soup.find_all("tr", class_="odd")
			# Columns 1 and 2 of each row are host and port.
			iplists = []
			for tr in trlist:
				tdlist = tr.find_all("td")
				iplists.append(tdlist[1].text.strip() + ":" + tdlist[2].text.strip())

			# Verify each candidate by fetching a known page through it.
			for proxyip in iplists:
				print(proxyip)
				try:
					url = 'http://www.tianyancha.com/search?key=baidu'
					req = urllib.request.Request(url, headers=headers)
					# Build an opener that actually routes through the proxy.
					# The original created the ProxyHandler but never used it,
					# so it was verifying a direct connection instead.
					proxy_support = urllib.request.ProxyHandler({"http": str(proxyip)})
					opener = urllib.request.build_opener(proxy_support)
					response = opener.open(req, timeout=10)
					the_page = response.read()
					aa = BeautifulSoup(the_page, "html.parser")
					pos = str(aa).find("百度")
					if pos > -1:
						print("available ip:", proxyip)
						aproxyip = proxyip
						return aproxyip
					else:
						print("checkout proxyip", proxyip, "Verify Failure!", pos)
				except Exception as e:
					print("checkout proxyip", proxyip, "Request Failure!", e)
		# Every page exhausted without a verified proxy: fall back to the
		# previous value instead of implicitly returning None.
		return aproxyip
	else:
		return aproxyip
def get_enterprise_data(name):
	"""Worker loop: scrape company details from tianyancha.com.

	Repeatedly claims one pending company (status=1) from ``enterpriselist``,
	searches for it on www.tianyancha.com through a proxied PhantomJS
	browser, parses the detail page, and upserts the result into
	``enterpriseinfo``.  ``enterpriselist`` status codes: 1 = pending,
	2 = in progress, 3 = done, 4 = not found on the site.  The process
	exits when no pending rows remain.

	:param name: worker index; selects the PhantomJS install directory
	             (``D:/phantomjs<name+8>``) and tags log output.
	"""
	# A loop replaces the original unbounded tail recursion, which needed
	# the module-level sys.setrecursionlimit() hack to survive long runs.
	while True:
		time.sleep(5)
		phantomjspath = "D:/phantomjs%s/bin/phantomjs" % str(int(name) + 8)
		driver = webdriver.PhantomJS(executable_path=phantomjspath)
		# Re-open the session with a freshly verified proxy so the browser
		# does not keep using a cached (possibly banned) identity.
		proxy = webdriver.Proxy()
		proxy.proxy_type = ProxyType.MANUAL
		proxy.http_proxy = str(getrandomip(1))
		proxy.add_to_capabilities(webdriver.DesiredCapabilities.PHANTOMJS)
		driver.start_session(webdriver.DesiredCapabilities.PHANTOMJS)
		driver.implicitly_wait(60)
		driver.get("http://www.tianyancha.com")
		# Claim one pending company name.  Parameterized queries replace the
		# original string interpolation, which broke (and was injectable)
		# whenever a company name contained a quote.
		cur.execute("select ename from enterpriselist where status=1 order by id desc limit 1")
		res = cur.fetchone()
		if res is not None:
			searchname = res[0]
		else:
			print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), "All Completed. . .")
			exit()
		# Status lock: mark the row as in progress.
		cur.execute("update enterpriselist set status=2 where ename=%s", (searchname,))
		conn.commit()
		print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), "Search in the home page. . .", name)
		try:
			searchbox = driver.find_element_by_id("live-search")
			searchbox.clear()
			searchbox.send_keys(searchname)
			searchbox.send_keys(Keys.ENTER)
			time.sleep(5)
			try:
				print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), "Open the Company page. . .", name)
				driver.find_element_by_class_name("search_result_container").find_element_by_tag_name("a").click()
				time.sleep(5)
				handles = driver.window_handles  # result page opens in a new window
				driver.switch_to_window(handles[-1])
				soup = BeautifulSoup(driver.page_source, "html.parser")
				base_lis = soup.find("div", class_="company-content")

				def _cell(css_class):
					# Best-effort text of one detail-table cell; "" when absent.
					try:
						return base_lis.find("td", class_=css_class).text.strip()
					except Exception:
						return ""

				# Legal person sits inside a nested <a>, unlike the other cells.
				try:
					qyfr = base_lis.find("td", class_="td-legalPersonName-value").find("a", class_="ng-scope").text.strip()
				except Exception:
					qyfr = ""
				zczb = _cell("td-regCapital-value")  # registered capital
				zt = _cell("td-regStatus-value")     # registration status
				zcsj = _cell("td-regTime-value")     # registration date

				# Remaining "label：value" cells become a lookup dict.
				base = {}
				for base_li in base_lis.find_all("td", class_="basic-td"):
					base_liarr = base_li.text.split("：")
					base[base_liarr[0].strip()] = base_liarr[1].strip()

				fullname = soup.find("div", class_="company_info_text").find("p", class_="ng-binding").text.strip()
				# Upsert: update when a row for this company already exists.
				cur.execute("select * from enterpriseinfo where ename like %s", ("%" + fullname + "%",))
				result = cur.fetchone()
				print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), "data processing. . .", name)
				values = (searchname, fullname, base.get("统一信用代码", ""), base.get("工商注册号", ""),
					base.get("组织机构代码", ""), zt, base.get("企业类型", ""), zcsj, qyfr, zczb,
					base.get("营业期限", ""), base.get("登记机关", ""), base.get("发照日期", ""),
					base.get("公司规模", ""), base.get("行业", ""), base.get("英文名", ""),
					base.get("注册地址", ""), base.get("经营范围", ""), 1, ip, QQ)
				if result:
					sql = ("update enterpriseinfo set `searchname`=%s,`ename`=%s,`creditcode`=%s,"
						"`regnumber`=%s,`organizationcode`=%s,`managementforms`=%s,`companytype`=%s,"
						"`RegisterDate`=%s,`legalperson`=%s,`registeredcapital`=%s,`busnissallotedtime`=%s,"
						"`registeroffice`=%s,`dateofissue`=%s,`scale`=%s,`industry`=%s,`Englishname`=%s,"
						"`businessaddress`=%s,`Businessscope`=%s,`status`=%s,`ip`=%s,`QQ`=%s where eid=%s")
					params = values + (result[0],)
				else:
					sql = ("insert into enterpriseinfo (`searchname`,`ename`,`creditcode`,`regnumber`,"
						"`organizationcode`,`managementforms`,`companytype`,`RegisterDate`,`legalperson`,"
						"`registeredcapital`,`busnissallotedtime`,`registeroffice`,`dateofissue`,`scale`,"
						"`industry`,`Englishname`,`businessaddress`,`Businessscope`,`status`,`ip`,`QQ`) "
						"values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
					params = values
				print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), "Add company basic data to the database", name)
				try:
					cur.execute(sql, params)
					conn.commit()
					# Unlock: 3 = completed.
					cur.execute("update enterpriselist set status=3 where ename=%s", (searchname,))
					conn.commit()
					driver.close()
				except Exception as e:
					print("Failed to add data...", sql, e, name)
					# Unlock back to the initial state (1) so the row can be
					# retried.  The original set status=3 here, contradicting
					# its own comment and silently marking failed rows as done.
					cur.execute("update enterpriselist set status=1 where ename=%s", (searchname,))
					conn.commit()
					driver.close()
			except Exception as e:
				print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), "data not found...", e, name)
				driver.close()
				# Unlock: 4 = company not found on the site.
				cur.execute("update enterpriselist set status=4 where ename=%s", (searchname,))
				conn.commit()
		except Exception as e:
			print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), "Unknown Error", e, name)
			# Unlock back to the initial state so another worker retries it.
			cur.execute("update enterpriselist set status=1 where ename=%s", (searchname,))
			conn.commit()
		driver.quit()
# Warm-up wrapper around the detail-page scraper (appears unused: the module
# drives get_enterprise_data directly).
def dealData(name):
	"""Open a throwaway PhantomJS session, then run the scraper for *name*.

	:param name: worker index; selects the PhantomJS install directory
	             (``D:/phantomjs<name+4>``).
	"""
	phantomjspath = "D:/phantomjs%s/bin/phantomjs" % str(int(name) + 4)
	driver = webdriver.PhantomJS(executable_path=phantomjspath)
	driver.implicitly_wait(60)
	driver.get("http://www.tianyancha.com")
	time.sleep(5)
	# BUG FIX: get_enterprise_data takes a single argument, so the original
	# call get_enterprise_data(name, driver) raised TypeError.  The scraper
	# creates its own browser, so the warm-up driver is released first
	# (the original leaked it).
	driver.quit()
	get_enterprise_data(name)
# NOTE: an unconditional debug call "get_enterprise_data(1); exit()" used to
# live here.  It has been removed: it started scraping (and then terminated
# the interpreter) at import time, so the multiprocessing Pool under the
# __main__ guard below could never run -- and under Windows' spawn start
# method every pool worker re-imports this module, re-triggering it.
def long_time_task(name):
	"""Pool worker entry point: jittered start, then run the scraper loop.

	:param name: worker index, forwarded to get_enterprise_data.
	"""
	print('Run task %s (%s)...' % (name, os.getpid()))
	start = time.time()
	time.sleep(random.random() * 3)  # stagger worker start-up
	end = time.time()
	print('Task %s runs %0.2f seconds.' % (name, (end - start)))
	get_enterprise_data(name)
	# BUG FIX: the original then called driver.close()/driver.quit() on a
	# name that is not defined in this scope (NameError).  Each scraper
	# iteration manages its own browser, so no cleanup is needed here.

if __name__ == '__main__':
	# Spawn four scraper workers; each one drives its own PhantomJS browser.
	print(f'Parent process {os.getpid()}.')
	pool = Pool(4)
	for worker_id in range(4):
		pool.apply_async(long_time_task, args=(worker_id,))
	print('Waiting for all subprocesses done...')
	pool.close()
	pool.join()
	print('All subprocesses done.')