# encoding:utf-8

import sys
from bs4 import BeautifulSoup
import requests
import lxml
import Agent
import DataEntity
import json
import time
import LogHelper
from concurrent.futures import ThreadPoolExecutor 
from apscheduler.schedulers.blocking import BlockingScheduler

# Keyword titles per ranking board: realtime (now), today (day), 7-day (week).
# Refreshed on every geturl() call for the corresponding board.
nowlist=[]
daylist=[]
weeklist=[]

# "title,trend" strings per board; trend is 上升/下降/平, parallel to the title lists.
nowhotlist=[]
dayhotlist=[]
weekhotlist=[]

# Sorted [index, url, imgSrc, desc] records for each board's news links.
nowlinklist=[]
daylinklist=[]
weeklinklist=[]

# Unsorted scratch buffers filled concurrently by worker threads,
# then sorted by index into the *linklist lists above.
nowtemplist=[]
daytemplist=[]
weektemplist=[]

# Module-wide LogHelper instance; initialised in main().
logger=None

def geturl(url,listName):
	"""Fetch one Baidu ranking page and refresh the keyword list for that board.

	Also triggers gethotValue() (trend data) and getnewslink() (article
	links) on the same parsed page.

	Args:
		url: URL of the ranking-list page.
		listName: which board to refresh: 0 = realtime, 1 = today, 2 = 7-day.
	"""
	global nowlist,daylist,weeklist,logger
	headers={"User-Agent":Agent.get_user_agent_pc()}
	# timeout keeps one hung request from stalling the whole scheduled run
	response=requests.get(url=url,headers=headers,timeout=30)
	html=response.content
	# decode using the encoding sniffed from the body, not the HTTP header
	html_doc=str(html,response.apparent_encoding)
	soup = BeautifulSoup(html_doc,'lxml')
	keyword_nodes = soup.select('.keyword')  # renamed: don't shadow builtin `list`
	gethotValue(soup,listName)
	getnewslink(soup,listName)

	# reset the target board before re-filling it
	if listName==0:
		nowlist=[]
	elif listName==1:
		daylist=[]
	else:
		weeklist=[]

	for item in keyword_nodes:
		logger.logger.info("爬取一条新闻===》")
		for child in item.contents:
			# skip bare whitespace text nodes
			if child=='\n' or child==' ':
				continue
			# skip the "search" icon cell and empty anchors
			if child.text=='search' or child.text=='':
				continue
			if listName==0:
				nowlist.append(child.text)
			elif listName==1:
				daylist.append(child.text)
			else:
				weeklist.append(child.text)
					

#查询热点数据
#查询热点数据
def gethotValue(soup,listName):
	"""Collect hot-search values with their trend direction for one board.

	Each matched span's text (the hot value) is stored as "value,trend"
	in the board's *hotlist global, where trend is 上升 / 下降 / 平.

	Args:
		soup: BeautifulSoup of the ranking page.
		listName: 0 = realtime, 1 = today, 2 = 7-day.
	"""
	global nowhotlist,dayhotlist,weekhotlist
	trend_spans = soup.select('span[class^="icon-"]')
	if listName==0:
		nowhotlist=[]
	elif listName==1:
		dayhotlist=[]
	else:
		weekhotlist=[]
	for item in trend_spans:
		# attrs['class'] is a list of class names; membership testing is
		# robust even if the span carries extra classes (the old
		# str(...) == "['icon-fall']" comparison was not)
		classes=item.attrs.get('class') or []
		if 'icon-fall' in classes:
			state="下降"
		elif 'icon-rise' in classes:
			state="上升"
		else:
			state="平"
		if listName==0:
			nowhotlist.append(item.text+","+state)
		elif listName==1:
			dayhotlist.append(item.text+","+state)
		else:
			weekhotlist.append(item.text+","+state)

def getnewslink(soup,listName):
	"""Fan out article-page fetches for one board and collect sorted results.

	Submits getNewsImgAndDesc() for every "新闻" anchor to a small thread
	pool, waits for completion, then sorts the worker results by their
	ranking index into the board's *linklist global.

	Args:
		soup: BeautifulSoup of the ranking page.
		listName: 0 = realtime, 1 = today, 2 = 7-day.
	"""
	global nowlinklist,daylinklist,weeklinklist,nowtemplist,daytemplist,weektemplist

	anchors = soup.select('.tc a')  # renamed: don't shadow builtin `list`
	if listName==0:
		nowlinklist=[]
		nowtemplist=[]
	elif listName==1:
		daylinklist=[]
		daytemplist=[]
	else:
		weeklinklist=[]
		weektemplist=[]

	index=1
	# context manager guarantees shutdown(wait=True): all fetches finished
	# before we sort the temp buffers below
	with ThreadPoolExecutor(5) as mpool:
		for item in anchors:
			if item.text=="新闻":
				href = item.get('href')
				mpool.submit(getNewsImgAndDesc,href,listName,index)
				index+=1

	# workers append out of order; restore ranking order via record index s[0]
	if listName==0:
		nowlinklist.extend(sorted(nowtemplist, key=lambda s: s[0]))
	elif listName==1:
		daylinklist.extend(sorted(daytemplist, key=lambda s: s[0]))
	else:
		weeklinklist.extend(sorted(weektemplist, key=lambda s: s[0]))

#获取新闻图片和描述
#获取新闻图片和描述
def getNewsImgAndDesc(url,listName,index):
	"""Fetch one article page and record [index, url, imgSrc, descText].

	Runs on a worker thread (see getnewslink). On any failure a placeholder
	record [index, url, None, None] is still appended so ranking indexes
	stay aligned when the results are sorted.

	Args:
		url: article page URL.
		listName: 0 = realtime, 1 = today, 2 = 7-day (selects the temp list).
		index: 1-based ranking position used later as the sort key.
	"""
	global nowtemplist,daytemplist,weektemplist
	imgSrc=None
	descText=None
	try:
		headers={"User-Agent":Agent.get_user_agent_pc()}
		response=requests.get(url=url,headers=headers,timeout=30)
		html_doc=str(response.content,response.apparent_encoding)
		soup = BeautifulSoup(html_doc,'lxml')
		descSpan=soup.find('span', attrs={'class': 'c-font-normal c-color-text'})
		descText = descSpan.text
		imgDiv = soup.find('div', attrs={'class': 'c-img c-img3 c-img-radius-large'})
		# contents[1] is accessed, so at least TWO child nodes are required;
		# the old `> 0` check could still raise IndexError. Also guard
		# against the div being absent entirely.
		if imgDiv is not None and len(imgDiv.contents) > 1:
			imgSrc = imgDiv.contents[1].attrs.get('src')
	except Exception as e:
		logger.logger.error('result Error : '+ str(e))
		# fall back to the placeholder record
		imgSrc=None
		descText=None

	data = [index,url,imgSrc,descText]
	if listName==0:
		nowtemplist.append(data)
	elif listName==1:
		daytemplist.append(data)
	else:
		weektemplist.append(data)

#订阅新闻			
#订阅新闻
def subNews():
	"""Scrape all three Baidu ranking boards and write the result to news.json.

	Fetches the realtime, today and 7-day boards (filling the module
	globals via geturl), then packs every entry into a DataEntity dict
	with a single running index across all three boards, serialises the
	combined list as JSON and saves it to news.json.
	"""
	global logger

	t0 = time.time()
	logger.logger.info("开始订阅实时新闻===》")
	geturl("http://top.baidu.com/buzz?b=1&fr=topindex",0)
	logger.logger.info("开始订阅今日新闻===》")
	geturl("http://top.baidu.com/buzz?b=341&c=513&fr=topbuzz_b1",1)
	logger.logger.info("开始订阅7日新闻===》")
	geturl("http://top.baidu.com/buzz?b=42&c=513&fr=topbuzz_b341_c513",2)

	dlist=[]
	mindex = 1
	# the three boards share identical packing logic; mindex keeps running
	# across boards exactly as the original three copy-pasted loops did
	for boardId,titles,hotlist,linklist in (
			(0,nowlist,nowhotlist,nowlinklist),
			(1,daylist,dayhotlist,daylinklist),
			(2,weeklist,weekhotlist,weeklinklist)):
		for i in range(len(titles)):
			# hotlist entries are "value,trend" strings built in gethotValue
			arr=hotlist[i].split(',')
			# linklist record layout: [index, url, imgSrc, descText]
			news = DataEntity.DataEntity(mindex,
										boardId,
										titles[i],
										linklist[i][3],
										linklist[i][1],
										arr[0],
										arr[1],
										linklist[i][2])
			dlist.append(news.__dict__)
			mindex+=1

	print("list==>"+str(len(dlist)))
	try:
		resStr=str(json.dumps(dlist,ensure_ascii=False))
		print(resStr)

		filename="news.json"
		with open(filename, 'w',encoding='utf-8') as file_object:
			file_object.write(resStr)
			logger.logger.info("保存完成===》")
	except Exception as e:
		logger.logger.error('result Error : '+ str(e))

	logger.logger.info("本次耗时计算===》")
	logger.logger.info(time.time() - t0)

def main():
	"""Entry point: set up logging, scrape once, then repeat every 10 minutes."""
	global logger
	logger = LogHelper.LogHelper("Reptile.log")

	# run one scrape up front so data exists before the first interval fires
	subNews()
	try:
		sched = BlockingScheduler()
		sched.add_job(subNews, 'interval', minutes=10)
		sched.start()  # blocks until the process is stopped
	except Exception as e:
		logger.logger.error('main Error : ' + str(e))


# Script entry guard: only start the scraper when run directly.
if __name__ == "__main__":
    main()



