from collections import deque
from urllib.request import *

import mysql.connector
from lxml import etree


def insert_data_into_mysql(*data):
	"""Bulk-insert scraped news rows into the `fenghuang_new` table.

	Each positional argument is one row: a (title, url, time) sequence
	matching the three parameterized columns.  Does nothing when called
	with no rows, avoiding a pointless DB round-trip.

	NOTE(review): credentials are hard-coded here — consider moving them
	to configuration.
	"""
	if not data:
		return
	conn = mysql.connector.connect(host='127.0.0.1', port='3306', database='spider', user='root', password='123',
								   use_unicode=True)
	try:
		c = conn.cursor()
		try:
			# Parameterized query — values are escaped by the driver.
			c.executemany('INSERT INTO fenghuang_new (`title`, `url`, `time`) VALUES (%s, %s, %s)', data)
			conn.commit()
		finally:
			# Close cursor/connection even if executemany or commit raises,
			# so a failed insert does not leak the connection.
			c.close()
	finally:
		conn.close()


def new_page_info(url, page_data):
	"""Parse one news-list page: extract article rows and enqueue pagination.

	Args:
		url: the URL this page was fetched from (used as the referer for
			the enqueued next-page request).
		page_data: raw HTML bytes/str of the page.

	Side effects: appends [next_page_href, url] pairs to the module-level
	`url_queue`, and inserts extracted (title, href, time) rows via
	insert_data_into_mysql.
	"""
	dom = etree.HTML(page_data)
	new_urls = dom.xpath('//div[@class="newsList"]//li/a/@href')
	new_texts = dom.xpath('//div[@class="newsList"]//li/a/text()')
	new_times = dom.xpath('//div[@class="newsList"]//li/h4/text()')
	# zip stops at the shortest list, so a malformed page with mismatched
	# counts no longer raises IndexError (the old range(len(...)) loop did).
	result_array = [[text, href, when] for text, href, when in zip(new_texts, new_urls, new_times)]
	for e in dom.xpath('//div[@class="m_page"]//a'):
		# Anchor text may be None for image-only links; '下一页' = "next page".
		if e.text is not None and '下一页' in e.text:
			url_queue.append([e.attrib['href'], url])
	if result_array:
		insert_data_into_mysql(*result_array)


def get_web_content(url, refer):
	"""Download *url* (sending *refer* as the Referer header) and pass the
	response body to new_page_info for parsing.
	"""
	request_headers = {
		'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36',
		'Connection': 'keep-alive',
		'Referer': refer,
	}
	req = Request(url, headers=request_headers)
	print('request: ', url)
	# Context manager guarantees the HTTP response is closed after reading.
	with urlopen(req) as response:
		new_page_info(url, response.read())


# Crawl frontier shared with new_page_info(): FIFO of [url, referer] pairs.
# Must stay at module level — new_page_info appends discovered pages to it.
url_queue = deque()


def _seed_queue(start_date=20190601, days=3):
	"""Enqueue the first list page for *days* consecutive dates.

	NOTE(review): the date is advanced by plain integer addition on a
	YYYYMMDD value, so it is only valid while the range stays inside one
	month — matches the original behavior for the default 3 days.
	"""
	template = 'http://news.ifeng.com/listpage/11502/{date}/1/rtlist.shtml'
	for offset in range(days):
		page_url = template.format(date=str(start_date + offset))
		# The seed request uses the page itself as its referer.
		url_queue.append([page_url, page_url])


if __name__ == '__main__':
	# Guarded so importing this module no longer starts the crawl.
	_seed_queue()
	while url_queue:
		get_web_content(*url_queue.popleft())
