# -*- coding:utf-8 -*-
import time
import random
import sqlite3
import requests
from bs4 import BeautifulSoup
from list_UserAgents import UserAgent
"""
获取proxy--存储到数据库（已对proxy是否可用进行了测试）
可单独用，也可调用
"""


# Default HTTP request headers for scraping.
# A User-Agent is chosen at random from the project's UserAgent list once
# per process start, to make the scraper's requests look less bot-like.
headers = {
	"User-Agent": random.choice(UserAgent)
}


class SaveProxies(object):
	"""Scrape proxies, verify each one actually works, and persist them to SQLite.

	On construction, the `tb_proxies` table in `proxies.db` is dropped (if
	present) and recreated, so every run starts from a clean table. Call
	close() when done to commit pending inserts and release the connection.
	"""

	def __init__(self):
		self.conn = sqlite3.connect('proxies.db')
		self.cursor = self.conn.cursor()

		# Drop the table (if it exists) so each run starts from scratch.
		self.cursor.execute('drop table if exists tb_proxies')
		print('已删除数据表`tb_proxies`（若存在）')

		# Recreate the table: autoincrement id + "ip:port" text column.
		self.cursor.execute(
			'create table if not exists tb_proxies '
			'(id integer primary key autoincrement, proxy varchar(55) NULL)')
		print('已重建数据表`tb_proxies`')

	def close(self):
		"""Commit pending inserts and release the cursor and connection."""
		self.conn.commit()
		self.cursor.close()
		self.conn.close()

	def insert_sql(self, proxy):
		"""Insert a single "ip:port" string into `tb_proxies`.

		Uses a parameterized query instead of string interpolation, which
		avoids SQL injection / quoting breakage from scraped data.
		"""
		self.cursor.execute('insert into tb_proxies (proxy) values (?)', (proxy,))

	def save_proxies(self, url):
		"""Scrape proxies from `url`, test each one, and store the working ones.

		Parses the `#ip_list` table rows (xicidaili.com layout), builds an
		"ip:port" string from columns 1 and 2 of each row, probes the proxy
		against baidu.com, and inserts it only when the probe returns HTTP 200.
		Unreachable proxies are skipped (best-effort by design).
		"""
		doc_data = requests.get(url, headers=headers, timeout=10)
		soup = BeautifulSoup(doc_data.text, 'lxml')
		proxies_data = soup.select('#ip_list tr')

		for proxy_data in proxies_data:
			cells = proxy_data.select('td')
			# Header/malformed rows have fewer cells — skip them explicitly
			# instead of letting a bare except swallow the IndexError.
			if len(cells) < 3:
				continue
			proxy = ':'.join((cells[1].get_text(), cells[2].get_text()))
			try:
				# timeout keeps one dead proxy from hanging the whole run.
				response = requests.get(
					'https://www.baidu.com/',
					proxies={'http': 'http://' + proxy},
					timeout=5)
			except requests.RequestException:
				continue  # proxy unreachable — best-effort, move on
			if response.status_code == 200:
				print('正在将proxy`{0}`存储至数据库...'.format(proxy))
				self.insert_sql(proxy)


if __name__ == '__main__':
	# Ad-hoc smoke test: scrape page 1 of xicidaili and persist the results.
	saver = SaveProxies()
	last_page = 1
	for page_no in range(1, last_page + 1):
		saver.save_proxies('http://www.xicidaili.com/nn/{}'.format(page_no))
		# Randomized pause between pages to avoid hammering the site.
		time.sleep(random.randint(5, 7))
	saver.close()
