import configparser
import random
import datetime
import time
import os
import requests
import re
import json
import sqlite3
from lxml import etree
from multiprocessing import Process, Pool,Manager, JoinableQueue, Queue

# Module-level SQLite connection shared by every function in this file.
# NOTE(review): this connection is created in the parent process before the
# Pool forks its workers; sqlite3 connections should not be shared across
# processes — each worker ought to open its own connection. TODO confirm.
connect = sqlite3.connect(database='proxies.db')


def compound(queue, ):
    """Re-queue previously available proxies for re-verification.

    Reads every row of the ``available`` table, pushes each onto *queue*
    as a dict with string 'protocol', 'ip' and 'port' keys, then clears
    the table so verify() can repopulate it with still-working proxies.

    :param queue: shared queue consumed by verify()
    """
    print('从数据库中获取可用的代理进行检查')
    cursor = connect.cursor()
    try:
        cursor.execute('select * from available;')
        for proxy in cursor.fetchall():
            # The port column is declared `int` in the schema; stringify it
            # so verify() can concatenate it into "ip:port" without a
            # TypeError.
            pro_dict = {'protocol': proxy[0], 'ip': proxy[1], 'port': str(proxy[2])}
            queue.put(pro_dict)
            print('from db ', end='\t')
            print(pro_dict)
        cursor.execute('delete from available;')
        connect.commit()
    finally:
        # Close the cursor even if a queue/db operation raises.
        cursor.close()


def verify(queue):
    """Consume proxies from *queue* forever and record working ones.

    Each proxy is probed via http://httpbin.org/ip. Reachable proxies are
    inserted into the ``available`` table with hidden=1 when the response
    echoes the proxy's own IP (high anonymity — the origin the server saw
    is the proxy, not us) and hidden=0 otherwise.

    :param queue: shared JoinableQueue of proxy dicts
    """
    cursor = connect.cursor()
    sql = 'insert into available (protocol,ip,port,hidden)  values (?,?,?,?);'
    while True:
        proxy = queue.get()
        try:
            res = requests.get("http://httpbin.org/ip", timeout=10,
                               proxies={proxy['protocol']: proxy['ip'] + ":" + str(proxy['port'])})
        except Exception:
            # Unreachable proxy: still balance the get() with task_done(),
            # otherwise a queue.join() elsewhere would deadlock.
            queue.task_done()
            continue
        encoding = 'utf-8' if res.apparent_encoding is None else res.apparent_encoding
        html = res.content.decode(encoding)
        tmp = [proxy['protocol'], proxy['ip'], proxy['port']]
        # Escape the IP before searching: in a raw regex "1.2.3.4" every
        # "." matches any character, giving false positives.
        if re.search(re.escape(proxy['ip']), html):
            tmp.append(1)
        else:
            tmp.append(0)
        # (protocol, ip, port, hidden)
        print('verify {}'.format(tmp))
        cursor.execute(sql, tmp)
        connect.commit()
        queue.task_done()


def save(proxies):
    """Upsert the freshly scraped *proxies* into the all_proxies table.

    New (protocol, ip, port) triples are inserted with the current
    timestamp; already-known triples get their get_time and website
    columns refreshed instead of being duplicated.

    :param proxies: list of dicts with 'protocol', 'ip', 'port', 'website'
    """
    cursor = connect.cursor()
    update = []
    add = []
    select_sql = 'select * from all_proxies where protocol=? and ip=? and port=?;'
    for proxy in proxies:
        cursor.execute(select_sql, (proxy['protocol'], proxy['ip'], proxy['port']))
        # BUG FIX: sqlite3 sets cursor.rowcount to -1 for SELECT statements,
        # so the previous rowcount checks never detected existing rows and
        # every proxy was re-inserted. Use fetchone() to decide instead.
        if cursor.fetchone() is None:
            add.append(proxy)
        else:
            update.append(proxy)
    now = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    for proxy in add:
        sql = 'insert into all_proxies values (?,?,?,?,?)'
        cursor.execute(sql, (proxy['protocol'], proxy['ip'], proxy['port'], proxy['website'], now))
    for proxy in update:
        sql = 'update all_proxies set get_time=?,website=? where protocol=? and ip=? and port=?'
        cursor.execute(sql, (now, proxy['website'], proxy['protocol'], proxy['ip'], proxy['port']))
    connect.commit()
    cursor.close()


def handle_url(website, headers, queue):
    """Scrape every URL configured for one proxy-list website.

    Discovered proxies are pushed onto *queue* for verification and the
    whole batch is persisted via save().

    :param website: dict with 'urls', 'website' and the xpath rules
                    'ip', 'port', 'protocol'
    :param headers: list of User-Agent strings; one picked at random per
                    request to look less like a bot
    :param queue: shared queue consumed by verify()
    """
    def _clean(values):
        # Collapse the whitespace proxy-list sites pad their cells with.
        return [v.replace(' ', '').replace('\r', '').replace('\n', '') for v in values]

    proxies = []
    for url in website['urls']:
        header = {"User-Agent": random.choice(headers)}
        try:
            response = requests.get(url, headers=header, timeout=5)
        except Exception:
            print(url)
            continue
        # response.encoding can be None when the server sends no charset;
        # fall back to utf-8 instead of crashing in decode().
        encoding = response.encoding or 'utf-8'
        html = etree.HTML(response.content.decode(encoding))
        ips = _clean(html.xpath(website['ip']))
        ports = _clean(html.xpath(website['port']))
        if website['protocol'] == 'http' or website['protocol'] == 'https':
            # Fixed protocol for the whole site.
            protocols = [website['protocol'] for _ in range(len(ips))]
        else:
            protocols = _clean(html.xpath(website['protocol']))
        # zip() stops at the shortest list, so a malformed page can no
        # longer raise IndexError when ports/protocols come up short.
        for ip, port, protocol_field in zip(ips, ports, protocols):
            # data5u may list several protocols per row, e.g. "http,https"
            for protocol in protocol_field.lower().split(','):
                if protocol == 'yes':
                    protocol = 'https'
                elif protocol == 'no':
                    protocol = 'http'
                # Hand the proxy to the verification workers.
                print("{}://{}:{}".format(protocol, ip, port))
                proxy = {'protocol': protocol, 'ip': ip, 'port': port, 'website': website['website']}
                queue.put(proxy)
                proxies.append(proxy)
                print('handle {}'.format(proxy))
    save(proxies)


def init_conf():
    """Load ./proxies.ini and build the scraping plan.

    Sections named ``website-<name>`` describe one proxy-list site each:
    ``url-*`` keys are either a literal URL or ``template,start,stop``
    (the template is expanded with page numbers ``start..stop-1``), and
    the ``rule-*`` keys hold the xpath extraction rules. A ``headers``
    section lists User-Agent strings.

    :return: (websites, headers) — list of website dicts and list of
             User-Agent strings
    """
    print('init config')
    websites = []
    headers = []
    conf = configparser.ConfigParser()
    conf.read('./proxies.ini')
    for section in conf.sections():
        # One section per proxy-list website.
        if section.startswith('website'):
            website = {'urls': [], 'website': section.split('-')[1]}
            # conf.items() already yields the values; no need to re-fetch
            # them with conf.get().
            for key, value in conf.items(section):
                if key.startswith('url-'):
                    parts = value.split(',')
                    if len(parts) == 1:
                        # A single literal URL.
                        website['urls'].append(parts[0])
                    elif len(parts) == 3:
                        # template,start,stop -> expand page numbers.
                        website['urls'].extend(parts[0].format(page)
                                               for page in range(int(parts[1]), int(parts[2])))
                elif key == 'rule-protocol':
                    website['protocol'] = value
                elif key == 'rule-ip':
                    website['ip'] = value
                elif key == 'rule-port':
                    website['port'] = value
            websites.append(website)
        # Shared User-Agent pool.
        elif section == 'headers':
            for key, value in conf.items(section):
                headers.append(value)
    return websites, headers


def init_sqlite():
    """Create the two proxy tables if they do not exist yet.

    all_proxies: every proxy ever scraped
                 (protocol, ip, port, website, get_time)
    available:   proxies that passed verification
                 (protocol, ip, port, err_count, hidden) where hidden=1
                 marks a high-anonymity proxy.
    """
    ddl_statements = (
        'create table if not exists all_proxies (protocol text, ip text, port int, website text, get_time text);',
        'create table if not exists available (protocol text, ip text, port int, err_count int, hidden int);',
    )
    cursor = connect.cursor()
    for statement in ddl_statements:
        cursor.execute(statement)
    cursor.close()


if __name__ == '__main__':

    websites, headers = init_conf()
    init_sqlite()
    # A Manager queue proxy is picklable and can be shared with Pool workers
    # (a plain multiprocessing.Queue cannot be passed through apply_async).
    proxy_queue = Manager().Queue()
    # FIX: the pool must be large enough to hold every task at once —
    # verify() workers loop forever and never free their slot, so with a
    # fixed Pool(4) the verify tasks could starve behind the scrapers and
    # never be scheduled.
    pool = Pool(1 + len(websites) + os.cpu_count())
    # Re-check proxies that were marked available on a previous run.
    pool.apply_async(func=compound, args=(proxy_queue,))
    for website in websites:
        pool.apply_async(func=handle_url, args=(website, headers, proxy_queue))
    # One verifier per CPU; each loops forever consuming the queue.
    for _ in range(os.cpu_count()):
        pool.apply_async(func=verify, args=(proxy_queue,))
    pool.close()
    # NOTE: verify() never returns, so join() blocks indefinitely — the
    # script keeps validating proxies until killed.
    pool.join()
