#!/usr/bin/env python
# -*- coding: utf-8 -*-

import datetime
import requests
from bs4 import BeautifulSoup
from unitool.celery import app

from ipproxy.models import IpProxyModel


def resolving_html(html):
    """Parse a kuaidaili free-proxy listing page into proxy dicts.

    Each row of the page's ``<tbody>`` is expected to contain, in order:
    ip, port, anonymity level, scheme (http/https), address, speed and
    last-verified time, one field per line of the row's text.

    :param html: decoded HTML of one listing page.
    :return: list of dicts with keys ``ip``, ``port``, ``nimingdu``,
        ``method``, ``addr``, ``speed``, ``last_time``.
    """
    # Explicit parser: the default auto-selection varies by environment
    # and emits a warning.
    soup = BeautifulSoup(html, "html.parser")
    data = []
    for tr in soup.tbody.children:
        # Rows that are whitespace/NavigableString or malformed won't
        # unpack into exactly 7 fields — skip them.
        try:
            ip, port, nimingdu, method, addr, speed, last_time = tr.text.strip('\n').split('\n')
        except (ValueError, AttributeError):
            continue
        data.append({
            "ip": ip, "port": port, "nimingdu": nimingdu,
            # Bug fix: was the literal string "method", which broke
            # check_url's scheme/URL construction downstream.
            "method": method, "addr": addr, "speed": speed,
            "last_time": last_time
        })
    return data

def check_url(data):
    """Check whether a proxy record actually works.

    Requests icanhazip.com through the proxy; the service echoes back the
    caller's public IP, so a working proxy returns the proxy's own IP.

    :param data: dict with at least ``method`` (scheme), ``ip`` and ``port``.
    :return: True if the proxy responded with status 200 and echoed its
        own IP, False otherwise (including any network failure).
    """
    url = "{}://icanhazip.com".format(data.get('method'))
    proxy = {data.get('method'): "{}://{}:{}".format(data.get('method'), data.get('ip'), data.get('port'))}
    # Dead/slow proxies raise (ConnectionError, Timeout, ...) — treat any
    # request failure as "proxy not usable" instead of crashing the task.
    # (Removed a leftover ipdb breakpoint that would hang the worker.)
    try:
        res = requests.get(url, proxies=proxy, timeout=5)
    except requests.RequestException:
        return False
    return res.status_code == 200 and res.text.strip('\n') == data.get('ip')

@app.task
def get_proxy_ip():
    """Celery task: crawl kuaidaili free-proxy pages and queue the rows.

    Fetches listing pages 1..100 (the site's pages are 1-based; page 0
    is invalid), parses each page and pushes the proxy dicts into
    ``IpProxyModel``.
    """
    url = 'https://www.kuaidaili.com/free/inha/{}/'
    for page in range(1, 101):
        page_url = url.format(page)
        res = requests.get(page_url)
        if res.status_code != 200:
            continue
        try:
            datas = resolving_html(res.content.decode(res.apparent_encoding))
        except Exception:
            # Best-effort crawl: log the failing page and move on.
            # Bug fix: previously fell through with `datas` unbound (or
            # stale) and still called lpush, and printed the unformatted
            # URL template.
            print(page_url)
            continue
        IpProxyModel().lpush(*datas)


@app.task
def check_proxy():
    """Celery task: sweep the stored proxies and re-queue the live ones.

    Pops each queued proxy once (count snapshotted up front), verifies it
    with :func:`check_url`, stamps a fresh ``last_time`` and pushes working
    proxies back; dead or erroring proxies are simply dropped.
    """
    model = IpProxyModel()
    for _ in range(model.length()):
        data = model.rpop()
        if not data:
            # List drained concurrently (another worker popped it) —
            # nothing left to check.
            break
        try:
            alive = check_url(data)
        except Exception:
            # One bad proxy (network error etc.) must not abort the
            # whole sweep; treat it as dead.
            alive = False
        if alive:
            # NOTE(review): '%y' is a two-digit year — kept as-is since
            # stored records already use this format; confirm before
            # switching to '%Y'.
            data.update(last_time=datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S'))
            model.lpushx(data)
