#! /usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright (c) Huoty, All rights reserved
# Author: Huoty <sudohuoty@gmail.com>
# CreateTime: 2018-01-20 20:16:49

import logging
import sqlite3
import contextlib
from collections import namedtuple

import requests
import pandas as pd
from requests import exceptions

from .utils import Throttle


# Schema for the ``proxy`` table, applied once at startup.
# NOTE(review): pandas ``to_sql(..., if_exists="replace")`` used by the
# fetchers drops and recreates this table with its own schema, so the
# ``id`` column only survives until the first refresh — confirm whether
# the ``id`` column is still needed at all.
create_table_sql = """
create table if not exists proxy
(
    id        int        not null  primary key,
    addr      char(256)  not null,
    type      char(12)   not null,
    location  char(256)  null
)
"""

# Lightweight record type returned by ProxyPool.get().
Proxy = namedtuple('Proxy', ['addr', 'type', 'location'])


class ProxyPool(object):
    """Maintain a pool of free HTTP proxies in a local SQLite database.

    Proxies are scraped from public free-proxy listing sites, stored in
    the ``proxy`` table (columns ``addr``, ``type``, ``location``), and
    can then be fetched at random, health-checked and pruned.
    """

    # Minimum delay (seconds) between requests to the same domain,
    # enforced by the Throttle helper.
    _CRAWL_DELAY = 12

    def __init__(self, db_path):
        """
        :param db_path: path of the SQLite database file
                        (created if it does not exist yet)
        """
        self.db_path = db_path
        self.db_conn = sqlite3.connect(self.db_path)

        self.log = logging.getLogger("Tronitor.ProxyPool")

        # Shared session with a browser-like User-Agent: some of the
        # scraped sites reject the default ``requests`` User-Agent.
        self._session = requests.Session()
        self._session.headers.update({
            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:49.0) "
                          "Gecko/20100101 Firefox/49.0",
        })

        self._init_database()

        self._throttle = Throttle(self._CRAWL_DELAY)

    def _init_database(self):
        """Create the ``proxy`` table if it does not exist yet."""
        self.db_conn.execute(create_table_sql)
        self.db_conn.commit()

    def _request_page(self, url):
        """Download *url* (rate limited) and return the decoded HTML."""
        self._throttle(url)
        with contextlib.closing(self._session.get(url, timeout=60)) as resp:
            resp.raise_for_status()
            return resp.content.decode('utf-8')

    def _fetch_from_kuaidaili(self):
        """Scrape pages 1-3 of kuaidaili; return a DataFrame with columns
        ``addr``, ``type``, ``location``."""
        url_format = "https://www.kuaidaili.com/free/inha/{}/".format
        frames = []
        for page_no in range(1, 4):
            page = self._request_page(url_format(page_no))
            df = pd.read_html(page)[0]
            # Site column order: ip, port, (anonymity), type, location.
            df = df.iloc[:, [0, 1, 3, 4]].copy()
            df.columns = ["ip", "port", "type", "location"]
            df["addr"] = df[["ip", "port"]].apply(lambda x: "{}:{}".format(*x), axis=1)
            df["location"] = df.location.apply(lambda x: "-".join(x.split()))
            frames.append(df[["addr", "type", "location"]])
        return pd.concat(frames, ignore_index=True)

    def _fetch_from_xicidaili(self):
        """Scrape pages 1-2 of xicidaili; return a DataFrame with columns
        ``addr``, ``type``, ``location``."""
        url_format = "http://www.xicidaili.com/nn/{}".format
        frames = []
        for page_no in range(1, 3):
            page = self._request_page(url_format(page_no))
            df = pd.read_html(page)[0]
            # Skip the first row; keep ip, port, location, type columns.
            df = df.iloc[1:, [1, 2, 3, 5]].copy()
            df.columns = ["ip", "port", "location", "type"]
            df["addr"] = df[["ip", "port"]].apply(lambda x: "{}:{}".format(*x), axis=1)
            frames.append(df[["addr", "type", "location"]])
        return pd.concat(frames, ignore_index=True)

    def _fetch_from_goubanjia(self):
        """Scrape pages 1-3 of goubanjia; return a DataFrame with columns
        ``addr``, ``type``, ``location``.  Currently not used by gen()."""
        url_format = "http://www.goubanjia.com/free/gngn/index{}.shtml".format
        frames = []
        for page_no in range(1, 4):
            page = self._request_page(url_format(page_no))
            df = pd.read_html(page)[0]
            # .copy() so the column assignment below does not write into
            # a view of the parsed table.
            df = df.iloc[:, [0, 2, 3]].copy()
            df.columns = ["addr", "type", "location"]
            df["location"] = df.location.apply(lambda x: "-".join(x.split()))
            frames.append(df)
        return pd.concat(frames, ignore_index=True)

    def _save_proxies(self, df):
        """Write *df* (deduplicated by addr) as the new pool content."""
        df = df.drop_duplicates(subset="addr")
        df.to_sql("proxy", self.db_conn, index=False, if_exists="replace")

    def gen(self):
        """Refresh the pool from all enabled sources.

        Bug fix: the fetchers previously wrote every page with
        ``if_exists="replace"``, so each page (and each source) wiped the
        previous one and only the very last page survived.  Pages are now
        collected from all sources and written in a single pass.
        """
        frames = []
        for fetch in (self._fetch_from_kuaidaili, self._fetch_from_xicidaili):
            try:
                frames.append(fetch())
            except exceptions.RequestException as e:
                # Best effort: one unreachable source should not abort
                # the whole refresh.
                self.log.warning("Fetching proxies failed: %s", e)
        if frames:
            self._save_proxies(pd.concat(frames, ignore_index=True))

    def get(self):
        """Return one random :class:`Proxy` from the pool, or ``None``
        when the pool is empty."""
        sql = "SELECT addr, type, location FROM proxy ORDER BY RANDOM() LIMIT 1"
        row = self.db_conn.execute(sql).fetchone()
        return Proxy(*row) if row else None

    def delete(self, addr):
        """Remove the proxy *addr* from the pool.

        Uses a parameterized query: the original interpolated *addr* into
        the SQL text, which breaks on quotes and allows SQL injection.
        """
        self.log.info("Deleting proxy %s", addr)
        self.db_conn.execute("delete from proxy where addr = ?", (addr,))
        self.db_conn.commit()

    def check(self, check_url=None):
        """Probe every proxy against *check_url* and delete dead ones.

        :param check_url: URL fetched through each proxy
                          (default ``http://www.baidu.com``)
        """
        check_url = check_url or "http://www.baidu.com"
        # fetchall() first: delete() below writes through the same
        # connection, which must not happen while a cursor is still
        # iterating over the same table.
        rows = self.db_conn.execute(
            "select addr, type, location from proxy").fetchall()
        for addr, proxy_type, location in rows:
            self.log.info("Checking %s, %s, %s", addr, proxy_type, location)
            proxies = {
                "http": "http://" + addr,
                "https": "https://" + addr,
            }
            try:
                # closing() so the probe response is always released.
                with contextlib.closing(
                        requests.get(check_url, proxies=proxies, timeout=3)):
                    pass
            except (exceptions.Timeout, exceptions.ProxyError) as e:
                self.log.debug(e)
                self.delete(addr)
            except Exception as e:
                # Other failures (DNS, SSL, ...) are logged but the proxy
                # is kept, matching the original best-effort behaviour.
                self.log.warning(e)

    def get_all(self):
        """Return the whole pool as a pandas DataFrame."""
        sql = "select addr, type, location from proxy"
        return pd.read_sql_query(sql, self.db_conn)
