#!/usr/bin/env python
# coding: utf-8

import PyV8
import requests
from pyquery import PyQuery
import re
import redis


class Proxy(object):
    """Crawl free proxy listings from spys.one and store them in Redis.

    The site obfuscates each port number behind small JavaScript
    expressions; a PyV8 context first evaluates the page's ``eval(...)``
    bootstrap script, then evaluates each ``(...)`` expression to recover
    the port digits.  Results are stored as ``type://ip:port`` strings in
    a Redis set.

    NOTE: this is Python 2 code (``iteritems``, PyV8) and must stay so.
    """

    # Matches the page's eval(...) bootstrap script defining the variables
    # that the per-row port expressions reference.
    eval_reg = r'eval.*\)'

    # Matches each parenthesised JS expression yielding one port digit group.
    port_reg = r'\(.*?\)'

    # Kept at class level for backward compatibility; __init__ rebinds
    # per-instance copies so instances no longer share mutable state.
    proxies = {}

    proxy_list = []

    verify_address = ''  # reserved; not used by any method yet

    country_code_list = ['US', 'HK', 'GB', 'SG', 'ID', 'TH', 'IN', 'CA']

    proxy_list_base_url = 'http://spys.one/proxys/'

    redis_key = 'mdpi_proxy_pool'

    def __init__(self):
        # Fix: the class-level dict/list are mutable class attributes and
        # would be shared by every Proxy instance; give each instance its own.
        self.proxies = {}
        self.proxy_list = []
        self.redis = redis.Redis('localhost', db=6)

    def crawl(self):
        """Fetch every per-country listing page, extract proxies from each,
        then persist the delay-sorted result to Redis."""
        for country_code in self.country_code_list:
            url = self.proxy_list_base_url + country_code
            try:
                # Timeout so one dead endpoint cannot hang the whole crawl;
                # a network failure on one country should not abort the rest.
                req = requests.get(url, timeout=30)
            except requests.RequestException:
                continue
            if req.status_code != 200:
                continue
            self.extract(req.content)

        def _delay_key(item):
            # Fix: delays are strings; lexicographic order puts '10.5'
            # before '2.3'.  Compare numerically, unparsable delays last.
            try:
                return float(item[0])
            except ValueError:
                return float('inf')

        self.proxy_list = [s[1] for s in sorted(self.proxies.iteritems(), key=_delay_key)]
        self.persist()

    def extract(self, content):
        """Parse one listing page and add ``type://ip:port`` entries to
        ``self.proxies``, keyed by the scraped delay string.

        NOTE(review): entries sharing a delay overwrite each other — this
        preserves the original keying scheme.
        """
        query_content = PyQuery(content)
        eval_pattern = re.compile(self.eval_reg)
        port_pattern = re.compile(self.port_reg)
        scripts = eval_pattern.findall(content)
        if not scripts:
            # Page layout changed or an anti-bot page was served;
            # original code raised IndexError here.
            return
        ctx = PyV8.JSContext()
        ctx.enter()
        try:
            # Define the obfuscation variables used by the port expressions.
            ctx.eval(scripts[0])
            trs = query_content(".spy1xx, .spy1x")
            for tr in trs[1:]:
                text = PyQuery(tr).text().split(' ')
                row_index = text[0]
                if not row_index.isdigit():
                    # Header or decorative row, not a proxy entry.
                    continue
                ip = text[1]
                types = text[4]
                delay = text[6]
                # Each '(...)' expression evaluates to one chunk of digits;
                # concatenated in order they form the full port number.
                parts = [str(ctx.eval(expr)) for expr in port_pattern.findall(text[3])]
                port = ''.join(parts)
                self.proxies[delay] = types + "://" + ip + ':' + port
        finally:
            # Fix: balance enter(); the original leaked the V8 context.
            ctx.leave()

    def persist(self):
        """Add every collected proxy string to the Redis set."""
        for proxy in self.proxy_list:
            self.redis.sadd(self.redis_key, proxy)


if __name__ == '__main__':
    # Script entry point: run one full crawl-and-persist cycle.
    crawler = Proxy()
    crawler.crawl()
