#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'bitfeng'

import requests
import logging
import json
import redis
import time
from scrapyluke.commonfun import renameFile
import random


def testKuaiDaiLi():
    """Manual smoke test: fetch a batch of proxies from the KuaiDaiLi API
    and try each one against a fenjianli.com search request.

    For every proxy returned by the API, a POST is issued through it and a
    per-proxy 'Success' / 'Fail' / 'Timeout' line is printed.  Diagnostic
    helper only; not part of the crawling pipeline.
    """
    api_url = 'http://dev.kuaidaili.com/api/getproxy/?orderid=935345150249512' \
              '&num=10&b_pcchrome=1&b_pcie=1&b_pcff=1&protocol=1&method=2&an_an=1' \
              '&an_ha=1&sp1=1&f_loc=1&f_an=1&f_sp=1&quality=1&sort=2&dedup=1&format=json&sep=1'
    res = requests.get(url=api_url, timeout=10)

    try:
        # Expected payload shape: {'data': {'proxy_list': ['ip:port,meta...', ...]}}
        proxy_list = json.loads(res.content)['data']['proxy_list']
    except Exception as e:
        # Malformed or unexpected API answer -> nothing to test.
        proxy_list = []
        print('[ERROR] unexpected proxy API response: %s' % e)

    target_url = 'http://www.fenjianli.com/search/search.htm'
    header = {
        'Origin': 'http://www.fenjianli.com',
        'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6',
        'Accept-Encoding': 'gzip, deflate',
        'X-Requested-With': 'XMLHttpRequest',
        'Host': 'www.fenjianli.com',
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36',
        # 'Connection': 'keep-alive',
        'Cookie': 'username=okm%40okm.com; password=5a690d842935c51f26f473e025c1b97a; JSESSIONID=A2A745DDAB697B2C2BD6D9C2FA014F62;',
        'Referer': 'http://www.fenjianli.com/',
        'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    }
    # Fixed urlencoded search payload (area 320000, first page); sent
    # unchanged through every proxy so results are comparable.
    body = 'areas=320000&trade=%E5%A4%9A%E5%85%83%E5%8C%96%E4%B8%9A%E5%8A%A1%E9%9B%86%E5%9B%A2%E5%85%AC%E5%8F%B8&rows=60&sortBy=1&sortType=1&offset=0&_random=0.3756914078257978'

    for proxy in proxy_list:
        try:
            # Proxy entries look like 'ip:port,meta,...'; only the address is used.
            response = requests.post(url=target_url, data=body, headers=header,
                                     proxies={'http': proxy.split(',')[0]},
                                     timeout=5)
            if response.status_code == 200:
                print('%s Success %s' % (proxy, response.url))
            else:
                print('%s Fail' % proxy)
        except Exception as e:
            print('%s Timeout %s' % (proxy, e))


class KuaidailiProxy(object):
    """Keeps a Redis list named 'proxy' topped up with HTTP proxies fetched
    from the KuaiDaiLi paid API.

    Typical use: construct, then call start(interval, max_len); stop() from
    another thread ends the refresh loop.
    """

    # Default Redis connection settings; copied per instance so the class
    # never shares a mutable default dict between instances.
    DEFAULT_REDIS_URI = {'host': 'localhost', 'port': '6379', 'db': 0, 'proxy': 'proxy'}

    def __init__(self, redis_uri=None):
        """Connect to Redis.

        :param redis_uri: dict with 'host', 'port', 'db' (and the list name
            under 'proxy'); defaults to DEFAULT_REDIS_URI.
        """
        if redis_uri is None:
            redis_uri = dict(self.DEFAULT_REDIS_URI)
        self.live = False           # refresh loop runs while True
        self.num = 100              # proxies requested per API call
        self.interval_min = 5       # seconds to back off after a failure
        self.expiration = 20000
        self.redis_uri = redis_uri
        # NOTE(review): credentials are hard-coded; should come from
        # config/environment rather than source.
        self.r = redis.StrictRedis(host=redis_uri['host'], port=redis_uri['port'],
                                   db=redis_uri['db'], password='Bitfeng,123')

    def init_url(self, num):
        """Build the KuaiDaiLi API URL requesting *num* proxies."""
        url = 'http://dev.kuaidaili.com/api/getproxy/?orderid=935345150249512' \
              '&num=%d&b_pcchrome=1&b_pcie=1&b_pcff=1&protocol=1&method=2&an_an=1' \
              '&an_ha=1&sp1=1&f_loc=1&f_an=1&f_sp=1&quality=1&sort=2&format=json&sep=1' % num
        return url

    def get(self, num):
        """Fetch *num* proxy descriptors from the API, retrying until a
        well-formed answer arrives.

        :returns: list of 'ip:port,meta,...' strings.
        """
        url = self.init_url(num)
        while True:
            try:
                res = requests.get(url=url, timeout=10)
                print('got something')
                if res.status_code == 200:
                    try:
                        result = json.loads(res.content)
                        return result['data']['proxy_list']
                    except Exception as e:
                        print('[Error result wrong] ' + str(e))
                        time.sleep(self.interval_min)
                else:
                    # Non-200: back off instead of hammering the API.
                    print(res.status_code)
                    time.sleep(self.interval_min)
            except Exception as e:
                print('[ERROR]get proxy ' + str(e))
                time.sleep(self.interval_min)

    def start(self, intev, max_len):
        """Refresh loop: every *intev* seconds push a fresh batch and trim
        the Redis list to *max_len* entries.  Runs until stop() is called.
        """
        self.live = True
        while self.live:
            proxies = self.get(self.num)
            if proxies and self.add_proxy(proxies):
                self.ltrim_proxy(max_len)
                time.sleep(intev)
            else:
                # Empty batch or Redis failure: short back-off, then retry.
                time.sleep(self.interval_min)

    def stop(self):
        """Ask the start() loop to exit after its current iteration."""
        self.live = False

    def ltrim_proxy(self, num):
        """Trim the 'proxy' list to at most *num* entries (newest kept).

        LTRIM indexes are inclusive, so the end index is num - 1; the
        original 0..num kept num + 1 elements.
        """
        if self.r.llen('proxy') > num:
            self.r.ltrim('proxy', 0, num - 1)

    def add_proxy(self, proxy_info_list):
        """Push the given proxies (as 'http://ip:port') onto the 'proxy' list.

        The batch is reversed so the API's best-sorted proxy ends up last
        (closest to the RPUSH tail).

        :returns: True on success, False on empty input or Redis error.
        """
        if not proxy_info_list:
            # rpush with zero values would raise; treat as a failed batch.
            return False
        proxies = ('http://' + proxy_info.split(',')[0]
                   for proxy_info in proxy_info_list[::-1])
        try:
            self.r.rpush('proxy', *proxies)
            print('rpush proxy success')
            return True
        except Exception as e:
            print('[ERROR] rpush values ' + str(e))
            return False

    def get_redis_uri(self):
        """Return the connection settings dict this instance was built with."""
        return self.redis_uri

    def score(self, proxy, area, anonymity, delay):
        """Score a proxy; currently only the reported *delay* is used.

        :returns: delay as float when parseable, else the fallback 1.5.
            (proxy/area/anonymity are reserved for a future weighting.)
        """
        if delay:
            try:
                return float(delay)
            except Exception as e:
                print('[ERROR] bad delay value: %s' % e)
        return 1.5


if __name__ == '__main__':
    # Run the proxy refresher: refill the Redis 'proxy' list every 30 s,
    # capping it at 500 entries.  Blocks until interrupted.
    kdl = KuaidailiProxy()
    kdl.start(30, 500)






