# -*- coding: utf-8 -*-

import random
import urllib2, requests, urllib
import json
import redis
from config.setting import PROXIE_KEY, REST_LIST_KEY, SEARCHINFO_KEY, REST_LIST_YT_KEY
from config.setting import SEARCHINFO_REDIS, SPIDERSENSUS_REDIS, SLEEPTIME
import time



#agent = 'http://user:pass@127.0.0.1:3128/'

class DianpAccess():
    """Builds dianping.com URLs and reads crawl metadata from redis.

    All ``build_*`` methods are pure string helpers; only ``__init__``,
    ``__get_cityinfolist`` and ``get_restId`` touch redis.
    """

    def __init__(self):
        # Redis holds the city/search metadata produced by earlier crawl stages.
        pool = redis.ConnectionPool(host=SEARCHINFO_REDIS['host'], port=SEARCHINFO_REDIS['port'], db=SEARCHINFO_REDIS['db'])
        self.rds = redis.Redis(connection_pool=pool)
        self.DianpHost = 'http://www.dianping.com/'

    def __get_cityinfolist(self):
        """Load and decode the JSON city-info mapping stored under SEARCHINFO_KEY."""
        cityInfoList = self.rds.get(SEARCHINFO_KEY)
        return json.loads(cityInfoList)

    def get_restId(self, start, end):
        """Return restaurant ids whose scan position falls in [start, end).

        Relies on sscan_iter yielding a stable order for an unchanged set;
        TODO confirm that assumption holds for this redis deployment.
        """
        restIds = []
        index = 0
        for i in self.rds.sscan_iter(REST_LIST_YT_KEY):
            if start <= index < end:
                restIds.append(i)
            # fixed: was `index > end`, which scanned one extra item past the window
            if index >= end:
                break
            index = index + 1
        return restIds

    def build_district_url(self, cityid, mainRegionid):
        """Search URL for a city district (category 10 = food)."""
        return 'http://www.dianping.com/search/category/' + cityid + '/10/r' + str(mainRegionid)

    # Build detail-page URLs for every restaurant id in [start, end).
    # (Method name keeps the original "bulid" misspelling for caller compatibility.)
    def bulid_restdetail_url(self, start, end):
        restDetailUrlList = []
        # fixed: was self.__get_restId, which name-mangles to a missing attribute
        restIds = self.get_restId(start, end)
        for restid in restIds:
            url = 'http://www.dianping.com/shop/' + restid
            restDetailUrlList.append(url)
        # fixed: was `restDetailUrlLists` (NameError)
        return restDetailUrlList

    # The client fetches pagenum via a search_url, then generates every
    # search page URL and parses them to discover newly added shops.

    def build_food_tag_url(self, restid, tagName):
        """Photo-gallery URL for one dish tag; tagName is percent-encoded UTF-8."""
        return 'http://www.dianping.com/shop/' + restid + '/photos/tag-%E8%8F%9C-' + urllib.quote(tagName.encode('utf-8'))

    def build_search_url(self):
        """Build cuisine + hot-region search URLs, currently only for city '139'."""
        searchUrlList = []
        cityInfoList = self.__get_cityinfolist()
        # cityInfoList = {'<cityid>': {'hotregionList': [...], 'cusineList': [...]}}
        for k, v in cityInfoList.items():
            # NOTE(review): hard-coded filter — only city id '139' is crawled.
            if k != '139':
                continue
            city = k
            cusineList = v['cusineList']
            regionList = v['hotregionList']
            for cusine in cusineList:
                searchUrlList.append('http://www.dianping.com/search/category/' + city + '/10/' + cusine)
            for region in regionList:
                searchUrlList.append('http://www.dianping.com/search/category/' + city + '/10/' + region)
        return searchUrlList

    def build_search_allpage(self, pagenum, searchUrl):
        """Return searchUrl with page suffixes p1..p<pagenum>."""
        allPageSearchUrl = []
        for i in range(1, pagenum + 1):
            allPageSearchUrl.append(searchUrl + 'p' + str(i))
        # fixed: the list was built but never returned (method returned None)
        return allPageSearchUrl

    def build_pic_url_tag(self, baseUrl, pagenum, start):
        """Return baseUrl?pg=<n> for n in [start, pagenum]."""
        picUrlList = []
        for page in range(start, pagenum + 1):
            picUrlList.append(baseUrl + '?pg=' + str(page))
        return picUrlList

    def build_rest_detail(self, restid):
        """Restaurant detail-page URL."""
        return 'http://www.dianping.com/shop/' + str(restid)

    def build_big_pic_url(self, picid):
        """Full-size photo page URL."""
        return 'http://www.dianping.com/photos/' + picid

    def build_rest_recomend(self):
        """AJAX endpoint for the shop's recommended-dishes tab."""
        return 'http://www.dianping.com/ajax/json/shopDynamic/shopTabs?'

    def build_rest_billboad(self):
        """AJAX endpoint for the shop's sidebar/billboard data."""
        return 'http://www.dianping.com/ajax/json/shopDynamic/shopAside?'

    def build_rest_pic_url(self, restid):
        """First photo-gallery page for a restaurant."""
        return 'http://www.dianping.com/shop/' + restid + '/photos'

    def build_all_pic_page(self, restid, pagenum, start):
        """Photo-gallery page URLs ?pg=<n> for n in [start, pagenum]."""
        allUrl = []
        for page in range(start, pagenum + 1):
            allUrl.append('http://www.dianping.com/shop/' + restid + '/photos?pg=' + str(page))
        return allUrl

    def build_all_search_page(self, searchUrl, pagenum, start):
        """Search-result page URLs p<n> for n in [start, pagenum]."""
        allUrl = []
        for page in range(start, pagenum + 1):
            allUrl.append(searchUrl + 'p' + str(page))
        return allUrl

    def build_review_url(self, restid, pagenum):
        """Latest-reviews page URL for one page number."""
        return 'http://www.dianping.com/shop/' + restid + '/review_more_latest?pageno=' + str(pagenum)

    def build_all_review_url(self, restid, pagenum, start):
        """Latest-reviews URLs for pages [start, pagenum]."""
        allUrl = []
        for page in range(start, pagenum + 1):
            allUrl.append(self.build_review_url(restid, page))
        return allUrl
        

import Queue 
       
class DianpRequest():
    def __init__(self):
        self.resJson = None
        self.proxie_queue = Queue.Queue(maxsize = 100)
        self.ipport = ''
        
    def update_proxie(self):
        self.__get_proxie()
        
       
    def spider_proxie(self):
        url  = "http://api.ip.data5u.com/api/get.shtml?order=769874e15707a3cef606b0c863163403&num=100&area=%E4%B8%AD%E5%9B%BD&carrier=0&protocol=1&an1=1&an2=2&an3=3&sp1=1&sp2=2&sp3=3&sort=1&system=1&distinct=0&rettype=0&seprator=%0D%0A"
        #url = "http://api.ip.data5u.com/api/get.shtml?order=769874e15707a3cef606b0c863163403&num=100&area=%E4%B8%AD%E5%9B%BD&carrier=0&protocol=1&an1=1&an2=2&an3=3&sp1=1&sp2=2&sort=1&system=1&distinct=0&rettype=0&seprator=%0D%0A"
        res = requests.get(url) 
        self.resJson = json.loads(res.content)
        for item in self.resJson['data']:
            ipport = "http://" + item['ip'] + ":" + str(item['port'])
            self.proxie_queue.put(ipport)
     
    def __get_proxie(self):
        if self.proxie_queue.empty() == True:
            self.spider_proxie()
        self.ipport = self.proxie_queue.get() 
        
    
    def request_get(self, url, params=None, needProxie=True, headers=None):
        flagTime = 3
        if self.ipport == '':
            self.__get_proxie()
        state, re = self.__requst_get(url, params, needProxie=True, headers=headers)
        
        while (state == False or re.status_code not in (200,404)):
            print url, self.ipport
            self.__get_proxie()
            state, re = self.__requst_get(url, params, needProxie=True, headers=headers)
            #if state != False:
            #    print re.content
            flagTime = flagTime  - 1
        return state, re
            
        
    
    #params = dict 
    def __requst_get(self, url, params, needProxie, headers):
        if needProxie != False:
             proxies = {"http": self.ipport}
        else:
            proxies = None
        time.sleep(SLEEPTIME)
        
        agent = random.choice(user_agent_list)
        headers = {'Host': 'www.dianping.com','Connection': 'keep-alive','Cache-Control': 'max-age=0','Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8','Upgrade-Insecure-Requests': '1','User-Agent': agent,'Accept-Encoding': 'gzip, deflate, sdch','Accept-Language': 'zh-CN,zh;q=0.8'}
        
        try:
            res = requests.get(url, headers=headers, timeout=5, params=params, proxies=proxies)
            res.encoding = 'utf-8'
        except:
            return False, None
        return True, res
        
# Desktop browser User-Agent strings used to randomize request headers.
# fixed: a missing comma after the first entry caused implicit string
# concatenation, merging the first two user agents into one invalid string.
user_agent_list = [
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
    "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
    "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
]
        
    
