# -*- coding: utf-8 -*-
'''
    2017-05-28 hqk
    Spider task/worker classes for detecting newly added restaurants.
'''
import gearman
import redis
import requests
import json
from urlparse import urljoin

from common.xms_json import MyJson
from dianp_access import DianpRequest, DianpAccess
from config.setting import RESTNEW, SPIDERSENSUS_REDIS, REST_LIST_KEY
from dianp_task import TaskStyle, TaskGearman, TaskProtocol, TaskModel
from dianp_census import DianpCensus
from dianp_parse import DianpParse
from dianp_oss import DianpOss
from dianp_log import DianpLog



class DianpNewaddTask(TaskModel, TaskGearman):
    """Client side of the newadd pipeline.

    Builds restaurant-search URLs, dispatches them as gearman tasks
    (queue RESTNEW) and logs every newly discovered restaurant id that
    the workers report back.
    """

    def __init__(self):
        super(DianpNewaddTask, self).__init__()
        self.dianpAccess = DianpAccess()
        self.dianpCensusO = DianpCensus()
        self.dianpLogO = DianpLog('_program.log')
        # connect the gearman client (provided by TaskGearman)
        self.client_init()

    # Build the list of search urls that will be dispatched as tasks.
    def __get_req_url(self):
        self.searchUrlList = self.dianpAccess.build_search_url()

    def __parse_res(self, res):
        """Decode one JSON task result.

        Returns the 'results' list, or None when *res* is not valid JSON.
        """
        state, resJ = MyJson.jsonLoads(res)
        if not state:
            return
        return resJ['results']

    # Wrap every search url in the task protocol and submit them in one batch.
    def send_newadd_task(self):
        taskList = [TaskStyle.task_style(RESTNEW, item) for item in self.searchUrlList]
        return self.client_multiple_task(taskList)

    def parse_task_result(self, res):
        """Log every newly added restaurant id reported by the workers.

        BUGFIX: Log() used to be outside the inner loop, so only the last
        restid of each search url was written — and an empty 'restids'
        list raised NameError because logStr was never assigned.
        """
        allDatas = TaskProtocol.parse_newadd_protocol(res)
        for allData in allDatas:
            searchUrl = allData['searchUrl']
            for restid in allData['restids']:
                logStr = "searchUrl :[" + searchUrl + "] find newadd restid :[" + restid + "]"
                self.dianpLogO.Log(logStr)

    def main(self):
        """Full pipeline: build urls, dispatch tasks, log the results."""
        self.__get_req_url()
        res = self.send_newadd_task()
        self.parse_task_result(res)
    
    
class DianpNewaddHandle(TaskGearman, TaskProtocol):
    def __init__(self):
        super(DianpNewaddHandle, self).__init__()
        self.worker_init()
        self.dianpRequestO = DianpRequest()
        self.dianpAcessO = DianpAccess()
        self.dianpParseO = DianpParse()
        self.dianpOssO = DianpOss()
        self.dianpCensusO = DianpCensus()
        
        self.pool = redis.ConnectionPool(host=SPIDERSENSUS_REDIS['host'], port=SPIDERSENSUS_REDIS['port'], db=SPIDERSENSUS_REDIS['db'])
        self.rds = redis.Redis(connection_pool=self.pool)
        
        
    
    def start(self):
        self.worker_register(RESTNEW, self.handle)
        self.worker_start()
    
    def get_all_page_restids(self, searchUrl):
        state, res = self.dianpRequestO.request_get(searchUrl)
        if state == False or res.status_code != 200:
            self.dianpCensusO.update_proxie_times()
            return []
        pageNum = DianpParse.get_max_pagenum(res.content)
        allUrl = self.dianpAcessO.build_all_search_page(searchUrl, pageNum, 1)
        allRestidList = []
        for url in allUrl:
            state, res = self.dianpRequestO.request_get(url)
            if state == False:
                continue
            restidList = DianpParse.get_rest_list(res.content)
            allRestidList = allRestidList + restidList
        return allRestidList
    
    def get_all_reviews(self, restid):
        reviewUrl = self.dianpAcessO.build_review_url(restid, 1)
        state, res = self.dianpRequestO.request_get(reviewUrl)
        if state == False or res.status_code != 200:
            self.dianpCensusO.update_proxie_times()
            return []
        pageNum = DianpParse.get_max_pagenum(res.content)
        allUrl = self.dianpAcessO.build_all_review_url(restid, pageNum, 1)
        allReviewList = []
        for url in allUrl:
            state, res = self.dianpRequestO.request_get(url)
            if state == False:
                continue
            reviewList = DianpParse.get_review(res.content)
            allReviewList = allReviewList + reviewList
        return allReviewList

    def download_all_big_pic(self, restid, tagList):
        for item in tagList:
            tag = item['tag']
            picIds = item['pids']
            if tag == u'全部照片':
                tag = u'其他'
            for item2 in picIds:
                picid = item2['id']
                url = item2['url']
                if url == '':
                    continue
                if 'https' in url:
                    import re
                    url = url.replace('https', 'http')
                    url = re.findall('(.*jpg)', url)[0]
                res = requests.get(url)
                if res.status_code == '200':
                    continue
                pathName = self.dianpOssO.bulid_oss_path(restid, tag, picid) 
                self.dianpOssO.upload(pathName, res.content)
            
        
        
    def handle(self, gearmanWorker, gearmanJob):
        searchUrl = json.loads(gearmanJob.data)
        print searchUrl
        #searchUrl = 'http://www.dianping.com/search/category/344/10/g101'
        allRestidList = self.get_all_page_restids(searchUrl)
        newaddList = []
        #allRestidList = ['79205747']
        for restid in allRestidList:
            if  not self.restExits(restid):
                #获取基本信息
                detailUrl = self.dianpAcessO.build_rest_detail(restid)
                state, res = self.dianpRequestO.request_get(detailUrl)
                if state == False or res.status_code != 200:
                    self.dianpCensusO.update_proxie_times()
                    continue
                flag = 3
                while DianpParse.check_page_404(res.content) == True and flag > 0:
                    state, res = self.dianpRequestO.request_get(detailUrl)
                    #flag = flag -1

                restInfo = DianpParse.get_restinfo(res.content)
                #获取点评标签
                userThink = DianpParse.get_user_think(res.content)
                #获取点评
                reviewList = DianpParse.get_review_mainpage(res.content)
                self.dianpCensusO.rest_newadd_review(restid, reviewList)
                
                #获取推荐菜
                recmmendParams = DianpParse.get_recommend_params(restInfo['restinfo'])
                recommenUrl = self.dianpAcessO.build_rest_recomend()
                state, res = self.dianpRequestO.request_get(recommenUrl, params=recmmendParams)
                if state == False or res.status_code != 200:
                    self.dianpCensusO.update_proxie_times()
                    continue
                
                recommendFood = DianpParse.get_recommend_food(res.content)
                for food in recommendFood:
                    foodUrl = self.dianpAcessO.build_food_tag_url(restid, food['name'])
                    state, res = self.dianpRequestO.request_get(foodUrl)
                    if res.status_code == 404:
                        continue
                    DianpParse.get_food_tag(res.content), '####'
                    food['foodLabel'] = DianpParse.get_food_tag(res.content)
                restInfo['recommendFood'] = recommendFood
                restInfo['userThink'] = userThink
                #获取榜单
                billboadParams = DianpParse.get_billboad_params(restInfo['restinfo'])
                billboadUrl = self.dianpAcessO.build_rest_billboad()
                state, res = self.dianpRequestO.request_get(billboadUrl, params=billboadParams)
                if state == False or res.status_code != 200:
                    continue
                
                billBoad = DianpParse.get_rest_billboad(res.content)
                restInfo['billBoad'] = billBoad
                #获取区域 
                districtUrl = self.dianpAcessO.build_district_url(restInfo['restinfo']['cityId'], restInfo['restinfo']['mainRegionId'])
                state, res = self.dianpRequestO.request_get(districtUrl)
                if state == False or res.status_code != 200:
                    continue
                restInfo['restinfo']['districtId'] = DianpParse.get_res_districtid(res.content)

                self.dianpCensusO.rest_newadd_info(restid, restInfo)
                #获取图片
                #先获取图片ID
                picUrl = self.dianpAcessO.build_rest_pic_url(restid)
                state, res = self.dianpRequestO.request_get(picUrl)
                if state == False or res.status_code != 200:
                    continue
                
                #获取tag
                tagList = DianpParse.get_tag_list(res.content)
                if len(tagList) == 0:
                    continue
                #获取图片地址
                self.get_all_tag_picid(tagList)
                tagPics = self.get_big_pic_url(tagList)
                self.download_all_big_pic(restid, tagPics)
                
                #reData = self.return_newadd_protocol(restid, restInfo)
                self.dianpCensusO.rest_newadd_pic(restid, tagPics)

                #close spider all review
                #reviewList = self.get_all_reviews(restid)
                #reviewList = DianpParse.get_review_mainpage(res.content)
                #self.dianpCensusO.rest_newadd_review(restid, reviewList)
                newaddList.append(restid)
        return json.dumps({'searchUrl':searchUrl, 'restids':newaddList})

    def get_all_tag_picid(self, tagList):
        for item in tagList:
            item['pids'] = []
            tagUrl = urljoin(self.dianpAcessO.DianpHost, item['url'])
            state, res = self.dianpRequestO.request_get(tagUrl)
            pageNum = DianpParse.get_max_pagenum(res.content)
            picPageList =  self.dianpAcessO.build_pic_url_tag(tagUrl, pageNum, 1)
            picIds = self.get_small_pic_url(picPageList)
            item['pids'] = picIds
        
        allPids = []
        for item in tagList:
            if u'全部图片' in  item['tag']:
                allPids = item['pids']
                break
        
        for item in tagList:
            if u'全部图片' in item['tag']:
                continue
            for id in item['pids']:
                if id in allPids:
                    allPids.remove(id)

    def get_small_pic_url(self, picPageList):
        picIds = []
        for picUrl in picPageList:
            state, res = self.dianpRequestO.request_get(picUrl)
            if state == False:
                 continue
            picId = DianpParse.get_restpic(res.content)
            picIds = picIds + picId
        return picIds
    
    def get_big_pic_url(self, tagLists):
        tagPics = []
        for item in tagLists:
            tag = {}
            tag['tag'] = item['tag']
            pids = item['pids']
            tag['pids'] = []
            for item in pids:
                id = item['id']
                picUrl = self.dianpAcessO.build_big_pic_url(id)
                state,res = self.dianpRequestO.request_get(picUrl)
                bigPicUrl = DianpParse.get_big_pic(res.content)
                tag['pids'].append({'id':id, 'url':bigPicUrl, 'intro':item['intro']})
            tagPics.append(tag)
        return tagPics
        
    def restExits(self, restid):
        if self.rds.sismember(REST_LIST_KEY, restid) == 0:
            return False
        else:
            return True

    def test_rest_pic_spider(self, restid):
        picUrl = self.dianpAcessO.build_rest_pic_url(restid)
        state, res = self.dianpRequestO.request_get(picUrl)
        if state == False or res.status_code != 200:
                return

        #获取tag
        tagList = DianpParse.get_tag_list(res.content)
        if len(tagList) == 0:
            return
        #获取图片地址
        self.get_all_tag_picid(tagList)
        tagPics = self.get_big_pic_url(tagList)
            

#ss = DianpNewaddHandle() 
#print ss.handle('','')
