# -*- coding: utf-8 -*-
# @Time    : 2019/3/25 14:25
# @Author  : zjj
# @Email   : 1933860854@qq.com
# @File    : baidu_ads_spider.py
# @Software: PyCharm
import scrapy
import json
from iQiYiSpider.items import baiDuAppAdsItem
import urllib
import random
import logging
from urllib.parse import quote, unquote
# scrapy crawl baidu_ads_spider
from iQiYiSpider.db_method import db_query
class baidu_ads_spider(scrapy.Spider):
    """Scrape Baidu-app feed advertisements (feed as served to OPPO handsets).

    Flow:
      * ``start_requests`` pulls randomized query-parameter sets from the DB
        (``getParamter``) and hits the feed endpoint.
      * ``parse`` extracts "ad"-mode entries into ``baiDuAppAdsItem`` items;
        for "video"-mode entries it issues a second POST request handled by
        ``parse_two``.
      * Both callbacks re-queue a fresh feed request so the crawl keeps
        running continuously.

    Run with: ``scrapy crawl baidu_ads_spider``
    """
    name = 'baidu_ads_spider'
    allowed_domains = ['mbd.baidu.com']
    start_urls = ['https://mbd.baidu.com/searchbox?']

    # Item fields that are pre-initialised to None for every extracted ad.
    _ITEM_FIELDS = (
        'feed_floor_type', 'tag_has_border', 'tag_color', 'tag_skin_color',
        'tag_border_color', 'tag_skin_border_color', 'prefetch_image', 'tag',
        'tag_text_size', 'title', 'source', 'duration', 'image', 'type',
        'comment_num', 'extra_data', 'videoInfo', 'video', 'prefetch_video',
        'auto_play_switch_ad', 'cmd', 'lp_cmd', 'extra_info',
        'ad_append_info', 'title_tts', 'ad_url', 'expire',
    )

    # Feed-JSON keys that ``parse`` copies verbatim (stringified) into the
    # item when present.  ``image``/``extra_data``/``lp_real_url`` have
    # dedicated handling and are deliberately excluded.
    _STR_FIELDS = (
        'feed_floor_type', 'tag_has_border', 'tag_color', 'tag_skin_color',
        'tag_border_color', 'tag_skin_border_color', 'prefetch_image', 'tag',
        'tag_text_size', 'title', 'source', 'duration', 'type', 'comment_num',
        'videoInfo', 'video', 'prefetch_video', 'auto_play_switch_ad', 'cmd',
        'lp_cmd', 'extra_info', 'ad_append_info', 'title_tts',
    )

    @staticmethod
    def _new_item():
        """Return a ``baiDuAppAdsItem`` with every optional field set to None."""
        item = baiDuAppAdsItem()
        for field in baidu_ads_spider._ITEM_FIELDS:
            item[field] = None
        return item

    def start_requests(self):
        # Endless generator: Scrapy consumes it lazily, so this keeps the
        # crawl supplied with randomized parameter sets indefinitely.
        # dont_filter=False lets the dupe filter drop identical URLs.
        while True:
            paramter_list = self.getParamter()
            for spider_url in self.start_urls:
                yield scrapy.Request(
                    spider_url + random.choice(paramter_list),
                    callback=self.parse,
                    dont_filter=False,
                )

    def parse(self, response):
        """Parse one feed response.

        Yields ad items for "ad"-mode entries, issues a follow-up POST for
        "video"-mode entries (handled by ``parse_two``), and always re-queues
        a fresh feed request — even when parsing fails — so the crawl never
        stalls.
        """
        try:
            result_json = json.loads(response.text)
            result_list = result_json['data']['100']['itemlist']['items']
            for result in result_list:
                data = result['data']
                if "ad" == data['mode'] and "广告" == data['sub_tag']:
                    try:
                        item = self._new_item()
                        item['ad_id'] = str(result['id'])
                        item['mode'] = str(data['mode'])
                        item['sub_tag'] = str(data['sub_tag'])
                        # Missing 'expire' raises KeyError -> entry skipped.
                        item['expire'] = str(data['expire'])
                        for field in self._STR_FIELDS:
                            if field in data:
                                item[field] = str(data[field])
                        if 'image' in data:
                            item['image'] = [data['image']]
                        if 'lp_real_url' in data:
                            item['ad_url'] = urllib.parse.unquote(data['lp_real_url'])
                        if 'extra_data' in data:
                            # Fall back to monitor URLs only when no landing
                            # page URL was provided.
                            if item['ad_url'] is None:
                                extra_json = data['extra_data']
                                if 'ad_monitor_url' in extra_json:
                                    for url in extra_json['ad_monitor_url']:
                                        click_url = url['click_url']
                                        show_url = url['show_url']
                                        if click_url:
                                            item['ad_url'] = click_url
                                        elif show_url:
                                            item['ad_url'] = show_url
                            item['extra_data'] = str(data['extra_data'])
                        if item['image'] is None and 'items' in data:
                            item['image'] = [img['image'] for img in data['items']]
                        yield item
                    except Exception:
                        # One malformed ad entry must not abort the page;
                        # logging.exception keeps the traceback (the old
                        # logging.info(e) lost it).
                        logging.exception('failed to extract ad entry')
                        continue
                if "video" == data['mode']:
                    video_title = data['videoInfo']['title']
                    video_id = data['videoInfo']['vid']

                    url = "https://mbd.baidu.com/searchbox?action=feed&cmd=185&service=bdbox&uid=0i2m8gax2u_28vuijuSZulOqBageu2iBlavAu0iTSu8Zu28t_u208_uq2igwa2fWA&from=1001703e&ua=_avLC_aE-i4qywoUfpw1z4uS2N_-C2I4_uL5ixLqA&ut=r93jOqiWv8fDN2IJgI2wi4tl1R85A&osname=baiduboxapp&osbranch=a0&pkgname=com.baidu.searchbox&network=1_0&cfrom=1001703e&ctv=2&cen=uid_ua_ut&typeid=0&sid=1012746_5-2054_5102-1012745_6-1012992_2-1013023_2-1013021_1-1813_4378-1013018_1-1562_3682-1819_4393-2082_5167-1012523_2-2085_5175-1831_4420-1578_5389-1012769_1-1012768_1-1012287_3-1012799_1-1840_4442-1074_2311-2102_5225-1012278_1-1010739_2-1013042_2-1012785_3-1013070_3-1012813_5-1863_4493-588_1201-1013081_2-1012820_1-2139_5340-1013075_5-1886_4558-2146_5358-2148_5363-1013099_4-1013096_2-1906_4626-1012859_6-1919_4716-1008524_1-1012360_4-1012872_1-1934_4778-1167_2514-1012638_5-1176_2541-1012630_2-1946_4804-1690_4807-1012910_1-1699_4002-1012902_2-1962_4834-1007550_23035-1012925_2-1007549_23033-1011642_2-1208_2626-1012663_1-1726_4064-1471_3436-1012447_4-1012956_6-2004_4951-1012953_2-1759_4149-1012456_2-2026_5031-1012731_2-2040_5066-1009652_2-1789_4286&zid=OwYLRZu9PHXUQOuwrV11TChqu-VKzhAEENOk6KxzaT_ru624sF-f8r4PJI15_REHajg8e91QqOID4GNMGCZptEZk6Xws2aj8W2YbkuHFPRq8&imgtype=webp&android_id=IaARijO6vfggyAiwN4Ejof-0B&refresh=1"
                    # Named 'payload' so it no longer shadows the feed-entry
                    # dict 'data' above.
                    payload = {"vid": video_id, "iad": "1", "pd": "feed",
                               "title": video_title}
                    logging.info('二次爬取：' + str(payload))
                    # json.dumps produces valid compact JSON; the previous
                    # str(dict).replace(" ", "").replace("'", '"') hack
                    # stripped spaces out of (and broke quoting in) titles.
                    body = 'data=' + quote(
                        json.dumps(payload, ensure_ascii=False,
                                   separators=(',', ':')))
                    header = {
                        'Content-Type': 'application/raw',
                        # Derived from the actual body; was hard-coded 387.
                        'Content-Length': str(len(body)),
                        'Host': 'mbd.baidu.com',
                        'Connection': 'Keep-Alive',
                        'Accept-Encoding': 'gzip',
                        'Cookie': 'WISE_HIS_PM=1; fontsize=1.0; BAIDUID=A5B66CF3DE854D9D85EC47DF008E2E95:FG=1; delPer=0; PSINO=6; x-logic-no=5; BAIDUCUID=0i2m8gax2u_28vuijuSZulOqBageu2iBlavAu0iTSu8Zu28t_u208_uq2igwa2fWA; MBD_AT=1553493077; GID=G1ESM1R1TDDD6YSVB4YY8LDYMCL5EKTH78; H_WISE_SIDS=124612_100808_130510_130084_128067_129324_129637_125695_120158_130060_130177_107311_118894_118869_118843_118830_118799_130385_129565_129749_130156_130128_129945_117330_130350_130223_130258_130075_129009_129647_130690_128246_117432_128967_129621_130608_129396_128790_130321_129381_129901_130058_129482_129646_124030_129733_110086_129215_127969_123289_130051_129708_127417_130760; BAIDULOC=12622382_2630922_40_257_1554715870217',
                        'User-Agent': 'okhttp/3.11.0 Dalvik/2.1.0 (Linux; U; Android 5.1; OPPO A37t Build/LMY47I) baiduboxapp/11.6.1.10 (Baidu; P1 5.1)'
                    }
                    yield scrapy.Request(url, method='POST', headers=header,
                                         body=body, encoding='utf-8',
                                         callback=self.parse_two)
            yield self._next_feed_request()
        except Exception:
            logging.exception('failed to parse feed response')
            yield self._next_feed_request()

    def _next_feed_request(self):
        """Build a fresh feed request with a freshly randomized parameter set."""
        paramter_list = self.getParamter()
        return scrapy.Request(
            self.start_urls[0] + random.choice(paramter_list),
            callback=self.parse,
            dont_filter=False,
        )

    def getParamter(self):
        """Build one randomized query string per DB-stored parameter row.

        Every row of ``baidu_app_ads_paramter`` (minus its ``id``/``sid``
        columns) becomes ``k1=v1&k2=v2&...&sid=<random>``, where ``sid`` is
        nine random ``AAAA_BBBB-CCCCC`` segments joined with '-'.

        Returns:
            list[str]: one ready-to-append query string per row.
        """
        sql = "SELECT * FROM baidu_app_ads_paramter"
        result_list = db_query(sql)
        result_string = []
        for result in result_list:
            result.pop('id')
            result.pop('sid')
            pairs = [key + "=" + str(value) for key, value in result.items()]
            sid_parts = [
                str(random.randrange(1000, 10000)) + "_"
                + str(random.randrange(1000, 10000)) + "-"
                + str(random.randrange(10000, 100000))
                for _ in range(1, 10)
            ]
            query = "&".join(pairs)
            if query:
                query += "&"
            query += "sid=" + "-".join(sid_parts)
            result_string.append(query)

        return result_string

    def parse_two(self, response):
        """Parse the second-stage POST response for a video ad.

        Yields an item when the template is tagged as an ad and has a title;
        otherwise re-queues a fresh feed request.
        """
        result_json = json.loads(response.text)
        ad_json = result_json['data']['185']['adTpl']
        data = ad_json['data']

        if "广告" == data['sub_tag']:
            item = self._new_item()
            item['ad_id'] = ad_json['id']
            item['mode'] = '二次爬取'
            # These keys are copied raw (not stringified) when present.
            for field in ('title', 'source', 'duration', 'type',
                          'comment_num', 'feed_floor_type', 'sub_tag'):
                if field in data:
                    item[field] = data[field]
            if 'image' in data:
                item['image'] = [data['image']]
            if item['image'] is None and 'items' in data:
                item['image'] = [img['image'] for img in data['items']]
            if 'cmd' in data:
                item['cmd'] = data['cmd']
                # Debug output of the decoded command URL (kept from the
                # original implementation).
                print(unquote(data['cmd']))
            if 'extra_info' in data:
                item['extra_info'] = str(data['extra_info'])
            if 'lp_real_url' in data:
                item['ad_url'] = urllib.parse.unquote(data['lp_real_url'])
            if 'extra_data' in data:
                item['extra_data'] = str(data['extra_data'])
                # Consistent with parse(): monitor URLs are only a fallback
                # when no landing-page URL was extracted.  The old
                # "'ad_url' in item" test was always true (the key is
                # pre-initialised) and clobbered lp_real_url.
                if ('ad_monitor_url' in data['extra_data']
                        and item['ad_url'] is None
                        and data['extra_data']['ad_monitor_url']):
                    for url in data['extra_data']['ad_monitor_url']:
                        click_url = url['click_url']
                        show_url = url['show_url']
                        if click_url:
                            item['ad_url'] = click_url
                        elif show_url:
                            item['ad_url'] = show_url
            # Items without a title are worthless downstream; drop them.
            if item['title'] is not None:
                yield item
        else:
            yield self._next_feed_request()

if __name__ == '__main__':
    # Convenience entry point: launch this spider through the Scrapy CLI,
    # identical to running "scrapy crawl baidu_ads_spider" in a shell.
    from scrapy import cmdline
    cmdline.execute(['scrapy', 'crawl', 'baidu_ads_spider'])


