import scrapy
from scrapy.http import Request
import json
from appInfo.items import AppinfoItem
import requests
import time


class AppInfoSpider(scrapy.Spider):
    """Crawl Tencent MyApp (sj.qq.com) for app details and comment counts.

    Pipeline per app name read from ``crawl_app_name``:
      1. ``start_requests`` — search the name via the AJAX search endpoint;
      2. ``parse`` — take the first search hit's package name and request
         its detail page;
      3. ``get_app_info_parse_first`` — scrape the detail page into an
         ``AppinfoItem`` and request the comment endpoint;
      4. ``get_app_info_parse_second`` — attach the total comment count and
         yield the finished item.
    """

    name = 'app_info_test'
    allowed_domains = ['qq.com']
    # start_urls = ['http://sj.qq.com/myapp/']
    # Keyword-search endpoint; returns JSON listing matching apps.
    ajax = "http://sj.qq.com/myapp/searchAjax.htm?kw="
    # App detail page (HTML), keyed by package name.
    app_info_url = "http://sj.qq.com/myapp/detail.htm?apkName="
    # Comment endpoint (JSON), e.g. apkName=com.tencent.token
    comment_url = "http://sj.qq.com/myapp/app/comment.htm?apkName="

    # Input file: one app name per line, UTF-8 encoded.
    crawl_app_name = "E:\\PycharmPerProject\\appInfo\\app_name\\test_app_info.txt"

    def start_requests(self):
        """Yield one search request per non-empty app name in the input file."""
        with open(AppInfoSpider.crawl_app_name, encoding='utf-8') as f:
            for line in f:
                param = line.strip()
                if not param:
                    # Skip blank lines instead of issuing an empty search.
                    continue
                yield Request(AppInfoSpider.ajax + param, meta={'appName': param})

    def parse(self, response):
        """Parse the search JSON and follow the first hit's detail page.

        Drops the name with a warning when the search returns no hits,
        instead of crashing on a missing/empty ``items`` list.
        """
        js = json.loads(response.body.decode('utf-8'))
        items = (js.get('obj') or {}).get('items') or []
        if not items:
            self.logger.warning("no search result for %s", response.meta['appName'])
            return
        pkg_name = items[0].get('pkgName')
        yield Request(AppInfoSpider.app_info_url + pkg_name,
                      meta={'appName': response.meta['appName'], 'pkgName': pkg_name},
                      callback=self.get_app_info_parse_first)

    def get_app_info_parse_first(self, response):
        """Scrape the detail page into an item, then fetch the comment count."""
        # The publish time is a Unix timestamp stored in a data attribute;
        # convert it to YYYY-MM-DD. Default to the '无' ("none") sentinel so
        # the fallback is always defined even if extraction raises before
        # any assignment (the original code could hit NameError here).
        get_update = '无'
        try:
            update_temp = response.xpath("//div[@class='det-othinfo-data']")[1].xpath(
                "./@data-apkpublishtime").extract()
            get_update = time.strftime("%Y-%m-%d", time.localtime(int(update_temp[0])))
        except IndexError:
            # Element or attribute missing on the page; keep the sentinel.
            pass
        except Exception as e:
            # e.g. non-numeric timestamp; keep the sentinel and log lazily.
            self.logger.warning("时间转换异常！ %s", e)

        myitem = AppinfoItem()
        myitem['src_app_name'] = response.meta['appName']
        myitem['search_app_name'] = response.xpath("//div[@class='det-name-int']/text()").extract()
        myitem['search_pkg_name'] = response.meta['pkgName']
        myitem['user_score'] = response.xpath("//div[@class='com-blue-star-num']/text()").extract()
        myitem['downloads'] = response.xpath("//div[@class='det-ins-num']/text()").extract()
        myitem['version'] = response.xpath("//div[@class='det-othinfo-data']")[0].xpath('./text()').extract()
        myitem['developer'] = response.xpath("//div[@class='det-othinfo-data']")[2].xpath('./text()').extract()
        myitem['app_info'] = response.xpath("//div[@class='det-app-data-info']")[0].xpath('./text()').extract()
        myitem['app_classification'] = response.xpath("//a[@class='det-type-link']/text()").extract()
        myitem['update_time'] = get_update
        yield Request(AppInfoSpider.comment_url + response.meta['pkgName'],
                      meta={'myitem': myitem},
                      callback=self.get_app_info_parse_second)

    def get_app_info_parse_second(self, response):
        """Attach the total comment count and emit the finished item.

        On malformed JSON or missing keys the count is set to the
        '异常' ("error") marker rather than failing the whole item.
        """
        myitem = response.meta['myitem']
        try:
            myitem['comment_number'] = json.loads(response.body).get('obj').get('total')
        except Exception:
            myitem['comment_number'] = '异常'

        yield myitem
