# encoding: utf-8

import time
import uuid
import datetime
import re
import json
from hashlib import sha1, sha256

import scrapy
from scrapy import Request, FormRequest

# The project package name differs between deployments, so fall back to the
# alternate layout. Catch only ImportError — a bare `except:` would also hide
# real failures inside the first package (syntax errors, KeyboardInterrupt).
try:
    from App_Spd.items import AppSpdItem
    from App_Spd.utils import analysis, get_urlid, get_pubtime
except ImportError:
    from app_spider.items import AppSpdItem
    from app_spider.utils import analysis, get_urlid, get_pubtime

# Sample of the app's signed request headers (captured values).
# NOTE(review): `headers2` is not referenced anywhere in this chunk — live
# requests use gen_headers() instead; presumably kept as a reference capture.
headers2 = {
    'X-SESSION-ID': '623bd6ab010ef65c3e4f22b7',
    'X-REQUEST-ID': '819dc5b6-6963-4852-a1fe-9a7854da3a3a',
    'X-TIMESTAMP': 1648088804000,  # milliseconds-resolution timestamp
    'X-SIGNATURE': 'b3fe4aef30033ebff051f5f91626ee47b385d93ce17b83003ad161e21ddc314d',
    'X-TENANT-ID': '14',
    'User-Agent': '1.1.8;ffffffff-a77b-5d17-ffff-ffff98c49000;HUAWEI TAS-AN00;Android;7.1.2;360',
    'Cache-Control': 'no-cache',
    'Host': 'app.pjnews.cn',
    'Connection': 'Keep-Alive',
    'Accept-Encoding': 'gzip, deflate'
}


# Desktop-browser headers used when fetching article detail pages over the web.
web_headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.74 Safari/537.36 Edg/99.0.1150.46',
}


def singature(path, session_id, request_id, timestamp, sign_salt, tenant_id):
    """Return the SHA-256 hex digest used as the X-SIGNATURE header value.

    The six inputs are joined with "&&" in this exact order:
    path && session_id && request_id && timestamp && sign_salt && tenant_id

    NOTE(review): the name looks like a typo for "signature" but is kept
    because it is called elsewhere in this module. Parameters were renamed
    from str/str2/.../str5 (the first shadowed the builtin `str`); the only
    visible caller passes positionally, so this is backward-compatible.
    """
    payload = "{}&&{}&&{}&&{}&&{}&&{}".format(
        path, session_id, request_id, timestamp, sign_salt, tenant_id)
    digest = sha256()
    digest.update(payload.encode("utf-8"))
    return digest.hexdigest()


def gen_headers(_t):
    """Build the signed request headers for the app.pjnews.cn list API.

    :param _t: timestamp in milliseconds; echoed in X-TIMESTAMP and folded
        into the X-SIGNATURE digest.
    :return: dict of HTTP headers for the channel_list endpoint.
    """
    session_id = '623bd6ab010ef65c3e4f22b7'
    request_id = '819dc5b6-6963-4852-a1fe-9a7854da3a3a'
    signature_salt = "FR*r!isE5W"
    tenant_id = '14'
    return {
        'X-SESSION-ID': session_id,
        'X-REQUEST-ID': request_id,
        'X-TIMESTAMP': _t,
        'X-TENANT-ID': tenant_id,
        'User-Agent': '1.1.8;ffffffff-a77b-5d17-ffff-ffff98c49000;HUAWEI TAS-AN00;Android;7.1.2;360',
        'Cache-Control': 'no-cache',
        'Host': 'app.pjnews.cn',
        'Connection': 'Keep-Alive',
        'Accept-Encoding': 'gzip, deflate',
        # Signature covers the endpoint path, session, request id, timestamp,
        # salt, and tenant id — same inputs and order as the original.
        'X-SIGNATURE': singature("/api/article/channel_list", session_id,
                                 request_id, _t, signature_salt, tenant_id),
    }


class ShihuapujiangAppSpider(scrapy.Spider):
    """Spider for the "诗画浦江" (Shihua Pujiang) news app.

    Walks a fixed list of channel ids, lists articles for each channel via
    the signed JSON API, then fetches each article's web detail page to
    extract the content and publish time.
    """

    websiteId = '2141422574'
    pubSource = '诗画浦江'
    name = 'shihuapujiang_app'

    # List endpoint: channel id + "start" cursor (a millisecond timestamp).
    url = 'https://app.pjnews.cn/api/article/channel_list?channel_id={}&start={}'
    detail_url = 'https://vapp.tmuyun.com/webDetails/news?id={}&tenantId=14'

    channels = [
        '5cc02969b1985017d6fef804',  # headlines (头条)
        '5cc2ccbe1b011b18ee37591d',  # politics (时政)
        '5d52be161b011b137b853d18',  # Feng'an channel (奉安号)
        '5d075f1e1b011b68176a8a00',  # civility (文明)
        '5f103ebaad61a40f3c8cce88',  # help column (帮忙老哥)
        '5cc2cc981b011b18ee37591c',  # education (教育)
        '5d64c2ea1b011b2a0fbba127',  # administrative law enforcement (行政执法)
        '5cc2cca7b1985017d6fef816',  # photography (摄友天地)
        '5cc2cc821b011b18ee37591b',  # art & culture (书画文乐)
        '61b1aea9ad61a42065f901c9',  # Pujiang flavors (浦江有滋味)
        '622b01cdfe3fc10794f6c747',  # Pujiang art column (浦贤绘浦)
    ]

    def start_requests(self):
        """Issue one signed list request per channel.

        The millisecond timestamp serves both as the "start" cursor in the
        URL and as the X-TIMESTAMP folded into the signature.
        """
        for chl in self.channels:
            _t = int(time.time() * 1e3)  # current time in milliseconds
            yield Request(url=self.url.format(chl, _t), headers=gen_headers(_t))

    def parse(self, response, **kwargs):
        """Parse a channel_list JSON response and follow each article URL.

        Merges the "focus_list" and "article_list" arrays; tolerates a
        missing or null "data" envelope instead of raising KeyError.
        """
        resp = response.json()
        data = resp.get('data') or {}
        target_li = []
        for name in ('focus_list', 'article_list'):
            target_li += data.get(name) or []
        for dt in target_li:
            item = AppSpdItem()
            item['title'] = dt.get('doc_title', '')
            item['author'] = dt.get('author', '')
            item['url'] = dt.get('url', '')
            if not item['url']:
                # Request(url='') would raise ValueError and abort the batch.
                continue
            yield Request(url=item['url'], callback=self.parse_detail,
                          meta={'item': item}, headers=web_headers)

    def parse_detail(self, response):
        """Extract content and publish time from the article web page."""
        item = response.meta.get('item')
        content_ = ''.join(
            response.xpath('//div[@class="newshare-content"]').getall())
        item['content'] = analysis(content_)
        item['pubtime'] = response.xpath(
            '//div[@class="text-from"]/span[@class="text-time"]/text()').get()
        # Use the class attributes instead of re-hardcoding the literals;
        # cast keeps the int type downstream consumers received before.
        item['websiteId'] = int(self.websiteId)
        item['pubSource'] = self.pubSource
        item['urlid'] = get_urlid(item['url'])
        yield item
