# -*- coding: utf-8 -*-
import scrapy
from urllib import parse
from jianshu.items import JianshuUserItem, JianShuArticle, JianShuArticleDetail

class JianshuContentSpider(scrapy.Spider):
    """Crawl jianshu.com starting from one seed user.

    From the seed user it walks:
      * the user's article list (paged), emitting one ``JianShuArticle``
        per entry and requesting each article's detail page;
      * the user's followees list (paged), emitting one ``JianshuUserItem``
        per entry and recursing into each followee's articles and followees.
    """
    name = 'jianshu_content'
    allowed_domains = ['www.jianshu.com']

    # URL templates: {user} is a user token, {page} a 1-based page number,
    # {article_token} the trailing path segment of an article URL.
    user_article_list = 'https://www.jianshu.com/u/{user}?order_by=shared_at&page={page}'
    start_user = '898bb4ca481d'
    user_follow_users = 'https://www.jianshu.com/users/{user}/following?page={page}'

    article_detail = 'https://www.jianshu.com/p/{article_token}'

    @staticmethod
    def _page_from_url(url):
        """Return the current ``page`` query parameter of *url* as an int.

        Raises KeyError/ValueError if the URL has no integer ``page``
        parameter; every URL this spider requests carries one.
        """
        query = parse.urlsplit(url).query
        return int(parse.parse_qs(query)['page'][0])

    @staticmethod
    def _user_from_url(url):
        """Return the user token embedded in a list URL.

        Works for both templates: ``/u/{user}?...`` and
        ``/users/{user}/following?...`` — the token is path segment 4,
        with any query string stripped.
        """
        return url.split('/')[4].split('?')[0]

    def start_requests(self):
        """Seed the crawl with the start user's first article page and
        first followees page."""
        yield scrapy.Request(self.user_article_list.format(user=self.start_user, page=1),
                             self.parse_user_article_list)
        yield scrapy.Request(self.user_follow_users.format(user=self.start_user, page=1),
                             self.parse_user_follees_list)

    def parse_user_article_list(self, response):
        """Parse one page of a user's article list.

        Emits a ``JianShuArticle`` per entry, requests each article's
        detail page, then requests the next list page unless the
        end-of-list marker was seen.
        """
        list_selector = response.xpath('//ul[@class="note-list"]//li')
        has_more = True
        user_token = self._user_from_url(response.url)
        for item in list_selector:
            title = item.xpath('.//a[@class="title"]/text()').extract_first()
            href = item.xpath('.//a[@class="title"]/@href').extract_first()
            if href is None:
                # Defensive: entry without a title link — skip instead of
                # crashing on None.split() (extract_first() returns None
                # when the xpath matches nothing).
                continue
            article_token = href.split('/')[-1]
            expert = item.xpath('.//p[@class="abstract"]/text()').extract_first()
            time = item.xpath('.//div[@class="meta"]//span[@class="time"]/@data-shared-at').extract_first()
            small_image = item.xpath('.//img[@class="img-blur-done"]/@src').extract_first()

            article_item = JianShuArticle()
            article_item['article_token'] = article_token
            article_item['title'] = title
            article_item['user_token'] = user_token
            article_item['expert'] = expert
            article_item['time'] = time
            article_item['small_image'] = small_image

            yield article_item
            yield scrapy.Request(self.article_detail.format(article_token=article_token),
                                 self.parse_article_detail)

            # NOTE(review): the presence of a p.comment element is treated
            # as the end-of-list marker and stops pagination — confirm this
            # matches the site's markup; behavior preserved from original.
            comment = item.xpath('.//p[@class="comment"]/text()')
            if len(comment) > 0:
                has_more = False
                break

        if has_more:
            newpage = self._page_from_url(response.url) + 1
            yield scrapy.Request(self.user_article_list.format(user=user_token, page=newpage),
                                 self.parse_user_article_list)

    def parse_user_follees_list(self, response):
        """Parse one page of a user's followees list.

        Emits a ``JianshuUserItem`` per followee and recurses into each
        followee's own followees and article list.
        """
        list_selector = response.xpath('//div[@id="list-container"]//ul[@class="user-list"]//li')
        for item in list_selector:
            name = item.xpath('.//a[@class="name"]/text()').extract_first()
            href = item.xpath('.//a[@class="name"]/@href').extract_first()
            if href is None:
                # Defensive: entry without a profile link — skip instead of
                # crashing on None.split().
                continue
            user_token = href.split('/')[-1]
            avatar = item.xpath('.//a[@class="avatar"]//img/@src').extract_first()

            jianshu_user = JianshuUserItem()
            jianshu_user['name'] = name
            jianshu_user['user_token'] = user_token
            jianshu_user['avatar'] = avatar
            yield jianshu_user

            # Recurse: crawl the followee's followees and articles.
            # Scrapy's built-in dupe filter prevents infinite revisits.
            yield scrapy.Request(self.user_follow_users.format(user=user_token, page=1),
                                 self.parse_user_follees_list)
            yield scrapy.Request(self.user_article_list.format(user=user_token, page=1),
                                 self.parse_user_article_list)

        # A full page (heuristically >= 9 entries) suggests another page exists.
        if len(list_selector) >= 9:
            # BUG FIX: the original took the last *character* of the URL as
            # the page number (url[len(url) - 1]), which breaks once
            # page >= 10 (e.g. page=12 -> '2'). Parse the query string
            # instead, consistent with parse_user_article_list.
            newpage = self._page_from_url(response.url) + 1
            user = self._user_from_url(response.url)
            yield scrapy.Request(self.user_follow_users.format(user=user, page=newpage),
                                 self.parse_user_follees_list)

    def parse_article_detail(self, response):
        """Extract the article body HTML and emit a ``JianShuArticleDetail``.

        The article token is recovered from the response URL's last path
        segment (the detail URL template ends with the token).
        """
        result = response.xpath('//div[@class="show-content-free"]').extract()
        # Join all matched fragments with a single space separator.
        body = " ".join(result)
        print('文章详情')

        article_token = response.url.split('/')[-1]
        print(body)
        detail = JianShuArticleDetail()
        detail["article_token"] = article_token
        detail["article_detail"] = body
        yield detail

