# -*- coding: utf-8 -*-
import requests
from django.core.management.base import BaseCommand
import datetime
from django.utils import timezone
from blog import services
from bs4 import BeautifulSoup


class Command(BaseCommand):
    """Django management command that crawls articles from yinwang.org.

    Fetches the blog index page, extracts each article's title, slug and
    publication date (parsed from the article URL), then downloads every
    article body and stores it via the ``blog.services`` helpers.
    """
    help = 'get article from yinwang.org'

    def add_arguments(self, parser):
        """Register the ``--top`` option limiting how many articles to fetch.

        A value <= 0 (the default -1) means "fetch everything".
        """
        parser.add_argument('--top', type=int, default=-1,
                            help='latest article num')

    def handle(self, *args, **options):
        """Entry point: scrape the index page and store each article."""
        top = int(options['top'])
        self.host = 'http://www.yinwang.org'
        self.session = requests.Session()
        self.site = self.get_site()
        try:
            html = self.url_gettext(self.host)
            soup = BeautifulSoup(html, 'html.parser')
            articles = []
            for li in soup.find_all('li', class_='list-group-item title'):
                if top > 0 and len(articles) >= top:
                    break
                link = li.find('a')
                url = link.get('href')
                # Derive a flat slug from the article URL, e.g.
                # ".../blog-cn/2019/01/01/foo" -> "2019-01-01-foo".
                short_url = url.replace(
                    'http://yinwang.org/blog-cn/', '').replace('/', '-')
                # The first 10 characters of the slug are the ISO date.
                # (No '/' can remain here — all were replaced above.)
                post_at = timezone.make_aware(datetime.datetime.strptime(
                    short_url[0:10].strip(), "%Y-%m-%d"))
                article = dict(title=str(link.text), slug=short_url,
                               post_at=post_at, author_name='yinwang',
                               tags=['yinwang', ],
                               object_key_str=short_url, source_url=url)
                articles.append(article)
                self.parse_article(article)
        finally:
            # Always release the HTTP connection pool, even if a fetch fails.
            self.session.close()

    def url_get(self, url):
        """GET *url* with the shared session, logging the request."""
        self.stdout.write(self.style.SUCCESS('GET %s' % (url, )))
        return self.session.get(url)

    def url_gettext(self, url):
        """GET *url* and return the response body as text."""
        res = self.url_get(url)
        return res.text

    def url_getjson(self, url):
        """GET *url* and return the parsed JSON body."""
        res = self.url_get(url)
        # Bug fix: .json() must be called — the original returned the
        # bound method object instead of the decoded payload.
        return res.json()

    def get_site(self):
        """Return the Site record for yinwang.org via blog.services."""
        return services.get_site(self.host, 'yinwang')

    def parse_article(self, article):
        """Download one article body and queue it for asynchronous saving."""
        source_content = self.url_gettext(article['source_url'])
        soup = BeautifulSoup(source_content, 'html.parser')
        # The article body is taken as the page's first <div>; fall back
        # to <body> when the page has no <div> at all.
        body = soup.find('div')
        if not body:
            body = soup.body
        article['content'] = str(body)
        article['description'] = ''
        article['source_content'] = source_content
        services.save_article_async(self.site.id, article)

    def parse_articles(self, articles):
        """Batch variant of parse_article: fetch all bodies, then save
        them synchronously in a single services call."""
        for article in articles:
            source_content = self.url_gettext(article['source_url'])
            soup = BeautifulSoup(source_content, 'html.parser')
            body = soup.find('div')
            if not body:
                body = soup.body
            article['content'] = body.prettify()
            article['description'] = ''
            article['source_content'] = source_content
        self.save_articles(articles)

    def save_articles(self, articles):
        """Persist *articles* against this command's site."""
        services.save_articles(self.site, articles)
