#!/usr/bin/env python3
#-*- encoding:utf8 -*-

import requests
from json import loads
from myDatabase import db_session
from myModels import myArticle
from dateutil.parser import parse


# Endpoint of the aggregated journalism feed; response JSON has a "data"
# object keyed by channel, each value a list of article dicts.
url = "https://www.apiopen.top/journalismApi"
# Snapshot of docids already stored, taken once at import time.
# Each row is a 1-tuple, e.g. ("DOCID123",) — see the membership test below.
docids = db_session.query(myArticle).with_entities(myArticle.docid).all()

def _get(article):
    """Map a raw API article dict to the column/value pairs we persist.

    The publish time string is converted to a datetime; every other
    field is copied through unchanged.
    """
    return {
        "docid": article["docid"],
        "ptime": parse(article["ptime"]),
        "channel": article["channel"],
        "category": article["category"],
        "link": article["link"],
        "source": article["source"],
        "title": article["title"],
        "digest": article["digest"],
        "unlikeReason": article["unlikeReason"],
    }

def get_article():
    """Fetch the journalism feed and return a list of article dicts.

    Returns:
        list[dict]: one mapping per article (see ``_get``) for every
        article in the response that carries a non-empty docid.

    Raises:
        requests.HTTPError: if the API responds with a 4xx/5xx status.
        requests.Timeout: if the server does not respond in time.
    """
    # Always pass a timeout: requests.get() with none can block forever.
    response = requests.get(url, timeout=10)
    # Fail fast on HTTP errors instead of trying to parse an error page.
    response.raise_for_status()
    result = response.json()
    articles = []
    # "data" maps channel name -> list of articles; the channel key
    # itself is not needed, so iterate values only.
    for channel_articles in result["data"].values():
        for article in channel_articles:
            # Skip entries with an empty/missing docid — they cannot be
            # deduplicated against the database.
            if article["docid"]:
                articles.append(_get(article))
    return articles

def update_database(articles):
    """Insert every article whose docid is not yet in the database.

    Args:
        articles: list of column/value dicts as produced by ``_get``.

    Commits after each insert so a failure partway through keeps the
    rows already written (same behavior as the original per-row commit).
    """
    # Flatten the (docid,) 1-tuples into a set: O(1) membership tests,
    # and mutable so we can track docids inserted during this run.
    seen = {docid for (docid,) in docids}
    for article in articles:
        if article["docid"] not in seen:
            db_session.add(myArticle(**article))
            db_session.commit()
            # Record the insert so a duplicate docid later in the same
            # batch is not inserted twice (the module-level snapshot is
            # never refreshed).
            seen.add(article["docid"])

def spider_run():
    """One full scrape cycle: fetch the feed, then persist new articles."""
    update_database(get_article())

# Allow running this module directly as a one-shot scraper.
if __name__ == "__main__":
    spider_run()
