import os
import pandas as pd

from blog_analysis.models import Article


class SomeoneStatistical:
    """Load crawler-produced article rows from a CSV and build Article objects."""

    # CSV columns, listed in the positional order of Article's constructor.
    _FIELDS = ("url", "title", "time", "watchs", "readDuration", "column",
               "likes", "comments", "stars", "author", "author_url",
               "author_lever", "author_articles", "author_watchs",
               "author_fans", "category", "topic")

    def __init__(self):
        # Parsed Article objects; filled in by run().
        self.articles = []
        # Project base directory: two levels above this file.
        self.my_BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        # csv_path: the CSV file produced by the crawler.
        self.csv_path = os.path.join(self.my_BASE_DIR, 'static', 'csv_collect', 'deep_articles.csv')

    def run(self):
        """Read the CSV at ``self.csv_path`` and return the built Article list.

        Returns:
            list: one ``Article`` per CSV row, also stored on ``self.articles``.

        Raises:
            FileNotFoundError: if the CSV does not exist.
            KeyError: if an expected column is missing from the CSV.
        """
        df = pd.read_csv(self.csv_path, sep=",", header=0, encoding="utf-8")
        # Reset first so repeated run() calls do not accumulate duplicates.
        self.articles = []
        # astype(str) mirrors the original per-cell str(...) conversion
        # (NaN becomes the string 'nan', matching str(float('nan'))).
        rows = df[list(self._FIELDS)].astype(str)
        # itertuples yields one tuple per row in _FIELDS order, so the values
        # line up positionally with Article's constructor.
        for row in rows.itertuples(index=False):
            self.articles.append(Article(*row))
        return self.articles


# Script entry point: build the statistics collector and run it,
# reporting any failure instead of letting the traceback escape.
if __name__ == "__main__":
    try:
        SomeoneStatistical().run()
    except Exception as error:
        print("错误:", error)
