from library import filter
import json


class baseScrawler:
    """Base crawler: holds shared scraping state and result-persistence helpers.

    Subclasses are expected to override :meth:`json_parse` and append
    parsed records to ``self.res``.
    """

    def __init__(self, start_date, num):
        """Initialize crawler state.

        Args:
            start_date: earliest date of articles to collect (opaque here;
                interpreted by subclasses).
            num: number of items to collect (opaque here; used by subclasses).
        """
        self.start_date = start_date
        self.filter = filter.Filter()   # project-local content filter
        self.allurl = []                # links already seen/saved (dedup list)
        self.mark = 0
        self.final_all = []
        self.num = num
        self.res = []                   # collected result records
        # Chinese security-category name -> English tag slug.
        # NOTE(review): removed the duplicate empty-dict assignment that was
        # immediately overwritten, and fixed " vuls" -> "vuls" (the stray
        # leading space was inconsistent with every other tag value and
        # looked like a typo).
        self.tag_list = {
            "Web安全": "web",
            "网络安全": "network",
            "终端安全": "endpoint",
            "数据安全": "database",
            "系统安全": "system",
            "漏洞": "vuls",
            "工具": "sectool",
        }

    def json_parse(self, json_text):
        """Parse a crawler-specific JSON payload; subclasses must override."""
        pass

    def unique(self, dic, column="author_link"):
        """Return True if ``dic[column]`` has not already been saved.

        Checks the value against ``self.allurl``; a missing key yields
        ``None``, which counts as unseen unless ``None`` is in the list.
        """
        return dic.get(column) not in self.allurl

    def print_final_all(self):
        """Print every collected result, then a trailing banner line."""
        for item in self.res:
            print(item)
        print("-----------------4ny0ne Sec--------------------")

    def get_res(self):
        """Return the list of collected results."""
        return self.res

    def save_file(self, filename):
        """Append ``self.res`` as one JSON document (plus a blank line) to *filename*.

        Prints a warning banner instead of raising when the write fails.
        """
        try:
            # 'with' guarantees the handle is closed even if a write raises
            # (the original leaked the handle on write failure); explicit
            # UTF-8 keeps output locale-independent.
            with open(filename, 'a+', encoding='utf-8') as f:
                f.write(json.dumps(self.res, ensure_ascii=False))
                f.write("\n\n")
        except IOError:
            print("-----------------Save Wrong--------------------")