import models
from Crawler import Crawler


class ZhihuSpider(object):
    """Spider that fetches Zhihu articles through a pool of authenticated accounts.

    Collaborators (`crawler`, `parser`) and the seed URL list are wired up
    lazily by calling initialization() after construction.
    """

    def __init__(self):
        # Filled in by initialization(); None/empty until then.
        self.crawler = None
        self.parser = None
        self.urls = []

    def initialization(self):
        """Build the account pool, the Crawler, and the seed URL list.

        NOTE(review): authorization tokens and device ids are hard-coded in
        source. Move them to a config file or environment variables before
        sharing or publishing this code.
        """
        account_1 = {
            'authorization': 'Bearer 2.0AHBCvu_LCQsAAEKugcsJCwwAAABgAlVNgWyDWAB6-GPE6WOwdpItmZ8EsX6uv9Fg2Q',
            'udid': 'AABCroHLCQtLBVhYrOoMehpY12XJxAo8ZLo=',
        }

        account_2 = {
            'authorization': 'Bearer 2.0AACCJrLVEgsAEALEP9QSCwwAAABgAlVNy62MWADedTm3g61v7tZnBdxthe-izil7zg',
            'udid': 'ABACxD_UEgtLBUHK7obSqeI3Zt3Vef2Vofo=',
        }

        account_a = models.Account(**account_1)
        account_b = models.Account(**account_2)
        self.crawler = Crawler([account_a, account_b])
        self.urls.append('https://api.zhihu.com/articles/24112818')

    def test_crawling(self):
        """Fetch every seed URL once, printing each URL as it is crawled.

        Requires initialization() to have been called first, otherwise
        self.crawler is None.
        """
        for url in self.urls:
            # print(url) with a single argument behaves identically on
            # Python 2 and 3; the original `print url` statement is a
            # SyntaxError under Python 3.
            print(url)
            self.crawler.fetch_data(url)
