#!/usr/bin/env python
from crawler.crawler import SinaCrawler
from crawler.crawler import ConstantsPool
import crawler.utils
import traceback
import os.path

if __name__ == '__main__':
    # Driver script: restore (or re-acquire) login cookies, then crawl the
    # trending-topic feeds defined in ConstantsPool.trtopic_link_prefix.
    spider = SinaCrawler()
    # domain -> cookie jar; None means "not loaded from disk yet".
    cookies = {'weibo.com': None}
    for cookie_name in cookies:
        path = '.cookies/' + cookie_name
        if os.path.exists(path):
            # Reuse the previously dumped cookie file instead of re-logging in.
            cookies[cookie_name] = \
                    crawler.utils.loadCookies(path)
        else:
            print("Cookie for: " + cookie_name + " not found! \nStarting to relogin...",end='')
            login_ok = False
            try:
                spider.doLogin()
                spider.getWeiboCom()
                cookies = spider.cookies
                login_ok = True
            except Exception:
                traceback.print_exc()
            finally:
                # Login uses a throwaway session; the crawl below reattaches
                # the cookies to the spider explicitly.
                spider.closeSession()
            # BUG FIX: the original printed "Logined" and dumped the (possibly
            # still-None) cookies even when doLogin() raised.  Only report
            # success and persist cookies after a successful login.
            if login_ok:
                print("\tLogined\n", end='')
                print("Dumping cookies ",end='')
                for domain, cookie in cookies.items():
                    crawler.utils.dumpCookies(cookie, '.cookies/' + domain)
                    print('.', end='')
                print('\n', end='')
            break
    spider.is_login = True
    spider.cookies = cookies
    try:
        print('Start to work.')
        #ret = spider.getIndexHotNew()
        rank_type = 4
        for tr_type, link in ConstantsPool.trtopic_link_prefix.items():
            # Only the 'topic_rank' feed exposes an explicit ranking column.
            has_rank = (tr_type == 'topic_rank')
            if tr_type == 'rank4_start4':
                # Sentinel entry: records the rank type, nothing to crawl.
                rank_type = 4
                continue
            if tr_type == 'local_topic':
                # Crawl the per-city topic feeds (mode 2) first.
                for city, code in ConstantsPool.city_codes.items():
                    topics = spider.trtopicCrawler(link, rank_type, 2, has_rank, code)
                    print(topics)
                # NOTE(review): control falls through to the generic mode-0/1
                # crawls below — confirm this is intended and not a missing
                # `continue`.
            print(spider.trtopicCrawler(link, rank_type, 0, has_rank))
            print(spider.trtopicCrawler(link, rank_type, 1, has_rank))
    except Exception:
        traceback.print_exc()
    finally:
        spider.closeSession()


