# -*- coding: utf-8 -*-
# encoding:utf-8

import sys
# Python 2 hack: setdefaultencoding() is deleted from sys by site.py,
# so reload(sys) is needed to get it back.
reload(sys)
# Force UTF-8 as the implicit codec so mixing str/unicode with Chinese
# text (e.g. the print_data_out labels) does not raise UnicodeDecodeError.
sys.setdefaultencoding("utf-8")

# import gevent.monkey
# gevent.monkey.patch_socket()
# gevent.monkey.patch_ssl()

import requests,urllib,json,re
from BeautifulSoup import BeautifulSoup
from lxml import html
from db import Follower,engine
from sqlalchemy.orm import scoped_session, sessionmaker
from red_filter import check_url, re_crawl_url
from requests.packages.urllib3.exceptions import InsecureRequestWarning

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)


class Follower_Crawler():

    '''
    basic crawler

    '''

    def __init__(self, url, option="print_data_out"):
        '''
        initialize the crawler

        '''
        self.option = option
        self.req = requests.Session()
        self.url = url
        self.follower = []
        self.header = {}
        self.header[
            "User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36"
#        self.header["Host"]="www.zhihu.com"
        # self.header["Referer"] = "www.zhihu.com"
        self.db = scoped_session(sessionmaker(bind=engine,
                                              autocommit=False, autoflush=True,
                                              expire_on_commit=False))
        # cookie
        self.cookies = {"z_c0": '"QUFEQVNGNDVBQUFYQUFBQVlRSlZUV0tzWWxkazJhX1pZdlVEbGhFLXRDeC1ZUGF0cEhmNzJRPT0=|1463492450|8f274b4f54bed946453b9e4e2e57469cae3adab6"',
                        # "unlock_ticket": 'QUZDQUp3czV3QWtYQUFBQVlRSlZUZnBxQ2xmSWNXX3NuVXo3SVJleUM5Uy1BLUpEdXJEcEpBPT0',
                        "login": '"NmFjZTUyMWY5MjRjNDExNjhiOWVhOWExYWRiODA5OGU=|1463492450|4e32299a48c9f88e90ab1c3d57d604adac78cfe6"',
                        # "n_c": "1",
                        # "_xsrf":'df8d2ad3ee9785d1140554ec1fe5d50b',
                        "q_c1": "dd6df8f987c24912b353c91461ee6581|1463492420000|1463492420000",
                        "l_cap_id": '"NGYwY2JmZDRlNTA3NDg3ZDhjZWVhMDBhMWU3YzRlZGY=|1463492420|eac9c18d068824229bb5bcda87391e2bcf028222"',
                        # "d_c0": '"AIAAACopoQmPTli2giY01wIt-yARroMbkw8=|1458214140"',
                        "cap_id": '"MTBkN2JlNmRmZTdiNDI1NGE3MzM1OGI5MmNmM2E3YmU=|1463492420|d000f2d282af615a905124ee12abb37184fb912c"'}

    def send_request(self):
        '''
        send a request to get HTML source

        '''
        added_followee_url = self.url + "/followees"
        try:
            r = self.req.get(added_followee_url, cookies=self.cookies,
                             headers=self.header, verify=False)
        except:
            re_crawl_url(self.url)
            return

        content = r.text
    	self.hash_id = re.findall("hash_id&quot;: &quot;(.*)&quot;},", r.text)[0]
        self.xsrf = r.headers['Set-Cookie'].split(";")[0].split("=")[1]
        self.cookies['_xsrf'] = self.xsrf
        # print content
        if r.status_code == 200:
            self.parse_user_profile(content)

    def parse(self,content):
        return_json = json.loads(content)
        content = return_json['msg']
        # print content[0]
        for i in content:
            tree = html.fromstring(i)
            self.follower.append((tree.xpath("//h2[@class='zm-list-content-title']/a/@href")[0]).split("/")[-1])

    def get_follower(self,offset):
    	follower_url = "http://www.zhihu.com/node/ProfileFolloweesListV2"
        # follower_url = "https://www.zhihu.com/node/ProfileFollowersListV2"
    	paramas = json.dumps({"offset":offset,"order_by":"created","hash_id":self.hash_id})
    	data = {
    		"method":"next",
    		"params":paramas,
    		"_xsrf":self.xsrf
    	}
    	header = {
                'User-Agent': "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0",
                'Host': "www.zhihu.com",
                'Origin':'https://www.zhihu.com',
                'Referer': self.url+ "/followees"
            }
    	try:
    		r = requests.post(follower_url,headers=header,cookies = self.cookies,data = data,verify=False)
    	except Exception,e:
    		print "error",str(e)
    	content = r.text
    	
    	self.parse(content)


    def process_xpath_source(self, source):
        if source:
            return source[0]
        else:
            return ''

    def parse_user_profile(self, html_source):
        '''
        parse the user's profile to mongo
        '''

        # initialize variances
        tree = html.fromstring(html_source)

        # parse the html via lxml
        try:
            self.user_followees = tree.xpath(
                "//div[@class='zu-main-sidebar']//strong")[0].text
            self.user_followers = tree.xpath(
                "//div[@class='zu-main-sidebar']//strong")[1].text
        except Exception,e:
            print str(e)
            return
        self.user_id = tree.xpath("//button[@class='zg-btn zg-btn-follow zm-rich-follow-btn']/@data-id")[0]

        # find the follower's url
        url_list = tree.xpath("//h2[@class='zm-list-content-title']/a/@href")
        for target_url in url_list:
            # print target_url
            # target_url = target_url.replace("https", "http")
            self.follower.append(target_url.split("/")[-1])
            # check_url(target_url)
        self.user_followees = self.user_followees if int(self.user_followees)<1000 else 1000
        pagenumber = int(self.user_followees)/20
        for i in range(1,pagenumber+1):
            self.get_follower(i*20)
        self.store_data_to_db()
        # self.get_follower(20)

    def print_data_out(self):
        '''
        print out the user data
        '''

        print "被关注:%s\n" % self.user_followers
        print "关注了:%s\n" % self.user_followees

    def store_data_to_db(self):
        '''
        store the data in mongo
        '''
        source = self.url.split("/")[-1]
        follower = []
        for i in self.follower:
            follower.append({
                'source':source,
                'target':i
                })
        self.db.execute(Follower.__table__.insert(),follower)
        self.db.commit()

def main():
    '''
    Entry point: crawl the followees of a single seed profile.
    '''
    seed_url = "https://www.zhihu.com/people/gaoming623"
    crawler = Follower_Crawler(url=seed_url)
    crawler.send_request()


if __name__ == '__main__':
    main()