# coding:utf-8
import ast
from datetime import datetime
from urllib import parse as url_parse

from config.conf import get_max_search_page
from db.dbbase import DBBase
from logger.log import crawler
from page_get.basic import get_page
from page_parse import search as parse_search
# from db.wb_data import insert_weibo_data, get_wb_by_mid# ----mysql

# This url is just for original weibos.
# If you want other kind of search, you can change the url below
url = 'http://s.weibo.com/weibo/{}&scope=ori&suball=1&page={}'
# Exclusive upper bound for the page loop in search_keyword: pages 1..max are crawled.
limit = get_max_search_page() + 1
# Shared MongoDB accessor, reused by every search_keyword call in this module.
dbbase = DBBase()

def search_keyword(keyword, name_cookies, adminid):
    """Crawl the original-weibo search result pages for ``keyword`` and store new posts.

    Fetches up to ``limit - 1`` result pages with the given login cookies,
    skips weibos already present in MongoDB, and inserts each new weibo
    annotated with the search keyword, the admin id and the crawl time.

    :param keyword: search keyword; URL-quoted before being placed in the URL
    :param name_cookies: login cookies passed through to get_page
    :param adminid: id of the admin who issued this search task
    :return: None; stops early when a page fetch fails or no next page exists
    """
    cur_page = 1
    encode_keyword = url_parse.quote(keyword)
    while cur_page < limit:
        cur_url = url.format(encode_keyword, cur_page)
        search_page = get_page(cur_url, name_cookies)
        if not search_page:
            crawler.warning('No result for keyword {}, the source page is {}'.format(keyword, search_page))
            return
        search_list = parse_search.get_search_info(search_page)
        for wb_data in search_list:
            # Only insert weibos we have not stored in an earlier crawl.
            data_id = dbbase.find_weibo_byid(wb_data.weibo_id)
            if data_id is None:
                wb_data.weibo_keyword = keyword
                wb_data.weibo_adminid = adminid
                wb_data.searchtime = str(datetime.now())
                # Strip quotes and newlines from the content so that
                # str(wb_data) below stays parseable as a dict literal.
                data_re = str(wb_data.weibo_cont).replace("'", '').replace("\n", '')
                wb_data.weibo_cont = data_re
                # str(wb_data) is assumed to render the record as dict-literal
                # entries (TODO confirm against the WeiboData model).
                # ast.literal_eval only accepts Python literals, unlike the
                # previous eval(), which would have executed arbitrary code
                # embedded in scraped, untrusted text.
                data_eval = ast.literal_eval('{' + str(wb_data) + '}')
                dbbase.insert_weibo(data_eval)
        # The "next page" CSS marker only appears when more result pages exist.
        if 'page next S_txt1 S_line1' in search_page:
            cur_page += 1
        else:
            crawler.info('keyword {} has been crawled in this turn'.format(keyword))
            return

if __name__ == '__main__':

    # NOTE(review): hard-coded account id and session cookies for a manual
    # smoke test. These are credentials and should not live in source control;
    # load them from configuration instead.
    cookies =['18021630321',{'tgc': 'TGT-NTc1MTAyMTc0NQ==-1501054815-gz-2AF8BDA25B62DC3CF8161F62F88FA0FC-1', 'YF-Page-G0': '0acee381afd48776ab7a56bd67c2e7ac', 'SCF': 'AribhD1DGy6oW3h5bo0SXJ4xLcrmlNYJmJiKMis6fUlE_w2BHVsoGcDOpHwCGusvjCdrsQ62CgBSIK-Gk6ytiVE.', 'SUHB': '0cSXYea8GqjNF9', 'ALC': 'ac%3D0%26bt%3D1501054815%26cv%3D5.0%26et%3D1532590815%26ic%3D-572352314%26scf%3D%26uid%3D5751021745%26vf%3D0%26vs%3D0%26vt%3D0%26es%3D649a8ae28c05ee413a03ca8474c48bad', 'LT': '1501054815', 'SUB': '_2A250fDcPDeRhGeNJ7lMR8i_LzzmIHXVXCC_HrDV8PUNbmtBeLVLTkW-f1CL7cyn2CtKfczs48YseVhasJA..', 'ALF': '1532590815', 'SSOLoginState': '1501054815', 'sso_info': 'v02m6alo5qztKWRk5yljpOQpZCToKWRk5iljoOgpZCjnLWNs5SxjIOIsY2zkLWJp5WpmYO0tY2zlLGMg4ixjbOQtQ==', 'SRT': 'D.QqHBJZ4nS!MZ4mMb4cYGS4u1i-oi4ZY9Qrow5csHNEYddmiDQrYpMERt4EPKRcsrAcPJP!o4TsVkSZEDODWZ5dtpA3MISciwVqAtdQibPcYYVr9BI8t7*B.vAflW-P9Rc0lR-ykeDvnJqiQVbiRVPBtS!r3JZPQVqbgVdWiMZ4siOzu4DbmKPVsRQ43JmuaNdH3TFilNm9IUFklPc9ki49ndDPIJcYPSrnlMc0kObiIJZWbUqHK4-XkJcM1OFyHJDPJ5mjkODEIS4noTmsJ5mjkODEIS4noTFuJ5mjkODEIS4noTFuJ5mjkODEIS4noTFuJ5mjkODEIS4noTGbeP4ooWv77', 'SRF': '1501054815', 'SUBP': '0033WrSXqPxfM725Ws9jqgMF55529P9D9W5au70N_IbwhcnHJKlknVjm5JpX5K2hUgL.Fo-NSK27eo2NSh-2dJLoI05LxK-L1KeL1hMLxK-L1KeL1hnLxK-L1KeL1hnLxK-L1KeL1hnLxK-L1KeL1hygULqt', 'YF-Ugrow-G0': 'b02489d329584fca03ad6347fc915997'}]

    # Crawl the keyword "算法" ("algorithm") on behalf of admin '123'.
    search_keyword('算法', name_cookies=cookies, adminid='123')