import datetime
from selenium.webdriver.common.by import By
from bs4 import BeautifulSoup as bf
from paras import *

# Root directory for all crawl output — presumably platform-dependent
# (resolved by paras.GET_PLATFORM); TODO confirm against paras.py.
store_path = GET_PLATFORM()

def filter_baoliao_url(url):
    """Return True if *url* looks like an smzdm baoliao (deal post) URL.

    Accepted shape: https://www.smzdm.com/p/<numeric id>[/...]
    e.g. "https://www.smzdm.com/p/46908363".

    Returns False for any other URL (fix: the original fell through and
    returned None on non-matching hosts/paths, and used a bare except).
    """
    parts = url.split("/")
    try:
        # parts: [scheme, '', host, 'p', id, ...]
        return (
            parts[2] == "www.smzdm.com"
            and parts[3] == "p"
            and bool(int(parts[4]))  # id must be numeric (and non-zero)
        )
    except (IndexError, ValueError):
        # too few path segments, or a non-numeric id
        return False

def culcu_months(date):
    """Return the approximate number of months between *date* and today.

    *date* may be:
      - "YYYY-MM-DD"          -> used as-is
      - "MM-DD[ extra]"       -> current year is assumed; anything after a
                                 space (e.g. a time) is ignored
      - anything with no "-"  -> treated as today (returns 0)

    A "month" is counted as 30 days, truncated toward zero — same
    approximation as the original implementation.

    Fixes: dropped the redundant strftime/split round-trip, the `data1`
    typo, and the unbound-variable path for strings with 3+ dashes.
    """
    today = datetime.date.today()
    parts = date.split("-")

    if len(parts) == 1:
        # no date information -> zero months elapsed
        past = today
    elif len(parts) == 2:
        # "MM-DD" (possibly followed by " HH:MM"): assume the current year
        month, day = date.split(" ")[0].split("-")
        past = datetime.date(today.year, int(month), int(day))
    else:
        past = datetime.date(int(parts[0]), int(parts[1]), int(parts[2]))

    return int((today - past).days / 30)

def enter_baoliao(browser, UGC_sum):
    """Click the baoliao tab on the profile page if the user has any baoliao."""
    if "爆料" not in UGC_sum:
        return
    tab_link = browser.find_element(By.CSS_SELECTOR, "body > div.container > div.cont-left > ul > li.navtab-title.baoliao > a")
    tab_link.click()

def enter_comment(browser, UGC_sum):
    """Click the comment tab on the profile page if the user has any comments."""
    if "评论" not in UGC_sum:
        return
    tab_link = browser.find_element(By.CSS_SELECTOR, "body > div.container > div.cont-left > ul.navtab > li.navtab-title.comment > a")
    tab_link.click()

def get_browser_lite(url):
    """Open *url* in a Chrome browser with JavaScript disabled.

    Disabling JS makes page loads lighter for static scraping.
    Returns the live WebDriver; the caller is responsible for closing it.
    """
    options = webdriver.ChromeOptions()
    prefs = {
        'profile.default_content_setting_values': {
            # 'images': 2,      # left disabled: images are still loaded
            'javascript': 2,    # 2 == block
        }
    }
    options.add_experimental_option('prefs', prefs)

    # fix: the `chrome_options=` keyword is deprecated (removed in Selenium 4);
    # `options=` is the supported spelling and works in Selenium 3.8+ as well.
    browser = webdriver.Chrome(options=options)
    browser.get(url)
    return browser

def try_select_element(doc_model, css_selector):
    """Return the first element matching *css_selector*, or None.

    None is returned both when nothing matches and when the select call
    itself fails (fix: the original used a bare except, which would also
    swallow KeyboardInterrupt/SystemExit).
    """
    try:
        matches = doc_model.select(css_selector)
    except Exception:
        return None
    return matches[0] if matches else None

def try_select_elements(doc_model, css_selector):
    """Return all elements matching *css_selector*, or None if the query fails.

    An empty match yields an empty list (not None); None only signals that
    the select call raised (fix: bare except narrowed to Exception).
    """
    try:
        return doc_model.select(css_selector)
    except Exception:
        return None

def strip_str(text):
    """Return *text* with every newline, carriage return and space removed.

    Fix: the parameter shadowed the builtin ``str``; also collapses the three
    chained ``.replace()`` calls into a single C-level ``str.translate`` pass.
    """
    return text.translate(str.maketrans("", "", "\n\r "))

def get_pages_user(sum):  # parameter name kept for caller compatibility (shadows builtin)
    """Return the number of 10-item pages needed to show *sum* entries.

    Always at least 1 (a profile tab has one page even with zero entries).
    Fix: the original ``sum//10 + 1`` requested one extra, empty page for
    exact multiples of 10 (e.g. 10 entries -> 2 pages); this uses proper
    ceiling division instead.
    """
    per_page_comments = 10
    pages = -(-sum // per_page_comments)  # ceiling division
    return max(pages, 1)

def turn_page_user(browser, page_num):
    """Jump to page *page_num* via the pagination "go to page" input + button."""
    page_input = browser.find_element(By.CSS_SELECTOR, "#beginpage")
    page_input.click()
    page_input.clear()
    page_input.send_keys(page_num)
    go_button = browser.find_element(By.CSS_SELECTOR, "body > div.container > div.cont-left > ul.pagination > li:last-child > a")
    go_button.click()

def crawl_single_page_baoliao(browser, baoliao_dict, page_index, baoliao_url_list):
    """Parse the baoliao list page currently loaded in *browser*.

    Each entry is stored into *baoliao_dict* under a globally unique key
    ("baoliao<N>", N continuing across pages of 10) and its URL appended to
    *baoliao_url_list*.

    Returns (baoliao_dict, baoliao_url_list, last_baoliao_time), where the
    last value is the time string of the final entry on the page, or ""
    when the page has no entries (fix: was an unbound NameError).
    """
    doc_model = bf(browser.page_source, 'html.parser')
    baoliao_list = doc_model.select("body > div.container > div.cont-left > div > div.pandect-content-stuff")

    baoliao_time = ""  # fix: unbound when the page is empty
    for index, baoliao in enumerate(baoliao_list, start=1):
        url = baoliao.select("div.pandect-content-title > a")[0]["href"]
        baoliao_title = strip_str(baoliao.select("div.pandect-content-title > a")[0].text)
        baoliao_detail = strip_str(baoliao.select("div.pandect-content-detail")[0].text)

        # the category link is optional on some entries
        baoliao_type_a = baoliao.select("div.pandect-content-type > a")
        baoliao_type = baoliao_type_a[0].text if baoliao_type_a else ""
        baoliao_time = baoliao.select("div.pandect-content-type > span")[0].text

        result = {
            "title": baoliao_title,
            "detail": baoliao_detail,
            "type": baoliao_type,
            "baoliao_time": baoliao_time,
            "url": url,
        }

        # 10 entries per page -> unique key across all crawled pages
        baoliao_dict["baoliao" + str(index + page_index * 10)] = result
        baoliao_url_list.append(url)

    return baoliao_dict, baoliao_url_list, baoliao_time

def crawl_baoliao(browser, UGC_sum):
    """Crawl every baoliao page for the user, capped at MAX_BAOLIAO_PAGES.

    Returns (baoliao_dict, deduplicated url list).
    """
    enter_baoliao(browser, UGC_sum)
    total = int(UGC_sum["爆料"])
    page_count = get_pages_user(total)

    baoliao_dict = {}
    baoliao_url_list = []

    for page_index in range(page_count):
        SLEEP()
        # the first page is already displayed; turn only for later pages
        if page_index > 0:
            turn_page_user(browser, page_index + 1)
        baoliao_dict, baoliao_url_list, _last_time = crawl_single_page_baoliao(
            browser, baoliao_dict, page_index, baoliao_url_list)
        # cap at MAX_BAOLIAO_PAGES; as in the original flow, the cap is only
        # checked after a page turn (never on the first page)
        if page_index > 0 and page_index >= MAX_BAOLIAO_PAGES - 1:
            break

    return baoliao_dict, list(set(baoliao_url_list))

def crawl_single_page_comment(browser, comment_dict, page_index, comment_refer_item_list):
    """Parse the comment list page currently loaded in *browser*.

    Each comment is stored into *comment_dict* under a globally unique key
    ("comment<N>", N continuing across pages of 10); URLs that point at
    baoliao posts are appended to *comment_refer_item_list*.

    Returns (comment_dict, comment_refer_item_list, last_date), where
    last_date is the date string of the final comment on the page, or ""
    when the page has no comments (fix: was an unbound NameError).
    """
    doc_model = bf(browser.page_source, 'html.parser')
    comment_list = doc_model.select("body > div.container > div.cont-left > div[id^=div_comment]")

    date = ""  # fix: unbound when the page is empty
    for index, comment in enumerate(comment_list, start=1):
        com = strip_str(comment.select("div.centerBlock > div.infoNews")[0].text)
        date = comment.select("div.rightBlock > span")[0].text
        url = comment.select("div.centerBlock > div.objectBlock > span.objTitle > a")[0]["href"]
        abstract = strip_str(comment.select("div.centerBlock > div.objectBlock > span.objTitle > a")[0].text)

        result = {
            "comment": com,
            "abstract": abstract,
            "data": date,  # key intentionally kept as "data" for downstream compatibility
            "url": url,
        }

        comment_dict["comment" + str(index + page_index * 10)] = result

        # only keep links that point at baoliao posts
        if filter_baoliao_url(url):
            comment_refer_item_list.append(url)

    return comment_dict, comment_refer_item_list, date

def crawl_comment(browser, UGC_sum):
    """Crawl every comment page for the user, capped at MAX_COMMENT_PAGES.

    Returns (comment_dict, deduplicated list of referenced baoliao URLs).
    """
    enter_comment(browser, UGC_sum)
    total = int(UGC_sum["评论"])
    page_count = get_pages_user(total)

    comment_dict = {}
    comment_refer_item_list = []

    for page_index in range(page_count):
        SLEEP()
        # the first page is already displayed; turn only for later pages
        if page_index > 0:
            turn_page_user(browser, page_index + 1)
        comment_dict, comment_refer_item_list, _last_date = crawl_single_page_comment(
            browser, comment_dict, page_index, comment_refer_item_list)
        # cap at MAX_COMMENT_PAGES; as in the original flow, the cap is only
        # checked after a page turn (never on the first page)
        if page_index > 0 and page_index >= MAX_COMMENT_PAGES - 1:
            break

    return comment_dict, list(set(comment_refer_item_list))

def get_user_info(browser,url,user_type):
    """Scrape the smzdm user profile page currently loaded in *browser*.

    Parameters:
        browser: Selenium WebDriver already navigated to *url*.
        url: profile URL; the user id is taken from the second-to-last path
             segment (url.split("/")[-2]).
        user_type: "commen_user" -> additionally crawl the user's comments;
                   "creator"     -> additionally crawl the user's baoliao posts.

    Returns:
        (user_info, graph_structure_user): the full per-user record and a
        small record linking the user to the item URLs they touched.

    NOTE(review): for any *user_type* other than the two handled values,
    `baoliao`, `comment_dict` and `graph_structure_user` are never assigned
    and a NameError is raised below — confirm callers only pass these two.
    """

    html = browser.page_source
    doc_model = bf(html,'html.parser')

    crawl_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    user_id = url.split("/")[-2]
    # Mandatory profile fields: try_select_element returns None on a miss, so
    # a missing node raises (TypeError/AttributeError) out of this function.
    user_profile_url = 'https:'+try_select_element(doc_model,"body > div.container > div.cont-left > div.self-info.other-person-page > div.face-stuff > div > div > img")["src"]
    user_focus = int(try_select_element(doc_model,"body > div.container > div.cont-right > div.user-info > a.user-focus > span").text)
    user_fans = int(try_select_element(doc_model,"body > div.container > div.cont-right > div.user-info > a.user-fans > span").text)
    user_name = try_select_element(doc_model,"body > div.container > div.cont-left > div.self-info.other-person-page > div.info-stuff > div.info-stuff-set > span.info-stuff-nickname > a").text

    # Optional fields below fall back to the string "null" when absent.
    # The member level is the first character of the badge image filename.
    try:
        user_level = int(try_select_element(doc_model,"body > div.container > div.cont-left > div.self-info.other-person-page > div.info-stuff > div.info-stuff-set > div.rank.face-stuff-level > a > img")["src"].split("/")[-1][0])
    except:
        user_level = "null"

    try:
        user_medal = try_select_element(doc_model,"body > div.container > div.cont-left > div.self-info.other-person-page > div.info-stuff > div.info-stuff-set > div.icon-medal > a > img")["alt"]
    except:
        user_medal = "null"

    try:
        signature = strip_str(try_select_element(doc_model,"body > div.container > div.cont-left > div.self-info.other-person-page > div.info-stuff > div.info-stuff-words").text)
    except:
        signature = "null"

    # Icon badges: map each icon's first CSS class to its label text.
    try:
        icon_info = try_select_elements(doc_model,"body > div.container > div.cont-left > div.self-info.other-person-page > div.info-stuff > div.bottom-words")
        icon_info_dict = {}
        for tag in icon_info:
            icon_info_dict[tag.i["class"][0]] = tag.text
    except:
        icon_info_dict = "null"

    # Navigation tabs after the first carry the per-category UGC counts.
    try:
        navtabs = try_select_elements(doc_model,"body > div.container > div.cont-left > ul > li:not(:first-child)")
    except:
        navtabs = "null"

    # Parse each tab's text "<category> <count>" into UGC_sum,
    # e.g. "爆料 12" -> {"爆料": 12}; any parse failure yields {}.
    try:
        UGC_sum = {}
        if navtabs:
            for tab in navtabs:
                a=tab.text.split()
                UGC_sum[a[0]]=int(a[1])
    except:
        UGC_sum = {}

    if user_type == "commen_user":
        # Ordinary user: crawl comments only; baoliao crawling disabled.
        #baoliao , baoliao_url_list = crawl_baoliao(browser, UGC_sum)
        baoliao = {}
        comment_dict,comment_refer_item_list = crawl_comment(browser,UGC_sum)
        graph_structure_user = {
            "user_url": url,
            "user_type":user_type,
            "comment_refer_item": comment_refer_item_list
        }

    if user_type == "creator":
        # Creator: crawl baoliao posts only; comment crawling disabled.
        baoliao , baoliao_url_list= crawl_baoliao(browser,UGC_sum)
        #comment_dict,comment_refer_item_list = crawl_comment(browser,UGC_sum)
        comment_dict = {}

        graph_structure_user = {
            "user_url": url,
            "user_type": user_type,
            "baoliao_url_list":baoliao_url_list,
        }


    # Full per-user record persisted by the crawl_user_* callers.
    user_info = {

        "url":url,
        "crawl_time":crawl_time,
        "user_id":user_id,
        "user_name":user_name,
        "profile_url":user_profile_url,
        "focus":user_focus,
        "fans":user_fans,
        "member_level":user_level,
        "medal":user_medal,
        "signature":signature,
        "icon_info":icon_info_dict,
        "UGC":UGC_sum,
        "baoliao":baoliao,
        "comment":comment_dict

    }

    return user_info,graph_structure_user



def crawl_user_commen(url):
    """Crawl a "commen_user" profile and persist the results under store_path.

    Returns (True, list_of_referenced_baoliao_urls) on success, or
    (False, error_message) on any failure.

    Fix: the browser is now always closed via ``finally`` — previously the
    close call on the failure path was commented out, leaking the browser.
    """
    browser = None
    try:
        browser = get_browser(url)
        user_info, graph_structure_user = get_user_info(browser, url, "commen_user")

        user_id = url.split("/")[4]
        store_dir = os.path.join(store_path, "user_{}".format(user_id))
        try_mkdir(store_dir)

        comment_refer_items = graph_structure_user["comment_refer_item"]
        write_json("user_{}.json".format(user_id), user_info, store_dir)
        write_json("user_{}_graph_structure.json".format(user_id), graph_structure_user, store_dir)
        return True, comment_refer_items  # url list
    except Exception as log:
        return False, str(log)
    finally:
        if browser is not None:
            try_close_browser(browser)


def crawl_user_creator(url):
    """Crawl a "creator" profile and persist the results under store_path.

    Returns (True, list_of_baoliao_urls) on success, or
    (False, error_message) on any failure.

    Fix: the browser is now always closed via ``finally`` — previously the
    close call on the failure path was commented out, leaking the browser.
    """
    browser = None
    try:
        browser = get_browser(url)
        user_info, graph_structure_user = get_user_info(browser, url, "creator")

        user_id = url.split("/")[4]
        store_dir = os.path.join(store_path, "creator_{}".format(user_id))
        try_mkdir(store_dir)

        baoliao_url_list = graph_structure_user["baoliao_url_list"]
        write_json("user_{}.json".format(user_id), user_info, store_dir)
        write_json("user_{}_graph_structure.json".format(user_id), graph_structure_user, store_dir)
        return True, baoliao_url_list
    except Exception as log:
        return False, str(log)
    finally:
        if browser is not None:
            try_close_browser(browser)

if __name__ == '__main__':
    # STATE , items_list = crawl_user_creator("https://zhiyou.smzdm.com/member/2410880374/")
    # STATE, items_list = crawl_user_commen("https://zhiyou.smzdm.com/member/4319074307/")

    # Ad-hoc smoke test: scrape a single profile and dump the result to stdout.
    url = "https://zhiyou.smzdm.com/member/4319074307/"
    browser = get_browser(url)
    user_info, graph_structure_user = get_user_info(browser, url, "commen_user")
    print(user_info, graph_structure_user)
