import os

from item_crawl import crawl_item
from user_crawl import crawl_user_creator,crawl_user_commen
from paras import *
import time
import random

def try_crawl_item(url_item, log):
    """Crawl a single item page and record the outcome in `log`.

    Args:
        url_item: URL of the item page to crawl.
        log: shared dict keyed by URL; must already contain a "failed_list" list.

    Returns:
        (author_url, commenter_url_list) on success, (None, None) on failure.
        Never raises: any exception from crawl_item is logged as a failure.
    """
    def _log_failure(detail):
        # Shared failure bookkeeping: record the entry, remember the URL for
        # retries, and echo it (original code prints every log entry).
        log[url_item] = {
            "url": url_item,
            "crawl_time": GET_TIME(),
            "type": "item",
            "state": "failed",  # failed
            "log": detail,
        }
        log["failed_list"].append(url_item)
        print(log[url_item])

    try:
        time_start = time.time()
        STATE, _return = crawl_item(url_item)
        if STATE:
            time_cost = time.time() - time_start
            # _return is (author_url, commenter_url_list, ...) on success
            author_url, commenter_url_list = _return[0], _return[1]
            log[url_item] = {
                "url": url_item,
                "crawl_time": GET_TIME(),
                "type": "item",
                "state": "success",  # succeed
                "time_cost": time_cost,
            }
            print(log[url_item])
            return author_url, commenter_url_list

        # STATE falsy: crawl_item reported a failure; _return holds the reason.
        _log_failure(_return)
        return None, None

    except Exception as e:
        # Network/parse errors from crawl_item: keep best-effort semantics.
        _log_failure(str(e))
        return None, None


def try_crawl_creator(author_url, log):
    """Crawl a creator (author) profile page and record the outcome in `log`.

    Args:
        author_url: URL of the creator's profile page.
        log: shared dict keyed by URL; must already contain a "failed_list" list.

    Returns:
        List of item URLs published by the creator on success, [] on failure.
        Never raises: any exception from crawl_user_creator is logged.
    """
    def _log_failure(detail):
        # Shared failure bookkeeping (print first, then remember for retries,
        # matching the original ordering of this function).
        log[author_url] = {
            "url": author_url,
            "crawl_time": GET_TIME(),
            "type": "creator",
            "state": "failed",  # failed
            "log": detail,
        }
        print(log[author_url])
        log["failed_list"].append(author_url)

    try:
        time_start = time.time()
        STATE, _return = crawl_user_creator(author_url)
        if STATE:
            time_cost = time.time() - time_start
            items_list_creator = _return
            log[author_url] = {
                "url": author_url,
                "crawl_time": GET_TIME(),
                "type": "creator",
                "state": "success",  # succeed
                "time_cost": time_cost,
            }
            print(log[author_url])
            return items_list_creator

        # STATE falsy: crawl_user_creator reported why it failed.
        _log_failure(_return)
        return []

    except Exception as e:
        # author_url may be None when the seed item failed; logged as failure.
        _log_failure(str(e))
        return []

def try_crawl_commen(commen_url, log):
    """Crawl a commenter profile page and record the outcome in `log`.

    Args:
        commen_url: URL of the commenter's profile page.
        log: shared dict keyed by URL; must already contain a "failed_list" list.

    Returns:
        List of item URLs the commenter interacted with on success, [] on
        failure. Never raises: any exception from crawl_user_commen is logged.
    """
    def _log_failure(detail):
        # Shared failure bookkeeping: record, remember for retries, echo.
        log[commen_url] = {
            "url": commen_url,
            "crawl_time": GET_TIME(),
            "type": "commen",
            "state": "failed",  # failed
            "log": detail,
        }
        log["failed_list"].append(commen_url)
        print(log[commen_url])

    time_start = time.time()
    try:
        STATE, _return = crawl_user_commen(commen_url)
        if STATE:
            time_cost = time.time() - time_start
            comment_refer_items = _return
            log[commen_url] = {
                "url": commen_url,
                "crawl_time": GET_TIME(),
                "type": "commen",
                "state": "success",  # succeed
                "time_cost": time_cost,
            }
            print(log[commen_url])
            return comment_refer_items

        # STATE falsy: crawl_user_commen reported why it failed.
        _log_failure(_return)
        return []

    except Exception as e:
        _log_failure(str(e))
        return []


def crawler(url_item, log):
    """One crawl wave: seed item -> its author's items -> all commenters' items.

    Also fires 2x len(commenter_url_list) crawls of random items as
    best-effort noise between the targeted requests.

    Args:
        url_item: seed item URL.
        log: shared dict with a "failed_list" list (mutated in place).

    Returns:
        (items_url, log) — deduplicated list of discovered item URLs on
        success; whatever was collected so far if something unexpected broke.
    """
    items_url = []  # bound before the try so the except path can return it
    try:
        author_url, commenter_url_list = try_crawl_item(url_item, log)

        items_url = items_url + try_crawl_creator(author_url, log)

        # Best-effort random-item crawls; commenter_url_list is None when the
        # seed item failed, so len() would raise — swallow and move on.
        try:
            for _ in range(2 * len(commenter_url_list)):
                try_crawl_item(get_random_item_url(), log)  # could be random a choice
        except Exception:
            pass

        if commenter_url_list:
            for commenter_url in commenter_url_list:
                items_url = items_url + try_crawl_commen(commenter_url, log)

        items_url = list(set(items_url))

        return items_url, log

    except Exception:
        # Unexpected failure in the wave itself; return the partial result.
        print("??????")
        return items_url, log

def crawler_(url_item, log):
    """Lighter crawl wave: seed item -> author's items -> 4 random commenters.

    Unlike crawler(), this samples only four commenters (with replacement)
    instead of visiting all of them, and adds no random-item noise.

    Args:
        url_item: seed item URL.
        log: shared dict with a "failed_list" list (mutated in place).

    Returns:
        (items_url, log) — deduplicated list of discovered item URLs on
        success; whatever was collected so far if something unexpected broke.
    """
    items_url = []  # bound before the try so the except path can return it
    try:
        author_url, commenter_url_list = try_crawl_item(url_item, log)

        items_url = items_url + try_crawl_creator(author_url, log)

        # Sample four commenters (repeats possible). random.choice raises on
        # an empty/None list when the seed item failed — swallow and move on.
        try:
            for _ in range(4):
                items_url = items_url + try_crawl_commen(
                    random.choice(commenter_url_list), log)
        except Exception:
            pass

        items_url = list(set(items_url))

        return items_url, log

    except Exception:
        # Unexpected failure in the wave itself; return the partial result.
        print("??????")
        return items_url, log

def test():
    """Smoke test: crawl one hard-coded item page and dump the log to JSON."""
    log = {"failed_list": []}
    url = "https://www.smzdm.com/p/49636934"
    a, b = try_crawl_item(url, log)
    log["items_url"] = b
    # The item id is the 5th path segment of the URL.
    file_name_log = "item_{}_log.json".format(url.split("/")[4])
    print(file_name_log, log, LOG_DIR)
    write_json(file_name_log, log, LOG_DIR)


def epoch_crawl(url_item):
    """Run one crawl wave seeded at url_item and pick the next seed URL.

    Original code's except fallback called random.choice(items_url) on a name
    that could be unbound (and the nested try/except repeated the exact same
    call, so it re-raised the same error). Fixed by initializing items_url up
    front and choosing the next seed on a single exit path.

    Args:
        url_item: seed item URL; its id is taken from the second-to-last
            path segment for the log file name.

    Returns:
        A randomly chosen URL from the items discovered this epoch.

    Raises:
        IndexError: when no items were discovered (empty pool) — the caller
            (cycle_process) catches this and falls back to a random item.
    """
    items_url = []
    log = {"failed_list": []}
    try:
        items_url, log = crawler_(url_item, log)
        log["items_url"] = items_url
        file_name_log = "item_{}_log.json".format(url_item.split("/")[-2])
        write_json(file_name_log, log, LOG_DIR)
    except Exception:
        # Persisting the log is best-effort; a next seed can still be chosen
        # from whatever was collected before the failure.
        pass
    return random.choice(items_url)

def cycle_process(urls_pool, all_files):
    """Endless crawl loop: pick a random seed, crawl it, rotate the pool.

    Each cycle sleeps, picks a random URL from the pool, skips it if its item
    id is already in all_files (replacing it with a fresh random item), and
    otherwise runs epoch_crawl on it, feeding the returned URL back into the
    pool. On crawl failure the seed is replaced by a random item instead.

    Args:
        urls_pool: mutable list of candidate seed URLs (mutated in place).
        all_files: collection of item ids that already have a log file.

    Never returns.
    """
    while True:
        SLEEP()
        url = random.choice(urls_pool)
        try:
            # The item id is the 5th path segment; compute it once.
            item_id = url.split("/")[4]
        except IndexError:
            # Malformed URL — leave it in the pool and try another cycle
            # (matches the original outer `except: pass` behavior).
            continue
        if item_id in all_files:
            urls_pool.remove(url)
            urls_pool.append(get_random_item_url())
            print("file already exist , skip this cycle")
            continue
        try:
            url_ret = epoch_crawl(url)
            urls_pool.remove(url)
            urls_pool.append(url_ret)
        except Exception:
            # epoch_crawl raises when nothing usable was discovered; swap the
            # dead seed for a fresh random item and keep cycling.
            urls_pool.remove(url)
            urls_pool.append(get_random_item_url())
            print("crawl failed , skip this cycle")

def multi_main():
    """Placeholder for a future multi-process entry point; does nothing."""
    return None

# Script entry point: intentionally a no-op — drive the crawl via
# cycle_process / epoch_crawl from another module instead.
if __name__ == '__main__':

   pass