#!/usr/bin/env python3

'''
1. Merge every university's papers into a single collection, with the paper
   Id as the unique identifier (stored as the collection's Id field).
2. Sample a fixed fraction of that collection per year (the code currently
   takes 1/20, i.e. 5%) as the experimental data set.
3. Error check: verify the per-year samples are balanced (imbalance is
   usually because a given year simply has few published papers).
'''

from config import *
from multiprocessing import Pool
import pymongo
import random
from do_get_aminer_data import *


# MongoDB connection configuration (MONGO_* constants come from config.py)
client = pymongo.MongoClient(MONGO_URL)
# Handle to the paper_info database: one source collection per university
db_paper_info = client[MONGO_PAPER_INFO_DB]
# Handle to the all_paper_info database: merged and derived collections
db_all_paper_info = client[MONGO_ALL_PAPER_INFO_DB]


'''
读取paper数据库中的数据
'''
def get_db_data(university_name):
    """Return a cursor over all paper documents stored for *university_name*.

    The cursor uses a batch size of 1, fetching documents one at a time.
    """
    return db_paper_info[university_name].find({}).batch_size(1)

'''
获取到所有的作者身份信息并且保存到数据库中
'''
def get_all_author_info():
    """Copy every author entry from ``exp_paper_info`` into
    ``exp_all_researcher_info``.

    Each element of a paper's ``AA`` (author array) field yields one document
    with author name (``AuN``), author id (``AuId``), affiliation (``AfN``)
    and a default ``Sex`` of 1 (the code elsewhere uses 1 = male).
    Duplicate authors across papers are NOT de-duplicated here.
    """
    paper_cursor = db_all_paper_info['exp_paper_info'].find({}).batch_size(1)
    for paper in paper_cursor:
        # 'AA' may be absent on malformed documents; skip those gracefully.
        for author in paper.get('AA', []):
            researcher_data = {
                "AuN": author.get("AuN"),
                "AuId": author.get("AuId"),
                "AfN": author.get("AfN"),
                "Sex": 1,
            }
            # insert_one replaces the deprecated Collection.save();
            # save() on a document without an _id was a plain insert anyway.
            db_all_paper_info['exp_all_researcher_info'].insert_one(researcher_data)

'''
将所有的学校的paper都集合起来放置在一个collection中
'''
def get_all_university_paper_info(university_name):
    """Merge one university's papers into the shared ``all_paper_info``
    collection.

    Documents coming off a find() cursor always carry an ``_id``, so an
    upsert keyed on ``_id`` reproduces the behavior of the deprecated
    ``Collection.save()`` (insert-or-replace by ``_id``).
    """
    for paper in get_db_data(university_name):
        db_all_paper_info['all_paper_info'].replace_one(
            {'_id': paper['_id']}, paper, upsert=True)
    print(university_name + '已完成存储')

'''
随机抽取每年的5%的数据作为数据集
'''
def get_some_paper_info(start_year=2008, num_years=10, sample_divisor=20):
    """Randomly sample 1/*sample_divisor* of each year's papers from
    ``paper_info`` into ``exp_paper_info``.

    Parameters (defaults preserve the original hard-coded behavior):
        start_year: first publication year to sample (field ``Y``).
        num_years: number of consecutive years to cover.
        sample_divisor: take ``int(count / sample_divisor)`` papers per year
            (20 -> 5%).
    """
    for offset in range(num_years):
        year = start_year + offset
        # Materialize the cursor so we can count it and feed random.sample.
        year_papers = list(db_all_paper_info['paper_info'].find({"Y": year}))
        total = len(year_papers)
        print(str(year) + "年共有" + str(total) + "篇paper")

        sampled = random.sample(year_papers, int(total / sample_divisor))
        print("after 随机选取后的长度为：", len(sampled))
        for paper in sampled:
            # Upsert by _id: equivalent to the deprecated Collection.save()
            # for documents that already have an _id.
            db_all_paper_info['exp_paper_info'].replace_one(
                {'_id': paper['_id']}, paper, upsert=True)

'''
将paper_info中的2008-2017的数据保存在2008_to_2017_paper_info中
'''
def save_into_2008_to_2017_paper_info():
    """Copy all ``paper_info`` documents published 2008-2017 (field ``Y``)
    into the ``2008_to_2017_paper_info`` collection.

    Upserting by ``_id`` reproduces the deprecated ``Collection.save()``
    semantics for documents that already carry an ``_id``.
    """
    for year in range(2008, 2018):
        for paper in db_all_paper_info['paper_info'].find({"Y": year}):
            db_all_paper_info['2008_to_2017_paper_info'].replace_one(
                {'_id': paper['_id']}, paper, upsert=True)


'''
将抽取的5%的数据的所有researcher_info保存在exp_researcher_info中
'''
def save_researcher_info():
    """Extract every author of the sampled papers (``exp_paper_info``) into
    ``exp_researcher_info``.

    One document per author-array entry: name (``AuN``), id (``AuId``),
    affiliation (``AfN``) and a default ``Sex`` of 1. Duplicates are kept.
    """
    exp_paper_info_data = db_all_paper_info['exp_paper_info'].find({}).batch_size(1)
    for paper in exp_paper_info_data:
        # 'AA' may be absent on malformed documents; skip those gracefully.
        for author in paper.get('AA', []):
            researcher_data = {
                "AuN": author.get("AuN"),
                "AuId": author.get("AuId"),
                "AfN": author.get("AfN"),
                "Sex": 1,
            }
            # insert_one replaces the deprecated Collection.save().
            db_all_paper_info['exp_researcher_info'].insert_one(researcher_data)

'''
删除afflication为空的researcher_info数据
'''
def delete_afflication_none():
    """Copy researchers that DO have an affiliation (non-empty ``AfN``) from
    ``exp_after_researcher_info`` into
    ``exp_after_del_none_aff_researcher_info``.

    NOTE: despite the name, nothing is deleted from the source collection —
    records with a missing/empty ``AfN`` are simply not copied over.
    """
    cursor = db_all_paper_info['exp_after_researcher_info'].find({}).batch_size(1)
    for doc in cursor:
        # Truthiness filters out both missing and empty-string affiliations.
        if doc.get("AfN"):
            # Upsert by _id: equivalent to the deprecated Collection.save().
            db_all_paper_info['exp_after_del_none_aff_researcher_info'].replace_one(
                {'_id': doc['_id']}, doc, upsert=True)

'''
正式调用aminer接口
'''
def start_do_con_aminer(researcher_info):
    """Query the aminer API for one researcher and persist the gender result.

    On success the classified record (Sex 1 = male, 0 = otherwise) is saved
    to ``exp_success_after_del_none_aff_researcher_info`` and None is
    returned. On failure the researcher record is returned so the caller
    can retry it (Sex defaults to 1 in that record).
    """
    researcher_affiliation = researcher_info.get("AfN")
    researcher_name = researcher_info.get("AuN")
    aminer_data = do_get_aminer_data(researcher_name, researcher_affiliation)
    aminer_data_json, status = aminer_data.do_connect()

    # Record handed back for retry when the lookup cannot be completed.
    retry_record = {"AuN": researcher_name,
                    "AuId": researcher_info.get("AuId"),
                    "AfN": researcher_affiliation,
                    "Sex": 1}

    if status != 200:
        print("获取aminer数据失败")
        return retry_record

    print("获取aminer数据成功")
    gender_info = (aminer_data_json or {}).get('Final')
    if gender_info is None:
        # A 200 response without a 'Final' payload used to crash on
        # None.get(...); treat it as a failed lookup instead.
        return retry_record

    sex = 1 if gender_info.get('gender') == 'male' else 0
    # insert_one replaces the deprecated Collection.save() (no _id -> insert).
    db_all_paper_info['exp_success_after_del_none_aff_researcher_info'].insert_one(
        {"AuN": researcher_name,
         "AfN": researcher_affiliation,
         "AuId": researcher_info.get("AuId"),
         "Sex": sex})
    return None

'''
开启多线程调用aminer接口
'''
def get_aminer_data(researcher_info_list):
    """Classify a batch of researchers in parallel via the aminer API.

    Fans the records out over a process pool. ``start_do_con_aminer``
    returns None on success and the original record on failure, so the
    non-None results form the retry list handed back to the caller.
    """
    # Context manager guarantees the pool is torn down (the original
    # close()d but never join()ed the workers).
    with Pool() as pool:
        results = pool.map(start_do_con_aminer, researcher_info_list)

    # 'is not None' instead of '== None' (identity check for the sentinel).
    fail_con_aminer_list = [item for item in results if item is not None]
    print("fail_con_aminer_list-------------", fail_con_aminer_list)
    return fail_con_aminer_list

'''
对exp_after_del_none_aff_researcher_info中30万的数据做批量调用aminer接口处理（1000）每批次
'''
def get_reseacher_sex_info():
    """Drive the aminer gender lookup over batch #3 (records 1500-2000) of
    ``exp_after_del_none_aff_researcher_info``, 500 records per batch,
    re-submitting failed records until every one has been classified.
    """
    batch_size = 500
    for batch_index in range(3, 4):
        batch = list(
            db_all_paper_info['exp_after_del_none_aff_researcher_info']
            .find({})
            .skip(batch_index * batch_size)
            .limit(batch_size))
        print("获取到需要aminer数据的长度为", len(batch))

        pending = get_aminer_data(batch)
        # The aminer service sometimes refuses requests; keep retrying the
        # failed subset until nothing is left.
        while len(pending) != 0:
            print("第" + str(batch_index * batch_size) + "到" + str((batch_index + 1) * batch_size) + "条数据还有" + str(len(pending)) + "条没有完成性别判断")
            pending = get_aminer_data(pending)
        print("第" + str(batch_index * batch_size) + "到" + str((batch_index + 1) * batch_size) + "条数据已完成")

def main():
    """Script entry point: run the batched aminer gender-lookup pipeline."""
    get_reseacher_sex_info()


if __name__ == '__main__':
    main()
