import requests
import json
import pymysql
import re

# MySQL connection shared by the scrape loop at the bottom of the file.
# NOTE(review): credentials and host are hard-coded — move to env vars or a
# config file before this leaves a private environment.
conn = pymysql.connect(host='gsy-cloud', user='root', password='rootQWE', database='python', port=32769)
cursor = conn.cursor()
# Row-insert template, filled via `sql % tuple(row)` in the main loop.
# NOTE(review): %-interpolating values into quoted '%s' slots performs no
# escaping — any value containing a quote breaks the statement (and is an
# injection vector). Prefer cursor.execute(sql, params) with pymysql's own
# placeholders; kept as-is here because the main loop formats this string.
sql = "INSERT INTO tb_teacher(pro_pk_id,pro_first_name,pro_last_name,avg_rate,rate_num,school_id,school_name,department_name) values ('%s','%s','%s','%s','%s','%s','%s','%s');"
# sql = 'INSERT INTO tb_teacher()'


# Department facets recognized by the RateMyProfessors solr endpoint; the
# scrape loop below issues one paged query per entry.
departmentList = [
    "Accounting", "Agriculture", "Anthropology", "Architectural Engineering",
    "Architecture", "Art History", "Biology", "Biomedical", "Business",
    "Chemical Engineering", "Chemistry", "Civil Engineering", "Classics",
    "Communication", "Computer Engineering", "Computer Science",
    "Criminal Justice", "Culinary Arts", "Design", "Economics", "Education",
    "Electrical Engineering", "Engineering", "English", "Ethnic Studies",
    "Film", "Finance", "Fine Arts", "Geography", "Geology", "Graphic Arts",
    "Health Science", "History", "Hospitality", "Humanities",
    "Information Science", "International Studies", "Journalism", "Languages",
    "Law", "Literature", "Management", "Marketing", "Materials Science",
    "Mathematics", "Mechanical Engineering", "Medicine", "Music", "Nursing",
    "Nutrition", "Philosophy", "Physical Education", "Physics",
    "Political Science", "Psychology", "Religion", "Science",
    "Social Science", "Social Work", "Sociology", "Theater",
    "Women's Studies", "Writing",
]

# Debug aid: echo every department word, lowercased, one per line.
for department in departmentList:
    for word in department.split(" "):
        print(word.lower())

def get_department_json(url):
    """Fetch one solr page and return the decoded JSON payload.

    The endpoint replies with JSONP of the form ``noCB({...})``; this strips
    the callback wrapper and decodes the inner JSON.

    Raises:
        requests.RequestException: on network failure or timeout.
        ValueError: if the body is not the expected ``noCB(...)`` wrapper
            (or the inner payload is not valid JSON).
    """
    # Bound the request so one hung server can't stall the scrape loop forever.
    response = requests.get(url, timeout=30)
    jsonp_pattern = re.compile(r'noCB[(](.+)[)]')
    match = jsonp_pattern.match(response.text)
    if match is None:
        # Previously this fell through to an opaque AttributeError on .group().
        raise ValueError("unexpected non-JSONP response: %r" % response.text[:200])
    return json.loads(match.group(1))


def parse_json(json_obj, school_name, department_name):
    """Flatten a decoded solr response into DB-ready teacher rows.

    Args:
        json_obj: decoded solr JSON (as returned by ``get_department_json``).
        school_name: constant column appended to every row.
        department_name: constant column appended to every row.

    Returns:
        ``(total_found, rows)`` where ``total_found`` is solr's ``numFound``
        hit count for the whole query (not just this page) and ``rows`` is a
        list of 8-element lists matching the tb_teacher insert column order.
    """
    total_found = int(json_obj['response']['numFound'])  # total hits under this query
    rows = []
    for doc in json_obj['response']['docs']:
        rows.append([
            str(doc['pk_id']),
            doc['teacherfirstname_t'],
            doc['teacherlastname_t'],
            # Unrated teachers lack this field; store '' for them.
            # (Replaces a bare `except:` that duplicated the whole row build.)
            doc.get('averageratingscore_rf', ''),
            str(doc['total_number_of_ratings_i']),
            doc['schoolid_s'],
            school_name,
            department_name,
        ])
    return total_found, rows


def build_url(department, start):
    """Build the solr search URL for one page of one department.

    Args:
        department: human-readable department name; spaces become '+' so the
            name fits the URL-encoded ``teacherdepartment_s`` filter.
        start: zero-based result offset (the endpoint pages 20 rows at a time).

    Returns:
        The fully substituted query URL as a string.
    """
    template = (
        "https://solr-aws-elb-production.ratemyprofessors.com//solr/rmp/select/?solrformat=true&rows=20&wt"
        "=json&json.wrf=noCB&callback=noCB&q=*%3A*+AND+schoolid_s%3A431+AND+teacherdepartment_s%3A"
        "%22replacedept%22&defType=edismax&qf=teacherfirstname_t%5E2000+teacherlastname_t%5E2000"
        "+teacherfullname_t%5E2000+autosuggest&bf=pow("
        "total_number_of_ratings_i%2C2.1)&sort=total_number_of_ratings_i+desc&siteName=rmp&rows=20&start"
        "=replacestart&fl=pk_id+teacherfirstname_t+teacherlastname_t+total_number_of_ratings_i"
        "+averageratingscore_rf+schoolid_s&fq= "
    )
    url = template.replace('replacedept', department.replace(' ', '+'))
    return url.replace('replacestart', str(start))


# Crawl every department page-by-page (20 rows per page) and insert each
# teacher row. Duplicate-key insert errors are expected on re-runs and are
# deliberately ignored; everything else is logged.
for dname in departmentList:
    start = 0
    while True:
        url = build_url(dname, start)
        try:
            print("开始爬取url:%s" % url)
            limit, sql_list = parse_json(get_department_json(url), 'Illinois Institute of Technology', dname)
            print("爬取%s 第 %d 条消息成功,共 %d 条消息" % (dname, start, limit))
            start += 20
        except Exception as e:
            # `start` was not advanced on failure, so report it as-is.
            # (The original printed `start - 20`, which was one page off and
            # showed -20 when the very first page failed.)
            print("爬取过程出现错误，department name: %s , start : %d" % (dname, start))
            print(e)
            break
        for sql_param in sql_list:
            try:
                cursor.execute(sql % tuple(sql_param))
                conn.commit()
            except Exception as e:
                # Only duplicate-key errors are expected; log everything else.
                # (The original `not str(e).rfind('Duplicate')` was true only
                # when 'Duplicate' sat at index 0, so real failures — including
                # quoting/injection breakage from the %-formatted SQL — were
                # silently swallowed.)
                if 'Duplicate' not in str(e):
                    print("sql执行失败" + sql % tuple(sql_param))
                    print(e)
        print('插入数据成功')
        if start > limit:
            break
