# -*- coding: UTF-8 -*-
import json
import re
import time
import requests
from bs4 import BeautifulSoup


def encode_inp(input):
    """Encode *input* with the portal's JavaScript login scrambler.

    This is a port of the jsxsd site's `encodeInp` JS function; for ASCII
    text it is plain Base64, except that it always emits at least one
    4-character group (so '' encodes to 'AA=='). The original Python port
    unrolled the first loop iteration to mimic the JS do/while and used
    ''-sentinels checked via isinstance/str.isdigit; folded back into a
    single loop here with identical output.
    """
    key_str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='
    length = len(input)
    groups = []
    i = 0
    while True:  # do-while: always emit one group, even for empty input
        # None marks "past end of input" (the JS original used '' for this).
        c1 = ord(input[i]) if i < length else None
        c2 = ord(input[i + 1]) if i + 1 < length else None
        c3 = ord(input[i + 2]) if i + 2 < length else None
        i += 3
        b1 = 0 if c1 is None else c1
        b2 = 0 if c2 is None else c2
        b3 = 0 if c3 is None else c3
        e1 = b1 >> 2
        e2 = ((b1 & 3) << 4) | (b2 >> 4)
        e3 = ((b2 & 15) << 2) | (b3 >> 6)
        e4 = b3 & 63
        # Index 64 selects '=' padding when trailing bytes are missing.
        if c2 is None:
            e3 = 64
            e4 = 64
        elif c3 is None:
            e4 = 64
        groups.append(key_str[e1] + key_str[e2] + key_str[e3] + key_str[e4])
        if i >= length:
            break
    return ''.join(groups)


def contain(a, item):
    """Return True if dict *item* already appears in list *a*.

    Dicts are compared by the string form of their items() view, so the
    comparison is sensitive to key insertion order, matching the
    original implementation exactly.
    """
    target = str(item.items())
    return any(str(candidate.items()) == target for candidate in a)


def analysis_table(html, location):
    """Parse one campus' timetable page (jsxsd xskb_list.do) into course dicts.

    Args:
        html: page source of the timetable response.
        location: campus name attached to every parsed course.

    Returns:
        list of dicts with keys title/teacher/classroom/day/section/week/location,
        de-duplicated via contain().

    NOTE(review): relies on the exact ordering of <font> tags inside
    div.kbcontent and on a '---------------------' separator when two
    courses share one cell — fragile if the portal markup changes.
    """
    result = []
    bs = BeautifulSoup(html, features='html.parser')
    # i/j: row index and <tr> (one row per period slot); x/y: weekday column index and <td>.
    for i, j in enumerate(bs.find('table').find_all('tr')):
        for x, y in enumerate(j.find_all('td')):
            # Only process cells that still have content after stripping spaces/newlines/nbsp.
            if len(y.find('div', class_='kbcontent').text.replace(' ', '').replace('\n', '').replace('\xa0', '')) > 0:
                info = [x.text for x in y.find('div', class_='kbcontent').find_all('font')]
                # Drop a leading placeholder <font> that only holds '&nbsp'.
                if '&nbsp' in info[0]:
                    info = info[1:]
                text = y.find('div', class_='kbcontent').text.replace(' ', '').replace('\n', '').replace('\xa0', '')
                # Course title is whatever precedes the 9-digit-plus-letters class code.
                item = {'title': re.search('(.*?)\d{9}\w{5,}', text).group(1).replace('&nbsp', ' ')}
                item['teacher'] = info[1]
                item['classroom'] = info[5]
                item['day'] = x + 1
                # '[m-n节]' -> [m, n] period span.
                section = info[2][info[2].index('[') + 1:info[2].index('节]')]
                section = [int(x) for x in section.split('-')]
                item['section'] = section
                week = []
                # Expand '1-3,5(周)'-style comma/range lists into explicit week numbers.
                for n in info[2][:info[2].index('(周)')].split(','):
                    if '-' in n:
                        border = n.split('-')
                        for m in range(int(border[0]), int(border[1]) + 1):
                            week.append(m)
                    else:
                        week.append(int(n))
                item['week'] = week
                item['location'] = location
                if not contain(result, item):
                    result.append(item)
                # A dashed separator means a second course occupies the same cell;
                # its fields sit at fixed later offsets in the <font> list.
                text = text.split('---------------------')
                if len(text) > 1:
                    item = {'title': re.search('(.*?)\d{9}\w{5,}', text[1]).group(1).replace('&nbsp', ' ')}
                    item['teacher'] = info[7]
                    item['classroom'] = info[11]
                    item['day'] = x + 1
                    section = info[8][info[8].index('[') + 1:info[8].index('节]')]
                    section = [int(x) for x in section.split('-')]
                    item['section'] = section
                    week = []
                    # NOTE(review): loop variable j shadows the outer <tr> here;
                    # harmless (outer j is reassigned each iteration) but confusing.
                    for j in info[8][:info[8].index('(周)')].split(','):
                        if '-' in j:
                            border = j.split('-')
                            for m in range(int(border[0]), int(border[1]) + 1):
                                week.append(m)
                        else:
                            week.append(int(j))
                    item['week'] = week
                    item['location'] = location
                    if not contain(result, item):
                        result.append(item)
    return result


def get_user_info_and_week(s):
    """Scrape the portal home page for the student profile and week counters.

    Args:
        s: a logged-in requests session.

    Returns:
        (info, week) where *info* maps keys (name/number/college/major/class,
        falling back to the raw Chinese label for unknown fields) to scraped
        values — {} if labels and values do not line up — and *week* is
        {'current': int, 'total': int}.
    """
    html = s.get('http://xjwis.ynufe.edu.cn/jsxsd/framework/xsMain_new.jsp?t1=1').text
    # Parse once; the original built three separate soups from the same html.
    page = BeautifulSoup(html, features='html.parser')
    week_text = page.find(id='li_showWeek').text
    # int() instead of eval(): never evaluate scraped page text as code.
    weeks = [int(re.search(r'(\d+)', part).group()) for part in week_text.split('/')]
    week = {'current': weeks[0], 'total': weeks[1]}
    labels = []
    values = []
    container = page.find('div', class_='middletopttxlr')
    for node in container.find_all('div', class_='middletopdwxxtit'):
        if node.text == '\xa0':  # skip pure-nbsp filler cells
            continue
        labels.append(node.text.replace('：', ''))
    for node in container.find_all('div', class_='middletopdwxxcont'):
        if node.text == '\xa0':
            continue
        values.append(node.text.replace('：', ''))
    mapper = {'学生姓名': 'name', '学生编号': 'number', '所属院系': 'college', '专业名称': 'major', '班级名称': 'class'}
    if len(labels) == len(values):
        return {mapper.get(label, label): value for label, value in zip(labels, values)}, week
    return {}, week


def get_scores(s):
    """Fetch every course score via the logged-in session *s*.

    Returns a list of dicts keyed by the column names in *mapper* (the
    leading row-number column is dropped). The score/credit/hours/point
    columns (indices 5, 7, 8, 9) are parsed numerically with a safe
    parser instead of the original eval() on page text, which would have
    executed arbitrary content from the scraped HTML.
    """
    def _number(text):
        # Safe replacement for eval(): int when integral, float otherwise.
        try:
            return int(text)
        except ValueError:
            return float(text)

    html = s.post('http://xjwis.ynufe.edu.cn/jsxsd/kscj/cjcx_list?kksj=&kcxz=&kcmc=&xsfs=all').text
    bs = BeautifulSoup(html, features='html.parser')
    trs = bs.find('table', id='dataList').find_all('tr')
    mapper = ['order', 'semester', 'code', 'name', 'group', 'score', 'score_tag', 'credit', 'hours', 'point',
              'rebuild_semester', 'assessment_type', 'assessment_nature', 'course_attribute', 'course_nature',
              'category']
    numeric_columns = {5, 7, 8, 9}
    scores = []
    for tr in trs:
        tds = tr.find_all('td')
        if len(tds) != 16:  # header/malformed rows don't have all 16 cells
            continue
        row = {}
        for i, name in enumerate(mapper):
            if i == 0:  # drop the row-number column
                continue
            cell = tds[i].text.replace('\n', '')
            row[name] = _number(cell) if i in numeric_columns else cell
        scores.append(row)
    return scores


def get_exams(s):
    """Fetch the exam schedule for the currently-selected term.

    First reads the query page to discover the selected term id, then
    posts the listing request for that term and flattens each 13-cell
    table row into a dict (the leading row-number column is dropped).
    """
    query_page = s.post('http://xjwis.ynufe.edu.cn/jsxsd/xsks/xsksap_query')
    soup = BeautifulSoup(query_page.text, features='html.parser')
    term = soup.find('select', id='xnxqid').find('option', selected=True)['value']
    listing = s.post(f'http://xjwis.ynufe.edu.cn/jsxsd/xsks/xsksap_list?xqlbmc=&sxxnxq=&dqxnxq=&ckbz=&xnxqid={term}&xqlb=')
    soup = BeautifulSoup(listing.text, features='html.parser')
    columns = ['order', 'campus', 'exam_room_campus', 'sessions', 'code', 'name', 'teacher', 'time', 'exam_room', 'seat',
               'exam_num', 'remarks']
    exams = []
    for row in soup.find('table', id='dataList').find_all('tr'):
        cells = row.find_all('td')
        if len(cells) != 13:  # skip header / spacer rows
            continue
        exams.append({name: cells[idx].text.replace('\n', '')
                      for idx, name in enumerate(columns) if idx > 0})
    return exams


def get_course(s):
    """Fetch and parse the timetable for every campus time-template.

    Loads the timetable form once, rebuilds its fields, then posts it once
    per 'kbjcmsid' option (skipping the first, blank one) and merges the
    tables parsed by analysis_table(), tagging each pass with the campus
    name at the same position in *locations*.

    NOTE(review): assumes option order matches the locations list and that
    there are at most four non-blank options — TODO confirm against the
    live page.
    """
    res = s.get('http://xjwis.ynufe.edu.cn/jsxsd/xskb/xskb_list.do')
    bs = BeautifulSoup(res.text, features='html.parser')
    # Rebuild the form: first input's name, fixed fields, and the currently
    # selected term scraped out of the second <select> via regex.
    parameter = [(bs.find('input')['name'], ''), ('zc', ''), ('demo', ''),
                 (str(bs.find_all('select')[1]['name']),
                  re.search('<option.*selected.*value="(.*?)"', str(bs.find_all('select')[1])).group(1)),
                 ('sfFD', '1'),
                 ('kbjcmsid', '')]
    # All time-template ids (one per campus); index 0 is the blank default.
    code = [i['value'] for i in bs.find('select', id="kbjcmsid").find_all('option')]
    # Carry over every hidden jx* form field verbatim.
    for i in re.findall('<input type="hidden" name="jx.*>', res.text):
        parameter.append((re.search('name="(.*?)"', str(i)).group(1), re.search('value="(.*?)"', str(i)).group(1)))
    locations = ['南院', '安宁校区', '北院', '呈贡校区']
    course = []
    for j, i in enumerate(code[1:]):
        parameter[5] = ('kbjcmsid', i)  # slot 5 is the placeholder appended above
        html = s.post('http://xjwis.ynufe.edu.cn/jsxsd/xskb/xskb_list.do', data=parameter).text
        course += analysis_table(html, locations[j])
    return course


def get_time():
    """Return the static class-period timetable.

    'big' holds the seven major blocks of the day (each covering a run of
    small periods); 'small' holds the fifteen individual periods with
    their start/end clock times.
    """
    big_titles = ['第一大节', '第二大节', '第三大节', '第四大节', '第五大节', '第六大节', '第七大节']
    big_spans = [('08:00', '09:30', [1, 2]),
                 ('09:50', '12:10', [3, 4, 5]),
                 ('14:30', '16:00', [6, 7]),
                 ('16:20', '17:50', [8, 9]),
                 ('18:00', '18:50', [10]),
                 ('19:00', '20:30', [11, 12]),
                 ('20:50', '22:20', [13, 14, 15])]
    small_spans = [('08:00', '08:40'), ('08:50', '09:30'), ('09:50', '10:30'),
                   ('10:40', '11:20'), ('11:30', '12:10'), ('14:30', '15:10'),
                   ('15:20', '16:00'), ('16:20', '17:00'), ('17:10', '17:50'),
                   ('18:00', '18:50'), ('19:00', '19:40'), ('19:50', '20:30'),
                   ('20:50', '21:30'), ('21:40', '22:20'), ('21:40', '22:20')]
    big = [{'title': title, 's': start, 'e': end, 'small': members}
           for title, (start, end, members) in zip(big_titles, big_spans)]
    small = [{'title': number, 's': start, 'e': end}
             for number, (start, end) in enumerate(small_spans, start=1)]
    return {'big': big, 'small': small}


def login(account, password):
    """Log in to the jsxsd portal.

    Returns (success, session): *success* is True when the response page
    title is no longer the login page's, and *session* carries the
    authenticated cookies for subsequent requests.
    """
    session = requests.session()
    session.headers.update({
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'User-Agent': 'Mozilla/5.0(WindowsNT10.0;Win64;x64)AppleWebKit/537.36(KHTML,likeGecko)Chrome/80.0.3987.132Safari/537.36'})
    # Prime session cookies before posting credentials.
    session.get('http://xjwis.ynufe.edu.cn/jsxsd/')
    payload = {
        'userAccount': account,
        'userPassword': '',
        # The portal expects both values scrambled into the 'encoded' field.
        'encoded': encode_inp(account) + '%%%' + encode_inp(password),
    }
    html = session.post('http://xjwis.ynufe.edu.cn/jsxsd/xk/LoginToXk', data=payload).text
    title = BeautifulSoup(html, features='html.parser').find('title').text
    return title != '登录', session


def main(account='201805002590', password='@Shy2206034346'):
    """Log in and print the aggregated student data as one JSON document.

    Args:
        account: portal account; defaults to the previously hard-coded value
            for backward compatibility.
        password: portal password, same caveat.

    NOTE(review): real credentials are committed in source — rotate them
    and load from the environment or a config file instead of relying on
    these defaults.
    """
    success, s = login(account, password)
    all_data = {'success': success}
    if success:
        info, week = get_user_info_and_week(s)
        all_data = {'success': success,
                    # Local date, zero-padded — equivalent to the old manual zfill formatting.
                    'date': time.strftime('%Y-%m-%d'),
                    'week': week,
                    'user_info': info,
                    'exams': get_exams(s),
                    'scores': get_scores(s),
                    'course': get_course(s)}
        all_data['time_data'] = get_time()
    # ensure_ascii=False keeps CJK text readable directly. The old
    # .encode('utf-8').decode('unicode_escape') round-trip also un-escaped
    # \" and \\ inside strings, which could corrupt the printed JSON.
    print(json.dumps(all_data, ensure_ascii=False))


if __name__ == "__main__":
    main()
