from django.shortcuts import render

# Create your views here.
from django.http import HttpResponse
import json
import requests
import re
from bs4 import BeautifulSoup
import time
import pymysql
import json


def mysqlConnect():
    """Open a connection to the local ``ziyuanmenhu`` MySQL database.

    Returns:
        tuple: ``(conn, cur)`` -- an open pymysql connection and a cursor
        on it.  Callers are responsible for closing both.
    """
    # NOTE(review): credentials are hard-coded; move host/user/password
    # into Django settings or environment variables.  (A commented-out
    # block with production-looking RDS credentials was removed from this
    # function -- secrets must never live in source control.)
    conn = pymysql.connect(host="127.0.0.1", port=3306, user="root",
                           passwd="", db="ziyuanmenhu",
                           charset="utf8")
    cur = conn.cursor()

    return conn, cur

# Execute a single write statement (INSERT/UPDATE) and commit it.
def runInsertIntoSql(sql, data):
    """Run a parameterized write statement against MySQL and commit.

    Args:
        sql:  SQL string containing ``%s`` placeholders.
        data: sequence of parameter values bound to the placeholders.
    """
    conn, cur = mysqlConnect()
    try:
        cur.execute(sql, data)
        conn.commit()
    finally:
        # Close even when execute/commit raises, so connections never leak.
        cur.close()
        conn.close()

# Run a parameterized SELECT and return every row.
def sqlSelect(sql, data):
    """Run a parameterized SELECT against MySQL and return all rows.

    Args:
        sql:  SQL string containing ``%s`` placeholders.
        data: sequence of parameter values bound to the placeholders.

    Returns:
        Row tuples as produced by ``cursor.fetchall()``.
    """
    conn, cur = mysqlConnect()
    try:
        cur.execute(sql, data)
        rows = cur.fetchall()
    finally:
        # Close even when execute/fetch raises, so connections never leak.
        cur.close()
        conn.close()
    return rows


def buildingParameters(data):
    """Build a CNKI expert-search expression such as ``TI='a' AND KY='b'``.

    Field codes: SU=subject, TI=title, KY=keyword, AB=abstract, FT=full
    text, AU=author, FI=first author, AF=affiliation, JN=journal,
    RF=cited reference, YE=year, FU=fund, CLC=CLC number, SN=ISSN,
    CN=serial number, IB=ISBN, CF=citation count.

    Args:
        data: dict with parallel lists ``type`` (field codes), ``values``
            (search terms) and ``log`` (logical connectors; one fewer
            entry than ``values``).

    Returns:
        str: the joined expression with the trailing sentinel stripped.
    """
    # A "###" sentinel terminates the last term so every term can be
    # formatted the same way; it is stripped from the final string below.
    # Work on a copy so the caller's dict is NOT mutated (the original
    # appended to data["log"] in place, so repeated calls with the same
    # dict accumulated sentinels).
    connectors = list(data["log"]) + ["###"]

    parts = [
        "{}='{}' {} ".format(field, value, conn)
        for field, value, conn in zip(data["type"], data["values"], connectors)
    ]

    return "".join(parts).strip().strip("#").strip()

def updateCookie(cookies, tempCookies):
    """Merge every entry of tempCookies into cookies (in place).

    Existing keys are overwritten; the mutated ``cookies`` dict is
    returned for convenience.
    """
    cookies.update(tempCookies)
    return cookies

def searchData(strr,startTime,endTime):
    return   {
      '$action': '',
      'NaviCode': '*',
      'ua': '1.21',
      'isinEn': '1',
      'PageName': 'ASP.brief_result_aspx',
      'DbPrefix': 'SCDB',
      'DbCatalog': '\u4E2D\u56FD\u5B66\u672F\u6587\u732E\u7F51\u7EDC\u51FA\u7248\u603B\u5E93',
      'ConfigFile': 'SCDB.xml',
      'db_opt': 'CJFQ,CDFD,CMFD,CPFD,IPFD,CCND,CCJD',
      'expertvalue': strr,
      'publishdate_from': startTime,
      'publishdate_to': endTime,
      'his': '0',
      '__': 'Tue Dec 11 2018 18:02:51 GMT+0800 (\u4E2D\u56FD\u6807\u51C6\u65F6\u95F4)'
    }

def resultData():
    """Return a fresh, empty result record for one scraped article.

    Every call produces new (unshared) list objects for the ``keyword``
    and ``author`` fields, so callers may mutate them freely.
    """
    return dict(
        websiteType=0,
        titleName="",
        titleHref="",
        source="",
        label="",
        volume="",
        summary="",
        detailId="",
        publishTime="",
        category="",
        quote="",
        abstract="",
        doi="",
        downLoad="",
        keyword=[],
        author=[],
    )

def joinC(strr, startTime, endTime):
    """Return the CNKI search payload for the given expression and dates.

    Thin wrapper around :func:`searchData`, kept so existing callers keep
    working.  (A block of dead commented-out tuple-building code was
    removed from the body.)
    """
    return searchData(strr, startTime, endTime)

# Upsert the stored cookies for a given user/website pair.
def updateCookiesToMysql(uid, webId, cookies):
    """Insert or update the stored cookie dict for ``(uid, webId)``.

    The cookie dict is JSON-serialized before storage.  A count query
    decides between INSERT (with creation timestamp) and UPDATE.
    """
    # json.dumps already returns str -- the original wrapped it in a
    # redundant str() and serialized twice.
    serialized = json.dumps(cookies)

    sql = "select count(1) as num from cookies where uid=%s and webId=%s"
    data = sqlSelect(sql, [uid, webId])
    if data[0][0] == 0:
        sql = '''insert into cookies(uid,cookies,webId,createTime) values(%s,%s,%s,%s)'''
        runInsertIntoSql(sql, [uid, serialized, webId, int(time.time())])
    else:
        sql = '''update cookies set cookies=%s where uid=%s and webId=%s'''
        runInsertIntoSql(sql, [serialized, uid, webId])

def getCookiesFromMysql(uid, webId):
    """Load and deserialize the stored cookie dict for ``(uid, webId)``.

    Raises IndexError if no row exists for the pair.
    """
    rows = sqlSelect(
        "select cookies from cookies where uid=%s and webId=%s",
        [uid, webId],
    )
    return json.loads(rows[0][0])

def index(request):
    """Django view: run a CNKI search for the first keyword in the POSTed data.

    Expects ``request.POST['data']`` to be a JSON object with a ``values``
    list whose first element is the search keyword.

    Returns:
        HttpResponse with JSON ``{"collection": [...], "allNum": N}``.
    """
    data = json.loads(request.POST.get('data', '1'))
    page = 1

    # Defaults keep the response well-formed even when no search runs
    # (the original referenced allData while binding it only under
    # ``page == 1``).
    allData = []
    allNum = 0

    if page == 1:
        cookies, url = getOneCookies(data["values"][0])
        tempCookies, allData, allNum = getDataSearch(cookies, url)

    return HttpResponse(json.dumps({
        "collection": allData,
        # The original hard-coded 0 here, discarding the count computed
        # by getDataSearch.
        "allNum": allNum
    }), content_type="application/json")


def getOneCookies(keyword):
    """Fire the priming CNKI SearchHandler request for *keyword*.

    CNKI requires this initial request: it establishes the session
    cookies, and its response body is the page identifier consumed by the
    follow-up ``brief.aspx`` request (see getDataSearch).

    Args:
        keyword: subject (``SU``) search term.

    Returns:
        tuple: ``(cookies, url)`` -- the session cookie dict and the raw
        response text (the result-page identifier).
    """
    headers = {
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36',
        'Accept': '*/*',
        'Referer': 'http://acad.cnki.net/KNS/brief/result.aspx?dbprefix=CJFQ',
        'Connection': 'keep-alive',
    }

    params = (
        ('action', ''),
        ('NaviCode', '*'),
        ('ua', '1.21'),
        ('PageName', 'ASP.brief_result_aspx'),
        ('DbPrefix', 'CJFQ'),
        ('DbCatalog', '\u4E2D\u56FD\u5B66\u672F\u671F\u520A\u7F51\u7EDC\u51FA\u7248\u603B\u5E93'),
        ('ConfigFile', 'CJFQ.xml'),
        ('db_opt', '\u4E2D\u56FD\u5B66\u672F\u671F\u520A\u7F51\u7EDC\u51FA\u7248\u603B\u5E93'),
        ('db_value', '\u4E2D\u56FD\u5B66\u672F\u671F\u520A\u7F51\u7EDC\u51FA\u7248\u603B\u5E93'),
        ('year_type', 'echar'),
        ('txt_1_sel', 'SU'),
        ('txt_1_value1', keyword),
        ('txt_1_relation', '#CNKI_AND'),
        ('txt_1_special1', '='),
        ('his', '0'),
        ('__', 'Mon Jan 28 2019 11:33:45 GMT+0800 (\u4E2D\u56FD\u6807\u51C6\u65F6\u95F4)'),
    )

    s = requests.session()

    response = s.get('http://acad.cnki.net/KNS/request/SearchHandler.ashx', headers=headers, params=params)

    # (An unused RequestsCookieJar instance was removed -- the cookie
    # dict comes straight from the session.)
    cookies = s.cookies.get_dict()
    url = response.text

    return cookies, url

def getDataSearch(cookies, apsUrl):
    """Fetch the first CNKI brief-result page and parse it.

    Args:
        cookies: session cookie dict from getOneCookies.
        apsUrl:  page identifier returned by the SearchHandler request.

    Returns:
        tuple: ``(cookies, results, allNum)`` -- the refreshed cookie
        dict, the parsed result list, and the total-count string from
        getDataZhiWangSearch.
    """
    # Millisecond timestamp used as a cache-busting query parameter.
    t = int(time.time() * 1000)

    headers = {
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Referer': 'http://acad.cnki.net/KNS/brief/result.aspx?dbprefix=CJFQ',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
    }

    url = "http://acad.cnki.net/KNS/brief/brief.aspx?pagename=" + apsUrl + "&t={}&keyValue=&S=1&sorttype=&DisplayMode=custommode".format(t)

    s = requests.session()

    response = s.get(url, headers=headers, cookies=cookies)

    # Refresh cookies from the session (an unused RequestsCookieJar
    # instance was removed here).
    cookies = s.cookies.get_dict()

    # Renamed from ``resultData`` -- the original local shadowed the
    # module-level resultData() factory function.
    results, allNum = getDataZhiWangSearch(response)

    return cookies, results, allNum

def getDataZhiWangSearch(response):
    """Parse one CNKI brief-result page into a list of result records.

    Args:
        response: the HTTP response object for a ``brief.aspx`` result
            page (its ``.text`` is parsed with BeautifulSoup/html5lib).

    Returns:
        tuple: ``(allData, total)`` where ``allData`` is a list of dicts
        shaped by resultData() and ``total`` is the digits-only string
        extracted from the pager title cell.
    """
    allData = []
    soup = BeautifulSoup(response.text, "html5lib")

    trs = soup.select(".GridContent > ul > li")

    # Strip every non-digit from the pager title to get the hit count.
    page_number1 = soup.select(".pagerTitleCell")[0].text
    page_number2 = re.sub(r'[^0-9]', ' ', page_number1).strip().replace(" ", '')

    for tr in trs:

        tempData = resultData()

        # websiteType 1 marks records scraped from this site.
        tempData["websiteType"] = 1

        # Each field is scraped best-effort: a missing node simply leaves
        # the resultData() default in place.  ``except Exception`` (not a
        # bare except) so KeyboardInterrupt/SystemExit still propagate.
        try:
            titleName = tr.select(".GridTitleDiv")[0].select("a")[0].text
            tempData["titleName"] = re.findall(r"ReplaceJiankuohao\('(.*?)'\)\)\)\)", titleName)[0]
        except Exception:
            pass
        try:
            tempData["titleHref"] = tr.select(".GridTitleDiv")[0].select("a")[0]["href"]
        except Exception:
            pass

        try:
            tempData["source"] = tr.select(".fontgreen > a")[-2].text
        except Exception:
            pass
        try:
            tempData["publishTime"] = tr.select(".fontgreen > a")[-1].text
        except Exception:
            pass

        try:
            tempData["downLoad"] = tr.select(".GridContentDiv > div")[-1].select("label")[0].text
        except Exception:
            pass
        try:
            tempData["author"] = getAuthorZhiWang(tr.select(".fontgreen > a"))
        except Exception:
            pass

        allData.append(tempData)

    return allData, page_number2


def getAuthorZhiWang(authors):
    """Return the text of each author link node as a plain list of strings."""
    return [node.text for node in authors]


def getDataSearchZhiwangPage(cookies,url):
    """Fetch a specific CNKI result page (pagination) and parse it.

    Args:
        cookies: session cookie dict to send with the request.
        url: absolute URL of the result page to fetch.
    """
    headers = {
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Referer': 'http://kns.cnki.net/kns/brief/brief.aspx?pagename=ASP.brief_result_aspx&isinEn=1&dbPrefix=SCDB&dbCatalog=%e4%b8%ad%e5%9b%bd%e5%ad%a6%e6%9c%af%e6%96%87%e7%8c%ae%e7%bd%91%e7%bb%9c%e5%87%ba%e7%89%88%e6%80%bb%e5%ba%93&ConfigFile=SCDB.xml&research=off&t=1544522694073&keyValue=&S=1&sorttype=',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
    }
    s = requests.session()

    response = s.get(url, headers=headers,cookies=cookies)

    # return response.text,1,1

    c = requests.cookies.RequestsCookieJar()  # unused jar instance; cookies come from the session below

    # Refresh cookies from the session after the fetch.
    cookies = s.cookies.get_dict()

    # NOTE(review): getDataZhiWangSearch returns a 2-tuple (allData, count),
    # so unpacking three names here will raise ValueError at runtime --
    # confirm the intended return arity before relying on this path.
    allData,allNum,queryId = getDataZhiWangSearch(response)

