# from django.shortcuts import render

# Create your views here.
# from django.http import HttpResponse
import json
import requests
import time
from bs4 import BeautifulSoup
import time
import re
import urllib


# Template for one scraped search-result record.
def resultData():
    """Return a fresh, fully-initialized result record.

    Every call produces a new dict (with new empty lists), so callers may
    mutate the record freely without affecting later results.
    """
    record = {"websiteType": 0}
    # All plain-text fields default to the empty string.
    for field in (
        "titleName", "titleHref", "source", "label", "volume", "summary",
        "detailId", "publishTime", "category", "quote", "abstract", "doi",
        "downLoad",
    ):
        record[field] = ""
    # List-valued fields get fresh lists per call.
    record["keyword"] = []
    record["author"] = []
    return record

def formData(response):
    """Parse a universitypressscholarship.com search-results page.

    Parameters
    ----------
    response : object with a ``.text`` attribute (e.g. requests.Response)
        The fetched search-results HTML page.

    Returns
    -------
    list[dict]
        One record per result ``<div>`` in the shape produced by
        ``resultData()``; fields that cannot be located on the page are
        left at their defaults.
    """
    allData = []
    soup = BeautifulSoup(response.text, "html5lib")

    # Each search hit is rendered as a <div> inside #searchContent.
    # (Raises IndexError if the page has no #searchContent at all.)
    lis = soup.select("#searchContent")[0].select('div')

    for li in lis:
        tempData = resultData()

        # Every field is optional on the page, so each lookup is guarded.
        # Catch only the errors a missing element can raise (IndexError for
        # empty select() results, KeyError for a missing attribute) instead
        # of the original bare ``except:``, which also hid real bugs such as
        # NameError and swallowed KeyboardInterrupt.
        try:
            tempData["titleName"] = li.select("h2 a")[0].text.strip()
        except IndexError:
            pass
        try:
            tempData["titleHref"] = "http:" + li.select("h2 a")[0]["href"]
        except (IndexError, KeyError):
            pass
        try:
            # NOTE(review): stored as plain text although resultData()
            # initializes "author" to a list — confirm what consumers expect.
            tempData["author"] = li.select("h3")[0].text.strip()
        except IndexError:
            pass
        try:
            tempData["publishTime"] = li.select(".clearfix .left span")[0].select('dd')[0].text.strip()
        except IndexError:
            pass
        try:
            tempData["source"] = li.select(".clearfix .right span")[0].select('dd')[0].text.strip()
        except IndexError:
            pass
        try:
            tempData["doi"] = li.select(".clearfix .right span")[1].select('dd')[0].text.strip()
        except IndexError:
            pass
        try:
            # [:-4] presumably trims a trailing "more"/ellipsis marker from
            # the shortened abstract — TODO confirm against the live markup.
            tempData["abstract"] = li.select("p.abstract.short_abstract.context")[1].text.strip()[:-4]
        except IndexError:
            pass

        allData.append(tempData)

    return allData

def getData(keyword):
    """Fetch search results for *keyword* from universitypressscholarship.com.

    Fixes over the original:
    - *keyword* was ignored and the query was hard-coded to ``'ax'``;
      the caller's keyword is now actually sent.
    - Removed case-variant duplicate headers ('Accept' vs 'accept', ...);
      requests treats header names case-insensitively, so only one copy of
      each is needed.
    - Removed the stale ``If-None-Match`` / ``If-Modified-Since`` headers:
      a conditional request can return ``304 Not Modified`` with an empty
      body, which would break formData().
    - Removed ``authority: www.google-analytics.com``, which belonged to a
      different (analytics) request and pointed at the wrong host.

    Parameters
    ----------
    keyword : str
        The search term to query.

    Returns
    -------
    list[dict]
        Parsed result records (see formData).
    """
    headers = {
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.104 Safari/537.36',
        'Accept': 'image/webp,image/apng,image/*,*/*;q=0.8',
        'Referer': 'http://www.universitypressscholarship.com/search?q=ax&searchBtn=Search&isQuickSearch=true',
        'Connection': 'keep-alive',
        'Origin': 'http://www.universitypressscholarship.com',
        'Content-Type': 'application/x-www-form-urlencoded',
    }

    # requests URL-encodes the keyword for us when building the query string.
    params = (
        ('q', keyword),
        ('searchBtn', 'Search'),
        ('isQuickSearch', 'true'),
    )

    response = requests.get('http://www.universitypressscholarship.com/search', headers=headers, params=params)

    allData = formData(response)

    return allData


def index(request=None):
    """Entry point: extract the search keyword from a POST payload and scrape.

    The original body referenced an undefined global ``request`` (a
    guaranteed NameError — this looks like a Django view that lost its
    ``request`` parameter). ``request`` is now accepted as an optional
    parameter so the existing module-level ``index()`` call keeps working.

    Parameters
    ----------
    request : optional
        Django-style request whose ``POST['data']`` holds a JSON payload
        with the search term at ``values[0]``. When omitted, an empty
        keyword is used.

    Returns
    -------
    list[dict]
        The scraped result records.
    """
    keyword = ""
    if request is not None:
        # NOTE(review): the original default '1' decodes to the int 1, on
        # which the ["values"] lookup below would raise — confirm that the
        # caller always supplies a proper JSON object in POST['data'].
        data = json.loads(request.POST.get('data', '1'))
        keyword = data["values"][0]

    allData = getData(keyword)

    print(allData)
    return allData




index()