from django.shortcuts import render

# Create your views here.
from django.http import HttpResponse
import json
import requests
import time
from bs4 import BeautifulSoup
import time
import re
import urllib


#返回结果格式
def resultData():
    """Return a fresh result-record template dict.

    Every call produces a new dict so callers can mutate it freely.
    Text fields default to "", websiteType to 0, and the two list
    fields (keyword, author) to fresh empty lists.
    """
    string_fields = (
        "titleName", "titleHref", "source", "label", "volume",
        "summary", "detailId", "publishTime", "category", "quote",
        "abstract", "doi", "downLoad",
    )
    record = {field: "" for field in string_fields}
    record["websiteType"] = 0
    record["keyword"] = []
    record["author"] = []
    return record
    
def formData(response):
    """Parse an Oxford Academic search-results page into result records.

    Args:
        response: a requests.Response whose ``.text`` is the HTML of the
            journals search-results page.

    Returns:
        (allData, allNum): ``allData`` is a list of result dicts (see
        ``resultData``); ``allNum`` is the total-results count extracted
        from the "Showing X of N results for ..." statistics line, as a
        string.

    Raises:
        IndexError: if the statistics element is missing from the page
        (e.g. the site layout changed or the request was blocked).
    """
    soup = BeautifulSoup(response.text, "html5lib")

    # "Showing X of N results for ..." -> pull out N and strip spaces.
    allNum = (
        soup.select("#ContentColumn .sr-statistics")[0]
        .text.split('of')[1]
        .split('results for')[0]
        .replace(' ', '')
    )

    allData = []
    for li in soup.select("#ContentColumn .sr-list"):
        tempData = resultData()
        # Each field is scraped independently; a missing element on one
        # entry must not abort the whole page, so failures leave the
        # template default in place.
        try:
            tempData["titleName"] = li.select(".sri-title a")[0].text.strip()
        except (IndexError, AttributeError):
            pass
        try:
            tempData["titleHref"] = (
                "https://academic.oup.com" + li.select(".sri-title a")[0]["href"]
            )
        except (IndexError, KeyError, AttributeError):
            pass
        try:
            tempData["author"] = [
                d.text.strip() for d in li.select(".sri-authors span a")
            ]
        except AttributeError:
            pass
        try:
            # BUG FIX: select() returns a ResultSet (a list); the original
            # called .next_sibling on it, which always raised and silently
            # left "source" empty. Take the first match and step to the
            # next sibling *tag* (skipping whitespace text nodes), then
            # join the link texts it contains.
            authors_node = li.select(".sri-authors")[0]
            source_node = authors_node.find_next_sibling()
            tempData["source"] = " ".join(
                a.text.strip() for a in source_node.find_all('a')
            ).strip()
        except (IndexError, AttributeError):
            pass
        try:
            tempData["abstract"] = li.select('.snippet')[0].text
        except (IndexError, AttributeError):
            pass

        allData.append(tempData)

    return allData, allNum

def getData(keyword, page=1):
    """Fetch one page of Oxford Academic journal search results.

    Args:
        keyword: the search query string.
        page: 1-based results page number (default 1; previously the
            page was hard-coded to 1).

    Returns:
        (allData, allNum) as produced by ``formData``: the parsed result
        records and the total-results count string.

    Raises:
        requests.RequestException: on network failure or timeout.
    """
    # Browser-like headers to avoid trivial bot blocking.
    headers = {
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Referer': 'https://academic.oup.com/journals/',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9,en-US;q=0.8,en-CA;q=0.7,en;q=0.6',
    }

    params = (
        ('page', str(page)),
        ('q', keyword),
        ('fl_SiteID', '5567'),
        ('SearchSourceType', '1'),
        ('allJournals', '1'),
    )

    # SECURITY NOTE(review): proxy credentials are hard-coded here in
    # plain text; they should be moved to settings/environment config.
    proxies = {
        "https": "https://qf:qf3.1415@202.112.118.7:10086/"
    }

    # timeout added so a stalled upstream cannot hang the Django worker.
    response = requests.get(
        'https://academic.oup.com/journals/search-results',
        headers=headers,
        params=params,
        proxies=proxies,
        timeout=30,
    )

    return formData(response)


def index(request):
    """Django view: search Oxford Academic journals and return JSON.

    Expects a POST field ``data`` containing JSON of the form
    ``{"values": [<keyword>, ...]}``; only the first value is used.

    Returns:
        HttpResponse with JSON ``{"collection": [...], "allNum": "<N>"}``.
    """
    data = json.loads(request.POST.get('data', '1'))

    allData, allNum = getData(data["values"][0])

    # BUG FIX: allNum was computed but the response hard-coded 0,
    # so clients never saw the real total-results count.
    return HttpResponse(json.dumps({
        "collection": allData,
        "allNum": allNum,
    }), content_type="application/json")