from django.http import HttpResponse
import json
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlencode


# HTTP proxy configuration with embedded credentials.
# NOTE(review): not referenced anywhere in this file (requests.post below
# does not pass proxies=); confirm whether it is used elsewhere or dead.
# SECURITY: credentials are hard-coded in source — move to settings/env.
proxies = {
        "http": "http://qf:qf3.1415@202.112.118.7:10086/"
    }

#返回结果格式
def resultData():
    """Return a fresh, empty search-result record.

    Every call builds a new dict (all fields preset to their empty
    defaults) so callers may mutate the returned record freely.
    """
    return {
        "websiteType": 0,
        "titleName": "",
        "titleHref": "",
        "source": "",
        "label": "",
        "volume": "",
        "summary": "",
        "detailId": "",
        "publishTime": "",
        "category": "",
        "quote": "",
        "abstract": "",
        "doi": "",
        "downLoad": "",
        "keyword": [],
        "author": [],
    }

def formData(response):
    """Parse the infobank.cn search-result page into result records.

    Each ``<tr>`` inside ``form tbody`` becomes one dict from
    ``resultData()``.  Extraction is best-effort: a row missing a cell,
    link, or href keeps that field's empty default instead of aborting.

    Args:
        response: a requests Response whose ``.text`` is the result HTML.

    Returns:
        list[dict]: one record per result row (possibly empty).
    """
    allData = []
    soup = BeautifulSoup(response.text, "html5lib")

    for row in soup.select("form tbody > tr"):
        tempData = resultData()
        cells = row.select("td")  # hoisted: was re-queried per field

        # Narrow exceptions only — a bare `except: pass` would also hide
        # programming errors and KeyboardInterrupt.
        try:
            link = cells[3].select('a')[0]
            tempData["titleName"] = link.text
            tempData["titleHref"] = "http://www.infobank.cn" + link["href"]
        except (IndexError, KeyError):
            # No title cell / no <a> / no href attr: leave defaults.
            pass
        try:
            tempData["publishTime"] = cells[2].text
        except IndexError:
            pass

        allData.append(tempData)

    return allData

def getData(keyword, timeout=30):
    """Run a full-text search on infobank.cn and return parsed results.

    Args:
        keyword: search term; sent gb2312-encoded in the form body.
        timeout: seconds before the HTTP request is aborted.  Without
            this, a stalled remote host would hang the calling worker
            indefinitely.

    Returns:
        list[dict]: records parsed by ``formData`` (possibly empty).

    Raises:
        requests.RequestException: on connection failure or timeout.
    """
    headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.infobank.cn",
        # NOTE: Content-Length is intentionally NOT set here — requests
        # computes it from the actual body.  The previous hard-coded
        # value ("72") would be wrong for most keywords.
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "Accept-Encoding": "gzip, deflate",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36"
    }

    # Search form: db=HS, 50 hits per page, fixed date window.
    data = {
        'c1': '',
        'rl': '*',
        'c2': '',
        'c3': '',
        'db': 'HS',
        'fd': '@@',
        'ns': '50',
        'iw': keyword,
        'st': '20000223',
        'et': '20190222'
    }

    # The site expects the form body percent-encoded as gb2312, not UTF-8.
    data_gb2312 = urlencode(data, encoding='gb2312')

    response = requests.post(
        'http://www.infobank.cn/IrisBin/search.dll?SpSearch',
        headers=headers,
        data=data_gb2312,
        timeout=timeout,
    )
    # NOTE(review): response.text relies on requests' charset detection;
    # if results come back garbled, set response.encoding = 'gb2312'
    # before parsing — confirm against a live response.

    return formData(response)


def index(request):
    """Django view: JSON search endpoint.

    Expects a POST field ``data`` containing JSON like
    ``{"values": ["<keyword>", ...]}``; the first value is the search
    keyword.  Responds with ``{"collection": [...], "allNum": 0}``.

    Bug fixed: the old fallback ``'1'`` json-decoded to the int ``1``,
    so any request without a ``data`` field crashed with TypeError on
    ``data["values"]``.  Malformed or missing payloads now yield an
    empty collection instead of a 500.
    """
    allData = []
    try:
        payload = json.loads(request.POST.get('data', '{}'))
        keyword = payload["values"][0]
    except (ValueError, KeyError, IndexError, TypeError):
        keyword = None

    if keyword:
        allData = getData(keyword)

    return HttpResponse(json.dumps({
        "collection": allData,
        "allNum": 0
    }), content_type="application/json")