import json

import logging
import requests
from django.shortcuts import render

# Create your views here.
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from searchApi.config import webSiteInfo
# from demo.models import User
# from demo.serializer import UserSerializer


# @api_view(['POST'])
# def searchAction(request):
#     text=request.data['text']
#     text = '高考'
#     headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Origin': 'http://www.zvn360.com',
#                'Connection': 'keep-alive',
#                'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'}
#     datas = {'action': 'query', 'OutputEncoding': 'UTF8', 'Responseformat': 'json', 'TotalResults': 'true',
#              'text': text, 'start': 1, 'maxResults': 6}
#     url = "http://www.zvn360.com:31801/service/bd/post"
#     response = requests.post(url, data=datas, headers=headers)
#     if (response.status_code == 200):
#         resJson = json.loads(response.text)
#     else:
#         resJson={'code':response.status_code,'msg':'获取数据失败'}
#     return Response(resJson, status=status.HTTP_200_OK)
# List the web pages that need to be crawled.
@api_view(['GET'])
def getScrapyUrls(request):
    """Return every crawl-target URL from the configured sites as one flat list.

    Each entry in ``webSiteInfo.scrapyObj`` holds a list under its ``url``
    key; the lists are concatenated in configuration order.
    """
    urls = [u for site in webSiteInfo.scrapyObj for u in site['url']]
    return Response(urls, status=status.HTTP_200_OK)
# Get the sub-page marker (pageFlag) of a site, looked up by its origin URL (@url).
@api_view(['GET'])
def getPageFlagByUrl(request):
    """Return ``{'code': 0, 'data': {'pageFlag': ...}}`` for the configured
    site whose ``originUrl`` equals the ``url`` query parameter.

    When no site matches, ``code`` stays -1 and ``pageFlag`` stays None.
    """
    target = request.GET.get('url')
    payload = {'pageFlag': None}
    result = {'code': -1, 'data': payload}
    # next() with a generator replaces the explicit search loop + break.
    match = next((site for site in webSiteInfo.scrapyObj
                  if site['originUrl'] == target), None)
    if match is not None:
        payload['pageFlag'] = match['pageFlag']
        result['code'] = 0
    return Response(result, status=status.HTTP_200_OK)
# Get the full site-configuration record, looked up by origin URL (@originUrl).
@api_view(['GET'])
def getItemInfoByOriginUrl(request):
    """Return the configuration record for the site whose ``originUrl``
    equals the ``originUrl`` query parameter.

    Response shape: ``{'code': 0 | -1, 'data': {<field>: value-or-None}}``.
    ``code`` is 0 on a match, -1 otherwise; on no match every field is None.

    Fixes over the previous version: the "not found" template contained a
    duplicate ``'originName'`` key (the second silently overrode the first),
    a stray ``'flag'`` key that the "found" branch never used (it wrote
    ``'pageFlag'`` instead), and was missing ``'originUrl'``/``'url'`` —
    so clients saw a different schema depending on whether the lookup hit.
    The duplicate ``originName`` assignment in the match branch is gone too.
    """
    curOriginUrl = request.GET.get('originUrl')
    # Single source of truth for the payload schema, identical in both the
    # found and not-found cases.
    fields = ('originUrl', 'originName', 'pageFlag', 'path', 'url', 'type', 'group')
    response = {'code': -1, 'data': {f: None for f in fields}}
    for obj in webSiteInfo.scrapyObj:
        if curOriginUrl == obj['originUrl']:
            response['data'] = {f: obj[f] for f in fields}
            response['code'] = 0
            break
    return Response(response, status=status.HTTP_200_OK)

@api_view(['GET'])
def getWebSiteInfoAction(request):
    """Return every configured crawl site together with a total count.

    Response shape: ``{'code': 0, 'data': [...], 'total': <len(data)>}``.
    """
    sites = webSiteInfo.scrapyObj
    return Response({'code': 0, 'data': sites, 'total': len(sites)},
                    status=status.HTTP_200_OK)