from rest_framework.views import  APIView
from rest_framework.response import Response
from django.core.paginator import Paginator

import requests
from bs4 import BeautifulSoup

from .models import CartHome
from elasticsearch import Elasticsearch, helpers
from .serializer import CartHomeModelSerializer
import redis

# Redis connection — DB 9 holds the news click-count ranking (a sorted set).
redis_rank = redis.Redis(db=9)

# Elasticsearch client — default constructor, assumes a node on localhost:9200.
es = Elasticsearch()

"""
安装：
    pip install elasticsearch
"""


# Pages to scrape: autohome.com.cn news listing, pages 2 through 299.
url_list = ['https://www.autohome.com.cn/news/{}/#liststart'.format(i) for i in range(2, 300)]

# Desktop Chrome User-Agent so the site serves the normal HTML page.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36'
}


class AcquireNewsView(APIView):
    """
    Scrape news articles from autohome.com.cn into the database.

    GET: iterates every page in ``url_list``, parses the ``<ul id="Ul1">``
    article list on each page, and bulk-inserts one CartHome row per article
    (title, link href, summary paragraph).
    """
    def get(self, request):
        for url in url_list:
            # Timeout so a single dead page cannot hang the whole request.
            result = requests.get(url=url, headers=headers, timeout=10)
            # Use the encoding detected from the page body, not the HTTP header.
            result.encoding = result.apparent_encoding
            html_obj = BeautifulSoup(result.text, features='lxml')
            # The article list lives in <ul id="Ul1">; skip pages that lack it
            # instead of crashing with AttributeError on None.
            ul = html_obj.find('ul', {'id': 'Ul1'})
            if ul is None:
                continue
            cart_home = []
            for li in ul.find_all('li'):
                title_tag = li.find('h3')
                link_tag = li.find('a')
                desc_tag = li.find('p')
                # Ad/placeholder <li> entries miss these children — skip them.
                if not (title_tag and link_tag and desc_tag):
                    continue
                cart_home.append(CartHome(
                    title=title_tag.text,
                    href=link_tag['href'],
                    desc=desc_tag.text
                ))
            # One INSERT per page instead of one per article.
            CartHome.objects.bulk_create(cart_home)
        return Response({'msg': 'OK'})

# Bulk-load database rows into Elasticsearch
class WriteDataEs(APIView):
    """
    Copy every CartHome row into the 'carthome' Elasticsearch index.

    GET: streams the rows as bulk-index actions via ``helpers.bulk``.
    """

    def get(self, request):
        def bulk_actions():
            # Yield one bulk action per row; generator keeps memory flat.
            for item in CartHome.objects.all():
                yield {
                    '_index': 'carthome',
                    '_type': 'doc',
                    '_source': {
                        'title': item.title,
                        'href': item.href,
                        'desc': item.desc,
                    },
                }

        helpers.bulk(es, bulk_actions())
        return Response({'msg': 'OK', 'code': 200})


# Query the Elasticsearch index
class SearchView(APIView):
    """
    Full-text search over article titles.

    POST param ``search``: the keyword to match. Matching fragments of the
    title are wrapped in a red <b> tag via ES highlighting.
    """
    def post(self, request):
        keyword = request.POST.get('search')
        highlight = {
            'pre_tags': '<b style="color: red">',
            'post_tags': '</b>',
            'fields': {'title': {}},
        }
        query_body = {
            'size': 20,
            'query': {'match': {'title': keyword}},
            'highlight': highlight,
        }
        result = es.search(index='carthome', body=query_body)
        return Response({'msg': 'OK', 'code': 200, 'data': result})


class ShowNewsPage(APIView):
    """
    Paginated news listing, 10 items per page.

    GET param ``current_page``: 1-based page number.
    Returns the serialized rows for that page plus the total row count.
    """
    def get(self, request):
        # Page number from the query string; may be missing or non-numeric.
        current_page = request.query_params.get('current_page')
        cart_list = CartHome.objects.all()
        paginator = Paginator(cart_list, 10)
        # get_page() tolerates bad input: non-integers fall back to page 1 and
        # out-of-range values clamp to the last page — unlike page(), which
        # raises PageNotAnInteger / EmptyPage and would 500 the request.
        paged = paginator.get_page(current_page)
        # Serialize the page contents (always a list, even for one row).
        cart_serializer = CartHomeModelSerializer(paged, many=True)
        return Response({
            'data': cart_serializer.data,
            'total': cart_list.count()
        })


class NewsRankView(APIView):
    """
    Record one click on an article in a Redis sorted set
    (set = deduplicated members; sorted = ordered by score).

    GET param ``news_id``: primary key of the clicked article.
    """
    def get(self, request):
        news_id = request.query_params.get('news_id')
        # Without an id, zincrby would fail trying to encode None — reject early.
        if not news_id:
            return Response({'msg': '缺少news_id', 'code': 400})
        # Add 1 to this article's score in the 'news_rank' sorted set.
        redis_rank.zincrby('news_rank', 1, news_id)
        return Response({'msg': '点击量加1', 'code': 200})


class RankView(APIView):
    """
    Return articles ordered by click count, highest first.

    Reads the 'news_rank' sorted set (member = article pk, score = clicks)
    and joins each entry to its CartHome row.
    """

    def get(self, request):
        # Entries come back as (id_bytes, score) pairs, highest score first.
        news_list = redis_rank.zrange('news_rank', 0, -1, desc=True, withscores=True)
        # Fetch all ranked rows in ONE query instead of one query per id (N+1).
        ids = [int(key) for key, _ in news_list]
        cart_map = CartHome.objects.in_bulk(ids)
        news_data = []
        for key, score in news_list:
            cart_obj = cart_map.get(int(key))
            # Skip ranking entries whose article row was deleted — the old
            # per-id .get() raised DoesNotExist and 500'd the whole endpoint.
            if cart_obj is None:
                continue
            news_data.append({
                'title': cart_obj.title,
                'counter': score
            })
        return Response(news_data)