# user/views.py
import os
import re
import subprocess
import sys
from datetime import datetime

from django.conf import settings
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.hashers import make_password
from django.db.models import Q, Count, Sum, F

from django_filters.rest_framework import DjangoFilterBackend
from drf_yasg import openapi
from drf_yasg.utils import swagger_auto_schema
from rest_framework import serializers, viewsets
from rest_framework.permissions import DjangoModelPermissions
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.viewsets import GenericViewSet

from .models import Crawler, MonitoringGoods, MonitoringGoodsPriceHistory
from .filter import CrawlerByKeyboardFilter, MonitoringByKeyboardFilter
from .serializer import CrawlerProfileSerializer, CheckGoodsByMonitoringSerializer, \
	AddGoodsToMonitoringSerializer, GoodsPriceHistorySerializer
from utils.pagination import CustomPageNumberPagination
from utils.generics import ListAPIView, CreateAPIView, UpdateAPIView
# Generic response serializer: shapes the swagger docs for status/msg replies.
class CrawlerResponseSerializer(serializers.Serializer):
	# Outcome flag ('ok' or 'error') and a human-readable message.
	status = serializers.CharField()
	msg = serializers.CharField()

# Generic response serializer: shapes the swagger docs for status/results replies.
class StoreDataSerializer(serializers.Serializer):
	# Outcome flag plus a list payload of aggregated rows.
	status = serializers.CharField()
	results = serializers.ListField()

# Crawl-trigger endpoint
class CrawlerDataView(APIView):
	"""Launch the crawler script for a given keyword and page count."""

	# Request-body schema. Both fields are required by the validation in
	# post() below (the old list ['username','pwd1','pwd2'] was a copy-paste
	# from a password view and named fields that do not exist here).
	params = openapi.Schema(type=openapi.TYPE_OBJECT, required=['keyboard', 'pages'], properties={
		'keyboard': openapi.Schema(type=openapi.TYPE_STRING, description='关键字', examples='电脑'),
		'pages': openapi.Schema(type=openapi.TYPE_STRING, description='页数', examples='10'),
		})

	@swagger_auto_schema(
		operation_description='根据关键字进行爬取数据',
		responses={200: CrawlerResponseSerializer},
		request_body=params)
	def post(self, request):
		"""Run apps/utils/crawler.py with <keyboard> <pages> as arguments.

		Returns {'status': 'error', ...} when either field is missing,
		otherwise {'status': 'ok', ...} after the script has been run.
		"""
		data = request.data

		keyboard = data.get('keyboard', '')
		pages = data.get('pages', '')
		if not keyboard or not pages:
			return Response({'status': 'error', 'msg': '信息不全！'})
		path = os.path.abspath('./apps/utils/crawler.py')
		# Run with an argument list and shell=False: the previous
		# os.system("python " + path + " " + keyboard + " " + pages) passed
		# untrusted request data straight to a shell (command injection).
		# subprocess.run blocks until completion, matching os.system.
		subprocess.run([sys.executable, path, str(keyboard), str(pages)])
		return Response({'status': 'ok', 'msg': '爬虫启动成功'})

# List every crawled record
class GetCrawlerProfileListView(ListAPIView, GenericViewSet):

	"""获取所有爬虫数据"""
	# Unfiltered list over the whole Crawler table.
	queryset = Crawler.objects.all()
	serializer_class = CrawlerProfileSerializer

# Filter crawled records by keyword
# NOTE(review): the doubled "GetGet" in the class name looks like a typo, but
# renaming would break URLconf imports, so the name is kept as-is.
class GetGetCrawlerProfileByKeyboardView(ListAPIView, GenericViewSet):

	"""根据关键词筛选爬虫数据"""
	queryset = Crawler.objects.all()
	serializer_class = CrawlerProfileSerializer
	filter_backends = (DjangoFilterBackend,)
	# `filter_class` was renamed to `filterset_class` in django-filter 2.0
	# (and the old alias later removed); using the modern name also matches
	# the `filterset_fields` spelling used by the other views in this module.
	filterset_class = CrawlerByKeyboardFilter

# Check whether a product is being monitored
class CheckGoodsByMonitoringView(ListAPIView, GenericViewSet):

	"""判断商品是否被监控"""
	queryset = MonitoringGoods.objects.all()
	serializer_class = CheckGoodsByMonitoringSerializer
	filter_backends = (DjangoFilterBackend,)
	# Exact-match filtering via the ?crawler_id=<id> query parameter.
	filterset_fields = ('crawler_id',)

# Add a product to the monitoring list / list monitored products
class AddGoodsToMonitoringView(ListAPIView, CreateAPIView, GenericViewSet):

	"""post:将商品添加到监控 get:获取所有的监控商品"""
	queryset = MonitoringGoods.objects.all()
	serializer_class = AddGoodsToMonitoringSerializer
	filter_backends = (DjangoFilterBackend,)
	# `filter_class` was renamed to `filterset_class` in django-filter 2.0
	# (and the old alias later removed); using the modern name also matches
	# the `filterset_fields` spelling used by the other views in this module.
	filterset_class = MonitoringByKeyboardFilter

# Price history (trend) for one monitored product
class MonitoringGoodsPriceView(ListAPIView, GenericViewSet):

	"""根据根据商品id查询出价格趋势"""
	queryset = MonitoringGoodsPriceHistory.objects.all()
	serializer_class = GoodsPriceHistorySerializer
	filter_backends = (DjangoFilterBackend,)
	# Exact-match filtering via the ?monitoring_goods_id=<id> query parameter.
	filterset_fields = ('monitoring_goods_id',)

# Shop analysis endpoint
class StoreDataView(APIView):
	"""Per-shop sales statistics: comment count and estimated revenue."""

	# 'keyboard' is genuinely optional (post() handles the empty case), so
	# nothing is listed as required — the old list ['username','pwd1','pwd2']
	# was a copy-paste from a password view and named nonexistent fields.
	params = openapi.Schema(type=openapi.TYPE_OBJECT, required=[], properties={
		'keyboard': openapi.Schema(type=openapi.TYPE_STRING, description='关键字', examples='电脑'),
		})

	@swagger_auto_schema(
		operation_description='查询店铺信息',
		responses={200: StoreDataSerializer},
		request_body=params)
	def post(self, request):
		"""Return one row per shop with summed comment count and
		Sum(commit * price) revenue, optionally filtered by keyword.
		"""
		keyboard = request.data.get('keyboard', '')
		qs = Crawler.objects.all()
		if keyboard:
			qs = qs.filter(keyboard=keyboard)
		# One GROUP BY query for all shops. The previous implementation
		# fetched the distinct shop list and then ran a separate aggregate()
		# query per shop (N+1 queries) computing the same sums.
		rows = qs.values('shop').annotate(
			commit_sum=Sum('commit'),
			total=Sum(F('commit') * F('price')),
		)
		count = []
		for index, row in enumerate(rows, start=1):
			item = {'shop': row['shop']}
			if keyboard:
				# Echo the keyword back in each row, as the old code did.
				item['keyboard'] = keyboard
			item['id'] = index
			item['commit'] = row['commit_sum']
			item['total'] = row['total']
			count.append(item)
		return Response({'status': 'ok', 'results': count})