# user/views.py
import os
import re
import subprocess
from datetime import datetime

from django.conf import settings
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.hashers import make_password
from django.db.models import Q, Count, Sum, F, Avg

from django_filters.rest_framework import DjangoFilterBackend
from drf_yasg import openapi
from drf_yasg.utils import swagger_auto_schema
from rest_framework import serializers, viewsets
from rest_framework.filters import OrderingFilter
from rest_framework.permissions import DjangoModelPermissions
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.viewsets import GenericViewSet
from xpinyin import Pinyin

from utils.generics import ListAPIView, CreateAPIView, UpdateAPIView
from utils.pagination import CustomPageNumberPagination

from .filter import CityFilter, CityDataFilter
from .models import City, CityData
from .serializer import CitySerializer, CityDataSerializer
# 通用序列化 (generic response body: status flag + human-readable message)
class CrawlerResponseSerializer(serializers.Serializer):
	"""Generic response envelope used by the action views below: a status flag ('ok'/'error') and a message."""
	status = serializers.CharField()
	msg = serializers.CharField()

# 通用序列化 (generic response body: status flag + result list)
class StoreDataSerializer(serializers.Serializer):
	"""Generic response envelope for the analysis views: a status flag ('ok'/'error') and a list of result dicts."""
	status = serializers.CharField()
	results = serializers.ListField()

# 爬取数据接口 (crawl weather data for one area/year and cache its averages)
class CrawlerDataView(APIView):
	"""Run the crawler for one (area, year) pair, then store temperature averages on the City row.

	POST body: {'area': <city name>, 'year': <year string>}.
	Responses always carry HTTP 200 with a {'status', 'msg'} payload.
	"""

	# Request schema: both fields are required by the handler below
	# (the original declared unrelated username/pwd fields, a copy-paste slip).
	params = openapi.Schema(type=openapi.TYPE_OBJECT, required=['area', 'year'], properties={
		'area': openapi.Schema(type=openapi.TYPE_STRING, description='地区', example='上海'),
		'year': openapi.Schema(type=openapi.TYPE_STRING, description='年份', example='2021'),
		})

	@swagger_auto_schema(
		operation_description='根据地区和年份进行爬取数据',
		responses={200: CrawlerResponseSerializer},
		request_body=params)
	def post(self, request):
		"""Start the crawler synchronously, then aggregate the scraped CityData rows."""
		data = request.data
		area = data.get('area', '')
		year = data.get('year', '')
		if not area or not year:
			return Response({'status': 'error', 'msg': '信息不全！'})
		# Refuse duplicate crawls for the same city/year.
		if City.objects.filter(name=area, year=year).exists():
			return Response({'status': 'error', 'msg': '已爬取该城市该年份数据！'})
		city = City.objects.create(name=area, year=year)
		# The crawler script is addressed relative to the project root and is
		# keyed by the pinyin of the city name.
		path = os.path.abspath('./apps/utils/crawler.py')
		pinyin_area = Pinyin().get_pinyin(area, '')
		# An argument list with shell=False (subprocess.run default) prevents
		# shell injection via the user-supplied area/year — os.system built a
		# shell command string from request data before.  str() also guards
		# against a non-string year arriving from the JSON body.
		subprocess.run(['python', path, pinyin_area, str(year), str(city.id)])
		# Cache the aggregates on the City row so list endpoints need no extra queries.
		stats = CityData.objects.filter(city=city).aggregate(
			avg_top_temperature=Avg('top_temperature'),
			avg_low_temperature=Avg('low_temperature'),
			# NOTE(review): this is the average of (top + low), i.e. twice the
			# midpoint temperature; kept as-is for backward compatibility —
			# confirm whether Avg((top + low) / 2) was intended.
			avg_temperature=Avg(F('top_temperature') + F('low_temperature')),
		)
		city.avg_top_temperature = stats['avg_top_temperature']
		city.avg_low_temperature = stats['avg_low_temperature']
		city.avg_temperature = stats['avg_temperature']
		city.save()
		return Response({'status': 'ok', 'msg': '爬虫启动成功'})

class GetCityByCityOrYearView(ListAPIView, GenericViewSet):

	"""条件查询城市信息 — list City rows, filterable via the fields declared on CityFilter."""
	queryset = City.objects.all()
	serializer_class = CitySerializer
	filter_backends = (DjangoFilterBackend,)
	# NOTE(review): django-filter >= 2.0 renamed this attribute to
	# `filterset_class`; `filter_class` is ignored there and filtering silently
	# stops working — confirm the installed django-filter version.
	filter_class = CityFilter

class GetCityDataByCityOrYearView(ListAPIView, GenericViewSet):

	"""条件查询、升降序城市数据信息 — list CityData rows with CityDataFilter filtering and `ordering` query-param sorting."""
	queryset = CityData.objects.all()
	serializer_class = CityDataSerializer
	filter_backends = [DjangoFilterBackend,OrderingFilter]
	# NOTE(review): django-filter >= 2.0 renamed this attribute to
	# `filterset_class`; confirm the installed django-filter version.
	filter_class = CityDataFilter
	# Fields the client may sort on via ?ordering=<field> / ?ordering=-<field>.
	ordering_fields = ('id','publish_time','top_temperature','low_temperature')

# 分析天气状况接口 (count records per weather condition under optional filters)
class WeatherStatusView(APIView):
	"""Count CityData rows per distinct weather value, under optional filters.

	POST body: any of publish_time / week / weather / city__name (all optional).
	Response: {'status': 'ok', 'results': [{'weather': w, 'count': n}, ...]}.
	"""

	# All filter fields are optional (the original required unrelated
	# username/pwd fields, a copy-paste slip).
	params = openapi.Schema(type=openapi.TYPE_OBJECT, properties={
		'publish_time': openapi.Schema(type=openapi.TYPE_STRING, description='日期', example='2019-01-01'),
		'week': openapi.Schema(type=openapi.TYPE_STRING, description='星期', example='星期一'),
		'weather': openapi.Schema(type=openapi.TYPE_STRING, description='天气状况', example='小雨'),
		'city__name': openapi.Schema(type=openapi.TYPE_STRING, description='城市名称', example='上海'),
		})

	@swagger_auto_schema(
		operation_description='多条件筛选天气状况',
		responses={200: StoreDataSerializer},
		request_body=params)
	def post(self, request):
		"""Group the filtered rows by weather and return a count per value."""
		data = request.data
		filters = {}
		if data.get('publish_time'):
			filters['publish_time__icontains'] = data['publish_time']
		if data.get('week'):
			filters['week'] = data['week']
		if data.get('weather'):
			filters['weather__icontains'] = data['weather']
		if data.get('city__name'):
			filters['city__name'] = data['city__name']
		# Single GROUP BY query; the original issued one extra COUNT query per
		# distinct weather value (N+1).
		rows = (CityData.objects.filter(**filters)
			.values('weather')
			.annotate(count=Count('pk')))
		results = [{'weather': row['weather'], 'count': row['count']} for row in rows]
		return Response({'status': 'ok', 'results': results})

# 删除城市接口 (delete a city and all of its weather records)
class DeleteCityView(APIView):
	"""Delete one City row plus all of its CityData rows.

	DELETE body: {'id': <city id>}.  Always returns HTTP 200 with a
	{'status', 'msg'} payload.
	"""

	# Only the city id is required (the original declared unrelated
	# username/pwd fields, a copy-paste slip).
	params = openapi.Schema(type=openapi.TYPE_OBJECT, required=['id'], properties={
		'id': openapi.Schema(type=openapi.TYPE_STRING, description='城市Id', example='1'),
		})

	@swagger_auto_schema(
		operation_description='删除城市',
		responses={200: CrawlerResponseSerializer},
		request_body=params)
	def delete(self, request):
		"""Remove the city identified by the posted id, or report that it does not exist."""
		city_id = request.data.get('id', '')  # renamed: `id` shadowed the builtin
		try:
			city = City.objects.get(id=city_id)
		except (City.DoesNotExist, ValueError):
			# ValueError covers a missing/non-numeric id, which Django raises
			# before the lookup — previously this surfaced as a 500.
			return Response({'status': 'error', 'msg': '该城市不存在！'})
		# Delete dependent rows first, then the row we already fetched —
		# explicit rather than relying on FK on_delete, which isn't visible here.
		CityData.objects.filter(city=city).delete()
		city.delete()
		return Response({'status': 'ok', 'msg': '删除成功！'})

# 分析风向接口 (count records per compass wind direction under optional filters)
class WindStatusView(APIView):
	"""Count CityData rows matching each of the eight compass wind directions.

	POST body: any of publish_time / week / weather / city__name (all optional).
	Response: {'status': 'ok', 'results': [{'wind_direction': d, 'count': n}, ...]}.
	"""

	# All filter fields are optional (the original required unrelated
	# username/pwd fields, a copy-paste slip).
	params = openapi.Schema(type=openapi.TYPE_OBJECT, properties={
		'publish_time': openapi.Schema(type=openapi.TYPE_STRING, description='日期', example='2019-01-01'),
		'week': openapi.Schema(type=openapi.TYPE_STRING, description='星期', example='星期一'),
		'weather': openapi.Schema(type=openapi.TYPE_STRING, description='天气状况', example='小雨'),
		'city__name': openapi.Schema(type=openapi.TYPE_STRING, description='城市名称', example='上海'),
		})

	@swagger_auto_schema(
		operation_description='多条件筛选天气状况',
		responses={200: StoreDataSerializer},
		request_body=params)
	def post(self, request):
		"""Count filtered rows per wind direction (substring match, so e.g. 东北风 also counts toward 北风)."""
		data = request.data
		filters = {}
		if data.get('publish_time'):
			filters['publish_time__icontains'] = data['publish_time']
		if data.get('week'):
			filters['week'] = data['week']
		if data.get('weather'):
			filters['weather__icontains'] = data['weather']
		if data.get('city__name'):
			filters['city__name'] = data['city__name']
		# Base queryset hoisted out of the loop; one COUNT per direction remains
		# (overlapping icontains matches prevent a single GROUP BY here).
		base_qs = CityData.objects.filter(**filters)
		results = []
		for direction in ('东风', '西风', '南风', '北风', '东北风', '西北风', '东南风', '西南风'):
			results.append({
				'wind_direction': direction,
				'count': base_qs.filter(wind_direction__icontains=direction).count(),
			})
		return Response({'status': 'ok', 'results': results})