import os
import shutil
from typing import Union
import uuid
from urllib.parse import unquote
from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from django.http import FileResponse, JsonResponse,HttpResponse
from django.http.response import mimetypes
from django.views.generic import FormView, TemplateView
from django.views.generic.base import View
from django.views.generic.edit import SingleObjectMixin
from django.conf import settings
from pathlib import Path
from django.http import FileResponse
from django.urls import reverse_lazy

from lib.django.views import LoginRequiredJsonResponseMixin

from .models import File
from .lib.utils import store_permanent
import json

TEMPLATE_DIR = Path("app/media/")

def template_path(path: Union[Path, str]) -> str:
    """Return *path* as a string, rooted under the app's template directory."""
    full_path = TEMPLATE_DIR.joinpath(path)
    return str(full_path)


def append_extension_from_filename(dest: Path, filename: str) -> Path:
    """Return *dest* carrying the file extension of *filename*.

    Any existing suffix on *dest* is replaced; a *filename* without an
    extension strips the suffix from *dest*.
    """
    _stem, extension = os.path.splitext(filename)
    return dest.with_suffix(extension)

class TemporaryFileUploadView(LoginRequiredJsonResponseMixin, View):
    """Temporary upload endpoint implementing the FilePond server protocol.

    Docs: https://pqina.nl/filepond/docs/api/server/

    Uploads land under ``UPLOAD_DIR/<uuid4>/<filename>``; the string
    ``<uuid4>/<filename>`` is the id returned to the client and echoed back
    on PATCH/DELETE/GET. Because that id is client-controlled, every path
    built from it is validated against ``UPLOAD_DIR`` to block traversal.
    """
    UPLOAD_DIR = settings.MEDIA_TMP_UPLOAD_ROOT

    def post(self, request, *args, **kwargs):
        """Process a full upload, or allocate space for a chunked one.

        FilePond signals a chunked transfer by sending an Upload-Length
        header with the total size; the actual bytes then arrive via PATCH.
        """
        upload_length = request.META.get('HTTP_UPLOAD_LENGTH')
        upload_custom_filename = request.META.get('HTTP_UPLOAD_CUSTOM_FILENAME', '')

        if upload_length is not None:
            return self._handle_chunked_initial_upload(int(upload_length), upload_custom_filename)
        return self._handle_full_upload(request, upload_custom_filename)

    def _handle_chunked_initial_upload(self, upload_length: int, file_name: str):
        """Pre-allocate a file of ``upload_length`` bytes and return its id."""
        # basename() strips any directory components a malicious client
        # could smuggle into the filename (e.g. '../../etc/passwd').
        file_name = os.path.basename(unquote(file_name))

        upload_id = self._generate_temp_dir_id() + "/" + file_name
        dest_path = self.UPLOAD_DIR / upload_id
        os.makedirs(os.path.dirname(dest_path), exist_ok=True)

        with open(dest_path, 'wb') as f:
            # seek(-1) would raise on a zero-length upload; only pre-allocate
            # when there is at least one byte to reserve.
            if upload_length > 0:
                f.seek(upload_length - 1)
                f.write(b'\0')

        return HttpResponse(upload_id)

    def _handle_full_upload(self, request, file_name: str):
        """Store a regular (non-chunked) upload and return its id."""
        if len(request.FILES) != 1:
            return JsonResponse({'error': 'No file provided'}, status=400)
        # See _handle_chunked_initial_upload: the filename is untrusted.
        file_name = os.path.basename(unquote(file_name))

        _field_name, upload = list(request.FILES.items())[0]

        upload_id = self._generate_temp_dir_id() + "/" + file_name
        dest_path = self.UPLOAD_DIR / upload_id
        os.makedirs(os.path.dirname(dest_path), exist_ok=True)

        # Stream in chunks so large uploads are not buffered entirely in
        # memory (upload.read() would load the whole file at once).
        with open(dest_path, 'wb') as f:
            for chunk in upload.chunks():
                f.write(chunk)

        return HttpResponse(upload_id)

    def patch(self, request, *args, **kwargs):
        """Write one chunk of a chunked upload at the given Upload-Offset."""
        upload_id = self.kwargs.get('id')

        upload_length = request.META.get('HTTP_UPLOAD_LENGTH')
        upload_name = request.META.get('HTTP_UPLOAD_NAME')
        upload_offset = request.META.get('HTTP_UPLOAD_OFFSET')

        if not (upload_length and upload_name and upload_offset):
            return JsonResponse({'error': 'Missing headers!'}, status=400)

        dest_path = self._resolve_upload_path(upload_id)
        if dest_path is None or not os.path.exists(dest_path):
            return JsonResponse({'error': 'File not found'}, status=404)

        try:
            offset = int(upload_offset)
            length = int(upload_length)

            # Offset must fall inside the pre-allocated file.
            if offset < 0 or offset >= length:
                return JsonResponse({'error': 'Invalid offset'}, status=400)

            # The chunk must not write past the declared total size.
            chunk_size = len(request.body)
            if offset + chunk_size > length:
                return JsonResponse({'error': 'Chunk exceeds file size'}, status=400)

            with open(dest_path, 'r+b') as f:
                f.seek(offset)
                f.write(request.body)

            return JsonResponse({
                'offset': offset + chunk_size,
                'received': chunk_size
            })

        except (ValueError, IOError) as e:
            return JsonResponse({'error': str(e)}, status=400)

    def delete(self, request, *args, **kwargs):
        """Implement the `revert` functionality: remove a temporary upload."""
        if not request.body:
            return JsonResponse({'error': 'No file ID provided'}, status=400)

        upload_id = request.body.decode('utf-8')
        dest_path = self._resolve_upload_path(upload_id)

        if dest_path is None or not os.path.isfile(dest_path):
            return JsonResponse({'error': 'File not found!'}, status=400)

        parent = Path(os.path.dirname(dest_path))
        if parent.resolve() == Path(self.UPLOAD_DIR).resolve():
            # The id has no directory component; rmtree(parent) would wipe
            # EVERY temporary upload, so remove just the file itself.
            os.remove(dest_path)
        else:
            shutil.rmtree(parent)

        return HttpResponse("OK")

    def head(self, request, *args, **kwargs):
        """Return the current upload offset for chunked upload. (the request occurs when upload
        failed)."""
        # We do not track partial progress server-side: always tell the
        # client to restart from the beginning.
        response = HttpResponse()
        response['Upload-Offset'] = str(0)

        return response

    def get(self, request, *args, **kwargs):
        """Restore/retrieve a previously uploaded temporary file."""
        upload_id = self.kwargs.get('id')

        file_path = self._resolve_upload_path(upload_id)
        if file_path is None or not os.path.isfile(file_path):
            return JsonResponse({'error': 'File not found'}, status=404)

        original_filename = request.GET.get('filename', upload_id)
        content_type, _ = mimetypes.guess_type(original_filename)
        if not content_type:
            content_type = 'application/octet-stream'

        # Hand FileResponse an open handle that it will close itself; a
        # ``with`` block here would close the file before the response body
        # is streamed out.
        response = FileResponse(open(file_path, 'rb'), content_type=content_type)
        response['Content-Disposition'] = f'inline; filename="{original_filename}"'
        return response

    def _resolve_upload_path(self, upload_id):
        """Map a client-supplied id to a path inside UPLOAD_DIR.

        Returns None when the id is empty or would escape the upload root
        (path traversal via '..' or an absolute path).
        """
        if not upload_id:
            return None
        root = Path(self.UPLOAD_DIR).resolve()
        candidate = (root / upload_id).resolve()
        try:
            candidate.relative_to(root)
        except ValueError:
            return None
        return candidate

    def _generate_temp_dir_id(self):
        # we'd like to let id contains the file extension
        # since in our form send, there is not filename information, only the
        # id information
        dir_id = str(uuid.uuid4())

        # uuid4 collisions are effectively impossible, but re-roll anyway.
        while os.path.isdir(self.UPLOAD_DIR / dir_id):
            dir_id = str(uuid.uuid4())

        return dir_id

class RawFileView(
    LoginRequiredMixin,
    PermissionRequiredMixin,
    SingleObjectMixin,
    View
):
    """Stream the raw contents of a File the requesting user may access."""

    queryset = File.objects.prefetch_related('allowed_users').all()
    owner_field = 'owner'

    def has_permission(self):
        # Resolve the object once during the permission check and cache it
        # on the view so get() can reuse it without a second query.
        self.object = self.get_object()
        return self.object.can_access(self.request.user)

    def get(self, request, *args, **kwargs):
        # self.object was populated by has_permission() during dispatch.
        return FileResponse(self.object.file)
        

class PublicFileListView(
    LoginRequiredMixin,
    TemplateView,
):
    """Render and synchronize the list of public files."""

    template_name = template_path("public.html")

    def post(self, request, *args, **kwargs):
        """Reconcile public files against the submitted JSON list.

        Entries with type 'existing' are kept (and their paths updated);
        public files absent from that set are deleted; entries with type
        'upload' are promoted from temporary storage.
        """
        try:
            current_user = request.user
            entries = json.loads(request.body)

            # Ids the client wants to keep; everything else public goes away.
            keep_ids = [str(entry['id']) for entry in entries if entry['type'] == 'existing']
            File.objects.filter(is_public=True).exclude(uuid__in=keep_ids).delete()

            existing_entries = [(entry['id'], entry['path']) for entry in entries if entry['type'] == 'existing']
            upload_entries = [(entry['id'], entry['path']) for entry in entries if entry['type'] == 'upload']

            if existing_entries:
                # Fetch all kept files in a single query, keyed by uuid.
                wanted_ids = [file_id for file_id, _ in existing_entries]
                records_by_uuid = {
                    str(record.uuid): record
                    for record in File.objects.filter(uuid__in=wanted_ids)
                }

                # Apply the (possibly changed) path to each surviving record.
                for file_id, new_path in existing_entries:
                    record = records_by_uuid.get(str(file_id))
                    if record is not None:
                        record.relation_name = new_path
                        record.save()

            # Promote freshly uploaded temporary files to permanent storage.
            for file_id, new_path in upload_entries:
                store_permanent(file_id, None, new_path, current_user, True)

            return JsonResponse({'msg': 'success!'})
        except json.JSONDecodeError:
            return JsonResponse({'error': 'Invalid JSON data'}, status=400)
        except Exception as e:
            # Boundary handler: surface any other failure as a 400.
            return JsonResponse({'error': str(e)}, status=400)

    


    
