
import subprocess
import uuid
import os
import logging
from . import models 
from . import serializers
from django.http import JsonResponse
from django.template import Context, Template
from rest_framework.viewsets import ModelViewSet
from public.utils import file_size_convert, get_files_from_request
from django.utils import timezone
from django.conf import settings
from rest_framework.exceptions import ValidationError
from rest_framework.views import APIView
from django.db import transaction
from rest_framework.decorators import action
from django.template.loader import render_to_string
from public.emali import send_mail_with_content
from rest_framework.pagination import PageNumberPagination

# 3 10
# Create your views here.
# 计算链
# 创建工作空间 上传文件
# 在指定工作空间，执行算法
class TaskViewSet(ModelViewSet):
  queryset = models.AlgorithmTask.objects.order_by("-create_time").all()
  serializer_class = serializers.AlgorithmsTaskListSerializer
  pagination_class = PageNumberPagination

  @action(methods=["GET"], detail=False)
  def statistics(self, request, *args, **kwargs):
    p = self.queryset.filter(status=models.AlgorithmTask.PENDING)
    s = self.queryset.filter(status=models.AlgorithmTask.SUCCESS)
    a = self.queryset.filter().all()
    r = self.queryset.filter(status=models.AlgorithmTask.RUNNING)

    return JsonResponse({
      "success": len(s),
      "pending": len(p),
      "all": len(a),
      "running": len(r)
    })
  
  @action(methods=["post"], detail=False)
  def load_by_id(self, request, *args, **kwargs):
    pdb_id = request.data["pdb_id"].lower()
    chain = request.data["chain"]
    output_dir = f'{settings.OUTPUT_PATH}/{timezone.now().strftime("%Y%m%d%H%M%S")}_{str(uuid.uuid4())[0:8]}'
    cmd = f'{settings.LOAD_ALG} -p {pdb_id} -o {output_dir} -c {chain}'
    logging.info(f"--- Load cmd {cmd}---")
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, err = process.communicate()
    if err:
      logging.error("----- Load cmd error ------")
      logging.error(err)
    # if output:
    #   output = output.decode("utf-8")
    #   output = output.split("\n")
    #   raise ValidationError({"message": f'The choice of chain should be one of the following: {output[1]}'})
    # else:
    # print(output)
    # try:
    #   models.PDBIdLoad.objects.get(pdb_id=pdb_id)
    # except models.PDBIdLoad.DoesNotExist:
    output_path = f'{output_dir}/{pdb_id}_{chain}.pdb'
    input_path = f'{output_dir}/{pdb_id}.pdb'
    pdb:models.PDBIdLoad = models.PDBIdLoad.objects.create(pdb_id=pdb_id, input_path=input_path, output_path=output_path, chains=chain, cmd=cmd)
    return JsonResponse({"id": pdb.pk})
    
  @action(methods=["post"], detail=False)
  def load(self, request, *args, **kwargs):
    file_id = request.data["file_id"]
    chain = request.data["chain"]
    file: models.FileUpload = models.FileUpload.objects.get(pk=file_id)
    output_dir = f'{settings.OUTPUT_PATH}/{timezone.now().strftime("%Y%m%d%H%M%S")}_{str(uuid.uuid4())[0:8]}'
    cmd = f'{settings.LOAD_ALG} -i {file.input_path} -o {output_dir}  -c {chain}'
    logging.info(f"--- Load cmd {cmd}---")
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, err = process.communicate()
    if err:
      logging.error("----- Load cmd error ------")
      logging.error(err)
    if output:
      output = output.decode("utf-8")
      output = output.split("\n")
      raise ValidationError({"message": f'The choice of chain should be one of the following: {output[1]}'})
    else:
      filename = file.row_name
      split_filename = os.path.splitext(filename)
      file_pre = split_filename[0]
      file_aft = split_filename[1]
      filename = f'{file_pre}_{chain}{file_aft}'
      output_path = f'{output_dir}/{filename}'
      file.output_path = output_path
      file.chains = chain
      file.cmd = cmd
      file.save()
      return JsonResponse({"message": "load success!"})

  def create(self, request, *args, **kwargs):
    try:
       workspace_dir = request.data["workspace_dir"]
       job_uuid = request.data["task_uuid"]
       workspace = models.Workspace.objects.get(pk=workspace_dir)
       workspace_dir = workspace.workspace_dir
       email = request.data.get("email", "no-emial@gmail.com")
       user_id = request.data["user_id"]
    except Exception as e:
       raise ValidationError({"message": "please input email and workspace_dir"})

    workspace_path = os.path.join(settings.WORKSPACE_PATH, workspace_dir)
    input_path = os.path.join(workspace_path, "input.list")

    if not os.path.exists(workspace_path):
       raise ValidationError({"message": "请先创建工作区"})
    
    # output 日志
    outputLog = os.path.join(workspace_path, "task_output.inf.log")
    # error 日志
    errorLog = os.path.join(workspace_path, "task_error.wf.log")
    # algorithm sh
    taskPath = os.path.join(workspace_path, "task.sh")
  
    if os.path.exists(taskPath):
       raise ValidationError({"message": "任务已经存在"})
    
    user_status_control: models.AlgorithmUUIDUserRuntimeStatus = models.get_uuid_user_status_control(user_id)
    system_control : models.AlgorithmSystemRuntimeControl = models.get_runtime_control()
    
    if models.exceed_user_maximum_task(user_status_control, system_control):
      raise ValidationError({"message": "任务数量已经达到每日任务上限，请明天再来"})
  
    user_status_control.unfree()
    template_str = """
#!/bin/bash

echo "-- Started Executing --"
sh {{ algorithm_path }} -i {{ input }} -o {{ output }}
echo "-- Completed Executing --"
    """
    # 编译模板
    template = Template(template_str)

    variables = {
       "input": input_path,
       "output": workspace_path,
       "algorithm_path": settings.RUN_ALG,
    }

    # 创建上下文并解析变量
    context = Context(variables)
    result = template.render(context)
    with open(taskPath, "w") as task_file:
      task_file.write(result) 

    # 执行算法任务
    cmd = f"nohup /bin/bash -c 'echo {job_uuid} && /bin/bash {taskPath}' 1>{outputLog} 2>{errorLog} && echo {job_uuid} &"
    
    logging.info("------------------------------------------")
    logging.info(f'{email} Join cmd {cmd}')
    logging.info("------------------------------------------")
    
    with transaction.atomic():
      # 记录任务
      t: models.AlgorithmTask = models.AlgorithmTask.objects.create(id=job_uuid, workspace_dir=workspace_path, workspace=workspace_dir, cmd=cmd, email=email, wk=workspace)
      # 放入队列
      models.AlgorithmProcessQueue.objects.create(task=t)
    return JsonResponse({'uuid': job_uuid, 'workspace': workspace_dir, 'workspace_path': workspace_path, "task_id": t.id})
  
  def retrieve(self, request, *args, **kwargs):
      instance = self.get_object()
      serializer = self.get_serializer(instance)

      data = serializer.data
      workspace: models.Workspace = models.Workspace.objects.get(pk=data["wk"])
      files: list[models.FileUpload] = models.FileUpload.objects.filter(pk__in=workspace.file_ids)
      pdb_ids: list[models.PDBIdLoad] = models.PDBIdLoad.objects.filter(pk__in=workspace.pdb_ids)
      result = []
      
      for file in files:
        file_info = {"file_name": file.row_name, "chain":file.chains, "id": f'FILE-{file.pk}', "type": "FILE"}
        result.append(file_info)

      for pdb_id in pdb_ids:
        file_info = {"file_name": pdb_id.pdb_id, "chain":pdb_id.chains,"id": f'PDB-{pdb_id.pk}', "type": "PDB ID"}
        result.append(file_info)
      
      data["info"] = result
      data["result_path"] = f'{data["workspace_dir"]}/results.zip'
      return JsonResponse({"results": [data]})
class TaskQueryAPIView(APIView):
  def get(self, request, *args, **kwargs):
    try:
       models.update_status()
    except Exception as e:
       logging.error(e)

    # 调度任务
    try:
      models.schedule()
    except Exception as e:
      logging.error(e)
    # 统计任务
    return JsonResponse({"message": "query successfully"})
  

class FilterAPIView(APIView):
  def get(self, request, *args, **kwargs):
    return JsonResponse(serializers.FilterListSerializer(models.Filter.objects.first()).data)

class TaskUploadAPIView(APIView):
  def post(self, request, *args, **kwargs):
    files = get_files_from_request(request)
    if not len(files):
      raise ValidationError({"message": "请传递文件"})
    try:  
      file = files[0]
      input_dir = f'{settings.INPUT_PATH}/{timezone.now().strftime("%Y%m%d%H%M%S")}_{str(uuid.uuid4())[0:8]}'
      os.makedirs(input_dir, exist_ok=True)
      input_path = f'{input_dir}/{file.name}'
      with open(input_path, 'w', encoding="utf-8") as f:
        for chunk in file.chunks():
          f.write(chunk.decode('utf-8'))
      file_upload = models.FileUpload.objects.create(input_path=input_path, row_name=file.name, file_size=file_size_convert(file.size)[0])
    except Exception as e:
      logging.error("--- file upload failed ---")
      logging.error(e)
      raise ValidationError({"message": "--- file upload failed ---", "stack": e})
    
    return JsonResponse(serializers.TaskFileUploadSerializer(instance=file_upload).data)


class WorkspaceAPIView(APIView):
  def post(self, request, *args, **kwargs):
    workspace: models.Workspace = models.Workspace()
    file_ids = request.data["file_ids"]
    pdb_ids = request.data["pdb_ids"]
    filter_options = request.data["filter_options"]
    user_id = request.data["user_id"]
    email = request.data.get("email", "no-emial@gmail.com")

    task_user_filter = models.TaskUser.objects.filter(user=user_id)
    task_user = None



    if not task_user_filter.exists():
      task_user = models.TaskUser.objects.create(user=user_id)

    is_filter = filter_options["is_filter"]
    # All Bacteria Phage
    workspace.file_ids = file_ids
    workspace.pdb_ids = pdb_ids
    workspace.filter_options = filter_options
    workspace.is_filter = is_filter
    workspace.email = email
    args = []
    bacteria_args = []
    phage_args = []
    prophage_args = []

    if is_filter:
       bacteria_filters = filter_options["bacterial_filter"]
       for bacteria_filter in bacteria_filters:
         bacteria_args.append(f'bacteria {bacteria_filter}')

       phage_filters = filter_options["phage_filter"]
       for phage_filter in phage_filters:
         phage_args.append(f'phage {phage_filter}')
         
       prophage_filters = filter_options["prophage_filter"]
       for prophage_filter in prophage_filters:
         prophage_args.append(f'proPhages {prophage_filter}')
    else:
      options = filter_options["options"]
      for opt in options:
        args.append(opt + " All")

    retry_time = 0
    workspace_time = timezone.now().strftime("%Y%m%d%H%M%S%f")[:-6]
    workspace_uuid = str(uuid.uuid4())[:6]
    workspace_dir = f'{workspace_time}{workspace_uuid}'
    workspace_path = os.path.join(settings.WORKSPACE_PATH, workspace_dir)
    workspace.workspace_dir = workspace_dir
    while True:
       if os.path.exists(workspace_path):
          workspace_path = workspace_path + str(retry_time)
          retry_time += 1
       else:
          break  
      
    # 创建工作空间
    os.makedirs(workspace_path)
    files: list[models.FileUpload] = models.FileUpload.objects.filter(pk__in=file_ids)
    pdb_ids: list[models.PDBIdLoad] = models.PDBIdLoad.objects.filter(pk__in=pdb_ids)
    result = []

    for file in files:
      file_info = {"file_name": file.row_name, "chain":file.chains}
      result.append(file_info)

    for pdb_id in pdb_ids:
      file_info = {"file_name": pdb_id.pdb_id, "chain":pdb_id.chains}
      result.append(file_info)

    with open(f'{workspace_path}/input.list', 'w') as f:
      for file in files:
        output_path = file.output_path
        if not is_filter:
          for arg in args:
            f.write(f'{output_path} {arg}')
            f.write("\n")
        else:
          for arg in bacteria_args:
            f.write(f'{output_path} {arg}')
            f.write("\n")
          for arg in phage_args:
            f.write(f'{output_path} {arg}')
            f.write("\n")
          for arg in prophage_args:
            f.write(f'{output_path} {arg}')
            f.write("\n")
      for pdb_id in pdb_ids:
        output_path = pdb_id.output_path
        if not is_filter:
          for arg in args:
            f.write(f'{output_path} {arg}')
            f.write("\n")
        else:
          for arg in bacteria_args:
            f.write(f'{output_path} {arg}')
            f.write("\n")
          for arg in phage_args:
            f.write(f'{output_path} {arg}')
            f.write("\n")
          for arg in prophage_args:
            f.write(f'{output_path} {arg}')
            f.write("\n")

    task_uuid = str(uuid.uuid4().hex)
    text = render_to_string("submit.html", {
        "job_id" : workspace_dir,
        "data": result,
        "job_url": f'https://gmpsdb.cn/home/task/{task_uuid}',
    })

    if email and email != "no-emial@gmail.com":
      send_mail_with_content([email], "Task submission successful", text)
    # 将所有文件放入工作空间
    files = get_files_from_request(request)

    # 写入文件
    for file in files:
       with open(f'{workspace_path}/{file.name}', 'w')  as f:
          f.write(str(file.read()))
    workspace.save()
    return JsonResponse({"work_dir": workspace.pk, "task_uuid": task_uuid})

class ChainFileAPIView(APIView):
  def post(self, request, *args, **kwargs):
    file = get_files_from_request(request)[0]
    return JsonResponse({"file": file.name, "chains": ["A", "B", "AB"]})

class ChainIdAPIView(APIView):
  def post(self, request, *args, **kwargs):
    pid = request.data["pid"]
    return JsonResponse({"protein_id": pid, "chains": ["A", "B", "AB"]})


