from rest_framework import serializers
from .models import *
from common.SchedulingAlgorithm import *
from common.KubeClient import KubeClient

class PortSerializer(serializers.ModelSerializer):
    """Nested serializer for a single container port mapping (PodPort row)."""
    class Meta:
        model = PodPort
        fields = ['containerPort','hostPort']

class ContainerSerializer(serializers.ModelSerializer):
    """Nested serializer for one Container, including its port mappings."""
    # One container may expose several ports, hence many=True.
    ports = PortSerializer(many=True)
    class Meta:
        model = Container
        fields = ["name","image","ports"]

class K8sSerializer(serializers.ModelSerializer):
    """Writable serializer for a Deployment plus its nested containers/ports.

    ``pod`` carries the nested container definitions; because DRF does not
    write nested relations automatically, ``create``/``update`` persist them
    manually via :meth:`_build_containers`.
    """
    pod = ContainerSerializer(many=True)

    class Meta:
        model = Deployment
        fields = ['deployment_name','replica','namespace','deployment_type','full_map','pod']

    def _build_containers(self, deployment, pod_data):
        """Create Container and PodPort rows for *deployment* from nested data.

        Only keys actually present in the payload are forwarded, so model
        defaults apply for anything omitted.
        """
        for docker in pod_data:
            docker_dict = {'deployment': deployment}
            if 'name' in docker:
                docker_dict['name'] = docker['name']
            if 'image' in docker:
                docker_dict['image'] = docker['image']
            new_docker = Container.objects.create(**docker_dict)
            # Be defensive: tolerate a container entry without 'ports'.
            for port in docker.get('ports', []):
                port_dict = {'pod': new_docker}
                if 'containerPort' in port:
                    port_dict['containerPort'] = port['containerPort']
                if 'hostPort' in port:
                    port_dict['hostPort'] = port['hostPort']
                PodPort.objects.create(**port_dict)

    def create(self, validated_data):
        """Create the Deployment row, then its nested containers and ports."""
        pod_data = validated_data.pop('pod')
        # After popping 'pod', validated_data holds exactly the Deployment
        # model fields declared in Meta.fields.
        deployment = Deployment.objects.create(**validated_data)
        self._build_containers(deployment, pod_data)
        return deployment

    def update(self, instance, validated_data):
        """Update the Deployment and fully replace its nested containers.

        Fix: operate on the ``instance`` DRF already resolved instead of
        re-querying by deployment_name/namespace from the payload — the old
        lookup raised KeyError on partial data and DoesNotExist when the
        name changed, and issued a redundant second query before returning.
        """
        pod_data = validated_data.pop('pod')
        for key, value in validated_data.items():
            setattr(instance, key, value)
        instance.save()
        # Replace-all strategy: drop existing containers (ports cascade via
        # the Container FK) and recreate from the incoming payload.
        Container.objects.filter(deployment=instance).delete()
        self._build_containers(instance, pod_data)
        return instance

class ConditionSerializer(serializers.Serializer):
    """Read-only representation of one deployment condition entry."""
    last_transition_time = serializers.DateTimeField()
    last_update_time = serializers.DateTimeField()
    message = serializers.CharField()
    reason = serializers.CharField()
    # NOTE(review): Kubernetes reports condition status as a string
    # ("True"/"False"/"Unknown"); IntegerField(default=0) looks suspicious —
    # confirm what the upstream producer actually supplies here.
    status = serializers.IntegerField(default=0)
    type = serializers.CharField()

class KubeItemSerializer(serializers.Serializer):
    """Read-only view of a deployment: declared config plus live cluster status.

    Combines the fields stored on the Deployment model (name, namespace,
    replica, pod, full_map) with status counters and conditions — presumably
    gathered from the cluster at read time; verify against the caller.
    """
    deployment_name = serializers.CharField()
    namespace = serializers.CharField()
    deployment_type = serializers.CharField()
    replica = serializers.IntegerField()
    available_replicas = serializers.CharField()
    collision_count = serializers.CharField()
    observed_generation = serializers.CharField()
    ready_replicas = serializers.IntegerField()
    updated_replicas = serializers.IntegerField()
    run = serializers.CharField()
    labels = serializers.JSONField()
    pod = ContainerSerializer(many=True)
    conditions = ConditionSerializer(many=True)
    full_map = serializers.JSONField()

class K8sNodeSerializer(serializers.Serializer):
    """Read-only view of one cluster node as returned by KubeClient.list_node()."""
    # Readiness flag as a string — elsewhere in this file it is compared
    # against the literal "True".
    status = serializers.CharField()
    ipaddr = serializers.IPAddressField()
    hostname = serializers.CharField()

class TaskRecordResourceSerializer(serializers.ModelSerializer):
    """Serializer exposing every field of a TaskRecord; nested under K8sJob as 'jobarray'."""
    class Meta:
        model = TaskRecord
        fields = "__all__"

class DockerEnvSerializer(serializers.ModelSerializer):
    """Serializer for one container environment variable (key/value pair)."""
    class Meta:
        model = DockerEnv
        fields = ['key','value']

class K8sJobSerializer(serializers.ModelSerializer):
    """Writable serializer for a K8sJob.

    On create/update it asks the cluster for the currently ready nodes,
    partitions the work via ``data_study_list`` and persists one TaskRecord
    per resulting entry, plus optional DockerEnv rows.
    """
    jobarray = TaskRecordResourceSerializer(many=True,required=False)

    class Meta:
        model = K8sJob
        fields = ['id','name','namespace','image','node_percent','uuid','weight','require_data','type','pu','ram','disk','is_parallel','jobarray','algorithm']

    @staticmethod
    def _ready_node_ips():
        """Return IP addresses of cluster nodes whose status is "True"."""
        return [n['ipaddr'] for n in KubeClient().list_node()
                if n['status'] == "True"]

    def create(self, validated_data):
        """Create the K8sJob, its scheduled TaskRecords and optional env vars."""
        # NOTE(review): 'envs' is not declared in Meta.fields, so this pop
        # presumably always yields None — confirm whether env support was
        # intentionally disabled or should be restored.
        envs = validated_data.pop('envs', None)
        train_list = data_study_list(self._ready_node_ips(), validated_data)
        k8sjob = K8sJob.objects.create(**validated_data)
        for t in train_list:
            TaskRecord.objects.create(job=k8sjob, **t)
        if envs is not None:
            for e in envs:
                DockerEnv.objects.create(job=k8sjob, **e)
        return k8sjob

    def update(self, instance, validated_data):
        """Update the K8sJob in place, replacing its TaskRecords and env vars."""
        envs = validated_data.pop('envs', None)
        input_nodes = self._ready_node_ips()
        for key, value in validated_data.items():
            setattr(instance, key, value)

        # Replace-all strategy: reschedule from scratch against the nodes
        # that are ready right now.
        TaskRecord.objects.filter(job=instance).delete()
        for t in data_study_list(input_nodes, validated_data):
            TaskRecord.objects.create(job=instance, **t)
        DockerEnv.objects.filter(job=instance).delete()
        if envs is not None:
            for e in envs:
                DockerEnv.objects.create(job=instance, **e)

        instance.save()
        return instance
# class AppConfigSerializer(serializers.ModelSerializer):
#     class Meta:
#         model = AppConfig
#         fields = "__all__"
#
# class AppsSerializer(serializers.ModelSerializer):
#     scripts = serializers.FileField(max_length=None, allow_empty_file=False,required=False)
#     appcatagory = AppCatagorySerializer
#     # group = GroupSerializer(many=True, required=False, read_only=True)
#     group_list = serializers.PrimaryKeyRelatedField(required=False,many=True, read_only=False, queryset=Group.objects.all(),source="group")
#     class Meta:
#         model = App
#         fields = ["id","name","describe","index","appcatagory","group_list","scripts"]




