using System.Collections.Generic;
using Unity.MLAgents;
using UnityEngine;

/// <summary>
/// Controls a two-team (Red pursuer vs. Blue evader) satellite environment:
/// registers agents into ML-Agents groups, hands out dense distance-based
/// shaping rewards every physics step, detects capture / timeout, and resets
/// the scene between episodes.
/// </summary>
public class SatelliteEnvController : MonoBehaviour
{
    /// <summary>
    /// Per-agent bookkeeping: the agent plus its spawn pose and rigidbody,
    /// cached in <see cref="Start"/>.
    /// </summary>
    [System.Serializable]
    public class PlayerInfo
    {
        public AgentSatellite Agent;
        [HideInInspector]
        public Vector3 StartingPos;
        [HideInInspector]
        public Quaternion StartingRot;
        [HideInInspector]
        public Rigidbody Rb;
    }

    /// <summary>
    /// Max Academy steps before this platform resets.
    /// At the default fixed timestep of 0.02 s, 15000 steps = 300 s.
    /// A value of 0 or less disables the timeout.
    /// </summary>
    [Tooltip("Max Environment Steps")] public int MaxEnvironmentSteps = 15000;

    // List of agents on this platform. Expected to contain at least one
    // Red (pursuer) and one Blue (evader) agent; teams are resolved by the
    // agents' `team` field, not by list order.
    public List<PlayerInfo> AgentsList = new List<PlayerInfo>();

    // Assumed maximum pursuer/evader separation, used to normalize the
    // distance into [0, 1] for the shaping reward.
    private const float MaxDistance = 1000f;

    // Separation at or below which Red is considered to have captured Blue.
    private const float CaptureDistance = 20f;

    private SimpleMultiAgentGroup m_RedAgentGroup;
    private SimpleMultiAgentGroup m_BlueAgentGroup;

    // First agent of each team, resolved once in Start() instead of
    // assuming fixed indices (the original hard-coded AgentsList[0] as Red
    // and AgentsList[1] as Blue, which silently breaks if the inspector
    // list is reordered).
    private PlayerInfo m_RedPlayer;
    private PlayerInfo m_BluePlayer;

    // Steps elapsed in the current episode.
    private int m_ResetTimer;

    void Start()
    {
        m_RedAgentGroup = new SimpleMultiAgentGroup();
        m_BlueAgentGroup = new SimpleMultiAgentGroup();

        foreach (var item in AgentsList)
        {
            // Cache spawn pose and physics body for resets.
            item.StartingPos = item.Agent.transform.localPosition;
            item.StartingRot = item.Agent.transform.localRotation;
            item.Rb = item.Agent.GetComponent<Rigidbody>();

            if (item.Agent.team == Team.Blue)
            {
                m_BlueAgentGroup.RegisterAgent(item.Agent);
                if (m_BluePlayer == null)
                {
                    m_BluePlayer = item;
                }
            }
            else
            {
                m_RedAgentGroup.RegisterAgent(item.Agent);
                if (m_RedPlayer == null)
                {
                    m_RedPlayer = item;
                }
            }
        }
        ResetScene();
    }

    void FixedUpdate()
    {
        m_ResetTimer += 1;

        // Dense shaping reward based on the normalized pursuer/evader
        // distance: Red (pursuer) is rewarded for closing the gap, Blue
        // (evader) for keeping it open.
        Vector3 redPos = m_RedPlayer.Agent.transform.localPosition;
        Vector3 bluePos = m_BluePlayer.Agent.transform.localPosition;
        float distance = (redPos - bluePos).magnitude;
        float normalizedDistance = distance / MaxDistance;
        m_RedAgentGroup.AddGroupReward(1f - normalizedDistance);
        m_BlueAgentGroup.AddGroupReward(normalizedDistance);

        if (distance <= CaptureDistance)
        {
            // Capture: a true terminal state, so end the group episode
            // (the original used GroupEpisodeInterrupted(), which tells the
            // trainer the episode was truncated rather than finished and
            // skews value bootstrapping). Red's bonus decays with elapsed
            // time to reward early captures.
            m_RedAgentGroup.AddGroupReward(2f - (float)m_ResetTimer / MaxEnvironmentSteps);
            m_BlueAgentGroup.AddGroupReward(-2f);
            m_BlueAgentGroup.EndGroupEpisode();
            m_RedAgentGroup.EndGroupEpisode();
            ResetScene();
        }
        else if (MaxEnvironmentSteps > 0 && m_ResetTimer >= MaxEnvironmentSteps)
        {
            // Timeout: the evader survived. This is a time-limit truncation,
            // not a terminal state, so GroupEpisodeInterrupted() is correct
            // here. Blue's bonus grows with the final separation.
            m_BlueAgentGroup.AddGroupReward(1f + normalizedDistance);
            m_RedAgentGroup.AddGroupReward(-2f);
            m_BlueAgentGroup.GroupEpisodeInterrupted();
            m_RedAgentGroup.GroupEpisodeInterrupted();
            ResetScene();
        }
    }

    /// <summary>
    /// Resets the episode step counter and respawns every agent at a random
    /// offset around its configured initial position, with velocities zeroed.
    /// </summary>
    public void ResetScene()
    {
        m_ResetTimer = 0;

        foreach (var item in AgentsList)
        {
            var offset = new Vector3(
                Random.Range(-5f, 5f),
                Random.Range(-5f, 5f),
                Random.Range(-10f, 10f));
            var newStartPos = item.Agent.initialPos + offset;

            // NOTE(review): SetPositionAndRotation works in WORLD space,
            // while FixedUpdate/Start read LOCAL positions. This only
            // coincides when the platform root sits at the identity
            // transform — confirm initialPos is world-space, otherwise
            // switch to localPosition/localRotation.
            item.Agent.transform.SetPositionAndRotation(newStartPos, Quaternion.identity);
            item.Rb.velocity = Vector3.zero;
            item.Rb.angularVelocity = Vector3.zero;
        }
    }
}
