using Baci.Net.ToolKit.ArcGISProGeoprocessor.Models;
using Baci.Net.ToolKit.ArcGISProGeoprocessor.Models.Attributes;
using Baci.Net.ToolKit.ArcGISProGeoprocessor.Models.Attributes.DomainAttributes;
using Baci.Net.ToolKit.ArcGISProGeoprocessor.Models.Enums;
using System.Collections.Generic;
using System.ComponentModel;

namespace Baci.ArcGIS._3DAnalystTools._PointCloud._Classification
{
    /// <summary>
    /// <para>Prepare Point Cloud Training Data</para>
    /// <para>Generates the data that will be used to train and validate
    /// a PointCNN model for classifying a point cloud.</para>
    /// </summary>
    [DisplayName("Prepare Point Cloud Training Data")]
    public class PreparePointCloudTrainingData : AbstractGPProcess
    {
        /// <summary>
        /// Creates the tool wrapper without assigning any parameters.
        /// </summary>
        public PreparePointCloudTrainingData()
        {
        }

        /// <summary>
        /// Creates the tool wrapper with all required parameters assigned.
        /// </summary>
        /// <param name="_in_point_cloud">
        /// <para>Input Point Cloud</para>
        /// <para>The point cloud that will be used to create the training data and, potentially, the validation data if no validation point cloud is specified. In this case, both the training boundary and the validation boundary must be defined.</para>
        /// </param>
        /// <param name="_block_size">
        /// <para>Block Size</para>
        /// <para>The two-dimensional width and height of each HDF5 tile created from the input point cloud. As a general rule, the block size should be large enough to capture the objects of interest and their surrounding context.</para>
        /// </param>
        /// <param name="_out_training_data">
        /// <para>Output Training Data</para>
        /// <para>The location and name of the output training data (*.pctd).</para>
        /// </param>
        public PreparePointCloudTrainingData(object _in_point_cloud, string? _block_size, object _out_training_data)
        {
            // Tuple deconstruction assigns all three required parameters at once.
            // "this." is mandatory here: the parameter names shadow the properties.
            (this._in_point_cloud, this._block_size, this._out_training_data) =
                (_in_point_cloud, _block_size, _out_training_data);
        }

        /// <summary>Name of the toolbox that contains this tool.</summary>
        public override string ToolboxName => "3D Analyst Tools";

        /// <summary>Display name of the geoprocessing tool.</summary>
        public override string ToolName => "Prepare Point Cloud Training Data";

        /// <summary>Qualified name used to invoke the tool through the geoprocessor.</summary>
        public override string CallName => "3d.PreparePointCloudTrainingData";

        /// <summary>Environment settings honored by this tool.</summary>
        public override List<string> AcceptEnvironments => new List<string>
        {
            "extent",
            "geographicTransformations",
            "outputCoordinateSystem",
            "parallelProcessingFactor",
            "scratchWorkspace",
            "workspace"
        };

        /// <summary>
        /// All tool parameters, in the positional order the geoprocessor expects.
        /// </summary>
        public override object[] ParameterInfo => new object[]
        {
            _in_point_cloud,
            _block_size,
            _out_training_data,
            _training_boundary,
            _validation_point_cloud,
            _validation_boundary,
            _class_codes_of_interest,
            _block_point_limit
        };

        /// <summary>
        /// <para>Input Point Cloud</para>
        /// <para>The point cloud that will be used to create the training data and, potentially, the validation data if no validation point cloud is specified. In this case, both the training boundary and the validation boundary must be defined.</para>
        /// </summary>
        [DisplayName("Input Point Cloud")]
        [Description("")]
        [Option(OptionTypeEnum.Must)]
        public object _in_point_cloud { get; set; }


        /// <summary>
        /// <para>Block Size</para>
        /// <para>The two-dimensional width and height of each HDF5 tile created from the input point cloud. As a general rule, the block size should be large enough to capture the objects of interest and their surrounding context.</para>
        /// </summary>
        [DisplayName("Block Size")]
        [Description("")]
        [Option(OptionTypeEnum.Must)]
        public string? _block_size { get; set; }


        /// <summary>
        /// <para>Output Training Data</para>
        /// <para>The location and name of the output training data (*.pctd).</para>
        /// </summary>
        [DisplayName("Output Training Data")]
        [Description("")]
        [Option(OptionTypeEnum.Must)]
        public object _out_training_data { get; set; }


        /// <summary>
        /// <para>Training Boundary Features</para>
        /// <para>The boundary polygons that will delineate the subset of points from the input point cloud that will be used to train the deep learning model.</para>
        /// </summary>
        [DisplayName("Training Boundary Features")]
        [Description("")]
        [Option(OptionTypeEnum.optional)]
        public object _training_boundary { get; set; } = null;


        /// <summary>
        /// <para>Validation Point Cloud</para>
        /// <para>The source of the point cloud that will be used to validate the deep learning model. This dataset must reference a different set of points than the input point cloud in order to ensure the quality of the trained model. If the validation point cloud is not specified, both the Training Boundary Features and Validation Boundary Features parameter values must be provided.</para>
        /// </summary>
        [DisplayName("Validation Point Cloud")]
        [Description("")]
        [Option(OptionTypeEnum.optional)]
        public object _validation_point_cloud { get; set; } = null;


        /// <summary>
        /// <para>Validation Boundary Features</para>
        /// <para>The polygon features that will delineate the subset of points to be used for validating the trained model. If a validation point cloud is not specified, the points will be sourced from the input point cloud.</para>
        /// </summary>
        [DisplayName("Validation Boundary Features")]
        [Description("")]
        [Option(OptionTypeEnum.optional)]
        public object _validation_boundary { get; set; } = null;


        /// <summary>
        /// <para>Class Codes of Interest</para>
        /// <para>The class codes that will limit the exported training data blocks to only those that contain the specified values. All points in the block will be exported for any block which contains at least one of the class codes listed in this parameter.</para>
        /// </summary>
        [DisplayName("Class Codes of Interest")]
        [Description("")]
        [Option(OptionTypeEnum.optional)]
        public object _class_codes_of_interest { get; set; } = null;


        /// <summary>
        /// <para>Block Point Limit</para>
        /// <para>The maximum number of points allowed in each block of the training data. When a block contains points in excess of this value, multiple blocks will be created for the same location to ensure that all of the points are used when training.</para>
        /// </summary>
        [DisplayName("Block Point Limit")]
        [Description("")]
        [Option(OptionTypeEnum.optional)]
        public long _block_point_limit { get; set; } = 8192;


        /// <summary>
        /// Applies the given environment settings and returns this instance for chaining.
        /// </summary>
        public PreparePointCloudTrainingData SetEnv(object extent = null, object geographicTransformations = null, object outputCoordinateSystem = null, object parallelProcessingFactor = null, object scratchWorkspace = null, object workspace = null)
        {
            base.SetEnv(
                extent: extent,
                geographicTransformations: geographicTransformations,
                outputCoordinateSystem: outputCoordinateSystem,
                parallelProcessingFactor: parallelProcessingFactor,
                scratchWorkspace: scratchWorkspace,
                workspace: workspace);
            return this;
        }

    }

}