/* ******************************************************************************
* 2019 - present Contributed by Apulis Technology (Shenzhen) Co. LTD
*
* This program and the accompanying materials are made available under the
* terms of the MIT License, which is available at
* https://www.opensource.org/licenses/MIT
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: MIT
***************************************************************************** */
package jobscheduler

import (
	"encoding/json"
	"fmt"
)

// CreateSparkJobReq is the request body for creating a spark application.
// JobBase.resType == RESOURCE_TYPE_SPARK_APP
// POST api/v1/jobs/spark
type CreateSparkJobReq struct {
	SparkJob // embedded: the request body is the full job description itself
}

// SparkJob describes a spark application submission: the shared JobBase
// metadata plus the spark-specific launch configuration.
// NOTE(review): the comment previously here ("delete spark application,
// DELETE api/v1/jobs/spark/:id") appears to belong to a delete-request
// type, not to this struct — confirm against the route handlers.
type SparkJob struct {

	// common data shared by all job kinds
	JobBase JobBase `json:"jobBase"`

	// spark specific args
	Type         string `json:"type"`         // application type (e.g. language/runtime selector)
	Image        string `json:"image"`        // container image to run
	MainClass    string `json:"mainClass"`    // entry class for JVM applications
	MainAppFile  string `json:"mainAppFile"`  // path/URI of the main application file
	SparkVersion string `json:"sparkVersion"` // spark runtime version string

	Arguments   []string     `json:"arguments"`   // arguments passed to the application
	MountPoints []MountPoint `json:"mountPoints"` // volumes to mount into the pods

	Driver   SparkDriverSpec   `json:"driver"`   // resource spec for the driver pod
	Executor SparkExecutorSpec `json:"executor"` // resource spec for the executor pods
}

// NewSparkJob returns an empty SparkJob ready to be populated. The slice
// fields are initialized to non-nil empty slices so JSON encoding emits
// [] instead of null; everything else carries its zero value.
func NewSparkJob() *SparkJob {
	job := &SparkJob{}
	job.Arguments = make([]string, 0)
	job.MountPoints = make([]MountPoint, 0)
	return job
}

// SparkPodSpec holds the per-pod resource settings shared by the driver
// and executor specs.
type SparkPodSpec struct {
	Cores     int     `json:"cores"`
	CoreLimit float64 `json:"coreLimit"`
	Memory    int64   `json:"memory"`
	Instances int     `json:"instances"`

	ServiceAccount string            `json:"serviceAccount"`
	Labels         map[string]string `json:"labels"`
}

// GetCores reports the requested number of cores.
func (p *SparkPodSpec) GetCores() int { return p.Cores }

// SetCores sets the requested number of cores.
func (p *SparkPodSpec) SetCores(cores int) { p.Cores = cores }

// GetCoreLimit reports the core usage upper bound.
func (p *SparkPodSpec) GetCoreLimit() float64 { return p.CoreLimit }

// SetCoreLimit sets the core usage upper bound.
func (p *SparkPodSpec) SetCoreLimit(limit float64) { p.CoreLimit = limit }

// GetInstances reports the requested pod count.
func (p *SparkPodSpec) GetInstances() int { return p.Instances }

// SetInstances sets the requested pod count.
func (p *SparkPodSpec) SetInstances(num int) { p.Instances = num }

// GetMemory reports the requested memory amount.
func (p *SparkPodSpec) GetMemory() int64 { return p.Memory }

// SetMemory sets the requested memory amount.
func (p *SparkPodSpec) SetMemory(memory int64) { p.Memory = memory }

// GetServiceAccount reports the kubernetes service account to run under.
func (p *SparkPodSpec) GetServiceAccount() string { return p.ServiceAccount }

// SetServiceAccount sets the kubernetes service account to run under.
func (p *SparkPodSpec) SetServiceAccount(account string) { p.ServiceAccount = account }

// GetLabels reports the pod labels map (shared, not a copy).
func (p *SparkPodSpec) GetLabels() map[string]string { return p.Labels }

// SetLabels replaces the pod labels map.
func (p *SparkPodSpec) SetLabels(labels map[string]string) { p.Labels = labels }

// SparkDriverSpec is the resource spec for the spark driver pod.
type SparkDriverSpec struct {
	SparkPodSpec
}

// SparkExecutorSpec is the resource spec for the spark executor pods.
type SparkExecutorSpec struct {
	SparkPodSpec
}

// NewSparkDriverSpec builds a driver spec with conservative defaults:
// one core, core limit 1, 100 MiB of memory, a single instance, and a
// non-nil empty label map.
func NewSparkDriverSpec() *SparkDriverSpec {
	defaults := SparkPodSpec{
		Cores:     1,
		CoreLimit: 1,
		Memory:    100 << 20, // 100 mega
		Instances: 1,
		Labels:    map[string]string{},
	}
	return &SparkDriverSpec{SparkPodSpec: defaults}
}

// NewSparkExecutorSpec builds an executor spec with conservative defaults:
// one core, core limit 1, 100 MiB of memory, a single instance, and a
// non-nil empty label map.
func NewSparkExecutorSpec() *SparkExecutorSpec {
	defaults := SparkPodSpec{
		Cores:     1,
		CoreLimit: 1,
		Memory:    100 << 20, // 100 mega
		Instances: 1,
		Labels:    map[string]string{},
	}
	return &SparkExecutorSpec{SparkPodSpec: defaults}
}

// Validate checks that the job carries a usable module id and a resource
// type inside the known range. It returns a descriptive error for the
// first violation found, or nil when the job is acceptable.
func (j *SparkJob) Validate() error {

	if j.JobBase.ModId == 0 {
		return fmt.Errorf("invalid module id: %d", j.JobBase.ModId)
	}

	// BUG FIX: this branch rejects an out-of-range resource type, but the
	// original message reported "invalid module id", which mislabels the
	// actual failure.
	resType := j.JobBase.ResType
	if resType < RESOURCE_TYPE_POD || resType >= RESOURCE_TYPE_UNKONWN {
		return fmt.Errorf("invalid resource type: %d", resType)
	}

	return nil
}

// GetJobId reports the job identifier from the embedded JobBase.
func (j *SparkJob) GetJobId() string {
	return j.JobBase.JobId
}

// GetModId reports the module id from the embedded JobBase.
func (j *SparkJob) GetModId() int {
	return j.JobBase.ModId
}

// GetType reports the spark application type.
func (j *SparkJob) GetType() string {
	return j.Type
}

// GetSparkVersion reports the requested spark runtime version.
func (j *SparkJob) GetSparkVersion() string {
	return j.SparkVersion
}

// GetDeployMode always reports "cluster": spark jobs are submitted in
// cluster deploy mode only.
func (j *SparkJob) GetDeployMode() string {
	const deployMode = "cluster"
	return deployMode
}

// GetImage reports the container image to run.
func (j *SparkJob) GetImage() string {
	return j.Image
}

// GetMainAppFile reports the path/URI of the main application file.
func (j *SparkJob) GetMainAppFile() string {
	return j.MainAppFile
}

// GetMainClass reports the entry class for JVM applications.
func (j *SparkJob) GetMainClass() string {
	return j.MainClass
}

// GetArguments reports the application arguments (shared slice, not a copy).
func (j *SparkJob) GetArguments() []string {
	return j.Arguments
}

// GetMounts reports the configured mount points (shared slice, not a copy).
func (j *SparkJob) GetMounts() []MountPoint {
	return j.MountPoints
}

// ToString serializes the job to its JSON representation. A marshal
// failure yields an empty string — the same observable result as the
// original, which ignored the error and converted the nil byte slice.
func (j *SparkJob) ToString() string {
	data, err := json.Marshal(j)
	if err != nil {
		return ""
	}
	return string(data)
}

// FromString populates the job from its JSON representation, passing any
// unmarshal error through unchanged.
func (j *SparkJob) FromString(data string) error {
	return json.Unmarshal([]byte(data), j)
}

// GetNamespace reports the kubernetes namespace from the embedded JobBase.
func (j *SparkJob) GetNamespace() string {
	return j.JobBase.Namespace
}
