package dsl

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/louloulin/dataflare/pkg/workflow/definition"
	"gopkg.in/yaml.v3"
)

// DSLModel is the root object of a parsed workflow DSL document. Field
// names mirror the JSON/YAML layout one-to-one; cross-field invariants
// (schema references, edge endpoints, required sections) are enforced by
// Parser.Validate.
type DSLModel struct {
	// Version is the DSL version (required).
	Version string `json:"version" yaml:"version"`
	// Name is the workflow name (required).
	Name string `json:"name" yaml:"name"`
	// Description is the human-readable workflow description.
	Description string `json:"description" yaml:"description"`
	// Config is the workflow configuration (required).
	Config *Config `json:"config" yaml:"config"`
	// Variables are the workflow variables; string values in source/sink
	// configs may reference them as "${vars.<name>}".
	Variables map[string]interface{} `json:"variables" yaml:"variables"`
	// Schemas are the data schemas, keyed by name. Sources, sinks, and
	// node ports refer to entries of this map by key.
	Schemas map[string]*Schema `json:"schemas" yaml:"schemas"`
	// Sources are the data sources, keyed by name.
	Sources map[string]*Source `json:"sources" yaml:"sources"`
	// Sinks are the data sinks, keyed by name.
	Sinks map[string]*Sink `json:"sinks" yaml:"sinks"`
	// Nodes are the processing nodes, keyed by name.
	Nodes map[string]*Node `json:"nodes" yaml:"nodes"`
	// Edges are the directed connections between sources, nodes, and sinks.
	Edges []*Edge `json:"edges" yaml:"edges"`
	// Control contains optional workflow control settings.
	Control *Control `json:"control,omitempty" yaml:"control,omitempty"`
}

// Config represents the workflow configuration.
type Config struct {
	// Mode is the workflow mode (stream, batch, hybrid, ai). Required.
	Mode string `json:"mode" yaml:"mode"`
	// Execution contains execution settings (required).
	Execution *ExecutionConfig `json:"execution" yaml:"execution"`
	// Resources contains optional resource requirements.
	Resources *ResourceConfig `json:"resources,omitempty" yaml:"resources,omitempty"`
	// State contains optional state management settings.
	State *StateConfig `json:"state,omitempty" yaml:"state,omitempty"`
}

// ExecutionConfig represents execution settings.
type ExecutionConfig struct {
	// Engine is the execution engine (actor, kubernetes, serverless). Required.
	Engine string `json:"engine" yaml:"engine"`
	// Parallelism is the default parallelism.
	Parallelism int `json:"parallelism" yaml:"parallelism"`
	// Checkpointing contains optional checkpointing settings.
	Checkpointing *CheckpointingConfig `json:"checkpointing,omitempty" yaml:"checkpointing,omitempty"`
}

// CheckpointingConfig represents checkpointing settings.
type CheckpointingConfig struct {
	// Interval is the checkpointing interval (a duration-like string;
	// its format is not validated by this parser).
	Interval string `json:"interval" yaml:"interval"`
	// Mode is the checkpointing mode (exactly_once, at_least_once).
	Mode string `json:"mode" yaml:"mode"`
}

// ResourceConfig represents resource requirements.
type ResourceConfig struct {
	// Memory is the memory requirement.
	Memory string `json:"memory" yaml:"memory"`
	// CPU is the CPU requirement.
	CPU string `json:"cpu" yaml:"cpu"`
}

// StateConfig represents state management settings.
type StateConfig struct {
	// Backend is the state backend (memory, rocksdb, distributed).
	Backend string `json:"backend" yaml:"backend"`
	// TTL is the state time-to-live (a duration-like string; its format
	// is not validated by this parser).
	TTL string `json:"ttl,omitempty" yaml:"ttl,omitempty"`
}

// Schema represents a named data schema.
type Schema struct {
	// Description is the schema description.
	Description string `json:"description,omitempty" yaml:"description,omitempty"`
	// Fields are the schema fields, keyed by field name.
	Fields map[string]*Field `json:"fields" yaml:"fields"`
}

// Field represents a single schema field.
type Field struct {
	// Type is the field type.
	Type string `json:"type" yaml:"type"`
	// Description is the field description.
	Description string `json:"description,omitempty" yaml:"description,omitempty"`
	// Required indicates if the field is required.
	Required bool `json:"required,omitempty" yaml:"required,omitempty"`
}

// Source represents a data source.
type Source struct {
	// Type is the source type (required).
	Type string `json:"type" yaml:"type"`
	// Schema names an entry in DSLModel.Schemas (required; existence is
	// checked by Validate).
	Schema string `json:"schema" yaml:"schema"`
	// Config is the source configuration. String values containing
	// "${...}" are resolved in place by ResolveVariables.
	Config map[string]interface{} `json:"config" yaml:"config"`
	// Parallelism is the optional source parallelism.
	Parallelism int `json:"parallelism,omitempty" yaml:"parallelism,omitempty"`
	// Description is the source description.
	Description string `json:"description,omitempty" yaml:"description,omitempty"`
}

// Sink represents a data sink.
type Sink struct {
	// Type is the sink type (required).
	Type string `json:"type" yaml:"type"`
	// Schema names an entry in DSLModel.Schemas (required; existence is
	// checked by Validate).
	Schema string `json:"schema" yaml:"schema"`
	// Config is the sink configuration. String values containing
	// "${...}" are resolved in place by ResolveVariables.
	Config map[string]interface{} `json:"config" yaml:"config"`
	// Description is the sink description.
	Description string `json:"description,omitempty" yaml:"description,omitempty"`
}

// Node represents a processing node.
type Node struct {
	// Type is the node type (required). The types "transform", "window",
	// and "ai_agent" additionally require the matching Transform, Window,
	// or Agent section below (enforced by Validate).
	Type string `json:"type" yaml:"type"`
	// Description is the node description.
	Description string `json:"description,omitempty" yaml:"description,omitempty"`
	// Inputs are the input ports, keyed by port name; at least one is
	// required.
	Inputs map[string]*NodePort `json:"inputs" yaml:"inputs"`
	// Outputs are the output ports, keyed by port name; at least one is
	// required.
	Outputs map[string]*NodePort `json:"outputs" yaml:"outputs"`
	// Transform contains transformation settings (transform nodes only).
	Transform *Transform `json:"transform,omitempty" yaml:"transform,omitempty"`
	// Window contains window settings (window nodes only).
	Window *Window `json:"window,omitempty" yaml:"window,omitempty"`
	// Agent contains AI agent settings (ai_agent nodes only).
	Agent *Agent `json:"agent,omitempty" yaml:"agent,omitempty"`
}

// NodePort represents a node input or output port.
type NodePort struct {
	// Schema names an entry in DSLModel.Schemas (required; existence is
	// checked by Validate).
	Schema string `json:"schema" yaml:"schema"`
}

// Transform represents transformation settings.
type Transform struct {
	// Language is the transformation language.
	Language string `json:"language" yaml:"language"`
	// Code is the transformation code.
	Code string `json:"code" yaml:"code"`
}

// Window represents window settings.
type Window struct {
	// Type is the window type (tumbling, sliding, session).
	Type string `json:"type" yaml:"type"`
	// Size is the window size (a duration-like string; its format is not
	// validated by this parser).
	Size string `json:"size" yaml:"size"`
	// TimeField is the time field.
	TimeField string `json:"time_field" yaml:"time_field"`
	// AllowLate indicates if late arrivals are allowed.
	AllowLate bool `json:"allow_late,omitempty" yaml:"allow_late,omitempty"`
	// Trigger is the window trigger.
	Trigger string `json:"trigger,omitempty" yaml:"trigger,omitempty"`
}

// Agent represents AI agent settings.
type Agent struct {
	// Type is the agent type.
	Type string `json:"type" yaml:"type"`
	// Provider is the agent provider.
	Provider string `json:"provider" yaml:"provider"`
	// Model is the agent model.
	Model string `json:"model" yaml:"model"`
	// Prompt is the agent prompt.
	Prompt string `json:"prompt" yaml:"prompt"`
	// PromptTemplate is the agent prompt template.
	PromptTemplate string `json:"prompt_template" yaml:"prompt_template"`
	// OutputFormat is the agent output format.
	OutputFormat string `json:"output_format" yaml:"output_format"`
	// Caching contains optional caching settings.
	Caching *CachingConfig `json:"caching,omitempty" yaml:"caching,omitempty"`
}

// CachingConfig represents caching settings.
type CachingConfig struct {
	// Enabled indicates if caching is enabled.
	Enabled bool `json:"enabled" yaml:"enabled"`
	// TTL is the cache time-to-live (a duration-like string; its format
	// is not validated by this parser).
	TTL string `json:"ttl" yaml:"ttl"`
}

// Edge represents a directed connection between two endpoints: a source
// or node output feeding a node input or sink.
type Edge struct {
	// From is the producing end of the edge (required).
	From *EdgeEndpoint `json:"from" yaml:"from"`
	// To is the consuming end of the edge (required).
	To *EdgeEndpoint `json:"to" yaml:"to"`
}

// EdgeEndpoint represents one end of an edge. Exactly one of Source or
// Node must be set (enforced by Validate). Note that in a "to" endpoint
// the Source field names an entry in DSLModel.Sinks, despite the field
// name (see Validate's edge checks).
type EdgeEndpoint struct {
	// Source is the source name (or, on a "to" endpoint, the sink name).
	Source string `json:"source,omitempty" yaml:"source,omitempty"`
	// Node is the node name.
	Node string `json:"node,omitempty" yaml:"node,omitempty"`
	// Output is the node output port name; when empty, Validate fills in
	// "default".
	Output string `json:"output,omitempty" yaml:"output,omitempty"`
	// Input is the node input port name; when empty, Validate fills in
	// "default".
	Input string `json:"input,omitempty" yaml:"input,omitempty"`
}

// Control represents workflow control settings.
type Control struct {
	// ErrorHandling contains error handling settings.
	ErrorHandling *ErrorHandling `json:"error_handling,omitempty" yaml:"error_handling,omitempty"`
	// Scheduling contains scheduling settings.
	Scheduling *Scheduling `json:"scheduling,omitempty" yaml:"scheduling,omitempty"`
	// LifecycleHooks contains lifecycle hook settings.
	LifecycleHooks *LifecycleHooks `json:"lifecycle_hooks,omitempty" yaml:"lifecycle_hooks,omitempty"`
}

// ErrorHandling represents error handling settings.
type ErrorHandling struct {
	// Default is the default error handling strategy.
	Default *ErrorStrategy `json:"default" yaml:"default"`
	// Custom contains custom error handling strategies.
	Custom []*CustomErrorStrategy `json:"custom,omitempty" yaml:"custom,omitempty"`
}

// ErrorStrategy represents an error handling strategy.
type ErrorStrategy struct {
	// Strategy is the error handling strategy (retry, skip, fail).
	Strategy string `json:"strategy" yaml:"strategy"`
	// MaxRetries is the maximum number of retries.
	MaxRetries int `json:"max_retries,omitempty" yaml:"max_retries,omitempty"`
	// Backoff is the retry backoff strategy (fixed, exponential).
	Backoff string `json:"backoff,omitempty" yaml:"backoff,omitempty"`
}

// CustomErrorStrategy represents a condition-specific error handling
// strategy.
type CustomErrorStrategy struct {
	// Condition is the error condition.
	Condition string `json:"condition" yaml:"condition"`
	// Strategy is the error handling strategy (retry, skip, fail).
	Strategy string `json:"strategy" yaml:"strategy"`
}

// Scheduling represents scheduling settings.
type Scheduling struct {
	// Trigger contains trigger settings.
	Trigger *Trigger `json:"trigger" yaml:"trigger"`
}

// Trigger represents trigger settings.
type Trigger struct {
	// Type is the trigger type (cron, event).
	Type string `json:"type" yaml:"type"`
	// Expression is the trigger expression; it is not validated by this
	// parser.
	Expression string `json:"expression" yaml:"expression"`
}

// LifecycleHooks represents lifecycle hook settings.
type LifecycleHooks struct {
	// PreStart contains pre-start hooks.
	PreStart []*Hook `json:"pre_start,omitempty" yaml:"pre_start,omitempty"`
}

// Hook represents a lifecycle hook.
type Hook struct {
	// Name is the hook name.
	Name string `json:"name" yaml:"name"`
	// Script is the hook script.
	Script string `json:"script" yaml:"script"`
}

// Parser parses workflow DSL documents from JSON or YAML files or byte
// slices, validates them, and resolves variable references.
type Parser struct {
	// variableResolver resolves "${...}" references in source/sink
	// configs; when nil, a DefaultVariableResolver is built from the
	// model's own variables at resolution time.
	variableResolver VariableResolver
}

// VariableResolver resolves "${...}" variable references found in
// source and sink configuration values.
type VariableResolver interface {
	// ResolveVariable resolves a variable reference to its string value.
	// The default implementation returns non-reference strings unchanged.
	ResolveVariable(reference string) (string, error)
}

// DefaultVariableResolver resolves references against a fixed set of
// workflow variables and the process environment.
type DefaultVariableResolver struct {
	// variables are the defined workflow variables
	variables map[string]interface{}
}

// NewDefaultVariableResolver creates a resolver backed by the given
// workflow variables.
func NewDefaultVariableResolver(variables map[string]interface{}) *DefaultVariableResolver {
	return &DefaultVariableResolver{variables: variables}
}

// ResolveVariable resolves a single reference string. Supported forms:
//
//   - "${ENV:NAME}"  -> the value of environment variable NAME
//   - "${vars.name}" -> the workflow variable "name", rendered with %v
//
// Any other string is returned unchanged.
func (r *DefaultVariableResolver) ResolveVariable(reference string) (string, error) {
	closed := strings.HasSuffix(reference, "}")
	switch {
	case closed && strings.HasPrefix(reference, "${ENV:"):
		name := strings.TrimSuffix(strings.TrimPrefix(reference, "${ENV:"), "}")
		// An environment variable set to the empty string is treated the
		// same as an unset one.
		if value := os.Getenv(name); value != "" {
			return value, nil
		}
		return "", fmt.Errorf("environment variable %s not found", name)
	case closed && strings.HasPrefix(reference, "${vars."):
		name := strings.TrimSuffix(strings.TrimPrefix(reference, "${vars."), "}")
		value, ok := r.variables[name]
		if !ok {
			return "", fmt.Errorf("variable %s not found", name)
		}
		return fmt.Sprintf("%v", value), nil
	default:
		// Not a recognized reference; pass it through unchanged.
		return reference, nil
	}
}

// NewParser creates a DSL parser that resolves variables with the given
// resolver. A nil resolver makes the parser fall back to a default
// resolver during variable resolution.
func NewParser(variableResolver VariableResolver) *Parser {
	return &Parser{variableResolver: variableResolver}
}

// ParseFile reads a DSL definition from disk and parses it according to
// its file extension (.json, .yaml, or .yml; case-sensitive).
func (p *Parser) ParseFile(filePath string) (*DSLModel, error) {
	// NOTE(review): ioutil.ReadFile is deprecated in favor of os.ReadFile;
	// migrate when the import block is next touched.
	raw, err := ioutil.ReadFile(filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}

	// Dispatch on the extension.
	switch ext := filepath.Ext(filePath); ext {
	case ".yaml", ".yml":
		return p.ParseYAML(raw)
	case ".json":
		return p.ParseJSON(raw)
	default:
		return nil, fmt.Errorf("unsupported file format: %s", ext)
	}
}

// ParseYAML parses a DSL model from YAML data, then validates it and
// resolves variable references.
func (p *Parser) ParseYAML(data []byte) (*DSLModel, error) {
	var model DSLModel
	if err := yaml.Unmarshal(data, &model); err != nil {
		return nil, fmt.Errorf("failed to parse YAML: %w", err)
	}
	return p.finishModel(&model)
}

// ParseJSON parses a DSL model from JSON data, then validates it and
// resolves variable references.
func (p *Parser) ParseJSON(data []byte) (*DSLModel, error) {
	var model DSLModel
	if err := json.Unmarshal(data, &model); err != nil {
		return nil, fmt.Errorf("failed to parse JSON: %w", err)
	}
	return p.finishModel(&model)
}

// finishModel runs the post-decode steps shared by ParseYAML and
// ParseJSON: structural validation followed by variable resolution.
func (p *Parser) finishModel(model *DSLModel) (*DSLModel, error) {
	if err := p.Validate(model); err != nil {
		return nil, fmt.Errorf("invalid DSL model: %w", err)
	}
	if err := p.ResolveVariables(model); err != nil {
		return nil, fmt.Errorf("failed to resolve variables: %w", err)
	}
	return model, nil
}

// Validate checks a DSL model for structural correctness: required
// top-level fields, schema references from sources, sinks, and node
// ports, type-specific node sections, and edge endpoints. As a side
// effect it fills in the default port name ("default") on edges that
// omit an output or input name.
func (p *Parser) Validate(model *DSLModel) error {
	if err := validateRequiredFields(model); err != nil {
		return err
	}
	if err := validateSources(model); err != nil {
		return err
	}
	if err := validateSinks(model); err != nil {
		return err
	}
	if err := validateNodes(model); err != nil {
		return err
	}
	return validateEdges(model)
}

// validateRequiredFields checks the mandatory top-level and config fields.
func validateRequiredFields(model *DSLModel) error {
	if model.Version == "" {
		return fmt.Errorf("version is required")
	}
	if model.Name == "" {
		return fmt.Errorf("name is required")
	}
	if model.Config == nil {
		return fmt.Errorf("config is required")
	}
	if model.Config.Mode == "" {
		return fmt.Errorf("config.mode is required")
	}
	if model.Config.Execution == nil {
		return fmt.Errorf("config.execution is required")
	}
	if model.Config.Execution.Engine == "" {
		return fmt.Errorf("config.execution.engine is required")
	}
	return nil
}

// validateSources checks that every source has a type and a schema that
// exists in the model.
func validateSources(model *DSLModel) error {
	for name, source := range model.Sources {
		if source.Type == "" {
			return fmt.Errorf("source %s: type is required", name)
		}
		if source.Schema == "" {
			return fmt.Errorf("source %s: schema is required", name)
		}
		if _, ok := model.Schemas[source.Schema]; !ok {
			return fmt.Errorf("source %s: schema %s not found", name, source.Schema)
		}
	}
	return nil
}

// validateSinks checks that every sink has a type and a schema that
// exists in the model.
func validateSinks(model *DSLModel) error {
	for name, sink := range model.Sinks {
		if sink.Type == "" {
			return fmt.Errorf("sink %s: type is required", name)
		}
		if sink.Schema == "" {
			return fmt.Errorf("sink %s: schema is required", name)
		}
		if _, ok := model.Schemas[sink.Schema]; !ok {
			return fmt.Errorf("sink %s: schema %s not found", name, sink.Schema)
		}
	}
	return nil
}

// validateNodes checks node types, input/output ports and their schemas,
// and the type-specific sections (transform, window, agent).
func validateNodes(model *DSLModel) error {
	for name, node := range model.Nodes {
		if node.Type == "" {
			return fmt.Errorf("node %s: type is required", name)
		}
		if len(node.Inputs) == 0 {
			return fmt.Errorf("node %s: at least one input is required", name)
		}
		if len(node.Outputs) == 0 {
			return fmt.Errorf("node %s: at least one output is required", name)
		}

		// Every port must name a schema that exists in the model.
		for portName, port := range node.Inputs {
			if port.Schema == "" {
				return fmt.Errorf("node %s: input %s: schema is required", name, portName)
			}
			if _, ok := model.Schemas[port.Schema]; !ok {
				return fmt.Errorf("node %s: input %s: schema %s not found", name, portName, port.Schema)
			}
		}
		for portName, port := range node.Outputs {
			if port.Schema == "" {
				return fmt.Errorf("node %s: output %s: schema is required", name, portName)
			}
			if _, ok := model.Schemas[port.Schema]; !ok {
				return fmt.Errorf("node %s: output %s: schema %s not found", name, portName, port.Schema)
			}
		}

		// Type-specific sections must be present for their node types.
		if node.Type == "transform" && node.Transform == nil {
			return fmt.Errorf("node %s: transform is required for transform node", name)
		}
		if node.Type == "window" && node.Window == nil {
			return fmt.Errorf("node %s: window is required for window node", name)
		}
		if node.Type == "ai_agent" && node.Agent == nil {
			return fmt.Errorf("node %s: agent is required for ai_agent node", name)
		}
	}
	return nil
}

// validateEdges checks that every edge connects existing endpoints,
// filling in the "default" port name where omitted. A "to" endpoint's
// Source field refers to a sink, hence the "to.sink" wording in errors.
func validateEdges(model *DSLModel) error {
	for i, edge := range model.Edges {
		if edge.From == nil {
			return fmt.Errorf("edge %d: from is required", i)
		}
		if edge.To == nil {
			return fmt.Errorf("edge %d: to is required", i)
		}

		// The producing end must be exactly one of a source or a node.
		if edge.From.Source == "" && edge.From.Node == "" {
			return fmt.Errorf("edge %d: from.source or from.node is required", i)
		}
		if edge.From.Source != "" && edge.From.Node != "" {
			return fmt.Errorf("edge %d: from.source and from.node cannot both be specified", i)
		}
		if edge.From.Source != "" {
			if _, ok := model.Sources[edge.From.Source]; !ok {
				return fmt.Errorf("edge %d: from.source %s not found", i, edge.From.Source)
			}
		}
		if edge.From.Node != "" {
			if _, ok := model.Nodes[edge.From.Node]; !ok {
				return fmt.Errorf("edge %d: from.node %s not found", i, edge.From.Node)
			}
			if edge.From.Output == "" {
				edge.From.Output = "default" // Default output
			}
			if _, ok := model.Nodes[edge.From.Node].Outputs[edge.From.Output]; !ok {
				return fmt.Errorf("edge %d: from.node %s: output %s not found", i, edge.From.Node, edge.From.Output)
			}
		}

		// The consuming end must be exactly one of a node or a sink.
		if edge.To.Node == "" && edge.To.Source == "" {
			return fmt.Errorf("edge %d: to.node or to.sink is required", i)
		}
		if edge.To.Node != "" && edge.To.Source != "" {
			return fmt.Errorf("edge %d: to.node and to.sink cannot both be specified", i)
		}
		if edge.To.Node != "" {
			if _, ok := model.Nodes[edge.To.Node]; !ok {
				return fmt.Errorf("edge %d: to.node %s not found", i, edge.To.Node)
			}
			if edge.To.Input == "" {
				edge.To.Input = "default" // Default input
			}
			if _, ok := model.Nodes[edge.To.Node].Inputs[edge.To.Input]; !ok {
				return fmt.Errorf("edge %d: to.node %s: input %s not found", i, edge.To.Node, edge.To.Input)
			}
		}
		if edge.To.Source != "" {
			if _, ok := model.Sinks[edge.To.Source]; !ok {
				return fmt.Errorf("edge %d: to.sink %s not found", i, edge.To.Source)
			}
		}
	}
	return nil
}

// ResolveVariables resolves "${...}" references in the string values of
// source and sink configs, in place. When the parser has no configured
// resolver, a default resolver scoped to this model's variables is used
// for this call only.
//
// Bug fix: the previous implementation cached the default resolver on the
// Parser, so a reused Parser resolved every subsequent model against the
// FIRST model's variables. The resolver is now chosen per call.
func (p *Parser) ResolveVariables(model *DSLModel) error {
	resolver := p.variableResolver
	if resolver == nil {
		resolver = NewDefaultVariableResolver(model.Variables)
	}

	for _, source := range model.Sources {
		if err := resolveConfigMap(resolver, source.Config, "source"); err != nil {
			return err
		}
	}
	for _, sink := range model.Sinks {
		if err := resolveConfigMap(resolver, sink.Config, "sink"); err != nil {
			return err
		}
	}
	return nil
}

// resolveConfigMap resolves "${...}" references in the string values of a
// config map in place. kind ("source" or "sink") is used in error messages.
func resolveConfigMap(resolver VariableResolver, config map[string]interface{}, kind string) error {
	for key, value := range config {
		strValue, ok := value.(string)
		if !ok || !strings.Contains(strValue, "${") {
			continue
		}
		resolved, err := resolver.ResolveVariable(strValue)
		if err != nil {
			return fmt.Errorf("failed to resolve variable in %s config: %w", kind, err)
		}
		config[key] = resolved
	}
	return nil
}

// ConvertToWorkflow converts a parsed DSL model to a workflow definition.
// Only top-level metadata and a default error-handling policy are mapped
// so far; sources, sinks, nodes, and edges are not yet converted (see the
// TODO below).
func (p *Parser) ConvertToWorkflow(model *DSLModel) (*definition.Workflow, error) {
	// Create a new workflow. model.Name is passed twice — presumably as
	// both the workflow ID and display name; confirm against
	// definition.NewWorkflow's signature.
	workflow := definition.NewWorkflow(
		model.Name,
		model.Name,
		model.Version,
		definition.WorkflowTypeParallel, // Default to parallel
		getWorkflowMode(model.Config.Mode),
	)

	// Set description
	workflow.Metadata.Description = model.Description

	// NOTE(review): error handling is enabled only when checkpointing is
	// configured, and the retry values below are hard-coded rather than
	// taken from the DSL's control.error_handling section — confirm this
	// coupling is intentional.
	if model.Config.Execution.Checkpointing != nil {
		workflow.Config.ErrorHandling = &definition.ErrorHandlingPolicy{
			Action: "retry",
			RetryPolicy: &definition.RetryPolicy{
				MaxRetries:    3, // Default retry count
				Interval:      1 * time.Second,
				BackoffFactor: 2.0,
				MaxInterval:   1 * time.Minute,
			},
		}
	}

	// TODO: Convert sources, sinks, nodes, and edges to workflow steps

	return workflow, nil
}

// getWorkflowMode maps a DSL mode string onto a workflow mode. "hybrid"
// has no dedicated workflow mode, so it — like any unrecognized value —
// falls back to streaming.
func getWorkflowMode(mode string) definition.WorkflowMode {
	if mode == "batch" {
		return definition.WorkflowModeBatch
	}
	// "stream", "hybrid", and anything else all run as stream.
	return definition.WorkflowModeStream
}
