package main

import (
	"context"
	"encoding/json"
	"fmt"
	"log"

	"github.com/louloulin/dataflare/pkg/workflow/connector"
	"github.com/louloulin/dataflare/pkg/workflow/processor"
)

// transformScript is the JavaScript passed to the "transform" processor.
// It doubles the numeric "value" field, marks the record processed, and
// stamps it with the current time.
const transformScript = `
			function process(record) {
				// Parse the record value
				const data = JSON.parse(record.value);

				// Double the value
				data.value = data.value * 2;

				// Add processed flag
				data.processed = true;
				data.timestamp = new Date().toISOString();

				// Update the record value
				record.value = JSON.stringify(data);

				return record;
			}
		`

// main wires a memory source, a JavaScript transform processor, and a
// console sink into a small demo pipeline and runs it over three records.
func main() {
	if err := run(context.Background()); err != nil {
		log.Fatal(err)
	}
}

// run builds and executes the pipeline, returning the first error it hits.
//
// Errors are returned rather than passed to log.Fatalf inline: log.Fatalf
// calls os.Exit, which would skip the deferred Stop calls and leak the
// running connectors on any failure path after Start.
func run(ctx context.Context) error {
	// Create test data.
	testData := []map[string]interface{}{
		{"id": 1, "value": 10},
		{"id": 2, "value": 20},
		{"id": 3, "value": 30},
	}

	// Get connector factory.
	factory := connector.DefaultRegistry.GetFactory()

	// Source: in-memory connector seeded with the test records.
	fmt.Println("Creating memory source connector...")
	source, err := factory.CreateSourceConnector("memory")
	if err != nil {
		return fmt.Errorf("creating memory source connector: %w", err)
	}

	sourceData := make([]interface{}, len(testData))
	for i, data := range testData {
		sourceData[i] = data
	}
	if err := source.Initialize(map[string]interface{}{
		"data": sourceData,
	}); err != nil {
		return fmt.Errorf("initializing memory source connector: %w", err)
	}

	// Sink: console connector emitting JSON.
	fmt.Println("Creating console sink connector...")
	sink, err := factory.CreateSinkConnector("console")
	if err != nil {
		return fmt.Errorf("creating console sink connector: %w", err)
	}
	if err := sink.Initialize(map[string]interface{}{
		"format": "json",
	}); err != nil {
		return fmt.Errorf("initializing console sink connector: %w", err)
	}

	// Processor: JavaScript transform (see transformScript above).
	fmt.Println("Creating transform processor...")
	transformProcessor, err := processor.NewProcessor("transform")
	if err != nil {
		return fmt.Errorf("creating transform processor: %w", err)
	}
	if err := transformProcessor.Init(map[string]interface{}{
		"function": transformScript,
	}); err != nil {
		return fmt.Errorf("initializing transform processor: %w", err)
	}

	// Start both connectors; the defers now reliably stop them on every
	// exit path, including errors.
	fmt.Println("Starting connectors...")
	if err := source.Start(ctx); err != nil {
		return fmt.Errorf("starting source connector: %w", err)
	}
	defer source.Stop(ctx)

	if err := sink.Start(ctx); err != nil {
		return fmt.Errorf("starting sink connector: %w", err)
	}
	defer sink.Stop(ctx)

	// Drain the source batch by batch, transforming each record before
	// handing the batch to the sink.
	fmt.Println("\nProcessing records:")
	recordCount := 0
	for {
		batch, err := source.Read(ctx)
		if err != nil {
			return fmt.Errorf("reading records: %w", err)
		}

		// A nil or empty batch signals end of data.
		if batch == nil || len(batch.Records) == 0 {
			break
		}

		for i, record := range batch.Records {
			// The processor wants string-valued metadata; drop any
			// non-string entries.
			metadata := make(map[string]string)
			for k, v := range record.Metadata {
				if str, ok := v.(string); ok {
					metadata[k] = str
				}
			}

			// Pass the raw bytes straight through — no need for the
			// []byte -> string -> []byte round trip.
			processedValue, err := transformProcessor.Process(record.Value, metadata)
			if err != nil {
				return fmt.Errorf("processing record: %w", err)
			}

			fmt.Printf("Record %d:\n", i+1)
			fmt.Printf("  Original: %s\n", describeJSON(record.Value))
			fmt.Printf("  Processed: %s\n", describeJSON(processedValue))

			// Store the transformed value back into the batch.
			record.Value = processedValue
			batch.Records[i] = record
		}

		if err := sink.Write(ctx, batch); err != nil {
			return fmt.Errorf("writing records: %w", err)
		}

		recordCount += len(batch.Records)
	}

	if err := sink.Flush(ctx); err != nil {
		return fmt.Errorf("flushing sink: %w", err)
	}

	fmt.Printf("\nProcessed %d records\n", recordCount)
	return nil
}

// describeJSON renders a JSON payload for display. Invalid JSON falls back
// to the raw bytes instead of silently printing an empty map (the original
// code ignored the json.Unmarshal error).
func describeJSON(value []byte) string {
	var data map[string]interface{}
	if err := json.Unmarshal(value, &data); err != nil {
		return string(value)
	}
	return fmt.Sprintf("%v", data)
}
