//nolint: all
//go:build ignore
// +build ignore

package main

import (
	"encoding/json"
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"os"
	"path/filepath"
	"reflect"
	"sort"
	"strings"

	"github.com/unkeyed/unkey/go/pkg/codes"
	"golang.org/x/text/cases"
	"golang.org/x/text/language"
)

// commentMap maps struct type name -> field name -> the field's doc comment
// text. It is populated by extractComments from the hand-written source files
// and consumed by processCategory when emitting documented constants.
var commentMap = make(map[string]map[string]string)

// main generates constants_gen.go from the error-code structs in the codes
// package, then synchronizes the MDX documentation pages and docs.json
// navigation under apps/docs.
func main() {
	// Extract doc comments from the hand-written source files first so the
	// generated constants can carry them forward. A failure here is
	// deliberately non-fatal: constants are still generated, just without
	// documentation.
	if err := extractComments(); err != nil {
		fmt.Fprintf(os.Stderr, "Error extracting comments: %v\n", err)
	}

	// Output file
	outputPath := "constants_gen.go"
	f, err := os.Create(outputPath)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error creating output file: %v\n", err)
		os.Exit(1)
	}

	// Write file header
	f.WriteString("// Code generated by generate.go; DO NOT EDIT.\n")
	f.WriteString("package codes\n\n")

	// Generate constants
	f.WriteString("// URN is a string type for error code constants\n")
	f.WriteString("type URN string\n\n")

	f.WriteString("// Error code constants for use in switch statements for exhaustive checking\n")
	f.WriteString("const (\n")

	// Track all error codes for MDX generation
	allErrorCodes := []ErrorCodeInfo{}

	// Process each top-level error domain using reflection
	allErrorCodes = append(allErrorCodes, processErrorDomain(f, "User", "User", reflect.ValueOf(codes.User))...)
	allErrorCodes = append(allErrorCodes, processErrorDomain(f, "Unkey", "Auth", reflect.ValueOf(codes.Auth))...)
	allErrorCodes = append(allErrorCodes, processErrorDomain(f, "Unkey", "Data", reflect.ValueOf(codes.Data))...)
	allErrorCodes = append(allErrorCodes, processErrorDomain(f, "Unkey", "App", reflect.ValueOf(codes.App))...)
	allErrorCodes = append(allErrorCodes, processErrorDomain(f, "Unkey", "Ingress", reflect.ValueOf(codes.Ingress))...)

	f.WriteString(")\n")

	// Close explicitly (not via defer) so a flush/write error — e.g. a full
	// disk — is surfaced instead of being silently discarded, which would
	// leave a truncated generated file while reporting success.
	if err := f.Close(); err != nil {
		fmt.Fprintf(os.Stderr, "Error closing output file: %v\n", err)
		os.Exit(1)
	}

	fmt.Println("Generated error constants with documentation")

	// Generate missing MDX documentation files
	if err := generateMissingMDXFiles(allErrorCodes); err != nil {
		fmt.Fprintf(os.Stderr, "Error generating MDX files: %v\n", err)
		os.Exit(1)
	}

	// Remove obsolete MDX files that don't have corresponding error codes
	if err := removeObsoleteMDXFiles(allErrorCodes); err != nil {
		fmt.Fprintf(os.Stderr, "Error removing obsolete MDX files: %v\n", err)
		os.Exit(1)
	}

	// Update docs.json with all error files
	if err := updateDocsJSON(allErrorCodes); err != nil {
		fmt.Fprintf(os.Stderr, "Error updating docs.json: %v\n", err)
		os.Exit(1)
	}
}

// ErrorCodeInfo stores information about an error code for MDX generation
// ErrorCodeInfo stores information about an error code for MDX generation
type ErrorCodeInfo struct {
	URN         string // full URN, e.g. "err:user:bad_request:client_closed_request"
	Name        string // the code's Specific identifier (last URN segment)
	Description string // first non-empty doc-comment line, "" if undocumented
	Domain      string // "User", "Auth", "Data", "App", "Ingress" as passed in main; skip checks also test "Gateway" — presumably a legacy domain, TODO confirm
}

// extractComments parses source files to get documentation comments
func extractComments() error {
	fset := token.NewFileSet()

	// Get all Go files in the current directory except generated files
	files, err := filepath.Glob("*.go")
	if err != nil {
		return err
	}

	for _, filename := range files {
		// Skip generated files
		if strings.HasSuffix(filename, "_gen.go") || filename == "generate.go" {
			continue
		}

		// Parse the file
		file, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
		if err != nil {
			return err
		}

		// Visit all declarations in the file
		ast.Inspect(file, func(n ast.Node) bool {
			// Look for type declarations
			typeSpec, ok := n.(*ast.TypeSpec)
			if !ok {
				return true
			}

			typeName := typeSpec.Name.Name

			// Check if it's a struct type
			structType, ok := typeSpec.Type.(*ast.StructType)
			if !ok {
				return true
			}

			// Create entry for this type
			if _, exists := commentMap[typeName]; !exists {
				commentMap[typeName] = make(map[string]string)
			}

			// Extract comments for struct fields
			for _, field := range structType.Fields.List {
				if field.Doc != nil {
					comment := field.Doc.Text()

					// Store comment for each field name
					for _, name := range field.Names {
						commentMap[typeName][name.Name] = comment
					}
				}
			}

			return true
		})
	}

	return nil
}

// processErrorDomain extracts error codes from a domain using reflection
func processErrorDomain(f *os.File, systemName string, domain string, domainValue reflect.Value) []ErrorCodeInfo {
	// Section header
	domainType := domainValue.Type()
	domainName := domainType.Name()
	f.WriteString("// ----------------\n")
	fmt.Fprintf(f, "// %s\n", domainName)
	f.WriteString("// ----------------\n")
	f.WriteString("\n")

	errorCodes := []ErrorCodeInfo{}

	// Iterate through categories (fields of the domain struct)
	for i := 0; i < domainValue.NumField(); i++ {
		categoryField := domainValue.Field(i)
		categoryName := domainType.Field(i).Name

		fmt.Fprintf(f, "// %s\n\n", categoryName)

		// Iterate through error codes in this category
		codes := processCategory(f, systemName, domainName, categoryName, domain, categoryField)
		errorCodes = append(errorCodes, codes...)

		f.WriteString("\n")
	}

	return errorCodes
}

// processCategory extracts error codes from a category using reflection
func processCategory(f *os.File, systemName, domainName, categoryName, domain string, categoryValue reflect.Value) []ErrorCodeInfo {
	// Iterate through error codes in this category
	categoryType := categoryValue.Type()
	errorCodes := []ErrorCodeInfo{}

	for j := 0; j < categoryValue.NumField(); j++ {
		codeField := categoryValue.Field(j)
		codeName := categoryType.Field(j).Name

		// Extract the actual Code object
		codeObj := codeField.Interface().(codes.Code)

		// Generate constant name (PascalCase)
		constName := fmt.Sprintf("%s%s%s", domainName, categoryName, codeName)

		// Get the string representation
		codeStr := codeObj.URN()

		// Extract description from comments
		description := ""
		if comments, ok := commentMap[categoryType.Name()]; ok {
			if comment, ok := comments[codeName]; ok {
				// Clean up the comment and add it to the output
				lines := strings.Split(strings.TrimSpace(comment), "\n")
				for _, line := range lines {
					line = strings.TrimSpace(line)
					if line != "" {
						f.WriteString(fmt.Sprintf("\t// %s\n", line))
						// Use first line as description
						if description == "" {
							description = line
						}
					}
				}
			}
		}

		// Write the constant
		f.WriteString(fmt.Sprintf("\t%s URN = \"%s\"\n", constName, codeStr))

		// Store error code info for MDX generation
		errorCodes = append(errorCodes, ErrorCodeInfo{
			URN:         string(codeObj.URN()),
			Name:        codeObj.Specific,
			Description: description,
			Domain:      domain,
		})
	}

	return errorCodes
}

// generateMissingMDXFiles creates MDX documentation files for error codes that don't have them
func generateMissingMDXFiles(errorCodes []ErrorCodeInfo) error {
	// Get the base docs directory path (relative to this file)
	baseDocsPath := filepath.Join("..", "..", "..", "apps", "docs", "errors")

	created := 0
	skipped := 0

	for _, errCode := range errorCodes {
		// Skip gateway and ingress errors (these are internal, not API errors)
		if errCode.Domain == "Gateway" || errCode.Domain == "Ingress" {
			skipped++
			continue
		}

		// Parse URN to get file path
		// Example: err:user:bad_request:client_closed_request -> user/bad_request/client_closed_request.mdx
		parts := strings.Split(errCode.URN, ":")
		if len(parts) < 4 || parts[0] != "err" {
			continue
		}

		// Build file path from URN parts (skip "err:" prefix)
		pathParts := parts[1 : len(parts)-1]
		fileName := parts[len(parts)-1] + ".mdx"
		filePath := filepath.Join(append([]string{baseDocsPath}, append(pathParts, fileName)...)...)

		// Check if file already exists
		if _, err := os.Stat(filePath); err == nil {
			skipped++
			continue
		}

		// Create directory if it doesn't exist
		dir := filepath.Dir(filePath)
		if err := os.MkdirAll(dir, 0755); err != nil {
			return fmt.Errorf("failed to create directory %s: %w", dir, err)
		}

		// Generate description
		description := errCode.Description
		if description == "" {
			description = "Error occurred"
		}

		// Create MDX file with basic template
		content := fmt.Sprintf(`---
title: "%s"
description: "%s"
---

<Danger>`+"`%s`"+`</Danger>

`, errCode.Name, description, errCode.URN)

		if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
			return fmt.Errorf("failed to write file %s: %w", filePath, err)
		}

		created++
		fmt.Printf("Created: %s\n", filePath)
	}

	fmt.Printf("\nMDX files: %d created, %d already existed\n", created, skipped)
	return nil
}

// removeObsoleteMDXFiles deletes MDX files that don't have corresponding error codes
func removeObsoleteMDXFiles(errorCodes []ErrorCodeInfo) error {
	baseDocsPath := filepath.Join("..", "..", "..", "apps", "docs", "errors")

	// Build a set of valid file paths from error codes
	validPaths := make(map[string]bool)
	for _, errCode := range errorCodes {
		// Skip gateway and ingress errors
		if errCode.Domain == "Gateway" || errCode.Domain == "Ingress" {
			continue
		}

		parts := strings.Split(errCode.URN, ":")
		if len(parts) < 4 || parts[0] != "err" {
			continue
		}

		// Build file path from URN
		pathParts := parts[1 : len(parts)-1]
		fileName := parts[len(parts)-1] + ".mdx"
		filePath := filepath.Join(append([]string{baseDocsPath}, append(pathParts, fileName)...)...)
		validPaths[filePath] = true
	}

	deleted := 0

	// Walk through the errors directory
	err := filepath.Walk(baseDocsPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}

		// Skip directories and non-MDX files
		if info.IsDir() || !strings.HasSuffix(path, ".mdx") {
			return nil
		}

		// Skip overview.mdx (it's not an error code file)
		if strings.HasSuffix(path, "overview.mdx") {
			return nil
		}

		// Check if this file has a corresponding error code
		if !validPaths[path] {
			// Delete obsolete file
			if err := os.Remove(path); err != nil {
				return fmt.Errorf("failed to delete %s: %w", path, err)
			}
			deleted++
			fmt.Printf("Deleted obsolete: %s\n", path)
		}

		return nil
	})

	if err != nil {
		return err
	}

	if deleted > 0 {
		fmt.Printf("\nRemoved %d obsolete MDX file(s)\n", deleted)
	}

	return nil
}

// updateDocsJSON updates the docs.json navigation to include all error pages
func updateDocsJSON(errorCodes []ErrorCodeInfo) error {
	docsJSONPath := filepath.Join("..", "..", "..", "apps", "docs", "docs.json")

	// Read existing docs.json
	data, err := os.ReadFile(docsJSONPath)
	if err != nil {
		return fmt.Errorf("failed to read docs.json: %w", err)
	}

	// Parse JSON into a map to preserve structure
	var docsConfig map[string]interface{}
	if err := json.Unmarshal(data, &docsConfig); err != nil {
		return fmt.Errorf("failed to parse docs.json: %w", err)
	}

	// Navigate to the errors section in navigation
	navigation, ok := docsConfig["navigation"].(map[string]interface{})
	if !ok {
		return fmt.Errorf("navigation not found in docs.json")
	}

	dropdowns, ok := navigation["dropdowns"].([]interface{})
	if !ok {
		return fmt.Errorf("dropdowns not found in navigation")
	}

	// Find the Documentation dropdown
	var docsDropdown map[string]interface{}
	for _, dropdown := range dropdowns {
		dd := dropdown.(map[string]interface{})
		if dd["dropdown"] == "Documentation" {
			docsDropdown = dd
			break
		}
	}

	if docsDropdown == nil {
		return fmt.Errorf("Documentation dropdown not found")
	}

	groups := docsDropdown["groups"].([]interface{})

	// Find the Errors group (should be one of the top-level groups)
	var errorsGroup map[string]interface{}
	var errorsGroupIndex int
	for i, group := range groups {
		g := group.(map[string]interface{})
		if g["group"] == "Errors" {
			errorsGroup = g
			errorsGroupIndex = i
			break
		}
	}

	if errorsGroup == nil {
		return fmt.Errorf("Errors group not found in groups")
	}

	// Organize error codes by category
	type ErrorCategory struct {
		Name  string
		Path  string
		Files []string
	}

	unkeyCategories := make(map[string]*ErrorCategory)
	userCategories := make(map[string]*ErrorCategory)

	for _, errCode := range errorCodes {
		// Skip gateway and ingress errors (these are internal, not API errors)
		if errCode.Domain == "Gateway" || errCode.Domain == "Ingress" {
			continue
		}

		parts := strings.Split(errCode.URN, ":")
		if len(parts) < 4 {
			continue
		}

		system := parts[1]   // "user" or "unkey"
		category := parts[2] // "bad_request", "application", etc.
		errorName := parts[len(parts)-1]

		// Build the path for this error
		pathParts := parts[1 : len(parts)-1]
		errorPath := "errors/" + strings.Join(append(pathParts, errorName), "/")

		if system == "unkey" {
			if _, exists := unkeyCategories[category]; !exists {
				// Convert category name to title case
				titleName := strings.ReplaceAll(category, "_", " ")
				caser := cases.Title(language.English)
				titleName = caser.String(titleName)
				unkeyCategories[category] = &ErrorCategory{
					Name:  titleName,
					Path:  category,
					Files: []string{},
				}
			}
			unkeyCategories[category].Files = append(unkeyCategories[category].Files, errorPath)
		} else if system == "user" {
			if _, exists := userCategories[category]; !exists {
				titleName := strings.ReplaceAll(category, "_", " ")
				caser := cases.Title(language.English)
				titleName = caser.String(titleName)
				userCategories[category] = &ErrorCategory{
					Name:  titleName,
					Path:  category,
					Files: []string{},
				}
			}
			userCategories[category].Files = append(userCategories[category].Files, errorPath)
		}
	}

	// Sort files within each category
	for _, cat := range unkeyCategories {
		sort.Strings(cat.Files)
	}
	for _, cat := range userCategories {
		sort.Strings(cat.Files)
	}

	// Build the new errors pages structure
	errorPages := []interface{}{
		"errors/overview",
	}

	// Add Unkey Errors section
	unkeyErrorsPages := []interface{}{}

	// Sort category keys for consistent output
	unkeyCategoryKeys := make([]string, 0, len(unkeyCategories))
	for k := range unkeyCategories {
		unkeyCategoryKeys = append(unkeyCategoryKeys, k)
	}
	sort.Strings(unkeyCategoryKeys)

	for _, catKey := range unkeyCategoryKeys {
		cat := unkeyCategories[catKey]
		catPages := make([]interface{}, len(cat.Files))
		for i, file := range cat.Files {
			catPages[i] = file
		}
		unkeyErrorsPages = append(unkeyErrorsPages, map[string]interface{}{
			"group": cat.Name,
			"pages": catPages,
		})
	}

	errorPages = append(errorPages, map[string]interface{}{
		"group": "Unkey Errors",
		"pages": unkeyErrorsPages,
	})

	// Add User Errors section
	userErrorsPages := []interface{}{}

	userCategoryKeys := make([]string, 0, len(userCategories))
	for k := range userCategories {
		userCategoryKeys = append(userCategoryKeys, k)
	}
	sort.Strings(userCategoryKeys)

	for _, catKey := range userCategoryKeys {
		cat := userCategories[catKey]
		catPages := make([]interface{}, len(cat.Files))
		for i, file := range cat.Files {
			catPages[i] = file
		}

		// If only one category, don't create a subgroup
		if len(userCategories) == 1 {
			userErrorsPages = catPages
		} else {
			userErrorsPages = append(userErrorsPages, map[string]interface{}{
				"group": cat.Name,
				"pages": catPages,
			})
		}
	}

	errorPages = append(errorPages, map[string]interface{}{
		"group": "User Errors",
		"pages": userErrorsPages,
	})

	// Update the errors group
	errorsGroup["pages"] = errorPages
	groups[errorsGroupIndex] = errorsGroup
	docsDropdown["groups"] = groups

	// Write back to docs.json with nice formatting
	updatedJSON, err := json.MarshalIndent(docsConfig, "", "  ")
	if err != nil {
		return fmt.Errorf("failed to marshal docs.json: %w", err)
	}

	if err := os.WriteFile(docsJSONPath, updatedJSON, 0644); err != nil {
		return fmt.Errorf("failed to write docs.json: %w", err)
	}

	fmt.Println("Updated docs.json with all error pages")
	return nil
}
