package azure

import (
	"context"
	"encoding/json"
	"fmt"
	"github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"io"
	"os"
	"time"
)

// NewChat constructs a Chat with its default configuration: the
// "chat-gpt" model capped at 2048 tokens with deterministic sampling,
// MP3 speech synthesis via the "echo" voice, plain-text transcription,
// and URL-formatted image generation. Attach a client afterwards with
// Openai, Azure, or AzureOrigin.
func NewChat() *Chat {
	c := &Chat{}

	// Connection settings are filled in by Openai/Azure/AzureOrigin.
	c.mode = 0
	c.endpoint = ""
	c.apiKey = ""

	// Text completion defaults.
	c.model = "chat-gpt"
	c.maxTokens = 2048
	c.temperature = 0.0

	// Speech synthesis defaults.
	c.modelTts = "tts-1"
	c.formatTts = azopenai.SpeechGenerationResponseFormatMp3
	c.voiceTts = azopenai.SpeechVoiceEcho

	// Transcription defaults (deployment name left empty).
	c.modelAsr = ""
	c.formatAsr = azopenai.AudioTranscriptionFormatText

	// Image generation defaults (deployment name left empty).
	c.modelImage = ""
	c.formatImage = azopenai.ImageGenerationResponseFormatURL

	return c
}

// Chat bundles an azopenai.Client with the deployment/model settings
// used by the text, speech, transcription, and image helpers below.
// It is configured fluently via Openai, Azure, or AzureOrigin.
type Chat struct {
	client *azopenai.Client // created by Openai/Azure/AzureOrigin

	mode     int    // 0 = Azure w/ default credential, 1 = Azure w/ key, 2 = public OpenAI
	endpoint string // service base URL, derived by the constructor methods
	apiKey   string // API key (unused in mode 0)

	model           string  // chat/completions deployment name
	modelEmbeddings string  // embeddings deployment name
	maxTokens       int32   // per-request completion token cap
	temperature     float32 // sampling temperature (0 = deterministic)

	modelTts  string                                  // text-to-speech deployment name
	formatTts azopenai.SpeechGenerationResponseFormat // synthesized audio container (e.g. MP3)
	voiceTts  azopenai.SpeechVoice                    // synthesized voice

	modelAsr  string                            // speech-to-text deployment name
	formatAsr azopenai.AudioTranscriptionFormat // transcription output format

	modelImage  string                                 // image-generation deployment name
	formatImage azopenai.ImageGenerationResponseFormat // URL vs. inline payload in responses

	// Azure "on your data" search grounding, toggled by EnableSearch.
	// NOTE(review): no setters for the three fields below are visible in
	// this file — confirm they are populated elsewhere before enabling.
	search         bool
	searchEndpoint string
	searchAPIKey   string
	searchIndex    string
}

// Openai points the Chat at the public OpenAI API using the given API
// key and builds the underlying client. It panics if client
// construction fails, and returns the receiver for chaining.
func (r *Chat) Openai(apiKey string) *Chat {
	r.mode = 2
	r.endpoint = "https://api.openai.com/v1"
	r.apiKey = apiKey

	client, err := azopenai.NewClientForOpenAI(r.endpoint, azcore.NewKeyCredential(r.apiKey), nil)
	if err != nil {
		panic(err)
	}
	r.client = client

	return r
}
// Azure points the Chat at the Azure OpenAI resource named by slug
// (https://<slug>.openai.azure.com) authenticated with an API key. It
// panics if client construction fails, and returns the receiver for
// chaining.
func (r *Chat) Azure(slug, apiKey string) *Chat {
	r.mode = 1
	r.endpoint = fmt.Sprintf("https://%s.openai.azure.com", slug)
	r.apiKey = apiKey

	client, err := azopenai.NewClientWithKeyCredential(r.endpoint, azcore.NewKeyCredential(r.apiKey), nil)
	if err != nil {
		panic(err)
	}
	r.client = client

	return r
}
// AzureOrigin points the Chat at the Azure OpenAI resource named by
// slug (https://<slug>.openai.azure.com) authenticated via the default
// Azure credential chain (environment, managed identity, CLI, …). It
// panics if credential or client construction fails, and returns the
// receiver for chaining.
func (r *Chat) AzureOrigin(slug string) *Chat {
	r.mode = 0
	r.endpoint = fmt.Sprintf("https://%s.openai.azure.com", slug)

	cred, err := azidentity.NewDefaultAzureCredential(nil)
	if err != nil {
		panic(err)
	}

	client, err := azopenai.NewClient(r.endpoint, cred, nil)
	if err != nil {
		panic(err)
	}
	r.client = client

	return r
}
// EnableSearch turns on Azure "on your data" search grounding for
// subsequent ChatCompletions calls and returns the receiver for
// chaining.
// NOTE(review): searchEndpoint/searchAPIKey/searchIndex have no visible
// setters in this file — confirm they are populated before enabling.
func (r *Chat) EnableSearch() *Chat {
	r.search = true
	return r
}

// Completions runs a (non-chat) text completion over the given prompts
// using the configured model, token cap, and temperature.
func (r *Chat) Completions(ctx context.Context, prompts []string) (azopenai.GetCompletionsResponse, error) {
	opts := azopenai.CompletionsOptions{
		Prompt:         prompts,
		MaxTokens:      to.Ptr(r.maxTokens),
		Temperature:    to.Ptr(r.temperature),
		DeploymentName: to.Ptr(r.model),
	}

	return r.client.GetCompletions(ctx, opts, nil)
}
// CompletionsStream is the streaming variant of Completions; the caller
// consumes (and should close) the returned stream.
func (r *Chat) CompletionsStream(ctx context.Context, prompts []string) (azopenai.GetCompletionsStreamResponse, error) {
	opts := azopenai.CompletionsOptions{
		Prompt:         prompts,
		MaxTokens:      to.Ptr(r.maxTokens),
		Temperature:    to.Ptr(r.temperature),
		DeploymentName: to.Ptr(r.model),
	}

	return r.client.GetCompletionsStream(ctx, opts, nil)
}
// ChatCompletions runs a chat completion over the given messages.
// When search grounding is enabled (EnableSearch), the request is
// augmented with the configured Azure Search index.
func (r *Chat) ChatCompletions(ctx context.Context, messages []azopenai.ChatRequestMessageClassification) (azopenai.GetChatCompletionsResponse, error) {
	opts := azopenai.ChatCompletionsOptions{
		Messages:       messages,
		DeploymentName: to.Ptr(r.model),
		MaxTokens:      to.Ptr(r.maxTokens),
	}

	if r.search {
		search := &azopenai.AzureSearchChatExtensionConfiguration{
			Parameters: &azopenai.AzureSearchChatExtensionParameters{
				Endpoint:  to.Ptr(r.searchEndpoint),
				IndexName: to.Ptr(r.searchIndex),
				Authentication: &azopenai.OnYourDataAPIKeyAuthenticationOptions{
					Key: to.Ptr(r.searchAPIKey),
				},
			},
		}
		opts.AzureExtensionsOptions = []azopenai.AzureChatExtensionConfigurationClassification{search}
	}

	return r.client.GetChatCompletions(ctx, opts, nil)
}
// ChatCompletionsStream is the streaming variant of ChatCompletions;
// the caller consumes (and should close) the returned stream.
//
// Fixed for consistency with ChatCompletions: the request now honours
// the configured maxTokens cap and, when EnableSearch was called, the
// same Azure Search grounding configuration — previously streamed calls
// silently ignored both.
func (r *Chat) ChatCompletionsStream(ctx context.Context, messages []azopenai.ChatRequestMessageClassification) (azopenai.GetChatCompletionsStreamResponse, error) {
	options := azopenai.ChatCompletionsOptions{
		Messages:       messages,
		N:              to.Ptr[int32](1),
		MaxTokens:      to.Ptr(r.maxTokens),
		DeploymentName: to.Ptr(r.model),
	}
	if r.search {
		options.AzureExtensionsOptions = []azopenai.AzureChatExtensionConfigurationClassification{
			&azopenai.AzureSearchChatExtensionConfiguration{
				Parameters: &azopenai.AzureSearchChatExtensionParameters{
					Endpoint:  to.Ptr(r.searchEndpoint),
					IndexName: to.Ptr(r.searchIndex),
					Authentication: &azopenai.OnYourDataAPIKeyAuthenticationOptions{
						Key: to.Ptr(r.searchAPIKey),
					},
				},
			},
		}
	}

	return r.client.GetChatCompletionsStream(ctx, options, nil)
}
// Embeddings computes vector embeddings for the given inputs using the
// configured embeddings deployment.
func (r *Chat) Embeddings(ctx context.Context, prompts []string) (azopenai.GetEmbeddingsResponse, error) {
	opts := azopenai.EmbeddingsOptions{
		Input:          prompts,
		DeploymentName: to.Ptr(r.modelEmbeddings),
	}

	return r.client.GetEmbeddings(ctx, opts, nil)
}
// ChatCompletions1 runs a chat completion with a "get_current_weather"
// function exposed via the tools API, logging any tool call the model
// makes to stderr, and returns the raw response.
//
// If messages is empty, a built-in demo prompt ("What's the weather
// like in Boston, MA, in celsius?") is used, preserving the original
// behavior.
//
// Fixed: the function previously returned the unmarshalled tool-call
// arguments (a local struct) where its signature declares
// (azopenai.GetChatCompletionsResponse, error) — a compile error — and
// panicked on every failure; it also indexed Choices[0]/ToolCalls[0]
// without bounds checks. Errors are now returned and the logging is
// guarded.
func (r *Chat) ChatCompletions1(ctx context.Context, messages []azopenai.ChatRequestMessageClassification) (azopenai.GetChatCompletionsResponse, error) {
	// JSON Schema describing the tool's parameters.
	jsonBytes, err := json.Marshal(map[string]any{
		"required": []string{"location"},
		"type":     "object",
		"properties": map[string]any{
			"location": map[string]any{
				"type":        "string",
				"description": "The city and state, e.g. San Francisco, CA",
			},
			"unit": map[string]any{
				"type": "string",
				"enum": []string{"celsius", "fahrenheit"},
			},
		},
	})
	if err != nil {
		return azopenai.GetChatCompletionsResponse{}, fmt.Errorf("marshaling tool parameters: %w", err)
	}

	funcDef := &azopenai.ChatCompletionsFunctionToolDefinitionFunction{
		Name:        to.Ptr("get_current_weather"),
		Description: to.Ptr("Get the current weather in a given location"),
		Parameters:  jsonBytes,
	}

	// Fall back to the original demo prompt when the caller supplies no messages.
	if len(messages) == 0 {
		messages = []azopenai.ChatRequestMessageClassification{
			&azopenai.ChatRequestUserMessage{
				Content: azopenai.NewChatRequestUserMessageContent("What's the weather like in Boston, MA, in celsius?"),
			},
		}
	}

	resp, err := r.client.GetChatCompletions(ctx, azopenai.ChatCompletionsOptions{
		DeploymentName: to.Ptr(r.model),
		Messages:       messages,
		Tools: []azopenai.ChatCompletionsToolDefinitionClassification{
			&azopenai.ChatCompletionsFunctionToolDefinition{
				Function: funcDef,
			},
		},
		Temperature: to.Ptr[float32](0.0),
	}, nil)
	if err != nil {
		return azopenai.GetChatCompletionsResponse{}, err
	}

	// Log any function tool call for debugging; the model is not
	// guaranteed to produce one, so every step is guarded.
	if len(resp.Choices) > 0 && resp.Choices[0].Message != nil {
		for _, tc := range resp.Choices[0].Message.ToolCalls {
			fc, ok := tc.(*azopenai.ChatCompletionsFunctionToolCall)
			if !ok || fc.Function == nil || fc.Function.Name == nil {
				continue
			}
			fmt.Fprintf(os.Stderr, "Function name: %q\n", *fc.Function.Name)
		}
	}

	return resp, nil
}
// ChatCompletions2 runs a chat completion with a "get_current_weather"
// function exposed via the legacy functions API (FunctionCall: auto),
// logging any function call the model makes to stderr, and returns the
// raw response.
//
// If messages is empty, a built-in demo prompt ("What's the weather
// like in Boston, MA, in celsius?") is used, preserving the original
// behavior.
//
// Fixed: the function previously returned the unmarshalled function
// arguments (a local struct) where its signature declares
// (azopenai.GetChatCompletionsResponse, error) — a compile error —
// panicked on every failure, discarded the caller's ctx in favor of
// context.TODO(), and indexed Choices[0] without bounds checks.
func (r *Chat) ChatCompletions2(ctx context.Context, messages []azopenai.ChatRequestMessageClassification) (azopenai.GetChatCompletionsResponse, error) {
	// JSON Schema describing the function's parameters.
	parametersJSON, err := json.Marshal(map[string]any{
		"required": []string{"location"},
		"type":     "object",
		"properties": map[string]any{
			"location": map[string]any{
				"type":        "string",
				"description": "The city and state, e.g. San Francisco, CA",
			},
			"unit": map[string]any{
				"type": "string",
				"enum": []string{"celsius", "fahrenheit"},
			},
		},
	})
	if err != nil {
		return azopenai.GetChatCompletionsResponse{}, fmt.Errorf("marshaling function parameters: %w", err)
	}

	// Fall back to the original demo prompt when the caller supplies no messages.
	if len(messages) == 0 {
		messages = []azopenai.ChatRequestMessageClassification{
			&azopenai.ChatRequestUserMessage{
				Content: azopenai.NewChatRequestUserMessageContent("What's the weather like in Boston, MA, in celsius?"),
			},
		}
	}

	resp, err := r.client.GetChatCompletions(ctx, azopenai.ChatCompletionsOptions{
		DeploymentName: to.Ptr(r.model),
		Messages:       messages,
		FunctionCall: &azopenai.ChatCompletionsOptionsFunctionCall{
			Value: to.Ptr("auto"),
		},
		Functions: []azopenai.FunctionDefinition{
			{
				Name:        to.Ptr("get_current_weather"),
				Description: to.Ptr("Get the current weather in a given location"),

				Parameters: parametersJSON,
			},
		},
		Temperature: to.Ptr[float32](0.0),
	}, nil)
	if err != nil {
		return azopenai.GetChatCompletionsResponse{}, err
	}

	// Log the function call for debugging; the model is not guaranteed
	// to produce one, so every step is guarded.
	if len(resp.Choices) > 0 && resp.Choices[0].Message != nil {
		if fc := resp.Choices[0].Message.FunctionCall; fc != nil && fc.Name != nil {
			fmt.Fprintf(os.Stderr, "Function name: %q\n", *fc.Name)
		}
	}

	return resp, nil
}
// ChatCompletionsVisual runs a vision chat completion, logging each
// returned text result to stderr, and returns the raw response. The
// request is capped at 512 completion tokens and bounded by a
// one-minute timeout layered on the caller's ctx.
//
// If messages is empty, a built-in demo request ("Describe this image"
// plus a fixed Bing image URL) is used, preserving the original
// behavior.
//
// Fixed: the function previously referenced undefined identifiers
// (`client`, `modelDeployment` — now r.client and r.model), never
// returned despite its declared results — compile errors — discarded
// the caller's ctx in favor of context.TODO(), and panicked on failure.
func (r *Chat) ChatCompletionsVisual(ctx context.Context, messages []azopenai.ChatRequestMessageClassification) (azopenai.GetChatCompletionsResponse, error) {
	// Fall back to the original demo request when the caller supplies no messages.
	if len(messages) == 0 {
		imageURL := "https://www.bing.com/th?id=OHR.BradgateFallow_EN-US3932725763_1920x1080.jpg"

		content := azopenai.NewChatRequestUserMessageContent([]azopenai.ChatCompletionRequestMessageContentPartClassification{
			&azopenai.ChatCompletionRequestMessageContentPartText{
				Text: to.Ptr("Describe this image"),
			},
			&azopenai.ChatCompletionRequestMessageContentPartImage{
				ImageURL: &azopenai.ChatCompletionRequestMessageContentPartImageURL{
					URL: &imageURL,
				},
			},
		})

		messages = []azopenai.ChatRequestMessageClassification{
			&azopenai.ChatRequestUserMessage{
				Content: content,
			},
		}
	}

	// Derive the timeout from the caller's context so upstream
	// cancellation still propagates.
	ctx, cancel := context.WithTimeout(ctx, time.Minute)
	defer cancel()

	resp, err := r.client.GetChatCompletions(ctx, azopenai.ChatCompletionsOptions{
		Messages:       messages,
		MaxTokens:      to.Ptr[int32](512),
		DeploymentName: to.Ptr(r.model),
	}, nil)
	if err != nil {
		return azopenai.GetChatCompletionsResponse{}, err
	}

	for _, choice := range resp.Choices {
		if choice.Message != nil && choice.Message.Content != nil {
			fmt.Fprintf(os.Stderr, "Result: %s\n", *choice.Message.Content)
		}
	}

	return resp, nil
}

// AudioSynthesis converts content to speech using the configured TTS
// deployment, voice, and audio format. The caller must close the
// returned audio stream.
func (r *Chat) AudioSynthesis(ctx context.Context, content string) (io.ReadCloser, error) {
	opts := azopenai.SpeechGenerationOptions{
		Input:          to.Ptr(content),
		Voice:          to.Ptr(r.voiceTts),
		ResponseFormat: to.Ptr(r.formatTts),
		DeploymentName: to.Ptr(r.modelTts),
	}

	resp, err := r.client.GenerateSpeechFromText(ctx, opts, nil)
	if err != nil {
		return nil, err
	}

	return resp.Body, nil
}
// AudioRecognition transcribes the given audio bytes using the
// configured ASR deployment and returns the transcript text.
//
// Fixed: the response's Text field is a pointer and was dereferenced
// unconditionally; a response without text (e.g. for non-text response
// formats) would panic. It now returns an error instead.
func (r *Chat) AudioRecognition(ctx context.Context, file []byte) (string, error) {
	resp, err := r.client.GetAudioTranscription(ctx, azopenai.AudioTranscriptionOptions{
		File:           file,
		ResponseFormat: to.Ptr(r.formatAsr),
		DeploymentName: to.Ptr(r.modelAsr),
	}, nil)
	if err != nil {
		return "", err
	}
	if resp.Text == nil {
		return "", fmt.Errorf("audio transcription returned no text (format %q)", r.formatAsr)
	}

	return *resp.Text, nil
}
// ImageGeneration generates images from the prompt using the
// configured image deployment and response format, returning the
// generated image data entries.
func (r *Chat) ImageGeneration(ctx context.Context, content string) ([]azopenai.ImageGenerationData, error) {
	opts := azopenai.ImageGenerationOptions{
		Prompt:         to.Ptr(content),
		ResponseFormat: to.Ptr(r.formatImage),
		DeploymentName: to.Ptr(r.modelImage),
	}

	resp, err := r.client.GetImageGenerations(ctx, opts, nil)
	if err != nil {
		return nil, err
	}

	return resp.Data, nil
}
