const { config } = require('../config');
const { logger } = require('../utils/logger');

/**
 * Build a vector of `dim` uniform random values in [0, 1).
 * Not cryptographically secure — intended for test/mock data only.
 * @param {number} [dim=1536] - Number of components.
 * @returns {number[]} Freshly allocated random vector.
 */
function randomVector(dim = 1536) {
  const vector = [];
  for (let i = 0; i < dim; i++) {
    vector.push(Math.random());
  }
  return vector;
}

/**
 * Generate an embedding vector for `text` using the configured provider.
 * Supported providers: 'openai', 'azure', 'ollama'; anything else
 * (including 'mock') uses the deterministic mock embedding.
 * Any provider failure is logged and silently degrades to the mock.
 * @param {string} text - Input text; falsy input yields null.
 * @returns {Promise<number[]|null>} Embedding vector, or null for falsy input.
 */
async function embedText(text) {
  // Falsy input (empty string, null, undefined) has no embedding.
  if (!text) return null;

  const provider = config.embeddingProvider;
  const dimension = config.vectorDimension;

  try {
    if (provider === 'openai') {
      return await embedWithOpenAI(text, dimension);
    }
    if (provider === 'azure') {
      return await embedWithAzure(text, dimension);
    }
    if (provider === 'ollama') {
      return await embedWithOllama(text, dimension);
    }
    // 'mock' and any unrecognized provider fall through to the mock.
    return embedMock(text, dimension);
  } catch (error) {
    // Best-effort degradation: never let a provider outage break callers.
    logger.warn({ err: error, provider }, 'Embedding failed, falling back to mock');
    return embedMock(text, dimension);
  }
}

/**
 * Deterministic mock embedding: the same text length always produces the
 * same vector, which keeps tests and local development reproducible.
 * @param {string} text - Input text; only its length seeds the vector.
 * @param {number} [dimension=1536] - Output vector length.
 * @returns {number[]} Vector of values in [0, 1].
 */
function embedMock(text, dimension = 1536) {
  const seed = text.length;
  const vector = new Array(dimension);
  for (let i = 0; i < dimension; i++) {
    vector[i] = ((seed + i * 31) % 101) / 100;
  }
  return vector;
}

/**
 * Embed text via the OpenAI API.
 * Currently a stub: validates configuration, then returns the mock vector.
 * @param {string} text - Input text.
 * @param {number} [dimension=1536] - Desired vector length.
 * @returns {Promise<number[]>} Embedding vector (mock for now).
 * @throws {Error} If no OpenAI API key is configured.
 */
async function embedWithOpenAI(text, dimension = 1536) {
  const { apiKey } = config.providers.openai;
  if (!apiKey) {
    throw new Error('OPENAI_API_KEY not configured');
  }

  // TODO: implement the real call — POST https://api.openai.com/v1/embeddings
  // with model 'text-embedding-3-small', body { input: text }, and header
  // `Authorization: Bearer <apiKey>`; return response.data.data[0].embedding.
  logger.info('OpenAI embedding not yet implemented, using mock');
  return embedMock(text, dimension);
}

/**
 * Embed text via Azure OpenAI.
 * Currently a stub: validates configuration, then returns the mock vector.
 * @param {string} text - Input text.
 * @param {number} [dimension=1536] - Desired vector length.
 * @returns {Promise<number[]>} Embedding vector (mock for now).
 * @throws {Error} If endpoint, API key, or deployment name is missing.
 */
async function embedWithAzure(text, dimension = 1536) {
  const { endpoint, apiKey, deployment } = config.providers.azure;
  if (!endpoint || !apiKey || !deployment) {
    throw new Error('Azure OpenAI configuration incomplete');
  }

  // TODO: implement the real Azure OpenAI embeddings call.
  logger.info('Azure OpenAI embedding not yet implemented, using mock');
  return embedMock(text, dimension);
}

/**
 * Embed text via a local Ollama instance.
 * Currently a stub: validates configuration, then returns the mock vector.
 * @param {string} text - Input text.
 * @param {number} [dimension=1536] - Desired vector length.
 * @returns {Promise<number[]>} Embedding vector (mock for now).
 * @throws {Error} If no Ollama base URL is configured.
 */
async function embedWithOllama(text, dimension = 1536) {
  const { baseUrl } = config.providers.ollama;
  if (!baseUrl) {
    throw new Error('OLLAMA_BASE_URL not configured');
  }

  // TODO: implement the real call — POST `${baseUrl}/api/embeddings` with
  // body { model: 'nomic-embed-text', prompt: text };
  // return response.data.embedding.
  logger.info('Ollama embedding not yet implemented, using mock');
  return embedMock(text, dimension);
}

/**
 * Cosine similarity between two equal-length numeric vectors.
 * Returns 0 (rather than throwing or NaN) for missing inputs, length
 * mismatches, or zero-magnitude vectors.
 * @param {number[]} vecA - First vector.
 * @param {number[]} vecB - Second vector.
 * @returns {number} Similarity in [-1, 1], or 0 for degenerate input.
 */
function cosineSimilarity(vecA, vecB) {
  // Degenerate input: treat as "no similarity" instead of erroring.
  if (!vecA || !vecB || vecA.length !== vecB.length) return 0;

  let dotProduct = 0;
  let sumSqA = 0;
  let sumSqB = 0;
  for (let idx = 0; idx < vecA.length; idx++) {
    const a = vecA[idx];
    const b = vecB[idx];
    dotProduct += a * b;
    sumSqA += a * a;
    sumSqB += b * b;
  }

  // A zero-magnitude vector has no direction, so similarity is undefined.
  if (sumSqA === 0 || sumSqB === 0) return 0;
  return dotProduct / (Math.sqrt(sumSqA) * Math.sqrt(sumSqB));
}

/**
 * Rank stored documents by cosine similarity to `vector`.
 * Brute-force: loads every document from the store and scores it in memory,
 * so cost grows linearly with corpus size.
 * @param {number[]} vector - Query embedding; falsy yields an empty result.
 * @param {number} [limit=20] - Maximum number of results.
 * @returns {Promise<Array<object>>} Documents (shallow copies) with an added
 *   `score` field, sorted by descending similarity.
 */
async function similaritySearch(vector, limit = 20) {
  // No query vector → nothing to rank.
  if (!vector) return [];

  // NOTE(review): function-scoped require — presumably avoids a circular
  // dependency at module load time; confirm before hoisting to the top.
  const { getDocumentStore } = require('../store');
  const store = getDocumentStore();
  const docs = await store.list({ limit: Number.MAX_SAFE_INTEGER, offset: 0 });

  const ranked = [];
  for (const doc of docs) {
    // Skip documents that were never embedded.
    if (!Array.isArray(doc.vector)) continue;
    ranked.push({ doc, score: cosineSimilarity(vector, doc.vector) });
  }
  ranked.sort((a, b) => b.score - a.score);

  return ranked.slice(0, limit).map(({ doc, score }) => ({ ...doc, score }));
}

// Public API: embedding generation, similarity search, and the random-vector
// test helper, grouped under a single `embeddingService` namespace.
module.exports = {
  embeddingService: {
    embedText,
    similaritySearch,
    randomVector
  }
};
