const { randomUUID } = require('crypto');
const { Pool } = require('pg');
const pgvector = require('pgvector/node');
const { config } = require('../config');
const { logger } = require('../utils/logger');

/**
 * Document store backed by PostgreSQL with the pgvector extension for
 * similarity search.
 *
 * Call `init()` once before any other method; it creates the connection
 * pool and ensures the schema (table + indexes) exists.
 */
class PostgresStore {
  constructor() {
    // Connection pool; created lazily in init().
    this.pool = null;
    // True once init() has completed successfully.
    this.ready = false;
    // In-flight init() promise; shared so concurrent callers do not each
    // create a Pool and race the DDL in ensureSchema().
    this.initPromise = null;
  }

  /**
   * Initialize the pool and schema. Safe to call repeatedly and
   * concurrently: every caller shares a single in-flight initialization,
   * and a failed attempt can be retried by calling init() again.
   * @returns {Promise<boolean>} resolves true once the store is ready.
   * @throws {Error} if DATABASE_URL is missing or schema setup fails.
   */
  async init() {
    if (this.ready) return true;
    if (!this.initPromise) {
      this.initPromise = (async () => {
        if (!config.databaseUrl) {
          throw new Error('DATABASE_URL is required for Postgres backend');
        }
        this.pool = new Pool({ connectionString: config.databaseUrl });
        // NOTE(review): the pgvector-node adapter for node-postgres
        // documents registerType/registerTypes taking a *client*; confirm
        // registerTypeParser(pool) really installs the vector parser for
        // pooled connections — otherwise row.vector comes back as a string.
        pgvector.registerTypeParser(this.pool);
        await this.ensureSchema();
        this.ready = true;
        return true;
      })().catch((error) => {
        // Reset so a later init() can retry after a transient failure.
        this.initPromise = null;
        throw error;
      });
    }
    return this.initPromise;
  }

  /**
   * Drain and close the pool, resetting state so init() could run again.
   * No-op if the store was never initialized.
   */
  async close() {
    if (!this.pool) return;
    await this.pool.end();
    this.pool = null;
    this.ready = false;
    this.initPromise = null;
  }

  /**
   * Create the `documents` table and its indexes if they do not exist.
   * @throws {Error} on any DDL failure (logged before rethrow).
   */
  async ensureSchema() {
    // The vector dimension must be spliced into the DDL text (DDL cannot
    // take bind parameters), so validate it first to rule out SQL
    // injection or malformed DDL from a bad environment value.
    const dimension = Number(config.vectorDimension);
    if (!Number.isInteger(dimension) || dimension <= 0) {
      throw new Error(`Invalid vector dimension: ${config.vectorDimension}`);
    }
    const client = await this.pool.connect();
    try {
      await client.query('CREATE EXTENSION IF NOT EXISTS vector');
      await client.query(`
        CREATE TABLE IF NOT EXISTS documents (
          id UUID PRIMARY KEY,
          title TEXT NOT NULL,
          content TEXT NOT NULL,
          metadata JSONB DEFAULT '{}'::jsonb,
          vector VECTOR(${dimension}) NULL,
          created_at TIMESTAMPTZ DEFAULT NOW(),
          updated_at TIMESTAMPTZ DEFAULT NOW()
        )
      `);
      // GIN index over the tags array inside metadata, used by search().
      await client.query(`
        CREATE INDEX IF NOT EXISTS documents_metadata_gin_idx
        ON documents USING GIN ((metadata -> 'tags'))
      `);
      // Approximate-nearest-neighbor index for cosine distance queries.
      await client.query(`
        CREATE INDEX IF NOT EXISTS documents_vector_ivfflat_idx
        ON documents USING ivfflat (vector vector_cosine_ops)
        WITH (lists = 100)
      `);
    } catch (error) {
      logger.error({ err: error }, 'Failed to ensure Postgres schema');
      throw error;
    } finally {
      client.release();
    }
  }

  /**
   * Insert a new document. Delegates to upsert(), so a doc carrying an
   * existing id will update that row rather than fail.
   * @param {object} doc
   * @returns {Promise<object>} the stored document.
   */
  async create(doc) {
    return this.upsert(doc);
  }

  /**
   * Shallow-merge `updates` onto the existing document and persist.
   * Note: `metadata` is replaced wholesale when present in `updates`,
   * not deep-merged.
   * @param {string} id
   * @param {object} updates
   * @returns {Promise<object|null>} updated document, or null if absent.
   */
  async update(id, updates) {
    const existing = await this.findById(id);
    if (!existing) return null;
    const payload = { ...existing, ...updates, id };
    return this.upsert(payload);
  }

  /**
   * Insert or update a document by id. A missing id gets a fresh UUID.
   * `created_at` is preserved on conflict; `updated_at` is always bumped.
   * @param {object} doc - { id?, title, content, metadata?, vector?, createdAt? }
   * @returns {Promise<object>} the stored document as returned by Postgres.
   */
  async upsert(doc) {
    const id = doc.id || randomUUID();
    const vectorSql = Array.isArray(doc.vector) ? pgvector.toSql(doc.vector) : null;
    const metadata = doc.metadata ? JSON.stringify(doc.metadata) : '{}';
    const result = await this.pool.query(
      `INSERT INTO documents (id, title, content, metadata, vector, created_at, updated_at)
       VALUES ($1, $2, $3, $4::jsonb, $5::vector, COALESCE($6, NOW()), NOW())
       ON CONFLICT (id)
       DO UPDATE SET
         title = EXCLUDED.title,
         content = EXCLUDED.content,
         metadata = EXCLUDED.metadata,
         vector = EXCLUDED.vector,
         updated_at = NOW()
       RETURNING *`,
      [
        id,
        doc.title,
        doc.content,
        metadata,
        vectorSql,
        doc.createdAt ? new Date(doc.createdAt) : null
      ]
    );
    return mapRow(result.rows[0]);
  }

  /**
   * Fetch one document by id.
   * @param {string} id
   * @returns {Promise<object|null>}
   */
  async findById(id) {
    const result = await this.pool.query('SELECT * FROM documents WHERE id = $1 LIMIT 1', [id]);
    return result.rows.length ? mapRow(result.rows[0]) : null;
  }

  /**
   * Delete a document by id.
   * @param {string} id
   * @returns {Promise<boolean>} true if a row was deleted.
   */
  async remove(id) {
    const result = await this.pool.query('DELETE FROM documents WHERE id = $1', [id]);
    return result.rowCount > 0;
  }

  /**
   * Page through documents, newest first.
   * @param {{limit?: number, offset?: number}} [options]
   * @returns {Promise<object[]>}
   */
  async list({ limit = 50, offset = 0 } = {}) {
    const result = await this.pool.query(
      'SELECT * FROM documents ORDER BY created_at DESC LIMIT $1 OFFSET $2',
      [limit, offset]
    );
    return result.rows.map(mapRow);
  }

  /**
   * Case-insensitive substring search over title/content, optionally
   * filtered to documents whose metadata.tags contains any given tag.
   * An empty/absent query matches everything.
   * @param {{query?: string, tags?: string[], limit?: number}} params
   * @returns {Promise<object[]>} matches, most recently updated first.
   */
  async search({ query, tags = [], limit = 20 }) {
    // Escape LIKE metacharacters so user input is matched literally
    // rather than acting as wildcards ('%'/'_') inside the pattern.
    const escaped = (query || '').replace(/[\\%_]/g, '\\$&');
    const q = `%${escaped}%`;
    const result = await this.pool.query(
      `SELECT * FROM documents
       WHERE ($1 = '%%' OR title ILIKE $1 OR content ILIKE $1)
         AND ($2::text[] IS NULL OR metadata -> 'tags' ?| $2)
       ORDER BY updated_at DESC
       LIMIT $3`,
      [q, tags.length ? tags : null, limit]
    );
    return result.rows.map(mapRow);
  }

  /**
   * Replace a document's embedding vector (or clear it with a non-array).
   * @param {string} id
   * @param {number[]|null} vector
   * @returns {Promise<object|null>} updated document, or null if absent.
   */
  async updateVector(id, vector) {
    const vectorSql = Array.isArray(vector) ? pgvector.toSql(vector) : null;
    const result = await this.pool.query(
      'UPDATE documents SET vector = $2::vector, updated_at = NOW() WHERE id = $1 RETURNING *',
      [id, vectorSql]
    );
    return result.rows.length ? mapRow(result.rows[0]) : null;
  }

  /**
   * Cosine-similarity search: nearest documents first, each annotated
   * with `score` = 1 - cosine distance (1 is identical direction).
   * @param {number[]} vector - query embedding.
   * @param {number} [limit=20]
   * @returns {Promise<object[]>} empty array for a non-array input.
   */
  async searchByVector(vector, limit = 20) {
    if (!Array.isArray(vector)) return [];
    const vectorSql = pgvector.toSql(vector);
    const result = await this.pool.query(
      `SELECT *, 1 - (vector <=> $1::vector) AS score
       FROM documents
       WHERE vector IS NOT NULL
       ORDER BY vector <=> $1::vector
       LIMIT $2`,
      [vectorSql, limit]
    );
    return result.rows.map(mapRowWithScore);
  }
}

/**
 * Convert a raw `documents` row (snake_case columns) into the camelCase
 * document object used by the rest of the app. A NULL metadata column
 * becomes `{}`; a vector that is not an array becomes `null`.
 * @param {object} row - A row as returned by node-postgres.
 * @returns {object} { id, title, content, metadata, vector, createdAt, updatedAt }
 */
function mapRow(row) {
  const {
    id,
    title,
    content,
    metadata,
    vector,
    created_at: createdAt,
    updated_at: updatedAt
  } = row;
  return {
    id,
    title,
    content,
    metadata: metadata || {},
    vector: Array.isArray(vector) ? vector : null,
    createdAt,
    updatedAt
  };
}

/**
 * Like mapRow(), but also carries the similarity `score` column produced
 * by searchByVector(). A missing or NULL score becomes 0.
 * @param {object} row - A row that may include a `score` column.
 * @returns {object} mapped document with a numeric `score` property.
 */
function mapRowWithScore(row) {
  return {
    ...mapRow(row),
    score: Number(row.score ?? 0)
  };
}

// Shared singleton instance; callers must await `postgresStore.init()`
// before using it (the pool is not created until then).
const postgresStore = new PostgresStore();

module.exports = { PostgresStore, postgresStore };
