use async_trait::async_trait;
use chrono::Utc;
use elasticsearch::{Elasticsearch, IndexParts};
use serde_json::json;

use crate::config::Config;
use crate::model::LogEntry;

/// Elasticsearch-backed log storage: entries are buffered in memory and
/// written out in bulk by `flush`.
pub struct ElasticStorage {
    /// Client used to talk to the Elasticsearch cluster.
    client: Elasticsearch,
    /// Prefix for the daily index name (`flush` writes to `<prefix>-YYYY.MM.DD`).
    index_prefix: String,
    /// Pending log entries awaiting a bulk flush.
    buffer: Vec<LogEntry>,
    // NOTE(review): the `LogStorage` impl below reads `self.inner` and a
    // `config` field, neither of which exists on this struct — confirm
    // which layout is intended (an `inner: Mutex<...>` wrapper was
    // presumably planned).
}

impl ElasticStorage {
    /// Builds an Elasticsearch-backed storage from `config`.
    ///
    /// Connects to a single node with HTTP basic auth and pre-allocates the
    /// in-memory log buffer to `buffer_size` entries.
    ///
    /// # Errors
    /// Fails when the configured node URL cannot be parsed or the transport
    /// cannot be built.
    pub fn new(config: &Config) -> Result<Self> {
        // `Transport` itself has no `auth` method; credentials must be set on
        // the builder before the transport is constructed.
        // NOTE(review): assumes the module's `Result` error type converts from
        // `url::ParseError` (e.g. anyhow) — confirm.
        let url = elasticsearch::http::Url::parse(&config.storage.elastic.url)?;
        let pool = elasticsearch::http::transport::SingleNodeConnectionPool::new(url);
        let transport = elasticsearch::http::transport::TransportBuilder::new(pool)
            .auth(elasticsearch::auth::Credentials::Basic(
                config.storage.elastic.username.clone(),
                config.storage.elastic.password.clone(),
            ))
            .build()?;

        Ok(Self {
            client: Elasticsearch::new(transport),
            index_prefix: config.storage.elastic.index_prefix.clone(),
            buffer: Vec::with_capacity(config.storage.buffer_size),
        })
    }

    /// Flushes all buffered entries to Elasticsearch via the bulk API.
    ///
    /// Writes to a daily index named `<prefix>-YYYY.MM.DD`. The buffer is
    /// cleared only after the request succeeds, so entries are retained for a
    /// retry when the request fails.
    ///
    /// # Errors
    /// Fails when an entry cannot be serialized, the request fails, or the
    /// server answers with a 4xx/5xx status.
    async fn flush(&mut self) -> Result<()> {
        if self.buffer.is_empty() {
            return Ok(());
        }

        let index = format!("{}-{}", self.index_prefix, Utc::now().format("%Y.%m.%d"));
        // The action metadata line is identical for every entry — serialize it once.
        let action = json!({ "index": { "_index": index } }).to_string();

        // The bulk endpoint takes a Vec of NDJSON lines (action, document,
        // action, document, ...); the client joins them with newlines.
        let mut body: Vec<String> = Vec::with_capacity(self.buffer.len() * 2);
        for log in &self.buffer {
            body.push(action.clone());
            body.push(serde_json::to_string(log)?);
        }

        let response = self
            .client
            .bulk(elasticsearch::BulkParts::None)
            .body(body)
            .send()
            .await?;
        // Surface HTTP-level failures instead of silently discarding them.
        // NOTE(review): a 200 bulk response can still carry per-item errors
        // (`errors: true`); inspecting the response body is a further hardening.
        response.error_for_status_code()?;

        self.buffer.clear();
        Ok(())
    }
}

#[async_trait]
// NOTE(review): this impl reads `self.inner` and `storage.config`, but the
// `ElasticStorage` struct above declares neither field (it has `client`,
// `index_prefix`, `buffer`), so this cannot compile against that struct as
// shown. Presumably the fields were meant to live inside an
// `inner: Mutex<...>` wrapper along with a retained config — confirm
// against the intended struct layout.
impl LogStorage for ElasticStorage {
    /// Buffers `log` and flushes the batch to Elasticsearch once it reaches
    /// the configured `buffer_size`.
    async fn store(&self, log: LogEntry) -> Result<()> {
        // Serialize access to the shared buffer behind an async lock, so
        // `store` can take `&self`.
        let mut storage = self.inner.lock().await;
        storage.buffer.push(log);
        
        // Flush eagerly when the batch is full; flush errors propagate to
        // the caller.
        if storage.buffer.len() >= storage.config.storage.buffer_size {
            storage.flush().await?;
        }
        
        Ok(())
    }
}