use crate::log_record::KafkaLogRow;
use axum::{
    extract::Query,
    extract::State,
    http::{HeaderMap, StatusCode},
    response::Json,
};
use bytes::Bytes;
use limiters::token_dropper::TokenDropper;
use opentelemetry_proto::tonic::collector::logs::v1::ExportLogsServiceRequest;
use prost::Message;
use serde::Deserialize;
use serde_json::json;
use std::sync::Arc;

use crate::kafka::KafkaSink;

use tracing::{debug, error, instrument};

/// Shared application state for the OTLP logs HTTP endpoint.
///
/// Cloned per request by axum's `State` extractor, so fields must be cheap
/// to clone: the dropper is behind an `Arc`; `KafkaSink` is presumably
/// internally reference-counted — TODO confirm.
#[derive(Clone)]
pub struct Service {
    // Destination sink that writes converted log rows to Kafka.
    sink: KafkaSink,
    // Decides whether requests carrying a given token should be rejected.
    token_dropper: Arc<TokenDropper>,
}

/// Query-string parameters accepted by the logs export endpoint.
#[derive(Deserialize)]
pub struct QueryParams {
    // Optional project API token; used only as a fallback when no
    // `Authorization` header is present on the request.
    token: Option<String>,
}

impl Service {
    pub async fn new(
        kafka_sink: KafkaSink,
        token_dropper: TokenDropper,
    ) -> Result<Self, anyhow::Error> {
        Ok(Self {
            sink: kafka_sink,
            token_dropper: token_dropper.into(),
        })
    }
}

#[instrument(skip_all, fields(
    token = tracing::field::Empty,
    content_type = %headers.get("content-type")
        .and_then(|v| v.to_str().ok())
        .unwrap_or(""),
    user_agent = %headers.get("user-agent")
        .and_then(|v| v.to_str().ok())
        .unwrap_or(""),
    content_length = %headers.get("content-length")
        .and_then(|v| v.to_str().ok())
        .unwrap_or(""),
    content_encoding = %headers.get("content-encoding")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("")))
]
pub async fn export_logs_http(
    State(service): State<Service>,
    Query(query_params): Query<QueryParams>,
    headers: HeaderMap,
    body: Bytes,
) -> Result<Json<serde_json::Value>, (StatusCode, Json<serde_json::Value>)> {
    // The Project API key must be passed in as a Bearer token in the Authorization header
    if !headers.contains_key("Authorization") && query_params.token.is_none() {
        error!("No token provided");
        return Err((
            StatusCode::UNAUTHORIZED,
            Json(json!({"error": format!("No token provided")})),
        ));
    }

    let token = if headers.contains_key("Authorization") {
        match headers["Authorization"]
            .to_str()
            .unwrap_or("")
            .split("Bearer ")
            .last()
        {
            Some(token) if !token.is_empty() => token,
            _ => {
                error!("No token provided");
                return Err((
                    StatusCode::UNAUTHORIZED,
                    Json(json!({"error": format!("No token provided")})),
                ));
            }
        }
    } else {
        match query_params.token {
            Some(ref token) if !token.is_empty() => token,
            _ => {
                error!("No token provided");
                return Err((
                    StatusCode::UNAUTHORIZED,
                    Json(json!({"error": format!("No token provided")})),
                ));
            }
        }
    };
    if service.token_dropper.should_drop(token, "") {
        return Err((
            StatusCode::UNAUTHORIZED,
            Json(json!({"error": format!("Invalid token")})),
        ));
    }

    tracing::Span::current().record("token", token);

    // Try to decode as Protobuf, if this fails, try JSON.
    // We do this over relying on Content-Type headers to be as permissive as possible in what we accept.
    let export_request = match ExportLogsServiceRequest::decode(body.as_ref()) {
        Ok(request) => request,
        Err(proto_err) => match serde_json::from_slice(&body) {
            Ok(request) => request,
            Err(json_err) => {
                error!(
                    "Failed to decode JSON: {} or Protobuf: {}",
                    json_err, proto_err
                );
                return Err((
                    StatusCode::BAD_REQUEST,
                    Json(
                        json!({"error": format!("Failed to decode JSON: {} or Protobuf: {}", json_err, proto_err)}),
                    ),
                ));
            }
        },
    };

    let mut rows: Vec<KafkaLogRow> = Vec::new();
    for resource_logs in export_request.resource_logs {
        for scope_logs in resource_logs.scope_logs {
            for log_record in scope_logs.log_records {
                let row = match KafkaLogRow::new(
                    log_record,
                    resource_logs.resource.clone(),
                    scope_logs.scope.clone(),
                ) {
                    Ok(row) => row,
                    Err(e) => {
                        error!("Failed to create LogRow: {e}");
                        return Err((
                            StatusCode::BAD_REQUEST,
                            Json(json!({"error": format!("Bad input format provided")})),
                        ));
                    }
                };
                rows.push(row);
            }
        }
    }

    let row_count = rows.len();
    if let Err(e) = service.sink.write(token, rows, body.len() as u64).await {
        error!("Failed to send logs to Kafka: {}", e);
        return Err((
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": format!("Internal server error")})),
        ));
    } else {
        debug!("Successfully sent {} logs to Kafka", row_count);
    }

    // Return empty JSON object per OTLP spec
    Ok(Json(json!({})))
}
