#[allow(unused_must_use)]
extern crate captrs;
extern crate reqwest;

use captrs::*;
use console::Emoji;
use serde::{Deserialize, Serialize};
use std::{
    time::{Duration, Instant},
};
use tokio::time::sleep;

/// Runtime configuration, deserialized from `settings.json` at startup.
#[derive(Clone, Serialize, Deserialize)]
struct Settings {
    // Base URL of the Home Assistant instance; the service path
    // `/api/services/light/turn_on` is appended to it.
    api_endpoint: String,
    // Entity ids of the lights to keep in sync with the screen.
    light_entity_names: Vec<String>,
    // Home Assistant access token, sent as an `Authorization: Bearer` header.
    token: String,
    // Delay between frame grabs, in milliseconds.
    grab_interval: u64,
    // Sampling stride: only every n-th pixel of a frame is averaged.
    skip_pixels: u64,
    // Weight of the previous colour in the exponential smoothing
    // (presumably expected in 0.0..=1.0 — not validated anywhere; TODO confirm).
    smoothing_factor: f32,
    // Index of the monitor handed to `Capturer::new`.
    monitor_id: usize,
}

/// JSON payload for the Home Assistant `light/turn_on` service call.
#[derive(Serialize, Deserialize)]
struct HASSApiBody {
    // Target light entity id.
    entity_id: String,
    // Desired colour as [r, g, b] components.
    rgb_color: [u64; 3],
    // Brightness value sent alongside the colour.
    brightness: u64,
}

#[tokio::main(flavor = "multi_thread")]
/// Entry point: loads `settings.json`, then loops forever grabbing frames,
/// averaging + smoothing the screen colour, and pushing it to every
/// configured Home Assistant light entity.
async fn main() {
    let term = console::Term::stdout();
    term.set_title("HASS-Light-Sync running...");

    println!("{}hass-light-sync - Starting...", Emoji("💡 ", ""));
    println!("{}Reading config...", Emoji("⚙️ ", ""));

    // Read settings
    let settingsfile =
        std::fs::read_to_string("settings.json").expect("❌ settings.json file does not exist");

    let settings: Settings = serde_json::from_str(&settingsfile)
        .expect("❌ Failed to parse settings. Please read the configuration section in the README");

    println!("{}Config loaded successfully!", Emoji("✅ ", ""));

    // skip_pixels is the sampling stride below; 0 would panic with a
    // division by zero inside the capture loop, so reject it up front.
    assert!(
        settings.skip_pixels > 0,
        "❌ skip_pixels must be at least 1"
    );

    let steps = settings.skip_pixels as usize;
    let grab_interval = settings.grab_interval;
    let smoothing_factor = settings.smoothing_factor;

    // Create a capture device for the configured monitor.
    let mut capturer =
        Capturer::new(settings.monitor_id).expect("❌ Failed to get Capture Object");

    // Create HTTP client (reused across all requests).
    let client = reqwest::Client::new();

    // Previous smoothed colour; seeds the exponential smoothing below.
    let (mut prev_r, mut prev_g, mut prev_b) = (0u64, 0u64, 0u64);

    println!();

    let mut last_timestamp = Instant::now();

    loop {
        // Try to grab a frame; on failure sleep 100 ms and skip this iteration.
        let ps: Vec<Bgr8> = match capturer.capture_frame() {
            Ok(res) => res,
            Err(error) => {
                println!("{} Failed to grab frame: {:?}", Emoji("❗ ", ""), error);
                sleep(Duration::from_millis(100)).await;
                continue;
            }
        };

        // Sum the channels of every `steps`-th pixel, counting the samples so
        // the average divides by the actual sample count. (The old code
        // divided by a precomputed floor(w*h / skip_pixels), which is off by
        // one for most resolutions and zero when skip_pixels > w*h.)
        let (mut total_r, mut total_g, mut total_b) = (0u64, 0u64, 0u64);
        let mut sampled = 0u64;
        for (i, Bgr8 { r, g, b, .. }) in ps.into_iter().enumerate() {
            if i % steps == 0 {
                total_r += r as u64;
                total_g += g as u64;
                total_b += b as u64;
                sampled += 1;
            }
        }

        // An empty frame has nothing to average — skip it.
        if sampled == 0 {
            sleep(Duration::from_millis(grab_interval)).await;
            continue;
        }

        // Average colour of the sampled pixels.
        let (avg_r, avg_g, avg_b) = (total_r / sampled, total_g / sampled, total_b / sampled);

        // Exponential smoothing against the previous frame's colour.
        let (sm_r, sm_g, sm_b) = (
            smoothing_factor * prev_r as f32 + (1.0 - smoothing_factor) * avg_r as f32,
            smoothing_factor * prev_g as f32 + (1.0 - smoothing_factor) * avg_g as f32,
            smoothing_factor * prev_b as f32 + (1.0 - smoothing_factor) * avg_b as f32,
        );

        // Store into prev for the next iteration.
        prev_r = sm_r as u64;
        prev_g = sm_g as u64;
        prev_b = sm_b as u64;

        // Put into vector
        let avg_arr = vec![prev_r, prev_g, prev_b];

        // Brightness = the highest RGB component value.
        let brightness = *avg_arr.iter().max().unwrap();

        let time_elapsed = last_timestamp.elapsed().as_millis();
        last_timestamp = Instant::now();

        let _ = term.move_cursor_up(1);
        let _ = term.clear_line();

        // Guard the FPS readout: a sub-millisecond iteration previously
        // panicked here with a division by zero.
        let fps = if time_elapsed > 0 { 1000 / time_elapsed } else { 0 };
        println!(
            "{}Current average color: {:?} - Brightness: {} - FPS: {}",
            Emoji("💡 ", ""),
            avg_arr,
            brightness,
            fps
        );

        // Send RGB data concurrently to all light entities
        send_rgb_concurrently(
            &client,
            &settings,
            &avg_arr,
            brightness,
            &settings.light_entity_names,
        )
        .await;

        sleep(Duration::from_millis(grab_interval)).await;
    }
}

async fn send_rgb_concurrently(
    client: &reqwest::Client,
    settings: &Settings,
    rgb_vec: &Vec<u64>,
    brightness: u64,
    light_entity_names: &Vec<String>,
) {
    let tasks = light_entity_names.iter().map(|lamp| {
        let client = client.clone();
        let settings = settings.clone();
        let rgb_vec = rgb_vec.clone();
        let brightness = brightness;
        let lamp = lamp.clone();

        tokio::spawn(async move {
            send_rgb(&client, &settings, &rgb_vec, brightness, &lamp).await;
        })
    }).collect::<Vec<_>>();

    for task in tasks {
        task.await.expect("Failed to send RGB data");
    }
}

/// Issues one `light/turn_on` service call to Home Assistant for `lamp`,
/// setting its colour and brightness.
///
/// Failures (transport errors or non-success HTTP statuses) are logged to
/// stdout; the function never panics so a flaky connection cannot kill the
/// sync loop.
async fn send_rgb(
    client: &reqwest::Client,
    settings: &Settings,
    rgb_vec: &[u64],
    brightness: u64,
    lamp: &str,
) {
    let api_body = HASSApiBody {
        entity_id: lamp.to_owned(),
        rgb_color: [rgb_vec[0], rgb_vec[1], rgb_vec[2]],
        brightness,
    };

    let response = client
        .post(format!(
            "{}/api/services/light/turn_on",
            settings.api_endpoint.as_str()
        ))
        .header("Authorization", format!("Bearer {}", settings.token).as_str())
        .json(&api_body)
        .send()
        .await;

    match response {
        Ok(res) => {
            // Accept any 2xx: the previous `!= 200` check flagged perfectly
            // valid success responses such as 201 as failures.
            if !res.status().is_success() {
                println!(
                    "{}Connection to Home Assistant failed: HTTP {}",
                    Emoji("❌ ", ""),
                    res.status()
                );
            }
        }
        Err(e) => {
            println!(
                "{}Connection to Home Assistant failed: {}",
                Emoji("❌ ", ""),
                e
            );
        }
    }
}
