use base64::{engine::general_purpose, Engine as _};
use dashmap::DashMap;
use file_icon_provider::get_file_icon;
use image::{DynamicImage, ImageFormat, RgbaImage};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use tauri::{AppHandle, Emitter};
use trash;

use std::{
    collections::HashMap,
    env,
    ffi::OsStr,
    io::Cursor,
    path::Path,
    sync::{
        atomic::{AtomicBool, Ordering},
        Arc, LazyLock,
    },
};

use tokio::{
    fs::{self, File},
    io::{self, AsyncReadExt},
    process::Command,
    sync::Semaphore,
    task::{self, JoinHandle},
};

// Global cancellation flag checked by the long-running analysis loops.
// A plain atomic replaces an earlier `Arc<Mutex<bool>>` design, since only
// a single bool is shared across tasks.

static PROCESSING_CANCELED: AtomicBool = AtomicBool::new(false);

// Cache of base64-encoded file-type icons, keyed by lowercase extension
// (or by full path for executables — see get_file_icon_base64). Rendering
// and encoding an icon is expensive, so each type is done only once.
static FILE_TYPE_ICON_BASE64_MAP: LazyLock<Arc<DashMap<String, String>>> =
    LazyLock::new(|| Arc::new(DashMap::new()));

// Label of the Tauri window that receives progress events.
const WEB_MAIN_WINDOW_NAME: &str = "main";
// Event name for per-file progress notifications during analysis.
const EVENT_ANALYSIS_IN_PROGRESS: &str = "analysis-in-progress";
// Upper bound on concurrently running file-processing tasks.
const MAX_CONCURRENCY: usize = 32;

/// Parameters controlling a duplicate-file analysis run, deserialized from
/// the frontend (camelCase JSON keys).
///
/// Note: the previous `#[serde_with::serde_as]` attribute was removed — it
/// was placed after `#[derive]` (serde_with requires it before the derive)
/// and no `#[serde_as]` field attributes were in use, so it was dead weight.
#[derive(Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AnalysisParameter {
    /// Minimum file size in MB (inclusive lower bound); `None` = unbounded.
    min_size: Option<u64>,

    /// Maximum file size in MB (exclusive upper bound); `None` = unbounded.
    max_size: Option<u64>,

    /// When present and non-empty, only these extensions are analyzed.
    include_file_types: Option<Vec<String>>,

    /// When present and non-empty, these extensions are skipped.
    exclude_file_types: Option<Vec<String>>,

    /// Root paths (files or directories) to scan.
    paths: Vec<String>,
}

/// Per-file result sent to the frontend: identity, location, size, content
/// hash, and an icon rendered as a base64 PNG. Serializes with camelCase keys.
///
/// Note: the misplaced (post-derive) and unused `#[serde_with::serde_as]`
/// attribute was removed; per-field renames are consolidated via `rename_all`.
#[derive(Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct FileInfo {
    /// Base64-encoded PNG icon for the file's type (may be empty).
    icon_base64: String,

    /// Full path including the filename.
    file_full_name: String,

    /// Filename component only.
    filename: String,

    /// Parent directory path.
    path: String,

    /// File size in bytes.
    size: u64,

    /// Content hash used for duplicate grouping (hex digest).
    md5: String,
}

#[derive(Default, Serialize)]
#[serde_with::serde_as]
pub struct DuplicationAnalysisResult {
    #[serde(rename = "duplicateFiles")]
    duplicate_files: HashMap<String, Vec<FileInfo>>,

    #[serde(rename = "totalFilesCount")]
    total_files_count: usize,
}

/// Sets (or clears) the global cancellation flag.
///
/// Passing `true` requests that in-flight analysis loops stop early;
/// `start_analysis` resets the flag to `false` at the start of each run.
pub fn cancel_processing(canceled: bool) {
    PROCESSING_CANCELED.store(canceled, Ordering::Relaxed);
}

/// Opens given path and returns error message if failed.
pub async fn open_path(path: String) -> String {
    // Gets Path struct from the given path string.
    let path_struct = Path::new(&path);

    // Checks if the path exists.
    if path_struct.exists() {
        let os = env::consts::OS;

        // Executes the appropriate command to open the path in the user's system.
        let status = match os {
            "windows" => Command::new("explorer").arg(&path).status().await,
            "macos" => Command::new("open").arg(&path).status().await,
            "linux" => Command::new("xdg-open").arg(&path).status().await,
            _ => return format!("Unsupported OS: {}", os),
        };

        // Returns the result of the command execution.
        match status {
            Ok(_) => String::new(),
            Err(err) => format!("Error executing command: {}", err),
        }
    } else {
        format!("File not found: {}", path)
    }
}

/// Moves the file at `file_path` into the system trash bin.
///
/// Returns an empty string on success, otherwise an error message
/// (missing file or a trash-operation failure).
pub async fn move_file_to_trash(file_path: String) -> String {
    let target = Path::new(&file_path);

    if !target.exists() {
        return format!("File not found: {}", file_path);
    }

    // Trash rather than permanently delete, so the user can recover.
    match trash::delete(target) {
        Ok(()) => String::new(),
        Err(err) => format!("Error moving file to trash bin: {}", err),
    }
}

/// Runs a full duplicate-file analysis: scans every path in `param`,
/// hashes files that pass the size/type filters, and groups files that
/// share the same content hash.
///
/// Returns an I/O error if directory traversal fails; an empty result if
/// the run is canceled mid-flight.
pub async fn start_analysis(
    app: &AppHandle,
    param: AnalysisParameter,
) -> Result<DuplicationAnalysisResult, io::Error> {
    // Reset the cancellation flag so a cancel from a previous run does not
    // immediately abort this one.
    PROCESSING_CANCELED.store(false, Ordering::Relaxed);

    // Collect info for every file (post-filter) under the given paths.
    let all_files = read_all_files(app, &param).await?;

    // Keep only groups of files sharing the same hash.
    let duplicate_files = filter_duplicates(&all_files);

    Ok(DuplicationAnalysisResult {
        total_files_count: all_files.len(),
        duplicate_files,
    })
}

/// Walks every path in `param.paths` and collects `FileInfo` for each file
/// that passes the size/type filters, computing hashes/icons concurrently
/// (bounded by `MAX_CONCURRENCY`).
///
/// Returns an empty vector immediately if the user cancels processing.
/// Directory-read errors propagate; per-entry errors are logged and skipped.
async fn read_all_files(
    app: &AppHandle,
    param: &AnalysisParameter,
) -> Result<Vec<FileInfo>, io::Error> {
    // Iterative DFS with an explicit stack; recursion could overflow the
    // stack on very deep directory trees.
    let mut path_entries: Vec<String> = param.paths.clone();
    let mut handles: Vec<JoinHandle<Result<FileInfo, io::Error>>> = vec![];
    let semaphore = Arc::new(Semaphore::new(MAX_CONCURRENCY));

    while let Some(p) = path_entries.pop() {
        let path = Path::new(&p);

        if path.is_dir() {
            let mut entries = fs::read_dir(path).await?;

            while let Some(entry) = entries.next_entry().await? {
                // Bail out early when the user cancels.
                if PROCESSING_CANCELED.load(Ordering::Relaxed) {
                    return Ok(Vec::new());
                }

                // Per-entry metadata failures are logged, not fatal.
                let entry_metadata = match entry.metadata().await {
                    Ok(m) => m,
                    Err(e) => {
                        eprintln!("Failed to read entry in: {}", e);
                        continue;
                    }
                };

                if entry_metadata.is_file() {
                    // Ok(None) means the file was filtered out by size/type,
                    // which is normal and must NOT be logged as an error
                    // (the previous code misreported it as a failure).
                    match get_read_file_info_handler(app, param, semaphore.clone(), &entry.path())
                        .await
                    {
                        Ok(Some(handle)) => handles.push(handle),
                        Ok(None) => {}
                        Err(e) => {
                            eprintln!(
                                "Failed to read entry in: {:?} ({})",
                                entry.path().as_os_str(),
                                e
                            );
                        }
                    }
                } else if entry_metadata.is_dir() {
                    // Defer subdirectory traversal via the stack.
                    path_entries.push(entry.path().to_string_lossy().to_string());
                }
            }
        } else if path.is_file() {
            // A root entry may itself be a single file.
            match get_read_file_info_handler(app, param, semaphore.clone(), path).await {
                Ok(Some(handle)) => handles.push(handle),
                Ok(None) => {}
                Err(e) => {
                    eprintln!("Failed to read entry in: {:?} ({})", p, e);
                }
            }
        }
    }

    // Await every spawned task; tasks that panicked or errored are dropped.
    let mut file_info_vec: Vec<FileInfo> = Vec::new();
    for handle in handles {
        if let Ok(Ok(file_info)) = handle.await {
            file_info_vec.push(file_info);
        }
    }

    Ok(file_info_vec)
}

/// If the file at `file_path` passes the size and type filters, spawns a
/// concurrent task that computes its `FileInfo` (content hash + icon) and
/// returns the task's handle.
///
/// Returns `Ok(None)` when the file is filtered out, and an error if the
/// file's metadata cannot be read. Unlike the previous version, this never
/// panics: the progress emit is best-effort, path components fall back to
/// empty strings, and hashing errors propagate through the task's `Result`.
async fn get_read_file_info_handler(
    app: &AppHandle,
    param: &AnalysisParameter,
    semaphore: Arc<Semaphore>,
    file_path: &Path,
) -> Result<Option<JoinHandle<Result<FileInfo, io::Error>>>, io::Error> {
    let entry_metadata = fs::metadata(file_path).await?;
    let file_size_in_mb = convert_byte_to_mb(entry_metadata.len());

    // Files outside the requested size range or type lists are skipped.
    if !(check_file_size_in_range(param, file_size_in_mb)
        && check_file_type(param, file_path.extension()))
    {
        return Ok(None);
    }

    // Full path including filename, also used as the progress payload.
    let file_full_name = file_path.to_string_lossy().to_string();

    // Progress notification is best-effort; a failed emit must not abort
    // the scan (the old `.unwrap()` here could panic).
    if let Err(e) = app.emit_to(
        WEB_MAIN_WINDOW_NAME,
        EVENT_ANALYSIS_IN_PROGRESS,
        &file_full_name,
    ) {
        eprintln!("Failed to emit progress event: {}", e);
    }

    // Semaphore bounds concurrency so we don't overwhelm system resources.
    // `acquire_owned` consumes the Arc, so no extra clone is needed; it only
    // fails when the semaphore is closed, which we never do.
    let permit = semaphore
        .acquire_owned()
        .await
        .expect("semaphore is never closed");

    // Degrade gracefully for exotic paths instead of panicking.
    let filename = file_path
        .file_name()
        .map(|n| n.to_string_lossy().to_string())
        .unwrap_or_default();
    let parent_path = file_path
        .parent()
        .map(|p| p.to_string_lossy().to_string())
        .unwrap_or_default();

    // Owned copy so the path can move into the spawned task.
    let file_path = file_path.to_owned();

    // Do the expensive hashing/icon work concurrently.
    let handle = task::spawn(async move {
        // Hold the permit for the task's whole lifetime.
        let _permit = permit;

        // Content hash; errors propagate instead of panicking the task.
        let content_md5 = generate_file_md5(&file_path).await?;

        // spawn_blocking because file_icon_provider errors when run inside
        // the tokio async context on Linux ("GTK may only be used from the
        // main thread").
        let icon_base64 = tokio::task::spawn_blocking(move || get_file_icon_base64(&file_path))
            .await
            .unwrap_or_default();

        Ok(FileInfo {
            filename,
            path: parent_path,
            file_full_name,
            size: entry_metadata.len(),
            md5: content_md5,
            icon_base64,
        })
    });

    Ok(Some(handle))
}

/// Returns true when the file size (in MB) satisfies the optional bounds:
/// the minimum is inclusive, the maximum is exclusive, and an absent bound
/// always passes.
fn check_file_size_in_range(param: &AnalysisParameter, file_size_in_mb: u64) -> bool {
    let above_min = param.min_size.map_or(true, |min| file_size_in_mb >= min);
    let below_max = param.max_size.map_or(true, |max| file_size_in_mb < max);
    above_min && below_max
}

/// Returns true when the file extension passes both the include and
/// exclude filters. Files without an extension always pass, and an absent
/// or empty filter list is treated as "no restriction".
fn check_file_type(param: &AnalysisParameter, file_type: Option<&OsStr>) -> bool {
    // No extension: never filtered by type.
    let Some(file_type) = file_type else {
        return true;
    };

    // Convert OsStr to String once, for the `contains` lookups below.
    let file_type_str = file_type.to_string_lossy().to_string();

    let included = match &param.include_file_types {
        Some(types) => types.is_empty() || types.contains(&file_type_str),
        None => true,
    };

    let not_excluded = match &param.exclude_file_types {
        Some(types) => types.is_empty() || !types.contains(&file_type_str),
        None => true,
    };

    included && not_excluded
}

/// Computes a hex digest of the file's content, streaming it in 4 KiB chunks.
///
/// NOTE: despite the name (kept so existing callers keep working), this
/// computes SHA-256, not MD5.
///
/// Returns an empty string when the file cannot be opened (best-effort, so
/// one unreadable file doesn't abort the whole scan) or when processing is
/// canceled; read errors during streaming are propagated.
async fn generate_file_md5(file_path: &Path) -> Result<String, io::Error> {
    let Ok(mut file) = File::open(file_path).await else {
        // Unreadable files hash to the empty string so the scan continues.
        return Ok(String::new());
    };

    let mut hasher = Sha256::new();

    // Chunk buffer for streaming the file without loading it whole.
    let mut buffer = [0u8; 4096];

    loop {
        // Abort promptly when the user cancels.
        if PROCESSING_CANCELED.load(Ordering::Relaxed) {
            return Ok(String::new());
        }

        let bytes_read = file.read(&mut buffer).await?;

        // Zero bytes read means end of file.
        if bytes_read == 0 {
            break;
        }

        hasher.update(&buffer[..bytes_read]);
    }

    Ok(format!("{:x}", hasher.finalize()))
}

/// Groups files by content hash and keeps only the groups with more than
/// one member (i.e. actual duplicates), keyed by the shared hash.
///
/// Takes `&[FileInfo]` instead of `&Vec<FileInfo>` (idiomatic; the existing
/// `&all_files` call site deref-coerces unchanged).
fn filter_duplicates(files: &[FileInfo]) -> HashMap<String, Vec<FileInfo>> {
    let mut groups: HashMap<String, Vec<FileInfo>> = HashMap::new();

    // Bucket every file under its hash via the entry API (single lookup).
    for file in files {
        groups.entry(file.md5.clone()).or_default().push(file.clone());
    }

    // A hash shared by at least two files marks a duplicate group;
    // retain filters in place instead of rebuilding the map.
    groups.retain(|_, group| group.len() > 1);

    groups
}

/// Converts a byte count to whole mebibytes (integer division, truncating
/// any remainder). Uses an expression tail instead of an explicit `return`.
fn convert_byte_to_mb(byte: u64) -> u64 {
    byte / (1024 * 1024)
}

/// Renders the file's type icon (32 px) as a base64-encoded PNG string,
/// caching the result per lowercase extension — or per full path for
/// executables, whose icons are file-specific.
///
/// Returns an empty string for files without an extension or when any step
/// (icon lookup, image construction, PNG encoding) fails.
fn get_file_icon_base64(file_path: &Path) -> String {
    // Extension-less files get no icon at all.
    let Some(extension) = file_path.extension() else {
        return String::new();
    };

    // Cache key: the lowercase extension, except executables which are
    // keyed by their full lowercase path.
    let map_key = if extension.eq_ignore_ascii_case("exe") {
        file_path.to_string_lossy().to_lowercase()
    } else {
        extension.to_string_lossy().to_lowercase()
    };

    // Serve from the cache when this type was already encoded.
    if let Some(cached) = FILE_TYPE_ICON_BASE64_MAP.get(&map_key) {
        return cached.clone();
    }

    // Look up the raw icon pixels (size 32).
    let Ok(icon) = get_file_icon(file_path, 32) else {
        return String::new();
    };

    // Build an image from the raw RGBA buffer.
    let Some(image) =
        RgbaImage::from_raw(icon.width, icon.height, icon.pixels).map(DynamicImage::ImageRgba8)
    else {
        return String::new();
    };

    // Encode the image as PNG into an in-memory buffer.
    let mut png_bytes: Cursor<Vec<u8>> = Cursor::new(Vec::new());
    if image.write_to(&mut png_bytes, ImageFormat::Png).is_err() {
        return String::new();
    }

    // Base64-encode and cache for subsequent files of the same type.
    let base64 = general_purpose::STANDARD.encode(png_bytes.into_inner());
    FILE_TYPE_ICON_BASE64_MAP.insert(map_key, base64.clone());

    base64
}
