// Copyright (c) 2025 Shenzhen Kaihong Digital Industry Development Co., Ltd.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::{collections::BTreeSet, io::Write, path::PathBuf, str::FromStr};

use anyhow::Context;
use clap::{Parser, Subcommand};
use log::info;

use crate::{build::BuildContext, commands::{resolve_stats, BuildGraphArgs}, database::{deploy_stat::DeployStats, local::{collect_installed_pkg_specs, find_installed_pkgs, PKGS_ROOT}, pkg_stat::{PkgStatFile, StatSpec}}, deploy::uninstall_pkgs, utils::{confirm, sha256_to_string}, FerriumCommand};

// Arguments for the `cleanup` command.
//
// NOTE(review): plain `//` comments are used below (not `///`) because clap
// turns doc comments into `--help` text; these notes must not change the CLI.
#[derive(Debug, Parser)]
pub struct CleanupCommand {
    /// Cleanup invalid files and directories
    // Defaults to true, so the invalid-entry scan runs even with no flags.
    #[arg(long, short, default_value_t = true)]
    invalid: bool,
    // Optional subcommand selecting an additional cleanup strategy
    // (undeployed or outdated pkgs); when absent, only the invalid scan runs.
    #[command(subcommand)]
    command: Option<CleanupSubcommand>,
}

// Cleanup strategies beyond the default invalid-entry scan.
// The doc comments below are surfaced verbatim as clap `--help` text.
#[derive(Debug, Subcommand)]
enum CleanupSubcommand {
    /// Cleanup undeployed pkgs
    Undeployed,
    /// Cleanup all outdated pkgs recognized by the dependency graph
    Outdated {
        #[command(flatten)]
        build_graph_args: BuildGraphArgs,
    },
}

impl FerriumCommand for CleanupCommand {
    async fn run(self, _multi_progress: indicatif::MultiProgress) -> anyhow::Result<()> {
        if self.invalid {
            let mut invalid_files = Vec::new();
            let mut invalid_stats = Vec::new();
            let mut invalid_dirs = Vec::new();
            let mut valid_dirs = BTreeSet::new();

            let mut read_dir = tokio::fs::read_dir(PKGS_ROOT.as_path()).await
               .context(format!("failed to read '{}'", PKGS_ROOT.display()))?;
            while let Some(entry) = read_dir.next_entry().await? {
                let ty = entry.file_type().await?;
                if ty.is_file() {
                    let file_name = entry.file_name();
                    let file_name = file_name.to_string_lossy();
                    if file_name.ends_with(".stat") {
                        if StatSpec::from_str(file_name.strip_suffix(".stat").unwrap()).is_err() {
                            invalid_files.push(entry.path());
                        } else if PkgStatFile::open(entry.path()).await.is_err() {
                            invalid_stats.push(entry.path());
                        } else {
                            valid_dirs.insert(PKGS_ROOT.join(file_name.strip_suffix(".stat").unwrap()));
                        }
                    } else {
                        invalid_files.push(entry.path());
                    }
                } else if !ty.is_dir() {
                    invalid_files.push(entry.path());
                }
            }

            let mut read_dir = tokio::fs::read_dir(PKGS_ROOT.as_path()).await
               .context(format!("failed to read '{}'", PKGS_ROOT.display()))?;
            while let Some(entry) = read_dir.next_entry().await? {
                let ty = entry.file_type().await?;
                if ty.is_dir() {
                    if !valid_dirs.contains(&entry.path()) {
                        invalid_dirs.push(entry.path());
                    }
                }
            }

            if !invalid_files.is_empty() {
                let mut dump = Vec::new();
                if dump_invalid_files(invalid_files.as_slice(), &mut dump).is_ok() {
                    info!("{}", String::from_utf8_lossy(&dump));
                }
            }

            if !invalid_stats.is_empty() {
                let mut dump = Vec::new();
                if dump_invalid_stats(invalid_stats.as_slice(), &mut dump).is_ok() {
                    info!("{}", String::from_utf8_lossy(&dump));
                }
            }

            if !invalid_dirs.is_empty() {
                let mut dump = Vec::new();
                if dump_invalid_dirs(invalid_dirs.as_slice(), &mut dump).is_ok() {
                    info!("{}", String::from_utf8_lossy(&dump));
                }
            }

            if (!invalid_files.is_empty() || !invalid_stats.is_empty() || !invalid_dirs.is_empty())
                && confirm("Do you want to continue?").context("failed to read input")? {
                for invalid_file in invalid_files {
                    tokio::fs::remove_file(&invalid_file).await
                       .context(format!("failed to remove '{}'", invalid_file.display()))?;
                }
                for invalid_stat in invalid_stats {
                    tokio::fs::remove_file(&invalid_stat).await
                       .context(format!("failed to remove '{}'", invalid_stat.display()))?;
                }
                for invalid_dir in invalid_dirs {
                    tokio::fs::remove_dir_all(&invalid_dir).await
                       .context(format!("failed to remove '{}'", invalid_dir.display()))?;
                }
            }
        }

        if let Some(command) = self.command {
            match command {
                CleanupSubcommand::Undeployed => {
                    let mut to_cleanup_pkgs = collect_installed_pkg_specs().await
                        .context(format!("failed to read '{}'", PKGS_ROOT.display()))?;
                    let deployed_pkgs = DeployStats::collect_deployed_pkg_specs().await?;
                    to_cleanup_pkgs.retain(|pkg| !deployed_pkgs.contains(pkg));

                    let stat_dep_graph = resolve_stats(
                        to_cleanup_pkgs.iter().map(|s| s.as_str())
                    ).await?;

                    uninstall_pkgs(&stat_dep_graph, true, false).await?;
                },
                CleanupSubcommand::Outdated { build_graph_args } => {
                    let (mut dep_graph, toolchain_name, _) = build_graph_args.build_graph().await?;
                    let sorted_ids = dep_graph.verify(true)
                        .context("failed to resolve specs")?;
                    let hashes = BuildContext::spec_hashes(&dep_graph, &sorted_ids).await?
                        .into_iter()
                        .map(|hash| sha256_to_string(&hash))
                        .collect::<BTreeSet<_>>();
                    
                    let mut outdated = Vec::new();
                    let mut find_stat = find_installed_pkgs().await
                        .context(format!("failed to read '{}'", PKGS_ROOT.display()))?;
                    while let Some((file_name, stat_spec)) = find_stat.next().await? {
                        if hashes.contains(&sha256_to_string(&stat_spec.sha256)) {
                            continue;
                        }
                        let path = PKGS_ROOT.join(&file_name);
                        let mut stat_file = PkgStatFile::open(&path).await
                            .context(format!("failed to read '{}'", path.display()))?;
                        let stat_toolchain_name = stat_file.read_toolchain_name().await
                            .context(format!("failed to read toolchain name from '{}'", path.display()))?;
                        if toolchain_name == stat_toolchain_name {
                            outdated.push(file_name);
                        }
                    }

                    if !outdated.is_empty() {
                        let mut dump = Vec::new();
                        if dump_outdated_pkgs(outdated.as_slice(), &mut dump).is_ok() {
                            info!("{}", String::from_utf8_lossy(&dump));
                        }
                    }

                    if !outdated.is_empty() && confirm("Do you want to continue?").context("failed to read input")? {
                        for outdated_pkg in outdated {
                            let stat_path = PKGS_ROOT.join(&outdated_pkg);
                            let dist_path = PKGS_ROOT.join(outdated_pkg.strip_suffix(".stat").unwrap());
                            tokio::fs::remove_file(&stat_path).await
                                .context(format!("failed to remove '{}'", stat_path.display()))?;
                            tokio::fs::remove_dir_all(&dist_path).await
                                .context(format!("failed to remove '{}'", dist_path.display()))?;
                        }
                    }
                },
            }
        }

        Ok(())
    }
}

/// Writes a human-readable listing of invalid file paths to `w`:
/// a header line followed by one `  - <path>` line per entry.
fn dump_invalid_files<W: Write>(invalid_files: &[PathBuf], mut w: W) -> std::io::Result<()> {
    writeln!(w, "Invalid files:")?;
    invalid_files
        .iter()
        .try_for_each(|path| writeln!(w, "  - {}", path.display()))
}

/// Writes a human-readable listing of invalid stat-file paths to `w`:
/// a header line followed by one `  - <path>` line per entry.
fn dump_invalid_stats<W: Write>(invalid_stats: &[PathBuf], mut w: W) -> std::io::Result<()> {
    // Assemble the whole listing first, then emit it with a single write.
    let mut listing = String::from("Invalid stats:\n");
    for path in invalid_stats {
        listing.push_str(&format!("  - {}\n", path.display()));
    }
    w.write_all(listing.as_bytes())
}

/// Writes a human-readable listing of invalid directory paths to `w`:
/// a header line followed by one `  - <path>` line per entry.
fn dump_invalid_dirs<W: Write>(invalid_dirs: &[PathBuf], mut w: W) -> std::io::Result<()> {
    w.write_all(b"Invalid dirs:\n")?;
    for dir in invalid_dirs.iter() {
        w.write_all(format!("  - {}\n", dir.display()).as_bytes())?;
    }
    Ok(())
}

/// Writes a human-readable listing of outdated package names to `w`:
/// a header line followed by one `  - <name>` line per entry.
///
/// Each entry is the stat file name with its `.stat` suffix stripped.
/// A name without the suffix is printed as-is instead of panicking — a
/// dump helper used for display must never abort the whole cleanup.
fn dump_outdated_pkgs<W: Write>(outdated: &[String], mut w: W) -> std::io::Result<()> {
    writeln!(w, "Outdated pkgs:")?;
    for outdated_pkg in outdated {
        let name = outdated_pkg.strip_suffix(".stat").unwrap_or(outdated_pkg);
        writeln!(w, "  - {}", name)?;
    }
    Ok(())
}
