use std::collections::BTreeMap;
use std::io::ErrorKind;
use std::path::PathBuf;
use std::{fmt, str};

use entity::unsync_crates;
use futures::{stream, StreamExt};
use serde::{Deserialize, Serialize};
use tokio::fs::File;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio_stream::wrappers::ReceiverStream;
use url::Url;
use walkdir::{DirEntry, WalkDir};

use crate::cloud::s3::S3cmd;
use crate::cloud::{self, CloudStorage};
use crate::config::{CratesConfig, ProxyConfig};
use crate::database::context::Context;
use crate::download::{download_and_check_hash, DownloadOptions};
use crate::errors::FreightResult;

use super::index::CrateIndex;
use super::{utils, DownloadMode};

/// CratesOptions preserve the sync subcommand config
#[derive(Clone, Default)]
pub struct CratesOptions {
    pub crates: CratesConfig,

    pub proxy: ProxyConfig,

    // local crates.io registry index (git working tree) location
    pub index: CrateIndex,

    /// Whether to hide progressbar when start sync.
    pub no_progressbar: bool,

    /// Which sync strategy to run: full init, incremental retry, or single-crate fix.
    pub download_mode: DownloadMode,

    // upload each crate file to S3 right after it is downloaded
    pub upload: bool,

    // local root directory where downloaded `.crate` files are stored
    pub crates_path: PathBuf,

    // handle a single crate with name
    pub crates_name: Option<String>,

    pub log_path: PathBuf,

    // target S3 bucket used by the upload helpers
    pub bucket_name: String,

    // remove the local file after a successful S3 upload
    pub delete_after_upload: bool,

    // skip save err record to database, return err to caller, set true in fix mode
    pub fix_mode: bool,
}

impl CratesOptions {
    /// Resolve the on-disk index file path for a crate by joining the
    /// registry-layout suffix (computed by `utils::index_suffix`) onto the
    /// index root directory.
    pub fn get_index_path(&self, name: &str) -> PathBuf {
        self.index.path.join(utils::index_suffix(name))
    }
}

impl fmt::Debug for CratesOptions {
    /// Manual `Debug` implementation listing every field of `CratesOptions`.
    ///
    /// The stale `"notifier"` entry (a field that does not exist on the
    /// struct) has been removed so the debug output matches the actual
    /// struct definition.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("CratesOptions")
            .field("crates", &self.crates)
            .field("proxy", &self.proxy)
            .field("index", &self.index)
            .field("no_progressbar", &self.no_progressbar)
            .field("download_mode", &self.download_mode)
            .field("upload", &self.upload)
            .field("crates_path", &self.crates_path)
            .field("crates_name", &self.crates_name)
            .field("log_path", &self.log_path)
            .field("bucket_name", &self.bucket_name)
            .field("delete_after_upload", &self.delete_after_upload)
            .field("fix_mode", &self.fix_mode)
            .finish()
    }
}

/// Crate preserve the crates info parse from registry json file
///
/// One `IndexFile` corresponds to one line of a registry index file,
/// i.e. one published version of a crate.
#[derive(Serialize, Deserialize, Debug)]
pub struct IndexFile {
    pub name: String,
    // version string of this release; combined with `name` to build the
    // download URL `{domain}/{name}/{name}-{vers}.crate`
    pub vers: String,
    pub deps: Vec<Dependency>,
    // checksum of the `.crate` file, verified after download;
    // optional in the index schema
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cksum: Option<String>,
    pub features: BTreeMap<String, Vec<String>>,
    // extended features table — NOTE(review): presumably the v2 index
    // `features2` field; confirm against the crates.io index format
    #[serde(skip_serializing_if = "Option::is_none")]
    pub features2: Option<BTreeMap<String, Vec<String>>>,
    pub yanked: Option<bool>,
    #[serde(default)]
    pub links: Option<String>,
    // index schema version of this record
    #[serde(skip_serializing_if = "Option::is_none")]
    pub v: Option<u32>,
}

/// Dependencies maintain relationships between crate
///
/// Mirrors one entry of the `deps` array in a registry index line.
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Ord, Eq)]
pub struct Dependency {
    pub name: String,
    // semver requirement string, e.g. "^1.0"
    pub req: String,
    pub features: Vec<String>,
    pub optional: bool,
    pub default_features: bool,
    // set when the dependency only applies to a specific target platform
    pub target: Option<String>,
    // normal / build / dev stage of the dependency
    pub kind: Option<DependencyKind>,
    // real package name when the dependency is renamed in Cargo.toml
    #[serde(skip_serializing_if = "Option::is_none")]
    pub package: Option<String>,
}

/// DependencyKind represents which stage the current dependency is
///
/// Serialized in lowercase ("normal", "build", "dev") to match the
/// registry index JSON format.
#[derive(Copy, Clone, Serialize, Deserialize, Debug, PartialEq, PartialOrd, Ord, Eq)]
#[serde(rename_all = "lowercase")]
pub enum DependencyKind {
    Normal,
    Build,
    Dev,
}

/// full download and Incremental download from registry
///
/// Dispatches to the strategy selected by `download_mode` and propagates
/// its error to the caller with `?` (instead of panicking via `unwrap`,
/// which defeated the purpose of returning `FreightResult`).
pub async fn download_by_mode(context: Context) -> FreightResult {
    match context.crates_options.download_mode {
        DownloadMode::Init => full_downloads(context).await?,
        DownloadMode::Fix => fix_download(context).await?,
        DownloadMode::Increment => incremental_download(context).await?,
    }
    Ok(())
}

/// <https://github.com/rust-lang/crates.io-index/blob/master/.github/workflows/update-dl-url.yml>
///
/// ```YAML
///env:
///   URL_api: "https://crates.io/api/v1/crates"
///   URL_cdn: "https://static.crates.io/crates/{crate}/{crate}-{version}.crate"
///   URL_s3_primary: "https://crates-io.s3-us-west-1.amazonaws.com/crates/{crate}/{crate}-{version}.crate"
///   URL_s3_fallback: "https://crates-io-fallback.s3-eu-west-1.amazonaws.com/crates/{crate}/{crate}-{version}.crate"
/// ```
///
/// Full sync: walk the whole registry index tree and download every crate
/// version listed. A blocking directory walk feeds index-file paths through
/// an mpsc channel to an async consumer that parses each file and downloads
/// its crates with bounded concurrency (32 index files in flight).
pub async fn full_downloads(context: Context) -> FreightResult {
    // Large buffer so the fast directory walk rarely blocks on the consumer.
    let (tx, rx) = tokio::sync::mpsc::channel(100000);
    let iter_path = context.crates_options.index.path.clone();
    let walkdir_handle = tokio::spawn(async move {
        // WalkDir is synchronous I/O, so run it on the blocking thread pool.
        tokio::task::spawn_blocking(move || {
            WalkDir::new(iter_path)
                .into_iter()
                .filter_entry(is_not_hidden)
                .filter_map(|v| v.ok())
                // Index data files have no extension (e.g. `se/rd/serde`);
                // this also skips files like `config.json`.
                .filter(|x| x.file_type().is_file() && x.path().extension().is_none())
                .for_each(|x| {
                    let path = x.path().to_owned();
                    // Panics only if the receiver task has died and dropped `rx`.
                    tx.blocking_send(path).unwrap()
                })
        })
        .await
        .unwrap();
    });
    let receiver_stream = ReceiverStream::new(rx);
    let receiver_handle = tokio::spawn(async move {
        let stream = receiver_stream.map(|path| {
            let context = context.clone();
            async move {
                parse_index_and_download(context, path).await.unwrap();
            }
        });
        // The empty async {} block is used because the actual processing is already done in the map
        stream.buffer_unordered(32).for_each(|_| async {}).await;
    });
    walkdir_handle.await.unwrap();
    receiver_handle.await.unwrap();

    Ok(())
}

/// Incremental sync: retry every crate recorded in the `unsync_crates` table.
///
/// Records are processed with bounded concurrency (16). On success the retry
/// record is deleted; on failure the error is logged and the record is kept
/// so a later run can try again.
pub async fn incremental_download(context: Context) -> FreightResult {
    let unsync_records = context.db_service.get_all_unsync_crates().await.unwrap();
    if !unsync_records.is_empty() {
        stream::iter(unsync_records)
            .for_each_concurrent(16, |record| {
                let context = context.clone();
                async move {
                    // Destructure for the crate name; other columns unused here.
                    let unsync_crates::Model {
                        id: _,
                        crate_name,
                        create_time: _,
                    } = record;
                    let index_path = context.crates_options.get_index_path(&crate_name);

                    match parse_index_and_download(context.clone(), index_path).await {
                        Ok(_) => {
                            // Crate handled successfully: drop the retry record.
                            context
                                .db_service
                                .delete_unsync_record(&crate_name)
                                .await
                                .unwrap();
                            tracing::info!("handle success, delete record: {}", &crate_name);
                        }
                        Err(err) => tracing::error!("Error processing {}: {:?}", crate_name, err),
                    }
                }
            })
            .await;
    }
    Ok(())
}

/// fix the previous error download crates
///
/// Forces `fix_mode` on so download errors are returned to this caller
/// (instead of being recorded in the database), then re-processes the
/// single crate named by `--name`. Errors now propagate via `?` rather
/// than panicking, matching fix mode's documented contract.
///
/// # Panics
/// Panics if no crate name was supplied.
pub async fn fix_download(mut context: Context) -> FreightResult {
    // fix mode: errors must surface to the caller rather than be persisted.
    context.crates_options.fix_mode = true;

    let opts = context.crates_options.clone();

    match &opts.crates_name {
        Some(name) => {
            let index_path = opts.get_index_path(name);
            parse_index_and_download(context, index_path).await?;
        }
        None => panic!("Provide a crate name to fix with arg --name"),
    }

    Ok(())
}

/// Upload synced crate files to the configured S3 bucket.
///
/// With no crate name configured, the whole crates directory is uploaded
/// via the worker pool; otherwise only that single crate's directory is
/// uploaded. Pool-upload failures are propagated with `?` instead of
/// panicking.
pub fn upload_to_s3(context: Context) -> FreightResult {
    let s3cmd = S3cmd::default();
    let opts = context.crates_options.clone();
    match opts.crates_name.clone() {
        None => {
            cloud::upload_with_pool(opts.crates_path.clone(), opts.bucket_name.clone(), s3cmd)?;
        }
        Some(name) => cloud::upload_single_dir(
            opts.crates_path.clone(),
            name,
            opts.bucket_name.clone(),
            s3cmd,
        ),
    }
    Ok(())
}

/// Check whether the directory is hidden
///
/// Returns `true` for entries the walk should keep: the walk root itself
/// (depth 0) or any entry whose file name does not begin with a dot.
/// Entries whose names are not valid UTF-8 are treated as hidden.
pub fn is_not_hidden(entry: &DirEntry) -> bool {
    match entry.file_name().to_str() {
        Some(name) => entry.depth() == 0 || !name.starts_with('.'),
        None => false,
    }
}

/// Parse one registry index file and download every crate version it lists.
///
/// Each line of the file is a JSON `IndexFile` record. Malformed JSON lines
/// and unparsable URLs are logged and skipped. Downloads run with bounded
/// concurrency. A missing index file is only a warning (the crate may have
/// been removed from crates.io); any other open error is returned.
///
/// NOTE(review): all download futures are collected into `tasks` before any
/// run, so memory grows with the number of versions in the file — fine for
/// typical index files, worth confirming for the largest crates.
pub async fn parse_index_and_download(context: Context, index_path: PathBuf) -> FreightResult {
    match File::open(&index_path).await {
        Ok(file) => {
            let opts = context.crates_options.clone();
            let reader = BufReader::new(file);
            let mut lines = reader.lines();

            const MAX_CONCURRENT: usize = 16;

            let mut tasks = vec![];

            while let Some(line) = lines.next_line().await? {
                // One JSON record per line; skip (but log) anything malformed.
                let c: IndexFile = match serde_json::from_str(&line) {
                    Ok(v) => v,
                    Err(e) => {
                        tracing::error!("JSON Parse Error: {e}, line: {line}");
                        continue;
                    }
                };

                // Download URL layout: {domain}/{name}/{name}-{vers}.crate
                let url = match Url::parse(&format!(
                    "{}/{}/{}-{}.crate",
                    opts.crates.domain, &c.name, &c.name, &c.vers
                )) {
                    Ok(u) => u,
                    Err(e) => {
                        tracing::error!("URL Parse Error: {e}");
                        continue;
                    }
                };

                let file_path = utils::get_file_path(&opts.crates_path, &c.name, &c.vers);
                let context_clone = context.clone();

                // Failures are logged here; persistence/propagation of the
                // error is handled inside download_crates_with_log.
                tasks.push(async move {
                    if let Err(e) = download_crates_with_log(context_clone, file_path, url, c).await
                    {
                        tracing::error!("Download failed: {e}");
                    }
                });
            }

            // limit concurrent
            stream::iter(tasks)
                .for_each_concurrent(MAX_CONCURRENT, |task| async {
                    task.await;
                })
                .await;
        }
        Err(err) => match err.kind() {
            ErrorKind::NotFound => {
                // The index file can legitimately disappear upstream.
                tracing::warn!(
                    "This file might have been removed from crates.io:{}",
                    &index_path.display()
                );
            }
            _ => return Err(err.into()),
        },
    };
    Ok(())
}

pub async fn download_crates_with_log(
    context: Context,
    path: PathBuf,
    url: Url,
    index_file: IndexFile,
) -> FreightResult {
    let opts = context.crates_options.clone();
    let down_opts = &DownloadOptions {
        proxy: opts.proxy.clone(),
        url,
        path,
    };

    let cksum = index_file.cksum.clone().unwrap();
    match download_and_check_hash(down_opts, Some(&cksum), false).await {
        Ok(_) => {
            let path = &down_opts.path;
            if opts.upload {
                let s3 = S3cmd::default();
                let s3_path = format!(
                    "crates{}",
                    path.to_str()
                        .unwrap()
                        .replace(opts.crates_path.to_str().unwrap(), "")
                );
                tracing::info!("s3_path: {}, {}", s3_path, opts.delete_after_upload);
                let uploaded = s3.upload_file(path, &s3_path, &opts.bucket_name);
                if uploaded.is_ok() && opts.delete_after_upload {
                    tokio::fs::remove_file(path).await.unwrap();
                }
            }
            Ok(())
        }
        Err(err) => {
            if !opts.fix_mode {
                context
                    .db_service
                    .save_unsync_crates(&index_file.name)
                    .await
                    .unwrap();
                tracing::info!("download crates err: {:?}", err);
                Ok(())
            } else {
                // fix mode need return err to caller
                Err(err)
            }
        }
    }
}
