use crate::{SolInput, SolInputKind};
use alloy_json_abi::{ContractObject, JsonAbi, ToSolConfig};
use proc_macro2::{Ident, TokenStream, TokenTree};
use quote::quote;
use syn::{AttrStyle, Result};

impl SolInput {
    /// Normalize JSON ABI inputs into Sol inputs.
    ///
    /// A `SolInputKind::Json` input is converted into a `SolInputKind::Sol`
    /// one by rendering the ABI as Solidity source, lexing that source into
    /// tokens, splitting the tokens into per-library and interface items,
    /// re-attaching the caller's attributes, and parsing the result back into
    /// a Solidity AST. Any other input kind is returned unchanged.
    pub fn normalize_json(self) -> Result<Self> {
        // Only JSON inputs are normalized; everything else passes through.
        let SolInput {
            attrs,
            path,
            kind: SolInputKind::Json(name, ContractObject { abi, bytecode, deployed_bytecode }),
        } = self
        else {
            return Ok(self);
        };

        let mut abi = abi.ok_or_else(|| syn::Error::new(name.span(), "ABI not found in JSON"))?;
        // Render the (deduplicated) ABI to Solidity source, then lex it into
        // Rust tokens so it can be re-emitted below.
        let sol = abi_to_sol(&name, &mut abi);
        let mut all_tokens = tokens_for_sol(&name, &sol)?.into_iter();

        // Inner attributes (`#![...]`) stay at the top of the expansion;
        // outer attributes are re-applied to the generated items below.
        let (inner_attrs, attrs) = attrs
            .into_iter()
            .partition::<Vec<_>, _>(|attr| matches!(attr.style, AttrStyle::Inner(_)));

        let (derives, sol_derives) = extract_derive_attrs(&attrs);

        // The splitting below assumes the generated source is zero or more
        // `library ... { ... }` items followed by one `interface` item.
        // `take_while` consumes everything up to (and including, without
        // yielding) the `interface` keyword; `skip_while` drops the leading
        // `library` keyword so the loop below can re-insert one per library.
        let mut library_tokens_iter = all_tokens
            .by_ref()
            .take_while(|tt| !matches!(tt, TokenTree::Ident(id) if id == "interface"))
            .skip_while(|tt| matches!(tt, TokenTree::Ident(id) if id == "library"))
            .peekable();

        let library_tokens = library_tokens_iter.by_ref();

        let mut libraries = Vec::new();

        // Re-assemble each library: prepend the `library` keyword that was
        // consumed as a separator, and attach the propagated derive
        // attributes. The inner `take_while` consumes (without yielding) the
        // `library` keyword that starts the next item, if any.
        while library_tokens.peek().is_some() {
            let sol_library_tokens: TokenStream = std::iter::once(TokenTree::Ident(id("library")))
                .chain(
                    library_tokens
                        .take_while(|tt| !matches!(tt, TokenTree::Ident(id) if id == "library")),
                )
                .collect();

            let tokens = quote! {
                #(#derives)*
                #(#sol_derives)*
                #sol_library_tokens
            };

            libraries.push(tokens);
        }
        // The `interface` keyword itself was consumed by the outer
        // `take_while`, so prepend it back before the remaining tokens.
        let sol_interface_tokens: TokenStream =
            std::iter::once(TokenTree::Ident(id("interface"))).chain(all_tokens).collect();
        // Forward the bytecode objects (when present) as `#[sol(...)]`
        // arguments on the generated interface.
        let bytecode = bytecode.map(|bytes| {
            let s = bytes.to_string();
            quote!(bytecode = #s,)
        });
        let deployed_bytecode = deployed_bytecode.map(|bytes| {
            let s = bytes.to_string();
            quote!(deployed_bytecode = #s)
        });

        let attrs_iter = attrs.iter();
        // Document the generated interface with both the rendered Solidity
        // source and the original JSON ABI, for easier debugging of the
        // expansion.
        let doc_str = format!(
            "\n\n\
Generated by the following Solidity interface...
```solidity
{sol}
```

...which was generated by the following JSON ABI:
```json
{json_s}
```",
            json_s = serde_json::to_string_pretty(&abi).unwrap()
        );
        let tokens = quote! {
            #(#inner_attrs)*
            #(#libraries)*

            #(#attrs_iter)*
            #[doc = #doc_str]
            #[sol(#bytecode #deployed_bytecode)]
            #sol_interface_tokens
        };

        // Round-trip the generated tokens through the Solidity parser; a
        // failure here is a bug in the ABI-to-Solidity generation, not in the
        // user's input.
        let ast: ast::File = syn::parse2(tokens).map_err(|e| {
            let msg = format!(
                "failed to parse ABI-generated tokens into a Solidity AST for `{name}`: {e}.\n\
                 This is a bug. We would appreciate a bug report: \
                 https://github.com/alloy-rs/core/issues/new/choose"
            );
            syn::Error::new(name.span(), msg)
        })?;

        let kind = SolInputKind::Sol(ast);
        Ok(SolInput { attrs, path, kind })
    }
}

// NOTE: no JSON parsing happens here — `abi_to_sol` operates on an
// already-deserialized `JsonAbi` and only renders it to Solidity source.

/// Renders the ABI as a Solidity interface named `name`, deduplicating its
/// items first.
fn abi_to_sol(name: &Ident, abi: &mut JsonAbi) -> String {
    abi.dedup();
    abi.to_sol(
        &name.to_string(),
        Some(ToSolConfig::new().print_constructors(true).for_sol_macro(true)),
    )
}

/// Returns `sol!` tokens.
pub fn tokens_for_sol(name: &Ident, sol: &str) -> Result<TokenStream> {
    let mk_err = |s: &str| {
        let msg = format!(
            "`JsonAbi::to_sol` generated invalid Rust tokens for `{name}`: {s}\n\
             This is a bug. We would appreciate a bug report: \
             https://github.com/alloy-rs/core/issues/new/choose"
        );
        syn::Error::new(name.span(), msg)
    };
    let tts = syn::parse_str::<TokenStream>(sol).map_err(|e| mk_err(&e.to_string()))?;
    Ok(tts
        .into_iter()
        .map(|mut tt| {
            if matches!(&tt, TokenTree::Ident(id) if id == name) {
                tt.set_span(name.span());
            }
            tt
        })
        .collect())
}

/// Extract both regular and `sol` derive attributes for propagation further.
///
/// Returns `(derives, sol_derives)`: plain `#[derive(...)]` attributes, and
/// `#[sol(...)]` attributes that mention `all_derives` or `extra_derives`.
fn extract_derive_attrs(attrs: &[syn::Attribute]) -> (Vec<&syn::Attribute>, Vec<&syn::Attribute>) {
    let mut derives = Vec::new();
    let mut sol_derives = Vec::new();
    for attr in attrs {
        let path = attr.path();
        if path.is_ident("derive") {
            derives.push(attr);
            continue;
        }
        if !path.is_ident("sol") {
            continue;
        }
        // Only `#[sol(...)]` list attributes can carry derive settings.
        let Ok(list) = attr.meta.require_list() else { continue };
        let mut has_derives = false;
        // Parse errors are ignored on purpose: an unparseable `sol` attribute
        // simply contributes no derives here.
        let _ = list.parse_nested_meta(|meta| {
            if meta.path.is_ident("all_derives") || meta.path.is_ident("extra_derives") {
                has_derives = true;
            }
            Ok(())
        });
        if has_derives {
            sol_derives.push(attr);
        }
    }
    (derives, sol_derives)
}

/// Parses `s` into an [`Ident`].
///
/// Panics if `s` is not a valid identifier.
#[inline]
#[track_caller]
fn id(s: impl AsRef<str>) -> Ident {
    let s = s.as_ref();
    // Routed through `syn`'s parser because `Ident::new` panics on Rust
    // keywords and `r#` prefixes.
    syn::parse_str(s).unwrap()
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::{Path, PathBuf};

    /// Round-trips every JSON ABI fixture through the Solidity generator.
    #[test]
    #[cfg_attr(miri, ignore = "no fs")]
    fn abi() {
        let dir = concat!(env!("CARGO_MANIFEST_DIR"), "/../json-abi/tests/abi");
        for entry in std::fs::read_dir(dir).unwrap() {
            let path = entry.unwrap().path();
            if path.extension() != Some("json".as_ref()) {
                continue;
            }

            // Skip fixtures known not to round-trip through this test.
            let file_name = path.file_name();
            if file_name == Some("LargeFunction.json".as_ref())
                || file_name == Some("SomeLibUser.json".as_ref())
            {
                continue;
            }

            let json = std::fs::read_to_string(&path).unwrap();
            parse_test(&json, path.to_str().unwrap());
        }
    }

    /// Expands one JSON ABI to tokens and parses it back to a Solidity AST,
    /// dumping the intermediate artifacts to temp files on failure.
    fn parse_test(s: &str, path: &str) {
        let mut abi: JsonAbi = serde_json::from_str(s).unwrap();
        let name = Path::new(path).file_stem().unwrap().to_str().unwrap();
        let name_id = id(name);

        let sol = abi_to_sol(&name_id, &mut abi);
        let tokens = tokens_for_sol(&name_id, &sol).unwrap_or_else(|e| {
            let spath = write_tmp_sol(name, &sol);
            panic!(
                "couldn't expand JSON ABI for {name:?}: {e}\n\
                 emitted interface: {}",
                spath.display()
            );
        });

        if let Err(e) = syn::parse2::<ast::File>(tokens.clone()) {
            let spath = write_tmp_sol(name, &sol);
            let tpath = write_tmp_sol(&format!("{name}.tokens"), &tokens.to_string());
            panic!(
                "couldn't parse expanded JSON ABI back to AST for {name:?}: {e}\n\
                 emitted interface: {}\n\
                 emitted tokens:    {}",
                spath.display(),
                tpath.display(),
            );
        }
    }

    /// Writes `contents` to a temp `.sol` file and returns its path.
    fn write_tmp_sol(name: &str, contents: &str) -> PathBuf {
        let dest = std::env::temp_dir().join(format!("sol-macro-{name}.sol"));
        std::fs::write(&dest, contents).unwrap();
        // Best-effort formatting for readability; ignore failures (`forge`
        // may not be installed).
        let _ = std::process::Command::new("forge").arg("fmt").arg(&dest).output();
        dest
    }
}
