use proc_macro::TokenStream;
use quote::quote;

/// Parsed form of a `seq!` invocation: `IDENT in START..END { BODY }`.
#[derive(Debug)]
struct SeqParser {
    // Loop-variable identifier that gets substituted inside the body.
    variable_ident: syn::Ident,
    // Lower bound of the range (inclusive).
    start: isize,
    // Upper bound of the range (exclusive — expansion iterates `start..end`).
    end: isize,
    // Raw body tokens between the braces, expanded once per value.
    body: proc_macro2::TokenStream,
}

impl syn::parse::Parse for SeqParser {
    /// Parses `IDENT in START..END { body }` and, additionally,
    /// the inclusive form `IDENT in START..=END { body }`.
    ///
    /// An inclusive range is normalized to an exclusive upper bound
    /// (`end + 1`) so the rest of the expansion code can always iterate
    /// `start..end`.
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        let variable_ident = input.parse::<syn::Ident>()?;
        let _ = input.parse::<syn::Token![in]>()?;
        let start: isize = input.parse::<syn::LitInt>()?.base10_parse()?;
        // Accept both `..` (exclusive) and `..=` (inclusive). `..=` must be
        // checked first because `..` would also match its leading tokens.
        let inclusive = input.peek(syn::Token![..=]);
        if inclusive {
            input.parse::<syn::Token![..=]>()?;
        } else {
            input.parse::<syn::Token![..]>()?;
        }
        let end: isize = input.parse::<syn::LitInt>()?.base10_parse()?;
        let body;
        syn::braced!(body in input);
        let body = body.parse::<proc_macro2::TokenStream>()?;
        Ok(Self {
            variable_ident,
            start,
            end: if inclusive { end + 1 } else { end },
            body,
        })
    }
}


/// Substitutes the loop variable throughout `input` for one iteration value `n`.
///
/// Two substitutions are performed:
/// * a bare occurrence of `validate_ident` becomes the unsuffixed literal `n`;
/// * the paste form `prefix~IDENT` (where IDENT is the loop variable) becomes
///   the identifier `prefix<n>`, e.g. `f~N` with `n = 2` → `f2`.
///
/// Groups are rebuilt recursively so substitution reaches nested token trees.
fn parse_token_stream(
    input: &proc_macro2::TokenStream,
    validate_ident: &syn::Ident,
    n: isize,
) -> proc_macro2::TokenStream {
    let mut output = proc_macro2::TokenStream::new();
    let mut token_iter = input.clone().into_iter().peekable();
    while let Some(token) = token_iter.next() {
        match &token {
            proc_macro2::TokenTree::Ident(ident) => {
                // The paste form needs TWO tokens of lookahead (`~`, then the
                // loop variable). `Peekable::peek` only exposes one, so clone
                // the iterator for the second look. (The original code peeked
                // twice and saw the same `~` both times, and it compared
                // against a hard-coded "N" instead of the actual variable.)
                let mut lookahead = token_iter.clone();
                let is_paste = matches!(
                    lookahead.next(),
                    Some(proc_macro2::TokenTree::Punct(p)) if p.as_char() == '~'
                ) && matches!(
                    lookahead.next(),
                    Some(proc_macro2::TokenTree::Ident(i)) if i == *validate_ident
                );
                if is_paste {
                    token_iter.next(); // consume `~`
                    token_iter.next(); // consume the loop variable
                    let new_ident =
                        proc_macro2::Ident::new(&format!("{}{}", ident, n), ident.span());
                    output.extend(quote! { #new_ident });
                } else if ident == validate_ident {
                    // Bare loop variable → numeric literal for this iteration.
                    let lit = proc_macro2::Literal::i64_unsuffixed(n as i64);
                    output.extend(quote! { #lit });
                } else {
                    output.extend(quote! { #token });
                }
            }
            proc_macro2::TokenTree::Group(group) => {
                // Recurse into the group and rebuild it with the same delimiter.
                let inner = parse_token_stream(&group.stream(), validate_ident, n);
                let wrapped = proc_macro2::Group::new(group.delimiter(), inner);
                output.extend(quote! { #wrapped });
            }
            _ => {
                // Punctuation and literals pass through unchanged.
                output.extend(quote! { #token });
            }
        }
    }
    output
}

/// Scans `input` for `#( ... )*` repetition sections and expands each one
/// once per value in `start..end` (via [`parse_token_stream`]); all other
/// tokens are copied through unchanged.
///
/// Returns the rewritten stream plus a flag telling whether ANY repetition
/// section was found anywhere (including inside nested groups) — the caller
/// uses the flag to decide between section mode and whole-body expansion.
fn parse_token_stream_pattern(
    input: &proc_macro2::TokenStream,
    validate_ident: &syn::Ident,
    start: isize,
    end: isize,
) -> (proc_macro2::TokenStream, bool) {
    let token_buffer = syn::buffer::TokenBuffer::new2(input.clone());
    let mut output = proc_macro2::TokenStream::new();
    let mut cursor = token_buffer.begin();
    let mut repeat_section = false;
    while !cursor.eof() {
        // `#( ... )*` — the repetition marker.
        if let Some((punct, next_cursor)) = cursor.punct() {
            if punct.as_char() == '#' {
                if let Some((group, _, cursor_2)) =
                    next_cursor.group(proc_macro2::Delimiter::Parenthesis)
                {
                    if let Some((end_punct, cursor_3)) = cursor_2.punct() {
                        if end_punct.as_char() == '*' {
                            for n in start..end {
                                output.extend(parse_token_stream(
                                    &group.token_stream(),
                                    validate_ident,
                                    n,
                                ));
                            }
                            cursor = cursor_3;
                            repeat_section = true;
                            continue;
                        }
                    }
                }
            }
        }
        // Recurse into groups of every delimiter so nested `#(...)*` sections
        // are found and expanded in place.
        let mut entered_group = false;
        for delim in [
            proc_macro2::Delimiter::Brace,
            proc_macro2::Delimiter::Bracket,
            proc_macro2::Delimiter::Parenthesis,
        ] {
            if let Some((group_cursor, _, next_cursor)) = cursor.group(delim) {
                let (group_output, group_repeat) = parse_token_stream_pattern(
                    &group_cursor.token_stream(),
                    validate_ident,
                    start,
                    end,
                );
                let wrapped = proc_macro2::Group::new(delim, group_output);
                output.extend(quote! { #wrapped });
                // Accumulate rather than assign: a later group with no
                // repetition must not clobber a `true` from an earlier one.
                repeat_section |= group_repeat;
                cursor = next_cursor;
                entered_group = true;
                break;
            }
        }
        if entered_group {
            continue;
        }
        if let Some((ident, next_cursor)) = cursor.ident() {
            output.extend(quote! { #ident });
            cursor = next_cursor;
        } else if let Some((punct, next_cursor)) = cursor.punct() {
            output.extend(quote! { #punct });
            cursor = next_cursor;
        } else if let Some((literal, next_cursor)) = cursor.literal() {
            output.extend(quote! { #literal });
            cursor = next_cursor;
        } else if let Some((lifetime, next_cursor)) = cursor.lifetime() {
            output.extend(quote! { #lifetime });
            cursor = next_cursor;
        } else if let Some((token_tree, next_cursor)) = cursor.token_tree() {
            // Fallback for anything unmatched above (e.g. None-delimited
            // groups) — the cursor MUST advance here or the loop never ends.
            output.extend(quote! { #token_tree });
            cursor = next_cursor;
        } else {
            break;
        }
    }
    (output, repeat_section)
}

impl SeqParser {
    /// Whole-body mode: expands the entire body once per value in
    /// `start..end`, substituting the loop variable each time.
    fn body_expand(&self) -> proc_macro2::TokenStream {
        (self.start..self.end)
            .map(|n| parse_token_stream(&self.body, &self.variable_ident, n))
            .collect()
    }

    /// Section mode: expands only `#( ... )*` sections inside the body.
    /// The second element reports whether any such section was found.
    fn body_expand_repeat(&self) -> (proc_macro2::TokenStream, bool) {
        parse_token_stream_pattern(&self.body, &self.variable_ident, self.start, self.end)
    }
}
/// `seq!(N in 0..8 { ... })` — expands the body with `N` substituted by each
/// value in the range.
///
/// If the body contains one or more `#( ... )*` sections, only those sections
/// are repeated (section mode); otherwise the whole body is repeated once per
/// value (whole-body mode).
#[proc_macro]
pub fn seq(input: TokenStream) -> TokenStream {
    let parser = syn::parse_macro_input!(input as SeqParser);
    // Try section mode first; fall back to whole-body expansion when the
    // body contained no `#(...)*` repetition marker.
    let (output, repeat_section) = parser.body_expand_repeat();
    if repeat_section {
        output.into()
    } else {
        parser.body_expand().into()
    }
}
