use anyhow::Ok;
pub use pest::{Parser, pratt_parser::*, iterators::*, error::*};
pub use pest_derive::Parser;
use anyhow::{Result};
use clap::{Arg, Command, Parser as ClapParser};
use std::fs;
use std::env;
use std::io::{self, Read};
use std::collections::HashMap;
use std::io::BufRead;
use std::path::{Path, PathBuf};
use super::token::*;

/// Pest-generated lexer/parser for SysY.
///
/// The `Rule` enum consumed throughout this module is generated by
/// `pest_derive` from the grammar file `sysy.pest` at compile time.
#[derive(Parser)]
#[grammar = "sysy.pest"]
pub struct SysyParser;

// Command-line arguments for `app_main`, parsed via clap's derive API.
// NOTE: plain `//` comments are used deliberately — `///` doc comments on
// a clap-derive struct/field are picked up as CLI about/help text and
// would change the program's --help output.
#[derive(Debug, ClapParser)]
pub struct Cli {
    // Path of the SysY source file to tokenize (sole positional argument).
    input: String,
}

pub fn parse_token(p: &Pair<Rule>) -> Token {
    match p.as_rule() {
        // ident
        Rule::IDENT => Token::Ident(p.as_str().to_owned()),
        // number
        Rule::DECIMAL_CONST => {
            let v = p.as_str().parse().unwrap();
            Token::Integer(v)
        }
        Rule::OCTAL_CONST => {
            let s = &p.as_str()[1..];
            let v = i64::from_str_radix(s, 8).unwrap();
            Token::Integer(v)
        }
        Rule::HEX_CONST => {
            let s = &p.as_str()[2..];
            let v = i64::from_str_radix(s, 16).unwrap();
            Token::Integer(v)
        }
        // keyword
        Rule::CONST => Token::Keyword(Keyword::CONST),
        Rule::INT => Token::Keyword(Keyword::INT),
        Rule::FLOAT => Token::Keyword(Keyword::FLOAT),
        Rule::VOID => Token::Keyword(Keyword::VOID),
        Rule::IF => Token::Keyword(Keyword::IF),
        Rule::ELSE => Token::Keyword(Keyword::ELSE),
        Rule::WHILE => Token::Keyword(Keyword::WHILE),
        Rule::BREAK => Token::Keyword(Keyword::BREAK),
        Rule::CONTINUE => Token::Keyword(Keyword::CONTINUE),
        Rule::RETURN => Token::Keyword(Keyword::RETURN),
        // op
        Rule::PLUS => Token::Operator(Operator::PLUS),
        Rule::MINUS => Token::Operator(Operator::MINUS),
        Rule::MUL => Token::Operator(Operator::MUL),
        Rule::DIV => Token::Operator(Operator::DIV),
        Rule::MOD => Token::Operator(Operator::MOD),
        Rule::ASSIGN => Token::Operator(Operator::ASSIGN),
        Rule::EQ  => Token::Operator(Operator::EQ), 
        Rule::NEQ => Token::Operator(Operator::NEQ),
        Rule::LT  => Token::Operator(Operator::LT),
        Rule::GT  => Token::Operator(Operator::GT),
        Rule::LE  => Token::Operator(Operator::LE),
        Rule::GE  => Token::Operator(Operator::GE),
        Rule::NOT => Token::Operator(Operator::NOT),
        Rule::AND => Token::Operator(Operator::AND),
        Rule::OR => Token::Operator(Operator::OR),
        // delimit
        Rule::L_PAREN => Token::Delimit(Delimit::L_PAREN),
        Rule::R_PAREN => Token::Delimit(Delimit::R_PAREN),
        Rule::L_BRACE => Token::Delimit(Delimit::L_BRACE),
        Rule::R_BRACE => Token::Delimit(Delimit::R_BRACE),
        Rule::L_BRACKT => Token::Delimit(Delimit::L_BRACKT),
        Rule::R_BRACKT => Token::Delimit(Delimit::R_BRACKT),
        Rule::COMMA => Token::Delimit(Delimit::COMMA),
        Rule::SEMICOLON => Token::Delimit(Delimit::SEMICOLON),
        // 
        Rule::EOI => Token::EOI,
        _ => unreachable!(),
    }
}

/// Lexes `input` into a flat token stream with source positions.
///
/// Runs the pest `PROGRAM` rule over the entire input and converts each
/// matched pair into a `TokenComposite` carrying the token plus its
/// 1-based line/column position and byte length.
///
/// # Errors
/// Returns the pest parse error unchanged when the input does not match
/// the grammar.
pub fn tokenize(input: &str) -> Result<Vec<TokenComposite>, Error<Rule>> {
    let pairs = SysyParser::parse(Rule::PROGRAM, input)?;
    let tokens = pairs
        .map(|pair| {
            let (line, column) = pair.line_col();
            let span = pair.as_span();
            TokenComposite {
                token: parse_token(&pair),
                attri: TokenAttri {
                    position: Position { line, column },
                    len: span.end() - span.start(),
                },
            }
        })
        .collect::<Vec<_>>();
    // `Result::Ok` must stay fully qualified: `use anyhow::Ok` at the top
    // of this file shadows the prelude variant, and the bare form would
    // produce an `anyhow::Result` that does not match this return type.
    Result::Ok(tokens)
}

/// CLI entry point: reads the SysY source file named by the positional
/// argument, tokenizes it, and dumps either the token stream or the
/// parse error to stderr. Always returns `Ok(())` after a parse attempt;
/// only a failed file read propagates as an error.
pub fn app_main() -> Result<()> {
    let cli = Cli::parse();
    let path = cli.input;

    let source = fs::read_to_string(&path)
        .map_err(|e| anyhow::anyhow!("Failed to read program file '{}': {}", &path, e))?;

    match tokenize(&source) {
        Result::Ok(composites) => {
            for composite in &composites {
                let token = &composite.token;
                let attri = &composite.attri;
                eprintln!("{:#?} at Line {:#?}.", token, attri.position.line);
            }
        }
        Result::Err(err) => {
            // Both pest location variants carry the starting line first,
            // so a single or-pattern covers them.
            let line: usize = match err.line_col {
                LineColLocation::Pos((l, _)) | LineColLocation::Span((l, _), _) => l,
            };
            eprintln!("Error type A at Line {:#?}:{}", line, err);
        }
    }
    Ok(())
}

// 
/// Demo entry point built with clap's builder API: accepts a program via
/// `-p/--program` (inline) or `-f/--file` (path), an optional positional
/// input file (falling back to stdin), and a `-F/--field-separator`
/// option. Currently it only loads the program and input text and
/// returns; nothing is parsed yet.
pub fn test() -> Result<()> {
    let cli = Command::new("Simple SysY Parser")
        .version("1.0")
        .author("neo <niujiaming126@126.com>")
        .about("A simple SysY parser built with pest")
        .long_about("
This simple SysY parser demonstrates how to build a complete language interpreter using Rust and pest.

Examples:
  compiler example.sy
  compiler -p '{ 1 + 2 }' data.txt
  compiler -f script.sy input.txt
  cat data.txt | compiler -p '{ 1 + 2 }'
        ")
        .arg(Arg::new("program")
            .short('p')
            .long("program")
            .value_name("PROGRAM")
            .help("program string")
            .long_help("program as a command-line string. Cannot be used with --file.")
            .required_unless_present("file")
            .conflicts_with("file"))
        .arg(Arg::new("file")
            .short('f')
            .long("file")
            .value_name("FILE")
            .help("program file")
            .long_help("Read program from a file. Cannot be used with --program.")
            .required_unless_present("program")
            .conflicts_with("program"))
        .arg(Arg::new("input")
            .help("Input file (reads from stdin if not provided)")
            .long_help("Input data file to process. If not provided, reads from standard input.")
            .index(1))
        .arg(Arg::new("field-separator")
            .short('F')
            .long("field-separator")
            .value_name("FS")
            .help("Field separator pattern")
            .long_help("Set the field separator. Default is whitespace. Examples: -F ',' for CSV, -F ':' for /etc/passwd"))
        .get_matches();

    // Exactly one of --program / --file is present: clap enforces both
    // the mutual exclusion and the "at least one" requirement above.
    let program_text = match (
        cli.get_one::<String>("program"),
        cli.get_one::<String>("file"),
    ) {
        (Some(inline), _) => inline.clone(),
        (None, Some(path)) => fs::read_to_string(path)
            .map_err(|e| anyhow::anyhow!("Failed to read program file '{}': {}", path, e))?,
        (None, None) => unreachable!("clap should ensure either program or file is provided"),
    };

    // Input data: the named file if given, otherwise slurp all of stdin.
    let input_text = match cli.get_one::<String>("input") {
        Some(input_file) => fs::read_to_string(input_file)
            .map_err(|e| anyhow::anyhow!("Failed to read input file '{}': {}", input_file, e))?,
        None => {
            let mut buffer = String::new();
            io::stdin().read_to_string(&mut buffer)
                .map_err(|e| anyhow::anyhow!("Failed to read from stdin: {}", e))?;
            buffer
        }
    };

    // Loaded but not yet consumed — parsing is still to be wired in.
    let _ = (program_text, input_text);
    Ok(())
}
