//! 实现一个简单的json解析器，这个解析器只是简单的针对城市关联文件的解析，
//! 因为考题json文件中的键名有重复，所以弄一个简单的json解析器实现，只有对象类型、字符串、数组。
//!  {
//!     "5": {
//!        "北京": ["房山"],
//!        "北京": ["大兴", "通州"],
//!        "北京": ["房山", "昌平"],
//!        "昌平": ["怀柔"]
//!    }
//! }
//! 上面的形式，注意 键名:字符数组，例如： "昌平": "怀柔" 是错误的，要写成
//! "昌平": ["怀柔"] 才能被识别。

use std::collections::HashSet;
/// Entry point: read `./district.json`, group all transitively connected
/// cities, and return the size of each group as a comma-separated string.
pub fn count_provinces() -> String {
    let raw = std::fs::read_to_string("./district.json").unwrap();
    let grouped = first_filter(raw);
    let merged = second_filter(grouped);
    get_count_string(merged)
}

// First pass: parse the JSON and, within each top-level group, union every
// `city: [neighbors]` entry into the first already-built set it overlaps with.
// The result may still contain sets that overlap *each other*
// (e.g. [[{"自贡","成都",..}, {"桃园","台中","台南",..}, {"台中","台南"}], ..]);
// `second_filter` merges those remaining overlaps.
fn first_filter(content: String) -> Vec<Vec<HashSet<String>>> {
    let token = parse(&content).expect("json文件格式有错误");
    let json = parse_json(token).expect("json文件格式有错误");
    let map = parse_json_to_map(json);

    let mut ret = vec![];
    for (_k, group) in map {
        let mut sets: Vec<HashSet<String>> = vec![];
        for (city, neighbors) in group {
            let mut merged = false;
            for s in &mut sets {
                // The key city already belongs to this set: add its neighbors.
                if s.contains(&city) {
                    merged = true;
                    s.extend(neighbors.iter().cloned());
                    break;
                }
                // One of the neighbors belongs to this set: add key + neighbors.
                if neighbors.iter().any(|value| s.contains(value)) {
                    merged = true;
                    s.insert(city.clone());
                    s.extend(neighbors.iter().cloned());
                    break;
                }
            }
            // No overlapping set found: start a fresh one. `neighbors` is owned
            // and unused afterwards, so it can be moved in without cloning.
            if !merged {
                let mut fresh = HashSet::new();
                fresh.insert(city);
                fresh.extend(neighbors);
                sets.push(fresh);
            }
        }
        ret.push(sets);
    }
    ret
}

// Second pass over `first_filter`'s output: within each group, repeatedly
// merge any sets that still share a city, so the final sets are pairwise
// disjoint, e.g. [[{"自贡","成都",..}, {"桃园","台中",..}, {"湛江","东莞",..}], ..].
pub fn second_filter(cities: Vec<Vec<HashSet<String>>>) -> Vec<Vec<HashSet<String>>> {
    let mut new_cities_ret = vec![];
    for row in cities {
        let mut new_cities = vec![];
        let mut index_arr = vec![];
        let mut row = marker_vec(row);

        for i in 0..row.len() {
            if row[i].1 {
                continue; // already merged into an earlier seed
            }
            row[i].1 = true; // mark first, then use this set as the merge seed
            let mut set = row[i].0.clone();
            loop {
                match find_can_merge(&set, &row, &mut index_arr) {
                    Ok(_) => {
                        if !index_arr.is_empty() {
                            // Found overlapping sets: fold them into `set` and
                            // keep searching (the merge may create new overlaps).
                            merge(&mut set, &mut index_arr, &mut row);
                        } else {
                            // Nothing overlaps any more; keep the set and move
                            // on to the next unmarked seed.
                            new_cities.push(set);
                            break;
                        }
                    }
                    Err(_) => {
                        // No unmarked sets remain in `row`; keep the set.
                        new_cities.push(set);
                        break;
                    }
                }
            }
        }
        new_cities_ret.push(new_cities);
    }
    new_cities_ret
}

// Iterator over the entries of `row` whose "merged" flag is still false.
#[inline(always)]
fn unmakered_iter(
    row: &[(HashSet<String>, bool)],
) -> impl Iterator<Item = &(HashSet<String>, bool)> {
    row.iter().filter(|entry| !entry.1)
}

// Collect into `index` the positions of all unmarked sets in `row` that share
// at least one element with `set`. Returns `Err(())` when no unmarked set
// remains at all (the caller's signal to stop); otherwise `Ok(())`, with
// `index` possibly left empty when nothing overlaps.
#[inline(always)]
fn find_can_merge(
    set: &HashSet<String>,
    row: &[(HashSet<String>, bool)],
    index: &mut Vec<usize>,
) -> Result<(), ()> {
    // `all` short-circuits on the first unmarked entry, unlike counting
    // a filtered iterator, which always scans the whole slice.
    if row.iter().all(|(_, marked)| *marked) {
        return Err(());
    }
    for (i, (candidate, marked)) in row.iter().enumerate() {
        if !marked && set.iter().any(|v| candidate.contains(v)) {
            index.push(i);
        }
    }
    Ok(())
}

// Union the sets at positions `index` into `set`, mark those entries as
// consumed, and clear `index` for the next search round.
#[inline(always)]
fn merge(
    set: &mut HashSet<String>,
    index: &mut Vec<usize>,
    row: &mut Vec<(HashSet<String>, bool)>,
) {
    for &i in index.iter() {
        // Take the set out instead of cloning it: entries marked here are
        // never read again (`find_can_merge` / `unmakered_iter` skip them).
        set.extend(std::mem::take(&mut row[i].0));
        row[i].1 = true; // tombstone
    }
    index.clear();
}

// Pair every set with a `false` "merged/deleted" flag so later passes can
// mark entries as consumed without removing them from the vector.
#[inline(always)]
fn marker_vec(row: Vec<HashSet<String>>) -> Vec<(HashSet<String>, bool)> {
    let mut marked = Vec::with_capacity(row.len());
    for set in row {
        marked.push((set, false));
    }
    marked
}

// Render the number of disjoint sets in each group as a comma-separated
// string, e.g. [[s1, s2], [s3]] -> "2,1". Returns "" for empty input,
// matching the old fold-then-pop behavior.
fn get_count_string(cities: Vec<Vec<HashSet<String>>>) -> String {
    cities
        .iter()
        .map(|group| group.len().to_string())
        .collect::<Vec<_>>()
        .join(",")
}

// -----------------------------------------------------------------------------------
use ordered_hash_map::OrderedHashMap;
use std::str::Chars;
use thiserror::Error;

/// A JSON value. Only objects, strings, and arrays are supported.
#[derive(Debug, Clone)]
pub enum JsonValue {
    String(String),
    Object(OrderedHashMap<String, JsonValue>),
    Array(Vec<JsonValue>),
}

/// The final top-level JSON document: an object or an array.
#[derive(Debug)]
pub enum Json {
    Object(OrderedHashMap<String, JsonValue>),
    #[allow(dead_code)]
    Array(Vec<JsonValue>),
}

/// Which kind of container is currently open while parsing.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ParentNode {
    Map,
    Array,
}

/// Whether an object is currently expecting a key or a value.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum State {
    ParseKey,
    ParseValue,
}

/// Token class, used by the parser for "what may come next" checks.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum SymbolEnum {
    /// {
    BeginObject,
    /// }
    EndObject,
    /// :
    Colon,
    /// [
    BeginArray,
    /// ]
    EndArray,
    /// ,
    Comma,
    /// a string literal
    String,
}

impl Token {
    /// Map a token to its symbol class, discarding position and payload.
    fn get_symbol(&self) -> SymbolEnum {
        match self {
            Token::BeginObject(..) => SymbolEnum::BeginObject,
            Token::EndObject(..) => SymbolEnum::EndObject,
            Token::Colon(..) => SymbolEnum::Colon,
            Token::BeginArray(..) => SymbolEnum::BeginArray,
            Token::EndArray(..) => SymbolEnum::EndArray,
            Token::Comma(..) => SymbolEnum::Comma,
            Token::String(..) => SymbolEnum::String,
        }
    }
}

// Parse the token stream into a `Json` value (single pass, stack-based).
//
// `maps` / `arrays` hold partially-built containers, `keys` the pending object
// keys, `symbols` the unmatched `{` / `[` openers, `parent_node` the kind of
// container currently open, and `parse_state` whether an object expects a key
// or a value next. `next_maybe_symbols` lists the token classes that are legal
// at the current position; any other token is a syntax error.
pub fn parse_json(token_stream: Vec<Token>) -> Result<Json, ParseError> {
    let mut token_stream = token_stream.into_iter();
    let mut maps = Vec::new();
    let mut arrays = Vec::new();
    let mut keys = Vec::new();
    let mut parent_node = Vec::new();
    let mut symbols = Vec::new();
    let mut parse_state = Vec::new();
    let mut next_maybe_symbols = vec![SymbolEnum::BeginObject, SymbolEnum::BeginArray];

    // Sentinel root: the finished top-level value ends up as the single
    // element of this bottom array.
    parent_node.push(ParentNode::Array);
    arrays.push(Vec::new());

    while let Some(token) = token_stream.next() {
        let symbol = token.get_symbol();

        // Reject any token that is not legal at the current position.
        if !next_maybe_symbols.contains(&symbol) {
            let (row, col) = get_row_col(&token);
            let content = format!(
                "The following symbols may be required `{next_maybe_symbols:?}`,
             but provided is `{symbol:?}`, at {row}:{col}"
            );
            return Err(ParseError::SymbolMismatch(content))?;
        }

        next_maybe_symbols.clear();

        match token {
            Token::BeginObject(_, _) => {
                // Open a new object; next must be a key string or `}`.
                symbols.push(symbol);
                maps.push(OrderedHashMap::new());
                parent_node.push(ParentNode::Map);
                parse_state.push(State::ParseKey);
                next_maybe_symbols.push(SymbolEnum::String);
                next_maybe_symbols.push(SymbolEnum::EndObject);
            }
            Token::EndObject(row, col) => {
                if symbols.pop() != Some(SymbolEnum::BeginObject) {
                    let content = format!("Missing corresponding symbol, at {row}:{col}");
                    return Err(ParseError::SymbolMismatch(content))?;
                }

                let current_map = maps.pop().unwrap();
                let _ = parent_node.pop().unwrap();
                // An empty object `{}` pushed a ParseKey state at `{` that no
                // key token ever consumed; drop it here.
                if current_map.is_empty() {
                    assert_eq!(parse_state.pop(), Some(State::ParseKey));
                }
                // Attach the finished object to whatever container encloses it.
                match parent_node.last().unwrap() {
                    ParentNode::Map => {
                        assert_eq!(parse_state.pop(), Some(State::ParseValue));
                        assert_eq!(parse_state.pop(), Some(State::ParseKey));

                        let k = keys.pop().unwrap();
                        maps.last_mut()
                            .unwrap()
                            .insert(k, JsonValue::Object(current_map));
                    }
                    ParentNode::Array => {
                        arrays
                            .last_mut()
                            .unwrap()
                            .push(JsonValue::Object(current_map));
                    }
                }

                next_maybe_symbols.push(SymbolEnum::Comma);
                next_maybe_symbols.push(SymbolEnum::EndArray);
                next_maybe_symbols.push(SymbolEnum::EndObject);
            }
            Token::BeginArray(_, _) => {
                // Open a new array; it may hold strings, arrays, or objects.
                symbols.push(symbol);
                arrays.push(Vec::new());
                parent_node.push(ParentNode::Array);

                next_maybe_symbols.push(SymbolEnum::String);
                next_maybe_symbols.push(SymbolEnum::BeginArray);
                next_maybe_symbols.push(SymbolEnum::EndArray);
                next_maybe_symbols.push(SymbolEnum::BeginObject);
            }
            Token::EndArray(row, col) => {
                if symbols.pop() != Some(SymbolEnum::BeginArray) {
                    let content = format!("Missing corresponding symbol, at {row}:{col}");
                    return Err(ParseError::SymbolMismatch(content));
                }

                let mut current_list = arrays.pop().unwrap();
                let _ = parent_node.pop().unwrap();

                match parent_node.last().unwrap() {
                    ParentNode::Map => {
                        assert_eq!(parse_state.pop(), Some(State::ParseValue));
                        assert_eq!(parse_state.pop(), Some(State::ParseKey));

                        let k = keys.pop().unwrap();
                        let map = maps.last_mut().unwrap();
                        // Duplicate keys are merged rather than overwritten:
                        // the previous array's elements are appended to the
                        // new list before it replaces the old entry.
                        if let Some(j) = map.get(&k) {
                            match j {
                                JsonValue::Array(vec) => {
                                    current_list.extend(vec.clone());
                                }
                                _ => {
                                    return Err(ParseError::InvalidToken(
                                        "重复键必须是数组".to_owned(),
                                    ))
                                }
                            }
                        }
                        map.insert(k, JsonValue::Array(current_list));
                    }
                    ParentNode::Array => {
                        arrays
                            .last_mut()
                            .unwrap()
                            .push(JsonValue::Array(current_list));
                    }
                }
                next_maybe_symbols.push(SymbolEnum::Comma);
                next_maybe_symbols.push(SymbolEnum::EndArray);
                next_maybe_symbols.push(SymbolEnum::EndObject);
            }
            Token::Comma(_, _) => match parent_node.last().unwrap() {
                ParentNode::Map => {
                    // Inside an object, a comma must be followed by a key.
                    parse_state.push(State::ParseKey);
                    next_maybe_symbols.push(SymbolEnum::String);
                }
                ParentNode::Array => {
                    next_maybe_symbols.push(SymbolEnum::String);
                    next_maybe_symbols.push(SymbolEnum::BeginArray);
                    next_maybe_symbols.push(SymbolEnum::BeginObject);
                }
            },
            Token::String(_, _, string) => match parent_node.last().unwrap() {
                ParentNode::Map => match parse_state.last().unwrap() {
                    State::ParseKey => {
                        keys.push(string);
                        next_maybe_symbols.push(SymbolEnum::Colon);
                    }
                    State::ParseValue => {
                        // Object values must be objects or arrays, never bare
                        // strings (see module docs).
                        return Err(ParseError::InvalidToken(
                            "键值可以是对象、数组，不能是字符串".to_owned(),
                        ));
                    }
                },
                ParentNode::Array => {
                    let value = JsonValue::String(string);
                    arrays.last_mut().unwrap().push(value);

                    next_maybe_symbols.push(SymbolEnum::Comma);
                    next_maybe_symbols.push(SymbolEnum::EndArray);
                }
            },
            Token::Colon(_, _) => {
                // After a key, expect its value: string, array, or object.
                parse_state.push(State::ParseValue);
                next_maybe_symbols.push(SymbolEnum::String);
                next_maybe_symbols.push(SymbolEnum::BeginArray);
                next_maybe_symbols.push(SymbolEnum::BeginObject);
            }
        }
    }

    // The sentinel root array now holds exactly one element: the document.
    let json = arrays.pop().unwrap().pop().unwrap();
    match json {
        JsonValue::Object(m) => Ok(Json::Object(m)),
        JsonValue::Array(l) => Ok(Json::Array(l)),
        _ => unreachable!(),
    }
}

/// Convert a parsed `Json` into `OrderedHashMap<group, OrderedHashMap<city, Vec<neighbor>>>`.
/// The lexer and parser stages guarantee the shape, so panicking here is acceptable.
pub fn parse_json_to_map(
    json: Json,
) -> OrderedHashMap<String, OrderedHashMap<String, Vec<String>>> {
    // The top level must be an object.
    let Json::Object(json) = json else {
        panic!("token有错误");
    };
    json.into_iter()
        .map(|(keys, json)| {
            // Each group value must itself be an object.
            let JsonValue::Object(m) = json else {
                panic!("token有错误");
            };
            (
                keys,
                m.into_iter()
                    .map(|(k1, j1)| {
                        // Each city value must be an array ...
                        let JsonValue::Array(a) = j1 else {
                            panic!("token有错误");
                        };
                        (
                            k1,
                            a.into_iter()
                                .map(|v| {
                                    // ... whose elements are all strings.
                                    let JsonValue::String(s) = v else {
                                        panic!("token有错误");
                                    };
                                    s
                                })
                                .collect::<Vec<String>>(),
                        )
                    })
                    .collect(),
            )
        })
        .collect()
}

// Extract the (row, col) source position shared by every token variant.
#[inline]
fn get_row_col(token: &Token) -> (u64, u64) {
    match token {
        Token::BeginObject(r, c)
        | Token::EndObject(r, c)
        | Token::Colon(r, c)
        | Token::BeginArray(r, c)
        | Token::EndArray(r, c)
        | Token::Comma(r, c)
        | Token::String(r, c, _) => (*r, *c),
    }
}
// ------------------------------------------------------------------------------------------------------------------
/// Error type for lexing and parsing failures.
#[derive(Error, Debug)]
pub enum ParseError {
    #[error("`{0}`")]
    InvalidToken(String), // invalid token: bad character, unterminated string, wrong value type
    #[error("`{0}`")]
    SymbolMismatch(String), // unexpected or unbalanced symbol
}

/// Lexer tokens. Only objects, strings, and arrays are supported.
#[derive(Clone, Debug, PartialEq)]
pub enum Token {
    // Every variant carries its (row, col) source position.
    BeginObject(u64, u64), //{
    EndObject(u64, u64),   //}
    Colon(u64, u64),       //:
    BeginArray(u64, u64),  //[
    EndArray(u64, u64),    //]
    Comma(u64, u64),       //,
    String(u64, u64, String),
}

// Render each token back to its JSON surface form.
impl std::fmt::Display for Token {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Token::String(_, _, v) => write!(f, "\"{v}\""),
            Token::BeginObject(..) => f.write_str("{"),
            Token::EndObject(..) => f.write_str("}"),
            Token::Colon(..) => f.write_str(":"),
            Token::BeginArray(..) => f.write_str("["),
            Token::EndArray(..) => f.write_str("]"),
            Token::Comma(..) => f.write_str(","),
        }
    }
}

/// Lexer: split the input into `Token`s, tracking 1-based row/column
/// positions for error messages. Only `{ } [ ] : ,`, whitespace, and
/// double-quoted strings are valid input.
pub fn parse(s: &str) -> Result<Vec<Token>, ParseError> {
    let mut token_stream = vec![];
    let mut chars: Chars = s.chars();
    let mut row = 1;
    let mut col = 1;
    // `while let` (not `for`) so `parse_string` can borrow `chars` mutably.
    while let Some(ch) = chars.next() {
        match ch {
            '{' => token_stream.push(Token::BeginObject(row, col)),
            '}' => token_stream.push(Token::EndObject(row, col)),
            '[' => token_stream.push(Token::BeginArray(row, col)),
            ']' => token_stream.push(Token::EndArray(row, col)),
            ':' => token_stream.push(Token::Colon(row, col)),
            ',' => token_stream.push(Token::Comma(row, col)),
            '\r' => continue, // carriage return occupies no column
            ' ' => {
                col += 1;
                continue;
            }
            '\t' => {
                // a tab counts as 4 columns
                col += 4;
                continue;
            }
            '\n' => {
                row += 1;
                col = 1;
                continue;
            }
            '"' => {
                // `col` currently points at the opening quote; `parse_string`
                // records that position and advances past the string body.
                let token = parse_string(&mut row, &mut col, &mut chars)?;
                token_stream.push(token);
            }
            _ => {
                return Err(ParseError::InvalidToken(format!(
                    "Invalid char '{ch}', at {row}:{col}."
                )));
            }
        }
        col += 1;
    }

    Ok(token_stream)
}

// Lex a string literal. Called after the opening `"` was consumed by `parse`;
// returns a `Token::String` positioned at the opening quote, with `row`/`col`
// advanced past everything read here.
fn parse_string(row: &mut u64, col: &mut u64, chars: &mut Chars) -> Result<Token, ParseError> {
    // Position of the opening quote — the token's reported location.
    let r = *row;
    let c = *col;
    let mut token = String::new();
    let mut prev_ch = ' ';
    loop {
        if let Some(ch) = chars.next() {
            match ch {
                '"' => {
                    if prev_ch == '\\' {
                        // Escaped quote: keep a literal `"` in the token,
                        // dropping the backslash pushed on the previous
                        // iteration. (The old code `continue`d here, which
                        // both lost the `"` and left `prev_ch` as '\\',
                        // corrupting escape tracking for the next char.)
                        token.pop();
                        token.push(ch);
                    } else {
                        // Unescaped quote terminates the string.
                        *col += 1;
                        return Ok(Token::String(r, c, token));
                    }
                }
                '\n' => {
                    return Err(ParseError::InvalidToken(format!(
                        "JSON's Key and Value cannot contain '\\n', at {row}:{col}"
                    )));
                }
                '\t' => {
                    // A tab counts as 4 extra columns on top of the shared +1 below.
                    *col += 4;
                    token.push(ch);
                }

                _ => token.push(ch),
            }
            prev_ch = ch;
            *col += 1;
        } else {
            // Input ended before the closing quote.
            return Err(ParseError::InvalidToken(format!(
                "String ends abnormally, at {row}:{col}."
            )));
        }
    }
}
