use bson::{doc, to_bson, to_document, Document};
use std::fs::File;
use std::io::SeekFrom;
use std::mem;

use std::io::prelude::*;

use crate::bytes_operation::*;
use std::fs::OpenOptions;

/// Manages one on-disk collection file. Layout (established by the methods
/// below): a fixed-size header region (u32 doc count + space for 110000
/// DocInfo entries) followed by the raw document data section.
pub struct CollectionManager {
    /// Name of the collection; used directly as the file path.
    pub collection_name: String,
    /// Open handle to the collection file; `None` until `open()` is called.
    pub file: Option<File>,
}

impl CollectionManager {
    /// Create a manager for `collection_name`. The backing file is not
    /// touched until `open()` is called.
    pub fn new(collection_name: &str) -> CollectionManager {
        CollectionManager {
            collection_name: collection_name.to_owned(),
            file: None,
        }
    }

    /// Path of the backing file; currently just the collection name itself.
    fn get_collection_path(&self) -> String {
        self.collection_name.clone()
    }

    /// Open (or create) the collection file and make sure the fixed-size
    /// header region exists before any further reads/writes.
    ///
    /// Panics if the file cannot be opened.
    pub fn open(&mut self) {
        // Fixed typo in the log message ("Openning" -> "Opening").
        println!("Opening collection: {}", self.get_collection_path());
        let mut file = OpenOptions::new()
            .read(true)
            .write(true)
            .create(true)
            .open(self.get_collection_path())
            .expect("failed to open collection file");

        // Reserves the header region on a brand-new (or too-short) file.
        Self::try_create_file_header(&mut file);

        self.file = Some(file);
    }

    /// Ensure the file is at least as large as the fixed header region
    /// (u32 count + room for 110000 DocInfo entries). On a fresh/short file,
    /// writes an empty header and reserves the region so document data always
    /// starts at `default_offset`.
    pub fn try_create_file_header(file: &mut File) {
        let default_offset = (mem::size_of::<u32>() + 110000 * mem::size_of::<DocInfo>()) as u64;
        // Seek to the end to learn the current file size.
        let n = file.seek(SeekFrom::End(0)).expect("seek to end failed");
        if n < default_offset {
            let doc_infos: Vec<DocInfo> = Vec::new();
            Self::write_file_header(file, &doc_infos);
            // The old code dropped this Result on the floor; a failed set_len
            // would silently leave the header region unreserved.
            file.set_len(default_offset)
                .expect("failed to reserve header region");
        }
    }

    /// Read the header back into memory.
    ///
    /// Header layout: a big-endian u32 entry count, then `count` serialized
    /// DocInfo records. Panics on I/O failure (previously the seek/read
    /// Results were silently ignored, so a failed read produced an empty —
    /// and wrong — header).
    fn read_or_create_file_header(file: &mut File) -> Vec<DocInfo> {
        file.seek(SeekFrom::Start(0)).expect("seek to header failed");
        let mut size_buf = Vec::new();
        file.take(4)
            .read_to_end(&mut size_buf)
            .expect("failed to read header size");
        let size = as_u32_be(&size_buf);
        // Multiply in u64: the old u32 multiply could overflow for a
        // corrupted/huge size field.
        let mut body = Vec::new();
        file.take((size as u64) * (mem::size_of::<DocInfo>() as u64))
            .read_to_end(&mut body)
            .expect("failed to read header entries");
        struct_from_bytes(&body)
    }

    /// Serialize the in-memory header to the start of the file:
    /// big-endian u32 count, then the raw DocInfo entries.
    fn write_file_header(file: &mut File, doc_infos: &Vec<DocInfo>) {
        file.seek(SeekFrom::Start(0)).expect("seek to header failed");
        let bs = struct_to_bytes(&doc_infos[..]);
        let size_bs = (doc_infos.len() as u32).to_be_bytes();
        // write_all instead of write: a bare `write` may perform a partial
        // write, silently corrupting the header. Results were also ignored.
        file.write_all(&size_bs).expect("failed to write header size");
        file.write_all(&bs).expect("failed to write header entries");
    }

    // Returns a Vec<(index, slot_size, index_offset)>. The index_offset marks a
    //  batch index shift caused by insertion in the middle of doc_infos.
    fn find_all_slots(doc_infos: &Vec<DocInfo>) -> Vec<(usize, u32, usize)> {
        // Every gap between two consecutive docs is a candidate slot; the
        // index_offset starts at 0 and is bumped by later insertions.
        // windows(2) yields nothing for 0 or 1 docs, matching the old guard.
        doc_infos
            .windows(2)
            .enumerate()
            .map(|(i, pair)| {
                let gap = pair[1].start_offset - (pair[0].start_offset + pair[0].length);
                (i, gap, 0)
            })
            .collect()
    }

    // Scan `slots` for the first gap that can hold `data_size` bytes.
    // Returns the index (into the CURRENT `doc_infos`) of the doc AFTER which
    // the new doc should be placed; returns doc_infos.len()-1 (append at the
    // end) when no gap is large enough, and -1 when doc_infos is empty.
    fn find_big_enough_slot(
        doc_infos: &Vec<DocInfo>,
        slots: &mut Vec<(usize, u32, usize)>,
        data_size: u32,
    ) -> i32 {
        // Each earlier insertion recorded an index_offset on its slot; summing
        // the offsets seen so far converts a slot's stale index into an index
        // valid for the current doc_infos.
        let mut total_shift = 0;
        for i in 0..slots.len() {
            let (index, size, offset) = slots[i];
            total_shift += offset;
            if size >= data_size {
                let index = index + total_shift;
                // From now on, slots' indexes will increase by 1
                // NOTE(review): the already-shifted index is stored back into
                // the slot, yet later searches still add total_shift on top of
                // it — verify this double counting is intended across repeated
                // insertions into the same region.
                slots[i] = (index, size - data_size, offset + 1);
                return index as i32;
            }
        }

        return doc_infos.len() as i32 - 1;
    }

    /// Write `data` at the absolute byte `offset` in the file.
    /// Panics on I/O failure.
    fn write_data(file: &mut File, offset: u32, data: &[u8]) {
        file.seek(SeekFrom::Start(offset as u64))
            .expect("seek to data offset failed");
        // write_all: a bare `write` may write only part of `data` (and its
        // Result was previously ignored).
        file.write_all(data).expect("failed to write doc data");
    }

    // Insert many documents, each doc is specified by a tuple (id : u64, data bytes : &[u8])
    // A doc with id None receives the next auto id (max existing id + 1).
    // Each doc is placed first-fit into a gap between existing docs, or
    // appended after the last doc; the header is rewritten once at the end.
    pub fn insert_docs(&mut self, id_data_pairs: &[(Option<u64>, &[u8])]) {
        if let Some(mut file) = self.file.as_mut() {
            println!("Inserting {} docs", id_data_pairs.len());
            let mut doc_infos = Self::read_or_create_file_header(file);
            let mut slots = Self::find_all_slots(&doc_infos);
            // Auto-id starts after the largest id currently on disk.
            let mut next_id = 1u64;
            let max_id = doc_infos.iter().map(|item| item.id).max();
            if let Some(max_id) = max_id {
                next_id = max_id + 1;
            }

            for (id, data) in id_data_pairs {
                // Hard cap matches the fixed header region (110000 entries).
                if doc_infos.len() >= 110000 {
                    println!("Maximum docs reached. Insertion aborted.");
                    break;
                }

                // find empty slot big enough for new doc's data
                let last_slot_index =
                    Self::find_big_enough_slot(&doc_infos, &mut slots, data.len() as u32);
                let id: u64 = match id {
                    Some(unwrapped) => *unwrapped,
                    None => next_id,
                };
                // NOTE(review): next_id advances even when an explicit id was
                // supplied, leaving gaps in the auto-id sequence — confirm
                // this is intended.
                next_id += 1;
                let doc_info;
                // Is the vector empty?
                // last_slot_index == -1 only when there are no docs yet; the
                // new doc then starts right after the fixed header region.
                if last_slot_index >= 0 {
                    let last_doc = doc_infos[last_slot_index as usize];
                    doc_info = DocInfo::new(
                        id,
                        last_doc.start_offset + last_doc.length,
                        data.len() as u32,
                    );
                } else {
                    let default_offset = mem::size_of::<u32>() + 110000 * mem::size_of::<DocInfo>();
                    doc_info = DocInfo::new(id, default_offset as u32, data.len() as u32);
                }
                // The new doc goes immediately after the doc at last_slot_index.
                doc_infos.insert((last_slot_index + 1) as usize, doc_info);

                Self::write_data(file, doc_info.start_offset, data);
            }
            // Persist the updated header once after all inserts.
            Self::write_file_header(file, &doc_infos);
        }
    }

    /// Remove every document from the header. (Only the header is cleared;
    /// the data section bytes are left in place, exactly as before.)
    pub fn delete_all_docs(&mut self) {
        if let Some(file) = self.file.as_mut() {
            let mut doc_infos = Self::read_or_create_file_header(file);
            if !doc_infos.is_empty() {
                doc_infos.clear();
                Self::write_file_header(file, &doc_infos);
            }
        }
    }

    /// Total size of `file` in bytes (seeking to the end reports the size).
    fn file_length(file: &mut File) -> u64 {
        file.seek(SeekFrom::End(0)).unwrap()
    }

    // Resize file if there's unused data in the end of data section
    // (truncates trailing garbage left behind by deleted/relocated docs).
    fn try_resize_file(file: &mut File, actual_data_end_offset: u64) {
        let file_length = Self::file_length(file);
        if file_length > actual_data_end_offset {
            // The old code ignored this Result, so a failed truncate went
            // unnoticed.
            file.set_len(actual_data_end_offset)
                .expect("failed to truncate unused tail");
        }
    }

    // End offset (exclusive) of the last doc's data. With no docs this is the
    // end of the fixed header region, where the data section begins.
    fn get_actual_data_end_offset(doc_infos: &Vec<DocInfo>) -> u32 {
        match doc_infos.last() {
            Some(last) => last.start_offset + last.length,
            None => (mem::size_of::<u32>() + 110000 * mem::size_of::<DocInfo>()) as u32,
        }
    }

    /// Compact the data section: shift every doc's bytes toward the start so
    /// no gaps remain, update each DocInfo offset, truncate the tail, and
    /// rewrite the header.
    pub fn rearrange_data(file: &mut File, doc_infos: &mut Vec<DocInfo>) {
        let (unused_bytes, used_bytes) = Self::analyze_unused_data(&doc_infos);
        let file_length = Self::file_length(file);
        println!(
            "Before rearranging: unused_bytes = {}, used_bytes = {}, file_length = {}",
            unused_bytes, used_bytes, file_length
        );

        let buf_size = 1_000_000;
        let mut buf = Vec::with_capacity(buf_size);

        let default_offset = mem::size_of::<u32>() + 110000 * mem::size_of::<DocInfo>();
        let mut write_cursor = default_offset as u64;
        for (i, doc_info) in doc_infos.iter().enumerate() {
            // Collect this doc's bytes from its current (old) location.
            file.seek(SeekFrom::Start(doc_info.start_offset as u64))
                .expect("seek to doc data failed");
            file.take(doc_info.length as u64)
                .read_to_end(&mut buf)
                .expect("failed to read doc data");

            // Flush when the buffer is full (or big enough) or at the last doc.
            if buf.len() >= buf_size || i == doc_infos.len() - 1 {
                file.seek(SeekFrom::Start(write_cursor))
                    .expect("seek to write cursor failed");
                file.write_all(&buf).expect("failed to write compacted data");
                // BUG FIX: advance the cursor past the flushed bytes. The old
                // code never moved write_cursor, so any second flush (more
                // than buf_size of total data) overwrote the first flush at
                // default_offset, corrupting the data section.
                write_cursor += buf.len() as u64;
                buf.clear();
            }
        }

        // Recompute each doc's offset to match the compacted layout.
        let mut last_end_offset = default_offset as u32;
        for doc_info in doc_infos.iter_mut() {
            doc_info.start_offset = last_end_offset;
            last_end_offset += doc_info.length;
        }

        Self::try_resize_file(file, Self::get_actual_data_end_offset(&doc_infos) as u64);

        Self::write_file_header(file, &doc_infos);

        let (unused_bytes, used_bytes) = Self::analyze_unused_data(&doc_infos);
        let file_length = Self::file_length(file);
        println!(
            "After rearranging: unused_bytes = {}, used_bytes = {}, file_length = {}",
            unused_bytes, used_bytes, file_length
        );
    }

    // Rearrange data if too much space wasted
    pub fn rearrange_data_if_necessary(file: &mut File, doc_infos: &mut Vec<DocInfo>) {
        let (unused_bytes, used_bytes) = Self::analyze_unused_data(doc_infos);
        // Compact once more than half of the data section is wasted space,
        // i.e. unused bytes outnumber live bytes.
        if unused_bytes > used_bytes {
            Self::rearrange_data(file, doc_infos);
        }
    }

    /// Delete the documents whose ids appear in `id_list`, truncate any
    /// now-unused tail of the file, rewrite the header, and compact if too
    /// much space is wasted. Missing ids are reported on stdout.
    pub fn delete_doc_by_id(&mut self, id_list: &[u64]) {
        if let Some(file) = self.file.as_mut() {
            println!("try to delete doc: {:?}", id_list);
            let mut doc_infos = Self::read_or_create_file_header(file);

            for doc_id in id_list {
                // BUG FIX: the old code kept the previous hit in a variable
                // declared OUTSIDE this loop; when an id was not found after a
                // successful delete, the stale hit (id > 0) passed the check
                // and entry 0 was removed instead of reporting "not found".
                // Looking the id up fresh each iteration removes that state.
                match doc_infos.iter().position(|info| info.id == *doc_id) {
                    Some(pos) => {
                        doc_infos.remove(pos);
                    }
                    None => println!("Delete: Cannot find data with id {}", doc_id),
                }
            }

            // Resize file if there's unused data in the end
            Self::try_resize_file(file, Self::get_actual_data_end_offset(&doc_infos) as u64);

            Self::write_file_header(file, &doc_infos);

            Self::rearrange_data_if_necessary(file, &mut doc_infos);
        }
    }

    // Return total amount of unused bytes in data section, as well as used bytes
    pub fn analyze_unused_data(doc_infos: &Vec<DocInfo>) -> (u32, u32) {
        // One pass over the (offset-ordered) doc list: sum the doc lengths and
        // the gaps between consecutive docs.
        let mut unused_bytes = 0;
        let mut used_bytes = 0;

        if let Some(first) = doc_infos.first() {
            let mut last_end = first.start_offset;
            for doc in doc_infos {
                unused_bytes += doc.start_offset - last_end;
                used_bytes += doc.length;
                last_end = doc.start_offset + doc.length;
            }
        }

        (unused_bytes, used_bytes)
    }

    /// Fetch (id, raw bytes) for each requested id. Ids that cannot be found
    /// are reported on stdout and skipped. Also prints header diagnostics.
    pub fn get_doc_by_id(&mut self, id_list: &[u64]) -> Vec<(u64, Vec<u8>)> {
        let mut results = Vec::new();
        if let Some(file) = self.file.as_mut() {
            let doc_infos = Self::read_or_create_file_header(file);
            println!("header size: {}", doc_infos.len());

            println!("try to read doc: {:?}", id_list);
            for doc_id in id_list {
                // A default DocInfo (id 0) stands in for "not found".
                let target = doc_infos
                    .iter()
                    .find(|info| info.id == *doc_id)
                    .copied()
                    .unwrap_or_default();

                if target.id > 0 {
                    file.seek(SeekFrom::Start(target.start_offset as u64));
                    let mut buf = Vec::new();
                    file.take(target.length as u64).read_to_end(&mut buf);
                    let s = String::from_utf8_lossy(&buf);
                    println!("Found data: head = {:?}, body = {} ", target, s);

                    results.push((target.id, buf));
                } else {
                    println!("Cannot find data with id {}", doc_id);
                }
            }

            println!("Header doc id order:");
            for doc_info in &doc_infos {
                print!("{} ", doc_info.id);
            }
            println!();

            let (unused_bytes, used_bytes) = Self::analyze_unused_data(&doc_infos);
            println!("unused bytes: {}, used_bytes: {}", unused_bytes, used_bytes);
        }
        results
    }
}

#[test]
fn test_minidb() {
    // End-to-end smoke test: insert, read, delete, and compaction against a
    // throwaway collection file.
    // (Renamed TEST_STRING -> test_string to silence the non-snake-case
    // warning; removed the unused `last_pos` local.)
    let test_string = r#"
为什么我们开始厌弃海底捞的热情？
这个秋天，用“多事之秋”来形容海底捞的处境，再合适不过。不足一个月，海底捞两次登上热搜——先是海底捞72元200g的毛肚，实际只有138g；而后，又有一家海底捞全资子公司因为过期豆花，被相关部门罚款2万元。
与此同时，海底捞引以为豪的“服务”开始受到更多人质疑。在新浪科技近期发起的一份关于“海底捞是否过度服务”的微博调查中，有49.5%的网友认为其“过度服务了”。
有网友直言，“安静吃个饭，去什么海底捞，自找不自在”。专家则指出，服务是海底捞的基础，“如果没有服务，它就是一家司空见惯、寻常的火锅店”。
“我就想安静地吃个饭”
近日，北京消费者何女生和同事在海底捞吃饭，结账时却发现桌上的私人发票不见了，原来是海底捞员工误将发票当做纸巾，给清理掉了。
"#;
    let mut co = CollectionManager::new("test_collection.minidb");
    co.open();
    co.delete_all_docs(); // clean test file every time for reproducible result

    let test_strings = test_string
        .replace("\n", "") // avoid empty lines
        .replace("？", "\n")
        .replace("。", "\n")
        .replace("，", "\n");

    // Build the owned line buffers first, then borrow them for insertion —
    // this replaces the old two-loop workaround for the borrow conflict.
    let datas: Vec<Vec<u8>> = test_strings
        .lines()
        .enumerate()
        .map(|(i, line)| format!("line#{} {}", i, line).into_bytes())
        .collect();
    let test_docs: Vec<(Option<u64>, &[u8])> =
        datas.iter().map(|data| (None, &data[..])).collect();

    co.insert_docs(&test_docs);
    co.get_doc_by_id(&[11, 22, 33]);

    co.delete_doc_by_id(&[11]);
    let test_data = [
        "HELLO THERE".as_bytes().to_vec(),
        "BYE, HAVE A BEAUTIFUL DAY".as_bytes().to_vec(),
    ];
    co.insert_docs(&[(Some(54), &test_data[0][..]), (Some(55), &test_data[1][..])]);
    co.get_doc_by_id(&[55]);

    // try delete a lot of data to trigger rearrange (unused > used)
    let to_delete: Vec<u64> = (1..22).collect();
    co.delete_doc_by_id(&to_delete);
    co.get_doc_by_id(&[54, 25]);
}
