use lhash::md5;
use serde::{Deserialize, Serialize};

/// Number of distinct byte values; size of the gear lookup tables.
const FASTCDC_SYMBOL_COUNT:usize = 256;
/// Length of the repeated-byte seed buffer hashed to derive each gear entry.
const FASTCDC_SEED_LENGTH:usize = 64;

/// Smallest acceptable value for the minimum chunk size.
const MINIMUM_MIN: usize = 64;
/// Largest acceptable value for the minimum chunk size.
const MINIMUM_MAX: usize = 1_048_576;
/// Smallest acceptable value for the average chunk size.
const AVERAGE_MIN: usize = 256;
/// Largest acceptable value for the average chunk size.
const AVERAGE_MAX: usize = 4_194_304;
/// Smallest acceptable value for the maximum chunk size.
const MAXIMUM_MIN: usize = 1024;
/// Largest acceptable value for the maximum chunk size.
const MAXIMUM_MAX: usize = 16_777_216;

/// Gear table: one pseudo-random 64-bit value per byte value, derived at
/// compile time from MD5 by `gear_matrix_value`.
// NOTE(review): `seq!` is not imported in the visible part of this file —
// presumably `seq_macro::seq` is brought into scope at the crate root; verify.
pub const GEAR_MATRIX:[u64;FASTCDC_SYMBOL_COUNT] = seq!(N in 0..256 { [ #(gear_matrix_value(N),)* ] });
/// `GEAR_MATRIX` pre-shifted left by one bit, used by the accelerated
/// two-bytes-per-iteration chunking path.
const GEAR_LS_MATRIX:[u64;FASTCDC_SYMBOL_COUNT] = seq!(N in 0..256 { [ #(GEAR_MATRIX[N] << 1,)* ] });
//
// Masks for each of the desired number of bits, where 0 through 5 are unused.
// The values for sizes 64 bytes through 128 kilobytes come from the C
// reference implementation (found in the destor repository) while the extra
// values come from the restic-FastCDC repository. The FastCDC paper claims that
// the deduplication ratio is slightly improved when the mask bits are spread
// relatively evenly, hence these seemingly "magic" values.
//
// Index i holds the mask for a target chunk size of 2^i bytes (i.e. the
// index equals the base-2 logarithm of the size labeled in the comment).
const MASKS: [u64; 26] = [
    0,                  // padding
    0,                  // padding
    0,                  // padding
    0,                  // padding
    0,                  // padding
    0x0000000001804110, // unused except for NC 3
    0x0000000001803110, // 64B
    0x0000000018035100, // 128B
    0x0000001800035300, // 256B
    0x0000019000353000, // 512B
    0x0000590003530000, // 1KB
    0x0000d90003530000, // 2KB
    0x0000d90103530000, // 4KB
    0x0000d90303530000, // 8KB
    0x0000d90313530000, // 16KB
    0x0000d90f03530000, // 32KB
    0x0000d90303537000, // 64KB
    0x0000d90703537000, // 128KB
    0x0000d90707537000, // 256KB
    0x0000d91707537000, // 512KB
    0x0000d91747537000, // 1MB
    0x0000d91767537000, // 2MB
    0x0000d93767537000, // 4MB
    0x0000d93777537000, // 8MB
    0x0000d93777577000, // 16MB
    0x0000db3777577000, // unused except for NC 3
];

/// Derives the gear-table entry for byte value `index`: MD5-hash a 64-byte
/// buffer filled with that byte and interpret the first eight digest bytes
/// as a little-endian `u64`.
const fn gear_matrix_value(index:usize) -> u64 {
    let seed = [index as u8; FASTCDC_SEED_LENGTH];
    let digest = md5(&seed);
    // First eight bytes of the digest, spelled out so this stays a const fn.
    let prefix: [u8; 8] = [
        digest[0], digest[1], digest[2], digest[3],
        digest[4], digest[5], digest[6], digest[7],
    ];
    u64::from_le_bytes(prefix)
}

///
/// Returns the base-2 logarithm of `value`, rounded to the nearest integer
/// (exact for powers of two).
///
pub(crate) fn round2power(value: usize) -> usize {
    let exponent = (value as f64).log2();
    exponent.round() as usize
}

/// FastCDC normalization level: the offset (in table entries, i.e. powers of
/// two) between the average-size mask and the small/large judgment masks
/// selected in `FastCDCConfig::with_level`.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub enum FastcdcNormalization {
    /// No normalization: both judgment masks equal the average-size mask.
    Level0,
    /// Masks offset by one table entry.
    Level1,
    /// Masks offset by two table entries.
    Level2,
    /// Masks offset by three table entries.
    Level3,
}

impl FastcdcNormalization {
    /// Numeric offset (0 through 3) this level applies to the mask-table
    /// index.
    fn range(&self) -> usize {
        // Fieldless enum: discriminants are 0, 1, 2, 3 in declaration order,
        // so the cast yields exactly the value the old `match` produced.
        *self as usize
    }
}

/// Builder for [`FastCDCConfig`]; parameters left unset fall back to
/// defaults when `build` is called.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct FastCDCBuilder {
    /// Normalization level; defaults to `Level0` when unset.
    level:Option<FastcdcNormalization>,
    /// Minimum chunk size in bytes.
    min_size: Option<usize>,
    /// Average (target) chunk size in bytes.
    avg_size: Option<usize>,
    /// Maximum chunk size in bytes.
    max_size: Option<usize>,
}

impl FastCDCBuilder {
    /// Creates a builder with no parameters set.
    pub fn new() -> Self {
        Self { level: None, min_size: None, avg_size: None, max_size: None }
    }

    /// Sets the normalization level (defaults to `Level0`).
    pub fn level(&mut self, level: FastcdcNormalization) -> &mut Self {
        self.level = Some(level);
        self
    }

    /// Sets the minimum chunk size in bytes.
    pub fn min_size(&mut self, min_size: usize) -> &mut Self {
        self.min_size = Some(min_size);
        self
    }

    /// Sets the average (target) chunk size in bytes.
    pub fn avg_size(&mut self, avg_size: usize) -> &mut Self {
        self.avg_size = Some(avg_size);
        self
    }

    /// Sets the maximum chunk size in bytes.
    pub fn max_size(&mut self, max_size: usize) -> &mut Self {
        self.max_size = Some(max_size);
        self
    }

    /// Builds the configuration. If any size is unset, or the sizes are not
    /// ordered `min <= avg <= max`, ALL three fall back to the defaults
    /// (2048 / 8192 / 65536) rather than mixing user and default values.
    pub fn build(self) -> FastCDCConfig {
        let level = self.level.unwrap_or(FastcdcNormalization::Level0);
        let (min_size, avg_size, max_size) =
            match (self.min_size, self.avg_size, self.max_size) {
                // All three supplied and consistently ordered: use them.
                (Some(min), Some(avg), Some(max)) if min <= avg && avg <= max => {
                    (min, avg, max)
                }
                // Missing or inconsistent parameters: use the defaults.
                // (Bug fix: the previous code read `self.avg_size` where it
                // meant `self.max_size`, so a user-supplied maximum was
                // silently replaced by the average.)
                _ => (2048, 8192, 65536),
            };
        FastCDCConfig::with_level(min_size, avg_size, max_size, level)
    }
}


/// FastCDC chunking parameters with precomputed hash-judgment masks.
#[derive(Debug, Clone)]
pub struct FastCDCConfig {
    /// Minimum chunk size in bytes; no cut point is sought before this offset.
    min_size: usize,
    /// Average (target) chunk size in bytes.
    avg_size: usize,
    /// Maximum chunk size in bytes; chunks are force-cut at this length.
    max_size: usize,
    /// Judgment mask used before the average point (more one-bits).
    mask_s: u64,
    /// Judgment mask used after the average point (fewer one-bits).
    mask_l: u64,
    /// `mask_s << 1`, for the two-bytes-per-iteration accelerated path.
    mask_s_ls: u64,
    /// `mask_l << 1`, for the two-bytes-per-iteration accelerated path.
    mask_l_ls: u64,
}

impl FastCDCConfig {
    /// Builds a configuration from explicit size bounds and a normalization
    /// level. The judgment masks are looked up by the base-2 logarithm of
    /// `avg_size`, offset up/down by the normalization level.
    ///
    /// # Panics
    /// Panics when any size lies outside its acceptable range
    /// (`MINIMUM_*`, `AVERAGE_*`, `MAXIMUM_*`).
    pub fn with_level(min_size: usize, avg_size: usize, max_size: usize, level: FastcdcNormalization) -> Self {
        assert!(min_size >= MINIMUM_MIN);
        assert!(min_size <= MINIMUM_MAX);
        assert!(avg_size >= AVERAGE_MIN);
        assert!(avg_size <= AVERAGE_MAX);
        assert!(max_size >= MAXIMUM_MIN);
        assert!(max_size <= MAXIMUM_MAX);
        let bits = round2power(avg_size);
        let normalization = level.range();
        // Mask with MORE one-bits (harder judgment) for positions before the
        // average; mask with FEWER one-bits (easier) for positions after.
        let mask_s = MASKS[(bits + normalization) as usize];
        let mask_l = MASKS[(bits - normalization) as usize];
        Self {
            min_size,
            avg_size,
            max_size,
            mask_s,
            mask_l,
            // Pre-shifted copies for the two-bytes-per-iteration fast path.
            mask_s_ls: mask_s << 1,
            mask_l_ls: mask_l << 1,
        }
    }

    /// NOTE(review): despite the name, this returns `max_size` — presumably
    /// the number of bytes a caller must buffer so that a cut decision is
    /// final. Verify the intent against callers.
    pub fn min_chunking_len(&self) -> usize {
		self.max_size
	}

    /// Returns the length of the next chunk at the start of `source`, found
    /// with the gear rolling hash. Inputs of `min_size` bytes or fewer are
    /// returned whole; the result never exceeds `max_size`.
    ///
    /// NOTE(review): a cut at position `index` returns a chunk of `index`
    /// bytes even though `source[index]` was folded into the hash — confirm
    /// this boundary convention matches the consumers.
    pub fn chunking(&self, source: &[u8]) -> usize {
        let mut remaining = source.len();
        if remaining <= self.min_size {
            return remaining;
        }
        // `center` is where we switch from the strict mask to the lax one.
        let mut center = self.avg_size;
        if remaining > self.max_size {
            remaining = self.max_size;
        } else if remaining < center {
            center = remaining;
        }
        // Skip the minimum region entirely; hashing starts at min_size.
        let mut index = self.min_size;
        // Paraphrasing from the paper: Use the mask with more 1 bits for the
        // hash judgment when the current chunking position is smaller than the
        // desired size, which makes it harder to generate smaller chunks.
        let mut hash: u64 = 0;
        while index < center {
            hash = (hash << 1).wrapping_add(GEAR_MATRIX[source[index] as usize]);
            if (hash & self.mask_s) == 0 {
                return index;
            }
            index += 1;
        }
        // Again, paraphrasing: use the mask with fewer 1 bits for the hash
        // judgment when the current chunking position is larger than the
        // desired size, which makes it easier to generate larger chunks.
        let last_pos = remaining;
        while index < last_pos {
            hash = (hash << 1).wrapping_add(GEAR_MATRIX[source[index] as usize]);
            if (hash & self.mask_l) == 0 {
                return index;
            }
            index += 1;
        }
        // If all else fails, return the largest chunk. This will happen with
        // pathological data, such as all zeroes.
        index
    }
    
    /// Two-bytes-per-iteration variant of `chunking`: each pass folds one
    /// byte through the pre-shifted gear table (`GEAR_LS_MATRIX`) and one
    /// through `GEAR_MATRIX`, judging with the shifted and unshifted masks
    /// respectively, so cut points may land on even or odd offsets.
    ///
    /// NOTE(review): the `/ 2` halving drops the final byte of an odd-length
    /// region relative to `chunking` — confirm that is acceptable.
    pub fn accelerate_chunking(&self, source: &[u8]) -> usize {
        let mut remaining = source.len();
        if remaining <= self.min_size {
            return remaining;
        }
        let mut center = self.avg_size;
        if remaining > self.max_size {
            remaining = self.max_size;
        } else if remaining < center {
            center = remaining;
        }
        // `index` counts byte PAIRS; `a = index * 2` is the byte offset.
        let mut index = self.min_size / 2;
        let mut hash: u64 = 0;
        while index < center / 2 {
            let a = index * 2;
            hash = (hash << 2).wrapping_add(GEAR_LS_MATRIX[source[a] as usize]);
            if (hash & self.mask_s_ls) == 0 {
                return a;
            }
            hash = hash.wrapping_add(GEAR_MATRIX[source[a + 1] as usize]);
            if (hash & self.mask_s) == 0 {
                return a + 1;
            }
            index += 1;
        }
        while index < remaining / 2 {
            let a = index * 2;
            hash = (hash << 2).wrapping_add(GEAR_LS_MATRIX[source[a] as usize]);
            if (hash & self.mask_l_ls) == 0 {
                return a;
            }
            hash = hash.wrapping_add(GEAR_MATRIX[source[a + 1] as usize]);
            if (hash & self.mask_l) == 0 {
                return a + 1;
            }
            index += 1;
        }
        // If all else fails, return the largest chunk. This will happen with
        // pathological data, such as all zeroes.
        remaining
    }
}

impl Default for FastCDCConfig {
    /// Default bounds: 2 KiB minimum, 8 KiB average, 64 KiB maximum, with
    /// normalization disabled.
    fn default() -> Self {
        let (min, avg, max) = (2048, 8192, 65536);
        Self::with_level(min, avg, max, FastcdcNormalization::Level0)
    }
}