pub mod read;
pub mod error;
pub mod write;
pub mod dedup;
pub mod delta;
pub mod matching;
pub mod statistics;
pub mod fingerprint;
pub mod extract;

pub use read::*;
pub use write::*;
pub use error::*;
pub use dedup::*;
pub use delta::*;
pub use matching::*;
pub use statistics::*;
pub use fingerprint::*;
pub use extract::*;

use std::sync::Arc;
use std::fmt::Debug;
use std::time::Instant;
use pipelines::Pipeline;
use parking_lot::Mutex;
use crate::recipe::ChunkPointer;

/// Tuning knobs for a backup run.
///
/// An instance is passed by reference into the writer and deduper stages of
/// the backup pipeline (see `BackupConfig::backup`), which read these flags
/// to decide where to search for existing chunks.
pub struct BackupConfig {
    /// Super-feature count handed to `ChunkWriter::from_path`.
    /// NOTE(review): presumably the number of super-features used for
    /// similarity matching — confirm against the writer/matcher modules.
    pub sf_num: usize,
    /// Enable lookups in the recent buffer — assumed to gate a dedup/match
    /// search location; TODO confirm in `dedup`/`matching`.
    pub find_in_recent_buffer: bool,
    /// Enable lookups in the read cache — assumed as above; TODO confirm.
    pub find_in_read_cache: bool,
    /// Enable lookups in the write buffer — assumed as above; TODO confirm.
    pub find_in_write_buf: bool,
    /// Enable lookups in the persistent index store — assumed as above;
    /// TODO confirm.
    pub find_in_index_store: bool,
}

impl BackupConfig {
	/// Runs the full backup pipeline over the file at `path`, writing the
	/// deduplicated result under `result_path`.
	///
	/// Pipeline stages, in order: read → split → hash → dedup → extract →
	/// test → match → read-base → delta-encode → write. Each stage runs in
	/// its own pipeline step and reports failures into a shared
	/// `BackupErrorCollector`; an asynchronous writer thread drains finished
	/// chunks in the background.
	///
	/// * `path` — source file to back up.
	/// * `result_path` — destination for containers/indexes.
	/// * `version` — backup version label; an empty string means "no version".
	///
	/// Returns the shared `BackupStatistics` (total time recorded in
	/// microseconds) if every stage succeeded, otherwise the first error the
	/// collector captured.
	pub fn backup<T: ChunkPointer + Debug + Send + 'static>(
		&self,
		path: &str,
		result_path: &str,
		version: &str,
	) -> BackupResult<Arc<Mutex<BackupStatistics>>> {
		let statistics = Arc::new(Mutex::new(BackupStatistics::new()));

		// One shared error collector; every stage closure moves in its own
		// clone, named after the stage it serves.
		let collector = BackupErrorCollector::new();
		let write_async_errs = collector.clone();
		let read_errs = collector.clone();
		let split_errs = collector.clone();
		let hash_errs = collector.clone();
		let dedup_errs = collector.clone();
		let extract_errs = collector.clone();
		let test_errs = collector.clone();
		let match_errs = collector.clone();
		let read_base_errs = collector.clone();
		let encode_errs = collector.clone();
		let write_errs = collector.clone();

		let reader = Arc::new(ChunkReader::new(path.to_string(), statistics.clone()));
		let splitter = reader.clone();
		let hasher = ChunkHasher::new(statistics.clone());
		// Normalize the version label: empty string becomes None.
		let version = if version.is_empty() { None } else { Some(version.to_string()) };
		let writer = Arc::new(ChunkWriter::from_path(result_path, self.sf_num, version, statistics.clone(), self)?);
		let final_writer = writer.clone();
		let deduper = ChunkDeduper::new(writer.container_store(), writer.index_store(), statistics.clone(), self);
		let extractor = ChunkExtractor::new(statistics.clone());
		let matcher = Arc::new(ChunkMatcher::new(writer.container_store(), writer.index_store(), statistics.clone()));
		let base_reader = matcher.clone();
		let encoder = DeltaEncoder::new(statistics.clone());
		let testor = ChunkTestor::new(statistics.clone());

		let now = Instant::now();

		// Background writer drains chunks while the pipeline below feeds it.
		// NOTE(review): the JoinHandle is dropped, so a panic in this thread
		// is not observed here — only errors routed through the collector are.
		std::thread::spawn(move || {
			write_async_errs.collect(writer.write_async());
		});
		Pipeline::new(move |out| {
					read_errs.collect(reader.clone().read_async(&out));
				}).pipe(move |out, recv| {
					split_errs.collect(splitter.split_async(&out, recv));
				}).pipe(move |out, recv| {
					hash_errs.collect(hasher.hash(&out, recv));
				}).pipe(move |out, recv| {
					dedup_errs.collect(deduper.dedup(&out, recv));
				}).pipe(move |out, recv| {
					extract_errs.collect(extractor.extract::<T>(&out, recv));
				}).pipe(move |out, recv| {
					test_errs.collect(testor.test::<T>(&out, recv));
				}).pipe(move |out, recv| {
					match_errs.collect(matcher.matching::<T>(&out, recv));
				}).pipe(move |out, recv| {
					read_base_errs.collect(base_reader.read_base::<T>(&out, recv));
				}).pipe(move |out, recv| {
					encode_errs.collect(encoder.encode::<T>(&out, recv));
				}).end(move |recv| {
					write_errs.collect(final_writer.write::<T>(recv));
				})?;

		// Record total wall-clock time for the run, in microseconds.
		statistics.lock().set_total_time(now.elapsed().as_micros());

		// Yield the statistics only if no stage collected an error.
		collector.try_return(statistics)
	}
}
