|
|
|
|
|
|
|
use std::{collections::HashMap, time::Duration}; |
|
|
|
use error_stack::Report; |
|
use rand::Rng; |
|
use tokio::task::JoinHandle; |
|
|
|
use super::{ |
|
aggregation_models::{EngineErrorInfo, RawSearchResult, SearchResult, SearchResults}, |
|
user_agent::random_user_agent, |
|
}; |
|
|
|
use crate::engines::{ |
|
duckduckgo, |
|
engine_models::{EngineError, SearchEngine}, |
|
searx, |
|
}; |
|
|
|
|
|
/// Handles to the spawned per-engine search tasks; each task resolves to
/// either that engine's results keyed by URL, or an [`EngineError`] report.
type FutureVec = Vec<JoinHandle<Result<HashMap<String, RawSearchResult>, Report<EngineError>>>>;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
pub async fn aggregate( |
|
query: String, |
|
page: u32, |
|
random_delay: bool, |
|
debug: bool, |
|
upstream_search_engines: Vec<String>, |
|
) -> Result<SearchResults, Box<dyn std::error::Error>> { |
|
let user_agent: String = random_user_agent(); |
|
let mut result_map: HashMap<String, RawSearchResult> = HashMap::new(); |
|
|
|
|
|
if random_delay || !debug { |
|
let mut rng = rand::thread_rng(); |
|
let delay_secs = rng.gen_range(1..10); |
|
std::thread::sleep(Duration::from_secs(delay_secs)); |
|
} |
|
|
|
|
|
let search_engines: Vec<Box<dyn SearchEngine + Send + Sync>> = upstream_search_engines |
|
.iter() |
|
.map(|engine| match engine.to_lowercase().as_str() { |
|
"duckduckgo" => Box::new(duckduckgo::DuckDuckGo) as Box<dyn SearchEngine + Send + Sync>, |
|
"searx" => Box::new(searx::Searx) as Box<dyn SearchEngine + Send + Sync>, |
|
&_ => panic!("Config Error: Incorrect config file option provided"), |
|
}) |
|
.collect(); |
|
|
|
let task_capacity: usize = search_engines.len(); |
|
|
|
let tasks: FutureVec = search_engines |
|
.into_iter() |
|
.map(|search_engine| { |
|
let query: String = query.clone(); |
|
let user_agent: String = user_agent.clone(); |
|
tokio::spawn( |
|
async move { search_engine.results(query, page, user_agent.clone()).await }, |
|
) |
|
}) |
|
.collect(); |
|
|
|
let mut outputs = Vec::with_capacity(task_capacity); |
|
|
|
for task in tasks { |
|
if let Ok(result) = task.await { |
|
outputs.push(result) |
|
} |
|
} |
|
|
|
let mut engine_errors_info: Vec<EngineErrorInfo> = Vec::new(); |
|
|
|
let mut initial: bool = true; |
|
let mut counter: usize = 0; |
|
outputs.iter().for_each(|results| { |
|
if initial { |
|
match results { |
|
Ok(result) => { |
|
result_map.extend(result.clone()); |
|
counter += 1; |
|
initial = false |
|
} |
|
Err(error_type) => { |
|
engine_errors_info.push(EngineErrorInfo::new( |
|
error_type.downcast_ref::<EngineError>().unwrap(), |
|
upstream_search_engines[counter].clone(), |
|
)); |
|
counter += 1 |
|
} |
|
} |
|
} else { |
|
match results { |
|
Ok(result) => { |
|
result.clone().into_iter().for_each(|(key, value)| { |
|
result_map |
|
.entry(key) |
|
.and_modify(|result| { |
|
result.add_engines(value.clone().engine()); |
|
}) |
|
.or_insert_with(|| -> RawSearchResult { |
|
RawSearchResult::new( |
|
value.title.clone(), |
|
value.visiting_url.clone(), |
|
value.description.clone(), |
|
value.engine.clone(), |
|
) |
|
}); |
|
}); |
|
counter += 1 |
|
} |
|
Err(error_type) => { |
|
engine_errors_info.push(EngineErrorInfo::new( |
|
error_type.downcast_ref::<EngineError>().unwrap(), |
|
upstream_search_engines[counter].clone(), |
|
)); |
|
counter += 1 |
|
} |
|
} |
|
} |
|
}); |
|
|
|
Ok(SearchResults::new( |
|
result_map |
|
.into_iter() |
|
.map(|(key, value)| { |
|
SearchResult::new( |
|
value.title, |
|
value.visiting_url, |
|
key, |
|
value.description, |
|
value.engine, |
|
) |
|
}) |
|
.collect(), |
|
query.to_string(), |
|
engine_errors_info, |
|
)) |
|
} |
|
|