//! This module provides the functionality to scrape and gather all the results from the upstream
//! search engines and then remove duplicate results.
use std::{collections::HashMap, time::Duration};
use error_stack::Report;
use rand::Rng;
use tokio::task::JoinHandle;
use super::{
aggregation_models::{EngineErrorInfo, SearchResult, SearchResults},
user_agent::random_user_agent,
};
use crate::engines::engine_models::{EngineError, EngineHandler};
/// Alias for a long type annotation
type FutureVec = Vec<JoinHandle<Result<HashMap<String, SearchResult>, Report<EngineError>>>>;
/// The function aggregates the scraped results from the user-selected upstream search engines.
/// These engines can be chosen either from the user interface (UI) or from the configuration file.
/// The code handles this process by matching the selected search engines and adding them to a vector.
/// This vector is then used to spawn an asynchronous task per engine using `tokio::spawn`, each of
/// which returns a join handle. These handles are awaited in another loop. Once the results are
/// collected, they are filtered to remove any errors and ensure only proper results are included.
/// If an error is encountered, it is sent to the UI along with the name of the engine and the type
/// of error. This information is finally placed in the returned `SearchResults` struct.
///
/// Additionally, the function eliminates duplicate results. If the same result is returned by
/// multiple engines, their names are combined to indicate that the result was fetched from all of
/// these upstream engines. After this, all the data in the `HashMap` is moved into a struct that
/// contains all the aggregated results in a vector. Furthermore, the query used is also added to the
/// struct. This step is necessary to ensure that the search bar in the UI remains populated even
/// when the search is made from the query URL.
///
/// Overall, this function serves to aggregate scraped results from user-selected search engines, handling errors,
/// removing duplicates, and organizing the data for display in the UI.
///
/// # Example:
///
/// If you search from a URL like `https://127.0.0.1/search?q=huston`, then the search bar should
/// contain the word `huston` and not remain empty.
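///
/// Below is a minimal sketch of how this function could be called. The crate paths and the
/// `EngineHandler::new` constructor used here are assumptions made for illustration and may not
/// match the crate's actual public API:
///
/// ```ignore
/// // Assumed module paths; adjust them to the crate's real layout.
/// use websurfx::engines::engine_models::EngineHandler;
/// use websurfx::results::aggregator::aggregate;
///
/// #[tokio::main]
/// async fn main() -> Result<(), Box<dyn std::error::Error>> {
///     // Assumed: `EngineHandler::new` builds a handler from an engine name.
///     let engines: Vec<EngineHandler> = vec![EngineHandler::new("duckduckgo").unwrap()];
///     // Query "huston" on page 1, without a random delay, with debug enabled and a
///     // 30 second request timeout.
///     let _results = aggregate("huston".to_string(), 1, false, true, engines, 30).await?;
///     Ok(())
/// }
/// ```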
///
/// # Arguments
///
/// * `query` - Accepts a string to query with the above upstream search engines.
/// * `page` - Accepts a u32 page number.
/// * `random_delay` - Accepts a boolean value to add a random delay before making the request.
/// * `debug` - Accepts a boolean value to enable or disable debug mode.
/// * `upstream_search_engines` - Accepts a vector of search engine handlers which were selected by
/// the user through the UI or the config file.
/// * `request_timeout` - Accepts a time (in seconds) as a value which controls the server request timeout.
///
/// # Error
///
/// Returns an error if a reqwest error or a scraping selector error occurs in the `results`
/// function of any of the chosen upstream engines (e.g. `searx` or `duckduckgo`); otherwise
/// returns a `SearchResults` struct containing the appropriate values.
pub async fn aggregate(
query: String,
page: u32,
random_delay: bool,
debug: bool,
upstream_search_engines: Vec<EngineHandler>,
request_timeout: u8,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
let user_agent: String = random_user_agent();
    // Add a random delay (1-9 seconds) before making the request when the random delay option
    // is enabled or when debug mode is disabled.
if random_delay || !debug {
let mut rng = rand::thread_rng();
let delay_secs = rng.gen_range(1..10);
tokio::time::sleep(Duration::from_secs(delay_secs)).await;
}
let mut names: Vec<&str> = vec![];
// create tasks for upstream result fetching
let mut tasks: FutureVec = FutureVec::new();
for engine_handler in upstream_search_engines {
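        // Split the handler into the engine's name and its underlying engine implementation.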
let (name, search_engine) = engine_handler.into_name_engine();
names.push(name);
let query: String = query.clone();
let user_agent: String = user_agent.clone();
tasks.push(tokio::spawn(async move {
search_engine
.results(query, page, user_agent.clone(), request_timeout)
.await
}));
}
// get upstream responses
let mut responses = Vec::with_capacity(tasks.len());
for task in tasks {
if let Ok(result) = task.await {
responses.push(result)
}
}
// aggregate search results, removing duplicates and handling errors the upstream engines returned
let mut result_map: HashMap<String, SearchResult> = HashMap::new();
let mut engine_errors_info: Vec<EngineErrorInfo> = Vec::new();
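    // Closure that logs an engine error and records the engine's name along with the error
    // information so that it can be displayed in the UI.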
let mut handle_error = |error: Report<EngineError>, engine_name: String| {
log::error!("Engine Error: {:?}", error);
engine_errors_info.push(EngineErrorInfo::new(
error.downcast_ref::<EngineError>().unwrap(),
engine_name.to_string(),
));
};
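    // Pop each response together with its engine name; both vectors were filled in the same
    // order, so popping from the back keeps them paired as long as every spawned task joined
    // successfully.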
for _ in 0..responses.len() {
let response = responses.pop().unwrap();
let engine = names.pop().unwrap().to_string();
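        // The first response that yields results seeds the result map directly; later responses
        // are merged into it below.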
if result_map.is_empty() {
match response {
Ok(results) => {
result_map = results.clone();
}
Err(error) => {
handle_error(error, engine);
}
}
continue;
}
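        // Merge results that share the same key: if an entry already exists, record that this
        // engine also returned it; otherwise insert the result as a new entry.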
match response {
Ok(result) => {
result.into_iter().for_each(|(key, value)| {
result_map
.entry(key)
.and_modify(|result| {
result.add_engines(engine.clone());
})
.or_insert_with(|| -> SearchResult { value });
});
}
Err(error) => {
handle_error(error, engine);
}
}
}
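    // Package the deduplicated results together with the query and any engine errors so the UI
    // can display them.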
let results = result_map.into_values().collect();
Ok(SearchResults::new(
results,
query.to_string(),
engine_errors_info,
))
}