XFFXFF committed on
Commit
0527288
1 Parent(s): 018f925

supports the option to add a random delay

Browse files
src/config_parser/parser.rs CHANGED
@@ -20,6 +20,14 @@ pub struct Config {
20
  pub binding_ip_addr: String,
21
  pub style: Style,
22
  pub redis_connection_url: String,
 
 
 
 
 
 
 
 
23
  }
24
 
25
  impl Config {
@@ -41,6 +49,8 @@ impl Config {
41
  .load(&fs::read_to_string("./websurfx/config.lua")?)
42
  .exec()?;
43
 
 
 
44
  Ok(Config {
45
  port: globals.get::<_, u16>("port")?,
46
  binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
@@ -49,6 +59,9 @@ impl Config {
49
  globals.get::<_, String>("colorscheme")?,
50
  ),
51
  redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
 
 
 
52
  })
53
  })
54
  }
 
20
  pub binding_ip_addr: String,
21
  pub style: Style,
22
  pub redis_connection_url: String,
23
+ pub aggregator: AggreatorConfig,
24
+ }
25
+
26
+ /// Configuration options for the aggregator.
27
+ #[derive(Clone)]
28
+ pub struct AggreatorConfig {
29
+ /// Whether to introduce a random delay before sending the request to the search engine.
30
+ pub random_delay: bool,
31
  }
32
 
33
  impl Config {
 
49
  .load(&fs::read_to_string("./websurfx/config.lua")?)
50
  .exec()?;
51
 
52
+ let aggregator_config = globals.get::<_, rlua::Table>("aggregator")?;
53
+
54
  Ok(Config {
55
  port: globals.get::<_, u16>("port")?,
56
  binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
 
59
  globals.get::<_, String>("colorscheme")?,
60
  ),
61
  redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
62
+ aggregator: AggreatorConfig {
63
+ random_delay: aggregator_config.get::<_, bool>("random_delay")?,
64
+ },
65
  })
66
  })
67
  }
src/search_results_handler/aggregator.rs CHANGED
@@ -29,6 +29,7 @@ use crate::engines::{duckduckgo, searx};
29
  ///
30
  /// * `query` - Accepts a string to query with the above upstream search engines.
31
  /// * `page` - Accepts a u32 page number.
 
32
  ///
33
  /// # Error
34
  ///
@@ -38,14 +39,17 @@ use crate::engines::{duckduckgo, searx};
38
  pub async fn aggregate(
39
  query: &str,
40
  page: u32,
 
41
  ) -> Result<SearchResults, Box<dyn std::error::Error>> {
42
  let user_agent: String = random_user_agent();
43
  let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
44
 
45
  // Add a random delay before making the request.
46
- let mut rng = rand::thread_rng();
47
- let delay_secs = rng.gen_range(1..10);
48
- std::thread::sleep(Duration::from_secs(delay_secs));
 
 
49
 
50
  // fetch results from upstream search engines simultaneously/concurrently.
51
  let (ddg_map_results, searx_map_results) = join!(
 
29
  ///
30
  /// * `query` - Accepts a string to query with the above upstream search engines.
31
  /// * `page` - Accepts a u32 page number.
32
+ /// * `random_delay` - Accepts a boolean value to add a random delay before making the request.
33
  ///
34
  /// # Error
35
  ///
 
39
  pub async fn aggregate(
40
  query: &str,
41
  page: u32,
42
+ random_delay: bool,
43
  ) -> Result<SearchResults, Box<dyn std::error::Error>> {
44
  let user_agent: String = random_user_agent();
45
  let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
46
 
47
  // Add a random delay before making the request.
48
+ if random_delay {
49
+ let mut rng = rand::thread_rng();
50
+ let delay_secs = rng.gen_range(1..10);
51
+ std::thread::sleep(Duration::from_secs(delay_secs));
52
+ }
53
 
54
  // fetch results from upstream search engines simultaneously/concurrently.
55
  let (ddg_map_results, searx_map_results) = join!(
src/server/routes.rs CHANGED
@@ -127,7 +127,7 @@ pub async fn search(
127
  }
128
  Err(_) => {
129
  let mut results_json: crate::search_results_handler::aggregation_models::SearchResults =
130
- aggregate(query, page).await?;
131
  results_json.add_style(config.style.clone());
132
  redis_cache
133
  .cache_results(serde_json::to_string(&results_json)?, &page_url)?;
 
127
  }
128
  Err(_) => {
129
  let mut results_json: crate::search_results_handler::aggregation_models::SearchResults =
130
+ aggregate(query, page, config.aggregator.random_delay).await?;
131
  results_json.add_style(config.style.clone());
132
  redis_cache
133
  .cache_results(serde_json::to_string(&results_json)?, &page_url)?;
websurfx/config.lua CHANGED
@@ -19,3 +19,8 @@ theme = "simple" -- the theme name which should be used for the website
19
 
20
  -- Caching
21
  redis_connection_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
 
 
 
 
 
 
19
 
20
  -- Caching
21
  redis_connection_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
22
+
23
+ -- Aggregator
24
+ aggregator = {
25
+ random_delay = false, -- whether to add a random delay before sending the request to the search engine
26
+ }