neon_arch committed
Commit 8904f34
Parents: 018f925 ea013e7

Merge pull request #40 from XFFXFF/random_delay


feat: support the option to choose whether to enable or disable production mode.

src/config_parser/parser.rs CHANGED
@@ -20,6 +20,14 @@ pub struct Config {
     pub binding_ip_addr: String,
     pub style: Style,
     pub redis_connection_url: String,
+    pub aggregator: AggreatorConfig,
+}
+
+/// Configuration options for the aggregator.
+#[derive(Clone)]
+pub struct AggreatorConfig {
+    /// Whether to introduce a random delay before sending the request to the search engine.
+    pub random_delay: bool,
 }
 
 impl Config {
@@ -41,6 +49,15 @@ impl Config {
             .load(&fs::read_to_string("./websurfx/config.lua")?)
             .exec()?;
 
+        let production_use = globals.get::<_, bool>("production_use")?;
+        let aggregator_config = if production_use {
+            AggreatorConfig { random_delay: true }
+        } else {
+            AggreatorConfig {
+                random_delay: false,
+            }
+        };
+
         Ok(Config {
             port: globals.get::<_, u16>("port")?,
             binding_ip_addr: globals.get::<_, String>("binding_ip_addr")?,
@@ -49,6 +66,7 @@ impl Config {
                 globals.get::<_, String>("colorscheme")?,
             ),
             redis_connection_url: globals.get::<_, String>("redis_connection_url")?,
+            aggregator: aggregator_config,
         })
     })
     }
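
A side note on the construction above: the two if/else arms differ only in the boolean, so the flag can be assigned to the field directly. A minimal standalone sketch of the equivalent, more compact form (same struct as in the hunk; the literal stands in for the value read from config.lua):

#[derive(Clone)]
pub struct AggreatorConfig {
    pub random_delay: bool,
}

fn main() {
    // Stand-in for globals.get::<_, bool>("production_use")?.
    let production_use = false;

    // Equivalent to the if/else in the hunk above: the flag maps
    // one-to-one onto the field, so it can be assigned directly.
    let aggregator_config = AggreatorConfig {
        random_delay: production_use,
    };

    assert!(!aggregator_config.random_delay);
}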
src/search_results_handler/aggregator.rs CHANGED
@@ -29,6 +29,7 @@ use crate::engines::{duckduckgo, searx};
 ///
 /// * `query` - Accepts a string to query with the above upstream search engines.
 /// * `page` - Accepts an u32 page number.
+/// * `random_delay` - Accepts a boolean value to add a random delay before making the request.
 ///
 /// # Error
 ///
@@ -38,14 +39,17 @@ use crate::engines::{duckduckgo, searx};
 pub async fn aggregate(
     query: &str,
     page: u32,
+    random_delay: bool,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     let user_agent: String = random_user_agent();
     let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
 
     // Add a random delay before making the request.
-    let mut rng = rand::thread_rng();
-    let delay_secs = rng.gen_range(1..10);
-    std::thread::sleep(Duration::from_secs(delay_secs));
+    if random_delay {
+        let mut rng = rand::thread_rng();
+        let delay_secs = rng.gen_range(1..10);
+        std::thread::sleep(Duration::from_secs(delay_secs));
+    }
 
     // fetch results from upstream search engines simultaneously/concurrently.
     let (ddg_map_results, searx_map_results) = join!(
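
Two details of the delay logic are worth noting: gen_range(1..10) uses a half-open range, so the pause is 1 to 9 whole seconds inclusive, and std::thread::sleep blocks the OS thread rather than yielding to the async runtime. A standalone sketch isolating that logic (not part of this commit):

use std::time::Duration;

use rand::Rng;

// Same sampling as the hunk above: gen_range(1..10) draws from the
// half-open range [1, 10), i.e. a delay of 1..=9 seconds.
fn random_delay_duration() -> Duration {
    let mut rng = rand::thread_rng();
    let delay_secs = rng.gen_range(1..10);
    Duration::from_secs(delay_secs)
}

fn main() {
    let delay = random_delay_duration();
    println!("sleeping for {} second(s)", delay.as_secs());
    // Inside the async aggregate() this call blocks the executor thread
    // for the whole delay; an async sleep (e.g. tokio::time::sleep)
    // would yield instead. That trade-off is outside this commit's scope.
    std::thread::sleep(delay);
}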
src/server/routes.rs CHANGED
@@ -127,7 +127,7 @@ pub async fn search(
         }
         Err(_) => {
             let mut results_json: crate::search_results_handler::aggregation_models::SearchResults =
-                aggregate(query, page).await?;
+                aggregate(query, page, config.aggregator.random_delay).await?;
             results_json.add_style(config.style.clone());
             redis_cache
                 .cache_results(serde_json::to_string(&results_json)?, &page_url)?;
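
For orientation, the flag now flows config.lua -> Config::parse -> the route handler -> aggregate. A self-contained sketch tracing that call chain with stand-in types (the project's real modules and signatures are simplified here; only the threading of random_delay matches the commit):

// Stand-in types: the real Config and aggregate live in the crate's
// modules and do real work; this only traces the flag's path.
#[derive(Clone)]
struct AggreatorConfig {
    random_delay: bool,
}

struct Config {
    aggregator: AggreatorConfig,
}

async fn aggregate(query: &str, page: u32, random_delay: bool) -> String {
    // The real function would optionally sleep here, then query the engines.
    format!("results for '{query}' (page {page}, delayed: {random_delay})")
}

#[tokio::main]
async fn main() {
    // In websurfx this comes from Config::parse() reading config.lua.
    let config = Config {
        aggregator: AggreatorConfig { random_delay: false },
    };
    let results = aggregate("rust", 1, config.aggregator.random_delay).await;
    println!("{results}");
}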
websurfx/config.lua CHANGED
@@ -19,3 +19,7 @@ theme = "simple" -- the theme name which should be used for the website
 
 -- Caching
 redis_connection_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
+
+production_use = false -- whether to use production mode or not (in other words this option should be used if it is to be used to host it on the server to provide a service to a large number of users)
+-- if production_use is set to true
+-- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.