Merge pull request #245 from neon-mmd/fix-hybrid-cache-implementation
Changed files:
- Cargo.lock (+19 −19)
- Cargo.toml (+1 −2)
- src/cache/cacher.rs (+155 −39)
- src/cache/error.rs (+7 −7)
- src/cache/redis_cacher.rs (+22 −7)
- src/config/parser.rs (+4 −2)
- src/models/server_models.rs (+7 −4)
- src/server/routes/search.rs (+16 −69)
- tests/index.rs (+1 −0)
Cargo.lock
CHANGED
@@ -115,7 +115,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb"
 dependencies = [
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -228,7 +228,7 @@ dependencies = [
  "actix-router",
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -326,7 +326,7 @@ checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -863,7 +863,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331"
 dependencies = [
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -1218,7 +1218,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -2093,7 +2093,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -2212,7 +2212,7 @@ dependencies = [
  "pest_meta",
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -2314,7 +2314,7 @@ dependencies = [
  "phf_shared 0.11.2",
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -2361,7 +2361,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -3005,7 +3005,7 @@ checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -3262,9 +3262,9 @@ dependencies = [

 [[package]]
 name = "syn"
-version = "2.0.…"
+version = "2.0.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "…"
+checksum = "91e02e55d62894af2a08aca894c6577281f76769ba47c94d5756bec8ac6e7373"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
@@ -3349,7 +3349,7 @@ checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -3509,7 +3509,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
 ]

 [[package]]
@@ -3678,9 +3678,9 @@ dependencies = [

 [[package]]
 name = "typenum"
-version = "1.…"
+version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "…"
+checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"

 [[package]]
 name = "ucd-trie"
@@ -3848,7 +3848,7 @@ dependencies = [
  "once_cell",
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
  "wasm-bindgen-shared",
 ]

@@ -3882,7 +3882,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
 dependencies = [
  "proc-macro2 1.0.67",
  "quote 1.0.33",
- "syn 2.0.…",
+ "syn 2.0.36",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]
@@ -3905,7 +3905,7 @@ dependencies = [

 [[package]]
 name = "websurfx"
-version = "0.…"
+version = "0.21.1"
 dependencies = [
  "actix-cors",
  "actix-files",
Cargo.toml
CHANGED
@@ -1,6 +1,6 @@
 [package]
 name = "websurfx"
-version = "0.…"
+version = "0.21.1"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"
@@ -71,4 +71,3 @@ default = ["memory-cache"]
 dhat-heap = ["dep:dhat"]
 memory-cache = ["dep:mini-moka"]
 redis-cache = ["dep:redis"]
-hybrid-cache = ["memory-cache", "redis-cache"]
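With the `hybrid-cache` alias removed from the feature table, the backend is now chosen purely by which features are compiled in: `memory-cache` stays in `default`, and adding `redis-cache` on top of it (for example `cargo build --features redis-cache`) selects the new hybrid path in `src/cache/cacher.rs`. A minimal sketch of that same selection logic expressed with the `cfg!` macro; the helper function is illustrative and not part of the crate:

/// Illustrative helper mirroring the cfg combinations used by `Cache::build`:
/// reports which cache backend the binary was compiled with.
fn compiled_cache_backend() -> &'static str {
    if cfg!(all(feature = "redis-cache", feature = "memory-cache")) {
        "hybrid (redis + in-memory)"
    } else if cfg!(feature = "redis-cache") {
        "redis"
    } else if cfg!(feature = "memory-cache") {
        "in-memory"
    } else {
        "disabled"
    }
}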
src/cache/cacher.rs
CHANGED
@@ -10,7 +10,7 @@ use tokio::sync::Mutex;

 use crate::{config::parser::Config, models::aggregation_models::SearchResults};

-use super::error::PoolError;
+use super::error::CacheError;
 #[cfg(feature = "redis-cache")]
 use super::redis_cacher::RedisCache;

@@ -19,46 +19,80 @@ use super::redis_cacher::RedisCache;
 pub enum Cache {
     /// Caching is disabled
     Disabled,
-    #[cfg(feature = "redis-cache")]
+    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
     /// Encapsulates the Redis based cache
     Redis(RedisCache),
-    #[cfg(feature = "memory-cache")]
+    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
     /// Contains the in-memory cache.
     InMemory(MokaCache<String, SearchResults>),
+    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
+    /// Contains both the in-memory cache and Redis based cache
+    Hybrid(RedisCache, MokaCache<String, SearchResults>),
 }

 impl Cache {
-    /// …
+    /// A function that builds the cache from the given configuration.
+    ///
+    /// # Arguments
+    ///
+    /// * `config` - It takes the config struct as an argument.
+    ///
+    /// # Returns
+    ///
+    /// It returns a newly initialized variant based on the feature enabled by the user.
     pub async fn build(_config: &Config) -> Self {
-        #[cfg(feature = "redis-cache")]
-        …
-        log::info!("Using …
-        …
-        RedisCache::new(…
+        #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
+        {
+            log::info!("Using a hybrid cache");
+            Cache::new_hybrid(
+                RedisCache::new(&_config.redis_url, 5)
                     .await
                     .expect("Redis cache configured"),
-        )
+            )
         }
-        #[cfg(feature = "memory-cache")]
+        #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
+        {
+            log::info!("Listening redis server on {}", &_config.redis_url);
+            Cache::new(
+                RedisCache::new(&_config.redis_url, 5)
+                    .await
+                    .expect("Redis cache configured"),
+            )
+        }
+        #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
         {
             log::info!("Using an in-memory cache");
-            …
+            Cache::new_in_memory()
         }
-        #[cfg(not(feature = "memory-cache"))]
+        #[cfg(not(any(feature = "memory-cache", feature = "redis-cache")))]
         {
             log::info!("Caching is disabled");
             Cache::Disabled
         }
     }

-    /// …
-    …
+    /// A function that initializes a new connection pool struct.
+    ///
+    /// # Arguments
+    ///
+    /// * `redis_cache` - It takes the newly initialized connection pool struct as an argument.
+    ///
+    /// # Returns
+    ///
+    /// It returns a `Redis` variant with the newly initialized connection pool struct.
+    #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
     pub fn new(redis_cache: RedisCache) -> Self {
         Cache::Redis(redis_cache)
     }

-    /// …
-    …
+    /// A function that initializes the `in memory` cache which is used to cache the results in
+    /// memory with the search engine thus improving performance by making retrieval and caching of
+    /// results faster.
+    ///
+    /// # Returns
+    ///
+    /// It returns a `InMemory` variant with the newly initialized in memory cache type.
+    #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
     pub fn new_in_memory() -> Self {
         let cache = MokaCache::builder()
             .max_capacity(1000)
@@ -67,24 +101,60 @@ impl Cache {
         Cache::InMemory(cache)
     }

+    /// A function that initializes both in memory cache and redis client connection for being used
+    /// for managing hybrid cache which increases resiliancy of the search engine by allowing the
+    /// cache to switch to `in memory` caching if the `redis` cache server is temporarily
+    /// unavailable.
+    ///
+    /// # Arguments
+    ///
+    /// * `redis_cache` - It takes `redis` client connection struct as an argument.
+    ///
+    /// # Returns
+    ///
+    /// It returns a tuple variant `Hybrid` storing both the in-memory cache type and the `redis`
+    /// client connection struct.
+    #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
+    pub fn new_hybrid(redis_cache: RedisCache) -> Self {
+        let cache = MokaCache::builder()
+            .max_capacity(1000)
+            .time_to_live(Duration::from_secs(60))
+            .build();
+        Cache::Hybrid(redis_cache, cache)
+    }
+
     /// A function which fetches the cached json results as json string.
     ///
     /// # Arguments
     ///
     /// * `url` - It takes an url as a string.
-    …
+    ///
+    /// # Error
+    ///
+    /// Returns the `SearchResults` from the cache if the program executes normally otherwise
+    /// returns a `CacheError` if the results cannot be retrieved from the cache.
+    pub async fn cached_json(&mut self, _url: &str) -> Result<SearchResults, Report<CacheError>> {
         match self {
-            Cache::Disabled => Err(Report::new(…
-            #[cfg(feature = "redis-cache")]
+            Cache::Disabled => Err(Report::new(CacheError::MissingValue)),
+            #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
             Cache::Redis(redis_cache) => {
-                let json = redis_cache.cached_json(…
+                let json = redis_cache.cached_json(_url).await?;
                 Ok(serde_json::from_str::<SearchResults>(&json)
-                    .map_err(|_| …
+                    .map_err(|_| CacheError::SerializationError)?)
             }
-            #[cfg(feature = "memory-cache")]
-            Cache::InMemory(in_memory) => match in_memory.get(&…
+            #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
+            Cache::InMemory(in_memory) => match in_memory.get(&_url.to_string()) {
                 Some(res) => Ok(res),
-                None => Err(Report::new(…
+                None => Err(Report::new(CacheError::MissingValue)),
+            },
+            #[cfg(all(feature = "redis-cache", feature = "memory-cache"))]
+            Cache::Hybrid(redis_cache, in_memory) => match redis_cache.cached_json(_url).await {
+                Ok(res) => Ok(serde_json::from_str::<SearchResults>(&res)
+                    .map_err(|_| CacheError::SerializationError)?),
+                Err(_) => match in_memory.get(&_url.to_string()) {
+                    Some(res) => Ok(res),
+                    None => Err(Report::new(CacheError::MissingValue)),
+                },
             },
         }
     }
@@ -96,24 +166,42 @@ impl Cache {
     ///
     /// * `json_results` - It takes the json results string as an argument.
     /// * `url` - It takes the url as a String.
+    ///
+    /// # Error
+    ///
+    /// Returns a unit type if the program caches the given search results without a failure
+    /// otherwise it returns a `CacheError` if the search results cannot be cached due to a
+    /// failure.
     pub async fn cache_results(
         &mut self,
-        …
-        …
-    ) -> Result<(), Report<PoolError>> {
+        _search_results: &SearchResults,
+        _url: &str,
+    ) -> Result<(), Report<CacheError>> {
         match self {
             Cache::Disabled => Ok(()),
-            #[cfg(feature = "redis-cache")]
+            #[cfg(all(feature = "redis-cache", not(feature = "memory-cache")))]
             Cache::Redis(redis_cache) => {
-                let json = serde_json::to_string(…
-                    .map_err(|_| …
-                redis_cache.cache_results(&json, …
+                let json = serde_json::to_string(_search_results)
+                    .map_err(|_| CacheError::SerializationError)?;
+                redis_cache.cache_results(&json, _url).await
             }
-            #[cfg(feature = "memory-cache")]
+            #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
             Cache::InMemory(cache) => {
-                cache.insert(…
+                cache.insert(_url.to_string(), _search_results.clone());
                 Ok(())
             }
+            #[cfg(all(feature = "memory-cache", feature = "redis-cache"))]
+            Cache::Hybrid(redis_cache, cache) => {
+                let json = serde_json::to_string(_search_results)
+                    .map_err(|_| CacheError::SerializationError)?;
+                match redis_cache.cache_results(&json, _url).await {
+                    Ok(_) => Ok(()),
+                    Err(_) => {
+                        cache.insert(_url.to_string(), _search_results.clone());
+                        Ok(())
+                    }
+                }
+            }
         }
     }
 }
@@ -125,26 +213,54 @@ pub struct SharedCache {
 }

 impl SharedCache {
-    /// …
+    /// A function that creates a new `SharedCache` from a Cache implementation.
+    ///
+    /// # Arguments
+    ///
+    /// * `cache` - It takes the `Cache` enum variant as an argument with the prefered cache type.
+    ///
+    /// Returns a newly constructed `SharedCache` struct.
     pub fn new(cache: Cache) -> Self {
         Self {
             cache: Mutex::new(cache),
         }
     }

-    /// A function which retrieves the cached SearchResulsts from the internal cache.
-    …
+    /// A getter function which retrieves the cached SearchResulsts from the internal cache.
+    ///
+    /// # Arguments
+    ///
+    /// * `url` - It takes the search url as an argument which will be used as the key to fetch the
+    /// cached results from the cache.
+    ///
+    /// # Error
+    ///
+    /// Returns a `SearchResults` struct containing the search results from the cache if nothing
+    /// goes wrong otherwise returns a `CacheError`.
+    pub async fn cached_json(&self, url: &str) -> Result<SearchResults, Report<CacheError>> {
         let mut mut_cache = self.cache.lock().await;
         mut_cache.cached_json(url).await
     }

-    /// A function which caches the results by using the `url` as the key and
+    /// A setter function which caches the results by using the `url` as the key and
     /// `SearchResults` as the value.
+    ///
+    /// # Arguments
+    ///
+    /// * `search_results` - It takes the `SearchResults` as an argument which are results that
+    /// needs to be cached.
+    /// * `url` - It takes the search url as an argument which will be used as the key for storing
+    /// results in the cache.
+    ///
+    /// # Error
+    ///
+    /// Returns an unit type if the results are cached succesfully otherwise returns a `CacheError`
+    /// on a failure.
     pub async fn cache_results(
         &self,
         search_results: &SearchResults,
         url: &str,
-    ) -> Result<(), Report<PoolError>> {
+    ) -> Result<(), Report<CacheError>> {
         let mut mut_cache = self.cache.lock().await;
         mut_cache.cache_results(search_results, url).await
     }
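Taken together, the new `Hybrid` variant writes to Redis first and silently falls back to the in-memory Moka cache whenever Redis is unreachable, and reads follow the same order. A minimal usage sketch of the public surface added above, assuming a Tokio runtime and a parsed `Config`; the URL value is illustrative only:

use websurfx::cache::cacher::{Cache, SharedCache};
use websurfx::config::parser::Config;
use websurfx::models::aggregation_models::SearchResults;

// Illustrative only: build whichever Cache variant the enabled features select,
// wrap it for shared use across handlers, and round-trip one result set.
async fn demo(config: &Config, results: &SearchResults) {
    let cache = SharedCache::new(Cache::build(config).await);
    let url = "http://127.0.0.1:8080/search?q=rust&page=0";

    // With the Hybrid variant this caches in Redis when it is reachable and
    // falls back to the in-memory cache otherwise.
    if let Err(report) = cache.cache_results(results, url).await {
        eprintln!("caching failed: {report:?}");
    }

    // Reads try Redis first and then the in-memory cache before reporting a miss.
    match cache.cached_json(url).await {
        Ok(_cached) => println!("cache hit for {url}"),
        Err(_) => println!("cache miss for {url}"),
    }
}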
src/cache/error.rs
CHANGED
@@ -7,7 +7,7 @@ use redis::RedisError;

 /// A custom error type used for handling redis async pool associated errors.
 #[derive(Debug)]
-pub enum PoolError {
+pub enum CacheError {
     /// This variant handles all errors related to `RedisError`,
     #[cfg(feature = "redis-cache")]
     RedisError(RedisError),
@@ -20,31 +20,31 @@ pub enum PoolError {
     MissingValue,
 }

-impl fmt::Display for PoolError {
+impl fmt::Display for CacheError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             #[cfg(feature = "redis-cache")]
-            PoolError::RedisError(redis_error) => {
+            CacheError::RedisError(redis_error) => {
                 if let Some(detail) = redis_error.detail() {
                     write!(f, "{}", detail)
                 } else {
                     write!(f, "")
                 }
             }
-            PoolError::PoolExhaustionWithConnectionDropError => {
+            CacheError::PoolExhaustionWithConnectionDropError => {
                 write!(
                     f,
                     "Error all connections from the pool dropped with connection error"
                 )
             }
-            PoolError::MissingValue => {
+            CacheError::MissingValue => {
                 write!(f, "The value is missing from the cache")
             }
-            PoolError::SerializationError => {
+            CacheError::SerializationError => {
                 write!(f, "Unable to serialize, deserialize from the cache")
             }
         }
     }
 }

-impl error_stack::Context for PoolError {}
+impl error_stack::Context for CacheError {}
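Renaming `PoolError` to `CacheError` reflects that the same type now covers cache misses and serialization failures as well as Redis pool errors. A small sketch of how a caller can surface it through `error_stack::Report`, using only the variants shown above; the attached message is illustrative:

use error_stack::Report;
use websurfx::cache::error::CacheError;

// Illustrative only: a cache miss reported the same way the cacher does it.
fn lookup(found: bool) -> Result<&'static str, Report<CacheError>> {
    if found {
        Ok("{\"results\":[]}")
    } else {
        Err(Report::new(CacheError::MissingValue)
            .attach_printable("no cached entry for the requested url"))
    }
}

fn main() {
    if let Err(report) = lookup(false) {
        // Debug-printing the report shows the Display message of the context
        // ("The value is missing from the cache") plus the attachment.
        eprintln!("{report:?}");
    }
}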
src/cache/redis_cacher.rs
CHANGED
@@ -6,7 +6,7 @@ use futures::future::try_join_all;
 use md5::compute;
 use redis::{aio::ConnectionManager, AsyncCommands, Client, RedisError};

-use super::error::PoolError;
+use super::error::CacheError;

 /// A named struct which stores the redis Connection url address to which the client will
 /// connect to.
@@ -29,6 +29,11 @@ impl RedisCache {
     /// * `redis_connection_url` - It takes the redis Connection url address.
     /// * `pool_size` - It takes the size of the connection pool (in other words the number of
     /// connections that should be stored in the pool).
+    ///
+    /// # Error
+    ///
+    /// Returns a newly constructed `RedisCache` struct on success otherwise returns a standard
+    /// error type.
     pub async fn new(
         redis_connection_url: &str,
         pool_size: u8,
@@ -62,7 +67,12 @@ impl RedisCache {
     /// # Arguments
     ///
     /// * `url` - It takes an url as a string.
-    …
+    ///
+    /// # Error
+    ///
+    /// Returns the results as a String from the cache on success otherwise returns a `CacheError`
+    /// on a failure.
+    pub async fn cached_json(&mut self, url: &str) -> Result<String, Report<CacheError>> {
         self.current_connection = Default::default();
         let hashed_url_string: &str = &self.hash_url(url);

@@ -85,7 +95,7 @@ impl RedisCache {
                     self.current_connection += 1;
                     if self.current_connection == self.pool_size {
                         return Err(Report::new(
-                            PoolError::PoolExhaustionWithConnectionDropError,
+                            CacheError::PoolExhaustionWithConnectionDropError,
                         ));
                     }
                     result = self.connection_pool[self.current_connection as usize]
@@ -93,7 +103,7 @@ impl RedisCache {
                         .await;
                     continue;
                 }
-                false => return Err(Report::new(PoolError::RedisError(error))),
+                false => return Err(Report::new(CacheError::RedisError(error))),
             },
             Ok(res) => return Ok(res),
         }
@@ -108,11 +118,16 @@ impl RedisCache {
     ///
     /// * `json_results` - It takes the json results string as an argument.
     /// * `url` - It takes the url as a String.
+    ///
+    /// # Error
+    ///
+    /// Returns an unit type if the results are cached succesfully otherwise returns a `CacheError`
+    /// on a failure.
     pub async fn cache_results(
         &mut self,
         json_results: &str,
         url: &str,
-    ) -> Result<(), Report<PoolError>> {
+    ) -> Result<(), Report<CacheError>> {
         self.current_connection = Default::default();
         let hashed_url_string: &str = &self.hash_url(url);

@@ -135,7 +150,7 @@ impl RedisCache {
                     self.current_connection += 1;
                     if self.current_connection == self.pool_size {
                         return Err(Report::new(
-                            PoolError::PoolExhaustionWithConnectionDropError,
+                            CacheError::PoolExhaustionWithConnectionDropError,
                         ));
                     }
                     result = self.connection_pool[self.current_connection as usize]
@@ -143,7 +158,7 @@ impl RedisCache {
                         .await;
                     continue;
                 }
-                false => return Err(Report::new(PoolError::RedisError(error))),
+                false => return Err(Report::new(CacheError::RedisError(error))),
             },
             Ok(_) => return Ok(()),
         }
src/config/parser.rs
CHANGED
@@ -17,9 +17,10 @@ pub struct Config {
     pub binding_ip: String,
     /// It stores the theming options for the website.
     pub style: Style,
+    #[cfg(feature = "redis-cache")]
     /// It stores the redis connection url address on which the redis
     /// client should connect.
-    pub redis_url: …,
+    pub redis_url: String,
     /// It stores the option to whether enable or disable production use.
     pub aggregator: AggregatorConfig,
     /// It stores the option to whether enable or disable logs.
@@ -99,7 +100,8 @@ impl Config {
                 globals.get::<_, String>("theme")?,
                 globals.get::<_, String>("colorscheme")?,
             ),
-            redis_url: …,
+            #[cfg(feature = "redis-cache")]
+            redis_url: globals.get::<_, String>("redis_url")?,
             aggregator: AggregatorConfig {
                 random_delay: globals.get::<_, bool>("production_use")?,
             },
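Because `redis_url` now only exists on `Config` when the `redis-cache` feature is enabled, every consumer of the field has to be gated on the same feature, exactly as `Cache::build` is above. A one-function sketch of that pattern; the helper is illustrative and not part of the PR:

use websurfx::config::parser::Config;

// Without the matching cfg, a build without `redis-cache` would fail with
// "no field `redis_url` on type `Config`".
#[cfg(feature = "redis-cache")]
fn redis_endpoint(config: &Config) -> &str {
    &config.redis_url
}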
src/models/server_models.rs
CHANGED
@@ -11,16 +11,19 @@ pub struct SearchParams {
     /// It stores the search parameter `page` (or pageno in simple words)
     /// of the search url.
     pub page: Option<u32>,
+    /// It stores the search parameter `safesearch` (or safe search level in simple words) of the
+    /// search url.
+    pub safesearch: Option<u8>,
 }

 /// A named struct which is used to deserialize the cookies fetched from the client side.
 #[allow(dead_code)]
 #[derive(Deserialize)]
-pub struct Cookie {
+pub struct Cookie<'a> {
     /// It stores the theme name used in the website.
-    pub theme: …,
+    pub theme: &'a str,
     /// It stores the colorscheme name used for the website theme.
-    pub colorscheme: …,
+    pub colorscheme: &'a str,
     /// It stores the user selected upstream search engines selected from the UI.
-    pub engines: Vec<…>,
+    pub engines: Vec<&'a str>,
 }
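Moving `Cookie` into `server_models` also switches its fields from owned values to borrowed `&'a str`, so deserializing it can reuse the cookie's JSON buffer instead of allocating. A self-contained sketch of deserializing such a borrowed struct with `serde_json`; the field values are illustrative:

use serde::Deserialize;

#[derive(Deserialize)]
struct Cookie<'a> {
    theme: &'a str,
    colorscheme: &'a str,
    engines: Vec<&'a str>,
}

fn main() -> Result<(), serde_json::Error> {
    // The borrowed fields point into `raw`, so `raw` must outlive `cookie`.
    let raw = r#"{"theme":"simple","colorscheme":"catppuccin-mocha","engines":["searx","duckduckgo"]}"#;
    let cookie: Cookie = serde_json::from_str(raw)?;
    println!("{} / {} ({} engines)", cookie.theme, cookie.colorscheme, cookie.engines.len());
    Ok(())
}

One caveat with zero-copy fields: if a JSON string contains escape sequences, `serde_json` cannot hand out a plain `&str` slice and returns an error instead, so cookies with escaped characters would need owned `String` or `Cow` fields.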
src/server/routes/search.rs
CHANGED
@@ -4,43 +4,22 @@ use crate::{
     cache::cacher::SharedCache,
     config::parser::Config,
     handler::paths::{file_path, FileType},
-    models::{…},
+    models::{
+        aggregation_models::SearchResults,
+        engine_models::EngineHandler,
+        server_models::{Cookie, SearchParams},
+    },
     results::aggregator::aggregate,
 };
 use actix_web::{get, web, HttpRequest, HttpResponse};
 use handlebars::Handlebars;
 use regex::Regex;
-use serde::Deserialize;
 use std::{
-    fs::…,
+    fs::File,
     io::{BufRead, BufReader, Read},
 };
 use tokio::join;

-/// A named struct which deserializes all the user provided search parameters and stores them.
-#[derive(Deserialize)]
-pub struct SearchParams {
-    /// It stores the search parameter option `q` (or query in simple words)
-    /// of the search url.
-    q: Option<String>,
-    /// It stores the search parameter `page` (or pageno in simple words)
-    /// of the search url.
-    page: Option<u32>,
-    /// It stores the search parameter `safesearch` (or safe search level in simple words) of the
-    /// search url.
-    safesearch: Option<u8>,
-}
-
-/// Handles the route of index page or main page of the `websurfx` meta search engine website.
-#[get("/")]
-pub async fn index(
-    hbs: web::Data<Handlebars<'_>>,
-    config: web::Data<Config>,
-) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String = hbs.render("index", &config.style).unwrap();
-    Ok(HttpResponse::Ok().body(page_content))
-}
-
 /// Handles the route of any other accessed route/page which is not provided by the
 /// website essentially the 404 error page.
 pub async fn not_found(
@@ -54,18 +33,6 @@ pub async fn not_found(
         .body(page_content))
 }

-/// A named struct which is used to deserialize the cookies fetched from the client side.
-#[allow(dead_code)]
-#[derive(Deserialize)]
-struct Cookie<'a> {
-    /// It stores the theme name used in the website.
-    theme: &'a str,
-    /// It stores the colorscheme name used for the website theme.
-    colorscheme: &'a str,
-    /// It stores the user selected upstream search engines selected from the UI.
-    engines: Vec<&'a str>,
-}
-
 /// Handles the route of search page of the `websurfx` meta search engine website and it takes
 /// two search url parameters `q` and `page` where `page` parameter is optional.
 ///
@@ -264,6 +231,16 @@ async fn results(

 /// A helper function which checks whether the search query contains any keywords which should be
 /// disallowed/allowed based on the regex based rules present in the blocklist and allowlist files.
+///
+/// # Arguments
+///
+/// * `file_path` - It takes the file path of the list as the argument.
+/// * `query` - It takes the search query to be checked against the list as an argument.
+///
+/// # Error
+///
+/// Returns a bool indicating whether the results were found in the list or not on success
+/// otherwise returns a standard error type on a failure.
 fn is_match_from_filter_list(
     file_path: &str,
     query: &str,
@@ -279,33 +256,3 @@ fn is_match_from_filter_list(
     }
     Ok(flag)
 }
-
-/// Handles the route of robots.txt page of the `websurfx` meta search engine website.
-#[get("/robots.txt")]
-pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String =
-        read_to_string(format!("{}/robots.txt", file_path(FileType::Theme)?))?;
-    Ok(HttpResponse::Ok()
-        .content_type("text/plain; charset=ascii")
-        .body(page_content))
-}
-
-/// Handles the route of about page of the `websurfx` meta search engine website.
-#[get("/about")]
-pub async fn about(
-    hbs: web::Data<Handlebars<'_>>,
-    config: web::Data<Config>,
-) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String = hbs.render("about", &config.style)?;
-    Ok(HttpResponse::Ok().body(page_content))
-}
-
-/// Handles the route of settings page of the `websurfx` meta search engine website.
-#[get("/settings")]
-pub async fn settings(
-    hbs: web::Data<Handlebars<'_>>,
-    config: web::Data<Config>,
-) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String = hbs.render("settings", &config.style)?;
-    Ok(HttpResponse::Ok().body(page_content))
-}
tests/index.rs
CHANGED
@@ -12,6 +12,7 @@ fn spawn_app() -> String {
     let server = run(
         listener,
         config,
+        #[cfg(all(feature = "memory-cache", not(feature = "redis-cache")))]
         websurfx::cache::cacher::Cache::new_in_memory(),
     )
     .expect("Failed to bind address");