Zsombor Gegesy committed
Commit: 76795c4
Parent(s): 320f5f4

Make the cache types compile-time feature flags too, and make it more configurable!

Files changed:
- Cargo.toml +5 -2
- src/bin/websurfx.rs +3 -11
- src/cache/cacher.rs +37 -2
- src/cache/error.rs +3 -0
- src/cache/mod.rs +1 -0
- src/config/parser.rs +6 -0
- websurfx/config.lua +1 -0
Cargo.toml
CHANGED

@@ -20,7 +20,7 @@ fake-useragent = {version="0.1.3"}
 env_logger = {version="0.10.0"}
 log = {version="0.4.20"}
 mlua = {version="0.8.10", features=["luajit"]}
-redis = {version="0.23.3", features=["tokio-comp","connection-manager"]}
+redis = {version="0.23.3", features=["tokio-comp","connection-manager"], optional = true}
 md5 = {version="0.7.0"}
 rand={version="0.8.5"}
 once_cell = {version="1.18.0"}
@@ -33,7 +33,7 @@ dhat = {version="0.3.2", optional = true}
 mimalloc = { version = "0.1.38", default-features = false }
 async-once-cell = {version="0.5.3"}
 actix-governor = {version="0.4.1"}
-mini-moka = "0.10"
+mini-moka = { version="0.10", optional = true}
 
 [dev-dependencies]
 rusty-hook = "^0.11.2"
@@ -67,4 +67,7 @@ rpath = false
 strip = "debuginfo"
 
 [features]
+default = ["in_memory_cache", "redis"]
 dhat-heap = ["dep:dhat"]
+in_memory_cache = ["dep:mini-moka"]
+redis = ["dep:redis"]
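With both backends now optional dependencies behind features, the cache implementation can be selected at compile time. A minimal sketch of the corresponding build invocations (standard Cargo flags, not commands taken from this repository's docs):

# default: both cache backends are compiled in
cargo build --release

# Redis-only binary, leaving mini-moka out entirely
cargo build --release --no-default-features --features redis

# in-memory-only binary, dropping the redis dependency
cargo build --release --no-default-features --features in_memory_cache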
src/bin/websurfx.rs
CHANGED

@@ -5,9 +5,7 @@
 
 use mimalloc::MiMalloc;
 use std::net::TcpListener;
-use websurfx::{
-    cache::cacher::Cache, cache::redis_cacher::RedisCache, config::parser::Config, run,
-};
+use websurfx::{cache::cacher::Cache, config::parser::Config, run};
 
 /// A dhat heap memory profiler
 #[cfg(feature = "dhat-heap")]
@@ -32,14 +30,8 @@ async fn main() -> std::io::Result<()> {
 
     // Initialize the parsed config file.
     let config = Config::parse(false).unwrap();
-    let cache = match &config.redis_url {
-        Some(url) => Cache::new(
-            RedisCache::new(url, 5)
-                .await
-                .expect("Redis cache configured"),
-        ),
-        None => Cache::new_in_memory(),
-    };
+
+    let cache = Cache::build(&config).await;
 
     log::info!(
         "started server on port {} and IP {}",
src/cache/cacher.rs
CHANGED

@@ -2,30 +2,59 @@
 //! from the upstream search engines in a json format.
 
 use error_stack::Report;
+#[cfg(feature = "in_memory_cache")]
 use mini_moka::sync::Cache as MokaCache;
 use std::time::Duration;
 use tokio::sync::Mutex;
 
-use crate::results::aggregation_models::SearchResults;
+use crate::{config::parser::Config, results::aggregation_models::SearchResults};
 
-use super::{error::PoolError, redis_cacher::RedisCache};
+use super::error::PoolError;
+#[cfg(feature = "redis")]
+use super::redis_cacher::RedisCache;
 
 /// Different implementations for caching, currently it is possible to cache in-memory or in Redis.
 #[derive(Clone)]
 pub enum Cache {
+    /// Caching is disabled
+    Disabled,
+    #[cfg(feature = "redis")]
     /// Encapsulates the Redis based cache
     Redis(RedisCache),
+    #[cfg(feature = "in_memory_cache")]
     /// Contains the in-memory cache.
     InMemory(MokaCache<String, SearchResults>),
 }
 
 impl Cache {
+    /// Builds the cache from the given configuration.
+    pub async fn build(config: &Config) -> Self {
+        #[cfg(feature = "redis")]
+        if let Some(url) = &config.redis_url {
+            log::info!("Using Redis running at {} for caching", &url);
+            return Cache::new(
+                RedisCache::new(url, 5)
+                    .await
+                    .expect("Redis cache configured"),
+            );
+        }
+        #[cfg(feature = "in_memory_cache")]
+        if config.in_memory_cache {
+            log::info!("Using an in-memory cache");
+            return Cache::new_in_memory();
+        }
+        log::info!("Caching is disabled");
+        Cache::Disabled
+    }
+
     /// Creates a new cache, which wraps the given RedisCache.
+    #[cfg(feature = "redis")]
     pub fn new(redis_cache: RedisCache) -> Self {
         Cache::Redis(redis_cache)
     }
 
     /// Creates an in-memory cache
+    #[cfg(feature = "in_memory_cache")]
     pub fn new_in_memory() -> Self {
         let cache = MokaCache::builder()
             .max_capacity(1000)
@@ -41,11 +70,14 @@ impl Cache {
     /// * `url` - It takes an url as a string.
     pub async fn cached_json(&mut self, url: &str) -> Result<SearchResults, Report<PoolError>> {
         match self {
+            Cache::Disabled => Err(Report::new(PoolError::MissingValue)),
+            #[cfg(feature = "redis")]
             Cache::Redis(redis_cache) => {
                 let json = redis_cache.cached_json(url).await?;
                 Ok(serde_json::from_str::<SearchResults>(&json)
                     .map_err(|_| PoolError::SerializationError)?)
             }
+            #[cfg(feature = "in_memory_cache")]
             Cache::InMemory(in_memory) => match in_memory.get(&url.to_string()) {
                 Some(res) => Ok(res),
                 None => Err(Report::new(PoolError::MissingValue)),
@@ -66,11 +98,14 @@ impl Cache {
         url: &str,
     ) -> Result<(), Report<PoolError>> {
         match self {
+            Cache::Disabled => Ok(()),
+            #[cfg(feature = "redis")]
            Cache::Redis(redis_cache) => {
                 let json = serde_json::to_string(search_results)
                     .map_err(|_| PoolError::SerializationError)?;
                 redis_cache.cache_results(&json, url).await
             }
+            #[cfg(feature = "in_memory_cache")]
             Cache::InMemory(cache) => {
                 cache.insert(url.to_string(), search_results.clone());
                 Ok(())
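Cache::build() resolves the backend in a fixed order: a configured redis_url wins when the redis feature is compiled in, then in_memory_cache is consulted, and Disabled is the final fallback, which callers see as a permanent cache miss (cached_json returns MissingValue, cache_results is a no-op). Note that any match naming a feature-gated variant must gate that arm as well, exactly as the hunks above do. A standalone sketch of that rule (hypothetical Backend type, not websurfx code):

#[derive(Clone)]
enum Backend {
    /// Always present, used when no cache feature applies.
    Disabled,
    /// Only exists in builds with the `redis` feature enabled.
    #[cfg(feature = "redis")]
    Redis(String), // stand-in for a real connection handle
}

fn describe(backend: &Backend) -> &'static str {
    match backend {
        Backend::Disabled => "caching disabled",
        // Without this cfg, a build with the feature turned off would fail
        // to compile, because the variant no longer exists.
        #[cfg(feature = "redis")]
        Backend::Redis(_) => "redis cache",
    }
}

fn main() {
    println!("{}", describe(&Backend::Disabled));
}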
src/cache/error.rs
CHANGED

@@ -2,12 +2,14 @@
 //! the redis server using an async connection pool.
 use std::fmt;
 
+#[cfg(feature = "redis")]
 use redis::RedisError;
 
 /// A custom error type used for handling redis async pool associated errors.
 #[derive(Debug)]
 pub enum PoolError {
     /// This variant handles all errors related to `RedisError`,
+    #[cfg(feature = "redis")]
     RedisError(RedisError),
     /// This variant handles the errors which occurs when all the connections
     /// in the connection pool return a connection dropped redis error.
@@ -19,6 +21,7 @@ pub enum PoolError {
 impl fmt::Display for PoolError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
+            #[cfg(feature = "redis")]
             PoolError::RedisError(redis_error) => {
                 if let Some(detail) = redis_error.detail() {
                     write!(f, "{}", detail)
src/cache/mod.rs
CHANGED

@@ -3,4 +3,5 @@
 
 pub mod cacher;
 pub mod error;
+#[cfg(feature = "redis")]
 pub mod redis_cacher;
src/config/parser.rs
CHANGED

@@ -20,6 +20,8 @@ pub struct Config {
     /// It stores the redis connection url address on which the redis
     /// client should connect.
    pub redis_url: Option<String>,
+    /// enable/disable the in-memory cache. Only checked, when no redis_url is provided.
+    pub in_memory_cache: bool,
     /// It stores the option to whether enable or disable production use.
     pub aggregator: AggregatorConfig,
     /// It stores the option to whether enable or disable logs.
@@ -100,6 +102,10 @@ impl Config {
                 globals.get::<_, String>("colorscheme")?,
             ),
             redis_url: globals.get::<_, String>("redis_url").ok(),
+            in_memory_cache: globals
+                .get::<_, bool>("in_memory_cache")
+                .ok()
+                .unwrap_or(false),
             aggregator: AggregatorConfig {
                 random_delay: globals.get::<_, bool>("production_use")?,
             },
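The new option is read leniently: when in_memory_cache is absent from config.lua, the field falls back to false instead of failing config parsing. A minimal standalone sketch of that lookup (assuming only the mlua crate, outside the websurfx codebase):

use mlua::Lua;

fn main() -> mlua::Result<()> {
    let lua = Lua::new();
    // Simulate a config.lua that never sets `in_memory_cache`.
    lua.load(r#"redis_url = "redis://127.0.0.1:8082""#).exec()?;
    let globals = lua.globals();

    // A missing or invalid value resolves to `false`, keeping the in-memory
    // backend off unless it is explicitly enabled.
    let in_memory_cache = globals
        .get::<_, bool>("in_memory_cache")
        .ok()
        .unwrap_or(false);
    assert!(!in_memory_cache);
    Ok(())
}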
websurfx/config.lua
CHANGED

@@ -47,6 +47,7 @@ theme = "simple" -- the theme name which should be used for the website
 
 -- ### Caching ###
 redis_url = "redis://127.0.0.1:8082" -- redis connection url address on which the client should connect on.
+in_memory_cache = true
 
 -- ### Search Engines ###
 upstream_search_engines = {
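Taken together with the new config option, caching can now be tuned entirely from config.lua. A hypothetical variant of the caching block for a deployment without a Redis server (not part of this commit):

-- ### Caching ###
-- leave redis_url unset so the Redis backend is skipped
-- redis_url = "redis://127.0.0.1:8082"
in_memory_cache = true -- cache search results in-process with mini-moka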