Merge pull request 'feat: add redis caching' (#11) from wip/redis-caching into main
All checks were successful
ci/woodpecker/push/build-and-publish Pipeline was successful
Reviewed-on: #11
commit c4dafa1f2c
9 changed files with 112 additions and 4 deletions
@@ -22,6 +22,7 @@ serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 simple_logger = "5.0.0"
 sqlx = { version = "0.8", features = ["runtime-tokio", "tls-native-tls", "postgres"] }
+redis = { version = "0.30", features= ["tokio-comp"] }
 toml = "0.8"
 url = { version = "2.5", features = ["serde"] }
 uuid = { version = "1.16", features = ["serde", "v7"] }

@@ -18,6 +18,12 @@ RUN useradd --create-home --home-dir /gorb gorb
 
 USER gorb
 
-ENV DATABASE_USERNAME="gorb" DATABASE_PASSWORD="gorb" DATABASE="gorb" DATABASE_HOST="localhost" DATABASE_PORT="5432"
+ENV DATABASE_USERNAME="gorb" \
+    DATABASE_PASSWORD="gorb" \
+    DATABASE="gorb" \
+    DATABASE_HOST="database" \
+    DATABASE_PORT="5432" \
+    CACHE_DB_HOST="valkey" \
+    CACHE_DB_PORT="6379"
 
 ENTRYPOINT ["/usr/bin/entrypoint.sh"]

@@ -34,3 +34,8 @@ services:
       - POSTGRES_USER=gorb
       - POSTGRES_PASSWORD=gorb
       - POSTGRES_DB=gorb
+  valkey:
+    image: valkey/valkey
+    restart: always
+    networks:
+      - gorb

@@ -32,3 +32,8 @@ services:
       - POSTGRES_USER=gorb
      - POSTGRES_PASSWORD=gorb
       - POSTGRES_DB=gorb
+  valkey:
+    image: valkey/valkey
+    restart: always
+    networks:
+      - gorb

@@ -16,6 +16,10 @@ password = "${DATABASE_PASSWORD}"
 database = "${DATABASE}"
 host = "${DATABASE_HOST}"
 port = ${DATABASE_PORT}
+
+[cache_database]
+host = "${CACHE_DB_HOST}"
+port = ${CACHE_DB_PORT}
 EOF
 fi

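Note: to make the link between this template and the new config structs concrete, here is a hypothetical sketch (not part of the PR) of how a config shaped like the template would deserialize via serde and the toml crate. The [database] key names are assumed to mirror the template above, and all values are placeholders:

    // Hypothetical example, e.g. placed alongside src/config.rs; values are placeholders.
    fn parse_example() -> ConfigBuilder {
        let raw = r#"
            [database]
            username = "gorb"
            password = "gorb"
            database = "gorb"
            host = "database"
            port = 5432

            [cache_database]
            host = "valkey"
            port = 6379
        "#;

        // ConfigBuilder derives Deserialize, so toml::from_str maps the
        // [cache_database] table onto the new CacheDatabase struct.
        toml::from_str(raw).expect("template-shaped config should deserialize")
    }
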
@@ -5,7 +5,7 @@ use uuid::Uuid;
 
 use crate::{Data, api::v1::auth::check_access_token, utils::get_auth_header};
 
-#[derive(Serialize)]
+#[derive(Serialize, Clone)]
 struct Response {
     uuid: String,
     username: String,

@@ -34,6 +34,12 @@ pub async fn res(
         return Ok(error);
     }
 
+    let cache_result = data.get_cache_key(uuid.to_string()).await;
+
+    if let Ok(cache_hit) = cache_result {
+        return Ok(HttpResponse::Ok().content_type("application/json").body(cache_hit))
+    }
+
     let row = sqlx::query_as(&format!(
         "SELECT username, display_name FROM users WHERE uuid = '{}'",
         uuid

@@ -48,9 +54,18 @@ pub async fn res(
 
     let (username, display_name): (String, Option<String>) = row.unwrap();
 
-    Ok(HttpResponse::Ok().json(Response {
+    let user = Response {
         uuid: uuid.to_string(),
         username,
         display_name: display_name.unwrap_or_default(),
-    }))
+    };
+
+    let cache_result = data.set_cache_key(uuid.to_string(), user.clone(), 1800).await;
+
+    if let Err(error) = cache_result {
+        error!("{}", error);
+        return Ok(HttpResponse::InternalServerError().finish());
+    }
+
+    Ok(HttpResponse::Ok().json(user))
 }

@@ -7,6 +7,7 @@ use tokio::fs::read_to_string;
 #[derive(Debug, Deserialize)]
 pub struct ConfigBuilder {
     database: Database,
+    cache_database: CacheDatabase,
     web: Option<WebBuilder>,
 }

@@ -19,6 +20,15 @@ pub struct Database {
     port: u16,
 }
 
+#[derive(Debug, Deserialize, Clone)]
+pub struct CacheDatabase {
+    username: Option<String>,
+    password: Option<String>,
+    host: String,
+    database: Option<String>,
+    port: u16,
+}
+
 #[derive(Debug, Deserialize)]
 struct WebBuilder {
     url: Option<String>,

@@ -51,6 +61,7 @@ impl ConfigBuilder {
 
         Config {
             database: self.database,
+            cache_database: self.cache_database,
             web,
         }
     }

@@ -59,6 +70,7 @@ impl ConfigBuilder {
 #[derive(Debug, Clone)]
 pub struct Config {
     pub database: Database,
+    pub cache_database: CacheDatabase,
     pub web: Web,
 }

@@ -78,3 +90,33 @@ impl Database {
             .port(self.port)
     }
 }
+
+impl CacheDatabase {
+    pub fn url(&self) -> String {
+        let mut url = String::from("redis://");
+
+        if let Some(username) = &self.username {
+            url += username;
+        }
+
+        if let Some(password) = &self.password {
+            url += ":";
+            url += password;
+        }
+
+        if self.username.is_some() || self.password.is_some() {
+            url += "@";
+        }
+
+        url += &self.host;
+        url += ":";
+        url += &self.port.to_string();
+
+        if let Some(database) = &self.database {
+            url += "/";
+            url += database;
+        }
+
+        url
+    }
+}

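For reference, a quick sketch (not part of the PR) of the URL strings this builder produces, written as a hypothetical test that could sit inside src/config.rs since the struct fields are private; all values below are placeholders:

    #[cfg(test)]
    mod cache_url_tests {
        use super::CacheDatabase;

        #[test]
        fn builds_expected_urls() {
            // Host and port only: no credentials, no database index.
            let plain = CacheDatabase {
                username: None,
                password: None,
                host: "valkey".to_string(),
                database: None,
                port: 6379,
            };
            assert_eq!(plain.url(), "redis://valkey:6379");

            // With placeholder credentials and a database index.
            let full = CacheDatabase {
                username: Some("gorb".to_string()),
                password: Some("secret".to_string()),
                host: "valkey".to_string(),
                database: Some("0".to_string()),
                port: 6379,
            };
            assert_eq!(full.url(), "redis://gorb:secret@valkey:6379/0");
        }
    }
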
@@ -23,6 +23,7 @@ struct Args {
 #[derive(Clone)]
 struct Data {
     pub pool: Pool<Postgres>,
+    pub cache_pool: redis::Client,
     pub _config: Config,
     pub argon2: Argon2<'static>,
     pub start_time: SystemTime,

@@ -44,6 +45,8 @@ async fn main() -> Result<(), Error> {
 
     let pool = PgPool::connect_with(config.database.connect_options()).await?;
 
+    let cache_pool = redis::Client::open(config.cache_database.url())?;
+
     /*
         TODO: Figure out if a table should be used here and if not then what.
         Also figure out if these should be different types from what they currently are and if we should add more "constraints"

@@ -81,6 +84,7 @@ async fn main() -> Result<(), Error> {
 
     let data = Data {
         pool,
+        cache_pool,
         _config: config,
         // TODO: Possibly implement "pepper" into this (thinking it could generate one if it doesnt exist and store it on disk)
         argon2: Argon2::default(),

src/utils.rs (26 changes)
@@ -1,6 +1,10 @@
 use actix_web::{cookie::{time::Duration, Cookie, SameSite}, http::header::HeaderMap, HttpResponse};
 use getrandom::fill;
 use hex::encode;
+use redis::RedisError;
+use serde::Serialize;
+
+use crate::Data;
 
 pub fn get_auth_header(headers: &HeaderMap) -> Result<&str, HttpResponse> {
     let auth_token = headers.get(actix_web::http::header::AUTHORIZATION);

@@ -46,3 +50,25 @@ pub fn generate_refresh_token() -> Result<String, getrandom::Error> {
     Ok(encode(buf))
 }
+
+impl Data {
+    pub async fn set_cache_key(&self, key: String, value: impl Serialize, expire: u32) -> Result<(), RedisError> {
+        let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;
+
+        let key_encoded = encode(key);
+
+        let value_json = serde_json::to_string(&value).unwrap();
+
+        redis::cmd("SET",).arg(&[key_encoded.clone(), value_json]).exec_async(&mut conn).await?;
+
+        redis::cmd("EXPIRE").arg(&[key_encoded, expire.to_string()]).exec_async(&mut conn).await
+    }
+
+    pub async fn get_cache_key(&self, key: String) -> Result<String, RedisError> {
+        let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;
+
+        let key_encoded = encode(key);
+
+        redis::cmd("GET").arg(key_encoded).query_async(&mut conn).await
+    }
+}

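A minimal usage sketch (not part of the PR) of these helpers: the Session type and cache key below are hypothetical, `data` is the shared Data state, and the caller is assumed to deserialize the JSON string that get_cache_key returns:

    use serde::{Deserialize, Serialize};

    use crate::Data;

    // Hypothetical payload; anything implementing Serialize can be cached.
    #[derive(Serialize, Deserialize, Clone)]
    struct Session {
        token: String,
    }

    async fn cache_round_trip(data: &Data) -> Result<(), redis::RedisError> {
        let session = Session { token: "abc123".to_string() };

        // Store as JSON under a hex-encoded key, expiring after 30 minutes.
        data.set_cache_key("session:abc".to_string(), session, 1800).await?;

        // get_cache_key returns the raw JSON string; deserialize it ourselves.
        if let Ok(json) = data.get_cache_key("session:abc".to_string()).await {
            let cached: Session = serde_json::from_str(&json).expect("valid cached JSON");
            let _ = cached.token;
        }

        Ok(())
    }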