Compare commits

...

2 commits

SHA1       Message                            Date
529ccd1b51 feat: use caching on user lookup   2025-05-07 20:33:23 +02:00
           this needs to be deleted/expired on user update, we'll implement this
           when we get ways to "update" things like channels, servers and users
7ecc8c4270 feat: add redis caching            2025-05-07 20:32:32 +02:00
5 changed files with 95 additions and 4 deletions

View file

@@ -22,6 +22,7 @@ serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 simple_logger = "5.0.0"
 sqlx = { version = "0.8", features = ["runtime-tokio", "tls-native-tls", "postgres"] }
+redis = { version = "0.30", features= ["tokio-comp"] }
 toml = "0.8"
 url = { version = "2.5", features = ["serde"] }
 uuid = { version = "1.16", features = ["serde", "v7"] }

View file

@@ -1,11 +1,12 @@
 use actix_web::{Error, HttpRequest, HttpResponse, get, web};
 use log::error;
 use serde::Serialize;
+use tokio::sync::Mutex;
 use uuid::Uuid;

 use crate::{Data, api::v1::auth::check_access_token, utils::get_auth_header};

-#[derive(Serialize)]
+#[derive(Serialize, Clone)]
 struct Response {
     uuid: String,
     username: String,
@@ -34,6 +35,12 @@ pub async fn res(
         return Ok(error);
     }

+    let cache_result = data.get_cache_key(uuid.to_string()).await;
+
+    if let Ok(cache_hit) = cache_result {
+        return Ok(HttpResponse::Ok().json(cache_hit))
+    }
+
     let row = sqlx::query_as(&format!(
         "SELECT username, display_name FROM users WHERE uuid = '{}'",
         uuid
@@ -48,9 +55,18 @@ pub async fn res(

     let (username, display_name): (String, Option<String>) = row.unwrap();

-    Ok(HttpResponse::Ok().json(Response {
+    let user = Response {
         uuid: uuid.to_string(),
         username,
         display_name: display_name.unwrap_or_default(),
-    }))
+    };
+
+    let cache_result = data.set_cache_key(uuid.to_string(), user.clone(), 1800).await;
+
+    if let Err(error) = cache_result {
+        error!("{}", error);
+        return Ok(HttpResponse::InternalServerError().finish());
+    }
+
+    Ok(HttpResponse::Ok().json(user))
 }
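
Review note: get_cache_key returns the JSON string that set_cache_key stored via json!(value).to_string(), so responding with .json(cache_hit) serializes that string a second time and the client gets a quoted, escaped string rather than the original object. A minimal sketch of the cache-hit branch without the double encoding, assuming the cached value is trusted JSON:

    if let Ok(cache_hit) = cache_result {
        // cache_hit is already a serialized JSON document; send it as the raw
        // body instead of running it through .json() again.
        return Ok(HttpResponse::Ok()
            .content_type("application/json")
            .body(cache_hit));
    }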

View file

@@ -7,6 +7,7 @@ use tokio::fs::read_to_string;
 #[derive(Debug, Deserialize)]
 pub struct ConfigBuilder {
     database: Database,
+    cache_database: CacheDatabase,
     web: Option<WebBuilder>,
 }

@@ -19,6 +20,15 @@ pub struct Database {
     port: u16,
 }

+#[derive(Debug, Deserialize, Clone)]
+pub struct CacheDatabase {
+    username: Option<String>,
+    password: Option<String>,
+    host: String,
+    database: Option<String>,
+    port: u16,
+}
+
 #[derive(Debug, Deserialize)]
 struct WebBuilder {
     url: Option<String>,
@@ -51,6 +61,7 @@ impl ConfigBuilder {
         Config {
             database: self.database,
+            cache_database: self.cache_database,
             web,
         }
     }

@@ -59,6 +70,7 @@ impl ConfigBuilder {
 #[derive(Debug, Clone)]
 pub struct Config {
     pub database: Database,
+    pub cache_database: CacheDatabase,
     pub web: Web,
 }

@@ -78,3 +90,33 @@ impl Database {
             .port(self.port)
     }
 }
+
+impl CacheDatabase {
+    pub fn url(&self) -> String {
+        let mut url = String::from("redis://");
+
+        if let Some(username) = &self.username {
+            url += username;
+        }
+
+        if let Some(password) = &self.password {
+            url += ":";
+            url += password;
+        }
+
+        if self.username.is_some() || self.password.is_some() {
+            url += "@";
+        }
+
+        url += &self.host;
+        url += ":";
+        url += &self.port.to_string();
+
+        if let Some(database) = &self.database {
+            url += "/";
+            url += database;
+        }
+
+        url
+    }
+}
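
Review note: a quick sketch of what the new url() builder produces, written as a test in the same module (the struct fields are private, so an inline #[cfg(test)] module is assumed; the host, credentials and database values are made up):

#[cfg(test)]
mod tests {
    use super::CacheDatabase;

    #[test]
    fn cache_url_with_and_without_auth() {
        // No credentials: plain host, port and logical database.
        let plain = CacheDatabase {
            username: None,
            password: None,
            host: String::from("localhost"),
            database: Some(String::from("0")),
            port: 6379,
        };
        assert_eq!(plain.url(), "redis://localhost:6379/0");

        // Password only: note the leading ':' before the password.
        let with_password = CacheDatabase {
            username: None,
            password: Some(String::from("hunter2")),
            host: String::from("localhost"),
            database: None,
            port: 6379,
        };
        assert_eq!(with_password.url(), "redis://:hunter2@localhost:6379");
    }
}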

View file

@@ -2,9 +2,10 @@ use actix_cors::Cors;
 use actix_web::{App, HttpServer, web};
 use argon2::Argon2;
 use clap::Parser;
+use redis::aio::MultiplexedConnection;
 use simple_logger::SimpleLogger;
 use sqlx::{PgPool, Pool, Postgres};
-use std::time::SystemTime;
+use std::{cell::Cell, time::SystemTime};
 mod config;
 use config::{Config, ConfigBuilder};
 mod api;
@@ -23,6 +24,7 @@ struct Args {
 #[derive(Clone)]
 struct Data {
     pub pool: Pool<Postgres>,
+    pub cache_pool: redis::Client,
     pub _config: Config,
     pub argon2: Argon2<'static>,
     pub start_time: SystemTime,
@@ -44,6 +46,8 @@ async fn main() -> Result<(), Error> {
     let pool = PgPool::connect_with(config.database.connect_options()).await?;

+    let cache_pool = redis::Client::open(config.cache_database.url())?;
+
     /*
     TODO: Figure out if a table should be used here and if not then what.
     Also figure out if these should be different types from what they currently are and if we should add more "constraints"
@@ -81,6 +85,7 @@ async fn main() -> Result<(), Error> {
     let data = Data {
         pool,
+        cache_pool,
         _config: config,
         // TODO: Possibly implement "pepper" into this (thinking it could generate one if it doesnt exist and store it on disk)
         argon2: Argon2::default(),
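
Review note: redis::Client::open only parses and validates the URL; nothing connects until get_multiplexed_tokio_connection is called, and the cache helpers added in the last file open a fresh multiplexed connection on every call. The MultiplexedConnection import hints at an alternative. A hedged sketch, not what this commit does, of connecting once at startup and storing the cheap-to-clone handle in Data instead of the Client:

    // Sketch only: connect once and keep the handle; MultiplexedConnection
    // is cheap to clone, so Data can stay Clone.
    let cache_conn: MultiplexedConnection = redis::Client::open(config.cache_database.url())?
        .get_multiplexed_tokio_connection()
        .await?;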

View file

@@ -1,6 +1,11 @@
 use actix_web::{cookie::{time::Duration, Cookie, SameSite}, http::header::HeaderMap, HttpResponse};
 use getrandom::fill;
 use hex::encode;
+use redis::{AsyncCommands, RedisError};
+use serde::Serialize;
+use serde_json::json;
+
+use crate::Data;

 pub fn get_auth_header(headers: &HeaderMap) -> Result<&str, HttpResponse> {
     let auth_token = headers.get(actix_web::http::header::AUTHORIZATION);
@@ -46,3 +51,25 @@ pub fn generate_refresh_token() -> Result<String, getrandom::Error> {

     Ok(encode(buf))
 }
+
+impl Data {
+    pub async fn set_cache_key(&self, key: String, value: impl Serialize, expire: u32) -> Result<(), RedisError> {
+        let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;
+
+        let key_encoded = encode(key);
+
+        let value_json = json!(value).to_string();
+
+        redis::cmd("SET",).arg(&[key_encoded.clone(), value_json]).exec_async(&mut conn).await?;
+
+        redis::cmd("EXPIRE").arg(&[key_encoded, expire.to_string()]).exec_async(&mut conn).await
+    }
+
+    pub async fn get_cache_key(&self, key: String) -> Result<String, RedisError> {
+        let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;
+
+        let key_encoded = encode(key);
+
+        redis::cmd("GET").arg(key_encoded).query_async(&mut conn).await
+    }
+}
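
Review note: set_cache_key issues SET and EXPIRE as two round trips, which leaves a brief window where the key exists with no TTL if the second command fails. A hedged sketch of the same write collapsed into one command, assuming the same variables as in the helper above (SET accepts an EX <seconds> option):

        // Write the value and its TTL atomically in a single SET.
        redis::cmd("SET")
            .arg(&key_encoded)
            .arg(&value_json)
            .arg("EX")
            .arg(expire)
            .exec_async(&mut conn)
            .await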
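
Review note: the first commit's message says the cached entry needs to be deleted or expired when a user is updated. A minimal sketch of what that could look like once update endpoints exist, mirroring the two helpers above; delete_cache_key is hypothetical and not part of this diff:

impl Data {
    // Hypothetical helper, not in this diff: drop a cached entry so the next
    // lookup falls through to Postgres after an update.
    pub async fn delete_cache_key(&self, key: String) -> Result<(), RedisError> {
        let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;

        // Keys are stored hex-encoded by set_cache_key, so encode here as well.
        let key_encoded = encode(key);

        redis::cmd("DEL").arg(key_encoded).exec_async(&mut conn).await
    }
}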