Compare commits


23 commits

Author SHA1 Message Date
c4dafa1f2c Merge pull request 'feat: add redis caching' (#11) from wip/redis-caching into main
Reviewed-on: #11
2025-05-07 21:04:40 +00:00
3e65cffe39 fix: fix user uuid cache hits 2025-05-07 22:21:59 +02:00
9e56eec021 fix: remove unused imports 2025-05-07 21:22:38 +02:00
3e64a49338 chore: add valkey configuration to docker 2025-05-07 20:57:01 +02:00
529ccd1b51 feat: use caching on user lookup
This needs to be deleted/expired on user update; we'll implement this once we have ways to "update" things like channels, servers and users.
2025-05-07 20:33:23 +02:00
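A minimal sketch of that planned invalidation, assuming a hypothetical `del_cache_key` helper on `Data` in the style of the `set_cache_key`/`get_cache_key` helpers added in `src/utils.rs` (not code from this change):

```rust
use hex::encode;
use redis::RedisError;

use crate::Data;

impl Data {
    // Hypothetical helper: drop a cached entry so the next lookup hits the database again.
    // A future user-update handler would call `data.del_cache_key(uuid.to_string()).await`.
    pub async fn del_cache_key(&self, key: String) -> Result<(), RedisError> {
        let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;
        // Keys are stored hex-encoded (see set_cache_key), so delete the encoded form.
        let key_encoded = encode(key);
        redis::cmd("DEL").arg(key_encoded).exec_async(&mut conn).await
    }
}
```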
7ecc8c4270 feat: add redis caching 2025-05-07 20:32:32 +02:00
ca63a2a13c Merge pull request 'feat: implement cors' (#10) from wip/cors into main
Reviewed-on: #10
Reviewed-by: Radical <radical@radical.fun>
2025-05-06 08:06:32 +00:00
c0f2948b76 feat: implement cors 2025-05-06 00:41:23 +02:00
135375f5b7 Merge pull request 'wip/username-regex' (#6) from wip/username-regex into main
Reviewed-on: #6
Reviewed-by: Radical <radical@radical.fun>
2025-05-05 01:16:31 +00:00
77245e98c5 refactor: combine crypto.rs with utils.rs
2025-05-04 23:50:38 +02:00
8a1467c26a Merge branch 'main' into wip/username-regex 2025-05-04 21:41:40 +00:00
ab5c85c4f5 fix: add numbers to username regex 2025-05-04 23:25:48 +02:00
c2bface373 Merge pull request 'wip/authorization-header' (#7) from wip/authorization-header into main
Reviewed-on: #7
Reviewed-by: SauceyRed <saucey@saucey.red>

Uses authorization headers instead of keeping `access_token` and `refresh_token` in the request body.

The auth header carries the `access_token`.

## Changes
Moved everything to /api

### POST -> GET /v1/users/me
Request: auth header

### POST -> GET /v1/users/{uuid}
Request: auth header

### POST -> GET /v1/users
Request: auth header and query params `start=int` and `amount=int`

### POST /v1/auth/register
Response: Remove `refresh_token` from body and instead set-cookie `refresh_token`

### POST /v1/auth/login
Response: Remove `refresh_token` from body and instead set-cookie `refresh_token`

### POST /v1/auth/refresh
Request: cookie `refresh_token`
Response: Remove `refresh_token` from body and instead set-cookie `refresh_token`

### POST /v1/auth/revoke
Request: auth header (password still in body)
2025-05-04 21:18:27 +00:00
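For illustration only, a client under the new scheme might look like this rough `reqwest` sketch (the `reqwest` dependency with its `cookies` feature, the base URL, and the token values are assumptions, not part of this change; the server only reads the second whitespace-separated part of the header, so a `Bearer` prefix works):

```rust
use reqwest::Client;

// Hypothetical client-side usage of the reworked endpoints.
async fn fetch_me(base: &str, access_token: &str) -> Result<String, reqwest::Error> {
    // cookie_store(true) keeps the HttpOnly refresh_token cookie set by /auth/login,
    // so a later POST to /api/v1/auth/refresh sends it automatically.
    let client = Client::builder().cookie_store(true).build()?;

    // GET /api/v1/users/me now authenticates via the Authorization header.
    client
        .get(format!("{base}/api/v1/users/me"))
        .header("Authorization", format!("Bearer {access_token}"))
        .send()
        .await?
        .text()
        .await
}
```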
c61f96ffe7 feat: expire refresh_token immediately on unauthorized response 2025-05-04 23:02:17 +02:00
0f897dc0c6 feat: return refresh_token in cookie 2025-05-04 22:13:28 +02:00
ebb4286c08 refactor: move api to /api
serve api under /api
2025-05-04 22:13:05 +02:00
f12f81d584 fix: extract auth value 2025-05-04 21:30:33 +02:00
a3846a2620 fix: use correct header 2025-05-04 20:30:28 +02:00
cbf0131d14 feat: switch to headers for auth 2025-05-04 19:09:12 +02:00
6c706d973e style: use created_at instead of created 2025-05-04 19:09:06 +02:00
aa865e2ed4 feat: add utils.rs
provides a function that extracts the auth header from request headers
2025-05-04 18:11:12 +02:00
e29940d080 feat: only allow lowercase usernames 2025-05-03 03:04:07 +02:00
b530de8f52 fix: username regex 2025-05-03 02:20:37 +02:00
18 changed files with 336 additions and 221 deletions

View file

@ -9,6 +9,7 @@ lto = true
codegen-units = 1
[dependencies]
actix-cors = "0.7.1"
actix-web = "4.10"
argon2 = { version = "0.5.3", features = ["std"] }
clap = { version = "4.5.37", features = ["derive"] }
@ -21,6 +22,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
simple_logger = "5.0.0"
sqlx = { version = "0.8", features = ["runtime-tokio", "tls-native-tls", "postgres"] }
redis = { version = "0.30", features= ["tokio-comp"] }
toml = "0.8"
url = { version = "2.5", features = ["serde"] }
uuid = { version = "1.16", features = ["serde", "v7"] }

View file

@ -18,6 +18,12 @@ RUN useradd --create-home --home-dir /gorb gorb
USER gorb
ENV DATABASE_USERNAME="gorb" DATABASE_PASSWORD="gorb" DATABASE="gorb" DATABASE_HOST="localhost" DATABASE_PORT="5432"
ENV DATABASE_USERNAME="gorb" \
DATABASE_PASSWORD="gorb" \
DATABASE="gorb" \
DATABASE_HOST="database" \
DATABASE_PORT="5432" \
CACHE_DB_HOST="valkey" \
CACHE_DB_PORT="6379"
ENTRYPOINT ["/usr/bin/entrypoint.sh"]

View file

@ -34,3 +34,8 @@ services:
- POSTGRES_USER=gorb
- POSTGRES_PASSWORD=gorb
- POSTGRES_DB=gorb
valkey:
image: valkey/valkey
restart: always
networks:
- gorb

View file

@ -32,3 +32,8 @@ services:
- POSTGRES_USER=gorb
- POSTGRES_PASSWORD=gorb
- POSTGRES_DB=gorb
valkey:
image: valkey/valkey
restart: always
networks:
- gorb

View file

@ -16,6 +16,10 @@ password = "${DATABASE_PASSWORD}"
database = "${DATABASE}"
host = "${DATABASE_HOST}"
port = ${DATABASE_PORT}
[cache_database]
host = "${CACHE_DB_HOST}"
port = ${CACHE_DB_PORT}
EOF
fi
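For reference, with the Dockerfile's default environment (`CACHE_DB_HOST="valkey"`, `CACHE_DB_PORT="6379"`), the heredoc above would append roughly this to the generated config (illustrative rendering, not a file in this diff):

```toml
[cache_database]
host = "valkey"
port = 6379
```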

View file

@ -1,2 +1,11 @@
pub mod v1;
pub mod versions;
use actix_web::Scope;
use actix_web::web;
mod v1;
mod versions;
pub fn web() -> Scope {
web::scope("/api")
.service(v1::web())
.service(versions::res)
}

View file

@ -1,17 +1,17 @@
use std::time::{SystemTime, UNIX_EPOCH};
use actix_web::{Error, HttpResponse, error, post, web};
use actix_web::{error, post, web, Error, HttpResponse};
use argon2::{PasswordHash, PasswordVerifier};
use futures::StreamExt;
use log::error;
use serde::{Deserialize, Serialize};
use serde::Deserialize;
use crate::{
Data,
api::v1::auth::{EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX},
crypto::{generate_access_token, generate_refresh_token},
api::v1::auth::{EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX}, utils::{generate_access_token, generate_refresh_token, refresh_token_cookie}, Data
};
use super::Response;
#[derive(Deserialize)]
struct LoginInformation {
username: String,
@ -19,12 +19,6 @@ struct LoginInformation {
device_name: String,
}
#[derive(Serialize)]
pub struct Response {
pub access_token: String,
pub refresh_token: String,
}
const MAX_SIZE: usize = 262_144;
#[post("/login")]
@ -160,7 +154,7 @@ async fn login(
.as_secs() as i64;
if let Err(error) = sqlx::query(&format!(
"INSERT INTO refresh_tokens (token, uuid, created, device_name) VALUES ($1, '{}', $2, $3 )",
"INSERT INTO refresh_tokens (token, uuid, created_at, device_name) VALUES ($1, '{}', $2, $3 )",
uuid
))
.bind(&refresh_token)
@ -174,7 +168,7 @@ async fn login(
}
if let Err(error) = sqlx::query(&format!(
"INSERT INTO access_tokens (token, refresh_token, uuid, created) VALUES ($1, $2, '{}', $3 )",
"INSERT INTO access_tokens (token, refresh_token, uuid, created_at) VALUES ($1, $2, '{}', $3 )",
uuid
))
.bind(&access_token)
@ -187,8 +181,7 @@ async fn login(
return HttpResponse::InternalServerError().finish()
}
HttpResponse::Ok().json(Response {
HttpResponse::Ok().cookie(refresh_token_cookie(refresh_token)).json(Response {
access_token,
refresh_token,
})
}

View file

@ -7,6 +7,7 @@ use std::{
use actix_web::{HttpResponse, Scope, web};
use log::error;
use regex::Regex;
use serde::Serialize;
use sqlx::Postgres;
use uuid::Uuid;
@ -15,12 +16,16 @@ mod refresh;
mod register;
mod revoke;
#[derive(Serialize)]
struct Response {
access_token: String,
}
static EMAIL_REGEX: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(r"[-A-Za-z0-9!#$%&'*+/=?^_`{|}~]+(?:\.[-A-Za-z0-9!#$%&'*+/=?^_`{|}~]+)*@(?:[A-Za-z0-9](?:[-A-Za-z0-9]*[A-Za-z0-9])?\.)+[A-Za-z0-9](?:[-A-Za-z0-9]*[A-Za-z0-9])?").unwrap()
});
// FIXME: This regex doesnt seem to be working
static USERNAME_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[a-zA-Z0-9.-_]").unwrap());
static USERNAME_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"^[a-z0-9_.-]+$").unwrap());
// Password is expected to be hashed using SHA3-384
static PASSWORD_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[0-9a-f]{96}").unwrap());
@ -34,33 +39,36 @@ pub fn web() -> Scope {
}
pub async fn check_access_token(
access_token: String,
access_token: &str,
pool: &sqlx::Pool<Postgres>,
) -> Result<Uuid, HttpResponse> {
let row = sqlx::query_as(
"SELECT CAST(uuid as VARCHAR), created FROM access_tokens WHERE token = $1",
)
let row =
sqlx::query_as("SELECT CAST(uuid as VARCHAR), created_at FROM access_tokens WHERE token = $1")
.bind(&access_token)
.fetch_one(pool)
.await;
if let Err(error) = row {
if error.to_string() == "no rows returned by a query that expected to return at least one row" {
return Err(HttpResponse::Unauthorized().finish())
if error.to_string()
== "no rows returned by a query that expected to return at least one row"
{
return Err(HttpResponse::Unauthorized().finish());
}
error!("{}", error);
return Err(HttpResponse::InternalServerError().json(r#"{ "error": "Unhandled exception occured, contact the server administrator" }"#))
return Err(HttpResponse::InternalServerError().json(
r#"{ "error": "Unhandled exception occured, contact the server administrator" }"#,
));
}
let (uuid, created): (String, i64) = row.unwrap();
let (uuid, created_at): (String, i64) = row.unwrap();
let current_time = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs() as i64;
let lifetime = current_time - created;
let lifetime = current_time - created_at;
if lifetime > 3600 {
return Err(HttpResponse::Unauthorized().finish());

View file

@ -1,40 +1,22 @@
use actix_web::{Error, HttpResponse, error, post, web};
use futures::StreamExt;
use actix_web::{post, web, Error, HttpRequest, HttpResponse};
use log::error;
use serde::{Deserialize, Serialize};
use std::time::{SystemTime, UNIX_EPOCH};
use crate::{
Data,
crypto::{generate_access_token, generate_refresh_token},
utils::{generate_access_token, generate_refresh_token, refresh_token_cookie}, Data
};
#[derive(Deserialize)]
struct RefreshRequest {
refresh_token: String,
}
#[derive(Serialize)]
struct Response {
refresh_token: String,
access_token: String,
}
const MAX_SIZE: usize = 262_144;
use super::Response;
#[post("/refresh")]
pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<HttpResponse, Error> {
let mut body = web::BytesMut::new();
while let Some(chunk) = payload.next().await {
let chunk = chunk?;
// limit max size of in-memory payload
if (body.len() + chunk.len()) > MAX_SIZE {
return Err(error::ErrorBadRequest("overflow"));
}
body.extend_from_slice(&chunk);
pub async fn res(req: HttpRequest, data: web::Data<Data>) -> Result<HttpResponse, Error> {
let recv_refresh_token_cookie = req.cookie("refresh_token");
if let None = recv_refresh_token_cookie {
return Ok(HttpResponse::Unauthorized().finish())
}
let refresh_request = serde_json::from_slice::<RefreshRequest>(&body)?;
let mut refresh_token = String::from(recv_refresh_token_cookie.unwrap().value());
let current_time = SystemTime::now()
.duration_since(UNIX_EPOCH)
@ -42,33 +24,29 @@ pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<Htt
.as_secs() as i64;
if let Ok(row) =
sqlx::query_as("SELECT CAST(uuid as VARCHAR), created FROM refresh_tokens WHERE token = $1")
.bind(&refresh_request.refresh_token)
sqlx::query_scalar("SELECT created_at FROM refresh_tokens WHERE token = $1")
.bind(&refresh_token)
.fetch_one(&data.pool)
.await
{
let (uuid, created): (String, i64) = row;
let created_at: i64 = row;
if let Err(error) = sqlx::query("DELETE FROM access_tokens WHERE refresh_token = $1")
.bind(&refresh_request.refresh_token)
.execute(&data.pool)
.await
{
error!("{}", error);
}
let lifetime = current_time - created;
let lifetime = current_time - created_at;
if lifetime > 2592000 {
if let Err(error) = sqlx::query("DELETE FROM refresh_tokens WHERE token = $1")
.bind(&refresh_request.refresh_token)
.bind(&refresh_token)
.execute(&data.pool)
.await
{
error!("{}", error);
}
return Ok(HttpResponse::Unauthorized().finish());
let mut refresh_token_cookie = refresh_token_cookie(refresh_token);
refresh_token_cookie.make_removal();
return Ok(HttpResponse::Unauthorized().cookie(refresh_token_cookie).finish());
}
let current_time = SystemTime::now()
@ -76,8 +54,6 @@ pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<Htt
.unwrap()
.as_secs() as i64;
let mut refresh_token = refresh_request.refresh_token;
if lifetime > 1987200 {
let new_refresh_token = generate_refresh_token();
@ -88,7 +64,7 @@ pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<Htt
let new_refresh_token = new_refresh_token.unwrap();
match sqlx::query("UPDATE refresh_tokens SET token = $1, created = $2 WHERE token = $3")
match sqlx::query("UPDATE refresh_tokens SET token = $1, created_at = $2 WHERE token = $3")
.bind(&new_refresh_token)
.bind(current_time)
.bind(&refresh_token)
@ -113,21 +89,24 @@ pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<Htt
let access_token = access_token.unwrap();
if let Err(error) = sqlx::query(&format!("INSERT INTO access_tokens (token, refresh_token, uuid, created) VALUES ($1, $2, '{}', $3 )", uuid))
if let Err(error) = sqlx::query("UPDATE access_tokens SET token = $1, created_at = $2 WHERE refresh_token = $3")
.bind(&access_token)
.bind(&refresh_token)
.bind(current_time)
.bind(&refresh_token)
.execute(&data.pool)
.await {
error!("{}", error);
return Ok(HttpResponse::InternalServerError().finish())
}
return Ok(HttpResponse::Ok().json(Response {
refresh_token,
return Ok(HttpResponse::Ok().cookie(refresh_token_cookie(refresh_token)).json(Response {
access_token,
}));
}
Ok(HttpResponse::Unauthorized().finish())
let mut refresh_token_cookie = refresh_token_cookie(refresh_token);
refresh_token_cookie.make_removal();
Ok(HttpResponse::Unauthorized().cookie(refresh_token_cookie).finish())
}

View file

@ -10,11 +10,9 @@ use log::error;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use super::login::Response;
use super::Response;
use crate::{
Data,
api::v1::auth::{EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX},
crypto::{generate_access_token, generate_refresh_token},
api::v1::auth::{EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX}, utils::{generate_access_token, generate_refresh_token, refresh_token_cookie}, Data
};
#[derive(Deserialize)]
@ -139,7 +137,7 @@ pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<Htt
.unwrap()
.as_secs() as i64;
if let Err(error) = sqlx::query(&format!("INSERT INTO refresh_tokens (token, uuid, created, device_name) VALUES ($1, '{}', $2, $3 )", uuid))
if let Err(error) = sqlx::query(&format!("INSERT INTO refresh_tokens (token, uuid, created_at, device_name) VALUES ($1, '{}', $2, $3 )", uuid))
.bind(&refresh_token)
.bind(current_time)
.bind(account_information.device_name)
@ -149,7 +147,7 @@ pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<Htt
return Ok(HttpResponse::InternalServerError().finish())
}
if let Err(error) = sqlx::query(&format!("INSERT INTO access_tokens (token, refresh_token, uuid, created) VALUES ($1, $2, '{}', $3 )", uuid))
if let Err(error) = sqlx::query(&format!("INSERT INTO access_tokens (token, refresh_token, uuid, created_at) VALUES ($1, $2, '{}', $3 )", uuid))
.bind(&access_token)
.bind(&refresh_token)
.bind(current_time)
@ -159,9 +157,8 @@ pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<Htt
return Ok(HttpResponse::InternalServerError().finish())
}
HttpResponse::Ok().json(Response {
HttpResponse::Ok().cookie(refresh_token_cookie(refresh_token)).json(Response {
access_token,
refresh_token,
})
}
Err(error) => {

View file

@ -1,14 +1,13 @@
use actix_web::{Error, HttpResponse, error, post, web};
use actix_web::{Error, HttpRequest, HttpResponse, error, post, web};
use argon2::{PasswordHash, PasswordVerifier};
use futures::{StreamExt, future};
use log::error;
use serde::{Deserialize, Serialize};
use crate::{Data, api::v1::auth::check_access_token};
use crate::{Data, api::v1::auth::check_access_token, utils::get_auth_header};
#[derive(Deserialize)]
struct RevokeRequest {
access_token: String,
password: String,
device_name: String,
}
@ -27,7 +26,19 @@ impl Response {
const MAX_SIZE: usize = 262_144;
#[post("/revoke")]
pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<HttpResponse, Error> {
pub async fn res(
req: HttpRequest,
mut payload: web::Payload,
data: web::Data<Data>,
) -> Result<HttpResponse, Error> {
let headers = req.headers();
let auth_header = get_auth_header(headers);
if let Err(error) = auth_header {
return Ok(error);
}
let mut body = web::BytesMut::new();
while let Some(chunk) = payload.next().await {
let chunk = chunk?;
@ -40,7 +51,7 @@ pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<Htt
let revoke_request = serde_json::from_slice::<RevokeRequest>(&body)?;
let authorized = check_access_token(revoke_request.access_token, &data.pool).await;
let authorized = check_access_token(auth_header.unwrap(), &data.pool).await;
if let Err(error) = authorized {
return Ok(error);
@ -94,16 +105,9 @@ pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<Htt
let tokens: Vec<String> = tokens_raw.unwrap();
let mut access_tokens_delete = vec![];
let mut refresh_tokens_delete = vec![];
for token in tokens {
access_tokens_delete.push(
sqlx::query("DELETE FROM access_tokens WHERE refresh_token = $1")
.bind(token.clone())
.execute(&data.pool),
);
refresh_tokens_delete.push(
sqlx::query("DELETE FROM refresh_tokens WHERE token = $1")
.bind(token.clone())
@ -111,29 +115,16 @@ pub async fn res(mut payload: web::Payload, data: web::Data<Data>) -> Result<Htt
);
}
let results_access_tokens = future::join_all(access_tokens_delete).await;
let results_refresh_tokens = future::join_all(refresh_tokens_delete).await;
let results = future::join_all(refresh_tokens_delete).await;
let access_tokens_errors: Vec<&Result<sqlx::postgres::PgQueryResult, sqlx::Error>> =
results_access_tokens
.iter()
.filter(|r| r.is_err())
.collect();
let refresh_tokens_errors: Vec<&Result<sqlx::postgres::PgQueryResult, sqlx::Error>> =
results_refresh_tokens
let errors: Vec<&Result<sqlx::postgres::PgQueryResult, sqlx::Error>> =
results
.iter()
.filter(|r| r.is_err())
.collect();
if !access_tokens_errors.is_empty() && !refresh_tokens_errors.is_empty() {
error!("{:?}", access_tokens_errors);
error!("{:?}", refresh_tokens_errors);
return Ok(HttpResponse::InternalServerError().finish());
} else if !access_tokens_errors.is_empty() {
error!("{:?}", access_tokens_errors);
return Ok(HttpResponse::InternalServerError().finish());
} else if !refresh_tokens_errors.is_empty() {
error!("{:?}", refresh_tokens_errors);
if !errors.is_empty() {
error!("{:?}", errors);
return Ok(HttpResponse::InternalServerError().finish());
}

View file

@ -1,14 +1,8 @@
use actix_web::{Error, HttpResponse, error, post, web};
use futures::StreamExt;
use actix_web::{Error, HttpRequest, HttpResponse, get, web};
use log::error;
use serde::{Deserialize, Serialize};
use serde::Serialize;
use crate::{Data, api::v1::auth::check_access_token};
#[derive(Deserialize)]
struct AuthenticationRequest {
access_token: String,
}
use crate::{Data, api::v1::auth::check_access_token, utils::get_auth_header};
#[derive(Serialize)]
struct Response {
@ -17,26 +11,17 @@ struct Response {
display_name: String,
}
const MAX_SIZE: usize = 262_144;
#[get("/me")]
pub async fn res(req: HttpRequest, data: web::Data<Data>) -> Result<HttpResponse, Error> {
let headers = req.headers();
#[post("/me")]
pub async fn res(
mut payload: web::Payload,
data: web::Data<Data>,
) -> Result<HttpResponse, Error> {
let mut body = web::BytesMut::new();
while let Some(chunk) = payload.next().await {
let chunk = chunk?;
// limit max size of in-memory payload
if (body.len() + chunk.len()) > MAX_SIZE {
return Err(error::ErrorBadRequest("overflow"));
}
body.extend_from_slice(&chunk);
let auth_header = get_auth_header(headers);
if let Err(error) = auth_header {
return Ok(error);
}
let authentication_request = serde_json::from_slice::<AuthenticationRequest>(&body)?;
let authorized = check_access_token(authentication_request.access_token, &data.pool).await;
let authorized = check_access_token(auth_header.unwrap(), &data.pool).await;
if let Err(error) = authorized {
return Ok(error);

View file

@ -1,18 +1,16 @@
use actix_web::{error, post, web, Error, HttpResponse, Scope};
use futures::StreamExt;
use crate::{Data, api::v1::auth::check_access_token, utils::get_auth_header};
use actix_web::{get, web, Error, HttpRequest, HttpResponse, Scope};
use log::error;
use serde::{Deserialize, Serialize};
use sqlx::prelude::FromRow;
use crate::{Data, api::v1::auth::check_access_token};
mod me;
mod uuid;
#[derive(Deserialize)]
struct Request {
access_token: String,
start: i32,
amount: i32,
struct RequestQuery {
start: Option<i32>,
amount: Option<i32>,
}
#[derive(Serialize, FromRow)]
@ -23,8 +21,6 @@ struct Response {
email: String,
}
const MAX_SIZE: usize = 262_144;
pub fn web() -> Scope {
web::scope("/users")
.service(res)
@ -32,36 +28,33 @@ pub fn web() -> Scope {
.service(uuid::res)
}
#[post("")]
#[get("")]
pub async fn res(
mut payload: web::Payload,
req: HttpRequest,
request_query: web::Query<RequestQuery>,
data: web::Data<Data>,
) -> Result<HttpResponse, Error> {
let mut body = web::BytesMut::new();
while let Some(chunk) = payload.next().await {
let chunk = chunk?;
// limit max size of in-memory payload
if (body.len() + chunk.len()) > MAX_SIZE {
return Err(error::ErrorBadRequest("overflow"));
}
body.extend_from_slice(&chunk);
let headers = req.headers();
let auth_header = get_auth_header(headers);
let start = request_query.start.unwrap_or(0);
let amount = request_query.amount.unwrap_or(10);
if amount > 100 {
return Ok(HttpResponse::BadRequest().finish());
}
let request = serde_json::from_slice::<Request>(&body)?;
if request.amount > 100 {
return Ok(HttpResponse::BadRequest().finish())
}
let authorized = check_access_token(request.access_token, &data.pool).await;
let authorized = check_access_token(auth_header.unwrap(), &data.pool).await;
if let Err(error) = authorized {
return Ok(error);
}
let row = sqlx::query_as("SELECT CAST(uuid AS VARCHAR), username, display_name, email FROM users ORDER BY username LIMIT $1 OFFSET $2")
.bind(request.amount)
.bind(request.start)
.bind(amount)
.bind(start)
.fetch_all(&data.pool)
.await;
@ -74,4 +67,3 @@ pub async fn res(
Ok(HttpResponse::Ok().json(accounts))
}

View file

@ -1,51 +1,45 @@
use actix_web::{Error, HttpResponse, error, post, web};
use futures::StreamExt;
use actix_web::{Error, HttpRequest, HttpResponse, get, web};
use log::error;
use serde::{Deserialize, Serialize};
use serde::Serialize;
use uuid::Uuid;
use crate::{Data, api::v1::auth::check_access_token};
use crate::{Data, api::v1::auth::check_access_token, utils::get_auth_header};
#[derive(Deserialize)]
struct AuthenticationRequest {
access_token: String,
}
#[derive(Serialize)]
#[derive(Serialize, Clone)]
struct Response {
uuid: String,
username: String,
display_name: String,
}
const MAX_SIZE: usize = 262_144;
#[post("/{uuid}")]
#[get("/{uuid}")]
pub async fn res(
mut payload: web::Payload,
req: HttpRequest,
path: web::Path<(Uuid,)>,
data: web::Data<Data>,
) -> Result<HttpResponse, Error> {
let mut body = web::BytesMut::new();
while let Some(chunk) = payload.next().await {
let chunk = chunk?;
// limit max size of in-memory payload
if (body.len() + chunk.len()) > MAX_SIZE {
return Err(error::ErrorBadRequest("overflow"));
}
body.extend_from_slice(&chunk);
}
let headers = req.headers();
let uuid = path.into_inner().0;
let authentication_request = serde_json::from_slice::<AuthenticationRequest>(&body)?;
let auth_header = get_auth_header(headers);
let authorized = check_access_token(authentication_request.access_token, &data.pool).await;
if let Err(error) = auth_header {
return Ok(error);
}
let authorized = check_access_token(auth_header.unwrap(), &data.pool).await;
if let Err(error) = authorized {
return Ok(error);
}
let cache_result = data.get_cache_key(uuid.to_string()).await;
if let Ok(cache_hit) = cache_result {
return Ok(HttpResponse::Ok().content_type("application/json").body(cache_hit))
}
let row = sqlx::query_as(&format!(
"SELECT username, display_name FROM users WHERE uuid = '{}'",
uuid
@ -60,9 +54,18 @@ pub async fn res(
let (username, display_name): (String, Option<String>) = row.unwrap();
Ok(HttpResponse::Ok().json(Response {
let user = Response {
uuid: uuid.to_string(),
username,
display_name: display_name.unwrap_or_default(),
}))
};
let cache_result = data.set_cache_key(uuid.to_string(), user.clone(), 1800).await;
if let Err(error) = cache_result {
error!("{}", error);
return Ok(HttpResponse::InternalServerError().finish());
}
Ok(HttpResponse::Ok().json(user))
}

View file

@ -7,6 +7,7 @@ use tokio::fs::read_to_string;
#[derive(Debug, Deserialize)]
pub struct ConfigBuilder {
database: Database,
cache_database: CacheDatabase,
web: Option<WebBuilder>,
}
@ -19,6 +20,15 @@ pub struct Database {
port: u16,
}
#[derive(Debug, Deserialize, Clone)]
pub struct CacheDatabase {
username: Option<String>,
password: Option<String>,
host: String,
database: Option<String>,
port: u16,
}
#[derive(Debug, Deserialize)]
struct WebBuilder {
url: Option<String>,
@ -51,6 +61,7 @@ impl ConfigBuilder {
Config {
database: self.database,
cache_database: self.cache_database,
web,
}
}
@ -59,6 +70,7 @@ impl ConfigBuilder {
#[derive(Debug, Clone)]
pub struct Config {
pub database: Database,
pub cache_database: CacheDatabase,
pub web: Web,
}
@ -78,3 +90,33 @@ impl Database {
.port(self.port)
}
}
impl CacheDatabase {
pub fn url(&self) -> String {
let mut url = String::from("redis://");
if let Some(username) = &self.username {
url += username;
}
if let Some(password) = &self.password {
url += ":";
url += password;
}
if self.username.is_some() || self.password.is_some() {
url += "@";
}
url += &self.host;
url += ":";
url += &self.port.to_string();
if let Some(database) = &self.database {
url += "/";
url += database;
}
url
}
}
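As an illustration (values borrowed from the compose setup above, not from this diff): with only `host = "valkey"` and `port = 6379` set, `url()` returns `redis://valkey:6379`; with a username, password and database also set it would return something like `redis://gorb:secret@valkey:6379/0`.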

View file

@ -1,14 +0,0 @@
use getrandom::fill;
use hex::encode;
pub fn generate_access_token() -> Result<String, getrandom::Error> {
let mut buf = [0u8; 16];
fill(&mut buf)?;
Ok(encode(buf))
}
pub fn generate_refresh_token() -> Result<String, getrandom::Error> {
let mut buf = [0u8; 32];
fill(&mut buf)?;
Ok(encode(buf))
}

View file

@ -1,3 +1,4 @@
use actix_cors::Cors;
use actix_web::{App, HttpServer, web};
use argon2::Argon2;
use clap::Parser;
@ -7,7 +8,8 @@ use std::time::SystemTime;
mod config;
use config::{Config, ConfigBuilder};
mod api;
pub mod crypto;
pub mod utils;
type Error = Box<dyn std::error::Error>;
@ -21,6 +23,7 @@ struct Args {
#[derive(Clone)]
struct Data {
pub pool: Pool<Postgres>,
pub cache_pool: redis::Client,
pub _config: Config,
pub argon2: Argon2<'static>,
pub start_time: SystemTime,
@ -42,6 +45,8 @@ async fn main() -> Result<(), Error> {
let pool = PgPool::connect_with(config.database.connect_options()).await?;
let cache_pool = redis::Client::open(config.cache_database.url())?;
/*
TODO: Figure out if a table should be used here and if not then what.
Also figure out if these should be different types from what they currently are and if we should add more "constraints"
@ -63,14 +68,14 @@ async fn main() -> Result<(), Error> {
CREATE TABLE IF NOT EXISTS refresh_tokens (
token varchar(64) PRIMARY KEY UNIQUE NOT NULL,
uuid uuid NOT NULL REFERENCES users(uuid),
created int8 NOT NULL,
created_at int8 NOT NULL,
device_name varchar(16) NOT NULL
);
CREATE TABLE IF NOT EXISTS access_tokens (
token varchar(32) PRIMARY KEY UNIQUE NOT NULL,
refresh_token varchar(64) UNIQUE NOT NULL REFERENCES refresh_tokens(token),
refresh_token varchar(64) UNIQUE NOT NULL REFERENCES refresh_tokens(token) ON UPDATE CASCADE ON DELETE CASCADE,
uuid uuid NOT NULL REFERENCES users(uuid),
created int8 NOT NULL
created_at int8 NOT NULL
)
"#,
)
@ -79,17 +84,46 @@ async fn main() -> Result<(), Error> {
let data = Data {
pool,
cache_pool,
_config: config,
// TODO: Possibly implement "pepper" into this (thinking it could generate one if it doesnt exist and store it on disk)
argon2: Argon2::default(),
start_time: SystemTime::now(),
};
HttpServer::new(move || {
// Set CORS headers
let cors = Cors::default()
/*
Set Allowed-Control-Allow-Origin header to whatever
the request's Origin header is. Must be done like this
rather than setting it to "*" due to CORS not allowing
sending of credentials (cookies) with wildcard origin.
*/
.allowed_origin_fn(|_origin, _req_head| {
true
})
/*
Allows any request method in CORS preflight requests.
This will be restricted to only ones actually in use later.
*/
.allow_any_method()
/*
Allows any header(s) in request in CORS preflight requests.
This wll be restricted to only ones actually in use later.
*/
.allow_any_header()
/*
Allows browser to include cookies in requests.
This is needed for receiving the secure HttpOnly refresh_token cookie.
*/
.supports_credentials();
App::new()
.app_data(web::Data::new(data.clone()))
.service(api::versions::res)
.service(api::v1::web())
.wrap(cors)
.service(api::web())
})
.bind((web.url, web.port))?
.run()

src/utils.rs (new file, 74 lines)
View file

@ -0,0 +1,74 @@
use actix_web::{cookie::{time::Duration, Cookie, SameSite}, http::header::HeaderMap, HttpResponse};
use getrandom::fill;
use hex::encode;
use redis::RedisError;
use serde::Serialize;
use crate::Data;
pub fn get_auth_header(headers: &HeaderMap) -> Result<&str, HttpResponse> {
let auth_token = headers.get(actix_web::http::header::AUTHORIZATION);
if let None = auth_token {
return Err(HttpResponse::Unauthorized().finish());
}
let auth = auth_token.unwrap().to_str();
if let Err(error) = auth {
return Err(HttpResponse::Unauthorized().json(format!(r#" {{ "error": "{}" }} "#, error)));
}
let auth_value = auth.unwrap().split_whitespace().nth(1);
if let None = auth_value {
return Err(HttpResponse::BadRequest().finish());
}
Ok(auth_value.unwrap())
}
pub fn refresh_token_cookie(refresh_token: String) -> Cookie<'static> {
Cookie::build("refresh_token", refresh_token)
.http_only(true)
.secure(true)
.same_site(SameSite::None)
.path("/api")
.max_age(Duration::days(30))
.finish()
}
pub fn generate_access_token() -> Result<String, getrandom::Error> {
let mut buf = [0u8; 16];
fill(&mut buf)?;
Ok(encode(buf))
}
pub fn generate_refresh_token() -> Result<String, getrandom::Error> {
let mut buf = [0u8; 32];
fill(&mut buf)?;
Ok(encode(buf))
}
impl Data {
pub async fn set_cache_key(&self, key: String, value: impl Serialize, expire: u32) -> Result<(), RedisError> {
let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;
let key_encoded = encode(key);
let value_json = serde_json::to_string(&value).unwrap();
redis::cmd("SET",).arg(&[key_encoded.clone(), value_json]).exec_async(&mut conn).await?;
redis::cmd("EXPIRE").arg(&[key_encoded, expire.to_string()]).exec_async(&mut conn).await
}
pub async fn get_cache_key(&self, key: String) -> Result<String, RedisError> {
let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;
let key_encoded = encode(key);
redis::cmd("GET").arg(key_encoded).query_async(&mut conn).await
}
}