style: cargo clippy && format
All checks were successful
ci/woodpecker/push/build-and-publish Pipeline was successful

Radical 2025-05-24 01:09:17 +02:00
parent 860fa7a66e
commit 8605b81e7b
26 changed files with 274 additions and 178 deletions
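A pass like this can usually be reproduced in a standard Cargo workspace with something along these lines (the exact invocation is not recorded in the commit, so the flag set here is an assumption):

    cargo clippy --fix --allow-dirty --allow-staged   # apply machine-applicable lint suggestions
    cargo fmt --all                                   # re-wrap lines and sort imports with rustfmt

rustfmt accounts for the re-wrapped expressions and reordered import lists in the diffs below, while clippy's suggestions cover changes such as turning .map_err(|e| crate::error::Error::from(e)) into .map_err(crate::error::Error::from).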

View file

@@ -1,3 +1,3 @@
fn main() {
println!("cargo:rerun-if-changed=migrations");
}
println!("cargo:rerun-if-changed=migrations");
}

View file

@@ -2,13 +2,17 @@ use std::time::{SystemTime, UNIX_EPOCH};
use actix_web::{HttpResponse, post, web};
use argon2::{PasswordHash, PasswordVerifier};
use diesel::{dsl::insert_into, ExpressionMethods, QueryDsl};
use diesel::{ExpressionMethods, QueryDsl, dsl::insert_into};
use diesel_async::RunQueryDsl;
use serde::Deserialize;
use uuid::Uuid;
use crate::{
error::Error, api::v1::auth::{EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX}, schema::*, utils::{generate_access_token, generate_refresh_token, refresh_token_cookie}, Data
Data,
api::v1::auth::{EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX},
error::Error,
schema::*,
utils::{generate_access_token, generate_refresh_token, refresh_token_cookie},
};
use super::Response;
@@ -79,34 +83,45 @@ async fn login(
) -> Result<HttpResponse, Error> {
let mut conn = data.pool.get().await?;
let parsed_hash = PasswordHash::new(&database_password).map_err(|e| Error::PasswordHashError(e.to_string()))?;
let parsed_hash = PasswordHash::new(&database_password)
.map_err(|e| Error::PasswordHashError(e.to_string()))?;
if data
.argon2
.verify_password(request_password.as_bytes(), &parsed_hash)
.is_err()
{
return Err(Error::Unauthorized("Wrong username or password".to_string()));
return Err(Error::Unauthorized(
"Wrong username or password".to_string(),
));
}
let refresh_token = generate_refresh_token()?;
let access_token = generate_access_token()?;
let current_time = SystemTime::now()
.duration_since(UNIX_EPOCH)?
.as_secs() as i64;
let current_time = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64;
use refresh_tokens::dsl as rdsl;
insert_into(refresh_tokens::table)
.values((rdsl::token.eq(&refresh_token), rdsl::uuid.eq(uuid), rdsl::created_at.eq(current_time), rdsl::device_name.eq(device_name)))
.values((
rdsl::token.eq(&refresh_token),
rdsl::uuid.eq(uuid),
rdsl::created_at.eq(current_time),
rdsl::device_name.eq(device_name),
))
.execute(&mut conn)
.await?;
use access_tokens::dsl as adsl;
insert_into(access_tokens::table)
.values((adsl::token.eq(&access_token), adsl::refresh_token.eq(&refresh_token), adsl::uuid.eq(uuid), adsl::created_at.eq(current_time)))
.values((
adsl::token.eq(&access_token),
adsl::refresh_token.eq(&refresh_token),
adsl::uuid.eq(uuid),
adsl::created_at.eq(current_time),
))
.execute(&mut conn)
.await?;

View file

@@ -10,7 +10,7 @@ use regex::Regex;
use serde::Serialize;
use uuid::Uuid;
use crate::{error::Error, Conn, schema::access_tokens::dsl};
use crate::{Conn, error::Error, schema::access_tokens::dsl};
mod login;
mod refresh;
@@ -39,10 +39,7 @@ pub fn web() -> Scope {
.service(revoke::res)
}
pub async fn check_access_token(
access_token: &str,
conn: &mut Conn,
) -> Result<Uuid, Error> {
pub async fn check_access_token(access_token: &str, conn: &mut Conn) -> Result<Uuid, Error> {
let (uuid, created_at): (Uuid, i64) = dsl::access_tokens
.filter(dsl::token.eq(access_token))
.select((dsl::uuid, dsl::created_at))
@@ -56,9 +53,7 @@ pub async fn check_access_token(
}
})?;
let current_time = SystemTime::now()
.duration_since(UNIX_EPOCH)?
.as_secs() as i64;
let current_time = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64;
let lifetime = current_time - created_at;

View file

@@ -1,11 +1,17 @@
use actix_web::{HttpRequest, HttpResponse, post, web};
use diesel::{delete, update, ExpressionMethods, QueryDsl};
use diesel::{ExpressionMethods, QueryDsl, delete, update};
use diesel_async::RunQueryDsl;
use log::error;
use std::time::{SystemTime, UNIX_EPOCH};
use crate::{
error::Error, schema::{access_tokens::{self, dsl}, refresh_tokens::{self, dsl as rdsl}}, utils::{generate_access_token, generate_refresh_token, refresh_token_cookie}, Data
Data,
error::Error,
schema::{
access_tokens::{self, dsl},
refresh_tokens::{self, dsl as rdsl},
},
utils::{generate_access_token, generate_refresh_token, refresh_token_cookie},
};
use super::Response;
@@ -20,9 +26,7 @@ pub async fn res(req: HttpRequest, data: web::Data<Data>) -> Result<HttpResponse
let mut refresh_token = String::from(recv_refresh_token_cookie.unwrap().value());
let current_time = SystemTime::now()
.duration_since(UNIX_EPOCH)?
.as_secs() as i64;
let current_time = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64;
let mut conn = data.pool.get().await?;
@@ -52,9 +56,7 @@ pub async fn res(req: HttpRequest, data: web::Data<Data>) -> Result<HttpResponse
.finish());
}
let current_time = SystemTime::now()
.duration_since(UNIX_EPOCH)?
.as_secs() as i64;
let current_time = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64;
if lifetime > 1987200 {
let new_refresh_token = generate_refresh_token();

View file

@@ -5,14 +5,22 @@ use argon2::{
PasswordHasher,
password_hash::{SaltString, rand_core::OsRng},
};
use diesel::{dsl::insert_into, ExpressionMethods};
use diesel::{ExpressionMethods, dsl::insert_into};
use diesel_async::RunQueryDsl;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use super::Response;
use crate::{
api::v1::auth::{EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX}, error::Error, schema::{access_tokens::{self, dsl as adsl}, refresh_tokens::{self, dsl as rdsl}, users::{self, dsl as udsl}}, utils::{generate_access_token, generate_refresh_token, refresh_token_cookie}, Data
Data,
api::v1::auth::{EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX},
error::Error,
schema::{
access_tokens::{self, dsl as adsl},
refresh_tokens::{self, dsl as rdsl},
users::{self, dsl as udsl},
},
utils::{generate_access_token, generate_refresh_token, refresh_token_cookie},
};
#[derive(Deserialize)]
@@ -107,9 +115,7 @@ pub async fn res(
let refresh_token = generate_refresh_token()?;
let access_token = generate_access_token()?;
let current_time = SystemTime::now()
.duration_since(UNIX_EPOCH)?
.as_secs() as i64;
let current_time = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64;
insert_into(refresh_tokens::table)
.values((
@@ -133,7 +139,7 @@ pub async fn res(
return Ok(HttpResponse::Ok()
.cookie(refresh_token_cookie(refresh_token))
.json(Response { access_token }))
.json(Response { access_token }));
}
Ok(HttpResponse::InternalServerError().finish())

View file

@@ -1,10 +1,17 @@
use actix_web::{HttpRequest, HttpResponse, post, web};
use argon2::{PasswordHash, PasswordVerifier};
use diesel::{delete, ExpressionMethods, QueryDsl};
use diesel::{ExpressionMethods, QueryDsl, delete};
use diesel_async::RunQueryDsl;
use serde::Deserialize;
use crate::{api::v1::auth::check_access_token, error::Error, schema::users::dsl as udsl, schema::refresh_tokens::{self, dsl as rdsl}, utils::get_auth_header, Data};
use crate::{
Data,
api::v1::auth::check_access_token,
error::Error,
schema::refresh_tokens::{self, dsl as rdsl},
schema::users::dsl as udsl,
utils::get_auth_header,
};
#[derive(Deserialize)]
struct RevokeRequest {
@@ -33,14 +40,17 @@ pub async fn res(
.get_result(&mut conn)
.await?;
let hashed_password = PasswordHash::new(&database_password).map_err(|e| Error::PasswordHashError(e.to_string()))?;
let hashed_password = PasswordHash::new(&database_password)
.map_err(|e| Error::PasswordHashError(e.to_string()))?;
if data
.argon2
.verify_password(revoke_request.password.as_bytes(), &hashed_password)
.is_err()
{
return Err(Error::Unauthorized("Wrong username or password".to_string()));
return Err(Error::Unauthorized(
"Wrong username or password".to_string(),
));
}
delete(refresh_tokens::table)

View file

@@ -1,9 +1,9 @@
use actix_web::{HttpRequest, HttpResponse, get, post, web};
use crate::{
error::Error,
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{Guild, Invite, Member},
utils::get_auth_header,
};

View file

@@ -1,9 +1,15 @@
use actix_web::{get, post, web, HttpRequest, HttpResponse, Scope};
use actix_web::{HttpRequest, HttpResponse, Scope, get, post, web};
use serde::Deserialize;
mod uuid;
use crate::{error::Error, api::v1::auth::check_access_token, structs::{Guild, StartAmountQuery}, utils::get_auth_header, Data};
use crate::{
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{Guild, StartAmountQuery},
utils::get_auth_header,
};
#[derive(Deserialize)]
struct GuildInfo {
@@ -63,4 +69,3 @@ pub async fn get(
Ok(HttpResponse::Ok().json(guilds))
}

View file

@@ -1,7 +1,7 @@
use crate::{
error::Error,
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{Channel, Member},
utils::get_auth_header,
};
@@ -43,8 +43,7 @@ pub async fn get(
let channels = Channel::fetch_all(&data.pool, guild_uuid).await?;
data
.set_cache_key(format!("{}_channels", guild_uuid), channels.clone(), 1800)
data.set_cache_key(format!("{}_channels", guild_uuid), channels.clone(), 1800)
.await?;
Ok(HttpResponse::Ok().json(channels))

View file

@@ -1,7 +1,7 @@
use crate::{
error::Error,
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{Channel, Member},
utils::get_auth_header,
};
@@ -41,8 +41,7 @@ pub async fn get(
} else {
channel = Channel::fetch_one(&mut conn, channel_uuid).await?;
data
.set_cache_key(format!("{}", channel_uuid), channel.clone(), 60)
data.set_cache_key(format!("{}", channel_uuid), channel.clone(), 60)
.await?;
}

View file

@@ -2,14 +2,14 @@ pub mod messages;
pub mod socket;
use crate::{
error::Error,
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{Channel, Member},
utils::get_auth_header,
};
use uuid::Uuid;
use actix_web::{HttpRequest, HttpResponse, delete, get, web};
use uuid::Uuid;
#[get("{uuid}/channels/{channel_uuid}")]
pub async fn get(
@@ -37,8 +37,7 @@ pub async fn get(
let channel = Channel::fetch_one(&mut conn, channel_uuid).await?;
data
.set_cache_key(format!("{}", channel_uuid), channel.clone(), 60)
data.set_cache_key(format!("{}", channel_uuid), channel.clone(), 60)
.await?;
Ok(HttpResponse::Ok().json(channel))

View file

@@ -26,7 +26,7 @@ pub async fn echo(
// Get uuids from path
let (guild_uuid, channel_uuid) = path.into_inner();
let mut conn = data.pool.get().await.map_err(|e| crate::error::Error::from(e))?;
let mut conn = data.pool.get().await.map_err(crate::error::Error::from)?;
// Authorize client using auth header
let uuid = check_access_token(auth_header, &mut conn).await?;
@@ -42,8 +42,7 @@ pub async fn echo(
} else {
channel = Channel::fetch_one(&mut conn, channel_uuid).await?;
data
.set_cache_key(format!("{}", channel_uuid), channel.clone(), 60)
data.set_cache_key(format!("{}", channel_uuid), channel.clone(), 60)
.await?;
}
@@ -54,7 +53,11 @@ pub async fn echo(
// aggregate continuation frames up to 1MiB
.max_continuation_size(2_usize.pow(20));
let mut pubsub = data.cache_pool.get_async_pubsub().await.map_err(|e| crate::error::Error::from(e))?;
let mut pubsub = data
.cache_pool
.get_async_pubsub()
.await
.map_err(crate::error::Error::from)?;
let mut session_2 = session_1.clone();

View file

@@ -1,8 +1,14 @@
use actix_web::{put, web, HttpRequest, HttpResponse};
use uuid::Uuid;
use actix_web::{HttpRequest, HttpResponse, put, web};
use futures_util::StreamExt as _;
use uuid::Uuid;
use crate::{error::Error, api::v1::auth::check_access_token, structs::{Guild, Member}, utils::get_auth_header, Data};
use crate::{
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{Guild, Member},
utils::get_auth_header,
};
#[put("{uuid}/icon")]
pub async fn upload(
@@ -30,7 +36,14 @@ pub async fn upload(
bytes.extend_from_slice(&item?);
}
guild.set_icon(&data.bunny_cdn, &mut conn, data.config.bunny.cdn_url.clone(), bytes).await?;
guild
.set_icon(
&data.bunny_cdn,
&mut conn,
data.config.bunny.cdn_url.clone(),
bytes,
)
.await?;
Ok(HttpResponse::Ok().finish())
}

View file

@@ -3,9 +3,9 @@ use serde::Deserialize;
use uuid::Uuid;
use crate::{
error::Error,
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{Guild, Member},
utils::get_auth_header,
};

View file

@@ -2,14 +2,14 @@ use actix_web::{HttpRequest, HttpResponse, Scope, get, web};
use uuid::Uuid;
mod channels;
mod icon;
mod invites;
mod roles;
mod icon;
use crate::{
error::Error,
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{Guild, Member},
utils::get_auth_header,
};

View file

@@ -3,9 +3,9 @@ use actix_web::{HttpRequest, HttpResponse, get, post, web};
use serde::Deserialize;
use crate::{
error::Error,
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{Member, Role},
utils::get_auth_header,
};
@@ -43,8 +43,7 @@ pub async fn get(
let roles = Role::fetch_all(&mut conn, guild_uuid).await?;
data
.set_cache_key(format!("{}_roles", guild_uuid), roles.clone(), 1800)
data.set_cache_key(format!("{}_roles", guild_uuid), roles.clone(), 1800)
.await?;
Ok(HttpResponse::Ok().json(roles))

View file

@@ -1,7 +1,7 @@
use crate::{
error::Error,
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{Member, Role},
utils::get_auth_header,
};
@@ -34,8 +34,7 @@ pub async fn get(
let role = Role::fetch_one(&mut conn, role_uuid).await?;
data
.set_cache_key(format!("{}", role_uuid), role.clone(), 60)
data.set_cache_key(format!("{}", role_uuid), role.clone(), 60)
.await?;
Ok(HttpResponse::Ok().json(role))

View file

@@ -5,8 +5,8 @@ use diesel::QueryDsl;
use diesel_async::RunQueryDsl;
use serde::Serialize;
use crate::error::Error;
use crate::Data;
use crate::error::Error;
use crate::schema::users::dsl::{users, uuid};
const VERSION: Option<&'static str> = option_env!("CARGO_PKG_VERSION");

View file

@@ -1,8 +1,10 @@
use actix_web::{get, patch, web, HttpRequest, HttpResponse};
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
use actix_multipart::form::{MultipartForm, json::Json as MpJson, tempfile::TempFile};
use actix_web::{HttpRequest, HttpResponse, get, patch, web};
use serde::Deserialize;
use crate::{error::Error, structs::Me, api::v1::auth::check_access_token, utils::get_auth_header, Data};
use crate::{
Data, api::v1::auth::check_access_token, error::Error, structs::Me, utils::get_auth_header,
};
#[get("/me")]
pub async fn res(req: HttpRequest, data: web::Data<Data>) -> Result<HttpResponse, Error> {
@@ -35,7 +37,11 @@ struct UploadForm {
}
#[patch("/me")]
pub async fn update(req: HttpRequest, MultipartForm(form): MultipartForm<UploadForm>, data: web::Data<Data>) -> Result<HttpResponse, Error> {
pub async fn update(
req: HttpRequest,
MultipartForm(form): MultipartForm<UploadForm>,
data: web::Data<Data>,
) -> Result<HttpResponse, Error> {
let headers = req.headers();
let auth_header = get_auth_header(headers)?;
@@ -51,7 +57,13 @@ pub async fn update(req: HttpRequest, MultipartForm(form): MultipartForm<UploadF
let byte_slice: &[u8] = &bytes;
me.set_avatar(&data.bunny_cdn, &mut conn, data.config.bunny.cdn_url.clone(), byte_slice.into()).await?;
me.set_avatar(
&data.bunny_cdn,
&mut conn,
data.config.bunny.cdn_url.clone(),
byte_slice.into(),
)
.await?;
}
if let Some(new_info) = form.json {

View file

@@ -1,6 +1,12 @@
use actix_web::{HttpRequest, HttpResponse, Scope, get, web};
use crate::{api::v1::auth::check_access_token, error::Error, structs::{StartAmountQuery, User}, utils::get_auth_header, Data};
use crate::{
Data,
api::v1::auth::check_access_token,
error::Error,
structs::{StartAmountQuery, User},
utils::get_auth_header,
};
mod me;
mod uuid;

View file

@@ -1,8 +1,9 @@
use actix_web::{HttpRequest, HttpResponse, get, web};
use uuid::Uuid;
use crate::{error::Error, api::v1::auth::check_access_token, structs::User, utils::get_auth_header, Data};
use crate::{
Data, api::v1::auth::check_access_token, error::Error, structs::User, utils::get_auth_header,
};
#[get("/{uuid}")]
pub async fn res(
@@ -28,8 +29,7 @@ pub async fn res(
let user = User::fetch_one(&mut conn, uuid).await?;
data
.set_cache_key(uuid.to_string(), user.clone(), 1800)
data.set_cache_key(uuid.to_string(), user.clone(), 1800)
.await?;
Ok(HttpResponse::Ok().json(user))

View file

@@ -1,5 +1,5 @@
use bunny_api_tokio::edge_storage::Endpoint;
use crate::error::Error;
use bunny_api_tokio::edge_storage::Endpoint;
use log::debug;
use serde::Deserialize;
use tokio::fs::read_to_string;

View file

@@ -1,16 +1,23 @@
use std::{io, time::SystemTimeError};
use actix_web::{error::{PayloadError, ResponseError}, http::{header::{ContentType, ToStrError}, StatusCode}, HttpResponse};
use actix_web::{
HttpResponse,
error::{PayloadError, ResponseError},
http::{
StatusCode,
header::{ContentType, ToStrError},
},
};
use deadpool::managed::{BuildError, PoolError};
use diesel::{ConnectionError, result::Error as DieselError};
use diesel_async::pooled_connection::PoolError as DieselPoolError;
use log::{debug, error};
use redis::RedisError;
use serde::Serialize;
use thiserror::Error;
use diesel::{result::Error as DieselError, ConnectionError};
use diesel_async::pooled_connection::PoolError as DieselPoolError;
use tokio::task::JoinError;
use serde_json::Error as JsonError;
use thiserror::Error;
use tokio::task::JoinError;
use toml::de::Error as TomlError;
use log::{debug, error};
#[derive(Debug, Error)]
pub enum Error {
@@ -55,7 +62,7 @@ pub enum Error {
impl ResponseError for Error {
fn error_response(&self) -> HttpResponse {
debug!("{:?}", self);
error!("{}: {}", self.status_code(), self.to_string());
error!("{}: {}", self.status_code(), self);
HttpResponse::build(self.status_code())
.insert_header(ContentType::json())
@@ -79,8 +86,6 @@ struct WebError {
impl WebError {
fn new(message: String) -> Self {
Self {
message,
}
Self { message }
}
}

View file

@@ -2,24 +2,25 @@ use actix_cors::Cors;
use actix_web::{App, HttpServer, web};
use argon2::Argon2;
use clap::Parser;
use error::Error;
use simple_logger::SimpleLogger;
use diesel_async::pooled_connection::AsyncDieselConnectionManager;
use diesel_async::pooled_connection::deadpool::Pool;
use error::Error;
use simple_logger::SimpleLogger;
use std::time::SystemTime;
mod config;
use config::{Config, ConfigBuilder};
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations};
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
type Conn = deadpool::managed::Object<AsyncDieselConnectionManager<diesel_async::AsyncPgConnection>>;
type Conn =
deadpool::managed::Object<AsyncDieselConnectionManager<diesel_async::AsyncPgConnection>>;
mod api;
pub mod error;
pub mod schema;
pub mod structs;
pub mod utils;
pub mod schema;
pub mod error;
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
@@ -30,7 +31,10 @@ struct Args {
#[derive(Clone)]
pub struct Data {
pub pool: deadpool::managed::Pool<AsyncDieselConnectionManager<diesel_async::AsyncPgConnection>, Conn>,
pub pool: deadpool::managed::Pool<
AsyncDieselConnectionManager<diesel_async::AsyncPgConnection>,
Conn,
>,
pub cache_pool: redis::Client,
pub config: Config,
pub argon2: Argon2<'static>,
@@ -53,27 +57,33 @@ async fn main() -> Result<(), Error> {
let web = config.web.clone();
// create a new connection pool with the default config
let pool_config = AsyncDieselConnectionManager::<diesel_async::AsyncPgConnection>::new(config.database.url());
let pool_config =
AsyncDieselConnectionManager::<diesel_async::AsyncPgConnection>::new(config.database.url());
let pool = Pool::builder(pool_config).build()?;
let cache_pool = redis::Client::open(config.cache_database.url())?;
let mut bunny_cdn = bunny_api_tokio::Client::new(config.bunny.api_key.clone()).await?;
bunny_cdn.storage.init(config.bunny.endpoint.clone(), config.bunny.storage_zone.clone())?;
bunny_cdn.storage.init(
config.bunny.endpoint.clone(),
config.bunny.storage_zone.clone(),
)?;
let database_url = config.database.url();
tokio::task::spawn_blocking(move || {
use diesel::prelude::Connection;
use diesel_async::async_connection_wrapper::AsyncConnectionWrapper;
use diesel::prelude::Connection;
use diesel_async::async_connection_wrapper::AsyncConnectionWrapper;
let mut conn = AsyncConnectionWrapper::<diesel_async::AsyncPgConnection>::establish(&database_url)?;
let mut conn =
AsyncConnectionWrapper::<diesel_async::AsyncPgConnection>::establish(&database_url)?;
conn.run_pending_migrations(MIGRATIONS)?;
Ok::<_, Box<dyn std::error::Error + Send + Sync>>(())
}).await?.unwrap();
})
.await?
.unwrap();
/*
**Stored for later possible use**

View file

@@ -1,14 +1,20 @@
use diesel::{delete, insert_into, prelude::{Insertable, Queryable}, update, ExpressionMethods, QueryDsl, Selectable, SelectableHelper};
use actix_web::web::BytesMut;
use diesel::{
ExpressionMethods, QueryDsl, Selectable, SelectableHelper, delete, insert_into,
prelude::{Insertable, Queryable},
update,
};
use diesel_async::{RunQueryDsl, pooled_connection::AsyncDieselConnectionManager};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use diesel_async::{pooled_connection::AsyncDieselConnectionManager, RunQueryDsl};
use tokio::task;
use url::Url;
use actix_web::web::BytesMut;
use uuid::Uuid;
use crate::{error::Error, schema::*, utils::image_check, Conn, Data};
use crate::{Conn, Data, error::Error, schema::*, utils::image_check};
fn load_or_empty<T>(query_result: Result<Vec<T>, diesel::result::Error>) -> Result<Vec<T>, diesel::result::Error> {
fn load_or_empty<T>(
query_result: Result<Vec<T>, diesel::result::Error>,
) -> Result<Vec<T>, diesel::result::Error> {
match query_result {
Ok(vec) => Ok(vec),
Err(diesel::result::Error::NotFound) => Ok(Vec::new()),
@@ -34,7 +40,7 @@ impl ChannelBuilder {
.filter(channel_uuid.eq(self.uuid))
.select(ChannelPermission::as_select())
.load(conn)
.await
.await,
)?;
Ok(Channel {
@@ -66,7 +72,10 @@ pub struct ChannelPermission {
impl Channel {
pub async fn fetch_all(
pool: &deadpool::managed::Pool<AsyncDieselConnectionManager<diesel_async::AsyncPgConnection>, Conn>,
pool: &deadpool::managed::Pool<
AsyncDieselConnectionManager<diesel_async::AsyncPgConnection>,
Conn,
>,
guild_uuid: Uuid,
) -> Result<Vec<Self>, Error> {
let mut conn = pool.get().await?;
@@ -77,21 +86,18 @@ impl Channel {
.filter(dsl::guild_uuid.eq(guild_uuid))
.select(ChannelBuilder::as_select())
.load(&mut conn)
.await
.await,
)?;
let channel_futures = channel_builders.iter().map(async move |c| {
let mut conn = pool.get().await?;
c.clone().build(&mut conn).await
});
futures::future::try_join_all(channel_futures).await
}
pub async fn fetch_one(
conn: &mut Conn,
channel_uuid: Uuid,
) -> Result<Self, Error> {
pub async fn fetch_one(conn: &mut Conn, channel_uuid: Uuid) -> Result<Self, Error> {
use channels::dsl;
let channel_builder: ChannelBuilder = dsl::channels
.filter(dsl::uuid.eq(channel_uuid))
@@ -114,7 +120,7 @@ impl Channel {
let new_channel = ChannelBuilder {
uuid: channel_uuid,
guild_uuid: guild_uuid,
guild_uuid,
name: name.clone(),
description: description.clone(),
};
@@ -133,11 +139,11 @@ impl Channel {
permissions: vec![],
};
data
.set_cache_key(channel_uuid.to_string(), channel.clone(), 1800)
data.set_cache_key(channel_uuid.to_string(), channel.clone(), 1800)
.await?;
data.del_cache_key(format!("{}_channels", guild_uuid)).await?;
data.del_cache_key(format!("{}_channels", guild_uuid))
.await?;
Ok(channel)
}
@@ -166,7 +172,7 @@ impl Channel {
.limit(amount)
.offset(offset)
.load(conn)
.await
.await,
)?;
Ok(messages)
@@ -257,8 +263,8 @@ impl GuildBuilder {
description: self.description,
icon: self.icon.and_then(|i| i.parse().ok()),
owner_uuid: self.owner_uuid,
roles: roles,
member_count: member_count,
roles,
member_count,
})
}
}
@@ -287,7 +293,10 @@ impl Guild {
}
pub async fn fetch_amount(
pool: &deadpool::managed::Pool<AsyncDieselConnectionManager<diesel_async::AsyncPgConnection>, Conn>,
pool: &deadpool::managed::Pool<
AsyncDieselConnectionManager<diesel_async::AsyncPgConnection>,
Conn,
>,
offset: i64,
amount: i64,
) -> Result<Vec<Self>, Error> {
@@ -302,7 +311,7 @@ impl Guild {
.offset(offset)
.limit(amount)
.load(&mut conn)
.await
.await,
)?;
// Process each guild concurrently
@@ -368,7 +377,7 @@ impl Guild {
.filter(dsl::guild_uuid.eq(self.uuid))
.select(Invite::as_select())
.load(conn)
.await
.await,
)?;
Ok(invites)
@@ -385,7 +394,7 @@ impl Guild {
if let Some(id) = custom_id {
invite_id = id;
if invite_id.len() > 32 {
return Err(Error::BadRequest("MAX LENGTH".to_string()))
return Err(Error::BadRequest("MAX LENGTH".to_string()));
}
} else {
let charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
@@ -408,14 +417,18 @@ impl Guild {
}
// FIXME: Horrible security
pub async fn set_icon(&mut self, bunny_cdn: &bunny_api_tokio::Client, conn: &mut Conn, cdn_url: Url, icon: BytesMut) -> Result<(), Error> {
pub async fn set_icon(
&mut self,
bunny_cdn: &bunny_api_tokio::Client,
conn: &mut Conn,
cdn_url: Url,
icon: BytesMut,
) -> Result<(), Error> {
let icon_clone = icon.clone();
let image_type = task::spawn_blocking(move || image_check(icon_clone)).await??;
if let Some(icon) = &self.icon {
let relative_url = icon
.path()
.trim_start_matches('/');
let relative_url = icon.path().trim_start_matches('/');
bunny_cdn.storage.delete(relative_url).await?;
}
@@ -452,26 +465,20 @@ pub struct Role {
}
impl Role {
pub async fn fetch_all(
conn: &mut Conn,
guild_uuid: Uuid,
) -> Result<Vec<Self>, Error> {
pub async fn fetch_all(conn: &mut Conn, guild_uuid: Uuid) -> Result<Vec<Self>, Error> {
use roles::dsl;
let roles: Vec<Role> = load_or_empty(
dsl::roles
.filter(dsl::guild_uuid.eq(guild_uuid))
.select(Role::as_select())
.load(conn)
.await
.await,
)?;
Ok(roles)
}
pub async fn fetch_one(
conn: &mut Conn,
role_uuid: Uuid,
) -> Result<Self, Error> {
pub async fn fetch_one(conn: &mut Conn, role_uuid: Uuid) -> Result<Self, Error> {
use roles::dsl;
let role: Role = dsl::roles
.filter(dsl::uuid.eq(role_uuid))
@@ -482,11 +489,7 @@ impl Role {
Ok(role)
}
pub async fn new(
conn: &mut Conn,
guild_uuid: Uuid,
name: String,
) -> Result<Self, Error> {
pub async fn new(conn: &mut Conn, guild_uuid: Uuid, name: String) -> Result<Self, Error> {
let role_uuid = Uuid::now_v7();
let role = Role {
@@ -534,22 +537,18 @@ impl Member {
user_uuid: Uuid,
guild_uuid: Uuid,
) -> Result<Self, Error> {
use guild_members::dsl;
let member: Member = dsl::guild_members
.filter(dsl::user_uuid.eq(user_uuid))
.filter(dsl::guild_uuid.eq(guild_uuid))
.select(Member::as_select())
.get_result(conn)
.await?;
use guild_members::dsl;
let member: Member = dsl::guild_members
.filter(dsl::user_uuid.eq(user_uuid))
.filter(dsl::guild_uuid.eq(guild_uuid))
.select(Member::as_select())
.get_result(conn)
.await?;
Ok(member)
}
pub async fn new(
conn: &mut Conn,
user_uuid: Uuid,
guild_uuid: Uuid,
) -> Result<Self, Error> {
pub async fn new(conn: &mut Conn, user_uuid: Uuid, guild_uuid: Uuid) -> Result<Self, Error> {
let member_uuid = Uuid::now_v7();
let member = Member {
@@ -629,7 +628,11 @@ impl User {
Ok(user)
}
pub async fn fetch_amount(conn: &mut Conn, offset: i64, amount: i64) -> Result<Vec<Self>, Error> {
pub async fn fetch_amount(
conn: &mut Conn,
offset: i64,
amount: i64,
) -> Result<Vec<Self>, Error> {
use users::dsl;
let users: Vec<User> = load_or_empty(
dsl::users
@@ -637,7 +640,7 @@ impl User {
.offset(offset)
.select(User::as_select())
.load(conn)
.await
.await,
)?;
Ok(users)
@@ -668,23 +671,30 @@ impl Me {
Ok(me)
}
pub async fn set_avatar(&mut self, bunny_cdn: &bunny_api_tokio::Client, conn: &mut Conn, cdn_url: Url, avatar: BytesMut) -> Result<(), Error> {
pub async fn set_avatar(
&mut self,
bunny_cdn: &bunny_api_tokio::Client,
conn: &mut Conn,
cdn_url: Url,
avatar: BytesMut,
) -> Result<(), Error> {
let avatar_clone = avatar.clone();
let image_type = task::spawn_blocking(move || image_check(avatar_clone)).await??;
if let Some(avatar) = &self.avatar {
let avatar_url: Url = avatar.parse()?;
let relative_url = avatar_url
.path()
.trim_start_matches('/');
let relative_url = avatar_url.path().trim_start_matches('/');
bunny_cdn.storage.delete(relative_url).await?;
}
let path = format!("avatar/{}/avatar.{}", self.uuid, image_type);
bunny_cdn.storage.upload(path.clone(), avatar.into()).await?;
bunny_cdn
.storage
.upload(path.clone(), avatar.into())
.await?;
let avatar_url = cdn_url.join(&path)?;

View file

@@ -1,6 +1,7 @@
use actix_web::{
cookie::{time::Duration, Cookie, SameSite},
http::header::HeaderMap, web::BytesMut,
cookie::{Cookie, SameSite, time::Duration},
http::header::HeaderMap,
web::BytesMut,
};
use bindet::FileType;
use getrandom::fill;
@@ -8,29 +9,35 @@ use hex::encode;
use redis::RedisError;
use serde::Serialize;
use crate::{error::Error, Data};
use crate::{Data, error::Error};
pub fn get_auth_header(headers: &HeaderMap) -> Result<&str, Error> {
let auth_token = headers.get(actix_web::http::header::AUTHORIZATION);
if auth_token.is_none() {
return Err(Error::Unauthorized("No authorization header provided".to_string()));
return Err(Error::Unauthorized(
"No authorization header provided".to_string(),
));
}
let auth_raw = auth_token.unwrap().to_str()?;
let mut auth = auth_raw.split_whitespace();
let auth_type = auth.nth(0);
let auth_type = auth.next();
let auth_value = auth.nth(0);
let auth_value = auth.next();
if auth_type.is_none() {
return Err(Error::BadRequest("Authorization header is empty".to_string()));
return Err(Error::BadRequest(
"Authorization header is empty".to_string(),
));
} else if auth_type.is_some_and(|at| at != "Bearer") {
return Err(Error::BadRequest("Only token auth is supported".to_string()));
return Err(Error::BadRequest(
"Only token auth is supported".to_string(),
));
}
if auth_value.is_none() {
return Err(Error::BadRequest("No token provided".to_string()));
}
@@ -67,13 +74,15 @@ pub fn image_check(icon: BytesMut) -> Result<String, Error> {
if let Ok(Some(file_type)) = detect {
if file_type.likely_to_be == vec![FileType::Jpg] {
return Ok(String::from("jpg"))
return Ok(String::from("jpg"));
} else if file_type.likely_to_be == vec![FileType::Png] {
return Ok(String::from("png"))
return Ok(String::from("png"));
}
}
Err(Error::BadRequest("Uploaded file is not an image".to_string()))
Err(Error::BadRequest(
"Uploaded file is not an image".to_string(),
))
}
impl Data {