diff --git a/.woodpecker/build-and-publish.yml b/.woodpecker/build-and-publish.yml index c0f367a..57f2761 100644 --- a/.woodpecker/build-and-publish.yml +++ b/.woodpecker/build-and-publish.yml @@ -3,6 +3,21 @@ when: branch: main steps: + - name: build-x86_64 + image: rust:bookworm + commands: + - cargo build --release + - name: build-arm64 + image: rust:bookworm + commands: + - dpkg --add-architecture arm64 + - apt-get update -y && apt-get install -y crossbuild-essential-arm64 libssl-dev:arm64 + - rustup target add aarch64-unknown-linux-gnu + - cargo build --target aarch64-unknown-linux-gnu --release + environment: + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc + PKG_CONFIG_ALLOW_CROSS: 1 + PKG_CONFIG_PATH: /usr/aarch64-linux-gnu/lib/pkgconfig - name: container-build-and-publish image: docker commands: diff --git a/.woodpecker/publish-docs.yml b/.woodpecker/publish-docs.yml new file mode 100644 index 0000000..e6ce482 --- /dev/null +++ b/.woodpecker/publish-docs.yml @@ -0,0 +1,19 @@ +when: + - event: push + branch: main + +steps: + - name: build-docs + image: rust:bookworm + commands: + - cargo doc --release --no-deps + + - name: publish-docs + image: debian:12 + commands: + - apt update -y && apt install -y rsync openssh-client + - printf "Host *\n StrictHostKeyChecking no" >> /etc/ssh/ssh_config + - ssh-agent bash -c "ssh-add <(echo '$KEY' | base64 -d) && rsync --archive --verbose --compress --hard-links --delete-during --partial --progress ./target/doc/ root@gorb.app:/var/www/docs.gorb.app/api && ssh root@gorb.app systemctl reload caddy.service" + environment: + KEY: + from_secret: ssh_key diff --git a/Cargo.toml b/Cargo.toml index 33d01e7..1c5f34b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,6 +8,12 @@ strip = true lto = true codegen-units = 1 +# Speed up compilation to make dev bearable +[profile.dev] +debug = 0 +strip = "debuginfo" +codegen-units = 512 + [dependencies] actix-cors = "0.7.1" actix-web = "4.11" @@ -21,16 +27,25 @@ regex = "1.11" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" simple_logger = "5.0.0" -sqlx = { version = "0.8", features = ["runtime-tokio", "tls-native-tls", "postgres"] } -redis = { version = "0.31.0", features= ["tokio-comp"] } -tokio-tungstenite = { version = "0.26", features = ["native-tls", "url"] } +redis = { version = "0.32", features= ["tokio-comp"] } +tokio-tungstenite = { version = "0.27", features = ["native-tls", "url"] } toml = "0.8" url = { version = "2.5", features = ["serde"] } -uuid = { version = "1.16", features = ["serde", "v7"] } +uuid = { version = "1.17", features = ["serde", "v7"] } random-string = "1.1" actix-ws = "0.3.0" futures-util = "0.3.31" +bunny-api-tokio = { version = "0.4", features = ["edge_storage"], default-features = false } +bindet = "0.3.2" +deadpool = "0.12" +diesel = { version = "2.2", features = ["uuid", "chrono"], default-features = false } +diesel-async = { version = "0.5", features = ["deadpool", "postgres", "async-connection-wrapper"] } +diesel_migrations = { version = "2.2.0", features = ["postgres"] } +thiserror = "2.0.12" +actix-multipart = "0.7.2" +lettre = { version = "0.11", features = ["tokio1", "tokio1-native-tls"] } +chrono = { version = "0.4.41", features = ["serde"] } [dependencies.tokio] -version = "1.44" +version = "1.45" features = ["full"] diff --git a/Dockerfile b/Dockerfile index d9a0389..25795a9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,17 @@ -FROM rust:bookworm AS builder +FROM --platform=linux/amd64 debian:12-slim AS prep WORKDIR /src 
-COPY . . - -RUN cargo build --release +COPY target/release/backend backend-amd64 +COPY target/aarch64-unknown-linux-gnu/release/backend backend-arm64 FROM debian:12-slim -RUN apt update && apt install libssl3 && rm -rf /var/lib/apt/lists/* /var/cache/apt/* /tmp/* +ARG TARGETARCH -COPY --from=builder /src/target/release/backend /usr/bin/gorb-backend +RUN apt update -y && apt install libssl3 ca-certificates -y && rm -rf /var/lib/apt/lists/* /var/cache/apt/* /tmp/* + +COPY --from=prep /src/backend-${TARGETARCH} /usr/bin/gorb-backend COPY entrypoint.sh /usr/bin/entrypoint.sh @@ -18,12 +19,23 @@ RUN useradd --create-home --home-dir /gorb gorb USER gorb -ENV DATABASE_USERNAME="gorb" \ -DATABASE_PASSWORD="gorb" \ -DATABASE="gorb" \ -DATABASE_HOST="database" \ -DATABASE_PORT="5432" \ -CACHE_DB_HOST="valkey" \ -CACHE_DB_PORT="6379" +ENV WEB_FRONTEND_URL=https://gorb.app/web/ \ +WEB_BASE_PATH=/api \ +DATABASE_USERNAME=gorb \ +DATABASE_PASSWORD=gorb \ +DATABASE=gorb \ +DATABASE_HOST=database \ +DATABASE_PORT=5432 \ +CACHE_DB_HOST=valkey \ +CACHE_DB_PORT=6379 \ +BUNNY_API_KEY=your_storage_zone_password_here \ +BUNNY_ENDPOINT=Frankfurt \ +BUNNY_ZONE=gorb \ +BUNNY_CDN_URL=https://cdn.gorb.app \ +MAIL_ADDRESS=noreply@gorb.app \ +MAIL_TLS=tls \ +SMTP_SERVER=mail.gorb.app \ +SMTP_USERNAME=your_smtp_username \ +SMTP_PASSWORD=your_smtp_password ENTRYPOINT ["/usr/bin/entrypoint.sh"] diff --git a/build.rs b/build.rs new file mode 100644 index 0000000..45de5ff --- /dev/null +++ b/build.rs @@ -0,0 +1,16 @@ +use std::process::Command; + +fn main() { + println!("cargo:rerun-if-changed=migrations"); + + let git_short_hash = Command::new("git") + .args(["rev-parse", "--short", "HEAD"]) + .output() + .ok() + .and_then(|o| String::from_utf8(o.stdout).ok()) + .map(|s| s.trim().to_string()) // Trim newline + .unwrap_or_else(|| "UNKNOWN".to_string()); + + // Tell Cargo to set `GIT_SHORT_HASH` for the main compilation + println!("cargo:rustc-env=GIT_SHORT_HASH={}", git_short_hash); +} diff --git a/compose.dev.yml b/compose.dev.yml index d064beb..93a1a85 100644 --- a/compose.dev.yml +++ b/compose.dev.yml @@ -18,11 +18,21 @@ services: - gorb-backend:/gorb environment: #- RUST_LOG=debug + - WEB_FRONTEND_URL=https://gorb.app/web/ - DATABASE_USERNAME=gorb - DATABASE_PASSWORD=gorb - DATABASE=gorb - DATABASE_HOST=database - DATABASE_PORT=5432 + - BUNNY_API_KEY=your_storage_zone_password_here + - BUNNY_ENDPOINT=Frankfurt + - BUNNY_ZONE=gorb + - BUNNY_CDN_URL=https://cdn.gorb.app + - MAIL_ADDRESS=Gorb + - MAIL_TLS=tls + - SMTP_SERVER=mail.gorb.app + - SMTP_USERNAME=your_smtp_username + - SMTP_PASSWORD=your_smtp_password database: image: postgres:16 restart: always diff --git a/compose.yml b/compose.yml index 84e6695..b1dc07d 100644 --- a/compose.yml +++ b/compose.yml @@ -16,11 +16,21 @@ services: - gorb-backend:/gorb environment: #- RUST_LOG=debug + - WEB_FRONTEND_URL=https://gorb.app/web/ - DATABASE_USERNAME=gorb - DATABASE_PASSWORD=gorb - DATABASE=gorb - DATABASE_HOST=database - DATABASE_PORT=5432 + - BUNNY_API_KEY=your_storage_zone_password_here + - BUNNY_ENDPOINT=Frankfurt + - BUNNY_ZONE=gorb + - BUNNY_CDN_URL=https://cdn.gorb.app + - MAIL_ADDRESS=Gorb + - MAIL_TLS=tls + - SMTP_SERVER=mail.gorb.app + - SMTP_USERNAME=your_smtp_username + - SMTP_PASSWORD=your_smtp_password database: image: postgres:16 restart: always diff --git a/diesel.toml b/diesel.toml new file mode 100644 index 0000000..a0d61bf --- /dev/null +++ b/diesel.toml @@ -0,0 +1,9 @@ +# For documentation on how to configure this file, +# see 
https://diesel.rs/guides/configuring-diesel-cli + +[print_schema] +file = "src/schema.rs" +custom_type_derives = ["diesel::query_builder::QueryId", "Clone"] + +[migrations_directory] +dir = "migrations" diff --git a/entrypoint.sh b/entrypoint.sh index a212f8e..38ba890 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -10,6 +10,10 @@ fi if [ ! -f "/gorb/config/config.toml" ]; then cat > /gorb/config/config.toml <&1 | tee /gorb/logs/backend.log diff --git a/migrations/.keep b/migrations/.keep new file mode 100644 index 0000000..e69de29 diff --git a/migrations/00000000000000_diesel_initial_setup/down.sql b/migrations/00000000000000_diesel_initial_setup/down.sql new file mode 100644 index 0000000..a9f5260 --- /dev/null +++ b/migrations/00000000000000_diesel_initial_setup/down.sql @@ -0,0 +1,6 @@ +-- This file was automatically created by Diesel to setup helper functions +-- and other internal bookkeeping. This file is safe to edit, any future +-- changes will be added to existing projects as new migrations. + +DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass); +DROP FUNCTION IF EXISTS diesel_set_updated_at(); diff --git a/migrations/00000000000000_diesel_initial_setup/up.sql b/migrations/00000000000000_diesel_initial_setup/up.sql new file mode 100644 index 0000000..d68895b --- /dev/null +++ b/migrations/00000000000000_diesel_initial_setup/up.sql @@ -0,0 +1,36 @@ +-- This file was automatically created by Diesel to setup helper functions +-- and other internal bookkeeping. This file is safe to edit, any future +-- changes will be added to existing projects as new migrations. + + + + +-- Sets up a trigger for the given table to automatically set a column called +-- `updated_at` whenever the row is modified (unless `updated_at` was included +-- in the modified columns) +-- +-- # Example +-- +-- ```sql +-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW()); +-- +-- SELECT diesel_manage_updated_at('users'); +-- ``` +CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$ +BEGIN + EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s + FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl); +END; +$$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$ +BEGIN + IF ( + NEW IS DISTINCT FROM OLD AND + NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at + ) THEN + NEW.updated_at := current_timestamp; + END IF; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; diff --git a/migrations/2025-05-21-192435_create_users/down.sql b/migrations/2025-05-21-192435_create_users/down.sql new file mode 100644 index 0000000..a54826f --- /dev/null +++ b/migrations/2025-05-21-192435_create_users/down.sql @@ -0,0 +1,4 @@ +-- This file should undo anything in `up.sql` +DROP INDEX idx_unique_username_active; +DROP INDEX idx_unique_email_active; +DROP TABLE users; diff --git a/migrations/2025-05-21-192435_create_users/up.sql b/migrations/2025-05-21-192435_create_users/up.sql new file mode 100644 index 0000000..0262507 --- /dev/null +++ b/migrations/2025-05-21-192435_create_users/up.sql @@ -0,0 +1,20 @@ +-- Your SQL goes here +CREATE TABLE users ( + uuid uuid PRIMARY KEY NOT NULL, + username varchar(32) NOT NULL, + display_name varchar(64) DEFAULT NULL, + password varchar(512) NOT NULL, + email varchar(100) NOT NULL, + email_verified boolean NOT NULL DEFAULT FALSE, + is_deleted boolean NOT NULL DEFAULT FALSE, + deleted_at int8 DEFAULT NULL, + CONSTRAINT unique_username_active UNIQUE NULLS NOT 
DISTINCT (username, is_deleted), + CONSTRAINT unique_email_active UNIQUE NULLS NOT DISTINCT (email, is_deleted) +); + +CREATE UNIQUE INDEX idx_unique_username_active +ON users(username) +WHERE is_deleted = FALSE; +CREATE UNIQUE INDEX idx_unique_email_active +ON users(email) +WHERE is_deleted = FALSE; diff --git a/migrations/2025-05-21-192936_create_instance_permissions/down.sql b/migrations/2025-05-21-192936_create_instance_permissions/down.sql new file mode 100644 index 0000000..c72fb0f --- /dev/null +++ b/migrations/2025-05-21-192936_create_instance_permissions/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE instance_permissions; diff --git a/migrations/2025-05-21-192936_create_instance_permissions/up.sql b/migrations/2025-05-21-192936_create_instance_permissions/up.sql new file mode 100644 index 0000000..f3dd755 --- /dev/null +++ b/migrations/2025-05-21-192936_create_instance_permissions/up.sql @@ -0,0 +1,5 @@ +-- Your SQL goes here +CREATE TABLE instance_permissions ( + uuid uuid PRIMARY KEY NOT NULL REFERENCES users(uuid), + administrator boolean NOT NULL DEFAULT FALSE +); diff --git a/migrations/2025-05-21-193321_create_tokens/down.sql b/migrations/2025-05-21-193321_create_tokens/down.sql new file mode 100644 index 0000000..4555fe6 --- /dev/null +++ b/migrations/2025-05-21-193321_create_tokens/down.sql @@ -0,0 +1,3 @@ +-- This file should undo anything in `up.sql` +DROP TABLE access_tokens; +DROP TABLE refresh_tokens; diff --git a/migrations/2025-05-21-193321_create_tokens/up.sql b/migrations/2025-05-21-193321_create_tokens/up.sql new file mode 100644 index 0000000..b3fb554 --- /dev/null +++ b/migrations/2025-05-21-193321_create_tokens/up.sql @@ -0,0 +1,13 @@ +-- Your SQL goes here +CREATE TABLE refresh_tokens ( + token varchar(64) PRIMARY KEY UNIQUE NOT NULL, + uuid uuid NOT NULL REFERENCES users(uuid), + created_at int8 NOT NULL, + device_name varchar(16) NOT NULL +); +CREATE TABLE access_tokens ( + token varchar(32) PRIMARY KEY UNIQUE NOT NULL, + refresh_token varchar(64) UNIQUE NOT NULL REFERENCES refresh_tokens(token) ON UPDATE CASCADE ON DELETE CASCADE, + uuid uuid NOT NULL REFERENCES users(uuid), + created_at int8 NOT NULL +); diff --git a/migrations/2025-05-21-193500_create_guilds/down.sql b/migrations/2025-05-21-193500_create_guilds/down.sql new file mode 100644 index 0000000..12ae87e --- /dev/null +++ b/migrations/2025-05-21-193500_create_guilds/down.sql @@ -0,0 +1,3 @@ +-- This file should undo anything in `up.sql` +DROP TABLE guild_members; +DROP TABLE guilds; diff --git a/migrations/2025-05-21-193500_create_guilds/up.sql b/migrations/2025-05-21-193500_create_guilds/up.sql new file mode 100644 index 0000000..268c597 --- /dev/null +++ b/migrations/2025-05-21-193500_create_guilds/up.sql @@ -0,0 +1,13 @@ +-- Your SQL goes here +CREATE TABLE guilds ( + uuid uuid PRIMARY KEY NOT NULL, + owner_uuid uuid NOT NULL REFERENCES users(uuid), + name VARCHAR(100) NOT NULL, + description VARCHAR(300) +); +CREATE TABLE guild_members ( + uuid uuid PRIMARY KEY NOT NULL, + guild_uuid uuid NOT NULL REFERENCES guilds(uuid) ON DELETE CASCADE, + user_uuid uuid NOT NULL REFERENCES users(uuid), + nickname VARCHAR(100) DEFAULT NULL +); diff --git a/migrations/2025-05-21-193620_create_roles/down.sql b/migrations/2025-05-21-193620_create_roles/down.sql new file mode 100644 index 0000000..f215a04 --- /dev/null +++ b/migrations/2025-05-21-193620_create_roles/down.sql @@ -0,0 +1,3 @@ +-- This file should undo anything in `up.sql` +DROP TABLE role_members; +DROP TABLE 
roles; diff --git a/migrations/2025-05-21-193620_create_roles/up.sql b/migrations/2025-05-21-193620_create_roles/up.sql new file mode 100644 index 0000000..55d051d --- /dev/null +++ b/migrations/2025-05-21-193620_create_roles/up.sql @@ -0,0 +1,15 @@ +-- Your SQL goes here +CREATE TABLE roles ( + uuid uuid UNIQUE NOT NULL, + guild_uuid uuid NOT NULL REFERENCES guilds(uuid) ON DELETE CASCADE, + name VARCHAR(50) NOT NULL, + color int NOT NULL DEFAULT 16777215, + position int NOT NULL, + permissions int8 NOT NULL DEFAULT 0, + PRIMARY KEY (uuid, guild_uuid) +); +CREATE TABLE role_members ( + role_uuid uuid NOT NULL REFERENCES roles(uuid) ON DELETE CASCADE, + member_uuid uuid NOT NULL REFERENCES guild_members(uuid) ON DELETE CASCADE, + PRIMARY KEY (role_uuid, member_uuid) +); diff --git a/migrations/2025-05-21-193745_create_channels/down.sql b/migrations/2025-05-21-193745_create_channels/down.sql new file mode 100644 index 0000000..6334604 --- /dev/null +++ b/migrations/2025-05-21-193745_create_channels/down.sql @@ -0,0 +1,3 @@ +-- This file should undo anything in `up.sql` +DROP TABLE channel_permissions; +DROP TABLE channels; diff --git a/migrations/2025-05-21-193745_create_channels/up.sql b/migrations/2025-05-21-193745_create_channels/up.sql new file mode 100644 index 0000000..2cce7f2 --- /dev/null +++ b/migrations/2025-05-21-193745_create_channels/up.sql @@ -0,0 +1,13 @@ +-- Your SQL goes here +CREATE TABLE channels ( + uuid uuid PRIMARY KEY NOT NULL, + guild_uuid uuid NOT NULL REFERENCES guilds(uuid) ON DELETE CASCADE, + name varchar(32) NOT NULL, + description varchar(500) NOT NULL +); +CREATE TABLE channel_permissions ( + channel_uuid uuid NOT NULL REFERENCES channels(uuid) ON DELETE CASCADE, + role_uuid uuid NOT NULL REFERENCES roles(uuid) ON DELETE CASCADE, + permissions int8 NOT NULL DEFAULT 0, + PRIMARY KEY (channel_uuid, role_uuid) +); diff --git a/migrations/2025-05-21-193954_create_messages/down.sql b/migrations/2025-05-21-193954_create_messages/down.sql new file mode 100644 index 0000000..bb9ce09 --- /dev/null +++ b/migrations/2025-05-21-193954_create_messages/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE messages; diff --git a/migrations/2025-05-21-193954_create_messages/up.sql b/migrations/2025-05-21-193954_create_messages/up.sql new file mode 100644 index 0000000..1510974 --- /dev/null +++ b/migrations/2025-05-21-193954_create_messages/up.sql @@ -0,0 +1,7 @@ +-- Your SQL goes here +CREATE TABLE messages ( + uuid uuid PRIMARY KEY NOT NULL, + channel_uuid uuid NOT NULL REFERENCES channels(uuid) ON DELETE CASCADE, + user_uuid uuid NOT NULL REFERENCES users(uuid), + message varchar(4000) NOT NULL +); diff --git a/migrations/2025-05-21-194207_create_invites/down.sql b/migrations/2025-05-21-194207_create_invites/down.sql new file mode 100644 index 0000000..03b72de --- /dev/null +++ b/migrations/2025-05-21-194207_create_invites/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE invites; diff --git a/migrations/2025-05-21-194207_create_invites/up.sql b/migrations/2025-05-21-194207_create_invites/up.sql new file mode 100644 index 0000000..795b39c --- /dev/null +++ b/migrations/2025-05-21-194207_create_invites/up.sql @@ -0,0 +1,6 @@ +-- Your SQL goes here +CREATE TABLE invites ( + id varchar(32) PRIMARY KEY NOT NULL, + guild_uuid uuid NOT NULL REFERENCES guilds(uuid) ON DELETE CASCADE, + user_uuid uuid NOT NULL REFERENCES users(uuid) +); diff --git a/migrations/2025-05-21-203022_channel_description_nullable/down.sql 
b/migrations/2025-05-21-203022_channel_description_nullable/down.sql new file mode 100644 index 0000000..73344b1 --- /dev/null +++ b/migrations/2025-05-21-203022_channel_description_nullable/down.sql @@ -0,0 +1,4 @@ +-- This file should undo anything in `up.sql` +UPDATE channels SET description = '' WHERE description IS NULL; +ALTER TABLE ONLY channels ALTER COLUMN description SET NOT NULL; +ALTER TABLE ONLY channels ALTER COLUMN description DROP DEFAULT; diff --git a/migrations/2025-05-21-203022_channel_description_nullable/up.sql b/migrations/2025-05-21-203022_channel_description_nullable/up.sql new file mode 100644 index 0000000..5ca6776 --- /dev/null +++ b/migrations/2025-05-21-203022_channel_description_nullable/up.sql @@ -0,0 +1,3 @@ +-- Your SQL goes here +ALTER TABLE ONLY channels ALTER COLUMN description DROP NOT NULL; +ALTER TABLE ONLY channels ALTER COLUMN description SET DEFAULT NULL; diff --git a/migrations/2025-05-23-112318_add_guild_icons/down.sql b/migrations/2025-05-23-112318_add_guild_icons/down.sql new file mode 100644 index 0000000..ac7eaf6 --- /dev/null +++ b/migrations/2025-05-23-112318_add_guild_icons/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +ALTER TABLE guilds DROP COLUMN icon; diff --git a/migrations/2025-05-23-112318_add_guild_icons/up.sql b/migrations/2025-05-23-112318_add_guild_icons/up.sql new file mode 100644 index 0000000..f698e39 --- /dev/null +++ b/migrations/2025-05-23-112318_add_guild_icons/up.sql @@ -0,0 +1,2 @@ +-- Your SQL goes here +ALTER TABLE guilds ADD COLUMN icon VARCHAR(100) DEFAULT NULL; diff --git a/migrations/2025-05-23-113933_add_user_avatars/down.sql b/migrations/2025-05-23-113933_add_user_avatars/down.sql new file mode 100644 index 0000000..89b6268 --- /dev/null +++ b/migrations/2025-05-23-113933_add_user_avatars/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +ALTER TABLE users DROP COLUMN avatar; diff --git a/migrations/2025-05-23-113933_add_user_avatars/up.sql b/migrations/2025-05-23-113933_add_user_avatars/up.sql new file mode 100644 index 0000000..6c6a6af --- /dev/null +++ b/migrations/2025-05-23-113933_add_user_avatars/up.sql @@ -0,0 +1,2 @@ +-- Your SQL goes here +ALTER TABLE users ADD COLUMN avatar varchar(100) DEFAULT NULL; diff --git a/migrations/2025-05-26-181536_add_channel_ordering/down.sql b/migrations/2025-05-26-181536_add_channel_ordering/down.sql new file mode 100644 index 0000000..0a70d35 --- /dev/null +++ b/migrations/2025-05-26-181536_add_channel_ordering/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +ALTER TABLE channels DROP COLUMN is_above; diff --git a/migrations/2025-05-26-181536_add_channel_ordering/up.sql b/migrations/2025-05-26-181536_add_channel_ordering/up.sql new file mode 100644 index 0000000..e18e5e2 --- /dev/null +++ b/migrations/2025-05-26-181536_add_channel_ordering/up.sql @@ -0,0 +1,2 @@ +-- Your SQL goes here +ALTER TABLE channels ADD COLUMN is_above UUID UNIQUE REFERENCES channels(uuid) DEFAULT NULL; diff --git a/migrations/2025-05-27-105059_redo_role_ordering/down.sql b/migrations/2025-05-27-105059_redo_role_ordering/down.sql new file mode 100644 index 0000000..6b38e1e --- /dev/null +++ b/migrations/2025-05-27-105059_redo_role_ordering/down.sql @@ -0,0 +1,3 @@ +-- This file should undo anything in `up.sql` +ALTER TABLE roles ADD COLUMN position int NOT NULL DEFAULT 0; +ALTER TABLE roles DROP COLUMN is_above; diff --git a/migrations/2025-05-27-105059_redo_role_ordering/up.sql 
b/migrations/2025-05-27-105059_redo_role_ordering/up.sql new file mode 100644 index 0000000..d426ab7 --- /dev/null +++ b/migrations/2025-05-27-105059_redo_role_ordering/up.sql @@ -0,0 +1,3 @@ +-- Your SQL goes here +ALTER TABLE roles DROP COLUMN position; +ALTER TABLE roles ADD COLUMN is_above UUID UNIQUE REFERENCES roles(uuid) DEFAULT NULL; diff --git a/migrations/2025-05-27-162114_create_email_tokens/down.sql b/migrations/2025-05-27-162114_create_email_tokens/down.sql new file mode 100644 index 0000000..f56c360 --- /dev/null +++ b/migrations/2025-05-27-162114_create_email_tokens/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE email_tokens; diff --git a/migrations/2025-05-27-162114_create_email_tokens/up.sql b/migrations/2025-05-27-162114_create_email_tokens/up.sql new file mode 100644 index 0000000..9761563 --- /dev/null +++ b/migrations/2025-05-27-162114_create_email_tokens/up.sql @@ -0,0 +1,7 @@ +-- Your SQL goes here +CREATE TABLE email_tokens ( + token VARCHAR(64) NOT NULL, + user_uuid uuid UNIQUE NOT NULL REFERENCES users(uuid), + created_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (token, user_uuid) +); diff --git a/migrations/2025-05-28-175918_create_password_reset_tokens/down.sql b/migrations/2025-05-28-175918_create_password_reset_tokens/down.sql new file mode 100644 index 0000000..dcccc77 --- /dev/null +++ b/migrations/2025-05-28-175918_create_password_reset_tokens/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +DROP TABLE password_reset_tokens; diff --git a/migrations/2025-05-28-175918_create_password_reset_tokens/up.sql b/migrations/2025-05-28-175918_create_password_reset_tokens/up.sql new file mode 100644 index 0000000..f788b77 --- /dev/null +++ b/migrations/2025-05-28-175918_create_password_reset_tokens/up.sql @@ -0,0 +1,7 @@ +-- Your SQL goes here +CREATE TABLE password_reset_tokens ( + token VARCHAR(64) NOT NULL, + user_uuid uuid UNIQUE NOT NULL REFERENCES users(uuid), + created_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (token, user_uuid) +); diff --git a/migrations/2025-06-01-134036_add_pronouns_to_users/down.sql b/migrations/2025-06-01-134036_add_pronouns_to_users/down.sql new file mode 100644 index 0000000..32d891f --- /dev/null +++ b/migrations/2025-06-01-134036_add_pronouns_to_users/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +ALTER TABLE users DROP COLUMN pronouns; diff --git a/migrations/2025-06-01-134036_add_pronouns_to_users/up.sql b/migrations/2025-06-01-134036_add_pronouns_to_users/up.sql new file mode 100644 index 0000000..90807bb --- /dev/null +++ b/migrations/2025-06-01-134036_add_pronouns_to_users/up.sql @@ -0,0 +1,2 @@ +-- Your SQL goes here +ALTER TABLE users ADD COLUMN pronouns VARCHAR(32) DEFAULT NULL; diff --git a/migrations/2025-06-01-143713_add_about_to_users/down.sql b/migrations/2025-06-01-143713_add_about_to_users/down.sql new file mode 100644 index 0000000..de48d07 --- /dev/null +++ b/migrations/2025-06-01-143713_add_about_to_users/down.sql @@ -0,0 +1,2 @@ +-- This file should undo anything in `up.sql` +ALTER TABLE users DROP COLUMN about; \ No newline at end of file diff --git a/migrations/2025-06-01-143713_add_about_to_users/up.sql b/migrations/2025-06-01-143713_add_about_to_users/up.sql new file mode 100644 index 0000000..54b5449 --- /dev/null +++ b/migrations/2025-06-01-143713_add_about_to_users/up.sql @@ -0,0 +1,2 @@ +-- Your SQL goes here +ALTER TABLE users ADD COLUMN about VARCHAR(200) DEFAULT NULL; diff --git 
a/migrations/2025-06-03-103311_remove_email_tokens/down.sql b/migrations/2025-06-03-103311_remove_email_tokens/down.sql new file mode 100644 index 0000000..e8f0350 --- /dev/null +++ b/migrations/2025-06-03-103311_remove_email_tokens/down.sql @@ -0,0 +1,7 @@ +-- This file should undo anything in `up.sql` +CREATE TABLE email_tokens ( + token VARCHAR(64) NOT NULL, + user_uuid uuid UNIQUE NOT NULL REFERENCES users(uuid), + created_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (token, user_uuid) +); diff --git a/migrations/2025-06-03-103311_remove_email_tokens/up.sql b/migrations/2025-06-03-103311_remove_email_tokens/up.sql new file mode 100644 index 0000000..b41afe5 --- /dev/null +++ b/migrations/2025-06-03-103311_remove_email_tokens/up.sql @@ -0,0 +1,2 @@ +-- Your SQL goes here +DROP TABLE email_tokens; diff --git a/migrations/2025-06-03-110142_remove_password_reset_tokens/down.sql b/migrations/2025-06-03-110142_remove_password_reset_tokens/down.sql new file mode 100644 index 0000000..009d9e4 --- /dev/null +++ b/migrations/2025-06-03-110142_remove_password_reset_tokens/down.sql @@ -0,0 +1,7 @@ +-- This file should undo anything in `up.sql` +CREATE TABLE password_reset_tokens ( + token VARCHAR(64) NOT NULL, + user_uuid uuid UNIQUE NOT NULL REFERENCES users(uuid), + created_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY (token, user_uuid) +); diff --git a/migrations/2025-06-03-110142_remove_password_reset_tokens/up.sql b/migrations/2025-06-03-110142_remove_password_reset_tokens/up.sql new file mode 100644 index 0000000..181d7c5 --- /dev/null +++ b/migrations/2025-06-03-110142_remove_password_reset_tokens/up.sql @@ -0,0 +1,2 @@ +-- Your SQL goes here +DROP TABLE password_reset_tokens; diff --git a/migrations/2025-06-06-145916_guild_ownership_changes/down.sql b/migrations/2025-06-06-145916_guild_ownership_changes/down.sql new file mode 100644 index 0000000..21a08c9 --- /dev/null +++ b/migrations/2025-06-06-145916_guild_ownership_changes/down.sql @@ -0,0 +1,14 @@ +-- This file should undo anything in `up.sql` +ALTER TABLE guilds +ADD COLUMN owner_uuid UUID REFERENCES users(uuid); + +UPDATE guilds g +SET owner_uuid = gm.user_uuid +FROM guild_members gm +WHERE gm.guild_uuid = g.uuid AND gm.is_owner = TRUE; + +ALTER TABLE guilds +ALTER COLUMN owner_uuid SET NOT NULL; + +ALTER TABLE guild_members +DROP COLUMN is_owner; diff --git a/migrations/2025-06-06-145916_guild_ownership_changes/up.sql b/migrations/2025-06-06-145916_guild_ownership_changes/up.sql new file mode 100644 index 0000000..b94323f --- /dev/null +++ b/migrations/2025-06-06-145916_guild_ownership_changes/up.sql @@ -0,0 +1,14 @@ +-- Your SQL goes here +ALTER TABLE guild_members +ADD COLUMN is_owner BOOLEAN NOT NULL DEFAULT false; + +UPDATE guild_members gm +SET is_owner = true +FROM guilds g +WHERE gm.guild_uuid = g.uuid AND gm.user_uuid = g.owner_uuid; + +CREATE UNIQUE INDEX one_owner_per_guild ON guild_members (guild_uuid) +WHERE is_owner; + +ALTER TABLE guilds +DROP COLUMN owner_uuid; diff --git a/run-dev.sh b/run-dev.sh index 69067b8..242ca72 100755 --- a/run-dev.sh +++ b/run-dev.sh @@ -3,7 +3,7 @@ podman-compose --file compose.dev.yml up --build echo "SHUTTING DOWN CONTAINERS" -podman container stop backend_backend_1 backend_database_1 +podman container stop backend_backend_1 backend_database_1 backend_valkey_1 echo "DELETING CONTAINERS" -podman container rm backend_backend_1 backend_database_1 +podman container rm backend_backend_1 backend_database_1 backend_valkey_1 diff --git a/src/api/mod.rs b/src/api/mod.rs index 25391eb..6d83e02 100644 --- 
a/src/api/mod.rs +++ b/src/api/mod.rs @@ -1,9 +1,13 @@ +//! `/api` Contains the entire API + use actix_web::Scope; use actix_web::web; mod v1; mod versions; -pub fn web() -> Scope { - web::scope("/api").service(v1::web()).service(versions::res) +pub fn web(path: &str) -> Scope { + web::scope(path.trim_end_matches('/')) + .service(v1::web()) + .service(versions::get) } diff --git a/src/api/v1/auth/login.rs b/src/api/v1/auth/login.rs index 38d5449..ac6c1ad 100644 --- a/src/api/v1/auth/login.rs +++ b/src/api/v1/auth/login.rs @@ -1,14 +1,19 @@ use std::time::{SystemTime, UNIX_EPOCH}; -use actix_web::{Error, HttpResponse, post, web}; +use actix_web::{HttpResponse, post, web}; use argon2::{PasswordHash, PasswordVerifier}; -use log::error; +use diesel::{ExpressionMethods, QueryDsl, dsl::insert_into}; +use diesel_async::RunQueryDsl; use serde::Deserialize; use crate::{ Data, - api::v1::auth::{EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX}, - utils::{generate_access_token, generate_refresh_token, refresh_token_cookie}, + error::Error, + schema::*, + utils::{ + PASSWORD_REGEX, generate_token, new_refresh_token_cookie, + user_uuid_from_identifier, + }, }; use super::Response; @@ -29,146 +34,61 @@ pub async fn response( return Ok(HttpResponse::Forbidden().json(r#"{ "password_hashed": false }"#)); } - if EMAIL_REGEX.is_match(&login_information.username) { - let row = - sqlx::query_as("SELECT CAST(uuid as VARCHAR), password FROM users WHERE email = $1") - .bind(&login_information.username) - .fetch_one(&data.pool) - .await; + use users::dsl; - if let Err(error) = row { - if error.to_string() - == "no rows returned by a query that expected to return at least one row" - { - return Ok(HttpResponse::Unauthorized().finish()); - } + let mut conn = data.pool.get().await?; - error!("{}", error); - return Ok(HttpResponse::InternalServerError().json( - r#"{ "error": "Unhandled exception occured, contact the server administrator" }"#, - )); - } + let uuid = user_uuid_from_identifier(&mut conn, &login_information.username).await?; - let (uuid, password): (String, String) = row.unwrap(); + let database_password: String = dsl::users + .filter(dsl::uuid.eq(uuid)) + .select(dsl::password) + .get_result(&mut conn) + .await?; - return Ok(login( - data.clone(), - uuid, - login_information.password.clone(), - password, - login_information.device_name.clone(), - ) - .await); - } else if USERNAME_REGEX.is_match(&login_information.username) { - let row = - sqlx::query_as("SELECT CAST(uuid as VARCHAR), password FROM users WHERE username = $1") - .bind(&login_information.username) - .fetch_one(&data.pool) - .await; - - if let Err(error) = row { - if error.to_string() - == "no rows returned by a query that expected to return at least one row" - { - return Ok(HttpResponse::Unauthorized().finish()); - } - - error!("{}", error); - return Ok(HttpResponse::InternalServerError().json( - r#"{ "error": "Unhandled exception occured, contact the server administrator" }"#, - )); - } - - let (uuid, password): (String, String) = row.unwrap(); - - return Ok(login( - data.clone(), - uuid, - login_information.password.clone(), - password, - login_information.device_name.clone(), - ) - .await); - } - - Ok(HttpResponse::Unauthorized().finish()) -} - -async fn login( - data: actix_web::web::Data, - uuid: String, - request_password: String, - database_password: String, - device_name: String, -) -> HttpResponse { - let parsed_hash_raw = PasswordHash::new(&database_password); - - if let Err(error) = parsed_hash_raw { - error!("{}", error); - return 
HttpResponse::InternalServerError().finish(); - } - - let parsed_hash = parsed_hash_raw.unwrap(); + let parsed_hash = PasswordHash::new(&database_password) + .map_err(|e| Error::PasswordHashError(e.to_string()))?; if data .argon2 - .verify_password(request_password.as_bytes(), &parsed_hash) + .verify_password(login_information.password.as_bytes(), &parsed_hash) .is_err() { - return HttpResponse::Unauthorized().finish(); + return Err(Error::Unauthorized( + "Wrong username or password".to_string(), + )); } - let refresh_token_raw = generate_refresh_token(); - let access_token_raw = generate_access_token(); + let refresh_token = generate_token::<32>()?; + let access_token = generate_token::<16>()?; - if let Err(error) = refresh_token_raw { - error!("{}", error); - return HttpResponse::InternalServerError().finish(); - } + let current_time = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64; - let refresh_token = refresh_token_raw.unwrap(); + use refresh_tokens::dsl as rdsl; - if let Err(error) = access_token_raw { - error!("{}", error); - return HttpResponse::InternalServerError().finish(); - } + insert_into(refresh_tokens::table) + .values(( + rdsl::token.eq(&refresh_token), + rdsl::uuid.eq(uuid), + rdsl::created_at.eq(current_time), + rdsl::device_name.eq(&login_information.device_name), + )) + .execute(&mut conn) + .await?; - let access_token = access_token_raw.unwrap(); + use access_tokens::dsl as adsl; - let current_time = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_secs() as i64; + insert_into(access_tokens::table) + .values(( + adsl::token.eq(&access_token), + adsl::refresh_token.eq(&refresh_token), + adsl::uuid.eq(uuid), + adsl::created_at.eq(current_time), + )) + .execute(&mut conn) + .await?; - if let Err(error) = sqlx::query(&format!( - "INSERT INTO refresh_tokens (token, uuid, created_at, device_name) VALUES ($1, '{}', $2, $3 )", - uuid - )) - .bind(&refresh_token) - .bind(current_time) - .bind(device_name) - .execute(&data.pool) - .await - { - error!("{}", error); - return HttpResponse::InternalServerError().finish(); - } - - if let Err(error) = sqlx::query(&format!( - "INSERT INTO access_tokens (token, refresh_token, uuid, created_at) VALUES ($1, $2, '{}', $3 )", - uuid - )) - .bind(&access_token) - .bind(&refresh_token) - .bind(current_time) - .execute(&data.pool) - .await - { - error!("{}", error); - return HttpResponse::InternalServerError().finish() - } - - HttpResponse::Ok() - .cookie(refresh_token_cookie(refresh_token)) - .json(Response { access_token }) + Ok(HttpResponse::Ok() + .cookie(new_refresh_token_cookie(&data.config, refresh_token)) + .json(Response { access_token })) } diff --git a/src/api/v1/auth/logout.rs b/src/api/v1/auth/logout.rs new file mode 100644 index 0000000..0f265bb --- /dev/null +++ b/src/api/v1/auth/logout.rs @@ -0,0 +1,38 @@ +use actix_web::{HttpRequest, HttpResponse, post, web}; +use diesel::{ExpressionMethods, delete}; +use diesel_async::RunQueryDsl; + +use crate::{ + Data, + error::Error, + schema::refresh_tokens::{self, dsl}, +}; + +/// `GET /api/v1/logout` +/// +/// requires auth: kinda, needs refresh token set but no access token is technically required +/// +/// ### Responses +/// 200 Logged out +/// 404 Refresh token is invalid +/// 401 Unauthorized (no refresh token found) +/// +#[post("/logout")] +pub async fn res(req: HttpRequest, data: web::Data) -> Result { + let mut refresh_token_cookie = req.cookie("refresh_token").ok_or(Error::Unauthorized( + "request has no refresh token".to_string(), + ))?; + + 
let refresh_token = String::from(refresh_token_cookie.value()); + + let mut conn = data.pool.get().await?; + + delete(refresh_tokens::table) + .filter(dsl::token.eq(refresh_token)) + .execute(&mut conn) + .await?; + + refresh_token_cookie.make_removal(); + + Ok(HttpResponse::Ok().cookie(refresh_token_cookie).finish()) +} diff --git a/src/api/v1/auth/mod.rs b/src/api/v1/auth/mod.rs index 326b2ef..75a6b0b 100644 --- a/src/api/v1/auth/mod.rs +++ b/src/api/v1/auth/mod.rs @@ -1,79 +1,60 @@ -use std::{ - str::FromStr, - sync::LazyLock, - time::{SystemTime, UNIX_EPOCH}, -}; +use std::time::{SystemTime, UNIX_EPOCH}; -use actix_web::{HttpResponse, Scope, web}; -use log::error; -use regex::Regex; +use actix_web::{Scope, web}; +use diesel::{ExpressionMethods, QueryDsl}; +use diesel_async::RunQueryDsl; use serde::Serialize; -use sqlx::Postgres; use uuid::Uuid; +use crate::{Conn, error::Error, schema::access_tokens::dsl}; + mod login; +mod logout; mod refresh; mod register; +mod reset_password; mod revoke; +mod verify_email; #[derive(Serialize)] struct Response { access_token: String, } -static EMAIL_REGEX: LazyLock = LazyLock::new(|| { - Regex::new(r"[-A-Za-z0-9!#$%&'*+/=?^_`{|}~]+(?:\.[-A-Za-z0-9!#$%&'*+/=?^_`{|}~]+)*@(?:[A-Za-z0-9](?:[-A-Za-z0-9]*[A-Za-z0-9])?\.)+[A-Za-z0-9](?:[-A-Za-z0-9]*[A-Za-z0-9])?").unwrap() -}); - -static USERNAME_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"^[a-z0-9_.-]+$").unwrap()); - -// Password is expected to be hashed using SHA3-384 -static PASSWORD_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"[0-9a-f]{96}").unwrap()); - pub fn web() -> Scope { web::scope("/auth") .service(register::res) .service(login::response) + .service(logout::res) .service(refresh::res) .service(revoke::res) + .service(verify_email::get) + .service(verify_email::post) + .service(reset_password::get) + .service(reset_password::post) } -pub async fn check_access_token( - access_token: &str, - pool: &sqlx::Pool, -) -> Result { - let row = sqlx::query_as( - "SELECT CAST(uuid as VARCHAR), created_at FROM access_tokens WHERE token = $1", - ) - .bind(access_token) - .fetch_one(pool) - .await; +pub async fn check_access_token(access_token: &str, conn: &mut Conn) -> Result { + let (uuid, created_at): (Uuid, i64) = dsl::access_tokens + .filter(dsl::token.eq(access_token)) + .select((dsl::uuid, dsl::created_at)) + .get_result(conn) + .await + .map_err(|error| { + if error == diesel::result::Error::NotFound { + Error::Unauthorized("Invalid access token".to_string()) + } else { + Error::from(error) + } + })?; - if let Err(error) = row { - if error.to_string() - == "no rows returned by a query that expected to return at least one row" - { - return Err(HttpResponse::Unauthorized().finish()); - } - - error!("{}", error); - return Err(HttpResponse::InternalServerError().json( - r#"{ "error": "Unhandled exception occured, contact the server administrator" }"#, - )); - } - - let (uuid, created_at): (String, i64) = row.unwrap(); - - let current_time = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_secs() as i64; + let current_time = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64; let lifetime = current_time - created_at; if lifetime > 3600 { - return Err(HttpResponse::Unauthorized().finish()); + return Err(Error::Unauthorized("Invalid access token".to_string())); } - Ok(Uuid::from_str(&uuid).unwrap()) + Ok(uuid) } diff --git a/src/api/v1/auth/refresh.rs b/src/api/v1/auth/refresh.rs index cf1c4bb..1f4f406 100644 --- a/src/api/v1/auth/refresh.rs +++ b/src/api/v1/auth/refresh.rs 
@@ -1,49 +1,50 @@ -use actix_web::{Error, HttpRequest, HttpResponse, post, web}; +use actix_web::{HttpRequest, HttpResponse, post, web}; +use diesel::{ExpressionMethods, QueryDsl, delete, update}; +use diesel_async::RunQueryDsl; use log::error; use std::time::{SystemTime, UNIX_EPOCH}; use crate::{ Data, - utils::{generate_access_token, generate_refresh_token, refresh_token_cookie}, + error::Error, + schema::{ + access_tokens::{self, dsl}, + refresh_tokens::{self, dsl as rdsl}, + }, + utils::{generate_token, new_refresh_token_cookie}, }; use super::Response; #[post("/refresh")] pub async fn res(req: HttpRequest, data: web::Data) -> Result { - let recv_refresh_token_cookie = req.cookie("refresh_token"); + let mut refresh_token_cookie = req.cookie("refresh_token").ok_or(Error::Unauthorized( + "request has no refresh token".to_string(), + ))?; - if recv_refresh_token_cookie.is_none() { - return Ok(HttpResponse::Unauthorized().finish()); - } + let mut refresh_token = String::from(refresh_token_cookie.value()); - let mut refresh_token = String::from(recv_refresh_token_cookie.unwrap().value()); + let current_time = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64; - let current_time = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_secs() as i64; + let mut conn = data.pool.get().await?; - if let Ok(row) = sqlx::query_scalar("SELECT created_at FROM refresh_tokens WHERE token = $1") - .bind(&refresh_token) - .fetch_one(&data.pool) + if let Ok(created_at) = rdsl::refresh_tokens + .filter(rdsl::token.eq(&refresh_token)) + .select(rdsl::created_at) + .get_result::(&mut conn) .await { - let created_at: i64 = row; - let lifetime = current_time - created_at; if lifetime > 2592000 { - if let Err(error) = sqlx::query("DELETE FROM refresh_tokens WHERE token = $1") - .bind(&refresh_token) - .execute(&data.pool) + if let Err(error) = delete(refresh_tokens::table) + .filter(rdsl::token.eq(&refresh_token)) + .execute(&mut conn) .await { error!("{}", error); } - let mut refresh_token_cookie = refresh_token_cookie(refresh_token); - refresh_token_cookie.make_removal(); return Ok(HttpResponse::Unauthorized() @@ -51,29 +52,19 @@ pub async fn res(req: HttpRequest, data: web::Data) -> Result 1987200 { - let new_refresh_token = generate_refresh_token(); + let new_refresh_token = generate_token::<32>()?; - if new_refresh_token.is_err() { - error!("{}", new_refresh_token.unwrap_err()); - return Ok(HttpResponse::InternalServerError().finish()); - } - - let new_refresh_token = new_refresh_token.unwrap(); - - match sqlx::query( - "UPDATE refresh_tokens SET token = $1, created_at = $2 WHERE token = $3", - ) - .bind(&new_refresh_token) - .bind(current_time) - .bind(&refresh_token) - .execute(&data.pool) - .await + match update(refresh_tokens::table) + .filter(rdsl::token.eq(&refresh_token)) + .set(( + rdsl::token.eq(&new_refresh_token), + rdsl::created_at.eq(current_time), + )) + .execute(&mut conn) + .await { Ok(_) => { refresh_token = new_refresh_token; @@ -84,35 +75,22 @@ pub async fn res(req: HttpRequest, data: web::Data) -> Result()?; - if access_token.is_err() { - error!("{}", access_token.unwrap_err()); - return Ok(HttpResponse::InternalServerError().finish()); - } - - let access_token = access_token.unwrap(); - - if let Err(error) = sqlx::query( - "UPDATE access_tokens SET token = $1, created_at = $2 WHERE refresh_token = $3", - ) - .bind(&access_token) - .bind(current_time) - .bind(&refresh_token) - .execute(&data.pool) - .await - { - error!("{}", error); - return 
Ok(HttpResponse::InternalServerError().finish()); - } + update(access_tokens::table) + .filter(dsl::refresh_token.eq(&refresh_token)) + .set(( + dsl::token.eq(&access_token), + dsl::created_at.eq(current_time), + )) + .execute(&mut conn) + .await?; return Ok(HttpResponse::Ok() - .cookie(refresh_token_cookie(refresh_token)) + .cookie(new_refresh_token_cookie(&data.config, refresh_token)) .json(Response { access_token })); } - let mut refresh_token_cookie = refresh_token_cookie(refresh_token); - refresh_token_cookie.make_removal(); Ok(HttpResponse::Unauthorized() diff --git a/src/api/v1/auth/register.rs b/src/api/v1/auth/register.rs index b4378ca..1d28088 100644 --- a/src/api/v1/auth/register.rs +++ b/src/api/v1/auth/register.rs @@ -1,19 +1,28 @@ use std::time::{SystemTime, UNIX_EPOCH}; -use actix_web::{Error, HttpResponse, post, web}; +use actix_web::{HttpResponse, post, web}; use argon2::{ PasswordHasher, password_hash::{SaltString, rand_core::OsRng}, }; -use log::error; +use diesel::{ExpressionMethods, dsl::insert_into}; +use diesel_async::RunQueryDsl; use serde::{Deserialize, Serialize}; use uuid::Uuid; use super::Response; use crate::{ Data, - api::v1::auth::{EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX}, - utils::{generate_access_token, generate_refresh_token, refresh_token_cookie}, + error::Error, + schema::{ + access_tokens::{self, dsl as adsl}, + refresh_tokens::{self, dsl as rdsl}, + users::{self, dsl as udsl}, + }, + utils::{ + EMAIL_REGEX, PASSWORD_REGEX, USERNAME_REGEX, generate_token, + new_refresh_token_cookie, + }, }; #[derive(Deserialize)] @@ -60,6 +69,12 @@ pub async fn res( account_information: web::Json, data: web::Data, ) -> Result { + if !data.config.instance.registration { + return Err(Error::Forbidden( + "registration is disabled on this instance".to_string(), + )); + } + let uuid = Uuid::now_v7(); if !EMAIL_REGEX.is_match(&account_information.email) { @@ -92,91 +107,47 @@ pub async fn res( .argon2 .hash_password(account_information.password.as_bytes(), &salt) { + let mut conn = data.pool.get().await?; + // TODO: Check security of this implementation - return Ok( - match sqlx::query(&format!( - "INSERT INTO users (uuid, username, password, email) VALUES ( '{}', $1, $2, $3 )", - uuid + insert_into(users::table) + .values(( + udsl::uuid.eq(uuid), + udsl::username.eq(&account_information.identifier), + udsl::password.eq(hashed_password.to_string()), + udsl::email.eq(&account_information.email), )) - .bind(&account_information.identifier) - .bind(hashed_password.to_string()) - .bind(&account_information.email) - .execute(&data.pool) - .await - { - Ok(_out) => { - let refresh_token = generate_refresh_token(); - let access_token = generate_access_token(); + .execute(&mut conn) + .await?; - if refresh_token.is_err() { - error!("{}", refresh_token.unwrap_err()); - return Ok(HttpResponse::InternalServerError().finish()); - } + let refresh_token = generate_token::<32>()?; + let access_token = generate_token::<16>()?; - let refresh_token = refresh_token.unwrap(); + let current_time = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() as i64; - if access_token.is_err() { - error!("{}", access_token.unwrap_err()); - return Ok(HttpResponse::InternalServerError().finish()); - } + insert_into(refresh_tokens::table) + .values(( + rdsl::token.eq(&refresh_token), + rdsl::uuid.eq(uuid), + rdsl::created_at.eq(current_time), + rdsl::device_name.eq(&account_information.device_name), + )) + .execute(&mut conn) + .await?; - let access_token = access_token.unwrap(); + 
insert_into(access_tokens::table) + .values(( + adsl::token.eq(&access_token), + adsl::refresh_token.eq(&refresh_token), + adsl::uuid.eq(uuid), + adsl::created_at.eq(current_time), + )) + .execute(&mut conn) + .await?; - let current_time = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_secs() as i64; - - if let Err(error) = sqlx::query(&format!("INSERT INTO refresh_tokens (token, uuid, created_at, device_name) VALUES ($1, '{}', $2, $3 )", uuid)) - .bind(&refresh_token) - .bind(current_time) - .bind(&account_information.device_name) - .execute(&data.pool) - .await { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()) - } - - if let Err(error) = sqlx::query(&format!("INSERT INTO access_tokens (token, refresh_token, uuid, created_at) VALUES ($1, $2, '{}', $3 )", uuid)) - .bind(&access_token) - .bind(&refresh_token) - .bind(current_time) - .execute(&data.pool) - .await { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()) - } - - HttpResponse::Ok() - .cookie(refresh_token_cookie(refresh_token)) - .json(Response { access_token }) - } - Err(error) => { - let err_msg = error.as_database_error().unwrap().message(); - - match err_msg { - err_msg - if err_msg.contains("unique") && err_msg.contains("username_key") => - { - HttpResponse::Forbidden().json(ResponseError { - gorb_id_available: false, - ..Default::default() - }) - } - err_msg if err_msg.contains("unique") && err_msg.contains("email_key") => { - HttpResponse::Forbidden().json(ResponseError { - email_available: false, - ..Default::default() - }) - } - _ => { - error!("{}", err_msg); - HttpResponse::InternalServerError().finish() - } - } - } - }, - ); + return Ok(HttpResponse::Ok() + .cookie(new_refresh_token_cookie(&data.config, refresh_token)) + .json(Response { access_token })); } Ok(HttpResponse::InternalServerError().finish()) diff --git a/src/api/v1/auth/reset_password.rs b/src/api/v1/auth/reset_password.rs new file mode 100644 index 0000000..444266c --- /dev/null +++ b/src/api/v1/auth/reset_password.rs @@ -0,0 +1,83 @@ +//! `/api/v1/auth/reset-password` Endpoints for resetting user password + +use actix_web::{HttpResponse, get, post, web}; +use chrono::{Duration, Utc}; +use serde::Deserialize; + +use crate::{Data, error::Error, objects::PasswordResetToken}; + +#[derive(Deserialize)] +struct Query { + identifier: String, +} + +/// `GET /api/v1/auth/reset-password` Sends password reset email to user +/// +/// requires auth? no +/// +/// ### Query Parameters +/// identifier: Email or username +/// +/// ### Responses +/// 200 Email sent +/// 429 Too Many Requests +/// 404 Not found +/// 400 Bad request +/// +#[get("/reset-password")] +pub async fn get(query: web::Query, data: web::Data) -> Result { + if let Ok(password_reset_token) = + PasswordResetToken::get_with_identifier(&data, query.identifier.clone()).await + { + if Utc::now().signed_duration_since(password_reset_token.created_at) > Duration::hours(1) { + password_reset_token.delete(&data).await?; + } else { + return Err(Error::TooManyRequests( + "Please allow 1 hour before sending a new email".to_string(), + )); + } + } + + PasswordResetToken::new(&data, query.identifier.clone()).await?; + + Ok(HttpResponse::Ok().finish()) +} + +#[derive(Deserialize)] +struct ResetPassword { + password: String, + token: String, +} + +/// `POST /api/v1/auth/reset-password` Resets user password +/// +/// requires auth? 
no +/// +/// ### Request Example: +/// ``` +/// json!({ +/// "password": "1608c17a27f6ae3891c23d680c73ae91528f20a54dcf4973e2c3126b9734f48b7253047f2395b51bb8a44a6daa188003", +/// "token": "a3f7e29c1b8d0456e2c9f83b7a1d6e4f5028c3b9a7e1f2d5c6b8a0d3e7f4a2b" +/// }); +/// ``` +/// +/// ### Responses +/// 200 Success +/// 410 Token Expired +/// 404 Not Found +/// 400 Bad Request +/// +#[post("/reset-password")] +pub async fn post( + reset_password: web::Json, + data: web::Data, +) -> Result { + let password_reset_token = + PasswordResetToken::get(&data, reset_password.token.clone()).await?; + + password_reset_token + .set_password(&data, reset_password.password.clone()) + .await?; + + Ok(HttpResponse::Ok().finish()) +} diff --git a/src/api/v1/auth/revoke.rs b/src/api/v1/auth/revoke.rs index a4f9196..2e95884 100644 --- a/src/api/v1/auth/revoke.rs +++ b/src/api/v1/auth/revoke.rs @@ -1,10 +1,17 @@ -use actix_web::{Error, HttpRequest, HttpResponse, post, web}; +use actix_web::{HttpRequest, HttpResponse, post, web}; use argon2::{PasswordHash, PasswordVerifier}; -use futures::future; -use log::error; -use serde::{Deserialize, Serialize}; +use diesel::{ExpressionMethods, QueryDsl, delete}; +use diesel_async::RunQueryDsl; +use serde::Deserialize; -use crate::{Data, api::v1::auth::check_access_token, utils::get_auth_header}; +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + schema::refresh_tokens::{self, dsl as rdsl}, + schema::users::dsl as udsl, + utils::get_auth_header, +}; #[derive(Deserialize)] struct RevokeRequest { @@ -12,17 +19,6 @@ struct RevokeRequest { device_name: String, } -#[derive(Serialize)] -struct Response { - deleted: bool, -} - -impl Response { - fn new(deleted: bool) -> Self { - Self { deleted } - } -} - // TODO: Should maybe be a delete request? 
#[post("/revoke")] pub async fn res( @@ -32,85 +28,36 @@ pub async fn res( ) -> Result { let headers = req.headers(); - let auth_header = get_auth_header(headers); + let auth_header = get_auth_header(headers)?; - if let Err(error) = auth_header { - return Ok(error); - } + let mut conn = data.pool.get().await?; - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; + let uuid = check_access_token(auth_header, &mut conn).await?; - if let Err(error) = authorized { - return Ok(error); - } + let database_password: String = udsl::users + .filter(udsl::uuid.eq(uuid)) + .select(udsl::password) + .get_result(&mut conn) + .await?; - let uuid = authorized.unwrap(); - - let database_password_raw = sqlx::query_scalar(&format!( - "SELECT password FROM users WHERE uuid = '{}'", - uuid - )) - .fetch_one(&data.pool) - .await; - - if let Err(error) = database_password_raw { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().json(Response::new(false))); - } - - let database_password: String = database_password_raw.unwrap(); - - let hashed_password_raw = PasswordHash::new(&database_password); - - if let Err(error) = hashed_password_raw { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().json(Response::new(false))); - } - - let hashed_password = hashed_password_raw.unwrap(); + let hashed_password = PasswordHash::new(&database_password) + .map_err(|e| Error::PasswordHashError(e.to_string()))?; if data .argon2 .verify_password(revoke_request.password.as_bytes(), &hashed_password) .is_err() { - return Ok(HttpResponse::Unauthorized().finish()); + return Err(Error::Unauthorized( + "Wrong username or password".to_string(), + )); } - let tokens_raw = sqlx::query_scalar(&format!( - "SELECT token FROM refresh_tokens WHERE uuid = '{}' AND device_name = $1", - uuid - )) - .bind(&revoke_request.device_name) - .fetch_all(&data.pool) - .await; + delete(refresh_tokens::table) + .filter(rdsl::uuid.eq(uuid)) + .filter(rdsl::device_name.eq(&revoke_request.device_name)) + .execute(&mut conn) + .await?; - if tokens_raw.is_err() { - error!("{:?}", tokens_raw); - return Ok(HttpResponse::InternalServerError().json(Response::new(false))); - } - - let tokens: Vec = tokens_raw.unwrap(); - - let mut refresh_tokens_delete = vec![]; - - for token in tokens { - refresh_tokens_delete.push( - sqlx::query("DELETE FROM refresh_tokens WHERE token = $1") - .bind(token.clone()) - .execute(&data.pool), - ); - } - - let results = future::join_all(refresh_tokens_delete).await; - - let errors: Vec<&Result> = - results.iter().filter(|r| r.is_err()).collect(); - - if !errors.is_empty() { - error!("{:?}", errors); - return Ok(HttpResponse::InternalServerError().finish()); - } - - Ok(HttpResponse::Ok().json(Response::new(true))) + Ok(HttpResponse::Ok().finish()) } diff --git a/src/api/v1/auth/verify_email.rs b/src/api/v1/auth/verify_email.rs new file mode 100644 index 0000000..e596500 --- /dev/null +++ b/src/api/v1/auth/verify_email.rs @@ -0,0 +1,101 @@ +//! `/api/v1/auth/verify-email` Endpoints for verifying user emails + +use actix_web::{HttpRequest, HttpResponse, get, post, web}; +use chrono::{Duration, Utc}; +use serde::Deserialize; + +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + objects::{EmailToken, Me}, + utils::get_auth_header, +}; + +#[derive(Deserialize)] +struct Query { + token: String, +} + +/// `GET /api/v1/auth/verify-email` Verifies user email address +/// +/// requires auth? 
yes +/// +/// ### Query Parameters +/// token +/// +/// ### Responses +/// 200 Success +/// 410 Token Expired +/// 404 Not Found +/// 401 Unauthorized +/// +#[get("/verify-email")] +pub async fn get( + req: HttpRequest, + query: web::Query, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + let me = Me::get(&mut conn, uuid).await?; + + let email_token = EmailToken::get(&data, me.uuid).await?; + + if query.token != email_token.token { + return Ok(HttpResponse::Unauthorized().finish()); + } + + me.verify_email(&mut conn).await?; + + email_token.delete(&data).await?; + + Ok(HttpResponse::Ok().finish()) +} + +/// `POST /api/v1/auth/verify-email` Sends user verification email +/// +/// requires auth? yes +/// +/// ### Responses +/// 200 Email sent +/// 204 Already verified +/// 429 Too Many Requests +/// 401 Unauthorized +/// +#[post("/verify-email")] +pub async fn post(req: HttpRequest, data: web::Data) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + let me = Me::get(&mut conn, uuid).await?; + + if me.email_verified { + return Ok(HttpResponse::NoContent().finish()); + } + + if let Ok(email_token) = EmailToken::get(&data, me.uuid).await { + if Utc::now().signed_duration_since(email_token.created_at) > Duration::hours(1) { + email_token.delete(&data).await?; + } else { + return Err(Error::TooManyRequests( + "Please allow 1 hour before sending a new email".to_string(), + )); + } + } + + EmailToken::new(&data, me).await?; + + Ok(HttpResponse::Ok().finish()) +} diff --git a/src/api/v1/channels/mod.rs b/src/api/v1/channels/mod.rs new file mode 100644 index 0000000..e9558c9 --- /dev/null +++ b/src/api/v1/channels/mod.rs @@ -0,0 +1,12 @@ +use actix_web::{Scope, web}; + +mod uuid; + +pub fn web() -> Scope { + web::scope("/channels") + .service(uuid::get) + .service(uuid::delete) + .service(uuid::patch) + .service(uuid::messages::get) + .service(uuid::socket::ws) +} diff --git a/src/api/v1/channels/uuid/messages.rs b/src/api/v1/channels/uuid/messages.rs new file mode 100644 index 0000000..9fdea0b --- /dev/null +++ b/src/api/v1/channels/uuid/messages.rs @@ -0,0 +1,78 @@ +//! 
`/api/v1/channels/{uuid}/messages` Endpoints related to channel messages + +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + objects::{Channel, Member}, + utils::{get_auth_header, global_checks}, +}; +use ::uuid::Uuid; +use actix_web::{HttpRequest, HttpResponse, get, web}; +use serde::Deserialize; + +#[derive(Deserialize)] +struct MessageRequest { + amount: i64, + offset: i64, +} + +/// `GET /api/v1/channels/{uuid}/messages` Returns messages from the given channel +/// +/// requires auth: yes +/// +/// requires relation: yes +/// +/// ### Request Example +/// ``` +/// json!({ +/// "amount": 100, +/// "offset": 0 +/// }) +/// ``` +/// +/// ### Response Example +/// ``` +/// json!({ +/// "uuid": "01971976-8618-74c0-b040-7ffbc44823f6", +/// "channel_uuid": "0196fcb1-e886-7de3-b685-0ee46def9a7b", +/// "user_uuid": "0196fc96-a822-76b0-b9bf-a9de232f54b7", +/// "message": "test", +/// "user": { +/// "uuid": "0196fc96-a822-76b0-b9bf-a9de232f54b7", +/// "username": "1234", +/// "display_name": null, +/// "avatar": "https://cdn.gorb.app/avatar/0196fc96-a822-76b0-b9bf-a9de232f54b7/avatar.jpg" +/// } +/// }); +/// ``` +/// +#[get("/{uuid}/messages")] +pub async fn get( + req: HttpRequest, + path: web::Path<(Uuid,)>, + message_request: web::Query, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let channel_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + let channel = Channel::fetch_one(&data, channel_uuid).await?; + + Member::check_membership(&mut conn, uuid, channel.guild_uuid).await?; + + let messages = channel + .fetch_messages(&data, message_request.amount, message_request.offset) + .await?; + + Ok(HttpResponse::Ok().json(messages)) +} diff --git a/src/api/v1/channels/uuid/mod.rs b/src/api/v1/channels/uuid/mod.rs new file mode 100644 index 0000000..bece6ed --- /dev/null +++ b/src/api/v1/channels/uuid/mod.rs @@ -0,0 +1,147 @@ +//! 
`/api/v1/channels/{uuid}` Channel specific endpoints + +pub mod messages; +pub mod socket; + +use crate::{ + api::v1::auth::check_access_token, error::Error, objects::{Channel, Member, Permissions}, utils::{get_auth_header, global_checks}, Data +}; +use actix_web::{HttpRequest, HttpResponse, delete, get, patch, web}; +use serde::Deserialize; +use uuid::Uuid; + +#[get("/{uuid}")] +pub async fn get( + req: HttpRequest, + path: web::Path<(Uuid,)>, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let channel_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + let channel = Channel::fetch_one(&data, channel_uuid).await?; + + Member::check_membership(&mut conn, uuid, channel.guild_uuid).await?; + + Ok(HttpResponse::Ok().json(channel)) +} + +#[delete("/{uuid}")] +pub async fn delete( + req: HttpRequest, + path: web::Path<(Uuid,)>, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let channel_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + let channel = Channel::fetch_one(&data, channel_uuid).await?; + + let member = Member::check_membership(&mut conn, uuid, channel.guild_uuid).await?; + + member.check_permission(&data, Permissions::DeleteChannel).await?; + + channel.delete(&data).await?; + + Ok(HttpResponse::Ok().finish()) +} + +#[derive(Deserialize)] +struct NewInfo { + name: Option, + description: Option, + is_above: Option, +} + +/// `PATCH /api/v1/channels/{uuid}` Returns user with the given UUID +/// +/// requires auth: yes +/// +/// requires relation: yes +/// +/// ### Request Example +/// All fields are optional and can be nulled/dropped if only changing 1 value +/// ``` +/// json!({ +/// "name": "gaming-chat", +/// "description": "Gaming related topics.", +/// "is_above": "398f6d7b-752c-4348-9771-fe6024adbfb1" +/// }); +/// ``` +/// +/// ### Response Example +/// ``` +/// json!({ +/// uuid: "cdcac171-5add-4f88-9559-3a247c8bba2c", +/// guild_uuid: "383d2afa-082f-4dd3-9050-ca6ed91487b6", +/// name: "gaming-chat", +/// description: "Gaming related topics.", +/// is_above: "398f6d7b-752c-4348-9771-fe6024adbfb1", +/// permissions: { +/// role_uuid: "79cc0806-0f37-4a06-a468-6639c4311a2d", +/// permissions: 0 +/// } +/// }); +/// ``` +/// NOTE: UUIDs in this response are made using `uuidgen`, UUIDs made by the actual backend will be UUIDv7 and have extractable timestamps +#[patch("/{uuid}")] +pub async fn patch( + req: HttpRequest, + path: web::Path<(Uuid,)>, + new_info: web::Json, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let channel_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + let mut channel = Channel::fetch_one(&data, channel_uuid).await?; + + let member = Member::check_membership(&mut conn, uuid, channel.guild_uuid).await?; + + member.check_permission(&data, Permissions::ManageChannel).await?; + + if let Some(new_name) = &new_info.name { + channel.set_name(&data, new_name.to_string()).await?; + } + + if let Some(new_description) = &new_info.description { + channel + .set_description(&data, 
new_description.to_string()) + .await?; + } + + if let Some(new_is_above) = &new_info.is_above { + channel + .set_description(&data, new_is_above.to_string()) + .await?; + } + + Ok(HttpResponse::Ok().json(channel)) +} diff --git a/src/api/v1/channels/uuid/socket.rs b/src/api/v1/channels/uuid/socket.rs new file mode 100644 index 0000000..b346e8e --- /dev/null +++ b/src/api/v1/channels/uuid/socket.rs @@ -0,0 +1,111 @@ +use actix_web::{ + Error, HttpRequest, HttpResponse, get, + http::header::{HeaderValue, SEC_WEBSOCKET_PROTOCOL}, + rt, web, +}; +use actix_ws::AggregatedMessage; +use futures_util::StreamExt as _; +use uuid::Uuid; + +use crate::{ + Data, + api::v1::auth::check_access_token, + objects::{Channel, Member}, + utils::{get_ws_protocol_header, global_checks}, +}; + +#[get("/{uuid}/socket")] +pub async fn ws( + req: HttpRequest, + path: web::Path<(Uuid,)>, + stream: web::Payload, + data: web::Data, +) -> Result { + // Get all headers + let headers = req.headers(); + + // Retrieve auth header + let auth_header = get_ws_protocol_header(headers)?; + + // Get uuid from path + let channel_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await.map_err(crate::error::Error::from)?; + + // Authorize client using auth header + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + let channel = Channel::fetch_one(&data, channel_uuid).await?; + + Member::check_membership(&mut conn, uuid, channel.guild_uuid).await?; + + let (mut res, mut session_1, stream) = actix_ws::handle(&req, stream)?; + + let mut stream = stream + .aggregate_continuations() + // aggregate continuation frames up to 1MiB + .max_continuation_size(2_usize.pow(20)); + + let mut pubsub = data + .cache_pool + .get_async_pubsub() + .await + .map_err(crate::error::Error::from)?; + + let mut session_2 = session_1.clone(); + + rt::spawn(async move { + pubsub.subscribe(channel_uuid.to_string()).await?; + while let Some(msg) = pubsub.on_message().next().await { + let payload: String = msg.get_payload()?; + session_1.text(payload).await?; + } + + Ok::<(), crate::error::Error>(()) + }); + + // start task but don't wait for it + rt::spawn(async move { + // receive messages from websocket + while let Some(msg) = stream.next().await { + match msg { + Ok(AggregatedMessage::Text(text)) => { + let mut conn = data.cache_pool.get_multiplexed_tokio_connection().await?; + + let message = channel.new_message(&data, uuid, text.to_string()).await?; + + redis::cmd("PUBLISH") + .arg(&[channel_uuid.to_string(), serde_json::to_string(&message)?]) + .exec_async(&mut conn) + .await?; + } + + Ok(AggregatedMessage::Binary(bin)) => { + // echo binary message + session_2.binary(bin).await?; + } + + Ok(AggregatedMessage::Ping(msg)) => { + // respond to PING frame with PONG frame + session_2.pong(&msg).await?; + } + + _ => {} + } + } + + Ok::<(), crate::error::Error>(()) + }); + + let headers = res.headers_mut(); + + headers.append( + SEC_WEBSOCKET_PROTOCOL, + HeaderValue::from_str("Authorization")?, + ); + + // respond immediately with response connected to WS session + Ok(res) +} diff --git a/src/api/v1/guilds/mod.rs b/src/api/v1/guilds/mod.rs new file mode 100644 index 0000000..ada5dc8 --- /dev/null +++ b/src/api/v1/guilds/mod.rs @@ -0,0 +1,139 @@ +//! 
`/api/v1/guilds` Guild related endpoints + +use actix_web::{HttpRequest, HttpResponse, Scope, get, post, web}; +use serde::Deserialize; + +mod uuid; + +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + objects::{Guild, StartAmountQuery}, + utils::{get_auth_header, global_checks}, +}; + +#[derive(Deserialize)] +struct GuildInfo { + name: String, +} + +pub fn web() -> Scope { + web::scope("/guilds") + .service(post) + .service(get) + .service(uuid::web()) +} + +/// `POST /api/v1/guilds` Creates a new guild +/// +/// requires auth: yes +/// +/// ### Request Example +/// ``` +/// json!({ +/// "name": "My new server!" +/// }); +/// ``` +/// +/// ### Response Example +/// ``` +/// json!({ +/// "uuid": "383d2afa-082f-4dd3-9050-ca6ed91487b6", +/// "name": "My new server!", +/// "description": null, +/// "icon": null, +/// "owner_uuid": "155d2291-fb23-46bd-a656-ae7c5d8218e6", +/// "roles": [], +/// "member_count": 1 +/// }); +/// ``` +/// NOTE: UUIDs in this response are made using `uuidgen`, UUIDs made by the actual backend will be UUIDv7 and have extractable timestamps +#[post("")] +pub async fn post( + req: HttpRequest, + guild_info: web::Json, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + let guild = Guild::new(&mut conn, guild_info.name.clone(), uuid).await?; + + Ok(HttpResponse::Ok().json(guild)) +} + +/// `GET /api/v1/guilds` Fetches all guilds +/// +/// requires auth: yes +/// +/// requires admin: yes +/// +/// ### Response Example +/// ``` +/// json!([ +/// { +/// "uuid": "383d2afa-082f-4dd3-9050-ca6ed91487b6", +/// "name": "My new server!", +/// "description": null, +/// "icon": null, +/// "owner_uuid": "155d2291-fb23-46bd-a656-ae7c5d8218e6", +/// "roles": [], +/// "member_count": 1 +/// }, +/// { +/// "uuid": "5ba61ec7-5f97-43e1-89a5-d4693c155612", +/// "name": "My first server!", +/// "description": "This is a cool and nullable description!", +/// "icon": "https://nullable-url/path/to/icon.png", +/// "owner_uuid": "155d2291-fb23-46bd-a656-ae7c5d8218e6", +/// "roles": [ +/// { +/// "uuid": "be0e4da4-cf73-4f45-98f8-bb1c73d1ab8b", +/// "guild_uuid": "5ba61ec7-5f97-43e1-89a5-d4693c155612", +/// "name": "Cool people", +/// "color": 15650773, +/// "is_above": "c7432f1c-f4ad-4ad3-8216-51388b6abb5b", +/// "permissions": 0 +/// }, +/// { +/// "uuid": "c7432f1c-f4ad-4ad3-8216-51388b6abb5b", +/// "guild_uuid": "5ba61ec7-5f97-43e1-89a5-d4693c155612", +/// "name": "Equally cool people", +/// "color": 16777215, +/// "is_above": null, +/// "permissions": 0 +/// } +/// ], +/// "member_count": 20 +/// } +/// ]); +/// ``` +/// NOTE: UUIDs in this response are made using `uuidgen`, UUIDs made by the actual backend will be UUIDv7 and have extractable timestamps +#[get("")] +pub async fn get( + req: HttpRequest, + request_query: web::Query, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let start = request_query.start.unwrap_or(0); + + let amount = request_query.amount.unwrap_or(10); + + let uuid = check_access_token(auth_header, &mut data.pool.get().await?).await?; + + global_checks(&data, uuid).await?; + + let guilds = Guild::fetch_amount(&data.pool, start, amount).await?; + + Ok(HttpResponse::Ok().json(guilds)) +} diff --git a/src/api/v1/guilds/uuid/channels.rs b/src/api/v1/guilds/uuid/channels.rs new file mode 100644 index
0000000..db895e4 --- /dev/null +++ b/src/api/v1/guilds/uuid/channels.rs @@ -0,0 +1,86 @@ +use crate::{ + api::v1::auth::check_access_token, error::Error, objects::{Channel, Member, Permissions}, utils::{get_auth_header, global_checks, order_by_is_above}, Data +}; +use ::uuid::Uuid; +use actix_web::{HttpRequest, HttpResponse, get, post, web}; +use serde::Deserialize; + +#[derive(Deserialize)] +struct ChannelInfo { + name: String, + description: Option, +} + +#[get("{uuid}/channels")] +pub async fn get( + req: HttpRequest, + path: web::Path<(Uuid,)>, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let guild_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + Member::check_membership(&mut conn, uuid, guild_uuid).await?; + + if let Ok(cache_hit) = data.get_cache_key(format!("{}_channels", guild_uuid)).await { + return Ok(HttpResponse::Ok() + .content_type("application/json") + .body(cache_hit)); + } + + let channels = Channel::fetch_all(&data.pool, guild_uuid).await?; + + let channels_ordered = order_by_is_above(channels).await?; + + data.set_cache_key( + format!("{}_channels", guild_uuid), + channels_ordered.clone(), + 1800, + ) + .await?; + + Ok(HttpResponse::Ok().json(channels_ordered)) +} + +#[post("{uuid}/channels")] +pub async fn create( + req: HttpRequest, + channel_info: web::Json, + path: web::Path<(Uuid,)>, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let guild_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + let member = Member::check_membership(&mut conn, uuid, guild_uuid).await?; + + member.check_permission(&data, Permissions::CreateChannel).await?; + + let channel = Channel::new( + data.clone(), + guild_uuid, + channel_info.name.clone(), + channel_info.description.clone(), + ) + .await?; + + Ok(HttpResponse::Ok().json(channel)) +} diff --git a/src/api/v1/guilds/uuid/icon.rs b/src/api/v1/guilds/uuid/icon.rs new file mode 100644 index 0000000..0860435 --- /dev/null +++ b/src/api/v1/guilds/uuid/icon.rs @@ -0,0 +1,56 @@ +//! 
`/api/v1/guilds/{uuid}/icon` Icon related endpoints, will probably be replaced by a multipart post to the above endpoint + +use actix_web::{HttpRequest, HttpResponse, put, web}; +use futures_util::StreamExt as _; +use uuid::Uuid; + +use crate::{ + api::v1::auth::check_access_token, error::Error, objects::{Guild, Member, Permissions}, utils::{get_auth_header, global_checks}, Data +}; + +/// `PUT /api/v1/guilds/{uuid}/icon` Icon upload +/// +/// requires auth: yes +/// +/// PUT request expects a file and nothing else +#[put("{uuid}/icon")] +pub async fn upload( + req: HttpRequest, + path: web::Path<(Uuid,)>, + mut payload: web::Payload, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let guild_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + let member = Member::check_membership(&mut conn, uuid, guild_uuid).await?; + + member.check_permission(&data, Permissions::ManageServer).await?; + + let mut guild = Guild::fetch_one(&mut conn, guild_uuid).await?; + + let mut bytes = web::BytesMut::new(); + while let Some(item) = payload.next().await { + bytes.extend_from_slice(&item?); + } + + guild + .set_icon( + &data.bunny_storage, + &mut conn, + data.config.bunny.cdn_url.clone(), + bytes, + ) + .await?; + + Ok(HttpResponse::Ok().finish()) +} diff --git a/src/api/v1/guilds/uuid/invites/id.rs b/src/api/v1/guilds/uuid/invites/id.rs new file mode 100644 index 0000000..e69de29 diff --git a/src/api/v1/guilds/uuid/invites/mod.rs b/src/api/v1/guilds/uuid/invites/mod.rs new file mode 100644 index 0000000..eb8d2ce --- /dev/null +++ b/src/api/v1/guilds/uuid/invites/mod.rs @@ -0,0 +1,69 @@ +use actix_web::{HttpRequest, HttpResponse, get, post, web}; +use serde::Deserialize; +use uuid::Uuid; + +use crate::{ + api::v1::auth::check_access_token, error::Error, objects::{Guild, Member, Permissions}, utils::{get_auth_header, global_checks}, Data +}; + +#[derive(Deserialize)] +struct InviteRequest { + custom_id: Option, +} + +#[get("{uuid}/invites")] +pub async fn get( + req: HttpRequest, + path: web::Path<(Uuid,)>, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let guild_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + Member::check_membership(&mut conn, uuid, guild_uuid).await?; + + let guild = Guild::fetch_one(&mut conn, guild_uuid).await?; + + let invites = guild.get_invites(&mut conn).await?; + + Ok(HttpResponse::Ok().json(invites)) +} + +#[post("{uuid}/invites")] +pub async fn create( + req: HttpRequest, + path: web::Path<(Uuid,)>, + invite_request: web::Json, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let guild_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + let member = Member::check_membership(&mut conn, uuid, guild_uuid).await?; + + member.check_permission(&data, Permissions::CreateInvite).await?; + + let guild = Guild::fetch_one(&mut conn, guild_uuid).await?; + + let invite = guild.create_invite(&mut conn, uuid, invite_request.custom_id.clone()).await?; + + Ok(HttpResponse::Ok().json(invite)) +} diff --git 
a/src/api/v1/guilds/uuid/members.rs b/src/api/v1/guilds/uuid/members.rs new file mode 100644 index 0000000..972d862 --- /dev/null +++ b/src/api/v1/guilds/uuid/members.rs @@ -0,0 +1,34 @@ +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + objects::Member, + utils::{get_auth_header, global_checks}, +}; +use ::uuid::Uuid; +use actix_web::{HttpRequest, HttpResponse, get, web}; + +#[get("{uuid}/members")] +pub async fn get( + req: HttpRequest, + path: web::Path<(Uuid,)>, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let guild_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + Member::check_membership(&mut conn, uuid, guild_uuid).await?; + + let members = Member::fetch_all(&data, guild_uuid).await?; + + Ok(HttpResponse::Ok().json(members)) +} diff --git a/src/api/v1/guilds/uuid/mod.rs b/src/api/v1/guilds/uuid/mod.rs new file mode 100644 index 0000000..4c88d7a --- /dev/null +++ b/src/api/v1/guilds/uuid/mod.rs @@ -0,0 +1,96 @@ +//! `/api/v1/guilds/{uuid}` Guild specific endpoints + +use actix_web::{HttpRequest, HttpResponse, Scope, get, web}; +use uuid::Uuid; + +mod channels; +mod icon; +mod invites; +mod members; +mod roles; + +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + objects::{Guild, Member}, + utils::{get_auth_header, global_checks}, +}; + +pub fn web() -> Scope { + web::scope("") + // Servers + .service(get) + // Channels + .service(channels::get) + .service(channels::create) + // Roles + .service(roles::get) + .service(roles::create) + .service(roles::uuid::get) + // Invites + .service(invites::get) + .service(invites::create) + // Icon + .service(icon::upload) + // Members + .service(members::get) +} + +/// `GET /api/v1/guilds/{uuid}` Fetches the guild with the given UUID +/// +/// requires auth: yes +/// +/// ### Response Example +/// ``` +/// json!({ +/// "uuid": "5ba61ec7-5f97-43e1-89a5-d4693c155612", +/// "name": "My first server!", +/// "description": "This is a cool and nullable description!", +/// "icon": "https://nullable-url/path/to/icon.png", +/// "owner_uuid": "155d2291-fb23-46bd-a656-ae7c5d8218e6", +/// "roles": [ +/// { +/// "uuid": "be0e4da4-cf73-4f45-98f8-bb1c73d1ab8b", +/// "guild_uuid": "5ba61ec7-5f97-43e1-89a5-d4693c155612", +/// "name": "Cool people", +/// "color": 15650773, +/// "is_above": "c7432f1c-f4ad-4ad3-8216-51388b6abb5b", +/// "permissions": 0 +/// }, +/// { +/// "uuid": "c7432f1c-f4ad-4ad3-8216-51388b6abb5b", +/// "guild_uuid": "5ba61ec7-5f97-43e1-89a5-d4693c155612", +/// "name": "Equally cool people", +/// "color": 16777215, +/// "is_above": null, +/// "permissions": 0 +/// } +/// ], +/// "member_count": 20 +/// }); +/// ``` +#[get("/{uuid}")] +pub async fn get( + req: HttpRequest, + path: web::Path<(Uuid,)>, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let guild_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + Member::check_membership(&mut conn, uuid, guild_uuid).await?; + + let guild = Guild::fetch_one(&mut conn, guild_uuid).await?; + + Ok(HttpResponse::Ok().json(guild)) +} diff --git a/src/api/v1/guilds/uuid/roles/mod.rs b/src/api/v1/guilds/uuid/roles/mod.rs new file mode 100644 index 0000000..c33f144 --- /dev/null +++ 
b/src/api/v1/guilds/uuid/roles/mod.rs @@ -0,0 +1,76 @@ +use ::uuid::Uuid; +use actix_web::{HttpRequest, HttpResponse, get, post, web}; +use serde::Deserialize; + +use crate::{ + api::v1::auth::check_access_token, error::Error, objects::{Member, Permissions, Role}, utils::{get_auth_header, global_checks, order_by_is_above}, Data +}; + +pub mod uuid; + +#[derive(Deserialize)] +struct RoleInfo { + name: String, +} + +#[get("{uuid}/roles")] +pub async fn get( + req: HttpRequest, + path: web::Path<(Uuid,)>, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let guild_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + Member::check_membership(&mut conn, uuid, guild_uuid).await?; + + if let Ok(cache_hit) = data.get_cache_key(format!("{}_roles", guild_uuid)).await { + return Ok(HttpResponse::Ok() + .content_type("application/json") + .body(cache_hit)); + } + + let roles = Role::fetch_all(&mut conn, guild_uuid).await?; + + let roles_ordered = order_by_is_above(roles).await?; + + data.set_cache_key(format!("{}_roles", guild_uuid), roles_ordered.clone(), 1800) + .await?; + + Ok(HttpResponse::Ok().json(roles_ordered)) +} + +#[post("{uuid}/roles")] +pub async fn create( + req: HttpRequest, + role_info: web::Json, + path: web::Path<(Uuid,)>, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let guild_uuid = path.into_inner().0; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + let member = Member::check_membership(&mut conn, uuid, guild_uuid).await?; + + member.check_permission(&data, Permissions::CreateRole).await?; + + let role = Role::new(&mut conn, guild_uuid, role_info.name.clone()).await?; + + Ok(HttpResponse::Ok().json(role)) +} diff --git a/src/api/v1/guilds/uuid/roles/uuid.rs b/src/api/v1/guilds/uuid/roles/uuid.rs new file mode 100644 index 0000000..f1a3206 --- /dev/null +++ b/src/api/v1/guilds/uuid/roles/uuid.rs @@ -0,0 +1,43 @@ +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + objects::{Member, Role}, + utils::{get_auth_header, global_checks}, +}; +use ::uuid::Uuid; +use actix_web::{HttpRequest, HttpResponse, get, web}; + +#[get("{uuid}/roles/{role_uuid}")] +pub async fn get( + req: HttpRequest, + path: web::Path<(Uuid, Uuid)>, + data: web::Data, +) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let (guild_uuid, role_uuid) = path.into_inner(); + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + Member::check_membership(&mut conn, uuid, guild_uuid).await?; + + if let Ok(cache_hit) = data.get_cache_key(format!("{}", role_uuid)).await { + return Ok(HttpResponse::Ok() + .content_type("application/json") + .body(cache_hit)); + } + + let role = Role::fetch_one(&mut conn, role_uuid).await?; + + data.set_cache_key(format!("{}", role_uuid), role.clone(), 60) + .await?; + + Ok(HttpResponse::Ok().json(role)) +} diff --git a/src/api/v1/invites/id.rs b/src/api/v1/invites/id.rs index 2adb8d8..22e2868 100644 --- a/src/api/v1/invites/id.rs +++ b/src/api/v1/invites/id.rs @@ -1,43 +1,22 @@ -use actix_web::{Error, HttpRequest, HttpResponse, get, post, web}; +use actix_web::{HttpRequest, HttpResponse, get, post, web}; 
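The channel and role listing endpoints above consult `data.get_cache_key(...)` before touching Postgres and write the result back with `data.set_cache_key(..., ttl)`. Those helpers are not shown in this diff; a rough sketch of what they could look like on top of the same Redis client the websocket code uses, assuming serde_json for serialization and that a missing key surfaces as an `Err` (which the callers treat as a cache miss):

```
use redis::AsyncCommands;
use serde::Serialize;

impl Data {
    // Hypothetical signatures; only the call sites appear in this diff.
    pub async fn get_cache_key(&self, key: String) -> Result<String, crate::error::Error> {
        let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;
        // A missing key comes back as Err, which the endpoints treat as a cache miss.
        Ok(conn.get(key).await?)
    }

    pub async fn set_cache_key(
        &self,
        key: String,
        value: impl Serialize,
        expire: u64,
    ) -> Result<(), crate::error::Error> {
        let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;
        let json = serde_json::to_string(&value)?;
        // SET with an expiry (in seconds) so cached guild data ages out on its own.
        Ok(conn.set_ex(key, json, expire).await?)
    }
}
```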
use crate::{ Data, api::v1::auth::check_access_token, - structs::{Guild, Invite, Member}, - utils::get_auth_header, + error::Error, + objects::{Guild, Invite, Member}, + utils::{get_auth_header, global_checks}, }; #[get("{id}")] -pub async fn get( - req: HttpRequest, - path: web::Path<(String,)>, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } +pub async fn get(path: web::Path<(String,)>, data: web::Data) -> Result { + let mut conn = data.pool.get().await?; let invite_id = path.into_inner().0; - let result = Invite::fetch_one(&data.pool, invite_id).await; + let invite = Invite::fetch_one(&mut conn, invite_id).await?; - if let Err(error) = result { - return Ok(error); - } - - let invite = result.unwrap(); - - let guild_result = Guild::fetch_one(&data.pool, invite.guild_uuid).await; - - if let Err(error) = guild_result { - return Ok(error); - } - - let guild = guild_result.unwrap(); + let guild = Guild::fetch_one(&mut conn, invite.guild_uuid).await?; Ok(HttpResponse::Ok().json(guild)) } @@ -50,43 +29,21 @@ pub async fn join( ) -> Result { let headers = req.headers(); - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } + let auth_header = get_auth_header(headers)?; let invite_id = path.into_inner().0; - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; + let mut conn = data.pool.get().await?; - if let Err(error) = authorized { - return Ok(error); - } + let uuid = check_access_token(auth_header, &mut conn).await?; - let uuid = authorized.unwrap(); + global_checks(&data, uuid).await?; - let result = Invite::fetch_one(&data.pool, invite_id).await; + let invite = Invite::fetch_one(&mut conn, invite_id).await?; - if let Err(error) = result { - return Ok(error); - } + let guild = Guild::fetch_one(&mut conn, invite.guild_uuid).await?; - let invite = result.unwrap(); - - let guild_result = Guild::fetch_one(&data.pool, invite.guild_uuid).await; - - if let Err(error) = guild_result { - return Ok(error); - } - - let guild = guild_result.unwrap(); - - let member = Member::new(&data.pool, uuid, guild.uuid).await; - - if let Err(error) = member { - return Ok(error); - } + Member::new(&data, uuid, guild.uuid).await?; Ok(HttpResponse::Ok().json(guild)) } diff --git a/src/api/v1/me/friends/mod.rs b/src/api/v1/me/friends/mod.rs new file mode 100644 index 0000000..e69de29 diff --git a/src/api/v1/me/friends/pending.rs b/src/api/v1/me/friends/pending.rs new file mode 100644 index 0000000..e69de29 diff --git a/src/api/v1/me/guilds.rs b/src/api/v1/me/guilds.rs new file mode 100644 index 0000000..71cfca4 --- /dev/null +++ b/src/api/v1/me/guilds.rs @@ -0,0 +1,75 @@ +//! 
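Most of the rewritten handlers, like the invite endpoints just above, now bubble failures up with `?` into a shared `crate::error::Error` that actix converts into an HTTP response. The error type itself is outside this excerpt; the sketch below is speculative, based only on the variants that appear in the diff (`Unauthorized`, `TooManyRequests`, `PasswordHashError`) and the thiserror dependency, with an assumed status-code mapping:

```
use actix_web::{HttpResponse, http::StatusCode};
use thiserror::Error;

// Speculative shape of crate::error::Error; the real variant list may differ.
#[derive(Debug, Error)]
pub enum Error {
    #[error(transparent)]
    SqlError(#[from] diesel::result::Error),
    #[error(transparent)]
    PoolError(#[from] deadpool::managed::PoolError<diesel_async::pooled_connection::PoolError>),
    #[error(transparent)]
    RedisError(#[from] redis::RedisError),
    #[error("{0}")]
    PasswordHashError(String),
    #[error("{0}")]
    Unauthorized(String),
    #[error("{0}")]
    TooManyRequests(String),
}

impl actix_web::ResponseError for Error {
    fn status_code(&self) -> StatusCode {
        match self {
            Error::Unauthorized(_) => StatusCode::UNAUTHORIZED,
            Error::TooManyRequests(_) => StatusCode::TOO_MANY_REQUESTS,
            _ => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }

    fn error_response(&self) -> HttpResponse {
        // Plain-text body carrying the Display message; assumed, not confirmed.
        HttpResponse::build(self.status_code()).body(self.to_string())
    }
}
```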
`/api/v1/me/guilds` Contains endpoint related to guild memberships + +use actix_web::{HttpRequest, HttpResponse, get, web}; + +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + objects::Me, + utils::{get_auth_header, global_checks}, +}; + +/// `GET /api/v1/me/guilds` Returns all guild memberships in a list +/// +/// requires auth: yes +/// +/// ### Example Response +/// ``` +/// json!([ +/// { +/// "uuid": "383d2afa-082f-4dd3-9050-ca6ed91487b6", +/// "name": "My new server!", +/// "description": null, +/// "icon": null, +/// "owner_uuid": "155d2291-fb23-46bd-a656-ae7c5d8218e6", +/// "roles": [], +/// "member_count": 1 +/// }, +/// { +/// "uuid": "5ba61ec7-5f97-43e1-89a5-d4693c155612", +/// "name": "My first server!", +/// "description": "This is a cool and nullable description!", +/// "icon": "https://nullable-url/path/to/icon.png", +/// "owner_uuid": "155d2291-fb23-46bd-a656-ae7c5d8218e6", +/// "roles": [ +/// { +/// "uuid": "be0e4da4-cf73-4f45-98f8-bb1c73d1ab8b", +/// "guild_uuid": "5ba61ec7-5f97-43e1-89a5-d4693c155612", +/// "name": "Cool people", +/// "color": 15650773, +/// "is_above": c7432f1c-f4ad-4ad3-8216-51388b6abb5b, +/// "permissions": 0 +/// } +/// { +/// "uuid": "c7432f1c-f4ad-4ad3-8216-51388b6abb5b", +/// "guild_uuid": "5ba61ec7-5f97-43e1-89a5-d4693c155612", +/// "name": "Equally cool people", +/// "color": 16777215, +/// "is_above": null, +/// "permissions": 0 +/// } +/// ], +/// "member_count": 20 +/// } +/// ]); +/// ``` +/// NOTE: UUIDs in this response are made using `uuidgen`, UUIDs made by the actual backend will be UUIDv7 and have extractable timestamps +#[get("/guilds")] +pub async fn get(req: HttpRequest, data: web::Data) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + global_checks(&data, uuid).await?; + + let me = Me::get(&mut conn, uuid).await?; + + let memberships = me.fetch_memberships(&mut conn).await?; + + Ok(HttpResponse::Ok().json(memberships)) +} diff --git a/src/api/v1/me/mod.rs b/src/api/v1/me/mod.rs new file mode 100644 index 0000000..da5c929 --- /dev/null +++ b/src/api/v1/me/mod.rs @@ -0,0 +1,104 @@ +use actix_multipart::form::{MultipartForm, json::Json as MpJson, tempfile::TempFile}; +use actix_web::{HttpRequest, HttpResponse, Scope, get, patch, web}; +use serde::Deserialize; + +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + objects::Me, + utils::{get_auth_header, global_checks}, +}; + +mod guilds; + +pub fn web() -> Scope { + web::scope("/me") + .service(get) + .service(update) + .service(guilds::get) +} + +#[get("")] +pub async fn get(req: HttpRequest, data: web::Data) -> Result { + let headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + let me = Me::get(&mut conn, uuid).await?; + + Ok(HttpResponse::Ok().json(me)) +} + +#[derive(Debug, Deserialize, Clone)] +struct NewInfo { + username: Option, + display_name: Option, + //password: Option, will probably be handled through a reset password link + email: Option, + pronouns: Option, + about: Option, +} + +#[derive(Debug, MultipartForm)] +struct UploadForm { + #[multipart(limit = "100MB")] + avatar: Option, + json: MpJson, +} + +#[patch("")] +pub async fn update( + req: HttpRequest, + MultipartForm(form): MultipartForm, + data: web::Data, +) -> Result { + let 
headers = req.headers(); + + let auth_header = get_auth_header(headers)?; + + let mut conn = data.pool.get().await?; + + let uuid = check_access_token(auth_header, &mut conn).await?; + + if form.avatar.is_some() || form.json.username.is_some() || form.json.display_name.is_some() { + global_checks(&data, uuid).await?; + } + + let mut me = Me::get(&mut conn, uuid).await?; + + if let Some(avatar) = form.avatar { + let bytes = tokio::fs::read(avatar.file).await?; + + let byte_slice: &[u8] = &bytes; + + me.set_avatar(&data, data.config.bunny.cdn_url.clone(), byte_slice.into()) + .await?; + } + + if let Some(username) = &form.json.username { + me.set_username(&data, username.clone()).await?; + } + + if let Some(display_name) = &form.json.display_name { + me.set_display_name(&data, display_name.clone()).await?; + } + + if let Some(email) = &form.json.email { + me.set_email(&data, email.clone()).await?; + } + + if let Some(pronouns) = &form.json.pronouns { + me.set_pronouns(&data, pronouns.clone()).await?; + } + + if let Some(about) = &form.json.about { + me.set_about(&data, about.clone()).await?; + } + + Ok(HttpResponse::Ok().finish()) +} diff --git a/src/api/v1/mod.rs b/src/api/v1/mod.rs index 749774d..6c2df0b 100644 --- a/src/api/v1/mod.rs +++ b/src/api/v1/mod.rs @@ -1,8 +1,12 @@ +//! `/api/v1` Contains version 1 of the api + use actix_web::{Scope, web}; mod auth; +mod channels; +mod guilds; mod invites; -mod servers; +mod me; mod stats; mod users; @@ -11,6 +15,8 @@ pub fn web() -> Scope { .service(stats::res) .service(auth::web()) .service(users::web()) - .service(servers::web()) + .service(channels::web()) + .service(guilds::web()) .service(invites::web()) + .service(me::web()) } diff --git a/src/api/v1/servers/mod.rs b/src/api/v1/servers/mod.rs deleted file mode 100644 index 7c74ff0..0000000 --- a/src/api/v1/servers/mod.rs +++ /dev/null @@ -1,90 +0,0 @@ -use actix_web::{get, post, web, Error, HttpRequest, HttpResponse, Scope}; -use serde::Deserialize; - -mod uuid; - -use crate::{api::v1::auth::check_access_token, structs::{Guild, StartAmountQuery}, utils::get_auth_header, Data}; - -#[derive(Deserialize)] -struct GuildInfo { - name: String, - description: Option, -} - -pub fn web() -> Scope { - web::scope("/servers") - .service(create) - .service(get) - .service(uuid::web()) -} - -#[post("")] -pub async fn create( - req: HttpRequest, - guild_info: web::Json, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let guild = Guild::new( - &data.pool, - guild_info.name.clone(), - guild_info.description.clone(), - uuid, - ) - .await; - - if let Err(error) = guild { - return Ok(error); - } - - Ok(HttpResponse::Ok().json(guild.unwrap())) -} - -#[get("")] -pub async fn get( - req: HttpRequest, - request_query: web::Query, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - let start = request_query.start.unwrap_or(0); - - let amount = request_query.amount.unwrap_or(10); - - if let Err(error) = auth_header { - return Ok(error); - } - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let guilds = Guild::fetch_amount(&data.pool, start, 
amount).await; - - if let Err(error) = guilds { - return Ok(error); - } - - Ok(HttpResponse::Ok().json(guilds.unwrap())) -} - diff --git a/src/api/v1/servers/uuid/channels/mod.rs b/src/api/v1/servers/uuid/channels/mod.rs deleted file mode 100644 index 3e6a342..0000000 --- a/src/api/v1/servers/uuid/channels/mod.rs +++ /dev/null @@ -1,124 +0,0 @@ -use crate::{ - Data, - api::v1::auth::check_access_token, - structs::{Channel, Member}, - utils::get_auth_header, -}; -use ::uuid::Uuid; -use actix_web::{Error, HttpRequest, HttpResponse, get, post, web}; -use log::error; -use serde::Deserialize; - -pub mod uuid; - -#[derive(Deserialize)] -struct ChannelInfo { - name: String, - description: Option, -} - -#[get("{uuid}/channels")] -pub async fn get( - req: HttpRequest, - path: web::Path<(Uuid,)>, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let guild_uuid = path.into_inner().0; - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - let cache_result = data.get_cache_key(format!("{}_channels", guild_uuid)).await; - - if let Ok(cache_hit) = cache_result { - return Ok(HttpResponse::Ok() - .content_type("application/json") - .body(cache_hit)); - } - - let channels_result = Channel::fetch_all(&data.pool, guild_uuid).await; - - if let Err(error) = channels_result { - return Ok(error); - } - - let channels = channels_result.unwrap(); - - let cache_result = data - .set_cache_key(format!("{}_channels", guild_uuid), channels.clone(), 1800) - .await; - - if let Err(error) = cache_result { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } - - Ok(HttpResponse::Ok().json(channels)) -} - -#[post("{uuid}/channels")] -pub async fn create( - req: HttpRequest, - channel_info: web::Json, - path: web::Path<(Uuid,)>, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let guild_uuid = path.into_inner().0; - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - // FIXME: Logic to check permissions, should probably be done in utils.rs - - let channel = Channel::new( - data.clone(), - guild_uuid, - channel_info.name.clone(), - channel_info.description.clone(), - ) - .await; - - if let Err(error) = channel { - return Ok(error); - } - - Ok(HttpResponse::Ok().json(channel.unwrap())) -} diff --git a/src/api/v1/servers/uuid/channels/uuid/messages.rs b/src/api/v1/servers/uuid/channels/uuid/messages.rs deleted file mode 100644 index ff36a4f..0000000 --- a/src/api/v1/servers/uuid/channels/uuid/messages.rs +++ /dev/null @@ -1,83 +0,0 @@ -use crate::{ - Data, - api::v1::auth::check_access_token, - structs::{Channel, Member}, - utils::get_auth_header, -}; -use ::uuid::Uuid; -use actix_web::{Error, HttpRequest, HttpResponse, get, web}; -use log::error; -use serde::Deserialize; - -#[derive(Deserialize)] -struct MessageRequest { - amount: i64, - 
offset: i64, -} - -#[get("{uuid}/channels/{channel_uuid}/messages")] -pub async fn get( - req: HttpRequest, - path: web::Path<(Uuid, Uuid)>, - message_request: web::Query, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let (guild_uuid, channel_uuid) = path.into_inner(); - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - let cache_result = data.get_cache_key(format!("{}", channel_uuid)).await; - - let channel: Channel; - - if let Ok(cache_hit) = cache_result { - channel = serde_json::from_str(&cache_hit).unwrap() - } else { - let channel_result = Channel::fetch_one(&data.pool, guild_uuid, channel_uuid).await; - - if let Err(error) = channel_result { - return Ok(error); - } - - channel = channel_result.unwrap(); - - let cache_result = data - .set_cache_key(format!("{}", channel_uuid), channel.clone(), 60) - .await; - - if let Err(error) = cache_result { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } - } - - let messages = channel - .fetch_messages(&data.pool, message_request.amount, message_request.offset) - .await; - - if let Err(error) = messages { - return Ok(error); - } - - Ok(HttpResponse::Ok().json(messages.unwrap())) -} diff --git a/src/api/v1/servers/uuid/channels/uuid/mod.rs b/src/api/v1/servers/uuid/channels/uuid/mod.rs deleted file mode 100644 index c737509..0000000 --- a/src/api/v1/servers/uuid/channels/uuid/mod.rs +++ /dev/null @@ -1,131 +0,0 @@ -pub mod messages; -pub mod socket; - -use crate::{ - Data, - api::v1::auth::check_access_token, - structs::{Channel, Member}, - utils::get_auth_header, -}; -use ::uuid::Uuid; -use actix_web::{Error, HttpRequest, HttpResponse, delete, get, web}; -use log::error; - -#[get("{uuid}/channels/{channel_uuid}")] -pub async fn get( - req: HttpRequest, - path: web::Path<(Uuid, Uuid)>, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let (guild_uuid, channel_uuid) = path.into_inner(); - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - let cache_result = data.get_cache_key(format!("{}", channel_uuid)).await; - - if let Ok(cache_hit) = cache_result { - return Ok(HttpResponse::Ok() - .content_type("application/json") - .body(cache_hit)); - } - - let channel_result = Channel::fetch_one(&data.pool, guild_uuid, channel_uuid).await; - - if let Err(error) = channel_result { - return Ok(error); - } - - let channel = channel_result.unwrap(); - - let cache_result = data - .set_cache_key(format!("{}", channel_uuid), channel.clone(), 60) - .await; - - if let Err(error) = cache_result { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } - - Ok(HttpResponse::Ok().json(channel)) -} - -#[delete("{uuid}/channels/{channel_uuid}")] -pub async fn delete( - req: HttpRequest, - path: web::Path<(Uuid, Uuid)>, - data: web::Data, -) 
-> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let (guild_uuid, channel_uuid) = path.into_inner(); - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - let cache_result = data.get_cache_key(format!("{}", channel_uuid)).await; - - let channel: Channel; - - if let Ok(cache_hit) = cache_result { - channel = serde_json::from_str(&cache_hit).unwrap(); - - let result = data.del_cache_key(format!("{}", channel_uuid)).await; - - if let Err(error) = result { - error!("{}", error) - } - } else { - let channel_result = Channel::fetch_one(&data.pool, guild_uuid, channel_uuid).await; - - if let Err(error) = channel_result { - return Ok(error); - } - - channel = channel_result.unwrap(); - } - - let delete_result = channel.delete(&data.pool).await; - - if let Err(error) = delete_result { - return Ok(error); - } - - Ok(HttpResponse::Ok().finish()) -} diff --git a/src/api/v1/servers/uuid/channels/uuid/socket.rs b/src/api/v1/servers/uuid/channels/uuid/socket.rs deleted file mode 100644 index b9b4ff7..0000000 --- a/src/api/v1/servers/uuid/channels/uuid/socket.rs +++ /dev/null @@ -1,143 +0,0 @@ -use actix_web::{Error, HttpRequest, HttpResponse, get, rt, web}; -use actix_ws::AggregatedMessage; -use futures_util::StreamExt as _; -use log::error; -use uuid::Uuid; - -use crate::{ - Data, - api::v1::auth::check_access_token, - structs::{Channel, Member}, - utils::get_auth_header, -}; - -#[get("{uuid}/channels/{channel_uuid}/socket")] -pub async fn echo( - req: HttpRequest, - path: web::Path<(Uuid, Uuid)>, - stream: web::Payload, - data: web::Data, -) -> Result { - // Get all headers - let headers = req.headers(); - - // Retrieve auth header - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - // Get uuids from path - let (guild_uuid, channel_uuid) = path.into_inner(); - - // Authorize client using auth header - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - // Unwrap user uuid from authorization - let uuid = authorized.unwrap(); - - // Get server member from psql - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - // Get cache for channel - let cache_result = data.get_cache_key(format!("{}", channel_uuid)).await; - - let channel: Channel; - - // Return channel cache or result from psql as `channel` variable - if let Ok(cache_hit) = cache_result { - channel = serde_json::from_str(&cache_hit).unwrap() - } else { - let channel_result = Channel::fetch_one(&data.pool, guild_uuid, channel_uuid).await; - - if let Err(error) = channel_result { - return Ok(error); - } - - channel = channel_result.unwrap(); - - let cache_result = data - .set_cache_key(format!("{}", channel_uuid), channel.clone(), 60) - .await; - - if let Err(error) = cache_result { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } - } - - let (res, mut session_1, stream) = actix_ws::handle(&req, stream)?; - - let mut stream = stream - .aggregate_continuations() - // aggregate continuation frames up to 1MiB - 
.max_continuation_size(2_usize.pow(20)); - - let pubsub_result = data.cache_pool.get_async_pubsub().await; - - if let Err(error) = pubsub_result { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } - - let mut session_2 = session_1.clone(); - - rt::spawn(async move { - let mut pubsub = pubsub_result.unwrap(); - pubsub.subscribe(channel_uuid.to_string()).await.unwrap(); - while let Some(msg) = pubsub.on_message().next().await { - let payload: String = msg.get_payload().unwrap(); - session_1.text(payload).await.unwrap(); - } - }); - - // start task but don't wait for it - rt::spawn(async move { - let mut conn = data - .cache_pool - .get_multiplexed_tokio_connection() - .await - .unwrap(); - // receive messages from websocket - while let Some(msg) = stream.next().await { - match msg { - Ok(AggregatedMessage::Text(text)) => { - // echo text message - redis::cmd("PUBLISH") - .arg(&[channel_uuid.to_string(), text.to_string()]) - .exec_async(&mut conn) - .await - .unwrap(); - channel - .new_message(&data.pool, uuid, text.to_string()) - .await - .unwrap(); - } - - Ok(AggregatedMessage::Binary(bin)) => { - // echo binary message - session_2.binary(bin).await.unwrap(); - } - - Ok(AggregatedMessage::Ping(msg)) => { - // respond to PING frame with PONG frame - session_2.pong(&msg).await.unwrap(); - } - - _ => {} - } - } - }); - - // respond immediately with response connected to WS session - Ok(res) -} diff --git a/src/api/v1/servers/uuid/invites/id.rs b/src/api/v1/servers/uuid/invites/id.rs deleted file mode 100644 index ffd2547..0000000 --- a/src/api/v1/servers/uuid/invites/id.rs +++ /dev/null @@ -1,43 +0,0 @@ -use actix_web::{delete, web, Error, HttpRequest, HttpResponse}; -use uuid::Uuid; - -use crate::{api::v1::auth::check_access_token, structs::{Invite, Member}, utils::get_auth_header, Data}; - -#[delete("{uuid}/invites/{id}")] -pub async fn delete(req: HttpRequest, path: web::Path<(Uuid, String)>, data: web::Data) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error) - } - - let (guild_uuid, invite_id) = path.into_inner(); - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error) - } - - let uuid = authorized.unwrap(); - - if let Err(error) = Member::fetch_one(&data.pool, uuid, guild_uuid).await { - return Ok(error) - } - - let result = Invite::fetch_one(&data.pool, invite_id).await; - - if let Err(error) = result { - return Ok(error) - } - - let invite = result.unwrap(); - - if let Err(error) = invite.delete(&data.pool).await { - return Ok(error) - } - - Ok(HttpResponse::Ok().finish()) -} diff --git a/src/api/v1/servers/uuid/invites/mod.rs b/src/api/v1/servers/uuid/invites/mod.rs deleted file mode 100644 index 459b237..0000000 --- a/src/api/v1/servers/uuid/invites/mod.rs +++ /dev/null @@ -1,116 +0,0 @@ -use actix_web::{Error, HttpRequest, HttpResponse, get, post, web}; -use serde::Deserialize; -use uuid::Uuid; - -mod id; - -use crate::{ - Data, - api::v1::auth::check_access_token, - structs::{Guild, Member}, - utils::get_auth_header, -}; - -#[derive(Deserialize)] -struct InviteRequest { - custom_id: String, -} - -#[get("{uuid}/invites")] -pub async fn get( - req: HttpRequest, - path: web::Path<(Uuid,)>, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let 
guild_uuid = path.into_inner().0; - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - let guild_result = Guild::fetch_one(&data.pool, guild_uuid).await; - - if let Err(error) = guild_result { - return Ok(error); - } - - let guild = guild_result.unwrap(); - - let invites = guild.get_invites(&data.pool).await; - - if let Err(error) = invites { - return Ok(error); - } - - Ok(HttpResponse::Ok().json(invites.unwrap())) -} - -#[post("{uuid}/invites")] -pub async fn create( - req: HttpRequest, - path: web::Path<(Uuid,)>, - invite_request: web::Json>, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let guild_uuid = path.into_inner().0; - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member_result = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member_result { - return Ok(error); - } - - let member = member_result.unwrap(); - - let guild_result = Guild::fetch_one(&data.pool, guild_uuid).await; - - if let Err(error) = guild_result { - return Ok(error); - } - - let guild = guild_result.unwrap(); - - let custom_id = invite_request.as_ref().map(|ir| ir.custom_id.clone()); - - let invite = guild.create_invite(&data.pool, &member, custom_id).await; - - if let Err(error) = invite { - return Ok(error); - } - - Ok(HttpResponse::Ok().json(invite.unwrap())) -} diff --git a/src/api/v1/servers/uuid/mod.rs b/src/api/v1/servers/uuid/mod.rs deleted file mode 100644 index 8f387aa..0000000 --- a/src/api/v1/servers/uuid/mod.rs +++ /dev/null @@ -1,72 +0,0 @@ -use actix_web::{Error, HttpRequest, HttpResponse, Scope, get, web}; -use uuid::Uuid; - -mod channels; -mod invites; -mod roles; - -use crate::{ - Data, - api::v1::auth::check_access_token, - structs::{Guild, Member}, - utils::get_auth_header, -}; - -pub fn web() -> Scope { - web::scope("") - // Servers - .service(res) - // Channels - .service(channels::get) - .service(channels::create) - .service(channels::uuid::get) - .service(channels::uuid::delete) - .service(channels::uuid::messages::get) - .service(channels::uuid::socket::echo) - // Roles - .service(roles::get) - .service(roles::create) - .service(roles::uuid::get) - // Invites - .service(invites::get) - .service(invites::create) -} - -#[get("/{uuid}")] -pub async fn res( - req: HttpRequest, - path: web::Path<(Uuid,)>, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let guild_uuid = path.into_inner().0; - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - let guild = Guild::fetch_one(&data.pool, guild_uuid).await; - - if let Err(error) = guild { - return Ok(error); - } - - Ok(HttpResponse::Ok().json(guild.unwrap())) -} diff --git a/src/api/v1/servers/uuid/roles/mod.rs 
b/src/api/v1/servers/uuid/roles/mod.rs deleted file mode 100644 index 8d22813..0000000 --- a/src/api/v1/servers/uuid/roles/mod.rs +++ /dev/null @@ -1,117 +0,0 @@ -use crate::{ - Data, - api::v1::auth::check_access_token, - structs::{Member, Role}, - utils::get_auth_header, -}; -use ::uuid::Uuid; -use actix_web::{Error, HttpRequest, HttpResponse, get, post, web}; -use log::error; -use serde::Deserialize; - -pub mod uuid; - -#[derive(Deserialize)] -struct RoleInfo { - name: String, -} - -#[get("{uuid}/roles")] -pub async fn get( - req: HttpRequest, - path: web::Path<(Uuid,)>, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let guild_uuid = path.into_inner().0; - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - let cache_result = data.get_cache_key(format!("{}_roles", guild_uuid)).await; - - if let Ok(cache_hit) = cache_result { - return Ok(HttpResponse::Ok() - .content_type("application/json") - .body(cache_hit)); - } - - let roles_result = Role::fetch_all(&data.pool, guild_uuid).await; - - if let Err(error) = roles_result { - return Ok(error); - } - - let roles = roles_result.unwrap(); - - let cache_result = data - .set_cache_key(format!("{}_roles", guild_uuid), roles.clone(), 1800) - .await; - - if let Err(error) = cache_result { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } - - Ok(HttpResponse::Ok().json(roles)) -} - -#[post("{uuid}/roles")] -pub async fn create( - req: HttpRequest, - role_info: web::Json, - path: web::Path<(Uuid,)>, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let guild_uuid = path.into_inner().0; - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - // FIXME: Logic to check permissions, should probably be done in utils.rs - - let role = Role::new(&data.pool, guild_uuid, role_info.name.clone()).await; - - if let Err(error) = role { - return Ok(error); - } - - Ok(HttpResponse::Ok().json(role.unwrap())) -} diff --git a/src/api/v1/servers/uuid/roles/uuid.rs b/src/api/v1/servers/uuid/roles/uuid.rs deleted file mode 100644 index 38bdca9..0000000 --- a/src/api/v1/servers/uuid/roles/uuid.rs +++ /dev/null @@ -1,67 +0,0 @@ -use crate::{ - Data, - api::v1::auth::check_access_token, - structs::{Member, Role}, - utils::get_auth_header, -}; -use ::uuid::Uuid; -use actix_web::{Error, HttpRequest, HttpResponse, get, web}; -use log::error; - -#[get("{uuid}/roles/{role_uuid}")] -pub async fn get( - req: HttpRequest, - path: web::Path<(Uuid, Uuid)>, - data: web::Data, -) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let (guild_uuid, role_uuid) = path.into_inner(); - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = 
authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let member = Member::fetch_one(&data.pool, uuid, guild_uuid).await; - - if let Err(error) = member { - return Ok(error); - } - - let cache_result = data.get_cache_key(format!("{}", role_uuid)).await; - - if let Ok(cache_hit) = cache_result { - return Ok(HttpResponse::Ok() - .content_type("application/json") - .body(cache_hit)); - } - - let role_result = Role::fetch_one(&data.pool, guild_uuid, role_uuid).await; - - if let Err(error) = role_result { - return Ok(error); - } - - let role = role_result.unwrap(); - - let cache_result = data - .set_cache_key(format!("{}", role_uuid), role.clone(), 60) - .await; - - if let Err(error) = cache_result { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } - - Ok(HttpResponse::Ok().json(role)) -} diff --git a/src/api/v1/stats.rs b/src/api/v1/stats.rs index 0ebf431..760ec71 100644 --- a/src/api/v1/stats.rs +++ b/src/api/v1/stats.rs @@ -1,31 +1,51 @@ +//! `/api/v1/stats` Returns stats about the server + use std::time::SystemTime; -use actix_web::{HttpResponse, Responder, get, web}; +use actix_web::{HttpResponse, get, web}; +use diesel::QueryDsl; +use diesel_async::RunQueryDsl; use serde::Serialize; use crate::Data; +use crate::error::Error; +use crate::schema::users::dsl::{users, uuid}; const VERSION: Option<&'static str> = option_env!("CARGO_PKG_VERSION"); +const GIT_SHORT_HASH: &str = env!("GIT_SHORT_HASH"); #[derive(Serialize)] struct Response { - accounts: usize, + accounts: i64, uptime: u64, version: String, + registration_enabled: bool, + email_verification_required: bool, build_number: String, } +/// `GET /api/v1/stats` Returns stats about the server +/// +/// requires auth: no +/// +/// ### Response Example +/// ``` +/// json!({ +/// "accounts": 3, +/// "uptime": 50000, +/// "version": "0.1.0", +/// "registration_enabled": true, +/// "email_verification_required": true, +/// "build_number": "39d01bb" +/// }); +/// ``` #[get("/stats")] -pub async fn res(data: web::Data) -> impl Responder { - let accounts; - if let Ok(users) = sqlx::query("SELECT uuid FROM users") - .fetch_all(&data.pool) - .await - { - accounts = users.len(); - } else { - return HttpResponse::InternalServerError().finish(); - } +pub async fn res(data: web::Data) -> Result { + let accounts: i64 = users + .select(uuid) + .count() + .get_result(&mut data.pool.get().await?) 
+ .await?; let response = Response { // TODO: Get number of accounts from db @@ -35,9 +55,11 @@ pub async fn res(data: web::Data) -> impl Responder { .expect("Seriously why dont you have time??") .as_secs(), version: String::from(VERSION.unwrap_or("UNKNOWN")), + registration_enabled: data.config.instance.registration, + email_verification_required: data.config.instance.require_email_verification, // TODO: Get build number from git hash or remove this from the spec - build_number: String::from("how do i implement this?"), + build_number: String::from(GIT_SHORT_HASH), }; - HttpResponse::Ok().json(response) + Ok(HttpResponse::Ok().json(response)) } diff --git a/src/api/v1/users/me.rs b/src/api/v1/users/me.rs deleted file mode 100644 index f641678..0000000 --- a/src/api/v1/users/me.rs +++ /dev/null @@ -1,51 +0,0 @@ -use actix_web::{Error, HttpRequest, HttpResponse, get, web}; -use log::error; -use serde::Serialize; - -use crate::{Data, api::v1::auth::check_access_token, utils::get_auth_header}; - -#[derive(Serialize)] -struct Response { - uuid: String, - username: String, - display_name: String, -} - -#[get("/me")] -pub async fn res(req: HttpRequest, data: web::Data) -> Result { - let headers = req.headers(); - - let auth_header = get_auth_header(headers); - - if let Err(error) = auth_header { - return Ok(error); - } - - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; - - if let Err(error) = authorized { - return Ok(error); - } - - let uuid = authorized.unwrap(); - - let row = sqlx::query_as(&format!( - "SELECT username, display_name FROM users WHERE uuid = '{}'", - uuid - )) - .fetch_one(&data.pool) - .await; - - if let Err(error) = row { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } - - let (username, display_name): (String, Option) = row.unwrap(); - - Ok(HttpResponse::Ok().json(Response { - uuid: uuid.to_string(), - username, - display_name: display_name.unwrap_or_default(), - })) -} diff --git a/src/api/v1/users/mod.rs b/src/api/v1/users/mod.rs index d6eb6bd..334fd5f 100644 --- a/src/api/v1/users/mod.rs +++ b/src/api/v1/users/mod.rs @@ -1,36 +1,60 @@ -use crate::{api::v1::auth::check_access_token, structs::StartAmountQuery, utils::get_auth_header, Data}; -use actix_web::{Error, HttpRequest, HttpResponse, Scope, get, web}; -use log::error; -use serde::Serialize; -use sqlx::prelude::FromRow; +//! 
`/api/v1/users` Contains endpoints related to all users + +use actix_web::{HttpRequest, HttpResponse, Scope, get, web}; + +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + objects::{StartAmountQuery, User}, + utils::{get_auth_header, global_checks}, +}; -mod me; mod uuid; -#[derive(Serialize, FromRow)] -struct Response { - uuid: String, - username: String, - display_name: Option, - email: String, -} - pub fn web() -> Scope { - web::scope("/users") - .service(res) - .service(me::res) - .service(uuid::res) + web::scope("/users").service(get).service(uuid::get) } +/// `GET /api/v1/users` Returns all users on this instance +/// +/// requires auth: yes +/// +/// requires admin: yes +/// +/// ### Response Example +/// ``` +/// json!([ +/// { +/// "uuid": "155d2291-fb23-46bd-a656-ae7c5d8218e6", +/// "username": "user1", +/// "display_name": "Nullable Name", +/// "avatar": "https://nullable-url.com/path/to/image.png" +/// }, +/// { +/// "uuid": "d48a3317-7b4d-443f-a250-ea9ab2bb8661", +/// "username": "user2", +/// "display_name": "John User 2", +/// "avatar": "https://also-supports-jpg.com/path/to/image.jpg" +/// }, +/// { +/// "uuid": "12c4b3f8-a25b-4b9b-8136-b275c855ed4a", +/// "username": "user3", +/// "display_name": null, +/// "avatar": null +/// } +/// ]); +/// ``` +/// NOTE: UUIDs in this response are made using `uuidgen`, UUIDs made by the actual backend will be UUIDv7 and have extractable timestamps #[get("")] -pub async fn res( +pub async fn get( req: HttpRequest, request_query: web::Query, data: web::Data, ) -> Result { let headers = req.headers(); - let auth_header = get_auth_header(headers); + let auth_header = get_auth_header(headers)?; let start = request_query.start.unwrap_or(0); @@ -40,24 +64,13 @@ pub async fn res( return Ok(HttpResponse::BadRequest().finish()); } - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; + let mut conn = data.pool.get().await?; - if let Err(error) = authorized { - return Ok(error); - } + let uuid = check_access_token(auth_header, &mut conn).await?; - let row = sqlx::query_as("SELECT CAST(uuid AS VARCHAR), username, display_name, email FROM users ORDER BY username LIMIT $1 OFFSET $2") - .bind(amount) - .bind(start) - .fetch_all(&data.pool) - .await; + global_checks(&data, uuid).await?; - if let Err(error) = row { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } + let users = User::fetch_amount(&mut conn, start, amount).await?; - let accounts: Vec = row.unwrap(); - - Ok(HttpResponse::Ok().json(accounts)) + Ok(HttpResponse::Ok().json(users)) } diff --git a/src/api/v1/users/uuid.rs b/src/api/v1/users/uuid.rs index 9edaffa..9e602a0 100644 --- a/src/api/v1/users/uuid.rs +++ b/src/api/v1/users/uuid.rs @@ -1,75 +1,51 @@ -use actix_web::{Error, HttpRequest, HttpResponse, get, web}; -use log::error; -use serde::Serialize; +//! 
`/api/v1/users/{uuid}` Specific user endpoints + +use actix_web::{HttpRequest, HttpResponse, get, web}; use uuid::Uuid; -use crate::{Data, api::v1::auth::check_access_token, utils::get_auth_header}; - -#[derive(Serialize, Clone)] -struct Response { - uuid: String, - username: String, - display_name: String, -} +use crate::{ + Data, + api::v1::auth::check_access_token, + error::Error, + objects::User, + utils::{get_auth_header, global_checks}, +}; +/// `GET /api/v1/users/{uuid}` Returns user with the given UUID +/// +/// requires auth: yes +/// +/// requires relation: yes +/// +/// ### Response Example +/// ``` +/// json!({ +/// "uuid": "155d2291-fb23-46bd-a656-ae7c5d8218e6", +/// "username": "user1", +/// "display_name": "Nullable Name", +/// "avatar": "https://nullable-url.com/path/to/image.png" +/// }); +/// ``` +/// NOTE: UUIDs in this response are made using `uuidgen`, UUIDs made by the actual backend will be UUIDv7 and have extractable timestamps #[get("/{uuid}")] -pub async fn res( +pub async fn get( req: HttpRequest, path: web::Path<(Uuid,)>, data: web::Data, ) -> Result { let headers = req.headers(); - let uuid = path.into_inner().0; + let user_uuid = path.into_inner().0; - let auth_header = get_auth_header(headers); + let auth_header = get_auth_header(headers)?; - if let Err(error) = auth_header { - return Ok(error); - } + let mut conn = data.pool.get().await?; - let authorized = check_access_token(auth_header.unwrap(), &data.pool).await; + let uuid = check_access_token(auth_header, &mut conn).await?; - if let Err(error) = authorized { - return Ok(error); - } + global_checks(&data, uuid).await?; - let cache_result = data.get_cache_key(uuid.to_string()).await; - - if let Ok(cache_hit) = cache_result { - return Ok(HttpResponse::Ok() - .content_type("application/json") - .body(cache_hit)); - } - - let row = sqlx::query_as(&format!( - "SELECT username, display_name FROM users WHERE uuid = '{}'", - uuid - )) - .fetch_one(&data.pool) - .await; - - if let Err(error) = row { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } - - let (username, display_name): (String, Option) = row.unwrap(); - - let user = Response { - uuid: uuid.to_string(), - username, - display_name: display_name.unwrap_or_default(), - }; - - let cache_result = data - .set_cache_key(uuid.to_string(), user.clone(), 1800) - .await; - - if let Err(error) = cache_result { - error!("{}", error); - return Ok(HttpResponse::InternalServerError().finish()); - } + let user = User::fetch_one(&data, user_uuid).await?; Ok(HttpResponse::Ok().json(user)) } diff --git a/src/api/versions.rs b/src/api/versions.rs index e5695be..0c3e106 100644 --- a/src/api/versions.rs +++ b/src/api/versions.rs @@ -1,3 +1,4 @@ +//! `/api/v1/versions` Returns info about api versions use actix_web::{HttpResponse, Responder, get}; use serde::Serialize; @@ -10,8 +11,21 @@ struct Response { #[derive(Serialize)] struct UnstableFeatures; +/// `GET /api/versions` Returns info about api versions. +/// +/// requires auth: no +/// +/// ### Response Example +/// ``` +/// json!({ +/// "unstable_features": {}, +/// "versions": [ +/// "1" +/// ] +/// }); +/// ``` #[get("/versions")] -pub async fn res() -> impl Responder { +pub async fn get() -> impl Responder { let response = Response { unstable_features: UnstableFeatures, // TODO: Find a way to dynamically update this possibly? 
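The rewritten handlers above all share one shape: return Result<HttpResponse, Error>, pull a pooled connection, and let `?` bubble every failure up to the Error type introduced in src/error.rs further down, whose ResponseError impl turns it into a JSON error response with the matching status code. A minimal, self-contained sketch of that pattern (the ApiError type and check_token helper here are stand-ins for illustration, not this crate's real Error or check_access_token):

    use actix_web::{HttpResponse, ResponseError, get, http::StatusCode};
    use thiserror::Error;

    #[derive(Debug, Error)]
    enum ApiError {
        #[error("{0}")]
        Unauthorized(String),
        #[error("{0}")]
        InternalServerError(String),
    }

    impl ResponseError for ApiError {
        // maps each error variant to an HTTP status; the default error_response()
        // then renders the Display text with that status
        fn status_code(&self) -> StatusCode {
            match self {
                ApiError::Unauthorized(_) => StatusCode::UNAUTHORIZED,
                ApiError::InternalServerError(_) => StatusCode::INTERNAL_SERVER_ERROR,
            }
        }
    }

    // stand-in for check_access_token(): a failed check becomes a 401 via ApiError
    fn check_token(token: &str) -> Result<u64, ApiError> {
        if token.is_empty() {
            return Err(ApiError::Unauthorized("no token".to_string()));
        }
        Ok(1)
    }

    #[get("/example")]
    async fn example() -> Result<HttpResponse, ApiError> {
        // each fallible step is a single `?` instead of an
        // `if let Err(error) = ... { return Ok(error); }` block
        let user_id = check_token("abc")?;
        Ok(HttpResponse::Ok().json(user_id))
    }

Anything the real Error::status_code does not match explicitly falls through to 500, so individual handlers never have to build their own error bodies.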
diff --git a/src/config.rs b/src/config.rs index 65a5965..cbcc8c5 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,14 +1,19 @@ -use crate::Error; +use crate::error::Error; +use bunny_api_tokio::edge_storage::Endpoint; +use lettre::transport::smtp::authentication::Credentials; use log::debug; use serde::Deserialize; -use sqlx::postgres::PgConnectOptions; use tokio::fs::read_to_string; +use url::Url; #[derive(Debug, Deserialize)] pub struct ConfigBuilder { database: Database, cache_database: CacheDatabase, - web: Option, + web: WebBuilder, + instance: Option, + bunny: BunnyBuilder, + mail: Mail, } #[derive(Debug, Deserialize, Clone)] @@ -31,11 +36,42 @@ pub struct CacheDatabase { #[derive(Debug, Deserialize)] struct WebBuilder { - url: Option, + ip: Option, port: Option, + frontend_url: Url, + backend_url: Option, _ssl: Option, } +#[derive(Debug, Deserialize)] +struct InstanceBuilder { + name: Option, + registration: Option, + require_email_verification: Option, +} + +#[derive(Debug, Deserialize)] +struct BunnyBuilder { + api_key: String, + endpoint: String, + storage_zone: String, + cdn_url: Url, +} + +#[derive(Debug, Deserialize, Clone)] +pub struct Mail { + pub smtp: Smtp, + pub address: String, + pub tls: String, +} + +#[derive(Debug, Deserialize, Clone)] +pub struct Smtp { + pub server: String, + username: String, + password: String, +} + impl ConfigBuilder { pub async fn load(path: String) -> Result { debug!("loading config from: {}", path); @@ -47,22 +83,57 @@ impl ConfigBuilder { } pub fn build(self) -> Config { - let web = if let Some(web) = self.web { - Web { - url: web.url.unwrap_or(String::from("0.0.0.0")), - port: web.port.unwrap_or(8080), - } - } else { - Web { - url: String::from("0.0.0.0"), - port: 8080, - } + let web = Web { + ip: self.web.ip.unwrap_or(String::from("0.0.0.0")), + port: self.web.port.unwrap_or(8080), + frontend_url: self.web.frontend_url.clone(), + backend_url: self + .web + .backend_url + .or_else(|| self.web.frontend_url.join("/api").ok()) + .unwrap(), + }; + + let endpoint = match &*self.bunny.endpoint { + "Frankfurt" => Endpoint::Frankfurt, + "London" => Endpoint::London, + "New York" => Endpoint::NewYork, + "Los Angeles" => Endpoint::LosAngeles, + "Singapore" => Endpoint::Singapore, + "Stockholm" => Endpoint::Stockholm, + "Sao Paulo" => Endpoint::SaoPaulo, + "Johannesburg" => Endpoint::Johannesburg, + "Sydney" => Endpoint::Sydney, + url => Endpoint::Custom(url.to_string()), + }; + + let bunny = Bunny { + api_key: self.bunny.api_key, + endpoint, + storage_zone: self.bunny.storage_zone, + cdn_url: self.bunny.cdn_url, + }; + + let instance = match self.instance { + Some(instance) => Instance { + name: instance.name.unwrap_or("Gorb".to_string()), + registration: instance.registration.unwrap_or(true), + require_email_verification: instance.require_email_verification.unwrap_or(false), + }, + None => Instance { + name: "Gorb".to_string(), + registration: true, + require_email_verification: false, + }, }; Config { database: self.database, cache_database: self.cache_database, web, + instance, + bunny, + mail: self.mail, } } } @@ -72,22 +143,53 @@ pub struct Config { pub database: Database, pub cache_database: CacheDatabase, pub web: Web, + pub instance: Instance, + pub bunny: Bunny, + pub mail: Mail, } #[derive(Debug, Clone)] pub struct Web { - pub url: String, + pub ip: String, pub port: u16, + pub frontend_url: Url, + pub backend_url: Url, +} + +#[derive(Debug, Clone)] +pub struct Instance { + pub name: String, + pub registration: bool, + pub 
require_email_verification: bool, +} + +#[derive(Debug, Clone)] +pub struct Bunny { + pub api_key: String, + pub endpoint: Endpoint, + pub storage_zone: String, + pub cdn_url: Url, } impl Database { - pub fn connect_options(&self) -> PgConnectOptions { - PgConnectOptions::new() - .database(&self.database) - .host(&self.host) - .username(&self.username) - .password(&self.password) - .port(self.port) + pub fn url(&self) -> String { + let mut url = String::from("postgres://"); + + url += &self.username; + + url += ":"; + url += &self.password; + + url += "@"; + + url += &self.host; + url += ":"; + url += &self.port.to_string(); + + url += "/"; + url += &self.database; + + url } } @@ -120,3 +222,9 @@ impl CacheDatabase { url } } + +impl Smtp { + pub fn credentials(&self) -> Credentials { + Credentials::new(self.username.clone(), self.password.clone()) + } +} diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 0000000..1b1bfba --- /dev/null +++ b/src/error.rs @@ -0,0 +1,112 @@ +use std::{io, time::SystemTimeError}; + +use actix_web::{ + HttpResponse, + error::{PayloadError, ResponseError}, + http::{ + StatusCode, + header::{ContentType, ToStrError}, + }, +}; +use bunny_api_tokio::error::Error as BunnyError; +use deadpool::managed::{BuildError, PoolError}; +use diesel::{ConnectionError, result::Error as DieselError}; +use diesel_async::pooled_connection::PoolError as DieselPoolError; +use lettre::{ + address::AddressError, error::Error as EmailError, transport::smtp::Error as SmtpError, +}; +use log::{debug, error}; +use redis::RedisError; +use serde::Serialize; +use serde_json::Error as JsonError; +use thiserror::Error; +use tokio::task::JoinError; +use toml::de::Error as TomlError; + +#[derive(Debug, Error)] +pub enum Error { + #[error(transparent)] + SqlError(#[from] DieselError), + #[error(transparent)] + PoolError(#[from] PoolError), + #[error(transparent)] + BuildError(#[from] BuildError), + #[error(transparent)] + RedisError(#[from] RedisError), + #[error(transparent)] + ConnectionError(#[from] ConnectionError), + #[error(transparent)] + JoinError(#[from] JoinError), + #[error(transparent)] + IoError(#[from] io::Error), + #[error(transparent)] + TomlError(#[from] TomlError), + #[error(transparent)] + JsonError(#[from] JsonError), + #[error(transparent)] + SystemTimeError(#[from] SystemTimeError), + #[error(transparent)] + ToStrError(#[from] ToStrError), + #[error(transparent)] + RandomError(#[from] getrandom::Error), + #[error(transparent)] + BunnyError(#[from] BunnyError), + #[error(transparent)] + UrlParseError(#[from] url::ParseError), + #[error(transparent)] + PayloadError(#[from] PayloadError), + #[error(transparent)] + WsClosed(#[from] actix_ws::Closed), + #[error(transparent)] + EmailError(#[from] EmailError), + #[error(transparent)] + SmtpError(#[from] SmtpError), + #[error(transparent)] + SmtpAddressError(#[from] AddressError), + #[error("{0}")] + PasswordHashError(String), + #[error("{0}")] + BadRequest(String), + #[error("{0}")] + Unauthorized(String), + #[error("{0}")] + Forbidden(String), + #[error("{0}")] + TooManyRequests(String), + #[error("{0}")] + InternalServerError(String), +} + +impl ResponseError for Error { + fn error_response(&self) -> HttpResponse { + debug!("{:?}", self); + error!("{}: {}", self.status_code(), self); + + HttpResponse::build(self.status_code()) + .insert_header(ContentType::json()) + .json(WebError::new(self.to_string())) + } + + fn status_code(&self) -> StatusCode { + match *self { + Error::SqlError(DieselError::NotFound) => 
StatusCode::NOT_FOUND, + Error::BunnyError(BunnyError::NotFound(_)) => StatusCode::NOT_FOUND, + Error::BadRequest(_) => StatusCode::BAD_REQUEST, + Error::Unauthorized(_) => StatusCode::UNAUTHORIZED, + Error::Forbidden(_) => StatusCode::FORBIDDEN, + Error::TooManyRequests(_) => StatusCode::TOO_MANY_REQUESTS, + _ => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} + +#[derive(Serialize)] +struct WebError { + message: String, +} + +impl WebError { + fn new(message: String) -> Self { + Self { message } + } +} diff --git a/src/main.rs b/src/main.rs index fbad594..47794e3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -2,18 +2,27 @@ use actix_cors::Cors; use actix_web::{App, HttpServer, web}; use argon2::Argon2; use clap::Parser; +use diesel_async::pooled_connection::AsyncDieselConnectionManager; +use diesel_async::pooled_connection::deadpool::Pool; +use error::Error; +use objects::MailClient; use simple_logger::SimpleLogger; -use sqlx::{PgPool, Pool, Postgres}; use std::time::SystemTime; mod config; use config::{Config, ConfigBuilder}; +use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations}; + +pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!(); + +type Conn = + deadpool::managed::Object>; + mod api; - -pub mod structs; +pub mod error; +pub mod objects; +pub mod schema; pub mod utils; -type Error = Box; - #[derive(Parser, Debug)] #[command(version, about, long_about = None)] struct Args { @@ -23,11 +32,16 @@ struct Args { #[derive(Clone)] pub struct Data { - pub pool: Pool, + pub pool: deadpool::managed::Pool< + AsyncDieselConnectionManager, + Conn, + >, pub cache_pool: redis::Client, - pub _config: Config, + pub config: Config, pub argon2: Argon2<'static>, pub start_time: SystemTime, + pub bunny_storage: bunny_api_tokio::EdgeStorageClient, + pub mail_client: MailClient, } #[tokio::main] @@ -44,105 +58,40 @@ async fn main() -> Result<(), Error> { let web = config.web.clone(); - let pool = PgPool::connect_with(config.database.connect_options()).await?; + // create a new connection pool with the default config + let pool_config = + AsyncDieselConnectionManager::::new(config.database.url()); + let pool = Pool::builder(pool_config).build()?; let cache_pool = redis::Client::open(config.cache_database.url())?; - /* - TODO: Figure out if a table should be used here and if not then what. 
- Also figure out if these should be different types from what they currently are and if we should add more "constraints" + let bunny = config.bunny.clone(); - TODO: References to time should be removed in favor of using the timestamp built in to UUIDv7 (apart from deleted_at in users) - */ - sqlx::raw_sql( - r#" - CREATE TABLE IF NOT EXISTS users ( - uuid uuid PRIMARY KEY NOT NULL, - username varchar(32) NOT NULL, - display_name varchar(64) DEFAULT NULL, - password varchar(512) NOT NULL, - email varchar(100) NOT NULL, - email_verified boolean NOT NULL DEFAULT FALSE, - is_deleted boolean NOT NULL DEFAULT FALSE, - deleted_at int8 DEFAULT NULL, - CONSTRAINT unique_username_active UNIQUE NULLS NOT DISTINCT (username, is_deleted), - CONSTRAINT unique_email_active UNIQUE NULLS NOT DISTINCT (email, is_deleted) - ); - CREATE UNIQUE INDEX IF NOT EXISTS idx_unique_username_active - ON users(username) - WHERE is_deleted = FALSE; - CREATE UNIQUE INDEX IF NOT EXISTS idx_unique_email_active - ON users(email) - WHERE is_deleted = FALSE; - CREATE TABLE IF NOT EXISTS instance_permissions ( - uuid uuid NOT NULL REFERENCES users(uuid), - administrator boolean NOT NULL DEFAULT FALSE - ); - CREATE TABLE IF NOT EXISTS refresh_tokens ( - token varchar(64) PRIMARY KEY UNIQUE NOT NULL, - uuid uuid NOT NULL REFERENCES users(uuid), - created_at int8 NOT NULL, - device_name varchar(16) NOT NULL - ); - CREATE TABLE IF NOT EXISTS access_tokens ( - token varchar(32) PRIMARY KEY UNIQUE NOT NULL, - refresh_token varchar(64) UNIQUE NOT NULL REFERENCES refresh_tokens(token) ON UPDATE CASCADE ON DELETE CASCADE, - uuid uuid NOT NULL REFERENCES users(uuid), - created_at int8 NOT NULL - ); - CREATE TABLE IF NOT EXISTS guilds ( - uuid uuid PRIMARY KEY NOT NULL, - owner_uuid uuid NOT NULL REFERENCES users(uuid), - name VARCHAR(100) NOT NULL, - description VARCHAR(300) - ); - CREATE TABLE IF NOT EXISTS guild_members ( - uuid uuid PRIMARY KEY NOT NULL, - guild_uuid uuid NOT NULL REFERENCES guilds(uuid) ON DELETE CASCADE, - user_uuid uuid NOT NULL REFERENCES users(uuid), - nickname VARCHAR(100) DEFAULT NULL - ); - CREATE TABLE IF NOT EXISTS roles ( - uuid uuid UNIQUE NOT NULL, - guild_uuid uuid NOT NULL REFERENCES guilds(uuid) ON DELETE CASCADE, - name VARCHAR(50) NOT NULL, - color int NOT NULL DEFAULT 16777215, - position int NOT NULL, - permissions int8 NOT NULL DEFAULT 0, - PRIMARY KEY (uuid, guild_uuid) - ); - CREATE TABLE IF NOT EXISTS role_members ( - role_uuid uuid NOT NULL REFERENCES roles(uuid) ON DELETE CASCADE, - member_uuid uuid NOT NULL REFERENCES guild_members(uuid) ON DELETE CASCADE, - PRIMARY KEY (role_uuid, member_uuid) - ); - CREATE TABLE IF NOT EXISTS channels ( - uuid uuid PRIMARY KEY NOT NULL, - guild_uuid uuid NOT NULL REFERENCES guilds(uuid) ON DELETE CASCADE, - name varchar(32) NOT NULL, - description varchar(500) NOT NULL - ); - CREATE TABLE IF NOT EXISTS channel_permissions ( - channel_uuid uuid NOT NULL REFERENCES channels(uuid) ON DELETE CASCADE, - role_uuid uuid NOT NULL REFERENCES roles(uuid) ON DELETE CASCADE, - permissions int8 NOT NULL DEFAULT 0, - PRIMARY KEY (channel_uuid, role_uuid) - ); - CREATE TABLE IF NOT EXISTS messages ( - uuid uuid PRIMARY KEY NOT NULL, - channel_uuid uuid NOT NULL REFERENCES channels(uuid) ON DELETE CASCADE, - user_uuid uuid NOT NULL REFERENCES users(uuid), - message varchar(4000) NOT NULL - ); - CREATE TABLE IF NOT EXISTS invites ( - id varchar(32) PRIMARY KEY NOT NULL, - guild_uuid uuid NOT NULL REFERENCES guilds(uuid) ON DELETE CASCADE, - user_uuid uuid NOT NULL 
REFERENCES users(uuid) - ); - "#, - ) - .execute(&pool) - .await?; + let bunny_storage = bunny_api_tokio::EdgeStorageClient::new(bunny.api_key, bunny.endpoint, bunny.storage_zone).await?; + + let mail = config.mail.clone(); + + let mail_client = MailClient::new( + mail.smtp.credentials(), + mail.smtp.server, + mail.address, + mail.tls, + )?; + + let database_url = config.database.url(); + + tokio::task::spawn_blocking(move || { + use diesel::prelude::Connection; + use diesel_async::async_connection_wrapper::AsyncConnectionWrapper; + + let mut conn = + AsyncConnectionWrapper::::establish(&database_url)?; + + conn.run_pending_migrations(MIGRATIONS)?; + Ok::<_, Box>(()) + }) + .await? + .unwrap(); /* **Stored for later possible use** @@ -164,10 +113,12 @@ async fn main() -> Result<(), Error> { let data = Data { pool, cache_pool, - _config: config, + config, // TODO: Possibly implement "pepper" into this (thinking it could generate one if it doesnt exist and store it on disk) argon2: Argon2::default(), start_time: SystemTime::now(), + bunny_storage, + mail_client, }; HttpServer::new(move || { @@ -199,9 +150,9 @@ async fn main() -> Result<(), Error> { App::new() .app_data(web::Data::new(data.clone())) .wrap(cors) - .service(api::web()) + .service(api::web(data.config.web.backend_url.path())) }) - .bind((web.url, web.port))? + .bind((web.ip, web.port))? .run() .await?; diff --git a/src/objects/channel.rs b/src/objects/channel.rs new file mode 100644 index 0000000..4d52353 --- /dev/null +++ b/src/objects/channel.rs @@ -0,0 +1,384 @@ +use diesel::{ + ExpressionMethods, Insertable, QueryDsl, Queryable, Selectable, SelectableHelper, delete, + insert_into, update, +}; +use diesel_async::{RunQueryDsl, pooled_connection::AsyncDieselConnectionManager}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::{ + Conn, Data, + error::Error, + schema::{channel_permissions, channels, messages}, + utils::{CHANNEL_REGEX, order_by_is_above}, +}; + +use super::{HasIsAbove, HasUuid, Message, load_or_empty, message::MessageBuilder}; + +#[derive(Queryable, Selectable, Insertable, Clone, Debug)] +#[diesel(table_name = channels)] +#[diesel(check_for_backend(diesel::pg::Pg))] +struct ChannelBuilder { + uuid: Uuid, + guild_uuid: Uuid, + name: String, + description: Option, + is_above: Option, +} + +impl ChannelBuilder { + async fn build(self, conn: &mut Conn) -> Result { + use self::channel_permissions::dsl::*; + let channel_permission: Vec = load_or_empty( + channel_permissions + .filter(channel_uuid.eq(self.uuid)) + .select(ChannelPermission::as_select()) + .load(conn) + .await, + )?; + + Ok(Channel { + uuid: self.uuid, + guild_uuid: self.guild_uuid, + name: self.name, + description: self.description, + is_above: self.is_above, + permissions: channel_permission, + }) + } +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct Channel { + pub uuid: Uuid, + pub guild_uuid: Uuid, + name: String, + description: Option, + pub is_above: Option, + pub permissions: Vec, +} + +#[derive(Serialize, Deserialize, Clone, Queryable, Selectable, Debug)] +#[diesel(table_name = channel_permissions)] +#[diesel(check_for_backend(diesel::pg::Pg))] +pub struct ChannelPermission { + pub role_uuid: Uuid, + pub permissions: i64, +} + +impl HasUuid for Channel { + fn uuid(&self) -> &Uuid { + self.uuid.as_ref() + } +} + +impl HasIsAbove for Channel { + fn is_above(&self) -> Option<&Uuid> { + self.is_above.as_ref() + } +} + +impl Channel { + pub async fn fetch_all( + pool: &deadpool::managed::Pool< + 
AsyncDieselConnectionManager, + Conn, + >, + guild_uuid: Uuid, + ) -> Result, Error> { + let mut conn = pool.get().await?; + + use channels::dsl; + let channel_builders: Vec = load_or_empty( + dsl::channels + .filter(dsl::guild_uuid.eq(guild_uuid)) + .select(ChannelBuilder::as_select()) + .load(&mut conn) + .await, + )?; + + let channel_futures = channel_builders.iter().map(async move |c| { + let mut conn = pool.get().await?; + c.clone().build(&mut conn).await + }); + + futures::future::try_join_all(channel_futures).await + } + + pub async fn fetch_one(data: &Data, channel_uuid: Uuid) -> Result { + if let Ok(cache_hit) = data.get_cache_key(channel_uuid.to_string()).await { + return Ok(serde_json::from_str(&cache_hit)?); + } + + let mut conn = data.pool.get().await?; + + use channels::dsl; + let channel_builder: ChannelBuilder = dsl::channels + .filter(dsl::uuid.eq(channel_uuid)) + .select(ChannelBuilder::as_select()) + .get_result(&mut conn) + .await?; + + let channel = channel_builder.build(&mut conn).await?; + + data.set_cache_key(channel_uuid.to_string(), channel.clone(), 60) + .await?; + + Ok(channel) + } + + pub async fn new( + data: actix_web::web::Data, + guild_uuid: Uuid, + name: String, + description: Option, + ) -> Result { + if !CHANNEL_REGEX.is_match(&name) { + return Err(Error::BadRequest("Channel name is invalid".to_string())); + } + + let mut conn = data.pool.get().await?; + + let channel_uuid = Uuid::now_v7(); + + let channels = Self::fetch_all(&data.pool, guild_uuid).await?; + + let channels_ordered = order_by_is_above(channels).await?; + + let last_channel = channels_ordered.last(); + + let new_channel = ChannelBuilder { + uuid: channel_uuid, + guild_uuid, + name: name.clone(), + description: description.clone(), + is_above: None, + }; + + insert_into(channels::table) + .values(new_channel.clone()) + .execute(&mut conn) + .await?; + + if let Some(old_last_channel) = last_channel { + use channels::dsl; + update(channels::table) + .filter(dsl::uuid.eq(old_last_channel.uuid)) + .set(dsl::is_above.eq(new_channel.uuid)) + .execute(&mut conn) + .await?; + } + + // returns different object because there's no reason to build the channelbuilder (wastes 1 database request) + let channel = Self { + uuid: channel_uuid, + guild_uuid, + name, + description, + is_above: None, + permissions: vec![], + }; + + data.set_cache_key(channel_uuid.to_string(), channel.clone(), 1800) + .await?; + + if data + .get_cache_key(format!("{}_channels", guild_uuid)) + .await + .is_ok() + { + data.del_cache_key(format!("{}_channels", guild_uuid)) + .await?; + } + + Ok(channel) + } + + pub async fn delete(self, data: &Data) -> Result<(), Error> { + let mut conn = data.pool.get().await?; + + use channels::dsl; + match update(channels::table) + .filter(dsl::is_above.eq(self.uuid)) + .set(dsl::is_above.eq(None::)) + .execute(&mut conn) + .await + { + Ok(r) => Ok(r), + Err(diesel::result::Error::NotFound) => Ok(0), + Err(e) => Err(e), + }?; + + delete(channels::table) + .filter(dsl::uuid.eq(self.uuid)) + .execute(&mut conn) + .await?; + + match update(channels::table) + .filter(dsl::is_above.eq(self.uuid)) + .set(dsl::is_above.eq(self.is_above)) + .execute(&mut conn) + .await + { + Ok(r) => Ok(r), + Err(diesel::result::Error::NotFound) => Ok(0), + Err(e) => Err(e), + }?; + + if data.get_cache_key(self.uuid.to_string()).await.is_ok() { + data.del_cache_key(self.uuid.to_string()).await?; + } + + if data + .get_cache_key(format!("{}_channels", self.guild_uuid)) + .await + .is_ok() + { + 
data.del_cache_key(format!("{}_channels", self.guild_uuid)) + .await?; + } + + Ok(()) + } + + pub async fn fetch_messages( + &self, + data: &Data, + amount: i64, + offset: i64, + ) -> Result, Error> { + let mut conn = data.pool.get().await?; + + use messages::dsl; + let messages: Vec = load_or_empty( + dsl::messages + .filter(dsl::channel_uuid.eq(self.uuid)) + .select(MessageBuilder::as_select()) + .order(dsl::uuid.desc()) + .limit(amount) + .offset(offset) + .load(&mut conn) + .await, + )?; + + let message_futures = messages.iter().map(async move |b| b.build(data).await); + + futures::future::try_join_all(message_futures).await + } + + pub async fn new_message( + &self, + data: &Data, + user_uuid: Uuid, + message: String, + ) -> Result { + let message_uuid = Uuid::now_v7(); + + let message = MessageBuilder { + uuid: message_uuid, + channel_uuid: self.uuid, + user_uuid, + message, + }; + + let mut conn = data.pool.get().await?; + + insert_into(messages::table) + .values(message.clone()) + .execute(&mut conn) + .await?; + + message.build(data).await + } + + pub async fn set_name(&mut self, data: &Data, new_name: String) -> Result<(), Error> { + if !CHANNEL_REGEX.is_match(&new_name) { + return Err(Error::BadRequest("Channel name is invalid".to_string())); + } + + let mut conn = data.pool.get().await?; + + use channels::dsl; + update(channels::table) + .filter(dsl::uuid.eq(self.uuid)) + .set(dsl::name.eq(&new_name)) + .execute(&mut conn) + .await?; + + self.name = new_name; + + Ok(()) + } + + pub async fn set_description( + &mut self, + data: &Data, + new_description: String, + ) -> Result<(), Error> { + let mut conn = data.pool.get().await?; + + use channels::dsl; + update(channels::table) + .filter(dsl::uuid.eq(self.uuid)) + .set(dsl::description.eq(&new_description)) + .execute(&mut conn) + .await?; + + self.description = Some(new_description); + + Ok(()) + } + + pub async fn move_channel(&mut self, data: &Data, new_is_above: Uuid) -> Result<(), Error> { + let mut conn = data.pool.get().await?; + + use channels::dsl; + let old_above_uuid: Option = match dsl::channels + .filter(dsl::is_above.eq(self.uuid)) + .select(dsl::uuid) + .get_result(&mut conn) + .await + { + Ok(r) => Ok(Some(r)), + Err(diesel::result::Error::NotFound) => Ok(None), + Err(e) => Err(e), + }?; + + if let Some(uuid) = old_above_uuid { + update(channels::table) + .filter(dsl::uuid.eq(uuid)) + .set(dsl::is_above.eq(None::)) + .execute(&mut conn) + .await?; + } + + match update(channels::table) + .filter(dsl::is_above.eq(new_is_above)) + .set(dsl::is_above.eq(self.uuid)) + .execute(&mut conn) + .await + { + Ok(r) => Ok(r), + Err(diesel::result::Error::NotFound) => Ok(0), + Err(e) => Err(e), + }?; + + update(channels::table) + .filter(dsl::uuid.eq(self.uuid)) + .set(dsl::is_above.eq(new_is_above)) + .execute(&mut conn) + .await?; + + if let Some(uuid) = old_above_uuid { + update(channels::table) + .filter(dsl::uuid.eq(uuid)) + .set(dsl::is_above.eq(self.is_above)) + .execute(&mut conn) + .await?; + } + + self.is_above = Some(new_is_above); + + Ok(()) + } +} diff --git a/src/objects/email_token.rs b/src/objects/email_token.rs new file mode 100644 index 0000000..4ec6b7e --- /dev/null +++ b/src/objects/email_token.rs @@ -0,0 +1,61 @@ +use chrono::Utc; +use lettre::message::MultiPart; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::{Data, error::Error, utils::generate_token}; + +use super::Me; + +#[derive(Serialize, Deserialize)] +pub struct EmailToken { + user_uuid: Uuid, + pub token: String, + pub 
created_at: chrono::DateTime, +} + +impl EmailToken { + pub async fn get(data: &Data, user_uuid: Uuid) -> Result { + let email_token = serde_json::from_str(&data.get_cache_key(format!("{}_email_verify", user_uuid)).await?)?; + + Ok(email_token) + } + + #[allow(clippy::new_ret_no_self)] + pub async fn new(data: &Data, me: Me) -> Result<(), Error> { + let token = generate_token::<32>()?; + + let email_token = EmailToken { + user_uuid: me.uuid, + token: token.clone(), + // TODO: Check if this can be replaced with something built into valkey + created_at: Utc::now() + }; + + data.set_cache_key(format!("{}_email_verify", me.uuid), email_token, 86400).await?; + + let mut verify_endpoint = data.config.web.frontend_url.join("verify-email")?; + + verify_endpoint.set_query(Some(&format!("token={}", token))); + + let email = data + .mail_client + .message_builder() + .to(me.email.parse()?) + .subject(format!("{} E-mail Verification", data.config.instance.name)) + .multipart(MultiPart::alternative_plain_html( + format!("Verify your {} account\n\nHello, {}!\nThanks for creating a new account on Gorb.\nThe final step to create your account is to verify your email address by visiting the page, within 24 hours.\n\n{}\n\nIf you didn't ask to verify this address, you can safely ignore this email\n\nThanks, The gorb team.", data.config.instance.name, me.username, verify_endpoint), + format!(r#"

<h1>Verify your {} Account</h1>

<p>Hello, {}!</p>

<p>Thanks for creating a new account on Gorb.</p>

<p>The final step to create your account is to verify your email address by clicking the button below, within 24 hours.</p>

<a href="{}">VERIFY ACCOUNT</a>

<p>If you didn't ask to verify this address, you can safely ignore this email.</p>

"#, data.config.instance.name, me.username, verify_endpoint) + ))?; + + data.mail_client.send_mail(email).await?; + + Ok(()) + } + + pub async fn delete(&self, data: &Data) -> Result<(), Error> { + data.del_cache_key(format!("{}_email_verify", self.user_uuid)).await?; + + Ok(()) + } +} diff --git a/src/objects/guild.rs b/src/objects/guild.rs new file mode 100644 index 0000000..47058ee --- /dev/null +++ b/src/objects/guild.rs @@ -0,0 +1,222 @@ +use actix_web::web::BytesMut; +use diesel::{ + ExpressionMethods, Insertable, QueryDsl, Queryable, Selectable, SelectableHelper, insert_into, + update, +}; +use diesel_async::{RunQueryDsl, pooled_connection::AsyncDieselConnectionManager}; +use serde::Serialize; +use tokio::task; +use url::Url; +use uuid::Uuid; + +use crate::{ + Conn, + error::Error, + schema::{guild_members, guilds, invites}, + utils::image_check, +}; + +use super::{Invite, Member, Role, load_or_empty, member::MemberBuilder}; + +#[derive(Serialize, Queryable, Selectable, Insertable, Clone)] +#[diesel(table_name = guilds)] +#[diesel(check_for_backend(diesel::pg::Pg))] +pub struct GuildBuilder { + uuid: Uuid, + name: String, + description: Option, + icon: Option, +} + +impl GuildBuilder { + pub async fn build(self, conn: &mut Conn) -> Result { + let member_count = Member::count(conn, self.uuid).await?; + + let roles = Role::fetch_all(conn, self.uuid).await?; + + Ok(Guild { + uuid: self.uuid, + name: self.name, + description: self.description, + icon: self.icon.and_then(|i| i.parse().ok()), + roles, + member_count, + }) + } +} + +#[derive(Serialize)] +pub struct Guild { + pub uuid: Uuid, + name: String, + description: Option, + icon: Option, + pub roles: Vec, + member_count: i64, +} + +impl Guild { + pub async fn fetch_one(conn: &mut Conn, guild_uuid: Uuid) -> Result { + use guilds::dsl; + let guild_builder: GuildBuilder = dsl::guilds + .filter(dsl::uuid.eq(guild_uuid)) + .select(GuildBuilder::as_select()) + .get_result(conn) + .await?; + + guild_builder.build(conn).await + } + + pub async fn fetch_amount( + pool: &deadpool::managed::Pool< + AsyncDieselConnectionManager, + Conn, + >, + offset: i64, + amount: i64, + ) -> Result, Error> { + // Fetch guild data from database + let mut conn = pool.get().await?; + + use guilds::dsl; + let guild_builders: Vec = load_or_empty( + dsl::guilds + .select(GuildBuilder::as_select()) + .order_by(dsl::uuid) + .offset(offset) + .limit(amount) + .load(&mut conn) + .await, + )?; + + // Process each guild concurrently + let guild_futures = guild_builders.iter().map(async move |g| { + let mut conn = pool.get().await?; + g.clone().build(&mut conn).await + }); + + // Execute all futures concurrently and collect results + futures::future::try_join_all(guild_futures).await + } + + pub async fn new(conn: &mut Conn, name: String, owner_uuid: Uuid) -> Result { + let guild_uuid = Uuid::now_v7(); + + let guild_builder = GuildBuilder { + uuid: guild_uuid, + name: name.clone(), + description: None, + icon: None, + }; + + insert_into(guilds::table) + .values(guild_builder) + .execute(conn) + .await?; + + let member_uuid = Uuid::now_v7(); + + let member = MemberBuilder { + uuid: member_uuid, + nickname: None, + user_uuid: owner_uuid, + guild_uuid, + is_owner: true, + }; + + insert_into(guild_members::table) + .values(member) + .execute(conn) + .await?; + + Ok(Guild { + uuid: guild_uuid, + name, + description: None, + icon: None, + roles: vec![], + member_count: 1, + }) + } + + pub async fn get_invites(&self, conn: &mut Conn) -> Result, Error> { + use invites::dsl; + 
let invites = load_or_empty( + dsl::invites + .filter(dsl::guild_uuid.eq(self.uuid)) + .select(Invite::as_select()) + .load(conn) + .await, + )?; + + Ok(invites) + } + + pub async fn create_invite( + &self, + conn: &mut Conn, + user_uuid: Uuid, + custom_id: Option, + ) -> Result { + let invite_id; + + if let Some(id) = custom_id { + invite_id = id; + if invite_id.len() > 32 { + return Err(Error::BadRequest("MAX LENGTH".to_string())); + } + } else { + let charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; + + invite_id = random_string::generate(8, charset); + } + + let invite = Invite { + id: invite_id, + user_uuid, + guild_uuid: self.uuid, + }; + + insert_into(invites::table) + .values(invite.clone()) + .execute(conn) + .await?; + + Ok(invite) + } + + // FIXME: Horrible security + pub async fn set_icon( + &mut self, + bunny_storage: &bunny_api_tokio::EdgeStorageClient, + conn: &mut Conn, + cdn_url: Url, + icon: BytesMut, + ) -> Result<(), Error> { + let icon_clone = icon.clone(); + let image_type = task::spawn_blocking(move || image_check(icon_clone)).await??; + + if let Some(icon) = &self.icon { + let relative_url = icon.path().trim_start_matches('/'); + + bunny_storage.delete(relative_url).await?; + } + + let path = format!("icons/{}/icon.{}", self.uuid, image_type); + + bunny_storage.upload(path.clone(), icon.into()).await?; + + let icon_url = cdn_url.join(&path)?; + + use guilds::dsl; + update(guilds::table) + .filter(dsl::uuid.eq(self.uuid)) + .set(dsl::icon.eq(icon_url.as_str())) + .execute(conn) + .await?; + + self.icon = Some(icon_url); + + Ok(()) + } +} diff --git a/src/objects/invite.rs b/src/objects/invite.rs new file mode 100644 index 0000000..5e0827e --- /dev/null +++ b/src/objects/invite.rs @@ -0,0 +1,30 @@ +use diesel::{ExpressionMethods, Insertable, QueryDsl, Queryable, Selectable, SelectableHelper}; +use diesel_async::RunQueryDsl; +use serde::Serialize; +use uuid::Uuid; + +use crate::{Conn, error::Error, schema::invites}; + +/// Server invite struct +#[derive(Clone, Serialize, Queryable, Selectable, Insertable)] +pub struct Invite { + /// case-sensitive alphanumeric string with a fixed length of 8 characters, can be up to 32 characters for custom invites + pub id: String, + /// User that created the invite + pub user_uuid: Uuid, + /// UUID of the guild that the invite belongs to + pub guild_uuid: Uuid, +} + +impl Invite { + pub async fn fetch_one(conn: &mut Conn, invite_id: String) -> Result { + use invites::dsl; + let invite: Invite = dsl::invites + .filter(dsl::id.eq(invite_id)) + .select(Invite::as_select()) + .get_result(conn) + .await?; + + Ok(invite) + } +} diff --git a/src/objects/me.rs b/src/objects/me.rs new file mode 100644 index 0000000..e183c5d --- /dev/null +++ b/src/objects/me.rs @@ -0,0 +1,231 @@ +use actix_web::web::BytesMut; +use diesel::{ExpressionMethods, QueryDsl, Queryable, Selectable, SelectableHelper, update}; +use diesel_async::RunQueryDsl; +use serde::Serialize; +use tokio::task; +use url::Url; +use uuid::Uuid; + +use crate::{ + Conn, Data, + error::Error, + schema::{guild_members, guilds, users}, + utils::{EMAIL_REGEX, USERNAME_REGEX, image_check}, +}; + +use super::{Guild, guild::GuildBuilder, load_or_empty, member::MemberBuilder}; + +#[derive(Serialize, Queryable, Selectable)] +#[diesel(table_name = users)] +#[diesel(check_for_backend(diesel::pg::Pg))] +pub struct Me { + pub uuid: Uuid, + pub username: String, + pub display_name: Option, + avatar: Option, + pronouns: Option, + about: Option, + pub email: String, + pub 
email_verified: bool, +} + +impl Me { + pub async fn get(conn: &mut Conn, user_uuid: Uuid) -> Result { + use users::dsl; + let me: Me = dsl::users + .filter(dsl::uuid.eq(user_uuid)) + .select(Me::as_select()) + .get_result(conn) + .await?; + + Ok(me) + } + + pub async fn fetch_memberships(&self, conn: &mut Conn) -> Result, Error> { + use guild_members::dsl; + let memberships: Vec = load_or_empty( + dsl::guild_members + .filter(dsl::user_uuid.eq(self.uuid)) + .select(MemberBuilder::as_select()) + .load(conn) + .await, + )?; + + let mut guilds: Vec = vec![]; + + for membership in memberships { + use guilds::dsl; + guilds.push( + dsl::guilds + .filter(dsl::uuid.eq(membership.guild_uuid)) + .select(GuildBuilder::as_select()) + .get_result(conn) + .await? + .build(conn) + .await?, + ) + } + + Ok(guilds) + } + + pub async fn set_avatar( + &mut self, + data: &Data, + cdn_url: Url, + avatar: BytesMut, + ) -> Result<(), Error> { + let avatar_clone = avatar.clone(); + let image_type = task::spawn_blocking(move || image_check(avatar_clone)).await??; + + let mut conn = data.pool.get().await?; + + if let Some(avatar) = &self.avatar { + let avatar_url: Url = avatar.parse()?; + + let relative_url = avatar_url.path().trim_start_matches('/'); + + data.bunny_storage.delete(relative_url).await?; + } + + let path = format!("avatar/{}/avatar.{}", self.uuid, image_type); + + data.bunny_storage + .upload(path.clone(), avatar.into()) + .await?; + + let avatar_url = cdn_url.join(&path)?; + + use users::dsl; + update(users::table) + .filter(dsl::uuid.eq(self.uuid)) + .set(dsl::avatar.eq(avatar_url.as_str())) + .execute(&mut conn) + .await?; + + if data.get_cache_key(self.uuid.to_string()).await.is_ok() { + data.del_cache_key(self.uuid.to_string()).await? + } + + self.avatar = Some(avatar_url.to_string()); + + Ok(()) + } + + pub async fn verify_email(&self, conn: &mut Conn) -> Result<(), Error> { + use users::dsl; + update(users::table) + .filter(dsl::uuid.eq(self.uuid)) + .set(dsl::email_verified.eq(true)) + .execute(conn) + .await?; + + Ok(()) + } + + pub async fn set_username(&mut self, data: &Data, new_username: String) -> Result<(), Error> { + if !USERNAME_REGEX.is_match(&new_username) { + return Err(Error::BadRequest("Invalid username".to_string())); + } + + let mut conn = data.pool.get().await?; + + use users::dsl; + update(users::table) + .filter(dsl::uuid.eq(self.uuid)) + .set(dsl::username.eq(new_username.as_str())) + .execute(&mut conn) + .await?; + + if data.get_cache_key(self.uuid.to_string()).await.is_ok() { + data.del_cache_key(self.uuid.to_string()).await? + } + + self.username = new_username; + + Ok(()) + } + + pub async fn set_display_name( + &mut self, + data: &Data, + new_display_name: String, + ) -> Result<(), Error> { + let mut conn = data.pool.get().await?; + + use users::dsl; + update(users::table) + .filter(dsl::uuid.eq(self.uuid)) + .set(dsl::display_name.eq(new_display_name.as_str())) + .execute(&mut conn) + .await?; + + if data.get_cache_key(self.uuid.to_string()).await.is_ok() { + data.del_cache_key(self.uuid.to_string()).await? 
+ } + + self.display_name = Some(new_display_name); + + Ok(()) + } + + pub async fn set_email(&mut self, data: &Data, new_email: String) -> Result<(), Error> { + if !EMAIL_REGEX.is_match(&new_email) { + return Err(Error::BadRequest("Invalid username".to_string())); + } + + let mut conn = data.pool.get().await?; + + use users::dsl; + update(users::table) + .filter(dsl::uuid.eq(self.uuid)) + .set(( + dsl::email.eq(new_email.as_str()), + dsl::email_verified.eq(false), + )) + .execute(&mut conn) + .await?; + + if data.get_cache_key(self.uuid.to_string()).await.is_ok() { + data.del_cache_key(self.uuid.to_string()).await? + } + + self.email = new_email; + + Ok(()) + } + + pub async fn set_pronouns(&mut self, data: &Data, new_pronouns: String) -> Result<(), Error> { + let mut conn = data.pool.get().await?; + + use users::dsl; + update(users::table) + .filter(dsl::uuid.eq(self.uuid)) + .set((dsl::pronouns.eq(new_pronouns.as_str()),)) + .execute(&mut conn) + .await?; + + if data.get_cache_key(self.uuid.to_string()).await.is_ok() { + data.del_cache_key(self.uuid.to_string()).await? + } + + Ok(()) + } + + pub async fn set_about(&mut self, data: &Data, new_about: String) -> Result<(), Error> { + let mut conn = data.pool.get().await?; + + use users::dsl; + update(users::table) + .filter(dsl::uuid.eq(self.uuid)) + .set((dsl::about.eq(new_about.as_str()),)) + .execute(&mut conn) + .await?; + + if data.get_cache_key(self.uuid.to_string()).await.is_ok() { + data.del_cache_key(self.uuid.to_string()).await? + } + + Ok(()) + } +} diff --git a/src/objects/member.rs b/src/objects/member.rs new file mode 100644 index 0000000..20bc848 --- /dev/null +++ b/src/objects/member.rs @@ -0,0 +1,141 @@ +use diesel::{ + ExpressionMethods, Insertable, QueryDsl, Queryable, Selectable, SelectableHelper, insert_into, +}; +use diesel_async::RunQueryDsl; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::{error::Error, objects::{Permissions, Role}, schema::guild_members, Conn, Data}; + +use super::{User, load_or_empty}; + +#[derive(Serialize, Queryable, Selectable, Insertable)] +#[diesel(table_name = guild_members)] +#[diesel(check_for_backend(diesel::pg::Pg))] +pub struct MemberBuilder { + pub uuid: Uuid, + pub nickname: Option, + pub user_uuid: Uuid, + pub guild_uuid: Uuid, + pub is_owner: bool, +} + +impl MemberBuilder { + pub async fn build(&self, data: &Data) -> Result { + let user = User::fetch_one(data, self.user_uuid).await?; + + Ok(Member { + uuid: self.uuid, + nickname: self.nickname.clone(), + user_uuid: self.user_uuid, + guild_uuid: self.guild_uuid, + is_owner: self.is_owner, + user, + }) + } + + pub async fn check_permission(&self, data: &Data, permission: Permissions) -> Result<(), Error> { + if !self.is_owner { + let roles = Role::fetch_from_member(&data, self.uuid).await?; + let allowed = roles.iter().any(|r| r.permissions & permission as i64 != 0); + if !allowed { + return Err(Error::Forbidden("Not allowed".to_string())) + } + } + + Ok(()) + } +} + +#[derive(Serialize, Deserialize)] +pub struct Member { + pub uuid: Uuid, + pub nickname: Option, + pub user_uuid: Uuid, + pub guild_uuid: Uuid, + pub is_owner: bool, + user: User, +} + +impl Member { + pub async fn count(conn: &mut Conn, guild_uuid: Uuid) -> Result { + use guild_members::dsl; + let count: i64 = dsl::guild_members + .filter(dsl::guild_uuid.eq(guild_uuid)) + .count() + .get_result(conn) + .await?; + + Ok(count) + } + + pub async fn check_membership( + conn: &mut Conn, + user_uuid: Uuid, + guild_uuid: Uuid, + ) -> Result { + use 
guild_members::dsl; + let member_builder = dsl::guild_members + .filter(dsl::user_uuid.eq(user_uuid)) + .filter(dsl::guild_uuid.eq(guild_uuid)) + .select(MemberBuilder::as_select()) + .get_result(conn) + .await?; + + Ok(member_builder) + } + + pub async fn fetch_one(data: &Data, user_uuid: Uuid, guild_uuid: Uuid) -> Result { + let mut conn = data.pool.get().await?; + + use guild_members::dsl; + let member: MemberBuilder = dsl::guild_members + .filter(dsl::user_uuid.eq(user_uuid)) + .filter(dsl::guild_uuid.eq(guild_uuid)) + .select(MemberBuilder::as_select()) + .get_result(&mut conn) + .await?; + + member.build(data).await + } + + pub async fn fetch_all(data: &Data, guild_uuid: Uuid) -> Result, Error> { + let mut conn = data.pool.get().await?; + + use guild_members::dsl; + let member_builders: Vec = load_or_empty( + dsl::guild_members + .filter(dsl::guild_uuid.eq(guild_uuid)) + .select(MemberBuilder::as_select()) + .load(&mut conn) + .await, + )?; + + let member_futures = member_builders + .iter() + .map(async move |m| m.build(data).await); + + futures::future::try_join_all(member_futures).await + } + + pub async fn new(data: &Data, user_uuid: Uuid, guild_uuid: Uuid) -> Result { + let mut conn = data.pool.get().await?; + + let member_uuid = Uuid::now_v7(); + + let member = MemberBuilder { + uuid: member_uuid, + guild_uuid, + user_uuid, + nickname: None, + is_owner: false, + }; + + insert_into(guild_members::table) + .values(&member) + .execute(&mut conn) + .await?; + + member.build(data).await + } +} diff --git a/src/objects/message.rs b/src/objects/message.rs new file mode 100644 index 0000000..6c1700a --- /dev/null +++ b/src/objects/message.rs @@ -0,0 +1,40 @@ +use diesel::{Insertable, Queryable, Selectable}; +use serde::Serialize; +use uuid::Uuid; + +use crate::{Data, error::Error, schema::messages}; + +use super::User; + +#[derive(Clone, Queryable, Selectable, Insertable)] +#[diesel(table_name = messages)] +#[diesel(check_for_backend(diesel::pg::Pg))] +pub struct MessageBuilder { + pub uuid: Uuid, + pub channel_uuid: Uuid, + pub user_uuid: Uuid, + pub message: String, +} + +impl MessageBuilder { + pub async fn build(&self, data: &Data) -> Result { + let user = User::fetch_one(data, self.user_uuid).await?; + + Ok(Message { + uuid: self.uuid, + channel_uuid: self.channel_uuid, + user_uuid: self.user_uuid, + message: self.message.clone(), + user, + }) + } +} + +#[derive(Clone, Serialize)] +pub struct Message { + uuid: Uuid, + channel_uuid: Uuid, + user_uuid: Uuid, + message: String, + user: User, +} diff --git a/src/objects/mod.rs b/src/objects/mod.rs new file mode 100644 index 0000000..30a0a64 --- /dev/null +++ b/src/objects/mod.rs @@ -0,0 +1,119 @@ +use lettre::{ + AsyncSmtpTransport, AsyncTransport, Message as Email, Tokio1Executor, + message::{Mailbox, MessageBuilder as EmailBuilder}, + transport::smtp::authentication::Credentials, +}; +use log::debug; +use serde::Deserialize; +use uuid::Uuid; + +mod channel; +mod email_token; +mod guild; +mod invite; +mod me; +mod member; +mod message; +mod password_reset_token; +mod role; +mod user; + +pub use channel::Channel; +pub use email_token::EmailToken; +pub use guild::Guild; +pub use invite::Invite; +pub use me::Me; +pub use member::Member; +pub use message::Message; +pub use password_reset_token::PasswordResetToken; +pub use role::Role; +pub use role::Permissions; +pub use user::User; + +use crate::error::Error; + +pub trait HasUuid { + fn uuid(&self) -> &Uuid; +} + +pub trait HasIsAbove { + fn is_above(&self) -> Option<&Uuid>; +} + +fn 
load_or_empty( + query_result: Result, diesel::result::Error>, +) -> Result, diesel::result::Error> { + match query_result { + Ok(vec) => Ok(vec), + Err(diesel::result::Error::NotFound) => Ok(Vec::new()), + Err(e) => Err(e), + } +} + +#[derive(PartialEq, Eq, Clone)] +pub enum MailTls { + StartTls, + Tls, +} + +impl From for MailTls { + fn from(value: String) -> Self { + match &*value.to_lowercase() { + "starttls" => Self::StartTls, + _ => Self::Tls, + } + } +} + +#[derive(Clone)] +pub struct MailClient { + creds: Credentials, + smtp_server: String, + mbox: Mailbox, + tls: MailTls, +} + +impl MailClient { + pub fn new>( + creds: Credentials, + smtp_server: String, + mbox: String, + tls: T, + ) -> Result { + Ok(Self { + creds, + smtp_server, + mbox: mbox.parse()?, + tls: tls.into(), + }) + } + + pub fn message_builder(&self) -> EmailBuilder { + Email::builder().from(self.mbox.clone()) + } + + pub async fn send_mail(&self, email: Email) -> Result<(), Error> { + let mailer: AsyncSmtpTransport = match self.tls { + MailTls::StartTls => { + AsyncSmtpTransport::::starttls_relay(&self.smtp_server)? + .credentials(self.creds.clone()) + .build() + } + MailTls::Tls => AsyncSmtpTransport::::relay(&self.smtp_server)? + .credentials(self.creds.clone()) + .build(), + }; + + let response = mailer.send(email).await?; + + debug!("mail sending response: {:?}", response); + + Ok(()) + } +} + +#[derive(Deserialize)] +pub struct StartAmountQuery { + pub start: Option, + pub amount: Option, +} diff --git a/src/objects/password_reset_token.rs b/src/objects/password_reset_token.rs new file mode 100644 index 0000000..e14d25a --- /dev/null +++ b/src/objects/password_reset_token.rs @@ -0,0 +1,146 @@ +use argon2::{ + PasswordHasher, + password_hash::{SaltString, rand_core::OsRng}, +}; +use chrono::Utc; +use diesel::{ + ExpressionMethods, QueryDsl, update, +}; +use diesel_async::RunQueryDsl; +use lettre::message::MultiPart; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::{ + error::Error, + schema::users, + utils::{generate_token, global_checks, user_uuid_from_identifier, PASSWORD_REGEX}, + Data +}; + +#[derive(Serialize, Deserialize)] +pub struct PasswordResetToken { + user_uuid: Uuid, + pub token: String, + pub created_at: chrono::DateTime, +} + +impl PasswordResetToken { + pub async fn get(data: &Data, token: String) -> Result { + let user_uuid: Uuid = serde_json::from_str(&data.get_cache_key(format!("{}", token)).await?)?; + let password_reset_token = serde_json::from_str(&data.get_cache_key(format!("{}_password_reset", user_uuid)).await?)?; + + Ok(password_reset_token) + } + + pub async fn get_with_identifier( + data: &Data, + identifier: String, + ) -> Result { + let mut conn = data.pool.get().await?; + + let user_uuid = user_uuid_from_identifier(&mut conn, &identifier).await?; + + let password_reset_token = serde_json::from_str(&data.get_cache_key(format!("{}_password_reset", user_uuid)).await?)?; + + Ok(password_reset_token) + } + + #[allow(clippy::new_ret_no_self)] + pub async fn new(data: &Data, identifier: String) -> Result<(), Error> { + let token = generate_token::<32>()?; + + let mut conn = data.pool.get().await?; + + let user_uuid = user_uuid_from_identifier(&mut conn, &identifier).await?; + + global_checks(data, user_uuid).await?; + + use users::dsl as udsl; + let (username, email_address): (String, String) = udsl::users + .filter(udsl::uuid.eq(user_uuid)) + .select((udsl::username, udsl::email)) + .get_result(&mut conn) + .await?; + + let password_reset_token = PasswordResetToken { 
+ user_uuid, + token: token.clone(), + created_at: Utc::now(), + }; + + data.set_cache_key(format!("{}_password_reset", user_uuid), password_reset_token, 86400).await?; + data.set_cache_key(token.clone(), user_uuid, 86400).await?; + + let mut reset_endpoint = data.config.web.frontend_url.join("reset-password")?; + + reset_endpoint.set_query(Some(&format!("token={}", token))); + + let email = data + .mail_client + .message_builder() + .to(email_address.parse()?) + .subject(format!("{} Password Reset", data.config.instance.name)) + .multipart(MultiPart::alternative_plain_html( + format!("{} Password Reset\n\nHello, {}!\nSomeone requested a password reset for your Gorb account.\nClick the button below within 24 hours to reset your password.\n\n{}\n\nIf you didn't request a password reset, don't worry, your account is safe and you can safely ignore this email.\n\nThanks, The gorb team.", data.config.instance.name, username, reset_endpoint), + format!(r#"

<h1>{} Password Reset</h1>

<p>Hello, {}!</p>

<p>Someone requested a password reset for your Gorb account.</p>

<p>Click the button below within 24 hours to reset your password.</p>

<a href="{}">RESET PASSWORD</a>

<p>If you didn't request a password reset, don't worry, your account is safe and you can safely ignore this email.</p>

"#, data.config.instance.name, username, reset_endpoint) + ))?; + + data.mail_client.send_mail(email).await?; + + Ok(()) + } + + pub async fn set_password(&self, data: &Data, password: String) -> Result<(), Error> { + if !PASSWORD_REGEX.is_match(&password) { + return Err(Error::BadRequest( + "Please provide a valid password".to_string(), + )); + } + + let salt = SaltString::generate(&mut OsRng); + + let hashed_password = data + .argon2 + .hash_password(password.as_bytes(), &salt) + .map_err(|e| Error::PasswordHashError(e.to_string()))?; + + let mut conn = data.pool.get().await?; + + use users::dsl; + update(users::table) + .filter(dsl::uuid.eq(self.user_uuid)) + .set(dsl::password.eq(hashed_password.to_string())) + .execute(&mut conn) + .await?; + + let (username, email_address): (String, String) = dsl::users + .filter(dsl::uuid.eq(self.user_uuid)) + .select((dsl::username, dsl::email)) + .get_result(&mut conn) + .await?; + + let login_page = data.config.web.frontend_url.join("login")?; + + let email = data + .mail_client + .message_builder() + .to(email_address.parse()?) + .subject(format!("Your {} Password has been Reset", data.config.instance.name)) + .multipart(MultiPart::alternative_plain_html( + format!("{} Password Reset Confirmation\n\nHello, {}!\nYour password has been successfully reset for your Gorb account.\nIf you did not initiate this change, please click the link below to reset your password immediately.\n\n{}\n\nThanks, The gorb team.", data.config.instance.name, username, login_page), + format!(r#"

{} Password Reset Confirmation

Hello, {}!

Your password has been successfully reset for your Gorb account.

If you did not initiate this change, please click the button below to reset your password immediately.

RESET PASSWORD
"#, data.config.instance.name, username, login_page) + ))?; + + data.mail_client.send_mail(email).await?; + + self.delete(&data).await + } + + pub async fn delete(&self, data: &Data) -> Result<(), Error> { + data.del_cache_key(format!("{}_password_reset", &self.user_uuid)).await?; + data.del_cache_key(format!("{}", &self.token)).await?; + + Ok(()) + } +} diff --git a/src/objects/role.rs b/src/objects/role.rs new file mode 100644 index 0000000..a78798a --- /dev/null +++ b/src/objects/role.rs @@ -0,0 +1,186 @@ +use diesel::{ + ExpressionMethods, Insertable, QueryDsl, Queryable, Selectable, SelectableHelper, insert_into, + update, +}; +use diesel_async::RunQueryDsl; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::{error::Error, schema::{role_members, roles}, utils::order_by_is_above, Conn, Data}; + +use super::{HasIsAbove, HasUuid, load_or_empty}; + +#[derive(Deserialize, Serialize, Clone, Queryable, Selectable, Insertable)] +#[diesel(table_name = roles)] +#[diesel(check_for_backend(diesel::pg::Pg))] +pub struct Role { + uuid: Uuid, + guild_uuid: Uuid, + name: String, + color: i32, + is_above: Option, + pub permissions: i64, +} + +#[derive(Serialize, Clone, Queryable, Selectable, Insertable)] +#[diesel(table_name = role_members)] +#[diesel(check_for_backend(diesel::pg::Pg))] +pub struct RoleMember { + role_uuid: Uuid, + member_uuid: Uuid, +} + +impl RoleMember { + async fn fetch_role(&self, conn: &mut Conn) -> Result { + use roles::dsl; + let role: Role = dsl::roles + .filter(dsl::uuid.eq(self.role_uuid)) + .select(Role::as_select()) + .get_result(conn) + .await?; + + Ok(role) + } +} + +impl HasUuid for Role { + fn uuid(&self) -> &Uuid { + self.uuid.as_ref() + } +} + +impl HasIsAbove for Role { + fn is_above(&self) -> Option<&Uuid> { + self.is_above.as_ref() + } +} + +impl Role { + pub async fn fetch_all(conn: &mut Conn, guild_uuid: Uuid) -> Result, Error> { + use roles::dsl; + let roles: Vec = load_or_empty( + dsl::roles + .filter(dsl::guild_uuid.eq(guild_uuid)) + .select(Role::as_select()) + .load(conn) + .await, + )?; + + Ok(roles) + } + + pub async fn fetch_from_member(data: &Data, member_uuid: Uuid) -> Result, Error> { + if let Ok(roles) = data.get_cache_key(format!("{}_roles", member_uuid)).await { + return Ok(serde_json::from_str(&roles)?) 
+ } + + let mut conn = data.pool.get().await?; + + use role_members::dsl; + let role_memberships: Vec = load_or_empty( + dsl::role_members + .filter(dsl::member_uuid.eq(member_uuid)) + .select(RoleMember::as_select()) + .load(&mut conn) + .await, + )?; + + let mut roles = vec![]; + + for membership in role_memberships { + roles.push(membership.fetch_role(&mut conn).await?); + } + + data.set_cache_key(format!("{}_roles", member_uuid), roles.clone(), 300).await?; + + Ok(roles) + } + + pub async fn fetch_one(conn: &mut Conn, role_uuid: Uuid) -> Result { + use roles::dsl; + let role: Role = dsl::roles + .filter(dsl::uuid.eq(role_uuid)) + .select(Role::as_select()) + .get_result(conn) + .await?; + + Ok(role) + } + + pub async fn fetch_permissions(&self) -> Vec { + Permissions::fetch_permissions(self.permissions.clone()) + } + + pub async fn new(conn: &mut Conn, guild_uuid: Uuid, name: String) -> Result { + let role_uuid = Uuid::now_v7(); + + let roles = Self::fetch_all(conn, guild_uuid).await?; + + let roles_ordered = order_by_is_above(roles).await?; + + let last_role = roles_ordered.last(); + + let new_role = Role { + uuid: role_uuid, + guild_uuid, + name, + color: 16777215, + is_above: None, + permissions: 0, + }; + + insert_into(roles::table) + .values(new_role.clone()) + .execute(conn) + .await?; + + if let Some(old_last_role) = last_role { + use roles::dsl; + update(roles::table) + .filter(dsl::uuid.eq(old_last_role.uuid)) + .set(dsl::is_above.eq(new_role.uuid)) + .execute(conn) + .await?; + } + + Ok(new_role) + } +} + +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum Permissions { + SendMessage = 1, + CreateChannel = 2, + DeleteChannel = 4, + ManageChannel = 8, + CreateRole = 16, + DeleteRole = 32, + ManageRole = 64, + CreateInvite = 128, + ManageInvite = 256, + ManageServer = 512, + ManageMember = 1024, +} + +impl Permissions { + pub fn fetch_permissions(permissions: i64) -> Vec { + let all_perms = vec![ + Self::SendMessage, + Self::CreateChannel, + Self::DeleteChannel, + Self::ManageChannel, + Self::CreateRole, + Self::DeleteRole, + Self::ManageRole, + Self::CreateInvite, + Self::ManageInvite, + Self::ManageServer, + Self::ManageMember, + ]; + + all_perms + .into_iter() + .filter(|p| permissions & (*p as i64) != 0) + .collect() + } +} diff --git a/src/objects/user.rs b/src/objects/user.rs new file mode 100644 index 0000000..98e5e80 --- /dev/null +++ b/src/objects/user.rs @@ -0,0 +1,60 @@ +use diesel::{ExpressionMethods, QueryDsl, Queryable, Selectable, SelectableHelper}; +use diesel_async::RunQueryDsl; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::{Conn, Data, error::Error, schema::users}; + +use super::load_or_empty; + +#[derive(Deserialize, Serialize, Clone, Queryable, Selectable)] +#[diesel(table_name = users)] +#[diesel(check_for_backend(diesel::pg::Pg))] +pub struct User { + uuid: Uuid, + username: String, + display_name: Option, + avatar: Option, + pronouns: Option, + about: Option, +} + +impl User { + pub async fn fetch_one(data: &Data, user_uuid: Uuid) -> Result { + let mut conn = data.pool.get().await?; + + if let Ok(cache_hit) = data.get_cache_key(user_uuid.to_string()).await { + return Ok(serde_json::from_str(&cache_hit)?); + } + + use users::dsl; + let user: User = dsl::users + .filter(dsl::uuid.eq(user_uuid)) + .select(User::as_select()) + .get_result(&mut conn) + .await?; + + data.set_cache_key(user_uuid.to_string(), user.clone(), 1800) + .await?; + + Ok(user) + } + + pub async fn fetch_amount( + conn: &mut Conn, + offset: i64, + amount: i64, + ) -> 
Result, Error> { + use users::dsl; + let users: Vec = load_or_empty( + dsl::users + .limit(amount) + .offset(offset) + .select(User::as_select()) + .load(conn) + .await, + )?; + + Ok(users) + } +} diff --git a/src/schema.rs b/src/schema.rs new file mode 100644 index 0000000..c7a350c --- /dev/null +++ b/src/schema.rs @@ -0,0 +1,164 @@ +// @generated automatically by Diesel CLI. + +diesel::table! { + access_tokens (token) { + #[max_length = 32] + token -> Varchar, + #[max_length = 64] + refresh_token -> Varchar, + uuid -> Uuid, + created_at -> Int8, + } +} + +diesel::table! { + channel_permissions (channel_uuid, role_uuid) { + channel_uuid -> Uuid, + role_uuid -> Uuid, + permissions -> Int8, + } +} + +diesel::table! { + channels (uuid) { + uuid -> Uuid, + guild_uuid -> Uuid, + #[max_length = 32] + name -> Varchar, + #[max_length = 500] + description -> Nullable, + is_above -> Nullable, + } +} + +diesel::table! { + guild_members (uuid) { + uuid -> Uuid, + guild_uuid -> Uuid, + user_uuid -> Uuid, + #[max_length = 100] + nickname -> Nullable, + is_owner -> Bool, + } +} + +diesel::table! { + guilds (uuid) { + uuid -> Uuid, + #[max_length = 100] + name -> Varchar, + #[max_length = 300] + description -> Nullable, + #[max_length = 100] + icon -> Nullable, + } +} + +diesel::table! { + instance_permissions (uuid) { + uuid -> Uuid, + administrator -> Bool, + } +} + +diesel::table! { + invites (id) { + #[max_length = 32] + id -> Varchar, + guild_uuid -> Uuid, + user_uuid -> Uuid, + } +} + +diesel::table! { + messages (uuid) { + uuid -> Uuid, + channel_uuid -> Uuid, + user_uuid -> Uuid, + #[max_length = 4000] + message -> Varchar, + } +} + +diesel::table! { + refresh_tokens (token) { + #[max_length = 64] + token -> Varchar, + uuid -> Uuid, + created_at -> Int8, + #[max_length = 16] + device_name -> Varchar, + } +} + +diesel::table! { + role_members (role_uuid, member_uuid) { + role_uuid -> Uuid, + member_uuid -> Uuid, + } +} + +diesel::table! { + roles (uuid, guild_uuid) { + uuid -> Uuid, + guild_uuid -> Uuid, + #[max_length = 50] + name -> Varchar, + color -> Int4, + permissions -> Int8, + is_above -> Nullable, + } +} + +diesel::table! 
{ + users (uuid) { + uuid -> Uuid, + #[max_length = 32] + username -> Varchar, + #[max_length = 64] + display_name -> Nullable, + #[max_length = 512] + password -> Varchar, + #[max_length = 100] + email -> Varchar, + email_verified -> Bool, + is_deleted -> Bool, + deleted_at -> Nullable, + #[max_length = 100] + avatar -> Nullable, + #[max_length = 32] + pronouns -> Nullable, + #[max_length = 200] + about -> Nullable, + } +} + +diesel::joinable!(access_tokens -> refresh_tokens (refresh_token)); +diesel::joinable!(access_tokens -> users (uuid)); +diesel::joinable!(channel_permissions -> channels (channel_uuid)); +diesel::joinable!(channels -> guilds (guild_uuid)); +diesel::joinable!(guild_members -> guilds (guild_uuid)); +diesel::joinable!(guild_members -> users (user_uuid)); +diesel::joinable!(instance_permissions -> users (uuid)); +diesel::joinable!(invites -> guilds (guild_uuid)); +diesel::joinable!(invites -> users (user_uuid)); +diesel::joinable!(messages -> channels (channel_uuid)); +diesel::joinable!(messages -> users (user_uuid)); +diesel::joinable!(refresh_tokens -> users (uuid)); +diesel::joinable!(role_members -> guild_members (member_uuid)); +diesel::joinable!(roles -> guilds (guild_uuid)); + +diesel::allow_tables_to_appear_in_same_query!( + access_tokens, + channel_permissions, + channels, + guild_members, + guilds, + instance_permissions, + invites, + messages, + refresh_tokens, + role_members, + roles, + users, +); diff --git a/src/structs.rs b/src/structs.rs deleted file mode 100644 index c1d503f..0000000 --- a/src/structs.rs +++ /dev/null @@ -1,784 +0,0 @@ -use std::str::FromStr; - -use actix_web::HttpResponse; -use log::error; -use serde::{Deserialize, Serialize}; -use sqlx::{Pool, Postgres, prelude::FromRow}; -use uuid::Uuid; - -use crate::Data; - -#[derive(Serialize, Deserialize, Clone)] -pub struct Channel { - pub uuid: Uuid, - pub guild_uuid: Uuid, - name: String, - description: Option, - pub permissions: Vec, -} - -#[derive(Serialize, Clone, FromRow)] -struct ChannelPermissionBuilder { - role_uuid: String, - permissions: i32, -} - -impl ChannelPermissionBuilder { - fn build(&self) -> ChannelPermission { - ChannelPermission { - role_uuid: Uuid::from_str(&self.role_uuid).unwrap(), - permissions: self.permissions, - } - } -} - -#[derive(Serialize, Deserialize, Clone, FromRow)] -pub struct ChannelPermission { - pub role_uuid: Uuid, - pub permissions: i32, -} - -impl Channel { - pub async fn fetch_all( - pool: &Pool, - guild_uuid: Uuid, - ) -> Result, HttpResponse> { - let row = sqlx::query_as(&format!( - "SELECT CAST(uuid AS VARCHAR), name, description FROM channels WHERE guild_uuid = '{}'", - guild_uuid - )) - .fetch_all(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - let channels: Vec<(String, String, Option)> = row.unwrap(); - - let futures = channels.iter().map(async |t| { - let (uuid, name, description) = t.to_owned(); - - let row = sqlx::query_as(&format!("SELECT CAST(role_uuid AS VARCHAR), permissions FROM channel_permissions WHERE channel_uuid = '{}'", uuid)) - .fetch_all(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()) - } - - let channel_permission_builders: Vec = row.unwrap(); - - Ok(Self { - uuid: Uuid::from_str(&uuid).unwrap(), - guild_uuid, - name, - description, - permissions: channel_permission_builders.iter().map(|b| b.build()).collect(), - }) - }); - - let channels = 
futures::future::join_all(futures).await; - - let channels: Result, HttpResponse> = channels.into_iter().collect(); - - channels - } - - pub async fn fetch_one( - pool: &Pool, - guild_uuid: Uuid, - channel_uuid: Uuid, - ) -> Result { - let row = sqlx::query_as(&format!( - "SELECT name, description FROM channels WHERE guild_uuid = '{}' AND uuid = '{}'", - guild_uuid, channel_uuid - )) - .fetch_one(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - let (name, description): (String, Option) = row.unwrap(); - - let row = sqlx::query_as(&format!("SELECT CAST(role_uuid AS VARCHAR), permissions FROM channel_permissions WHERE channel_uuid = '{}'", channel_uuid)) - .fetch_all(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - let channel_permission_builders: Vec = row.unwrap(); - - Ok(Self { - uuid: channel_uuid, - guild_uuid, - name, - description, - permissions: channel_permission_builders - .iter() - .map(|b| b.build()) - .collect(), - }) - } - - pub async fn new( - data: actix_web::web::Data, - guild_uuid: Uuid, - name: String, - description: Option, - ) -> Result { - let channel_uuid = Uuid::now_v7(); - - let row = sqlx::query(&format!("INSERT INTO channels (uuid, guild_uuid, name, description) VALUES ('{}', '{}', $1, $2)", channel_uuid, guild_uuid)) - .bind(&name) - .bind(&description) - .execute(&data.pool) - .await; - - if let Err(error) = row { - error!("{}", error); - return Err(HttpResponse::InternalServerError().finish()); - } - - let channel = Self { - uuid: channel_uuid, - guild_uuid, - name, - description, - permissions: vec![], - }; - - let cache_result = data - .set_cache_key(channel_uuid.to_string(), channel.clone(), 1800) - .await; - - if let Err(error) = cache_result { - error!("{}", error); - return Err(HttpResponse::InternalServerError().finish()); - } - - let cache_deletion_result = data.del_cache_key(format!("{}_channels", guild_uuid)).await; - - if let Err(error) = cache_deletion_result { - error!("{}", error); - return Err(HttpResponse::InternalServerError().finish()); - } - - Ok(channel) - } - - pub async fn delete(self, pool: &Pool) -> Result<(), HttpResponse> { - let result = sqlx::query(&format!( - "DELETE FROM channels WHERE channel_uuid = '{}'", - self.uuid - )) - .execute(pool) - .await; - - if let Err(error) = result { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - Ok(()) - } - - pub async fn fetch_messages( - &self, - pool: &Pool, - amount: i64, - offset: i64, - ) -> Result, HttpResponse> { - let row = sqlx::query_as(&format!("SELECT CAST(uuid AS VARCHAR), CAST(user_uuid AS VARCHAR), CAST(channel_uuid AS VARCHAR), message FROM messages WHERE channel_uuid = '{}' ORDER BY uuid DESC LIMIT $1 OFFSET $2", self.uuid)) - .bind(amount) - .bind(offset) - .fetch_all(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - return Err(HttpResponse::InternalServerError().finish()); - } - - let message_builders: Vec = row.unwrap(); - - Ok(message_builders.iter().map(|b| b.build()).collect()) - } - - pub async fn new_message( - &self, - pool: &Pool, - user_uuid: Uuid, - message: String, - ) -> Result { - let message_uuid = Uuid::now_v7(); - - let row = sqlx::query(&format!("INSERT INTO messages (uuid, channel_uuid, user_uuid, message) VALUES ('{}', '{}', '{}', $1)", message_uuid, self.uuid, user_uuid)) - .bind(&message) - .execute(pool) - .await; 
- - if let Err(error) = row { - error!("{}", error); - return Err(HttpResponse::InternalServerError().finish()); - } - - Ok(Message { - uuid: message_uuid, - channel_uuid: self.uuid, - user_uuid, - message, - }) - } -} - -#[derive(Clone, Copy)] -pub enum Permissions { - SendMessage = 1, - CreateChannel = 2, - DeleteChannel = 4, - ManageChannel = 8, - CreateRole = 16, - DeleteRole = 32, - ManageRole = 64, - CreateInvite = 128, - ManageInvite = 256, - ManageServer = 512, - ManageMember = 1024, -} - -impl Permissions { - pub fn fetch_permissions(permissions: i64) -> Vec { - let all_perms = vec![ - Self::SendMessage, - Self::CreateChannel, - Self::DeleteChannel, - Self::ManageChannel, - Self::CreateRole, - Self::DeleteRole, - Self::ManageRole, - Self::CreateInvite, - Self::ManageInvite, - Self::ManageServer, - Self::ManageMember, - ]; - - all_perms - .into_iter() - .filter(|p| permissions & (*p as i64) != 0) - .collect() - } -} - -#[derive(Serialize)] -pub struct Guild { - pub uuid: Uuid, - name: String, - description: Option, - icon: String, - owner_uuid: Uuid, - pub roles: Vec, - member_count: i64, -} - -impl Guild { - pub async fn fetch_one(pool: &Pool, guild_uuid: Uuid) -> Result { - let row = sqlx::query_as(&format!( - "SELECT CAST(owner_uuid AS VARCHAR), name, description FROM guilds WHERE uuid = '{}'", - guild_uuid - )) - .fetch_one(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - let (owner_uuid_raw, name, description): (String, String, Option) = row.unwrap(); - - let owner_uuid = Uuid::from_str(&owner_uuid_raw).unwrap(); - - let member_count = Member::count(pool, guild_uuid).await?; - - let roles = Role::fetch_all(pool, guild_uuid).await?; - - Ok(Self { - uuid: guild_uuid, - name, - description, - // FIXME: This isnt supposed to be bogus - icon: String::from("bogus"), - owner_uuid, - roles, - member_count, - }) - } - - pub async fn fetch_amount( - pool: &Pool, - start: i32, - amount: i32, - ) -> Result, HttpResponse> { - // Fetch guild data from database - let rows = sqlx::query_as::<_, (String, String, String, Option)>( - "SELECT CAST(uuid AS VARCHAR), CAST(owner_uuid AS VARCHAR), name, description - FROM guilds - ORDER BY name - LIMIT $1 OFFSET $2", - ) - .bind(amount) - .bind(start) - .fetch_all(pool) - .await - .map_err(|error| { - error!("{}", error); - HttpResponse::InternalServerError().finish() - })?; - - // Process each guild concurrently - let guild_futures = rows.into_iter().map(|(guild_uuid_raw, owner_uuid_raw, name, description)| async move { - let uuid = Uuid::from_str(&guild_uuid_raw).map_err(|_| { - HttpResponse::BadRequest().body("Invalid guild UUID format") - })?; - - let owner_uuid = Uuid::from_str(&owner_uuid_raw).map_err(|_| { - HttpResponse::BadRequest().body("Invalid owner UUID format") - })?; - - let (member_count, roles) = tokio::try_join!( - Member::count(pool, uuid), - Role::fetch_all(pool, uuid) - )?; - - Ok::(Self { - uuid, - name, - description, - icon: String::from("bogus"), // FIXME: Replace with actual icon handling - owner_uuid, - roles, - member_count, - }) - }); - - // Execute all futures concurrently and collect results - futures::future::try_join_all(guild_futures).await - } - - pub async fn new( - pool: &Pool, - name: String, - description: Option, - owner_uuid: Uuid, - ) -> Result { - let guild_uuid = Uuid::now_v7(); - - let row = sqlx::query(&format!( - "INSERT INTO guilds (uuid, owner_uuid, name, description) VALUES ('{}', '{}', $1, $2)", - guild_uuid, 
owner_uuid - )) - .bind(&name) - .bind(&description) - .execute(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - return Err(HttpResponse::InternalServerError().finish()); - } - - let row = sqlx::query(&format!( - "INSERT INTO guild_members (uuid, guild_uuid, user_uuid) VALUES ('{}', '{}', '{}')", - Uuid::now_v7(), - guild_uuid, - owner_uuid - )) - .execute(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - - let row = sqlx::query(&format!("DELETE FROM guilds WHERE uuid = '{}'", guild_uuid)) - .execute(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - } - - return Err(HttpResponse::InternalServerError().finish()); - } - - Ok(Guild { - uuid: guild_uuid, - name, - description, - icon: "bogus".to_string(), - owner_uuid, - roles: vec![], - member_count: 1, - }) - } - - pub async fn get_invites(&self, pool: &Pool) -> Result, HttpResponse> { - let invites = sqlx::query_as(&format!( - "SELECT (id, guild_uuid, user_uuid) FROM invites WHERE guild_uuid = '{}'", - self.uuid - )) - .fetch_all(pool) - .await; - - if let Err(error) = invites { - error!("{}", error); - return Err(HttpResponse::InternalServerError().finish()); - } - - Ok(invites - .unwrap() - .iter() - .map(|b: &InviteBuilder| b.build()) - .collect()) - } - - pub async fn create_invite( - &self, - pool: &Pool, - member: &Member, - custom_id: Option, - ) -> Result { - let invite_id; - - if custom_id.is_none() { - let charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; - - invite_id = random_string::generate(8, charset); - } else { - invite_id = custom_id.unwrap(); - if invite_id.len() > 32 { - return Err(HttpResponse::BadRequest().finish()); - } - } - - let result = sqlx::query(&format!( - "INSERT INTO invites (id, guild_uuid, user_uuid) VALUES ($1, '{}', '{}'", - self.uuid, member.user_uuid - )) - .bind(&invite_id) - .execute(pool) - .await; - - if let Err(error) = result { - error!("{}", error); - return Err(HttpResponse::InternalServerError().finish()); - } - - Ok(Invite { - id: invite_id, - user_uuid: member.user_uuid, - guild_uuid: self.uuid, - }) - } -} - -#[derive(FromRow)] -struct RoleBuilder { - uuid: String, - guild_uuid: String, - name: String, - color: i64, - position: i32, - permissions: i64, -} - -impl RoleBuilder { - fn build(&self) -> Role { - Role { - uuid: Uuid::from_str(&self.uuid).unwrap(), - guild_uuid: Uuid::from_str(&self.guild_uuid).unwrap(), - name: self.name.clone(), - color: self.color, - position: self.position, - permissions: self.permissions, - } - } -} - -#[derive(Serialize, Clone)] -pub struct Role { - uuid: Uuid, - guild_uuid: Uuid, - name: String, - color: i64, - position: i32, - permissions: i64, -} - -impl Role { - pub async fn fetch_all( - pool: &Pool, - guild_uuid: Uuid, - ) -> Result, HttpResponse> { - let role_builders_result = sqlx::query_as(&format!("SELECT (uuid, guild_uuid, name, color, position, permissions) FROM roles WHERE guild_uuid = '{}'", guild_uuid)) - .fetch_all(pool) - .await; - - if let Err(error) = role_builders_result { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - let role_builders: Vec = role_builders_result.unwrap(); - - Ok(role_builders.iter().map(|b| b.build()).collect()) - } - - pub async fn fetch_one( - pool: &Pool, - role_uuid: Uuid, - guild_uuid: Uuid, - ) -> Result { - let row = sqlx::query_as(&format!("SELECT (name, color, position, permissions) FROM roles WHERE guild_uuid = '{}' AND uuid = '{}'", guild_uuid, role_uuid)) - .fetch_one(pool) - 
.await; - - if let Err(error) = row { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - let (name, color, position, permissions) = row.unwrap(); - - Ok(Role { - uuid: role_uuid, - guild_uuid, - name, - color, - position, - permissions, - }) - } - - pub async fn new( - pool: &Pool, - guild_uuid: Uuid, - name: String, - ) -> Result { - let role_uuid = Uuid::now_v7(); - - let row = sqlx::query(&format!( - "INSERT INTO channels (uuid, guild_uuid, name, position) VALUES ('{}', '{}', $1, $2)", - role_uuid, guild_uuid - )) - .bind(&name) - .bind(0) - .execute(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - return Err(HttpResponse::InternalServerError().finish()); - } - - let role = Self { - uuid: role_uuid, - guild_uuid, - name, - color: 16777215, - position: 0, - permissions: 0, - }; - - Ok(role) - } -} - -pub struct Member { - pub uuid: Uuid, - pub nickname: Option, - pub user_uuid: Uuid, - pub guild_uuid: Uuid, -} - -impl Member { - async fn count(pool: &Pool, guild_uuid: Uuid) -> Result { - let member_count = sqlx::query_scalar(&format!( - "SELECT COUNT(uuid) FROM guild_members WHERE guild_uuid = '{}'", - guild_uuid - )) - .fetch_one(pool) - .await; - - if let Err(error) = member_count { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - Ok(member_count.unwrap()) - } - - pub async fn fetch_one( - pool: &Pool, - user_uuid: Uuid, - guild_uuid: Uuid, - ) -> Result { - let row = sqlx::query_as(&format!("SELECT CAST(uuid AS VARCHAR), nickname FROM guild_members WHERE guild_uuid = '{}' AND user_uuid = '{}'", guild_uuid, user_uuid)) - .fetch_one(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - let (uuid, nickname): (String, Option) = row.unwrap(); - - Ok(Self { - uuid: Uuid::from_str(&uuid).unwrap(), - nickname, - user_uuid, - guild_uuid, - }) - } - - pub async fn new( - pool: &Pool, - user_uuid: Uuid, - guild_uuid: Uuid, - ) -> Result { - let member_uuid = Uuid::now_v7(); - - let row = sqlx::query(&format!( - "INSERT INTO guild_members uuid, guild_uuid, user_uuid VALUES ('{}', '{}', '{}')", - member_uuid, guild_uuid, user_uuid - )) - .execute(pool) - .await; - - if let Err(error) = row { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - Ok(Self { - uuid: member_uuid, - nickname: None, - user_uuid, - guild_uuid, - }) - } -} - -#[derive(FromRow)] -struct MessageBuilder { - uuid: String, - channel_uuid: String, - user_uuid: String, - message: String, -} - -impl MessageBuilder { - fn build(&self) -> Message { - Message { - uuid: Uuid::from_str(&self.uuid).unwrap(), - channel_uuid: Uuid::from_str(&self.channel_uuid).unwrap(), - user_uuid: Uuid::from_str(&self.user_uuid).unwrap(), - message: self.message.clone(), - } - } -} - -#[derive(Serialize)] -pub struct Message { - uuid: Uuid, - channel_uuid: Uuid, - user_uuid: Uuid, - message: String, -} - -#[derive(FromRow)] -pub struct InviteBuilder { - id: String, - user_uuid: String, - guild_uuid: String, -} - -impl InviteBuilder { - fn build(&self) -> Invite { - Invite { - id: self.id.clone(), - user_uuid: Uuid::from_str(&self.user_uuid).unwrap(), - guild_uuid: Uuid::from_str(&self.guild_uuid).unwrap(), - } - } -} - -/// Server invite struct -#[derive(Serialize)] -pub struct Invite { - /// case-sensitive alphanumeric string with a fixed length of 8 characters, can be up to 32 characters for custom invites - id: String, - 
/// User that created the invite - user_uuid: Uuid, - /// UUID of the guild that the invite belongs to - pub guild_uuid: Uuid, -} - -impl Invite { - pub async fn fetch_one(pool: &Pool, invite_id: String) -> Result { - let invite: Result = - sqlx::query_as("SELECT id, user_uuid, guild_uuid FROM invites WHERE id = $1") - .bind(invite_id) - .fetch_one(pool) - .await; - - if let Err(error) = invite { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()); - } - - Ok(invite.unwrap().build()) - } - - pub async fn delete(self, pool: &Pool) -> Result<(), HttpResponse> { - let invite: Result = sqlx::query_as("DELETE FROM invites WHERE id = $1") - .bind(self.id) - .fetch_one(pool) - .await; - - if let Err(error) = invite { - error!("{}", error); - - return Err(HttpResponse::InternalServerError().finish()) - } - - Ok(()) - } -} - -#[derive(Deserialize)] -pub struct StartAmountQuery { - pub start: Option, - pub amount: Option, -} diff --git a/src/utils.rs b/src/utils.rs index 77c5e0a..7a5581a 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,57 +1,217 @@ +use std::sync::LazyLock; + use actix_web::{ - HttpResponse, cookie::{Cookie, SameSite, time::Duration}, http::header::HeaderMap, + web::BytesMut, }; +use bindet::FileType; +use diesel::{ExpressionMethods, QueryDsl}; +use diesel_async::RunQueryDsl; use getrandom::fill; use hex::encode; use redis::RedisError; +use regex::Regex; use serde::Serialize; +use uuid::Uuid; -use crate::Data; +use crate::{ + Conn, Data, + config::Config, + error::Error, + objects::{HasIsAbove, HasUuid}, + schema::users, +}; -pub fn get_auth_header(headers: &HeaderMap) -> Result<&str, HttpResponse> { +pub static EMAIL_REGEX: LazyLock = LazyLock::new(|| { + Regex::new(r"[-A-Za-z0-9!#$%&'*+/=?^_`{|}~]+(?:\.[-A-Za-z0-9!#$%&'*+/=?^_`{|}~]+)*@(?:[A-Za-z0-9](?:[-A-Za-z0-9]*[A-Za-z0-9])?\.)+[A-Za-z0-9](?:[-A-Za-z0-9]*[A-Za-z0-9])?").unwrap() +}); + +pub static USERNAME_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"^[a-z0-9_.-]+$").unwrap()); + +pub static CHANNEL_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"^[a-z0-9_.-]+$").unwrap()); + +// Password is expected to be hashed using SHA3-384 +pub static PASSWORD_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"[0-9a-f]{96}").unwrap()); + +pub fn get_auth_header(headers: &HeaderMap) -> Result<&str, Error> { let auth_token = headers.get(actix_web::http::header::AUTHORIZATION); if auth_token.is_none() { - return Err(HttpResponse::Unauthorized().finish()); + return Err(Error::Unauthorized( + "No authorization header provided".to_string(), + )); } - let auth = auth_token.unwrap().to_str(); + let auth_raw = auth_token.unwrap().to_str()?; - if let Err(error) = auth { - return Err(HttpResponse::Unauthorized().json(format!(r#" {{ "error": "{}" }} "#, error))); + let mut auth = auth_raw.split_whitespace(); + + let auth_type = auth.next(); + + let auth_value = auth.next(); + + if auth_type.is_none() { + return Err(Error::BadRequest( + "Authorization header is empty".to_string(), + )); + } else if auth_type.is_some_and(|at| at != "Bearer") { + return Err(Error::BadRequest( + "Only token auth is supported".to_string(), + )); } - let auth_value = auth.unwrap().split_whitespace().nth(1); - if auth_value.is_none() { - return Err(HttpResponse::BadRequest().finish()); + return Err(Error::BadRequest("No token provided".to_string())); } Ok(auth_value.unwrap()) } -pub fn refresh_token_cookie(refresh_token: String) -> Cookie<'static> { +pub fn get_ws_protocol_header(headers: &HeaderMap) -> Result<&str, Error> { + let 
auth_token = headers.get(actix_web::http::header::SEC_WEBSOCKET_PROTOCOL); + + if auth_token.is_none() { + return Err(Error::Unauthorized( + "No authorization header provided".to_string(), + )); + } + + let auth_raw = auth_token.unwrap().to_str()?; + + let mut auth = auth_raw.split_whitespace(); + + let response_proto = auth.next(); + + let auth_value = auth.next(); + + if response_proto.is_none() { + return Err(Error::BadRequest( + "Sec-WebSocket-Protocol header is empty".to_string(), + )); + } else if response_proto.is_some_and(|rp| rp != "Authorization,") { + return Err(Error::BadRequest( + "First protocol should be Authorization".to_string(), + )); + } + + if auth_value.is_none() { + return Err(Error::BadRequest("No token provided".to_string())); + } + + Ok(auth_value.unwrap()) +} + +pub fn new_refresh_token_cookie(config: &Config, refresh_token: String) -> Cookie<'static> { Cookie::build("refresh_token", refresh_token) .http_only(true) .secure(true) .same_site(SameSite::None) - .path("/api") + //.domain(config.web.backend_url.domain().unwrap().to_string()) + .path(config.web.backend_url.path().to_string()) .max_age(Duration::days(30)) .finish() } -pub fn generate_access_token() -> Result { - let mut buf = [0u8; 16]; +pub fn generate_token() -> Result { + let mut buf = [0u8; N]; fill(&mut buf)?; Ok(encode(buf)) } -pub fn generate_refresh_token() -> Result { - let mut buf = [0u8; 32]; - fill(&mut buf)?; - Ok(encode(buf)) +pub fn image_check(icon: BytesMut) -> Result { + let buf = std::io::Cursor::new(icon); + + let detect = bindet::detect(buf).map_err(|e| e.kind()); + + if let Ok(Some(file_type)) = detect { + if file_type.likely_to_be == vec![FileType::Jpg] { + return Ok(String::from("jpg")); + } else if file_type.likely_to_be == vec![FileType::Png] { + return Ok(String::from("png")); + } + } + + Err(Error::BadRequest( + "Uploaded file is not an image".to_string(), + )) +} + +pub async fn user_uuid_from_identifier( + conn: &mut Conn, + identifier: &String, +) -> Result { + if EMAIL_REGEX.is_match(identifier) { + use users::dsl; + let user_uuid = dsl::users + .filter(dsl::email.eq(identifier)) + .select(dsl::uuid) + .get_result(conn) + .await?; + + Ok(user_uuid) + } else if USERNAME_REGEX.is_match(identifier) { + use users::dsl; + let user_uuid = dsl::users + .filter(dsl::username.eq(identifier)) + .select(dsl::uuid) + .get_result(conn) + .await?; + + Ok(user_uuid) + } else { + Err(Error::BadRequest( + "Please provide a valid username or email".to_string(), + )) + } +} + +pub async fn global_checks(data: &Data, user_uuid: Uuid) -> Result<(), Error> { + if data.config.instance.require_email_verification { + let mut conn = data.pool.get().await?; + + use users::dsl; + let email_verified: bool = dsl::users + .filter(dsl::uuid.eq(user_uuid)) + .select(dsl::email_verified) + .get_result(&mut conn) + .await?; + + if !email_verified { + return Err(Error::Forbidden( + "server requires email verification".to_string(), + )); + } + } + + Ok(()) +} + +pub async fn order_by_is_above(mut items: Vec) -> Result, Error> +where + T: HasUuid + HasIsAbove, +{ + let mut ordered = Vec::new(); + + // Find head + let head_pos = items + .iter() + .position(|item| !items.iter().any(|i| i.is_above() == Some(item.uuid()))); + + if let Some(pos) = head_pos { + ordered.push(items.swap_remove(pos)); + + while let Some(next_pos) = items + .iter() + .position(|item| Some(item.uuid()) == ordered.last().unwrap().is_above()) + { + ordered.push(items.swap_remove(next_pos)); + } + } + + Ok(ordered) } impl Data { @@ -60,12 
+220,12 @@ impl Data {
         key: String,
         value: impl Serialize,
         expire: u32,
-    ) -> Result<(), RedisError> {
+    ) -> Result<(), Error> {
         let mut conn = self.cache_pool.get_multiplexed_tokio_connection().await?;
 
         let key_encoded = encode(key);
 
-        let value_json = serde_json::to_string(&value).unwrap();
+        let value_json = serde_json::to_string(&value)?;
 
         redis::cmd("SET")
             .arg(&[key_encoded.clone(), value_json])
@@ -75,7 +235,9 @@ impl Data {
         redis::cmd("EXPIRE")
             .arg(&[key_encoded, expire.to_string()])
             .exec_async(&mut conn)
-            .await
+            .await?;
+
+        Ok(())
     }
 
     pub async fn get_cache_key(&self, key: String) -> Result {
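+        // Presumably mirrors set_cache_key above: the key is hex-encoded with `encode` before the
+        // Redis lookup so stored and fetched entries use the same key encoding.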