Merge branch 'sections-backend' into develop

Jef Roosens 2021-09-23 15:32:59 +02:00
commit 1441e3e601
Signed by untrusted user: Jef Roosens
GPG Key ID: 955C0660072F691F
21 changed files with 1029 additions and 388 deletions

12
Rb.yaml
View File

@ -23,13 +23,21 @@ debug:
url: "postgres://rb:rb@localhost:5432/rb" url: "postgres://rb:rb@localhost:5432/rb"
release: release:
keep_alive: 5
read_timeout: 5
write_timeout: 5
log_level: "normal"
limits:
forms: 32768
admin_user: "admin" admin_user: "admin"
admin_pass: "password" admin_pass: "password"
jwt: jwt:
key: "secret" key: "secret"
refresh_token_size: 64 refresh_token_size: 64
refresh_token_expire: 86400 # Just 60 seconds for debugging
refresh_token_expire: 60
databases: databases:
postgres_rb: postgres_rb:
url: "postgres://rb:rb@db:5432/rb" url: "postgres://rb:rb@localhost:5432/rb"

View File

@ -0,0 +1,7 @@
-- This file should undo anything in `up.sql`
-- Drop the triggers first so the function they reference can be removed
-- without needing CASCADE.
drop trigger insert_enforce_post_titles on posts;
drop trigger update_enforce_post_titles on posts;
drop function enforce_post_titles;
-- Drop posts before sections, as posts has a foreign key into sections.
drop table posts cascade;
drop table sections cascade;

View File

@ -0,0 +1,56 @@
-- Your SQL goes here
-- A section groups posts together (e.g. a blog vs. a microblog).
create table sections (
id uuid DEFAULT gen_random_uuid() PRIMARY KEY,
-- Title of the section
title varchar(255) UNIQUE NOT NULL,
-- Optional description of the section
description text,
-- Whether to show the section in the default list on the homepage
is_default boolean NOT NULL DEFAULT false,
-- Whether the posts should contain titles or not
has_titles boolean NOT NULL DEFAULT true
);
-- A post belongs to exactly one section; deleting the section also deletes
-- its posts (ON DELETE CASCADE).
create table posts (
id uuid DEFAULT gen_random_uuid() PRIMARY KEY,
section_id uuid NOT NULL REFERENCES sections(id) ON DELETE CASCADE,
-- Title of the post
-- Whether this is NULL or not is enforced using the enforce_post_titles trigger
title varchar(255),
-- Post date, defaults to today
publish_date date NOT NULL DEFAULT now(),
-- Content of the post
content text NOT NULL
);
-- Trigger function enforcing the title rule: a post must have a title exactly
-- when its section has has_titles = true, and must not have one otherwise.
create function enforce_post_titles() returns trigger as $enforce_post_titles$
begin
-- Check for a wrongfully null title
if new.title is null and exists (
select 1 from sections where id = new.section_id and has_titles
) then
raise exception 'Expected a post title, but got null.';
end if;
-- Check for a wrongfully present title
if new.title is not null and exists (
select 1 from sections where id = new.section_id and not has_titles
) then
raise exception 'Expected an empty post title, but got a value.';
end if;
return new;
end;
$enforce_post_titles$ language plpgsql;
-- Validate the title rule on every insert.
create trigger insert_enforce_post_titles
before insert on posts
for each row
execute function enforce_post_titles();
-- Validate on update as well, but only when the title actually changes.
create trigger update_enforce_post_titles
before update of title on posts
for each row
when (old.title is distinct from new.title)
execute function enforce_post_titles();

View File

@ -36,7 +36,7 @@ license_template_path = ""
make_backup = false make_backup = false
match_arm_blocks = true match_arm_blocks = true
match_arm_leading_pipes = "Never" match_arm_leading_pipes = "Never"
match_block_trailing_comma = false match_block_trailing_comma = true
max_width = 100 max_width = 100
merge_derives = true merge_derives = true
newline_style = "Auto" newline_style = "Auto"

View File

@ -1,5 +1,11 @@
//! The db module contains all Diesel-related logic. This is to prevent the various Diesel imports
//! from poluting other modules' namespaces.
pub mod posts;
pub mod sections;
pub mod tokens; pub mod tokens;
pub mod users; pub mod users;
pub use sections::{NewSection, Section};
pub use tokens::{NewRefreshToken, RefreshToken}; pub use tokens::{NewRefreshToken, RefreshToken};
pub use users::{NewUser, User}; pub use users::{NewUser, User};

58
src/db/posts.rs 100644
View File

@ -0,0 +1,58 @@
use chrono::NaiveDate;
use diesel::{insert_into, prelude::*, Insertable, PgConnection, Queryable};
use uuid::Uuid;
use crate::{
errors::{RbError, RbResult},
schema::{posts, posts::dsl::*},
};
/// A post as stored in the database.
#[derive(Queryable)]
pub struct Post
{
    pub id: Uuid,
    /// Section this post belongs to
    pub section_id: Uuid,
    /// Optional title; whether it may be NULL is enforced per-section by the
    /// `enforce_post_titles` database trigger
    pub title: Option<String>,
    /// Date the post was published
    pub publish_date: NaiveDate,
    /// Body text of the post
    pub content: String,
}
/// A new post to be inserted into the database.
#[derive(Insertable)]
#[table_name = "posts"]
pub struct NewPost
{
    pub section_id: Uuid,
    pub title: Option<String>,
    pub publish_date: NaiveDate,
    // The posts table declares `content text NOT NULL` with no default, so
    // inserting a NewPost without this field is rejected by the database.
    pub content: String,
}
/// Fetches every post stored in the database. Use sparingly: the result set
/// is unbounded and may be large.
///
/// # Arguments
///
/// * `conn` - a reference to a database connection
pub fn all(conn: &PgConnection) -> RbResult<Vec<Post>>
{
    match posts.load::<Post>(conn) {
        Ok(found) => Ok(found),
        Err(_) => Err(RbError::DbError("Couldn't get all posts.")),
    }
}
/// Adds a new post to the database.
///
/// # Arguments
///
/// * `conn` - reference to a database connection
/// * `new_post` - the new post object to insert
pub fn create(conn: &PgConnection, new_post: &NewPost) -> RbResult<()>
{
    let res = insert_into(posts).values(new_post).execute(conn);

    // TODO check for conflict?
    match res {
        Ok(_) => Ok(()),
        Err(_) => Err(RbError::DbError("Couldn't insert post.")),
    }
}

63
src/db/sections.rs 100644
View File

@ -0,0 +1,63 @@
//! Handles all section-related database operations.
use diesel::{insert_into, prelude::*, Insertable, PgConnection, Queryable};
use serde::Deserialize;
use uuid::Uuid;
use crate::{
errors::{RbError, RbResult},
schema::{sections, sections::dsl::*},
};
/// Represents a section contained in the database.
#[derive(Queryable)]
pub struct Section
{
    pub id: Uuid,
    /// Unique title of the section
    pub title: String,
    /// Optional free-form description
    pub description: Option<String>,
    /// Whether the section is shown in the default list on the homepage
    pub is_default: bool,
    /// Whether posts in this section must carry a title (enforced by a DB trigger)
    pub has_titles: bool,
}
/// A new section to be added into the database.
/// Deserialized from camelCase JSON (e.g. `isDefault`); optional fields fall
/// back to the column defaults defined in the migration.
#[derive(Deserialize, Insertable)]
#[table_name = "sections"]
#[serde(rename_all = "camelCase")]
pub struct NewSection
{
    title: String,
    description: Option<String>,
    is_default: Option<bool>,
    has_titles: Option<bool>,
}
/// Returns all sections in the database.
///
/// # Arguments
///
/// * `conn` - reference to a database connection
pub fn all(conn: &PgConnection) -> RbResult<Vec<Section>>
{
    sections
        .load::<Section>(conn)
        // Trailing period added for consistency with the other DbError messages.
        .map_err(|_| RbError::DbError("Couldn't get all sections."))
}
/// Adds a new section to the database.
///
/// # Arguments
///
/// * `conn` - reference to a database connection
/// * `new_section` - the new section to be added
pub fn create(conn: &PgConnection, new_section: &NewSection) -> RbResult<()>
{
    let res = insert_into(sections).values(new_section).execute(conn);

    // TODO check for conflict?
    match res {
        Ok(_) => Ok(()),
        Err(_) => Err(RbError::DbError("Couldn't insert section.")),
    }
}

View File

@ -1,3 +1,5 @@
//! Handles refresh token-related database operations.
use diesel::{insert_into, prelude::*, Insertable, PgConnection, Queryable}; use diesel::{insert_into, prelude::*, Insertable, PgConnection, Queryable};
use uuid::Uuid; use uuid::Uuid;
@ -6,6 +8,7 @@ use crate::{
schema::{refresh_tokens, refresh_tokens::dsl::*}, schema::{refresh_tokens, refresh_tokens::dsl::*},
}; };
/// A refresh token as stored in the database
#[derive(Queryable)] #[derive(Queryable)]
pub struct RefreshToken pub struct RefreshToken
{ {
@ -15,6 +18,7 @@ pub struct RefreshToken
pub last_used_at: Option<chrono::NaiveDateTime>, pub last_used_at: Option<chrono::NaiveDateTime>,
} }
/// A new refresh token to be added into the database
#[derive(Insertable)] #[derive(Insertable)]
#[table_name = "refresh_tokens"] #[table_name = "refresh_tokens"]
pub struct NewRefreshToken pub struct NewRefreshToken
@ -24,6 +28,12 @@ pub struct NewRefreshToken
pub expires_at: chrono::NaiveDateTime, pub expires_at: chrono::NaiveDateTime,
} }
// TODO add pagination as this could grow very quickly
/// Returns all refresh tokens contained in the database.
///
/// # Arguments
///
/// * `conn` - database connection to use
pub fn all(conn: &PgConnection) -> RbResult<Vec<RefreshToken>> pub fn all(conn: &PgConnection) -> RbResult<Vec<RefreshToken>>
{ {
refresh_tokens refresh_tokens
@ -31,18 +41,30 @@ pub fn all(conn: &PgConnection) -> RbResult<Vec<RefreshToken>>
.map_err(|_| RbError::DbError("Couldn't get all refresh tokens.")) .map_err(|_| RbError::DbError("Couldn't get all refresh tokens."))
} }
/// Insert a new refresh token into the database.
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `new_refresh_token` - token to insert
pub fn create(conn: &PgConnection, new_refresh_token: &NewRefreshToken) -> RbResult<()> pub fn create(conn: &PgConnection, new_refresh_token: &NewRefreshToken) -> RbResult<()>
{ {
insert_into(refresh_tokens) insert_into(refresh_tokens)
.values(new_refresh_token) .values(new_refresh_token)
.execute(conn) .execute(conn)
.map_err(|_| RbError::Custom("Couldn't insert refresh token."))?; .map_err(|_| RbError::DbError("Couldn't insert refresh token."))?;
// TODO check for conflict? // TODO check for conflict?
Ok(()) Ok(())
} }
/// Returns the token & user data associated with the given refresh token value.
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `token_val` - token value to search for
pub fn find_with_user( pub fn find_with_user(
conn: &PgConnection, conn: &PgConnection,
token_val: &[u8], token_val: &[u8],
@ -53,10 +75,20 @@ pub fn find_with_user(
.inner_join(crate::schema::users::dsl::users) .inner_join(crate::schema::users::dsl::users)
.filter(token.eq(token_val)) .filter(token.eq(token_val))
.first::<(RefreshToken, super::users::User)>(conn) .first::<(RefreshToken, super::users::User)>(conn)
.map_err(|_| RbError::Custom("Couldn't get refresh token & user.")) .map_err(|_| RbError::DbError("Couldn't get refresh token & user."))
.ok() .ok()
} }
/// Updates a token's `last_used_at` column value.
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `token_` - value of the refresh token to update
/// * `last_used_at_` - date value to update column with
///
/// **NOTE**: argument names use trailing underscores as to not conflict with Diesel's imported dsl
/// names.
pub fn update_last_used_at( pub fn update_last_used_at(
conn: &PgConnection, conn: &PgConnection,
token_: &[u8], token_: &[u8],

View File

@ -1,3 +1,5 @@
//! Handles user-related database operations.
use diesel::{prelude::*, AsChangeset, Insertable, Queryable}; use diesel::{prelude::*, AsChangeset, Insertable, Queryable};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use uuid::Uuid; use uuid::Uuid;
@ -7,6 +9,7 @@ use crate::{
schema::{users, users::dsl::*}, schema::{users, users::dsl::*},
}; };
/// A user as stored in the database.
#[derive(Queryable, Serialize)] #[derive(Queryable, Serialize)]
pub struct User pub struct User
{ {
@ -18,6 +21,7 @@ pub struct User
pub admin: bool, pub admin: bool,
} }
/// A new user to add to the database.
#[derive(Insertable, AsChangeset, Deserialize)] #[derive(Insertable, AsChangeset, Deserialize)]
#[table_name = "users"] #[table_name = "users"]
pub struct NewUser pub struct NewUser
@ -27,6 +31,11 @@ pub struct NewUser
pub admin: bool, pub admin: bool,
} }
/// Returns all users in the database.
///
/// # Arguments
///
/// * `conn` - database connection to use
pub fn all(conn: &PgConnection) -> RbResult<Vec<User>> pub fn all(conn: &PgConnection) -> RbResult<Vec<User>>
{ {
users users
@ -34,11 +43,23 @@ pub fn all(conn: &PgConnection) -> RbResult<Vec<User>>
.map_err(|_| RbError::DbError("Couldn't get all users.")) .map_err(|_| RbError::DbError("Couldn't get all users."))
} }
/// Find a user with a given ID.
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `user_id` - ID to search for
pub fn find(conn: &PgConnection, user_id: Uuid) -> Option<User> pub fn find(conn: &PgConnection, user_id: Uuid) -> Option<User>
{ {
users.find(user_id).first::<User>(conn).ok() users.find(user_id).first::<User>(conn).ok()
} }
/// Find a user with a given username.
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `username_` - username to search for
pub fn find_by_username(conn: &PgConnection, username_: &str) -> RbResult<User> pub fn find_by_username(conn: &PgConnection, username_: &str) -> RbResult<User>
{ {
Ok(users Ok(users
@ -47,6 +68,12 @@ pub fn find_by_username(conn: &PgConnection, username_: &str) -> RbResult<User>
.map_err(|_| RbError::DbError("Couldn't find users by username."))?) .map_err(|_| RbError::DbError("Couldn't find users by username."))?)
} }
/// Insert a new user into the database
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `new_user` - user to insert
pub fn create(conn: &PgConnection, new_user: &NewUser) -> RbResult<()> pub fn create(conn: &PgConnection, new_user: &NewUser) -> RbResult<()>
{ {
let count = diesel::insert_into(users) let count = diesel::insert_into(users)
@ -61,6 +88,12 @@ pub fn create(conn: &PgConnection, new_user: &NewUser) -> RbResult<()>
Ok(()) Ok(())
} }
/// Either create a new user or update an existing one on conflict.
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `new_user` - user to insert/update
pub fn create_or_update(conn: &PgConnection, new_user: &NewUser) -> RbResult<()> pub fn create_or_update(conn: &PgConnection, new_user: &NewUser) -> RbResult<()>
{ {
diesel::insert_into(users) diesel::insert_into(users)
@ -74,6 +107,12 @@ pub fn create_or_update(conn: &PgConnection, new_user: &NewUser) -> RbResult<()>
Ok(()) Ok(())
} }
/// Delete the user with the given ID.
///
/// # Arguments
///
/// `conn` - database connection to use
/// `user_id` - ID of user to delete
pub fn delete(conn: &PgConnection, user_id: Uuid) -> RbResult<()> pub fn delete(conn: &PgConnection, user_id: Uuid) -> RbResult<()>
{ {
diesel::delete(users.filter(id.eq(user_id))) diesel::delete(users.filter(id.eq(user_id)))
@ -83,6 +122,14 @@ pub fn delete(conn: &PgConnection, user_id: Uuid) -> RbResult<()>
Ok(()) Ok(())
} }
/// Block a user given an ID.
/// In practice, this means updating the user's entry so that the `blocked` column is set to
/// `true`.
///
/// # Arguments
///
/// `conn` - database connection to use
/// `user_id` - ID of user to block
pub fn block(conn: &PgConnection, user_id: Uuid) -> RbResult<()> pub fn block(conn: &PgConnection, user_id: Uuid) -> RbResult<()>
{ {
diesel::update(users.filter(id.eq(user_id))) diesel::update(users.filter(id.eq(user_id)))

View File

@ -61,7 +61,7 @@ impl RbError
RbError::AuthInvalidRefreshToken => "This refresh token is not valid.", RbError::AuthInvalidRefreshToken => "This refresh token is not valid.",
RbError::AuthDuplicateRefreshToken => { RbError::AuthDuplicateRefreshToken => {
"This refresh token has already been used. The user has been blocked." "This refresh token has already been used. The user has been blocked."
} },
RbError::AuthMissingHeader => "Missing Authorization header.", RbError::AuthMissingHeader => "Missing Authorization header.",
RbError::UMDuplicateUser => "This user already exists.", RbError::UMDuplicateUser => "This user already exists.",

View File

@ -10,7 +10,7 @@ use sha2::Sha256;
use crate::{auth::jwt::Claims, errors::RbError, RbConfig}; use crate::{auth::jwt::Claims, errors::RbError, RbConfig};
/// Extracts a "Authorization: Bearer" string from the headers. /// Extracts an "Authorization: Bearer" string from the headers.
pub struct Bearer<'a>(&'a str); pub struct Bearer<'a>(&'a str);
#[rocket::async_trait] #[rocket::async_trait]
@ -22,7 +22,7 @@ impl<'r> FromRequest<'r> for Bearer<'r>
{ {
// If the header isn't present, just forward to the next route // If the header isn't present, just forward to the next route
let header = match req.headers().get_one("Authorization") { let header = match req.headers().get_one("Authorization") {
None => return Outcome::Failure((Status::BadRequest, Self::Error::AuthMissingHeader)), None => return Outcome::Forward(()),
Some(val) => val, Some(val) => val,
}; };
@ -31,12 +31,10 @@ impl<'r> FromRequest<'r> for Bearer<'r>
} }
// Extract the jwt token from the header // Extract the jwt token from the header
let auth_string = match header.get(7..) { match header.get(7..) {
Some(s) => s, Some(s) => Outcome::Success(Self(s)),
None => return Outcome::Failure((Status::Unauthorized, Self::Error::AuthUnauthorized)), None => Outcome::Failure((Status::Unauthorized, Self::Error::AuthUnauthorized)),
}; }
Outcome::Success(Self(auth_string))
} }
} }
@ -63,14 +61,14 @@ impl<'r> FromRequest<'r> for Jwt
Status::InternalServerError, Status::InternalServerError,
Self::Error::Custom("Failed to do Hmac thing."), Self::Error::Custom("Failed to do Hmac thing."),
)) ))
} },
}; };
// Verify token using key // Verify token using key
let claims: Claims = match bearer.verify_with_key(&key) { let claims: Claims = match bearer.verify_with_key(&key) {
Ok(claims) => claims, Ok(claims) => claims,
Err(_) => { Err(_) => {
return Outcome::Failure((Status::Unauthorized, Self::Error::AuthUnauthorized)) return Outcome::Failure((Status::Unauthorized, Self::Error::AuthUnauthorized))
} },
}; };
Outcome::Success(Self(claims)) Outcome::Success(Self(claims))

View File

@ -27,6 +27,7 @@ pub mod db;
pub mod errors; pub mod errors;
pub mod guards; pub mod guards;
pub(crate) mod schema; pub(crate) mod schema;
pub mod sections;
#[global_allocator] #[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
@ -110,4 +111,5 @@ fn rocket() -> _
"/api/admin", "/api/admin",
routes![admin::get_users, admin::create_user, admin::get_user_info], routes![admin::get_users, admin::create_user, admin::get_user_info],
) )
.mount("/api/sections", routes![sections::create_section])
} }

View File

@ -1,3 +1,13 @@
// Diesel schema for the posts table; mirrors the columns created in the
// sections/posts migration. NOTE(review): schema.rs is usually generated by
// `diesel print-schema`, so hand-written comments here may be overwritten.
table! {
    posts (id) {
        id -> Uuid,
        section_id -> Uuid,
        title -> Nullable<Varchar>,
        publish_date -> Date,
        content -> Text,
    }
}
table! { table! {
refresh_tokens (token) { refresh_tokens (token) {
token -> Bytea, token -> Bytea,
@ -7,6 +17,16 @@ table! {
} }
} }
// Diesel schema for the sections table; mirrors the columns created in the
// sections/posts migration.
table! {
    sections (id) {
        id -> Uuid,
        title -> Varchar,
        description -> Nullable<Text>,
        is_default -> Bool,
        has_titles -> Bool,
    }
}
table! { table! {
users (id) { users (id) {
id -> Uuid, id -> Uuid,
@ -17,6 +37,7 @@ table! {
} }
} }
joinable!(posts -> sections (section_id));
joinable!(refresh_tokens -> users (user_id)); joinable!(refresh_tokens -> users (user_id));
allow_tables_to_appear_in_same_query!(refresh_tokens, users,); allow_tables_to_appear_in_same_query!(posts, refresh_tokens, sections, users,);

24
src/sections.rs 100644
View File

@ -0,0 +1,24 @@
//! This module handles management of site sections (aka blogs).
use rocket::serde::json::Json;
use crate::{db, errors::RbResult, guards::Admin, RbDbConn};
/// Route for creating a new section.
///
/// # Arguments
///
/// * `_admin` - guard ensuring user is admin
/// * `conn` - guard providing a connection to the database
/// * `new_section` - Json-encoded NewSection object
#[post("/", data = "<new_section>")]
pub async fn create_section(
    _admin: Admin,
    conn: RbDbConn,
    new_section: Json<db::NewSection>,
) -> RbResult<()>
{
    // `conn.run` already yields the closure's `RbResult<()>`, so the previous
    // `Ok(.. ?)` re-wrapping was redundant.
    conn.run(move |c| db::sections::create(c, &new_section.into_inner()))
        .await
}

View File

@ -2,7 +2,7 @@ import requests
class RbClient: class RbClient:
def __init__(self, username, password, base_url = "http://localhost:8000/api"): def __init__(self, username = "admin", password = "password", base_url = "http://localhost:8000/api"):
self.username = username self.username = username
self.password = password self.password = password
self.base_url = base_url self.base_url = base_url
@ -17,6 +17,7 @@ class RbClient:
}) })
if r.status_code != 200: if r.status_code != 200:
print(r.text)
raise Exception("Couldn't login") raise Exception("Couldn't login")
res = r.json() res = r.json()
@ -56,9 +57,15 @@ class RbClient:
def get(self, url, *args, **kwargs): def get(self, url, *args, **kwargs):
return self._request("GET", f"{self.base_url}{url}", *args, **kwargs) return self._request("GET", f"{self.base_url}{url}", *args, **kwargs)
def post(self, url, *args, **kwargs):
return self._request("POST", f"{self.base_url}{url}", *args, **kwargs)
if __name__ == "__main__": if __name__ == "__main__":
client = RbClient("admin", "password") client = RbClient()
print(client.get("/admin/users").json()) # print(client.get("/admin/users").json())
client.post("/sections", json={
"title": "this is a title"
})

View File

@ -7,7 +7,9 @@
"build": "astro build" "build": "astro build"
}, },
"devDependencies": { "devDependencies": {
"@astrojs/renderer-svelte": "^0.1.1",
"astro": "0.19.0-next.2", "astro": "0.19.0-next.2",
"@astrojs/renderer-svelte": "^0.1.1" "miragejs": "^0.1.41",
"typescript": "^4.4.3"
} }
} }

View File

@ -0,0 +1,19 @@
<script lang="ts">
    let movies = [];

    // The /api/movies endpoint responds with `{ movies: [...] }` (see the
    // Mirage route definition), so unwrap the array before rendering;
    // assigning the raw response object made the {#each} below iterate
    // nothing. Debug console.log calls removed.
    fetch("/api/movies")
        .then(res => res.json())
        .then(res => {
            movies = res.movies;
        });
</script>

<ul>
    {#each movies as { id, name, year }, i}
        <li><a target="_blank" href="https://www.youtube.com/watch?v={id}">
            {i + 1}: {name} - {year}
        </a></li>
    {/each}
</ul>

View File

@ -1,4 +1,4 @@
<script> <script lang="ts">
let count = 0; let count = 0;
function add() { function add() {

View File

@ -1,7 +1,11 @@
---
---
<html> <html>
<body> <body>
<h1>huh</h1> <h1>huh</h1>
<p>lol</p> <p>lol</p>
<MirageTest />
<slot /> <slot />
</body> </body>
</html> </html>

View File

@ -1,3 +1,24 @@
---
import { createServer } from "miragejs"
import MirageTest from "../components/MirageTest"
createServer({
routes() {
this.namespace = "api"
this.get("/movies", () => {
return {
movies: [
{ id: 1, name: "Inception", year: 2010 },
{ id: 2, name: "Interstellar", year: 2014 },
{ id: 3, name: "Dunkirk", year: 2017 },
],
}
})
},
})
---
<html> <html>
<body> <body>
@ -7,6 +28,8 @@
<li class="nav-bar-item"><a href="/microblog">Microblog</a></li> <li class="nav-bar-item"><a href="/microblog">Microblog</a></li>
<li class="nav-bar-item"><a href="/devlogs">Devlogs</a></li> <li class="nav-bar-item"><a href="/devlogs">Devlogs</a></li>
</ul> </ul>
<MirageTest client:load />
</body> </body>
</html> </html>

File diff suppressed because it is too large Load Diff