Added most relevant code for blog

pull/3/head
Jef Roosens 2021-11-23 09:32:08 +01:00
parent f98f0e2d4e
commit 07f25219d6
Signed by: Jef Roosens
GPG Key ID: 955C0660072F691F
19 changed files with 2515 additions and 0 deletions

.env 100644

@@ -0,0 +1,2 @@
# This file is used by diesel to find the development database
DATABASE_URL=postgres://rb:rb@localhost:5433/rb

.gitignore 100644

@@ -0,0 +1,21 @@
# ---> Rust
# Generated by Cargo
# will have compiled files and executables
debug/
out/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
# Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
# Added by cargo
/target
.vim/
vendor/

Cargo.lock 100644 (generated; 1906-line diff omitted)

Cargo.toml 100644

@@ -0,0 +1,42 @@
[package]
name = "rb-blog"
version = "0.1.0"
edition = "2018"
[lib]
name = "rb_blog"
path = "src/lib.rs"
[[bin]]
name = "rb-blog"
path = "src/main.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# Backend web framework
rocket = { version = "0.5.0-rc.1", features = [ "json", "uuid" ] }
# Used to provide Rocket routes with database connections
rocket_sync_db_pools = { version = "0.1.0-rc.1", default_features = false, features = [ "diesel_postgres_pool" ] }
# Used to (de)serialize JSON
serde = { version = "1.0.127", features = [ "derive" ] }
# ORM
diesel = { version = "1.4.7", features = ["postgres", "uuidv07", "chrono"] }
diesel_migrations = "1.4.0"
# To properly compile libpq statically
openssl = "0.10.36"
# For password hashing & verification
rust-argon2 = "0.8.3"
rand = "0.8.4"
uuid = { version = "0.8.2", features = ["serde"] }
# Authentication
jwt = "0.14.0"
hmac = "*"
sha2 = "*"
# Timestamps for JWT tokens
chrono = { version = "*", features = [ "serde" ] }
# Encoding of refresh tokens
base64 = "0.13.0"
# Reading in configuration files
figment = { version = "*", features = [ "yaml" ] }
mimalloc = { version = "0.1.26", default_features = false }
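
The last group of dependencies covers authentication. None of the code that uses them is part of this excerpt (it lives in the guards and errors modules), but as a rough idea of how these crates are usually combined (rust-argon2 for password hashing, hmac/sha2/jwt for signed tokens), here is a minimal sketch; the secret, salt and claim values are placeholders:

use std::collections::BTreeMap;

use hmac::{Hmac, NewMac}; // NewMac provides new_from_slice on hmac 0.11
use jwt::{SignWithKey, VerifyWithKey};
use sha2::Sha256;

// Sketch only: not the blog's actual auth code, just the common usage pattern
// of the crates listed above.
fn auth_sketch() {
    // Hash a password with argon2 and verify it afterwards.
    let config = argon2::Config::default();
    let hash = argon2::hash_encoded(b"hunter2", b"random-salt", &config).unwrap();
    assert!(argon2::verify_encoded(&hash, b"hunter2").unwrap());

    // Sign a small set of claims as a JWT with an HMAC-SHA256 key.
    let key: Hmac<Sha256> = Hmac::new_from_slice(b"placeholder-secret").unwrap();
    let mut claims = BTreeMap::new();
    claims.insert("sub", "admin");
    let token = claims.sign_with_key(&key).unwrap();

    // Verifying the token hands the claims back.
    let verified: BTreeMap<String, String> = token.verify_with_key(&key).unwrap();
    assert_eq!(verified["sub"], "admin");
}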

diesel.toml 100644

@@ -0,0 +1,5 @@
# For documentation on how to configure this file,
# see diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"

migrations/00000000000000_diesel_initial_setup/down.sql 100644

@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

migrations/00000000000000_diesel_initial_setup/up.sql 100644

@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

down.sql 100644

@@ -0,0 +1,7 @@
-- This file should undo anything in `up.sql`
drop trigger insert_enforce_post_titles on posts;
drop trigger update_enforce_post_titles on posts;
drop function enforce_post_titles;
drop table posts cascade;
drop table sections cascade;

up.sql 100644

@@ -0,0 +1,58 @@
-- Your SQL goes here
create table sections (
id uuid DEFAULT gen_random_uuid() PRIMARY KEY,
-- Title of the section
title varchar(255) UNIQUE NOT NULL,
-- Name to use when routing (this just makes for prettier URLs)
shortname varchar(32) UNIQUE NOT NULL,
-- Optional description of the section
description text,
-- Whether to show the section in the default list on the homepage
is_default boolean NOT NULL DEFAULT false,
-- Whether the posts should contain titles or not
has_titles boolean NOT NULL DEFAULT true
);
create table posts (
id uuid DEFAULT gen_random_uuid() PRIMARY KEY,
section_id uuid NOT NULL REFERENCES sections(id) ON DELETE CASCADE,
-- Title of the post
-- Whether this is NULL or not is enforced using the enforce_post_titles trigger
title varchar(255),
-- Post date, defaults to today
publish_date date NOT NULL DEFAULT now(),
-- Content of the post
content text NOT NULL
);
create function enforce_post_titles() returns trigger as $enforce_post_titles$
begin
-- Reject a missing title when the section requires titles
if new.title is null and exists (
select 1 from sections where id = new.section_id and has_titles
) then
raise exception 'Expected a post title, but got null.';
end if;
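-- Reject a title when the section does not use titles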
if new.title is not null and exists (
select 1 from sections where id = new.section_id and not has_titles
) then
raise exception 'Expected an empty post title, but got a value.';
end if;
return new;
end;
$enforce_post_titles$ language plpgsql;
create trigger insert_enforce_post_titles
before insert on posts
for each row
execute function enforce_post_titles();
create trigger update_enforce_post_titles
before update of title on posts
for each row
when (old.title is distinct from new.title)
execute function enforce_post_titles();
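
A quick way to see the title constraint in action; the section and post values below are made up, and the error message is the one raised by enforce_post_titles above:

-- A section whose posts carry no titles...
INSERT INTO sections (title, shortname, has_titles)
VALUES ('Microblog', 'micro', false);

-- ...rejects any post that does have one:
INSERT INTO posts (section_id, title, content)
VALUES (
    (SELECT id FROM sections WHERE shortname = 'micro'),
    'Oops, a title',
    'Hello world'
);
-- ERROR:  Expected an empty post title, but got a value.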

rustfmt.toml 100644

@@ -0,0 +1,69 @@
binop_separator = "Front"
blank_lines_lower_bound = 0
blank_lines_upper_bound = 1
# Trying something new
brace_style = "AlwaysNextLine"
color = "Auto"
combine_control_expr = false
comment_width = 80
condense_wildcard_suffixes = false
control_brace_style = "AlwaysSameLine"
disable_all_formatting = false
edition = "2018"
emit_mode = "Files"
empty_item_single_line = true
enum_discrim_align_threshold = 0
error_on_line_overflow = false
error_on_unformatted = false
fn_args_layout = "Tall"
fn_single_line = false
force_explicit_abi = true
force_multiline_blocks = false
format_code_in_doc_comments = false
format_macro_bodies = true
format_macro_matchers = false
format_strings = false
group_imports = "StdExternalCrate"
hard_tabs = false
hide_parse_errors = false
ignore = []
imports_granularity = "Crate"
imports_indent = "Block"
imports_layout = "Mixed"
indent_style = "Block"
inline_attribute_width = 0
license_template_path = ""
make_backup = false
match_arm_blocks = true
match_arm_leading_pipes = "Never"
match_block_trailing_comma = true
max_width = 100
merge_derives = true
newline_style = "Auto"
normalize_comments = false
normalize_doc_attributes = false
overflow_delimited_expr = false
remove_nested_parens = true
reorder_impl_items = false
reorder_imports = true
reorder_modules = true
report_fixme = "Always"
report_todo = "Always"
required_version = "1.4.37"
skip_children = false
space_after_colon = true
space_before_colon = false
spaces_around_ranges = false
struct_field_align_threshold = 0
struct_lit_single_line = true
tab_spaces = 4
trailing_comma = "Vertical"
trailing_semicolon = true
type_punctuation_density = "Wide"
unstable_features = false
use_field_init_shorthand = false
use_small_heuristics = "Default"
use_try_shorthand = false
version = "One"
where_single_line = false
wrap_comments = false

src/db/mod.rs 100644

@@ -0,0 +1,8 @@
//! The db module contains all Diesel-related logic. This is to prevent the various Diesel imports
//! from polluting other modules' namespaces.
pub mod posts;
pub mod sections;
pub use posts::{NewPost, PatchPost, Post};
pub use sections::{NewSection, Section};

src/db/posts.rs 100644

@@ -0,0 +1,85 @@
use chrono::NaiveDate;
use diesel::{insert_into, prelude::*, Insertable, PgConnection, Queryable};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{
errors::{RbError, RbOption, RbResult},
schema::{posts, posts::dsl::*},
};
#[derive(Queryable, Serialize)]
pub struct Post
{
pub id: Uuid,
pub section_id: Uuid,
pub title: Option<String>,
pub publish_date: NaiveDate,
pub content: String,
}
#[derive(Deserialize, Insertable)]
#[table_name = "posts"]
#[serde(rename_all = "camelCase")]
pub struct NewPost
{
pub section_id: Uuid,
pub title: Option<String>,
pub publish_date: NaiveDate,
pub content: String,
}
#[derive(Deserialize, AsChangeset)]
#[table_name = "posts"]
pub struct PatchPost
{
pub section_id: Option<Uuid>,
pub title: Option<String>,
pub publish_date: Option<NaiveDate>,
pub content: Option<String>,
}
pub fn get(conn: &PgConnection, offset_: u32, limit_: u32) -> RbResult<Vec<Post>>
{
Ok(posts
.offset(offset_.into())
.limit(limit_.into())
.load(conn)
.map_err(|_| RbError::DbError("Couldn't query posts."))?)
}
pub fn find(conn: &PgConnection, id_: &Uuid) -> RbOption<Post>
{
match posts.find(id_).first(conn) {
Ok(val) => Ok(Some(val)),
Err(diesel::NotFound) => Ok(None),
_ => Err(RbError::DbError("Couldn't find post.")),
}
}
pub fn create(conn: &PgConnection, new_post: &NewPost) -> RbResult<Post>
{
Ok(insert_into(posts)
.values(new_post)
.get_result(conn)
.map_err(|_| RbError::DbError("Couldn't insert post."))?)
// TODO check for conflict?
}
pub fn update(conn: &PgConnection, post_id: &Uuid, patch_post: &PatchPost) -> RbResult<Post>
{
Ok(diesel::update(posts.filter(id.eq(post_id)))
.set(patch_post)
.get_result(conn)
.map_err(|_| RbError::DbError("Couldn't update post."))?)
}
pub fn delete(conn: &PgConnection, post_id: &Uuid) -> RbResult<()>
{
diesel::delete(posts.filter(id.eq(post_id)))
.execute(conn)
.map_err(|_| RbError::DbError("Couldn't delete post."))?;
Ok(())
}
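
These helpers only need a &PgConnection, so they can also be exercised outside of Rocket's RbDbConn pool. A minimal sketch, assuming it lives inside this crate (src/lib.rs is still empty in this commit, so the modules hang off the binary) and that DATABASE_URL is set as in the .env file above:

use diesel::{prelude::*, PgConnection};

use crate::db;

// Sketch: list the first 20 posts over a direct Diesel connection.
fn list_posts_sketch() {
    let url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    let conn = PgConnection::establish(&url).expect("failed to connect to Postgres");

    let posts = db::posts::get(&conn, 0, 20).expect("couldn't query posts");
    println!("fetched {} post(s)", posts.len());
}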

src/db/sections.rs 100644

@@ -0,0 +1,79 @@
use diesel::{insert_into, prelude::*, Insertable, PgConnection, Queryable};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{
errors::{RbError, RbResult},
schema::{sections, sections::dsl::*},
};
#[derive(Queryable, Serialize)]
pub struct Section
{
pub id: Uuid,
pub title: String,
pub shortname: String,
pub description: Option<String>,
pub is_default: bool,
pub has_titles: bool,
}
#[derive(Deserialize, Insertable)]
#[table_name = "sections"]
#[serde(rename_all = "camelCase")]
pub struct NewSection
{
title: String,
pub shortname: String,
description: Option<String>,
is_default: Option<bool>,
has_titles: Option<bool>,
}
#[derive(Deserialize, AsChangeset)]
#[table_name = "sections"]
#[serde(rename_all = "camelCase")]
pub struct PatchSection
{
title: Option<String>,
shortname: Option<String>,
description: Option<String>,
is_default: Option<bool>,
has_titles: Option<bool>,
}
pub fn get(conn: &PgConnection, offset_: u32, limit_: u32) -> RbResult<Vec<Section>>
{
Ok(sections
.offset(offset_.into())
.limit(limit_.into())
.load(conn)
.map_err(|_| RbError::DbError("Couldn't query sections."))?)
}
pub fn create(conn: &PgConnection, new_section: &NewSection) -> RbResult<Section>
{
Ok(insert_into(sections)
.values(new_section)
.get_result(conn)
.map_err(|_| RbError::DbError("Couldn't insert section."))?)
// TODO check for conflict?
}
pub fn update(conn: &PgConnection, section_id: &Uuid, patch_section: &PatchSection) -> RbResult<Section>
{
Ok(diesel::update(sections.filter(id.eq(section_id)))
.set(patch_section)
.get_result(conn)
.map_err(|_| RbError::DbError("Couldn't update section."))?)
}
pub fn delete(conn: &PgConnection, section_id: &Uuid) -> RbResult<()>
{
diesel::delete(sections.filter(id.eq(section_id)))
.execute(conn)
.map_err(|_| RbError::DbError("Couldn't delete section."))?;
Ok(())
}

src/lib.rs 100644 (empty file)

src/main.rs 100644

@@ -0,0 +1,108 @@
// This needs to be explicitly included before diesel is imported to make sure
// compilation succeeds in the release Docker image.
extern crate openssl;
#[macro_use]
extern crate rocket;
#[macro_use]
extern crate diesel_migrations;
#[macro_use]
extern crate diesel;
use figment::{
providers::{Env, Format, Yaml},
Figment,
};
use rocket::{
fairing::AdHoc,
http::Status,
serde::json::{json, Value},
Build, Request, Rocket,
};
use rocket_sync_db_pools::database;
use serde::{Deserialize, Serialize};
pub mod db;
pub mod errors;
pub mod guards;
pub mod posts;
pub(crate) mod schema;
pub mod sections;
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[database("postgres_rb")]
pub struct RbDbConn(diesel::PgConnection);
#[catch(default)]
fn default_catcher(status: Status, _: &Request) -> Value
{
json!({"status": status.code, "message": ""})
}
embed_migrations!();
async fn run_db_migrations(rocket: Rocket<Build>) -> Result<Rocket<Build>, Rocket<Build>>
{
let conn = RbDbConn::get_one(&rocket)
.await
.expect("database connection");
conn.run(|c| match embedded_migrations::run(c) {
Ok(()) => Ok(rocket),
Err(_) => Err(rocket),
})
.await
}
// async fn create_admin_user<'a>(rocket: &'a Rocket<Orbit>)
// {
// let config = rocket.state::<RbConfig>().expect("RbConfig instance");
// let admin_user = config.admin_user.clone();
// let admin_pass = config.admin_pass.clone();
// let conn = RbDbConn::get_one(&rocket)
// .await
// .expect("database connection");
// conn.run(move |c| {
// admin::create_admin_user(c, &admin_user, &admin_pass).expect("failed to create admin user")
// })
// .await;
// }
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct RbJwtConf
{
key: String,
refresh_token_size: usize,
refresh_token_expire: i64,
}
#[derive(Debug, Deserialize, Serialize)]
pub struct RbConfig
{
admin_user: String,
admin_pass: String,
jwt: RbJwtConf,
}
#[launch]
fn rocket() -> _
{
let figment = Figment::from(rocket::config::Config::default())
.merge(Yaml::file("Rb.yaml").nested())
.merge(Env::prefixed("RB_").global());
// This mut is necessary when the "docs" or "web" feature is enabled, as these further modify
// the instance variable
rocket::custom(figment)
.attach(RbDbConn::fairing())
.attach(AdHoc::try_on_ignite(
"Run database migrations",
run_db_migrations,
))
// .attach(AdHoc::try_on_ignite("Create admin user", create_admin_user))
.attach(AdHoc::config::<RbConfig>())
.register("/", catchers![default_catcher])
.mount("/sections", routes![sections::create_section])
.mount("/posts", routes![posts::get, posts::create])
}
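
The figment setup above reads Rb.yaml with nested profiles and lets RB_-prefixed environment variables override it. A configuration file matching the RbConfig and RbJwtConf structs might look like this; all values are placeholders:

default:
  admin_user: admin
  admin_pass: change-me
  jwt:
    key: some-long-random-secret
    refresh_token_size: 64
    refresh_token_expire: 86400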

src/posts.rs 100644

@@ -0,0 +1,58 @@
use rocket::serde::json::Json;
use crate::{
db,
errors::{RbOption, RbResult},
guards::Admin,
RbDbConn,
};
#[get("/?<offset>&<limit>")]
pub async fn get(conn: RbDbConn, offset: u32, limit: u32) -> RbResult<Json<Vec<db::Post>>>
{
Ok(Json(
conn.run(move |c| db::posts::get(c, offset, limit)).await?,
))
}
#[post("/", data = "<new_post>")]
pub async fn create(
_admin: Admin,
conn: RbDbConn,
new_post: Json<db::NewPost>,
) -> RbResult<Json<db::Post>>
{
Ok(Json(
conn.run(move |c| db::posts::create(c, &new_post.into_inner()))
.await?,
))
}
#[get("/<id>")]
pub async fn find(conn: RbDbConn, id: uuid::Uuid) -> RbOption<Json<db::Post>>
{
Ok(conn
.run(move |c| db::posts::find(c, &id))
.await?
.map(Json))
}
#[patch("/<id>", data = "<patch_post>")]
pub async fn patch(
_admin: Admin,
conn: RbDbConn,
id: uuid::Uuid,
patch_post: Json<db::PatchPost>,
) -> RbResult<Json<db::Post>>
{
Ok(Json(
conn.run(move |c| db::posts::update(c, &id, &patch_post.into_inner()))
.await?,
))
}
#[delete("/<id>")]
pub async fn delete(_admin: Admin, conn: RbDbConn, id: uuid::Uuid) -> RbResult<()>
{
Ok(conn.run(move |c| db::posts::delete(c, &id)).await?)
}

src/schema.rs 100644 (empty file)

src/sections.rs 100644

@@ -0,0 +1,25 @@
//! This module handles management of site sections (aka blogs).
use rocket::serde::json::Json;
use crate::{db, errors::RbResult, guards::Admin, RbDbConn};
/// Route for creating a new section.
///
/// # Arguments
///
/// * `_admin` - guard ensuring the user is an admin
/// * `conn` - guard providing a connection to the database
/// * `new_section` - JSON-encoded NewSection object
#[post("/", data = "<new_section>")]
pub async fn create_section(
_admin: Admin,
conn: RbDbConn,
new_section: Json<db::NewSection>,
) -> RbResult<Json<db::Section>>
{
Ok(Json(
conn.run(move |c| db::sections::create(c, &new_section.into_inner()))
.await?,
))
}
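
The Admin type used as a request guard here comes from the guards module, which is not part of this commit excerpt. Purely to illustrate the Rocket 0.5 mechanism it plugs into (the real guard presumably checks the JWTs set up via the jwt/hmac dependencies), a hypothetical guard could look like this:

use rocket::{
    http::Status,
    request::{FromRequest, Outcome},
    Request,
};

// Hypothetical stand-in for the real guards::Admin type.
pub struct Admin;

#[rocket::async_trait]
impl<'r> FromRequest<'r> for Admin {
    type Error = ();

    async fn from_request(req: &'r Request<'_>) -> Outcome<Self, Self::Error> {
        // The real guard would parse and verify a JWT from the Authorization
        // header; this sketch only checks that the header is present.
        match req.headers().get_one("Authorization") {
            Some(_) => Outcome::Success(Admin),
            None => Outcome::Failure((Status::Unauthorized, ())),
        }
    }
}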