Compare commits

..

1 Commits

57 changed files with 5277 additions and 1826 deletions

View File

@ -1,6 +1,5 @@
*
!.cargo/
!Cargo.lock
!Cargo.toml
!Makefile
@ -10,5 +9,6 @@
!src
!tests
!web
!target/x86_64-unknown-linux-musl/release/rbd
web/node_modules

View File

@ -1,7 +0,0 @@
root = true
[*]
end_of_line = lf
insert_final_newline = false
indent_style = space
indent_size = 4

41
API.md
View File

@ -1,41 +0,0 @@
# API Design
This file describes the API that the software adheres to. All routes are defined under a shared `api` namespace.
`(A)` means the route can only be accessed by an admin user.
## v1
## Authentication
* POST `/auth/login` - generate new JWT & refresh token pair given user credentials
* POST `/auth/refresh` - generate new JWT & refresh token pair given valid refresh token
## Posts
* GET `/posts?<offset>&<limit>` - get list of posts from the default feed given offset & limit
* GET `/posts?<section_id_or_shortname>&<offset>&<limit>` - get list of posts of a specific section
* (A) POST `/posts` - create a new post
* GET `/posts/<id>` - get a specific post
* (A) DELETE `/posts/<id>` - delete a post
* (A) PATCH `/posts/<id>` - patch a post
## Sections
* GET `/sections?<offset>&<limit>` - get list of sections
* GET `/sections/<id_or_shortname>` - get specific section
* (A) POST `/sections` - create a new section
* (A) PATCH `/sections/<id_or_shortname>` - patch a section
* (A) DELETE `/sections/<id_or_shortname>` - delete a section (what happens with posts?)
## Users
* (A) GET `/users?<offset>&<limit>`
* (A) POST `/users`
* (A) GET `/users/<id_or_username>`
* (A) PATCH `/users/<id_or_username>`
* (A) DELETE `/users/<id_or_username>`
## Feeds
WIP

4
Cargo.lock generated
View File

@ -939,6 +939,7 @@ version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ac25eee5a0582f45a67e837e350d784e7003bd29a5f460796772061ca49ffda"
dependencies = [
"pkg-config",
"vcpkg",
]
@ -1109,7 +1110,6 @@ dependencies = [
"tokio-stream",
"tokio-util",
"ubyte",
"uuid",
"version_check",
"yansi",
]
@ -1156,7 +1156,6 @@ dependencies = [
"time 0.2.27",
"tokio",
"uncased",
"uuid",
]
[[package]]
@ -1223,6 +1222,7 @@ dependencies = [
"jwt",
"mimalloc",
"openssl",
"pq-sys",
"rand",
"rocket",
"rocket_sync_db_pools",

View File

@ -8,16 +8,11 @@ edition = "2018"
name = "rbd"
path = "src/main.rs"
[features]
web = []
docs = []
static = ["web", "docs"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# Backend web framework
rocket = { version = "0.5.0-rc.1", features = [ "json", "uuid" ] }
rocket = { version = "0.5.0-rc.1", features = [ "json" ] }
# Used to provide Rocket routes with database connections
rocket_sync_db_pools = { version = "0.1.0-rc.1", default_features = false, features = [ "diesel_postgres_pool" ] }
# Used to (de)serialize JSON
@ -26,7 +21,7 @@ serde = { version = "1.0.127", features = [ "derive" ] }
diesel = { version = "1.4.7", features = ["postgres", "uuidv07", "chrono"] }
diesel_migrations = "1.4.0"
# To properly compile libpq statically
openssl = "0.10.36"
openssl = "*"
# For password hashing & verification
rust-argon2 = "0.8.3"
rand = "0.8.4"
@ -43,6 +38,11 @@ base64 = "0.13.0"
figment = { version = "*", features = [ "yaml" ] }
mimalloc = { version = "0.1.26", default_features = false }
[dependencies.pq-sys]
version = "*"
default-features = false
features = ["pkg-config"]
[profile.release]
lto = "fat"
panic = "abort"

3
Cross.toml 100644
View File

@ -0,0 +1,3 @@
# Tell `cross` to build the x86_64 musl target inside the locally-built
# image (produced by `make builder` from Dockerfile.build) instead of the
# default rustembedded/cross image.
[target.x86_64-unknown-linux-musl]
image = "rusty-builder:x86_64-unknown-linux"

View File

@ -1,66 +1,4 @@
# Build frontend files
FROM node:16 AS fbuilder
FROM scratch
WORKDIR /usr/src/app
COPY web/ ./
RUN yarn install && \
yarn build
# Build backend & backend docs
FROM rust:1.55-alpine AS builder
ARG DI_VER=1.2.5
# ENV OPENSSL_STATIC=1 \
# PQ_LIB_STATIC=1
RUN apk update && \
apk add --no-cache \
postgresql \
postgresql-dev \
openssl-dev \
build-base
WORKDIR /usr/src/app
# Build backend
COPY .cargo/ ./.cargo
COPY src/ ./src
COPY migrations/ ./migrations
COPY Cargo.toml Cargo.lock ./
RUN cargo build --release && \
cargo doc --no-deps
# Build dumb-init
RUN curl -sSL "https://github.com/Yelp/dumb-init/archive/refs/tags/v$DI_VER.tar.gz" | \
tar -xzf - && \
cd "dumb-init-$DI_VER" && \
make build && \
mv dumb-init ..
FROM alpine:3.14.2
RUN mkdir -p /var/www/html
COPY --from=fbuilder /usr/src/app/dist /var/www/html/site
COPY --from=builder /usr/src/app/out/target/doc /var/www/html/doc
COPY --from=builder /usr/src/app/out/target/release/rbd /usr/bin/rbd
COPY --from=builder /usr/src/app/dumb-init /usr/bin/dumb-init
ENTRYPOINT [ "dumb-init", "--" ]
CMD [ "/usr/bin/rbd" ]
# RUN apt update && \
# apt install -y --no-install-recommends \
# musl-dev \
# musl-tools \
# libpq-dev \
# libssl-dev && \
# rustup target add x86_64-unknown-linux-musl && \
# mkdir "$PREFIX" && \
# echo "$PREFIX/lib" >> /etc/ld-musl-x86_64.path
COPY target/x86_64-unknown-linux-musl/release/rbd /
RUN ["/rbd"]

68
Dockerfile.build 100644
View File

@ -0,0 +1,68 @@
# Cross-compile for a specific target triplet (x86_64 by default)
# NOTE: an ARG declared before FROM is only in scope for the FROM line itself.
ARG TARGET
FROM rustembedded/cross:${TARGET}-musl
# Re-declare the ARGs this build stage uses; without these re-declarations,
# $TARGET and $CORES would expand to empty strings everywhere below.
ARG TARGET
ARG CORES=4
# Create download directory
RUN mkdir /src
### Environment
# Configure compiler
ENV MAKE="make -j$CORES" \
CC="musl-gcc -fPIE -pie -static" \
PREFIX=/usr/local/x86_64-linux-musl \
RUSTFLAGS="-C relocation-model=static"
# Configure paths
ENV PATH=$PREFIX/bin:$PATH \
C_INCLUDE_PATH=$PREFIX/include \
LD_LIBRARY_PATH=$PREFIX/lib
# Configure pkg-config
ENV PKG_CONFIG_PATH=$PREFIX/lib/pkgconfig \
PKG_CONFIG_ALLOW_CROSS=true \
PKG_CONFIG_ALL_STATIC=true
# Install development libraries
RUN apt-get update && apt-get install -y \
bison \
flex \
musl-dev \
musl-tools
### OpenSSL
ARG SSL_VER
# Download OpenSSL
RUN curl -sSL "https://www.openssl.org/source/openssl-${SSL_VER}.tar.gz" | tar -xzC /src
# Build OpenSSL statically
RUN cd "/src/openssl-${SSL_VER}" \
&& ./Configure \
no-shared \
no-zlib \
-fPIC \
--prefix=$PREFIX \
--openssldir=$PREFIX/ssl \
linux-x86_64 \
&& $MAKE depend \
&& $MAKE \
&& $MAKE install
# Configure OpenSSL crate
ENV OPENSSL_STATIC=true \
OPENSSL_NO_VENDOR=true
### PostgreSQL
ARG PQ_VER
# Download PostgreSQL
RUN curl -sSL "https://ftp.postgresql.org/pub/source/v${PQ_VER}/postgresql-${PQ_VER}.tar.gz" | tar -xzC /src
# Build libpq & pg_config statically
# NOTE(review): --host receives "$TARGET" without the "-musl" suffix used in
# the image tag — confirm this is the intended configure triplet.
RUN cd "/src/postgresql-${PQ_VER}" \
&& CPPFLAGS=-I$PREFIX/include LDFLAGS="-L$PREFIX/lib" \
./configure \
--with-openssl \
--without-readline \
--without-zlib \
--prefix=$PREFIX \
--host=$TARGET \
&& ${MAKE} -C src/interfaces/libpq all-static-lib \
&& ${MAKE} -C src/interfaces/libpq install-lib-pc \
&& ${MAKE} -C src/interfaces/libpq install-lib-static \
&& ${MAKE} -C src/bin/pg_config \
&& ${MAKE} -C src/bin/pg_config install

139
Makefile
View File

@ -6,133 +6,36 @@ SSL_VER ?= 1.1.1k
# Dumb-init version
DI_VER ?= 1.2.5
# Compilation target triplet
# Supported targets: https://github.com/rust-embedded/cross#supported-targets
TARGET = x86_64-unknown-linux
CORES != nproc
# =====AUTO-GENERATED VARIABLES=====
# This is such a lovely oneliner
# NOTE: $(dir PATH) outputs a trailing slash
OUT_DIR ?= $(dir $(abspath $(lastword $(MAKEFILE_LIST))))out
PREFIX := $(OUT_DIR)/prefix
OPENSSL_DIR := $(OUT_DIR)/openssl-$(SSL_VER)
PQ_DIR := $(OUT_DIR)/postgresql-$(PQ_VER)
DI_DIR := $(OUT_DIR)/dumb-init-$(DI_VER)
# Used in various make calls to specify parallel recipes
CORES != nproc
# =====ENVIRONMENT VARIABLES=====
export CC := musl-gcc -fPIC -pie -static
export LD_LIBRARY_PATH := $(PREFIX)
export PKG_CONFIG_PATH := /usr/local/lib/pkgconfig
export PATH := /usr/local/bin:/root/.cargo/bin:$(PATH)
# TODO check for header files (openssl-dev, libpq-dev) both for Arch & Ubuntu
# Create the out dir
$(shell mkdir -p "$(PREFIX)")
# =====BUILDING THE STATIC BINARY=====
.PHONY: all
all: build
all: build-debug
.PHONY: builder
builder:
docker build \
-t rusty-builder:latest - < docker/Dockerfile.builder
--build-arg TARGET=$(TARGET) \
--build-arg CORES=$(CORES) \
--build-arg SSL_VER=$(SSL_VER) \
--build-arg PQ_VER=$(PQ_VER) \
--tag rusty-builder:$(TARGET) \
--file Dockerfile.build \
.
.PHONY: docker
docker: builder
docker run \
--rm \
-v "$$PWD:/usr/src" \
--workdir "/usr/src" \
-it \
rusty-builder:latest \
bash build.sh
.PHONY: build-debug
build-debug: builder
cross build --target "$(TARGET)-musl"
.PHONY: run
run: builder
docker-compose up -d --build && docker-compose logs -f app
# libpq builds openssl as a dependency
.PHONY: build
build: libpq
.PHONY: clean
clean: clean-openssl clean-libpq clean-di
@ echo "Note: this only cleans the C dependencies, not the Cargo cache."
rm -rf "$(PREFIX)"
# This is used inside the Dockerfile
.PHONY: pathfile
pathfile:
echo "$(PREFIX)/lib" >> /etc/ld-musl-x86_64.path
## =====OPENSSL=====
# Download the source code & configure the project
$(OPENSSL_DIR)/Configure:
curl -sSL "https://www.openssl.org/source/openssl-$(SSL_VER).tar.gz" | \
tar -xzC "$(OUT_DIR)"
cd "$(OPENSSL_DIR)" && \
CC="$(CC) -idirafter /usr/include -idirafter /usr/include/x86_64-linux-gnu/" ./Configure \
no-zlib \
no-shared \
--prefix="$(PREFIX)" \
--openssldir="$(PREFIX)/ssl" \
linux-x86_64
# Build OpenSSL
.PHONY: openssl
openssl: $(OPENSSL_DIR)/Configure
cd "$(OPENSSL_DIR)" && env C_INCLUDE_PATH="$(PREFIX)/include" $(MAKE) depend 2> /dev/null
cd "$(OPENSSL_DIR)" && $(MAKE) -j$(CORES)
cd "$(OPENSSL_DIR)" && $(MAKE) install_sw
.PHONY: clean-openssl
clean-openssl:
rm -rf "$(OPENSSL_DIR)"
## =====LIBPQ=====
# Download the source code & configure the project
$(PQ_DIR)/configure:
curl -sSL "https://ftp.postgresql.org/pub/source/v$(PQ_VER)/postgresql-$(PQ_VER).tar.gz" | \
tar -xzC "$(OUT_DIR)"
cd "$(PQ_DIR)" && \
LDFLAGS="-L$(PREFIX)/lib" CFLAGS="-I$(PREFIX)/include" ./configure \
--without-readline \
--with-openssl \
--without-zlib \
--prefix="$(PREFIX)" \
--host=x86_64-unknown-linux-musl
.PHONY: libpq
libpq: openssl $(PQ_DIR)/configure
cd "$(PQ_DIR)/src/interfaces/libpq" && $(MAKE) -j$(CORES) all-static-lib
cd "$(PQ_DIR)/src/interfaces/libpq" && $(MAKE) install install-lib-static
cd "$(PQ_DIR)/src/bin/pg_config" && $(MAKE) -j$(CORES)
cd "$(PQ_DIR)/src/bin/pg_config" && $(MAKE) install
.PHONY: clean-libpq
clean-libpq:
rm -rf "$(PQ_DIR)"
# =====DUMB-INIT=====
# NOTE: this is only used inside the Docker image, but it's here for completeness.
$(DI_DIR)/Makefile:
curl -sSL "https://github.com/Yelp/dumb-init/archive/refs/tags/v$(DI_VER).tar.gz" | \
tar -C "$(OUT_DIR)" -xz
.PHONY: di
di: $(DI_DIR)/Makefile
make -C "$(DI_DIR)" build
.PHONY: clean-di
clean-di:
rm -rf "$(DI_DIR)"
.PHONY: release
build-release: builder
cross build --target "$(TARGET)-musl" --release
# ====UTILITIES FOR DEVELOPMENT=====
## The tests require a database, so we run them like this

View File

@ -1,34 +0,0 @@
# Roadmap
This file describes a general plan for the software, divided into versions.
## v0.1.0
### Summary
* Version 1 of backend API
* Read-only frontend (no login)
### Description
Version 0.1.0 will be the first deployable version. The goal is to replace my
current blog with an instance of v0.1.0. This includes developing a (basic) SDK
(probably in Python) that allows me to interact with my instance, or rather
just post stuff.
## v1.0.0
### Summary
* First stable release
* Base for all other releases
### Description
For me, a 1.0 release indicates that the project is stable and can be actively
and efficiently worked on. I basically just want to iron out any wrinkles from
the 0.1 release, so that I have a solid base to develop all other features on.
This will also allow me to better combine the development of this project with
my studies, as it can be properly planned and managed whenever I have the time.
Any other features won't appear in this file. Rather, they will be managed
using the milestones & issues on my Gitea instance.

12
Rb.yaml
View File

@ -23,21 +23,13 @@ debug:
url: "postgres://rb:rb@localhost:5432/rb"
release:
keep_alive: 5
read_timeout: 5
write_timeout: 5
log_level: "normal"
limits:
forms: 32768
admin_user: "admin"
admin_pass: "password"
jwt:
key: "secret"
refresh_token_size: 64
# Just 5 seconds for debugging
refresh_token_expire: 60
refresh_token_expire: 86400
databases:
postgres_rb:
url: "postgres://rb:rb@localhost:5432/rb"
url: "postgres://rb:rb@db:5432/rb"

7
build.rs 100644
View File

@ -0,0 +1,7 @@
// Cargo build script: emit linker directives so the final binary statically
// links libc, libdl, OpenSSL and libpq (the static libraries are built in the
// cross-compilation image, see Dockerfile.build).
// NOTE(review): libssl normally also requires libcrypto — confirm that the
// link succeeds without an explicit `static=crypto` directive.
fn main() {
println!("cargo:rustc-link-lib=static=c");
println!("cargo:rustc-link-lib=static=dl");
println!("cargo:rustc-link-lib=static=ssl");
println!("cargo:rustc-link-lib=static=pq");
}

14
build.sh 100644
View File

@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Entry point run inside the builder container (see the `docker` target in the
# Makefile): builds the project by delegating to make.
# Abort on the first failing command.
set -e
# Install build dependencies
# NOTE(review): kept commented out — the builder image appears to provide
# these packages already; confirm before deleting.
# apt update
# apt install \
# -y --no-install-recommends \
# musl-dev \
# musl-tools \
# libssl-dev \
# libpq-dev
make

View File

@ -2,10 +2,11 @@
version: '3'
services:
app:
build: '.'
db:
image: 'postgres:13-alpine'
restart: 'always'
environment:
- 'POSTGRES_DB=rb'
- 'POSTGRES_USER=rb'
@ -14,6 +15,5 @@ services:
- '5432:5432'
volumes:
- 'db-data:/var/lib/postgresql/data'
volumes:
db-data:

View File

@ -1,28 +0,0 @@
# vim: ft=dockerfile
FROM rust:1.54
ENV PREFIX="/usr/src/out/prefix" \
CC="musl-gcc -fPIC -pie -static" \
LD_LIBRARY_PATH="$PREFIX" \
PKG_CONFIG_PATH="/usr/local/lib/pkgconfig" \
PATH="/usr/local/bin:/root/.cargo/bin:$PATH"
WORKDIR /usr/src/app
RUN groupadd -g 1000 builder && \
useradd -u 1000 -g 1000 builder && \
mkdir -p "$PREFIX" && \
chown -R builder:builder /usr/src/app && \
apt update && \
apt install -y --no-install-recommends \
musl-dev \
musl-tools \
libpq-dev \
libssl-dev && \
rustup target add x86_64-unknown-linux-musl && \
echo "$PREFIX/lib" >> /etc/ld-musl-x86_64.path
USER builder
CMD ["cargo", "test"]

View File

@ -1,7 +0,0 @@
-- This file should undo anything in `up.sql`
-- Drop the triggers before the function they execute, then the tables
-- (posts first, since it references sections).
drop trigger insert_enforce_post_titles on posts;
drop trigger update_enforce_post_titles on posts;
drop function enforce_post_titles;
drop table posts cascade;
drop table sections cascade;

View File

@ -1,58 +0,0 @@
-- Initial schema: sections & posts, plus a trigger enforcing that a post has
-- a title exactly when its section's `has_titles` flag is set.
-- NOTE(review): gen_random_uuid() requires PostgreSQL 13+ (or the pgcrypto
-- extension on older versions) — confirm the deployment target.
create table sections (
id uuid DEFAULT gen_random_uuid() PRIMARY KEY,
-- Title of the section
title varchar(255) UNIQUE NOT NULL,
-- Name to use when routing (this just makes for prettier URLs)
shortname varchar(32) UNIQUE NOT NULL,
-- Optional description of the section
description text,
-- Whether to show the section in the default list on the homepage
is_default boolean NOT NULL DEFAULT false,
-- Whether the posts should contain titles or not
has_titles boolean NOT NULL DEFAULT true
);
create table posts (
id uuid DEFAULT gen_random_uuid() PRIMARY KEY,
-- Deleting a section also deletes all of its posts
section_id uuid NOT NULL REFERENCES sections(id) ON DELETE CASCADE,
-- Title of the post
-- Whether this is NULL or not is enforced using the enforce_post_titles trigger
title varchar(255),
-- Post date, defaults to today
publish_date date NOT NULL DEFAULT now(),
-- Content of the post
content text NOT NULL
);
-- Rejects an insert/update whose title presence contradicts the section's
-- `has_titles` setting.
create function enforce_post_titles() returns trigger as $enforce_post_titles$
begin
-- Check for a wrongfully null title
if new.title is null and exists (
select 1 from sections where id = new.section_id and has_titles
) then
raise exception 'Expected a post title, but got null.';
end if;
if new.title is not null and exists (
select 1 from sections where id = new.section_id and not has_titles
) then
raise exception 'Expected an empty post title, but got a value.';
end if;
return new;
end;
$enforce_post_titles$ language plpgsql;
create trigger insert_enforce_post_titles
before insert on posts
for each row
execute function enforce_post_titles();
-- Only re-check on updates that actually change the title
create trigger update_enforce_post_titles
before update of title on posts
for each row
when (old.title is distinct from new.title)
execute function enforce_post_titles();

View File

@ -36,7 +36,7 @@ license_template_path = ""
make_backup = false
match_arm_blocks = true
match_arm_leading_pipes = "Never"
match_block_trailing_comma = true
match_block_trailing_comma = false
max_width = 100
merge_derives = true
newline_style = "Auto"

View File

@ -10,11 +10,11 @@ use crate::{
RbDbConn,
};
// #[get("/users")]
// pub async fn get_users(_admin: Admin, conn: RbDbConn) -> RbResult<Json<Vec<db::User>>>
// {
// Ok(Json(conn.run(|c| db::users::all(c)).await?))
// }
#[get("/users")]
pub async fn get_users(_admin: Admin, conn: RbDbConn) -> RbResult<Json<Vec<db::User>>>
{
Ok(Json(conn.run(|c| db::users::all(c)).await?))
}
#[post("/users", data = "<user>")]
pub async fn create_user(_admin: Admin, conn: RbDbConn, user: Json<db::NewUser>) -> RbResult<()>
@ -48,11 +48,8 @@ pub fn create_admin_user(conn: &PgConnection, username: &str, password: &str) ->
admin: true,
};
if db::users::find_by_username(conn, username).is_ok() {
db::users::create(conn, &new_user);
}
// db::users::create_or_update(conn, &new_user)
// .map_err(|_| RbError::Custom("Couldn't create admin."))?;
db::users::create_or_update(conn, &new_user)
.map_err(|_| RbError::Custom("Couldn't create admin."))?;
Ok(true)
}

View File

@ -1,12 +1,5 @@
//! The db module contains all Diesel-related logic. This is to prevent the various Diesel imports
//! from polluting other modules' namespaces.
pub mod posts;
pub mod sections;
pub mod tokens;
pub mod users;
pub use posts::{NewPost, PatchPost, Post};
pub use sections::{NewSection, Section};
pub use tokens::{NewRefreshToken, RefreshToken};
pub use users::{NewUser, User};

View File

@ -1,85 +0,0 @@
//! Database operations for the `posts` table.
use chrono::NaiveDate;
use diesel::{insert_into, prelude::*, Insertable, PgConnection, Queryable};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{
errors::{RbError, RbOption, RbResult},
schema::{posts, posts::dsl::*},
};
/// A post row as queried from the database.
#[derive(Queryable, Serialize)]
pub struct Post
{
pub id: Uuid,
pub section_id: Uuid,
// None only for posts in sections with has_titles = false (enforced by the
// enforce_post_titles database trigger).
pub title: Option<String>,
pub publish_date: NaiveDate,
pub content: String,
}
/// Payload for inserting a new post.
#[derive(Deserialize, Insertable)]
#[table_name = "posts"]
#[serde(rename_all = "camelCase")]
pub struct NewPost
{
pub section_id: Uuid,
pub title: Option<String>,
pub publish_date: NaiveDate,
pub content: String,
}
/// Partial (PATCH) update of an existing post; absent fields are untouched.
#[derive(Deserialize, AsChangeset)]
#[table_name = "posts"]
pub struct PatchPost
{
pub section_id: Option<Uuid>,
pub title: Option<String>,
pub publish_date: Option<NaiveDate>,
pub content: Option<String>,
}
/// Returns a page of posts given an offset & limit.
///
/// NOTE: trailing underscores on argument names avoid clashing with Diesel's
/// imported dsl names.
pub fn get(conn: &PgConnection, offset_: u32, limit_: u32) -> RbResult<Vec<Post>>
{
Ok(posts
.offset(offset_.into())
.limit(limit_.into())
.load(conn)
.map_err(|_| RbError::DbError("Couldn't query posts."))?)
}
/// Looks up a single post by ID; returns `Ok(None)` when it doesn't exist.
pub fn find(conn: &PgConnection, id_: &Uuid) -> RbOption<Post>
{
match posts.find(id_).first(conn) {
Ok(val) => Ok(Some(val)),
Err(diesel::NotFound) => Ok(None),
_ => Err(RbError::DbError("Couldn't find post.")),
}
}
/// Inserts a new post & returns the stored row.
pub fn create(conn: &PgConnection, new_post: &NewPost) -> RbResult<Post>
{
Ok(insert_into(posts)
.values(new_post)
.get_result(conn)
.map_err(|_| RbError::DbError("Couldn't insert post."))?)
// TODO check for conflict?
}
/// Applies a partial update to the post with the given ID & returns the
/// updated row.
pub fn update(conn: &PgConnection, post_id: &Uuid, patch_post: &PatchPost) -> RbResult<Post>
{
Ok(diesel::update(posts.filter(id.eq(post_id)))
.set(patch_post)
.get_result(conn)
.map_err(|_| RbError::DbError("Couldn't update post."))?)
}
/// Deletes the post with the given ID.
pub fn delete(conn: &PgConnection, post_id: &Uuid) -> RbResult<()>
{
diesel::delete(posts.filter(id.eq(post_id)))
.execute(conn)
.map_err(|_| RbError::DbError("Couldn't delete post."))?;
Ok(())
}

View File

@ -1,79 +0,0 @@
//! Database operations for the `sections` table.
use diesel::{insert_into, prelude::*, Insertable, PgConnection, Queryable};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{
errors::{RbError, RbResult},
schema::{sections, sections::dsl::*},
};
/// A section row as queried from the database.
#[derive(Queryable, Serialize)]
pub struct Section
{
pub id: Uuid,
pub title: String,
pub shortname: String,
pub description: Option<String>,
pub is_default: bool,
pub has_titles: bool,
}
/// Payload for inserting a new section; optional flags fall back to the
/// column defaults.
#[derive(Deserialize, Insertable)]
#[table_name = "sections"]
#[serde(rename_all = "camelCase")]
pub struct NewSection
{
title: String,
pub shortname: String,
description: Option<String>,
is_default: Option<bool>,
has_titles: Option<bool>,
}
/// Partial (PATCH) update of an existing section; absent fields are untouched.
#[derive(Deserialize, AsChangeset)]
#[table_name = "sections"]
#[serde(rename_all = "camelCase")]
pub struct PatchSection
{
title: Option<String>,
shortname: Option<String>,
description: Option<String>,
is_default: Option<bool>,
has_titles: Option<bool>,
}
/// Returns a page of sections given an offset & limit.
///
/// NOTE: trailing underscores on argument names avoid clashing with Diesel's
/// imported dsl names.
pub fn get(conn: &PgConnection, offset_: u32, limit_: u32) -> RbResult<Vec<Section>>
{
Ok(sections
.offset(offset_.into())
.limit(limit_.into())
.load(conn)
.map_err(|_| RbError::DbError("Couldn't query sections."))?)
}
/// Inserts a new section & returns the stored row.
// NOTE(review): the `new_post` argument name looks copied from posts.rs —
// consider renaming to `new_section`.
pub fn create(conn: &PgConnection, new_post: &NewSection) -> RbResult<Section>
{
Ok(insert_into(sections)
.values(new_post)
.get_result(conn)
.map_err(|_| RbError::DbError("Couldn't insert section."))?)
// TODO check for conflict?
}
/// Applies a partial update to the section with the given ID & returns the
/// updated row.
// NOTE(review): `post_id`/`patch_post` names also copied from posts.rs.
pub fn update(conn: &PgConnection, post_id: &Uuid, patch_post: &PatchSection) -> RbResult<Section>
{
Ok(diesel::update(sections.filter(id.eq(post_id)))
.set(patch_post)
.get_result(conn)
.map_err(|_| RbError::DbError("Couldn't update section."))?)
}
/// Deletes the section with the given ID (its posts cascade-delete via the
/// foreign key).
pub fn delete(conn: &PgConnection, post_id: &Uuid) -> RbResult<()>
{
diesel::delete(sections.filter(id.eq(post_id)))
.execute(conn)
.map_err(|_| RbError::DbError("Couldn't delete section."))?;
Ok(())
}

View File

@ -1,7 +1,4 @@
//! Handles refresh token-related database operations.
use diesel::{insert_into, prelude::*, Insertable, PgConnection, Queryable};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::{
@ -9,8 +6,7 @@ use crate::{
schema::{refresh_tokens, refresh_tokens::dsl::*},
};
/// A refresh token as stored in the database
#[derive(Queryable, Serialize)]
#[derive(Queryable)]
pub struct RefreshToken
{
pub token: Vec<u8>,
@ -19,8 +15,7 @@ pub struct RefreshToken
pub last_used_at: Option<chrono::NaiveDateTime>,
}
/// A new refresh token to be added into the database
#[derive(Deserialize, Insertable)]
#[derive(Insertable)]
#[table_name = "refresh_tokens"]
pub struct NewRefreshToken
{
@ -29,84 +24,39 @@ pub struct NewRefreshToken
pub expires_at: chrono::NaiveDateTime,
}
#[derive(Deserialize, AsChangeset)]
#[table_name = "refresh_tokens"]
pub struct PatchRefreshToken
pub fn all(conn: &PgConnection) -> RbResult<Vec<RefreshToken>>
{
pub expires_at: Option<chrono::NaiveDateTime>,
pub last_used_at: Option<chrono::NaiveDateTime>,
refresh_tokens
.load::<RefreshToken>(conn)
.map_err(|_| RbError::DbError("Couldn't get all refresh tokens."))
}
pub fn get(conn: &PgConnection, offset_: u32, limit_: u32) -> RbResult<Vec<RefreshToken>>
pub fn create(conn: &PgConnection, new_refresh_token: &NewRefreshToken) -> RbResult<()>
{
Ok(refresh_tokens
.offset(offset_.into())
.limit(limit_.into())
.load(conn)
.map_err(|_| RbError::DbError("Couldn't query tokens."))?)
}
pub fn create(conn: &PgConnection, new_token: &NewRefreshToken) -> RbResult<RefreshToken>
{
Ok(insert_into(refresh_tokens)
.values(new_token)
.get_result(conn)
.map_err(|_| RbError::DbError("Couldn't insert refresh token."))?)
insert_into(refresh_tokens)
.values(new_refresh_token)
.execute(conn)
.map_err(|_| RbError::Custom("Couldn't insert refresh token."))?;
// TODO check for conflict?
}
pub fn update(
conn: &PgConnection,
token_: &[u8],
patch_token: &PatchRefreshToken,
) -> RbResult<RefreshToken>
{
Ok(diesel::update(refresh_tokens.filter(token.eq(token_)))
.set(patch_token)
.get_result(conn)
.map_err(|_| RbError::DbError("Couldn't update token."))?)
}
pub fn delete(conn: &PgConnection, token_: &[u8]) -> RbResult<()>
{
diesel::delete(refresh_tokens.filter(token.eq(token_)))
.execute(conn)
.map_err(|_| RbError::DbError("Couldn't delete token."))?;
Ok(())
}
/// Returns the token & user data associated with the given refresh token value.
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `token_val` - token value to search for
pub fn find_with_user(
conn: &PgConnection,
token_: &[u8],
token_val: &[u8],
) -> Option<(RefreshToken, super::users::User)>
{
// TODO actually check for errors here
refresh_tokens
.inner_join(crate::schema::users::dsl::users)
.filter(token.eq(token_))
.filter(token.eq(token_val))
.first::<(RefreshToken, super::users::User)>(conn)
.map_err(|_| RbError::DbError("Couldn't get refresh token & user."))
.map_err(|_| RbError::Custom("Couldn't get refresh token & user."))
.ok()
}
/// Updates a token's `last_used_at` column value.
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `token_` - value of the refresh token to update
/// * `last_used_at_` - date value to update column with
///
/// **NOTE**: argument names use trailing underscores as to not conflict with Diesel's imported dsl
/// names.
pub fn update_last_used_at(
conn: &PgConnection,
token_: &[u8],

View File

@ -18,7 +18,7 @@ pub struct User
pub admin: bool,
}
#[derive(Insertable, Deserialize)]
#[derive(Insertable, AsChangeset, Deserialize)]
#[table_name = "users"]
pub struct NewUser
{
@ -27,22 +27,11 @@ pub struct NewUser
pub admin: bool,
}
#[derive(Deserialize, AsChangeset)]
#[table_name = "users"]
#[serde(rename_all = "camelCase")]
pub struct PatchSection
pub fn all(conn: &PgConnection) -> RbResult<Vec<User>>
{
username: Option<String>,
admin: Option<bool>,
}
pub fn get(conn: &PgConnection, offset_: u32, limit_: u32) -> RbResult<Vec<User>>
{
Ok(users
.offset(offset_.into())
.limit(limit_.into())
.load(conn)
.map_err(|_| RbError::DbError("Couldn't query users."))?)
users
.load::<User>(conn)
.map_err(|_| RbError::DbError("Couldn't get all users."))
}
pub fn find(conn: &PgConnection, user_id: Uuid) -> Option<User>
@ -58,12 +47,6 @@ pub fn find_by_username(conn: &PgConnection, username_: &str) -> RbResult<User>
.map_err(|_| RbError::DbError("Couldn't find users by username."))?)
}
/// Insert a new user into the database
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `new_user` - user to insert
pub fn create(conn: &PgConnection, new_user: &NewUser) -> RbResult<()>
{
let count = diesel::insert_into(users)
@ -78,31 +61,19 @@ pub fn create(conn: &PgConnection, new_user: &NewUser) -> RbResult<()>
Ok(())
}
/// Either create a new user or update an existing one on conflict.
///
/// # Arguments
///
/// * `conn` - database connection to use
/// * `new_user` - user to insert/update
// pub fn create_or_update(conn: &PgConnection, new_user: &NewUser) -> RbResult<()>
// {
// diesel::insert_into(users)
// .values(new_user)
// .on_conflict(username)
// .do_update()
// .set(new_user)
// .execute(conn)
// .map_err(|_| RbError::DbError("Couldn't create or update user."))?;
pub fn create_or_update(conn: &PgConnection, new_user: &NewUser) -> RbResult<()>
{
diesel::insert_into(users)
.values(new_user)
.on_conflict(username)
.do_update()
.set(new_user)
.execute(conn)
.map_err(|_| RbError::DbError("Couldn't create or update user."))?;
// Ok(())
// }
Ok(())
}
/// Delete the user with the given ID.
///
/// # Arguments
///
/// `conn` - database connection to use
/// `user_id` - ID of user to delete
pub fn delete(conn: &PgConnection, user_id: Uuid) -> RbResult<()>
{
diesel::delete(users.filter(id.eq(user_id)))
@ -112,14 +83,6 @@ pub fn delete(conn: &PgConnection, user_id: Uuid) -> RbResult<()>
Ok(())
}
/// Block a user given an ID.
/// In practice, this means updating the user's entry so that the `blocked` column is set to
/// `true`.
///
/// # Arguments
///
/// `conn` - database connection to use
/// `user_id` - ID of user to block
pub fn block(conn: &PgConnection, user_id: Uuid) -> RbResult<()>
{
diesel::update(users.filter(id.eq(user_id)))

View File

@ -61,7 +61,7 @@ impl RbError
RbError::AuthInvalidRefreshToken => "This refresh token is not valid.",
RbError::AuthDuplicateRefreshToken => {
"This refresh token has already been used. The user has been blocked."
},
}
RbError::AuthMissingHeader => "Missing Authorization header.",
RbError::UMDuplicateUser => "This user already exists.",
@ -87,8 +87,4 @@ impl<'r> Responder<'r, 'static> for RbError
}
}
/// Type alias for results that can return an RbError
pub type RbResult<T> = std::result::Result<T, RbError>;
/// Type alias for optional results that can fail & return an RbError
pub type RbOption<T> = RbResult<Option<T>>;

View File

@ -10,7 +10,7 @@ use sha2::Sha256;
use crate::{auth::jwt::Claims, errors::RbError, RbConfig};
/// Extracts an "Authorization: Bearer" string from the headers.
/// Extracts a "Authorization: Bearer" string from the headers.
pub struct Bearer<'a>(&'a str);
#[rocket::async_trait]
@ -22,18 +22,21 @@ impl<'r> FromRequest<'r> for Bearer<'r>
{
// If the header isn't present, just forward to the next route
let header = match req.headers().get_one("Authorization") {
None => return Outcome::Forward(()),
None => return Outcome::Failure((Status::BadRequest, Self::Error::AuthMissingHeader)),
Some(val) => val,
};
if header.starts_with("Bearer ") {
match header.get(7..) {
Some(s) => Outcome::Success(Self(s)),
None => Outcome::Failure((Status::Unauthorized, Self::Error::AuthUnauthorized)),
}
} else {
Outcome::Forward(())
if !header.starts_with("Bearer ") {
return Outcome::Forward(());
}
// Extract the jwt token from the header
let auth_string = match header.get(7..) {
Some(s) => s,
None => return Outcome::Failure((Status::Unauthorized, Self::Error::AuthUnauthorized)),
};
Outcome::Success(Self(auth_string))
}
}
@ -60,16 +63,17 @@ impl<'r> FromRequest<'r> for Jwt
Status::InternalServerError,
Self::Error::Custom("Failed to do Hmac thing."),
))
},
}
};
// Verify token using key
match bearer.verify_with_key(&key) {
Ok(claims) => Outcome::Success(Self(claims)),
let claims: Claims = match bearer.verify_with_key(&key) {
Ok(claims) => claims,
Err(_) => {
return Outcome::Failure((Status::Unauthorized, Self::Error::AuthUnauthorized))
},
}
}
};
Outcome::Success(Self(claims))
}
}
@ -87,10 +91,10 @@ impl<'r> FromRequest<'r> for User
// Verify key hasn't yet expired
if chrono::Utc::now().timestamp() > claims.exp {
Outcome::Failure((Status::Forbidden, Self::Error::AuthTokenExpired))
} else {
Outcome::Success(Self(claims))
return Outcome::Failure((Status::Forbidden, Self::Error::AuthTokenExpired));
}
Outcome::Success(Self(claims))
}
}

View File

@ -2,23 +2,21 @@
// compilation succeeds in the release Docker image.
extern crate openssl;
#[macro_use]
extern crate rocket;
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
#[macro_use]
extern crate diesel;
extern crate rocket;
use figment::{
providers::{Env, Format, Yaml},
Figment,
};
#[cfg(any(feature = "web", feature = "docs"))]
use rocket::fs;
use rocket::{
fairing::AdHoc,
http::Status,
serde::json::{json, Value},
Build, Orbit, Request, Rocket,
Build, Request, Rocket, Orbit,
};
use rocket_sync_db_pools::database;
use serde::{Deserialize, Serialize};
@ -28,9 +26,7 @@ pub mod auth;
pub mod db;
pub mod errors;
pub mod guards;
pub mod posts;
pub(crate) mod schema;
pub mod sections;
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
@ -68,7 +64,8 @@ async fn create_admin_user<'a>(rocket: &'a Rocket<Orbit>)
.await
.expect("database connection");
conn.run(move |c| {
admin::create_admin_user(c, &admin_user, &admin_pass).expect("failed to create admin user")
admin::create_admin_user(c, &admin_user, &admin_pass)
.expect("failed to create admin user")
})
.await;
}
@ -96,10 +93,7 @@ fn rocket() -> _
.merge(Yaml::file("Rb.yaml").nested())
.merge(Env::prefixed("RB_").global());
// This mut is necessary when the "docs" or "web" feature is enabled, as these further modify
// the instance variable
#[allow(unused_mut)]
let mut instance = rocket::custom(figment)
rocket::custom(figment)
.attach(RbDbConn::fairing())
.attach(AdHoc::try_on_ignite(
"Run database migrations",
@ -114,33 +108,6 @@ fn rocket() -> _
)
.mount(
"/api/admin",
routes![admin::create_user, admin::get_user_info],
routes![admin::get_users, admin::create_user, admin::get_user_info],
)
.mount("/api/sections", routes![sections::create_section])
.mount("/api/posts", routes![posts::get, posts::create]);
// It's weird that this is allowed, but the line on its own isn't
#[cfg(feature = "web")]
{
instance = instance.mount(
"/",
fs::FileServer::new(
"/var/www/html/web",
fs::Options::Index | fs::Options::NormalizeDirs,
),
);
}
#[cfg(feature = "docs")]
{
instance = instance.mount(
"/docs",
fs::FileServer::new(
"/var/www/html/docs",
fs::Options::Index | fs::Options::NormalizeDirs,
),
);
}
instance
}

View File

@ -1,58 +0,0 @@
use rocket::serde::json::Json;
use crate::{
db,
errors::{RbOption, RbResult},
guards::Admin,
RbDbConn,
};
#[get("/?<offset>&<limit>")]
pub async fn get(conn: RbDbConn, offset: u32, limit: u32) -> RbResult<Json<Vec<db::Post>>>
{
Ok(Json(
conn.run(move |c| db::posts::get(c, offset, limit)).await?,
))
}
#[post("/", data = "<new_post>")]
pub async fn create(
_admin: Admin,
conn: RbDbConn,
new_post: Json<db::NewPost>,
) -> RbResult<Json<db::Post>>
{
Ok(Json(
conn.run(move |c| db::posts::create(c, &new_post.into_inner()))
.await?,
))
}
#[get("/<id>")]
pub async fn find(conn: RbDbConn, id: uuid::Uuid) -> RbOption<Json<db::Post>>
{
Ok(conn
.run(move |c| db::posts::find(c, &id))
.await?
.and_then(|p| Some(Json(p))))
}
#[patch("/<id>", data = "<patch_post>")]
pub async fn patch(
_admin: Admin,
conn: RbDbConn,
id: uuid::Uuid,
patch_post: Json<db::PatchPost>,
) -> RbResult<Json<db::Post>>
{
Ok(Json(
conn.run(move |c| db::posts::update(c, &id, &patch_post.into_inner()))
.await?,
))
}
#[delete("/<id>")]
pub async fn delete(_admin: Admin, conn: RbDbConn, id: uuid::Uuid) -> RbResult<()>
{
Ok(conn.run(move |c| db::posts::delete(c, &id)).await?)
}

View File

@ -1,13 +1,3 @@
table! {
posts (id) {
id -> Uuid,
section_id -> Uuid,
title -> Nullable<Varchar>,
publish_date -> Date,
content -> Text,
}
}
table! {
refresh_tokens (token) {
token -> Bytea,
@ -17,17 +7,6 @@ table! {
}
}
table! {
sections (id) {
id -> Uuid,
title -> Varchar,
shortname -> Varchar,
description -> Nullable<Text>,
is_default -> Bool,
has_titles -> Bool,
}
}
table! {
users (id) {
id -> Uuid,
@ -38,7 +17,6 @@ table! {
}
}
joinable!(posts -> sections (section_id));
joinable!(refresh_tokens -> users (user_id));
allow_tables_to_appear_in_same_query!(posts, refresh_tokens, sections, users,);
allow_tables_to_appear_in_same_query!(refresh_tokens, users,);

View File

@ -1,25 +0,0 @@
//! This module handles management of site sections (aka blogs).
use rocket::serde::json::Json;
use crate::{db, errors::RbResult, guards::Admin, RbDbConn};
/// Route for creating a new section.
///
/// # Arguments
///
/// * `_admin` - guard ensuring user is admin
/// * `conn` - guard providing a connection to the database
/// * `new_section` - Json-encoded NewSection object
#[post("/", data = "<new_section>")]
pub async fn create_section(
_admin: Admin,
conn: RbDbConn,
new_section: Json<db::NewSection>,
) -> RbResult<Json<db::Section>>
{
Ok(Json(
conn.run(move |c| db::sections::create(c, &new_section.into_inner()))
.await?,
))
}

View File

@ -2,7 +2,7 @@ import requests
class RbClient:
def __init__(self, username = "admin", password = "password", base_url = "http://localhost:8000/api"):
def __init__(self, username, password, base_url = "http://localhost:8000/api"):
self.username = username
self.password = password
self.base_url = base_url
@ -17,7 +17,6 @@ class RbClient:
})
if r.status_code != 200:
print(r.text)
raise Exception("Couldn't login")
res = r.json()
@ -57,15 +56,9 @@ class RbClient:
def get(self, url, *args, **kwargs):
return self._request("GET", f"{self.base_url}{url}", *args, **kwargs)
def post(self, url, *args, **kwargs):
return self._request("POST", f"{self.base_url}{url}", *args, **kwargs)
if __name__ == "__main__":
client = RbClient()
client = RbClient("admin", "password")
# print(client.get("/admin/users").json())
client.post("/sections", json={
"title": "this is a title"
})
print(client.get("/admin/users").json())

21
web/.gitignore vendored
View File

@ -1,5 +1,18 @@
node_modules
.DS_Store
# build output
dist
dist-ssr
*.local
# dependencies
node_modules/
.snowpack/
# logs
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# environment variables
.env
.env.production
# macOS-specific files
.DS_Store

2
web/.npmrc 100644
View File

@ -0,0 +1,2 @@
## force pnpm to hoist
shamefully-hoist = true

View File

@ -1,3 +0,0 @@
{
"recommendations": ["johnsoncodehk.volar"]
}

40
web/README.md 100644
View File

@ -0,0 +1,40 @@
# Welcome to [Astro](https://astro.build)
> 🧑‍🚀 **Seasoned astronaut?** Delete this file. Have fun!
## 🚀 Project Structure
Inside of your Astro project, you'll see the following folders and files:
```
/
├── public/
│ ├── robots.txt
│ └── favicon.ico
├── src/
│ ├── components/
│ │ └── Tour.astro
│ └── pages/
│ └── index.astro
└── package.json
```
Astro looks for `.astro` or `.md` files in the `src/pages/` directory. Each page is exposed as a route based on its file name.
There's nothing special about `src/components/`, but that's where we like to put any Astro/React/Vue/Svelte/Preact components.
Any static assets, like images, can be placed in the `public/` directory.
## 🧞 Commands
All commands are run from the root of the project, from a terminal:
| Command | Action |
|:----------------|:--------------------------------------------|
| `npm install` | Installs dependencies |
| `npm start` | Starts local dev server at `localhost:3000` |
| `npm run build` | Build your production site to `./dist/` |
## 👀 Want to learn more?
Feel free to check [our documentation](https://github.com/snowpackjs/astro) or jump into our [Discord server](https://astro.build/chat).

View File

@ -0,0 +1,18 @@
export default {
// projectRoot: '.', // Where to resolve all URLs relative to. Useful if you have a monorepo project.
// pages: './src/pages', // Path to Astro components, pages, and data
// dist: './dist', // When running `astro build`, path to final static output
// public: './public', // A folder of static files Astro will copy to the root. Useful for favicons, images, and other files that dont need processing.
buildOptions: {
// site: 'http://example.com', // Your public domain, e.g.: https://my-site.dev/. Used to generate sitemaps and canonical URLs.
sitemap: true, // Generate sitemap (set to "false" to disable)
},
devOptions: {
// hostname: 'localhost', // The hostname to run the dev server on.
// port: 3000, // The port to run the dev server on.
// tailwindConfig: '', // Path to tailwind.config.js if used, e.g. './tailwind.config.js'
},
renderers: [
"@astrojs/renderer-svelte"
],
};

View File

@ -1,13 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" href="/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Vite App</title>
</head>
<body>
<div id="app"></div>
<script type="module" src="/src/main.ts"></script>
</body>
</html>

View File

@ -1,20 +1,13 @@
{
"name": "rusty-bever",
"version": "0.0.0",
"name": "@example/starter",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "vite",
"build": "vue-tsc --noEmit && vite build",
"serve": "vite preview"
},
"dependencies": {
"vue": "^3.2.16"
"start": "astro dev",
"build": "astro build"
},
"devDependencies": {
"@types/node": "^16.10.3",
"@vitejs/plugin-vue": "^1.9.3",
"miragejs": "^0.1.42",
"typescript": "^4.4.3",
"vite": "^2.6.4",
"vue-tsc": "^0.3.0"
"astro": "0.19.0-next.2",
"@astrojs/renderer-svelte": "^0.1.1"
}
}

View File

@ -0,0 +1,12 @@
<svg width="193" height="256" fill="none" xmlns="http://www.w3.org/2000/svg">
<style>
#flame { fill: #FF5D01; }
#a { fill: #000014; }
@media (prefers-color-scheme: dark) {
#a { fill: #fff; }
}
</style>
<path id="a" fill-rule="evenodd" clip-rule="evenodd" d="M131.496 18.929c1.943 2.413 2.935 5.67 4.917 12.181l43.309 142.27a180.277 180.277 0 00-51.778-17.53L99.746 60.56a3.67 3.67 0 00-7.042.01l-27.857 95.232a180.224 180.224 0 00-52.01 17.557l43.52-142.281c1.989-6.502 2.983-9.752 4.927-12.16a15.999 15.999 0 016.484-4.798c2.872-1.154 6.271-1.154 13.07-1.154h31.085c6.807 0 10.211 0 13.085 1.157a16 16 0 016.488 4.806z" fill="url(#paint0_linear)"/>
<path id="flame" fill-rule="evenodd" clip-rule="evenodd" d="M136.678 180.151c-7.14 6.105-21.39 10.268-37.804 10.268-20.147 0-37.033-6.272-41.513-14.707-1.602 4.835-1.962 10.367-1.962 13.902 0 0-1.055 17.355 11.016 29.426 0-6.268 5.081-11.349 11.349-11.349 10.743 0 10.731 9.373 10.721 16.977v.679c0 11.542 7.054 21.436 17.086 25.606a23.27 23.27 0 01-2.339-10.2c0-11.008 6.463-15.107 13.973-19.87 5.977-3.79 12.616-8.001 17.192-16.449a31.013 31.013 0 003.744-14.82c0-3.299-.513-6.479-1.463-9.463z" />
</svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.2 KiB

View File

@ -0,0 +1,11 @@
<svg width="256" height="256" fill="none" xmlns="http://www.w3.org/2000/svg">
<style>
#flame { fill: #FF5D01; }
#a { fill: #000014; }
@media (prefers-color-scheme: dark) {
#a { fill: #fff; }
}
</style>
<path id="a" fill-rule="evenodd" clip-rule="evenodd" d="M163.008 18.929c1.944 2.413 2.935 5.67 4.917 12.181l43.309 142.27a180.277 180.277 0 00-51.778-17.53l-28.198-95.29a3.67 3.67 0 00-7.042.01l-27.857 95.232a180.225 180.225 0 00-52.01 17.557l43.52-142.281c1.99-6.502 2.983-9.752 4.927-12.16a15.999 15.999 0 016.484-4.798c2.872-1.154 6.271-1.154 13.07-1.154h31.085c6.807 0 10.211 0 13.086 1.157a16.004 16.004 0 016.487 4.806z" />
<path id="flame" fill-rule="evenodd" clip-rule="evenodd" d="M168.19 180.151c-7.139 6.105-21.39 10.268-37.804 10.268-20.147 0-37.033-6.272-41.513-14.707-1.602 4.835-1.961 10.367-1.961 13.902 0 0-1.056 17.355 11.015 29.426 0-6.268 5.081-11.349 11.349-11.349 10.743 0 10.731 9.373 10.721 16.977v.679c0 11.542 7.054 21.436 17.086 25.606a23.27 23.27 0 01-2.339-10.2c0-11.008 6.463-15.107 13.974-19.87 5.976-3.79 12.616-8.001 17.192-16.449a31.024 31.024 0 003.743-14.82c0-3.299-.513-6.479-1.463-9.463z" />
</svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@ -0,0 +1,2 @@
User-agent: *
Disallow: /

View File

@ -0,0 +1,28 @@
* {
box-sizing: border-box;
margin: 0;
}
:root {
font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Helvetica, Arial, sans-serif, Apple Color Emoji, Segoe UI Emoji;
font-size: 1rem;
--user-font-scale: 1rem - 16px;
font-size: clamp(0.875rem, 0.4626rem + 1.0309vw + var(--user-font-scale), 1.125rem);
}
body {
padding: 4rem 2rem;
width: 100%;
min-height: 100vh;
display: grid;
justify-content: center;
background: #f9fafb;
color: #111827;
}
@media (prefers-color-scheme: dark) {
body {
background: #111827;
color: #fff;
}
}

View File

@ -0,0 +1,53 @@
:root {
--font-mono: Consolas, 'Andale Mono WT', 'Andale Mono', 'Lucida Console', 'Lucida Sans Typewriter', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Liberation Mono',
'Nimbus Mono L', Monaco, 'Courier New', Courier, monospace;
--color-light: #f3f4f6;
}
@media (prefers-color-scheme: dark) {
:root {
--color-light: #1f2937;
}
}
a {
color: inherit;
}
header > div {
font-size: clamp(2rem, -0.4742rem + 6.1856vw, 2.75rem);
}
header > div {
display: flex;
flex-direction: column;
align-items: center;
}
header h1 {
font-size: 1em;
font-weight: 500;
}
header img {
width: 2em;
height: 2.667em;
}
h2 {
font-weight: 500;
font-size: clamp(1.5rem, 1rem + 1.25vw, 2rem);
}
.counter {
display: grid;
grid-auto-flow: column;
gap: 1em;
font-size: 2rem;
justify-content: center;
padding: 2rem 1rem;
}
.counter > pre {
text-align: center;
min-width: 3ch;
}

View File

@ -1,21 +0,0 @@
<script setup lang="ts">
// This starter template is using Vue 3 <script setup> SFCs
// Check out https://v3.vuejs.org/api/sfc-script-setup.html#sfc-script-setup
import HelloWorld from './components/HelloWorld.vue'
</script>
<template>
<img alt="Vue logo" src="./assets/logo.png" />
<HelloWorld msg="Hello Vue 3 + TypeScript + Vite" />
</template>
<style>
#app {
font-family: Avenir, Helvetica, Arial, sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
text-align: center;
color: #2c3e50;
margin-top: 60px;
}
</style>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 6.7 KiB

View File

@ -1,68 +0,0 @@
<script setup lang="ts">
import { ref } from 'vue'
defineProps<{ msg: string }>()
const count = ref(0)
let test = ref("yeet")
fetch("/api/users").then(
res => {
if (!res.ok) {
console.log("ah chucks")
return Promise.reject()
}
return res.json()
}
).then(
json => test.value = json
)
</script>
<template>
<h1>{{ msg }}</h1>
<p>{{ test }}</p>
<p>
Recommended IDE setup:
<a href="https://code.visualstudio.com/" target="_blank">VSCode</a>
+
<a href="https://github.com/johnsoncodehk/volar" target="_blank">Volar</a>
</p>
<p>See <code>README.md</code> for more information.</p>
<p>
<a href="https://vitejs.dev/guide/features.html" target="_blank">
Vite Docs
</a>
|
<a href="https://v3.vuejs.org/" target="_blank">Vue 3 Docs</a>
</p>
<button type="button" @click="count++">count is: {{ count }}</button>
<p>
Edit
<code>components/HelloWorld.vue</code> to test hot module replacement.
</p>
</template>
<style scoped>
a {
color: #42b983;
}
label {
margin: 0 0.5em;
font-weight: bold;
}
code {
background-color: #eee;
padding: 2px 4px;
border-radius: 4px;
color: #304455;
}
</style>

View File

@ -0,0 +1,17 @@
<script>
let count = 0;
function add() {
count += 1;
}
function subtract() {
count -= 1;
}
</script>
<div id="svelte" class="counter">
<button on:click={subtract}>-</button>
<pre>{ count }</pre>
<button on:click={add}>+</button>
</div>

View File

@ -0,0 +1,85 @@
---
import { Markdown } from 'astro/components';
---
<article>
<div class="banner">
<p><strong>🧑‍🚀 Seasoned astronaut?</strong> Delete this file. Have fun!</p>
</div>
<section>
<Markdown>
## 🚀 Project Structure
Inside of your Astro project, you'll see the following folders and files:
```
/
├── public/
│ ├── robots.txt
│ └── favicon.ico
├── src/
│ ├── components/
│ │ └── Tour.astro
│ └── pages/
│ └── index.astro
└── package.json
```
Astro looks for `.astro` or `.md` files in the `src/pages/` directory.
Each page is exposed as a route based on its file name.
There's nothing special about `src/components/`, but that's where we like to put any Astro/React/Vue/Svelte/Preact components.
Any static assets, like images, can be placed in the `public/` directory.
</Markdown>
</section>
<section>
<h2>👀 Want to learn more?</h2>
<p>Feel free to check <a href="https://github.com/snowpackjs/astro">our documentation</a> or jump into our <a href="https://astro.build/chat">Discord server</a>.</p>
</section>
</article>
<style>
article {
padding-top: 2em;
line-height: 1.5;
}
section {
margin-top: 2em;
display: flex;
flex-direction: column;
gap: 1em;
max-width: 70ch;
}
.banner {
text-align: center;
font-size: 1.2rem;
background: var(--color-light);
padding: 1em 1.5em;
padding-left: 0.75em;
border-radius: 4px;
}
pre,
code {
font-family: var(--font-mono);
background: var(--color-light);
border-radius: 4px;
}
pre {
padding: 1em 1.5em;
}
.tree {
line-height: 1.2;
}
code:not(.tree) {
padding: 0.125em;
margin: 0 -0.125em;
}
</style>

8
web/src/env.d.ts vendored
View File

@ -1,8 +0,0 @@
/// <reference types="vite/client" />
declare module '*.vue' {
import { DefineComponent } from 'vue'
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/ban-types
const component: DefineComponent<{}, {}, any>
export default component
}

View File

@ -0,0 +1,7 @@
<html>
<body>
<h1>huh</h1>
<p>lol</p>
<slot />
</body>
</html>

View File

@ -1,10 +0,0 @@
import { createApp } from 'vue'
import App from './App.vue'
// @ts-ignore
import { makeServer } from "./server"
if (process.env.NODE_ENV === "development") {
makeServer()
}
createApp(App).mount('#app')

View File

@ -0,0 +1,44 @@
<html>
<body>
<ul id="nav-bar">
<li class="nav-bar-item"><a href="/home">Home</a></li>
<li class="nav-bar-item"><a href="/blog">Blog</a></li>
<li class="nav-bar-item"><a href="/microblog">Microblog</a></li>
<li class="nav-bar-item"><a href="/devlogs">Devlogs</a></li>
</ul>
</body>
</html>
<style>
ul#nav-bar {
list-style-type: none;
margin: 0;
padding: 0;
width: 200px;
background-color: #f1f1f1;
border: 1px solid #555;
}
ul#nav-bar li {
text-align: center;
display: inline;
}
li.nav-bar-item a {
display: block;
color: #000;
padding: 8px 16px;
text-decoration: none;
border: 1px solid #555;
}
li.nav-bar-item:last-child {
border-bottom: none;
}
li.nav-bar-item a:hover {
background-color: #555;
color: white;
}
</style>

View File

@ -1,27 +0,0 @@
// src/server.js
import { createServer, Model } from "miragejs"
export function makeServer({ environment = "development" } = {}) {
let server = createServer({
environment,
models: {
user: Model,
},
seeds(server) {
server.create("user", { name: "Bob" })
server.create("user", { name: "Alice" })
},
routes() {
this.namespace = "api"
this.get("/users", (schema) => {
return schema.users.all()
})
},
})
return server
}

View File

@ -1,15 +1,3 @@
{
"compilerOptions": {
"target": "esnext",
"useDefineForClassFields": true,
"module": "esnext",
"moduleResolution": "node",
"strict": true,
"jsx": "preserve",
"sourceMap": true,
"resolveJsonModule": true,
"esModuleInterop": true,
"lib": ["esnext", "dom"]
},
"include": ["src/**/*.ts", "src/**/*.d.ts", "src/**/*.tsx", "src/**/*.vue"]
"moduleResolution": "node"
}

View File

@ -1,7 +0,0 @@
import { defineConfig } from 'vite'
import vue from '@vitejs/plugin-vue'
// https://vitejs.dev/config/
export default defineConfig({
plugins: [vue()]
})

File diff suppressed because it is too large Load Diff