Compare commits


7 Commits

Author       SHA1        Message                                                Date
Jef Roosens  b6d5bd3228  doc: listed new config variables in docs               2022-05-03 20:13:28 +02:00
Jef Roosens  5781796e99  doc: added docstrings to all db/git functions          2022-05-03 20:02:05 +02:00
Jef Roosens  204144cee8  refactor: removed commented code & ran formatter       2022-05-03 19:50:14 +02:00
Jef Roosens  c818273790  feat: simplified config down to pkg_dir & data_dir     2022-05-03 19:50:14 +02:00
                         BREAKING: downloads are now stored inside the root of
                         pkg_dir, the log file is always stored in the root of
                         data_dir
Jef Roosens  7419144f97  feat: removed git.GitRepo type                         2022-05-03 19:50:14 +02:00
                         feat(cli): updated to new GitRepo format
Jef Roosens  0a2488a4df  feat(server): migrated repo patch to sqlite            2022-05-03 19:50:14 +02:00
Jef Roosens  891a206116  feat(server): partially migrated repos API to sqlite   2022-05-03 19:50:14 +02:00
20 changed files with 298 additions and 266 deletions

View File

@@ -23,7 +23,7 @@ pipeline:
  image: 'chewingbever/vlang:latest'
  pull: true
  environment:
-   - LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static
+   - LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3 -static
  commands:
  # Apparently this -D is *very* important
  - CFLAGS='-DGC_THREADS=1' make prod

View File

@@ -10,6 +10,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
  ### Changed
  * Switched from compiler fork to fully vanilla compiler mirror
+ * `download_dir`, `repos_file` & `repos_dir` config values have been replaced
+   with `data_dir`
+ * Storage of metadata (e.g. Git repositories) is now done using Sqlite
  ### Added

@@ -67,4 +70,3 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
  * Ability to publish packages
  * Re-wrote repo-add in V

View File

@@ -23,7 +23,7 @@ RUN if [ -n "${CI_COMMIT_SHA}" ]; then \
  "https://s3.rustybever.be/vieter/commits/${CI_COMMIT_SHA}/vieter-$(echo "${TARGETPLATFORM}" | sed 's:/:-:g')" && \
  chmod +x vieter ; \
  else \
- LDFLAGS='-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static' make prod && \
+ LDFLAGS='-lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3 -static' make prod && \
  mv pvieter vieter ; \
  fi

@@ -31,10 +31,8 @@ RUN if [ -n "${CI_COMMIT_SHA}" ]; then \
  FROM busybox:1.35.0

  ENV PATH=/bin \
-     VIETER_REPOS_DIR=/data/repos \
-     VIETER_PKG_DIR=/data/pkgs \
-     VIETER_DOWNLOAD_DIR=/data/downloads \
-     VIETER_REPOS_FILE=/data/repos.json
+     VIETER_DATA_DIR=/data \
+     VIETER_PKG_DIR=/data/pkgs

  COPY --from=builder /app/dumb-init /app/vieter /bin/

View File

@@ -5,7 +5,6 @@ SOURCES != find '$(SRC_DIR)' -iname '*.v'
  V_PATH ?= v
  V := $(V_PATH) -showcc -gc boehm

  all: vieter

View File

@ -35,18 +35,10 @@ passed to them. Each mode requires a different configuration.
* `log_file`: log file to write logs to. Defaults to `vieter.log` in the * `log_file`: log file to write logs to. Defaults to `vieter.log` in the
current directory. current directory.
* `pkg_dir`: where Vieter should store the actual package archives. * `pkg_dir`: where Vieter should store the actual package archives.
* `download_dir`: where Vieter should initially download uploaded files. * `data_dir`: where Vieter stores the repositories, log file & database.
* `api_key`: the API key to use when authenticating requests. * `api_key`: the API key to use when authenticating requests.
* `repo_dir`: where Vieter should store the contents of the repository.
* `repos_file`: JSON file where the list of Git repositories is saved
* `default_arch`: architecture to always add packages of arch `any` to. * `default_arch`: architecture to always add packages of arch `any` to.
{{< hint info >}}
**Note**
Because Vieter hard links files between `download_dir` & `pkg_dir`, they need
to be on the same file system.
{{< /hint >}}
### Builder ### Builder
* `api_key`: the API key to use when authenticating requests. * `api_key`: the API key to use when authenticating requests.
@ -62,3 +54,18 @@ to be on the same file system.
* `api_key`: the API key to use when authenticating requests. * `api_key`: the API key to use when authenticating requests.
* `address`: Base your URL of your Vieter instance, e.g. https://example.com * `address`: Base your URL of your Vieter instance, e.g. https://example.com
### Cron
* `log_level`: defines how much logs to show. Valid values are one of `FATAL`,
`ERROR`, `WARN`, `INFO` or `DEBUG`. Defaults to `WARN`
* `api_key`: the API key to use when authenticating requests.
* `address`: Base your URL of your Vieter instance, e.g. https://example.com.
This *must* be the publicly facing URL of your Vieter instance.
* `data_dir`: where Vieter stores the log file.
* `base_image`: Docker image from which to create the builder images.
* `max_concurrent_builds`: amount of builds to run at once.
* `api_update_frequency`: how frequenty to check for changes in the repo list.
* `image_rebuild+frequency`: how frequently to rebuild the builder image
* `global_schedule`: cron schedule to use for any repo without an individual
schedule
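For reference, a cron-mode config file that follows the keys documented above could look like the sketch below. The base_image, max_concurrent_builds & api_update_frequency values mirror the defaults visible in the cron Config struct shown further down this page; every other value is an illustrative placeholder rather than a documented default.

# example cron-mode config (illustrative values)
log_level = "WARN"
api_key = "changeme"
address = "https://example.com"
data_dir = "/var/lib/vieter"
base_image = "archlinux:base-devel"
max_concurrent_builds = 1
api_update_frequency = 15
image_rebuild_frequency = 1440
# placeholder schedule; the syntax is whatever expression.parse_expression accepts
global_schedule = "0 3"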

View File

@@ -5,6 +5,7 @@ import encoding.base64
  import time
  import git
  import os
+ import db

  const container_build_dir = '/build'

@@ -75,7 +76,7 @@ pub fn create_build_image(base_image string) ?string {
  // build_repo builds, packages & publishes a given Arch package based on the
  // provided GitRepo. The base image ID should be of an image previously created
  // by create_build_image.
- pub fn build_repo(address string, api_key string, base_image_id string, repo &git.GitRepo) ? {
+ pub fn build_repo(address string, api_key string, base_image_id string, repo &db.GitRepo) ? {
  build_arch := os.uname().machine

  // TODO what to do with PKGBUILDs that build multiple packages?

@@ -125,11 +126,11 @@ fn build(conf Config) ? {
  build_arch := os.uname().machine

  // We get the repos map from the Vieter instance
- repos_map := git.get_repos(conf.address, conf.api_key) ?
+ repos := git.get_repos(conf.address, conf.api_key) ?

  // We filter out any repos that aren't allowed to be built on this
  // architecture
- filtered_repos := repos_map.keys().map(repos_map[it]).filter(it.arch.contains(build_arch))
+ filtered_repos := repos.filter(it.arch.map(it.value).contains(build_arch))

  // No point in doing work if there's no repos present
  if filtered_repos.len == 0 {

View File

@@ -6,9 +6,9 @@ import env
  struct Config {
  pub:
  log_level string = 'WARN'
- log_file string = 'vieter.log'
  api_key string
  address string
+ data_dir string
  base_image string = 'archlinux:base-devel'
  max_concurrent_builds int = 1
  api_update_frequency int = 15

View File

@@ -3,6 +3,9 @@ module cron
  import log
  import cron.daemon
  import cron.expression
+ import os
+
+ const log_file_name = 'vieter.cron.log'

  // cron starts a cron daemon & starts periodically scheduling builds.
  pub fn cron(conf Config) ? {

@@ -15,7 +18,8 @@ pub fn cron(conf Config) ? {
  level: log_level
  }

- logger.set_full_logpath(conf.log_file)
+ log_file := os.join_path_single(conf.data_dir, cron.log_file_name)
+ logger.set_full_logpath(log_file)
  logger.log_to_console_too()

  ce := expression.parse_expression(conf.global_schedule) or {

View File

@@ -4,11 +4,11 @@ import time
  import sync.stdatomic
  import build

- const build_empty = 0
- const build_running = 1
- const build_done = 2
+ const (
+ build_empty = 0
+ build_running = 1
+ build_done = 2
+ )

  // clean_finished_builds removes finished builds from the build slots & returns
  // them.

View File

@@ -8,17 +8,19 @@ import cron.expression { CronExpression, parse_expression }
  import math
  import build
  import docker
+ import db

+ const (
  // How many seconds to wait before retrying to update API if failed
- const api_update_retry_timeout = 5
+ api_update_retry_timeout = 5
  // How many seconds to wait before retrying to rebuild image if failed
- const rebuild_base_image_retry_timout = 30
+ rebuild_base_image_retry_timout = 30
+ )

  struct ScheduledBuild {
  pub:
- repo_id string
- repo git.GitRepo
+ repo db.GitRepo
  timestamp time.Time
  }

@@ -37,7 +39,7 @@ mut:
  api_update_frequency int
  image_rebuild_frequency int
  // Repos currently loaded from API.
- repos_map map[string]git.GitRepo
+ repos []db.GitRepo
  // At what point to update the list of repositories.
  api_update_timestamp time.Time
  image_build_timestamp time.Time

@@ -90,7 +92,7 @@ pub fn (mut d Daemon) run() {
  // haven't been renewed.
  else {
  for sb in finished_builds {
- d.schedule_build(sb.repo_id, sb.repo)
+ d.schedule_build(sb.repo)
  }
  }

@@ -149,11 +151,11 @@ pub fn (mut d Daemon) run() {
  }

  // schedule_build adds the next occurence of the given repo build to the queue.
- fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) {
+ fn (mut d Daemon) schedule_build(repo db.GitRepo) {
  ce := if repo.schedule != '' {
  parse_expression(repo.schedule) or {
  // TODO This shouldn't return an error if the expression is empty.
- d.lerror("Error while parsing cron expression '$repo.schedule' ($repo_id): $err.msg()")
+ d.lerror("Error while parsing cron expression '$repo.schedule' (id $repo.id): $err.msg()")
  d.global_schedule
  }

@@ -168,7 +170,6 @@ fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) {
  }

  d.queue.insert(ScheduledBuild{
- repo_id: repo_id
  repo: repo
  timestamp: timestamp
  })

@@ -186,7 +187,7 @@ fn (mut d Daemon) renew_repos() {
  return
  }

- d.repos_map = new_repos.move()
+ d.repos = new_repos
  d.api_update_timestamp = time.now().add_seconds(60 * d.api_update_frequency)
  }

@@ -224,8 +225,8 @@ fn (mut d Daemon) renew_queue() {
  // For each repository in repos_map, parse their cron expression (or use
  // the default one if not present) & add them to the queue
- for id, repo in d.repos_map {
- d.schedule_build(id, repo)
+ for repo in d.repos {
+ d.schedule_build(repo)
  }
  }

src/db/db.v (new file, mode 100644, 20 lines)
View File

@@ -0,0 +1,20 @@
module db
import sqlite
struct VieterDb {
conn sqlite.DB
}
// init initializes a database & adds the correct tables.
pub fn init(db_path string) ?VieterDb {
conn := sqlite.connect(db_path) ?
sql conn {
create table GitRepo
}
return VieterDb{
conn: conn
}
}

src/db/git.v (new file, mode 100644, 156 lines)
View File

@@ -0,0 +1,156 @@
module db
pub struct GitRepoArch {
pub:
id int [primary; sql: serial]
repo_id int [nonull]
value string [nonull]
}
// str returns a string representation.
pub fn (gra &GitRepoArch) str() string {
return gra.value
}
pub struct GitRepo {
pub mut:
id int [optional; primary; sql: serial]
// URL of the Git repository
url string [nonull]
// Branch of the Git repository to use
branch string [nonull]
// Which repo the builder should publish packages to
repo string [nonull]
// Cron schedule describing how frequently to build the repo.
schedule string [optional]
// On which architectures the package is allowed to be built. In reality,
// this controls which builders will periodically build the image.
arch []GitRepoArch [fkey: 'repo_id']
}
// str returns a string representation.
pub fn (gr &GitRepo) str() string {
mut parts := [
'id: $gr.id',
'url: $gr.url',
'branch: $gr.branch',
'repo: $gr.repo',
'schedule: $gr.schedule',
'arch: ${gr.arch.map(it.value).join(', ')}',
]
str := parts.join('\n')
return str
}
// patch_from_params patches a GitRepo from a map[string]string, usually
// provided from a web.App's params
pub fn (mut r GitRepo) patch_from_params(params map[string]string) {
$for field in GitRepo.fields {
if field.name in params {
$if field.typ is string {
r.$(field.name) = params[field.name]
// This specific type check is needed for the compiler to ensure
// our types are correct
} $else $if field.typ is []GitRepoArch {
r.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it })
}
}
}
}
// git_repo_from_params creates a GitRepo from a map[string]string, usually
// provided from a web.App's params
pub fn git_repo_from_params(params map[string]string) ?GitRepo {
mut repo := GitRepo{}
// If we're creating a new GitRepo, we want all fields to be present before
// "patching".
$for field in GitRepo.fields {
if field.name !in params && !field.attrs.contains('optional') {
return error('Missing parameter: ${field.name}.')
}
}
repo.patch_from_params(params)
return repo
}
// get_git_repos returns all GitRepo's in the database.
pub fn (db &VieterDb) get_git_repos() []GitRepo {
res := sql db.conn {
select from GitRepo order by id
}
return res
}
// get_git_repo tries to return a specific GitRepo.
pub fn (db &VieterDb) get_git_repo(repo_id int) ?GitRepo {
res := sql db.conn {
select from GitRepo where id == repo_id
}
// If a select statement fails, it returns a zeroed object. By
// checking one of the required fields, we can see whether the query
// returned a result or not.
if res.url == '' {
return none
}
return res
}
// add_git_repo inserts the given GitRepo into the database.
pub fn (db &VieterDb) add_git_repo(repo GitRepo) {
sql db.conn {
insert repo into GitRepo
}
}
// delete_git_repo deletes the repo with the given ID from the database.
pub fn (db &VieterDb) delete_git_repo(repo_id int) {
sql db.conn {
delete from GitRepo where id == repo_id
delete from GitRepoArch where repo_id == repo_id
}
}
// update_git_repo updates any non-array values for a given GitRepo.
pub fn (db &VieterDb) update_git_repo(repo_id int, params map[string]string) {
// sql db.conn {
// update GitRepo set repo
//}
mut values := []string{}
$for field in GitRepo.fields {
if field.name in params {
// Any fields that are array types require their own update method
$if field.typ is string {
values << "$field.name = '${params[field.name]}'"
}
}
}
values_str := values.join(', ')
query := 'update GitRepo set $values_str where id == $repo_id'
db.conn.exec_none(query)
}
// update_git_repo_archs updates a given GitRepo's arch value.
pub fn (db &VieterDb) update_git_repo_archs(repo_id int, archs []GitRepoArch) {
archs_with_id := archs.map(GitRepoArch{
...it
repo_id: repo_id
})
sql db.conn {
delete from GitRepoArch where repo_id == repo_id
}
for arch in archs_with_id {
sql db.conn {
insert arch into GitRepoArch
}
}
}
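To make the new API concrete, here is a minimal usage sketch. It assumes the snippet lives inside the Vieter source tree where the db module is importable; the database path, URL, branch, repo name and architectures are placeholders.

import db

fn example() {
	// init opens (or creates) the SQLite file and creates the GitRepo table.
	vdb := db.init('vieter.sqlite') or { panic(err) }

	// git_repo_from_params errors out if a non-optional field is missing.
	repo := db.git_repo_from_params({
		'url':    'https://example.com/some-package.git'
		'branch': 'main'
		'repo':   'vieter'
		'arch':   'x86_64,aarch64'
	}) or { panic(err) }

	vdb.add_git_repo(repo)

	// GitRepo defines str(), so the entries print in a readable form.
	for r in vdb.get_git_repos() {
		println(r)
	}
}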

View File

@@ -116,34 +116,13 @@ pub fn cmd() cli.Command {
- // get_repo_by_prefix tries to find the repo with the given prefix in its
- // ID. If multiple or none are found, an error is raised.
- fn get_repo_by_prefix(conf Config, id_prefix string) ?(string, GitRepo) {
- repos := get_repos(conf.address, conf.api_key) ?
- mut res := map[string]GitRepo{}
- for id, repo in repos {
- if id.starts_with(id_prefix) {
- res[id] = repo
- }
- }
- if res.len == 0 {
- return error('No repo found for given prefix.')
- }
- if res.len > 1 {
- return error('Multiple repos found for given prefix.')
- }
- return res.keys()[0], res[res.keys()[0]]
- }

  // list prints out a list of all repositories.
  fn list(conf Config) ? {
  repos := get_repos(conf.address, conf.api_key) ?

- for id, details in repos {
- println('${id[..8]}\t$details.url\t$details.branch\t$details.repo')
+ for repo in repos {
+ println('$repo.id\t$repo.url\t$repo.branch\t$repo.repo')
  }
  }

@@ -155,15 +134,18 @@ fn add(conf Config, url string, branch string, repo string) ? {
  }

  // remove removes a repository from the server's list.
- fn remove(conf Config, id_prefix string) ? {
- id, _ := get_repo_by_prefix(conf, id_prefix) ?
- res := remove_repo(conf.address, conf.api_key, id) ?
+ fn remove(conf Config, id string) ? {
+ // id, _ := get_repo_by_prefix(conf, id_prefix) ?
+ id_int := id.int()
+
+ if id_int != 0 {
+ res := remove_repo(conf.address, conf.api_key, id_int) ?
  println(res.message)
  }
+ }

  // patch patches a given repository with the provided params.
- fn patch(conf Config, id_prefix string, params map[string]string) ? {
+ fn patch(conf Config, id string, params map[string]string) ? {
  // We check the cron expression first because it's useless to send an
  // invalid one to the server.
  if 'schedule' in params && params['schedule'] != '' {

@@ -172,20 +154,22 @@ fn patch(conf Config, id_prefix string, params map[string]string) ? {
  }
  }

- id, _ := get_repo_by_prefix(conf, id_prefix) ?
- res := patch_repo(conf.address, conf.api_key, id, params) ?
+ id_int := id.int()
+
+ if id_int != 0 {
+ res := patch_repo(conf.address, conf.api_key, id_int, params) ?
  println(res.message)
  }
+ }

  // info shows detailed information for a given repo.
- fn info(conf Config, id_prefix string) ? {
- id, repo := get_repo_by_prefix(conf, id_prefix) ?
- println('id: $id')
- $for field in GitRepo.fields {
- val := repo.$(field.name)
- println('$field.name: $val')
- }
+ fn info(conf Config, id string) ? {
+ id_int := id.int()
+
+ if id_int == 0 {
+ return
+ }
+
+ repo := get_repo(conf.address, conf.api_key, id_int) ?
+ println(repo)
  }

View File

@@ -3,6 +3,7 @@ module git
  import json
  import response { Response }
  import net.http
+ import db

  // send_request<T> is a convenience method for sending requests to the repos
  // API. It mostly does string manipulation to create a query string containing

@@ -26,8 +27,16 @@ fn send_request<T>(method http.Method, address string, url string, api_key strin
  }

  // get_repos returns the current list of repos.
- pub fn get_repos(address string, api_key string) ?map[string]GitRepo {
- data := send_request<map[string]GitRepo>(http.Method.get, address, '/api/repos', api_key,
+ pub fn get_repos(address string, api_key string) ?[]db.GitRepo {
+ data := send_request<[]db.GitRepo>(http.Method.get, address, '/api/repos', api_key,
+ {}) ?
+
+ return data.data
+ }
+
+ // get_repo returns the repo for a specific ID.
+ pub fn get_repo(address string, api_key string, id int) ?db.GitRepo {
+ data := send_request<db.GitRepo>(http.Method.get, address, '/api/repos/$id', api_key,
  {}) ?

  return data.data

@@ -51,7 +60,7 @@ pub fn add_repo(address string, api_key string, url string, branch string, repo
  }

  // remove_repo removes the repo with the given ID from the server.
- pub fn remove_repo(address string, api_key string, id string) ?Response<string> {
+ pub fn remove_repo(address string, api_key string, id int) ?Response<string> {
  data := send_request<string>(http.Method.delete, address, '/api/repos/$id', api_key,
  {}) ?

@@ -60,7 +69,7 @@ pub fn remove_repo(address string, api_key string, id string) ?Response<string>
  // patch_repo sends a PATCH request to the given repo with the params as
  // payload.
- pub fn patch_repo(address string, api_key string, id string, params map[string]string) ?Response<string> {
+ pub fn patch_repo(address string, api_key string, id int, params map[string]string) ?Response<string> {
  data := send_request<string>(http.Method.patch, address, '/api/repos/$id', api_key,
  params) ?
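Because the client now identifies repos by integer ID, a console command can drive it roughly as sketched below. The address, API key, ID and schedule value are placeholders, and the snippet assumes it sits inside the Vieter tree where this git module is importable.

import git

fn example(address string, api_key string) ? {
	// List every repo; the IDs now come straight from SQLite.
	repos := git.get_repos(address, api_key) ?

	for repo in repos {
		println('$repo.id\t$repo.url\t$repo.branch\t$repo.repo')
	}

	// Fetch a single repo by ID, tweak its schedule, then remove it again.
	single := git.get_repo(address, api_key, 1) ?

	patch_res := git.patch_repo(address, api_key, single.id, {
		'schedule': '0 3'
	}) ?
	println(patch_res.message)

	remove_res := git.remove_repo(address, api_key, single.id) ?
	println(remove_res.message)
}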

View File

@@ -1,84 +0,0 @@
module git
import os
import json
pub struct GitRepo {
pub mut:
// URL of the Git repository
url string
// Branch of the Git repository to use
branch string
// On which architectures the package is allowed to be built. In reality,
// this controls which builders will periodically build the image.
arch []string
// Which repo the builder should publish packages to
repo string
// Cron schedule describing how frequently to build the repo.
schedule string [optional]
}
// patch_from_params patches a GitRepo from a map[string]string, usually
// provided from a web.App's params
pub fn (mut r GitRepo) patch_from_params(params map[string]string) {
$for field in GitRepo.fields {
if field.name in params {
$if field.typ is string {
r.$(field.name) = params[field.name]
// This specific type check is needed for the compiler to ensure
// our types are correct
} $else $if field.typ is []string {
r.$(field.name) = params[field.name].split(',')
}
}
}
}
// read_repos reads the provided path & parses it into a map of GitRepo's.
pub fn read_repos(path string) ?map[string]GitRepo {
if !os.exists(path) {
mut f := os.create(path) ?
defer {
f.close()
}
f.write_string('{}') ?
return {}
}
content := os.read_file(path) ?
res := json.decode(map[string]GitRepo, content) ?
return res
}
// write_repos writes a map of GitRepo's back to disk given the provided path.
pub fn write_repos(path string, repos &map[string]GitRepo) ? {
mut f := os.create(path) ?
defer {
f.close()
}
value := json.encode(repos)
f.write_string(value) ?
}
// repo_from_params creates a GitRepo from a map[string]string, usually
// provided from a web.App's params
pub fn repo_from_params(params map[string]string) ?GitRepo {
mut repo := GitRepo{}
// If we're creating a new GitRepo, we want all fields to be present before
// "patching".
$for field in GitRepo.fields {
if field.name !in params && !field.attrs.contains('optional') {
return error('Missing parameter: ${field.name}.')
}
}
repo.patch_from_params(params)
return repo
}

View File

@@ -6,12 +6,9 @@ import env
  struct Config {
  pub:
  log_level string = 'WARN'
- log_file string = 'vieter.log'
  pkg_dir string
- download_dir string
+ data_dir string
  api_key string
- repos_dir string
- repos_file string
  default_arch string
  }

View File

@@ -1,12 +1,9 @@
  module server

  import web
- import git
  import net.http
- import rand
  import response { new_data_response, new_response }
+ import db

- const repos_file = 'repos.json'
-
  // get_repos returns the current list of repos.
  ['/api/repos'; get]

@@ -15,37 +12,19 @@ fn (mut app App) get_repos() web.Result {
  return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
  }

- repos := rlock app.git_mutex {
- git.read_repos(app.conf.repos_file) or {
- app.lerror('Failed to read repos file: $err.msg()')
- return app.status(http.Status.internal_server_error)
- }
- }
+ repos := app.db.get_git_repos()

  return app.json(http.Status.ok, new_data_response(repos))
  }

  // get_single_repo returns the information for a single repo.
  ['/api/repos/:id'; get]
- fn (mut app App) get_single_repo(id string) web.Result {
+ fn (mut app App) get_single_repo(id int) web.Result {
  if !app.is_authorized() {
  return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
  }

- repos := rlock app.git_mutex {
- git.read_repos(app.conf.repos_file) or {
- app.lerror('Failed to read repos file.')
- return app.status(http.Status.internal_server_error)
- }
- }
-
- if id !in repos {
- return app.not_found()
- }
-
- repo := repos[id]
+ repo := app.db.get_git_repo(id) or { return app.not_found() }

  return app.json(http.Status.ok, new_data_response(repo))
  }

@@ -65,89 +44,41 @@ fn (mut app App) post_repo() web.Result {
  params['arch'] = app.conf.default_arch
  }

- new_repo := git.repo_from_params(params) or {
+ new_repo := db.git_repo_from_params(params) or {
  return app.json(http.Status.bad_request, new_response(err.msg()))
  }

- id := rand.uuid_v4()
-
- mut repos := rlock app.git_mutex {
- git.read_repos(app.conf.repos_file) or {
- app.lerror('Failed to read repos file.')
- return app.status(http.Status.internal_server_error)
- }
- }
-
- // We need to check for duplicates
- for _, repo in repos {
- if repo == new_repo {
- return app.json(http.Status.bad_request, new_response('Duplicate repository.'))
- }
- }
-
- repos[id] = new_repo
-
- lock app.git_mutex {
- git.write_repos(app.conf.repos_file, &repos) or {
- return app.status(http.Status.internal_server_error)
- }
- }
+ app.db.add_git_repo(new_repo)

  return app.json(http.Status.ok, new_response('Repo added successfully.'))
  }

  // delete_repo removes a given repo from the server's list.
  ['/api/repos/:id'; delete]
- fn (mut app App) delete_repo(id string) web.Result {
+ fn (mut app App) delete_repo(id int) web.Result {
  if !app.is_authorized() {
  return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
  }

- mut repos := rlock app.git_mutex {
- git.read_repos(app.conf.repos_file) or {
- app.lerror('Failed to read repos file.')
- return app.status(http.Status.internal_server_error)
- }
- }
-
- if id !in repos {
- return app.not_found()
- }
-
- repos.delete(id)
-
- lock app.git_mutex {
- git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) }
- }
+ // repos.delete(id)
+ app.db.delete_git_repo(id)

  return app.json(http.Status.ok, new_response('Repo removed successfully.'))
  }

  // patch_repo updates a repo's data with the given query params.
  ['/api/repos/:id'; patch]
- fn (mut app App) patch_repo(id string) web.Result {
+ fn (mut app App) patch_repo(id int) web.Result {
  if !app.is_authorized() {
  return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
  }

- mut repos := rlock app.git_mutex {
- git.read_repos(app.conf.repos_file) or {
- app.lerror('Failed to read repos file.')
- return app.status(http.Status.internal_server_error)
- }
- }
-
- if id !in repos {
- return app.not_found()
- }
-
- repos[id].patch_from_params(app.query)
-
- lock app.git_mutex {
- git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) }
- }
+ app.db.update_git_repo(id, app.query)
+
+ if 'arch' in app.query {
+ arch_objs := app.query['arch'].split(',').map(db.GitRepoArch{ value: it })
+
+ app.db.update_git_repo_archs(id, arch_objs)
+ }

  return app.json(http.Status.ok, new_response('Repo updated successfully.'))

View File

@@ -68,7 +68,7 @@ fn (mut app App) put_package(repo string) web.Result {
  if length := app.req.header.get(.content_length) {
  // Generate a random filename for the temp file
- pkg_path = os.join_path_single(app.conf.download_dir, rand.uuid_v4())
+ pkg_path = os.join_path_single(app.repo.pkg_dir, rand.uuid_v4())

  app.ldebug("Uploading $length bytes (${util.pretty_bytes(length.int())}) to '$pkg_path'.")

View File

@@ -5,8 +5,14 @@ import os
  import log
  import repo
  import util
+ import db

- const port = 8000
+ const (
+ port = 8000
+ log_file_name = 'vieter.log'
+ repo_dir_name = 'repos'
+ db_file_name = 'vieter.sqlite'
+ )

  struct App {
  web.Context

@@ -14,8 +20,7 @@ pub:
  conf Config [required; web_global]
  pub mut:
  repo repo.RepoGroupManager [required; web_global]
- // This is used to claim the file lock on the repos file
- git_mutex shared util.Dummy
+ db db.VieterDb
  }

@@ -30,11 +35,14 @@ pub fn server(conf Config) ? {
  util.exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.')
  }

+ os.mkdir_all(conf.data_dir) or { util.exit_with_message(1, 'Failed to create data directory.') }
+
  mut logger := log.Log{
  level: log_level
  }

- logger.set_full_logpath(conf.log_file)
+ log_file := os.join_path_single(conf.data_dir, server.log_file_name)
+ logger.set_full_logpath(log_file)
  logger.log_to_console_too()

  defer {

@@ -43,19 +51,20 @@ pub fn server(conf Config) ? {
  logger.close()
  }

+ repo_dir := os.join_path_single(conf.data_dir, server.repo_dir_name)
+
  // This also creates the directories if needed
- repo := repo.new(conf.repos_dir, conf.pkg_dir, conf.default_arch) or {
+ repo := repo.new(repo_dir, conf.pkg_dir, conf.default_arch) or {
  logger.error(err.msg())
  exit(1)
  }

- os.mkdir_all(conf.download_dir) or {
- util.exit_with_message(1, 'Failed to create download directory.')
- }
+ db_file := os.join_path_single(conf.data_dir, server.db_file_name)
+ db := db.init(db_file) or { util.exit_with_message(1, 'Failed to initialize database.') }

  web.run(&App{
  logger: logger
  conf: conf
  repo: repo
+ db: db
  }, server.port)
  }

View File

@@ -1,10 +1,8 @@
  # This file contains settings used during development
  api_key = "test"
- download_dir = "data/downloads"
- repos_dir = "data/repos"
+ data_dir = "data"
  pkg_dir = "data/pkgs"
  log_level = "DEBUG"
- repos_file = "data/repos.json"
  default_arch = "x86_64"
  address = "http://localhost:8000"