From 41ee08045b5e27c5c1d64e6dbe14597075ed71d1 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 9 Apr 2022 09:46:07 +0200 Subject: [PATCH 01/67] Start of cron implementation --- src/cron/cli.v | 26 ++++++++++++++++++++++++++ src/cron/cron.v | 7 +++++++ src/main.v | 2 ++ 3 files changed, 35 insertions(+) create mode 100644 src/cron/cli.v create mode 100644 src/cron/cron.v diff --git a/src/cron/cli.v b/src/cron/cli.v new file mode 100644 index 00000000..cbf5b882 --- /dev/null +++ b/src/cron/cli.v @@ -0,0 +1,26 @@ +module cron + +import cli +import env + +struct Config { +pub: + log_level string = 'WARN' + log_file string = 'vieter.log' + api_key string + address string + base_image string = 'archlinux:base-devel' +} + +pub fn cmd() cli.Command { + return cli.Command{ + name: 'cron' + description: 'Start the cron service that periodically runs builds.' + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? + + cron(conf) ? + } + } +} diff --git a/src/cron/cron.v b/src/cron/cron.v new file mode 100644 index 00000000..ac584eb9 --- /dev/null +++ b/src/cron/cron.v @@ -0,0 +1,7 @@ +module cron + +import git + +pub fn cron(conf Config) ? { + repos_map := git.get_repos(conf.address, conf.api_key) ? +} diff --git a/src/main.v b/src/main.v index c77e5519..adaf4dcf 100644 --- a/src/main.v +++ b/src/main.v @@ -5,6 +5,7 @@ import server import cli import build import git +import cron fn main() { mut app := cli.Command{ @@ -25,6 +26,7 @@ fn main() { server.cmd(), build.cmd(), git.cmd(), + cron.cmd() ] } From e890128bda68fcdf40e1818b8486f5816126b689 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 9 Apr 2022 09:50:37 +0200 Subject: [PATCH 02/67] Ran formatter --- src/build/cli.v | 4 ++-- src/cron/cli.v | 8 ++++---- src/main.v | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/build/cli.v b/src/build/cli.v index 7cdcf834..01313960 100644 --- a/src/build/cli.v +++ b/src/build/cli.v @@ -5,8 +5,8 @@ import env pub struct Config { pub: - api_key string - address string + api_key string + address string base_image string = 'archlinux:base-devel' } diff --git a/src/cron/cli.v b/src/cron/cli.v index cbf5b882..9bdec9ae 100644 --- a/src/cron/cli.v +++ b/src/cron/cli.v @@ -5,10 +5,10 @@ import env struct Config { pub: - log_level string = 'WARN' - log_file string = 'vieter.log' - api_key string - address string + log_level string = 'WARN' + log_file string = 'vieter.log' + api_key string + address string base_image string = 'archlinux:base-devel' } diff --git a/src/main.v b/src/main.v index adaf4dcf..7e41f257 100644 --- a/src/main.v +++ b/src/main.v @@ -26,7 +26,7 @@ fn main() { server.cmd(), build.cmd(), git.cmd(), - cron.cmd() + cron.cmd(), ] } From 6d60ea15380bdaede26216918e0922edd701f1b5 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 10 Apr 2022 16:17:50 +0200 Subject: [PATCH 03/67] Started writing cron expression parser [CI SKIP] --- Makefile | 4 +++ src/cron/cron.v | 27 ++++++++++++++++++- src/cron/expression.v | 55 ++++++++++++++++++++++++++++++++++++++ src/cron/expression_test.v | 5 ++++ src/util.v | 1 + 5 files changed, 91 insertions(+), 1 deletion(-) create mode 100644 src/cron/expression.v create mode 100644 src/cron/expression_test.v diff --git a/Makefile b/Makefile index 76ab7b58..9421fb6f 100644 --- a/Makefile +++ b/Makefile @@ -60,6 +60,10 @@ fmt: vet: $(V) vet -W $(SRC_DIR) +.PHONY: test +test: + $(V) test $(SRC_DIR) + # Build & patch the V compiler .PHONY: v v: v/v diff --git a/src/cron/cron.v 
b/src/cron/cron.v index ac584eb9..ccb8f9e6 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -1,7 +1,32 @@ module cron import git +import datatypes +import time + +struct ScheduledBuild { + repo git.GitRepo + timestamp time.Time +} + +fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { + return r1.timestamp < r2.timestamp +} pub fn cron(conf Config) ? { - repos_map := git.get_repos(conf.address, conf.api_key) ? + // mut queue := datatypes.MinHeap{} + // repos_map := git.get_repos(conf.address, conf.api_key) ? + + // for _, repo in repos_map { + // scheduled := ScheduledBuild{ + // repo: repo + // timestamp: 25 + // } + + // queue.insert(scheduled) + // } + + // println(queue) + exp := "10/2 5 *" + println(parse_expression(exp) ?) } diff --git a/src/cron/expression.v b/src/cron/expression.v new file mode 100644 index 00000000..8dae499e --- /dev/null +++ b/src/cron/expression.v @@ -0,0 +1,55 @@ +module cron + +import math + +struct CronExpression { + minutes []u32 + hours []u32 + days []u32 +} + +// parse_range parses a given string into a range of integers, if possible. +fn parse_range(s string, min u32, max u32) ?[]u32 { + mut out := []u32{} + mut start := min + mut interval := u32(1) + + if s != '*' { + exps := s.split('/') + + if exps.len > 1 { + interval = exps[1].u32() + } + // Here, s solely consists of a number, so that's the only value we + // should return. + else{ + return [exps[0].u32()] + } + + if exps[0] != '*' { + start = math.max(exps[0].u32(), min) + } + } + + for start <= max { + out << start + start += interval + } + + return out +} + +// min hour day month day-of-week +fn parse_expression(exp string) ?CronExpression { + parts := exp.split(' ') + + if parts.len != 3 { + return error("Expression must contain 5 space-separated parts.") + } + + return CronExpression{ + minutes: parse_range(parts[0], 0, 59) ? + hours: parse_range(parts[1], 0, 23) ? + days: parse_range(parts[2], 0, 31) ? + } +} diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v new file mode 100644 index 00000000..b3b7422a --- /dev/null +++ b/src/cron/expression_test.v @@ -0,0 +1,5 @@ +module cron + +fn test_parse_star_range() { + assert parse_range('*', 0, 5) == [0, 1, 2, 3, 4, 5] +} diff --git a/src/util.v b/src/util.v index 49c9d223..228f5845 100644 --- a/src/util.v +++ b/src/util.v @@ -44,6 +44,7 @@ pub fn reader_to_file(mut reader io.BufferedReader, length int, path string) ? 
{ for to_write > 0 { // TODO don't just loop infinitely here bytes_written := file.write(buf[bytes_read - to_write..bytes_read]) or { continue } + // file.flush() to_write = to_write - bytes_written } From f92a20fcf8b4702a0702788fe70d58868c57945e Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 10 Apr 2022 16:48:37 +0200 Subject: [PATCH 04/67] Gave all modules own directory; added test CI pipeline --- .woodpecker/.test.yml | 15 +++++++++++++++ src/cron/cron.v | 4 ++-- src/cron/expression.v | 8 ++++---- src/cron/expression_test.v | 8 ++++++-- src/{ => env}/env.v | 0 src/{ => package}/package.v | 0 src/{ => response}/response.v | 0 src/{ => util}/util.v | 0 8 files changed, 27 insertions(+), 8 deletions(-) create mode 100644 .woodpecker/.test.yml rename src/{ => env}/env.v (100%) rename src/{ => package}/package.v (100%) rename src/{ => response}/response.v (100%) rename src/{ => util}/util.v (100%) diff --git a/.woodpecker/.test.yml b/.woodpecker/.test.yml new file mode 100644 index 00000000..ec559c86 --- /dev/null +++ b/.woodpecker/.test.yml @@ -0,0 +1,15 @@ +matrix: + PLATFORM: + - linux/amd64 + - linux/arm64 + +platform: ${PLATFORM} + +pipeline: + test: + image: 'chewingbever/vlang:latest' + pull: true + commands: + - make test + when: + event: push diff --git a/src/cron/cron.v b/src/cron/cron.v index ccb8f9e6..13fc22d8 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -5,7 +5,7 @@ import datatypes import time struct ScheduledBuild { - repo git.GitRepo + repo git.GitRepo timestamp time.Time } @@ -27,6 +27,6 @@ pub fn cron(conf Config) ? { // } // println(queue) - exp := "10/2 5 *" + exp := '10/2 5 *' println(parse_expression(exp) ?) } diff --git a/src/cron/expression.v b/src/cron/expression.v index 8dae499e..2938dd97 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -4,8 +4,8 @@ import math struct CronExpression { minutes []u32 - hours []u32 - days []u32 + hours []u32 + days []u32 } // parse_range parses a given string into a range of integers, if possible. @@ -22,7 +22,7 @@ fn parse_range(s string, min u32, max u32) ?[]u32 { } // Here, s solely consists of a number, so that's the only value we // should return. - else{ + else { return [exps[0].u32()] } @@ -44,7 +44,7 @@ fn parse_expression(exp string) ?CronExpression { parts := exp.split(' ') if parts.len != 3 { - return error("Expression must contain 5 space-separated parts.") + return error('Expression must contain 5 space-separated parts.') } return CronExpression{ diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v index b3b7422a..562ced27 100644 --- a/src/cron/expression_test.v +++ b/src/cron/expression_test.v @@ -1,5 +1,9 @@ module cron -fn test_parse_star_range() { - assert parse_range('*', 0, 5) == [0, 1, 2, 3, 4, 5] +fn test_parse_star_range() ? { + assert parse_range('*', 0, 5) ? == [u32(0), 1, 2, 3, 4, 5] +} + +fn test_parse_number() ? { + assert parse_range('4', 0, 5) ? 
== [u32(4)] } diff --git a/src/env.v b/src/env/env.v similarity index 100% rename from src/env.v rename to src/env/env.v diff --git a/src/package.v b/src/package/package.v similarity index 100% rename from src/package.v rename to src/package/package.v diff --git a/src/response.v b/src/response/response.v similarity index 100% rename from src/response.v rename to src/response/response.v diff --git a/src/util.v b/src/util/util.v similarity index 100% rename from src/util.v rename to src/util/util.v From 799fe2e4549ed545981349cdd2e193fde25fcce7 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 10 Apr 2022 16:58:55 +0200 Subject: [PATCH 05/67] Added some extra tests for parse_range --- src/cron/expression.v | 8 +++----- src/cron/expression_test.v | 28 ++++++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 5 deletions(-) diff --git a/src/cron/expression.v b/src/cron/expression.v index 2938dd97..87819005 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -17,17 +17,15 @@ fn parse_range(s string, min u32, max u32) ?[]u32 { if s != '*' { exps := s.split('/') + start = math.min(max, math.max(exps[0].u32(), min)) + if exps.len > 1 { interval = exps[1].u32() } // Here, s solely consists of a number, so that's the only value we // should return. else { - return [exps[0].u32()] - } - - if exps[0] != '*' { - start = math.max(exps[0].u32(), min) + return [start] } } diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v index 562ced27..6d293c56 100644 --- a/src/cron/expression_test.v +++ b/src/cron/expression_test.v @@ -7,3 +7,31 @@ fn test_parse_star_range() ? { fn test_parse_number() ? { assert parse_range('4', 0, 5) ? == [u32(4)] } + +fn test_parse_number_too_large() ? { + assert parse_range('10', 0, 6) ? == [u32(6)] +} + +fn test_parse_number_too_small() ? { + assert parse_range('0', 2, 6) ? == [u32(2)] +} + +fn test_parse_step_star() ? { + assert parse_range('*/4', 0, 20) ? == [u32(0), 4, 8, 12, 16, 20] +} + +fn test_parse_step_star_too_large() ? { + assert parse_range('*/21', 0, 20) ? == [u32(0)] +} + +fn test_parse_step_number() ? { + assert parse_range('5/4', 0, 20) ? == [u32(5), 9, 13, 17] +} + +fn test_parse_step_number_too_large() ? { + assert parse_range('10/4', 0, 5) ? == [u32(5)] +} + +fn test_parse_step_number_too_small() ? { + assert parse_range('2/4', 5, 10) ? == [u32(5), 9] +} From e3da3d0d7f1ad0f1d55c2d9c7e775ef3f2670ff6 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 10 Apr 2022 17:47:46 +0200 Subject: [PATCH 06/67] Can't figure out cron algo right now [CI SKIP] --- src/cron/expression.v | 32 +++++++++++++++++++++++++++++++- src/cron/expression_test.v | 7 +++++++ 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/src/cron/expression.v b/src/cron/expression.v index 87819005..a29bf536 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -1,6 +1,7 @@ module cron import math +import time struct CronExpression { minutes []u32 @@ -8,7 +9,32 @@ struct CronExpression { days []u32 } -// parse_range parses a given string into a range of integers, if possible. +// next calculates the earliest time this cron expression is valid. 
+pub fn (ce &CronExpression) next(ref &time.Time) time.Time { + res := time.Time{} + + mut day := 0 + mut hour := 0 + mut minute := 0 + + // Find the next minute + // If ref.minute is greater than + if ref.minute >= ce.minutes[ce.minutes.len - 1] || ref.minute < ce.minutes[0] { + minute = ce.minutes[0] + }else{ + for i in 0..ce.minutes.len { + if ce.minutes[i] > ref.minute { + minute = ce.minutes[i] + break + } + } + } + + return res +} + +// parse_range parses a given string into a range of sorted integers, if +// possible. fn parse_range(s string, min u32, max u32) ?[]u32 { mut out := []u32{} mut start := min @@ -29,6 +55,10 @@ fn parse_range(s string, min u32, max u32) ?[]u32 { } } + if interval == 0 { + return [] + } + for start <= max { out << start start += interval diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v index 6d293c56..abd5e5ff 100644 --- a/src/cron/expression_test.v +++ b/src/cron/expression_test.v @@ -1,5 +1,6 @@ module cron +// =====parse_range===== fn test_parse_star_range() ? { assert parse_range('*', 0, 5) ? == [u32(0), 1, 2, 3, 4, 5] } @@ -24,6 +25,10 @@ fn test_parse_step_star_too_large() ? { assert parse_range('*/21', 0, 20) ? == [u32(0)] } +fn test_parse_step_zero() ? { + assert parse_range('*/0', 0, 20) ? == [] +} + fn test_parse_step_number() ? { assert parse_range('5/4', 0, 20) ? == [u32(5), 9, 13, 17] } @@ -35,3 +40,5 @@ fn test_parse_step_number_too_large() ? { fn test_parse_step_number_too_small() ? { assert parse_range('2/4', 5, 10) ? == [u32(5), 9] } + + From 4a68cb3c032549f179847d81ffa3d8952a2f3c5c Mon Sep 17 00:00:00 2001 From: Renovate Bot Date: Mon, 11 Apr 2022 10:01:38 +0000 Subject: [PATCH 07/67] Add renovate.json --- renovate.json | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 renovate.json diff --git a/renovate.json b/renovate.json new file mode 100644 index 00000000..7190a60b --- /dev/null +++ b/renovate.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json" +} From 135b6c3d7ff5d2f383dbf0877ce16f7a1ead110e Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Mon, 11 Apr 2022 22:16:31 +0200 Subject: [PATCH 08/67] Alpha version cron 'next' function --- src/cron/cron.v | 27 +++++++- src/cron/expression.v | 128 +++++++++++++++++++++++++++++-------- src/cron/expression_test.v | 18 +++--- 3 files changed, 135 insertions(+), 38 deletions(-) diff --git a/src/cron/cron.v b/src/cron/cron.v index 13fc22d8..d802dbf3 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -27,6 +27,29 @@ pub fn cron(conf Config) ? { // } // println(queue) - exp := '10/2 5 *' - println(parse_expression(exp) ?) + // exp := '10/2 5 *' + // println(parse_expression(exp) ?) + ce := parse_expression('0 3 */2') ? + println(ce) + // ce := CronExpression{ + // minutes: [0] + // hours: [3] + // days: [1, 2, 3, 4, 5, 6] + // months: [1, 2] + // } + mut t := time.Time{ + year: 2022 + month: 2 + minute: 9 + hour: 13 + day: 12 + } + + // mut t := time.now() + println(t) + + for _ in 1..25 { + t = ce.next(t) ? + println(t) + } } diff --git a/src/cron/expression.v b/src/cron/expression.v index a29bf536..ff0fcd4e 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -3,50 +3,117 @@ module cron import math import time +const days_in_month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] + struct CronExpression { - minutes []u32 - hours []u32 - days []u32 + minutes []int + hours []int + days []int + months []int } // next calculates the earliest time this cron expression is valid. 
-pub fn (ce &CronExpression) next(ref &time.Time) time.Time { - res := time.Time{} +pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { + mut minute_index := 0 + mut hour_index := 0 + mut day_index := 0 + mut month_index := 0 - mut day := 0 - mut hour := 0 - mut minute := 0 + for month_index < ce.months.len && ref.month > ce.months[month_index] { + month_index++ + } - // Find the next minute - // If ref.minute is greater than - if ref.minute >= ce.minutes[ce.minutes.len - 1] || ref.minute < ce.minutes[0] { - minute = ce.minutes[0] - }else{ - for i in 0..ce.minutes.len { - if ce.minutes[i] > ref.minute { - minute = ce.minutes[i] - break + if month_index < ce.months.len { + for day_index < ce.days.len && ref.day > ce.days[day_index] { + day_index++ + } + + if day_index < ce.days.len { + for hour_index < ce.hours.len && ref.hour > ce.hours[hour_index] { + hour_index++ + } + + if hour_index < ce.hours.len { + // For each unit, we calculate what the next value is + for minute_index < ce.minutes.len && ref.minute >= ce.minutes[minute_index] { + minute_index++ + } } } } - return res + + // Sometime we have to shift values one more + if minute_index == ce.minutes.len && hour_index < ce.hours.len { + hour_index += 1 + } + + if hour_index == ce.hours.len && day_index < ce.days.len { + day_index += 1 + } + + if day_index == ce.days.len && month_index < ce.months.len { + month_index += 1 + } + + mut minute := ce.minutes[minute_index % ce.minutes.len] + mut hour := ce.hours[hour_index % ce.hours.len] + mut day := ce.days[day_index % ce.days.len] + + mut reset := false + + // If the day can't be planned in the current month, we go to the next one + // and go back to day one + if day > days_in_month[ce.months[month_index % ce.months.len] - 1] { + month_index += 1 + day = ce.days[0] + + // Make sure we only plan in a month that the day occurs in + for day > days_in_month[ce.months[month_index & ce.months.len] - 1] { + month_index += 1 + + // Prevent scenario where there are no months that can be scheduled. + if month_index == 2 * ce.months.len { + return error('No schedulable moment.') + } + } + } + + + month := ce.months[month_index % ce.months.len] + mut year := ref.year + + if month_index >= ce.months.len { + year++ + } + + return time.Time{ + year: year + month: month + day: day + minute: minute + hour: hour + } +} + +fn (ce &CronExpression) next_from_now() ?time.Time { + return ce.next(time.now()) } // parse_range parses a given string into a range of sorted integers, if // possible. -fn parse_range(s string, min u32, max u32) ?[]u32 { - mut out := []u32{} +fn parse_range(s string, min int, max int) ?[]int { + mut out := []int{} mut start := min - mut interval := u32(1) + mut interval := 1 if s != '*' { exps := s.split('/') - start = math.min(max, math.max(exps[0].u32(), min)) + start = math.min(max, math.max(exps[0].int(), min)) if exps.len > 1 { - interval = exps[1].u32() + interval = exps[1].int() } // Here, s solely consists of a number, so that's the only value we // should return. 
@@ -69,15 +136,22 @@ fn parse_range(s string, min u32, max u32) ?[]u32 { // min hour day month day-of-week fn parse_expression(exp string) ?CronExpression { - parts := exp.split(' ') + mut parts := exp.split(' ') - if parts.len != 3 { - return error('Expression must contain 5 space-separated parts.') + if parts.len < 2 || parts.len > 4 { + return error('Expression must contain between 2 and 4 space-separated parts.') + } + + // For ease of use, we allow the user to only specify as many parts as they + // need. + for parts.len < 4 { + parts << '*' } return CronExpression{ minutes: parse_range(parts[0], 0, 59) ? hours: parse_range(parts[1], 0, 23) ? - days: parse_range(parts[2], 0, 31) ? + days: parse_range(parts[2], 1, 31) ? + months: parse_range(parts[3], 1, 12) ? } } diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v index abd5e5ff..9279ccef 100644 --- a/src/cron/expression_test.v +++ b/src/cron/expression_test.v @@ -2,27 +2,27 @@ module cron // =====parse_range===== fn test_parse_star_range() ? { - assert parse_range('*', 0, 5) ? == [u32(0), 1, 2, 3, 4, 5] + assert parse_range('*', 0, 5) ? == [0, 1, 2, 3, 4, 5] } fn test_parse_number() ? { - assert parse_range('4', 0, 5) ? == [u32(4)] + assert parse_range('4', 0, 5) ? == [4] } fn test_parse_number_too_large() ? { - assert parse_range('10', 0, 6) ? == [u32(6)] + assert parse_range('10', 0, 6) ? == [6] } fn test_parse_number_too_small() ? { - assert parse_range('0', 2, 6) ? == [u32(2)] + assert parse_range('0', 2, 6) ? == [2] } fn test_parse_step_star() ? { - assert parse_range('*/4', 0, 20) ? == [u32(0), 4, 8, 12, 16, 20] + assert parse_range('*/4', 0, 20) ? == [0, 4, 8, 12, 16, 20] } fn test_parse_step_star_too_large() ? { - assert parse_range('*/21', 0, 20) ? == [u32(0)] + assert parse_range('*/21', 0, 20) ? == [0] } fn test_parse_step_zero() ? { @@ -30,15 +30,15 @@ fn test_parse_step_zero() ? { } fn test_parse_step_number() ? { - assert parse_range('5/4', 0, 20) ? == [u32(5), 9, 13, 17] + assert parse_range('5/4', 0, 20) ? == [5, 9, 13, 17] } fn test_parse_step_number_too_large() ? { - assert parse_range('10/4', 0, 5) ? == [u32(5)] + assert parse_range('10/4', 0, 5) ? == [5] } fn test_parse_step_number_too_small() ? { - assert parse_range('2/4', 5, 10) ? == [u32(5), 9] + assert parse_range('2/4', 5, 10) ? == [5, 9] } From 0e5f31e64907fd10c32738d0a53a73cf9218f1b5 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Mon, 11 Apr 2022 22:30:22 +0200 Subject: [PATCH 09/67] Added some much-needed documentation --- src/cron/cron.v | 2 +- src/cron/expression.v | 38 +++++++++++++++++++++++++++----------- 2 files changed, 28 insertions(+), 12 deletions(-) diff --git a/src/cron/cron.v b/src/cron/cron.v index d802dbf3..6d7b7aa8 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -39,7 +39,7 @@ pub fn cron(conf Config) ? { // } mut t := time.Time{ year: 2022 - month: 2 + month: 12 minute: 9 hour: 13 day: 12 diff --git a/src/cron/expression.v b/src/cron/expression.v index ff0fcd4e..b3ae38d1 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -12,13 +12,24 @@ struct CronExpression { months []int } -// next calculates the earliest time this cron expression is valid. +// next calculates the earliest time this cron expression is valid. It will +// always pick a moment in the future, even if ref matches completely up to the +// minute. This function conciously does not take gap years into account. 
pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { + // For all of these values, the rule is the following: if their value is + // the length of their respective array in the CronExpression object, that + // means we've looped back around. This means that the "bigger" value has + // to be incremented by one. For example, if the minutes have looped + // around, that means that the hour has to be incremented as well. mut minute_index := 0 mut hour_index := 0 mut day_index := 0 mut month_index := 0 + // This chain is the same logic multiple times, namely that if a "bigger" + // value loops around, then the smaller value will always reset as well. + // For example, if we're going to a new day, the hour & minute will always + // be their smallest value again. for month_index < ce.months.len && ref.month > ce.months[month_index] { month_index++ } @@ -34,7 +45,9 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { } if hour_index < ce.hours.len { - // For each unit, we calculate what the next value is + // Minute is the only value where we explicitely make sure we + // can't match ref's value exactly. This is to ensure we only + // return values in the future. for minute_index < ce.minutes.len && ref.minute >= ce.minutes[minute_index] { minute_index++ } @@ -42,8 +55,9 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { } } - - // Sometime we have to shift values one more + // Here, we increment the "bigger" values by one if the smaller ones loop + // around. The order is important, as it allows a sort-of waterfall effect + // to occur which updates all values if required. if minute_index == ce.minutes.len && hour_index < ce.hours.len { hour_index += 1 } @@ -60,19 +74,20 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { mut hour := ce.hours[hour_index % ce.hours.len] mut day := ce.days[day_index % ce.days.len] - mut reset := false - - // If the day can't be planned in the current month, we go to the next one - // and go back to day one + // Sometimes, we end up with a day that does not exist within the selected + // month, e.g. day 30 in February. When this occurs, we reset day back to + // the smallest value & loop over to the next month that does have this + // day. if day > days_in_month[ce.months[month_index % ce.months.len] - 1] { - month_index += 1 day = ce.days[0] + month_index += 1 - // Make sure we only plan in a month that the day occurs in for day > days_in_month[ce.months[month_index & ce.months.len] - 1] { month_index += 1 - // Prevent scenario where there are no months that can be scheduled. + // If for whatever reason the day value ends up being something + // that can't be scheduled in any month, we have to make sure we + // don't create an infinite loop. if month_index == 2 * ce.months.len { return error('No schedulable moment.') } @@ -83,6 +98,7 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { month := ce.months[month_index % ce.months.len] mut year := ref.year + // If the month loops over, we need to increment the year. 
if month_index >= ce.months.len { year++ } From ab4f64b6b6997c91c637ed6861600706416ddda0 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Mon, 11 Apr 2022 22:52:06 +0200 Subject: [PATCH 10/67] Failed attempt at x,y,z cron stuff [CI SKIP] --- src/cron/cron.v | 2 +- src/cron/expression.v | 39 ++++++++++++++++++++++++++------------ src/cron/expression_test.v | 2 -- 3 files changed, 28 insertions(+), 15 deletions(-) diff --git a/src/cron/cron.v b/src/cron/cron.v index 6d7b7aa8..74e203f5 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -48,7 +48,7 @@ pub fn cron(conf Config) ? { // mut t := time.now() println(t) - for _ in 1..25 { + for _ in 1 .. 25 { t = ce.next(t) ? println(t) } diff --git a/src/cron/expression.v b/src/cron/expression.v index b3ae38d1..71ee9a10 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -78,11 +78,11 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { // month, e.g. day 30 in February. When this occurs, we reset day back to // the smallest value & loop over to the next month that does have this // day. - if day > days_in_month[ce.months[month_index % ce.months.len] - 1] { + if day > cron.days_in_month[ce.months[month_index % ce.months.len] - 1] { day = ce.days[0] month_index += 1 - for day > days_in_month[ce.months[month_index & ce.months.len] - 1] { + for day > cron.days_in_month[ce.months[month_index & ce.months.len] - 1] { month_index += 1 // If for whatever reason the day value ends up being something @@ -94,7 +94,6 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { } } - month := ce.months[month_index % ce.months.len] mut year := ref.year @@ -118,8 +117,7 @@ fn (ce &CronExpression) next_from_now() ?time.Time { // parse_range parses a given string into a range of sorted integers, if // possible. -fn parse_range(s string, min int, max int) ?[]int { - mut out := []int{} +fn parse_range(s string, min int, max int, mut bitv []bool) ? { mut start := min mut interval := 1 @@ -134,18 +132,35 @@ fn parse_range(s string, min int, max int) ?[]int { // Here, s solely consists of a number, so that's the only value we // should return. else { - return [start] + bitv[start - min - 1] = true + return } } if interval == 0 { - return [] + return } for start <= max { - out << start + bitv[start - min - 1] = true start += interval } +} + +fn parse_part(s string, min int, max int) ?[]int { + mut bitv := []bool{init: false, len: max - min + 1} + + for range in s.split(',') { + parse_range(range, min, max, mut bitv) ? + } + + mut out := []int{} + + for i in 0..max + 1 { + if bitv[i] { + out << min + i + } + } return out } @@ -165,9 +180,9 @@ fn parse_expression(exp string) ?CronExpression { } return CronExpression{ - minutes: parse_range(parts[0], 0, 59) ? - hours: parse_range(parts[1], 0, 23) ? - days: parse_range(parts[2], 1, 31) ? - months: parse_range(parts[3], 1, 12) ? + minutes: parse_part(parts[0], 0, 59) ? + hours: parse_part(parts[1], 0, 23) ? + days: parse_part(parts[2], 1, 31) ? + months: parse_part(parts[3], 1, 12) ? } } diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v index 9279ccef..2d58b15c 100644 --- a/src/cron/expression_test.v +++ b/src/cron/expression_test.v @@ -40,5 +40,3 @@ fn test_parse_step_number_too_large() ? { fn test_parse_step_number_too_small() ? { assert parse_range('2/4', 5, 10) ? 
== [5, 9] } - - From 04e54b8b101a477840920f7037bf02dcd2812e95 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 12 Apr 2022 09:00:18 +0200 Subject: [PATCH 11/67] Migrated tests to new bitv-based implementation --- src/cron/expression.v | 22 ++++++----- src/cron/expression_parse_test.v | 65 ++++++++++++++++++++++++++++++++ src/cron/expression_test.v | 42 --------------------- 3 files changed, 78 insertions(+), 51 deletions(-) create mode 100644 src/cron/expression_parse_test.v delete mode 100644 src/cron/expression_test.v diff --git a/src/cron/expression.v b/src/cron/expression.v index 71ee9a10..60e1b747 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -147,6 +147,18 @@ fn parse_range(s string, min int, max int, mut bitv []bool) ? { } } +fn bitv_to_ints(bitv []bool, min int) []int { + mut out := []int{} + + for i in 0..bitv.len { + if bitv[i] { + out << min + i + } + } + + return out +} + fn parse_part(s string, min int, max int) ?[]int { mut bitv := []bool{init: false, len: max - min + 1} @@ -154,15 +166,7 @@ fn parse_part(s string, min int, max int) ?[]int { parse_range(range, min, max, mut bitv) ? } - mut out := []int{} - - for i in 0..max + 1 { - if bitv[i] { - out << min + i - } - } - - return out + return bitv_to_ints(bitv, min) } // min hour day month day-of-week diff --git a/src/cron/expression_parse_test.v b/src/cron/expression_parse_test.v new file mode 100644 index 00000000..7a4974dd --- /dev/null +++ b/src/cron/expression_parse_test.v @@ -0,0 +1,65 @@ +module cron + +// parse_range_error returns the returned error message. If the result is '', +// that means the function didn't error. +fn parse_range_error(s string, min int, max int) string { + mut bitv := []bool{init: false, len: max - min + 1} + + parse_range(s, min, max, mut bitv) or { + return err.msg + } + + return '' +} + +// =====parse_range===== +fn test_parse_star_range() ? { + mut bitv := []bool{init: false, len: 6} + parse_range('*', 0, 5, mut bitv) ? + + assert bitv == [true, true, true, true, true, true] +} + +fn test_parse_number() ? { + mut bitv := []bool{init: false, len: 6} + parse_range('4', 0, 5, mut bitv) ? + + assert bitv_to_ints(bitv, 0) == [4] +} + +fn test_parse_number_too_large() ? { + assert parse_range_error('10', 0, 6) == 'Out of range.' +} + +fn test_parse_number_too_small() ? { + assert parse_range_error('0', 2, 6) == 'Out of range.' +} + +fn test_parse_step_star() ? { + mut bitv := []bool{init: false, len: 21} + parse_range('*/4', 0, 20, mut bitv) ? + + assert bitv_to_ints(bitv, 0) == [0, 4, 8, 12, 16, 20] +} + +fn test_parse_step_star_too_large() ? { + assert parse_range_error('*/21', 0, 20) == 'Step too large.' +} + +fn test_parse_step_zero() ? { + assert parse_range_error('*/0', 0, 20) == 'Step size zero not allowed.' +} + +fn test_parse_step_number() ? { + mut bitv := []bool{init: false, len: 21} + parse_range('5/4', 0, 20, mut bitv) ? + assert bitv_to_ints(bitv, 0) == [5, 9, 13, 17] +} + +fn test_parse_step_number_too_large() ? { + assert parse_range_error('10/4', 0, 5) == 'Out of range.' +} + +fn test_parse_step_number_too_small() ? { + assert parse_range_error('2/4', 5, 10) == 'Out of range.' +} diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v deleted file mode 100644 index 2d58b15c..00000000 --- a/src/cron/expression_test.v +++ /dev/null @@ -1,42 +0,0 @@ -module cron - -// =====parse_range===== -fn test_parse_star_range() ? { - assert parse_range('*', 0, 5) ? == [0, 1, 2, 3, 4, 5] -} - -fn test_parse_number() ? { - assert parse_range('4', 0, 5) ? 
== [4] -} - -fn test_parse_number_too_large() ? { - assert parse_range('10', 0, 6) ? == [6] -} - -fn test_parse_number_too_small() ? { - assert parse_range('0', 2, 6) ? == [2] -} - -fn test_parse_step_star() ? { - assert parse_range('*/4', 0, 20) ? == [0, 4, 8, 12, 16, 20] -} - -fn test_parse_step_star_too_large() ? { - assert parse_range('*/21', 0, 20) ? == [0] -} - -fn test_parse_step_zero() ? { - assert parse_range('*/0', 0, 20) ? == [] -} - -fn test_parse_step_number() ? { - assert parse_range('5/4', 0, 20) ? == [5, 9, 13, 17] -} - -fn test_parse_step_number_too_large() ? { - assert parse_range('10/4', 0, 5) ? == [5] -} - -fn test_parse_step_number_too_small() ? { - assert parse_range('2/4', 5, 10) ? == [5, 9] -} From f4bb03f488f489ba01d6efb4e8eae18f1b6da422 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 12 Apr 2022 09:48:25 +0200 Subject: [PATCH 12/67] Tests n bug fixes --- src/cron/expression.v | 50 +++++++++++++++++++++++--------- src/cron/expression_parse_test.v | 47 +++++++++++++++++++++--------- 2 files changed, 70 insertions(+), 27 deletions(-) diff --git a/src/cron/expression.v b/src/cron/expression.v index 60e1b747..46f92f9d 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -1,6 +1,5 @@ module cron -import math import time const days_in_month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] @@ -121,28 +120,50 @@ fn parse_range(s string, min int, max int, mut bitv []bool) ? { mut start := min mut interval := 1 - if s != '*' { - exps := s.split('/') + exps := s.split('/') - start = math.min(max, math.max(exps[0].int(), min)) + if exps[0] != '*' { + start = exps[0].int() - if exps.len > 1 { - interval = exps[1].int() + // The builtin parsing functions return zero if the string can't be + // parsed into a number, so we have to explicitely check whether they + // actually entered zero or if it's an invalid number. + if start == 0 && exps[0] != '0' { + return error('Invalid number.') } - // Here, s solely consists of a number, so that's the only value we - // should return. - else { - bitv[start - min - 1] = true - return + + // Check whether the start value is out of range + if start < min || start > max { + return error('Out of range.') } } - if interval == 0 { + if exps.len > 1 { + interval = exps[1].int() + + // interval being zero is always invalid, but we want to check why + // it's invalid for better error messages. + if interval == 0 { + if exps[1] != '0' { + return error('Invalid number.') + }else{ + return error('Step size zero not allowed.') + } + } + + if interval > max - min { + return error('Step size too large.') + } + } + // Here, s solely consists of a number, so that's the only value we + // should return. 
+ else if exps[0] != '*' { + bitv[start - min] = true return } for start <= max { - bitv[start - min - 1] = true + bitv[start - min] = true start += interval } } @@ -171,7 +192,8 @@ fn parse_part(s string, min int, max int) ?[]int { // min hour day month day-of-week fn parse_expression(exp string) ?CronExpression { - mut parts := exp.split(' ') + // The filter allows for multiple spaces between parts + mut parts := exp.split(' ').filter(it != '') if parts.len < 2 || parts.len > 4 { return error('Expression must contain between 2 and 4 space-separated parts.') diff --git a/src/cron/expression_parse_test.v b/src/cron/expression_parse_test.v index 7a4974dd..8f228507 100644 --- a/src/cron/expression_parse_test.v +++ b/src/cron/expression_parse_test.v @@ -13,53 +13,74 @@ fn parse_range_error(s string, min int, max int) string { } // =====parse_range===== -fn test_parse_star_range() ? { +fn test_range_star_range() ? { mut bitv := []bool{init: false, len: 6} parse_range('*', 0, 5, mut bitv) ? assert bitv == [true, true, true, true, true, true] } -fn test_parse_number() ? { +fn test_range_number() ? { mut bitv := []bool{init: false, len: 6} parse_range('4', 0, 5, mut bitv) ? assert bitv_to_ints(bitv, 0) == [4] } -fn test_parse_number_too_large() ? { +fn test_range_number_too_large() ? { assert parse_range_error('10', 0, 6) == 'Out of range.' } -fn test_parse_number_too_small() ? { +fn test_range_number_too_small() ? { assert parse_range_error('0', 2, 6) == 'Out of range.' } -fn test_parse_step_star() ? { +fn test_range_number_invalid() ? { + assert parse_range_error('x', 0, 6) == 'Invalid number.' +} + +fn test_range_step_star_1() ? { mut bitv := []bool{init: false, len: 21} parse_range('*/4', 0, 20, mut bitv) ? assert bitv_to_ints(bitv, 0) == [0, 4, 8, 12, 16, 20] } -fn test_parse_step_star_too_large() ? { - assert parse_range_error('*/21', 0, 20) == 'Step too large.' +fn test_range_step_star_2() ? { + mut bitv := []bool{init: false, len: 9} + parse_range('*/3', 1, 8, mut bitv) ? + + assert bitv_to_ints(bitv, 1) == [1, 4, 7] } -fn test_parse_step_zero() ? { +fn test_range_step_star_too_large() ? { + assert parse_range_error('*/21', 0, 20) == 'Step size too large.' +} + +fn test_range_step_zero() ? { assert parse_range_error('*/0', 0, 20) == 'Step size zero not allowed.' } -fn test_parse_step_number() ? { +fn test_range_step_number() ? { mut bitv := []bool{init: false, len: 21} - parse_range('5/4', 0, 20, mut bitv) ? - assert bitv_to_ints(bitv, 0) == [5, 9, 13, 17] + parse_range('5/4', 2, 22, mut bitv) ? + + assert bitv_to_ints(bitv, 2) == [5, 9, 13, 17, 21] } -fn test_parse_step_number_too_large() ? { +fn test_range_step_number_too_large() ? { assert parse_range_error('10/4', 0, 5) == 'Out of range.' } -fn test_parse_step_number_too_small() ? { +fn test_range_step_number_too_small() ? { assert parse_range_error('2/4', 5, 10) == 'Out of range.' } + +// =====parse_part===== +fn test_part_single() ? { + assert parse_part('*', 0, 5) ? == [0, 1, 2, 3, 4, 5] +} + +fn test_part_multiple() ? { + assert parse_part('*/2,2/3', 1, 8) ? 
== [1, 2, 3, 5, 7, 8] +} From 2942793f40d596eef7bde4868a0ec451d0982498 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 12 Apr 2022 10:35:29 +0200 Subject: [PATCH 13/67] Added support for x-y syntax --- src/cron/cron.v | 2 +- src/cron/expression.v | 60 +++++++++++++++++++++++++------- src/cron/expression_parse_test.v | 30 +++++++++++----- 3 files changed, 70 insertions(+), 22 deletions(-) diff --git a/src/cron/cron.v b/src/cron/cron.v index 74e203f5..a049eec8 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -29,7 +29,7 @@ pub fn cron(conf Config) ? { // println(queue) // exp := '10/2 5 *' // println(parse_expression(exp) ?) - ce := parse_expression('0 3 */2') ? + ce := parse_expression('0 35 */2') ? println(ce) // ce := CronExpression{ // minutes: [0] diff --git a/src/cron/expression.v b/src/cron/expression.v index 46f92f9d..0bc15912 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -118,17 +118,28 @@ fn (ce &CronExpression) next_from_now() ?time.Time { // possible. fn parse_range(s string, min int, max int, mut bitv []bool) ? { mut start := min + mut end := max mut interval := 1 exps := s.split('/') + if exps.len > 2 { + return error('Invalid expression.') + } + if exps[0] != '*' { - start = exps[0].int() + dash_parts := exps[0].split('-') + + if dash_parts.len > 2 { + return error('Invalid expression.') + } + + start = dash_parts[0].int() // The builtin parsing functions return zero if the string can't be // parsed into a number, so we have to explicitely check whether they // actually entered zero or if it's an invalid number. - if start == 0 && exps[0] != '0' { + if start == 0 && dash_parts[0] != '0' { return error('Invalid number.') } @@ -136,6 +147,18 @@ fn parse_range(s string, min int, max int, mut bitv []bool) ? { if start < min || start > max { return error('Out of range.') } + + if dash_parts.len == 2 { + end = dash_parts[1].int() + + if end == 0 && dash_parts[1] != '0' { + return error('Invalid number.') + } + + if end < start || end > max { + return error('Out of range.') + } + } } if exps.len > 1 { @@ -146,7 +169,7 @@ fn parse_range(s string, min int, max int, mut bitv []bool) ? { if interval == 0 { if exps[1] != '0' { return error('Invalid number.') - }else{ + } else { return error('Step size zero not allowed.') } } @@ -157,12 +180,12 @@ fn parse_range(s string, min int, max int, mut bitv []bool) ? { } // Here, s solely consists of a number, so that's the only value we // should return. - else if exps[0] != '*' { + else if exps[0] != '*' && !exps[0].contains('-') { bitv[start - min] = true return } - for start <= max { + for start <= end { bitv[start - min] = true start += interval } @@ -171,7 +194,7 @@ fn parse_range(s string, min int, max int, mut bitv []bool) ? { fn bitv_to_ints(bitv []bool, min int) []int { mut out := []int{} - for i in 0..bitv.len { + for i in 0 .. bitv.len { if bitv[i] { out << min + i } @@ -181,7 +204,7 @@ fn bitv_to_ints(bitv []bool, min int) []int { } fn parse_part(s string, min int, max int) ?[]int { - mut bitv := []bool{init: false, len: max - min + 1} + mut bitv := []bool{len: max - min + 1, init: false} for range in s.split(',') { parse_range(range, min, max, mut bitv) ? @@ -190,7 +213,8 @@ fn parse_part(s string, min int, max int) ?[]int { return bitv_to_ints(bitv, min) } -// min hour day month day-of-week +// parse_expression parses an entire cron expression string into a +// CronExpression object, if possible. 
fn parse_expression(exp string) ?CronExpression { // The filter allows for multiple spaces between parts mut parts := exp.split(' ').filter(it != '') @@ -205,10 +229,22 @@ fn parse_expression(exp string) ?CronExpression { parts << '*' } + mut part_results := [][]int{} + + mins := [0, 0, 1, 1] + maxs := [59, 23, 31, 12] + + // This for loop allows us to more clearly propagate the error to the user. + for i, min in mins { + part_results << parse_part(parts[i], min, maxs[i]) or { + return error('An error occurred with part $i: $err.msg') + } + } + return CronExpression{ - minutes: parse_part(parts[0], 0, 59) ? - hours: parse_part(parts[1], 0, 23) ? - days: parse_part(parts[2], 1, 31) ? - months: parse_part(parts[3], 1, 12) ? + minutes: part_results[0] + hours: part_results[1] + days: part_results[2] + months: part_results[3] } } diff --git a/src/cron/expression_parse_test.v b/src/cron/expression_parse_test.v index 8f228507..8f3ac38e 100644 --- a/src/cron/expression_parse_test.v +++ b/src/cron/expression_parse_test.v @@ -3,25 +3,23 @@ module cron // parse_range_error returns the returned error message. If the result is '', // that means the function didn't error. fn parse_range_error(s string, min int, max int) string { - mut bitv := []bool{init: false, len: max - min + 1} + mut bitv := []bool{len: max - min + 1, init: false} - parse_range(s, min, max, mut bitv) or { - return err.msg - } + parse_range(s, min, max, mut bitv) or { return err.msg } return '' } // =====parse_range===== fn test_range_star_range() ? { - mut bitv := []bool{init: false, len: 6} + mut bitv := []bool{len: 6, init: false} parse_range('*', 0, 5, mut bitv) ? assert bitv == [true, true, true, true, true, true] } fn test_range_number() ? { - mut bitv := []bool{init: false, len: 6} + mut bitv := []bool{len: 6, init: false} parse_range('4', 0, 5, mut bitv) ? assert bitv_to_ints(bitv, 0) == [4] @@ -40,14 +38,14 @@ fn test_range_number_invalid() ? { } fn test_range_step_star_1() ? { - mut bitv := []bool{init: false, len: 21} + mut bitv := []bool{len: 21, init: false} parse_range('*/4', 0, 20, mut bitv) ? assert bitv_to_ints(bitv, 0) == [0, 4, 8, 12, 16, 20] } fn test_range_step_star_2() ? { - mut bitv := []bool{init: false, len: 9} + mut bitv := []bool{len: 8, init: false} parse_range('*/3', 1, 8, mut bitv) ? assert bitv_to_ints(bitv, 1) == [1, 4, 7] @@ -62,7 +60,7 @@ fn test_range_step_zero() ? { } fn test_range_step_number() ? { - mut bitv := []bool{init: false, len: 21} + mut bitv := []bool{len: 21, init: false} parse_range('5/4', 2, 22, mut bitv) ? assert bitv_to_ints(bitv, 2) == [5, 9, 13, 17, 21] @@ -76,6 +74,20 @@ fn test_range_step_number_too_small() ? { assert parse_range_error('2/4', 5, 10) == 'Out of range.' } +fn test_range_dash() ? { + mut bitv := []bool{len: 10, init: false} + parse_range('4-8', 0, 9, mut bitv) ? + + assert bitv_to_ints(bitv, 0) == [4, 5, 6, 7, 8] +} + +fn test_range_dash_step() ? { + mut bitv := []bool{len: 10, init: false} + parse_range('4-8/2', 0, 9, mut bitv) ? + + assert bitv_to_ints(bitv, 0) == [4, 6, 8] +} + // =====parse_part===== fn test_part_single() ? { assert parse_part('*', 0, 5) ? 
== [0, 1, 2, 3, 4, 5] From 65d6aae701d087f381ff5f11ec90f7afda5e9f2c Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 12 Apr 2022 11:10:49 +0200 Subject: [PATCH 14/67] Made sure unix value is calculated --- src/cron/cron.v | 44 +++++++++++-------------------------------- src/cron/expression.v | 4 ++-- 2 files changed, 13 insertions(+), 35 deletions(-) diff --git a/src/cron/cron.v b/src/cron/cron.v index a049eec8..cd32c448 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -3,6 +3,7 @@ module cron import git import datatypes import time +import rand struct ScheduledBuild { repo git.GitRepo @@ -14,42 +15,19 @@ fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { } pub fn cron(conf Config) ? { - // mut queue := datatypes.MinHeap{} - // repos_map := git.get_repos(conf.address, conf.api_key) ? + mut queue := datatypes.MinHeap{} - // for _, repo in repos_map { - // scheduled := ScheduledBuild{ - // repo: repo - // timestamp: 25 - // } + for _ in 0..5000 { + minute := rand.int_in_range(0, 60) ? + hour := rand.int_in_range(0, 23) ? + ce := parse_expression('$minute $hour') ? - // queue.insert(scheduled) - // } - - // println(queue) - // exp := '10/2 5 *' - // println(parse_expression(exp) ?) - ce := parse_expression('0 35 */2') ? - println(ce) - // ce := CronExpression{ - // minutes: [0] - // hours: [3] - // days: [1, 2, 3, 4, 5, 6] - // months: [1, 2] - // } - mut t := time.Time{ - year: 2022 - month: 12 - minute: 9 - hour: 13 - day: 12 + t := ce.next_from_now() ? + // println(t) + queue.insert(t) } - // mut t := time.now() - println(t) - - for _ in 1 .. 25 { - t = ce.next(t) ? - println(t) + for queue.len() > 0 { + println(queue.pop() ?) } } diff --git a/src/cron/expression.v b/src/cron/expression.v index 0bc15912..d275a423 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -101,13 +101,13 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { year++ } - return time.Time{ + return time.new_time(time.Time{ year: year month: month day: day minute: minute hour: hour - } + }) } fn (ce &CronExpression) next_from_now() ?time.Time { From eb65bb8a69cebc4c687bbecee6214f57bfa61db4 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 12 Apr 2022 14:22:40 +0200 Subject: [PATCH 15/67] These bugs are gonna take a while --- src/cron/cron.v | 17 +++++------------ src/cron/expression.v | 26 ++++++++++++++----------- src/cron/expression_test.v | 39 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 59 insertions(+), 23 deletions(-) create mode 100644 src/cron/expression_test.v diff --git a/src/cron/cron.v b/src/cron/cron.v index cd32c448..931d1c8d 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -17,17 +17,10 @@ fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { pub fn cron(conf Config) ? { mut queue := datatypes.MinHeap{} - for _ in 0..5000 { - minute := rand.int_in_range(0, 60) ? - hour := rand.int_in_range(0, 23) ? - ce := parse_expression('$minute $hour') ? + ce := parse_expression('0 3') ? + t := time.parse('2002-01-01 00:00:00') ? - t := ce.next_from_now() ? - // println(t) - queue.insert(t) - } - - for queue.len() > 0 { - println(queue.pop() ?) - } + println(t) + t2 := ce.next(t) ? 
+ println(t2) } diff --git a/src/cron/expression.v b/src/cron/expression.v index d275a423..600e252b 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -2,8 +2,6 @@ module cron import time -const days_in_month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] - struct CronExpression { minutes []int hours []int @@ -15,6 +13,13 @@ struct CronExpression { // always pick a moment in the future, even if ref matches completely up to the // minute. This function conciously does not take gap years into account. pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { + // If the given ref matches the next cron occurence up to the minute, it + // will return that value. Because we always want to return a value in the + // future, we artifically shift the ref 60 seconds to make sure we always + // match in the future. A shift of 60 seconds is enough because the cron + // expression does not allow for accuracy smaller than one minute. + sref := ref + // For all of these values, the rule is the following: if their value is // the length of their respective array in the CronExpression object, that // means we've looped back around. This means that the "bigger" value has @@ -29,25 +34,25 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { // value loops around, then the smaller value will always reset as well. // For example, if we're going to a new day, the hour & minute will always // be their smallest value again. - for month_index < ce.months.len && ref.month > ce.months[month_index] { + for month_index < ce.months.len && sref.month > ce.months[month_index] { month_index++ } if month_index < ce.months.len { - for day_index < ce.days.len && ref.day > ce.days[day_index] { + for day_index < ce.days.len && sref.day > ce.days[day_index] { day_index++ } if day_index < ce.days.len { - for hour_index < ce.hours.len && ref.hour > ce.hours[hour_index] { + for hour_index < ce.hours.len && sref.hour > ce.hours[hour_index] { hour_index++ } if hour_index < ce.hours.len { // Minute is the only value where we explicitely make sure we - // can't match ref's value exactly. This is to ensure we only + // can't match sref's value exactly. This is to ensure we only // return values in the future. - for minute_index < ce.minutes.len && ref.minute >= ce.minutes[minute_index] { + for minute_index < ce.minutes.len && sref.minute >= ce.minutes[minute_index] { minute_index++ } } @@ -60,7 +65,6 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { if minute_index == ce.minutes.len && hour_index < ce.hours.len { hour_index += 1 } - if hour_index == ce.hours.len && day_index < ce.days.len { day_index += 1 } @@ -77,11 +81,11 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { // month, e.g. day 30 in February. When this occurs, we reset day back to // the smallest value & loop over to the next month that does have this // day. - if day > cron.days_in_month[ce.months[month_index % ce.months.len] - 1] { + if day > time.month_days[ce.months[month_index % ce.months.len] - 1] { day = ce.days[0] month_index += 1 - for day > cron.days_in_month[ce.months[month_index & ce.months.len] - 1] { + for day > time.month_days[ce.months[month_index & ce.months.len] - 1] { month_index += 1 // If for whatever reason the day value ends up being something @@ -94,7 +98,7 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { } month := ce.months[month_index % ce.months.len] - mut year := ref.year + mut year := sref.year // If the month loops over, we need to increment the year. 
if month_index >= ce.months.len { diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v new file mode 100644 index 00000000..bc489777 --- /dev/null +++ b/src/cron/expression_test.v @@ -0,0 +1,39 @@ +module cron + +import time { new_time, Time, parse } + +fn test_next_simple() ? { + ce := parse_expression('0 3') ? + t := parse('2002-01-01 00:00:00') ? + t2 := ce.next(t) ? + + assert t2.year == 2002 + assert t2.month == 1 + assert t2.day == 1 + assert t2.hour == 3 + assert t2.minute == 0 +} + +fn test_next_identical() ? { + ce := parse_expression('0 3') ? + t := parse('2002-01-01 03:00:00') ? + t2 := ce.next(t) ? + + assert t2.year == 2002 + assert t2.month == 1 + assert t2.day == 2 + assert t2.hour == 3 + assert t2.minute == 0 +} + +fn test_next_next_day() ? { + ce := parse_expression('0 3') ? + t := parse('2002-01-01 04:00:00') ? + t2 := ce.next(t) ? + + assert t2.year == 2002 + assert t2.month == 1 + assert t2.day == 2 + assert t2.hour == 3 + assert t2.minute == 0 +} From e6033f9ab44ab7516fd251eef861e0b82c26a277 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 12 Apr 2022 20:53:20 +0200 Subject: [PATCH 16/67] Ran vfmt --- src/cron/expression_test.v | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v index bc489777..aaead9c4 100644 --- a/src/cron/expression_test.v +++ b/src/cron/expression_test.v @@ -1,6 +1,6 @@ module cron -import time { new_time, Time, parse } +import time { parse } fn test_next_simple() ? { ce := parse_expression('0 3') ? From 1116fee3fc45d9c5f5826dbfe90a4d915fc3fafa Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 12 Apr 2022 21:16:09 +0200 Subject: [PATCH 17/67] Actually possibly kinda decent cron next func --- src/cron/cron.v | 6 ----- src/cron/expression.v | 6 ++--- src/cron/expression_test.v | 53 +++++++++++++++----------------------- 3 files changed, 24 insertions(+), 41 deletions(-) diff --git a/src/cron/cron.v b/src/cron/cron.v index 931d1c8d..25263269 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -1,9 +1,7 @@ module cron import git -import datatypes import time -import rand struct ScheduledBuild { repo git.GitRepo @@ -15,12 +13,8 @@ fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { } pub fn cron(conf Config) ? { - mut queue := datatypes.MinHeap{} - ce := parse_expression('0 3') ? t := time.parse('2002-01-01 00:00:00') ? - - println(t) t2 := ce.next(t) ? println(t2) } diff --git a/src/cron/expression.v b/src/cron/expression.v index 600e252b..0a355418 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -38,17 +38,17 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { month_index++ } - if month_index < ce.months.len { + if month_index < ce.months.len && sref.month == ce.months[month_index] { for day_index < ce.days.len && sref.day > ce.days[day_index] { day_index++ } - if day_index < ce.days.len { + if day_index < ce.days.len && ce.days[day_index] == sref.day { for hour_index < ce.hours.len && sref.hour > ce.hours[hour_index] { hour_index++ } - if hour_index < ce.hours.len { + if hour_index < ce.hours.len && ce.hours[hour_index] == sref.hour { // Minute is the only value where we explicitely make sure we // can't match sref's value exactly. This is to ensure we only // return values in the future. 
diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v index aaead9c4..ce8526b6 100644 --- a/src/cron/expression_test.v +++ b/src/cron/expression_test.v @@ -2,38 +2,27 @@ module cron import time { parse } +fn util_test_time(exp string, t1_str string, t2_str string) ? { + ce := parse_expression(exp) ? + t1 := parse(t1_str) ? + t2 := parse(t2_str) ? + + t3 := ce.next(t1) ? + + assert t2.year == t3.year + assert t2.month == t3.month + assert t2.day == t3.day + assert t2.hour == t3.hour + assert t2.minute == t3.minute +} + fn test_next_simple() ? { - ce := parse_expression('0 3') ? - t := parse('2002-01-01 00:00:00') ? - t2 := ce.next(t) ? + // Very simple + util_test_time('0 3', '2002-01-01 00:00:00', '2002-01-01 03:00:00') ? - assert t2.year == 2002 - assert t2.month == 1 - assert t2.day == 1 - assert t2.hour == 3 - assert t2.minute == 0 -} - -fn test_next_identical() ? { - ce := parse_expression('0 3') ? - t := parse('2002-01-01 03:00:00') ? - t2 := ce.next(t) ? - - assert t2.year == 2002 - assert t2.month == 1 - assert t2.day == 2 - assert t2.hour == 3 - assert t2.minute == 0 -} - -fn test_next_next_day() ? { - ce := parse_expression('0 3') ? - t := parse('2002-01-01 04:00:00') ? - t2 := ce.next(t) ? - - assert t2.year == 2002 - assert t2.month == 1 - assert t2.day == 2 - assert t2.hour == 3 - assert t2.minute == 0 + // Overlap to next day + util_test_time('0 3', '2002-01-01 03:00:00', '2002-01-02 03:00:00') ? + util_test_time('0 3', '2002-01-01 04:00:00', '2002-01-02 03:00:00') ? + + util_test_time('0 3/4', '2002-01-01 04:00:00', '2002-01-01 07:00:00') ? } From 5ce431aa4a21768d37a82919a90e9bd35e4dae40 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 12 Apr 2022 21:23:38 +0200 Subject: [PATCH 18/67] Added two more test dates; pleased v vet --- src/cron/cli.v | 1 + src/cron/cron.v | 1 + src/cron/expression_test.v | 6 ++++++ 3 files changed, 8 insertions(+) diff --git a/src/cron/cli.v b/src/cron/cli.v index 9bdec9ae..8e6b0f16 100644 --- a/src/cron/cli.v +++ b/src/cron/cli.v @@ -12,6 +12,7 @@ pub: base_image string = 'archlinux:base-devel' } +// cmd returns the cli module that handles the cron daemon. pub fn cmd() cli.Command { return cli.Command{ name: 'cron' diff --git a/src/cron/cron.v b/src/cron/cron.v index 25263269..be37ffa1 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -12,6 +12,7 @@ fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { return r1.timestamp < r2.timestamp } +// cron starts a cron daemon & starts periodically scheduling builds. pub fn cron(conf Config) ? { ce := parse_expression('0 3') ? t := time.parse('2002-01-01 00:00:00') ? diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v index ce8526b6..0be9a64d 100644 --- a/src/cron/expression_test.v +++ b/src/cron/expression_test.v @@ -25,4 +25,10 @@ fn test_next_simple() ? { util_test_time('0 3', '2002-01-01 04:00:00', '2002-01-02 03:00:00') ? util_test_time('0 3/4', '2002-01-01 04:00:00', '2002-01-01 07:00:00') ? + + // Overlap to next month + util_test_time('0 3', '2002-11-31 04:00:00', '2002-12-01 03:00:00') ? + + // Overlap to next year + util_test_time('0 3', '2002-12-31 04:00:00', '2003-01-01 03:00:00') ? 
} From bd0c276fd84c483a5e9bc73c3c833c6793f04615 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 12 Apr 2022 21:28:44 +0200 Subject: [PATCH 19/67] Added 'WIP' notice for cron cli --- src/cron/cron.v | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/cron/cron.v b/src/cron/cron.v index be37ffa1..3ba9d0fc 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -14,8 +14,5 @@ fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { // cron starts a cron daemon & starts periodically scheduling builds. pub fn cron(conf Config) ? { - ce := parse_expression('0 3') ? - t := time.parse('2002-01-01 00:00:00') ? - t2 := ce.next(t) ? - println(t2) + println('WIP') } From 9a56bd03a795ac871b0563c80fbcd860a66e9ab0 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 12 Apr 2022 21:56:08 +0200 Subject: [PATCH 20/67] Prevents tests from running on dev or main --- .woodpecker/.test.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.woodpecker/.test.yml b/.woodpecker/.test.yml index ec559c86..3800cc1d 100644 --- a/.woodpecker/.test.yml +++ b/.woodpecker/.test.yml @@ -3,6 +3,8 @@ matrix: - linux/amd64 - linux/arm64 +branches: + exclude: [main, dev] platform: ${PLATFORM} pipeline: From 132a7a8ba5a80938993db7fa712048ea885283c4 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Wed, 13 Apr 2022 14:51:01 +0200 Subject: [PATCH 21/67] Added int support to env; fixed apparently broken defaults --- src/env/env.v | 40 +++++++++++++++++++++++++++------------- 1 file changed, 27 insertions(+), 13 deletions(-) diff --git a/src/env/env.v b/src/env/env.v index cbde67e1..01248506 100644 --- a/src/env/env.v +++ b/src/env/env.v @@ -55,27 +55,41 @@ pub fn load(path string) ?T { $for field in T.fields { s := doc.value(field.name) - // We currently only support strings - if s.type_name() == 'string' { - res.$(field.name) = s.string() + if s !is toml.Null { + $if field.typ is string { + res.$(field.name) = s.string() + }$else $if field.typ is int { + res.$(field.name) = s.int() + } } } } $for field in T.fields { - $if field.typ is string { - env_value := get_env_var(field.name) ? + env_value := get_env_var(field.name) ? - // The value of the env var will always be chosen over the config - // file - if env_value != '' { + // The value of an env var will always take precedence over the toml + // file. + if env_value != '' { + $if field.typ is string { res.$(field.name) = env_value + } $else $if field.typ is int { + res.$(field.name) = env_value.int() } - // If there's no value from the toml file either, we try to find a - // default value - else if res.$(field.name) == '' { - return error("Missing config variable '$field.name' with no provided default. Either add it to the config file or provide it using an environment variable.") - } + } + + // Now, we check whether a value is present. If there isn't, that means + // it isn't in the config file, nor is there a default or an env var. + mut has_value := false + + $if field.typ is string { + has_value = res.$(field.name) != '' + } $else $if field.typ is int { + has_value = res.$(field.name) != 0 + } + + if !has_value { + return error("Missing config variable '$field.name' with no provided default. 
Either add it to the config file or provide it using an environment variable.") } } return res From ff57d7399838e5f6edb2c26d46812ddb35d9da01 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Wed, 13 Apr 2022 15:24:55 +0200 Subject: [PATCH 22/67] Start of daemon (not working) [CI SKIP] --- src/cron/cli.v | 3 +++ src/cron/cron.v | 26 +++++++++++-------- src/cron/daemon/daemon.v | 54 ++++++++++++++++++++++++++++++++++++++++ src/cron/daemon/log.v | 35 ++++++++++++++++++++++++++ src/cron/expression.v | 2 +- src/git/git.v | 2 ++ 6 files changed, 111 insertions(+), 11 deletions(-) create mode 100644 src/cron/daemon/daemon.v create mode 100644 src/cron/daemon/log.v diff --git a/src/cron/cli.v b/src/cron/cli.v index 8e6b0f16..4d2b133c 100644 --- a/src/cron/cli.v +++ b/src/cron/cli.v @@ -10,6 +10,9 @@ pub: api_key string address string base_image string = 'archlinux:base-devel' + max_concurrent_builds int = 1 + api_update_frequency int = 60 + global_schedule string } // cmd returns the cli module that handles the cron daemon. diff --git a/src/cron/cron.v b/src/cron/cron.v index 3ba9d0fc..3d3ea9ad 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -2,17 +2,23 @@ module cron import git import time - -struct ScheduledBuild { - repo git.GitRepo - timestamp time.Time -} - -fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { - return r1.timestamp < r2.timestamp -} +import log +import util +import cron.daemon // cron starts a cron daemon & starts periodically scheduling builds. pub fn cron(conf Config) ? { - println('WIP') + // Configure logger + log_level := log.level_from_tag(conf.log_level) or { + util.exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.') + } + + mut logger := log.Log{ + level: log_level + } + + logger.set_full_logpath(conf.log_file) + logger.log_to_console_too() + + d := daemon.init(conf) } diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v new file mode 100644 index 00000000..a887717d --- /dev/null +++ b/src/cron/daemon/daemon.v @@ -0,0 +1,54 @@ +module daemon + +import git +import time +import log +import datatypes + +struct ScheduledBuild { + repo git.GitRepo + timestamp time.Time +} + +fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { + return r1.timestamp < r2.timestamp +} + +pub struct Daemon { +mut: + conf Config + // Repos currently loaded from API. + repos_map map[string]git.GitRepo + // At what point to update the list of repositories. + api_update_timestamp time.Time + queue datatypes.MinHeap + // Which builds are currently running + builds []git.GitRepo + // Atomic variables used to detect when a build has finished; length is the + // same as builds + atomics []u64 + logger shared log.Log +} + +// init +pub fn init(conf Config) Daemon { + return Daemon{ + conf: conf + atomics: [conf.max_concurrent_builds]u64{} + } +} + +fn (mut d Daemon) run() ? { + d.renew_repos() ? + d.renew_queue() ? +} + +fn (mut d Daemon) renew_repos() ? { + mut new_repos := git.get_repos(d.conf.address, d.conf.api_key) ? + + d.repos_map = new_repos.move() +} + +fn (mut d Daemon) renew_queue() ? 
{ + +} diff --git a/src/cron/daemon/log.v b/src/cron/daemon/log.v new file mode 100644 index 00000000..003898b5 --- /dev/null +++ b/src/cron/daemon/log.v @@ -0,0 +1,35 @@ +module daemon + +import log + +// log reate a log message with the given level +pub fn (mut d Daemon) log(msg &string, level log.Level) { + lock d.logger { + d.logger.send_output(msg, level) + } +} + +// lfatal create a log message with the fatal level +pub fn (mut d Daemon) lfatal(msg &string) { + d.log(msg, log.Level.fatal) +} + +// lerror create a log message with the error level +pub fn (mut d Daemon) lerror(msg &string) { + d.log(msg, log.Level.error) +} + +// lwarn create a log message with the warn level +pub fn (mut d Daemon) lwarn(msg &string) { + d.log(msg, log.Level.warn) +} + +// linfo create a log message with the info level +pub fn (mut d Daemon) linfo(msg &string) { + d.log(msg, log.Level.info) +} + +// ldebug create a log message with the debug level +pub fn (mut d Daemon) ldebug(msg &string) { + d.log(msg, log.Level.debug) +} diff --git a/src/cron/expression.v b/src/cron/expression.v index 0a355418..b35c5687 100644 --- a/src/cron/expression.v +++ b/src/cron/expression.v @@ -241,7 +241,7 @@ fn parse_expression(exp string) ?CronExpression { // This for loop allows us to more clearly propagate the error to the user. for i, min in mins { part_results << parse_part(parts[i], min, maxs[i]) or { - return error('An error occurred with part $i: $err.msg') + return error('An error occurred with part $i: $err.msg()') } } diff --git a/src/git/git.v b/src/git/git.v index eaec8959..45aed606 100644 --- a/src/git/git.v +++ b/src/git/git.v @@ -14,6 +14,8 @@ pub mut: arch []string // Which repo the builder should publish packages to repo string + // Cron schedule describing how frequently to build the repo. + schedule string } // patch_from_params patches a GitRepo from a map[string]string, usually From f7e1aba30bc8d95c28632ace658998804f60763f Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Wed, 13 Apr 2022 16:12:22 +0200 Subject: [PATCH 23/67] Attempt at writing renew_queue function; seems to just stop in the middle --- src/cron/cli.v | 16 ++--- src/cron/cron.v | 12 +++- src/cron/daemon/daemon.v | 72 +++++++++++++++---- src/cron/{ => expression}/expression.v | 6 +- .../{ => expression}/expression_parse_test.v | 2 +- src/cron/{ => expression}/expression_test.v | 2 +- src/env/env.v | 2 +- src/git/git.v | 4 +- src/v.mod | 0 vieter.toml | 3 + 10 files changed, 89 insertions(+), 30 deletions(-) rename src/cron/{ => expression}/expression.v (98%) rename src/cron/{ => expression}/expression_parse_test.v (99%) rename src/cron/{ => expression}/expression_test.v (97%) create mode 100644 src/v.mod diff --git a/src/cron/cli.v b/src/cron/cli.v index 4d2b133c..f4b20ecc 100644 --- a/src/cron/cli.v +++ b/src/cron/cli.v @@ -5,14 +5,14 @@ import env struct Config { pub: - log_level string = 'WARN' - log_file string = 'vieter.log' - api_key string - address string - base_image string = 'archlinux:base-devel' - max_concurrent_builds int = 1 - api_update_frequency int = 60 - global_schedule string + log_level string = 'WARN' + log_file string = 'vieter.log' + api_key string + address string + base_image string = 'archlinux:base-devel' + max_concurrent_builds int = 1 + api_update_frequency int = 60 + global_schedule string } // cmd returns the cli module that handles the cron daemon. 
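The `Config` struct above is filled in by the generic `env.load` function from the earlier patch, which now handles both `string` and `int` fields. The compile-time reflection pattern it relies on can be distilled into a small standalone sketch (the `Example` struct and `fill` helper are illustrative only, not code from this repository):

```v
module main

// Example stands in for any flat config struct such as cron's Config.
struct Example {
mut:
	log_level             string = 'WARN'
	max_concurrent_builds int    = 1
}

// fill assigns values from a map onto the fields of T, converting based on
// each field's compile-time type, much like env.load does for TOML values &
// environment variables.
fn fill<T>(values map[string]string) T {
	mut res := T{}

	$for field in T.fields {
		if field.name in values {
			$if field.typ is string {
				res.$(field.name) = values[field.name]
			} $else $if field.typ is int {
				res.$(field.name) = values[field.name].int()
			}
		}
	}

	return res
}

fn main() {
	values := {
		'max_concurrent_builds': '4'
	}
	conf := fill<Example>(values)

	// Fields that aren't present keep their default, so this prints
	// log_level: 'WARN' & max_concurrent_builds: 4.
	println(conf)
}
```

In the real loader the sources are layered: the TOML file is read first, environment variables override it, and a field that still has no value and no default produces the "Missing config variable" error shown earlier.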
diff --git a/src/cron/cron.v b/src/cron/cron.v index 3d3ea9ad..d8b4d958 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -5,12 +5,13 @@ import time import log import util import cron.daemon +import cron.expression // cron starts a cron daemon & starts periodically scheduling builds. pub fn cron(conf Config) ? { // Configure logger log_level := log.level_from_tag(conf.log_level) or { - util.exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.') + return error('Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.') } mut logger := log.Log{ @@ -20,5 +21,12 @@ pub fn cron(conf Config) ? { logger.set_full_logpath(conf.log_file) logger.log_to_console_too() - d := daemon.init(conf) + ce := expression.parse_expression(conf.global_schedule) or { + return error('Error while parsing global cron expression: $err.msg()') + } + + mut d := daemon.init_daemon(logger, conf.address, conf.api_key, conf.base_image, ce, + conf.max_concurrent_builds, conf.api_update_frequency) ? + + d.run() ? } diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index a887717d..ede93202 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -3,9 +3,12 @@ module daemon import git import time import log -import datatypes +import datatypes { MinHeap } +import cron.expression { CronExpression, parse_expression } struct ScheduledBuild { +pub: + repo_id string repo git.GitRepo timestamp time.Time } @@ -16,39 +19,84 @@ fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { pub struct Daemon { mut: - conf Config + address string + api_key string + base_image string + global_schedule CronExpression + api_update_frequency int // Repos currently loaded from API. repos_map map[string]git.GitRepo // At what point to update the list of repositories. api_update_timestamp time.Time - queue datatypes.MinHeap + queue MinHeap // Which builds are currently running builds []git.GitRepo // Atomic variables used to detect when a build has finished; length is the // same as builds atomics []u64 - logger shared log.Log + logger shared log.Log } -// init -pub fn init(conf Config) Daemon { - return Daemon{ - conf: conf - atomics: [conf.max_concurrent_builds]u64{} +pub fn init_daemon(logger log.Log, address string, api_key string, base_image string, global_schedule CronExpression, max_concurrent_builds int, api_update_frequency int) ?Daemon { + mut d := Daemon{ + address: address + api_key: api_key + base_image: base_image + global_schedule: global_schedule + api_update_frequency: api_update_frequency + atomics: []u64{len: max_concurrent_builds} + builds: []git.GitRepo{len: max_concurrent_builds} + logger: logger } -} -fn (mut d Daemon) run() ? { + // Initialize the repos & queue d.renew_repos() ? d.renew_queue() ? + + return d +} + +pub fn (mut d Daemon) run() ? { + println(d.queue) } fn (mut d Daemon) renew_repos() ? { - mut new_repos := git.get_repos(d.conf.address, d.conf.api_key) ? + mut new_repos := git.get_repos(d.address, d.api_key) ? d.repos_map = new_repos.move() + + d.api_update_timestamp = time.now().add_seconds(60 * d.api_update_frequency) } +// renew_queue replaces the old queue with a new one that reflects the newest +// values in repos_map. fn (mut d Daemon) renew_queue() ? { + mut new_queue := MinHeap{} + // Move any jobs that should have already started from the old queue onto + // the new one + now := time.now() + + for d.queue.len() > 0 && d.queue.peek() ?.timestamp < now { + new_queue.insert(d.queue.pop() ?) 
+ } + + println('hey') + println(d.repos_map) + // For each repository in repos_map, parse their cron expression (or use + // the default one if not present) & add them to the queue + for id, repo in d.repos_map { + println('hey') + ce := parse_expression(repo.schedule) or { d.global_schedule } + // A repo that can't be scheduled will just be skipped for now + timestamp := ce.next(now) or { continue } + + new_queue.insert(ScheduledBuild{ + repo_id: id + repo: repo + timestamp: timestamp + }) + } + + d.queue = new_queue } diff --git a/src/cron/expression.v b/src/cron/expression/expression.v similarity index 98% rename from src/cron/expression.v rename to src/cron/expression/expression.v index b35c5687..c122585e 100644 --- a/src/cron/expression.v +++ b/src/cron/expression/expression.v @@ -1,8 +1,8 @@ -module cron +module expression import time -struct CronExpression { +pub struct CronExpression { minutes []int hours []int days []int @@ -219,7 +219,7 @@ fn parse_part(s string, min int, max int) ?[]int { // parse_expression parses an entire cron expression string into a // CronExpression object, if possible. -fn parse_expression(exp string) ?CronExpression { +pub fn parse_expression(exp string) ?CronExpression { // The filter allows for multiple spaces between parts mut parts := exp.split(' ').filter(it != '') diff --git a/src/cron/expression_parse_test.v b/src/cron/expression/expression_parse_test.v similarity index 99% rename from src/cron/expression_parse_test.v rename to src/cron/expression/expression_parse_test.v index 8f3ac38e..18531c0c 100644 --- a/src/cron/expression_parse_test.v +++ b/src/cron/expression/expression_parse_test.v @@ -1,4 +1,4 @@ -module cron +module expression // parse_range_error returns the returned error message. If the result is '', // that means the function didn't error. diff --git a/src/cron/expression_test.v b/src/cron/expression/expression_test.v similarity index 97% rename from src/cron/expression_test.v rename to src/cron/expression/expression_test.v index 0be9a64d..ef0283a7 100644 --- a/src/cron/expression_test.v +++ b/src/cron/expression/expression_test.v @@ -1,4 +1,4 @@ -module cron +module expression import time { parse } diff --git a/src/env/env.v b/src/env/env.v index 01248506..88f16507 100644 --- a/src/env/env.v +++ b/src/env/env.v @@ -58,7 +58,7 @@ pub fn load(path string) ?T { if s !is toml.Null { $if field.typ is string { res.$(field.name) = s.string() - }$else $if field.typ is int { + } $else $if field.typ is int { res.$(field.name) = s.int() } } diff --git a/src/git/git.v b/src/git/git.v index 45aed606..2023f341 100644 --- a/src/git/git.v +++ b/src/git/git.v @@ -15,7 +15,7 @@ pub mut: // Which repo the builder should publish packages to repo string // Cron schedule describing how frequently to build the repo. - schedule string + schedule string [optional] } // patch_from_params patches a GitRepo from a map[string]string, usually @@ -74,7 +74,7 @@ pub fn repo_from_params(params map[string]string) ?GitRepo { // If we're creating a new GitRepo, we want all fields to be present before // "patching". 
$for field in GitRepo.fields { - if field.name !in params { + if field.name !in params && !field.attrs.contains('optional') { return error('Missing parameter: ${field.name}.') } } diff --git a/src/v.mod b/src/v.mod new file mode 100644 index 00000000..e69de29b diff --git a/vieter.toml b/vieter.toml index 8e0447b2..e646739f 100644 --- a/vieter.toml +++ b/vieter.toml @@ -8,3 +8,6 @@ repos_file = "data/repos.json" default_arch = "x86_64" address = "http://localhost:8000" + +global_schedule = '0 3' + From 78b477fb9254df38656a8e69f4e9989ce8f5b131 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Wed, 13 Apr 2022 22:20:05 +0200 Subject: [PATCH 24/67] Removed deprecated err.msg & err.code --- src/cron/cron.v | 3 --- src/cron/daemon/daemon.v | 7 ++++--- src/env/env.v | 2 +- src/repo/repo.v | 6 +++--- src/server/git.v | 4 ++-- src/server/routes.v | 6 +++--- src/server/server.v | 2 +- src/web/web.v | 8 ++++---- 8 files changed, 18 insertions(+), 20 deletions(-) diff --git a/src/cron/cron.v b/src/cron/cron.v index d8b4d958..cb5bcd7e 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -1,9 +1,6 @@ module cron -import git -import time import log -import util import cron.daemon import cron.expression diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index ede93202..9931d4ff 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -58,6 +58,7 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st pub fn (mut d Daemon) run() ? { println(d.queue) + println('i am running') } fn (mut d Daemon) renew_repos() ? { @@ -81,12 +82,12 @@ fn (mut d Daemon) renew_queue() ? { new_queue.insert(d.queue.pop() ?) } - println('hey') - println(d.repos_map) + eprintln('hey') + eprintln(d.repos_map) // For each repository in repos_map, parse their cron expression (or use // the default one if not present) & add them to the queue for id, repo in d.repos_map { - println('hey') + eprintln('hey') ce := parse_expression(repo.schedule) or { d.global_schedule } // A repo that can't be scheduled will just be skipped for now timestamp := ce.next(now) or { continue } diff --git a/src/env/env.v b/src/env/env.v index 88f16507..b2b5f446 100644 --- a/src/env/env.v +++ b/src/env/env.v @@ -36,7 +36,7 @@ fn get_env_var(field_name string) ?string { // Otherwise, we process the file return os.read_file(env_file) or { - error('Failed to read file defined in $env_file_name: ${err.msg}.') + error('Failed to read file defined in $env_file_name: ${err.msg()}.') } } diff --git a/src/repo/repo.v b/src/repo/repo.v index f439f581..e27e232b 100644 --- a/src/repo/repo.v +++ b/src/repo/repo.v @@ -30,11 +30,11 @@ pub: // new creates a new RepoGroupManager & creates the directories as needed pub fn new(repos_dir string, pkg_dir string, default_arch string) ?RepoGroupManager { if !os.is_dir(repos_dir) { - os.mkdir_all(repos_dir) or { return error('Failed to create repos directory: $err.msg') } + os.mkdir_all(repos_dir) or { return error('Failed to create repos directory: $err.msg()') } } if !os.is_dir(pkg_dir) { - os.mkdir_all(pkg_dir) or { return error('Failed to create package directory: $err.msg') } + os.mkdir_all(pkg_dir) or { return error('Failed to create package directory: $err.msg()') } } return RepoGroupManager{ @@ -50,7 +50,7 @@ pub fn new(repos_dir string, pkg_dir string, default_arch string) ?RepoGroupMana // the right subdirectories in r.pkg_dir if it was successfully added. 
pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) ?RepoAddResult { pkg := package.read_pkg_archive(pkg_path) or { - return error('Failed to read package file: $err.msg') + return error('Failed to read package file: $err.msg()') } added := r.add_pkg_in_repo(repo, pkg) ? diff --git a/src/server/git.v b/src/server/git.v index 2a682d8d..a9d6f502 100644 --- a/src/server/git.v +++ b/src/server/git.v @@ -16,7 +16,7 @@ fn (mut app App) get_repos() web.Result { repos := rlock app.git_mutex { git.read_repos(app.conf.repos_file) or { - app.lerror('Failed to read repos file: $err.msg') + app.lerror('Failed to read repos file: $err.msg()') return app.status(http.Status.internal_server_error) } @@ -55,7 +55,7 @@ fn (mut app App) post_repo() web.Result { } new_repo := git.repo_from_params(app.query) or { - return app.json(http.Status.bad_request, new_response(err.msg)) + return app.json(http.Status.bad_request, new_response(err.msg())) } id := rand.uuid_v4() diff --git a/src/server/routes.v b/src/server/routes.v index 138f2532..4f6c4f00 100644 --- a/src/server/routes.v +++ b/src/server/routes.v @@ -87,15 +87,15 @@ fn (mut app App) put_package(repo string) web.Result { } res := app.repo.add_pkg_from_path(repo, pkg_path) or { - app.lerror('Error while adding package: $err.msg') + app.lerror('Error while adding package: $err.msg()') - os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg") } + os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg()") } return app.json(http.Status.internal_server_error, new_response('Failed to add package.')) } if !res.added { - os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg") } + os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg()") } app.lwarn("Duplicate package '$res.pkg.full_name()' in repo '$repo'.") diff --git a/src/server/server.v b/src/server/server.v index 5bf9a87e..c4317c5c 100644 --- a/src/server/server.v +++ b/src/server/server.v @@ -45,7 +45,7 @@ pub fn server(conf Config) ? 
{ // This also creates the directories if needed repo := repo.new(conf.repos_dir, conf.pkg_dir, conf.default_arch) or { - logger.error(err.msg) + logger.error(err.msg()) exit(1) } diff --git a/src/web/web.v b/src/web/web.v index 000c6a69..688f854e 100644 --- a/src/web/web.v +++ b/src/web/web.v @@ -249,7 +249,7 @@ pub fn (mut ctx Context) file(f_path string) Result { // ext := os.file_ext(f_path) // data := os.read_file(f_path) or { - // eprint(err.msg) + // eprint(err.msg()) // ctx.server_error(500) // return Result{} // } @@ -267,7 +267,7 @@ pub fn (mut ctx Context) file(f_path string) Result { file_size := os.file_size(f_path) file := os.open(f_path) or { - eprintln(err.msg) + eprintln(err.msg()) ctx.server_error(500) return Result{} } @@ -361,7 +361,7 @@ interface DbInterface { // run runs the app [manualfree] pub fn run(global_app &T, port int) { - mut l := net.listen_tcp(.ip6, ':$port') or { panic('failed to listen $err.code $err') } + mut l := net.listen_tcp(.ip6, ':$port') or { panic('failed to listen $err.code() $err') } // Parsing methods attributes mut routes := map[string]Route{} @@ -393,7 +393,7 @@ pub fn run(global_app &T, port int) { request_app.Context = global_app.Context // copy the context ref that contains static files map etc mut conn := l.accept() or { // failures should not panic - eprintln('accept() failed with error: $err.msg') + eprintln('accept() failed with error: $err.msg()') continue } go handle_conn(mut conn, mut request_app, routes) From c8af362a4aba256fd8cdb13f853ceb15684f2296 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 14 Apr 2022 20:38:14 +0200 Subject: [PATCH 25/67] Workaround for weird bug --- src/cron/daemon/daemon.v | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 9931d4ff..eadd04c1 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -37,6 +37,8 @@ mut: logger shared log.Log } +// init_daemon initializes a new Daemon object. It renews the repositories & +// populates the build queue for the first time. pub fn init_daemon(logger log.Log, address string, api_key string, base_image string, global_schedule CronExpression, max_concurrent_builds int, api_update_frequency int) ?Daemon { mut d := Daemon{ address: address @@ -56,6 +58,8 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st return d } +// run starts the actual daemon process. It runs builds when possible & +// periodically refreshes the list of repositories to ensure we stay in sync. pub fn (mut d Daemon) run() ? { println(d.queue) println('i am running') @@ -78,16 +82,23 @@ fn (mut d Daemon) renew_queue() ? { // the new one now := time.now() - for d.queue.len() > 0 && d.queue.peek() ?.timestamp < now { - new_queue.insert(d.queue.pop() ?) + // For some reason, using + // ```v + // for d.queue.len() > 0 && d.queue.peek() ?.timestamp < now { + //``` + // here causes the function to prematurely just exit, without any errors or anything, very weird + // https://github.com/vlang/v/issues/14042 + for d.queue.len() > 0 { + if d.queue.peek() ?.timestamp < now { + new_queue.insert(d.queue.pop() ?) 
+ } else { + break + } } - eprintln('hey') - eprintln(d.repos_map) // For each repository in repos_map, parse their cron expression (or use // the default one if not present) & add them to the queue for id, repo in d.repos_map { - eprintln('hey') ce := parse_expression(repo.schedule) or { d.global_schedule } // A repo that can't be scheduled will just be skipped for now timestamp := ce.next(now) or { continue } From c8fc683384d2fe6b313508d1ae700bd9b36815fa Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 14 Apr 2022 21:20:10 +0200 Subject: [PATCH 26/67] Eh don't feel like writing scheduler rn --- src/cron/daemon/build.v | 45 ++++++++++++++++++++++++++++++++++++++++ src/cron/daemon/daemon.v | 9 ++++++-- vieter.toml | 2 +- 3 files changed, 53 insertions(+), 3 deletions(-) create mode 100644 src/cron/daemon/build.v diff --git a/src/cron/daemon/build.v b/src/cron/daemon/build.v new file mode 100644 index 00000000..e7e5ac34 --- /dev/null +++ b/src/cron/daemon/build.v @@ -0,0 +1,45 @@ +module daemon + +import git +import time +import sync.stdatomic + +// update_builds starts as many builds as possible. +fn (mut d Daemon) update_builds() ? { + now := time.now() + + for d.queue.len() > 0 { + if d.queue.peek() ?.timestamp < now { + sb := d.queue.pop() ? + + // If this build couldn't be scheduled, no more will be possible. + if !d.start_build(sb.repo_id)? { + break + } + } else { + break + } + } +} + +// start_build starts a build for the given repo_id. +fn (mut d Daemon) start_build(repo_id string) ?bool { + for i in 0..d.atomics.len { + if stdatomic.load_u64(&d.atomics[i]) == 0 { + stdatomic.store_u64(&d.atomics[i], 1) + + go d.run_build(i, d.repos_map[repo_id]) + + return true + } + } + + return false +} + +fn (mut d Daemon) run_build(build_index int, repo git.GitRepo) ? { + time.sleep(10 * time.second) + + stdatomic.store_u64(&d.atomics[build_index], 2) +} + diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index eadd04c1..fc917e49 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -61,8 +61,13 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st // run starts the actual daemon process. It runs builds when possible & // periodically refreshes the list of repositories to ensure we stay in sync. pub fn (mut d Daemon) run() ? { - println(d.queue) - println('i am running') + for { + d.update_builds() ? + println(d.queue) + println(d.atomics) + + time.sleep(60 * time.second) + } } fn (mut d Daemon) renew_repos() ? { diff --git a/vieter.toml b/vieter.toml index e646739f..452500fe 100644 --- a/vieter.toml +++ b/vieter.toml @@ -9,5 +9,5 @@ default_arch = "x86_64" address = "http://localhost:8000" -global_schedule = '0 3' +global_schedule = '* *' From cd8fd786168433c1fe79940e1235cfafcd3e5d15 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 14 Apr 2022 23:15:19 +0200 Subject: [PATCH 27/67] Added experimental builds to CI --- .woodpecker/.build_experimental.yml | 25 +++++++++++++++++++++++++ Makefile | 21 ++++++++++++++++++--- 2 files changed, 43 insertions(+), 3 deletions(-) create mode 100644 .woodpecker/.build_experimental.yml diff --git a/.woodpecker/.build_experimental.yml b/.woodpecker/.build_experimental.yml new file mode 100644 index 00000000..0d079626 --- /dev/null +++ b/.woodpecker/.build_experimental.yml @@ -0,0 +1,25 @@ +# These builds are not important for the project, but might be valuable for +# fixing bugs in the V compiler. 
+ +platform: linux/amd64 +branches: + exclude: [master, dev] + +pipeline: + autofree: + image: 'chewingbever/vlang:latest' + pull: true + group: 'build' + commands: + - make autofree + when: + event: push + + skip-unused: + image: 'chewingbever/vlang:latest' + pull: true + group: 'build' + commands: + - make skip-unused + when: + event: push diff --git a/Makefile b/Makefile index 9421fb6f..6f2921a6 100644 --- a/Makefile +++ b/Makefile @@ -7,6 +7,7 @@ V := $(V_PATH) -showcc -gc boehm all: vieter + # =====COMPILATION===== # Regular binary vieter: $(SOURCES) @@ -33,19 +34,21 @@ pvieter: $(SOURCES) # Only generate C code .PHONY: c -c: +c: $(SOURCES) $(V) -o vieter.c $(SRC_DIR) + # =====EXECUTION===== # Run the server in the default 'data' directory .PHONY: run run: vieter - ./vieter -f vieter.toml server + ./vieter -f vieter.toml server .PHONY: run-prod run-prod: prod ./pvieter -f vieter.toml server + # =====OTHER===== .PHONY: lint lint: @@ -72,4 +75,16 @@ v/v: make -C v clean: - rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' + rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'pkg' 'src/vieter' 'afvieter' 'suvieter' + + +# =====EXPERIMENTAL===== +.PHONY: autofree +autofree: afvieter +afvieter: $(SOURCES) + $(V_PATH) -showcc -autofree -o afvieter $(SRC_DIR) + +.PHONY: skip-unused +skip-unused: suvieter +suvieter: $(SOURCES) + $(V_PATH) -showcc -skip-unused -o suvieter $(SRC_DIR) From cf77037188124f5643048724f5400b165c007603 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 14 Apr 2022 23:17:52 +0200 Subject: [PATCH 28/67] Some more experimental builds --- .woodpecker/.build_experimental.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/.woodpecker/.build_experimental.yml b/.woodpecker/.build_experimental.yml index 0d079626..032a42b7 100644 --- a/.woodpecker/.build_experimental.yml +++ b/.woodpecker/.build_experimental.yml @@ -12,6 +12,8 @@ pipeline: group: 'build' commands: - make autofree + - readelf -d afvieter + - du -h afvieter when: event: push @@ -21,5 +23,20 @@ pipeline: group: 'build' commands: - make skip-unused + - readelf -d suvieter + - du -h suvieter + when: + event: push + + skip-unused-static: + image: 'chewingbever/vlang:latest' + pull: true + environment: + - LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static + group: 'build' + commands: + - make skip-unused + - readelf -d suvieter + - du -h suvieter when: event: push From 20112b869395e670f068e01ad73c005da8a78a05 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Fri, 15 Apr 2022 10:59:05 +0200 Subject: [PATCH 29/67] Switched to official compiler instead of fork --- CHANGELOG.md | 2 ++ Makefile | 6 +++--- PKGBUILD | 4 ++-- README.md | 23 +++++++++++++---------- 4 files changed, 20 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8738952a..b4a7609e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased](https://git.rustybever.be/Chewing_Bever/vieter) +* Moved away from compiler fork + ## [0.2.0](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.2.0) ### Changed diff --git a/Makefile b/Makefile index 9421fb6f..faae1a68 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ SRC_DIR := src SOURCES != find '$(SRC_DIR)' -iname '*.v' -V_PATH ?= v/v +V_PATH ?= v V := $(V_PATH) -showcc -gc boehm all: vieter @@ -68,8 +68,8 @@ test: .PHONY: v v: v/v v/v: - git clone --single-branch --branch patches 
https://git.rustybever.be/Chewing_Bever/vieter-v v + git clone --single-branch https://git.rustybever.be/Chewing_Bever/v v make -C v clean: - rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' + rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' *.pkg.tar.zst diff --git a/PKGBUILD b/PKGBUILD index 5011ab16..0c558b48 100644 --- a/PKGBUILD +++ b/PKGBUILD @@ -2,7 +2,7 @@ pkgbase='vieter' pkgname='vieter' -pkgver=0.1.0.rc1.r117.gc3ac00f +pkgver=0.2.0.r24.g9a56bd0 pkgrel=1 depends=('glibc' 'openssl' 'libarchive' 'gc') makedepends=('git' 'gcc') @@ -23,7 +23,7 @@ build() { # Build the compiler CFLAGS= make v - make prod + V_PATH=v/v make prod } package() { diff --git a/README.md b/README.md index cd78f744..96b104d2 100644 --- a/README.md +++ b/README.md @@ -20,15 +20,12 @@ a while now. I wanted a fast language that I could code while relaxing, without having to exert too much mental effort & V seemed like the right choice for that. -### Custom Compiler +### Compiler -Currently, this program only works with a very slightly modified version of the -V standard library, and therefore the compiler. The source code for this fork -can be found [here](https://git.rustybever.be/Chewing_Bever/vieter-v). You can -obtain this modified version of the compiler by running `make v`, which will -clone & build the compiler. Afterwards, all make commands that require the V -compiler will use this new binary. I try to keep this fork as up to date with -upstream as possible. +Vieter compiles with the standard Vlang compiler. However, I do maintain a +[mirror](https://git.rustybever.be/Chewing_Bever/v). This is to ensure my CI +does not break without reason, as I control when & how frequently the mirror is +updated to reflect the official repository. ## Features @@ -44,9 +41,15 @@ upstream as possible. In order to build Vieter, you'll need a couple of libraries: +* An installation of V * gc * libarchive * openssl -Before building Vieter, you'll have to build the compiler using `make v`. -Afterwards, run `make` to build the debug binary. +**NOTE**: if you encounter any issues compiling Vieter using the absolute +latest version of V, it might be because my mirror is missing a specific commit +that causes issues. For this reason, the `make v` command exists which will +clone my compiler in the `v` directory & build it. Afterwards, you can use this +compiler with make by prepending all make commands with `V_PATH=v/v`. If you do +encounter this issue, please let me know so I can update my mirror & the +codebase to fix it! 
From 20707f6af14ebce35476f7ddffa07b60c415ea86 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Fri, 15 Apr 2022 11:38:06 +0200 Subject: [PATCH 30/67] chore(ci): change debug build used chore(ci): removed skip-unused-static experimental build chore: updated Makefile --- .gitignore | 4 ++-- .woodpecker/.build.yml | 5 +++-- .woodpecker/.build_experimental.yml | 13 ------------- Makefile | 4 ++-- 4 files changed, 7 insertions(+), 19 deletions(-) diff --git a/.gitignore b/.gitignore index 7847b3fb..a3f6afcf 100644 --- a/.gitignore +++ b/.gitignore @@ -5,8 +5,8 @@ data/ vieter dvieter pvieter -dvieterctl -vieterctl +suvieter +afvieter vieter.c # Ignore testing files diff --git a/.woodpecker/.build.yml b/.woodpecker/.build.yml index e68c4c97..c612737b 100644 --- a/.woodpecker/.build.yml +++ b/.woodpecker/.build.yml @@ -9,15 +9,16 @@ matrix: platform: ${PLATFORM} pipeline: - # The default build isn't needed, as alpine switches to gcc for the compiler anyways debug: image: 'chewingbever/vlang:latest' pull: true group: 'build' commands: - - make debug + - make when: event: push + branch: + exclude: [main, dev] prod: image: 'chewingbever/vlang:latest' diff --git a/.woodpecker/.build_experimental.yml b/.woodpecker/.build_experimental.yml index 032a42b7..0129d2b4 100644 --- a/.woodpecker/.build_experimental.yml +++ b/.woodpecker/.build_experimental.yml @@ -27,16 +27,3 @@ pipeline: - du -h suvieter when: event: push - - skip-unused-static: - image: 'chewingbever/vlang:latest' - pull: true - environment: - - LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static - group: 'build' - commands: - - make skip-unused - - readelf -d suvieter - - du -h suvieter - when: - event: push diff --git a/Makefile b/Makefile index 2f399831..fb97ec26 100644 --- a/Makefile +++ b/Makefile @@ -24,7 +24,7 @@ dvieter: $(SOURCES) # Run the debug build inside gdb .PHONY: gdb gdb: dvieter - gdb --args './dvieter -f vieter.toml server' + gdb --args ./dvieter -f vieter.toml server # Optimised production build .PHONY: prod @@ -75,7 +75,7 @@ v/v: make -C v clean: - rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' *.pkg.tar.zst + rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' *.pkg.tar.zst 'suvieter' 'afvieter' # =====EXPERIMENTAL===== From 7722d5a7e41cdaed0569acdfd51e21e9acb45734 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 11:43:06 +0200 Subject: [PATCH 31/67] fix: replace byte with u8 BREAKING: the V compiler removed the byte type alias in favor of u8. --- src/docker/docker.v | 6 +++--- src/repo/sync.v | 2 +- src/util/util.v | 4 ++-- src/web/web.v | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/docker/docker.v b/src/docker/docker.v index a6f76409..07ceb8ed 100644 --- a/src/docker/docker.v +++ b/src/docker/docker.v @@ -28,8 +28,8 @@ fn send(req &string) ?http.Response { s.wait_for_write() ? mut c := 0 - mut buf := []byte{len: docker.buf_len} - mut res := []byte{} + mut buf := []u8{len: docker.buf_len} + mut res := []u8{} for { c = s.read(mut buf) or { return error('Failed to read data from socket ${docker.socket}.') } @@ -52,7 +52,7 @@ fn send(req &string) ?http.Response { // We loop until we've encountered the end of the chunked response // A chunked HTTP response always ends with '0\r\n\r\n'. - for res.len < 5 || res#[-5..] != [byte(`0`), `\r`, `\n`, `\r`, `\n`] { + for res.len < 5 || res#[-5..] != [u8(`0`), `\r`, `\n`, `\r`, `\n`] { // Wait for the server to respond s.wait_for_write() ? 
diff --git a/src/repo/sync.v b/src/repo/sync.v index e2b7aac7..12756b7f 100644 --- a/src/repo/sync.v +++ b/src/repo/sync.v @@ -19,7 +19,7 @@ fn archive_add_entry(archive &C.archive, entry &C.archive_entry, file_path &stri } // Write the file to the archive - buf := [8192]byte{} + buf := [8192]u8{} mut len := C.read(fd, &buf, sizeof(buf)) for len > 0 { diff --git a/src/util/util.v b/src/util/util.v index 228f5845..c1af30ec 100644 --- a/src/util/util.v +++ b/src/util/util.v @@ -30,7 +30,7 @@ pub fn reader_to_file(mut reader io.BufferedReader, length int, path string) ? { file.close() } - mut buf := []byte{len: util.reader_buf_size} + mut buf := []u8{len: util.reader_buf_size} mut bytes_left := length // Repeat as long as the stream still has data @@ -60,7 +60,7 @@ pub fn hash_file(path &string) ?(string, string) { mut sha256sum := sha256.new() buf_size := int(1_000_000) - mut buf := []byte{len: buf_size} + mut buf := []u8{len: buf_size} mut bytes_left := os.file_size(path) for bytes_left > 0 { diff --git a/src/web/web.v b/src/web/web.v index 688f854e..3e7b0478 100644 --- a/src/web/web.v +++ b/src/web/web.v @@ -285,7 +285,7 @@ pub fn (mut ctx Context) file(f_path string) Result { resp.set_status(ctx.status) send_string(mut ctx.conn, resp.bytestr()) or { return Result{} } - mut buf := []byte{len: 1_000_000} + mut buf := []u8{len: 1_000_000} mut bytes_left := file_size // Repeat as long as the stream still has data From 4d26797453c432297073e512942c2216c35c5edc Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 21 Apr 2022 09:07:16 +0200 Subject: [PATCH 32/67] chore(ci): Updated PKGBUILD to use vieter-v package --- .woodpecker/.arch.yml | 2 ++ PKGBUILD | 9 +++------ 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/.woodpecker/.arch.yml b/.woodpecker/.arch.yml index ab3c6ea4..e37dc1a6 100644 --- a/.woodpecker/.arch.yml +++ b/.woodpecker/.arch.yml @@ -10,6 +10,8 @@ pipeline: build: image: 'menci/archlinuxarm:base-devel' commands: + # Add the vieter repository so we can use the compiler + - echo -e '[vieter]\nServer = https://arch.r8r.be/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf # Update packages - pacman -Syu --noconfirm # Create non-root user to perform build & switch to their home diff --git a/PKGBUILD b/PKGBUILD index 0c558b48..3f8c4801 100644 --- a/PKGBUILD +++ b/PKGBUILD @@ -2,10 +2,10 @@ pkgbase='vieter' pkgname='vieter' -pkgver=0.2.0.r24.g9a56bd0 +pkgver=0.2.0.r25.g20112b8 pkgrel=1 depends=('glibc' 'openssl' 'libarchive' 'gc') -makedepends=('git' 'gcc') +makedepends=('git' 'gcc' 'vieter-v') arch=('x86_64' 'aarch64' 'armv7') url='https://git.rustybever.be/Chewing_Bever/vieter' license=('AGPL3') @@ -20,10 +20,7 @@ pkgver() { build() { cd "$pkgname" - # Build the compiler - CFLAGS= make v - - V_PATH=v/v make prod + make prod } package() { From 6f9e1b5f3cf02a5f60c419da6c71523a790d19bf Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 11:31:14 +0200 Subject: [PATCH 33/67] feat(cron): start of working loop --- src/cron/daemon/build.v | 36 +++++++++++++++++++------- src/cron/daemon/daemon.v | 43 +++++++++++++++++++++----------- src/cron/expression/expression.v | 2 +- 3 files changed, 57 insertions(+), 24 deletions(-) diff --git a/src/cron/daemon/build.v b/src/cron/daemon/build.v index e7e5ac34..73ba183a 100644 --- a/src/cron/daemon/build.v +++ b/src/cron/daemon/build.v @@ -1,9 +1,25 @@ module daemon -import git import time import sync.stdatomic +const build_empty = 0 +const build_running = 1 +const build_done = 2 + +// reschedule_builds looks for any 
builds with status code 2 & re-adds them to +// the queue. +fn (mut d Daemon) reschedule_builds() ? { + for i in 0..d.atomics.len { + if stdatomic.load_u64(&d.atomics[i]) == build_done { + stdatomic.store_u64(&d.atomics[i], build_empty) + sb := d.builds[i] + + d.schedule_build(sb.repo_id, sb.repo) ? + } + } +} + // update_builds starts as many builds as possible. fn (mut d Daemon) update_builds() ? { now := time.now() @@ -13,7 +29,7 @@ fn (mut d Daemon) update_builds() ? { sb := d.queue.pop() ? // If this build couldn't be scheduled, no more will be possible. - if !d.start_build(sb.repo_id)? { + if !d.start_build(sb)? { break } } else { @@ -22,13 +38,14 @@ fn (mut d Daemon) update_builds() ? { } } -// start_build starts a build for the given repo_id. -fn (mut d Daemon) start_build(repo_id string) ?bool { +// start_build starts a build for the given ScheduledBuild object. +fn (mut d Daemon) start_build(sb ScheduledBuild) ?bool { for i in 0..d.atomics.len { - if stdatomic.load_u64(&d.atomics[i]) == 0 { - stdatomic.store_u64(&d.atomics[i], 1) + if stdatomic.load_u64(&d.atomics[i]) == build_empty { + stdatomic.store_u64(&d.atomics[i], build_running) + d.builds[i] = sb - go d.run_build(i, d.repos_map[repo_id]) + go d.run_build(i, sb) return true } @@ -37,9 +54,10 @@ fn (mut d Daemon) start_build(repo_id string) ?bool { return false } -fn (mut d Daemon) run_build(build_index int, repo git.GitRepo) ? { +// run_build actually starts the build process for a given repo. +fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) ? { time.sleep(10 * time.second) - stdatomic.store_u64(&d.atomics[build_index], 2) + stdatomic.store_u64(&d.atomics[build_index], build_done) } diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index fc917e49..816bc159 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -30,7 +30,7 @@ mut: api_update_timestamp time.Time queue MinHeap // Which builds are currently running - builds []git.GitRepo + builds []ScheduledBuild // Atomic variables used to detect when a build has finished; length is the // same as builds atomics []u64 @@ -47,7 +47,7 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st global_schedule: global_schedule api_update_frequency: api_update_frequency atomics: []u64{len: max_concurrent_builds} - builds: []git.GitRepo{len: max_concurrent_builds} + builds: []ScheduledBuild{len: max_concurrent_builds} logger: logger } @@ -62,14 +62,37 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st // periodically refreshes the list of repositories to ensure we stay in sync. pub fn (mut d Daemon) run() ? { for { + println('1') + // Cleans up finished builds, opening up spots for new builds + d.reschedule_builds() ? + println('2') + // Schedules new builds when possible d.update_builds() ? + println(d.queue) println(d.atomics) - time.sleep(60 * time.second) + time.sleep(10 * time.second) } } +// schedule_build adds the next occurence of the given repo build to the queue. +fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) ? { + ce := parse_expression(repo.schedule) or { + d.lerror("Error while parsing cron expression '$repo.schedule' ($repo_id): $err.msg()") + + d.global_schedule + } + // A repo that can't be scheduled will just be skipped for now + timestamp := ce.next_from_now() ? + + d.queue.insert(ScheduledBuild{ + repo_id: repo_id + repo: repo + timestamp: timestamp + }) +} + fn (mut d Daemon) renew_repos() ? 
{ mut new_repos := git.get_repos(d.address, d.api_key) ? @@ -101,19 +124,11 @@ fn (mut d Daemon) renew_queue() ? { } } + d.queue = new_queue + // For each repository in repos_map, parse their cron expression (or use // the default one if not present) & add them to the queue for id, repo in d.repos_map { - ce := parse_expression(repo.schedule) or { d.global_schedule } - // A repo that can't be scheduled will just be skipped for now - timestamp := ce.next(now) or { continue } - - new_queue.insert(ScheduledBuild{ - repo_id: id - repo: repo - timestamp: timestamp - }) + d.schedule_build(id, repo) ? } - - d.queue = new_queue } diff --git a/src/cron/expression/expression.v b/src/cron/expression/expression.v index c122585e..6e11da2b 100644 --- a/src/cron/expression/expression.v +++ b/src/cron/expression/expression.v @@ -114,7 +114,7 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { }) } -fn (ce &CronExpression) next_from_now() ?time.Time { +pub fn (ce &CronExpression) next_from_now() ?time.Time { return ce.next(time.now()) } From 5287067ea7470259417440a128d64135b41d135e Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 21 Apr 2022 16:49:39 +0200 Subject: [PATCH 34/67] chore(ci): run builds sequentially --- .woodpecker/.build.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.woodpecker/.build.yml b/.woodpecker/.build.yml index c612737b..e7341fd0 100644 --- a/.woodpecker/.build.yml +++ b/.woodpecker/.build.yml @@ -12,7 +12,6 @@ pipeline: debug: image: 'chewingbever/vlang:latest' pull: true - group: 'build' commands: - make when: @@ -25,7 +24,6 @@ pipeline: pull: true environment: - LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static - group: 'build' commands: - make prod # Make sure the binary is actually statically built From 11ac3c0470d4bb3052a8a308a0a03facbd03d74d Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Fri, 29 Apr 2022 10:34:12 +0200 Subject: [PATCH 35/67] docs: added docs command & notice in README --- .gitignore | 3 +++ Makefile | 8 +++++++- README.md | 4 +++- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index a3f6afcf..6a06eb2b 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 +23,6 @@ v/ # gdb log file gdb.txt + +# Generated docs +_docs/ diff --git a/Makefile b/Makefile index fb97ec26..17936407 100644 --- a/Makefile +++ b/Makefile @@ -74,8 +74,14 @@ v/v: git clone --single-branch https://git.rustybever.be/Chewing_Bever/v v make -C v +.PHONY: clean clean: - rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' *.pkg.tar.zst 'suvieter' 'afvieter' + rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' *.pkg.tar.zst 'suvieter' 'afvieter' '$(SRC_DIR)/_docs' + +.PHONY: docs +docs: + rm -rf '$(SRC_DIR)/_docs' + cd '$(SRC_DIR)' && v doc -all -f html -m -readme . # =====EXPERIMENTAL===== diff --git a/README.md b/README.md index 96b104d2..08f1e759 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,9 @@ ## Documentation -I host documentation for Vieter over at https://rustybever.be/docs/vieter/. +I host documentation for Vieter over at https://rustybever.be/docs/vieter/. API +documentation for the current codebase can be found at +https://rustybever.be/api-docs/vieter/. 
## Overview From a1c308f29ddb2bd92cc9f0e4b8195452be3d7043 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 10:40:29 +0200 Subject: [PATCH 36/67] feature(daemon): added api renewal & calculated sleep time --- src/cron/cli.v | 5 +++-- src/cron/daemon/build.v | 23 +++++++++++++---------- src/cron/daemon/daemon.v | 38 +++++++++++++++++++++++++++++++++----- 3 files changed, 49 insertions(+), 17 deletions(-) diff --git a/src/cron/cli.v b/src/cron/cli.v index f4b20ecc..3b836dd2 100644 --- a/src/cron/cli.v +++ b/src/cron/cli.v @@ -11,8 +11,9 @@ pub: address string base_image string = 'archlinux:base-devel' max_concurrent_builds int = 1 - api_update_frequency int = 60 - global_schedule string + api_update_frequency int = 15 + // Replicates the behavior of the original cron system + global_schedule string = '0 3' } // cmd returns the cli module that handles the cron daemon. diff --git a/src/cron/daemon/build.v b/src/cron/daemon/build.v index 73ba183a..c5ef4283 100644 --- a/src/cron/daemon/build.v +++ b/src/cron/daemon/build.v @@ -4,15 +4,17 @@ import time import sync.stdatomic const build_empty = 0 + const build_running = 1 + const build_done = 2 // reschedule_builds looks for any builds with status code 2 & re-adds them to // the queue. fn (mut d Daemon) reschedule_builds() ? { - for i in 0..d.atomics.len { - if stdatomic.load_u64(&d.atomics[i]) == build_done { - stdatomic.store_u64(&d.atomics[i], build_empty) + for i in 0 .. d.atomics.len { + if stdatomic.load_u64(&d.atomics[i]) == daemon.build_done { + stdatomic.store_u64(&d.atomics[i], daemon.build_empty) sb := d.builds[i] d.schedule_build(sb.repo_id, sb.repo) ? @@ -29,7 +31,8 @@ fn (mut d Daemon) update_builds() ? { sb := d.queue.pop() ? // If this build couldn't be scheduled, no more will be possible. - if !d.start_build(sb)? { + // TODO a build that couldn't be scheduled should be re-added to the queue. + if !d.start_build(sb) { break } } else { @@ -39,10 +42,10 @@ fn (mut d Daemon) update_builds() ? { } // start_build starts a build for the given ScheduledBuild object. -fn (mut d Daemon) start_build(sb ScheduledBuild) ?bool { - for i in 0..d.atomics.len { - if stdatomic.load_u64(&d.atomics[i]) == build_empty { - stdatomic.store_u64(&d.atomics[i], build_running) +fn (mut d Daemon) start_build(sb ScheduledBuild) bool { + for i in 0 .. d.atomics.len { + if stdatomic.load_u64(&d.atomics[i]) == daemon.build_empty { + stdatomic.store_u64(&d.atomics[i], daemon.build_running) d.builds[i] = sb go d.run_build(i, sb) @@ -56,8 +59,8 @@ fn (mut d Daemon) start_build(sb ScheduledBuild) ?bool { // run_build actually starts the build process for a given repo. fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) ? { + d.linfo('build $sb.repo.url') time.sleep(10 * time.second) - stdatomic.store_u64(&d.atomics[build_index], build_done) + stdatomic.store_u64(&d.atomics[build_index], daemon.build_done) } - diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 816bc159..7253e941 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -5,6 +5,8 @@ import time import log import datatypes { MinHeap } import cron.expression { CronExpression, parse_expression } +import math +import arrays struct ScheduledBuild { pub: @@ -62,23 +64,47 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st // periodically refreshes the list of repositories to ensure we stay in sync. pub fn (mut d Daemon) run() ? 
{ for { - println('1') + // Update the API's contents if needed & renew the queue + if time.now() >= d.api_update_timestamp { + d.renew_repos() ? + d.renew_queue() ? + } + // Cleans up finished builds, opening up spots for new builds d.reschedule_builds() ? - println('2') + + // TODO rebuild builder image when needed + // Schedules new builds when possible d.update_builds() ? - println(d.queue) - println(d.atomics) + // Sleep either until we have to refresh the repos or when the next + // build has to start, with a minimum of 1 second. + now := time.now() - time.sleep(10 * time.second) + mut delay := d.api_update_timestamp - now + + if d.queue.len() > 0 { + time_until_next_job := d.queue.peek() ?.timestamp - now + + delay = math.min(delay, time_until_next_job) + } + + d.ldebug('Sleeping for ${delay}...') + + // TODO if there are builds active, the sleep time should be much lower to clean up the builds when they're finished. + + // We sleep for at least one second. This is to prevent the program + // from looping agressively when a cronjob can be scheduled, but + // there's no spots free for it to be started. + time.sleep(math.max(delay, 1 * time.second)) } } // schedule_build adds the next occurence of the given repo build to the queue. fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) ? { ce := parse_expression(repo.schedule) or { + // TODO This shouldn't return an error if the expression is empty. d.lerror("Error while parsing cron expression '$repo.schedule' ($repo_id): $err.msg()") d.global_schedule @@ -94,6 +120,7 @@ fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) ? { } fn (mut d Daemon) renew_repos() ? { + d.ldebug('Renewing repos...') mut new_repos := git.get_repos(d.address, d.api_key) ? d.repos_map = new_repos.move() @@ -104,6 +131,7 @@ fn (mut d Daemon) renew_repos() ? { // renew_queue replaces the old queue with a new one that reflects the newest // values in repos_map. fn (mut d Daemon) renew_queue() ? { + d.ldebug('Renewing queue...') mut new_queue := MinHeap{} // Move any jobs that should have already started from the old queue onto From caee56efd4632f86287c47343e5357e8d1a6fdcb Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 16:08:35 +0200 Subject: [PATCH 37/67] feat(cron): improve sleep calculation; prevent invalid rescheduling of finished builds --- src/cron/daemon/build.v | 34 +++++++++++++++++++------- src/cron/daemon/daemon.v | 42 ++++++++++++++++++++------------ src/cron/expression/expression.v | 2 ++ vieter.toml | 3 ++- 4 files changed, 55 insertions(+), 26 deletions(-) diff --git a/src/cron/daemon/build.v b/src/cron/daemon/build.v index c5ef4283..ea3e6cad 100644 --- a/src/cron/daemon/build.v +++ b/src/cron/daemon/build.v @@ -2,6 +2,7 @@ module daemon import time import sync.stdatomic +import rand const build_empty = 0 @@ -9,21 +10,23 @@ const build_running = 1 const build_done = 2 -// reschedule_builds looks for any builds with status code 2 & re-adds them to -// the queue. -fn (mut d Daemon) reschedule_builds() ? { +// clean_finished_builds removes finished builds from the build slots & returns +// them. +fn (mut d Daemon) clean_finished_builds() ?[]ScheduledBuild { + mut out := []ScheduledBuild{} + for i in 0 .. d.atomics.len { if stdatomic.load_u64(&d.atomics[i]) == daemon.build_done { stdatomic.store_u64(&d.atomics[i], daemon.build_empty) - sb := d.builds[i] - - d.schedule_build(sb.repo_id, sb.repo) ? + out << d.builds[i] } } + + return out } // update_builds starts as many builds as possible. 
-fn (mut d Daemon) update_builds() ? { +fn (mut d Daemon) start_new_builds() ? { now := time.now() for d.queue.len() > 0 { @@ -31,8 +34,8 @@ fn (mut d Daemon) update_builds() ? { sb := d.queue.pop() ? // If this build couldn't be scheduled, no more will be possible. - // TODO a build that couldn't be scheduled should be re-added to the queue. if !d.start_build(sb) { + d.queue.insert(sb) break } } else { @@ -60,7 +63,20 @@ fn (mut d Daemon) start_build(sb ScheduledBuild) bool { // run_build actually starts the build process for a given repo. fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) ? { d.linfo('build $sb.repo.url') - time.sleep(10 * time.second) + time.sleep(rand.int_in_range(1, 6) ? * time.second) stdatomic.store_u64(&d.atomics[build_index], daemon.build_done) } + +// current_build_count returns how many builds are currently running. +fn (mut d Daemon) current_build_count() int { + mut res := 0 + + for i in 0 .. d.atomics.len { + if stdatomic.load_u64(&d.atomics[i]) == daemon.build_running { + res += 1 + } + } + + return res +} diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 7253e941..25d38875 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -6,7 +6,6 @@ import log import datatypes { MinHeap } import cron.expression { CronExpression, parse_expression } import math -import arrays struct ScheduledBuild { pub: @@ -64,40 +63,51 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st // periodically refreshes the list of repositories to ensure we stay in sync. pub fn (mut d Daemon) run() ? { for { + finished_builds := d.clean_finished_builds() ? + // Update the API's contents if needed & renew the queue if time.now() >= d.api_update_timestamp { d.renew_repos() ? d.renew_queue() ? } - - // Cleans up finished builds, opening up spots for new builds - d.reschedule_builds() ? + // The finished builds should only be rescheduled if the API contents + // haven't been renewed. + else { + for sb in finished_builds { + d.schedule_build(sb.repo_id, sb.repo) ? + } + } // TODO rebuild builder image when needed // Schedules new builds when possible - d.update_builds() ? + d.start_new_builds() ? + + // If there are builds currently running, the daemon should refresh + // every second to clean up any finished builds & start new ones. + mut delay := time.Duration(1 * time.second) // Sleep either until we have to refresh the repos or when the next // build has to start, with a minimum of 1 second. - now := time.now() + if d.current_build_count() == 0 { + now := time.now() + delay = d.api_update_timestamp - now - mut delay := d.api_update_timestamp - now + if d.queue.len() > 0 { + time_until_next_job := d.queue.peek() ?.timestamp - now - if d.queue.len() > 0 { - time_until_next_job := d.queue.peek() ?.timestamp - now - - delay = math.min(delay, time_until_next_job) + delay = math.min(delay, time_until_next_job) + } } - d.ldebug('Sleeping for ${delay}...') - - // TODO if there are builds active, the sleep time should be much lower to clean up the builds when they're finished. - // We sleep for at least one second. This is to prevent the program // from looping agressively when a cronjob can be scheduled, but // there's no spots free for it to be started. 
- time.sleep(math.max(delay, 1 * time.second)) + delay = math.max(delay, 1 * time.second) + + d.ldebug('Sleeping for ${delay}...') + + time.sleep(delay) } } diff --git a/src/cron/expression/expression.v b/src/cron/expression/expression.v index 6e11da2b..652870df 100644 --- a/src/cron/expression/expression.v +++ b/src/cron/expression/expression.v @@ -114,6 +114,8 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { }) } +// next_from_now returns the result of ce.next(ref) where ref is the result of +// time.now(). pub fn (ce &CronExpression) next_from_now() ?time.Time { return ce.next(time.now()) } diff --git a/vieter.toml b/vieter.toml index 452500fe..c5ddf9f2 100644 --- a/vieter.toml +++ b/vieter.toml @@ -10,4 +10,5 @@ default_arch = "x86_64" address = "http://localhost:8000" global_schedule = '* *' - +api_update_frequency = 2 +max_concurrent_builds = 3 From 98c0e52b088bf2a2b88478f68f3120a23c52d451 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 16:41:12 +0200 Subject: [PATCH 38/67] chore(ci): added missdoc -p check; merged lint commands --- .woodpecker/.lint.yml | 2 -- Makefile | 7 +++---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/.woodpecker/.lint.yml b/.woodpecker/.lint.yml index ce000cda..b1c16fd8 100644 --- a/.woodpecker/.lint.yml +++ b/.woodpecker/.lint.yml @@ -7,7 +7,5 @@ pipeline: lint: image: 'chewingbever/vlang:latest' pull: true - group: lint commands: - make lint - - make vet diff --git a/Makefile b/Makefile index 17936407..c4d496ac 100644 --- a/Makefile +++ b/Makefile @@ -53,16 +53,15 @@ run-prod: prod .PHONY: lint lint: $(V) fmt -verify $(SRC_DIR) + $(V) vet -W $(SRC_DIR) + $(V_PATH) missdoc -p $(SRC_DIR) + @ [ $$($(V_PATH) missdoc -p $(SRC_DIR) | wc -l) = 0 ] # Format the V codebase .PHONY: fmt fmt: $(V) fmt -w $(SRC_DIR) -.PHONY: vet -vet: - $(V) vet -W $(SRC_DIR) - .PHONY: test test: $(V) test $(SRC_DIR) From 369b4458c5751015e0140538379281f22db6c3c6 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 17:56:35 +0200 Subject: [PATCH 39/67] feat(cron): added automatic rebuilding of image; implemented builds --- src/build/build.v | 95 +++++++++++++++++++++------------------- src/cron/cli.v | 15 ++++--- src/cron/cron.v | 2 +- src/cron/daemon/build.v | 6 ++- src/cron/daemon/daemon.v | 39 +++++++++++++---- 5 files changed, 94 insertions(+), 63 deletions(-) diff --git a/src/build/build.v b/src/build/build.v index 942ce8a8..5f545649 100644 --- a/src/build/build.v +++ b/src/build/build.v @@ -10,7 +10,7 @@ const container_build_dir = '/build' const build_image_repo = 'vieter-build' -fn create_build_image(base_image string) ?string { +pub fn create_build_image(base_image string) ?string { commands := [ // Update repos & install required packages 'pacman -Syu --needed --noconfirm base-devel git' @@ -53,12 +53,13 @@ fn create_build_image(base_image string) ?string { break } - // Wait for 5 seconds - time.sleep(5000000000) + time.sleep(1 * time.second) } // Finally, we create the image from the container // As the tag, we use the epoch value + // TODO also add the base image's name into the image name to prevent + // conflicts. tag := time.sys_mono_now().str() image := docker.create_image_from_container(id, 'vieter-build', tag) ? docker.remove_container(id) ? @@ -66,6 +67,52 @@ fn create_build_image(base_image string) ?string { return image.id } +pub fn build_repo(address string, api_key string, base_image_id string, repo &git.GitRepo) ? 
{ + build_arch := os.uname().machine + + // TODO what to do with PKGBUILDs that build multiple packages? + commands := [ + 'git clone --single-branch --depth 1 --branch $repo.branch $repo.url repo', + 'cd repo', + 'makepkg --nobuild --nodeps', + 'source PKGBUILD', + // The build container checks whether the package is already + // present on the server + 'curl --head --fail $address/$repo.repo/$build_arch/\$pkgname-\$pkgver-\$pkgrel && exit 0', + 'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $address/$repo.repo/publish; done', + ] + + // We convert the list of commands into a base64 string, which then gets + // passed to the container as an env var + cmds_str := base64.encode_str(commands.join('\n')) + + c := docker.NewContainer{ + image: '$base_image_id' + env: ['BUILD_SCRIPT=$cmds_str', 'API_KEY=$api_key'] + entrypoint: ['/bin/sh', '-c'] + cmd: ['echo \$BUILD_SCRIPT | base64 -d | /bin/bash -e'] + work_dir: '/build' + user: 'builder:builder' + } + + id := docker.create_container(c) ? + docker.start_container(id) ? + + // This loop waits until the container has stopped, so we can remove it after + for { + data := docker.inspect_container(id) ? + + if !data.state.running { + break + } + + // Wait for 5 seconds + time.sleep(1 * time.second) + } + + docker.remove_container(id) ? +} + fn build(conf Config) ? { build_arch := os.uname().machine @@ -85,47 +132,7 @@ fn build(conf Config) ? { image_id := create_build_image(conf.base_image) ? for repo in filtered_repos { - // TODO what to do with PKGBUILDs that build multiple packages? - commands := [ - 'git clone --single-branch --depth 1 --branch $repo.branch $repo.url repo', - 'cd repo', - 'makepkg --nobuild --nodeps', - 'source PKGBUILD', - // The build container checks whether the package is already - // present on the server - 'curl --head --fail $conf.address/$repo.repo/$build_arch/\$pkgname-\$pkgver-\$pkgrel && exit 0', - 'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $conf.address/$repo.repo/publish; done', - ] - - // We convert the list of commands into a base64 string, which then gets - // passed to the container as an env var - cmds_str := base64.encode_str(commands.join('\n')) - - c := docker.NewContainer{ - image: '$image_id' - env: ['BUILD_SCRIPT=$cmds_str', 'API_KEY=$conf.api_key'] - entrypoint: ['/bin/sh', '-c'] - cmd: ['echo \$BUILD_SCRIPT | base64 -d | /bin/bash -e'] - work_dir: '/build' - user: 'builder:builder' - } - - id := docker.create_container(c) ? - docker.start_container(id) ? - - // This loop waits until the container has stopped, so we can remove it after - for { - data := docker.inspect_container(id) ? - - if !data.state.running { - break - } - - // Wait for 5 seconds - time.sleep(5000000000) - } - - docker.remove_container(id) ? + build_repo(conf.address, conf.api_key, image_id, repo) ? 
} // Finally, we remove the builder image diff --git a/src/cron/cli.v b/src/cron/cli.v index 3b836dd2..24cbe2c7 100644 --- a/src/cron/cli.v +++ b/src/cron/cli.v @@ -5,13 +5,14 @@ import env struct Config { pub: - log_level string = 'WARN' - log_file string = 'vieter.log' - api_key string - address string - base_image string = 'archlinux:base-devel' - max_concurrent_builds int = 1 - api_update_frequency int = 15 + log_level string = 'WARN' + log_file string = 'vieter.log' + api_key string + address string + base_image string = 'archlinux:base-devel' + max_concurrent_builds int = 1 + api_update_frequency int = 15 + image_rebuild_frequency int = 1440 // Replicates the behavior of the original cron system global_schedule string = '0 3' } diff --git a/src/cron/cron.v b/src/cron/cron.v index cb5bcd7e..49a379e9 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -23,7 +23,7 @@ pub fn cron(conf Config) ? { } mut d := daemon.init_daemon(logger, conf.address, conf.api_key, conf.base_image, ce, - conf.max_concurrent_builds, conf.api_update_frequency) ? + conf.max_concurrent_builds, conf.api_update_frequency, conf.image_rebuild_frequency) ? d.run() ? } diff --git a/src/cron/daemon/build.v b/src/cron/daemon/build.v index ea3e6cad..afe5044d 100644 --- a/src/cron/daemon/build.v +++ b/src/cron/daemon/build.v @@ -3,6 +3,7 @@ module daemon import time import sync.stdatomic import rand +import build const build_empty = 0 @@ -62,8 +63,9 @@ fn (mut d Daemon) start_build(sb ScheduledBuild) bool { // run_build actually starts the build process for a given repo. fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) ? { - d.linfo('build $sb.repo.url') - time.sleep(rand.int_in_range(1, 6) ? * time.second) + d.linfo('started build: ${sb.repo.url} ${sb.repo.branch}') + + build.build_repo(d.address, d.api_key, d.builder_image, &sb.repo) ? stdatomic.store_u64(&d.atomics[build_index], daemon.build_done) } diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 25d38875..4eccfa03 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -6,6 +6,7 @@ import log import datatypes { MinHeap } import cron.expression { CronExpression, parse_expression } import math +import build struct ScheduledBuild { pub: @@ -20,16 +21,19 @@ fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { pub struct Daemon { mut: - address string - api_key string - base_image string - global_schedule CronExpression - api_update_frequency int + address string + api_key string + base_image string + builder_image string + global_schedule CronExpression + api_update_frequency int + image_rebuild_frequency int // Repos currently loaded from API. repos_map map[string]git.GitRepo // At what point to update the list of repositories. - api_update_timestamp time.Time - queue MinHeap + api_update_timestamp time.Time + image_build_timestamp time.Time + queue MinHeap // Which builds are currently running builds []ScheduledBuild // Atomic variables used to detect when a build has finished; length is the @@ -40,13 +44,14 @@ mut: // init_daemon initializes a new Daemon object. It renews the repositories & // populates the build queue for the first time. 
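+// The max_concurrent_builds argument sizes both the build slots & the atomics
+// array, so it determines how many builds can run in parallel.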
-pub fn init_daemon(logger log.Log, address string, api_key string, base_image string, global_schedule CronExpression, max_concurrent_builds int, api_update_frequency int) ?Daemon { +pub fn init_daemon(logger log.Log, address string, api_key string, base_image string, global_schedule CronExpression, max_concurrent_builds int, api_update_frequency int, image_rebuild_frequency int) ?Daemon { mut d := Daemon{ address: address api_key: api_key base_image: base_image global_schedule: global_schedule api_update_frequency: api_update_frequency + image_rebuild_frequency: image_rebuild_frequency atomics: []u64{len: max_concurrent_builds} builds: []ScheduledBuild{len: max_concurrent_builds} logger: logger @@ -55,6 +60,7 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st // Initialize the repos & queue d.renew_repos() ? d.renew_queue() ? + d.rebuild_base_image() ? return d } @@ -78,7 +84,15 @@ pub fn (mut d Daemon) run() ? { } } - // TODO rebuild builder image when needed + // TODO remove old builder images. + // This issue is less trivial than it sounds, because a build could + // still be running when the image has to be rebuilt. That would + // prevent the image from being removed. Therefore, we will need to + // keep track of a list or something & remove an image once we have + // made sure it isn't being used anymore. + if time.now() >= d.image_build_timestamp { + d.rebuild_base_image() ? + } // Schedules new builds when possible d.start_new_builds() ? @@ -170,3 +184,10 @@ fn (mut d Daemon) renew_queue() ? { d.schedule_build(id, repo) ? } } + +fn (mut d Daemon) rebuild_base_image() ? { + d.linfo("Rebuilding builder image....") + + d.builder_image = build.create_build_image(d.base_image) ? + d.image_build_timestamp = time.now().add_seconds(60 * d.image_rebuild_frequency) +} From fb65efdfbe04fd521a7a7d480f5e14b8b101051f Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 18:38:24 +0200 Subject: [PATCH 40/67] feat(cron): added removal of old builder images --- src/cron/daemon/build.v | 5 ++--- src/cron/daemon/daemon.v | 29 ++++++++++++++++++++++++----- vieter.toml | 2 ++ 3 files changed, 28 insertions(+), 8 deletions(-) diff --git a/src/cron/daemon/build.v b/src/cron/daemon/build.v index afe5044d..ec8be4d5 100644 --- a/src/cron/daemon/build.v +++ b/src/cron/daemon/build.v @@ -2,7 +2,6 @@ module daemon import time import sync.stdatomic -import rand import build const build_empty = 0 @@ -63,9 +62,9 @@ fn (mut d Daemon) start_build(sb ScheduledBuild) bool { // run_build actually starts the build process for a given repo. fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) ? { - d.linfo('started build: ${sb.repo.url} ${sb.repo.branch}') + d.linfo('started build: $sb.repo.url $sb.repo.branch') - build.build_repo(d.address, d.api_key, d.builder_image, &sb.repo) ? + build.build_repo(d.address, d.api_key, d.builder_images.last(), &sb.repo) ? 
stdatomic.store_u64(&d.atomics[build_index], daemon.build_done) } diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 4eccfa03..09ccc3eb 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -7,6 +7,7 @@ import datatypes { MinHeap } import cron.expression { CronExpression, parse_expression } import math import build +import docker struct ScheduledBuild { pub: @@ -24,7 +25,7 @@ mut: address string api_key string base_image string - builder_image string + builder_images []string global_schedule CronExpression api_update_frequency int image_rebuild_frequency int @@ -92,6 +93,9 @@ pub fn (mut d Daemon) run() ? { // made sure it isn't being used anymore. if time.now() >= d.image_build_timestamp { d.rebuild_base_image() ? + // In theory, executing this function here allows an old builder + // image to exist for at most image_rebuild_frequency minutes. + d.clean_old_base_images() } // Schedules new builds when possible @@ -144,7 +148,7 @@ fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) ? { } fn (mut d Daemon) renew_repos() ? { - d.ldebug('Renewing repos...') + d.linfo('Renewing repos...') mut new_repos := git.get_repos(d.address, d.api_key) ? d.repos_map = new_repos.move() @@ -155,7 +159,7 @@ fn (mut d Daemon) renew_repos() ? { // renew_queue replaces the old queue with a new one that reflects the newest // values in repos_map. fn (mut d Daemon) renew_queue() ? { - d.ldebug('Renewing queue...') + d.linfo('Renewing queue...') mut new_queue := MinHeap{} // Move any jobs that should have already started from the old queue onto @@ -186,8 +190,23 @@ fn (mut d Daemon) renew_queue() ? { } fn (mut d Daemon) rebuild_base_image() ? { - d.linfo("Rebuilding builder image....") + d.linfo('Rebuilding builder image....') - d.builder_image = build.create_build_image(d.base_image) ? + d.builder_images << build.create_build_image(d.base_image) ? d.image_build_timestamp = time.now().add_seconds(60 * d.image_rebuild_frequency) } + +fn (mut d Daemon) clean_old_base_images() { + mut i := 0 + + for i < d.builder_images.len - 1 { + // For each builder image, we try to remove it by calling the Docker + // API. If the function returns an error or false, that means the image + // wasn't deleted. Therefore, we move the index over. If the function + // returns true, the array's length has decreased by one so we don't + // move the index. 
+ if !docker.remove_image(d.builder_images[i]) or { false } { + i += 1 + } + } +} diff --git a/vieter.toml b/vieter.toml index c5ddf9f2..fc86d77d 100644 --- a/vieter.toml +++ b/vieter.toml @@ -11,4 +11,6 @@ address = "http://localhost:8000" global_schedule = '* *' api_update_frequency = 2 +image_rebuild_frequency = 1 max_concurrent_builds = 3 + From f9f440500efd10aad30f13bd78f3aac2cc15f276 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 20:22:03 +0200 Subject: [PATCH 41/67] docs: added comment string to each function --- src/build/build.v | 9 +++++++++ src/cron/daemon/daemon.v | 6 ++++++ src/cron/expression/expression.v | 5 +++++ src/docker/docker.v | 4 ++++ src/git/cli.v | 6 ++++++ src/git/client.v | 3 +++ src/package/package.v | 1 + src/repo/sync.v | 4 +++- src/server/auth.v | 1 + src/server/git.v | 5 +++++ src/server/routes.v | 4 ++++ src/web/parse.v | 2 ++ 12 files changed, 49 insertions(+), 1 deletion(-) diff --git a/src/build/build.v b/src/build/build.v index 5f545649..2365fef4 100644 --- a/src/build/build.v +++ b/src/build/build.v @@ -10,6 +10,11 @@ const container_build_dir = '/build' const build_image_repo = 'vieter-build' +// create_build_image creates a builder image given some base image which can +// then be used to build & package Arch images. It mostly just updates the +// system, install some necessary packages & creates a non-root user to run +// makepkg with. The base image should be some Linux distribution that uses +// Pacman as its package manager. pub fn create_build_image(base_image string) ?string { commands := [ // Update repos & install required packages @@ -67,6 +72,9 @@ pub fn create_build_image(base_image string) ?string { return image.id } +// build_repo builds, packages & publishes a given Arch package based on the +// provided GitRepo. The base image ID should be of an image previously created +// by create_build_image. pub fn build_repo(address string, api_key string, base_image_id string, repo &git.GitRepo) ? { build_arch := os.uname().machine @@ -113,6 +121,7 @@ pub fn build_repo(address string, api_key string, base_image_id string, repo &gi docker.remove_container(id) ? } +// build builds every Git repo in the server's list. fn build(conf Config) ? { build_arch := os.uname().machine diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 09ccc3eb..4b225220 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -16,6 +16,7 @@ pub: timestamp time.Time } +// Overloaded operator for comparing ScheduledBuild objects fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { return r1.timestamp < r2.timestamp } @@ -147,6 +148,8 @@ fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) ? { }) } +// renew_repos requests the newest list of Git repos from the server & replaces +// the old one. fn (mut d Daemon) renew_repos() ? { d.linfo('Renewing repos...') mut new_repos := git.get_repos(d.address, d.api_key) ? @@ -189,6 +192,7 @@ fn (mut d Daemon) renew_queue() ? { } } +// rebuild_base_image recreates the builder image. fn (mut d Daemon) rebuild_base_image() ? { d.linfo('Rebuilding builder image....') @@ -196,6 +200,8 @@ fn (mut d Daemon) rebuild_base_image() ? { d.image_build_timestamp = time.now().add_seconds(60 * d.image_rebuild_frequency) } +// clean_old_base_images tries to remove any old but still present builder +// images. 
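+// The last entry in builder_images is the image currently used for new
+// builds, so only older entries are ever candidates for removal here.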
fn (mut d Daemon) clean_old_base_images() { mut i := 0 diff --git a/src/cron/expression/expression.v b/src/cron/expression/expression.v index 652870df..124337f0 100644 --- a/src/cron/expression/expression.v +++ b/src/cron/expression/expression.v @@ -65,6 +65,7 @@ pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { if minute_index == ce.minutes.len && hour_index < ce.hours.len { hour_index += 1 } + if hour_index == ce.hours.len && day_index < ce.days.len { day_index += 1 } @@ -197,6 +198,8 @@ fn parse_range(s string, min int, max int, mut bitv []bool) ? { } } +// bitv_to_ints converts a bit vector into an array containing the +// corresponding values. fn bitv_to_ints(bitv []bool, min int) []int { mut out := []int{} @@ -209,6 +212,8 @@ fn bitv_to_ints(bitv []bool, min int) []int { return out } +// parse_part parses a given part of a cron expression & returns the +// corresponding array of ints. fn parse_part(s string, min int, max int) ?[]int { mut bitv := []bool{len: max - min + 1, init: false} diff --git a/src/docker/docker.v b/src/docker/docker.v index 07ceb8ed..5deef830 100644 --- a/src/docker/docker.v +++ b/src/docker/docker.v @@ -9,6 +9,8 @@ const socket = '/var/run/docker.sock' const buf_len = 1024 +// send writes a request to the Docker socket, waits for a response & returns +// it. fn send(req &string) ?http.Response { // Open a connection to the socket mut s := unix.connect_stream(docker.socket) or { @@ -72,12 +74,14 @@ fn send(req &string) ?http.Response { return http.parse_response(res.bytestr()) } +// request_with_body sends a request to the Docker socket with the given body. fn request_with_body(method string, url urllib.URL, content_type string, body string) ?http.Response { req := '$method $url.request_uri() HTTP/1.1\nHost: localhost\nContent-Type: $content_type\nContent-Length: $body.len\n\n$body\n\n' return send(req) } +// request sends a request to the Docker socket with an empty body. fn request(method string, url urllib.URL) ?http.Response { req := '$method $url.request_uri() HTTP/1.1\nHost: localhost\n\n' diff --git a/src/git/cli.v b/src/git/cli.v index 463f1ba1..53527d52 100644 --- a/src/git/cli.v +++ b/src/git/cli.v @@ -96,6 +96,8 @@ pub fn cmd() cli.Command { } } +// get_repo_id_by_prefix tries to find the repo with the given prefix in its +// ID. If multiple or none are found, an error is raised. fn get_repo_id_by_prefix(conf Config, id_prefix string) ?string { repos := get_repos(conf.address, conf.api_key) ? @@ -118,6 +120,7 @@ fn get_repo_id_by_prefix(conf Config, id_prefix string) ?string { return res[0] } +// list prints out a list of all repositories. fn list(conf Config) ? { repos := get_repos(conf.address, conf.api_key) ? @@ -126,12 +129,14 @@ fn list(conf Config) ? { } } +// add adds a new repository to the server's list. fn add(conf Config, url string, branch string, repo string, arch []string) ? { res := add_repo(conf.address, conf.api_key, url, branch, repo, arch) ? println(res.message) } +// remove removes a repository from the server's list. fn remove(conf Config, id_prefix string) ? { id := get_repo_id_by_prefix(conf, id_prefix) ? res := remove_repo(conf.address, conf.api_key, id) ? @@ -139,6 +144,7 @@ fn remove(conf Config, id_prefix string) ? { println(res.message) } +// patch patches a given repository with the provided params. fn patch(conf Config, id_prefix string, params map[string]string) ? { id := get_repo_id_by_prefix(conf, id_prefix) ? res := patch_repo(conf.address, conf.api_key, id, params) ? 
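The prefix matching described in the new `get_repo_id_by_prefix` comment can be summarised with a self-contained sketch. This is not the actual implementation (which works on the repos returned by `get_repos`); it only illustrates the documented semantics:

```v
// match_prefix returns the single ID starting with the given prefix.
// Zero matches & ambiguous prefixes are both reported as errors.
fn match_prefix(ids []string, prefix string) ?string {
	matches := ids.filter(it.starts_with(prefix))

	if matches.len == 0 {
		return error('No repo found for given ID prefix.')
	}

	if matches.len > 1 {
		return error('Multiple repos found for given ID prefix.')
	}

	return matches[0]
}
```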
diff --git a/src/git/client.v b/src/git/client.v index e4a39acd..a43c9ca0 100644 --- a/src/git/client.v +++ b/src/git/client.v @@ -4,6 +4,9 @@ import json import response { Response } import net.http +// send_request is a convenience method for sending requests to the repos +// API. It mostly does string manipulation to create a query string containing +// the provided params. fn send_request(method http.Method, address string, url string, api_key string, params map[string]string) ?Response { mut full_url := '$address$url' diff --git a/src/package/package.v b/src/package/package.v index a6be6360..a1042b54 100644 --- a/src/package/package.v +++ b/src/package/package.v @@ -175,6 +175,7 @@ pub fn read_pkg_archive(pkg_path string) ?Pkg { } } +// format_entry returns a string properly formatted to be added to a desc file. fn format_entry(key string, value string) string { return '\n%$key%\n$value\n' } diff --git a/src/repo/sync.v b/src/repo/sync.v index 12756b7f..9c5e7ed2 100644 --- a/src/repo/sync.v +++ b/src/repo/sync.v @@ -2,6 +2,8 @@ module repo import os +// archive_add_entry writes a file to an archive, given its path & inner path +// inside the archive. fn archive_add_entry(archive &C.archive, entry &C.archive_entry, file_path &string, inner_path &string) { st := C.stat{} @@ -29,7 +31,7 @@ fn archive_add_entry(archive &C.archive, entry &C.archive_entry, file_path &stri } } -// Re-generate the repo archive files +// sync regenerates the repository archive files. fn (r &RepoGroupManager) sync(repo string, arch string) ? { subrepo_path := os.join_path(r.repos_dir, repo, arch) diff --git a/src/server/auth.v b/src/server/auth.v index 8bc9d55d..7c8a676f 100644 --- a/src/server/auth.v +++ b/src/server/auth.v @@ -2,6 +2,7 @@ module server import net.http +// is_authorized checks whether the provided API key is correct. fn (mut app App) is_authorized() bool { x_header := app.req.header.get_custom('X-Api-Key', http.HeaderQueryConfig{ exact: true }) or { return false diff --git a/src/server/git.v b/src/server/git.v index a9d6f502..0cba17cb 100644 --- a/src/server/git.v +++ b/src/server/git.v @@ -8,6 +8,7 @@ import response { new_data_response, new_response } const repos_file = 'repos.json' +// get_repos returns the current list of repos. ['/api/repos'; get] fn (mut app App) get_repos() web.Result { if !app.is_authorized() { @@ -25,6 +26,7 @@ fn (mut app App) get_repos() web.Result { return app.json(http.Status.ok, new_data_response(repos)) } +// get_single_repo returns the information for a single repo. ['/api/repos/:id'; get] fn (mut app App) get_single_repo(id string) web.Result { if !app.is_authorized() { @@ -48,6 +50,7 @@ fn (mut app App) get_single_repo(id string) web.Result { return app.json(http.Status.ok, new_data_response(repo)) } +// post_repo creates a new repo from the provided query string. ['/api/repos'; post] fn (mut app App) post_repo() web.Result { if !app.is_authorized() { @@ -86,6 +89,7 @@ fn (mut app App) post_repo() web.Result { return app.json(http.Status.ok, new_response('Repo added successfully.')) } +// delete_repo removes a given repo from the server's list. ['/api/repos/:id'; delete] fn (mut app App) delete_repo(id string) web.Result { if !app.is_authorized() { @@ -113,6 +117,7 @@ fn (mut app App) delete_repo(id string) web.Result { return app.json(http.Status.ok, new_response('Repo removed successfully.')) } +// patch_repo updates a repo's data with the given query params. 
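+// Like the other /api/repos routes, it requires a valid X-Api-Key header.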
['/api/repos/:id'; patch] fn (mut app App) patch_repo(id string) web.Result { if !app.is_authorized() { diff --git a/src/server/routes.v b/src/server/routes.v index 4f6c4f00..f27afb4d 100644 --- a/src/server/routes.v +++ b/src/server/routes.v @@ -16,6 +16,9 @@ pub fn (mut app App) healthcheck() web.Result { return app.json(http.Status.ok, new_response('Healthy.')) } +// get_repo_file handles all Pacman-related routes. It returns both the +// repository's archives, but also package archives or the contents of a +// package's desc file. ['/:repo/:arch/:filename'; get; head] fn (mut app App) get_repo_file(repo string, arch string, filename string) web.Result { mut full_path := '' @@ -54,6 +57,7 @@ fn (mut app App) get_repo_file(repo string, arch string, filename string) web.Re return app.file(full_path) } +// put_package handles publishing a package to a repository. ['/:repo/publish'; post] fn (mut app App) put_package(repo string) web.Result { if !app.is_authorized() { diff --git a/src/web/parse.v b/src/web/parse.v index 2eeef5e9..a095f0c9 100644 --- a/src/web/parse.v +++ b/src/web/parse.v @@ -47,6 +47,7 @@ fn parse_attrs(name string, attrs []string) ?([]http.Method, string) { return methods, path.to_lower() } +// Extracts query parameters from a URL. fn parse_query_from_url(url urllib.URL) map[string]string { mut query := map[string]string{} for v in url.query().data { @@ -55,6 +56,7 @@ fn parse_query_from_url(url urllib.URL) map[string]string { return query } +// Extract form data from an HTTP request. fn parse_form_from_request(request http.Request) ?(map[string]string, map[string][]http.FileData) { mut form := map[string]string{} mut files := map[string][]http.FileData{} From cfacf9ed0f5bd14e90163b6fe78bca16d25e4f7d Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 20:48:49 +0200 Subject: [PATCH 42/67] fix(cron): don't show error for empty cron schedule --- Makefile | 4 ++-- src/build/build.v | 1 - src/cron/daemon/daemon.v | 11 ++++++++--- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile index c4d496ac..041bafc6 100644 --- a/Makefile +++ b/Makefile @@ -77,8 +77,8 @@ v/v: clean: rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' *.pkg.tar.zst 'suvieter' 'afvieter' '$(SRC_DIR)/_docs' -.PHONY: docs -docs: +.PHONY: api-docs +api-docs: rm -rf '$(SRC_DIR)/_docs' cd '$(SRC_DIR)' && v doc -all -f html -m -readme . diff --git a/src/build/build.v b/src/build/build.v index 2365fef4..bc604fa1 100644 --- a/src/build/build.v +++ b/src/build/build.v @@ -114,7 +114,6 @@ pub fn build_repo(address string, api_key string, base_image_id string, repo &gi break } - // Wait for 5 seconds time.sleep(1 * time.second) } diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 4b225220..729e94b0 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -132,12 +132,17 @@ pub fn (mut d Daemon) run() ? { // schedule_build adds the next occurence of the given repo build to the queue. fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) ? { - ce := parse_expression(repo.schedule) or { - // TODO This shouldn't return an error if the expression is empty. - d.lerror("Error while parsing cron expression '$repo.schedule' ($repo_id): $err.msg()") + ce := if repo.schedule != '' { + parse_expression(repo.schedule) or { + // TODO This shouldn't return an error if the expression is empty. 
+ d.lerror("Error while parsing cron expression '$repo.schedule' ($repo_id): $err.msg()") + d.global_schedule + } + } else { d.global_schedule } + // A repo that can't be scheduled will just be skipped for now timestamp := ce.next_from_now() ? From 60598f719c07983ba75bba92ade336664dfc40a5 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 22:22:12 +0200 Subject: [PATCH 43/67] fix(ci): only download PKGBUILD instead of cloning entire repo --- .woodpecker/.arch.yml | 5 ++++- PKGBUILD | 3 ++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.woodpecker/.arch.yml b/.woodpecker/.arch.yml index e37dc1a6..742095c0 100644 --- a/.woodpecker/.arch.yml +++ b/.woodpecker/.arch.yml @@ -5,6 +5,7 @@ matrix: platform: ${PLATFORM} branches: [dev] +skip_clone: true pipeline: build: @@ -20,7 +21,9 @@ pipeline: - chown -R builder:builder "$PWD" - "echo 'builder ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers" - su builder - # Build the package + # Due to a bug with the V compiler, we can't just use the PKGBUILD from + # inside the repo + - curl -OL https://git.rustybever.be/Chewing_Bever/vieter/raw/branch/dev/PKGBUILD - makepkg -s --noconfirm --needed publish: diff --git a/PKGBUILD b/PKGBUILD index 3f8c4801..eb866834 100644 --- a/PKGBUILD +++ b/PKGBUILD @@ -14,6 +14,7 @@ md5sums=('SKIP') pkgver() { cd "$pkgname" + git describe --long --tags | sed 's/^v//;s/\([^-]*-g\)/r\1/;s/-/./g' } @@ -25,7 +26,7 @@ build() { package() { pkgdesc="Vieter is a lightweight implementation of an Arch repository server." - install -dm755 "$pkgdir/usr/bin" + install -dm755 "$pkgdir/usr/bin" install -Dm755 "$pkgbase/pvieter" "$pkgdir/usr/bin/vieter" } From 37c27ae84b1ef42d977fb9483d6a767360041550 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 23:02:54 +0200 Subject: [PATCH 44/67] fix(ci): add -DGC_THREADS flag to prod build --- .woodpecker/.build.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.woodpecker/.build.yml b/.woodpecker/.build.yml index e7341fd0..b41a39d5 100644 --- a/.woodpecker/.build.yml +++ b/.woodpecker/.build.yml @@ -25,7 +25,8 @@ pipeline: environment: - LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static commands: - - make prod + # Apparently this -D is *very* important + - CFLAGS='-DGC_THREADS=1' make prod # Make sure the binary is actually statically built - readelf -d pvieter - du -h pvieter From 325dcc27de1a69fb8fd82d0104f432899e7461b1 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 1 May 2022 09:14:33 +0200 Subject: [PATCH 45/67] fix(cron): made Daemon.run infallible --- src/cron/cron.v | 2 +- src/cron/daemon/build.v | 31 ++++++++++++--- src/cron/daemon/daemon.v | 82 ++++++++++++++++++++++++++++++---------- 3 files changed, 87 insertions(+), 28 deletions(-) diff --git a/src/cron/cron.v b/src/cron/cron.v index 49a379e9..e10e4dda 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -25,5 +25,5 @@ pub fn cron(conf Config) ? { mut d := daemon.init_daemon(logger, conf.address, conf.api_key, conf.base_image, ce, conf.max_concurrent_builds, conf.api_update_frequency, conf.image_rebuild_frequency) ? - d.run() ? + d.run() } diff --git a/src/cron/daemon/build.v b/src/cron/daemon/build.v index ec8be4d5..5b2e9ccb 100644 --- a/src/cron/daemon/build.v +++ b/src/cron/daemon/build.v @@ -12,7 +12,7 @@ const build_done = 2 // clean_finished_builds removes finished builds from the build slots & returns // them. 
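+// Each freed slot is reset to build_empty, so start_new_builds can reuse it.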
-fn (mut d Daemon) clean_finished_builds() ?[]ScheduledBuild { +fn (mut d Daemon) clean_finished_builds() []ScheduledBuild { mut out := []ScheduledBuild{} for i in 0 .. d.atomics.len { @@ -26,12 +26,22 @@ fn (mut d Daemon) clean_finished_builds() ?[]ScheduledBuild { } // update_builds starts as many builds as possible. -fn (mut d Daemon) start_new_builds() ? { +fn (mut d Daemon) start_new_builds() { now := time.now() for d.queue.len() > 0 { - if d.queue.peek() ?.timestamp < now { - sb := d.queue.pop() ? + elem := d.queue.peek() or { + d.lerror("queue.peek() unexpectedly returned an error. This shouldn't happen.") + + break + } + + if elem.timestamp < now { + sb := d.queue.pop() or { + d.lerror("queue.pop() unexpectedly returned an error. This shouldn't happen.") + + break + } // If this build couldn't be scheduled, no more will be possible. if !d.start_build(sb) { @@ -61,10 +71,19 @@ fn (mut d Daemon) start_build(sb ScheduledBuild) bool { } // run_build actually starts the build process for a given repo. -fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) ? { +fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) { d.linfo('started build: $sb.repo.url $sb.repo.branch') - build.build_repo(d.address, d.api_key, d.builder_images.last(), &sb.repo) ? + // 0 means success, 1 means failure + mut status := 0 + + build.build_repo(d.address, d.api_key, d.builder_images.last(), &sb.repo) or { status = 1 } + + if status == 0 { + d.linfo('finished build: $sb.repo.url $sb.repo.branch') + } else { + d.linfo('failed build: $sb.repo.url $sb.repo.branch') + } stdatomic.store_u64(&d.atomics[build_index], daemon.build_done) } diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 729e94b0..088a24f6 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -9,6 +9,12 @@ import math import build import docker +// How many seconds to wait before retrying to update API if failed +const api_update_retry_timeout = 5 + +// How many seconds to wait before retrying to rebuild image if failed +const rebuild_base_image_retry_timout = 30 + struct ScheduledBuild { pub: repo_id string @@ -60,29 +66,31 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st } // Initialize the repos & queue - d.renew_repos() ? - d.renew_queue() ? - d.rebuild_base_image() ? + d.renew_repos() + d.renew_queue() + if !d.rebuild_base_image() { + return error('The base image failed to build. The Vieter cron daemon cannot run without an initial builder image.') + } return d } // run starts the actual daemon process. It runs builds when possible & // periodically refreshes the list of repositories to ensure we stay in sync. -pub fn (mut d Daemon) run() ? { +pub fn (mut d Daemon) run() { for { - finished_builds := d.clean_finished_builds() ? + finished_builds := d.clean_finished_builds() // Update the API's contents if needed & renew the queue if time.now() >= d.api_update_timestamp { - d.renew_repos() ? - d.renew_queue() ? + d.renew_repos() + d.renew_queue() } // The finished builds should only be rescheduled if the API contents // haven't been renewed. else { for sb in finished_builds { - d.schedule_build(sb.repo_id, sb.repo) ? + d.schedule_build(sb.repo_id, sb.repo) } } @@ -93,14 +101,14 @@ pub fn (mut d Daemon) run() ? { // keep track of a list or something & remove an image once we have // made sure it isn't being used anymore. if time.now() >= d.image_build_timestamp { - d.rebuild_base_image() ? 
+ d.rebuild_base_image() // In theory, executing this function here allows an old builder // image to exist for at most image_rebuild_frequency minutes. d.clean_old_base_images() } // Schedules new builds when possible - d.start_new_builds() ? + d.start_new_builds() // If there are builds currently running, the daemon should refresh // every second to clean up any finished builds & start new ones. @@ -113,7 +121,17 @@ pub fn (mut d Daemon) run() ? { delay = d.api_update_timestamp - now if d.queue.len() > 0 { - time_until_next_job := d.queue.peek() ?.timestamp - now + elem := d.queue.peek() or { + d.lerror("queue.peek() unexpectedly returned an error. This shouldn't happen.") + + // This is just a fallback option. In theory, queue.peek() + // should *never* return an error or none, because we check + // its len beforehand. + time.sleep(1) + continue + } + + time_until_next_job := elem.timestamp - now delay = math.min(delay, time_until_next_job) } @@ -131,7 +149,7 @@ pub fn (mut d Daemon) run() ? { } // schedule_build adds the next occurence of the given repo build to the queue. -fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) ? { +fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) { ce := if repo.schedule != '' { parse_expression(repo.schedule) or { // TODO This shouldn't return an error if the expression is empty. @@ -144,7 +162,10 @@ fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) ? { } // A repo that can't be scheduled will just be skipped for now - timestamp := ce.next_from_now() ? + timestamp := ce.next_from_now() or { + d.lerror("Couldn't calculate next timestamp from '$repo.schedule'; skipping") + return + } d.queue.insert(ScheduledBuild{ repo_id: repo_id @@ -155,9 +176,15 @@ fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) ? { // renew_repos requests the newest list of Git repos from the server & replaces // the old one. -fn (mut d Daemon) renew_repos() ? { +fn (mut d Daemon) renew_repos() { d.linfo('Renewing repos...') - mut new_repos := git.get_repos(d.address, d.api_key) ? + + mut new_repos := git.get_repos(d.address, d.api_key) or { + d.lerror('Failed to renew repos. Retrying in ${daemon.api_update_retry_timeout}s...') + d.api_update_timestamp = time.now().add_seconds(daemon.api_update_retry_timeout) + + return + } d.repos_map = new_repos.move() @@ -166,7 +193,7 @@ fn (mut d Daemon) renew_repos() ? { // renew_queue replaces the old queue with a new one that reflects the newest // values in repos_map. -fn (mut d Daemon) renew_queue() ? { +fn (mut d Daemon) renew_queue() { d.linfo('Renewing queue...') mut new_queue := MinHeap{} @@ -181,8 +208,13 @@ fn (mut d Daemon) renew_queue() ? { // here causes the function to prematurely just exit, without any errors or anything, very weird // https://github.com/vlang/v/issues/14042 for d.queue.len() > 0 { - if d.queue.peek() ?.timestamp < now { - new_queue.insert(d.queue.pop() ?) + elem := d.queue.pop() or { + d.lerror("queue.pop() returned an error. This shouldn't happen.") + continue + } + + if elem.timestamp < now { + new_queue.insert(elem) } else { break } @@ -193,16 +225,24 @@ fn (mut d Daemon) renew_queue() ? { // For each repository in repos_map, parse their cron expression (or use // the default one if not present) & add them to the queue for id, repo in d.repos_map { - d.schedule_build(id, repo) ? + d.schedule_build(id, repo) } } // rebuild_base_image recreates the builder image. -fn (mut d Daemon) rebuild_base_image() ? 
{ +fn (mut d Daemon) rebuild_base_image() bool { d.linfo('Rebuilding builder image....') - d.builder_images << build.create_build_image(d.base_image) ? + d.builder_images << build.create_build_image(d.base_image) or { + d.lerror('Failed to rebuild base image. Retrying in ${daemon.rebuild_base_image_retry_timout}s...') + d.image_build_timestamp = time.now().add_seconds(daemon.rebuild_base_image_retry_timout) + + return false + } + d.image_build_timestamp = time.now().add_seconds(60 * d.image_rebuild_frequency) + + return true } // clean_old_base_images tries to remove any old but still present builder From 1dd810a605ba70f6a171d3b1a4d8d21595bc9ecd Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 23:24:07 +0200 Subject: [PATCH 46/67] docs: migrated over Hugo documentation --- .gitignore | 14 +-- .gitmodules | 3 + Makefile | 19 ++- docs/.gitignore | 2 + docs/.woodpecker.yml | 17 +++ docs/LICENSE | 9 ++ docs/README.md | 3 + docs/archetypes/default.md | 6 + docs/config.toml | 108 ++++++++++++++++++ docs/content/CLI.md | 27 +++++ docs/content/_index.md | 62 ++++++++++ docs/content/api.md | 84 ++++++++++++++ docs/content/builder.md | 56 +++++++++ docs/content/configuration.md | 64 +++++++++++ docs/content/installation.md | 78 +++++++++++++ docs/content/usage.md | 54 +++++++++ ...s_50fc8c04e12a2f59027287995557ceff.content | 1 + ...scss_50fc8c04e12a2f59027287995557ceff.json | 1 + ...s_50fc8c04e12a2f59027287995557ceff.content | 1 + ...scss_50fc8c04e12a2f59027287995557ceff.json | 1 + docs/themes/hugo-book | 1 + 21 files changed, 598 insertions(+), 13 deletions(-) create mode 100644 .gitmodules create mode 100644 docs/.gitignore create mode 100644 docs/.woodpecker.yml create mode 100644 docs/LICENSE create mode 100644 docs/README.md create mode 100644 docs/archetypes/default.md create mode 100644 docs/config.toml create mode 100644 docs/content/CLI.md create mode 100644 docs/content/_index.md create mode 100644 docs/content/api.md create mode 100644 docs/content/builder.md create mode 100644 docs/content/configuration.md create mode 100644 docs/content/installation.md create mode 100644 docs/content/usage.md create mode 100644 docs/resources/_gen/assets/scss/docs/book.scss_50fc8c04e12a2f59027287995557ceff.content create mode 100644 docs/resources/_gen/assets/scss/docs/book.scss_50fc8c04e12a2f59027287995557ceff.json create mode 100644 docs/resources/_gen/assets/scss/docs/vieter/book.scss_50fc8c04e12a2f59027287995557ceff.content create mode 100644 docs/resources/_gen/assets/scss/docs/vieter/book.scss_50fc8c04e12a2f59027287995557ceff.json create mode 160000 docs/themes/hugo-book diff --git a/.gitignore b/.gitignore index 6a06eb2b..f27a43f3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,13 +1,13 @@ *.c -data/ +/data/ # Build artifacts -vieter -dvieter -pvieter -suvieter -afvieter -vieter.c +/vieter +/dvieter +/pvieter +/suvieter +/afvieter +/vieter.c # Ignore testing files *.pkg* diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..47029a0e --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "docs/themes/hugo-book"] + path = docs/themes/hugo-book + url = https://github.com/alex-shpak/hugo-book diff --git a/Makefile b/Makefile index 041bafc6..199b99e6 100644 --- a/Makefile +++ b/Makefile @@ -49,6 +49,18 @@ run-prod: prod ./pvieter -f vieter.toml server +# =====DOCS===== +.PHONY: docs +docs: + rm -rf 'docs/public' + cd docs && hugo + +.PHONY: api-docs +api-docs: + rm -rf '$(SRC_DIR)/_docs' + cd '$(SRC_DIR)' && v doc -all -f html -m -readme . 
+ + # =====OTHER===== .PHONY: lint lint: @@ -75,12 +87,7 @@ v/v: .PHONY: clean clean: - rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' *.pkg.tar.zst 'suvieter' 'afvieter' '$(SRC_DIR)/_docs' - -.PHONY: api-docs -api-docs: - rm -rf '$(SRC_DIR)/_docs' - cd '$(SRC_DIR)' && v doc -all -f html -m -readme . + rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' *.pkg.tar.zst 'suvieter' 'afvieter' '$(SRC_DIR)/_docs' 'docs/public' # =====EXPERIMENTAL===== diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 00000000..ca26866e --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,2 @@ +.hugo_build.lock +/public/ diff --git a/docs/.woodpecker.yml b/docs/.woodpecker.yml new file mode 100644 index 00000000..d0dfe7d2 --- /dev/null +++ b/docs/.woodpecker.yml @@ -0,0 +1,17 @@ +platform: 'linux/amd64' +branches: 'main' + +pipeline: + release: + image: 'klakegg/hugo:alpine' + commands: + - apk add git + - hugo + - 'cd public && tar czvf ../public.tar.gz *' + + deploy: + image: 'curlimages/curl' + secrets: + - 'api_key' + commands: + - 'curl -XPOST --fail -s -H "Authorization: Bearer $API_KEY" -T public.tar.gz https://rustybever.be/api/deploy?dir=docs' diff --git a/docs/LICENSE b/docs/LICENSE new file mode 100644 index 00000000..2071b23b --- /dev/null +++ b/docs/LICENSE @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 00000000..6a0b0465 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,3 @@ +# docs + +Repository containing docs for various personal projects I've made. \ No newline at end of file diff --git a/docs/archetypes/default.md b/docs/archetypes/default.md new file mode 100644 index 00000000..00e77bd7 --- /dev/null +++ b/docs/archetypes/default.md @@ -0,0 +1,6 @@ +--- +title: "{{ replace .Name "-" " " | title }}" +date: {{ .Date }} +draft: true +--- + diff --git a/docs/config.toml b/docs/config.toml new file mode 100644 index 00000000..7f7e1863 --- /dev/null +++ b/docs/config.toml @@ -0,0 +1,108 @@ +# hugo server --minify --themesDir ... 
--baseURL=http://0.0.0.0:1313/theme/hugo-book/ + +baseURL = 'https://rustybever.be/docs/vieter/' +title = 'The Rusty Bever - Docs' +theme = 'hugo-book' + +# Book configuration +disablePathToLower = true +enableGitInfo = true + +# Needed for mermaid/katex shortcodes +[markup] +[markup.goldmark.renderer] + unsafe = true + +[markup.tableOfContents] + startLevel = 1 + +# Multi-lingual mode config +# There are different options to translate files +# See https://gohugo.io/content-management/multilingual/#translation-by-filename +# And https://gohugo.io/content-management/multilingual/#translation-by-content-directory +[languages] +[languages.en] + languageName = 'English' + contentDir = 'content' + weight = 1 + +[menu] +# [[menu.before]] +[[menu.after]] + name = "Source" + url = "https://git.rustybever.be/Chewing_Bever/docs" + weight = 10 + +[[menu.after]] + name = "Hugo Theme" + url = "https://github.com/alex-shpak/hugo-book" + weight = 20 + +[params] + # (Optional, default light) Sets color theme: light, dark or auto. + # Theme 'auto' switches between dark and light modes based on browser/os preferences + BookTheme = 'auto' + + # (Optional, default true) Controls table of contents visibility on right side of pages. + # Start and end levels can be controlled with markup.tableOfContents setting. + # You can also specify this parameter per page in front matter. + BookToC = true + + # (Optional, default none) Set the path to a logo for the book. If the logo is + # /static/logo.png then the path would be logo.png + # BookLogo = 'logo.png' + + # (Optional, default none) Set leaf bundle to render as side menu + # When not specified file structure and weights will be used + # BookMenuBundle = '/menu' + + # (Optional, default docs) Specify root page to render child pages as menu. + # Page is resoled by .GetPage function: https://gohugo.io/functions/getpage/ + # For backward compatibility you can set '*' to render all sections to menu. Acts same as '/' + BookSection = '/' + + # Set source repository location. + # Used for 'Last Modified' and 'Edit this page' links. + BookRepo = 'https://git.rustybever.be/Chewing_Bever/docs' + + # (Optional, default 'commit') Specifies commit portion of the link to the page's last modified + # commit hash for 'doc' page type. + # Requires 'BookRepo' param. + # Value used to construct a URL consisting of BookRepo/BookCommitPath/ + # Github uses 'commit', Bitbucket uses 'commits' + # BookCommitPath = 'commit' + + # Enable "Edit this page" links for 'doc' page type. + # Disabled by default. Uncomment to enable. Requires 'BookRepo' param. + # Edit path must point to root directory of repo. + # BookEditPath = 'edit/main/exampleSite' + + # Configure the date format used on the pages + # - In git information + # - In blog posts + BookDateFormat = 'January 2, 2006' + + # (Optional, default true) Enables search function with flexsearch, + # Index is built on fly, therefore it might slowdown your website. + # Configuration for indexing can be adjusted in i18n folder per language. + BookSearch = true + + # (Optional, default true) Enables comments template on pages + # By default partals/docs/comments.html includes Disqus template + # See https://gohugo.io/content-management/comments/#configure-disqus + # Can be overwritten by same param in page frontmatter + BookComments = false + + # /!\ This is an experimental feature, might be removed or changed at any time + # (Optional, experimental, default false) Enables portable links and link checks in markdown pages. 
+ # Portable links meant to work with text editors and let you write markdown without {{< relref >}} shortcode + # Theme will print warning if page referenced in markdown does not exists. + BookPortableLinks = true + + # /!\ This is an experimental feature, might be removed or changed at any time + # (Optional, experimental, default false) Enables service worker that caches visited pages and resources for offline use. + BookServiceWorker = true + + # /!\ This is an experimental feature, might be removed or changed at any time + # (Optional, experimental, default false) Enables a drop-down menu for translations only if a translation is present. + BookTranslatedOnly = false diff --git a/docs/content/CLI.md b/docs/content/CLI.md new file mode 100644 index 00000000..32bb6f86 --- /dev/null +++ b/docs/content/CLI.md @@ -0,0 +1,27 @@ +# Vieter CLI + +I provide a simple CLI tool that currently only allows changing the Git +repository API. Its usage is quite simple. + +First, you need to create a file in your home directory called `.vieterrc` with +the following content: + +```toml +address = "https://example.com" +api_key = "your-api-key" +``` + +You can also use a different file or use environment variables, as described in +[Configuration](/configuration). + +Now you're ready to use the CLI tool. + +## Usage + +* `vieter repos list` returns all repositories currently stored in the API. +* `vieter repos add url branch repo arch...` adds the repository with the given + URL, branch, repo & arch to the API. +* `vieter repos remove id` removes the repository with the given ID prefix. + +You can always check `vieter -help` or `vieter repos -help` for more +information about the commands. diff --git a/docs/content/_index.md b/docs/content/_index.md new file mode 100644 index 00000000..3a1144ba --- /dev/null +++ b/docs/content/_index.md @@ -0,0 +1,62 @@ +# Vieter + +{{< hint warning >}} +**Important** +Because this project is still in heavy development, this documentation tries to +follow the development branch & not the latest release. This means that the +documentation might not be relevant anymore for the latest release. +{{< /hint >}} + +## Overview + +Vieter has a few main features: + +* It's a simple & lightweight implementation of an Arch repository server +* It allows for uploading of built package archives +* It supports a basic build system to periodically re-build packages & upload + them to the server + +{{< hint info >}} +**Note** +While I mention Vieter being an "Arch" repository server, it works with any +distribution that uses Pacman as the package manager. I do recommend using a +base docker image for your distribution if you wish to use the build system as +well. +{{< /hint >}} + +### Why? + +Vieter is my personal solution for a problem I've been facing for months: +extremely long AUR package build times. I run EndeavourOS on both my laptops, +one of which being a rather old MacBook Air. I really like being a beta-tester +for projects & run development builds for multiple packages (nheko, +newsflash...). The issue with this is that I have to regularly re-build these +packages in order to stay up to date with development & these builds can take a +really long time on the old MacBook. This project is a solution to that +problem: instead of building the packages locally, I can build them +automatically in the cloud & just download them whenever I update my system! 
+Thanks to this solution, I'm able to shave 10-15 minutes off my update times,
+just from not having to compile everything every time there's an update.
+
+Besides this, it's also just really useful to have a repository server that you
+control & can upload your own packages to. For example, I package my st
+terminal using a CI pipeline & upload it to my repository!
+
+### Why V?
+
+I had been interested in learning V for a couple of months ever since I
+stumbled upon it by accident. It looked like a promising language & turned out
+to be very fun to use! It's fast & easy to learn, & it's a nice contrast with
+my usual Rust-based projects, which tend to get quite complex.
+
+I recommend checking out their [homepage](https://vlang.io/)!
+
+### What's with the name?
+
+Before deciding to write this project in V, I wrote a prototype in Python,
+called [Pieter](https://git.rustybever.be/Chewing_Bever/pieter). The name
+Pieter came from Pieter Post, the Dutch name for [Postman
+Pat](https://en.wikipedia.org/wiki/Postman_Pat). The idea was that the server
+"delivered packages", & a good friend of mine suggested the name. When I
+decided to switch over to Vieter, I changed the P (for Python) to a V; it
+seemed fitting.
diff --git a/docs/content/api.md b/docs/content/api.md
new file mode 100644
index 00000000..7c395eb2
--- /dev/null
+++ b/docs/content/api.md
@@ -0,0 +1,84 @@
+# API Reference
+
+All routes that return JSON use the following shape:
+
+```json
+{
+    "message": "some message",
+    "data": {}
+}
+```
+
+Here, data can be any JSON object, so it's not guaranteed to be a struct.
+
+### `GET /<repo>/<arch>/<filename>`
+
+This route serves the contents of a specific architecture's repo.
+
+If `<filename>` is one of `<repo>.db`, `<repo>.files`, `<repo>.db.tar.gz` or
+`<repo>.files.tar.gz`, it will serve the respective archive file from the
+repository.
+
+If `<filename>` contains `.pkg`, it assumes the request to be for a package
+archive & will serve that file from the specific arch-repo's package directory.
+
+Finally, if none of the above are true, Vieter assumes it to be a request for a
+package version's desc file & tries to serve this instead. This functionality
+is very useful for the build system to check whether a package needs to be
+rebuilt or not.
+
+### `HEAD /<repo>/<arch>/<filename>`
+
+Behaves the same as the above route, but instead of returning actual data, it
+returns either 200 or 404, depending on whether the file exists. This route is
+used by the build system to determine whether a package needs to be rebuilt.
+
+### `POST /<repo>/publish`
+
+This route is used to upload packages to a repository. It requires the API
+key to be provided using the `X-Api-Key` HTTP header. Vieter will parse the
+package's contents & update the repository files accordingly. I find the
+easiest way to use this route is with cURL:
+
+```sh
+curl -XPOST -T "path-to-package.pkg.tar.zst" -H "X-API-KEY: your-api-key" https://example.com/somerepo/publish
+```
+
+Packages are automatically added to the correct arch-repo. If a package type is
+`any`, the package is added to the configured `default_arch`, as well as all
+already present arch-repos. To prevent unnecessary duplication of package
+files, these packages are shared between arch-repos' package directories using
+hard links.
+
+{{< hint info >}}
+**Note**
+Vieter only supports uploading archives compressed using either gzip, zstd or
+xz at the moment.
+{{< /hint >}}
+
+## API
+
+All API routes require the API key to be provided using the `X-Api-Key` header.
+Otherwise, they'll return a status code 401.
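+
+As a rough illustration, the following sketch shows what an authenticated
+request against this API could look like from V, using the `net.http` module.
+It is not a supported client library, just an example (the CLI's own client
+code lives in `src/git/client.v`, & the exact `net.http` calls may differ
+slightly between V versions):
+
+```v
+import net.http
+
+// list_repos fetches the JSON list of Git repos from a Vieter instance.
+// `address` & `api_key` are placeholders you'd substitute yourself.
+fn list_repos(address string, api_key string) ?string {
+	mut req := http.new_request(http.Method.get, '$address/api/repos', '') ?
+	req.add_custom_header('X-Api-Key', api_key) ?
+
+	res := req.do() ?
+
+	return res.text
+}
+```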
+
+### `GET /api/repos`
+
+Returns the current list of Git repositories.
+
+### `GET /api/repos/<id>`
+
+Get the information for the Git repo with the given ID.
+
+### `POST /api/repos?<url>&<branch>&<repo>&<arch>`
+
+Adds a new Git repository with the provided URL, Git branch, target repo &
+comma-separated list of architectures.
+
+### `DELETE /api/repos/<id>`
+
+Deletes the Git repository with the provided ID.
+
+### `PATCH /api/repos/<id>?<url>&<branch>&<repo>&<arch>`
+
+Updates the provided parameters for the repo with the given ID. All arguments
+are optional.
diff --git a/docs/content/builder.md b/docs/content/builder.md
new file mode 100644
index 00000000..6a1bc3ab
--- /dev/null
+++ b/docs/content/builder.md
@@ -0,0 +1,56 @@
+# Builder
+
+Vieter supports a basic build system that allows you to build the packages
+defined using the Git repositories API by running `vieter build`. For
+configuration, see [here](/configuration#builder).
+
+## How it works
+
+The build system works in two stages. First it pulls down the configured base
+image (`archlinux:base-devel` by default), runs `pacman -Syu` & configures a
+non-root build user. It then creates a new Docker image from this container.
+This prevents each build from having to fully update the container's
+repositories. After the image has been created, each repository returned by
+`/api/repos` is built sequentially by starting up a new container with the
+previously created image as a base. Each container goes through the following
+steps:
+
+1. The repository is cloned
+2. `makepkg --nobuild --nodeps` is run to update the `pkgver` variable inside
+   the `PKGBUILD` file
+3. A HEAD request is sent to the Vieter server to check whether the specific
+   version of the package is already present. If it is, the container exits.
+4. `makepkg` is run with `MAKEFLAGS="-j\$(nproc)"`
+5. Each produced package archive is uploaded to the Vieter instance's
+   repository, as defined in the API for that specific Git repo.
+
+## Cron image
+
+The Vieter Docker image contains crond & a cron config that runs `vieter build`
+every night at 3AM. This value is currently hardcoded, but I wish to change
+that down the line (work is in progress). There are also some other caveats you
+should be aware of, namely that the image should be run as root & that the
+healthcheck will always fail, so you might have to disable it. This boils down
+to the following docker-compose file:
+
+```yaml
+version: '3'
+
+services:
+  cron:
+    image: 'chewingbever/vieter:dev'
+    command: crond -f
+    user: root
+
+    healthcheck:
+      disable: true
+
+    environment:
+      - 'VIETER_API_KEY=some-key'
+      - 'VIETER_ADDRESS=https://example.com'
+    volumes:
+      - '/var/run/docker.sock:/var/run/docker.sock'
+```
+
+Important to note is that the container requires the host's Docker socket to be
+mounted, as this is how it spawns the containers that run the builds; the
+change to the container's command is also required.
diff --git a/docs/content/configuration.md b/docs/content/configuration.md
new file mode 100644
index 00000000..df92844c
--- /dev/null
+++ b/docs/content/configuration.md
@@ -0,0 +1,64 @@
+---
+weight: 20
+---
+# Configuration
+
+All vieter operations by default try to read in the TOML file `~/.vieterrc` for
+configuration. The location of this file can be changed by using the `-f` flag.
+
+If the above file doesn't exist or you wish to override some of its settings,
+configuration is also possible using environment variables. Every variable in
+the config file has a respective environment variable of the following form:
+say the variable is called `api_key`, then the respective environment variable
+would be `VIETER_API_KEY`. In essence, it's the variable in uppercase prepended
+with `VIETER_`.
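+
+As a purely illustrative V snippet (not code that ships with Vieter), the
+naming rule boils down to the following:
+
+```v
+// env_var_name maps a config file setting to its environment variable name.
+fn env_var_name(setting string) string {
+	return 'VIETER_' + setting.to_upper()
+}
+
+fn main() {
+	println(env_var_name('api_key')) // VIETER_API_KEY
+}
+```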
+ +If a variable is both present in the config file & as an environment variable, +the value in the environment variable is used. + +{{< hint info >}} +**Note** +All environment variables can also be provided from a file by appending them +with `_FILE`. This allows you, for example, to provide the API key from a +Docker secrets file. +{{< /hint >}} + +## Modes + +The vieter binary can run in several "modes", indicated by the first argument +passed to it. Each mode requires a different configuration. + +### Server + +* `log_level`: defines how verbose the logs should be. Valid values are one of `FATAL`, +  `ERROR`, `WARN`, `INFO` or `DEBUG`. Defaults to `WARN`. +* `log_file`: log file to write logs to. Defaults to `vieter.log` in the +  current directory. +* `pkg_dir`: where Vieter should store the actual package archives. +* `download_dir`: where Vieter should initially download uploaded files. +* `api_key`: the API key to use when authenticating requests. +* `repo_dir`: where Vieter should store the contents of the repository. +* `repos_file`: JSON file where the list of Git repositories is saved. +* `default_arch`: architecture to always add packages of arch `any` to. + +{{< hint info >}} +**Note** +Because Vieter hard links files between `download_dir` & `pkg_dir`, they need +to be on the same file system. +{{< /hint >}} + +### Builder + +* `api_key`: the API key to use when authenticating requests. +* `address`: Base URL of your Vieter instance, e.g. https://example.com +* `base_image`: image to use when building a package. It should be an Archlinux +  image. The default if not configured is `archlinux:base-devel`, but this +  image only supports x86_64. If you require aarch64 support as well, consider +  using +  [`menci/archlinuxarm:base-devel`](https://hub.docker.com/r/menci/archlinuxarm) +  ([GH](https://github.com/Menci/docker-archlinuxarm)) + +### Repos + +* `api_key`: the API key to use when authenticating requests. +* `address`: Base URL of your Vieter instance, e.g. https://example.com diff --git a/docs/content/installation.md b/docs/content/installation.md new file mode 100644 index 00000000..17d3874c --- /dev/null +++ b/docs/content/installation.md @@ -0,0 +1,78 @@ +--- +weight: 10 +--- +# Installation + +## Docker + +Docker is the recommended way to install vieter. The images can be pulled from +[`chewingbever/vieter`](https://hub.docker.com/r/chewingbever/vieter). You can +either pull a release tag (e.g. `chewingbever/vieter:0.1.0-rc1`), or pull the +`chewingbever/vieter:dev` tag. The latter is updated every time a new commit is +pushed to the development branch. This branch will be the most up to date, but +does not give any guarantees about stability, so beware! + +The simplest way to run the Docker image is using a plain Docker command: + +```sh +docker run \ + --rm \ + -d \ + -v /path/to/data:/data \ + -e VIETER_API_KEY=changeme \ + -e VIETER_DEFAULT_ARCH=x86_64 \ + -p 8000:8000 \ + chewingbever/vieter:dev +``` + +Here, you should change `/path/to/data` to the path on your host where you want +vieter to store its files. + +The default configuration will store everything inside the `/data` directory. + +Inside the container, the Vieter server runs on port 8000. This port should be +exposed to the public accordingly.
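If you prefer docker-compose, a rough (untested) sketch equivalent to the command above could look like this; the service name, path & values are just placeholders to adjust to your setup:

```yaml
version: '3'

services:
  server:
    image: 'chewingbever/vieter:dev'

    ports:
      - '8000:8000'
    environment:
      - 'VIETER_API_KEY=changeme'
      - 'VIETER_DEFAULT_ARCH=x86_64'
    volumes:
      - '/path/to/data:/data'
```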
+ +For an overview of how to configure vieter & which environment variables can be +used, see the [Configuration](/configuration) page. + +## Binary + +On the [releases](https://git.rustybever.be/Chewing_Bever/vieter/releases) +page, you can find statically compiled binaries for all released versions. You +can download the binary for your host's architecture & run it that way. + +For more information about configuring the binary, check out the +[Configuration](/configuration) page. + +## Building from source + +Because the project is still in heavy development, it might be useful to build +from source instead. Luckily, this process is very easy. You'll need make, +libarchive & openssl, all of which should be present on an everyday Arch +install. Then, after cloning the repository, you can use the following commands: + +```sh +# Builds the compiler; should usually only be run once. Vieter compiles using +# the default compiler, but I maintain my own mirror to ensure nothing breaks +# without me knowing. +make v + +# Build vieter +# Alternatively, use `make prod` to build the production build. +make +``` +{{< hint info >}} +**Note** +My version of the V compiler is also available on my Vieter instance, +https://arch.r8r.be. It's in the `vieter` repository, with the package being +named `vieter-v`. The compiler is available for both x86_64 & aarch64. +{{< /hint >}} + +## My Vieter instance + +Besides uploading development Docker images, my CI also publishes x86_64 & +aarch64 packages to my personal Vieter instance, https://arch.r8r.be. If you'd +like, you can use this repository as well by adding it to your Pacman +configuration as described [here](/usage#configuring-pacman). Both the +repository & the package are called `vieter`. diff --git a/docs/content/usage.md b/docs/content/usage.md new file mode 100644 index 00000000..06671b4c --- /dev/null +++ b/docs/content/usage.md @@ -0,0 +1,54 @@ +--- +weight: 30 +--- +# Usage + +## Starting the server + +To start a server, either install it using Docker (see +[Installation](/installation)) or run it locally by executing `vieter +server`. See [Configuration](/configuration) for more information about +configuring the binary. + +## Multiple repositories + +Vieter works with multiple repositories. This means that a single Vieter server +can serve multiple repositories in Pacman. It also automatically divides files +with specific architectures among arch-repos. Arch-repos are the actual +repositories you add to your `/etc/pacman.conf` file. See [Configuring +Pacman](/usage#configuring-pacman) below for more info. + +## Adding packages + +Using Vieter is currently very simple. If you wish to add a package to Vieter, +build it using makepkg & POST that file to the `/<repo>/publish` endpoint of +your server. This will add the package to the repository. Authentication +requires you to add the API key as the `X-Api-Key` header. + +All of this can be combined into a simple cURL call: + +``` +curl -XPOST -H "X-API-KEY: your-key" -T some-package.pkg.tar.zst https://example.com/somerepo/publish +``` + +`somerepo` is automatically created if it doesn't exist yet. + +## Configuring Pacman + +Configuring Pacman to use a Vieter instance is very simple. In your +`/etc/pacman.conf` file, add the following lines: + +``` +[vieter] +Server = https://example.com/$repo/$arch +SigLevel = Optional +``` + +Here, you see two important placeholder variables.
`$repo` is replaced by the +name within the square brackets, which in this case would be `vieter`. `$arch` +is replaced by the output of `uname -m`. Because Vieter supports multiple +repositories & architectures per repository, using this notation makes sure you +always use the correct endpoint for fetching files. + +I recommend placing this below all other repository entries, as the order +decides which repository should be used if there's ever a naming conflict. diff --git a/docs/resources/_gen/assets/scss/docs/book.scss_50fc8c04e12a2f59027287995557ceff.content b/docs/resources/_gen/assets/scss/docs/book.scss_50fc8c04e12a2f59027287995557ceff.content new file mode 100644 index 00000000..ed65056d --- /dev/null +++ b/docs/resources/_gen/assets/scss/docs/book.scss_50fc8c04e12a2f59027287995557ceff.content @@ -0,0 +1 @@ +@charset "UTF-8";:root{--gray-100:#f8f9fa;--gray-200:#e9ecef;--gray-500:#adb5bd;--color-link:#0055bb;--color-visited-link:#8440f1;--body-background:white;--body-font-color:black;--icon-filter:none;--hint-color-info:#6bf;--hint-color-warning:#fd6;--hint-color-danger:#f66}@media(prefers-color-scheme:dark){:root{--gray-100:rgba(255, 255, 255, 0.1);--gray-200:rgba(255, 255, 255, 0.2);--gray-500:rgba(255, 255, 255, 0.5);--color-link:#84b2ff;--color-visited-link:#b88dff;--body-background:#343a40;--body-font-color:#e9ecef;--icon-filter:brightness(0) invert(1);--hint-color-info:#6bf;--hint-color-warning:#fd6;--hint-color-danger:#f66}}/*!normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css*/html{line-height:1.15;-webkit-text-size-adjust:100%}body{margin:0}main{display:block}h1{font-size:2em;margin:.67em 0}hr{box-sizing:content-box;height:0;overflow:visible}pre{font-family:monospace,monospace;font-size:1em}a{background-color:transparent}abbr[title]{border-bottom:none;text-decoration:underline;text-decoration:underline dotted}b,strong{font-weight:bolder}code,kbd,samp{font-family:monospace,monospace;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}img{border-style:none}button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;line-height:1.15;margin:0}button,input{overflow:visible}button,select{text-transform:none}button,[type=button],[type=reset],[type=submit]{-webkit-appearance:button}button::-moz-focus-inner,[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner{border-style:none;padding:0}button:-moz-focusring,[type=button]:-moz-focusring,[type=reset]:-moz-focusring,[type=submit]:-moz-focusring{outline:1px dotted ButtonText}fieldset{padding:.35em .75em .625em}legend{box-sizing:border-box;color:inherit;display:table;max-width:100%;padding:0;white-space:normal}progress{vertical-align:baseline}textarea{overflow:auto}[type=checkbox],[type=radio]{box-sizing:border-box;padding:0}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}details{display:block}summary{display:list-item}template{display:none}[hidden]{display:none}.flex{display:flex}.flex-auto{flex:auto}.flex-even{flex:1 
1}.flex-wrap{flex-wrap:wrap}.justify-start{justify-content:flex-start}.justify-end{justify-content:flex-end}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.align-center{align-items:center}.mx-auto{margin:0 auto}.text-center{text-align:center}.text-left{text-align:left}.text-right{text-align:right}.hidden{display:none}input.toggle{height:0;width:0;overflow:hidden;opacity:0;position:absolute}.clearfix::after{content:"";display:table;clear:both}html{font-size:16px;scroll-behavior:smooth;touch-action:manipulation}body{min-width:20rem;color:var(--body-font-color);background:var(--body-background);letter-spacing:.33px;font-weight:400;text-rendering:optimizeLegibility;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;box-sizing:border-box}body *{box-sizing:inherit}h1,h2,h3,h4,h5{font-weight:400}a{text-decoration:none;color:var(--color-link)}img{vertical-align:baseline}:focus{outline-style:auto;outline-color:currentColor;outline-color:-webkit-focus-ring-color}aside nav ul{padding:0;margin:0;list-style:none}aside nav ul li{margin:1em 0;position:relative}aside nav ul a{display:block}aside nav ul a:hover{opacity:.5}aside nav ul ul{padding-inline-start:1rem}ul.pagination{display:flex;justify-content:center;list-style-type:none}ul.pagination .page-item a{padding:1rem}.container{max-width:80rem;margin:0 auto}.book-icon{filter:var(--icon-filter)}.book-brand{margin-top:0;margin-bottom:1rem}.book-brand img{height:1.5em;width:1.5em;margin-inline-end:.5rem}.book-menu{flex:0 0 16rem;font-size:.875rem}.book-menu .book-menu-content{width:16rem;padding:1rem;background:var(--body-background);position:fixed;top:0;bottom:0;overflow-x:hidden;overflow-y:auto}.book-menu a,.book-menu label{color:inherit;cursor:pointer;word-wrap:break-word}.book-menu a.active{color:var(--color-link)}.book-menu input.toggle+label+ul{display:none}.book-menu input.toggle:checked+label+ul{display:block}.book-menu input.toggle+label::after{content:"â–¸"}.book-menu input.toggle:checked+label::after{content:"â–¾"}body[dir=rtl] .book-menu input.toggle+label::after{content:"â—‚"}body[dir=rtl] .book-menu input.toggle:checked+label::after{content:"â–¾"}.book-section-flat{margin:2rem 0}.book-section-flat>a,.book-section-flat>span,.book-section-flat>label{font-weight:bolder}.book-section-flat>ul{padding-inline-start:0}.book-page{min-width:20rem;flex-grow:1;padding:1rem}.book-post{margin-bottom:3rem}.book-header{display:none;margin-bottom:1rem}.book-header label{line-height:0}.book-header img.book-icon{height:1.5em;width:1.5em}.book-search{position:relative;margin:1rem 0;border-bottom:1px solid transparent}.book-search input{width:100%;padding:.5rem;border:0;border-radius:.25rem;background:var(--gray-100);color:var(--body-font-color)}.book-search input:required+.book-search-spinner{display:block}.book-search .book-search-spinner{position:absolute;top:0;margin:.5rem;margin-inline-start:calc(100% - 1.5rem);width:1rem;height:1rem;border:1px solid transparent;border-top-color:var(--body-font-color);border-radius:50%;animation:spin 1s ease infinite}@keyframes spin{100%{transform:rotate(360deg)}}.book-search small{opacity:.5}.book-toc{flex:0 0 16rem;font-size:.75rem}.book-toc .book-toc-content{width:16rem;padding:1rem;position:fixed;top:0;bottom:0;overflow-x:hidden;overflow-y:auto}.book-toc img{height:1em;width:1em}.book-toc nav>ul>li:first-child{margin-top:0}.book-footer{padding-top:1rem;font-size:.875rem}.book-footer 
img{height:1em;width:1em;margin-inline-end:.5rem}.book-comments{margin-top:1rem}.book-languages{margin-block-end:2rem}.book-languages .book-icon{height:1em;width:1em;margin-inline-end:.5em}.book-languages ul{padding-inline-start:1.5em}.book-menu-content,.book-toc-content,.book-page,.book-header aside,.markdown{transition:.2s ease-in-out;transition-property:transform,margin,opacity,visibility;will-change:transform,margin,opacity}@media screen and (max-width:56rem){#menu-control,#toc-control{display:inline}.book-menu{visibility:hidden;margin-inline-start:-16rem;font-size:16px;z-index:1}.book-toc{display:none}.book-header{display:block}#menu-control:focus~main label[for=menu-control]{outline-style:auto;outline-color:currentColor;outline-color:-webkit-focus-ring-color}#menu-control:checked~main .book-menu{visibility:initial}#menu-control:checked~main .book-menu .book-menu-content{transform:translateX(16rem);box-shadow:0 0 .5rem rgba(0,0,0,.1)}#menu-control:checked~main .book-page{opacity:.25}#menu-control:checked~main .book-menu-overlay{display:block;position:absolute;top:0;bottom:0;left:0;right:0}#toc-control:focus~main label[for=toc-control]{outline-style:auto;outline-color:currentColor;outline-color:-webkit-focus-ring-color}#toc-control:checked~main .book-header aside{display:block}body[dir=rtl] #menu-control:checked~main .book-menu .book-menu-content{transform:translateX(-16rem)}}@media screen and (min-width:80rem){.book-page,.book-menu .book-menu-content,.book-toc .book-toc-content{padding:2rem 1rem}}@font-face{font-family:roboto;font-style:normal;font-weight:400;font-display:swap;src:local(""),url(fonts/roboto-v27-latin-regular.woff2)format("woff2"),url(fonts/roboto-v27-latin-regular.woff)format("woff")}@font-face{font-family:roboto;font-style:normal;font-weight:700;font-display:swap;src:local(""),url(fonts/roboto-v27-latin-700.woff2)format("woff2"),url(fonts/roboto-v27-latin-700.woff)format("woff")}@font-face{font-family:roboto mono;font-style:normal;font-weight:400;font-display:swap;src:local(""),url(fonts/roboto-mono-v13-latin-regular.woff2)format("woff2"),url(fonts/roboto-mono-v13-latin-regular.woff)format("woff")}body{font-family:roboto,sans-serif}code{font-family:roboto mono,monospace}@media print{.book-menu,.book-footer,.book-toc{display:none}.book-header,.book-header aside{display:block}main{display:block!important}}.markdown{line-height:1.6}.markdown>:first-child{margin-top:0}.markdown h1,.markdown h2,.markdown h3,.markdown h4,.markdown h5,.markdown h6{font-weight:400;line-height:1;margin-top:1.5em;margin-bottom:1rem}.markdown h1 a.anchor,.markdown h2 a.anchor,.markdown h3 a.anchor,.markdown h4 a.anchor,.markdown h5 a.anchor,.markdown h6 a.anchor{opacity:0;font-size:.75em;vertical-align:middle;text-decoration:none}.markdown h1:hover a.anchor,.markdown h1 a.anchor:focus,.markdown h2:hover a.anchor,.markdown h2 a.anchor:focus,.markdown h3:hover a.anchor,.markdown h3 a.anchor:focus,.markdown h4:hover a.anchor,.markdown h4 a.anchor:focus,.markdown h5:hover a.anchor,.markdown h5 a.anchor:focus,.markdown h6:hover a.anchor,.markdown h6 a.anchor:focus{opacity:initial}.markdown h4,.markdown h5,.markdown h6{font-weight:bolder}.markdown h5{font-size:.875em}.markdown h6{font-size:.75em}.markdown b,.markdown optgroup,.markdown strong{font-weight:bolder}.markdown a{text-decoration:none}.markdown a:hover{text-decoration:underline}.markdown a:visited{color:var(--color-visited-link)}.markdown img{max-width:100%;height:auto}.markdown code{padding:0 
.25rem;background:var(--gray-200);border-radius:.25rem;font-size:.875em}.markdown pre{padding:1rem;background:var(--gray-100);border-radius:.25rem;overflow-x:auto}.markdown pre code{padding:0;background:0 0}.markdown p{word-wrap:break-word}.markdown blockquote{margin:1rem 0;padding:.5rem 1rem .5rem .75rem;border-inline-start:.25rem solid var(--gray-200);border-radius:.25rem}.markdown blockquote :first-child{margin-top:0}.markdown blockquote :last-child{margin-bottom:0}.markdown table{overflow:auto;display:block;border-spacing:0;border-collapse:collapse;margin-top:1rem;margin-bottom:1rem}.markdown table tr th,.markdown table tr td{padding:.5rem 1rem;border:1px solid var(--gray-200)}.markdown table tr:nth-child(2n){background:var(--gray-100)}.markdown hr{height:1px;border:none;background:var(--gray-200)}.markdown ul,.markdown ol{padding-inline-start:2rem}.markdown dl dt{font-weight:bolder;margin-top:1rem}.markdown dl dd{margin-inline-start:0;margin-bottom:1rem}.markdown .highlight table tr td:nth-child(1) pre{margin:0;padding-inline-end:0}.markdown .highlight table tr td:nth-child(2) pre{margin:0;padding-inline-start:0}.markdown details{padding:1rem;border:1px solid var(--gray-200);border-radius:.25rem}.markdown details summary{line-height:1;padding:1rem;margin:-1rem;cursor:pointer}.markdown details[open] summary{margin-bottom:0}.markdown figure{margin:1rem 0}.markdown figure figcaption p{margin-top:0}.markdown-inner>:first-child{margin-top:0}.markdown-inner>:last-child{margin-bottom:0}.markdown .book-expand{margin-top:1rem;margin-bottom:1rem;border:1px solid var(--gray-200);border-radius:.25rem;overflow:hidden}.markdown .book-expand .book-expand-head{background:var(--gray-100);padding:.5rem 1rem;cursor:pointer}.markdown .book-expand .book-expand-content{display:none;padding:1rem}.markdown .book-expand input[type=checkbox]:checked+.book-expand-content{display:block}.markdown .book-tabs{margin-top:1rem;margin-bottom:1rem;border:1px solid var(--gray-200);border-radius:.25rem;overflow:hidden;display:flex;flex-wrap:wrap}.markdown .book-tabs label{display:inline-block;padding:.5rem 1rem;border-bottom:1px transparent;cursor:pointer}.markdown .book-tabs .book-tabs-content{order:999;width:100%;border-top:1px solid var(--gray-100);padding:1rem;display:none}.markdown .book-tabs input[type=radio]:checked+label{border-bottom:1px solid var(--color-link)}.markdown .book-tabs input[type=radio]:checked+label+.book-tabs-content{display:block}.markdown .book-tabs input[type=radio]:focus+label{outline-style:auto;outline-color:currentColor;outline-color:-webkit-focus-ring-color}.markdown .book-columns{margin-left:-1rem;margin-right:-1rem}.markdown .book-columns>div{margin:1rem 0;min-width:10rem;padding:0 1rem}.markdown a.book-btn{display:inline-block;font-size:.875rem;color:var(--color-link);line-height:2rem;padding:0 1rem;border:1px solid var(--color-link);border-radius:.25rem;cursor:pointer}.markdown a.book-btn:hover{text-decoration:none}.markdown .book-hint.info{border-color:#6bf;background-color:rgba(102,187,255,.1)}.markdown .book-hint.warning{border-color:#fd6;background-color:rgba(255,221,102,.1)}.markdown .book-hint.danger{border-color:#f66;background-color:rgba(255,102,102,.1)} \ No newline at end of file diff --git a/docs/resources/_gen/assets/scss/docs/book.scss_50fc8c04e12a2f59027287995557ceff.json b/docs/resources/_gen/assets/scss/docs/book.scss_50fc8c04e12a2f59027287995557ceff.json new file mode 100644 index 00000000..383eb235 --- /dev/null +++ 
b/docs/resources/_gen/assets/scss/docs/book.scss_50fc8c04e12a2f59027287995557ceff.json @@ -0,0 +1 @@ +{"Target":"book.min.97cfda4f5e3c9fa49a2bf8d401f4ddc0eec576c99cdcf6afbec19173200c37db.css","MediaType":"text/css","Data":{"Integrity":"sha256-l8/aT148n6SaK/jUAfTdwO7Fdsmc3PavvsGRcyAMN9s="}} \ No newline at end of file diff --git a/docs/resources/_gen/assets/scss/docs/vieter/book.scss_50fc8c04e12a2f59027287995557ceff.content b/docs/resources/_gen/assets/scss/docs/vieter/book.scss_50fc8c04e12a2f59027287995557ceff.content new file mode 100644 index 00000000..ed65056d --- /dev/null +++ b/docs/resources/_gen/assets/scss/docs/vieter/book.scss_50fc8c04e12a2f59027287995557ceff.content @@ -0,0 +1 @@ +@charset "UTF-8";:root{--gray-100:#f8f9fa;--gray-200:#e9ecef;--gray-500:#adb5bd;--color-link:#0055bb;--color-visited-link:#8440f1;--body-background:white;--body-font-color:black;--icon-filter:none;--hint-color-info:#6bf;--hint-color-warning:#fd6;--hint-color-danger:#f66}@media(prefers-color-scheme:dark){:root{--gray-100:rgba(255, 255, 255, 0.1);--gray-200:rgba(255, 255, 255, 0.2);--gray-500:rgba(255, 255, 255, 0.5);--color-link:#84b2ff;--color-visited-link:#b88dff;--body-background:#343a40;--body-font-color:#e9ecef;--icon-filter:brightness(0) invert(1);--hint-color-info:#6bf;--hint-color-warning:#fd6;--hint-color-danger:#f66}}/*!normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css*/html{line-height:1.15;-webkit-text-size-adjust:100%}body{margin:0}main{display:block}h1{font-size:2em;margin:.67em 0}hr{box-sizing:content-box;height:0;overflow:visible}pre{font-family:monospace,monospace;font-size:1em}a{background-color:transparent}abbr[title]{border-bottom:none;text-decoration:underline;text-decoration:underline dotted}b,strong{font-weight:bolder}code,kbd,samp{font-family:monospace,monospace;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}img{border-style:none}button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;line-height:1.15;margin:0}button,input{overflow:visible}button,select{text-transform:none}button,[type=button],[type=reset],[type=submit]{-webkit-appearance:button}button::-moz-focus-inner,[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner{border-style:none;padding:0}button:-moz-focusring,[type=button]:-moz-focusring,[type=reset]:-moz-focusring,[type=submit]:-moz-focusring{outline:1px dotted ButtonText}fieldset{padding:.35em .75em .625em}legend{box-sizing:border-box;color:inherit;display:table;max-width:100%;padding:0;white-space:normal}progress{vertical-align:baseline}textarea{overflow:auto}[type=checkbox],[type=radio]{box-sizing:border-box;padding:0}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}details{display:block}summary{display:list-item}template{display:none}[hidden]{display:none}.flex{display:flex}.flex-auto{flex:auto}.flex-even{flex:1 1}.flex-wrap{flex-wrap:wrap}.justify-start{justify-content:flex-start}.justify-end{justify-content:flex-end}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.align-center{align-items:center}.mx-auto{margin:0 
auto}.text-center{text-align:center}.text-left{text-align:left}.text-right{text-align:right}.hidden{display:none}input.toggle{height:0;width:0;overflow:hidden;opacity:0;position:absolute}.clearfix::after{content:"";display:table;clear:both}html{font-size:16px;scroll-behavior:smooth;touch-action:manipulation}body{min-width:20rem;color:var(--body-font-color);background:var(--body-background);letter-spacing:.33px;font-weight:400;text-rendering:optimizeLegibility;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;box-sizing:border-box}body *{box-sizing:inherit}h1,h2,h3,h4,h5{font-weight:400}a{text-decoration:none;color:var(--color-link)}img{vertical-align:baseline}:focus{outline-style:auto;outline-color:currentColor;outline-color:-webkit-focus-ring-color}aside nav ul{padding:0;margin:0;list-style:none}aside nav ul li{margin:1em 0;position:relative}aside nav ul a{display:block}aside nav ul a:hover{opacity:.5}aside nav ul ul{padding-inline-start:1rem}ul.pagination{display:flex;justify-content:center;list-style-type:none}ul.pagination .page-item a{padding:1rem}.container{max-width:80rem;margin:0 auto}.book-icon{filter:var(--icon-filter)}.book-brand{margin-top:0;margin-bottom:1rem}.book-brand img{height:1.5em;width:1.5em;margin-inline-end:.5rem}.book-menu{flex:0 0 16rem;font-size:.875rem}.book-menu .book-menu-content{width:16rem;padding:1rem;background:var(--body-background);position:fixed;top:0;bottom:0;overflow-x:hidden;overflow-y:auto}.book-menu a,.book-menu label{color:inherit;cursor:pointer;word-wrap:break-word}.book-menu a.active{color:var(--color-link)}.book-menu input.toggle+label+ul{display:none}.book-menu input.toggle:checked+label+ul{display:block}.book-menu input.toggle+label::after{content:"â–¸"}.book-menu input.toggle:checked+label::after{content:"â–¾"}body[dir=rtl] .book-menu input.toggle+label::after{content:"â—‚"}body[dir=rtl] .book-menu input.toggle:checked+label::after{content:"â–¾"}.book-section-flat{margin:2rem 0}.book-section-flat>a,.book-section-flat>span,.book-section-flat>label{font-weight:bolder}.book-section-flat>ul{padding-inline-start:0}.book-page{min-width:20rem;flex-grow:1;padding:1rem}.book-post{margin-bottom:3rem}.book-header{display:none;margin-bottom:1rem}.book-header label{line-height:0}.book-header img.book-icon{height:1.5em;width:1.5em}.book-search{position:relative;margin:1rem 0;border-bottom:1px solid transparent}.book-search input{width:100%;padding:.5rem;border:0;border-radius:.25rem;background:var(--gray-100);color:var(--body-font-color)}.book-search input:required+.book-search-spinner{display:block}.book-search .book-search-spinner{position:absolute;top:0;margin:.5rem;margin-inline-start:calc(100% - 1.5rem);width:1rem;height:1rem;border:1px solid transparent;border-top-color:var(--body-font-color);border-radius:50%;animation:spin 1s ease infinite}@keyframes spin{100%{transform:rotate(360deg)}}.book-search small{opacity:.5}.book-toc{flex:0 0 16rem;font-size:.75rem}.book-toc .book-toc-content{width:16rem;padding:1rem;position:fixed;top:0;bottom:0;overflow-x:hidden;overflow-y:auto}.book-toc img{height:1em;width:1em}.book-toc nav>ul>li:first-child{margin-top:0}.book-footer{padding-top:1rem;font-size:.875rem}.book-footer img{height:1em;width:1em;margin-inline-end:.5rem}.book-comments{margin-top:1rem}.book-languages{margin-block-end:2rem}.book-languages .book-icon{height:1em;width:1em;margin-inline-end:.5em}.book-languages ul{padding-inline-start:1.5em}.book-menu-content,.book-toc-content,.book-page,.book-header aside,.markdown{transition:.2s 
ease-in-out;transition-property:transform,margin,opacity,visibility;will-change:transform,margin,opacity}@media screen and (max-width:56rem){#menu-control,#toc-control{display:inline}.book-menu{visibility:hidden;margin-inline-start:-16rem;font-size:16px;z-index:1}.book-toc{display:none}.book-header{display:block}#menu-control:focus~main label[for=menu-control]{outline-style:auto;outline-color:currentColor;outline-color:-webkit-focus-ring-color}#menu-control:checked~main .book-menu{visibility:initial}#menu-control:checked~main .book-menu .book-menu-content{transform:translateX(16rem);box-shadow:0 0 .5rem rgba(0,0,0,.1)}#menu-control:checked~main .book-page{opacity:.25}#menu-control:checked~main .book-menu-overlay{display:block;position:absolute;top:0;bottom:0;left:0;right:0}#toc-control:focus~main label[for=toc-control]{outline-style:auto;outline-color:currentColor;outline-color:-webkit-focus-ring-color}#toc-control:checked~main .book-header aside{display:block}body[dir=rtl] #menu-control:checked~main .book-menu .book-menu-content{transform:translateX(-16rem)}}@media screen and (min-width:80rem){.book-page,.book-menu .book-menu-content,.book-toc .book-toc-content{padding:2rem 1rem}}@font-face{font-family:roboto;font-style:normal;font-weight:400;font-display:swap;src:local(""),url(fonts/roboto-v27-latin-regular.woff2)format("woff2"),url(fonts/roboto-v27-latin-regular.woff)format("woff")}@font-face{font-family:roboto;font-style:normal;font-weight:700;font-display:swap;src:local(""),url(fonts/roboto-v27-latin-700.woff2)format("woff2"),url(fonts/roboto-v27-latin-700.woff)format("woff")}@font-face{font-family:roboto mono;font-style:normal;font-weight:400;font-display:swap;src:local(""),url(fonts/roboto-mono-v13-latin-regular.woff2)format("woff2"),url(fonts/roboto-mono-v13-latin-regular.woff)format("woff")}body{font-family:roboto,sans-serif}code{font-family:roboto mono,monospace}@media print{.book-menu,.book-footer,.book-toc{display:none}.book-header,.book-header aside{display:block}main{display:block!important}}.markdown{line-height:1.6}.markdown>:first-child{margin-top:0}.markdown h1,.markdown h2,.markdown h3,.markdown h4,.markdown h5,.markdown h6{font-weight:400;line-height:1;margin-top:1.5em;margin-bottom:1rem}.markdown h1 a.anchor,.markdown h2 a.anchor,.markdown h3 a.anchor,.markdown h4 a.anchor,.markdown h5 a.anchor,.markdown h6 a.anchor{opacity:0;font-size:.75em;vertical-align:middle;text-decoration:none}.markdown h1:hover a.anchor,.markdown h1 a.anchor:focus,.markdown h2:hover a.anchor,.markdown h2 a.anchor:focus,.markdown h3:hover a.anchor,.markdown h3 a.anchor:focus,.markdown h4:hover a.anchor,.markdown h4 a.anchor:focus,.markdown h5:hover a.anchor,.markdown h5 a.anchor:focus,.markdown h6:hover a.anchor,.markdown h6 a.anchor:focus{opacity:initial}.markdown h4,.markdown h5,.markdown h6{font-weight:bolder}.markdown h5{font-size:.875em}.markdown h6{font-size:.75em}.markdown b,.markdown optgroup,.markdown strong{font-weight:bolder}.markdown a{text-decoration:none}.markdown a:hover{text-decoration:underline}.markdown a:visited{color:var(--color-visited-link)}.markdown img{max-width:100%;height:auto}.markdown code{padding:0 .25rem;background:var(--gray-200);border-radius:.25rem;font-size:.875em}.markdown pre{padding:1rem;background:var(--gray-100);border-radius:.25rem;overflow-x:auto}.markdown pre code{padding:0;background:0 0}.markdown p{word-wrap:break-word}.markdown blockquote{margin:1rem 0;padding:.5rem 1rem .5rem .75rem;border-inline-start:.25rem solid 
var(--gray-200);border-radius:.25rem}.markdown blockquote :first-child{margin-top:0}.markdown blockquote :last-child{margin-bottom:0}.markdown table{overflow:auto;display:block;border-spacing:0;border-collapse:collapse;margin-top:1rem;margin-bottom:1rem}.markdown table tr th,.markdown table tr td{padding:.5rem 1rem;border:1px solid var(--gray-200)}.markdown table tr:nth-child(2n){background:var(--gray-100)}.markdown hr{height:1px;border:none;background:var(--gray-200)}.markdown ul,.markdown ol{padding-inline-start:2rem}.markdown dl dt{font-weight:bolder;margin-top:1rem}.markdown dl dd{margin-inline-start:0;margin-bottom:1rem}.markdown .highlight table tr td:nth-child(1) pre{margin:0;padding-inline-end:0}.markdown .highlight table tr td:nth-child(2) pre{margin:0;padding-inline-start:0}.markdown details{padding:1rem;border:1px solid var(--gray-200);border-radius:.25rem}.markdown details summary{line-height:1;padding:1rem;margin:-1rem;cursor:pointer}.markdown details[open] summary{margin-bottom:0}.markdown figure{margin:1rem 0}.markdown figure figcaption p{margin-top:0}.markdown-inner>:first-child{margin-top:0}.markdown-inner>:last-child{margin-bottom:0}.markdown .book-expand{margin-top:1rem;margin-bottom:1rem;border:1px solid var(--gray-200);border-radius:.25rem;overflow:hidden}.markdown .book-expand .book-expand-head{background:var(--gray-100);padding:.5rem 1rem;cursor:pointer}.markdown .book-expand .book-expand-content{display:none;padding:1rem}.markdown .book-expand input[type=checkbox]:checked+.book-expand-content{display:block}.markdown .book-tabs{margin-top:1rem;margin-bottom:1rem;border:1px solid var(--gray-200);border-radius:.25rem;overflow:hidden;display:flex;flex-wrap:wrap}.markdown .book-tabs label{display:inline-block;padding:.5rem 1rem;border-bottom:1px transparent;cursor:pointer}.markdown .book-tabs .book-tabs-content{order:999;width:100%;border-top:1px solid var(--gray-100);padding:1rem;display:none}.markdown .book-tabs input[type=radio]:checked+label{border-bottom:1px solid var(--color-link)}.markdown .book-tabs input[type=radio]:checked+label+.book-tabs-content{display:block}.markdown .book-tabs input[type=radio]:focus+label{outline-style:auto;outline-color:currentColor;outline-color:-webkit-focus-ring-color}.markdown .book-columns{margin-left:-1rem;margin-right:-1rem}.markdown .book-columns>div{margin:1rem 0;min-width:10rem;padding:0 1rem}.markdown a.book-btn{display:inline-block;font-size:.875rem;color:var(--color-link);line-height:2rem;padding:0 1rem;border:1px solid var(--color-link);border-radius:.25rem;cursor:pointer}.markdown a.book-btn:hover{text-decoration:none}.markdown .book-hint.info{border-color:#6bf;background-color:rgba(102,187,255,.1)}.markdown .book-hint.warning{border-color:#fd6;background-color:rgba(255,221,102,.1)}.markdown .book-hint.danger{border-color:#f66;background-color:rgba(255,102,102,.1)} \ No newline at end of file diff --git a/docs/resources/_gen/assets/scss/docs/vieter/book.scss_50fc8c04e12a2f59027287995557ceff.json b/docs/resources/_gen/assets/scss/docs/vieter/book.scss_50fc8c04e12a2f59027287995557ceff.json new file mode 100644 index 00000000..383eb235 --- /dev/null +++ b/docs/resources/_gen/assets/scss/docs/vieter/book.scss_50fc8c04e12a2f59027287995557ceff.json @@ -0,0 +1 @@ +{"Target":"book.min.97cfda4f5e3c9fa49a2bf8d401f4ddc0eec576c99cdcf6afbec19173200c37db.css","MediaType":"text/css","Data":{"Integrity":"sha256-l8/aT148n6SaK/jUAfTdwO7Fdsmc3PavvsGRcyAMN9s="}} \ No newline at end of file diff --git a/docs/themes/hugo-book 
b/docs/themes/hugo-book new file mode 160000 index 00000000..4ef38f3b --- /dev/null +++ b/docs/themes/hugo-book @@ -0,0 +1 @@ +Subproject commit 4ef38f3bbf5dae9a11a711d2ed1ced9294c6af5f From e5d50f3a59561a1b7a7f65d75888fdcd6d3e4b75 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sat, 30 Apr 2022 23:36:52 +0200 Subject: [PATCH 47/67] ci(docs): build & deploy docs in CI --- .woodpecker/.docs.yml | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 .woodpecker/.docs.yml diff --git a/.woodpecker/.docs.yml b/.woodpecker/.docs.yml new file mode 100644 index 00000000..51d49f74 --- /dev/null +++ b/.woodpecker/.docs.yml @@ -0,0 +1,42 @@ +platform: 'linux/amd64' +branches: + exclude: [ main ] + +pipeline: + docs: + image: 'klakegg/hugo:alpine' + group: 'generate' + commands: + - apk add git + - make docs + - 'cd docs/public && tar czvf ../../docs.tar.gz *' + + api-docs: + image: 'chewingbever/vlang:latest' + pull: true + group: 'generate' + commands: + - make api-docs + - 'cd src/_docs && tar czvf ../../api-docs.tar.gz *' + + deploy-docs: + image: 'curlimages/curl' + group: 'deploy' + secrets: + - 'site_api_key' + commands: + - 'curl -XPOST --fail -s -H "Authorization: Bearer $SITE_API_KEY" -T docs.tar.gz https://rustybever.be/api/deploy?dir=vieter-docs' + when: + event: push + branch: dev + + deploy-api-docs: + image: 'curlimages/curl' + group: 'deploy' + secrets: + - 'site_api_key' + commands: + - 'curl -XPOST --fail -s -H "Authorization: Bearer $SITE_API_KEY" -T api-docs.tar.gz https://rustybever.be/api/deploy?dir=vieter-api-docs' + when: + event: push + branch: dev From 1f1aa381e1819e14231bb142ff3155f8c201a047 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 1 May 2022 12:11:07 +0200 Subject: [PATCH 48/67] ci: corrected docs deploy step & re-enabled deploy workflow --- .woodpecker/.deploy.yml | 16 ++++++++++++++++ .woodpecker/.docs.yml | 4 ++-- 2 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 .woodpecker/.deploy.yml diff --git a/.woodpecker/.deploy.yml b/.woodpecker/.deploy.yml new file mode 100644 index 00000000..dd77fb98 --- /dev/null +++ b/.woodpecker/.deploy.yml @@ -0,0 +1,16 @@ +branches: 'dev' +platform: 'linux/amd64' +depends_on: + - 'docker' + +skip_clone: true + +pipeline: + webhooks: + image: 'curlimages/curl' + secrets: + - 'webhook_app' + - 'webhook_cron' + commands: + - 'curl -XPOST -s --fail $WEBHOOK_APP' + - 'curl -XPOST -s --fail $WEBHOOK_CRON' diff --git a/.woodpecker/.docs.yml b/.woodpecker/.docs.yml index 51d49f74..fc525e27 100644 --- a/.woodpecker/.docs.yml +++ b/.woodpecker/.docs.yml @@ -25,7 +25,7 @@ pipeline: secrets: - 'site_api_key' commands: - - 'curl -XPOST --fail -s -H "Authorization: Bearer $SITE_API_KEY" -T docs.tar.gz https://rustybever.be/api/deploy?dir=vieter-docs' + - 'curl -XPOST --fail -s -H "Authorization: Bearer $SITE_API_KEY" -T docs.tar.gz https://rustybever.be/api/deploy?dir=docs-vieter' when: event: push branch: dev @@ -36,7 +36,7 @@ pipeline: secrets: - 'site_api_key' commands: - - 'curl -XPOST --fail -s -H "Authorization: Bearer $SITE_API_KEY" -T api-docs.tar.gz https://rustybever.be/api/deploy?dir=vieter-api-docs' + - 'curl -XPOST --fail -s -H "Authorization: Bearer $SITE_API_KEY" -T api-docs.tar.gz https://rustybever.be/api/deploy?dir=api-docs-vieter' when: event: push branch: dev From b1ac39e2347406600bfed8929b63d89190fffebc Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 1 May 2022 12:44:54 +0200 Subject: [PATCH 49/67] feat: made arch param optional when adding Git repo 
--- src/git/cli.v | 10 +++++----- src/git/client.v | 8 ++++++-- src/server/git.v | 10 +++++++++- 3 files changed, 20 insertions(+), 8 deletions(-) diff --git a/src/git/cli.v b/src/git/cli.v index 53527d52..f7f125c9 100644 --- a/src/git/cli.v +++ b/src/git/cli.v @@ -26,14 +26,14 @@ pub fn cmd() cli.Command { }, cli.Command{ name: 'add' - required_args: 4 - usage: 'url branch repo arch...' + required_args: 3 + usage: 'url branch repo' description: 'Add a new repository.' execute: fn (cmd cli.Command) ? { config_file := cmd.flags.get_string('config-file') ? conf := env.load(config_file) ? - add(conf, cmd.args[0], cmd.args[1], cmd.args[2], cmd.args[3..]) ? + add(conf, cmd.args[0], cmd.args[1], cmd.args[2]) ? } }, cli.Command{ @@ -130,8 +130,8 @@ fn list(conf Config) ? { } // add adds a new repository to the server's list. -fn add(conf Config, url string, branch string, repo string, arch []string) ? { - res := add_repo(conf.address, conf.api_key, url, branch, repo, arch) ? +fn add(conf Config, url string, branch string, repo string) ? { + res := add_repo(conf.address, conf.api_key, url, branch, repo, []) ? println(res.message) } diff --git a/src/git/client.v b/src/git/client.v index a43c9ca0..0ed19b57 100644 --- a/src/git/client.v +++ b/src/git/client.v @@ -35,12 +35,16 @@ pub fn get_repos(address string, api_key string) ?map[string]GitRepo { // add_repo adds a new repo to the server. pub fn add_repo(address string, api_key string, url string, branch string, repo string, arch []string) ?Response { - params := { + mut params := { 'url': url 'branch': branch 'repo': repo - 'arch': arch.join(',') } + + if arch.len > 0 { + params['arch'] = arch.join(',') + } + data := send_request(http.Method.post, address, '/api/repos', api_key, params) ? return data diff --git a/src/server/git.v b/src/server/git.v index 0cba17cb..c136d986 100644 --- a/src/server/git.v +++ b/src/server/git.v @@ -57,7 +57,15 @@ fn (mut app App) post_repo() web.Result { return app.json(http.Status.unauthorized, new_response('Unauthorized.')) } - new_repo := git.repo_from_params(app.query) or { + mut params := app.query.clone() + + // If a repo is created without specifying the arch, we assume it's meant + // for the default architecture. + if 'arch' !in params { + params['arch'] = app.conf.default_arch + } + + new_repo := git.repo_from_params(params) or { return app.json(http.Status.bad_request, new_response(err.msg())) } From 92b8f1fb9321d38498832f4d566a2c1c3803de31 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 1 May 2022 12:52:05 +0200 Subject: [PATCH 50/67] feat(cli): added management of cron schedules --- src/git/cli.v | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/src/git/cli.v b/src/git/cli.v index f7f125c9..9ad2dc36 100644 --- a/src/git/cli.v +++ b/src/git/cli.v @@ -2,6 +2,7 @@ module git import cli import env +import cron.expression { parse_expression } struct Config { address string [required] @@ -74,6 +75,11 @@ pub fn cmd() cli.Command { description: 'Comma-separated list of architectures to build on.' flag: cli.FlagType.string }, + cli.Flag{ + name: 'schedule' + description: 'Cron schedule for repository.' + flag: cli.FlagType.string + }, ] execute: fn (cmd cli.Command) ? { config_file := cmd.flags.get_string('config-file') ? @@ -125,7 +131,7 @@ fn list(conf Config) ? { repos := get_repos(conf.address, conf.api_key) ? 
for id, details in repos { - println('${id[..8]}\t$details.url\t$details.branch\t$details.repo\t$details.arch') + println('${id[..8]}\t$details.url\t$details.branch\t$details.repo\t$details.arch\t$details.schedule') } } @@ -146,6 +152,14 @@ fn remove(conf Config, id_prefix string) ? { // patch patches a given repository with the provided params. fn patch(conf Config, id_prefix string, params map[string]string) ? { + // We check the cron expression first because it's useless to send an + // invalid one to the server. + if 'schedule' in params { + parse_expression(params['schedule']) or { + return error('Invalid cron expression: $err.msg()') + } + } + id := get_repo_id_by_prefix(conf, id_prefix) ? res := patch_repo(conf.address, conf.api_key, id, params) ? From d313c5b786d0cab578aacb158eb7a196d39a9db1 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 1 May 2022 13:06:57 +0200 Subject: [PATCH 51/67] feat(cli): added command to show detailed repo info --- src/git/cli.v | 42 +++++++++++++++++++++++++++++++++--------- 1 file changed, 33 insertions(+), 9 deletions(-) diff --git a/src/git/cli.v b/src/git/cli.v index 9ad2dc36..3a1f2e3a 100644 --- a/src/git/cli.v +++ b/src/git/cli.v @@ -49,6 +49,18 @@ pub fn cmd() cli.Command { remove(conf, cmd.args[0]) ? } }, + cli.Command{ + name: 'info' + required_args: 1 + usage: 'id' + description: 'Show detailed information for the repo matching the ID prefix.' + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? + + info(conf, cmd.args[0]) ? + } + }, cli.Command{ name: 'edit' required_args: 1 @@ -102,16 +114,16 @@ pub fn cmd() cli.Command { } } -// get_repo_id_by_prefix tries to find the repo with the given prefix in its +// get_repo_by_prefix tries to find the repo with the given prefix in its // ID. If multiple or none are found, an error is raised. -fn get_repo_id_by_prefix(conf Config, id_prefix string) ?string { +fn get_repo_by_prefix(conf Config, id_prefix string) ?(string, GitRepo) { repos := get_repos(conf.address, conf.api_key) ? - mut res := []string{} + mut res := map[string]GitRepo{} - for id, _ in repos { + for id, repo in repos { if id.starts_with(id_prefix) { - res << id + res[id] = repo } } @@ -123,7 +135,7 @@ fn get_repo_id_by_prefix(conf Config, id_prefix string) ?string { return error('Multiple repos found for given prefix.') } - return res[0] + return res.keys()[0], res[res.keys()[0]] } // list prints out a list of all repositories. @@ -131,7 +143,7 @@ fn list(conf Config) ? { repos := get_repos(conf.address, conf.api_key) ? for id, details in repos { - println('${id[..8]}\t$details.url\t$details.branch\t$details.repo\t$details.arch\t$details.schedule') + println('${id[..8]}\t$details.url\t$details.branch\t$details.repo') } } @@ -144,7 +156,7 @@ fn add(conf Config, url string, branch string, repo string) ? { // remove removes a repository from the server's list. fn remove(conf Config, id_prefix string) ? { - id := get_repo_id_by_prefix(conf, id_prefix) ? + id, _ := get_repo_by_prefix(conf, id_prefix) ? res := remove_repo(conf.address, conf.api_key, id) ? println(res.message) @@ -160,8 +172,20 @@ fn patch(conf Config, id_prefix string, params map[string]string) ? { } } - id := get_repo_id_by_prefix(conf, id_prefix) ? + id, _ := get_repo_by_prefix(conf, id_prefix) ? res := patch_repo(conf.address, conf.api_key, id, params) ? println(res.message) } + +// info shows detailed information for a given repo. +fn info(conf Config, id_prefix string) ? 
{ + id, repo := get_repo_by_prefix(conf, id_prefix) ? + + println('id: $id') + + $for field in GitRepo.fields { + val := repo.$(field.name) + println('$field.name: $val') + } +} From 5cde3d0235da316ffb6756eca75880212f6a4480 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 1 May 2022 14:51:52 +0200 Subject: [PATCH 52/67] fix(cli): allow empty schedule to clear it --- src/git/cli.v | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/git/cli.v b/src/git/cli.v index 3a1f2e3a..0eff55f0 100644 --- a/src/git/cli.v +++ b/src/git/cli.v @@ -166,7 +166,7 @@ fn remove(conf Config, id_prefix string) ? { fn patch(conf Config, id_prefix string, params map[string]string) ? { // We check the cron expression first because it's useless to send an // invalid one to the server. - if 'schedule' in params { + if 'schedule' in params && params['schedule'] != '' { parse_expression(params['schedule']) or { return error('Invalid cron expression: $err.msg()') } From c5161cac372a2c520f3877e2b42cd7247a542c7e Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 1 May 2022 15:25:21 +0200 Subject: [PATCH 53/67] chore: updated changelog [CI SKIP] --- CHANGELOG.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b4a7609e..abbdc740 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased](https://git.rustybever.be/Chewing_Bever/vieter) -* Moved away from compiler fork +### Changed + +* Switched from compiler fork to fully vanilla compiler mirror + +### Added + +* Implemented own cron daemon for builder + * Build schedule can be configured globally or individually per repository +* Added CLI command to show detailed information per repo + +### Fixed + +* Binary no longer panics when an env var is missing ## [0.2.0](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.2.0) From 03318586edc886cce949c967b4e026c79bd1beda Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Mon, 2 May 2022 07:50:44 +0200 Subject: [PATCH 54/67] feat(cron): added debug log on build_repo failure --- src/cron/daemon/build.v | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/cron/daemon/build.v b/src/cron/daemon/build.v index 5b2e9ccb..067d191e 100644 --- a/src/cron/daemon/build.v +++ b/src/cron/daemon/build.v @@ -77,7 +77,10 @@ fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) { // 0 means success, 1 means failure mut status := 0 - build.build_repo(d.address, d.api_key, d.builder_images.last(), &sb.repo) or { status = 1 } + build.build_repo(d.address, d.api_key, d.builder_images.last(), &sb.repo) or { + d.ldebug('build_repo error: $err.msg()') + status = 1 + } if status == 0 { d.linfo('finished build: $sb.repo.url $sb.repo.branch') From 891a206116dd33b1e4640d6a9867a1a897ce7b21 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 1 May 2022 22:47:00 +0200 Subject: [PATCH 55/67] feat(server): partially migrated repos API to sqlite --- .woodpecker/.build.yml | 2 +- src/db/db.v | 19 +++++++ src/db/git.v | 98 ++++++++++++++++++++++++++++++++++++ src/server/git.v | 109 ++++++++++++++++++++++------------------- src/server/server.v | 5 ++ 5 files changed, 181 insertions(+), 52 deletions(-) create mode 100644 src/db/db.v create mode 100644 src/db/git.v diff --git a/.woodpecker/.build.yml b/.woodpecker/.build.yml index b41a39d5..f9cab001 100644 --- a/.woodpecker/.build.yml +++ b/.woodpecker/.build.yml @@ -23,7 +23,7 @@ pipeline: image: 
'chewingbever/vlang:latest' pull: true environment: - - LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static + - LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3 -static commands: # Apparently this -D is *very* important - CFLAGS='-DGC_THREADS=1' make prod diff --git a/src/db/db.v b/src/db/db.v new file mode 100644 index 00000000..b62fa3b7 --- /dev/null +++ b/src/db/db.v @@ -0,0 +1,19 @@ +module db + +import sqlite + +struct VieterDb { + conn sqlite.DB +} + +pub fn init(db_path string) ?VieterDb { + conn := sqlite.connect(db_path) ? + + sql conn { + create table GitRepo + } + + return VieterDb{ + conn: conn + } +} diff --git a/src/db/git.v b/src/db/git.v new file mode 100644 index 00000000..fca46c66 --- /dev/null +++ b/src/db/git.v @@ -0,0 +1,98 @@ +module db + +struct GitRepoArch { +pub: + id int [primary; sql: serial] + repo_id int + value string +} + +pub struct GitRepo { +pub mut: + id int [optional; primary; sql: serial] + // URL of the Git repository + url string [nonull] + // Branch of the Git repository to use + branch string [nonull] + // Which repo the builder should publish packages to + repo string [nonull] + // Cron schedule describing how frequently to build the repo. + schedule string [optional] + // On which architectures the package is allowed to be built. In reality, + // this controls which builders will periodically build the image. + arch []GitRepoArch [fkey: 'repo_id'] +} + +// patch_from_params patches a GitRepo from a map[string]string, usually +// provided from a web.App's params +pub fn (mut r GitRepo) patch_from_params(params map[string]string) { + $for field in GitRepo.fields { + if field.name in params { + $if field.typ is string { + r.$(field.name) = params[field.name] + // This specific type check is needed for the compiler to ensure + // our types are correct + } $else $if field.typ is []GitRepoArch { + r.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it }) + } + } + } +} + +// repo_from_params creates a GitRepo from a map[string]string, usually +// provided from a web.App's params +pub fn git_repo_from_params(params map[string]string) ?GitRepo { + mut repo := GitRepo{} + + // If we're creating a new GitRepo, we want all fields to be present before + // "patching". + $for field in GitRepo.fields { + if field.name !in params && !field.attrs.contains('optional') { + return error('Missing parameter: ${field.name}.') + } + } + repo.patch_from_params(params) + + return repo +} + +pub fn (db &VieterDb) get_git_repos() []GitRepo { + res := sql db.conn { + select from GitRepo + } + + return res +} + +pub fn (db &VieterDb) get_git_repo(repo_id int) ?GitRepo { + res := sql db.conn { + select from GitRepo where id == repo_id + } + + // If a select statement fails, it returns a zeroed object. By + // checking one of the required fields, we can see whether the query + // returned a result or not. 
+ if res.url == '' { + return none + } + + return res +} + +pub fn (db &VieterDb) add_git_repo(repo GitRepo) { + sql db.conn { + insert repo into GitRepo + } +} + +pub fn (db &VieterDb) delete_git_repo(repo_id int) { + sql db.conn { + delete from GitRepo where id == repo_id + } +} + +pub fn (db &VieterDb) update_git_repo(repo GitRepo) { + /* sql db.conn { */ + /* update GitRepo set repo */ + /* } */ +} diff --git a/src/server/git.v b/src/server/git.v index c136d986..6485ecaf 100644 --- a/src/server/git.v +++ b/src/server/git.v @@ -5,6 +5,7 @@ import git import net.http import rand import response { new_data_response, new_response } +import db const repos_file = 'repos.json' @@ -15,37 +16,39 @@ fn (mut app App) get_repos() web.Result { return app.json(http.Status.unauthorized, new_response('Unauthorized.')) } - repos := rlock app.git_mutex { - git.read_repos(app.conf.repos_file) or { - app.lerror('Failed to read repos file: $err.msg()') + repos := app.db.get_git_repos() + // repos := rlock app.git_mutex { + // git.read_repos(app.conf.repos_file) or { + // app.lerror('Failed to read repos file: $err.msg()') - return app.status(http.Status.internal_server_error) - } - } + // return app.status(http.Status.internal_server_error) + // } + //} return app.json(http.Status.ok, new_data_response(repos)) } // get_single_repo returns the information for a single repo. ['/api/repos/:id'; get] -fn (mut app App) get_single_repo(id string) web.Result { +fn (mut app App) get_single_repo(id int) web.Result { if !app.is_authorized() { return app.json(http.Status.unauthorized, new_response('Unauthorized.')) } - repos := rlock app.git_mutex { - git.read_repos(app.conf.repos_file) or { - app.lerror('Failed to read repos file.') + // repos := rlock app.git_mutex { + // git.read_repos(app.conf.repos_file) or { + // app.lerror('Failed to read repos file.') - return app.status(http.Status.internal_server_error) - } - } + // return app.status(http.Status.internal_server_error) + // } + //} - if id !in repos { - return app.not_found() - } + // if id !in repos { + // return app.not_found() + //} - repo := repos[id] + // repo := repos[id] + repo := app.db.get_git_repo(id) or { return app.not_found() } return app.json(http.Status.ok, new_data_response(repo)) } @@ -65,62 +68,66 @@ fn (mut app App) post_repo() web.Result { params['arch'] = app.conf.default_arch } - new_repo := git.repo_from_params(params) or { + new_repo := db.git_repo_from_params(params) or { return app.json(http.Status.bad_request, new_response(err.msg())) } - id := rand.uuid_v4() + app.db.add_git_repo(new_repo) - mut repos := rlock app.git_mutex { - git.read_repos(app.conf.repos_file) or { - app.lerror('Failed to read repos file.') + // id := rand.uuid_v4() - return app.status(http.Status.internal_server_error) - } - } + // mut repos := rlock app.git_mutex { + // git.read_repos(app.conf.repos_file) or { + // app.lerror('Failed to read repos file.') - // We need to check for duplicates - for _, repo in repos { - if repo == new_repo { - return app.json(http.Status.bad_request, new_response('Duplicate repository.')) - } - } + // return app.status(http.Status.internal_server_error) + // } + //} + // repos := app.db.get_git_repos() - repos[id] = new_repo + //// We need to check for duplicates + // for _, repo in repos { + // if repo == new_repo { + // return app.json(http.Status.bad_request, new_response('Duplicate repository.')) + // } + //} - lock app.git_mutex { - git.write_repos(app.conf.repos_file, &repos) or { - return 
app.status(http.Status.internal_server_error) - } - } + // repos[id] = new_repo + + // lock app.git_mutex { + // git.write_repos(app.conf.repos_file, &repos) or { + // return app.status(http.Status.internal_server_error) + // } + //} return app.json(http.Status.ok, new_response('Repo added successfully.')) } // delete_repo removes a given repo from the server's list. ['/api/repos/:id'; delete] -fn (mut app App) delete_repo(id string) web.Result { +fn (mut app App) delete_repo(id int) web.Result { if !app.is_authorized() { return app.json(http.Status.unauthorized, new_response('Unauthorized.')) } - mut repos := rlock app.git_mutex { - git.read_repos(app.conf.repos_file) or { - app.lerror('Failed to read repos file.') + /* mut repos := rlock app.git_mutex { */ + /* git.read_repos(app.conf.repos_file) or { */ + /* app.lerror('Failed to read repos file.') */ - return app.status(http.Status.internal_server_error) - } - } + /* return app.status(http.Status.internal_server_error) */ + /* } */ + /* } */ - if id !in repos { - return app.not_found() - } + /* if id !in repos { */ + /* return app.not_found() */ + /* } */ - repos.delete(id) + /* repos.delete(id) */ + app.db.delete_git_repo(id) - lock app.git_mutex { - git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) } - } +/* lock app.git_mutex { */ +/* git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) } */ +/* } */ return app.json(http.Status.ok, new_response('Repo removed successfully.')) } diff --git a/src/server/server.v b/src/server/server.v index c4317c5c..751ea9c6 100644 --- a/src/server/server.v +++ b/src/server/server.v @@ -5,6 +5,7 @@ import os import log import repo import util +import db const port = 8000 @@ -16,6 +17,7 @@ pub mut: repo repo.RepoGroupManager [required; web_global] // This is used to claim the file lock on the repos file git_mutex shared util.Dummy + db db.VieterDb } // server starts the web server & starts listening for requests @@ -53,9 +55,12 @@ pub fn server(conf Config) ? 
{ util.exit_with_message(1, 'Failed to create download directory.') } + db := db.init('test.db') or { util.exit_with_message(1, 'Failed to initialize database.') } + web.run(&App{ logger: logger conf: conf repo: repo + db: db }, server.port) } From 0a2488a4dfc54a32f26e8f4341090daa6c466a73 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Mon, 2 May 2022 20:59:53 +0200 Subject: [PATCH 56/67] feat(server): migrated repo patch to sqlite --- src/db/git.v | 49 +++++++++++++++++++++++++++++++++++++++++++++--- src/server/git.v | 34 ++++++++++++++++++++------------- 2 files changed, 67 insertions(+), 16 deletions(-) diff --git a/src/db/git.v b/src/db/git.v index fca46c66..29606e48 100644 --- a/src/db/git.v +++ b/src/db/git.v @@ -1,6 +1,6 @@ module db -struct GitRepoArch { +pub struct GitRepoArch { pub: id int [primary; sql: serial] repo_id int @@ -58,7 +58,7 @@ pub fn git_repo_from_params(params map[string]string) ?GitRepo { pub fn (db &VieterDb) get_git_repos() []GitRepo { res := sql db.conn { - select from GitRepo + select from GitRepo order by id } return res @@ -88,11 +88,54 @@ pub fn (db &VieterDb) add_git_repo(repo GitRepo) { pub fn (db &VieterDb) delete_git_repo(repo_id int) { sql db.conn { delete from GitRepo where id == repo_id + delete from GitRepoArch where repo_id == repo_id } } -pub fn (db &VieterDb) update_git_repo(repo GitRepo) { +pub fn (db &VieterDb) update_git_repo(repo_id int, params map[string]string) { /* sql db.conn { */ /* update GitRepo set repo */ /* } */ + mut values := []string{} + + $for field in GitRepo.fields { + if field.name in params { + // Any fields that are array types require their own update method + $if field.typ is string { + values << "${field.name} = '${params[field.name]}'" + /* r.$(field.name) = params[field.name] */ + // This specific type check is needed for the compiler to ensure + // our types are correct + } + /* $else $if field.typ is []GitRepoArch { */ + /* r.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it }) */ + /* } */ + } + } + + values_str := values.join(', ') + query := "update GitRepo set $values_str where id == $repo_id" + println(query) + db.conn.exec_none(query) + +} + +pub fn (db &VieterDb) update_git_repo_archs(repo_id int, archs []GitRepoArch) { +archs_with_id := archs.map(GitRepoArch{ + ...it + repo_id: repo_id + }) + + sql db.conn { + // Remove all old values + delete from GitRepoArch where repo_id == repo_id + // Insert all the new ones + /* insert archs_with_id into GitRepoArch */ + } + + for arch in archs_with_id { + sql db.conn { + insert arch into GitRepoArch + } + } } diff --git a/src/server/git.v b/src/server/git.v index 6485ecaf..69cce819 100644 --- a/src/server/git.v +++ b/src/server/git.v @@ -134,28 +134,36 @@ fn (mut app App) delete_repo(id int) web.Result { // patch_repo updates a repo's data with the given query params. 
['/api/repos/:id'; patch] -fn (mut app App) patch_repo(id string) web.Result { +fn (mut app App) patch_repo(id int) web.Result { if !app.is_authorized() { return app.json(http.Status.unauthorized, new_response('Unauthorized.')) } - mut repos := rlock app.git_mutex { - git.read_repos(app.conf.repos_file) or { - app.lerror('Failed to read repos file.') + app.db.update_git_repo(id, app.query) - return app.status(http.Status.internal_server_error) + if 'arch' in app.query { + arch_objs := app.query['arch'].split(',').map(db.GitRepoArch{value: it}) + + app.db.update_git_repo_archs(id, arch_objs) } - } - if id !in repos { - return app.not_found() - } +/* mut repos := rlock app.git_mutex { */ +/* git.read_repos(app.conf.repos_file) or { */ +/* app.lerror('Failed to read repos file.') */ - repos[id].patch_from_params(app.query) +/* return app.status(http.Status.internal_server_error) */ +/* } */ +/* } */ - lock app.git_mutex { - git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) } - } +/* if id !in repos { */ +/* return app.not_found() */ +/* } */ + +/* repos[id].patch_from_params(app.query) */ + +/* lock app.git_mutex { */ +/* git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) } */ +/* } */ return app.json(http.Status.ok, new_response('Repo updated successfully.')) } From 7419144f97a955c9d5d67816d483e8da55690d47 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 3 May 2022 16:16:56 +0200 Subject: [PATCH 57/67] feat: removed git.GitRepo type feat(cli): updated to new GitRepo format --- src/build/build.v | 7 +- src/cron/daemon/daemon.v | 18 +++--- src/db/git.v | 53 +++++++++------ src/git/cli.v | 61 +++++++----------- src/git/client.v | 16 +++-- src/git/git.v | 136 +++++++++++++++++++-------------------- src/server/git.v | 60 ++++++++--------- 7 files changed, 179 insertions(+), 172 deletions(-) diff --git a/src/build/build.v b/src/build/build.v index bc604fa1..15a5eb81 100644 --- a/src/build/build.v +++ b/src/build/build.v @@ -5,6 +5,7 @@ import encoding.base64 import time import git import os +import db const container_build_dir = '/build' @@ -75,7 +76,7 @@ pub fn create_build_image(base_image string) ?string { // build_repo builds, packages & publishes a given Arch package based on the // provided GitRepo. The base image ID should be of an image previously created // by create_build_image. -pub fn build_repo(address string, api_key string, base_image_id string, repo &git.GitRepo) ? { +pub fn build_repo(address string, api_key string, base_image_id string, repo &db.GitRepo) ? { build_arch := os.uname().machine // TODO what to do with PKGBUILDs that build multiple packages? @@ -125,11 +126,11 @@ fn build(conf Config) ? { build_arch := os.uname().machine // We get the repos map from the Vieter instance - repos_map := git.get_repos(conf.address, conf.api_key) ? + repos := git.get_repos(conf.address, conf.api_key) ? 
// We filter out any repos that aren't allowed to be built on this // architecture - filtered_repos := repos_map.keys().map(repos_map[it]).filter(it.arch.contains(build_arch)) + filtered_repos := repos.filter(it.arch.map(it.value).contains(build_arch)) // No point in doing work if there's no repos present if filtered_repos.len == 0 { diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 088a24f6..17474940 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -8,6 +8,7 @@ import cron.expression { CronExpression, parse_expression } import math import build import docker +import db // How many seconds to wait before retrying to update API if failed const api_update_retry_timeout = 5 @@ -18,7 +19,7 @@ const rebuild_base_image_retry_timout = 30 struct ScheduledBuild { pub: repo_id string - repo git.GitRepo + repo db.GitRepo timestamp time.Time } @@ -37,7 +38,7 @@ mut: api_update_frequency int image_rebuild_frequency int // Repos currently loaded from API. - repos_map map[string]git.GitRepo + repos []db.GitRepo // At what point to update the list of repositories. api_update_timestamp time.Time image_build_timestamp time.Time @@ -90,7 +91,7 @@ pub fn (mut d Daemon) run() { // haven't been renewed. else { for sb in finished_builds { - d.schedule_build(sb.repo_id, sb.repo) + d.schedule_build(sb.repo) } } @@ -149,11 +150,11 @@ pub fn (mut d Daemon) run() { } // schedule_build adds the next occurence of the given repo build to the queue. -fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) { +fn (mut d Daemon) schedule_build(repo db.GitRepo) { ce := if repo.schedule != '' { parse_expression(repo.schedule) or { // TODO This shouldn't return an error if the expression is empty. - d.lerror("Error while parsing cron expression '$repo.schedule' ($repo_id): $err.msg()") + d.lerror("Error while parsing cron expression '$repo.schedule' (id $repo.id): $err.msg()") d.global_schedule } @@ -168,7 +169,6 @@ fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) { } d.queue.insert(ScheduledBuild{ - repo_id: repo_id repo: repo timestamp: timestamp }) @@ -186,7 +186,7 @@ fn (mut d Daemon) renew_repos() { return } - d.repos_map = new_repos.move() + d.repos = new_repos d.api_update_timestamp = time.now().add_seconds(60 * d.api_update_frequency) } @@ -224,8 +224,8 @@ fn (mut d Daemon) renew_queue() { // For each repository in repos_map, parse their cron expression (or use // the default one if not present) & add them to the queue - for id, repo in d.repos_map { - d.schedule_build(id, repo) + for repo in d.repos { + d.schedule_build(repo) } } diff --git a/src/db/git.v b/src/db/git.v index 29606e48..ac35ff4d 100644 --- a/src/db/git.v +++ b/src/db/git.v @@ -3,8 +3,12 @@ module db pub struct GitRepoArch { pub: id int [primary; sql: serial] - repo_id int - value string + repo_id int [nonull] + value string [nonull] +} + +pub fn (gra &GitRepoArch) str() string { + return gra.value } pub struct GitRepo { @@ -23,6 +27,20 @@ pub mut: arch []GitRepoArch [fkey: 'repo_id'] } +pub fn (gr &GitRepo) str() string { + mut parts := [ + "id: $gr.id", + "url: $gr.url", + "branch: $gr.branch", + "repo: $gr.repo", + "schedule: $gr.schedule", + "arch: ${gr.arch.map(it.value).join(', ')}" + ] + str := parts.join('\n') + + return str +} + // patch_from_params patches a GitRepo from a map[string]string, usually // provided from a web.App's params pub fn (mut r GitRepo) patch_from_params(params map[string]string) { @@ -93,44 +111,39 @@ pub fn (db &VieterDb) delete_git_repo(repo_id 
int) { } pub fn (db &VieterDb) update_git_repo(repo_id int, params map[string]string) { - /* sql db.conn { */ - /* update GitRepo set repo */ - /* } */ + // sql db.conn { + // update GitRepo set repo + //} mut values := []string{} $for field in GitRepo.fields { if field.name in params { // Any fields that are array types require their own update method $if field.typ is string { - values << "${field.name} = '${params[field.name]}'" - /* r.$(field.name) = params[field.name] */ + values << "$field.name = '${params[field.name]}'" + // r.$(field.name) = params[field.name] // This specific type check is needed for the compiler to ensure // our types are correct } - /* $else $if field.typ is []GitRepoArch { */ - /* r.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it }) */ - /* } */ + //$else $if field.typ is []GitRepoArch { + // r.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it }) + //} } } - values_str := values.join(', ') - query := "update GitRepo set $values_str where id == $repo_id" + query := 'update GitRepo set $values_str where id == $repo_id' println(query) db.conn.exec_none(query) - } pub fn (db &VieterDb) update_git_repo_archs(repo_id int, archs []GitRepoArch) { -archs_with_id := archs.map(GitRepoArch{ - ...it - repo_id: repo_id - }) + archs_with_id := archs.map(GitRepoArch{ + ...it + repo_id: repo_id + }) sql db.conn { - // Remove all old values delete from GitRepoArch where repo_id == repo_id - // Insert all the new ones - /* insert archs_with_id into GitRepoArch */ } for arch in archs_with_id { diff --git a/src/git/cli.v b/src/git/cli.v index 0eff55f0..18394925 100644 --- a/src/git/cli.v +++ b/src/git/cli.v @@ -3,6 +3,7 @@ module git import cli import env import cron.expression { parse_expression } +import db { GitRepo, GitRepoArch } struct Config { address string [required] @@ -116,34 +117,13 @@ pub fn cmd() cli.Command { // get_repo_by_prefix tries to find the repo with the given prefix in its // ID. If multiple or none are found, an error is raised. -fn get_repo_by_prefix(conf Config, id_prefix string) ?(string, GitRepo) { - repos := get_repos(conf.address, conf.api_key) ? - - mut res := map[string]GitRepo{} - - for id, repo in repos { - if id.starts_with(id_prefix) { - res[id] = repo - } - } - - if res.len == 0 { - return error('No repo found for given prefix.') - } - - if res.len > 1 { - return error('Multiple repos found for given prefix.') - } - - return res.keys()[0], res[res.keys()[0]] -} // list prints out a list of all repositories. fn list(conf Config) ? { repos := get_repos(conf.address, conf.api_key) ? - for id, details in repos { - println('${id[..8]}\t$details.url\t$details.branch\t$details.repo') + for repo in repos { + println('${repo.id}\t$repo.url\t$repo.branch\t$repo.repo') } } @@ -155,15 +135,18 @@ fn add(conf Config, url string, branch string, repo string) ? { } // remove removes a repository from the server's list. -fn remove(conf Config, id_prefix string) ? { - id, _ := get_repo_by_prefix(conf, id_prefix) ? - res := remove_repo(conf.address, conf.api_key, id) ? +fn remove(conf Config, id string) ? { + // id, _ := get_repo_by_prefix(conf, id_prefix) ? + id_int := id.int() - println(res.message) + if id_int != 0 { + res := remove_repo(conf.address, conf.api_key, id_int) ? + println(res.message) + } } // patch patches a given repository with the provided params. -fn patch(conf Config, id_prefix string, params map[string]string) ? { +fn patch(conf Config, id string, params map[string]string) ? 
{ // We check the cron expression first because it's useless to send an // invalid one to the server. if 'schedule' in params && params['schedule'] != '' { @@ -172,20 +155,22 @@ fn patch(conf Config, id_prefix string, params map[string]string) ? { } } - id, _ := get_repo_by_prefix(conf, id_prefix) ? - res := patch_repo(conf.address, conf.api_key, id, params) ? + id_int := id.int() + if id_int != 0 { + res := patch_repo(conf.address, conf.api_key, id_int, params) ? - println(res.message) + println(res.message) + } } // info shows detailed information for a given repo. -fn info(conf Config, id_prefix string) ? { - id, repo := get_repo_by_prefix(conf, id_prefix) ? +fn info(conf Config, id string) ? { + id_int := id.int() - println('id: $id') - - $for field in GitRepo.fields { - val := repo.$(field.name) - println('$field.name: $val') + if id_int == 0 { + return } + + repo := get_repo(conf.address, conf.api_key, id_int) ? + println(repo) } diff --git a/src/git/client.v b/src/git/client.v index 0ed19b57..d4c5282f 100644 --- a/src/git/client.v +++ b/src/git/client.v @@ -3,6 +3,7 @@ module git import json import response { Response } import net.http +import db // send_request is a convenience method for sending requests to the repos // API. It mostly does string manipulation to create a query string containing @@ -26,8 +27,15 @@ fn send_request(method http.Method, address string, url string, api_key strin } // get_repos returns the current list of repos. -pub fn get_repos(address string, api_key string) ?map[string]GitRepo { - data := send_request(http.Method.get, address, '/api/repos', api_key, +pub fn get_repos(address string, api_key string) ?[]db.GitRepo { + data := send_request<[]db.GitRepo>(http.Method.get, address, '/api/repos', + api_key, {}) ? + + return data.data +} + +pub fn get_repo(address string, api_key string, id int) ?db.GitRepo { + data := send_request(http.Method.get, address, '/api/repos/$id', api_key, {}) ? return data.data @@ -51,7 +59,7 @@ pub fn add_repo(address string, api_key string, url string, branch string, repo } // remove_repo removes the repo with the given ID from the server. -pub fn remove_repo(address string, api_key string, id string) ?Response { +pub fn remove_repo(address string, api_key string, id int) ?Response { data := send_request(http.Method.delete, address, '/api/repos/$id', api_key, {}) ? @@ -60,7 +68,7 @@ pub fn remove_repo(address string, api_key string, id string) ?Response // patch_repo sends a PATCH request to the given repo with the params as // payload. -pub fn patch_repo(address string, api_key string, id string, params map[string]string) ?Response { +pub fn patch_repo(address string, api_key string, id int, params map[string]string) ?Response { data := send_request(http.Method.patch, address, '/api/repos/$id', api_key, params) ? diff --git a/src/git/git.v b/src/git/git.v index 2023f341..7c1c83c9 100644 --- a/src/git/git.v +++ b/src/git/git.v @@ -1,84 +1,84 @@ module git -import os -import json +/* import os */ +/* import json */ -pub struct GitRepo { -pub mut: - // URL of the Git repository - url string - // Branch of the Git repository to use - branch string - // On which architectures the package is allowed to be built. In reality, - // this controls which builders will periodically build the image. - arch []string - // Which repo the builder should publish packages to - repo string - // Cron schedule describing how frequently to build the repo. 
- schedule string [optional] -} +/* pub struct GitRepo { */ +/* pub mut: */ +/* // URL of the Git repository */ +/* url string */ +/* // Branch of the Git repository to use */ +/* branch string */ +/* // On which architectures the package is allowed to be built. In reality, */ +/* // this controls which builders will periodically build the image. */ +/* arch []string */ +/* // Which repo the builder should publish packages to */ +/* repo string */ +/* // Cron schedule describing how frequently to build the repo. */ +/* schedule string [optional] */ +/* } */ -// patch_from_params patches a GitRepo from a map[string]string, usually -// provided from a web.App's params -pub fn (mut r GitRepo) patch_from_params(params map[string]string) { - $for field in GitRepo.fields { - if field.name in params { - $if field.typ is string { - r.$(field.name) = params[field.name] - // This specific type check is needed for the compiler to ensure - // our types are correct - } $else $if field.typ is []string { - r.$(field.name) = params[field.name].split(',') - } - } - } -} +/* // patch_from_params patches a GitRepo from a map[string]string, usually */ +/* // provided from a web.App's params */ +/* pub fn (mut r GitRepo) patch_from_params(params map[string]string) { */ +/* $for field in GitRepo.fields { */ +/* if field.name in params { */ +/* $if field.typ is string { */ +/* r.$(field.name) = params[field.name] */ +/* // This specific type check is needed for the compiler to ensure */ +/* // our types are correct */ +/* } $else $if field.typ is []string { */ +/* r.$(field.name) = params[field.name].split(',') */ +/* } */ +/* } */ +/* } */ +/* } */ -// read_repos reads the provided path & parses it into a map of GitRepo's. -pub fn read_repos(path string) ?map[string]GitRepo { - if !os.exists(path) { - mut f := os.create(path) ? +/* // read_repos reads the provided path & parses it into a map of GitRepo's. */ +/* pub fn read_repos(path string) ?map[string]GitRepo { */ +/* if !os.exists(path) { */ +/* mut f := os.create(path) ? */ - defer { - f.close() - } +/* defer { */ +/* f.close() */ +/* } */ - f.write_string('{}') ? +/* f.write_string('{}') ? */ - return {} - } +/* return {} */ +/* } */ - content := os.read_file(path) ? - res := json.decode(map[string]GitRepo, content) ? +/* content := os.read_file(path) ? */ +/* res := json.decode(map[string]GitRepo, content) ? */ - return res -} +/* return res */ +/* } */ -// write_repos writes a map of GitRepo's back to disk given the provided path. -pub fn write_repos(path string, repos &map[string]GitRepo) ? { - mut f := os.create(path) ? +/* // write_repos writes a map of GitRepo's back to disk given the provided path. */ +/* pub fn write_repos(path string, repos &map[string]GitRepo) ? { */ +/* mut f := os.create(path) ? */ - defer { - f.close() - } +/* defer { */ +/* f.close() */ +/* } */ - value := json.encode(repos) - f.write_string(value) ? -} +/* value := json.encode(repos) */ +/* f.write_string(value) ? */ +/* } */ -// repo_from_params creates a GitRepo from a map[string]string, usually -// provided from a web.App's params -pub fn repo_from_params(params map[string]string) ?GitRepo { - mut repo := GitRepo{} +/* // repo_from_params creates a GitRepo from a map[string]string, usually */ +/* // provided from a web.App's params */ +/* pub fn repo_from_params(params map[string]string) ?GitRepo { */ +/* mut repo := GitRepo{} */ - // If we're creating a new GitRepo, we want all fields to be present before - // "patching". 
- $for field in GitRepo.fields { - if field.name !in params && !field.attrs.contains('optional') { - return error('Missing parameter: ${field.name}.') - } - } - repo.patch_from_params(params) +/* // If we're creating a new GitRepo, we want all fields to be present before */ +/* // "patching". */ +/* $for field in GitRepo.fields { */ +/* if field.name !in params && !field.attrs.contains('optional') { */ +/* return error('Missing parameter: ${field.name}.') */ +/* } */ +/* } */ +/* repo.patch_from_params(params) */ - return repo -} +/* return repo */ +/* } */ diff --git a/src/server/git.v b/src/server/git.v index 69cce819..0389d5fa 100644 --- a/src/server/git.v +++ b/src/server/git.v @@ -110,24 +110,24 @@ fn (mut app App) delete_repo(id int) web.Result { return app.json(http.Status.unauthorized, new_response('Unauthorized.')) } - /* mut repos := rlock app.git_mutex { */ - /* git.read_repos(app.conf.repos_file) or { */ - /* app.lerror('Failed to read repos file.') */ + // mut repos := rlock app.git_mutex { + // git.read_repos(app.conf.repos_file) or { + // app.lerror('Failed to read repos file.') - /* return app.status(http.Status.internal_server_error) */ - /* } */ - /* } */ + // return app.status(http.Status.internal_server_error) + // } + //} - /* if id !in repos { */ - /* return app.not_found() */ - /* } */ + // if id !in repos { + // return app.not_found() + //} - /* repos.delete(id) */ + // repos.delete(id) app.db.delete_git_repo(id) -/* lock app.git_mutex { */ -/* git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) } */ -/* } */ + // lock app.git_mutex { + // git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) } + // } return app.json(http.Status.ok, new_response('Repo removed successfully.')) } @@ -141,29 +141,29 @@ fn (mut app App) patch_repo(id int) web.Result { app.db.update_git_repo(id, app.query) - if 'arch' in app.query { - arch_objs := app.query['arch'].split(',').map(db.GitRepoArch{value: it}) + if 'arch' in app.query { + arch_objs := app.query['arch'].split(',').map(db.GitRepoArch{ value: it }) - app.db.update_git_repo_archs(id, arch_objs) - } + app.db.update_git_repo_archs(id, arch_objs) + } -/* mut repos := rlock app.git_mutex { */ -/* git.read_repos(app.conf.repos_file) or { */ -/* app.lerror('Failed to read repos file.') */ + // mut repos := rlock app.git_mutex { + // git.read_repos(app.conf.repos_file) or { + // app.lerror('Failed to read repos file.') -/* return app.status(http.Status.internal_server_error) */ -/* } */ -/* } */ + // return app.status(http.Status.internal_server_error) + // } + // } -/* if id !in repos { */ -/* return app.not_found() */ -/* } */ + // if id !in repos { + // return app.not_found() + // } -/* repos[id].patch_from_params(app.query) */ + // repos[id].patch_from_params(app.query) -/* lock app.git_mutex { */ -/* git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) } */ -/* } */ + // lock app.git_mutex { + // git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) } + // } return app.json(http.Status.ok, new_response('Repo updated successfully.')) } From c8182737903bf69e859e33bd276d76066dc0bcce Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 3 May 2022 16:54:12 +0200 Subject: [PATCH 58/67] feat: simplified config down to pkg_dir & data_dir BREAKING: downloads are now stored inside the root of pkg_dir, the log file is always stored in the root of data_dir --- Dockerfile | 8 +++----- src/cron/cli.v | 2 +- src/cron/cron.v | 6 +++++- 
src/cron/daemon/build.v | 10 +++++----- src/server/cli.v | 5 +---- src/server/routes.v | 2 +- src/server/server.v | 22 ++++++++++++++-------- vieter.toml | 4 +--- 8 files changed, 31 insertions(+), 28 deletions(-) diff --git a/Dockerfile b/Dockerfile index 58087ad2..2ba61817 100644 --- a/Dockerfile +++ b/Dockerfile @@ -23,7 +23,7 @@ RUN if [ -n "${CI_COMMIT_SHA}" ]; then \ "https://s3.rustybever.be/vieter/commits/${CI_COMMIT_SHA}/vieter-$(echo "${TARGETPLATFORM}" | sed 's:/:-:g')" && \ chmod +x vieter ; \ else \ - LDFLAGS='-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static' make prod && \ + LDFLAGS='-lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3 -static' make prod && \ mv pvieter vieter ; \ fi @@ -31,10 +31,8 @@ RUN if [ -n "${CI_COMMIT_SHA}" ]; then \ FROM busybox:1.35.0 ENV PATH=/bin \ - VIETER_REPOS_DIR=/data/repos \ - VIETER_PKG_DIR=/data/pkgs \ - VIETER_DOWNLOAD_DIR=/data/downloads \ - VIETER_REPOS_FILE=/data/repos.json + VIETER_DATA_DIR=/data \ + VIETER_PKG_DIR=/data/pkgs COPY --from=builder /app/dumb-init /app/vieter /bin/ diff --git a/src/cron/cli.v b/src/cron/cli.v index 24cbe2c7..9536c37c 100644 --- a/src/cron/cli.v +++ b/src/cron/cli.v @@ -6,9 +6,9 @@ import env struct Config { pub: log_level string = 'WARN' - log_file string = 'vieter.log' api_key string address string + data_dir string base_image string = 'archlinux:base-devel' max_concurrent_builds int = 1 api_update_frequency int = 15 diff --git a/src/cron/cron.v b/src/cron/cron.v index e10e4dda..e356faa1 100644 --- a/src/cron/cron.v +++ b/src/cron/cron.v @@ -3,6 +3,9 @@ module cron import log import cron.daemon import cron.expression +import os + +const log_file_name = 'vieter.cron.log' // cron starts a cron daemon & starts periodically scheduling builds. pub fn cron(conf Config) ? { @@ -15,7 +18,8 @@ pub fn cron(conf Config) ? { level: log_level } - logger.set_full_logpath(conf.log_file) + log_file := os.join_path_single(conf.data_dir, cron.log_file_name) + logger.set_full_logpath(log_file) logger.log_to_console_too() ce := expression.parse_expression(conf.global_schedule) or { diff --git a/src/cron/daemon/build.v b/src/cron/daemon/build.v index 067d191e..e54a39e0 100644 --- a/src/cron/daemon/build.v +++ b/src/cron/daemon/build.v @@ -4,11 +4,11 @@ import time import sync.stdatomic import build -const build_empty = 0 - -const build_running = 1 - -const build_done = 2 +const ( + build_empty = 0 + build_running = 1 + build_done = 2 +) // clean_finished_builds removes finished builds from the build slots & returns // them. 
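The commit above collapses the old log_file, download_dir, repos_dir & repos_file settings into a single data_dir from which every runtime path is derived. A minimal sketch of that derivation, assuming the constant names used by server.v and cron.v in this patch; the derive_paths helper itself is illustrative only and not part of the codebase:

module main

import os

const (
	log_file_name = 'vieter.log'
	repo_dir_name = 'repos'
	db_file_name  = 'vieter.sqlite'
)

// derive_paths joins the fixed file names onto the configured data
// directory, mirroring how server() and cron() build their paths with
// os.join_path_single.
fn derive_paths(data_dir string) (string, string, string) {
	log_file := os.join_path_single(data_dir, log_file_name)
	repo_dir := os.join_path_single(data_dir, repo_dir_name)
	db_file := os.join_path_single(data_dir, db_file_name)

	return log_file, repo_dir, db_file
}

A caller would unpack the three paths with `log_file, repo_dir, db_file := derive_paths(conf.data_dir)`. Note that pkg_dir stays a separate setting in this commit, so the package store can still live on its own file system.
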
diff --git a/src/server/cli.v b/src/server/cli.v index bea223d4..4d396661 100644 --- a/src/server/cli.v +++ b/src/server/cli.v @@ -6,12 +6,9 @@ import env struct Config { pub: log_level string = 'WARN' - log_file string = 'vieter.log' pkg_dir string - download_dir string + data_dir string api_key string - repos_dir string - repos_file string default_arch string } diff --git a/src/server/routes.v b/src/server/routes.v index f27afb4d..fbf37dfb 100644 --- a/src/server/routes.v +++ b/src/server/routes.v @@ -68,7 +68,7 @@ fn (mut app App) put_package(repo string) web.Result { if length := app.req.header.get(.content_length) { // Generate a random filename for the temp file - pkg_path = os.join_path_single(app.conf.download_dir, rand.uuid_v4()) + pkg_path = os.join_path_single(app.repo.pkg_dir, rand.uuid_v4()) app.ldebug("Uploading $length bytes (${util.pretty_bytes(length.int())}) to '$pkg_path'.") diff --git a/src/server/server.v b/src/server/server.v index 751ea9c6..28839425 100644 --- a/src/server/server.v +++ b/src/server/server.v @@ -7,7 +7,12 @@ import repo import util import db -const port = 8000 +const ( + port = 8000 + log_file_name = 'vieter.log' + repo_dir_name = 'repos' + db_file_name = 'vieter.sqlite' +) struct App { web.Context @@ -32,11 +37,14 @@ pub fn server(conf Config) ? { util.exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.') } + os.mkdir_all(conf.data_dir) or { util.exit_with_message(1, 'Failed to create data directory.') } + mut logger := log.Log{ level: log_level } - logger.set_full_logpath(conf.log_file) + log_file := os.join_path_single(conf.data_dir, server.log_file_name) + logger.set_full_logpath(log_file) logger.log_to_console_too() defer { @@ -45,17 +53,15 @@ pub fn server(conf Config) ? 
{ logger.close() } + repo_dir := os.join_path_single(conf.data_dir, server.repo_dir_name) // This also creates the directories if needed - repo := repo.new(conf.repos_dir, conf.pkg_dir, conf.default_arch) or { + repo := repo.new(repo_dir, conf.pkg_dir, conf.default_arch) or { logger.error(err.msg()) exit(1) } - os.mkdir_all(conf.download_dir) or { - util.exit_with_message(1, 'Failed to create download directory.') - } - - db := db.init('test.db') or { util.exit_with_message(1, 'Failed to initialize database.') } + db_file := os.join_path_single(conf.data_dir, server.db_file_name) + db := db.init(db_file) or { util.exit_with_message(1, 'Failed to initialize database.') } web.run(&App{ logger: logger diff --git a/vieter.toml b/vieter.toml index fc86d77d..d3922a43 100644 --- a/vieter.toml +++ b/vieter.toml @@ -1,10 +1,8 @@ # This file contains settings used during development api_key = "test" -download_dir = "data/downloads" -repos_dir = "data/repos" +data_dir = "data" pkg_dir = "data/pkgs" log_level = "DEBUG" -repos_file = "data/repos.json" default_arch = "x86_64" address = "http://localhost:8000" From 204144cee8fdf0aa1dcc55cf1990a5449b83fc41 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 3 May 2022 16:55:50 +0200 Subject: [PATCH 59/67] refactor: removed commented code & ran formatter --- src/cron/daemon/daemon.v | 11 +++--- src/db/git.v | 20 ++++------ src/git/cli.v | 3 +- src/git/client.v | 4 +- src/git/git.v | 84 ---------------------------------------- src/server/git.v | 84 ---------------------------------------- src/server/server.v | 4 +- 7 files changed, 17 insertions(+), 193 deletions(-) delete mode 100644 src/git/git.v diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 17474940..35cca5f1 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -10,11 +10,12 @@ import build import docker import db -// How many seconds to wait before retrying to update API if failed -const api_update_retry_timeout = 5 - -// How many seconds to wait before retrying to rebuild image if failed -const rebuild_base_image_retry_timout = 30 +const ( + // How many seconds to wait before retrying to update API if failed + api_update_retry_timeout = 5 + // How many seconds to wait before retrying to rebuild image if failed + rebuild_base_image_retry_timout = 30 +) struct ScheduledBuild { pub: diff --git a/src/db/git.v b/src/db/git.v index ac35ff4d..f4a66f0b 100644 --- a/src/db/git.v +++ b/src/db/git.v @@ -29,12 +29,12 @@ pub mut: pub fn (gr &GitRepo) str() string { mut parts := [ - "id: $gr.id", - "url: $gr.url", - "branch: $gr.branch", - "repo: $gr.repo", - "schedule: $gr.schedule", - "arch: ${gr.arch.map(it.value).join(', ')}" + 'id: $gr.id', + 'url: $gr.url', + 'branch: $gr.branch', + 'repo: $gr.repo', + 'schedule: $gr.schedule', + 'arch: ${gr.arch.map(it.value).join(', ')}', ] str := parts.join('\n') @@ -121,18 +121,12 @@ pub fn (db &VieterDb) update_git_repo(repo_id int, params map[string]string) { // Any fields that are array types require their own update method $if field.typ is string { values << "$field.name = '${params[field.name]}'" - // r.$(field.name) = params[field.name] - // This specific type check is needed for the compiler to ensure - // our types are correct } - //$else $if field.typ is []GitRepoArch { - // r.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it }) - //} } } values_str := values.join(', ') query := 'update GitRepo set $values_str where id == $repo_id' - println(query) + db.conn.exec_none(query) } diff --git 
a/src/git/cli.v b/src/git/cli.v index 18394925..634b7782 100644 --- a/src/git/cli.v +++ b/src/git/cli.v @@ -3,7 +3,6 @@ module git import cli import env import cron.expression { parse_expression } -import db { GitRepo, GitRepoArch } struct Config { address string [required] @@ -123,7 +122,7 @@ fn list(conf Config) ? { repos := get_repos(conf.address, conf.api_key) ? for repo in repos { - println('${repo.id}\t$repo.url\t$repo.branch\t$repo.repo') + println('$repo.id\t$repo.url\t$repo.branch\t$repo.repo') } } diff --git a/src/git/client.v b/src/git/client.v index d4c5282f..f34d2ffd 100644 --- a/src/git/client.v +++ b/src/git/client.v @@ -28,8 +28,8 @@ fn send_request(method http.Method, address string, url string, api_key strin // get_repos returns the current list of repos. pub fn get_repos(address string, api_key string) ?[]db.GitRepo { - data := send_request<[]db.GitRepo>(http.Method.get, address, '/api/repos', - api_key, {}) ? + data := send_request<[]db.GitRepo>(http.Method.get, address, '/api/repos', api_key, + {}) ? return data.data } diff --git a/src/git/git.v b/src/git/git.v deleted file mode 100644 index 7c1c83c9..00000000 --- a/src/git/git.v +++ /dev/null @@ -1,84 +0,0 @@ -module git - -/* import os */ -/* import json */ - -/* pub struct GitRepo { */ -/* pub mut: */ -/* // URL of the Git repository */ -/* url string */ -/* // Branch of the Git repository to use */ -/* branch string */ -/* // On which architectures the package is allowed to be built. In reality, */ -/* // this controls which builders will periodically build the image. */ -/* arch []string */ -/* // Which repo the builder should publish packages to */ -/* repo string */ -/* // Cron schedule describing how frequently to build the repo. */ -/* schedule string [optional] */ -/* } */ - -/* // patch_from_params patches a GitRepo from a map[string]string, usually */ -/* // provided from a web.App's params */ -/* pub fn (mut r GitRepo) patch_from_params(params map[string]string) { */ -/* $for field in GitRepo.fields { */ -/* if field.name in params { */ -/* $if field.typ is string { */ -/* r.$(field.name) = params[field.name] */ -/* // This specific type check is needed for the compiler to ensure */ -/* // our types are correct */ -/* } $else $if field.typ is []string { */ -/* r.$(field.name) = params[field.name].split(',') */ -/* } */ -/* } */ -/* } */ -/* } */ - -/* // read_repos reads the provided path & parses it into a map of GitRepo's. */ -/* pub fn read_repos(path string) ?map[string]GitRepo { */ -/* if !os.exists(path) { */ -/* mut f := os.create(path) ? */ - -/* defer { */ -/* f.close() */ -/* } */ - -/* f.write_string('{}') ? */ - -/* return {} */ -/* } */ - -/* content := os.read_file(path) ? */ -/* res := json.decode(map[string]GitRepo, content) ? */ - -/* return res */ -/* } */ - -/* // write_repos writes a map of GitRepo's back to disk given the provided path. */ -/* pub fn write_repos(path string, repos &map[string]GitRepo) ? { */ -/* mut f := os.create(path) ? */ - -/* defer { */ -/* f.close() */ -/* } */ - -/* value := json.encode(repos) */ -/* f.write_string(value) ? */ -/* } */ - -/* // repo_from_params creates a GitRepo from a map[string]string, usually */ -/* // provided from a web.App's params */ -/* pub fn repo_from_params(params map[string]string) ?GitRepo { */ -/* mut repo := GitRepo{} */ - -/* // If we're creating a new GitRepo, we want all fields to be present before */ -/* // "patching". 
*/ -/* $for field in GitRepo.fields { */ -/* if field.name !in params && !field.attrs.contains('optional') { */ -/* return error('Missing parameter: ${field.name}.') */ -/* } */ -/* } */ -/* repo.patch_from_params(params) */ - -/* return repo */ -/* } */ diff --git a/src/server/git.v b/src/server/git.v index 0389d5fa..6c852b84 100644 --- a/src/server/git.v +++ b/src/server/git.v @@ -1,14 +1,10 @@ module server import web -import git import net.http -import rand import response { new_data_response, new_response } import db -const repos_file = 'repos.json' - // get_repos returns the current list of repos. ['/api/repos'; get] fn (mut app App) get_repos() web.Result { @@ -17,13 +13,6 @@ fn (mut app App) get_repos() web.Result { } repos := app.db.get_git_repos() - // repos := rlock app.git_mutex { - // git.read_repos(app.conf.repos_file) or { - // app.lerror('Failed to read repos file: $err.msg()') - - // return app.status(http.Status.internal_server_error) - // } - //} return app.json(http.Status.ok, new_data_response(repos)) } @@ -35,19 +24,6 @@ fn (mut app App) get_single_repo(id int) web.Result { return app.json(http.Status.unauthorized, new_response('Unauthorized.')) } - // repos := rlock app.git_mutex { - // git.read_repos(app.conf.repos_file) or { - // app.lerror('Failed to read repos file.') - - // return app.status(http.Status.internal_server_error) - // } - //} - - // if id !in repos { - // return app.not_found() - //} - - // repo := repos[id] repo := app.db.get_git_repo(id) or { return app.not_found() } return app.json(http.Status.ok, new_data_response(repo)) @@ -74,32 +50,6 @@ fn (mut app App) post_repo() web.Result { app.db.add_git_repo(new_repo) - // id := rand.uuid_v4() - - // mut repos := rlock app.git_mutex { - // git.read_repos(app.conf.repos_file) or { - // app.lerror('Failed to read repos file.') - - // return app.status(http.Status.internal_server_error) - // } - //} - // repos := app.db.get_git_repos() - - //// We need to check for duplicates - // for _, repo in repos { - // if repo == new_repo { - // return app.json(http.Status.bad_request, new_response('Duplicate repository.')) - // } - //} - - // repos[id] = new_repo - - // lock app.git_mutex { - // git.write_repos(app.conf.repos_file, &repos) or { - // return app.status(http.Status.internal_server_error) - // } - //} - return app.json(http.Status.ok, new_response('Repo added successfully.')) } @@ -110,25 +60,9 @@ fn (mut app App) delete_repo(id int) web.Result { return app.json(http.Status.unauthorized, new_response('Unauthorized.')) } - // mut repos := rlock app.git_mutex { - // git.read_repos(app.conf.repos_file) or { - // app.lerror('Failed to read repos file.') - - // return app.status(http.Status.internal_server_error) - // } - //} - - // if id !in repos { - // return app.not_found() - //} - // repos.delete(id) app.db.delete_git_repo(id) - // lock app.git_mutex { - // git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) } - // } - return app.json(http.Status.ok, new_response('Repo removed successfully.')) } @@ -147,23 +81,5 @@ fn (mut app App) patch_repo(id int) web.Result { app.db.update_git_repo_archs(id, arch_objs) } - // mut repos := rlock app.git_mutex { - // git.read_repos(app.conf.repos_file) or { - // app.lerror('Failed to read repos file.') - - // return app.status(http.Status.internal_server_error) - // } - // } - - // if id !in repos { - // return app.not_found() - // } - - // repos[id].patch_from_params(app.query) - - // lock app.git_mutex { - // 
git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) } - // } - return app.json(http.Status.ok, new_response('Repo updated successfully.')) } diff --git a/src/server/server.v b/src/server/server.v index 28839425..b2a2ad29 100644 --- a/src/server/server.v +++ b/src/server/server.v @@ -20,9 +20,7 @@ pub: conf Config [required; web_global] pub mut: repo repo.RepoGroupManager [required; web_global] - // This is used to claim the file lock on the repos file - git_mutex shared util.Dummy - db db.VieterDb + db db.VieterDb } // server starts the web server & starts listening for requests From 5781796e99543f2f16d24bd210acb26d937d3684 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 3 May 2022 19:55:52 +0200 Subject: [PATCH 60/67] doc: added docstrings to all db/git functions --- CHANGELOG.md | 3 +++ src/cron/cli.v | 2 +- src/db/db.v | 1 + src/db/git.v | 10 +++++++++- src/git/client.v | 1 + 5 files changed, 15 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index abbdc740..f40ce526 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed * Switched from compiler fork to fully vanilla compiler mirror +* `download_dir`, `repos_file` & `repos_dir` config values have been replaced + with `data_dir` +* Storage of metadata (e.g. Git repositories) is now done using Sqlite ### Added diff --git a/src/cron/cli.v b/src/cron/cli.v index 9536c37c..15bc9867 100644 --- a/src/cron/cli.v +++ b/src/cron/cli.v @@ -8,7 +8,7 @@ pub: log_level string = 'WARN' api_key string address string - data_dir string + data_dir string base_image string = 'archlinux:base-devel' max_concurrent_builds int = 1 api_update_frequency int = 15 diff --git a/src/db/db.v b/src/db/db.v index b62fa3b7..a75c34c4 100644 --- a/src/db/db.v +++ b/src/db/db.v @@ -6,6 +6,7 @@ struct VieterDb { conn sqlite.DB } +// init initializes a database & adds the correct tables. pub fn init(db_path string) ?VieterDb { conn := sqlite.connect(db_path) ? diff --git a/src/db/git.v b/src/db/git.v index f4a66f0b..2fb2a92e 100644 --- a/src/db/git.v +++ b/src/db/git.v @@ -7,6 +7,7 @@ pub: value string [nonull] } +// str returns a string representation. pub fn (gra &GitRepoArch) str() string { return gra.value } @@ -27,6 +28,7 @@ pub mut: arch []GitRepoArch [fkey: 'repo_id'] } +// str returns a string representation. pub fn (gr &GitRepo) str() string { mut parts := [ 'id: $gr.id', @@ -57,7 +59,7 @@ pub fn (mut r GitRepo) patch_from_params(params map[string]string) { } } -// repo_from_params creates a GitRepo from a map[string]string, usually +// git_repo_from_params creates a GitRepo from a map[string]string, usually // provided from a web.App's params pub fn git_repo_from_params(params map[string]string) ?GitRepo { mut repo := GitRepo{} @@ -74,6 +76,7 @@ pub fn git_repo_from_params(params map[string]string) ?GitRepo { return repo } +// get_git_repos returns all GitRepo's in the database. pub fn (db &VieterDb) get_git_repos() []GitRepo { res := sql db.conn { select from GitRepo order by id @@ -82,6 +85,7 @@ pub fn (db &VieterDb) get_git_repos() []GitRepo { return res } +// get_git_repo tries to return a specific GitRepo. pub fn (db &VieterDb) get_git_repo(repo_id int) ?GitRepo { res := sql db.conn { select from GitRepo where id == repo_id @@ -97,12 +101,14 @@ pub fn (db &VieterDb) get_git_repo(repo_id int) ?GitRepo { return res } +// add_git_repo inserts the given GitRepo into the database. 
pub fn (db &VieterDb) add_git_repo(repo GitRepo) { sql db.conn { insert repo into GitRepo } } +// delete_git_repo deletes the repo with the given ID from the database. pub fn (db &VieterDb) delete_git_repo(repo_id int) { sql db.conn { delete from GitRepo where id == repo_id @@ -110,6 +116,7 @@ pub fn (db &VieterDb) delete_git_repo(repo_id int) { } } +// update_git_repo updates any non-array values for a given GitRepo. pub fn (db &VieterDb) update_git_repo(repo_id int, params map[string]string) { // sql db.conn { // update GitRepo set repo @@ -130,6 +137,7 @@ pub fn (db &VieterDb) update_git_repo(repo_id int, params map[string]string) { db.conn.exec_none(query) } +// update_git_repo_archs updates a given GitRepo's arch value. pub fn (db &VieterDb) update_git_repo_archs(repo_id int, archs []GitRepoArch) { archs_with_id := archs.map(GitRepoArch{ ...it diff --git a/src/git/client.v b/src/git/client.v index f34d2ffd..b5f8e9fd 100644 --- a/src/git/client.v +++ b/src/git/client.v @@ -34,6 +34,7 @@ pub fn get_repos(address string, api_key string) ?[]db.GitRepo { return data.data } +// get_repo returns the repo for a specific ID. pub fn get_repo(address string, api_key string, id int) ?db.GitRepo { data := send_request(http.Method.get, address, '/api/repos/$id', api_key, {}) ? From b6d5bd3228dd16169e539efcfe516ef0f109c1fd Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 3 May 2022 20:13:28 +0200 Subject: [PATCH 61/67] doc: listed new config variables in docs --- docs/content/configuration.md | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/docs/content/configuration.md b/docs/content/configuration.md index df92844c..ded40cb7 100644 --- a/docs/content/configuration.md +++ b/docs/content/configuration.md @@ -35,18 +35,10 @@ passed to them. Each mode requires a different configuration. * `log_file`: log file to write logs to. Defaults to `vieter.log` in the current directory. * `pkg_dir`: where Vieter should store the actual package archives. -* `download_dir`: where Vieter should initially download uploaded files. +* `data_dir`: where Vieter stores the repositories, log file & database. * `api_key`: the API key to use when authenticating requests. -* `repo_dir`: where Vieter should store the contents of the repository. -* `repos_file`: JSON file where the list of Git repositories is saved * `default_arch`: architecture to always add packages of arch `any` to. -{{< hint info >}} -**Note** -Because Vieter hard links files between `download_dir` & `pkg_dir`, they need -to be on the same file system. -{{< /hint >}} - ### Builder * `api_key`: the API key to use when authenticating requests. @@ -62,3 +54,18 @@ to be on the same file system. * `api_key`: the API key to use when authenticating requests. * `address`: Base your URL of your Vieter instance, e.g. https://example.com + +### Cron + +* `log_level`: defines how much logs to show. Valid values are one of `FATAL`, + `ERROR`, `WARN`, `INFO` or `DEBUG`. Defaults to `WARN` +* `api_key`: the API key to use when authenticating requests. +* `address`: Base your URL of your Vieter instance, e.g. https://example.com. + This *must* be the publicly facing URL of your Vieter instance. +* `data_dir`: where Vieter stores the log file. +* `base_image`: Docker image from which to create the builder images. +* `max_concurrent_builds`: amount of builds to run at once. +* `api_update_frequency`: how frequenty to check for changes in the repo list. 
+* `image_rebuild+frequency`: how frequently to rebuild the builder image +* `global_schedule`: cron schedule to use for any repo without an individual + schedule From 8c5652c2301a55e0c44c07653f628ae7349e6bdf Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 5 May 2022 09:35:19 +0200 Subject: [PATCH 62/67] ci: made build upload failable; updated ci for use with PRs --- .woodpecker/.arch.yml | 4 ++++ .woodpecker/.build.yml | 16 +++++++--------- .woodpecker/.build_experimental.yml | 29 ----------------------------- .woodpecker/.deploy.yml | 4 +++- .woodpecker/.docker.yml | 28 ++++++++++++++++------------ .woodpecker/.gitea.yml | 5 ++--- .woodpecker/.lint.yml | 6 ++++-- .woodpecker/.test.yml | 8 ++++---- src/db/git.v | 4 +--- src/server/git.v | 1 - 10 files changed, 41 insertions(+), 64 deletions(-) delete mode 100644 .woodpecker/.build_experimental.yml diff --git a/.woodpecker/.arch.yml b/.woodpecker/.arch.yml index 742095c0..93c98407 100644 --- a/.woodpecker/.arch.yml +++ b/.woodpecker/.arch.yml @@ -25,6 +25,8 @@ pipeline: # inside the repo - curl -OL https://git.rustybever.be/Chewing_Bever/vieter/raw/branch/dev/PKGBUILD - makepkg -s --noconfirm --needed + when: + event: push publish: image: 'curlimages/curl' @@ -33,3 +35,5 @@ pipeline: - 'for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $VIETER_API_KEY" https://arch.r8r.be/vieter/publish; done' secrets: - vieter_api_key + when: + event: push diff --git a/.woodpecker/.build.yml b/.woodpecker/.build.yml index f9cab001..a3b66ea6 100644 --- a/.woodpecker/.build.yml +++ b/.woodpecker/.build.yml @@ -1,11 +1,8 @@ matrix: PLATFORM: - - linux/amd64 - - linux/arm64 - # I just don't have a performant enough runner for this platform - # - linux/arm/v7 + - 'linux/amd64' + - 'linux/arm64' -# These checks already get performed on the feature branches platform: ${PLATFORM} pipeline: @@ -15,9 +12,9 @@ pipeline: commands: - make when: - event: push + event: [push, pull_request] branch: - exclude: [main, dev] + exclude: [main] prod: image: 'chewingbever/vlang:latest' @@ -35,7 +32,7 @@ pipeline: - strip -s pvieter - du -h pvieter when: - event: push + event: [push, pull_request] upload: image: 'chewingbever/vlang:latest' @@ -52,6 +49,7 @@ pipeline: - > curl --silent + --fail -XPUT -T pvieter -H "Host: $URL" @@ -60,4 +58,4 @@ pipeline: -H "Authorization: AWS $S3_USERNAME:$SIGNATURE" https://$URL$OBJ_PATH when: - event: push + event: [push, pull_request] diff --git a/.woodpecker/.build_experimental.yml b/.woodpecker/.build_experimental.yml deleted file mode 100644 index 0129d2b4..00000000 --- a/.woodpecker/.build_experimental.yml +++ /dev/null @@ -1,29 +0,0 @@ -# These builds are not important for the project, but might be valuable for -# fixing bugs in the V compiler. 
- -platform: linux/amd64 -branches: - exclude: [master, dev] - -pipeline: - autofree: - image: 'chewingbever/vlang:latest' - pull: true - group: 'build' - commands: - - make autofree - - readelf -d afvieter - - du -h afvieter - when: - event: push - - skip-unused: - image: 'chewingbever/vlang:latest' - pull: true - group: 'build' - commands: - - make skip-unused - - readelf -d suvieter - - du -h suvieter - when: - event: push diff --git a/.woodpecker/.deploy.yml b/.woodpecker/.deploy.yml index dd77fb98..8e68641e 100644 --- a/.woodpecker/.deploy.yml +++ b/.woodpecker/.deploy.yml @@ -1,4 +1,4 @@ -branches: 'dev' +branches: [ 'dev' ] platform: 'linux/amd64' depends_on: - 'docker' @@ -14,3 +14,5 @@ pipeline: commands: - 'curl -XPOST -s --fail $WEBHOOK_APP' - 'curl -XPOST -s --fail $WEBHOOK_CRON' + when: + event: push diff --git a/.woodpecker/.docker.yml b/.woodpecker/.docker.yml index 9b605f3f..bab869b4 100644 --- a/.woodpecker/.docker.yml +++ b/.woodpecker/.docker.yml @@ -1,30 +1,34 @@ branches: [main, dev] -platform: linux/amd64 +platform: 'linux/amd64' depends_on: - build pipeline: dev: - image: woodpeckerci/plugin-docker-buildx - secrets: [ docker_username, docker_password ] + image: 'woodpeckerci/plugin-docker-buildx' + secrets: + - 'docker_username' + - 'docker_password' settings: - repo: chewingbever/vieter - tag: dev - platforms: [ linux/arm64/v8, linux/amd64 ] + repo: 'chewingbever/vieter' + tag: 'dev' + platforms: [ 'linux/arm64/v8', 'linux/amd64' ] build_args_from_env: - - CI_COMMIT_SHA + - 'CI_COMMIT_SHA' when: event: push branch: dev release: - image: woodpeckerci/plugin-docker-buildx - secrets: [ docker_username, docker_password ] + image: 'woodpeckerci/plugin-docker-buildx' + secrets: + - 'docker_username' + - 'docker_password' settings: - repo: chewingbever/vieter + repo: 'chewingbever/vieter' auto_tag: true - platforms: [ linux/arm64/v8, linux/amd64 ] + platforms: [ 'linux/arm64/v8', 'linux/amd64' ] build_args_from_env: - - CI_COMMIT_SHA + - 'CI_COMMIT_SHA' when: event: tag diff --git a/.woodpecker/.gitea.yml b/.woodpecker/.gitea.yml index c492d345..18770502 100644 --- a/.woodpecker/.gitea.yml +++ b/.woodpecker/.gitea.yml @@ -1,6 +1,5 @@ -# Yeah so this only works on tags so we'll worry about this later -platform: linux/amd64 -branches: main +platform: 'linux/amd64' +branches: [ 'main' ] depends_on: - build diff --git a/.woodpecker/.lint.yml b/.woodpecker/.lint.yml index b1c16fd8..b13aea4a 100644 --- a/.woodpecker/.lint.yml +++ b/.woodpecker/.lint.yml @@ -1,7 +1,7 @@ # These checks already get performed on the feature branches branches: - exclude: [ main, dev ] -platform: linux/amd64 + exclude: [ main ] +platform: 'linux/amd64' pipeline: lint: @@ -9,3 +9,5 @@ pipeline: pull: true commands: - make lint + when: + event: [ push, pull_request ] diff --git a/.woodpecker/.test.yml b/.woodpecker/.test.yml index 3800cc1d..03375705 100644 --- a/.woodpecker/.test.yml +++ b/.woodpecker/.test.yml @@ -1,10 +1,10 @@ matrix: PLATFORM: - - linux/amd64 - - linux/arm64 + - 'linux/amd64' + - 'linux/arm64' branches: - exclude: [main, dev] + exclude: [ main ] platform: ${PLATFORM} pipeline: @@ -14,4 +14,4 @@ pipeline: commands: - make test when: - event: push + event: [push, pull_request] diff --git a/src/db/git.v b/src/db/git.v index 2fb2a92e..c40086b2 100644 --- a/src/db/git.v +++ b/src/db/git.v @@ -118,11 +118,9 @@ pub fn (db &VieterDb) delete_git_repo(repo_id int) { // update_git_repo updates any non-array values for a given GitRepo. 
pub fn (db &VieterDb) update_git_repo(repo_id int, params map[string]string) { - // sql db.conn { - // update GitRepo set repo - //} mut values := []string{} + // TODO does this allow for SQL injection? $for field in GitRepo.fields { if field.name in params { // Any fields that are array types require their own update method diff --git a/src/server/git.v b/src/server/git.v index 6c852b84..c5cbc0a6 100644 --- a/src/server/git.v +++ b/src/server/git.v @@ -60,7 +60,6 @@ fn (mut app App) delete_repo(id int) web.Result { return app.json(http.Status.unauthorized, new_response('Unauthorized.')) } - // repos.delete(id) app.db.delete_git_repo(id) return app.json(http.Status.ok, new_response('Repo removed successfully.')) From e00813398183bc1d38d936695739f45b6344c160 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 5 May 2022 23:29:08 +0200 Subject: [PATCH 63/67] ci(arch): changed PKGBUILD to new URL --- PKGBUILD | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/PKGBUILD b/PKGBUILD index eb866834..87c575ff 100644 --- a/PKGBUILD +++ b/PKGBUILD @@ -4,12 +4,12 @@ pkgbase='vieter' pkgname='vieter' pkgver=0.2.0.r25.g20112b8 pkgrel=1 -depends=('glibc' 'openssl' 'libarchive' 'gc') +depends=('glibc' 'openssl' 'libarchive' 'gc' 'sqlite') makedepends=('git' 'gcc' 'vieter-v') -arch=('x86_64' 'aarch64' 'armv7') -url='https://git.rustybever.be/Chewing_Bever/vieter' +arch=('x86_64' 'aarch64') +url='https://git.rustybever.be/vieter/vieter' license=('AGPL3') -source=($pkgname::git+https://git.rustybever.be/Chewing_Bever/vieter#branch=dev) +source=($pkgname::git+https://git.rustybever.be/vieter/vieter#branch=dev) md5sums=('SKIP') pkgver() { From 1990ade089856559c21793f1692c929685a361e6 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 5 May 2022 23:30:54 +0200 Subject: [PATCH 64/67] ci: fixed some steps running when not required --- .woodpecker/.build.yml | 2 +- .woodpecker/.lint.yml | 2 +- .woodpecker/.test.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.woodpecker/.build.yml b/.woodpecker/.build.yml index a3b66ea6..16981293 100644 --- a/.woodpecker/.build.yml +++ b/.woodpecker/.build.yml @@ -12,7 +12,7 @@ pipeline: commands: - make when: - event: [push, pull_request] + event: [pull_request] branch: exclude: [main] diff --git a/.woodpecker/.lint.yml b/.woodpecker/.lint.yml index b13aea4a..e70648d8 100644 --- a/.woodpecker/.lint.yml +++ b/.woodpecker/.lint.yml @@ -10,4 +10,4 @@ pipeline: commands: - make lint when: - event: [ push, pull_request ] + event: [ pull_request ] diff --git a/.woodpecker/.test.yml b/.woodpecker/.test.yml index 03375705..6b7b646d 100644 --- a/.woodpecker/.test.yml +++ b/.woodpecker/.test.yml @@ -14,4 +14,4 @@ pipeline: commands: - make test when: - event: [push, pull_request] + event: [pull_request] From 7fdbcdf3e7388ef6139afb5f55d07483609b9c98 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 5 May 2022 23:38:12 +0200 Subject: [PATCH 65/67] ci(arch): also change URL of downloaded PKGBUILD --- .woodpecker/.arch.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.woodpecker/.arch.yml b/.woodpecker/.arch.yml index 93c98407..6b8f8f2f 100644 --- a/.woodpecker/.arch.yml +++ b/.woodpecker/.arch.yml @@ -23,7 +23,7 @@ pipeline: - su builder # Due to a bug with the V compiler, we can't just use the PKGBUILD from # inside the repo - - curl -OL https://git.rustybever.be/Chewing_Bever/vieter/raw/branch/dev/PKGBUILD + - curl -OL https://git.rustybever.be/vieter/vieter/raw/branch/dev/PKGBUILD - makepkg -s --noconfirm 
--needed when: event: push From a3b66801535166cb9bc7b5ee6173a0ba8c121e08 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Fri, 6 May 2022 08:31:59 +0200 Subject: [PATCH 66/67] cron: filter out repos with wrong architecture --- src/cron/daemon/daemon.v | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v index 35cca5f1..ffa2f6e2 100644 --- a/src/cron/daemon/daemon.v +++ b/src/cron/daemon/daemon.v @@ -9,6 +9,7 @@ import math import build import docker import db +import os const ( // How many seconds to wait before retrying to update API if failed @@ -19,7 +20,6 @@ const ( struct ScheduledBuild { pub: - repo_id string repo db.GitRepo timestamp time.Time } @@ -187,6 +187,10 @@ fn (mut d Daemon) renew_repos() { return } + // Filter out any repos that shouldn't run on this architecture + cur_arch := os.uname().machine + new_repos = new_repos.filter(it.arch.any(it.value == cur_arch)) + d.repos = new_repos d.api_update_timestamp = time.now().add_seconds(60 * d.api_update_frequency) From 356a34ab01f7a9ffbb9c36b6fd8c7de6bf8b1cb2 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Fri, 6 May 2022 20:04:48 +0200 Subject: [PATCH 67/67] chore: bumped versions --- CHANGELOG.md | 8 ++++---- src/main.v | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f40ce526..2bbe4f06 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [Unreleased](https://git.rustybever.be/Chewing_Bever/vieter) +## [0.3.0-alpha.1](https://git.rustybever.be/vieter/vieter/src/tag/0.3.0-alpha.1) ### Changed @@ -24,7 +24,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * Binary no longer panics when an env var is missing -## [0.2.0](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.2.0) +## [0.2.0](https://git.rustybever.be/vieter/vieter/src/tag/0.2.0) ### Changed @@ -58,13 +58,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * Packages with unknown fields in .PKGINFO are now allowed * Old packages are now properly removed -## [0.1.0](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.1.0) +## [0.1.0](https://git.rustybever.be/vieter/vieter/src/tag/0.1.0) ### Changed * Improved logging -## [0.1.0-rc.1](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.1.0-rc.1) +## [0.1.0-rc.1](https://git.rustybever.be/vieter/vieter/src/tag/0.1.0-rc.1) ### Added diff --git a/src/main.v b/src/main.v index 37cabc3a..4ba6d30f 100644 --- a/src/main.v +++ b/src/main.v @@ -11,7 +11,7 @@ fn main() { mut app := cli.Command{ name: 'vieter' description: 'Vieter is a lightweight implementation of an Arch repository server.' - version: '0.2.0' + version: '0.3.0-alpha.1' flags: [ cli.Flag{ flag: cli.FlagType.string