Merge pull request 'Better API: BuildLog API & CLI' (#206) from Chewing_Bever/vieter:better-api into dev

Reviewed-on: vieter/vieter#206
Jef Roosens 2022-05-30 23:40:58 +02:00
commit f0565c4168
8 changed files with 210 additions and 22 deletions

View File

@@ -10,6 +10,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added

* Database migrations
+* Improved GitRepo & BuildLog API
+    * Pagination using `limit` & `offset` query params
+    * GitRepo: filter by repo
+    * BuildLog: filter by start & end date, repo, exit code & arch
+* CLI flags to take advantage of above API improvements

## [0.3.0-alpha.2](https://git.rustybever.be/vieter/vieter/src/tag/0.3.0-alpha.2)

View File

@@ -1,13 +1,14 @@
module client

-import models { BuildLog }
+import models { BuildLog, BuildLogFilter }
import net.http { Method }
import response { Response }
import time

// get_build_logs returns all build logs.
-pub fn (c &Client) get_build_logs() ?Response<[]BuildLog> {
-	data := c.send_request<[]BuildLog>(Method.get, '/api/logs', {})?
+pub fn (c &Client) get_build_logs(filter BuildLogFilter) ?Response<[]BuildLog> {
+	params := models.params_from(filter)
+	data := c.send_request<[]BuildLog>(Method.get, '/api/logs', params)?
	return data
}
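A minimal usage sketch of the extended client call; the address, API key and filter values below are hypothetical, while the constructor and method signatures follow the code shown above.

```v
import client
import models { BuildLogFilter }
import time

fn example() ? {
	c := client.new('https://example.com', 'secret-api-key')

	// Second page of 25 results for repo 3, limited to logs started
	// after a made-up timestamp.
	filter := BuildLogFilter{
		limit: 25
		offset: 25
		repo: 3
		after: time.unix(1653868800)
	}

	logs := c.get_build_logs(filter)?.data

	for log in logs {
		println(log)
	}
}
```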

View File

@@ -4,7 +4,8 @@ import cli
import env
import client
import console
-import models { BuildLog }
+import time
+import models { BuildLog, BuildLogFilter }

struct Config {
	address string [required]
@@ -19,21 +20,114 @@ pub fn cmd() cli.Command {
		commands: [
			cli.Command{
				name: 'list'
-				description: 'List the build logs. If a repo ID is provided, only list the build logs for that repo.'
+				description: 'List build logs.'
				flags: [
					cli.Flag{
-						name: 'repo'
-						description: 'ID of the Git repo to restrict list to.'
+						name: 'limit'
+						description: 'How many results to return.'
						flag: cli.FlagType.int
					},
+					cli.Flag{
+						name: 'offset'
+						description: 'Minimum index to return.'
+						flag: cli.FlagType.int
+					},
+					cli.Flag{
+						name: 'repo'
+						description: 'Only return logs for this repo id.'
+						flag: cli.FlagType.int
+					},
+					cli.Flag{
+						name: 'today'
+						description: 'Only list logs started today (UTC time).'
+						flag: cli.FlagType.bool
+					},
+					cli.Flag{
+						name: 'failed'
+						description: 'Only list logs with non-zero exit codes.'
+						flag: cli.FlagType.bool
+					},
+					cli.Flag{
+						name: 'day'
+						description: 'Only list logs started on this day. Format is YYYY-MM-DD.'
+						flag: cli.FlagType.string
+					},
+					cli.Flag{
+						name: 'before'
+						description: 'Only list logs started before this timestamp. Accepts any RFC 3339 date.'
+						flag: cli.FlagType.string
+					},
+					cli.Flag{
+						name: 'after'
+						description: 'Only list logs started after this timestamp. Accepts any RFC 3339 date.'
+						flag: cli.FlagType.string
+					},
				]
				execute: fn (cmd cli.Command) ? {
					config_file := cmd.flags.get_string('config-file')?
					conf := env.load<Config>(config_file)?
-					repo_id := cmd.flags.get_int('repo')?
-					if repo_id == 0 { list(conf)? } else { list_for_repo(conf, repo_id)? }
+					mut filter := BuildLogFilter{}
+					limit := cmd.flags.get_int('limit')?
+					if limit != 0 {
+						filter.limit = u64(limit)
+					}
+					offset := cmd.flags.get_int('offset')?
+					if offset != 0 {
+						filter.offset = u64(offset)
+					}
+					repo_id := cmd.flags.get_int('repo')?
+					if repo_id != 0 {
+						filter.repo = repo_id
+					}
+					if cmd.flags.get_bool('today')? {
+						today := time.now()
+						filter.after = time.new_time(time.Time{
+							year: today.year
+							month: today.month
+							day: today.day
+						})
+						filter.before = filter.after.add_days(1)
+					}
+					// The -today flag overwrites any of the other date flags.
+					else {
+						day_str := cmd.flags.get_string('day')?
+						before_str := cmd.flags.get_string('before')?
+						after_str := cmd.flags.get_string('after')?
+						if day_str != '' {
+							day := time.parse_rfc3339(day_str)?
+							filter.after = time.new_time(time.Time{
+								year: day.year
+								month: day.month
+								day: day.day
+							})
+							filter.before = filter.after.add_days(1)
+						} else {
+							if before_str != '' {
+								filter.before = time.parse_rfc3339(before_str)?
+							}
+							if after_str != '' {
+								filter.after = time.parse_rfc3339(after_str)?
+							}
+						}
+					}
+					if cmd.flags.get_bool('failed')? {
+						filter.exit_codes = [
+							'!0',
+						]
+					}
+					list(conf, filter)?
				}
			},
			cli.Command{
@@ -75,9 +169,9 @@ fn print_log_list(logs []BuildLog) ? {
}

// list prints a list of all build logs.
-fn list(conf Config) ? {
+fn list(conf Config, filter BuildLogFilter) ? {
	c := client.new(conf.address, conf.api_key)
-	logs := c.get_build_logs()?.data
+	logs := c.get_build_logs(filter)?.data
	print_log_list(logs)?
}
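As an illustration of how the new flags combine, passing `-today -failed` to the `list` command builds a filter roughly equivalent to the sketch below. The parent command and binary names are not part of this diff and are omitted; the midnight value depends on the current UTC date.

```v
// Rough equivalent of the filter built for `list -today -failed`.
now := time.now()
midnight := time.new_time(time.Time{
	year: now.year
	month: now.month
	day: now.day
})

filter := BuildLogFilter{
	after: midnight
	before: midnight.add_days(1)
	exit_codes: ['!0']
	// limit keeps its default of 25, offset its default of 0
}
```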

View File

@@ -1,6 +1,7 @@
module db

import sqlite
+import time

struct VieterDb {
	conn sqlite.DB
@@ -66,3 +67,23 @@ pub fn init(db_path string) ?VieterDb {
		conn: conn
	}
}
+// row_into<T> converts an sqlite.Row into a given type T by parsing each field
+// from a string according to its type.
+pub fn row_into<T>(row sqlite.Row) T {
+	mut i := 0
+	mut out := T{}
+	$for field in T.fields {
+		$if field.typ is string {
+			out.$(field.name) = row.vals[i]
+		} $else $if field.typ is int {
+			out.$(field.name) = row.vals[i].int()
+		} $else $if field.typ is time.Time {
+			out.$(field.name) = time.unix(row.vals[i].int())
+		}
+		i += 1
+	}
+	return out
+}
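A minimal sketch of the positional mapping `row_into<T>` performs, using a hypothetical struct and made-up row values; it assumes the code lives alongside `row_into` in the db module, where sqlite and time are already imported.

```v
// Hypothetical struct; row_into assigns row.vals positionally, parsing
// each string according to the field's type.
struct ExampleRow {
mut:
	id      int
	name    string
	created time.Time
}

fn example_row_into() ExampleRow {
	// sqlite rows come back as strings, one entry per selected column.
	row := sqlite.Row{
		vals: ['1', 'some-repo', '1653916800']
	}

	// Yields id == 1, name == 'some-repo', created == time.unix(1653916800).
	return row_into<ExampleRow>(row)
}
```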

View File

@@ -1,13 +1,57 @@
module db

-import models { BuildLog }
+import models { BuildLog, BuildLogFilter }
+import time

// get_build_logs returns all BuildLog's in the database.
-pub fn (db &VieterDb) get_build_logs() []BuildLog {
-	res := sql db.conn {
-		select from BuildLog order by id
+pub fn (db &VieterDb) get_build_logs(filter BuildLogFilter) []BuildLog {
+	mut where_parts := []string{}
+	if filter.repo != 0 {
+		where_parts << 'repo_id == $filter.repo'
	}
+	if filter.before != time.Time{} {
+		where_parts << 'start_time < $filter.before.unix_time()'
+	}
+	if filter.after != time.Time{} {
+		where_parts << 'start_time > $filter.after.unix_time()'
+	}
+	// NOTE: possible SQL injection
+	if filter.arch != '' {
+		where_parts << "arch == '$filter.arch'"
+	}
+	mut parts := []string{}
+	for exp in filter.exit_codes {
+		if exp[0] == `!` {
+			code := exp[1..].int()
+			parts << 'exit_code != $code'
+		} else {
+			code := exp.int()
+			parts << 'exit_code == $code'
+		}
+	}
+	if parts.len > 0 {
+		where_parts << parts.map('($it)').join(' or ')
+	}
+	mut where_str := ''
+	if where_parts.len > 0 {
+		where_str = 'where ' + where_parts.map('($it)').join(' and ')
+	}
+	query := 'select * from BuildLog $where_str limit $filter.limit offset $filter.offset'
+	rows, _ := db.conn.exec(query)
+	res := rows.map(row_into<BuildLog>(it))
	return res
}
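For a concrete picture of the statement this builds, take a hypothetical filter selecting failed logs of one repo:

```v
// Hypothetical filter: failed logs for repo 3, first 10 results.
filter := BuildLogFilter{
	repo: 3
	exit_codes: ['!0']
	limit: 10
}

// get_build_logs(filter) then assembles and executes roughly:
//   select * from BuildLog where (repo_id == 3) and ((exit_code != 0)) limit 10 offset 0
```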

View File

@@ -3,7 +3,7 @@ module models
import time

pub struct BuildLog {
-pub:
+pub mut:
	id         int       [primary; sql: serial]
	repo_id    int       [nonull]
	start_time time.Time [nonull]
@@ -26,3 +26,15 @@ pub fn (bl &BuildLog) str() string {
	return str
}
+[params]
+pub struct BuildLogFilter {
+pub mut:
+	limit      u64 = 25
+	offset     u64
+	repo       int
+	before     time.Time
+	after      time.Time
+	arch       string
+	exit_codes []string
+}

View File

@@ -1,5 +1,7 @@
module models

+import time

// from_params<T> creates a new instance of T from the given map by parsing all
// of its fields from the map.
pub fn from_params<T>(params map[string]string) ?T {
@@ -23,6 +25,10 @@ pub fn patch_from_params<T>(mut o T, params map[string]string) ? {
				o.$(field.name) = params[field.name].u64()
			} $else $if field.typ is []GitRepoArch {
				o.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it })
+			} $else $if field.typ is time.Time {
+				o.$(field.name) = time.unix(params[field.name].int())
+			} $else $if field.typ is []string {
+				o.$(field.name) = params[field.name].split(',')
			}
		} else if field.attrs.contains('nonull') {
			return error('Missing parameter: ${field.name}.')
@@ -35,7 +41,13 @@ pub fn params_from<T>(o &T) map[string]string {
	mut out := map[string]string{}

	$for field in T.fields {
-		out[field.name] = o.$(field.name).str()
+		$if field.typ is time.Time {
+			out[field.name] = o.$(field.name).unix_time().str()
+		} $else $if field.typ is []string {
+			out[field.name] = o.$(field.name).join(',')
+		} $else {
+			out[field.name] = o.$(field.name).str()
+		}
	}

	return out
}
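A short round-trip sketch of the two new cases, assuming the BuildLogFilter defined above and made-up values: time.Time fields travel as unix timestamps, []string fields as comma-separated values.

```v
filter := BuildLogFilter{
	repo: 3
	after: time.unix(1653868800)
	exit_codes: ['!0', '1']
}

params := params_from(filter)
// among other fields:
// params['repo'] == '3'
// params['after'] == '1653868800'
// params['exit_codes'] == '!0,1'

// Parsing the map back restores an equivalent filter.
filter2 := from_params<BuildLogFilter>(params) or { BuildLogFilter{} }
```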

View File

@@ -8,7 +8,7 @@ import db
import time
import os
import util
-import models { BuildLog }
+import models { BuildLog, BuildLogFilter }

// get_logs returns all build logs in the database. A 'repo' query param can
// optionally be added to limit the list of build logs to that repository.
@@ -18,11 +18,10 @@ fn (mut app App) get_logs() web.Result {
		return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
	}

-	logs := if 'repo' in app.query {
-		app.db.get_build_logs_for_repo(app.query['repo'].int())
-	} else {
-		app.db.get_build_logs()
+	filter := models.from_params<BuildLogFilter>(app.query) or {
+		return app.json(http.Status.bad_request, new_response('Invalid query parameters.'))
	}
+	logs := app.db.get_build_logs(filter)

	return app.json(http.Status.ok, new_data_response(logs))
}
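On the wire, the same filtering arrives as plain query parameters named after BuildLogFilter's fields. A sketch of what this handler would see for a hypothetical request `GET /api/logs?repo=3&exit_codes=!0&limit=10` (authentication omitted):

```v
// app.query for the hypothetical request above:
query := {
	'repo':       '3'
	'exit_codes': '!0'
	'limit':      '10'
}

// from_params turns it into the filter handed to the database layer,
// roughly BuildLogFilter{ repo: 3, exit_codes: ['!0'], limit: 10, offset: 0 }.
filter := models.from_params<BuildLogFilter>(query) or { BuildLogFilter{} }
```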