forked from vieter-v/vieter

Lots of restructuring for repo backend

parent 5616e7a4e2
commit d4b7a25c06
@@ -14,3 +14,4 @@ vieter.log
 # External lib; gets added by Makefile
 libarchive-*
 
+test/
@@ -28,3 +28,12 @@ daemon to start builds, which are then uploaded to the server's repository. The
 server also allows for non-agents to upload packages, as long as they have the
 required secrets. This allows me to also develop non-git packages, such as my
 terminal, & upload them to the servers using CI.
+
+## Directory Structure
+
+The data directory consists of three main directories:
+
+* `downloads` - This is where packages are initially downloaded. Because vieter moves files from this folder to the `pkgs` folder, these two folders should ideally be on the same drive.
+* `pkgs` - This is where approved package files are stored.
+* `repos` - Each repository gets a subfolder here.
+    * Each repo subdirectory contains the compressed db & files archives for the repository, alongside a directory called `files` which contains the uncompressed contents.
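As an illustration of this layout, here is a minimal V sketch of how paths under the data directory might be joined. Only the `downloads`, `pkgs` and `repos` names come from the list above; the `data_dir` value, repo name and package filename are hypothetical.

```v
import os

fn main() {
	data_dir := '/var/lib/vieter' // hypothetical data directory
	pkg_file := 'example-1.0.0-1-x86_64.pkg.tar.zst' // hypothetical package file

	// Packages land in downloads first, then get moved to pkgs once approved
	download_path := os.join_path(data_dir, 'downloads', pkg_file)
	pkg_path := os.join_path(data_dir, 'pkgs', pkg_file)

	// Each repository gets its own subfolder under repos, with a 'files' directory
	repo_files_dir := os.join_path(data_dir, 'repos', 'vieter', 'files')

	println(download_path)
	println(pkg_path)
	println(repo_files_dir)
}
```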
@@ -2,7 +2,8 @@ module archive
 
 import os
 
-pub fn pkg_info_string(pkg_path string) ?string {
+// Returns the .PKGINFO file's contents & the list of files.
+pub fn pkg_info(pkg_path string) ?(string, []string) {
 	if !os.is_file(pkg_path) {
 		return error("'$pkg_path' doesn't exist or isn't a file.")
 	}
@@ -26,18 +27,24 @@ pub fn pkg_info_string(pkg_path string) ?string {
 	// We iterate over every header in search of the .PKGINFO one
 	mut buf := voidptr(0)
+	mut files := []string{}
 
 	for C.archive_read_next_header(a, &entry) == C.ARCHIVE_OK {
-		if C.strcmp(C.archive_entry_pathname(entry), c'.PKGINFO') == 0 {
+		pathname := C.archive_entry_pathname(entry)
+
+		unsafe {
+			files << cstring_to_vstring(pathname)
+		}
+
+		if C.strcmp(pathname, c'.PKGINFO') == 0 {
 			size := C.archive_entry_size(entry)
 
 			// TODO can this unsafe block be avoided?
 			buf = unsafe { malloc(size) }
 			C.archive_read_data(a, voidptr(buf), size)
-			break
 		} else {
 			C.archive_read_data_skip(a)
 		}
 	}
 
-	return unsafe { cstring_to_vstring(&char(buf)) }
+	return unsafe { cstring_to_vstring(&char(buf)) }, files
 }
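A short, hedged usage sketch of the new `pkg_info` signature: the package path is illustrative, and error handling follows the same `or` pattern used in `src/main.v`.

```v
import archive

fn main() {
	// pkg_info now returns both the .PKGINFO contents and the archive's file list
	pkg_info_str, files := archive.pkg_info('test/example.pkg.tar.zst') or {
		eprintln(err.msg)
		return
	}

	println(pkg_info_str)
	println('archive contains $files.len files')
}
```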
src/main.v | 88
@@ -4,8 +4,9 @@ import web
 import os
 import log
 import io
-import repo
+import pkg
 import archive
+import repo
 
 const port = 8000
 
@@ -54,59 +55,62 @@ fn reader_to_file(mut reader io.BufferedReader, length int, path string) ? {
 	}
 }
 
-fn main2() {
-	// Configure logger
-	log_level_str := os.getenv_opt('LOG_LEVEL') or { 'WARN' }
-	log_level := log.level_from_tag(log_level_str) or {
-		exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.')
-	}
-	log_file := os.getenv_opt('LOG_FILE') or { 'vieter.log' }
+// fn main2() {
+// 	// Configure logger
+// 	log_level_str := os.getenv_opt('LOG_LEVEL') or { 'WARN' }
+// 	log_level := log.level_from_tag(log_level_str) or {
+// 		exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.')
+// 	}
+// 	log_file := os.getenv_opt('LOG_FILE') or { 'vieter.log' }
 
-	mut logger := log.Log{
-		level: log_level
-	}
+// 	mut logger := log.Log{
+// 		level: log_level
+// 	}
 
-	logger.set_full_logpath(log_file)
-	logger.log_to_console_too()
+// 	logger.set_full_logpath(log_file)
+// 	logger.log_to_console_too()
 
-	defer {
-		logger.info('Flushing log file')
-		logger.flush()
-		logger.close()
-	}
+// 	defer {
+// 		logger.info('Flushing log file')
+// 		logger.flush()
+// 		logger.close()
+// 	}
 
-	// Configure web server
-	key := os.getenv_opt('API_KEY') or { exit_with_message(1, 'No API key was provided.') }
-	repo_dir := os.getenv_opt('REPO_DIR') or {
-		exit_with_message(1, 'No repo directory was configured.')
-	}
+// 	// Configure web server
+// 	key := os.getenv_opt('API_KEY') or { exit_with_message(1, 'No API key was provided.') }
+// 	repo_dir := os.getenv_opt('REPO_DIR') or {
+// 		exit_with_message(1, 'No repo directory was configured.')
+// 	}
 
-	repo := repo.Repo{
-		dir: repo_dir
-		name: db_name
-	}
+// 	repo := repo.Repo{
+// 		dir: repo_dir
+// 		name: db_name
+// 	}
 
-	// We create the upload directory during startup
-	if !os.is_dir(repo.pkg_dir()) {
-		os.mkdir_all(repo.pkg_dir()) or {
-			exit_with_message(2, "Failed to create repo directory '$repo.pkg_dir()'.")
-		}
+// 	// We create the upload directory during startup
+// 	if !os.is_dir(repo.pkg_dir()) {
+// 		os.mkdir_all(repo.pkg_dir()) or {
+// 			exit_with_message(2, "Failed to create repo directory '$repo.pkg_dir()'.")
+// 		}
 
-		logger.info("Created package directory '$repo.pkg_dir()'.")
-	}
+// 		logger.info("Created package directory '$repo.pkg_dir()'.")
+// 	}
 
-	web.run(&App{
-		logger: logger
-		api_key: key
-		repo: repo
-	}, port)
-}
+// 	web.run(&App{
+// 		logger: logger
+// 		api_key: key
+// 		repo: repo
+// 	}, port)
+// }
 
 fn main() {
 	// archive.list_filenames()
-	info := repo.get_pkg_info('test/jjr-joplin-desktop-2.6.10-4-x86_64.pkg.tar.zst') or {
+	res := pkg.read_pkg('test/jjr-joplin-desktop-2.6.10-4-x86_64.pkg.tar.zst') or {
 		eprintln(err.msg)
 		return
 	}
-	println(info)
+	// println(info)
+	println(res.info)
+	print(res.files)
+	println(res.info.to_desc())
 }
@@ -1,8 +1,14 @@
-module repo
+module pkg
 
 import archive
 import time
 
+struct Pkg {
+pub:
+	info  PkgInfo  [required]
+	files []string [required]
+}
+
 struct PkgInfo {
 mut:
 	// Single values
@@ -20,7 +26,6 @@ mut:
 	sha256sum string
 	pgpsig string
 	pgpsigsize i64
 
 	// Array values
 	groups []string
 	licenses []string
@@ -33,13 +38,9 @@ mut:
 	checkdepends []string
 }
 
-pub fn get_pkg_info(pkg_path string) ?PkgInfo {
-	pkg_info_str := archive.pkg_info_string(pkg_path) ?
-
+fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo {
 	mut pkg_info := PkgInfo{}
 
-	mut i := 0
-	mut j := 0
-
 	// Iterate over the entire string
 	for line in pkg_info_str.split_into_lines() {
 		// Skip any comment lines
@@ -71,7 +72,6 @@ pub fn get_pkg_info(pkg_path string) ?PkgInfo {
 			'sha256sum' { pkg_info.sha256sum = value }
 			'pgpsig' { pkg_info.pgpsig = value }
 			'pgpsigsize' { pkg_info.pgpsigsize = value.int() }
 
 			// Array values
 			'group' { pkg_info.groups << value }
 			'license' { pkg_info.licenses << value }
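To make the key/value mapping above concrete, here is a small sketch of splitting one `.PKGINFO`-style line. The sample line is made up, and the exact delimiter the parser uses isn't visible in this hunk, so `' = '` is an assumption.

```v
fn main() {
	line := 'pkgname = jjr-joplin-desktop' // illustrative .PKGINFO line

	// Comment lines in .PKGINFO start with '#' and are skipped by the parser
	if line.starts_with('#') {
		return
	}

	// Split into a key and a value at the first separator (assumed here)
	parts := line.split_nth(' = ', 2)
	if parts.len == 2 {
		println("key: '${parts[0]}', value: '${parts[1]}'")
	}
}
```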
@@ -88,3 +88,21 @@ pub fn get_pkg_info(pkg_path string) ?PkgInfo {
 
 	return pkg_info
 }
+
+pub fn read_pkg(pkg_path string) ?Pkg {
+	pkg_info_str, files := archive.pkg_info(pkg_path) ?
+	pkg_info := parse_pkg_info_string(pkg_info_str) ?
+
+	return Pkg{
+		info: pkg_info
+		files: files
+	}
+}
+
+// Represent a PkgInfo struct as a desc file
+pub fn (p &PkgInfo) to_desc() string {
+	// TODO calculate md5 & sha256 instead of believing the file
+	mut desc := ''
+
+	return desc
+}
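`to_desc` is still a stub in this commit. Below is only a hedged sketch of the `%FIELD%` block layout a pacman-style `desc` file uses, with a hypothetical helper and hard-coded values; it is not the commit's implementation.

```v
// Hypothetical helper: one desc entry looks like "%NAME%\nvalue\n\n"
fn format_entry(key string, value string) string {
	if value == '' {
		return ''
	}
	return '%$key%\n$value\n\n'
}

fn main() {
	mut desc := ''
	desc += format_entry('NAME', 'jjr-joplin-desktop')
	desc += format_entry('VERSION', '2.6.10-4')
	print(desc)
}
```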
@@ -11,23 +11,35 @@ pub struct Dummy {
 	x int
 }
 
-// Handles management of a repository. Package files are stored in '$dir/pkgs'
-// & moved there if necessary.
+// This struct manages a single repository.
 pub struct Repo {
 mut:
 	mutex shared Dummy
 pub:
-	dir  string [required]
-	name string [required]
+	// Where to store repository files; should exist
+	repo_dir string [required]
+	// Where to find packages; packages are expected to all be in the same directory
+	pkg_dir string [required]
 }
 
-pub fn (r &Repo) pkg_dir() string {
-	return os.join_path_single(r.dir, repo.pkgs_subpath)
+// Returns whether the repository contains the given package.
+pub fn (r &Repo) contains(pkg string) bool {
+	return os.exists(os.join_path(r.repo_dir, 'files', pkg))
+}
+
+// Adds the given package to the repo. If false, the package was already
+// present in the repository.
+pub fn (r &Repo) add(pkg string) ?bool {
+	return false
+}
+
+// Re-generate the db & files archives.
+fn (r &Repo) generate() ? {
 }
 
 // Returns path to the given package, prepended with the repo's path.
 pub fn (r &Repo) pkg_path(pkg string) string {
-	return os.join_path(r.dir, repo.pkgs_subpath, pkg)
+	return os.join_path_single(r.pkg_dir, pkg)
 }
 
 pub fn (r &Repo) exists(pkg string) bool {
@@ -36,7 +48,7 @@ pub fn (r &Repo) exists(pkg string) bool {
 
 // Returns the full path to the database file
 pub fn (r &Repo) db_path() string {
-	return os.join_path_single(r.dir, '${r.name}.tar.gz')
+	return os.join_path_single(r.repo_dir, 'repo.tar.gz')
 }
 
 pub fn (r &Repo) add_package(pkg_path string) ? {
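A minimal sketch of constructing and querying the reworked `Repo` struct, assuming the `repo` module is imported as in `src/main.v`; the directory values and package name are placeholders.

```v
import repo

fn main() {
	r := repo.Repo{
		repo_dir: '/var/lib/vieter/repos/vieter' // placeholder paths
		pkg_dir: '/var/lib/vieter/pkgs'
	}

	pkg := 'example-1.0.0-1-x86_64.pkg.tar.zst'
	println(r.contains(pkg)) // looks for <repo_dir>/files/<pkg>
	println(r.pkg_path(pkg)) // <pkg_dir>/<pkg>
	println(r.db_path()) // <repo_dir>/repo.tar.gz
}
```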
src/routes.v | 109
@@ -28,62 +28,67 @@ fn (mut app App) get_root(filename string) web.Result {
 	mut full_path := ''
 
 	if is_pkg_name(filename) {
-		full_path = os.join_path_single(app.repo.pkg_dir(), filename)
+		full_path = os.join_path_single(app.repo.pkg_dir, filename)
 	} else {
-		full_path = os.join_path_single(app.repo.dir, filename)
+		full_path = os.join_path_single(app.repo.repo_dir, filename)
 	}
 
 	return app.file(full_path)
 }
 
-['/pkgs/:pkg'; put]
-fn (mut app App) put_package(pkg string) web.Result {
-	if !app.is_authorized() {
-		return app.text('Unauthorized.')
-	}
+// ['/pkgs/:pkg'; put]
+// fn (mut app App) put_package(pkg string) web.Result {
+// 	if !app.is_authorized() {
+// 		return app.text('Unauthorized.')
+// 	}
 
-	if !is_pkg_name(pkg) {
-		app.lwarn("Invalid package name '$pkg'.")
+// 	if !is_pkg_name(pkg) {
+// 		app.lwarn("Invalid package name '$pkg'.")
 
-		return app.text('Invalid filename.')
-	}
+// 		return app.text('Invalid filename.')
+// 	}
 
-	if app.repo.exists(pkg) {
-		app.lwarn("Duplicate package '$pkg'")
+// 	if app.repo.exists(pkg) {
+// 		app.lwarn("Duplicate package '$pkg'")
 
-		return app.text('File already exists.')
-	}
+// 		return app.text('File already exists.')
+// 	}
 
-	pkg_path := app.repo.pkg_path(pkg)
+// 	pkg_path := app.repo.pkg_path(pkg)
 
-	if length := app.req.header.get(.content_length) {
-		app.ldebug("Uploading $length (${pretty_bytes(length.int())}) bytes to package '$pkg'.")
+// 	if length := app.req.header.get(.content_length) {
+// 		app.ldebug("Uploading $length (${pretty_bytes(length.int())}) bytes to package '$pkg'.")
 
-		// This is used to time how long it takes to upload a file
-		mut sw := time.new_stopwatch(time.StopWatchOptions{ auto_start: true })
+// 		// This is used to time how long it takes to upload a file
+// 		mut sw := time.new_stopwatch(time.StopWatchOptions{ auto_start: true })
 
-		reader_to_file(mut app.reader, length.int(), pkg_path) or {
-			app.lwarn("Failed to upload package '$pkg'")
+// 		reader_to_file(mut app.reader, length.int(), pkg_path) or {
+// 			app.lwarn("Failed to upload package '$pkg'")
 
-			return app.text('Failed to upload file.')
-		}
+// 			return app.text('Failed to upload file.')
+// 		}
 
-		sw.stop()
-		app.ldebug("Upload of package '$pkg' completed in ${sw.elapsed().seconds():.3}s.")
-	} else {
-		app.lwarn("Tried to upload package '$pkg' without specifying a Content-Length.")
-		return app.text("Content-Type header isn't set.")
-	}
+// 		sw.stop()
+// 		app.ldebug("Upload of package '$pkg' completed in ${sw.elapsed().seconds():.3}s.")
+// 	} else {
+// 		app.lwarn("Tried to upload package '$pkg' without specifying a Content-Length.")
+// 		return app.text("Content-Type header isn't set.")
+// 	}
 
-	app.repo.add_package(pkg_path) or {
-		app.lwarn("Failed to add package '$pkg' to database.")
+// 	app.repo.add_package(pkg_path) or {
+// 		app.lwarn("Failed to add package '$pkg' to database.")
 
-		os.rm(pkg_path) or { println('Failed to remove $pkg_path') }
+// 		os.rm(pkg_path) or { println('Failed to remove $pkg_path') }
 
-		return app.text('Failed to add package to repo.')
-	}
+// 		return app.text('Failed to add package to repo.')
+// 	}
 
-	app.linfo("Added '$pkg' to repository.")
+// 	app.linfo("Added '$pkg' to repository.")
 
-	return app.text('Package added successfully.')
+// 	return app.text('Package added successfully.')
+// }
+
+['/add'; put]
+pub fn (mut app App) add_package() web.Result {
+	return app.text('')
 }
 
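The new `/add` route is only a stub here. The following is a speculative sketch (not this commit's code) of how the handler might eventually stream an upload by reusing the existing `reader_to_file` helper from `src/main.v` and the new `Repo.add` method; the temporary path, the argument passed to `add`, and the overall flow are assumptions.

```v
['/add'; put]
pub fn (mut app App) add_package() web.Result {
	// Speculative flow, not implemented in this commit
	length := app.req.header.get(.content_length) or {
		return app.text("Content-Length header isn't set.")
	}

	// Stream the request body to a temporary file first (hypothetical path)
	tmp_path := os.join_path_single(os.temp_dir(), 'vieter-upload.pkg.tar.zst')
	reader_to_file(mut app.reader, length.int(), tmp_path) or {
		return app.text('Failed to upload file.')
	}

	added := app.repo.add(tmp_path) or { return app.text('Failed to add package to repo.') }
	if !added {
		return app.text('Package was already present.')
	}

	return app.text('Package added successfully.')
}
```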