Compare commits

...

3 Commits

Author SHA1 Message Date
Jef Roosens 57fe767a70 Working add to repo functionality 2022-01-19 18:54:33 +01:00
Jef Roosens 4bc3bfb8cf Moved checksum calculation to desc function 2022-01-19 17:22:36 +01:00
Jef Roosens df3310944e Working md5 checksum 2022-01-19 17:15:37 +01:00
4 changed files with 121 additions and 68 deletions

View File

@@ -4,7 +4,6 @@ import web
import os
import log
import io
import pkg
import repo
const port = 8000
@@ -103,11 +102,16 @@ fn reader_to_file(mut reader io.BufferedReader, length int, path string) ? {
// }
fn main() {
r := repo.new('data/repo', 'data/pkgs') or { return }
print(r.add_from_path('test/homebank-5.5.1-1-x86_64.pkg.tar.zst') or { panic('you failed') })
// archive.list_filenames()
res := pkg.read_pkg('test/homebank-5.5.3-1-x86_64.pkg.tar.zst') or {
eprintln(err.msg)
return
}
// res := pkg.read_pkg('test/jjr-joplin-desktop-2.6.10-4-x86_64.pkg.tar.zst') or {
// eprintln(err.msg)
// return
// }
// println(info)
print(res.info.to_desc())
// println('hey')
// print(res.to_desc())
// print(res.to_files())
}

View File

@@ -1,4 +1,4 @@
module pkg
module package
import time
import os
@@ -45,7 +45,9 @@ fn C.archive_entry_new() &C.archive_entry
fn C.archive_entry_pathname(&C.archive_entry) &char
// Get an entry's file size
// Note: this function actually returns an i64, but as this can't be used as an argument to malloc, we'll just roll with it & assume an entry is never bigger than 4 gigs
// Note: this function actually returns an i64, but as this can't be used as an
// argument to malloc, we'll just roll with it & assume an entry is never
// bigger than 4 gigs
fn C.archive_entry_size(&C.archive_entry) int
#include <string.h>
@@ -56,13 +58,14 @@ fn C.strcmp(&char, &char) int
// Represents a read archive
struct Pkg {
pub:
path string [required]
info PkgInfo [required]
files []string [required]
}
// Represents the contents of a .PKGINFO file
struct PkgInfo {
mut:
pub mut:
// Single values
name string
base string
@@ -74,10 +77,10 @@ mut:
arch string
build_date i64
packager string
md5sum string
sha256sum string
pgpsig string
pgpsigsize i64
// md5sum string
// sha256sum string
pgpsig string
pgpsigsize i64
// Array values
groups []string
licenses []string
@@ -90,6 +93,10 @@ mut:
checkdepends []string
}
pub fn (p &Pkg) checksum() ?(string, string) {
return util.hash_file(p.path)
}
// parse_pkg_info_string parses a PkgInfo object from a string
fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo {
mut pkg_info := PkgInfo{}
@@ -195,11 +202,9 @@ pub fn read_pkg(pkg_path string) ?Pkg {
mut pkg_info := parse_pkg_info_string(unsafe { cstring_to_vstring(&char(buf)) }) ?
pkg_info.csize = i64(os.file_size(pkg_path))
pkg_info.md5sum, pkg_info.sha256sum = util.hash_file(pkg_path) or {
return error('Failed to hash package.')
}
return Pkg{
path: pkg_path
info: pkg_info
files: files
}
@@ -209,11 +214,19 @@ fn format_entry(key string, value string) string {
return '\n%$key%\n$value\n'
}
pub fn (pkg &Pkg) filename() string {
p := pkg.info
return '$p.name-$p.version-${p.arch}.pkg.tar.zst'
}
// to_desc returns a desc file valid string representation
// TODO calculate md5 & sha256 instead of believing the file
pub fn (p &PkgInfo) to_desc() string {
pub fn (pkg &Pkg) to_desc() string {
p := pkg.info
// filename
mut desc := '%FILENAME%\n$p.name-$p.version-${p.arch}.pkg.tar.zst\n'
mut desc := '%FILENAME%\n$pkg.filename()\n'
desc += format_entry('NAME', p.name)
desc += format_entry('BASE', p.base)
@@ -230,8 +243,12 @@ pub fn (p &PkgInfo) to_desc() string {
desc += format_entry('CSIZE', p.csize.str())
desc += format_entry('ISIZE', p.size.str())
desc += format_entry('MD5SUM', p.md5sum)
desc += format_entry('SHA256SUM', p.sha256sum)
md5sum, sha256sum := pkg.checksum() or { '', '' }
desc += format_entry('MD5SUM', md5sum)
// TODO add this
// desc += format_entry('SHA256SUM', sha256sum)
// TODO add pgpsig stuff
@@ -277,3 +294,8 @@ pub fn (p &PkgInfo) to_desc() string {
return '$desc\n'
}
// to_files returns a files file valid string representation
pub fn (pkg &Pkg) to_files() string {
return '%FILES%\n$pkg.files.join_lines()\n'
}
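The desc output assembled above is a plain-text sequence of %KEY% blocks separated by blank lines, with the filename first and the MD5 now computed from the archive on disk through checksum(). As a rough illustration only (values are placeholders borrowed from the homebank test package used in main(), not taken from an actual run), the generated desc would start roughly like:

%FILENAME%
homebank-5.5.3-1-x86_64.pkg.tar.zst

%NAME%
homebank

%BASE%
homebank

... (remaining %KEY% entries) ...

%MD5SUM%
<hex digest returned by pkg.checksum()>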

View File

@@ -1,8 +1,13 @@
module repo
import os
import package
const pkgs_subpath = 'pkgs'
// subpath where the uncompressed version of the files archive is stored
const files_subpath = 'files'
// subpath where the uncompressed version of the repo archive is stored
const repo_subpath = 'repo'
// Dummy struct to work around the fact that you can only share structs, maps &
// arrays
@@ -21,46 +26,68 @@ pub:
pkg_dir string [required]
}
// contains returns whether the repository contains the given package.
pub fn (r &Repo) contains(pkg string) bool {
return os.exists(os.join_path(r.repo_dir, 'files', pkg))
}
// add adds the given package to the repo. If false, the package was already
// present in the repository.
pub fn (r &Repo) add(pkg string) ?bool {
return false
}
// generate re-generates the db & files archives.
fn (r &Repo) generate() ? {
}
// pkg_path returns path to the given package, prepended with the repo's path.
pub fn (r &Repo) pkg_path(pkg string) string {
return os.join_path_single(r.pkg_dir, pkg)
}
// exists checks whether a package file exists
pub fn (r &Repo) exists(pkg string) bool {
return os.exists(r.pkg_path(pkg))
}
// db_path returns the full path to the database file
pub fn (r &Repo) db_path() string {
return os.join_path_single(r.repo_dir, 'repo.tar.gz')
}
// add_package adds a package to the repository
pub fn (r &Repo) add_package(pkg_path string) ? {
mut res := os.Result{}
lock r.mutex {
res = os.execute("repo-add '$r.db_path()' '$pkg_path'")
pub fn new(repo_dir string, pkg_dir string) ?Repo {
if !os.is_dir(repo_dir) {
os.mkdir_all(repo_dir) or { return error('Failed to create repo directory.') }
}
if res.exit_code != 0 {
println(res.output)
return error('repo-add failed.')
if !os.is_dir(pkg_dir) {
os.mkdir_all(pkg_dir) or { return error('Failed to create package directory.') }
}
return Repo{
repo_dir: repo_dir
pkg_dir: pkg_dir
}
}
// add_from_path adds a package from an arbitrary path & moves it into the pkgs
// directory if necessary.
pub fn (r &Repo) add_from_path(pkg_path string) ?bool {
pkg := package.read_pkg(pkg_path) or { return error('Failed to read package file.') }
added := r.add(pkg) ?
// If the add was successful, we move the file to the packages directory
if added {
dest_path := os.real_path(os.join_path_single(r.pkg_dir, pkg.filename()))
// Only move the file if it's not already in the package directory
if dest_path != os.real_path(pkg_path) {
os.mv(pkg_path, dest_path) ?
}
}
return true
}
// add adds a given Pkg to the repository
fn (r &Repo) add(pkg &package.Pkg) ?bool {
pkg_dir := r.pkg_path(pkg)
// We can't add the same package twice
if os.exists(pkg_dir) {
return false
}
os.mkdir(pkg_dir) or { return error('Failed to create package directory.') }
os.write_file(os.join_path_single(pkg_dir, 'desc'), pkg.to_desc()) or {
os.rmdir_all(pkg_dir) ?
return error('Failed to write desc file.')
}
os.write_file(os.join_path_single(pkg_dir, 'files'), pkg.to_files()) or {
os.rmdir_all(pkg_dir) ?
return error('Failed to write files file.')
}
// TODO generate database archive
return true
}
// Returns the path where the given package's desc & files files are stored
fn (r &Repo) pkg_path(pkg &package.Pkg) string {
return os.join_path(r.repo_dir, '$pkg.info.name-$pkg.info.version')
}
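Putting the repo module's new pieces together: new() prepares the two directories, add_from_path() reads the archive & registers it, and add() writes the per-package desc & files entries under repo_dir, after which the archive itself is moved into pkg_dir. A minimal usage sketch of that flow, mirroring main() above; the paths and the homebank archive are illustrative only:

import repo

fn main() {
	// Create (or reuse) the repo & package directories
	r := repo.new('data/repo', 'data/pkgs') or { panic(err.msg) }

	// Writes data/repo/homebank-5.5.3-1/{desc,files} & moves the archive to
	// data/pkgs/homebank-5.5.3-1-x86_64.pkg.tar.zst if it isn't there yet
	added := r.add_from_path('test/homebank-5.5.3-1-x86_64.pkg.tar.zst') or { panic(err.msg) }
	println(added)
}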

View File

@@ -2,7 +2,7 @@ module util
import os
import crypto.md5
import crypto.sha256
// import crypto.sha256
// hash_file returns the md5 & sha256 hash of a given file
// TODO actually implement sha256
@@ -10,7 +10,7 @@ pub fn hash_file(path &string) ?(string, string) {
file := os.open(path) or { return error('Failed to open file.') }
mut md5sum := md5.new()
mut sha256sum := sha256.new()
// mut sha256sum := sha256.new()
buf_size := int(1_000_000)
mut buf := []byte{len: buf_size}
@@ -18,16 +18,16 @@ pub fn hash_file(path &string) ?(string, string) {
for bytes_left > 0 {
// TODO check if just breaking here is safe
bytes_read := file.read(mut buf) or { break }
bytes_read := file.read(mut buf) or { return error('Failed to read from file.') }
bytes_left -= u64(bytes_read)
if bytes_left > buf_size {
// For now we'll assume that this always works
md5sum.write(buf) or {}
// sha256sum.write(buf) or {}
// For now we'll assume that this always works
md5sum.write(buf[..bytes_read]) or {
return error('Failed to update checksum. This should never happen.')
}
// sha256sum.write(buf) or {}
}
// return md5sum.sum(buf).hex(), sha256sum.sum(buf).hex()
return md5sum.sum(buf).hex(), ''
return md5sum.checksum().hex(), ''
}
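One note on the final change: in V's crypto digests (modelled on Go's), sum(b) appends the digest to the bytes passed in and returns that combined slice, so md5sum.sum(buf).hex() yielded the hex of the whole 1 MB buffer plus the digest, whereas checksum() finalizes only what was streamed in through write(). Combined with hashing just buf[..bytes_read], the streamed digest should now match hashing the file in one go. A small sanity-check sketch, assuming the homebank test archive referenced in main() is present; it is not part of the repository itself:

import os
import util
import crypto.md5

fn main() {
	path := 'test/homebank-5.5.3-1-x86_64.pkg.tar.zst'

	// Digest computed in 1 MB chunks by util.hash_file
	streamed_md5, _ := util.hash_file(path) or { panic(err.msg) }

	// Digest of the whole file read at once; the two should be equal
	whole_md5 := md5.sum(os.read_bytes(path) or { panic(err.msg) }).hex()

	println(streamed_md5 == whole_md5)
}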