Compare commits


No commits in common. "57fe767a7052ac0d115800d60e1d14a5d522cf8e" and "9bd14b4cbf584c524ffe1bd1ecebe2bed90a5cb3" have entirely different histories.

4 changed files with 65 additions and 118 deletions

View File

@@ -4,6 +4,7 @@ import web
import os
import log
import io
import pkg
import repo
const port = 8000
@@ -102,16 +103,11 @@ fn reader_to_file(mut reader io.BufferedReader, length int, path string) ? {
// }
fn main() {
r := repo.new('data/repo', 'data/pkgs') or { return }
print(r.add_from_path('test/homebank-5.5.1-1-x86_64.pkg.tar.zst') or { panic('you failed') })
// archive.list_filenames()
// res := pkg.read_pkg('test/jjr-joplin-desktop-2.6.10-4-x86_64.pkg.tar.zst') or {
// eprintln(err.msg)
// return
// }
res := pkg.read_pkg('test/homebank-5.5.3-1-x86_64.pkg.tar.zst') or {
eprintln(err.msg)
return
}
// println(info)
// println('hey')
// print(res.to_desc())
// print(res.to_files())
print(res.info.to_desc())
}

View File

@@ -1,4 +1,4 @@
module package
module pkg
import time
import os
@@ -45,9 +45,7 @@ fn C.archive_entry_new() &C.archive_entry
fn C.archive_entry_pathname(&C.archive_entry) &char
// Get an entry's file size
// Note: this function actually returns an i64, but as this can't be used as an
// argument to malloc, we'll just roll with it & assume an entry is never
// bigger than 4 gigs
// Note: this function actually returns an i64, but as this can't be used as an argument to malloc, we'll just roll with it & assume an entry is never bigger than 4 gigs
fn C.archive_entry_size(&C.archive_entry) int
#include <string.h>
@@ -58,14 +56,13 @@ fn C.strcmp(&char, &char) int
// Represents a read archive
struct Pkg {
pub:
path string [required]
info PkgInfo [required]
files []string [required]
}
// Represents the contents of a .PKGINFO file
struct PkgInfo {
pub mut:
mut:
// Single values
name string
base string
@@ -77,10 +74,10 @@ pub mut:
arch string
build_date i64
packager string
// md5sum string
// sha256sum string
pgpsig string
pgpsigsize i64
md5sum string
sha256sum string
pgpsig string
pgpsigsize i64
// Array values
groups []string
licenses []string
@@ -93,10 +90,6 @@ pub mut:
checkdepends []string
}
pub fn (p &Pkg) checksum() ?(string, string) {
return util.hash_file(p.path)
}
// parse_pkg_info_string parses a PkgInfo object from a string
fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo {
mut pkg_info := PkgInfo{}
@@ -202,9 +195,11 @@ pub fn read_pkg(pkg_path string) ?Pkg {
mut pkg_info := parse_pkg_info_string(unsafe { cstring_to_vstring(&char(buf)) }) ?
pkg_info.csize = i64(os.file_size(pkg_path))
pkg_info.md5sum, pkg_info.sha256sum = util.hash_file(pkg_path) or {
return error('Failed to hash package.')
}
return Pkg{
path: pkg_path
info: pkg_info
files: files
}
@@ -214,19 +209,11 @@ fn format_entry(key string, value string) string {
return '\n%$key%\n$value\n'
}
pub fn (pkg &Pkg) filename() string {
p := pkg.info
return '$p.name-$p.version-${p.arch}.pkg.tar.zst'
}
// to_desc returns a valid string representation of a desc file
// TODO calculate md5 & sha256 instead of believing the file
pub fn (pkg &Pkg) to_desc() string {
p := pkg.info
pub fn (p &PkgInfo) to_desc() string {
// filename
mut desc := '%FILENAME%\n$pkg.filename()\n'
mut desc := '%FILENAME%\n$p.name-$p.version-${p.arch}.pkg.tar.zst\n'
desc += format_entry('NAME', p.name)
desc += format_entry('BASE', p.base)
@@ -243,12 +230,8 @@ pub fn (pkg &Pkg) to_desc() string {
desc += format_entry('CSIZE', p.csize.str())
desc += format_entry('ISIZE', p.size.str())
md5sum, sha256sum := pkg.checksum() or { '', '' }
desc += format_entry('MD5SUM', md5sum)
// TODO add this
// desc += format_entry('SHA256SUM', sha256sum)
desc += format_entry('MD5SUM', p.md5sum)
desc += format_entry('SHA256SUM', p.sha256sum)
// TODO add pgpsig stuff
@@ -294,8 +277,3 @@ pub fn (pkg &Pkg) to_desc() string {
return '$desc\n'
}
// to_files returns a valid string representation of a files file
pub fn (pkg &Pkg) to_files() string {
return '%FILES%\n$pkg.files.join_lines()\n'
}
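For context, a small self-contained sketch of the desc entry layout that format_entry produces when to_desc stitches the fields together. The NAME and CSIZE values below are made up purely to illustrate the format; they are not taken from the diff.

fn format_entry(key string, value string) string {
	return '\n%$key%\n$value\n'
}

fn main() {
	// A desc file starts with the filename entry, followed by one
	// blank-line-separated %KEY%/value block per field.
	mut desc := '%FILENAME%\nhomebank-5.5.3-1-x86_64.pkg.tar.zst\n'
	desc += format_entry('NAME', 'homebank')
	desc += format_entry('CSIZE', '123456')
	print(desc)
	// Output:
	// %FILENAME%
	// homebank-5.5.3-1-x86_64.pkg.tar.zst
	//
	// %NAME%
	// homebank
	//
	// %CSIZE%
	// 123456
}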

View File

@@ -1,13 +1,8 @@
module repo
import os
import package
// subpath where the uncompressed version of the files archive is stored
const files_subpath = 'files'
// subpath where the uncompressed version of the repo archive is stored
const repo_subpath = 'repo'
const pkgs_subpath = 'pkgs'
// Dummy struct to work around the fact that you can only share structs, maps &
// arrays
@@ -26,68 +21,46 @@ pub:
pkg_dir string [required]
}
pub fn new(repo_dir string, pkg_dir string) ?Repo {
if !os.is_dir(repo_dir) {
os.mkdir_all(repo_dir) or { return error('Failed to create repo directory.') }
}
if !os.is_dir(pkg_dir) {
os.mkdir_all(pkg_dir) or { return error('Failed to create package directory.') }
}
return Repo{
repo_dir: repo_dir
pkg_dir: pkg_dir
}
// contains returns whether the repository contains the given package.
pub fn (r &Repo) contains(pkg string) bool {
return os.exists(os.join_path(r.repo_dir, 'files', pkg))
}
// add_from_path adds a package from an arbitrary path & moves it into the pkgs
// directory if necessary.
pub fn (r &Repo) add_from_path(pkg_path string) ?bool {
pkg := package.read_pkg(pkg_path) or { return error('Failed to read package file.') }
added := r.add(pkg) ?
// If the add was successful, we move the file to the packages directory
if added {
dest_path := os.real_path(os.join_path_single(r.pkg_dir, pkg.filename()))
// Only move the file if it's not already in the package directory
if dest_path != os.real_path(pkg_path) {
os.mv(pkg_path, dest_path) ?
}
}
return true
// add adds the given package to the repo. It returns false if the package was
// already present in the repository.
pub fn (r &Repo) add(pkg string) ?bool {
return false
}
// add adds a given Pkg to the repository
fn (r &Repo) add(pkg &package.Pkg) ?bool {
pkg_dir := r.pkg_path(pkg)
// We can't add the same package twice
if os.exists(pkg_dir) {
return false
}
os.mkdir(pkg_dir) or { return error('Failed to create package directory.') }
os.write_file(os.join_path_single(pkg_dir, 'desc'), pkg.to_desc()) or {
os.rmdir_all(pkg_dir) ?
return error('Failed to write desc file.')
}
os.write_file(os.join_path_single(pkg_dir, 'files'), pkg.to_files()) or {
os.rmdir_all(pkg_dir) ?
return error('Failed to write files file.')
}
// TODO generate database archive
return true
// generate re-generates the db & files archives.
fn (r &Repo) generate() ? {
}
// Returns the path where the given package's desc & files files are stored
fn (r &Repo) pkg_path(pkg &package.Pkg) string {
return os.join_path(r.repo_dir, '$pkg.info.name-$pkg.info.version')
// pkg_path returns the path to the given package, prepended with the repo's package directory.
pub fn (r &Repo) pkg_path(pkg string) string {
return os.join_path_single(r.pkg_dir, pkg)
}
// exists checks whether a package file exists
pub fn (r &Repo) exists(pkg string) bool {
return os.exists(r.pkg_path(pkg))
}
// db_path returns the full path to the database file
pub fn (r &Repo) db_path() string {
return os.join_path_single(r.repo_dir, 'repo.tar.gz')
}
// add_package adds a package to the repository
pub fn (r &Repo) add_package(pkg_path string) ? {
mut res := os.Result{}
lock r.mutex {
res = os.execute("repo-add '$r.db_path()' '$pkg_path'")
}
if res.exit_code != 0 {
println(res.output)
return error('repo-add failed.')
}
}
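As an aside, the "Dummy struct" comment earlier in this file refers to V's shared/lock mechanism: only structs, maps & arrays can be declared shared, so an empty struct stands in as a mutex around the repo-add call. Below is a minimal, hedged sketch of that pattern; the type and method names are illustrative and not taken from the diff.

struct Dummy {}

struct Locked {
mut:
	mutex shared Dummy
}

fn (l &Locked) critical() {
	// lock grants exclusive access to the shared field for this block,
	// mirroring how add_package serialises its repo-add invocation
	lock l.mutex {
		println('only one thread at a time runs this')
	}
}

fn main() {
	l := Locked{}
	l.critical()
}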

View File

@@ -2,7 +2,7 @@ module util
import os
import crypto.md5
// import crypto.sha256
import crypto.sha256
// hash_file returns the md5 & sha256 hash of a given file
// TODO actually implement sha256
@@ -10,7 +10,7 @@ pub fn hash_file(path &string) ?(string, string) {
file := os.open(path) or { return error('Failed to open file.') }
mut md5sum := md5.new()
// mut sha256sum := sha256.new()
mut sha256sum := sha256.new()
buf_size := int(1_000_000)
mut buf := []byte{len: buf_size}
@@ -18,16 +18,16 @@ pub fn hash_file(path &string) ?(string, string) {
for bytes_left > 0 {
// TODO check if just breaking here is safe
bytes_read := file.read(mut buf) or { return error('Failed to read from file.') }
bytes_read := file.read(mut buf) or { break }
bytes_left -= u64(bytes_read)
// For now we'll assume that this always works
md5sum.write(buf[..bytes_read]) or {
return error('Failed to update checksum. This should never happen.')
if bytes_left > buf_size {
// For now we'll assume that this always works
md5sum.write(buf) or {}
// sha256sum.write(buf) or {}
}
// sha256sum.write(buf) or {}
}
// return md5sum.sum(buf).hex(), sha256sum.sum(buf).hex()
return md5sum.checksum().hex(), ''
return md5sum.sum(buf).hex(), ''
}
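Finally, a hedged sketch of a chunked hashing loop in the spirit of hash_file above: it feeds only the bytes actually read into both digests and reuses the crypto.md5 / crypto.sha256 calls visible in the hunk (plus sha256's sum method, which the commented-out lines rely on), but it is not the exact code of either commit, and the test path in main is simply the one referenced elsewhere in this diff.

import os
import crypto.md5
import crypto.sha256

fn hash_file_sketch(path string) ?(string, string) {
	mut file := os.open(path) or { return error('Failed to open file.') }
	defer {
		file.close()
	}
	mut md5sum := md5.new()
	mut sha256sum := sha256.new()
	mut buf := []byte{len: 1_000_000}
	for {
		// read reports how many bytes were actually placed in buf
		bytes_read := file.read(mut buf) or { break }
		if bytes_read <= 0 {
			break
		}
		// Hash only the bytes that were read, not the whole buffer
		md5sum.write(buf[..bytes_read]) or { return error('Failed to update md5 checksum.') }
		sha256sum.write(buf[..bytes_read]) or { return error('Failed to update sha256 checksum.') }
	}
	// sum appends the digest to the given buffer, so an empty buffer
	// yields just the digest (assumption mirroring the md5 API above)
	return md5sum.checksum().hex(), sha256sum.sum([]byte{}).hex()
}

fn main() {
	md5sum, sha256sum := hash_file_sketch('test/homebank-5.5.3-1-x86_64.pkg.tar.zst') or {
		eprintln(err.msg)
		return
	}
	println('md5:    $md5sum')
	println('sha256: $sha256sum')
}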