Added creation of files archive

main
Jef Roosens 2022-01-30 23:54:05 +01:00
parent 2e344eecc7
commit e26e2746de
Signed by untrusted user: Jef Roosens
GPG Key ID: B580B976584B5F30
4 changed files with 85 additions and 61 deletions


@@ -115,7 +115,7 @@ pub fn read_pkg(pkg_path string) ?Pkg {
	C.archive_read_support_format_tar(a)
	// TODO find out where does this 10240 come from
	r := C.archive_read_open_filename(a, &char(pkg_path.str), 10240)
	if r != C.ARCHIVE_OK {
		return error('Failed to open package.')
@@ -142,7 +142,7 @@ pub fn read_pkg(pkg_path string) ?Pkg {
			size := C.archive_entry_size(entry)
			// TODO can this unsafe block be avoided?
			buf := unsafe { malloc(size) }
			defer {
				unsafe {
					free(buf)
@@ -150,7 +150,7 @@ pub fn read_pkg(pkg_path string) ?Pkg {
			}
			C.archive_read_data(a, buf, size)
			pkg_text := unsafe { buf.vstring_with_len(size).clone() }
			pkg_info = parse_pkg_info_string(pkg_text) ?
		} else {


@@ -93,55 +93,3 @@ fn (r &Repo) add(pkg &package.Pkg) ?bool {
fn (r &Repo) pkg_path(pkg &package.Pkg) string {
	return os.join_path(r.repo_dir, '$pkg.info.name-$pkg.info.version')
}

// Re-generate the repo archive files
fn (r &Repo) sync() ? {
	// TODO also write files archive
	lock r.mutex {
		a := C.archive_write_new()
		entry := C.archive_entry_new()
		st := C.stat{}
		buf := [8192]byte{}

		// This makes the archive a gzip-compressed tarball
		C.archive_write_add_filter_gzip(a)
		C.archive_write_set_format_pax_restricted(a)

		// TODO add symlink to .tar.gz version
		repo_path := os.join_path_single(r.repo_dir, 'repo.db')
		C.archive_write_open_filename(a, &char(repo_path.str))

		// Iterate over each directory
		for d in os.ls(r.repo_dir) ?.filter(os.is_dir(os.join_path_single(r.repo_dir,
			it))) {
			inner_path := os.join_path_single(d, 'desc')
			actual_path := os.join_path_single(r.repo_dir, inner_path)

			unsafe {
				C.stat(&char(actual_path.str), &st)
			}

			C.archive_entry_set_pathname(entry, &char(inner_path.str))
			C.archive_entry_copy_stat(entry, &st)
			// C.archive_entry_set_size(entry, st.st_size)
			// C.archive_entry_set_filetype(entry, C.AE_IFREG)
			// C.archive_entry_set_perm(entry, 0o644)
			C.archive_write_header(a, entry)

			fd := C.open(&char(actual_path.str), C.O_RDONLY)
			mut len := C.read(fd, &buf, sizeof(buf))

			for len > 0 {
				C.archive_write_data(a, &buf, len)
				len = C.read(fd, &buf, sizeof(buf))
			}
			C.close(fd)

			C.archive_entry_clear(entry)
		}

		C.archive_write_close(a)
		C.archive_write_free(a)
	}
}

src/repo/sync.v (new file, mode 100644, 80 lines added)

@@ -0,0 +1,80 @@
module repo

import os
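
// archive_add_entry stats the file at file_path, writes an archive header for
// it under the name inner_path, and then streams the file's contents into the
// given archive in 8 KiB chunks.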
fn archive_add_entry(archive &C.archive, entry &C.archive_entry, file_path &string, inner_path &string) {
	st := C.stat{}

	unsafe {
		C.stat(&char(file_path.str), &st)
	}

	C.archive_entry_set_pathname(entry, &char(inner_path.str))
	C.archive_entry_copy_stat(entry, &st)
	C.archive_write_header(archive, entry)

	mut fd := C.open(&char(file_path.str), C.O_RDONLY)
	defer {
		C.close(fd)
	}

	// Write the file to the archive
	buf := [8192]byte{}
	mut len := C.read(fd, &buf, sizeof(buf))

	for len > 0 {
		C.archive_write_data(archive, &buf, len)
		len = C.read(fd, &buf, sizeof(buf))
	}
}
// Re-generate the repo archive files
fn (r &Repo) sync() ? {
	// TODO also write files archive
	lock r.mutex {
		a_db := C.archive_write_new()
		a_files := C.archive_write_new()
		entry := C.archive_entry_new()
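		// The same entry object is reused for every file below; it is reset
		// with C.archive_entry_clear after each one.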

		// This makes the archive a gzip-compressed tarball
		C.archive_write_add_filter_gzip(a_db)
		C.archive_write_set_format_pax_restricted(a_db)
		C.archive_write_add_filter_gzip(a_files)
		C.archive_write_set_format_pax_restricted(a_files)

		// TODO add symlink to .tar.gz version
		db_path := os.join_path_single(r.repo_dir, 'repo.db')
		files_path := os.join_path_single(r.repo_dir, 'repo.files')

		C.archive_write_open_filename(a_db, &char(db_path.str))
		C.archive_write_open_filename(a_files, &char(files_path.str))

		// Iterate over each directory
		for d in os.ls(r.repo_dir) ?.filter(os.is_dir(os.join_path_single(r.repo_dir,
			it))) {
			// desc
			mut inner_path := os.join_path_single(d, 'desc')
			mut actual_path := os.join_path_single(r.repo_dir, inner_path)

			archive_add_entry(a_db, entry, actual_path, inner_path)
			archive_add_entry(a_files, entry, actual_path, inner_path)

			C.archive_entry_clear(entry)

			// files
			inner_path = os.join_path_single(d, 'files')
			actual_path = os.join_path_single(r.repo_dir, inner_path)

			archive_add_entry(a_files, entry, actual_path, inner_path)

			C.archive_entry_clear(entry)
		}

		C.archive_write_close(a_db)
		C.archive_write_free(a_db)
		C.archive_write_close(a_files)
		C.archive_write_free(a_files)
	}
}
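
A minimal sketch for sanity-checking what sync() writes, assuming an illustrative repo directory of '/tmp/repo' and the hard-coded 'repo.db' / 'repo.files' names used above; it only checks that each archive exists and starts with the gzip magic bytes:

module main

import os

fn main() {
	// Hypothetical repository directory; sync() writes its archives here.
	repo_dir := '/tmp/repo'

	for name in ['repo.db', 'repo.files'] {
		path := os.join_path_single(repo_dir, name)
		data := os.read_bytes(path) or {
			eprintln('$name is missing or unreadable: $err.msg')
			continue
		}

		// A gzip stream starts with the magic bytes 0x1f 0x8b.
		is_gzip := data.len >= 2 && data[0] == 0x1f && data[1] == 0x8b
		println('$name: ${data.len} bytes, gzip magic: $is_gzip')
	}
}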


@@ -76,16 +76,12 @@ fn (mut app App) put_package() web.Result {
	added := app.repo.add_from_path(pkg_path) or {
		app.lerror('Error while adding package: $err.msg')

		os.rm(pkg_path) or {
			app.lerror("Failed to remove download '$pkg_path'.")
		}
		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path'.") }

		return app.text('Failed to add package.')
	}

	if !added {
		os.rm(pkg_path) or {
			app.lerror("Failed to remove download '$pkg_path'.")
		}
		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path'.") }

		app.lwarn('Duplicate package.')