From e26e2746de9b2c9f7de95fcb4d23e49d253482ed Mon Sep 17 00:00:00 2001
From: Jef Roosens
Date: Sun, 30 Jan 2022 23:54:05 +0100
Subject: [PATCH] Added creation of files archive

---
 src/package.v         |  6 ++--
 src/{ => repo}/repo.v | 52 ----------------------------
 src/repo/sync.v       | 80 +++++++++++++++++++++++++++++++++++++++++++
 src/routes.v          |  8 ++---
 4 files changed, 85 insertions(+), 61 deletions(-)
 rename src/{ => repo}/repo.v (63%)
 create mode 100644 src/repo/sync.v

diff --git a/src/package.v b/src/package.v
index a4d1d6c..da3988f 100644
--- a/src/package.v
+++ b/src/package.v
@@ -115,7 +115,7 @@ pub fn read_pkg(pkg_path string) ?Pkg {
 	C.archive_read_support_format_tar(a)
 
 	// TODO find out where does this 10240 come from
-	r := C.archive_read_open_filename(a, &char(pkg_path.str), 10240)
+	r := C.archive_read_open_filename(a, &char(pkg_path.str), 10240)
 
 	if r != C.ARCHIVE_OK {
 		return error('Failed to open package.')
@@ -142,7 +142,7 @@ pub fn read_pkg(pkg_path string) ?Pkg {
 			size := C.archive_entry_size(entry)
 
 			// TODO can this unsafe block be avoided?
-			buf := unsafe { malloc(size) }
+			buf := unsafe { malloc(size) }
 			defer {
 				unsafe {
 					free(buf)
@@ -150,7 +150,7 @@ pub fn read_pkg(pkg_path string) ?Pkg {
 			}
 			C.archive_read_data(a, buf, size)
 
-			pkg_text := unsafe { buf.vstring_with_len(size).clone() }
+			pkg_text := unsafe { buf.vstring_with_len(size).clone() }
 
 			pkg_info = parse_pkg_info_string(pkg_text) ?
 		} else {
diff --git a/src/repo.v b/src/repo/repo.v
similarity index 63%
rename from src/repo.v
rename to src/repo/repo.v
index fcf4808..ded30ba 100644
--- a/src/repo.v
+++ b/src/repo/repo.v
@@ -93,55 +93,3 @@ fn (r &Repo) add(pkg &package.Pkg) ?bool {
 fn (r &Repo) pkg_path(pkg &package.Pkg) string {
 	return os.join_path(r.repo_dir, '$pkg.info.name-$pkg.info.version')
 }
-
-// Re-generate the repo archive files
-fn (r &Repo) sync() ? {
-	// TODO also write files archive
-	lock r.mutex {
-		a := C.archive_write_new()
-		entry := C.archive_entry_new()
-		st := C.stat{}
-		buf := [8192]byte{}
-
-		// This makes the archive a gzip-compressed tarball
-		C.archive_write_add_filter_gzip(a)
-		C.archive_write_set_format_pax_restricted(a)
-
-		// TODO add symlink to .tar.gz version
-		repo_path := os.join_path_single(r.repo_dir, 'repo.db')
-
-		C.archive_write_open_filename(a, &char(repo_path.str))
-
-		// Iterate over each directory
-		for d in os.ls(r.repo_dir) ?.filter(os.is_dir(os.join_path_single(r.repo_dir,
-			it))) {
-			inner_path := os.join_path_single(d, 'desc')
-			actual_path := os.join_path_single(r.repo_dir, inner_path)
-
-			unsafe {
-				C.stat(&char(actual_path.str), &st)
-			}
-
-			C.archive_entry_set_pathname(entry, &char(inner_path.str))
-			C.archive_entry_copy_stat(entry, &st)
-			// C.archive_entry_set_size(entry, st.st_size)
-			// C.archive_entry_set_filetype(entry, C.AE_IFREG)
-			// C.archive_entry_set_perm(entry, 0o644)
-			C.archive_write_header(a, entry)
-
-			fd := C.open(&char(actual_path.str), C.O_RDONLY)
-			mut len := C.read(fd, &buf, sizeof(buf))
-
-			for len > 0 {
-				C.archive_write_data(a, &buf, len)
-				len = C.read(fd, &buf, sizeof(buf))
-			}
-			C.close(fd)
-
-			C.archive_entry_clear(entry)
-		}
-
-		C.archive_write_close(a)
-		C.archive_write_free(a)
-	}
-}
diff --git a/src/repo/sync.v b/src/repo/sync.v
new file mode 100644
index 0000000..0bb5545
--- /dev/null
+++ b/src/repo/sync.v
@@ -0,0 +1,80 @@
+module repo
+
+import os
+
+fn archive_add_entry(archive &C.archive, entry &C.archive_entry, file_path &string, inner_path &string) {
+	st := C.stat{}
+
+	unsafe {
+		C.stat(&char(file_path.str), &st)
+	}
+
+	C.archive_entry_set_pathname(entry, &char(inner_path.str))
+	C.archive_entry_copy_stat(entry, &st)
+	C.archive_write_header(archive, entry)
+
+	mut fd := C.open(&char(file_path.str), C.O_RDONLY)
+	defer {
+		C.close(fd)
+	}
+
+	// Write the file to the archive
+	buf := [8192]byte{}
+	mut len := C.read(fd, &buf, sizeof(buf))
+
+	for len > 0 {
+		C.archive_write_data(archive, &buf, len)
+
+		len = C.read(fd, &buf, sizeof(buf))
+	}
+}
+
+// Re-generate the repo archive files
+fn (r &Repo) sync() ? {
+	// TODO also write files archive
+	lock r.mutex {
+		a_db := C.archive_write_new()
+		a_files := C.archive_write_new()
+
+		entry := C.archive_entry_new()
+
+		// This makes the archive a gzip-compressed tarball
+		C.archive_write_add_filter_gzip(a_db)
+		C.archive_write_set_format_pax_restricted(a_db)
+		C.archive_write_add_filter_gzip(a_files)
+		C.archive_write_set_format_pax_restricted(a_files)
+
+		// TODO add symlink to .tar.gz version
+		db_path := os.join_path_single(r.repo_dir, 'repo.db')
+		files_path := os.join_path_single(r.repo_dir, 'repo.files')
+
+		C.archive_write_open_filename(a_db, &char(db_path.str))
+		C.archive_write_open_filename(a_files, &char(files_path.str))
+
+		// Iterate over each directory
+		for d in os.ls(r.repo_dir) ?.filter(os.is_dir(os.join_path_single(r.repo_dir,
+			it))) {
+			// desc
+			mut inner_path := os.join_path_single(d, 'desc')
+			mut actual_path := os.join_path_single(r.repo_dir, inner_path)
+
+			archive_add_entry(a_db, entry, actual_path, inner_path)
+			archive_add_entry(a_files, entry, actual_path, inner_path)
+
+			C.archive_entry_clear(entry)
+
+			// files
+			inner_path = os.join_path_single(d, 'files')
+			actual_path = os.join_path_single(r.repo_dir, inner_path)
+
+			archive_add_entry(a_files, entry, actual_path, inner_path)
+
+			C.archive_entry_clear(entry)
+		}
+
+		C.archive_write_close(a_db)
+		C.archive_write_free(a_db)
+		C.archive_write_close(a_files)
+		C.archive_write_free(a_files)
+	}
+}
diff --git a/src/routes.v b/src/routes.v
index c1e888b..6218898 100644
--- a/src/routes.v
+++ b/src/routes.v
@@ -76,16 +76,12 @@ fn (mut app App) put_package() web.Result {
 	added := app.repo.add_from_path(pkg_path) or {
 		app.lerror('Error while adding package: $err.msg')
-		os.rm(pkg_path) or {
-			app.lerror("Failed to remove download '$pkg_path'.")
-		}
+		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path'.") }
 
 		return app.text('Failed to add package.')
 	}
 
 	if !added {
-		os.rm(pkg_path) or {
-			app.lerror("Failed to remove download '$pkg_path'.")
-		}
+		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path'.") }
 
 		app.lwarn('Duplicate package.')