Compare commits
1 commit: dev...hash-on-up

Author | SHA1 | Date
---|---|---
Jef Roosens | 228702c0ca |
package module:

```diff
@@ -1,7 +1,6 @@
 module package
 
 import os
-import util
 
 // Represents a read archive
 struct Pkg {
@@ -10,6 +9,7 @@ pub:
 	info        PkgInfo  [required]
 	files       []string [required]
 	compression int      [required]
+	sha256sum   []u8     [required]
 }
 
 // Represents the contents of a .PKGINFO file
@@ -26,10 +26,8 @@ pub mut:
 	arch       string
 	build_date i64
 	packager   string
-	// md5sum    string
-	// sha256sum string
 	pgpsig     string
 	pgpsigsize i64
 	// Array values
 	groups   []string
 	licenses []string
@@ -42,11 +40,6 @@ pub mut:
 	checkdepends []string
 }
 
-// checksum calculates the md5 & sha256 hash of the package
-pub fn (p &Pkg) checksum() ?(string, string) {
-	return util.hash_file(p.path)
-}
-
 // parse_pkg_info_string parses a PkgInfo object from a string
 fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo {
 	mut pkg_info := PkgInfo{}
@@ -101,7 +94,7 @@ fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo {
 
 // read_pkg_archive extracts the file list & .PKGINFO contents from an archive
 // NOTE: this command only supports zstd-, xz- & gzip-compressed tarballs.
-pub fn read_pkg_archive(pkg_path string) ?Pkg {
+pub fn read_pkg_archive(pkg_path string, sha256sum []u8) ?Pkg {
 	if !os.is_file(pkg_path) {
 		return error("'$pkg_path' doesn't exist or isn't a file.")
 	}
@@ -172,6 +165,7 @@ pub fn read_pkg_archive(pkg_path string) ?Pkg {
 		info: pkg_info
 		files: files
 		compression: compression_code
+		sha256sum: sha256sum
 	}
 }
 
@@ -223,10 +217,7 @@ pub fn (pkg &Pkg) to_desc() string {
 	desc += format_entry('CSIZE', p.csize.str())
 	desc += format_entry('ISIZE', p.size.str())
 
-	md5sum, sha256sum := pkg.checksum() or { '', '' }
-
-	desc += format_entry('MD5SUM', md5sum)
-	desc += format_entry('SHA256SUM', sha256sum)
+	desc += format_entry('SHA256SUM', pkg.sha256sum.hex())
 
 	// TODO add pgpsig stuff
 
```
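In `to_desc`, the `MD5SUM` entry is dropped and `SHA256SUM` now comes from the digest stored on `Pkg` instead of re-hashing the package file. A minimal sketch of that digest-to-hex conversion, using only the `crypto.sha256` calls the diff itself introduces (`hex_digest` is a made-up helper name for illustration):

```v
import crypto.sha256

// hex_digest hashes a byte buffer and renders the digest the same way
// to_desc renders Pkg.sha256sum, i.e. as a hex string.
fn hex_digest(data []u8) ?string {
	mut h := sha256.new()
	h.write(data)?
	return h.checksum().hex()
}

fn main() {
	println(hex_digest('hello world'.bytes()) or { '' })
}
```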
repository manager (`RepoGroupManager`):

```diff
@@ -48,8 +48,8 @@ pub fn new(repos_dir string, pkg_dir string, default_arch string) ?RepoGroupManager
 // pkg archive. It's a wrapper around add_pkg_in_repo that parses the archive
 // file, passes the result to add_pkg_in_repo, and hard links the archive to
 // the right subdirectories in r.pkg_dir if it was successfully added.
-pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) ?RepoAddResult {
-	pkg := package.read_pkg_archive(pkg_path) or {
+pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string, checksum []u8) ?RepoAddResult {
+	pkg := package.read_pkg_archive(pkg_path, checksum) or {
 		return error('Failed to read package file: $err.msg()')
 	}
 
```
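`add_pkg_from_path` itself only threads the digest through to `package.read_pkg_archive`. A rough sketch of a call site under the new signature (the paths, repo name, and zeroed placeholder digest are invented; `new` is the constructor named in the hunk header):

```v
fn example() ? {
	r := new('/data/repos', '/data/pkgs', 'x86_64')?
	digest := []u8{len: 32} // placeholder; normally computed while receiving the upload
	res := r.add_pkg_from_path('vieter', '/data/pkgs/upload.tar.zst', digest) or {
		return error('Failed to add package: $err.msg()')
	}
	println(res)
}
```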
web route `put_package`:

```diff
@@ -66,31 +66,31 @@ fn (mut app App) put_package(repo string) web.Result {
 
 	mut pkg_path := ''
 
-	if length := app.req.header.get(.content_length) {
-		// Generate a random filename for the temp file
-		pkg_path = os.join_path_single(app.repo.pkg_dir, rand.uuid_v4())
-
-		app.ldebug("Uploading $length bytes (${util.pretty_bytes(length.int())}) to '$pkg_path'.")
-
-		// This is used to time how long it takes to upload a file
-		mut sw := time.new_stopwatch(time.StopWatchOptions{ auto_start: true })
-
-		util.reader_to_file(mut app.reader, length.int(), pkg_path) or {
-			app.lwarn("Failed to upload '$pkg_path'")
-
-			return app.json(http.Status.internal_server_error, new_response('Failed to upload file.'))
-		}
-
-		sw.stop()
-		app.ldebug("Upload of '$pkg_path' completed in ${sw.elapsed().seconds():.3}s.")
-	} else {
+	length := app.req.header.get(.content_length) or {
 		app.lwarn('Tried to upload package without specifying a Content-Length.')
 
 		// length required
 		return app.status(http.Status.length_required)
 	}
 
-	res := app.repo.add_pkg_from_path(repo, pkg_path) or {
+	// Generate a random filename for the temp file
+	pkg_path = os.join_path_single(app.repo.pkg_dir, rand.uuid_v4())
+
+	app.ldebug("Uploading $length bytes (${util.pretty_bytes(length.int())}) to '$pkg_path'.")
+
+	// This is used to time how long it takes to upload a file
+	mut sw := time.new_stopwatch(time.StopWatchOptions{ auto_start: true })
+
+	checksum := util.reader_to_file_and_hash(mut app.reader, length.int(), pkg_path) or {
+		app.lwarn("Failed to upload '$pkg_path'")
+
+		return app.json(http.Status.internal_server_error, new_response('Failed to upload file.'))
+	}
+
+	sw.stop()
+	app.ldebug("Upload of '$pkg_path' completed in ${sw.elapsed().seconds():.3}s.")
+
+	res := app.repo.add_pkg_from_path(repo, pkg_path, checksum) or {
 		app.lerror('Error while adding package: $err.msg()')
 
 		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg()") }
```
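Beyond swapping in `reader_to_file_and_hash`, the hunk restructures the handler: the missing `Content-Length` case is handled up front in an `or` block, so the upload logic no longer sits inside an `if length := ... { } else { }`. A small self-contained sketch of that pattern (both function names are invented for illustration):

```v
// get_header stands in for app.req.header.get: it returns an option that
// carries an error when the header was not sent.
fn get_header(name string) ?string {
	return error('header $name not set')
}

fn handle_upload() string {
	// The or block only runs when get_header fails, and its `return` exits
	// handle_upload early, so the happy path below stays unindented.
	length := get_header('Content-Length') or { return 'length required' }
	return 'uploading $length bytes'
}

fn main() {
	println(handle_upload())
}
```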
util module:

```diff
@@ -3,6 +3,7 @@ module util
 
 import io
 import os
+import crypto.sha256
 
 // reader_to_writer tries to consume the entire reader & write it to the writer.
 pub fn reader_to_writer(mut reader io.Reader, mut writer io.Writer) ? {
@@ -48,6 +49,40 @@ pub fn reader_to_file(mut reader io.BufferedReader, length int, path string) ? {
 	}
 }
 
+// reader_to_file_and_hash writes the contents of a BufferedReader to a file
+// while also generating the sha256 checksum of the data in the process.
+pub fn reader_to_file_and_hash(mut reader io.BufferedReader, length int, path string) ?[]u8 {
+	mut file := os.create(path)?
+	defer {
+		file.close()
+	}
+
+	mut buf := []u8{len: reader_buf_size}
+	mut bytes_left := length
+	mut sha256sum := sha256.new()
+
+	// Repeat as long as the stream still has data
+	for bytes_left > 0 {
+		// TODO check if just breaking here is safe
+		bytes_read := reader.read(mut buf) or { break }
+		bytes_left -= bytes_read
+
+		// This is actually an infallible function, so this *should* never
+		// fail.
+		sha256sum.write(buf[..bytes_read])?
+
+		mut to_write := bytes_read
+
+		for to_write > 0 {
+			bytes_written := file.write(buf[bytes_read - to_write..bytes_read]) or { continue }
+
+			to_write = to_write - bytes_written
+		}
+	}
+
+	return sha256sum.checksum()
+}
+
 // match_array_in_array<T> returns how many elements of a2 overlap with a1. For
 // example, if a1 = "abcd" & a2 = "cd", the result will be 2. If the match is
 // not at the end of a1, the result is 0.
```
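`reader_to_file_and_hash` feeds each chunk read from the `BufferedReader` through a `crypto.sha256` digest before writing it to disk, so the checksum is produced during the upload rather than by re-reading the file afterwards. A hypothetical caller inside the `util` module, assuming `os.File` satisfies `io.Reader` and can back a reader created with `io.new_buffered_reader` (none of this is part of the diff):

```v
module util

import io
import os

// copy_and_hash streams src into dst through a BufferedReader and returns
// the sha256 digest of the bytes that were copied.
fn copy_and_hash(src string, dst string) ?[]u8 {
	mut f := os.open(src)?
	defer {
		f.close()
	}

	mut br := io.new_buffered_reader(reader: f)

	return reader_to_file_and_hash(mut br, int(os.file_size(src)), dst)
}
```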