forked from vieter-v/vieter
Compare commits
5 Commits
ae579f83b5 ... e26e2746de
| Author | SHA1 | Date |
|---|---|---|
| | e26e2746de | |
| | 2e344eecc7 | |
| | b0212b8162 | |
| | 766e097be8 | |
| | 4c56351132 | |
@@ -19,6 +19,3 @@ pipeline:
    when:
      event: pull_request
      branch: dev

depends_on:
  - build
@@ -1,5 +1,8 @@
branches: [main, dev]
platform: linux/amd64
depends_on:
  - builder
  - build

pipeline:
  dev:
@@ -31,7 +34,3 @@ pipeline:
    when:
      event: tag
      branch: main

depends_on:
  - builder
  - build
@@ -1,6 +1,8 @@
# Yeah so this only works on tags so we'll worry about this later
platform: linux/amd64
branches: main
depends_on:
  - build

# We need the entire repo in order for the release names to work
skip_clone: true
@@ -34,6 +36,3 @@ pipeline:
    title: title
    when:
      event: push

depends_on:
  - build
@@ -12,6 +12,9 @@ fn C.archive_read_new() &C.archive
// Configure the archive to work with zstd compression
fn C.archive_read_support_filter_zstd(&C.archive)

// Configure the archive to work with gzip compression
fn C.archive_read_support_filter_gzip(&C.archive)

// Configure the archive to work with tarball content
fn C.archive_read_support_format_tar(&C.archive)
@@ -107,24 +107,24 @@ pub fn read_pkg(pkg_path string) ?Pkg {

	a := C.archive_read_new()
	entry := C.archive_entry_new()
	mut r := 0

	// Since 2020, all newly built Arch packages use zstd
	C.archive_read_support_filter_zstd(a)
	C.archive_read_support_filter_gzip(a)
	// The content should always be a tarball
	C.archive_read_support_format_tar(a)

	// TODO find out where this 10240 comes from
	r = C.archive_read_open_filename(a, &char(pkg_path.str), 10240)
	defer {
		C.archive_read_free(a)
	}
	r := C.archive_read_open_filename(a, &char(pkg_path.str), 10240)

	if r != C.ARCHIVE_OK {
		return error('Failed to open package.')
	}

	mut buf := voidptr(0)
	defer {
		C.archive_read_free(a)
	}

	mut files := []string{}
	mut pkg_info := PkgInfo{}
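
For orientation, the libarchive calls above follow a simple flow: enable the zstd/gzip filters and the tar format, open the package file, and bail out unless the open call returns `ARCHIVE_OK`, then walk the entries. A rough Python analogue of that flow (a sketch only, not the V implementation; it assumes a gzip-compressed test package such as the ones generated by the script at the end of this diff, since the stdlib `tarfile` module used here does not read zstd):

```python
# Sketch only: mirrors the high-level flow of read_pkg for a .pkg.tar.gz file.
import tarfile

def read_pkg_sketch(pkg_path: str):
    files = []            # every entry name in the package
    pkg_info_text = None  # contents of .PKGINFO, if present

    with tarfile.open(pkg_path, "r:gz") as tar:  # open + filter/format support
        for entry in tar:                        # iterate over archive entries
            files.append(entry.name)

            if entry.name == ".PKGINFO":
                # Read this entry's data instead of skipping it
                pkg_info_text = tar.extractfile(entry).read().decode()

    return pkg_info_text, files
```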
@@ -142,7 +142,7 @@ pub fn read_pkg(pkg_path string) ?Pkg {
		size := C.archive_entry_size(entry)

		// TODO can this unsafe block be avoided?
		buf = unsafe { malloc(size) }
		buf := unsafe { malloc(size) }
		defer {
			unsafe {
				free(buf)
@@ -150,10 +150,9 @@ pub fn read_pkg(pkg_path string) ?Pkg {
		}
		C.archive_read_data(a, buf, size)

		unsafe {
			println(cstring_to_vstring(buf))
		}
		pkg_info = parse_pkg_info_string(unsafe { cstring_to_vstring(buf) }) ?
		pkg_text := unsafe { buf.vstring_with_len(size).clone() }

		pkg_info = parse_pkg_info_string(pkg_text) ?
	} else {
		C.archive_read_data_skip(a)
	}
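
The `pkg_text` handed to `parse_pkg_info_string` above is the plain `key = value` text of `.PKGINFO`, the same format the test script's `create_random_pkginfo` emits. A minimal parsing sketch (not the V parser) to illustrate that format:

```python
# Sketch of the .PKGINFO format, not the V parser: one "key = value" pair per
# line; comment lines start with "#". Only the keys generated by the test
# script are shown in the example call; real packages carry more fields.
def parse_pkg_info_sketch(pkg_info: str) -> dict:
    info = {}
    for line in pkg_info.splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        key, _, value = line.partition(" = ")
        info[key] = value
    return info

print(parse_pkg_info_sketch("pkgname = alpha-bravo\npkgver = 0.1.0\narch = x86_64"))
# {'pkgname': 'alpha-bravo', 'pkgver': '0.1.0', 'arch': 'x86_64'}
```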
@@ -93,53 +93,3 @@ fn (r &Repo) add(pkg &package.Pkg) ?bool {
fn (r &Repo) pkg_path(pkg &package.Pkg) string {
	return os.join_path(r.repo_dir, '$pkg.info.name-$pkg.info.version')
}

// Re-generate the repo archive files
fn (r &Repo) sync() ? {
	lock r.mutex {
		a := C.archive_write_new()
		entry := C.archive_entry_new()
		st := C.stat{}
		buf := [8192]byte{}

		// This makes the archive a gzip-compressed tarball
		C.archive_write_add_filter_gzip(a)
		C.archive_write_set_format_pax_restricted(a)

		repo_path := os.join_path_single(r.repo_dir, 'repo.db')

		C.archive_write_open_filename(a, &char(repo_path.str))

		// Iterate over each directory
		for d in os.ls(r.repo_dir) ?.filter(os.is_dir(os.join_path_single(r.repo_dir,
			it))) {
			inner_path := os.join_path_single(d, 'desc')
			actual_path := os.join_path_single(r.repo_dir, inner_path)

			unsafe {
				C.stat(&char(actual_path.str), &st)
			}

			C.archive_entry_set_pathname(entry, &char(inner_path.str))
			C.archive_entry_copy_stat(entry, &st)
			// C.archive_entry_set_size(entry, st.st_size)
			// C.archive_entry_set_filetype(entry, C.AE_IFREG)
			// C.archive_entry_set_perm(entry, 0o644)
			C.archive_write_header(a, entry)

			fd := C.open(&char(actual_path.str), C.O_RDONLY)
			mut len := C.read(fd, &buf, sizeof(buf))

			for len > 0 {
				C.archive_write_data(a, &buf, len)
				len = C.read(fd, &buf, sizeof(buf))
			}
			C.close(fd)

			C.archive_entry_clear(entry)
		}

		C.archive_write_close(a)
		C.archive_write_free(a)
	}
}
@@ -0,0 +1,80 @@
module repo

import os

fn archive_add_entry(archive &C.archive, entry &C.archive_entry, file_path &string, inner_path &string) {
	st := C.stat{}

	unsafe {
		C.stat(&char(file_path.str), &st)
	}

	C.archive_entry_set_pathname(entry, &char(inner_path.str))
	C.archive_entry_copy_stat(entry, &st)
	C.archive_write_header(archive, entry)

	mut fd := C.open(&char(file_path.str), C.O_RDONLY)
	defer {
		C.close(fd)
	}

	// Write the file to the archive
	buf := [8192]byte{}
	mut len := C.read(fd, &buf, sizeof(buf))

	for len > 0 {
		C.archive_write_data(archive, &buf, len)

		len = C.read(fd, &buf, sizeof(buf))
	}
}

// Re-generate the repo archive files
fn (r &Repo) sync() ? {
	// TODO also write files archive
	lock r.mutex {
		a_db := C.archive_write_new()
		a_files := C.archive_write_new()

		entry := C.archive_entry_new()

		// This makes the archive a gzip-compressed tarball
		C.archive_write_add_filter_gzip(a_db)
		C.archive_write_set_format_pax_restricted(a_db)
		C.archive_write_add_filter_gzip(a_files)
		C.archive_write_set_format_pax_restricted(a_files)

		// TODO add symlink to .tar.gz version
		db_path := os.join_path_single(r.repo_dir, 'repo.db')
		files_path := os.join_path_single(r.repo_dir, 'repo.files')

		C.archive_write_open_filename(a_db, &char(db_path.str))
		C.archive_write_open_filename(a_files, &char(files_path.str))

		// Iterate over each directory
		for d in os.ls(r.repo_dir) ?.filter(os.is_dir(os.join_path_single(r.repo_dir,
			it))) {
			// desc
			mut inner_path := os.join_path_single(d, 'desc')
			mut actual_path := os.join_path_single(r.repo_dir, inner_path)

			archive_add_entry(a_db, entry, actual_path, inner_path)
			archive_add_entry(a_files, entry, actual_path, inner_path)

			C.archive_entry_clear(entry)

			// files
			inner_path = os.join_path_single(d, 'files')
			actual_path = os.join_path_single(r.repo_dir, inner_path)

			archive_add_entry(a_files, entry, actual_path, inner_path)

			C.archive_entry_clear(entry)
		}

		C.archive_write_close(a_db)
		C.archive_write_free(a_db)
		C.archive_write_close(a_files)
		C.archive_write_free(a_files)
	}
}
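
The rewritten `sync` above now produces two gzip-compressed tarballs in `repo_dir`: `repo.db`, which receives one `desc` entry per package directory, and `repo.files`, which additionally receives that directory's `files` entry. A quick way to inspect the result from Python (an inspection sketch only; the `data/repo` prefix is an example path, substitute your configured repo directory):

```python
# List the entries of the generated repository archives.
import tarfile

def list_repo_archive(path: str) -> list:
    with tarfile.open(path, "r:gz") as tar:
        return tar.getnames()

print(list_repo_archive("data/repo/repo.db"))     # e.g. ['<pkg-dir>/desc', ...]
print(list_repo_archive("data/repo/repo.files"))  # e.g. ['<pkg-dir>/desc', '<pkg-dir>/files', ...]
```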
@@ -76,9 +76,13 @@ fn (mut app App) put_package() web.Result {
	added := app.repo.add_from_path(pkg_path) or {
		app.lerror('Error while adding package: $err.msg')

		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path'.") }

		return app.text('Failed to add package.')
	}
	if !added {
		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path'.") }

		app.lwarn('Duplicate package.')

		return app.text('File already exists.')
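
The `put_package` handler above returns plain-text status strings; the test script at the end of this diff POSTs package files to `/publish` with an `x-api-key` header and treats `"File already exists."` and `"Package added successfully."` as success. A minimal single-upload sketch, assuming the same local defaults the script uses (vieter listening on localhost:8000 with API key `test`):

```python
# Sketch: upload one package file and return the server's response text.
# URL, port and API key are the test-script defaults, not fixed values.
import urllib.request

def publish_package(pkg_path: str) -> str:
    with open(pkg_path, "rb") as f:
        data = f.read()

    req = urllib.request.Request(
        "http://localhost:8000/publish",
        data=data,
        headers={"x-api-key": "test"},
        method="POST",
    )

    with urllib.request.urlopen(req) as resp:
        return resp.read().decode()
```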
@@ -0,0 +1,145 @@
import random
import tempfile
import tarfile
from pathlib import Path
import uuid
import argparse
import asyncio
import aiohttp
import sys


# A list of words the program can choose from
WORDS = ["alpha", "bravo", "charlie", "delta", "echo", "foxtrot", "golf",
         "hotel", "india", "juliet", "kilo", "lima", "mike", "november",
         "oscar", "papa", "quebec", "romeo", "sierra", "tango", "uniform",
         "victor", "whiskey", "xray", "yankee", "zulu"]
SEED = 2022


def random_words(words, min_len, max_len=None):
    """
    Returns a random list of words, with a length randomly chosen between
    min_len and max_len. If max_len is None, it is equal to the length of
    words.
    """
    if max_len is None:
        max_len = len(words)

    k = random.randint(min_len, max_len)

    return random.choices(words, k=k)

def random_lists(words, n, min_len, max_len=None):
    return [random_words(words, min_len, max_len) for _ in range(n)]

def create_random_pkginfo(words, name_min_len, name_max_len):
    """
    Generates a random .PKGINFO
    """
    name = "-".join(random_words(words, name_min_len, name_max_len))
    ver = "0.1.0"  # doesn't matter what it is anyway

    # TODO add random dependencies (all types)

    data = {
        "pkgname": name,
        "pkgbase": name,
        "pkgver": ver,
        "arch": "x86_64"
    }

    return "\n".join(f"{key} = {value}" for key, value in data.items())

def create_random_package(tmpdir, words, pkg_name_min_len, pkg_name_max_len, min_files, max_files, min_filename_len, max_filename_len):
    """
    Creates a random, but valid Arch package, using the provided tmpdir. Output
    is the path to the created package tarball.
    """

    sub_path = tmpdir / uuid.uuid4().hex
    sub_path.mkdir()

    tar_path = sub_path / "archive.pkg.tar.gz"

    def remove_prefix(tar_info):
        tar_info.name = tar_info.name[len(str(sub_path)):]

        return tar_info

    with tarfile.open(tar_path, "w") as tar:
        # Add random .PKGINFO file
        pkginfo_file = sub_path / ".PKGINFO"
        pkginfo_file.write_text(create_random_pkginfo(words, pkg_name_min_len, pkg_name_max_len))
        tar.add(pkginfo_file, filter=remove_prefix)

        # Create random files
        file_count = random.randint(min_files, max_files)

        for words in random_lists(words, file_count, min_filename_len, max_filename_len):
            path = sub_path / 'usr' / ('/'.join(words) + ".txt")
            path.parent.mkdir(parents=True, exist_ok=True)
            path.write_text(' '.join(words))

            tar.add(path, filter=remove_prefix)

    return tar_path


async def check_output(r):
    good = {"File already exists.", "Package added successfully."}
    txt = await r.text()

    return (txt in good, txt)


async def upload_random_package(tar_path, sem):
    async with sem:
        with open(tar_path, 'rb') as f:
            async with aiohttp.ClientSession() as s:
                async with s.post("http://localhost:8000/publish", data=f.read(), headers={"x-api-key": "test"}) as r:
                    return await check_output(r)


async def main():
    parser = argparse.ArgumentParser(description="Test vieter by uploading random package files.")

    parser.add_argument("count", help="How many packages to upload.", default=1, type=int)
    parser.add_argument("-p", "--parallel", help="How many uploads to run in parallel.", default=1, type=int)
    parser.add_argument("-s", "--seed", help="Seed for the randomizer.", default=SEED, type=int)
    parser.add_argument("--min-files", help="Minimum amount of files to add to an archive.", default=5, type=int)
    parser.add_argument("--max-files", help="Max amount of files to add to an archive.", default=10, type=int)
    parser.add_argument("--min-filename-length", help="Minimum amount of words to use for generating filenames.", default=1, type=int)
    parser.add_argument("--max-filename-length", help="Max amount of words to use for generating filenames.", default=5, type=int)
    parser.add_argument("--min-pkg-name-length", help="Minimum amount of words to use for creating the package name.", default=1, type=int)
    parser.add_argument("--max-pkg-name-length", help="Max amount of words to use for creating the package name.", default=3, type=int)
    parser.add_argument("--words", help="Words to use for randomizing.", default=WORDS, type=lambda s: s.split(','))
    # parser.add_argument("--words", help="Words to use for randomizing.", default=WORDS, type=)
    # parser.add_argument("-d", "--dir", help="Directory to create ")

    args = parser.parse_args()

    sem = asyncio.BoundedSemaphore(args.parallel)
    random.seed(args.seed)


    with tempfile.TemporaryDirectory() as tmpdirname:
        tmpdir = Path(tmpdirname)

        # We generate the tars in advance because they're not async anyway
        print("Generating tarballs...")
        tars = {
            create_random_package(tmpdir, args.words, args.min_pkg_name_length, args.max_pkg_name_length, args.min_files, args.max_files, args.min_filename_length, args.max_filename_length)
            for _ in range(args.count)
        }

        print("Sending requests...")
        res = await asyncio.gather(*(upload_random_package(tar, sem) for tar in tars))

        # Generate status report
        if any(not x[0] for x in res):
            sys.exit(1)


if __name__ == "__main__":
    asyncio.run(main())