forked from vieter-v/vieter

Merge pull request 'Buggy prototype of repo-add' (#48) from repo-add into dev
Reviewed-on: Chewing_Bever/vieter#48
commit 8fcec3b18b
				|  | @ -1,15 +1,16 @@ | |||
| matrix: | ||||
|   PLATFORM: | ||||
|     - linux/amd64 | ||||
|     - linux/arm64 | ||||
|     - linux/arm/v7 | ||||
| 
 | ||||
| # These checks already get performed on the feature branches | ||||
| branches: | ||||
|   exclude: [ main, dev ] | ||||
| platform: linux/amd64 | ||||
| platform: ${PLATFORM} | ||||
| 
 | ||||
| pipeline: | ||||
|   vieter: | ||||
|     image: 'chewingbever/vlang:latest' | ||||
|     group: 'build' | ||||
|     commands: | ||||
|       - make vieter | ||||
| 
 | ||||
|   # The default build isn't needed, as Alpine switches to gcc for the compiler anyway | ||||
|   debug: | ||||
|     image: 'chewingbever/vlang:latest' | ||||
|     group: 'build' | ||||
|  | @ -25,4 +26,5 @@ pipeline: | |||
|       - make prod | ||||
|       # Make sure the binary is actually static | ||||
|       - readelf -d pvieter | ||||
|       - du -h pvieter | ||||
|       - '[ "$(readelf -d pvieter | grep NEEDED | wc -l)" = 0 ]' | ||||
|  |  | |||
|  | @ -1,28 +0,0 @@ | |||
| # These checks already get performed on the feature branches | ||||
| branches: | ||||
|   exclude: [ main, dev ] | ||||
| platform: linux/arm64 | ||||
| 
 | ||||
| pipeline: | ||||
|   vieter: | ||||
|     image: 'chewingbever/vlang:latest' | ||||
|     group: 'build' | ||||
|     commands: | ||||
|       - make vieter | ||||
| 
 | ||||
|   debug: | ||||
|     image: 'chewingbever/vlang:latest' | ||||
|     group: 'build' | ||||
|     commands: | ||||
|       - make debug | ||||
| 
 | ||||
|   prod: | ||||
|     image: 'chewingbever/vlang:latest' | ||||
|     environment: | ||||
|       - LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static | ||||
|     group: 'build' | ||||
|     commands: | ||||
|       - make prod | ||||
|       # Make sure the binary is actually static | ||||
|       - readelf -d pvieter | ||||
|       - '[ "$(readelf -d pvieter | grep NEEDED | wc -l)" = 0 ]' | ||||
|  | @ -0,0 +1,17 @@ | |||
| # Because the only step here is a pull_request event, the branch should be dev | ||||
| # because it has to be the target of the pull request | ||||
| branches: dev | ||||
| platform: linux/amd64 | ||||
| 
 | ||||
| pipeline: | ||||
|   dryrun: | ||||
|     image: woodpeckerci/plugin-docker-buildx | ||||
|     secrets: [ docker_username, docker_password ] | ||||
|     settings: | ||||
|       repo: chewingbever/vieter | ||||
|       tag: dev | ||||
|       platforms: [ linux/arm/v7, linux/arm64/v8, linux/amd64 ] | ||||
|       dry_run: true | ||||
|     when: | ||||
|       event: pull_request | ||||
|       branch: dev | ||||
|  | @ -8,7 +8,7 @@ pipeline: | |||
|     settings: | ||||
|       repo: chewingbever/vieter | ||||
|       tag: dev | ||||
|       platforms: [ linux/arm64/v8, linux/amd64 ] | ||||
|       platforms: [ linux/arm/v7, linux/arm64/v8, linux/amd64 ] | ||||
|     when: | ||||
|       event: push | ||||
|       branch: dev | ||||
|  | @ -21,7 +21,7 @@ pipeline: | |||
|       tag: | ||||
|         - latest | ||||
|         - $CI_COMMIT_TAG | ||||
|       platforms: [ linux/arm64/v8, linux/amd64 ] | ||||
|       platforms: [ linux/arm/v7, linux/arm64/v8, linux/amd64 ] | ||||
|     when: | ||||
|       event: tag | ||||
|       branch: main | ||||
|  |  | |||
Makefile (2 changed lines)
							|  | @ -38,7 +38,7 @@ c: | |||
| # Run the server in the default 'data' directory
 | ||||
| .PHONY: run | ||||
| run: vieter | ||||
| 	 API_KEY=test REPO_DIR=data LOG_LEVEL=DEBUG ./vieter | ||||
| 	 API_KEY=test DOWNLOAD_DIR=data/downloads REPO_DIR=data/repo PKG_DIR=data/pkgs LOG_LEVEL=DEBUG ./vieter | ||||
| 
 | ||||
| .PHONY: run-prod | ||||
| run-prod: prod | ||||
|  |  | |||
|  | @ -0,0 +1,100 @@ | |||
| // Bindings for the libarchive library | ||||
| 
 | ||||
| #flag -larchive | ||||
| 
 | ||||
| #include "archive.h" | ||||
| 
 | ||||
| struct C.archive {} | ||||
| 
 | ||||
| // Create a new archive struct for reading | ||||
| fn C.archive_read_new() &C.archive | ||||
| 
 | ||||
| // Configure the archive to work with zstd compression | ||||
| fn C.archive_read_support_filter_zstd(&C.archive) | ||||
| 
 | ||||
| // Configure the archive to work with a tarball content | ||||
| fn C.archive_read_support_format_tar(&C.archive) | ||||
| 
 | ||||
| // Open an archive for reading | ||||
| fn C.archive_read_open_filename(&C.archive, &char, int) int | ||||
| 
 | ||||
| // Go to next entry header in archive | ||||
| fn C.archive_read_next_header(&C.archive, &&C.archive_entry) int | ||||
| 
 | ||||
| // Skip reading the current entry | ||||
| fn C.archive_read_data_skip(&C.archive) | ||||
| 
 | ||||
| // Free an archive | ||||
| fn C.archive_read_free(&C.archive) int | ||||
| 
 | ||||
| // Read an archive entry's contents into a pointer | ||||
| fn C.archive_read_data(&C.archive, voidptr, int) | ||||
| 
 | ||||
| // Create a new archive struct for writing | ||||
| fn C.archive_write_new() &C.archive | ||||
| 
 | ||||
| // Sets the filter for the archive to gzip | ||||
| fn C.archive_write_add_filter_gzip(&C.archive) | ||||
| 
 | ||||
| // Sets to archive to "pax restricted" mode. Libarchive's "pax restricted" | ||||
| // format is a tar format that uses pax extensions only when absolutely | ||||
| // necessary. Most of the time, it will write plain ustar entries. This is the | ||||
| // recommended tar format for most uses. You should explicitly use ustar format | ||||
| // only when you have to create archives that will be readable on older | ||||
| // systems; you should explicitly request pax format only when you need to | ||||
| // preserve as many attributes as possible. | ||||
| fn C.archive_write_set_format_pax_restricted(&C.archive) | ||||
| 
 | ||||
| // Opens up the filename for writing | ||||
| fn C.archive_write_open_filename(&C.archive, &char) | ||||
| 
 | ||||
| // Write an entry to the archive file | ||||
| fn C.archive_write_header(&C.archive, &C.archive_entry) | ||||
| 
 | ||||
| // Write the data in the buffer to the archive | ||||
| fn C.archive_write_data(&C.archive, voidptr, int) | ||||
| 
 | ||||
| // Close an archive for writing | ||||
| fn C.archive_write_close(&C.archive) | ||||
| 
 | ||||
| // Free the write archive | ||||
| fn C.archive_write_free(&C.archive) | ||||
| 
 | ||||
| #include "archive_entry.h" | ||||
| 
 | ||||
| struct C.archive_entry {} | ||||
| 
 | ||||
| // Create a new archive_entry struct | ||||
| fn C.archive_entry_new() &C.archive_entry | ||||
| 
 | ||||
| // Get the filename of the given entry | ||||
| fn C.archive_entry_pathname(&C.archive_entry) &char | ||||
| 
 | ||||
| // Get an entry's file size | ||||
| // Note: this function actually returns an i64, but as this can't be used as an | ||||
| // argument to malloc, we'll just roll with it & assume an entry is never | ||||
| // bigger than 4 gigs | ||||
| fn C.archive_entry_size(&C.archive_entry) int | ||||
| 
 | ||||
| // Set the pathname for the entry | ||||
| fn C.archive_entry_set_pathname(&C.archive_entry, &char) | ||||
| 
 | ||||
| // Sets the file size of the entry | ||||
| fn C.archive_entry_set_size(&C.archive_entry, i64) | ||||
| 
 | ||||
| // Sets the file type for an entry | ||||
| fn C.archive_entry_set_filetype(&C.archive_entry, u32) | ||||
| 
 | ||||
| // Sets the file permissions for an entry | ||||
| fn C.archive_entry_set_perm(&C.archive_entry, int) | ||||
| 
 | ||||
| // Clears out an entry struct | ||||
| fn C.archive_entry_clear(&C.archive_entry) | ||||
| 
 | ||||
| // Copy over a stat struct to the archive entry | ||||
| fn C.archive_entry_copy_stat(entry &C.archive_entry, const_stat &C.stat) | ||||
| 
 | ||||
| #include <string.h> | ||||
| 
 | ||||
| // Compare two C strings; 0 means they're equal | ||||
| fn C.strcmp(&char, &char) int | ||||
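These bindings chain together in the usual libarchive read loop; a minimal sketch using only the calls declared above (the filename is hypothetical, and error handling is reduced to the same ARCHIVE_OK check the module itself uses):

fn example_list_entries() {
	a := C.archive_read_new()
	C.archive_read_support_filter_zstd(a)
	C.archive_read_support_format_tar(a)

	// 10240 is the same block size used elsewhere in this module
	if C.archive_read_open_filename(a, &char('example.pkg.tar.zst'.str), 10240) != C.ARCHIVE_OK {
		eprintln('failed to open archive')
		return
	}

	entry := C.archive_entry_new()

	// Walk the entry headers, printing each pathname & skipping the data
	for C.archive_read_next_header(a, &entry) == C.ARCHIVE_OK {
		println(unsafe { cstring_to_vstring(C.archive_entry_pathname(entry)) })
		C.archive_read_data_skip(a)
	}

	C.archive_read_free(a)
}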
|  | @ -1,53 +0,0 @@ | |||
| module archive | ||||
| 
 | ||||
| import os | ||||
| 
 | ||||
| // Returns the .PKGINFO file's contents & the list of files. | ||||
| pub fn pkg_info(pkg_path string) ?(string, []string) { | ||||
| 	if !os.is_file(pkg_path) { | ||||
| 		return error("'$pkg_path' doesn't exist or isn't a file.") | ||||
| 	} | ||||
| 
 | ||||
| 	a := C.archive_read_new() | ||||
| 	entry := C.archive_entry_new() | ||||
| 	mut r := 0 | ||||
| 
 | ||||
| 	C.archive_read_support_filter_all(a) | ||||
| 	C.archive_read_support_format_all(a) | ||||
| 
 | ||||
| 	// TODO find out where this 10240 comes from | ||||
| 	r = C.archive_read_open_filename(a, &char(pkg_path.str), 10240) | ||||
| 	defer { | ||||
| 		C.archive_read_free(a) | ||||
| 	} | ||||
| 
 | ||||
| 	if r != C.ARCHIVE_OK { | ||||
| 		return error('Failed to open package.') | ||||
| 	} | ||||
| 
 | ||||
| 	// We iterate over every header in search of the .PKGINFO one | ||||
| 	mut buf := voidptr(0) | ||||
| 	mut files := []string{} | ||||
| 	for C.archive_read_next_header(a, &entry) == C.ARCHIVE_OK { | ||||
| 		pathname := C.archive_entry_pathname(entry) | ||||
| 
 | ||||
| 		ignored_names := [c'.BUILDINFO', c'.INSTALL', c'.MTREE', c'.PKGINFO', c'.CHANGELOG'] | ||||
| 		if ignored_names.all(C.strcmp(it, pathname) != 0) { | ||||
| 			unsafe { | ||||
| 				files << cstring_to_vstring(pathname) | ||||
| 			} | ||||
| 		} | ||||
| 
 | ||||
| 		if C.strcmp(pathname, c'.PKGINFO') == 0 { | ||||
| 			size := C.archive_entry_size(entry) | ||||
| 
 | ||||
| 			// TODO can this unsafe block be avoided? | ||||
| 			buf = unsafe { malloc(size) } | ||||
| 			C.archive_read_data(a, voidptr(buf), size) | ||||
| 		} else { | ||||
| 			C.archive_read_data_skip(a) | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	return unsafe { cstring_to_vstring(&char(buf)) }, files | ||||
| } | ||||
|  | @ -1,46 +0,0 @@ | |||
| module archive | ||||
| 
 | ||||
| #flag -larchive | ||||
| 
 | ||||
| #include "archive.h" | ||||
| 
 | ||||
| struct C.archive {} | ||||
| 
 | ||||
| // Create a new archive struct | ||||
| fn C.archive_read_new() &C.archive | ||||
| fn C.archive_read_support_filter_all(&C.archive) | ||||
| fn C.archive_read_support_format_all(&C.archive) | ||||
| 
 | ||||
| // Open an archive for reading | ||||
| fn C.archive_read_open_filename(&C.archive, &char, int) int | ||||
| 
 | ||||
| // Go to next entry header in archive | ||||
| fn C.archive_read_next_header(&C.archive, &&C.archive_entry) int | ||||
| 
 | ||||
| // Skip reading the current entry | ||||
| fn C.archive_read_data_skip(&C.archive) | ||||
| 
 | ||||
| // Free an archive | ||||
| fn C.archive_read_free(&C.archive) int | ||||
| 
 | ||||
| // Read an archive entry's contents into a pointer | ||||
| fn C.archive_read_data(&C.archive, voidptr, int) | ||||
| 
 | ||||
| #include "archive_entry.h" | ||||
| 
 | ||||
| struct C.archive_entry {} | ||||
| 
 | ||||
| // Create a new archive_entry struct | ||||
| fn C.archive_entry_new() &C.archive_entry | ||||
| 
 | ||||
| // Get the filename of the given entry | ||||
| fn C.archive_entry_pathname(&C.archive_entry) &char | ||||
| 
 | ||||
| // Get an entry's file size | ||||
| // Note: this function actually returns an i64, but as this can't be used as an argument to malloc, we'll just roll with it & assume an entry is never bigger than 4 gigs | ||||
| fn C.archive_entry_size(&C.archive_entry) int | ||||
| 
 | ||||
| #include <string.h> | ||||
| 
 | ||||
| // Compare two C strings; 0 means they're equal | ||||
| fn C.strcmp(&char, &char) int | ||||
src/main.v (103 changed lines)
							|  | @ -4,8 +4,6 @@ import web | |||
| import os | ||||
| import log | ||||
| import io | ||||
| import pkg | ||||
| import archive | ||||
| import repo | ||||
| 
 | ||||
| const port = 8000 | ||||
|  | @ -18,6 +16,7 @@ struct App { | |||
| 	web.Context | ||||
| pub: | ||||
| 	api_key string [required; web_global] | ||||
| 	dl_dir  string [required; web_global] | ||||
| pub mut: | ||||
| 	repo repo.Repo [required; web_global] | ||||
| } | ||||
|  | @ -55,62 +54,50 @@ fn reader_to_file(mut reader io.BufferedReader, length int, path string) ? { | |||
| 	} | ||||
| } | ||||
| 
 | ||||
| // fn main2() { | ||||
| // 	// Configure logger | ||||
| // 	log_level_str := os.getenv_opt('LOG_LEVEL') or { 'WARN' } | ||||
| // 	log_level := log.level_from_tag(log_level_str) or { | ||||
| // 		exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.') | ||||
| // 	} | ||||
| // 	log_file := os.getenv_opt('LOG_FILE') or { 'vieter.log' } | ||||
| 
 | ||||
| // 	mut logger := log.Log{ | ||||
| // 		level: log_level | ||||
| // 	} | ||||
| 
 | ||||
| // 	logger.set_full_logpath(log_file) | ||||
| // 	logger.log_to_console_too() | ||||
| 
 | ||||
| // 	defer { | ||||
| // 		logger.info('Flushing log file') | ||||
| // 		logger.flush() | ||||
| // 		logger.close() | ||||
| // 	} | ||||
| 
 | ||||
| // 	// Configure web server | ||||
| // 	key := os.getenv_opt('API_KEY') or { exit_with_message(1, 'No API key was provided.') } | ||||
| // 	repo_dir := os.getenv_opt('REPO_DIR') or { | ||||
| // 		exit_with_message(1, 'No repo directory was configured.') | ||||
| // 	} | ||||
| 
 | ||||
| // 	repo := repo.Repo{ | ||||
| // 		dir: repo_dir | ||||
| // 		name: db_name | ||||
| // 	} | ||||
| 
 | ||||
| // 	// We create the upload directory during startup | ||||
| // 	if !os.is_dir(repo.pkg_dir()) { | ||||
| // 		os.mkdir_all(repo.pkg_dir()) or { | ||||
| // 			exit_with_message(2, "Failed to create repo directory '$repo.pkg_dir()'.") | ||||
| // 		} | ||||
| 
 | ||||
| // 		logger.info("Created package directory '$repo.pkg_dir()'.") | ||||
| // 	} | ||||
| 
 | ||||
| // 	web.run(&App{ | ||||
| // 		logger: logger | ||||
| // 		api_key: key | ||||
| // 		repo: repo | ||||
| // 	}, port) | ||||
| // } | ||||
| 
 | ||||
| fn main() { | ||||
| 	// archive.list_filenames() | ||||
| 	res := pkg.read_pkg('test/homebank-5.5.1-1-x86_64.pkg.tar.zst') or { | ||||
| 		eprintln(err.msg) | ||||
| 		return | ||||
| 	// Configure logger | ||||
| 	log_level_str := os.getenv_opt('LOG_LEVEL') or { 'WARN' } | ||||
| 	log_level := log.level_from_tag(log_level_str) or { | ||||
| 		exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.') | ||||
| 	} | ||||
| 	// println(info) | ||||
| 	println(res.info) | ||||
| 	print(res.files) | ||||
| 	println(res.info.to_desc()) | ||||
| 	log_file := os.getenv_opt('LOG_FILE') or { 'vieter.log' } | ||||
| 
 | ||||
| 	mut logger := log.Log{ | ||||
| 		level: log_level | ||||
| 	} | ||||
| 
 | ||||
| 	logger.set_full_logpath(log_file) | ||||
| 	logger.log_to_console_too() | ||||
| 
 | ||||
| 	defer { | ||||
| 		logger.info('Flushing log file') | ||||
| 		logger.flush() | ||||
| 		logger.close() | ||||
| 	} | ||||
| 
 | ||||
| 	// Configure web server | ||||
| 	key := os.getenv_opt('API_KEY') or { exit_with_message(1, 'No API key was provided.') } | ||||
| 	repo_dir := os.getenv_opt('REPO_DIR') or { | ||||
| 		exit_with_message(1, 'No repo directory was configured.') | ||||
| 	} | ||||
| 	pkg_dir := os.getenv_opt('PKG_DIR') or { | ||||
| 		exit_with_message(1, 'No package directory was configured.') | ||||
| 	} | ||||
| 	dl_dir := os.getenv_opt('DOWNLOAD_DIR') or { | ||||
| 		exit_with_message(1, 'No download directory was configured.') | ||||
| 	} | ||||
| 
 | ||||
| 	// This also creates the directories if needed | ||||
| 	repo := repo.new(repo_dir, pkg_dir) or { | ||||
| 		exit_with_message(1, 'Failed to create required directories.') | ||||
| 	} | ||||
| 
 | ||||
| 	os.mkdir_all(dl_dir) or { exit_with_message(1, 'Failed to create download directory.') } | ||||
| 
 | ||||
| 	web.run(&App{ | ||||
| 		logger: logger | ||||
| 		api_key: key | ||||
| 		dl_dir: dl_dir | ||||
| 		repo: repo | ||||
| 	}, port) | ||||
| } | ||||
|  |  | |||
|  | @ -0,0 +1,259 @@ | |||
| module package | ||||
| 
 | ||||
| import os | ||||
| import util | ||||
| 
 | ||||
| // Represents a read archive | ||||
| struct Pkg { | ||||
| pub: | ||||
| 	path  string   [required] | ||||
| 	info  PkgInfo  [required] | ||||
| 	files []string [required] | ||||
| } | ||||
| 
 | ||||
| // Represents the contents of a .PKGINFO file | ||||
| struct PkgInfo { | ||||
| pub mut: | ||||
| 	// Single values | ||||
| 	name        string | ||||
| 	base        string | ||||
| 	version     string | ||||
| 	description string | ||||
| 	size        i64 | ||||
| 	csize       i64 | ||||
| 	url         string | ||||
| 	arch        string | ||||
| 	build_date  i64 | ||||
| 	packager    string | ||||
| 	// md5sum      string | ||||
| 	// sha256sum   string | ||||
| 	pgpsig     string | ||||
| 	pgpsigsize i64 | ||||
| 	// Array values | ||||
| 	groups       []string | ||||
| 	licenses     []string | ||||
| 	replaces     []string | ||||
| 	depends      []string | ||||
| 	conflicts    []string | ||||
| 	provides     []string | ||||
| 	optdepends   []string | ||||
| 	makedepends  []string | ||||
| 	checkdepends []string | ||||
| } | ||||
| 
 | ||||
| // checksum calculates the md5 & sha256 hash of the package | ||||
| pub fn (p &Pkg) checksum() ?(string, string) { | ||||
| 	return util.hash_file(p.path) | ||||
| } | ||||
| 
 | ||||
| // parse_pkg_info_string parses a PkgInfo object from a string | ||||
| fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo { | ||||
| 	mut pkg_info := PkgInfo{} | ||||
| 
 | ||||
| 	// Iterate over the entire string | ||||
| 	for line in pkg_info_str.split_into_lines() { | ||||
| 		// Skip any comment lines | ||||
| 		if line.starts_with('#') { | ||||
| 			continue | ||||
| 		} | ||||
| 		parts := line.split_nth('=', 2) | ||||
| 
 | ||||
| 		if parts.len < 2 { | ||||
| 			return error('Invalid line detected.') | ||||
| 		} | ||||
| 
 | ||||
| 		value := parts[1].trim_space() | ||||
| 		key := parts[0].trim_space() | ||||
| 
 | ||||
| 		match key { | ||||
| 			// Single values | ||||
| 			'pkgname' { pkg_info.name = value } | ||||
| 			'pkgbase' { pkg_info.base = value } | ||||
| 			'pkgver' { pkg_info.version = value } | ||||
| 			'pkgdesc' { pkg_info.description = value } | ||||
| 			'csize' { continue } | ||||
| 			'size' { pkg_info.size = value.int() } | ||||
| 			'url' { pkg_info.url = value } | ||||
| 			'arch' { pkg_info.arch = value } | ||||
| 			'builddate' { pkg_info.build_date = value.int() } | ||||
| 			'packager' { pkg_info.packager = value } | ||||
| 			'md5sum' { continue } | ||||
| 			'sha256sum' { continue } | ||||
| 			'pgpsig' { pkg_info.pgpsig = value } | ||||
| 			'pgpsigsize' { pkg_info.pgpsigsize = value.int() } | ||||
| 			// Array values | ||||
| 			'group' { pkg_info.groups << value } | ||||
| 			'license' { pkg_info.licenses << value } | ||||
| 			'replaces' { pkg_info.replaces << value } | ||||
| 			'depend' { pkg_info.depends << value } | ||||
| 			'conflict' { pkg_info.conflicts << value } | ||||
| 			'provides' { pkg_info.provides << value } | ||||
| 			'optdepend' { pkg_info.optdepends << value } | ||||
| 			'makedepend' { pkg_info.makedepends << value } | ||||
| 			'checkdepend' { pkg_info.checkdepends << value } | ||||
| 			else { return error("Invalid key '$key'.") } | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	return pkg_info | ||||
| } | ||||
| 
 | ||||
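For context, .PKGINFO is a flat `key = value` listing emitted by makepkg. A hedged sketch of feeding one through the parser, with invented values (the function is module-private, so this assumes we're inside the package module):

pkg_info_str := 'pkgname = vieter
pkgbase = vieter
pkgver = 0.1.0-1
pkgdesc = Lightweight Arch repository server
size = 1234567
arch = x86_64
builddate = 1648000000
packager = Example Packager <example@example.com>'

info := parse_pkg_info_string(pkg_info_str) or { panic(err) }
assert info.name == 'vieter'
assert info.size == 1234567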
| // read_pkg extracts the file list & .PKGINFO contents from an archive | ||||
| // NOTE: this function currently only supports zstd-compressed tarballs | ||||
| pub fn read_pkg(pkg_path string) ?Pkg { | ||||
| 	if !os.is_file(pkg_path) { | ||||
| 		return error("'$pkg_path' doesn't exist or isn't a file.") | ||||
| 	} | ||||
| 
 | ||||
| 	a := C.archive_read_new() | ||||
| 	entry := C.archive_entry_new() | ||||
| 	mut r := 0 | ||||
| 
 | ||||
| 	// Since 2020, all newly built Arch packages use zstd | ||||
| 	C.archive_read_support_filter_zstd(a) | ||||
| 	// The content should always be a tarball | ||||
| 	C.archive_read_support_format_tar(a) | ||||
| 
 | ||||
| 	// TODO find out where this 10240 comes from | ||||
| 	r = C.archive_read_open_filename(a, &char(pkg_path.str), 10240) | ||||
| 	defer { | ||||
| 		C.archive_read_free(a) | ||||
| 	} | ||||
| 
 | ||||
| 	if r != C.ARCHIVE_OK { | ||||
| 		return error('Failed to open package.') | ||||
| 	} | ||||
| 
 | ||||
| 	mut buf := voidptr(0) | ||||
| 	mut files := []string{} | ||||
| 	mut pkg_info := PkgInfo{} | ||||
| 
 | ||||
| 	for C.archive_read_next_header(a, &entry) == C.ARCHIVE_OK { | ||||
| 		pathname := C.archive_entry_pathname(entry) | ||||
| 
 | ||||
| 		ignored_names := [c'.BUILDINFO', c'.INSTALL', c'.MTREE', c'.PKGINFO', c'.CHANGELOG'] | ||||
| 		if ignored_names.all(C.strcmp(it, pathname) != 0) { | ||||
| 			unsafe { | ||||
| 				files << cstring_to_vstring(pathname) | ||||
| 			} | ||||
| 		} | ||||
| 
 | ||||
| 		if C.strcmp(pathname, c'.PKGINFO') == 0 { | ||||
| 			size := C.archive_entry_size(entry) | ||||
| 
 | ||||
| 			// TODO can this unsafe block be avoided? | ||||
| 			buf = unsafe { malloc(size) } | ||||
| 			C.archive_read_data(a, buf, size) | ||||
| 
 | ||||
| 			unsafe { | ||||
| 				println(cstring_to_vstring(buf)) | ||||
| 			} | ||||
| 			pkg_info = parse_pkg_info_string(unsafe { cstring_to_vstring(buf) }) ? | ||||
| 
 | ||||
| 			unsafe { | ||||
| 				free(buf) | ||||
| 			} | ||||
| 		} else { | ||||
| 			C.archive_read_data_skip(a) | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	pkg_info.csize = i64(os.file_size(pkg_path)) | ||||
| 
 | ||||
| 	return Pkg{ | ||||
| 		path: pkg_path | ||||
| 		info: pkg_info | ||||
| 		files: files | ||||
| 	} | ||||
| } | ||||
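From the caller's side, a usage sketch (the package path is hypothetical):

pkg := read_pkg('vieter-0.1.0-1-x86_64.pkg.tar.zst') or { panic(err) }
println(pkg.info.name) // package name parsed from .PKGINFO
println(pkg.files.len) // number of non-metadata entries
println(pkg.to_desc()) // the desc representation generated below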
| 
 | ||||
| fn format_entry(key string, value string) string { | ||||
| 	return '\n%$key%\n$value\n' | ||||
| } | ||||
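format_entry produces the `%KEY%` block layout that desc files are built from, so for example:

assert format_entry('NAME', 'vieter') == '\n%NAME%\nvieter\n'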
| 
 | ||||
| // filename returns the correct filename of the package file | ||||
| pub fn (pkg &Pkg) filename() string { | ||||
| 	p := pkg.info | ||||
| 
 | ||||
| 	return '$p.name-$p.version-${p.arch}.pkg.tar.zst' | ||||
| } | ||||
| 
 | ||||
| // to_desc returns a valid string representation of the desc file | ||||
| // TODO calculate md5 & sha256 instead of believing the file | ||||
| pub fn (pkg &Pkg) to_desc() string { | ||||
| 	p := pkg.info | ||||
| 
 | ||||
| 	// filename | ||||
| 	mut desc := '%FILENAME%\n$pkg.filename()\n' | ||||
| 
 | ||||
| 	desc += format_entry('NAME', p.name) | ||||
| 	desc += format_entry('BASE', p.base) | ||||
| 	desc += format_entry('VERSION', p.version) | ||||
| 
 | ||||
| 	if p.description.len > 0 { | ||||
| 		desc += format_entry('DESC', p.description) | ||||
| 	} | ||||
| 
 | ||||
| 	if p.groups.len > 0 { | ||||
| 		desc += format_entry('GROUPS', p.groups.join_lines()) | ||||
| 	} | ||||
| 
 | ||||
| 	desc += format_entry('CSIZE', p.csize.str()) | ||||
| 	desc += format_entry('ISIZE', p.size.str()) | ||||
| 
 | ||||
| 	md5sum, _ := pkg.checksum() or { '', '' } | ||||
| 
 | ||||
| 	desc += format_entry('MD5SUM', md5sum) | ||||
| 
 | ||||
| 	// TODO add this | ||||
| 	// desc += format_entry('SHA256SUM', sha256sum) | ||||
| 
 | ||||
| 	// TODO add pgpsig stuff | ||||
| 
 | ||||
| 	if p.url.len > 0 { | ||||
| 		desc += format_entry('URL', p.url) | ||||
| 	} | ||||
| 
 | ||||
| 	if p.licenses.len > 0 { | ||||
| 		desc += format_entry('LICENSE', p.licenses.join_lines()) | ||||
| 	} | ||||
| 
 | ||||
| 	desc += format_entry('ARCH', p.arch) | ||||
| 	desc += format_entry('BUILDDATE', p.build_date.str()) | ||||
| 	desc += format_entry('PACKAGER', p.packager) | ||||
| 
 | ||||
| 	if p.replaces.len > 0 { | ||||
| 		desc += format_entry('REPLACES', p.replaces.join_lines()) | ||||
| 	} | ||||
| 
 | ||||
| 	if p.conflicts.len > 0 { | ||||
| 		desc += format_entry('CONFLICTS', p.conflicts.join_lines()) | ||||
| 	} | ||||
| 
 | ||||
| 	if p.provides.len > 0 { | ||||
| 		desc += format_entry('PROVIDES', p.provides.join_lines()) | ||||
| 	} | ||||
| 
 | ||||
| 	if p.depends.len > 0 { | ||||
| 		desc += format_entry('DEPENDS', p.depends.join_lines()) | ||||
| 	} | ||||
| 
 | ||||
| 	if p.optdepends.len > 0 { | ||||
| 		desc += format_entry('OPTDEPENDS', p.optdepends.join_lines()) | ||||
| 	} | ||||
| 
 | ||||
| 	if p.makedepends.len > 0 { | ||||
| 		desc += format_entry('MAKEDEPENDS', p.makedepends.join_lines()) | ||||
| 	} | ||||
| 
 | ||||
| 	if p.checkdepends.len > 0 { | ||||
| 		desc += format_entry('CHECKDEPENDS', p.checkdepends.join_lines()) | ||||
| 	} | ||||
| 
 | ||||
| 	return '$desc\n' | ||||
| } | ||||
| 
 | ||||
| // to_files returns a valid string representation of the files file | ||||
| pub fn (pkg &Pkg) to_files() string { | ||||
| 	return '%FILES%\n$pkg.files.join_lines()\n' | ||||
| } | ||||
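For a hypothetical package shipping a single binary, the files representation would come out as:

// assuming pkg.files == ['usr/', 'usr/bin/', 'usr/bin/vieter']
println(pkg.to_files())
// %FILES%
// usr/
// usr/bin/
// usr/bin/vieter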
src/pkg.v (108 changed lines)
							|  | @ -1,108 +0,0 @@ | |||
| module pkg | ||||
| 
 | ||||
| import archive | ||||
| import time | ||||
| 
 | ||||
| struct Pkg { | ||||
| pub: | ||||
| 	info  PkgInfo  [required] | ||||
| 	files []string [required] | ||||
| } | ||||
| 
 | ||||
| struct PkgInfo { | ||||
| mut: | ||||
| 	// Single values | ||||
| 	name        string | ||||
| 	base        string | ||||
| 	version     string | ||||
| 	description string | ||||
| 	size        i64 | ||||
| 	csize       i64 | ||||
| 	url         string | ||||
| 	arch        string | ||||
| 	build_date  i64 | ||||
| 	packager    string | ||||
| 	md5sum      string | ||||
| 	sha256sum   string | ||||
| 	pgpsig      string | ||||
| 	pgpsigsize  i64 | ||||
| 	// Array values | ||||
| 	groups       []string | ||||
| 	licenses     []string | ||||
| 	replaces     []string | ||||
| 	depends      []string | ||||
| 	conflicts    []string | ||||
| 	provides     []string | ||||
| 	optdepends   []string | ||||
| 	makedepends  []string | ||||
| 	checkdepends []string | ||||
| } | ||||
| 
 | ||||
| fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo { | ||||
| 	mut pkg_info := PkgInfo{} | ||||
| 
 | ||||
| 	// Iterate over the entire string | ||||
| 	for line in pkg_info_str.split_into_lines() { | ||||
| 		// Skip any comment lines | ||||
| 		if line.starts_with('#') { | ||||
| 			continue | ||||
| 		} | ||||
| 		parts := line.split_nth('=', 2) | ||||
| 
 | ||||
| 		if parts.len < 2 { | ||||
| 			return error('Invalid line detected.') | ||||
| 		} | ||||
| 
 | ||||
| 		value := parts[1].trim_space() | ||||
| 		key := parts[0].trim_space() | ||||
| 
 | ||||
| 		match key { | ||||
| 			// Single values | ||||
| 			'pkgname' { pkg_info.name = value } | ||||
| 			'pkgbase' { pkg_info.base = value } | ||||
| 			'pkgver' { pkg_info.version = value } | ||||
| 			'pkgdesc' { pkg_info.description = value } | ||||
| 			'csize' { pkg_info.csize = value.int() } | ||||
| 			'size' { pkg_info.size = value.int() } | ||||
| 			'url' { pkg_info.url = value } | ||||
| 			'arch' { pkg_info.arch = value } | ||||
| 			'builddate' { pkg_info.build_date = value.int() } | ||||
| 			'packager' { pkg_info.packager = value } | ||||
| 			'md5sum' { pkg_info.md5sum = value } | ||||
| 			'sha256sum' { pkg_info.sha256sum = value } | ||||
| 			'pgpsig' { pkg_info.pgpsig = value } | ||||
| 			'pgpsigsize' { pkg_info.pgpsigsize = value.int() } | ||||
| 			// Array values | ||||
| 			'group' { pkg_info.groups << value } | ||||
| 			'license' { pkg_info.licenses << value } | ||||
| 			'replaces' { pkg_info.replaces << value } | ||||
| 			'depend' { pkg_info.depends << value } | ||||
| 			'conflict' { pkg_info.conflicts << value } | ||||
| 			'provides' { pkg_info.provides << value } | ||||
| 			'optdepend' { pkg_info.optdepends << value } | ||||
| 			'makedepend' { pkg_info.makedepends << value } | ||||
| 			'checkdepend' { pkg_info.checkdepends << value } | ||||
| 			else { return error("Invalid key '$key'.") } | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	return pkg_info | ||||
| } | ||||
| 
 | ||||
| pub fn read_pkg(pkg_path string) ?Pkg { | ||||
| 	pkg_info_str, files := archive.pkg_info(pkg_path) ? | ||||
| 	pkg_info := parse_pkg_info_string(pkg_info_str) ? | ||||
| 
 | ||||
| 	return Pkg{ | ||||
| 		info: pkg_info | ||||
| 		files: files | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| // Represent a PkgInfo struct as a desc file | ||||
| pub fn (p &PkgInfo) to_desc() string { | ||||
| 	// TODO calculate md5 & sha256 instead of believing the file | ||||
| 	mut desc := '' | ||||
| 
 | ||||
| 	return desc | ||||
| } | ||||
src/repo.v (142 changed lines)
							|  | @ -1,9 +1,13 @@ | |||
| module repo | ||||
| 
 | ||||
| import os | ||||
| import archive | ||||
| import package | ||||
| 
 | ||||
| const pkgs_subpath = 'pkgs' | ||||
| // subpath where the uncompressed version of the files archive is stored | ||||
| const files_subpath = 'files' | ||||
| 
 | ||||
| // subpath where the uncompressed version of the repo archive is stored | ||||
| const repo_subpath = 'repo' | ||||
| 
 | ||||
| // Dummy struct to work around the fact that you can only share structs, maps & | ||||
| // arrays | ||||
|  | @ -22,44 +26,120 @@ pub: | |||
| 	pkg_dir string [required] | ||||
| } | ||||
| 
 | ||||
| // Returns whether the repository contains the given package. | ||||
| pub fn (r &Repo) contains(pkg string) bool { | ||||
| 	return os.exists(os.join_path(r.repo_dir, 'files', pkg)) | ||||
| // new creates a new Repo & creates the directories as needed | ||||
| pub fn new(repo_dir string, pkg_dir string) ?Repo { | ||||
| 	if !os.is_dir(repo_dir) { | ||||
| 		os.mkdir_all(repo_dir) or { return error('Failed to create repo directory.') } | ||||
| 	} | ||||
| 
 | ||||
| 	if !os.is_dir(pkg_dir) { | ||||
| 		os.mkdir_all(pkg_dir) or { return error('Failed to create package directory.') } | ||||
| 	} | ||||
| 
 | ||||
| 	return Repo{ | ||||
| 		repo_dir: repo_dir | ||||
| 		pkg_dir: pkg_dir | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| // Adds the given package to the repo. If false, the package was already | ||||
| // present in the repository. | ||||
| pub fn (r &Repo) add(pkg string) ?bool { | ||||
| 	return false | ||||
| // add_from_path adds a package from an arbitrary path & moves it into the pkgs | ||||
| // directory if necessary. | ||||
| pub fn (r &Repo) add_from_path(pkg_path string) ?bool { | ||||
| 	pkg := package.read_pkg(pkg_path) or { return error('Failed to read package file: $err.msg') } | ||||
| 
 | ||||
| 	added := r.add(pkg) ? | ||||
| 
 | ||||
| 	// If the add was successful, we move the file to the packages directory | ||||
| 	if added { | ||||
| 		dest_path := os.real_path(os.join_path_single(r.pkg_dir, pkg.filename())) | ||||
| 
 | ||||
| 		// Only move the file if it's not already in the package directory | ||||
| 		if dest_path != os.real_path(pkg_path) { | ||||
| 			os.mv(pkg_path, dest_path) ? | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	return added | ||||
| } | ||||
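Driving the repo directly, as main.v now does, looks roughly like this (directory names mirror the Makefile's run target; the package path is illustrative):

r := repo.new('data/repo', 'data/pkgs') or { panic(err) }

added := r.add_from_path('vieter-0.1.0-1-x86_64.pkg.tar.zst') or { panic(err) }
println(added) // false means the package was already present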
| 
 | ||||
| // Re-generate the db & files archives. | ||||
| fn (r &Repo) genenerate() ? { | ||||
| // add adds a given Pkg to the repository | ||||
| fn (r &Repo) add(pkg &package.Pkg) ?bool { | ||||
| 	pkg_dir := r.pkg_path(pkg) | ||||
| 
 | ||||
| 	// We can't add the same package twice | ||||
| 	if os.exists(pkg_dir) { | ||||
| 		return false | ||||
| 	} | ||||
| 
 | ||||
| 	os.mkdir(pkg_dir) or { return error('Failed to create package directory.') } | ||||
| 
 | ||||
| 	os.write_file(os.join_path_single(pkg_dir, 'desc'), pkg.to_desc()) or { | ||||
| 		os.rmdir_all(pkg_dir) ? | ||||
| 
 | ||||
| 		return error('Failed to write desc file.') | ||||
| 	} | ||||
| 	os.write_file(os.join_path_single(pkg_dir, 'files'), pkg.to_files()) or { | ||||
| 		os.rmdir_all(pkg_dir) ? | ||||
| 
 | ||||
| 		return error('Failed to write files file.') | ||||
| 	} | ||||
| 
 | ||||
| 	r.sync() ? | ||||
| 
 | ||||
| 	return true | ||||
| } | ||||
| 
 | ||||
| // Returns path to the given package, prepended with the repo's path. | ||||
| pub fn (r &Repo) pkg_path(pkg string) string { | ||||
| 	return os.join_path_single(r.pkg_dir, pkg) | ||||
| // Returns the path where the given package's desc & files files are stored | ||||
| fn (r &Repo) pkg_path(pkg &package.Pkg) string { | ||||
| 	return os.join_path(r.repo_dir, '$pkg.info.name-$pkg.info.version') | ||||
| } | ||||
| 
 | ||||
| pub fn (r &Repo) exists(pkg string) bool { | ||||
| 	return os.exists(r.pkg_path(pkg)) | ||||
| } | ||||
| 
 | ||||
| // Returns the full path to the database file | ||||
| pub fn (r &Repo) db_path() string { | ||||
| 	return os.join_path_single(r.repo_dir, 'repo.tar.gz') | ||||
| } | ||||
| 
 | ||||
| pub fn (r &Repo) add_package(pkg_path string) ? { | ||||
| 	mut res := os.Result{} | ||||
| 
 | ||||
| // sync re-generates the repo archive files | ||||
| fn (r &Repo) sync() ? { | ||||
| 	lock r.mutex { | ||||
| 		res = os.execute("repo-add '$r.db_path()' '$pkg_path'") | ||||
| 	} | ||||
| 		a := C.archive_write_new() | ||||
| 		entry := C.archive_entry_new() | ||||
| 		st := C.stat{} | ||||
| 		buf := [8192]byte{} | ||||
| 
 | ||||
| 	if res.exit_code != 0 { | ||||
| 		println(res.output) | ||||
| 		return error('repo-add failed.') | ||||
| 		// This makes the archive a gzip-compressed tarball | ||||
| 		C.archive_write_add_filter_gzip(a) | ||||
| 		C.archive_write_set_format_pax_restricted(a) | ||||
| 
 | ||||
| 		repo_path := os.join_path_single(r.repo_dir, 'repo.db') | ||||
| 
 | ||||
| 		C.archive_write_open_filename(a, &char(repo_path.str)) | ||||
| 
 | ||||
| 		// Iterate over each directory | ||||
| 		for d in os.ls(r.repo_dir) ?.filter(os.is_dir(os.join_path_single(r.repo_dir, | ||||
| 			it))) { | ||||
| 			inner_path := os.join_path_single(d, 'desc') | ||||
| 			actual_path := os.join_path_single(r.repo_dir, inner_path) | ||||
| 
 | ||||
| 			unsafe { | ||||
| 				C.stat(&char(actual_path.str), &st) | ||||
| 			} | ||||
| 
 | ||||
| 			C.archive_entry_set_pathname(entry, &char(inner_path.str)) | ||||
| 			C.archive_entry_copy_stat(entry, &st) | ||||
| 			// C.archive_entry_set_size(entry, st.st_size) | ||||
| 			// C.archive_entry_set_filetype(entry, C.AE_IFREG) | ||||
| 			// C.archive_entry_set_perm(entry, 0o644) | ||||
| 			C.archive_write_header(a, entry) | ||||
| 
 | ||||
| 			fd := C.open(&char(actual_path.str), C.O_RDONLY) | ||||
| 			mut len := C.read(fd, &buf, sizeof(buf)) | ||||
| 
 | ||||
| 			for len > 0 { | ||||
| 				C.archive_write_data(a, &buf, len) | ||||
| 				len = C.read(fd, &buf, sizeof(buf)) | ||||
| 			} | ||||
| 			C.close(fd) | ||||
| 
 | ||||
| 			C.archive_entry_clear(entry) | ||||
| 		} | ||||
| 
 | ||||
| 		C.archive_write_close(a) | ||||
| 		C.archive_write_free(a) | ||||
| 	} | ||||
| } | ||||
|  |  | |||
src/routes.v (78 changed lines)
							|  | @ -4,9 +4,11 @@ import web | |||
| import os | ||||
| import repo | ||||
| import time | ||||
| import rand | ||||
| 
 | ||||
| const prefixes = ['B', 'KB', 'MB', 'GB'] | ||||
| 
 | ||||
| // pretty_bytes converts a byte count to a human-readable version | ||||
| fn pretty_bytes(bytes int) string { | ||||
| 	mut i := 0 | ||||
| 	mut n := f32(bytes) | ||||
|  | @ -23,6 +25,7 @@ fn is_pkg_name(s string) bool { | |||
| 	return s.contains('.pkg') | ||||
| } | ||||
| 
 | ||||
| // get_root handles a GET request for a file on the root | ||||
| ['/:filename'; get] | ||||
| fn (mut app App) get_root(filename string) web.Result { | ||||
| 	mut full_path := '' | ||||
|  | @ -36,58 +39,57 @@ fn (mut app App) get_root(filename string) web.Result { | |||
| 	return app.file(full_path) | ||||
| } | ||||
| 
 | ||||
| // ['/pkgs/:pkg'; put] | ||||
| // fn (mut app App) put_package(pkg string) web.Result { | ||||
| // 	if !app.is_authorized() { | ||||
| // 		return app.text('Unauthorized.') | ||||
| // 	} | ||||
| ['/publish'; post] | ||||
| fn (mut app App) put_package() web.Result { | ||||
| 	if !app.is_authorized() { | ||||
| 		return app.text('Unauthorized.') | ||||
| 	} | ||||
| 
 | ||||
| // 	if !is_pkg_name(pkg) { | ||||
| // 		app.lwarn("Invalid package name '$pkg'.") | ||||
| 	mut pkg_path := '' | ||||
| 
 | ||||
| // 		return app.text('Invalid filename.') | ||||
| // 	} | ||||
| 	if length := app.req.header.get(.content_length) { | ||||
| 		// Generate a random filename for the temp file | ||||
| 		pkg_path = os.join_path_single(app.dl_dir, rand.uuid_v4()) | ||||
| 
 | ||||
| // 	if app.repo.exists(pkg) { | ||||
| // 		app.lwarn("Duplicate package '$pkg'") | ||||
| 		for os.exists(pkg_path) { | ||||
| 			pkg_path = os.join_path_single(app.dl_dir, rand.uuid_v4()) | ||||
| 		} | ||||
| 
 | ||||
| // 		return app.text('File already exists.') | ||||
| // 	} | ||||
| 		app.ldebug("Uploading $length (${pretty_bytes(length.int())}) bytes to '$pkg_path'.") | ||||
| 
 | ||||
| // 	pkg_path := app.repo.pkg_path(pkg) | ||||
| 		// This is used to time how long it takes to upload a file | ||||
| 		mut sw := time.new_stopwatch(time.StopWatchOptions{ auto_start: true }) | ||||
| 
 | ||||
| // 	if length := app.req.header.get(.content_length) { | ||||
| // 		app.ldebug("Uploading $length (${pretty_bytes(length.int())}) bytes to package '$pkg'.") | ||||
| 		reader_to_file(mut app.reader, length.int(), pkg_path) or { | ||||
| 			app.lwarn("Failed to upload '$pkg_path'") | ||||
| 
 | ||||
| // 		// This is used to time how long it takes to upload a file | ||||
| // 		mut sw := time.new_stopwatch(time.StopWatchOptions{ auto_start: true }) | ||||
| 			return app.text('Failed to upload file.') | ||||
| 		} | ||||
| 
 | ||||
| // 		reader_to_file(mut app.reader, length.int(), pkg_path) or { | ||||
| // 			app.lwarn("Failed to upload package '$pkg'") | ||||
| 		sw.stop() | ||||
| 		app.ldebug("Upload of '$pkg_path' completed in ${sw.elapsed().seconds():.3}s.") | ||||
| 	} else { | ||||
| 		app.lwarn('Tried to upload package without specifying a Content-Length.') | ||||
| 		return app.text("Content-Type header isn't set.") | ||||
| 	} | ||||
| 
 | ||||
| // 			return app.text('Failed to upload file.') | ||||
| // 		} | ||||
| 	added := app.repo.add_from_path(pkg_path) or { | ||||
| 		app.lerror('Error while adding package: $err.msg') | ||||
| 
 | ||||
| // 		sw.stop() | ||||
| // 		app.ldebug("Upload of package '$pkg' completed in ${sw.elapsed().seconds():.3}s.") | ||||
| // 	} else { | ||||
| // 		app.lwarn("Tried to upload package '$pkg' without specifying a Content-Length.") | ||||
| // 		return app.text("Content-Type header isn't set.") | ||||
| // 	} | ||||
| 		return app.text('Failed to add package.') | ||||
| 	} | ||||
| 	if !added { | ||||
| 		app.lwarn('Duplicate package.') | ||||
| 
 | ||||
| // 	app.repo.add_package(pkg_path) or { | ||||
| // 		app.lwarn("Failed to add package '$pkg' to database.") | ||||
| 		return app.text('File already exists.') | ||||
| 	} | ||||
| 
 | ||||
| // 		os.rm(pkg_path) or { println('Failed to remove $pkg_path') } | ||||
| 	app.linfo("Added '$pkg_path' to repository.") | ||||
| 
 | ||||
| // 		return app.text('Failed to add package to repo.') | ||||
| // 	} | ||||
| 
 | ||||
| // 	app.linfo("Added '$pkg' to repository.") | ||||
| 
 | ||||
| // 	return app.text('Package added successfully.') | ||||
| // } | ||||
| 	return app.text('Package added successfully.') | ||||
| } | ||||
| 
 | ||||
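End to end, publishing a package against this route looks roughly like the sketch below. The host, API key & the 'X-Api-Key' header name are assumptions, since the is_authorized implementation isn't part of this diff:

import os

fn main() {
	// curl sets Content-Length automatically, which put_package requires
	res := os.execute("curl -s -XPOST -H 'X-Api-Key: test' " +
		'--data-binary @vieter-0.1.0-1-x86_64.pkg.tar.zst http://localhost:8000/publish')
	println(res.output) // 'Package added successfully.' on success
}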
| // add_package PUTs a new package to the server | ||||
| ['/add'; put] | ||||
| pub fn (mut app App) add_package() web.Result { | ||||
| 	return app.text('') | ||||
|  |  | |||
|  | @ -0,0 +1,33 @@ | |||
| module util | ||||
| 
 | ||||
| import os | ||||
| import crypto.md5 | ||||
| // import crypto.sha256 | ||||
| 
 | ||||
| // hash_file returns the md5 & sha256 hash of a given file | ||||
| // TODO actually implement sha256 | ||||
| pub fn hash_file(path &string) ?(string, string) { | ||||
| 	file := os.open(path) or { return error('Failed to open file.') } | ||||
| 
 | ||||
| 	mut md5sum := md5.new() | ||||
| 	// mut sha256sum := sha256.new() | ||||
| 
 | ||||
| 	buf_size := int(1_000_000) | ||||
| 	mut buf := []byte{len: buf_size} | ||||
| 	mut bytes_left := os.file_size(path) | ||||
| 
 | ||||
| 	for bytes_left > 0 { | ||||
| 		// TODO check if just breaking here is safe | ||||
| 		bytes_read := file.read(mut buf) or { return error('Failed to read from file.') } | ||||
| 		bytes_left -= u64(bytes_read) | ||||
| 
 | ||||
| 		// For now we'll assume that this always works | ||||
| 		md5sum.write(buf[..bytes_read]) or { | ||||
| 			return error('Failed to update checksum. This should never happen.') | ||||
| 		} | ||||
| 		// sha256sum.write(buf) or {} | ||||
| 	} | ||||
| 
 | ||||
| 	// return md5sum.sum(buf).hex(), sha256sum.sum(buf).hex() | ||||
| 	return md5sum.checksum().hex(), '' | ||||
| } | ||||
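A usage sketch of the helper (the path is hypothetical):

md5sum, _ := util.hash_file('data/pkgs/vieter-0.1.0-1-x86_64.pkg.tar.zst') or { panic(err) }
println(md5sum)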
|  | @ -2,28 +2,34 @@ module web | |||
| 
 | ||||
| import log | ||||
| 
 | ||||
| // log creates a log message with the given level | ||||
| pub fn (mut ctx Context) log(msg &string, level log.Level) { | ||||
| 	lock ctx.logger { | ||||
| 		ctx.logger.send_output(msg, level) | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| // lfatal creates a log message with the fatal level | ||||
| pub fn (mut ctx Context) lfatal(msg &string) { | ||||
| 	ctx.log(msg, log.Level.fatal) | ||||
| } | ||||
| 
 | ||||
| // lerror creates a log message with the error level | ||||
| pub fn (mut ctx Context) lerror(msg &string) { | ||||
| 	ctx.log(msg, log.Level.error) | ||||
| } | ||||
| 
 | ||||
| // lwarn creates a log message with the warn level | ||||
| pub fn (mut ctx Context) lwarn(msg &string) { | ||||
| 	ctx.log(msg, log.Level.warn) | ||||
| } | ||||
| 
 | ||||
| // linfo creates a log message with the info level | ||||
| pub fn (mut ctx Context) linfo(msg &string) { | ||||
| 	ctx.log(msg, log.Level.info) | ||||
| } | ||||
| 
 | ||||
| // ldebug creates a log message with the debug level | ||||
| pub fn (mut ctx Context) ldebug(msg &string) { | ||||
| 	ctx.log(msg, log.Level.debug) | ||||
| } | ||||
|  |  | |||
|  | @ -187,14 +187,14 @@ struct Route { | |||
| } | ||||
| 
 | ||||
| // Defining this method is optional. | ||||
| // This method called at server start. | ||||
| // init_server is called at server start. | ||||
| // You can use it for initializing globals. | ||||
| pub fn (ctx Context) init_server() { | ||||
| 	eprintln('init_server() has been deprecated, please init your web app in `fn main()`') | ||||
| } | ||||
| 
 | ||||
| // Defining this method is optional. | ||||
| // This method called before every request (aka middleware). | ||||
| // before_request is called before every request (aka middleware). | ||||
| // You can use it, for example, to check a user session cookie or add a header. | ||||
| pub fn (ctx Context) before_request() {} | ||||
| 
 | ||||
|  | @ -206,7 +206,7 @@ pub struct Cookie { | |||
| 	http_only bool | ||||
| } | ||||
| 
 | ||||
| // web intern function | ||||
| // send_response_to_client sends a response to the client | ||||
| [manualfree] | ||||
| pub fn (mut ctx Context) send_response_to_client(mimetype string, res string) bool { | ||||
| 	if ctx.done { | ||||
|  | @ -230,33 +230,33 @@ pub fn (mut ctx Context) send_response_to_client(mimetype string, res string) bo | |||
| 	return true | ||||
| } | ||||
| 
 | ||||
| // Response HTTP_OK with s as payload with content-type `text/html` | ||||
| // html responds HTTP_OK with s as payload and content-type `text/html` | ||||
| pub fn (mut ctx Context) html(s string) Result { | ||||
| 	ctx.send_response_to_client('text/html', s) | ||||
| 	return Result{} | ||||
| } | ||||
| 
 | ||||
| // Response HTTP_OK with s as payload with content-type `text/plain` | ||||
| // text responds HTTP_OK with s as payload and content-type `text/plain` | ||||
| pub fn (mut ctx Context) text(s string) Result { | ||||
| 	ctx.send_response_to_client('text/plain', s) | ||||
| 	return Result{} | ||||
| } | ||||
| 
 | ||||
| // Response HTTP_OK with json_s as payload with content-type `application/json` | ||||
| // json<T> responds HTTP_OK with json_s as payload and content-type `application/json` | ||||
| pub fn (mut ctx Context) json<T>(j T) Result { | ||||
| 	json_s := json.encode(j) | ||||
| 	ctx.send_response_to_client('application/json', json_s) | ||||
| 	return Result{} | ||||
| } | ||||
| 
 | ||||
| // Response HTTP_OK with a pretty-printed JSON result | ||||
| // json_pretty<T> responds HTTP_OK with a pretty-printed JSON result | ||||
| pub fn (mut ctx Context) json_pretty<T>(j T) Result { | ||||
| 	json_s := json.encode_pretty(j) | ||||
| 	ctx.send_response_to_client('application/json', json_s) | ||||
| 	return Result{} | ||||
| } | ||||
| 
 | ||||
| // Response HTTP_OK with file as payload | ||||
| // file responds HTTP_OK with the file as payload | ||||
| // This function manually implements responses because it needs to stream the file contents | ||||
| pub fn (mut ctx Context) file(f_path string) Result { | ||||
| 	if ctx.done { | ||||
|  | @ -329,13 +329,13 @@ pub fn (mut ctx Context) file(f_path string) Result { | |||
| 	return Result{} | ||||
| } | ||||
| 
 | ||||
| // Response HTTP_OK with s as payload | ||||
| // ok responds HTTP_OK with s as payload | ||||
| pub fn (mut ctx Context) ok(s string) Result { | ||||
| 	ctx.send_response_to_client(ctx.content_type, s) | ||||
| 	return Result{} | ||||
| } | ||||
| 
 | ||||
| // Response a server error | ||||
| // server_error responds with a server error | ||||
| pub fn (mut ctx Context) server_error(ecode int) Result { | ||||
| 	$if debug { | ||||
| 		eprintln('> ctx.server_error ecode: $ecode') | ||||
|  | @ -347,7 +347,7 @@ pub fn (mut ctx Context) server_error(ecode int) Result { | |||
| 	return Result{} | ||||
| } | ||||
| 
 | ||||
| // Redirect to an url | ||||
| // redirect redirects to a url | ||||
| pub fn (mut ctx Context) redirect(url string) Result { | ||||
| 	if ctx.done { | ||||
| 		return Result{} | ||||
|  | @ -360,7 +360,7 @@ pub fn (mut ctx Context) redirect(url string) Result { | |||
| 	return Result{} | ||||
| } | ||||
| 
 | ||||
| // Send an not_found response | ||||
| // not_found sends a not_found response | ||||
| pub fn (mut ctx Context) not_found() Result { | ||||
| 	if ctx.done { | ||||
| 		return Result{} | ||||
|  | @ -370,7 +370,7 @@ pub fn (mut ctx Context) not_found() Result { | |||
| 	return Result{} | ||||
| } | ||||
| 
 | ||||
| // Sets a cookie | ||||
| // set_cookie Sets a cookie | ||||
| pub fn (mut ctx Context) set_cookie(cookie Cookie) { | ||||
| 	mut cookie_data := []string{} | ||||
| 	mut secure := if cookie.secure { 'Secure;' } else { '' } | ||||
|  | @ -383,17 +383,17 @@ pub fn (mut ctx Context) set_cookie(cookie Cookie) { | |||
| 	ctx.add_header('Set-Cookie', '$cookie.name=$cookie.value; $data') | ||||
| } | ||||
| 
 | ||||
| // Sets the response content type | ||||
| // set_content_type Sets the response content type | ||||
| pub fn (mut ctx Context) set_content_type(typ string) { | ||||
| 	ctx.content_type = typ | ||||
| } | ||||
| 
 | ||||
| // Sets a cookie with a `expire_data` | ||||
| // set_cookie_with_expire_date Sets a cookie with an `expire_date` | ||||
| pub fn (mut ctx Context) set_cookie_with_expire_date(key string, val string, expire_date time.Time) { | ||||
| 	ctx.add_header('Set-Cookie', '$key=$val;  Secure; HttpOnly; expires=$expire_date.utc_string()') | ||||
| } | ||||
| 
 | ||||
| // Gets a cookie by a key | ||||
| // get_cookie Gets a cookie by a key | ||||
| pub fn (ctx &Context) get_cookie(key string) ?string { // TODO refactor | ||||
| 	mut cookie_header := ctx.get_header('cookie') | ||||
| 	if cookie_header == '' { | ||||
|  | @ -413,7 +413,7 @@ pub fn (ctx &Context) get_cookie(key string) ?string { // TODO refactor | |||
| 	return error('Cookie not found') | ||||
| } | ||||
| 
 | ||||
| // Sets the response status | ||||
| // set_status Sets the response status | ||||
| pub fn (mut ctx Context) set_status(code int, desc string) { | ||||
| 	if code < 100 || code > 599 { | ||||
| 		ctx.status = '500 Internal Server Error' | ||||
|  | @ -422,12 +422,12 @@ pub fn (mut ctx Context) set_status(code int, desc string) { | |||
| 	} | ||||
| } | ||||
| 
 | ||||
| // Adds an header to the response with key and val | ||||
| // add_header Adds a header to the response with key and val | ||||
| pub fn (mut ctx Context) add_header(key string, val string) { | ||||
| 	ctx.header.add_custom(key, val) or {} | ||||
| } | ||||
| 
 | ||||
| // Returns the header data from the key | ||||
| // get_header Returns the header data for the given key | ||||
| pub fn (ctx &Context) get_header(key string) string { | ||||
| 	return ctx.req.header.get_custom(key) or { '' } | ||||
| } | ||||
|  | @ -436,7 +436,7 @@ interface DbInterface { | |||
| 	db voidptr | ||||
| } | ||||
| 
 | ||||
| // run_app | ||||
| // run runs the app | ||||
| [manualfree] | ||||
| pub fn run<T>(global_app &T, port int) { | ||||
| 	mut l := net.listen_tcp(.ip6, ':$port') or { panic('failed to listen $err.code $err') } | ||||
|  | @ -478,6 +478,7 @@ pub fn run<T>(global_app &T, port int) { | |||
| 	} | ||||
| } | ||||
| 
 | ||||
| // handle_conn handles a connection | ||||
| [manualfree] | ||||
| fn handle_conn<T>(mut conn net.TcpConn, mut app T, routes map[string]Route) { | ||||
| 	conn.set_read_timeout(30 * time.second) | ||||
|  | @ -615,6 +616,7 @@ fn handle_conn<T>(mut conn net.TcpConn, mut app T, routes map[string]Route) { | |||
| 	conn.write(web.http_404.bytes()) or {} | ||||
| } | ||||
| 
 | ||||
| // route_matches returns whether a route matches | ||||
| fn route_matches(url_words []string, route_words []string) ?[]string { | ||||
| 	// URL path should be at least as long as the route path | ||||
| 	// except for the catchall route (`/:path...`) | ||||
|  | @ -657,7 +659,7 @@ fn route_matches(url_words []string, route_words []string) ?[]string { | |||
| 	return params | ||||
| } | ||||
| 
 | ||||
| // check if request is for a static file and serves it | ||||
| // serve_if_static<T> checks if the request is for a static file and serves it | ||||
| // returns true if we served a static file, false otherwise | ||||
| [manualfree] | ||||
| fn serve_if_static<T>(mut app T, url urllib.URL) bool { | ||||
|  | @ -676,6 +678,7 @@ fn serve_if_static<T>(mut app T, url urllib.URL) bool { | |||
| 	return true | ||||
| } | ||||
| 
 | ||||
| // scan_static_directory makes a static route for each file in a directory | ||||
| fn (mut ctx Context) scan_static_directory(directory_path string, mount_path string) { | ||||
| 	files := os.ls(directory_path) or { panic(err) } | ||||
| 	if files.len > 0 { | ||||
|  | @ -695,7 +698,7 @@ fn (mut ctx Context) scan_static_directory(directory_path string, mount_path str | |||
| 	} | ||||
| } | ||||
| 
 | ||||
| // Handles a directory static | ||||
| // handle_static statically serves a directory | ||||
| // If `root` is set the mount path for the dir will be in '/' | ||||
| pub fn (mut ctx Context) handle_static(directory_path string, root bool) bool { | ||||
| 	if ctx.done || !os.exists(directory_path) { | ||||
|  | @ -724,7 +727,7 @@ pub fn (mut ctx Context) mount_static_folder_at(directory_path string, mount_pat | |||
| 	return true | ||||
| } | ||||
| 
 | ||||
| // Serves a file static | ||||
| // serve_static Serves a file statically | ||||
| // `url` is the access path on the site, `file_path` is the real path to the file, `mime_type` is the file type | ||||
| pub fn (mut ctx Context) serve_static(url string, file_path string) { | ||||
| 	ctx.static_files[url] = file_path | ||||
|  | @ -733,7 +736,7 @@ pub fn (mut ctx Context) serve_static(url string, file_path string) { | |||
| 	ctx.static_mime_types[url] = web.mime_types[ext] | ||||
| } | ||||
| 
 | ||||
| // Returns the ip address from the current user | ||||
| // ip Returns the ip address of the current user | ||||
| pub fn (ctx &Context) ip() string { | ||||
| 	mut ip := ctx.req.header.get(.x_forwarded_for) or { '' } | ||||
| 	if ip == '' { | ||||
|  | @ -749,22 +752,23 @@ pub fn (ctx &Context) ip() string { | |||
| 	return ip | ||||
| } | ||||
| 
 | ||||
| // Set s to the form error | ||||
| // error Sets s as the form error | ||||
| pub fn (mut ctx Context) error(s string) { | ||||
| 	println('web error: $s') | ||||
| 	ctx.form_error = s | ||||
| } | ||||
| 
 | ||||
| // Returns an empty result | ||||
| // not_found Returns an empty result | ||||
| pub fn not_found() Result { | ||||
| 	return Result{} | ||||
| } | ||||
| 
 | ||||
| // send_string writes a string to the connection | ||||
| fn send_string(mut conn net.TcpConn, s string) ? { | ||||
| 	conn.write(s.bytes()) ? | ||||
| } | ||||
| 
 | ||||
| // Do not delete. | ||||
| // filter Do not delete. | ||||
| // It is used by `vlib/v/gen/c/str_intp.v:130` for string interpolation inside web templates | ||||
| // TODO: move it to template render | ||||
| fn filter(s string) string { | ||||
|  |  | |||