diff --git a/.clang-format b/.clang-format deleted file mode 100644 index 2e6afb4..0000000 --- a/.clang-format +++ /dev/null @@ -1,4 +0,0 @@ -# To stay consistent with the V formatting style, we use tabs -UseTab: Always -IndentWidth: 4 -TabWidth: 4 diff --git a/.editorconfig b/.editorconfig index e9c1e63..e23a3c7 100644 --- a/.editorconfig +++ b/.editorconfig @@ -5,5 +5,6 @@ root = true end_of_line = lf insert_final_newline = true -[*.{v,c,h}] +[*.v] +# vfmt wants it :( indent_style = tab diff --git a/.gitignore b/.gitignore index daeb3d3..f27a43f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -vieter.c +*.c /data/ # Build artifacts @@ -26,8 +26,3 @@ gdb.txt # Generated docs _docs/ -docs/resources/_gen/ -/man/ - -# VLS logs -vls.log diff --git a/.gitmodules b/.gitmodules index 24af818..47029a0 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,6 +1,3 @@ [submodule "docs/themes/hugo-book"] path = docs/themes/hugo-book url = https://github.com/alex-shpak/hugo-book -[submodule "src/libvieter"] - path = src/libvieter - url = https://git.rustybever.be/vieter-v/libvieter diff --git a/.woodpecker/arch.yml b/.woodpecker/.arch.yml similarity index 86% rename from .woodpecker/arch.yml rename to .woodpecker/.arch.yml index 7295065..6b8f8f2 100644 --- a/.woodpecker/arch.yml +++ b/.woodpecker/.arch.yml @@ -9,8 +9,7 @@ skip_clone: true pipeline: build: - image: 'git.rustybever.be/vieter-v/vieter-builder' - pull: true + image: 'menci/archlinuxarm:base-devel' commands: # Add the vieter repository so we can use the compiler - echo -e '[vieter]\nServer = https://arch.r8r.be/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf @@ -24,7 +23,7 @@ pipeline: - su builder # Due to a bug with the V compiler, we can't just use the PKGBUILD from # inside the repo - - curl -o PKGBUILD -L https://git.rustybever.be/vieter-v/vieter/raw/branch/dev/PKGBUILD.dev + - curl -OL https://git.rustybever.be/vieter/vieter/raw/branch/dev/PKGBUILD - makepkg -s --noconfirm --needed when: event: push 
diff --git a/.woodpecker/build.yml b/.woodpecker/.build.yml similarity index 73% rename from .woodpecker/build.yml rename to .woodpecker/.build.yml index e288bb2..1698129 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/.build.yml @@ -1,6 +1,3 @@ -variables: - - &vlang_image 'git.rustybever.be/vieter/vlang:5d4c9dc9fc11bf8648541c934adb64f27cb94e37-alpine3.17' - matrix: PLATFORM: - 'linux/amd64' @@ -9,19 +6,10 @@ matrix: platform: ${PLATFORM} pipeline: - install-modules: - image: *vlang_image + debug: + image: 'chewingbever/vlang:latest' pull: true commands: - - export VMODULES=$PWD/.vmodules - - 'cd src && v install' - when: - event: [push, pull_request] - - debug: - image: *vlang_image - commands: - - export VMODULES=$PWD/.vmodules - make when: event: [pull_request] @@ -29,11 +17,11 @@ pipeline: exclude: [main] prod: - image: *vlang_image + image: 'chewingbever/vlang:latest' + pull: true environment: - LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3 -static commands: - - export VMODULES=$PWD/.vmodules # Apparently this -D is *very* important - CFLAGS='-DGC_THREADS=1' make prod # Make sure the binary is actually statically built @@ -47,7 +35,7 @@ pipeline: event: [push, pull_request] upload: - image: *vlang_image + image: 'chewingbever/vlang:latest' secrets: [ s3_username, s3_password ] commands: # https://gist.github.com/JustinTimperio/7c7115f87b775618637d67ac911e595f @@ -57,7 +45,7 @@ pipeline: - export OBJ_PATH="/vieter/commits/$CI_COMMIT_SHA/vieter-$(echo '${PLATFORM}' | sed 's:/:-:g')" - export SIG_STRING="PUT\n\n$CONTENT_TYPE\n$DATE\n$OBJ_PATH" - - export SIGNATURE="$(echo -en $SIG_STRING | openssl dgst -sha1 -hmac $S3_PASSWORD -binary | base64)" + - export SIGNATURE=`echo -en $SIG_STRING | openssl sha1 -hmac $S3_PASSWORD -binary | base64` - > curl --silent diff --git a/.woodpecker/deploy.yml b/.woodpecker/.deploy.yml similarity index 100% rename from .woodpecker/deploy.yml rename to .woodpecker/.deploy.yml diff --git a/.woodpecker/docker.yml 
b/.woodpecker/.docker.yml similarity index 100% rename from .woodpecker/docker.yml rename to .woodpecker/.docker.yml diff --git a/.woodpecker/docs.yml b/.woodpecker/.docs.yml similarity index 58% rename from .woodpecker/docs.yml rename to .woodpecker/.docs.yml index c7ecd59..fc525e2 100644 --- a/.woodpecker/docs.yml +++ b/.woodpecker/.docs.yml @@ -1,49 +1,41 @@ -variables: - - &vlang_image 'git.rustybever.be/vieter/vlang:5d4c9dc9fc11bf8648541c934adb64f27cb94e37-alpine3.17' - platform: 'linux/amd64' branches: exclude: [ main ] pipeline: docs: - image: 'klakegg/hugo:ext-alpine' + image: 'klakegg/hugo:alpine' group: 'generate' commands: - apk add git - make docs + - 'cd docs/public && tar czvf ../../docs.tar.gz *' api-docs: - image: *vlang_image + image: 'chewingbever/vlang:latest' pull: true group: 'generate' commands: - make api-docs + - 'cd src/_docs && tar czvf ../../api-docs.tar.gz *' - slate-docs: - image: 'slatedocs/slate:v2.13.0' - group: 'generate' - # Slate requires a specific directory to run in - commands: - - cd docs/api - - bundle exec middleman build --clean - - archive: - image: 'alpine' - commands: - - cp -r docs/api/build docs/public/api - - 'cd docs/public && tar czvf ../../docs.tar.gz *' - - 'cd ../../src/_docs && tar czvf ../../api-docs.tar.gz *' - when: - event: push - branch: dev - - deploy: + deploy-docs: image: 'curlimages/curl' + group: 'deploy' secrets: - 'site_api_key' commands: - 'curl -XPOST --fail -s -H "Authorization: Bearer $SITE_API_KEY" -T docs.tar.gz https://rustybever.be/api/deploy?dir=docs-vieter' + when: + event: push + branch: dev + + deploy-api-docs: + image: 'curlimages/curl' + group: 'deploy' + secrets: + - 'site_api_key' + commands: - 'curl -XPOST --fail -s -H "Authorization: Bearer $SITE_API_KEY" -T api-docs.tar.gz https://rustybever.be/api/deploy?dir=api-docs-vieter' when: event: push diff --git a/.woodpecker/gitea.yml b/.woodpecker/.gitea.yml similarity index 76% rename from .woodpecker/gitea.yml rename to 
.woodpecker/.gitea.yml index 6079b76..1877050 100644 --- a/.woodpecker/gitea.yml +++ b/.woodpecker/.gitea.yml @@ -1,6 +1,3 @@ -variables: - - &vlang_image 'git.rustybever.be/vieter/vlang:5d4c9dc9fc11bf8648541c934adb64f27cb94e37-alpine3.17' - platform: 'linux/amd64' branches: [ 'main' ] depends_on: @@ -11,13 +8,12 @@ skip_clone: true pipeline: prepare: - image: *vlang_image + image: 'chewingbever/vlang:latest' pull: true secrets: [ s3_username, s3_password ] commands: - mc alias set s3/ https://s3.rustybever.be "$S3_USERNAME" "$S3_PASSWORD" - mc cp -r "s3/vieter/commits/$CI_COMMIT_SHA" . - - mv "$CI_COMMIT_SHA"/vieter-* . when: event: tag @@ -27,8 +23,9 @@ pipeline: - gitea_release_api_key settings: base_url: https://git.rustybever.be - files: vieter-* + files: ${CI_COMMIT_SHA}/* checksum: + - md5 - sha256 title: ${CI_COMMIT_TAG} when: diff --git a/.woodpecker/.lint.yml b/.woodpecker/.lint.yml new file mode 100644 index 0000000..e70648d --- /dev/null +++ b/.woodpecker/.lint.yml @@ -0,0 +1,13 @@ +# These checks already get performed on the feature branches +branches: + exclude: [ main ] +platform: 'linux/amd64' + +pipeline: + lint: + image: 'chewingbever/vlang:latest' + pull: true + commands: + - make lint + when: + event: [ pull_request ] diff --git a/.woodpecker/.test.yml b/.woodpecker/.test.yml new file mode 100644 index 0000000..6b7b646 --- /dev/null +++ b/.woodpecker/.test.yml @@ -0,0 +1,17 @@ +matrix: + PLATFORM: + - 'linux/amd64' + - 'linux/arm64' + +branches: + exclude: [ main ] +platform: ${PLATFORM} + +pipeline: + test: + image: 'chewingbever/vlang:latest' + pull: true + commands: + - make test + when: + event: [pull_request] diff --git a/.woodpecker/arch-rel.yml b/.woodpecker/arch-rel.yml deleted file mode 100644 index 0cdf91d..0000000 --- a/.woodpecker/arch-rel.yml +++ /dev/null @@ -1,40 +0,0 @@ -matrix: - PLATFORM: - - linux/amd64 - - linux/arm64 - -platform: ${PLATFORM} -branches: [main] -skip_clone: true - -pipeline: - build: - image: 
'git.rustybever.be/vieter-v/vieter-builder' - pull: true - commands: - # Add the vieter repository so we can use the compiler - - echo -e '[vieter]\nServer = https://arch.r8r.be/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf - # Update packages - - pacman -Syu --noconfirm - # Create non-root user to perform build & switch to their home - - groupadd -g 1000 builder - - useradd -mg builder builder - - chown -R builder:builder "$PWD" - - "echo 'builder ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers" - - su builder - # Due to a bug with the V compiler, we can't just use the PKGBUILD from - # inside the repo - - curl -OL "https://git.rustybever.be/vieter-v/vieter/raw/tag/$CI_COMMIT_TAG/PKGBUILD" - - makepkg -s --noconfirm --needed - when: - event: tag - - publish: - image: 'curlimages/curl' - commands: - # Publish the package - - 'for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $VIETER_API_KEY" https://arch.r8r.be/vieter/publish; done' - secrets: - - vieter_api_key - when: - event: tag diff --git a/.woodpecker/lint.yml b/.woodpecker/lint.yml deleted file mode 100644 index 39918a9..0000000 --- a/.woodpecker/lint.yml +++ /dev/null @@ -1,27 +0,0 @@ -variables: - - &vlang_image 'git.rustybever.be/vieter/vlang:5d4c9dc9fc11bf8648541c934adb64f27cb94e37-alpine3.17' - -# These checks already get performed on the feature branches -branches: - exclude: [ main ] -platform: 'linux/amd64' - -pipeline: - # vfmt seems to get confused if these aren't present - install-modules: - image: *vlang_image - pull: true - commands: - - export VMODULES=$PWD/.vmodules - - 'cd src && v install' - when: - event: [pull_request] - - lint: - image: *vlang_image - pull: true - commands: - - export VMODULES=$PWD/.vmodules - - make lint - when: - event: [pull_request] diff --git a/.woodpecker/man.yml b/.woodpecker/man.yml deleted file mode 100644 index 23330f3..0000000 --- a/.woodpecker/man.yml +++ /dev/null @@ -1,45 +0,0 @@ -variables: - - &vlang_image 
'git.rustybever.be/vieter/vlang:5d4c9dc9fc11bf8648541c934adb64f27cb94e37-alpine3.17' - -platform: 'linux/amd64' -branches: - exclude: [ main ] - -depends_on: - - build - -pipeline: - install-modules: - image: *vlang_image - pull: true - commands: - - export VMODULES=$PWD/.vmodules - - 'cd src && v install' - - generate: - image: *vlang_image - commands: - # - curl -o vieter -L "https://s3.rustybever.be/vieter/commits/$CI_COMMIT_SHA/vieter-linux-amd64" - # - chmod +x vieter - - export VMODULES=$PWD/.vmodules - - make - - ./vieter man man - - cd man - - # Generate an HTML page from each man page - - for f in $(ls -1 *.1); do mandoc -Thtml -O style=mandoc.css,man=%N.%S.html $f > "$f.html"; done - - # Download the mandoc.css file from the official site - - curl -o mandoc.css -L https://mandoc.bsd.lv/mandoc.css - - - tar czvf ../man.tar.gz *.html mandoc.css - - deploy: - image: 'curlimages/curl' - secrets: - - 'site_api_key' - commands: - - 'curl -XPOST --fail -s -H "Authorization: Bearer $SITE_API_KEY" -T man.tar.gz https://rustybever.be/api/deploy?dir=man-vieter' - when: - event: push - branch: dev diff --git a/.woodpecker/test.yml b/.woodpecker/test.yml deleted file mode 100644 index ba93957..0000000 --- a/.woodpecker/test.yml +++ /dev/null @@ -1,30 +0,0 @@ -variables: - - &vlang_image 'git.rustybever.be/vieter/vlang:5d4c9dc9fc11bf8648541c934adb64f27cb94e37-alpine3.17' - -matrix: - PLATFORM: - - 'linux/amd64' - - 'linux/arm64' - -branches: - exclude: [ main ] -platform: ${PLATFORM} - -pipeline: - install-modules: - image: *vlang_image - pull: true - commands: - - export VMODULES=$PWD/.vmodules - - 'cd src && v install' - when: - event: [pull_request] - - test: - image: *vlang_image - pull: true - commands: - - export VMODULES=$PWD/.vmodules - - make test - when: - event: [pull_request] diff --git a/CHANGELOG.md b/CHANGELOG.md index 871877e..7d9eb4d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,173 +5,14 @@ All notable changes to this project will be documented 
in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [Unreleased](https://git.rustybever.be/vieter-v/vieter/src/branch/dev) - -## [0.6.0](https://git.rustybever.be/vieter-v/vieter/src/tag/0.6.0) - -### Added - -* Metrics endpoint for Prometheus integration -* Search in list of targets using API & CLI -* Allow filtering targets by arch value -* Configurable global timeout for builds - -### Changed - -* Rewrote cron expression logic in C -* Updated codebase to V commit after 0.3.3 -* Agents now use worker threads and no longer spawn a new thread for every - build - -### Fixed - -* Package upload now fails if TCP connection is closed before all bytes have - been received - -### Removed - -* Deprecated cron daemon - -## [0.5.0](https://git.rustybever.be/vieter-v/vieter/src/tag/0.5.0) - -### Added - -* CLI commands for removing packages, arch-repos & repositories - -## [0.5.0-rc.2](https://git.rustybever.be/vieter-v/vieter/src/tag/0.5.0-rc.2) - -### Added - -* API route for removing logs & accompanying CLI command -* Daemon for periodically removing old logs -* CLI flag to filter logs by specific exit codes - -### Changed - -* Use `--long-option` instead of `-long-option` for CLI - -## [0.5.0-rc.1](https://git.rustybever.be/vieter-v/vieter/src/tag/0.5.0-rc.1) - -### Added - -* Allow specifying subdirectory inside Git repository -* Added option to deploy using agent-server architecture instead of cron daemon -* Allow scheduling builds on the server from the CLI tool instead of building - them locally -* Allow force-building packages, meaning the build won't check if the - repository is already up to date - -### Changed - -* Migrated codebase to V 0.3.2 -* Cron expression parser now uses bitfields instead of bool arrays - -### Fixed - -* Arch value for target is now properly set if not provided -* Allow NULL values for branch in database -* 
Endpoint for adding targets now returns the correct id -* CLI now correctly errors and doesn't error when sending requests -* Fixed possible infinite loop when removing old build images -* Check whether build image still exists before starting build -* Don't run makepkg `prepare()` function twice -* Don't buffer stdout in Docker containers - -## [0.4.0](https://git.rustybever.be/vieter-v/vieter/src/tag/0.4.0) - -### Added - -* Server port can now be configured -* Targets now have a 'kind' field describing whether it's a Git repository or a - URL to a PKGBUILD -* Targets with kind 'url' can provide a direct URL to a PKGBUILD instead of - providing a Git repository -* CLI commands for searching the AUR & directly adding packages -* HTTP routes for removing packages, arch-repos & repos -* All endpoints serving files now support HTTP byte range requests -* Better CLI UX - * When adding targets, the ID of the created target is returned - * The `-r` flag only shows raw data of action - * When adding a target, only ID is shown and not surrounding text - * Tabled output returns a tab-separated list (easy to script using - `cut`) - -### Changed - -* Moved all API routes under `/v1` namespace -* Renamed `vieter repos` to `vieter targets` -* Renamed `/api/v1/repos` namespace to `/api/v1/targets` -* Branch name for 'git' targets is now optional; if not provided, the - repository will be cloned with the default branch -* Build containers now explicitely set the PATH variable -* Refactor of web framework -* API endpoints now return id of newly created entries -* Repo POST requests now return information on published package -* `api` can no longer be used as a repository name -* CLI client now allows setting values to an empty value - -### Removed - -* md5 hashes are no longer calculated for packages - -## [0.3.0](https://git.rustybever.be/vieter-v/vieter/src/tag/0.3.0) - -Nothing besides bumping the versions. 
- -## [0.3.0-rc.1](https://git.rustybever.be/vieter-v/vieter/src/tag/0.3.0-rc.1) - -### Added - -* Database migrations -* Improved GitRepo & BuildLog API - * Pagination using `limit` & `offset` query params - * GitRepo: filter by repo - * BuildLog: filter by start & end date, repo, exit code & arch -* CLI flags to take advantage of above API improvements -* Added CLI command to generate all man pages -* PKGBUILDs now install man pages -* Hosted CLI man pages ([vieter(1)](https://rustybever.be/man/vieter/vieter.1.html)) -* Proper HTTP API docs ([link](https://rustybever.be/docs/vieter/api/)) - -### Changed - -* Packages from target repo are available during builds - * This can be used as a basic way to support AUR dependencies, by adding - the dependencies to the same repository -* Every build now updates its packages first instead of solely relying on the - updated builder image -* Build logs now show commands being executed - -### Fixed - -* `POST /api/logs` now correctly uses epoch timestamps instead of strings - -## [0.3.0-alpha.2](https://git.rustybever.be/vieter-v/vieter/src/tag/0.3.0-alpha.2) +## [Unreleased](https://git.rustybever.be/vieter/vieter/src/branch/dev) ### Added * Web API for adding & querying build logs * CLI commands to access build logs API -* Cron build logs are uploaded to above API -* Proper ASCII table output in CLI -* `vieter repos build id` command to run builds locally -### Removed - -* `vieter build` command - * This command was used alongside cron for periodic builds, but this has - been replaced by `vieter cron` - -### Changed - -* `vieter build` command now only builds a single repository & uploads the - build logs -* Official Arch packages are now split between `vieter` & `vieter-git` - * `vieter` is the latest release - * `vieter-git` is the latest commit on the dev branch -* Full refactor of Docker socket code - -## [0.3.0-alpha.1](https://git.rustybever.be/vieter-v/vieter/src/tag/0.3.0-alpha.1) +## 
[0.3.0-alpha.1](https://git.rustybever.be/vieter/vieter/src/tag/0.3.0-alpha.1) ### Changed @@ -190,7 +31,7 @@ Nothing besides bumping the versions. * Binary no longer panics when an env var is missing -## [0.2.0](https://git.rustybever.be/vieter-v/vieter/src/tag/0.2.0) +## [0.2.0](https://git.rustybever.be/vieter/vieter/src/tag/0.2.0) ### Changed @@ -224,13 +65,13 @@ Nothing besides bumping the versions. * Packages with unknown fields in .PKGINFO are now allowed * Old packages are now properly removed -## [0.1.0](https://git.rustybever.be/vieter-v/vieter/src/tag/0.1.0) +## [0.1.0](https://git.rustybever.be/vieter/vieter/src/tag/0.1.0) ### Changed * Improved logging -## [0.1.0-rc.1](https://git.rustybever.be/vieter-v/vieter/src/tag/0.1.0-rc.1) +## [0.1.0-rc.1](https://git.rustybever.be/vieter/vieter/src/tag/0.1.0-rc.1) ### Added diff --git a/Dockerfile b/Dockerfile index a27ad44..2ba6181 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM git.rustybever.be/chewing_bever/vlang:0.3.2 AS builder +FROM chewingbever/vlang:latest AS builder ARG TARGETPLATFORM ARG CI_COMMIT_SHA @@ -23,7 +23,6 @@ RUN if [ -n "${CI_COMMIT_SHA}" ]; then \ "https://s3.rustybever.be/vieter/commits/${CI_COMMIT_SHA}/vieter-$(echo "${TARGETPLATFORM}" | sed 's:/:-:g')" && \ chmod +x vieter ; \ else \ - cd src && v install && cd .. 
&& \ LDFLAGS='-lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3 -static' make prod && \ mv pvieter vieter ; \ fi @@ -37,8 +36,15 @@ ENV PATH=/bin \ COPY --from=builder /app/dumb-init /app/vieter /bin/ +HEALTHCHECK --interval=30s \ + --timeout=3s \ + --start-period=5s \ + CMD /bin/wget --spider http://localhost:8000/health || exit 1 + RUN mkdir /data && \ - chown -R www-data:www-data /data + chown -R www-data:www-data /data && \ + mkdir -p '/var/spool/cron/crontabs' && \ + echo '0 3 * * * /bin/vieter build' | crontab - WORKDIR /data diff --git a/Makefile b/Makefile index 7dda68c..199b99e 100644 --- a/Makefile +++ b/Makefile @@ -1,20 +1,16 @@ # =====CONFIG===== SRC_DIR := src -SRCS != find '$(SRC_DIR)' -iname '*.v' +SOURCES != find '$(SRC_DIR)' -iname '*.v' V_PATH ?= v -V := $(V_PATH) -showcc -gc boehm -d use_openssl -skip-unused +V := $(V_PATH) -showcc -gc boehm all: vieter # =====COMPILATION===== -.PHONY: libvieter -libvieter: - make -C '$(SRC_DIR)/libvieter' CFLAGS='-O3' - # Regular binary -vieter: $(SOURCES) libvieter +vieter: $(SOURCES) $(V) -g -o vieter $(SRC_DIR) # Debug build using gcc @@ -22,7 +18,7 @@ vieter: $(SOURCES) libvieter # multi-threaded and causes issues when running vieter inside gdb. .PHONY: debug debug: dvieter -dvieter: $(SOURCES) libvieter +dvieter: $(SOURCES) $(V_PATH) -showcc -keepc -cg -o dvieter $(SRC_DIR) # Run the debug build inside gdb @@ -33,12 +29,12 @@ gdb: dvieter # Optimised production build .PHONY: prod prod: pvieter -pvieter: $(SOURCES) libvieter +pvieter: $(SOURCES) $(V) -o pvieter -prod $(SRC_DIR) # Only generate C code .PHONY: c -c: $(SOURCES) libvieter +c: $(SOURCES) $(V) -o vieter.c $(SRC_DIR) @@ -64,14 +60,8 @@ api-docs: rm -rf '$(SRC_DIR)/_docs' cd '$(SRC_DIR)' && v doc -all -f html -m -readme . 
-.PHONY: man -man: vieter - rm -rf man - ./vieter man man - # =====OTHER===== -# Linting .PHONY: lint lint: $(V) fmt -verify $(SRC_DIR) @@ -79,33 +69,34 @@ lint: $(V_PATH) missdoc -p $(SRC_DIR) @ [ $$($(V_PATH) missdoc -p $(SRC_DIR) | wc -l) = 0 ] - -# Formatting +# Format the V codebase .PHONY: fmt fmt: $(V) fmt -w $(SRC_DIR) - -# Testing .PHONY: test -test: libvieter - $(V) -g test $(SRC_DIR) +test: + $(V) test $(SRC_DIR) +# Build & patch the V compiler +.PHONY: v +v: v/v +v/v: + git clone --single-branch https://git.rustybever.be/Chewing_Bever/v v + make -C v -# Cleaning .PHONY: clean clean: - rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'pkg' 'src/vieter' *.pkg.tar.zst 'suvieter' 'afvieter' '$(SRC_DIR)/_docs' 'docs/public' - make -C '$(SRC_DIR)/libvieter' clean + rm -rf 'data' 'vieter' 'dvieter' 'pvieter' 'vieter.c' 'dvieterctl' 'vieterctl' 'pkg' 'src/vieter' *.pkg.tar.zst 'suvieter' 'afvieter' '$(SRC_DIR)/_docs' 'docs/public' # =====EXPERIMENTAL===== .PHONY: autofree autofree: afvieter afvieter: $(SOURCES) - $(V) -showcc -autofree -o afvieter $(SRC_DIR) + $(V_PATH) -showcc -autofree -o afvieter $(SRC_DIR) .PHONY: skip-unused skip-unused: suvieter suvieter: $(SOURCES) - $(V) -skip-unused -o suvieter $(SRC_DIR) + $(V_PATH) -showcc -skip-unused -o suvieter $(SRC_DIR) diff --git a/PKGBUILD b/PKGBUILD index e5cde95..87c575f 100644 --- a/PKGBUILD +++ b/PKGBUILD @@ -1,52 +1,32 @@ -# vim: ft=bash # Maintainer: Jef Roosens pkgbase='vieter' pkgname='vieter' -pkgver='0.6.0' +pkgver=0.2.0.r25.g20112b8 pkgrel=1 -pkgdesc="Lightweight Arch repository server & package build system" -depends=('glibc' 'openssl' 'libarchive' 'sqlite') -makedepends=('git' 'vieter-vlang') +depends=('glibc' 'openssl' 'libarchive' 'gc' 'sqlite') +makedepends=('git' 'gcc' 'vieter-v') arch=('x86_64' 'aarch64') -url='https://git.rustybever.be/vieter-v/vieter' +url='https://git.rustybever.be/vieter/vieter' license=('AGPL3') -source=( - 
"$pkgname::git+https://git.rustybever.be/vieter-v/vieter#tag=${pkgver//_/-}" - "libvieter::git+https://git.rustybever.be/vieter-v/libvieter" -) -md5sums=('SKIP' 'SKIP') +source=($pkgname::git+https://git.rustybever.be/vieter/vieter#branch=dev) +md5sums=('SKIP') -prepare() { - cd "${pkgname}" +pkgver() { + cd "$pkgname" - # Add the libvieter submodule - git submodule init - git config submodules.src/libvieter.url "${srcdir}/libvieter" - git -c protocol.file.allow=always submodule update - - export VMODULES="${srcdir}/.vmodules" - - cd src && v install + git describe --long --tags | sed 's/^v//;s/\([^-]*-g\)/r\1/;s/-/./g' } build() { - export VMODULES="$srcdir/.vmodules" - cd "$pkgname" make prod - - # The default CFLAGS for some reason causes vieter to segfault if used - # inside the PKGBUILD. As a workaround, we use tcc to build a debug build - # that does work, so we can generate the manpages. - CFLAGS= LDFLAGS= make man } package() { - install -dm755 "$pkgdir/usr/bin" - install -Dm755 "$pkgname/pvieter" "$pkgdir/usr/bin/vieter" + pkgdesc="Vieter is a lightweight implementation of an Arch repository server." 
- install -dm755 "$pkgdir/usr/share/man/man1" - install -Dm644 "$pkgname/man"/*.1 "$pkgdir/usr/share/man/man1" + install -dm755 "$pkgdir/usr/bin" + install -Dm755 "$pkgbase/pvieter" "$pkgdir/usr/bin/vieter" } diff --git a/PKGBUILD.dev b/PKGBUILD.dev deleted file mode 100644 index 4ea213d..0000000 --- a/PKGBUILD.dev +++ /dev/null @@ -1,60 +0,0 @@ -# vim: ft=bash -# Maintainer: Jef Roosens - -pkgbase='vieter-git' -pkgname='vieter-git' -pkgver=0.2.0.r25.g20112b8 -pkgrel=1 -pkgdesc="Lightweight Arch repository server & package build system (development version)" -depends=('glibc' 'openssl' 'libarchive' 'sqlite') -makedepends=('git' 'vieter-vlang') -arch=('x86_64' 'aarch64') -url='https://git.rustybever.be/vieter-v/vieter' -license=('AGPL3') -source=( - "${pkgname}::git+https://git.rustybever.be/vieter-v/vieter#branch=dev" - "libvieter::git+https://git.rustybever.be/vieter-v/libvieter" -) -md5sums=('SKIP' 'SKIP') -provides=('vieter') -conflicts=('vieter') - -pkgver() { - cd "${pkgname}" - - git describe --long --tags | sed 's/^v//;s/\([^-]*-g\)/r\1/;s/-/./g' -} - -prepare() { - cd "${pkgname}" - - # Add the libvieter submodule - git submodule init - git config submodules.src/libvieter.url "${srcdir}/libvieter" - git -c protocol.file.allow=always submodule update - - export VMODULES="${srcdir}/.vmodules" - - cd src && v install -} - -build() { - export VMODULES="${srcdir}/.vmodules" - - cd "${pkgname}" - - make prod - - # The default CFLAGS for some reason causes vieter to segfault if used - # inside the PKGBUILD. As a workaround, we use tcc to build a debug build - # that does work, so we can generate the manpages. 
- CFLAGS= LDFLAGS= make man -} - -package() { - install -dm755 "${pkgdir}/usr/bin" - install -Dm755 "${pkgname}/pvieter" "${pkgdir}/usr/bin/vieter" - - install -dm755 "${pkgdir}/usr/share/man/man1" - install -Dm644 "${pkgname}/man"/*.1 "${pkgdir}/usr/share/man/man1" -} diff --git a/README.md b/README.md index 6b487b6..08f1e75 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,11 @@ # Vieter +## Documentation + I host documentation for Vieter over at https://rustybever.be/docs/vieter/. API documentation for the current codebase can be found at https://rustybever.be/api-docs/vieter/. -For more information, questions or just a chat, there's -[#vieter:rustybever.be](https://matrix.to/#/#vieter:rustybever.be) on Matrix! - ## Overview Vieter is a restart of the Pieter project. The goal is to create a simple, @@ -21,8 +20,14 @@ quicker. I chose [V](https://vlang.io/) as I've been very intrigued by this language for a while now. I wanted a fast language that I could code while relaxing, without having to exert too much mental effort & V seemed like the right choice for -that. Sadly, this didn't quite turn out the way I expected, but I'm sticking -with it anyways ;p +that. + +### Compiler + +Vieter compiles with the standard Vlang compiler. However, I do maintain a +[mirror](https://git.rustybever.be/Chewing_Bever/v). This is to ensure my CI +does not break without reason, as I control when & how frequently the mirror is +updated to reflect the official repository. ## Features @@ -36,62 +41,17 @@ with it anyways ;p ## Building -Besides a V installer, Vieter also requires the following libraries to work: +In order to build Vieter, you'll need a couple of libraries: +* An installation of V +* gc * libarchive * openssl -* sqlite3 -Vieter also depends on some external V modules which you can install using `cd -src && v install`. Make sure to keep these dependencies up to date using `v -update`. 
- -### Compiler - -V is developed using a specific compiler commit that is usually updated -whenever a new version is released. Information on this can be found in the -[tools](https://git.rustybever.be/vieter-v/tools) repository. - -## Contributing - -If you wish to contribute to the project, please take note of the following: - -* Rebase instead of merging whenever possible, e.g. when updating your branch - with the dev branch. -* Please follow the - [Conventional Commits](https://www.conventionalcommits.org/) style for your - commit messages. - -### Writing documentation - -The `docs` directory contains a Hugo site consisting of all user & -administrator documentation. `docs/api` on the other hand is a -[Slate](https://github.com/slatedocs/slate) project describing the HTTP web -API. - -To modify the Hugo documentation, you'll need to install Hugo. Afterwards, you -can use the following commands inside the `docs` directory: - -```sh -# Build the documentation -hugo - -# Host an auto-refreshing web server with the documentation. Important to note -# is that the files will be at `http://localhost:1313/docs/vieter` instead of -# just `http://localhost:1313/` -hugo server -``` - -For the Slate docs, I personally just start a docker container: - -```sh -docker run \ - --rm \ - -p 4567:4567 \ - --name slate \ - -v $(pwd)/docs/api/source:/srv/slate/source slatedocs/slate serve -``` - -This will make the Slate docs available at http://localhost:4567. Sadly, this -server doesn't auto-refresh, so you'll have to manually refresh your browser -every time you make a change. +**NOTE**: if you encounter any issues compiling Vieter using the absolute +latest version of V, it might be because my mirror is missing a specific commit +that causes issues. For this reason, the `make v` command exists which will +clone my compiler in the `v` directory & build it. Afterwards, you can use this +compiler with make by prepending all make commands with `V_PATH=v/v`. 
If you do +encounter this issue, please let me know so I can update my mirror & the +codebase to fix it! diff --git a/docs/api/.dockerignore b/docs/api/.dockerignore deleted file mode 100644 index f643017..0000000 --- a/docs/api/.dockerignore +++ /dev/null @@ -1,12 +0,0 @@ -.git/ -.github/ -build/ -.editorconfig -.gitattributes -.gitignore -CHANGELOG.md -CODE_OF_CONDUCT.md -deploy.sh -font-selection.json -README.md -Vagrantfile \ No newline at end of file diff --git a/docs/api/.editorconfig b/docs/api/.editorconfig deleted file mode 100644 index 1692977..0000000 --- a/docs/api/.editorconfig +++ /dev/null @@ -1,18 +0,0 @@ -# EditorConfig is awesome: https://EditorConfig.org - -# Top-most EditorConfig file -root = true - -# Unix-style newlines with a newline ending every file -[*] -end_of_line = lf -insert_final_newline = true -indent_style = space -indent_size = 2 -trim_trailing_whitespace = true - -[*.rb] -charset = utf-8 - -[*.md] -trim_trailing_whitespace = false diff --git a/docs/api/.gitattributes b/docs/api/.gitattributes deleted file mode 100644 index 3069c43..0000000 --- a/docs/api/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -source/javascripts/lib/* linguist-vendored diff --git a/docs/api/.gitignore b/docs/api/.gitignore deleted file mode 100644 index 1d5d08d..0000000 --- a/docs/api/.gitignore +++ /dev/null @@ -1,27 +0,0 @@ -*.gem -*.rbc -.bundle -.config -coverage -InstalledFiles -lib/bundler/man -pkg -rdoc -spec/reports -test/tmp -test/version_tmp -tmp -*.DS_STORE -build/ -.cache -.vagrant -.sass-cache - -# YARD artifacts -.yardoc -_yardoc -doc/ -.idea/ - -# Vagrant artifacts -ubuntu-*-console.log diff --git a/docs/api/Gemfile b/docs/api/Gemfile deleted file mode 100644 index 7604fd4..0000000 --- a/docs/api/Gemfile +++ /dev/null @@ -1,13 +0,0 @@ -ruby '>= 2.6' -source 'https://rubygems.org' - -# Middleman -gem 'middleman', '~> 4.4' -gem 'middleman-syntax', '~> 3.2' -gem 'middleman-autoprefixer', '~> 3.0' -gem 'middleman-sprockets', '~> 4.1' -gem 'rouge', 
'~> 3.21' -gem 'redcarpet', '~> 3.5.0' -gem 'nokogiri', '~> 1.13.3' -gem 'sass' -gem 'webrick' diff --git a/docs/api/Gemfile.lock b/docs/api/Gemfile.lock deleted file mode 100644 index bd60f7f..0000000 --- a/docs/api/Gemfile.lock +++ /dev/null @@ -1,145 +0,0 @@ -GEM - remote: https://rubygems.org/ - specs: - activesupport (6.1.4.1) - concurrent-ruby (~> 1.0, >= 1.0.2) - i18n (>= 1.6, < 2) - minitest (>= 5.1) - tzinfo (~> 2.0) - zeitwerk (~> 2.3) - addressable (2.8.0) - public_suffix (>= 2.0.2, < 5.0) - autoprefixer-rails (10.2.5.0) - execjs (< 2.8.0) - backports (3.21.0) - coffee-script (2.4.1) - coffee-script-source - execjs - coffee-script-source (1.12.2) - concurrent-ruby (1.1.9) - contracts (0.13.0) - dotenv (2.7.6) - erubis (2.7.0) - execjs (2.7.0) - fast_blank (1.0.1) - fastimage (2.2.5) - ffi (1.15.4) - haml (5.2.2) - temple (>= 0.8.0) - tilt - hamster (3.0.0) - concurrent-ruby (~> 1.0) - hashie (3.6.0) - i18n (1.6.0) - concurrent-ruby (~> 1.0) - kramdown (2.3.1) - rexml - listen (3.0.8) - rb-fsevent (~> 0.9, >= 0.9.4) - rb-inotify (~> 0.9, >= 0.9.7) - memoist (0.16.2) - middleman (4.4.2) - coffee-script (~> 2.2) - haml (>= 4.0.5) - kramdown (>= 2.3.0) - middleman-cli (= 4.4.2) - middleman-core (= 4.4.2) - middleman-autoprefixer (3.0.0) - autoprefixer-rails (~> 10.0) - middleman-core (>= 4.0.0) - middleman-cli (4.4.2) - thor (>= 0.17.0, < 2.0) - middleman-core (4.4.2) - activesupport (>= 6.1, < 7.0) - addressable (~> 2.4) - backports (~> 3.6) - bundler (~> 2.0) - contracts (~> 0.13.0) - dotenv - erubis - execjs (~> 2.0) - fast_blank - fastimage (~> 2.0) - hamster (~> 3.0) - hashie (~> 3.4) - i18n (~> 1.6.0) - listen (~> 3.0.0) - memoist (~> 0.14) - padrino-helpers (~> 0.15.0) - parallel - rack (>= 1.4.5, < 3) - sassc (~> 2.0) - servolux - tilt (~> 2.0.9) - toml - uglifier (~> 3.0) - webrick - middleman-sprockets (4.1.1) - middleman-core (~> 4.0) - sprockets (>= 3.0) - middleman-syntax (3.2.0) - middleman-core (>= 3.2) - rouge (~> 3.2) - mini_portile2 (2.8.0) 
- minitest (5.14.4) - nokogiri (1.13.4) - mini_portile2 (~> 2.8.0) - racc (~> 1.4) - padrino-helpers (0.15.1) - i18n (>= 0.6.7, < 2) - padrino-support (= 0.15.1) - tilt (>= 1.4.1, < 3) - padrino-support (0.15.1) - parallel (1.21.0) - parslet (2.0.0) - public_suffix (4.0.6) - racc (1.6.0) - rack (2.2.3) - rb-fsevent (0.11.0) - rb-inotify (0.10.1) - ffi (~> 1.0) - redcarpet (3.5.1) - rexml (3.2.5) - rouge (3.28.0) - sass (3.7.4) - sass-listen (~> 4.0.0) - sass-listen (4.0.0) - rb-fsevent (~> 0.9, >= 0.9.4) - rb-inotify (~> 0.9, >= 0.9.7) - sassc (2.4.0) - ffi (~> 1.9) - servolux (0.13.0) - sprockets (3.7.2) - concurrent-ruby (~> 1.0) - rack (> 1, < 3) - temple (0.8.2) - thor (1.1.0) - tilt (2.0.10) - toml (0.3.0) - parslet (>= 1.8.0, < 3.0.0) - tzinfo (2.0.4) - concurrent-ruby (~> 1.0) - uglifier (3.2.0) - execjs (>= 0.3.0, < 3) - webrick (1.7.0) - zeitwerk (2.5.1) - -PLATFORMS - ruby - -DEPENDENCIES - middleman (~> 4.4) - middleman-autoprefixer (~> 3.0) - middleman-sprockets (~> 4.1) - middleman-syntax (~> 3.2) - nokogiri (~> 1.13.3) - redcarpet (~> 3.5.0) - rouge (~> 3.21) - sass - webrick - -RUBY VERSION - ruby 2.7.2p137 - -BUNDLED WITH - 2.2.22 diff --git a/docs/api/config.rb b/docs/api/config.rb deleted file mode 100644 index 6f8b677..0000000 --- a/docs/api/config.rb +++ /dev/null @@ -1,63 +0,0 @@ -# Unique header generation -require './lib/unique_head.rb' - -# Markdown -set :markdown_engine, :redcarpet -set :markdown, - fenced_code_blocks: true, - smartypants: true, - disable_indented_code_blocks: true, - prettify: true, - strikethrough: true, - tables: true, - with_toc_data: true, - no_intra_emphasis: true, - renderer: UniqueHeadCounter - -# Assets -set :css_dir, 'stylesheets' -set :js_dir, 'javascripts' -set :images_dir, 'images' -set :fonts_dir, 'fonts' - -# Activate the syntax highlighter -activate :syntax -ready do - require './lib/monokai_sublime_slate.rb' - require './lib/multilang.rb' -end - -activate :sprockets - -activate :autoprefixer do |config| - 
config.browsers = ['last 2 version', 'Firefox ESR'] - config.cascade = false - config.inline = true -end - -# Github pages require relative links -activate :relative_assets -set :relative_links, true - -# Build Configuration -configure :build do - # We do want to hash woff and woff2 as there's a bug where woff2 will use - # woff asset hash which breaks things. Trying to use a combination of ignore and - # rewrite_ignore does not work as it conflicts weirdly with relative_assets. Disabling - # the .woff2 extension only does not work as .woff will still activate it so have to - # have both. See https://github.com/slatedocs/slate/issues/1171 for more details. - activate :asset_hash, :exts => app.config[:asset_extensions] - %w[.woff .woff2] - # If you're having trouble with Middleman hanging, commenting - # out the following two lines has been known to help - activate :minify_css - activate :minify_javascript - # activate :gzip -end - -# Deploy Configuration -# If you want Middleman to listen on a different port, you can set that below -set :port, 4567 - -helpers do - require './lib/toc_data.rb' -end diff --git a/docs/api/font-selection.json b/docs/api/font-selection.json deleted file mode 100755 index 5e78f5d..0000000 --- a/docs/api/font-selection.json +++ /dev/null @@ -1,148 +0,0 @@ -{ - "IcoMoonType": "selection", - "icons": [ - { - "icon": { - "paths": [ - "M438.857 73.143q119.429 0 220.286 58.857t159.714 159.714 58.857 220.286-58.857 220.286-159.714 159.714-220.286 58.857-220.286-58.857-159.714-159.714-58.857-220.286 58.857-220.286 159.714-159.714 220.286-58.857zM512 785.714v-108.571q0-8-5.143-13.429t-12.571-5.429h-109.714q-7.429 0-13.143 5.714t-5.714 13.143v108.571q0 7.429 5.714 13.143t13.143 5.714h109.714q7.429 0 12.571-5.429t5.143-13.429zM510.857 589.143l10.286-354.857q0-6.857-5.714-10.286-5.714-4.571-13.714-4.571h-125.714q-8 0-13.714 4.571-5.714 3.429-5.714 10.286l9.714 354.857q0 5.714 5.714 10t13.714 4.286h105.714q8 0 13.429-4.286t6-10z" - ], - "attrs": [], - 
"isMulticolor": false, - "tags": [ - "exclamation-circle" - ], - "defaultCode": 61546, - "grid": 14 - }, - "attrs": [], - "properties": { - "id": 100, - "order": 4, - "prevSize": 28, - "code": 58880, - "name": "exclamation-sign", - "ligatures": "" - }, - "setIdx": 0, - "iconIdx": 0 - }, - { - "icon": { - "paths": [ - "M585.143 786.286v-91.429q0-8-5.143-13.143t-13.143-5.143h-54.857v-292.571q0-8-5.143-13.143t-13.143-5.143h-182.857q-8 0-13.143 5.143t-5.143 13.143v91.429q0 8 5.143 13.143t13.143 5.143h54.857v182.857h-54.857q-8 0-13.143 5.143t-5.143 13.143v91.429q0 8 5.143 13.143t13.143 5.143h256q8 0 13.143-5.143t5.143-13.143zM512 274.286v-91.429q0-8-5.143-13.143t-13.143-5.143h-109.714q-8 0-13.143 5.143t-5.143 13.143v91.429q0 8 5.143 13.143t13.143 5.143h109.714q8 0 13.143-5.143t5.143-13.143zM877.714 512q0 119.429-58.857 220.286t-159.714 159.714-220.286 58.857-220.286-58.857-159.714-159.714-58.857-220.286 58.857-220.286 159.714-159.714 220.286-58.857 220.286 58.857 159.714 159.714 58.857 220.286z" - ], - "attrs": [], - "isMulticolor": false, - "tags": [ - "info-circle" - ], - "defaultCode": 61530, - "grid": 14 - }, - "attrs": [], - "properties": { - "id": 85, - "order": 3, - "name": "info-sign", - "prevSize": 28, - "code": 58882 - }, - "setIdx": 0, - "iconIdx": 2 - }, - { - "icon": { - "paths": [ - "M733.714 419.429q0-16-10.286-26.286l-52-51.429q-10.857-10.857-25.714-10.857t-25.714 10.857l-233.143 232.571-129.143-129.143q-10.857-10.857-25.714-10.857t-25.714 10.857l-52 51.429q-10.286 10.286-10.286 26.286 0 15.429 10.286 25.714l206.857 206.857q10.857 10.857 25.714 10.857 15.429 0 26.286-10.857l310.286-310.286q10.286-10.286 10.286-25.714zM877.714 512q0 119.429-58.857 220.286t-159.714 159.714-220.286 58.857-220.286-58.857-159.714-159.714-58.857-220.286 58.857-220.286 159.714-159.714 220.286-58.857 220.286 58.857 159.714 159.714 58.857 220.286z" - ], - "attrs": [], - "isMulticolor": false, - "tags": [ - "check-circle" - ], - "defaultCode": 61528, - "grid": 14 - }, - "attrs": 
[], - "properties": { - "id": 83, - "order": 9, - "prevSize": 28, - "code": 58886, - "name": "ok-sign" - }, - "setIdx": 0, - "iconIdx": 6 - }, - { - "icon": { - "paths": [ - "M658.286 475.429q0-105.714-75.143-180.857t-180.857-75.143-180.857 75.143-75.143 180.857 75.143 180.857 180.857 75.143 180.857-75.143 75.143-180.857zM950.857 950.857q0 29.714-21.714 51.429t-51.429 21.714q-30.857 0-51.429-21.714l-196-195.429q-102.286 70.857-228 70.857-81.714 0-156.286-31.714t-128.571-85.714-85.714-128.571-31.714-156.286 31.714-156.286 85.714-128.571 128.571-85.714 156.286-31.714 156.286 31.714 128.571 85.714 85.714 128.571 31.714 156.286q0 125.714-70.857 228l196 196q21.143 21.143 21.143 51.429z" - ], - "width": 951, - "attrs": [], - "isMulticolor": false, - "tags": [ - "search" - ], - "defaultCode": 61442, - "grid": 14 - }, - "attrs": [], - "properties": { - "id": 2, - "order": 1, - "prevSize": 28, - "code": 58887, - "name": "icon-search" - }, - "setIdx": 0, - "iconIdx": 7 - } - ], - "height": 1024, - "metadata": { - "name": "slate", - "license": "SIL OFL 1.1" - }, - "preferences": { - "showGlyphs": true, - "showQuickUse": true, - "showQuickUse2": true, - "showSVGs": true, - "fontPref": { - "prefix": "icon-", - "metadata": { - "fontFamily": "slate", - "majorVersion": 1, - "minorVersion": 0, - "description": "Based on FontAwesome", - "license": "SIL OFL 1.1" - }, - "metrics": { - "emSize": 1024, - "baseline": 6.25, - "whitespace": 50 - }, - "resetPoint": 58880, - "showSelector": false, - "selector": "class", - "classSelector": ".icon", - "showMetrics": false, - "showMetadata": true, - "showVersion": true, - "ie7": false - }, - "imagePref": { - "prefix": "icon-", - "png": true, - "useClassSelector": true, - "color": 4473924, - "bgColor": 16777215 - }, - "historySize": 100, - "showCodes": true, - "gridSize": 16, - "showLiga": false - } -} diff --git a/docs/api/lib/monokai_sublime_slate.rb b/docs/api/lib/monokai_sublime_slate.rb deleted file mode 100644 index cd2de33..0000000 --- 
a/docs/api/lib/monokai_sublime_slate.rb +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- # -# frozen_string_literal: true - -# this is based on https://github.com/rouge-ruby/rouge/blob/master/lib/rouge/themes/monokai_sublime.rb -# but without the added background, and changed styling for JSON keys to be soft_yellow instead of white - -module Rouge - module Themes - class MonokaiSublimeSlate < CSSTheme - name 'monokai.sublime.slate' - - palette :black => '#000000' - palette :bright_green => '#a6e22e' - palette :bright_pink => '#f92672' - palette :carmine => '#960050' - palette :dark => '#49483e' - palette :dark_grey => '#888888' - palette :dark_red => '#aa0000' - palette :dimgrey => '#75715e' - palette :emperor => '#555555' - palette :grey => '#999999' - palette :light_grey => '#aaaaaa' - palette :light_violet => '#ae81ff' - palette :soft_cyan => '#66d9ef' - palette :soft_yellow => '#e6db74' - palette :very_dark => '#1e0010' - palette :whitish => '#f8f8f2' - palette :orange => '#f6aa11' - palette :white => '#ffffff' - - style Generic::Heading, :fg => :grey - style Literal::String::Regex, :fg => :orange - style Generic::Output, :fg => :dark_grey - style Generic::Prompt, :fg => :emperor - style Generic::Strong, :bold => false - style Generic::Subheading, :fg => :light_grey - style Name::Builtin, :fg => :orange - style Comment::Multiline, - Comment::Preproc, - Comment::Single, - Comment::Special, - Comment, :fg => :dimgrey - style Error, - Generic::Error, - Generic::Traceback, :fg => :carmine - style Generic::Deleted, - Generic::Inserted, - Generic::Emph, :fg => :dark - style Keyword::Constant, - Keyword::Declaration, - Keyword::Reserved, - Name::Constant, - Keyword::Type, :fg => :soft_cyan - style Literal::Number::Float, - Literal::Number::Hex, - Literal::Number::Integer::Long, - Literal::Number::Integer, - Literal::Number::Oct, - Literal::Number, - Literal::String::Char, - Literal::String::Escape, - Literal::String::Symbol, :fg => :light_violet - style 
Literal::String::Doc, - Literal::String::Double, - Literal::String::Backtick, - Literal::String::Heredoc, - Literal::String::Interpol, - Literal::String::Other, - Literal::String::Single, - Literal::String, :fg => :soft_yellow - style Name::Attribute, - Name::Class, - Name::Decorator, - Name::Exception, - Name::Function, :fg => :bright_green - style Name::Variable::Class, - Name::Namespace, - Name::Entity, - Name::Builtin::Pseudo, - Name::Variable::Global, - Name::Variable::Instance, - Name::Variable, - Text::Whitespace, - Text, - Name, :fg => :white - style Name::Label, :fg => :bright_pink - style Operator::Word, - Name::Tag, - Keyword, - Keyword::Namespace, - Keyword::Pseudo, - Operator, :fg => :bright_pink - end - end - end diff --git a/docs/api/lib/multilang.rb b/docs/api/lib/multilang.rb deleted file mode 100644 index 36fbe5b..0000000 --- a/docs/api/lib/multilang.rb +++ /dev/null @@ -1,16 +0,0 @@ -module Multilang - def block_code(code, full_lang_name) - if full_lang_name - parts = full_lang_name.split('--') - rouge_lang_name = (parts) ? 
parts[0] : "" # just parts[0] here causes null ref exception when no language specified - super(code, rouge_lang_name).sub("highlight #{rouge_lang_name}") do |match| - match + " tab-" + full_lang_name - end - else - super(code, full_lang_name) - end - end -end - -require 'middleman-core/renderers/redcarpet' -Middleman::Renderers::MiddlemanRedcarpetHTML.send :include, Multilang diff --git a/docs/api/lib/nesting_unique_head.rb b/docs/api/lib/nesting_unique_head.rb deleted file mode 100644 index 0127837..0000000 --- a/docs/api/lib/nesting_unique_head.rb +++ /dev/null @@ -1,22 +0,0 @@ -# Nested unique header generation -require 'middleman-core/renderers/redcarpet' - -class NestingUniqueHeadCounter < Middleman::Renderers::MiddlemanRedcarpetHTML - def initialize - super - @@headers_history = {} if !defined?(@@headers_history) - end - - def header(text, header_level) - friendly_text = text.gsub(/<[^>]*>/,"").parameterize - @@headers_history[header_level] = text.parameterize - - if header_level > 1 - for i in (header_level - 1).downto(1) - friendly_text.prepend("#{@@headers_history[i]}-") if @@headers_history.key?(i) - end - end - - return "#{text}" - end -end diff --git a/docs/api/lib/toc_data.rb b/docs/api/lib/toc_data.rb deleted file mode 100644 index 4a04efe..0000000 --- a/docs/api/lib/toc_data.rb +++ /dev/null @@ -1,31 +0,0 @@ -require 'nokogiri' - -def toc_data(page_content) - html_doc = Nokogiri::HTML::DocumentFragment.parse(page_content) - - # get a flat list of headers - headers = [] - html_doc.css('h1, h2, h3').each do |header| - headers.push({ - id: header.attribute('id').to_s, - content: header.children, - title: header.children.to_s.gsub(/<[^>]*>/, ''), - level: header.name[1].to_i, - children: [] - }) - end - - [3,2].each do |header_level| - header_to_nest = nil - headers = headers.reject do |header| - if header[:level] == header_level - header_to_nest[:children].push header if header_to_nest - true - else - header_to_nest = header if header[:level] < 
header_level - false - end - end - end - headers -end diff --git a/docs/api/lib/unique_head.rb b/docs/api/lib/unique_head.rb deleted file mode 100644 index d42bab2..0000000 --- a/docs/api/lib/unique_head.rb +++ /dev/null @@ -1,24 +0,0 @@ -# Unique header generation -require 'middleman-core/renderers/redcarpet' -require 'digest' -class UniqueHeadCounter < Middleman::Renderers::MiddlemanRedcarpetHTML - def initialize - super - @head_count = {} - end - def header(text, header_level) - friendly_text = text.gsub(/<[^>]*>/,"").parameterize - if friendly_text.strip.length == 0 - # Looks like parameterize removed the whole thing! It removes many unicode - # characters like Chinese and Russian. To get a unique URL, let's just - # URI escape the whole header - friendly_text = Digest::SHA1.hexdigest(text)[0,10] - end - @head_count[friendly_text] ||= 0 - @head_count[friendly_text] += 1 - if @head_count[friendly_text] > 1 - friendly_text += "-#{@head_count[friendly_text]}" - end - return "#{text}" - end -end diff --git a/docs/api/slate.sh b/docs/api/slate.sh deleted file mode 100755 index a3cc498..0000000 --- a/docs/api/slate.sh +++ /dev/null @@ -1,248 +0,0 @@ -#!/usr/bin/env bash -set -o errexit #abort if any command fails - -me=$(basename "$0") - -help_message="\ -Usage: $me [] [] -Run commands related to the slate process. - -Commands: - - serve Run the middleman server process, useful for - development. - build Run the build process. - deploy Will build and deploy files to branch. Use - --no-build to only deploy. - -Global Options: - - -h, --help Show this help information. - -v, --verbose Increase verbosity. Useful for debugging. - -Deploy options: - -e, --allow-empty Allow deployment of an empty directory. - -m, --message MESSAGE Specify the message used when committing on the - deploy branch. - -n, --no-hash Don't append the source commit's hash to the deploy - commit's message. - --no-build Do not build the source files. 
-" - - -run_serve() { - exec bundle exec middleman serve --watcher-force-polling -} - -run_build() { - bundle exec middleman build --clean -} - -parse_args() { - # Set args from a local environment file. - if [ -e ".env" ]; then - source .env - fi - - command= - - # Parse arg flags - # If something is exposed as an environment variable, set/overwrite it - # here. Otherwise, set/overwrite the internal variable instead. - while : ; do - if [[ $1 = "-h" || $1 = "--help" ]]; then - echo "$help_message" - exit 0 - elif [[ $1 = "-v" || $1 = "--verbose" ]]; then - verbose=true - shift - elif [[ $1 = "-e" || $1 = "--allow-empty" ]]; then - allow_empty=true - shift - elif [[ ( $1 = "-m" || $1 = "--message" ) && -n $2 ]]; then - commit_message=$2 - shift 2 - elif [[ $1 = "-n" || $1 = "--no-hash" ]]; then - GIT_DEPLOY_APPEND_HASH=false - shift - elif [[ $1 = "--no-build" ]]; then - no_build=true - shift - elif [[ $1 = "serve" || $1 = "build" || $1 = "deploy" ]]; then - if [ ! -z "${command}" ]; then - >&2 echo "You can only specify one command." - exit 1 - fi - command=$1 - shift - elif [ -z $1 ]; then - break - fi - done - - if [ -z "${command}" ]; then - >&2 echo "Command not specified." - exit 1 - fi - - # Set internal option vars from the environment and arg flags. All internal - # vars should be declared here, with sane defaults if applicable. - - # Source directory & target branch. - deploy_directory=build - deploy_branch=gh-pages - - #if no user identity is already set in the current git environment, use this: - default_username=${GIT_DEPLOY_USERNAME:-deploy.sh} - default_email=${GIT_DEPLOY_EMAIL:-} - - #repository to deploy to. must be readable and writable. - repo=origin - - #append commit hash to the end of message by default - append_hash=${GIT_DEPLOY_APPEND_HASH:-true} -} - -main() { - enable_expanded_output - - if ! 
git diff --exit-code --quiet --cached; then - echo Aborting due to uncommitted changes in the index >&2 - return 1 - fi - - commit_title=`git log -n 1 --format="%s" HEAD` - commit_hash=` git log -n 1 --format="%H" HEAD` - - #default commit message uses last title if a custom one is not supplied - if [[ -z $commit_message ]]; then - commit_message="publish: $commit_title" - fi - - #append hash to commit message unless no hash flag was found - if [ $append_hash = true ]; then - commit_message="$commit_message"$'\n\n'"generated from commit $commit_hash" - fi - - previous_branch=`git rev-parse --abbrev-ref HEAD` - - if [ ! -d "$deploy_directory" ]; then - echo "Deploy directory '$deploy_directory' does not exist. Aborting." >&2 - return 1 - fi - - # must use short form of flag in ls for compatibility with macOS and BSD - if [[ -z `ls -A "$deploy_directory" 2> /dev/null` && -z $allow_empty ]]; then - echo "Deploy directory '$deploy_directory' is empty. Aborting. If you're sure you want to deploy an empty tree, use the --allow-empty / -e flag." 
>&2 - return 1 - fi - - if git ls-remote --exit-code $repo "refs/heads/$deploy_branch" ; then - # deploy_branch exists in $repo; make sure we have the latest version - - disable_expanded_output - git fetch --force $repo $deploy_branch:$deploy_branch - enable_expanded_output - fi - - # check if deploy_branch exists locally - if git show-ref --verify --quiet "refs/heads/$deploy_branch" - then incremental_deploy - else initial_deploy - fi - - restore_head -} - -initial_deploy() { - git --work-tree "$deploy_directory" checkout --orphan $deploy_branch - git --work-tree "$deploy_directory" add --all - commit+push -} - -incremental_deploy() { - #make deploy_branch the current branch - git symbolic-ref HEAD refs/heads/$deploy_branch - #put the previously committed contents of deploy_branch into the index - git --work-tree "$deploy_directory" reset --mixed --quiet - git --work-tree "$deploy_directory" add --all - - set +o errexit - diff=$(git --work-tree "$deploy_directory" diff --exit-code --quiet HEAD --)$? - set -o errexit - case $diff in - 0) echo No changes to files in $deploy_directory. Skipping commit.;; - 1) commit+push;; - *) - echo git diff exited with code $diff. Aborting. Staying on branch $deploy_branch so you can debug. 
To switch back to main, use: git symbolic-ref HEAD refs/heads/main && git reset --mixed >&2 - return $diff - ;; - esac -} - -commit+push() { - set_user_id - git --work-tree "$deploy_directory" commit -m "$commit_message" - - disable_expanded_output - #--quiet is important here to avoid outputting the repo URL, which may contain a secret token - git push --quiet $repo $deploy_branch - enable_expanded_output -} - -#echo expanded commands as they are executed (for debugging) -enable_expanded_output() { - if [ $verbose ]; then - set -o xtrace - set +o verbose - fi -} - -#this is used to avoid outputting the repo URL, which may contain a secret token -disable_expanded_output() { - if [ $verbose ]; then - set +o xtrace - set -o verbose - fi -} - -set_user_id() { - if [[ -z `git config user.name` ]]; then - git config user.name "$default_username" - fi - if [[ -z `git config user.email` ]]; then - git config user.email "$default_email" - fi -} - -restore_head() { - if [[ $previous_branch = "HEAD" ]]; then - #we weren't on any branch before, so just set HEAD back to the commit it was on - git update-ref --no-deref HEAD $commit_hash $deploy_branch - else - git symbolic-ref HEAD refs/heads/$previous_branch - fi - - git reset --mixed -} - -filter() { - sed -e "s|$repo|\$repo|g" -} - -sanitize() { - "$@" 2> >(filter 1>&2) | filter -} - -parse_args "$@" - -if [ "${command}" = "serve" ]; then - run_serve -elif [[ "${command}" = "build" ]]; then - run_build -elif [[ ${command} = "deploy" ]]; then - if [[ ${no_build} != true ]]; then - run_build - fi - main "$@" -fi diff --git a/docs/api/source/fonts/slate.eot b/docs/api/source/fonts/slate.eot deleted file mode 100644 index 13c4839..0000000 Binary files a/docs/api/source/fonts/slate.eot and /dev/null differ diff --git a/docs/api/source/fonts/slate.svg b/docs/api/source/fonts/slate.svg deleted file mode 100644 index 5f34982..0000000 --- a/docs/api/source/fonts/slate.svg +++ /dev/null @@ -1,14 +0,0 @@ - - - -Generated by IcoMoon - - 
- - - - - - - - diff --git a/docs/api/source/fonts/slate.ttf b/docs/api/source/fonts/slate.ttf deleted file mode 100644 index ace9a46..0000000 Binary files a/docs/api/source/fonts/slate.ttf and /dev/null differ diff --git a/docs/api/source/fonts/slate.woff b/docs/api/source/fonts/slate.woff deleted file mode 100644 index 1e72e0e..0000000 Binary files a/docs/api/source/fonts/slate.woff and /dev/null differ diff --git a/docs/api/source/fonts/slate.woff2 b/docs/api/source/fonts/slate.woff2 deleted file mode 100644 index 7c585a7..0000000 Binary files a/docs/api/source/fonts/slate.woff2 and /dev/null differ diff --git a/docs/api/source/images/navbar.png b/docs/api/source/images/navbar.png deleted file mode 100644 index df38e90..0000000 Binary files a/docs/api/source/images/navbar.png and /dev/null differ diff --git a/docs/api/source/includes/_jobs.md b/docs/api/source/includes/_jobs.md deleted file mode 100644 index a25309d..0000000 --- a/docs/api/source/includes/_jobs.md +++ /dev/null @@ -1,78 +0,0 @@ -# Jobs - - - -## Manually schedule a job - -```shell -curl \ - -H 'X-Api-Key: secret' \ - https://example.com/api/v1/jobs/queue?target=10&force&arch=x86_64 -``` - -Manually schedule a job on the server. - -### HTTP Request - -`POST /api/v1/jobs/queue` - -### Query Parameters - -Parameter | Description ---------- | ----------- -target | Id of target to schedule build for -arch | Architecture to build on -force | Whether it's a forced build (true if present) - -## Poll for new jobs - - - -```shell -curl \ - -H 'X-Api-Key: secret' \ - https://example.com/api/v1/jobs/poll?arch=x86_64&max=2 -``` - -> JSON output format - -```json -{ - "message": "", - "data": [ - { - "target_id": 1, - "kind": "git", - "url": "https://aur.archlinux.org/discord-ptb.git", - "branch": "master", - "path": "", - "repo": "bur", - "base_image": "archlinux:base-devel", - "force": true - } - ] -} -``` - -Poll the server for new builds. 
- -### HTTP Request - -`GET /api/v1/jobs/poll` - -### Query Parameters - -Parameter | Description ---------- | ----------- -arch | For which architecture to receive jobs -max | How many jobs to receive at most diff --git a/docs/api/source/includes/_logs.md b/docs/api/source/includes/_logs.md deleted file mode 100644 index d6134b7..0000000 --- a/docs/api/source/includes/_logs.md +++ /dev/null @@ -1,172 +0,0 @@ -# Build Logs - - - -Endpoints for interacting with stored build logs. - -## List logs - -```shell -curl \ - -H 'X-Api-Key: secret' \ - https://example.com/api/v1/logs?offset=10&limit=20 -``` - -> JSON output format - -```json -{ - "message": "", - "data": [ - { - "id": 1, - "target_id": 3, - "start_time": 1652008554, - "end_time": 1652008559, - "arch": "x86_64", - "exit_code": 0 - } - ] -} -``` - -Retrieve a list of build logs. - -### HTTP Request - -`GET /api/v1/logs` - -### Query Parameters - -Parameter | Description ---------- | ----------- -limit | Maximum amount of results to return. -offset | Offset of results. -target | Only return builds for this target id. -before | Only return logs started before this time (UTC epoch) -after | Only return logs started after this time (UTC epoch) -arch | Only return logs built on this architecture -exit_codes | Comma-separated list of exit codes to limit result to; using `!` as a prefix makes it exclude that value. For example, `1,2` only returns logs with status code 1 or 2, while `!1,!2` returns those that don't have 1 or 2 as the result. - - -## Get build log - -```shell -curl \ - -H 'X-Api-Key: secret' \ - https://example.com/api/v1/logs/1 -``` - -> JSON output format - -```json -{ - "message": "", - "data": { - "id": 1, - "target_id": 3, - "start_time": 1652008554, - "end_time": 1652008559, - "arch": "x86_64", - "exit_code": 0 - } -} -``` - -Retrieve info about a specific build log. 
- -### HTTP Request - -`GET /api/v1/logs/:id` - -### URL Parameters - -Parameter | Description ---------- | ----------- -id | ID of requested log - -## Get log contents - -```shell -curl \ - -H 'X-Api-Key: secret' \ - https://example.com/api/v1/logs/15/content -``` - -Retrieve the contents of a build log. The response is the build log in -plaintext. - -### HTTP Request - -`GET /api/v1/logs/:id/content` - -### URL Parameters - -Parameter | Description ---------- | ----------- -id | ID of requested log - -## Publish build log - -> JSON output format - -```json -{ - "message": "", - "data": { - "id": 15 - } -} -``` - - - -Publish a new build log to the server. - -### HTTP Request - -`POST /api/v1/logs` - -### Query parameters - -Parameter | Description ---------- | ----------- -startTime | Start time of the build (UTC epoch) -endTime | End time of the build (UTC epoch) -arch | Architecture on which the build was done -exitCode | Exit code of the build container -target | id of target this build is for - -### Request body - -Plaintext contents of the build log. - -## Remove a build log - -```shell -curl \ - -XDELETE \ - -H 'X-Api-Key: secret' \ - https://example.com/api/v1/logs/1 -``` - -Remove a build log from the server. - -### HTTP Request - -`DELETE /api/v1/logs/:id` - -### URL Parameters - -Parameter | Description ---------- | ----------- -id | id of log to remove diff --git a/docs/api/source/includes/_repository.md b/docs/api/source/includes/_repository.md deleted file mode 100644 index ff17f71..0000000 --- a/docs/api/source/includes/_repository.md +++ /dev/null @@ -1,179 +0,0 @@ -# Repository - -Besides providing a RESTful API, the Vieter server is also a Pacman-compatible -repository server. This section describes the various routes that make this -possible. - -## Get a package archive or database file - -```shell -curl -L https://example.com/bur/x86_64/tuxedo-keyboard-3.0.10-1-x86_64.pkg.tar.zst -``` - -This endpoint is really the entire repository. 
It serves both the package -archives & the database files for a specific arch-repo. It has three different -behaviors, depending on `filename`: - -* If the file extension is one of `.db`, `.files`, `.db.tar.gz` or - `.files.tar.gz`, it tries to serve the requested database file. -* If the filename contains `.pkg`, it serves the package file. -* Otherwise, it assumes `filename` is the name & version of a package inside - the repository (e.g. `vieter-0.3.0_alpha.2-1`) & serves that package's `desc` - file from inside the database archive. - - - -### HTTP Request - -`GET /:repo/:arch/:filename` - -### URL Parameters - -Parameter | Description ---------- | ----------- -repo | Repository containing the package -arch | Arch-repo containing the package -filename | actual filename to request - -## Check whether file exists - -```shell -curl -L https://example.com/bur/x86_64/tuxedo-keyboard-3.0.10-1-x86_64.pkg.tar.zst -``` - -The above request can also be performed as a HEAD request. The behavior is the -same, except no data is returned besides an error 404 if the file doesn't exist -& an error 200 otherwise. - -### HTTP Request - -`GET /:repo/:arch/:filename` - -### URL Parameters - -Parameter | Description ---------- | ----------- -repo | Repository containing the package -arch | Arch-repo containing the package -filename | actual filename to request - -## Publish package - - - -```shell -curl \ - -H 'X-Api-Key: secret' \ - -XPOST \ - -T tuxedo-keyboard-3.0.10-1-x86_64.pkg.tar.zst \ - https://example.com/some-repo/publish -``` - -This endpoint allows you to publish a new package archive to a given repo. - -If the package's architecture is not `any`, it is added to that specific -arch-repo. Otherwise, it is added to the configured default architecture & any -other already present arch-repos. 
- -### HTTP Request - -`POST /:repo/publish` - -### URL Parameters - -Parameter | Description ---------- | ----------- -repo | Repository to publish package to - -## Remove package from arch-repo - - - -```shell -curl \ - -H 'X-Api-Key: secret' \ - -XDELETE \ - https://example.com/vieter/x86_64/mike -``` - -This endpoint allows you to remove a package from a given arch-repo. - -### HTTP Request - -`DELETE /:repo/:arch/:pkg` - -### URL Parameters - -Parameter | Description ---------- | ----------- -repo | Repository to delete package from -arch | Specific arch-repo to remove package from -pkg | Name of package to remove (without any version information) - -## Remove arch-repo - - - -```shell -curl \ - -H 'X-Api-Key: secret' \ - -XDELETE \ - https://example.com/vieter/x86_64 -``` - -This endpoint allows removing an entire arch-repo. - -### HTTP Request - -`DELETE /:repo/:arch` - -### URL Parameters - -Parameter | Description ---------- | ----------- -repo | Repository to delete arch-repo from -arch | Specific architecture to remove - -## Remove repo - - - -```shell -curl \ - -H 'X-Api-Key: secret' \ - -XDELETE \ - https://example.com/vieter -``` - -This endpoint allows removing an entire repo. - -### HTTP Request - -`DELETE /:repo` - -### URL Parameters - -Parameter | Description ---------- | ----------- -repo | Repository to delete diff --git a/docs/api/source/includes/_targets.md b/docs/api/source/includes/_targets.md deleted file mode 100644 index 1a5f3e0..0000000 --- a/docs/api/source/includes/_targets.md +++ /dev/null @@ -1,181 +0,0 @@ -# Targets - - - -Endpoints for interacting with the list of targets stored on the server. 
- -## List targets - -```shell -curl \ - -H 'X-Api-Key: secret' \ - https://example.com/api/v1/targets?offset=10&limit=20 -``` - -> JSON output format - -```json -{ - "message": "", - "data": [ - { - "id": 1, - "kind": "git", - "url": "https://aur.archlinux.org/discord-ptb.git", - "branch": "master", - "path" : "", - "repo": "bur", - "schedule": "", - "arch": [ - { - "id": 1, - "target_id": 1, - "value": "x86_64" - } - ] - } - ] -} -``` - -Retrieve a list of targets. - -### HTTP Request - -`GET /api/v1/targets` - -### Query Parameters - -Parameter | Description ---------- | ----------- -limit | Maximum amount of results to return. -offset | Offset of results. -repo | Limit results to targets that publish to the given repo. -query | Only return targets that have this substring in their URL, path or branch. -arch | Only return targets that publish to this arch. - -## Get specific target - -```shell -curl \ - -H 'X-Api-Key: secret' \ - https://example.com/api/v1/targets/1 -``` - -> JSON output format - -```json -{ - "message": "", - "data": { - "id": 1, - "kind": "git", - "url": "https://aur.archlinux.org/discord-ptb.git", - "branch": "master", - "path": "", - "repo": "bur", - "schedule": "0 2", - "arch": [ - { - "id": 1, - "target_id": 1, - "value": "x86_64" - } - ] - } -} -``` - -Get info about a specific target. - -### HTTP Request - -`GET /api/v1/targets/:id` - -### URL Parameters - -Parameter | Description ---------- | ----------- -id | id of requested target - -## Create a new target - -> JSON output format - -```json -{ - "message": "", - "data": { - "id": 15 - } -} -``` - -Create a new target with the given data. - -### HTTP Request - -`POST /api/v1/targets` - -### Query Parameters - -Parameter | Description ---------- | ----------- -kind | Kind of target to add; one of 'git', 'url'. -url | URL of the Git repository. -branch | Branch of the Git repository. -path | Subdirectory inside Git repository to use. 
-repo | Vieter repository to publish built packages to. -schedule | Cron build schedule (syntax explained [here](https://rustybever.be/docs/vieter/usage/builds/schedule/)) -arch | Comma-separated list of architectures to build package on. - -## Modify a target - -Modify the data of an existing target. - -### HTTP Request - -`PATCH /api/v1/targets/:id` - -### URL Parameters - -Parameter | Description ---------- | ----------- -id | id of target to modify - -### Query Parameters - -Parameter | Description ---------- | ----------- -kind | Kind of target; one of 'git', 'url'. -url | URL of the Git repository. -branch | Branch of the Git repository. -path | Subdirectory inside Git repository to use. -repo | Vieter repository to publish built packages to. -schedule | Cron build schedule -arch | Comma-separated list of architectures to build package on. - -## Remove a target - -```shell -curl \ - -XDELETE \ - -H 'X-Api-Key: secret' \ - https://example.com/api/v1/targets/1 -``` - -Remove a target from the server. - -### HTTP Request - -`DELETE /api/v1/targets/:id` - -### URL Parameters - -Parameter | Description ---------- | ----------- -id | id of target to remove diff --git a/docs/api/source/index.html.md b/docs/api/source/index.html.md deleted file mode 100644 index f61e44a..0000000 --- a/docs/api/source/index.html.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -title: API Reference - -language_tabs: # must be one of https://git.io/vQNgJ - - shell: cURL - -toc_footers: - - Documentation Powered by Slate - -includes: - - repository - - targets - - logs - - jobs - -search: true - -code_clipboard: true - -meta: - - name: description - content: Documentation for the Vieter API ---- - -# Introduction - -Welcome to the Vieter API documentation! Here, you can find everything related -to interacting with Vieter's HTTP API. 
- -# Authentication - -```shell -curl -H 'X-Api-Key: secret' https://example.com/api/some/path -``` - -> Don't forget to replace `secret` with your Vieter instance's secret. - -Authentication is done by passing the HTTP header `X-Api-Key: secret` along -with each request, where `secret` is replaced with your Vieter server's -configured secret. diff --git a/docs/api/source/javascripts/all.js b/docs/api/source/javascripts/all.js deleted file mode 100644 index 5f5d406..0000000 --- a/docs/api/source/javascripts/all.js +++ /dev/null @@ -1,2 +0,0 @@ -//= require ./all_nosearch -//= require ./app/_search diff --git a/docs/api/source/javascripts/all_nosearch.js b/docs/api/source/javascripts/all_nosearch.js deleted file mode 100644 index 026e5a2..0000000 --- a/docs/api/source/javascripts/all_nosearch.js +++ /dev/null @@ -1,27 +0,0 @@ -//= require ./lib/_energize -//= require ./app/_copy -//= require ./app/_toc -//= require ./app/_lang - -function adjustLanguageSelectorWidth() { - const elem = $('.dark-box > .lang-selector'); - elem.width(elem.parent().width()); -} - -$(function() { - loadToc($('#toc'), '.toc-link', '.toc-list-h2', 10); - setupLanguages($('body').data('languages')); - $('.content').imagesLoaded( function() { - window.recacheHeights(); - window.refreshToc(); - }); - - $(window).resize(function() { - adjustLanguageSelectorWidth(); - }); - adjustLanguageSelectorWidth(); -}); - -window.onpopstate = function() { - activateLanguage(getLanguageFromQueryString()); -}; diff --git a/docs/api/source/javascripts/app/_copy.js b/docs/api/source/javascripts/app/_copy.js deleted file mode 100644 index 4dfbbb6..0000000 --- a/docs/api/source/javascripts/app/_copy.js +++ /dev/null @@ -1,15 +0,0 @@ -function copyToClipboard(container) { - const el = document.createElement('textarea'); - el.value = container.textContent.replace(/\n$/, ''); - document.body.appendChild(el); - el.select(); - document.execCommand('copy'); - document.body.removeChild(el); -} - -function 
setupCodeCopy() { - $('pre.highlight').prepend('
Copy to Clipboard
'); - $('.copy-clipboard').on('click', function() { - copyToClipboard(this.parentNode.children[1]); - }); -} diff --git a/docs/api/source/javascripts/app/_lang.js b/docs/api/source/javascripts/app/_lang.js deleted file mode 100644 index cc5ac8b..0000000 --- a/docs/api/source/javascripts/app/_lang.js +++ /dev/null @@ -1,171 +0,0 @@ -//= require ../lib/_jquery - -/* -Copyright 2008-2013 Concur Technologies, Inc. - -Licensed under the Apache License, Version 2.0 (the "License"); you may -not use this file except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -License for the specific language governing permissions and limitations -under the License. -*/ -;(function () { - 'use strict'; - - var languages = []; - - window.setupLanguages = setupLanguages; - window.activateLanguage = activateLanguage; - window.getLanguageFromQueryString = getLanguageFromQueryString; - - function activateLanguage(language) { - if (!language) return; - if (language === "") return; - - $(".lang-selector a").removeClass('active'); - $(".lang-selector a[data-language-name='" + language + "']").addClass('active'); - for (var i=0; i < languages.length; i++) { - $(".highlight.tab-" + languages[i]).hide(); - $(".lang-specific." + languages[i]).hide(); - } - $(".highlight.tab-" + language).show(); - $(".lang-specific." 
+ language).show(); - - window.recacheHeights(); - - // scroll to the new location of the position - if ($(window.location.hash).get(0)) { - $(window.location.hash).get(0).scrollIntoView(true); - } - } - - // parseURL and stringifyURL are from https://github.com/sindresorhus/query-string - // MIT licensed - // https://github.com/sindresorhus/query-string/blob/7bee64c16f2da1a326579e96977b9227bf6da9e6/license - function parseURL(str) { - if (typeof str !== 'string') { - return {}; - } - - str = str.trim().replace(/^(\?|#|&)/, ''); - - if (!str) { - return {}; - } - - return str.split('&').reduce(function (ret, param) { - var parts = param.replace(/\+/g, ' ').split('='); - var key = parts[0]; - var val = parts[1]; - - key = decodeURIComponent(key); - // missing `=` should be `null`: - // http://w3.org/TR/2012/WD-url-20120524/#collect-url-parameters - val = val === undefined ? null : decodeURIComponent(val); - - if (!ret.hasOwnProperty(key)) { - ret[key] = val; - } else if (Array.isArray(ret[key])) { - ret[key].push(val); - } else { - ret[key] = [ret[key], val]; - } - - return ret; - }, {}); - }; - - function stringifyURL(obj) { - return obj ? 
Object.keys(obj).sort().map(function (key) { - var val = obj[key]; - - if (Array.isArray(val)) { - return val.sort().map(function (val2) { - return encodeURIComponent(key) + '=' + encodeURIComponent(val2); - }).join('&'); - } - - return encodeURIComponent(key) + '=' + encodeURIComponent(val); - }).join('&') : ''; - }; - - // gets the language set in the query string - function getLanguageFromQueryString() { - if (location.search.length >= 1) { - var language = parseURL(location.search).language; - if (language) { - return language; - } else if (jQuery.inArray(location.search.substr(1), languages) != -1) { - return location.search.substr(1); - } - } - - return false; - } - - // returns a new query string with the new language in it - function generateNewQueryString(language) { - var url = parseURL(location.search); - if (url.language) { - url.language = language; - return stringifyURL(url); - } - return language; - } - - // if a button is clicked, add the state to the history - function pushURL(language) { - if (!history) { return; } - var hash = window.location.hash; - if (hash) { - hash = hash.replace(/^#+/, ''); - } - history.pushState({}, '', '?' + generateNewQueryString(language) + '#' + hash); - - // save language as next default - if (localStorage) { - localStorage.setItem("language", language); - } - } - - function setupLanguages(l) { - var defaultLanguage = null; - if (localStorage) { - defaultLanguage = localStorage.getItem("language"); - } - - languages = l; - - var presetLanguage = getLanguageFromQueryString(); - if (presetLanguage) { - // the language is in the URL, so use that language! - activateLanguage(presetLanguage); - - if (localStorage) { - localStorage.setItem("language", presetLanguage); - } - } else if ((defaultLanguage !== null) && (jQuery.inArray(defaultLanguage, languages) != -1)) { - // the language was the last selected one saved in localstorage, so use that language! 
- activateLanguage(defaultLanguage); - } else { - // no language selected, so use the default - activateLanguage(languages[0]); - } - } - - // if we click on a language tab, activate that language - $(function() { - $(".lang-selector a").on("click", function() { - var language = $(this).data("language-name"); - pushURL(language); - activateLanguage(language); - return false; - }); - }); -})(); diff --git a/docs/api/source/javascripts/app/_search.js b/docs/api/source/javascripts/app/_search.js deleted file mode 100644 index 0b0ccd9..0000000 --- a/docs/api/source/javascripts/app/_search.js +++ /dev/null @@ -1,102 +0,0 @@ -//= require ../lib/_lunr -//= require ../lib/_jquery -//= require ../lib/_jquery.highlight -;(function () { - 'use strict'; - - var content, searchResults; - var highlightOpts = { element: 'span', className: 'search-highlight' }; - var searchDelay = 0; - var timeoutHandle = 0; - var index; - - function populate() { - index = lunr(function(){ - - this.ref('id'); - this.field('title', { boost: 10 }); - this.field('body'); - this.pipeline.add(lunr.trimmer, lunr.stopWordFilter); - var lunrConfig = this; - - $('h1, h2').each(function() { - var title = $(this); - var body = title.nextUntil('h1, h2'); - lunrConfig.add({ - id: title.prop('id'), - title: title.text(), - body: body.text() - }); - }); - - }); - determineSearchDelay(); - } - - $(populate); - $(bind); - - function determineSearchDelay() { - if (index.tokenSet.toArray().length>5000) { - searchDelay = 300; - } - } - - function bind() { - content = $('.content'); - searchResults = $('.search-results'); - - $('#input-search').on('keyup',function(e) { - var wait = function() { - return function(executingFunction, waitTime){ - clearTimeout(timeoutHandle); - timeoutHandle = setTimeout(executingFunction, waitTime); - }; - }(); - wait(function(){ - search(e); - }, searchDelay); - }); - } - - function search(event) { - - var searchInput = $('#input-search')[0]; - - unhighlight(); - 
searchResults.addClass('visible'); - - // ESC clears the field - if (event.keyCode === 27) searchInput.value = ''; - - if (searchInput.value) { - var results = index.search(searchInput.value).filter(function(r) { - return r.score > 0.0001; - }); - - if (results.length) { - searchResults.empty(); - $.each(results, function (index, result) { - var elem = document.getElementById(result.ref); - searchResults.append("
  • " + $(elem).text() + "
  • "); - }); - highlight.call(searchInput); - } else { - searchResults.html('
  • '); - $('.search-results li').text('No Results Found for "' + searchInput.value + '"'); - } - } else { - unhighlight(); - searchResults.removeClass('visible'); - } - } - - function highlight() { - if (this.value) content.highlight(this.value, highlightOpts); - } - - function unhighlight() { - content.unhighlight(highlightOpts); - } -})(); - diff --git a/docs/api/source/javascripts/app/_toc.js b/docs/api/source/javascripts/app/_toc.js deleted file mode 100644 index f70bdc0..0000000 --- a/docs/api/source/javascripts/app/_toc.js +++ /dev/null @@ -1,122 +0,0 @@ -//= require ../lib/_jquery -//= require ../lib/_imagesloaded.min -;(function () { - 'use strict'; - - var htmlPattern = /<[^>]*>/g; - var loaded = false; - - var debounce = function(func, waitTime) { - var timeout = false; - return function() { - if (timeout === false) { - setTimeout(function() { - func(); - timeout = false; - }, waitTime); - timeout = true; - } - }; - }; - - var closeToc = function() { - $(".toc-wrapper").removeClass('open'); - $("#nav-button").removeClass('open'); - }; - - function loadToc($toc, tocLinkSelector, tocListSelector, scrollOffset) { - var headerHeights = {}; - var pageHeight = 0; - var windowHeight = 0; - var originalTitle = document.title; - - var recacheHeights = function() { - headerHeights = {}; - pageHeight = $(document).height(); - windowHeight = $(window).height(); - - $toc.find(tocLinkSelector).each(function() { - var targetId = $(this).attr('href'); - if (targetId[0] === "#") { - headerHeights[targetId] = $("#" + $.escapeSelector(targetId.substring(1))).offset().top; - } - }); - }; - - var refreshToc = function() { - var currentTop = $(document).scrollTop() + scrollOffset; - - if (currentTop + windowHeight >= pageHeight) { - // at bottom of page, so just select last header by making currentTop very large - // this fixes the problem where the last header won't ever show as active if its content - // is shorter than the window height - currentTop = pageHeight + 1000; - 
} - - var best = null; - for (var name in headerHeights) { - if ((headerHeights[name] < currentTop && headerHeights[name] > headerHeights[best]) || best === null) { - best = name; - } - } - - // Catch the initial load case - if (currentTop == scrollOffset && !loaded) { - best = window.location.hash; - loaded = true; - } - - var $best = $toc.find("[href='" + best + "']").first(); - if (!$best.hasClass("active")) { - // .active is applied to the ToC link we're currently on, and its parent
      s selected by tocListSelector - // .active-expanded is applied to the ToC links that are parents of this one - $toc.find(".active").removeClass("active"); - $toc.find(".active-parent").removeClass("active-parent"); - $best.addClass("active"); - $best.parents(tocListSelector).addClass("active").siblings(tocLinkSelector).addClass('active-parent'); - $best.siblings(tocListSelector).addClass("active"); - $toc.find(tocListSelector).filter(":not(.active)").slideUp(150); - $toc.find(tocListSelector).filter(".active").slideDown(150); - if (window.history.replaceState) { - window.history.replaceState(null, "", best); - } - var thisTitle = $best.data("title"); - if (thisTitle !== undefined && thisTitle.length > 0) { - document.title = thisTitle.replace(htmlPattern, "") + " – " + originalTitle; - } else { - document.title = originalTitle; - } - } - }; - - var makeToc = function() { - recacheHeights(); - refreshToc(); - - $("#nav-button").click(function() { - $(".toc-wrapper").toggleClass('open'); - $("#nav-button").toggleClass('open'); - return false; - }); - $(".page-wrapper").click(closeToc); - $(".toc-link").click(closeToc); - - // reload immediately after scrolling on toc click - $toc.find(tocLinkSelector).click(function() { - setTimeout(function() { - refreshToc(); - }, 0); - }); - - $(window).scroll(debounce(refreshToc, 200)); - $(window).resize(debounce(recacheHeights, 200)); - }; - - makeToc(); - - window.recacheHeights = recacheHeights; - window.refreshToc = refreshToc; - } - - window.loadToc = loadToc; -})(); diff --git a/docs/api/source/javascripts/lib/_energize.js b/docs/api/source/javascripts/lib/_energize.js deleted file mode 100644 index 6798f3c..0000000 --- a/docs/api/source/javascripts/lib/_energize.js +++ /dev/null @@ -1,169 +0,0 @@ -/** - * energize.js v0.1.0 - * - * Speeds up click events on mobile devices. 
- * https://github.com/davidcalhoun/energize.js - */ - -(function() { // Sandbox - /** - * Don't add to non-touch devices, which don't need to be sped up - */ - if(!('ontouchstart' in window)) return; - - var lastClick = {}, - isThresholdReached, touchstart, touchmove, touchend, - click, closest; - - /** - * isThresholdReached - * - * Compare touchstart with touchend xy coordinates, - * and only fire simulated click event if the coordinates - * are nearby. (don't want clicking to be confused with a swipe) - */ - isThresholdReached = function(startXY, xy) { - return Math.abs(startXY[0] - xy[0]) > 5 || Math.abs(startXY[1] - xy[1]) > 5; - }; - - /** - * touchstart - * - * Save xy coordinates when the user starts touching the screen - */ - touchstart = function(e) { - this.startXY = [e.touches[0].clientX, e.touches[0].clientY]; - this.threshold = false; - }; - - /** - * touchmove - * - * Check if the user is scrolling past the threshold. - * Have to check here because touchend will not always fire - * on some tested devices (Kindle Fire?) - */ - touchmove = function(e) { - // NOOP if the threshold has already been reached - if(this.threshold) return false; - - this.threshold = isThresholdReached(this.startXY, [e.touches[0].clientX, e.touches[0].clientY]); - }; - - /** - * touchend - * - * If the user didn't scroll past the threshold between - * touchstart and touchend, fire a simulated click. 
- * - * (This will fire before a native click) - */ - touchend = function(e) { - // Don't fire a click if the user scrolled past the threshold - if(this.threshold || isThresholdReached(this.startXY, [e.changedTouches[0].clientX, e.changedTouches[0].clientY])) { - return; - } - - /** - * Create and fire a click event on the target element - * https://developer.mozilla.org/en/DOM/event.initMouseEvent - */ - var touch = e.changedTouches[0], - evt = document.createEvent('MouseEvents'); - evt.initMouseEvent('click', true, true, window, 0, touch.screenX, touch.screenY, touch.clientX, touch.clientY, false, false, false, false, 0, null); - evt.simulated = true; // distinguish from a normal (nonsimulated) click - e.target.dispatchEvent(evt); - }; - - /** - * click - * - * Because we've already fired a click event in touchend, - * we need to listed for all native click events here - * and suppress them as necessary. - */ - click = function(e) { - /** - * Prevent ghost clicks by only allowing clicks we created - * in the click event we fired (look for e.simulated) - */ - var time = Date.now(), - timeDiff = time - lastClick.time, - x = e.clientX, - y = e.clientY, - xyDiff = [Math.abs(lastClick.x - x), Math.abs(lastClick.y - y)], - target = closest(e.target, 'A') || e.target, // needed for standalone apps - nodeName = target.nodeName, - isLink = nodeName === 'A', - standAlone = window.navigator.standalone && isLink && e.target.getAttribute("href"); - - lastClick.time = time; - lastClick.x = x; - lastClick.y = y; - - /** - * Unfortunately Android sometimes fires click events without touch events (seen on Kindle Fire), - * so we have to add more logic to determine the time of the last click. Not perfect... 
- * - * Older, simpler check: if((!e.simulated) || standAlone) - */ - if((!e.simulated && (timeDiff < 500 || (timeDiff < 1500 && xyDiff[0] < 50 && xyDiff[1] < 50))) || standAlone) { - e.preventDefault(); - e.stopPropagation(); - if(!standAlone) return false; - } - - /** - * Special logic for standalone web apps - * See http://stackoverflow.com/questions/2898740/iphone-safari-web-app-opens-links-in-new-window - */ - if(standAlone) { - window.location = target.getAttribute("href"); - } - - /** - * Add an energize-focus class to the targeted link (mimics :focus behavior) - * TODO: test and/or remove? Does this work? - */ - if(!target || !target.classList) return; - target.classList.add("energize-focus"); - window.setTimeout(function(){ - target.classList.remove("energize-focus"); - }, 150); - }; - - /** - * closest - * @param {HTMLElement} node current node to start searching from. - * @param {string} tagName the (uppercase) name of the tag you're looking for. - * - * Find the closest ancestor tag of a given node. - * - * Starts at node and goes up the DOM tree looking for a - * matching nodeName, continuing until hitting document.body - */ - closest = function(node, tagName){ - var curNode = node; - - while(curNode !== document.body) { // go up the dom until we find the tag we're after - if(!curNode || curNode.nodeName === tagName) { return curNode; } // found - curNode = curNode.parentNode; // not found, so keep going up - } - - return null; // not found - }; - - /** - * Add all delegated event listeners - * - * All the events we care about bubble up to document, - * so we can take advantage of event delegation. - * - * Note: no need to wait for DOMContentLoaded here - */ - document.addEventListener('touchstart', touchstart, false); - document.addEventListener('touchmove', touchmove, false); - document.addEventListener('touchend', touchend, false); - document.addEventListener('click', click, true); // TODO: why does this use capture? 
- -})(); \ No newline at end of file diff --git a/docs/api/source/javascripts/lib/_imagesloaded.min.js b/docs/api/source/javascripts/lib/_imagesloaded.min.js deleted file mode 100644 index e443a77..0000000 --- a/docs/api/source/javascripts/lib/_imagesloaded.min.js +++ /dev/null @@ -1,7 +0,0 @@ -/*! - * imagesLoaded PACKAGED v4.1.4 - * JavaScript is all like "You images are done yet or what?" - * MIT License - */ - -!function(e,t){"function"==typeof define&&define.amd?define("ev-emitter/ev-emitter",t):"object"==typeof module&&module.exports?module.exports=t():e.EvEmitter=t()}("undefined"!=typeof window?window:this,function(){function e(){}var t=e.prototype;return t.on=function(e,t){if(e&&t){var i=this._events=this._events||{},n=i[e]=i[e]||[];return n.indexOf(t)==-1&&n.push(t),this}},t.once=function(e,t){if(e&&t){this.on(e,t);var i=this._onceEvents=this._onceEvents||{},n=i[e]=i[e]||{};return n[t]=!0,this}},t.off=function(e,t){var i=this._events&&this._events[e];if(i&&i.length){var n=i.indexOf(t);return n!=-1&&i.splice(n,1),this}},t.emitEvent=function(e,t){var i=this._events&&this._events[e];if(i&&i.length){i=i.slice(0),t=t||[];for(var n=this._onceEvents&&this._onceEvents[e],o=0;o (default options) - * $('#content').highlight('lorem'); - * - * // search for and highlight more terms at once - * // so you can save some time on traversing DOM - * $('#content').highlight(['lorem', 'ipsum']); - * $('#content').highlight('lorem ipsum'); - * - * // search only for entire word 'lorem' - * $('#content').highlight('lorem', { wordsOnly: true }); - * - * // don't ignore case during search of term 'lorem' - * $('#content').highlight('lorem', { caseSensitive: true }); - * - * // wrap every occurrance of term 'ipsum' in content - * // with - * $('#content').highlight('ipsum', { element: 'em', className: 'important' }); - * - * // remove default highlight - * $('#content').unhighlight(); - * - * // remove custom highlight - * $('#content').unhighlight({ element: 'em', className: 
'important' }); - * - * - * Copyright (c) 2009 Bartek Szopka - * - * Licensed under MIT license. - * - */ - -jQuery.extend({ - highlight: function (node, re, nodeName, className) { - if (node.nodeType === 3) { - var match = node.data.match(re); - if (match) { - var highlight = document.createElement(nodeName || 'span'); - highlight.className = className || 'highlight'; - var wordNode = node.splitText(match.index); - wordNode.splitText(match[0].length); - var wordClone = wordNode.cloneNode(true); - highlight.appendChild(wordClone); - wordNode.parentNode.replaceChild(highlight, wordNode); - return 1; //skip added node in parent - } - } else if ((node.nodeType === 1 && node.childNodes) && // only element nodes that have children - !/(script|style)/i.test(node.tagName) && // ignore script and style nodes - !(node.tagName === nodeName.toUpperCase() && node.className === className)) { // skip if already highlighted - for (var i = 0; i < node.childNodes.length; i++) { - i += jQuery.highlight(node.childNodes[i], re, nodeName, className); - } - } - return 0; - } -}); - -jQuery.fn.unhighlight = function (options) { - var settings = { className: 'highlight', element: 'span' }; - jQuery.extend(settings, options); - - return this.find(settings.element + "." + settings.className).each(function () { - var parent = this.parentNode; - parent.replaceChild(this.firstChild, this); - parent.normalize(); - }).end(); -}; - -jQuery.fn.highlight = function (words, options) { - var settings = { className: 'highlight', element: 'span', caseSensitive: false, wordsOnly: false }; - jQuery.extend(settings, options); - - if (words.constructor === String) { - words = [words]; - } - words = jQuery.grep(words, function(word, i){ - return word != ''; - }); - words = jQuery.map(words, function(word, i) { - return word.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); - }); - if (words.length == 0) { return this; }; - - var flag = settings.caseSensitive ? 
"" : "i"; - var pattern = "(" + words.join("|") + ")"; - if (settings.wordsOnly) { - pattern = "\\b" + pattern + "\\b"; - } - var re = new RegExp(pattern, flag); - - return this.each(function () { - jQuery.highlight(this, re, settings.element, settings.className); - }); -}; - diff --git a/docs/api/source/javascripts/lib/_jquery.js b/docs/api/source/javascripts/lib/_jquery.js deleted file mode 100644 index fc6c299..0000000 --- a/docs/api/source/javascripts/lib/_jquery.js +++ /dev/null @@ -1,10881 +0,0 @@ -/*! - * jQuery JavaScript Library v3.6.0 - * https://jquery.com/ - * - * Includes Sizzle.js - * https://sizzlejs.com/ - * - * Copyright OpenJS Foundation and other contributors - * Released under the MIT license - * https://jquery.org/license - * - * Date: 2021-03-02T17:08Z - */ -( function( global, factory ) { - - "use strict"; - - if ( typeof module === "object" && typeof module.exports === "object" ) { - - // For CommonJS and CommonJS-like environments where a proper `window` - // is present, execute the factory and get jQuery. - // For environments that do not have a `window` with a `document` - // (such as Node.js), expose a factory as module.exports. - // This accentuates the need for the creation of a real `window`. - // e.g. var jQuery = require("jquery")(window); - // See ticket #14549 for more info. - module.exports = global.document ? - factory( global, true ) : - function( w ) { - if ( !w.document ) { - throw new Error( "jQuery requires a window with a document" ); - } - return factory( w ); - }; - } else { - factory( global ); - } - -// Pass this if window is not defined yet -} )( typeof window !== "undefined" ? window : this, function( window, noGlobal ) { - -// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 9.1 -// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict mode -// arguments.callee.caller (trac-13335). 
But as of jQuery 3.0 (2016), strict mode should be common -// enough that all such attempts are guarded in a try block. -"use strict"; - -var arr = []; - -var getProto = Object.getPrototypeOf; - -var slice = arr.slice; - -var flat = arr.flat ? function( array ) { - return arr.flat.call( array ); -} : function( array ) { - return arr.concat.apply( [], array ); -}; - - -var push = arr.push; - -var indexOf = arr.indexOf; - -var class2type = {}; - -var toString = class2type.toString; - -var hasOwn = class2type.hasOwnProperty; - -var fnToString = hasOwn.toString; - -var ObjectFunctionString = fnToString.call( Object ); - -var support = {}; - -var isFunction = function isFunction( obj ) { - - // Support: Chrome <=57, Firefox <=52 - // In some browsers, typeof returns "function" for HTML elements - // (i.e., `typeof document.createElement( "object" ) === "function"`). - // We don't want to classify *any* DOM node as a function. - // Support: QtWeb <=3.8.5, WebKit <=534.34, wkhtmltopdf tool <=0.12.5 - // Plus for old WebKit, typeof returns "function" for HTML collections - // (e.g., `typeof document.getElementsByTagName("div") === "function"`). (gh-4756) - return typeof obj === "function" && typeof obj.nodeType !== "number" && - typeof obj.item !== "function"; - }; - - -var isWindow = function isWindow( obj ) { - return obj != null && obj === obj.window; - }; - - -var document = window.document; - - - - var preservedScriptAttributes = { - type: true, - src: true, - nonce: true, - noModule: true - }; - - function DOMEval( code, node, doc ) { - doc = doc || document; - - var i, val, - script = doc.createElement( "script" ); - - script.text = code; - if ( node ) { - for ( i in preservedScriptAttributes ) { - - // Support: Firefox 64+, Edge 18+ - // Some browsers don't support the "nonce" property on scripts. 
- // On the other hand, just using `getAttribute` is not enough as - // the `nonce` attribute is reset to an empty string whenever it - // becomes browsing-context connected. - // See https://github.com/whatwg/html/issues/2369 - // See https://html.spec.whatwg.org/#nonce-attributes - // The `node.getAttribute` check was added for the sake of - // `jQuery.globalEval` so that it can fake a nonce-containing node - // via an object. - val = node[ i ] || node.getAttribute && node.getAttribute( i ); - if ( val ) { - script.setAttribute( i, val ); - } - } - } - doc.head.appendChild( script ).parentNode.removeChild( script ); - } - - -function toType( obj ) { - if ( obj == null ) { - return obj + ""; - } - - // Support: Android <=2.3 only (functionish RegExp) - return typeof obj === "object" || typeof obj === "function" ? - class2type[ toString.call( obj ) ] || "object" : - typeof obj; -} -/* global Symbol */ -// Defining this global in .eslintrc.json would create a danger of using the global -// unguarded in another place, it seems safer to define global only for this module - - - -var - version = "3.6.0", - - // Define a local copy of jQuery - jQuery = function( selector, context ) { - - // The jQuery object is actually just the init constructor 'enhanced' - // Need init if jQuery is called (just allow error to be thrown if not included) - return new jQuery.fn.init( selector, context ); - }; - -jQuery.fn = jQuery.prototype = { - - // The current version of jQuery being used - jquery: version, - - constructor: jQuery, - - // The default length of a jQuery object is 0 - length: 0, - - toArray: function() { - return slice.call( this ); - }, - - // Get the Nth element in the matched element set OR - // Get the whole matched element set as a clean array - get: function( num ) { - - // Return all the elements in a clean array - if ( num == null ) { - return slice.call( this ); - } - - // Return just the one element from the set - return num < 0 ? 
this[ num + this.length ] : this[ num ]; - }, - - // Take an array of elements and push it onto the stack - // (returning the new matched element set) - pushStack: function( elems ) { - - // Build a new jQuery matched element set - var ret = jQuery.merge( this.constructor(), elems ); - - // Add the old object onto the stack (as a reference) - ret.prevObject = this; - - // Return the newly-formed element set - return ret; - }, - - // Execute a callback for every element in the matched set. - each: function( callback ) { - return jQuery.each( this, callback ); - }, - - map: function( callback ) { - return this.pushStack( jQuery.map( this, function( elem, i ) { - return callback.call( elem, i, elem ); - } ) ); - }, - - slice: function() { - return this.pushStack( slice.apply( this, arguments ) ); - }, - - first: function() { - return this.eq( 0 ); - }, - - last: function() { - return this.eq( -1 ); - }, - - even: function() { - return this.pushStack( jQuery.grep( this, function( _elem, i ) { - return ( i + 1 ) % 2; - } ) ); - }, - - odd: function() { - return this.pushStack( jQuery.grep( this, function( _elem, i ) { - return i % 2; - } ) ); - }, - - eq: function( i ) { - var len = this.length, - j = +i + ( i < 0 ? len : 0 ); - return this.pushStack( j >= 0 && j < len ? [ this[ j ] ] : [] ); - }, - - end: function() { - return this.prevObject || this.constructor(); - }, - - // For internal use only. - // Behaves like an Array's method, not like a jQuery method. 
- push: push, - sort: arr.sort, - splice: arr.splice -}; - -jQuery.extend = jQuery.fn.extend = function() { - var options, name, src, copy, copyIsArray, clone, - target = arguments[ 0 ] || {}, - i = 1, - length = arguments.length, - deep = false; - - // Handle a deep copy situation - if ( typeof target === "boolean" ) { - deep = target; - - // Skip the boolean and the target - target = arguments[ i ] || {}; - i++; - } - - // Handle case when target is a string or something (possible in deep copy) - if ( typeof target !== "object" && !isFunction( target ) ) { - target = {}; - } - - // Extend jQuery itself if only one argument is passed - if ( i === length ) { - target = this; - i--; - } - - for ( ; i < length; i++ ) { - - // Only deal with non-null/undefined values - if ( ( options = arguments[ i ] ) != null ) { - - // Extend the base object - for ( name in options ) { - copy = options[ name ]; - - // Prevent Object.prototype pollution - // Prevent never-ending loop - if ( name === "__proto__" || target === copy ) { - continue; - } - - // Recurse if we're merging plain objects or arrays - if ( deep && copy && ( jQuery.isPlainObject( copy ) || - ( copyIsArray = Array.isArray( copy ) ) ) ) { - src = target[ name ]; - - // Ensure proper type for the source value - if ( copyIsArray && !Array.isArray( src ) ) { - clone = []; - } else if ( !copyIsArray && !jQuery.isPlainObject( src ) ) { - clone = {}; - } else { - clone = src; - } - copyIsArray = false; - - // Never move original objects, clone them - target[ name ] = jQuery.extend( deep, clone, copy ); - - // Don't bring in undefined values - } else if ( copy !== undefined ) { - target[ name ] = copy; - } - } - } - } - - // Return the modified object - return target; -}; - -jQuery.extend( { - - // Unique for each copy of jQuery on the page - expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ), - - // Assume jQuery is ready without the ready module - isReady: true, - - error: function( msg ) { - throw new 
Error( msg ); - }, - - noop: function() {}, - - isPlainObject: function( obj ) { - var proto, Ctor; - - // Detect obvious negatives - // Use toString instead of jQuery.type to catch host objects - if ( !obj || toString.call( obj ) !== "[object Object]" ) { - return false; - } - - proto = getProto( obj ); - - // Objects with no prototype (e.g., `Object.create( null )`) are plain - if ( !proto ) { - return true; - } - - // Objects with prototype are plain iff they were constructed by a global Object function - Ctor = hasOwn.call( proto, "constructor" ) && proto.constructor; - return typeof Ctor === "function" && fnToString.call( Ctor ) === ObjectFunctionString; - }, - - isEmptyObject: function( obj ) { - var name; - - for ( name in obj ) { - return false; - } - return true; - }, - - // Evaluates a script in a provided context; falls back to the global one - // if not specified. - globalEval: function( code, options, doc ) { - DOMEval( code, { nonce: options && options.nonce }, doc ); - }, - - each: function( obj, callback ) { - var length, i = 0; - - if ( isArrayLike( obj ) ) { - length = obj.length; - for ( ; i < length; i++ ) { - if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { - break; - } - } - } else { - for ( i in obj ) { - if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { - break; - } - } - } - - return obj; - }, - - // results is for internal usage only - makeArray: function( arr, results ) { - var ret = results || []; - - if ( arr != null ) { - if ( isArrayLike( Object( arr ) ) ) { - jQuery.merge( ret, - typeof arr === "string" ? - [ arr ] : arr - ); - } else { - push.call( ret, arr ); - } - } - - return ret; - }, - - inArray: function( elem, arr, i ) { - return arr == null ? 
-1 : indexOf.call( arr, elem, i ); - }, - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - merge: function( first, second ) { - var len = +second.length, - j = 0, - i = first.length; - - for ( ; j < len; j++ ) { - first[ i++ ] = second[ j ]; - } - - first.length = i; - - return first; - }, - - grep: function( elems, callback, invert ) { - var callbackInverse, - matches = [], - i = 0, - length = elems.length, - callbackExpect = !invert; - - // Go through the array, only saving the items - // that pass the validator function - for ( ; i < length; i++ ) { - callbackInverse = !callback( elems[ i ], i ); - if ( callbackInverse !== callbackExpect ) { - matches.push( elems[ i ] ); - } - } - - return matches; - }, - - // arg is for internal usage only - map: function( elems, callback, arg ) { - var length, value, - i = 0, - ret = []; - - // Go through the array, translating each of the items to their new values - if ( isArrayLike( elems ) ) { - length = elems.length; - for ( ; i < length; i++ ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret.push( value ); - } - } - - // Go through every key on the object, - } else { - for ( i in elems ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret.push( value ); - } - } - } - - // Flatten any nested arrays - return flat( ret ); - }, - - // A global GUID counter for objects - guid: 1, - - // jQuery.support is not used in Core but other projects attach their - // properties to it so it needs to exist. 
- support: support -} ); - -if ( typeof Symbol === "function" ) { - jQuery.fn[ Symbol.iterator ] = arr[ Symbol.iterator ]; -} - -// Populate the class2type map -jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ), - function( _i, name ) { - class2type[ "[object " + name + "]" ] = name.toLowerCase(); - } ); - -function isArrayLike( obj ) { - - // Support: real iOS 8.2 only (not reproducible in simulator) - // `in` check used to prevent JIT error (gh-2145) - // hasOwn isn't used here due to false negatives - // regarding Nodelist length in IE - var length = !!obj && "length" in obj && obj.length, - type = toType( obj ); - - if ( isFunction( obj ) || isWindow( obj ) ) { - return false; - } - - return type === "array" || length === 0 || - typeof length === "number" && length > 0 && ( length - 1 ) in obj; -} -var Sizzle = -/*! - * Sizzle CSS Selector Engine v2.3.6 - * https://sizzlejs.com/ - * - * Copyright JS Foundation and other contributors - * Released under the MIT license - * https://js.foundation/ - * - * Date: 2021-02-16 - */ -( function( window ) { -var i, - support, - Expr, - getText, - isXML, - tokenize, - compile, - select, - outermostContext, - sortInput, - hasDuplicate, - - // Local document vars - setDocument, - document, - docElem, - documentIsHTML, - rbuggyQSA, - rbuggyMatches, - matches, - contains, - - // Instance-specific data - expando = "sizzle" + 1 * new Date(), - preferredDoc = window.document, - dirruns = 0, - done = 0, - classCache = createCache(), - tokenCache = createCache(), - compilerCache = createCache(), - nonnativeSelectorCache = createCache(), - sortOrder = function( a, b ) { - if ( a === b ) { - hasDuplicate = true; - } - return 0; - }, - - // Instance methods - hasOwn = ( {} ).hasOwnProperty, - arr = [], - pop = arr.pop, - pushNative = arr.push, - push = arr.push, - slice = arr.slice, - - // Use a stripped-down indexOf as it's faster than native - // https://jsperf.com/thor-indexof-vs-for/5 
- indexOf = function( list, elem ) { - var i = 0, - len = list.length; - for ( ; i < len; i++ ) { - if ( list[ i ] === elem ) { - return i; - } - } - return -1; - }, - - booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|" + - "ismap|loop|multiple|open|readonly|required|scoped", - - // Regular expressions - - // http://www.w3.org/TR/css3-selectors/#whitespace - whitespace = "[\\x20\\t\\r\\n\\f]", - - // https://www.w3.org/TR/css-syntax-3/#ident-token-diagram - identifier = "(?:\\\\[\\da-fA-F]{1,6}" + whitespace + - "?|\\\\[^\\r\\n\\f]|[\\w-]|[^\0-\\x7f])+", - - // Attribute selectors: http://www.w3.org/TR/selectors/#attribute-selectors - attributes = "\\[" + whitespace + "*(" + identifier + ")(?:" + whitespace + - - // Operator (capture 2) - "*([*^$|!~]?=)" + whitespace + - - // "Attribute values must be CSS identifiers [capture 5] - // or strings [capture 3 or capture 4]" - "*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|(" + identifier + "))|)" + - whitespace + "*\\]", - - pseudos = ":(" + identifier + ")(?:\\((" + - - // To reduce the number of selectors needing tokenize in the preFilter, prefer arguments: - // 1. quoted (capture 3; capture 4 or capture 5) - "('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|" + - - // 2. simple (capture 6) - "((?:\\\\.|[^\\\\()[\\]]|" + attributes + ")*)|" + - - // 3. 
anything else (capture 2) - ".*" + - ")\\)|)", - - // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter - rwhitespace = new RegExp( whitespace + "+", "g" ), - rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + - whitespace + "+$", "g" ), - - rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), - rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + - "*" ), - rdescend = new RegExp( whitespace + "|>" ), - - rpseudo = new RegExp( pseudos ), - ridentifier = new RegExp( "^" + identifier + "$" ), - - matchExpr = { - "ID": new RegExp( "^#(" + identifier + ")" ), - "CLASS": new RegExp( "^\\.(" + identifier + ")" ), - "TAG": new RegExp( "^(" + identifier + "|[*])" ), - "ATTR": new RegExp( "^" + attributes ), - "PSEUDO": new RegExp( "^" + pseudos ), - "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + - whitespace + "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + - whitespace + "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), - "bool": new RegExp( "^(?:" + booleans + ")$", "i" ), - - // For use in libraries implementing .is() - // We use this for POS matching in `select` - "needsContext": new RegExp( "^" + whitespace + - "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + whitespace + - "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) - }, - - rhtml = /HTML$/i, - rinputs = /^(?:input|select|textarea|button)$/i, - rheader = /^h\d$/i, - - rnative = /^[^{]+\{\s*\[native \w/, - - // Easily-parseable/retrievable ID or TAG or CLASS selectors - rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, - - rsibling = /[+~]/, - - // CSS escapes - // http://www.w3.org/TR/CSS21/syndata.html#escaped-characters - runescape = new RegExp( "\\\\[\\da-fA-F]{1,6}" + whitespace + "?|\\\\([^\\r\\n\\f])", "g" ), - funescape = function( escape, nonHex ) { - var high = "0x" + escape.slice( 1 ) - 0x10000; - - return nonHex ? 
- - // Strip the backslash prefix from a non-hex escape sequence - nonHex : - - // Replace a hexadecimal escape sequence with the encoded Unicode code point - // Support: IE <=11+ - // For values outside the Basic Multilingual Plane (BMP), manually construct a - // surrogate pair - high < 0 ? - String.fromCharCode( high + 0x10000 ) : - String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 ); - }, - - // CSS string/identifier serialization - // https://drafts.csswg.org/cssom/#common-serializing-idioms - rcssescape = /([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g, - fcssescape = function( ch, asCodePoint ) { - if ( asCodePoint ) { - - // U+0000 NULL becomes U+FFFD REPLACEMENT CHARACTER - if ( ch === "\0" ) { - return "\uFFFD"; - } - - // Control characters and (dependent upon position) numbers get escaped as code points - return ch.slice( 0, -1 ) + "\\" + - ch.charCodeAt( ch.length - 1 ).toString( 16 ) + " "; - } - - // Other potentially-special ASCII characters get backslash-escaped - return "\\" + ch; - }, - - // Used for iframes - // See setDocument() - // Removing the function wrapper causes a "Permission Denied" - // error in IE - unloadHandler = function() { - setDocument(); - }, - - inDisabledFieldset = addCombinator( - function( elem ) { - return elem.disabled === true && elem.nodeName.toLowerCase() === "fieldset"; - }, - { dir: "parentNode", next: "legend" } - ); - -// Optimize for push.apply( _, NodeList ) -try { - push.apply( - ( arr = slice.call( preferredDoc.childNodes ) ), - preferredDoc.childNodes - ); - - // Support: Android<4.0 - // Detect silently failing push.apply - // eslint-disable-next-line no-unused-expressions - arr[ preferredDoc.childNodes.length ].nodeType; -} catch ( e ) { - push = { apply: arr.length ? 
- - // Leverage slice if possible - function( target, els ) { - pushNative.apply( target, slice.call( els ) ); - } : - - // Support: IE<9 - // Otherwise append directly - function( target, els ) { - var j = target.length, - i = 0; - - // Can't trust NodeList.length - while ( ( target[ j++ ] = els[ i++ ] ) ) {} - target.length = j - 1; - } - }; -} - -function Sizzle( selector, context, results, seed ) { - var m, i, elem, nid, match, groups, newSelector, - newContext = context && context.ownerDocument, - - // nodeType defaults to 9, since context defaults to document - nodeType = context ? context.nodeType : 9; - - results = results || []; - - // Return early from calls with invalid selector or context - if ( typeof selector !== "string" || !selector || - nodeType !== 1 && nodeType !== 9 && nodeType !== 11 ) { - - return results; - } - - // Try to shortcut find operations (as opposed to filters) in HTML documents - if ( !seed ) { - setDocument( context ); - context = context || document; - - if ( documentIsHTML ) { - - // If the selector is sufficiently simple, try using a "get*By*" DOM method - // (excepting DocumentFragment context, where the methods don't exist) - if ( nodeType !== 11 && ( match = rquickExpr.exec( selector ) ) ) { - - // ID selector - if ( ( m = match[ 1 ] ) ) { - - // Document context - if ( nodeType === 9 ) { - if ( ( elem = context.getElementById( m ) ) ) { - - // Support: IE, Opera, Webkit - // TODO: identify versions - // getElementById can match elements by name instead of ID - if ( elem.id === m ) { - results.push( elem ); - return results; - } - } else { - return results; - } - - // Element context - } else { - - // Support: IE, Opera, Webkit - // TODO: identify versions - // getElementById can match elements by name instead of ID - if ( newContext && ( elem = newContext.getElementById( m ) ) && - contains( context, elem ) && - elem.id === m ) { - - results.push( elem ); - return results; - } - } - - // Type selector - } else if ( match[ 2 
] ) { - push.apply( results, context.getElementsByTagName( selector ) ); - return results; - - // Class selector - } else if ( ( m = match[ 3 ] ) && support.getElementsByClassName && - context.getElementsByClassName ) { - - push.apply( results, context.getElementsByClassName( m ) ); - return results; - } - } - - // Take advantage of querySelectorAll - if ( support.qsa && - !nonnativeSelectorCache[ selector + " " ] && - ( !rbuggyQSA || !rbuggyQSA.test( selector ) ) && - - // Support: IE 8 only - // Exclude object elements - ( nodeType !== 1 || context.nodeName.toLowerCase() !== "object" ) ) { - - newSelector = selector; - newContext = context; - - // qSA considers elements outside a scoping root when evaluating child or - // descendant combinators, which is not what we want. - // In such cases, we work around the behavior by prefixing every selector in the - // list with an ID selector referencing the scope context. - // The technique has to be used as well when a leading combinator is used - // as such selectors are not recognized by querySelectorAll. - // Thanks to Andrew Dupont for this technique. - if ( nodeType === 1 && - ( rdescend.test( selector ) || rcombinators.test( selector ) ) ) { - - // Expand context for sibling selectors - newContext = rsibling.test( selector ) && testContext( context.parentNode ) || - context; - - // We can use :scope instead of the ID hack if the browser - // supports it & if we're not changing the context. - if ( newContext !== context || !support.scope ) { - - // Capture the context ID, setting it first if necessary - if ( ( nid = context.getAttribute( "id" ) ) ) { - nid = nid.replace( rcssescape, fcssescape ); - } else { - context.setAttribute( "id", ( nid = expando ) ); - } - } - - // Prefix every selector in the list - groups = tokenize( selector ); - i = groups.length; - while ( i-- ) { - groups[ i ] = ( nid ? 
"#" + nid : ":scope" ) + " " + - toSelector( groups[ i ] ); - } - newSelector = groups.join( "," ); - } - - try { - push.apply( results, - newContext.querySelectorAll( newSelector ) - ); - return results; - } catch ( qsaError ) { - nonnativeSelectorCache( selector, true ); - } finally { - if ( nid === expando ) { - context.removeAttribute( "id" ); - } - } - } - } - } - - // All others - return select( selector.replace( rtrim, "$1" ), context, results, seed ); -} - -/** - * Create key-value caches of limited size - * @returns {function(string, object)} Returns the Object data after storing it on itself with - * property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength) - * deleting the oldest entry - */ -function createCache() { - var keys = []; - - function cache( key, value ) { - - // Use (key + " ") to avoid collision with native prototype properties (see Issue #157) - if ( keys.push( key + " " ) > Expr.cacheLength ) { - - // Only keep the most recent entries - delete cache[ keys.shift() ]; - } - return ( cache[ key + " " ] = value ); - } - return cache; -} - -/** - * Mark a function for special use by Sizzle - * @param {Function} fn The function to mark - */ -function markFunction( fn ) { - fn[ expando ] = true; - return fn; -} - -/** - * Support testing using an element - * @param {Function} fn Passed the created element and returns a boolean result - */ -function assert( fn ) { - var el = document.createElement( "fieldset" ); - - try { - return !!fn( el ); - } catch ( e ) { - return false; - } finally { - - // Remove from its parent by default - if ( el.parentNode ) { - el.parentNode.removeChild( el ); - } - - // release memory in IE - el = null; - } -} - -/** - * Adds the same handler for all of the specified attrs - * @param {String} attrs Pipe-separated list of attributes - * @param {Function} handler The method that will be applied - */ -function addHandle( attrs, handler ) { - var arr = attrs.split( "|" ), - i = 
arr.length; - - while ( i-- ) { - Expr.attrHandle[ arr[ i ] ] = handler; - } -} - -/** - * Checks document order of two siblings - * @param {Element} a - * @param {Element} b - * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b - */ -function siblingCheck( a, b ) { - var cur = b && a, - diff = cur && a.nodeType === 1 && b.nodeType === 1 && - a.sourceIndex - b.sourceIndex; - - // Use IE sourceIndex if available on both nodes - if ( diff ) { - return diff; - } - - // Check if b follows a - if ( cur ) { - while ( ( cur = cur.nextSibling ) ) { - if ( cur === b ) { - return -1; - } - } - } - - return a ? 1 : -1; -} - -/** - * Returns a function to use in pseudos for input types - * @param {String} type - */ -function createInputPseudo( type ) { - return function( elem ) { - var name = elem.nodeName.toLowerCase(); - return name === "input" && elem.type === type; - }; -} - -/** - * Returns a function to use in pseudos for buttons - * @param {String} type - */ -function createButtonPseudo( type ) { - return function( elem ) { - var name = elem.nodeName.toLowerCase(); - return ( name === "input" || name === "button" ) && elem.type === type; - }; -} - -/** - * Returns a function to use in pseudos for :enabled/:disabled - * @param {Boolean} disabled true for :disabled; false for :enabled - */ -function createDisabledPseudo( disabled ) { - - // Known :disabled false positives: fieldset[disabled] > legend:nth-of-type(n+2) :can-disable - return function( elem ) { - - // Only certain elements can match :enabled or :disabled - // https://html.spec.whatwg.org/multipage/scripting.html#selector-enabled - // https://html.spec.whatwg.org/multipage/scripting.html#selector-disabled - if ( "form" in elem ) { - - // Check for inherited disabledness on relevant non-disabled elements: - // * listed form-associated elements in a disabled fieldset - // https://html.spec.whatwg.org/multipage/forms.html#category-listed - // 
https://html.spec.whatwg.org/multipage/forms.html#concept-fe-disabled - // * option elements in a disabled optgroup - // https://html.spec.whatwg.org/multipage/forms.html#concept-option-disabled - // All such elements have a "form" property. - if ( elem.parentNode && elem.disabled === false ) { - - // Option elements defer to a parent optgroup if present - if ( "label" in elem ) { - if ( "label" in elem.parentNode ) { - return elem.parentNode.disabled === disabled; - } else { - return elem.disabled === disabled; - } - } - - // Support: IE 6 - 11 - // Use the isDisabled shortcut property to check for disabled fieldset ancestors - return elem.isDisabled === disabled || - - // Where there is no isDisabled, check manually - /* jshint -W018 */ - elem.isDisabled !== !disabled && - inDisabledFieldset( elem ) === disabled; - } - - return elem.disabled === disabled; - - // Try to winnow out elements that can't be disabled before trusting the disabled property. - // Some victims get caught in our net (label, legend, menu, track), but it shouldn't - // even exist on them, let alone have a boolean value. 
- } else if ( "label" in elem ) { - return elem.disabled === disabled; - } - - // Remaining elements are neither :enabled nor :disabled - return false; - }; -} - -/** - * Returns a function to use in pseudos for positionals - * @param {Function} fn - */ -function createPositionalPseudo( fn ) { - return markFunction( function( argument ) { - argument = +argument; - return markFunction( function( seed, matches ) { - var j, - matchIndexes = fn( [], seed.length, argument ), - i = matchIndexes.length; - - // Match elements found at the specified indexes - while ( i-- ) { - if ( seed[ ( j = matchIndexes[ i ] ) ] ) { - seed[ j ] = !( matches[ j ] = seed[ j ] ); - } - } - } ); - } ); -} - -/** - * Checks a node for validity as a Sizzle context - * @param {Element|Object=} context - * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value - */ -function testContext( context ) { - return context && typeof context.getElementsByTagName !== "undefined" && context; -} - -// Expose support vars for convenience -support = Sizzle.support = {}; - -/** - * Detects XML nodes - * @param {Element|Object} elem An element or a document - * @returns {Boolean} True iff elem is a non-HTML XML node - */ -isXML = Sizzle.isXML = function( elem ) { - var namespace = elem && elem.namespaceURI, - docElem = elem && ( elem.ownerDocument || elem ).documentElement; - - // Support: IE <=8 - // Assume HTML when documentElement doesn't yet exist, such as inside loading iframes - // https://bugs.jquery.com/ticket/4833 - return !rhtml.test( namespace || docElem && docElem.nodeName || "HTML" ); -}; - -/** - * Sets document-related variables once based on the current document - * @param {Element|Object} [doc] An element or document object to use to set the document - * @returns {Object} Returns the current document - */ -setDocument = Sizzle.setDocument = function( node ) { - var hasCompare, subWindow, - doc = node ? 
node.ownerDocument || node : preferredDoc; - - // Return early if doc is invalid or already selected - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. - // eslint-disable-next-line eqeqeq - if ( doc == document || doc.nodeType !== 9 || !doc.documentElement ) { - return document; - } - - // Update global variables - document = doc; - docElem = document.documentElement; - documentIsHTML = !isXML( document ); - - // Support: IE 9 - 11+, Edge 12 - 18+ - // Accessing iframe documents after unload throws "permission denied" errors (jQuery #13936) - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. - // eslint-disable-next-line eqeqeq - if ( preferredDoc != document && - ( subWindow = document.defaultView ) && subWindow.top !== subWindow ) { - - // Support: IE 11, Edge - if ( subWindow.addEventListener ) { - subWindow.addEventListener( "unload", unloadHandler, false ); - - // Support: IE 9 - 10 only - } else if ( subWindow.attachEvent ) { - subWindow.attachEvent( "onunload", unloadHandler ); - } - } - - // Support: IE 8 - 11+, Edge 12 - 18+, Chrome <=16 - 25 only, Firefox <=3.6 - 31 only, - // Safari 4 - 5 only, Opera <=11.6 - 12.x only - // IE/Edge & older browsers don't support the :scope pseudo-class. - // Support: Safari 6.0 only - // Safari 6.0 supports :scope but it's an alias of :root there. 
- support.scope = assert( function( el ) { - docElem.appendChild( el ).appendChild( document.createElement( "div" ) ); - return typeof el.querySelectorAll !== "undefined" && - !el.querySelectorAll( ":scope fieldset div" ).length; - } ); - - /* Attributes - ---------------------------------------------------------------------- */ - - // Support: IE<8 - // Verify that getAttribute really returns attributes and not properties - // (excepting IE8 booleans) - support.attributes = assert( function( el ) { - el.className = "i"; - return !el.getAttribute( "className" ); - } ); - - /* getElement(s)By* - ---------------------------------------------------------------------- */ - - // Check if getElementsByTagName("*") returns only elements - support.getElementsByTagName = assert( function( el ) { - el.appendChild( document.createComment( "" ) ); - return !el.getElementsByTagName( "*" ).length; - } ); - - // Support: IE<9 - support.getElementsByClassName = rnative.test( document.getElementsByClassName ); - - // Support: IE<10 - // Check if getElementById returns elements by name - // The broken getElementById methods don't pick up programmatically-set names, - // so use a roundabout getElementsByName test - support.getById = assert( function( el ) { - docElem.appendChild( el ).id = expando; - return !document.getElementsByName || !document.getElementsByName( expando ).length; - } ); - - // ID filter and find - if ( support.getById ) { - Expr.filter[ "ID" ] = function( id ) { - var attrId = id.replace( runescape, funescape ); - return function( elem ) { - return elem.getAttribute( "id" ) === attrId; - }; - }; - Expr.find[ "ID" ] = function( id, context ) { - if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { - var elem = context.getElementById( id ); - return elem ? 
[ elem ] : []; - } - }; - } else { - Expr.filter[ "ID" ] = function( id ) { - var attrId = id.replace( runescape, funescape ); - return function( elem ) { - var node = typeof elem.getAttributeNode !== "undefined" && - elem.getAttributeNode( "id" ); - return node && node.value === attrId; - }; - }; - - // Support: IE 6 - 7 only - // getElementById is not reliable as a find shortcut - Expr.find[ "ID" ] = function( id, context ) { - if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { - var node, i, elems, - elem = context.getElementById( id ); - - if ( elem ) { - - // Verify the id attribute - node = elem.getAttributeNode( "id" ); - if ( node && node.value === id ) { - return [ elem ]; - } - - // Fall back on getElementsByName - elems = context.getElementsByName( id ); - i = 0; - while ( ( elem = elems[ i++ ] ) ) { - node = elem.getAttributeNode( "id" ); - if ( node && node.value === id ) { - return [ elem ]; - } - } - } - - return []; - } - }; - } - - // Tag - Expr.find[ "TAG" ] = support.getElementsByTagName ? 
- function( tag, context ) { - if ( typeof context.getElementsByTagName !== "undefined" ) { - return context.getElementsByTagName( tag ); - - // DocumentFragment nodes don't have gEBTN - } else if ( support.qsa ) { - return context.querySelectorAll( tag ); - } - } : - - function( tag, context ) { - var elem, - tmp = [], - i = 0, - - // By happy coincidence, a (broken) gEBTN appears on DocumentFragment nodes too - results = context.getElementsByTagName( tag ); - - // Filter out possible comments - if ( tag === "*" ) { - while ( ( elem = results[ i++ ] ) ) { - if ( elem.nodeType === 1 ) { - tmp.push( elem ); - } - } - - return tmp; - } - return results; - }; - - // Class - Expr.find[ "CLASS" ] = support.getElementsByClassName && function( className, context ) { - if ( typeof context.getElementsByClassName !== "undefined" && documentIsHTML ) { - return context.getElementsByClassName( className ); - } - }; - - /* QSA/matchesSelector - ---------------------------------------------------------------------- */ - - // QSA and matchesSelector support - - // matchesSelector(:active) reports false when true (IE9/Opera 11.5) - rbuggyMatches = []; - - // qSa(:focus) reports false when true (Chrome 21) - // We allow this because of a bug in IE8/9 that throws an error - // whenever `document.activeElement` is accessed on an iframe - // So, we allow :focus to pass through QSA all the time to avoid the IE error - // See https://bugs.jquery.com/ticket/13378 - rbuggyQSA = []; - - if ( ( support.qsa = rnative.test( document.querySelectorAll ) ) ) { - - // Build QSA regex - // Regex strategy adopted from Diego Perini - assert( function( el ) { - - var input; - - // Select is set to empty string on purpose - // This is to test IE's treatment of not explicitly - // setting a boolean content attribute, - // since its presence should be enough - // https://bugs.jquery.com/ticket/12359 - docElem.appendChild( el ).innerHTML = "" + - ""; - - // Support: IE8, Opera 11-12.16 - // Nothing should 
be selected when empty strings follow ^= or $= or *= - // The test attribute must be unknown in Opera but "safe" for WinRT - // https://msdn.microsoft.com/en-us/library/ie/hh465388.aspx#attribute_section - if ( el.querySelectorAll( "[msallowcapture^='']" ).length ) { - rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" ); - } - - // Support: IE8 - // Boolean attributes and "value" are not treated correctly - if ( !el.querySelectorAll( "[selected]" ).length ) { - rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" ); - } - - // Support: Chrome<29, Android<4.4, Safari<7.0+, iOS<7.0+, PhantomJS<1.9.8+ - if ( !el.querySelectorAll( "[id~=" + expando + "-]" ).length ) { - rbuggyQSA.push( "~=" ); - } - - // Support: IE 11+, Edge 15 - 18+ - // IE 11/Edge don't find elements on a `[name='']` query in some cases. - // Adding a temporary attribute to the document before the selection works - // around the issue. - // Interestingly, IE 10 & older don't seem to have the issue. - input = document.createElement( "input" ); - input.setAttribute( "name", "" ); - el.appendChild( input ); - if ( !el.querySelectorAll( "[name='']" ).length ) { - rbuggyQSA.push( "\\[" + whitespace + "*name" + whitespace + "*=" + - whitespace + "*(?:''|\"\")" ); - } - - // Webkit/Opera - :checked should return selected option elements - // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked - // IE8 throws error here and will not see later tests - if ( !el.querySelectorAll( ":checked" ).length ) { - rbuggyQSA.push( ":checked" ); - } - - // Support: Safari 8+, iOS 8+ - // https://bugs.webkit.org/show_bug.cgi?id=136851 - // In-page `selector#id sibling-combinator selector` fails - if ( !el.querySelectorAll( "a#" + expando + "+*" ).length ) { - rbuggyQSA.push( ".#.+[+~]" ); - } - - // Support: Firefox <=3.6 - 5 only - // Old Firefox doesn't throw on a badly-escaped identifier. 
- el.querySelectorAll( "\\\f" ); - rbuggyQSA.push( "[\\r\\n\\f]" ); - } ); - - assert( function( el ) { - el.innerHTML = "" + - ""; - - // Support: Windows 8 Native Apps - // The type and name attributes are restricted during .innerHTML assignment - var input = document.createElement( "input" ); - input.setAttribute( "type", "hidden" ); - el.appendChild( input ).setAttribute( "name", "D" ); - - // Support: IE8 - // Enforce case-sensitivity of name attribute - if ( el.querySelectorAll( "[name=d]" ).length ) { - rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" ); - } - - // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled) - // IE8 throws error here and will not see later tests - if ( el.querySelectorAll( ":enabled" ).length !== 2 ) { - rbuggyQSA.push( ":enabled", ":disabled" ); - } - - // Support: IE9-11+ - // IE's :disabled selector does not pick up the children of disabled fieldsets - docElem.appendChild( el ).disabled = true; - if ( el.querySelectorAll( ":disabled" ).length !== 2 ) { - rbuggyQSA.push( ":enabled", ":disabled" ); - } - - // Support: Opera 10 - 11 only - // Opera 10-11 does not throw on post-comma invalid pseudos - el.querySelectorAll( "*,:x" ); - rbuggyQSA.push( ",.*:" ); - } ); - } - - if ( ( support.matchesSelector = rnative.test( ( matches = docElem.matches || - docElem.webkitMatchesSelector || - docElem.mozMatchesSelector || - docElem.oMatchesSelector || - docElem.msMatchesSelector ) ) ) ) { - - assert( function( el ) { - - // Check to see if it's possible to do matchesSelector - // on a disconnected node (IE 9) - support.disconnectedMatch = matches.call( el, "*" ); - - // This should fail with an exception - // Gecko does not error, returns false instead - matches.call( el, "[s!='']:x" ); - rbuggyMatches.push( "!=", pseudos ); - } ); - } - - rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join( "|" ) ); - rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join( "|" ) ); - - /* Contains 
- ---------------------------------------------------------------------- */ - hasCompare = rnative.test( docElem.compareDocumentPosition ); - - // Element contains another - // Purposefully self-exclusive - // As in, an element does not contain itself - contains = hasCompare || rnative.test( docElem.contains ) ? - function( a, b ) { - var adown = a.nodeType === 9 ? a.documentElement : a, - bup = b && b.parentNode; - return a === bup || !!( bup && bup.nodeType === 1 && ( - adown.contains ? - adown.contains( bup ) : - a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16 - ) ); - } : - function( a, b ) { - if ( b ) { - while ( ( b = b.parentNode ) ) { - if ( b === a ) { - return true; - } - } - } - return false; - }; - - /* Sorting - ---------------------------------------------------------------------- */ - - // Document order sorting - sortOrder = hasCompare ? - function( a, b ) { - - // Flag for duplicate removal - if ( a === b ) { - hasDuplicate = true; - return 0; - } - - // Sort on method existence if only one input has compareDocumentPosition - var compare = !a.compareDocumentPosition - !b.compareDocumentPosition; - if ( compare ) { - return compare; - } - - // Calculate position if both inputs belong to the same document - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. - // eslint-disable-next-line eqeqeq - compare = ( a.ownerDocument || a ) == ( b.ownerDocument || b ) ? - a.compareDocumentPosition( b ) : - - // Otherwise we know they are disconnected - 1; - - // Disconnected nodes - if ( compare & 1 || - ( !support.sortDetached && b.compareDocumentPosition( a ) === compare ) ) { - - // Choose the first element that is related to our preferred document - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. 
- // eslint-disable-next-line eqeqeq - if ( a == document || a.ownerDocument == preferredDoc && - contains( preferredDoc, a ) ) { - return -1; - } - - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. - // eslint-disable-next-line eqeqeq - if ( b == document || b.ownerDocument == preferredDoc && - contains( preferredDoc, b ) ) { - return 1; - } - - // Maintain original order - return sortInput ? - ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : - 0; - } - - return compare & 4 ? -1 : 1; - } : - function( a, b ) { - - // Exit early if the nodes are identical - if ( a === b ) { - hasDuplicate = true; - return 0; - } - - var cur, - i = 0, - aup = a.parentNode, - bup = b.parentNode, - ap = [ a ], - bp = [ b ]; - - // Parentless nodes are either documents or disconnected - if ( !aup || !bup ) { - - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. - /* eslint-disable eqeqeq */ - return a == document ? -1 : - b == document ? 1 : - /* eslint-enable eqeqeq */ - aup ? -1 : - bup ? 1 : - sortInput ? - ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : - 0; - - // If the nodes are siblings, we can do a quick check - } else if ( aup === bup ) { - return siblingCheck( a, b ); - } - - // Otherwise we need full lists of their ancestors for comparison - cur = a; - while ( ( cur = cur.parentNode ) ) { - ap.unshift( cur ); - } - cur = b; - while ( ( cur = cur.parentNode ) ) { - bp.unshift( cur ); - } - - // Walk down the tree looking for a discrepancy - while ( ap[ i ] === bp[ i ] ) { - i++; - } - - return i ? 
- - // Do a sibling check if the nodes have a common ancestor - siblingCheck( ap[ i ], bp[ i ] ) : - - // Otherwise nodes in our document sort first - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. - /* eslint-disable eqeqeq */ - ap[ i ] == preferredDoc ? -1 : - bp[ i ] == preferredDoc ? 1 : - /* eslint-enable eqeqeq */ - 0; - }; - - return document; -}; - -Sizzle.matches = function( expr, elements ) { - return Sizzle( expr, null, null, elements ); -}; - -Sizzle.matchesSelector = function( elem, expr ) { - setDocument( elem ); - - if ( support.matchesSelector && documentIsHTML && - !nonnativeSelectorCache[ expr + " " ] && - ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) && - ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) { - - try { - var ret = matches.call( elem, expr ); - - // IE 9's matchesSelector returns false on disconnected nodes - if ( ret || support.disconnectedMatch || - - // As well, disconnected nodes are said to be in a document - // fragment in IE 9 - elem.document && elem.document.nodeType !== 11 ) { - return ret; - } - } catch ( e ) { - nonnativeSelectorCache( expr, true ); - } - } - - return Sizzle( expr, document, null, [ elem ] ).length > 0; -}; - -Sizzle.contains = function( context, elem ) { - - // Set document vars if needed - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. - // eslint-disable-next-line eqeqeq - if ( ( context.ownerDocument || context ) != document ) { - setDocument( context ); - } - return contains( context, elem ); -}; - -Sizzle.attr = function( elem, name ) { - - // Set document vars if needed - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. 
- // eslint-disable-next-line eqeqeq - if ( ( elem.ownerDocument || elem ) != document ) { - setDocument( elem ); - } - - var fn = Expr.attrHandle[ name.toLowerCase() ], - - // Don't get fooled by Object.prototype properties (jQuery #13807) - val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ? - fn( elem, name, !documentIsHTML ) : - undefined; - - return val !== undefined ? - val : - support.attributes || !documentIsHTML ? - elem.getAttribute( name ) : - ( val = elem.getAttributeNode( name ) ) && val.specified ? - val.value : - null; -}; - -Sizzle.escape = function( sel ) { - return ( sel + "" ).replace( rcssescape, fcssescape ); -}; - -Sizzle.error = function( msg ) { - throw new Error( "Syntax error, unrecognized expression: " + msg ); -}; - -/** - * Document sorting and removing duplicates - * @param {ArrayLike} results - */ -Sizzle.uniqueSort = function( results ) { - var elem, - duplicates = [], - j = 0, - i = 0; - - // Unless we *know* we can detect duplicates, assume their presence - hasDuplicate = !support.detectDuplicates; - sortInput = !support.sortStable && results.slice( 0 ); - results.sort( sortOrder ); - - if ( hasDuplicate ) { - while ( ( elem = results[ i++ ] ) ) { - if ( elem === results[ i ] ) { - j = duplicates.push( i ); - } - } - while ( j-- ) { - results.splice( duplicates[ j ], 1 ); - } - } - - // Clear input after sorting to release objects - // See https://github.com/jquery/sizzle/pull/225 - sortInput = null; - - return results; -}; - -/** - * Utility function for retrieving the text value of an array of DOM nodes - * @param {Array|Element} elem - */ -getText = Sizzle.getText = function( elem ) { - var node, - ret = "", - i = 0, - nodeType = elem.nodeType; - - if ( !nodeType ) { - - // If no nodeType, this is expected to be an array - while ( ( node = elem[ i++ ] ) ) { - - // Do not traverse comment nodes - ret += getText( node ); - } - } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { - - // Use textContent 
for elements - // innerText usage removed for consistency of new lines (jQuery #11153) - if ( typeof elem.textContent === "string" ) { - return elem.textContent; - } else { - - // Traverse its children - for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { - ret += getText( elem ); - } - } - } else if ( nodeType === 3 || nodeType === 4 ) { - return elem.nodeValue; - } - - // Do not include comment or processing instruction nodes - - return ret; -}; - -Expr = Sizzle.selectors = { - - // Can be adjusted by the user - cacheLength: 50, - - createPseudo: markFunction, - - match: matchExpr, - - attrHandle: {}, - - find: {}, - - relative: { - ">": { dir: "parentNode", first: true }, - " ": { dir: "parentNode" }, - "+": { dir: "previousSibling", first: true }, - "~": { dir: "previousSibling" } - }, - - preFilter: { - "ATTR": function( match ) { - match[ 1 ] = match[ 1 ].replace( runescape, funescape ); - - // Move the given value to match[3] whether quoted or unquoted - match[ 3 ] = ( match[ 3 ] || match[ 4 ] || - match[ 5 ] || "" ).replace( runescape, funescape ); - - if ( match[ 2 ] === "~=" ) { - match[ 3 ] = " " + match[ 3 ] + " "; - } - - return match.slice( 0, 4 ); - }, - - "CHILD": function( match ) { - - /* matches from matchExpr["CHILD"] - 1 type (only|nth|...) - 2 what (child|of-type) - 3 argument (even|odd|\d*|\d*n([+-]\d+)?|...) - 4 xn-component of xn+y argument ([+-]?\d*n|) - 5 sign of xn-component - 6 x of xn-component - 7 sign of y-component - 8 y of y-component - */ - match[ 1 ] = match[ 1 ].toLowerCase(); - - if ( match[ 1 ].slice( 0, 3 ) === "nth" ) { - - // nth-* requires argument - if ( !match[ 3 ] ) { - Sizzle.error( match[ 0 ] ); - } - - // numeric x and y parameters for Expr.filter.CHILD - // remember that false/true cast respectively to 0/1 - match[ 4 ] = +( match[ 4 ] ? 
- match[ 5 ] + ( match[ 6 ] || 1 ) : - 2 * ( match[ 3 ] === "even" || match[ 3 ] === "odd" ) ); - match[ 5 ] = +( ( match[ 7 ] + match[ 8 ] ) || match[ 3 ] === "odd" ); - - // other types prohibit arguments - } else if ( match[ 3 ] ) { - Sizzle.error( match[ 0 ] ); - } - - return match; - }, - - "PSEUDO": function( match ) { - var excess, - unquoted = !match[ 6 ] && match[ 2 ]; - - if ( matchExpr[ "CHILD" ].test( match[ 0 ] ) ) { - return null; - } - - // Accept quoted arguments as-is - if ( match[ 3 ] ) { - match[ 2 ] = match[ 4 ] || match[ 5 ] || ""; - - // Strip excess characters from unquoted arguments - } else if ( unquoted && rpseudo.test( unquoted ) && - - // Get excess from tokenize (recursively) - ( excess = tokenize( unquoted, true ) ) && - - // advance to the next closing parenthesis - ( excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length ) ) { - - // excess is a negative index - match[ 0 ] = match[ 0 ].slice( 0, excess ); - match[ 2 ] = unquoted.slice( 0, excess ); - } - - // Return only captures needed by the pseudo filter method (type and argument) - return match.slice( 0, 3 ); - } - }, - - filter: { - - "TAG": function( nodeNameSelector ) { - var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase(); - return nodeNameSelector === "*" ? 
- function() { - return true; - } : - function( elem ) { - return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; - }; - }, - - "CLASS": function( className ) { - var pattern = classCache[ className + " " ]; - - return pattern || - ( pattern = new RegExp( "(^|" + whitespace + - ")" + className + "(" + whitespace + "|$)" ) ) && classCache( - className, function( elem ) { - return pattern.test( - typeof elem.className === "string" && elem.className || - typeof elem.getAttribute !== "undefined" && - elem.getAttribute( "class" ) || - "" - ); - } ); - }, - - "ATTR": function( name, operator, check ) { - return function( elem ) { - var result = Sizzle.attr( elem, name ); - - if ( result == null ) { - return operator === "!="; - } - if ( !operator ) { - return true; - } - - result += ""; - - /* eslint-disable max-len */ - - return operator === "=" ? result === check : - operator === "!=" ? result !== check : - operator === "^=" ? check && result.indexOf( check ) === 0 : - operator === "*=" ? check && result.indexOf( check ) > -1 : - operator === "$=" ? check && result.slice( -check.length ) === check : - operator === "~=" ? ( " " + result.replace( rwhitespace, " " ) + " " ).indexOf( check ) > -1 : - operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" : - false; - /* eslint-enable max-len */ - - }; - }, - - "CHILD": function( type, what, _argument, first, last ) { - var simple = type.slice( 0, 3 ) !== "nth", - forward = type.slice( -4 ) !== "last", - ofType = what === "of-type"; - - return first === 1 && last === 0 ? - - // Shortcut for :nth-*(n) - function( elem ) { - return !!elem.parentNode; - } : - - function( elem, _context, xml ) { - var cache, uniqueCache, outerCache, node, nodeIndex, start, - dir = simple !== forward ? 
"nextSibling" : "previousSibling", - parent = elem.parentNode, - name = ofType && elem.nodeName.toLowerCase(), - useCache = !xml && !ofType, - diff = false; - - if ( parent ) { - - // :(first|last|only)-(child|of-type) - if ( simple ) { - while ( dir ) { - node = elem; - while ( ( node = node[ dir ] ) ) { - if ( ofType ? - node.nodeName.toLowerCase() === name : - node.nodeType === 1 ) { - - return false; - } - } - - // Reverse direction for :only-* (if we haven't yet done so) - start = dir = type === "only" && !start && "nextSibling"; - } - return true; - } - - start = [ forward ? parent.firstChild : parent.lastChild ]; - - // non-xml :nth-child(...) stores cache data on `parent` - if ( forward && useCache ) { - - // Seek `elem` from a previously-cached index - - // ...in a gzip-friendly way - node = parent; - outerCache = node[ expando ] || ( node[ expando ] = {} ); - - // Support: IE <9 only - // Defend against cloned attroperties (jQuery gh-1709) - uniqueCache = outerCache[ node.uniqueID ] || - ( outerCache[ node.uniqueID ] = {} ); - - cache = uniqueCache[ type ] || []; - nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ]; - diff = nodeIndex && cache[ 2 ]; - node = nodeIndex && parent.childNodes[ nodeIndex ]; - - while ( ( node = ++nodeIndex && node && node[ dir ] || - - // Fallback to seeking `elem` from the start - ( diff = nodeIndex = 0 ) || start.pop() ) ) { - - // When found, cache indexes on `parent` and break - if ( node.nodeType === 1 && ++diff && node === elem ) { - uniqueCache[ type ] = [ dirruns, nodeIndex, diff ]; - break; - } - } - - } else { - - // Use previously-cached element index if available - if ( useCache ) { - - // ...in a gzip-friendly way - node = elem; - outerCache = node[ expando ] || ( node[ expando ] = {} ); - - // Support: IE <9 only - // Defend against cloned attroperties (jQuery gh-1709) - uniqueCache = outerCache[ node.uniqueID ] || - ( outerCache[ node.uniqueID ] = {} ); - - cache = uniqueCache[ type ] || []; - nodeIndex = cache[ 0 
] === dirruns && cache[ 1 ]; - diff = nodeIndex; - } - - // xml :nth-child(...) - // or :nth-last-child(...) or :nth(-last)?-of-type(...) - if ( diff === false ) { - - // Use the same loop as above to seek `elem` from the start - while ( ( node = ++nodeIndex && node && node[ dir ] || - ( diff = nodeIndex = 0 ) || start.pop() ) ) { - - if ( ( ofType ? - node.nodeName.toLowerCase() === name : - node.nodeType === 1 ) && - ++diff ) { - - // Cache the index of each encountered element - if ( useCache ) { - outerCache = node[ expando ] || - ( node[ expando ] = {} ); - - // Support: IE <9 only - // Defend against cloned attroperties (jQuery gh-1709) - uniqueCache = outerCache[ node.uniqueID ] || - ( outerCache[ node.uniqueID ] = {} ); - - uniqueCache[ type ] = [ dirruns, diff ]; - } - - if ( node === elem ) { - break; - } - } - } - } - } - - // Incorporate the offset, then check against cycle size - diff -= last; - return diff === first || ( diff % first === 0 && diff / first >= 0 ); - } - }; - }, - - "PSEUDO": function( pseudo, argument ) { - - // pseudo-class names are case-insensitive - // http://www.w3.org/TR/selectors/#pseudo-classes - // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters - // Remember that setFilters inherits from pseudos - var args, - fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || - Sizzle.error( "unsupported pseudo: " + pseudo ); - - // The user may use createPseudo to indicate that - // arguments are needed to create the filter function - // just as Sizzle does - if ( fn[ expando ] ) { - return fn( argument ); - } - - // But maintain support for old signatures - if ( fn.length > 1 ) { - args = [ pseudo, pseudo, "", argument ]; - return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? 
- markFunction( function( seed, matches ) { - var idx, - matched = fn( seed, argument ), - i = matched.length; - while ( i-- ) { - idx = indexOf( seed, matched[ i ] ); - seed[ idx ] = !( matches[ idx ] = matched[ i ] ); - } - } ) : - function( elem ) { - return fn( elem, 0, args ); - }; - } - - return fn; - } - }, - - pseudos: { - - // Potentially complex pseudos - "not": markFunction( function( selector ) { - - // Trim the selector passed to compile - // to avoid treating leading and trailing - // spaces as combinators - var input = [], - results = [], - matcher = compile( selector.replace( rtrim, "$1" ) ); - - return matcher[ expando ] ? - markFunction( function( seed, matches, _context, xml ) { - var elem, - unmatched = matcher( seed, null, xml, [] ), - i = seed.length; - - // Match elements unmatched by `matcher` - while ( i-- ) { - if ( ( elem = unmatched[ i ] ) ) { - seed[ i ] = !( matches[ i ] = elem ); - } - } - } ) : - function( elem, _context, xml ) { - input[ 0 ] = elem; - matcher( input, null, xml, results ); - - // Don't keep the element (issue #299) - input[ 0 ] = null; - return !results.pop(); - }; - } ), - - "has": markFunction( function( selector ) { - return function( elem ) { - return Sizzle( selector, elem ).length > 0; - }; - } ), - - "contains": markFunction( function( text ) { - text = text.replace( runescape, funescape ); - return function( elem ) { - return ( elem.textContent || getText( elem ) ).indexOf( text ) > -1; - }; - } ), - - // "Whether an element is represented by a :lang() selector - // is based solely on the element's language value - // being equal to the identifier C, - // or beginning with the identifier C immediately followed by "-". - // The matching of C against the element's language value is performed case-insensitively. - // The identifier C does not have to be a valid language name." 
- // http://www.w3.org/TR/selectors/#lang-pseudo - "lang": markFunction( function( lang ) { - - // lang value must be a valid identifier - if ( !ridentifier.test( lang || "" ) ) { - Sizzle.error( "unsupported lang: " + lang ); - } - lang = lang.replace( runescape, funescape ).toLowerCase(); - return function( elem ) { - var elemLang; - do { - if ( ( elemLang = documentIsHTML ? - elem.lang : - elem.getAttribute( "xml:lang" ) || elem.getAttribute( "lang" ) ) ) { - - elemLang = elemLang.toLowerCase(); - return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0; - } - } while ( ( elem = elem.parentNode ) && elem.nodeType === 1 ); - return false; - }; - } ), - - // Miscellaneous - "target": function( elem ) { - var hash = window.location && window.location.hash; - return hash && hash.slice( 1 ) === elem.id; - }, - - "root": function( elem ) { - return elem === docElem; - }, - - "focus": function( elem ) { - return elem === document.activeElement && - ( !document.hasFocus || document.hasFocus() ) && - !!( elem.type || elem.href || ~elem.tabIndex ); - }, - - // Boolean properties - "enabled": createDisabledPseudo( false ), - "disabled": createDisabledPseudo( true ), - - "checked": function( elem ) { - - // In CSS3, :checked should return both checked and selected elements - // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked - var nodeName = elem.nodeName.toLowerCase(); - return ( nodeName === "input" && !!elem.checked ) || - ( nodeName === "option" && !!elem.selected ); - }, - - "selected": function( elem ) { - - // Accessing this property makes selected-by-default - // options in Safari work properly - if ( elem.parentNode ) { - // eslint-disable-next-line no-unused-expressions - elem.parentNode.selectedIndex; - } - - return elem.selected === true; - }, - - // Contents - "empty": function( elem ) { - - // http://www.w3.org/TR/selectors/#empty-pseudo - // :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5), - // but 
not by others (comment: 8; processing instruction: 7; etc.) - // nodeType < 6 works because attributes (2) do not appear as children - for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { - if ( elem.nodeType < 6 ) { - return false; - } - } - return true; - }, - - "parent": function( elem ) { - return !Expr.pseudos[ "empty" ]( elem ); - }, - - // Element/input types - "header": function( elem ) { - return rheader.test( elem.nodeName ); - }, - - "input": function( elem ) { - return rinputs.test( elem.nodeName ); - }, - - "button": function( elem ) { - var name = elem.nodeName.toLowerCase(); - return name === "input" && elem.type === "button" || name === "button"; - }, - - "text": function( elem ) { - var attr; - return elem.nodeName.toLowerCase() === "input" && - elem.type === "text" && - - // Support: IE<8 - // New HTML5 attribute values (e.g., "search") appear with elem.type === "text" - ( ( attr = elem.getAttribute( "type" ) ) == null || - attr.toLowerCase() === "text" ); - }, - - // Position-in-collection - "first": createPositionalPseudo( function() { - return [ 0 ]; - } ), - - "last": createPositionalPseudo( function( _matchIndexes, length ) { - return [ length - 1 ]; - } ), - - "eq": createPositionalPseudo( function( _matchIndexes, length, argument ) { - return [ argument < 0 ? argument + length : argument ]; - } ), - - "even": createPositionalPseudo( function( matchIndexes, length ) { - var i = 0; - for ( ; i < length; i += 2 ) { - matchIndexes.push( i ); - } - return matchIndexes; - } ), - - "odd": createPositionalPseudo( function( matchIndexes, length ) { - var i = 1; - for ( ; i < length; i += 2 ) { - matchIndexes.push( i ); - } - return matchIndexes; - } ), - - "lt": createPositionalPseudo( function( matchIndexes, length, argument ) { - var i = argument < 0 ? - argument + length : - argument > length ? 
- length : - argument; - for ( ; --i >= 0; ) { - matchIndexes.push( i ); - } - return matchIndexes; - } ), - - "gt": createPositionalPseudo( function( matchIndexes, length, argument ) { - var i = argument < 0 ? argument + length : argument; - for ( ; ++i < length; ) { - matchIndexes.push( i ); - } - return matchIndexes; - } ) - } -}; - -Expr.pseudos[ "nth" ] = Expr.pseudos[ "eq" ]; - -// Add button/input type pseudos -for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) { - Expr.pseudos[ i ] = createInputPseudo( i ); -} -for ( i in { submit: true, reset: true } ) { - Expr.pseudos[ i ] = createButtonPseudo( i ); -} - -// Easy API for creating new setFilters -function setFilters() {} -setFilters.prototype = Expr.filters = Expr.pseudos; -Expr.setFilters = new setFilters(); - -tokenize = Sizzle.tokenize = function( selector, parseOnly ) { - var matched, match, tokens, type, - soFar, groups, preFilters, - cached = tokenCache[ selector + " " ]; - - if ( cached ) { - return parseOnly ? 
0 : cached.slice( 0 ); - } - - soFar = selector; - groups = []; - preFilters = Expr.preFilter; - - while ( soFar ) { - - // Comma and first run - if ( !matched || ( match = rcomma.exec( soFar ) ) ) { - if ( match ) { - - // Don't consume trailing commas as valid - soFar = soFar.slice( match[ 0 ].length ) || soFar; - } - groups.push( ( tokens = [] ) ); - } - - matched = false; - - // Combinators - if ( ( match = rcombinators.exec( soFar ) ) ) { - matched = match.shift(); - tokens.push( { - value: matched, - - // Cast descendant combinators to space - type: match[ 0 ].replace( rtrim, " " ) - } ); - soFar = soFar.slice( matched.length ); - } - - // Filters - for ( type in Expr.filter ) { - if ( ( match = matchExpr[ type ].exec( soFar ) ) && ( !preFilters[ type ] || - ( match = preFilters[ type ]( match ) ) ) ) { - matched = match.shift(); - tokens.push( { - value: matched, - type: type, - matches: match - } ); - soFar = soFar.slice( matched.length ); - } - } - - if ( !matched ) { - break; - } - } - - // Return the length of the invalid excess - // if we're just parsing - // Otherwise, throw an error or return tokens - return parseOnly ? - soFar.length : - soFar ? - Sizzle.error( selector ) : - - // Cache the tokens - tokenCache( selector, groups ).slice( 0 ); -}; - -function toSelector( tokens ) { - var i = 0, - len = tokens.length, - selector = ""; - for ( ; i < len; i++ ) { - selector += tokens[ i ].value; - } - return selector; -} - -function addCombinator( matcher, combinator, base ) { - var dir = combinator.dir, - skip = combinator.next, - key = skip || dir, - checkNonElements = base && key === "parentNode", - doneName = done++; - - return combinator.first ? 
- - // Check against closest ancestor/preceding element - function( elem, context, xml ) { - while ( ( elem = elem[ dir ] ) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - return matcher( elem, context, xml ); - } - } - return false; - } : - - // Check against all ancestor/preceding elements - function( elem, context, xml ) { - var oldCache, uniqueCache, outerCache, - newCache = [ dirruns, doneName ]; - - // We can't set arbitrary data on XML nodes, so they don't benefit from combinator caching - if ( xml ) { - while ( ( elem = elem[ dir ] ) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - if ( matcher( elem, context, xml ) ) { - return true; - } - } - } - } else { - while ( ( elem = elem[ dir ] ) ) { - if ( elem.nodeType === 1 || checkNonElements ) { - outerCache = elem[ expando ] || ( elem[ expando ] = {} ); - - // Support: IE <9 only - // Defend against cloned attroperties (jQuery gh-1709) - uniqueCache = outerCache[ elem.uniqueID ] || - ( outerCache[ elem.uniqueID ] = {} ); - - if ( skip && skip === elem.nodeName.toLowerCase() ) { - elem = elem[ dir ] || elem; - } else if ( ( oldCache = uniqueCache[ key ] ) && - oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) { - - // Assign to newCache so results back-propagate to previous elements - return ( newCache[ 2 ] = oldCache[ 2 ] ); - } else { - - // Reuse newcache so results back-propagate to previous elements - uniqueCache[ key ] = newCache; - - // A match means we're done; a fail means we have to keep checking - if ( ( newCache[ 2 ] = matcher( elem, context, xml ) ) ) { - return true; - } - } - } - } - } - return false; - }; -} - -function elementMatcher( matchers ) { - return matchers.length > 1 ? 
- function( elem, context, xml ) { - var i = matchers.length; - while ( i-- ) { - if ( !matchers[ i ]( elem, context, xml ) ) { - return false; - } - } - return true; - } : - matchers[ 0 ]; -} - -function multipleContexts( selector, contexts, results ) { - var i = 0, - len = contexts.length; - for ( ; i < len; i++ ) { - Sizzle( selector, contexts[ i ], results ); - } - return results; -} - -function condense( unmatched, map, filter, context, xml ) { - var elem, - newUnmatched = [], - i = 0, - len = unmatched.length, - mapped = map != null; - - for ( ; i < len; i++ ) { - if ( ( elem = unmatched[ i ] ) ) { - if ( !filter || filter( elem, context, xml ) ) { - newUnmatched.push( elem ); - if ( mapped ) { - map.push( i ); - } - } - } - } - - return newUnmatched; -} - -function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { - if ( postFilter && !postFilter[ expando ] ) { - postFilter = setMatcher( postFilter ); - } - if ( postFinder && !postFinder[ expando ] ) { - postFinder = setMatcher( postFinder, postSelector ); - } - return markFunction( function( seed, results, context, xml ) { - var temp, i, elem, - preMap = [], - postMap = [], - preexisting = results.length, - - // Get initial elements from seed or context - elems = seed || multipleContexts( - selector || "*", - context.nodeType ? [ context ] : context, - [] - ), - - // Prefilter to get matcher input, preserving a map for seed-results synchronization - matcherIn = preFilter && ( seed || !selector ) ? - condense( elems, preMap, preFilter, context, xml ) : - elems, - - matcherOut = matcher ? - - // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, - postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
- - // ...intermediate processing is necessary - [] : - - // ...otherwise use results directly - results : - matcherIn; - - // Find primary matches - if ( matcher ) { - matcher( matcherIn, matcherOut, context, xml ); - } - - // Apply postFilter - if ( postFilter ) { - temp = condense( matcherOut, postMap ); - postFilter( temp, [], context, xml ); - - // Un-match failing elements by moving them back to matcherIn - i = temp.length; - while ( i-- ) { - if ( ( elem = temp[ i ] ) ) { - matcherOut[ postMap[ i ] ] = !( matcherIn[ postMap[ i ] ] = elem ); - } - } - } - - if ( seed ) { - if ( postFinder || preFilter ) { - if ( postFinder ) { - - // Get the final matcherOut by condensing this intermediate into postFinder contexts - temp = []; - i = matcherOut.length; - while ( i-- ) { - if ( ( elem = matcherOut[ i ] ) ) { - - // Restore matcherIn since elem is not yet a final match - temp.push( ( matcherIn[ i ] = elem ) ); - } - } - postFinder( null, ( matcherOut = [] ), temp, xml ); - } - - // Move matched elements from seed to results to keep them synchronized - i = matcherOut.length; - while ( i-- ) { - if ( ( elem = matcherOut[ i ] ) && - ( temp = postFinder ? indexOf( seed, elem ) : preMap[ i ] ) > -1 ) { - - seed[ temp ] = !( results[ temp ] = elem ); - } - } - } - - // Add elements to results, through postFinder if defined - } else { - matcherOut = condense( - matcherOut === results ? - matcherOut.splice( preexisting, matcherOut.length ) : - matcherOut - ); - if ( postFinder ) { - postFinder( null, results, matcherOut, xml ); - } else { - push.apply( results, matcherOut ); - } - } - } ); -} - -function matcherFromTokens( tokens ) { - var checkContext, matcher, j, - len = tokens.length, - leadingRelative = Expr.relative[ tokens[ 0 ].type ], - implicitRelative = leadingRelative || Expr.relative[ " " ], - i = leadingRelative ? 
1 : 0, - - // The foundational matcher ensures that elements are reachable from top-level context(s) - matchContext = addCombinator( function( elem ) { - return elem === checkContext; - }, implicitRelative, true ), - matchAnyContext = addCombinator( function( elem ) { - return indexOf( checkContext, elem ) > -1; - }, implicitRelative, true ), - matchers = [ function( elem, context, xml ) { - var ret = ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( - ( checkContext = context ).nodeType ? - matchContext( elem, context, xml ) : - matchAnyContext( elem, context, xml ) ); - - // Avoid hanging onto element (issue #299) - checkContext = null; - return ret; - } ]; - - for ( ; i < len; i++ ) { - if ( ( matcher = Expr.relative[ tokens[ i ].type ] ) ) { - matchers = [ addCombinator( elementMatcher( matchers ), matcher ) ]; - } else { - matcher = Expr.filter[ tokens[ i ].type ].apply( null, tokens[ i ].matches ); - - // Return special upon seeing a positional matcher - if ( matcher[ expando ] ) { - - // Find the next relative operator (if any) for proper handling - j = ++i; - for ( ; j < len; j++ ) { - if ( Expr.relative[ tokens[ j ].type ] ) { - break; - } - } - return setMatcher( - i > 1 && elementMatcher( matchers ), - i > 1 && toSelector( - - // If the preceding token was a descendant combinator, insert an implicit any-element `*` - tokens - .slice( 0, i - 1 ) - .concat( { value: tokens[ i - 2 ].type === " " ? 
"*" : "" } ) - ).replace( rtrim, "$1" ), - matcher, - i < j && matcherFromTokens( tokens.slice( i, j ) ), - j < len && matcherFromTokens( ( tokens = tokens.slice( j ) ) ), - j < len && toSelector( tokens ) - ); - } - matchers.push( matcher ); - } - } - - return elementMatcher( matchers ); -} - -function matcherFromGroupMatchers( elementMatchers, setMatchers ) { - var bySet = setMatchers.length > 0, - byElement = elementMatchers.length > 0, - superMatcher = function( seed, context, xml, results, outermost ) { - var elem, j, matcher, - matchedCount = 0, - i = "0", - unmatched = seed && [], - setMatched = [], - contextBackup = outermostContext, - - // We must always have either seed elements or outermost context - elems = seed || byElement && Expr.find[ "TAG" ]( "*", outermost ), - - // Use integer dirruns iff this is the outermost matcher - dirrunsUnique = ( dirruns += contextBackup == null ? 1 : Math.random() || 0.1 ), - len = elems.length; - - if ( outermost ) { - - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. - // eslint-disable-next-line eqeqeq - outermostContext = context == document || context || outermost; - } - - // Add elements passing elementMatchers directly to results - // Support: IE<9, Safari - // Tolerate NodeList properties (IE: "length"; Safari: ) matching elements by id - for ( ; i !== len && ( elem = elems[ i ] ) != null; i++ ) { - if ( byElement && elem ) { - j = 0; - - // Support: IE 11+, Edge 17 - 18+ - // IE/Edge sometimes throw a "Permission denied" error when strict-comparing - // two documents; shallow comparisons work. 
- // eslint-disable-next-line eqeqeq - if ( !context && elem.ownerDocument != document ) { - setDocument( elem ); - xml = !documentIsHTML; - } - while ( ( matcher = elementMatchers[ j++ ] ) ) { - if ( matcher( elem, context || document, xml ) ) { - results.push( elem ); - break; - } - } - if ( outermost ) { - dirruns = dirrunsUnique; - } - } - - // Track unmatched elements for set filters - if ( bySet ) { - - // They will have gone through all possible matchers - if ( ( elem = !matcher && elem ) ) { - matchedCount--; - } - - // Lengthen the array for every element, matched or not - if ( seed ) { - unmatched.push( elem ); - } - } - } - - // `i` is now the count of elements visited above, and adding it to `matchedCount` - // makes the latter nonnegative. - matchedCount += i; - - // Apply set filters to unmatched elements - // NOTE: This can be skipped if there are no unmatched elements (i.e., `matchedCount` - // equals `i`), unless we didn't visit _any_ elements in the above loop because we have - // no element matchers and no seed. - // Incrementing an initially-string "0" `i` allows `i` to remain a string only in that - // case, which will result in a "00" `matchedCount` that differs from `i` but is also - // numerically zero. 
- if ( bySet && i !== matchedCount ) { - j = 0; - while ( ( matcher = setMatchers[ j++ ] ) ) { - matcher( unmatched, setMatched, context, xml ); - } - - if ( seed ) { - - // Reintegrate element matches to eliminate the need for sorting - if ( matchedCount > 0 ) { - while ( i-- ) { - if ( !( unmatched[ i ] || setMatched[ i ] ) ) { - setMatched[ i ] = pop.call( results ); - } - } - } - - // Discard index placeholder values to get only actual matches - setMatched = condense( setMatched ); - } - - // Add matches to results - push.apply( results, setMatched ); - - // Seedless set matches succeeding multiple successful matchers stipulate sorting - if ( outermost && !seed && setMatched.length > 0 && - ( matchedCount + setMatchers.length ) > 1 ) { - - Sizzle.uniqueSort( results ); - } - } - - // Override manipulation of globals by nested matchers - if ( outermost ) { - dirruns = dirrunsUnique; - outermostContext = contextBackup; - } - - return unmatched; - }; - - return bySet ? - markFunction( superMatcher ) : - superMatcher; -} - -compile = Sizzle.compile = function( selector, match /* Internal Use Only */ ) { - var i, - setMatchers = [], - elementMatchers = [], - cached = compilerCache[ selector + " " ]; - - if ( !cached ) { - - // Generate a function of recursive functions that can be used to check each element - if ( !match ) { - match = tokenize( selector ); - } - i = match.length; - while ( i-- ) { - cached = matcherFromTokens( match[ i ] ); - if ( cached[ expando ] ) { - setMatchers.push( cached ); - } else { - elementMatchers.push( cached ); - } - } - - // Cache the compiled function - cached = compilerCache( - selector, - matcherFromGroupMatchers( elementMatchers, setMatchers ) - ); - - // Save selector and tokenization - cached.selector = selector; - } - return cached; -}; - -/** - * A low-level selection function that works with Sizzle's compiled - * selector functions - * @param {String|Function} selector A selector or a pre-compiled - * selector function built 
with Sizzle.compile - * @param {Element} context - * @param {Array} [results] - * @param {Array} [seed] A set of elements to match against - */ -select = Sizzle.select = function( selector, context, results, seed ) { - var i, tokens, token, type, find, - compiled = typeof selector === "function" && selector, - match = !seed && tokenize( ( selector = compiled.selector || selector ) ); - - results = results || []; - - // Try to minimize operations if there is only one selector in the list and no seed - // (the latter of which guarantees us context) - if ( match.length === 1 ) { - - // Reduce context if the leading compound selector is an ID - tokens = match[ 0 ] = match[ 0 ].slice( 0 ); - if ( tokens.length > 2 && ( token = tokens[ 0 ] ).type === "ID" && - context.nodeType === 9 && documentIsHTML && Expr.relative[ tokens[ 1 ].type ] ) { - - context = ( Expr.find[ "ID" ]( token.matches[ 0 ] - .replace( runescape, funescape ), context ) || [] )[ 0 ]; - if ( !context ) { - return results; - - // Precompiled matchers will still verify ancestry, so step up a level - } else if ( compiled ) { - context = context.parentNode; - } - - selector = selector.slice( tokens.shift().value.length ); - } - - // Fetch a seed set for right-to-left matching - i = matchExpr[ "needsContext" ].test( selector ) ? 
0 : tokens.length; - while ( i-- ) { - token = tokens[ i ]; - - // Abort if we hit a combinator - if ( Expr.relative[ ( type = token.type ) ] ) { - break; - } - if ( ( find = Expr.find[ type ] ) ) { - - // Search, expanding context for leading sibling combinators - if ( ( seed = find( - token.matches[ 0 ].replace( runescape, funescape ), - rsibling.test( tokens[ 0 ].type ) && testContext( context.parentNode ) || - context - ) ) ) { - - // If seed is empty or no tokens remain, we can return early - tokens.splice( i, 1 ); - selector = seed.length && toSelector( tokens ); - if ( !selector ) { - push.apply( results, seed ); - return results; - } - - break; - } - } - } - } - - // Compile and execute a filtering function if one is not provided - // Provide `match` to avoid retokenization if we modified the selector above - ( compiled || compile( selector, match ) )( - seed, - context, - !documentIsHTML, - results, - !context || rsibling.test( selector ) && testContext( context.parentNode ) || context - ); - return results; -}; - -// One-time assignments - -// Sort stability -support.sortStable = expando.split( "" ).sort( sortOrder ).join( "" ) === expando; - -// Support: Chrome 14-35+ -// Always assume duplicates if they aren't passed to the comparison function -support.detectDuplicates = !!hasDuplicate; - -// Initialize against the default document -setDocument(); - -// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27) -// Detached nodes confoundingly follow *each other* -support.sortDetached = assert( function( el ) { - - // Should return 1, but returns 4 (following) - return el.compareDocumentPosition( document.createElement( "fieldset" ) ) & 1; -} ); - -// Support: IE<8 -// Prevent attribute/property "interpolation" -// https://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx -if ( !assert( function( el ) { - el.innerHTML = ""; - return el.firstChild.getAttribute( "href" ) === "#"; -} ) ) { - addHandle( "type|href|height|width", function( 
elem, name, isXML ) { - if ( !isXML ) { - return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 ); - } - } ); -} - -// Support: IE<9 -// Use defaultValue in place of getAttribute("value") -if ( !support.attributes || !assert( function( el ) { - el.innerHTML = ""; - el.firstChild.setAttribute( "value", "" ); - return el.firstChild.getAttribute( "value" ) === ""; -} ) ) { - addHandle( "value", function( elem, _name, isXML ) { - if ( !isXML && elem.nodeName.toLowerCase() === "input" ) { - return elem.defaultValue; - } - } ); -} - -// Support: IE<9 -// Use getAttributeNode to fetch booleans when getAttribute lies -if ( !assert( function( el ) { - return el.getAttribute( "disabled" ) == null; -} ) ) { - addHandle( booleans, function( elem, name, isXML ) { - var val; - if ( !isXML ) { - return elem[ name ] === true ? name.toLowerCase() : - ( val = elem.getAttributeNode( name ) ) && val.specified ? - val.value : - null; - } - } ); -} - -return Sizzle; - -} )( window ); - - - -jQuery.find = Sizzle; -jQuery.expr = Sizzle.selectors; - -// Deprecated -jQuery.expr[ ":" ] = jQuery.expr.pseudos; -jQuery.uniqueSort = jQuery.unique = Sizzle.uniqueSort; -jQuery.text = Sizzle.getText; -jQuery.isXMLDoc = Sizzle.isXML; -jQuery.contains = Sizzle.contains; -jQuery.escapeSelector = Sizzle.escape; - - - - -var dir = function( elem, dir, until ) { - var matched = [], - truncate = until !== undefined; - - while ( ( elem = elem[ dir ] ) && elem.nodeType !== 9 ) { - if ( elem.nodeType === 1 ) { - if ( truncate && jQuery( elem ).is( until ) ) { - break; - } - matched.push( elem ); - } - } - return matched; -}; - - -var siblings = function( n, elem ) { - var matched = []; - - for ( ; n; n = n.nextSibling ) { - if ( n.nodeType === 1 && n !== elem ) { - matched.push( n ); - } - } - - return matched; -}; - - -var rneedsContext = jQuery.expr.match.needsContext; - - - -function nodeName( elem, name ) { - - return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); - 
-} -var rsingleTag = ( /^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i ); - - - -// Implement the identical functionality for filter and not -function winnow( elements, qualifier, not ) { - if ( isFunction( qualifier ) ) { - return jQuery.grep( elements, function( elem, i ) { - return !!qualifier.call( elem, i, elem ) !== not; - } ); - } - - // Single element - if ( qualifier.nodeType ) { - return jQuery.grep( elements, function( elem ) { - return ( elem === qualifier ) !== not; - } ); - } - - // Arraylike of elements (jQuery, arguments, Array) - if ( typeof qualifier !== "string" ) { - return jQuery.grep( elements, function( elem ) { - return ( indexOf.call( qualifier, elem ) > -1 ) !== not; - } ); - } - - // Filtered directly for both simple and complex selectors - return jQuery.filter( qualifier, elements, not ); -} - -jQuery.filter = function( expr, elems, not ) { - var elem = elems[ 0 ]; - - if ( not ) { - expr = ":not(" + expr + ")"; - } - - if ( elems.length === 1 && elem.nodeType === 1 ) { - return jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : []; - } - - return jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { - return elem.nodeType === 1; - } ) ); -}; - -jQuery.fn.extend( { - find: function( selector ) { - var i, ret, - len = this.length, - self = this; - - if ( typeof selector !== "string" ) { - return this.pushStack( jQuery( selector ).filter( function() { - for ( i = 0; i < len; i++ ) { - if ( jQuery.contains( self[ i ], this ) ) { - return true; - } - } - } ) ); - } - - ret = this.pushStack( [] ); - - for ( i = 0; i < len; i++ ) { - jQuery.find( selector, self[ i ], ret ); - } - - return len > 1 ? 
jQuery.uniqueSort( ret ) : ret; - }, - filter: function( selector ) { - return this.pushStack( winnow( this, selector || [], false ) ); - }, - not: function( selector ) { - return this.pushStack( winnow( this, selector || [], true ) ); - }, - is: function( selector ) { - return !!winnow( - this, - - // If this is a positional/relative selector, check membership in the returned set - // so $("p:first").is("p:last") won't return true for a doc with two "p". - typeof selector === "string" && rneedsContext.test( selector ) ? - jQuery( selector ) : - selector || [], - false - ).length; - } -} ); - - -// Initialize a jQuery object - - -// A central reference to the root jQuery(document) -var rootjQuery, - - // A simple way to check for HTML strings - // Prioritize #id over to avoid XSS via location.hash (#9521) - // Strict HTML recognition (#11290: must start with <) - // Shortcut simple #id case for speed - rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/, - - init = jQuery.fn.init = function( selector, context, root ) { - var match, elem; - - // HANDLE: $(""), $(null), $(undefined), $(false) - if ( !selector ) { - return this; - } - - // Method init() accepts an alternate rootjQuery - // so migrate can support jQuery.sub (gh-2101) - root = root || rootjQuery; - - // Handle HTML strings - if ( typeof selector === "string" ) { - if ( selector[ 0 ] === "<" && - selector[ selector.length - 1 ] === ">" && - selector.length >= 3 ) { - - // Assume that strings that start and end with <> are HTML and skip the regex check - match = [ null, selector, null ]; - - } else { - match = rquickExpr.exec( selector ); - } - - // Match html or make sure no context is specified for #id - if ( match && ( match[ 1 ] || !context ) ) { - - // HANDLE: $(html) -> $(array) - if ( match[ 1 ] ) { - context = context instanceof jQuery ? 
context[ 0 ] : context; - - // Option to run scripts is true for back-compat - // Intentionally let the error be thrown if parseHTML is not present - jQuery.merge( this, jQuery.parseHTML( - match[ 1 ], - context && context.nodeType ? context.ownerDocument || context : document, - true - ) ); - - // HANDLE: $(html, props) - if ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) { - for ( match in context ) { - - // Properties of context are called as methods if possible - if ( isFunction( this[ match ] ) ) { - this[ match ]( context[ match ] ); - - // ...and otherwise set as attributes - } else { - this.attr( match, context[ match ] ); - } - } - } - - return this; - - // HANDLE: $(#id) - } else { - elem = document.getElementById( match[ 2 ] ); - - if ( elem ) { - - // Inject the element directly into the jQuery object - this[ 0 ] = elem; - this.length = 1; - } - return this; - } - - // HANDLE: $(expr, $(...)) - } else if ( !context || context.jquery ) { - return ( context || root ).find( selector ); - - // HANDLE: $(expr, context) - // (which is just equivalent to: $(context).find(expr) - } else { - return this.constructor( context ).find( selector ); - } - - // HANDLE: $(DOMElement) - } else if ( selector.nodeType ) { - this[ 0 ] = selector; - this.length = 1; - return this; - - // HANDLE: $(function) - // Shortcut for document ready - } else if ( isFunction( selector ) ) { - return root.ready !== undefined ? 
- root.ready( selector ) : - - // Execute immediately if ready is not present - selector( jQuery ); - } - - return jQuery.makeArray( selector, this ); - }; - -// Give the init function the jQuery prototype for later instantiation -init.prototype = jQuery.fn; - -// Initialize central reference -rootjQuery = jQuery( document ); - - -var rparentsprev = /^(?:parents|prev(?:Until|All))/, - - // Methods guaranteed to produce a unique set when starting from a unique set - guaranteedUnique = { - children: true, - contents: true, - next: true, - prev: true - }; - -jQuery.fn.extend( { - has: function( target ) { - var targets = jQuery( target, this ), - l = targets.length; - - return this.filter( function() { - var i = 0; - for ( ; i < l; i++ ) { - if ( jQuery.contains( this, targets[ i ] ) ) { - return true; - } - } - } ); - }, - - closest: function( selectors, context ) { - var cur, - i = 0, - l = this.length, - matched = [], - targets = typeof selectors !== "string" && jQuery( selectors ); - - // Positional selectors never match, since there's no _selection_ context - if ( !rneedsContext.test( selectors ) ) { - for ( ; i < l; i++ ) { - for ( cur = this[ i ]; cur && cur !== context; cur = cur.parentNode ) { - - // Always skip document fragments - if ( cur.nodeType < 11 && ( targets ? - targets.index( cur ) > -1 : - - // Don't pass non-elements to Sizzle - cur.nodeType === 1 && - jQuery.find.matchesSelector( cur, selectors ) ) ) { - - matched.push( cur ); - break; - } - } - } - } - - return this.pushStack( matched.length > 1 ? jQuery.uniqueSort( matched ) : matched ); - }, - - // Determine the position of an element within the set - index: function( elem ) { - - // No argument, return index in parent - if ( !elem ) { - return ( this[ 0 ] && this[ 0 ].parentNode ) ? 
this.first().prevAll().length : -1; - } - - // Index in selector - if ( typeof elem === "string" ) { - return indexOf.call( jQuery( elem ), this[ 0 ] ); - } - - // Locate the position of the desired element - return indexOf.call( this, - - // If it receives a jQuery object, the first element is used - elem.jquery ? elem[ 0 ] : elem - ); - }, - - add: function( selector, context ) { - return this.pushStack( - jQuery.uniqueSort( - jQuery.merge( this.get(), jQuery( selector, context ) ) - ) - ); - }, - - addBack: function( selector ) { - return this.add( selector == null ? - this.prevObject : this.prevObject.filter( selector ) - ); - } -} ); - -function sibling( cur, dir ) { - while ( ( cur = cur[ dir ] ) && cur.nodeType !== 1 ) {} - return cur; -} - -jQuery.each( { - parent: function( elem ) { - var parent = elem.parentNode; - return parent && parent.nodeType !== 11 ? parent : null; - }, - parents: function( elem ) { - return dir( elem, "parentNode" ); - }, - parentsUntil: function( elem, _i, until ) { - return dir( elem, "parentNode", until ); - }, - next: function( elem ) { - return sibling( elem, "nextSibling" ); - }, - prev: function( elem ) { - return sibling( elem, "previousSibling" ); - }, - nextAll: function( elem ) { - return dir( elem, "nextSibling" ); - }, - prevAll: function( elem ) { - return dir( elem, "previousSibling" ); - }, - nextUntil: function( elem, _i, until ) { - return dir( elem, "nextSibling", until ); - }, - prevUntil: function( elem, _i, until ) { - return dir( elem, "previousSibling", until ); - }, - siblings: function( elem ) { - return siblings( ( elem.parentNode || {} ).firstChild, elem ); - }, - children: function( elem ) { - return siblings( elem.firstChild ); - }, - contents: function( elem ) { - if ( elem.contentDocument != null && - - // Support: IE 11+ - // elements with no `data` attribute has an object - // `contentDocument` with a `null` prototype. 
- getProto( elem.contentDocument ) ) { - - return elem.contentDocument; - } - - // Support: IE 9 - 11 only, iOS 7 only, Android Browser <=4.3 only - // Treat the template element as a regular one in browsers that - // don't support it. - if ( nodeName( elem, "template" ) ) { - elem = elem.content || elem; - } - - return jQuery.merge( [], elem.childNodes ); - } -}, function( name, fn ) { - jQuery.fn[ name ] = function( until, selector ) { - var matched = jQuery.map( this, fn, until ); - - if ( name.slice( -5 ) !== "Until" ) { - selector = until; - } - - if ( selector && typeof selector === "string" ) { - matched = jQuery.filter( selector, matched ); - } - - if ( this.length > 1 ) { - - // Remove duplicates - if ( !guaranteedUnique[ name ] ) { - jQuery.uniqueSort( matched ); - } - - // Reverse order for parents* and prev-derivatives - if ( rparentsprev.test( name ) ) { - matched.reverse(); - } - } - - return this.pushStack( matched ); - }; -} ); -var rnothtmlwhite = ( /[^\x20\t\r\n\f]+/g ); - - - -// Convert String-formatted options into Object-formatted ones -function createOptions( options ) { - var object = {}; - jQuery.each( options.match( rnothtmlwhite ) || [], function( _, flag ) { - object[ flag ] = true; - } ); - return object; -} - -/* - * Create a callback list using the following parameters: - * - * options: an optional list of space-separated options that will change how - * the callback list behaves or a more traditional option object - * - * By default a callback list will act like an event callback list and can be - * "fired" multiple times. 
- * - * Possible options: - * - * once: will ensure the callback list can only be fired once (like a Deferred) - * - * memory: will keep track of previous values and will call any callback added - * after the list has been fired right away with the latest "memorized" - * values (like a Deferred) - * - * unique: will ensure a callback can only be added once (no duplicate in the list) - * - * stopOnFalse: interrupt callings when a callback returns false - * - */ -jQuery.Callbacks = function( options ) { - - // Convert options from String-formatted to Object-formatted if needed - // (we check in cache first) - options = typeof options === "string" ? - createOptions( options ) : - jQuery.extend( {}, options ); - - var // Flag to know if list is currently firing - firing, - - // Last fire value for non-forgettable lists - memory, - - // Flag to know if list was already fired - fired, - - // Flag to prevent firing - locked, - - // Actual callback list - list = [], - - // Queue of execution data for repeatable lists - queue = [], - - // Index of currently firing callback (modified by add/remove as needed) - firingIndex = -1, - - // Fire callbacks - fire = function() { - - // Enforce single-firing - locked = locked || options.once; - - // Execute callbacks for all pending executions, - // respecting firingIndex overrides and runtime changes - fired = firing = true; - for ( ; queue.length; firingIndex = -1 ) { - memory = queue.shift(); - while ( ++firingIndex < list.length ) { - - // Run callback and check for early termination - if ( list[ firingIndex ].apply( memory[ 0 ], memory[ 1 ] ) === false && - options.stopOnFalse ) { - - // Jump to end and forget the data so .add doesn't re-fire - firingIndex = list.length; - memory = false; - } - } - } - - // Forget the data if we're done with it - if ( !options.memory ) { - memory = false; - } - - firing = false; - - // Clean up if we're done firing for good - if ( locked ) { - - // Keep an empty list if we have data for future 
add calls - if ( memory ) { - list = []; - - // Otherwise, this object is spent - } else { - list = ""; - } - } - }, - - // Actual Callbacks object - self = { - - // Add a callback or a collection of callbacks to the list - add: function() { - if ( list ) { - - // If we have memory from a past run, we should fire after adding - if ( memory && !firing ) { - firingIndex = list.length - 1; - queue.push( memory ); - } - - ( function add( args ) { - jQuery.each( args, function( _, arg ) { - if ( isFunction( arg ) ) { - if ( !options.unique || !self.has( arg ) ) { - list.push( arg ); - } - } else if ( arg && arg.length && toType( arg ) !== "string" ) { - - // Inspect recursively - add( arg ); - } - } ); - } )( arguments ); - - if ( memory && !firing ) { - fire(); - } - } - return this; - }, - - // Remove a callback from the list - remove: function() { - jQuery.each( arguments, function( _, arg ) { - var index; - while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { - list.splice( index, 1 ); - - // Handle firing indexes - if ( index <= firingIndex ) { - firingIndex--; - } - } - } ); - return this; - }, - - // Check if a given callback is in the list. - // If no argument is given, return whether or not list has callbacks attached. - has: function( fn ) { - return fn ? 
- jQuery.inArray( fn, list ) > -1 : - list.length > 0; - }, - - // Remove all callbacks from the list - empty: function() { - if ( list ) { - list = []; - } - return this; - }, - - // Disable .fire and .add - // Abort any current/pending executions - // Clear all callbacks and values - disable: function() { - locked = queue = []; - list = memory = ""; - return this; - }, - disabled: function() { - return !list; - }, - - // Disable .fire - // Also disable .add unless we have memory (since it would have no effect) - // Abort any pending executions - lock: function() { - locked = queue = []; - if ( !memory && !firing ) { - list = memory = ""; - } - return this; - }, - locked: function() { - return !!locked; - }, - - // Call all callbacks with the given context and arguments - fireWith: function( context, args ) { - if ( !locked ) { - args = args || []; - args = [ context, args.slice ? args.slice() : args ]; - queue.push( args ); - if ( !firing ) { - fire(); - } - } - return this; - }, - - // Call all the callbacks with the given arguments - fire: function() { - self.fireWith( this, arguments ); - return this; - }, - - // To know if the callbacks have already been called at least once - fired: function() { - return !!fired; - } - }; - - return self; -}; - - -function Identity( v ) { - return v; -} -function Thrower( ex ) { - throw ex; -} - -function adoptValue( value, resolve, reject, noValue ) { - var method; - - try { - - // Check for promise aspect first to privilege synchronous behavior - if ( value && isFunction( ( method = value.promise ) ) ) { - method.call( value ).done( resolve ).fail( reject ); - - // Other thenables - } else if ( value && isFunction( ( method = value.then ) ) ) { - method.call( value, resolve, reject ); - - // Other non-thenables - } else { - - // Control `resolve` arguments by letting Array#slice cast boolean `noValue` to integer: - // * false: [ value ].slice( 0 ) => resolve( value ) - // * true: [ value ].slice( 1 ) => resolve() - 
resolve.apply( undefined, [ value ].slice( noValue ) ); - } - - // For Promises/A+, convert exceptions into rejections - // Since jQuery.when doesn't unwrap thenables, we can skip the extra checks appearing in - // Deferred#then to conditionally suppress rejection. - } catch ( value ) { - - // Support: Android 4.0 only - // Strict mode functions invoked without .call/.apply get global-object context - reject.apply( undefined, [ value ] ); - } -} - -jQuery.extend( { - - Deferred: function( func ) { - var tuples = [ - - // action, add listener, callbacks, - // ... .then handlers, argument index, [final state] - [ "notify", "progress", jQuery.Callbacks( "memory" ), - jQuery.Callbacks( "memory" ), 2 ], - [ "resolve", "done", jQuery.Callbacks( "once memory" ), - jQuery.Callbacks( "once memory" ), 0, "resolved" ], - [ "reject", "fail", jQuery.Callbacks( "once memory" ), - jQuery.Callbacks( "once memory" ), 1, "rejected" ] - ], - state = "pending", - promise = { - state: function() { - return state; - }, - always: function() { - deferred.done( arguments ).fail( arguments ); - return this; - }, - "catch": function( fn ) { - return promise.then( null, fn ); - }, - - // Keep pipe for back-compat - pipe: function( /* fnDone, fnFail, fnProgress */ ) { - var fns = arguments; - - return jQuery.Deferred( function( newDefer ) { - jQuery.each( tuples, function( _i, tuple ) { - - // Map tuples (progress, done, fail) to arguments (done, fail, progress) - var fn = isFunction( fns[ tuple[ 4 ] ] ) && fns[ tuple[ 4 ] ]; - - // deferred.progress(function() { bind to newDefer or newDefer.notify }) - // deferred.done(function() { bind to newDefer or newDefer.resolve }) - // deferred.fail(function() { bind to newDefer or newDefer.reject }) - deferred[ tuple[ 1 ] ]( function() { - var returned = fn && fn.apply( this, arguments ); - if ( returned && isFunction( returned.promise ) ) { - returned.promise() - .progress( newDefer.notify ) - .done( newDefer.resolve ) - .fail( newDefer.reject ); - } 
else { - newDefer[ tuple[ 0 ] + "With" ]( - this, - fn ? [ returned ] : arguments - ); - } - } ); - } ); - fns = null; - } ).promise(); - }, - then: function( onFulfilled, onRejected, onProgress ) { - var maxDepth = 0; - function resolve( depth, deferred, handler, special ) { - return function() { - var that = this, - args = arguments, - mightThrow = function() { - var returned, then; - - // Support: Promises/A+ section 2.3.3.3.3 - // https://promisesaplus.com/#point-59 - // Ignore double-resolution attempts - if ( depth < maxDepth ) { - return; - } - - returned = handler.apply( that, args ); - - // Support: Promises/A+ section 2.3.1 - // https://promisesaplus.com/#point-48 - if ( returned === deferred.promise() ) { - throw new TypeError( "Thenable self-resolution" ); - } - - // Support: Promises/A+ sections 2.3.3.1, 3.5 - // https://promisesaplus.com/#point-54 - // https://promisesaplus.com/#point-75 - // Retrieve `then` only once - then = returned && - - // Support: Promises/A+ section 2.3.4 - // https://promisesaplus.com/#point-64 - // Only check objects and functions for thenability - ( typeof returned === "object" || - typeof returned === "function" ) && - returned.then; - - // Handle a returned thenable - if ( isFunction( then ) ) { - - // Special processors (notify) just wait for resolution - if ( special ) { - then.call( - returned, - resolve( maxDepth, deferred, Identity, special ), - resolve( maxDepth, deferred, Thrower, special ) - ); - - // Normal processors (resolve) also hook into progress - } else { - - // ...and disregard older resolution values - maxDepth++; - - then.call( - returned, - resolve( maxDepth, deferred, Identity, special ), - resolve( maxDepth, deferred, Thrower, special ), - resolve( maxDepth, deferred, Identity, - deferred.notifyWith ) - ); - } - - // Handle all other returned values - } else { - - // Only substitute handlers pass on context - // and multiple values (non-spec behavior) - if ( handler !== Identity ) { - that = 
undefined; - args = [ returned ]; - } - - // Process the value(s) - // Default process is resolve - ( special || deferred.resolveWith )( that, args ); - } - }, - - // Only normal processors (resolve) catch and reject exceptions - process = special ? - mightThrow : - function() { - try { - mightThrow(); - } catch ( e ) { - - if ( jQuery.Deferred.exceptionHook ) { - jQuery.Deferred.exceptionHook( e, - process.stackTrace ); - } - - // Support: Promises/A+ section 2.3.3.3.4.1 - // https://promisesaplus.com/#point-61 - // Ignore post-resolution exceptions - if ( depth + 1 >= maxDepth ) { - - // Only substitute handlers pass on context - // and multiple values (non-spec behavior) - if ( handler !== Thrower ) { - that = undefined; - args = [ e ]; - } - - deferred.rejectWith( that, args ); - } - } - }; - - // Support: Promises/A+ section 2.3.3.3.1 - // https://promisesaplus.com/#point-57 - // Re-resolve promises immediately to dodge false rejection from - // subsequent errors - if ( depth ) { - process(); - } else { - - // Call an optional hook to record the stack, in case of exception - // since it's otherwise lost when execution goes async - if ( jQuery.Deferred.getStackHook ) { - process.stackTrace = jQuery.Deferred.getStackHook(); - } - window.setTimeout( process ); - } - }; - } - - return jQuery.Deferred( function( newDefer ) { - - // progress_handlers.add( ... ) - tuples[ 0 ][ 3 ].add( - resolve( - 0, - newDefer, - isFunction( onProgress ) ? - onProgress : - Identity, - newDefer.notifyWith - ) - ); - - // fulfilled_handlers.add( ... ) - tuples[ 1 ][ 3 ].add( - resolve( - 0, - newDefer, - isFunction( onFulfilled ) ? - onFulfilled : - Identity - ) - ); - - // rejected_handlers.add( ... ) - tuples[ 2 ][ 3 ].add( - resolve( - 0, - newDefer, - isFunction( onRejected ) ? 
- onRejected : - Thrower - ) - ); - } ).promise(); - }, - - // Get a promise for this deferred - // If obj is provided, the promise aspect is added to the object - promise: function( obj ) { - return obj != null ? jQuery.extend( obj, promise ) : promise; - } - }, - deferred = {}; - - // Add list-specific methods - jQuery.each( tuples, function( i, tuple ) { - var list = tuple[ 2 ], - stateString = tuple[ 5 ]; - - // promise.progress = list.add - // promise.done = list.add - // promise.fail = list.add - promise[ tuple[ 1 ] ] = list.add; - - // Handle state - if ( stateString ) { - list.add( - function() { - - // state = "resolved" (i.e., fulfilled) - // state = "rejected" - state = stateString; - }, - - // rejected_callbacks.disable - // fulfilled_callbacks.disable - tuples[ 3 - i ][ 2 ].disable, - - // rejected_handlers.disable - // fulfilled_handlers.disable - tuples[ 3 - i ][ 3 ].disable, - - // progress_callbacks.lock - tuples[ 0 ][ 2 ].lock, - - // progress_handlers.lock - tuples[ 0 ][ 3 ].lock - ); - } - - // progress_handlers.fire - // fulfilled_handlers.fire - // rejected_handlers.fire - list.add( tuple[ 3 ].fire ); - - // deferred.notify = function() { deferred.notifyWith(...) } - // deferred.resolve = function() { deferred.resolveWith(...) } - // deferred.reject = function() { deferred.rejectWith(...) } - deferred[ tuple[ 0 ] ] = function() { - deferred[ tuple[ 0 ] + "With" ]( this === deferred ? undefined : this, arguments ); - return this; - }; - - // deferred.notifyWith = list.fireWith - // deferred.resolveWith = list.fireWith - // deferred.rejectWith = list.fireWith - deferred[ tuple[ 0 ] + "With" ] = list.fireWith; - } ); - - // Make the deferred a promise - promise.promise( deferred ); - - // Call given func if any - if ( func ) { - func.call( deferred, deferred ); - } - - // All done! 
- return deferred; - }, - - // Deferred helper - when: function( singleValue ) { - var - - // count of uncompleted subordinates - remaining = arguments.length, - - // count of unprocessed arguments - i = remaining, - - // subordinate fulfillment data - resolveContexts = Array( i ), - resolveValues = slice.call( arguments ), - - // the primary Deferred - primary = jQuery.Deferred(), - - // subordinate callback factory - updateFunc = function( i ) { - return function( value ) { - resolveContexts[ i ] = this; - resolveValues[ i ] = arguments.length > 1 ? slice.call( arguments ) : value; - if ( !( --remaining ) ) { - primary.resolveWith( resolveContexts, resolveValues ); - } - }; - }; - - // Single- and empty arguments are adopted like Promise.resolve - if ( remaining <= 1 ) { - adoptValue( singleValue, primary.done( updateFunc( i ) ).resolve, primary.reject, - !remaining ); - - // Use .then() to unwrap secondary thenables (cf. gh-3000) - if ( primary.state() === "pending" || - isFunction( resolveValues[ i ] && resolveValues[ i ].then ) ) { - - return primary.then(); - } - } - - // Multiple arguments are aggregated like Promise.all array elements - while ( i-- ) { - adoptValue( resolveValues[ i ], updateFunc( i ), primary.reject ); - } - - return primary.promise(); - } -} ); - - -// These usually indicate a programmer mistake during development, -// warn about them ASAP rather than swallowing them by default. 
-var rerrorNames = /^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/; - -jQuery.Deferred.exceptionHook = function( error, stack ) { - - // Support: IE 8 - 9 only - // Console exists when dev tools are open, which can happen at any time - if ( window.console && window.console.warn && error && rerrorNames.test( error.name ) ) { - window.console.warn( "jQuery.Deferred exception: " + error.message, error.stack, stack ); - } -}; - - - - -jQuery.readyException = function( error ) { - window.setTimeout( function() { - throw error; - } ); -}; - - - - -// The deferred used on DOM ready -var readyList = jQuery.Deferred(); - -jQuery.fn.ready = function( fn ) { - - readyList - .then( fn ) - - // Wrap jQuery.readyException in a function so that the lookup - // happens at the time of error handling instead of callback - // registration. - .catch( function( error ) { - jQuery.readyException( error ); - } ); - - return this; -}; - -jQuery.extend( { - - // Is the DOM ready to be used? Set to true once it occurs. - isReady: false, - - // A counter to track how many items to wait for before - // the ready event fires. See #6781 - readyWait: 1, - - // Handle when the DOM is ready - ready: function( wait ) { - - // Abort if there are pending holds or we're already ready - if ( wait === true ? 
--jQuery.readyWait : jQuery.isReady ) { - return; - } - - // Remember that the DOM is ready - jQuery.isReady = true; - - // If a normal DOM Ready event fired, decrement, and wait if need be - if ( wait !== true && --jQuery.readyWait > 0 ) { - return; - } - - // If there are functions bound, to execute - readyList.resolveWith( document, [ jQuery ] ); - } -} ); - -jQuery.ready.then = readyList.then; - -// The ready event handler and self cleanup method -function completed() { - document.removeEventListener( "DOMContentLoaded", completed ); - window.removeEventListener( "load", completed ); - jQuery.ready(); -} - -// Catch cases where $(document).ready() is called -// after the browser event has already occurred. -// Support: IE <=9 - 10 only -// Older IE sometimes signals "interactive" too soon -if ( document.readyState === "complete" || - ( document.readyState !== "loading" && !document.documentElement.doScroll ) ) { - - // Handle it asynchronously to allow scripts the opportunity to delay ready - window.setTimeout( jQuery.ready ); - -} else { - - // Use the handy event callback - document.addEventListener( "DOMContentLoaded", completed ); - - // A fallback to window.onload, that will always work - window.addEventListener( "load", completed ); -} - - - - -// Multifunctional method to get and set values of a collection -// The value/s can optionally be executed if it's a function -var access = function( elems, fn, key, value, chainable, emptyGet, raw ) { - var i = 0, - len = elems.length, - bulk = key == null; - - // Sets many values - if ( toType( key ) === "object" ) { - chainable = true; - for ( i in key ) { - access( elems, fn, i, key[ i ], true, emptyGet, raw ); - } - - // Sets one value - } else if ( value !== undefined ) { - chainable = true; - - if ( !isFunction( value ) ) { - raw = true; - } - - if ( bulk ) { - - // Bulk operations run against the entire set - if ( raw ) { - fn.call( elems, value ); - fn = null; - - // ...except when executing function 
values - } else { - bulk = fn; - fn = function( elem, _key, value ) { - return bulk.call( jQuery( elem ), value ); - }; - } - } - - if ( fn ) { - for ( ; i < len; i++ ) { - fn( - elems[ i ], key, raw ? - value : - value.call( elems[ i ], i, fn( elems[ i ], key ) ) - ); - } - } - } - - if ( chainable ) { - return elems; - } - - // Gets - if ( bulk ) { - return fn.call( elems ); - } - - return len ? fn( elems[ 0 ], key ) : emptyGet; -}; - - -// Matches dashed string for camelizing -var rmsPrefix = /^-ms-/, - rdashAlpha = /-([a-z])/g; - -// Used by camelCase as callback to replace() -function fcamelCase( _all, letter ) { - return letter.toUpperCase(); -} - -// Convert dashed to camelCase; used by the css and data modules -// Support: IE <=9 - 11, Edge 12 - 15 -// Microsoft forgot to hump their vendor prefix (#9572) -function camelCase( string ) { - return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); -} -var acceptData = function( owner ) { - - // Accepts only: - // - Node - // - Node.ELEMENT_NODE - // - Node.DOCUMENT_NODE - // - Object - // - Any - return owner.nodeType === 1 || owner.nodeType === 9 || !( +owner.nodeType ); -}; - - - - -function Data() { - this.expando = jQuery.expando + Data.uid++; -} - -Data.uid = 1; - -Data.prototype = { - - cache: function( owner ) { - - // Check if the owner object already has a cache - var value = owner[ this.expando ]; - - // If not, create one - if ( !value ) { - value = {}; - - // We can accept data for non-element nodes in modern browsers, - // but we should not, see #8335. - // Always return an empty object. 
- if ( acceptData( owner ) ) { - - // If it is a node unlikely to be stringify-ed or looped over - // use plain assignment - if ( owner.nodeType ) { - owner[ this.expando ] = value; - - // Otherwise secure it in a non-enumerable property - // configurable must be true to allow the property to be - // deleted when data is removed - } else { - Object.defineProperty( owner, this.expando, { - value: value, - configurable: true - } ); - } - } - } - - return value; - }, - set: function( owner, data, value ) { - var prop, - cache = this.cache( owner ); - - // Handle: [ owner, key, value ] args - // Always use camelCase key (gh-2257) - if ( typeof data === "string" ) { - cache[ camelCase( data ) ] = value; - - // Handle: [ owner, { properties } ] args - } else { - - // Copy the properties one-by-one to the cache object - for ( prop in data ) { - cache[ camelCase( prop ) ] = data[ prop ]; - } - } - return cache; - }, - get: function( owner, key ) { - return key === undefined ? - this.cache( owner ) : - - // Always use camelCase key (gh-2257) - owner[ this.expando ] && owner[ this.expando ][ camelCase( key ) ]; - }, - access: function( owner, key, value ) { - - // In cases where either: - // - // 1. No key was specified - // 2. A string key was specified, but no value provided - // - // Take the "read" path and allow the get method to determine - // which value to return, respectively either: - // - // 1. The entire cache object - // 2. The data stored at the key - // - if ( key === undefined || - ( ( key && typeof key === "string" ) && value === undefined ) ) { - - return this.get( owner, key ); - } - - // When the key is not a string, or both a key and value - // are specified, set or extend (existing objects) with either: - // - // 1. An object of properties - // 2. 
A key and value - // - this.set( owner, key, value ); - - // Since the "set" path can have two possible entry points - // return the expected data based on which path was taken[*] - return value !== undefined ? value : key; - }, - remove: function( owner, key ) { - var i, - cache = owner[ this.expando ]; - - if ( cache === undefined ) { - return; - } - - if ( key !== undefined ) { - - // Support array or space separated string of keys - if ( Array.isArray( key ) ) { - - // If key is an array of keys... - // We always set camelCase keys, so remove that. - key = key.map( camelCase ); - } else { - key = camelCase( key ); - - // If a key with the spaces exists, use it. - // Otherwise, create an array by matching non-whitespace - key = key in cache ? - [ key ] : - ( key.match( rnothtmlwhite ) || [] ); - } - - i = key.length; - - while ( i-- ) { - delete cache[ key[ i ] ]; - } - } - - // Remove the expando if there's no more data - if ( key === undefined || jQuery.isEmptyObject( cache ) ) { - - // Support: Chrome <=35 - 45 - // Webkit & Blink performance suffers when deleting properties - // from DOM nodes, so set to undefined instead - // https://bugs.chromium.org/p/chromium/issues/detail?id=378607 (bug restricted) - if ( owner.nodeType ) { - owner[ this.expando ] = undefined; - } else { - delete owner[ this.expando ]; - } - } - }, - hasData: function( owner ) { - var cache = owner[ this.expando ]; - return cache !== undefined && !jQuery.isEmptyObject( cache ); - } -}; -var dataPriv = new Data(); - -var dataUser = new Data(); - - - -// Implementation Summary -// -// 1. Enforce API surface and semantic compatibility with 1.9.x branch -// 2. Improve the module's maintainability by reducing the storage -// paths to a single mechanism. -// 3. Use the same single mechanism to support "private" and "user" data. -// 4. _Never_ expose "private" data to user code (TODO: Drop _data, _removeData) -// 5. Avoid exposing implementation details on user objects (eg. 
expando properties) -// 6. Provide a clear path for implementation upgrade to WeakMap in 2014 - -var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/, - rmultiDash = /[A-Z]/g; - -function getData( data ) { - if ( data === "true" ) { - return true; - } - - if ( data === "false" ) { - return false; - } - - if ( data === "null" ) { - return null; - } - - // Only convert to a number if it doesn't change the string - if ( data === +data + "" ) { - return +data; - } - - if ( rbrace.test( data ) ) { - return JSON.parse( data ); - } - - return data; -} - -function dataAttr( elem, key, data ) { - var name; - - // If nothing was found internally, try to fetch any - // data from the HTML5 data-* attribute - if ( data === undefined && elem.nodeType === 1 ) { - name = "data-" + key.replace( rmultiDash, "-$&" ).toLowerCase(); - data = elem.getAttribute( name ); - - if ( typeof data === "string" ) { - try { - data = getData( data ); - } catch ( e ) {} - - // Make sure we set the data so it isn't changed later - dataUser.set( elem, key, data ); - } else { - data = undefined; - } - } - return data; -} - -jQuery.extend( { - hasData: function( elem ) { - return dataUser.hasData( elem ) || dataPriv.hasData( elem ); - }, - - data: function( elem, name, data ) { - return dataUser.access( elem, name, data ); - }, - - removeData: function( elem, name ) { - dataUser.remove( elem, name ); - }, - - // TODO: Now that all calls to _data and _removeData have been replaced - // with direct calls to dataPriv methods, these can be deprecated. 
- _data: function( elem, name, data ) { - return dataPriv.access( elem, name, data ); - }, - - _removeData: function( elem, name ) { - dataPriv.remove( elem, name ); - } -} ); - -jQuery.fn.extend( { - data: function( key, value ) { - var i, name, data, - elem = this[ 0 ], - attrs = elem && elem.attributes; - - // Gets all values - if ( key === undefined ) { - if ( this.length ) { - data = dataUser.get( elem ); - - if ( elem.nodeType === 1 && !dataPriv.get( elem, "hasDataAttrs" ) ) { - i = attrs.length; - while ( i-- ) { - - // Support: IE 11 only - // The attrs elements can be null (#14894) - if ( attrs[ i ] ) { - name = attrs[ i ].name; - if ( name.indexOf( "data-" ) === 0 ) { - name = camelCase( name.slice( 5 ) ); - dataAttr( elem, name, data[ name ] ); - } - } - } - dataPriv.set( elem, "hasDataAttrs", true ); - } - } - - return data; - } - - // Sets multiple values - if ( typeof key === "object" ) { - return this.each( function() { - dataUser.set( this, key ); - } ); - } - - return access( this, function( value ) { - var data; - - // The calling jQuery object (element matches) is not empty - // (and therefore has an element appears at this[ 0 ]) and the - // `value` parameter was not undefined. An empty jQuery object - // will result in `undefined` for elem = this[ 0 ] which will - // throw an exception if an attempt to read a data cache is made. - if ( elem && value === undefined ) { - - // Attempt to get data from the cache - // The key will always be camelCased in Data - data = dataUser.get( elem, key ); - if ( data !== undefined ) { - return data; - } - - // Attempt to "discover" the data in - // HTML5 custom data-* attrs - data = dataAttr( elem, key ); - if ( data !== undefined ) { - return data; - } - - // We tried really hard, but the data doesn't exist. - return; - } - - // Set the data... 
- this.each( function() { - - // We always store the camelCased key - dataUser.set( this, key, value ); - } ); - }, null, value, arguments.length > 1, null, true ); - }, - - removeData: function( key ) { - return this.each( function() { - dataUser.remove( this, key ); - } ); - } -} ); - - -jQuery.extend( { - queue: function( elem, type, data ) { - var queue; - - if ( elem ) { - type = ( type || "fx" ) + "queue"; - queue = dataPriv.get( elem, type ); - - // Speed up dequeue by getting out quickly if this is just a lookup - if ( data ) { - if ( !queue || Array.isArray( data ) ) { - queue = dataPriv.access( elem, type, jQuery.makeArray( data ) ); - } else { - queue.push( data ); - } - } - return queue || []; - } - }, - - dequeue: function( elem, type ) { - type = type || "fx"; - - var queue = jQuery.queue( elem, type ), - startLength = queue.length, - fn = queue.shift(), - hooks = jQuery._queueHooks( elem, type ), - next = function() { - jQuery.dequeue( elem, type ); - }; - - // If the fx queue is dequeued, always remove the progress sentinel - if ( fn === "inprogress" ) { - fn = queue.shift(); - startLength--; - } - - if ( fn ) { - - // Add a progress sentinel to prevent the fx queue from being - // automatically dequeued - if ( type === "fx" ) { - queue.unshift( "inprogress" ); - } - - // Clear up the last queue stop function - delete hooks.stop; - fn.call( elem, next, hooks ); - } - - if ( !startLength && hooks ) { - hooks.empty.fire(); - } - }, - - // Not public - generate a queueHooks object, or return the current one - _queueHooks: function( elem, type ) { - var key = type + "queueHooks"; - return dataPriv.get( elem, key ) || dataPriv.access( elem, key, { - empty: jQuery.Callbacks( "once memory" ).add( function() { - dataPriv.remove( elem, [ type + "queue", key ] ); - } ) - } ); - } -} ); - -jQuery.fn.extend( { - queue: function( type, data ) { - var setter = 2; - - if ( typeof type !== "string" ) { - data = type; - type = "fx"; - setter--; - } - - if ( 
arguments.length < setter ) { - return jQuery.queue( this[ 0 ], type ); - } - - return data === undefined ? - this : - this.each( function() { - var queue = jQuery.queue( this, type, data ); - - // Ensure a hooks for this queue - jQuery._queueHooks( this, type ); - - if ( type === "fx" && queue[ 0 ] !== "inprogress" ) { - jQuery.dequeue( this, type ); - } - } ); - }, - dequeue: function( type ) { - return this.each( function() { - jQuery.dequeue( this, type ); - } ); - }, - clearQueue: function( type ) { - return this.queue( type || "fx", [] ); - }, - - // Get a promise resolved when queues of a certain type - // are emptied (fx is the type by default) - promise: function( type, obj ) { - var tmp, - count = 1, - defer = jQuery.Deferred(), - elements = this, - i = this.length, - resolve = function() { - if ( !( --count ) ) { - defer.resolveWith( elements, [ elements ] ); - } - }; - - if ( typeof type !== "string" ) { - obj = type; - type = undefined; - } - type = type || "fx"; - - while ( i-- ) { - tmp = dataPriv.get( elements[ i ], type + "queueHooks" ); - if ( tmp && tmp.empty ) { - count++; - tmp.empty.add( resolve ); - } - } - resolve(); - return defer.promise( obj ); - } -} ); -var pnum = ( /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/ ).source; - -var rcssNum = new RegExp( "^(?:([+-])=|)(" + pnum + ")([a-z%]*)$", "i" ); - - -var cssExpand = [ "Top", "Right", "Bottom", "Left" ]; - -var documentElement = document.documentElement; - - - - var isAttached = function( elem ) { - return jQuery.contains( elem.ownerDocument, elem ); - }, - composed = { composed: true }; - - // Support: IE 9 - 11+, Edge 12 - 18+, iOS 10.0 - 10.2 only - // Check attachment across shadow DOM boundaries when possible (gh-3504) - // Support: iOS 10.0-10.2 only - // Early iOS 10 versions support `attachShadow` but not `getRootNode`, - // leading to errors. We need to check for `getRootNode`. 
- if ( documentElement.getRootNode ) { - isAttached = function( elem ) { - return jQuery.contains( elem.ownerDocument, elem ) || - elem.getRootNode( composed ) === elem.ownerDocument; - }; - } -var isHiddenWithinTree = function( elem, el ) { - - // isHiddenWithinTree might be called from jQuery#filter function; - // in that case, element will be second argument - elem = el || elem; - - // Inline style trumps all - return elem.style.display === "none" || - elem.style.display === "" && - - // Otherwise, check computed style - // Support: Firefox <=43 - 45 - // Disconnected elements can have computed display: none, so first confirm that elem is - // in the document. - isAttached( elem ) && - - jQuery.css( elem, "display" ) === "none"; - }; - - - -function adjustCSS( elem, prop, valueParts, tween ) { - var adjusted, scale, - maxIterations = 20, - currentValue = tween ? - function() { - return tween.cur(); - } : - function() { - return jQuery.css( elem, prop, "" ); - }, - initial = currentValue(), - unit = valueParts && valueParts[ 3 ] || ( jQuery.cssNumber[ prop ] ? "" : "px" ), - - // Starting value computation is required for potential unit mismatches - initialInUnit = elem.nodeType && - ( jQuery.cssNumber[ prop ] || unit !== "px" && +initial ) && - rcssNum.exec( jQuery.css( elem, prop ) ); - - if ( initialInUnit && initialInUnit[ 3 ] !== unit ) { - - // Support: Firefox <=54 - // Halve the iteration target value to prevent interference from CSS upper bounds (gh-2144) - initial = initial / 2; - - // Trust units reported by jQuery.css - unit = unit || initialInUnit[ 3 ]; - - // Iteratively approximate from a nonzero starting point - initialInUnit = +initial || 1; - - while ( maxIterations-- ) { - - // Evaluate and update our best guess (doubling guesses that zero out). - // Finish if the scale equals or crosses 1 (making the old*new product non-positive). 
- jQuery.style( elem, prop, initialInUnit + unit ); - if ( ( 1 - scale ) * ( 1 - ( scale = currentValue() / initial || 0.5 ) ) <= 0 ) { - maxIterations = 0; - } - initialInUnit = initialInUnit / scale; - - } - - initialInUnit = initialInUnit * 2; - jQuery.style( elem, prop, initialInUnit + unit ); - - // Make sure we update the tween properties later on - valueParts = valueParts || []; - } - - if ( valueParts ) { - initialInUnit = +initialInUnit || +initial || 0; - - // Apply relative offset (+=/-=) if specified - adjusted = valueParts[ 1 ] ? - initialInUnit + ( valueParts[ 1 ] + 1 ) * valueParts[ 2 ] : - +valueParts[ 2 ]; - if ( tween ) { - tween.unit = unit; - tween.start = initialInUnit; - tween.end = adjusted; - } - } - return adjusted; -} - - -var defaultDisplayMap = {}; - -function getDefaultDisplay( elem ) { - var temp, - doc = elem.ownerDocument, - nodeName = elem.nodeName, - display = defaultDisplayMap[ nodeName ]; - - if ( display ) { - return display; - } - - temp = doc.body.appendChild( doc.createElement( nodeName ) ); - display = jQuery.css( temp, "display" ); - - temp.parentNode.removeChild( temp ); - - if ( display === "none" ) { - display = "block"; - } - defaultDisplayMap[ nodeName ] = display; - - return display; -} - -function showHide( elements, show ) { - var display, elem, - values = [], - index = 0, - length = elements.length; - - // Determine new display value for elements that need to change - for ( ; index < length; index++ ) { - elem = elements[ index ]; - if ( !elem.style ) { - continue; - } - - display = elem.style.display; - if ( show ) { - - // Since we force visibility upon cascade-hidden elements, an immediate (and slow) - // check is required in this first loop unless we have a nonempty display value (either - // inline or about-to-be-restored) - if ( display === "none" ) { - values[ index ] = dataPriv.get( elem, "display" ) || null; - if ( !values[ index ] ) { - elem.style.display = ""; - } - } - if ( elem.style.display === "" && 
isHiddenWithinTree( elem ) ) { - values[ index ] = getDefaultDisplay( elem ); - } - } else { - if ( display !== "none" ) { - values[ index ] = "none"; - - // Remember what we're overwriting - dataPriv.set( elem, "display", display ); - } - } - } - - // Set the display of the elements in a second loop to avoid constant reflow - for ( index = 0; index < length; index++ ) { - if ( values[ index ] != null ) { - elements[ index ].style.display = values[ index ]; - } - } - - return elements; -} - -jQuery.fn.extend( { - show: function() { - return showHide( this, true ); - }, - hide: function() { - return showHide( this ); - }, - toggle: function( state ) { - if ( typeof state === "boolean" ) { - return state ? this.show() : this.hide(); - } - - return this.each( function() { - if ( isHiddenWithinTree( this ) ) { - jQuery( this ).show(); - } else { - jQuery( this ).hide(); - } - } ); - } -} ); -var rcheckableType = ( /^(?:checkbox|radio)$/i ); - -var rtagName = ( /<([a-z][^\/\0>\x20\t\r\n\f]*)/i ); - -var rscriptType = ( /^$|^module$|\/(?:java|ecma)script/i ); - - - -( function() { - var fragment = document.createDocumentFragment(), - div = fragment.appendChild( document.createElement( "div" ) ), - input = document.createElement( "input" ); - - // Support: Android 4.0 - 4.3 only - // Check state lost if the name is set (#11217) - // Support: Windows Web Apps (WWA) - // `name` and `type` must use .setAttribute for WWA (#14901) - input.setAttribute( "type", "radio" ); - input.setAttribute( "checked", "checked" ); - input.setAttribute( "name", "t" ); - - div.appendChild( input ); - - // Support: Android <=4.1 only - // Older WebKit doesn't clone checked state correctly in fragments - support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked; - - // Support: IE <=11 only - // Make sure textarea (and checkbox) defaultValue is properly cloned - div.innerHTML = ""; - support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue; - - // Support: IE 
<=9 only - // IE <=9 replaces "; - support.option = !!div.lastChild; -} )(); - - -// We have to close these tags to support XHTML (#13200) -var wrapMap = { - - // XHTML parsers do not magically insert elements in the - // same way that tag soup parsers do. So we cannot shorten - // this by omitting or other required elements. - thead: [ 1, "", "
      " ], - col: [ 2, "", "
      " ], - tr: [ 2, "", "
      " ], - td: [ 3, "", "
      " ], - - _default: [ 0, "", "" ] -}; - -wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; -wrapMap.th = wrapMap.td; - -// Support: IE <=9 only -if ( !support.option ) { - wrapMap.optgroup = wrapMap.option = [ 1, "" ]; -} - - -function getAll( context, tag ) { - - // Support: IE <=9 - 11 only - // Use typeof to avoid zero-argument method invocation on host objects (#15151) - var ret; - - if ( typeof context.getElementsByTagName !== "undefined" ) { - ret = context.getElementsByTagName( tag || "*" ); - - } else if ( typeof context.querySelectorAll !== "undefined" ) { - ret = context.querySelectorAll( tag || "*" ); - - } else { - ret = []; - } - - if ( tag === undefined || tag && nodeName( context, tag ) ) { - return jQuery.merge( [ context ], ret ); - } - - return ret; -} - - -// Mark scripts as having already been evaluated -function setGlobalEval( elems, refElements ) { - var i = 0, - l = elems.length; - - for ( ; i < l; i++ ) { - dataPriv.set( - elems[ i ], - "globalEval", - !refElements || dataPriv.get( refElements[ i ], "globalEval" ) - ); - } -} - - -var rhtml = /<|&#?\w+;/; - -function buildFragment( elems, context, scripts, selection, ignored ) { - var elem, tmp, tag, wrap, attached, j, - fragment = context.createDocumentFragment(), - nodes = [], - i = 0, - l = elems.length; - - for ( ; i < l; i++ ) { - elem = elems[ i ]; - - if ( elem || elem === 0 ) { - - // Add nodes directly - if ( toType( elem ) === "object" ) { - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( nodes, elem.nodeType ? 
[ elem ] : elem ); - - // Convert non-html into a text node - } else if ( !rhtml.test( elem ) ) { - nodes.push( context.createTextNode( elem ) ); - - // Convert html into DOM nodes - } else { - tmp = tmp || fragment.appendChild( context.createElement( "div" ) ); - - // Deserialize a standard representation - tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase(); - wrap = wrapMap[ tag ] || wrapMap._default; - tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ]; - - // Descend through wrappers to the right content - j = wrap[ 0 ]; - while ( j-- ) { - tmp = tmp.lastChild; - } - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( nodes, tmp.childNodes ); - - // Remember the top-level container - tmp = fragment.firstChild; - - // Ensure the created nodes are orphaned (#12392) - tmp.textContent = ""; - } - } - } - - // Remove wrapper from fragment - fragment.textContent = ""; - - i = 0; - while ( ( elem = nodes[ i++ ] ) ) { - - // Skip elements already in the context collection (trac-4087) - if ( selection && jQuery.inArray( elem, selection ) > -1 ) { - if ( ignored ) { - ignored.push( elem ); - } - continue; - } - - attached = isAttached( elem ); - - // Append to fragment - tmp = getAll( fragment.appendChild( elem ), "script" ); - - // Preserve script evaluation history - if ( attached ) { - setGlobalEval( tmp ); - } - - // Capture executables - if ( scripts ) { - j = 0; - while ( ( elem = tmp[ j++ ] ) ) { - if ( rscriptType.test( elem.type || "" ) ) { - scripts.push( elem ); - } - } - } - } - - return fragment; -} - - -var rtypenamespace = /^([^.]*)(?:\.(.+)|)/; - -function returnTrue() { - return true; -} - -function returnFalse() { - return false; -} - -// Support: IE <=9 - 11+ -// focus() and blur() are asynchronous, except when they are no-op. 
-// So expect focus to be synchronous when the element is already active, -// and blur to be synchronous when the element is not already active. -// (focus and blur are always synchronous in other supported browsers, -// this just defines when we can count on it). -function expectSync( elem, type ) { - return ( elem === safeActiveElement() ) === ( type === "focus" ); -} - -// Support: IE <=9 only -// Accessing document.activeElement can throw unexpectedly -// https://bugs.jquery.com/ticket/13393 -function safeActiveElement() { - try { - return document.activeElement; - } catch ( err ) { } -} - -function on( elem, types, selector, data, fn, one ) { - var origFn, type; - - // Types can be a map of types/handlers - if ( typeof types === "object" ) { - - // ( types-Object, selector, data ) - if ( typeof selector !== "string" ) { - - // ( types-Object, data ) - data = data || selector; - selector = undefined; - } - for ( type in types ) { - on( elem, type, selector, data, types[ type ], one ); - } - return elem; - } - - if ( data == null && fn == null ) { - - // ( types, fn ) - fn = selector; - data = selector = undefined; - } else if ( fn == null ) { - if ( typeof selector === "string" ) { - - // ( types, selector, fn ) - fn = data; - data = undefined; - } else { - - // ( types, data, fn ) - fn = data; - data = selector; - selector = undefined; - } - } - if ( fn === false ) { - fn = returnFalse; - } else if ( !fn ) { - return elem; - } - - if ( one === 1 ) { - origFn = fn; - fn = function( event ) { - - // Can use an empty set, since event contains the info - jQuery().off( event ); - return origFn.apply( this, arguments ); - }; - - // Use same guid so caller can remove using origFn - fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); - } - return elem.each( function() { - jQuery.event.add( this, types, fn, data, selector ); - } ); -} - -/* - * Helper functions for managing events -- not part of the public interface. 
- * Props to Dean Edwards' addEvent library for many of the ideas. - */ -jQuery.event = { - - global: {}, - - add: function( elem, types, handler, data, selector ) { - - var handleObjIn, eventHandle, tmp, - events, t, handleObj, - special, handlers, type, namespaces, origType, - elemData = dataPriv.get( elem ); - - // Only attach events to objects that accept data - if ( !acceptData( elem ) ) { - return; - } - - // Caller can pass in an object of custom data in lieu of the handler - if ( handler.handler ) { - handleObjIn = handler; - handler = handleObjIn.handler; - selector = handleObjIn.selector; - } - - // Ensure that invalid selectors throw exceptions at attach time - // Evaluate against documentElement in case elem is a non-element node (e.g., document) - if ( selector ) { - jQuery.find.matchesSelector( documentElement, selector ); - } - - // Make sure that the handler has a unique ID, used to find/remove it later - if ( !handler.guid ) { - handler.guid = jQuery.guid++; - } - - // Init the element's event structure and main handler, if this is the first - if ( !( events = elemData.events ) ) { - events = elemData.events = Object.create( null ); - } - if ( !( eventHandle = elemData.handle ) ) { - eventHandle = elemData.handle = function( e ) { - - // Discard the second event of a jQuery.event.trigger() and - // when an event is called after a page has unloaded - return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? - jQuery.event.dispatch.apply( elem, arguments ) : undefined; - }; - } - - // Handle multiple events separated by a space - types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[ t ] ) || []; - type = origType = tmp[ 1 ]; - namespaces = ( tmp[ 2 ] || "" ).split( "." 
).sort(); - - // There *must* be a type, no attaching namespace-only handlers - if ( !type ) { - continue; - } - - // If event changes its type, use the special event handlers for the changed type - special = jQuery.event.special[ type ] || {}; - - // If selector defined, determine special event api type, otherwise given type - type = ( selector ? special.delegateType : special.bindType ) || type; - - // Update special based on newly reset type - special = jQuery.event.special[ type ] || {}; - - // handleObj is passed to all event handlers - handleObj = jQuery.extend( { - type: type, - origType: origType, - data: data, - handler: handler, - guid: handler.guid, - selector: selector, - needsContext: selector && jQuery.expr.match.needsContext.test( selector ), - namespace: namespaces.join( "." ) - }, handleObjIn ); - - // Init the event handler queue if we're the first - if ( !( handlers = events[ type ] ) ) { - handlers = events[ type ] = []; - handlers.delegateCount = 0; - - // Only use addEventListener if the special events handler returns false - if ( !special.setup || - special.setup.call( elem, data, namespaces, eventHandle ) === false ) { - - if ( elem.addEventListener ) { - elem.addEventListener( type, eventHandle ); - } - } - } - - if ( special.add ) { - special.add.call( elem, handleObj ); - - if ( !handleObj.handler.guid ) { - handleObj.handler.guid = handler.guid; - } - } - - // Add to the element's handler list, delegates in front - if ( selector ) { - handlers.splice( handlers.delegateCount++, 0, handleObj ); - } else { - handlers.push( handleObj ); - } - - // Keep track of which events have ever been used, for event optimization - jQuery.event.global[ type ] = true; - } - - }, - - // Detach an event or set of events from an element - remove: function( elem, types, handler, selector, mappedTypes ) { - - var j, origCount, tmp, - events, t, handleObj, - special, handlers, type, namespaces, origType, - elemData = dataPriv.hasData( elem ) && dataPriv.get( 
elem ); - - if ( !elemData || !( events = elemData.events ) ) { - return; - } - - // Once for each type.namespace in types; type may be omitted - types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; - t = types.length; - while ( t-- ) { - tmp = rtypenamespace.exec( types[ t ] ) || []; - type = origType = tmp[ 1 ]; - namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); - - // Unbind all events (on this namespace, if provided) for the element - if ( !type ) { - for ( type in events ) { - jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); - } - continue; - } - - special = jQuery.event.special[ type ] || {}; - type = ( selector ? special.delegateType : special.bindType ) || type; - handlers = events[ type ] || []; - tmp = tmp[ 2 ] && - new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); - - // Remove matching events - origCount = j = handlers.length; - while ( j-- ) { - handleObj = handlers[ j ]; - - if ( ( mappedTypes || origType === handleObj.origType ) && - ( !handler || handler.guid === handleObj.guid ) && - ( !tmp || tmp.test( handleObj.namespace ) ) && - ( !selector || selector === handleObj.selector || - selector === "**" && handleObj.selector ) ) { - handlers.splice( j, 1 ); - - if ( handleObj.selector ) { - handlers.delegateCount--; - } - if ( special.remove ) { - special.remove.call( elem, handleObj ); - } - } - } - - // Remove generic event handler if we removed something and no more handlers exist - // (avoids potential for endless recursion during removal of special event handlers) - if ( origCount && !handlers.length ) { - if ( !special.teardown || - special.teardown.call( elem, namespaces, elemData.handle ) === false ) { - - jQuery.removeEvent( elem, type, elemData.handle ); - } - - delete events[ type ]; - } - } - - // Remove data and the expando if it's no longer used - if ( jQuery.isEmptyObject( events ) ) { - dataPriv.remove( elem, "handle events" ); - } - }, - - dispatch: function( nativeEvent ) { - - 
var i, j, ret, matched, handleObj, handlerQueue, - args = new Array( arguments.length ), - - // Make a writable jQuery.Event from the native event object - event = jQuery.event.fix( nativeEvent ), - - handlers = ( - dataPriv.get( this, "events" ) || Object.create( null ) - )[ event.type ] || [], - special = jQuery.event.special[ event.type ] || {}; - - // Use the fix-ed jQuery.Event rather than the (read-only) native event - args[ 0 ] = event; - - for ( i = 1; i < arguments.length; i++ ) { - args[ i ] = arguments[ i ]; - } - - event.delegateTarget = this; - - // Call the preDispatch hook for the mapped type, and let it bail if desired - if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { - return; - } - - // Determine handlers - handlerQueue = jQuery.event.handlers.call( this, event, handlers ); - - // Run delegates first; they may want to stop propagation beneath us - i = 0; - while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) { - event.currentTarget = matched.elem; - - j = 0; - while ( ( handleObj = matched.handlers[ j++ ] ) && - !event.isImmediatePropagationStopped() ) { - - // If the event is namespaced, then each handler is only invoked if it is - // specially universal or its namespaces are a superset of the event's. 
- if ( !event.rnamespace || handleObj.namespace === false || - event.rnamespace.test( handleObj.namespace ) ) { - - event.handleObj = handleObj; - event.data = handleObj.data; - - ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle || - handleObj.handler ).apply( matched.elem, args ); - - if ( ret !== undefined ) { - if ( ( event.result = ret ) === false ) { - event.preventDefault(); - event.stopPropagation(); - } - } - } - } - } - - // Call the postDispatch hook for the mapped type - if ( special.postDispatch ) { - special.postDispatch.call( this, event ); - } - - return event.result; - }, - - handlers: function( event, handlers ) { - var i, handleObj, sel, matchedHandlers, matchedSelectors, - handlerQueue = [], - delegateCount = handlers.delegateCount, - cur = event.target; - - // Find delegate handlers - if ( delegateCount && - - // Support: IE <=9 - // Black-hole SVG instance trees (trac-13180) - cur.nodeType && - - // Support: Firefox <=42 - // Suppress spec-violating clicks indicating a non-primary pointer button (trac-3861) - // https://www.w3.org/TR/DOM-Level-3-Events/#event-type-click - // Support: IE 11 only - // ...but not arrow key "clicks" of radio inputs, which can have `button` -1 (gh-2343) - !( event.type === "click" && event.button >= 1 ) ) { - - for ( ; cur !== this; cur = cur.parentNode || this ) { - - // Don't check non-elements (#13208) - // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) - if ( cur.nodeType === 1 && !( event.type === "click" && cur.disabled === true ) ) { - matchedHandlers = []; - matchedSelectors = {}; - for ( i = 0; i < delegateCount; i++ ) { - handleObj = handlers[ i ]; - - // Don't conflict with Object.prototype properties (#13203) - sel = handleObj.selector + " "; - - if ( matchedSelectors[ sel ] === undefined ) { - matchedSelectors[ sel ] = handleObj.needsContext ? 
- jQuery( sel, this ).index( cur ) > -1 : - jQuery.find( sel, this, null, [ cur ] ).length; - } - if ( matchedSelectors[ sel ] ) { - matchedHandlers.push( handleObj ); - } - } - if ( matchedHandlers.length ) { - handlerQueue.push( { elem: cur, handlers: matchedHandlers } ); - } - } - } - } - - // Add the remaining (directly-bound) handlers - cur = this; - if ( delegateCount < handlers.length ) { - handlerQueue.push( { elem: cur, handlers: handlers.slice( delegateCount ) } ); - } - - return handlerQueue; - }, - - addProp: function( name, hook ) { - Object.defineProperty( jQuery.Event.prototype, name, { - enumerable: true, - configurable: true, - - get: isFunction( hook ) ? - function() { - if ( this.originalEvent ) { - return hook( this.originalEvent ); - } - } : - function() { - if ( this.originalEvent ) { - return this.originalEvent[ name ]; - } - }, - - set: function( value ) { - Object.defineProperty( this, name, { - enumerable: true, - configurable: true, - writable: true, - value: value - } ); - } - } ); - }, - - fix: function( originalEvent ) { - return originalEvent[ jQuery.expando ] ? - originalEvent : - new jQuery.Event( originalEvent ); - }, - - special: { - load: { - - // Prevent triggered image.load events from bubbling to window.load - noBubble: true - }, - click: { - - // Utilize native event to ensure correct state for checkable inputs - setup: function( data ) { - - // For mutual compressibility with _default, replace `this` access with a local var. - // `|| data` is dead code meant only to preserve the variable through minification. - var el = this || data; - - // Claim the first handler - if ( rcheckableType.test( el.type ) && - el.click && nodeName( el, "input" ) ) { - - // dataPriv.set( el, "click", ... 
) - leverageNative( el, "click", returnTrue ); - } - - // Return false to allow normal processing in the caller - return false; - }, - trigger: function( data ) { - - // For mutual compressibility with _default, replace `this` access with a local var. - // `|| data` is dead code meant only to preserve the variable through minification. - var el = this || data; - - // Force setup before triggering a click - if ( rcheckableType.test( el.type ) && - el.click && nodeName( el, "input" ) ) { - - leverageNative( el, "click" ); - } - - // Return non-false to allow normal event-path propagation - return true; - }, - - // For cross-browser consistency, suppress native .click() on links - // Also prevent it if we're currently inside a leveraged native-event stack - _default: function( event ) { - var target = event.target; - return rcheckableType.test( target.type ) && - target.click && nodeName( target, "input" ) && - dataPriv.get( target, "click" ) || - nodeName( target, "a" ); - } - }, - - beforeunload: { - postDispatch: function( event ) { - - // Support: Firefox 20+ - // Firefox doesn't alert if the returnValue field is not set. - if ( event.result !== undefined && event.originalEvent ) { - event.originalEvent.returnValue = event.result; - } - } - } - } -}; - -// Ensure the presence of an event listener that handles manually-triggered -// synthetic events by interrupting progress until reinvoked in response to -// *native* events that it fires directly, ensuring that state changes have -// already occurred before other listeners are invoked. 
-function leverageNative( el, type, expectSync ) { - - // Missing expectSync indicates a trigger call, which must force setup through jQuery.event.add - if ( !expectSync ) { - if ( dataPriv.get( el, type ) === undefined ) { - jQuery.event.add( el, type, returnTrue ); - } - return; - } - - // Register the controller as a special universal handler for all event namespaces - dataPriv.set( el, type, false ); - jQuery.event.add( el, type, { - namespace: false, - handler: function( event ) { - var notAsync, result, - saved = dataPriv.get( this, type ); - - if ( ( event.isTrigger & 1 ) && this[ type ] ) { - - // Interrupt processing of the outer synthetic .trigger()ed event - // Saved data should be false in such cases, but might be a leftover capture object - // from an async native handler (gh-4350) - if ( !saved.length ) { - - // Store arguments for use when handling the inner native event - // There will always be at least one argument (an event object), so this array - // will not be confused with a leftover capture object. - saved = slice.call( arguments ); - dataPriv.set( this, type, saved ); - - // Trigger the native event and capture its result - // Support: IE <=9 - 11+ - // focus() and blur() are asynchronous - notAsync = expectSync( this, type ); - this[ type ](); - result = dataPriv.get( this, type ); - if ( saved !== result || notAsync ) { - dataPriv.set( this, type, false ); - } else { - result = {}; - } - if ( saved !== result ) { - - // Cancel the outer synthetic event - event.stopImmediatePropagation(); - event.preventDefault(); - - // Support: Chrome 86+ - // In Chrome, if an element having a focusout handler is blurred by - // clicking outside of it, it invokes the handler synchronously. If - // that handler calls `.remove()` on the element, the data is cleared, - // leaving `result` undefined. We need to guard against this. 
- return result && result.value; - } - - // If this is an inner synthetic event for an event with a bubbling surrogate - // (focus or blur), assume that the surrogate already propagated from triggering the - // native event and prevent that from happening again here. - // This technically gets the ordering wrong w.r.t. to `.trigger()` (in which the - // bubbling surrogate propagates *after* the non-bubbling base), but that seems - // less bad than duplication. - } else if ( ( jQuery.event.special[ type ] || {} ).delegateType ) { - event.stopPropagation(); - } - - // If this is a native event triggered above, everything is now in order - // Fire an inner synthetic event with the original arguments - } else if ( saved.length ) { - - // ...and capture the result - dataPriv.set( this, type, { - value: jQuery.event.trigger( - - // Support: IE <=9 - 11+ - // Extend with the prototype to reset the above stopImmediatePropagation() - jQuery.extend( saved[ 0 ], jQuery.Event.prototype ), - saved.slice( 1 ), - this - ) - } ); - - // Abort handling of the native event - event.stopImmediatePropagation(); - } - } - } ); -} - -jQuery.removeEvent = function( elem, type, handle ) { - - // This "if" is needed for plain objects - if ( elem.removeEventListener ) { - elem.removeEventListener( type, handle ); - } -}; - -jQuery.Event = function( src, props ) { - - // Allow instantiation without the 'new' keyword - if ( !( this instanceof jQuery.Event ) ) { - return new jQuery.Event( src, props ); - } - - // Event object - if ( src && src.type ) { - this.originalEvent = src; - this.type = src.type; - - // Events bubbling up the document may have been marked as prevented - // by a handler lower down the tree; reflect the correct value. - this.isDefaultPrevented = src.defaultPrevented || - src.defaultPrevented === undefined && - - // Support: Android <=2.3 only - src.returnValue === false ? 
- returnTrue : - returnFalse; - - // Create target properties - // Support: Safari <=6 - 7 only - // Target should not be a text node (#504, #13143) - this.target = ( src.target && src.target.nodeType === 3 ) ? - src.target.parentNode : - src.target; - - this.currentTarget = src.currentTarget; - this.relatedTarget = src.relatedTarget; - - // Event type - } else { - this.type = src; - } - - // Put explicitly provided properties onto the event object - if ( props ) { - jQuery.extend( this, props ); - } - - // Create a timestamp if incoming event doesn't have one - this.timeStamp = src && src.timeStamp || Date.now(); - - // Mark it as fixed - this[ jQuery.expando ] = true; -}; - -// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding -// https://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html -jQuery.Event.prototype = { - constructor: jQuery.Event, - isDefaultPrevented: returnFalse, - isPropagationStopped: returnFalse, - isImmediatePropagationStopped: returnFalse, - isSimulated: false, - - preventDefault: function() { - var e = this.originalEvent; - - this.isDefaultPrevented = returnTrue; - - if ( e && !this.isSimulated ) { - e.preventDefault(); - } - }, - stopPropagation: function() { - var e = this.originalEvent; - - this.isPropagationStopped = returnTrue; - - if ( e && !this.isSimulated ) { - e.stopPropagation(); - } - }, - stopImmediatePropagation: function() { - var e = this.originalEvent; - - this.isImmediatePropagationStopped = returnTrue; - - if ( e && !this.isSimulated ) { - e.stopImmediatePropagation(); - } - - this.stopPropagation(); - } -}; - -// Includes all common event props including KeyEvent and MouseEvent specific props -jQuery.each( { - altKey: true, - bubbles: true, - cancelable: true, - changedTouches: true, - ctrlKey: true, - detail: true, - eventPhase: true, - metaKey: true, - pageX: true, - pageY: true, - shiftKey: true, - view: true, - "char": true, - code: true, - charCode: true, - 
key: true, - keyCode: true, - button: true, - buttons: true, - clientX: true, - clientY: true, - offsetX: true, - offsetY: true, - pointerId: true, - pointerType: true, - screenX: true, - screenY: true, - targetTouches: true, - toElement: true, - touches: true, - which: true -}, jQuery.event.addProp ); - -jQuery.each( { focus: "focusin", blur: "focusout" }, function( type, delegateType ) { - jQuery.event.special[ type ] = { - - // Utilize native event if possible so blur/focus sequence is correct - setup: function() { - - // Claim the first handler - // dataPriv.set( this, "focus", ... ) - // dataPriv.set( this, "blur", ... ) - leverageNative( this, type, expectSync ); - - // Return false to allow normal processing in the caller - return false; - }, - trigger: function() { - - // Force setup before trigger - leverageNative( this, type ); - - // Return non-false to allow normal event-path propagation - return true; - }, - - // Suppress native focus or blur as it's already being fired - // in leverageNative. - _default: function() { - return true; - }, - - delegateType: delegateType - }; -} ); - -// Create mouseenter/leave events using mouseover/out and event-time checks -// so that event delegation works in jQuery. -// Do the same for pointerenter/pointerleave and pointerover/pointerout -// -// Support: Safari 7 only -// Safari sends mouseenter too often; see: -// https://bugs.chromium.org/p/chromium/issues/detail?id=470258 -// for the description of the bug (it existed in older Chrome versions as well). -jQuery.each( { - mouseenter: "mouseover", - mouseleave: "mouseout", - pointerenter: "pointerover", - pointerleave: "pointerout" -}, function( orig, fix ) { - jQuery.event.special[ orig ] = { - delegateType: fix, - bindType: fix, - - handle: function( event ) { - var ret, - target = this, - related = event.relatedTarget, - handleObj = event.handleObj; - - // For mouseenter/leave call the handler if related is outside the target. 
- // NB: No relatedTarget if the mouse left/entered the browser window - if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) { - event.type = handleObj.origType; - ret = handleObj.handler.apply( this, arguments ); - event.type = fix; - } - return ret; - } - }; -} ); - -jQuery.fn.extend( { - - on: function( types, selector, data, fn ) { - return on( this, types, selector, data, fn ); - }, - one: function( types, selector, data, fn ) { - return on( this, types, selector, data, fn, 1 ); - }, - off: function( types, selector, fn ) { - var handleObj, type; - if ( types && types.preventDefault && types.handleObj ) { - - // ( event ) dispatched jQuery.Event - handleObj = types.handleObj; - jQuery( types.delegateTarget ).off( - handleObj.namespace ? - handleObj.origType + "." + handleObj.namespace : - handleObj.origType, - handleObj.selector, - handleObj.handler - ); - return this; - } - if ( typeof types === "object" ) { - - // ( types-object [, selector] ) - for ( type in types ) { - this.off( type, selector, types[ type ] ); - } - return this; - } - if ( selector === false || typeof selector === "function" ) { - - // ( types [, fn] ) - fn = selector; - selector = undefined; - } - if ( fn === false ) { - fn = returnFalse; - } - return this.each( function() { - jQuery.event.remove( this, types, fn, selector ); - } ); - } -} ); - - -var - - // Support: IE <=10 - 11, Edge 12 - 13 only - // In IE/Edge using regex groups here causes severe slowdowns. - // See https://connect.microsoft.com/IE/feedback/details/1736512/ - rnoInnerhtml = /\s*$/g; - -// Prefer a tbody over its parent table for containing new rows -function manipulationTarget( elem, content ) { - if ( nodeName( elem, "table" ) && - nodeName( content.nodeType !== 11 ? 
content : content.firstChild, "tr" ) ) { - - return jQuery( elem ).children( "tbody" )[ 0 ] || elem; - } - - return elem; -} - -// Replace/restore the type attribute of script elements for safe DOM manipulation -function disableScript( elem ) { - elem.type = ( elem.getAttribute( "type" ) !== null ) + "/" + elem.type; - return elem; -} -function restoreScript( elem ) { - if ( ( elem.type || "" ).slice( 0, 5 ) === "true/" ) { - elem.type = elem.type.slice( 5 ); - } else { - elem.removeAttribute( "type" ); - } - - return elem; -} - -function cloneCopyEvent( src, dest ) { - var i, l, type, pdataOld, udataOld, udataCur, events; - - if ( dest.nodeType !== 1 ) { - return; - } - - // 1. Copy private data: events, handlers, etc. - if ( dataPriv.hasData( src ) ) { - pdataOld = dataPriv.get( src ); - events = pdataOld.events; - - if ( events ) { - dataPriv.remove( dest, "handle events" ); - - for ( type in events ) { - for ( i = 0, l = events[ type ].length; i < l; i++ ) { - jQuery.event.add( dest, type, events[ type ][ i ] ); - } - } - } - } - - // 2. Copy user data - if ( dataUser.hasData( src ) ) { - udataOld = dataUser.access( src ); - udataCur = jQuery.extend( {}, udataOld ); - - dataUser.set( dest, udataCur ); - } -} - -// Fix IE bugs, see support tests -function fixInput( src, dest ) { - var nodeName = dest.nodeName.toLowerCase(); - - // Fails to persist the checked state of a cloned checkbox or radio button. 
- if ( nodeName === "input" && rcheckableType.test( src.type ) ) { - dest.checked = src.checked; - - // Fails to return the selected option to the default selected state when cloning options - } else if ( nodeName === "input" || nodeName === "textarea" ) { - dest.defaultValue = src.defaultValue; - } -} - -function domManip( collection, args, callback, ignored ) { - - // Flatten any nested arrays - args = flat( args ); - - var fragment, first, scripts, hasScripts, node, doc, - i = 0, - l = collection.length, - iNoClone = l - 1, - value = args[ 0 ], - valueIsFunction = isFunction( value ); - - // We can't cloneNode fragments that contain checked, in WebKit - if ( valueIsFunction || - ( l > 1 && typeof value === "string" && - !support.checkClone && rchecked.test( value ) ) ) { - return collection.each( function( index ) { - var self = collection.eq( index ); - if ( valueIsFunction ) { - args[ 0 ] = value.call( this, index, self.html() ); - } - domManip( self, args, callback, ignored ); - } ); - } - - if ( l ) { - fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored ); - first = fragment.firstChild; - - if ( fragment.childNodes.length === 1 ) { - fragment = first; - } - - // Require either new content or an interest in ignored elements to invoke the callback - if ( first || ignored ) { - scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); - hasScripts = scripts.length; - - // Use the original fragment for the last item - // instead of the first because it can end up - // being emptied incorrectly in certain situations (#8070). 
- for ( ; i < l; i++ ) { - node = fragment; - - if ( i !== iNoClone ) { - node = jQuery.clone( node, true, true ); - - // Keep references to cloned scripts for later restoration - if ( hasScripts ) { - - // Support: Android <=4.0 only, PhantomJS 1 only - // push.apply(_, arraylike) throws on ancient WebKit - jQuery.merge( scripts, getAll( node, "script" ) ); - } - } - - callback.call( collection[ i ], node, i ); - } - - if ( hasScripts ) { - doc = scripts[ scripts.length - 1 ].ownerDocument; - - // Reenable scripts - jQuery.map( scripts, restoreScript ); - - // Evaluate executable scripts on first document insertion - for ( i = 0; i < hasScripts; i++ ) { - node = scripts[ i ]; - if ( rscriptType.test( node.type || "" ) && - !dataPriv.access( node, "globalEval" ) && - jQuery.contains( doc, node ) ) { - - if ( node.src && ( node.type || "" ).toLowerCase() !== "module" ) { - - // Optional AJAX dependency, but won't run scripts if not present - if ( jQuery._evalUrl && !node.noModule ) { - jQuery._evalUrl( node.src, { - nonce: node.nonce || node.getAttribute( "nonce" ) - }, doc ); - } - } else { - DOMEval( node.textContent.replace( rcleanScript, "" ), node, doc ); - } - } - } - } - } - } - - return collection; -} - -function remove( elem, selector, keepData ) { - var node, - nodes = selector ? 
jQuery.filter( selector, elem ) : elem, - i = 0; - - for ( ; ( node = nodes[ i ] ) != null; i++ ) { - if ( !keepData && node.nodeType === 1 ) { - jQuery.cleanData( getAll( node ) ); - } - - if ( node.parentNode ) { - if ( keepData && isAttached( node ) ) { - setGlobalEval( getAll( node, "script" ) ); - } - node.parentNode.removeChild( node ); - } - } - - return elem; -} - -jQuery.extend( { - htmlPrefilter: function( html ) { - return html; - }, - - clone: function( elem, dataAndEvents, deepDataAndEvents ) { - var i, l, srcElements, destElements, - clone = elem.cloneNode( true ), - inPage = isAttached( elem ); - - // Fix IE cloning issues - if ( !support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && - !jQuery.isXMLDoc( elem ) ) { - - // We eschew Sizzle here for performance reasons: https://jsperf.com/getall-vs-sizzle/2 - destElements = getAll( clone ); - srcElements = getAll( elem ); - - for ( i = 0, l = srcElements.length; i < l; i++ ) { - fixInput( srcElements[ i ], destElements[ i ] ); - } - } - - // Copy the events from the original to the clone - if ( dataAndEvents ) { - if ( deepDataAndEvents ) { - srcElements = srcElements || getAll( elem ); - destElements = destElements || getAll( clone ); - - for ( i = 0, l = srcElements.length; i < l; i++ ) { - cloneCopyEvent( srcElements[ i ], destElements[ i ] ); - } - } else { - cloneCopyEvent( elem, clone ); - } - } - - // Preserve script evaluation history - destElements = getAll( clone, "script" ); - if ( destElements.length > 0 ) { - setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); - } - - // Return the cloned set - return clone; - }, - - cleanData: function( elems ) { - var data, elem, type, - special = jQuery.event.special, - i = 0; - - for ( ; ( elem = elems[ i ] ) !== undefined; i++ ) { - if ( acceptData( elem ) ) { - if ( ( data = elem[ dataPriv.expando ] ) ) { - if ( data.events ) { - for ( type in data.events ) { - if ( special[ type ] ) { - jQuery.event.remove( 
elem, type ); - - // This is a shortcut to avoid jQuery.event.remove's overhead - } else { - jQuery.removeEvent( elem, type, data.handle ); - } - } - } - - // Support: Chrome <=35 - 45+ - // Assign undefined instead of using delete, see Data#remove - elem[ dataPriv.expando ] = undefined; - } - if ( elem[ dataUser.expando ] ) { - - // Support: Chrome <=35 - 45+ - // Assign undefined instead of using delete, see Data#remove - elem[ dataUser.expando ] = undefined; - } - } - } - } -} ); - -jQuery.fn.extend( { - detach: function( selector ) { - return remove( this, selector, true ); - }, - - remove: function( selector ) { - return remove( this, selector ); - }, - - text: function( value ) { - return access( this, function( value ) { - return value === undefined ? - jQuery.text( this ) : - this.empty().each( function() { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - this.textContent = value; - } - } ); - }, null, value, arguments.length ); - }, - - append: function() { - return domManip( this, arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.appendChild( elem ); - } - } ); - }, - - prepend: function() { - return domManip( this, arguments, function( elem ) { - if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { - var target = manipulationTarget( this, elem ); - target.insertBefore( elem, target.firstChild ); - } - } ); - }, - - before: function() { - return domManip( this, arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this ); - } - } ); - }, - - after: function() { - return domManip( this, arguments, function( elem ) { - if ( this.parentNode ) { - this.parentNode.insertBefore( elem, this.nextSibling ); - } - } ); - }, - - empty: function() { - var elem, - i = 0; - - for ( ; ( elem = this[ i ] ) != null; i++ ) { - if ( elem.nodeType === 1 ) { - - // 
Prevent memory leaks - jQuery.cleanData( getAll( elem, false ) ); - - // Remove any remaining nodes - elem.textContent = ""; - } - } - - return this; - }, - - clone: function( dataAndEvents, deepDataAndEvents ) { - dataAndEvents = dataAndEvents == null ? false : dataAndEvents; - deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents; - - return this.map( function() { - return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); - } ); - }, - - html: function( value ) { - return access( this, function( value ) { - var elem = this[ 0 ] || {}, - i = 0, - l = this.length; - - if ( value === undefined && elem.nodeType === 1 ) { - return elem.innerHTML; - } - - // See if we can take a shortcut and just use innerHTML - if ( typeof value === "string" && !rnoInnerhtml.test( value ) && - !wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) { - - value = jQuery.htmlPrefilter( value ); - - try { - for ( ; i < l; i++ ) { - elem = this[ i ] || {}; - - // Remove element nodes and prevent memory leaks - if ( elem.nodeType === 1 ) { - jQuery.cleanData( getAll( elem, false ) ); - elem.innerHTML = value; - } - } - - elem = 0; - - // If using innerHTML throws an exception, use the fallback method - } catch ( e ) {} - } - - if ( elem ) { - this.empty().append( value ); - } - }, null, value, arguments.length ); - }, - - replaceWith: function() { - var ignored = []; - - // Make the changes, replacing each non-ignored context element with the new content - return domManip( this, arguments, function( elem ) { - var parent = this.parentNode; - - if ( jQuery.inArray( this, ignored ) < 0 ) { - jQuery.cleanData( getAll( this ) ); - if ( parent ) { - parent.replaceChild( elem, this ); - } - } - - // Force callback invocation - }, ignored ); - } -} ); - -jQuery.each( { - appendTo: "append", - prependTo: "prepend", - insertBefore: "before", - insertAfter: "after", - replaceAll: "replaceWith" -}, function( name, original ) { - jQuery.fn[ name ] = 
function( selector ) { - var elems, - ret = [], - insert = jQuery( selector ), - last = insert.length - 1, - i = 0; - - for ( ; i <= last; i++ ) { - elems = i === last ? this : this.clone( true ); - jQuery( insert[ i ] )[ original ]( elems ); - - // Support: Android <=4.0 only, PhantomJS 1 only - // .get() because push.apply(_, arraylike) throws on ancient WebKit - push.apply( ret, elems.get() ); - } - - return this.pushStack( ret ); - }; -} ); -var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" ); - -var getStyles = function( elem ) { - - // Support: IE <=11 only, Firefox <=30 (#15098, #14150) - // IE throws on elements created in popups - // FF meanwhile throws on frame elements through "defaultView.getComputedStyle" - var view = elem.ownerDocument.defaultView; - - if ( !view || !view.opener ) { - view = window; - } - - return view.getComputedStyle( elem ); - }; - -var swap = function( elem, options, callback ) { - var ret, name, - old = {}; - - // Remember the old values, and insert the new ones - for ( name in options ) { - old[ name ] = elem.style[ name ]; - elem.style[ name ] = options[ name ]; - } - - ret = callback.call( elem ); - - // Revert the old values - for ( name in options ) { - elem.style[ name ] = old[ name ]; - } - - return ret; -}; - - -var rboxStyle = new RegExp( cssExpand.join( "|" ), "i" ); - - - -( function() { - - // Executing both pixelPosition & boxSizingReliable tests require only one layout - // so they're executed at the same time to save the second computation. 
- function computeStyleTests() { - - // This is a singleton, we need to execute it only once - if ( !div ) { - return; - } - - container.style.cssText = "position:absolute;left:-11111px;width:60px;" + - "margin-top:1px;padding:0;border:0"; - div.style.cssText = - "position:relative;display:block;box-sizing:border-box;overflow:scroll;" + - "margin:auto;border:1px;padding:1px;" + - "width:60%;top:1%"; - documentElement.appendChild( container ).appendChild( div ); - - var divStyle = window.getComputedStyle( div ); - pixelPositionVal = divStyle.top !== "1%"; - - // Support: Android 4.0 - 4.3 only, Firefox <=3 - 44 - reliableMarginLeftVal = roundPixelMeasures( divStyle.marginLeft ) === 12; - - // Support: Android 4.0 - 4.3 only, Safari <=9.1 - 10.1, iOS <=7.0 - 9.3 - // Some styles come back with percentage values, even though they shouldn't - div.style.right = "60%"; - pixelBoxStylesVal = roundPixelMeasures( divStyle.right ) === 36; - - // Support: IE 9 - 11 only - // Detect misreporting of content dimensions for box-sizing:border-box elements - boxSizingReliableVal = roundPixelMeasures( divStyle.width ) === 36; - - // Support: IE 9 only - // Detect overflow:scroll screwiness (gh-3699) - // Support: Chrome <=64 - // Don't get tricked when zoom affects offsetWidth (gh-4029) - div.style.position = "absolute"; - scrollboxSizeVal = roundPixelMeasures( div.offsetWidth / 3 ) === 12; - - documentElement.removeChild( container ); - - // Nullify the div so it wouldn't be stored in the memory and - // it will also be a sign that checks already performed - div = null; - } - - function roundPixelMeasures( measure ) { - return Math.round( parseFloat( measure ) ); - } - - var pixelPositionVal, boxSizingReliableVal, scrollboxSizeVal, pixelBoxStylesVal, - reliableTrDimensionsVal, reliableMarginLeftVal, - container = document.createElement( "div" ), - div = document.createElement( "div" ); - - // Finish early in limited (non-browser) environments - if ( !div.style ) { - return; - } - - 
// Support: IE <=9 - 11 only - // Style of cloned element affects source element cloned (#8908) - div.style.backgroundClip = "content-box"; - div.cloneNode( true ).style.backgroundClip = ""; - support.clearCloneStyle = div.style.backgroundClip === "content-box"; - - jQuery.extend( support, { - boxSizingReliable: function() { - computeStyleTests(); - return boxSizingReliableVal; - }, - pixelBoxStyles: function() { - computeStyleTests(); - return pixelBoxStylesVal; - }, - pixelPosition: function() { - computeStyleTests(); - return pixelPositionVal; - }, - reliableMarginLeft: function() { - computeStyleTests(); - return reliableMarginLeftVal; - }, - scrollboxSize: function() { - computeStyleTests(); - return scrollboxSizeVal; - }, - - // Support: IE 9 - 11+, Edge 15 - 18+ - // IE/Edge misreport `getComputedStyle` of table rows with width/height - // set in CSS while `offset*` properties report correct values. - // Behavior in IE 9 is more subtle than in newer versions & it passes - // some versions of this test; make sure not to make it pass there! - // - // Support: Firefox 70+ - // Only Firefox includes border widths - // in computed dimensions. (gh-4529) - reliableTrDimensions: function() { - var table, tr, trChild, trStyle; - if ( reliableTrDimensionsVal == null ) { - table = document.createElement( "table" ); - tr = document.createElement( "tr" ); - trChild = document.createElement( "div" ); - - table.style.cssText = "position:absolute;left:-11111px;border-collapse:separate"; - tr.style.cssText = "border:1px solid"; - - // Support: Chrome 86+ - // Height set through cssText does not get applied. - // Computed height then comes back as 0. - tr.style.height = "1px"; - trChild.style.height = "9px"; - - // Support: Android 8 Chrome 86+ - // In our bodyBackground.html iframe, - // display for all div elements is set to "inline", - // which causes a problem only in Android 8 Chrome 86. - // Ensuring the div is display: block - // gets around this issue. 
- trChild.style.display = "block"; - - documentElement - .appendChild( table ) - .appendChild( tr ) - .appendChild( trChild ); - - trStyle = window.getComputedStyle( tr ); - reliableTrDimensionsVal = ( parseInt( trStyle.height, 10 ) + - parseInt( trStyle.borderTopWidth, 10 ) + - parseInt( trStyle.borderBottomWidth, 10 ) ) === tr.offsetHeight; - - documentElement.removeChild( table ); - } - return reliableTrDimensionsVal; - } - } ); -} )(); - - -function curCSS( elem, name, computed ) { - var width, minWidth, maxWidth, ret, - - // Support: Firefox 51+ - // Retrieving style before computed somehow - // fixes an issue with getting wrong values - // on detached elements - style = elem.style; - - computed = computed || getStyles( elem ); - - // getPropertyValue is needed for: - // .css('filter') (IE 9 only, #12537) - // .css('--customProperty) (#3144) - if ( computed ) { - ret = computed.getPropertyValue( name ) || computed[ name ]; - - if ( ret === "" && !isAttached( elem ) ) { - ret = jQuery.style( elem, name ); - } - - // A tribute to the "awesome hack by Dean Edwards" - // Android Browser returns percentage for some values, - // but width seems to be reliably pixels. - // This is against the CSSOM draft spec: - // https://drafts.csswg.org/cssom/#resolved-values - if ( !support.pixelBoxStyles() && rnumnonpx.test( ret ) && rboxStyle.test( name ) ) { - - // Remember the original values - width = style.width; - minWidth = style.minWidth; - maxWidth = style.maxWidth; - - // Put in the new values to get a computed value out - style.minWidth = style.maxWidth = style.width = ret; - ret = computed.width; - - // Revert the changed values - style.width = width; - style.minWidth = minWidth; - style.maxWidth = maxWidth; - } - } - - return ret !== undefined ? - - // Support: IE <=9 - 11 only - // IE returns zIndex value as an integer. 
- ret + "" : - ret; -} - - -function addGetHookIf( conditionFn, hookFn ) { - - // Define the hook, we'll check on the first run if it's really needed. - return { - get: function() { - if ( conditionFn() ) { - - // Hook not needed (or it's not possible to use it due - // to missing dependency), remove it. - delete this.get; - return; - } - - // Hook needed; redefine it so that the support test is not executed again. - return ( this.get = hookFn ).apply( this, arguments ); - } - }; -} - - -var cssPrefixes = [ "Webkit", "Moz", "ms" ], - emptyStyle = document.createElement( "div" ).style, - vendorProps = {}; - -// Return a vendor-prefixed property or undefined -function vendorPropName( name ) { - - // Check for vendor prefixed names - var capName = name[ 0 ].toUpperCase() + name.slice( 1 ), - i = cssPrefixes.length; - - while ( i-- ) { - name = cssPrefixes[ i ] + capName; - if ( name in emptyStyle ) { - return name; - } - } -} - -// Return a potentially-mapped jQuery.cssProps or vendor prefixed property -function finalPropName( name ) { - var final = jQuery.cssProps[ name ] || vendorProps[ name ]; - - if ( final ) { - return final; - } - if ( name in emptyStyle ) { - return name; - } - return vendorProps[ name ] = vendorPropName( name ) || name; -} - - -var - - // Swappable if display is none or starts with table - // except "table", "table-cell", or "table-caption" - // See here for display values: https://developer.mozilla.org/en-US/docs/CSS/display - rdisplayswap = /^(none|table(?!-c[ea]).+)/, - rcustomProp = /^--/, - cssShow = { position: "absolute", visibility: "hidden", display: "block" }, - cssNormalTransform = { - letterSpacing: "0", - fontWeight: "400" - }; - -function setPositiveNumber( _elem, value, subtract ) { - - // Any relative (+/-) values have already been - // normalized at this point - var matches = rcssNum.exec( value ); - return matches ? 
- - // Guard against undefined "subtract", e.g., when used as in cssHooks - Math.max( 0, matches[ 2 ] - ( subtract || 0 ) ) + ( matches[ 3 ] || "px" ) : - value; -} - -function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, computedVal ) { - var i = dimension === "width" ? 1 : 0, - extra = 0, - delta = 0; - - // Adjustment may not be necessary - if ( box === ( isBorderBox ? "border" : "content" ) ) { - return 0; - } - - for ( ; i < 4; i += 2 ) { - - // Both box models exclude margin - if ( box === "margin" ) { - delta += jQuery.css( elem, box + cssExpand[ i ], true, styles ); - } - - // If we get here with a content-box, we're seeking "padding" or "border" or "margin" - if ( !isBorderBox ) { - - // Add padding - delta += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - - // For "border" or "margin", add border - if ( box !== "padding" ) { - delta += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - - // But still keep track of it otherwise - } else { - extra += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - - // If we get here with a border-box (content + padding + border), we're seeking "content" or - // "padding" or "margin" - } else { - - // For "content", subtract padding - if ( box === "content" ) { - delta -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); - } - - // For "content" or "padding", subtract border - if ( box !== "margin" ) { - delta -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); - } - } - } - - // Account for positive content-box scroll gutter when requested by providing computedVal - if ( !isBorderBox && computedVal >= 0 ) { - - // offsetWidth/offsetHeight is a rounded sum of content, padding, scroll gutter, and border - // Assuming integer scroll gutter, subtract the rest and round down - delta += Math.max( 0, Math.ceil( - elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - - computedVal - - delta - - 
extra - - 0.5 - - // If offsetWidth/offsetHeight is unknown, then we can't determine content-box scroll gutter - // Use an explicit zero to avoid NaN (gh-3964) - ) ) || 0; - } - - return delta; -} - -function getWidthOrHeight( elem, dimension, extra ) { - - // Start with computed style - var styles = getStyles( elem ), - - // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-4322). - // Fake content-box until we know it's needed to know the true value. - boxSizingNeeded = !support.boxSizingReliable() || extra, - isBorderBox = boxSizingNeeded && - jQuery.css( elem, "boxSizing", false, styles ) === "border-box", - valueIsBorderBox = isBorderBox, - - val = curCSS( elem, dimension, styles ), - offsetProp = "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ); - - // Support: Firefox <=54 - // Return a confounding non-pixel value or feign ignorance, as appropriate. - if ( rnumnonpx.test( val ) ) { - if ( !extra ) { - return val; - } - val = "auto"; - } - - - // Support: IE 9 - 11 only - // Use offsetWidth/offsetHeight for when box sizing is unreliable. - // In those cases, the computed value can be trusted to be border-box. - if ( ( !support.boxSizingReliable() && isBorderBox || - - // Support: IE 10 - 11+, Edge 15 - 18+ - // IE/Edge misreport `getComputedStyle` of table rows with width/height - // set in CSS while `offset*` properties report correct values. - // Interestingly, in some cases IE 9 doesn't suffer from this issue. 
- !support.reliableTrDimensions() && nodeName( elem, "tr" ) || - - // Fall back to offsetWidth/offsetHeight when value is "auto" - // This happens for inline elements with no explicit setting (gh-3571) - val === "auto" || - - // Support: Android <=4.1 - 4.3 only - // Also use offsetWidth/offsetHeight for misreported inline dimensions (gh-3602) - !parseFloat( val ) && jQuery.css( elem, "display", false, styles ) === "inline" ) && - - // Make sure the element is visible & connected - elem.getClientRects().length ) { - - isBorderBox = jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; - - // Where available, offsetWidth/offsetHeight approximate border box dimensions. - // Where not available (e.g., SVG), assume unreliable box-sizing and interpret the - // retrieved value as a content box dimension. - valueIsBorderBox = offsetProp in elem; - if ( valueIsBorderBox ) { - val = elem[ offsetProp ]; - } - } - - // Normalize "" and auto - val = parseFloat( val ) || 0; - - // Adjust for the element's box model - return ( val + - boxModelAdjustment( - elem, - dimension, - extra || ( isBorderBox ? "border" : "content" ), - valueIsBorderBox, - styles, - - // Provide the current computed size to request scroll gutter calculation (gh-3589) - val - ) - ) + "px"; -} - -jQuery.extend( { - - // Add in style property hooks for overriding the default - // behavior of getting and setting a style property - cssHooks: { - opacity: { - get: function( elem, computed ) { - if ( computed ) { - - // We should always get a number back from opacity - var ret = curCSS( elem, "opacity" ); - return ret === "" ? 
"1" : ret; - } - } - } - }, - - // Don't automatically add "px" to these possibly-unitless properties - cssNumber: { - "animationIterationCount": true, - "columnCount": true, - "fillOpacity": true, - "flexGrow": true, - "flexShrink": true, - "fontWeight": true, - "gridArea": true, - "gridColumn": true, - "gridColumnEnd": true, - "gridColumnStart": true, - "gridRow": true, - "gridRowEnd": true, - "gridRowStart": true, - "lineHeight": true, - "opacity": true, - "order": true, - "orphans": true, - "widows": true, - "zIndex": true, - "zoom": true - }, - - // Add in properties whose names you wish to fix before - // setting or getting the value - cssProps: {}, - - // Get and set the style property on a DOM Node - style: function( elem, name, value, extra ) { - - // Don't set styles on text and comment nodes - if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { - return; - } - - // Make sure that we're working with the right name - var ret, type, hooks, - origName = camelCase( name ), - isCustomProp = rcustomProp.test( name ), - style = elem.style; - - // Make sure that we're working with the right name. We don't - // want to query the value if it is a CSS custom property - // since they are user-defined. 
- if ( !isCustomProp ) { - name = finalPropName( origName ); - } - - // Gets hook for the prefixed version, then unprefixed version - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // Check if we're setting a value - if ( value !== undefined ) { - type = typeof value; - - // Convert "+=" or "-=" to relative numbers (#7345) - if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) { - value = adjustCSS( elem, name, ret ); - - // Fixes bug #9237 - type = "number"; - } - - // Make sure that null and NaN values aren't set (#7116) - if ( value == null || value !== value ) { - return; - } - - // If a number was passed in, add the unit (except for certain CSS properties) - // The isCustomProp check can be removed in jQuery 4.0 when we only auto-append - // "px" to a few hardcoded values. - if ( type === "number" && !isCustomProp ) { - value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? "" : "px" ); - } - - // background-* props affect original clone's values - if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) { - style[ name ] = "inherit"; - } - - // If a hook was provided, use that value, otherwise just set the specified value - if ( !hooks || !( "set" in hooks ) || - ( value = hooks.set( elem, value, extra ) ) !== undefined ) { - - if ( isCustomProp ) { - style.setProperty( name, value ); - } else { - style[ name ] = value; - } - } - - } else { - - // If a hook was provided get the non-computed value from there - if ( hooks && "get" in hooks && - ( ret = hooks.get( elem, false, extra ) ) !== undefined ) { - - return ret; - } - - // Otherwise just get the value from the style object - return style[ name ]; - } - }, - - css: function( elem, name, extra, styles ) { - var val, num, hooks, - origName = camelCase( name ), - isCustomProp = rcustomProp.test( name ); - - // Make sure that we're working with the right name. 
We don't - // want to modify the value if it is a CSS custom property - // since they are user-defined. - if ( !isCustomProp ) { - name = finalPropName( origName ); - } - - // Try prefixed name followed by the unprefixed name - hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; - - // If a hook was provided get the computed value from there - if ( hooks && "get" in hooks ) { - val = hooks.get( elem, true, extra ); - } - - // Otherwise, if a way to get the computed value exists, use that - if ( val === undefined ) { - val = curCSS( elem, name, styles ); - } - - // Convert "normal" to computed value - if ( val === "normal" && name in cssNormalTransform ) { - val = cssNormalTransform[ name ]; - } - - // Make numeric if forced or a qualifier was provided and val looks numeric - if ( extra === "" || extra ) { - num = parseFloat( val ); - return extra === true || isFinite( num ) ? num || 0 : val; - } - - return val; - } -} ); - -jQuery.each( [ "height", "width" ], function( _i, dimension ) { - jQuery.cssHooks[ dimension ] = { - get: function( elem, computed, extra ) { - if ( computed ) { - - // Certain elements can have dimension info if we invisibly show them - // but it must have a current display style that would benefit - return rdisplayswap.test( jQuery.css( elem, "display" ) ) && - - // Support: Safari 8+ - // Table columns in Safari have non-zero offsetWidth & zero - // getBoundingClientRect().width unless display is changed. - // Support: IE <=11 only - // Running getBoundingClientRect on a disconnected node - // in IE throws an error. - ( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ? - swap( elem, cssShow, function() { - return getWidthOrHeight( elem, dimension, extra ); - } ) : - getWidthOrHeight( elem, dimension, extra ); - } - }, - - set: function( elem, value, extra ) { - var matches, - styles = getStyles( elem ), - - // Only read styles.position if the test has a chance to fail - // to avoid forcing a reflow. 
- scrollboxSizeBuggy = !support.scrollboxSize() && - styles.position === "absolute", - - // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-3991) - boxSizingNeeded = scrollboxSizeBuggy || extra, - isBorderBox = boxSizingNeeded && - jQuery.css( elem, "boxSizing", false, styles ) === "border-box", - subtract = extra ? - boxModelAdjustment( - elem, - dimension, - extra, - isBorderBox, - styles - ) : - 0; - - // Account for unreliable border-box dimensions by comparing offset* to computed and - // faking a content-box to get border and padding (gh-3699) - if ( isBorderBox && scrollboxSizeBuggy ) { - subtract -= Math.ceil( - elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - - parseFloat( styles[ dimension ] ) - - boxModelAdjustment( elem, dimension, "border", false, styles ) - - 0.5 - ); - } - - // Convert to pixels if value adjustment is needed - if ( subtract && ( matches = rcssNum.exec( value ) ) && - ( matches[ 3 ] || "px" ) !== "px" ) { - - elem.style[ dimension ] = value; - value = jQuery.css( elem, dimension ); - } - - return setPositiveNumber( elem, value, subtract ); - } - }; -} ); - -jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft, - function( elem, computed ) { - if ( computed ) { - return ( parseFloat( curCSS( elem, "marginLeft" ) ) || - elem.getBoundingClientRect().left - - swap( elem, { marginLeft: 0 }, function() { - return elem.getBoundingClientRect().left; - } ) - ) + "px"; - } - } -); - -// These hooks are used by animate to expand properties -jQuery.each( { - margin: "", - padding: "", - border: "Width" -}, function( prefix, suffix ) { - jQuery.cssHooks[ prefix + suffix ] = { - expand: function( value ) { - var i = 0, - expanded = {}, - - // Assumes a single number if not a string - parts = typeof value === "string" ? 
value.split( " " ) : [ value ]; - - for ( ; i < 4; i++ ) { - expanded[ prefix + cssExpand[ i ] + suffix ] = - parts[ i ] || parts[ i - 2 ] || parts[ 0 ]; - } - - return expanded; - } - }; - - if ( prefix !== "margin" ) { - jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber; - } -} ); - -jQuery.fn.extend( { - css: function( name, value ) { - return access( this, function( elem, name, value ) { - var styles, len, - map = {}, - i = 0; - - if ( Array.isArray( name ) ) { - styles = getStyles( elem ); - len = name.length; - - for ( ; i < len; i++ ) { - map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); - } - - return map; - } - - return value !== undefined ? - jQuery.style( elem, name, value ) : - jQuery.css( elem, name ); - }, name, value, arguments.length > 1 ); - } -} ); - - -function Tween( elem, options, prop, end, easing ) { - return new Tween.prototype.init( elem, options, prop, end, easing ); -} -jQuery.Tween = Tween; - -Tween.prototype = { - constructor: Tween, - init: function( elem, options, prop, end, easing, unit ) { - this.elem = elem; - this.prop = prop; - this.easing = easing || jQuery.easing._default; - this.options = options; - this.start = this.now = this.cur(); - this.end = end; - this.unit = unit || ( jQuery.cssNumber[ prop ] ? "" : "px" ); - }, - cur: function() { - var hooks = Tween.propHooks[ this.prop ]; - - return hooks && hooks.get ? 
- hooks.get( this ) : - Tween.propHooks._default.get( this ); - }, - run: function( percent ) { - var eased, - hooks = Tween.propHooks[ this.prop ]; - - if ( this.options.duration ) { - this.pos = eased = jQuery.easing[ this.easing ]( - percent, this.options.duration * percent, 0, 1, this.options.duration - ); - } else { - this.pos = eased = percent; - } - this.now = ( this.end - this.start ) * eased + this.start; - - if ( this.options.step ) { - this.options.step.call( this.elem, this.now, this ); - } - - if ( hooks && hooks.set ) { - hooks.set( this ); - } else { - Tween.propHooks._default.set( this ); - } - return this; - } -}; - -Tween.prototype.init.prototype = Tween.prototype; - -Tween.propHooks = { - _default: { - get: function( tween ) { - var result; - - // Use a property on the element directly when it is not a DOM element, - // or when there is no matching style property that exists. - if ( tween.elem.nodeType !== 1 || - tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) { - return tween.elem[ tween.prop ]; - } - - // Passing an empty string as a 3rd parameter to .css will automatically - // attempt a parseFloat and fallback to a string if the parse fails. - // Simple values such as "10px" are parsed to Float; - // complex values such as "rotate(1rad)" are returned as-is. - result = jQuery.css( tween.elem, tween.prop, "" ); - - // Empty strings, null, undefined and "auto" are converted to 0. - return !result || result === "auto" ? 0 : result; - }, - set: function( tween ) { - - // Use step hook for back compat. - // Use cssHook if its there. - // Use .style if available and use plain properties where available. 
- if ( jQuery.fx.step[ tween.prop ] ) { - jQuery.fx.step[ tween.prop ]( tween ); - } else if ( tween.elem.nodeType === 1 && ( - jQuery.cssHooks[ tween.prop ] || - tween.elem.style[ finalPropName( tween.prop ) ] != null ) ) { - jQuery.style( tween.elem, tween.prop, tween.now + tween.unit ); - } else { - tween.elem[ tween.prop ] = tween.now; - } - } - } -}; - -// Support: IE <=9 only -// Panic based approach to setting things on disconnected nodes -Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = { - set: function( tween ) { - if ( tween.elem.nodeType && tween.elem.parentNode ) { - tween.elem[ tween.prop ] = tween.now; - } - } -}; - -jQuery.easing = { - linear: function( p ) { - return p; - }, - swing: function( p ) { - return 0.5 - Math.cos( p * Math.PI ) / 2; - }, - _default: "swing" -}; - -jQuery.fx = Tween.prototype.init; - -// Back compat <1.8 extension point -jQuery.fx.step = {}; - - - - -var - fxNow, inProgress, - rfxtypes = /^(?:toggle|show|hide)$/, - rrun = /queueHooks$/; - -function schedule() { - if ( inProgress ) { - if ( document.hidden === false && window.requestAnimationFrame ) { - window.requestAnimationFrame( schedule ); - } else { - window.setTimeout( schedule, jQuery.fx.interval ); - } - - jQuery.fx.tick(); - } -} - -// Animations created synchronously will run synchronously -function createFxNow() { - window.setTimeout( function() { - fxNow = undefined; - } ); - return ( fxNow = Date.now() ); -} - -// Generate parameters to create a standard animation -function genFx( type, includeWidth ) { - var which, - i = 0, - attrs = { height: type }; - - // If we include width, step value is 1 to do all cssExpand values, - // otherwise step value is 2 to skip over Left and Right - includeWidth = includeWidth ? 
1 : 0; - for ( ; i < 4; i += 2 - includeWidth ) { - which = cssExpand[ i ]; - attrs[ "margin" + which ] = attrs[ "padding" + which ] = type; - } - - if ( includeWidth ) { - attrs.opacity = attrs.width = type; - } - - return attrs; -} - -function createTween( value, prop, animation ) { - var tween, - collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ), - index = 0, - length = collection.length; - for ( ; index < length; index++ ) { - if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) { - - // We're done with this property - return tween; - } - } -} - -function defaultPrefilter( elem, props, opts ) { - var prop, value, toggle, hooks, oldfire, propTween, restoreDisplay, display, - isBox = "width" in props || "height" in props, - anim = this, - orig = {}, - style = elem.style, - hidden = elem.nodeType && isHiddenWithinTree( elem ), - dataShow = dataPriv.get( elem, "fxshow" ); - - // Queue-skipping animations hijack the fx hooks - if ( !opts.queue ) { - hooks = jQuery._queueHooks( elem, "fx" ); - if ( hooks.unqueued == null ) { - hooks.unqueued = 0; - oldfire = hooks.empty.fire; - hooks.empty.fire = function() { - if ( !hooks.unqueued ) { - oldfire(); - } - }; - } - hooks.unqueued++; - - anim.always( function() { - - // Ensure the complete handler is called before this completes - anim.always( function() { - hooks.unqueued--; - if ( !jQuery.queue( elem, "fx" ).length ) { - hooks.empty.fire(); - } - } ); - } ); - } - - // Detect show/hide animations - for ( prop in props ) { - value = props[ prop ]; - if ( rfxtypes.test( value ) ) { - delete props[ prop ]; - toggle = toggle || value === "toggle"; - if ( value === ( hidden ? 
"hide" : "show" ) ) { - - // Pretend to be hidden if this is a "show" and - // there is still data from a stopped show/hide - if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) { - hidden = true; - - // Ignore all other no-op show/hide data - } else { - continue; - } - } - orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop ); - } - } - - // Bail out if this is a no-op like .hide().hide() - propTween = !jQuery.isEmptyObject( props ); - if ( !propTween && jQuery.isEmptyObject( orig ) ) { - return; - } - - // Restrict "overflow" and "display" styles during box animations - if ( isBox && elem.nodeType === 1 ) { - - // Support: IE <=9 - 11, Edge 12 - 15 - // Record all 3 overflow attributes because IE does not infer the shorthand - // from identically-valued overflowX and overflowY and Edge just mirrors - // the overflowX value there. - opts.overflow = [ style.overflow, style.overflowX, style.overflowY ]; - - // Identify a display type, preferring old show/hide data over the CSS cascade - restoreDisplay = dataShow && dataShow.display; - if ( restoreDisplay == null ) { - restoreDisplay = dataPriv.get( elem, "display" ); - } - display = jQuery.css( elem, "display" ); - if ( display === "none" ) { - if ( restoreDisplay ) { - display = restoreDisplay; - } else { - - // Get nonempty value(s) by temporarily forcing visibility - showHide( [ elem ], true ); - restoreDisplay = elem.style.display || restoreDisplay; - display = jQuery.css( elem, "display" ); - showHide( [ elem ] ); - } - } - - // Animate inline elements as inline-block - if ( display === "inline" || display === "inline-block" && restoreDisplay != null ) { - if ( jQuery.css( elem, "float" ) === "none" ) { - - // Restore the original display value at the end of pure show/hide animations - if ( !propTween ) { - anim.done( function() { - style.display = restoreDisplay; - } ); - if ( restoreDisplay == null ) { - display = style.display; - restoreDisplay = display === "none" ? 
"" : display; - } - } - style.display = "inline-block"; - } - } - } - - if ( opts.overflow ) { - style.overflow = "hidden"; - anim.always( function() { - style.overflow = opts.overflow[ 0 ]; - style.overflowX = opts.overflow[ 1 ]; - style.overflowY = opts.overflow[ 2 ]; - } ); - } - - // Implement show/hide animations - propTween = false; - for ( prop in orig ) { - - // General show/hide setup for this element animation - if ( !propTween ) { - if ( dataShow ) { - if ( "hidden" in dataShow ) { - hidden = dataShow.hidden; - } - } else { - dataShow = dataPriv.access( elem, "fxshow", { display: restoreDisplay } ); - } - - // Store hidden/visible for toggle so `.stop().toggle()` "reverses" - if ( toggle ) { - dataShow.hidden = !hidden; - } - - // Show elements before animating them - if ( hidden ) { - showHide( [ elem ], true ); - } - - /* eslint-disable no-loop-func */ - - anim.done( function() { - - /* eslint-enable no-loop-func */ - - // The final step of a "hide" animation is actually hiding the element - if ( !hidden ) { - showHide( [ elem ] ); - } - dataPriv.remove( elem, "fxshow" ); - for ( prop in orig ) { - jQuery.style( elem, prop, orig[ prop ] ); - } - } ); - } - - // Per-property setup - propTween = createTween( hidden ? 
dataShow[ prop ] : 0, prop, anim ); - if ( !( prop in dataShow ) ) { - dataShow[ prop ] = propTween.start; - if ( hidden ) { - propTween.end = propTween.start; - propTween.start = 0; - } - } - } -} - -function propFilter( props, specialEasing ) { - var index, name, easing, value, hooks; - - // camelCase, specialEasing and expand cssHook pass - for ( index in props ) { - name = camelCase( index ); - easing = specialEasing[ name ]; - value = props[ index ]; - if ( Array.isArray( value ) ) { - easing = value[ 1 ]; - value = props[ index ] = value[ 0 ]; - } - - if ( index !== name ) { - props[ name ] = value; - delete props[ index ]; - } - - hooks = jQuery.cssHooks[ name ]; - if ( hooks && "expand" in hooks ) { - value = hooks.expand( value ); - delete props[ name ]; - - // Not quite $.extend, this won't overwrite existing keys. - // Reusing 'index' because we have the correct "name" - for ( index in value ) { - if ( !( index in props ) ) { - props[ index ] = value[ index ]; - specialEasing[ index ] = easing; - } - } - } else { - specialEasing[ name ] = easing; - } - } -} - -function Animation( elem, properties, options ) { - var result, - stopped, - index = 0, - length = Animation.prefilters.length, - deferred = jQuery.Deferred().always( function() { - - // Don't match elem in the :animated selector - delete tick.elem; - } ), - tick = function() { - if ( stopped ) { - return false; - } - var currentTime = fxNow || createFxNow(), - remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ), - - // Support: Android 2.3 only - // Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497) - temp = remaining / animation.duration || 0, - percent = 1 - temp, - index = 0, - length = animation.tweens.length; - - for ( ; index < length; index++ ) { - animation.tweens[ index ].run( percent ); - } - - deferred.notifyWith( elem, [ animation, percent, remaining ] ); - - // If there's more to do, yield - if ( percent < 1 && length ) { - return 
remaining; - } - - // If this was an empty animation, synthesize a final progress notification - if ( !length ) { - deferred.notifyWith( elem, [ animation, 1, 0 ] ); - } - - // Resolve the animation and report its conclusion - deferred.resolveWith( elem, [ animation ] ); - return false; - }, - animation = deferred.promise( { - elem: elem, - props: jQuery.extend( {}, properties ), - opts: jQuery.extend( true, { - specialEasing: {}, - easing: jQuery.easing._default - }, options ), - originalProperties: properties, - originalOptions: options, - startTime: fxNow || createFxNow(), - duration: options.duration, - tweens: [], - createTween: function( prop, end ) { - var tween = jQuery.Tween( elem, animation.opts, prop, end, - animation.opts.specialEasing[ prop ] || animation.opts.easing ); - animation.tweens.push( tween ); - return tween; - }, - stop: function( gotoEnd ) { - var index = 0, - - // If we are going to the end, we want to run all the tweens - // otherwise we skip this part - length = gotoEnd ? 
animation.tweens.length : 0; - if ( stopped ) { - return this; - } - stopped = true; - for ( ; index < length; index++ ) { - animation.tweens[ index ].run( 1 ); - } - - // Resolve when we played the last frame; otherwise, reject - if ( gotoEnd ) { - deferred.notifyWith( elem, [ animation, 1, 0 ] ); - deferred.resolveWith( elem, [ animation, gotoEnd ] ); - } else { - deferred.rejectWith( elem, [ animation, gotoEnd ] ); - } - return this; - } - } ), - props = animation.props; - - propFilter( props, animation.opts.specialEasing ); - - for ( ; index < length; index++ ) { - result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts ); - if ( result ) { - if ( isFunction( result.stop ) ) { - jQuery._queueHooks( animation.elem, animation.opts.queue ).stop = - result.stop.bind( result ); - } - return result; - } - } - - jQuery.map( props, createTween, animation ); - - if ( isFunction( animation.opts.start ) ) { - animation.opts.start.call( elem, animation ); - } - - // Attach callbacks from options - animation - .progress( animation.opts.progress ) - .done( animation.opts.done, animation.opts.complete ) - .fail( animation.opts.fail ) - .always( animation.opts.always ); - - jQuery.fx.timer( - jQuery.extend( tick, { - elem: elem, - anim: animation, - queue: animation.opts.queue - } ) - ); - - return animation; -} - -jQuery.Animation = jQuery.extend( Animation, { - - tweeners: { - "*": [ function( prop, value ) { - var tween = this.createTween( prop, value ); - adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween ); - return tween; - } ] - }, - - tweener: function( props, callback ) { - if ( isFunction( props ) ) { - callback = props; - props = [ "*" ]; - } else { - props = props.match( rnothtmlwhite ); - } - - var prop, - index = 0, - length = props.length; - - for ( ; index < length; index++ ) { - prop = props[ index ]; - Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || []; - Animation.tweeners[ prop ].unshift( callback ); - } - }, - 
- prefilters: [ defaultPrefilter ], - - prefilter: function( callback, prepend ) { - if ( prepend ) { - Animation.prefilters.unshift( callback ); - } else { - Animation.prefilters.push( callback ); - } - } -} ); - -jQuery.speed = function( speed, easing, fn ) { - var opt = speed && typeof speed === "object" ? jQuery.extend( {}, speed ) : { - complete: fn || !fn && easing || - isFunction( speed ) && speed, - duration: speed, - easing: fn && easing || easing && !isFunction( easing ) && easing - }; - - // Go to the end state if fx are off - if ( jQuery.fx.off ) { - opt.duration = 0; - - } else { - if ( typeof opt.duration !== "number" ) { - if ( opt.duration in jQuery.fx.speeds ) { - opt.duration = jQuery.fx.speeds[ opt.duration ]; - - } else { - opt.duration = jQuery.fx.speeds._default; - } - } - } - - // Normalize opt.queue - true/undefined/null -> "fx" - if ( opt.queue == null || opt.queue === true ) { - opt.queue = "fx"; - } - - // Queueing - opt.old = opt.complete; - - opt.complete = function() { - if ( isFunction( opt.old ) ) { - opt.old.call( this ); - } - - if ( opt.queue ) { - jQuery.dequeue( this, opt.queue ); - } - }; - - return opt; -}; - -jQuery.fn.extend( { - fadeTo: function( speed, to, easing, callback ) { - - // Show any hidden elements after setting opacity to 0 - return this.filter( isHiddenWithinTree ).css( "opacity", 0 ).show() - - // Animate to the value specified - .end().animate( { opacity: to }, speed, easing, callback ); - }, - animate: function( prop, speed, easing, callback ) { - var empty = jQuery.isEmptyObject( prop ), - optall = jQuery.speed( speed, easing, callback ), - doAnimation = function() { - - // Operate on a copy of prop so per-property easing won't be lost - var anim = Animation( this, jQuery.extend( {}, prop ), optall ); - - // Empty animations, or finishing resolves immediately - if ( empty || dataPriv.get( this, "finish" ) ) { - anim.stop( true ); - } - }; - - doAnimation.finish = doAnimation; - - return empty || 
optall.queue === false ? - this.each( doAnimation ) : - this.queue( optall.queue, doAnimation ); - }, - stop: function( type, clearQueue, gotoEnd ) { - var stopQueue = function( hooks ) { - var stop = hooks.stop; - delete hooks.stop; - stop( gotoEnd ); - }; - - if ( typeof type !== "string" ) { - gotoEnd = clearQueue; - clearQueue = type; - type = undefined; - } - if ( clearQueue ) { - this.queue( type || "fx", [] ); - } - - return this.each( function() { - var dequeue = true, - index = type != null && type + "queueHooks", - timers = jQuery.timers, - data = dataPriv.get( this ); - - if ( index ) { - if ( data[ index ] && data[ index ].stop ) { - stopQueue( data[ index ] ); - } - } else { - for ( index in data ) { - if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) { - stopQueue( data[ index ] ); - } - } - } - - for ( index = timers.length; index--; ) { - if ( timers[ index ].elem === this && - ( type == null || timers[ index ].queue === type ) ) { - - timers[ index ].anim.stop( gotoEnd ); - dequeue = false; - timers.splice( index, 1 ); - } - } - - // Start the next in the queue if the last step wasn't forced. - // Timers currently will call their complete callbacks, which - // will dequeue but only if they were gotoEnd. - if ( dequeue || !gotoEnd ) { - jQuery.dequeue( this, type ); - } - } ); - }, - finish: function( type ) { - if ( type !== false ) { - type = type || "fx"; - } - return this.each( function() { - var index, - data = dataPriv.get( this ), - queue = data[ type + "queue" ], - hooks = data[ type + "queueHooks" ], - timers = jQuery.timers, - length = queue ? 
queue.length : 0; - - // Enable finishing flag on private data - data.finish = true; - - // Empty the queue first - jQuery.queue( this, type, [] ); - - if ( hooks && hooks.stop ) { - hooks.stop.call( this, true ); - } - - // Look for any active animations, and finish them - for ( index = timers.length; index--; ) { - if ( timers[ index ].elem === this && timers[ index ].queue === type ) { - timers[ index ].anim.stop( true ); - timers.splice( index, 1 ); - } - } - - // Look for any animations in the old queue and finish them - for ( index = 0; index < length; index++ ) { - if ( queue[ index ] && queue[ index ].finish ) { - queue[ index ].finish.call( this ); - } - } - - // Turn off finishing flag - delete data.finish; - } ); - } -} ); - -jQuery.each( [ "toggle", "show", "hide" ], function( _i, name ) { - var cssFn = jQuery.fn[ name ]; - jQuery.fn[ name ] = function( speed, easing, callback ) { - return speed == null || typeof speed === "boolean" ? - cssFn.apply( this, arguments ) : - this.animate( genFx( name, true ), speed, easing, callback ); - }; -} ); - -// Generate shortcuts for custom animations -jQuery.each( { - slideDown: genFx( "show" ), - slideUp: genFx( "hide" ), - slideToggle: genFx( "toggle" ), - fadeIn: { opacity: "show" }, - fadeOut: { opacity: "hide" }, - fadeToggle: { opacity: "toggle" } -}, function( name, props ) { - jQuery.fn[ name ] = function( speed, easing, callback ) { - return this.animate( props, speed, easing, callback ); - }; -} ); - -jQuery.timers = []; -jQuery.fx.tick = function() { - var timer, - i = 0, - timers = jQuery.timers; - - fxNow = Date.now(); - - for ( ; i < timers.length; i++ ) { - timer = timers[ i ]; - - // Run the timer and safely remove it when done (allowing for external removal) - if ( !timer() && timers[ i ] === timer ) { - timers.splice( i--, 1 ); - } - } - - if ( !timers.length ) { - jQuery.fx.stop(); - } - fxNow = undefined; -}; - -jQuery.fx.timer = function( timer ) { - jQuery.timers.push( timer ); - 
jQuery.fx.start(); -}; - -jQuery.fx.interval = 13; -jQuery.fx.start = function() { - if ( inProgress ) { - return; - } - - inProgress = true; - schedule(); -}; - -jQuery.fx.stop = function() { - inProgress = null; -}; - -jQuery.fx.speeds = { - slow: 600, - fast: 200, - - // Default speed - _default: 400 -}; - - -// Based off of the plugin by Clint Helfers, with permission. -// https://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/ -jQuery.fn.delay = function( time, type ) { - time = jQuery.fx ? jQuery.fx.speeds[ time ] || time : time; - type = type || "fx"; - - return this.queue( type, function( next, hooks ) { - var timeout = window.setTimeout( next, time ); - hooks.stop = function() { - window.clearTimeout( timeout ); - }; - } ); -}; - - -( function() { - var input = document.createElement( "input" ), - select = document.createElement( "select" ), - opt = select.appendChild( document.createElement( "option" ) ); - - input.type = "checkbox"; - - // Support: Android <=4.3 only - // Default value for a checkbox should be "on" - support.checkOn = input.value !== ""; - - // Support: IE <=11 only - // Must access selectedIndex to make default options select - support.optSelected = opt.selected; - - // Support: IE <=11 only - // An input loses its value after becoming a radio - input = document.createElement( "input" ); - input.value = "t"; - input.type = "radio"; - support.radioValue = input.value === "t"; -} )(); - - -var boolHook, - attrHandle = jQuery.expr.attrHandle; - -jQuery.fn.extend( { - attr: function( name, value ) { - return access( this, jQuery.attr, name, value, arguments.length > 1 ); - }, - - removeAttr: function( name ) { - return this.each( function() { - jQuery.removeAttr( this, name ); - } ); - } -} ); - -jQuery.extend( { - attr: function( elem, name, value ) { - var ret, hooks, - nType = elem.nodeType; - - // Don't get/set attributes on text, comment and attribute nodes - if ( nType === 3 || nType === 8 || 
nType === 2 ) { - return; - } - - // Fallback to prop when attributes are not supported - if ( typeof elem.getAttribute === "undefined" ) { - return jQuery.prop( elem, name, value ); - } - - // Attribute hooks are determined by the lowercase version - // Grab necessary hook if one is defined - if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { - hooks = jQuery.attrHooks[ name.toLowerCase() ] || - ( jQuery.expr.match.bool.test( name ) ? boolHook : undefined ); - } - - if ( value !== undefined ) { - if ( value === null ) { - jQuery.removeAttr( elem, name ); - return; - } - - if ( hooks && "set" in hooks && - ( ret = hooks.set( elem, value, name ) ) !== undefined ) { - return ret; - } - - elem.setAttribute( name, value + "" ); - return value; - } - - if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { - return ret; - } - - ret = jQuery.find.attr( elem, name ); - - // Non-existent attributes return null, we normalize to undefined - return ret == null ? undefined : ret; - }, - - attrHooks: { - type: { - set: function( elem, value ) { - if ( !support.radioValue && value === "radio" && - nodeName( elem, "input" ) ) { - var val = elem.value; - elem.setAttribute( "type", value ); - if ( val ) { - elem.value = val; - } - return value; - } - } - } - }, - - removeAttr: function( elem, value ) { - var name, - i = 0, - - // Attribute names can contain non-HTML whitespace characters - // https://html.spec.whatwg.org/multipage/syntax.html#attributes-2 - attrNames = value && value.match( rnothtmlwhite ); - - if ( attrNames && elem.nodeType === 1 ) { - while ( ( name = attrNames[ i++ ] ) ) { - elem.removeAttribute( name ); - } - } - } -} ); - -// Hooks for boolean attributes -boolHook = { - set: function( elem, value, name ) { - if ( value === false ) { - - // Remove boolean attributes when set to false - jQuery.removeAttr( elem, name ); - } else { - elem.setAttribute( name, name ); - } - return name; - } -}; - -jQuery.each( 
jQuery.expr.match.bool.source.match( /\w+/g ), function( _i, name ) { - var getter = attrHandle[ name ] || jQuery.find.attr; - - attrHandle[ name ] = function( elem, name, isXML ) { - var ret, handle, - lowercaseName = name.toLowerCase(); - - if ( !isXML ) { - - // Avoid an infinite loop by temporarily removing this function from the getter - handle = attrHandle[ lowercaseName ]; - attrHandle[ lowercaseName ] = ret; - ret = getter( elem, name, isXML ) != null ? - lowercaseName : - null; - attrHandle[ lowercaseName ] = handle; - } - return ret; - }; -} ); - - - - -var rfocusable = /^(?:input|select|textarea|button)$/i, - rclickable = /^(?:a|area)$/i; - -jQuery.fn.extend( { - prop: function( name, value ) { - return access( this, jQuery.prop, name, value, arguments.length > 1 ); - }, - - removeProp: function( name ) { - return this.each( function() { - delete this[ jQuery.propFix[ name ] || name ]; - } ); - } -} ); - -jQuery.extend( { - prop: function( elem, name, value ) { - var ret, hooks, - nType = elem.nodeType; - - // Don't get/set properties on text, comment and attribute nodes - if ( nType === 3 || nType === 8 || nType === 2 ) { - return; - } - - if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { - - // Fix name and attach hooks - name = jQuery.propFix[ name ] || name; - hooks = jQuery.propHooks[ name ]; - } - - if ( value !== undefined ) { - if ( hooks && "set" in hooks && - ( ret = hooks.set( elem, value, name ) ) !== undefined ) { - return ret; - } - - return ( elem[ name ] = value ); - } - - if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { - return ret; - } - - return elem[ name ]; - }, - - propHooks: { - tabIndex: { - get: function( elem ) { - - // Support: IE <=9 - 11 only - // elem.tabIndex doesn't always return the - // correct value when it hasn't been explicitly set - // https://web.archive.org/web/20141116233347/http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ - // Use 
proper attribute retrieval(#12072) - var tabindex = jQuery.find.attr( elem, "tabindex" ); - - if ( tabindex ) { - return parseInt( tabindex, 10 ); - } - - if ( - rfocusable.test( elem.nodeName ) || - rclickable.test( elem.nodeName ) && - elem.href - ) { - return 0; - } - - return -1; - } - } - }, - - propFix: { - "for": "htmlFor", - "class": "className" - } -} ); - -// Support: IE <=11 only -// Accessing the selectedIndex property -// forces the browser to respect setting selected -// on the option -// The getter ensures a default option is selected -// when in an optgroup -// eslint rule "no-unused-expressions" is disabled for this code -// since it considers such accessions noop -if ( !support.optSelected ) { - jQuery.propHooks.selected = { - get: function( elem ) { - - /* eslint no-unused-expressions: "off" */ - - var parent = elem.parentNode; - if ( parent && parent.parentNode ) { - parent.parentNode.selectedIndex; - } - return null; - }, - set: function( elem ) { - - /* eslint no-unused-expressions: "off" */ - - var parent = elem.parentNode; - if ( parent ) { - parent.selectedIndex; - - if ( parent.parentNode ) { - parent.parentNode.selectedIndex; - } - } - } - }; -} - -jQuery.each( [ - "tabIndex", - "readOnly", - "maxLength", - "cellSpacing", - "cellPadding", - "rowSpan", - "colSpan", - "useMap", - "frameBorder", - "contentEditable" -], function() { - jQuery.propFix[ this.toLowerCase() ] = this; -} ); - - - - - // Strip and collapse whitespace according to HTML spec - // https://infra.spec.whatwg.org/#strip-and-collapse-ascii-whitespace - function stripAndCollapse( value ) { - var tokens = value.match( rnothtmlwhite ) || []; - return tokens.join( " " ); - } - - -function getClass( elem ) { - return elem.getAttribute && elem.getAttribute( "class" ) || ""; -} - -function classesToArray( value ) { - if ( Array.isArray( value ) ) { - return value; - } - if ( typeof value === "string" ) { - return value.match( rnothtmlwhite ) || []; - } - return []; -} - 
-jQuery.fn.extend( { - addClass: function( value ) { - var classes, elem, cur, curValue, clazz, j, finalValue, - i = 0; - - if ( isFunction( value ) ) { - return this.each( function( j ) { - jQuery( this ).addClass( value.call( this, j, getClass( this ) ) ); - } ); - } - - classes = classesToArray( value ); - - if ( classes.length ) { - while ( ( elem = this[ i++ ] ) ) { - curValue = getClass( elem ); - cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); - - if ( cur ) { - j = 0; - while ( ( clazz = classes[ j++ ] ) ) { - if ( cur.indexOf( " " + clazz + " " ) < 0 ) { - cur += clazz + " "; - } - } - - // Only assign if different to avoid unneeded rendering. - finalValue = stripAndCollapse( cur ); - if ( curValue !== finalValue ) { - elem.setAttribute( "class", finalValue ); - } - } - } - } - - return this; - }, - - removeClass: function( value ) { - var classes, elem, cur, curValue, clazz, j, finalValue, - i = 0; - - if ( isFunction( value ) ) { - return this.each( function( j ) { - jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) ); - } ); - } - - if ( !arguments.length ) { - return this.attr( "class", "" ); - } - - classes = classesToArray( value ); - - if ( classes.length ) { - while ( ( elem = this[ i++ ] ) ) { - curValue = getClass( elem ); - - // This expression is here for better compressibility (see addClass) - cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); - - if ( cur ) { - j = 0; - while ( ( clazz = classes[ j++ ] ) ) { - - // Remove *all* instances - while ( cur.indexOf( " " + clazz + " " ) > -1 ) { - cur = cur.replace( " " + clazz + " ", " " ); - } - } - - // Only assign if different to avoid unneeded rendering. 
- finalValue = stripAndCollapse( cur ); - if ( curValue !== finalValue ) { - elem.setAttribute( "class", finalValue ); - } - } - } - } - - return this; - }, - - toggleClass: function( value, stateVal ) { - var type = typeof value, - isValidValue = type === "string" || Array.isArray( value ); - - if ( typeof stateVal === "boolean" && isValidValue ) { - return stateVal ? this.addClass( value ) : this.removeClass( value ); - } - - if ( isFunction( value ) ) { - return this.each( function( i ) { - jQuery( this ).toggleClass( - value.call( this, i, getClass( this ), stateVal ), - stateVal - ); - } ); - } - - return this.each( function() { - var className, i, self, classNames; - - if ( isValidValue ) { - - // Toggle individual class names - i = 0; - self = jQuery( this ); - classNames = classesToArray( value ); - - while ( ( className = classNames[ i++ ] ) ) { - - // Check each className given, space separated list - if ( self.hasClass( className ) ) { - self.removeClass( className ); - } else { - self.addClass( className ); - } - } - - // Toggle whole class name - } else if ( value === undefined || type === "boolean" ) { - className = getClass( this ); - if ( className ) { - - // Store className if set - dataPriv.set( this, "__className__", className ); - } - - // If the element has a class name or if we're passed `false`, - // then remove the whole classname (if there was one, the above saved it). - // Otherwise bring back whatever was previously saved (if anything), - // falling back to the empty string if nothing was stored. - if ( this.setAttribute ) { - this.setAttribute( "class", - className || value === false ? 
- "" : - dataPriv.get( this, "__className__" ) || "" - ); - } - } - } ); - }, - - hasClass: function( selector ) { - var className, elem, - i = 0; - - className = " " + selector + " "; - while ( ( elem = this[ i++ ] ) ) { - if ( elem.nodeType === 1 && - ( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) { - return true; - } - } - - return false; - } -} ); - - - - -var rreturn = /\r/g; - -jQuery.fn.extend( { - val: function( value ) { - var hooks, ret, valueIsFunction, - elem = this[ 0 ]; - - if ( !arguments.length ) { - if ( elem ) { - hooks = jQuery.valHooks[ elem.type ] || - jQuery.valHooks[ elem.nodeName.toLowerCase() ]; - - if ( hooks && - "get" in hooks && - ( ret = hooks.get( elem, "value" ) ) !== undefined - ) { - return ret; - } - - ret = elem.value; - - // Handle most common string cases - if ( typeof ret === "string" ) { - return ret.replace( rreturn, "" ); - } - - // Handle cases where value is null/undef or number - return ret == null ? "" : ret; - } - - return; - } - - valueIsFunction = isFunction( value ); - - return this.each( function( i ) { - var val; - - if ( this.nodeType !== 1 ) { - return; - } - - if ( valueIsFunction ) { - val = value.call( this, i, jQuery( this ).val() ); - } else { - val = value; - } - - // Treat null/undefined as ""; convert numbers to string - if ( val == null ) { - val = ""; - - } else if ( typeof val === "number" ) { - val += ""; - - } else if ( Array.isArray( val ) ) { - val = jQuery.map( val, function( value ) { - return value == null ? "" : value + ""; - } ); - } - - hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; - - // If set returns undefined, fall back to normal setting - if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) { - this.value = val; - } - } ); - } -} ); - -jQuery.extend( { - valHooks: { - option: { - get: function( elem ) { - - var val = jQuery.find.attr( elem, "value" ); - return val != null ? 
- val : - - // Support: IE <=10 - 11 only - // option.text throws exceptions (#14686, #14858) - // Strip and collapse whitespace - // https://html.spec.whatwg.org/#strip-and-collapse-whitespace - stripAndCollapse( jQuery.text( elem ) ); - } - }, - select: { - get: function( elem ) { - var value, option, i, - options = elem.options, - index = elem.selectedIndex, - one = elem.type === "select-one", - values = one ? null : [], - max = one ? index + 1 : options.length; - - if ( index < 0 ) { - i = max; - - } else { - i = one ? index : 0; - } - - // Loop through all the selected options - for ( ; i < max; i++ ) { - option = options[ i ]; - - // Support: IE <=9 only - // IE8-9 doesn't update selected after form reset (#2551) - if ( ( option.selected || i === index ) && - - // Don't return options that are disabled or in a disabled optgroup - !option.disabled && - ( !option.parentNode.disabled || - !nodeName( option.parentNode, "optgroup" ) ) ) { - - // Get the specific value for the option - value = jQuery( option ).val(); - - // We don't need an array for one selects - if ( one ) { - return value; - } - - // Multi-Selects return an array - values.push( value ); - } - } - - return values; - }, - - set: function( elem, value ) { - var optionSet, option, - options = elem.options, - values = jQuery.makeArray( value ), - i = options.length; - - while ( i-- ) { - option = options[ i ]; - - /* eslint-disable no-cond-assign */ - - if ( option.selected = - jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 - ) { - optionSet = true; - } - - /* eslint-enable no-cond-assign */ - } - - // Force browsers to behave consistently when non-matching value is set - if ( !optionSet ) { - elem.selectedIndex = -1; - } - return values; - } - } - } -} ); - -// Radios and checkboxes getter/setter -jQuery.each( [ "radio", "checkbox" ], function() { - jQuery.valHooks[ this ] = { - set: function( elem, value ) { - if ( Array.isArray( value ) ) { - return ( elem.checked = 
jQuery.inArray( jQuery( elem ).val(), value ) > -1 ); - } - } - }; - if ( !support.checkOn ) { - jQuery.valHooks[ this ].get = function( elem ) { - return elem.getAttribute( "value" ) === null ? "on" : elem.value; - }; - } -} ); - - - - -// Return jQuery for attributes-only inclusion - - -support.focusin = "onfocusin" in window; - - -var rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, - stopPropagationCallback = function( e ) { - e.stopPropagation(); - }; - -jQuery.extend( jQuery.event, { - - trigger: function( event, data, elem, onlyHandlers ) { - - var i, cur, tmp, bubbleType, ontype, handle, special, lastElement, - eventPath = [ elem || document ], - type = hasOwn.call( event, "type" ) ? event.type : event, - namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : []; - - cur = lastElement = tmp = elem = elem || document; - - // Don't do events on text and comment nodes - if ( elem.nodeType === 3 || elem.nodeType === 8 ) { - return; - } - - // focus/blur morphs to focusin/out; ensure we're not firing them right now - if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { - return; - } - - if ( type.indexOf( "." ) > -1 ) { - - // Namespaced trigger; create a regexp to match event type in handle() - namespaces = type.split( "." ); - type = namespaces.shift(); - namespaces.sort(); - } - ontype = type.indexOf( ":" ) < 0 && "on" + type; - - // Caller can pass in a jQuery.Event object, Object, or just an event type string - event = event[ jQuery.expando ] ? - event : - new jQuery.Event( type, typeof event === "object" && event ); - - // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) - event.isTrigger = onlyHandlers ? 2 : 3; - event.namespace = namespaces.join( "." ); - event.rnamespace = event.namespace ? 
- new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) : - null; - - // Clean up the event in case it is being reused - event.result = undefined; - if ( !event.target ) { - event.target = elem; - } - - // Clone any incoming data and prepend the event, creating the handler arg list - data = data == null ? - [ event ] : - jQuery.makeArray( data, [ event ] ); - - // Allow special events to draw outside the lines - special = jQuery.event.special[ type ] || {}; - if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { - return; - } - - // Determine event propagation path in advance, per W3C events spec (#9951) - // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) - if ( !onlyHandlers && !special.noBubble && !isWindow( elem ) ) { - - bubbleType = special.delegateType || type; - if ( !rfocusMorph.test( bubbleType + type ) ) { - cur = cur.parentNode; - } - for ( ; cur; cur = cur.parentNode ) { - eventPath.push( cur ); - tmp = cur; - } - - // Only add window if we got to document (e.g., not plain obj or detached DOM) - if ( tmp === ( elem.ownerDocument || document ) ) { - eventPath.push( tmp.defaultView || tmp.parentWindow || window ); - } - } - - // Fire handlers on the event path - i = 0; - while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) { - lastElement = cur; - event.type = i > 1 ? 
- bubbleType : - special.bindType || type; - - // jQuery handler - handle = ( dataPriv.get( cur, "events" ) || Object.create( null ) )[ event.type ] && - dataPriv.get( cur, "handle" ); - if ( handle ) { - handle.apply( cur, data ); - } - - // Native handler - handle = ontype && cur[ ontype ]; - if ( handle && handle.apply && acceptData( cur ) ) { - event.result = handle.apply( cur, data ); - if ( event.result === false ) { - event.preventDefault(); - } - } - } - event.type = type; - - // If nobody prevented the default action, do it now - if ( !onlyHandlers && !event.isDefaultPrevented() ) { - - if ( ( !special._default || - special._default.apply( eventPath.pop(), data ) === false ) && - acceptData( elem ) ) { - - // Call a native DOM method on the target with the same name as the event. - // Don't do default actions on window, that's where global variables be (#6170) - if ( ontype && isFunction( elem[ type ] ) && !isWindow( elem ) ) { - - // Don't re-trigger an onFOO event when we call its FOO() method - tmp = elem[ ontype ]; - - if ( tmp ) { - elem[ ontype ] = null; - } - - // Prevent re-triggering of the same event, since we already bubbled it above - jQuery.event.triggered = type; - - if ( event.isPropagationStopped() ) { - lastElement.addEventListener( type, stopPropagationCallback ); - } - - elem[ type ](); - - if ( event.isPropagationStopped() ) { - lastElement.removeEventListener( type, stopPropagationCallback ); - } - - jQuery.event.triggered = undefined; - - if ( tmp ) { - elem[ ontype ] = tmp; - } - } - } - } - - return event.result; - }, - - // Piggyback on a donor event to simulate a different one - // Used only for `focus(in | out)` events - simulate: function( type, elem, event ) { - var e = jQuery.extend( - new jQuery.Event(), - event, - { - type: type, - isSimulated: true - } - ); - - jQuery.event.trigger( e, null, elem ); - } - -} ); - -jQuery.fn.extend( { - - trigger: function( type, data ) { - return this.each( function() { - 
jQuery.event.trigger( type, data, this ); - } ); - }, - triggerHandler: function( type, data ) { - var elem = this[ 0 ]; - if ( elem ) { - return jQuery.event.trigger( type, data, elem, true ); - } - } -} ); - - -// Support: Firefox <=44 -// Firefox doesn't have focus(in | out) events -// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787 -// -// Support: Chrome <=48 - 49, Safari <=9.0 - 9.1 -// focus(in | out) events fire after focus & blur events, -// which is spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order -// Related ticket - https://bugs.chromium.org/p/chromium/issues/detail?id=449857 -if ( !support.focusin ) { - jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) { - - // Attach a single capturing handler on the document while someone wants focusin/focusout - var handler = function( event ) { - jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) ); - }; - - jQuery.event.special[ fix ] = { - setup: function() { - - // Handle: regular nodes (via `this.ownerDocument`), window - // (via `this.document`) & document (via `this`). 
- var doc = this.ownerDocument || this.document || this, - attaches = dataPriv.access( doc, fix ); - - if ( !attaches ) { - doc.addEventListener( orig, handler, true ); - } - dataPriv.access( doc, fix, ( attaches || 0 ) + 1 ); - }, - teardown: function() { - var doc = this.ownerDocument || this.document || this, - attaches = dataPriv.access( doc, fix ) - 1; - - if ( !attaches ) { - doc.removeEventListener( orig, handler, true ); - dataPriv.remove( doc, fix ); - - } else { - dataPriv.access( doc, fix, attaches ); - } - } - }; - } ); -} -var location = window.location; - -var nonce = { guid: Date.now() }; - -var rquery = ( /\?/ ); - - - -// Cross-browser xml parsing -jQuery.parseXML = function( data ) { - var xml, parserErrorElem; - if ( !data || typeof data !== "string" ) { - return null; - } - - // Support: IE 9 - 11 only - // IE throws on parseFromString with invalid input. - try { - xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" ); - } catch ( e ) {} - - parserErrorElem = xml && xml.getElementsByTagName( "parsererror" )[ 0 ]; - if ( !xml || parserErrorElem ) { - jQuery.error( "Invalid XML: " + ( - parserErrorElem ? - jQuery.map( parserErrorElem.childNodes, function( el ) { - return el.textContent; - } ).join( "\n" ) : - data - ) ); - } - return xml; -}; - - -var - rbracket = /\[\]$/, - rCRLF = /\r?\n/g, - rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i, - rsubmittable = /^(?:input|select|textarea|keygen)/i; - -function buildParams( prefix, obj, traditional, add ) { - var name; - - if ( Array.isArray( obj ) ) { - - // Serialize array item. - jQuery.each( obj, function( i, v ) { - if ( traditional || rbracket.test( prefix ) ) { - - // Treat each array item as a scalar. - add( prefix, v ); - - } else { - - // Item is non-scalar (array or object), encode its numeric index. - buildParams( - prefix + "[" + ( typeof v === "object" && v != null ? 
i : "" ) + "]", - v, - traditional, - add - ); - } - } ); - - } else if ( !traditional && toType( obj ) === "object" ) { - - // Serialize object item. - for ( name in obj ) { - buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); - } - - } else { - - // Serialize scalar item. - add( prefix, obj ); - } -} - -// Serialize an array of form elements or a set of -// key/values into a query string -jQuery.param = function( a, traditional ) { - var prefix, - s = [], - add = function( key, valueOrFunction ) { - - // If value is a function, invoke it and use its return value - var value = isFunction( valueOrFunction ) ? - valueOrFunction() : - valueOrFunction; - - s[ s.length ] = encodeURIComponent( key ) + "=" + - encodeURIComponent( value == null ? "" : value ); - }; - - if ( a == null ) { - return ""; - } - - // If an array was passed in, assume that it is an array of form elements. - if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { - - // Serialize the form elements - jQuery.each( a, function() { - add( this.name, this.value ); - } ); - - } else { - - // If traditional, encode the "old" way (the way 1.3.2 or older - // did it), otherwise encode params recursively. - for ( prefix in a ) { - buildParams( prefix, a[ prefix ], traditional, add ); - } - } - - // Return the resulting serialization - return s.join( "&" ); -}; - -jQuery.fn.extend( { - serialize: function() { - return jQuery.param( this.serializeArray() ); - }, - serializeArray: function() { - return this.map( function() { - - // Can add propHook for "elements" to filter or add form elements - var elements = jQuery.prop( this, "elements" ); - return elements ? 
jQuery.makeArray( elements ) : this; - } ).filter( function() { - var type = this.type; - - // Use .is( ":disabled" ) so that fieldset[disabled] works - return this.name && !jQuery( this ).is( ":disabled" ) && - rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) && - ( this.checked || !rcheckableType.test( type ) ); - } ).map( function( _i, elem ) { - var val = jQuery( this ).val(); - - if ( val == null ) { - return null; - } - - if ( Array.isArray( val ) ) { - return jQuery.map( val, function( val ) { - return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; - } ); - } - - return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; - } ).get(); - } -} ); - - -var - r20 = /%20/g, - rhash = /#.*$/, - rantiCache = /([?&])_=[^&]*/, - rheaders = /^(.*?):[ \t]*([^\r\n]*)$/mg, - - // #7653, #8125, #8152: local protocol detection - rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/, - rnoContent = /^(?:GET|HEAD)$/, - rprotocol = /^\/\//, - - /* Prefilters - * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example) - * 2) These are called: - * - BEFORE asking for a transport - * - AFTER param serialization (s.data is a string if s.processData is true) - * 3) key is the dataType - * 4) the catchall symbol "*" can be used - * 5) execution will start with transport dataType and THEN continue down to "*" if needed - */ - prefilters = {}, - - /* Transports bindings - * 1) key is the dataType - * 2) the catchall symbol "*" can be used - * 3) selection will start with transport dataType and THEN go to "*" if needed - */ - transports = {}, - - // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression - allTypes = "*/".concat( "*" ), - - // Anchor tag for parsing the document origin - originAnchor = document.createElement( "a" ); - -originAnchor.href = location.href; - -// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport -function 
addToPrefiltersOrTransports( structure ) { - - // dataTypeExpression is optional and defaults to "*" - return function( dataTypeExpression, func ) { - - if ( typeof dataTypeExpression !== "string" ) { - func = dataTypeExpression; - dataTypeExpression = "*"; - } - - var dataType, - i = 0, - dataTypes = dataTypeExpression.toLowerCase().match( rnothtmlwhite ) || []; - - if ( isFunction( func ) ) { - - // For each dataType in the dataTypeExpression - while ( ( dataType = dataTypes[ i++ ] ) ) { - - // Prepend if requested - if ( dataType[ 0 ] === "+" ) { - dataType = dataType.slice( 1 ) || "*"; - ( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func ); - - // Otherwise append - } else { - ( structure[ dataType ] = structure[ dataType ] || [] ).push( func ); - } - } - } - }; -} - -// Base inspection function for prefilters and transports -function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) { - - var inspected = {}, - seekingTransport = ( structure === transports ); - - function inspect( dataType ) { - var selected; - inspected[ dataType ] = true; - jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) { - var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR ); - if ( typeof dataTypeOrTransport === "string" && - !seekingTransport && !inspected[ dataTypeOrTransport ] ) { - - options.dataTypes.unshift( dataTypeOrTransport ); - inspect( dataTypeOrTransport ); - return false; - } else if ( seekingTransport ) { - return !( selected = dataTypeOrTransport ); - } - } ); - return selected; - } - - return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" ); -} - -// A special extend for ajax options -// that takes "flat" options (not to be deep extended) -// Fixes #9887 -function ajaxExtend( target, src ) { - var key, deep, - flatOptions = jQuery.ajaxSettings.flatOptions || {}; - - for ( key in src ) { - if ( src[ key ] !== undefined ) { - ( flatOptions[ key ] ? 
target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ]; - } - } - if ( deep ) { - jQuery.extend( true, target, deep ); - } - - return target; -} - -/* Handles responses to an ajax request: - * - finds the right dataType (mediates between content-type and expected dataType) - * - returns the corresponding response - */ -function ajaxHandleResponses( s, jqXHR, responses ) { - - var ct, type, finalDataType, firstDataType, - contents = s.contents, - dataTypes = s.dataTypes; - - // Remove auto dataType and get content-type in the process - while ( dataTypes[ 0 ] === "*" ) { - dataTypes.shift(); - if ( ct === undefined ) { - ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" ); - } - } - - // Check if we're dealing with a known content-type - if ( ct ) { - for ( type in contents ) { - if ( contents[ type ] && contents[ type ].test( ct ) ) { - dataTypes.unshift( type ); - break; - } - } - } - - // Check to see if we have a response for the expected dataType - if ( dataTypes[ 0 ] in responses ) { - finalDataType = dataTypes[ 0 ]; - } else { - - // Try convertible dataTypes - for ( type in responses ) { - if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) { - finalDataType = type; - break; - } - if ( !firstDataType ) { - firstDataType = type; - } - } - - // Or just use first one - finalDataType = finalDataType || firstDataType; - } - - // If we found a dataType - // We add the dataType to the list if needed - // and return the corresponding response - if ( finalDataType ) { - if ( finalDataType !== dataTypes[ 0 ] ) { - dataTypes.unshift( finalDataType ); - } - return responses[ finalDataType ]; - } -} - -/* Chain conversions given the request and the original response - * Also sets the responseXXX fields on the jqXHR instance - */ -function ajaxConvert( s, response, jqXHR, isSuccess ) { - var conv2, current, conv, tmp, prev, - converters = {}, - - // Work with a copy of dataTypes in case we need to modify it for conversion - dataTypes = 
s.dataTypes.slice(); - - // Create converters map with lowercased keys - if ( dataTypes[ 1 ] ) { - for ( conv in s.converters ) { - converters[ conv.toLowerCase() ] = s.converters[ conv ]; - } - } - - current = dataTypes.shift(); - - // Convert to each sequential dataType - while ( current ) { - - if ( s.responseFields[ current ] ) { - jqXHR[ s.responseFields[ current ] ] = response; - } - - // Apply the dataFilter if provided - if ( !prev && isSuccess && s.dataFilter ) { - response = s.dataFilter( response, s.dataType ); - } - - prev = current; - current = dataTypes.shift(); - - if ( current ) { - - // There's only work to do if current dataType is non-auto - if ( current === "*" ) { - - current = prev; - - // Convert response if prev dataType is non-auto and differs from current - } else if ( prev !== "*" && prev !== current ) { - - // Seek a direct converter - conv = converters[ prev + " " + current ] || converters[ "* " + current ]; - - // If none found, seek a pair - if ( !conv ) { - for ( conv2 in converters ) { - - // If conv2 outputs current - tmp = conv2.split( " " ); - if ( tmp[ 1 ] === current ) { - - // If prev can be converted to accepted input - conv = converters[ prev + " " + tmp[ 0 ] ] || - converters[ "* " + tmp[ 0 ] ]; - if ( conv ) { - - // Condense equivalence converters - if ( conv === true ) { - conv = converters[ conv2 ]; - - // Otherwise, insert the intermediate dataType - } else if ( converters[ conv2 ] !== true ) { - current = tmp[ 0 ]; - dataTypes.unshift( tmp[ 1 ] ); - } - break; - } - } - } - } - - // Apply converter (if not an equivalence) - if ( conv !== true ) { - - // Unless errors are allowed to bubble, catch and return them - if ( conv && s.throws ) { - response = conv( response ); - } else { - try { - response = conv( response ); - } catch ( e ) { - return { - state: "parsererror", - error: conv ? 
e : "No conversion from " + prev + " to " + current - }; - } - } - } - } - } - } - - return { state: "success", data: response }; -} - -jQuery.extend( { - - // Counter for holding the number of active queries - active: 0, - - // Last-Modified header cache for next request - lastModified: {}, - etag: {}, - - ajaxSettings: { - url: location.href, - type: "GET", - isLocal: rlocalProtocol.test( location.protocol ), - global: true, - processData: true, - async: true, - contentType: "application/x-www-form-urlencoded; charset=UTF-8", - - /* - timeout: 0, - data: null, - dataType: null, - username: null, - password: null, - cache: null, - throws: false, - traditional: false, - headers: {}, - */ - - accepts: { - "*": allTypes, - text: "text/plain", - html: "text/html", - xml: "application/xml, text/xml", - json: "application/json, text/javascript" - }, - - contents: { - xml: /\bxml\b/, - html: /\bhtml/, - json: /\bjson\b/ - }, - - responseFields: { - xml: "responseXML", - text: "responseText", - json: "responseJSON" - }, - - // Data converters - // Keys separate source (or catchall "*") and destination types with a single space - converters: { - - // Convert anything to text - "* text": String, - - // Text to html (true = no transformation) - "text html": true, - - // Evaluate text as a json expression - "text json": JSON.parse, - - // Parse text as xml - "text xml": jQuery.parseXML - }, - - // For options that shouldn't be deep extended: - // you can add your own custom options here if - // and when you create one that shouldn't be - // deep extended (see ajaxExtend) - flatOptions: { - url: true, - context: true - } - }, - - // Creates a full fledged settings object into target - // with both ajaxSettings and settings fields. - // If target is omitted, writes into ajaxSettings. - ajaxSetup: function( target, settings ) { - return settings ? 
- - // Building a settings object - ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) : - - // Extending ajaxSettings - ajaxExtend( jQuery.ajaxSettings, target ); - }, - - ajaxPrefilter: addToPrefiltersOrTransports( prefilters ), - ajaxTransport: addToPrefiltersOrTransports( transports ), - - // Main method - ajax: function( url, options ) { - - // If url is an object, simulate pre-1.5 signature - if ( typeof url === "object" ) { - options = url; - url = undefined; - } - - // Force options to be an object - options = options || {}; - - var transport, - - // URL without anti-cache param - cacheURL, - - // Response headers - responseHeadersString, - responseHeaders, - - // timeout handle - timeoutTimer, - - // Url cleanup var - urlAnchor, - - // Request state (becomes false upon send and true upon completion) - completed, - - // To know if global events are to be dispatched - fireGlobals, - - // Loop variable - i, - - // uncached part of the url - uncached, - - // Create the final options object - s = jQuery.ajaxSetup( {}, options ), - - // Callbacks context - callbackContext = s.context || s, - - // Context for global events is callbackContext if it is a DOM node or jQuery collection - globalEventContext = s.context && - ( callbackContext.nodeType || callbackContext.jquery ) ? 
- jQuery( callbackContext ) : - jQuery.event, - - // Deferreds - deferred = jQuery.Deferred(), - completeDeferred = jQuery.Callbacks( "once memory" ), - - // Status-dependent callbacks - statusCode = s.statusCode || {}, - - // Headers (they are sent all at once) - requestHeaders = {}, - requestHeadersNames = {}, - - // Default abort message - strAbort = "canceled", - - // Fake xhr - jqXHR = { - readyState: 0, - - // Builds headers hashtable if needed - getResponseHeader: function( key ) { - var match; - if ( completed ) { - if ( !responseHeaders ) { - responseHeaders = {}; - while ( ( match = rheaders.exec( responseHeadersString ) ) ) { - responseHeaders[ match[ 1 ].toLowerCase() + " " ] = - ( responseHeaders[ match[ 1 ].toLowerCase() + " " ] || [] ) - .concat( match[ 2 ] ); - } - } - match = responseHeaders[ key.toLowerCase() + " " ]; - } - return match == null ? null : match.join( ", " ); - }, - - // Raw string - getAllResponseHeaders: function() { - return completed ? responseHeadersString : null; - }, - - // Caches the header - setRequestHeader: function( name, value ) { - if ( completed == null ) { - name = requestHeadersNames[ name.toLowerCase() ] = - requestHeadersNames[ name.toLowerCase() ] || name; - requestHeaders[ name ] = value; - } - return this; - }, - - // Overrides response content-type header - overrideMimeType: function( type ) { - if ( completed == null ) { - s.mimeType = type; - } - return this; - }, - - // Status-dependent callbacks - statusCode: function( map ) { - var code; - if ( map ) { - if ( completed ) { - - // Execute the appropriate callbacks - jqXHR.always( map[ jqXHR.status ] ); - } else { - - // Lazy-add the new callbacks in a way that preserves old ones - for ( code in map ) { - statusCode[ code ] = [ statusCode[ code ], map[ code ] ]; - } - } - } - return this; - }, - - // Cancel the request - abort: function( statusText ) { - var finalText = statusText || strAbort; - if ( transport ) { - transport.abort( finalText ); - } - done( 
0, finalText ); - return this; - } - }; - - // Attach deferreds - deferred.promise( jqXHR ); - - // Add protocol if not provided (prefilters might expect it) - // Handle falsy url in the settings object (#10093: consistency with old signature) - // We also use the url parameter if available - s.url = ( ( url || s.url || location.href ) + "" ) - .replace( rprotocol, location.protocol + "//" ); - - // Alias method option to type as per ticket #12004 - s.type = options.method || options.type || s.method || s.type; - - // Extract dataTypes list - s.dataTypes = ( s.dataType || "*" ).toLowerCase().match( rnothtmlwhite ) || [ "" ]; - - // A cross-domain request is in order when the origin doesn't match the current origin. - if ( s.crossDomain == null ) { - urlAnchor = document.createElement( "a" ); - - // Support: IE <=8 - 11, Edge 12 - 15 - // IE throws exception on accessing the href property if url is malformed, - // e.g. http://example.com:80x/ - try { - urlAnchor.href = s.url; - - // Support: IE <=8 - 11 only - // Anchor's host property isn't correctly set when s.url is relative - urlAnchor.href = urlAnchor.href; - s.crossDomain = originAnchor.protocol + "//" + originAnchor.host !== - urlAnchor.protocol + "//" + urlAnchor.host; - } catch ( e ) { - - // If there is an error parsing the URL, assume it is crossDomain, - // it can be rejected by the transport if it is invalid - s.crossDomain = true; - } - } - - // Convert data if not already a string - if ( s.data && s.processData && typeof s.data !== "string" ) { - s.data = jQuery.param( s.data, s.traditional ); - } - - // Apply prefilters - inspectPrefiltersOrTransports( prefilters, s, options, jqXHR ); - - // If request was aborted inside a prefilter, stop there - if ( completed ) { - return jqXHR; - } - - // We can fire global events as of now if asked to - // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118) - fireGlobals = jQuery.event && s.global; - - // Watch for a new set of 
requests - if ( fireGlobals && jQuery.active++ === 0 ) { - jQuery.event.trigger( "ajaxStart" ); - } - - // Uppercase the type - s.type = s.type.toUpperCase(); - - // Determine if request has content - s.hasContent = !rnoContent.test( s.type ); - - // Save the URL in case we're toying with the If-Modified-Since - // and/or If-None-Match header later on - // Remove hash to simplify url manipulation - cacheURL = s.url.replace( rhash, "" ); - - // More options handling for requests with no content - if ( !s.hasContent ) { - - // Remember the hash so we can put it back - uncached = s.url.slice( cacheURL.length ); - - // If data is available and should be processed, append data to url - if ( s.data && ( s.processData || typeof s.data === "string" ) ) { - cacheURL += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data; - - // #9682: remove data so that it's not used in an eventual retry - delete s.data; - } - - // Add or update anti-cache param if needed - if ( s.cache === false ) { - cacheURL = cacheURL.replace( rantiCache, "$1" ); - uncached = ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ( nonce.guid++ ) + - uncached; - } - - // Put hash and anti-cache on the URL that will be requested (gh-1732) - s.url = cacheURL + uncached; - - // Change '%20' to '+' if this is encoded form body content (gh-2658) - } else if ( s.data && s.processData && - ( s.contentType || "" ).indexOf( "application/x-www-form-urlencoded" ) === 0 ) { - s.data = s.data.replace( r20, "+" ); - } - - // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
- if ( s.ifModified ) { - if ( jQuery.lastModified[ cacheURL ] ) { - jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] ); - } - if ( jQuery.etag[ cacheURL ] ) { - jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] ); - } - } - - // Set the correct header, if data is being sent - if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) { - jqXHR.setRequestHeader( "Content-Type", s.contentType ); - } - - // Set the Accepts header for the server, depending on the dataType - jqXHR.setRequestHeader( - "Accept", - s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ? - s.accepts[ s.dataTypes[ 0 ] ] + - ( s.dataTypes[ 0 ] !== "*" ? ", " + allTypes + "; q=0.01" : "" ) : - s.accepts[ "*" ] - ); - - // Check for headers option - for ( i in s.headers ) { - jqXHR.setRequestHeader( i, s.headers[ i ] ); - } - - // Allow custom headers/mimetypes and early abort - if ( s.beforeSend && - ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || completed ) ) { - - // Abort if not done already and return - return jqXHR.abort(); - } - - // Aborting is no longer a cancellation - strAbort = "abort"; - - // Install callbacks on deferreds - completeDeferred.add( s.complete ); - jqXHR.done( s.success ); - jqXHR.fail( s.error ); - - // Get transport - transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR ); - - // If no transport, we auto-abort - if ( !transport ) { - done( -1, "No Transport" ); - } else { - jqXHR.readyState = 1; - - // Send global event - if ( fireGlobals ) { - globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] ); - } - - // If request was aborted inside ajaxSend, stop there - if ( completed ) { - return jqXHR; - } - - // Timeout - if ( s.async && s.timeout > 0 ) { - timeoutTimer = window.setTimeout( function() { - jqXHR.abort( "timeout" ); - }, s.timeout ); - } - - try { - completed = false; - transport.send( requestHeaders, done ); - } catch ( e ) { - - // Rethrow post-completion 
exceptions - if ( completed ) { - throw e; - } - - // Propagate others as results - done( -1, e ); - } - } - - // Callback for when everything is done - function done( status, nativeStatusText, responses, headers ) { - var isSuccess, success, error, response, modified, - statusText = nativeStatusText; - - // Ignore repeat invocations - if ( completed ) { - return; - } - - completed = true; - - // Clear timeout if it exists - if ( timeoutTimer ) { - window.clearTimeout( timeoutTimer ); - } - - // Dereference transport for early garbage collection - // (no matter how long the jqXHR object will be used) - transport = undefined; - - // Cache response headers - responseHeadersString = headers || ""; - - // Set readyState - jqXHR.readyState = status > 0 ? 4 : 0; - - // Determine if successful - isSuccess = status >= 200 && status < 300 || status === 304; - - // Get response data - if ( responses ) { - response = ajaxHandleResponses( s, jqXHR, responses ); - } - - // Use a noop converter for missing script but not if jsonp - if ( !isSuccess && - jQuery.inArray( "script", s.dataTypes ) > -1 && - jQuery.inArray( "json", s.dataTypes ) < 0 ) { - s.converters[ "text script" ] = function() {}; - } - - // Convert no matter what (that way responseXXX fields are always set) - response = ajaxConvert( s, response, jqXHR, isSuccess ); - - // If successful, handle type chaining - if ( isSuccess ) { - - // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
- if ( s.ifModified ) { - modified = jqXHR.getResponseHeader( "Last-Modified" ); - if ( modified ) { - jQuery.lastModified[ cacheURL ] = modified; - } - modified = jqXHR.getResponseHeader( "etag" ); - if ( modified ) { - jQuery.etag[ cacheURL ] = modified; - } - } - - // if no content - if ( status === 204 || s.type === "HEAD" ) { - statusText = "nocontent"; - - // if not modified - } else if ( status === 304 ) { - statusText = "notmodified"; - - // If we have data, let's convert it - } else { - statusText = response.state; - success = response.data; - error = response.error; - isSuccess = !error; - } - } else { - - // Extract error from statusText and normalize for non-aborts - error = statusText; - if ( status || !statusText ) { - statusText = "error"; - if ( status < 0 ) { - status = 0; - } - } - } - - // Set data for the fake xhr object - jqXHR.status = status; - jqXHR.statusText = ( nativeStatusText || statusText ) + ""; - - // Success/Error - if ( isSuccess ) { - deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] ); - } else { - deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] ); - } - - // Status-dependent callbacks - jqXHR.statusCode( statusCode ); - statusCode = undefined; - - if ( fireGlobals ) { - globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError", - [ jqXHR, s, isSuccess ? 
success : error ] ); - } - - // Complete - completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] ); - - if ( fireGlobals ) { - globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] ); - - // Handle the global AJAX counter - if ( !( --jQuery.active ) ) { - jQuery.event.trigger( "ajaxStop" ); - } - } - } - - return jqXHR; - }, - - getJSON: function( url, data, callback ) { - return jQuery.get( url, data, callback, "json" ); - }, - - getScript: function( url, callback ) { - return jQuery.get( url, undefined, callback, "script" ); - } -} ); - -jQuery.each( [ "get", "post" ], function( _i, method ) { - jQuery[ method ] = function( url, data, callback, type ) { - - // Shift arguments if data argument was omitted - if ( isFunction( data ) ) { - type = type || callback; - callback = data; - data = undefined; - } - - // The url can be an options object (which then must have .url) - return jQuery.ajax( jQuery.extend( { - url: url, - type: method, - dataType: type, - data: data, - success: callback - }, jQuery.isPlainObject( url ) && url ) ); - }; -} ); - -jQuery.ajaxPrefilter( function( s ) { - var i; - for ( i in s.headers ) { - if ( i.toLowerCase() === "content-type" ) { - s.contentType = s.headers[ i ] || ""; - } - } -} ); - - -jQuery._evalUrl = function( url, options, doc ) { - return jQuery.ajax( { - url: url, - - // Make this explicit, since user can override this through ajaxSetup (#11264) - type: "GET", - dataType: "script", - cache: true, - async: false, - global: false, - - // Only evaluate the response if it is successful (gh-4126) - // dataFilter is not invoked for failure responses, so using it instead - // of the default converter is kludgy but it works. 
- converters: { - "text script": function() {} - }, - dataFilter: function( response ) { - jQuery.globalEval( response, options, doc ); - } - } ); -}; - - -jQuery.fn.extend( { - wrapAll: function( html ) { - var wrap; - - if ( this[ 0 ] ) { - if ( isFunction( html ) ) { - html = html.call( this[ 0 ] ); - } - - // The elements to wrap the target around - wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true ); - - if ( this[ 0 ].parentNode ) { - wrap.insertBefore( this[ 0 ] ); - } - - wrap.map( function() { - var elem = this; - - while ( elem.firstElementChild ) { - elem = elem.firstElementChild; - } - - return elem; - } ).append( this ); - } - - return this; - }, - - wrapInner: function( html ) { - if ( isFunction( html ) ) { - return this.each( function( i ) { - jQuery( this ).wrapInner( html.call( this, i ) ); - } ); - } - - return this.each( function() { - var self = jQuery( this ), - contents = self.contents(); - - if ( contents.length ) { - contents.wrapAll( html ); - - } else { - self.append( html ); - } - } ); - }, - - wrap: function( html ) { - var htmlIsFunction = isFunction( html ); - - return this.each( function( i ) { - jQuery( this ).wrapAll( htmlIsFunction ? 
html.call( this, i ) : html ); - } ); - }, - - unwrap: function( selector ) { - this.parent( selector ).not( "body" ).each( function() { - jQuery( this ).replaceWith( this.childNodes ); - } ); - return this; - } -} ); - - -jQuery.expr.pseudos.hidden = function( elem ) { - return !jQuery.expr.pseudos.visible( elem ); -}; -jQuery.expr.pseudos.visible = function( elem ) { - return !!( elem.offsetWidth || elem.offsetHeight || elem.getClientRects().length ); -}; - - - - -jQuery.ajaxSettings.xhr = function() { - try { - return new window.XMLHttpRequest(); - } catch ( e ) {} -}; - -var xhrSuccessStatus = { - - // File protocol always yields status code 0, assume 200 - 0: 200, - - // Support: IE <=9 only - // #1450: sometimes IE returns 1223 when it should be 204 - 1223: 204 - }, - xhrSupported = jQuery.ajaxSettings.xhr(); - -support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported ); -support.ajax = xhrSupported = !!xhrSupported; - -jQuery.ajaxTransport( function( options ) { - var callback, errorCallback; - - // Cross domain only allowed if supported through XMLHttpRequest - if ( support.cors || xhrSupported && !options.crossDomain ) { - return { - send: function( headers, complete ) { - var i, - xhr = options.xhr(); - - xhr.open( - options.type, - options.url, - options.async, - options.username, - options.password - ); - - // Apply custom fields if provided - if ( options.xhrFields ) { - for ( i in options.xhrFields ) { - xhr[ i ] = options.xhrFields[ i ]; - } - } - - // Override mime type if needed - if ( options.mimeType && xhr.overrideMimeType ) { - xhr.overrideMimeType( options.mimeType ); - } - - // X-Requested-With header - // For cross-domain requests, seeing as conditions for a preflight are - // akin to a jigsaw puzzle, we simply never set it to be sure. - // (it can always be set on a per-request basis or even using ajaxSetup) - // For same-domain requests, won't change header if already provided. 
- if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) { - headers[ "X-Requested-With" ] = "XMLHttpRequest"; - } - - // Set headers - for ( i in headers ) { - xhr.setRequestHeader( i, headers[ i ] ); - } - - // Callback - callback = function( type ) { - return function() { - if ( callback ) { - callback = errorCallback = xhr.onload = - xhr.onerror = xhr.onabort = xhr.ontimeout = - xhr.onreadystatechange = null; - - if ( type === "abort" ) { - xhr.abort(); - } else if ( type === "error" ) { - - // Support: IE <=9 only - // On a manual native abort, IE9 throws - // errors on any property access that is not readyState - if ( typeof xhr.status !== "number" ) { - complete( 0, "error" ); - } else { - complete( - - // File: protocol always yields status 0; see #8605, #14207 - xhr.status, - xhr.statusText - ); - } - } else { - complete( - xhrSuccessStatus[ xhr.status ] || xhr.status, - xhr.statusText, - - // Support: IE <=9 only - // IE9 has no XHR2 but throws on binary (trac-11426) - // For XHR2 non-text, let the caller handle it (gh-2498) - ( xhr.responseType || "text" ) !== "text" || - typeof xhr.responseText !== "string" ? 
- { binary: xhr.response } : - { text: xhr.responseText }, - xhr.getAllResponseHeaders() - ); - } - } - }; - }; - - // Listen to events - xhr.onload = callback(); - errorCallback = xhr.onerror = xhr.ontimeout = callback( "error" ); - - // Support: IE 9 only - // Use onreadystatechange to replace onabort - // to handle uncaught aborts - if ( xhr.onabort !== undefined ) { - xhr.onabort = errorCallback; - } else { - xhr.onreadystatechange = function() { - - // Check readyState before timeout as it changes - if ( xhr.readyState === 4 ) { - - // Allow onerror to be called first, - // but that will not handle a native abort - // Also, save errorCallback to a variable - // as xhr.onerror cannot be accessed - window.setTimeout( function() { - if ( callback ) { - errorCallback(); - } - } ); - } - }; - } - - // Create the abort callback - callback = callback( "abort" ); - - try { - - // Do send the request (this may raise an exception) - xhr.send( options.hasContent && options.data || null ); - } catch ( e ) { - - // #14683: Only rethrow if this hasn't been notified as an error yet - if ( callback ) { - throw e; - } - } - }, - - abort: function() { - if ( callback ) { - callback(); - } - } - }; - } -} ); - - - - -// Prevent auto-execution of scripts when no explicit dataType was provided (See gh-2432) -jQuery.ajaxPrefilter( function( s ) { - if ( s.crossDomain ) { - s.contents.script = false; - } -} ); - -// Install script dataType -jQuery.ajaxSetup( { - accepts: { - script: "text/javascript, application/javascript, " + - "application/ecmascript, application/x-ecmascript" - }, - contents: { - script: /\b(?:java|ecma)script\b/ - }, - converters: { - "text script": function( text ) { - jQuery.globalEval( text ); - return text; - } - } -} ); - -// Handle cache's special case and crossDomain -jQuery.ajaxPrefilter( "script", function( s ) { - if ( s.cache === undefined ) { - s.cache = false; - } - if ( s.crossDomain ) { - s.type = "GET"; - } -} ); - -// Bind script tag hack 
transport -jQuery.ajaxTransport( "script", function( s ) { - - // This transport only deals with cross domain or forced-by-attrs requests - if ( s.crossDomain || s.scriptAttrs ) { - var script, callback; - return { - send: function( _, complete ) { - script = jQuery( " - <% end %> - - - - - - NAV - <%= image_tag('navbar.png') %> - - -
      - <%= image_tag "logo.png", class: 'logo' %> - <% if language_tabs.any? %> -
      - <% language_tabs.each do |lang| %> - <% if lang.is_a? Hash %> - <%= lang.values.first %> - <% else %> - <%= lang %> - <% end %> - <% end %> -
      - <% end %> - <% if current_page.data.search %> - -
        - <% end %> -
          - <% toc_data(page_content).each do |h1| %> -
        • - <%= h1[:content] %> - <% if h1[:children].length > 0 %> - - <% end %> -
        • - <% end %> -
        - <% if current_page.data.toc_footers %> - - <% end %> -
        -
        -
        -
        - <%= page_content %> -
        -
        - <% if language_tabs.any? %> -
        - <% language_tabs.each do |lang| %> - <% if lang.is_a? Hash %> - <%= lang.values.first %> - <% else %> - <%= lang %> - <% end %> - <% end %> -
        - <% end %> -
        -
        - - diff --git a/docs/api/source/stylesheets/_icon-font.scss b/docs/api/source/stylesheets/_icon-font.scss deleted file mode 100644 index b599483..0000000 --- a/docs/api/source/stylesheets/_icon-font.scss +++ /dev/null @@ -1,38 +0,0 @@ -@font-face { - font-family: 'slate'; - src:font-url('slate.eot?-syv14m'); - src:font-url('slate.eot?#iefix-syv14m') format('embedded-opentype'), - font-url('slate.woff2?-syv14m') format('woff2'), - font-url('slate.woff?-syv14m') format('woff'), - font-url('slate.ttf?-syv14m') format('truetype'), - font-url('slate.svg?-syv14m#slate') format('svg'); - font-weight: normal; - font-style: normal; -} - -%icon { - font-family: 'slate'; - speak: none; - font-style: normal; - font-weight: normal; - font-variant: normal; - text-transform: none; - line-height: 1; -} - -%icon-exclamation-sign { - @extend %icon; - content: "\e600"; -} -%icon-info-sign { - @extend %icon; - content: "\e602"; -} -%icon-ok-sign { - @extend %icon; - content: "\e606"; -} -%icon-search { - @extend %icon; - content: "\e607"; -} diff --git a/docs/api/source/stylesheets/_normalize.scss b/docs/api/source/stylesheets/_normalize.scss deleted file mode 100644 index 46f646a..0000000 --- a/docs/api/source/stylesheets/_normalize.scss +++ /dev/null @@ -1,427 +0,0 @@ -/*! normalize.css v3.0.2 | MIT License | git.io/normalize */ - -/** - * 1. Set default font family to sans-serif. - * 2. Prevent iOS text size adjust after orientation change, without disabling - * user zoom. - */ - -html { - font-family: sans-serif; /* 1 */ - -ms-text-size-adjust: 100%; /* 2 */ - -webkit-text-size-adjust: 100%; /* 2 */ -} - -/** - * Remove default margin. - */ - -body { - margin: 0; -} - -/* HTML5 display definitions - ========================================================================== */ - -/** - * Correct `block` display not defined for any HTML5 element in IE 8/9. - * Correct `block` display not defined for `details` or `summary` in IE 10/11 - * and Firefox. 
- * Correct `block` display not defined for `main` in IE 11. - */ - -article, -aside, -details, -figcaption, -figure, -footer, -header, -hgroup, -main, -menu, -nav, -section, -summary { - display: block; -} - -/** - * 1. Correct `inline-block` display not defined in IE 8/9. - * 2. Normalize vertical alignment of `progress` in Chrome, Firefox, and Opera. - */ - -audio, -canvas, -progress, -video { - display: inline-block; /* 1 */ - vertical-align: baseline; /* 2 */ -} - -/** - * Prevent modern browsers from displaying `audio` without controls. - * Remove excess height in iOS 5 devices. - */ - -audio:not([controls]) { - display: none; - height: 0; -} - -/** - * Address `[hidden]` styling not present in IE 8/9/10. - * Hide the `template` element in IE 8/9/11, Safari, and Firefox < 22. - */ - -[hidden], -template { - display: none; -} - -/* Links - ========================================================================== */ - -/** - * Remove the gray background color from active links in IE 10. - */ - -a { - background-color: transparent; -} - -/** - * Improve readability when focused and also mouse hovered in all browsers. - */ - -a:active, -a:hover { - outline: 0; -} - -/* Text-level semantics - ========================================================================== */ - -/** - * Address styling not present in IE 8/9/10/11, Safari, and Chrome. - */ - -abbr[title] { - border-bottom: 1px dotted; -} - -/** - * Address style set to `bolder` in Firefox 4+, Safari, and Chrome. - */ - -b, -strong { - font-weight: bold; -} - -/** - * Address styling not present in Safari and Chrome. - */ - -dfn { - font-style: italic; -} - -/** - * Address variable `h1` font-size and margin within `section` and `article` - * contexts in Firefox 4+, Safari, and Chrome. - */ - -h1 { - font-size: 2em; - margin: 0.67em 0; -} - -/** - * Address styling not present in IE 8/9. 
- */ - -mark { - background: #ff0; - color: #000; -} - -/** - * Address inconsistent and variable font size in all browsers. - */ - -small { - font-size: 80%; -} - -/** - * Prevent `sub` and `sup` affecting `line-height` in all browsers. - */ - -sub, -sup { - font-size: 75%; - line-height: 0; - position: relative; - vertical-align: baseline; -} - -sup { - top: -0.5em; -} - -sub { - bottom: -0.25em; -} - -/* Embedded content - ========================================================================== */ - -/** - * Remove border when inside `a` element in IE 8/9/10. - */ - -img { - border: 0; -} - -/** - * Correct overflow not hidden in IE 9/10/11. - */ - -svg:not(:root) { - overflow: hidden; -} - -/* Grouping content - ========================================================================== */ - -/** - * Address margin not present in IE 8/9 and Safari. - */ - -figure { - margin: 1em 40px; -} - -/** - * Address differences between Firefox and other browsers. - */ - -hr { - -moz-box-sizing: content-box; - box-sizing: content-box; - height: 0; -} - -/** - * Contain overflow in all browsers. - */ - -pre { - overflow: auto; -} - -/** - * Address odd `em`-unit font size rendering in all browsers. - */ - -code, -kbd, -pre, -samp { - font-family: monospace, monospace; - font-size: 1em; -} - -/* Forms - ========================================================================== */ - -/** - * Known limitation: by default, Chrome and Safari on OS X allow very limited - * styling of `select`, unless a `border` property is set. - */ - -/** - * 1. Correct color not being inherited. - * Known issue: affects color of disabled elements. - * 2. Correct font properties not being inherited. - * 3. Address margins set differently in Firefox 4+, Safari, and Chrome. - */ - -button, -input, -optgroup, -select, -textarea { - color: inherit; /* 1 */ - font: inherit; /* 2 */ - margin: 0; /* 3 */ -} - -/** - * Address `overflow` set to `hidden` in IE 8/9/10/11. 
- */ - -button { - overflow: visible; -} - -/** - * Address inconsistent `text-transform` inheritance for `button` and `select`. - * All other form control elements do not inherit `text-transform` values. - * Correct `button` style inheritance in Firefox, IE 8/9/10/11, and Opera. - * Correct `select` style inheritance in Firefox. - */ - -button, -select { - text-transform: none; -} - -/** - * 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio` - * and `video` controls. - * 2. Correct inability to style clickable `input` types in iOS. - * 3. Improve usability and consistency of cursor style between image-type - * `input` and others. - */ - -button, -html input[type="button"], /* 1 */ -input[type="reset"], -input[type="submit"] { - -webkit-appearance: button; /* 2 */ - cursor: pointer; /* 3 */ -} - -/** - * Re-set default cursor for disabled elements. - */ - -button[disabled], -html input[disabled] { - cursor: default; -} - -/** - * Remove inner padding and border in Firefox 4+. - */ - -button::-moz-focus-inner, -input::-moz-focus-inner { - border: 0; - padding: 0; -} - -/** - * Address Firefox 4+ setting `line-height` on `input` using `!important` in - * the UA stylesheet. - */ - -input { - line-height: normal; -} - -/** - * It's recommended that you don't attempt to style these elements. - * Firefox's implementation doesn't respect box-sizing, padding, or width. - * - * 1. Address box sizing set to `content-box` in IE 8/9/10. - * 2. Remove excess padding in IE 8/9/10. - */ - -input[type="checkbox"], -input[type="radio"] { - box-sizing: border-box; /* 1 */ - padding: 0; /* 2 */ -} - -/** - * Fix the cursor style for Chrome's increment/decrement buttons. For certain - * `font-size` values of the `input`, it causes the cursor style of the - * decrement button to change from `default` to `text`. - */ - -input[type="number"]::-webkit-inner-spin-button, -input[type="number"]::-webkit-outer-spin-button { - height: auto; -} - -/** - * 1. 
Address `appearance` set to `searchfield` in Safari and Chrome. - * 2. Address `box-sizing` set to `border-box` in Safari and Chrome - * (include `-moz` to future-proof). - */ - -input[type="search"] { - -webkit-appearance: textfield; /* 1 */ - -moz-box-sizing: content-box; - -webkit-box-sizing: content-box; /* 2 */ - box-sizing: content-box; -} - -/** - * Remove inner padding and search cancel button in Safari and Chrome on OS X. - * Safari (but not Chrome) clips the cancel button when the search input has - * padding (and `textfield` appearance). - */ - -input[type="search"]::-webkit-search-cancel-button, -input[type="search"]::-webkit-search-decoration { - -webkit-appearance: none; -} - -/** - * Define consistent border, margin, and padding. - */ - -fieldset { - border: 1px solid #c0c0c0; - margin: 0 2px; - padding: 0.35em 0.625em 0.75em; -} - -/** - * 1. Correct `color` not being inherited in IE 8/9/10/11. - * 2. Remove padding so people aren't caught out if they zero out fieldsets. - */ - -legend { - border: 0; /* 1 */ - padding: 0; /* 2 */ -} - -/** - * Remove default vertical scrollbar in IE 8/9/10/11. - */ - -textarea { - overflow: auto; -} - -/** - * Don't inherit the `font-weight` (applied by a rule above). - * NOTE: the default cannot safely be changed in Chrome and Safari on OS X. - */ - -optgroup { - font-weight: bold; -} - -/* Tables - ========================================================================== */ - -/** - * Remove most spacing between table cells. 
- */ - -table { - border-collapse: collapse; - border-spacing: 0; -} - -td, -th { - padding: 0; -} diff --git a/docs/api/source/stylesheets/_rtl.scss b/docs/api/source/stylesheets/_rtl.scss deleted file mode 100644 index 720719a..0000000 --- a/docs/api/source/stylesheets/_rtl.scss +++ /dev/null @@ -1,140 +0,0 @@ -//////////////////////////////////////////////////////////////////////////////// -// RTL Styles Variables -//////////////////////////////////////////////////////////////////////////////// - -$default: auto; - -//////////////////////////////////////////////////////////////////////////////// -// TABLE OF CONTENTS -//////////////////////////////////////////////////////////////////////////////// - -#toc>ul>li>a>span { - float: left; -} - -.toc-wrapper { - transition: right 0.3s ease-in-out !important; - left: $default !important; - #{right}: 0; -} - -.toc-h2 { - padding-#{right}: $nav-padding + $nav-indent; -} - -#nav-button { - #{right}: 0; - transition: right 0.3s ease-in-out; - &.open { - right: $nav-width - } -} - -//////////////////////////////////////////////////////////////////////////////// -// PAGE LAYOUT AND CODE SAMPLE BACKGROUND -//////////////////////////////////////////////////////////////////////////////// -.page-wrapper { - margin-#{left}: $default !important; - margin-#{right}: $nav-width; - .dark-box { - #{right}: $default; - #{left}: 0; - } -} - -.lang-selector { - width: $default !important; - a { - float: right; - } -} - -//////////////////////////////////////////////////////////////////////////////// -// CODE SAMPLE STYLES -//////////////////////////////////////////////////////////////////////////////// -.content { - &>h1, - &>h2, - &>h3, - &>h4, - &>h5, - &>h6, - &>p, - &>table, - &>ul, - &>ol, - &>aside, - &>dl { - margin-#{left}: $examples-width; - margin-#{right}: $default !important; - } - &>ul, - &>ol { - padding-#{right}: $main-padding + 15px; - } - table { - th, - td { - text-align: right; - } - } - dd { - margin-#{right}: 15px; - 
} - aside { - aside:before { - padding-#{left}: 0.5em; - } - .search-highlight { - background: linear-gradient(to top right, #F7E633 0%, #F1D32F 100%); - } - } - pre, - blockquote { - float: left !important; - clear: left !important; - } -} - -//////////////////////////////////////////////////////////////////////////////// -// TYPOGRAPHY -//////////////////////////////////////////////////////////////////////////////// -h1, -h2, -h3, -h4, -h5, -h6, -p, -aside { - text-align: right; - direction: rtl; -} - -.toc-wrapper { - text-align: right; - direction: rtl; - font-weight: 100 !important; -} - - -//////////////////////////////////////////////////////////////////////////////// -// RESPONSIVE DESIGN -//////////////////////////////////////////////////////////////////////////////// -@media (max-width: $tablet-width) { - .toc-wrapper { - #{right}: -$nav-width; - &.open { - #{right}: 0; - } - } - .page-wrapper { - margin-#{right}: 0; - } -} - -@media (max-width: $phone-width) { - %left-col { - margin-#{left}: 0; - } -} diff --git a/docs/api/source/stylesheets/_variables.scss b/docs/api/source/stylesheets/_variables.scss deleted file mode 100644 index 7693261..0000000 --- a/docs/api/source/stylesheets/_variables.scss +++ /dev/null @@ -1,103 +0,0 @@ -/* -Copyright 2008-2013 Concur Technologies, Inc. - -Licensed under the Apache License, Version 2.0 (the "License"); you may -not use this file except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -License for the specific language governing permissions and limitations -under the License. 
-*/ - - -//////////////////////////////////////////////////////////////////////////////// -// CUSTOMIZE SLATE -//////////////////////////////////////////////////////////////////////////////// -// Use these settings to help adjust the appearance of Slate - - -// BACKGROUND COLORS -//////////////////// -$nav-bg: #2E3336 !default; -$examples-bg: #2E3336 !default; -$code-bg: #1E2224 !default; -$code-annotation-bg: #191D1F !default; -$nav-subitem-bg: #1E2224 !default; -$nav-active-bg: #0F75D4 !default; -$nav-active-parent-bg: #1E2224 !default; // parent links of the current section -$lang-select-border: #000 !default; -$lang-select-bg: #1E2224 !default; -$lang-select-active-bg: $examples-bg !default; // feel free to change this to blue or something -$lang-select-pressed-bg: #111 !default; // color of language tab bg when mouse is pressed -$main-bg: #F3F7F9 !default; -$aside-notice-bg: #8fbcd4 !default; -$aside-warning-bg: #c97a7e !default; -$aside-success-bg: #6ac174 !default; -$search-notice-bg: #c97a7e !default; - - -// TEXT COLORS -//////////////////// -$main-text: #333 !default; // main content text color -$nav-text: #fff !default; -$nav-active-text: #fff !default; -$nav-active-parent-text: #fff !default; // parent links of the current section -$lang-select-text: #fff !default; // color of unselected language tab text -$lang-select-active-text: #fff !default; // color of selected language tab text -$lang-select-pressed-text: #fff !default; // color of language tab text when mouse is pressed - - -// SIZES -//////////////////// -$nav-width: 230px !default; // width of the navbar -$examples-width: 50% !default; // portion of the screen taken up by code examples -$logo-margin: 0px !default; // margin below logo -$main-padding: 28px !default; // padding to left and right of content & examples -$nav-padding: 15px !default; // padding to left and right of navbar -$nav-v-padding: 10px !default; // padding used vertically around search boxes and results -$nav-indent: 10px 
!default; // extra padding for ToC subitems -$code-annotation-padding: 13px !default; // padding inside code annotations -$h1-margin-bottom: 21px !default; // padding under the largest header tags -$tablet-width: 930px !default; // min width before reverting to tablet size -$phone-width: $tablet-width - $nav-width !default; // min width before reverting to mobile size - - -// FONTS -//////////////////// -%default-font { - font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; - font-size: 14px; -} - -%header-font { - @extend %default-font; - font-weight: bold; -} - -%code-font { - font-family: Consolas, Menlo, Monaco, "Lucida Console", "Liberation Mono", "DejaVu Sans Mono", "Bitstream Vera Sans Mono", "Courier New", monospace, serif; - font-size: 12px; - line-height: 1.5; -} - - -// OTHER -//////////////////// -$nav-footer-border-color: #666 !default; -$search-box-border-color: #666 !default; - - -//////////////////////////////////////////////////////////////////////////////// -// INTERNAL -//////////////////////////////////////////////////////////////////////////////// -// These settings are probably best left alone. - -%break-words { - word-break: break-all; - hyphens: auto; -} diff --git a/docs/api/source/stylesheets/print.css.scss b/docs/api/source/stylesheets/print.css.scss deleted file mode 100644 index aea88c3..0000000 --- a/docs/api/source/stylesheets/print.css.scss +++ /dev/null @@ -1,153 +0,0 @@ -@charset "utf-8"; -@import 'normalize'; -@import 'variables'; -@import 'icon-font'; - -/* -Copyright 2008-2013 Concur Technologies, Inc. - -Licensed under the Apache License, Version 2.0 (the "License"); you may -not use this file except in compliance with the License. 
You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -License for the specific language governing permissions and limitations -under the License. -*/ - -$print-color: #999; -$print-color-light: #ccc; -$print-font-size: 12px; - -body { - @extend %default-font; -} - -.tocify, .toc-footer, .lang-selector, .search, #nav-button { - display: none; -} - -.tocify-wrapper>img { - margin: 0 auto; - display: block; -} - -.content { - font-size: 12px; - - pre, code { - @extend %code-font; - @extend %break-words; - border: 1px solid $print-color; - border-radius: 5px; - font-size: 0.8em; - } - - pre { - code { - border: 0; - } - } - - pre { - padding: 1.3em; - } - - code { - padding: 0.2em; - } - - table { - border: 1px solid $print-color; - tr { - border-bottom: 1px solid $print-color; - } - td,th { - padding: 0.7em; - } - } - - p { - line-height: 1.5; - } - - a { - text-decoration: none; - color: #000; - } - - h1 { - @extend %header-font; - font-size: 2.5em; - padding-top: 0.5em; - padding-bottom: 0.5em; - margin-top: 1em; - margin-bottom: $h1-margin-bottom; - border: 2px solid $print-color-light; - border-width: 2px 0; - text-align: center; - } - - h2 { - @extend %header-font; - font-size: 1.8em; - margin-top: 2em; - border-top: 2px solid $print-color-light; - padding-top: 0.8em; - } - - h1+h2, h1+div+h2 { - border-top: none; - padding-top: 0; - margin-top: 0; - } - - h3, h4 { - @extend %header-font; - font-size: 0.8em; - margin-top: 1.5em; - margin-bottom: 0.8em; - text-transform: uppercase; - } - - h5, h6 { - text-transform: uppercase; - } - - aside { - padding: 1em; - border: 1px solid $print-color-light; - border-radius: 5px; - margin-top: 1.5em; - margin-bottom: 1.5em; - line-height: 1.6; - } - - aside:before { - 
vertical-align: middle; - padding-right: 0.5em; - font-size: 14px; - } - - aside.notice:before { - @extend %icon-info-sign; - } - - aside.warning:before { - @extend %icon-exclamation-sign; - } - - aside.success:before { - @extend %icon-ok-sign; - } -} - -.copy-clipboard { - @media print { - display: none - } -} diff --git a/docs/api/source/stylesheets/screen.css.scss b/docs/api/source/stylesheets/screen.css.scss deleted file mode 100644 index 70e3527..0000000 --- a/docs/api/source/stylesheets/screen.css.scss +++ /dev/null @@ -1,633 +0,0 @@ -@charset "utf-8"; -@import 'normalize'; -@import 'variables'; -@import 'icon-font'; -// @import 'rtl'; // uncomment to switch to RTL format - -/* -Copyright 2008-2013 Concur Technologies, Inc. - -Licensed under the Apache License, Version 2.0 (the "License"); you may -not use this file except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -License for the specific language governing permissions and limitations -under the License. 
-*/ - -//////////////////////////////////////////////////////////////////////////////// -// GENERAL STUFF -//////////////////////////////////////////////////////////////////////////////// - -html, body { - color: $main-text; - padding: 0; - margin: 0; - -webkit-font-smoothing: antialiased; - -moz-osx-font-smoothing: grayscale; - @extend %default-font; - background-color: $main-bg; - height: 100%; - -webkit-text-size-adjust: none; /* Never autoresize text */ -} - -//////////////////////////////////////////////////////////////////////////////// -// TABLE OF CONTENTS -//////////////////////////////////////////////////////////////////////////////// - -#toc > ul > li > a > span { - float: right; - background-color: #2484FF; - border-radius: 40px; - width: 20px; -} - -.toc-wrapper { - transition: left 0.3s ease-in-out; - - overflow-y: auto; - overflow-x: hidden; - position: fixed; - z-index: 30; - top: 0; - left: 0; - bottom: 0; - width: $nav-width; - background-color: $nav-bg; - font-size: 13px; - font-weight: bold; - - // language selector for mobile devices - .lang-selector { - display: none; - a { - padding-top: 0.5em; - padding-bottom: 0.5em; - } - } - - // This is the logo at the top of the ToC - .logo { - display: block; - max-width: 100%; - margin-bottom: $logo-margin; - } - - &>.search { - position: relative; - - input { - background: $nav-bg; - border-width: 0 0 1px 0; - border-color: $search-box-border-color; - padding: 6px 0 6px 20px; - box-sizing: border-box; - margin: $nav-v-padding $nav-padding; - width: $nav-width - ($nav-padding*2); - outline: none; - color: $nav-text; - border-radius: 0; /* ios has a default border radius */ - } - - &:before { - position: absolute; - top: 17px; - left: $nav-padding; - color: $nav-text; - @extend %icon-search; - } - } - - .search-results { - margin-top: 0; - box-sizing: border-box; - height: 0; - overflow-y: auto; - overflow-x: hidden; - transition-property: height, margin; - transition-duration: 180ms; - 
transition-timing-function: ease-in-out; - background: $nav-subitem-bg; - &.visible { - height: 30%; - margin-bottom: 1em; - } - - li { - margin: 1em $nav-padding; - line-height: 1; - } - - a { - color: $nav-text; - text-decoration: none; - - &:hover { - text-decoration: underline; - } - } - } - - - // The Table of Contents is composed of multiple nested - // unordered lists. These styles remove the default - // styling of an unordered list because it is ugly. - ul, li { - list-style: none; - margin: 0; - padding: 0; - line-height: 28px; - } - - li { - color: $nav-text; - transition-property: background; - transition-timing-function: linear; - transition-duration: 200ms; - } - - // This is the currently selected ToC entry - .toc-link.active { - background-color: $nav-active-bg; - color: $nav-active-text; - } - - // this is parent links of the currently selected ToC entry - .toc-link.active-parent { - background-color: $nav-active-parent-bg; - color: $nav-active-parent-text; - } - - .toc-list-h2 { - display: none; - background-color: $nav-subitem-bg; - font-weight: 500; - } - - .toc-h2 { - padding-left: $nav-padding + $nav-indent; - font-size: 12px; - } - - .toc-footer { - padding: 1em 0; - margin-top: 1em; - border-top: 1px dashed $nav-footer-border-color; - - li,a { - color: $nav-text; - text-decoration: none; - } - - a:hover { - text-decoration: underline; - } - - li { - font-size: 0.8em; - line-height: 1.7; - text-decoration: none; - } - } -} - -.toc-link, .toc-footer li { - padding: 0 $nav-padding 0 $nav-padding; - display: block; - overflow-x: hidden; - white-space: nowrap; - text-overflow: ellipsis; - text-decoration: none; - color: $nav-text; - transition-property: background; - transition-timing-function: linear; - transition-duration: 130ms; -} - -// button to show navigation on mobile devices -#nav-button { - span { - display: block; - $side-pad: $main-padding / 2 - 8px; - padding: $side-pad $side-pad $side-pad; - background-color: rgba($main-bg, 0.7); - 
transform-origin: 0 0; - transform: rotate(-90deg) translate(-100%, 0); - border-radius: 0 0 0 5px; - } - padding: 0 1.5em 5em 0; // increase touch size area - display: none; - position: fixed; - top: 0; - left: 0; - z-index: 100; - color: #000; - text-decoration: none; - font-weight: bold; - opacity: 0.7; - line-height: 16px; - img { - height: 16px; - vertical-align: bottom; - } - - transition: left 0.3s ease-in-out; - - &:hover { opacity: 1; } - &.open {left: $nav-width} -} - - -//////////////////////////////////////////////////////////////////////////////// -// PAGE LAYOUT AND CODE SAMPLE BACKGROUND -//////////////////////////////////////////////////////////////////////////////// - -.page-wrapper { - margin-left: $nav-width; - position: relative; - z-index: 10; - background-color: $main-bg; - min-height: 100%; - - padding-bottom: 1px; // prevent margin overflow - - // The dark box is what gives the code samples their dark background. - // It sits essentially under the actual content block, which has a - // transparent background. - // I know, it's hackish, but it's the simplist way to make the left - // half of the content always this background color. 
- .dark-box { - width: $examples-width; - background-color: $examples-bg; - position: absolute; - right: 0; - top: 0; - bottom: 0; - } - - .lang-selector { - position: fixed; - z-index: 50; - border-bottom: 5px solid $lang-select-active-bg; - } -} - -.lang-selector { - display: flex; - background-color: $lang-select-bg; - width: 100%; - font-weight: bold; - overflow-x: auto; - a { - display: inline; - color: $lang-select-text; - text-decoration: none; - padding: 0 10px; - line-height: 30px; - outline: 0; - - &:active, &:focus { - background-color: $lang-select-pressed-bg; - color: $lang-select-pressed-text; - } - - &.active { - background-color: $lang-select-active-bg; - color: $lang-select-active-text; - } - } - - &:after { - content: ''; - clear: both; - display: block; - } -} - -//////////////////////////////////////////////////////////////////////////////// -// CONTENT STYLES -//////////////////////////////////////////////////////////////////////////////// -// This is all the stuff with the light background in the left half of the page - -.content { - // fixes webkit rendering bug for some: see #538 - -webkit-transform: translateZ(0); - // to place content above the dark box - position: relative; - z-index: 30; - - &:after { - content: ''; - display: block; - clear: both; - } - - &>h1, &>h2, &>h3, &>h4, &>h5, &>h6, &>p, &>table, &>ul, &>ol, &>aside, &>dl { - margin-right: $examples-width; - padding: 0 $main-padding; - box-sizing: border-box; - display: block; - - @extend %left-col; - } - - &>ul, &>ol { - padding-left: $main-padding + 15px; - } - - // the div is the tocify hidden div for placeholding stuff - &>h1, &>h2, &>div { - clear:both; - } - - h1 { - @extend %header-font; - font-size: 25px; - padding-top: 0.5em; - padding-bottom: 0.5em; - margin-bottom: $h1-margin-bottom; - margin-top: 2em; - border-top: 1px solid #ccc; - border-bottom: 1px solid #ccc; - background-color: #fdfdfd; - } - - h1:first-child, div:first-child + h1 { - border-top-width: 0; - 
margin-top: 0; - } - - h2 { - @extend %header-font; - font-size: 19px; - margin-top: 4em; - margin-bottom: 0; - border-top: 1px solid #ccc; - padding-top: 1.2em; - padding-bottom: 1.2em; - background-image: linear-gradient(to bottom, rgba(#fff, 0.2), rgba(#fff, 0)); - } - - // h2s right after h1s should bump right up - // against the h1s. - h1 + h2, h1 + div + h2 { - margin-top: $h1-margin-bottom * -1; - border-top: none; - } - - h3, h4, h5, h6 { - @extend %header-font; - font-size: 15px; - margin-top: 2.5em; - margin-bottom: 0.8em; - } - - h4, h5, h6 { - font-size: 10px; - } - - hr { - margin: 2em 0; - border-top: 2px solid $examples-bg; - border-bottom: 2px solid $main-bg; - } - - table { - margin-bottom: 1em; - overflow: auto; - th,td { - text-align: left; - vertical-align: top; - line-height: 1.6; - code { - white-space: nowrap; - } - } - - th { - padding: 5px 10px; - border-bottom: 1px solid #ccc; - vertical-align: bottom; - } - - td { - padding: 10px; - } - - tr:last-child { - border-bottom: 1px solid #ccc; - } - - tr:nth-child(odd)>td { - background-color: lighten($main-bg,4.2%); - } - - tr:nth-child(even)>td { - background-color: lighten($main-bg,2.4%); - } - } - - dt { - font-weight: bold; - } - - dd { - margin-left: 15px; - } - - p, li, dt, dd { - line-height: 1.6; - margin-top: 0; - } - - img { - max-width: 100%; - } - - code { - background-color: rgba(0,0,0,0.05); - padding: 3px; - border-radius: 3px; - @extend %break-words; - @extend %code-font; - } - - pre>code { - background-color: transparent; - padding: 0; - } - - aside { - padding-top: 1em; - padding-bottom: 1em; - margin-top: 1.5em; - margin-bottom: 1.5em; - background: $aside-notice-bg; - line-height: 1.6; - - &.warning { - background-color: $aside-warning-bg; - } - - &.success { - background-color: $aside-success-bg; - } - } - - aside:before { - vertical-align: middle; - padding-right: 0.5em; - font-size: 14px; - } - - aside.notice:before { - @extend %icon-info-sign; - } - - 
aside.warning:before { - @extend %icon-exclamation-sign; - } - - aside.success:before { - @extend %icon-ok-sign; - } - - .search-highlight { - padding: 2px; - margin: -3px; - border-radius: 4px; - border: 1px solid #F7E633; - background: linear-gradient(to top left, #F7E633 0%, #F1D32F 100%); - } -} - -//////////////////////////////////////////////////////////////////////////////// -// CODE SAMPLE STYLES -//////////////////////////////////////////////////////////////////////////////// -// This is all the stuff that appears in the right half of the page - -.content { - &>div.highlight { - clear:none; - } - - pre, blockquote { - background-color: $code-bg; - color: #fff; - - margin: 0; - width: $examples-width; - - float:right; - clear:right; - - box-sizing: border-box; - - @extend %right-col; - - &>p { margin: 0; } - - a { - color: #fff; - text-decoration: none; - border-bottom: dashed 1px #ccc; - } - } - - pre { - @extend %code-font; - padding-top: 2em; - padding-bottom: 2em; - padding: 2em $main-padding; - } - - blockquote { - &>p { - background-color: $code-annotation-bg; - padding: $code-annotation-padding 2em; - color: #eee; - } - } -} - -//////////////////////////////////////////////////////////////////////////////// -// RESPONSIVE DESIGN -//////////////////////////////////////////////////////////////////////////////// -// These are the styles for phones and tablets -// There are also a couple styles disperesed - -@media (max-width: $tablet-width) { - .toc-wrapper { - left: -$nav-width; - - &.open { - left: 0; - } - } - - .page-wrapper { - margin-left: 0; - } - - #nav-button { - display: block; - } - - .toc-link { - padding-top: 0.3em; - padding-bottom: 0.3em; - } -} - -@media (max-width: $phone-width) { - .dark-box { - display: none; - } - - %left-col { - margin-right: 0; - } - - .toc-wrapper .lang-selector { - display: block; - } - - .page-wrapper .lang-selector { - display: none; - } - - %right-col { - width: auto; - float: none; - } - - %right-col + 
%left-col { - margin-top: $main-padding; - } -} - -.highlight .c, .highlight .cm, .highlight .c1, .highlight .cs { - color: #909090; -} - -.highlight, .highlight .w { - background-color: $code-bg; -} - -.copy-clipboard { - float: right; - fill: #9DAAB6; - cursor: pointer; - opacity: 0.4; - height: 18px; - width: 18px; -} - -.copy-clipboard:hover { - opacity: 0.8; -} diff --git a/docs/config.toml b/docs/config.toml index 7d23d06..7f7e186 100644 --- a/docs/config.toml +++ b/docs/config.toml @@ -1,12 +1,11 @@ # hugo server --minify --themesDir ... --baseURL=http://0.0.0.0:1313/theme/hugo-book/ baseURL = 'https://rustybever.be/docs/vieter/' -title = 'Vieter - Docs' +title = 'The Rusty Bever - Docs' theme = 'hugo-book' # Book configuration disablePathToLower = true -# Doesn't work with docs as subdir enableGitInfo = true # Needed for mermaid/katex shortcodes @@ -28,22 +27,16 @@ enableGitInfo = true weight = 1 [menu] +# [[menu.before]] [[menu.after]] - name = "HTTP API Docs" - url = "https://rustybever.be/docs/vieter/api/" + name = "Source" + url = "https://git.rustybever.be/Chewing_Bever/docs" weight = 10 -[[menu.after]] - name = "Man Pages" - url = "https://rustybever.be/man/vieter/vieter.1.html" - weight = 20 -[[menu.after]] - name = "Vieter" - url = "https://git.rustybever.be/vieter-v/vieter" - weight = 30 + [[menu.after]] name = "Hugo Theme" url = "https://github.com/alex-shpak/hugo-book" - weight = 40 + weight = 20 [params] # (Optional, default light) Sets color theme: light, dark or auto. @@ -70,14 +63,14 @@ enableGitInfo = true # Set source repository location. # Used for 'Last Modified' and 'Edit this page' links. - BookRepo = 'https://git.rustybever.be/vieter-v/vieter' + BookRepo = 'https://git.rustybever.be/Chewing_Bever/docs' # (Optional, default 'commit') Specifies commit portion of the link to the page's last modified # commit hash for 'doc' page type. # Requires 'BookRepo' param. 
# Value used to construct a URL consisting of BookRepo/BookCommitPath/ # Github uses 'commit', Bitbucket uses 'commits' - BookCommitPath = 'src/commit' + # BookCommitPath = 'commit' # Enable "Edit this page" links for 'doc' page type. # Disabled by default. Uncomment to enable. Requires 'BookRepo' param. diff --git a/docs/content/CLI.md b/docs/content/CLI.md new file mode 100644 index 0000000..32bb6f8 --- /dev/null +++ b/docs/content/CLI.md @@ -0,0 +1,27 @@ +# Vieter CLI + +I provide a simple CLI tool that currently only allows changing the Git +repository API. Its usage is quite simple. + +First, you need to create a file in your home directory called `.vieterrc` with +the following content: + +```toml +address = "https://example.com" +api_key = "your-api-key" +``` + +You can also use a different file or use environment variables, as described in +[Configuration](/configuration). + +Now you're ready to use the CLI tool. + +## Usage + +* `vieter repos list` returns all repositories currently stored in the API. +* `vieter repos add url branch repo arch...` adds the repository with the given + URL, branch, repo & arch to the API. +* `vieter repos remove id` removes the repository with the given ID prefix. + +You can always check `vieter -help` or `vieter repos -help` for more +information about the commands. diff --git a/docs/content/_index.md b/docs/content/_index.md index efcdf6d..3a1144b 100644 --- a/docs/content/_index.md +++ b/docs/content/_index.md @@ -9,9 +9,12 @@ documentation might not be relevant anymore for the latest release. ## Overview -Vieter consists of two main parts, namely an implementation of an Arch -repository server & a scheduling system to periodically build Pacman packages & -publish them to a repository. 
+Vieter has a few main features: + +* It's a simple & lightweight implementation of an Arch repository server +* It allows for uploading of built package archives +* It supports a basic build system to periodically re-build packages & upload + them to the server {{< hint info >}} **Note** @@ -23,12 +26,12 @@ well. ### Why? -Vieter is my personal solution to a problem I've been facing for months: +Vieter is my personal solution for a problem I've been facing for months: extremely long AUR package build times. I run EndeavourOS on both my laptops, one of which being a rather old MacBook Air. I really like being a beta-tester for projects & run development builds for multiple packages (nheko, -newsflash...). Because of this, I have to regularly re-build these packages in -order to stay up to date with development. However, these builds can take a +newsflash...). The issue with this is that I have to regularly re-build these +packages in order to stay up to date with development & these builds can take a really long time on the old MacBook. This project is a solution to that problem: instead of building the packages locally, I can build them automatically in the cloud & just download them whenever I update my system! diff --git a/docs/content/api.md b/docs/content/api.md new file mode 100644 index 0000000..7c395eb --- /dev/null +++ b/docs/content/api.md @@ -0,0 +1,84 @@ +# API Reference + +All routes that return JSON use the following shape: + +```json +{ + "message": "some message", + "data": {} +} +``` + +Here, data can be any JSON object, so it's not guaranteed to be a struct. + +### `GET ///` + +This route serves the contents of a specific architecture' repo. + +If `` is one of `.db`, `.files`, `.db.tar.gz` or +`.files.tar.gz`, it will serve the respective archive file from the +repository. + +If `` contains `.pkg`, it assumes the request to be for a package +archive & will serve that file from the specific arch-repo's package directory. 
+ +Finally, if none of the above are true, Vieter assumes it to be request for a +package version's desc file & tries to serve this instead. This functionality +is very useful for the build system for checking whether a package needs to be +rebuilt or not. + +### `HEAD ///` + +Behaves the same as the above route, but instead of returning actual data, it +returns either 200 or 404, depending on whether the file exists. This route is +used by the build system to determine whether a package needs to be rebuilt. + +### `POST //publish` + +This route is used to upload packages to a repository. It requires the API +key to be provided using the `X-Api-Key` HTTP header. Vieter will parse the +package's contents & update the repository files accordingely. I find the +easiest way to use this route is using cURL: + +```sh +curl -XPOST -T "path-to-package.pkg.tar.zst" -H "X-API-KEY: your-api-key" https://example.com/somerepo/publish +``` + +Packages are automatically added to the correct arch-repo. If a package type is +`any`, the package is added to the configured `default_arch`, as well as all +already present arch-repos. To prevent unnecessary duplication of package +files, these packages are shared between arch-repos' package directories using +hard links. + +{{< hint info >}} +**Note** +Vieter only supports uploading archives compressed using either gzip, zstd or +xz at the moment. +{{< /hint >}} + +## API + +All API routes require the API key to provided using the `X-Api-Key` header. +Otherwise, they'll return a status code 401. + +### `GET /api/repos` + +Returns the current list of Git repositories. + +### `GET /api/repos/` + +Get the information for the Git repo with the given ID. + +### `POST /api/repos?&&&` + +Adds a new Git repository with the provided URL, Git branch & comma-separated +list of architectures. + +### `DELETE /api/repos/` + +Deletes the Git repository with the provided ID. 
+ +### `PATCH /api/repos/?&&&` + +Updates the provided parameters for the repo with the given ID. All arguments +are optional. diff --git a/docs/content/builder.md b/docs/content/builder.md new file mode 100644 index 0000000..6a1bc3a --- /dev/null +++ b/docs/content/builder.md @@ -0,0 +1,56 @@ +# Builder + +Vieter supports a basic build system that allows you to build the packages +defined using the Git repositories API by running `vieter build`. For +configuration, see [here](/configuration#builder). + +## How it works + +The build system works in two stages. First it pulls down the +`archlinux:latest` image from Docker Hub, runs `pacman -Syu` & configures a +non-root build user. It then creates a new Docker image from this container. +This is to prevent each build having to fully update the container's +repositories. After the image has been created, each repository returned by +`/api/repos` is built sequentially by starting up a new container with the +previously created image as a base. Each container goes through the following steps: + +1. The repository is cloned +2. `makepkg --nobuild --nodeps` is ran to update the `pkgver` variable inside + the `PKGBUILD` file +3. A HEAD request is sent to the Vieter server to check whether the specific + version of the package is already present. If it is, the container exits. +4. `makepkg` is ran with `MAKEFLAGS="-j\$(nproc)` +5. Each produced package archive is uploaded to the Vieter instance's + repository, as defined in the API for that specific Git repo. + +## Cron image + +The Vieter Docker image contains crond & a cron config that runs `vieter build` +every night at 3AM. This value is currently hardcoded, but I wish to change +that down the line (work is in progress). There's also some other caveats you +should be aware of, namely that the image should be run as root & that the +healthcheck will always fail, so you might have to disable it. 
This boils down +to the following docker-compose file: + +```yaml +version: '3' + +services: + cron: + image: 'chewingbever/vieter:dev' + command: crond -f + user: root + + healthcheck: + disable: true + + environment: + - 'VIETER_API_KEY=some-key' + - 'VIETER_ADDRESS=https://example.com' + volumes: + - '/var/run/docker.sock:/var/run/docker.sock' +``` + +Important to note is that the container also requires the host's Docker socket +to be mounted as this is how it spawns the necessary containers, as well as a +change to the container's command. diff --git a/docs/content/configuration.md b/docs/content/configuration.md index 612c505..ded40cb 100644 --- a/docs/content/configuration.md +++ b/docs/content/configuration.md @@ -3,7 +3,7 @@ weight: 20 --- # Configuration -By default, all vieter commands try to read in the TOML file `~/.vieterrc` for +All vieter operations by default try to read in the TOML file `~/.vieterrc` for configuration. The location of this file can be changed by using the `-f` flag. If the above file doesn't exist or you wish to override some of its settings, @@ -17,122 +17,55 @@ If a variable is both present in the config file & as an environment variable, the value in the environment variable is used. {{< hint info >}} -**Note** +**Note** All environment variables can also be provided from a file by appending them -with `_FILE`. This for example allows you to provide the API key from a Docker +with `_FILE`. This for example allows you to provide the API key from a docker secrets file. {{< /hint >}} -## Commands +## Modes -The first argument passed to Vieter determines which command you wish to use. -Each of these can contain subcommands (e.g. `vieter targets list`), but all -subcommands will use the same configuration. Below you can find the -configuration variable required for each command. +The vieter binary can run in several "modes", indicated by the first argument +passed to them. Each mode requires a different configuration. 
-### `vieter server` +### Server -* `port`: HTTP port to run on - * Default: `8000` -* `log_level`: log verbosity level. Value should be one of `FATAL`, `ERROR`, - `WARN`, `INFO` or `DEBUG`. - * Default: `WARN` +* `log_level`: defines how much logs to show. Valid values are one of `FATAL`, + `ERROR`, `WARN`, `INFO` or `DEBUG`. Defaults to `WARN` +* `log_file`: log file to write logs to. Defaults to `vieter.log` in the + current directory. * `pkg_dir`: where Vieter should store the actual package archives. * `data_dir`: where Vieter stores the repositories, log file & database. * `api_key`: the API key to use when authenticating requests. -* `default_arch`: this setting serves two main purposes: - * Packages with architecture `any` are always added to this architecture. - This prevents the server from being confused when an `any` package is - published as the very first package for a repository. - * Targets added without an `arch` value use this value instead. -* `global_schedule`: build schedule for any target that does not have a - schedule defined. For information about this syntax, see - [here](/usage/builds/schedule). - * Default: `0 3` (3AM every night) -* `base_image`: Docker image to use when building a package. Any Pacman-based - distro image should work, as long as `/etc/pacman.conf` is used & - `base-devel` exists in the repositories. Make sure that the image supports - the architecture of your cron daemon. - * Default: `archlinux:base-devel` (only works on `x86_64`). If you require - `aarch64` support, consider using - [`menci/archlinuxarm:base-devel`](https://hub.docker.com/r/menci/archlinuxarm) - ([GitHub](https://github.com/Menci/docker-archlinuxarm)). This is the - image used for the Vieter CI builds. -* `max_log_age`: maximum age of logs (in days). Logs older than this will get - cleaned by the log removal daemon. If set to zero, no logs are ever removed. - The age of logs is determined by the time the build was started. 
- * Default: `0` -* `log_removal_schedule`: cron schedule defining when to clean old logs. - * Default: `0 0` (every day at midnight) +* `default_arch`: architecture to always add packages of arch `any` to. -### `vieter cron` - -* `log_level`: log verbosity level. Value should be one of `FATAL`, `ERROR`, - `WARN`, `INFO` or `DEBUG`. - * Default: `WARN` -* `log_file`: log file to write logs to. - * Default: `vieter.log` (in `data_dir`) -* `address`: *public* URL of the Vieter repository server to build for. From - this server the list of Git repositories is retrieved. All built packages are - published to this server. -* `api_key`: API key of the above server. -* `data_dir`: directory to store log file in. -* `base_image`: Docker image to use when building a package. Any Pacman-based - distro image should work, as long as `/etc/pacman.conf` is used & - `base-devel` exists in the repositories. Make sure that the image supports - the architecture of your cron daemon. - * Default: `archlinux:base-devel` (only works on `x86_64`). If you require - `aarch64` support, consider using - [`menci/archlinuxarm:base-devel`](https://hub.docker.com/r/menci/archlinuxarm) - ([GitHub](https://github.com/Menci/docker-archlinuxarm)). This is the image - used for the Vieter CI builds. -* `max_concurrent_builds`: how many builds to run at the same time. - * Default: `1` -* `api_update_frequency`: how frequently (in minutes) to poll the Vieter - repository server for a new list of Git repositories to build. - * Default: `15` -* `image_rebuild_frequency`: Vieter periodically builds a builder image using - the configured base image. This makes sure build containers do not have to - download a lot of packages when updating their system. This setting defines - how frequently (in minutes) to rebuild this builder image. - * Default: `1440` (every 24 hours) -* `global_schedule`: build schedule for any Git repository that does not have a - schedule defined. 
For information about this syntax, see - [here](/usage/builds/schedule). - * Default: `0 3` (3AM every night) - -### `vieter logs` +### Builder * `api_key`: the API key to use when authenticating requests. -* `address`: Base URL of your Vieter instance, e.g. https://example.com +* `address`: Base your URL of your Vieter instance, e.g. https://example.com +* `base_image`: image to use when building a package. It should be an Archlinux + image. The default if not configured is `archlinux:base-devel`, but this + image only supports arm64. If you require aarch64 support as well, consider + using + [`menci/archlinuxarm:base-devel`](https://hub.docker.com/r/menci/archlinuxarm) + ([GH](https://github.com/Menci/docker-archlinuxarm)) -### `vieter targets` +### Repos * `api_key`: the API key to use when authenticating requests. -* `address`: Base URL of your Vieter instance, e.g. https://example.com -* `base_image`: image to use when building a package using `vieter targets - build`. - * Default: `archlinux:base-devel` +* `address`: Base your URL of your Vieter instance, e.g. https://example.com -### `vieter agent` +### Cron -* `log_level`: log verbosity level. Value should be one of `FATAL`, `ERROR`, - `WARN`, `INFO` or `DEBUG`. - * Default: `WARN` -* `address`: *public* URL of the Vieter repository server to build for. From - this server jobs are retrieved. All built packages are published to this - server. -* `api_key`: API key of the above server. -* `data_dir`: directory to store log file in. -* `max_concurrent_builds`: how many builds to run at the same time. - * Default: `1` -* `polling_frequency`: how often (in seconds) to poll the server for new - builds. Note that the agent might poll more frequently when it's actively - processing builds. -* `image_rebuild_frequency`: Vieter periodically builds images that are then - used as a basis for running build containers. This is to prevent each build - from downloading an entire repository worth of dependencies. 
This setting - defines how frequently (in minutes) to rebuild these images. - * Default: `1440` (every 24 hours) -* `arch`: architecture for which this agent should pull down builds (e.g. - `x86_64`) +* `log_level`: defines how much logs to show. Valid values are one of `FATAL`, + `ERROR`, `WARN`, `INFO` or `DEBUG`. Defaults to `WARN` +* `api_key`: the API key to use when authenticating requests. +* `address`: Base your URL of your Vieter instance, e.g. https://example.com. + This *must* be the publicly facing URL of your Vieter instance. +* `data_dir`: where Vieter stores the log file. +* `base_image`: Docker image from which to create the builder images. +* `max_concurrent_builds`: amount of builds to run at once. +* `api_update_frequency`: how frequenty to check for changes in the repo list. +* `image_rebuild+frequency`: how frequently to rebuild the builder image +* `global_schedule`: cron schedule to use for any repo without an individual + schedule diff --git a/docs/content/installation.md b/docs/content/installation.md index 5b8e2d8..17d3874 100644 --- a/docs/content/installation.md +++ b/docs/content/installation.md @@ -3,112 +3,76 @@ weight: 10 --- # Installation -Vieter consists of a single binary, akin to busybox. The binary's behavior is -determined by its CLI arguments, e.g. `vieter server` starts the repository -server. - -All installation solutions can be configured the same way, -as described [here](/configuration). - ## Docker -Docker images are published to the -[`chewingbever/vieter`](https://hub.docker.com/r/chewingbever/vieter) Docker -Hub repository. You can either pull a release tag (e.g. -`chewingbever/vieter:0.1.0-rc1`), or pull the `chewingbever/vieter:dev` tag. -The latter is updated every time a new commit is pushed to the development -branch. This branch will be the most up to date, but does not give any -guarantees about stability, so beware! +Docker is the recommended way to install vieter. 
The images can be pulled from +[`chewingbever/vieter`](https://hub.docker.com/r/chewingbever/vieter). You can +either pull a release tag (e.g. `chewingbever/vieter:0.1.0-rc1`), or pull the +`chewingbever/vieter:dev` tag. The latter is updated every time a new commit is +pushed to the development branch. This branch will be the most up to date, but +does not give any guarantees about stability, so beware! -Thanks to the single-binary design of Vieter, this image can be used both for -the repository server, the cron daemon and the agent. +The simplest way to run the Docker image is using a plain Docker command: -Below is a minimal compose file to set up both the repository server & a build -agent: - -```yaml -version: '3' - -services: - server: - image: 'chewingbever/vieter:0.5.0-rc.1' - restart: 'always' - - environment: - - 'VIETER_API_KEY=secret' - - 'VIETER_DEFAULT_ARCH=x86_64' - volumes: - - 'data:/data' - - cron: - image: 'chewingbever/vieter:0.5.0-rc.1' - restart: 'always' - # Required to connect to the Docker daemon - user: root - command: 'vieter agent' - - environment: - - 'VIETER_API_KEY=secret' - # MUST be public URL of Vieter repository - - 'VIETER_ADDRESS=https://example.com' - # Architecture for which the agent builds - - 'VIETER_ARCH=x86_64' - - 'VIETER_MAX_CONCURRENT_BUILDS=2' - volumes: - - '/var/run/docker.sock:/var/run/docker.sock' - -volumes: - data: +```sh +docker run \ + --rm \ + -d \ + -v /path/to/data:/data \ + -e VIETER_API_KEY=changeme \ + -e VIETER_DEFAULT_ARCH=x86_64 \ + -p 8000:8000 \ + chewingbever/vieter:dev ``` -If you do not require the build system, the repository server can be used -independently as well. +Here, you should change `/path/to/data` to the path on your host where you want +vieter to store its files. -Of course, Vieter allows a lot more configuration than this. This compose file -is meant as a starting point for setting up your installation. +The default configuration will store everything inside the `/data` directory. 
-{{< hint info >}} -**Note** -Builds are executed on the agent's system using the host's Docker daemon. An -agent for a specific `arch` will only build packages for that specific -architecture. Therefore, if you wish to build packages for both `x86_64` & -`aarch64`, you'll have to deploy two agents, one on each architecture. -Afterwards, any Git repositories enabled for those two architectures will build -on both. -{{< /hint >}} +Inside the container, the Vieter server runs on port 8000. This port should be +exposed to the public accordingely. + +For an overview of how to configure vieter & which environment variables can be +used, see the [Configuration](/configuration) page. ## Binary -On the -[releases](https://git.rustybever.be/vieter-v/vieter/releases) -page, you can find statically compiled binaries for all -released versions. This is the same binary as used inside -the Docker images. +On the [releases](https://git.rustybever.be/Chewing_Bever/vieter/releases) +page, you can find statically compiled binaries for all released versions. You +can download the binary for your host's architecture & run it that way. -## Arch - -I publish both development & release versions of Vieter to my personal -repository, https://arch.r8r.be. Packages are available for `x86_64` & -`aarch64`. To use the repository, add the following to your `pacman.conf`: - -``` -[vieter] -Server = https://arch.r8r.be/$repo/$arch -SigLevel = Optional -``` - -Afterwards, you can update your system & install the `vieter` package for the -latest official release or `vieter-git` for the latest development release. - -### AUR - -If you prefer building the packages locally (or on your own Vieter instance), -there's the [`vieter`](https://aur.archlinux.org/packages/vieter) & -[`vieter-git`](https://aur.archlinux.org/packages/vieter-git) packages on the -AUR. These packages build using the `vlang` compiler package, so I can't -guarantee that a compiler update won't temporarily break them. 
+For more information about configuring the binary, check out the +[Configuration](/configuration) page. ## Building from source -The project [README](https://git.rustybever.be/vieter-v/vieter#building) -contains instructions for building Vieter from source. +Because the project is still in heavy development, it might be useful to build +from source instead. Luckily, this process is very easy. You'll need make, +libarchive & openssl; all of which should be present on an every-day Arch +install. Then, after cloning the repository, you can use the following commands: + +```sh +# Builds the compiler; should usually only be ran once. Vieter compiles using +# the default compiler, but I maintain my own mirror to ensure nothing breaks +# without me knowing. +make v + +# Build vieter +# Alternatively, use `make prod` to build the production build. +make +``` +{{< hint info >}} +**Note** +My version of the V compiler is also available on my Vieter instance, +https://arch.r8r.be. It's in the `vieter` repository, with the package being +named `vieter-v`. The compiler is available for both x86_64 & aarch64. +{{< /hint >}} + +## My Vieter instance + +Besides uploading development Docker images, my CI also publishes x86_64 & +aarch64 packages to my personal Vieter instance, https://arch.r8r.be. If you'd +like, you can use this repository as well by adding it to your Pacman +configuration as described [here](/usage#configuring-pacman). Both the +repository & the package are called `vieter`. diff --git a/docs/content/usage.md b/docs/content/usage.md new file mode 100644 index 0000000..06671b4 --- /dev/null +++ b/docs/content/usage.md @@ -0,0 +1,54 @@ +--- +weight: 30 +--- +# Usage + +## Starting the server + +To start a server, either install it using Docker (see +[Installation](/installation)) or run it locally by executing `vieter +server`. See [Configuration](/configuration) for more information about +configuring the binary. 
+ +## Multiple repositories + +Vieter works with multiple repositories. This means that a single Vieter server +can serve multiple repositories in Pacman. It also automatically divides files +with specific architectures among arch-repos. Arch-repos are the actual +repositories you add to your `/etc/pacman.conf` file. See [Configuring +Pacman](/usage#configuring-pacman) below for more info. + +## Adding packages + +Using Vieter is currently very simple. If you wish to add a package to Vieter, +build it using makepkg & POST that file to the `//publish` endpoint of +your server. This will add the package to the repository. Authentification +requires you to add the API key as the `X-Api-Key` header. + +All of this can be combined into a simple cURL call: + +``` +curl -XPOST -H "X-API-KEY: your-key" -T some-package.pkg.tar.zst https://example.com/somerepo/publish +``` + +`somerepo` is automatically created if it doesn't exist yet. + +## Configuring Pacman + +Configuring Pacman to use a Vieter instance is very simple. In your +`/etc/pacman.conf` file, add the following lines: + +``` +[vieter] +Server = https://example.com/$repo/$arch +SigLevel = Optional +``` + +Here, you see two important placeholder variables. `$repo` is replaced by the +name within the square brackets, which in this case would be `vieter`. `$arch` +is replaced by the output of `uname -m`. Because Vieter supports multiple +repositories & architectures per repository, using this notation makes sure you +always use the correct endpoint for fetching files. + +I recommend placing this below all other repository entries, as the order +decides which repository should be used if there's ever a naming conflict. 
diff --git a/docs/content/usage/_index.md b/docs/content/usage/_index.md deleted file mode 100644 index 1518e5e..0000000 --- a/docs/content/usage/_index.md +++ /dev/null @@ -1,3 +0,0 @@ ---- -weight: 30 ---- diff --git a/docs/content/usage/builds/_index.md b/docs/content/usage/builds/_index.md deleted file mode 100644 index e6c0b1c..0000000 --- a/docs/content/usage/builds/_index.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -weight: 20 ---- -# Building packages - -The automatic build system is what makes Vieter very useful as a replacement -for an AUR helper. It can perodically build packages & publish them to your -personal Vieter repository server, removing the need to build the packages -locally. - -## Adding builds - -Before the cron system can start building your package, you need to add its -info to the system. The Vieter repository server exposes an HTTP API for this -(see the [HTTP API Docs](https://rustybever.be/docs/vieter/api/) for more -info). For ease of use, the Vieter binary contains a CLI interface for -interacting with this API (see [Configuration](/configuration) for -configuration details). The [man -pages](https://rustybever.be/man/vieter/vieter-targets.1.html) describe this in -greater detail, but the basic usage is as follows: - -``` -vieter targets add some-url some-repository -``` - -Here, `some-url` is the URL of the Git repository containing the PKGBUILD. This -URL is passed to `git clone`, meaning the repository should be public. Vieter -expects the same format as an AUR Git repository, so you can directly use AUR -URLs here. Alternatively, you can also provide the URL to a PKGBUILD file -instead. See -[vieter-targets-add(1)](https://rustybever.be/man/vieter/vieter-targets-add.1.html) -for more information. - -`some-repo` is the repository to which the built package archives should be -published. - -The above command intentionally leaves out a few parameters to make the CLI -more useable. 
For information on how to modify all parameters using the CLI, -see -[vieter-targets(1)](https://rustybever.be/man/vieter/vieter-targets.1.html). - -## Reading logs - -The logs of each build are uploaded to the Vieter repository server, along with -information about the exit code of the build container, when the build -started/ended etc. These logs can then be accessed using the [HTTP -API](https://rustybever.be/docs/vieter/api/). - -For ease of use, the logs are also available using some CLI commands; see -[vieter-logs(1)](https://rustybever.be/man/vieter/vieter-logs.1.html) for more -information. diff --git a/docs/content/usage/builds/cleanup.md b/docs/content/usage/builds/cleanup.md deleted file mode 100644 index 724a75f..0000000 --- a/docs/content/usage/builds/cleanup.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -weight: 20 ---- - -# Cleanup - -Vieter stores the logs of every single package build. While this is great for -debugging why builds fail, it also causes an active or long-running Vieter -instance to accumulate thousands of logs. - -To combat this, a log removal daemon can be enabled that periodically removes -old build logs. By starting your server with the `max_log_age` variable (see -[Configuration](/configuration#vieter-server)), a daemon will get enabled that -periodically removes logs older than this setting. By default, this will happen -every day at midnight, but this behavior can be changed using the -`log_removal_schedule` variable. - -{{< hint info >}} -**Note** -The daemon will always run a removal of logs on startup. Therefore, it's -possible the daemon will be *very* active when first enabling this setting. -After the initial surge of logs to remove, it'll calm down again. 
-{{< /hint >}} diff --git a/docs/content/usage/builds/schedule.md b/docs/content/usage/builds/schedule.md deleted file mode 100644 index d3802fd..0000000 --- a/docs/content/usage/builds/schedule.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -weight: 10 ---- - -# Cron schedule syntax - -The Vieter cron daemon uses a subset of the cron expression syntax to schedule -builds. - -## Format - -`a b c d` - -* `a`: minutes -* `b`: hours -* `c`: days -* `d`: months - -An expression consists of two to four sections. If less than four sections are -provided, the parser will append `*` until there are four sections. This means -that `0 3` is the same as `0 3 * *`. - -Each section consists of one or more parts, separated by a comma. Each of these -parts, in turn, can be one of the following (any letters are integers): - -* `*`: allow all possible values. -* `a`: only this value is allowed. -* `*/n`: allow every n-th value. -* `a/n`: allow every n-th value, starting at a in the list. -* `a-b`: allow every value between a and b, bounds included. -* `a-b/n`: allow every n-th value inside the list of values between a and b, - bounds included. - -Each section can consist of as many of these parts as necessary. - -## Examples - -* `0 3`: every day at 03:00AM. -* `0 0 */7`: every 7th day of the month, at midnight. - -## CLI tool - -The Vieter binary contains a command that shows you the next matching times for -a given expression. This can be useful for understanding the syntax. For more -information, see -[vieter-schedule(1)](https://rustybever.be/man/vieter/vieter-schedule.1.html). diff --git a/docs/content/usage/repository.md b/docs/content/usage/repository.md deleted file mode 100644 index 3ddd2fc..0000000 --- a/docs/content/usage/repository.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -weight: 10 ---- -# Pacman repository - -The part of Vieter that users will interact with the most is the Pacman -repository aka `vieter server`. 
- -## Design overview - -A Vieter repository server has support for multiple repositories, with each -repository containing packages for multiple architectures. - -If you wish to use these repositories on your system, add the following to -`/etc/pacman.conf` for each repository you wish to use: - -``` -[repo-name] -Server = https://example.com/$repo/$arch -SigLevel = Optional -``` - -Here, `$repo` and `$arch` are not variables you have to fill in yourself. -Rather, Pacman will substitute these when reading the config file. `$repo` is -replaced by the name between the square brackets (in this case `repo-name`), -and `$arch` is replaced by your system's architecture, e.g. `x86_64`. Of -course, you can also fill in these values manually yourself, e.g. if you wish -to use a different name inside the square brackets. - -Important to note is that, when two repositories contain a package with the -same name, Pacman will choose the one from the repository that's highest up in -the `pacman.conf` file. Therefore, if you know your repository has packages -with the same name as ones from the official repositories, it might be better -to place the repository below the official repositories to avoid overwriting -official packages. - -## Publishing packages - -Packages can be easily published using a single HTTP POST request. Check out -the [HTTP API docs](https://rustybever.be/docs/vieter/api/) for more info on -these routes, including example cURL commands. diff --git a/src/agent/agent.v b/src/agent/agent.v deleted file mode 100644 index 69b9947..0000000 --- a/src/agent/agent.v +++ /dev/null @@ -1,27 +0,0 @@ -module agent - -import log -import os -import util - -const log_file_name = 'vieter.agent.log' - -// agent starts an agent service -pub fn agent(conf Config) ! { - log_level := log.level_from_tag(conf.log_level) or { - return error('Invalid log level. 
The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.') - } - - mut logger := log.Log{ - level: log_level - } - - os.mkdir_all(conf.data_dir) or { util.exit_with_message(1, 'Failed to create data directory.') } - - log_file := os.join_path_single(conf.data_dir, agent.log_file_name) - logger.set_full_logpath(log_file) - logger.log_to_console_too() - - mut d := agent_init(logger, conf) - d.run() -} diff --git a/src/agent/cli.v b/src/agent/cli.v deleted file mode 100644 index 2dee8d6..0000000 --- a/src/agent/cli.v +++ /dev/null @@ -1,31 +0,0 @@ -module agent - -import cli -import conf as vconf - -struct Config { -pub: - log_level string = 'WARN' - // Architecture that the agent represents - arch string - api_key string - address string - data_dir string - max_concurrent_builds int = 1 - polling_frequency int = 30 - image_rebuild_frequency int = 1440 -} - -// cmd returns the cli module that handles the cron daemon. -pub fn cmd() cli.Command { - return cli.Command{ - name: 'agent' - description: 'Start an agent daemon.' - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! - - agent(conf_)! - } - } -} diff --git a/src/agent/daemon.v b/src/agent/daemon.v deleted file mode 100644 index d49b45e..0000000 --- a/src/agent/daemon.v +++ /dev/null @@ -1,197 +0,0 @@ -module agent - -import log -import sync.stdatomic -import build -import models { BuildConfig } -import client -import time -import os - -const ( - build_empty = 0 - build_running = 1 - build_done = 2 -) - -struct AgentDaemon { - logger shared log.Log - conf Config - client client.Client -mut: - images ImageManager - // Atomic variables used to detect when a build has finished; length is - // conf.max_concurrent_builds. This approach is used as the difference - // between a recently finished build and an empty build slot is important - // for knowing whether the agent is currently "active". 
- atomics []u64 - // Channel used to send builds to worker threads - build_channel chan BuildConfig -} - -// agent_init initializes a new agent -fn agent_init(logger log.Log, conf Config) AgentDaemon { - mut d := AgentDaemon{ - logger: logger - client: client.new(conf.address, conf.api_key) - conf: conf - images: new_image_manager(conf.image_rebuild_frequency * 60) - atomics: []u64{len: conf.max_concurrent_builds} - build_channel: chan BuildConfig{cap: conf.max_concurrent_builds} - } - - return d -} - -// run starts the actual agent daemon. This function will run forever. -pub fn (mut d AgentDaemon) run() { - // Spawn worker threads - for builder_index in 0 .. d.conf.max_concurrent_builds { - spawn d.builder_thread(d.build_channel, builder_index) - } - - // This is just so that the very first time the loop is ran, the jobs are - // always polled - mut last_poll_time := time.now().add_seconds(-d.conf.polling_frequency) - mut sleep_time := 0 * time.second - mut finished, mut empty, mut running := 0, 0, 0 - - for { - if sleep_time > 0 { - d.ldebug('Sleeping for ${sleep_time}') - time.sleep(sleep_time) - } - - finished, empty = d.update_atomics() - running = d.conf.max_concurrent_builds - finished - empty - - // No new finished builds and no free slots, so there's nothing to be - // done - if finished + empty == 0 { - sleep_time = 1 * time.second - continue - } - - // Builds have finished, so old builder images might have freed up. - // TODO this might query the docker daemon too frequently. - if finished > 0 { - d.images.clean_old_images() - } - - // The agent will always poll for new jobs after at most - // `polling_frequency` seconds. However, when jobs have finished, the - // agent will also poll for new jobs. This is because jobs are often - // clustered together (especially when mostly using the global cron - // schedule), so there's a much higher chance jobs are available. 
- if finished > 0 || time.now() >= last_poll_time.add_seconds(d.conf.polling_frequency) { - d.ldebug('Polling for new jobs') - - new_configs := d.client.poll_jobs(d.conf.arch, finished + empty) or { - d.lerror('Failed to poll jobs: ${err.msg()}') - - // TODO pick a better delay here - sleep_time = 5 * time.second - continue - } - - d.ldebug('Received ${new_configs.len} jobs') - - last_poll_time = time.now() - - for config in new_configs { - // Make sure a recent build base image is available for - // building the config - if !d.images.up_to_date(config.base_image) { - d.linfo('Building builder image from base image ${config.base_image}') - - // TODO handle this better than to just skip the config - d.images.refresh_image(config.base_image) or { - d.lerror(err.msg()) - continue - } - } - - // It's technically still possible that the build image is - // removed in the very short period between building the - // builder image and starting a build container with it. If - // this happens, fate really just didn't want you to do this - // build. - - d.build_channel <- config - running++ - } - } - - // The agent is not doing anything, so we just wait until the next poll - // time - if running == 0 { - sleep_time = last_poll_time.add_seconds(d.conf.polling_frequency) - time.now() - } else { - sleep_time = 1 * time.second - } - } -} - -// update_atomics checks for each build whether it's completed, and sets it to -// empty again if so. The return value is a tuple `(finished, empty)` where -// `finished` is how many builds were just finished and thus set to empty, and -// `empty` is how many build slots were already empty. The amount of running -// builds can then be calculated by substracting these two values from the -// total allowed concurrent builds. -fn (mut d AgentDaemon) update_atomics() (int, int) { - mut finished := 0 - mut empty := 0 - - for i in 0 .. 
d.atomics.len { - if stdatomic.load_u64(&d.atomics[i]) == agent.build_done { - stdatomic.store_u64(&d.atomics[i], agent.build_empty) - finished++ - } else if stdatomic.load_u64(&d.atomics[i]) == agent.build_empty { - empty++ - } - } - - return finished, empty -} - -// run_build actually starts the build process for a given target. -fn (mut d AgentDaemon) run_build(build_index int, config BuildConfig) { - d.linfo('started build: ${config}') - - // 0 means success, 1 means failure - mut status := 0 - - new_config := BuildConfig{ - ...config - base_image: d.images.get(config.base_image) - } - - res := build.build_config(d.client.address, d.client.api_key, new_config) or { - d.ldebug('build_config error: ${err.msg()}') - status = 1 - - build.BuildResult{} - } - - if status == 0 { - d.linfo('Uploading build logs for ${config}') - - // TODO use the arch value here - build_arch := os.uname().machine - d.client.add_build_log(config.target_id, res.start_time, res.end_time, build_arch, - res.exit_code, res.logs) or { d.lerror('Failed to upload logs for ${config}') } - } else { - d.lwarn('an error occurred during build: ${config}') - } - - stdatomic.store_u64(&d.atomics[build_index], agent.build_done) -} - -// builder_thread is a thread that constantly listens for builds to process -fn (mut d AgentDaemon) builder_thread(ch chan BuildConfig, builder_index int) { - for { - build_config := <-ch or { break } - - d.run_build(builder_index, build_config) - } -} diff --git a/src/agent/images.v b/src/agent/images.v deleted file mode 100644 index 9befc0c..0000000 --- a/src/agent/images.v +++ /dev/null @@ -1,119 +0,0 @@ -module agent - -import time -import docker -import build - -// An ImageManager is a utility that creates builder images from given base -// images, updating these builder images if they've become too old. This -// structure can manage images from any number of base images, paving the way -// for configurable base images per target/repository. 
-struct ImageManager { - max_image_age int [required] -mut: - // For each base image, one or more builder images can exist at the same - // time - images map[string][]string [required] - // For each base image, we track when its newest image was built - timestamps map[string]time.Time [required] -} - -// new_image_manager initializes a new image manager. -fn new_image_manager(max_image_age int) ImageManager { - return ImageManager{ - max_image_age: max_image_age - images: map[string][]string{} - timestamps: map[string]time.Time{} - } -} - -// get returns the name of the newest image for the given base image. Note that -// this function should only be called *after* a first call to `refresh_image`. -pub fn (m &ImageManager) get(base_image string) string { - return m.images[base_image].last() -} - -// up_to_date returns true if the last known builder image exists and is up to -// date. If this function returns true, the last builder image may be used to -// perform a build. -pub fn (mut m ImageManager) up_to_date(base_image string) bool { - if base_image !in m.timestamps - || m.timestamps[base_image].add_seconds(m.max_image_age) <= time.now() { - return false - } - - // It's possible the image has been removed by some external event, so we - // check whether it actually exists as well. - mut dd := docker.new_conn() or { return false } - - defer { - dd.close() or {} - } - - dd.image_inspect(m.images[base_image].last()) or { - // Image doesn't exist, so we stop tracking it - if err.code() == 404 { - m.images[base_image].delete_last() - m.timestamps.delete(base_image) - } - - // If the inspect fails, it's either because the image doesn't exist or - // because of some other error. Either way, we can't know *for certain* - // that the image exists, so we return false. - return false - } - - return true -} - -// refresh_image builds a new builder image from the given base image. This -// function should only be called if `up_to_date` returned false. 
-fn (mut m ImageManager) refresh_image(base_image string) ! { - // TODO use better image tags for built images - new_image := build.create_build_image(base_image) or { - return error('Failed to build builder image from base image ${base_image}') - } - - m.images[base_image] << new_image - m.timestamps[base_image] = time.now() -} - -// clean_old_images removes all older builder images that are no longer in use. -// The function will always leave at least one builder image, namely the newest -// one. -fn (mut m ImageManager) clean_old_images() { - mut dd := docker.new_conn() or { return } - - defer { - dd.close() or {} - } - - mut i := 0 - - for image in m.images.keys() { - i = 0 - - for i < m.images[image].len - 1 { - // For each builder image, we try to remove it by calling the Docker - // API. If the function returns an error or false, that means the image - // wasn't deleted. Therefore, we move the index over. If the function - // returns true, the array's length has decreased by one so we don't - // move the index. 
- dd.image_remove(m.images[image][i]) or { - // The image was removed by an external event - if err.code() == 404 { - m.images[image].delete(i) - } - // The image couldn't be removed, so we need to keep track of - // it - else { - i += 1 - } - - continue - } - - m.images[image].delete(i) - } - } -} diff --git a/src/agent/log.v b/src/agent/log.v deleted file mode 100644 index fcd8373..0000000 --- a/src/agent/log.v +++ /dev/null @@ -1,36 +0,0 @@ -module agent - -// lfatal create a log message with the fatal level -pub fn (mut d AgentDaemon) lfatal(msg string) { - lock d.logger { - d.logger.fatal(msg) - } -} - -// lerror create a log message with the error level -pub fn (mut d AgentDaemon) lerror(msg string) { - lock d.logger { - d.logger.error(msg) - } -} - -// lwarn create a log message with the warn level -pub fn (mut d AgentDaemon) lwarn(msg string) { - lock d.logger { - d.logger.warn(msg) - } -} - -// linfo create a log message with the info level -pub fn (mut d AgentDaemon) linfo(msg string) { - lock d.logger { - d.logger.info(msg) - } -} - -// ldebug create a log message with the debug level -pub fn (mut d AgentDaemon) ldebug(msg string) { - lock d.logger { - d.logger.debug(msg) - } -} diff --git a/src/archive.c.v b/src/archive.c.v index a40cdef..1f0d1dd 100644 --- a/src/archive.c.v +++ b/src/archive.c.v @@ -4,7 +4,7 @@ #include "archive.h" -pub struct C.archive {} +struct C.archive {} // Create a new archive struct for reading fn C.archive_read_new() &C.archive @@ -71,7 +71,7 @@ fn C.archive_filter_code(&C.archive, int) int #include "archive_entry.h" -pub struct C.archive_entry {} +struct C.archive_entry {} // Create a new archive_entry struct fn C.archive_entry_new() &C.archive_entry diff --git a/src/build/build.v b/src/build/build.v index b864792..0a978aa 100644 --- a/src/build/build.v +++ b/src/build/build.v @@ -4,30 +4,19 @@ import docker import encoding.base64 import time import os -import strings -import util -import models { BuildConfig, Target } 
+import db +import client -const ( - container_build_dir = '/build' - build_image_repo = 'vieter-build' - // Contents of PATH variable in build containers - path_dirs = ['/sbin', '/bin', '/usr/sbin', '/usr/bin', '/usr/local/sbin', - '/usr/local/bin', '/usr/bin/site_perl', '/usr/bin/vendor_perl', '/usr/bin/core_perl'] -) +const container_build_dir = '/build' + +const build_image_repo = 'vieter-build' // create_build_image creates a builder image given some base image which can // then be used to build & package Arch images. It mostly just updates the // system, install some necessary packages & creates a non-root user to run // makepkg with. The base image should be some Linux distribution that uses // Pacman as its package manager. -pub fn create_build_image(base_image string) !string { - mut dd := docker.new_conn()! - - defer { - dd.close() or {} - } - +pub fn create_build_image(base_image string) ?string { commands := [ // Update repos & install required packages 'pacman -Syu --needed --noconfirm base-devel git' @@ -45,7 +34,7 @@ pub fn create_build_image(base_image string) !string { c := docker.NewContainer{ image: base_image - env: ['BUILD_SCRIPT=${cmds_str}'] + env: ['BUILD_SCRIPT=$cmds_str'] entrypoint: ['/bin/sh', '-c'] cmd: ['echo \$BUILD_SCRIPT | base64 -d | /bin/sh -e'] } @@ -57,15 +46,14 @@ pub fn create_build_image(base_image string) !string { image_tag := if image_parts.len > 1 { image_parts[1] } else { 'latest' } // We pull the provided image - dd.image_pull(image_name, image_tag)! + docker.pull_image(image_name, image_tag) ? - id := dd.container_create(c)!.id - // id := docker.create_container(c)! - dd.container_start(id)! + id := docker.create_container(c) ? + docker.start_container(id) ? // This loop waits until the container has stopped, so we can remove it after for { - data := dd.container_inspect(id)! + data := docker.inspect_container(id) ? 
if !data.state.running { break @@ -79,8 +67,8 @@ pub fn create_build_image(base_image string) !string { // TODO also add the base image's name into the image name to prevent // conflicts. tag := time.sys_mono_now().str() - image := dd.image_from_container(id, 'vieter-build', tag)! - dd.container_remove(id)! + image := docker.create_image_from_container(id, 'vieter-build', tag) ? + docker.remove_container(id) ? return image.id } @@ -93,77 +81,78 @@ pub: logs string } -// build_target builds the given target. Internally it calls `build_config`. -pub fn build_target(address string, api_key string, base_image_id string, target &Target, force bool, timeout int) !BuildResult { - config := target.as_build_config(base_image_id, force, timeout) - - return build_config(address, api_key, config) -} - -// build_config builds, packages & publishes a given Arch package based on the -// provided target. The base image ID should be of an image previously created +// build_repo builds, packages & publishes a given Arch package based on the +// provided GitRepo. The base image ID should be of an image previously created // by create_build_image. It returns the logs of the container. -pub fn build_config(address string, api_key string, config BuildConfig) !BuildResult { - mut dd := docker.new_conn()! - - defer { - dd.close() or {} - } - +pub fn build_repo(address string, api_key string, base_image_id string, repo &db.GitRepo) ?BuildResult { build_arch := os.uname().machine - build_script := create_build_script(address, config, build_arch) - // We convert the build script into a base64 string, which then gets passed - // to the container as an env var - base64_script := base64.encode_str(build_script) + // TODO what to do with PKGBUILDs that build multiple packages? 
+ commands := [ + 'git clone --single-branch --depth 1 --branch $repo.branch $repo.url repo', + 'cd repo', + 'makepkg --nobuild --nodeps', + 'source PKGBUILD', + // The build container checks whether the package is already + // present on the server + 'curl -s --head --fail $address/$repo.repo/$build_arch/\$pkgname-\$pkgver-\$pkgrel && exit 0', + 'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $address/$repo.repo/publish; done', + ] + + // We convert the list of commands into a base64 string, which then gets + // passed to the container as an env var + cmds_str := base64.encode_str(commands.join('\n')) c := docker.NewContainer{ - image: '${config.base_image}' - env: [ - 'BUILD_SCRIPT=${base64_script}', - 'API_KEY=${api_key}', - // `archlinux:base-devel` does not correctly set the path variable, - // causing certain builds to fail. This fixes it. - 'PATH=${build.path_dirs.join(':')}', - ] + image: '$base_image_id' + env: ['BUILD_SCRIPT=$cmds_str', 'API_KEY=$api_key'] entrypoint: ['/bin/sh', '-c'] cmd: ['echo \$BUILD_SCRIPT | base64 -d | /bin/bash -e'] work_dir: '/build' - user: '0:0' + user: 'builder:builder' } - id := dd.container_create(c)!.id - dd.container_start(id)! + id := docker.create_container(c) ? + docker.start_container(id) ? - mut data := dd.container_inspect(id)! - start_time := time.now() + mut data := docker.inspect_container(id) ? // This loop waits until the container has stopped, so we can remove it after for data.state.running { - if time.now() - start_time > config.timeout * time.second { - dd.container_kill(id)! - dd.container_remove(id)! - - return error('Build killed due to timeout (${config.timeout}s)') - } - time.sleep(1 * time.second) - data = dd.container_inspect(id)! + data = docker.inspect_container(id) ? } - mut logs_stream := dd.container_get_logs(id)! + logs := docker.get_container_logs(id) ? 
- // Read in the entire stream - mut logs_builder := strings.new_builder(10 * 1024) - util.reader_to_writer(mut logs_stream, mut logs_builder)! - - dd.container_remove(id)! + docker.remove_container(id) ? return BuildResult{ start_time: data.state.start_time end_time: data.state.end_time exit_code: data.state.exit_code - logs: logs_builder.str() + logs: logs } } + +// build builds every Git repo in the server's list. +fn build(conf Config, repo_id int) ? { + c := client.new(conf.address, conf.api_key) + repo := c.get_git_repo(repo_id) ? + + build_arch := os.uname().machine + + println('Creating base image...') + image_id := create_build_image(conf.base_image) ? + + println('Running build...') + res := build_repo(conf.address, conf.api_key, image_id, repo) ? + + println('Removing build image...') + docker.remove_image(image_id) ? + + println('Uploading logs to Vieter...') + c.add_build_log(repo.id, res.start_time, res.end_time, build_arch, res.exit_code, + res.logs) ? +} diff --git a/src/build/cli.v b/src/build/cli.v new file mode 100644 index 0000000..5247e87 --- /dev/null +++ b/src/build/cli.v @@ -0,0 +1,29 @@ +module build + +import cli +import env + +pub struct Config { +pub: + api_key string + address string + base_image string = 'archlinux:base-devel' +} + +// cmd returns the cli submodule that handles the build process +pub fn cmd() cli.Command { + return cli.Command{ + name: 'build' + required_args: 1 + usage: 'id' + description: 'Build the repository with the given ID.' + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? + + id := cmd.args[0].int() + + build(conf, id) ? 
+ } + } +} diff --git a/src/build/queue.v b/src/build/queue.v deleted file mode 100644 index bc4db9d..0000000 --- a/src/build/queue.v +++ /dev/null @@ -1,219 +0,0 @@ -module build - -import models { BuildConfig, Target } -import cron -import time -import datatypes { MinHeap } -import util - -struct BuildJob { -pub mut: - // Time at which this build job was created/queued - created time.Time - // Next timestamp from which point this job is allowed to be executed - timestamp time.Time - // Required for calculating next timestamp after having pop'ed a job - ce &cron.Expression = unsafe { nil } - // Actual build config sent to the agent - config BuildConfig - // Whether this is a one-time job - single bool -} - -// Allows BuildJob structs to be sorted according to their timestamp in -// MinHeaps -fn (r1 BuildJob) < (r2 BuildJob) bool { - return r1.timestamp < r2.timestamp -} - -// The build job queue is responsible for managing the list of scheduled builds -// for each architecture. Agents receive jobs from this queue. -pub struct BuildJobQueue { - // Schedule to use for targets without explicitely defined cron expression - default_schedule &cron.Expression - // Base image to use for targets without defined base image - default_base_image string - // After how many minutes a build should be forcefully cancelled - default_build_timeout int -mut: - mutex shared util.Dummy - // For each architecture, a priority queue is tracked - queues map[string]MinHeap[BuildJob] - // When a target is removed from the server or edited, its previous build - // configs will be invalid. This map allows for those to be simply skipped - // by ignoring any build configs created before this timestamp. 
- invalidated map[int]time.Time -} - -// new_job_queue initializes a new job queue -pub fn new_job_queue(default_schedule &cron.Expression, default_base_image string, default_build_timeout int) BuildJobQueue { - return BuildJobQueue{ - default_schedule: unsafe { default_schedule } - default_base_image: default_base_image - default_build_timeout: default_build_timeout - invalidated: map[int]time.Time{} - } -} - -// insert_all executes insert for each architecture of the given Target. -pub fn (mut q BuildJobQueue) insert_all(target Target) ! { - for arch in target.arch { - q.insert(target: target, arch: arch.value)! - } -} - -[params] -pub struct InsertConfig { - target Target [required] - arch string [required] - single bool - force bool - now bool -} - -// insert a new target's job into the queue for the given architecture. This -// job will then be endlessly rescheduled after being pop'ed, unless removed -// explicitely. -pub fn (mut q BuildJobQueue) insert(input InsertConfig) ! { - lock q.mutex { - if input.arch !in q.queues { - q.queues[input.arch] = MinHeap[BuildJob]{} - } - - mut job := BuildJob{ - created: time.now() - single: input.single - config: input.target.as_build_config(q.default_base_image, input.force, q.default_build_timeout) - } - - if !input.now { - ce := if input.target.schedule != '' { - cron.parse_expression(input.target.schedule) or { - return error("Error while parsing cron expression '${input.target.schedule}' (id ${input.target.id}): ${err.msg()}") - } - } else { - q.default_schedule - } - - job.timestamp = ce.next_from_now() - job.ce = ce - } else { - job.timestamp = time.now() - } - - q.queues[input.arch].insert(job) - } -} - -// reschedule the given job by calculating the next timestamp and re-adding it -// to its respective queue. This function is called by the pop functions -// *after* having pop'ed the job. 
-fn (mut q BuildJobQueue) reschedule(job BuildJob, arch string) { - new_timestamp := job.ce.next_from_now() - - new_job := BuildJob{ - ...job - created: time.now() - timestamp: new_timestamp - } - - q.queues[arch].insert(new_job) -} - -// pop_invalid pops all invalid jobs. -fn (mut q BuildJobQueue) pop_invalid(arch string) { - for { - job := q.queues[arch].peek() or { return } - - if job.config.target_id in q.invalidated - && job.created < q.invalidated[job.config.target_id] { - // This pop *should* never fail according to the source code - q.queues[arch].pop() or {} - } else { - break - } - } -} - -// peek shows the first job for the given architecture that's ready to be -// executed, if present. -pub fn (mut q BuildJobQueue) peek(arch string) ?BuildJob { - // Even peek requires a write lock, because pop_invalid can modify the data - // structure - lock q.mutex { - if arch !in q.queues { - return none - } - - q.pop_invalid(arch) - job := q.queues[arch].peek() or { return none } - - if job.timestamp < time.now() { - return job - } - } - - return none -} - -// pop removes the first job for the given architecture that's ready to be -// executed from the queue and returns it, if present. -pub fn (mut q BuildJobQueue) pop(arch string) ?BuildJob { - lock q.mutex { - if arch !in q.queues { - return none - } - - q.pop_invalid(arch) - mut job := q.queues[arch].peek() or { return none } - - if job.timestamp < time.now() { - job = q.queues[arch].pop() or { return none } - - if !job.single { - q.reschedule(job, arch) - } - - return job - } - } - - return none -} - -// pop_n tries to pop at most n available jobs for the given architecture. 
-pub fn (mut q BuildJobQueue) pop_n(arch string, n int) []BuildJob { - lock q.mutex { - if arch !in q.queues { - return [] - } - - mut out := []BuildJob{} - - for out.len < n { - q.pop_invalid(arch) - mut job := q.queues[arch].peek() or { break } - - if job.timestamp < time.now() { - job = q.queues[arch].pop() or { break } - - if !job.single { - q.reschedule(job, arch) - } - - out << job - } else { - break - } - } - - return out - } - - return [] -} - -// invalidate a target's old build jobs. -pub fn (mut q BuildJobQueue) invalidate(target_id int) { - q.invalidated[target_id] = time.now() -} diff --git a/src/build/scripts/git.sh b/src/build/scripts/git.sh deleted file mode 100644 index 2644243..0000000 --- a/src/build/scripts/git.sh +++ /dev/null @@ -1,20 +0,0 @@ -echo -e '+ echo -e '\''[vieter]\\nServer = https://example.com/$repo/$arch\\nSigLevel = Optional'\'' >> /etc/pacman.conf' -echo -e '[vieter]\nServer = https://example.com/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf -echo -e '+ pacman -Syu --needed --noconfirm' -pacman -Syu --needed --noconfirm -echo -e '+ su builder' -su builder -echo -e '+ git clone --single-branch --depth 1 '\''https://examplerepo.com'\'' repo' -git clone --single-branch --depth 1 'https://examplerepo.com' repo -echo -e '+ cd repo' -cd repo -echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm' -makepkg --nobuild --syncdeps --needed --noconfirm -echo -e '+ source PKGBUILD' -source PKGBUILD -echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0' -curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0 -echo -e '+ [ "$(id -u)" == 0 ] && exit 0' -[ "$(id -u)" == 0 ] && exit 0 -echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done' -MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract 
&& for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done diff --git a/src/build/scripts/git_branch.sh b/src/build/scripts/git_branch.sh deleted file mode 100644 index 9f36bdc..0000000 --- a/src/build/scripts/git_branch.sh +++ /dev/null @@ -1,20 +0,0 @@ -echo -e '+ echo -e '\''[vieter]\\nServer = https://example.com/$repo/$arch\\nSigLevel = Optional'\'' >> /etc/pacman.conf' -echo -e '[vieter]\nServer = https://example.com/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf -echo -e '+ pacman -Syu --needed --noconfirm' -pacman -Syu --needed --noconfirm -echo -e '+ su builder' -su builder -echo -e '+ git clone --single-branch --depth 1 --branch main '\''https://examplerepo.com'\'' repo' -git clone --single-branch --depth 1 --branch main 'https://examplerepo.com' repo -echo -e '+ cd repo' -cd repo -echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm' -makepkg --nobuild --syncdeps --needed --noconfirm -echo -e '+ source PKGBUILD' -source PKGBUILD -echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0' -curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0 -echo -e '+ [ "$(id -u)" == 0 ] && exit 0' -[ "$(id -u)" == 0 ] && exit 0 -echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done' -MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done diff --git a/src/build/scripts/git_path.sh b/src/build/scripts/git_path.sh deleted file mode 100644 index 65b7fb9..0000000 --- a/src/build/scripts/git_path.sh +++ /dev/null @@ -1,20 +0,0 @@ -echo -e '+ echo -e '\''[vieter]\\nServer = https://example.com/$repo/$arch\\nSigLevel = Optional'\'' >> 
/etc/pacman.conf' -echo -e '[vieter]\nServer = https://example.com/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf -echo -e '+ pacman -Syu --needed --noconfirm' -pacman -Syu --needed --noconfirm -echo -e '+ su builder' -su builder -echo -e '+ git clone --single-branch --depth 1 '\''https://examplerepo.com'\'' repo' -git clone --single-branch --depth 1 'https://examplerepo.com' repo -echo -e '+ cd '\''repo/example/path'\''' -cd 'repo/example/path' -echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm' -makepkg --nobuild --syncdeps --needed --noconfirm -echo -e '+ source PKGBUILD' -source PKGBUILD -echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0' -curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0 -echo -e '+ [ "$(id -u)" == 0 ] && exit 0' -[ "$(id -u)" == 0 ] && exit 0 -echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done' -MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done diff --git a/src/build/scripts/git_path_spaces.sh b/src/build/scripts/git_path_spaces.sh deleted file mode 100644 index b632b91..0000000 --- a/src/build/scripts/git_path_spaces.sh +++ /dev/null @@ -1,20 +0,0 @@ -echo -e '+ echo -e '\''[vieter]\\nServer = https://example.com/$repo/$arch\\nSigLevel = Optional'\'' >> /etc/pacman.conf' -echo -e '[vieter]\nServer = https://example.com/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf -echo -e '+ pacman -Syu --needed --noconfirm' -pacman -Syu --needed --noconfirm -echo -e '+ su builder' -su builder -echo -e '+ git clone --single-branch --depth 1 '\''https://examplerepo.com'\'' repo' -git clone --single-branch --depth 1 'https://examplerepo.com' repo -echo -e '+ 
cd '\''repo/example/path with spaces'\''' -cd 'repo/example/path with spaces' -echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm' -makepkg --nobuild --syncdeps --needed --noconfirm -echo -e '+ source PKGBUILD' -source PKGBUILD -echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0' -curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0 -echo -e '+ [ "$(id -u)" == 0 ] && exit 0' -[ "$(id -u)" == 0 ] && exit 0 -echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done' -MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done diff --git a/src/build/scripts/url.sh b/src/build/scripts/url.sh deleted file mode 100644 index 2d27de7..0000000 --- a/src/build/scripts/url.sh +++ /dev/null @@ -1,22 +0,0 @@ -echo -e '+ echo -e '\''[vieter]\\nServer = https://example.com/$repo/$arch\\nSigLevel = Optional'\'' >> /etc/pacman.conf' -echo -e '[vieter]\nServer = https://example.com/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf -echo -e '+ pacman -Syu --needed --noconfirm' -pacman -Syu --needed --noconfirm -echo -e '+ su builder' -su builder -echo -e '+ mkdir repo' -mkdir repo -echo -e '+ curl -o repo/PKGBUILD -L '\''https://examplerepo.com'\''' -curl -o repo/PKGBUILD -L 'https://examplerepo.com' -echo -e '+ cd repo' -cd repo -echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm' -makepkg --nobuild --syncdeps --needed --noconfirm -echo -e '+ source PKGBUILD' -source PKGBUILD -echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0' -curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0 -echo -e '+ [ "$(id -u)" == 0 ] && 
exit 0' -[ "$(id -u)" == 0 ] && exit 0 -echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done' -MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done diff --git a/src/build/shell.v b/src/build/shell.v deleted file mode 100644 index f32cd08..0000000 --- a/src/build/shell.v +++ /dev/null @@ -1,93 +0,0 @@ -module build - -import models { BuildConfig } - -// escape_shell_string escapes any characters that could be interpreted -// incorrectly by a shell. The resulting value should be safe to use inside an -// echo statement. -fn escape_shell_string(s string) string { - return s.replace(r'\', r'\\').replace("'", r"'\''") -} - -// echo_commands takes a list of shell commands & prepends each one with -// an echo call displaying said command. -pub fn echo_commands(cmds []string) []string { - mut out := []string{cap: 2 * cmds.len} - - for cmd in cmds { - out << "echo -e '+ ${escape_shell_string(cmd)}'" - out << cmd - } - - return out -} - -// create_build_script generates a shell script that builds a given Target. -fn create_build_script(address string, config BuildConfig, build_arch string) string { - repo_url := '${address}/${config.repo}' - - mut commands := [ - // This will later be replaced by a proper setting for changing the - // mirrorlist - "echo -e '[${config.repo}]\\nServer = ${address}/\$repo/\$arch\\nSigLevel = Optional' >> /etc/pacman.conf" - // We need to update the package list of the repo we just added above. - // This should however not pull in a lot of packages as long as the - // builder image is rebuilt frequently. 
- 'pacman -Syu --needed --noconfirm', - // makepkg can't run as root - 'su builder', - ] - - commands << match config.kind { - 'git' { - if config.branch == '' { - [ - "git clone --single-branch --depth 1 '${config.url}' repo", - ] - } else { - [ - "git clone --single-branch --depth 1 --branch ${config.branch} '${config.url}' repo", - ] - } - } - 'url' { - [ - 'mkdir repo', - "curl -o repo/PKGBUILD -L '${config.url}'", - ] - } - else { - panic("Invalid kind. This shouldn't be possible.") - } - } - - commands << if config.path != '' { - "cd 'repo/${config.path}'" - } else { - 'cd repo' - } - - commands << [ - 'makepkg --nobuild --syncdeps --needed --noconfirm', - 'source PKGBUILD', - ] - - if !config.force { - // The build container checks whether the package is already present on - // the server. - commands << [ - 'curl -s --head --fail ${repo_url}/${build_arch}/\$pkgname-\$pkgver-\$pkgrel && exit 0', - // If the above curl command succeeds, we don't need to rebuild the - // package. However, because we're in a su shell, the exit command will - // drop us back into the root shell. Therefore, we must check whether - // we're in root so we don't proceed. 
- '[ "\$(id -u)" == 0 ] && exit 0', - ] - } - - commands << [ - 'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" ${repo_url}/publish; done', - ] - - return echo_commands(commands).join('\n') -} diff --git a/src/build/shell_test.v b/src/build/shell_test.v deleted file mode 100644 index e23d964..0000000 --- a/src/build/shell_test.v +++ /dev/null @@ -1,75 +0,0 @@ -module build - -import models { BuildConfig } - -fn test_create_build_script_git() { - config := BuildConfig{ - target_id: 1 - kind: 'git' - url: 'https://examplerepo.com' - repo: 'vieter' - base_image: 'not-used:latest' - } - - build_script := create_build_script('https://example.com', config, 'x86_64') - expected := $embed_file('scripts/git.sh') - - assert build_script == expected.to_string().trim_space() -} - -fn test_create_build_script_git_path() { - mut config := BuildConfig{ - target_id: 1 - kind: 'git' - url: 'https://examplerepo.com' - repo: 'vieter' - path: 'example/path' - base_image: 'not-used:latest' - } - - mut build_script := create_build_script('https://example.com', config, 'x86_64') - mut expected := $embed_file('scripts/git_path.sh') - - assert build_script == expected.to_string().trim_space() - - config = BuildConfig{ - ...config - path: 'example/path with spaces' - } - - build_script = create_build_script('https://example.com', config, 'x86_64') - expected = $embed_file('scripts/git_path_spaces.sh') - - assert build_script == expected.to_string().trim_space() -} - -fn test_create_build_script_git_branch() { - config := BuildConfig{ - target_id: 1 - kind: 'git' - url: 'https://examplerepo.com' - branch: 'main' - repo: 'vieter' - base_image: 'not-used:latest' - } - - build_script := create_build_script('https://example.com', config, 'x86_64') - expected := $embed_file('scripts/git_branch.sh') - - assert build_script == expected.to_string().trim_space() -} - -fn 
test_create_build_script_url() { - config := BuildConfig{ - target_id: 1 - kind: 'url' - url: 'https://examplerepo.com' - repo: 'vieter' - base_image: 'not-used:latest' - } - - build_script := create_build_script('https://example.com', config, 'x86_64') - expected := $embed_file('scripts/url.sh') - - assert build_script == expected.to_string().trim_space() -} diff --git a/src/client/client.v b/src/client/client.v index 7d57e92..25224a5 100644 --- a/src/client/client.v +++ b/src/client/client.v @@ -2,7 +2,7 @@ module client import net.http { Method } import net.urllib -import web.response { Response, new_data_response } +import response { Response } import json pub struct Client { @@ -21,70 +21,47 @@ pub fn new(address string, api_key string) Client { // send_request_raw sends an HTTP request, returning the http.Response object. // It encodes the params so that they're safe to pass as HTTP query parameters. -fn (c &Client) send_request_raw(method Method, url string, params map[string]string, body string) !http.Response { - mut full_url := '${c.address}${url}' +fn (c &Client) send_request_raw(method Method, url string, params map[string]string, body string) ?http.Response { + mut full_url := '$c.address$url' if params.len > 0 { mut params_escaped := map[string]string{} // Escape each query param for k, v in params { - // An empty parameter should be the same as not providing it at all params_escaped[k] = urllib.query_escape(v) } - params_str := params_escaped.keys().map('${it}=${params_escaped[it]}').join('&') + params_str := params_escaped.keys().map('$it=${params[it]}').join('&') - full_url = '${full_url}?${params_str}' + full_url = '$full_url?$params_str' } - // Looking at the source code, this function doesn't actually fail, so I'm - // not sure why it returns an optional - mut req := http.new_request(method, full_url, body) or { return error('') } - req.add_custom_header('X-Api-Key', c.api_key)! + mut req := http.new_request(method, full_url, body) ? 
+ req.add_custom_header('X-Api-Key', c.api_key) ? - res := req.do()! + res := req.do() ? return res } // send_request just calls send_request_with_body with an empty body. -fn (c &Client) send_request[T](method Method, url string, params map[string]string) !Response[T] { - return c.send_request_with_body[T](method, url, params, '') +fn (c &Client) send_request(method Method, url string, params map[string]string) ?Response { + return c.send_request_with_body(method, url, params, '') } // send_request_with_body calls send_request_raw_response & parses its // output as a Response object. -fn (c &Client) send_request_with_body[T](method Method, url string, params map[string]string, body string) !Response[T] { - res := c.send_request_raw(method, url, params, body)! - status := res.status() - - // Non-successful requests are expected to return either an empty body or - // Response - if status.is_error() { - // A non-successful status call will have an empty body - if res.body == '' { - return error('Error ${res.status_code} (${status.str()}): (empty response)') - } - - data := json.decode(Response[string], res.body)! - - return error('Status ${res.status_code} (${status.str()}): ${data.message}') - } - - // Just return an empty successful response - if res.body == '' { - return new_data_response(T{}) - } - - data := json.decode(Response[T], res.body)! +fn (c &Client) send_request_with_body(method Method, url string, params map[string]string, body string) ?Response { + res_text := c.send_request_raw_response(method, url, params, body) ? + data := json.decode(Response, res_text) ? return data } // send_request_raw_response returns the raw text response for an HTTP request. -fn (c &Client) send_request_raw_response(method Method, url string, params map[string]string, body string) !string { - res := c.send_request_raw(method, url, params, body)! 
+fn (c &Client) send_request_raw_response(method Method, url string, params map[string]string, body string) ?string { + res := c.send_request_raw(method, url, params, body) ? - return res.body + return res.text } diff --git a/src/client/git.v b/src/client/git.v new file mode 100644 index 0000000..b09d4c2 --- /dev/null +++ b/src/client/git.v @@ -0,0 +1,51 @@ +module client + +import db { GitRepo } +import net.http { Method } +import response { Response } + +// get_git_repos returns the current list of repos. +pub fn (c &Client) get_git_repos() ?[]GitRepo { + data := c.send_request<[]GitRepo>(Method.get, '/api/repos', {}) ? + + return data.data +} + +// get_git_repo returns the repo for a specific ID. +pub fn (c &Client) get_git_repo(id int) ?GitRepo { + data := c.send_request(Method.get, '/api/repos/$id', {}) ? + + return data.data +} + +// add_git_repo adds a new repo to the server. +pub fn (c &Client) add_git_repo(url string, branch string, repo string, arch []string) ?Response { + mut params := { + 'url': url + 'branch': branch + 'repo': repo + } + + if arch.len > 0 { + params['arch'] = arch.join(',') + } + + data := c.send_request(Method.post, '/api/repos', params) ? + + return data +} + +// remove_git_repo removes the repo with the given ID from the server. +pub fn (c &Client) remove_git_repo(id int) ?Response { + data := c.send_request(Method.delete, '/api/repos/$id', {}) ? + + return data +} + +// patch_git_repo sends a PATCH request to the given repo with the params as +// payload. +pub fn (c &Client) patch_git_repo(id int, params map[string]string) ?Response { + data := c.send_request(Method.patch, '/api/repos/$id', params) ? + + return data +} diff --git a/src/client/jobs.v b/src/client/jobs.v deleted file mode 100644 index ddb9e2d..0000000 --- a/src/client/jobs.v +++ /dev/null @@ -1,23 +0,0 @@ -module client - -import models { BuildConfig } - -// poll_jobs requests a list of new build jobs from the server. 
-pub fn (c &Client) poll_jobs(arch string, max int) ![]BuildConfig { - data := c.send_request[[]BuildConfig](.get, '/api/v1/jobs/poll', { - 'arch': arch - 'max': max.str() - })! - - return data.data -} - -// queue_job adds a new one-time build job for the given target to the job -// queue. -pub fn (c &Client) queue_job(target_id int, arch string, force bool) ! { - c.send_request[string](.post, '/api/v1/jobs/queue', { - 'target': target_id.str() - 'arch': arch - 'force': force.str() - })! -} diff --git a/src/client/logs.v b/src/client/logs.v index ff6b7c5..8c53213 100644 --- a/src/client/logs.v +++ b/src/client/logs.v @@ -1,47 +1,53 @@ module client -import models { BuildLog, BuildLogFilter } -import web.response { Response } +import db { BuildLog } +import net.http { Method } +import response { Response } import time // get_build_logs returns all build logs. -pub fn (c &Client) get_build_logs(filter BuildLogFilter) ![]BuildLog { - params := models.params_from(filter) - data := c.send_request[[]BuildLog](.get, '/api/v1/logs', params)! +pub fn (c &Client) get_build_logs() ?Response<[]BuildLog> { + data := c.send_request<[]BuildLog>(Method.get, '/api/logs', {}) ? - return data.data + return data +} + +// get_build_logs_for_repo returns all build logs for a given repo. +pub fn (c &Client) get_build_logs_for_repo(repo_id int) ?Response<[]BuildLog> { + params := { + 'repo': repo_id.str() + } + + data := c.send_request<[]BuildLog>(Method.get, '/api/logs', params) ? + + return data } // get_build_log returns a specific build log. -pub fn (c &Client) get_build_log(id int) !BuildLog { - data := c.send_request[BuildLog](.get, '/api/v1/logs/${id}', {})! +pub fn (c &Client) get_build_log(id int) ?Response { + data := c.send_request(Method.get, '/api/logs/$id', {}) ? - return data.data + return data } // get_build_log_content returns the contents of the build log file. 
-pub fn (c &Client) get_build_log_content(id int) !string { - data := c.send_request_raw_response(.get, '/api/v1/logs/${id}/content', {}, '')! +pub fn (c &Client) get_build_log_content(id int) ?string { + data := c.send_request_raw_response(Method.get, '/api/logs/$id/content', {}, '') ? return data } // add_build_log adds a new build log to the server. -pub fn (c &Client) add_build_log(target_id int, start_time time.Time, end_time time.Time, arch string, exit_code int, content string) !Response[int] { +pub fn (c &Client) add_build_log(repo_id int, start_time time.Time, end_time time.Time, arch string, exit_code int, content string) ?Response { params := { - 'target': target_id.str() - 'startTime': start_time.unix_time().str() - 'endTime': end_time.unix_time().str() + 'repo': repo_id.str() + 'startTime': start_time.str() + 'endTime': end_time.str() 'arch': arch 'exitCode': exit_code.str() } - data := c.send_request_with_body[int](.post, '/api/v1/logs', params, content)! + data := c.send_request_with_body(Method.post, '/api/logs', params, content) ? return data } - -// remove_build_log removes the build log with the given id from the server. -pub fn (c &Client) remove_build_log(id int) ! { - c.send_request[string](.delete, '/api/v1/logs/${id}', {})! -} diff --git a/src/client/repos.v b/src/client/repos.v deleted file mode 100644 index dff5d90..0000000 --- a/src/client/repos.v +++ /dev/null @@ -1,16 +0,0 @@ -module client - -// remove_repo removes an entire repository. -pub fn (c &Client) remove_repo(repo string) ! { - c.send_request[string](.delete, '/${repo}', {})! -} - -// remove_arch_repo removes an entire arch-repo. -pub fn (c &Client) remove_arch_repo(repo string, arch string) ! { - c.send_request[string](.delete, '/${repo}/${arch}', {})! -} - -// remove_package removes a single package from the given arch-repo. -pub fn (c &Client) remove_package(repo string, arch string, pkgname string) ! { - c.send_request[string](.delete, '/${repo}/${arch}/${pkgname}', {})! 
-} diff --git a/src/client/targets.v b/src/client/targets.v deleted file mode 100644 index 3d43d43..0000000 --- a/src/client/targets.v +++ /dev/null @@ -1,71 +0,0 @@ -module client - -import models { Target, TargetFilter } - -// get_targets returns a list of targets, given a filter object. -pub fn (c &Client) get_targets(filter TargetFilter) ![]Target { - params := models.params_from(filter) - data := c.send_request[[]Target](.get, '/api/v1/targets', params)! - - return data.data -} - -// get_all_targets retrieves *all* targs from the API using the default -// limit. -pub fn (c &Client) get_all_targets() ![]Target { - mut targets := []Target{} - mut offset := u64(0) - - for { - sub_targets := c.get_targets(offset: offset)! - - if sub_targets.len == 0 { - break - } - - targets << sub_targets - - offset += u64(sub_targets.len) - } - - return targets -} - -// get_target returns the target for a specific id. -pub fn (c &Client) get_target(id int) !Target { - data := c.send_request[Target](.get, '/api/v1/targets/${id}', {})! - - return data.data -} - -pub struct NewTarget { - kind string - url string - branch string - repo string - path string - arch []string -} - -// add_target adds a new target to the server. -pub fn (c &Client) add_target(t NewTarget) !int { - params := models.params_from[NewTarget](t) - data := c.send_request[int](.post, '/api/v1/targets', params)! - - return data.data -} - -// remove_target removes the target with the given id from the server. -pub fn (c &Client) remove_target(id int) !string { - data := c.send_request[string](.delete, '/api/v1/targets/${id}', {})! - - return data.data -} - -// patch_target sends a PATCH request to the given target with the params as -// payload. -pub fn (c &Client) patch_target(id int, params map[string]string) !string { - data := c.send_request[string](.patch, '/api/v1/targets/${id}', params)! 
- - return data.data -} diff --git a/src/console/aur/aur.v b/src/console/aur/aur.v deleted file mode 100644 index c1c409c..0000000 --- a/src/console/aur/aur.v +++ /dev/null @@ -1,62 +0,0 @@ -module aur - -import cli -import console -import client -import aur -import conf as vconf - -struct Config { - address string [required] - api_key string [required] -} - -// cmd returns the cli module for interacting with the AUR API. -pub fn cmd() cli.Command { - return cli.Command{ - name: 'aur' - description: 'Interact with the AUR.' - commands: [ - cli.Command{ - name: 'search' - description: 'Search for packages.' - required_args: 1 - execute: fn (cmd cli.Command) ! { - c := aur.new() - pkgs := c.search(cmd.args[0])! - data := pkgs.map([it.name, it.description]) - - println(console.pretty_table(['name', 'description'], data)!) - } - }, - cli.Command{ - name: 'add' - usage: 'repo pkg-name [pkg-name...]' - description: 'Add the given AUR package(s) to Vieter. Non-existent packages will be silently ignored.' - required_args: 2 - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! - - c := aur.new() - pkgs := c.info(cmd.args[1..])! - - vc := client.new(conf_.address, conf_.api_key) - - for pkg in pkgs { - vc.add_target( - kind: 'git' - url: 'https://aur.archlinux.org/${pkg.package_base}' + '.git' - repo: cmd.args[0] - ) or { - println('Failed to add ${pkg.name}: ${err.msg()}') - continue - } - - println('Added ${pkg.name}' + '.') - } - } - }, - ] - } -} diff --git a/src/console/console.v b/src/console/console.v index 5c40de8..6f296bd 100644 --- a/src/console/console.v +++ b/src/console/console.v @@ -1,75 +1 @@ module console - -import arrays -import strings -import cli -import os - -// tabbed_table returns a simple textual table, with tabs as separators. 
-pub fn tabbed_table(data [][]string) string { - return data.map(it.join('\t')).join('\n') -} - -// pretty_table converts a list of string data into a pretty table. Many thanks -// to @hungrybluedev in the Vlang Discord for providing this code! -// https://ptb.discord.com/channels/592103645835821068/592106336838352923/970278787143045192 -pub fn pretty_table(header []string, data [][]string) !string { - column_count := header.len - - mut column_widths := []int{len: column_count, init: header[it].len} - - for values in data { - for col, value in values { - if column_widths[col] < value.len { - column_widths[col] = value.len - } - } - } - - single_line_length := arrays.sum(column_widths)! + (column_count + 1) * 3 - 4 - - horizontal_line := '+' + strings.repeat(`-`, single_line_length) + '+' - mut buffer := strings.new_builder(data.len * single_line_length) - - buffer.writeln(horizontal_line) - - buffer.write_string('| ') - for col, head in header { - if col != 0 { - buffer.write_string(' | ') - } - buffer.write_string(head) - buffer.write_string(strings.repeat(` `, column_widths[col] - head.len)) - } - buffer.writeln(' |') - - buffer.writeln(horizontal_line) - - for values in data { - buffer.write_string('| ') - for col, value in values { - if col != 0 { - buffer.write_string(' | ') - } - buffer.write_string(value) - buffer.write_string(strings.repeat(` `, column_widths[col] - value.len)) - } - buffer.writeln(' |') - } - - buffer.writeln(horizontal_line) - - return buffer.str() -} - -// export_man_pages recursively generates all man pages for the given -// cli.Command & writes them to the given directory. -pub fn export_man_pages(cmd cli.Command, path string) ! { - man := cmd.manpage() - os.write_file(os.join_path_single(path, cmd.full_name().replace(' ', '-') + '.1'), - man)! - - for sub_cmd in cmd.commands { - export_man_pages(sub_cmd, path)! 
- } -} diff --git a/src/console/git/git.v b/src/console/git/git.v new file mode 100644 index 0000000..3bf78d1 --- /dev/null +++ b/src/console/git/git.v @@ -0,0 +1,181 @@ +module git + +import cli +import env +import cron.expression { parse_expression } +import client + +struct Config { + address string [required] + api_key string [required] +} + +// cmd returns the cli submodule that handles the repos API interaction +pub fn cmd() cli.Command { + return cli.Command{ + name: 'repos' + description: 'Interact with the repos API.' + commands: [ + cli.Command{ + name: 'list' + description: 'List the current repos.' + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? + + list(conf) ? + } + }, + cli.Command{ + name: 'add' + required_args: 3 + usage: 'url branch repo' + description: 'Add a new repository.' + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? + + add(conf, cmd.args[0], cmd.args[1], cmd.args[2]) ? + } + }, + cli.Command{ + name: 'remove' + required_args: 1 + usage: 'id' + description: 'Remove a repository that matches the given ID prefix.' + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? + + remove(conf, cmd.args[0]) ? + } + }, + cli.Command{ + name: 'info' + required_args: 1 + usage: 'id' + description: 'Show detailed information for the repo matching the ID prefix.' + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? + + info(conf, cmd.args[0]) ? + } + }, + cli.Command{ + name: 'edit' + required_args: 1 + usage: 'id' + description: 'Edit the repository that matches the given ID prefix.' + flags: [ + cli.Flag{ + name: 'url' + description: 'URL of the Git repository.' 
+ flag: cli.FlagType.string + }, + cli.Flag{ + name: 'branch' + description: 'Branch of the Git repository.' + flag: cli.FlagType.string + }, + cli.Flag{ + name: 'repo' + description: 'Repo to publish builds to.' + flag: cli.FlagType.string + }, + cli.Flag{ + name: 'arch' + description: 'Comma-separated list of architectures to build on.' + flag: cli.FlagType.string + }, + cli.Flag{ + name: 'schedule' + description: 'Cron schedule for repository.' + flag: cli.FlagType.string + }, + ] + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? + + found := cmd.flags.get_all_found() + + mut params := map[string]string{} + + for f in found { + if f.name != 'config-file' { + params[f.name] = f.get_string() ? + } + } + + patch(conf, cmd.args[0], params) ? + } + }, + ] + } +} + +// get_repo_by_prefix tries to find the repo with the given prefix in its +// ID. If multiple or none are found, an error is raised. + +// list prints out a list of all repositories. +fn list(conf Config) ? { + c := client.new(conf.address, conf.api_key) + repos := c.get_git_repos() ? + + for repo in repos { + println('$repo.id\t$repo.url\t$repo.branch\t$repo.repo') + } +} + +// add adds a new repository to the server's list. +fn add(conf Config, url string, branch string, repo string) ? { + c := client.new(conf.address, conf.api_key) + res := c.add_git_repo(url, branch, repo, []) ? + + println(res.message) +} + +// remove removes a repository from the server's list. +fn remove(conf Config, id string) ? { + // id, _ := get_repo_by_prefix(conf, id_prefix) ? + id_int := id.int() + + if id_int != 0 { + c := client.new(conf.address, conf.api_key) + res := c.remove_git_repo(id_int) ? + println(res.message) + } +} + +// patch patches a given repository with the provided params. +fn patch(conf Config, id string, params map[string]string) ? 
{ + // We check the cron expression first because it's useless to send an + // invalid one to the server. + if 'schedule' in params && params['schedule'] != '' { + parse_expression(params['schedule']) or { + return error('Invalid cron expression: $err.msg()') + } + } + + id_int := id.int() + if id_int != 0 { + c := client.new(conf.address, conf.api_key) + res := c.patch_git_repo(id_int, params) ? + + println(res.message) + } +} + +// info shows detailed information for a given repo. +fn info(conf Config, id string) ? { + id_int := id.int() + + if id_int == 0 { + return + } + + c := client.new(conf.address, conf.api_key) + repo := c.get_git_repo(id_int) ? + println(repo) +} diff --git a/src/console/logs/logs.v b/src/console/logs/logs.v index b8e088c..e3c7d14 100644 --- a/src/console/logs/logs.v +++ b/src/console/logs/logs.v @@ -1,18 +1,16 @@ module logs import cli -import conf as vconf +import env import client -import console -import time -import models { BuildLog, BuildLogFilter } +import db struct Config { address string [required] api_key string [required] } -// cmd returns the cli module that handles the build logs API. +// cmd returns the cli module that handles the build repos API. pub fn cmd() cli.Command { return cli.Command{ name: 'logs' @@ -20,145 +18,21 @@ pub fn cmd() cli.Command { commands: [ cli.Command{ name: 'list' - description: 'List build logs. All date strings in the output are converted to the local timezone. Any time strings provided as input should be in the local timezone as well.' + description: 'List the build logs. If a repo ID is provided, only list the build logs for that repo.' flags: [ cli.Flag{ - name: 'limit' - abbrev: 'l' - description: 'How many results to return.' + name: 'repo' + description: 'ID of the Git repo to restrict list to.' flag: cli.FlagType.int }, - cli.Flag{ - name: 'offset' - abbrev: 'o' - description: 'Minimum index to return.' 
- flag: cli.FlagType.int - }, - cli.Flag{ - name: 'target' - description: 'Only return logs for this target id.' - flag: cli.FlagType.int - }, - cli.Flag{ - name: 'today' - abbrev: 't' - description: 'Only list logs started today. This flag overwrites any other date-related flag.' - flag: cli.FlagType.bool - }, - cli.Flag{ - name: 'failed' - description: 'Only list logs with non-zero exit codes. This flag overwrites the --code flag.' - flag: cli.FlagType.bool - }, - cli.Flag{ - name: 'day' - abbrev: 'd' - description: 'Only list logs started on this day. (format: YYYY-MM-DD)' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'before' - description: 'Only list logs started before this timestamp. (format: YYYY-MM-DD HH:mm:ss)' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'after' - description: 'Only list logs started after this timestamp. (format: YYYY-MM-DD HH:mm:ss)' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'code' - description: 'Only return logs with the given exit code. Prepend with `!` to exclude instead of include. Can be specified multiple times.' - flag: cli.FlagType.string_array - }, ] - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? - mut filter := BuildLogFilter{} + repo_id := cmd.flags.get_int('repo') ? - limit := cmd.flags.get_int('limit')! - if limit != 0 { - filter.limit = u64(limit) - } - - offset := cmd.flags.get_int('offset')! - if offset != 0 { - filter.offset = u64(offset) - } - - target_id := cmd.flags.get_int('target')! - if target_id != 0 { - filter.target = target_id - } - - tz_offset := time.offset() - - if cmd.flags.get_bool('today')! 
{ - today := time.now() - - filter.after = time.new_time(time.Time{ - year: today.year - month: today.month - day: today.day - }).add_seconds(-tz_offset) - filter.before = filter.after.add_days(1) - } - // The -today flag overwrites any of the other date flags. - else { - day_str := cmd.flags.get_string('day')! - before_str := cmd.flags.get_string('before')! - after_str := cmd.flags.get_string('after')! - - if day_str != '' { - day := time.parse_rfc3339(day_str)! - day_utc := time.new_time(time.Time{ - year: day.year - month: day.month - day: day.day - }).add_seconds(-tz_offset) - - // The extra -1 is so we also return logs that - // started at exactly midnight (filter bounds are - // exclusive). therefore, we have to request logs - // started after 23:59:59 the previous day. - filter.after = day_utc.add_seconds(-1) - filter.before = day_utc.add_days(1) - } else { - if before_str != '' { - filter.before = time.parse(before_str)!.add_seconds(-tz_offset) - } - - if after_str != '' { - filter.after = time.parse(after_str)!.add_seconds(-tz_offset) - } - } - } - - if cmd.flags.get_bool('failed')! { - filter.exit_codes = [ - '!0', - ] - } else { - filter.exit_codes = cmd.flags.get_strings('code')! - } - - raw := cmd.flags.get_bool('raw')! - - list(conf_, filter, raw)! - } - }, - cli.Command{ - name: 'remove' - required_args: 1 - usage: 'id' - description: 'Remove a build log that matches the given id.' - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! - - remove(conf_, cmd.args[0])! + if repo_id == 0 { list(conf) ? } else { list_for_repo(conf, repo_id) ? } } }, cli.Command{ @@ -166,12 +40,12 @@ pub fn cmd() cli.Command { required_args: 1 usage: 'id' description: 'Show all info for a specific build log.' - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! 
- conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? id := cmd.args[0].int() - info(conf_, id)! + info(conf, id) ? } }, cli.Command{ @@ -179,12 +53,12 @@ pub fn cmd() cli.Command { required_args: 1 usage: 'id' description: 'Output the content of a build log to stdout.' - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? id := cmd.args[0].int() - content(conf_, id)! + content(conf, id) ? } }, ] @@ -192,44 +66,41 @@ pub fn cmd() cli.Command { } // print_log_list prints a list of logs. -fn print_log_list(logs []BuildLog, raw bool) ! { - data := logs.map([it.id.str(), it.target_id.str(), it.start_time.local().str(), - it.exit_code.str()]) - - if raw { - println(console.tabbed_table(data)) - } else { - println(console.pretty_table(['id', 'target', 'start time', 'exit code'], data)!) +fn print_log_list(logs []db.BuildLog) { + for log in logs { + println('$log.id\t$log.start_time\t$log.exit_code') } } // list prints a list of all build logs. -fn list(conf_ Config, filter BuildLogFilter, raw bool) ! { - c := client.new(conf_.address, conf_.api_key) - logs := c.get_build_logs(filter)! +fn list(conf Config) ? { + c := client.new(conf.address, conf.api_key) + logs := c.get_build_logs() ?.data - print_log_list(logs, raw)! + print_log_list(logs) +} + +// list prints a list of all build logs for a given repo. +fn list_for_repo(conf Config, repo_id int) ? { + c := client.new(conf.address, conf.api_key) + logs := c.get_build_logs_for_repo(repo_id) ?.data + + print_log_list(logs) } // info print the detailed info for a given build log. -fn info(conf_ Config, id int) ! 
{ - c := client.new(conf_.address, conf_.api_key) - log := c.get_build_log(id)! +fn info(conf Config, id int) ? { + c := client.new(conf.address, conf.api_key) + log := c.get_build_log(id) ?.data print(log) } // content outputs the contents of the log file for a given build log to // stdout. -fn content(conf_ Config, id int) ! { - c := client.new(conf_.address, conf_.api_key) - content := c.get_build_log_content(id)! +fn content(conf Config, id int) ? { + c := client.new(conf.address, conf.api_key) + content := c.get_build_log_content(id) ? println(content) } - -// remove removes a build log from the server's list. -fn remove(conf_ Config, id string) ! { - c := client.new(conf_.address, conf_.api_key) - c.remove_build_log(id.int())! -} diff --git a/src/console/man/man.v b/src/console/man/man.v deleted file mode 100644 index 22cb5f7..0000000 --- a/src/console/man/man.v +++ /dev/null @@ -1,21 +0,0 @@ -module man - -import cli -import console -import os - -// cmd returns the cli submodule that handles generating man pages. -pub fn cmd() cli.Command { - return cli.Command{ - name: 'man' - description: 'Generate all man pages & save them in the given directory.' - usage: 'dir' - required_args: 1 - execute: fn (cmd cli.Command) ! { - root := cmd.root() - os.mkdir_all(cmd.args[0])! - - console.export_man_pages(root, cmd.args[0])! - } - } -} diff --git a/src/console/repos/repos.v b/src/console/repos/repos.v deleted file mode 100644 index 3779d33..0000000 --- a/src/console/repos/repos.v +++ /dev/null @@ -1,52 +0,0 @@ -module repos - -import cli -import conf as vconf -import client - -struct Config { - address string [required] - api_key string [required] -} - -// cmd returns the cli module that handles modifying the repository contents. -pub fn cmd() cli.Command { - return cli.Command{ - name: 'repos' - description: 'Interact with the repositories & packages stored on the server.' 
- commands: [ - cli.Command{ - name: 'remove' - required_args: 1 - usage: 'repo [arch [pkgname]]' - description: 'Remove a repo, arch-repo, or package from the server.' - flags: [ - cli.Flag{ - name: 'force' - flag: cli.FlagType.bool - }, - ] - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! - - if cmd.args.len < 3 { - if !cmd.flags.get_bool('force')! { - return error('Removing an arch-repo or repository is a very destructive command. If you really do wish to perform this operation, explicitely add the --force flag.') - } - } - - client_ := client.new(conf_.address, conf_.api_key) - - if cmd.args.len == 1 { - client_.remove_repo(cmd.args[0])! - } else if cmd.args.len == 2 { - client_.remove_arch_repo(cmd.args[0], cmd.args[1])! - } else { - client_.remove_package(cmd.args[0], cmd.args[1], cmd.args[2])! - } - } - }, - ] - } -} diff --git a/src/console/schedule/schedule.v b/src/console/schedule/schedule.v deleted file mode 100644 index ceabf24..0000000 --- a/src/console/schedule/schedule.v +++ /dev/null @@ -1,30 +0,0 @@ -module schedule - -import cli -import cron -import time - -// cmd returns the cli submodule for previewing a cron schedule. -pub fn cmd() cli.Command { - return cli.Command{ - name: 'schedule' - usage: 'schedule' - description: 'Preview the behavior of a cron schedule.' - flags: [ - cli.Flag{ - name: 'count' - description: 'How many scheduled times to show.' - flag: cli.FlagType.int - default_value: ['5'] - }, - ] - execute: fn (cmd cli.Command) ! { - ce := cron.parse_expression(cmd.args.join(' '))! - count := cmd.flags.get_int('count')! 
- - for t in ce.next_n(time.now(), count) { - println(t) - } - } - } -} diff --git a/src/console/targets/build.v b/src/console/targets/build.v deleted file mode 100644 index 93464af..0000000 --- a/src/console/targets/build.v +++ /dev/null @@ -1,34 +0,0 @@ -module targets - -import client -import docker -import os -import build - -// build locally builds the target with the given id. -fn build_target(conf Config, target_id int, force bool, timeout int) ! { - c := client.new(conf.address, conf.api_key) - target := c.get_target(target_id)! - - build_arch := os.uname().machine - - println('Creating base image...') - image_id := build.create_build_image(conf.base_image)! - - println('Running build...') - res := build.build_target(conf.address, conf.api_key, image_id, target, force, timeout)! - - println('Removing build image...') - - mut dd := docker.new_conn()! - - defer { - dd.close() or {} - } - - dd.image_remove(image_id)! - - println('Uploading logs to Vieter...') - c.add_build_log(target.id, res.start_time, res.end_time, build_arch, res.exit_code, - res.logs)! -} diff --git a/src/console/targets/targets.v b/src/console/targets/targets.v deleted file mode 100644 index f85c4c0..0000000 --- a/src/console/targets/targets.v +++ /dev/null @@ -1,319 +0,0 @@ -module targets - -import cli -import conf as vconf -import cron -import client { NewTarget } -import console -import models { TargetFilter } - -struct Config { - address string [required] - api_key string [required] - base_image string = 'archlinux:base-devel' -} - -// cmd returns the cli submodule that handles the targets API interaction -pub fn cmd() cli.Command { - return cli.Command{ - name: 'targets' - description: 'Interact with the targets API.' - commands: [ - cli.Command{ - name: 'list' - description: 'List the current targets.' - flags: [ - cli.Flag{ - name: 'limit' - abbrev: 'l' - description: 'How many results to return.' 
- flag: cli.FlagType.int - }, - cli.Flag{ - name: 'offset' - abbrev: 'o' - description: 'Minimum index to return.' - flag: cli.FlagType.int - }, - cli.Flag{ - name: 'repo' - description: 'Only return targets that publish to this repo.' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'query' - abbrev: 'q' - description: 'Search string to filter targets by.' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'arch' - description: 'Only list targets that build for this arch.' - flag: cli.FlagType.string - }, - ] - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! - - mut filter := TargetFilter{} - - limit := cmd.flags.get_int('limit')! - if limit != 0 { - filter.limit = u64(limit) - } - - offset := cmd.flags.get_int('offset')! - if offset != 0 { - filter.offset = u64(offset) - } - - repo := cmd.flags.get_string('repo')! - if repo != '' { - filter.repo = repo - } - - query := cmd.flags.get_string('query')! - if query != '' { - filter.query = query - } - - arch := cmd.flags.get_string('arch')! - if arch != '' { - filter.arch = arch - } - - raw := cmd.flags.get_bool('raw')! - - list(conf_, filter, raw)! - } - }, - cli.Command{ - name: 'add' - required_args: 2 - usage: 'url repo' - description: 'Add a new target with the given URL & target repo.' - flags: [ - cli.Flag{ - name: 'kind' - description: "Kind of target to add. Defaults to 'git' if not specified. One of 'git', 'url'." - flag: cli.FlagType.string - default_value: ['git'] - }, - cli.Flag{ - name: 'branch' - description: "Which branch to clone; only applies to kind 'git'." - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'path' - description: 'Subdirectory inside Git repository to use.' - flag: cli.FlagType.string - }, - ] - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! 
- - t := NewTarget{ - kind: cmd.flags.get_string('kind')! - url: cmd.args[0] - repo: cmd.args[1] - branch: cmd.flags.get_string('branch') or { '' } - path: cmd.flags.get_string('path') or { '' } - } - - raw := cmd.flags.get_bool('raw')! - - add(conf_, t, raw)! - } - }, - cli.Command{ - name: 'remove' - required_args: 1 - usage: 'id' - description: 'Remove a target that matches the given id.' - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! - - remove(conf_, cmd.args[0])! - } - }, - cli.Command{ - name: 'info' - required_args: 1 - usage: 'id' - description: 'Show detailed information for the target matching the id.' - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! - - info(conf_, cmd.args[0])! - } - }, - cli.Command{ - name: 'edit' - required_args: 1 - usage: 'id' - description: 'Edit the target that matches the given id.' - flags: [ - cli.Flag{ - name: 'url' - description: 'URL value. Meaning depends on kind of target.' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'branch' - description: 'Branch of the Git repository.' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'repo' - description: 'Repo to publish builds to.' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'arch' - description: 'Comma-separated list of architectures to build on.' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'schedule' - description: 'Cron schedule for repository.' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'kind' - description: 'Kind of target.' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'path' - description: 'Subdirectory inside Git repository to use.' - flag: cli.FlagType.string - }, - ] - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! 
- conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! - - found := cmd.flags.get_all_found() - - mut params := map[string]string{} - - for f in found { - if f.name != 'config-file' { - params[f.name] = f.get_string()! - } - } - - patch(conf_, cmd.args[0], params)! - } - }, - cli.Command{ - name: 'build' - required_args: 1 - usage: 'id' - description: 'Build the target with the given id & publish it.' - flags: [ - cli.Flag{ - name: 'force' - description: 'Build the target without checking whether it needs to be renewed.' - flag: cli.FlagType.bool - }, - cli.Flag{ - name: 'remote' - description: 'Schedule the build on the server instead of running it locally.' - flag: cli.FlagType.bool - }, - cli.Flag{ - name: 'arch' - description: 'Architecture to schedule build for. Required when using -remote.' - flag: cli.FlagType.string - }, - cli.Flag{ - name: 'timeout' - description: 'After how many minutes to cancel the build. Only applies to local builds.' - flag: cli.FlagType.int - default_value: ['3600'] - }, - ] - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! - - remote := cmd.flags.get_bool('remote')! - force := cmd.flags.get_bool('force')! - timeout := cmd.flags.get_int('timeout')! - target_id := cmd.args[0].int() - - if remote { - arch := cmd.flags.get_string('arch')! - - if arch == '' { - return error('When scheduling the build remotely, you have to specify an architecture.') - } - - c := client.new(conf_.address, conf_.api_key) - c.queue_job(target_id, arch, force)! - } else { - build_target(conf_, target_id, force, timeout)! - } - } - }, - ] - } -} - -// list prints out a list of all repositories. -fn list(conf_ Config, filter TargetFilter, raw bool) ! { - c := client.new(conf_.address, conf_.api_key) - targets := c.get_targets(filter)! 
- data := targets.map([it.id.str(), it.kind, it.url, it.repo]) - - if raw { - println(console.tabbed_table(data)) - } else { - println(console.pretty_table(['id', 'kind', 'url', 'repo'], data)!) - } -} - -// add adds a new target to the server's list. -fn add(conf_ Config, t &NewTarget, raw bool) ! { - c := client.new(conf_.address, conf_.api_key) - target_id := c.add_target(t)! - - if raw { - println(target_id) - } else { - println('Target added with id ${target_id}') - } -} - -// remove removes a target from the server's list. -fn remove(conf_ Config, id string) ! { - c := client.new(conf_.address, conf_.api_key) - c.remove_target(id.int())! -} - -// patch patches a given target with the provided params. -fn patch(conf_ Config, id string, params map[string]string) ! { - // We check the cron expression first because it's useless to send an - // invalid one to the server. - if 'schedule' in params && params['schedule'] != '' { - cron.parse_expression(params['schedule']) or { - return error('Invalid cron expression: ${err.msg()}') - } - } - - c := client.new(conf_.address, conf_.api_key) - c.patch_target(id.int(), params)! -} - -// info shows detailed information for a given target. -fn info(conf_ Config, id string) ! { - c := client.new(conf_.address, conf_.api_key) - target := c.get_target(id.int())! - println(target) -} diff --git a/src/cron/cli.v b/src/cron/cli.v new file mode 100644 index 0000000..15bc986 --- /dev/null +++ b/src/cron/cli.v @@ -0,0 +1,32 @@ +module cron + +import cli +import env + +struct Config { +pub: + log_level string = 'WARN' + api_key string + address string + data_dir string + base_image string = 'archlinux:base-devel' + max_concurrent_builds int = 1 + api_update_frequency int = 15 + image_rebuild_frequency int = 1440 + // Replicates the behavior of the original cron system + global_schedule string = '0 3' +} + +// cmd returns the cli module that handles the cron daemon. 
+pub fn cmd() cli.Command { + return cli.Command{ + name: 'cron' + description: 'Start the cron service that periodically runs builds.' + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? + + cron(conf) ? + } + } +} diff --git a/src/cron/cron.v b/src/cron/cron.v new file mode 100644 index 0000000..e356faa --- /dev/null +++ b/src/cron/cron.v @@ -0,0 +1,33 @@ +module cron + +import log +import cron.daemon +import cron.expression +import os + +const log_file_name = 'vieter.cron.log' + +// cron starts a cron daemon & starts periodically scheduling builds. +pub fn cron(conf Config) ? { + // Configure logger + log_level := log.level_from_tag(conf.log_level) or { + return error('Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.') + } + + mut logger := log.Log{ + level: log_level + } + + log_file := os.join_path_single(conf.data_dir, cron.log_file_name) + logger.set_full_logpath(log_file) + logger.log_to_console_too() + + ce := expression.parse_expression(conf.global_schedule) or { + return error('Error while parsing global cron expression: $err.msg()') + } + + mut d := daemon.init_daemon(logger, conf.address, conf.api_key, conf.base_image, ce, + conf.max_concurrent_builds, conf.api_update_frequency, conf.image_rebuild_frequency) ? + + d.run() +} diff --git a/src/cron/daemon/build.v b/src/cron/daemon/build.v new file mode 100644 index 0000000..aa08f9f --- /dev/null +++ b/src/cron/daemon/build.v @@ -0,0 +1,113 @@ +module daemon + +import time +import sync.stdatomic +import build +import os + +const ( + build_empty = 0 + build_running = 1 + build_done = 2 +) + +// clean_finished_builds removes finished builds from the build slots & returns +// them. +fn (mut d Daemon) clean_finished_builds() []ScheduledBuild { + mut out := []ScheduledBuild{} + + for i in 0 .. 
d.atomics.len { + if stdatomic.load_u64(&d.atomics[i]) == daemon.build_done { + stdatomic.store_u64(&d.atomics[i], daemon.build_empty) + out << d.builds[i] + } + } + + return out +} + +// update_builds starts as many builds as possible. +fn (mut d Daemon) start_new_builds() { + now := time.now() + + for d.queue.len() > 0 { + elem := d.queue.peek() or { + d.lerror("queue.peek() unexpectedly returned an error. This shouldn't happen.") + + break + } + + if elem.timestamp < now { + sb := d.queue.pop() or { + d.lerror("queue.pop() unexpectedly returned an error. This shouldn't happen.") + + break + } + + // If this build couldn't be scheduled, no more will be possible. + if !d.start_build(sb) { + d.queue.insert(sb) + break + } + } else { + break + } + } +} + +// start_build starts a build for the given ScheduledBuild object. +fn (mut d Daemon) start_build(sb ScheduledBuild) bool { + for i in 0 .. d.atomics.len { + if stdatomic.load_u64(&d.atomics[i]) == daemon.build_empty { + stdatomic.store_u64(&d.atomics[i], daemon.build_running) + d.builds[i] = sb + + go d.run_build(i, sb) + + return true + } + } + + return false +} + +// run_build actually starts the build process for a given repo. 
+fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) { + d.linfo('started build: $sb.repo.url $sb.repo.branch') + + // 0 means success, 1 means failure + mut status := 0 + + res := build.build_repo(d.client.address, d.client.api_key, d.builder_images.last(), + &sb.repo) or { + d.ldebug('build_repo error: $err.msg()') + status = 1 + + build.BuildResult{} + } + + if status == 0 { + d.linfo('finished build: $sb.repo.url $sb.repo.branch; uploading logs...') + + build_arch := os.uname().machine + d.client.add_build_log(sb.repo.id, res.start_time, res.end_time, build_arch, res.exit_code, + res.logs) or { d.lerror('Failed to upload logs for $sb.repo.url $sb.repo.arch') } + } else { + d.linfo('failed build: $sb.repo.url $sb.repo.branch') + } + + stdatomic.store_u64(&d.atomics[build_index], daemon.build_done) +} + +// current_build_count returns how many builds are currently running. +fn (mut d Daemon) current_build_count() int { + mut res := 0 + + for i in 0 .. d.atomics.len { + if stdatomic.load_u64(&d.atomics[i]) == daemon.build_running { + res += 1 + } + } + + return res +} diff --git a/src/cron/daemon/daemon.v b/src/cron/daemon/daemon.v new file mode 100644 index 0000000..ade8fcb --- /dev/null +++ b/src/cron/daemon/daemon.v @@ -0,0 +1,266 @@ +module daemon + +import time +import log +import datatypes { MinHeap } +import cron.expression { CronExpression, parse_expression } +import math +import build +import docker +import db +import os +import client + +const ( + // How many seconds to wait before retrying to update API if failed + api_update_retry_timeout = 5 + // How many seconds to wait before retrying to rebuild image if failed + rebuild_base_image_retry_timout = 30 +) + +struct ScheduledBuild { +pub: + repo db.GitRepo + timestamp time.Time +} + +// Overloaded operator for comparing ScheduledBuild objects +fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool { + return r1.timestamp < r2.timestamp +} + +pub struct Daemon { +mut: + client client.Client + 
base_image string + builder_images []string + global_schedule CronExpression + api_update_frequency int + image_rebuild_frequency int + // Repos currently loaded from API. + repos []db.GitRepo + // At what point to update the list of repositories. + api_update_timestamp time.Time + image_build_timestamp time.Time + queue MinHeap + // Which builds are currently running + builds []ScheduledBuild + // Atomic variables used to detect when a build has finished; length is the + // same as builds + atomics []u64 + logger shared log.Log +} + +// init_daemon initializes a new Daemon object. It renews the repositories & +// populates the build queue for the first time. +pub fn init_daemon(logger log.Log, address string, api_key string, base_image string, global_schedule CronExpression, max_concurrent_builds int, api_update_frequency int, image_rebuild_frequency int) ?Daemon { + mut d := Daemon{ + client: client.new(address, api_key) + base_image: base_image + global_schedule: global_schedule + api_update_frequency: api_update_frequency + image_rebuild_frequency: image_rebuild_frequency + atomics: []u64{len: max_concurrent_builds} + builds: []ScheduledBuild{len: max_concurrent_builds} + logger: logger + } + + // Initialize the repos & queue + d.renew_repos() + d.renew_queue() + if !d.rebuild_base_image() { + return error('The base image failed to build. The Vieter cron daemon cannot run without an initial builder image.') + } + + return d +} + +// run starts the actual daemon process. It runs builds when possible & +// periodically refreshes the list of repositories to ensure we stay in sync. +pub fn (mut d Daemon) run() { + for { + finished_builds := d.clean_finished_builds() + + // Update the API's contents if needed & renew the queue + if time.now() >= d.api_update_timestamp { + d.renew_repos() + d.renew_queue() + } + // The finished builds should only be rescheduled if the API contents + // haven't been renewed. 
+		else {
+			for sb in finished_builds {
+				d.schedule_build(sb.repo)
+			}
+		}
+
+		// TODO remove old builder images.
+		// This issue is less trivial than it sounds, because a build could
+		// still be running when the image has to be rebuilt. That would
+		// prevent the image from being removed. Therefore, we will need to
+		// keep track of a list or something & remove an image once we have
+		// made sure it isn't being used anymore.
+		if time.now() >= d.image_build_timestamp {
+			d.rebuild_base_image()
+			// In theory, executing this function here allows an old builder
+			// image to exist for at most image_rebuild_frequency minutes.
+			d.clean_old_base_images()
+		}
+
+		// Schedules new builds when possible
+		d.start_new_builds()
+
+		// If there are builds currently running, the daemon should refresh
+		// every second to clean up any finished builds & start new ones.
+		mut delay := time.Duration(1 * time.second)
+
+		// Sleep either until we have to refresh the repos or when the next
+		// build has to start, with a minimum of 1 second.
+		if d.current_build_count() == 0 {
+			now := time.now()
+			delay = d.api_update_timestamp - now
+
+			if d.queue.len() > 0 {
+				elem := d.queue.peek() or {
+					d.lerror("queue.peek() unexpectedly returned an error. This shouldn't happen.")
+
+					// This is just a fallback option. In theory, queue.peek()
+					// should *never* return an error or none, because we check
+					// its len beforehand.
+					time.sleep(1)
+					continue
+				}
+
+				time_until_next_job := elem.timestamp - now
+
+				delay = math.min(delay, time_until_next_job)
+			}
+		}
+
+		// We sleep for at least one second. This is to prevent the program
+		// from looping aggressively when a cronjob can be scheduled, but
+		// there are no spots free for it to be started.
+		delay = math.max(delay, 1 * time.second)
+
+		d.ldebug('Sleeping for ${delay}...')
+
+		time.sleep(delay)
+	}
+}
+
+// schedule_build adds the next occurrence of the given repo build to the queue.
+fn (mut d Daemon) schedule_build(repo db.GitRepo) { + ce := if repo.schedule != '' { + parse_expression(repo.schedule) or { + // TODO This shouldn't return an error if the expression is empty. + d.lerror("Error while parsing cron expression '$repo.schedule' (id $repo.id): $err.msg()") + + d.global_schedule + } + } else { + d.global_schedule + } + + // A repo that can't be scheduled will just be skipped for now + timestamp := ce.next_from_now() or { + d.lerror("Couldn't calculate next timestamp from '$repo.schedule'; skipping") + return + } + + d.queue.insert(ScheduledBuild{ + repo: repo + timestamp: timestamp + }) +} + +// renew_repos requests the newest list of Git repos from the server & replaces +// the old one. +fn (mut d Daemon) renew_repos() { + d.linfo('Renewing repos...') + + mut new_repos := d.client.get_git_repos() or { + d.lerror('Failed to renew repos. Retrying in ${daemon.api_update_retry_timeout}s...') + d.api_update_timestamp = time.now().add_seconds(daemon.api_update_retry_timeout) + + return + } + + // Filter out any repos that shouldn't run on this architecture + cur_arch := os.uname().machine + new_repos = new_repos.filter(it.arch.any(it.value == cur_arch)) + + d.repos = new_repos + + d.api_update_timestamp = time.now().add_seconds(60 * d.api_update_frequency) +} + +// renew_queue replaces the old queue with a new one that reflects the newest +// values in repos_map. +fn (mut d Daemon) renew_queue() { + d.linfo('Renewing queue...') + mut new_queue := MinHeap{} + + // Move any jobs that should have already started from the old queue onto + // the new one + now := time.now() + + // For some reason, using + // ```v + // for d.queue.len() > 0 && d.queue.peek() ?.timestamp < now { + //``` + // here causes the function to prematurely just exit, without any errors or anything, very weird + // https://github.com/vlang/v/issues/14042 + for d.queue.len() > 0 { + elem := d.queue.pop() or { + d.lerror("queue.pop() returned an error. 
This shouldn't happen.") + continue + } + + if elem.timestamp < now { + new_queue.insert(elem) + } else { + break + } + } + + d.queue = new_queue + + // For each repository in repos_map, parse their cron expression (or use + // the default one if not present) & add them to the queue + for repo in d.repos { + d.schedule_build(repo) + } +} + +// rebuild_base_image recreates the builder image. +fn (mut d Daemon) rebuild_base_image() bool { + d.linfo('Rebuilding builder image....') + + d.builder_images << build.create_build_image(d.base_image) or { + d.lerror('Failed to rebuild base image. Retrying in ${daemon.rebuild_base_image_retry_timout}s...') + d.image_build_timestamp = time.now().add_seconds(daemon.rebuild_base_image_retry_timout) + + return false + } + + d.image_build_timestamp = time.now().add_seconds(60 * d.image_rebuild_frequency) + + return true +} + +// clean_old_base_images tries to remove any old but still present builder +// images. +fn (mut d Daemon) clean_old_base_images() { + mut i := 0 + + for i < d.builder_images.len - 1 { + // For each builder image, we try to remove it by calling the Docker + // API. If the function returns an error or false, that means the image + // wasn't deleted. Therefore, we move the index over. If the function + // returns true, the array's length has decreased by one so we don't + // move the index. 
+ if !docker.remove_image(d.builder_images[i]) or { false } { + i += 1 + } + } +} diff --git a/src/cron/daemon/log.v b/src/cron/daemon/log.v new file mode 100644 index 0000000..003898b --- /dev/null +++ b/src/cron/daemon/log.v @@ -0,0 +1,35 @@ +module daemon + +import log + +// log reate a log message with the given level +pub fn (mut d Daemon) log(msg &string, level log.Level) { + lock d.logger { + d.logger.send_output(msg, level) + } +} + +// lfatal create a log message with the fatal level +pub fn (mut d Daemon) lfatal(msg &string) { + d.log(msg, log.Level.fatal) +} + +// lerror create a log message with the error level +pub fn (mut d Daemon) lerror(msg &string) { + d.log(msg, log.Level.error) +} + +// lwarn create a log message with the warn level +pub fn (mut d Daemon) lwarn(msg &string) { + d.log(msg, log.Level.warn) +} + +// linfo create a log message with the info level +pub fn (mut d Daemon) linfo(msg &string) { + d.log(msg, log.Level.info) +} + +// ldebug create a log message with the debug level +pub fn (mut d Daemon) ldebug(msg &string) { + d.log(msg, log.Level.debug) +} diff --git a/src/cron/expression.c.v b/src/cron/expression.c.v deleted file mode 100644 index e9686d6..0000000 --- a/src/cron/expression.c.v +++ /dev/null @@ -1,101 +0,0 @@ -module cron - -#flag -I @VMODROOT/libvieter/include -#flag -L @VMODROOT/libvieter/build -#flag -lvieter -#include "vieter_cron.h" - -[typedef] -pub struct C.vieter_cron_expression { - minutes &u8 - hours &u8 - days &u8 - months &u8 - minute_count u8 - hour_count u8 - day_count u8 - month_count u8 -} - -pub type Expression = C.vieter_cron_expression - -// == returns whether the two expressions are equal by value. -fn (ce1 Expression) == (ce2 Expression) bool { - if ce1.month_count != ce2.month_count || ce1.day_count != ce2.day_count - || ce1.hour_count != ce2.hour_count || ce1.minute_count != ce2.minute_count { - return false - } - - for i in 0 .. 
ce1.month_count { - unsafe { - if ce1.months[i] != ce2.months[i] { - return false - } - } - } - for i in 0 .. ce1.day_count { - unsafe { - if ce1.days[i] != ce2.days[i] { - return false - } - } - } - for i in 0 .. ce1.hour_count { - unsafe { - if ce1.hours[i] != ce2.hours[i] { - return false - } - } - } - for i in 0 .. ce1.minute_count { - unsafe { - if ce1.minutes[i] != ce2.minutes[i] { - return false - } - } - } - - return true -} - -[typedef] -struct C.vieter_cron_simple_time { - year int - month int - day int - hour int - minute int -} - -type SimpleTime = C.vieter_cron_simple_time - -enum ParseError as u8 { - ok = 0 - invalid_expression = 1 - invalid_number = 2 - out_of_range = 3 - too_many_parts = 4 - not_enough_parts = 5 -} - -// str returns the string representation of a ParseError. -fn (e ParseError) str() string { - return match e { - .ok { '' } - .invalid_expression { 'Invalid expression' } - .invalid_number { 'Invalid number' } - .out_of_range { 'Out of range' } - .too_many_parts { 'Too many parts' } - .not_enough_parts { 'Not enough parts' } - } -} - -fn C.vieter_cron_expr_init() &C.vieter_cron_expression - -fn C.vieter_cron_expr_free(ce &C.vieter_cron_expression) - -fn C.vieter_cron_expr_next(out &C.vieter_cron_simple_time, ce &C.vieter_cron_expression, ref &C.vieter_cron_simple_time) - -fn C.vieter_cron_expr_next_from_now(out &C.vieter_cron_simple_time, ce &C.vieter_cron_expression) - -fn C.vieter_cron_expr_parse(out &C.vieter_cron_expression, s &char) ParseError diff --git a/src/cron/expression.v b/src/cron/expression.v deleted file mode 100644 index 62692fa..0000000 --- a/src/cron/expression.v +++ /dev/null @@ -1,73 +0,0 @@ -module cron - -import time - -// free the memory associated with the Expression. -[unsafe] -pub fn (ce &Expression) free() { - C.vieter_cron_expr_free(ce) -} - -// parse_expression parses a string into an Expression. 
-pub fn parse_expression(exp string) !&Expression { - out := C.vieter_cron_expr_init() - res := C.vieter_cron_expr_parse(out, exp.str) - - if res != .ok { - return error(res.str()) - } - - return out -} - -// next calculates the next occurence of the cron schedule, given a reference -// point. -pub fn (ce &Expression) next(ref time.Time) time.Time { - st := SimpleTime{ - year: ref.year - month: ref.month - day: ref.day - hour: ref.hour - minute: ref.minute - } - - out := SimpleTime{} - C.vieter_cron_expr_next(&out, ce, &st) - - return time.new_time(time.Time{ - year: out.year - month: out.month - day: out.day - hour: out.hour - minute: out.minute - }) -} - -// next_from_now calculates the next occurence of the cron schedule with the -// current time as reference. -pub fn (ce &Expression) next_from_now() time.Time { - out := SimpleTime{} - C.vieter_cron_expr_next_from_now(&out, ce) - - return time.new_time(time.Time{ - year: out.year - month: out.month - day: out.day - hour: out.hour - minute: out.minute - }) -} - -// next_n returns the n next occurences of the expression, given a starting -// time. -pub fn (ce &Expression) next_n(ref time.Time, n int) []time.Time { - mut times := []time.Time{cap: n} - - times << ce.next(ref) - - for i in 1 .. n { - times << ce.next(times[i - 1]) - } - - return times -} diff --git a/src/cron/expression/expression.v b/src/cron/expression/expression.v new file mode 100644 index 0000000..124337f --- /dev/null +++ b/src/cron/expression/expression.v @@ -0,0 +1,261 @@ +module expression + +import time + +pub struct CronExpression { + minutes []int + hours []int + days []int + months []int +} + +// next calculates the earliest time this cron expression is valid. It will +// always pick a moment in the future, even if ref matches completely up to the +// minute. This function conciously does not take gap years into account. 
+pub fn (ce &CronExpression) next(ref time.Time) ?time.Time { + // If the given ref matches the next cron occurence up to the minute, it + // will return that value. Because we always want to return a value in the + // future, we artifically shift the ref 60 seconds to make sure we always + // match in the future. A shift of 60 seconds is enough because the cron + // expression does not allow for accuracy smaller than one minute. + sref := ref + + // For all of these values, the rule is the following: if their value is + // the length of their respective array in the CronExpression object, that + // means we've looped back around. This means that the "bigger" value has + // to be incremented by one. For example, if the minutes have looped + // around, that means that the hour has to be incremented as well. + mut minute_index := 0 + mut hour_index := 0 + mut day_index := 0 + mut month_index := 0 + + // This chain is the same logic multiple times, namely that if a "bigger" + // value loops around, then the smaller value will always reset as well. + // For example, if we're going to a new day, the hour & minute will always + // be their smallest value again. + for month_index < ce.months.len && sref.month > ce.months[month_index] { + month_index++ + } + + if month_index < ce.months.len && sref.month == ce.months[month_index] { + for day_index < ce.days.len && sref.day > ce.days[day_index] { + day_index++ + } + + if day_index < ce.days.len && ce.days[day_index] == sref.day { + for hour_index < ce.hours.len && sref.hour > ce.hours[hour_index] { + hour_index++ + } + + if hour_index < ce.hours.len && ce.hours[hour_index] == sref.hour { + // Minute is the only value where we explicitely make sure we + // can't match sref's value exactly. This is to ensure we only + // return values in the future. 
+				for minute_index < ce.minutes.len && sref.minute >= ce.minutes[minute_index] {
+					minute_index++
+				}
+			}
+		}
+	}
+
+	// Here, we increment the "bigger" values by one if the smaller ones loop
+	// around. The order is important, as it allows a sort-of waterfall effect
+	// to occur which updates all values if required.
+	if minute_index == ce.minutes.len && hour_index < ce.hours.len {
+		hour_index += 1
+	}
+
+	if hour_index == ce.hours.len && day_index < ce.days.len {
+		day_index += 1
+	}
+
+	if day_index == ce.days.len && month_index < ce.months.len {
+		month_index += 1
+	}
+
+	mut minute := ce.minutes[minute_index % ce.minutes.len]
+	mut hour := ce.hours[hour_index % ce.hours.len]
+	mut day := ce.days[day_index % ce.days.len]
+
+	// Sometimes, we end up with a day that does not exist within the selected
+	// month, e.g. day 30 in February. When this occurs, we reset day back to
+	// the smallest value & loop over to the next month that does have this
+	// day.
+	if day > time.month_days[ce.months[month_index % ce.months.len] - 1] {
+		day = ce.days[0]
+		month_index += 1
+
+		for day > time.month_days[ce.months[month_index % ce.months.len] - 1] {
+			month_index += 1
+
+			// If for whatever reason the day value ends up being something
+			// that can't be scheduled in any month, we have to make sure we
+			// don't create an infinite loop.
+			if month_index == 2 * ce.months.len {
+				return error('No schedulable moment.')
+			}
+		}
+	}
+
+	month := ce.months[month_index % ce.months.len]
+	mut year := sref.year
+
+	// If the month loops over, we need to increment the year.
+	if month_index >= ce.months.len {
+		year++
+	}
+
+	return time.new_time(time.Time{
+		year: year
+		month: month
+		day: day
+		minute: minute
+		hour: hour
+	})
+}
+
+// next_from_now returns the result of ce.next(ref) where ref is the result of
+// time.now().
+pub fn (ce &CronExpression) next_from_now() ?time.Time { + return ce.next(time.now()) +} + +// parse_range parses a given string into a range of sorted integers, if +// possible. +fn parse_range(s string, min int, max int, mut bitv []bool) ? { + mut start := min + mut end := max + mut interval := 1 + + exps := s.split('/') + + if exps.len > 2 { + return error('Invalid expression.') + } + + if exps[0] != '*' { + dash_parts := exps[0].split('-') + + if dash_parts.len > 2 { + return error('Invalid expression.') + } + + start = dash_parts[0].int() + + // The builtin parsing functions return zero if the string can't be + // parsed into a number, so we have to explicitely check whether they + // actually entered zero or if it's an invalid number. + if start == 0 && dash_parts[0] != '0' { + return error('Invalid number.') + } + + // Check whether the start value is out of range + if start < min || start > max { + return error('Out of range.') + } + + if dash_parts.len == 2 { + end = dash_parts[1].int() + + if end == 0 && dash_parts[1] != '0' { + return error('Invalid number.') + } + + if end < start || end > max { + return error('Out of range.') + } + } + } + + if exps.len > 1 { + interval = exps[1].int() + + // interval being zero is always invalid, but we want to check why + // it's invalid for better error messages. + if interval == 0 { + if exps[1] != '0' { + return error('Invalid number.') + } else { + return error('Step size zero not allowed.') + } + } + + if interval > max - min { + return error('Step size too large.') + } + } + // Here, s solely consists of a number, so that's the only value we + // should return. + else if exps[0] != '*' && !exps[0].contains('-') { + bitv[start - min] = true + return + } + + for start <= end { + bitv[start - min] = true + start += interval + } +} + +// bitv_to_ints converts a bit vector into an array containing the +// corresponding values. +fn bitv_to_ints(bitv []bool, min int) []int { + mut out := []int{} + + for i in 0 .. 
bitv.len { + if bitv[i] { + out << min + i + } + } + + return out +} + +// parse_part parses a given part of a cron expression & returns the +// corresponding array of ints. +fn parse_part(s string, min int, max int) ?[]int { + mut bitv := []bool{len: max - min + 1, init: false} + + for range in s.split(',') { + parse_range(range, min, max, mut bitv) ? + } + + return bitv_to_ints(bitv, min) +} + +// parse_expression parses an entire cron expression string into a +// CronExpression object, if possible. +pub fn parse_expression(exp string) ?CronExpression { + // The filter allows for multiple spaces between parts + mut parts := exp.split(' ').filter(it != '') + + if parts.len < 2 || parts.len > 4 { + return error('Expression must contain between 2 and 4 space-separated parts.') + } + + // For ease of use, we allow the user to only specify as many parts as they + // need. + for parts.len < 4 { + parts << '*' + } + + mut part_results := [][]int{} + + mins := [0, 0, 1, 1] + maxs := [59, 23, 31, 12] + + // This for loop allows us to more clearly propagate the error to the user. + for i, min in mins { + part_results << parse_part(parts[i], min, maxs[i]) or { + return error('An error occurred with part $i: $err.msg()') + } + } + + return CronExpression{ + minutes: part_results[0] + hours: part_results[1] + days: part_results[2] + months: part_results[3] + } +} diff --git a/src/cron/expression/expression_parse_test.v b/src/cron/expression/expression_parse_test.v new file mode 100644 index 0000000..18531c0 --- /dev/null +++ b/src/cron/expression/expression_parse_test.v @@ -0,0 +1,98 @@ +module expression + +// parse_range_error returns the returned error message. If the result is '', +// that means the function didn't error. +fn parse_range_error(s string, min int, max int) string { + mut bitv := []bool{len: max - min + 1, init: false} + + parse_range(s, min, max, mut bitv) or { return err.msg } + + return '' +} + +// =====parse_range===== +fn test_range_star_range() ? 
{ + mut bitv := []bool{len: 6, init: false} + parse_range('*', 0, 5, mut bitv) ? + + assert bitv == [true, true, true, true, true, true] +} + +fn test_range_number() ? { + mut bitv := []bool{len: 6, init: false} + parse_range('4', 0, 5, mut bitv) ? + + assert bitv_to_ints(bitv, 0) == [4] +} + +fn test_range_number_too_large() ? { + assert parse_range_error('10', 0, 6) == 'Out of range.' +} + +fn test_range_number_too_small() ? { + assert parse_range_error('0', 2, 6) == 'Out of range.' +} + +fn test_range_number_invalid() ? { + assert parse_range_error('x', 0, 6) == 'Invalid number.' +} + +fn test_range_step_star_1() ? { + mut bitv := []bool{len: 21, init: false} + parse_range('*/4', 0, 20, mut bitv) ? + + assert bitv_to_ints(bitv, 0) == [0, 4, 8, 12, 16, 20] +} + +fn test_range_step_star_2() ? { + mut bitv := []bool{len: 8, init: false} + parse_range('*/3', 1, 8, mut bitv) ? + + assert bitv_to_ints(bitv, 1) == [1, 4, 7] +} + +fn test_range_step_star_too_large() ? { + assert parse_range_error('*/21', 0, 20) == 'Step size too large.' +} + +fn test_range_step_zero() ? { + assert parse_range_error('*/0', 0, 20) == 'Step size zero not allowed.' +} + +fn test_range_step_number() ? { + mut bitv := []bool{len: 21, init: false} + parse_range('5/4', 2, 22, mut bitv) ? + + assert bitv_to_ints(bitv, 2) == [5, 9, 13, 17, 21] +} + +fn test_range_step_number_too_large() ? { + assert parse_range_error('10/4', 0, 5) == 'Out of range.' +} + +fn test_range_step_number_too_small() ? { + assert parse_range_error('2/4', 5, 10) == 'Out of range.' +} + +fn test_range_dash() ? { + mut bitv := []bool{len: 10, init: false} + parse_range('4-8', 0, 9, mut bitv) ? + + assert bitv_to_ints(bitv, 0) == [4, 5, 6, 7, 8] +} + +fn test_range_dash_step() ? { + mut bitv := []bool{len: 10, init: false} + parse_range('4-8/2', 0, 9, mut bitv) ? + + assert bitv_to_ints(bitv, 0) == [4, 6, 8] +} + +// =====parse_part===== +fn test_part_single() ? { + assert parse_part('*', 0, 5) ? 
== [0, 1, 2, 3, 4, 5] +} + +fn test_part_multiple() ? { + assert parse_part('*/2,2/3', 1, 8) ? == [1, 2, 3, 5, 7, 8] +} diff --git a/src/cron/expression/expression_test.v b/src/cron/expression/expression_test.v new file mode 100644 index 0000000..ef0283a --- /dev/null +++ b/src/cron/expression/expression_test.v @@ -0,0 +1,34 @@ +module expression + +import time { parse } + +fn util_test_time(exp string, t1_str string, t2_str string) ? { + ce := parse_expression(exp) ? + t1 := parse(t1_str) ? + t2 := parse(t2_str) ? + + t3 := ce.next(t1) ? + + assert t2.year == t3.year + assert t2.month == t3.month + assert t2.day == t3.day + assert t2.hour == t3.hour + assert t2.minute == t3.minute +} + +fn test_next_simple() ? { + // Very simple + util_test_time('0 3', '2002-01-01 00:00:00', '2002-01-01 03:00:00') ? + + // Overlap to next day + util_test_time('0 3', '2002-01-01 03:00:00', '2002-01-02 03:00:00') ? + util_test_time('0 3', '2002-01-01 04:00:00', '2002-01-02 03:00:00') ? + + util_test_time('0 3/4', '2002-01-01 04:00:00', '2002-01-01 07:00:00') ? + + // Overlap to next month + util_test_time('0 3', '2002-11-31 04:00:00', '2002-12-01 03:00:00') ? + + // Overlap to next year + util_test_time('0 3', '2002-12-31 04:00:00', '2003-01-01 03:00:00') ? +} diff --git a/src/cron/expression_test.v b/src/cron/expression_test.v deleted file mode 100644 index c7065f8..0000000 --- a/src/cron/expression_test.v +++ /dev/null @@ -1,35 +0,0 @@ -module cron - -import time { parse } - -fn util_test_time(exp string, t1_str string, t2_str string) ! { - ce := parse_expression(exp)! - t1 := parse(t1_str)! - t2 := parse(t2_str)! - - t3 := ce.next(t1) - - assert t2.year == t3.year - assert t2.month == t3.month - assert t2.day == t3.day - assert t2.hour == t3.hour - assert t2.minute == t3.minute -} - -fn test_next_simple() ! { - // Very simple - // util_test_time('0 3', '2002-01-01 00:00:00', '2002-01-01 03:00:00')! 
- - // Overlap to next day - mut exp := '0 3 ' - util_test_time(exp, '2002-01-01 03:00:00', '2002-01-02 03:00:00')! - util_test_time(exp, '2002-01-01 04:00:00', '2002-01-02 03:00:00')! - - util_test_time('0 3-7/4,7-19', '2002-01-01 04:00:00', '2002-01-01 07:00:00')! - - //// Overlap to next month - util_test_time('0 3', '2002-11-31 04:00:00', '2002-12-01 03:00:00')! - - //// Overlap to next year - util_test_time('0 3', '2002-12-31 04:00:00', '2003-01-01 03:00:00')! -} diff --git a/src/cron/parse_test.v b/src/cron/parse_test.v deleted file mode 100644 index 19575d7..0000000 --- a/src/cron/parse_test.v +++ /dev/null @@ -1,42 +0,0 @@ -module cron - -fn test_not_allowed() { - illegal_expressions := [ - '4 *-7', - '4 *-7/4', - '4 7/*', - '0 0 30 2', - '0 /5', - '0 ', - '0', - ' 0', - ' 0 ', - '1 2 3 4~9', - '1 1-3-5', - '0 5/2-5', - '', - '1 1/2/3', - '*5 8', - 'x 8', - ] - - mut res := false - - for exp in illegal_expressions { - res = false - parse_expression(exp) or { res = true } - assert res, "'${exp}' should produce an error" - } -} - -fn test_auto_extend() ! { - ce1 := parse_expression('5 5')! - ce2 := parse_expression('5 5 *')! - ce3 := parse_expression('5 5 * *')! - - assert ce1 == ce2 && ce2 == ce3 -} - -fn test_four() { - parse_expression('0 1 2 3 ') or { assert false } -} diff --git a/src/db/db.v b/src/db/db.v new file mode 100644 index 0000000..5ec240d --- /dev/null +++ b/src/db/db.v @@ -0,0 +1,21 @@ +module db + +import sqlite + +struct VieterDb { + conn sqlite.DB +} + +// init initializes a database & adds the correct tables. +pub fn init(db_path string) ?VieterDb { + conn := sqlite.connect(db_path) ? 
+ + sql conn { + create table GitRepo + create table BuildLog + } + + return VieterDb{ + conn: conn + } +} diff --git a/src/db/git.v b/src/db/git.v new file mode 100644 index 0000000..9a475a5 --- /dev/null +++ b/src/db/git.v @@ -0,0 +1,162 @@ +module db + +pub struct GitRepoArch { +pub: + id int [primary; sql: serial] + repo_id int [nonull] + value string [nonull] +} + +// str returns a string representation. +pub fn (gra &GitRepoArch) str() string { + return gra.value +} + +pub struct GitRepo { +pub mut: + id int [optional; primary; sql: serial] + // URL of the Git repository + url string [nonull] + // Branch of the Git repository to use + branch string [nonull] + // Which repo the builder should publish packages to + repo string [nonull] + // Cron schedule describing how frequently to build the repo. + schedule string [optional] + // On which architectures the package is allowed to be built. In reality, + // this controls which builders will periodically build the image. + arch []GitRepoArch [fkey: 'repo_id'] +} + +// str returns a string representation. 
+pub fn (gr &GitRepo) str() string { + mut parts := [ + 'id: $gr.id', + 'url: $gr.url', + 'branch: $gr.branch', + 'repo: $gr.repo', + 'schedule: $gr.schedule', + 'arch: ${gr.arch.map(it.value).join(', ')}', + ] + str := parts.join('\n') + + return str +} + +// patch_from_params patches a GitRepo from a map[string]string, usually +// provided from a web.App's params +pub fn (mut r GitRepo) patch_from_params(params map[string]string) { + $for field in GitRepo.fields { + if field.name in params { + $if field.typ is string { + r.$(field.name) = params[field.name] + // This specific type check is needed for the compiler to ensure + // our types are correct + } $else $if field.typ is []GitRepoArch { + r.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it }) + } + } + } +} + +// git_repo_from_params creates a GitRepo from a map[string]string, usually +// provided from a web.App's params +pub fn git_repo_from_params(params map[string]string) ?GitRepo { + mut repo := GitRepo{} + + // If we're creating a new GitRepo, we want all fields to be present before + // "patching". + $for field in GitRepo.fields { + if field.name !in params && !field.attrs.contains('optional') { + return error('Missing parameter: ${field.name}.') + } + } + repo.patch_from_params(params) + + return repo +} + +// get_git_repos returns all GitRepo's in the database. +pub fn (db &VieterDb) get_git_repos() []GitRepo { + res := sql db.conn { + select from GitRepo order by id + } + + return res +} + +// get_git_repo tries to return a specific GitRepo. +pub fn (db &VieterDb) get_git_repo(repo_id int) ?GitRepo { + res := sql db.conn { + select from GitRepo where id == repo_id + } + + // If a select statement fails, it returns a zeroed object. By + // checking one of the required fields, we can see whether the query + // returned a result or not. + if res.id == 0 { + return none + } + + return res +} + +// add_git_repo inserts the given GitRepo into the database. 
+pub fn (db &VieterDb) add_git_repo(repo GitRepo) { + sql db.conn { + insert repo into GitRepo + } +} + +// delete_git_repo deletes the repo with the given ID from the database. +pub fn (db &VieterDb) delete_git_repo(repo_id int) { + sql db.conn { + delete from GitRepo where id == repo_id + delete from GitRepoArch where repo_id == repo_id + } +} + +// update_git_repo updates any non-array values for a given GitRepo. +pub fn (db &VieterDb) update_git_repo(repo_id int, params map[string]string) { + mut values := []string{} + + // TODO does this allow for SQL injection? + $for field in GitRepo.fields { + if field.name in params { + // Any fields that are array types require their own update method + $if field.typ is string { + values << "$field.name = '${params[field.name]}'" + } + } + } + values_str := values.join(', ') + query := 'update GitRepo set $values_str where id == $repo_id' + + db.conn.exec_none(query) +} + +// update_git_repo_archs updates a given GitRepo's arch value. +pub fn (db &VieterDb) update_git_repo_archs(repo_id int, archs []GitRepoArch) { + archs_with_id := archs.map(GitRepoArch{ + ...it + repo_id: repo_id + }) + + sql db.conn { + delete from GitRepoArch where repo_id == repo_id + } + + for arch in archs_with_id { + sql db.conn { + insert arch into GitRepoArch + } + } +} + +// git_repo_exists is a utility function that checks whether a repo with the +// given id exists. +pub fn (db &VieterDb) git_repo_exists(repo_id int) bool { + db.get_git_repo(repo_id) or { return false } + + return true +} diff --git a/src/db/logs.v b/src/db/logs.v new file mode 100644 index 0000000..817db78 --- /dev/null +++ b/src/db/logs.v @@ -0,0 +1,74 @@ +module db + +import time + +pub struct BuildLog { +pub: + id int [primary; sql: serial] + repo_id int [nonull] + start_time time.Time [nonull] + end_time time.Time [nonull] + arch string [nonull] + exit_code int [nonull] +} + +// str returns a string representation. 
+pub fn (bl &BuildLog) str() string { + mut parts := [ + 'id: $bl.id', + 'repo id: $bl.repo_id', + 'start time: $bl.start_time', + 'end time: $bl.end_time', + 'arch: $bl.arch', + 'exit code: $bl.exit_code', + ] + str := parts.join('\n') + + return str +} + +// get_build_logs returns all BuildLog's in the database. +pub fn (db &VieterDb) get_build_logs() []BuildLog { + res := sql db.conn { + select from BuildLog order by id + } + + return res +} + +// get_build_logs_for_repo returns all BuildLog's in the database for a given +// repo. +pub fn (db &VieterDb) get_build_logs_for_repo(repo_id int) []BuildLog { + res := sql db.conn { + select from BuildLog where repo_id == repo_id order by id + } + + return res +} + +// get_build_log tries to return a specific BuildLog. +pub fn (db &VieterDb) get_build_log(id int) ?BuildLog { + res := sql db.conn { + select from BuildLog where id == id + } + + if res.id == 0 { + return none + } + + return res +} + +// add_build_log inserts the given BuildLog into the database. +pub fn (db &VieterDb) add_build_log(log BuildLog) { + sql db.conn { + insert log into BuildLog + } +} + +// delete_build_log delete the BuildLog with the given ID from the database. 
+pub fn (db &VieterDb) delete_build_log(id int) { + sql db.conn { + delete from BuildLog where id == id + } +} diff --git a/src/dbms/dbms.v b/src/dbms/dbms.v deleted file mode 100644 index e5676ab..0000000 --- a/src/dbms/dbms.v +++ /dev/null @@ -1,101 +0,0 @@ -module dbms - -import db.sqlite -import time - -pub struct VieterDb { - conn sqlite.DB -} - -struct MigrationVersion { - id int [primary] - version int -} - -const ( - migrations_up = [ - $embed_file('migrations/001-initial/up.sql'), - $embed_file('migrations/002-rename-to-targets/up.sql'), - $embed_file('migrations/003-target-url-type/up.sql'), - $embed_file('migrations/004-nullable-branch/up.sql'), - $embed_file('migrations/005-repo-path/up.sql'), - ] - migrations_down = [ - $embed_file('migrations/001-initial/down.sql'), - $embed_file('migrations/002-rename-to-targets/down.sql'), - $embed_file('migrations/003-target-url-type/down.sql'), - $embed_file('migrations/004-nullable-branch/down.sql'), - $embed_file('migrations/005-repo-path/down.sql'), - ] -) - -// init initializes a database & adds the correct tables. -pub fn init(db_path string) !VieterDb { - conn := sqlite.connect(db_path)! - - sql conn { - create table MigrationVersion - } - - cur_version := sql conn { - select from MigrationVersion limit 1 - } - - // If there's no row yet, we add it here - if cur_version == MigrationVersion{} { - sql conn { - insert cur_version into MigrationVersion - } - } - - // Apply each migration in order - for i in cur_version.version .. dbms.migrations_up.len { - migration := dbms.migrations_up[i].to_string() - - version_num := i + 1 - - // vfmt does not like these dots - println('Applying migration ${version_num}' + '...') - - // The sqlite library seems to not like it when multiple statements are - // passed in a single exec. Therefore, we split them & run them all - // separately. 
- for part in migration.split(';').map(it.trim_space()).filter(it != '') { - res := conn.exec_none(part) - - if res != sqlite.sqlite_done { - return error('An error occurred while applying migration ${version_num}: SQLite error code ${res}') - } - } - - // The where clause doesn't really matter, as there will always only be - // one entry anyways. - sql conn { - update MigrationVersion set version = version_num where id > 0 - } - } - - return VieterDb{ - conn: conn - } -} - -// row_into[T] converts an sqlite.Row into a given type T by parsing each field -// from a string according to its type. -pub fn row_into[T](row sqlite.Row) T { - mut i := 0 - mut out := T{} - - $for field in T.fields { - $if field.typ is string { - out.$(field.name) = row.vals[i] - } $else $if field.typ is int { - out.$(field.name) = row.vals[i].int() - } $else $if field.typ is time.Time { - out.$(field.name) = time.unix(row.vals[i].int()) - } - - i += 1 - } - return out -} diff --git a/src/dbms/logs.v b/src/dbms/logs.v deleted file mode 100644 index b0786b8..0000000 --- a/src/dbms/logs.v +++ /dev/null @@ -1,99 +0,0 @@ -module dbms - -import models { BuildLog, BuildLogFilter } -import time - -// get_build_logs returns all BuildLog's in the database. 
-pub fn (db &VieterDb) get_build_logs(filter BuildLogFilter) []BuildLog { - mut where_parts := []string{} - - if filter.target != 0 { - where_parts << 'target_id == ${filter.target}' - } - - if filter.before != time.Time{} { - where_parts << 'start_time < ${filter.before.unix_time()}' - } - - if filter.after != time.Time{} { - where_parts << 'start_time > ${filter.after.unix_time()}' - } - - // NOTE: possible SQL injection - if filter.arch != '' { - where_parts << "arch == '${filter.arch}'" - } - - mut parts := []string{} - - for exp in filter.exit_codes { - if exp[0] == `!` { - code := exp[1..].int() - - parts << 'exit_code != ${code}' - } else { - code := exp.int() - - parts << 'exit_code == ${code}' - } - } - - if parts.len > 0 { - where_parts << parts.map('(${it})').join(' or ') - } - - mut where_str := '' - - if where_parts.len > 0 { - where_str = 'where ' + where_parts.map('(${it})').join(' and ') - } - - query := 'select * from BuildLog ${where_str} limit ${filter.limit} offset ${filter.offset}' - rows, _ := db.conn.exec(query) - res := rows.map(row_into[BuildLog](it)) - - return res -} - -// get_build_logs_for_target returns all BuildLog's in the database for a given -// target. -pub fn (db &VieterDb) get_build_logs_for_target(target_id int) []BuildLog { - res := sql db.conn { - select from BuildLog where target_id == target_id order by id - } - - return res -} - -// get_build_log tries to return a specific BuildLog. -pub fn (db &VieterDb) get_build_log(id int) ?BuildLog { - res := sql db.conn { - select from BuildLog where id == id - } - - if res.id == 0 { - return none - } - - return res -} - -// add_build_log inserts the given BuildLog into the database. 
-pub fn (db &VieterDb) add_build_log(log BuildLog) int { - sql db.conn { - insert log into BuildLog - } - - // Here, this does work because a log doesn't contain any foreign keys, - // meaning the ORM only has to do a single add - inserted_id := db.conn.last_id() as int - - return inserted_id -} - -// delete_build_log delete the BuildLog with the given ID from the database. -pub fn (db &VieterDb) delete_build_log(id int) { - sql db.conn { - delete from BuildLog where id == id - } -} diff --git a/src/dbms/migrations/001-initial/down.sql b/src/dbms/migrations/001-initial/down.sql deleted file mode 100644 index 43ad40b..0000000 --- a/src/dbms/migrations/001-initial/down.sql +++ /dev/null @@ -1,3 +0,0 @@ -DROP TABLE IF EXISTS BuildLog; -DROP TABLE IF EXISTS GitRepoArch; -DROP TABLE IF EXISTS GitRepo; diff --git a/src/dbms/migrations/001-initial/up.sql b/src/dbms/migrations/001-initial/up.sql deleted file mode 100644 index ca0aace..0000000 --- a/src/dbms/migrations/001-initial/up.sql +++ /dev/null @@ -1,22 +0,0 @@ -CREATE TABLE IF NOT EXISTS GitRepo ( - id INTEGER PRIMARY KEY, - url TEXT NOT NULL, - branch TEXT NOT NULL, - repo TEXT NOT NULL, - schedule TEXT -); - -CREATE TABLE IF NOT EXISTS GitRepoArch ( - id INTEGER PRIMARY KEY, - repo_id INTEGER NOT NULL, - value TEXT NOT NULL -); - -CREATE TABLE IF NOT EXISTS BuildLog ( - id INTEGER PRIMARY KEY, - repo_id INTEGER NOT NULL, - start_time INTEGER NOT NULL, - end_time iNTEGER NOT NULL, - arch TEXT NOT NULL, - exit_code INTEGER NOT NULL -); diff --git a/src/dbms/migrations/002-rename-to-targets/down.sql b/src/dbms/migrations/002-rename-to-targets/down.sql deleted file mode 100644 index 861bfa9..0000000 --- a/src/dbms/migrations/002-rename-to-targets/down.sql +++ /dev/null @@ -1,5 +0,0 @@ -ALTER TABLE Target RENAME TO GitRepo; -ALTER TABLE TargetArch RENAME TO GitRepoArch; - -ALTER TABLE GitRepoArch RENAME COLUMN target_id TO repo_id; -ALTER TABLE BuildLog RENAME COLUMN target_id TO repo_id; diff --git 
a/src/dbms/migrations/002-rename-to-targets/up.sql b/src/dbms/migrations/002-rename-to-targets/up.sql deleted file mode 100644 index 081e3ee..0000000 --- a/src/dbms/migrations/002-rename-to-targets/up.sql +++ /dev/null @@ -1,5 +0,0 @@ -ALTER TABLE GitRepo RENAME TO Target; -ALTER TABLE GitRepoArch RENAME TO TargetArch; - -ALTER TABLE TargetArch RENAME COLUMN repo_id TO target_id; -ALTER TABLE BuildLog RENAME COLUMN repo_id TO target_id; diff --git a/src/dbms/migrations/003-target-url-type/down.sql b/src/dbms/migrations/003-target-url-type/down.sql deleted file mode 100644 index 9d9b45c..0000000 --- a/src/dbms/migrations/003-target-url-type/down.sql +++ /dev/null @@ -1,4 +0,0 @@ --- I'm not sure whether I should remove any non-git targets here. Keeping them --- will result in invalid targets, but removing them means losing data. -ALTER TABLE Target DROP COLUMN kind; - diff --git a/src/dbms/migrations/003-target-url-type/up.sql b/src/dbms/migrations/003-target-url-type/up.sql deleted file mode 100644 index f6be4f4..0000000 --- a/src/dbms/migrations/003-target-url-type/up.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE Target ADD COLUMN kind TEXT NOT NULL DEFAULT 'git'; diff --git a/src/dbms/migrations/004-nullable-branch/down.sql b/src/dbms/migrations/004-nullable-branch/down.sql deleted file mode 100644 index 2515593..0000000 --- a/src/dbms/migrations/004-nullable-branch/down.sql +++ /dev/null @@ -1,26 +0,0 @@ --- This down won't really work because it'll throw NOT NULL errors, but I'm --- just putting it here for future reference (still not sure whether I'm even - -- gonna use these) -PRAGMA foreign_keys=off; - -BEGIN TRANSACTION; - -ALTER TABLE Target RENAME TO _Target_old; - -CREATE TABLE Target ( - id INTEGER PRIMARY KEY, - url TEXT NOT NULL, - branch TEXT NOT NULL, - repo TEXT NOT NULL, - schedule TEXT, - kind TEXT NOT NULL DEFAULT 'git' -); - -INSERT INTO Target (id, url, branch, repo, schedule, kind) - SELECT id, url, branch, repo, schedule, kind FROM 
_Target_old; - -DROP TABLE _Target_old; - -COMMIT; - -PRAGMA foreign_keys=on; diff --git a/src/dbms/migrations/004-nullable-branch/up.sql b/src/dbms/migrations/004-nullable-branch/up.sql deleted file mode 100644 index 6333c37..0000000 --- a/src/dbms/migrations/004-nullable-branch/up.sql +++ /dev/null @@ -1,23 +0,0 @@ -PRAGMA foreign_keys=off; - -BEGIN TRANSACTION; - -ALTER TABLE Target RENAME TO _Target_old; - -CREATE TABLE Target ( - id INTEGER PRIMARY KEY, - url TEXT NOT NULL, - branch TEXT, - repo TEXT NOT NULL, - schedule TEXT, - kind TEXT NOT NULL DEFAULT 'git' -); - -INSERT INTO Target (id, url, branch, repo, schedule, kind) - SELECT id, url, branch, repo, schedule, kind FROM _Target_old; - -DROP TABLE _Target_old; - -COMMIT; - -PRAGMA foreign_keys=on; diff --git a/src/dbms/migrations/005-repo-path/down.sql b/src/dbms/migrations/005-repo-path/down.sql deleted file mode 100644 index 8a6f021..0000000 --- a/src/dbms/migrations/005-repo-path/down.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE Target DROP COLUMN path; diff --git a/src/dbms/migrations/005-repo-path/up.sql b/src/dbms/migrations/005-repo-path/up.sql deleted file mode 100644 index f7e5c29..0000000 --- a/src/dbms/migrations/005-repo-path/up.sql +++ /dev/null @@ -1 +0,0 @@ -ALTER TABLE Target ADD COLUMN path TEXT; diff --git a/src/dbms/targets.v b/src/dbms/targets.v deleted file mode 100644 index a55220f..0000000 --- a/src/dbms/targets.v +++ /dev/null @@ -1,87 +0,0 @@ -module dbms - -import models { Target, TargetArch } - -// get_target tries to return a specific target. -pub fn (db &VieterDb) get_target(target_id int) ?Target { - res := sql db.conn { - select from Target where id == target_id - } - - // If a select statement fails, it returns a zeroed object. By - // checking one of the required fields, we can see whether the query - // returned a result or not. - if res.id == 0 { - return none - } - - return res -} - -// add_target inserts the given target into the database. 
-pub fn (db &VieterDb) add_target(target Target) int { - sql db.conn { - insert target into Target - } - - // ID of inserted target is the largest id - inserted_target := sql db.conn { - select from Target order by id desc limit 1 - } - - return inserted_target.id -} - -// delete_target deletes the target with the given id from the database. -pub fn (db &VieterDb) delete_target(target_id int) { - sql db.conn { - delete from Target where id == target_id - delete from TargetArch where target_id == target_id - } -} - -// update_target updates any non-array values for a given target. -pub fn (db &VieterDb) update_target(target_id int, params map[string]string) { - mut values := []string{} - - // TODO does this allow for SQL injection? - $for field in Target.fields { - if field.name in params { - // Any fields that are array types require their own update method - $if field.typ is string { - values << "${field.name} = '${params[field.name]}'" - } - } - } - values_str := values.join(', ') - // I think this is actual SQL & not the ORM language - query := 'update Target set ${values_str} where id == ${target_id}' - - db.conn.exec_none(query) -} - -// update_target_archs updates a given target's arch value. -pub fn (db &VieterDb) update_target_archs(target_id int, archs []TargetArch) { - archs_with_id := archs.map(TargetArch{ - ...it - target_id: target_id - }) - - sql db.conn { - delete from TargetArch where target_id == target_id - } - - for arch in archs_with_id { - sql db.conn { - insert arch into TargetArch - } - } -} - -// target_exists is a utility function that checks whether a target with the -// given id exists. 
-pub fn (db &VieterDb) target_exists(target_id int) bool { - db.get_target(target_id) or { return false } - - return true -} diff --git a/src/dbms/targets_iter.v b/src/dbms/targets_iter.v deleted file mode 100644 index ca149b9..0000000 --- a/src/dbms/targets_iter.v +++ /dev/null @@ -1,129 +0,0 @@ -module dbms - -import models { Target, TargetFilter } -import db.sqlite - -// Iterator providing a filtered view into the list of targets currently stored -// in the database. It replaces functionality usually performed in the database -// using SQL queries that can't currently be used due to missing stuff in V's -// ORM. -pub struct TargetsIterator { - conn sqlite.DB - filter TargetFilter - window_size int = 32 -mut: - window []Target - window_index u64 - // Offset in entire list of unfiltered targets - offset int - // Offset in filtered list of targets - filtered_offset u64 - started bool - done bool -} - -// targets returns an iterator allowing filtered access to the list of targets. -pub fn (db &VieterDb) targets(filter TargetFilter) TargetsIterator { - window_size := 32 - - return TargetsIterator{ - conn: db.conn - filter: filter - window: []Target{cap: window_size} - window_size: window_size - } -} - -// advance_window moves the sliding window over the filtered list of targets -// until it either reaches the end of the list of targets, or has encountered a -// non-empty window. 
-fn (mut ti TargetsIterator) advance_window() { - for { - ti.window = sql ti.conn { - select from Target order by id limit ti.window_size offset ti.offset - } - ti.offset += ti.window.len - - if ti.window.len == 0 { - ti.done = true - - return - } - - if ti.filter.repo != '' { - ti.window = ti.window.filter(it.repo == ti.filter.repo) - } - - if ti.filter.arch != '' { - ti.window = ti.window.filter(it.arch.any(it.value == ti.filter.arch)) - } - - if ti.filter.query != '' { - ti.window = ti.window.filter(it.url.contains(ti.filter.query) - || it.path.contains(ti.filter.query) || it.branch.contains(ti.filter.query)) - } - - // We break out of the loop once we found a non-empty window - if ti.window.len > 0 { - break - } - } -} - -// next returns the next target, if possible. -pub fn (mut ti TargetsIterator) next() ?Target { - if ti.done { - return none - } - - // The first call to `next` will cause the sliding window to move to where - // the requested offset starts - if !ti.started { - ti.advance_window() - - // Skip all matched targets until the requested offset - for !ti.done && ti.filtered_offset + u64(ti.window.len) <= ti.filter.offset { - ti.filtered_offset += u64(ti.window.len) - ti.advance_window() - } - - if ti.done { - return none - } - - left_inside_window := ti.filter.offset - ti.filtered_offset - ti.window_index = left_inside_window - ti.filtered_offset += left_inside_window - - ti.started = true - } - - return_value := ti.window[ti.window_index] - - ti.window_index++ - ti.filtered_offset++ - - // Next call will be past the requested offset - if ti.filter.limit > 0 && ti.filtered_offset == ti.filter.offset + ti.filter.limit { - ti.done = true - } - - // Ensure the next call has a new valid window - if ti.window_index == u64(ti.window.len) { - ti.advance_window() - ti.window_index = 0 - } - - return return_value -} - -// collect consumes the entire iterator & returns the result as an array. 
-pub fn (mut ti TargetsIterator) collect() []Target { - mut out := []Target{} - - for t in ti { - out << t - } - - return out -} diff --git a/src/docker/containers.v b/src/docker/containers.v new file mode 100644 index 0000000..2258f3b --- /dev/null +++ b/src/docker/containers.v @@ -0,0 +1,117 @@ +module docker + +import json +import net.urllib +import time + +struct Container { + id string [json: Id] + names []string [json: Names] +} + +// containers returns a list of all currently running containers +pub fn containers() ?[]Container { + res := request('GET', urllib.parse('/v1.41/containers/json') ?) ? + + return json.decode([]Container, res.text) or {} +} + +pub struct NewContainer { + image string [json: Image] + entrypoint []string [json: Entrypoint] + cmd []string [json: Cmd] + env []string [json: Env] + work_dir string [json: WorkingDir] + user string [json: User] +} + +struct CreatedContainer { + id string [json: Id] +} + +// create_container creates a container defined by the given configuration. If +// successful, it returns the ID of the newly created container. +pub fn create_container(c &NewContainer) ?string { + res := request_with_json('POST', urllib.parse('/v1.41/containers/create') ?, c) ? + + if res.status_code != 201 { + return error('Failed to create container.') + } + + return json.decode(CreatedContainer, res.text) ?.id +} + +// start_container starts a container with a given ID. It returns whether the +// container was started or not. +pub fn start_container(id string) ?bool { + res := request('POST', urllib.parse('/v1.41/containers/$id/start') ?) ? 
+ + return res.status_code == 204 +} + +struct ContainerInspect { +pub mut: + state ContainerState [json: State] +} + +struct ContainerState { +pub: + running bool [json: Running] + status string [json: Status] + exit_code int [json: ExitCode] + // These use a rather specific format so they have to be parsed later + start_time_str string [json: StartedAt] + end_time_str string [json: FinishedAt] +pub mut: + start_time time.Time [skip] + end_time time.Time [skip] +} + +// inspect_container returns the result of inspecting a container with a given +// ID. +pub fn inspect_container(id string) ?ContainerInspect { + res := request('GET', urllib.parse('/v1.41/containers/$id/json') ?) ? + + if res.status_code != 200 { + return error('Failed to inspect container.') + } + + mut data := json.decode(ContainerInspect, res.text) ? + + data.state.start_time = time.parse_rfc3339(data.state.start_time_str) ? + + if data.state.status == 'exited' { + data.state.end_time = time.parse_rfc3339(data.state.end_time_str) ? + } + + return data +} + +// remove_container removes a container with a given ID. +pub fn remove_container(id string) ?bool { + res := request('DELETE', urllib.parse('/v1.41/containers/$id') ?) ? + + return res.status_code == 204 +} + +// get_container_logs retrieves the logs for a Docker container, both stdout & +// stderr. +pub fn get_container_logs(id string) ?string { + res := request('GET', urllib.parse('/v1.41/containers/$id/logs?stdout=true&stderr=true') ?) ? + mut res_bytes := res.text.bytes() + + // Docker uses a special "stream" format for their logs, so we have to + // clean up the data. 
+ mut index := 0 + + for index < res_bytes.len { + // The reverse is required because V reads in the bytes differently + t := res_bytes[index + 4..index + 8].reverse() + len_length := unsafe { *(&u32(&t[0])) } + + res_bytes.delete_many(index, 8) + index += int(len_length) + } + + return res_bytes.bytestr() +} diff --git a/src/docker/docker.v b/src/docker/docker.v new file mode 100644 index 0000000..5deef83 --- /dev/null +++ b/src/docker/docker.v @@ -0,0 +1,97 @@ +module docker + +import net.unix +import net.urllib +import net.http +import json + +const socket = '/var/run/docker.sock' + +const buf_len = 1024 + +// send writes a request to the Docker socket, waits for a response & returns +// it. +fn send(req &string) ?http.Response { + // Open a connection to the socket + mut s := unix.connect_stream(docker.socket) or { + return error('Failed to connect to socket ${docker.socket}.') + } + + defer { + // This or is required because otherwise, the V compiler segfaults for + // some reason + // https://github.com/vlang/v/issues/13534 + s.close() or {} + } + + // Write the request to the socket + s.write_string(req) or { return error('Failed to write request to socket ${docker.socket}.') } + + s.wait_for_write() ? + + mut c := 0 + mut buf := []u8{len: docker.buf_len} + mut res := []u8{} + + for { + c = s.read(mut buf) or { return error('Failed to read data from socket ${docker.socket}.') } + res << buf[..c] + + if c < docker.buf_len { + break + } + } + + // After reading the first part of the response, we parse it into an HTTP + // response. If it isn't chunked, we return early with the data. + parsed := http.parse_response(res.bytestr()) or { + return error('Failed to parse HTTP response from socket ${docker.socket}.') + } + + if parsed.header.get(http.CommonHeader.transfer_encoding) or { '' } != 'chunked' { + return parsed + } + + // We loop until we've encountered the end of the chunked response + // A chunked HTTP response always ends with '0\r\n\r\n'. 
+	for res.len < 5 || res#[-5..] != [u8(`0`), `\r`, `\n`, `\r`, `\n`] {
+		// Wait for the server to respond
+		s.wait_for_write() ?
+
+		for {
+			c = s.read(mut buf) or {
+				return error('Failed to read data from socket ${docker.socket}.')
+			}
+			res << buf[..c]
+
+			if c < docker.buf_len {
+				break
+			}
+		}
+	}
+
+	// Decode chunked response
+	return http.parse_response(res.bytestr())
+}
+
+// request_with_body sends a request to the Docker socket with the given body.
+fn request_with_body(method string, url urllib.URL, content_type string, body string) ?http.Response {
+	req := '$method $url.request_uri() HTTP/1.1\nHost: localhost\nContent-Type: $content_type\nContent-Length: $body.len\n\n$body\n\n'
+
+	return send(req)
+}
+
+// request sends a request to the Docker socket with an empty body.
+fn request(method string, url urllib.URL) ?http.Response {
+	req := '$method $url.request_uri() HTTP/1.1\nHost: localhost\n\n'
+
+	return send(req)
+}
+
+// request_with_json sends a request to the Docker socket with a given JSON
+// payload
+pub fn request_with_json<T>(method string, url urllib.URL, data &T) ?http.Response {
+	body := json.encode(data)
+
+	return request_with_body(method, url, 'application/json', body)
+}
diff --git a/src/docker/images.v b/src/docker/images.v
new file mode 100644
index 0000000..e94ceca
--- /dev/null
+++ b/src/docker/images.v
@@ -0,0 +1,34 @@
+module docker
+
+import net.http
+import net.urllib
+import json
+
+struct Image {
+pub:
+	id string [json: Id]
+}
+
+// pull_image tries to pull the image for the given image & tag
+pub fn pull_image(image string, tag string) ?http.Response {
+	return request('POST', urllib.parse('/v1.41/images/create?fromImage=$image&tag=$tag') ?)
+}
+
+// create_image_from_container creates a new image from a container with the
+// given repo & tag, given the container's ID.
+pub fn create_image_from_container(id string, repo string, tag string) ?Image {
+	res := request('POST', urllib.parse('/v1.41/commit?container=$id&repo=$repo&tag=$tag') ?) ?
+
+	if res.status_code != 201 {
+		return error('Failed to create image from container.')
+	}
+
+	return json.decode(Image, res.text) or {}
+}
+
+// remove_image removes the image with the given ID.
+pub fn remove_image(id string) ?bool {
+	res := request('DELETE', urllib.parse('/v1.41/images/$id') ?) ?
+
+	return res.status_code == 200
+}
diff --git a/src/env/env.v b/src/env/env.v
new file mode 100644
index 0000000..b2b5f44
--- /dev/null
+++ b/src/env/env.v
@@ -0,0 +1,96 @@
+module env
+
+import os
+import toml
+
+// The prefix that every environment variable should have
+const prefix = 'VIETER_'
+
+// The suffix an environment variable must have in order for it to be loaded
+// from a file instead
+const file_suffix = '_FILE'
+
+fn get_env_var(field_name string) ?string {
+	env_var_name := '$env.prefix$field_name.to_upper()'
+	env_file_name := '$env.prefix$field_name.to_upper()$env.file_suffix'
+	env_var := os.getenv(env_var_name)
+	env_file := os.getenv(env_file_name)
+
+	// If both are missing, we return an empty string
+	if env_var == '' && env_file == '' {
+		return ''
+	}
+
+	// If they're both set, we report a conflict
+	if env_var != '' && env_file != '' {
+		return error('Only one of $env_var_name or $env_file_name can be defined.')
+	}
+
+	// If it's the env var itself, we return it.
+	// I'm pretty sure this also prevents variables ending in _FILE (e.g.
+	// VIETER_LOG_FILE) from being mistakenly read as a _FILE-suffixed env
+	// var.
+	if env_var != '' {
+		return env_var
+	}
+
+	// Otherwise, we process the file
+	return os.read_file(env_file) or {
+		error('Failed to read file defined in $env_file_name: ${err.msg()}.')
+	}
+}
+
+// load attempts to create an object of type T from the given path to a toml
+// file & environment variables.
For each field, it will select either a value +// given from an environment variable, a value defined in the config file or a +// configured default if present, in that order. +pub fn load(path string) ?T { + mut res := T{} + + if os.exists(path) { + // We don't use reflect here because reflect also sets any fields not + // in the toml back to their zero value, which we don't want + doc := toml.parse_file(path) ? + + $for field in T.fields { + s := doc.value(field.name) + + if s !is toml.Null { + $if field.typ is string { + res.$(field.name) = s.string() + } $else $if field.typ is int { + res.$(field.name) = s.int() + } + } + } + } + + $for field in T.fields { + env_value := get_env_var(field.name) ? + + // The value of an env var will always take precedence over the toml + // file. + if env_value != '' { + $if field.typ is string { + res.$(field.name) = env_value + } $else $if field.typ is int { + res.$(field.name) = env_value.int() + } + } + + // Now, we check whether a value is present. If there isn't, that means + // it isn't in the config file, nor is there a default or an env var. + mut has_value := false + + $if field.typ is string { + has_value = res.$(field.name) != '' + } $else $if field.typ is int { + has_value = res.$(field.name) != 0 + } + + if !has_value { + return error("Missing config variable '$field.name' with no provided default. 
Either add it to the config file or provide it using an environment variable.") + } + } + return res +} diff --git a/src/libvieter b/src/libvieter deleted file mode 160000 index 379a05a..0000000 --- a/src/libvieter +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 379a05a7b6b604c107360e0a679fb3ea5400e02c diff --git a/src/main.v b/src/main.v index e3b8a1a..41d0d33 100644 --- a/src/main.v +++ b/src/main.v @@ -3,25 +3,16 @@ module main import os import server import cli -import console.targets +import build +import console.git import console.logs -import console.schedule -import console.man -import console.aur -import console.repos -import agent +import cron fn main() { - // Stop buffering output so logs always show up immediately - unsafe { - C.setbuf(C.stdout, 0) - } - mut app := cli.Command{ name: 'vieter' description: 'Vieter is a lightweight implementation of an Arch repository server.' - version: '0.6.0' - posix_mode: true + version: '0.3.0-alpha.1' flags: [ cli.Flag{ flag: cli.FlagType.string @@ -31,26 +22,16 @@ fn main() { global: true default_value: [os.expand_tilde_to_home('~/.vieterrc')] }, - cli.Flag{ - flag: cli.FlagType.bool - name: 'raw' - abbrev: 'r' - description: 'Only output minimal information (no formatted tables, etc.)' - global: true - }, ] commands: [ server.cmd(), - targets.cmd(), + build.cmd(), + git.cmd(), + cron.cmd(), logs.cmd(), - schedule.cmd(), - man.cmd(), - aur.cmd(), - agent.cmd(), - repos.cmd(), ] } + app.setup() app.parse(os.args) - return } diff --git a/src/models/builds.v b/src/models/builds.v deleted file mode 100644 index 6923115..0000000 --- a/src/models/builds.v +++ /dev/null @@ -1,19 +0,0 @@ -module models - -pub struct BuildConfig { -pub: - target_id int - kind string - url string - branch string - path string - repo string - base_image string - force bool - timeout int -} - -// str return a single-line string representation of a build log -pub fn (c BuildConfig) str() string { - return '{ target: ${c.target_id}, kind: 
${c.kind}, url: ${c.url}, branch: ${c.branch}, path: ${c.path}, repo: ${c.repo}, base_image: ${c.base_image}, force: ${c.force}, timeout: ${c.timeout} }' -} diff --git a/src/models/logs.v b/src/models/logs.v deleted file mode 100644 index cb01d08..0000000 --- a/src/models/logs.v +++ /dev/null @@ -1,49 +0,0 @@ -module models - -import time -import os - -pub struct BuildLog { -pub mut: - id int [primary; sql: serial] - target_id int [nonull] - start_time time.Time [nonull] - end_time time.Time [nonull] - arch string [nonull] - exit_code int [nonull] -} - -// str returns a string representation. -pub fn (bl &BuildLog) str() string { - mut parts := [ - 'id: ${bl.id}', - 'target id: ${bl.target_id}', - 'start time: ${bl.start_time.local()}', - 'end time: ${bl.end_time.local()}', - 'duration: ${bl.end_time - bl.start_time}', - 'arch: ${bl.arch}', - 'exit code: ${bl.exit_code}', - ] - str := parts.join('\n') - - return str -} - -// path returns the path to the log file, relative to the logs directory -pub fn (bl &BuildLog) path() string { - filename := bl.start_time.custom_format('YYYY-MM-DD_HH-mm-ss') - - return os.join_path(bl.target_id.str(), bl.arch, filename) -} - -[params] -pub struct BuildLogFilter { -pub mut: - limit u64 = 25 - offset u64 - target int - before time.Time - after time.Time - arch string - exit_codes []string -} diff --git a/src/models/models.v b/src/models/models.v deleted file mode 100644 index 1ed0da8..0000000 --- a/src/models/models.v +++ /dev/null @@ -1,53 +0,0 @@ -module models - -import time - -// from_params[T] creates a new instance of T from the given map by parsing all -// of its fields from the map. -pub fn from_params[T](params map[string]string) ?T { - mut o := T{} - - patch_from_params[T](mut o, params)? - - return o -} - -// patch_from_params[T] updates the given T object with the params defined in -// the map. -pub fn patch_from_params[T](mut o T, params map[string]string) ? 
{ - $for field in T.fields { - if field.name in params && params[field.name] != '' { - $if field.typ is string { - o.$(field.name) = params[field.name] - } $else $if field.typ is int { - o.$(field.name) = params[field.name].int() - } $else $if field.typ is u64 { - o.$(field.name) = params[field.name].u64() - } $else $if field.typ is []TargetArch { - o.$(field.name) = params[field.name].split(',').map(TargetArch{ value: it }) - } $else $if field.typ is time.Time { - o.$(field.name) = time.unix(params[field.name].int()) - } $else $if field.typ is []string { - o.$(field.name) = params[field.name].split(',') - } - } else if field.attrs.contains('nonull') { - return error('Missing parameter: ${field.name}.') - } - } -} - -// params_from[T] converts a given T struct into a map of strings. -pub fn params_from[T](o &T) map[string]string { - mut out := map[string]string{} - - $for field in T.fields { - $if field.typ is time.Time { - out[field.name] = o.$(field.name).unix_time().str() - } $else $if field.typ is []string { - out[field.name] = o.$(field.name).join(',') - } $else { - out[field.name] = o.$(field.name).str() - } - } - return out -} diff --git a/src/models/targets.v b/src/models/targets.v deleted file mode 100644 index 14cc8a6..0000000 --- a/src/models/targets.v +++ /dev/null @@ -1,79 +0,0 @@ -module models - -pub const valid_kinds = ['git', 'url'] - -pub struct TargetArch { -pub: - id int [primary; sql: serial] - target_id int [nonull] - value string [nonull] -} - -// str returns a string representation. -pub fn (gra &TargetArch) str() string { - return gra.value -} - -pub struct Target { -pub mut: - id int [primary; sql: serial] - kind string [nonull] - // If kind is git: URL of the Git repository - // If kind is url: URL to PKGBUILD file - url string [nonull] - // Branch of the Git repository to use; only applicable when kind is git. - // If not provided, the repository is cloned with the default branch. 
- branch string - // Which repo the builder should publish packages to - repo string [nonull] - // Cron schedule describing how frequently to build the repo. - schedule string - // Subdirectory in the Git repository to cd into - path string - // On which architectures the package is allowed to be built. In reality, - // this controls which agents will build this package when scheduled. - arch []TargetArch [fkey: 'target_id'] -} - -// str returns a string representation. -pub fn (t &Target) str() string { - mut parts := [ - 'id: ${t.id}', - 'kind: ${t.kind}', - 'url: ${t.url}', - 'branch: ${t.branch}', - 'path: ${t.path}', - 'repo: ${t.repo}', - 'schedule: ${t.schedule}', - 'arch: ${t.arch.map(it.value).join(', ')}', - ] - str := parts.join('\n') - - return str -} - -// as_build_config converts a Target into a BuildConfig, given some extra -// needed information. -pub fn (t &Target) as_build_config(base_image string, force bool, timeout int) BuildConfig { - return BuildConfig{ - target_id: t.id - kind: t.kind - url: t.url - branch: t.branch - path: t.path - repo: t.repo - base_image: base_image - force: force - timeout: timeout - } -} - -[params] -pub struct TargetFilter { -pub mut: - limit u64 = 25 - offset u64 - repo string - query string - arch string -} diff --git a/src/package/README.md b/src/package/README.md deleted file mode 100644 index b2bcbd7..0000000 --- a/src/package/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# package - -This module handles both parsing the published Arch tarballs & the contents of -their `.PKGINFO` files, as well as generating the contents of the database -archives' `desc` & `files` files. diff --git a/src/package/format.v b/src/package/format.v deleted file mode 100644 index b126f3a..0000000 --- a/src/package/format.v +++ /dev/null @@ -1,103 +0,0 @@ -module package - -// format_entry returns a string properly formatted to be added to a desc file. 
-[inline] -fn format_entry(key string, value string) string { - return '\n%${key}%\n${value}\n' -} - -// full_name returns the properly formatted name for the package, including -// version & architecture -pub fn (pkg &Pkg) full_name() string { - p := pkg.info - return '${p.name}-${p.version}-${p.arch}' -} - -// filename returns the correct filename of the package file -pub fn (pkg &Pkg) filename() string { - ext := match pkg.compression { - 0 { '.tar' } - 1 { '.tar.gz' } - 6 { '.tar.xz' } - 14 { '.tar.zst' } - else { panic("Another compression code shouldn't be possible. Faulty code: ${pkg.compression}") } - } - - return '${pkg.full_name()}.pkg${ext}' -} - -// to_desc returns a desc file valid string representation -pub fn (pkg &Pkg) to_desc() !string { - p := pkg.info - - // filename - mut desc := '%FILENAME%\n${pkg.filename()}\n' - - desc += format_entry('NAME', p.name) - desc += format_entry('BASE', p.base) - desc += format_entry('VERSION', p.version) - - if p.description.len > 0 { - desc += format_entry('DESC', p.description) - } - - if p.groups.len > 0 { - desc += format_entry('GROUPS', p.groups.join_lines()) - } - - desc += format_entry('CSIZE', p.csize.str()) - desc += format_entry('ISIZE', p.size.str()) - - sha256sum := pkg.checksum()! 
- - desc += format_entry('SHA256SUM', sha256sum) - - // TODO add pgpsig stuff - - if p.url.len > 0 { - desc += format_entry('URL', p.url) - } - - if p.licenses.len > 0 { - desc += format_entry('LICENSE', p.licenses.join_lines()) - } - - desc += format_entry('ARCH', p.arch) - desc += format_entry('BUILDDATE', p.build_date.str()) - desc += format_entry('PACKAGER', p.packager) - - if p.replaces.len > 0 { - desc += format_entry('REPLACES', p.replaces.join_lines()) - } - - if p.conflicts.len > 0 { - desc += format_entry('CONFLICTS', p.conflicts.join_lines()) - } - - if p.provides.len > 0 { - desc += format_entry('PROVIDES', p.provides.join_lines()) - } - - if p.depends.len > 0 { - desc += format_entry('DEPENDS', p.depends.join_lines()) - } - - if p.optdepends.len > 0 { - desc += format_entry('OPTDEPENDS', p.optdepends.join_lines()) - } - - if p.makedepends.len > 0 { - desc += format_entry('MAKEDEPENDS', p.makedepends.join_lines()) - } - - if p.checkdepends.len > 0 { - desc += format_entry('CHECKDEPENDS', p.checkdepends.join_lines()) - } - - return '${desc}\n' -} - -// to_files returns a files file valid string representation -pub fn (pkg &Pkg) to_files() string { - return '%FILES%\n${pkg.files.join_lines()}\n' -} diff --git a/src/package/package.v b/src/package/package.v index 6cf8e3d..a1042b5 100644 --- a/src/package/package.v +++ b/src/package/package.v @@ -4,7 +4,7 @@ import os import util // Represents a read archive -pub struct Pkg { +struct Pkg { pub: path string [required] info PkgInfo [required] @@ -42,13 +42,13 @@ pub mut: checkdepends []string } -// checksum calculates the sha256 hash of the package -pub fn (p &Pkg) checksum() !string { +// checksum calculates the md5 & sha256 hash of the package +pub fn (p &Pkg) checksum() ?(string, string) { return util.hash_file(p.path) } // parse_pkg_info_string parses a PkgInfo object from a string -fn parse_pkg_info_string(pkg_info_str &string) !PkgInfo { +fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo { mut 
pkg_info := PkgInfo{} // Iterate over the entire string @@ -101,9 +101,9 @@ fn parse_pkg_info_string(pkg_info_str &string) !PkgInfo { // read_pkg_archive extracts the file list & .PKGINFO contents from an archive // NOTE: this command only supports zstd-, xz- & gzip-compressed tarballs. -pub fn read_pkg_archive(pkg_path string) !Pkg { +pub fn read_pkg_archive(pkg_path string) ?Pkg { if !os.is_file(pkg_path) { - return error("'${pkg_path}' doesn't exist or isn't a file.") + return error("'$pkg_path' doesn't exist or isn't a file.") } a := C.archive_read_new() @@ -159,7 +159,7 @@ pub fn read_pkg_archive(pkg_path string) !Pkg { pkg_text := unsafe { buf.vstring_with_len(size).clone() } - pkg_info = parse_pkg_info_string(pkg_text)! + pkg_info = parse_pkg_info_string(pkg_text) ? } else { C.archive_read_data_skip(a) } @@ -174,3 +174,106 @@ pub fn read_pkg_archive(pkg_path string) !Pkg { compression: compression_code } } + +// format_entry returns a string properly formatted to be added to a desc file. +fn format_entry(key string, value string) string { + return '\n%$key%\n$value\n' +} + +// full_name returns the properly formatted name for the package, including +// version & architecture +pub fn (pkg &Pkg) full_name() string { + p := pkg.info + return '$p.name-$p.version-$p.arch' +} + +// filename returns the correct filename of the package file +pub fn (pkg &Pkg) filename() string { + ext := match pkg.compression { + 0 { '.tar' } + 1 { '.tar.gz' } + 6 { '.tar.xz' } + 14 { '.tar.zst' } + else { panic("Another compression code shouldn't be possible. 
Faulty code: $pkg.compression") } + } + + return '${pkg.full_name()}.pkg$ext' +} + +// to_desc returns a desc file valid string representation +// TODO calculate md5 & sha256 instead of believing the file +pub fn (pkg &Pkg) to_desc() string { + p := pkg.info + + // filename + mut desc := '%FILENAME%\n$pkg.filename()\n' + + desc += format_entry('NAME', p.name) + desc += format_entry('BASE', p.base) + desc += format_entry('VERSION', p.version) + + if p.description.len > 0 { + desc += format_entry('DESC', p.description) + } + + if p.groups.len > 0 { + desc += format_entry('GROUPS', p.groups.join_lines()) + } + + desc += format_entry('CSIZE', p.csize.str()) + desc += format_entry('ISIZE', p.size.str()) + + md5sum, sha256sum := pkg.checksum() or { '', '' } + + desc += format_entry('MD5SUM', md5sum) + desc += format_entry('SHA256SUM', sha256sum) + + // TODO add pgpsig stuff + + if p.url.len > 0 { + desc += format_entry('URL', p.url) + } + + if p.licenses.len > 0 { + desc += format_entry('LICENSE', p.licenses.join_lines()) + } + + desc += format_entry('ARCH', p.arch) + desc += format_entry('BUILDDATE', p.build_date.str()) + desc += format_entry('PACKAGER', p.packager) + + if p.replaces.len > 0 { + desc += format_entry('REPLACES', p.replaces.join_lines()) + } + + if p.conflicts.len > 0 { + desc += format_entry('CONFLICTS', p.conflicts.join_lines()) + } + + if p.provides.len > 0 { + desc += format_entry('PROVIDES', p.provides.join_lines()) + } + + if p.depends.len > 0 { + desc += format_entry('DEPENDS', p.depends.join_lines()) + } + + if p.optdepends.len > 0 { + desc += format_entry('OPTDEPENDS', p.optdepends.join_lines()) + } + + if p.makedepends.len > 0 { + desc += format_entry('MAKEDEPENDS', p.makedepends.join_lines()) + } + + if p.checkdepends.len > 0 { + desc += format_entry('CHECKDEPENDS', p.checkdepends.join_lines()) + } + + return '$desc\n' +} + +// to_files returns a files file valid string representation +pub fn (pkg &Pkg) to_files() string { + return 
'%FILES%\n$pkg.files.join_lines()\n' +} diff --git a/src/repo/README.md b/src/repo/README.md deleted file mode 100644 index f06b1d3..0000000 --- a/src/repo/README.md +++ /dev/null @@ -1,43 +0,0 @@ -# repo - -This module manages the contents of the various repositories stored within a -Vieter instance. - -## Terminology - -* Arch-repository (arch-repo): specific architecture of a given repository. This is what - Pacman actually uses as a repository, and contains its own `.db` & `.files` - files. -* Repository (repo): a collection of arch-repositories. A single repository can - contain packages of different architectures, with each package being stored - in that specific architecture' arch-repository. -* Repository group (repo-group): a collection of repositories. Each Vieter - instance consists of a single repository group, which manages all underlying - repositories & arch-repositories. - -## Arch-repository layout - -An arch-repository (aka a regular Pacman repository) consists of a directory -with the following files (`{repo}` should be replaced with the name of the -repository): - -* One or more package directories. These directories follow the naming scheme - `${pkgname}-${pkgver}-${pkgrel}`. Each of these directories contains two - files, `desc` & `files`. The `desc` file is a list of the package's metadata, - while `files` contains a list of all files that the package contains. The - latter is used when using `pacman -F`. -* `{repo}.db` & `{repo}.db.tar.gz`: the database file of the repository. This - is just a compressed tarball of all package directories, but only their - `desc` files. Both these files should have the same content (`repo-add` - creates a symlink, but Vieter just serves the same file for both routes) -* `{repo}.files` & `{repo}.files.tar.gz`: the same as the `.db` file, but this - also contains the `files` files, instead of just the `desc` files. - -## Filesystem layout - -The repository part of Vieter consists of two directories. 
One is the `repos` -directory inside the configured `data_dir`, while the other is the configured -`pkg_dir`. `repos` contains only the repository group, while `pkg_dir` contains -the actual package archives. `pkg_dir` is the directory that can take up a -significant amount of memory, while `repos` solely consists of small text -files. diff --git a/src/repo/remove.v b/src/repo/remove.v deleted file mode 100644 index 6d949c3..0000000 --- a/src/repo/remove.v +++ /dev/null @@ -1,85 +0,0 @@ -module repo - -import os - -// remove_pkg_from_arch_repo removes a package from an arch-repo's database. It -// returns false if the package wasn't present in the database. It also -// optionally re-syncs the repo archives. -pub fn (r &RepoGroupManager) remove_pkg_from_arch_repo(repo string, arch string, pkg_name string, perform_sync bool) !bool { - repo_dir := os.join_path(r.repos_dir, repo, arch) - - // If the repository doesn't exist yet, the result is automatically false - if !os.exists(repo_dir) { - return false - } - - // We iterate over every directory in the repo dir - // TODO filter so we only check directories - for d in os.ls(repo_dir)! { - // Because a repository only allows a single version of each package, - // we need only compare whether the name of the package is the same, - // not the version. - name := d.split('-')#[..-2].join('-') - - if name == pkg_name { - // We lock the mutex here to prevent other routines from creating a - // new archive while we remove an entry - lock r.mutex { - os.rmdir_all(os.join_path_single(repo_dir, d))! - } - - // Also remove the package archive - repo_pkg_dir := os.join_path(r.pkg_dir, repo, arch) - - archives := os.ls(repo_pkg_dir)!.filter(it.split('-')#[..-3].join('-') == name) - - for archive_name in archives { - full_path := os.join_path_single(repo_pkg_dir, archive_name) - os.rm(full_path)! - } - - // Sync the db archives if requested - if perform_sync { - r.sync(repo, arch)! 
- } - - return true - } - } - - return false -} - -// remove_arch_repo removes an arch-repo & its packages. -pub fn (r &RepoGroupManager) remove_arch_repo(repo string, arch string) !bool { - repo_dir := os.join_path(r.repos_dir, repo, arch) - - // If the repository doesn't exist yet, the result is automatically false - if !os.exists(repo_dir) { - return false - } - - os.rmdir_all(repo_dir)! - - pkg_dir := os.join_path(r.pkg_dir, repo, arch) - os.rmdir_all(pkg_dir)! - - return true -} - -// remove_repo removes a repo & its packages. -pub fn (r &RepoGroupManager) remove_repo(repo string) !bool { - repo_dir := os.join_path_single(r.repos_dir, repo) - - // If the repository doesn't exist yet, the result is automatically false - if !os.exists(repo_dir) { - return false - } - - os.rmdir_all(repo_dir)! - - pkg_dir := os.join_path_single(r.pkg_dir, repo) - os.rmdir_all(pkg_dir)! - - return true -} diff --git a/src/repo/add.v b/src/repo/repo.v similarity index 57% rename from src/repo/add.v rename to src/repo/repo.v index 47b0d7e..e27e232 100644 --- a/src/repo/add.v +++ b/src/repo/repo.v @@ -23,23 +23,18 @@ pub: pub struct RepoAddResult { pub: - name string - version string - archs []string + added bool [required] + pkg &package.Pkg [required] } // new creates a new RepoGroupManager & creates the directories as needed -pub fn new(repos_dir string, pkg_dir string, default_arch string) !RepoGroupManager { +pub fn new(repos_dir string, pkg_dir string, default_arch string) ?RepoGroupManager { if !os.is_dir(repos_dir) { - os.mkdir_all(repos_dir) or { - return error('Failed to create repos directory: ${err.msg()}') - } + os.mkdir_all(repos_dir) or { return error('Failed to create repos directory: $err.msg()') } } if !os.is_dir(pkg_dir) { - os.mkdir_all(pkg_dir) or { - return error('Failed to create package directory: ${err.msg()}') - } + os.mkdir_all(pkg_dir) or { return error('Failed to create package directory: $err.msg()') } } return RepoGroupManager{ @@ -53,32 +48,31 @@ pub 
fn new(repos_dir string, pkg_dir string, default_arch string) !RepoGroupMana // pkg archive. It's a wrapper around add_pkg_in_repo that parses the archive // file, passes the result to add_pkg_in_repo, and hard links the archive to // the right subdirectories in r.pkg_dir if it was successfully added. -pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) !RepoAddResult { +pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) ?RepoAddResult { pkg := package.read_pkg_archive(pkg_path) or { - return error('Failed to read package file: ${err.msg()}') + return error('Failed to read package file: $err.msg()') } - archs := r.add_pkg_in_repo(repo, pkg)! + added := r.add_pkg_in_repo(repo, pkg) ? // If the add was successful, we move the file to the packages directory - for arch in archs { + for arch in added { repo_pkg_path := os.real_path(os.join_path(r.pkg_dir, repo, arch)) dest_path := os.join_path_single(repo_pkg_path, pkg.filename()) - os.mkdir_all(repo_pkg_path)! + os.mkdir_all(repo_pkg_path) ? // We create hard links so that "any" arch packages aren't stored // multiple times - os.link(pkg_path, dest_path)! + os.link(pkg_path, dest_path) ? } // After linking, we can remove the original file - os.rm(pkg_path)! + os.rm(pkg_path) ? return RepoAddResult{ - name: pkg.info.name - version: pkg.info.version - archs: archs + added: added.len > 0 + pkg: &pkg } } @@ -89,13 +83,15 @@ pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) !Re // r.default_arch. If this arch-repo doesn't exist yet, it is created. If the // architecture isn't 'any', the package is only added to the specific // architecture. 
-fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ![]string { +fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ?[]string { // A package not of arch 'any' can be handled easily by adding it to the // respective repo if pkg.info.arch != 'any' { - r.add_pkg_in_arch_repo(repo, pkg.info.arch, pkg)! - - return [pkg.info.arch] + if r.add_pkg_in_arch_repo(repo, pkg.info.arch, pkg) ? { + return [pkg.info.arch] + } else { + return [] + } } mut arch_repos := []string{} @@ -108,7 +104,7 @@ fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ![]strin // If this is the first package that's added to the repo, the directory // won't exist yet if os.exists(repo_dir) { - arch_repos = os.ls(repo_dir)! + arch_repos = os.ls(repo_dir) ? } // The default_arch should always be updated when a package with arch 'any' @@ -117,39 +113,92 @@ fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ![]strin arch_repos << r.default_arch } - // Add the package to each found architecture - // NOTE: if any of these fail, the function fails. This means the user does - // not know which arch-repositories did succeed in adding the package, if - // any. + mut added := []string{} + + // We add the package to each repository. If any of the repositories + // return true, the result of the function is also true. for arch in arch_repos { - r.add_pkg_in_arch_repo(repo, arch, pkg)! + if r.add_pkg_in_arch_repo(repo, arch, pkg) ? { + added << arch + } } - return arch_repos + return added } // add_pkg_in_arch_repo is the function that actually adds a package to a given // arch-repo. It records the package's data in the arch-repo's desc & files // files, and afterwards updates the db & files archives to reflect these -// changes. -fn (r &RepoGroupManager) add_pkg_in_arch_repo(repo string, arch string, pkg &package.Pkg) ! { - pkg_dir := os.join_path(r.repos_dir, repo, arch, '${pkg.info.name}-${pkg.info.version}') +// changes. 
The function returns false if the package was already present in +// the repo, and true otherwise. +fn (r &RepoGroupManager) add_pkg_in_arch_repo(repo string, arch string, pkg &package.Pkg) ?bool { + pkg_dir := os.join_path(r.repos_dir, repo, arch, '$pkg.info.name-$pkg.info.version') // Remove the previous version of the package, if present - r.remove_pkg_from_arch_repo(repo, arch, pkg.info.name, false)! + r.remove_pkg_from_arch_repo(repo, arch, pkg.info.name, false) ? os.mkdir_all(pkg_dir) or { return error('Failed to create package directory.') } - os.write_file(os.join_path_single(pkg_dir, 'desc'), pkg.to_desc()!) or { - os.rmdir_all(pkg_dir)! + os.write_file(os.join_path_single(pkg_dir, 'desc'), pkg.to_desc()) or { + os.rmdir_all(pkg_dir) ? return error('Failed to write desc file.') } os.write_file(os.join_path_single(pkg_dir, 'files'), pkg.to_files()) or { - os.rmdir_all(pkg_dir)! + os.rmdir_all(pkg_dir) ? return error('Failed to write files file.') } - r.sync(repo, arch)! + r.sync(repo, arch) ? + + return true +} + +// remove_pkg_from_arch_repo removes a package from an arch-repo's database. It +// returns false if the package wasn't present in the database. It also +// optionally re-syncs the repo archives. +fn (r &RepoGroupManager) remove_pkg_from_arch_repo(repo string, arch string, pkg_name string, sync bool) ?bool { + repo_dir := os.join_path(r.repos_dir, repo, arch) + + // If the repository doesn't exist yet, the result is automatically false + if !os.exists(repo_dir) { + return false + } + + // We iterate over every directory in the repo dir + // TODO filter so we only check directories + for d in os.ls(repo_dir) ? { + // Because a repository only allows a single version of each package, + // we need only compare whether the name of the package is the same, + // not the version. 
+ name := d.split('-')#[..-2].join('-') + + if name == pkg_name { + // We lock the mutex here to prevent other routines from creating a + // new archive while we remove an entry + lock r.mutex { + os.rmdir_all(os.join_path_single(repo_dir, d)) ? + } + + // Also remove the package archive + repo_pkg_dir := os.join_path(r.pkg_dir, repo, arch) + + archives := os.ls(repo_pkg_dir) ?.filter(it.split('-')#[..-3].join('-') == name) + + for archive_name in archives { + full_path := os.join_path_single(repo_pkg_dir, archive_name) + os.rm(full_path) ? + } + + // Sync the db archives if requested + if sync { + r.sync(repo, arch) ? + } + + return true + } + } + + return false } diff --git a/src/repo/sync.v b/src/repo/sync.v index 9554748..9c5e7ed 100644 --- a/src/repo/sync.v +++ b/src/repo/sync.v @@ -32,7 +32,7 @@ fn archive_add_entry(archive &C.archive, entry &C.archive_entry, file_path &stri } // sync regenerates the repository archive files. -fn (r &RepoGroupManager) sync(repo string, arch string) ! { +fn (r &RepoGroupManager) sync(repo string, arch string) ? { subrepo_path := os.join_path(r.repos_dir, repo, arch) lock r.mutex { @@ -54,7 +54,7 @@ fn (r &RepoGroupManager) sync(repo string, arch string) ! { C.archive_write_open_filename(a_files, &char(files_path.str)) // Iterate over each directory - for d in os.ls(subrepo_path)!.filter(os.is_dir(os.join_path_single(subrepo_path, + for d in os.ls(subrepo_path) ?.filter(os.is_dir(os.join_path_single(subrepo_path, it))) { // desc mut inner_path := os.join_path_single(d, 'desc') diff --git a/src/response/response.v b/src/response/response.v new file mode 100644 index 0000000..a06a589 --- /dev/null +++ b/src/response/response.v @@ -0,0 +1,34 @@ +module response + +pub struct Response { +pub: + message string + data T +} + +// new_response constructs a new Response object with the given message +// & an empty data field. 
+pub fn new_response(message string) Response { + return Response{ + message: message + data: '' + } +} + +// new_data_response constructs a new Response object with the given data +// & an empty message field. +pub fn new_data_response(data T) Response { + return Response{ + message: '' + data: data + } +} + +// new_full_response constructs a new Response object with the given +// message & data. +pub fn new_full_response(message string, data T) Response { + return Response{ + message: message + data: data + } +} diff --git a/src/server/README.md b/src/server/README.md deleted file mode 100644 index ded9985..0000000 --- a/src/server/README.md +++ /dev/null @@ -1,6 +0,0 @@ -This module contains the Vieter HTTP server, consisting of the repository -implementation & the REST API. - -**NOTE**: vweb defines the priority order of routes by the file names in this -module. Therefore, it's very important that all API routes are defined in files -prefixed with `api_`, as this is before the word `routes` alphabetically. diff --git a/src/server/api_jobs.v b/src/server/api_jobs.v deleted file mode 100644 index 62bcb27..0000000 --- a/src/server/api_jobs.v +++ /dev/null @@ -1,49 +0,0 @@ -module server - -import web -import web.response { new_data_response, new_response } - -// v1_poll_job_queue allows agents to poll for new build jobs. -['/api/v1/jobs/poll'; auth; get; markused] -fn (mut app App) v1_poll_job_queue() web.Result { - arch := app.query['arch'] or { - return app.json(.bad_request, new_response('Missing arch query arg.')) - } - - max_str := app.query['max'] or { - return app.json(.bad_request, new_response('Missing max query arg.')) - } - max := max_str.int() - - mut out := app.job_queue.pop_n(arch, max).map(it.config) - - return app.json(.ok, new_data_response(out)) -} - -// v1_queue_job allows queueing a new one-time build job for the given target. 
-['/api/v1/jobs/queue'; auth; markused; post] -fn (mut app App) v1_queue_job() web.Result { - target_id := app.query['target'] or { - return app.json(.bad_request, new_response('Missing target query arg.')) - }.int() - - arch := app.query['arch'] or { - return app.json(.bad_request, new_response('Missing arch query arg.')) - } - - if arch == '' { - app.json(.bad_request, new_response('Empty arch query arg.')) - } - - force := 'force' in app.query - - target := app.db.get_target(target_id) or { - return app.json(.bad_request, new_response('Unknown target id.')) - } - - app.job_queue.insert(target: target, arch: arch, single: true, now: true, force: force) or { - return app.status(.internal_server_error) - } - - return app.status(.ok) -} diff --git a/src/server/api_logs.v b/src/server/api_logs.v deleted file mode 100644 index 00a7e2e..0000000 --- a/src/server/api_logs.v +++ /dev/null @@ -1,137 +0,0 @@ -module server - -import web -import net.urllib -import web.response { new_data_response, new_response } -import time -import os -import util -import models { BuildLog, BuildLogFilter } - -// v1_get_logs returns all build logs in the database. A 'target' query param can -// optionally be added to limit the list of build logs to that repository. -['/api/v1/logs'; auth; get; markused] -fn (mut app App) v1_get_logs() web.Result { - filter := models.from_params[BuildLogFilter](app.query) or { - return app.json(.bad_request, new_response('Invalid query parameters.')) - } - logs := app.db.get_build_logs(filter) - - return app.json(.ok, new_data_response(logs)) -} - -// v1_get_single_log returns the build log with the given id. -['/api/v1/logs/:id'; auth; get; markused] -fn (mut app App) v1_get_single_log(id int) web.Result { - log := app.db.get_build_log(id) or { return app.status(.not_found) } - - return app.json(.ok, new_data_response(log)) -} - -// v1_get_log_content returns the actual build log file for the given id. 
-['/api/v1/logs/:id/content'; auth; get; markused] -fn (mut app App) v1_get_log_content(id int) web.Result { - log := app.db.get_build_log(id) or { return app.status(.not_found) } - file_name := log.start_time.custom_format('YYYY-MM-DD_HH-mm-ss') - full_path := os.join_path(app.conf.data_dir, logs_dir_name, log.target_id.str(), log.arch, - file_name) - - return app.file(full_path) -} - -// parse_query_time unescapes an HTTP query parameter & tries to parse it as a -// time.Time struct. -fn parse_query_time(query string) !time.Time { - unescaped := urllib.query_unescape(query)! - t := time.parse(unescaped)! - - return t -} - -// v1_post_log adds a new log to the database. -['/api/v1/logs'; auth; markused; post] -fn (mut app App) v1_post_log() web.Result { - // Parse query params - start_time_int := app.query['startTime'].int() - - if start_time_int == 0 { - return app.json(.bad_request, new_response('Invalid or missing start time.')) - } - start_time := time.unix(start_time_int) - - end_time_int := app.query['endTime'].int() - - if end_time_int == 0 { - return app.json(.bad_request, new_response('Invalid or missing end time.')) - } - end_time := time.unix(end_time_int) - - if 'exitCode' !in app.query { - return app.json(.bad_request, new_response('Missing exit code.')) - } - - exit_code := app.query['exitCode'].int() - - if 'arch' !in app.query { - return app.json(.bad_request, new_response("Missing parameter 'arch'.")) - } - - arch := app.query['arch'] - - target_id := app.query['target'].int() - - if !app.db.target_exists(target_id) { - return app.json(.bad_request, new_response('Unknown target.')) - } - - // Store log in db - mut log := BuildLog{ - target_id: target_id - start_time: start_time - end_time: end_time - arch: arch - exit_code: exit_code - } - - // id of newly created log - log.id = app.db.add_build_log(log) - log_file_path := os.join_path(app.conf.data_dir, logs_dir_name, log.path()) - - // Create the logs directory of it doesn't exist - if 
!os.exists(os.dir(log_file_path)) { - os.mkdir_all(os.dir(log_file_path)) or { - app.lerror('Error while creating log file: ${err.msg()}') - - return app.status(.internal_server_error) - } - } - - if length := app.req.header.get(.content_length) { - util.reader_to_file(mut app.reader, length.int(), log_file_path) or { - app.lerror('An error occured while receiving logs: ${err.msg()}') - - return app.status(.internal_server_error) - } - } else { - return app.status(.length_required) - } - - return app.json(.ok, new_data_response(log.id)) -} - -// v1_delete_log allows removing a build log from the system. -['/api/v1/logs/:id'; auth; delete; markused] -fn (mut app App) v1_delete_log(id int) web.Result { - log := app.db.get_build_log(id) or { return app.status(.not_found) } - full_path := os.join_path(app.conf.data_dir, logs_dir_name, log.path()) - - os.rm(full_path) or { - app.lerror('Failed to remove log file ${full_path}: ${err.msg()}') - - return app.status(.internal_server_error) - } - - app.db.delete_build_log(id) - - return app.status(.ok) -} diff --git a/src/server/api_metrics.v b/src/server/api_metrics.v deleted file mode 100644 index 5ba0452..0000000 --- a/src/server/api_metrics.v +++ /dev/null @@ -1,19 +0,0 @@ -module server - -import metrics -import web - -// v1_metrics serves a Prometheus-compatible metrics endpoint. 
-['/api/v1/metrics'; get; markused] -fn (mut app App) v1_metrics() web.Result { - if !app.conf.collect_metrics { - return app.status(.not_found) - } - - mut exporter := metrics.new_prometheus_exporter() - exporter.load('vieter_', app.collector) - - // TODO stream to connection instead - body := exporter.export_to_string() or { return app.status(.internal_server_error) } - return app.body(.ok, 'text/plain', body) -} diff --git a/src/server/api_targets.v b/src/server/api_targets.v deleted file mode 100644 index ed121d9..0000000 --- a/src/server/api_targets.v +++ /dev/null @@ -1,82 +0,0 @@ -module server - -import web -import web.response { new_data_response, new_response } -import models { Target, TargetArch, TargetFilter } - -// v1_get_targets returns the current list of targets. -['/api/v1/targets'; auth; get; markused] -fn (mut app App) v1_get_targets() web.Result { - filter := models.from_params[TargetFilter](app.query) or { - return app.json(.bad_request, new_response('Invalid query parameters.')) - } - mut iter := app.db.targets(filter) - - return app.json(.ok, new_data_response(iter.collect())) -} - -// v1_get_single_target returns the information for a single target. -['/api/v1/targets/:id'; auth; get; markused] -fn (mut app App) v1_get_single_target(id int) web.Result { - target := app.db.get_target(id) or { return app.status(.not_found) } - - return app.json(.ok, new_data_response(target)) -} - -// v1_post_target creates a new target from the provided query string. -['/api/v1/targets'; auth; markused; post] -fn (mut app App) v1_post_target() web.Result { - mut params := app.query.clone() - - // If a target is created without specifying the arch, we assume it's meant - // for the default architecture. 
- if 'arch' !in params || params['arch'] == '' { - params['arch'] = app.conf.default_arch - } - - mut new_target := models.from_params[Target](params) or { - return app.json(.bad_request, new_response(err.msg())) - } - - // Ensure someone doesn't submit an invalid kind - if new_target.kind !in models.valid_kinds { - return app.json(.bad_request, new_response('Invalid kind.')) - } - - id := app.db.add_target(new_target) - new_target.id = id - - // Add the target to the job queue - // TODO return better error here if it's the cron schedule that's incorrect - app.job_queue.insert_all(new_target) or { return app.status(.internal_server_error) } - - return app.json(.ok, new_data_response(id)) -} - -// v1_delete_target removes a given target from the server's list. -['/api/v1/targets/:id'; auth; delete; markused] -fn (mut app App) v1_delete_target(id int) web.Result { - app.db.delete_target(id) - app.job_queue.invalidate(id) - - return app.status(.ok) -} - -// v1_patch_target updates a target's data with the given query params. -['/api/v1/targets/:id'; auth; markused; patch] -fn (mut app App) v1_patch_target(id int) web.Result { - app.db.update_target(id, app.query) - - if 'arch' in app.query { - arch_objs := app.query['arch'].split(',').map(TargetArch{ value: it }) - - app.db.update_target_archs(id, arch_objs) - } - - target := app.db.get_target(id) or { return app.status(.internal_server_error) } - - app.job_queue.invalidate(id) - app.job_queue.insert_all(target) or { return app.status(.internal_server_error) } - - return app.json(.ok, new_data_response(target)) -} diff --git a/src/server/auth.v b/src/server/auth.v new file mode 100644 index 0000000..7c8a676 --- /dev/null +++ b/src/server/auth.v @@ -0,0 +1,12 @@ +module server + +import net.http + +// is_authorized checks whether the provided API key is correct. 
+fn (mut app App) is_authorized() bool { + x_header := app.req.header.get_custom('X-Api-Key', http.HeaderQueryConfig{ exact: true }) or { + return false + } + + return x_header.trim_space() == app.conf.api_key +} diff --git a/src/server/cli.v b/src/server/cli.v index abb5fe3..4d39666 100644 --- a/src/server/cli.v +++ b/src/server/cli.v @@ -1,22 +1,15 @@ module server import cli -import conf as vconf +import env struct Config { pub: - port int = 8000 - log_level string = 'WARN' - pkg_dir string - data_dir string - api_key string - default_arch string - global_schedule string = '0 3' - base_image string = 'archlinux:base-devel' - max_log_age int [empty_default] - log_removal_schedule string = '0 0' - collect_metrics bool [empty_default] - default_build_timeout int = 3600 + log_level string = 'WARN' + pkg_dir string + data_dir string + api_key string + default_arch string } // cmd returns the cli submodule that handles starting the server @@ -24,11 +17,11 @@ pub fn cmd() cli.Command { return cli.Command{ name: 'server' description: 'Start the Vieter server.' - execute: fn (cmd cli.Command) ! { - config_file := cmd.flags.get_string('config-file')! - conf_ := vconf.load[Config](prefix: 'VIETER_', default_path: config_file)! + execute: fn (cmd cli.Command) ? { + config_file := cmd.flags.get_string('config-file') ? + conf := env.load(config_file) ? - server(conf_)! + server(conf) ? } } } diff --git a/src/server/git.v b/src/server/git.v new file mode 100644 index 0000000..c5cbc0a --- /dev/null +++ b/src/server/git.v @@ -0,0 +1,84 @@ +module server + +import web +import net.http +import response { new_data_response, new_response } +import db + +// get_repos returns the current list of repos. 
+['/api/repos'; get] +fn (mut app App) get_repos() web.Result { + if !app.is_authorized() { + return app.json(http.Status.unauthorized, new_response('Unauthorized.')) + } + + repos := app.db.get_git_repos() + + return app.json(http.Status.ok, new_data_response(repos)) +} + +// get_single_repo returns the information for a single repo. +['/api/repos/:id'; get] +fn (mut app App) get_single_repo(id int) web.Result { + if !app.is_authorized() { + return app.json(http.Status.unauthorized, new_response('Unauthorized.')) + } + + repo := app.db.get_git_repo(id) or { return app.not_found() } + + return app.json(http.Status.ok, new_data_response(repo)) +} + +// post_repo creates a new repo from the provided query string. +['/api/repos'; post] +fn (mut app App) post_repo() web.Result { + if !app.is_authorized() { + return app.json(http.Status.unauthorized, new_response('Unauthorized.')) + } + + mut params := app.query.clone() + + // If a repo is created without specifying the arch, we assume it's meant + // for the default architecture. + if 'arch' !in params { + params['arch'] = app.conf.default_arch + } + + new_repo := db.git_repo_from_params(params) or { + return app.json(http.Status.bad_request, new_response(err.msg())) + } + + app.db.add_git_repo(new_repo) + + return app.json(http.Status.ok, new_response('Repo added successfully.')) +} + +// delete_repo removes a given repo from the server's list. +['/api/repos/:id'; delete] +fn (mut app App) delete_repo(id int) web.Result { + if !app.is_authorized() { + return app.json(http.Status.unauthorized, new_response('Unauthorized.')) + } + + app.db.delete_git_repo(id) + + return app.json(http.Status.ok, new_response('Repo removed successfully.')) +} + +// patch_repo updates a repo's data with the given query params. 
+['/api/repos/:id'; patch] +fn (mut app App) patch_repo(id int) web.Result { + if !app.is_authorized() { + return app.json(http.Status.unauthorized, new_response('Unauthorized.')) + } + + app.db.update_git_repo(id, app.query) + + if 'arch' in app.query { + arch_objs := app.query['arch'].split(',').map(db.GitRepoArch{ value: it }) + + app.db.update_git_repo_archs(id, arch_objs) + } + + return app.json(http.Status.ok, new_response('Repo updated successfully.')) +} diff --git a/src/server/log_removal.v b/src/server/log_removal.v deleted file mode 100644 index bc51bcf..0000000 --- a/src/server/log_removal.v +++ /dev/null @@ -1,53 +0,0 @@ -module server - -import time -import models { BuildLog } -import os -import cron - -const fallback_log_removal_frequency = 24 * time.hour - -// log_removal_daemon removes old build logs every `log_removal_frequency`. -fn (mut app App) log_removal_daemon(schedule &cron.Expression) { - for { - mut too_old_timestamp := time.now().add_days(-app.conf.max_log_age) - - app.linfo('Cleaning logs before ${too_old_timestamp}') - - mut logs := []BuildLog{} - mut counter := 0 - mut failed := u64(0) - - // Remove old logs - for { - // The offset is used to skip logs that failed to remove. Besides - // this, we don't need to move the offset, because all previously - // oldest logs will have been removed. 
- logs = app.db.get_build_logs(before: too_old_timestamp, offset: failed, limit: 50) - - for log in logs { - log_file_path := os.join_path(app.conf.data_dir, logs_dir_name, log.path()) - - os.rm(log_file_path) or { - app.lerror('Failed to remove log file ${log_file_path}: ${err.msg()}') - failed += 1 - - continue - } - app.db.delete_build_log(log.id) - - counter += 1 - } - - if logs.len < 50 { - break - } - } - - app.linfo('Cleaned ${counter} logs (${failed} failed)') - - // Sleep until the next cycle - next_time := schedule.next_from_now() - time.sleep(next_time - time.now()) - } -} diff --git a/src/server/logs.v b/src/server/logs.v new file mode 100644 index 0000000..b048dc4 --- /dev/null +++ b/src/server/logs.v @@ -0,0 +1,136 @@ +module server + +import web +import net.http +import net.urllib +import response { new_data_response, new_response } +import db +import time +import os +import util + +// get_logs returns all build logs in the database. A 'repo' query param can +// optionally be added to limit the list of build logs to that repository. +['/api/logs'; get] +fn (mut app App) get_logs() web.Result { + if !app.is_authorized() { + return app.json(http.Status.unauthorized, new_response('Unauthorized.')) + } + + logs := if 'repo' in app.query { + app.db.get_build_logs_for_repo(app.query['repo'].int()) + } else { + app.db.get_build_logs() + } + + return app.json(http.Status.ok, new_data_response(logs)) +} + +// get_single_log returns the build log with the given id. +['/api/logs/:id'; get] +fn (mut app App) get_single_log(id int) web.Result { + if !app.is_authorized() { + return app.json(http.Status.unauthorized, new_response('Unauthorized.')) + } + + log := app.db.get_build_log(id) or { return app.not_found() } + + return app.json(http.Status.ok, new_data_response(log)) +} + +// get_log_content returns the actual build log file for the given id. 
+['/api/logs/:id/content'; get] +fn (mut app App) get_log_content(id int) web.Result { + if !app.is_authorized() { + return app.json(http.Status.unauthorized, new_response('Unauthorized.')) + } + + log := app.db.get_build_log(id) or { return app.not_found() } + file_name := log.start_time.custom_format('YYYY-MM-DD_HH-mm-ss') + full_path := os.join_path(app.conf.data_dir, logs_dir_name, log.repo_id.str(), log.arch, + file_name) + + return app.file(full_path) +} + +// parse_query_time unescapes an HTTP query parameter & tries to parse it as a +// time.Time struct. +fn parse_query_time(query string) ?time.Time { + unescaped := urllib.query_unescape(query) ? + t := time.parse(unescaped) ? + + return t +} + +// post_log adds a new log to the database. +['/api/logs'; post] +fn (mut app App) post_log() web.Result { + if !app.is_authorized() { + return app.json(http.Status.unauthorized, new_response('Unauthorized.')) + } + + // Parse query params + start_time := parse_query_time(app.query['startTime']) or { + return app.json(http.Status.bad_request, new_response('Invalid or missing start time.')) + } + + end_time := parse_query_time(app.query['endTime']) or { + return app.json(http.Status.bad_request, new_response('Invalid or missing end time.')) + } + + if 'exitCode' !in app.query { + return app.json(http.Status.bad_request, new_response('Missing exit code.')) + } + + exit_code := app.query['exitCode'].int() + + if 'arch' !in app.query { + return app.json(http.Status.bad_request, new_response("Missing parameter 'arch'.")) + } + + arch := app.query['arch'] + + repo_id := app.query['repo'].int() + + if !app.db.git_repo_exists(repo_id) { + return app.json(http.Status.bad_request, new_response('Unknown Git repo.')) + } + + // Store log in db + log := db.BuildLog{ + repo_id: repo_id + start_time: start_time + end_time: end_time + arch: arch + exit_code: exit_code + } + + app.db.add_build_log(log) + + repo_logs_dir := os.join_path(app.conf.data_dir, logs_dir_name, 
repo_id.str(), arch) + + // Create the logs directory of it doesn't exist + if !os.exists(repo_logs_dir) { + os.mkdir_all(repo_logs_dir) or { + app.lerror("Couldn't create dir '$repo_logs_dir'.") + + return app.json(http.Status.internal_server_error, new_response('An error occured while processing the request.')) + } + } + + // Stream log contents to correct file + file_name := start_time.custom_format('YYYY-MM-DD_HH-mm-ss') + full_path := os.join_path_single(repo_logs_dir, file_name) + + if length := app.req.header.get(.content_length) { + util.reader_to_file(mut app.reader, length.int(), full_path) or { + app.lerror('An error occured while receiving logs: $err.msg()') + + return app.json(http.Status.internal_server_error, new_response('Failed to upload logs.')) + } + } else { + return app.status(http.Status.length_required) + } + + return app.json(http.Status.ok, new_response('Logs added successfully.')) +} diff --git a/src/server/repo.v b/src/server/repo.v deleted file mode 100644 index 8f8270d..0000000 --- a/src/server/repo.v +++ /dev/null @@ -1,96 +0,0 @@ -module server - -import web -import os -import repo -import time -import rand -import util -import web.response { new_data_response, new_response } - -// healthcheck just returns a string, but can be used to quickly check if the -// server is still responsive. -['/health'; get; markused] -pub fn (mut app App) healthcheck() web.Result { - return app.json(.ok, new_response('Healthy.')) -} - -// get_repo_file handles all Pacman-related routes. It returns both the -// repository's archives, but also package archives or the contents of a -// package's desc file. 
-['/:repo/:arch/:filename'; get; head; markused] -fn (mut app App) get_repo_file(repo_ string, arch string, filename string) web.Result { - mut full_path := '' - - db_exts := ['.db', '.files', '.db.tar.gz', '.files.tar.gz'] - - // There's no point in having the ability to serve db archives with wrong - // filenames - if db_exts.any(filename == '${repo_}${it}') { - full_path = os.join_path(app.repo.repos_dir, repo_, arch, filename) - - // repo-add does this using symlinks, but we just change the requested - // path - if !full_path.ends_with('.tar.gz') { - full_path += '.tar.gz' - } - } else if filename.contains('.pkg') { - full_path = os.join_path(app.repo.pkg_dir, repo_, arch, filename) - } - // Default behavior is to return the desc file for the package, if present. - // This can then also be used by the build system to properly check whether - // a package is present in an arch-repo. - else { - full_path = os.join_path(app.repo.repos_dir, repo_, arch, filename, 'desc') - } - - return app.file(full_path) -} - -// put_package handles publishing a package to a repository. -['/:repo/publish'; auth; markused; post] -fn (mut app App) put_package(repo_ string) web.Result { - // api is a reserved keyword for api routes & should never be allowed to be - // a repository. 
- if repo_.to_lower() == 'api' { - return app.json(.bad_request, new_response("'api' is a reserved keyword & cannot be used as a repository name.")) - } - - mut pkg_path := '' - - if length := app.req.header.get(.content_length) { - // Generate a random filename for the temp file - pkg_path = os.join_path_single(app.repo.pkg_dir, rand.uuid_v4()) - - app.ldebug("Uploading ${length} bytes (${util.pretty_bytes(length.int())}) to '${pkg_path}'.") - - // This is used to time how long it takes to upload a file - mut sw := time.new_stopwatch(time.StopWatchOptions{ auto_start: true }) - - util.reader_to_file(mut app.reader, length.int(), pkg_path) or { - app.lwarn("Failed to upload '${pkg_path}': ${err.msg()}") - - return app.status(.internal_server_error) - } - - sw.stop() - app.ldebug("Upload of '${pkg_path}' completed in ${sw.elapsed().seconds():.3}s.") - } else { - app.lwarn('Tried to upload package without specifying a Content-Length.') - - // length required - return app.status(.length_required) - } - - res := app.repo.add_pkg_from_path(repo_, pkg_path) or { - app.lerror('Error while adding package: ${err.msg()}') - - os.rm(pkg_path) or { app.lerror("Failed to remove download '${pkg_path}': ${err.msg()}") } - - return app.status(.internal_server_error) - } - - app.linfo("Added '${res.name}-${res.version}' to '${repo_} (${res.archs.join(',')})'.") - - return app.json(.ok, new_data_response(res)) -} diff --git a/src/server/repo_remove.v b/src/server/repo_remove.v deleted file mode 100644 index 24baeaf..0000000 --- a/src/server/repo_remove.v +++ /dev/null @@ -1,63 +0,0 @@ -module server - -import web - -// delete_package tries to remove the given package. 
-['/:repo/:arch/:pkg'; auth; delete; markused] -fn (mut app App) delete_package(repo string, arch string, pkg string) web.Result { - res := app.repo.remove_pkg_from_arch_repo(repo, arch, pkg, true) or { - app.lerror('Error while deleting package: ${err.msg()}') - - return app.status(.internal_server_error) - } - - if res { - app.linfo("Removed package '${pkg}' from '${repo}/${arch}'") - - return app.status(.ok) - } else { - app.linfo("Tried removing package '${pkg}' from '${repo}/${arch}', but it doesn't exist.") - - return app.status(.not_found) - } -} - -// delete_arch_repo tries to remove the given arch-repo. -['/:repo/:arch'; auth; delete; markused] -fn (mut app App) delete_arch_repo(repo string, arch string) web.Result { - res := app.repo.remove_arch_repo(repo, arch) or { - app.lerror('Error while deleting arch-repo: ${err.msg()}') - - return app.status(.internal_server_error) - } - - if res { - app.linfo("Removed arch-repo '${repo}/${arch}'") - - return app.status(.ok) - } else { - app.linfo("Tried removing '${repo}/${arch}', but it doesn't exist.") - - return app.status(.not_found) - } -} - -// delete_repo tries to remove the given repo. 
-['/:repo'; auth; delete; markused] -fn (mut app App) delete_repo(repo string) web.Result { - res := app.repo.remove_repo(repo) or { - app.lerror('Error while deleting repo: ${err.msg()}') - - return app.status(.internal_server_error) - } - - if res { - app.linfo("Removed repo '${repo}'") - - return app.status(.ok) - } else { - app.linfo("Tried removing '${repo}', but it doesn't exist.") - - return app.status(.not_found) - } -} diff --git a/src/server/routes.v b/src/server/routes.v new file mode 100644 index 0000000..fbf37df --- /dev/null +++ b/src/server/routes.v @@ -0,0 +1,112 @@ +module server + +import web +import os +import repo +import time +import rand +import util +import net.http +import response { new_response } + +// healthcheck just returns a string, but can be used to quickly check if the +// server is still responsive. +['/health'; get] +pub fn (mut app App) healthcheck() web.Result { + return app.json(http.Status.ok, new_response('Healthy.')) +} + +// get_repo_file handles all Pacman-related routes. It returns both the +// repository's archives, but also package archives or the contents of a +// package's desc file. +['/:repo/:arch/:filename'; get; head] +fn (mut app App) get_repo_file(repo string, arch string, filename string) web.Result { + mut full_path := '' + + db_exts := ['.db', '.files', '.db.tar.gz', '.files.tar.gz'] + + // There's no point in having the ability to serve db archives with wrong + // filenames + if db_exts.any(filename == '$repo$it') { + full_path = os.join_path(app.repo.repos_dir, repo, arch, filename) + + // repo-add does this using symlinks, but we just change the requested + // path + if !full_path.ends_with('.tar.gz') { + full_path += '.tar.gz' + } + } else if filename.contains('.pkg') { + full_path = os.join_path(app.repo.pkg_dir, repo, arch, filename) + } + // Default behavior is to return the desc file for the package, if present. 
+ // This can then also be used by the build system to properly check whether + // a package is present in an arch-repo. + else { + full_path = os.join_path(app.repo.repos_dir, repo, arch, filename, 'desc') + } + + // Scuffed way to respond to HEAD requests + if app.req.method == http.Method.head { + if os.exists(full_path) { + return app.status(http.Status.ok) + } + + return app.not_found() + } + + return app.file(full_path) +} + +// put_package handles publishing a package to a repository. +['/:repo/publish'; post] +fn (mut app App) put_package(repo string) web.Result { + if !app.is_authorized() { + return app.json(http.Status.unauthorized, new_response('Unauthorized.')) + } + + mut pkg_path := '' + + if length := app.req.header.get(.content_length) { + // Generate a random filename for the temp file + pkg_path = os.join_path_single(app.repo.pkg_dir, rand.uuid_v4()) + + app.ldebug("Uploading $length bytes (${util.pretty_bytes(length.int())}) to '$pkg_path'.") + + // This is used to time how long it takes to upload a file + mut sw := time.new_stopwatch(time.StopWatchOptions{ auto_start: true }) + + util.reader_to_file(mut app.reader, length.int(), pkg_path) or { + app.lwarn("Failed to upload '$pkg_path'") + + return app.json(http.Status.internal_server_error, new_response('Failed to upload file.')) + } + + sw.stop() + app.ldebug("Upload of '$pkg_path' completed in ${sw.elapsed().seconds():.3}s.") + } else { + app.lwarn('Tried to upload package without specifying a Content-Length.') + + // length required + return app.status(http.Status.length_required) + } + + res := app.repo.add_pkg_from_path(repo, pkg_path) or { + app.lerror('Error while adding package: $err.msg()') + + os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg()") } + + return app.json(http.Status.internal_server_error, new_response('Failed to add package.')) + } + + if !res.added { + os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg()") } + 
+ app.lwarn("Duplicate package '$res.pkg.full_name()' in repo '$repo'.") + + return app.json(http.Status.bad_request, new_response('File already exists.')) + } + + app.linfo("Added '$res.pkg.full_name()' to repo '$repo ($res.pkg.info.arch)'.") + + return app.json(http.Status.ok, new_response('Package added successfully.')) +} diff --git a/src/server/server.v b/src/server/server.v index e1516fa..090aa76 100644 --- a/src/server/server.v +++ b/src/server/server.v @@ -5,12 +5,10 @@ import os import log import repo import util -import dbms -import build { BuildJobQueue } -import cron -import metrics +import db const ( + port = 8000 log_file_name = 'vieter.log' repo_dir_name = 'repos' db_file_name = 'vieter.sqlite' @@ -23,34 +21,16 @@ pub: conf Config [required; web_global] pub mut: repo repo.RepoGroupManager [required; web_global] - // Keys are the various architectures for packages - job_queue BuildJobQueue [required; web_global] - db dbms.VieterDb -} - -// init_job_queue populates a fresh job queue with all the targets currently -// stored in the database. -fn (mut app App) init_job_queue() ! { - for target in app.db.targets(limit: 0) { - app.job_queue.insert_all(target)! - } + db db.VieterDb } // server starts the web server & starts listening for requests -pub fn server(conf Config) ! { +pub fn server(conf Config) ? { // Prevent using 'any' as the default arch if conf.default_arch == 'any' { util.exit_with_message(1, "'any' is not allowed as the value for default_arch.") } - global_ce := cron.parse_expression(conf.global_schedule) or { - util.exit_with_message(1, 'Invalid global cron expression: ${err.msg()}') - } - - log_removal_ce := cron.parse_expression(conf.log_removal_schedule) or { - util.exit_with_message(1, 'Invalid log removal cron expression: ${err.msg()}') - } - // Configure logger log_level := log.level_from_tag(conf.log_level) or { util.exit_with_message(1, 'Invalid log level. 
The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.') @@ -82,41 +62,18 @@ pub fn server(conf Config) ! { repo_dir := os.join_path_single(conf.data_dir, server.repo_dir_name) // This also creates the directories if needed - repo_ := repo.new(repo_dir, conf.pkg_dir, conf.default_arch) or { + repo := repo.new(repo_dir, conf.pkg_dir, conf.default_arch) or { logger.error(err.msg()) exit(1) } db_file := os.join_path_single(conf.data_dir, server.db_file_name) - db := dbms.init(db_file) or { - util.exit_with_message(1, 'Failed to initialize database: ${err.msg()}') - } + db := db.init(db_file) or { util.exit_with_message(1, 'Failed to initialize database.') } - mut collector := if conf.collect_metrics { - &metrics.MetricsCollector(metrics.new_default_collector()) - } else { - &metrics.MetricsCollector(metrics.new_null_collector()) - } - - collector.histogram_buckets_set('http_requests_duration_seconds', [0.001, 0.005, 0.01, 0.05, - 0.1, 0.5, 1, 5, 10]) - - mut app := &App{ + web.run(&App{ logger: logger - api_key: conf.api_key conf: conf - repo: repo_ + repo: repo db: db - collector: collector - job_queue: build.new_job_queue(global_ce, conf.base_image, conf.default_build_timeout) - } - app.init_job_queue() or { - util.exit_with_message(1, 'Failed to inialize job queue: ${err.msg()}') - } - - if conf.max_log_age > 0 { - spawn app.log_removal_daemon(log_removal_ce) - } - - web.run(app, conf.port) + }, server.port) } diff --git a/src/util/README.md b/src/util/README.md deleted file mode 100644 index 529e412..0000000 --- a/src/util/README.md +++ /dev/null @@ -1,2 +0,0 @@ -This module defines a few useful functions used throughout the codebase that -don't specifically fit inside a module. diff --git a/src/util/stream.v b/src/util/stream.v deleted file mode 100644 index ef6e872..0000000 --- a/src/util/stream.v +++ /dev/null @@ -1,99 +0,0 @@ -// Functions for interacting with `io.Reader` & `io.Writer` objects. 
-module util - -import io -import os - -// reader_to_writer tries to consume the entire reader & write it to the writer. -pub fn reader_to_writer(mut reader io.Reader, mut writer io.Writer) ! { - mut buf := []u8{len: 10 * 1024} - - for { - bytes_read := reader.read(mut buf) or { break } - mut bytes_written := 0 - - for bytes_written < bytes_read { - c := writer.write(buf[bytes_written..bytes_read]) or { break } - - bytes_written += c - } - } -} - -// reader_to_file writes the contents of a BufferedReader to a file -pub fn reader_to_file(mut reader io.BufferedReader, length int, path string) ! { - mut file := os.create(path)! - defer { - file.close() - } - - mut buf := []u8{len: reader_buf_size} - mut bytes_left := length - - // Repeat as long as the stream still has data - for bytes_left > 0 { - // TODO check if just breaking here is safe - bytes_read := reader.read(mut buf) or { break } - bytes_left -= bytes_read - - mut to_write := bytes_read - - for to_write > 0 { - // TODO don't just loop infinitely here - bytes_written := file.write(buf[bytes_read - to_write..bytes_read]) or { continue } - // file.flush() - - to_write = to_write - bytes_written - } - } - - if bytes_left > 0 { - return error('Not all bytes were received.') - } -} - -// match_array_in_array[T] returns how many elements of a2 overlap with a1. For -// example, if a1 = "abcd" & a2 = "cd", the result will be 2. If the match is -// not at the end of a1, the result is 0. -pub fn match_array_in_array[T](a1 []T, a2 []T) int { - mut i := 0 - mut match_len := 0 - - for i + match_len < a1.len { - if a1[i + match_len] == a2[match_len] { - match_len += 1 - } else { - i += match_len + 1 - match_len = 0 - } - } - - return match_len -} - -// read_until_separator consumes an io.Reader until it encounters some -// separator array. The data read is stored inside the provided res array. -pub fn read_until_separator(mut reader io.Reader, mut res []u8, sep []u8) ! 
{ - mut buf := []u8{len: sep.len} - - for { - c := reader.read(mut buf)! - res << buf[..c] - - match_len := match_array_in_array(buf[..c], sep) - - if match_len == sep.len { - break - } - - if match_len > 0 { - match_left := sep.len - match_len - c2 := reader.read(mut buf[..match_left])! - res << buf[..c2] - - if buf[..c2] == sep[match_len..] { - break - } - } - } -} diff --git a/src/util/util.v b/src/util/util.v index 213104c..c1af30e 100644 --- a/src/util/util.v +++ b/src/util/util.v @@ -1,12 +1,13 @@ module util import os +import io +import crypto.md5 import crypto.sha256 -const ( - reader_buf_size = 1_000_000 - prefixes = ['B', 'KB', 'MB', 'GB'] -) +const reader_buf_size = 1_000_000 + +const prefixes = ['B', 'KB', 'MB', 'GB'] // Dummy struct to work around the fact that you can only share structs, maps & // arrays @@ -22,10 +23,40 @@ pub fn exit_with_message(code int, msg string) { exit(code) } -// hash_file returns the sha256 hash of a given file -pub fn hash_file(path &string) !string { +// reader_to_file writes the contents of a BufferedReader to a file +pub fn reader_to_file(mut reader io.BufferedReader, length int, path string) ? { + mut file := os.create(path) ? 
+ defer { + file.close() + } + + mut buf := []u8{len: util.reader_buf_size} + mut bytes_left := length + + // Repeat as long as the stream still has data + for bytes_left > 0 { + // TODO check if just breaking here is safe + bytes_read := reader.read(mut buf) or { break } + bytes_left -= bytes_read + + mut to_write := bytes_read + + for to_write > 0 { + // TODO don't just loop infinitely here + bytes_written := file.write(buf[bytes_read - to_write..bytes_read]) or { continue } + // file.flush() + + to_write = to_write - bytes_written + } + } +} + +// hash_file returns the md5 & sha256 hash of a given file +// TODO actually implement sha256 +pub fn hash_file(path &string) ?(string, string) { file := os.open(path) or { return error('Failed to open file.') } + mut md5sum := md5.new() mut sha256sum := sha256.new() buf_size := int(1_000_000) @@ -37,12 +68,16 @@ pub fn hash_file(path &string) !string { bytes_read := file.read(mut buf) or { return error('Failed to read from file.') } bytes_left -= u64(bytes_read) - // This function never actually fails, but returns an option to follow - // the Writer interface. - sha256sum.write(buf[..bytes_read])! + // For now we'll assume that this always works + md5sum.write(buf[..bytes_read]) or { + return error('Failed to update md5 checksum. This should never happen.') + } + sha256sum.write(buf[..bytes_read]) or { + return error('Failed to update sha256 checksum. 
This should never happen.') + } } - return sha256sum.checksum().hex() + return md5sum.checksum().hex(), sha256sum.checksum().hex() } // pretty_bytes converts a byte count to human-readable version diff --git a/src/v.mod b/src/v.mod index 461af6a..e69de29 100644 --- a/src/v.mod +++ b/src/v.mod @@ -1,8 +0,0 @@ -Module { - dependencies: [ - 'https://git.rustybever.be/vieter-v/conf', - 'https://git.rustybever.be/vieter-v/docker', - 'https://git.rustybever.be/vieter-v/aur', - 'https://git.rustybever.be/vieter-v/metrics' - ] -} diff --git a/src/web/consts.v b/src/web/consts.v deleted file mode 100644 index df8cdb2..0000000 --- a/src/web/consts.v +++ /dev/null @@ -1,133 +0,0 @@ -module web - -import net.http - -// A dummy structure that returns from routes to indicate that you actually sent something to a user -[noinit] -pub struct Result {} - -pub const ( - methods_with_form = [http.Method.post, .put, .patch] - headers_close = http.new_custom_header_from_map({ - 'Server': 'Vieter' - http.CommonHeader.connection.str(): 'close' - }) or { panic('should never fail') } - - http_302 = http.new_response( - status: .found - body: '302 Found' - header: headers_close - ) - http_400 = http.new_response( - status: .bad_request - body: '400 Bad Request' - header: http.new_header( - key: .content_type - value: 'text/plain' - ).join(headers_close) - ) - http_401 = http.new_response( - status: .unauthorized - body: '401 Unauthorized' - header: http.new_header( - key: .content_type - value: 'text/plain' - ).join(headers_close) - ) - http_404 = http.new_response( - status: .not_found - body: '404 Not Found' - header: http.new_header( - key: .content_type - value: 'text/plain' - ).join(headers_close) - ) - http_500 = http.new_response( - status: .internal_server_error - body: '500 Internal Server Error' - header: http.new_header( - key: .content_type - value: 'text/plain' - ).join(headers_close) - ) - mime_types = { - '.aac': 'audio/aac' - '.abw': 'application/x-abiword' - '.arc': 
'application/x-freearc' - '.avi': 'video/x-msvideo' - '.azw': 'application/vnd.amazon.ebook' - '.bin': 'application/octet-stream' - '.bmp': 'image/bmp' - '.bz': 'application/x-bzip' - '.bz2': 'application/x-bzip2' - '.cda': 'application/x-cdf' - '.csh': 'application/x-csh' - '.css': 'text/css' - '.csv': 'text/csv' - '.doc': 'application/msword' - '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' - '.eot': 'application/vnd.ms-fontobject' - '.epub': 'application/epub+zip' - '.gz': 'application/gzip' - '.gif': 'image/gif' - '.htm': 'text/html' - '.html': 'text/html' - '.ico': 'image/vnd.microsoft.icon' - '.ics': 'text/calendar' - '.jar': 'application/java-archive' - '.jpeg': 'image/jpeg' - '.jpg': 'image/jpeg' - '.js': 'text/javascript' - '.json': 'application/json' - '.jsonld': 'application/ld+json' - '.mid': 'audio/midi audio/x-midi' - '.midi': 'audio/midi audio/x-midi' - '.mjs': 'text/javascript' - '.mp3': 'audio/mpeg' - '.mp4': 'video/mp4' - '.mpeg': 'video/mpeg' - '.mpkg': 'application/vnd.apple.installer+xml' - '.odp': 'application/vnd.oasis.opendocument.presentation' - '.ods': 'application/vnd.oasis.opendocument.spreadsheet' - '.odt': 'application/vnd.oasis.opendocument.text' - '.oga': 'audio/ogg' - '.ogv': 'video/ogg' - '.ogx': 'application/ogg' - '.opus': 'audio/opus' - '.otf': 'font/otf' - '.png': 'image/png' - '.pdf': 'application/pdf' - '.php': 'application/x-httpd-php' - '.ppt': 'application/vnd.ms-powerpoint' - '.pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation' - '.rar': 'application/vnd.rar' - '.rtf': 'application/rtf' - '.sh': 'application/x-sh' - '.svg': 'image/svg+xml' - '.swf': 'application/x-shockwave-flash' - '.tar': 'application/x-tar' - '.tif': 'image/tiff' - '.tiff': 'image/tiff' - '.ts': 'video/mp2t' - '.ttf': 'font/ttf' - '.txt': 'text/plain' - '.vsd': 'application/vnd.visio' - '.wav': 'audio/wav' - '.weba': 'audio/webm' - '.webm': 'video/webm' - '.webp': 'image/webp' - 
'.woff': 'font/woff' - '.woff2': 'font/woff2' - '.xhtml': 'application/xhtml+xml' - '.xls': 'application/vnd.ms-excel' - '.xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' - '.xml': 'application/xml' - '.xul': 'application/vnd.mozilla.xul+xml' - '.zip': 'application/zip' - '.3gp': 'video/3gpp' - '.3g2': 'video/3gpp2' - '.7z': 'application/x-7z-compressed' - } - max_http_post_size = 1024 * 1024 - default_port = 8080 -) diff --git a/src/web/logging.v b/src/web/logging.v index 7ba649c..fc697ff 100644 --- a/src/web/logging.v +++ b/src/web/logging.v @@ -1,36 +1,35 @@ module web -// lfatal create a log message with the fatal level -pub fn (mut ctx Context) lfatal(msg string) { +import log + +// log reate a log message with the given level +pub fn (mut ctx Context) log(msg &string, level log.Level) { lock ctx.logger { - ctx.logger.fatal(msg) + ctx.logger.send_output(msg, level) } } +// lfatal create a log message with the fatal level +pub fn (mut ctx Context) lfatal(msg &string) { + ctx.log(msg, log.Level.fatal) +} + // lerror create a log message with the error level -pub fn (mut ctx Context) lerror(msg string) { - lock ctx.logger { - ctx.logger.error(msg) - } +pub fn (mut ctx Context) lerror(msg &string) { + ctx.log(msg, log.Level.error) } // lwarn create a log message with the warn level -pub fn (mut ctx Context) lwarn(msg string) { - lock ctx.logger { - ctx.logger.warn(msg) - } +pub fn (mut ctx Context) lwarn(msg &string) { + ctx.log(msg, log.Level.warn) } // linfo create a log message with the info level -pub fn (mut ctx Context) linfo(msg string) { - lock ctx.logger { - ctx.logger.info(msg) - } +pub fn (mut ctx Context) linfo(msg &string) { + ctx.log(msg, log.Level.info) } // ldebug create a log message with the debug level -pub fn (mut ctx Context) ldebug(msg string) { - lock ctx.logger { - ctx.logger.debug(msg) - } +pub fn (mut ctx Context) ldebug(msg &string) { + ctx.log(msg, log.Level.debug) } diff --git a/src/web/parse.v 
b/src/web/parse.v index 9e26f85..a095f0c 100644 --- a/src/web/parse.v +++ b/src/web/parse.v @@ -3,14 +3,10 @@ module web import net.urllib import net.http -// Method attributes that should be ignored when parsing, as they're used -// elsewhere. -const attrs_to_ignore = ['auth', 'markused'] - // Parsing function attributes for methods and path. -fn parse_attrs(name string, attrs []string) !([]http.Method, string) { +fn parse_attrs(name string, attrs []string) ?([]http.Method, string) { if attrs.len == 0 { - return [http.Method.get], '/${name}' + return [http.Method.get], '/$name' } mut x := attrs.clone() @@ -36,7 +32,7 @@ fn parse_attrs(name string, attrs []string) !([]http.Method, string) { } i++ } - if x.len > 0 && x.any(!web.attrs_to_ignore.contains(it)) { + if x.len > 0 { return IError(http.UnexpectedExtraAttributeError{ attributes: x }) @@ -45,7 +41,7 @@ fn parse_attrs(name string, attrs []string) !([]http.Method, string) { methods = [http.Method.get] } if path == '' { - path = '/${name}' + path = '/$name' } // Make path lowercase for case-insensitive comparisons return methods, path.to_lower() @@ -61,7 +57,7 @@ fn parse_query_from_url(url urllib.URL) map[string]string { } // Extract form data from an HTTP request. -fn parse_form_from_request(request http.Request) !(map[string]string, map[string][]http.FileData) { +fn parse_form_from_request(request http.Request) ?(map[string]string, map[string][]http.FileData) { mut form := map[string]string{} mut files := map[string][]http.FileData{} if request.method in methods_with_form { diff --git a/src/web/response/response.v b/src/web/response/response.v deleted file mode 100644 index c1475ff..0000000 --- a/src/web/response/response.v +++ /dev/null @@ -1,34 +0,0 @@ -module response - -pub struct Response[T] { -pub: - message string - data T -} - -// new_response constructs a new Response object with the given message -// & an empty data field. 
-pub fn new_response(message string) Response[string] { - return Response[string]{ - message: message - data: '' - } -} - -// new_data_response[T] constructs a new Response object with the given data -// & an empty message field. -pub fn new_data_response[T](data T) Response[T] { - return Response[T]{ - message: '' - data: data - } -} - -// new_full_response[T] constructs a new Response object with the given -// message & data. -pub fn new_full_response[T](message string, data T) Response[T] { - return Response[T]{ - message: message - data: data - } -} diff --git a/src/web/web.v b/src/web/web.v index 775354a..3e7b047 100644 --- a/src/web/web.v +++ b/src/web/web.v @@ -11,29 +11,147 @@ import net.urllib import time import json import log -import metrics + +// A dummy structure that returns from routes to indicate that you actually sent something to a user +[noinit] +pub struct Result {} + +pub const ( + methods_with_form = [http.Method.post, .put, .patch] + headers_close = http.new_custom_header_from_map({ + 'Server': 'VWeb' + http.CommonHeader.connection.str(): 'close' + }) or { panic('should never fail') } + + http_302 = http.new_response( + status: .found + text: '302 Found' + header: headers_close + ) + http_400 = http.new_response( + status: .bad_request + text: '400 Bad Request' + header: http.new_header( + key: .content_type + value: 'text/plain' + ).join(headers_close) + ) + http_404 = http.new_response( + status: .not_found + text: '404 Not Found' + header: http.new_header( + key: .content_type + value: 'text/plain' + ).join(headers_close) + ) + http_500 = http.new_response( + status: .internal_server_error + text: '500 Internal Server Error' + header: http.new_header( + key: .content_type + value: 'text/plain' + ).join(headers_close) + ) + mime_types = { + '.aac': 'audio/aac' + '.abw': 'application/x-abiword' + '.arc': 'application/x-freearc' + '.avi': 'video/x-msvideo' + '.azw': 'application/vnd.amazon.ebook' + '.bin': 'application/octet-stream' + '.bmp': 
'image/bmp' + '.bz': 'application/x-bzip' + '.bz2': 'application/x-bzip2' + '.cda': 'application/x-cdf' + '.csh': 'application/x-csh' + '.css': 'text/css' + '.csv': 'text/csv' + '.doc': 'application/msword' + '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' + '.eot': 'application/vnd.ms-fontobject' + '.epub': 'application/epub+zip' + '.gz': 'application/gzip' + '.gif': 'image/gif' + '.htm': 'text/html' + '.html': 'text/html' + '.ico': 'image/vnd.microsoft.icon' + '.ics': 'text/calendar' + '.jar': 'application/java-archive' + '.jpeg': 'image/jpeg' + '.jpg': 'image/jpeg' + '.js': 'text/javascript' + '.json': 'application/json' + '.jsonld': 'application/ld+json' + '.mid': 'audio/midi audio/x-midi' + '.midi': 'audio/midi audio/x-midi' + '.mjs': 'text/javascript' + '.mp3': 'audio/mpeg' + '.mp4': 'video/mp4' + '.mpeg': 'video/mpeg' + '.mpkg': 'application/vnd.apple.installer+xml' + '.odp': 'application/vnd.oasis.opendocument.presentation' + '.ods': 'application/vnd.oasis.opendocument.spreadsheet' + '.odt': 'application/vnd.oasis.opendocument.text' + '.oga': 'audio/ogg' + '.ogv': 'video/ogg' + '.ogx': 'application/ogg' + '.opus': 'audio/opus' + '.otf': 'font/otf' + '.png': 'image/png' + '.pdf': 'application/pdf' + '.php': 'application/x-httpd-php' + '.ppt': 'application/vnd.ms-powerpoint' + '.pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation' + '.rar': 'application/vnd.rar' + '.rtf': 'application/rtf' + '.sh': 'application/x-sh' + '.svg': 'image/svg+xml' + '.swf': 'application/x-shockwave-flash' + '.tar': 'application/x-tar' + '.tif': 'image/tiff' + '.tiff': 'image/tiff' + '.ts': 'video/mp2t' + '.ttf': 'font/ttf' + '.txt': 'text/plain' + '.vsd': 'application/vnd.visio' + '.wav': 'audio/wav' + '.weba': 'audio/webm' + '.webm': 'video/webm' + '.webp': 'image/webp' + '.woff': 'font/woff' + '.woff2': 'font/woff2' + '.xhtml': 'application/xhtml+xml' + '.xls': 'application/vnd.ms-excel' + '.xlsx': 
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' + '.xml': 'application/xml' + '.xul': 'application/vnd.mozilla.xul+xml' + '.zip': 'application/zip' + '.3gp': 'video/3gpp' + '.3g2': 'video/3gpp2' + '.7z': 'application/x-7z-compressed' + } + max_http_post_size = 1024 * 1024 + default_port = 8080 +) // The Context struct represents the Context which hold the HTTP request and response. // It has fields for the query, form, files. pub struct Context { +mut: + content_type string = 'text/plain' + status http.Status = http.Status.ok pub: // HTTP Request req http.Request - // API key used when authenticating requests - api_key string // TODO Response pub mut: - // TCP connection to client. - // But beware, do not store it for further use, after request processing web will close connection. - conn &net.TcpConn = unsafe { nil } - // Gives access to a shared logger object - logger shared log.Log - // Used to collect metrics on the web server - collector &metrics.MetricsCollector + done bool // time.ticks() from start of web connection handle. // You can use it to determine how much time is spent on your request. page_gen_start i64 - // REQUEST + // TCP connection to client. + // But beware, do not store it for further use, after request processing web will close connection. + conn &net.TcpConn static_files map[string]string static_mime_types map[string]string // Map containing query params for the route. @@ -43,13 +161,14 @@ pub mut: form map[string]string // Files from multipart-form. files map[string][]http.FileData + + header http.Header // response headers + // ? 
It doesn't seem to be used anywhere + form_error string // Allows reading the request body - reader &io.BufferedReader = unsafe { nil } - // RESPONSE - status http.Status = http.Status.ok - content_type string = 'text/plain' - // response headers - header http.Header + reader io.BufferedReader + // Gives access to a shared logger object + logger shared log.Log } struct FileData { @@ -69,101 +188,50 @@ struct Route { // Probably you can use it for check user session cookie or add header. pub fn (ctx Context) before_request() {} -// send_string writes the given string to the TCP connection socket. -fn (mut ctx Context) send_string(s string) ! { - ctx.conn.write(s.bytes())! +// send_string +fn send_string(mut conn net.TcpConn, s string) ? { + conn.write(s.bytes()) ? } -// send_reader reads at most `size` bytes from the given reader & writes them -// to the TCP connection socket. Internally, a 10KB buffer is used, to avoid -// having to store all bytes in memory at once. -fn (mut ctx Context) send_reader(mut reader io.Reader, size u64) ! { - mut buf := []u8{len: 10_000} - mut bytes_left := size - - // Repeat as long as the stream still has data - for bytes_left > 0 { - bytes_read := reader.read(mut buf)! - bytes_left -= u64(bytes_read) - - mut to_write := bytes_read - - for to_write > 0 { - bytes_written := ctx.conn.write(buf[bytes_read - to_write..bytes_read]) or { break } - - to_write = to_write - bytes_written - } +// send_response_to_client sends a response to the client +[manualfree] +pub fn (mut ctx Context) send_response_to_client(mimetype string, res string) bool { + if ctx.done { + return false } -} + ctx.done = true -// send_custom_response sends the given http.Response to the client. It can be -// used to overwrite the Context object & send a completely custom -// http.Response instead. -fn (mut ctx Context) send_custom_response(resp &http.Response) ! { - ctx.send_string(resp.bytestr())! 
-} + // build header + header := http.new_header_from_map({ + http.CommonHeader.content_type: mimetype + http.CommonHeader.content_length: res.len.str() + }).join(ctx.header) -// send_response_header constructs a valid HTTP response with an empty body & -// sends it to the client. -pub fn (mut ctx Context) send_response_header() ! { - mut resp := http.new_response( - header: ctx.header.join(headers_close) - ) - resp.header.add(.content_type, ctx.content_type) + mut resp := http.Response{ + header: header.join(web.headers_close) + text: res + } + resp.set_version(.v1_1) resp.set_status(ctx.status) - - ctx.send_custom_response(resp)! -} - -// send is a convenience function for sending the HTTP response with an empty -// body. -pub fn (mut ctx Context) send() bool { - return ctx.send_response('') -} - -// send_response constructs the resulting HTTP response with the given body -// string & sends it to the client. -pub fn (mut ctx Context) send_response(res string) bool { - ctx.send_response_header() or { return false } - ctx.send_string(res) or { return false } - + send_string(mut ctx.conn, resp.bytestr()) or { return false } return true } -// send_reader_response constructs the resulting HTTP response with the given -// body & streams the reader's contents to the client. -pub fn (mut ctx Context) send_reader_response(mut reader io.Reader, size u64) bool { - ctx.send_response_header() or { return false } - ctx.send_reader(mut reader, size) or { return false } - - return true -} - -// is_authenticated checks whether the request passes a correct API key. -pub fn (ctx &Context) is_authenticated() bool { - if provided_key := ctx.req.header.get_custom('X-Api-Key') { - return provided_key == ctx.api_key - } - - return false -} - -// body sends the given body as an HTTP response. -pub fn (mut ctx Context) body(status http.Status, content_type string, body string) Result { +// text responds to a request with some plaintext. 
+pub fn (mut ctx Context) text(status http.Status, s string) Result { ctx.status = status - ctx.content_type = content_type - ctx.send_response(body) + + ctx.send_response_to_client('text/plain', s) return Result{} } -// json[T] HTTP_OK with json_s as payload with content-type `application/json` -pub fn (mut ctx Context) json[T](status http.Status, j T) Result { +// json HTTP_OK with json_s as payload with content-type `application/json` +pub fn (mut ctx Context) json(status http.Status, j T) Result { ctx.status = status - ctx.content_type = 'application/json' json_s := json.encode(j) - ctx.send_response(json_s) + ctx.send_response_to_client('application/json', json_s) return Result{} } @@ -171,121 +239,135 @@ pub fn (mut ctx Context) json[T](status http.Status, j T) Result { // file Response HTTP_OK with file as payload // This function manually implements responses because it needs to stream the file contents pub fn (mut ctx Context) file(f_path string) Result { - // If the file doesn't exist, just respond with a 404 + if ctx.done { + return Result{} + } + if !os.is_file(f_path) { - ctx.status = .not_found - ctx.send() - - return Result{} + return ctx.not_found() } - ctx.header.add(.accept_ranges, 'bytes') + // ext := os.file_ext(f_path) + // data := os.read_file(f_path) or { + // eprint(err.msg()) + // ctx.server_error(500) + // return Result{} + // } + // content_type := web.mime_types[ext] + // if content_type == '' { + // eprintln('no MIME type found for extension $ext') + // ctx.server_error(500) + // return Result{} + // } + + // First, we return the headers for the request + + // We open the file before sending the headers in case reading fails file_size := os.file_size(f_path) - ctx.header.add(http.CommonHeader.content_length, file_size.str()) - // A HEAD request only returns the size of the file. 
- if ctx.req.method == .head { - ctx.send() - - return Result{} - } - - mut file := os.open(f_path) or { + file := os.open(f_path) or { eprintln(err.msg()) ctx.server_error(500) return Result{} } - defer { - file.close() - } - - // Currently, this only supports a single provided range, e.g. - // bytes=0-1023, and not multiple ranges, e.g. bytes=0-50, 100-150 - if range_str := ctx.req.header.get(.range) { - mut parts := range_str.split_nth('=', 2) - - // We only support the 'bytes' range type - if parts[0] != 'bytes' { - ctx.status = .requested_range_not_satisfiable - ctx.header.delete(.content_length) - ctx.send() - return Result{} - } - - parts = parts[1].split_nth('-', 2) - - start := parts[0].i64() - end := if parts[1] == '' { file_size - 1 } else { parts[1].u64() } - - // Either the actual number 0 or the result of an invalid integer - if end == 0 { - ctx.status = .requested_range_not_satisfiable - ctx.header.delete(.content_length) - ctx.send() - return Result{} - } - - // Move cursor to start of data to read - file.seek(start, .start) or { - ctx.server_error(500) - return Result{} - } - - length := end - u64(start) + 1 - - ctx.status = .partial_content - ctx.header.set(.content_length, length.str()) - ctx.send_reader_response(mut file, length) - } else { - ctx.send_reader_response(mut file, file_size) + // build header + header := http.new_header_from_map({ + // http.CommonHeader.content_type: content_type + http.CommonHeader.content_length: file_size.str() + }).join(ctx.header) + + mut resp := http.Response{ + header: header.join(web.headers_close) + } + resp.set_version(.v1_1) + resp.set_status(ctx.status) + send_string(mut ctx.conn, resp.bytestr()) or { return Result{} } + + mut buf := []u8{len: 1_000_000} + mut bytes_left := file_size + + // Repeat as long as the stream still has data + for bytes_left > 0 { + // TODO check if just breaking here is safe + bytes_read := file.read(mut buf) or { break } + bytes_left -= u64(bytes_read) + + mut to_write := 
bytes_read + + for to_write > 0 { + // TODO don't just loop infinitely here + bytes_written := ctx.conn.write(buf[bytes_read - to_write..bytes_read]) or { continue } + + to_write = to_write - bytes_written + } } + ctx.done = true return Result{} } // status responds with an empty textual response, essentially only returning // the given status code. pub fn (mut ctx Context) status(status http.Status) Result { - ctx.status = status - ctx.send() - - return Result{} + return ctx.text(status, '') } // server_error Response a server error pub fn (mut ctx Context) server_error(ecode int) Result { - ctx.send_custom_response(http_500) or {} - + $if debug { + eprintln('> ctx.server_error ecode: $ecode') + } + if ctx.done { + return Result{} + } + send_string(mut ctx.conn, web.http_500.bytestr()) or {} return Result{} } // redirect Redirect to an url pub fn (mut ctx Context) redirect(url string) Result { - mut resp := http_302 + if ctx.done { + return Result{} + } + ctx.done = true + mut resp := web.http_302 resp.header = resp.header.join(ctx.header) resp.header.add(.location, url) - - ctx.send_custom_response(resp) or {} - + send_string(mut ctx.conn, resp.bytestr()) or { return Result{} } return Result{} } +// not_found Send an not_found response +pub fn (mut ctx Context) not_found() Result { + return ctx.status(http.Status.not_found) +} + +// add_header Adds an header to the response with key and val +pub fn (mut ctx Context) add_header(key string, val string) { + ctx.header.add_custom(key, val) or {} +} + +// get_header Returns the header data from the key +pub fn (ctx &Context) get_header(key string) string { + return ctx.req.header.get_custom(key) or { '' } +} + interface DbInterface { db voidptr } // run runs the app [manualfree] -pub fn run[T](global_app &T, port int) { - mut l := net.listen_tcp(.ip6, ':${port}') or { panic('failed to listen ${err.code()} ${err}') } +pub fn run(global_app &T, port int) { + mut l := net.listen_tcp(.ip6, ':$port') or { panic('failed to 
listen $err.code() $err') } // Parsing methods attributes mut routes := map[string]Route{} $for method in T.methods { http_methods, route_path := parse_attrs(method.name, method.attrs) or { - eprintln('error parsing method attributes: ${err}') + eprintln('error parsing method attributes: $err') return } @@ -294,7 +376,7 @@ pub fn run[T](global_app &T, port int) { path: route_path } } - println('[Vweb] Running app on http://localhost:${port}') + println('[Vweb] Running app on http://localhost:$port') for { // Create a new app object for each connection, copy global data like db connections mut request_app := &T{} @@ -311,16 +393,16 @@ pub fn run[T](global_app &T, port int) { request_app.Context = global_app.Context // copy the context ref that contains static files map etc mut conn := l.accept() or { // failures should not panic - eprintln('accept() failed with error: ${err.msg()}') + eprintln('accept() failed with error: $err.msg()') continue } - spawn handle_conn[T](mut conn, mut request_app, routes) + go handle_conn(mut conn, mut request_app, routes) } } // handle_conn handles a connection [manualfree] -fn handle_conn[T](mut conn net.TcpConn, mut app T, routes map[string]Route) { +fn handle_conn(mut conn net.TcpConn, mut app T, routes map[string]Route) { conn.set_read_timeout(30 * time.second) conn.set_write_timeout(30 * time.second) @@ -331,23 +413,6 @@ fn handle_conn[T](mut conn net.TcpConn, mut app T, routes map[string]Route) { app.logger.flush() } - // Record how long request took to process - path := urllib.parse(app.req.url) or { urllib.URL{} }.path - labels := [ - ['method', app.req.method.str()]!, - ['path', path]!, - // Not all methods properly set this value yet I think - ['status', app.status.int().str()]!, - ] - app.collector.counter_increment(name: 'http_requests_total', labels: labels) - // Prometheus prefers metrics containing base units, as defined here - // https://prometheus.io/docs/practices/naming/ - 
app.collector.histogram_record(f64(time.ticks() - app.page_gen_start) / 1000, - - name: 'http_requests_duration_seconds' - labels: labels - ) - unsafe { free(app) } @@ -363,8 +428,8 @@ fn handle_conn[T](mut conn net.TcpConn, mut app T, routes map[string]Route) { // Request parse head := http.parse_request_head(mut reader) or { // Prevents errors from being thrown when BufferedReader is empty - if '${err}' != 'none' { - eprintln('error parsing request head: ${err}') + if '$err' != 'none' { + eprintln('error parsing request head: $err') } return } @@ -372,7 +437,7 @@ fn handle_conn[T](mut conn net.TcpConn, mut app T, routes map[string]Route) { // The healthcheck spams the logs, which isn't very useful if head.url != '/health' { lock app.logger { - app.logger.debug('${head.method} ${head.url} ${head.version}') + app.logger.debug('$head.method $head.url $head.version') } } @@ -386,7 +451,7 @@ fn handle_conn[T](mut conn net.TcpConn, mut app T, routes map[string]Route) { // URL Parse url := urllib.parse(head.url) or { - eprintln('error parsing path: ${err}') + eprintln('error parsing path: $err') return } @@ -413,8 +478,6 @@ fn handle_conn[T](mut conn net.TcpConn, mut app T, routes map[string]Route) { static_mime_types: app.static_mime_types reader: reader logger: app.logger - collector: app.collector - api_key: app.api_key } // Calling middleware... 
@@ -424,7 +487,7 @@ fn handle_conn[T](mut conn net.TcpConn, mut app T, routes map[string]Route) { $for method in T.methods { $if method.return_type is Result { route := routes[method.name] or { - eprintln('parsed attributes for the `${method.name}` are not found, skipping...') + eprintln('parsed attributes for the `$method.name` are not found, skipping...') Route{} } @@ -433,30 +496,34 @@ fn handle_conn[T](mut conn net.TcpConn, mut app T, routes map[string]Route) { // Used for route matching route_words := route.path.split('/').filter(it != '') - // Route immediate matches & index files first + // Route immediate matches first // For example URL `/register` matches route `/:user`, but `fn register()` // should be called first. - if (!route.path.contains('/:') && url_words == route_words) - || (url_words.len == 0 && route_words == ['index'] && method.name == 'index') { - // Check whether the request is authorised - if 'auth' in method.attrs && !app.is_authenticated() { - conn.write(http_401.bytes()) or {} - return - } - + if !route.path.contains('/:') && url_words == route_words { // We found a match + if head.method == .post && method.args.len > 0 { + // TODO implement POST requests + // Populate method args with form values + // mut args := []string{cap: method.args.len} + // for param in method.args { + // args << form[param.name] + // } + // app.$method(args) + } else { + app.$method() + } + return + } + + if url_words.len == 0 && route_words == ['index'] && method.name == 'index' { app.$method() return - } else if params := route_matches(url_words, route_words) { - // Check whether the request is authorised - if 'auth' in method.attrs && !app.is_authenticated() { - conn.write(http_401.bytes()) or {} - return - } + } + if params := route_matches(url_words, route_words) { method_args := params.clone() if method_args.len != method.args.len { - eprintln('warning: uneven parameters count (${method.args.len}) in `${method.name}`, compared to the web route 
`${method.attrs}` (${method_args.len})') + eprintln('warning: uneven parameters count ($method.args.len) in `$method.name`, compared to the web route `$method.attrs` ($method_args.len)') } app.$method(method_args) return @@ -465,7 +532,7 @@ fn handle_conn[T](mut conn net.TcpConn, mut app T, routes map[string]Route) { } } // Route not found - conn.write(http_404.bytes()) or {} + conn.write(web.http_404.bytes()) or {} } // route_matches returns wether a route matches @@ -511,6 +578,28 @@ fn route_matches(url_words []string, route_words []string) ?[]string { return params } +// ip Returns the ip address from the current user +pub fn (ctx &Context) ip() string { + mut ip := ctx.req.header.get(.x_forwarded_for) or { '' } + if ip == '' { + ip = ctx.req.header.get_custom('X-Real-Ip') or { '' } + } + + if ip.contains(',') { + ip = ip.all_before(',') + } + if ip == '' { + ip = ctx.conn.peer_ip() or { '' } + } + return ip +} + +// error Set s to the form error +pub fn (mut ctx Context) error(s string) { + println('web error: $s') + ctx.form_error = s +} + // filter Do not delete. // It used by `vlib/v/gen/c/str_intp.v:130` for string interpolation inside web templates // TODO: move it to template render diff --git a/vieter.toml b/vieter.toml index 34b4f4e..d3922a4 100644 --- a/vieter.toml +++ b/vieter.toml @@ -4,14 +4,11 @@ data_dir = "data" pkg_dir = "data/pkgs" log_level = "DEBUG" default_arch = "x86_64" -arch = "x86_64" address = "http://localhost:8000" -# global_schedule = '* *' +global_schedule = '* *' api_update_frequency = 2 image_rebuild_frequency = 1 max_concurrent_builds = 3 -# max_log_age = 64 -log_removal_schedule = '* * *' -collect_metrics = true +