Compare commits

..

No commits in common. "03020fe798df1311a6202080220dc4426b6d817f" and "785acd5c2e22ff995ce55df7e4981078f3b446fc" have entirely different histories.

5 changed files with 33 additions and 79 deletions

View File

@@ -1,91 +1,43 @@
pipeline: pipeline:
# Download the cache from S3
restore-cache:
image: plugins/s3-cache
pull: true
endpoint: https://s3.roosens.me
root: build-cache/
restore: true
secrets: [ cache_s3_access_key, cache_s3_secret_key ]
# =====BUILDING=====
build-frontend: build-frontend:
image: node:15-alpine3.13 image: node:15-alpine3.13
pull: true pull: true
group: build
commands: commands:
- cd web - cd web
- yarn install - yarn install
- yarn run build - yarn run build
build-backend:
image: chewingbever/fej-builder:latest
pull: true
group: build
environment:
- CARGO_HOME=.cargo
commands:
- cargo build
# =====TESTING=====
test-backend:
image: chewingbever/fej-builder:latest
environment:
- CARGO_HOME=.cargo
commands:
- cargo test
# =====LINTING=====
lint-frontend: lint-frontend:
image: node:15-alpine3.13 image: node:15-alpine3.13
group: lint
commands: commands:
- cd web - cd web
- yarn run lint - yarn run lint
# This doesn't require compiling anything
lint-backend: lint-backend:
image: chewingbever/fej-builder:latest image: chewingbever/fej-builder:latest
group: lint pull: true
environment:
- CARGO_HOME=.cargo
commands: commands:
- cargo fmt -- --check - cargo fmt -- --check
# This is run here because it requires compilation
- cargo clippy --all-targets -- -D warnings
# =====REBUILD & FLUSH CACHE===== # publish-builder:
rebuild-cache: # image: plugins/docker
image: plugins/s3-cache # repo: chewingbever/fej-builder
# dockerfile: docker/Dockerfile.builder
# tag: [ latest ]
# secrets: [ docker_username, docker_password ]
# when:
# branch: develop
# event: push
endpoint: https://s3.roosens.me # Backend cicd jobs are disabled until we can figure out a way to cache stuff
root: build-cache/ # test-backend:
rebuild: true # image: chewingbever/fej-builder:latest
mount: # # Always update the builder image
- target # pull: true
- .cargo # commands:
- web/node_modules # - cargo test
secrets: [ cache_s3_access_key, cache_s3_secret_key ] # TODO build dev & rel image, deploy these images
# Push the cache, even on failure
when:
status: [ success, failure ]
flush-cache: # branches: [ master, develop ]
image: plugins/s3-cache
endpoint: https://s3.roosens.me
root: build-cache/
flush: true
# Delete cache older than 30 days (might lower this)
flush_age: 30
secrets: [ cache_s3_access_key, cache_s3_secret_key ]
# Push the cache, even on failure
when:
status: [ success, failure ]

View File

@@ -16,7 +16,10 @@ impl PickupTime {
/// * `date` - Date of pickup time /// * `date` - Date of pickup time
/// * `label` - Type of trash /// * `label` - Type of trash
pub fn new(date: BasicDate, label: String) -> PickupTime { pub fn new(date: BasicDate, label: String) -> PickupTime {
PickupTime { date, label } PickupTime {
date: date,
label: label,
}
} }
} }

View File

@@ -18,7 +18,10 @@ impl Street {
// This constructor just makes my life a bit easier during testing // This constructor just makes my life a bit easier during testing
#[cfg(test)] #[cfg(test)]
fn new(name: String, city: String) -> Street { fn new(name: String, city: String) -> Street {
Street { name, city } Street {
name: name,
city: city,
}
} }
} }

View File

@@ -8,15 +8,12 @@ fn main() {
let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set"); let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
let db_conn = PgConnection::establish(&database_url) let db_conn = PgConnection::establish(&database_url)
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url)); .expect(&format!("Error connecting to {}", database_url));
// Doing this linearly is good enough I'd say // Doing this linearly is good enough I'd say
for c in ABC.chars() { for c in ABC.chars() {
if let Ok(streets) = search_streets(&c.to_string()) { if let Ok(streets) = search_streets(&c.to_string()) {
insert_into(ivago_streets) insert_into(ivago_streets).values(streets).execute(&db_conn);
.values(streets)
.execute(&db_conn)
.expect("Failed to insert rows.");
} }
} }
} }

View File

@@ -23,9 +23,9 @@ use rocket_contrib::databases::diesel;
#[cfg(feature = "frontend")] #[cfg(feature = "frontend")]
use rocket_contrib::serve::StaticFiles; use rocket_contrib::serve::StaticFiles;
pub struct Cors; pub struct CORS;
impl Fairing for Cors { impl Fairing for CORS {
fn info(&self) -> Info { fn info(&self) -> Info {
Info { Info {
name: "Add CORS headers to responses", name: "Add CORS headers to responses",
@@ -63,9 +63,8 @@ fn run_db_migrations(rocket: Rocket) -> Result<Rocket, Rocket> {
fn rocket() -> rocket::Rocket { fn rocket() -> rocket::Rocket {
// This needs to be muted for the frontend feature // This needs to be muted for the frontend feature
#[allow(unused_mut)]
let mut rocket = rocket::ignite() let mut rocket = rocket::ignite()
.attach(Cors) .attach(CORS)
.attach(FejDbConn::fairing()) .attach(FejDbConn::fairing())
.attach(AdHoc::on_attach("Database Migrations", run_db_migrations)) .attach(AdHoc::on_attach("Database Migrations", run_db_migrations))
.mount("/api/ivago", routes::ivago()) // /api being hardcoded is temporary .mount("/api/ivago", routes::ivago()) // /api being hardcoded is temporary