chore: removed dead code
parent 029eb95382
commit d8971f6c62
@@ -33,8 +33,7 @@ impl ServeCommand {
         let pool = db::initialize_db(cli.data_dir.join(crate::DB_FILENAME), true).unwrap();
 
         let ctx = server::Context {
-            pool: pool.clone(),
-            repo: db::SqliteRepository::from(pool.clone()),
+            repo: db::SqliteRepository::from(pool),
         };
         let app = server::app(ctx);
 
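The only change here is that `server::Context` is built without a `pool` field, so the pool can be moved straight into `db::SqliteRepository::from(pool)` and the final `pool.clone()` disappears. `server::app` itself is not part of this diff; purely as a sketch of the kind of constructor such a `Clone`-able context is usually handed to (axum is assumed because the last hunk's file imports `tower_http::trace::TraceLayer`; the route and handler below are placeholders, not from the codebase):

```rust
use axum::{extract::State, routing::get, Router};
use tower_http::trace::TraceLayer;

// Stand-ins mirroring the Context in the last hunk; the real types live in
// crate::db and crate::server.
#[derive(Clone)]
struct SqliteRepository;

#[derive(Clone)]
struct Context {
    repo: SqliteRepository,
}

// Hypothetical app constructor: axum clones the state per request, which is
// why Context derives Clone and why keeping it slim (repo only) is enough.
fn app(ctx: Context) -> Router {
    Router::new()
        .route("/health", get(health))
        .layer(TraceLayer::new_for_http())
        .with_state(ctx)
}

async fn health(State(_ctx): State<Context>) -> &'static str {
    "ok"
}
```

Because the state is cloned for each request, trimming `Context` down to the repository (which presumably wraps an `Arc`-backed pool) keeps those clones cheap.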
@@ -1,9 +1,7 @@
-use std::collections::HashSet;
-
 use diesel::prelude::*;
 use serde::{Deserialize, Serialize};
 
-use crate::db::{schema::*, DbPool, DbResult};
+use crate::db::schema::*;
 
 #[derive(Serialize, Deserialize, Clone, Queryable, Selectable)]
 #[diesel(table_name = device_subscriptions)]
@@ -25,194 +23,3 @@ pub struct NewDeviceSubscription {
     pub time_changed: i64,
     pub deleted: bool,
 }
-
-impl DeviceSubscription {
-    pub fn for_device(pool: &DbPool, device_id: i64) -> DbResult<Vec<String>> {
-        Ok(device_subscriptions::table
-            .select(device_subscriptions::podcast_url)
-            .filter(device_subscriptions::device_id.eq(device_id))
-            .get_results(&mut pool.get()?)?)
-    }
-
-    pub fn for_user(pool: &DbPool, user_id: i64) -> DbResult<Vec<String>> {
-        Ok(device_subscriptions::table
-            .inner_join(devices::table)
-            .filter(devices::user_id.eq(user_id))
-            .select(device_subscriptions::podcast_url)
-            .distinct()
-            .get_results(&mut pool.get()?)?)
-    }
-
-    pub fn set_for_device(
-        pool: &DbPool,
-        device_id: i64,
-        urls: Vec<String>,
-        timestamp: i64,
-    ) -> DbResult<()> {
-        pool.get()?.transaction(|conn| {
-            // https://github.com/diesel-rs/diesel/discussions/2826
-            // SQLite doesn't support default on conflict set values, so we can't handle this using
-            // on conflict. Therefore, we instead calculate which URLs should be inserted and which
-            // updated, so we avoid conflicts.
-            let urls: HashSet<String> = urls.into_iter().collect();
-            let urls_in_db: HashSet<String> = device_subscriptions::table
-                .select(device_subscriptions::podcast_url)
-                .filter(device_subscriptions::device_id.eq(device_id))
-                .get_results(&mut pool.get()?)?
-                .into_iter()
-                .collect();
-
-            // URLs originally in the database that are no longer in the list
-            let urls_to_delete = urls_in_db.difference(&urls);
-
-            // URLs not in the database that are in the new list
-            let urls_to_insert = urls.difference(&urls_in_db);
-
-            // URLs that are in both the database and the new list. For these, those marked as
-            // "deleted" in the database are updated so they're no longer deleted, with their
-            // timestamp updated.
-            let urls_to_update = urls.intersection(&urls_in_db);
-
-            // Mark the URLs to delete as properly deleted
-            diesel::update(
-                device_subscriptions::table.filter(
-                    device_subscriptions::device_id
-                        .eq(device_id)
-                        .and(device_subscriptions::podcast_url.eq_any(urls_to_delete)),
-                ),
-            )
-            .set((
-                device_subscriptions::deleted.eq(true),
-                device_subscriptions::time_changed.eq(timestamp),
-            ))
-            .execute(conn)?;
-
-            // Update the existing deleted URLs that are reinserted as no longer deleted
-            diesel::update(
-                device_subscriptions::table.filter(
-                    device_subscriptions::device_id
-                        .eq(device_id)
-                        .and(device_subscriptions::podcast_url.eq_any(urls_to_update))
-                        .and(device_subscriptions::deleted.eq(true)),
-                ),
-            )
-            .set((
-                device_subscriptions::deleted.eq(false),
-                device_subscriptions::time_changed.eq(timestamp),
-            ))
-            .execute(conn)?;
-
-            // Insert the new values into the database
-            diesel::insert_into(device_subscriptions::table)
-                .values(
-                    urls_to_insert
-                        .into_iter()
-                        .map(|url| NewDeviceSubscription {
-                            device_id,
-                            podcast_url: url.to_string(),
-                            deleted: false,
-                            time_changed: timestamp,
-                        })
-                        .collect::<Vec<_>>(),
-                )
-                .execute(conn)?;
-
-            Ok(())
-        })
-    }
-
-    pub fn update_for_device(
-        pool: &DbPool,
-        device_id: i64,
-        added: Vec<String>,
-        removed: Vec<String>,
-        timestamp: i64,
-    ) -> DbResult<()> {
-        // TODO URLs that are in both the added and removed lists will currently get "re-added",
-        // meaning their change timestamp will be updated even though they haven't really changed.
-        let added: HashSet<_> = added.into_iter().collect();
-        let removed: HashSet<_> = removed.into_iter().collect();
-
-        pool.get()?.transaction(|conn| {
-            let urls_in_db: HashSet<String> = device_subscriptions::table
-                .select(device_subscriptions::podcast_url)
-                .filter(device_subscriptions::device_id.eq(device_id))
-                .get_results(&mut pool.get()?)?
-                .into_iter()
-                .collect();
-
-            // Subscriptions to remove are those that were already in the database and are now part
-            // of the removed list. Subscriptions that were never added in the first place don't
-            // need to be marked as deleted. We also only update those that aren't already marked
-            // as deleted.
-            let urls_to_delete = removed.intersection(&urls_in_db);
-
-            diesel::update(
-                device_subscriptions::table.filter(
-                    device_subscriptions::device_id
-                        .eq(device_id)
-                        .and(device_subscriptions::podcast_url.eq_any(urls_to_delete))
-                        .and(device_subscriptions::deleted.eq(false)),
-                ),
-            )
-            .set((
-                device_subscriptions::deleted.eq(true),
-                device_subscriptions::time_changed.eq(timestamp),
-            ))
-            .execute(conn)?;
-
-            // Subscriptions to update are those that are already in the database, but are also in
-            // the added list. Only those who were originally marked as deleted get updated.
-            let urls_to_update = added.intersection(&urls_in_db);
-
-            diesel::update(
-                device_subscriptions::table.filter(
-                    device_subscriptions::device_id
-                        .eq(device_id)
-                        .and(device_subscriptions::podcast_url.eq_any(urls_to_update))
-                        .and(device_subscriptions::deleted.eq(true)),
-                ),
-            )
-            .set((
-                device_subscriptions::deleted.eq(false),
-                device_subscriptions::time_changed.eq(timestamp),
-            ))
-            .execute(conn)?;
-
-            // Subscriptions to insert are those that aren't in the database and are part of the
-            // added list
-            let urls_to_insert = added.difference(&urls_in_db);
-
-            diesel::insert_into(device_subscriptions::table)
-                .values(
-                    urls_to_insert
-                        .into_iter()
-                        .map(|url| NewDeviceSubscription {
-                            device_id,
-                            podcast_url: url.to_string(),
-                            deleted: false,
-                            time_changed: timestamp,
-                        })
-                        .collect::<Vec<_>>(),
-                )
-                .execute(conn)?;
-
-            Ok(())
-        })
-    }
-
-    pub fn updated_since_for_device(
-        pool: &DbPool,
-        device_id: i64,
-        timestamp: i64,
-    ) -> DbResult<Vec<Self>> {
-        Ok(device_subscriptions::table
-            .select(Self::as_select())
-            .filter(
-                device_subscriptions::device_id
-                    .eq(device_id)
-                    .and(device_subscriptions::time_changed.ge(timestamp)),
-            )
-            .get_results(&mut pool.get()?)?)
-    }
-}
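The bulk of the removal above is the old `impl DeviceSubscription` block. Its `set_for_device` explains in its comments why it avoids upserts: SQLite can't keep a column's existing value on conflict, so the method partitioned the incoming URL list against what was already stored and ran separate UPDATE and INSERT statements. The set bookkeeping, isolated into a small self-contained sketch (function and variable names here are illustrative, not from the codebase):

```rust
use std::collections::HashSet;

/// Split a desired subscription list against what is already stored:
/// returns (to_delete, to_insert, to_update) as owned sets.
fn partition(
    desired: &HashSet<String>,
    in_db: &HashSet<String>,
) -> (HashSet<String>, HashSet<String>, HashSet<String>) {
    let to_delete = in_db.difference(desired).cloned().collect(); // stored but no longer wanted
    let to_insert = desired.difference(in_db).cloned().collect(); // wanted but not stored yet
    let to_update = desired.intersection(in_db).cloned().collect(); // stored rows to un-delete
    (to_delete, to_insert, to_update)
}

fn main() {
    let desired: HashSet<String> = ["a", "b"].into_iter().map(String::from).collect();
    let in_db: HashSet<String> = ["b", "c"].into_iter().map(String::from).collect();
    let (del, ins, upd) = partition(&desired, &in_db);
    assert!(del.contains("c") && ins.contains("a") && upd.contains("b"));
}
```

`update_for_device` applied the same idea to incremental add/remove deltas, intersecting and differencing against the stored set before touching the table.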
@@ -1,6 +1,5 @@
use std::{fmt, str::FromStr};

use chrono::NaiveDateTime;
use diesel::{
    deserialize::{FromSql, FromSqlRow},
    expression::AsExpression,
@@ -12,7 +11,7 @@ use diesel::{
 };
 use serde::{Deserialize, Serialize};
 
-use crate::db::{schema::*, DbPool, DbResult};
+use crate::db::schema::*;
 
 #[derive(Serialize, Deserialize, Clone, Queryable, Selectable)]
 #[diesel(table_name = episode_actions)]
@@ -1,4 +1,4 @@
-use chrono::{DateTime, NaiveDateTime, Utc};
+use chrono::DateTime;
 use diesel::prelude::*;
 
 use super::SqliteRepository;
@@ -1,56 +1,5 @@
-use chrono::{NaiveDateTime, Utc};
 use serde::{Deserialize, Serialize};
 
-use crate::db;
-
-#[derive(Serialize, Deserialize)]
-#[serde(rename_all = "lowercase")]
-pub enum DeviceType {
-    Desktop,
-    Laptop,
-    Mobile,
-    Server,
-    Other,
-}
-
-impl From<DeviceType> for db::DeviceType {
-    fn from(value: DeviceType) -> Self {
-        match value {
-            DeviceType::Desktop => Self::Desktop,
-            DeviceType::Laptop => Self::Laptop,
-            DeviceType::Mobile => Self::Mobile,
-            DeviceType::Server => Self::Server,
-            DeviceType::Other => Self::Other,
-        }
-    }
-}
-
-impl From<db::DeviceType> for DeviceType {
-    fn from(value: db::DeviceType) -> Self {
-        match value {
-            db::DeviceType::Desktop => Self::Desktop,
-            db::DeviceType::Laptop => Self::Laptop,
-            db::DeviceType::Mobile => Self::Mobile,
-            db::DeviceType::Server => Self::Server,
-            db::DeviceType::Other => Self::Other,
-        }
-    }
-}
-
-#[derive(Serialize)]
-pub struct Device {
-    pub id: String,
-    pub caption: String,
-    pub r#type: DeviceType,
-    pub subscriptions: i64,
-}
-
-#[derive(Deserialize)]
-pub struct DevicePatch {
-    pub caption: Option<String>,
-    pub r#type: Option<DeviceType>,
-}
-
 #[derive(Deserialize)]
 pub struct SubscriptionDelta {
     pub add: Vec<String>,
@@ -6,7 +6,6 @@ use tower_http::trace::TraceLayer;
 
 #[derive(Clone)]
 pub struct Context {
-    pub pool: crate::db::DbPool,
     pub repo: crate::db::SqliteRepository,
 }
 
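Together with the first hunk, this leaves `server::Context` holding only the repository: the pool is moved into `db::SqliteRepository::from(pool)` once, with no trailing `pool.clone()`, and anything that still needs database access presumably reaches it through the repository. A sketch of the ownership this relies on, assuming `SqliteRepository` wraps the diesel/r2d2 pool (its actual definition is not part of this diff):

```rust
// Requires diesel with the "r2d2" and "sqlite" features enabled.
use diesel::r2d2::{ConnectionManager, Pool};
use diesel::SqliteConnection;

// Assumed alias; crate::db presumably defines an equivalent DbPool.
type DbPool = Pool<ConnectionManager<SqliteConnection>>;

// Hypothetical shape of the repository: it owns the pool, so Context no
// longer needs to keep a second handle to it.
#[derive(Clone)]
struct SqliteRepository {
    pool: DbPool,
}

impl From<DbPool> for SqliteRepository {
    fn from(pool: DbPool) -> Self {
        Self { pool }
    }
}

// The slimmed-down context from the hunk above, mirrored here for clarity.
#[derive(Clone)]
struct Context {
    repo: SqliteRepository,
}
```

r2d2 pools are reference-counted handles, so even when the repository is cloned per request the underlying connection pool is shared.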