From d7a108004bbc5c930bed1c69e4f62204a1ac4acc Mon Sep 17 00:00:00 2001
From: "NGnius (Graham)"
Date: Sun, 17 Mar 2024 17:15:23 -0400
Subject: [PATCH] Make limits cache update only occur exactly once per period

---
 backend/Cargo.lock                           |  2 +-
 backend/Cargo.toml                           |  2 +-
 backend/src/consts.rs                        |  2 +
 backend/src/settings/detect/auto_detect.rs   | 23 +-----
 backend/src/settings/detect/limits_worker.rs | 73 ++++++++++++--------
 backend/src/utility.rs                       | 15 ++++
 package.json                                 |  2 +-
 7 files changed, 67 insertions(+), 52 deletions(-)

diff --git a/backend/Cargo.lock b/backend/Cargo.lock
index dc737ff..01176da 100644
--- a/backend/Cargo.lock
+++ b/backend/Cargo.lock
@@ -1170,7 +1170,7 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
 
 [[package]]
 name = "powertools"
-version = "2.0.0-alpha4"
+version = "2.0.0-alpha5"
 dependencies = [
  "async-trait",
  "chrono",
diff --git a/backend/Cargo.toml b/backend/Cargo.toml
index fcbe28b..e22502c 100644
--- a/backend/Cargo.toml
+++ b/backend/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "powertools"
-version = "2.0.0-alpha4"
+version = "2.0.0-alpha5"
 edition = "2021"
 authors = ["NGnius (Graham)"]
 description = "Backend (superuser) functionality for PowerTools"
diff --git a/backend/src/consts.rs b/backend/src/consts.rs
index cd1e2f3..246b1bb 100644
--- a/backend/src/consts.rs
+++ b/backend/src/consts.rs
@@ -12,6 +12,8 @@ pub const LIMITS_FILE: &str = "limits_cache.ron";
 pub const LIMITS_REFRESH_PERIOD: std::time::Duration = std::time::Duration::from_secs(60 * 60 * 24); // 1 day
 #[cfg(feature = "online")]
 pub const LIMITS_STARTUP_WAIT: std::time::Duration = std::time::Duration::from_secs(60); // 1 minute
+#[cfg(feature = "online")]
+pub const LIMITS_CHECK_PERIOD: std::time::Duration = std::time::Duration::from_secs(5 * 60); // 5 minutes
 
 pub const LIMITS_OVERRIDE_FILE: &str = "limits_override.ron";
 #[cfg(feature = "online")]
diff --git a/backend/src/settings/detect/auto_detect.rs b/backend/src/settings/detect/auto_detect.rs
index 5ee8e4b..7ebba0e 100644
--- a/backend/src/settings/detect/auto_detect.rs
+++ b/backend/src/settings/detect/auto_detect.rs
@@ -8,28 +8,7 @@ use crate::persist::{DriverJson, SettingsJson};
 use crate::settings::{Driver, General, ProviderBuilder, TBattery, TCpus, TGeneral, TGpu};
 
 fn get_limits() -> limits_core::json_v2::Base {
-    let limits_path = super::utility::limits_path();
-    match File::open(&limits_path) {
-        Ok(f) => match ron::de::from_reader(f) {
-            Ok(lim) => lim,
-            Err(e) => {
-                log::warn!(
-                    "Failed to parse limits file `{}`, cannot use for auto_detect: {}",
-                    limits_path.display(),
-                    e
-                );
-                limits_core::json_v2::Base::default()
-            }
-        },
-        Err(e) => {
-            log::warn!(
-                "Failed to open limits file `{}`: {}",
-                limits_path.display(),
-                e
-            );
-            super::limits_worker::get_limits_cached()
-        }
-    }
+    super::limits_worker::get_limits_cached()
 }
 
 fn get_limits_overrides() -> Option {
diff --git a/backend/src/settings/detect/limits_worker.rs b/backend/src/settings/detect/limits_worker.rs
index f211cc8..9c671fa 100644
--- a/backend/src/settings/detect/limits_worker.rs
+++ b/backend/src/settings/detect/limits_worker.rs
@@ -2,6 +2,11 @@ use std::thread::{self, JoinHandle};
 
 use limits_core::json_v2::Base;
 
+#[inline]
+fn expired_updated_time() -> chrono::DateTime<chrono::offset::Utc> {
+    chrono::offset::Utc::now() - (crate::consts::LIMITS_REFRESH_PERIOD * 2)
+}
+
 #[cfg(feature = "online")]
 pub fn spawn() -> JoinHandle<()> {
     thread::spawn(move || {
@@ -12,41 +17,53 @@ pub fn spawn() -> JoinHandle<()> {
         loop {
             if (limits_path.exists() && limits_path.is_file()) || !limits_path.exists() {
                 // try to load limits from file, fallback to built-in default
-                let base = if limits_path.exists() {
+                let mut base = if limits_path.exists() {
                     match std::fs::File::open(&limits_path) {
                         Ok(f) => match ron::de::from_reader(f) {
                             Ok(b) => b,
                             Err(e) => {
                                 log::error!("Cannot parse {}: {}", limits_path.display(), e);
-                                Base::default()
+                                crate::utility::CachedData {
+                                    data: Base::default(),
+                                    updated: expired_updated_time(),
+                                }
                             }
                         },
                         Err(e) => {
                             log::error!("Cannot open {}: {}", limits_path.display(), e);
-                            Base::default()
+                            crate::utility::CachedData {
+                                data: Base::default(),
+                                updated: expired_updated_time(),
+                            }
                         }
                     }
                 } else {
-                    let base = Base::default();
-                    save_base(&base, &limits_path);
+                    let mut base = crate::utility::CachedData {
+                        data: Base::default(),
+                        updated: expired_updated_time(),
+                    };
+                    save_base(&mut base, &limits_path);
                     base
                 };
-                crate::api::web::set_base_url(base.store);
-                if let Some(refresh) = &base.refresh {
-                    // try to retrieve newer version
-                    match ureq::get(refresh).call() {
-                        Ok(response) => {
-                            let json_res: std::io::Result<Base> = response.into_json();
-                            match json_res {
-                                Ok(new_base) => {
-                                    save_base(&new_base, &limits_path);
-                                }
-                                Err(e) => {
-                                    log::error!("Cannot parse response from `{}`: {}", refresh, e)
+                crate::api::web::set_base_url(base.data.store.clone());
+                if let Some(refresh) = &base.data.refresh {
+                    if base.needs_update(crate::consts::LIMITS_REFRESH_PERIOD) {
+                        // try to retrieve newer version
+                        match ureq::get(refresh).call() {
+                            Ok(response) => {
+                                let json_res: std::io::Result<Base> = response.into_json();
+                                match json_res {
+                                    Ok(new_base) => {
+                                        base.data = new_base;
+                                        save_base(&mut base, &limits_path);
+                                    }
+                                    Err(e) => {
+                                        log::error!("Cannot parse response from `{}`: {}", refresh, e)
+                                    }
                                 }
                             }
+                            Err(e) => log::warn!("Cannot download limits from `{}`: {}", refresh, e),
                         }
-                        Err(e) => log::warn!("Cannot download limits from `{}`: {}", refresh, e),
                     }
                 } else {
                     log::info!("limits_worker refresh is empty, terminating...");
                     break;
                 }
             } else if !limits_path.is_file() {
                 log::error!("Path for storing limits is not a file!");
             }
-            thread::sleep(crate::consts::LIMITS_REFRESH_PERIOD);
+            thread::sleep(crate::consts::LIMITS_CHECK_PERIOD);
         }
         log::warn!("limits_worker completed!");
     })
@@ -68,33 +85,35 @@ pub fn spawn() -> JoinHandle<()> {
     })
 }
 
-pub fn get_limits_cached() -> Base {
+pub fn get_limits_cached() -> Base {
     let limits_path = super::utility::limits_path();
-    if limits_path.is_file() {
+    let cached: crate::utility::CachedData<Base> = if limits_path.is_file() {
         match std::fs::File::open(&limits_path) {
             Ok(f) => match ron::de::from_reader(f) {
                 Ok(b) => b,
                 Err(e) => {
                     log::error!("Cannot parse {}: {}", limits_path.display(), e);
-                    Base::default()
+                    return Base::default();
                 }
             },
             Err(e) => {
                 log::error!("Cannot open {}: {}", limits_path.display(), e);
-                Base::default()
+                return Base::default();
             }
         }
     } else {
-        Base::default()
-    }
+        return Base::default();
+    };
+    cached.data
 }
 
 #[cfg(feature = "online")]
-fn save_base(new_base: &Base, path: impl AsRef<std::path::Path>) {
+fn save_base(new_base: &mut crate::utility::CachedData<Base>, path: impl AsRef<std::path::Path>) {
     let limits_path = path.as_ref();
+    new_base.updated = chrono::offset::Utc::now();
     match std::fs::File::create(&limits_path) {
         Ok(f) => {
-            match ron::ser::to_writer_pretty(f, &new_base, crate::utility::ron_pretty_config()) {
+            match ron::ser::to_writer_pretty(f, new_base, crate::utility::ron_pretty_config()) {
                 Ok(_) => log::info!("Successfully saved new limits to {}", limits_path.display()),
                 Err(e) => log::error!(
                     "Failed to save limits json to file `{}`: {}",
diff --git a/backend/src/utility.rs b/backend/src/utility.rs
index d9ec7b1..3814211 100644
--- a/backend/src/utility.rs
+++ b/backend/src/utility.rs
@@ -3,6 +3,9 @@ use std::io::{Read, Write};
 
 use std::os::unix::fs::PermissionsExt;
 
+use serde::{Deserialize, Serialize};
+use chrono::{offset::Utc, DateTime};
+
 /*pub fn unwrap_lock<'a, T: Sized>(
     result: LockResult<MutexGuard<'a, T>>,
     lock_name: &str,
@@ -16,6 +19,18 @@ use std::os::unix::fs::PermissionsExt;
     }
 }*/
 
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub struct CachedData<T> {
+    pub data: T,
+    pub updated: DateTime<Utc>,
+}
+
+impl<T> CachedData<T> {
+    pub fn needs_update(&self, max_age: std::time::Duration) -> bool {
+        self.updated < (Utc::now() - max_age)
+    }
+}
+
 pub fn ron_pretty_config() -> ron::ser::PrettyConfig {
     ron::ser::PrettyConfig::default()
         .struct_names(true)
diff --git a/package.json b/package.json
index 793d0ca..e7dfd03 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "PowerTools",
-  "version": "2.0.0-alpha4",
+  "version": "2.0.0-alpha5",
   "description": "Power tweaks for power users",
   "scripts": {
     "build": "shx rm -rf dist && rollup -c",