Compare commits: 23f5f607cc...2ec89ee1eb

2 commits:
- 2ec89ee1eb
- d714b98dee

13 changed files with 264 additions and 81 deletions
backend/Cargo.lock (generated, 2 changes)
@@ -304,6 +304,7 @@ dependencies = [
  "iana-time-zone",
  "js-sys",
  "num-traits",
+ "serde",
  "wasm-bindgen",
  "windows-targets 0.48.5",
 ]
@@ -1172,6 +1173,7 @@ name = "powertools"
 version = "1.5.0-ng1"
 dependencies = [
  "async-trait",
+ "chrono",
  "clap",
  "community_settings_core",
  "libc",
@@ -28,12 +28,20 @@ simplelog = "0.12"
 
 # limits & driver functionality
 limits_core = { version = "3", path = "./limits_core" }
-community_settings_core = { version = "0.1", path = "./community_settings_core" }
 regex = "1"
+
+# steam deck libs
 smokepatio = { version = "0.1", features = [ "std" ], path = "../../smokepatio" }
 libc = "0.2"
+
+# online settings
+community_settings_core = { version = "0.1", path = "./community_settings_core" }
+chrono = { version = "0.4", features = [ "serde" ] }
+
+# hardware enablement
 #libryzenadj = { version = "0.14", path = "../../libryzenadj-rs-14" }
 libryzenadj = { version = "0.13" }
+
 # ureq's tls feature does not like musl targets
 ureq = { version = "2", features = ["json", "gzip", "brotli", "charset"], default-features = false, optional = true }
 
@@ -53,13 +61,13 @@ debug = false
 strip = true
 lto = true
 codegen-units = 1
+opt-level = 3
 
 [profile.docker]
 inherits = "release"
 debug = false
 strip = true
 lto = "thin"
-codegen-units = 16
-opt-level = 2
+codegen-units = 8
 debug-assertions = false
 overflow-checks = false
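The chrono dependency added above is pulled in with its serde feature; that is what lets the `DateTime<Utc>` timestamp on the cache entries introduced later in this diff derive `Serialize`/`Deserialize`. A minimal sketch of what that feature enables, assuming serde with derive and serde_json are available; the `Stamped` type is illustrative, not part of this change:

```rust
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

// With chrono's "serde" feature enabled, DateTime<Utc> fields can be
// (de)serialized just like any other derived field.
#[derive(Serialize, Deserialize, Debug)]
struct Stamped<T> {
    data: T,
    updated: DateTime<Utc>,
}

fn main() {
    let entry = Stamped { data: 42u32, updated: Utc::now() };
    let text = serde_json::to_string(&entry).expect("serialize");
    let back: Stamped<u32> = serde_json::from_str(&text).expect("deserialize");
    println!("{} -> {:?}", text, back);
}
```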
@@ -111,14 +111,17 @@ pub fn load_variant(
 ) -> impl Fn(super::ApiParameterType) -> super::ApiParameterType {
     let sender = Mutex::new(sender); // Sender is not Sync; this is required for safety
     let setter = move |variant: u64, variant_name: Option<String>| {
-        log::debug!("load_variant(variant: {}, variant_name: {:?})", variant, variant_name);
+        log::debug!(
+            "load_variant(variant: {}, variant_name: {:?})",
+            variant,
+            variant_name
+        );
         sender
             .lock()
             .unwrap()
             .send(ApiMessage::LoadVariant(
                 variant,
-                variant_name
-                    .unwrap_or_else(|| "".to_owned()),
+                variant_name.unwrap_or_else(|| "".to_owned()),
             ))
             .expect("load_variant send failed")
     };
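The `Mutex::new(sender)` line kept as context above exists because `std::sync::mpsc::Sender` is `Send` but not `Sync`, so a closure holding only a shared reference to it cannot be called from several threads; wrapping the sender in a `Mutex` restores `Sync`. A small standalone sketch of that pattern, with illustrative names rather than anything from this diff:

```rust
use std::sync::{mpsc::channel, Mutex};
use std::thread;

fn main() {
    let (tx, rx) = channel::<u64>();
    // Sender is Send but !Sync, so &Sender cannot be shared across threads.
    // Mutex<Sender> is Sync, so shared references to it can be.
    let tx = Mutex::new(tx);
    thread::scope(|s| {
        for i in 0..4u64 {
            let tx = &tx;
            s.spawn(move || {
                tx.lock().unwrap().send(i).expect("send failed");
            });
        }
    });
    drop(tx); // drop the last Sender so the receiver iterator can finish
    let mut received: Vec<u64> = rx.into_iter().collect();
    received.sort();
    println!("{:?}", received);
}
```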
@@ -3,17 +3,36 @@ use std::sync::{Arc, Mutex, RwLock};
 use usdpl_back::core::serdes::Primitive;
 use usdpl_back::AsyncCallable;
 
+use chrono::{offset::Utc, DateTime};
+use serde::{Deserialize, Serialize};
+
 use super::handler::{ApiMessage, GeneralMessage};
 
 const BASE_URL_FALLBACK: &'static str = "https://powertools.ngni.us";
 static BASE_URL: RwLock<Option<String>> = RwLock::new(None);
 
+const MAX_CACHE_DURATION: std::time::Duration =
+    std::time::Duration::from_secs(60 * 60 * 24 * 7 /* 7 days */);
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+struct CachedData<T> {
+    data: T,
+    updated: DateTime<Utc>,
+}
+
+type StoreCache =
+    std::collections::HashMap<u32, CachedData<Vec<community_settings_core::v1::Metadata>>>;
+
 pub fn set_base_url(base_url: String) {
-    *BASE_URL.write().expect("Failed to acquire write lock for store base url") = Some(base_url);
+    *BASE_URL
+        .write()
+        .expect("Failed to acquire write lock for store base url") = Some(base_url);
 }
 
 fn get_base_url() -> String {
-    BASE_URL.read().expect("Failed to acquire read lock for store base url")
+    BASE_URL
+        .read()
+        .expect("Failed to acquire read lock for store base url")
         .clone()
         .unwrap_or_else(|| BASE_URL_FALLBACK.to_owned())
 }
@@ -30,35 +49,124 @@ fn url_upload_config() -> String {
     format!("{}/api/setting", get_base_url())
 }
 
+fn cache_path() -> std::path::PathBuf {
+    crate::utility::settings_dir().join(crate::consts::WEB_SETTINGS_CACHE)
+}
+
+fn load_cache() -> StoreCache {
+    let path = cache_path();
+    let file = match std::fs::File::open(&path) {
+        Ok(f) => f,
+        Err(e) => {
+            log::warn!("Failed to open store cache {}: {}", path.display(), e);
+            return StoreCache::default();
+        }
+    };
+    let mut file = std::io::BufReader::new(file);
+    match ron::de::from_reader(&mut file) {
+        Ok(cache) => cache,
+        Err(e) => {
+            log::error!("Failed to parse store cache {}: {}", path.display(), e);
+            return StoreCache::default();
+        }
+    }
+}
+
+fn save_cache(cache: &StoreCache) {
+    let path = cache_path();
+    let file = match std::fs::File::create(&path) {
+        Ok(f) => f,
+        Err(e) => {
+            log::warn!("Failed to create store cache {}: {}", path.display(), e);
+            return;
+        }
+    };
+    let mut file = std::io::BufWriter::new(file);
+    if let Err(e) =
+        ron::ser::to_writer_pretty(&mut file, cache, crate::utility::ron_pretty_config())
+    {
+        log::error!("Failed to parse store cache {}: {}", path.display(), e);
+    }
+}
+
+fn get_maybe_cached(steam_app_id: u32) -> Vec<community_settings_core::v1::Metadata> {
+    let mut cache = load_cache();
+    if let Some(cached_result) = cache.get(&steam_app_id) {
+        if cached_result.updated < (Utc::now() - MAX_CACHE_DURATION) {
+            // cache needs update
+            if let Ok(result) = search_by_app_id_online(steam_app_id) {
+                cache.insert(
+                    steam_app_id,
+                    CachedData {
+                        data: result.clone(),
+                        updated: Utc::now(),
+                    },
+                );
+                save_cache(&cache);
+                result
+            } else {
+                // if all else fails, out of date results are better than no results
+                cached_result.data.to_owned()
+            }
+        } else {
+            // cache is ok, use it
+            cached_result.data.to_owned()
+        }
+    } else {
+        if let Ok(result) = search_by_app_id_online(steam_app_id) {
+            cache.insert(
+                steam_app_id,
+                CachedData {
+                    data: result.clone(),
+                    updated: Utc::now(),
+                },
+            );
+            save_cache(&cache);
+            result
+        } else {
+            Vec::with_capacity(0)
+        }
+    }
+}
+
+fn search_by_app_id_online(
+    steam_app_id: u32,
+) -> std::io::Result<Vec<community_settings_core::v1::Metadata>> {
+    let req_url = url_search_by_app_id(steam_app_id);
+    match ureq::get(&req_url).call() {
+        Ok(response) => {
+            let json_res: std::io::Result<Vec<community_settings_core::v1::Metadata>> =
+                response.into_json();
+            match json_res {
+                Ok(search_results) => Ok(search_results),
+                Err(e) => {
+                    log::error!("Cannot parse response from `{}`: {}", req_url, e);
+                    Err(std::io::Error::new(std::io::ErrorKind::InvalidData, e))
+                }
+            }
+        }
+        Err(e) => {
+            log::warn!("Cannot get search results from `{}`: {}", req_url, e);
+            Err(std::io::Error::new(
+                std::io::ErrorKind::ConnectionAborted,
+                e,
+            ))
+        }
+    }
+}
+
 /// Get search results web method
 pub fn search_by_app_id() -> impl AsyncCallable {
     let getter = move || {
         move |steam_app_id: u32| {
-            let req_url = url_search_by_app_id(steam_app_id);
-            match ureq::get(&req_url).call() {
-                Ok(response) => {
-                    let json_res: std::io::Result<Vec<community_settings_core::v1::Metadata>> =
-                        response.into_json();
-                    match json_res {
-                        Ok(search_results) => {
-                            // search results may be quite large, so let's do the JSON string conversion in the background (blocking) thread
-                            match serde_json::to_string(&search_results) {
-                                Err(e) => log::error!(
-                                    "Cannot convert search results from `{}` to JSON: {}",
-                                    req_url,
-                                    e
-                                ),
-                                Ok(s) => return s,
-                            }
-                        }
-                        Err(e) => {
-                            log::error!("Cannot parse response from `{}`: {}", req_url, e)
-                        }
-                    }
-                }
-                Err(e) => log::warn!("Cannot get search results from `{}`: {}", req_url, e),
-            }
-            "[]".to_owned()
+            let search_results = get_maybe_cached(steam_app_id);
+            match serde_json::to_string(&search_results) {
+                Err(e) => {
+                    log::error!("Cannot convert search results to JSON: {}", e);
+                    "[]".to_owned()
+                }
+                Ok(s) => s,
+            }
         }
     };
     super::async_utils::AsyncIsh {
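The caching layer added above follows a simple policy: entries younger than `MAX_CACHE_DURATION` are served directly, stale entries trigger a refresh but are still returned if the refresh fails, and a cache miss falls back to an empty list when the network call fails. A condensed sketch of that decision flow, using illustrative names (`is_fresh`, `lookup`, `fetch`) rather than the functions from this diff:

```rust
use chrono::{DateTime, Duration, Utc};

const MAX_AGE_DAYS: i64 = 7;

// An entry is fresh if it was updated within the last 7 days.
fn is_fresh(updated: DateTime<Utc>) -> bool {
    updated >= Utc::now() - Duration::days(MAX_AGE_DAYS)
}

fn lookup(
    cached: Option<(Vec<String>, DateTime<Utc>)>,
    fetch: impl Fn() -> Option<Vec<String>>,
) -> Vec<String> {
    match cached {
        // Fresh hit: no network traffic at all.
        Some((data, updated)) if is_fresh(updated) => data,
        // Stale hit: try to refresh, but stale data beats no data.
        Some((data, _)) => fetch().unwrap_or(data),
        // Miss: fetch, or return an empty list if the network call fails.
        None => fetch().unwrap_or_default(),
    }
}

fn main() {
    let stale = Some((vec!["old".to_owned()], Utc::now() - Duration::days(30)));
    // With a failing fetch, the stale entry is still returned.
    println!("{:?}", lookup(stale, || None));
}
```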
@@ -26,10 +26,7 @@ impl Args {
     }
 
     pub fn is_default(&self) -> bool {
-        self.port.is_none()
-            && self.log.is_none()
-            && !self.verbose
-            && self.op.is_none()
+        self.port.is_none() && self.log.is_none() && !self.verbose && self.op.is_none()
     }
 }
 
@@ -12,7 +12,9 @@ pub fn clean_up() -> Result<(), ()> {
     }
 }
 
-fn clean_up_io(directories: impl Iterator<Item=impl AsRef<std::path::Path>>) -> std::io::Result<()> {
+fn clean_up_io(
+    directories: impl Iterator<Item = impl AsRef<std::path::Path>>,
+) -> std::io::Result<()> {
     let results = directories.map(|dir| std::fs::remove_dir_all(dir));
     for res in results {
         res?;
@@ -1,8 +1,8 @@
+use std::io::Write;
 use std::path::{Path, PathBuf};
 use std::process::Command;
-use std::thread::{self, JoinHandle};
 use std::sync::mpsc::{channel, Sender};
-use std::io::Write;
+use std::thread::{self, JoinHandle};
 
 pub fn dump_sys_info() -> Result<(), ()> {
     let (tx, rx) = channel();
@@ -16,9 +16,7 @@ pub fn dump_sys_info() -> Result<(), ()> {
         join_handles.push(read_file(file, tx.clone()));
     }
 
-    let useful_commands = vec![
-        "dmidecode",
-    ];
+    let useful_commands = vec!["dmidecode"];
     for cmd in useful_commands.into_iter() {
         join_handles.push(execute_command(cmd, tx.clone()));
     }
@@ -29,38 +27,48 @@ pub fn dump_sys_info() -> Result<(), ()> {
         }
     }
 
-    let mut dump_file = std::fs::File::create("powertools_sys_dump.txt").expect("Failed to create dump file");
+    let mut dump_file =
+        std::fs::File::create("powertools_sys_dump.txt").expect("Failed to create dump file");
     for response in rx.into_iter() {
-        dump_file.write(
-            &format!("{} v{} ###### {} ######\n{}\n",
-                crate::consts::PACKAGE_NAME,
-                crate::consts::PACKAGE_VERSION,
-                response.0,
-                response.1.unwrap_or("[None]".to_owned())
-            ).into_bytes()
-        ).expect("Failed to write to dump file");
+        dump_file
+            .write(
+                &format!(
+                    "{} v{} ###### {} ######\n{}\n",
+                    crate::consts::PACKAGE_NAME,
+                    crate::consts::PACKAGE_VERSION,
+                    response.0,
+                    response.1.unwrap_or("[None]".to_owned())
+                )
+                .into_bytes(),
+            )
+            .expect("Failed to write to dump file");
     }
     Ok(())
 }
 
-fn read_file(file: impl AsRef<Path> + Send + 'static, tx: Sender<(String, Option<String>)>) -> JoinHandle<()> {
+fn read_file(
+    file: impl AsRef<Path> + Send + 'static,
+    tx: Sender<(String, Option<String>)>,
+) -> JoinHandle<()> {
     thread::spawn(move || {
         let file = file.as_ref();
-        tx.send(
-            (file.display().to_string(),
-            std::fs::read_to_string(file).ok())
-        ).expect("Failed to send file contents");
+        tx.send((
+            file.display().to_string(),
+            std::fs::read_to_string(file).ok(),
+        ))
+        .expect("Failed to send file contents");
     })
 }
 
 fn execute_command(command: &'static str, tx: Sender<(String, Option<String>)>) -> JoinHandle<()> {
     thread::spawn(move || {
-        tx.send(
-            (command.to_owned(), Command::new(command)
+        tx.send((
+            command.to_owned(),
+            Command::new(command)
                 .output()
                 .map(|out| String::from_utf8_lossy(&out.stdout).into_owned())
-                .ok()
-        )).expect("Failed to send command output");
-        }
-    )
+                .ok(),
+        ))
+        .expect("Failed to send command output");
+    })
 }
@@ -10,4 +10,6 @@ pub const DEFAULT_SETTINGS_VARIANT_NAME: &str = "Primary";
 pub const LIMITS_FILE: &str = "limits_cache.ron";
 pub const LIMITS_OVERRIDE_FILE: &str = "limits_override.ron";
 
+pub const WEB_SETTINGS_CACHE: &str = "store_cache.ron";
+
 pub const MESSAGE_SEEN_ID_FILE: &str = "seen_message.bin";
@@ -50,7 +50,11 @@ fn main() -> Result<(), ()> {
         },
         #[cfg(not(debug_assertions))]
         {
-            if args.verbose { LevelFilter::Debug } else { LevelFilter::Info }
+            if args.verbose {
+                LevelFilter::Debug
+            } else {
+                LevelFilter::Info
+            }
         },
         Default::default(),
         std::fs::File::create(&log_filepath).expect("Failed to create log file"),
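The `#[cfg(...)]` blocks above select the default log level at compile time: debug builds always log at `Debug`, while release builds only do so when verbose logging is requested. A minimal sketch of the same idea, assuming the `log` crate (which simplelog builds on); `default_level` is an illustrative helper, not a function from this diff:

```rust
use log::LevelFilter;

fn default_level(verbose: bool) -> LevelFilter {
    // Only one of the two cfg'd statements survives compilation.
    #[cfg(debug_assertions)]
    let level = LevelFilter::Debug;
    #[cfg(not(debug_assertions))]
    let level = if verbose {
        LevelFilter::Debug
    } else {
        LevelFilter::Info
    };
    let _ = verbose; // keep the parameter "used" in debug builds
    level
}

fn main() {
    println!("log level: {:?}", default_level(false));
}
```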
@@ -71,7 +71,13 @@ impl FileJson {
             if setting.name.is_empty() {
                 setting.name = format!("Variant {}", setting.variant);
             }
-            log::debug!("Inserting setting variant `{}` ({}) for app `{}` ({})", setting.name, setting.variant, file.name, app_id);
+            log::debug!(
+                "Inserting setting variant `{}` ({}) for app `{}` ({})",
+                setting.name,
+                setting.variant,
+                file.name,
+                app_id
+            );
             file.variants.insert(setting.variant, setting.clone());
             (file, setting)
         } else {

@@ -83,15 +89,24 @@ impl FileJson {
             if setting.name.is_empty() {
                 setting.name = format!("Variant {}", setting.variant);
             }
-            log::debug!("Creating new setting variant `{}` ({}) for app `{}` ({})", setting.name, setting.variant, app_name, app_id);
+            log::debug!(
+                "Creating new setting variant `{}` ({}) for app `{}` ({})",
+                setting.name,
+                setting.variant,
+                app_name,
+                app_id
+            );
             let mut setting_variants = HashMap::with_capacity(1);
             setting_variants.insert(setting.variant, setting.clone());
-            (Self {
-                version: 0,
-                app_id: app_id,
-                name: app_name,
-                variants: setting_variants,
-            }, setting)
+            (
+                Self {
+                    version: 0,
+                    app_id: app_id,
+                    name: app_name,
+                    variants: setting_variants,
+                },
+                setting,
+            )
         };
 
         file.save(path)?;
@@ -135,7 +135,8 @@ impl TGeneral for General {
                 setting: SettingVariant::General,
             })
             .map(|file| {
-                file.0.variants
+                file.0
+                    .variants
                     .into_iter()
                     .map(|(id, conf)| crate::api::VariantInfo {
                         id: id.to_string(),

@@ -147,7 +148,11 @@ impl TGeneral for General {
     }
 
     fn get_variant_info(&self) -> crate::api::VariantInfo {
-        log::debug!("Current variant `{}` ({})", self.variant_name, self.variant_id);
+        log::debug!(
+            "Current variant `{}` ({})",
+            self.variant_name,
+            self.variant_id
+        );
         crate::api::VariantInfo {
             id: self.variant_id.to_string(),
             name: self.variant_name.clone(),

@@ -260,7 +265,10 @@ impl Settings {
         let mut valid_ids: Vec<&u64> = settings_file.variants.keys().collect();
         valid_ids.sort();
         if let Some(id) = valid_ids.get(0) {
-            Ok(settings_file.variants.get(id).expect("variant id key magically disappeared"))
+            Ok(settings_file
+                .variants
+                .get(id)
+                .expect("variant id key magically disappeared"))
         } else {
             Err(SettingError {
                 msg: format!(

@@ -293,7 +301,11 @@ impl Settings {
         let json_path = crate::utility::settings_dir().join(&filename);
         if json_path.exists() {
             if variant == u64::MAX {
-                log::debug!("Creating new variant `{}` in existing settings file {}", variant_name, json_path.display());
+                log::debug!(
+                    "Creating new variant `{}` in existing settings file {}",
+                    variant_name,
+                    json_path.display()
+                );
                 self.create_and_load_variant(&json_path, app_id, variant_name)?;
             } else {
                 let file_json = FileJson::open(&json_path).map_err(|e| SettingError {

@@ -336,7 +348,11 @@ impl Settings {
             }
             *self.general.persistent() = false;
             if variant == u64::MAX {
-                log::debug!("Creating new variant `{}` in new settings file {}", variant_name, json_path.display());
+                log::debug!(
+                    "Creating new variant `{}` in new settings file {}",
+                    variant_name,
+                    json_path.display()
+                );
                 self.create_and_load_variant(&json_path, app_id, variant_name)?;
             }
         }

@@ -345,7 +361,12 @@ impl Settings {
         Ok(*self.general.persistent())
     }
 
-    fn create_and_load_variant(&mut self, json_path: &PathBuf, app_id: u64, variant_name: String) -> Result<(), SettingError> {
+    fn create_and_load_variant(
+        &mut self,
+        json_path: &PathBuf,
+        app_id: u64,
+        variant_name: String,
+    ) -> Result<(), SettingError> {
         *self.general.persistent() = true;
         self.general.variant_id(u64::MAX);
         self.general.variant_name(variant_name.clone());
@@ -196,9 +196,20 @@ impl Gpu {
         let is_lcd = matches!(self.variant, super::Model::LCD);
         let is_lock_feature_enabled = self.limits.extras.quirks.contains("pp_dpm_fclk-static");
 
-        if (is_oled && self.limits.extras.quirks.contains("pp_dpm_fclk-reversed-on-OLED"))
-            || (is_lcd && self.limits.extras.quirks.contains("pp_dpm_fclk-reversed-on-LCD"))
-            || self.limits.extras.quirks.contains("pp_dpm_fclk-reversed") {
+        if (is_oled
+            && self
+                .limits
+                .extras
+                .quirks
+                .contains("pp_dpm_fclk-reversed-on-OLED"))
+            || (is_lcd
+                && self
+                    .limits
+                    .extras
+                    .quirks
+                    .contains("pp_dpm_fclk-reversed-on-LCD"))
+            || self.limits.extras.quirks.contains("pp_dpm_fclk-reversed")
+        {
             let options_count = self
                 .sysfs_card
                 .read_value(GPU_MEMORY_DOWNCLOCK_ATTRIBUTE.to_owned())

@@ -208,12 +219,13 @@ impl Gpu {
             if is_lock_feature_enabled {
                 format!("{}\n", modifier - max_val)
             } else {
                 if max_val == 0 as u64 {
                     format!("{}\n", modifier)
                 } else {
                     use std::fmt::Write;
                     let mut payload = format!("{}", modifier - max_val);
-                    for i in (0..max_val).rev(/* rev() isn't necessary but it creates a nicer (ascending) order */) {
+                    for i in (0..max_val).rev(/* rev() isn't necessary but it creates a nicer (ascending) order */)
+                    {
                         write!(payload, " {}", modifier - i)
                             .expect("Failed to write to memory payload (should be infallible!?)");
                     }
@@ -174,6 +174,7 @@ mod generate {
             );
             let savefile = crate::persist::FileJson {
                 version: 0,
+                app_id: 0,
                 name: crate::consts::DEFAULT_SETTINGS_NAME.to_owned(),
                 variants: mini_variants,
             };