Add basic support for store plugin query strings
parent fbfcd89880
commit 512e03d9c7
9 changed files with 573 additions and 671 deletions
Cargo.lock (generated): 1177 lines changed
File diff suppressed because it is too large
@@ -29,8 +29,3 @@ chrono = { version = "0.4" }
 
 # cli
 clap = { version = "4.0", features = ["derive"] }
-
-[workspace]
-include = [
-    "decky_api"
-]

@@ -9,7 +9,7 @@ pub struct CliArgs {
     /// Cache results for a period
     #[arg(name = "cache", long)]
     pub cache_duration: Option<i64>,
-    /// Local server port (default: 222252)
+    /// Local server port (default: 22252)
     #[arg(name = "port", short, long)]
     pub server_port: Option<u16>,
     /// Storage adapter

@@ -5,7 +5,10 @@ use actix_web::{get, web, Responder};
 use crate::storage::IStorage;
 
 #[get("/plugins")]
-pub async fn decky_plugins(data: actix_web::web::Data<Box<dyn IStorage>>) -> impl Responder {
-    let plugins: StorePluginList = web::block(move || data.plugins()).await.unwrap();
+pub async fn decky_plugins(req: actix_web::HttpRequest, data: actix_web::web::Data<Box<dyn IStorage>>) -> impl Responder {
+    let query_string = req.query_string().to_owned();
+    log::debug!("Got request with uri {}", req.uri());
+    let plugins: StorePluginList = web::block(move || data.plugins(&query_string)).await.unwrap();
+    log::debug!("Got {} plugin results", plugins.len());
     web::Json(plugins)
 }

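Note (not part of the diff): actix-web's HttpRequest::query_string() returns the raw query without the leading '?', and that exact string is what IStorage::plugins() now receives. A minimal sketch using actix-web's test helpers; the "/plugins?tags=root&hidden=1" query is a made-up example, not a parameter this project defines.

#[cfg(test)]
mod query_forwarding_sketch {
    #[test]
    fn raw_query_is_forwarded_verbatim() {
        // Build a fake request the same way the real handler would receive it.
        let req = actix_web::test::TestRequest::with_uri("/plugins?tags=root&hidden=1")
            .to_http_request();
        // query_string() strips the '?' but otherwise leaves the query untouched.
        assert_eq!(req.query_string(), "tags=root&hidden=1");
    }
}
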
@@ -49,27 +49,43 @@ impl<T: Clone> Cached<T> {
 
 pub struct CachedStorage<S: AsRef<dyn IStorage> + Send + Sync> {
     fallback: S,
-    plugins_cache: Cached<StorePluginList>,
+    plugins_cache: RwLock<HashMap<String, Cached<StorePluginList>>>,
     statistics_cache: Cached<HashMap<String, u64>>,
     artifacts_cache: Cached<HashMap<String, Bytes>>,
     images_cache: Cached<HashMap<String, Bytes>>,
+    ttl: i64,
 }
 
 impl<S: AsRef<dyn IStorage> + Send + Sync> CachedStorage<S> {
     pub fn new(duration: i64, inner: S) -> Self {
         Self {
-            plugins_cache: Cached::new(inner.as_ref().plugins(), duration),
+            plugins_cache: RwLock::new(HashMap::new()),
             statistics_cache: Cached::new(inner.as_ref().get_statistics(), duration),
             artifacts_cache: Cached::new(HashMap::new(), duration),
             images_cache: Cached::new(HashMap::new(), duration),
             fallback: inner,
+            ttl: duration
         }
     }
 }
 
 impl<S: AsRef<dyn IStorage> + Send + Sync> IStorage for CachedStorage<S> {
-    fn plugins(&self) -> StorePluginList {
-        self.plugins_cache.get(|| self.fallback.as_ref().plugins())
+    fn plugins(&self, query: &str) -> StorePluginList {
+        let is_already_cached = self.plugins_cache.read().expect("Failed to acquire plugins_cache read lock")
+            .contains_key(query);
+        if is_already_cached
+        {
+            let lock = self.plugins_cache.read()
+                .expect("Failed to acquire plugins_cache read lock");
+            let cached_result = lock.get(query)
+                .unwrap(); // cannot fail (already checked that exists, and existing entries are never removed)
+            cached_result.get(|| self.fallback.as_ref().plugins(query))
+        } else {
+            let result_to_cache = self.fallback.as_ref().plugins(query);
+            let mut lock = self.plugins_cache.write().expect("Failed to acquire plugins_cache write lock");
+            lock.insert(query.to_owned(), Cached::new(result_to_cache.clone(), self.ttl));
+            result_to_cache
+        }
     }
 
     fn get_artifact(&self, name: &str, version: &str, hash: &str) -> Result<bytes::Bytes, std::io::Error> {

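Note on the caching pattern above (not part of the diff): the read-lock check and the write-lock insert are separate critical sections, so two simultaneous first requests for the same query can both call the fallback store; the later insert simply overwrites the earlier one, which is harmless duplicate work. Also, because the raw query string is the cache key, "a=1&b=2" and "b=2&a=1" are cached as separate entries. A standalone sketch of the same per-key idea using HashMap::entry under a single write lock, with plain Strings standing in for StorePluginList; the trade-off is that the lock is held while the fallback value is produced.

use std::collections::HashMap;
use std::sync::RwLock;

struct PerQueryCache {
    entries: RwLock<HashMap<String, String>>,
}

impl PerQueryCache {
    // Look up the entry for `query`, producing it with `fetch` on a miss.
    fn get_or_fetch(&self, query: &str, fetch: impl FnOnce() -> String) -> String {
        let mut lock = self.entries.write().expect("plugins cache lock poisoned");
        lock.entry(query.to_owned()).or_insert_with(fetch).clone()
    }
}

fn main() {
    let cache = PerQueryCache { entries: RwLock::new(HashMap::new()) };
    let first = cache.get_or_fetch("tags=root", || "fetched from fallback".to_owned());
    // The second call for the same key never reaches the fallback closure.
    let second = cache.get_or_fetch("tags=root", || unreachable!("already cached"));
    assert_eq!(first, second);
}
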
@@ -114,7 +114,7 @@ impl FileStorage {
             let extension = entry_path.extension().unwrap().to_string_lossy().into_owned();
             if extension == "zip" {
                 let version_name = entry_path.file_stem().unwrap().to_string_lossy().into_owned();
-                let hash_str = sha256::try_digest(entry_path.as_ref())?;
+                let hash_str = sha256::try_digest(&entry_path)?;
                 let artifact_url = format!("{}/plugins/{}/{}/{}.zip", self.domain_root, plugin_name, version_name, hash_str);
                 versions.push(StorePluginVersion {
                     name: version_name,

@@ -137,7 +137,8 @@ impl FileStorage {
 }
 
 impl IStorage for FileStorage {
-    fn plugins(&self) -> StorePluginList {
+    fn plugins(&self, _query: &str) -> StorePluginList {
+        // TODO handle query string
        match self.read_all_plugins() {
             Err(e) => {
                 log::error!("Plugins read error: {}", e);

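One way the TODO above could eventually be handled (purely illustrative, not part of the diff): parse the raw query string into key/value pairs and filter the locally read plugins before returning them. The `name` parameter and the plugin names below are hypothetical examples, not anything this project defines.

// Hypothetical helper: keeps only names containing the value of a `name=` pair,
// or everything when no such pair is present in the raw query string.
fn filter_names_by_query(names: Vec<String>, query: &str) -> Vec<String> {
    let wanted = query
        .split('&')
        .filter_map(|pair| pair.split_once('='))
        .find(|(key, _)| *key == "name")
        .map(|(_, value)| value.to_lowercase());
    match wanted {
        Some(needle) => names
            .into_iter()
            .filter(|n| n.to_lowercase().contains(needle.as_str()))
            .collect(),
        None => names,
    }
}

fn main() {
    let names = vec!["Alpha Tools".to_owned(), "Beta Loader".to_owned()];
    assert_eq!(filter_names_by_query(names, "name=alpha"), vec!["Alpha Tools".to_owned()]);
}
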
@@ -1,5 +1,5 @@
 pub trait IStorage: Send + Sync {
-    fn plugins(&self) -> decky_api::StorePluginList;
+    fn plugins(&self, query: &str) -> decky_api::StorePluginList;
 
     fn get_artifact(&self, _name: &str, _version: &str, _hash: &str) -> Result<bytes::Bytes, std::io::Error> {
         Err(std::io::Error::new(std::io::ErrorKind::InvalidInput, "Artifact downloading not supported"))

@@ -17,7 +17,7 @@ pub trait IStorage: Send + Sync {
 pub struct EmptyStorage;
 
 impl IStorage for EmptyStorage {
-    fn plugins(&self) -> decky_api::StorePluginList {
+    fn plugins(&self, _query: &str) -> decky_api::StorePluginList {
         Vec::new()
     }
 }

@@ -71,14 +71,15 @@ impl<S: AsRef<dyn IStorage> + Send + Sync> MergedStorage<S> {
 }
 
 impl<S: AsRef<dyn IStorage> + Send + Sync> IStorage for MergedStorage<S> {
-    fn plugins(&self) -> StorePluginList {
+    fn plugins(&self, query: &str) -> StorePluginList {
         let mut merged_plugins = HashMap::new();
         log::debug!("Acquiring store map write locks");
         let mut arti_lock = self.store_artifact_map.write().expect("Failed to acquire store_artifact_map write lock");
         let mut img_lock = self.store_image_map.write().expect("Failed to acquire store_image_map write lock");
         for (index, store) in self.stores.iter().enumerate() {
-            let plugins = store.as_ref().plugins();
-            // re-build store mappins
+            log::debug!("Handling store #{}", index);
+            let plugins = store.as_ref().plugins(query);
+            // re-build store mappings
             for plugin in &plugins {
                 for version in &plugin.versions {
                     let hashable_ver = HashablePluginVersion {
@@ -96,6 +97,7 @@ impl<S: AsRef<dyn IStorage> + Send + Sync> IStorage for MergedStorage<S> {
                 }
             }
             Self::merge_plugins_into(&mut merged_plugins, plugins);
+            log::debug!("Completed store #{}", index);
         }
         Self::map_to_vec(merged_plugins)
     }

@@ -17,16 +17,16 @@ impl ProxiedStorage {
         }
     }
 
-    fn plugins_url(&self) -> String {
-        format!("{}/plugins", self.store_url)
+    fn plugins_url(&self, query: &str) -> String {
+        format!("{}/plugins?{}", self.store_url, query)
     }
 
     fn default_artifact_url(ver: &StorePluginVersion) -> String {
         format!("https://cdn.tzatzikiweeb.moe/file/steam-deck-homebrew/versions/{}.zip", ver.hash)
     }
 
-    fn proxy_plugins(&self) -> StorePluginList {
-        let url = self.plugins_url();
+    fn proxy_plugins(&self, query: &str) -> StorePluginList {
+        let url = self.plugins_url(query);
         match self.agent.get(&url).call() {
             Err(e) => {
                 log::error!("Plugins proxy error for {}: {}", url, e);

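Small observation about plugins_url above (not a change made in this commit): when the incoming query string is empty, the formatted URL ends in a bare '?'. Most servers ignore that, but a standalone sketch of a guarded version looks like this; the example.invalid URLs and the tags=root query are placeholders.

// Only append the '?' separator when there is actually a query to forward.
fn plugins_url(store_url: &str, query: &str) -> String {
    if query.is_empty() {
        format!("{}/plugins", store_url)
    } else {
        format!("{}/plugins?{}", store_url, query)
    }
}

fn main() {
    assert_eq!(plugins_url("https://example.invalid", ""), "https://example.invalid/plugins");
    assert_eq!(
        plugins_url("https://example.invalid", "tags=root"),
        "https://example.invalid/plugins?tags=root"
    );
}
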
@@ -46,8 +46,8 @@ impl ProxiedStorage {
 }
 
 impl IStorage for ProxiedStorage {
-    fn plugins(&self) -> StorePluginList {
-        let mut proxy = self.proxy_plugins();
+    fn plugins(&self, query: &str) -> StorePluginList {
+        let mut proxy = self.proxy_plugins(query);
         for plugin in &mut proxy {
             for version in &mut plugin.versions {
                 if version.artifact.is_none() {
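For completeness, an illustrative client call against the local server (not part of the diff): the port matches the CLI default mentioned above, while the tags=example query parameter is made up. It assumes the ureq crate, which the proxy agent above appears to use.

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Fetch the plugin list, passing an arbitrary example query string through.
    let body = ureq::get("http://127.0.0.1:22252/plugins?tags=example")
        .call()?
        .into_string()?;
    println!("{}", body);
    Ok(())
}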