Mirror of https://github.com/Drop-OSS/drop-app.git (synced 2025-11-13 16:22:43 +10:00)
Merge branch 'develop' into 139-add-and-resolve-clippy-lints-to-prevent-unwrap-and-expect-functions
Signed-off-by: quexeky <git@quexeky.dev>
src-tauri/Cargo.lock (generated): 19 changes
@@ -1289,6 +1289,7 @@ dependencies = [
"atomic-instant-full",
"bitcode",
"boxcar",
"bytes",
"cacache 13.1.0",
"chrono",
"deranged",

@@ -1296,6 +1297,7 @@ dependencies = [
"droplet-rs",
"dynfmt",
"filetime",
"futures-core",
"futures-lite",
"gethostname",
"hex 0.4.3",

@@ -1339,6 +1341,7 @@ dependencies = [
"tempfile",
"throttle_my_fn",
"tokio",
"tokio-util",
"umu-wrapper-lib",
"url",
"urlencoding",

@@ -3118,13 +3121,13 @@ dependencies = [

[[package]]
name = "native_model"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7050d759e3da6673361dddda4f4a743492279dd2c6484a21fbee0a8278620df0"
version = "0.6.4"
source = "git+https://github.com/Drop-OSS/native_model.git#a91b422cbd53116df1f20b2459fb3d8257458bfd"
dependencies = [
"anyhow",
"bincode",
"doc-comment",
"log",
"native_model_macro",
"rmp-serde",
"serde",

@@ -3134,10 +3137,10 @@ dependencies = [

[[package]]
name = "native_model_macro"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1577a0bebf5ed1754e240baf5d9b1845f51e598b20600aa894f55e11cd20cc6c"
version = "0.6.4"
source = "git+https://github.com/Drop-OSS/native_model.git#a91b422cbd53116df1f20b2459fb3d8257458bfd"
dependencies = [
"log",
"proc-macro2",
"quote",
"syn 2.0.101",

@@ -6083,9 +6086,9 @@ dependencies = [

[[package]]
name = "tokio-util"
version = "0.7.15"
version = "0.7.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df"
checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5"
dependencies = [
"bytes",
"futures-core",
@@ -65,7 +65,7 @@ whoami = "1.6.0"
filetime = "0.2.25"
walkdir = "2.5.0"
known-folders = "1.2.0"
native_model = { version = "0.6.1", features = ["rmp_serde_1_3"] }
native_model = { version = "0.6.4", features = ["rmp_serde_1_3"], git = "https://github.com/Drop-OSS/native_model.git"}
tauri-plugin-opener = "2.4.0"
bitcode = "0.6.6"
reqwest-websocket = "0.5.0"

@@ -73,6 +73,9 @@ futures-lite = "2.6.0"
page_size = "0.6.0"
sysinfo = "0.36.1"
humansize = "2.1.3"
tokio-util = { version = "0.7.16", features = ["io"] }
futures-core = "0.3.31"
bytes = "1.10.1"
# tailscale = { path = "./tailscale" }

[dependencies.dynfmt]

@@ -106,7 +109,15 @@ features = ["other_errors"] # You can also use "yaml_enc" or "bin_enc"
[dependencies.reqwest]
version = "0.12.22"
default-features = false
features = ["json", "http2", "blocking", "rustls-tls", "native-tls-alpn", "rustls-tls-native-roots"]
features = [
"json",
"http2",
"blocking",
"rustls-tls",
"native-tls-alpn",
"rustls-tls-native-roots",
"stream",
]

[dependencies.serde]
version = "1"
@@ -8,7 +8,6 @@ use std::{

use chrono::Utc;
use log::{debug, error, info, warn};
use native_model::{Decode, Encode};
use rustbreak::{DeSerError, DeSerializer, PathDatabase, RustbreakError};
use serde::{Serialize, de::DeserializeOwned};
use url::Url;

@@ -17,11 +16,16 @@ use crate::DB;

use super::models::data::Database;

#[cfg(not(debug_assertions))]
static DATA_ROOT_PREFIX: &'static str = "drop";
#[cfg(debug_assertions)]
static DATA_ROOT_PREFIX: &str = "drop-debug";

pub static DATA_ROOT_DIR: LazyLock<Arc<PathBuf>> = LazyLock::new(|| {
Arc::new(
dirs::data_dir()
.expect("Failed to get data dir")
.join("drop"),
.join(DATA_ROOT_PREFIX),
)
});

@@ -33,7 +37,7 @@ impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
for DropDatabaseSerializer
{
fn serialize(&self, val: &T) -> rustbreak::error::DeSerResult<Vec<u8>> {
native_model::rmp_serde_1_3::RmpSerde::encode(val)
native_model::encode(val)
.map_err(|e| DeSerError::Internal(e.to_string()))
}

@@ -41,7 +45,7 @@ impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
let mut buf = Vec::new();
s.read_to_end(&mut buf)
.map_err(|e| rustbreak::error::DeSerError::Other(e.into()))?;
let val = native_model::rmp_serde_1_3::RmpSerde::decode(buf)
let (val, _version) = native_model::decode(buf)
.map_err(|e| DeSerError::Internal(e.to_string()))?;
Ok(val)
}
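For orientation, here is a minimal, self-contained sketch (not part of this commit; the type names are hypothetical) of the pattern the serializer above now leans on: `native_model::encode` wraps the payload with the model's id/version header, and `native_model::decode` returns the value plus the version it was actually stored as, upgrading older data through the declared `from =` chain.

    use native_model::native_model;
    use serde::{Deserialize, Serialize};

    // Hypothetical v1 of a record.
    #[derive(Serialize, Deserialize, Clone)]
    #[native_model(id = 100, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
    struct ExampleV1 {
        name: String,
    }

    // Hypothetical v2, reachable from v1 via the From impls below.
    #[derive(Serialize, Deserialize, Clone)]
    #[native_model(id = 100, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from = ExampleV1)]
    struct ExampleV2 {
        name: String,
        enabled: bool,
    }

    impl From<ExampleV1> for ExampleV2 {
        fn from(v: ExampleV1) -> Self {
            Self { name: v.name, enabled: false }
        }
    }
    impl From<ExampleV2> for ExampleV1 {
        fn from(v: ExampleV2) -> Self {
            Self { name: v.name }
        }
    }

    fn round_trip() -> Result<(), String> {
        // Bytes written by an older build that only knew ExampleV1...
        let old_bytes = native_model::encode(&ExampleV1 { name: "drop".into() }).map_err(|e| e.to_string())?;
        // ...decoded into the current type; the second tuple element is the stored version.
        let (current, version) = native_model::decode::<ExampleV2>(old_bytes).map_err(|e| e.to_string())?;
        assert_eq!(version, 1);
        assert!(!current.enabled);
        Ok(())
    }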
@@ -1,17 +1,12 @@
/**
 * NEXT BREAKING CHANGE
 *
 * UPDATE DATABASE TO USE RPMSERDENAMED
 *
 * WE CAN'T DELETE ANY FIELDS
 */
pub mod data {
use std::path::PathBuf;
use std::{hash::Hash, path::PathBuf};

use native_model::native_model;
use serde::{Deserialize, Serialize};

// NOTE: Within each version, you should NEVER use these types.
// Declare it using the actual version that it is from, i.e. v1::Settings rather than just Settings from here

pub type GameVersion = v1::GameVersion;
pub type Database = v3::Database;
pub type Settings = v1::Settings;

@@ -19,14 +14,29 @@ pub mod data {

pub type GameDownloadStatus = v2::GameDownloadStatus;
pub type ApplicationTransientStatus = v1::ApplicationTransientStatus;
/**
 * Need to be universally accessible by the ID, and the version is just a couple sprinkles on top
 */
pub type DownloadableMetadata = v1::DownloadableMetadata;
pub type DownloadType = v1::DownloadType;
pub type DatabaseApplications = v2::DatabaseApplications;
pub type DatabaseCompatInfo = v2::DatabaseCompatInfo;
// pub type DatabaseCompatInfo = v2::DatabaseCompatInfo;

use std::collections::HashMap;

pub mod v1 {
impl PartialEq for DownloadableMetadata {
fn eq(&self, other: &Self) -> bool {
self.id == other.id && self.download_type == other.download_type
}
}
impl Hash for DownloadableMetadata {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.id.hash(state);
self.download_type.hash(state);
}
}

mod v1 {
use crate::process::process_manager::Platform;
use serde_with::serde_as;
use std::{collections::HashMap, path::PathBuf};

@@ -116,6 +126,7 @@ pub mod data {
// Stuff that shouldn't be synced to disk
#[derive(Clone, Serialize, Deserialize, Debug)]
pub enum ApplicationTransientStatus {
Queued { version_name: String },
Downloading { version_name: String },
Uninstalling {},
Updating { version_name: String },

@@ -144,7 +155,7 @@ pub mod data {
}

#[native_model(id = 7, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Clone)]
#[derive(Debug, Eq, PartialOrd, Ord, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DownloadableMetadata {
pub id: String,

@@ -174,22 +185,21 @@ pub mod data {
}
}

pub mod v2 {
mod v2 {
use std::{collections::HashMap, path::PathBuf};

use serde_with::serde_as;

use super::{
ApplicationTransientStatus, DatabaseAuth, Deserialize, DownloadableMetadata,
GameVersion, Serialize, Settings, native_model, v1,
Deserialize, Serialize, native_model, v1,
};

#[native_model(id = 1, version = 2, with = native_model::rmp_serde_1_3::RmpSerde)]
#[native_model(id = 1, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from = v1::Database)]
#[derive(Serialize, Deserialize, Clone, Default)]
pub struct Database {
#[serde(default)]
pub settings: Settings,
pub auth: Option<DatabaseAuth>,
pub settings: v1::Settings,
pub auth: Option<v1::DatabaseAuth>,
pub base_url: String,
pub applications: v1::DatabaseApplications,
#[serde(skip)]

@@ -198,7 +208,7 @@ pub mod data {
pub compat_info: Option<DatabaseCompatInfo>,
}

#[native_model(id = 8, version = 2, with = native_model::rmp_serde_1_3::RmpSerde)]
#[native_model(id = 9, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
#[derive(Serialize, Deserialize, Clone, Default)]

pub struct DatabaseCompatInfo {

@@ -221,7 +231,7 @@ pub mod data {
// Strings are version names for a particular game
#[derive(Serialize, Clone, Deserialize, Debug)]
#[serde(tag = "type")]
#[native_model(id = 5, version = 2, with = native_model::rmp_serde_1_3::RmpSerde)]
#[native_model(id = 5, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from = v1::GameDownloadStatus)]
pub enum GameDownloadStatus {
Remote {},
SetupRequired {

@@ -261,17 +271,17 @@ pub mod data {
#[serde_as]
#[derive(Serialize, Clone, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
#[native_model(id = 3, version = 2, with = native_model::rmp_serde_1_3::RmpSerde)]
#[native_model(id = 3, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from=v1::DatabaseApplications)]
pub struct DatabaseApplications {
pub install_dirs: Vec<PathBuf>,
// Guaranteed to exist if the game also exists in the app state map
pub game_statuses: HashMap<String, GameDownloadStatus>,

pub game_versions: HashMap<String, HashMap<String, GameVersion>>,
pub installed_game_version: HashMap<String, DownloadableMetadata>,
pub game_versions: HashMap<String, HashMap<String, v1::GameVersion>>,
pub installed_game_version: HashMap<String, v1::DownloadableMetadata>,

#[serde(skip)]
pub transient_statuses: HashMap<DownloadableMetadata, ApplicationTransientStatus>,
pub transient_statuses: HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
}
impl From<v1::DatabaseApplications> for DatabaseApplications {
fn from(value: v1::DatabaseApplications) -> Self {

@@ -293,21 +303,21 @@ pub mod data {
use std::path::PathBuf;

use super::{
DatabaseApplications, DatabaseAuth, DatabaseCompatInfo, Deserialize, Serialize,
Settings, native_model, v2,
Deserialize, Serialize,
native_model, v2, v1,
};
#[native_model(id = 1, version = 3, with = native_model::rmp_serde_1_3::RmpSerde)]
#[native_model(id = 1, version = 3, with = native_model::rmp_serde_1_3::RmpSerde, from = v2::Database)]
#[derive(Serialize, Deserialize, Clone, Default)]
pub struct Database {
#[serde(default)]
pub settings: Settings,
pub auth: Option<DatabaseAuth>,
pub settings: v1::Settings,
pub auth: Option<v1::DatabaseAuth>,
pub base_url: String,
pub applications: DatabaseApplications,
pub applications: v2::DatabaseApplications,
#[serde(skip)]
pub prev_database: Option<PathBuf>,
pub cache_dir: PathBuf,
pub compat_info: Option<DatabaseCompatInfo>,
pub compat_info: Option<v2::DatabaseCompatInfo>,
}

impl From<v2::Database> for Database {

@@ -347,5 +357,6 @@ pub mod data {
compat_info: None,
}
}

}
}
@@ -5,10 +5,10 @@ use log::warn;
use crate::{
database::{
db::borrow_db_mut_checked,
models::data::v1::{DownloadType, DownloadableMetadata},
models::data::{DownloadType, DownloadableMetadata},
},
games::{
downloads::drop_data::{v1::DropData, DROP_DATA_PATH},
downloads::drop_data::{DropData, DROP_DATA_PATH},
library::set_partially_installed_db,
},
};
@@ -11,7 +11,7 @@ use log::{debug, error, info, warn};
use tauri::{AppHandle, Emitter};

use crate::{
app_emit, database::models::data::DownloadableMetadata, error::application_download_error::ApplicationDownloadError, games::library::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent}, lock, send
app_emit, database::models::data::DownloadableMetadata, download_manager::download_manager_frontend::DownloadStatus, error::application_download_error::ApplicationDownloadError, games::library::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent}, lock, send
};

use super::{

@@ -73,7 +73,6 @@ pub struct DownloadManagerBuilder {
status: Arc<Mutex<DownloadManagerStatus>>,
app_handle: AppHandle,

current_download_agent: Option<DownloadAgent>, // Should be the only download agent in the map with the "Go" flag
current_download_thread: Mutex<Option<JoinHandle<()>>>,
active_control_flag: Option<DownloadThreadControl>,
}

@@ -93,7 +92,6 @@ impl DownloadManagerBuilder {
progress: active_progress.clone(),
app_handle,

current_download_agent: None,
current_download_thread: Mutex::new(None),
active_control_flag: None,
};

@@ -119,7 +117,6 @@ impl DownloadManagerBuilder {
fn cleanup_current_download(&mut self) {
self.active_control_flag = None;
*lock!(self.progress) = None;
self.current_download_agent = None;

let mut download_thread_lock = lock!(self.current_download_thread);

@@ -195,7 +192,7 @@ impl DownloadManagerBuilder {
return;
}

download_agent.on_initialised(&self.app_handle);
download_agent.on_queued(&self.app_handle);
self.download_queue.append(meta.clone());
self.download_agent_registry.insert(meta, download_agent);

@@ -212,19 +209,13 @@ impl DownloadManagerBuilder {
return;
}

if self.current_download_agent.is_some()
&& self.download_queue.read().front().unwrap()
== &self.current_download_agent.as_ref().unwrap().metadata()
{
return;
}

debug!("current download queue: {:?}", self.download_queue.read());

// Should always be Some if the above two statements keep going
let agent_data = self.download_queue.read().front().unwrap().clone();

info!("starting download for {agent_data:?}");
let agent_data = if let Some(agent_data) = self.download_queue.read().front() {
agent_data.clone()
} else {
return;
};

let download_agent = self
.download_agent_registry

@@ -232,8 +223,22 @@ impl DownloadManagerBuilder {
.unwrap()
.clone();

let status = download_agent.status();

// This download is already going
if status != DownloadStatus::Queued {
return;
}

// Ensure all others are marked as queued
for agent in self.download_agent_registry.values() {
if agent.metadata() != agent_data && agent.status() != DownloadStatus::Queued {
agent.on_queued(&self.app_handle);
}
}

info!("starting download for {agent_data:?}");
self.active_control_flag = Some(download_agent.control_flag());
self.current_download_agent = Some(download_agent.clone());

let sender = self.sender.clone();

@@ -304,8 +309,8 @@ impl DownloadManagerBuilder {
}
fn manage_completed_signal(&mut self, meta: DownloadableMetadata) {
debug!("got signal Completed");
if let Some(interface) = &self.current_download_agent
&& interface.metadata() == meta
if let Some(interface) = self.download_queue.read().front()
&& interface == &meta
{
self.remove_and_cleanup_front_download(&meta);
}

@@ -315,11 +320,13 @@ impl DownloadManagerBuilder {
}
fn manage_error_signal(&mut self, error: ApplicationDownloadError) {
debug!("got signal Error");
if let Some(current_agent) = self.current_download_agent.clone() {
if let Some(metadata) = self.download_queue.read().front()
&& let Some(current_agent) = self.download_agent_registry.get(metadata)
{
current_agent.on_error(&self.app_handle, &error);

self.stop_and_wait_current_download();
self.remove_and_cleanup_front_download(&current_agent.metadata());
self.remove_and_cleanup_front_download(metadata);
}
self.push_ui_queue_update();
self.set_status(DownloadManagerStatus::Error);

@@ -327,32 +334,23 @@ impl DownloadManagerBuilder {
fn manage_cancel_signal(&mut self, meta: &DownloadableMetadata) {
debug!("got signal Cancel");

if let Some(current_download) = &self.current_download_agent {
if &current_download.metadata() == meta {
self.set_status(DownloadManagerStatus::Paused);
current_download.on_cancelled(&self.app_handle);
self.stop_and_wait_current_download();
// If the current download is the one we're tryna cancel
if let Some(current_metadata) = self.download_queue.read().front()
&& current_metadata == meta
&& let Some(current_download) = self.download_agent_registry.get(current_metadata)
{
self.set_status(DownloadManagerStatus::Paused);
current_download.on_cancelled(&self.app_handle);
self.stop_and_wait_current_download();

self.download_queue.pop_front();
self.download_queue.pop_front();

self.cleanup_current_download();
debug!("current download queue: {:?}", self.download_queue.read());
}
// TODO: Collapse these two into a single if statement somehow
else if let Some(download_agent) = self.download_agent_registry.get(meta) {
let index = self.download_queue.get_by_meta(meta);
if let Some(index) = index {
download_agent.on_cancelled(&self.app_handle);
let _ = self.download_queue.edit().remove(index);
let removed = self.download_agent_registry.remove(meta);
debug!(
"removed {:?} from queue {:?}",
removed.map(|x| x.metadata()),
self.download_queue.read()
);
}
}
} else if let Some(download_agent) = self.download_agent_registry.get(meta) {
self.cleanup_current_download();
self.download_agent_registry.remove(meta);
debug!("current download queue: {:?}", self.download_queue.read());
}
// else just cancel it
else if let Some(download_agent) = self.download_agent_registry.get(meta) {
let index = self.download_queue.get_by_meta(meta);
if let Some(index) = index {
download_agent.on_cancelled(&self.app_handle);

@@ -365,6 +363,7 @@ impl DownloadManagerBuilder {
);
}
}
self.sender.send(DownloadManagerSignal::Go).unwrap();
self.push_ui_queue_update();
}
fn push_ui_stats_update(&self, kbs: usize, time: usize) {
@@ -62,7 +62,7 @@ impl Serialize for DownloadManagerStatus {
}
}

#[derive(Serialize, Clone, Debug)]
#[derive(Serialize, Clone, Debug, PartialEq)]
pub enum DownloadStatus {
Queued,
Downloading,
@@ -12,6 +12,12 @@ use super::{
util::{download_thread_control_flag::DownloadThreadControl, progress_object::ProgressObject},
};

/**
 * Downloadables are responsible for managing their specific object's download state
 * e.g, the GameDownloadAgent is responsible for pushing game updates
 *
 * But the download manager manages the queue state
 */
pub trait Downloadable: Send + Sync {
fn download(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError>;
fn validate(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError>;

@@ -20,7 +26,7 @@ pub trait Downloadable: Send + Sync {
fn control_flag(&self) -> DownloadThreadControl;
fn status(&self) -> DownloadStatus;
fn metadata(&self) -> DownloadableMetadata;
fn on_initialised(&self, app_handle: &AppHandle);
fn on_queued(&self, app_handle: &AppHandle);
fn on_error(&self, app_handle: &AppHandle, error: &ApplicationDownloadError);
fn on_complete(&self, app_handle: &AppHandle);
fn on_cancelled(&self, app_handle: &AppHandle);
@@ -23,7 +23,7 @@ pub struct ProgressObject {
//last_update: Arc<RwLock<Instant>>,
last_update_time: Arc<AtomicInstant>,
bytes_last_update: Arc<AtomicUsize>,
rolling: RollingProgressWindow<1>,
rolling: RollingProgressWindow<1000>,
}

#[derive(Clone)]

@@ -116,7 +116,7 @@ pub fn calculate_update(progress: &ProgressObject) {
let last_update_time = progress
.last_update_time
.swap(Instant::now(), Ordering::SeqCst);
let time_since_last_update = Instant::now().duration_since(last_update_time).as_millis();
let time_since_last_update = Instant::now().duration_since(last_update_time).as_millis_f64();

let current_bytes_downloaded = progress.sum();
let max = progress.get_max();

@@ -124,17 +124,17 @@ pub fn calculate_update(progress: &ProgressObject) {
.bytes_last_update
.swap(current_bytes_downloaded, Ordering::Acquire);

let bytes_since_last_update = current_bytes_downloaded.saturating_sub(bytes_at_last_update);
let bytes_since_last_update = current_bytes_downloaded.saturating_sub(bytes_at_last_update) as f64;

let kilobytes_per_second = bytes_since_last_update / (time_since_last_update as usize).max(1);
let kilobytes_per_second = bytes_since_last_update / time_since_last_update;

let bytes_remaining = max.saturating_sub(current_bytes_downloaded); // bytes

progress.update_window(kilobytes_per_second);
progress.update_window(kilobytes_per_second as usize);
push_update(progress, bytes_remaining);
}

#[throttle(1, Duration::from_millis(500))]
#[throttle(1, Duration::from_millis(250))]
pub fn push_update(progress: &ProgressObject, bytes_remaining: usize) {
let average_speed = progress.rolling.get_average();
let time_remaining = (bytes_remaining / 1000) / average_speed.max(1);
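The unit change above is easy to misread, so a quick worked check with made-up numbers: dividing bytes by elapsed milliseconds gives bytes-per-millisecond, which is numerically the same as kilobytes-per-second, so no further scaling is needed before the value is fed into the rolling window.

    fn main() {
        // Hypothetical sample: 5_000_000 bytes arrived over the last 2_000 ms.
        let bytes_since_last_update: f64 = 5_000_000.0;
        let time_since_last_update_ms: f64 = 2_000.0;

        // bytes / ms == (bytes / 1000) per (ms / 1000) == kB / s
        let kilobytes_per_second = bytes_since_last_update / time_since_last_update_ms;
        assert_eq!(kilobytes_per_second, 2_500.0); // roughly 2.5 MB/s
    }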
@@ -1,6 +1,6 @@
use std::sync::{
atomic::{AtomicUsize, Ordering},
Arc,
atomic::{AtomicUsize, Ordering},
};

#[derive(Clone)]

@@ -22,17 +22,22 @@ impl<const S: usize> RollingProgressWindow<S> {
}
pub fn get_average(&self) -> usize {
let current = self.current.load(Ordering::SeqCst);
self.window
let valid = self
.window
.iter()
.enumerate()
.filter(|(i, _)| i < &current)
.map(|(_, x)| x.load(Ordering::Acquire))
.sum::<usize>()
/ S
.collect::<Vec<usize>>();
let amount = valid.len();
let sum = valid.into_iter().sum::<usize>();

sum / amount
}
pub fn reset(&self) {
self.window
.iter()
.for_each(|x| x.store(0, Ordering::Release));
self.current.store(0, Ordering::Release);
}
}
@@ -1,10 +1,11 @@
use bitcode::{Decode, Encode};
use serde::{Deserialize, Serialize};

use crate::games::library::Game;

pub type Collections = Vec<Collection>;

#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[derive(Serialize, Deserialize, Debug, Clone, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct Collection {
id: String,

@@ -14,7 +15,7 @@ pub struct Collection {
entries: Vec<CollectionObject>,
}

#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[derive(Serialize, Deserialize, Debug, Clone, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct CollectionObject {
collection_id: String,
@@ -4,6 +4,7 @@ use crate::{
error::remote_access_error::RemoteAccessError,
remote::{
auth::generate_authorization_header,
cache::{cache_object, get_cached_object},
requests::{generate_url, make_authenticated_get},
utils::DROP_CLIENT_ASYNC,
},

@@ -12,11 +13,23 @@ use crate::{
use super::collection::{Collection, Collections};

#[tauri::command]
pub async fn fetch_collections() -> Result<Collections, RemoteAccessError> {
pub async fn fetch_collections(
hard_refresh: Option<bool>,
) -> Result<Collections, RemoteAccessError> {
let do_hard_refresh = hard_refresh.unwrap_or(false);
if !do_hard_refresh && let Ok(cached_response) = get_cached_object::<Collections>("collections")
{
return Ok(cached_response);
}

let response =
make_authenticated_get(generate_url(&["/api/v1/client/collection"], &[])?).await?;

Ok(response.json().await?)
let collections: Collections = response.json().await?;

cache_object("collections", &collections)?;

Ok(collections)
}

#[tauri::command]

@@ -90,7 +103,8 @@ pub async fn delete_game_in_collection(
.delete(url)
.header("Authorization", generate_authorization_header())
.json(&json!({"id": game_id}))
.send().await?;
.send()
.await?;

Ok(())
}
@@ -27,12 +27,14 @@ use super::{
#[tauri::command]
pub async fn fetch_library(
state: tauri::State<'_, Mutex<AppState<'_>>>,
hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
offline!(
state,
fetch_library_logic,
fetch_library_logic_offline,
state
state,
hard_refresh
).await
}
@@ -42,6 +42,7 @@ use super::drop_data::DropData;
static RETRY_COUNT: usize = 3;

const TARGET_BUCKET_SIZE: usize = 63 * 1000 * 1000;
const MAX_FILES_PER_BUCKET: usize = (1024 / 4) - 1;

pub struct GameDownloadAgent {
pub id: String,

@@ -86,6 +87,8 @@ impl GameDownloadAgent {
let stored_manifest =
DropData::generate(id.clone(), version.clone(), data_base_dir_path.clone());

let context_lock = stored_manifest.contexts.lock().unwrap().clone();

let result = Self {
id,
version,

@@ -106,7 +109,14 @@ impl GameDownloadAgent {
.as_ref()
.unwrap()
.values()
.map(|e| e.lengths.iter().sum::<usize>())
.map(|e| {
e.lengths
.iter()
.enumerate()
.filter(|(i, _)| *context_lock.get(&e.checksums[*i]).unwrap_or(&false))
.map(|(_, v)| v)
.sum::<usize>()
})
.sum::<usize>() as u64;

let available_space = get_disk_available(data_base_dir_path)? as u64;

@@ -308,7 +318,8 @@ impl GameDownloadAgent {
drops: vec![],
});

if *current_bucket_size + length >= TARGET_BUCKET_SIZE
if (*current_bucket_size + length >= TARGET_BUCKET_SIZE
|| current_bucket.drops.len() >= MAX_FILES_PER_BUCKET)
&& !current_bucket.drops.is_empty()
{
// Move current bucket into list and make a new one

@@ -486,6 +497,7 @@ impl GameDownloadAgent {
ApplicationDownloadError::Communication(_)
| ApplicationDownloadError::Checksum
| ApplicationDownloadError::Lock
| ApplicationDownloadError::IoError(_)
);

if i == RETRY_COUNT - 1 || !retry {

@@ -654,15 +666,24 @@ impl Downloadable for GameDownloadAgent {
}
}

fn on_initialised(&self, _app_handle: &tauri::AppHandle) {
*lock!(self.status) = DownloadStatus::Queued;
fn on_queued(&self, app_handle: &tauri::AppHandle) {
*self.status.lock().unwrap() = DownloadStatus::Queued;
let mut db_lock = borrow_db_mut_checked();
let status = ApplicationTransientStatus::Queued {
version_name: self.version.clone(),
};
db_lock
.applications
.transient_statuses
.insert(self.metadata(), status.clone());
push_game_update(app_handle, &self.id, None, (None, Some(status)));
}

fn on_error(&self, app_handle: &tauri::AppHandle, error: &ApplicationDownloadError) {
*lock!(self.status) = DownloadStatus::Error;
app_emit!(app_handle, "download_error", error.to_string());

error!("error while managing download: {error}");
error!("error while managing download: {error:?}");

let mut handle = borrow_db_mut_checked();
handle

@@ -693,15 +714,8 @@ impl Downloadable for GameDownloadAgent {
}

fn on_cancelled(&self, app_handle: &tauri::AppHandle) {
info!("cancelled {}", self.id);
self.cancel(app_handle);
/*
on_game_incomplete(
&self.metadata(),
self.dropdata.base_path.to_string_lossy().to_string(),
app_handle,
)
.unwrap();
*/
}

fn status(&self) -> DownloadStatus {
@@ -9,7 +9,7 @@ use crate::games::downloads::manifest::{ChunkBody, DownloadBucket, DownloadConte
use crate::remote::auth::generate_authorization_header;
use crate::remote::requests::generate_url;
use crate::remote::utils::DROP_CLIENT_SYNC;
use log::{info, warn};
use log::{debug, info, warn};
use md5::{Context, Digest};
use reqwest::blocking::Response;

@@ -18,6 +18,7 @@ use std::io::Read;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::sync::Arc;
use std::time::Instant;
use std::{
fs::{File, OpenOptions},
io::{self, BufWriter, Seek, SeekFrom, Write},

@@ -25,6 +26,7 @@ use std::{
};

static MAX_PACKET_LENGTH: usize = 4096 * 4;
static BUMP_SIZE: usize = 4096 * 16;

pub struct DropWriter<W: Write> {
hasher: Context,

@@ -79,6 +81,8 @@ pub struct DropDownloadPipeline<'a, R: Read, W: Write> {
pub drops: Vec<DownloadDrop>,
pub destination: Vec<DropWriter<W>>,
pub control_flag: &'a DownloadThreadControl,
#[allow(dead_code)]
progress: ProgressHandle,
}

impl<'a> DropDownloadPipeline<'a, Response, File> {

@@ -96,6 +100,7 @@ impl<'a> DropDownloadPipeline<'a, Response, File> {
.try_collect()?,
drops,
control_flag,
progress,
})
}

@@ -110,13 +115,24 @@ impl<'a> DropDownloadPipeline<'a, Response, File> {
if drop.start != 0 {
destination.seek(SeekFrom::Start(drop.start as u64))?;
}
let mut last_bump = 0;
loop {
let size = MAX_PACKET_LENGTH.min(remaining);
self.source.read_exact(&mut copy_buffer[0..size])?;
let size = self.source.read(&mut copy_buffer[0..size]).inspect_err(|_| {
info!("got error from {}", drop.filename);
})?;
remaining -= size;
last_bump += size;

destination.write_all(&copy_buffer[0..size])?;

if last_bump > BUMP_SIZE {
last_bump -= BUMP_SIZE;
if self.control_flag.get() == DownloadThreadControlFlag::Stop {
return Ok(false);
}
}

if remaining == 0 {
break;
};

@@ -130,6 +146,13 @@ impl<'a> DropDownloadPipeline<'a, Response, File> {
Ok(true)
}

#[allow(dead_code)]
fn debug_skip_checksum(self) {
self.destination
.into_iter()
.for_each(|mut e| e.flush().unwrap());
}

fn finish(self) -> Result<Vec<Digest>, io::Error> {
let checksums = self
.destination

@@ -152,6 +175,8 @@ pub fn download_game_bucket(
return Ok(false);
}

let start = Instant::now();

let header = generate_authorization_header();

let url = generate_url(&["/api/v2/client/chunk"], &[])

@@ -225,6 +250,10 @@ pub fn download_game_bucket(
}
}

let timestep = start.elapsed().as_millis();

debug!("took {}ms to start downloading", timestep);

let mut pipeline =
DropDownloadPipeline::new(response, bucket.drops.clone(), control_flag, progress)
.map_err(|e| ApplicationDownloadError::IoError(Arc::new(e)))?;
@@ -82,7 +82,13 @@ pub struct StatsUpdateEvent {

pub async fn fetch_library_logic(
state: tauri::State<'_, Mutex<AppState<'_>>>,
hard_fresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let do_hard_refresh = hard_fresh.unwrap_or(false);
if !do_hard_refresh && let Ok(library) = get_cached_object("library") {
return Ok(library);
}

let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/user/library"], &[])?;
let response = client

@@ -144,6 +150,7 @@ pub async fn fetch_library_logic(
}
pub async fn fetch_library_logic_offline(
_state: tauri::State<'_, Mutex<AppState<'_>>>,
_hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let mut games: Vec<Game> = get_cached_object("library")?;
@@ -1,4 +1,6 @@
use crate::database::models::data::{ApplicationTransientStatus, Database, GameDownloadStatus};
use crate::database::models::data::{
ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus,
};

pub type GameStatusWithTransient = (
Option<GameDownloadStatus>,

@@ -8,10 +10,16 @@ pub struct GameStatusManager {}

impl GameStatusManager {
pub fn fetch_state(game_id: &String, database: &Database) -> GameStatusWithTransient {
let online_state = match database.applications.installed_game_version.get(game_id) {
Some(meta) => database.applications.transient_statuses.get(meta).cloned(),
None => None,
};
let online_state = database
.applications
.transient_statuses
.get(&DownloadableMetadata {
id: game_id.to_string(),
download_type: DownloadType::Game,
version: None,
})
.cloned();

let offline_state = database.applications.game_statuses.get(game_id).cloned();

if online_state.is_some() {
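This lookup only works because the manual `PartialEq`/`Hash` impls added to `DownloadableMetadata` earlier in this commit ignore the `version` field, so a probe key built with `version: None` still matches entries whose stored key carries a concrete version. A self-contained illustration with a simplified stand-in type (names here are hypothetical):

    use std::collections::HashMap;
    use std::hash::{Hash, Hasher};

    // Simplified stand-in for DownloadableMetadata.
    #[derive(Debug, Eq, Clone)]
    struct Meta {
        id: String,
        #[allow(dead_code)] // kept to mirror the real struct
        version: Option<String>,
        download_type: u8, // stand-in for DownloadType
    }

    // Equality and hashing deliberately skip `version`, mirroring the commit.
    impl PartialEq for Meta {
        fn eq(&self, other: &Self) -> bool {
            self.id == other.id && self.download_type == other.download_type
        }
    }
    impl Hash for Meta {
        fn hash<H: Hasher>(&self, state: &mut H) {
            self.id.hash(state);
            self.download_type.hash(state);
        }
    }

    fn main() {
        let mut transient_statuses: HashMap<Meta, &str> = HashMap::new();
        transient_statuses.insert(
            Meta { id: "game-1".into(), version: Some("1.2.0".into()), download_type: 0 },
            "Downloading",
        );

        // A key built without knowing the installed version still finds the entry.
        let probe = Meta { id: "game-1".into(), version: None, download_type: 0 };
        assert_eq!(transient_statuses.get(&probe), Some(&"Downloading"));
    }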
@@ -9,17 +9,10 @@ use tauri::{AppHandle, Emitter, Manager};
use url::Url;

use crate::{
AppState, AppStatus, User, app_emit,
database::{
app_emit, database::{
db::{borrow_db_checked, borrow_db_mut_checked},
models::data::DatabaseAuth,
},
error::{drop_server_error::DropServerError, remote_access_error::RemoteAccessError},
lock,
remote::{
requests::make_authenticated_get,
utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC},
},
}, error::{drop_server_error::DropServerError, remote_access_error::RemoteAccessError}, lock, remote::{cache::clear_cached_object, requests::make_authenticated_get, utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC}}, AppState, AppStatus, User
};

use super::{

@@ -168,6 +161,9 @@ pub async fn recieve_handshake(app: AppHandle, path: String) {
state_lock.status = app_status;
state_lock.user = user;

let _ = clear_cached_object("collections");
let _ = clear_cached_object("library");

drop(state_lock);

app_emit!(app, "auth/finished", ());
@@ -50,6 +50,12 @@ fn read_sync(base: &Path, key: &str) -> io::Result<Vec<u8>> {
Ok(file)
}

fn delete_sync(base: &Path, key: &str) -> io::Result<()> {
let cache_path = get_cache_path(base, key);
std::fs::remove_file(cache_path)?;
Ok(())
}

pub fn cache_object<D: Encode>(key: &str, data: &D) -> Result<(), RemoteAccessError> {
cache_object_db(key, data, &borrow_db_checked())
}

@@ -73,6 +79,17 @@ pub fn get_cached_object_db<D: DecodeOwned>(
bitcode::decode::<D>(&bytes).map_err(|e| RemoteAccessError::Cache(io::Error::other(e)))?;
Ok(data)
}
pub fn clear_cached_object(key: &str) -> Result<(), RemoteAccessError> {
clear_cached_object_db(key, &borrow_db_checked())
}
pub fn clear_cached_object_db(
key: &str,
db: &Database,
) -> Result<(), RemoteAccessError> {
delete_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
Ok(())
}

#[derive(Encode, Decode)]
pub struct ObjectCache {
content_type: String,
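A hedged usage sketch of how these cache helpers compose from elsewhere in the crate (the key and payload type below are made up; `cache_object`, `get_cached_object`, and `clear_cached_object` are the functions shown above):

    use bitcode::{Decode, Encode};

    use crate::error::remote_access_error::RemoteAccessError;
    use crate::remote::cache::{cache_object, clear_cached_object, get_cached_object};

    // Hypothetical payload; anything deriving bitcode's Encode/Decode fits.
    #[derive(Encode, Decode, Debug, PartialEq)]
    struct PinnedGames {
        ids: Vec<String>,
    }

    fn refresh_pins(pins: &PinnedGames) -> Result<(), RemoteAccessError> {
        // Write-through: persist the fresh value under a stable key...
        cache_object("pinned-games", pins)?;
        // ...later reads hit the on-disk copy without a network round trip...
        let cached: PinnedGames = get_cached_object("pinned-games")?;
        debug_assert_eq!(&cached, pins);
        // ...and sign-out / re-handshake flows can invalidate it explicitly.
        clear_cached_object("pinned-games")?;
        Ok(())
    }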