mirror of https://github.com/Drop-OSS/drop-app.git
synced 2025-11-13 00:02:41 +10:00

Compare commits: e8b9ec020d...importexpo (3 commits)

Commits:
- 43b56462d6
- ab219670dc
- c1beef380e
README.md (24 lines changed)

@@ -1,21 +1,29 @@
# Drop Desktop Client
# Drop App

The Drop Desktop Client is the companion app for [Drop](https://github.com/Drop-OSS/drop). It is the official & intended way to download and play games on your Drop server.
Drop app is the companion app for [Drop](https://github.com/Drop-OSS/drop). It uses a Tauri base with Nuxt 3 + TailwindCSS on top of it, so we can re-use components from the web UI.

## Internals
## Running

Before setting up the drop app, be sure that you have a server set up.
The instructions for this can be found on the [Drop Docs](https://docs.droposs.org/docs/guides/quickstart)

It uses a Tauri base with Nuxt 3 + TailwindCSS on top of it, so we can re-use components from the web UI.

## Current features

Currently supported are the following features:

- Signin (with custom server)
- Database registering & recovery
- Dynamic library fetching from server
- Installing & uninstalling games
- Download progress monitoring
- Launching / playing games

## Development

Before setting up a development environment, be sure that you have a server set up. The instructions for this can be found on the [Drop Docs](https://docs.droposs.org/docs/guides/quickstart).

Then, install dependencies with `yarn`. This'll install the custom builder's dependencies. Then, check everything works properly with `yarn tauri build`.
Install dependencies with `yarn`

Run the app in development with `yarn tauri dev`. NVIDIA users on Linux, use shell script `./nvidia-prop-dev.sh`

To manually specify the logging level, add the environment variable `RUST_LOG=[debug, info, warn, error]` to `yarn tauri dev`:

e.g. `RUST_LOG=debug yarn tauri dev`

## Contributing

Check out the contributing guide on our Developer Docs: [Drop Developer Docs - Contributing](https://developer.droposs.org/contributing).
Check the original [Drop repo](https://github.com/Drop-OSS/drop/blob/main/CONTRIBUTING.md) for contributing guidelines.
@@ -14,8 +14,7 @@
"@tauri-apps/plugin-os": "^2.3.0",
"@tauri-apps/plugin-shell": "^2.3.0",
"pino": "^9.7.0",
"pino-pretty": "^13.1.1",
"tauri": "^0.15.0"
"pino-pretty": "^13.1.1"
},
"devDependencies": {
"@tauri-apps/cli": "^2.7.1"
src-tauri/Cargo.lock (generated, 1462 lines changed)
File diff suppressed because it is too large
@@ -1,129 +1,101 @@
[package]
name = "drop-app"
version = "0.3.3"
description = "The client application for the open-source, self-hosted game distribution platform Drop"
authors = ["Drop OSS"]
# authors = ["Drop OSS"]
edition = "2024"
description = "The client application for the open-source, self-hosted game distribution platform Drop"

[workspace]
resolver = "3"
members = ["drop-consts",
    "drop-database",
    "drop-downloads",
    "drop-errors", "drop-library",
    "drop-native-library",
    "drop-process",
    "drop-remote",
]

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }

[lib]
crate-type = ["cdylib", "rlib", "staticlib"]
# The `_lib` suffix may seem redundant but it is necessary
# to make the lib name unique and wouldn't conflict with the bin name.
# This seems to be only an issue on Windows, see https://github.com/rust-lang/cargo/issues/8519
name = "drop_app_lib"
crate-type = ["staticlib", "cdylib", "rlib"]
rustflags = ["-C", "target-feature=+aes,+sse2"]

[build-dependencies]
tauri-build = { version = "2.0.0", features = [] }
# rustflags = ["-C", "target-feature=+aes,+sse2"]

[dependencies]
tauri-plugin-shell = "2.2.1"
serde_json = "1"
rayon = "1.10.0"
webbrowser = "1.0.2"
url = "2.5.2"
tauri-plugin-deep-link = "2"
log = "0.4.22"
hex = "0.4.3"
tauri-plugin-dialog = "2"
http = "1.1.0"
urlencoding = "2.1.3"
md5 = "0.7.0"
chrono = "0.4.38"
tauri-plugin-os = "2"
boxcar = "0.2.7"
umu-wrapper-lib = "0.1.0"
tauri-plugin-autostart = "2.0.0"
shared_child = "1.0.1"
serde_with = "3.12.0"
slice-deque = "0.3.0"
throttle_my_fn = "0.2.6"
parking_lot = "0.12.3"
atomic-instant-full = "0.1.0"
cacache = "13.1.0"
http-serde = "2.1.1"
reqwest-middleware = "0.4.0"
reqwest-middleware-cache = "0.1.1"
deranged = "=0.4.0"
droplet-rs = "0.7.3"
gethostname = "1.0.1"
zstd = "0.13.3"
tar = "0.4.44"
rand = "0.9.1"
regex = "1.11.1"
tempfile = "3.19.1"
schemars = "0.8.22"
sha1 = "0.10.6"
dirs = "6.0.0"
whoami = "1.6.0"
filetime = "0.2.25"
walkdir = "2.5.0"
known-folders = "1.2.0"
native_model = { version = "0.6.4", features = ["rmp_serde_1_3"], git = "https://github.com/Drop-OSS/native_model.git"}
tauri-plugin-opener = "2.4.0"
bitcode = "0.6.6"
reqwest-websocket = "0.5.0"
drop-database = { path = "./drop-database" }
drop-downloads = { path = "./drop-downloads" }
drop-errors = { path = "./drop-errors" }
drop-native-library = { path = "./drop-native-library" }
drop-process = { path = "./drop-process" }
drop-remote = { path = "./drop-remote" }
futures-lite = "2.6.0"
page_size = "0.6.0"
sysinfo = "0.36.1"
humansize = "2.1.3"
tokio-util = { version = "0.7.16", features = ["io"] }
futures-core = "0.3.31"
bytes = "1.10.1"
# tailscale = { path = "./tailscale" }

[dependencies.dynfmt]
version = "0.1.5"
features = ["curly"]

[dependencies.tauri]
version = "2.7.0"
features = ["protocol-asset", "tray-icon"]

[dependencies.tokio]
version = "1.40.0"
features = ["rt", "tokio-macros", "signal"]
hex = "0.4.3"
http = "1.1.0"
known-folders = "1.2.0"
log = "0.4.22"
md5 = "0.7.0"
rayon = "1.10.0"
regex = "1.11.1"
reqwest-websocket = "0.5.0"
serde_json = "1"
tar = "0.4.44"
tauri = { version = "2.7.0", features = ["protocol-asset", "tray-icon"] }
tauri-plugin-autostart = "2.0.0"
tauri-plugin-deep-link = "2"
tauri-plugin-dialog = "2"
tauri-plugin-opener = "2.4.0"
tauri-plugin-os = "2"
tauri-plugin-shell = "2.2.1"
tempfile = "3.19.1"
url = "2.5.2"
webbrowser = "1.0.2"
whoami = "1.6.0"
zstd = "0.13.3"

[dependencies.log4rs]
version = "1.3.0"
features = ["console_appender", "file_appender"]

[dependencies.rustix]
version = "0.38.37"
features = ["fs"]

[dependencies.uuid]
version = "1.10.0"
features = ["v4", "fast-rng", "macro-diagnostics"]

[dependencies.rustbreak]
version = "2"
features = ["other_errors"] # You can also use "yaml_enc" or "bin_enc"

[dependencies.reqwest]
version = "0.12.22"
default-features = false
features = [
    "json",
    "http2",
    "blocking",
    "rustls-tls",
    "native-tls-alpn",
    "rustls-tls-native-roots",
    "stream",
    "blocking",
    "http2",
    "json",
    "native-tls-alpn",
    "rustls-tls",
    "rustls-tls-native-roots",
    "stream",
]

[dependencies.rustix]
version = "0.38.37"
features = ["fs"]

[dependencies.serde]
version = "1"
features = ["derive", "rc"]

[dependencies.uuid]
version = "1.10.0"
features = ["fast-rng", "macro-diagnostics", "v4"]

[build-dependencies]
tauri-build = { version = "2.0.0", features = [] }

[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }

[profile.release]
lto = true
panic = "abort"
codegen-units = 1
panic = 'abort'
src-tauri/drop-consts/Cargo.toml (new file, 7 lines)

@@ -0,0 +1,7 @@
[package]
name = "drop-consts"
version = "0.1.0"
edition = "2024"

[dependencies]
dirs = "6.0.0"

src-tauri/drop-consts/src/lib.rs (new file, 15 lines)

@@ -0,0 +1,15 @@
use std::{
    path::PathBuf,
    sync::{Arc, LazyLock},
};

#[cfg(not(debug_assertions))]
static DATA_ROOT_PREFIX: &'static str = "drop";
#[cfg(debug_assertions)]
static DATA_ROOT_PREFIX: &str = "drop-debug";

pub static DATA_ROOT_DIR: LazyLock<&'static PathBuf> =
    LazyLock::new(|| Box::leak(Box::new(dirs::data_dir().unwrap().join(DATA_ROOT_PREFIX))));

pub static CACHE_DIR: LazyLock<&'static PathBuf> =
    LazyLock::new(|| Box::leak(Box::new(DATA_ROOT_DIR.join("cache"))));
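These statics are what the rest of the workspace uses to locate Drop's data directories. As a minimal usage sketch (the calling function below is hypothetical and not part of this diff):

```rust
// Hypothetical consumer of drop-consts (illustration only, not from this diff).
use std::fs;
use std::path::PathBuf;

fn ensure_cache_dir() -> std::io::Result<()> {
    // Dereferencing the LazyLock yields the leaked &'static PathBuf.
    let cache: &'static PathBuf = *drop_consts::CACHE_DIR;
    fs::create_dir_all(cache)
}
```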
src-tauri/drop-database/Cargo.toml (new file, 21 lines)

@@ -0,0 +1,21 @@
[package]
name = "drop-database"
version = "0.1.0"
edition = "2024"

[dependencies]
bitcode = "0.6.7"
chrono = "0.4.42"
drop-consts = { path = "../drop-consts" }
drop-library = { path = "../drop-library" }
drop-native-library = { path = "../drop-native-library" }
log = "0.4.28"
native_model = { git = "https://github.com/Drop-OSS/native_model.git", version = "0.6.4", features = [
    "rmp_serde_1_3",
] }
rustbreak = "2.0.0"
serde = { version = "1.0.219", features = ["derive"] }
serde_with = "3.14.0"
url = "2.5.7"
whoami = "1.6.1"
140
src-tauri/drop-database/src/db.rs
Normal file
140
src-tauri/drop-database/src/db.rs
Normal file
@ -0,0 +1,140 @@
|
||||
use std::{
|
||||
fs::{self, create_dir_all},
|
||||
mem::ManuallyDrop,
|
||||
ops::{Deref, DerefMut},
|
||||
path::PathBuf,
|
||||
sync::{Arc, LazyLock, RwLockReadGuard, RwLockWriteGuard},
|
||||
};
|
||||
|
||||
use chrono::Utc;
|
||||
use drop_consts::DATA_ROOT_DIR;
|
||||
use log::{debug, error, info, warn};
|
||||
use rustbreak::{DeSerError, DeSerializer, PathDatabase, RustbreakError};
|
||||
use serde::{Serialize, de::DeserializeOwned};
|
||||
|
||||
use crate::DB;
|
||||
|
||||
use super::models::data::Database;
|
||||
|
||||
// Custom JSON serializer to support everything we need
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct DropDatabaseSerializer;
|
||||
|
||||
impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
|
||||
for DropDatabaseSerializer
|
||||
{
|
||||
fn serialize(&self, val: &T) -> rustbreak::error::DeSerResult<Vec<u8>> {
|
||||
native_model::encode(val).map_err(|e| DeSerError::Internal(e.to_string()))
|
||||
}
|
||||
|
||||
fn deserialize<R: std::io::Read>(&self, mut s: R) -> rustbreak::error::DeSerResult<T> {
|
||||
let mut buf = Vec::new();
|
||||
s.read_to_end(&mut buf)
|
||||
.map_err(|e| rustbreak::error::DeSerError::Internal(e.to_string()))?;
|
||||
let (val, _version) =
|
||||
native_model::decode(buf).map_err(|e| DeSerError::Internal(e.to_string()))?;
|
||||
Ok(val)
|
||||
}
|
||||
}
|
||||
|
||||
pub type DatabaseInterface =
|
||||
rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer>;
|
||||
|
||||
pub trait DatabaseImpls {
|
||||
fn set_up_database() -> DatabaseInterface;
|
||||
}
|
||||
impl DatabaseImpls for DatabaseInterface {
|
||||
fn set_up_database() -> DatabaseInterface {
|
||||
let db_path = DATA_ROOT_DIR.join("drop.db");
|
||||
let games_base_dir = DATA_ROOT_DIR.join("games");
|
||||
let logs_root_dir = DATA_ROOT_DIR.join("logs");
|
||||
let cache_dir = DATA_ROOT_DIR.join("cache");
|
||||
let pfx_dir = DATA_ROOT_DIR.join("pfx");
|
||||
|
||||
debug!("creating data directory at {DATA_ROOT_DIR:?}");
|
||||
create_dir_all(DATA_ROOT_DIR.as_path()).unwrap();
|
||||
create_dir_all(&games_base_dir).unwrap();
|
||||
create_dir_all(&logs_root_dir).unwrap();
|
||||
create_dir_all(&cache_dir).unwrap();
|
||||
create_dir_all(&pfx_dir).unwrap();
|
||||
|
||||
let exists = fs::exists(db_path.clone()).unwrap();
|
||||
|
||||
if exists {
|
||||
match PathDatabase::load_from_path(db_path.clone()) {
|
||||
Ok(db) => db,
|
||||
Err(e) => handle_invalid_database(e, db_path, games_base_dir, cache_dir),
|
||||
}
|
||||
} else {
|
||||
let default = Database::new(games_base_dir, None);
|
||||
debug!(
|
||||
"Creating database at path {}",
|
||||
db_path.as_os_str().to_str().unwrap()
|
||||
);
|
||||
PathDatabase::create_at_path(db_path, default).expect("Database could not be created")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Make the error relevant rather than just assume that it's a Deserialize error
|
||||
fn handle_invalid_database(
|
||||
_e: RustbreakError,
|
||||
db_path: PathBuf,
|
||||
games_base_dir: PathBuf,
|
||||
cache_dir: PathBuf,
|
||||
) -> rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer> {
|
||||
warn!("{_e}");
|
||||
let new_path = {
|
||||
let time = Utc::now().timestamp();
|
||||
let mut base = db_path.clone();
|
||||
base.set_file_name(format!("drop.db.backup-{time}"));
|
||||
base
|
||||
};
|
||||
info!("old database stored at: {}", new_path.to_string_lossy());
|
||||
fs::rename(&db_path, &new_path).unwrap();
|
||||
|
||||
let db = Database::new(
|
||||
games_base_dir.into_os_string().into_string().unwrap(),
|
||||
Some(new_path),
|
||||
);
|
||||
|
||||
PathDatabase::create_at_path(db_path, db).expect("Database could not be created")
|
||||
}
|
||||
|
||||
// To automatically save the database upon drop
|
||||
pub struct DBRead<'a>(pub(crate) RwLockReadGuard<'a, Database>);
|
||||
pub struct DBWrite<'a>(pub(crate) ManuallyDrop<RwLockWriteGuard<'a, Database>>);
|
||||
impl<'a> Deref for DBWrite<'a> {
|
||||
type Target = Database;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
impl<'a> DerefMut for DBWrite<'a> {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
impl<'a> Deref for DBRead<'a> {
|
||||
type Target = Database;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
impl Drop for DBWrite<'_> {
|
||||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
ManuallyDrop::drop(&mut self.0);
|
||||
}
|
||||
|
||||
match DB.save() {
|
||||
Ok(()) => {}
|
||||
Err(e) => {
|
||||
error!("database failed to save with error {e}");
|
||||
panic!("database failed to save with error {e}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -5,13 +5,11 @@ use std::{
|
||||
use log::error;
|
||||
use native_model::{Decode, Encode};
|
||||
|
||||
use crate::lock;
|
||||
|
||||
pub type DropData = v1::DropData;
|
||||
|
||||
pub static DROP_DATA_PATH: &str = ".dropdata";
|
||||
|
||||
pub mod v1 {
|
||||
mod v1 {
|
||||
use std::{collections::HashMap, path::PathBuf, sync::Mutex};
|
||||
|
||||
use native_model::native_model;
|
||||
@ -51,12 +49,7 @@ impl DropData {
|
||||
let mut s = Vec::new();
|
||||
file.read_to_end(&mut s)?;
|
||||
|
||||
native_model::rmp_serde_1_3::RmpSerde::decode(s).map_err(|e| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::InvalidData,
|
||||
format!("Failed to decode drop data: {e}"),
|
||||
)
|
||||
})
|
||||
Ok(native_model::rmp_serde_1_3::RmpSerde::decode(s).unwrap())
|
||||
}
|
||||
pub fn write(&self) {
|
||||
let manifest_raw = match native_model::rmp_serde_1_3::RmpSerde::encode(&self) {
|
||||
@ -78,12 +71,12 @@ impl DropData {
|
||||
}
|
||||
}
|
||||
pub fn set_contexts(&self, completed_contexts: &[(String, bool)]) {
|
||||
*lock!(self.contexts) = completed_contexts.iter().map(|s| (s.0.clone(), s.1)).collect();
|
||||
*self.contexts.lock().unwrap() = completed_contexts.iter().map(|s| (s.0.clone(), s.1)).collect();
|
||||
}
|
||||
pub fn set_context(&self, context: String, state: bool) {
|
||||
lock!(self.contexts).entry(context).insert_entry(state);
|
||||
self.contexts.lock().unwrap().entry(context).insert_entry(state);
|
||||
}
|
||||
pub fn get_contexts(&self) -> HashMap<String, bool> {
|
||||
lock!(self.contexts).clone()
|
||||
self.contexts.lock().unwrap().clone()
|
||||
}
|
||||
}
|
||||
src-tauri/drop-database/src/lib.rs (new file, 34 lines)

@@ -0,0 +1,34 @@
use std::{mem::ManuallyDrop, sync::LazyLock};

use log::error;

use crate::db::{DBRead, DBWrite, DatabaseImpls, DatabaseInterface};

pub mod db;
pub mod debug;
pub mod models;
pub mod process;
pub mod runtime_models;
pub mod drop_data;

pub static DB: LazyLock<DatabaseInterface> = LazyLock::new(DatabaseInterface::set_up_database);

pub fn borrow_db_checked<'a>() -> DBRead<'a> {
    match DB.borrow_data() {
        Ok(data) => DBRead(data),
        Err(e) => {
            error!("database borrow failed with error {e}");
            panic!("database borrow failed with error {e}");
        }
    }
}

pub fn borrow_db_mut_checked<'a>() -> DBWrite<'a> {
    match DB.borrow_data_mut() {
        Ok(data) => DBWrite(ManuallyDrop::new(data)),
        Err(e) => {
            error!("database borrow mut failed with error {e}");
            panic!("database borrow mut failed with error {e}");
        }
    }
}
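The `DBWrite` guard returned by `borrow_db_mut_checked` persists the database when it is dropped (see the `Drop` impl in `db.rs` above), so callers only mutate through the guard and let it fall out of scope. A rough sketch of a call site (the function is hypothetical; the field path is taken from the v4 models in this diff):

```rust
// Hypothetical call site (not part of this diff): mutate through the write
// guard; DBWrite's Drop impl calls DB.save() when the guard goes out of scope.
fn remember_install_dir(dir: std::path::PathBuf) {
    let mut db = drop_database::borrow_db_mut_checked();
    // Field path assumed from the v4 database models shown above.
    db.drop_applications.install_dirs.push(dir);
} // guard dropped here -> database saved
```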
@ -4,11 +4,11 @@ pub mod data {
|
||||
use native_model::native_model;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// NOTE: Within each version, you should NEVER use these types.
|
||||
// NOTE: Within each version, you should NEVER use these types.
|
||||
// Declare it using the actual version that it is from, i.e. v1::Settings rather than just Settings from here
|
||||
|
||||
pub type GameVersion = v1::GameVersion;
|
||||
pub type Database = v3::Database;
|
||||
pub type Database = v4::Database;
|
||||
pub type Settings = v1::Settings;
|
||||
pub type DatabaseAuth = v1::DatabaseAuth;
|
||||
|
||||
@ -19,7 +19,7 @@ pub mod data {
|
||||
*/
|
||||
pub type DownloadableMetadata = v1::DownloadableMetadata;
|
||||
pub type DownloadType = v1::DownloadType;
|
||||
pub type DatabaseApplications = v2::DatabaseApplications;
|
||||
pub type DatabaseApplications = v4::DatabaseApplications;
|
||||
// pub type DatabaseCompatInfo = v2::DatabaseCompatInfo;
|
||||
|
||||
use std::collections::HashMap;
|
||||
@ -37,10 +37,11 @@ pub mod data {
|
||||
}
|
||||
|
||||
mod v1 {
|
||||
use crate::process::process_manager::Platform;
|
||||
use serde_with::serde_as;
|
||||
use std::{collections::HashMap, path::PathBuf};
|
||||
|
||||
use crate::process::Platform;
|
||||
|
||||
use super::{Deserialize, Serialize, native_model};
|
||||
|
||||
fn default_template() -> String {
|
||||
@ -190,9 +191,9 @@ pub mod data {
|
||||
|
||||
use serde_with::serde_as;
|
||||
|
||||
use super::{
|
||||
Deserialize, Serialize, native_model, v1,
|
||||
};
|
||||
use crate::runtime_models::Game;
|
||||
|
||||
use super::{Deserialize, Serialize, native_model, v1};
|
||||
|
||||
#[native_model(id = 1, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from = v1::Database)]
|
||||
#[derive(Serialize, Deserialize, Clone, Default)]
|
||||
@ -274,14 +275,13 @@ pub mod data {
|
||||
#[native_model(id = 3, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from=v1::DatabaseApplications)]
|
||||
pub struct DatabaseApplications {
|
||||
pub install_dirs: Vec<PathBuf>,
|
||||
// Guaranteed to exist if the game also exists in the app state map
|
||||
pub game_statuses: HashMap<String, GameDownloadStatus>,
|
||||
|
||||
pub game_versions: HashMap<String, HashMap<String, v1::GameVersion>>,
|
||||
pub installed_game_version: HashMap<String, v1::DownloadableMetadata>,
|
||||
|
||||
#[serde(skip)]
|
||||
pub transient_statuses: HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
|
||||
pub transient_statuses:
|
||||
HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
|
||||
}
|
||||
impl From<v1::DatabaseApplications> for DatabaseApplications {
|
||||
fn from(value: v1::DatabaseApplications) -> Self {
|
||||
@ -302,10 +302,7 @@ pub mod data {
|
||||
mod v3 {
|
||||
use std::path::PathBuf;
|
||||
|
||||
use super::{
|
||||
Deserialize, Serialize,
|
||||
native_model, v2, v1,
|
||||
};
|
||||
use super::{Deserialize, Serialize, native_model, v1, v2};
|
||||
#[native_model(id = 1, version = 3, with = native_model::rmp_serde_1_3::RmpSerde, from = v2::Database)]
|
||||
#[derive(Serialize, Deserialize, Clone, Default)]
|
||||
pub struct Database {
|
||||
@ -335,28 +332,73 @@ pub mod data {
|
||||
}
|
||||
}
|
||||
|
||||
mod v4 {
|
||||
use std::{collections::HashMap, path::PathBuf};
|
||||
use drop_library::libraries::LibraryProviderIdentifier;
|
||||
use drop_native_library::impls::DropNativeLibraryProvider;
|
||||
use serde_with::serde_as;
|
||||
use crate::models::data::v3;
|
||||
use super::{Deserialize, Serialize, native_model, v1, v2};
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub enum Library {
|
||||
NativeLibrary(DropNativeLibraryProvider),
|
||||
}
|
||||
|
||||
#[serde_as]
|
||||
#[derive(Serialize, Deserialize, Default, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[native_model(id = 3, version = 4, with = native_model::rmp_serde_1_3::RmpSerde, from=v2::DatabaseApplications)]
|
||||
pub struct DatabaseApplications {
|
||||
pub install_dirs: Vec<PathBuf>,
|
||||
pub libraries: HashMap<LibraryProviderIdentifier, Library>,
|
||||
|
||||
#[serde(skip)]
|
||||
pub transient_statuses:
|
||||
HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
|
||||
}
|
||||
|
||||
impl From<v2::DatabaseApplications> for DatabaseApplications {
|
||||
fn from(value: v2::DatabaseApplications) -> Self {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
#[native_model(id = 1, version = 4, with = native_model::rmp_serde_1_3::RmpSerde, from = v3::Database)]
|
||||
#[derive(Serialize, Deserialize, Default, Clone)]
|
||||
pub struct Database {
|
||||
#[serde(default)]
|
||||
pub settings: v1::Settings,
|
||||
pub drop_applications: DatabaseApplications,
|
||||
#[serde(skip)]
|
||||
pub prev_database: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl From<v3::Database> for Database {
|
||||
fn from(value: v3::Database) -> Self {
|
||||
Database {
|
||||
settings: value.settings,
|
||||
drop_applications: value.applications.into(),
|
||||
prev_database: value.prev_database,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub fn new<T: Into<PathBuf>>(
|
||||
games_base_dir: T,
|
||||
prev_database: Option<PathBuf>,
|
||||
cache_dir: PathBuf,
|
||||
) -> Self {
|
||||
Self {
|
||||
applications: DatabaseApplications {
|
||||
drop_applications: DatabaseApplications {
|
||||
install_dirs: vec![games_base_dir.into()],
|
||||
game_statuses: HashMap::new(),
|
||||
game_versions: HashMap::new(),
|
||||
installed_game_version: HashMap::new(),
|
||||
libraries: HashMap::new(),
|
||||
transient_statuses: HashMap::new(),
|
||||
},
|
||||
prev_database,
|
||||
base_url: String::new(),
|
||||
auth: None,
|
||||
settings: Settings::default(),
|
||||
cache_dir,
|
||||
compat_info: None,
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
46
src-tauri/drop-database/src/process.rs
Normal file
46
src-tauri/drop-database/src/process.rs
Normal file
@ -0,0 +1,46 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Eq, Hash, PartialEq, Serialize, Deserialize, Clone, Copy, Debug)]
|
||||
pub enum Platform {
|
||||
Windows,
|
||||
Linux,
|
||||
MacOs,
|
||||
}
|
||||
|
||||
impl Platform {
|
||||
#[cfg(target_os = "windows")]
|
||||
pub const HOST: Platform = Self::Windows;
|
||||
#[cfg(target_os = "macos")]
|
||||
pub const HOST: Platform = Self::MacOs;
|
||||
#[cfg(target_os = "linux")]
|
||||
pub const HOST: Platform = Self::Linux;
|
||||
|
||||
pub fn is_case_sensitive(&self) -> bool {
|
||||
match self {
|
||||
Self::Windows | Self::MacOs => false,
|
||||
Self::Linux => true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for Platform {
|
||||
fn from(value: &str) -> Self {
|
||||
match value.to_lowercase().trim() {
|
||||
"windows" => Self::Windows,
|
||||
"linux" => Self::Linux,
|
||||
"mac" | "macos" => Self::MacOs,
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<whoami::Platform> for Platform {
|
||||
fn from(value: whoami::Platform) -> Self {
|
||||
match value {
|
||||
whoami::Platform::Windows => Platform::Windows,
|
||||
whoami::Platform::Linux => Platform::Linux,
|
||||
whoami::Platform::MacOS => Platform::MacOs,
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
28
src-tauri/drop-database/src/runtime_models.rs
Normal file
28
src-tauri/drop-database/src/runtime_models.rs
Normal file
@ -0,0 +1,28 @@
|
||||
use bitcode::{Decode, Encode};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, Default, Encode, Decode)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Game {
|
||||
pub id: String,
|
||||
m_name: String,
|
||||
m_short_description: String,
|
||||
m_description: String,
|
||||
// mDevelopers
|
||||
// mPublishers
|
||||
m_icon_object_id: String,
|
||||
m_banner_object_id: String,
|
||||
m_cover_object_id: String,
|
||||
m_image_library_object_ids: Vec<String>,
|
||||
m_image_carousel_object_ids: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, Encode, Decode)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct User {
|
||||
id: String,
|
||||
username: String,
|
||||
admin: bool,
|
||||
display_name: String,
|
||||
profile_picture_object_id: String,
|
||||
}
|
||||
src-tauri/drop-downloads/Cargo.toml (new file, 16 lines)

@@ -0,0 +1,16 @@
[package]
name = "drop-downloads"
version = "0.1.0"
edition = "2024"

[dependencies]
atomic-instant-full = "0.1.0"
drop-database = { path = "../drop-database" }
drop-errors = { path = "../drop-errors" }
# can't depend, cycle
# drop-native-library = { path = "../drop-native-library" }
log = "0.4.22"
parking_lot = "0.12.4"
serde = "1.0.219"
tauri = { version = "2.7.0" }
throttle_my_fn = "0.2.6"
@ -7,11 +7,13 @@ use std::{
|
||||
thread::{JoinHandle, spawn},
|
||||
};
|
||||
|
||||
use drop_database::models::data::DownloadableMetadata;
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
use log::{debug, error, info, warn};
|
||||
use tauri::{AppHandle, Emitter};
|
||||
|
||||
use crate::{
|
||||
app_emit, database::models::data::DownloadableMetadata, download_manager::download_manager_frontend::DownloadStatus, error::application_download_error::ApplicationDownloadError, games::library::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent}, lock, send
|
||||
download_manager_frontend::DownloadStatus, events::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent}
|
||||
};
|
||||
|
||||
use super::{
|
||||
@ -27,43 +29,6 @@ use super::{
|
||||
pub type DownloadAgent = Arc<Box<dyn Downloadable + Send + Sync>>;
|
||||
pub type CurrentProgressObject = Arc<Mutex<Option<Arc<ProgressObject>>>>;
|
||||
|
||||
/*
|
||||
|
||||
Welcome to the download manager, the most overengineered, glorious piece of bullshit.
|
||||
|
||||
The download manager takes a queue of ids and their associated
|
||||
DownloadAgents, and then, one-by-one, executes them. It provides an interface
|
||||
to interact with the currently downloading agent, and manage the queue.
|
||||
|
||||
When the DownloadManager is initialised, it is designed to provide a reference
|
||||
which can be used to provide some instructions (the DownloadManagerInterface),
|
||||
but other than that, it runs without any sort of interruptions.
|
||||
|
||||
It does this by opening up two data structures. Primarily is the command_receiver,
|
||||
and mpsc (multi-channel-single-producer) which allows commands to be sent from
|
||||
the Interface, and queued up for the Manager to process.
|
||||
|
||||
These have been mapped in the DownloadManagerSignal docs.
|
||||
|
||||
The other way to interact with the DownloadManager is via the download_queue,
|
||||
which is just a collection of ids which may be rearranged to suit
|
||||
whichever download queue order is required.
|
||||
|
||||
+----------------------------------------------------------------------------+
|
||||
| DO NOT ATTEMPT TO ADD OR REMOVE FROM THE QUEUE WITHOUT USING SIGNALS!! |
|
||||
| THIS WILL CAUSE A DESYNC BETWEEN THE DOWNLOAD AGENT REGISTRY AND THE QUEUE |
|
||||
| WHICH HAS NOT BEEN ACCOUNTED FOR |
|
||||
+----------------------------------------------------------------------------+
|
||||
|
||||
This download queue does not actually own any of the DownloadAgents. It is
|
||||
simply an id-based reference system. The actual Agents are stored in the
|
||||
download_agent_registry HashMap, as ordering is no issue here. This is why
|
||||
appending or removing from the download_queue must be done via signals.
|
||||
|
||||
Behold, my madness - quexeky
|
||||
|
||||
*/
|
||||
|
||||
pub struct DownloadManagerBuilder {
|
||||
download_agent_registry: HashMap<DownloadableMetadata, DownloadAgent>,
|
||||
download_queue: Queue,
|
||||
@ -102,7 +67,7 @@ impl DownloadManagerBuilder {
|
||||
}
|
||||
|
||||
fn set_status(&self, status: DownloadManagerStatus) {
|
||||
*lock!(self.status) = status;
|
||||
*self.status.lock().unwrap() = status;
|
||||
}
|
||||
|
||||
fn remove_and_cleanup_front_download(&mut self, meta: &DownloadableMetadata) -> DownloadAgent {
|
||||
@ -116,9 +81,9 @@ impl DownloadManagerBuilder {
|
||||
// Make sure the download thread is terminated
|
||||
fn cleanup_current_download(&mut self) {
|
||||
self.active_control_flag = None;
|
||||
*lock!(self.progress) = None;
|
||||
*self.progress.lock().unwrap() = None;
|
||||
|
||||
let mut download_thread_lock = lock!(self.current_download_thread);
|
||||
let mut download_thread_lock = self.current_download_thread.lock().unwrap();
|
||||
|
||||
if let Some(unfinished_thread) = download_thread_lock.take()
|
||||
&& !unfinished_thread.is_finished()
|
||||
@ -134,7 +99,7 @@ impl DownloadManagerBuilder {
|
||||
current_flag.set(DownloadThreadControlFlag::Stop);
|
||||
}
|
||||
|
||||
let mut download_thread_lock = lock!(self.current_download_thread);
|
||||
let mut download_thread_lock = self.current_download_thread.lock().unwrap();
|
||||
if let Some(current_download_thread) = download_thread_lock.take() {
|
||||
return current_download_thread.join().is_ok();
|
||||
};
|
||||
@ -196,7 +161,9 @@ impl DownloadManagerBuilder {
|
||||
self.download_queue.append(meta.clone());
|
||||
self.download_agent_registry.insert(meta, download_agent);
|
||||
|
||||
send!(self.sender, DownloadManagerSignal::UpdateUIQueue);
|
||||
self.sender
|
||||
.send(DownloadManagerSignal::UpdateUIQueue)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
fn manage_go_signal(&mut self) {
|
||||
@ -242,7 +209,7 @@ impl DownloadManagerBuilder {
|
||||
|
||||
let sender = self.sender.clone();
|
||||
|
||||
let mut download_thread_lock = lock!(self.current_download_thread);
|
||||
let mut download_thread_lock = self.current_download_thread.lock().unwrap();
|
||||
let app_handle = self.app_handle.clone();
|
||||
|
||||
*download_thread_lock = Some(spawn(move || {
|
||||
@ -253,7 +220,7 @@ impl DownloadManagerBuilder {
|
||||
Err(e) => {
|
||||
error!("download {:?} has error {}", download_agent.metadata(), &e);
|
||||
download_agent.on_error(&app_handle, &e);
|
||||
send!(sender, DownloadManagerSignal::Error(e));
|
||||
sender.send(DownloadManagerSignal::Error(e)).unwrap();
|
||||
return;
|
||||
}
|
||||
};
|
||||
@ -277,7 +244,7 @@ impl DownloadManagerBuilder {
|
||||
&e
|
||||
);
|
||||
download_agent.on_error(&app_handle, &e);
|
||||
send!(sender, DownloadManagerSignal::Error(e));
|
||||
sender.send(DownloadManagerSignal::Error(e)).unwrap();
|
||||
return;
|
||||
}
|
||||
};
|
||||
@ -288,8 +255,10 @@ impl DownloadManagerBuilder {
|
||||
|
||||
if validate_result {
|
||||
download_agent.on_complete(&app_handle);
|
||||
send!(sender, DownloadManagerSignal::Completed(download_agent.metadata()));
|
||||
send!(sender, DownloadManagerSignal::UpdateUIQueue);
|
||||
sender
|
||||
.send(DownloadManagerSignal::Completed(download_agent.metadata()))
|
||||
.unwrap();
|
||||
sender.send(DownloadManagerSignal::UpdateUIQueue).unwrap();
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -316,7 +285,7 @@ impl DownloadManagerBuilder {
|
||||
}
|
||||
|
||||
self.push_ui_queue_update();
|
||||
send!(self.sender, DownloadManagerSignal::Go);
|
||||
self.sender.send(DownloadManagerSignal::Go).unwrap();
|
||||
}
|
||||
fn manage_error_signal(&mut self, error: ApplicationDownloadError) {
|
||||
debug!("got signal Error");
|
||||
@ -354,7 +323,7 @@ impl DownloadManagerBuilder {
|
||||
let index = self.download_queue.get_by_meta(meta);
|
||||
if let Some(index) = index {
|
||||
download_agent.on_cancelled(&self.app_handle);
|
||||
let _ = self.download_queue.edit().remove(index);
|
||||
let _ = self.download_queue.edit().remove(index).unwrap();
|
||||
let removed = self.download_agent_registry.remove(meta);
|
||||
debug!(
|
||||
"removed {:?} from queue {:?}",
|
||||
@ -369,7 +338,7 @@ impl DownloadManagerBuilder {
|
||||
fn push_ui_stats_update(&self, kbs: usize, time: usize) {
|
||||
let event_data = StatsUpdateEvent { speed: kbs, time };
|
||||
|
||||
app_emit!(self.app_handle, "update_stats", event_data);
|
||||
self.app_handle.emit("update_stats", event_data).unwrap();
|
||||
}
|
||||
fn push_ui_queue_update(&self) {
|
||||
let queue = &self.download_queue.read();
|
||||
@ -388,6 +357,6 @@ impl DownloadManagerBuilder {
|
||||
.collect();
|
||||
|
||||
let event_data = QueueUpdateEvent { queue: queue_objs };
|
||||
app_emit!(self.app_handle, "update_queue", event_data);
|
||||
self.app_handle.emit("update_queue", event_data).unwrap();
|
||||
}
|
||||
}
|
||||
@ -3,20 +3,17 @@ use std::{
|
||||
collections::VecDeque,
|
||||
fmt::Debug,
|
||||
sync::{
|
||||
Mutex, MutexGuard,
|
||||
mpsc::{SendError, Sender},
|
||||
Mutex, MutexGuard,
|
||||
},
|
||||
thread::JoinHandle,
|
||||
};
|
||||
|
||||
use drop_database::models::data::DownloadableMetadata;
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
use log::{debug, info};
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::{
|
||||
database::models::data::DownloadableMetadata,
|
||||
error::application_download_error::ApplicationDownloadError, lock, send,
|
||||
};
|
||||
|
||||
use super::{
|
||||
download_manager_builder::{CurrentProgressObject, DownloadAgent},
|
||||
util::queue::Queue,
|
||||
@ -119,18 +116,22 @@ impl DownloadManager {
|
||||
self.download_queue.read()
|
||||
}
|
||||
pub fn get_current_download_progress(&self) -> Option<f64> {
|
||||
let progress_object = (*lock!(self.progress)).clone()?;
|
||||
let progress_object = (*self.progress.lock().unwrap()).clone()?;
|
||||
Some(progress_object.get_progress())
|
||||
}
|
||||
pub fn rearrange_string(&self, meta: &DownloadableMetadata, new_index: usize) {
|
||||
let mut queue = self.edit();
|
||||
let current_index = get_index_from_id(&mut queue, meta).expect("Failed to get meta index from id");
|
||||
let to_move = queue.remove(current_index).expect("Failed to remove meta at index from queue");
|
||||
let current_index = get_index_from_id(&mut queue, meta).unwrap();
|
||||
let to_move = queue.remove(current_index).unwrap();
|
||||
queue.insert(new_index, to_move);
|
||||
send!(self.command_sender, DownloadManagerSignal::UpdateUIQueue);
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::UpdateUIQueue)
|
||||
.unwrap();
|
||||
}
|
||||
pub fn cancel(&self, meta: DownloadableMetadata) {
|
||||
send!(self.command_sender, DownloadManagerSignal::Cancel(meta));
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::Cancel(meta))
|
||||
.unwrap();
|
||||
}
|
||||
pub fn rearrange(&self, current_index: usize, new_index: usize) {
|
||||
if current_index == new_index {
|
||||
@ -139,31 +140,39 @@ impl DownloadManager {
|
||||
|
||||
let needs_pause = current_index == 0 || new_index == 0;
|
||||
if needs_pause {
|
||||
send!(self.command_sender, DownloadManagerSignal::Stop);
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::Stop)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
debug!("moving download at index {current_index} to index {new_index}");
|
||||
|
||||
let mut queue = self.edit();
|
||||
let to_move = queue.remove(current_index).expect("Failed to get");
|
||||
let to_move = queue.remove(current_index).unwrap();
|
||||
queue.insert(new_index, to_move);
|
||||
drop(queue);
|
||||
|
||||
if needs_pause {
|
||||
send!(self.command_sender, DownloadManagerSignal::Go);
|
||||
self.command_sender.send(DownloadManagerSignal::Go).unwrap();
|
||||
}
|
||||
send!(self.command_sender, DownloadManagerSignal::UpdateUIQueue);
|
||||
send!(self.command_sender, DownloadManagerSignal::Go);
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::UpdateUIQueue)
|
||||
.unwrap();
|
||||
self.command_sender.send(DownloadManagerSignal::Go).unwrap();
|
||||
}
|
||||
pub fn pause_downloads(&self) {
|
||||
send!(self.command_sender, DownloadManagerSignal::Stop);
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::Stop)
|
||||
.unwrap();
|
||||
}
|
||||
pub fn resume_downloads(&self) {
|
||||
send!(self.command_sender, DownloadManagerSignal::Go);
|
||||
self.command_sender.send(DownloadManagerSignal::Go).unwrap();
|
||||
}
|
||||
pub fn ensure_terminated(&self) -> Result<Result<(), ()>, Box<dyn Any + Send>> {
|
||||
send!(self.command_sender, DownloadManagerSignal::Finish);
|
||||
let terminator = lock!(self.terminator).take();
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::Finish)
|
||||
.unwrap();
|
||||
let terminator = self.terminator.lock().unwrap().take();
|
||||
terminator.unwrap().join()
|
||||
}
|
||||
pub fn get_sender(&self) -> Sender<DownloadManagerSignal> {
|
||||
@ -1,12 +1,9 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use drop_database::models::data::DownloadableMetadata;
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
use tauri::AppHandle;
|
||||
|
||||
use crate::{
|
||||
database::models::data::DownloadableMetadata,
|
||||
error::application_download_error::ApplicationDownloadError,
|
||||
};
|
||||
|
||||
use super::{
|
||||
download_manager_frontend::DownloadStatus,
|
||||
util::{download_thread_control_flag::DownloadThreadControl, progress_object::ProgressObject},
|
||||
src-tauri/drop-downloads/src/events.rs (new file, 24 lines)

@@ -0,0 +1,24 @@
use drop_database::models::data::DownloadableMetadata;
use serde::Serialize;

use crate::download_manager_frontend::DownloadStatus;

#[derive(Serialize, Clone)]
pub struct QueueUpdateEventQueueData {
    pub meta: DownloadableMetadata,
    pub status: DownloadStatus,
    pub progress: f64,
    pub current: usize,
    pub max: usize,
}

#[derive(Serialize, Clone)]
pub struct QueueUpdateEvent {
    pub queue: Vec<QueueUpdateEventQueueData>,
}

#[derive(Serialize, Clone)]
pub struct StatsUpdateEvent {
    pub speed: usize,
    pub time: usize,
}
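These payload types are serialized and pushed to the frontend as Tauri events; the download-manager changes later in this diff emit them under the `update_stats` and `update_queue` event names. A minimal sketch of such an emit (the wrapper function is illustrative, not from the diff):

```rust
use tauri::{AppHandle, Emitter};

// Illustrative helper (not part of this diff): push a stats update to the UI.
fn push_stats(app_handle: &AppHandle, speed: usize, time: usize) {
    let event_data = drop_downloads::events::StatsUpdateEvent { speed, time };
    app_handle
        .emit("update_stats", event_data)
        .expect("failed to emit stats update");
}
```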
src-tauri/drop-downloads/src/lib.rs (new file, 7 lines)

@@ -0,0 +1,7 @@
#![feature(duration_millis_float)]

pub mod download_manager_builder;
pub mod download_manager_frontend;
pub mod downloadable;
pub mod events;
pub mod util;
@ -10,7 +10,7 @@ use std::{
|
||||
use atomic_instant_full::AtomicInstant;
|
||||
use throttle_my_fn::throttle;
|
||||
|
||||
use crate::{download_manager::download_manager_frontend::DownloadManagerSignal, lock, send};
|
||||
use crate::download_manager_frontend::DownloadManagerSignal;
|
||||
|
||||
use super::rolling_progress_updates::RollingProgressWindow;
|
||||
|
||||
@ -74,10 +74,12 @@ impl ProgressObject {
|
||||
}
|
||||
|
||||
pub fn set_time_now(&self) {
|
||||
*lock!(self.start) = Instant::now();
|
||||
*self.start.lock().unwrap() = Instant::now();
|
||||
}
|
||||
pub fn sum(&self) -> usize {
|
||||
lock!(self.progress_instances)
|
||||
self.progress_instances
|
||||
.lock()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|instance| instance.load(Ordering::Acquire))
|
||||
.sum()
|
||||
@ -86,25 +88,27 @@ impl ProgressObject {
|
||||
self.set_time_now();
|
||||
self.bytes_last_update.store(0, Ordering::Release);
|
||||
self.rolling.reset();
|
||||
lock!(self.progress_instances)
|
||||
self.progress_instances
|
||||
.lock()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.for_each(|x| x.store(0, Ordering::SeqCst));
|
||||
}
|
||||
pub fn get_max(&self) -> usize {
|
||||
*lock!(self.max)
|
||||
*self.max.lock().unwrap()
|
||||
}
|
||||
pub fn set_max(&self, new_max: usize) {
|
||||
*lock!(self.max) = new_max;
|
||||
*self.max.lock().unwrap() = new_max;
|
||||
}
|
||||
pub fn set_size(&self, length: usize) {
|
||||
*lock!(self.progress_instances) =
|
||||
*self.progress_instances.lock().unwrap() =
|
||||
(0..length).map(|_| Arc::new(AtomicUsize::new(0))).collect();
|
||||
}
|
||||
pub fn get_progress(&self) -> f64 {
|
||||
self.sum() as f64 / self.get_max() as f64
|
||||
}
|
||||
pub fn get(&self, index: usize) -> Arc<AtomicUsize> {
|
||||
lock!(self.progress_instances)[index].clone()
|
||||
self.progress_instances.lock().unwrap()[index].clone()
|
||||
}
|
||||
fn update_window(&self, kilobytes_per_second: usize) {
|
||||
self.rolling.update(kilobytes_per_second);
|
||||
@ -144,12 +148,18 @@ pub fn push_update(progress: &ProgressObject, bytes_remaining: usize) {
|
||||
}
|
||||
|
||||
fn update_ui(progress_object: &ProgressObject, kilobytes_per_second: usize, time_remaining: usize) {
|
||||
send!(
|
||||
progress_object.sender,
|
||||
DownloadManagerSignal::UpdateUIStats(kilobytes_per_second, time_remaining)
|
||||
);
|
||||
progress_object
|
||||
.sender
|
||||
.send(DownloadManagerSignal::UpdateUIStats(
|
||||
kilobytes_per_second,
|
||||
time_remaining,
|
||||
))
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
fn update_queue(progress: &ProgressObject) {
|
||||
send!(progress.sender, DownloadManagerSignal::UpdateUIQueue)
|
||||
progress
|
||||
.sender
|
||||
.send(DownloadManagerSignal::UpdateUIQueue)
|
||||
.unwrap();
|
||||
}
|
||||
@ -3,7 +3,7 @@ use std::{
|
||||
sync::{Arc, Mutex, MutexGuard},
|
||||
};
|
||||
|
||||
use crate::{database::models::data::DownloadableMetadata, lock};
|
||||
use drop_database::models::data::DownloadableMetadata;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Queue {
|
||||
@ -24,10 +24,10 @@ impl Queue {
|
||||
}
|
||||
}
|
||||
pub fn read(&self) -> VecDeque<DownloadableMetadata> {
|
||||
lock!(self.inner).clone()
|
||||
self.inner.lock().unwrap().clone()
|
||||
}
|
||||
pub fn edit(&self) -> MutexGuard<'_, VecDeque<DownloadableMetadata>> {
|
||||
lock!(self.inner)
|
||||
self.inner.lock().unwrap()
|
||||
}
|
||||
pub fn pop_front(&self) -> Option<DownloadableMetadata> {
|
||||
self.edit().pop_front()
|
||||
src-tauri/drop-errors/Cargo.toml (new file, 14 lines)

@@ -0,0 +1,14 @@
[package]
name = "drop-errors"
version = "0.1.0"
edition = "2024"

[dependencies]
http = "1.3.1"
humansize = "2.1.3"
reqwest = "0.12.23"
reqwest-websocket = "0.5.1"
serde = { version = "1.0.219", features = ["derive"] }
serde_with = "3.14.0"
tauri-plugin-opener = "2.5.0"
url = "2.5.7"
@ -18,7 +18,7 @@ pub enum ApplicationDownloadError {
|
||||
Checksum,
|
||||
Lock,
|
||||
IoError(Arc<io::Error>),
|
||||
DownloadError(RemoteAccessError),
|
||||
DownloadError,
|
||||
}
|
||||
|
||||
impl Display for ApplicationDownloadError {
|
||||
@ -40,16 +40,10 @@ impl Display for ApplicationDownloadError {
|
||||
write!(f, "checksum failed to validate for download")
|
||||
}
|
||||
ApplicationDownloadError::IoError(error) => write!(f, "io error: {error}"),
|
||||
ApplicationDownloadError::DownloadError(error) => write!(
|
||||
ApplicationDownloadError::DownloadError => write!(
|
||||
f,
|
||||
"Download failed with error {error}"
|
||||
"Download failed. See Download Manager status for specific error"
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<io::Error> for ApplicationDownloadError {
|
||||
fn from(value: io::Error) -> Self {
|
||||
ApplicationDownloadError::IoError(Arc::new(value))
|
||||
}
|
||||
}
|
||||
@ -2,7 +2,7 @@ use serde::Deserialize;
|
||||
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct DropServerError {
|
||||
pub struct ServerError {
|
||||
pub status_code: usize,
|
||||
pub status_message: String,
|
||||
// pub message: String,
|
||||
@ -4,4 +4,3 @@ pub mod drop_server_error;
|
||||
pub mod library_error;
|
||||
pub mod process_error;
|
||||
pub mod remote_access_error;
|
||||
pub mod cache_error;
|
||||
src-tauri/drop-errors/src/library_error.rs (new file, 18 lines)

@@ -0,0 +1,18 @@
use std::fmt::Display;

use serde_with::SerializeDisplay;

#[derive(SerializeDisplay)]
pub enum LibraryError {
    MetaNotFound(String),
}
impl Display for LibraryError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            LibraryError::MetaNotFound(id) => write!(
                f,
                "Could not locate any installed version of game ID {id} in the database"
            ),
        }
    }
}
@ -11,8 +11,7 @@ pub enum ProcessError {
|
||||
IOError(Error),
|
||||
FormatError(String), // String errors supremacy
|
||||
InvalidPlatform,
|
||||
OpenerError(tauri_plugin_opener::Error),
|
||||
InvalidArguments(String)
|
||||
OpenerError(tauri_plugin_opener::Error)
|
||||
}
|
||||
|
||||
impl Display for ProcessError {
|
||||
@ -24,9 +23,8 @@ impl Display for ProcessError {
|
||||
ProcessError::InvalidVersion => "Invalid game version",
|
||||
ProcessError::IOError(error) => &error.to_string(),
|
||||
ProcessError::InvalidPlatform => "This game cannot be played on the current platform",
|
||||
ProcessError::FormatError(e) => &format!("Could not format template: {e}"),
|
||||
ProcessError::OpenerError(error) => &format!("Could not open directory: {error}"),
|
||||
ProcessError::InvalidArguments(arguments) => &format!("Invalid arguments in command {arguments}"),
|
||||
ProcessError::FormatError(e) => &format!("Failed to format template: {e}"),
|
||||
ProcessError::OpenerError(error) => &format!("Failed to open directory: {error}"),
|
||||
};
|
||||
write!(f, "{s}")
|
||||
}
|
||||
@ -8,7 +8,7 @@ use http::StatusCode;
|
||||
use serde_with::SerializeDisplay;
|
||||
use url::ParseError;
|
||||
|
||||
use super::drop_server_error::DropServerError;
|
||||
use super::drop_server_error::ServerError;
|
||||
|
||||
#[derive(Debug, SerializeDisplay)]
|
||||
pub enum RemoteAccessError {
|
||||
@ -18,7 +18,7 @@ pub enum RemoteAccessError {
|
||||
InvalidEndpoint,
|
||||
HandshakeFailed(String),
|
||||
GameNotFound(String),
|
||||
InvalidResponse(DropServerError),
|
||||
InvalidResponse(ServerError),
|
||||
UnparseableResponse(String),
|
||||
ManifestDownloadFailed(StatusCode, String),
|
||||
OutOfSync,
|
||||
@ -44,7 +44,8 @@ impl Display for RemoteAccessError {
|
||||
error
|
||||
.source()
|
||||
.map(std::string::ToString::to_string)
|
||||
.unwrap_or("Unknown error".to_string())
|
||||
.or_else(|| Some("Unknown error".to_string()))
|
||||
.unwrap()
|
||||
)
|
||||
}
|
||||
RemoteAccessError::FetchErrorWS(error) => write!(
|
||||
@ -53,8 +54,9 @@ impl Display for RemoteAccessError {
|
||||
error,
|
||||
error
|
||||
.source()
|
||||
.map(std::string::ToString::to_string)
|
||||
.unwrap_or("Unknown error".to_string())
|
||||
.map(|e| e.to_string())
|
||||
.or_else(|| Some("Unknown error".to_string()))
|
||||
.unwrap()
|
||||
),
|
||||
RemoteAccessError::ParsingError(parse_error) => {
|
||||
write!(f, "{parse_error}")
|
||||
src-tauri/drop-library/Cargo.toml (new file, 11 lines)

@@ -0,0 +1,11 @@
[package]
name = "drop-library"
version = "0.1.0"
edition = "2024"

[dependencies]
drop-errors = { path = "../drop-errors" }
http = "*"
reqwest = { version = "*", default-features = false }
serde = { version = "*", default-features = false, features = ["derive"] }
tauri = "*"
src-tauri/drop-library/src/errors.rs (new file, 11 lines)

@@ -0,0 +1,11 @@
pub enum DropLibraryError {
    NetworkError(reqwest::Error),
    ServerError(drop_errors::drop_server_error::ServerError),
    Unconfigured,
}

impl From<reqwest::Error> for DropLibraryError {
    fn from(value: reqwest::Error) -> Self {
        DropLibraryError::NetworkError(value)
    }
}
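The `From<reqwest::Error>` impl above exists so that callers can use `?` on reqwest results and have network failures converted into `DropLibraryError` automatically. A short sketch (hypothetical helper, assuming an async context):

```rust
// Illustration only: `?` converts reqwest::Error into DropLibraryError
// via the From impl defined above.
async fn fetch_text(url: &str) -> Result<String, drop_library::errors::DropLibraryError> {
    let body = reqwest::get(url).await?.text().await?;
    Ok(body)
}
```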
30
src-tauri/drop-library/src/game.rs
Normal file
30
src-tauri/drop-library/src/game.rs
Normal file
@ -0,0 +1,30 @@
|
||||
use crate::libraries::LibraryProviderIdentifier;
|
||||
|
||||
pub struct LibraryGamePreview {
|
||||
pub library: LibraryProviderIdentifier,
|
||||
pub internal_id: String,
|
||||
pub name: String,
|
||||
pub short_description: String,
|
||||
pub icon: String,
|
||||
}
|
||||
|
||||
pub struct LibraryGame {
|
||||
pub library: LibraryProviderIdentifier,
|
||||
pub internal_id: String,
|
||||
pub name: String,
|
||||
pub short_description: String,
|
||||
pub md_description: String,
|
||||
pub icon: String,
|
||||
}
|
||||
|
||||
impl From<LibraryGame> for LibraryGamePreview {
|
||||
fn from(value: LibraryGame) -> Self {
|
||||
LibraryGamePreview {
|
||||
library: value.library,
|
||||
internal_id: value.internal_id,
|
||||
name: value.name,
|
||||
short_description: value.short_description,
|
||||
icon: value.icon,
|
||||
}
|
||||
}
|
||||
}
|
||||
src-tauri/drop-library/src/lib.rs (new file, 3 lines)

@@ -0,0 +1,3 @@
pub mod libraries;
pub mod game;
pub mod errors;
76
src-tauri/drop-library/src/libraries.rs
Normal file
76
src-tauri/drop-library/src/libraries.rs
Normal file
@ -0,0 +1,76 @@
|
||||
use std::{
|
||||
fmt::Display,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
};
|
||||
|
||||
use http::Request;
|
||||
use serde::{Deserialize, Serialize, de::DeserializeOwned};
|
||||
use tauri::UriSchemeResponder;
|
||||
|
||||
use crate::{
|
||||
errors::DropLibraryError,
|
||||
game::{LibraryGame, LibraryGamePreview},
|
||||
};
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct LibraryProviderIdentifier {
|
||||
internal_id: usize,
|
||||
name: String,
|
||||
}
|
||||
|
||||
impl PartialEq for LibraryProviderIdentifier {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.internal_id == other.internal_id
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for LibraryProviderIdentifier {}
|
||||
|
||||
impl Hash for LibraryProviderIdentifier {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.internal_id.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for LibraryProviderIdentifier {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(&self.name)
|
||||
}
|
||||
}
|
||||
|
||||
impl LibraryProviderIdentifier {
|
||||
pub fn str_hash(&self) -> String {
|
||||
let mut hasher = DefaultHasher::new();
|
||||
self.hash(&mut hasher);
|
||||
hasher.finish().to_string()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct LibraryFetchConfig {
|
||||
pub hard_refresh: bool,
|
||||
}
|
||||
|
||||
pub trait DropLibraryProvider: Serialize + DeserializeOwned + Sized {
|
||||
fn build(identifier: LibraryProviderIdentifier) -> Self;
|
||||
fn id(&self) -> &LibraryProviderIdentifier;
|
||||
fn load_object(
|
||||
&self,
|
||||
request: Request<Vec<u8>>,
|
||||
responder: UriSchemeResponder,
|
||||
) -> impl Future<Output = Result<(), DropLibraryError>> + Send;
|
||||
|
||||
fn fetch_library(
|
||||
&self,
|
||||
config: &LibraryFetchConfig,
|
||||
) -> impl Future<Output = Result<Vec<LibraryGamePreview>, DropLibraryError>> + Send;
|
||||
fn fetch_game(
|
||||
&self,
|
||||
config: &LibraryFetchConfig,
|
||||
) -> impl Future<Output = Result<LibraryGame, DropLibraryError>> + Send;
|
||||
|
||||
|
||||
|
||||
fn owns_game(&self, id: &LibraryProviderIdentifier) -> bool {
|
||||
self.id().internal_id == id.internal_id
|
||||
}
|
||||
}
|
||||
src-tauri/drop-native-library/Cargo.toml (new file, 14 lines)

@@ -0,0 +1,14 @@
[package]
name = "drop-native-library"
version = "0.1.0"
edition = "2024"

[dependencies]
bitcode = "*"
drop-errors = { path = "../drop-errors" }
drop-library = { path = "../drop-library" }
drop-remote = { path = "../drop-remote" }
log = "*"
serde = { version = "*", features = ["derive"] }
tauri = "*"
url = "*"
@ -1,8 +1,7 @@
|
||||
use bitcode::{Decode, Encode};
|
||||
// use drop_database::runtime_models::Game;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::games::library::Game;
|
||||
|
||||
pub type Collections = Vec<Collection>;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Default, Encode, Decode)]
|
||||
11
src-tauri/drop-native-library/src/events.rs
Normal file
11
src-tauri/drop-native-library/src/events.rs
Normal file
@ -0,0 +1,11 @@
|
||||
use drop_database::models::data::{ApplicationTransientStatus, GameDownloadStatus, GameVersion};
|
||||
|
||||
#[derive(serde::Serialize, Clone)]
|
||||
pub struct GameUpdateEvent {
|
||||
pub game_id: String,
|
||||
pub status: (
|
||||
Option<GameDownloadStatus>,
|
||||
Option<ApplicationTransientStatus>,
|
||||
),
|
||||
pub version: Option<GameVersion>,
|
||||
}
|
||||
50
src-tauri/drop-native-library/src/impls.rs
Normal file
50
src-tauri/drop-native-library/src/impls.rs
Normal file
@ -0,0 +1,50 @@
|
||||
use drop_library::{
|
||||
errors::DropLibraryError, game::{LibraryGame, LibraryGamePreview}, libraries::{DropLibraryProvider, LibraryFetchConfig, LibraryProviderIdentifier}
|
||||
};
|
||||
use drop_remote::{fetch_object::fetch_object, DropRemoteContext};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct DropNativeLibraryProvider {
|
||||
identifier: LibraryProviderIdentifier,
|
||||
context: Option<DropRemoteContext>,
|
||||
}
|
||||
|
||||
impl DropNativeLibraryProvider {
|
||||
pub fn configure(&mut self, base_url: Url) {
|
||||
self.context = Some(DropRemoteContext::new(base_url));
|
||||
}
|
||||
}
|
||||
|
||||
impl DropLibraryProvider for DropNativeLibraryProvider {
|
||||
fn build(identifier: LibraryProviderIdentifier) -> Self {
|
||||
Self {
|
||||
identifier,
|
||||
context: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn id(&self) -> &LibraryProviderIdentifier {
|
||||
&self.identifier
|
||||
}
|
||||
|
||||
async fn load_object(&self, request: tauri::http::Request<Vec<u8>>, responder: tauri::UriSchemeResponder) -> Result<(), DropLibraryError> {
|
||||
let context = self.context.as_ref().ok_or(DropLibraryError::Unconfigured)?;
|
||||
fetch_object(context, request, responder).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn fetch_library(
|
||||
&self,
|
||||
config: &LibraryFetchConfig
|
||||
) -> Result<Vec<LibraryGamePreview>, DropLibraryError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
async fn fetch_game(&self, config: &LibraryFetchConfig) -> Result<LibraryGame, DropLibraryError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
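To make the `configure` step and the two `todo!()` stubs above easier to follow, here is a small hedged sketch (not part of this changeset) of how a caller would be expected to drive the new provider; the server URL and the wrapper function are placeholders introduced only for illustration.

```rust
use drop_library::libraries::{DropLibraryProvider, LibraryProviderIdentifier};
use drop_native_library::impls::DropNativeLibraryProvider;
use url::Url;

// Hypothetical wiring, based only on the trait and impl shown above.
fn make_native_provider(identifier: LibraryProviderIdentifier) -> DropNativeLibraryProvider {
    // `build` constructs the provider with no remote context yet.
    let mut provider = DropNativeLibraryProvider::build(identifier);
    // Until `configure` is called, `load_object` fails with DropLibraryError::Unconfigured.
    provider.configure(Url::parse("https://drop.example.com").expect("placeholder URL parses"));
    provider
}
```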
src-tauri/drop-native-library/src/lib.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
//pub mod collections;
//pub mod library;
//pub mod state;
//pub mod events;
pub mod impls;
@@ -1,32 +1,34 @@
use std::fs::remove_dir_all;
use std::sync::Mutex;
use std::thread::spawn;

use drop_database::borrow_db_checked;
use drop_database::borrow_db_mut_checked;
use drop_database::models::data::ApplicationTransientStatus;
use drop_database::models::data::Database;
use drop_database::models::data::DownloadableMetadata;
use drop_database::models::data::GameDownloadStatus;
use drop_database::models::data::GameVersion;
use drop_database::runtime_models::Game;
use drop_errors::drop_server_error::ServerError;
use drop_errors::library_error::LibraryError;
use drop_errors::remote_access_error::RemoteAccessError;
use drop_remote::DropRemoteContext;
use drop_remote::auth::generate_authorization_header;
use drop_remote::cache::cache_object;
use drop_remote::cache::cache_object_db;
use drop_remote::cache::get_cached_object;
use drop_remote::cache::get_cached_object_db;
use drop_remote::requests::generate_url;
use drop_remote::utils::DROP_CLIENT_ASYNC;
use drop_remote::utils::DROP_CLIENT_SYNC;
use log::{debug, error, warn};
use serde::{Deserialize, Serialize};
use tauri::AppHandle;
use tauri::Emitter;
use tauri::Emitter as _;

use crate::AppState;
use crate::app_emit;
use crate::database::db::{borrow_db_checked, borrow_db_mut_checked};
use crate::database::models::data::Database;
use crate::database::models::data::{
    ApplicationTransientStatus, DownloadableMetadata, GameDownloadStatus, GameVersion,
};
use crate::download_manager::download_manager_frontend::DownloadStatus;
use crate::error::drop_server_error::DropServerError;
use crate::error::library_error::LibraryError;
use crate::error::remote_access_error::RemoteAccessError;
use crate::games::state::{GameStatusManager, GameStatusWithTransient};
use crate::lock;
use crate::remote::auth::generate_authorization_header;
use crate::remote::cache::cache_object_db;
use crate::remote::cache::{cache_object, get_cached_object, get_cached_object_db};
use crate::remote::requests::generate_url;
use crate::remote::utils::DROP_CLIENT_ASYNC;
use crate::remote::utils::DROP_CLIENT_SYNC;
use bitcode::{Decode, Encode};
use crate::events::GameUpdateEvent;
use crate::state::GameStatusManager;
use crate::state::GameStatusWithTransient;

#[derive(Serialize, Deserialize, Debug)]
pub struct FetchGameStruct {
@@ -35,53 +37,8 @@ pub struct FetchGameStruct
    version: Option<GameVersion>,
}

#[derive(Serialize, Deserialize, Clone, Debug, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct Game {
    id: String,
    m_name: String,
    m_short_description: String,
    m_description: String,
    // mDevelopers
    // mPublishers
    m_icon_object_id: String,
    m_banner_object_id: String,
    m_cover_object_id: String,
    m_image_library_object_ids: Vec<String>,
    m_image_carousel_object_ids: Vec<String>,
}
#[derive(serde::Serialize, Clone)]
pub struct GameUpdateEvent {
    pub game_id: String,
    pub status: (
        Option<GameDownloadStatus>,
        Option<ApplicationTransientStatus>,
    ),
    pub version: Option<GameVersion>,
}

#[derive(Serialize, Clone)]
pub struct QueueUpdateEventQueueData {
    pub meta: DownloadableMetadata,
    pub status: DownloadStatus,
    pub progress: f64,
    pub current: usize,
    pub max: usize,
}

#[derive(serde::Serialize, Clone)]
pub struct QueueUpdateEvent {
    pub queue: Vec<QueueUpdateEventQueueData>,
}

#[derive(serde::Serialize, Clone)]
pub struct StatsUpdateEvent {
    pub speed: usize,
    pub time: usize,
}

pub async fn fetch_library_logic(
    state: tauri::State<'_, Mutex<AppState<'_>>>,
    context: &DropRemoteContext,
    hard_fresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
    let do_hard_refresh = hard_fresh.unwrap_or(false);
@@ -90,15 +47,15 @@ pub async fn fetch_library_logic(
    }

    let client = DROP_CLIENT_ASYNC.clone();
    let response = generate_url(&["/api/v1/client/user/library"], &[])?;
    let response = generate_url(context, &["/api/v1/client/user/library"], &[])?;
    let response = client
        .get(response)
        .header("Authorization", generate_authorization_header())
        .header("Authorization", generate_authorization_header(context))
        .send()
        .await?;

    if response.status() != 200 {
        let err = response.json().await.unwrap_or(DropServerError {
        let err = response.json().await.unwrap_or(ServerError {
            status_code: 500,
            status_message: "Invalid response from server.".to_owned(),
        });
@@ -108,12 +65,13 @@ pub async fn fetch_library_logic(

    let mut games: Vec<Game> = response.json().await?;

    let mut handle = lock!(state);

    let mut db_handle = borrow_db_mut_checked();

    for game in &games {
        handle.games.insert(game.id.clone(), game.clone());
        db_handle
            .applications
            .games
            .insert(game.id.clone(), game.clone());
        if !db_handle.applications.game_statuses.contains_key(&game.id) {
            db_handle
                .applications
@@ -129,7 +87,7 @@ pub async fn fetch_library_logic(
        }
        // We should always have a cache of the object
        // Pass db_handle because otherwise we get a gridlock
        let game = match get_cached_object_db::<Game>(&meta.id.clone(), &db_handle) {
        let game = match get_cached_object_db::<Game>(&meta.id.clone()) {
            Ok(game) => game,
            Err(err) => {
                warn!(
@@ -142,14 +100,12 @@ pub async fn fetch_library_logic(
        games.push(game);
    }

    drop(handle);
    drop(db_handle);
    cache_object("library", &games)?;

    Ok(games)
}
pub async fn fetch_library_logic_offline(
    _state: tauri::State<'_, Mutex<AppState<'_>>>,
    _hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
    let mut games: Vec<Game> = get_cached_object("library")?;
@@ -170,12 +126,10 @@ pub async fn fetch_library_logic_offline(
    Ok(games)
}
pub async fn fetch_game_logic(
    context: &DropRemoteContext,
    id: String,
    state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
    let version = {
        let state_handle = lock!(state);

        let db_lock = borrow_db_checked();

        let metadata_option = db_lock.applications.installed_game_version.get(&id);
@@ -189,7 +143,7 @@ pub async fn fetch_game_logic(
                .cloned(),
        };

        let game = state_handle.games.get(&id);
        let game = db_lock.applications.games.get(&id);
        if let Some(game) = game {
            let status = GameStatusManager::fetch_state(&id, &db_lock);

@@ -208,15 +162,15 @@ pub async fn fetch_game_logic(
    };

    let client = DROP_CLIENT_ASYNC.clone();
    let response = generate_url(&["/api/v1/client/game/", &id], &[])?;
    let response = generate_url(context, &["/api/v1/client/game/", &id], &[])?;
    let response = client
        .get(response)
        .header("Authorization", generate_authorization_header())
        .header("Authorization", generate_authorization_header(context))
        .send()
        .await?;

    if response.status() == 404 {
        let offline_fetch = fetch_game_logic_offline(id.clone(), state).await;
        let offline_fetch = fetch_game_logic_offline(id.clone()).await;
        if let Ok(fetch_data) = offline_fetch {
            return Ok(fetch_data);
        }
@@ -224,17 +178,18 @@ pub async fn fetch_game_logic(
        return Err(RemoteAccessError::GameNotFound(id));
    }
    if response.status() != 200 {
        let err = response.json().await?;
        let err = response.json().await.unwrap();
        warn!("{err:?}");
        return Err(RemoteAccessError::InvalidResponse(err));
    }

    let game: Game = response.json().await?;

    let mut state_handle = lock!(state);
    state_handle.games.insert(id.clone(), game.clone());

    let mut db_handle = borrow_db_mut_checked();
    db_handle
        .applications
        .games
        .insert(id.clone(), game.clone());

    db_handle
        .applications
@@ -257,10 +212,7 @@ pub async fn fetch_game_logic(
    Ok(data)
}

pub async fn fetch_game_logic_offline(
    id: String,
    _state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
pub async fn fetch_game_logic_offline(id: String) -> Result<FetchGameStruct, RemoteAccessError> {
    let db_handle = borrow_db_checked();
    let metadata_option = db_handle.applications.installed_game_version.get(&id);
    let version = match metadata_option {
@@ -286,35 +238,30 @@ pub async fn fetch_game_logic_offline(
}

pub async fn fetch_game_version_options_logic(
    context: &DropRemoteContext,
    game_id: String,
    state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
    let client = DROP_CLIENT_ASYNC.clone();

    let response = generate_url(&["/api/v1/client/game/versions"], &[("id", &game_id)])?;
    let response = generate_url(
        context,
        &["/api/v1/client/game/versions"],
        &[("id", &game_id)],
    )?;
    let response = client
        .get(response)
        .header("Authorization", generate_authorization_header())
        .header("Authorization", generate_authorization_header(context))
        .send()
        .await?;

    if response.status() != 200 {
        let err = response.json().await?;
        let err = response.json().await.unwrap();
        warn!("{err:?}");
        return Err(RemoteAccessError::InvalidResponse(err));
    }

    let data: Vec<GameVersion> = response.json().await?;

    let state_lock = lock!(state);
    let process_manager_lock = lock!(state_lock.process_manager);
    let data: Vec<GameVersion> = data
        .into_iter()
        .filter(|v| process_manager_lock.valid_platform(&v.platform, &state_lock))
        .collect();
    drop(process_manager_lock);
    drop(state_lock);

    Ok(data)
}

@ -377,13 +324,11 @@ pub fn uninstall_game_logic(meta: DownloadableMetadata, app_handle: &AppHandle)
|
||||
);
|
||||
|
||||
let previous_state = db_handle.applications.game_statuses.get(&meta.id).cloned();
|
||||
|
||||
let previous_state = if let Some(state) = previous_state {
|
||||
state
|
||||
} else {
|
||||
if previous_state.is_none() {
|
||||
warn!("uninstall job doesn't have previous state, failing silently");
|
||||
return;
|
||||
};
|
||||
}
|
||||
let previous_state = previous_state.unwrap();
|
||||
|
||||
if let Some((_, install_dir)) = match previous_state {
|
||||
GameDownloadStatus::Installed {
|
||||
@ -432,7 +377,7 @@ pub fn uninstall_game_logic(meta: DownloadableMetadata, app_handle: &AppHandle)
|
||||
);
|
||||
|
||||
debug!("uninstalled game id {}", &meta.id);
|
||||
app_emit!(app_handle, "update_library", ());
|
||||
app_handle.emit("update_library", ()).unwrap();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
@ -449,6 +394,7 @@ pub fn get_current_meta(game_id: &String) -> Option<DownloadableMetadata> {
|
||||
}
|
||||
|
||||
pub fn on_game_complete(
|
||||
context: &DropRemoteContext,
|
||||
meta: &DownloadableMetadata,
|
||||
install_dir: String,
|
||||
app_handle: &AppHandle,
|
||||
@ -460,6 +406,7 @@ pub fn on_game_complete(
|
||||
|
||||
let client = DROP_CLIENT_SYNC.clone();
|
||||
let response = generate_url(
|
||||
context,
|
||||
&["/api/v1/client/game/version"],
|
||||
&[
|
||||
("id", &meta.id),
|
||||
@ -468,7 +415,7 @@ pub fn on_game_complete(
|
||||
)?;
|
||||
let response = client
|
||||
.get(response)
|
||||
.header("Authorization", generate_authorization_header())
|
||||
.header("Authorization", generate_authorization_header(context))
|
||||
.send()?;
|
||||
|
||||
let game_version: GameVersion = response.json()?;
|
||||
@ -505,15 +452,17 @@ pub fn on_game_complete(
|
||||
.game_statuses
|
||||
.insert(meta.id.clone(), status.clone());
|
||||
drop(db_handle);
|
||||
app_emit!(
|
||||
app_handle,
|
||||
&format!("update_game/{}", meta.id),
|
||||
GameUpdateEvent {
|
||||
game_id: meta.id.clone(),
|
||||
status: (Some(status), None),
|
||||
version: Some(game_version),
|
||||
}
|
||||
);
|
||||
|
||||
app_handle
|
||||
.emit(
|
||||
&format!("update_game/{}", meta.id),
|
||||
GameUpdateEvent {
|
||||
game_id: meta.id.clone(),
|
||||
status: (Some(status), None),
|
||||
version: Some(game_version),
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@ -531,58 +480,14 @@ pub fn push_game_update(
|
||||
panic!("pushed game for installed game that doesn't have version information");
|
||||
}
|
||||
|
||||
app_emit!(
|
||||
app_handle,
|
||||
&format!("update_game/{game_id}"),
|
||||
GameUpdateEvent {
|
||||
game_id: game_id.clone(),
|
||||
status,
|
||||
version,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct FrontendGameOptions {
|
||||
launch_string: String,
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn update_game_configuration(
|
||||
game_id: String,
|
||||
options: FrontendGameOptions,
|
||||
) -> Result<(), LibraryError> {
|
||||
let mut handle = borrow_db_mut_checked();
|
||||
let installed_version = handle
|
||||
.applications
|
||||
.installed_game_version
|
||||
.get(&game_id)
|
||||
.ok_or(LibraryError::MetaNotFound(game_id))?;
|
||||
|
||||
let id = installed_version.id.clone();
|
||||
let version = installed_version.version.clone().ok_or(LibraryError::VersionNotFound(id.clone()))?;
|
||||
|
||||
let mut existing_configuration = handle
|
||||
.applications
|
||||
.game_versions
|
||||
.get(&id)
|
||||
.unwrap()
|
||||
.get(&version)
|
||||
.unwrap()
|
||||
.clone();
|
||||
|
||||
// Add more options in here
|
||||
existing_configuration.launch_command_template = options.launch_string;
|
||||
|
||||
// Add no more options past here
|
||||
|
||||
handle
|
||||
.applications
|
||||
.game_versions
|
||||
.get_mut(&id)
|
||||
.unwrap()
|
||||
.insert(version.to_string(), existing_configuration);
|
||||
|
||||
Ok(())
|
||||
app_handle
|
||||
.emit(
|
||||
&format!("update_game/{game_id}"),
|
||||
GameUpdateEvent {
|
||||
game_id: game_id.clone(),
|
||||
status,
|
||||
version,
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
@@ -1,6 +1,4 @@
use crate::database::models::data::{
    ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus,
};
// use drop_database::models::data::{ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus};

pub type GameStatusWithTransient = (
    Option<GameDownloadStatus>,
src-tauri/drop-process/Cargo.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
[package]
name = "drop-process"
version = "0.1.0"
edition = "2024"

[dependencies]
chrono = "0.4.42"
drop-database = { path = "../drop-database" }
drop-errors = { path = "../drop-errors" }
drop-native-library = { path = "../drop-native-library" }
dynfmt = { version = "0.1.5", features = ["curly"] }
log = "0.4.28"
page_size = "0.6.0"
shared_child = "1.1.1"
sysinfo = "0.37.0"
tauri = "2.8.5"
tauri-plugin-opener = "2.5.0"
src-tauri/drop-process/src/lib.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
mod format;
mod process_handlers;
pub mod process_manager;
pub mod utils;
@ -5,14 +5,11 @@ use std::{
|
||||
sync::LazyLock,
|
||||
};
|
||||
|
||||
use drop_database::{models::data::{Database, DownloadableMetadata, GameVersion}, process::Platform};
|
||||
use log::{debug, info};
|
||||
|
||||
use crate::{
|
||||
AppState,
|
||||
database::models::data::{Database, DownloadableMetadata, GameVersion},
|
||||
error::process_error::ProcessError,
|
||||
process::process_manager::{Platform, ProcessHandler},
|
||||
};
|
||||
use crate::process_manager::ProcessHandler;
|
||||
|
||||
|
||||
pub struct NativeGameLauncher;
|
||||
impl ProcessHandler for NativeGameLauncher {
|
||||
@ -23,11 +20,11 @@ impl ProcessHandler for NativeGameLauncher {
|
||||
args: Vec<String>,
|
||||
_game_version: &GameVersion,
|
||||
_current_dir: &str,
|
||||
) -> Result<String, ProcessError> {
|
||||
Ok(format!("\"{}\" {}", launch_command, args.join(" ")))
|
||||
) -> String {
|
||||
format!("\"{}\" {}", launch_command, args.join(" "))
|
||||
}
|
||||
|
||||
fn valid_for_platform(&self, _db: &Database, _state: &AppState, _target: &Platform) -> bool {
|
||||
fn valid_for_platform(&self, _db: &Database, _target: &Platform) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
@ -66,7 +63,7 @@ impl ProcessHandler for UMULauncher {
|
||||
args: Vec<String>,
|
||||
game_version: &GameVersion,
|
||||
_current_dir: &str,
|
||||
) -> Result<String, ProcessError> {
|
||||
) -> String {
|
||||
debug!("Game override: \"{:?}\"", &game_version.umu_id_override);
|
||||
let game_id = match &game_version.umu_id_override {
|
||||
Some(game_override) => {
|
||||
@ -78,19 +75,16 @@ impl ProcessHandler for UMULauncher {
|
||||
}
|
||||
None => game_version.game_id.clone(),
|
||||
};
|
||||
Ok(format!(
|
||||
format!(
|
||||
"GAMEID={game_id} {umu:?} \"{launch}\" {args}",
|
||||
umu = UMU_LAUNCHER_EXECUTABLE.as_ref().expect("Failed to get UMU_LAUNCHER_EXECUTABLE as ref"),
|
||||
umu = UMU_LAUNCHER_EXECUTABLE.as_ref().unwrap(),
|
||||
launch = launch_command,
|
||||
args = args.join(" ")
|
||||
))
|
||||
)
|
||||
}
|
||||
|
||||
fn valid_for_platform(&self, _db: &Database, state: &AppState, _target: &Platform) -> bool {
|
||||
let Some(ref compat_info) = state.compat_info else {
|
||||
return false;
|
||||
};
|
||||
compat_info.umu_installed
|
||||
fn valid_for_platform(&self, _db: &Database, _target: &Platform) -> bool {
|
||||
UMU_LAUNCHER_EXECUTABLE.is_some()
|
||||
}
|
||||
}
|
||||
|
||||
@ -103,7 +97,7 @@ impl ProcessHandler for AsahiMuvmLauncher {
|
||||
args: Vec<String>,
|
||||
game_version: &GameVersion,
|
||||
current_dir: &str,
|
||||
) -> Result<String, ProcessError> {
|
||||
) -> String {
|
||||
let umu_launcher = UMULauncher {};
|
||||
let umu_string = umu_launcher.create_launch_process(
|
||||
meta,
|
||||
@ -111,23 +105,20 @@ impl ProcessHandler for AsahiMuvmLauncher {
|
||||
args,
|
||||
game_version,
|
||||
current_dir,
|
||||
)?;
|
||||
);
|
||||
let mut args_cmd = umu_string
|
||||
.split("umu-run")
|
||||
.collect::<Vec<&str>>()
|
||||
.into_iter();
|
||||
let args = args_cmd
|
||||
.next()
|
||||
.ok_or(ProcessError::InvalidArguments(umu_string.clone()))?
|
||||
.trim();
|
||||
let cmd = format!("umu-run{}", args_cmd.next().ok_or(ProcessError::InvalidArguments(umu_string.clone()))?);
|
||||
let args = args_cmd.next().unwrap().trim();
|
||||
let cmd = format!("umu-run{}", args_cmd.next().unwrap());
|
||||
|
||||
Ok(format!("{args} muvm -- {cmd}"))
|
||||
format!("{args} muvm -- {cmd}")
|
||||
}
|
||||
|
||||
#[allow(unreachable_code)]
|
||||
#[allow(unused_variables)]
|
||||
fn valid_for_platform(&self, _db: &Database, state: &AppState, _target: &Platform) -> bool {
|
||||
fn valid_for_platform(&self, _db: &Database, _target: &Platform) -> bool {
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
return false;
|
||||
|
||||
@ -139,10 +130,6 @@ impl ProcessHandler for AsahiMuvmLauncher {
|
||||
return false;
|
||||
}
|
||||
|
||||
let Some(ref compat_info) = state.compat_info else {
|
||||
return false;
|
||||
};
|
||||
|
||||
compat_info.umu_installed
|
||||
UMU_LAUNCHER_EXECUTABLE.is_some()
|
||||
}
|
||||
}
|
||||
@ -10,31 +10,17 @@ use std::{
|
||||
time::{Duration, SystemTime},
|
||||
};
|
||||
|
||||
use drop_database::{borrow_db_checked, borrow_db_mut_checked, db::DATA_ROOT_DIR, models::data::{ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus, GameVersion}, process::Platform, DB};
|
||||
use drop_errors::process_error::ProcessError;
|
||||
use drop_native_library::{library::push_game_update, state::GameStatusManager};
|
||||
use dynfmt::Format;
|
||||
use dynfmt::SimpleCurlyFormat;
|
||||
use log::{debug, info, warn};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use shared_child::SharedChild;
|
||||
use tauri::{AppHandle, Emitter, Manager};
|
||||
use tauri::{AppHandle, Emitter};
|
||||
use tauri_plugin_opener::OpenerExt;
|
||||
|
||||
use crate::{
|
||||
AppState,
|
||||
database::{
|
||||
db::{DATA_ROOT_DIR, borrow_db_checked, borrow_db_mut_checked},
|
||||
models::data::{
|
||||
ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata,
|
||||
GameDownloadStatus, GameVersion,
|
||||
},
|
||||
},
|
||||
error::process_error::ProcessError,
|
||||
games::{library::push_game_update, state::GameStatusManager},
|
||||
process::{
|
||||
format::DropFormatArgs,
|
||||
process_handlers::{AsahiMuvmLauncher, NativeGameLauncher, UMULauncher},
|
||||
},
|
||||
lock,
|
||||
};
|
||||
use crate::{format::DropFormatArgs, process_handlers::{AsahiMuvmLauncher, NativeGameLauncher, UMULauncher}};
|
||||
|
||||
pub struct RunningProcess {
|
||||
handle: Arc<SharedChild>,
|
||||
@ -119,7 +105,7 @@ impl ProcessManager<'_> {
|
||||
let dir = self.get_log_dir(game_id);
|
||||
self.app_handle
|
||||
.opener()
|
||||
.open_path(dir.display().to_string(), None::<&str>)
|
||||
.open_path(dir.to_str().unwrap(), None::<&str>)
|
||||
.map_err(ProcessError::OpenerError)?;
|
||||
Ok(())
|
||||
}
|
||||
@ -134,13 +120,7 @@ impl ProcessManager<'_> {
|
||||
|
||||
debug!("process for {:?} exited with {:?}", &game_id, result);
|
||||
|
||||
let process = match self.processes.remove(&game_id) {
|
||||
Some(process) => process,
|
||||
None => {
|
||||
info!("Attempted to stop process {game_id} which didn't exist");
|
||||
return;
|
||||
}
|
||||
};
|
||||
let process = self.processes.remove(&game_id).unwrap();
|
||||
|
||||
let mut db_handle = borrow_db_mut_checked();
|
||||
let meta = db_handle
|
||||
@ -148,7 +128,7 @@ impl ProcessManager<'_> {
|
||||
.installed_game_version
|
||||
.get(&game_id)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| panic!("Could not get installed version of {}", &game_id));
|
||||
.unwrap();
|
||||
db_handle.applications.transient_statuses.remove(&meta);
|
||||
|
||||
let current_state = db_handle.applications.game_statuses.get(&game_id).cloned();
|
||||
@ -173,17 +153,20 @@ impl ProcessManager<'_> {
|
||||
// Or if the status isn't 0
|
||||
// Or if it's an error
|
||||
if !process.manually_killed
|
||||
&& (elapsed.as_secs() <= 2 || result.map_or(true, |r| !r.success()))
|
||||
&& (elapsed.as_secs() <= 2 || result.is_err() || !result.unwrap().success())
|
||||
{
|
||||
warn!("drop detected that the game {game_id} may have failed to launch properly");
|
||||
let _ = self.app_handle.emit("launch_external_error", &game_id);
|
||||
}
|
||||
|
||||
let version_data = match db_handle.applications.game_versions.get(&game_id) {
|
||||
// This unwrap here should be resolved by just making the hashmap accept an option rather than just a String
|
||||
Some(res) => res.get(&meta.version.unwrap()).expect("Failed to get game version from installed game versions. Is the database corrupted?"),
|
||||
None => todo!(),
|
||||
};
|
||||
// This is too many unwraps for me to be comfortable
|
||||
let version_data = db_handle
|
||||
.applications
|
||||
.game_versions
|
||||
.get(&game_id)
|
||||
.unwrap()
|
||||
.get(&meta.version.unwrap())
|
||||
.unwrap();
|
||||
|
||||
let status = GameStatusManager::fetch_state(&game_id, &db_handle);
|
||||
|
||||
@ -198,7 +181,6 @@ impl ProcessManager<'_> {
|
||||
fn fetch_process_handler(
|
||||
&self,
|
||||
db_lock: &Database,
|
||||
state: &AppState,
|
||||
target_platform: &Platform,
|
||||
) -> Result<&(dyn ProcessHandler + Send + Sync), ProcessError> {
|
||||
Ok(self
|
||||
@ -208,28 +190,30 @@ impl ProcessManager<'_> {
|
||||
let (e_current, e_target) = e.0;
|
||||
e_current == self.current_platform
|
||||
&& e_target == *target_platform
|
||||
&& e.1.valid_for_platform(db_lock, state, target_platform)
|
||||
&& e.1.valid_for_platform(db_lock, target_platform)
|
||||
})
|
||||
.ok_or(ProcessError::InvalidPlatform)?
|
||||
.1)
|
||||
}
|
||||
|
||||
pub fn valid_platform(&self, platform: &Platform, state: &AppState) -> bool {
|
||||
pub fn valid_platform(&self, platform: &Platform,) -> Result<bool, String> {
|
||||
let db_lock = borrow_db_checked();
|
||||
let process_handler = self.fetch_process_handler(&db_lock, state, platform);
|
||||
process_handler.is_ok()
|
||||
let process_handler = self.fetch_process_handler(&db_lock, platform);
|
||||
Ok(process_handler.is_ok())
|
||||
}
|
||||
|
||||
pub fn launch_process(
|
||||
&mut self,
|
||||
game_id: String,
|
||||
state: &AppState,
|
||||
process_manager_lock: &'static Mutex<ProcessManager<'static>>,
|
||||
) -> Result<(), ProcessError> {
|
||||
if self.processes.contains_key(&game_id) {
|
||||
return Err(ProcessError::AlreadyRunning);
|
||||
}
|
||||
|
||||
let version = match borrow_db_checked()
|
||||
let version = match DB
|
||||
.borrow_data()
|
||||
.unwrap()
|
||||
.applications
|
||||
.game_statuses
|
||||
.get(&game_id)
|
||||
@ -268,7 +252,7 @@ impl ProcessManager<'_> {
|
||||
debug!(
|
||||
"Launching process {:?} with version {:?}",
|
||||
&game_id,
|
||||
db_lock.applications.game_versions.get(&game_id)
|
||||
db_lock.applications.game_versions.get(&game_id).unwrap()
|
||||
);
|
||||
|
||||
let game_version = db_lock
|
||||
@ -306,7 +290,7 @@ impl ProcessManager<'_> {
|
||||
|
||||
let target_platform = game_version.platform;
|
||||
|
||||
let process_handler = self.fetch_process_handler(&db_lock, state, &target_platform)?;
|
||||
let process_handler = self.fetch_process_handler(&db_lock, &target_platform)?;
|
||||
|
||||
let (launch, args) = match game_status {
|
||||
GameDownloadStatus::Installed {
|
||||
@ -324,9 +308,8 @@ impl ProcessManager<'_> {
|
||||
GameDownloadStatus::Remote {} => unreachable!("Game registered as 'Remote'"),
|
||||
};
|
||||
|
||||
#[allow(clippy::unwrap_used)]
|
||||
let launch = PathBuf::from_str(install_dir).unwrap().join(launch);
|
||||
let launch = launch.display().to_string();
|
||||
let launch = launch.to_str().unwrap();
|
||||
|
||||
let launch_string = process_handler.create_launch_process(
|
||||
&meta,
|
||||
@ -334,7 +317,7 @@ impl ProcessManager<'_> {
|
||||
args.clone(),
|
||||
game_version,
|
||||
install_dir,
|
||||
)?;
|
||||
);
|
||||
|
||||
let format_args = DropFormatArgs::new(
|
||||
launch_string,
|
||||
@ -388,25 +371,17 @@ impl ProcessManager<'_> {
|
||||
);
|
||||
|
||||
let wait_thread_handle = launch_process_handle.clone();
|
||||
let wait_thread_apphandle = self.app_handle.clone();
|
||||
let wait_thread_game_id = meta.clone();
|
||||
|
||||
spawn(move || {
|
||||
let result: Result<ExitStatus, std::io::Error> = launch_process_handle.wait();
|
||||
|
||||
let app_state = wait_thread_apphandle.state::<Mutex<AppState>>();
|
||||
let app_state_handle = lock!(app_state);
|
||||
|
||||
let mut process_manager_handle = app_state_handle
|
||||
.process_manager
|
||||
.lock()
|
||||
.expect("Failed to lock onto process manager");
|
||||
let mut process_manager_handle = process_manager_lock.lock().unwrap();
|
||||
process_manager_handle.on_process_finish(wait_thread_game_id.id, result);
|
||||
|
||||
// As everything goes out of scope, they should get dropped
|
||||
// But just to explicit about it
|
||||
drop(process_manager_handle);
|
||||
drop(app_state_handle);
|
||||
});
|
||||
|
||||
self.processes.insert(
|
||||
@ -421,51 +396,6 @@ impl ProcessManager<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Eq, Hash, PartialEq, Serialize, Deserialize, Clone, Copy, Debug)]
|
||||
pub enum Platform {
|
||||
Windows,
|
||||
Linux,
|
||||
MacOs,
|
||||
}
|
||||
|
||||
impl Platform {
|
||||
#[cfg(target_os = "windows")]
|
||||
pub const HOST: Platform = Self::Windows;
|
||||
#[cfg(target_os = "macos")]
|
||||
pub const HOST: Platform = Self::MacOs;
|
||||
#[cfg(target_os = "linux")]
|
||||
pub const HOST: Platform = Self::Linux;
|
||||
|
||||
pub fn is_case_sensitive(&self) -> bool {
|
||||
match self {
|
||||
Self::Windows | Self::MacOs => false,
|
||||
Self::Linux => true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for Platform {
|
||||
fn from(value: &str) -> Self {
|
||||
match value.to_lowercase().trim() {
|
||||
"windows" => Self::Windows,
|
||||
"linux" => Self::Linux,
|
||||
"mac" | "macos" => Self::MacOs,
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<whoami::Platform> for Platform {
|
||||
fn from(value: whoami::Platform) -> Self {
|
||||
match value {
|
||||
whoami::Platform::Windows => Platform::Windows,
|
||||
whoami::Platform::Linux => Platform::Linux,
|
||||
whoami::Platform::MacOS => Platform::MacOs,
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ProcessHandler: Send + 'static {
|
||||
fn create_launch_process(
|
||||
&self,
|
||||
@ -474,7 +404,7 @@ pub trait ProcessHandler: Send + 'static {
|
||||
args: Vec<String>,
|
||||
game_version: &GameVersion,
|
||||
current_dir: &str,
|
||||
) -> Result<String, ProcessError>;
|
||||
) -> String;
|
||||
|
||||
fn valid_for_platform(&self, db: &Database, state: &AppState, target: &Platform) -> bool;
|
||||
fn valid_for_platform(&self, db: &Database, target: &Platform) -> bool;
|
||||
}
|
||||
@ -1,10 +1,8 @@
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use std::{io, path::PathBuf, sync::Arc};
|
||||
|
||||
use futures_lite::io;
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
use sysinfo::{Disk, DiskRefreshKind, Disks};
|
||||
|
||||
use crate::error::application_download_error::ApplicationDownloadError;
|
||||
|
||||
pub fn get_disk_available(mount_point: PathBuf) -> Result<u64, ApplicationDownloadError> {
|
||||
let disks = Disks::new_with_refreshed_list_specifics(DiskRefreshKind::nothing().with_storage());
|
||||
|
||||
src-tauri/drop-remote/Cargo.toml (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
[package]
|
||||
name = "drop-remote"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
bitcode = "0.6.7"
|
||||
chrono = "0.4.42"
|
||||
drop-consts = { path = "../drop-consts" }
|
||||
drop-errors = { path = "../drop-errors" }
|
||||
droplet-rs = "0.7.3"
|
||||
gethostname = "1.0.2"
|
||||
hex = "0.4.3"
|
||||
http = "1.3.1"
|
||||
log = "0.4.28"
|
||||
md5 = "0.8.0"
|
||||
reqwest = "0.12.23"
|
||||
serde = { version = "1.0.220", features = ["derive"] }
|
||||
tauri = "2.8.5"
|
||||
url = "2.5.7"
|
||||
src-tauri/drop-remote/src/auth.rs (new file, 156 lines)
@@ -0,0 +1,156 @@
|
||||
use std::{collections::HashMap, env, sync::Mutex};
|
||||
|
||||
use chrono::Utc;
|
||||
use drop_errors::{drop_server_error::ServerError, remote_access_error::RemoteAccessError};
|
||||
use droplet_rs::ssl::sign_nonce;
|
||||
use gethostname::gethostname;
|
||||
use log::{debug, error, warn};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
requests::make_authenticated_get, utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC}, DropRemoteAuth, DropRemoteContext
|
||||
};
|
||||
|
||||
use super::requests::generate_url;
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct CapabilityConfiguration {}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct InitiateRequestBody {
|
||||
name: String,
|
||||
platform: String,
|
||||
capabilities: HashMap<String, CapabilityConfiguration>,
|
||||
mode: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct HandshakeRequestBody {
|
||||
client_id: String,
|
||||
token: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct HandshakeResponse {
|
||||
private: String,
|
||||
certificate: String,
|
||||
id: String,
|
||||
}
|
||||
|
||||
pub fn generate_authorization_header(context: &DropRemoteContext) -> String {
|
||||
let auth = if let Some(auth) = &context.auth {
|
||||
auth
|
||||
} else {
|
||||
return "".to_owned();
|
||||
};
|
||||
let nonce = Utc::now().timestamp_millis().to_string();
|
||||
|
||||
let signature = sign_nonce(auth.private.clone(), nonce.clone()).unwrap();
|
||||
|
||||
format!("Nonce {} {} {}", auth.client_id, nonce, signature)
|
||||
}
|
||||
|
||||
pub async fn fetch_user(context: &DropRemoteContext) -> Result<Vec<u8>, RemoteAccessError> {
|
||||
let response =
|
||||
make_authenticated_get(context, generate_url(context, &["/api/v1/client/user"], &[])?).await?;
|
||||
if response.status() != 200 {
|
||||
let err: ServerError = response.json().await?;
|
||||
warn!("{err:?}");
|
||||
|
||||
if err.status_message == "Nonce expired" {
|
||||
return Err(RemoteAccessError::OutOfSync);
|
||||
}
|
||||
|
||||
return Err(RemoteAccessError::InvalidResponse(err));
|
||||
}
|
||||
|
||||
response
|
||||
.bytes()
|
||||
.await
|
||||
.map_err(std::convert::Into::into)
|
||||
.map(|v| v.to_vec())
|
||||
}
|
||||
|
||||
pub async fn recieve_handshake_logic(
|
||||
context: &mut DropRemoteContext,
|
||||
path: String,
|
||||
) -> Result<(), RemoteAccessError> {
|
||||
let path_chunks: Vec<&str> = path.split('/').collect();
|
||||
if path_chunks.len() != 3 {
|
||||
// app.emit("auth/failed", ()).unwrap();
|
||||
return Err(RemoteAccessError::HandshakeFailed(
|
||||
"failed to parse token".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let client_id = path_chunks.get(1).unwrap();
|
||||
let token = path_chunks.get(2).unwrap();
|
||||
let body = HandshakeRequestBody {
|
||||
client_id: (*client_id).to_string(),
|
||||
token: (*token).to_string(),
|
||||
};
|
||||
|
||||
let endpoint = generate_url(context, &["/api/v1/client/auth/handshake"], &[])?;
|
||||
let client = DROP_CLIENT_ASYNC.clone();
|
||||
let response = client.post(endpoint).json(&body).send().await?;
|
||||
debug!("handshake responsded with {}", response.status().as_u16());
|
||||
if !response.status().is_success() {
|
||||
return Err(RemoteAccessError::InvalidResponse(response.json().await?));
|
||||
}
|
||||
let response_struct: HandshakeResponse = response.json().await?;
|
||||
|
||||
let web_token = {
|
||||
let header = generate_authorization_header(context);
|
||||
let token = client
|
||||
.post(generate_url(context, &["/api/v1/client/user/webtoken"], &[])?)
|
||||
.header("Authorization", header)
|
||||
.send()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
token.text().await.unwrap()
|
||||
};
|
||||
|
||||
context.auth = Some(DropRemoteAuth {
|
||||
private: response_struct.private,
|
||||
cert: response_struct.certificate,
|
||||
client_id: response_struct.id,
|
||||
web_token: web_token,
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn auth_initiate_logic(context: &DropRemoteContext, mode: String) -> Result<String, RemoteAccessError> {
|
||||
let hostname = gethostname();
|
||||
|
||||
let endpoint = generate_url(context, &["/api/v1/client/auth/initiate"], &[])?;
|
||||
let body = InitiateRequestBody {
|
||||
name: format!("{} (Desktop)", hostname.into_string().unwrap()),
|
||||
platform: env::consts::OS.to_string(),
|
||||
capabilities: HashMap::from([
|
||||
("peerAPI".to_owned(), CapabilityConfiguration {}),
|
||||
("cloudSaves".to_owned(), CapabilityConfiguration {}),
|
||||
]),
|
||||
mode,
|
||||
};
|
||||
|
||||
let client = DROP_CLIENT_SYNC.clone();
|
||||
let response = client.post(endpoint.to_string()).json(&body).send()?;
|
||||
|
||||
if response.status() != 200 {
|
||||
let data: ServerError = response.json()?;
|
||||
error!("could not start handshake: {}", data.status_message);
|
||||
|
||||
return Err(RemoteAccessError::HandshakeFailed(data.status_message));
|
||||
}
|
||||
|
||||
let response = response.text()?;
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
@ -5,18 +5,18 @@ use std::{
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
database::{db::borrow_db_checked, models::data::Database},
|
||||
error::{cache_error::CacheError, remote_access_error::RemoteAccessError},
|
||||
};
|
||||
use bitcode::{Decode, DecodeOwned, Encode};
|
||||
use http::{header::{CONTENT_TYPE}, response::Builder as ResponseBuilder, Response};
|
||||
use drop_consts::CACHE_DIR;
|
||||
use drop_errors::remote_access_error::RemoteAccessError;
|
||||
use http::{Response, header::CONTENT_TYPE, response::Builder as ResponseBuilder};
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! offline {
|
||||
($var:expr, $func1:expr, $func2:expr, $( $arg:expr ),* ) => {
|
||||
|
||||
async move { if $crate::borrow_db_checked().settings.force_offline || $crate::lock!($var).status == $crate::AppStatus::Offline {
|
||||
// TODO add offline mode back
|
||||
// || $var.lock().unwrap().status == AppStatus::Offline
|
||||
async move { if drop_database::borrow_db_checked().settings.force_offline {
|
||||
$func2( $( $arg ), *).await
|
||||
} else {
|
||||
$func1( $( $arg ), *).await
|
||||
@ -57,36 +57,33 @@ fn delete_sync(base: &Path, key: &str) -> io::Result<()> {
|
||||
}
|
||||
|
||||
pub fn cache_object<D: Encode>(key: &str, data: &D) -> Result<(), RemoteAccessError> {
|
||||
cache_object_db(key, data, &borrow_db_checked())
|
||||
cache_object_db(key, data)
|
||||
}
|
||||
pub fn cache_object_db<D: Encode>(
|
||||
key: &str,
|
||||
data: &D,
|
||||
database: &Database,
|
||||
) -> Result<(), RemoteAccessError> {
|
||||
let bytes = bitcode::encode(data);
|
||||
write_sync(&database.cache_dir, key, bytes).map_err(RemoteAccessError::Cache)
|
||||
write_sync(&CACHE_DIR, key, bytes).map_err(RemoteAccessError::Cache)
|
||||
}
|
||||
pub fn get_cached_object<D: Encode + DecodeOwned>(key: &str) -> Result<D, RemoteAccessError> {
|
||||
get_cached_object_db::<D>(key, &borrow_db_checked())
|
||||
get_cached_object_db::<D>(key)
|
||||
}
|
||||
pub fn get_cached_object_db<D: DecodeOwned>(
|
||||
key: &str,
|
||||
db: &Database,
|
||||
) -> Result<D, RemoteAccessError> {
|
||||
let bytes = read_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
|
||||
let bytes = read_sync(&CACHE_DIR, key).map_err(RemoteAccessError::Cache)?;
|
||||
let data =
|
||||
bitcode::decode::<D>(&bytes).map_err(|e| RemoteAccessError::Cache(io::Error::other(e)))?;
|
||||
Ok(data)
|
||||
}
|
||||
pub fn clear_cached_object(key: &str) -> Result<(), RemoteAccessError> {
|
||||
clear_cached_object_db(key, &borrow_db_checked())
|
||||
clear_cached_object_db(key)
|
||||
}
|
||||
pub fn clear_cached_object_db(
|
||||
key: &str,
|
||||
db: &Database,
|
||||
) -> Result<(), RemoteAccessError> {
|
||||
delete_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
|
||||
delete_sync(&CACHE_DIR, key).map_err(RemoteAccessError::Cache)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
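As a quick orientation for the new `cache_object` / `get_cached_object` signatures above (a hedged sketch, not code from this branch): any `bitcode`-encodable value can now be cached by key without passing a database handle, since the cache path comes from `drop_consts::CACHE_DIR`. The `LastSync` type below is hypothetical, introduced only for illustration.

```rust
use bitcode::{Decode, Encode};
use drop_errors::remote_access_error::RemoteAccessError;
use drop_remote::cache::{cache_object, get_cached_object};

// Hypothetical value type for illustration only.
#[derive(Encode, Decode)]
struct LastSync {
    timestamp_secs: u64,
}

fn roundtrip() -> Result<LastSync, RemoteAccessError> {
    // Serialised with bitcode and written under the shared cache directory.
    cache_object("last_sync", &LastSync { timestamp_secs: 0 })?;
    // Read back and decoded; a missing or corrupt entry surfaces as RemoteAccessError::Cache.
    get_cached_object::<LastSync>("last_sync")
}
```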
@ -104,36 +101,30 @@ impl ObjectCache {
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<Response<Vec<u8>>> for ObjectCache {
|
||||
type Error = CacheError;
|
||||
|
||||
fn try_from(value: Response<Vec<u8>>) -> Result<Self, Self::Error> {
|
||||
Ok(ObjectCache {
|
||||
impl From<Response<Vec<u8>>> for ObjectCache {
|
||||
fn from(value: Response<Vec<u8>>) -> Self {
|
||||
ObjectCache {
|
||||
content_type: value
|
||||
.headers()
|
||||
.get(CONTENT_TYPE)
|
||||
.ok_or(CacheError::HeaderNotFound(CONTENT_TYPE))?
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.map_err(CacheError::ParseError)?
|
||||
.unwrap()
|
||||
.to_owned(),
|
||||
body: value.body().clone(),
|
||||
expiry: get_sys_time_in_secs() + 60 * 60 * 24,
|
||||
})
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
impl TryFrom<ObjectCache> for Response<Vec<u8>> {
|
||||
type Error = CacheError;
|
||||
fn try_from(value: ObjectCache) -> Result<Self, Self::Error> {
|
||||
impl From<ObjectCache> for Response<Vec<u8>> {
|
||||
fn from(value: ObjectCache) -> Self {
|
||||
let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type);
|
||||
resp_builder.body(value.body).map_err(CacheError::ConstructionError)
|
||||
resp_builder.body(value.body).unwrap()
|
||||
}
|
||||
}
|
||||
impl TryFrom<&ObjectCache> for Response<Vec<u8>> {
|
||||
type Error = CacheError;
|
||||
|
||||
fn try_from(value: &ObjectCache) -> Result<Self, Self::Error> {
|
||||
impl From<&ObjectCache> for Response<Vec<u8>> {
|
||||
fn from(value: &ObjectCache) -> Self {
|
||||
let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type.clone());
|
||||
resp_builder.body(value.body.clone()).map_err(CacheError::ConstructionError)
|
||||
resp_builder.body(value.body.clone()).unwrap()
|
||||
}
|
||||
}
|
||||
src-tauri/drop-remote/src/fetch_object.rs (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
use http::{header::CONTENT_TYPE, response::Builder as ResponseBuilder, Request};
|
||||
use log::warn;
|
||||
use tauri::UriSchemeResponder;
|
||||
|
||||
|
||||
use crate::{requests::generate_url, utils::DROP_CLIENT_ASYNC, DropRemoteContext};
|
||||
|
||||
use super::{
|
||||
auth::generate_authorization_header,
|
||||
cache::{ObjectCache, cache_object, get_cached_object},
|
||||
};
|
||||
|
||||
pub async fn fetch_object(context: &DropRemoteContext, request: Request<Vec<u8>>, responder: UriSchemeResponder) {
|
||||
// Drop leading /
|
||||
let object_id = &request.uri().path()[1..];
|
||||
|
||||
let cache_result = get_cached_object::<ObjectCache>(object_id);
|
||||
if let Ok(cache_result) = &cache_result
|
||||
&& !cache_result.has_expired()
|
||||
{
|
||||
responder.respond(cache_result.into());
|
||||
return;
|
||||
}
|
||||
|
||||
let header = generate_authorization_header(context);
|
||||
let client = DROP_CLIENT_ASYNC.clone();
|
||||
let url = generate_url(context, &["/api/v1/client/object", object_id], &[]).expect("failed to generated object url");
|
||||
let response = client.get(url).header("Authorization", header).send().await;
|
||||
|
||||
if response.is_err() {
|
||||
match cache_result {
|
||||
Ok(cache_result) => responder.respond(cache_result.into()),
|
||||
Err(e) => {
|
||||
warn!("{e}");
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
let response = response.unwrap();
|
||||
|
||||
let resp_builder = ResponseBuilder::new().header(
|
||||
CONTENT_TYPE,
|
||||
response.headers().get("Content-Type").unwrap(),
|
||||
);
|
||||
let data = Vec::from(response.bytes().await.unwrap());
|
||||
let resp = resp_builder.body(data).unwrap();
|
||||
if cache_result.is_err() || cache_result.unwrap().has_expired() {
|
||||
cache_object::<ObjectCache>(object_id, &resp.clone().into()).unwrap();
|
||||
}
|
||||
|
||||
responder.respond(resp);
|
||||
}
|
||||
src-tauri/drop-remote/src/lib.rs (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
pub mod auth;
|
||||
pub mod cache;
|
||||
pub mod fetch_object;
|
||||
pub mod requests;
|
||||
pub mod utils;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
struct DropRemoteAuth {
|
||||
private: String,
|
||||
cert: String,
|
||||
client_id: String,
|
||||
web_token: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct DropRemoteContext {
|
||||
base_url: Url,
|
||||
auth: Option<DropRemoteAuth>,
|
||||
}
|
||||
|
||||
|
||||
impl DropRemoteContext {
|
||||
pub fn new(base_url: Url) -> Self {
|
||||
DropRemoteContext { base_url, auth: None }
|
||||
}
|
||||
}
|
||||
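For context (an illustrative sketch rather than code from the branch), the unauthenticated path through the new crate looks roughly like this: build a `DropRemoteContext` from the server's base URL, then derive endpoint URLs from it with `generate_url` as shown in the requests.rs hunk that follows; `auth` stays `None` until the handshake fills it in. The server address below is a placeholder.

```rust
use drop_errors::remote_access_error::RemoteAccessError;
use drop_remote::{requests::generate_url, DropRemoteContext};
use url::Url;

fn library_endpoint() -> Result<Url, RemoteAccessError> {
    // Placeholder server address; in the app this comes from the signed-in remote.
    let context =
        DropRemoteContext::new(Url::parse("https://drop.example.com").expect("placeholder URL parses"));
    // Joins the path components onto the context's base URL and returns the full endpoint.
    generate_url(&context, &["/api/v1/client/user/library"], &[])
}
```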
@ -1,17 +1,14 @@
|
||||
use drop_errors::remote_access_error::RemoteAccessError;
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
DB,
|
||||
database::db::DatabaseImpls,
|
||||
error::remote_access_error::RemoteAccessError,
|
||||
remote::{auth::generate_authorization_header, utils::DROP_CLIENT_ASYNC},
|
||||
};
|
||||
use crate::{auth::generate_authorization_header, utils::DROP_CLIENT_ASYNC, DropRemoteContext};
|
||||
|
||||
pub fn generate_url<T: AsRef<str>>(
|
||||
context: &DropRemoteContext,
|
||||
path_components: &[T],
|
||||
query: &[(T, T)],
|
||||
) -> Result<Url, RemoteAccessError> {
|
||||
let mut base_url = DB.fetch_base_url();
|
||||
let mut base_url = context.base_url.clone();
|
||||
for endpoint in path_components {
|
||||
base_url = base_url.join(endpoint.as_ref())?;
|
||||
}
|
||||
@ -24,10 +21,10 @@ pub fn generate_url<T: AsRef<str>>(
|
||||
Ok(base_url)
|
||||
}
|
||||
|
||||
pub async fn make_authenticated_get(url: Url) -> Result<reqwest::Response, reqwest::Error> {
|
||||
pub async fn make_authenticated_get(context: &DropRemoteContext, url: Url) -> Result<reqwest::Response, reqwest::Error> {
|
||||
DROP_CLIENT_ASYNC
|
||||
.get(url)
|
||||
.header("Authorization", generate_authorization_header())
|
||||
.header("Authorization", generate_authorization_header(context))
|
||||
.send()
|
||||
.await
|
||||
}
|
||||
src-tauri/drop-remote/src/utils.rs (new file, 71 lines)
@@ -0,0 +1,71 @@
|
||||
use std::{
|
||||
fs::{self, File},
|
||||
io::Read,
|
||||
sync::LazyLock,
|
||||
};
|
||||
|
||||
use drop_consts::DATA_ROOT_DIR;
|
||||
use log::{debug, info};
|
||||
use reqwest::Certificate;
|
||||
|
||||
static DROP_CERT_BUNDLE: LazyLock<Vec<Certificate>> = LazyLock::new(fetch_certificates);
|
||||
pub static DROP_CLIENT_SYNC: LazyLock<reqwest::blocking::Client> = LazyLock::new(get_client_sync);
|
||||
pub static DROP_CLIENT_ASYNC: LazyLock<reqwest::Client> = LazyLock::new(get_client_async);
|
||||
pub static DROP_CLIENT_WS_CLIENT: LazyLock<reqwest::Client> = LazyLock::new(get_client_ws);
|
||||
|
||||
fn fetch_certificates() -> Vec<Certificate> {
|
||||
let certificate_dir = DATA_ROOT_DIR.join("certificates");
|
||||
|
||||
let mut certs = Vec::new();
|
||||
match fs::read_dir(certificate_dir) {
|
||||
Ok(c) => {
|
||||
for entry in c {
|
||||
match entry {
|
||||
Ok(c) => {
|
||||
let mut buf = Vec::new();
|
||||
File::open(c.path()).unwrap().read_to_end(&mut buf).unwrap();
|
||||
|
||||
for cert in Certificate::from_pem_bundle(&buf).unwrap() {
|
||||
certs.push(cert);
|
||||
}
|
||||
info!(
|
||||
"added {} certificate(s) from {}",
|
||||
certs.len(),
|
||||
c.file_name().into_string().unwrap()
|
||||
);
|
||||
}
|
||||
Err(_) => todo!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
debug!("not loading certificates due to error: {e}");
|
||||
}
|
||||
};
|
||||
certs
|
||||
}
|
||||
|
||||
pub fn get_client_sync() -> reqwest::blocking::Client {
|
||||
let mut client = reqwest::blocking::ClientBuilder::new();
|
||||
|
||||
for cert in DROP_CERT_BUNDLE.iter() {
|
||||
client = client.add_root_certificate(cert.clone());
|
||||
}
|
||||
client.use_rustls_tls().build().unwrap()
|
||||
}
|
||||
pub fn get_client_async() -> reqwest::Client {
|
||||
let mut client = reqwest::ClientBuilder::new();
|
||||
|
||||
for cert in DROP_CERT_BUNDLE.iter() {
|
||||
client = client.add_root_certificate(cert.clone());
|
||||
}
|
||||
client.use_rustls_tls().build().unwrap()
|
||||
}
|
||||
pub fn get_client_ws() -> reqwest::Client {
|
||||
let mut client = reqwest::ClientBuilder::new();
|
||||
|
||||
for cert in DROP_CERT_BUNDLE.iter() {
|
||||
client = client.add_root_certificate(cert.clone());
|
||||
}
|
||||
client.use_rustls_tls().http1_only().build().unwrap()
|
||||
}
|
||||
src-tauri/src/auth.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
use std::sync::Mutex;
|
||||
|
||||
use drop_database::{borrow_db_checked, runtime_models::User};
|
||||
use drop_errors::remote_access_error::RemoteAccessError;
|
||||
use drop_remote::{auth::{fetch_user, recieve_handshake_logic}, cache::{cache_object, clear_cached_object, get_cached_object}};
|
||||
use log::warn;
|
||||
use tauri::{AppHandle, Emitter as _, Manager as _};
|
||||
|
||||
use crate::{AppState, AppStatus};
|
||||
|
||||
pub async fn setup() -> (AppStatus, Option<User>) {
|
||||
let auth = {
|
||||
let data = borrow_db_checked();
|
||||
data.auth.clone()
|
||||
};
|
||||
|
||||
if auth.is_some() {
|
||||
let user_result = match fetch_user().await {
|
||||
Ok(data) => data,
|
||||
Err(RemoteAccessError::FetchError(_)) => {
|
||||
let user = get_cached_object::<User>("user").unwrap();
|
||||
return (AppStatus::Offline, Some(user));
|
||||
}
|
||||
Err(_) => return (AppStatus::SignedInNeedsReauth, None),
|
||||
};
|
||||
cache_object("user", &user_result).unwrap();
|
||||
return (AppStatus::SignedIn, Some(user_result));
|
||||
}
|
||||
|
||||
(AppStatus::SignedOut, None)
|
||||
}
|
||||
|
||||
pub async fn recieve_handshake(app: AppHandle, path: String) {
|
||||
// Tell the app we're processing
|
||||
app.emit("auth/processing", ()).unwrap();
|
||||
|
||||
let handshake_result = recieve_handshake_logic(path).await;
|
||||
if let Err(e) = handshake_result {
|
||||
warn!("error with authentication: {e}");
|
||||
app.emit("auth/failed", e.to_string()).unwrap();
|
||||
return;
|
||||
}
|
||||
|
||||
let app_state = app.state::<Mutex<AppState>>();
|
||||
|
||||
let (app_status, user) = setup().await;
|
||||
|
||||
let mut state_lock = app_state.lock().unwrap();
|
||||
|
||||
state_lock.status = app_status;
|
||||
state_lock.user = user;
|
||||
|
||||
let _ = clear_cached_object("collections");
|
||||
let _ = clear_cached_object("library");
|
||||
|
||||
drop(state_lock);
|
||||
|
||||
app.emit("auth/finished", ()).unwrap();
|
||||
}
|
||||
@ -1,4 +1,4 @@
|
||||
use crate::database::db::{borrow_db_checked, borrow_db_mut_checked};
|
||||
use drop_database::{borrow_db_checked, borrow_db_mut_checked};
|
||||
use log::debug;
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_autostart::ManagerExt;
|
||||
|
||||
@ -1,16 +1,16 @@
|
||||
use log::{debug, error};
|
||||
use tauri::AppHandle;
|
||||
|
||||
use crate::{lock, AppState};
|
||||
use crate::AppState;
|
||||
|
||||
#[tauri::command]
|
||||
pub fn quit(app: tauri::AppHandle, state: tauri::State<'_, std::sync::Mutex<AppState<'_>>>) {
|
||||
pub fn quit(app: tauri::AppHandle, state: tauri::State<'_, std::sync::Mutex<AppState>>) {
|
||||
cleanup_and_exit(&app, &state);
|
||||
}
|
||||
|
||||
pub fn cleanup_and_exit(app: &AppHandle, state: &tauri::State<'_, std::sync::Mutex<AppState<'_>>>) {
|
||||
pub fn cleanup_and_exit(app: &AppHandle, state: &tauri::State<'_, std::sync::Mutex<AppState>>) {
|
||||
debug!("cleaning up and exiting application");
|
||||
let download_manager = lock!(state).download_manager.clone();
|
||||
let download_manager = state.lock().unwrap().download_manager.clone();
|
||||
match download_manager.ensure_terminated() {
|
||||
Ok(res) => match res {
|
||||
Ok(()) => debug!("download manager terminated correctly"),
|
||||
|
||||
@ -1,10 +1,10 @@
|
||||
use crate::{lock, AppState};
|
||||
use crate::AppState;
|
||||
|
||||
#[tauri::command]
|
||||
pub fn fetch_state(
|
||||
state: tauri::State<'_, std::sync::Mutex<AppState<'_>>>,
|
||||
state: tauri::State<'_, std::sync::Mutex<AppState>>,
|
||||
) -> Result<String, String> {
|
||||
let guard = lock!(state);
|
||||
let guard = state.lock().unwrap();
|
||||
let cloned_state = serde_json::to_string(&guard.clone()).map_err(|e| e.to_string())?;
|
||||
drop(guard);
|
||||
Ok(cloned_state)
|
||||
|
||||
@ -4,17 +4,12 @@ use std::{
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use drop_database::{borrow_db_checked, borrow_db_mut_checked, db::DATA_ROOT_DIR, debug::SystemData, models::data::Settings};
|
||||
use drop_errors::download_manager_error::DownloadManagerError;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::{
|
||||
database::{db::borrow_db_mut_checked, scan::scan_install_dirs}, error::download_manager_error::DownloadManagerError,
|
||||
};
|
||||
use crate::database::scan::scan_install_dirs;
|
||||
|
||||
use super::{
|
||||
db::{borrow_db_checked, DATA_ROOT_DIR},
|
||||
debug::SystemData,
|
||||
models::data::Settings,
|
||||
};
|
||||
|
||||
// Will, in future, return disk/remaining size
|
||||
// Just returns the directories that have been set up
|
||||
@ -67,15 +62,11 @@ pub fn add_download_dir(new_dir: PathBuf) -> Result<(), DownloadManagerError<()>
|
||||
#[tauri::command]
|
||||
pub fn update_settings(new_settings: Value) {
|
||||
let mut db_lock = borrow_db_mut_checked();
|
||||
let mut current_settings = serde_json::to_value(db_lock.settings.clone()).expect("Failed to parse existing settings");
|
||||
let values = match new_settings.as_object() {
|
||||
Some(values) => values,
|
||||
None => { panic!("Could not parse settings values"); },
|
||||
};
|
||||
for (key, value) in values {
|
||||
let mut current_settings = serde_json::to_value(db_lock.settings.clone()).unwrap();
|
||||
for (key, value) in new_settings.as_object().unwrap() {
|
||||
current_settings[key] = value.clone();
|
||||
}
|
||||
let new_settings: Settings = serde_json::from_value(current_settings).unwrap_or_else(|e| panic!("Failed to parse settings with error {}", e));
|
||||
let new_settings: Settings = serde_json::from_value(current_settings).unwrap();
|
||||
db_lock.settings = new_settings;
|
||||
}
|
||||
#[tauri::command]
|
||||
|
||||
@ -1,223 +0,0 @@
|
||||
use std::{
|
||||
fs::{self, create_dir_all},
|
||||
mem::ManuallyDrop,
|
||||
ops::{Deref, DerefMut},
|
||||
path::PathBuf,
|
||||
sync::{Arc, LazyLock, RwLockReadGuard, RwLockWriteGuard},
|
||||
};
|
||||
|
||||
use chrono::Utc;
|
||||
use log::{debug, error, info, warn};
|
||||
use rustbreak::{DeSerError, DeSerializer, PathDatabase, RustbreakError};
|
||||
use serde::{Serialize, de::DeserializeOwned};
|
||||
use url::Url;
|
||||
|
||||
use crate::DB;
|
||||
|
||||
use super::models::data::Database;
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
static DATA_ROOT_PREFIX: &'static str = "drop";
|
||||
#[cfg(debug_assertions)]
|
||||
static DATA_ROOT_PREFIX: &str = "drop-debug";
|
||||
|
||||
pub static DATA_ROOT_DIR: LazyLock<Arc<PathBuf>> = LazyLock::new(|| {
|
||||
Arc::new(
|
||||
dirs::data_dir()
|
||||
.expect("Failed to get data dir")
|
||||
.join(DATA_ROOT_PREFIX),
|
||||
)
|
||||
});
|
||||
|
||||
// Custom JSON serializer to support everything we need
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct DropDatabaseSerializer;
|
||||
|
||||
impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
|
||||
for DropDatabaseSerializer
|
||||
{
|
||||
fn serialize(&self, val: &T) -> rustbreak::error::DeSerResult<Vec<u8>> {
|
||||
native_model::encode(val)
|
||||
.map_err(|e| DeSerError::Internal(e.to_string()))
|
||||
}
|
||||
|
||||
fn deserialize<R: std::io::Read>(&self, mut s: R) -> rustbreak::error::DeSerResult<T> {
|
||||
let mut buf = Vec::new();
|
||||
s.read_to_end(&mut buf)
|
||||
.map_err(|e| rustbreak::error::DeSerError::Other(e.into()))?;
|
||||
let (val, _version) = native_model::decode(buf)
|
||||
.map_err(|e| DeSerError::Internal(e.to_string()))?;
|
||||
Ok(val)
|
||||
}
|
||||
}
|
||||
|
||||
pub type DatabaseInterface =
|
||||
rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer>;
|
||||
|
||||
pub trait DatabaseImpls {
|
||||
fn set_up_database() -> DatabaseInterface;
|
||||
fn database_is_set_up(&self) -> bool;
|
||||
fn fetch_base_url(&self) -> Url;
|
||||
}
|
||||
impl DatabaseImpls for DatabaseInterface {
|
||||
fn set_up_database() -> DatabaseInterface {
|
||||
let db_path = DATA_ROOT_DIR.join("drop.db");
|
||||
let games_base_dir = DATA_ROOT_DIR.join("games");
|
||||
let logs_root_dir = DATA_ROOT_DIR.join("logs");
|
||||
let cache_dir = DATA_ROOT_DIR.join("cache");
|
||||
let pfx_dir = DATA_ROOT_DIR.join("pfx");
|
||||
|
||||
debug!("creating data directory at {DATA_ROOT_DIR:?}");
|
||||
create_dir_all(DATA_ROOT_DIR.as_path()).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"Failed to create directory {} with error {}",
|
||||
DATA_ROOT_DIR.display(),
|
||||
e
|
||||
)
|
||||
});
|
||||
create_dir_all(&games_base_dir).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"Failed to create directory {} with error {}",
|
||||
games_base_dir.display(),
|
||||
e
|
||||
)
|
||||
});
|
||||
create_dir_all(&logs_root_dir).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"Failed to create directory {} with error {}",
|
||||
logs_root_dir.display(),
|
||||
e
|
||||
)
|
||||
});
|
||||
create_dir_all(&cache_dir).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"Failed to create directory {} with error {}",
|
||||
cache_dir.display(),
|
||||
e
|
||||
)
|
||||
});
|
||||
create_dir_all(&pfx_dir).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"Failed to create directory {} with error {}",
|
||||
pfx_dir.display(),
|
||||
e
|
||||
)
|
||||
});
|
||||
|
||||
let exists = fs::exists(db_path.clone()).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"Failed to find if {} exists with error {}",
|
||||
db_path.display(),
|
||||
e
|
||||
)
|
||||
});
|
||||
|
||||
if exists {
|
||||
match PathDatabase::load_from_path(db_path.clone()) {
|
||||
Ok(db) => db,
|
||||
Err(e) => handle_invalid_database(e, db_path, games_base_dir, cache_dir),
|
||||
}
|
||||
} else {
|
||||
let default = Database::new(games_base_dir, None, cache_dir);
|
||||
debug!("Creating database at path {}", db_path.display());
|
||||
PathDatabase::create_at_path(db_path, default).expect("Database could not be created")
|
||||
}
|
||||
}
|
||||
|
||||
fn database_is_set_up(&self) -> bool {
|
||||
!borrow_db_checked().base_url.is_empty()
|
||||
}
|
||||
|
||||
fn fetch_base_url(&self) -> Url {
|
||||
let handle = borrow_db_checked();
|
||||
Url::parse(&handle.base_url)
|
||||
.unwrap_or_else(|_| panic!("Failed to parse base url {}", handle.base_url))
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Make the error relelvant rather than just assume that it's a Deserialize error
|
||||
fn handle_invalid_database(
|
||||
_e: RustbreakError,
|
||||
db_path: PathBuf,
|
||||
games_base_dir: PathBuf,
|
||||
cache_dir: PathBuf,
|
||||
) -> rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer> {
|
||||
warn!("{_e}");
|
||||
let new_path = {
|
||||
let time = Utc::now().timestamp();
|
||||
let mut base = db_path.clone();
|
||||
base.set_file_name(format!("drop.db.backup-{time}"));
|
||||
base
|
||||
};
|
||||
info!("old database stored at: {}", new_path.to_string_lossy());
|
||||
fs::rename(&db_path, &new_path).unwrap_or_else(|e| {
|
||||
panic!(
|
||||
"Could not rename database {} to {} with error {}",
|
||||
db_path.display(),
|
||||
new_path.display(),
|
||||
e
|
||||
)
|
||||
});
|
||||
|
||||
let db = Database::new(games_base_dir, Some(new_path), cache_dir);
|
||||
|
||||
PathDatabase::create_at_path(db_path, db).expect("Database could not be created")
|
||||
}
|
||||
|
||||
// To automatically save the database upon drop
|
||||
pub struct DBRead<'a>(RwLockReadGuard<'a, Database>);
|
||||
pub struct DBWrite<'a>(ManuallyDrop<RwLockWriteGuard<'a, Database>>);
|
||||
impl<'a> Deref for DBWrite<'a> {
|
||||
type Target = Database;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
impl<'a> DerefMut for DBWrite<'a> {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
impl<'a> Deref for DBRead<'a> {
|
||||
type Target = Database;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
impl Drop for DBWrite<'_> {
|
||||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
ManuallyDrop::drop(&mut self.0);
|
||||
}
|
||||
|
||||
match DB.save() {
|
||||
Ok(()) => {}
|
||||
Err(e) => {
|
||||
error!("database failed to save with error {e}");
|
||||
panic!("database failed to save with error {e}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn borrow_db_checked<'a>() -> DBRead<'a> {
|
||||
match DB.borrow_data() {
|
||||
Ok(data) => DBRead(data),
|
||||
Err(e) => {
|
||||
error!("database borrow failed with error {e}");
|
||||
panic!("database borrow failed with error {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn borrow_db_mut_checked<'a>() -> DBWrite<'a> {
|
||||
match DB.borrow_data_mut() {
|
||||
Ok(data) => DBWrite(ManuallyDrop::new(data)),
|
||||
Err(e) => {
|
||||
error!("database borrow mut failed with error {e}");
|
||||
panic!("database borrow mut failed with error {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,5 +1,2 @@
pub mod commands;
pub mod db;
pub mod debug;
pub mod models;
pub mod scan;
@ -1,18 +1,9 @@
use std::fs;

use drop_database::{borrow_db_mut_checked, drop_data::{DropData, DROP_DATA_PATH}, models::data::{DownloadType, DownloadableMetadata}};
use drop_native_library::library::set_partially_installed_db;
use log::warn;

use crate::{
database::{
db::borrow_db_mut_checked,
models::data::{DownloadType, DownloadableMetadata},
},
games::{
downloads::drop_data::{DropData, DROP_DATA_PATH},
library::set_partially_installed_db,
},
};

pub fn scan_install_dirs() {
let mut db_lock = borrow_db_mut_checked();
for install_dir in db_lock.applications.install_dirs.clone() {
@ -24,11 +15,11 @@ pub fn scan_install_dirs() {
if !drop_data_file.exists() {
continue;
}
let game_id = game.file_name().display().to_string();
let game_id = game.file_name().into_string().unwrap();
let Ok(drop_data) = DropData::read(&game.path()) else {
warn!(
".dropdata exists for {}, but couldn't read it. is it corrupted?",
game.file_name().display()
game.file_name().into_string().unwrap()
);
continue;
};

@ -1,15 +1,17 @@
use std::sync::Mutex;

use crate::{AppState, database::models::data::DownloadableMetadata, lock};
use drop_database::models::data::DownloadableMetadata;

use crate::AppState;

#[tauri::command]
pub fn pause_downloads(state: tauri::State<'_, Mutex<AppState>>) {
lock!(state).download_manager.pause_downloads();
state.lock().unwrap().download_manager.pause_downloads();
}

#[tauri::command]
pub fn resume_downloads(state: tauri::State<'_, Mutex<AppState>>) {
lock!(state).download_manager.resume_downloads();
state.lock().unwrap().download_manager.resume_downloads();
}

#[tauri::command]
@ -18,12 +20,14 @@ pub fn move_download_in_queue(
old_index: usize,
new_index: usize,
) {
lock!(state)
state
.lock()
.unwrap()
.download_manager
.rearrange(old_index, new_index);
}

#[tauri::command]
pub fn cancel_game(state: tauri::State<'_, Mutex<AppState>>, meta: DownloadableMetadata) {
lock!(state).download_manager.cancel(meta);
state.lock().unwrap().download_manager.cancel(meta);
}

@ -1,5 +1 @@
pub mod commands;
pub mod download_manager_builder;
pub mod download_manager_frontend;
pub mod downloadable;
pub mod util;

@ -1,26 +0,0 @@
use std::fmt::Display;

use http::{header::ToStrError, HeaderName};
use serde_with::SerializeDisplay;

use crate::error::remote_access_error::RemoteAccessError;

#[derive(Debug, SerializeDisplay)]
pub enum CacheError {
HeaderNotFound(HeaderName),
ParseError(ToStrError),
Remote(RemoteAccessError),
ConstructionError(http::Error)
}

impl Display for CacheError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
CacheError::HeaderNotFound(header_name) => format!("Could not find header {header_name} in cache"),
CacheError::ParseError(to_str_error) => format!("Could not parse cache with error {to_str_error}"),
CacheError::Remote(remote_access_error) => format!("Cache got remote access error: {remote_access_error}"),
CacheError::ConstructionError(error) => format!("Could not construct cache body with error {error}"),
};
write!(f, "{s}")
}
}
@ -1,21 +0,0 @@
use std::fmt::{Display};

use serde_with::SerializeDisplay;

#[derive(SerializeDisplay)]
pub enum LibraryError {
MetaNotFound(String),
VersionNotFound(String),
}
impl Display for LibraryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", match self {
LibraryError::MetaNotFound(id) => {
format!("Could not locate any installed version of game ID {id} in the database")
}
LibraryError::VersionNotFound(game_id) => {
format!("Could not locate any installed version for game id {game_id} in the database")
}
})
}
}
@ -1,2 +0,0 @@
pub mod collection;
pub mod commands;
@ -1,78 +0,0 @@
|
||||
use std::sync::Mutex;
|
||||
|
||||
use tauri::AppHandle;
|
||||
|
||||
use crate::{
|
||||
AppState,
|
||||
database::{
|
||||
db::borrow_db_checked,
|
||||
models::data::GameVersion,
|
||||
},
|
||||
error::{library_error::LibraryError, remote_access_error::RemoteAccessError},
|
||||
games::library::{
|
||||
fetch_game_logic_offline, fetch_library_logic_offline, get_current_meta,
|
||||
uninstall_game_logic,
|
||||
},
|
||||
offline,
|
||||
};
|
||||
|
||||
use super::{
|
||||
library::{
|
||||
FetchGameStruct, Game, fetch_game_logic, fetch_game_version_options_logic,
|
||||
fetch_library_logic,
|
||||
},
|
||||
state::{GameStatusManager, GameStatusWithTransient},
|
||||
};
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn fetch_library(
|
||||
state: tauri::State<'_, Mutex<AppState<'_>>>,
|
||||
hard_refresh: Option<bool>,
|
||||
) -> Result<Vec<Game>, RemoteAccessError> {
|
||||
offline!(
|
||||
state,
|
||||
fetch_library_logic,
|
||||
fetch_library_logic_offline,
|
||||
state,
|
||||
hard_refresh
|
||||
).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn fetch_game(
|
||||
game_id: String,
|
||||
state: tauri::State<'_, Mutex<AppState<'_>>>,
|
||||
) -> Result<FetchGameStruct, RemoteAccessError> {
|
||||
offline!(
|
||||
state,
|
||||
fetch_game_logic,
|
||||
fetch_game_logic_offline,
|
||||
game_id,
|
||||
state
|
||||
).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn fetch_game_status(id: String) -> GameStatusWithTransient {
|
||||
let db_handle = borrow_db_checked();
|
||||
GameStatusManager::fetch_state(&id, &db_handle)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn uninstall_game(game_id: String, app_handle: AppHandle) -> Result<(), LibraryError> {
|
||||
let meta = match get_current_meta(&game_id) {
|
||||
Some(data) => data,
|
||||
None => return Err(LibraryError::MetaNotFound(game_id)),
|
||||
};
|
||||
uninstall_game_logic(meta, &app_handle);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn fetch_game_version_options(
|
||||
game_id: String,
|
||||
state: tauri::State<'_, Mutex<AppState<'_>>>,
|
||||
) -> Result<Vec<GameVersion>, RemoteAccessError> {
|
||||
fetch_game_version_options_logic(game_id, state).await
|
||||
}
|
||||
@ -1,5 +0,0 @@
pub mod collections;
pub mod commands;
pub mod downloads;
pub mod library;
pub mod state;
@ -5,79 +5,59 @@
|
||||
#![feature(iterator_try_collect)]
|
||||
#![deny(clippy::all)]
|
||||
|
||||
mod database;
|
||||
mod games;
|
||||
|
||||
mod auth;
|
||||
mod client;
|
||||
mod database;
|
||||
mod download_manager;
|
||||
mod error;
|
||||
mod native_library;
|
||||
mod process;
|
||||
mod remote;
|
||||
mod utils;
|
||||
mod setup;
|
||||
|
||||
use crate::database::scan::scan_install_dirs;
|
||||
use crate::process::commands::open_process_logs;
|
||||
use crate::process::process_handlers::UMU_LAUNCHER_EXECUTABLE;
|
||||
use crate::auth::recieve_handshake;
|
||||
use crate::native_library::collection_commands::{add_game_to_collection, create_collection, delete_collection, delete_game_in_collection, fetch_collection, fetch_collections};
|
||||
use crate::native_library::commands::{
|
||||
fetch_game, fetch_game_status, fetch_game_version_options, fetch_library, uninstall_game,
|
||||
};
|
||||
use crate::native_library::downloads::commands::{download_game, resume_download};
|
||||
use crate::process::commands::{open_process_logs, update_game_configuration};
|
||||
use crate::remote::commands::auth_initiate_code;
|
||||
use crate::remote::fetch_object::fetch_object_wrapper;
|
||||
use crate::remote::server_proto::handle_server_proto_wrapper;
|
||||
use crate::{database::db::DatabaseImpls, games::downloads::commands::resume_download};
|
||||
use bitcode::{Decode, Encode};
|
||||
use crate::remote::server_proto::{handle_server_proto, handle_server_proto_offline};
|
||||
use client::commands::fetch_state;
|
||||
use client::{
|
||||
autostart::{get_autostart_enabled, sync_autostart_on_startup, toggle_autostart},
|
||||
autostart::{get_autostart_enabled, toggle_autostart},
|
||||
cleanup::{cleanup_and_exit, quit},
|
||||
};
|
||||
use database::commands::{
|
||||
add_download_dir, delete_download_dir, fetch_download_dir_stats, fetch_settings,
|
||||
fetch_system_data, update_settings,
|
||||
};
|
||||
use database::db::{DATA_ROOT_DIR, DatabaseInterface, borrow_db_checked, borrow_db_mut_checked};
|
||||
use database::models::data::GameDownloadStatus;
|
||||
use download_manager::commands::{
|
||||
cancel_game, move_download_in_queue, pause_downloads, resume_downloads,
|
||||
};
|
||||
use download_manager::download_manager_builder::DownloadManagerBuilder;
|
||||
use download_manager::download_manager_frontend::DownloadManager;
|
||||
use games::collections::commands::{
|
||||
add_game_to_collection, create_collection, delete_collection, delete_game_in_collection,
|
||||
fetch_collection, fetch_collections,
|
||||
};
|
||||
use games::commands::{
|
||||
fetch_game, fetch_game_status, fetch_game_version_options, fetch_library, uninstall_game,
|
||||
};
|
||||
use games::downloads::commands::download_game;
|
||||
use games::library::{Game, update_game_configuration};
|
||||
use log::{LevelFilter, debug, info, warn};
|
||||
use log4rs::Config;
|
||||
use log4rs::append::console::ConsoleAppender;
|
||||
use log4rs::append::file::FileAppender;
|
||||
use log4rs::config::{Appender, Root};
|
||||
use log4rs::encode::pattern::PatternEncoder;
|
||||
use drop_database::borrow_db_mut_checked;
|
||||
use drop_database::db::DATA_ROOT_DIR;
|
||||
use drop_database::runtime_models::User;
|
||||
use drop_downloads::download_manager_frontend::DownloadManager;
|
||||
use drop_process::process_manager::ProcessManager;
|
||||
use drop_remote::{fetch_object::fetch_object, offline};
|
||||
use log::{debug, info, warn};
|
||||
use process::commands::{kill_game, launch_game};
|
||||
use process::process_manager::ProcessManager;
|
||||
use remote::auth::{self, recieve_handshake};
|
||||
use remote::commands::{
|
||||
auth_initiate, fetch_drop_object, gen_drop_url, manual_recieve_handshake, retry_connect,
|
||||
sign_out, use_remote,
|
||||
};
|
||||
use remote::server_proto::handle_server_proto_offline_wrapper;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde::Serialize;
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
use std::panic::PanicHookInfo;
|
||||
use std::path::Path;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
use std::time::SystemTime;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{LazyLock, Mutex},
|
||||
};
|
||||
use std::{env, panic};
|
||||
use tauri::menu::{Menu, MenuItem, PredefinedMenuItem};
|
||||
use tauri::tray::TrayIconBuilder;
|
||||
use tauri::{AppHandle, Manager, RunEvent, WindowEvent};
|
||||
use tauri::{Manager, RunEvent, WindowEvent};
|
||||
use tauri_plugin_deep_link::DeepLinkExt;
|
||||
use tauri_plugin_dialog::DialogExt;
|
||||
|
||||
@ -92,165 +72,18 @@ pub enum AppStatus {
|
||||
ServerUnavailable,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, Encode, Decode)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct User {
|
||||
id: String,
|
||||
username: String,
|
||||
admin: bool,
|
||||
display_name: String,
|
||||
profile_picture_object_id: String,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct CompatInfo {
|
||||
umu_installed: bool,
|
||||
}
|
||||
|
||||
fn create_new_compat_info() -> Option<CompatInfo> {
|
||||
#[cfg(target_os = "windows")]
|
||||
return None;
|
||||
|
||||
let has_umu_installed = UMU_LAUNCHER_EXECUTABLE.is_some();
|
||||
Some(CompatInfo {
|
||||
umu_installed: has_umu_installed,
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct AppState<'a> {
|
||||
pub struct AppState {
|
||||
status: AppStatus,
|
||||
user: Option<User>,
|
||||
games: HashMap<String, Game>,
|
||||
|
||||
#[serde(skip_serializing)]
|
||||
download_manager: Arc<DownloadManager>,
|
||||
#[serde(skip_serializing)]
|
||||
process_manager: Arc<Mutex<ProcessManager<'a>>>,
|
||||
#[serde(skip_serializing)]
|
||||
compat_info: Option<CompatInfo>,
|
||||
process_manager: &'static Mutex<ProcessManager<'static>>,
|
||||
}
|
||||
|
||||
async fn setup(handle: AppHandle) -> AppState<'static> {
|
||||
let logfile = FileAppender::builder()
|
||||
.encoder(Box::new(PatternEncoder::new(
|
||||
"{d} | {l} | {f}:{L} - {m}{n}",
|
||||
)))
|
||||
.append(false)
|
||||
.build(DATA_ROOT_DIR.join("./drop.log"))
|
||||
.expect("Failed to setup logfile");
|
||||
|
||||
let console = ConsoleAppender::builder()
|
||||
.encoder(Box::new(PatternEncoder::new(
|
||||
"{d} | {l} | {f}:{L} - {m}{n}",
|
||||
)))
|
||||
.build();
|
||||
|
||||
let log_level = env::var("RUST_LOG").unwrap_or(String::from("Info"));
|
||||
|
||||
let config = Config::builder()
|
||||
.appenders(vec![
|
||||
Appender::builder().build("logfile", Box::new(logfile)),
|
||||
Appender::builder().build("console", Box::new(console)),
|
||||
])
|
||||
.build(
|
||||
Root::builder()
|
||||
.appenders(vec!["logfile", "console"])
|
||||
.build(LevelFilter::from_str(&log_level).expect("Invalid log level")),
|
||||
)
|
||||
.expect("Failed to build config");
|
||||
|
||||
log4rs::init_config(config).expect("Failed to initialise log4rs");
|
||||
|
||||
let games = HashMap::new();
|
||||
let download_manager = Arc::new(DownloadManagerBuilder::build(handle.clone()));
|
||||
let process_manager = Arc::new(Mutex::new(ProcessManager::new(handle.clone())));
|
||||
let compat_info = create_new_compat_info();
|
||||
|
||||
debug!("checking if database is set up");
|
||||
let is_set_up = DB.database_is_set_up();
|
||||
|
||||
scan_install_dirs();
|
||||
|
||||
if !is_set_up {
|
||||
return AppState {
|
||||
status: AppStatus::NotConfigured,
|
||||
user: None,
|
||||
games,
|
||||
download_manager,
|
||||
process_manager,
|
||||
compat_info,
|
||||
};
|
||||
}
|
||||
|
||||
debug!("database is set up");
|
||||
|
||||
// TODO: Account for possible failure
|
||||
let (app_status, user) = auth::setup().await;
|
||||
|
||||
let db_handle = borrow_db_checked();
|
||||
let mut missing_games = Vec::new();
|
||||
let statuses = db_handle.applications.game_statuses.clone();
|
||||
drop(db_handle);
|
||||
|
||||
for (game_id, status) in statuses {
|
||||
match status {
|
||||
GameDownloadStatus::Remote {} => {}
|
||||
GameDownloadStatus::PartiallyInstalled { .. } => {}
|
||||
GameDownloadStatus::SetupRequired {
|
||||
version_name: _,
|
||||
install_dir,
|
||||
} => {
|
||||
let install_dir_path = Path::new(&install_dir);
|
||||
if !install_dir_path.exists() {
|
||||
missing_games.push(game_id);
|
||||
}
|
||||
}
|
||||
GameDownloadStatus::Installed {
|
||||
version_name: _,
|
||||
install_dir,
|
||||
} => {
|
||||
let install_dir_path = Path::new(&install_dir);
|
||||
if !install_dir_path.exists() {
|
||||
missing_games.push(game_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
info!("detected games missing: {missing_games:?}");
|
||||
|
||||
let mut db_handle = borrow_db_mut_checked();
|
||||
for game_id in missing_games {
|
||||
db_handle
|
||||
.applications
|
||||
.game_statuses
|
||||
.entry(game_id)
|
||||
.and_modify(|v| *v = GameDownloadStatus::Remote {});
|
||||
}
|
||||
|
||||
drop(db_handle);
|
||||
|
||||
debug!("finished setup!");
|
||||
|
||||
// Sync autostart state
|
||||
if let Err(e) = sync_autostart_on_startup(&handle) {
|
||||
warn!("failed to sync autostart state: {e}");
|
||||
}
|
||||
|
||||
AppState {
|
||||
status: app_status,
|
||||
user,
|
||||
games,
|
||||
download_manager,
|
||||
process_manager,
|
||||
compat_info,
|
||||
}
|
||||
}
|
||||
|
||||
pub static DB: LazyLock<DatabaseInterface> = LazyLock::new(DatabaseInterface::set_up_database);
|
||||
|
||||
pub fn custom_panic_handler(e: &PanicHookInfo) -> Option<()> {
|
||||
let crash_file = DATA_ROOT_DIR.join(format!(
|
||||
"crash-{}.log",
|
||||
@ -348,7 +181,7 @@ pub fn run() {
let handle = app.handle().clone();

tauri::async_runtime::block_on(async move {
let state = setup(handle).await;
let state = setup::setup(handle).await;
info!("initialized drop client");
app.manage(Mutex::new(state));

@ -372,57 +205,42 @@ pub fn run() {
|
||||
.shadow(false)
|
||||
.data_directory(DATA_ROOT_DIR.join(".webview"))
|
||||
.build()
|
||||
.expect("Failed to build main window");
|
||||
.unwrap();
|
||||
|
||||
app.deep_link().on_open_url(move |event| {
|
||||
debug!("handling drop:// url");
|
||||
let binding = event.urls();
|
||||
let url = match binding.first() {
|
||||
Some(url) => url,
|
||||
None => {
|
||||
warn!("No value recieved from deep link. Is this a drop server?");
|
||||
return;
|
||||
}
|
||||
};
|
||||
if let Some("handshake") = url.host_str() {
|
||||
tauri::async_runtime::spawn(recieve_handshake(
|
||||
handle.clone(),
|
||||
url.path().to_string(),
|
||||
));
|
||||
let url = binding.first().unwrap();
|
||||
if url.host_str().unwrap() == "handshake" {
|
||||
tauri::async_runtime::spawn(recieve_handshake(
|
||||
handle.clone(),
|
||||
url.path().to_string(),
|
||||
));
|
||||
}
|
||||
});
|
||||
let open_menu_item = MenuItem::with_id(app, "open", "Open", true, None::<&str>).expect("Failed to generate open menu item");
|
||||
|
||||
let sep = PredefinedMenuItem::separator(app).expect("Failed to generate menu separator item");
|
||||
|
||||
let quit_menu_item = MenuItem::with_id(app, "quit", "Quit", true, None::<&str>).expect("Failed to generate quit menu item");
|
||||
|
||||
let menu = Menu::with_items(
|
||||
app,
|
||||
&[
|
||||
&open_menu_item,
|
||||
&sep,
|
||||
&MenuItem::with_id(app, "open", "Open", true, None::<&str>).unwrap(),
|
||||
&PredefinedMenuItem::separator(app).unwrap(),
|
||||
/*
|
||||
&MenuItem::with_id(app, "show_library", "Library", true, None::<&str>)?,
|
||||
&MenuItem::with_id(app, "show_settings", "Settings", true, None::<&str>)?,
|
||||
&PredefinedMenuItem::separator(app)?,
|
||||
*/
|
||||
&quit_menu_item,
|
||||
&MenuItem::with_id(app, "quit", "Quit", true, None::<&str>).unwrap(),
|
||||
],
|
||||
)
|
||||
.expect("Failed to generate menu");
|
||||
.unwrap();
|
||||
|
||||
run_on_tray(|| {
|
||||
TrayIconBuilder::new()
|
||||
.icon(app.default_window_icon().expect("Failed to get default window icon").clone())
|
||||
.icon(app.default_window_icon().unwrap().clone())
|
||||
.menu(&menu)
|
||||
.on_menu_event(|app, event| match event.id.as_ref() {
|
||||
"open" => {
|
||||
app.webview_windows()
|
||||
.get("main")
|
||||
.expect("Failed to get webview")
|
||||
.show()
|
||||
.expect("Failed to show window");
|
||||
app.webview_windows().get("main").unwrap().show().unwrap();
|
||||
}
|
||||
"quit" => {
|
||||
cleanup_and_exit(app, &app.state());
|
||||
@ -439,19 +257,15 @@ pub fn run() {
|
||||
{
|
||||
let mut db_handle = borrow_db_mut_checked();
|
||||
if let Some(original) = db_handle.prev_database.take() {
|
||||
let canonicalised = match original.canonicalize() {
|
||||
Ok(o) => o,
|
||||
Err(_) => original,
|
||||
};
|
||||
warn!(
|
||||
"Database corrupted. Original file at {}",
|
||||
canonicalised.display()
|
||||
original.canonicalize().unwrap().to_string_lossy()
|
||||
);
|
||||
app.dialog()
|
||||
.message(format!(
|
||||
"Database corrupted. A copy has been saved at: {}",
|
||||
canonicalised.display()
|
||||
))
|
||||
.message(
|
||||
"Database corrupted. A copy has been saved at: ".to_string()
|
||||
+ original.to_str().unwrap(),
|
||||
)
|
||||
.title("Database corrupted")
|
||||
.show(|_| {});
|
||||
}
|
||||
@ -462,7 +276,7 @@ pub fn run() {
|
||||
})
|
||||
.register_asynchronous_uri_scheme_protocol("object", move |_ctx, request, responder| {
|
||||
tauri::async_runtime::spawn(async move {
|
||||
fetch_object_wrapper(request, responder).await;
|
||||
fetch_object(request, responder).await;
|
||||
});
|
||||
})
|
||||
.register_asynchronous_uri_scheme_protocol("server", |ctx, request, responder| {
|
||||
@ -473,8 +287,8 @@ pub fn run() {
|
||||
|
||||
offline!(
|
||||
state,
|
||||
handle_server_proto_wrapper,
|
||||
handle_server_proto_offline_wrapper,
|
||||
handle_server_proto,
|
||||
handle_server_proto_offline,
|
||||
request,
|
||||
responder
|
||||
)
|
||||
@ -484,7 +298,7 @@ pub fn run() {
|
||||
.on_window_event(|window, event| {
|
||||
if let WindowEvent::CloseRequested { api, .. } = event {
|
||||
run_on_tray(|| {
|
||||
window.hide().expect("Failed to close window in tray");
|
||||
window.hide().unwrap();
|
||||
api.prevent_close();
|
||||
});
|
||||
}
|
||||
|
||||
@ -1,16 +1,9 @@
|
||||
use serde_json::json;
|
||||
|
||||
use crate::{
|
||||
error::remote_access_error::RemoteAccessError,
|
||||
remote::{
|
||||
auth::generate_authorization_header,
|
||||
cache::{cache_object, get_cached_object},
|
||||
requests::{generate_url, make_authenticated_get},
|
||||
utils::DROP_CLIENT_ASYNC,
|
||||
},
|
||||
use drop_errors::remote_access_error::RemoteAccessError;
|
||||
use drop_native_library::collections::{Collection, Collections};
|
||||
use drop_remote::{
|
||||
auth::generate_authorization_header, cache::{cache_object, get_cached_object}, requests::{generate_url, make_authenticated_get}, utils::DROP_CLIENT_ASYNC
|
||||
};
|
||||
|
||||
use super::collection::{Collection, Collections};
|
||||
use serde_json::json;
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn fetch_collections(
|
||||
76
src-tauri/src/native_library/commands.rs
Normal file
@ -0,0 +1,76 @@
|
||||
use std::sync::Mutex;
|
||||
|
||||
use drop_database::{borrow_db_checked, models::data::GameVersion, runtime_models::Game};
|
||||
use drop_errors::{library_error::LibraryError, remote_access_error::RemoteAccessError};
|
||||
use drop_native_library::{library::{fetch_game_logic, fetch_game_logic_offline, fetch_game_version_options_logic, fetch_library_logic, fetch_library_logic_offline, get_current_meta, uninstall_game_logic, FetchGameStruct}, state::{GameStatusManager, GameStatusWithTransient}};
|
||||
use tauri::AppHandle;
|
||||
|
||||
use crate::{AppState, offline};
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn fetch_library(
|
||||
state: tauri::State<'_, Mutex<AppState>>,
|
||||
hard_refresh: Option<bool>,
|
||||
) -> Result<Vec<Game>, RemoteAccessError> {
|
||||
offline!(
|
||||
state,
|
||||
fetch_library_logic,
|
||||
fetch_library_logic_offline,
|
||||
hard_refresh
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn fetch_game(
|
||||
game_id: String,
|
||||
state: tauri::State<'_, Mutex<AppState>>,
|
||||
) -> Result<FetchGameStruct, RemoteAccessError> {
|
||||
offline!(
|
||||
state,
|
||||
fetch_game_logic,
|
||||
fetch_game_logic_offline,
|
||||
game_id
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn fetch_game_status(id: String) -> GameStatusWithTransient {
|
||||
let db_handle = borrow_db_checked();
|
||||
GameStatusManager::fetch_state(&id, &db_handle)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn uninstall_game(game_id: String, app_handle: AppHandle) -> Result<(), LibraryError> {
|
||||
let meta = match get_current_meta(&game_id) {
|
||||
Some(data) => data,
|
||||
None => return Err(LibraryError::MetaNotFound(game_id)),
|
||||
};
|
||||
uninstall_game_logic(meta, &app_handle);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn fetch_game_version_options(
|
||||
game_id: String,
|
||||
state: tauri::State<'_, Mutex<AppState>>,
|
||||
) -> Result<Vec<GameVersion>, RemoteAccessError> {
|
||||
let all_versions = fetch_game_version_options_logic(game_id).await?;
|
||||
|
||||
let state_lock = state.lock().unwrap();
|
||||
let process_manager_lock = state_lock.process_manager.lock().unwrap();
|
||||
let data: Vec<GameVersion> = all_versions
|
||||
.into_iter()
|
||||
.filter(|v| {
|
||||
process_manager_lock
|
||||
.valid_platform(&v.platform)
|
||||
.unwrap()
|
||||
})
|
||||
.collect();
|
||||
drop(process_manager_lock);
|
||||
drop(state_lock);
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
@ -3,13 +3,11 @@ use std::{
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use drop_database::{borrow_db_checked, models::data::GameDownloadStatus};
|
||||
use drop_downloads::downloadable::Downloadable;
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
|
||||
use crate::{
|
||||
database::{
|
||||
db::borrow_db_checked,
|
||||
models::data::GameDownloadStatus,
|
||||
}, download_manager::downloadable::Downloadable, error::application_download_error::ApplicationDownloadError, lock, AppState
|
||||
};
|
||||
use crate::AppState;
|
||||
|
||||
use super::download_agent::GameDownloadAgent;
|
||||
|
||||
@ -18,16 +16,18 @@ pub async fn download_game(
|
||||
game_id: String,
|
||||
game_version: String,
|
||||
install_dir: usize,
|
||||
state: tauri::State<'_, Mutex<AppState<'_>>>,
|
||||
state: tauri::State<'_, Mutex<AppState>>,
|
||||
) -> Result<(), ApplicationDownloadError> {
|
||||
let sender = { lock!(state).download_manager.get_sender().clone() };
|
||||
let sender = { state.lock().unwrap().download_manager.get_sender().clone() };
|
||||
|
||||
let game_download_agent =
|
||||
GameDownloadAgent::new_from_index(game_id.clone(), game_version.clone(), install_dir, sender).await?;
|
||||
|
||||
let game_download_agent =
|
||||
Arc::new(Box::new(game_download_agent) as Box<dyn Downloadable + Send + Sync>);
|
||||
lock!(state)
|
||||
state
|
||||
.lock()
|
||||
.unwrap()
|
||||
.download_manager
|
||||
.queue_download(game_download_agent.clone())
|
||||
.unwrap();
|
||||
@ -38,7 +38,7 @@ pub async fn download_game(
|
||||
#[tauri::command]
|
||||
pub async fn resume_download(
|
||||
game_id: String,
|
||||
state: tauri::State<'_, Mutex<AppState<'_>>>,
|
||||
state: tauri::State<'_, Mutex<AppState>>,
|
||||
) -> Result<(), ApplicationDownloadError> {
|
||||
let s = borrow_db_checked()
|
||||
.applications
|
||||
@ -57,20 +57,22 @@ pub async fn resume_download(
|
||||
} => (version_name, install_dir),
|
||||
};
|
||||
|
||||
let sender = lock!(state).download_manager.get_sender();
|
||||
let sender = state.lock().unwrap().download_manager.get_sender();
|
||||
let parent_dir: PathBuf = install_dir.into();
|
||||
|
||||
let game_download_agent = Arc::new(Box::new(
|
||||
GameDownloadAgent::new(
|
||||
game_id,
|
||||
version_name.clone(),
|
||||
parent_dir.parent().unwrap_or_else(|| panic!("Failed to get parent directry of {}", parent_dir.display())).to_path_buf(),
|
||||
parent_dir.parent().unwrap().to_path_buf(),
|
||||
sender,
|
||||
)
|
||||
.await?,
|
||||
) as Box<dyn Downloadable + Send + Sync>);
|
||||
|
||||
lock!(state)
|
||||
state
|
||||
.lock()
|
||||
.unwrap()
|
||||
.download_manager
|
||||
.queue_download(game_download_agent)
|
||||
.unwrap();
|
||||
@ -1,31 +1,22 @@
|
||||
use crate::auth::generate_authorization_header;
|
||||
use crate::database::db::{borrow_db_checked, borrow_db_mut_checked};
|
||||
use crate::database::models::data::{
|
||||
ApplicationTransientStatus, DownloadType, DownloadableMetadata,
|
||||
};
|
||||
use crate::download_manager::download_manager_frontend::{DownloadManagerSignal, DownloadStatus};
|
||||
use crate::download_manager::downloadable::Downloadable;
|
||||
use crate::download_manager::util::download_thread_control_flag::{
|
||||
DownloadThreadControl, DownloadThreadControlFlag,
|
||||
};
|
||||
use crate::download_manager::util::progress_object::{ProgressHandle, ProgressObject};
|
||||
use crate::error::application_download_error::ApplicationDownloadError;
|
||||
use crate::error::remote_access_error::RemoteAccessError;
|
||||
use crate::games::downloads::manifest::{
|
||||
DownloadBucket, DownloadContext, DownloadDrop, DropManifest, DropValidateContext, ManifestBody,
|
||||
};
|
||||
use crate::games::downloads::validate::validate_game_chunk;
|
||||
use crate::games::library::{on_game_complete, push_game_update, set_partially_installed};
|
||||
use crate::games::state::GameStatusManager;
|
||||
use crate::process::utils::get_disk_available;
|
||||
use crate::remote::requests::generate_url;
|
||||
use crate::remote::utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC};
|
||||
use crate::{app_emit, lock, send};
|
||||
use drop_database::{borrow_db_checked, borrow_db_mut_checked};
|
||||
use drop_database::drop_data::DropData;
|
||||
use drop_database::models::data::{ApplicationTransientStatus, DownloadType, DownloadableMetadata};
|
||||
use drop_downloads::download_manager_frontend::{DownloadManagerSignal, DownloadStatus};
|
||||
use drop_downloads::downloadable::Downloadable;
|
||||
use drop_downloads::util::download_thread_control_flag::{DownloadThreadControl, DownloadThreadControlFlag};
|
||||
use drop_downloads::util::progress_object::{ProgressHandle, ProgressObject};
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
use drop_errors::remote_access_error::RemoteAccessError;
|
||||
use drop_native_library::library::{on_game_complete, push_game_update, set_partially_installed};
|
||||
use drop_native_library::state::GameStatusManager;
|
||||
use drop_process::utils::get_disk_available;
|
||||
use drop_remote::auth::generate_authorization_header;
|
||||
use drop_remote::requests::generate_url;
|
||||
use drop_remote::utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC};
|
||||
use log::{debug, error, info, warn};
|
||||
use rayon::ThreadPoolBuilder;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::fs::{OpenOptions, create_dir_all};
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::mpsc::Sender;
|
||||
use std::sync::{Arc, Mutex};
|
||||
@ -35,8 +26,10 @@ use tauri::{AppHandle, Emitter};
|
||||
#[cfg(target_os = "linux")]
|
||||
use rustix::fs::{FallocateFlags, fallocate};
|
||||
|
||||
use crate::native_library::downloads::manifest::{DownloadBucket, DownloadContext, DownloadDrop, DropManifest, DropValidateContext, ManifestBody};
|
||||
use crate::native_library::downloads::validate::validate_game_chunk;
|
||||
|
||||
use super::download_logic::download_game_bucket;
|
||||
use super::drop_data::DropData;
|
||||
|
||||
static RETRY_COUNT: usize = 3;
|
||||
|
||||
@ -103,8 +96,10 @@ impl GameDownloadAgent {
|
||||
|
||||
result.ensure_manifest_exists().await?;
|
||||
|
||||
let required_space = lock!(result
|
||||
.manifest)
|
||||
let required_space = result
|
||||
.manifest
|
||||
.lock()
|
||||
.unwrap()
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.values()
|
||||
@ -172,11 +167,11 @@ impl GameDownloadAgent {
|
||||
}
|
||||
|
||||
pub fn check_manifest_exists(&self) -> bool {
|
||||
lock!(self.manifest).is_some()
|
||||
self.manifest.lock().unwrap().is_some()
|
||||
}
|
||||
|
||||
pub async fn ensure_manifest_exists(&self) -> Result<(), ApplicationDownloadError> {
|
||||
if lock!(self.manifest).is_some() {
|
||||
if self.manifest.lock().unwrap().is_some() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
@ -207,10 +202,7 @@ impl GameDownloadAgent {
|
||||
));
|
||||
}
|
||||
|
||||
let manifest_download: DropManifest = response
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| ApplicationDownloadError::Communication(e.into()))?;
|
||||
let manifest_download: DropManifest = response.json().await.unwrap();
|
||||
|
||||
if let Ok(mut manifest) = self.manifest.lock() {
|
||||
*manifest = Some(manifest_download);
|
||||
@ -222,7 +214,7 @@ impl GameDownloadAgent {
|
||||
|
||||
// Sets it up for both download and validate
|
||||
fn setup_progress(&self) {
|
||||
let buckets = lock!(self.buckets);
|
||||
let buckets = self.buckets.lock().unwrap();
|
||||
|
||||
let chunk_count = buckets.iter().map(|e| e.drops.len()).sum();
|
||||
|
||||
@ -237,23 +229,21 @@ impl GameDownloadAgent {
|
||||
}
|
||||
|
||||
pub fn ensure_buckets(&self) -> Result<(), ApplicationDownloadError> {
|
||||
if lock!(self.buckets).is_empty() {
|
||||
if self.buckets.lock().unwrap().is_empty() {
|
||||
self.generate_buckets()?;
|
||||
}
|
||||
|
||||
*lock!(self.context_map) = self.dropdata.get_contexts();
|
||||
*self.context_map.lock().unwrap() = self.dropdata.get_contexts();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn generate_buckets(&self) -> Result<(), ApplicationDownloadError> {
|
||||
let manifest = lock!(self.manifest)
|
||||
.clone()
|
||||
.ok_or(ApplicationDownloadError::NotInitialized)?;
|
||||
let manifest = self.manifest.lock().unwrap().clone().unwrap();
|
||||
let game_id = self.id.clone();
|
||||
|
||||
let base_path = Path::new(&self.dropdata.base_path);
|
||||
create_dir_all(base_path)?;
|
||||
create_dir_all(base_path).unwrap();
|
||||
|
||||
let mut buckets = Vec::new();
|
||||
|
||||
@ -263,13 +253,8 @@ impl GameDownloadAgent {
|
||||
for (raw_path, chunk) in manifest {
|
||||
let path = base_path.join(Path::new(&raw_path));
|
||||
|
||||
let container = path
|
||||
.parent()
|
||||
.ok_or(ApplicationDownloadError::IoError(Arc::new(io::Error::new(
|
||||
io::ErrorKind::NotFound,
|
||||
"no parent directory",
|
||||
))))?;
|
||||
create_dir_all(container)?;
|
||||
let container = path.parent().unwrap();
|
||||
create_dir_all(container).unwrap();
|
||||
|
||||
let already_exists = path.exists();
|
||||
let file = OpenOptions::new()
|
||||
@ -277,7 +262,8 @@ impl GameDownloadAgent {
|
||||
.write(true)
|
||||
.create(true)
|
||||
.truncate(false)
|
||||
.open(&path)?;
|
||||
.open(path.clone())
|
||||
.unwrap();
|
||||
let mut file_running_offset = 0;
|
||||
|
||||
for (index, length) in chunk.lengths.iter().enumerate() {
|
||||
@ -361,7 +347,7 @@ impl GameDownloadAgent {
|
||||
.collect::<Vec<(String, bool)>>(),
|
||||
);
|
||||
|
||||
*lock!(self.buckets) = buckets;
|
||||
*self.buckets.lock().unwrap() = buckets;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@ -377,11 +363,9 @@ impl GameDownloadAgent {
|
||||
let pool = ThreadPoolBuilder::new()
|
||||
.num_threads(max_download_threads)
|
||||
.build()
|
||||
.unwrap_or_else(|_| {
|
||||
panic!("failed to build thread pool with {max_download_threads} threads")
|
||||
});
|
||||
.unwrap();
|
||||
|
||||
let buckets = lock!(self.buckets);
|
||||
let buckets = self.buckets.lock().unwrap();
|
||||
|
||||
let mut download_contexts = HashMap::<String, DownloadContext>::new();
|
||||
|
||||
@ -400,7 +384,7 @@ impl GameDownloadAgent {
|
||||
|
||||
for version in versions {
|
||||
let download_context = DROP_CLIENT_SYNC
|
||||
.post(generate_url(&["/api/v2/client/context"], &[])?)
|
||||
.post(generate_url(&["/api/v2/client/context"], &[]).unwrap())
|
||||
.json(&ManifestBody {
|
||||
game: self.id.clone(),
|
||||
version: version.clone(),
|
||||
@ -423,7 +407,7 @@ impl GameDownloadAgent {
|
||||
let download_contexts = &download_contexts;
|
||||
|
||||
pool.scope(|scope| {
|
||||
let context_map = lock!(self.context_map);
|
||||
let context_map = self.context_map.lock().unwrap();
|
||||
for (index, bucket) in buckets.iter().enumerate() {
|
||||
let mut bucket = (*bucket).clone();
|
||||
let completed_contexts = completed_indexes_loop_arc.clone();
|
||||
@ -455,7 +439,8 @@ impl GameDownloadAgent {
|
||||
|
||||
let download_context = download_contexts
|
||||
.get(&bucket.version)
|
||||
.unwrap_or_else(|| panic!("Could not get bucket version {}. Corrupted state.", bucket.version));
|
||||
.ok_or(RemoteAccessError::CorruptedState)
|
||||
.unwrap();
|
||||
|
||||
scope.spawn(move |_| {
|
||||
// 3 attempts
|
||||
@ -487,7 +472,7 @@ impl GameDownloadAgent {
|
||||
|
||||
if i == RETRY_COUNT - 1 || !retry {
|
||||
warn!("retry logic failed, not re-attempting.");
|
||||
send!(sender, DownloadManagerSignal::Error(e));
|
||||
sender.send(DownloadManagerSignal::Error(e)).unwrap();
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -500,7 +485,7 @@ impl GameDownloadAgent {
|
||||
let newly_completed = completed_contexts.clone();
|
||||
|
||||
let completed_lock_len = {
|
||||
let mut context_map_lock = lock!(self.context_map);
|
||||
let mut context_map_lock = self.context_map.lock().unwrap();
|
||||
for (_, item) in newly_completed.iter() {
|
||||
context_map_lock.insert(item.clone(), true);
|
||||
}
|
||||
@ -508,7 +493,7 @@ impl GameDownloadAgent {
|
||||
context_map_lock.values().filter(|x| **x).count()
|
||||
};
|
||||
|
||||
let context_map_lock = lock!(self.context_map);
|
||||
let context_map_lock = self.context_map.lock().unwrap();
|
||||
let contexts = buckets
|
||||
.iter()
|
||||
.flat_map(|x| x.drops.iter().map(|e| e.checksum.clone()))
|
||||
@ -557,7 +542,7 @@ impl GameDownloadAgent {
|
||||
pub fn validate(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
|
||||
self.setup_validate(app_handle);
|
||||
|
||||
let buckets = lock!(self.buckets);
|
||||
let buckets = self.buckets.lock().unwrap();
|
||||
let contexts: Vec<DropValidateContext> = buckets
|
||||
.clone()
|
||||
.into_iter()
|
||||
@ -569,9 +554,7 @@ impl GameDownloadAgent {
|
||||
let pool = ThreadPoolBuilder::new()
|
||||
.num_threads(max_download_threads)
|
||||
.build()
|
||||
.unwrap_or_else(|_| {
|
||||
panic!("failed to build thread pool with {max_download_threads} threads")
|
||||
});
|
||||
.unwrap();
|
||||
|
||||
let invalid_chunks = Arc::new(boxcar::Vec::new());
|
||||
pool.scope(|scope| {
|
||||
@ -589,7 +572,7 @@ impl GameDownloadAgent {
|
||||
}
|
||||
Err(e) => {
|
||||
error!("{e}");
|
||||
send!(sender, DownloadManagerSignal::Error(e));
|
||||
sender.send(DownloadManagerSignal::Error(e)).unwrap();
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -616,7 +599,7 @@ impl GameDownloadAgent {
|
||||
// See docs on usage
|
||||
set_partially_installed(
|
||||
&self.metadata(),
|
||||
self.dropdata.base_path.display().to_string(),
|
||||
self.dropdata.base_path.to_str().unwrap().to_string(),
|
||||
Some(app_handle),
|
||||
);
|
||||
|
||||
@ -626,12 +609,12 @@ impl GameDownloadAgent {
|
||||
|
||||
impl Downloadable for GameDownloadAgent {
|
||||
fn download(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
|
||||
*lock!(self.status) = DownloadStatus::Downloading;
|
||||
*self.status.lock().unwrap() = DownloadStatus::Downloading;
|
||||
self.download(app_handle)
|
||||
}
|
||||
|
||||
fn validate(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
|
||||
*lock!(self.status) = DownloadStatus::Validating;
|
||||
*self.status.lock().unwrap() = DownloadStatus::Validating;
|
||||
self.validate(app_handle)
|
||||
}
|
||||
|
||||
@ -665,8 +648,10 @@ impl Downloadable for GameDownloadAgent {
|
||||
}
|
||||
|
||||
fn on_error(&self, app_handle: &tauri::AppHandle, error: &ApplicationDownloadError) {
|
||||
*lock!(self.status) = DownloadStatus::Error;
|
||||
app_emit!(app_handle, "download_error", error.to_string());
|
||||
*self.status.lock().unwrap() = DownloadStatus::Error;
|
||||
app_handle
|
||||
.emit("download_error", error.to_string())
|
||||
.unwrap();
|
||||
|
||||
error!("error while managing download: {error:?}");
|
||||
|
||||
@ -685,17 +670,12 @@ impl Downloadable for GameDownloadAgent {
|
||||
}
|
||||
|
||||
fn on_complete(&self, app_handle: &tauri::AppHandle) {
|
||||
match on_game_complete(
|
||||
on_game_complete(
|
||||
&self.metadata(),
|
||||
self.dropdata.base_path.to_string_lossy().to_string(),
|
||||
app_handle,
|
||||
) {
|
||||
Ok(_) => {}
|
||||
Err(e) => {
|
||||
error!("could not mark game as complete: {e}");
|
||||
send!(self.sender, DownloadManagerSignal::Error(ApplicationDownloadError::DownloadError(e)));
|
||||
}
|
||||
}
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
fn on_cancelled(&self, app_handle: &tauri::AppHandle) {
|
||||
@ -704,6 +684,6 @@ impl Downloadable for GameDownloadAgent {
|
||||
}
|
||||
|
||||
fn status(&self) -> DownloadStatus {
|
||||
lock!(self.status).clone()
|
||||
self.status.lock().unwrap().clone()
|
||||
}
|
||||
}
|
||||
@ -1,14 +1,11 @@
|
||||
use crate::download_manager::util::download_thread_control_flag::{
|
||||
DownloadThreadControl, DownloadThreadControlFlag,
|
||||
};
|
||||
use crate::download_manager::util::progress_object::ProgressHandle;
|
||||
use crate::error::application_download_error::ApplicationDownloadError;
|
||||
use crate::error::drop_server_error::DropServerError;
|
||||
use crate::error::remote_access_error::RemoteAccessError;
|
||||
use crate::games::downloads::manifest::{ChunkBody, DownloadBucket, DownloadContext, DownloadDrop};
|
||||
use crate::remote::auth::generate_authorization_header;
|
||||
use crate::remote::requests::generate_url;
|
||||
use crate::remote::utils::DROP_CLIENT_SYNC;
|
||||
use drop_downloads::util::download_thread_control_flag::{DownloadThreadControl, DownloadThreadControlFlag};
|
||||
use drop_downloads::util::progress_object::ProgressHandle;
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
use drop_errors::drop_server_error::ServerError;
|
||||
use drop_errors::remote_access_error::RemoteAccessError;
|
||||
use drop_remote::auth::generate_authorization_header;
|
||||
use drop_remote::requests::generate_url;
|
||||
use drop_remote::utils::DROP_CLIENT_SYNC;
|
||||
use log::{debug, info, warn};
|
||||
use md5::{Context, Digest};
|
||||
use reqwest::blocking::Response;
|
||||
@ -25,6 +22,8 @@ use std::{
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
use crate::native_library::downloads::manifest::{ChunkBody, DownloadBucket, DownloadContext, DownloadDrop};
|
||||
|
||||
static MAX_PACKET_LENGTH: usize = 4096 * 4;
|
||||
static BUMP_SIZE: usize = 4096 * 16;
|
||||
|
||||
@ -110,10 +109,11 @@ impl<'a> DropDownloadPipeline<'a, Response, File> {
|
||||
let destination = self
|
||||
.destination
|
||||
.get_mut(index)
|
||||
.ok_or(io::Error::other("no destination"))?;
|
||||
.ok_or(io::Error::other("no destination"))
|
||||
.unwrap();
|
||||
let mut remaining = drop.length;
|
||||
if drop.start != 0 {
|
||||
destination.seek(SeekFrom::Start(drop.start as u64))?;
|
||||
destination.seek(SeekFrom::Start(drop.start.try_into().unwrap()))?;
|
||||
}
|
||||
let mut last_bump = 0;
|
||||
loop {
|
||||
@ -197,7 +197,7 @@ pub fn download_game_bucket(
|
||||
ApplicationDownloadError::Communication(RemoteAccessError::FetchError(e.into()))
|
||||
})?;
|
||||
info!("{raw_res}");
|
||||
if let Ok(err) = serde_json::from_str::<DropServerError>(&raw_res) {
|
||||
if let Ok(err) = serde_json::from_str::<ServerError>(&raw_res) {
|
||||
return Err(ApplicationDownloadError::Communication(
|
||||
RemoteAccessError::InvalidResponse(err),
|
||||
));
|
||||
@ -214,39 +214,20 @@ pub fn download_game_bucket(
|
||||
RemoteAccessError::UnparseableResponse("missing Content-Lengths header".to_owned()),
|
||||
))?
|
||||
.to_str()
|
||||
.map_err(|e| {
|
||||
ApplicationDownloadError::Communication(RemoteAccessError::UnparseableResponse(
|
||||
e.to_string(),
|
||||
))
|
||||
})?;
|
||||
.unwrap();
|
||||
|
||||
for (i, raw_length) in lengths.split(",").enumerate() {
|
||||
let length = raw_length.parse::<usize>().unwrap_or(0);
|
||||
let Some(drop) = bucket.drops.get(i) else {
|
||||
warn!("invalid number of Content-Lengths recieved: {i}, {lengths}");
|
||||
return Err(ApplicationDownloadError::DownloadError(
|
||||
RemoteAccessError::InvalidResponse(DropServerError {
|
||||
status_code: 400,
|
||||
status_message: format!(
|
||||
"invalid number of Content-Lengths recieved: {i}, {lengths}"
|
||||
),
|
||||
}),
|
||||
));
|
||||
return Err(ApplicationDownloadError::DownloadError);
|
||||
};
|
||||
if drop.length != length {
|
||||
warn!(
|
||||
"for {}, expected {}, got {} ({})",
|
||||
drop.filename, drop.length, raw_length, length
|
||||
);
|
||||
return Err(ApplicationDownloadError::DownloadError(
|
||||
RemoteAccessError::InvalidResponse(DropServerError {
|
||||
status_code: 400,
|
||||
status_message: format!(
|
||||
"for {}, expected {}, got {} ({})",
|
||||
drop.filename, drop.length, raw_length, length
|
||||
),
|
||||
}),
|
||||
));
|
||||
return Err(ApplicationDownloadError::DownloadError);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,6 +1,5 @@
pub mod commands;
pub mod download_agent;
mod download_logic;
pub mod drop_data;
mod manifest;
pub mod validate;
@ -3,17 +3,12 @@ use std::{
|
||||
io::{self, BufWriter, Read, Seek, SeekFrom, Write},
|
||||
};
|
||||
|
||||
use drop_downloads::util::{download_thread_control_flag::{DownloadThreadControl, DownloadThreadControlFlag}, progress_object::ProgressHandle};
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
use log::debug;
|
||||
use md5::Context;
|
||||
|
||||
use crate::{
|
||||
download_manager::util::{
|
||||
download_thread_control_flag::{DownloadThreadControl, DownloadThreadControlFlag},
|
||||
progress_object::ProgressHandle,
|
||||
},
|
||||
error::application_download_error::ApplicationDownloadError,
|
||||
games::downloads::manifest::DropValidateContext,
|
||||
};
|
||||
use crate::native_library::downloads::manifest::DropValidateContext;
|
||||
|
||||
pub fn validate_game_chunk(
|
||||
ctx: &DropValidateContext,
|
||||
@ -36,14 +31,14 @@ pub fn validate_game_chunk(
|
||||
|
||||
if ctx.offset != 0 {
|
||||
source
|
||||
.seek(SeekFrom::Start(ctx.offset as u64))
|
||||
.seek(SeekFrom::Start(ctx.offset.try_into().unwrap()))
|
||||
.expect("Failed to seek to file offset");
|
||||
}
|
||||
|
||||
let mut hasher = md5::Context::new();
|
||||
|
||||
let completed =
|
||||
validate_copy(&mut source, &mut hasher, ctx.length, control_flag, progress)?;
|
||||
validate_copy(&mut source, &mut hasher, ctx.length, control_flag, progress).unwrap();
|
||||
if !completed {
|
||||
return Ok(false);
|
||||
}
|
||||
3
src-tauri/src/native_library/mod.rs
Normal file
@ -0,0 +1,3 @@
pub mod collection_commands;
pub mod commands;
pub mod downloads;
@ -1,14 +1,18 @@
|
||||
use std::sync::Mutex;
|
||||
|
||||
use crate::{error::process_error::ProcessError, lock, AppState};
|
||||
use drop_database::borrow_db_mut_checked;
|
||||
use drop_errors::{library_error::LibraryError, process_error::ProcessError};
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::AppState;
|
||||
|
||||
#[tauri::command]
|
||||
pub fn launch_game(
|
||||
id: String,
|
||||
state: tauri::State<'_, Mutex<AppState>>,
|
||||
) -> Result<(), ProcessError> {
|
||||
let state_lock = lock!(state);
|
||||
let mut process_manager_lock = lock!(state_lock.process_manager);
|
||||
let state_lock = state.lock().unwrap();
|
||||
let mut process_manager_lock = state_lock.process_manager.lock().unwrap();
|
||||
|
||||
//let meta = DownloadableMetadata {
|
||||
// id,
|
||||
@ -16,7 +20,7 @@ pub fn launch_game(
|
||||
// download_type: DownloadType::Game,
|
||||
//};
|
||||
|
||||
match process_manager_lock.launch_process(id, &state_lock) {
|
||||
match process_manager_lock.launch_process(id, state_lock.process_manager) {
|
||||
Ok(()) => {}
|
||||
Err(e) => return Err(e),
|
||||
}
|
||||
@ -32,8 +36,8 @@ pub fn kill_game(
|
||||
game_id: String,
|
||||
state: tauri::State<'_, Mutex<AppState>>,
|
||||
) -> Result<(), ProcessError> {
|
||||
let state_lock = lock!(state);
|
||||
let mut process_manager_lock = lock!(state_lock.process_manager);
|
||||
let state_lock = state.lock().unwrap();
|
||||
let mut process_manager_lock = state_lock.process_manager.lock().unwrap();
|
||||
process_manager_lock
|
||||
.kill_game(game_id)
|
||||
.map_err(ProcessError::IOError)
|
||||
@ -44,7 +48,52 @@ pub fn open_process_logs(
|
||||
game_id: String,
|
||||
state: tauri::State<'_, Mutex<AppState>>,
|
||||
) -> Result<(), ProcessError> {
|
||||
let state_lock = lock!(state);
|
||||
let mut process_manager_lock = lock!(state_lock.process_manager);
|
||||
let state_lock = state.lock().unwrap();
|
||||
let mut process_manager_lock = state_lock.process_manager.lock().unwrap();
|
||||
process_manager_lock.open_process_logs(game_id)
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct FrontendGameOptions {
|
||||
pub launch_string: String,
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn update_game_configuration(
|
||||
game_id: String,
|
||||
options: FrontendGameOptions,
|
||||
) -> Result<(), LibraryError> {
|
||||
let mut handle = borrow_db_mut_checked();
|
||||
let installed_version = handle
|
||||
.applications
|
||||
.installed_game_version
|
||||
.get(&game_id)
|
||||
.ok_or(LibraryError::MetaNotFound(game_id))?;
|
||||
|
||||
let id = installed_version.id.clone();
|
||||
let version = installed_version.version.clone().unwrap();
|
||||
|
||||
let mut existing_configuration = handle
|
||||
.applications
|
||||
.game_versions
|
||||
.get(&id)
|
||||
.unwrap()
|
||||
.get(&version)
|
||||
.unwrap()
|
||||
.clone();
|
||||
|
||||
// Add more options in here
|
||||
existing_configuration.launch_command_template = options.launch_string;
|
||||
|
||||
// Add no more options past here
|
||||
|
||||
handle
|
||||
.applications
|
||||
.game_versions
|
||||
.get_mut(&id)
|
||||
.unwrap()
|
||||
.insert(version.to_string(), existing_configuration);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -1,5 +1 @@
pub mod commands;
pub mod process_manager;
pub mod process_handlers;
pub mod format;
pub mod utils;
pub mod commands;
@ -1,228 +0,0 @@
use std::{collections::HashMap, env, sync::Mutex};

use chrono::Utc;
use droplet_rs::ssl::sign_nonce;
use gethostname::gethostname;
use log::{debug, error, warn};
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter, Manager};
use url::Url;

use crate::{
    app_emit, database::{
        db::{borrow_db_checked, borrow_db_mut_checked},
        models::data::DatabaseAuth,
    }, error::{drop_server_error::DropServerError, remote_access_error::RemoteAccessError}, lock, remote::{cache::clear_cached_object, requests::make_authenticated_get, utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC}}, AppState, AppStatus, User
};

use super::{
    cache::{cache_object, get_cached_object},
    requests::generate_url,
};

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CapabilityConfiguration {}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct InitiateRequestBody {
    name: String,
    platform: String,
    capabilities: HashMap<String, CapabilityConfiguration>,
    mode: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeRequestBody {
    client_id: String,
    token: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeResponse {
    private: String,
    certificate: String,
    id: String,
}

pub fn generate_authorization_header() -> String {
    let certs = {
        let db = borrow_db_checked();
        db.auth.clone().expect("Authorisation not initialised")
    };

    let nonce = Utc::now().timestamp_millis().to_string();

    let signature =
        sign_nonce(certs.private, nonce.clone()).expect("Failed to generate authorisation header");

    format!("Nonce {} {} {}", certs.client_id, nonce, signature)
}

pub async fn fetch_user() -> Result<User, RemoteAccessError> {
    let response = make_authenticated_get(generate_url(&["/api/v1/client/user"], &[])?).await?;
    if response.status() != 200 {
        let err: DropServerError = response.json().await?;
        warn!("{err:?}");

        if err.status_message == "Nonce expired" {
            return Err(RemoteAccessError::OutOfSync);
        }

        return Err(RemoteAccessError::InvalidResponse(err));
    }

    response
        .json::<User>()
        .await
        .map_err(std::convert::Into::into)
}

async fn recieve_handshake_logic(app: &AppHandle, path: String) -> Result<(), RemoteAccessError> {
    let path_chunks: Vec<&str> = path.split('/').collect();
    if path_chunks.len() != 3 {
        app_emit!(app, "auth/failed", ());
        return Err(RemoteAccessError::HandshakeFailed(
            "failed to parse token".to_string(),
        ));
    }

    let base_url = {
        let handle = borrow_db_checked();
        Url::parse(handle.base_url.as_str())?
    };

    let client_id = path_chunks
        .get(1)
        .expect("Failed to get client id from path chunks");
    let token = path_chunks
        .get(2)
        .expect("Failed to get token from path chunks");
    let body = HandshakeRequestBody {
        client_id: (client_id).to_string(),
        token: (token).to_string(),
    };

    let endpoint = base_url.join("/api/v1/client/auth/handshake")?;
    let client = DROP_CLIENT_ASYNC.clone();
    let response = client.post(endpoint).json(&body).send().await?;
    debug!("handshake responsded with {}", response.status().as_u16());
    if !response.status().is_success() {
        return Err(RemoteAccessError::InvalidResponse(response.json().await?));
    }
    let response_struct: HandshakeResponse = response.json().await?;

    {
        let mut handle = borrow_db_mut_checked();
        handle.auth = Some(DatabaseAuth {
            private: response_struct.private,
            cert: response_struct.certificate,
            client_id: response_struct.id,
            web_token: None,
        });
    }

    let web_token = {
        let header = generate_authorization_header();
        let token = client
            .post(base_url.join("/api/v1/client/user/webtoken")?)
            .header("Authorization", header)
            .send()
            .await?;

        token.text().await?
    };
    let mut handle = borrow_db_mut_checked();
    handle.auth.as_mut().unwrap().web_token = Some(web_token);

    Ok(())
}

pub async fn recieve_handshake(app: AppHandle, path: String) {
    // Tell the app we're processing
    app_emit!(app, "auth/processing", ());

    let handshake_result = recieve_handshake_logic(&app, path).await;
    if let Err(e) = handshake_result {
        warn!("error with authentication: {e}");
        app_emit!(app, "auth/failed", e.to_string());
        return;
    }

    let app_state = app.state::<Mutex<AppState>>();

    let (app_status, user) = setup().await;

    let mut state_lock = lock!(app_state);

    state_lock.status = app_status;
    state_lock.user = user;

    let _ = clear_cached_object("collections");
    let _ = clear_cached_object("library");

    drop(state_lock);

    app_emit!(app, "auth/finished", ());
}

pub fn auth_initiate_logic(mode: String) -> Result<String, RemoteAccessError> {
    let base_url = {
        let db_lock = borrow_db_checked();
        Url::parse(&db_lock.base_url.clone())?
    };

    let hostname = gethostname();

    let endpoint = base_url.join("/api/v1/client/auth/initiate")?;
    let body = InitiateRequestBody {
        name: format!("{} (Desktop)", hostname.display()),
        platform: env::consts::OS.to_string(),
        capabilities: HashMap::from([
            ("peerAPI".to_owned(), CapabilityConfiguration {}),
            ("cloudSaves".to_owned(), CapabilityConfiguration {}),
        ]),
        mode,
    };

    let client = DROP_CLIENT_SYNC.clone();
    let response = client.post(endpoint.to_string()).json(&body).send()?;

    if response.status() != 200 {
        let data: DropServerError = response.json()?;
        error!("could not start handshake: {}", data.status_message);

        return Err(RemoteAccessError::HandshakeFailed(data.status_message));
    }

    let response = response.text()?;

    Ok(response)
}

pub async fn setup() -> (AppStatus, Option<User>) {
    let auth = {
        let data = borrow_db_checked();
        data.auth.clone()
    };

    if auth.is_some() {
        let user_result = match fetch_user().await {
            Ok(data) => data,
            Err(RemoteAccessError::FetchError(_)) => {
                let user = get_cached_object::<User>("user").ok();
                return (AppStatus::Offline, user);
            }
            Err(_) => return (AppStatus::SignedInNeedsReauth, None),
        };
        if let Err(e) = cache_object("user", &user_result) {
            warn!("Could not cache user object with error {e}");
        }
        return (AppStatus::SignedIn, Some(user_result));
    }

    (AppStatus::SignedOut, None)
}
@ -1,5 +1,8 @@
use std::sync::Mutex;
use std::{sync::Mutex, time::Duration};

use drop_database::{borrow_db_checked, borrow_db_mut_checked};
use drop_errors::remote_access_error::RemoteAccessError;
use drop_remote::{auth::{auth_initiate_logic, generate_authorization_header}, cache::{cache_object, get_cached_object}, requests::generate_url, utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC, DROP_CLIENT_WS_CLIENT}};
use futures_lite::StreamExt;
use log::{debug, warn};
use reqwest_websocket::{Message, RequestBuilderExt};
@ -7,29 +10,12 @@ use serde::Deserialize;
use tauri::{AppHandle, Emitter, Manager};
use url::Url;

use crate::{
    AppState, AppStatus, app_emit,
    database::db::{borrow_db_checked, borrow_db_mut_checked},
    error::remote_access_error::RemoteAccessError,
    lock,
    remote::{
        auth::generate_authorization_header,
        requests::generate_url,
        utils::{DROP_CLIENT_SYNC, DROP_CLIENT_WS_CLIENT},
    },
    utils::webbrowser_open::webbrowser_open,
};

use super::{
    auth::{auth_initiate_logic, recieve_handshake, setup},
    cache::{cache_object, get_cached_object},
    utils::use_remote_logic,
};
use crate::{auth::{recieve_handshake, setup}, AppState, AppStatus};

#[tauri::command]
pub async fn use_remote(
    url: String,
    state: tauri::State<'_, Mutex<AppState<'_>>>,
    state: tauri::State<'_, Mutex<AppState>>,
) -> Result<(), RemoteAccessError> {
    use_remote_logic(url, state).await
}
@ -42,7 +28,7 @@ pub fn gen_drop_url(path: String) -> Result<String, RemoteAccessError> {
        Url::parse(&handle.base_url).map_err(RemoteAccessError::ParsingError)?
    };

    let url = base_url.join(&path)?;
    let url = base_url.join(&path).unwrap();

    Ok(url.to_string())
}
@ -79,20 +65,20 @@ pub fn sign_out(app: AppHandle) {
    // Update app state
    {
        let app_state = app.state::<Mutex<AppState>>();
        let mut app_state_handle = lock!(app_state);
        let mut app_state_handle = app_state.lock().unwrap();
        app_state_handle.status = AppStatus::SignedOut;
        app_state_handle.user = None;
    }

    // Emit event for frontend
    app_emit!(app, "auth/signedout", ());
    app.emit("auth/signedout", ()).unwrap();
}

#[tauri::command]
pub async fn retry_connect(state: tauri::State<'_, Mutex<AppState<'_>>>) -> Result<(), ()> {
pub async fn retry_connect(state: tauri::State<'_, Mutex<AppState>>) -> Result<(), ()> {
    let (app_status, user) = setup().await;

    let mut guard = lock!(state);
    let mut guard = state.lock().unwrap();
    guard.status = app_status;
    guard.user = user;
    drop(guard);
@ -111,7 +97,7 @@ pub fn auth_initiate() -> Result<(), RemoteAccessError> {
    let complete_redir_url = base_url.join(&redir_url)?;

    debug!("opening web browser to continue authentication");
    webbrowser_open(complete_redir_url.as_ref());
    webbrowser::open(complete_redir_url.as_ref()).unwrap();
    Ok(())
}

@ -126,7 +112,7 @@ struct CodeWebsocketResponse {
pub fn auth_initiate_code(app: AppHandle) -> Result<String, RemoteAccessError> {
    let base_url = {
        let db_lock = borrow_db_checked();
        Url::parse(&db_lock.base_url.clone())?.clone()
        Url::parse(&db_lock.base_url.clone())?
    };

    let code = auth_initiate_logic("code".to_string())?;
@ -153,13 +139,16 @@ pub fn auth_initiate_code(app: AppHandle) -> Result<String, RemoteAccessError> {
                match response.response_type.as_str() {
                    "token" => {
                        let recieve_app = app.clone();
                        manual_recieve_handshake(recieve_app, response.value).await;
                        manual_recieve_handshake(recieve_app, response.value)
                            .await
                            .unwrap();
                        return Ok(());
                    }
                    _ => return Err(RemoteAccessError::HandshakeFailed(response.value)),
                }
            }
        }

        Err(RemoteAccessError::HandshakeFailed(
            "Failed to connect to websocket".to_string(),
        ))
@ -168,7 +157,7 @@ pub fn auth_initiate_code(app: AppHandle) -> Result<String, RemoteAccessError> {
        let result = load().await;
        if let Err(err) = result {
            warn!("{err}");
            app_emit!(app, "auth/failed", err.to_string());
            app.emit("auth/failed", err.to_string()).unwrap();
        }
    });

@ -176,6 +165,47 @@ pub fn auth_initiate_code(app: AppHandle) -> Result<String, RemoteAccessError> {
}

#[tauri::command]
pub async fn manual_recieve_handshake(app: AppHandle, token: String) {
pub async fn manual_recieve_handshake(app: AppHandle, token: String) -> Result<(), ()> {
    recieve_handshake(app, format!("handshake/{token}")).await;

    Ok(())
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct DropHealthcheck {
    app_name: String,
}

pub async fn use_remote_logic(
    url: String,
    state: tauri::State<'_, Mutex<AppState>>,
) -> Result<(), RemoteAccessError> {
    debug!("connecting to url {url}");
    let base_url = Url::parse(&url)?;

    // Test Drop url
    let test_endpoint = base_url.join("/api/v1")?;
    let client = DROP_CLIENT_ASYNC.clone();
    let response = client
        .get(test_endpoint.to_string())
        .timeout(Duration::from_secs(3))
        .send()
        .await?;

    let result: DropHealthcheck = response.json().await?;

    if result.app_name != "Drop" {
        warn!("user entered drop endpoint that connected, but wasn't identified as Drop");
        return Err(RemoteAccessError::InvalidEndpoint);
    }

    let mut app_state = state.lock().unwrap();
    app_state.status = AppStatus::SignedOut;
    drop(app_state);

    let mut db_state = borrow_db_mut_checked();
    db_state.base_url = base_url.to_string();

    Ok(())
}

@ -1,75 +0,0 @@
use http::{header::CONTENT_TYPE, response::Builder as ResponseBuilder, Response};
use log::{debug, warn};
use tauri::UriSchemeResponder;

use crate::{database::db::DatabaseImpls, error::cache_error::CacheError, remote::utils::DROP_CLIENT_ASYNC, DB};

use super::{
    auth::generate_authorization_header,
    cache::{ObjectCache, cache_object, get_cached_object},
};

pub async fn fetch_object_wrapper(request: http::Request<Vec<u8>>, responder: UriSchemeResponder) {
    match fetch_object(request).await {
        Ok(r) => responder.respond(r),
        Err(e) => {
            warn!("Cache error: {e}");
            responder.respond(Response::builder().status(500).body(Vec::new()).expect("Failed to build error response"));
        }
    };
}

pub async fn fetch_object(request: http::Request<Vec<u8>>) -> Result<Response<Vec<u8>>, CacheError>
{
    // Drop leading /
    let object_id = &request.uri().path()[1..];

    let cache_result = get_cached_object::<ObjectCache>(object_id);
    if let Ok(cache_result) = &cache_result
        && !cache_result.has_expired()
    {
        return cache_result.try_into();
    }

    let header = generate_authorization_header();
    let client = DROP_CLIENT_ASYNC.clone();
    let url = format!("{}api/v1/client/object/{object_id}", DB.fetch_base_url());
    let response = client.get(url).header("Authorization", header).send().await;

    match response {
        Ok(r) => {
            let resp_builder = ResponseBuilder::new().header(
                CONTENT_TYPE,
                r.headers()
                    .get("Content-Type")
                    .expect("Failed get Content-Type header"),
            );
            let data = match r.bytes().await {
                Ok(data) => Vec::from(data),
                Err(e) => {
                    warn!(
                        "Could not get data from cache object {object_id} with error {e}",
                    );
                    Vec::new()
                }
            };
            let resp = resp_builder.body(data).expect("Failed to build object cache response body");
            if cache_result.map_or(true, |x| x.has_expired()) {
                cache_object::<ObjectCache>(object_id, &resp.clone().try_into()?)
                    .expect("Failed to create cached object");
            }

            Ok(resp)
        }
        Err(e) => {
            debug!("Object fetch failed with error {e}. Attempting to download from cache");
            match cache_result {
                Ok(cache_result) => cache_result.try_into(),
                Err(e) => {
                    warn!("{e}");
                    Err(CacheError::Remote(e))
                }
            }
        }
    }
}
@ -1,8 +1,2 @@
pub mod auth;
#[macro_use]
pub mod cache;
pub mod commands;
pub mod fetch_object;
pub mod requests;
pub mod server_proto;
pub mod utils;
pub mod server_proto;
@ -1,91 +1,56 @@
use std::str::FromStr;

use drop_database::borrow_db_checked;
use http::{uri::PathAndQuery, Request, Response, StatusCode, Uri};
use log::{error, warn};
use tauri::UriSchemeResponder;

use crate::{database::db::borrow_db_checked, remote::utils::DROP_CLIENT_SYNC, utils::webbrowser_open::webbrowser_open};

pub async fn handle_server_proto_offline_wrapper(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
    responder.respond(match handle_server_proto_offline(request).await {
        Ok(res) => res,
        Err(_) => unreachable!()
    });
}

pub async fn handle_server_proto_offline(_request: Request<Vec<u8>>) -> Result<Response<Vec<u8>>, StatusCode>{
    Ok(Response::builder()
pub async fn handle_server_proto_offline(_request: Request<Vec<u8>>, responder: UriSchemeResponder) {
    let four_oh_four = Response::builder()
        .status(StatusCode::NOT_FOUND)
        .body(Vec::new())
        .expect("Failed to build error response for proto offline"))

        .unwrap();
    responder.respond(four_oh_four);
}

pub async fn handle_server_proto_wrapper(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
    match handle_server_proto(request).await {
        Ok(r) => responder.respond(r),
        Err(e) => {
            warn!("Cache error: {e}");
            responder.respond(Response::builder().status(e).body(Vec::new()).expect("Failed to build error response"));
        }
    }
}

async fn handle_server_proto(request: Request<Vec<u8>>) -> Result<Response<Vec<u8>>, StatusCode> {
pub async fn handle_server_proto(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
    let db_handle = borrow_db_checked();
    let auth = match db_handle.auth.as_ref() {
        Some(auth) => auth,
        None => {
            error!("Could not find auth in database");
            return Err(StatusCode::UNAUTHORIZED)
        }
    let web_token = match &db_handle.auth.as_ref().unwrap().web_token {
        Some(e) => e,
        None => return,
    };
    let web_token = match &auth.web_token {
        Some(token) => token,
        None => return Err(StatusCode::UNAUTHORIZED),
    };
    let remote_uri = db_handle.base_url.parse::<Uri>().expect("Failed to parse base url");
    let remote_uri = db_handle.base_url.parse::<Uri>().unwrap();

    let path = request.uri().path();

    let mut new_uri = request.uri().clone().into_parts();
    new_uri.path_and_query =
        Some(PathAndQuery::from_str(&format!("{path}?noWrapper=true")).expect("Failed to parse request path in proto"));
        Some(PathAndQuery::from_str(&format!("{path}?noWrapper=true")).unwrap());
    new_uri.authority = remote_uri.authority().cloned();
    new_uri.scheme = remote_uri.scheme().cloned();
    let err_msg = &format!("Failed to build new uri from parts {new_uri:?}");
    let new_uri = Uri::from_parts(new_uri).expect(err_msg);
    let new_uri = Uri::from_parts(new_uri).unwrap();

    let whitelist_prefix = ["/store", "/api", "/_", "/fonts"];

    if whitelist_prefix.iter().all(|f| !path.starts_with(f)) {
        webbrowser_open(new_uri.to_string());
        return Ok(Response::new(Vec::new()))
        webbrowser::open(&new_uri.to_string()).unwrap();
        return;
    }

    let client = DROP_CLIENT_SYNC.clone();
    let response = match client
    let client = drop_remote::utils::DROP_CLIENT_SYNC.clone();
    let response = client
        .request(request.method().clone(), new_uri.to_string())
        .header("Authorization", format!("Bearer {web_token}"))
        .headers(request.headers().clone())
        .send() {
            Ok(response) => response,
            Err(e) => {
                warn!("Could not send response. Got {e} when sending");
                return Err(e.status().unwrap_or(StatusCode::BAD_REQUEST))
            },
        };
        .send()
        .unwrap();

    let response_status = response.status();
    let response_body = match response.bytes() {
        Ok(bytes) => bytes,
        Err(e) => return Err(e.status().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR)),
    };
    let response_body = response.bytes().unwrap();

    let http_response = Response::builder()
        .status(response_status)
        .body(response_body.to_vec())
        .expect("Failed to build server proto response");
        .unwrap();

    Ok(http_response)
    responder.respond(http_response);
}

@ -1,145 +0,0 @@
use std::{
    fs::{self, File},
    io::Read,
    sync::{LazyLock, Mutex},
    time::Duration,
};

use log::{debug, info, warn};
use reqwest::Certificate;
use serde::Deserialize;
use url::Url;

use crate::{
    database::db::{borrow_db_mut_checked, DATA_ROOT_DIR}, error::remote_access_error::RemoteAccessError, lock, AppState, AppStatus
};

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct DropHealthcheck {
    app_name: String,
}

static DROP_CERT_BUNDLE: LazyLock<Vec<Certificate>> = LazyLock::new(fetch_certificates);
pub static DROP_CLIENT_SYNC: LazyLock<reqwest::blocking::Client> = LazyLock::new(get_client_sync);
pub static DROP_CLIENT_ASYNC: LazyLock<reqwest::Client> = LazyLock::new(get_client_async);
pub static DROP_CLIENT_WS_CLIENT: LazyLock<reqwest::Client> = LazyLock::new(get_client_ws);

fn fetch_certificates() -> Vec<Certificate> {
    let certificate_dir = DATA_ROOT_DIR.join("certificates");

    let mut certs = Vec::new();
    match fs::read_dir(certificate_dir) {
        Ok(c) => {
            for entry in c {
                match entry {
                    Ok(c) => {
                        let mut buf = Vec::new();
                        match File::open(c.path()) {
                            Ok(f) => f,
                            Err(e) => {
                                warn!(
                                    "Failed to open file at {} with error {}",
                                    c.path().display(),
                                    e
                                );
                                continue;
                            }
                        }
                        .read_to_end(&mut buf)
                        .unwrap_or_else(|e| panic!(
                            "Failed to read to end of certificate file {} with error {}",
                            c.path().display(),
                            e
                        ));

                        match Certificate::from_pem_bundle(&buf) {
                            Ok(certificates) => {
                                for cert in certificates {
                                    certs.push(cert);
                                }
                                info!(
                                    "added {} certificate(s) from {}",
                                    certs.len(),
                                    c.file_name().display()
                                );
                            }
                            Err(e) => warn!(
                                "Invalid certificate file {} with error {}",
                                c.path().display(),
                                e
                            ),
                        }
                    }
                    Err(_) => todo!(),
                }
            }
        }
        Err(e) => {
            debug!("not loading certificates due to error: {e}");
        }
    };
    certs
}

pub fn get_client_sync() -> reqwest::blocking::Client {
    let mut client = reqwest::blocking::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client.use_rustls_tls().build().expect("Failed to build synchronous client")
}
pub fn get_client_async() -> reqwest::Client {
    let mut client = reqwest::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client.use_rustls_tls().build().expect("Failed to build asynchronous client")
}
pub fn get_client_ws() -> reqwest::Client {
    let mut client = reqwest::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client
        .use_rustls_tls()
        .http1_only()
        .build()
        .expect("Failed to build websocket client")
}

pub async fn use_remote_logic(
    url: String,
    state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<(), RemoteAccessError> {
    debug!("connecting to url {url}");
    let base_url = Url::parse(&url)?;

    // Test Drop url
    let test_endpoint = base_url.join("/api/v1")?;
    let client = DROP_CLIENT_ASYNC.clone();
    let response = client
        .get(test_endpoint.to_string())
        .timeout(Duration::from_secs(3))
        .send()
        .await?;

    let result: DropHealthcheck = response.json().await?;

    if result.app_name != "Drop" {
        warn!("user entered drop endpoint that connected, but wasn't identified as Drop");
        return Err(RemoteAccessError::InvalidEndpoint);
    }

    let mut app_state = lock!(state);
    app_state.status = AppStatus::SignedOut;
    drop(app_state);

    let mut db_state = borrow_db_mut_checked();
    db_state.base_url = base_url.to_string();

    Ok(())
}
121
src-tauri/src/setup.rs
Normal file
@ -0,0 +1,121 @@
use std::{env, path::Path, str::FromStr as _, sync::{Arc, Mutex}};

use drop_database::{borrow_db_checked, borrow_db_mut_checked, db::{DatabaseImpls as _, DATA_ROOT_DIR}, models::data::GameDownloadStatus, DB};
use drop_downloads::download_manager_builder::DownloadManagerBuilder;
use drop_process::process_manager::ProcessManager;
use log::{debug, info, warn, LevelFilter};
use log4rs::{append::{console::ConsoleAppender, file::FileAppender}, config::{Appender, Root}, encode::pattern::PatternEncoder, Config};
use tauri::AppHandle;

use crate::{auth, client::autostart::sync_autostart_on_startup, database::scan::scan_install_dirs, AppState, AppStatus};

pub async fn setup(handle: AppHandle) -> AppState {
    let logfile = FileAppender::builder()
        .encoder(Box::new(PatternEncoder::new(
            "{d} | {l} | {f}:{L} - {m}{n}",
        )))
        .append(false)
        .build(DATA_ROOT_DIR.join("./drop.log"))
        .unwrap();

    let console = ConsoleAppender::builder()
        .encoder(Box::new(PatternEncoder::new(
            "{d} | {l} | {f}:{L} - {m}{n}",
        )))
        .build();

    let log_level = env::var("RUST_LOG").unwrap_or(String::from("Info"));

    let config = Config::builder()
        .appenders(vec![
            Appender::builder().build("logfile", Box::new(logfile)),
            Appender::builder().build("console", Box::new(console)),
        ])
        .build(
            Root::builder()
                .appenders(vec!["logfile", "console"])
                .build(LevelFilter::from_str(&log_level).expect("Invalid log level")),
        )
        .unwrap();

    log4rs::init_config(config).unwrap();

    let download_manager = Arc::new(DownloadManagerBuilder::build(handle.clone()));
    let process_manager = Box::leak(Box::new(Mutex::new(ProcessManager::new(handle.clone()))));

    debug!("checking if database is set up");
    let is_set_up = DB.database_is_set_up();

    scan_install_dirs();

    if !is_set_up {
        return AppState {
            status: AppStatus::NotConfigured,
            user: None,
            download_manager,
            process_manager,
        };
    }

    debug!("database is set up");

    // TODO: Account for possible failure
    let (app_status, user) = auth::setup().await;

    let db_handle = borrow_db_checked();
    let mut missing_games = Vec::new();
    let statuses = db_handle.applications.game_statuses.clone();
    drop(db_handle);

    for (game_id, status) in statuses {
        match status {
            GameDownloadStatus::Remote {} => {}
            GameDownloadStatus::PartiallyInstalled { .. } => {}
            GameDownloadStatus::SetupRequired {
                version_name: _,
                install_dir,
            } => {
                let install_dir_path = Path::new(&install_dir);
                if !install_dir_path.exists() {
                    missing_games.push(game_id);
                }
            }
            GameDownloadStatus::Installed {
                version_name: _,
                install_dir,
            } => {
                let install_dir_path = Path::new(&install_dir);
                if !install_dir_path.exists() {
                    missing_games.push(game_id);
                }
            }
        }
    }

    info!("detected games missing: {missing_games:?}");

    let mut db_handle = borrow_db_mut_checked();
    for game_id in missing_games {
        db_handle
            .applications
            .game_statuses
            .entry(game_id)
            .and_modify(|v| *v = GameDownloadStatus::Remote {});
    }

    drop(db_handle);

    debug!("finished setup!");

    // Sync autostart state
    if let Err(e) = sync_autostart_on_startup(&handle) {
        warn!("failed to sync autostart state: {e}");
    }

    AppState {
        status: app_status,
        user,
        download_manager,
        process_manager,
    }
}
@ -1,6 +0,0 @@
#[macro_export]
macro_rules! app_emit {
    ($app:expr, $event:expr, $p:expr) => {
        $app.emit($event, $p).expect(&format!("Failed to emit event {}", $event));
    };
}
@ -1,6 +0,0 @@
#[macro_export]
macro_rules! send {
    ($download_manager:expr, $signal:expr) => {
        $download_manager.send($signal).unwrap_or_else(|_| panic!("Failed to send signal {} to the download manager", stringify!(signal)))
    };
}
@ -1,6 +0,0 @@
#[macro_export]
macro_rules! lock {
    ($mutex:expr) => {
        $mutex.lock().unwrap_or_else(|_| panic!("Failed to lock onto {}", stringify!($mutex)))
    };
}
@ -1,4 +0,0 @@
mod app_emit;
mod download_manager_send;
mod lock;
pub mod webbrowser_open;
@ -1,7 +0,0 @@
use log::warn;

pub fn webbrowser_open<T: AsRef<str>>(url: T) {
    if let Err(e) = webbrowser::open(url.as_ref()) {
        warn!("Could not open web browser to url {} with error {}", url.as_ref(), e);
    };
}