Mirror of https://github.com/Drop-OSS/drop-app.git, synced 2025-11-12 07:42:44 +10:00
Compare commits
3 Commits
Comparing 5d22b883d5 ... importexpo

| Author | SHA1 | Date |
|---|---|---|
| | 43b56462d6 | |
| | ab219670dc | |
| | c1beef380e | |
.gitignore (vendored): 4 changed lines
@@ -29,6 +29,4 @@ src-tauri/flamegraph.svg
src-tauri/perf*

/*.AppImage
/squashfs-root

/target/
/squashfs-root
Cargo.lock (generated): 8288 changed lines
File diff suppressed because it is too large
Cargo.toml: 14 changed lines
@@ -1,14 +0,0 @@
[workspace]
members = [
    "client",
    "database",
    "src-tauri",
    "process",
    "remote",
    "utils",
    "cloud_saves",
    "download_manager",
    "games",
]

resolver = "3"
README.md: 24 changed lines
@@ -1,21 +1,29 @@
# Drop Desktop Client
# Drop App

The Drop Desktop Client is the companion app for [Drop](https://github.com/Drop-OSS/drop). It is the official & intended way to download and play games on your Drop server.
Drop app is the companion app for [Drop](https://github.com/Drop-OSS/drop). It uses a Tauri base with Nuxt 3 + TailwindCSS on top of it, so we can re-use components from the web UI.

## Internals
## Running
Before setting up the drop app, be sure that you have a server set up.
The instructions for this can be found on the [Drop Docs](https://docs.droposs.org/docs/guides/quickstart)

It uses a Tauri base with Nuxt 3 + TailwindCSS on top of it, so we can re-use components from the web UI.
## Current features
Currently supported are the following features:
- Signin (with custom server)
- Database registering & recovery
- Dynamic library fetching from server
- Installing & uninstalling games
- Download progress monitoring
- Launching / playing games

## Development
Before setting up a development environment, be sure that you have a server set up. The instructions for this can be found on the [Drop Docs](https://docs.droposs.org/docs/guides/quickstart).

Then, install dependencies with `yarn`. This'll install the custom builder's dependencies. Then, check everything works properly with `yarn tauri build`.
Install dependencies with `yarn`

Run the app in development with `yarn tauri dev`. NVIDIA users on Linux, use shell script `./nvidia-prop-dev.sh`

To manually specify the logging level, add the environment variable `RUST_LOG=[debug, info, warn, error]` to `yarn tauri dev`:

e.g. `RUST_LOG=debug yarn tauri dev`

## Contributing
Check out the contributing guide on our Developer Docs: [Drop Developer Docs - Contributing](https://developer.droposs.org/contributing).
Check the original [Drop repo](https://github.com/Drop-OSS/drop/blob/main/CONTRIBUTING.md) for contributing guidelines.
client/Cargo.lock (generated): 4862 changed lines
File diff suppressed because it is too large
@@ -1,12 +0,0 @@
[package]
name = "client"
version = "0.1.0"
edition = "2024"

[dependencies]
bitcode = "0.6.7"
database = { version = "0.1.0", path = "../database" }
log = "0.4.28"
serde = { version = "1.0.228", features = ["derive"] }
tauri = "2.8.5"
tauri-plugin-autostart = "2.5.0"
@@ -1,12 +0,0 @@
use serde::Serialize;

#[derive(Clone, Copy, Serialize, Eq, PartialEq)]
pub enum AppStatus {
    NotConfigured,
    Offline,
    ServerError,
    SignedOut,
    SignedIn,
    SignedInNeedsReauth,
    ServerUnavailable,
}
@@ -1,26 +0,0 @@
use database::borrow_db_checked;
use log::debug;
use tauri::AppHandle;
use tauri_plugin_autostart::ManagerExt;

// New function to sync state on startup
pub fn sync_autostart_on_startup(app: &AppHandle) -> Result<(), String> {
    let db_handle = borrow_db_checked();
    let should_be_enabled = db_handle.settings.autostart;
    drop(db_handle);

    let manager = app.autolaunch();
    let current_state = manager.is_enabled().map_err(|e| e.to_string())?;

    if current_state != should_be_enabled {
        if should_be_enabled {
            manager.enable().map_err(|e| e.to_string())?;
            debug!("synced autostart: enabled");
        } else {
            manager.disable().map_err(|e| e.to_string())?;
            debug!("synced autostart: disabled");
        }
    }

    Ok(())
}
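The helper above only reconciles the persisted setting with the OS autostart entry; something still has to call it at startup. The sketch below shows one plausible wiring through a Tauri v2 setup hook. The builder code and plugin arguments are illustrative assumptions, not part of this diff:

```rust
use tauri_plugin_autostart::MacosLauncher;

// Hypothetical wiring (not in the diff): reconcile autostart once at launch,
// and only warn on failure so the app still starts.
fn run() {
    tauri::Builder::default()
        .plugin(tauri_plugin_autostart::init(MacosLauncher::LaunchAgent, None))
        .setup(|app| {
            if let Err(e) = client::autostart::sync_autostart_on_startup(app.handle()) {
                log::warn!("failed to sync autostart state: {e}");
            }
            Ok(())
        })
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
```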
@@ -1,47 +0,0 @@
use std::{ffi::OsStr, path::PathBuf, process::{Command, Stdio}, sync::LazyLock};

use log::info;

pub static COMPAT_INFO: LazyLock<Option<CompatInfo>> = LazyLock::new(create_new_compat_info);

pub static UMU_LAUNCHER_EXECUTABLE: LazyLock<Option<PathBuf>> = LazyLock::new(|| {
    let x = get_umu_executable();
    info!("{:?}", &x);
    x
});

#[derive(Clone)]
pub struct CompatInfo {
    pub umu_installed: bool,
}

fn create_new_compat_info() -> Option<CompatInfo> {
    #[cfg(target_os = "windows")]
    return None;

    let has_umu_installed = UMU_LAUNCHER_EXECUTABLE.is_some();
    Some(CompatInfo {
        umu_installed: has_umu_installed,
    })
}

const UMU_BASE_LAUNCHER_EXECUTABLE: &str = "umu-run";
const UMU_INSTALL_DIRS: [&str; 4] = ["/app/share", "/use/local/share", "/usr/share", "/opt"];

fn get_umu_executable() -> Option<PathBuf> {
    if check_executable_exists(UMU_BASE_LAUNCHER_EXECUTABLE) {
        return Some(PathBuf::from(UMU_BASE_LAUNCHER_EXECUTABLE));
    }

    for dir in UMU_INSTALL_DIRS {
        let p = PathBuf::from(dir).join(UMU_BASE_LAUNCHER_EXECUTABLE);
        if check_executable_exists(&p) {
            return Some(p);
        }
    }
    None
}
fn check_executable_exists<P: AsRef<OsStr>>(exec: P) -> bool {
    let has_umu_installed = Command::new(exec).stdout(Stdio::null()).output();
    has_umu_installed.is_ok()
}
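Both statics are evaluated lazily on first access, so callers can treat them as cheap capability checks. A small consumer sketch follows; the helper function name is hypothetical and does not appear in this diff:

```rust
use client::compat::{COMPAT_INFO, UMU_LAUNCHER_EXECUTABLE};

// Hypothetical helper (not in the diff): report whether Windows titles can be
// routed through umu on this machine, and which launcher binary would be used.
fn describe_compat() -> String {
    let umu_ok = COMPAT_INFO.as_ref().map(|c| c.umu_installed).unwrap_or(false);
    match (&*UMU_LAUNCHER_EXECUTABLE, umu_ok) {
        (Some(path), true) => format!("umu available at {}", path.display()),
        _ => "umu not available; Windows titles cannot be launched".to_string(),
    }
}
```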
@@ -1,4 +0,0 @@
pub mod autostart;
pub mod user;
pub mod app_status;
pub mod compat;
@@ -1,13 +0,0 @@
use bitcode::{Decode, Encode};
use serde::{Deserialize, Serialize};

#[derive(Clone, Serialize, Deserialize, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct User {
    id: String,
    username: String,
    admin: bool,
    display_name: String,
    profile_picture_object_id: String,
}
@@ -1,19 +0,0 @@
[package]
name = "cloud_saves"
version = "0.1.0"
edition = "2024"

[dependencies]
database = { version = "0.1.0", path = "../database" }
dirs = "6.0.0"
log = "0.4.28"
regex = "1.11.3"
rustix = "1.1.2"
serde = "1.0.228"
serde_json = "1.0.145"
serde_with = "3.15.0"
tar = "0.4.44"
tempfile = "3.23.0"
uuid = "1.18.1"
whoami = "1.6.1"
zstd = "0.13.3"
@@ -1,27 +0,0 @@
use std::fmt::Display;

use serde_with::SerializeDisplay;

#[derive(Debug, SerializeDisplay, Clone, Copy)]

pub enum BackupError {
    InvalidSystem,

    NotFound,

    ParseError,
}

impl Display for BackupError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let s = match self {
            BackupError::InvalidSystem => "Attempted to generate path for invalid system",

            BackupError::NotFound => "Could not generate or find path",

            BackupError::ParseError => "Failed to parse path",
        };

        write!(f, "{}", s)
    }
}
@@ -1,8 +0,0 @@
pub mod conditions;
pub mod metadata;
pub mod resolver;
pub mod placeholder;
pub mod normalise;
pub mod path;
pub mod backup_manager;
pub mod error;
@@ -1,15 +0,0 @@
[package]
name = "database"
version = "0.1.0"
edition = "2024"

[dependencies]
chrono = "0.4.42"
dirs = "6.0.0"
log = "0.4.28"
native_model = { version = "0.6.4", features = ["rmp_serde_1_3"], git = "https://github.com/Drop-OSS/native_model.git"}
rustbreak = "2.0.0"
serde = "1.0.228"
serde_with = "3.15.0"
url = "2.5.7"
whoami = "1.6.1"
@@ -1,47 +0,0 @@
use std::{
    path::PathBuf,
    sync::{Arc, LazyLock},
};

use rustbreak::{DeSerError, DeSerializer};
use serde::{Serialize, de::DeserializeOwned};

use crate::interface::{DatabaseImpls, DatabaseInterface};

pub static DB: LazyLock<DatabaseInterface> = LazyLock::new(DatabaseInterface::set_up_database);


#[cfg(not(debug_assertions))]
static DATA_ROOT_PREFIX: &str = "drop";
#[cfg(debug_assertions)]
static DATA_ROOT_PREFIX: &str = "drop-debug";

pub static DATA_ROOT_DIR: LazyLock<Arc<PathBuf>> = LazyLock::new(|| {
    Arc::new(
        dirs::data_dir()
            .expect("Failed to get data dir")
            .join(DATA_ROOT_PREFIX),
    )
});

// Custom JSON serializer to support everything we need
#[derive(Debug, Default, Clone)]
pub struct DropDatabaseSerializer;

impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
    for DropDatabaseSerializer
{
    fn serialize(&self, val: &T) -> rustbreak::error::DeSerResult<Vec<u8>> {
        native_model::encode(val)
            .map_err(|e| DeSerError::Internal(e.to_string()))
    }

    fn deserialize<R: std::io::Read>(&self, mut s: R) -> rustbreak::error::DeSerResult<T> {
        let mut buf = Vec::new();
        s.read_to_end(&mut buf)
            .map_err(|e| rustbreak::error::DeSerError::Other(e.into()))?;
        let (val, _version) = native_model::decode(buf)
            .map_err(|e| DeSerError::Internal(e.to_string()))?;
        Ok(val)
    }
}
@@ -1,179 +0,0 @@
use std::{fs::{self, create_dir_all}, mem::ManuallyDrop, ops::{Deref, DerefMut}, path::PathBuf, sync::{RwLockReadGuard, RwLockWriteGuard}};

use chrono::Utc;
use log::{debug, error, info, warn};
use rustbreak::{PathDatabase, RustbreakError};
use url::Url;

use crate::{db::{DropDatabaseSerializer, DATA_ROOT_DIR, DB}, models::data::Database};

pub type DatabaseInterface =
    rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer>;

pub trait DatabaseImpls {
    fn set_up_database() -> DatabaseInterface;
    fn database_is_set_up(&self) -> bool;
    fn fetch_base_url(&self) -> Url;
}
impl DatabaseImpls for DatabaseInterface {
    fn set_up_database() -> DatabaseInterface {
        let db_path = DATA_ROOT_DIR.join("drop.db");
        let games_base_dir = DATA_ROOT_DIR.join("games");
        let logs_root_dir = DATA_ROOT_DIR.join("logs");
        let cache_dir = DATA_ROOT_DIR.join("cache");
        let pfx_dir = DATA_ROOT_DIR.join("pfx");

        debug!("creating data directory at {DATA_ROOT_DIR:?}");
        create_dir_all(DATA_ROOT_DIR.as_path()).unwrap_or_else(|e| {
            panic!(
                "Failed to create directory {} with error {}",
                DATA_ROOT_DIR.display(),
                e
            )
        });
        create_dir_all(&games_base_dir).unwrap_or_else(|e| {
            panic!(
                "Failed to create directory {} with error {}",
                games_base_dir.display(),
                e
            )
        });
        create_dir_all(&logs_root_dir).unwrap_or_else(|e| {
            panic!(
                "Failed to create directory {} with error {}",
                logs_root_dir.display(),
                e
            )
        });
        create_dir_all(&cache_dir).unwrap_or_else(|e| {
            panic!(
                "Failed to create directory {} with error {}",
                cache_dir.display(),
                e
            )
        });
        create_dir_all(&pfx_dir).unwrap_or_else(|e| {
            panic!(
                "Failed to create directory {} with error {}",
                pfx_dir.display(),
                e
            )
        });

        let exists = fs::exists(db_path.clone()).unwrap_or_else(|e| {
            panic!(
                "Failed to find if {} exists with error {}",
                db_path.display(),
                e
            )
        });

        if exists {
            match PathDatabase::load_from_path(db_path.clone()) {
                Ok(db) => db,
                Err(e) => handle_invalid_database(e, db_path, games_base_dir, cache_dir),
            }
        } else {
            let default = Database::new(games_base_dir, None, cache_dir);
            debug!("Creating database at path {}", db_path.display());
            PathDatabase::create_at_path(db_path, default).expect("Database could not be created")
        }
    }

    fn database_is_set_up(&self) -> bool {
        !borrow_db_checked().base_url.is_empty()
    }

    fn fetch_base_url(&self) -> Url {
        let handle = borrow_db_checked();
        Url::parse(&handle.base_url)
            .unwrap_or_else(|_| panic!("Failed to parse base url {}", handle.base_url))
    }
}

// TODO: Make the error relevant rather than just assume that it's a Deserialize error
fn handle_invalid_database(
    _e: RustbreakError,
    db_path: PathBuf,
    games_base_dir: PathBuf,
    cache_dir: PathBuf,
) -> rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer> {
    warn!("{_e}");
    let new_path = {
        let time = Utc::now().timestamp();
        let mut base = db_path.clone();
        base.set_file_name(format!("drop.db.backup-{time}"));
        base
    };
    info!("old database stored at: {}", new_path.to_string_lossy());
    fs::rename(&db_path, &new_path).unwrap_or_else(|e| {
        panic!(
            "Could not rename database {} to {} with error {}",
            db_path.display(),
            new_path.display(),
            e
        )
    });

    let db = Database::new(games_base_dir, Some(new_path), cache_dir);

    PathDatabase::create_at_path(db_path, db).expect("Database could not be created")
}

// To automatically save the database upon drop
pub struct DBRead<'a>(RwLockReadGuard<'a, Database>);
pub struct DBWrite<'a>(ManuallyDrop<RwLockWriteGuard<'a, Database>>);
impl<'a> Deref for DBWrite<'a> {
    type Target = Database;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<'a> DerefMut for DBWrite<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl<'a> Deref for DBRead<'a> {
    type Target = Database;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Drop for DBWrite<'_> {
    fn drop(&mut self) {
        unsafe {
            ManuallyDrop::drop(&mut self.0);
        }

        match DB.save() {
            Ok(()) => {}
            Err(e) => {
                error!("database failed to save with error {e}");
                panic!("database failed to save with error {e}")
            }
        }
    }
}

pub fn borrow_db_checked<'a>() -> DBRead<'a> {
    match DB.borrow_data() {
        Ok(data) => DBRead(data),
        Err(e) => {
            error!("database borrow failed with error {e}");
            panic!("database borrow failed with error {e}");
        }
    }
}

pub fn borrow_db_mut_checked<'a>() -> DBWrite<'a> {
    match DB.borrow_data_mut() {
        Ok(data) => DBWrite(ManuallyDrop::new(data)),
        Err(e) => {
            error!("database borrow mut failed with error {e}");
            panic!("database borrow mut failed with error {e}");
        }
    }
}
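The DBRead/DBWrite wrappers exist so that callers never have to remember to call DB.save() themselves: the write guard flushes the database when it is dropped. A short usage sketch (the settings.autostart field is taken from the autostart code earlier in this comparison; the surrounding function is illustrative):

```rust
use database::borrow_db_mut_checked;

// Hypothetical caller (not in the diff): any mutation made through the write
// guard is persisted as soon as the guard goes out of scope.
fn set_autostart_setting(enabled: bool) {
    {
        let mut db = borrow_db_mut_checked();
        db.settings.autostart = enabled;
    } // DBWrite dropped here -> DB.save() runs (and panics if the save fails)
}
```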
@@ -1,21 +0,0 @@
#![feature(nonpoison_rwlock)]

pub mod db;
pub mod debug;
pub mod models;
pub mod platform;
pub mod interface;

pub use models::data::{
    ApplicationTransientStatus,
    Database,
    DatabaseApplications,
    DatabaseAuth,
    DownloadType,
    DownloadableMetadata,
    GameDownloadStatus,
    GameVersion,
    Settings
};
pub use db::DB;
pub use interface::{borrow_db_checked, borrow_db_mut_checked};
@@ -1,17 +0,0 @@
[package]
name = "download_manager"
version = "0.1.0"
edition = "2024"

[dependencies]
atomic-instant-full = "0.1.0"
database = { version = "0.1.0", path = "../database" }
humansize = "2.1.3"
log = "0.4.28"
parking_lot = "0.12.5"
remote = { version = "0.1.0", path = "../remote" }
serde = "1.0.228"
serde_with = "3.15.0"
tauri = "2.8.5"
throttle_my_fn = "0.2.6"
utils = { version = "0.1.0", path = "../utils" }
@@ -1,16 +0,0 @@
#![feature(duration_millis_float)]
#![feature(nonpoison_mutex)]
#![feature(sync_nonpoison)]

use std::sync::{nonpoison::Mutex, LazyLock};

use crate::{download_manager_builder::DownloadManagerBuilder, download_manager_frontend::DownloadManager};

pub mod download_manager_builder;
pub mod download_manager_frontend;
pub mod downloadable;
pub mod util;
pub mod error;
pub mod frontend_updates;

pub static DOWNLOAD_MANAGER: LazyLock<Mutex<DownloadManager>> = LazyLock::new(|| todo!());
@@ -1,26 +0,0 @@
[package]
name = "games"
version = "0.1.0"
edition = "2024"

[dependencies]
atomic-instant-full = "0.1.0"
bitcode = "0.6.7"
boxcar = "0.2.14"
database = { version = "0.1.0", path = "../database" }
download_manager = { version = "0.1.0", path = "../download_manager" }
hex = "0.4.3"
log = "0.4.28"
md5 = "0.8.0"
rayon = "1.11.0"
remote = { version = "0.1.0", path = "../remote" }
reqwest = "0.12.23"
rustix = "1.1.2"
serde = { version = "1.0.228", features = ["derive"] }
serde_with = "3.15.0"
sysinfo = "0.37.2"
tauri = "2.8.5"
throttle_my_fn = "0.2.6"
utils = { version = "0.1.0", path = "../utils" }
native_model = { version = "0.6.4", features = ["rmp_serde_1_3"], git = "https://github.com/Drop-OSS/native_model.git"}
serde_json = "1.0.145"
@@ -1 +0,0 @@
pub mod collection;
@@ -1,21 +0,0 @@
use std::fmt::{Display};

use serde_with::SerializeDisplay;

#[derive(SerializeDisplay)]
pub enum LibraryError {
    MetaNotFound(String),
    VersionNotFound(String),
}
impl Display for LibraryError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", match self {
            LibraryError::MetaNotFound(id) => {
                format!("Could not locate any installed version of game ID {id} in the database")
            }
            LibraryError::VersionNotFound(game_id) => {
                format!("Could not locate any installed version for game id {game_id} in the database")
            }
        })
    }
}
@@ -1,6 +0,0 @@
#![feature(iterator_try_collect)]

pub mod collections;
pub mod downloads;
pub mod library;
pub mod state;
@@ -1,324 +0,0 @@
use std::fs::remove_dir_all;
use std::sync::Mutex;
use std::thread::spawn;
use bitcode::{Decode, Encode};
use database::{borrow_db_checked, borrow_db_mut_checked, ApplicationTransientStatus, Database, DownloadableMetadata, GameDownloadStatus, GameVersion};
use log::{debug, error, warn};
use remote::{auth::generate_authorization_header, error::RemoteAccessError, requests::generate_url, utils::DROP_CLIENT_SYNC};
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter};
use utils::app_emit;

use crate::{downloads::error::LibraryError, state::{GameStatusManager, GameStatusWithTransient}};

#[derive(Serialize, Deserialize, Debug)]
pub struct FetchGameStruct {
    game: Game,
    status: GameStatusWithTransient,
    version: Option<GameVersion>,
}

impl FetchGameStruct {
    pub fn new(game: Game, status: GameStatusWithTransient, version: Option<GameVersion>) -> Self {
        Self { game, status, version }
    }
}

#[derive(Serialize, Deserialize, Clone, Debug, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct Game {
    id: String,
    m_name: String,
    m_short_description: String,
    m_description: String,
    // mDevelopers
    // mPublishers
    m_icon_object_id: String,
    m_banner_object_id: String,
    m_cover_object_id: String,
    m_image_library_object_ids: Vec<String>,
    m_image_carousel_object_ids: Vec<String>,
}
impl Game {
    pub fn id(&self) -> &String {
        &self.id
    }
}
#[derive(serde::Serialize, Clone)]
pub struct GameUpdateEvent {
    pub game_id: String,
    pub status: (
        Option<GameDownloadStatus>,
        Option<ApplicationTransientStatus>,
    ),
    pub version: Option<GameVersion>,
}

/**
 * Called by:
 * - on_cancel, when cancelled, for obvious reasons
 * - when downloading, so if drop unexpectedly quits, we can resume the download. hidden by the "Downloading..." transient state, though
 * - when scanning, to import the game
 */
pub fn set_partially_installed(
    meta: &DownloadableMetadata,
    install_dir: String,
    app_handle: Option<&AppHandle>,
) {
    set_partially_installed_db(&mut borrow_db_mut_checked(), meta, install_dir, app_handle);
}

pub fn set_partially_installed_db(
    db_lock: &mut Database,
    meta: &DownloadableMetadata,
    install_dir: String,
    app_handle: Option<&AppHandle>,
) {
    db_lock.applications.transient_statuses.remove(meta);
    db_lock.applications.game_statuses.insert(
        meta.id.clone(),
        GameDownloadStatus::PartiallyInstalled {
            version_name: meta.version.as_ref().unwrap().clone(),
            install_dir,
        },
    );
    db_lock
        .applications
        .installed_game_version
        .insert(meta.id.clone(), meta.clone());

    if let Some(app_handle) = app_handle {
        push_game_update(
            app_handle,
            &meta.id,
            None,
            GameStatusManager::fetch_state(&meta.id, db_lock),
        );
    }
}

pub fn uninstall_game_logic(meta: DownloadableMetadata, app_handle: &AppHandle) {
    debug!("triggered uninstall for agent");
    let mut db_handle = borrow_db_mut_checked();
    db_handle
        .applications
        .transient_statuses
        .insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});

    push_game_update(
        app_handle,
        &meta.id,
        None,
        GameStatusManager::fetch_state(&meta.id, &db_handle),
    );

    let previous_state = db_handle.applications.game_statuses.get(&meta.id).cloned();

    let previous_state = if let Some(state) = previous_state {
        state
    } else {
        warn!("uninstall job doesn't have previous state, failing silently");
        return;
    };

    if let Some((_, install_dir)) = match previous_state {
        GameDownloadStatus::Installed {
            version_name,
            install_dir,
        } => Some((version_name, install_dir)),
        GameDownloadStatus::SetupRequired {
            version_name,
            install_dir,
        } => Some((version_name, install_dir)),
        GameDownloadStatus::PartiallyInstalled {
            version_name,
            install_dir,
        } => Some((version_name, install_dir)),
        _ => None,
    } {
        db_handle
            .applications
            .transient_statuses
            .insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});

        drop(db_handle);

        let app_handle = app_handle.clone();
        spawn(move || {
            if let Err(e) = remove_dir_all(install_dir) {
                error!("{e}");
            } else {
                let mut db_handle = borrow_db_mut_checked();
                db_handle.applications.transient_statuses.remove(&meta);
                db_handle
                    .applications
                    .installed_game_version
                    .remove(&meta.id);
                db_handle
                    .applications
                    .game_statuses
                    .insert(meta.id.clone(), GameDownloadStatus::Remote {});
                let _ = db_handle.applications.transient_statuses.remove(&meta);

                push_game_update(
                    &app_handle,
                    &meta.id,
                    None,
                    GameStatusManager::fetch_state(&meta.id, &db_handle),
                );

                debug!("uninstalled game id {}", &meta.id);
                app_emit!(app_handle, "update_library", ());
            }
        });
    } else {
        warn!("invalid previous state for uninstall, failing silently.");
    }
}

pub fn get_current_meta(game_id: &String) -> Option<DownloadableMetadata> {
    borrow_db_checked()
        .applications
        .installed_game_version
        .get(game_id)
        .cloned()
}

pub fn on_game_complete(
    meta: &DownloadableMetadata,
    install_dir: String,
    app_handle: &AppHandle,
) -> Result<(), RemoteAccessError> {
    // Fetch game version information from remote
    if meta.version.is_none() {
        return Err(RemoteAccessError::GameNotFound(meta.id.clone()));
    }

    let client = DROP_CLIENT_SYNC.clone();
    let response = generate_url(
        &["/api/v1/client/game/version"],
        &[
            ("id", &meta.id),
            ("version", meta.version.as_ref().unwrap()),
        ],
    )?;
    let response = client
        .get(response)
        .header("Authorization", generate_authorization_header())
        .send()?;

    let game_version: GameVersion = response.json()?;

    let mut handle = borrow_db_mut_checked();
    handle
        .applications
        .game_versions
        .entry(meta.id.clone())
        .or_default()
        .insert(meta.version.clone().unwrap(), game_version.clone());
    handle
        .applications
        .installed_game_version
        .insert(meta.id.clone(), meta.clone());

    drop(handle);

    let status = if game_version.setup_command.is_empty() {
        GameDownloadStatus::Installed {
            version_name: meta.version.clone().unwrap(),
            install_dir,
        }
    } else {
        GameDownloadStatus::SetupRequired {
            version_name: meta.version.clone().unwrap(),
            install_dir,
        }
    };

    let mut db_handle = borrow_db_mut_checked();
    db_handle
        .applications
        .game_statuses
        .insert(meta.id.clone(), status.clone());
    drop(db_handle);
    app_emit!(
        app_handle,
        &format!("update_game/{}", meta.id),
        GameUpdateEvent {
            game_id: meta.id.clone(),
            status: (Some(status), None),
            version: Some(game_version),
        }
    );

    Ok(())
}

pub fn push_game_update(
    app_handle: &AppHandle,
    game_id: &String,
    version: Option<GameVersion>,
    status: GameStatusWithTransient,
) {
    if let Some(GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }) =
        &status.0
        && version.is_none()
    {
        panic!("pushed game for installed game that doesn't have version information");
    }

    app_emit!(
        app_handle,
        &format!("update_game/{game_id}"),
        GameUpdateEvent {
            game_id: game_id.clone(),
            status,
            version,
        }
    );
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FrontendGameOptions {
    launch_string: String,
}

#[tauri::command]
pub fn update_game_configuration(
    game_id: String,
    options: FrontendGameOptions,
) -> Result<(), LibraryError> {
    let mut handle = borrow_db_mut_checked();
    let installed_version = handle
        .applications
        .installed_game_version
        .get(&game_id)
        .ok_or(LibraryError::MetaNotFound(game_id))?;

    let id = installed_version.id.clone();
    let version = installed_version.version.clone().ok_or(LibraryError::VersionNotFound(id.clone()))?;

    let mut existing_configuration = handle
        .applications
        .game_versions
        .get(&id)
        .unwrap()
        .get(&version)
        .unwrap()
        .clone();

    // Add more options in here
    existing_configuration.launch_command_template = options.launch_string;

    // Add no more options past here

    handle
        .applications
        .game_versions
        .get_mut(&id)
        .unwrap()
        .insert(version.to_string(), existing_configuration);

    Ok(())
}
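update_game_configuration is exposed as a #[tauri::command], which only becomes callable from the webview once it is registered with the invoke handler. A registration sketch under that assumption follows; the surrounding builder code is illustrative and not part of this diff:

```rust
// Hypothetical registration (not in the diff): commands must be listed in
// generate_handler! before the frontend can invoke them.
fn run() {
    tauri::Builder::default()
        .invoke_handler(tauri::generate_handler![
            games::library::update_game_configuration
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
```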
@@ -14,8 +14,7 @@
    "@tauri-apps/plugin-os": "^2.3.0",
    "@tauri-apps/plugin-shell": "^2.3.0",
    "pino": "^9.7.0",
    "pino-pretty": "^13.1.1",
    "tauri": "^0.15.0"
    "pino-pretty": "^13.1.1"
  },
  "devDependencies": {
    "@tauri-apps/cli": "^2.7.1"
@@ -1,18 +0,0 @@
[package]
name = "process"
version = "0.1.0"
edition = "2024"

[dependencies]
chrono = "0.4.42"
client = { version = "0.1.0", path = "../client" }
database = { version = "0.1.0", path = "../database" }
dynfmt = "0.1.5"
games = { version = "0.1.0", path = "../games" }
log = "0.4.28"
page_size = "0.6.0"
serde = "1.0.228"
serde_with = "3.15.0"
shared_child = "1.1.1"
tauri-plugin-opener = "2.5.0"
utils = { version = "0.1.0", path = "../utils" }
@@ -1,14 +0,0 @@
#![feature(nonpoison_mutex)]
#![feature(sync_nonpoison)]

use std::sync::{LazyLock, nonpoison::Mutex};

use crate::process_manager::ProcessManager;

pub static PROCESS_MANAGER: LazyLock<Mutex<ProcessManager>> =
    LazyLock::new(|| Mutex::new(ProcessManager::new()));

pub mod error;
pub mod format;
pub mod process_handlers;
pub mod process_manager;
@@ -1,136 +0,0 @@
use std::{collections::HashMap, env};

use chrono::Utc;
use client::{app_status::AppStatus, user::User};
use database::interface::borrow_db_checked;
use droplet_rs::ssl::sign_nonce;
use gethostname::gethostname;
use log::{error, warn};
use serde::{Deserialize, Serialize};
use url::Url;

use crate::{error::{DropServerError, RemoteAccessError}, requests::make_authenticated_get, utils::DROP_CLIENT_SYNC};

use super::{
    cache::{cache_object, get_cached_object},
    requests::generate_url,
};

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CapabilityConfiguration {}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct InitiateRequestBody {
    name: String,
    platform: String,
    capabilities: HashMap<String, CapabilityConfiguration>,
    mode: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeRequestBody {
    client_id: String,
    token: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeResponse {
    private: String,
    certificate: String,
    id: String,
}

pub fn generate_authorization_header() -> String {
    let certs = {
        let db = borrow_db_checked();
        db.auth.clone().expect("Authorisation not initialised")
    };

    let nonce = Utc::now().timestamp_millis().to_string();

    let signature =
        sign_nonce(certs.private, nonce.clone()).expect("Failed to generate authorisation header");

    format!("Nonce {} {} {}", certs.client_id, nonce, signature)
}

pub async fn fetch_user() -> Result<User, RemoteAccessError> {
    let response = make_authenticated_get(generate_url(&["/api/v1/client/user"], &[])?).await?;
    if response.status() != 200 {
        let err: DropServerError = response.json().await?;
        warn!("{err:?}");

        if err.status_message == "Nonce expired" {
            return Err(RemoteAccessError::OutOfSync);
        }

        return Err(RemoteAccessError::InvalidResponse(err));
    }

    response
        .json::<User>()
        .await
        .map_err(std::convert::Into::into)
}

pub fn auth_initiate_logic(mode: String) -> Result<String, RemoteAccessError> {
    let base_url = {
        let db_lock = borrow_db_checked();
        Url::parse(&db_lock.base_url.clone())?
    };

    let hostname = gethostname();

    let endpoint = base_url.join("/api/v1/client/auth/initiate")?;
    let body = InitiateRequestBody {
        name: format!("{} (Desktop)", hostname.display()),
        platform: env::consts::OS.to_string(),
        capabilities: HashMap::from([
            ("peerAPI".to_owned(), CapabilityConfiguration {}),
            ("cloudSaves".to_owned(), CapabilityConfiguration {}),
        ]),
        mode,
    };

    let client = DROP_CLIENT_SYNC.clone();
    let response = client.post(endpoint.to_string()).json(&body).send()?;

    if response.status() != 200 {
        let data: DropServerError = response.json()?;
        error!("could not start handshake: {}", data.status_message);

        return Err(RemoteAccessError::HandshakeFailed(data.status_message));
    }

    let response = response.text()?;

    Ok(response)
}

pub async fn setup() -> (AppStatus, Option<User>) {
    let auth = {
        let data = borrow_db_checked();
        data.auth.clone()
    };

    if auth.is_some() {
        let user_result = match fetch_user().await {
            Ok(data) => data,
            Err(RemoteAccessError::FetchError(_)) => {
                let user = get_cached_object::<User>("user").ok();
                return (AppStatus::Offline, user);
            }
            Err(_) => return (AppStatus::SignedInNeedsReauth, None),
        };
        if let Err(e) = cache_object("user", &user_result) {
            warn!("Could not cache user object with error {e}");
        }
        return (AppStatus::SignedIn, Some(user_result));
    }

    (AppStatus::SignedOut, None)
}
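The header produced by generate_authorization_header is a space-separated "Nonce <client_id> <nonce> <signature>" string, which the server can then check against the credentials issued during the handshake. Elsewhere in this comparison (games/src/library.rs) it is attached to outgoing requests; the standalone sketch below repeats that pattern against the /api/v1/client/user route from fetch_user and is illustrative only:

```rust
use remote::{auth::generate_authorization_header, utils::DROP_CLIENT_SYNC};

// Hypothetical helper (not in the diff): perform an authenticated GET with the
// blocking client and return the raw response body.
fn fetch_user_raw(base_url: &str) -> Result<String, reqwest::Error> {
    let client = DROP_CLIENT_SYNC.clone();
    client
        .get(format!("{base_url}/api/v1/client/user"))
        .header("Authorization", generate_authorization_header())
        .send()?
        .text()
}
```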
@@ -1,76 +0,0 @@
use database::{interface::DatabaseImpls, DB};
use http::{header::CONTENT_TYPE, response::Builder as ResponseBuilder, Response};
use log::{debug, warn};
use tauri::UriSchemeResponder;

use crate::{error::CacheError, utils::DROP_CLIENT_ASYNC};

use super::{
    auth::generate_authorization_header,
    cache::{ObjectCache, cache_object, get_cached_object},
};

pub async fn fetch_object_wrapper(request: http::Request<Vec<u8>>, responder: UriSchemeResponder) {
    match fetch_object(request).await {
        Ok(r) => responder.respond(r),
        Err(e) => {
            warn!("Cache error: {e}");
            responder.respond(Response::builder().status(500).body(Vec::new()).expect("Failed to build error response"));
        }
    };
}

pub async fn fetch_object(request: http::Request<Vec<u8>>) -> Result<Response<Vec<u8>>, CacheError>
{
    // Drop leading /
    let object_id = &request.uri().path()[1..];

    let cache_result = get_cached_object::<ObjectCache>(object_id);
    if let Ok(cache_result) = &cache_result
        && !cache_result.has_expired()
    {
        return cache_result.try_into();
    }

    let header = generate_authorization_header();
    let client = DROP_CLIENT_ASYNC.clone();
    let url = format!("{}api/v1/client/object/{object_id}", DB.fetch_base_url());
    let response = client.get(url).header("Authorization", header).send().await;

    match response {
        Ok(r) => {
            let resp_builder = ResponseBuilder::new().header(
                CONTENT_TYPE,
                r.headers()
                    .get("Content-Type")
                    .expect("Failed get Content-Type header"),
            );
            let data = match r.bytes().await {
                Ok(data) => Vec::from(data),
                Err(e) => {
                    warn!(
                        "Could not get data from cache object {object_id} with error {e}",
                    );
                    Vec::new()
                }
            };
            let resp = resp_builder.body(data).expect("Failed to build object cache response body");
            if cache_result.map_or(true, |x| x.has_expired()) {
                cache_object::<ObjectCache>(object_id, &resp.clone().try_into()?)
                    .expect("Failed to create cached object");
            }

            Ok(resp)
        }
        Err(e) => {
            debug!("Object fetch failed with error {e}. Attempting to download from cache");
            match cache_result {
                Ok(cache_result) => cache_result.try_into(),
                Err(e) => {
                    warn!("{e}");
                    Err(CacheError::Remote(e))
                }
            }
        }
    }
}
@@ -1,10 +0,0 @@
pub mod auth;
#[macro_use]
pub mod cache;
pub mod fetch_object;
pub mod requests;
pub mod server_proto;
pub mod utils;
pub mod error;

pub use auth::setup;
|
||||
use std::str::FromStr;
|
||||
|
||||
use database::borrow_db_checked;
|
||||
use http::{uri::PathAndQuery, Request, Response, StatusCode, Uri};
|
||||
use log::{error, warn};
|
||||
use tauri::UriSchemeResponder;
|
||||
use utils::webbrowser_open::webbrowser_open;
|
||||
|
||||
use crate::utils::DROP_CLIENT_SYNC;
|
||||
|
||||
pub async fn handle_server_proto_offline_wrapper(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
|
||||
responder.respond(match handle_server_proto_offline(request).await {
|
||||
Ok(res) => res,
|
||||
Err(_) => unreachable!()
|
||||
});
|
||||
}
|
||||
|
||||
pub async fn handle_server_proto_offline(_request: Request<Vec<u8>>) -> Result<Response<Vec<u8>>, StatusCode>{
|
||||
Ok(Response::builder()
|
||||
.status(StatusCode::NOT_FOUND)
|
||||
.body(Vec::new())
|
||||
.expect("Failed to build error response for proto offline"))
|
||||
|
||||
}
|
||||
|
||||
pub async fn handle_server_proto_wrapper(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
|
||||
match handle_server_proto(request).await {
|
||||
Ok(r) => responder.respond(r),
|
||||
Err(e) => {
|
||||
warn!("Cache error: {e}");
|
||||
responder.respond(Response::builder().status(e).body(Vec::new()).expect("Failed to build error response"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_server_proto(request: Request<Vec<u8>>) -> Result<Response<Vec<u8>>, StatusCode> {
|
||||
let db_handle = borrow_db_checked();
|
||||
let auth = match db_handle.auth.as_ref() {
|
||||
Some(auth) => auth,
|
||||
None => {
|
||||
error!("Could not find auth in database");
|
||||
return Err(StatusCode::UNAUTHORIZED)
|
||||
}
|
||||
};
|
||||
let web_token = match &auth.web_token {
|
||||
Some(token) => token,
|
||||
None => return Err(StatusCode::UNAUTHORIZED),
|
||||
};
|
||||
let remote_uri = db_handle.base_url.parse::<Uri>().expect("Failed to parse base url");
|
||||
|
||||
let path = request.uri().path();
|
||||
|
||||
let mut new_uri = request.uri().clone().into_parts();
|
||||
new_uri.path_and_query =
|
||||
Some(PathAndQuery::from_str(&format!("{path}?noWrapper=true")).expect("Failed to parse request path in proto"));
|
||||
new_uri.authority = remote_uri.authority().cloned();
|
||||
new_uri.scheme = remote_uri.scheme().cloned();
|
||||
let err_msg = &format!("Failed to build new uri from parts {new_uri:?}");
|
||||
let new_uri = Uri::from_parts(new_uri).expect(err_msg);
|
||||
|
||||
let whitelist_prefix = ["/store", "/api", "/_", "/fonts"];
|
||||
|
||||
if whitelist_prefix.iter().all(|f| !path.starts_with(f)) {
|
||||
webbrowser_open(new_uri.to_string());
|
||||
return Ok(Response::new(Vec::new()))
|
||||
}
|
||||
|
||||
let client = DROP_CLIENT_SYNC.clone();
|
||||
let response = match client
|
||||
.request(request.method().clone(), new_uri.to_string())
|
||||
.header("Authorization", format!("Bearer {web_token}"))
|
||||
.headers(request.headers().clone())
|
||||
.send() {
|
||||
Ok(response) => response,
|
||||
Err(e) => {
|
||||
warn!("Could not send response. Got {e} when sending");
|
||||
return Err(e.status().unwrap_or(StatusCode::BAD_REQUEST))
|
||||
},
|
||||
};
|
||||
|
||||
let response_status = response.status();
|
||||
let response_body = match response.bytes() {
|
||||
Ok(bytes) => bytes,
|
||||
Err(e) => return Err(e.status().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR)),
|
||||
};
|
||||
|
||||
let http_response = Response::builder()
|
||||
.status(response_status)
|
||||
.body(response_body.to_vec())
|
||||
.expect("Failed to build server proto response");
|
||||
|
||||
Ok(http_response)
|
||||
}
|
||||
@@ -1,111 +0,0 @@
use std::{
    fs::{self, File},
    io::Read,
    sync::LazyLock,
};

use database::db::DATA_ROOT_DIR;
use log::{debug, info, warn};
use reqwest::Certificate;
use serde::Deserialize;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DropHealthcheck {
    app_name: String,
}
impl DropHealthcheck {
    pub fn app_name(&self) -> &String {
        &self.app_name
    }
}
static DROP_CERT_BUNDLE: LazyLock<Vec<Certificate>> = LazyLock::new(fetch_certificates);
pub static DROP_CLIENT_SYNC: LazyLock<reqwest::blocking::Client> = LazyLock::new(get_client_sync);
pub static DROP_CLIENT_ASYNC: LazyLock<reqwest::Client> = LazyLock::new(get_client_async);
pub static DROP_CLIENT_WS_CLIENT: LazyLock<reqwest::Client> = LazyLock::new(get_client_ws);

fn fetch_certificates() -> Vec<Certificate> {
    let certificate_dir = DATA_ROOT_DIR.join("certificates");

    let mut certs = Vec::new();
    match fs::read_dir(certificate_dir) {
        Ok(c) => {
            for entry in c {
                match entry {
                    Ok(c) => {
                        let mut buf = Vec::new();
                        match File::open(c.path()) {
                            Ok(f) => f,
                            Err(e) => {
                                warn!(
                                    "Failed to open file at {} with error {}",
                                    c.path().display(),
                                    e
                                );
                                continue;
                            }
                        }
                        .read_to_end(&mut buf)
                        .unwrap_or_else(|e| panic!(
                            "Failed to read to end of certificate file {} with error {}",
                            c.path().display(),
                            e
                        ));

                        match Certificate::from_pem_bundle(&buf) {
                            Ok(certificates) => {
                                for cert in certificates {
                                    certs.push(cert);
                                }
                                info!(
                                    "added {} certificate(s) from {}",
                                    certs.len(),
                                    c.file_name().display()
                                );
                            }
                            Err(e) => warn!(
                                "Invalid certificate file {} with error {}",
                                c.path().display(),
                                e
                            ),
                        }
                    }
                    Err(_) => todo!(),
                }
            }
        }
        Err(e) => {
            debug!("not loading certificates due to error: {e}");
        }
    };
    certs
}

pub fn get_client_sync() -> reqwest::blocking::Client {
    let mut client = reqwest::blocking::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client.use_rustls_tls().build().expect("Failed to build synchronous client")
}
pub fn get_client_async() -> reqwest::Client {
    let mut client = reqwest::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client.use_rustls_tls().build().expect("Failed to build asynchronous client")
}
pub fn get_client_ws() -> reqwest::Client {
    let mut client = reqwest::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client
        .use_rustls_tls()
        .http1_only()
        .build()
        .expect("Failed to build websocket client")
}
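All three clients share the certificate bundle loaded from the data directory's certificates folder, so self-signed Drop servers work once their PEM files are placed there. The sketch below shows the async client deserialising the DropHealthcheck payload defined above; the healthcheck URL is left to the caller because the actual route is not shown in this comparison, and the helper itself is hypothetical:

```rust
use remote::utils::{DropHealthcheck, DROP_CLIENT_ASYNC};

// Hypothetical probe (not in the diff): the caller supplies the full healthcheck
// URL; the shared async client already trusts any bundled certificates.
async fn probe_server(healthcheck_url: &str) -> Result<String, reqwest::Error> {
    let client = DROP_CLIENT_ASYNC.clone();
    let health: DropHealthcheck = client.get(healthcheck_url).send().await?.json().await?;
    Ok(health.app_name().clone())
}
```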
src-tauri/Cargo.lock (generated): 1410 changed lines
File diff suppressed because it is too large
@@ -1,138 +1,101 @@
[package]
name = "drop-app"
version = "0.3.3"
description = "The client application for the open-source, self-hosted game distribution platform Drop"
authors = ["Drop OSS"]
# authors = ["Drop OSS"]
edition = "2024"
description = "The client application for the open-source, self-hosted game distribution platform Drop"

[workspace]
resolver = "3"
members = ["drop-consts",
    "drop-database",
    "drop-downloads",
    "drop-errors", "drop-library",
    "drop-native-library",
    "drop-process",
    "drop-remote",
]

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }

[lib]
crate-type = ["cdylib", "rlib", "staticlib"]
# The `_lib` suffix may seem redundant but it is necessary
# to make the lib name unique and wouldn't conflict with the bin name.
# This seems to be only an issue on Windows, see https://github.com/rust-lang/cargo/issues/8519
name = "drop_app_lib"
crate-type = ["staticlib", "cdylib", "rlib"]
rustflags = ["-C", "target-feature=+aes,+sse2"]

[build-dependencies]
tauri-build = { version = "2.0.0", features = [] }
# rustflags = ["-C", "target-feature=+aes,+sse2"]

[dependencies]
tauri-plugin-shell = "2.2.1"
serde_json = "1"
rayon = "1.10.0"
webbrowser = "1.0.2"
url = "2.5.2"
tauri-plugin-deep-link = "2"
log = "0.4.22"
hex = "0.4.3"
tauri-plugin-dialog = "2"
http = "1.1.0"
urlencoding = "2.1.3"
md5 = "0.7.0"
chrono = "0.4.38"
tauri-plugin-os = "2"
boxcar = "0.2.7"
umu-wrapper-lib = "0.1.0"
tauri-plugin-autostart = "2.0.0"
shared_child = "1.0.1"
serde_with = "3.12.0"
slice-deque = "0.3.0"
throttle_my_fn = "0.2.6"
parking_lot = "0.12.3"
atomic-instant-full = "0.1.0"
cacache = "13.1.0"
http-serde = "2.1.1"
reqwest-middleware = "0.4.0"
reqwest-middleware-cache = "0.1.1"
deranged = "=0.4.0"
droplet-rs = "0.7.3"
gethostname = "1.0.1"
zstd = "0.13.3"
tar = "0.4.44"
rand = "0.9.1"
regex = "1.11.1"
tempfile = "3.19.1"
schemars = "0.8.22"
sha1 = "0.10.6"
dirs = "6.0.0"
whoami = "1.6.0"
filetime = "0.2.25"
walkdir = "2.5.0"
known-folders = "1.2.0"
native_model = { version = "0.6.4", features = ["rmp_serde_1_3"], git = "https://github.com/Drop-OSS/native_model.git"}
tauri-plugin-opener = "2.4.0"
bitcode = "0.6.6"
reqwest-websocket = "0.5.0"
drop-database = { path = "./drop-database" }
drop-downloads = { path = "./drop-downloads" }
drop-errors = { path = "./drop-errors" }
drop-native-library = { path = "./drop-native-library" }
drop-process = { path = "./drop-process" }
drop-remote = { path = "./drop-remote" }
futures-lite = "2.6.0"
page_size = "0.6.0"
sysinfo = "0.36.1"
humansize = "2.1.3"
tokio-util = { version = "0.7.16", features = ["io"] }
futures-core = "0.3.31"
bytes = "1.10.1"
# tailscale = { path = "./tailscale" }

# Workspaces
client = { version = "0.1.0", path = "../client" }
database = { path = "../database" }
process = { path = "../process" }
remote = { version = "0.1.0", path = "../remote" }
utils = { path = "../utils" }
games = { version = "0.1.0", path = "../games" }

[dependencies.dynfmt]
version = "0.1.5"
features = ["curly"]

[dependencies.tauri]
version = "2.7.0"
features = ["protocol-asset", "tray-icon"]

[dependencies.tokio]
version = "1.40.0"
features = ["rt", "tokio-macros", "signal"]
hex = "0.4.3"
http = "1.1.0"
known-folders = "1.2.0"
log = "0.4.22"
md5 = "0.7.0"
rayon = "1.10.0"
regex = "1.11.1"
reqwest-websocket = "0.5.0"
serde_json = "1"
tar = "0.4.44"
tauri = { version = "2.7.0", features = ["protocol-asset", "tray-icon"] }
tauri-plugin-autostart = "2.0.0"
tauri-plugin-deep-link = "2"
tauri-plugin-dialog = "2"
tauri-plugin-opener = "2.4.0"
tauri-plugin-os = "2"
tauri-plugin-shell = "2.2.1"
tempfile = "3.19.1"
url = "2.5.2"
webbrowser = "1.0.2"
whoami = "1.6.0"
zstd = "0.13.3"

[dependencies.log4rs]
version = "1.3.0"
features = ["console_appender", "file_appender"]

[dependencies.rustix]
version = "0.38.37"
features = ["fs"]

[dependencies.uuid]
version = "1.10.0"
features = ["v4", "fast-rng", "macro-diagnostics"]

[dependencies.rustbreak]
version = "2"
features = ["other_errors"] # You can also use "yaml_enc" or "bin_enc"

[dependencies.reqwest]
version = "0.12.22"
default-features = false
features = [
    "json",
    "http2",
    "blocking",
    "rustls-tls",
    "native-tls-alpn",
    "rustls-tls-native-roots",
    "stream",
    "blocking",
    "http2",
    "json",
    "native-tls-alpn",
    "rustls-tls",
    "rustls-tls-native-roots",
    "stream",
]

[dependencies.rustix]
version = "0.38.37"
features = ["fs"]

[dependencies.serde]
version = "1"
features = ["derive", "rc"]

[dependencies.uuid]
version = "1.10.0"
features = ["fast-rng", "macro-diagnostics", "v4"]

[build-dependencies]
tauri-build = { version = "2.0.0", features = [] }

[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }

[profile.release]
lto = true
panic = "abort"
codegen-units = 1
panic = 'abort'
@@ -1,8 +1,7 @@
[package]
name = "utils"
name = "drop-consts"
version = "0.1.0"
edition = "2024"

[dependencies]
log = "0.4.28"
webbrowser = "1.0.5"
dirs = "6.0.0"
src-tauri/drop-consts/src/lib.rs (new file): 15 lines
@@ -0,0 +1,15 @@
use std::{
    path::PathBuf,
    sync::{Arc, LazyLock},
};

#[cfg(not(debug_assertions))]
static DATA_ROOT_PREFIX: &'static str = "drop";
#[cfg(debug_assertions)]
static DATA_ROOT_PREFIX: &str = "drop-debug";

pub static DATA_ROOT_DIR: LazyLock<&'static PathBuf> =
    LazyLock::new(|| Box::leak(Box::new(dirs::data_dir().unwrap().join(DATA_ROOT_PREFIX))));

pub static CACHE_DIR: LazyLock<&'static PathBuf> =
    LazyLock::new(|| Box::leak(Box::new(DATA_ROOT_DIR.join("cache"))));
src-tauri/drop-database/Cargo.toml (new file): 21 lines
@@ -0,0 +1,21 @@
[package]
name = "drop-database"
version = "0.1.0"
edition = "2024"

[dependencies]
bitcode = "0.6.7"
chrono = "0.4.42"
drop-consts = { path = "../drop-consts" }
drop-library = { path = "../drop-library" }
drop-native-library = { path = "../drop-native-library" }
log = "0.4.28"
native_model = { git = "https://github.com/Drop-OSS/native_model.git", version = "0.6.4", features = [
    "rmp_serde_1_3",
] }
rustbreak = "2.0.0"
serde = { version = "1.0.219", features = ["derive"] }
serde_with = "3.14.0"
url = "2.5.7"
whoami = "1.6.1"
src-tauri/drop-database/src/db.rs (new file): 140 lines
@@ -0,0 +1,140 @@
use std::{
    fs::{self, create_dir_all},
    mem::ManuallyDrop,
    ops::{Deref, DerefMut},
    path::PathBuf,
    sync::{Arc, LazyLock, RwLockReadGuard, RwLockWriteGuard},
};

use chrono::Utc;
use drop_consts::DATA_ROOT_DIR;
use log::{debug, error, info, warn};
use rustbreak::{DeSerError, DeSerializer, PathDatabase, RustbreakError};
use serde::{Serialize, de::DeserializeOwned};

use crate::DB;

use super::models::data::Database;

// Custom JSON serializer to support everything we need
#[derive(Debug, Default, Clone)]
pub struct DropDatabaseSerializer;

impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
    for DropDatabaseSerializer
{
    fn serialize(&self, val: &T) -> rustbreak::error::DeSerResult<Vec<u8>> {
        native_model::encode(val).map_err(|e| DeSerError::Internal(e.to_string()))
    }

    fn deserialize<R: std::io::Read>(&self, mut s: R) -> rustbreak::error::DeSerResult<T> {
        let mut buf = Vec::new();
        s.read_to_end(&mut buf)
            .map_err(|e| rustbreak::error::DeSerError::Internal(e.to_string()))?;
        let (val, _version) =
            native_model::decode(buf).map_err(|e| DeSerError::Internal(e.to_string()))?;
        Ok(val)
    }
}

pub type DatabaseInterface =
    rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer>;

pub trait DatabaseImpls {
    fn set_up_database() -> DatabaseInterface;
}
impl DatabaseImpls for DatabaseInterface {
    fn set_up_database() -> DatabaseInterface {
        let db_path = DATA_ROOT_DIR.join("drop.db");
        let games_base_dir = DATA_ROOT_DIR.join("games");
        let logs_root_dir = DATA_ROOT_DIR.join("logs");
        let cache_dir = DATA_ROOT_DIR.join("cache");
        let pfx_dir = DATA_ROOT_DIR.join("pfx");

        debug!("creating data directory at {DATA_ROOT_DIR:?}");
        create_dir_all(DATA_ROOT_DIR.as_path()).unwrap();
        create_dir_all(&games_base_dir).unwrap();
        create_dir_all(&logs_root_dir).unwrap();
        create_dir_all(&cache_dir).unwrap();
        create_dir_all(&pfx_dir).unwrap();

        let exists = fs::exists(db_path.clone()).unwrap();

        if exists {
            match PathDatabase::load_from_path(db_path.clone()) {
                Ok(db) => db,
                Err(e) => handle_invalid_database(e, db_path, games_base_dir, cache_dir),
            }
        } else {
            let default = Database::new(games_base_dir, None);
            debug!(
                "Creating database at path {}",
                db_path.as_os_str().to_str().unwrap()
            );
            PathDatabase::create_at_path(db_path, default).expect("Database could not be created")
        }
    }
}

// TODO: Make the error relevant rather than just assume that it's a Deserialize error
fn handle_invalid_database(
    _e: RustbreakError,
    db_path: PathBuf,
    games_base_dir: PathBuf,
    cache_dir: PathBuf,
) -> rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer> {
    warn!("{_e}");
    let new_path = {
        let time = Utc::now().timestamp();
        let mut base = db_path.clone();
        base.set_file_name(format!("drop.db.backup-{time}"));
        base
    };
    info!("old database stored at: {}", new_path.to_string_lossy());
    fs::rename(&db_path, &new_path).unwrap();

    let db = Database::new(
        games_base_dir.into_os_string().into_string().unwrap(),
        Some(new_path),
    );

    PathDatabase::create_at_path(db_path, db).expect("Database could not be created")
}

// To automatically save the database upon drop
pub struct DBRead<'a>(pub(crate) RwLockReadGuard<'a, Database>);
pub struct DBWrite<'a>(pub(crate) ManuallyDrop<RwLockWriteGuard<'a, Database>>);
impl<'a> Deref for DBWrite<'a> {
    type Target = Database;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<'a> DerefMut for DBWrite<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl<'a> Deref for DBRead<'a> {
    type Target = Database;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Drop for DBWrite<'_> {
    fn drop(&mut self) {
        unsafe {
            ManuallyDrop::drop(&mut self.0);
        }

        match DB.save() {
            Ok(()) => {}
            Err(e) => {
                error!("database failed to save with error {e}");
                panic!("database failed to save with error {e}")
            }
        }
    }
}
@ -4,13 +4,12 @@ use std::{
|
||||
|
||||
use log::error;
|
||||
use native_model::{Decode, Encode};
|
||||
use utils::lock;
|
||||
|
||||
pub type DropData = v1::DropData;
|
||||
|
||||
pub static DROP_DATA_PATH: &str = ".dropdata";
|
||||
|
||||
pub mod v1 {
|
||||
mod v1 {
|
||||
use std::{collections::HashMap, path::PathBuf, sync::Mutex};
|
||||
|
||||
use native_model::native_model;
|
||||
@ -50,12 +49,7 @@ impl DropData {
|
||||
let mut s = Vec::new();
|
||||
file.read_to_end(&mut s)?;
|
||||
|
||||
native_model::rmp_serde_1_3::RmpSerde::decode(s).map_err(|e| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::InvalidData,
|
||||
format!("Failed to decode drop data: {e}"),
|
||||
)
|
||||
})
|
||||
Ok(native_model::rmp_serde_1_3::RmpSerde::decode(s).unwrap())
|
||||
}
|
||||
pub fn write(&self) {
|
||||
let manifest_raw = match native_model::rmp_serde_1_3::RmpSerde::encode(&self) {
|
||||
@ -77,12 +71,12 @@ impl DropData {
|
||||
}
|
||||
}
|
||||
pub fn set_contexts(&self, completed_contexts: &[(String, bool)]) {
|
||||
*lock!(self.contexts) = completed_contexts.iter().map(|s| (s.0.clone(), s.1)).collect();
|
||||
*self.contexts.lock().unwrap() = completed_contexts.iter().map(|s| (s.0.clone(), s.1)).collect();
|
||||
}
|
||||
pub fn set_context(&self, context: String, state: bool) {
|
||||
lock!(self.contexts).entry(context).insert_entry(state);
|
||||
self.contexts.lock().unwrap().entry(context).insert_entry(state);
|
||||
}
|
||||
pub fn get_contexts(&self) -> HashMap<String, bool> {
|
||||
lock!(self.contexts).clone()
|
||||
self.contexts.lock().unwrap().clone()
|
||||
}
|
||||
}
|
||||
src-tauri/drop-database/src/lib.rs (new file, 34 lines)
@ -0,0 +1,34 @@
|
||||
use std::{mem::ManuallyDrop, sync::LazyLock};
|
||||
|
||||
use log::error;
|
||||
|
||||
use crate::db::{DBRead, DBWrite, DatabaseImpls, DatabaseInterface};
|
||||
|
||||
pub mod db;
|
||||
pub mod debug;
|
||||
pub mod models;
|
||||
pub mod process;
|
||||
pub mod runtime_models;
|
||||
pub mod drop_data;
|
||||
|
||||
pub static DB: LazyLock<DatabaseInterface> = LazyLock::new(DatabaseInterface::set_up_database);
|
||||
|
||||
pub fn borrow_db_checked<'a>() -> DBRead<'a> {
|
||||
match DB.borrow_data() {
|
||||
Ok(data) => DBRead(data),
|
||||
Err(e) => {
|
||||
error!("database borrow failed with error {e}");
|
||||
panic!("database borrow failed with error {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn borrow_db_mut_checked<'a>() -> DBWrite<'a> {
|
||||
match DB.borrow_data_mut() {
|
||||
Ok(data) => DBWrite(ManuallyDrop::new(data)),
|
||||
Err(e) => {
|
||||
error!("database borrow mut failed with error {e}");
|
||||
panic!("database borrow mut failed with error {e}");
|
||||
}
|
||||
}
|
||||
}
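A short usage sketch of these helpers; the function itself and the field it touches are illustrative, not part of the diff.

// Sketch: DBWrite derefs to Database and its Drop impl calls DB.save(),
// so mutations made through borrow_db_mut_checked persist automatically.
fn _add_install_dir_sketch(dir: std::path::PathBuf) {
    {
        let mut db = borrow_db_mut_checked();
        db.drop_applications.install_dirs.push(dir);
    } // guard dropped here -> database flushed to disk

    let db = borrow_db_checked();
    log::debug!("install dirs: {}", db.drop_applications.install_dirs.len());
}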
@ -4,11 +4,11 @@ pub mod data {
|
||||
use native_model::native_model;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// NOTE: Within each version, you should NEVER use these types.
|
||||
// NOTE: Within each version, you should NEVER use these types.
|
||||
// Declare it using the actual version that it is from, i.e. v1::Settings rather than just Settings from here
|
||||
|
||||
pub type GameVersion = v1::GameVersion;
|
||||
pub type Database = v3::Database;
|
||||
pub type Database = v4::Database;
|
||||
pub type Settings = v1::Settings;
|
||||
pub type DatabaseAuth = v1::DatabaseAuth;
|
||||
|
||||
@ -19,7 +19,7 @@ pub mod data {
|
||||
*/
|
||||
pub type DownloadableMetadata = v1::DownloadableMetadata;
|
||||
pub type DownloadType = v1::DownloadType;
|
||||
pub type DatabaseApplications = v2::DatabaseApplications;
|
||||
pub type DatabaseApplications = v4::DatabaseApplications;
|
||||
// pub type DatabaseCompatInfo = v2::DatabaseCompatInfo;
|
||||
|
||||
use std::collections::HashMap;
|
||||
@ -40,7 +40,7 @@ pub mod data {
|
||||
use serde_with::serde_as;
|
||||
use std::{collections::HashMap, path::PathBuf};
|
||||
|
||||
use crate::platform::Platform;
|
||||
use crate::process::Platform;
|
||||
|
||||
use super::{Deserialize, Serialize, native_model};
|
||||
|
||||
@ -48,7 +48,7 @@ pub mod data {
|
||||
"{}".to_owned()
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
|
||||
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[native_model(id = 2, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
|
||||
pub struct GameVersion {
|
||||
@ -191,9 +191,9 @@ pub mod data {
|
||||
|
||||
use serde_with::serde_as;
|
||||
|
||||
use super::{
|
||||
Deserialize, Serialize, native_model, v1,
|
||||
};
|
||||
use crate::runtime_models::Game;
|
||||
|
||||
use super::{Deserialize, Serialize, native_model, v1};
|
||||
|
||||
#[native_model(id = 1, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from = v1::Database)]
|
||||
#[derive(Serialize, Deserialize, Clone, Default)]
|
||||
@ -275,14 +275,13 @@ pub mod data {
|
||||
#[native_model(id = 3, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from=v1::DatabaseApplications)]
|
||||
pub struct DatabaseApplications {
|
||||
pub install_dirs: Vec<PathBuf>,
|
||||
// Guaranteed to exist if the game also exists in the app state map
|
||||
pub game_statuses: HashMap<String, GameDownloadStatus>,
|
||||
|
||||
pub game_versions: HashMap<String, HashMap<String, v1::GameVersion>>,
|
||||
pub installed_game_version: HashMap<String, v1::DownloadableMetadata>,
|
||||
|
||||
#[serde(skip)]
|
||||
pub transient_statuses: HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
|
||||
pub transient_statuses:
|
||||
HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
|
||||
}
|
||||
impl From<v1::DatabaseApplications> for DatabaseApplications {
|
||||
fn from(value: v1::DatabaseApplications) -> Self {
|
||||
@ -303,10 +302,7 @@ pub mod data {
|
||||
mod v3 {
|
||||
use std::path::PathBuf;
|
||||
|
||||
use super::{
|
||||
Deserialize, Serialize,
|
||||
native_model, v2, v1,
|
||||
};
|
||||
use super::{Deserialize, Serialize, native_model, v1, v2};
|
||||
#[native_model(id = 1, version = 3, with = native_model::rmp_serde_1_3::RmpSerde, from = v2::Database)]
|
||||
#[derive(Serialize, Deserialize, Clone, Default)]
|
||||
pub struct Database {
|
||||
@ -336,28 +332,73 @@ pub mod data {
|
||||
}
|
||||
}
|
||||
|
||||
mod v4 {
|
||||
use std::{collections::HashMap, path::PathBuf};
|
||||
use drop_library::libraries::LibraryProviderIdentifier;
|
||||
use drop_native_library::impls::DropNativeLibraryProvider;
|
||||
use serde_with::serde_as;
|
||||
use crate::models::data::v3;
|
||||
use super::{Deserialize, Serialize, native_model, v1, v2};
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub enum Library {
|
||||
NativeLibrary(DropNativeLibraryProvider),
|
||||
}
|
||||
|
||||
#[serde_as]
|
||||
#[derive(Serialize, Deserialize, Default, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[native_model(id = 3, version = 4, with = native_model::rmp_serde_1_3::RmpSerde, from=v2::DatabaseApplications)]
|
||||
pub struct DatabaseApplications {
|
||||
pub install_dirs: Vec<PathBuf>,
|
||||
pub libraries: HashMap<LibraryProviderIdentifier, Library>,
|
||||
|
||||
#[serde(skip)]
|
||||
pub transient_statuses:
|
||||
HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
|
||||
}
|
||||
|
||||
impl From<v2::DatabaseApplications> for DatabaseApplications {
|
||||
fn from(value: v2::DatabaseApplications) -> Self {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
#[native_model(id = 1, version = 4, with = native_model::rmp_serde_1_3::RmpSerde, from = v3::Database)]
|
||||
#[derive(Serialize, Deserialize, Default, Clone)]
|
||||
pub struct Database {
|
||||
#[serde(default)]
|
||||
pub settings: v1::Settings,
|
||||
pub drop_applications: DatabaseApplications,
|
||||
#[serde(skip)]
|
||||
pub prev_database: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl From<v3::Database> for Database {
|
||||
fn from(value: v3::Database) -> Self {
|
||||
Database {
|
||||
settings: value.settings,
|
||||
drop_applications: value.applications.into(),
|
||||
prev_database: value.prev_database,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
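As a rough illustration of how this version chain is consumed: the byte source here is an assumption, but decoding follows the `from = ...` attributes declared on each version, so an old blob comes back as the latest schema.

// Sketch: native_model walks v1 -> v2 -> v3 -> v4 during decode.
fn _migrate_sketch(old_bytes: Vec<u8>) -> Database {
    let (db, _version) = native_model::decode(old_bytes).expect("decode + migrate");
    db
}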
impl Database {
|
||||
pub fn new<T: Into<PathBuf>>(
|
||||
games_base_dir: T,
|
||||
prev_database: Option<PathBuf>,
|
||||
cache_dir: PathBuf,
|
||||
) -> Self {
|
||||
Self {
|
||||
applications: DatabaseApplications {
|
||||
drop_applications: DatabaseApplications {
|
||||
install_dirs: vec![games_base_dir.into()],
|
||||
game_statuses: HashMap::new(),
|
||||
game_versions: HashMap::new(),
|
||||
installed_game_version: HashMap::new(),
|
||||
libraries: HashMap::new(),
|
||||
transient_statuses: HashMap::new(),
|
||||
},
|
||||
prev_database,
|
||||
base_url: String::new(),
|
||||
auth: None,
|
||||
settings: Settings::default(),
|
||||
cache_dir,
|
||||
compat_info: None,
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@ -1,6 +1,5 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
|
||||
#[derive(Eq, Hash, PartialEq, Serialize, Deserialize, Clone, Copy, Debug)]
|
||||
pub enum Platform {
|
||||
Windows,
|
||||
@ -41,7 +40,7 @@ impl From<whoami::Platform> for Platform {
|
||||
whoami::Platform::Windows => Platform::Windows,
|
||||
whoami::Platform::Linux => Platform::Linux,
|
||||
whoami::Platform::MacOS => Platform::MacOs,
|
||||
platform => unimplemented!("Playform {} is not supported", platform),
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
src-tauri/drop-database/src/runtime_models.rs (new file, 28 lines)
@ -0,0 +1,28 @@
|
||||
use bitcode::{Decode, Encode};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, Default, Encode, Decode)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Game {
|
||||
pub id: String,
|
||||
m_name: String,
|
||||
m_short_description: String,
|
||||
m_description: String,
|
||||
// mDevelopers
|
||||
// mPublishers
|
||||
m_icon_object_id: String,
|
||||
m_banner_object_id: String,
|
||||
m_cover_object_id: String,
|
||||
m_image_library_object_ids: Vec<String>,
|
||||
m_image_carousel_object_ids: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, Encode, Decode)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct User {
|
||||
id: String,
|
||||
username: String,
|
||||
admin: bool,
|
||||
display_name: String,
|
||||
profile_picture_object_id: String,
|
||||
}
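These runtime models derive bitcode's Encode/Decode; the tiny round-trip below is a sketch, and the idea that cached copies (such as the "library" cache object) are stored this way is inferred from the rest of the diff.

// Sketch: bitcode gives a compact binary form for cached runtime models.
fn _cache_roundtrip_sketch() {
    let game = Game::default();
    let bytes = bitcode::encode(&game);
    let _back: Game = bitcode::decode(&bytes).expect("decode cached game");
}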
src-tauri/drop-downloads/Cargo.toml (new file, 16 lines)
@ -0,0 +1,16 @@
|
||||
[package]
|
||||
name = "drop-downloads"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
atomic-instant-full = "0.1.0"
|
||||
drop-database = { path = "../drop-database" }
|
||||
drop-errors = { path = "../drop-errors" }
|
||||
# can't depend, cycle
|
||||
# drop-native-library = { path = "../drop-native-library" }
|
||||
log = "0.4.22"
|
||||
parking_lot = "0.12.4"
|
||||
serde = "1.0.219"
|
||||
tauri = { version = "2.7.0" }
|
||||
throttle_my_fn = "0.2.6"
|
||||
@ -7,13 +7,14 @@ use std::{
|
||||
thread::{JoinHandle, spawn},
|
||||
};
|
||||
|
||||
use database::DownloadableMetadata;
|
||||
use drop_database::models::data::DownloadableMetadata;
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
use log::{debug, error, info, warn};
|
||||
use tauri::{AppHandle, Emitter};
|
||||
use utils::{app_emit, lock, send};
|
||||
|
||||
|
||||
use crate::{download_manager_frontend::DownloadStatus, error::ApplicationDownloadError, frontend_updates::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent}};
|
||||
use crate::{
|
||||
download_manager_frontend::DownloadStatus, events::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent}
|
||||
};
|
||||
|
||||
use super::{
|
||||
download_manager_frontend::{DownloadManager, DownloadManagerSignal, DownloadManagerStatus},
|
||||
@ -28,43 +29,6 @@ use super::{
|
||||
pub type DownloadAgent = Arc<Box<dyn Downloadable + Send + Sync>>;
|
||||
pub type CurrentProgressObject = Arc<Mutex<Option<Arc<ProgressObject>>>>;
|
||||
|
||||
/*
|
||||
|
||||
Welcome to the download manager, the most overengineered, glorious piece of bullshit.
|
||||
|
||||
The download manager takes a queue of ids and their associated
|
||||
DownloadAgents, and then, one-by-one, executes them. It provides an interface
|
||||
to interact with the currently downloading agent, and manage the queue.
|
||||
|
||||
When the DownloadManager is initialised, it is designed to provide a reference
|
||||
which can be used to provide some instructions (the DownloadManagerInterface),
|
||||
but other than that, it runs without any sort of interruptions.
|
||||
|
||||
It does this by opening up two data structures. The primary one is the command_receiver,
|
||||
an mpsc (multi-producer, single-consumer channel) which allows commands to be sent from
|
||||
the Interface, and queued up for the Manager to process.
|
||||
|
||||
These have been mapped in the DownloadManagerSignal docs.
|
||||
|
||||
The other way to interact with the DownloadManager is via the download_queue,
|
||||
which is just a collection of ids which may be rearranged to suit
|
||||
whichever download queue order is required.
|
||||
|
||||
+----------------------------------------------------------------------------+
|
||||
| DO NOT ATTEMPT TO ADD OR REMOVE FROM THE QUEUE WITHOUT USING SIGNALS!! |
|
||||
| THIS WILL CAUSE A DESYNC BETWEEN THE DOWNLOAD AGENT REGISTRY AND THE QUEUE |
|
||||
| WHICH HAS NOT BEEN ACCOUNTED FOR |
|
||||
+----------------------------------------------------------------------------+
|
||||
|
||||
This download queue does not actually own any of the DownloadAgents. It is
|
||||
simply an id-based reference system. The actual Agents are stored in the
|
||||
download_agent_registry HashMap, as ordering is no issue here. This is why
|
||||
appending or removing from the download_queue must be done via signals.
|
||||
|
||||
Behold, my madness - quexeky
|
||||
|
||||
*/
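To make the "signals only" rule above concrete, a minimal sketch; the signal names come from this diff, but the helper function itself is illustrative.

// Sketch: queue mutations go through the mpsc signal channel so the
// download_agent_registry and the queue never fall out of sync.
fn _cancel_and_refresh_sketch(
    sender: &std::sync::mpsc::Sender<DownloadManagerSignal>,
    meta: DownloadableMetadata,
) {
    sender.send(DownloadManagerSignal::Cancel(meta)).unwrap();
    sender.send(DownloadManagerSignal::UpdateUIQueue).unwrap();
}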
pub struct DownloadManagerBuilder {
|
||||
download_agent_registry: HashMap<DownloadableMetadata, DownloadAgent>,
|
||||
download_queue: Queue,
|
||||
@ -103,7 +67,7 @@ impl DownloadManagerBuilder {
|
||||
}
|
||||
|
||||
fn set_status(&self, status: DownloadManagerStatus) {
|
||||
*lock!(self.status) = status;
|
||||
*self.status.lock().unwrap() = status;
|
||||
}
|
||||
|
||||
fn remove_and_cleanup_front_download(&mut self, meta: &DownloadableMetadata) -> DownloadAgent {
|
||||
@ -117,9 +81,9 @@ impl DownloadManagerBuilder {
|
||||
// Make sure the download thread is terminated
|
||||
fn cleanup_current_download(&mut self) {
|
||||
self.active_control_flag = None;
|
||||
*lock!(self.progress) = None;
|
||||
*self.progress.lock().unwrap() = None;
|
||||
|
||||
let mut download_thread_lock = lock!(self.current_download_thread);
|
||||
let mut download_thread_lock = self.current_download_thread.lock().unwrap();
|
||||
|
||||
if let Some(unfinished_thread) = download_thread_lock.take()
|
||||
&& !unfinished_thread.is_finished()
|
||||
@ -135,7 +99,7 @@ impl DownloadManagerBuilder {
|
||||
current_flag.set(DownloadThreadControlFlag::Stop);
|
||||
}
|
||||
|
||||
let mut download_thread_lock = lock!(self.current_download_thread);
|
||||
let mut download_thread_lock = self.current_download_thread.lock().unwrap();
|
||||
if let Some(current_download_thread) = download_thread_lock.take() {
|
||||
return current_download_thread.join().is_ok();
|
||||
};
|
||||
@ -197,7 +161,9 @@ impl DownloadManagerBuilder {
|
||||
self.download_queue.append(meta.clone());
|
||||
self.download_agent_registry.insert(meta, download_agent);
|
||||
|
||||
send!(self.sender, DownloadManagerSignal::UpdateUIQueue);
|
||||
self.sender
|
||||
.send(DownloadManagerSignal::UpdateUIQueue)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
fn manage_go_signal(&mut self) {
|
||||
@ -243,7 +209,7 @@ impl DownloadManagerBuilder {
|
||||
|
||||
let sender = self.sender.clone();
|
||||
|
||||
let mut download_thread_lock = lock!(self.current_download_thread);
|
||||
let mut download_thread_lock = self.current_download_thread.lock().unwrap();
|
||||
let app_handle = self.app_handle.clone();
|
||||
|
||||
*download_thread_lock = Some(spawn(move || {
|
||||
@ -254,7 +220,7 @@ impl DownloadManagerBuilder {
|
||||
Err(e) => {
|
||||
error!("download {:?} has error {}", download_agent.metadata(), &e);
|
||||
download_agent.on_error(&app_handle, &e);
|
||||
send!(sender, DownloadManagerSignal::Error(e));
|
||||
sender.send(DownloadManagerSignal::Error(e)).unwrap();
|
||||
return;
|
||||
}
|
||||
};
|
||||
@ -278,7 +244,7 @@ impl DownloadManagerBuilder {
|
||||
&e
|
||||
);
|
||||
download_agent.on_error(&app_handle, &e);
|
||||
send!(sender, DownloadManagerSignal::Error(e));
|
||||
sender.send(DownloadManagerSignal::Error(e)).unwrap();
|
||||
return;
|
||||
}
|
||||
};
|
||||
@ -289,8 +255,10 @@ impl DownloadManagerBuilder {
|
||||
|
||||
if validate_result {
|
||||
download_agent.on_complete(&app_handle);
|
||||
send!(sender, DownloadManagerSignal::Completed(download_agent.metadata()));
|
||||
send!(sender, DownloadManagerSignal::UpdateUIQueue);
|
||||
sender
|
||||
.send(DownloadManagerSignal::Completed(download_agent.metadata()))
|
||||
.unwrap();
|
||||
sender.send(DownloadManagerSignal::UpdateUIQueue).unwrap();
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -317,7 +285,7 @@ impl DownloadManagerBuilder {
|
||||
}
|
||||
|
||||
self.push_ui_queue_update();
|
||||
send!(self.sender, DownloadManagerSignal::Go);
|
||||
self.sender.send(DownloadManagerSignal::Go).unwrap();
|
||||
}
|
||||
fn manage_error_signal(&mut self, error: ApplicationDownloadError) {
|
||||
debug!("got signal Error");
|
||||
@ -355,7 +323,7 @@ impl DownloadManagerBuilder {
|
||||
let index = self.download_queue.get_by_meta(meta);
|
||||
if let Some(index) = index {
|
||||
download_agent.on_cancelled(&self.app_handle);
|
||||
let _ = self.download_queue.edit().remove(index);
|
||||
let _ = self.download_queue.edit().remove(index).unwrap();
|
||||
let removed = self.download_agent_registry.remove(meta);
|
||||
debug!(
|
||||
"removed {:?} from queue {:?}",
|
||||
@ -370,7 +338,7 @@ impl DownloadManagerBuilder {
|
||||
fn push_ui_stats_update(&self, kbs: usize, time: usize) {
|
||||
let event_data = StatsUpdateEvent { speed: kbs, time };
|
||||
|
||||
app_emit!(self.app_handle, "update_stats", event_data);
|
||||
self.app_handle.emit("update_stats", event_data).unwrap();
|
||||
}
|
||||
fn push_ui_queue_update(&self) {
|
||||
let queue = &self.download_queue.read();
|
||||
@ -389,6 +357,6 @@ impl DownloadManagerBuilder {
|
||||
.collect();
|
||||
|
||||
let event_data = QueueUpdateEvent { queue: queue_objs };
|
||||
app_emit!(self.app_handle, "update_queue", event_data);
|
||||
self.app_handle.emit("update_queue", event_data).unwrap();
|
||||
}
|
||||
}
|
||||
@ -3,19 +3,16 @@ use std::{
|
||||
collections::VecDeque,
|
||||
fmt::Debug,
|
||||
sync::{
|
||||
Mutex, MutexGuard,
|
||||
mpsc::{SendError, Sender},
|
||||
Mutex, MutexGuard,
|
||||
},
|
||||
thread::JoinHandle,
|
||||
};
|
||||
|
||||
use database::DownloadableMetadata;
|
||||
use drop_database::models::data::DownloadableMetadata;
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
use log::{debug, info};
|
||||
use serde::Serialize;
|
||||
use utils::{lock, send};
|
||||
|
||||
|
||||
use crate::error::ApplicationDownloadError;
|
||||
|
||||
use super::{
|
||||
download_manager_builder::{CurrentProgressObject, DownloadAgent},
|
||||
@ -119,18 +116,22 @@ impl DownloadManager {
|
||||
self.download_queue.read()
|
||||
}
|
||||
pub fn get_current_download_progress(&self) -> Option<f64> {
|
||||
let progress_object = (*lock!(self.progress)).clone()?;
|
||||
let progress_object = (*self.progress.lock().unwrap()).clone()?;
|
||||
Some(progress_object.get_progress())
|
||||
}
|
||||
pub fn rearrange_string(&self, meta: &DownloadableMetadata, new_index: usize) {
|
||||
let mut queue = self.edit();
|
||||
let current_index = get_index_from_id(&mut queue, meta).expect("Failed to get meta index from id");
|
||||
let to_move = queue.remove(current_index).expect("Failed to remove meta at index from queue");
|
||||
let current_index = get_index_from_id(&mut queue, meta).unwrap();
|
||||
let to_move = queue.remove(current_index).unwrap();
|
||||
queue.insert(new_index, to_move);
|
||||
send!(self.command_sender, DownloadManagerSignal::UpdateUIQueue);
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::UpdateUIQueue)
|
||||
.unwrap();
|
||||
}
|
||||
pub fn cancel(&self, meta: DownloadableMetadata) {
|
||||
send!(self.command_sender, DownloadManagerSignal::Cancel(meta));
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::Cancel(meta))
|
||||
.unwrap();
|
||||
}
|
||||
pub fn rearrange(&self, current_index: usize, new_index: usize) {
|
||||
if current_index == new_index {
|
||||
@ -139,31 +140,39 @@ impl DownloadManager {
|
||||
|
||||
let needs_pause = current_index == 0 || new_index == 0;
|
||||
if needs_pause {
|
||||
send!(self.command_sender, DownloadManagerSignal::Stop);
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::Stop)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
debug!("moving download at index {current_index} to index {new_index}");
|
||||
|
||||
let mut queue = self.edit();
|
||||
let to_move = queue.remove(current_index).expect("Failed to get");
|
||||
let to_move = queue.remove(current_index).unwrap();
|
||||
queue.insert(new_index, to_move);
|
||||
drop(queue);
|
||||
|
||||
if needs_pause {
|
||||
send!(self.command_sender, DownloadManagerSignal::Go);
|
||||
self.command_sender.send(DownloadManagerSignal::Go).unwrap();
|
||||
}
|
||||
send!(self.command_sender, DownloadManagerSignal::UpdateUIQueue);
|
||||
send!(self.command_sender, DownloadManagerSignal::Go);
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::UpdateUIQueue)
|
||||
.unwrap();
|
||||
self.command_sender.send(DownloadManagerSignal::Go).unwrap();
|
||||
}
|
||||
pub fn pause_downloads(&self) {
|
||||
send!(self.command_sender, DownloadManagerSignal::Stop);
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::Stop)
|
||||
.unwrap();
|
||||
}
|
||||
pub fn resume_downloads(&self) {
|
||||
send!(self.command_sender, DownloadManagerSignal::Go);
|
||||
self.command_sender.send(DownloadManagerSignal::Go).unwrap();
|
||||
}
|
||||
pub fn ensure_terminated(&self) -> Result<Result<(), ()>, Box<dyn Any + Send>> {
|
||||
send!(self.command_sender, DownloadManagerSignal::Finish);
|
||||
let terminator = lock!(self.terminator).take();
|
||||
self.command_sender
|
||||
.send(DownloadManagerSignal::Finish)
|
||||
.unwrap();
|
||||
let terminator = self.terminator.lock().unwrap().take();
|
||||
terminator.unwrap().join()
|
||||
}
|
||||
pub fn get_sender(&self) -> Sender<DownloadManagerSignal> {
|
||||
@ -1,11 +1,9 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use database::DownloadableMetadata;
|
||||
use drop_database::models::data::DownloadableMetadata;
|
||||
use drop_errors::application_download_error::ApplicationDownloadError;
|
||||
use tauri::AppHandle;
|
||||
|
||||
|
||||
use crate::error::ApplicationDownloadError;
|
||||
|
||||
use super::{
|
||||
download_manager_frontend::DownloadStatus,
|
||||
util::{download_thread_control_flag::DownloadThreadControl, progress_object::ProgressObject},
|
||||
@ -1,4 +1,4 @@
|
||||
use database::DownloadableMetadata;
|
||||
use drop_database::models::data::DownloadableMetadata;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::download_manager_frontend::DownloadStatus;
|
||||
src-tauri/drop-downloads/src/lib.rs (new file, 7 lines)
@ -0,0 +1,7 @@
|
||||
#![feature(duration_millis_float)]
|
||||
|
||||
pub mod download_manager_builder;
|
||||
pub mod download_manager_frontend;
|
||||
pub mod downloadable;
|
||||
pub mod events;
|
||||
pub mod util;
|
||||
@ -9,7 +9,6 @@ use std::{
|
||||
|
||||
use atomic_instant_full::AtomicInstant;
|
||||
use throttle_my_fn::throttle;
|
||||
use utils::{lock, send};
|
||||
|
||||
use crate::download_manager_frontend::DownloadManagerSignal;
|
||||
|
||||
@ -75,10 +74,12 @@ impl ProgressObject {
|
||||
}
|
||||
|
||||
pub fn set_time_now(&self) {
|
||||
*lock!(self.start) = Instant::now();
|
||||
*self.start.lock().unwrap() = Instant::now();
|
||||
}
|
||||
pub fn sum(&self) -> usize {
|
||||
lock!(self.progress_instances)
|
||||
self.progress_instances
|
||||
.lock()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|instance| instance.load(Ordering::Acquire))
|
||||
.sum()
|
||||
@ -87,25 +88,27 @@ impl ProgressObject {
|
||||
self.set_time_now();
|
||||
self.bytes_last_update.store(0, Ordering::Release);
|
||||
self.rolling.reset();
|
||||
lock!(self.progress_instances)
|
||||
self.progress_instances
|
||||
.lock()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.for_each(|x| x.store(0, Ordering::SeqCst));
|
||||
}
|
||||
pub fn get_max(&self) -> usize {
|
||||
*lock!(self.max)
|
||||
*self.max.lock().unwrap()
|
||||
}
|
||||
pub fn set_max(&self, new_max: usize) {
|
||||
*lock!(self.max) = new_max;
|
||||
*self.max.lock().unwrap() = new_max;
|
||||
}
|
||||
pub fn set_size(&self, length: usize) {
|
||||
*lock!(self.progress_instances) =
|
||||
*self.progress_instances.lock().unwrap() =
|
||||
(0..length).map(|_| Arc::new(AtomicUsize::new(0))).collect();
|
||||
}
|
||||
pub fn get_progress(&self) -> f64 {
|
||||
self.sum() as f64 / self.get_max() as f64
|
||||
}
|
||||
pub fn get(&self, index: usize) -> Arc<AtomicUsize> {
|
||||
lock!(self.progress_instances)[index].clone()
|
||||
self.progress_instances.lock().unwrap()[index].clone()
|
||||
}
|
||||
fn update_window(&self, kilobytes_per_second: usize) {
|
||||
self.rolling.update(kilobytes_per_second);
|
||||
@ -145,12 +148,18 @@ pub fn push_update(progress: &ProgressObject, bytes_remaining: usize) {
|
||||
}
|
||||
|
||||
fn update_ui(progress_object: &ProgressObject, kilobytes_per_second: usize, time_remaining: usize) {
|
||||
send!(
|
||||
progress_object.sender,
|
||||
DownloadManagerSignal::UpdateUIStats(kilobytes_per_second, time_remaining)
|
||||
);
|
||||
progress_object
|
||||
.sender
|
||||
.send(DownloadManagerSignal::UpdateUIStats(
|
||||
kilobytes_per_second,
|
||||
time_remaining,
|
||||
))
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
fn update_queue(progress: &ProgressObject) {
|
||||
send!(progress.sender, DownloadManagerSignal::UpdateUIQueue)
|
||||
progress
|
||||
.sender
|
||||
.send(DownloadManagerSignal::UpdateUIQueue)
|
||||
.unwrap();
|
||||
}
|
||||
@ -3,8 +3,7 @@ use std::{
|
||||
sync::{Arc, Mutex, MutexGuard},
|
||||
};
|
||||
|
||||
use database::DownloadableMetadata;
|
||||
use utils::lock;
|
||||
use drop_database::models::data::DownloadableMetadata;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Queue {
|
||||
@ -25,10 +24,10 @@ impl Queue {
|
||||
}
|
||||
}
|
||||
pub fn read(&self) -> VecDeque<DownloadableMetadata> {
|
||||
lock!(self.inner).clone()
|
||||
self.inner.lock().unwrap().clone()
|
||||
}
|
||||
pub fn edit(&self) -> MutexGuard<'_, VecDeque<DownloadableMetadata>> {
|
||||
lock!(self.inner)
|
||||
self.inner.lock().unwrap()
|
||||
}
|
||||
pub fn pop_front(&self) -> Option<DownloadableMetadata> {
|
||||
self.edit().pop_front()
|
||||
src-tauri/drop-errors/Cargo.toml (new file, 14 lines)
@ -0,0 +1,14 @@
|
||||
[package]
|
||||
name = "drop-errors"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
http = "1.3.1"
|
||||
humansize = "2.1.3"
|
||||
reqwest = "0.12.23"
|
||||
reqwest-websocket = "0.5.1"
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
serde_with = "3.14.0"
|
||||
tauri-plugin-opener = "2.5.0"
|
||||
url = "2.5.7"
|
||||
@ -1,32 +1,12 @@
|
||||
use std::{fmt::{Display, Formatter}, io, sync::{mpsc::SendError, Arc}};
|
||||
use std::{
|
||||
fmt::{Display, Formatter},
|
||||
io, sync::Arc,
|
||||
};
|
||||
|
||||
use serde_with::SerializeDisplay;
|
||||
use humansize::{format_size, BINARY};
|
||||
|
||||
use remote::error::RemoteAccessError;
|
||||
use serde_with::SerializeDisplay;
|
||||
|
||||
#[derive(SerializeDisplay)]
|
||||
pub enum DownloadManagerError<T> {
|
||||
IOError(io::Error),
|
||||
SignalError(SendError<T>),
|
||||
}
|
||||
impl<T> Display for DownloadManagerError<T> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
DownloadManagerError::IOError(error) => write!(f, "{error}"),
|
||||
DownloadManagerError::SignalError(send_error) => write!(f, "{send_error}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<T> From<SendError<T>> for DownloadManagerError<T> {
|
||||
fn from(value: SendError<T>) -> Self {
|
||||
DownloadManagerError::SignalError(value)
|
||||
}
|
||||
}
|
||||
impl<T> From<io::Error> for DownloadManagerError<T> {
|
||||
fn from(value: io::Error) -> Self {
|
||||
DownloadManagerError::IOError(value)
|
||||
}
|
||||
}
|
||||
use super::remote_access_error::RemoteAccessError;
|
||||
|
||||
// TODO: Rename / separate from downloads
|
||||
#[derive(Debug, SerializeDisplay)]
|
||||
@ -38,7 +18,7 @@ pub enum ApplicationDownloadError {
|
||||
Checksum,
|
||||
Lock,
|
||||
IoError(Arc<io::Error>),
|
||||
DownloadError(RemoteAccessError),
|
||||
DownloadError,
|
||||
}
|
||||
|
||||
impl Display for ApplicationDownloadError {
|
||||
@ -60,16 +40,10 @@ impl Display for ApplicationDownloadError {
|
||||
write!(f, "checksum failed to validate for download")
|
||||
}
|
||||
ApplicationDownloadError::IoError(error) => write!(f, "io error: {error}"),
|
||||
ApplicationDownloadError::DownloadError(error) => write!(
|
||||
ApplicationDownloadError::DownloadError => write!(
|
||||
f,
|
||||
"Download failed with error {error:?}"
|
||||
"Download failed. See Download Manager status for specific error"
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<io::Error> for ApplicationDownloadError {
|
||||
fn from(value: io::Error) -> Self {
|
||||
ApplicationDownloadError::IoError(Arc::new(value))
|
||||
}
|
||||
}
|
||||
src-tauri/drop-errors/src/download_manager_error.rs (new file, 27 lines)
@ -0,0 +1,27 @@
|
||||
use std::{fmt::Display, io, sync::mpsc::SendError};
|
||||
|
||||
use serde_with::SerializeDisplay;
|
||||
|
||||
#[derive(SerializeDisplay)]
|
||||
pub enum DownloadManagerError<T> {
|
||||
IOError(io::Error),
|
||||
SignalError(SendError<T>),
|
||||
}
|
||||
impl<T> Display for DownloadManagerError<T> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
DownloadManagerError::IOError(error) => write!(f, "{error}"),
|
||||
DownloadManagerError::SignalError(send_error) => write!(f, "{send_error}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<T> From<SendError<T>> for DownloadManagerError<T> {
|
||||
fn from(value: SendError<T>) -> Self {
|
||||
DownloadManagerError::SignalError(value)
|
||||
}
|
||||
}
|
||||
impl<T> From<io::Error> for DownloadManagerError<T> {
|
||||
fn from(value: io::Error) -> Self {
|
||||
DownloadManagerError::IOError(value)
|
||||
}
|
||||
}
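A brief sketch of why both From impls exist; the u32 signal type is a stand-in, not something this crate defines.

// Sketch: io and channel-send failures both convert into DownloadManagerError,
// so manager-side code can use `?` uniformly.
fn _notify_sketch(sender: &std::sync::mpsc::Sender<u32>) -> Result<(), DownloadManagerError<u32>> {
    sender.send(1)?;
    Ok(())
}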
src-tauri/drop-errors/src/drop_server_error.rs (new file, 10 lines)
@ -0,0 +1,10 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ServerError {
|
||||
pub status_code: usize,
|
||||
pub status_message: String,
|
||||
// pub message: String,
|
||||
// pub url: String,
|
||||
}
|
||||
src-tauri/drop-errors/src/lib.rs (new file, 6 lines)
@ -0,0 +1,6 @@
|
||||
pub mod application_download_error;
|
||||
pub mod download_manager_error;
|
||||
pub mod drop_server_error;
|
||||
pub mod library_error;
|
||||
pub mod process_error;
|
||||
pub mod remote_access_error;
|
||||
src-tauri/drop-errors/src/library_error.rs (new file, 18 lines)
@ -0,0 +1,18 @@
|
||||
use std::fmt::Display;
|
||||
|
||||
use serde_with::SerializeDisplay;
|
||||
|
||||
#[derive(SerializeDisplay)]
|
||||
pub enum LibraryError {
|
||||
MetaNotFound(String),
|
||||
}
|
||||
impl Display for LibraryError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
LibraryError::MetaNotFound(id) => write!(
|
||||
f,
|
||||
"Could not locate any installed version of game ID {id} in the database"
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -11,9 +11,7 @@ pub enum ProcessError {
|
||||
IOError(Error),
|
||||
FormatError(String), // String errors supremacy
|
||||
InvalidPlatform,
|
||||
OpenerError(tauri_plugin_opener::Error),
|
||||
InvalidArguments(String),
|
||||
FailedLaunch(String),
|
||||
OpenerError(tauri_plugin_opener::Error)
|
||||
}
|
||||
|
||||
impl Display for ProcessError {
|
||||
@ -25,15 +23,9 @@ impl Display for ProcessError {
|
||||
ProcessError::InvalidVersion => "Invalid game version",
|
||||
ProcessError::IOError(error) => &error.to_string(),
|
||||
ProcessError::InvalidPlatform => "This game cannot be played on the current platform",
|
||||
ProcessError::FormatError(error) => &format!("Could not format template: {error:?}"),
|
||||
ProcessError::OpenerError(error) => &format!("Could not open directory: {error:?}"),
|
||||
ProcessError::InvalidArguments(arguments) => {
|
||||
&format!("Invalid arguments in command {arguments}")
|
||||
}
|
||||
ProcessError::FailedLaunch(game_id) => {
|
||||
&format!("Drop detected that the game {game_id} may have failed to launch properly")
|
||||
}
|
||||
};
|
||||
ProcessError::FormatError(e) => &format!("Failed to format template: {e}"),
|
||||
ProcessError::OpenerError(error) => &format!("Failed to open directory: {error}"),
|
||||
};
|
||||
write!(f, "{s}")
|
||||
}
|
||||
}
|
||||
@ -4,21 +4,11 @@ use std::{
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use http::{header::ToStrError, HeaderName, StatusCode};
|
||||
use http::StatusCode;
|
||||
use serde_with::SerializeDisplay;
|
||||
use url::ParseError;
|
||||
|
||||
use serde::Deserialize;
|
||||
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct DropServerError {
|
||||
pub status_code: usize,
|
||||
pub status_message: String,
|
||||
// pub message: String,
|
||||
// pub url: String,
|
||||
}
|
||||
|
||||
use super::drop_server_error::ServerError;
|
||||
|
||||
#[derive(Debug, SerializeDisplay)]
|
||||
pub enum RemoteAccessError {
|
||||
@ -28,7 +18,7 @@ pub enum RemoteAccessError {
|
||||
InvalidEndpoint,
|
||||
HandshakeFailed(String),
|
||||
GameNotFound(String),
|
||||
InvalidResponse(DropServerError),
|
||||
InvalidResponse(ServerError),
|
||||
UnparseableResponse(String),
|
||||
ManifestDownloadFailed(StatusCode, String),
|
||||
OutOfSync,
|
||||
@ -54,7 +44,8 @@ impl Display for RemoteAccessError {
|
||||
error
|
||||
.source()
|
||||
.map(std::string::ToString::to_string)
|
||||
.unwrap_or("Unknown error".to_string())
|
||||
.or_else(|| Some("Unknown error".to_string()))
|
||||
.unwrap()
|
||||
)
|
||||
}
|
||||
RemoteAccessError::FetchErrorWS(error) => write!(
|
||||
@ -63,8 +54,9 @@ impl Display for RemoteAccessError {
|
||||
error,
|
||||
error
|
||||
.source()
|
||||
.map(std::string::ToString::to_string)
|
||||
.unwrap_or("Unknown error".to_string())
|
||||
.map(|e| e.to_string())
|
||||
.or_else(|| Some("Unknown error".to_string()))
|
||||
.unwrap()
|
||||
),
|
||||
RemoteAccessError::ParsingError(parse_error) => {
|
||||
write!(f, "{parse_error}")
|
||||
@ -114,23 +106,3 @@ impl From<ParseError> for RemoteAccessError {
|
||||
}
|
||||
}
|
||||
impl std::error::Error for RemoteAccessError {}
|
||||
|
||||
#[derive(Debug, SerializeDisplay)]
|
||||
pub enum CacheError {
|
||||
HeaderNotFound(HeaderName),
|
||||
ParseError(ToStrError),
|
||||
Remote(RemoteAccessError),
|
||||
ConstructionError(http::Error)
|
||||
}
|
||||
|
||||
impl Display for CacheError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let s = match self {
|
||||
CacheError::HeaderNotFound(header_name) => format!("Could not find header {header_name} in cache"),
|
||||
CacheError::ParseError(to_str_error) => format!("Could not parse cache with error {to_str_error}"),
|
||||
CacheError::Remote(remote_access_error) => format!("Cache got remote access error: {remote_access_error}"),
|
||||
CacheError::ConstructionError(error) => format!("Could not construct cache body with error {error}"),
|
||||
};
|
||||
write!(f, "{s}")
|
||||
}
|
||||
}
|
||||
src-tauri/drop-library/Cargo.toml (new file, 11 lines)
@ -0,0 +1,11 @@
|
||||
[package]
|
||||
name = "drop-library"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
drop-errors = { path = "../drop-errors" }
|
||||
http = "*"
|
||||
reqwest = { version = "*", default-features = false }
|
||||
serde = { version = "*", default-features = false, features = ["derive"] }
|
||||
tauri = "*"
|
||||
src-tauri/drop-library/src/errors.rs (new file, 11 lines)
@ -0,0 +1,11 @@
|
||||
pub enum DropLibraryError {
|
||||
NetworkError(reqwest::Error),
|
||||
ServerError(drop_errors::drop_server_error::ServerError),
|
||||
Unconfigured,
|
||||
}
|
||||
|
||||
impl From<reqwest::Error> for DropLibraryError {
|
||||
fn from(value: reqwest::Error) -> Self {
|
||||
DropLibraryError::NetworkError(value)
|
||||
}
|
||||
}
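A small sketch of the intended ergonomics; the function and its URL parameter are illustrative.

// Sketch: reqwest failures bubble up as DropLibraryError::NetworkError via `?`.
async fn _ping_sketch(url: &str) -> Result<(), DropLibraryError> {
    reqwest::get(url).await?;
    Ok(())
}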
src-tauri/drop-library/src/game.rs (new file, 30 lines)
@ -0,0 +1,30 @@
|
||||
use crate::libraries::LibraryProviderIdentifier;
|
||||
|
||||
pub struct LibraryGamePreview {
|
||||
pub library: LibraryProviderIdentifier,
|
||||
pub internal_id: String,
|
||||
pub name: String,
|
||||
pub short_description: String,
|
||||
pub icon: String,
|
||||
}
|
||||
|
||||
pub struct LibraryGame {
|
||||
pub library: LibraryProviderIdentifier,
|
||||
pub internal_id: String,
|
||||
pub name: String,
|
||||
pub short_description: String,
|
||||
pub md_description: String,
|
||||
pub icon: String,
|
||||
}
|
||||
|
||||
impl From<LibraryGame> for LibraryGamePreview {
|
||||
fn from(value: LibraryGame) -> Self {
|
||||
LibraryGamePreview {
|
||||
library: value.library,
|
||||
internal_id: value.internal_id,
|
||||
name: value.name,
|
||||
short_description: value.short_description,
|
||||
icon: value.icon,
|
||||
}
|
||||
}
|
||||
}
|
||||
src-tauri/drop-library/src/lib.rs (new file, 3 lines)
@ -0,0 +1,3 @@
|
||||
pub mod libraries;
|
||||
pub mod game;
|
||||
pub mod errors;
|
||||
src-tauri/drop-library/src/libraries.rs (new file, 76 lines)
@ -0,0 +1,76 @@
|
||||
use std::{
|
||||
fmt::Display,
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
};
|
||||
|
||||
use http::Request;
|
||||
use serde::{Deserialize, Serialize, de::DeserializeOwned};
|
||||
use tauri::UriSchemeResponder;
|
||||
|
||||
use crate::{
|
||||
errors::DropLibraryError,
|
||||
game::{LibraryGame, LibraryGamePreview},
|
||||
};
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct LibraryProviderIdentifier {
|
||||
internal_id: usize,
|
||||
name: String,
|
||||
}
|
||||
|
||||
impl PartialEq for LibraryProviderIdentifier {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.internal_id == other.internal_id
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for LibraryProviderIdentifier {}
|
||||
|
||||
impl Hash for LibraryProviderIdentifier {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.internal_id.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for LibraryProviderIdentifier {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(&self.name)
|
||||
}
|
||||
}
|
||||
|
||||
impl LibraryProviderIdentifier {
|
||||
pub fn str_hash(&self) -> String {
|
||||
let mut hasher = DefaultHasher::new();
|
||||
self.hash(&mut hasher);
|
||||
hasher.finish().to_string()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct LibraryFetchConfig {
|
||||
pub hard_refresh: bool,
|
||||
}
|
||||
|
||||
pub trait DropLibraryProvider: Serialize + DeserializeOwned + Sized {
|
||||
fn build(identifier: LibraryProviderIdentifier) -> Self;
|
||||
fn id(&self) -> &LibraryProviderIdentifier;
|
||||
fn load_object(
|
||||
&self,
|
||||
request: Request<Vec<u8>>,
|
||||
responder: UriSchemeResponder,
|
||||
) -> impl Future<Output = Result<(), DropLibraryError>> + Send;
|
||||
|
||||
fn fetch_library(
|
||||
&self,
|
||||
config: &LibraryFetchConfig,
|
||||
) -> impl Future<Output = Result<Vec<LibraryGamePreview>, DropLibraryError>> + Send;
|
||||
fn fetch_game(
|
||||
&self,
|
||||
config: &LibraryFetchConfig,
|
||||
) -> impl Future<Output = Result<LibraryGame, DropLibraryError>> + Send;
|
||||
|
||||
|
||||
|
||||
fn owns_game(&self, id: &LibraryProviderIdentifier) -> bool {
|
||||
self.id().internal_id == id.internal_id
|
||||
}
|
||||
}
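A usage sketch for the default owns_game method; the lookup helper is illustrative and not part of the diff.

// Sketch: identifiers hash/compare by internal_id only, so a game can be routed
// back to its owning provider regardless of the library's display name.
fn _find_owner_sketch<'a, P: DropLibraryProvider>(
    providers: &'a [P],
    id: &LibraryProviderIdentifier,
) -> Option<&'a P> {
    providers.iter().find(|p| p.owns_game(id))
}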
src-tauri/drop-native-library/Cargo.toml (new file, 14 lines)
@ -0,0 +1,14 @@
|
||||
[package]
|
||||
name = "drop-native-library"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
bitcode = "*"
|
||||
drop-errors = { path = "../drop-errors" }
|
||||
drop-library = { path = "../drop-library" }
|
||||
drop-remote = { path = "../drop-remote" }
|
||||
log = "*"
|
||||
serde = { version = "*", features = ["derive"] }
|
||||
tauri = "*"
|
||||
url = "*"
|
||||
@ -1,8 +1,7 @@
|
||||
use bitcode::{Decode, Encode};
|
||||
// use drop_database::runtime_models::Game;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::library::Game;
|
||||
|
||||
pub type Collections = Vec<Collection>;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Default, Encode, Decode)]
|
||||
src-tauri/drop-native-library/src/events.rs (new file, 11 lines)
@ -0,0 +1,11 @@
|
||||
use drop_database::models::data::{ApplicationTransientStatus, GameDownloadStatus, GameVersion};
|
||||
|
||||
#[derive(serde::Serialize, Clone)]
|
||||
pub struct GameUpdateEvent {
|
||||
pub game_id: String,
|
||||
pub status: (
|
||||
Option<GameDownloadStatus>,
|
||||
Option<ApplicationTransientStatus>,
|
||||
),
|
||||
pub version: Option<GameVersion>,
|
||||
}
|
||||
src-tauri/drop-native-library/src/impls.rs (new file, 50 lines)
@ -0,0 +1,50 @@
|
||||
use drop_library::{
|
||||
errors::DropLibraryError, game::{LibraryGame, LibraryGamePreview}, libraries::{DropLibraryProvider, LibraryFetchConfig, LibraryProviderIdentifier}
|
||||
};
|
||||
use drop_remote::{fetch_object::fetch_object, DropRemoteContext};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct DropNativeLibraryProvider {
|
||||
identifier: LibraryProviderIdentifier,
|
||||
context: Option<DropRemoteContext>,
|
||||
}
|
||||
|
||||
impl DropNativeLibraryProvider {
|
||||
pub fn configure(&mut self, base_url: Url) {
|
||||
self.context = Some(DropRemoteContext::new(base_url));
|
||||
}
|
||||
}
|
||||
|
||||
impl DropLibraryProvider for DropNativeLibraryProvider {
|
||||
fn build(identifier: LibraryProviderIdentifier) -> Self {
|
||||
Self {
|
||||
identifier,
|
||||
context: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn id(&self) -> &LibraryProviderIdentifier {
|
||||
&self.identifier
|
||||
}
|
||||
|
||||
async fn load_object(&self, request: tauri::http::Request<Vec<u8>>, responder: tauri::UriSchemeResponder) -> Result<(), DropLibraryError> {
|
||||
let context = self.context.as_ref().ok_or(DropLibraryError::Unconfigured)?;
|
||||
fetch_object(context, request, responder).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn fetch_library(
|
||||
&self,
|
||||
config: &LibraryFetchConfig
|
||||
) -> Result<Vec<LibraryGamePreview>, DropLibraryError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
async fn fetch_game(&self, config: &LibraryFetchConfig) -> Result<LibraryGame, DropLibraryError> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
src-tauri/drop-native-library/src/lib.rs (new file, 5 lines)
@ -0,0 +1,5 @@
|
||||
//pub mod collections;
|
||||
//pub mod library;
|
||||
//pub mod state;
|
||||
//pub mod events;
|
||||
pub mod impls;
|
||||
src-tauri/drop-native-library/src/library.rs (new file, 493 lines)
@ -0,0 +1,493 @@
|
||||
use std::fs::remove_dir_all;
|
||||
use std::thread::spawn;
|
||||
|
||||
use drop_database::borrow_db_checked;
|
||||
use drop_database::borrow_db_mut_checked;
|
||||
use drop_database::models::data::ApplicationTransientStatus;
|
||||
use drop_database::models::data::Database;
|
||||
use drop_database::models::data::DownloadableMetadata;
|
||||
use drop_database::models::data::GameDownloadStatus;
|
||||
use drop_database::models::data::GameVersion;
|
||||
use drop_database::runtime_models::Game;
|
||||
use drop_errors::drop_server_error::ServerError;
|
||||
use drop_errors::library_error::LibraryError;
|
||||
use drop_errors::remote_access_error::RemoteAccessError;
|
||||
use drop_remote::DropRemoteContext;
|
||||
use drop_remote::auth::generate_authorization_header;
|
||||
use drop_remote::cache::cache_object;
|
||||
use drop_remote::cache::cache_object_db;
|
||||
use drop_remote::cache::get_cached_object;
|
||||
use drop_remote::cache::get_cached_object_db;
|
||||
use drop_remote::requests::generate_url;
|
||||
use drop_remote::utils::DROP_CLIENT_ASYNC;
|
||||
use drop_remote::utils::DROP_CLIENT_SYNC;
|
||||
use log::{debug, error, warn};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::AppHandle;
|
||||
use tauri::Emitter as _;
|
||||
|
||||
use crate::events::GameUpdateEvent;
|
||||
use crate::state::GameStatusManager;
|
||||
use crate::state::GameStatusWithTransient;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct FetchGameStruct {
|
||||
game: Game,
|
||||
status: GameStatusWithTransient,
|
||||
version: Option<GameVersion>,
|
||||
}
|
||||
|
||||
pub async fn fetch_library_logic(
|
||||
context: &DropRemoteContext,
|
||||
hard_fresh: Option<bool>,
|
||||
) -> Result<Vec<Game>, RemoteAccessError> {
|
||||
let do_hard_refresh = hard_fresh.unwrap_or(false);
|
||||
if !do_hard_refresh && let Ok(library) = get_cached_object("library") {
|
||||
return Ok(library);
|
||||
}
|
||||
|
||||
let client = DROP_CLIENT_ASYNC.clone();
|
||||
let response = generate_url(context, &["/api/v1/client/user/library"], &[])?;
|
||||
let response = client
|
||||
.get(response)
|
||||
.header("Authorization", generate_authorization_header(context))
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if response.status() != 200 {
|
||||
let err = response.json().await.unwrap_or(ServerError {
|
||||
status_code: 500,
|
||||
status_message: "Invalid response from server.".to_owned(),
|
||||
});
|
||||
warn!("{err:?}");
|
||||
return Err(RemoteAccessError::InvalidResponse(err));
|
||||
}
|
||||
|
||||
let mut games: Vec<Game> = response.json().await?;
|
||||
|
||||
let mut db_handle = borrow_db_mut_checked();
|
||||
|
||||
for game in &games {
|
||||
db_handle
|
||||
.applications
|
||||
.games
|
||||
.insert(game.id.clone(), game.clone());
|
||||
if !db_handle.applications.game_statuses.contains_key(&game.id) {
|
||||
db_handle
|
||||
.applications
|
||||
.game_statuses
|
||||
.insert(game.id.clone(), GameDownloadStatus::Remote {});
|
||||
}
|
||||
}
|
||||
|
||||
// Add games that are installed but no longer in library
|
||||
for meta in db_handle.applications.installed_game_version.values() {
|
||||
if games.iter().any(|e| e.id == meta.id) {
|
||||
continue;
|
||||
}
|
||||
// We should always have a cache of the object
|
||||
// Pass db_handle because otherwise we get a gridlock
|
||||
let game = match get_cached_object_db::<Game>(&meta.id.clone()) {
|
||||
Ok(game) => game,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"{} is installed, but encountered error fetching its error: {}.",
|
||||
meta.id, err
|
||||
);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
games.push(game);
|
||||
}
|
||||
|
||||
drop(db_handle);
|
||||
cache_object("library", &games)?;
|
||||
|
||||
Ok(games)
|
||||
}
|
||||
pub async fn fetch_library_logic_offline(
|
||||
_hard_refresh: Option<bool>,
|
||||
) -> Result<Vec<Game>, RemoteAccessError> {
|
||||
let mut games: Vec<Game> = get_cached_object("library")?;
|
||||
|
||||
let db_handle = borrow_db_checked();
|
||||
|
||||
games.retain(|game| {
|
||||
matches!(
|
||||
&db_handle
|
||||
.applications
|
||||
.game_statuses
|
||||
.get(&game.id)
|
||||
.unwrap_or(&GameDownloadStatus::Remote {}),
|
||||
GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }
|
||||
)
|
||||
});
|
||||
|
||||
Ok(games)
|
||||
}
|
||||
pub async fn fetch_game_logic(
|
||||
context: &DropRemoteContext,
|
||||
id: String,
|
||||
) -> Result<FetchGameStruct, RemoteAccessError> {
|
||||
let version = {
|
||||
let db_lock = borrow_db_checked();
|
||||
|
||||
let metadata_option = db_lock.applications.installed_game_version.get(&id);
|
||||
let version = match metadata_option {
|
||||
None => None,
|
||||
Some(metadata) => db_lock
|
||||
.applications
|
||||
.game_versions
|
||||
.get(&metadata.id)
|
||||
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
|
||||
.cloned(),
|
||||
};
|
||||
|
||||
let game = db_lock.applications.games.get(&id);
|
||||
if let Some(game) = game {
|
||||
let status = GameStatusManager::fetch_state(&id, &db_lock);
|
||||
|
||||
let data = FetchGameStruct {
|
||||
game: game.clone(),
|
||||
status,
|
||||
version,
|
||||
};
|
||||
|
||||
cache_object_db(&id, game, &db_lock)?;
|
||||
|
||||
return Ok(data);
|
||||
}
|
||||
|
||||
version
|
||||
};
|
||||
|
||||
let client = DROP_CLIENT_ASYNC.clone();
|
||||
let response = generate_url(context, &["/api/v1/client/game/", &id], &[])?;
|
||||
let response = client
|
||||
.get(response)
|
||||
.header("Authorization", generate_authorization_header(context))
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if response.status() == 404 {
|
||||
let offline_fetch = fetch_game_logic_offline(id.clone()).await;
|
||||
if let Ok(fetch_data) = offline_fetch {
|
||||
return Ok(fetch_data);
|
||||
}
|
||||
|
||||
return Err(RemoteAccessError::GameNotFound(id));
|
||||
}
|
||||
if response.status() != 200 {
|
||||
let err = response.json().await.unwrap();
|
||||
warn!("{err:?}");
|
||||
return Err(RemoteAccessError::InvalidResponse(err));
|
||||
}
|
||||
|
||||
let game: Game = response.json().await?;
|
||||
|
||||
let mut db_handle = borrow_db_mut_checked();
|
||||
db_handle
|
||||
.applications
|
||||
.games
|
||||
.insert(id.clone(), game.clone());
|
||||
|
||||
db_handle
|
||||
.applications
|
||||
.game_statuses
|
||||
.entry(id.clone())
|
||||
.or_insert(GameDownloadStatus::Remote {});
|
||||
|
||||
let status = GameStatusManager::fetch_state(&id, &db_handle);
|
||||
|
||||
drop(db_handle);
|
||||
|
||||
let data = FetchGameStruct {
|
||||
game: game.clone(),
|
||||
status,
|
||||
version,
|
||||
};
|
||||
|
||||
cache_object(&id, &game)?;
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
pub async fn fetch_game_logic_offline(id: String) -> Result<FetchGameStruct, RemoteAccessError> {
|
||||
let db_handle = borrow_db_checked();
|
||||
let metadata_option = db_handle.applications.installed_game_version.get(&id);
|
||||
let version = match metadata_option {
|
||||
None => None,
|
||||
Some(metadata) => db_handle
|
||||
.applications
|
||||
.game_versions
            .get(&metadata.id)
            .map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
            .cloned(),
    };

    let status = GameStatusManager::fetch_state(&id, &db_handle);
    let game = get_cached_object::<Game>(&id)?;

    drop(db_handle);

    Ok(FetchGameStruct {
        game,
        status,
        version,
    })
}

pub async fn fetch_game_version_options_logic(
    context: &DropRemoteContext,
    game_id: String,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
    let client = DROP_CLIENT_ASYNC.clone();

    let response = generate_url(
        context,
        &["/api/v1/client/game/versions"],
        &[("id", &game_id)],
    )?;
    let response = client
        .get(response)
        .header("Authorization", generate_authorization_header(context))
        .send()
        .await?;

    if response.status() != 200 {
        let err = response.json().await.unwrap();
        warn!("{err:?}");
        return Err(RemoteAccessError::InvalidResponse(err));
    }

    let data: Vec<GameVersion> = response.json().await?;

    Ok(data)
}

/**
 * Called by:
 * - on_cancel, when cancelled, for obvious reasons
 * - when downloading, so if drop unexpectedly quits, we can resume the download. hidden by the "Downloading..." transient state, though
 * - when scanning, to import the game
 */
pub fn set_partially_installed(
    meta: &DownloadableMetadata,
    install_dir: String,
    app_handle: Option<&AppHandle>,
) {
    set_partially_installed_db(&mut borrow_db_mut_checked(), meta, install_dir, app_handle);
}

pub fn set_partially_installed_db(
    db_lock: &mut Database,
    meta: &DownloadableMetadata,
    install_dir: String,
    app_handle: Option<&AppHandle>,
) {
    db_lock.applications.transient_statuses.remove(meta);
    db_lock.applications.game_statuses.insert(
        meta.id.clone(),
        GameDownloadStatus::PartiallyInstalled {
            version_name: meta.version.as_ref().unwrap().clone(),
            install_dir,
        },
    );
    db_lock
        .applications
        .installed_game_version
        .insert(meta.id.clone(), meta.clone());

    if let Some(app_handle) = app_handle {
        push_game_update(
            app_handle,
            &meta.id,
            None,
            GameStatusManager::fetch_state(&meta.id, db_lock),
        );
    }
}

pub fn uninstall_game_logic(meta: DownloadableMetadata, app_handle: &AppHandle) {
    debug!("triggered uninstall for agent");
    let mut db_handle = borrow_db_mut_checked();
    db_handle
        .applications
        .transient_statuses
        .insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});

    push_game_update(
        app_handle,
        &meta.id,
        None,
        GameStatusManager::fetch_state(&meta.id, &db_handle),
    );

    let previous_state = db_handle.applications.game_statuses.get(&meta.id).cloned();
    if previous_state.is_none() {
        warn!("uninstall job doesn't have previous state, failing silently");
        return;
    }
    let previous_state = previous_state.unwrap();

    if let Some((_, install_dir)) = match previous_state {
        GameDownloadStatus::Installed {
            version_name,
            install_dir,
        } => Some((version_name, install_dir)),
        GameDownloadStatus::SetupRequired {
            version_name,
            install_dir,
        } => Some((version_name, install_dir)),
        GameDownloadStatus::PartiallyInstalled {
            version_name,
            install_dir,
        } => Some((version_name, install_dir)),
        _ => None,
    } {
        db_handle
            .applications
            .transient_statuses
            .insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});

        drop(db_handle);

        let app_handle = app_handle.clone();
        spawn(move || {
            if let Err(e) = remove_dir_all(install_dir) {
                error!("{e}");
            } else {
                let mut db_handle = borrow_db_mut_checked();
                db_handle.applications.transient_statuses.remove(&meta);
                db_handle
                    .applications
                    .installed_game_version
                    .remove(&meta.id);
                db_handle
                    .applications
                    .game_statuses
                    .insert(meta.id.clone(), GameDownloadStatus::Remote {});
                let _ = db_handle.applications.transient_statuses.remove(&meta);

                push_game_update(
                    &app_handle,
                    &meta.id,
                    None,
                    GameStatusManager::fetch_state(&meta.id, &db_handle),
                );

                debug!("uninstalled game id {}", &meta.id);
                app_handle.emit("update_library", ()).unwrap();
            }
        });
    } else {
        warn!("invalid previous state for uninstall, failing silently.");
    }
}

pub fn get_current_meta(game_id: &String) -> Option<DownloadableMetadata> {
    borrow_db_checked()
        .applications
        .installed_game_version
        .get(game_id)
        .cloned()
}

pub fn on_game_complete(
    context: &DropRemoteContext,
    meta: &DownloadableMetadata,
    install_dir: String,
    app_handle: &AppHandle,
) -> Result<(), RemoteAccessError> {
    // Fetch game version information from remote
    if meta.version.is_none() {
        return Err(RemoteAccessError::GameNotFound(meta.id.clone()));
    }

    let client = DROP_CLIENT_SYNC.clone();
    let response = generate_url(
        context,
        &["/api/v1/client/game/version"],
        &[
            ("id", &meta.id),
            ("version", meta.version.as_ref().unwrap()),
        ],
    )?;
    let response = client
        .get(response)
        .header("Authorization", generate_authorization_header(context))
        .send()?;

    let game_version: GameVersion = response.json()?;

    let mut handle = borrow_db_mut_checked();
    handle
        .applications
        .game_versions
        .entry(meta.id.clone())
        .or_default()
        .insert(meta.version.clone().unwrap(), game_version.clone());
    handle
        .applications
        .installed_game_version
        .insert(meta.id.clone(), meta.clone());

    drop(handle);

    let status = if game_version.setup_command.is_empty() {
        GameDownloadStatus::Installed {
            version_name: meta.version.clone().unwrap(),
            install_dir,
        }
    } else {
        GameDownloadStatus::SetupRequired {
            version_name: meta.version.clone().unwrap(),
            install_dir,
        }
    };

    let mut db_handle = borrow_db_mut_checked();
    db_handle
        .applications
        .game_statuses
        .insert(meta.id.clone(), status.clone());
    drop(db_handle);

    app_handle
        .emit(
            &format!("update_game/{}", meta.id),
            GameUpdateEvent {
                game_id: meta.id.clone(),
                status: (Some(status), None),
                version: Some(game_version),
            },
        )
        .unwrap();

    Ok(())
}

pub fn push_game_update(
    app_handle: &AppHandle,
    game_id: &String,
    version: Option<GameVersion>,
    status: GameStatusWithTransient,
) {
    if let Some(GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }) =
        &status.0
        && version.is_none()
    {
        panic!("pushed game for installed game that doesn't have version information");
    }

    app_handle
        .emit(
            &format!("update_game/{game_id}"),
            GameUpdateEvent {
                game_id: game_id.clone(),
                status,
                version,
            },
        )
        .unwrap();
}
@@ -1,6 +1,4 @@
use database::models::data::{
    ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus,
};
// use drop_database::models::data::{ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus};

pub type GameStatusWithTransient = (
    Option<GameDownloadStatus>,

src-tauri/drop-process/Cargo.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
[package]
name = "drop-process"
version = "0.1.0"
edition = "2024"

[dependencies]
chrono = "0.4.42"
drop-database = { path = "../drop-database" }
drop-errors = { path = "../drop-errors" }
drop-native-library = { path = "../drop-native-library" }
dynfmt = { version = "0.1.5", features = ["curly"] }
log = "0.4.28"
page_size = "0.6.0"
shared_child = "1.1.1"
sysinfo = "0.37.0"
tauri = "2.8.5"
tauri-plugin-opener = "2.5.0"

src-tauri/drop-process/src/lib.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
mod format;
mod process_handlers;
pub mod process_manager;
pub mod utils;
@@ -1,9 +1,15 @@
use client::compat::{COMPAT_INFO, UMU_LAUNCHER_EXECUTABLE};
use database::{platform::Platform, Database, DownloadableMetadata, GameVersion};
use log::debug;
use std::{
    ffi::OsStr,
    path::PathBuf,
    process::{Command, Stdio},
    sync::LazyLock,
};

use drop_database::{models::data::{Database, DownloadableMetadata, GameVersion}, process::Platform};
use log::{debug, info};

use crate::process_manager::ProcessHandler;

use crate::{error::ProcessError, process_manager::ProcessHandler};

pub struct NativeGameLauncher;
impl ProcessHandler for NativeGameLauncher {
@@ -14,8 +20,8 @@ impl ProcessHandler for NativeGameLauncher {
        args: Vec<String>,
        _game_version: &GameVersion,
        _current_dir: &str,
    ) -> Result<String, ProcessError> {
        Ok(format!("\"{}\" {}", launch_command, args.join(" ")))
    ) -> String {
        format!("\"{}\" {}", launch_command, args.join(" "))
    }

    fn valid_for_platform(&self, _db: &Database, _target: &Platform) -> bool {
@@ -23,6 +29,31 @@ impl ProcessHandler for NativeGameLauncher {
    }
}

pub static UMU_LAUNCHER_EXECUTABLE: LazyLock<Option<PathBuf>> = LazyLock::new(|| {
    let x = get_umu_executable();
    info!("{:?}", &x);
    x
});
const UMU_BASE_LAUNCHER_EXECUTABLE: &str = "umu-run";
const UMU_INSTALL_DIRS: [&str; 4] = ["/app/share", "/use/local/share", "/usr/share", "/opt"];

fn get_umu_executable() -> Option<PathBuf> {
    if check_executable_exists(UMU_BASE_LAUNCHER_EXECUTABLE) {
        return Some(PathBuf::from(UMU_BASE_LAUNCHER_EXECUTABLE));
    }

    for dir in UMU_INSTALL_DIRS {
        let p = PathBuf::from(dir).join(UMU_BASE_LAUNCHER_EXECUTABLE);
        if check_executable_exists(&p) {
            return Some(p);
        }
    }
    None
}
fn check_executable_exists<P: AsRef<OsStr>>(exec: P) -> bool {
    let has_umu_installed = Command::new(exec).stdout(Stdio::null()).output();
    has_umu_installed.is_ok()
}
pub struct UMULauncher;
impl ProcessHandler for UMULauncher {
    fn create_launch_process(
@@ -32,7 +63,7 @@ impl ProcessHandler for UMULauncher {
        args: Vec<String>,
        game_version: &GameVersion,
        _current_dir: &str,
    ) -> Result<String, ProcessError> {
    ) -> String {
        debug!("Game override: \"{:?}\"", &game_version.umu_id_override);
        let game_id = match &game_version.umu_id_override {
            Some(game_override) => {
@@ -44,19 +75,16 @@ impl ProcessHandler for UMULauncher {
            }
            None => game_version.game_id.clone(),
        };
        Ok(format!(
        format!(
            "GAMEID={game_id} {umu:?} \"{launch}\" {args}",
            umu = UMU_LAUNCHER_EXECUTABLE.as_ref().expect("Failed to get UMU_LAUNCHER_EXECUTABLE as ref"),
            umu = UMU_LAUNCHER_EXECUTABLE.as_ref().unwrap(),
            launch = launch_command,
            args = args.join(" ")
        ))
        )
    }

    fn valid_for_platform(&self, _db: &Database, _target: &Platform) -> bool {
        let Some(compat_info) = &*COMPAT_INFO else {
            return false;
        };
        compat_info.umu_installed
        UMU_LAUNCHER_EXECUTABLE.is_some()
    }
}

@@ -69,7 +97,7 @@ impl ProcessHandler for AsahiMuvmLauncher {
        args: Vec<String>,
        game_version: &GameVersion,
        current_dir: &str,
    ) -> Result<String, ProcessError> {
    ) -> String {
        let umu_launcher = UMULauncher {};
        let umu_string = umu_launcher.create_launch_process(
            meta,
@@ -77,18 +105,15 @@ impl ProcessHandler for AsahiMuvmLauncher {
            args,
            game_version,
            current_dir,
        )?;
        );
        let mut args_cmd = umu_string
            .split("umu-run")
            .collect::<Vec<&str>>()
            .into_iter();
        let args = args_cmd
            .next()
            .ok_or(ProcessError::InvalidArguments(umu_string.clone()))?
            .trim();
        let cmd = format!("umu-run{}", args_cmd.next().ok_or(ProcessError::InvalidArguments(umu_string.clone()))?);
        let args = args_cmd.next().unwrap().trim();
        let cmd = format!("umu-run{}", args_cmd.next().unwrap());

        Ok(format!("{args} muvm -- {cmd}"))
        format!("{args} muvm -- {cmd}")
    }

    #[allow(unreachable_code)]
@@ -105,10 +130,6 @@ impl ProcessHandler for AsahiMuvmLauncher {
            return false;
        }

        let Some(compat_info) = &*COMPAT_INFO else {
            return false;
        };

        compat_info.umu_installed
        UMU_LAUNCHER_EXECUTABLE.is_some()
    }
}
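
Aside: the hunk above swaps the old `COMPAT_INFO` check for a lazily resolved `UMU_LAUNCHER_EXECUTABLE`. Below is a minimal, self-contained sketch of that lazy-probe pattern; the names `find_executable`, `runs`, and `CANDIDATE_DIRS` are illustrative stand-ins, not items from this diff.

```rust
use std::{path::PathBuf, process::{Command, Stdio}, sync::LazyLock};

const CANDIDATE_DIRS: [&str; 2] = ["/usr/share", "/opt"];

// Resolved once on first access; later reads reuse the cached Option.
static LAUNCHER: LazyLock<Option<PathBuf>> = LazyLock::new(|| find_executable("umu-run"));

// Probe the bare name first (relying on PATH), then a few known prefixes.
fn find_executable(name: &str) -> Option<PathBuf> {
    if runs(name) {
        return Some(PathBuf::from(name));
    }
    CANDIDATE_DIRS
        .iter()
        .map(|dir| PathBuf::from(dir).join(name))
        .find(|path| runs(path.to_str().unwrap_or_default()))
}

// `output()` fails if the binary cannot be spawned at all, so is_ok() doubles
// as an existence check.
fn runs(cmd: &str) -> bool {
    Command::new(cmd).stdout(Stdio::null()).output().is_ok()
}

fn main() {
    println!("umu-run available: {}", LAUNCHER.is_some());
}
```

The point of `LazyLock` here is that the filesystem probe runs at most once, no matter how many launchers consult the result.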
@@ -1,31 +1,26 @@
use std::{
    collections::HashMap,
    fs::{OpenOptions, create_dir_all},
    io,
    io::{self},
    path::PathBuf,
    process::{Command, ExitStatus},
    str::FromStr,
    sync::Arc,
    sync::{Arc, Mutex},
    thread::spawn,
    time::{Duration, SystemTime},
};

use database::{
    ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus,
    GameVersion, borrow_db_checked, borrow_db_mut_checked, db::DATA_ROOT_DIR, platform::Platform,
};
use drop_database::{borrow_db_checked, borrow_db_mut_checked, db::DATA_ROOT_DIR, models::data::{ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus, GameVersion}, process::Platform, DB};
use drop_errors::process_error::ProcessError;
use drop_native_library::{library::push_game_update, state::GameStatusManager};
use dynfmt::Format;
use dynfmt::SimpleCurlyFormat;
use games::state::GameStatusManager;
use log::{debug, info, warn};
use serde::{Deserialize, Serialize};
use shared_child::SharedChild;
use tauri::{AppHandle, Emitter};
use tauri_plugin_opener::OpenerExt;

use crate::{
    PROCESS_MANAGER,
    error::ProcessError,
    format::DropFormatArgs,
    process_handlers::{AsahiMuvmLauncher, NativeGameLauncher, UMULauncher},
};
use crate::{format::DropFormatArgs, process_handlers::{AsahiMuvmLauncher, NativeGameLauncher, UMULauncher}};

pub struct RunningProcess {
    handle: Arc<SharedChild>,
@@ -37,6 +32,7 @@ pub struct ProcessManager<'a> {
    current_platform: Platform,
    log_output_dir: PathBuf,
    processes: HashMap<String, RunningProcess>,
    app_handle: AppHandle,
    game_launchers: Vec<(
        (Platform, Platform),
        &'a (dyn ProcessHandler + Sync + Send + 'static),
@@ -44,7 +40,7 @@ pub struct ProcessManager<'a> {
}

impl ProcessManager<'_> {
    pub fn new() -> Self {
    pub fn new(app_handle: AppHandle) -> Self {
        let log_output_dir = DATA_ROOT_DIR.join("logs");

        ProcessManager {
@@ -57,6 +53,7 @@ impl ProcessManager<'_> {
            #[cfg(target_os = "linux")]
            current_platform: Platform::Linux,

            app_handle,
            processes: HashMap::new(),
            log_output_dir,
            game_launchers: vec![
@@ -100,31 +97,30 @@ impl ProcessManager<'_> {
        }
    }

    pub fn get_log_dir(&self, game_id: String) -> PathBuf {
    fn get_log_dir(&self, game_id: String) -> PathBuf {
        self.log_output_dir.join(game_id)
    }

    fn on_process_finish(
        &mut self,
        game_id: String,
        result: Result<ExitStatus, std::io::Error>,
    ) -> Result<(), ProcessError> {
    pub fn open_process_logs(&mut self, game_id: String) -> Result<(), ProcessError> {
        let dir = self.get_log_dir(game_id);
        self.app_handle
            .opener()
            .open_path(dir.to_str().unwrap(), None::<&str>)
            .map_err(ProcessError::OpenerError)?;
        Ok(())
    }

    fn on_process_finish(&mut self, game_id: String, result: Result<ExitStatus, std::io::Error>) {
        if !self.processes.contains_key(&game_id) {
            warn!(
                "process on_finish was called, but game_id is no longer valid. finished with result: {result:?}"
            );
            return Ok(());
            return;
        }

        debug!("process for {:?} exited with {:?}", &game_id, result);

        let process = match self.processes.remove(&game_id) {
            Some(process) => process,
            None => {
                info!("Attempted to stop process {game_id} which didn't exist");
                return Ok(());
            }
        };
        let process = self.processes.remove(&game_id).unwrap();

        let mut db_handle = borrow_db_mut_checked();
        let meta = db_handle
@@ -132,7 +128,7 @@ impl ProcessManager<'_> {
            .installed_game_version
            .get(&game_id)
            .cloned()
            .unwrap_or_else(|| panic!("Could not get installed version of {}", &game_id));
            .unwrap();
        db_handle.applications.transient_statuses.remove(&meta);

        let current_state = db_handle.applications.game_statuses.get(&game_id).cloned();
@@ -157,29 +153,29 @@ impl ProcessManager<'_> {
        // Or if the status isn't 0
        // Or if it's an error
        if !process.manually_killed
            && (elapsed.as_secs() <= 2 || result.map_or(true, |r| !r.success()))
            && (elapsed.as_secs() <= 2 || result.is_err() || !result.unwrap().success())
        {
            warn!("drop detected that the game {game_id} may have failed to launch properly");
            return Err(ProcessError::FailedLaunch(game_id));
            // let _ = self.app_handle.emit("launch_external_error", &game_id);
            let _ = self.app_handle.emit("launch_external_error", &game_id);
        }

        let version_data = match db_handle.applications.game_versions.get(&game_id) {
            // This unwrap here should be resolved by just making the hashmap accept an option rather than just a String
            Some(res) => res.get(&meta.version.unwrap()).expect("Failed to get game version from installed game versions. Is the database corrupted?"),
            None => todo!(),
        };
        // This is too many unwraps for me to be comfortable
        let version_data = db_handle
            .applications
            .game_versions
            .get(&game_id)
            .unwrap()
            .get(&meta.version.unwrap())
            .unwrap();

        let status = GameStatusManager::fetch_state(&game_id, &db_handle);

        // TODO
        // push_game_update(
        //     &self.app_handle,
        //     &game_id,
        //     Some(version_data.clone()),
        //     status,
        // );
        Ok(())
        push_game_update(
            &self.app_handle,
            &game_id,
            Some(version_data.clone()),
            status,
        );
    }

    fn fetch_process_handler(
@@ -200,19 +196,24 @@ impl ProcessManager<'_> {
            .1)
    }

    pub fn valid_platform(&self, platform: &Platform) -> bool {
    pub fn valid_platform(&self, platform: &Platform,) -> Result<bool, String> {
        let db_lock = borrow_db_checked();
        let process_handler = self.fetch_process_handler(&db_lock, platform);
        process_handler.is_ok()
        Ok(process_handler.is_ok())
    }

    /// Must be called through spawn as it is currently blocking
    pub fn launch_process(&mut self, game_id: String) -> Result<(), ProcessError> {
    pub fn launch_process(
        &mut self,
        game_id: String,
        process_manager_lock: &'static Mutex<ProcessManager<'static>>,
    ) -> Result<(), ProcessError> {
        if self.processes.contains_key(&game_id) {
            return Err(ProcessError::AlreadyRunning);
        }

        let version = match borrow_db_checked()
        let version = match DB
            .borrow_data()
            .unwrap()
            .applications
            .game_statuses
            .get(&game_id)
@@ -251,7 +252,7 @@ impl ProcessManager<'_> {
        debug!(
            "Launching process {:?} with version {:?}",
            &game_id,
            db_lock.applications.game_versions.get(&game_id)
            db_lock.applications.game_versions.get(&game_id).unwrap()
        );

        let game_version = db_lock
@@ -307,9 +308,8 @@ impl ProcessManager<'_> {
            GameDownloadStatus::Remote {} => unreachable!("Game registered as 'Remote'"),
        };

        #[allow(clippy::unwrap_used)]
        let launch = PathBuf::from_str(install_dir).unwrap().join(launch);
        let launch = launch.display().to_string();
        let launch = launch.to_str().unwrap();

        let launch_string = process_handler.create_launch_process(
            &meta,
@@ -317,7 +317,7 @@ impl ProcessManager<'_> {
            args.clone(),
            game_version,
            install_dir,
        )?;
        );

        let format_args = DropFormatArgs::new(
            launch_string,
@@ -363,17 +363,27 @@ impl ProcessManager<'_> {
            .transient_statuses
            .insert(meta.clone(), ApplicationTransientStatus::Running {});

        // TODO
        // push_game_update(
        //     &self.app_handle,
        //     &meta.id,
        //     None,
        //     (None, Some(ApplicationTransientStatus::Running {})),
        // );
        push_game_update(
            &self.app_handle,
            &meta.id,
            None,
            (None, Some(ApplicationTransientStatus::Running {})),
        );

        let wait_thread_handle = launch_process_handle.clone();
        let wait_thread_game_id = meta.clone();

        spawn(move || {
            let result: Result<ExitStatus, std::io::Error> = launch_process_handle.wait();

            let mut process_manager_handle = process_manager_lock.lock().unwrap();
            process_manager_handle.on_process_finish(wait_thread_game_id.id, result);

            // As everything goes out of scope, they should get dropped
            // But just to explicit about it
            drop(process_manager_handle);
        });

        self.processes.insert(
            meta.id,
            RunningProcess {
@@ -382,12 +392,7 @@ impl ProcessManager<'_> {
                manually_killed: false,
            },
        );

        let result: Result<ExitStatus, std::io::Error> = launch_process_handle.wait();

        PROCESS_MANAGER
            .lock()
            .on_process_finish(wait_thread_game_id.id, result)
        Ok(())
    }
}

@@ -399,7 +404,7 @@ pub trait ProcessHandler: Send + 'static {
        args: Vec<String>,
        game_version: &GameVersion,
        current_dir: &str,
    ) -> Result<String, ProcessError>;
    ) -> String;

    fn valid_for_platform(&self, db: &Database, target: &Platform) -> bool;
}
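
For orientation: one side of this diff has `create_launch_process` return `Result<String, ProcessError>`, the other a bare `String`. A small, self-contained sketch of the fallible shape, using stand-in types rather than the crate's real `Database`/`GameVersion`:

```rust
#[derive(Debug)]
enum ProcessError {
    InvalidArguments(String),
}

trait ProcessHandler {
    // Build the shell command line used to start the game.
    fn create_launch_process(
        &self,
        launch_command: &str,
        args: Vec<String>,
    ) -> Result<String, ProcessError>;
}

struct NativeLauncher;

impl ProcessHandler for NativeLauncher {
    fn create_launch_process(
        &self,
        launch_command: &str,
        args: Vec<String>,
    ) -> Result<String, ProcessError> {
        if launch_command.is_empty() {
            return Err(ProcessError::InvalidArguments("empty launch command".into()));
        }
        Ok(format!("\"{}\" {}", launch_command, args.join(" ")))
    }
}

fn main() -> Result<(), ProcessError> {
    let launcher = NativeLauncher;
    // Hypothetical install path and argument, purely for illustration.
    let cmd = launcher.create_launch_process("/games/demo/run.sh", vec!["--fullscreen".into()])?;
    println!("{cmd}");
    Ok(())
}
```

The fallible form lets launchers such as the muvm wrapper report a malformed command line instead of unwrapping inside the trait implementation.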
@@ -1,6 +1,6 @@
use std::{io, path::PathBuf, sync::Arc};

use download_manager::error::ApplicationDownloadError;
use drop_errors::application_download_error::ApplicationDownloadError;
use sysinfo::{Disk, DiskRefreshKind, Disks};

pub fn get_disk_available(mount_point: PathBuf) -> Result<u64, ApplicationDownloadError> {

@@ -1,13 +1,13 @@
[package]
name = "remote"
name = "drop-remote"
version = "0.1.0"
edition = "2024"

[dependencies]
bitcode = "0.6.7"
chrono = "0.4.42"
client = { version = "0.1.0", path = "../client" }
database = { version = "0.1.0", path = "../database" }
drop-consts = { path = "../drop-consts" }
drop-errors = { path = "../drop-errors" }
droplet-rs = "0.7.3"
gethostname = "1.0.2"
hex = "0.4.3"
@@ -15,9 +15,6 @@ http = "1.3.1"
log = "0.4.28"
md5 = "0.8.0"
reqwest = "0.12.23"
reqwest-websocket = "0.5.1"
serde = "1.0.228"
serde_with = "3.15.0"
serde = { version = "1.0.220", features = ["derive"] }
tauri = "2.8.5"
url = "2.5.7"
utils = { version = "0.1.0", path = "../utils" }

src-tauri/drop-remote/src/auth.rs (new file, 156 lines)
@@ -0,0 +1,156 @@
use std::{collections::HashMap, env, sync::Mutex};

use chrono::Utc;
use drop_errors::{drop_server_error::ServerError, remote_access_error::RemoteAccessError};
use droplet_rs::ssl::sign_nonce;
use gethostname::gethostname;
use log::{debug, error, warn};
use serde::{Deserialize, Serialize};
use url::Url;

use crate::{
    requests::make_authenticated_get, utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC}, DropRemoteAuth, DropRemoteContext
};

use super::requests::generate_url;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CapabilityConfiguration {}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct InitiateRequestBody {
    name: String,
    platform: String,
    capabilities: HashMap<String, CapabilityConfiguration>,
    mode: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeRequestBody {
    client_id: String,
    token: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeResponse {
    private: String,
    certificate: String,
    id: String,
}

pub fn generate_authorization_header(context: &DropRemoteContext) -> String {
    let auth = if let Some(auth) = &context.auth {
        auth
    } else {
        return "".to_owned();
    };
    let nonce = Utc::now().timestamp_millis().to_string();

    let signature = sign_nonce(auth.private.clone(), nonce.clone()).unwrap();

    format!("Nonce {} {} {}", auth.client_id, nonce, signature)
}

pub async fn fetch_user(context: &DropRemoteContext) -> Result<Vec<u8>, RemoteAccessError> {
    let response =
        make_authenticated_get(context, generate_url(context, &["/api/v1/client/user"], &[])?).await?;
    if response.status() != 200 {
        let err: ServerError = response.json().await?;
        warn!("{err:?}");

        if err.status_message == "Nonce expired" {
            return Err(RemoteAccessError::OutOfSync);
        }

        return Err(RemoteAccessError::InvalidResponse(err));
    }

    response
        .bytes()
        .await
        .map_err(std::convert::Into::into)
        .map(|v| v.to_vec())
}

pub async fn recieve_handshake_logic(
    context: &mut DropRemoteContext,
    path: String,
) -> Result<(), RemoteAccessError> {
    let path_chunks: Vec<&str> = path.split('/').collect();
    if path_chunks.len() != 3 {
        // app.emit("auth/failed", ()).unwrap();
        return Err(RemoteAccessError::HandshakeFailed(
            "failed to parse token".to_string(),
        ));
    }

    let client_id = path_chunks.get(1).unwrap();
    let token = path_chunks.get(2).unwrap();
    let body = HandshakeRequestBody {
        client_id: (*client_id).to_string(),
        token: (*token).to_string(),
    };

    let endpoint = generate_url(context, &["/api/v1/client/auth/handshake"], &[])?;
    let client = DROP_CLIENT_ASYNC.clone();
    let response = client.post(endpoint).json(&body).send().await?;
    debug!("handshake responsded with {}", response.status().as_u16());
    if !response.status().is_success() {
        return Err(RemoteAccessError::InvalidResponse(response.json().await?));
    }
    let response_struct: HandshakeResponse = response.json().await?;

    let web_token = {
        let header = generate_authorization_header(context);
        let token = client
            .post(generate_url(context, &["/api/v1/client/user/webtoken"], &[])?)
            .header("Authorization", header)
            .send()
            .await
            .unwrap();

        token.text().await.unwrap()
    };

    context.auth = Some(DropRemoteAuth {
        private: response_struct.private,
        cert: response_struct.certificate,
        client_id: response_struct.id,
        web_token: web_token,
    });

    Ok(())
}

pub fn auth_initiate_logic(context: &DropRemoteContext, mode: String) -> Result<String, RemoteAccessError> {
    let hostname = gethostname();

    let endpoint = generate_url(context, &["/api/v1/client/auth/initiate"], &[])?;
    let body = InitiateRequestBody {
        name: format!("{} (Desktop)", hostname.into_string().unwrap()),
        platform: env::consts::OS.to_string(),
        capabilities: HashMap::from([
            ("peerAPI".to_owned(), CapabilityConfiguration {}),
            ("cloudSaves".to_owned(), CapabilityConfiguration {}),
        ]),
        mode,
    };

    let client = DROP_CLIENT_SYNC.clone();
    let response = client.post(endpoint.to_string()).json(&body).send()?;

    if response.status() != 200 {
        let data: ServerError = response.json()?;
        error!("could not start handshake: {}", data.status_message);

        return Err(RemoteAccessError::HandshakeFailed(data.status_message));
    }

    let response = response.text()?;

    Ok(response)
}

@@ -6,18 +6,17 @@ use std::{
};

use bitcode::{Decode, DecodeOwned, Encode};
use database::{Database, borrow_db_checked};
use drop_consts::CACHE_DIR;
use drop_errors::remote_access_error::RemoteAccessError;
use http::{Response, header::CONTENT_TYPE, response::Builder as ResponseBuilder};

use crate::error::{CacheError, RemoteAccessError};

#[macro_export]
macro_rules! offline {
    ($var:expr, $func1:expr, $func2:expr, $( $arg:expr ),* ) => {

        async move {
            if ::database::borrow_db_checked().settings.force_offline
                || ::utils::lock!($var).status == ::client::app_status::AppStatus::Offline {
            // TODO add offline mode back
            // || $var.lock().unwrap().status == AppStatus::Offline
        async move { if drop_database::borrow_db_checked().settings.force_offline {
            $func2( $( $arg ), *).await
        } else {
            $func1( $( $arg ), *).await
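
The `offline!` macro above selects between an online and an offline implementation at the call site, based on the database's `force_offline` flag (and, in the older version, the app status). As a rough, runnable analogue of how such a macro is invoked; the flag check and the fetch functions here are invented for the example, and the real macro wraps the call in `async move` rather than running synchronously:

```rust
// Simplified stand-in for the real force-offline check in the database settings.
fn force_offline() -> bool {
    std::env::var("DROP_FORCE_OFFLINE").is_ok()
}

// Synchronous analogue of `offline!`: pick one of two implementations and
// forward the remaining arguments to whichever one is chosen.
macro_rules! offline {
    ($func_online:expr, $func_offline:expr, $( $arg:expr ),*) => {
        if force_offline() {
            $func_offline( $( $arg ),* )
        } else {
            $func_online( $( $arg ),* )
        }
    };
}

fn fetch_library_online(user: &str) -> Vec<String> {
    vec![format!("{user}: fetched from server")]
}

fn fetch_library_offline(user: &str) -> Vec<String> {
    vec![format!("{user}: loaded from local cache")]
}

fn main() {
    let library = offline!(fetch_library_online, fetch_library_offline, "alice");
    println!("{library:?}");
}
```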
@@ -58,33 +57,33 @@ fn delete_sync(base: &Path, key: &str) -> io::Result<()> {
}

pub fn cache_object<D: Encode>(key: &str, data: &D) -> Result<(), RemoteAccessError> {
    cache_object_db(key, data, &borrow_db_checked())
    cache_object_db(key, data)
}
pub fn cache_object_db<D: Encode>(
    key: &str,
    data: &D,
    database: &Database,
) -> Result<(), RemoteAccessError> {
    let bytes = bitcode::encode(data);
    write_sync(&database.cache_dir, key, bytes).map_err(RemoteAccessError::Cache)
    write_sync(&CACHE_DIR, key, bytes).map_err(RemoteAccessError::Cache)
}
pub fn get_cached_object<D: Encode + DecodeOwned>(key: &str) -> Result<D, RemoteAccessError> {
    get_cached_object_db::<D>(key, &borrow_db_checked())
    get_cached_object_db::<D>(key)
}
pub fn get_cached_object_db<D: DecodeOwned>(
    key: &str,
    db: &Database,
) -> Result<D, RemoteAccessError> {
    let bytes = read_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
    let bytes = read_sync(&CACHE_DIR, key).map_err(RemoteAccessError::Cache)?;
    let data =
        bitcode::decode::<D>(&bytes).map_err(|e| RemoteAccessError::Cache(io::Error::other(e)))?;
    Ok(data)
}
pub fn clear_cached_object(key: &str) -> Result<(), RemoteAccessError> {
    clear_cached_object_db(key, &borrow_db_checked())
    clear_cached_object_db(key)
}
pub fn clear_cached_object_db(key: &str, db: &Database) -> Result<(), RemoteAccessError> {
    delete_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
pub fn clear_cached_object_db(
    key: &str,
) -> Result<(), RemoteAccessError> {
    delete_sync(&CACHE_DIR, key).map_err(RemoteAccessError::Cache)?;
    Ok(())
}
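
The cache helpers above are essentially "bitcode-encode to a file keyed by a string", now rooted at a global `CACHE_DIR` instead of the database's `cache_dir`. A minimal, self-contained round trip of the same idea; the `User` struct and the temp directory are illustrative, and the real code wraps failures in `RemoteAccessError` rather than `io::Error`:

```rust
use std::{fs, io, path::Path};

use bitcode::{Decode, DecodeOwned, Encode};

#[derive(Encode, Decode, Debug, PartialEq)]
struct User {
    id: String,
    display_name: String,
}

// Serialize the value with bitcode and write it under `dir/key`.
fn cache_object<T: Encode>(dir: &Path, key: &str, value: &T) -> io::Result<()> {
    fs::create_dir_all(dir)?;
    fs::write(dir.join(key), bitcode::encode(value))
}

// Read `dir/key` back and decode it into the requested type.
fn get_cached_object<T: DecodeOwned>(dir: &Path, key: &str) -> io::Result<T> {
    let bytes = fs::read(dir.join(key))?;
    bitcode::decode(&bytes).map_err(|e| io::Error::other(e))
}

fn main() -> io::Result<()> {
    let dir = std::env::temp_dir().join("drop-cache-demo");
    let user = User { id: "u1".into(), display_name: "Alice".into() };

    cache_object(&dir, "user", &user)?;
    let restored: User = get_cached_object(&dir, "user")?;
    assert_eq!(user, restored);
    Ok(())
}
```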
@@ -102,39 +101,30 @@ impl ObjectCache {
    }
}

impl TryFrom<Response<Vec<u8>>> for ObjectCache {
    type Error = CacheError;

    fn try_from(value: Response<Vec<u8>>) -> Result<Self, Self::Error> {
        Ok(ObjectCache {
impl From<Response<Vec<u8>>> for ObjectCache {
    fn from(value: Response<Vec<u8>>) -> Self {
        ObjectCache {
            content_type: value
                .headers()
                .get(CONTENT_TYPE)
                .ok_or(CacheError::HeaderNotFound(CONTENT_TYPE))?
                .unwrap()
                .to_str()
                .map_err(CacheError::ParseError)?
                .unwrap()
                .to_owned(),
            body: value.body().clone(),
            expiry: get_sys_time_in_secs() + 60 * 60 * 24,
        })
        }
    }
}
impl TryFrom<ObjectCache> for Response<Vec<u8>> {
    type Error = CacheError;
    fn try_from(value: ObjectCache) -> Result<Self, Self::Error> {
impl From<ObjectCache> for Response<Vec<u8>> {
    fn from(value: ObjectCache) -> Self {
        let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type);
        resp_builder
            .body(value.body)
            .map_err(CacheError::ConstructionError)
        resp_builder.body(value.body).unwrap()
    }
}
impl TryFrom<&ObjectCache> for Response<Vec<u8>> {
    type Error = CacheError;

    fn try_from(value: &ObjectCache) -> Result<Self, Self::Error> {
impl From<&ObjectCache> for Response<Vec<u8>> {
    fn from(value: &ObjectCache) -> Self {
        let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type.clone());
        resp_builder
            .body(value.body.clone())
            .map_err(CacheError::ConstructionError)
        resp_builder.body(value.body.clone()).unwrap()
    }
}

src-tauri/drop-remote/src/fetch_object.rs (new file, 52 lines)
@@ -0,0 +1,52 @@
use http::{header::CONTENT_TYPE, response::Builder as ResponseBuilder, Request};
use log::warn;
use tauri::UriSchemeResponder;

use crate::{requests::generate_url, utils::DROP_CLIENT_ASYNC, DropRemoteContext};

use super::{
    auth::generate_authorization_header,
    cache::{ObjectCache, cache_object, get_cached_object},
};

pub async fn fetch_object(context: &DropRemoteContext, request: Request<Vec<u8>>, responder: UriSchemeResponder) {
    // Drop leading /
    let object_id = &request.uri().path()[1..];

    let cache_result = get_cached_object::<ObjectCache>(object_id);
    if let Ok(cache_result) = &cache_result
        && !cache_result.has_expired()
    {
        responder.respond(cache_result.into());
        return;
    }

    let header = generate_authorization_header(context);
    let client = DROP_CLIENT_ASYNC.clone();
    let url = generate_url(context, &["/api/v1/client/object", object_id], &[]).expect("failed to generated object url");
    let response = client.get(url).header("Authorization", header).send().await;

    if response.is_err() {
        match cache_result {
            Ok(cache_result) => responder.respond(cache_result.into()),
            Err(e) => {
                warn!("{e}");
            }
        }
        return;
    }
    let response = response.unwrap();

    let resp_builder = ResponseBuilder::new().header(
        CONTENT_TYPE,
        response.headers().get("Content-Type").unwrap(),
    );
    let data = Vec::from(response.bytes().await.unwrap());
    let resp = resp_builder.body(data).unwrap();
    if cache_result.is_err() || cache_result.unwrap().has_expired() {
        cache_object::<ObjectCache>(object_id, &resp.clone().into()).unwrap();
    }

    responder.respond(resp);
}

src-tauri/drop-remote/src/lib.rs (new file, 29 lines)
@@ -0,0 +1,29 @@
use serde::{Deserialize, Serialize};
use url::Url;

pub mod auth;
pub mod cache;
pub mod fetch_object;
pub mod requests;
pub mod utils;

#[derive(Serialize, Deserialize, Clone)]
struct DropRemoteAuth {
    private: String,
    cert: String,
    client_id: String,
    web_token: String,
}

#[derive(Serialize, Deserialize, Clone)]
pub struct DropRemoteContext {
    base_url: Url,
    auth: Option<DropRemoteAuth>,
}

impl DropRemoteContext {
    pub fn new(base_url: Url) -> Self {
        DropRemoteContext { base_url, auth: None }
    }
}

@@ -1,15 +1,14 @@
use database::{interface::DatabaseImpls, DB};
use drop_errors::remote_access_error::RemoteAccessError;
use url::Url;

use crate::{
    auth::generate_authorization_header, error::RemoteAccessError, utils::DROP_CLIENT_ASYNC,
};
use crate::{auth::generate_authorization_header, utils::DROP_CLIENT_ASYNC, DropRemoteContext};

pub fn generate_url<T: AsRef<str>>(
    context: &DropRemoteContext,
    path_components: &[T],
    query: &[(T, T)],
) -> Result<Url, RemoteAccessError> {
    let mut base_url = DB.fetch_base_url();
    let mut base_url = context.base_url.clone();
    for endpoint in path_components {
        base_url = base_url.join(endpoint.as_ref())?;
    }
@@ -22,10 +21,10 @@ pub fn generate_url<T: AsRef<str>>(
    Ok(base_url)
}

pub async fn make_authenticated_get(url: Url) -> Result<reqwest::Response, reqwest::Error> {
pub async fn make_authenticated_get(context: &DropRemoteContext, url: Url) -> Result<reqwest::Response, reqwest::Error> {
    DROP_CLIENT_ASYNC
        .get(url)
        .header("Authorization", generate_authorization_header())
        .header("Authorization", generate_authorization_header(context))
        .send()
        .await
}
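
`generate_url` now builds requests off the context's `base_url` instead of the global `DB`. As a rough sketch of what that joining-plus-query step does, assuming a placeholder server address and a made-up `build_url` helper:

```rust
use url::Url;

// Join path components onto a base URL and append query pairs,
// mirroring the shape of `generate_url` above.
fn build_url(
    base: &Url,
    path_components: &[&str],
    query: &[(&str, &str)],
) -> Result<Url, url::ParseError> {
    let mut url = base.clone();
    for component in path_components {
        url = url.join(component)?;
    }
    if !query.is_empty() {
        let mut pairs = url.query_pairs_mut();
        for (key, value) in query {
            pairs.append_pair(key, value);
        }
    }
    Ok(url)
}

fn main() -> Result<(), url::ParseError> {
    let base = Url::parse("https://drop.example.com/")?;
    let url = build_url(&base, &["/api/v1/client/game/versions"], &[("id", "some-game-id")])?;
    // Prints: https://drop.example.com/api/v1/client/game/versions?id=some-game-id
    println!("{url}");
    Ok(())
}
```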
src-tauri/drop-remote/src/utils.rs (new file, 71 lines)
@@ -0,0 +1,71 @@
use std::{
    fs::{self, File},
    io::Read,
    sync::LazyLock,
};

use drop_consts::DATA_ROOT_DIR;
use log::{debug, info};
use reqwest::Certificate;

static DROP_CERT_BUNDLE: LazyLock<Vec<Certificate>> = LazyLock::new(fetch_certificates);
pub static DROP_CLIENT_SYNC: LazyLock<reqwest::blocking::Client> = LazyLock::new(get_client_sync);
pub static DROP_CLIENT_ASYNC: LazyLock<reqwest::Client> = LazyLock::new(get_client_async);
pub static DROP_CLIENT_WS_CLIENT: LazyLock<reqwest::Client> = LazyLock::new(get_client_ws);

fn fetch_certificates() -> Vec<Certificate> {
    let certificate_dir = DATA_ROOT_DIR.join("certificates");

    let mut certs = Vec::new();
    match fs::read_dir(certificate_dir) {
        Ok(c) => {
            for entry in c {
                match entry {
                    Ok(c) => {
                        let mut buf = Vec::new();
                        File::open(c.path()).unwrap().read_to_end(&mut buf).unwrap();

                        for cert in Certificate::from_pem_bundle(&buf).unwrap() {
                            certs.push(cert);
                        }
                        info!(
                            "added {} certificate(s) from {}",
                            certs.len(),
                            c.file_name().into_string().unwrap()
                        );
                    }
                    Err(_) => todo!(),
                }
            }
        }
        Err(e) => {
            debug!("not loading certificates due to error: {e}");
        }
    };
    certs
}

pub fn get_client_sync() -> reqwest::blocking::Client {
    let mut client = reqwest::blocking::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client.use_rustls_tls().build().unwrap()
}
pub fn get_client_async() -> reqwest::Client {
    let mut client = reqwest::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client.use_rustls_tls().build().unwrap()
}
pub fn get_client_ws() -> reqwest::Client {
    let mut client = reqwest::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client.use_rustls_tls().http1_only().build().unwrap()
}

src-tauri/src/auth.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
use std::sync::Mutex;

use drop_database::{borrow_db_checked, runtime_models::User};
use drop_errors::remote_access_error::RemoteAccessError;
use drop_remote::{auth::{fetch_user, recieve_handshake_logic}, cache::{cache_object, clear_cached_object, get_cached_object}};
use log::warn;
use tauri::{AppHandle, Emitter as _, Manager as _};

use crate::{AppState, AppStatus};

pub async fn setup() -> (AppStatus, Option<User>) {
    let auth = {
        let data = borrow_db_checked();
        data.auth.clone()
    };

    if auth.is_some() {
        let user_result = match fetch_user().await {
            Ok(data) => data,
            Err(RemoteAccessError::FetchError(_)) => {
                let user = get_cached_object::<User>("user").unwrap();
                return (AppStatus::Offline, Some(user));
            }
            Err(_) => return (AppStatus::SignedInNeedsReauth, None),
        };
        cache_object("user", &user_result).unwrap();
        return (AppStatus::SignedIn, Some(user_result));
    }

    (AppStatus::SignedOut, None)
}

pub async fn recieve_handshake(app: AppHandle, path: String) {
    // Tell the app we're processing
    app.emit("auth/processing", ()).unwrap();

    let handshake_result = recieve_handshake_logic(path).await;
    if let Err(e) = handshake_result {
        warn!("error with authentication: {e}");
        app.emit("auth/failed", e.to_string()).unwrap();
        return;
    }

    let app_state = app.state::<Mutex<AppState>>();

    let (app_status, user) = setup().await;

    let mut state_lock = app_state.lock().unwrap();

    state_lock.status = app_status;
    state_lock.user = user;

    let _ = clear_cached_object("collections");
    let _ = clear_cached_object("library");

    drop(state_lock);

    app.emit("auth/finished", ()).unwrap();
}

@@ -1,42 +1,9 @@
use database::{borrow_db_checked, borrow_db_mut_checked};
use log::{debug, error};
use drop_database::{borrow_db_checked, borrow_db_mut_checked};
use log::debug;
use tauri::AppHandle;
use tauri_plugin_autostart::ManagerExt;
use utils::lock;

use crate::{AppState};

#[tauri::command]
pub fn fetch_state(
    state: tauri::State<'_, std::sync::Mutex<AppState<'_>>>,
) -> Result<String, String> {
    let guard = lock!(state);
    let cloned_state = serde_json::to_string(&guard.clone()).map_err(|e| e.to_string())?;
    drop(guard);
    Ok(cloned_state)
}

#[tauri::command]
pub fn quit(app: tauri::AppHandle, state: tauri::State<'_, std::sync::Mutex<AppState<'_>>>) {
    cleanup_and_exit(&app, &state);
}

pub fn cleanup_and_exit(app: &AppHandle, state: &tauri::State<'_, std::sync::Mutex<AppState<'_>>>) {
    debug!("cleaning up and exiting application");
    let download_manager = lock!(state).download_manager.clone();
    match download_manager.ensure_terminated() {
        Ok(res) => match res {
            Ok(()) => debug!("download manager terminated correctly"),
            Err(()) => error!("download manager failed to terminate correctly"),
        },
        Err(e) => panic!("{e:?}"),
    }

    app.exit(0);
}

#[tauri::command]
pub fn toggle_autostart(app: AppHandle, enabled: bool) -> Result<(), String> {
pub fn toggle_autostart_logic(app: AppHandle, enabled: bool) -> Result<(), String> {
    let manager = app.autolaunch();
    if enabled {
        manager.enable().map_err(|e| e.to_string())?;
@@ -49,11 +16,13 @@ pub fn toggle_autostart(app: AppHandle, enabled: bool) -> Result<(), String> {
    // Store the state in DB
    let mut db_handle = borrow_db_mut_checked();
    db_handle.settings.autostart = enabled;
    drop(db_handle);

    Ok(())
}

#[tauri::command]
pub fn get_autostart_enabled(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
pub fn get_autostart_enabled_logic(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
    // First check DB state
    let db_handle = borrow_db_checked();
    let db_state = db_handle.settings.autostart;
    drop(db_handle);
@@ -73,3 +42,34 @@ pub fn get_autostart_enabled(app: AppHandle) -> Result<bool, tauri_plugin_autost
    Ok(db_state)
}

// New function to sync state on startup
pub fn sync_autostart_on_startup(app: &AppHandle) -> Result<(), String> {
    let db_handle = borrow_db_checked();
    let should_be_enabled = db_handle.settings.autostart;
    drop(db_handle);

    let manager = app.autolaunch();
    let current_state = manager.is_enabled().map_err(|e| e.to_string())?;

    if current_state != should_be_enabled {
        if should_be_enabled {
            manager.enable().map_err(|e| e.to_string())?;
            debug!("synced autostart: enabled");
        } else {
            manager.disable().map_err(|e| e.to_string())?;
            debug!("synced autostart: disabled");
        }
    }

    Ok(())
}
#[tauri::command]
pub fn toggle_autostart(app: AppHandle, enabled: bool) -> Result<(), String> {
    toggle_autostart_logic(app, enabled)
}

#[tauri::command]
pub fn get_autostart_enabled(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
    get_autostart_enabled_logic(app)
}
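
These `#[tauri::command]` wrappers still have to be registered on the app builder to be callable from the frontend. A hypothetical, minimal registration skeleton; the `ping` command and the bare builder are illustrative, while the real app registers the commands from this diff and much more:

```rust
// Minimal Tauri entry point showing how commands are wired up.
#[tauri::command]
fn ping() -> String {
    "pong".to_owned()
}

fn main() {
    tauri::Builder::default()
        .invoke_handler(tauri::generate_handler![ping])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
```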
src-tauri/src/client/cleanup.rs (new file, 23 lines)
@@ -0,0 +1,23 @@
use log::{debug, error};
use tauri::AppHandle;

use crate::AppState;

#[tauri::command]
pub fn quit(app: tauri::AppHandle, state: tauri::State<'_, std::sync::Mutex<AppState>>) {
    cleanup_and_exit(&app, &state);
}

pub fn cleanup_and_exit(app: &AppHandle, state: &tauri::State<'_, std::sync::Mutex<AppState>>) {
    debug!("cleaning up and exiting application");
    let download_manager = state.lock().unwrap().download_manager.clone();
    match download_manager.ensure_terminated() {
        Ok(res) => match res {
            Ok(()) => debug!("download manager terminated correctly"),
            Err(()) => error!("download manager failed to terminate correctly"),
        },
        Err(e) => panic!("{e:?}"),
    }

    app.exit(0);
}

src-tauri/src/client/commands.rs (new file, 11 lines)
@@ -0,0 +1,11 @@
use crate::AppState;

#[tauri::command]
pub fn fetch_state(
    state: tauri::State<'_, std::sync::Mutex<AppState>>,
) -> Result<String, String> {
    let guard = state.lock().unwrap();
    let cloned_state = serde_json::to_string(&guard.clone()).map_err(|e| e.to_string())?;
    drop(guard);
    Ok(cloned_state)
}

src-tauri/src/client/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
pub mod autostart;
pub mod cleanup;
pub mod commands;

@@ -1,11 +1,8 @@
use std::{collections::HashMap, path::PathBuf, str::FromStr};

#[cfg(target_os = "linux")]
use database::platform::Platform;
use database::{db::DATA_ROOT_DIR, GameVersion};
use log::warn;

use crate::error::BackupError;
use crate::{database::db::{GameVersion, DATA_ROOT_DIR}, error::backup_error::BackupError, process::process_manager::Platform};

use super::path::CommonPath;

@@ -48,7 +45,7 @@ impl BackupManager<'_> {
}

pub trait BackupHandler: Send + Sync {
    fn root_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(DATA_ROOT_DIR.join("games")) }
    fn root_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(DATA_ROOT_DIR.lock().unwrap().join("games")) }
    fn game_translate(&self, _path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(PathBuf::from_str(&game.game_id).unwrap()) }
    fn base_translate(&self, path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(self.root_translate(path, game)?.join(self.game_translate(path, game)?)) }
    fn home_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { let c = CommonPath::Home.get().ok_or(BackupError::NotFound); println!("{:?}", c); c }

@@ -1,4 +1,4 @@
use database::platform::Platform;
use crate::process::process_manager::Platform;

#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub enum Condition {

@@ -1,4 +1,4 @@
use database::GameVersion;
use crate::database::db::GameVersion;

use super::conditions::{Condition};

@@ -1,7 +1,7 @@
use std::sync::LazyLock;

use database::platform::Platform;
use regex::Regex;
use crate::process::process_manager::Platform;

use super::placeholder::*;
Some files were not shown because too many files have changed in this diff.