Compare commits

...

3 Commits

Author SHA1 Message Date
31f7d5bcfa Merge branch 'develop' into bigpicturemode 2025-10-14 17:23:52 +11:00
87bbe1da49 156 refactor into workspaces (#157)
* chore: Major refactoring

Still needs a massive go-over, because nothing should reference tauri in any of the workspace crates except the original one. The process manager has been refactored as an example (see the sketch after the commit list).

Signed-off-by: quexeky <git@quexeky.dev>

* fix: Remove tauri dependency from process

Signed-off-by: quexeky <git@quexeky.dev>

* refactor: Improvements to src-tauri

Signed-off-by: quexeky <git@quexeky.dev>

* refactor: Builds, but some logic still left to move back

Signed-off-by: quexeky <git@quexeky.dev>

* refactor: Finish refactor

Signed-off-by: quexeky <git@quexeky.dev>

* chore: Run cargo clippy && cargo fmt

Signed-off-by: quexeky <git@quexeky.dev>

* refactor: Move everything into src-tauri

Signed-off-by: quexeky <git@quexeky.dev>

---------

Signed-off-by: quexeky <git@quexeky.dev>
2025-10-14 17:12:51 +11:00
cc57ca7076 139 add and resolve clippy lints to prevent unwrap and expect functions (#154)
* fix: Add lint and remove all unwraps from lib.rs

Signed-off-by: quexeky <git@quexeky.dev>

* chore: Remove all unwraps from util.rs and add state_lock macro

Signed-off-by: quexeky <git@quexeky.dev>

* chore: Add CacheError and remove unwraps from fetch_object

Signed-off-by: quexeky <git@quexeky.dev>

* chore: Remove unwraps from fetch_object and server_proto

Signed-off-by: quexeky <git@quexeky.dev>

* chore: Remove unwraps from auth.rs

Signed-off-by: quexeky <git@quexeky.dev>

* chore: Remove unwraps from process_handlers

Signed-off-by: quexeky <git@quexeky.dev>

* chore: Clippy unwrap linting

Signed-off-by: quexeky <git@quexeky.dev>

* chore: Remove lint

Not everything is actually resolved yet; the rest will be resolved with a restructure of the library.

Signed-off-by: quexeky <git@quexeky.dev>

* chore: Make the rest of clippy happy

Signed-off-by: quexeky <git@quexeky.dev>

* fix: Send download signal instead of triggering self.on_error

Signed-off-by: quexeky <git@quexeky.dev>

* fix: Corrupted state should panic

Signed-off-by: quexeky <git@quexeky.dev>

* fix: Use debug instead of display for specific errors

Signed-off-by: quexeky <git@quexeky.dev>

* fix: Settings now log error instead of panicking

Signed-off-by: quexeky <git@quexeky.dev>

---------

Signed-off-by: quexeky <git@quexeky.dev>
2025-10-08 16:17:24 +11:00
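For the workspace-refactor commit above, the stated goal is that nothing except the original src-tauri crate references tauri. A minimal sketch of that split, with hypothetical names (the real process-crate API is not shown in this diff):

// Stand-in for a workspace crate such as `process`: plain Rust, no tauri dependency.
pub mod process_api {
    pub fn running_game_ids() -> Vec<String> {
        // Placeholder body; the real crate tracks spawned game processes.
        Vec::new()
    }
}

// In src-tauri, the only crate that depends on tauri, the plain function is
// wrapped as a command the frontend can invoke.
#[tauri::command]
fn running_game_ids() -> Vec<String> {
    process_api::running_game_ids()
}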
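One of the clippy-cleanup commits above adds a state_lock macro, and the hunks below use lock!, send! and app_emit! helpers from the new utils crate (e.g. *lock!(self.status) = status), but their definitions are not part of this diff. A minimal sketch of what a lock-style helper could look like, assuming it only centralises poisoned-mutex handling (the download_manager crate also enables nonpoison lock features, so the real helper may differ):

// Assumed shape only; the real utils::lock! implementation is not shown in this diff.
macro_rules! lock {
    ($mutex:expr) => {
        $mutex
            .lock()
            .unwrap_or_else(|e| panic!("failed to acquire lock: {e}"))
    };
}

// Usage mirroring the call sites in the hunks below:
// *lock!(self.status) = status;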
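The last few fixes above set out an error-handling policy: genuinely corrupted state panics with context, specific errors are reported with their Debug form, and recoverable settings failures are only logged. A small sketch of those patterns with hypothetical function names (the concrete call sites are in the database and error hunks below):

use std::fs;
use std::path::Path;

use log::error;

// Corrupted or unusable state: keep the context and panic instead of a bare unwrap().
fn ensure_dir(path: &Path) {
    fs::create_dir_all(path).unwrap_or_else(|e| {
        panic!("Failed to create directory {} with error {e}", path.display())
    });
}

// Recoverable failure (e.g. a settings update): log it and carry on.
fn apply_settings(result: Result<(), String>) {
    if let Err(e) = result {
        // Debug formatting ({:?}) keeps the full structure of richer error types.
        error!("failed to update settings: {e:?}");
    }
}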
109 changed files with 14593 additions and 3252 deletions

4
.gitignore vendored
View File

@ -29,4 +29,6 @@ src-tauri/flamegraph.svg
src-tauri/perf*
/*.AppImage
/squashfs-root
/squashfs-root
/target/

View File

@ -14,7 +14,8 @@
"@tauri-apps/plugin-os": "^2.3.0",
"@tauri-apps/plugin-shell": "^2.3.0",
"pino": "^9.7.0",
"pino-pretty": "^13.1.1"
"pino-pretty": "^13.1.1",
"tauri": "^0.15.0"
},
"devDependencies": {
"@tauri-apps/cli": "^2.7.1"

2381
src-tauri/Cargo.lock generated

File diff suppressed because it is too large

View File

@ -78,6 +78,16 @@ futures-core = "0.3.31"
bytes = "1.10.1"
# tailscale = { path = "./tailscale" }
# Workspaces
client = { version = "0.1.0", path = "./client" }
database = { path = "./database" }
process = { path = "./process" }
remote = { version = "0.1.0", path = "./remote" }
utils = { path = "./utils" }
games = { version = "0.1.0", path = "./games" }
download_manager = { version = "0.1.0", path = "./download_manager" }
[dependencies.dynfmt]
version = "0.1.5"
features = ["curly"]
@ -127,3 +137,18 @@ features = ["derive", "rc"]
lto = true
codegen-units = 1
panic = 'abort'
[workspace]
members = [
"client",
"database",
"process",
"remote",
"utils",
"cloud_saves",
"download_manager",
"games",
]
resolver = "3"

4862
src-tauri/client/Cargo.lock generated Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,12 @@
[package]
name = "client"
version = "0.1.0"
edition = "2024"
[dependencies]
bitcode = "0.6.7"
database = { version = "0.1.0", path = "../database" }
log = "0.4.28"
serde = { version = "1.0.228", features = ["derive"] }
tauri = "2.8.5"
tauri-plugin-autostart = "2.5.0"

View File

@ -0,0 +1,12 @@
use serde::Serialize;
#[derive(Clone, Copy, Serialize, Eq, PartialEq)]
pub enum AppStatus {
NotConfigured,
Offline,
ServerError,
SignedOut,
SignedIn,
SignedInNeedsReauth,
ServerUnavailable,
}

View File

@ -0,0 +1,26 @@
use database::borrow_db_checked;
use log::debug;
use tauri::AppHandle;
use tauri_plugin_autostart::ManagerExt;
// New function to sync state on startup
pub fn sync_autostart_on_startup(app: &AppHandle) -> Result<(), String> {
let db_handle = borrow_db_checked();
let should_be_enabled = db_handle.settings.autostart;
drop(db_handle);
let manager = app.autolaunch();
let current_state = manager.is_enabled().map_err(|e| e.to_string())?;
if current_state != should_be_enabled {
if should_be_enabled {
manager.enable().map_err(|e| e.to_string())?;
debug!("synced autostart: enabled");
} else {
manager.disable().map_err(|e| e.to_string())?;
debug!("synced autostart: disabled");
}
}
Ok(())
}

View File

@ -0,0 +1,52 @@
use std::{
ffi::OsStr,
path::PathBuf,
process::{Command, Stdio},
sync::LazyLock,
};
use log::info;
pub static COMPAT_INFO: LazyLock<Option<CompatInfo>> = LazyLock::new(create_new_compat_info);
pub static UMU_LAUNCHER_EXECUTABLE: LazyLock<Option<PathBuf>> = LazyLock::new(|| {
let x = get_umu_executable();
info!("{:?}", &x);
x
});
#[derive(Clone)]
pub struct CompatInfo {
pub umu_installed: bool,
}
fn create_new_compat_info() -> Option<CompatInfo> {
#[cfg(target_os = "windows")]
return None;
let has_umu_installed = UMU_LAUNCHER_EXECUTABLE.is_some();
Some(CompatInfo {
umu_installed: has_umu_installed,
})
}
const UMU_BASE_LAUNCHER_EXECUTABLE: &str = "umu-run";
const UMU_INSTALL_DIRS: [&str; 4] = ["/app/share", "/usr/local/share", "/usr/share", "/opt"];
fn get_umu_executable() -> Option<PathBuf> {
if check_executable_exists(UMU_BASE_LAUNCHER_EXECUTABLE) {
return Some(PathBuf::from(UMU_BASE_LAUNCHER_EXECUTABLE));
}
for dir in UMU_INSTALL_DIRS {
let p = PathBuf::from(dir).join(UMU_BASE_LAUNCHER_EXECUTABLE);
if check_executable_exists(&p) {
return Some(p);
}
}
None
}
fn check_executable_exists<P: AsRef<OsStr>>(exec: P) -> bool {
let has_umu_installed = Command::new(exec).stdout(Stdio::null()).output();
has_umu_installed.is_ok()
}

View File

@ -0,0 +1,4 @@
pub mod app_status;
pub mod autostart;
pub mod compat;
pub mod user;

View File

@ -0,0 +1,12 @@
use bitcode::{Decode, Encode};
use serde::{Deserialize, Serialize};
#[derive(Clone, Serialize, Deserialize, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct User {
id: String,
username: String,
admin: bool,
display_name: String,
profile_picture_object_id: String,
}

View File

@ -0,0 +1,19 @@
[package]
name = "cloud_saves"
version = "0.1.0"
edition = "2024"
[dependencies]
database = { version = "0.1.0", path = "../database" }
dirs = "6.0.0"
log = "0.4.28"
regex = "1.11.3"
rustix = "1.1.2"
serde = "1.0.228"
serde_json = "1.0.145"
serde_with = "3.15.0"
tar = "0.4.44"
tempfile = "3.23.0"
uuid = "1.18.1"
whoami = "1.6.1"
zstd = "0.13.3"

View File

@ -0,0 +1,234 @@
use std::{collections::HashMap, path::PathBuf, str::FromStr};
#[cfg(target_os = "linux")]
use database::platform::Platform;
use database::{GameVersion, db::DATA_ROOT_DIR};
use log::warn;
use crate::error::BackupError;
use super::path::CommonPath;
pub struct BackupManager<'a> {
pub current_platform: Platform,
pub sources: HashMap<(Platform, Platform), &'a (dyn BackupHandler + Sync + Send)>,
}
impl Default for BackupManager<'_> {
fn default() -> Self {
Self::new()
}
}
impl BackupManager<'_> {
pub fn new() -> Self {
BackupManager {
#[cfg(target_os = "windows")]
current_platform: Platform::Windows,
#[cfg(target_os = "macos")]
current_platform: Platform::MacOs,
#[cfg(target_os = "linux")]
current_platform: Platform::Linux,
sources: HashMap::from([
// Current platform to target platform
(
(Platform::Windows, Platform::Windows),
&WindowsBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
(
(Platform::Linux, Platform::Linux),
&LinuxBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
(
(Platform::MacOs, Platform::MacOs),
&MacBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
]),
}
}
}
pub trait BackupHandler: Send + Sync {
fn root_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(DATA_ROOT_DIR.join("games"))
}
fn game_translate(&self, _path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str(&game.game_id).unwrap())
}
fn base_translate(&self, path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(self
.root_translate(path, game)?
.join(self.game_translate(path, game)?))
}
fn home_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
let c = CommonPath::Home.get().ok_or(BackupError::NotFound);
println!("{:?}", c);
c
}
fn store_user_id_translate(
&self,
_path: &PathBuf,
game: &GameVersion,
) -> Result<PathBuf, BackupError> {
PathBuf::from_str(&game.game_id).map_err(|_| BackupError::ParseError)
}
fn os_user_name_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str(&whoami::username()).unwrap())
}
fn win_app_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winAppData>");
Err(BackupError::InvalidSystem)
}
fn win_local_app_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winLocalAppData>");
Err(BackupError::InvalidSystem)
}
fn win_local_app_data_low_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winLocalAppDataLow>");
Err(BackupError::InvalidSystem)
}
fn win_documents_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winDocuments>");
Err(BackupError::InvalidSystem)
}
fn win_public_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winPublic>");
Err(BackupError::InvalidSystem)
}
fn win_program_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winProgramData>");
Err(BackupError::InvalidSystem)
}
fn win_dir_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winDir>");
Err(BackupError::InvalidSystem)
}
fn xdg_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected XDG Reference in Backup <xdgData>");
Err(BackupError::InvalidSystem)
}
fn xdg_config_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected XDG Reference in Backup <xdgConfig>");
Err(BackupError::InvalidSystem)
}
fn skip_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(PathBuf::new())
}
}
pub struct LinuxBackupManager {}
impl BackupHandler for LinuxBackupManager {
fn xdg_config_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::Data.get().ok_or(BackupError::NotFound)
}
fn xdg_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::Config.get().ok_or(BackupError::NotFound)
}
}
pub struct WindowsBackupManager {}
impl BackupHandler for WindowsBackupManager {
fn win_app_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::Config.get().ok_or(BackupError::NotFound)
}
fn win_local_app_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::DataLocal.get().ok_or(BackupError::NotFound)
}
fn win_local_app_data_low_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::DataLocalLow
.get()
.ok_or(BackupError::NotFound)
}
fn win_dir_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str("C:/Windows").unwrap())
}
fn win_documents_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::Document.get().ok_or(BackupError::NotFound)
}
fn win_program_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str("C:/ProgramData").unwrap())
}
fn win_public_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::Public.get().ok_or(BackupError::NotFound)
}
}
pub struct MacBackupManager {}
impl BackupHandler for MacBackupManager {}

View File

@ -1,6 +1,7 @@
use crate::process::process_manager::Platform;
use database::platform::Platform;
#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub enum Condition {
Os(Platform)
Os(Platform),
Other
}

View File

@ -0,0 +1,27 @@
use std::fmt::Display;
use serde_with::SerializeDisplay;
#[derive(Debug, SerializeDisplay, Clone, Copy)]
pub enum BackupError {
InvalidSystem,
NotFound,
ParseError,
}
impl Display for BackupError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
BackupError::InvalidSystem => "Attempted to generate path for invalid system",
BackupError::NotFound => "Could not generate or find path",
BackupError::ParseError => "Failed to parse path",
};
write!(f, "{}", s)
}
}

View File

@ -0,0 +1,8 @@
pub mod backup_manager;
pub mod conditions;
pub mod error;
pub mod metadata;
pub mod normalise;
pub mod path;
pub mod placeholder;
pub mod resolver;

View File

@ -1,7 +1,6 @@
use crate::database::db::GameVersion;
use super::conditions::{Condition};
use database::GameVersion;
use super::conditions::Condition;
#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct CloudSaveMetadata {
@ -16,15 +15,17 @@ pub struct GameFile {
pub id: Option<String>,
pub data_type: DataType,
pub tags: Vec<Tag>,
pub conditions: Vec<Condition>
pub conditions: Vec<Condition>,
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize)]
pub enum DataType {
Registry,
File,
Other
Other,
}
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize)]
#[derive(
Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize,
)]
#[serde(rename_all = "camelCase")]
pub enum Tag {
Config,
@ -32,4 +33,4 @@ pub enum Tag {
#[default]
#[serde(other)]
Other,
}
}

View File

@ -1,11 +1,10 @@
use std::sync::LazyLock;
use database::platform::Platform;
use regex::Regex;
use crate::process::process_manager::Platform;
use super::placeholder::*;
pub fn normalize(path: &str, os: Platform) -> String {
let mut path = path.trim().trim_end_matches(['/', '\\']).replace('\\', "/");
@ -14,18 +13,25 @@ pub fn normalize(path: &str, os: Platform) -> String {
}
static CONSECUTIVE_SLASHES: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"/{2,}").unwrap());
static UNNECESSARY_DOUBLE_STAR_1: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"([^/*])\*{2,}").unwrap());
static UNNECESSARY_DOUBLE_STAR_2: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\*{2,}([^/*])").unwrap());
static UNNECESSARY_DOUBLE_STAR_1: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"([^/*])\*{2,}").unwrap());
static UNNECESSARY_DOUBLE_STAR_2: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"\*{2,}([^/*])").unwrap());
static ENDING_WILDCARD: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(/\*)+$").unwrap());
static ENDING_DOT: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(/\.)$").unwrap());
static INTERMEDIATE_DOT: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(/\./)").unwrap());
static BLANK_SEGMENT: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(/\s+/)").unwrap());
static APP_DATA: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%appdata%").unwrap());
static APP_DATA_ROAMING: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%userprofile%/AppData/Roaming").unwrap());
static APP_DATA_LOCAL: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%localappdata%").unwrap());
static APP_DATA_LOCAL_2: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%userprofile%/AppData/Local/").unwrap());
static USER_PROFILE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%userprofile%").unwrap());
static DOCUMENTS: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%userprofile%/Documents").unwrap());
static APP_DATA_ROAMING: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(?i)%userprofile%/AppData/Roaming").unwrap());
static APP_DATA_LOCAL: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(?i)%localappdata%").unwrap());
static APP_DATA_LOCAL_2: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(?i)%userprofile%/AppData/Local/").unwrap());
static USER_PROFILE: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(?i)%userprofile%").unwrap());
static DOCUMENTS: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(?i)%userprofile%/Documents").unwrap());
for (pattern, replacement) in [
(&CONSECUTIVE_SLASHES, "/"),
@ -66,7 +72,9 @@ pub fn normalize(path: &str, os: Platform) -> String {
fn too_broad(path: &str) -> bool {
println!("Path: {}", path);
use {BASE, HOME, ROOT, STORE_USER_ID, WIN_APP_DATA, WIN_DIR, WIN_DOCUMENTS, XDG_CONFIG, XDG_DATA};
use {
BASE, HOME, ROOT, STORE_USER_ID, WIN_APP_DATA, WIN_DIR, WIN_DOCUMENTS, XDG_CONFIG, XDG_DATA,
};
let path_lower = path.to_lowercase();
@ -77,7 +85,9 @@ fn too_broad(path: &str) -> bool {
}
for item in AVOID_WILDCARDS {
if path.starts_with(&format!("{}/*", item)) || path.starts_with(&format!("{}/{}", item, STORE_USER_ID)) {
if path.starts_with(&format!("{}/*", item))
|| path.starts_with(&format!("{}/{}", item, STORE_USER_ID))
{
return true;
}
}
@ -124,7 +134,6 @@ fn too_broad(path: &str) -> bool {
return true;
}
}
// Drive letters:
let drives: Regex = Regex::new(r"^[a-zA-Z]:$").unwrap();
@ -159,4 +168,4 @@ pub fn usable(path: &str) -> bool {
&& !path.starts_with("../")
&& !too_broad(path)
&& !unprintable.is_match(path)
}
}

View File

@ -13,12 +13,12 @@ pub enum CommonPath {
impl CommonPath {
pub fn get(&self) -> Option<PathBuf> {
static CONFIG: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::config_dir());
static DATA: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::data_dir());
static DATA_LOCAL: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::data_local_dir());
static DOCUMENT: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::document_dir());
static HOME: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::home_dir());
static PUBLIC: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::public_dir());
static CONFIG: LazyLock<Option<PathBuf>> = LazyLock::new(dirs::config_dir);
static DATA: LazyLock<Option<PathBuf>> = LazyLock::new(dirs::data_dir);
static DATA_LOCAL: LazyLock<Option<PathBuf>> = LazyLock::new(dirs::data_local_dir);
static DOCUMENT: LazyLock<Option<PathBuf>> = LazyLock::new(dirs::document_dir);
static HOME: LazyLock<Option<PathBuf>> = LazyLock::new(dirs::home_dir);
static PUBLIC: LazyLock<Option<PathBuf>> = LazyLock::new(dirs::public_dir);
#[cfg(windows)]
static DATA_LOCAL_LOW: LazyLock<Option<PathBuf>> = LazyLock::new(|| {

View File

@ -48,4 +48,4 @@ pub const XDG_DATA: &str = "<xdgData>"; // %WINDIR% on Windows
pub const XDG_CONFIG: &str = "<xdgConfig>"; // $XDG_DATA_HOME on Linux
pub const SKIP: &str = "<skip>"; // $XDG_CONFIG_HOME on Linux
pub static OS_USERNAME: LazyLock<String> = LazyLock::new(|| whoami::username());
pub static OS_USERNAME: LazyLock<String> = LazyLock::new(whoami::username);

View File

@ -1,22 +1,17 @@
use std::{
fs::{self, create_dir_all, File},
io::{self, ErrorKind, Read, Write},
fs::{self, File, create_dir_all},
io::{self, Read, Write},
path::{Path, PathBuf},
thread::sleep,
time::Duration,
};
use super::{
backup_manager::BackupHandler, conditions::Condition, metadata::GameFile, placeholder::*,
};
use crate::error::BackupError;
use super::{backup_manager::BackupHandler, placeholder::*};
use database::GameVersion;
use log::{debug, warn};
use rustix::path::Arg;
use tempfile::tempfile;
use crate::{
database::db::GameVersion, error::backup_error::BackupError, process::process_manager::Platform,
};
use super::{backup_manager::BackupManager, metadata::CloudSaveMetadata, normalise::normalize};
pub fn resolve(meta: &mut CloudSaveMetadata) -> File {
@ -31,7 +26,7 @@ pub fn resolve(meta: &mut CloudSaveMetadata) -> File {
.iter()
.find_map(|p| match p {
super::conditions::Condition::Os(os) => Some(os),
_ => None,
_ => None
})
.cloned()
{
@ -64,7 +59,7 @@ pub fn resolve(meta: &mut CloudSaveMetadata) -> File {
let binding = serde_json::to_string(meta).unwrap();
let serialized = binding.as_bytes();
let mut file = tempfile().unwrap();
file.write(serialized).unwrap();
file.write_all(serialized).unwrap();
tarball.append_file("metadata", &mut file).unwrap();
tarball.into_inner().unwrap().finish().unwrap()
}
@ -97,7 +92,7 @@ pub fn extract(file: PathBuf) -> Result<(), BackupError> {
.iter()
.find_map(|p| match p {
super::conditions::Condition::Os(os) => Some(os),
_ => None,
_ => None
})
.cloned()
{
@ -116,7 +111,7 @@ pub fn extract(file: PathBuf) -> Result<(), BackupError> {
};
let new_path = parse_path(file.path.into(), handler, &manifest.game_version)?;
create_dir_all(&new_path.parent().unwrap()).unwrap();
create_dir_all(new_path.parent().unwrap()).unwrap();
println!(
"Current path {:?} copying to {:?}",
@ -133,23 +128,22 @@ pub fn copy_item<P: AsRef<Path>>(src: P, dest: P) -> io::Result<()> {
let src_path = src.as_ref();
let dest_path = dest.as_ref();
let metadata = fs::metadata(&src_path)?;
let metadata = fs::metadata(src_path)?;
if metadata.is_file() {
// Ensure the parent directory of the destination exists for a file copy
if let Some(parent) = dest_path.parent() {
fs::create_dir_all(parent)?;
}
fs::copy(&src_path, &dest_path)?;
fs::copy(src_path, dest_path)?;
} else if metadata.is_dir() {
// For directories, we call the recursive helper function.
// The destination for the recursive copy is the `dest_path` itself.
copy_dir_recursive(&src_path, &dest_path)?;
copy_dir_recursive(src_path, dest_path)?;
} else {
// Handle other file types like symlinks if necessary,
// for now, return an error or skip.
return Err(io::Error::new(
io::ErrorKind::Other,
return Err(io::Error::other(
format!("Source {:?} is neither a file nor a directory", src_path),
));
}
@ -158,7 +152,7 @@ pub fn copy_item<P: AsRef<Path>>(src: P, dest: P) -> io::Result<()> {
}
fn copy_dir_recursive(src: &Path, dest: &Path) -> io::Result<()> {
fs::create_dir_all(&dest)?;
fs::create_dir_all(dest)?;
for entry in fs::read_dir(src)? {
let entry = entry?;
@ -220,43 +214,3 @@ pub fn parse_path(
println!("Final line: {:?}", &s);
Ok(s)
}
pub fn test() {
let mut meta = CloudSaveMetadata {
files: vec![
GameFile {
path: String::from("<home>/favicon.png"),
id: None,
data_type: super::metadata::DataType::File,
tags: Vec::new(),
conditions: vec![Condition::Os(Platform::Linux)],
},
GameFile {
path: String::from("<home>/Documents/Pixel Art"),
id: None,
data_type: super::metadata::DataType::File,
tags: Vec::new(),
conditions: vec![Condition::Os(Platform::Linux)],
},
],
game_version: GameVersion {
game_id: String::new(),
version_name: String::new(),
platform: Platform::Linux,
launch_command: String::new(),
launch_args: Vec::new(),
launch_command_template: String::new(),
setup_command: String::new(),
setup_args: Vec::new(),
setup_command_template: String::new(),
only_setup: true,
version_index: 0,
delta: false,
umu_id_override: None,
},
save_id: String::from("aaaaaaa"),
};
//resolve(&mut meta);
extract("save".into()).unwrap();
}

View File

View File

@ -0,0 +1,15 @@
[package]
name = "database"
version = "0.1.0"
edition = "2024"
[dependencies]
chrono = "0.4.42"
dirs = "6.0.0"
log = "0.4.28"
native_model = { version = "0.6.4", features = ["rmp_serde_1_3"], git = "https://github.com/Drop-OSS/native_model.git"}
rustbreak = "2.0.0"
serde = "1.0.228"
serde_with = "3.15.0"
url = "2.5.7"
whoami = "1.6.1"

View File

@ -0,0 +1,45 @@
use std::{
path::PathBuf,
sync::{Arc, LazyLock},
};
use rustbreak::{DeSerError, DeSerializer};
use serde::{Serialize, de::DeserializeOwned};
use crate::interface::{DatabaseImpls, DatabaseInterface};
pub static DB: LazyLock<DatabaseInterface> = LazyLock::new(DatabaseInterface::set_up_database);
#[cfg(not(debug_assertions))]
static DATA_ROOT_PREFIX: &str = "drop";
#[cfg(debug_assertions)]
static DATA_ROOT_PREFIX: &str = "drop-debug";
pub static DATA_ROOT_DIR: LazyLock<Arc<PathBuf>> = LazyLock::new(|| {
Arc::new(
dirs::data_dir()
.expect("Failed to get data dir")
.join(DATA_ROOT_PREFIX),
)
});
// Custom JSON serializer to support everything we need
#[derive(Debug, Default, Clone)]
pub struct DropDatabaseSerializer;
impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
for DropDatabaseSerializer
{
fn serialize(&self, val: &T) -> rustbreak::error::DeSerResult<Vec<u8>> {
native_model::encode(val).map_err(|e| DeSerError::Internal(e.to_string()))
}
fn deserialize<R: std::io::Read>(&self, mut s: R) -> rustbreak::error::DeSerResult<T> {
let mut buf = Vec::new();
s.read_to_end(&mut buf)
.map_err(|e| rustbreak::error::DeSerError::Other(e.into()))?;
let (val, _version) =
native_model::decode(buf).map_err(|e| DeSerError::Internal(e.to_string()))?;
Ok(val)
}
}

View File

@ -3,48 +3,18 @@ use std::{
mem::ManuallyDrop,
ops::{Deref, DerefMut},
path::PathBuf,
sync::{Arc, LazyLock, RwLockReadGuard, RwLockWriteGuard},
sync::{RwLockReadGuard, RwLockWriteGuard},
};
use chrono::Utc;
use log::{debug, error, info, warn};
use rustbreak::{DeSerError, DeSerializer, PathDatabase, RustbreakError};
use serde::{Serialize, de::DeserializeOwned};
use rustbreak::{PathDatabase, RustbreakError};
use url::Url;
use crate::DB;
use super::models::data::Database;
#[cfg(not(debug_assertions))]
static DATA_ROOT_PREFIX: &'static str = "drop";
#[cfg(debug_assertions)]
static DATA_ROOT_PREFIX: &str = "drop-debug";
pub static DATA_ROOT_DIR: LazyLock<Arc<PathBuf>> =
LazyLock::new(|| Arc::new(dirs::data_dir().unwrap().join(DATA_ROOT_PREFIX)));
// Custom JSON serializer to support everything we need
#[derive(Debug, Default, Clone)]
pub struct DropDatabaseSerializer;
impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
for DropDatabaseSerializer
{
fn serialize(&self, val: &T) -> rustbreak::error::DeSerResult<Vec<u8>> {
native_model::encode(val)
.map_err(|e| DeSerError::Internal(e.to_string()))
}
fn deserialize<R: std::io::Read>(&self, mut s: R) -> rustbreak::error::DeSerResult<T> {
let mut buf = Vec::new();
s.read_to_end(&mut buf)
.map_err(|e| rustbreak::error::DeSerError::Other(e.into()))?;
let (val, _version) = native_model::decode(buf)
.map_err(|e| DeSerError::Internal(e.to_string()))?;
Ok(val)
}
}
use crate::{
db::{DATA_ROOT_DIR, DB, DropDatabaseSerializer},
models::data::Database,
};
pub type DatabaseInterface =
rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer>;
@ -63,13 +33,49 @@ impl DatabaseImpls for DatabaseInterface {
let pfx_dir = DATA_ROOT_DIR.join("pfx");
debug!("creating data directory at {DATA_ROOT_DIR:?}");
create_dir_all(DATA_ROOT_DIR.as_path()).unwrap();
create_dir_all(&games_base_dir).unwrap();
create_dir_all(&logs_root_dir).unwrap();
create_dir_all(&cache_dir).unwrap();
create_dir_all(&pfx_dir).unwrap();
create_dir_all(DATA_ROOT_DIR.as_path()).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
DATA_ROOT_DIR.display(),
e
)
});
create_dir_all(&games_base_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
games_base_dir.display(),
e
)
});
create_dir_all(&logs_root_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
logs_root_dir.display(),
e
)
});
create_dir_all(&cache_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
cache_dir.display(),
e
)
});
create_dir_all(&pfx_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
pfx_dir.display(),
e
)
});
let exists = fs::exists(db_path.clone()).unwrap();
let exists = fs::exists(db_path.clone()).unwrap_or_else(|e| {
panic!(
"Failed to find if {} exists with error {}",
db_path.display(),
e
)
});
if exists {
match PathDatabase::load_from_path(db_path.clone()) {
@ -78,21 +84,19 @@ impl DatabaseImpls for DatabaseInterface {
}
} else {
let default = Database::new(games_base_dir, None, cache_dir);
debug!(
"Creating database at path {}",
db_path.as_os_str().to_str().unwrap()
);
debug!("Creating database at path {}", db_path.display());
PathDatabase::create_at_path(db_path, default).expect("Database could not be created")
}
}
fn database_is_set_up(&self) -> bool {
!self.borrow_data().unwrap().base_url.is_empty()
!borrow_db_checked().base_url.is_empty()
}
fn fetch_base_url(&self) -> Url {
let handle = self.borrow_data().unwrap();
Url::parse(&handle.base_url).unwrap()
let handle = borrow_db_checked();
Url::parse(&handle.base_url)
.unwrap_or_else(|_| panic!("Failed to parse base url {}", handle.base_url))
}
}
@ -111,13 +115,16 @@ fn handle_invalid_database(
base
};
info!("old database stored at: {}", new_path.to_string_lossy());
fs::rename(&db_path, &new_path).unwrap();
fs::rename(&db_path, &new_path).unwrap_or_else(|e| {
panic!(
"Could not rename database {} to {} with error {}",
db_path.display(),
new_path.display(),
e
)
});
let db = Database::new(
games_base_dir.into_os_string().into_string().unwrap(),
Some(new_path),
cache_dir,
);
let db = Database::new(games_base_dir, Some(new_path), cache_dir);
PathDatabase::create_at_path(db_path, db).expect("Database could not be created")
}

View File

@ -0,0 +1,14 @@
#![feature(nonpoison_rwlock)]
pub mod db;
pub mod debug;
pub mod interface;
pub mod models;
pub mod platform;
pub use db::DB;
pub use interface::{borrow_db_checked, borrow_db_mut_checked};
pub use models::data::{
ApplicationTransientStatus, Database, DatabaseApplications, DatabaseAuth, DownloadType,
DownloadableMetadata, GameDownloadStatus, GameVersion, Settings,
};

View File

@ -4,7 +4,7 @@ pub mod data {
use native_model::native_model;
use serde::{Deserialize, Serialize};
// NOTE: Within each version, you should NEVER use these types.
// NOTE: Within each version, you should NEVER use these types.
// Declare it using the actual version that it is from, i.e. v1::Settings rather than just Settings from here
pub type GameVersion = v1::GameVersion;
@ -37,17 +37,18 @@ pub mod data {
}
mod v1 {
use crate::process::process_manager::Platform;
use serde_with::serde_as;
use std::{collections::HashMap, path::PathBuf};
use crate::platform::Platform;
use super::{Deserialize, Serialize, native_model};
fn default_template() -> String {
"{}".to_owned()
}
#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
#[native_model(id = 2, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
pub struct GameVersion {
@ -190,9 +191,7 @@ pub mod data {
use serde_with::serde_as;
use super::{
Deserialize, Serialize, native_model, v1,
};
use super::{Deserialize, Serialize, native_model, v1};
#[native_model(id = 1, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from = v1::Database)]
#[derive(Serialize, Deserialize, Clone, Default)]
@ -276,12 +275,13 @@ pub mod data {
pub install_dirs: Vec<PathBuf>,
// Guaranteed to exist if the game also exists in the app state map
pub game_statuses: HashMap<String, GameDownloadStatus>,
pub game_versions: HashMap<String, HashMap<String, v1::GameVersion>>,
pub installed_game_version: HashMap<String, v1::DownloadableMetadata>,
#[serde(skip)]
pub transient_statuses: HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
pub transient_statuses:
HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
}
impl From<v1::DatabaseApplications> for DatabaseApplications {
fn from(value: v1::DatabaseApplications) -> Self {
@ -302,10 +302,7 @@ pub mod data {
mod v3 {
use std::path::PathBuf;
use super::{
Deserialize, Serialize,
native_model, v2, v1,
};
use super::{Deserialize, Serialize, native_model, v1, v2};
#[native_model(id = 1, version = 3, with = native_model::rmp_serde_1_3::RmpSerde, from = v2::Database)]
#[derive(Serialize, Deserialize, Clone, Default)]
pub struct Database {
@ -357,6 +354,20 @@ pub mod data {
compat_info: None,
}
}
}
impl DatabaseAuth {
pub fn new(
private: String,
cert: String,
client_id: String,
web_token: Option<String>,
) -> Self {
Self {
private,
cert,
client_id,
web_token,
}
}
}
}
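The NOTE near the top of this file describes the versioning convention: each vN module keeps its own copy of every model type, conversions are declared from the previous version, and code inside a version module always names the concrete version (v1::Settings) rather than the top-level alias. A minimal sketch of that pattern with a hypothetical Settings type (real fields and the native_model attributes are omitted):

mod v1 {
    #[derive(Clone)]
    pub struct Settings {
        pub autostart: bool,
    }
}

mod v2 {
    #[derive(Clone)]
    pub struct Settings {
        pub autostart: bool,
        pub max_download_threads: usize,
    }

    // Newer versions declare how to migrate from older ones, mirroring the
    // From<v1::...> impls in this file.
    impl From<super::v1::Settings> for Settings {
        fn from(old: super::v1::Settings) -> Self {
            Settings {
                autostart: old.autostart,
                max_download_threads: 4,
            }
        }
    }
}

// Only the top-level alias tracks the current version; version modules never use it.
pub type Settings = v2::Settings;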

View File

@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
#[derive(Eq, Hash, PartialEq, Serialize, Deserialize, Clone, Copy, Debug)]
pub enum Platform {
Windows,
Linux,
MacOs,
}
impl Platform {
#[cfg(target_os = "windows")]
pub const HOST: Platform = Self::Windows;
#[cfg(target_os = "macos")]
pub const HOST: Platform = Self::MacOs;
#[cfg(target_os = "linux")]
pub const HOST: Platform = Self::Linux;
pub fn is_case_sensitive(&self) -> bool {
match self {
Self::Windows | Self::MacOs => false,
Self::Linux => true,
}
}
}
impl From<&str> for Platform {
fn from(value: &str) -> Self {
match value.to_lowercase().trim() {
"windows" => Self::Windows,
"linux" => Self::Linux,
"mac" | "macos" => Self::MacOs,
_ => unimplemented!(),
}
}
}
impl From<whoami::Platform> for Platform {
fn from(value: whoami::Platform) -> Self {
match value {
whoami::Platform::Windows => Platform::Windows,
whoami::Platform::Linux => Platform::Linux,
whoami::Platform::MacOS => Platform::MacOs,
platform => unimplemented!("Platform {} is not supported", platform),
}
}
}

View File

@ -0,0 +1,17 @@
[package]
name = "download_manager"
version = "0.1.0"
edition = "2024"
[dependencies]
atomic-instant-full = "0.1.0"
database = { version = "0.1.0", path = "../database" }
humansize = "2.1.3"
log = "0.4.28"
parking_lot = "0.12.5"
remote = { version = "0.1.0", path = "../remote" }
serde = "1.0.228"
serde_with = "3.15.0"
tauri = "2.8.5"
throttle_my_fn = "0.2.6"
utils = { version = "0.1.0", path = "../utils" }

View File

@ -7,14 +7,15 @@ use std::{
thread::{JoinHandle, spawn},
};
use database::DownloadableMetadata;
use log::{debug, error, info, warn};
use tauri::{AppHandle, Emitter};
use tauri::AppHandle;
use utils::{app_emit, lock, send};
use crate::{
database::models::data::DownloadableMetadata,
download_manager::download_manager_frontend::DownloadStatus,
error::application_download_error::ApplicationDownloadError,
games::library::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent},
download_manager_frontend::DownloadStatus,
error::ApplicationDownloadError,
frontend_updates::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent},
};
use super::{
@ -105,7 +106,7 @@ impl DownloadManagerBuilder {
}
fn set_status(&self, status: DownloadManagerStatus) {
*self.status.lock().unwrap() = status;
*lock!(self.status) = status;
}
fn remove_and_cleanup_front_download(&mut self, meta: &DownloadableMetadata) -> DownloadAgent {
@ -119,9 +120,9 @@ impl DownloadManagerBuilder {
// Make sure the download thread is terminated
fn cleanup_current_download(&mut self) {
self.active_control_flag = None;
*self.progress.lock().unwrap() = None;
*lock!(self.progress) = None;
let mut download_thread_lock = self.current_download_thread.lock().unwrap();
let mut download_thread_lock = lock!(self.current_download_thread);
if let Some(unfinished_thread) = download_thread_lock.take()
&& !unfinished_thread.is_finished()
@ -137,7 +138,7 @@ impl DownloadManagerBuilder {
current_flag.set(DownloadThreadControlFlag::Stop);
}
let mut download_thread_lock = self.current_download_thread.lock().unwrap();
let mut download_thread_lock = lock!(self.current_download_thread);
if let Some(current_download_thread) = download_thread_lock.take() {
return current_download_thread.join().is_ok();
};
@ -199,9 +200,7 @@ impl DownloadManagerBuilder {
self.download_queue.append(meta.clone());
self.download_agent_registry.insert(meta, download_agent);
self.sender
.send(DownloadManagerSignal::UpdateUIQueue)
.unwrap();
send!(self.sender, DownloadManagerSignal::UpdateUIQueue);
}
fn manage_go_signal(&mut self) {
@ -247,7 +246,7 @@ impl DownloadManagerBuilder {
let sender = self.sender.clone();
let mut download_thread_lock = self.current_download_thread.lock().unwrap();
let mut download_thread_lock = lock!(self.current_download_thread);
let app_handle = self.app_handle.clone();
*download_thread_lock = Some(spawn(move || {
@ -258,7 +257,7 @@ impl DownloadManagerBuilder {
Err(e) => {
error!("download {:?} has error {}", download_agent.metadata(), &e);
download_agent.on_error(&app_handle, &e);
sender.send(DownloadManagerSignal::Error(e)).unwrap();
send!(sender, DownloadManagerSignal::Error(e));
return;
}
};
@ -282,7 +281,7 @@ impl DownloadManagerBuilder {
&e
);
download_agent.on_error(&app_handle, &e);
sender.send(DownloadManagerSignal::Error(e)).unwrap();
send!(sender, DownloadManagerSignal::Error(e));
return;
}
};
@ -293,10 +292,11 @@ impl DownloadManagerBuilder {
if validate_result {
download_agent.on_complete(&app_handle);
sender
.send(DownloadManagerSignal::Completed(download_agent.metadata()))
.unwrap();
sender.send(DownloadManagerSignal::UpdateUIQueue).unwrap();
send!(
sender,
DownloadManagerSignal::Completed(download_agent.metadata())
);
send!(sender, DownloadManagerSignal::UpdateUIQueue);
return;
}
}
@ -323,7 +323,7 @@ impl DownloadManagerBuilder {
}
self.push_ui_queue_update();
self.sender.send(DownloadManagerSignal::Go).unwrap();
send!(self.sender, DownloadManagerSignal::Go);
}
fn manage_error_signal(&mut self, error: ApplicationDownloadError) {
debug!("got signal Error");
@ -361,7 +361,7 @@ impl DownloadManagerBuilder {
let index = self.download_queue.get_by_meta(meta);
if let Some(index) = index {
download_agent.on_cancelled(&self.app_handle);
let _ = self.download_queue.edit().remove(index).unwrap();
let _ = self.download_queue.edit().remove(index);
let removed = self.download_agent_registry.remove(meta);
debug!(
"removed {:?} from queue {:?}",
@ -376,7 +376,7 @@ impl DownloadManagerBuilder {
fn push_ui_stats_update(&self, kbs: usize, time: usize) {
let event_data = StatsUpdateEvent { speed: kbs, time };
self.app_handle.emit("update_stats", event_data).unwrap();
app_emit!(&self.app_handle, "update_stats", event_data);
}
fn push_ui_queue_update(&self) {
let queue = &self.download_queue.read();
@ -395,6 +395,6 @@ impl DownloadManagerBuilder {
.collect();
let event_data = QueueUpdateEvent { queue: queue_objs };
self.app_handle.emit("update_queue", event_data).unwrap();
app_emit!(&self.app_handle, "update_queue", event_data);
}
}

View File

@ -3,19 +3,18 @@ use std::{
collections::VecDeque,
fmt::Debug,
sync::{
mpsc::{SendError, Sender},
Mutex, MutexGuard,
mpsc::{SendError, Sender},
},
thread::JoinHandle,
};
use database::DownloadableMetadata;
use log::{debug, info};
use serde::Serialize;
use utils::{lock, send};
use crate::{
database::models::data::DownloadableMetadata,
error::application_download_error::ApplicationDownloadError,
};
use crate::error::ApplicationDownloadError;
use super::{
download_manager_builder::{CurrentProgressObject, DownloadAgent},
@ -80,6 +79,7 @@ pub enum DownloadStatus {
/// The actual download queue may be accessed through the .`edit()` function,
/// which provides raw access to the underlying queue.
/// THIS EDITING IS BLOCKING!!!
#[derive(Debug)]
pub struct DownloadManager {
terminator: Mutex<Option<JoinHandle<Result<(), ()>>>>,
download_queue: Queue,
@ -119,22 +119,21 @@ impl DownloadManager {
self.download_queue.read()
}
pub fn get_current_download_progress(&self) -> Option<f64> {
let progress_object = (*self.progress.lock().unwrap()).clone()?;
let progress_object = (*lock!(self.progress)).clone()?;
Some(progress_object.get_progress())
}
pub fn rearrange_string(&self, meta: &DownloadableMetadata, new_index: usize) {
let mut queue = self.edit();
let current_index = get_index_from_id(&mut queue, meta).unwrap();
let to_move = queue.remove(current_index).unwrap();
let current_index =
get_index_from_id(&mut queue, meta).expect("Failed to get meta index from id");
let to_move = queue
.remove(current_index)
.expect("Failed to remove meta at index from queue");
queue.insert(new_index, to_move);
self.command_sender
.send(DownloadManagerSignal::UpdateUIQueue)
.unwrap();
send!(self.command_sender, DownloadManagerSignal::UpdateUIQueue);
}
pub fn cancel(&self, meta: DownloadableMetadata) {
self.command_sender
.send(DownloadManagerSignal::Cancel(meta))
.unwrap();
send!(self.command_sender, DownloadManagerSignal::Cancel(meta));
}
pub fn rearrange(&self, current_index: usize, new_index: usize) {
if current_index == new_index {
@ -143,39 +142,31 @@ impl DownloadManager {
let needs_pause = current_index == 0 || new_index == 0;
if needs_pause {
self.command_sender
.send(DownloadManagerSignal::Stop)
.unwrap();
send!(self.command_sender, DownloadManagerSignal::Stop);
}
debug!("moving download at index {current_index} to index {new_index}");
let mut queue = self.edit();
let to_move = queue.remove(current_index).unwrap();
let to_move = queue.remove(current_index).expect("Failed to get");
queue.insert(new_index, to_move);
drop(queue);
if needs_pause {
self.command_sender.send(DownloadManagerSignal::Go).unwrap();
send!(self.command_sender, DownloadManagerSignal::Go);
}
self.command_sender
.send(DownloadManagerSignal::UpdateUIQueue)
.unwrap();
self.command_sender.send(DownloadManagerSignal::Go).unwrap();
send!(self.command_sender, DownloadManagerSignal::UpdateUIQueue);
send!(self.command_sender, DownloadManagerSignal::Go);
}
pub fn pause_downloads(&self) {
self.command_sender
.send(DownloadManagerSignal::Stop)
.unwrap();
send!(self.command_sender, DownloadManagerSignal::Stop);
}
pub fn resume_downloads(&self) {
self.command_sender.send(DownloadManagerSignal::Go).unwrap();
send!(self.command_sender, DownloadManagerSignal::Go);
}
pub fn ensure_terminated(&self) -> Result<Result<(), ()>, Box<dyn Any + Send>> {
self.command_sender
.send(DownloadManagerSignal::Finish)
.unwrap();
let terminator = self.terminator.lock().unwrap().take();
send!(self.command_sender, DownloadManagerSignal::Finish);
let terminator = lock!(self.terminator).take();
terminator.unwrap().join()
}
pub fn get_sender(&self) -> Sender<DownloadManagerSignal> {

View File

@ -1,11 +1,9 @@
use std::sync::Arc;
use database::DownloadableMetadata;
use tauri::AppHandle;
use crate::{
database::models::data::DownloadableMetadata,
error::application_download_error::ApplicationDownloadError,
};
use crate::error::ApplicationDownloadError;
use super::{
download_manager_frontend::DownloadStatus,

View File

@ -0,0 +1,80 @@
use humansize::{BINARY, format_size};
use std::{
fmt::{Display, Formatter},
io,
sync::{Arc, mpsc::SendError},
};
use remote::error::RemoteAccessError;
use serde_with::SerializeDisplay;
#[derive(SerializeDisplay)]
pub enum DownloadManagerError<T> {
IOError(io::Error),
SignalError(SendError<T>),
}
impl<T> Display for DownloadManagerError<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DownloadManagerError::IOError(error) => write!(f, "{error}"),
DownloadManagerError::SignalError(send_error) => write!(f, "{send_error}"),
}
}
}
impl<T> From<SendError<T>> for DownloadManagerError<T> {
fn from(value: SendError<T>) -> Self {
DownloadManagerError::SignalError(value)
}
}
impl<T> From<io::Error> for DownloadManagerError<T> {
fn from(value: io::Error) -> Self {
DownloadManagerError::IOError(value)
}
}
// TODO: Rename / separate from downloads
#[derive(Debug, SerializeDisplay)]
pub enum ApplicationDownloadError {
NotInitialized,
Communication(RemoteAccessError),
DiskFull(u64, u64),
#[allow(dead_code)]
Checksum,
Lock,
IoError(Arc<io::Error>),
DownloadError(RemoteAccessError),
}
impl Display for ApplicationDownloadError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
ApplicationDownloadError::NotInitialized => {
write!(f, "Download not initalized, did something go wrong?")
}
ApplicationDownloadError::DiskFull(required, available) => write!(
f,
"Game requires {}, {} remaining left on disk.",
format_size(*required, BINARY),
format_size(*available, BINARY),
),
ApplicationDownloadError::Communication(error) => write!(f, "{error}"),
ApplicationDownloadError::Lock => write!(
f,
"failed to acquire lock. Something has gone very wrong internally. Please restart the application"
),
ApplicationDownloadError::Checksum => {
write!(f, "checksum failed to validate for download")
}
ApplicationDownloadError::IoError(error) => write!(f, "io error: {error}"),
ApplicationDownloadError::DownloadError(error) => {
write!(f, "Download failed with error {error:?}")
}
}
}
}
impl From<io::Error> for ApplicationDownloadError {
fn from(value: io::Error) -> Self {
ApplicationDownloadError::IoError(Arc::new(value))
}
}

View File

@ -0,0 +1,24 @@
use database::DownloadableMetadata;
use serde::Serialize;
use crate::download_manager_frontend::DownloadStatus;
#[derive(Serialize, Clone)]
pub struct QueueUpdateEventQueueData {
pub meta: DownloadableMetadata,
pub status: DownloadStatus,
pub progress: f64,
pub current: usize,
pub max: usize,
}
#[derive(Serialize, Clone)]
pub struct QueueUpdateEvent {
pub queue: Vec<QueueUpdateEventQueueData>,
}
#[derive(Serialize, Clone)]
pub struct StatsUpdateEvent {
pub speed: usize,
pub time: usize,
}

View File

@ -0,0 +1,44 @@
#![feature(duration_millis_float)]
#![feature(nonpoison_mutex)]
#![feature(sync_nonpoison)]
use std::{ops::Deref, sync::OnceLock};
use tauri::AppHandle;
use crate::{
download_manager_builder::DownloadManagerBuilder, download_manager_frontend::DownloadManager,
};
pub mod download_manager_builder;
pub mod download_manager_frontend;
pub mod downloadable;
pub mod error;
pub mod frontend_updates;
pub mod util;
pub static DOWNLOAD_MANAGER: DownloadManagerWrapper = DownloadManagerWrapper::new();
pub struct DownloadManagerWrapper(OnceLock<DownloadManager>);
impl DownloadManagerWrapper {
const fn new() -> Self {
DownloadManagerWrapper(OnceLock::new())
}
pub fn init(app_handle: AppHandle) {
DOWNLOAD_MANAGER
.0
.set(DownloadManagerBuilder::build(app_handle))
.expect("Failed to initialise download manager");
}
}
impl Deref for DownloadManagerWrapper {
type Target = DownloadManager;
fn deref(&self) -> &Self::Target {
match self.0.get() {
Some(download_manager) => download_manager,
None => unreachable!("Download manager should always be initialised"),
}
}
}

View File

@ -1,6 +1,6 @@
use std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
atomic::{AtomicBool, Ordering},
};
#[derive(PartialEq, Eq, PartialOrd, Ord)]
@ -22,7 +22,11 @@ impl From<DownloadThreadControlFlag> for bool {
/// false => Stop
impl From<bool> for DownloadThreadControlFlag {
fn from(value: bool) -> Self {
if value { DownloadThreadControlFlag::Go } else { DownloadThreadControlFlag::Stop }
if value {
DownloadThreadControlFlag::Go
} else {
DownloadThreadControlFlag::Stop
}
}
}

View File

@ -9,12 +9,13 @@ use std::{
use atomic_instant_full::AtomicInstant;
use throttle_my_fn::throttle;
use utils::{lock, send};
use crate::download_manager::download_manager_frontend::DownloadManagerSignal;
use crate::download_manager_frontend::DownloadManagerSignal;
use super::rolling_progress_updates::RollingProgressWindow;
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct ProgressObject {
max: Arc<Mutex<usize>>,
progress_instances: Arc<Mutex<Vec<Arc<AtomicUsize>>>>,
@ -74,12 +75,10 @@ impl ProgressObject {
}
pub fn set_time_now(&self) {
*self.start.lock().unwrap() = Instant::now();
*lock!(self.start) = Instant::now();
}
pub fn sum(&self) -> usize {
self.progress_instances
.lock()
.unwrap()
lock!(self.progress_instances)
.iter()
.map(|instance| instance.load(Ordering::Acquire))
.sum()
@ -88,27 +87,25 @@ impl ProgressObject {
self.set_time_now();
self.bytes_last_update.store(0, Ordering::Release);
self.rolling.reset();
self.progress_instances
.lock()
.unwrap()
lock!(self.progress_instances)
.iter()
.for_each(|x| x.store(0, Ordering::SeqCst));
}
pub fn get_max(&self) -> usize {
*self.max.lock().unwrap()
*lock!(self.max)
}
pub fn set_max(&self, new_max: usize) {
*self.max.lock().unwrap() = new_max;
*lock!(self.max) = new_max;
}
pub fn set_size(&self, length: usize) {
*self.progress_instances.lock().unwrap() =
*lock!(self.progress_instances) =
(0..length).map(|_| Arc::new(AtomicUsize::new(0))).collect();
}
pub fn get_progress(&self) -> f64 {
self.sum() as f64 / self.get_max() as f64
}
pub fn get(&self, index: usize) -> Arc<AtomicUsize> {
self.progress_instances.lock().unwrap()[index].clone()
lock!(self.progress_instances)[index].clone()
}
fn update_window(&self, kilobytes_per_second: usize) {
self.rolling.update(kilobytes_per_second);
@ -120,7 +117,9 @@ pub fn calculate_update(progress: &ProgressObject) {
let last_update_time = progress
.last_update_time
.swap(Instant::now(), Ordering::SeqCst);
let time_since_last_update = Instant::now().duration_since(last_update_time).as_millis_f64();
let time_since_last_update = Instant::now()
.duration_since(last_update_time)
.as_millis_f64();
let current_bytes_downloaded = progress.sum();
let max = progress.get_max();
@ -128,7 +127,8 @@ pub fn calculate_update(progress: &ProgressObject) {
.bytes_last_update
.swap(current_bytes_downloaded, Ordering::Acquire);
let bytes_since_last_update = current_bytes_downloaded.saturating_sub(bytes_at_last_update) as f64;
let bytes_since_last_update =
current_bytes_downloaded.saturating_sub(bytes_at_last_update) as f64;
let kilobytes_per_second = bytes_since_last_update / time_since_last_update;
@ -148,18 +148,12 @@ pub fn push_update(progress: &ProgressObject, bytes_remaining: usize) {
}
fn update_ui(progress_object: &ProgressObject, kilobytes_per_second: usize, time_remaining: usize) {
progress_object
.sender
.send(DownloadManagerSignal::UpdateUIStats(
kilobytes_per_second,
time_remaining,
))
.unwrap();
send!(
progress_object.sender,
DownloadManagerSignal::UpdateUIStats(kilobytes_per_second, time_remaining)
);
}
fn update_queue(progress: &ProgressObject) {
progress
.sender
.send(DownloadManagerSignal::UpdateUIQueue)
.unwrap();
send!(progress.sender, DownloadManagerSignal::UpdateUIQueue)
}

View File

@ -3,9 +3,10 @@ use std::{
sync::{Arc, Mutex, MutexGuard},
};
use crate::database::models::data::DownloadableMetadata;
use database::DownloadableMetadata;
use utils::lock;
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct Queue {
inner: Arc<Mutex<VecDeque<DownloadableMetadata>>>,
}
@ -24,10 +25,10 @@ impl Queue {
}
}
pub fn read(&self) -> VecDeque<DownloadableMetadata> {
self.inner.lock().unwrap().clone()
lock!(self.inner).clone()
}
pub fn edit(&self) -> MutexGuard<'_, VecDeque<DownloadableMetadata>> {
self.inner.lock().unwrap()
lock!(self.inner)
}
pub fn pop_front(&self) -> Option<DownloadableMetadata> {
self.edit().pop_front()

View File

@ -3,11 +3,17 @@ use std::sync::{
atomic::{AtomicUsize, Ordering},
};
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct RollingProgressWindow<const S: usize> {
window: Arc<[AtomicUsize; S]>,
current: Arc<AtomicUsize>,
}
impl<const S: usize> Default for RollingProgressWindow<S> {
fn default() -> Self {
Self::new()
}
}
impl<const S: usize> RollingProgressWindow<S> {
pub fn new() -> Self {
Self {
@ -31,7 +37,7 @@ impl<const S: usize> RollingProgressWindow<S> {
.collect::<Vec<usize>>();
let amount = valid.len();
let sum = valid.into_iter().sum::<usize>();
sum / amount
}
pub fn reset(&self) {

View File

@ -0,0 +1,26 @@
[package]
name = "games"
version = "0.1.0"
edition = "2024"
[dependencies]
atomic-instant-full = "0.1.0"
bitcode = "0.6.7"
boxcar = "0.2.14"
database = { version = "0.1.0", path = "../database" }
download_manager = { version = "0.1.0", path = "../download_manager" }
hex = "0.4.3"
log = "0.4.28"
md5 = "0.8.0"
rayon = "1.11.0"
remote = { version = "0.1.0", path = "../remote" }
reqwest = "0.12.23"
rustix = "1.1.2"
serde = { version = "1.0.228", features = ["derive"] }
serde_with = "3.15.0"
sysinfo = "0.37.2"
tauri = "2.8.5"
throttle_my_fn = "0.2.6"
utils = { version = "0.1.0", path = "../utils" }
native_model = { version = "0.6.4", features = ["rmp_serde_1_3"], git = "https://github.com/Drop-OSS/native_model.git"}
serde_json = "1.0.145"

View File

@ -1,7 +1,7 @@
use bitcode::{Decode, Encode};
use serde::{Deserialize, Serialize};
use crate::games::library::Game;
use crate::library::Game;
pub type Collections = Vec<Collection>;

View File

@ -1,2 +1 @@
pub mod collection;
pub mod commands;

View File

@ -1,38 +1,41 @@
use crate::auth::generate_authorization_header;
use crate::database::db::{borrow_db_checked, borrow_db_mut_checked};
use crate::database::models::data::{
ApplicationTransientStatus, DownloadType, DownloadableMetadata,
use database::{
ApplicationTransientStatus, DownloadType, DownloadableMetadata, borrow_db_checked,
borrow_db_mut_checked,
};
use crate::download_manager::download_manager_frontend::{DownloadManagerSignal, DownloadStatus};
use crate::download_manager::downloadable::Downloadable;
use crate::download_manager::util::download_thread_control_flag::{
use download_manager::download_manager_frontend::{DownloadManagerSignal, DownloadStatus};
use download_manager::downloadable::Downloadable;
use download_manager::error::ApplicationDownloadError;
use download_manager::util::download_thread_control_flag::{
DownloadThreadControl, DownloadThreadControlFlag,
};
use crate::download_manager::util::progress_object::{ProgressHandle, ProgressObject};
use crate::error::application_download_error::ApplicationDownloadError;
use crate::error::remote_access_error::RemoteAccessError;
use crate::games::downloads::manifest::{
DownloadBucket, DownloadContext, DownloadDrop, DropManifest, DropValidateContext, ManifestBody,
};
use crate::games::downloads::validate::validate_game_chunk;
use crate::games::library::{on_game_complete, push_game_update, set_partially_installed};
use crate::games::state::GameStatusManager;
use crate::process::utils::get_disk_available;
use crate::remote::requests::generate_url;
use crate::remote::utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC};
use download_manager::util::progress_object::{ProgressHandle, ProgressObject};
use log::{debug, error, info, warn};
use rayon::ThreadPoolBuilder;
use remote::auth::generate_authorization_header;
use remote::error::RemoteAccessError;
use remote::requests::generate_url;
use remote::utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC};
use std::collections::{HashMap, HashSet};
use std::fs::{OpenOptions, create_dir_all};
use std::io;
use std::path::{Path, PathBuf};
use std::sync::mpsc::Sender;
use std::sync::{Arc, Mutex};
use std::time::Instant;
use tauri::{AppHandle, Emitter};
use tauri::AppHandle;
use utils::{app_emit, lock, send};
#[cfg(target_os = "linux")]
use rustix::fs::{FallocateFlags, fallocate};
use crate::downloads::manifest::{
DownloadBucket, DownloadContext, DownloadDrop, DropManifest, DropValidateContext, ManifestBody,
};
use crate::downloads::utils::get_disk_available;
use crate::downloads::validate::validate_game_chunk;
use crate::library::{on_game_complete, push_game_update, set_partially_installed};
use crate::state::GameStatusManager;
use super::download_logic::download_game_bucket;
use super::drop_data::DropData;
@ -101,10 +104,7 @@ impl GameDownloadAgent {
result.ensure_manifest_exists().await?;
let required_space = result
.manifest
.lock()
.unwrap()
let required_space = lock!(result.manifest)
.as_ref()
.unwrap()
.values()
@ -172,11 +172,11 @@ impl GameDownloadAgent {
}
pub fn check_manifest_exists(&self) -> bool {
self.manifest.lock().unwrap().is_some()
lock!(self.manifest).is_some()
}
pub async fn ensure_manifest_exists(&self) -> Result<(), ApplicationDownloadError> {
if self.manifest.lock().unwrap().is_some() {
if lock!(self.manifest).is_some() {
return Ok(());
}
@ -207,7 +207,10 @@ impl GameDownloadAgent {
));
}
let manifest_download: DropManifest = response.json().await.unwrap();
let manifest_download: DropManifest = response
.json()
.await
.map_err(|e| ApplicationDownloadError::Communication(e.into()))?;
if let Ok(mut manifest) = self.manifest.lock() {
*manifest = Some(manifest_download);
@ -219,7 +222,7 @@ impl GameDownloadAgent {
// Sets it up for both download and validate
fn setup_progress(&self) {
let buckets = self.buckets.lock().unwrap();
let buckets = lock!(self.buckets);
let chunk_count = buckets.iter().map(|e| e.drops.len()).sum();
@ -234,21 +237,23 @@ impl GameDownloadAgent {
}
pub fn ensure_buckets(&self) -> Result<(), ApplicationDownloadError> {
if self.buckets.lock().unwrap().is_empty() {
if lock!(self.buckets).is_empty() {
self.generate_buckets()?;
}
*self.context_map.lock().unwrap() = self.dropdata.get_contexts();
*lock!(self.context_map) = self.dropdata.get_contexts();
Ok(())
}
pub fn generate_buckets(&self) -> Result<(), ApplicationDownloadError> {
let manifest = self.manifest.lock().unwrap().clone().unwrap();
let manifest = lock!(self.manifest)
.clone()
.ok_or(ApplicationDownloadError::NotInitialized)?;
let game_id = self.id.clone();
let base_path = Path::new(&self.dropdata.base_path);
create_dir_all(base_path).unwrap();
create_dir_all(base_path)?;
let mut buckets = Vec::new();
@ -258,8 +263,13 @@ impl GameDownloadAgent {
for (raw_path, chunk) in manifest {
let path = base_path.join(Path::new(&raw_path));
let container = path.parent().unwrap();
create_dir_all(container).unwrap();
let container = path
.parent()
.ok_or(ApplicationDownloadError::IoError(Arc::new(io::Error::new(
io::ErrorKind::NotFound,
"no parent directory",
))))?;
create_dir_all(container)?;
let already_exists = path.exists();
let file = OpenOptions::new()
@ -267,8 +277,7 @@ impl GameDownloadAgent {
.write(true)
.create(true)
.truncate(false)
.open(path.clone())
.unwrap();
.open(&path)?;
let mut file_running_offset = 0;
for (index, length) in chunk.lengths.iter().enumerate() {
@ -352,7 +361,7 @@ impl GameDownloadAgent {
.collect::<Vec<(String, bool)>>(),
);
*self.buckets.lock().unwrap() = buckets;
*lock!(self.buckets) = buckets;
Ok(())
}
@ -368,9 +377,11 @@ impl GameDownloadAgent {
let pool = ThreadPoolBuilder::new()
.num_threads(max_download_threads)
.build()
.unwrap();
.unwrap_or_else(|_| {
panic!("failed to build thread pool with {max_download_threads} threads")
});
let buckets = self.buckets.lock().unwrap();
let buckets = lock!(self.buckets);
let mut download_contexts = HashMap::<String, DownloadContext>::new();
@ -389,7 +400,7 @@ impl GameDownloadAgent {
for version in versions {
let download_context = DROP_CLIENT_SYNC
.post(generate_url(&["/api/v2/client/context"], &[]).unwrap())
.post(generate_url(&["/api/v2/client/context"], &[])?)
.json(&ManifestBody {
game: self.id.clone(),
version: version.clone(),
@ -412,7 +423,7 @@ impl GameDownloadAgent {
let download_contexts = &download_contexts;
pool.scope(|scope| {
let context_map = self.context_map.lock().unwrap();
let context_map = lock!(self.context_map);
for (index, bucket) in buckets.iter().enumerate() {
let mut bucket = (*bucket).clone();
let completed_contexts = completed_indexes_loop_arc.clone();
@ -442,10 +453,13 @@ impl GameDownloadAgent {
let sender = self.sender.clone();
let download_context = download_contexts
.get(&bucket.version)
.ok_or(RemoteAccessError::CorruptedState)
.unwrap();
let download_context =
download_contexts.get(&bucket.version).unwrap_or_else(|| {
panic!(
"Could not get bucket version {}. Corrupted state.",
bucket.version
)
});
scope.spawn(move |_| {
// 3 attempts
@ -477,7 +491,7 @@ impl GameDownloadAgent {
if i == RETRY_COUNT - 1 || !retry {
warn!("retry logic failed, not re-attempting.");
sender.send(DownloadManagerSignal::Error(e)).unwrap();
send!(sender, DownloadManagerSignal::Error(e));
return;
}
}
@ -490,7 +504,7 @@ impl GameDownloadAgent {
let newly_completed = completed_contexts.clone();
let completed_lock_len = {
let mut context_map_lock = self.context_map.lock().unwrap();
let mut context_map_lock = lock!(self.context_map);
for (_, item) in newly_completed.iter() {
context_map_lock.insert(item.clone(), true);
}
@ -498,7 +512,7 @@ impl GameDownloadAgent {
context_map_lock.values().filter(|x| **x).count()
};
let context_map_lock = self.context_map.lock().unwrap();
let context_map_lock = lock!(self.context_map);
let contexts = buckets
.iter()
.flat_map(|x| x.drops.iter().map(|e| e.checksum.clone()))
@ -547,7 +561,7 @@ impl GameDownloadAgent {
pub fn validate(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
self.setup_validate(app_handle);
let buckets = self.buckets.lock().unwrap();
let buckets = lock!(self.buckets);
let contexts: Vec<DropValidateContext> = buckets
.clone()
.into_iter()
@ -559,7 +573,9 @@ impl GameDownloadAgent {
let pool = ThreadPoolBuilder::new()
.num_threads(max_download_threads)
.build()
.unwrap();
.unwrap_or_else(|_| {
panic!("failed to build thread pool with {max_download_threads} threads")
});
let invalid_chunks = Arc::new(boxcar::Vec::new());
pool.scope(|scope| {
@ -577,7 +593,7 @@ impl GameDownloadAgent {
}
Err(e) => {
error!("{e}");
sender.send(DownloadManagerSignal::Error(e)).unwrap();
send!(sender, DownloadManagerSignal::Error(e));
}
}
});
@ -604,7 +620,7 @@ impl GameDownloadAgent {
// See docs on usage
set_partially_installed(
&self.metadata(),
self.dropdata.base_path.to_str().unwrap().to_string(),
self.dropdata.base_path.display().to_string(),
Some(app_handle),
);
@ -614,12 +630,12 @@ impl GameDownloadAgent {
impl Downloadable for GameDownloadAgent {
fn download(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
*self.status.lock().unwrap() = DownloadStatus::Downloading;
*lock!(self.status) = DownloadStatus::Downloading;
self.download(app_handle)
}
fn validate(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
*self.status.lock().unwrap() = DownloadStatus::Validating;
*lock!(self.status) = DownloadStatus::Validating;
self.validate(app_handle)
}
@ -653,10 +669,8 @@ impl Downloadable for GameDownloadAgent {
}
fn on_error(&self, app_handle: &tauri::AppHandle, error: &ApplicationDownloadError) {
*self.status.lock().unwrap() = DownloadStatus::Error;
app_handle
.emit("download_error", error.to_string())
.unwrap();
*lock!(self.status) = DownloadStatus::Error;
app_emit!(app_handle, "download_error", error.to_string());
error!("error while managing download: {error:?}");
@ -675,12 +689,20 @@ impl Downloadable for GameDownloadAgent {
}
fn on_complete(&self, app_handle: &tauri::AppHandle) {
on_game_complete(
match on_game_complete(
&self.metadata(),
self.dropdata.base_path.to_string_lossy().to_string(),
app_handle,
)
.unwrap();
) {
Ok(_) => {}
Err(e) => {
error!("could not mark game as complete: {e}");
send!(
self.sender,
DownloadManagerSignal::Error(ApplicationDownloadError::DownloadError(e))
);
}
}
}
fn on_cancelled(&self, app_handle: &tauri::AppHandle) {
@ -689,6 +711,6 @@ impl Downloadable for GameDownloadAgent {
}
fn status(&self) -> DownloadStatus {
self.status.lock().unwrap().clone()
lock!(self.status).clone()
}
}
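The `send!` and `app_emit!` macros used above are also from the `utils` workspace and wrap the previously `unwrap()`ed `Sender::send` and `tauri::Emitter::emit` calls. Their bodies are not shown in this diff; a plausible minimal shape, assuming they only log on failure, is:

```rust
// Hypothetical sketches of utils::send! and utils::app_emit! -- the real macros
// may panic or otherwise behave differently.
#[macro_export]
macro_rules! send {
    ($sender:expr, $msg:expr) => {
        if let Err(e) = $sender.send($msg) {
            log::error!("failed to send on channel: {e}");
        }
    };
}

#[macro_export]
macro_rules! app_emit {
    ($handle:expr, $event:expr, $payload:expr) => {
        if let Err(e) = tauri::Emitter::emit($handle, $event, $payload) {
            log::error!("failed to emit '{}': {e}", $event);
        }
    };
}
```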

View File

@ -1,18 +1,3 @@
use crate::download_manager::util::download_thread_control_flag::{
DownloadThreadControl, DownloadThreadControlFlag,
};
use crate::download_manager::util::progress_object::ProgressHandle;
use crate::error::application_download_error::ApplicationDownloadError;
use crate::error::drop_server_error::DropServerError;
use crate::error::remote_access_error::RemoteAccessError;
use crate::games::downloads::manifest::{ChunkBody, DownloadBucket, DownloadContext, DownloadDrop};
use crate::remote::auth::generate_authorization_header;
use crate::remote::requests::generate_url;
use crate::remote::utils::DROP_CLIENT_SYNC;
use log::{debug, info, warn};
use md5::{Context, Digest};
use reqwest::blocking::Response;
use std::fs::{Permissions, set_permissions};
use std::io::Read;
#[cfg(unix)]
@ -25,6 +10,21 @@ use std::{
path::PathBuf,
};
use download_manager::error::ApplicationDownloadError;
use download_manager::util::download_thread_control_flag::{
DownloadThreadControl, DownloadThreadControlFlag,
};
use download_manager::util::progress_object::ProgressHandle;
use log::{debug, info, warn};
use md5::{Context, Digest};
use remote::auth::generate_authorization_header;
use remote::error::{DropServerError, RemoteAccessError};
use remote::requests::generate_url;
use remote::utils::DROP_CLIENT_SYNC;
use reqwest::blocking::Response;
use crate::downloads::manifest::{ChunkBody, DownloadBucket, DownloadContext, DownloadDrop};
static MAX_PACKET_LENGTH: usize = 4096 * 4;
static BUMP_SIZE: usize = 4096 * 16;
@ -49,7 +49,7 @@ impl DropWriter<File> {
fn finish(mut self) -> io::Result<Digest> {
self.flush()?;
Ok(self.hasher.compute())
Ok(self.hasher.finalize())
}
}
// Write automatically pushes to file and hasher
@ -110,18 +110,20 @@ impl<'a> DropDownloadPipeline<'a, Response, File> {
let destination = self
.destination
.get_mut(index)
.ok_or(io::Error::other("no destination"))
.unwrap();
.ok_or(io::Error::other("no destination"))?;
let mut remaining = drop.length;
if drop.start != 0 {
destination.seek(SeekFrom::Start(drop.start.try_into().unwrap()))?;
destination.seek(SeekFrom::Start(drop.start as u64))?;
}
let mut last_bump = 0;
loop {
let size = MAX_PACKET_LENGTH.min(remaining);
let size = self.source.read(&mut copy_buffer[0..size]).inspect_err(|_| {
info!("got error from {}", drop.filename);
})?;
let size = self
.source
.read(&mut copy_buffer[0..size])
.inspect_err(|_| {
info!("got error from {}", drop.filename);
})?;
remaining -= size;
last_bump += size;
@ -215,20 +217,39 @@ pub fn download_game_bucket(
RemoteAccessError::UnparseableResponse("missing Content-Lengths header".to_owned()),
))?
.to_str()
.unwrap();
.map_err(|e| {
ApplicationDownloadError::Communication(RemoteAccessError::UnparseableResponse(
e.to_string(),
))
})?;
for (i, raw_length) in lengths.split(",").enumerate() {
let length = raw_length.parse::<usize>().unwrap_or(0);
let Some(drop) = bucket.drops.get(i) else {
warn!("invalid number of Content-Lengths recieved: {i}, {lengths}");
return Err(ApplicationDownloadError::DownloadError);
return Err(ApplicationDownloadError::DownloadError(
RemoteAccessError::InvalidResponse(DropServerError {
status_code: 400,
status_message: format!(
"invalid number of Content-Lengths recieved: {i}, {lengths}"
),
}),
));
};
if drop.length != length {
warn!(
"for {}, expected {}, got {} ({})",
drop.filename, drop.length, raw_length, length
);
return Err(ApplicationDownloadError::DownloadError);
return Err(ApplicationDownloadError::DownloadError(
RemoteAccessError::InvalidResponse(DropServerError {
status_code: 400,
status_message: format!(
"for {}, expected {}, got {} ({})",
drop.filename, drop.length, raw_length, length
),
}),
));
}
}

View File

@ -1,9 +1,13 @@
use std::{
collections::HashMap, fs::File, io::{self, Read, Write}, path::{Path, PathBuf}
collections::HashMap,
fs::File,
io::{self, Read, Write},
path::{Path, PathBuf},
};
use log::error;
use native_model::{Decode, Encode};
use utils::lock;
pub type DropData = v1::DropData;
@ -49,7 +53,12 @@ impl DropData {
let mut s = Vec::new();
file.read_to_end(&mut s)?;
Ok(native_model::rmp_serde_1_3::RmpSerde::decode(s).unwrap())
native_model::rmp_serde_1_3::RmpSerde::decode(s).map_err(|e| {
io::Error::new(
io::ErrorKind::InvalidData,
format!("Failed to decode drop data: {e}"),
)
})
}
pub fn write(&self) {
let manifest_raw = match native_model::rmp_serde_1_3::RmpSerde::encode(&self) {
@ -71,12 +80,15 @@ impl DropData {
}
}
pub fn set_contexts(&self, completed_contexts: &[(String, bool)]) {
*self.contexts.lock().unwrap() = completed_contexts.iter().map(|s| (s.0.clone(), s.1)).collect();
*lock!(self.contexts) = completed_contexts
.iter()
.map(|s| (s.0.clone(), s.1))
.collect();
}
pub fn set_context(&self, context: String, state: bool) {
self.contexts.lock().unwrap().entry(context).insert_entry(state);
lock!(self.contexts).entry(context).insert_entry(state);
}
pub fn get_contexts(&self) -> HashMap<String, bool> {
self.contexts.lock().unwrap().clone()
lock!(self.contexts).clone()
}
}

View File

@ -0,0 +1,29 @@
use std::fmt::Display;
use serde_with::SerializeDisplay;
#[derive(SerializeDisplay)]
pub enum LibraryError {
MetaNotFound(String),
VersionNotFound(String),
}
impl Display for LibraryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}",
match self {
LibraryError::MetaNotFound(id) => {
format!(
"Could not locate any installed version of game ID {id} in the database"
)
}
LibraryError::VersionNotFound(game_id) => {
format!(
"Could not locate any installed version for game id {game_id} in the database"
)
}
}
)
}
}
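Because `LibraryError` derives `SerializeDisplay`, it serializes as its `Display` string, which lets it be returned straight from a Tauri command. A hypothetical command using the helpers defined later in this diff (the command name is illustrative, not part of the PR):

```rust
// Illustrative only: not an actual command added by this PR.
#[tauri::command]
fn fetch_installed_version(game_id: String) -> Result<String, LibraryError> {
    get_current_meta(&game_id)
        .and_then(|meta| meta.version)
        .ok_or(LibraryError::VersionNotFound(game_id))
}
```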

View File

@ -1,6 +1,7 @@
pub mod commands;
pub mod download_agent;
mod download_logic;
pub mod drop_data;
pub mod error;
mod manifest;
pub mod utils;
pub mod validate;

View File

@ -1,10 +1,8 @@
use std::{path::PathBuf, sync::Arc};
use std::{io, path::PathBuf, sync::Arc};
use futures_lite::io;
use download_manager::error::ApplicationDownloadError;
use sysinfo::{Disk, DiskRefreshKind, Disks};
use crate::error::application_download_error::ApplicationDownloadError;
pub fn get_disk_available(mount_point: PathBuf) -> Result<u64, ApplicationDownloadError> {
let disks = Disks::new_with_refreshed_list_specifics(DiskRefreshKind::nothing().with_storage());
@ -21,7 +19,7 @@ pub fn get_disk_available(mount_point: PathBuf) -> Result<u64, ApplicationDownlo
return Ok(disk.available_space());
}
}
Err(ApplicationDownloadError::IoError(Arc::new(io::Error::other(
"could not find disk of path",
))))
Err(ApplicationDownloadError::IoError(Arc::new(
io::Error::other("could not find disk of path"),
)))
}
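This helper is what the download agent (earlier in this diff) uses to compare available disk space against the manifest's required space before starting a download. A minimal, hypothetical call site:

```rust
// Illustrative only -- mirrors how the download agent gates a download.
use std::path::PathBuf;

fn has_enough_space(install_dir: &str, required: u64) -> Result<bool, ApplicationDownloadError> {
    let available = get_disk_available(PathBuf::from(install_dir))?;
    Ok(available >= required)
}
```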

View File

@ -3,17 +3,17 @@ use std::{
io::{self, BufWriter, Read, Seek, SeekFrom, Write},
};
use log::debug;
use md5::Context;
use crate::{
download_manager::util::{
use download_manager::{
error::ApplicationDownloadError,
util::{
download_thread_control_flag::{DownloadThreadControl, DownloadThreadControlFlag},
progress_object::ProgressHandle,
},
error::application_download_error::ApplicationDownloadError,
games::downloads::manifest::DropValidateContext,
};
use log::debug;
use md5::Context;
use crate::downloads::manifest::DropValidateContext;
pub fn validate_game_chunk(
ctx: &DropValidateContext,
@ -22,7 +22,10 @@ pub fn validate_game_chunk(
) -> Result<bool, ApplicationDownloadError> {
debug!(
"Starting chunk validation {}, {}, {} #{}",
ctx.path.display(), ctx.index, ctx.offset, ctx.checksum
ctx.path.display(),
ctx.index,
ctx.offset,
ctx.checksum
);
// If we're paused
if control_flag.get() == DownloadThreadControlFlag::Stop {
@ -36,19 +39,18 @@ pub fn validate_game_chunk(
if ctx.offset != 0 {
source
.seek(SeekFrom::Start(ctx.offset.try_into().unwrap()))
.seek(SeekFrom::Start(ctx.offset as u64))
.expect("Failed to seek to file offset");
}
let mut hasher = md5::Context::new();
let completed =
validate_copy(&mut source, &mut hasher, ctx.length, control_flag, progress).unwrap();
let completed = validate_copy(&mut source, &mut hasher, ctx.length, control_flag, progress)?;
if !completed {
return Ok(false);
}
let res = hex::encode(hasher.compute().0);
let res = hex::encode(hasher.finalize().0);
if res != ctx.checksum {
return Ok(false);
}

View File

@ -1,5 +1,7 @@
#![feature(iterator_try_collect)]
pub mod collections;
pub mod commands;
pub mod downloads;
pub mod library;
pub mod scan;
pub mod state;

View File

@ -0,0 +1,300 @@
use bitcode::{Decode, Encode};
use database::{
ApplicationTransientStatus, Database, DownloadableMetadata, GameDownloadStatus, GameVersion,
borrow_db_checked, borrow_db_mut_checked,
};
use log::{debug, error, warn};
use remote::{
auth::generate_authorization_header, error::RemoteAccessError, requests::generate_url,
utils::DROP_CLIENT_SYNC,
};
use serde::{Deserialize, Serialize};
use std::fs::remove_dir_all;
use std::thread::spawn;
use tauri::AppHandle;
use utils::app_emit;
use crate::state::{GameStatusManager, GameStatusWithTransient};
#[derive(Serialize, Deserialize, Debug)]
pub struct FetchGameStruct {
game: Game,
status: GameStatusWithTransient,
version: Option<GameVersion>,
}
impl FetchGameStruct {
pub fn new(game: Game, status: GameStatusWithTransient, version: Option<GameVersion>) -> Self {
Self {
game,
status,
version,
}
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct Game {
id: String,
m_name: String,
m_short_description: String,
m_description: String,
// mDevelopers
// mPublishers
m_icon_object_id: String,
m_banner_object_id: String,
m_cover_object_id: String,
m_image_library_object_ids: Vec<String>,
m_image_carousel_object_ids: Vec<String>,
}
impl Game {
pub fn id(&self) -> &String {
&self.id
}
}
#[derive(serde::Serialize, Clone)]
pub struct GameUpdateEvent {
pub game_id: String,
pub status: (
Option<GameDownloadStatus>,
Option<ApplicationTransientStatus>,
),
pub version: Option<GameVersion>,
}
/**
* Called by:
* - on_cancel, when cancelled, for obvious reasons
* - when downloading, so if drop unexpectedly quits, we can resume the download. hidden by the "Downloading..." transient state, though
* - when scanning, to import the game
*/
pub fn set_partially_installed(
meta: &DownloadableMetadata,
install_dir: String,
app_handle: Option<&AppHandle>,
) {
set_partially_installed_db(&mut borrow_db_mut_checked(), meta, install_dir, app_handle);
}
pub fn set_partially_installed_db(
db_lock: &mut Database,
meta: &DownloadableMetadata,
install_dir: String,
app_handle: Option<&AppHandle>,
) {
db_lock.applications.transient_statuses.remove(meta);
db_lock.applications.game_statuses.insert(
meta.id.clone(),
GameDownloadStatus::PartiallyInstalled {
version_name: meta.version.as_ref().unwrap().clone(),
install_dir,
},
);
db_lock
.applications
.installed_game_version
.insert(meta.id.clone(), meta.clone());
if let Some(app_handle) = app_handle {
push_game_update(
app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, db_lock),
);
}
}
pub fn uninstall_game_logic(meta: DownloadableMetadata, app_handle: &AppHandle) {
debug!("triggered uninstall for agent");
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});
push_game_update(
app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, &db_handle),
);
let previous_state = db_handle.applications.game_statuses.get(&meta.id).cloned();
let previous_state = if let Some(state) = previous_state {
state
} else {
warn!("uninstall job doesn't have previous state, failing silently");
return;
};
if let Some((_, install_dir)) = match previous_state {
GameDownloadStatus::Installed {
version_name,
install_dir,
} => Some((version_name, install_dir)),
GameDownloadStatus::SetupRequired {
version_name,
install_dir,
} => Some((version_name, install_dir)),
GameDownloadStatus::PartiallyInstalled {
version_name,
install_dir,
} => Some((version_name, install_dir)),
_ => None,
} {
db_handle
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});
drop(db_handle);
let app_handle = app_handle.clone();
spawn(move || {
if let Err(e) = remove_dir_all(install_dir) {
error!("{e}");
} else {
let mut db_handle = borrow_db_mut_checked();
db_handle.applications.transient_statuses.remove(&meta);
db_handle
.applications
.installed_game_version
.remove(&meta.id);
db_handle
.applications
.game_statuses
.insert(meta.id.clone(), GameDownloadStatus::Remote {});
let _ = db_handle.applications.transient_statuses.remove(&meta);
push_game_update(
&app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, &db_handle),
);
debug!("uninstalled game id {}", &meta.id);
app_emit!(&app_handle, "update_library", ());
}
});
} else {
warn!("invalid previous state for uninstall, failing silently.");
}
}
pub fn get_current_meta(game_id: &String) -> Option<DownloadableMetadata> {
borrow_db_checked()
.applications
.installed_game_version
.get(game_id)
.cloned()
}
pub fn on_game_complete(
meta: &DownloadableMetadata,
install_dir: String,
app_handle: &AppHandle,
) -> Result<(), RemoteAccessError> {
// Fetch game version information from remote
if meta.version.is_none() {
return Err(RemoteAccessError::GameNotFound(meta.id.clone()));
}
let client = DROP_CLIENT_SYNC.clone();
let response = generate_url(
&["/api/v1/client/game/version"],
&[
("id", &meta.id),
("version", meta.version.as_ref().unwrap()),
],
)?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()?;
let game_version: GameVersion = response.json()?;
let mut handle = borrow_db_mut_checked();
handle
.applications
.game_versions
.entry(meta.id.clone())
.or_default()
.insert(meta.version.clone().unwrap(), game_version.clone());
handle
.applications
.installed_game_version
.insert(meta.id.clone(), meta.clone());
drop(handle);
let status = if game_version.setup_command.is_empty() {
GameDownloadStatus::Installed {
version_name: meta.version.clone().unwrap(),
install_dir,
}
} else {
GameDownloadStatus::SetupRequired {
version_name: meta.version.clone().unwrap(),
install_dir,
}
};
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.game_statuses
.insert(meta.id.clone(), status.clone());
drop(db_handle);
app_emit!(
app_handle,
&format!("update_game/{}", meta.id),
GameUpdateEvent {
game_id: meta.id.clone(),
status: (Some(status), None),
version: Some(game_version),
}
);
Ok(())
}
pub fn push_game_update(
app_handle: &AppHandle,
game_id: &String,
version: Option<GameVersion>,
status: GameStatusWithTransient,
) {
if let Some(GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }) =
&status.0
&& version.is_none()
{
panic!("pushed game for installed game that doesn't have version information");
}
app_emit!(
app_handle,
&format!("update_game/{game_id}"),
GameUpdateEvent {
game_id: game_id.clone(),
status,
version,
}
);
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FrontendGameOptions {
launch_string: String,
}
impl FrontendGameOptions {
pub fn launch_string(&self) -> &String {
&self.launch_string
}
}

View File

@ -1,16 +1,11 @@
use std::fs;
use database::{DownloadType, DownloadableMetadata, borrow_db_mut_checked};
use log::warn;
use crate::{
database::{
db::borrow_db_mut_checked,
models::data::{DownloadType, DownloadableMetadata},
},
games::{
downloads::drop_data::{DropData, DROP_DATA_PATH},
library::set_partially_installed_db,
},
downloads::drop_data::{DROP_DATA_PATH, DropData},
library::set_partially_installed_db,
};
pub fn scan_install_dirs() {
@ -24,11 +19,11 @@ pub fn scan_install_dirs() {
if !drop_data_file.exists() {
continue;
}
let game_id = game.file_name().into_string().unwrap();
let game_id = game.file_name().display().to_string();
let Ok(drop_data) = DropData::read(&game.path()) else {
warn!(
".dropdata exists for {}, but couldn't read it. is it corrupted?",
game.file_name().into_string().unwrap()
game.file_name().display()
);
continue;
};

View File

@ -1,4 +1,4 @@
use crate::database::models::data::{
use database::models::data::{
ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus,
};
@ -19,7 +19,7 @@ impl GameStatusManager {
version: None,
})
.cloned();
let offline_state = database.applications.game_statuses.get(game_id).cloned();
if online_state.is_some() {

View File

@ -0,0 +1,19 @@
[package]
name = "process"
version = "0.1.0"
edition = "2024"
[dependencies]
chrono = "0.4.42"
client = { version = "0.1.0", path = "../client" }
database = { version = "0.1.0", path = "../database" }
dynfmt = "0.1.5"
games = { version = "0.1.0", path = "../games" }
log = "0.4.28"
page_size = "0.6.0"
serde = "1.0.228"
serde_with = "3.15.0"
shared_child = "1.1.1"
tauri = "2.8.5"
tauri-plugin-opener = "2.5.0"
utils = { version = "0.1.0", path = "../utils" }

View File

@ -11,7 +11,9 @@ pub enum ProcessError {
IOError(Error),
FormatError(String), // String errors supremacy
InvalidPlatform,
OpenerError(tauri_plugin_opener::Error)
OpenerError(tauri_plugin_opener::Error),
InvalidArguments(String),
FailedLaunch(String),
}
impl Display for ProcessError {
@ -23,9 +25,15 @@ impl Display for ProcessError {
ProcessError::InvalidVersion => "Invalid game version",
ProcessError::IOError(error) => &error.to_string(),
ProcessError::InvalidPlatform => "This game cannot be played on the current platform",
ProcessError::FormatError(e) => &format!("Failed to format template: {e}"),
ProcessError::OpenerError(error) => &format!("Failed to open directory: {error}"),
};
ProcessError::FormatError(error) => &format!("Could not format template: {error:?}"),
ProcessError::OpenerError(error) => &format!("Could not open directory: {error:?}"),
ProcessError::InvalidArguments(arguments) => {
&format!("Invalid arguments in command {arguments}")
}
ProcessError::FailedLaunch(game_id) => {
&format!("Drop detected that the game {game_id} may have failed to launch properly")
}
};
write!(f, "{s}")
}
}

View File

@ -8,7 +8,12 @@ pub struct DropFormatArgs {
}
impl DropFormatArgs {
pub fn new(launch_string: String, working_dir: &String, executable_name: &String, absolute_executable_name: String) -> Self {
pub fn new(
launch_string: String,
working_dir: &String,
executable_name: &String,
absolute_executable_name: String,
) -> Self {
let mut positional = Vec::new();
let mut map: HashMap<&'static str, String> = HashMap::new();

View File

@ -0,0 +1,41 @@
#![feature(nonpoison_mutex)]
#![feature(sync_nonpoison)]
use std::{
ops::Deref,
sync::{OnceLock, nonpoison::Mutex},
};
use tauri::AppHandle;
use crate::process_manager::ProcessManager;
pub static PROCESS_MANAGER: ProcessManagerWrapper = ProcessManagerWrapper::new();
pub mod error;
pub mod format;
pub mod process_handlers;
pub mod process_manager;
pub struct ProcessManagerWrapper(OnceLock<Mutex<ProcessManager<'static>>>);
impl ProcessManagerWrapper {
const fn new() -> Self {
ProcessManagerWrapper(OnceLock::new())
}
pub fn init(app_handle: AppHandle) {
PROCESS_MANAGER
.0
.set(Mutex::new(ProcessManager::new(app_handle)))
.unwrap_or_else(|_| panic!("Failed to initialise Process Manager")); // Using panic! here because we can't implement Debug
}
}
impl Deref for ProcessManagerWrapper {
type Target = Mutex<ProcessManager<'static>>;
fn deref(&self) -> &Self::Target {
match self.0.get() {
Some(process_manager) => process_manager,
None => unreachable!("Process manager should always be initialised"),
}
}
}
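The `PROCESS_MANAGER` static replaces the process manager that previously lived inside `AppState`. Based on the call sites later in this diff, usage looks roughly like the sketch below; where `init` is actually called during setup is not shown here and is an assumption.

```rust
// Sketch of typical usage; the exact setup location is an assumption.
fn example(app_handle: tauri::AppHandle) {
    // Once, during application setup:
    ProcessManagerWrapper::init(app_handle);

    // Later, from a command handler or background thread
    // (launch_process is blocking, so it is spawned):
    std::thread::spawn(|| {
        if let Err(e) = PROCESS_MANAGER.lock().launch_process("game-id".to_string()) {
            log::warn!("failed to launch: {e}");
        }
    });
}
```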

View File

@ -1,17 +1,8 @@
use std::{
ffi::OsStr,
path::PathBuf,
process::{Command, Stdio},
sync::LazyLock,
};
use client::compat::{COMPAT_INFO, UMU_LAUNCHER_EXECUTABLE};
use database::{Database, DownloadableMetadata, GameVersion, platform::Platform};
use log::debug;
use log::{debug, info};
use crate::{
AppState,
database::models::data::{Database, DownloadableMetadata, GameVersion},
process::process_manager::{Platform, ProcessHandler},
};
use crate::{error::ProcessError, process_manager::ProcessHandler};
pub struct NativeGameLauncher;
impl ProcessHandler for NativeGameLauncher {
@ -22,40 +13,15 @@ impl ProcessHandler for NativeGameLauncher {
args: Vec<String>,
_game_version: &GameVersion,
_current_dir: &str,
) -> String {
format!("\"{}\" {}", launch_command, args.join(" "))
) -> Result<String, ProcessError> {
Ok(format!("\"{}\" {}", launch_command, args.join(" ")))
}
fn valid_for_platform(&self, _db: &Database, _state: &AppState, _target: &Platform) -> bool {
fn valid_for_platform(&self, _db: &Database, _target: &Platform) -> bool {
true
}
}
pub static UMU_LAUNCHER_EXECUTABLE: LazyLock<Option<PathBuf>> = LazyLock::new(|| {
let x = get_umu_executable();
info!("{:?}", &x);
x
});
const UMU_BASE_LAUNCHER_EXECUTABLE: &str = "umu-run";
const UMU_INSTALL_DIRS: [&str; 4] = ["/app/share", "/use/local/share", "/usr/share", "/opt"];
fn get_umu_executable() -> Option<PathBuf> {
if check_executable_exists(UMU_BASE_LAUNCHER_EXECUTABLE) {
return Some(PathBuf::from(UMU_BASE_LAUNCHER_EXECUTABLE));
}
for dir in UMU_INSTALL_DIRS {
let p = PathBuf::from(dir).join(UMU_BASE_LAUNCHER_EXECUTABLE);
if check_executable_exists(&p) {
return Some(p);
}
}
None
}
fn check_executable_exists<P: AsRef<OsStr>>(exec: P) -> bool {
let has_umu_installed = Command::new(exec).stdout(Stdio::null()).output();
has_umu_installed.is_ok()
}
pub struct UMULauncher;
impl ProcessHandler for UMULauncher {
fn create_launch_process(
@ -65,7 +31,7 @@ impl ProcessHandler for UMULauncher {
args: Vec<String>,
game_version: &GameVersion,
_current_dir: &str,
) -> String {
) -> Result<String, ProcessError> {
debug!("Game override: \"{:?}\"", &game_version.umu_id_override);
let game_id = match &game_version.umu_id_override {
Some(game_override) => {
@ -77,16 +43,18 @@ impl ProcessHandler for UMULauncher {
}
None => game_version.game_id.clone(),
};
format!(
Ok(format!(
"GAMEID={game_id} {umu:?} \"{launch}\" {args}",
umu = UMU_LAUNCHER_EXECUTABLE.as_ref().unwrap(),
umu = UMU_LAUNCHER_EXECUTABLE
.as_ref()
.expect("Failed to get UMU_LAUNCHER_EXECUTABLE as ref"),
launch = launch_command,
args = args.join(" ")
)
))
}
fn valid_for_platform(&self, _db: &Database, state: &AppState, _target: &Platform) -> bool {
let Some(ref compat_info) = state.compat_info else {
fn valid_for_platform(&self, _db: &Database, _target: &Platform) -> bool {
let Some(compat_info) = &*COMPAT_INFO else {
return false;
};
compat_info.umu_installed
@ -102,7 +70,7 @@ impl ProcessHandler for AsahiMuvmLauncher {
args: Vec<String>,
game_version: &GameVersion,
current_dir: &str,
) -> String {
) -> Result<String, ProcessError> {
let umu_launcher = UMULauncher {};
let umu_string = umu_launcher.create_launch_process(
meta,
@ -110,20 +78,28 @@ impl ProcessHandler for AsahiMuvmLauncher {
args,
game_version,
current_dir,
);
)?;
let mut args_cmd = umu_string
.split("umu-run")
.collect::<Vec<&str>>()
.into_iter();
let args = args_cmd.next().unwrap().trim();
let cmd = format!("umu-run{}", args_cmd.next().unwrap());
let args = args_cmd
.next()
.ok_or(ProcessError::InvalidArguments(umu_string.clone()))?
.trim();
let cmd = format!(
"umu-run{}",
args_cmd
.next()
.ok_or(ProcessError::InvalidArguments(umu_string.clone()))?
);
format!("{args} muvm -- {cmd}")
Ok(format!("{args} muvm -- {cmd}"))
}
#[allow(unreachable_code)]
#[allow(unused_variables)]
fn valid_for_platform(&self, _db: &Database, state: &AppState, _target: &Platform) -> bool {
fn valid_for_platform(&self, _db: &Database, _target: &Platform) -> bool {
#[cfg(not(target_os = "linux"))]
return false;
@ -135,7 +111,7 @@ impl ProcessHandler for AsahiMuvmLauncher {
return false;
}
let Some(ref compat_info) = state.compat_info else {
let Some(compat_info) = &*COMPAT_INFO else {
return false;
};

View File

@ -1,38 +1,31 @@
use std::{
collections::HashMap,
fs::{OpenOptions, create_dir_all},
io::{self},
io,
path::PathBuf,
process::{Command, ExitStatus},
str::FromStr,
sync::{Arc, Mutex},
sync::Arc,
thread::spawn,
time::{Duration, SystemTime},
};
use database::{
ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus,
GameVersion, borrow_db_checked, borrow_db_mut_checked, db::DATA_ROOT_DIR, platform::Platform,
};
use dynfmt::Format;
use dynfmt::SimpleCurlyFormat;
use games::{library::push_game_update, state::GameStatusManager};
use log::{debug, info, warn};
use serde::{Deserialize, Serialize};
use shared_child::SharedChild;
use tauri::{AppHandle, Emitter, Manager};
use tauri_plugin_opener::OpenerExt;
use tauri::AppHandle;
use crate::{
AppState, DB,
database::{
db::{DATA_ROOT_DIR, borrow_db_checked, borrow_db_mut_checked},
models::data::{
ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata,
GameDownloadStatus, GameVersion,
},
},
error::process_error::ProcessError,
games::{library::push_game_update, state::GameStatusManager},
process::{
format::DropFormatArgs,
process_handlers::{AsahiMuvmLauncher, NativeGameLauncher, UMULauncher},
},
PROCESS_MANAGER,
error::ProcessError,
format::DropFormatArgs,
process_handlers::{AsahiMuvmLauncher, NativeGameLauncher, UMULauncher},
};
pub struct RunningProcess {
@ -45,11 +38,11 @@ pub struct ProcessManager<'a> {
current_platform: Platform,
log_output_dir: PathBuf,
processes: HashMap<String, RunningProcess>,
app_handle: AppHandle,
game_launchers: Vec<(
(Platform, Platform),
&'a (dyn ProcessHandler + Sync + Send + 'static),
)>,
app_handle: AppHandle,
}
impl ProcessManager<'_> {
@ -66,7 +59,6 @@ impl ProcessManager<'_> {
#[cfg(target_os = "linux")]
current_platform: Platform::Linux,
app_handle,
processes: HashMap::new(),
log_output_dir,
game_launchers: vec![
@ -92,6 +84,7 @@ impl ProcessManager<'_> {
&UMULauncher {} as &(dyn ProcessHandler + Sync + Send + 'static),
),
],
app_handle,
}
}
@ -110,30 +103,31 @@ impl ProcessManager<'_> {
}
}
fn get_log_dir(&self, game_id: String) -> PathBuf {
pub fn get_log_dir(&self, game_id: String) -> PathBuf {
self.log_output_dir.join(game_id)
}
pub fn open_process_logs(&mut self, game_id: String) -> Result<(), ProcessError> {
let dir = self.get_log_dir(game_id);
self.app_handle
.opener()
.open_path(dir.to_str().unwrap(), None::<&str>)
.map_err(ProcessError::OpenerError)?;
Ok(())
}
fn on_process_finish(&mut self, game_id: String, result: Result<ExitStatus, std::io::Error>) {
fn on_process_finish(
&mut self,
game_id: String,
result: Result<ExitStatus, std::io::Error>,
) -> Result<(), ProcessError> {
if !self.processes.contains_key(&game_id) {
warn!(
"process on_finish was called, but game_id is no longer valid. finished with result: {result:?}"
);
return;
return Ok(());
}
debug!("process for {:?} exited with {:?}", &game_id, result);
let process = self.processes.remove(&game_id).unwrap();
let process = match self.processes.remove(&game_id) {
Some(process) => process,
None => {
info!("Attempted to stop process {game_id} which didn't exist");
return Ok(());
}
};
let mut db_handle = borrow_db_mut_checked();
let meta = db_handle
@ -141,7 +135,7 @@ impl ProcessManager<'_> {
.installed_game_version
.get(&game_id)
.cloned()
.unwrap();
.unwrap_or_else(|| panic!("Could not get installed version of {}", &game_id));
db_handle.applications.transient_statuses.remove(&meta);
let current_state = db_handle.applications.game_statuses.get(&game_id).cloned();
@ -166,20 +160,18 @@ impl ProcessManager<'_> {
// Or if the status isn't 0
// Or if it's an error
if !process.manually_killed
&& (elapsed.as_secs() <= 2 || result.is_err() || !result.unwrap().success())
&& (elapsed.as_secs() <= 2 || result.map_or(true, |r| !r.success()))
{
warn!("drop detected that the game {game_id} may have failed to launch properly");
let _ = self.app_handle.emit("launch_external_error", &game_id);
return Err(ProcessError::FailedLaunch(game_id));
// let _ = self.app_handle.emit("launch_external_error", &game_id);
}
// This is too many unwraps for me to be comfortable
let version_data = db_handle
.applications
.game_versions
.get(&game_id)
.unwrap()
.get(&meta.version.unwrap())
.unwrap();
let version_data = match db_handle.applications.game_versions.get(&game_id) {
// This unwrap here should be resolved by just making the hashmap accept an option rather than just a String
Some(res) => res.get(&meta.version.unwrap()).expect("Failed to get game version from installed game versions. Is the database corrupted?"),
None => todo!(),
};
let status = GameStatusManager::fetch_state(&game_id, &db_handle);
@ -189,12 +181,12 @@ impl ProcessManager<'_> {
Some(version_data.clone()),
status,
);
Ok(())
}
fn fetch_process_handler(
&self,
db_lock: &Database,
state: &AppState,
target_platform: &Platform,
) -> Result<&(dyn ProcessHandler + Send + Sync), ProcessError> {
Ok(self
@ -204,30 +196,25 @@ impl ProcessManager<'_> {
let (e_current, e_target) = e.0;
e_current == self.current_platform
&& e_target == *target_platform
&& e.1.valid_for_platform(db_lock, state, target_platform)
&& e.1.valid_for_platform(db_lock, target_platform)
})
.ok_or(ProcessError::InvalidPlatform)?
.1)
}
pub fn valid_platform(&self, platform: &Platform, state: &AppState) -> Result<bool, String> {
pub fn valid_platform(&self, platform: &Platform) -> bool {
let db_lock = borrow_db_checked();
let process_handler = self.fetch_process_handler(&db_lock, state, platform);
Ok(process_handler.is_ok())
let process_handler = self.fetch_process_handler(&db_lock, platform);
process_handler.is_ok()
}
pub fn launch_process(
&mut self,
game_id: String,
state: &AppState,
) -> Result<(), ProcessError> {
/// Must be called through spawn as it is currently blocking
pub fn launch_process(&mut self, game_id: String) -> Result<(), ProcessError> {
if self.processes.contains_key(&game_id) {
return Err(ProcessError::AlreadyRunning);
}
let version = match DB
.borrow_data()
.unwrap()
let version = match borrow_db_checked()
.applications
.game_statuses
.get(&game_id)
@ -266,7 +253,7 @@ impl ProcessManager<'_> {
debug!(
"Launching process {:?} with version {:?}",
&game_id,
db_lock.applications.game_versions.get(&game_id).unwrap()
db_lock.applications.game_versions.get(&game_id)
);
let game_version = db_lock
@ -304,7 +291,7 @@ impl ProcessManager<'_> {
let target_platform = game_version.platform;
let process_handler = self.fetch_process_handler(&db_lock, state, &target_platform)?;
let process_handler = self.fetch_process_handler(&db_lock, &target_platform)?;
let (launch, args) = match game_status {
GameDownloadStatus::Installed {
@ -322,8 +309,9 @@ impl ProcessManager<'_> {
GameDownloadStatus::Remote {} => unreachable!("Game registered as 'Remote'"),
};
#[allow(clippy::unwrap_used)]
let launch = PathBuf::from_str(install_dir).unwrap().join(launch);
let launch = launch.to_str().unwrap();
let launch = launch.display().to_string();
let launch_string = process_handler.create_launch_process(
&meta,
@ -331,7 +319,7 @@ impl ProcessManager<'_> {
args.clone(),
game_version,
install_dir,
);
)?;
let format_args = DropFormatArgs::new(
launch_string,
@ -385,24 +373,8 @@ impl ProcessManager<'_> {
);
let wait_thread_handle = launch_process_handle.clone();
let wait_thread_apphandle = self.app_handle.clone();
let wait_thread_game_id = meta.clone();
spawn(move || {
let result: Result<ExitStatus, std::io::Error> = launch_process_handle.wait();
let app_state = wait_thread_apphandle.state::<Mutex<AppState>>();
let app_state_handle = app_state.lock().unwrap();
let mut process_manager_handle = app_state_handle.process_manager.lock().unwrap();
process_manager_handle.on_process_finish(wait_thread_game_id.id, result);
// As everything goes out of scope, they should get dropped
// But just to explicit about it
drop(process_manager_handle);
drop(app_state_handle);
});
self.processes.insert(
meta.id,
RunningProcess {
@ -411,55 +383,17 @@ impl ProcessManager<'_> {
manually_killed: false,
},
);
spawn(move || {
let result: Result<ExitStatus, std::io::Error> = launch_process_handle.wait();
PROCESS_MANAGER
.lock()
.on_process_finish(wait_thread_game_id.id, result)
});
Ok(())
}
}
#[derive(Eq, Hash, PartialEq, Serialize, Deserialize, Clone, Copy, Debug)]
pub enum Platform {
Windows,
Linux,
MacOs,
}
impl Platform {
#[cfg(target_os = "windows")]
pub const HOST: Platform = Self::Windows;
#[cfg(target_os = "macos")]
pub const HOST: Platform = Self::MacOs;
#[cfg(target_os = "linux")]
pub const HOST: Platform = Self::Linux;
pub fn is_case_sensitive(&self) -> bool {
match self {
Self::Windows | Self::MacOs => false,
Self::Linux => true,
}
}
}
impl From<&str> for Platform {
fn from(value: &str) -> Self {
match value.to_lowercase().trim() {
"windows" => Self::Windows,
"linux" => Self::Linux,
"mac" | "macos" => Self::MacOs,
_ => unimplemented!(),
}
}
}
impl From<whoami::Platform> for Platform {
fn from(value: whoami::Platform) -> Self {
match value {
whoami::Platform::Windows => Platform::Windows,
whoami::Platform::Linux => Platform::Linux,
whoami::Platform::MacOS => Platform::MacOs,
_ => unimplemented!(),
}
}
}
pub trait ProcessHandler: Send + 'static {
fn create_launch_process(
&self,
@ -468,7 +402,7 @@ pub trait ProcessHandler: Send + 'static {
args: Vec<String>,
game_version: &GameVersion,
current_dir: &str,
) -> String;
) -> Result<String, ProcessError>;
fn valid_for_platform(&self, db: &Database, state: &AppState, target: &Platform) -> bool;
fn valid_for_platform(&self, db: &Database, target: &Platform) -> bool;
}

View File

@ -0,0 +1,23 @@
[package]
name = "remote"
version = "0.1.0"
edition = "2024"
[dependencies]
bitcode = "0.6.7"
chrono = "0.4.42"
client = { version = "0.1.0", path = "../client" }
database = { version = "0.1.0", path = "../database" }
droplet-rs = "0.7.3"
gethostname = "1.0.2"
hex = "0.4.3"
http = "1.3.1"
log = "0.4.28"
md5 = "0.8.0"
reqwest = "0.12.23"
reqwest-websocket = "0.5.1"
serde = "1.0.228"
serde_with = "3.15.0"
tauri = "2.8.5"
url = "2.5.7"
utils = { version = "0.1.0", path = "../utils" }

View File

@ -0,0 +1,152 @@
use std::{collections::HashMap, env};
use chrono::Utc;
use client::{app_status::AppStatus, user::User};
use database::{DatabaseAuth, interface::borrow_db_checked};
use droplet_rs::ssl::sign_nonce;
use gethostname::gethostname;
use log::{error, warn};
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
error::{DropServerError, RemoteAccessError},
requests::make_authenticated_get,
utils::DROP_CLIENT_SYNC,
};
use super::{
cache::{cache_object, get_cached_object},
requests::generate_url,
};
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CapabilityConfiguration {}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct InitiateRequestBody {
name: String,
platform: String,
capabilities: HashMap<String, CapabilityConfiguration>,
mode: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct HandshakeRequestBody {
client_id: String,
token: String,
}
impl HandshakeRequestBody {
pub fn new(client_id: String, token: String) -> Self {
Self { client_id, token }
}
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HandshakeResponse {
private: String,
certificate: String,
id: String,
}
impl From<HandshakeResponse> for DatabaseAuth {
fn from(value: HandshakeResponse) -> Self {
DatabaseAuth::new(value.private, value.certificate, value.id, None)
}
}
pub fn generate_authorization_header() -> String {
let certs = {
let db = borrow_db_checked();
db.auth.clone().expect("Authorisation not initialised")
};
let nonce = Utc::now().timestamp_millis().to_string();
let signature =
sign_nonce(certs.private, nonce.clone()).expect("Failed to generate authorisation header");
format!("Nonce {} {} {}", certs.client_id, nonce, signature)
}
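The header produced here has the shape `Nonce <client_id> <timestamp_ms> <signature>` and is attached manually wherever the request helpers are not used, as in `on_game_complete` and `fetch_object` elsewhere in this diff. A hypothetical minimal call site:

```rust
// Illustrative only; the function name is made up for this example.
fn fetch_user_raw() -> Result<String, RemoteAccessError> {
    let response = DROP_CLIENT_SYNC
        .get(generate_url(&["/api/v1/client/user"], &[])?)
        .header("Authorization", generate_authorization_header())
        .send()?;
    Ok(response.text()?)
}
```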
pub async fn fetch_user() -> Result<User, RemoteAccessError> {
let response = make_authenticated_get(generate_url(&["/api/v1/client/user"], &[])?).await?;
if response.status() != 200 {
let err: DropServerError = response.json().await?;
warn!("{err:?}");
if err.status_message == "Nonce expired" {
return Err(RemoteAccessError::OutOfSync);
}
return Err(RemoteAccessError::InvalidResponse(err));
}
response
.json::<User>()
.await
.map_err(std::convert::Into::into)
}
pub fn auth_initiate_logic(mode: String) -> Result<String, RemoteAccessError> {
let base_url = {
let db_lock = borrow_db_checked();
Url::parse(&db_lock.base_url.clone())?
};
let hostname = gethostname();
let endpoint = base_url.join("/api/v1/client/auth/initiate")?;
let body = InitiateRequestBody {
name: format!("{} (Desktop)", hostname.display()),
platform: env::consts::OS.to_string(),
capabilities: HashMap::from([
("peerAPI".to_owned(), CapabilityConfiguration {}),
("cloudSaves".to_owned(), CapabilityConfiguration {}),
]),
mode,
};
let client = DROP_CLIENT_SYNC.clone();
let response = client.post(endpoint.to_string()).json(&body).send()?;
if response.status() != 200 {
let data: DropServerError = response.json()?;
error!("could not start handshake: {}", data.status_message);
return Err(RemoteAccessError::HandshakeFailed(data.status_message));
}
let response = response.text()?;
Ok(response)
}
pub async fn setup() -> (AppStatus, Option<User>) {
let auth = {
let data = borrow_db_checked();
data.auth.clone()
};
if auth.is_some() {
let user_result = match fetch_user().await {
Ok(data) => data,
Err(RemoteAccessError::FetchError(_)) => {
let user = get_cached_object::<User>("user").ok();
return (AppStatus::Offline, user);
}
Err(_) => return (AppStatus::SignedInNeedsReauth, None),
};
if let Err(e) = cache_object("user", &user_result) {
warn!("Could not cache user object with error {e}");
}
return (AppStatus::SignedIn, Some(user_result));
}
(AppStatus::SignedOut, None)
}

View File

@ -5,18 +5,19 @@ use std::{
time::SystemTime,
};
use crate::{
database::{db::borrow_db_checked, models::data::Database},
error::remote_access_error::RemoteAccessError,
};
use bitcode::{Decode, DecodeOwned, Encode};
use database::{Database, borrow_db_checked};
use http::{Response, header::CONTENT_TYPE, response::Builder as ResponseBuilder};
use crate::error::{CacheError, RemoteAccessError};
#[macro_export]
macro_rules! offline {
($var:expr, $func1:expr, $func2:expr, $( $arg:expr ),* ) => {
async move { if $crate::borrow_db_checked().settings.force_offline || $var.lock().unwrap().status == $crate::AppStatus::Offline {
async move {
if ::database::borrow_db_checked().settings.force_offline
|| $var.lock().status == ::client::app_status::AppStatus::Offline {
$func2( $( $arg ), *).await
} else {
$func1( $( $arg ), *).await
@ -82,10 +83,7 @@ pub fn get_cached_object_db<D: DecodeOwned>(
pub fn clear_cached_object(key: &str) -> Result<(), RemoteAccessError> {
clear_cached_object_db(key, &borrow_db_checked())
}
pub fn clear_cached_object_db(
key: &str,
db: &Database,
) -> Result<(), RemoteAccessError> {
pub fn clear_cached_object_db(key: &str, db: &Database) -> Result<(), RemoteAccessError> {
delete_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
Ok(())
}
@ -104,30 +102,39 @@ impl ObjectCache {
}
}
impl From<Response<Vec<u8>>> for ObjectCache {
fn from(value: Response<Vec<u8>>) -> Self {
ObjectCache {
impl TryFrom<Response<Vec<u8>>> for ObjectCache {
type Error = CacheError;
fn try_from(value: Response<Vec<u8>>) -> Result<Self, Self::Error> {
Ok(ObjectCache {
content_type: value
.headers()
.get(CONTENT_TYPE)
.unwrap()
.ok_or(CacheError::HeaderNotFound(CONTENT_TYPE))?
.to_str()
.unwrap()
.map_err(CacheError::ParseError)?
.to_owned(),
body: value.body().clone(),
expiry: get_sys_time_in_secs() + 60 * 60 * 24,
}
})
}
}
impl From<ObjectCache> for Response<Vec<u8>> {
fn from(value: ObjectCache) -> Self {
impl TryFrom<ObjectCache> for Response<Vec<u8>> {
type Error = CacheError;
fn try_from(value: ObjectCache) -> Result<Self, Self::Error> {
let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type);
resp_builder.body(value.body).unwrap()
resp_builder
.body(value.body)
.map_err(CacheError::ConstructionError)
}
}
impl From<&ObjectCache> for Response<Vec<u8>> {
fn from(value: &ObjectCache) -> Self {
impl TryFrom<&ObjectCache> for Response<Vec<u8>> {
type Error = CacheError;
fn try_from(value: &ObjectCache) -> Result<Self, Self::Error> {
let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type.clone());
resp_builder.body(value.body.clone()).unwrap()
resp_builder
.body(value.body.clone())
.map_err(CacheError::ConstructionError)
}
}

View File

@ -4,11 +4,20 @@ use std::{
sync::Arc,
};
use http::StatusCode;
use http::{HeaderName, StatusCode, header::ToStrError};
use serde_with::SerializeDisplay;
use url::ParseError;
use super::drop_server_error::DropServerError;
use serde::Deserialize;
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DropServerError {
pub status_code: usize,
pub status_message: String,
// pub message: String,
// pub url: String,
}
#[derive(Debug, SerializeDisplay)]
pub enum RemoteAccessError {
@ -44,8 +53,7 @@ impl Display for RemoteAccessError {
error
.source()
.map(std::string::ToString::to_string)
.or_else(|| Some("Unknown error".to_string()))
.unwrap()
.unwrap_or("Unknown error".to_string())
)
}
RemoteAccessError::FetchErrorWS(error) => write!(
@ -54,9 +62,8 @@ impl Display for RemoteAccessError {
error,
error
.source()
.map(|e| e.to_string())
.or_else(|| Some("Unknown error".to_string()))
.unwrap()
.map(std::string::ToString::to_string)
.unwrap_or("Unknown error".to_string())
),
RemoteAccessError::ParsingError(parse_error) => {
write!(f, "{parse_error}")
@ -106,3 +113,31 @@ impl From<ParseError> for RemoteAccessError {
}
}
impl std::error::Error for RemoteAccessError {}
#[derive(Debug, SerializeDisplay)]
pub enum CacheError {
HeaderNotFound(HeaderName),
ParseError(ToStrError),
Remote(RemoteAccessError),
ConstructionError(http::Error),
}
impl Display for CacheError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
CacheError::HeaderNotFound(header_name) => {
format!("Could not find header {header_name} in cache")
}
CacheError::ParseError(to_str_error) => {
format!("Could not parse cache with error {to_str_error}")
}
CacheError::Remote(remote_access_error) => {
format!("Cache got remote access error: {remote_access_error}")
}
CacheError::ConstructionError(error) => {
format!("Could not construct cache body with error {error}")
}
};
write!(f, "{s}")
}
}

View File

@ -0,0 +1,82 @@
use database::{DB, interface::DatabaseImpls};
use http::{Response, header::CONTENT_TYPE, response::Builder as ResponseBuilder};
use log::{debug, warn};
use tauri::UriSchemeResponder;
use crate::{error::CacheError, utils::DROP_CLIENT_ASYNC};
use super::{
auth::generate_authorization_header,
cache::{ObjectCache, cache_object, get_cached_object},
};
pub async fn fetch_object_wrapper(request: http::Request<Vec<u8>>, responder: UriSchemeResponder) {
match fetch_object(request).await {
Ok(r) => responder.respond(r),
Err(e) => {
warn!("Cache error: {e}");
responder.respond(
Response::builder()
.status(500)
.body(Vec::new())
.expect("Failed to build error response"),
);
}
};
}
pub async fn fetch_object(
request: http::Request<Vec<u8>>,
) -> Result<Response<Vec<u8>>, CacheError> {
// Drop leading /
let object_id = &request.uri().path()[1..];
let cache_result = get_cached_object::<ObjectCache>(object_id);
if let Ok(cache_result) = &cache_result
&& !cache_result.has_expired()
{
return cache_result.try_into();
}
let header = generate_authorization_header();
let client = DROP_CLIENT_ASYNC.clone();
let url = format!("{}api/v1/client/object/{object_id}", DB.fetch_base_url());
let response = client.get(url).header("Authorization", header).send().await;
match response {
Ok(r) => {
let resp_builder = ResponseBuilder::new().header(
CONTENT_TYPE,
r.headers()
.get("Content-Type")
.expect("Failed get Content-Type header"),
);
let data = match r.bytes().await {
Ok(data) => Vec::from(data),
Err(e) => {
warn!("Could not get data from cache object {object_id} with error {e}",);
Vec::new()
}
};
let resp = resp_builder
.body(data)
.expect("Failed to build object cache response body");
if cache_result.map_or(true, |x| x.has_expired()) {
cache_object::<ObjectCache>(object_id, &resp.clone().try_into()?)
.expect("Failed to create cached object");
}
Ok(resp)
}
Err(e) => {
debug!("Object fetch failed with error {e}. Attempting to download from cache");
match cache_result {
Ok(cache_result) => cache_result.try_into(),
Err(e) => {
warn!("{e}");
Err(CacheError::Remote(e))
}
}
}
}
}

View File

@ -1,8 +1,10 @@
pub mod auth;
#[macro_use]
pub mod cache;
pub mod commands;
pub mod error;
pub mod fetch_object;
pub mod requests;
pub mod server_proto;
pub mod utils;
pub use auth::setup;

View File

@ -1,10 +1,8 @@
use database::{DB, interface::DatabaseImpls};
use url::Url;
use crate::{
DB,
database::db::DatabaseImpls,
error::remote_access_error::RemoteAccessError,
remote::{auth::generate_authorization_header, utils::DROP_CLIENT_ASYNC},
auth::generate_authorization_header, error::RemoteAccessError, utils::DROP_CLIENT_ASYNC,
};
pub fn generate_url<T: AsRef<str>>(

View File

@ -0,0 +1,108 @@
use std::str::FromStr;
use database::borrow_db_checked;
use http::{Request, Response, StatusCode, Uri, uri::PathAndQuery};
use log::{error, warn};
use tauri::UriSchemeResponder;
use utils::webbrowser_open::webbrowser_open;
use crate::utils::DROP_CLIENT_SYNC;
pub async fn handle_server_proto_offline_wrapper(
request: Request<Vec<u8>>,
responder: UriSchemeResponder,
) {
responder.respond(match handle_server_proto_offline(request).await {
Ok(res) => res,
Err(_) => unreachable!(),
});
}
pub async fn handle_server_proto_offline(
_request: Request<Vec<u8>>,
) -> Result<Response<Vec<u8>>, StatusCode> {
Ok(Response::builder()
.status(StatusCode::NOT_FOUND)
.body(Vec::new())
.expect("Failed to build error response for proto offline"))
}
pub async fn handle_server_proto_wrapper(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
match handle_server_proto(request).await {
Ok(r) => responder.respond(r),
Err(e) => {
warn!("Cache error: {e}");
responder.respond(
Response::builder()
.status(e)
.body(Vec::new())
.expect("Failed to build error response"),
);
}
}
}
async fn handle_server_proto(request: Request<Vec<u8>>) -> Result<Response<Vec<u8>>, StatusCode> {
let db_handle = borrow_db_checked();
let auth = match db_handle.auth.as_ref() {
Some(auth) => auth,
None => {
error!("Could not find auth in database");
return Err(StatusCode::UNAUTHORIZED);
}
};
let web_token = match &auth.web_token {
Some(token) => token,
None => return Err(StatusCode::UNAUTHORIZED),
};
let remote_uri = db_handle
.base_url
.parse::<Uri>()
.expect("Failed to parse base url");
let path = request.uri().path();
let mut new_uri = request.uri().clone().into_parts();
new_uri.path_and_query = Some(
PathAndQuery::from_str(&format!("{path}?noWrapper=true"))
.expect("Failed to parse request path in proto"),
);
new_uri.authority = remote_uri.authority().cloned();
new_uri.scheme = remote_uri.scheme().cloned();
let err_msg = &format!("Failed to build new uri from parts {new_uri:?}");
let new_uri = Uri::from_parts(new_uri).expect(err_msg);
let whitelist_prefix = ["/store", "/api", "/_", "/fonts"];
if whitelist_prefix.iter().all(|f| !path.starts_with(f)) {
webbrowser_open(new_uri.to_string());
return Ok(Response::new(Vec::new()));
}
let client = DROP_CLIENT_SYNC.clone();
let response = match client
.request(request.method().clone(), new_uri.to_string())
.header("Authorization", format!("Bearer {web_token}"))
.headers(request.headers().clone())
.send()
{
Ok(response) => response,
Err(e) => {
warn!("Could not send response. Got {e} when sending");
return Err(e.status().unwrap_or(StatusCode::BAD_REQUEST));
}
};
let response_status = response.status();
let response_body = match response.bytes() {
Ok(bytes) => bytes,
Err(e) => return Err(e.status().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR)),
};
let http_response = Response::builder()
.status(response_status)
.body(response_body.to_vec())
.expect("Failed to build server proto response");
Ok(http_response)
}

View File

@ -0,0 +1,119 @@
use std::{
fs::{self, File},
io::Read,
sync::LazyLock,
};
use database::db::DATA_ROOT_DIR;
use log::{debug, info, warn};
use reqwest::Certificate;
use serde::Deserialize;
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DropHealthcheck {
app_name: String,
}
impl DropHealthcheck {
pub fn app_name(&self) -> &String {
&self.app_name
}
}
static DROP_CERT_BUNDLE: LazyLock<Vec<Certificate>> = LazyLock::new(fetch_certificates);
pub static DROP_CLIENT_SYNC: LazyLock<reqwest::blocking::Client> = LazyLock::new(get_client_sync);
pub static DROP_CLIENT_ASYNC: LazyLock<reqwest::Client> = LazyLock::new(get_client_async);
pub static DROP_CLIENT_WS_CLIENT: LazyLock<reqwest::Client> = LazyLock::new(get_client_ws);
fn fetch_certificates() -> Vec<Certificate> {
let certificate_dir = DATA_ROOT_DIR.join("certificates");
let mut certs = Vec::new();
match fs::read_dir(certificate_dir) {
Ok(c) => {
for entry in c {
match entry {
Ok(c) => {
let mut buf = Vec::new();
match File::open(c.path()) {
Ok(f) => f,
Err(e) => {
warn!(
"Failed to open file at {} with error {}",
c.path().display(),
e
);
continue;
}
}
.read_to_end(&mut buf)
.unwrap_or_else(|e| {
panic!(
"Failed to read to end of certificate file {} with error {}",
c.path().display(),
e
)
});
match Certificate::from_pem_bundle(&buf) {
Ok(certificates) => {
for cert in certificates {
certs.push(cert);
}
info!(
"added {} certificate(s) from {}",
certs.len(),
c.file_name().display()
);
}
Err(e) => warn!(
"Invalid certificate file {} with error {}",
c.path().display(),
e
),
}
}
Err(_) => todo!(),
}
}
}
Err(e) => {
debug!("not loading certificates due to error: {e}");
}
};
certs
}
pub fn get_client_sync() -> reqwest::blocking::Client {
let mut client = reqwest::blocking::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client
.use_rustls_tls()
.build()
.expect("Failed to build synchronous client")
}
pub fn get_client_async() -> reqwest::Client {
let mut client = reqwest::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client
.use_rustls_tls()
.build()
.expect("Failed to build asynchronous client")
}
pub fn get_client_ws() -> reqwest::Client {
let mut client = reqwest::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client
.use_rustls_tls()
.http1_only()
.build()
.expect("Failed to build websocket client")
}

View File

@ -1,9 +1,41 @@
use crate::database::db::{borrow_db_checked, borrow_db_mut_checked};
use log::debug;
use std::sync::nonpoison::Mutex;
use database::{borrow_db_checked, borrow_db_mut_checked};
use download_manager::DOWNLOAD_MANAGER;
use log::{debug, error};
use tauri::AppHandle;
use tauri_plugin_autostart::ManagerExt;
pub fn toggle_autostart_logic(app: AppHandle, enabled: bool) -> Result<(), String> {
use crate::AppState;
#[tauri::command]
pub fn fetch_state(state: tauri::State<'_, Mutex<AppState>>) -> Result<String, String> {
let guard = state.lock();
let cloned_state = serde_json::to_string(&guard.clone()).map_err(|e| e.to_string())?;
drop(guard);
Ok(cloned_state)
}
#[tauri::command]
pub fn quit(app: tauri::AppHandle) {
cleanup_and_exit(&app);
}
pub fn cleanup_and_exit(app: &AppHandle) {
debug!("cleaning up and exiting application");
match DOWNLOAD_MANAGER.ensure_terminated() {
Ok(res) => match res {
Ok(()) => debug!("download manager terminated correctly"),
Err(()) => error!("download manager failed to terminate correctly"),
},
Err(e) => panic!("{e:?}"),
}
app.exit(0);
}
#[tauri::command]
pub fn toggle_autostart(app: AppHandle, enabled: bool) -> Result<(), String> {
let manager = app.autolaunch();
if enabled {
manager.enable().map_err(|e| e.to_string())?;
@ -16,13 +48,11 @@ pub fn toggle_autostart_logic(app: AppHandle, enabled: bool) -> Result<(), Strin
// Store the state in DB
let mut db_handle = borrow_db_mut_checked();
db_handle.settings.autostart = enabled;
drop(db_handle);
Ok(())
}
pub fn get_autostart_enabled_logic(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
// First check DB state
#[tauri::command]
pub fn get_autostart_enabled(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
let db_handle = borrow_db_checked();
let db_state = db_handle.settings.autostart;
drop(db_handle);
@ -42,34 +72,3 @@ pub fn get_autostart_enabled_logic(app: AppHandle) -> Result<bool, tauri_plugin_
Ok(db_state)
}
// New function to sync state on startup
pub fn sync_autostart_on_startup(app: &AppHandle) -> Result<(), String> {
let db_handle = borrow_db_checked();
let should_be_enabled = db_handle.settings.autostart;
drop(db_handle);
let manager = app.autolaunch();
let current_state = manager.is_enabled().map_err(|e| e.to_string())?;
if current_state != should_be_enabled {
if should_be_enabled {
manager.enable().map_err(|e| e.to_string())?;
debug!("synced autostart: enabled");
} else {
manager.disable().map_err(|e| e.to_string())?;
debug!("synced autostart: disabled");
}
}
Ok(())
}
#[tauri::command]
pub fn toggle_autostart(app: AppHandle, enabled: bool) -> Result<(), String> {
toggle_autostart_logic(app, enabled)
}
#[tauri::command]
pub fn get_autostart_enabled(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
get_autostart_enabled_logic(app)
}

View File

@ -1,23 +0,0 @@
use log::{debug, error};
use tauri::AppHandle;
use crate::AppState;
#[tauri::command]
pub fn quit(app: tauri::AppHandle, state: tauri::State<'_, std::sync::Mutex<AppState<'_>>>) {
cleanup_and_exit(&app, &state);
}
pub fn cleanup_and_exit(app: &AppHandle, state: &tauri::State<'_, std::sync::Mutex<AppState<'_>>>) {
debug!("cleaning up and exiting application");
let download_manager = state.lock().unwrap().download_manager.clone();
match download_manager.ensure_terminated() {
Ok(res) => match res {
Ok(()) => debug!("download manager terminated correctly"),
Err(()) => error!("download manager failed to terminate correctly"),
},
Err(e) => panic!("{e:?}"),
}
app.exit(0);
}

View File

@ -1,11 +0,0 @@
use crate::AppState;
#[tauri::command]
pub fn fetch_state(
state: tauri::State<'_, std::sync::Mutex<AppState<'_>>>,
) -> Result<String, String> {
let guard = state.lock().unwrap();
let cloned_state = serde_json::to_string(&guard.clone()).map_err(|e| e.to_string())?;
drop(guard);
Ok(cloned_state)
}

View File

@ -1,3 +0,0 @@
pub mod autostart;
pub mod cleanup;
pub mod commands;

View File

@ -1,102 +0,0 @@
use std::{collections::HashMap, path::PathBuf, str::FromStr};
use log::warn;
use crate::{database::db::{GameVersion, DATA_ROOT_DIR}, error::backup_error::BackupError, process::process_manager::Platform};
use super::path::CommonPath;
pub struct BackupManager<'a> {
pub current_platform: Platform,
pub sources: HashMap<(Platform, Platform), &'a (dyn BackupHandler + Sync + Send)>,
}
impl BackupManager<'_> {
pub fn new() -> Self {
BackupManager {
#[cfg(target_os = "windows")]
current_platform: Platform::Windows,
#[cfg(target_os = "macos")]
current_platform: Platform::MacOs,
#[cfg(target_os = "linux")]
current_platform: Platform::Linux,
sources: HashMap::from([
// Current platform to target platform
(
(Platform::Windows, Platform::Windows),
&WindowsBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
(
(Platform::Linux, Platform::Linux),
&LinuxBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
(
(Platform::MacOs, Platform::MacOs),
&MacBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
]),
}
}
}
pub trait BackupHandler: Send + Sync {
fn root_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(DATA_ROOT_DIR.lock().unwrap().join("games")) }
fn game_translate(&self, _path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(PathBuf::from_str(&game.game_id).unwrap()) }
fn base_translate(&self, path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(self.root_translate(path, game)?.join(self.game_translate(path, game)?)) }
fn home_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { let c = CommonPath::Home.get().ok_or(BackupError::NotFound); println!("{:?}", c); c }
fn store_user_id_translate(&self, _path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> { PathBuf::from_str(&game.game_id).map_err(|_| BackupError::ParseError) }
fn os_user_name_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(PathBuf::from_str(&whoami::username()).unwrap()) }
fn win_app_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winAppData>"); Err(BackupError::InvalidSystem) }
fn win_local_app_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winLocalAppData>"); Err(BackupError::InvalidSystem) }
fn win_local_app_data_low_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winLocalAppDataLow>"); Err(BackupError::InvalidSystem) }
fn win_documents_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winDocuments>"); Err(BackupError::InvalidSystem) }
fn win_public_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winPublic>"); Err(BackupError::InvalidSystem) }
fn win_program_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winProgramData>"); Err(BackupError::InvalidSystem) }
fn win_dir_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winDir>"); Err(BackupError::InvalidSystem) }
fn xdg_data_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected XDG Reference in Backup <xdgData>"); Err(BackupError::InvalidSystem) }
fn xdg_config_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected XDG Reference in Backup <xdgConfig>"); Err(BackupError::InvalidSystem) }
fn skip_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(PathBuf::new()) }
}
pub struct LinuxBackupManager {}
impl BackupHandler for LinuxBackupManager {
fn xdg_config_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::Data.get().ok_or(BackupError::NotFound)?)
}
fn xdg_data_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::Config.get().ok_or(BackupError::NotFound)?)
}
}
pub struct WindowsBackupManager {}
impl BackupHandler for WindowsBackupManager {
fn win_app_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::Config.get().ok_or(BackupError::NotFound)?)
}
fn win_local_app_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::DataLocal.get().ok_or(BackupError::NotFound)?)
}
fn win_local_app_data_low_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::DataLocalLow.get().ok_or(BackupError::NotFound)?)
}
fn win_dir_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str("C:/Windows").unwrap())
}
fn win_documents_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::Document.get().ok_or(BackupError::NotFound)?)
}
fn win_program_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str("C:/ProgramData").unwrap())
}
fn win_public_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::Public.get().ok_or(BackupError::NotFound)?)
}
}
pub struct MacBackupManager {}
impl BackupHandler for MacBackupManager {}

View File

@ -1,16 +1,12 @@
use serde_json::json;
use crate::{
error::remote_access_error::RemoteAccessError,
remote::{
auth::generate_authorization_header,
cache::{cache_object, get_cached_object},
requests::{generate_url, make_authenticated_get},
utils::DROP_CLIENT_ASYNC,
},
use games::collections::collection::{Collection, Collections};
use remote::{
auth::generate_authorization_header,
cache::{cache_object, get_cached_object},
error::RemoteAccessError,
requests::{generate_url, make_authenticated_get},
utils::DROP_CLIENT_ASYNC,
};
use super::collection::{Collection, Collections};
use serde_json::json;
#[tauri::command]
pub async fn fetch_collections(

View File

@ -1,5 +0,0 @@
pub mod commands;
pub mod db;
pub mod debug;
pub mod models;
pub mod scan;

View File

@ -0,0 +1,22 @@
use database::DownloadableMetadata;
use download_manager::DOWNLOAD_MANAGER;
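// Thin command wrappers around the global download manager singleton.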
#[tauri::command]
pub fn pause_downloads() {
DOWNLOAD_MANAGER.pause_downloads();
}
#[tauri::command]
pub fn resume_downloads() {
DOWNLOAD_MANAGER.resume_downloads();
}
#[tauri::command]
pub fn move_download_in_queue(old_index: usize, new_index: usize) {
DOWNLOAD_MANAGER.rearrange(old_index, new_index);
}
#[tauri::command]
pub fn cancel_game(meta: DownloadableMetadata) {
DOWNLOAD_MANAGER.cancel(meta);
}

View File

@ -1,31 +0,0 @@
use std::sync::Mutex;
use crate::{database::models::data::DownloadableMetadata, AppState};
#[tauri::command]
pub fn pause_downloads(state: tauri::State<'_, Mutex<AppState>>) {
state.lock().unwrap().download_manager.pause_downloads();
}
#[tauri::command]
pub fn resume_downloads(state: tauri::State<'_, Mutex<AppState>>) {
state.lock().unwrap().download_manager.resume_downloads();
}
#[tauri::command]
pub fn move_download_in_queue(
state: tauri::State<'_, Mutex<AppState>>,
old_index: usize,
new_index: usize,
) {
state
.lock()
.unwrap()
.download_manager
.rearrange(old_index, new_index);
}
#[tauri::command]
pub fn cancel_game(state: tauri::State<'_, Mutex<AppState>>, meta: DownloadableMetadata) {
state.lock().unwrap().download_manager.cancel(meta);
}

View File

@ -1,5 +0,0 @@
pub mod commands;
pub mod download_manager_builder;
pub mod download_manager_frontend;
pub mod downloadable;
pub mod util;

View File

@ -1,39 +1,31 @@
use std::{
path::PathBuf,
sync::{Arc, Mutex},
use std::{path::PathBuf, sync::Arc};
use database::{GameDownloadStatus, borrow_db_checked};
use download_manager::{
DOWNLOAD_MANAGER, downloadable::Downloadable, error::ApplicationDownloadError,
};
use crate::{
AppState,
database::{
db::borrow_db_checked,
models::data::GameDownloadStatus,
},
download_manager::downloadable::Downloadable,
error::application_download_error::ApplicationDownloadError,
};
use super::download_agent::GameDownloadAgent;
use games::downloads::download_agent::GameDownloadAgent;
#[tauri::command]
pub async fn download_game(
game_id: String,
game_version: String,
install_dir: usize,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<(), ApplicationDownloadError> {
let sender = { state.lock().unwrap().download_manager.get_sender().clone() };
let sender = { DOWNLOAD_MANAGER.get_sender().clone() };
let game_download_agent =
GameDownloadAgent::new_from_index(game_id.clone(), game_version.clone(), install_dir, sender).await?;
let game_download_agent = GameDownloadAgent::new_from_index(
game_id.clone(),
game_version.clone(),
install_dir,
sender,
)
.await?;
let game_download_agent =
Arc::new(Box::new(game_download_agent) as Box<dyn Downloadable + Send + Sync>);
state
.lock()
.unwrap()
.download_manager
DOWNLOAD_MANAGER
.queue_download(game_download_agent.clone())
.unwrap();
@ -41,10 +33,7 @@ pub async fn download_game(
}
#[tauri::command]
pub async fn resume_download(
game_id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<(), ApplicationDownloadError> {
pub async fn resume_download(game_id: String) -> Result<(), ApplicationDownloadError> {
let s = borrow_db_checked()
.applications
.game_statuses
@ -62,23 +51,25 @@ pub async fn resume_download(
} => (version_name, install_dir),
};
let sender = state.lock().unwrap().download_manager.get_sender();
let sender = DOWNLOAD_MANAGER.get_sender();
let parent_dir: PathBuf = install_dir.into();
let game_download_agent = Arc::new(Box::new(
GameDownloadAgent::new(
game_id,
version_name.clone(),
parent_dir.parent().unwrap().to_path_buf(),
parent_dir
.parent()
.unwrap_or_else(|| {
panic!("Failed to get parent directry of {}", parent_dir.display())
})
.to_path_buf(),
sender,
)
.await?,
) as Box<dyn Downloadable + Send + Sync>);
state
.lock()
.unwrap()
.download_manager
DOWNLOAD_MANAGER
.queue_download(game_download_agent)
.unwrap();
Ok(())

View File

@ -1,49 +0,0 @@
use std::{
fmt::{Display, Formatter},
io, sync::Arc,
};
use serde_with::SerializeDisplay;
use humansize::{format_size, BINARY};
use super::remote_access_error::RemoteAccessError;
// TODO: Rename / separate from downloads
#[derive(Debug, SerializeDisplay)]
pub enum ApplicationDownloadError {
NotInitialized,
Communication(RemoteAccessError),
DiskFull(u64, u64),
#[allow(dead_code)]
Checksum,
Lock,
IoError(Arc<io::Error>),
DownloadError,
}
impl Display for ApplicationDownloadError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
ApplicationDownloadError::NotInitialized => write!(f, "Download not initialized, did something go wrong?"),
ApplicationDownloadError::DiskFull(required, available) => write!(
f,
"Game requires {}, {} remaining left on disk.",
format_size(*required, BINARY),
format_size(*available, BINARY),
),
ApplicationDownloadError::Communication(error) => write!(f, "{error}"),
ApplicationDownloadError::Lock => write!(
f,
"failed to acquire lock. Something has gone very wrong internally. Please restart the application"
),
ApplicationDownloadError::Checksum => {
write!(f, "checksum failed to validate for download")
}
ApplicationDownloadError::IoError(error) => write!(f, "io error: {error}"),
ApplicationDownloadError::DownloadError => write!(
f,
"Download failed. See Download Manager status for specific error"
),
}
}
}

View File

@ -1,27 +0,0 @@
use std::{fmt::Display, io, sync::mpsc::SendError};
use serde_with::SerializeDisplay;
#[derive(SerializeDisplay)]
pub enum DownloadManagerError<T> {
IOError(io::Error),
SignalError(SendError<T>),
}
impl<T> Display for DownloadManagerError<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DownloadManagerError::IOError(error) => write!(f, "{error}"),
DownloadManagerError::SignalError(send_error) => write!(f, "{send_error}"),
}
}
}
impl<T> From<SendError<T>> for DownloadManagerError<T> {
fn from(value: SendError<T>) -> Self {
DownloadManagerError::SignalError(value)
}
}
impl<T> From<io::Error> for DownloadManagerError<T> {
fn from(value: io::Error) -> Self {
DownloadManagerError::IOError(value)
}
}

View File

@ -1,10 +0,0 @@
use serde::Deserialize;
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DropServerError {
pub status_code: usize,
pub status_message: String,
// pub message: String,
// pub url: String,
}

View File

@ -1,18 +0,0 @@
use std::fmt::Display;
use serde_with::SerializeDisplay;
#[derive(SerializeDisplay)]
pub enum LibraryError {
MetaNotFound(String),
}
impl Display for LibraryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
LibraryError::MetaNotFound(id) => write!(
f,
"Could not locate any installed version of game ID {id} in the database"
),
}
}
}

View File

@ -1,6 +0,0 @@
pub mod application_download_error;
pub mod download_manager_error;
pub mod drop_server_error;
pub mod library_error;
pub mod process_error;
pub mod remote_access_error;

344
src-tauri/src/games.rs Normal file
View File

@ -0,0 +1,344 @@
use std::sync::nonpoison::Mutex;
use database::{GameDownloadStatus, GameVersion, borrow_db_checked, borrow_db_mut_checked};
use games::{
downloads::error::LibraryError,
library::{FetchGameStruct, FrontendGameOptions, Game, get_current_meta, uninstall_game_logic},
state::{GameStatusManager, GameStatusWithTransient},
};
use log::warn;
use process::PROCESS_MANAGER;
use remote::{
auth::generate_authorization_header,
cache::{cache_object, cache_object_db, get_cached_object, get_cached_object_db},
error::{DropServerError, RemoteAccessError},
offline,
requests::generate_url,
utils::DROP_CLIENT_ASYNC,
};
use tauri::AppHandle;
use crate::AppState;
#[tauri::command]
pub async fn fetch_library(
state: tauri::State<'_, Mutex<AppState>>,
hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
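// The offline! macro picks the online or offline implementation depending on the current connection state.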
offline!(
state,
fetch_library_logic,
fetch_library_logic_offline,
state,
hard_refresh
)
.await
}
pub async fn fetch_library_logic(
state: tauri::State<'_, Mutex<AppState>>,
hard_fresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let do_hard_refresh = hard_fresh.unwrap_or(false);
if !do_hard_refresh && let Ok(library) = get_cached_object("library") {
return Ok(library);
}
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/user/library"], &[])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() != 200 {
let err = response.json().await.unwrap_or(DropServerError {
status_code: 500,
status_message: "Invalid response from server.".to_owned(),
});
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let mut games: Vec<Game> = response.json().await?;
let mut handle = state.lock();
let mut db_handle = borrow_db_mut_checked();
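// Merge the fetched library into in-memory state and make sure every game has at
// least a Remote download status recorded in the database.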
for game in &games {
handle.games.insert(game.id().clone(), game.clone());
if !db_handle.applications.game_statuses.contains_key(game.id()) {
db_handle
.applications
.game_statuses
.insert(game.id().clone(), GameDownloadStatus::Remote {});
}
}
// Add games that are installed but no longer in library
for meta in db_handle.applications.installed_game_version.values() {
if games.iter().any(|e| *e.id() == meta.id) {
continue;
}
// We should always have a cache of the object
// Pass db_handle because otherwise we get a deadlock
let game = match get_cached_object_db::<Game>(&meta.id.clone(), &db_handle) {
Ok(game) => game,
Err(err) => {
warn!(
"{} is installed, but encountered error fetching its error: {}.",
meta.id, err
);
continue;
}
};
games.push(game);
}
drop(handle);
drop(db_handle);
cache_object("library", &games)?;
Ok(games)
}
pub async fn fetch_library_logic_offline(
_state: tauri::State<'_, Mutex<AppState>>,
_hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let mut games: Vec<Game> = get_cached_object("library")?;
let db_handle = borrow_db_checked();
games.retain(|game| {
matches!(
&db_handle
.applications
.game_statuses
.get(game.id())
.unwrap_or(&GameDownloadStatus::Remote {}),
GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }
)
});
Ok(games)
}
pub async fn fetch_game_logic(
id: String,
state: tauri::State<'_, Mutex<AppState>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
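// Fast path: if the game is already in in-memory state, serve it from there and
// refresh the on-disk cache without hitting the server.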
let version = {
let state_handle = state.lock();
let db_lock = borrow_db_checked();
let metadata_option = db_lock.applications.installed_game_version.get(&id);
let version = match metadata_option {
None => None,
Some(metadata) => db_lock
.applications
.game_versions
.get(&metadata.id)
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
.cloned(),
};
let game = state_handle.games.get(&id);
if let Some(game) = game {
let status = GameStatusManager::fetch_state(&id, &db_lock);
let data = FetchGameStruct::new(game.clone(), status, version);
cache_object_db(&id, game, &db_lock)?;
return Ok(data);
}
version
};
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/game/", &id], &[])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() == 404 {
let offline_fetch = fetch_game_logic_offline(id.clone(), state).await;
if let Ok(fetch_data) = offline_fetch {
return Ok(fetch_data);
}
return Err(RemoteAccessError::GameNotFound(id));
}
if response.status() != 200 {
let err = response.json().await?;
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let game: Game = response.json().await?;
let mut state_handle = state.lock();
state_handle.games.insert(id.clone(), game.clone());
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.game_statuses
.entry(id.clone())
.or_insert(GameDownloadStatus::Remote {});
let status = GameStatusManager::fetch_state(&id, &db_handle);
drop(db_handle);
let data = FetchGameStruct::new(game.clone(), status, version);
cache_object(&id, &game)?;
Ok(data)
}
pub async fn fetch_game_version_options_logic(
game_id: String,
state: tauri::State<'_, Mutex<AppState>>,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/game/versions"], &[("id", &game_id)])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() != 200 {
let err = response.json().await?;
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let data: Vec<GameVersion> = response.json().await?;
let state_lock = state.lock();
let process_manager_lock = PROCESS_MANAGER.lock();
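// Only keep versions whose platform the local process manager can actually run.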
let data: Vec<GameVersion> = data
.into_iter()
.filter(|v| process_manager_lock.valid_platform(&v.platform))
.collect();
drop(process_manager_lock);
drop(state_lock);
Ok(data)
}
pub async fn fetch_game_logic_offline(
id: String,
_state: tauri::State<'_, Mutex<AppState>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
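// Offline variant: build the response entirely from the local database and object cache.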
let db_handle = borrow_db_checked();
let metadata_option = db_handle.applications.installed_game_version.get(&id);
let version = match metadata_option {
None => None,
Some(metadata) => db_handle
.applications
.game_versions
.get(&metadata.id)
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
.cloned(),
};
let status = GameStatusManager::fetch_state(&id, &db_handle);
let game = get_cached_object::<Game>(&id)?;
drop(db_handle);
Ok(FetchGameStruct::new(game, status, version))
}
#[tauri::command]
pub async fn fetch_game(
game_id: String,
state: tauri::State<'_, Mutex<AppState>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
offline!(
state,
fetch_game_logic,
fetch_game_logic_offline,
game_id,
state
)
.await
}
#[tauri::command]
pub fn fetch_game_status(id: String) -> GameStatusWithTransient {
let db_handle = borrow_db_checked();
GameStatusManager::fetch_state(&id, &db_handle)
}
#[tauri::command]
pub fn uninstall_game(game_id: String, app_handle: AppHandle) -> Result<(), LibraryError> {
let meta = match get_current_meta(&game_id) {
Some(data) => data,
None => return Err(LibraryError::MetaNotFound(game_id)),
};
uninstall_game_logic(meta, &app_handle);
Ok(())
}
#[tauri::command]
pub async fn fetch_game_version_options(
game_id: String,
state: tauri::State<'_, Mutex<AppState>>,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
fetch_game_version_options_logic(game_id, state).await
}
#[tauri::command]
pub fn update_game_configuration(
game_id: String,
options: FrontendGameOptions,
) -> Result<(), LibraryError> {
let mut handle = borrow_db_mut_checked();
let installed_version = handle
.applications
.installed_game_version
.get(&game_id)
.ok_or(LibraryError::MetaNotFound(game_id))?;
let id = installed_version.id.clone();
let version = installed_version
.version
.clone()
.ok_or(LibraryError::VersionNotFound(id.clone()))?;
let mut existing_configuration = handle
.applications
.game_versions
.get(&id)
.unwrap()
.get(&version)
.unwrap()
.clone();
// Add more options in here
existing_configuration.launch_command_template = options.launch_string().clone();
// Add no more options past here
handle
.applications
.game_versions
.get_mut(&id)
.unwrap()
.insert(version.to_string(), existing_configuration);
Ok(())
}

View File

@ -1,78 +0,0 @@
use std::sync::Mutex;
use tauri::AppHandle;
use crate::{
AppState,
database::{
db::borrow_db_checked,
models::data::GameVersion,
},
error::{library_error::LibraryError, remote_access_error::RemoteAccessError},
games::library::{
fetch_game_logic_offline, fetch_library_logic_offline, get_current_meta,
uninstall_game_logic,
},
offline,
};
use super::{
library::{
FetchGameStruct, Game, fetch_game_logic, fetch_game_version_options_logic,
fetch_library_logic,
},
state::{GameStatusManager, GameStatusWithTransient},
};
#[tauri::command]
pub async fn fetch_library(
state: tauri::State<'_, Mutex<AppState<'_>>>,
hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
offline!(
state,
fetch_library_logic,
fetch_library_logic_offline,
state,
hard_refresh
).await
}
#[tauri::command]
pub async fn fetch_game(
game_id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
offline!(
state,
fetch_game_logic,
fetch_game_logic_offline,
game_id,
state
).await
}
#[tauri::command]
pub fn fetch_game_status(id: String) -> GameStatusWithTransient {
let db_handle = borrow_db_checked();
GameStatusManager::fetch_state(&id, &db_handle)
}
#[tauri::command]
pub fn uninstall_game(game_id: String, app_handle: AppHandle) -> Result<(), LibraryError> {
let meta = match get_current_meta(&game_id) {
Some(data) => data,
None => return Err(LibraryError::MetaNotFound(game_id)),
};
uninstall_game_logic(meta, &app_handle);
Ok(())
}
#[tauri::command]
pub async fn fetch_game_version_options(
game_id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
fetch_game_version_options_logic(game_id, state).await
}

View File

@ -1,591 +0,0 @@
use std::fs::remove_dir_all;
use std::sync::Mutex;
use std::thread::spawn;
use log::{debug, error, warn};
use serde::{Deserialize, Serialize};
use tauri::AppHandle;
use tauri::Emitter;
use crate::AppState;
use crate::database::db::{borrow_db_checked, borrow_db_mut_checked};
use crate::database::models::data::Database;
use crate::database::models::data::{
ApplicationTransientStatus, DownloadableMetadata, GameDownloadStatus, GameVersion,
};
use crate::download_manager::download_manager_frontend::DownloadStatus;
use crate::error::drop_server_error::DropServerError;
use crate::error::library_error::LibraryError;
use crate::error::remote_access_error::RemoteAccessError;
use crate::games::state::{GameStatusManager, GameStatusWithTransient};
use crate::remote::auth::generate_authorization_header;
use crate::remote::cache::cache_object_db;
use crate::remote::cache::{cache_object, get_cached_object, get_cached_object_db};
use crate::remote::requests::generate_url;
use crate::remote::utils::DROP_CLIENT_ASYNC;
use crate::remote::utils::DROP_CLIENT_SYNC;
use bitcode::{Decode, Encode};
#[derive(Serialize, Deserialize, Debug)]
pub struct FetchGameStruct {
game: Game,
status: GameStatusWithTransient,
version: Option<GameVersion>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct Game {
id: String,
m_name: String,
m_short_description: String,
m_description: String,
// mDevelopers
// mPublishers
m_icon_object_id: String,
m_banner_object_id: String,
m_cover_object_id: String,
m_image_library_object_ids: Vec<String>,
m_image_carousel_object_ids: Vec<String>,
}
#[derive(serde::Serialize, Clone)]
pub struct GameUpdateEvent {
pub game_id: String,
pub status: (
Option<GameDownloadStatus>,
Option<ApplicationTransientStatus>,
),
pub version: Option<GameVersion>,
}
#[derive(Serialize, Clone)]
pub struct QueueUpdateEventQueueData {
pub meta: DownloadableMetadata,
pub status: DownloadStatus,
pub progress: f64,
pub current: usize,
pub max: usize,
}
#[derive(serde::Serialize, Clone)]
pub struct QueueUpdateEvent {
pub queue: Vec<QueueUpdateEventQueueData>,
}
#[derive(serde::Serialize, Clone)]
pub struct StatsUpdateEvent {
pub speed: usize,
pub time: usize,
}
pub async fn fetch_library_logic(
state: tauri::State<'_, Mutex<AppState<'_>>>,
hard_fresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let do_hard_refresh = hard_fresh.unwrap_or(false);
if !do_hard_refresh && let Ok(library) = get_cached_object("library") {
return Ok(library);
}
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/user/library"], &[])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() != 200 {
let err = response.json().await.unwrap_or(DropServerError {
status_code: 500,
status_message: "Invalid response from server.".to_owned(),
});
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let mut games: Vec<Game> = response.json().await?;
let mut handle = state.lock().unwrap();
let mut db_handle = borrow_db_mut_checked();
for game in &games {
handle.games.insert(game.id.clone(), game.clone());
if !db_handle.applications.game_statuses.contains_key(&game.id) {
db_handle
.applications
.game_statuses
.insert(game.id.clone(), GameDownloadStatus::Remote {});
}
}
// Add games that are installed but no longer in library
for meta in db_handle.applications.installed_game_version.values() {
if games.iter().any(|e| e.id == meta.id) {
continue;
}
// We should always have a cache of the object
// Pass db_handle because otherwise we get a deadlock
let game = match get_cached_object_db::<Game>(&meta.id.clone(), &db_handle) {
Ok(game) => game,
Err(err) => {
warn!(
"{} is installed, but encountered error fetching its error: {}.",
meta.id, err
);
continue;
}
};
games.push(game);
}
drop(handle);
drop(db_handle);
cache_object("library", &games)?;
Ok(games)
}
pub async fn fetch_library_logic_offline(
_state: tauri::State<'_, Mutex<AppState<'_>>>,
_hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let mut games: Vec<Game> = get_cached_object("library")?;
let db_handle = borrow_db_checked();
games.retain(|game| {
matches!(
&db_handle
.applications
.game_statuses
.get(&game.id)
.unwrap_or(&GameDownloadStatus::Remote {}),
GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }
)
});
Ok(games)
}
pub async fn fetch_game_logic(
id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
let version = {
let state_handle = state.lock().unwrap();
let db_lock = borrow_db_checked();
let metadata_option = db_lock.applications.installed_game_version.get(&id);
let version = match metadata_option {
None => None,
Some(metadata) => db_lock
.applications
.game_versions
.get(&metadata.id)
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
.cloned(),
};
let game = state_handle.games.get(&id);
if let Some(game) = game {
let status = GameStatusManager::fetch_state(&id, &db_lock);
let data = FetchGameStruct {
game: game.clone(),
status,
version,
};
cache_object_db(&id, game, &db_lock)?;
return Ok(data);
}
version
};
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/game/", &id], &[])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() == 404 {
let offline_fetch = fetch_game_logic_offline(id.clone(), state).await;
if let Ok(fetch_data) = offline_fetch {
return Ok(fetch_data);
}
return Err(RemoteAccessError::GameNotFound(id));
}
if response.status() != 200 {
let err = response.json().await.unwrap();
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let game: Game = response.json().await?;
let mut state_handle = state.lock().unwrap();
state_handle.games.insert(id.clone(), game.clone());
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.game_statuses
.entry(id.clone())
.or_insert(GameDownloadStatus::Remote {});
let status = GameStatusManager::fetch_state(&id, &db_handle);
drop(db_handle);
let data = FetchGameStruct {
game: game.clone(),
status,
version,
};
cache_object(&id, &game)?;
Ok(data)
}
pub async fn fetch_game_logic_offline(
id: String,
_state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
let db_handle = borrow_db_checked();
let metadata_option = db_handle.applications.installed_game_version.get(&id);
let version = match metadata_option {
None => None,
Some(metadata) => db_handle
.applications
.game_versions
.get(&metadata.id)
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
.cloned(),
};
let status = GameStatusManager::fetch_state(&id, &db_handle);
let game = get_cached_object::<Game>(&id)?;
drop(db_handle);
Ok(FetchGameStruct {
game,
status,
version,
})
}
pub async fn fetch_game_version_options_logic(
game_id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/game/versions"], &[("id", &game_id)])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() != 200 {
let err = response.json().await.unwrap();
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let data: Vec<GameVersion> = response.json().await?;
let state_lock = state.lock().unwrap();
let process_manager_lock = state_lock.process_manager.lock().unwrap();
let data: Vec<GameVersion> = data
.into_iter()
.filter(|v| {
process_manager_lock
.valid_platform(&v.platform, &state_lock)
.unwrap()
})
.collect();
drop(process_manager_lock);
drop(state_lock);
Ok(data)
}
/**
* Called by:
* - on_cancel, when cancelled, for obvious reasons
* - when downloading, so if drop unexpectedly quits, we can resume the download. hidden by the "Downloading..." transient state, though
* - when scanning, to import the game
*/
pub fn set_partially_installed(
meta: &DownloadableMetadata,
install_dir: String,
app_handle: Option<&AppHandle>,
) {
set_partially_installed_db(&mut borrow_db_mut_checked(), meta, install_dir, app_handle);
}
pub fn set_partially_installed_db(
db_lock: &mut Database,
meta: &DownloadableMetadata,
install_dir: String,
app_handle: Option<&AppHandle>,
) {
db_lock.applications.transient_statuses.remove(meta);
db_lock.applications.game_statuses.insert(
meta.id.clone(),
GameDownloadStatus::PartiallyInstalled {
version_name: meta.version.as_ref().unwrap().clone(),
install_dir,
},
);
db_lock
.applications
.installed_game_version
.insert(meta.id.clone(), meta.clone());
if let Some(app_handle) = app_handle {
push_game_update(
app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, db_lock),
);
}
}
pub fn uninstall_game_logic(meta: DownloadableMetadata, app_handle: &AppHandle) {
debug!("triggered uninstall for agent");
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});
push_game_update(
app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, &db_handle),
);
let previous_state = db_handle.applications.game_statuses.get(&meta.id).cloned();
if previous_state.is_none() {
warn!("uninstall job doesn't have previous state, failing silently");
return;
}
let previous_state = previous_state.unwrap();
if let Some((_, install_dir)) = match previous_state {
GameDownloadStatus::Installed {
version_name,
install_dir,
} => Some((version_name, install_dir)),
GameDownloadStatus::SetupRequired {
version_name,
install_dir,
} => Some((version_name, install_dir)),
GameDownloadStatus::PartiallyInstalled {
version_name,
install_dir,
} => Some((version_name, install_dir)),
_ => None,
} {
db_handle
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});
drop(db_handle);
let app_handle = app_handle.clone();
spawn(move || {
if let Err(e) = remove_dir_all(install_dir) {
error!("{e}");
} else {
let mut db_handle = borrow_db_mut_checked();
db_handle.applications.transient_statuses.remove(&meta);
db_handle
.applications
.installed_game_version
.remove(&meta.id);
db_handle
.applications
.game_statuses
.insert(meta.id.clone(), GameDownloadStatus::Remote {});
let _ = db_handle.applications.transient_statuses.remove(&meta);
push_game_update(
&app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, &db_handle),
);
debug!("uninstalled game id {}", &meta.id);
app_handle.emit("update_library", ()).unwrap();
}
});
} else {
warn!("invalid previous state for uninstall, failing silently.");
}
}
pub fn get_current_meta(game_id: &String) -> Option<DownloadableMetadata> {
borrow_db_checked()
.applications
.installed_game_version
.get(game_id)
.cloned()
}
pub fn on_game_complete(
meta: &DownloadableMetadata,
install_dir: String,
app_handle: &AppHandle,
) -> Result<(), RemoteAccessError> {
// Fetch game version information from remote
if meta.version.is_none() {
return Err(RemoteAccessError::GameNotFound(meta.id.clone()));
}
let client = DROP_CLIENT_SYNC.clone();
let response = generate_url(
&["/api/v1/client/game/version"],
&[
("id", &meta.id),
("version", meta.version.as_ref().unwrap()),
],
)?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()?;
let game_version: GameVersion = response.json()?;
let mut handle = borrow_db_mut_checked();
handle
.applications
.game_versions
.entry(meta.id.clone())
.or_default()
.insert(meta.version.clone().unwrap(), game_version.clone());
handle
.applications
.installed_game_version
.insert(meta.id.clone(), meta.clone());
drop(handle);
let status = if game_version.setup_command.is_empty() {
GameDownloadStatus::Installed {
version_name: meta.version.clone().unwrap(),
install_dir,
}
} else {
GameDownloadStatus::SetupRequired {
version_name: meta.version.clone().unwrap(),
install_dir,
}
};
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.game_statuses
.insert(meta.id.clone(), status.clone());
drop(db_handle);
app_handle
.emit(
&format!("update_game/{}", meta.id),
GameUpdateEvent {
game_id: meta.id.clone(),
status: (Some(status), None),
version: Some(game_version),
},
)
.unwrap();
Ok(())
}
pub fn push_game_update(
app_handle: &AppHandle,
game_id: &String,
version: Option<GameVersion>,
status: GameStatusWithTransient,
) {
if let Some(GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }) =
&status.0
&& version.is_none()
{
panic!("pushed game for installed game that doesn't have version information");
}
app_handle
.emit(
&format!("update_game/{game_id}"),
GameUpdateEvent {
game_id: game_id.clone(),
status,
version,
},
)
.unwrap();
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FrontendGameOptions {
launch_string: String,
}
#[tauri::command]
pub fn update_game_configuration(
game_id: String,
options: FrontendGameOptions,
) -> Result<(), LibraryError> {
let mut handle = borrow_db_mut_checked();
let installed_version = handle
.applications
.installed_game_version
.get(&game_id)
.ok_or(LibraryError::MetaNotFound(game_id))?;
let id = installed_version.id.clone();
let version = installed_version.version.clone().unwrap();
let mut existing_configuration = handle
.applications
.game_versions
.get(&id)
.unwrap()
.get(&version)
.unwrap()
.clone();
// Add more options in here
existing_configuration.launch_command_template = options.launch_string;
// Add no more options past here
handle
.applications
.game_versions
.get_mut(&id)
.unwrap()
.insert(version.to_string(), existing_configuration);
Ok(())
}

View File

@ -3,141 +3,86 @@
#![feature(duration_constructors)]
#![feature(duration_millis_float)]
#![feature(iterator_try_collect)]
#![feature(nonpoison_mutex)]
#![feature(sync_nonpoison)]
#![deny(clippy::all)]
mod database;
mod games;
mod client;
mod download_manager;
mod error;
mod process;
mod remote;
use crate::database::scan::scan_install_dirs;
use crate::process::commands::open_process_logs;
use crate::process::process_handlers::UMU_LAUNCHER_EXECUTABLE;
use crate::remote::commands::auth_initiate_code;
use crate::{database::db::DatabaseImpls, games::downloads::commands::resume_download};
use bitcode::{Decode, Encode};
use client::commands::fetch_state;
use client::{
autostart::{get_autostart_enabled, sync_autostart_on_startup, toggle_autostart},
cleanup::{cleanup_and_exit, quit},
};
use database::commands::{
add_download_dir, delete_download_dir, fetch_download_dir_stats, fetch_settings,
fetch_system_data, update_settings,
};
use database::db::{DATA_ROOT_DIR, DatabaseInterface, borrow_db_checked, borrow_db_mut_checked};
use database::models::data::GameDownloadStatus;
use download_manager::commands::{
cancel_game, move_download_in_queue, pause_downloads, resume_downloads,
};
use download_manager::download_manager_builder::DownloadManagerBuilder;
use download_manager::download_manager_frontend::DownloadManager;
use games::collections::commands::{
add_game_to_collection, create_collection, delete_collection, delete_game_in_collection,
fetch_collection, fetch_collections,
};
use games::commands::{
fetch_game, fetch_game_status, fetch_game_version_options, fetch_library, uninstall_game,
};
use games::downloads::commands::download_game;
use games::library::{Game, update_game_configuration};
use log::{LevelFilter, debug, info, warn};
use log4rs::Config;
use log4rs::append::console::ConsoleAppender;
use log4rs::append::file::FileAppender;
use log4rs::config::{Appender, Root};
use log4rs::encode::pattern::PatternEncoder;
use process::commands::{kill_game, launch_game};
use process::process_manager::ProcessManager;
use remote::auth::{self, recieve_handshake};
use remote::commands::{
auth_initiate, fetch_drop_object, gen_drop_url, manual_recieve_handshake, retry_connect,
sign_out, use_remote,
};
use remote::fetch_object::fetch_object;
use remote::server_proto::{handle_server_proto, handle_server_proto_offline};
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::io::Write;
use std::panic::PanicHookInfo;
use std::path::Path;
use std::str::FromStr;
use std::sync::Arc;
use std::time::SystemTime;
use std::{
collections::HashMap,
sync::{LazyLock, Mutex},
collections::HashMap, env, fs::File, io::Write, panic::PanicHookInfo, path::Path, str::FromStr,
sync::nonpoison::Mutex, time::SystemTime,
};
use ::client::{app_status::AppStatus, autostart::sync_autostart_on_startup, user::User};
use ::download_manager::DownloadManagerWrapper;
use ::games::{library::Game, scan::scan_install_dirs};
use ::process::ProcessManagerWrapper;
use ::remote::{
auth::{self, HandshakeRequestBody, HandshakeResponse, generate_authorization_header},
cache::clear_cached_object,
error::RemoteAccessError,
fetch_object::fetch_object_wrapper,
offline,
server_proto::{handle_server_proto_offline_wrapper, handle_server_proto_wrapper},
utils::DROP_CLIENT_ASYNC,
};
use database::{
DB, GameDownloadStatus, borrow_db_checked, borrow_db_mut_checked, db::DATA_ROOT_DIR,
interface::DatabaseImpls,
};
use log::{LevelFilter, debug, info, warn};
use log4rs::{
Config,
append::{console::ConsoleAppender, file::FileAppender},
config::{Appender, Root},
encode::pattern::PatternEncoder,
};
use serde::Serialize;
use tauri::{
AppHandle, Manager, RunEvent, WindowEvent,
menu::{Menu, MenuItem, PredefinedMenuItem},
tray::TrayIconBuilder,
};
use std::{env, panic};
use tauri::menu::{Menu, MenuItem, PredefinedMenuItem};
use tauri::tray::TrayIconBuilder;
use tauri::{AppHandle, Manager, RunEvent, WindowEvent};
use tauri_plugin_deep_link::DeepLinkExt;
use tauri_plugin_dialog::DialogExt;
use url::Url;
use utils::app_emit;
#[derive(Clone, Copy, Serialize, Eq, PartialEq)]
pub enum AppStatus {
NotConfigured,
Offline,
ServerError,
SignedOut,
SignedIn,
SignedInNeedsReauth,
ServerUnavailable,
}
use crate::client::cleanup_and_exit;
#[derive(Clone, Serialize, Deserialize, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct User {
id: String,
username: String,
admin: bool,
display_name: String,
profile_picture_object_id: String,
}
mod client;
mod collections;
mod download_manager;
mod downloads;
mod games;
mod process;
mod remote;
mod settings;
#[derive(Clone)]
pub struct CompatInfo {
umu_installed: bool,
}
fn create_new_compat_info() -> Option<CompatInfo> {
#[cfg(target_os = "windows")]
return None;
let has_umu_installed = UMU_LAUNCHER_EXECUTABLE.is_some();
Some(CompatInfo {
umu_installed: has_umu_installed,
})
}
use client::*;
use collections::*;
use download_manager::*;
use downloads::*;
use games::*;
use process::*;
use remote::*;
use settings::*;
#[derive(Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct AppState<'a> {
pub struct AppState {
status: AppStatus,
user: Option<User>,
games: HashMap<String, Game>,
#[serde(skip_serializing)]
download_manager: Arc<DownloadManager>,
#[serde(skip_serializing)]
process_manager: Arc<Mutex<ProcessManager<'a>>>,
#[serde(skip_serializing)]
compat_info: Option<CompatInfo>,
}
async fn setup(handle: AppHandle) -> AppState<'static> {
async fn setup(handle: AppHandle) -> AppState {
let logfile = FileAppender::builder()
.encoder(Box::new(PatternEncoder::new(
"{d} | {l} | {f}:{L} - {m}{n}",
)))
.append(false)
.build(DATA_ROOT_DIR.join("./drop.log"))
.unwrap();
.expect("Failed to setup logfile");
let console = ConsoleAppender::builder()
.encoder(Box::new(PatternEncoder::new(
@ -157,14 +102,14 @@ async fn setup(handle: AppHandle) -> AppState<'static> {
.appenders(vec!["logfile", "console"])
.build(LevelFilter::from_str(&log_level).expect("Invalid log level")),
)
.unwrap();
.expect("Failed to build config");
log4rs::init_config(config).unwrap();
log4rs::init_config(config).expect("Failed to initialise log4rs");
let games = HashMap::new();
let download_manager = Arc::new(DownloadManagerBuilder::build(handle.clone()));
let process_manager = Arc::new(Mutex::new(ProcessManager::new(handle.clone())));
let compat_info = create_new_compat_info();
ProcessManagerWrapper::init(handle.clone());
DownloadManagerWrapper::init(handle.clone());
debug!("checking if database is set up");
let is_set_up = DB.database_is_set_up();
@ -176,9 +121,6 @@ async fn setup(handle: AppHandle) -> AppState<'static> {
status: AppStatus::NotConfigured,
user: None,
games,
download_manager,
process_manager,
compat_info,
};
}
@ -241,14 +183,9 @@ async fn setup(handle: AppHandle) -> AppState<'static> {
status: app_status,
user,
games,
download_manager,
process_manager,
compat_info,
}
}
pub static DB: LazyLock<DatabaseInterface> = LazyLock::new(DatabaseInterface::set_up_database);
pub fn custom_panic_handler(e: &PanicHookInfo) -> Option<()> {
let crash_file = DATA_ROOT_DIR.join(format!(
"crash-{}.log",
@ -267,7 +204,7 @@ pub fn custom_panic_handler(e: &PanicHookInfo) -> Option<()> {
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
panic::set_hook(Box::new(|e| {
std::panic::set_hook(Box::new(|e| {
let _ = custom_panic_handler(e);
println!("{e}");
}));
@ -370,45 +307,67 @@ pub fn run() {
.shadow(false)
.data_directory(DATA_ROOT_DIR.join(".webview"))
.build()
.unwrap();
.expect("Failed to build main window");
app.deep_link().on_open_url(move |event| {
debug!("handling drop:// url");
let binding = event.urls();
let url = binding.first().unwrap();
if url.host_str().unwrap() == "handshake" {
let url = match binding.first() {
Some(url) => url,
None => {
warn!("No value recieved from deep link. Is this a drop server?");
return;
}
};
if let Some("handshake") = url.host_str() {
tauri::async_runtime::spawn(recieve_handshake(
handle.clone(),
url.path().to_string(),
));
}
});
let open_menu_item = MenuItem::with_id(app, "open", "Open", true, None::<&str>)
.expect("Failed to generate open menu item");
let sep = PredefinedMenuItem::separator(app)
.expect("Failed to generate menu separator item");
let quit_menu_item = MenuItem::with_id(app, "quit", "Quit", true, None::<&str>)
.expect("Failed to generate quit menu item");
let menu = Menu::with_items(
app,
&[
&MenuItem::with_id(app, "open", "Open", true, None::<&str>).unwrap(),
&PredefinedMenuItem::separator(app).unwrap(),
&open_menu_item,
&sep,
/*
&MenuItem::with_id(app, "show_library", "Library", true, None::<&str>)?,
&MenuItem::with_id(app, "show_settings", "Settings", true, None::<&str>)?,
&PredefinedMenuItem::separator(app)?,
*/
&MenuItem::with_id(app, "quit", "Quit", true, None::<&str>).unwrap(),
&quit_menu_item,
],
)
.unwrap();
.expect("Failed to generate menu");
run_on_tray(|| {
TrayIconBuilder::new()
.icon(app.default_window_icon().unwrap().clone())
.icon(
app.default_window_icon()
.expect("Failed to get default window icon")
.clone(),
)
.menu(&menu)
.on_menu_event(|app, event| match event.id.as_ref() {
"open" => {
app.webview_windows().get("main").unwrap().show().unwrap();
app.webview_windows()
.get("main")
.expect("Failed to get webview")
.show()
.expect("Failed to show window");
}
"quit" => {
cleanup_and_exit(app, &app.state());
cleanup_and_exit(app);
}
_ => {
@ -422,15 +381,19 @@ pub fn run() {
{
let mut db_handle = borrow_db_mut_checked();
if let Some(original) = db_handle.prev_database.take() {
let canonicalised = match original.canonicalize() {
Ok(o) => o,
Err(_) => original,
};
warn!(
"Database corrupted. Original file at {}",
original.canonicalize().unwrap().to_string_lossy()
canonicalised.display()
);
app.dialog()
.message(
"Database corrupted. A copy has been saved at: ".to_string()
+ original.to_str().unwrap(),
)
.message(format!(
"Database corrupted. A copy has been saved at: {}",
canonicalised.display()
))
.title("Database corrupted")
.show(|_| {});
}
@ -441,7 +404,7 @@ pub fn run() {
})
.register_asynchronous_uri_scheme_protocol("object", move |_ctx, request, responder| {
tauri::async_runtime::spawn(async move {
fetch_object(request, responder).await;
fetch_object_wrapper(request, responder).await;
});
})
.register_asynchronous_uri_scheme_protocol("server", |ctx, request, responder| {
@ -452,8 +415,8 @@ pub fn run() {
offline!(
state,
handle_server_proto,
handle_server_proto_offline,
handle_server_proto_wrapper,
handle_server_proto_offline_wrapper,
request,
responder
)
@ -463,7 +426,7 @@ pub fn run() {
.on_window_event(|window, event| {
if let WindowEvent::CloseRequested { api, .. } = event {
run_on_tray(|| {
window.hide().unwrap();
window.hide().expect("Failed to close window in tray");
api.prevent_close();
});
}
@ -490,3 +453,85 @@ fn run_on_tray<T: FnOnce()>(f: T) {
(f)();
}
}
// TODO: Refactor
pub async fn recieve_handshake(app: AppHandle, path: String) {
// Tell the app we're processing
app_emit!(&app, "auth/processing", ());
let handshake_result = recieve_handshake_logic(&app, path).await;
if let Err(e) = handshake_result {
warn!("error with authentication: {e}");
app_emit!(&app, "auth/failed", e.to_string());
return;
}
let app_state = app.state::<Mutex<AppState>>();
let (app_status, user) = auth::setup().await;
let mut state_lock = app_state.lock();
state_lock.status = app_status;
state_lock.user = user;
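// Drop cached collections/library so they are refetched under the new session.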
let _ = clear_cached_object("collections");
let _ = clear_cached_object("library");
drop(state_lock);
app_emit!(&app, "auth/finished", ());
}
// TODO: Refactor
async fn recieve_handshake_logic(app: &AppHandle, path: String) -> Result<(), RemoteAccessError> {
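// A handshake deep link path is expected to look like "/<client id>/<token>",
// which splits into exactly three chunks.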
let path_chunks: Vec<&str> = path.split('/').collect();
if path_chunks.len() != 3 {
app_emit!(app, "auth/failed", ());
return Err(RemoteAccessError::HandshakeFailed(
"failed to parse token".to_string(),
));
}
let base_url = {
let handle = borrow_db_checked();
Url::parse(handle.base_url.as_str())?
};
let client_id = path_chunks
.get(1)
.expect("Failed to get client id from path chunks");
let token = path_chunks
.get(2)
.expect("Failed to get token from path chunks");
let body = HandshakeRequestBody::new((client_id).to_string(), (token).to_string());
let endpoint = base_url.join("/api/v1/client/auth/handshake")?;
let client = DROP_CLIENT_ASYNC.clone();
let response = client.post(endpoint).json(&body).send().await?;
debug!("handshake responsded with {}", response.status().as_u16());
if !response.status().is_success() {
return Err(RemoteAccessError::InvalidResponse(response.json().await?));
}
let response_struct: HandshakeResponse = response.json().await?;
{
let mut handle = borrow_db_mut_checked();
handle.auth = Some(response_struct.into());
}
let web_token = {
let header = generate_authorization_header();
let token = client
.post(base_url.join("/api/v1/client/user/webtoken")?)
.header("Authorization", header)
.send()
.await?;
token.text().await?
};
let mut handle = borrow_db_mut_checked();
handle.auth.as_mut().unwrap().web_token = Some(web_token);
Ok(())
}

50
src-tauri/src/process.rs Normal file
View File

@ -0,0 +1,50 @@
use std::sync::nonpoison::Mutex;
use process::{PROCESS_MANAGER, error::ProcessError};
use tauri::AppHandle;
use tauri_plugin_opener::OpenerExt;
use crate::AppState;
#[tauri::command]
pub fn launch_game(
id: String,
state: tauri::State<'_, Mutex<AppState>>,
) -> Result<(), ProcessError> {
let state_lock = state.lock();
let mut process_manager_lock = PROCESS_MANAGER.lock();
//let meta = DownloadableMetadata {
// id,
// version: Some(version),
// download_type: DownloadType::Game,
//};
match process_manager_lock.launch_process(id) {
Ok(()) => {}
Err(e) => return Err(e),
}
drop(process_manager_lock);
drop(state_lock);
Ok(())
}
#[tauri::command]
pub fn kill_game(game_id: String) -> Result<(), ProcessError> {
PROCESS_MANAGER
.lock()
.kill_game(game_id)
.map_err(ProcessError::IOError)
}
#[tauri::command]
pub fn open_process_logs(game_id: String, app_handle: AppHandle) -> Result<(), ProcessError> {
let process_manager_lock = PROCESS_MANAGER.lock();
let dir = process_manager_lock.get_log_dir(game_id);
app_handle
.opener()
.open_path(dir.display().to_string(), None::<&str>)
.map_err(ProcessError::OpenerError)
}

View File

@ -1,50 +0,0 @@
use std::sync::Mutex;
use crate::{error::process_error::ProcessError, AppState};
#[tauri::command]
pub fn launch_game(
id: String,
state: tauri::State<'_, Mutex<AppState>>,
) -> Result<(), ProcessError> {
let state_lock = state.lock().unwrap();
let mut process_manager_lock = state_lock.process_manager.lock().unwrap();
//let meta = DownloadableMetadata {
// id,
// version: Some(version),
// download_type: DownloadType::Game,
//};
match process_manager_lock.launch_process(id, &state_lock) {
Ok(()) => {}
Err(e) => return Err(e),
}
drop(process_manager_lock);
drop(state_lock);
Ok(())
}
#[tauri::command]
pub fn kill_game(
game_id: String,
state: tauri::State<'_, Mutex<AppState>>,
) -> Result<(), ProcessError> {
let state_lock = state.lock().unwrap();
let mut process_manager_lock = state_lock.process_manager.lock().unwrap();
process_manager_lock
.kill_game(game_id)
.map_err(ProcessError::IOError)
}
#[tauri::command]
pub fn open_process_logs(
game_id: String,
state: tauri::State<'_, Mutex<AppState>>,
) -> Result<(), ProcessError> {
let state_lock = state.lock().unwrap();
let mut process_manager_lock = state_lock.process_manager.lock().unwrap();
process_manager_lock.open_process_logs(game_id)
}

View File

@ -1,5 +0,0 @@
pub mod commands;
pub mod process_manager;
pub mod process_handlers;
pub mod format;
pub mod utils;

View File

@ -1,35 +1,57 @@
use std::sync::Mutex;
use std::{sync::nonpoison::Mutex, time::Duration};
use client::app_status::AppStatus;
use database::{borrow_db_checked, borrow_db_mut_checked};
use futures_lite::StreamExt;
use log::{debug, warn};
use remote::{
auth::{auth_initiate_logic, generate_authorization_header},
cache::{cache_object, get_cached_object},
error::RemoteAccessError,
requests::generate_url,
setup,
utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_WS_CLIENT, DropHealthcheck},
};
use reqwest_websocket::{Message, RequestBuilderExt};
use serde::Deserialize;
use tauri::{AppHandle, Emitter, Manager};
use tauri::{AppHandle, Manager};
use url::Url;
use utils::{app_emit, webbrowser_open::webbrowser_open};
use crate::{
AppState, AppStatus,
database::db::{borrow_db_checked, borrow_db_mut_checked},
error::remote_access_error::RemoteAccessError,
remote::{
auth::generate_authorization_header,
requests::generate_url,
utils::{DROP_CLIENT_SYNC, DROP_CLIENT_WS_CLIENT},
},
};
use super::{
auth::{auth_initiate_logic, recieve_handshake, setup},
cache::{cache_object, get_cached_object},
utils::use_remote_logic,
};
use crate::{AppState, recieve_handshake};
#[tauri::command]
pub async fn use_remote(
url: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
state: tauri::State<'_, Mutex<AppState>>,
) -> Result<(), RemoteAccessError> {
use_remote_logic(url, state).await
debug!("connecting to url {url}");
let base_url = Url::parse(&url)?;
// Test Drop url
let test_endpoint = base_url.join("/api/v1")?;
let client = DROP_CLIENT_ASYNC.clone();
let response = client
.get(test_endpoint.to_string())
.timeout(Duration::from_secs(3))
.send()
.await?;
let result: DropHealthcheck = response.json().await?;
if result.app_name() != "Drop" {
warn!("user entered drop endpoint that connected, but wasn't identified as Drop");
return Err(RemoteAccessError::InvalidEndpoint);
}
let mut app_state = state.lock();
app_state.status = AppStatus::SignedOut;
drop(app_state);
let mut db_state = borrow_db_mut_checked();
db_state.base_url = base_url.to_string();
Ok(())
}
#[tauri::command]
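use_remote now performs the endpoint healthcheck inline: it GETs /api/v1 with a three-second timeout, deserializes a DropHealthcheck, and rejects the URL unless app_name() reports "Drop". The struct itself comes from remote::utils and is not shown in this diff; a plausible minimal shape (the field name and casing are assumptions) would be:
// Assumed shape of remote::utils::DropHealthcheck; only the app_name() accessor is
// visible in the diff, the serde layout is a guess.
use serde::Deserialize;
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DropHealthcheck {
    app_name: String,
}
impl DropHealthcheck {
    pub fn app_name(&self) -> &str {
        &self.app_name
    }
}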
@ -40,7 +62,7 @@ pub fn gen_drop_url(path: String) -> Result<String, RemoteAccessError> {
Url::parse(&handle.base_url).map_err(RemoteAccessError::ParsingError)?
};
let url = base_url.join(&path).unwrap();
let url = base_url.join(&path)?;
Ok(url.to_string())
}
@ -49,7 +71,7 @@ pub fn gen_drop_url(path: String) -> Result<String, RemoteAccessError> {
pub fn fetch_drop_object(path: String) -> Result<Vec<u8>, RemoteAccessError> {
let _drop_url = gen_drop_url(path.clone())?;
let req = generate_url(&[&path], &[])?;
let req = DROP_CLIENT_SYNC
let req = remote::utils::DROP_CLIENT_SYNC
.get(req)
.header("Authorization", generate_authorization_header())
.send();
@ -76,21 +98,21 @@ pub fn sign_out(app: AppHandle) {
// Update app state
{
let app_state = app.state::<Mutex<AppState>>();
let mut app_state_handle = app_state.lock().unwrap();
let state = app.state::<Mutex<AppState>>();
let mut app_state_handle = state.lock();
app_state_handle.status = AppStatus::SignedOut;
app_state_handle.user = None;
}
// Emit event for frontend
app.emit("auth/signedout", ()).unwrap();
app_emit!(&app, "auth/signedout", ());
}
#[tauri::command]
pub async fn retry_connect(state: tauri::State<'_, Mutex<AppState<'_>>>) -> Result<(), ()> {
pub async fn retry_connect(state: tauri::State<'_, Mutex<AppState>>) -> Result<(), ()> {
let (app_status, user) = setup().await;
let mut guard = state.lock().unwrap();
let mut guard = state.lock();
guard.status = app_status;
guard.user = user;
drop(guard);
@ -109,7 +131,7 @@ pub fn auth_initiate() -> Result<(), RemoteAccessError> {
let complete_redir_url = base_url.join(&redir_url)?;
debug!("opening web browser to continue authentication");
webbrowser::open(complete_redir_url.as_ref()).unwrap();
webbrowser_open(complete_redir_url.as_ref());
Ok(())
}
@ -124,7 +146,7 @@ struct CodeWebsocketResponse {
pub fn auth_initiate_code(app: AppHandle) -> Result<String, RemoteAccessError> {
let base_url = {
let db_lock = borrow_db_checked();
Url::parse(&db_lock.base_url.clone())?
Url::parse(&db_lock.base_url.clone())?.clone()
};
let code = auth_initiate_logic("code".to_string())?;
@ -151,14 +173,13 @@ pub fn auth_initiate_code(app: AppHandle) -> Result<String, RemoteAccessError> {
match response.response_type.as_str() {
"token" => {
let recieve_app = app.clone();
manual_recieve_handshake(recieve_app, response.value).await.unwrap();
manual_recieve_handshake(recieve_app, response.value).await;
return Ok(());
}
_ => return Err(RemoteAccessError::HandshakeFailed(response.value)),
}
}
}
Err(RemoteAccessError::HandshakeFailed(
"Failed to connect to websocket".to_string(),
))
@ -167,7 +188,7 @@ pub fn auth_initiate_code(app: AppHandle) -> Result<String, RemoteAccessError> {
let result = load().await;
if let Err(err) = result {
warn!("{err}");
app.emit("auth/failed", err.to_string()).unwrap();
app_emit!(&app, "auth/failed", err.to_string());
}
});
@ -175,8 +196,6 @@ pub fn auth_initiate_code(app: AppHandle) -> Result<String, RemoteAccessError> {
}
#[tauri::command]
pub async fn manual_recieve_handshake(app: AppHandle, token: String) -> Result<(), ()> {
pub async fn manual_recieve_handshake(app: AppHandle, token: String) {
recieve_handshake(app, format!("handshake/{token}")).await;
Ok(())
}
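Throughout this file the bare app.emit(...).unwrap() calls are replaced with the app_emit! macro from the new utils workspace, and webbrowser::open(...).unwrap() with webbrowser_open. Neither definition appears in this diff; a sketch of what such an emit macro might expand to — logging a failed emit instead of panicking — could look like:
// Guess at the shape of utils::app_emit!; the real macro may differ. The point is
// that a failed emit is logged rather than unwrapped.
#[macro_export]
macro_rules! app_emit {
    ($app:expr, $event:expr, $payload:expr) => {
        if let Err(e) = tauri::Emitter::emit($app, $event, $payload) {
            log::warn!("failed to emit event {}: {e}", $event);
        }
    };
}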

View File

@ -1,224 +0,0 @@
use std::{collections::HashMap, env, sync::Mutex};
use chrono::Utc;
use droplet_rs::ssl::sign_nonce;
use gethostname::gethostname;
use log::{debug, error, warn};
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter, Manager};
use url::Url;
use crate::{
database::{
db::{borrow_db_checked, borrow_db_mut_checked},
models::data::DatabaseAuth,
}, error::{drop_server_error::DropServerError, remote_access_error::RemoteAccessError}, remote::{cache::clear_cached_object, requests::make_authenticated_get, utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC}}, AppState, AppStatus, User
};
use super::{
cache::{cache_object, get_cached_object},
requests::generate_url,
};
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CapabilityConfiguration {}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct InitiateRequestBody {
name: String,
platform: String,
capabilities: HashMap<String, CapabilityConfiguration>,
mode: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeRequestBody {
client_id: String,
token: String,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeResponse {
private: String,
certificate: String,
id: String,
}
pub fn generate_authorization_header() -> String {
let certs = {
let db = borrow_db_checked();
db.auth.clone().unwrap()
};
let nonce = Utc::now().timestamp_millis().to_string();
let signature = sign_nonce(certs.private, nonce.clone()).unwrap();
format!("Nonce {} {} {}", certs.client_id, nonce, signature)
}
pub async fn fetch_user() -> Result<User, RemoteAccessError> {
let response = make_authenticated_get(generate_url(&["/api/v1/client/user"], &[])?).await?;
if response.status() != 200 {
let err: DropServerError = response.json().await?;
warn!("{err:?}");
if err.status_message == "Nonce expired" {
return Err(RemoteAccessError::OutOfSync);
}
return Err(RemoteAccessError::InvalidResponse(err));
}
response
.json::<User>()
.await
.map_err(std::convert::Into::into)
}
async fn recieve_handshake_logic(app: &AppHandle, path: String) -> Result<(), RemoteAccessError> {
let path_chunks: Vec<&str> = path.split('/').collect();
if path_chunks.len() != 3 {
app.emit("auth/failed", ()).unwrap();
return Err(RemoteAccessError::HandshakeFailed(
"failed to parse token".to_string(),
));
}
let base_url = {
let handle = borrow_db_checked();
Url::parse(handle.base_url.as_str())?
};
let client_id = path_chunks.get(1).unwrap();
let token = path_chunks.get(2).unwrap();
let body = HandshakeRequestBody {
client_id: (*client_id).to_string(),
token: (*token).to_string(),
};
let endpoint = base_url.join("/api/v1/client/auth/handshake")?;
let client = DROP_CLIENT_ASYNC.clone();
let response = client.post(endpoint).json(&body).send().await?;
debug!("handshake responsded with {}", response.status().as_u16());
if !response.status().is_success() {
return Err(RemoteAccessError::InvalidResponse(response.json().await?));
}
let response_struct: HandshakeResponse = response.json().await?;
{
let mut handle = borrow_db_mut_checked();
handle.auth = Some(DatabaseAuth {
private: response_struct.private,
cert: response_struct.certificate,
client_id: response_struct.id,
web_token: None, // gets created later
});
}
let web_token = {
let header = generate_authorization_header();
let token = client
.post(base_url.join("/api/v1/client/user/webtoken").unwrap())
.header("Authorization", header)
.send()
.await
.unwrap();
token.text().await.unwrap()
};
let mut handle = borrow_db_mut_checked();
let mut_auth = handle.auth.as_mut().unwrap();
mut_auth.web_token = Some(web_token);
Ok(())
}
pub async fn recieve_handshake(app: AppHandle, path: String) {
// Tell the app we're processing
app.emit("auth/processing", ()).unwrap();
let handshake_result = recieve_handshake_logic(&app, path).await;
if let Err(e) = handshake_result {
warn!("error with authentication: {e}");
app.emit("auth/failed", e.to_string()).unwrap();
return;
}
let app_state = app.state::<Mutex<AppState>>();
let (app_status, user) = setup().await;
let mut state_lock = app_state.lock().unwrap();
state_lock.status = app_status;
state_lock.user = user;
let _ = clear_cached_object("collections");
let _ = clear_cached_object("library");
drop(state_lock);
app.emit("auth/finished", ()).unwrap();
}
pub fn auth_initiate_logic(mode: String) -> Result<String, RemoteAccessError> {
let base_url = {
let db_lock = borrow_db_checked();
Url::parse(&db_lock.base_url.clone())?
};
let hostname = gethostname();
let endpoint = base_url.join("/api/v1/client/auth/initiate")?;
let body = InitiateRequestBody {
name: format!("{} (Desktop)", hostname.into_string().unwrap()),
platform: env::consts::OS.to_string(),
capabilities: HashMap::from([
("peerAPI".to_owned(), CapabilityConfiguration {}),
("cloudSaves".to_owned(), CapabilityConfiguration {}),
]),
mode,
};
let client = DROP_CLIENT_SYNC.clone();
let response = client.post(endpoint.to_string()).json(&body).send()?;
if response.status() != 200 {
let data: DropServerError = response.json()?;
error!("could not start handshake: {}", data.status_message);
return Err(RemoteAccessError::HandshakeFailed(data.status_message));
}
let response = response.text()?;
Ok(response)
}
pub async fn setup() -> (AppStatus, Option<User>) {
let auth = {
let data = borrow_db_checked();
data.auth.clone()
};
if auth.is_some() {
let user_result = match fetch_user().await {
Ok(data) => data,
Err(RemoteAccessError::FetchError(_)) => {
let user = get_cached_object::<User>("user").unwrap();
return (AppStatus::Offline, Some(user));
}
Err(_) => return (AppStatus::SignedInNeedsReauth, None),
};
cache_object("user", &user_result).unwrap();
return (AppStatus::SignedIn, Some(user_result));
}
(AppStatus::SignedOut, None)
}
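This whole file is deleted because the handshake, nonce and setup logic now lives in the remote workspace; the commands above pull generate_authorization_header, auth_initiate_logic, setup, cache_object and get_cached_object from remote::* instead. For reference, the header it builds has the shape "Nonce <client_id> <millisecond timestamp> <signature>". A purely illustrative parser for that format, not taken from the codebase:
// Illustrative only: splits the header produced by generate_authorization_header.
// The signature is created by droplet_rs::ssl::sign_nonce over the timestamp with the
// client's private key; verifying it is the server's job and is not sketched here.
fn parse_nonce_header(header: &str) -> Option<(&str, &str, &str)> {
    let mut parts = header.split_whitespace();
    match (parts.next()?, parts.next()?, parts.next()?, parts.next()?) {
        ("Nonce", client_id, nonce, signature) => Some((client_id, nonce, signature)),
        _ => None,
    }
}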

View File

@ -1,51 +0,0 @@
use http::{header::CONTENT_TYPE, response::Builder as ResponseBuilder};
use log::warn;
use tauri::UriSchemeResponder;
use crate::{database::db::DatabaseImpls, remote::utils::DROP_CLIENT_ASYNC, DB};
use super::{
auth::generate_authorization_header,
cache::{ObjectCache, cache_object, get_cached_object},
};
pub async fn fetch_object(request: http::Request<Vec<u8>>, responder: UriSchemeResponder) {
// Drop leading /
let object_id = &request.uri().path()[1..];
let cache_result = get_cached_object::<ObjectCache>(object_id);
if let Ok(cache_result) = &cache_result
&& !cache_result.has_expired()
{
responder.respond(cache_result.into());
return;
}
let header = generate_authorization_header();
let client = DROP_CLIENT_ASYNC.clone();
let url = format!("{}api/v1/client/object/{object_id}", DB.fetch_base_url());
let response = client.get(url).header("Authorization", header).send().await;
if response.is_err() {
match cache_result {
Ok(cache_result) => responder.respond(cache_result.into()),
Err(e) => {
warn!("{e}");
}
}
return;
}
let response = response.unwrap();
let resp_builder = ResponseBuilder::new().header(
CONTENT_TYPE,
response.headers().get("Content-Type").unwrap(),
);
let data = Vec::from(response.bytes().await.unwrap());
let resp = resp_builder.body(data).unwrap();
if cache_result.is_err() || cache_result.unwrap().has_expired() {
cache_object::<ObjectCache>(object_id, &resp.clone().into()).unwrap();
}
responder.respond(resp);
}
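The removed fetch_object handler (its replacement lives behind the cache_object/get_cached_object imports above) encodes a cache-then-network-then-stale-cache order: serve an unexpired cached object immediately, otherwise fetch from the server, and fall back to a stale cached copy only when the request fails. Condensed into one illustrative helper — the names are mine, not the project's:
// Decision order implemented by the handler above, reduced to a pure function.
enum ObjectSource {
    FreshCache,
    Network,
    StaleCache,
}
fn choose_source(cache_hit: bool, cache_expired: bool, network_ok: bool) -> Option<ObjectSource> {
    match (cache_hit, cache_expired, network_ok) {
        (true, false, _) => Some(ObjectSource::FreshCache), // unexpired cache wins outright
        (_, _, true) => Some(ObjectSource::Network),        // otherwise ask the server
        (true, true, false) => Some(ObjectSource::StaleCache), // offline: serve the stale copy
        _ => None,                                          // nothing cached and offline
    }
}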

View File

@ -1,57 +0,0 @@
use std::str::FromStr;
use http::{uri::PathAndQuery, Request, Response, StatusCode, Uri};
use tauri::UriSchemeResponder;
use crate::{database::db::borrow_db_checked, remote::utils::DROP_CLIENT_SYNC};
pub async fn handle_server_proto_offline(_request: Request<Vec<u8>>, responder: UriSchemeResponder) {
let four_oh_four = Response::builder()
.status(StatusCode::NOT_FOUND)
.body(Vec::new())
.unwrap();
responder.respond(four_oh_four);
}
pub async fn handle_server_proto(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
let db_handle = borrow_db_checked();
let web_token = match &db_handle.auth.as_ref().unwrap().web_token {
Some(e) => e,
None => return,
};
let remote_uri = db_handle.base_url.parse::<Uri>().unwrap();
let path = request.uri().path();
let mut new_uri = request.uri().clone().into_parts();
new_uri.path_and_query =
Some(PathAndQuery::from_str(&format!("{path}?noWrapper=true")).unwrap());
new_uri.authority = remote_uri.authority().cloned();
new_uri.scheme = remote_uri.scheme().cloned();
let new_uri = Uri::from_parts(new_uri).unwrap();
let whitelist_prefix = ["/store", "/api", "/_", "/fonts"];
if whitelist_prefix.iter().all(|f| !path.starts_with(f)) {
webbrowser::open(&new_uri.to_string()).unwrap();
return;
}
let client = DROP_CLIENT_SYNC.clone();
let response = client
.request(request.method().clone(), new_uri.to_string())
.header("Authorization", format!("Bearer {web_token}"))
.headers(request.headers().clone())
.send()
.unwrap();
let response_status = response.status();
let response_body = response.bytes().unwrap();
let http_response = Response::builder()
.status(response_status)
.body(response_body.to_vec())
.unwrap();
responder.respond(http_response);
}
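The deleted server-proto handler proxies only a small set of path prefixes to the remote, attaching the stored web token as a Bearer header, and hands every other path to the system browser. The prefix check, pulled out into a standalone helper for clarity (the helper itself is illustrative, not part of the repository):
// Same whitelist as the handler above.
fn should_proxy(path: &str) -> bool {
    const WHITELIST: [&str; 4] = ["/store", "/api", "/_", "/fonts"];
    WHITELIST.iter().any(|prefix| path.starts_with(prefix))
}
fn main() {
    assert!(should_proxy("/api/v1/client/user"));
    assert!(!should_proxy("/settings")); // this one would open in the external browser
}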

Some files were not shown because too many files have changed in this diff.