Compare commits

..

3 Commits

SHA1          Message                           Date
43b56462d6    feat: more refactoring (broken)   2025-09-16 15:09:43 +10:00
ab219670dc    fix: cleanup dependencies         2025-09-14 09:27:49 +10:00
c1beef380e    refactor: into rust workspaces    2025-09-14 09:19:03 +10:00
141 changed files with 3433 additions and 22710 deletions

.gitignore (vendored, 2 changed lines)

@@ -30,5 +30,3 @@ src-tauri/perf*
 /*.AppImage
 /squashfs-root
-/target/

Cargo.lock (generated, 8303 changed lines)

File diff suppressed because it is too large.

@@ -1,14 +0,0 @@
[workspace]
members = [
"client",
"database",
"src-tauri",
"process",
"remote",
"utils",
"cloud_saves",
"download_manager",
"games", "drop-consts",
]
resolver = "3"

@@ -1,21 +1,29 @@
-# Drop Desktop Client
+# Drop App
-The Drop Desktop Client is the companion app for [Drop](https://github.com/Drop-OSS/drop). It is the official & intended way to download and play games on your Drop server.
+Drop app is the companion app for [Drop](https://github.com/Drop-OSS/drop). It uses a Tauri base with Nuxt 3 + TailwindCSS on top of it, so we can re-use components from the web UI.
-## Internals
+## Running
+Before setting up the drop app, be sure that you have a server set up.
+The instructions for this can be found on the [Drop Docs](https://docs.droposs.org/docs/guides/quickstart)
-It uses a Tauri base with Nuxt 3 + TailwindCSS on top of it, so we can re-use components from the web UI.
+## Current features
+Currently supported are the following features:
+- Signin (with custom server)
+- Database registering & recovery
+- Dynamic library fetching from server
+- Installing & uninstalling games
+- Download progress monitoring
+- Launching / playing games
 ## Development
-Before setting up a development environemnt, be sure that you have a server set up. The instructions for this can be found on the [Drop Docs](https://docs.droposs.org/docs/guides/quickstart).
-Then, install dependencies with `yarn`. This'll install the custom builder's dependencies. Then, check everything works properly with `yarn tauri build`.
+Install dependencies with `yarn`
 Run the app in development with `yarn tauri dev`. NVIDIA users on Linux, use shell script `./nvidia-prop-dev.sh`
 To manually specify the logging level, add the environment variable `RUST_LOG=[debug, info, warn, error]` to `yarn tauri dev`:
 e.g. `RUST_LOG=debug yarn tauri dev`
 ## Contributing
-Check out the contributing guide on our Developer Docs: [Drop Developer Docs - Contributing](https://developer.droposs.org/contributing).
+Check the original [Drop repo](https://github.com/Drop-OSS/drop/blob/main/CONTRIBUTING.md) for contributing guidelines.

client/Cargo.lock (generated, 4862 changed lines)

File diff suppressed because it is too large.

@@ -1,12 +0,0 @@
[package]
name = "client"
version = "0.1.0"
edition = "2024"
[dependencies]
bitcode = "0.6.7"
database = { version = "0.1.0", path = "../database" }
log = "0.4.28"
serde = { version = "1.0.228", features = ["derive"] }
tauri = "2.8.5"
tauri-plugin-autostart = "2.5.0"

@@ -1,12 +0,0 @@
use serde::Serialize;
#[derive(Clone, Copy, Serialize, Eq, PartialEq)]
pub enum AppStatus {
NotConfigured,
Offline,
ServerError,
SignedOut,
SignedIn,
SignedInNeedsReauth,
ServerUnavailable,
}

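Since `AppStatus` above only derives a plain `Serialize`, each variant crosses to the frontend as its bare name. A minimal sketch, assuming the `client` crate and `serde_json` are available; `serde_json` is used here purely for illustration, the app itself ships the value through Tauri.

```rust
use client::app_status::AppStatus;

// Illustrative only: a unit-variant enum with a plain Serialize derive
// serialises as the variant name string.
fn app_status_serialisation_example() {
    let status = AppStatus::SignedIn;
    let json = serde_json::to_string(&status).expect("AppStatus variants always serialise");
    assert_eq!(json, "\"SignedIn\"");
}
```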
@@ -1,26 +0,0 @@
use database::borrow_db_checked;
use log::debug;
use tauri::AppHandle;
use tauri_plugin_autostart::ManagerExt;
// New function to sync state on startup
pub fn sync_autostart_on_startup(app: &AppHandle) -> Result<(), String> {
let db_handle = borrow_db_checked();
let should_be_enabled = db_handle.settings.autostart;
drop(db_handle);
let manager = app.autolaunch();
let current_state = manager.is_enabled().map_err(|e| e.to_string())?;
if current_state != should_be_enabled {
if should_be_enabled {
manager.enable().map_err(|e| e.to_string())?;
debug!("synced autostart: enabled");
} else {
manager.disable().map_err(|e| e.to_string())?;
debug!("synced autostart: disabled");
}
}
Ok(())
}

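A hedged sketch of where `sync_autostart_on_startup` would be called: inside the Tauri builder's `setup` hook, after the autostart plugin is registered. The plugin arguments and the log message are assumptions, not taken from this diff.

```rust
use client::autostart::sync_autostart_on_startup;

// Sketch only: register the autostart plugin, then reconcile the persisted
// setting with the OS launch entry once an AppHandle exists.
fn build_app() {
    tauri::Builder::default()
        .plugin(tauri_plugin_autostart::init(
            tauri_plugin_autostart::MacosLauncher::LaunchAgent,
            None,
        ))
        .setup(|app| {
            if let Err(e) = sync_autostart_on_startup(app.handle()) {
                log::warn!("failed to sync autostart state: {e}");
            }
            Ok(())
        })
        .run(tauri::generate_context!())
        .expect("error while running drop-app");
}
```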
@@ -1,49 +0,0 @@
use std::{
cell::LazyCell, ffi::OsStr, path::PathBuf, process::{Command, Stdio}
};
use log::info;
pub const COMPAT_INFO: LazyCell<Option<CompatInfo>> = LazyCell::new(create_new_compat_info);
pub const UMU_LAUNCHER_EXECUTABLE: LazyCell<Option<PathBuf>> = LazyCell::new(|| {
let x = get_umu_executable();
info!("{:?}", &x);
x
});
#[derive(Clone)]
pub struct CompatInfo {
pub umu_installed: bool,
}
fn create_new_compat_info() -> Option<CompatInfo> {
#[cfg(target_os = "windows")]
return None;
let has_umu_installed = UMU_LAUNCHER_EXECUTABLE.is_some();
Some(CompatInfo {
umu_installed: has_umu_installed,
})
}
const UMU_BASE_LAUNCHER_EXECUTABLE: &str = "umu-run";
const UMU_INSTALL_DIRS: [&str; 4] = ["/app/share", "/use/local/share", "/usr/share", "/opt"];
fn get_umu_executable() -> Option<PathBuf> {
if check_executable_exists(UMU_BASE_LAUNCHER_EXECUTABLE) {
return Some(PathBuf::from(UMU_BASE_LAUNCHER_EXECUTABLE));
}
for dir in UMU_INSTALL_DIRS {
let p = PathBuf::from(dir).join(UMU_BASE_LAUNCHER_EXECUTABLE);
if check_executable_exists(&p) {
return Some(p);
}
}
None
}
fn check_executable_exists<P: AsRef<OsStr>>(exec: P) -> bool {
let has_umu_installed = Command::new(exec).stdout(Stdio::null()).output();
has_umu_installed.is_ok()
}

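A usage sketch for the lazily-resolved umu state above. Only `COMPAT_INFO`, `UMU_LAUNCHER_EXECUTABLE` and `CompatInfo` come from the diff; the launcher function and its error strings are hypothetical.

```rust
use client::compat::{COMPAT_INFO, UMU_LAUNCHER_EXECUTABLE};

// Sketch: gate a umu-based launch on the detected compatibility info.
fn launch_with_umu(game_exe: &std::path::Path) -> Result<(), String> {
    if !COMPAT_INFO.as_ref().map(|c| c.umu_installed).unwrap_or(false) {
        return Err("umu-run was not found on this system".to_string());
    }
    let umu = UMU_LAUNCHER_EXECUTABLE
        .clone()
        .ok_or_else(|| "umu-run path unavailable".to_string())?;
    std::process::Command::new(umu)
        .arg(game_exe)
        .spawn()
        .map_err(|e| e.to_string())?;
    Ok(())
}
```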
@@ -1,4 +0,0 @@
pub mod app_status;
pub mod autostart;
pub mod compat;
pub mod user;

@@ -1,12 +0,0 @@
use bitcode::{Decode, Encode};
use serde::{Deserialize, Serialize};
#[derive(Clone, Serialize, Deserialize, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct User {
id: String,
username: String,
admin: bool,
display_name: String,
profile_picture_object_id: String,
}

@@ -1,20 +0,0 @@
[package]
name = "cloud_saves"
version = "0.1.0"
edition = "2024"
[dependencies]
database = { version = "0.1.0", path = "../database" }
dirs = "6.0.0"
drop-consts = { version = "0.1.0", path = "../drop-consts" }
log = "0.4.28"
regex = "1.11.3"
rustix = "1.1.2"
serde = "1.0.228"
serde_json = "1.0.145"
serde_with = "3.15.0"
tar = "0.4.44"
tempfile = "3.23.0"
uuid = "1.18.1"
whoami = "1.6.1"
zstd = "0.13.3"

@@ -1,235 +0,0 @@
use std::{collections::HashMap, path::PathBuf, str::FromStr};
#[cfg(target_os = "linux")]
use database::platform::Platform;
use database::GameVersion;
use drop_consts::DATA_ROOT_DIR;
use log::warn;
use crate::error::BackupError;
use super::path::CommonPath;
pub struct BackupManager<'a> {
pub current_platform: Platform,
pub sources: HashMap<(Platform, Platform), &'a (dyn BackupHandler + Sync + Send)>,
}
impl Default for BackupManager<'_> {
fn default() -> Self {
Self::new()
}
}
impl BackupManager<'_> {
pub fn new() -> Self {
BackupManager {
#[cfg(target_os = "windows")]
current_platform: Platform::Windows,
#[cfg(target_os = "macos")]
current_platform: Platform::MacOs,
#[cfg(target_os = "linux")]
current_platform: Platform::Linux,
sources: HashMap::from([
// Current platform to target platform
(
(Platform::Windows, Platform::Windows),
&WindowsBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
(
(Platform::Linux, Platform::Linux),
&LinuxBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
(
(Platform::MacOs, Platform::MacOs),
&MacBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
]),
}
}
}
pub trait BackupHandler: Send + Sync {
fn root_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(DATA_ROOT_DIR.join("games"))
}
fn game_translate(&self, _path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str(&game.game_id).unwrap())
}
fn base_translate(&self, path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(self
.root_translate(path, game)?
.join(self.game_translate(path, game)?))
}
fn home_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
let c = CommonPath::Home.get().ok_or(BackupError::NotFound);
println!("{:?}", c);
c
}
fn store_user_id_translate(
&self,
_path: &PathBuf,
game: &GameVersion,
) -> Result<PathBuf, BackupError> {
PathBuf::from_str(&game.game_id).map_err(|_| BackupError::ParseError)
}
fn os_user_name_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str(&whoami::username()).unwrap())
}
fn win_app_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winAppData>");
Err(BackupError::InvalidSystem)
}
fn win_local_app_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winLocalAppData>");
Err(BackupError::InvalidSystem)
}
fn win_local_app_data_low_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winLocalAppDataLow>");
Err(BackupError::InvalidSystem)
}
fn win_documents_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winDocuments>");
Err(BackupError::InvalidSystem)
}
fn win_public_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winPublic>");
Err(BackupError::InvalidSystem)
}
fn win_program_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winProgramData>");
Err(BackupError::InvalidSystem)
}
fn win_dir_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected Windows Reference in Backup <winDir>");
Err(BackupError::InvalidSystem)
}
fn xdg_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected XDG Reference in Backup <xdgData>");
Err(BackupError::InvalidSystem)
}
fn xdg_config_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
warn!("Unexpected XDG Reference in Backup <xdgConfig>");
Err(BackupError::InvalidSystem)
}
fn skip_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(PathBuf::new())
}
}
pub struct LinuxBackupManager {}
impl BackupHandler for LinuxBackupManager {
fn xdg_config_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::Data.get().ok_or(BackupError::NotFound)
}
fn xdg_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::Config.get().ok_or(BackupError::NotFound)
}
}
pub struct WindowsBackupManager {}
impl BackupHandler for WindowsBackupManager {
fn win_app_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::Config.get().ok_or(BackupError::NotFound)
}
fn win_local_app_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::DataLocal.get().ok_or(BackupError::NotFound)
}
fn win_local_app_data_low_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::DataLocalLow
.get()
.ok_or(BackupError::NotFound)
}
fn win_dir_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str("C:/Windows").unwrap())
}
fn win_documents_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::Document.get().ok_or(BackupError::NotFound)
}
fn win_program_data_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str("C:/ProgramData").unwrap())
}
fn win_public_translate(
&self,
_path: &PathBuf,
_game: &GameVersion,
) -> Result<PathBuf, BackupError> {
CommonPath::Public.get().ok_or(BackupError::NotFound)
}
}
pub struct MacBackupManager {}
impl BackupHandler for MacBackupManager {}

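To make the trait above concrete, a hedged sketch of how one save-path placeholder could be routed to the handler registered for the current platform pair. The placeholder tokens mirror the warnings above, but `resolve_placeholder` itself is an assumption about the resolver that lives elsewhere in this crate.

```rust
use std::path::PathBuf;

use cloud_saves::backup_manager::BackupHandler;
use cloud_saves::error::BackupError;
use database::GameVersion;

// Sketch: map one template token onto the platform-specific translation
// provided by a BackupHandler; unknown tokens pass through as literal segments.
fn resolve_placeholder(
    handler: &dyn BackupHandler,
    token: &str,
    path: &PathBuf,
    game: &GameVersion,
) -> Result<PathBuf, BackupError> {
    match token {
        "<base>" => handler.base_translate(path, game),
        "<home>" => handler.home_translate(path, game),
        "<winAppData>" => handler.win_app_data_translate(path, game),
        "<winDocuments>" => handler.win_documents_translate(path, game),
        "<xdgConfig>" => handler.xdg_config_translate(path, game),
        "<xdgData>" => handler.xdg_data_translate(path, game),
        "<skip>" => handler.skip_translate(path, game),
        other => Ok(PathBuf::from(other)),
    }
}
```

A caller would pick the handler out of `BackupManager::sources` with the `(current_platform, target_platform)` key before walking a template's tokens.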
@@ -1,27 +0,0 @@
use std::fmt::Display;
use serde_with::SerializeDisplay;
#[derive(Debug, SerializeDisplay, Clone, Copy)]
pub enum BackupError {
InvalidSystem,
NotFound,
ParseError,
}
impl Display for BackupError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
BackupError::InvalidSystem => "Attempted to generate path for invalid system",
BackupError::NotFound => "Could not generate or find path",
BackupError::ParseError => "Failed to parse path",
};
write!(f, "{}", s)
}
}

@@ -1,8 +0,0 @@
pub mod backup_manager;
pub mod conditions;
pub mod error;
pub mod metadata;
pub mod normalise;
pub mod path;
pub mod placeholder;
pub mod resolver;

@@ -1,16 +0,0 @@
[package]
name = "database"
version = "0.1.0"
edition = "2024"
[dependencies]
chrono = "0.4.42"
dirs = "6.0.0"
drop-consts = { version = "0.1.0", path = "../drop-consts" }
log = "0.4.28"
native_model = { version = "0.6.4", features = ["rmp_serde_1_3"], git = "https://github.com/Drop-OSS/native_model.git"}
rustbreak = "2.0.0"
serde = "1.0.228"
serde_with = "3.15.0"
url = "2.5.7"
whoami = "1.6.1"

@@ -1,31 +0,0 @@
use std::{
sync::LazyLock,
};
use rustbreak::{DeSerError, DeSerializer};
use serde::{Serialize, de::DeserializeOwned};
use crate::interface::{DatabaseImpls, DatabaseInterface};
pub static DB: LazyLock<DatabaseInterface> = LazyLock::new(DatabaseInterface::set_up_database);
// Custom JSON serializer to support everything we need
#[derive(Debug, Default, Clone)]
pub struct DropDatabaseSerializer;
impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
for DropDatabaseSerializer
{
fn serialize(&self, val: &T) -> rustbreak::error::DeSerResult<Vec<u8>> {
native_model::encode(val).map_err(|e| DeSerError::Internal(e.to_string()))
}
fn deserialize<R: std::io::Read>(&self, mut s: R) -> rustbreak::error::DeSerResult<T> {
let mut buf = Vec::new();
s.read_to_end(&mut buf)
.map_err(|e| rustbreak::error::DeSerError::Other(e.into()))?;
let (val, _version) =
native_model::decode(buf).map_err(|e| rustbreak::error::DeSerError::Internal(e.to_string()))?;
Ok(val)
}
}

@@ -1,189 +0,0 @@
use std::{
fs::{self, create_dir_all},
mem::ManuallyDrop,
ops::{Deref, DerefMut},
path::PathBuf,
sync::{RwLockReadGuard, RwLockWriteGuard},
};
use chrono::Utc;
use drop_consts::DATA_ROOT_DIR;
use log::{debug, error, info, warn};
use rustbreak::{PathDatabase, RustbreakError};
use url::Url;
use crate::{
db::{DB, DropDatabaseSerializer},
models::data::Database,
};
pub type DatabaseInterface =
rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer>;
pub trait DatabaseImpls {
fn set_up_database() -> DatabaseInterface;
fn database_is_set_up(&self) -> bool;
fn fetch_base_url(&self) -> Url;
}
impl DatabaseImpls for DatabaseInterface {
fn set_up_database() -> DatabaseInterface {
let db_path = DATA_ROOT_DIR.join("drop.db");
let games_base_dir = DATA_ROOT_DIR.join("games");
let logs_root_dir = DATA_ROOT_DIR.join("logs");
let cache_dir = DATA_ROOT_DIR.join("cache");
let pfx_dir = DATA_ROOT_DIR.join("pfx");
debug!("creating data directory at {DATA_ROOT_DIR:?}");
create_dir_all(DATA_ROOT_DIR.as_path()).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
DATA_ROOT_DIR.display(),
e
)
});
create_dir_all(&games_base_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
games_base_dir.display(),
e
)
});
create_dir_all(&logs_root_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
logs_root_dir.display(),
e
)
});
create_dir_all(&cache_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
cache_dir.display(),
e
)
});
create_dir_all(&pfx_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
pfx_dir.display(),
e
)
});
let exists = fs::exists(db_path.clone()).unwrap_or_else(|e| {
panic!(
"Failed to find if {} exists with error {}",
db_path.display(),
e
)
});
if exists {
match PathDatabase::load_from_path(db_path.clone()) {
Ok(db) => db,
Err(e) => handle_invalid_database(e, db_path, games_base_dir, cache_dir),
}
} else {
let default = Database::new(games_base_dir, None, cache_dir);
debug!("Creating database at path {}", db_path.display());
PathDatabase::create_at_path(db_path, default).expect("Database could not be created")
}
}
fn database_is_set_up(&self) -> bool {
!borrow_db_checked().base_url.is_empty()
}
fn fetch_base_url(&self) -> Url {
let handle = borrow_db_checked();
Url::parse(&handle.base_url)
.unwrap_or_else(|_| panic!("Failed to parse base url {}", handle.base_url))
}
}
// TODO: Make the error relelvant rather than just assume that it's a Deserialize error
fn handle_invalid_database(
_e: RustbreakError,
db_path: PathBuf,
games_base_dir: PathBuf,
cache_dir: PathBuf,
) -> rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer> {
warn!("{_e}");
let new_path = {
let time = Utc::now().timestamp();
let mut base = db_path.clone();
base.set_file_name(format!("drop.db.backup-{time}"));
base
};
info!("old database stored at: {}", new_path.to_string_lossy());
fs::rename(&db_path, &new_path).unwrap_or_else(|e| {
panic!(
"Could not rename database {} to {} with error {}",
db_path.display(),
new_path.display(),
e
)
});
let db = Database::new(games_base_dir, Some(new_path), cache_dir);
PathDatabase::create_at_path(db_path, db).expect("Database could not be created")
}
// To automatically save the database upon drop
pub struct DBRead<'a>(RwLockReadGuard<'a, Database>);
pub struct DBWrite<'a>(ManuallyDrop<RwLockWriteGuard<'a, Database>>);
impl<'a> Deref for DBWrite<'a> {
type Target = Database;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<'a> DerefMut for DBWrite<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<'a> Deref for DBRead<'a> {
type Target = Database;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Drop for DBWrite<'_> {
fn drop(&mut self) {
unsafe {
ManuallyDrop::drop(&mut self.0);
}
match DB.save() {
Ok(()) => {}
Err(e) => {
error!("database failed to save with error {e}");
panic!("database failed to save with error {e}")
}
}
}
}
pub fn borrow_db_checked<'a>() -> DBRead<'a> {
match DB.borrow_data() {
Ok(data) => DBRead(data),
Err(e) => {
error!("database borrow failed with error {e}");
panic!("database borrow failed with error {e}");
}
}
}
pub fn borrow_db_mut_checked<'a>() -> DBWrite<'a> {
match DB.borrow_data_mut() {
Ok(data) => DBWrite(ManuallyDrop::new(data)),
Err(e) => {
error!("database borrow mut failed with error {e}");
panic!("database borrow mut failed with error {e}");
}
}
}

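A short usage sketch of the checked borrow helpers above: writes go through `DBWrite`, which saves the database when the guard is dropped. The `settings.autostart` field is the same one touched by the autostart module earlier in this diff; the function itself is illustrative.

```rust
use database::{borrow_db_checked, borrow_db_mut_checked};

// Sketch: mutate inside a small scope so the DBWrite guard drops (and the
// database is persisted) promptly, then re-borrow read-only.
fn toggle_autostart_setting() -> bool {
    {
        let mut db = borrow_db_mut_checked();
        db.settings.autostart = !db.settings.autostart;
    } // guard dropped here, which triggers DB.save()

    borrow_db_checked().settings.autostart
}
```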
@@ -1,14 +0,0 @@
#![feature(nonpoison_rwlock)]
pub mod db;
pub mod debug;
pub mod interface;
pub mod models;
pub mod platform;
pub use db::DB;
pub use interface::{borrow_db_checked, borrow_db_mut_checked};
pub use models::data::{
ApplicationTransientStatus, Database, DatabaseApplications, DatabaseAuth, DownloadType,
DownloadableMetadata, GameDownloadStatus, GameVersion, Settings,
};

@@ -1,17 +0,0 @@
[package]
name = "download_manager"
version = "0.1.0"
edition = "2024"
[dependencies]
atomic-instant-full = "0.1.0"
database = { version = "0.1.0", path = "../database" }
humansize = "2.1.3"
log = "0.4.28"
parking_lot = "0.12.5"
remote = { version = "0.1.0", path = "../remote" }
serde = "1.0.228"
serde_with = "3.15.0"
tauri = "2.8.5"
throttle_my_fn = "0.2.6"
utils = { version = "0.1.0", path = "../utils" }

@@ -1,80 +0,0 @@
use humansize::{BINARY, format_size};
use std::{
fmt::{Display, Formatter},
io,
sync::{Arc, mpsc::SendError},
};
use remote::error::RemoteAccessError;
use serde_with::SerializeDisplay;
#[derive(SerializeDisplay)]
pub enum DownloadManagerError<T> {
IOError(io::Error),
SignalError(SendError<T>),
}
impl<T> Display for DownloadManagerError<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DownloadManagerError::IOError(error) => write!(f, "{error}"),
DownloadManagerError::SignalError(send_error) => write!(f, "{send_error}"),
}
}
}
impl<T> From<SendError<T>> for DownloadManagerError<T> {
fn from(value: SendError<T>) -> Self {
DownloadManagerError::SignalError(value)
}
}
impl<T> From<io::Error> for DownloadManagerError<T> {
fn from(value: io::Error) -> Self {
DownloadManagerError::IOError(value)
}
}
// TODO: Rename / separate from downloads
#[derive(Debug, SerializeDisplay)]
pub enum ApplicationDownloadError {
NotInitialized,
Communication(RemoteAccessError),
DiskFull(u64, u64),
#[allow(dead_code)]
Checksum,
Lock,
IoError(Arc<io::Error>),
DownloadError(RemoteAccessError),
}
impl Display for ApplicationDownloadError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
ApplicationDownloadError::NotInitialized => {
write!(f, "Download not initalized, did something go wrong?")
}
ApplicationDownloadError::DiskFull(required, available) => write!(
f,
"Game requires {}, {} remaining left on disk.",
format_size(*required, BINARY),
format_size(*available, BINARY),
),
ApplicationDownloadError::Communication(error) => write!(f, "{error}"),
ApplicationDownloadError::Lock => write!(
f,
"failed to acquire lock. Something has gone very wrong internally. Please restart the application"
),
ApplicationDownloadError::Checksum => {
write!(f, "checksum failed to validate for download")
}
ApplicationDownloadError::IoError(error) => write!(f, "io error: {error}"),
ApplicationDownloadError::DownloadError(error) => {
write!(f, "Download failed with error {error:?}")
}
}
}
}
impl From<io::Error> for ApplicationDownloadError {
fn from(value: io::Error) -> Self {
ApplicationDownloadError::IoError(Arc::new(value))
}
}

@@ -1,44 +0,0 @@
#![feature(duration_millis_float)]
#![feature(nonpoison_mutex)]
#![feature(sync_nonpoison)]
use std::{ops::Deref, sync::OnceLock};
use tauri::AppHandle;
use crate::{
download_manager_builder::DownloadManagerBuilder, download_manager_frontend::DownloadManager,
};
pub mod download_manager_builder;
pub mod download_manager_frontend;
pub mod downloadable;
pub mod error;
pub mod frontend_updates;
pub mod util;
pub static DOWNLOAD_MANAGER: DownloadManagerWrapper = DownloadManagerWrapper::new();
pub struct DownloadManagerWrapper(OnceLock<DownloadManager>);
impl DownloadManagerWrapper {
const fn new() -> Self {
DownloadManagerWrapper(OnceLock::new())
}
pub fn init(app_handle: AppHandle) {
DOWNLOAD_MANAGER
.0
.set(DownloadManagerBuilder::build(app_handle))
.expect("Failed to initialise download manager");
}
}
impl Deref for DownloadManagerWrapper {
type Target = DownloadManager;
fn deref(&self) -> &Self::Target {
match self.0.get() {
Some(download_manager) => download_manager,
None => unreachable!("Download manager should always be initialised"),
}
}
}

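For context, the intended call pattern for the wrapper above: initialise it exactly once with the app handle during startup, after which any module reaches the `DownloadManager` through `Deref` on the static. The call site is an assumption; no `DownloadManager` methods are shown because they are not part of this diff.

```rust
use download_manager::download_manager_frontend::DownloadManager;
use download_manager::{DOWNLOAD_MANAGER, DownloadManagerWrapper};

// Sketch: one-time initialisation, then global access via the static.
fn on_app_ready(app_handle: tauri::AppHandle) {
    DownloadManagerWrapper::init(app_handle);
    // Deref coercion yields the initialised DownloadManager.
    let _manager: &DownloadManager = &DOWNLOAD_MANAGER;
}
```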
@@ -1,23 +0,0 @@
use std::{cell::LazyCell, path::PathBuf};
#[cfg(not(debug_assertions))]
pub const DATA_ROOT_PREFIX: &str = "drop";
#[cfg(debug_assertions)]
pub const DATA_ROOT_PREFIX: &str = "drop-debug";
pub const DATA_ROOT_DIR: LazyCell<PathBuf> = LazyCell::new(|| {
dirs::data_dir()
.expect("Failed to get data dir")
.join(DATA_ROOT_PREFIX)
});
pub const DROP_DATA_PATH: &str = ".dropdata";
// Downloads
pub const MAX_PACKET_LENGTH: usize = 4096 * 4;
pub const BUMP_SIZE: usize = 4096 * 16;
pub const RETRY_COUNT: usize = 3;
pub const TARGET_BUCKET_SIZE: usize = 63 * 1000 * 1000;
pub const MAX_FILES_PER_BUCKET: usize = (1024 / 4) - 1;

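A quick, purely illustrative arithmetic check on the download constants above: packets are 16 KiB, the bump size is 64 KiB, and a bucket holds at most 255 files.

```rust
use drop_consts::{BUMP_SIZE, MAX_FILES_PER_BUCKET, MAX_PACKET_LENGTH};

// Compile-time sanity checks for the derived values of the constants above.
const _: () = {
    assert!(MAX_PACKET_LENGTH == 16 * 1024);
    assert!(BUMP_SIZE == 64 * 1024);
    assert!(MAX_FILES_PER_BUCKET == 255);
};
```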
@@ -1,27 +0,0 @@
[package]
name = "games"
version = "0.1.0"
edition = "2024"
[dependencies]
atomic-instant-full = "0.1.0"
bitcode = "0.6.7"
boxcar = "0.2.14"
database = { version = "0.1.0", path = "../database" }
download_manager = { version = "0.1.0", path = "../download_manager" }
hex = "0.4.3"
log = "0.4.28"
md5 = "0.8.0"
rayon = "1.11.0"
remote = { version = "0.1.0", path = "../remote" }
reqwest = "0.12.23"
rustix = "1.1.2"
serde = { version = "1.0.228", features = ["derive"] }
serde_with = "3.15.0"
sysinfo = "0.37.2"
tauri = "2.8.5"
throttle_my_fn = "0.2.6"
utils = { version = "0.1.0", path = "../utils" }
native_model = { version = "0.6.4", features = ["rmp_serde_1_3"], git = "https://github.com/Drop-OSS/native_model.git"}
serde_json = "1.0.145"
drop-consts = { version = "0.1.0", path = "../drop-consts" }

@@ -1 +0,0 @@
pub mod collection;

@@ -1,29 +0,0 @@
use std::fmt::Display;
use serde_with::SerializeDisplay;
#[derive(SerializeDisplay)]
pub enum LibraryError {
MetaNotFound(String),
VersionNotFound(String),
}
impl Display for LibraryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}",
match self {
LibraryError::MetaNotFound(id) => {
format!(
"Could not locate any installed version of game ID {id} in the database"
)
}
LibraryError::VersionNotFound(game_id) => {
format!(
"Could not locate any installed version for game id {game_id} in the database"
)
}
}
)
}
}

@@ -1,7 +0,0 @@
#![feature(iterator_try_collect)]
pub mod collections;
pub mod downloads;
pub mod library;
pub mod scan;
pub mod state;

@@ -1,300 +0,0 @@
use bitcode::{Decode, Encode};
use database::{
ApplicationTransientStatus, Database, DownloadableMetadata, GameDownloadStatus, GameVersion,
borrow_db_checked, borrow_db_mut_checked,
};
use log::{debug, error, warn};
use remote::{
auth::generate_authorization_header, error::RemoteAccessError, requests::generate_url,
utils::DROP_CLIENT_SYNC,
};
use serde::{Deserialize, Serialize};
use std::fs::remove_dir_all;
use std::thread::spawn;
use tauri::AppHandle;
use utils::app_emit;
use crate::state::{GameStatusManager, GameStatusWithTransient};
#[derive(Serialize, Deserialize, Debug)]
pub struct FetchGameStruct {
game: Game,
status: GameStatusWithTransient,
version: Option<GameVersion>,
}
impl FetchGameStruct {
pub fn new(game: Game, status: GameStatusWithTransient, version: Option<GameVersion>) -> Self {
Self {
game,
status,
version,
}
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct Game {
id: String,
m_name: String,
m_short_description: String,
m_description: String,
// mDevelopers
// mPublishers
m_icon_object_id: String,
m_banner_object_id: String,
m_cover_object_id: String,
m_image_library_object_ids: Vec<String>,
m_image_carousel_object_ids: Vec<String>,
}
impl Game {
pub fn id(&self) -> &String {
&self.id
}
}
#[derive(serde::Serialize, Clone)]
pub struct GameUpdateEvent {
pub game_id: String,
pub status: (
Option<GameDownloadStatus>,
Option<ApplicationTransientStatus>,
),
pub version: Option<GameVersion>,
}
/**
* Called by:
* - on_cancel, when cancelled, for obvious reasons
* - when downloading, so if drop unexpectedly quits, we can resume the download. hidden by the "Downloading..." transient state, though
* - when scanning, to import the game
*/
pub fn set_partially_installed(
meta: &DownloadableMetadata,
install_dir: String,
app_handle: Option<&AppHandle>,
) {
set_partially_installed_db(&mut borrow_db_mut_checked(), meta, install_dir, app_handle);
}
pub fn set_partially_installed_db(
db_lock: &mut Database,
meta: &DownloadableMetadata,
install_dir: String,
app_handle: Option<&AppHandle>,
) {
db_lock.applications.transient_statuses.remove(meta);
db_lock.applications.game_statuses.insert(
meta.id.clone(),
GameDownloadStatus::PartiallyInstalled {
version_name: meta.version.as_ref().unwrap().clone(),
install_dir,
},
);
db_lock
.applications
.installed_game_version
.insert(meta.id.clone(), meta.clone());
if let Some(app_handle) = app_handle {
push_game_update(
app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, db_lock),
);
}
}
pub fn uninstall_game_logic(meta: DownloadableMetadata, app_handle: &AppHandle) {
debug!("triggered uninstall for agent");
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});
push_game_update(
app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, &db_handle),
);
let previous_state = db_handle.applications.game_statuses.get(&meta.id).cloned();
let previous_state = if let Some(state) = previous_state {
state
} else {
warn!("uninstall job doesn't have previous state, failing silently");
return;
};
if let Some((_, install_dir)) = match previous_state {
GameDownloadStatus::Installed {
version_name,
install_dir,
} => Some((version_name, install_dir)),
GameDownloadStatus::SetupRequired {
version_name,
install_dir,
} => Some((version_name, install_dir)),
GameDownloadStatus::PartiallyInstalled {
version_name,
install_dir,
} => Some((version_name, install_dir)),
_ => None,
} {
db_handle
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});
drop(db_handle);
let app_handle = app_handle.clone();
spawn(move || {
if let Err(e) = remove_dir_all(install_dir) {
error!("{e}");
} else {
let mut db_handle = borrow_db_mut_checked();
db_handle.applications.transient_statuses.remove(&meta);
db_handle
.applications
.installed_game_version
.remove(&meta.id);
db_handle
.applications
.game_statuses
.insert(meta.id.clone(), GameDownloadStatus::Remote {});
let _ = db_handle.applications.transient_statuses.remove(&meta);
push_game_update(
&app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, &db_handle),
);
debug!("uninstalled game id {}", &meta.id);
app_emit!(&app_handle, "update_library", ());
}
});
} else {
warn!("invalid previous state for uninstall, failing silently.");
}
}
pub fn get_current_meta(game_id: &String) -> Option<DownloadableMetadata> {
borrow_db_checked()
.applications
.installed_game_version
.get(game_id)
.cloned()
}
pub fn on_game_complete(
meta: &DownloadableMetadata,
install_dir: String,
app_handle: &AppHandle,
) -> Result<(), RemoteAccessError> {
// Fetch game version information from remote
if meta.version.is_none() {
return Err(RemoteAccessError::GameNotFound(meta.id.clone()));
}
let client = DROP_CLIENT_SYNC.clone();
let response = generate_url(
&["/api/v1/client/game/version"],
&[
("id", &meta.id),
("version", meta.version.as_ref().unwrap()),
],
)?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()?;
let game_version: GameVersion = response.json()?;
let mut handle = borrow_db_mut_checked();
handle
.applications
.game_versions
.entry(meta.id.clone())
.or_default()
.insert(meta.version.clone().unwrap(), game_version.clone());
handle
.applications
.installed_game_version
.insert(meta.id.clone(), meta.clone());
drop(handle);
let status = if game_version.setup_command.is_empty() {
GameDownloadStatus::Installed {
version_name: meta.version.clone().unwrap(),
install_dir,
}
} else {
GameDownloadStatus::SetupRequired {
version_name: meta.version.clone().unwrap(),
install_dir,
}
};
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.game_statuses
.insert(meta.id.clone(), status.clone());
drop(db_handle);
app_emit!(
app_handle,
&format!("update_game/{}", meta.id),
GameUpdateEvent {
game_id: meta.id.clone(),
status: (Some(status), None),
version: Some(game_version),
}
);
Ok(())
}
pub fn push_game_update(
app_handle: &AppHandle,
game_id: &String,
version: Option<GameVersion>,
status: GameStatusWithTransient,
) {
if let Some(GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }) =
&status.0
&& version.is_none()
{
panic!("pushed game for installed game that doesn't have version information");
}
app_emit!(
app_handle,
&format!("update_game/{game_id}"),
GameUpdateEvent {
game_id: game_id.clone(),
status,
version,
}
);
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FrontendGameOptions {
launch_string: String,
}
impl FrontendGameOptions {
pub fn launch_string(&self) -> &String {
&self.launch_string
}
}

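A hedged sketch of how `uninstall_game_logic` above might be exposed to the frontend as a Tauri command. The command name and its registration are assumptions, it presumes `DownloadableMetadata` derives `Deserialize`, and it assumes the function lives in the games crate's `library` module.

```rust
use database::DownloadableMetadata;
use games::library::uninstall_game_logic;

// Sketch: thin command wrapper; the heavy lifting (status updates, directory
// removal on a worker thread) happens inside uninstall_game_logic itself.
#[tauri::command]
fn uninstall_game(meta: DownloadableMetadata, app_handle: tauri::AppHandle) {
    uninstall_game_logic(meta, &app_handle);
}
```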
@@ -116,7 +116,7 @@ platformInfo.value = currentPlatform;
 async function openDataDir() {
   if (!dataDir.value) return;
   try {
-    await invoke("open_fs", { path: dataDir.value });
+    await open(dataDir.value);
   } catch (error) {
     console.error("Failed to open data dir:", error);
   }
@@ -126,7 +126,7 @@ async function openLogFile() {
   if (!dataDir.value) return;
   try {
     const logPath = `${dataDir.value}/drop.log`;
-    await invoke("open_fs", { path: logPath });
+    await open(logPath);
   } catch (error) {
     console.error("Failed to open log file:", error);
   }

@@ -14,8 +14,7 @@
     "@tauri-apps/plugin-os": "^2.3.0",
     "@tauri-apps/plugin-shell": "^2.3.0",
     "pino": "^9.7.0",
-    "pino-pretty": "^13.1.1",
-    "tauri": "^0.15.0"
+    "pino-pretty": "^13.1.1"
   },
   "devDependencies": {
     "@tauri-apps/cli": "^2.7.1"

@@ -1,20 +0,0 @@
[package]
name = "process"
version = "0.1.0"
edition = "2024"
[dependencies]
chrono = "0.4.42"
client = { version = "0.1.0", path = "../client" }
database = { version = "0.1.0", path = "../database" }
drop-consts = { version = "0.1.0", path = "../drop-consts" }
dynfmt = "0.1.5"
games = { version = "0.1.0", path = "../games" }
log = "0.4.28"
page_size = "0.6.0"
serde = "1.0.228"
serde_with = "3.15.0"
shared_child = "1.1.1"
tauri = "2.8.5"
tauri-plugin-opener = "2.5.0"
utils = { version = "0.1.0", path = "../utils" }

@@ -1,41 +0,0 @@
#![feature(nonpoison_mutex)]
#![feature(sync_nonpoison)]
use std::{
ops::Deref,
sync::{OnceLock, nonpoison::Mutex},
};
use tauri::AppHandle;
use crate::process_manager::ProcessManager;
pub static PROCESS_MANAGER: ProcessManagerWrapper = ProcessManagerWrapper::new();
pub mod error;
pub mod format;
pub mod process_handlers;
pub mod process_manager;
pub struct ProcessManagerWrapper(OnceLock<Mutex<ProcessManager<'static>>>);
impl ProcessManagerWrapper {
const fn new() -> Self {
ProcessManagerWrapper(OnceLock::new())
}
pub fn init(app_handle: AppHandle) {
PROCESS_MANAGER
.0
.set(Mutex::new(ProcessManager::new(app_handle)))
.unwrap_or_else(|_| panic!("Failed to initialise Process Manager")); // Using panic! here because we can't implement Debug
}
}
impl Deref for ProcessManagerWrapper {
type Target = Mutex<ProcessManager<'static>>;
fn deref(&self) -> &Self::Target {
match self.0.get() {
Some(process_manager) => process_manager,
None => unreachable!("Download manager should always be initialised"),
}
}
}

@@ -1,24 +0,0 @@
[package]
name = "remote"
version = "0.1.0"
edition = "2024"
[dependencies]
bitcode = "0.6.7"
chrono = "0.4.42"
client = { version = "0.1.0", path = "../client" }
database = { version = "0.1.0", path = "../database" }
drop-consts = { version = "0.1.0", path = "../drop-consts" }
droplet-rs = "0.7.3"
gethostname = "1.0.2"
hex = "0.4.3"
http = "1.3.1"
log = "0.4.28"
md5 = "0.8.0"
reqwest = "0.12.23"
reqwest-websocket = "0.5.1"
serde = "1.0.228"
serde_with = "3.15.0"
tauri = "2.8.5"
url = "2.5.7"
utils = { version = "0.1.0", path = "../utils" }

@@ -1,152 +0,0 @@
use std::{collections::HashMap, env};
use chrono::Utc;
use client::{app_status::AppStatus, user::User};
use database::{DatabaseAuth, interface::borrow_db_checked};
use droplet_rs::ssl::sign_nonce;
use gethostname::gethostname;
use log::{error, warn};
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
error::{DropServerError, RemoteAccessError},
requests::make_authenticated_get,
utils::DROP_CLIENT_SYNC,
};
use super::{
cache::{cache_object, get_cached_object},
requests::generate_url,
};
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CapabilityConfiguration {}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct InitiateRequestBody {
name: String,
platform: String,
capabilities: HashMap<String, CapabilityConfiguration>,
mode: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct HandshakeRequestBody {
client_id: String,
token: String,
}
impl HandshakeRequestBody {
pub fn new(client_id: String, token: String) -> Self {
Self { client_id, token }
}
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HandshakeResponse {
private: String,
certificate: String,
id: String,
}
impl From<HandshakeResponse> for DatabaseAuth {
fn from(value: HandshakeResponse) -> Self {
DatabaseAuth::new(value.private, value.certificate, value.id, None)
}
}
pub fn generate_authorization_header() -> String {
let certs = {
let db = borrow_db_checked();
db.auth.clone().expect("Authorisation not initialised")
};
let nonce = Utc::now().timestamp_millis().to_string();
let signature =
sign_nonce(certs.private, nonce.clone()).expect("Failed to generate authorisation header");
format!("Nonce {} {} {}", certs.client_id, nonce, signature)
}
pub async fn fetch_user() -> Result<User, RemoteAccessError> {
let response = make_authenticated_get(generate_url(&["/api/v1/client/user"], &[])?).await?;
if response.status() != 200 {
let err: DropServerError = response.json().await?;
warn!("{err:?}");
if err.status_message == "Nonce expired" {
return Err(RemoteAccessError::OutOfSync);
}
return Err(RemoteAccessError::InvalidResponse(err));
}
response
.json::<User>()
.await
.map_err(std::convert::Into::into)
}
pub fn auth_initiate_logic(mode: String) -> Result<String, RemoteAccessError> {
let base_url = {
let db_lock = borrow_db_checked();
Url::parse(&db_lock.base_url.clone())?
};
let hostname = gethostname();
let endpoint = base_url.join("/api/v1/client/auth/initiate")?;
let body = InitiateRequestBody {
name: format!("{} (Desktop)", hostname.display()),
platform: env::consts::OS.to_string(),
capabilities: HashMap::from([
("peerAPI".to_owned(), CapabilityConfiguration {}),
("cloudSaves".to_owned(), CapabilityConfiguration {}),
]),
mode,
};
let client = DROP_CLIENT_SYNC.clone();
let response = client.post(endpoint.to_string()).json(&body).send()?;
if response.status() != 200 {
let data: DropServerError = response.json()?;
error!("could not start handshake: {}", data.status_message);
return Err(RemoteAccessError::HandshakeFailed(data.status_message));
}
let response = response.text()?;
Ok(response)
}
pub async fn setup() -> (AppStatus, Option<User>) {
let auth = {
let data = borrow_db_checked();
data.auth.clone()
};
if auth.is_some() {
let user_result = match fetch_user().await {
Ok(data) => data,
Err(RemoteAccessError::FetchError(_)) => {
let user = get_cached_object::<User>("user").ok();
return (AppStatus::Offline, user);
}
Err(_) => return (AppStatus::SignedInNeedsReauth, None),
};
if let Err(e) = cache_object("user", &user_result) {
warn!("Could not cache user object with error {e}");
}
return (AppStatus::SignedIn, Some(user_result));
}
(AppStatus::SignedOut, None)
}

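A minimal sketch of attaching the nonce-signed header from `generate_authorization_header` to an outgoing request on the shared blocking client; the endpoint path is a placeholder, only the helpers come from this diff.

```rust
use remote::auth::generate_authorization_header;
use remote::error::RemoteAccessError;
use remote::requests::generate_url;
use remote::utils::DROP_CLIENT_SYNC;

// Sketch: authenticated GET against the configured server, mirroring the
// request pattern used elsewhere in this changeset.
fn fetch_authenticated_text(path: &str) -> Result<String, RemoteAccessError> {
    let url = generate_url(&[path], &[])?;
    let client = DROP_CLIENT_SYNC.clone();
    let response = client
        .get(url)
        .header("Authorization", generate_authorization_header())
        .send()?;
    Ok(response.text()?)
}
```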
@@ -1,82 +0,0 @@
use database::{DB, interface::DatabaseImpls};
use http::{Response, header::CONTENT_TYPE, response::Builder as ResponseBuilder};
use log::{debug, warn};
use tauri::UriSchemeResponder;
use crate::{error::CacheError, utils::DROP_CLIENT_ASYNC};
use super::{
auth::generate_authorization_header,
cache::{ObjectCache, cache_object, get_cached_object},
};
pub async fn fetch_object_wrapper(request: http::Request<Vec<u8>>, responder: UriSchemeResponder) {
match fetch_object(request).await {
Ok(r) => responder.respond(r),
Err(e) => {
warn!("Cache error: {e}");
responder.respond(
Response::builder()
.status(500)
.body(Vec::new())
.expect("Failed to build error response"),
);
}
};
}
pub async fn fetch_object(
request: http::Request<Vec<u8>>,
) -> Result<Response<Vec<u8>>, CacheError> {
// Drop leading /
let object_id = &request.uri().path()[1..];
let cache_result = get_cached_object::<ObjectCache>(object_id);
if let Ok(cache_result) = &cache_result
&& !cache_result.has_expired()
{
return cache_result.try_into();
}
let header = generate_authorization_header();
let client = DROP_CLIENT_ASYNC.clone();
let url = format!("{}api/v1/client/object/{object_id}", DB.fetch_base_url());
let response = client.get(url).header("Authorization", header).send().await;
match response {
Ok(r) => {
let resp_builder = ResponseBuilder::new().header(
CONTENT_TYPE,
r.headers()
.get("Content-Type")
.expect("Failed get Content-Type header"),
);
let data = match r.bytes().await {
Ok(data) => Vec::from(data),
Err(e) => {
warn!("Could not get data from cache object {object_id} with error {e}",);
Vec::new()
}
};
let resp = resp_builder
.body(data)
.expect("Failed to build object cache response body");
if cache_result.map_or(true, |x| x.has_expired()) {
cache_object::<ObjectCache>(object_id, &resp.clone().try_into()?)
.expect("Failed to create cached object");
}
Ok(resp)
}
Err(e) => {
debug!("Object fetch failed with error {e}. Attempting to download from cache");
match cache_result {
Ok(cache_result) => cache_result.try_into(),
Err(e) => {
warn!("{e}");
Err(CacheError::Remote(e))
}
}
}
}
}

@@ -1,10 +0,0 @@
pub mod auth;
#[macro_use]
pub mod cache;
pub mod error;
pub mod fetch_object;
pub mod requests;
pub mod server_proto;
pub mod utils;
pub use auth::setup;

@@ -1,108 +0,0 @@
use std::str::FromStr;
use database::borrow_db_checked;
use http::{Request, Response, StatusCode, Uri, uri::PathAndQuery};
use log::{error, warn};
use tauri::UriSchemeResponder;
use utils::webbrowser_open::webbrowser_open;
use crate::utils::DROP_CLIENT_SYNC;
pub async fn handle_server_proto_offline_wrapper(
request: Request<Vec<u8>>,
responder: UriSchemeResponder,
) {
responder.respond(match handle_server_proto_offline(request).await {
Ok(res) => res,
Err(_) => unreachable!(),
});
}
pub async fn handle_server_proto_offline(
_request: Request<Vec<u8>>,
) -> Result<Response<Vec<u8>>, StatusCode> {
Ok(Response::builder()
.status(StatusCode::NOT_FOUND)
.body(Vec::new())
.expect("Failed to build error response for proto offline"))
}
pub async fn handle_server_proto_wrapper(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
match handle_server_proto(request).await {
Ok(r) => responder.respond(r),
Err(e) => {
warn!("Cache error: {e}");
responder.respond(
Response::builder()
.status(e)
.body(Vec::new())
.expect("Failed to build error response"),
);
}
}
}
async fn handle_server_proto(request: Request<Vec<u8>>) -> Result<Response<Vec<u8>>, StatusCode> {
let db_handle = borrow_db_checked();
let auth = match db_handle.auth.as_ref() {
Some(auth) => auth,
None => {
error!("Could not find auth in database");
return Err(StatusCode::UNAUTHORIZED);
}
};
let web_token = match &auth.web_token {
Some(token) => token,
None => return Err(StatusCode::UNAUTHORIZED),
};
let remote_uri = db_handle
.base_url
.parse::<Uri>()
.expect("Failed to parse base url");
let path = request.uri().path();
let mut new_uri = request.uri().clone().into_parts();
new_uri.path_and_query = Some(
PathAndQuery::from_str(&format!("{path}?noWrapper=true"))
.expect("Failed to parse request path in proto"),
);
new_uri.authority = remote_uri.authority().cloned();
new_uri.scheme = remote_uri.scheme().cloned();
let err_msg = &format!("Failed to build new uri from parts {new_uri:?}");
let new_uri = Uri::from_parts(new_uri).expect(err_msg);
let whitelist_prefix = ["/store", "/api", "/_", "/fonts"];
if whitelist_prefix.iter().all(|f| !path.starts_with(f)) {
webbrowser_open(new_uri.to_string());
return Ok(Response::new(Vec::new()));
}
let client = DROP_CLIENT_SYNC.clone();
let response = match client
.request(request.method().clone(), new_uri.to_string())
.header("Authorization", format!("Bearer {web_token}"))
.headers(request.headers().clone())
.send()
{
Ok(response) => response,
Err(e) => {
warn!("Could not send response. Got {e} when sending");
return Err(e.status().unwrap_or(StatusCode::BAD_REQUEST));
}
};
let response_status = response.status();
let response_body = match response.bytes() {
Ok(bytes) => bytes,
Err(e) => return Err(e.status().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR)),
};
let http_response = Response::builder()
.status(response_status)
.body(response_body.to_vec())
.expect("Failed to build server proto response");
Ok(http_response)
}

@@ -1,117 +0,0 @@
use std::{
fs::{self, File}, io::Read, sync::LazyLock
};
use drop_consts::DATA_ROOT_DIR;
use log::{debug, info, warn};
use reqwest::Certificate;
use serde::Deserialize;
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DropHealthcheck {
app_name: String,
}
impl DropHealthcheck {
pub fn app_name(&self) -> &String {
&self.app_name
}
}
static DROP_CERT_BUNDLE: LazyLock<Vec<Certificate>> = LazyLock::new(fetch_certificates);
pub static DROP_CLIENT_SYNC: LazyLock<reqwest::blocking::Client> = LazyLock::new(get_client_sync);
pub static DROP_CLIENT_ASYNC: LazyLock<reqwest::Client> = LazyLock::new(get_client_async);
pub static DROP_CLIENT_WS_CLIENT: LazyLock<reqwest::Client> = LazyLock::new(get_client_ws);
fn fetch_certificates() -> Vec<Certificate> {
let certificate_dir = DATA_ROOT_DIR.join("certificates");
let mut certs = Vec::new();
match fs::read_dir(certificate_dir) {
Ok(c) => {
for entry in c {
match entry {
Ok(c) => {
let mut buf = Vec::new();
match File::open(c.path()) {
Ok(f) => f,
Err(e) => {
warn!(
"Failed to open file at {} with error {}",
c.path().display(),
e
);
continue;
}
}
.read_to_end(&mut buf)
.unwrap_or_else(|e| {
panic!(
"Failed to read to end of certificate file {} with error {}",
c.path().display(),
e
)
});
match Certificate::from_pem_bundle(&buf) {
Ok(certificates) => {
for cert in certificates {
certs.push(cert);
}
info!(
"added {} certificate(s) from {}",
certs.len(),
c.file_name().display()
);
}
Err(e) => warn!(
"Invalid certificate file {} with error {}",
c.path().display(),
e
),
}
}
Err(_) => todo!(),
}
}
}
Err(e) => {
debug!("not loading certificates due to error: {e}");
}
};
certs
}
pub fn get_client_sync() -> reqwest::blocking::Client {
let mut client = reqwest::blocking::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client
.use_rustls_tls()
.build()
.expect("Failed to build synchronous client")
}
pub fn get_client_async() -> reqwest::Client {
let mut client = reqwest::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client
.use_rustls_tls()
.build()
.expect("Failed to build asynchronous client")
}
pub fn get_client_ws() -> reqwest::Client {
let mut client = reqwest::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client
.use_rustls_tls()
.http1_only()
.build()
.expect("Failed to build websocket client")
}

src-tauri/Cargo.lock (generated, 1410 changed lines)

File diff suppressed because it is too large.

@@ -1,140 +1,101 @@
 [package]
 name = "drop-app"
 version = "0.3.3"
-description = "The client application for the open-source, self-hosted game distribution platform Drop"
-authors = ["Drop OSS"]
+# authors = ["Drop OSS"]
 edition = "2024"
+description = "The client application for the open-source, self-hosted game distribution platform Drop"
+[workspace]
+resolver = "3"
+members = ["drop-consts",
+    "drop-database",
+    "drop-downloads",
+    "drop-errors", "drop-library",
+    "drop-native-library",
+    "drop-process",
+    "drop-remote",
+]
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
-tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }
 [lib]
+crate-type = ["cdylib", "rlib", "staticlib"]
 # The `_lib` suffix may seem redundant but it is necessary
 # to make the lib name unique and wouldn't conflict with the bin name.
 # This seems to be only an issue on Windows, see https://github.com/rust-lang/cargo/issues/8519
 name = "drop_app_lib"
-crate-type = ["staticlib", "cdylib", "rlib"]
-rustflags = ["-C", "target-feature=+aes,+sse2"]
-[build-dependencies]
-tauri-build = { version = "2.0.0", features = [] }
+# rustflags = ["-C", "target-feature=+aes,+sse2"]
 [dependencies]
-tauri-plugin-shell = "2.2.1"
-serde_json = "1"
-rayon = "1.10.0"
-webbrowser = "1.0.2"
-url = "2.5.2"
-tauri-plugin-deep-link = "2"
-log = "0.4.22"
-hex = "0.4.3"
-tauri-plugin-dialog = "2"
-http = "1.1.0"
-urlencoding = "2.1.3"
-md5 = "0.7.0"
-chrono = "0.4.38"
-tauri-plugin-os = "2"
 boxcar = "0.2.7"
-umu-wrapper-lib = "0.1.0"
-tauri-plugin-autostart = "2.0.0"
-shared_child = "1.0.1"
-serde_with = "3.12.0"
-slice-deque = "0.3.0"
-throttle_my_fn = "0.2.6"
-parking_lot = "0.12.3"
-atomic-instant-full = "0.1.0"
-cacache = "13.1.0"
-http-serde = "2.1.1"
-reqwest-middleware = "0.4.0"
-reqwest-middleware-cache = "0.1.1"
-deranged = "=0.4.0"
-droplet-rs = "0.7.3"
-gethostname = "1.0.1"
-zstd = "0.13.3"
-tar = "0.4.44"
-rand = "0.9.1"
-regex = "1.11.1"
-tempfile = "3.19.1"
-schemars = "0.8.22"
-sha1 = "0.10.6"
 dirs = "6.0.0"
-whoami = "1.6.0"
-filetime = "0.2.25"
-walkdir = "2.5.0"
-known-folders = "1.2.0"
-native_model = { version = "0.6.4", features = ["rmp_serde_1_3"], git = "https://github.com/Drop-OSS/native_model.git"}
-tauri-plugin-opener = "2.4.0"
-bitcode = "0.6.6"
-reqwest-websocket = "0.5.0"
+drop-database = { path = "./drop-database" }
+drop-downloads = { path = "./drop-downloads" }
+drop-errors = { path = "./drop-errors" }
+drop-native-library = { path = "./drop-native-library" }
+drop-process = { path = "./drop-process" }
+drop-remote = { path = "./drop-remote" }
 futures-lite = "2.6.0"
-page_size = "0.6.0"
-sysinfo = "0.36.1"
-humansize = "2.1.3"
-tokio-util = { version = "0.7.16", features = ["io"] }
-futures-core = "0.3.31"
-bytes = "1.10.1"
-# tailscale = { path = "./tailscale" }
-# Workspaces
-client = { version = "0.1.0", path = "../client" }
-database = { path = "../database" }
-process = { path = "../process" }
-remote = { version = "0.1.0", path = "../remote" }
-utils = { path = "../utils" }
-games = { version = "0.1.0", path = "../games" }
-download_manager = { version = "0.1.0", path = "../download_manager" }
-drop-consts = { version = "0.1.0", path = "../drop-consts" }
-[dependencies.dynfmt]
-version = "0.1.5"
-features = ["curly"]
-[dependencies.tauri]
-version = "2.7.0"
-features = ["protocol-asset", "tray-icon"]
-[dependencies.tokio]
-version = "1.40.0"
-features = ["rt", "tokio-macros", "signal"]
+hex = "0.4.3"
+http = "1.1.0"
+known-folders = "1.2.0"
+log = "0.4.22"
+md5 = "0.7.0"
+rayon = "1.10.0"
+regex = "1.11.1"
+reqwest-websocket = "0.5.0"
+serde_json = "1"
+tar = "0.4.44"
+tauri = { version = "2.7.0", features = ["protocol-asset", "tray-icon"] }
+tauri-plugin-autostart = "2.0.0"
+tauri-plugin-deep-link = "2"
+tauri-plugin-dialog = "2"
+tauri-plugin-opener = "2.4.0"
+tauri-plugin-os = "2"
+tauri-plugin-shell = "2.2.1"
+tempfile = "3.19.1"
+url = "2.5.2"
+webbrowser = "1.0.2"
+whoami = "1.6.0"
+zstd = "0.13.3"
 [dependencies.log4rs]
 version = "1.3.0"
 features = ["console_appender", "file_appender"]
-[dependencies.rustix]
-version = "0.38.37"
-features = ["fs"]
-[dependencies.uuid]
-version = "1.10.0"
-features = ["v4", "fast-rng", "macro-diagnostics"]
-[dependencies.rustbreak]
-version = "2"
-features = ["other_errors"] # You can also use "yaml_enc" or "bin_enc"
 [dependencies.reqwest]
 version = "0.12.22"
 default-features = false
 features = [
-    "json",
-    "http2",
     "blocking",
-    "rustls-tls",
+    "http2",
+    "json",
     "native-tls-alpn",
+    "rustls-tls",
     "rustls-tls-native-roots",
     "stream",
 ]
+[dependencies.rustix]
+version = "0.38.37"
+features = ["fs"]
 [dependencies.serde]
 version = "1"
 features = ["derive", "rc"]
+[dependencies.uuid]
+version = "1.10.0"
+features = ["fast-rng", "macro-diagnostics", "v4"]
+[build-dependencies]
+tauri-build = { version = "2.0.0", features = [] }
+[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
+tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }
 [profile.release]
 lto = true
+panic = "abort"
 codegen-units = 1
-panic = 'abort'

@@ -0,0 +1,15 @@
use std::{
path::PathBuf,
sync::{Arc, LazyLock},
};
#[cfg(not(debug_assertions))]
static DATA_ROOT_PREFIX: &'static str = "drop";
#[cfg(debug_assertions)]
static DATA_ROOT_PREFIX: &str = "drop-debug";
pub static DATA_ROOT_DIR: LazyLock<&'static PathBuf> =
LazyLock::new(|| Box::leak(Box::new(dirs::data_dir().unwrap().join(DATA_ROOT_PREFIX))));
pub static CACHE_DIR: LazyLock<&'static PathBuf> =
LazyLock::new(|| Box::leak(Box::new(DATA_ROOT_DIR.join("cache"))));

@@ -0,0 +1,21 @@
[package]
name = "drop-database"
version = "0.1.0"
edition = "2024"
[dependencies]
bitcode = "0.6.7"
chrono = "0.4.42"
drop-consts = { path = "../drop-consts" }
drop-library = { path = "../drop-library" }
drop-native-library = { path = "../drop-native-library" }
log = "0.4.28"
native_model = { git = "https://github.com/Drop-OSS/native_model.git", version = "0.6.4", features = [
"rmp_serde_1_3",
] }
rustbreak = "2.0.0"
serde = { version = "1.0.219", features = ["derive"] }
serde_with = "3.14.0"
url = "2.5.7"
whoami = "1.6.1"

@@ -0,0 +1,140 @@
use std::{
fs::{self, create_dir_all},
mem::ManuallyDrop,
ops::{Deref, DerefMut},
path::PathBuf,
sync::{Arc, LazyLock, RwLockReadGuard, RwLockWriteGuard},
};
use chrono::Utc;
use drop_consts::DATA_ROOT_DIR;
use log::{debug, error, info, warn};
use rustbreak::{DeSerError, DeSerializer, PathDatabase, RustbreakError};
use serde::{Serialize, de::DeserializeOwned};
use crate::DB;
use super::models::data::Database;
// Custom JSON serializer to support everything we need
#[derive(Debug, Default, Clone)]
pub struct DropDatabaseSerializer;
impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
for DropDatabaseSerializer
{
fn serialize(&self, val: &T) -> rustbreak::error::DeSerResult<Vec<u8>> {
native_model::encode(val).map_err(|e| DeSerError::Internal(e.to_string()))
}
fn deserialize<R: std::io::Read>(&self, mut s: R) -> rustbreak::error::DeSerResult<T> {
let mut buf = Vec::new();
s.read_to_end(&mut buf)
.map_err(|e| rustbreak::error::DeSerError::Internal(e.to_string()))?;
let (val, _version) =
native_model::decode(buf).map_err(|e| DeSerError::Internal(e.to_string()))?;
Ok(val)
}
}
pub type DatabaseInterface =
rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer>;
pub trait DatabaseImpls {
fn set_up_database() -> DatabaseInterface;
}
impl DatabaseImpls for DatabaseInterface {
fn set_up_database() -> DatabaseInterface {
let db_path = DATA_ROOT_DIR.join("drop.db");
let games_base_dir = DATA_ROOT_DIR.join("games");
let logs_root_dir = DATA_ROOT_DIR.join("logs");
let cache_dir = DATA_ROOT_DIR.join("cache");
let pfx_dir = DATA_ROOT_DIR.join("pfx");
debug!("creating data directory at {DATA_ROOT_DIR:?}");
create_dir_all(DATA_ROOT_DIR.as_path()).unwrap();
create_dir_all(&games_base_dir).unwrap();
create_dir_all(&logs_root_dir).unwrap();
create_dir_all(&cache_dir).unwrap();
create_dir_all(&pfx_dir).unwrap();
let exists = fs::exists(db_path.clone()).unwrap();
if exists {
match PathDatabase::load_from_path(db_path.clone()) {
Ok(db) => db,
Err(e) => handle_invalid_database(e, db_path, games_base_dir, cache_dir),
}
} else {
let default = Database::new(games_base_dir, None);
debug!(
"Creating database at path {}",
db_path.as_os_str().to_str().unwrap()
);
PathDatabase::create_at_path(db_path, default).expect("Database could not be created")
}
}
}
// TODO: Make the error relelvant rather than just assume that it's a Deserialize error
fn handle_invalid_database(
_e: RustbreakError,
db_path: PathBuf,
games_base_dir: PathBuf,
cache_dir: PathBuf,
) -> rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer> {
warn!("{_e}");
let new_path = {
let time = Utc::now().timestamp();
let mut base = db_path.clone();
base.set_file_name(format!("drop.db.backup-{time}"));
base
};
info!("old database stored at: {}", new_path.to_string_lossy());
fs::rename(&db_path, &new_path).unwrap();
let db = Database::new(
games_base_dir.into_os_string().into_string().unwrap(),
Some(new_path),
);
PathDatabase::create_at_path(db_path, db).expect("Database could not be created")
}
// To automatically save the database upon drop
pub struct DBRead<'a>(pub(crate) RwLockReadGuard<'a, Database>);
pub struct DBWrite<'a>(pub(crate) ManuallyDrop<RwLockWriteGuard<'a, Database>>);
impl<'a> Deref for DBWrite<'a> {
type Target = Database;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<'a> DerefMut for DBWrite<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<'a> Deref for DBRead<'a> {
type Target = Database;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Drop for DBWrite<'_> {
fn drop(&mut self) {
unsafe {
ManuallyDrop::drop(&mut self.0);
}
match DB.save() {
Ok(()) => {}
Err(e) => {
error!("database failed to save with error {e}");
panic!("database failed to save with error {e}")
}
}
}
}
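The serializer above is a thin adapter over native_model's versioned encoding: each persisted value carries a model id and version, and older payloads are upgraded through `From` impls when decoded, which is how `drop.db` survives schema changes like the v3 → v4 move further down. A minimal, hypothetical sketch of that mechanism (the `SettingsV1`/`SettingsV2` types are invented for illustration and are not part of this changeset):

```rust
use native_model::native_model;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Clone)]
#[native_model(id = 99, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
struct SettingsV1 {
    volume: u8,
}

#[derive(Serialize, Deserialize, Clone)]
#[native_model(id = 99, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from = SettingsV1)]
struct SettingsV2 {
    volume: u8,
    muted: bool,
}

impl From<SettingsV1> for SettingsV2 {
    // Upgrade path native_model runs when an old payload is decoded as SettingsV2.
    fn from(v1: SettingsV1) -> Self {
        SettingsV2 { volume: v1.volume, muted: false }
    }
}

fn roundtrip() -> Result<(), Box<dyn std::error::Error>> {
    // Encode with the old model, decode as the new one: the `from` chain runs,
    // mirroring what DropDatabaseSerializer::deserialize does for drop.db on load.
    let bytes = native_model::encode(&SettingsV1 { volume: 7 })?;
    let (upgraded, _version): (SettingsV2, _) = native_model::decode(bytes)?;
    assert!(!upgraded.muted);
    Ok(())
}
```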

View File

@ -1,18 +1,15 @@
use std::{
-    collections::HashMap,
-    fs::File,
-    io::{self, Read, Write},
-    path::{Path, PathBuf},
+    collections::HashMap, fs::File, io::{self, Read, Write}, path::{Path, PathBuf}
};
-use drop_consts::DROP_DATA_PATH;
use log::error;
use native_model::{Decode, Encode};
-use utils::lock;
pub type DropData = v1::DropData;
-pub mod v1 {
+pub static DROP_DATA_PATH: &str = ".dropdata";
+mod v1 {
    use std::{collections::HashMap, path::PathBuf, sync::Mutex};
    use native_model::native_model;
@ -52,12 +49,7 @@ impl DropData {
        let mut s = Vec::new();
        file.read_to_end(&mut s)?;
-        native_model::rmp_serde_1_3::RmpSerde::decode(s).map_err(|e| {
-            io::Error::new(
-                io::ErrorKind::InvalidData,
-                format!("Failed to decode drop data: {e}"),
-            )
-        })
+        Ok(native_model::rmp_serde_1_3::RmpSerde::decode(s).unwrap())
    }
    pub fn write(&self) {
        let manifest_raw = match native_model::rmp_serde_1_3::RmpSerde::encode(&self) {
@ -79,15 +71,12 @@ impl DropData {
        }
    }
    pub fn set_contexts(&self, completed_contexts: &[(String, bool)]) {
-        *lock!(self.contexts) = completed_contexts
-            .iter()
-            .map(|s| (s.0.clone(), s.1))
-            .collect();
+        *self.contexts.lock().unwrap() = completed_contexts.iter().map(|s| (s.0.clone(), s.1)).collect();
    }
    pub fn set_context(&self, context: String, state: bool) {
-        lock!(self.contexts).entry(context).insert_entry(state);
+        self.contexts.lock().unwrap().entry(context).insert_entry(state);
    }
    pub fn get_contexts(&self) -> HashMap<String, bool> {
-        lock!(self.contexts).clone()
+        self.contexts.lock().unwrap().clone()
    }
}

View File

@ -0,0 +1,34 @@
use std::{mem::ManuallyDrop, sync::LazyLock};
use log::error;
use crate::db::{DBRead, DBWrite, DatabaseImpls, DatabaseInterface};
pub mod db;
pub mod debug;
pub mod models;
pub mod process;
pub mod runtime_models;
pub mod drop_data;
pub static DB: LazyLock<DatabaseInterface> = LazyLock::new(DatabaseInterface::set_up_database);
pub fn borrow_db_checked<'a>() -> DBRead<'a> {
match DB.borrow_data() {
Ok(data) => DBRead(data),
Err(e) => {
error!("database borrow failed with error {e}");
panic!("database borrow failed with error {e}");
}
}
}
pub fn borrow_db_mut_checked<'a>() -> DBWrite<'a> {
match DB.borrow_data_mut() {
Ok(data) => DBWrite(ManuallyDrop::new(data)),
Err(e) => {
error!("database borrow mut failed with error {e}");
panic!("database borrow mut failed with error {e}");
}
}
}
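A short usage sketch of these helpers (illustrative only; the mutation shown is a placeholder): `borrow_db_mut_checked` hands back the `DBWrite` guard from `db.rs`, so the database is persisted automatically when the guard goes out of scope, while `borrow_db_checked` is a plain read guard.

```rust
use drop_database::{borrow_db_checked, borrow_db_mut_checked};

fn example_update_settings() {
    {
        // DBWrite derefs to Database; when it drops at the end of this block,
        // Drop for DBWrite calls DB.save() (and panics if persisting fails).
        let mut db = borrow_db_mut_checked();
        db.settings = Default::default(); // placeholder mutation
    }

    // DBRead is read-only; nothing is written back when it drops.
    let db = borrow_db_checked();
    let _dirs = &db.drop_applications.install_dirs;
}
```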

View File

@ -8,7 +8,7 @@ pub mod data {
    // Declare it using the actual version that it is from, i.e. v1::Settings rather than just Settings from here
    pub type GameVersion = v1::GameVersion;
-    pub type Database = v3::Database;
+    pub type Database = v4::Database;
    pub type Settings = v1::Settings;
    pub type DatabaseAuth = v1::DatabaseAuth;
@ -19,7 +19,7 @@ pub mod data {
    */
    pub type DownloadableMetadata = v1::DownloadableMetadata;
    pub type DownloadType = v1::DownloadType;
-    pub type DatabaseApplications = v2::DatabaseApplications;
+    pub type DatabaseApplications = v4::DatabaseApplications;
    // pub type DatabaseCompatInfo = v2::DatabaseCompatInfo;
    use std::collections::HashMap;
@ -40,7 +40,7 @@ pub mod data {
        use serde_with::serde_as;
        use std::{collections::HashMap, path::PathBuf};
-        use crate::platform::Platform;
+        use crate::process::Platform;
        use super::{Deserialize, Serialize, native_model};
@ -48,7 +48,7 @@ pub mod data {
            "{}".to_owned()
        }
-        #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
+        #[derive(Serialize, Deserialize, Clone, Debug)]
        #[serde(rename_all = "camelCase")]
        #[native_model(id = 2, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
        pub struct GameVersion {
@ -191,6 +191,8 @@ pub mod data {
        use serde_with::serde_as;
+        use crate::runtime_models::Game;
        use super::{Deserialize, Serialize, native_model, v1};
        #[native_model(id = 1, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from = v1::Database)]
@ -273,9 +275,7 @@ pub mod data {
        #[native_model(id = 3, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from=v1::DatabaseApplications)]
        pub struct DatabaseApplications {
            pub install_dirs: Vec<PathBuf>,
-            // Guaranteed to exist if the game also exists in the app state map
            pub game_statuses: HashMap<String, GameDownloadStatus>,
            pub game_versions: HashMap<String, HashMap<String, v1::GameVersion>>,
            pub installed_game_version: HashMap<String, v1::DownloadableMetadata>,
@ -332,41 +332,72 @@
        }
    }
mod v4 {
use std::{collections::HashMap, path::PathBuf};
use drop_library::libraries::LibraryProviderIdentifier;
use drop_native_library::impls::DropNativeLibraryProvider;
use serde_with::serde_as;
use crate::models::data::v3;
use super::{Deserialize, Serialize, native_model, v1, v2};
#[derive(Serialize, Deserialize, Clone)]
pub enum Library {
NativeLibrary(DropNativeLibraryProvider),
}
#[serde_as]
#[derive(Serialize, Deserialize, Default, Clone)]
#[serde(rename_all = "camelCase")]
#[native_model(id = 3, version = 4, with = native_model::rmp_serde_1_3::RmpSerde, from=v2::DatabaseApplications)]
pub struct DatabaseApplications {
pub install_dirs: Vec<PathBuf>,
pub libraries: HashMap<LibraryProviderIdentifier, Library>,
#[serde(skip)]
pub transient_statuses:
HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
}
impl From<v2::DatabaseApplications> for DatabaseApplications {
fn from(value: v2::DatabaseApplications) -> Self {
todo!()
}
}
#[native_model(id = 1, version = 4, with = native_model::rmp_serde_1_3::RmpSerde, from = v3::Database)]
#[derive(Serialize, Deserialize, Default, Clone)]
pub struct Database {
#[serde(default)]
pub settings: v1::Settings,
pub drop_applications: DatabaseApplications,
#[serde(skip)]
pub prev_database: Option<PathBuf>,
}
impl From<v3::Database> for Database {
fn from(value: v3::Database) -> Self {
Database {
settings: value.settings,
drop_applications: value.applications.into(),
prev_database: value.prev_database,
}
}
}
}
impl Database {
    pub fn new<T: Into<PathBuf>>(
        games_base_dir: T,
        prev_database: Option<PathBuf>,
-        cache_dir: PathBuf,
    ) -> Self {
        Self {
-            applications: DatabaseApplications {
+            drop_applications: DatabaseApplications {
                install_dirs: vec![games_base_dir.into()],
-                game_statuses: HashMap::new(),
-                game_versions: HashMap::new(),
-                installed_game_version: HashMap::new(),
+                libraries: HashMap::new(),
                transient_statuses: HashMap::new(),
            },
            prev_database,
-            base_url: String::new(),
-            auth: None,
            settings: Settings::default(),
-            cache_dir,
-            compat_info: None,
        }
    }
}
-impl DatabaseAuth {
-    pub fn new(
-        private: String,
-        cert: String,
-        client_id: String,
-        web_token: Option<String>,
-    ) -> Self {
-        Self {
-            private,
-            cert,
-            client_id,
-            web_token,
-        }
-    }
-}
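The `From<v2::DatabaseApplications>` conversion above is still a `todo!()` in this commit. One possible shape for it, assuming installed games get re-registered by the native library provider on the next startup rather than being mapped here (a sketch, not the project's decided migration):

```rust
impl From<v2::DatabaseApplications> for DatabaseApplications {
    fn from(value: v2::DatabaseApplications) -> Self {
        DatabaseApplications {
            // Install directories carry over unchanged.
            install_dirs: value.install_dirs,
            // No provider registry existed before v4; start empty and let the
            // Drop native library provider re-import installed games later.
            libraries: HashMap::new(),
            // Transient statuses are #[serde(skip)] and rebuilt at runtime anyway.
            transient_statuses: HashMap::new(),
        }
    }
}
```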

View File

@ -40,7 +40,7 @@ impl From<whoami::Platform> for Platform {
            whoami::Platform::Windows => Platform::Windows,
            whoami::Platform::Linux => Platform::Linux,
            whoami::Platform::MacOS => Platform::MacOs,
-            platform => unimplemented!("Playform {} is not supported", platform),
+            _ => unimplemented!(),
        }
    }
}

View File

@ -0,0 +1,28 @@
use bitcode::{Decode, Encode};
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct Game {
pub id: String,
m_name: String,
m_short_description: String,
m_description: String,
// mDevelopers
// mPublishers
m_icon_object_id: String,
m_banner_object_id: String,
m_cover_object_id: String,
m_image_library_object_ids: Vec<String>,
m_image_carousel_object_ids: Vec<String>,
}
#[derive(Clone, Serialize, Deserialize, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct User {
id: String,
username: String,
admin: bool,
display_name: String,
profile_picture_object_id: String,
}

View File

@ -0,0 +1,16 @@
[package]
name = "drop-downloads"
version = "0.1.0"
edition = "2024"
[dependencies]
atomic-instant-full = "0.1.0"
drop-database = { path = "../drop-database" }
drop-errors = { path = "../drop-errors" }
# can't depend, cycle
# drop-native-library = { path = "../drop-native-library" }
log = "0.4.22"
parking_lot = "0.12.4"
serde = "1.0.219"
tauri = { version = "2.7.0" }
throttle_my_fn = "0.2.6"

View File

@ -7,15 +7,13 @@ use std::{
    thread::{JoinHandle, spawn},
};
-use database::DownloadableMetadata;
+use drop_database::models::data::DownloadableMetadata;
+use drop_errors::application_download_error::ApplicationDownloadError;
use log::{debug, error, info, warn};
-use tauri::AppHandle;
+use tauri::{AppHandle, Emitter};
-use utils::{app_emit, lock, send};
use crate::{
-    download_manager_frontend::DownloadStatus,
-    error::ApplicationDownloadError,
-    frontend_updates::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent},
+    download_manager_frontend::DownloadStatus, events::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent}
};
use super::{
@ -31,43 +29,6 @@ use super::{
pub type DownloadAgent = Arc<Box<dyn Downloadable + Send + Sync>>;
pub type CurrentProgressObject = Arc<Mutex<Option<Arc<ProgressObject>>>>;
-/*
-Welcome to the download manager, the most overengineered, glorious piece of bullshit.
-The download manager takes a queue of ids and their associated
-DownloadAgents, and then, one-by-one, executes them. It provides an interface
-to interact with the currently downloading agent, and manage the queue.
-When the DownloadManager is initialised, it is designed to provide a reference
-which can be used to provide some instructions (the DownloadManagerInterface),
-but other than that, it runs without any sort of interruptions.
-It does this by opening up two data structures. Primarily is the command_receiver,
-and mpsc (multi-channel-single-producer) which allows commands to be sent from
-the Interface, and queued up for the Manager to process.
-These have been mapped in the DownloadManagerSignal docs.
-The other way to interact with the DownloadManager is via the donwload_queue,
-which is just a collection of ids which may be rearranged to suit
-whichever download queue order is required.
-+----------------------------------------------------------------------------+
-| DO NOT ATTEMPT TO ADD OR REMOVE FROM THE QUEUE WITHOUT USING SIGNALS!!      |
-| THIS WILL CAUSE A DESYNC BETWEEN THE DOWNLOAD AGENT REGISTRY AND THE QUEUE  |
-| WHICH HAS NOT BEEN ACCOUNTED FOR                                            |
-+----------------------------------------------------------------------------+
-This download queue does not actually own any of the DownloadAgents. It is
-simply an id-based reference system. The actual Agents are stored in the
-download_agent_registry HashMap, as ordering is no issue here. This is why
-appending or removing from the download_queue must be done via signals.
-Behold, my madness - quexeky
-*/
pub struct DownloadManagerBuilder {
    download_agent_registry: HashMap<DownloadableMetadata, DownloadAgent>,
    download_queue: Queue,
@ -106,7 +67,7 @@ impl DownloadManagerBuilder {
    }
    fn set_status(&self, status: DownloadManagerStatus) {
-        *lock!(self.status) = status;
+        *self.status.lock().unwrap() = status;
    }
    fn remove_and_cleanup_front_download(&mut self, meta: &DownloadableMetadata) -> DownloadAgent {
@ -120,9 +81,9 @@
    // Make sure the download thread is terminated
    fn cleanup_current_download(&mut self) {
        self.active_control_flag = None;
-        *lock!(self.progress) = None;
+        *self.progress.lock().unwrap() = None;
-        let mut download_thread_lock = lock!(self.current_download_thread);
+        let mut download_thread_lock = self.current_download_thread.lock().unwrap();
        if let Some(unfinished_thread) = download_thread_lock.take()
            && !unfinished_thread.is_finished()
@ -138,7 +99,7 @@ impl DownloadManagerBuilder {
            current_flag.set(DownloadThreadControlFlag::Stop);
        }
-        let mut download_thread_lock = lock!(self.current_download_thread);
+        let mut download_thread_lock = self.current_download_thread.lock().unwrap();
        if let Some(current_download_thread) = download_thread_lock.take() {
            return current_download_thread.join().is_ok();
        };
@ -200,7 +161,9 @@ impl DownloadManagerBuilder {
        self.download_queue.append(meta.clone());
        self.download_agent_registry.insert(meta, download_agent);
-        send!(self.sender, DownloadManagerSignal::UpdateUIQueue);
+        self.sender
+            .send(DownloadManagerSignal::UpdateUIQueue)
+            .unwrap();
    }
    fn manage_go_signal(&mut self) {
@ -246,7 +209,7 @@ impl DownloadManagerBuilder {
        let sender = self.sender.clone();
-        let mut download_thread_lock = lock!(self.current_download_thread);
+        let mut download_thread_lock = self.current_download_thread.lock().unwrap();
        let app_handle = self.app_handle.clone();
        *download_thread_lock = Some(spawn(move || {
@ -257,7 +220,7 @@ impl DownloadManagerBuilder {
                Err(e) => {
                    error!("download {:?} has error {}", download_agent.metadata(), &e);
                    download_agent.on_error(&app_handle, &e);
-                    send!(sender, DownloadManagerSignal::Error(e));
+                    sender.send(DownloadManagerSignal::Error(e)).unwrap();
                    return;
                }
            };
@ -281,7 +244,7 @@ impl DownloadManagerBuilder {
                        &e
                    );
                    download_agent.on_error(&app_handle, &e);
-                    send!(sender, DownloadManagerSignal::Error(e));
+                    sender.send(DownloadManagerSignal::Error(e)).unwrap();
                    return;
                }
            };
@ -292,11 +255,10 @@ impl DownloadManagerBuilder {
            if validate_result {
                download_agent.on_complete(&app_handle);
-                send!(
-                    sender,
-                    DownloadManagerSignal::Completed(download_agent.metadata())
-                );
-                send!(sender, DownloadManagerSignal::UpdateUIQueue);
+                sender
+                    .send(DownloadManagerSignal::Completed(download_agent.metadata()))
+                    .unwrap();
+                sender.send(DownloadManagerSignal::UpdateUIQueue).unwrap();
                return;
            }
        }
@ -323,7 +285,7 @@ impl DownloadManagerBuilder {
        }
        self.push_ui_queue_update();
-        send!(self.sender, DownloadManagerSignal::Go);
+        self.sender.send(DownloadManagerSignal::Go).unwrap();
    }
    fn manage_error_signal(&mut self, error: ApplicationDownloadError) {
        debug!("got signal Error");
@ -361,7 +323,7 @@ impl DownloadManagerBuilder {
        let index = self.download_queue.get_by_meta(meta);
        if let Some(index) = index {
            download_agent.on_cancelled(&self.app_handle);
-            let _ = self.download_queue.edit().remove(index);
+            let _ = self.download_queue.edit().remove(index).unwrap();
            let removed = self.download_agent_registry.remove(meta);
            debug!(
                "removed {:?} from queue {:?}",
@ -376,7 +338,7 @@ impl DownloadManagerBuilder {
    fn push_ui_stats_update(&self, kbs: usize, time: usize) {
        let event_data = StatsUpdateEvent { speed: kbs, time };
-        app_emit!(&self.app_handle, "update_stats", event_data);
+        self.app_handle.emit("update_stats", event_data).unwrap();
    }
    fn push_ui_queue_update(&self) {
        let queue = &self.download_queue.read();
@ -395,6 +357,6 @@ impl DownloadManagerBuilder {
            .collect();
        let event_data = QueueUpdateEvent { queue: queue_objs };
-        app_emit!(&self.app_handle, "update_queue", event_data);
+        self.app_handle.emit("update_queue", event_data).unwrap();
    }
}
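For orientation: the builder above only ever reacts to `DownloadManagerSignal`s; user-facing code talks to the `DownloadManager` handle defined in the next file, whose methods wrap those signal sends. A rough usage sketch (the `manager` and `meta` values here are placeholders, not part of this changeset):

```rust
use drop_database::models::data::DownloadableMetadata;

fn reshuffle_queue(manager: &DownloadManager, meta: DownloadableMetadata) {
    manager.pause_downloads();   // DownloadManagerSignal::Stop
    manager.rearrange(2, 0);     // move the third queued item to the front
    manager.cancel(meta);        // DownloadManagerSignal::Cancel(meta)
    manager.resume_downloads();  // DownloadManagerSignal::Go

    if let Some(progress) = manager.get_current_download_progress() {
        println!("current download at {:.1}%", progress * 100.0);
    }
}
```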

View File

@ -3,18 +3,16 @@ use std::{
    collections::VecDeque,
    fmt::Debug,
    sync::{
-        Mutex, MutexGuard,
        mpsc::{SendError, Sender},
+        Mutex, MutexGuard,
    },
    thread::JoinHandle,
};
-use database::DownloadableMetadata;
+use drop_database::models::data::DownloadableMetadata;
+use drop_errors::application_download_error::ApplicationDownloadError;
use log::{debug, info};
use serde::Serialize;
-use utils::{lock, send};
-use crate::error::ApplicationDownloadError;
use super::{
    download_manager_builder::{CurrentProgressObject, DownloadAgent},
@ -79,7 +77,6 @@ pub enum DownloadStatus {
/// The actual download queue may be accessed through the .`edit()` function,
/// which provides raw access to the underlying queue.
/// THIS EDITING IS BLOCKING!!!
-#[derive(Debug)]
pub struct DownloadManager {
    terminator: Mutex<Option<JoinHandle<Result<(), ()>>>>,
    download_queue: Queue,
@ -119,21 +116,22 @@ impl DownloadManager {
        self.download_queue.read()
    }
    pub fn get_current_download_progress(&self) -> Option<f64> {
-        let progress_object = (*lock!(self.progress)).clone()?;
+        let progress_object = (*self.progress.lock().unwrap()).clone()?;
        Some(progress_object.get_progress())
    }
    pub fn rearrange_string(&self, meta: &DownloadableMetadata, new_index: usize) {
        let mut queue = self.edit();
-        let current_index =
-            get_index_from_id(&mut queue, meta).expect("Failed to get meta index from id");
-        let to_move = queue
-            .remove(current_index)
-            .expect("Failed to remove meta at index from queue");
+        let current_index = get_index_from_id(&mut queue, meta).unwrap();
+        let to_move = queue.remove(current_index).unwrap();
        queue.insert(new_index, to_move);
-        send!(self.command_sender, DownloadManagerSignal::UpdateUIQueue);
+        self.command_sender
+            .send(DownloadManagerSignal::UpdateUIQueue)
+            .unwrap();
    }
    pub fn cancel(&self, meta: DownloadableMetadata) {
-        send!(self.command_sender, DownloadManagerSignal::Cancel(meta));
+        self.command_sender
+            .send(DownloadManagerSignal::Cancel(meta))
+            .unwrap();
    }
    pub fn rearrange(&self, current_index: usize, new_index: usize) {
        if current_index == new_index {
@ -142,31 +140,39 @@ impl DownloadManager {
        let needs_pause = current_index == 0 || new_index == 0;
        if needs_pause {
-            send!(self.command_sender, DownloadManagerSignal::Stop);
+            self.command_sender
+                .send(DownloadManagerSignal::Stop)
+                .unwrap();
        }
        debug!("moving download at index {current_index} to index {new_index}");
        let mut queue = self.edit();
-        let to_move = queue.remove(current_index).expect("Failed to get");
+        let to_move = queue.remove(current_index).unwrap();
        queue.insert(new_index, to_move);
        drop(queue);
        if needs_pause {
-            send!(self.command_sender, DownloadManagerSignal::Go);
+            self.command_sender.send(DownloadManagerSignal::Go).unwrap();
        }
-        send!(self.command_sender, DownloadManagerSignal::UpdateUIQueue);
-        send!(self.command_sender, DownloadManagerSignal::Go);
+        self.command_sender
+            .send(DownloadManagerSignal::UpdateUIQueue)
+            .unwrap();
+        self.command_sender.send(DownloadManagerSignal::Go).unwrap();
    }
    pub fn pause_downloads(&self) {
-        send!(self.command_sender, DownloadManagerSignal::Stop);
+        self.command_sender
+            .send(DownloadManagerSignal::Stop)
+            .unwrap();
    }
    pub fn resume_downloads(&self) {
-        send!(self.command_sender, DownloadManagerSignal::Go);
+        self.command_sender.send(DownloadManagerSignal::Go).unwrap();
    }
    pub fn ensure_terminated(&self) -> Result<Result<(), ()>, Box<dyn Any + Send>> {
-        send!(self.command_sender, DownloadManagerSignal::Finish);
-        let terminator = lock!(self.terminator).take();
+        self.command_sender
+            .send(DownloadManagerSignal::Finish)
+            .unwrap();
+        let terminator = self.terminator.lock().unwrap().take();
        terminator.unwrap().join()
    }
    pub fn get_sender(&self) -> Sender<DownloadManagerSignal> {

View File

@ -1,10 +1,9 @@
use std::sync::Arc;
-use database::DownloadableMetadata;
+use drop_database::models::data::DownloadableMetadata;
+use drop_errors::application_download_error::ApplicationDownloadError;
use tauri::AppHandle;
-use crate::error::ApplicationDownloadError;
use super::{
    download_manager_frontend::DownloadStatus,
    util::{download_thread_control_flag::DownloadThreadControl, progress_object::ProgressObject},

View File

@ -1,4 +1,4 @@
-use database::DownloadableMetadata;
+use drop_database::models::data::DownloadableMetadata;
use serde::Serialize;
use crate::download_manager_frontend::DownloadStatus;

View File

@ -0,0 +1,7 @@
#![feature(duration_millis_float)]
pub mod download_manager_builder;
pub mod download_manager_frontend;
pub mod downloadable;
pub mod events;
pub mod util;

View File

@ -1,6 +1,6 @@
use std::sync::{
-    Arc,
    atomic::{AtomicBool, Ordering},
+    Arc,
};
#[derive(PartialEq, Eq, PartialOrd, Ord)]
@ -22,11 +22,7 @@ impl From<DownloadThreadControlFlag> for bool {
/// false => Stop
impl From<bool> for DownloadThreadControlFlag {
    fn from(value: bool) -> Self {
-        if value {
-            DownloadThreadControlFlag::Go
-        } else {
-            DownloadThreadControlFlag::Stop
-        }
+        if value { DownloadThreadControlFlag::Go } else { DownloadThreadControlFlag::Stop }
    }
}

View File

@ -9,13 +9,12 @@ use std::{
use atomic_instant_full::AtomicInstant;
use throttle_my_fn::throttle;
-use utils::{lock, send};
use crate::download_manager_frontend::DownloadManagerSignal;
use super::rolling_progress_updates::RollingProgressWindow;
-#[derive(Clone, Debug)]
+#[derive(Clone)]
pub struct ProgressObject {
    max: Arc<Mutex<usize>>,
    progress_instances: Arc<Mutex<Vec<Arc<AtomicUsize>>>>,
@ -75,10 +74,12 @@ impl ProgressObject {
    }
    pub fn set_time_now(&self) {
-        *lock!(self.start) = Instant::now();
+        *self.start.lock().unwrap() = Instant::now();
    }
    pub fn sum(&self) -> usize {
-        lock!(self.progress_instances)
+        self.progress_instances
+            .lock()
+            .unwrap()
            .iter()
            .map(|instance| instance.load(Ordering::Acquire))
            .sum()
@ -87,25 +88,27 @@ impl ProgressObject {
        self.set_time_now();
        self.bytes_last_update.store(0, Ordering::Release);
        self.rolling.reset();
-        lock!(self.progress_instances)
+        self.progress_instances
+            .lock()
+            .unwrap()
            .iter()
            .for_each(|x| x.store(0, Ordering::SeqCst));
    }
    pub fn get_max(&self) -> usize {
-        *lock!(self.max)
+        *self.max.lock().unwrap()
    }
    pub fn set_max(&self, new_max: usize) {
-        *lock!(self.max) = new_max;
+        *self.max.lock().unwrap() = new_max;
    }
    pub fn set_size(&self, length: usize) {
-        *lock!(self.progress_instances) =
+        *self.progress_instances.lock().unwrap() =
            (0..length).map(|_| Arc::new(AtomicUsize::new(0))).collect();
    }
    pub fn get_progress(&self) -> f64 {
        self.sum() as f64 / self.get_max() as f64
    }
    pub fn get(&self, index: usize) -> Arc<AtomicUsize> {
-        lock!(self.progress_instances)[index].clone()
+        self.progress_instances.lock().unwrap()[index].clone()
    }
    fn update_window(&self, kilobytes_per_second: usize) {
        self.rolling.update(kilobytes_per_second);
@ -117,9 +120,7 @@ pub fn calculate_update(progress: &ProgressObject) {
    let last_update_time = progress
        .last_update_time
        .swap(Instant::now(), Ordering::SeqCst);
-    let time_since_last_update = Instant::now()
-        .duration_since(last_update_time)
-        .as_millis_f64();
+    let time_since_last_update = Instant::now().duration_since(last_update_time).as_millis_f64();
    let current_bytes_downloaded = progress.sum();
    let max = progress.get_max();
@ -127,8 +128,7 @@ pub fn calculate_update(progress: &ProgressObject) {
        .bytes_last_update
        .swap(current_bytes_downloaded, Ordering::Acquire);
-    let bytes_since_last_update =
-        current_bytes_downloaded.saturating_sub(bytes_at_last_update) as f64;
+    let bytes_since_last_update = current_bytes_downloaded.saturating_sub(bytes_at_last_update) as f64;
    let kilobytes_per_second = bytes_since_last_update / time_since_last_update;
@ -148,12 +148,18 @@ pub fn push_update(progress: &ProgressObject, bytes_remaining: usize) {
}
fn update_ui(progress_object: &ProgressObject, kilobytes_per_second: usize, time_remaining: usize) {
-    send!(
-        progress_object.sender,
-        DownloadManagerSignal::UpdateUIStats(kilobytes_per_second, time_remaining)
-    );
+    progress_object
+        .sender
+        .send(DownloadManagerSignal::UpdateUIStats(
+            kilobytes_per_second,
+            time_remaining,
+        ))
+        .unwrap();
}
fn update_queue(progress: &ProgressObject) {
-    send!(progress.sender, DownloadManagerSignal::UpdateUIQueue)
+    progress
+        .sender
+        .send(DownloadManagerSignal::UpdateUIQueue)
+        .unwrap();
}
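A note on units in `calculate_update`: `time_since_last_update` is measured in milliseconds (`as_millis_f64`), so dividing bytes by it yields bytes per millisecond, which is numerically the same as kilobytes per second (the factor of 1000 cancels). For example, 5,000,000 bytes downloaded over a 250 ms window gives 20,000, i.e. roughly 20 MB/s, and that value is what feeds the rolling window and the `UpdateUIStats` event.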

View File

@ -3,10 +3,9 @@ use std::{
    sync::{Arc, Mutex, MutexGuard},
};
-use database::DownloadableMetadata;
+use drop_database::models::data::DownloadableMetadata;
-use utils::lock;
-#[derive(Clone, Debug)]
+#[derive(Clone)]
pub struct Queue {
    inner: Arc<Mutex<VecDeque<DownloadableMetadata>>>,
}
@ -25,10 +24,10 @@ impl Queue {
        }
    }
    pub fn read(&self) -> VecDeque<DownloadableMetadata> {
-        lock!(self.inner).clone()
+        self.inner.lock().unwrap().clone()
    }
    pub fn edit(&self) -> MutexGuard<'_, VecDeque<DownloadableMetadata>> {
-        lock!(self.inner)
+        self.inner.lock().unwrap()
    }
    pub fn pop_front(&self) -> Option<DownloadableMetadata> {
        self.edit().pop_front()
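Since `edit()` returns the raw `MutexGuard` over the queue (the blocking access the `DownloadManager` docs warn about), the intended pattern is to hold it only for the duration of the mutation. A small illustrative sketch, not part of this changeset:

```rust
fn move_to_front(queue: &Queue, index: usize) {
    // Locks the underlying VecDeque for the whole scope of `q`.
    let mut q = queue.edit();
    if let Some(meta) = q.remove(index) {
        q.insert(0, meta);
    }
    // Guard dropped here, releasing the lock for other callers (e.g. read()).
}
```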

View File

@ -3,17 +3,11 @@ use std::sync::{
    atomic::{AtomicUsize, Ordering},
};
-#[derive(Clone, Debug)]
+#[derive(Clone)]
pub struct RollingProgressWindow<const S: usize> {
    window: Arc<[AtomicUsize; S]>,
    current: Arc<AtomicUsize>,
}
-impl<const S: usize> Default for RollingProgressWindow<S> {
-    fn default() -> Self {
-        Self::new()
-    }
-}
impl<const S: usize> RollingProgressWindow<S> {
    pub fn new() -> Self {
        Self {

View File

@ -0,0 +1,14 @@
[package]
name = "drop-errors"
version = "0.1.0"
edition = "2024"
[dependencies]
http = "1.3.1"
humansize = "2.1.3"
reqwest = "0.12.23"
reqwest-websocket = "0.5.1"
serde = { version = "1.0.219", features = ["derive"] }
serde_with = "3.14.0"
tauri-plugin-opener = "2.5.0"
url = "2.5.7"

View File

@ -0,0 +1,49 @@
use std::{
fmt::{Display, Formatter},
io, sync::Arc,
};
use serde_with::SerializeDisplay;
use humansize::{format_size, BINARY};
use super::remote_access_error::RemoteAccessError;
// TODO: Rename / separate from downloads
#[derive(Debug, SerializeDisplay)]
pub enum ApplicationDownloadError {
NotInitialized,
Communication(RemoteAccessError),
DiskFull(u64, u64),
#[allow(dead_code)]
Checksum,
Lock,
IoError(Arc<io::Error>),
DownloadError,
}
impl Display for ApplicationDownloadError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
            ApplicationDownloadError::NotInitialized => write!(f, "Download not initialized, did something go wrong?"),
ApplicationDownloadError::DiskFull(required, available) => write!(
f,
"Game requires {}, {} remaining left on disk.",
format_size(*required, BINARY),
format_size(*available, BINARY),
),
ApplicationDownloadError::Communication(error) => write!(f, "{error}"),
ApplicationDownloadError::Lock => write!(
f,
"failed to acquire lock. Something has gone very wrong internally. Please restart the application"
),
ApplicationDownloadError::Checksum => {
write!(f, "checksum failed to validate for download")
}
ApplicationDownloadError::IoError(error) => write!(f, "io error: {error}"),
ApplicationDownloadError::DownloadError => write!(
f,
"Download failed. See Download Manager status for specific error"
),
}
}
}

View File

@ -0,0 +1,27 @@
use std::{fmt::Display, io, sync::mpsc::SendError};
use serde_with::SerializeDisplay;
#[derive(SerializeDisplay)]
pub enum DownloadManagerError<T> {
IOError(io::Error),
SignalError(SendError<T>),
}
impl<T> Display for DownloadManagerError<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DownloadManagerError::IOError(error) => write!(f, "{error}"),
DownloadManagerError::SignalError(send_error) => write!(f, "{send_error}"),
}
}
}
impl<T> From<SendError<T>> for DownloadManagerError<T> {
fn from(value: SendError<T>) -> Self {
DownloadManagerError::SignalError(value)
}
}
impl<T> From<io::Error> for DownloadManagerError<T> {
fn from(value: io::Error) -> Self {
DownloadManagerError::IOError(value)
}
}

View File

@ -0,0 +1,10 @@
use serde::Deserialize;
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ServerError {
pub status_code: usize,
pub status_message: String,
// pub message: String,
// pub url: String,
}

View File

@ -0,0 +1,6 @@
pub mod application_download_error;
pub mod download_manager_error;
pub mod drop_server_error;
pub mod library_error;
pub mod process_error;
pub mod remote_access_error;

View File

@ -0,0 +1,18 @@
use std::fmt::Display;
use serde_with::SerializeDisplay;
#[derive(SerializeDisplay)]
pub enum LibraryError {
MetaNotFound(String),
}
impl Display for LibraryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
LibraryError::MetaNotFound(id) => write!(
f,
"Could not locate any installed version of game ID {id} in the database"
),
}
}
}

View File

@ -11,9 +11,7 @@ pub enum ProcessError {
    IOError(Error),
    FormatError(String), // String errors supremacy
    InvalidPlatform,
-    OpenerError(tauri_plugin_opener::Error),
-    InvalidArguments(String),
-    FailedLaunch(String),
+    OpenerError(tauri_plugin_opener::Error)
}
impl Display for ProcessError {
@ -25,14 +23,8 @@ impl Display for ProcessError {
            ProcessError::InvalidVersion => "Invalid game version",
            ProcessError::IOError(error) => &error.to_string(),
            ProcessError::InvalidPlatform => "This game cannot be played on the current platform",
-            ProcessError::FormatError(error) => &format!("Could not format template: {error:?}"),
-            ProcessError::OpenerError(error) => &format!("Could not open directory: {error:?}"),
-            ProcessError::InvalidArguments(arguments) => {
-                &format!("Invalid arguments in command {arguments}")
-            }
-            ProcessError::FailedLaunch(game_id) => {
-                &format!("Drop detected that the game {game_id} may have failed to launch properly")
-            }
+            ProcessError::FormatError(e) => &format!("Failed to format template: {e}"),
+            ProcessError::OpenerError(error) => &format!("Failed to open directory: {error}"),
        };
        write!(f, "{s}")
    }

View File

@ -4,20 +4,11 @@ use std::{
    sync::Arc,
};
-use http::{HeaderName, StatusCode, header::ToStrError};
+use http::StatusCode;
use serde_with::SerializeDisplay;
use url::ParseError;
-use serde::Deserialize;
+use super::drop_server_error::ServerError;
-#[derive(Deserialize, Debug, Clone)]
-#[serde(rename_all = "camelCase")]
-pub struct DropServerError {
-    pub status_code: usize,
-    pub status_message: String,
-    // pub message: String,
-    // pub url: String,
-}
#[derive(Debug, SerializeDisplay)]
pub enum RemoteAccessError {
@ -27,7 +18,7 @@ pub enum RemoteAccessError {
    InvalidEndpoint,
    HandshakeFailed(String),
    GameNotFound(String),
-    InvalidResponse(DropServerError),
+    InvalidResponse(ServerError),
    UnparseableResponse(String),
    ManifestDownloadFailed(StatusCode, String),
    OutOfSync,
@ -53,7 +44,8 @@ impl Display for RemoteAccessError {
                error
                    .source()
                    .map(std::string::ToString::to_string)
-                    .unwrap_or("Unknown error".to_string())
+                    .or_else(|| Some("Unknown error".to_string()))
+                    .unwrap()
            )
            }
            RemoteAccessError::FetchErrorWS(error) => write!(
@ -62,8 +54,9 @@ impl Display for RemoteAccessError {
                error,
                error
                    .source()
-                    .map(std::string::ToString::to_string)
-                    .unwrap_or("Unknown error".to_string())
+                    .map(|e| e.to_string())
+                    .or_else(|| Some("Unknown error".to_string()))
+                    .unwrap()
            ),
            RemoteAccessError::ParsingError(parse_error) => {
                write!(f, "{parse_error}")
@ -113,31 +106,3 @@ impl From<ParseError> for RemoteAccessError {
    }
}
impl std::error::Error for RemoteAccessError {}
-#[derive(Debug, SerializeDisplay)]
-pub enum CacheError {
-    HeaderNotFound(HeaderName),
-    ParseError(ToStrError),
-    Remote(RemoteAccessError),
-    ConstructionError(http::Error),
-}
-impl Display for CacheError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        let s = match self {
-            CacheError::HeaderNotFound(header_name) => {
-                format!("Could not find header {header_name} in cache")
-            }
-            CacheError::ParseError(to_str_error) => {
-                format!("Could not parse cache with error {to_str_error}")
-            }
-            CacheError::Remote(remote_access_error) => {
-                format!("Cache got remote access error: {remote_access_error}")
-            }
-            CacheError::ConstructionError(error) => {
-                format!("Could not construct cache body with error {error}")
-            }
-        };
-        write!(f, "{s}")
-    }
-}

View File

@ -0,0 +1,11 @@
[package]
name = "drop-library"
version = "0.1.0"
edition = "2024"
[dependencies]
drop-errors = { path = "../drop-errors" }
http = "*"
reqwest = { version = "*", default-features = false }
serde = { version = "*", default-features = false, features = ["derive"] }
tauri = "*"

View File

@ -0,0 +1,11 @@
pub enum DropLibraryError {
NetworkError(reqwest::Error),
ServerError(drop_errors::drop_server_error::ServerError),
Unconfigured,
}
impl From<reqwest::Error> for DropLibraryError {
fn from(value: reqwest::Error) -> Self {
DropLibraryError::NetworkError(value)
}
}

View File

@ -0,0 +1,30 @@
use crate::libraries::LibraryProviderIdentifier;
pub struct LibraryGamePreview {
pub library: LibraryProviderIdentifier,
pub internal_id: String,
pub name: String,
pub short_description: String,
pub icon: String,
}
pub struct LibraryGame {
pub library: LibraryProviderIdentifier,
pub internal_id: String,
pub name: String,
pub short_description: String,
pub md_description: String,
pub icon: String,
}
impl From<LibraryGame> for LibraryGamePreview {
fn from(value: LibraryGame) -> Self {
LibraryGamePreview {
library: value.library,
internal_id: value.internal_id,
name: value.name,
short_description: value.short_description,
icon: value.icon,
}
}
}

View File

@ -0,0 +1,3 @@
pub mod libraries;
pub mod game;
pub mod errors;

View File

@ -0,0 +1,76 @@
use std::{
fmt::Display,
hash::{DefaultHasher, Hash, Hasher},
};
use http::Request;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use tauri::UriSchemeResponder;
use crate::{
errors::DropLibraryError,
game::{LibraryGame, LibraryGamePreview},
};
#[derive(Clone, Serialize, Deserialize)]
pub struct LibraryProviderIdentifier {
internal_id: usize,
name: String,
}
impl PartialEq for LibraryProviderIdentifier {
fn eq(&self, other: &Self) -> bool {
self.internal_id == other.internal_id
}
}
impl Eq for LibraryProviderIdentifier {}
impl Hash for LibraryProviderIdentifier {
fn hash<H: Hasher>(&self, state: &mut H) {
self.internal_id.hash(state);
}
}
impl Display for LibraryProviderIdentifier {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.name)
}
}
impl LibraryProviderIdentifier {
pub fn str_hash(&self) -> String {
let mut hasher = DefaultHasher::new();
self.hash(&mut hasher);
hasher.finish().to_string()
}
}
pub struct LibraryFetchConfig {
pub hard_refresh: bool,
}
pub trait DropLibraryProvider: Serialize + DeserializeOwned + Sized {
fn build(identifier: LibraryProviderIdentifier) -> Self;
fn id(&self) -> &LibraryProviderIdentifier;
fn load_object(
&self,
request: Request<Vec<u8>>,
responder: UriSchemeResponder,
) -> impl Future<Output = Result<(), DropLibraryError>> + Send;
fn fetch_library(
&self,
config: &LibraryFetchConfig,
) -> impl Future<Output = Result<Vec<LibraryGamePreview>, DropLibraryError>> + Send;
fn fetch_game(
&self,
config: &LibraryFetchConfig,
) -> impl Future<Output = Result<LibraryGame, DropLibraryError>> + Send;
fn owns_game(&self, id: &LibraryProviderIdentifier) -> bool {
self.id().internal_id == id.internal_id
}
}
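Because the trait uses `impl Future` return types, downstream code can stay generic over whichever provider backs a library entry. A hedged sketch of such a helper (the function is illustrative, not part of this changeset):

```rust
use drop_library::{
    errors::DropLibraryError,
    game::LibraryGamePreview,
    libraries::{DropLibraryProvider, LibraryFetchConfig},
};

async fn refresh_previews<P: DropLibraryProvider>(
    provider: &P,
) -> Result<Vec<LibraryGamePreview>, DropLibraryError> {
    // hard_refresh bypasses whatever cache the provider keeps.
    let config = LibraryFetchConfig { hard_refresh: true };
    provider.fetch_library(&config).await
}
```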

View File

@ -0,0 +1,14 @@
[package]
name = "drop-native-library"
version = "0.1.0"
edition = "2024"
[dependencies]
bitcode = "*"
drop-errors = { path = "../drop-errors" }
drop-library = { path = "../drop-library" }
drop-remote = { path = "../drop-remote" }
log = "*"
serde = { version = "*", features = ["derive"] }
tauri = "*"
url = "*"

View File

@ -1,8 +1,7 @@
use bitcode::{Decode, Encode};
+// use drop_database::runtime_models::Game;
use serde::{Deserialize, Serialize};
-use crate::library::Game;
pub type Collections = Vec<Collection>;
#[derive(Serialize, Deserialize, Debug, Clone, Default, Encode, Decode)]

View File

@ -0,0 +1,11 @@
use drop_database::models::data::{ApplicationTransientStatus, GameDownloadStatus, GameVersion};
#[derive(serde::Serialize, Clone)]
pub struct GameUpdateEvent {
pub game_id: String,
pub status: (
Option<GameDownloadStatus>,
Option<ApplicationTransientStatus>,
),
pub version: Option<GameVersion>,
}

View File

@ -0,0 +1,50 @@
use drop_library::{
errors::DropLibraryError, game::{LibraryGame, LibraryGamePreview}, libraries::{DropLibraryProvider, LibraryFetchConfig, LibraryProviderIdentifier}
};
use drop_remote::{fetch_object::fetch_object, DropRemoteContext};
use serde::{Deserialize, Serialize};
use url::Url;
#[derive(Serialize, Deserialize, Clone)]
pub struct DropNativeLibraryProvider {
identifier: LibraryProviderIdentifier,
context: Option<DropRemoteContext>,
}
impl DropNativeLibraryProvider {
pub fn configure(&mut self, base_url: Url) {
self.context = Some(DropRemoteContext::new(base_url));
}
}
impl DropLibraryProvider for DropNativeLibraryProvider {
fn build(identifier: LibraryProviderIdentifier) -> Self {
Self {
identifier,
context: None,
}
}
fn id(&self) -> &LibraryProviderIdentifier {
&self.identifier
}
async fn load_object(&self, request: tauri::http::Request<Vec<u8>>, responder: tauri::UriSchemeResponder) -> Result<(), DropLibraryError> {
let context = self.context.as_ref().ok_or(DropLibraryError::Unconfigured)?;
fetch_object(context, request, responder).await;
Ok(())
}
async fn fetch_library(
&self,
config: &LibraryFetchConfig
) -> Result<Vec<LibraryGamePreview>, DropLibraryError> {
todo!()
}
async fn fetch_game(&self, config: &LibraryFetchConfig) -> Result<LibraryGame, DropLibraryError> {
todo!()
}
}

View File

@ -0,0 +1,5 @@
//pub mod collections;
//pub mod library;
//pub mod state;
//pub mod events;
pub mod impls;

View File

@ -0,0 +1,493 @@
use std::fs::remove_dir_all;
use std::thread::spawn;
use drop_database::borrow_db_checked;
use drop_database::borrow_db_mut_checked;
use drop_database::models::data::ApplicationTransientStatus;
use drop_database::models::data::Database;
use drop_database::models::data::DownloadableMetadata;
use drop_database::models::data::GameDownloadStatus;
use drop_database::models::data::GameVersion;
use drop_database::runtime_models::Game;
use drop_errors::drop_server_error::ServerError;
use drop_errors::library_error::LibraryError;
use drop_errors::remote_access_error::RemoteAccessError;
use drop_remote::DropRemoteContext;
use drop_remote::auth::generate_authorization_header;
use drop_remote::cache::cache_object;
use drop_remote::cache::cache_object_db;
use drop_remote::cache::get_cached_object;
use drop_remote::cache::get_cached_object_db;
use drop_remote::requests::generate_url;
use drop_remote::utils::DROP_CLIENT_ASYNC;
use drop_remote::utils::DROP_CLIENT_SYNC;
use log::{debug, error, warn};
use serde::{Deserialize, Serialize};
use tauri::AppHandle;
use tauri::Emitter as _;
use crate::events::GameUpdateEvent;
use crate::state::GameStatusManager;
use crate::state::GameStatusWithTransient;
#[derive(Serialize, Deserialize, Debug)]
pub struct FetchGameStruct {
game: Game,
status: GameStatusWithTransient,
version: Option<GameVersion>,
}
pub async fn fetch_library_logic(
context: &DropRemoteContext,
hard_fresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let do_hard_refresh = hard_fresh.unwrap_or(false);
if !do_hard_refresh && let Ok(library) = get_cached_object("library") {
return Ok(library);
}
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(context, &["/api/v1/client/user/library"], &[])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header(context))
.send()
.await?;
if response.status() != 200 {
let err = response.json().await.unwrap_or(ServerError {
status_code: 500,
status_message: "Invalid response from server.".to_owned(),
});
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let mut games: Vec<Game> = response.json().await?;
let mut db_handle = borrow_db_mut_checked();
for game in &games {
db_handle
.applications
.games
.insert(game.id.clone(), game.clone());
if !db_handle.applications.game_statuses.contains_key(&game.id) {
db_handle
.applications
.game_statuses
.insert(game.id.clone(), GameDownloadStatus::Remote {});
}
}
// Add games that are installed but no longer in library
for meta in db_handle.applications.installed_game_version.values() {
if games.iter().any(|e| e.id == meta.id) {
continue;
}
// We should always have a cache of the object
        // Pass db_handle because otherwise we get a deadlock
let game = match get_cached_object_db::<Game>(&meta.id.clone()) {
Ok(game) => game,
Err(err) => {
warn!(
"{} is installed, but encountered error fetching its error: {}.",
meta.id, err
);
continue;
}
};
games.push(game);
}
drop(db_handle);
cache_object("library", &games)?;
Ok(games)
}
pub async fn fetch_library_logic_offline(
_hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let mut games: Vec<Game> = get_cached_object("library")?;
let db_handle = borrow_db_checked();
games.retain(|game| {
matches!(
&db_handle
.applications
.game_statuses
.get(&game.id)
.unwrap_or(&GameDownloadStatus::Remote {}),
GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }
)
});
Ok(games)
}
pub async fn fetch_game_logic(
context: &DropRemoteContext,
id: String,
) -> Result<FetchGameStruct, RemoteAccessError> {
let version = {
let db_lock = borrow_db_checked();
let metadata_option = db_lock.applications.installed_game_version.get(&id);
let version = match metadata_option {
None => None,
Some(metadata) => db_lock
.applications
.game_versions
.get(&metadata.id)
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
.cloned(),
};
let game = db_lock.applications.games.get(&id);
if let Some(game) = game {
let status = GameStatusManager::fetch_state(&id, &db_lock);
let data = FetchGameStruct {
game: game.clone(),
status,
version,
};
cache_object_db(&id, game, &db_lock)?;
return Ok(data);
}
version
};
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(context, &["/api/v1/client/game/", &id], &[])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header(context))
.send()
.await?;
if response.status() == 404 {
let offline_fetch = fetch_game_logic_offline(id.clone()).await;
if let Ok(fetch_data) = offline_fetch {
return Ok(fetch_data);
}
return Err(RemoteAccessError::GameNotFound(id));
}
if response.status() != 200 {
let err = response.json().await.unwrap();
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let game: Game = response.json().await?;
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.games
.insert(id.clone(), game.clone());
db_handle
.applications
.game_statuses
.entry(id.clone())
.or_insert(GameDownloadStatus::Remote {});
let status = GameStatusManager::fetch_state(&id, &db_handle);
drop(db_handle);
let data = FetchGameStruct {
game: game.clone(),
status,
version,
};
cache_object(&id, &game)?;
Ok(data)
}
pub async fn fetch_game_logic_offline(id: String) -> Result<FetchGameStruct, RemoteAccessError> {
let db_handle = borrow_db_checked();
let metadata_option = db_handle.applications.installed_game_version.get(&id);
let version = match metadata_option {
None => None,
Some(metadata) => db_handle
.applications
.game_versions
.get(&metadata.id)
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
.cloned(),
};
let status = GameStatusManager::fetch_state(&id, &db_handle);
let game = get_cached_object::<Game>(&id)?;
drop(db_handle);
Ok(FetchGameStruct {
game,
status,
version,
})
}
pub async fn fetch_game_version_options_logic(
context: &DropRemoteContext,
game_id: String,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(
context,
&["/api/v1/client/game/versions"],
&[("id", &game_id)],
)?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header(context))
.send()
.await?;
if response.status() != 200 {
let err = response.json().await.unwrap();
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let data: Vec<GameVersion> = response.json().await?;
Ok(data)
}
/**
* Called by:
* - on_cancel, when cancelled, for obvious reasons
* - when downloading, so if drop unexpectedly quits, we can resume the download. hidden by the "Downloading..." transient state, though
* - when scanning, to import the game
*/
pub fn set_partially_installed(
meta: &DownloadableMetadata,
install_dir: String,
app_handle: Option<&AppHandle>,
) {
set_partially_installed_db(&mut borrow_db_mut_checked(), meta, install_dir, app_handle);
}
pub fn set_partially_installed_db(
db_lock: &mut Database,
meta: &DownloadableMetadata,
install_dir: String,
app_handle: Option<&AppHandle>,
) {
db_lock.applications.transient_statuses.remove(meta);
db_lock.applications.game_statuses.insert(
meta.id.clone(),
GameDownloadStatus::PartiallyInstalled {
version_name: meta.version.as_ref().unwrap().clone(),
install_dir,
},
);
db_lock
.applications
.installed_game_version
.insert(meta.id.clone(), meta.clone());
if let Some(app_handle) = app_handle {
push_game_update(
app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, db_lock),
);
}
}
pub fn uninstall_game_logic(meta: DownloadableMetadata, app_handle: &AppHandle) {
debug!("triggered uninstall for agent");
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});
push_game_update(
app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, &db_handle),
);
let previous_state = db_handle.applications.game_statuses.get(&meta.id).cloned();
if previous_state.is_none() {
warn!("uninstall job doesn't have previous state, failing silently");
return;
}
let previous_state = previous_state.unwrap();
if let Some((_, install_dir)) = match previous_state {
GameDownloadStatus::Installed {
version_name,
install_dir,
} => Some((version_name, install_dir)),
GameDownloadStatus::SetupRequired {
version_name,
install_dir,
} => Some((version_name, install_dir)),
GameDownloadStatus::PartiallyInstalled {
version_name,
install_dir,
} => Some((version_name, install_dir)),
_ => None,
} {
db_handle
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});
drop(db_handle);
let app_handle = app_handle.clone();
spawn(move || {
if let Err(e) = remove_dir_all(install_dir) {
error!("{e}");
} else {
let mut db_handle = borrow_db_mut_checked();
db_handle.applications.transient_statuses.remove(&meta);
db_handle
.applications
.installed_game_version
.remove(&meta.id);
db_handle
.applications
.game_statuses
.insert(meta.id.clone(), GameDownloadStatus::Remote {});
let _ = db_handle.applications.transient_statuses.remove(&meta);
push_game_update(
&app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, &db_handle),
);
debug!("uninstalled game id {}", &meta.id);
app_handle.emit("update_library", ()).unwrap();
}
});
} else {
warn!("invalid previous state for uninstall, failing silently.");
}
}
pub fn get_current_meta(game_id: &String) -> Option<DownloadableMetadata> {
borrow_db_checked()
.applications
.installed_game_version
.get(game_id)
.cloned()
}
pub fn on_game_complete(
context: &DropRemoteContext,
meta: &DownloadableMetadata,
install_dir: String,
app_handle: &AppHandle,
) -> Result<(), RemoteAccessError> {
// Fetch game version information from remote
if meta.version.is_none() {
return Err(RemoteAccessError::GameNotFound(meta.id.clone()));
}
let client = DROP_CLIENT_SYNC.clone();
let response = generate_url(
context,
&["/api/v1/client/game/version"],
&[
("id", &meta.id),
("version", meta.version.as_ref().unwrap()),
],
)?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header(context))
.send()?;
let game_version: GameVersion = response.json()?;
let mut handle = borrow_db_mut_checked();
handle
.applications
.game_versions
.entry(meta.id.clone())
.or_default()
.insert(meta.version.clone().unwrap(), game_version.clone());
handle
.applications
.installed_game_version
.insert(meta.id.clone(), meta.clone());
drop(handle);
let status = if game_version.setup_command.is_empty() {
GameDownloadStatus::Installed {
version_name: meta.version.clone().unwrap(),
install_dir,
}
} else {
GameDownloadStatus::SetupRequired {
version_name: meta.version.clone().unwrap(),
install_dir,
}
};
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.game_statuses
.insert(meta.id.clone(), status.clone());
drop(db_handle);
app_handle
.emit(
&format!("update_game/{}", meta.id),
GameUpdateEvent {
game_id: meta.id.clone(),
status: (Some(status), None),
version: Some(game_version),
},
)
.unwrap();
Ok(())
}
pub fn push_game_update(
app_handle: &AppHandle,
game_id: &String,
version: Option<GameVersion>,
status: GameStatusWithTransient,
) {
if let Some(GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }) =
&status.0
&& version.is_none()
{
panic!("pushed game for installed game that doesn't have version information");
}
app_handle
.emit(
&format!("update_game/{game_id}"),
GameUpdateEvent {
game_id: game_id.clone(),
status,
version,
},
)
.unwrap();
}
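For illustration only (not part of the diff; the handler body is an assumption): Rust-side code can observe the same per-game channel that push_game_update emits on through Tauri's Listener trait.
use tauri::Listener;

fn watch_game(app_handle: &tauri::AppHandle, game_id: &str) {
    // Subscribes to the "update_game/<id>" events emitted above.
    app_handle.listen(format!("update_game/{game_id}"), |event| {
        log::debug!("game update payload: {}", event.payload());
    });
}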

View File

@ -1,6 +1,4 @@
-use database::models::data::{
-ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus,
-};
+// use drop_database::models::data::{ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus};
pub type GameStatusWithTransient = (
Option<GameDownloadStatus>,

View File

@ -0,0 +1,18 @@
[package]
name = "drop-process"
version = "0.1.0"
edition = "2024"
[dependencies]
chrono = "0.4.42"
drop-database = { path = "../drop-database" }
drop-errors = { path = "../drop-errors" }
drop-native-library = { path = "../drop-native-library" }
dynfmt = { version = "0.1.5", features = ["curly"] }
log = "0.4.28"
page_size = "0.6.0"
shared_child = "1.1.1"
sysinfo = "0.37.0"
tauri = "2.8.5"
tauri-plugin-opener = "2.5.0"

View File

@ -8,12 +8,7 @@ pub struct DropFormatArgs {
}
impl DropFormatArgs {
-pub fn new(
-launch_string: String,
-working_dir: &String,
-executable_name: &String,
-absolute_executable_name: String,
-) -> Self {
+pub fn new(launch_string: String, working_dir: &String, executable_name: &String, absolute_executable_name: String) -> Self {
let mut positional = Vec::new();
let mut map: HashMap<&'static str, String> = HashMap::new();

View File

@ -0,0 +1,4 @@
mod format;
mod process_handlers;
pub mod process_manager;
pub mod utils;

View File

@ -1,8 +1,15 @@
-use client::compat::{COMPAT_INFO, UMU_LAUNCHER_EXECUTABLE};
-use database::{Database, DownloadableMetadata, GameVersion, platform::Platform};
-use log::debug;
-use crate::{error::ProcessError, process_manager::ProcessHandler};
+use std::{
+ffi::OsStr,
+path::PathBuf,
+process::{Command, Stdio},
+sync::LazyLock,
+};
+use drop_database::{models::data::{Database, DownloadableMetadata, GameVersion}, process::Platform};
+use log::{debug, info};
+use crate::process_manager::ProcessHandler;
pub struct NativeGameLauncher;
impl ProcessHandler for NativeGameLauncher {
@ -13,8 +20,8 @@ impl ProcessHandler for NativeGameLauncher {
args: Vec<String>,
_game_version: &GameVersion,
_current_dir: &str,
-) -> Result<String, ProcessError> {
-Ok(format!("\"{}\" {}", launch_command, args.join(" ")))
+) -> String {
+format!("\"{}\" {}", launch_command, args.join(" "))
}
fn valid_for_platform(&self, _db: &Database, _target: &Platform) -> bool {
@ -22,6 +29,31 @@ impl ProcessHandler for NativeGameLauncher {
}
}
pub static UMU_LAUNCHER_EXECUTABLE: LazyLock<Option<PathBuf>> = LazyLock::new(|| {
let x = get_umu_executable();
info!("{:?}", &x);
x
});
const UMU_BASE_LAUNCHER_EXECUTABLE: &str = "umu-run";
const UMU_INSTALL_DIRS: [&str; 4] = ["/app/share", "/use/local/share", "/usr/share", "/opt"];
fn get_umu_executable() -> Option<PathBuf> {
if check_executable_exists(UMU_BASE_LAUNCHER_EXECUTABLE) {
return Some(PathBuf::from(UMU_BASE_LAUNCHER_EXECUTABLE));
}
for dir in UMU_INSTALL_DIRS {
let p = PathBuf::from(dir).join(UMU_BASE_LAUNCHER_EXECUTABLE);
if check_executable_exists(&p) {
return Some(p);
}
}
None
}
fn check_executable_exists<P: AsRef<OsStr>>(exec: P) -> bool {
let has_umu_installed = Command::new(exec).stdout(Stdio::null()).output();
has_umu_installed.is_ok()
}
pub struct UMULauncher;
impl ProcessHandler for UMULauncher {
fn create_launch_process(
@ -31,7 +63,7 @@ impl ProcessHandler for UMULauncher {
args: Vec<String>,
game_version: &GameVersion,
_current_dir: &str,
-) -> Result<String, ProcessError> {
+) -> String {
debug!("Game override: \"{:?}\"", &game_version.umu_id_override);
let game_id = match &game_version.umu_id_override {
Some(game_override) => {
@ -43,21 +75,16 @@ impl ProcessHandler for UMULauncher {
}
None => game_version.game_id.clone(),
};
-Ok(format!(
+format!(
"GAMEID={game_id} {umu:?} \"{launch}\" {args}",
-umu = UMU_LAUNCHER_EXECUTABLE
-.as_ref()
-.expect("Failed to get UMU_LAUNCHER_EXECUTABLE as ref"),
+umu = UMU_LAUNCHER_EXECUTABLE.as_ref().unwrap(),
launch = launch_command,
args = args.join(" ")
-))
+)
}
fn valid_for_platform(&self, _db: &Database, _target: &Platform) -> bool {
-let Some(compat_info) = &*COMPAT_INFO else {
-return false;
-};
-compat_info.umu_installed
+UMU_LAUNCHER_EXECUTABLE.is_some()
}
}
@ -70,7 +97,7 @@ impl ProcessHandler for AsahiMuvmLauncher {
args: Vec<String>,
game_version: &GameVersion,
current_dir: &str,
-) -> Result<String, ProcessError> {
+) -> String {
let umu_launcher = UMULauncher {};
let umu_string = umu_launcher.create_launch_process(
meta,
@ -78,23 +105,15 @@ impl ProcessHandler for AsahiMuvmLauncher {
args,
game_version,
current_dir,
-)?;
+);
let mut args_cmd = umu_string
.split("umu-run")
.collect::<Vec<&str>>()
.into_iter();
-let args = args_cmd
-.next()
-.ok_or(ProcessError::InvalidArguments(umu_string.clone()))?
-.trim();
-let cmd = format!(
-"umu-run{}",
-args_cmd
-.next()
-.ok_or(ProcessError::InvalidArguments(umu_string.clone()))?
-);
-Ok(format!("{args} muvm -- {cmd}"))
+let args = args_cmd.next().unwrap().trim();
+let cmd = format!("umu-run{}", args_cmd.next().unwrap());
+format!("{args} muvm -- {cmd}")
}
#[allow(unreachable_code)]
@ -111,10 +130,6 @@ impl ProcessHandler for AsahiMuvmLauncher {
return false;
}
-let Some(compat_info) = &*COMPAT_INFO else {
-return false;
-};
-compat_info.umu_installed
+UMU_LAUNCHER_EXECUTABLE.is_some()
}
}
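For illustration only (values are assumptions): the launch string produced by UMULauncher::create_launch_process has this shape.
// Mirrors the format! call above; PathBuf's Debug formatting supplies the quotes around umu-run.
let launch = format!(
    "GAMEID={game_id} {umu:?} \"{cmd}\" {args}",
    game_id = "umu-1234",
    umu = std::path::PathBuf::from("umu-run"),
    cmd = "/games/example/game.exe",
    args = ["-windowed"].join(" ")
);
assert_eq!(launch, "GAMEID=umu-1234 \"umu-run\" \"/games/example/game.exe\" -windowed");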

View File

@ -1,33 +1,26 @@
use std::{
collections::HashMap,
fs::{OpenOptions, create_dir_all},
-io,
+io::{self},
path::PathBuf,
process::{Command, ExitStatus},
str::FromStr,
-sync::Arc,
+sync::{Arc, Mutex},
thread::spawn,
time::{Duration, SystemTime},
};
-use database::{
-ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus,
-GameVersion, borrow_db_checked, borrow_db_mut_checked, platform::Platform,
-};
-use drop_consts::DATA_ROOT_DIR;
+use drop_database::{borrow_db_checked, borrow_db_mut_checked, db::DATA_ROOT_DIR, models::data::{ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus, GameVersion}, process::Platform, DB};
+use drop_errors::process_error::ProcessError;
+use drop_native_library::{library::push_game_update, state::GameStatusManager};
use dynfmt::Format;
use dynfmt::SimpleCurlyFormat;
-use games::{library::push_game_update, state::GameStatusManager};
use log::{debug, info, warn};
use shared_child::SharedChild;
-use tauri::AppHandle;
+use tauri::{AppHandle, Emitter};
+use tauri_plugin_opener::OpenerExt;
-use crate::{
-PROCESS_MANAGER,
-error::ProcessError,
-format::DropFormatArgs,
-process_handlers::{AsahiMuvmLauncher, NativeGameLauncher, UMULauncher},
-};
+use crate::{format::DropFormatArgs, process_handlers::{AsahiMuvmLauncher, NativeGameLauncher, UMULauncher}};
pub struct RunningProcess {
handle: Arc<SharedChild>,
@ -39,11 +32,11 @@ pub struct ProcessManager<'a> {
current_platform: Platform,
log_output_dir: PathBuf,
processes: HashMap<String, RunningProcess>,
+app_handle: AppHandle,
game_launchers: Vec<(
(Platform, Platform),
&'a (dyn ProcessHandler + Sync + Send + 'static),
)>,
-app_handle: AppHandle,
}
impl ProcessManager<'_> { impl ProcessManager<'_> {
@ -60,6 +53,7 @@ impl ProcessManager<'_> {
#[cfg(target_os = "linux")]
current_platform: Platform::Linux,
+app_handle,
processes: HashMap::new(),
log_output_dir,
game_launchers: vec![
@ -85,7 +79,6 @@ impl ProcessManager<'_> {
&UMULauncher {} as &(dyn ProcessHandler + Sync + Send + 'static),
),
],
-app_handle,
}
} }
@ -104,31 +97,30 @@ impl ProcessManager<'_> {
}
}
-pub fn get_log_dir(&self, game_id: String) -> PathBuf {
+fn get_log_dir(&self, game_id: String) -> PathBuf {
self.log_output_dir.join(game_id)
}
+pub fn open_process_logs(&mut self, game_id: String) -> Result<(), ProcessError> {
+let dir = self.get_log_dir(game_id);
+self.app_handle
+.opener()
+.open_path(dir.to_str().unwrap(), None::<&str>)
+.map_err(ProcessError::OpenerError)?;
+Ok(())
+}
-fn on_process_finish(
-&mut self,
-game_id: String,
-result: Result<ExitStatus, std::io::Error>,
-) -> Result<(), ProcessError> {
+fn on_process_finish(&mut self, game_id: String, result: Result<ExitStatus, std::io::Error>) {
if !self.processes.contains_key(&game_id) {
warn!(
"process on_finish was called, but game_id is no longer valid. finished with result: {result:?}"
);
-return Ok(());
+return;
}
debug!("process for {:?} exited with {:?}", &game_id, result);
-let process = match self.processes.remove(&game_id) {
-Some(process) => process,
-None => {
-info!("Attempted to stop process {game_id} which didn't exist");
-return Ok(());
-}
-};
+let process = self.processes.remove(&game_id).unwrap();
let mut db_handle = borrow_db_mut_checked();
let meta = db_handle
@ -136,7 +128,7 @@ impl ProcessManager<'_> {
.installed_game_version
.get(&game_id)
.cloned()
-.unwrap_or_else(|| panic!("Could not get installed version of {}", &game_id));
+.unwrap();
db_handle.applications.transient_statuses.remove(&meta);
let current_state = db_handle.applications.game_statuses.get(&game_id).cloned();
@ -161,18 +153,20 @@ impl ProcessManager<'_> {
// Or if the status isn't 0
// Or if it's an error
if !process.manually_killed
-&& (elapsed.as_secs() <= 2 || result.map_or(true, |r| !r.success()))
+&& (elapsed.as_secs() <= 2 || result.is_err() || !result.unwrap().success())
{
warn!("drop detected that the game {game_id} may have failed to launch properly");
-return Err(ProcessError::FailedLaunch(game_id));
-// let _ = self.app_handle.emit("launch_external_error", &game_id);
+let _ = self.app_handle.emit("launch_external_error", &game_id);
}
-let version_data = match db_handle.applications.game_versions.get(&game_id) {
-// This unwrap here should be resolved by just making the hashmap accept an option rather than just a String
-Some(res) => res.get(&meta.version.unwrap()).expect("Failed to get game version from installed game versions. Is the database corrupted?"),
-None => todo!(),
-};
+// This is too many unwraps for me to be comfortable
+let version_data = db_handle
+.applications
+.game_versions
+.get(&game_id)
+.unwrap()
+.get(&meta.version.unwrap())
+.unwrap();
let status = GameStatusManager::fetch_state(&game_id, &db_handle);
@ -182,7 +176,6 @@ impl ProcessManager<'_> {
Some(version_data.clone()),
status,
);
-Ok(())
}
fn fetch_process_handler(
@ -203,19 +196,24 @@ impl ProcessManager<'_> {
.1)
}
-pub fn valid_platform(&self, platform: &Platform) -> bool {
+pub fn valid_platform(&self, platform: &Platform,) -> Result<bool, String> {
let db_lock = borrow_db_checked();
let process_handler = self.fetch_process_handler(&db_lock, platform);
-process_handler.is_ok()
+Ok(process_handler.is_ok())
}
-/// Must be called through spawn as it is currently blocking
-pub fn launch_process(&mut self, game_id: String) -> Result<(), ProcessError> {
+pub fn launch_process(
+&mut self,
+game_id: String,
+process_manager_lock: &'static Mutex<ProcessManager<'static>>,
+) -> Result<(), ProcessError> {
if self.processes.contains_key(&game_id) {
return Err(ProcessError::AlreadyRunning);
}
-let version = match borrow_db_checked()
+let version = match DB
+.borrow_data()
+.unwrap()
.applications
.game_statuses
.get(&game_id)
@ -254,7 +252,7 @@ impl ProcessManager<'_> {
debug!(
"Launching process {:?} with version {:?}",
&game_id,
-db_lock.applications.game_versions.get(&game_id)
+db_lock.applications.game_versions.get(&game_id).unwrap()
);
let game_version = db_lock
@ -310,9 +308,8 @@ impl ProcessManager<'_> {
GameDownloadStatus::Remote {} => unreachable!("Game registered as 'Remote'"),
};
-#[allow(clippy::unwrap_used)]
let launch = PathBuf::from_str(install_dir).unwrap().join(launch);
-let launch = launch.display().to_string();
+let launch = launch.to_str().unwrap();
let launch_string = process_handler.create_launch_process(
&meta,
@ -320,7 +317,7 @@ impl ProcessManager<'_> {
args.clone(),
game_version,
install_dir,
-)?;
+);
let format_args = DropFormatArgs::new(
launch_string,
@ -376,6 +373,17 @@ impl ProcessManager<'_> {
let wait_thread_handle = launch_process_handle.clone();
let wait_thread_game_id = meta.clone();
spawn(move || {
let result: Result<ExitStatus, std::io::Error> = launch_process_handle.wait();
let mut process_manager_handle = process_manager_lock.lock().unwrap();
process_manager_handle.on_process_finish(wait_thread_game_id.id, result);
// As everything goes out of scope, they should get dropped
// But just to explicit about it
drop(process_manager_handle);
});
self.processes.insert(
meta.id,
RunningProcess {
@ -384,13 +392,6 @@ impl ProcessManager<'_> {
manually_killed: false,
},
);
spawn(move || {
let result: Result<ExitStatus, std::io::Error> = launch_process_handle.wait();
PROCESS_MANAGER
.lock()
.on_process_finish(wait_thread_game_id.id, result)
});
Ok(())
}
}
@ -403,7 +404,7 @@ pub trait ProcessHandler: Send + 'static {
args: Vec<String>,
game_version: &GameVersion,
current_dir: &str,
-) -> Result<String, ProcessError>;
+) -> String;
fn valid_for_platform(&self, db: &Database, target: &Platform) -> bool;
}
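A minimal sketch (not from this changeset; the helper name is an assumption) of how a caller might use the refactored launch_process, which now receives the static mutex so the wait thread can re-lock the manager when the game exits:
fn launch(
    game_id: String,
    manager: &'static std::sync::Mutex<ProcessManager<'static>>,
) -> Result<(), ProcessError> {
    // Lock for the duration of the spawn; the same lock is handed back in so the
    // wait thread can call on_process_finish once the child exits.
    let mut guard = manager.lock().unwrap();
    guard.launch_process(game_id, manager)
}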

View File

@ -1,6 +1,6 @@
use std::{io, path::PathBuf, sync::Arc};
-use download_manager::error::ApplicationDownloadError;
+use drop_errors::application_download_error::ApplicationDownloadError;
use sysinfo::{Disk, DiskRefreshKind, Disks};
pub fn get_disk_available(mount_point: PathBuf) -> Result<u64, ApplicationDownloadError> {
@ -19,7 +19,7 @@ pub fn get_disk_available(mount_point: PathBuf) -> Result<u64, ApplicationDownlo
return Ok(disk.available_space());
}
}
-Err(ApplicationDownloadError::IoError(Arc::new(
-io::Error::other("could not find disk of path"),
-)))
+Err(ApplicationDownloadError::IoError(Arc::new(io::Error::other(
+"could not find disk of path",
+))))
}

View File

@ -0,0 +1,20 @@
[package]
name = "drop-remote"
version = "0.1.0"
edition = "2024"
[dependencies]
bitcode = "0.6.7"
chrono = "0.4.42"
drop-consts = { path = "../drop-consts" }
drop-errors = { path = "../drop-errors" }
droplet-rs = "0.7.3"
gethostname = "1.0.2"
hex = "0.4.3"
http = "1.3.1"
log = "0.4.28"
md5 = "0.8.0"
reqwest = "0.12.23"
serde = { version = "1.0.220", features = ["derive"] }
tauri = "2.8.5"
url = "2.5.7"

View File

@ -0,0 +1,156 @@
use std::{collections::HashMap, env, sync::Mutex};
use chrono::Utc;
use drop_errors::{drop_server_error::ServerError, remote_access_error::RemoteAccessError};
use droplet_rs::ssl::sign_nonce;
use gethostname::gethostname;
use log::{debug, error, warn};
use serde::{Deserialize, Serialize};
use url::Url;
use crate::{
requests::make_authenticated_get, utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC}, DropRemoteAuth, DropRemoteContext
};
use super::requests::generate_url;
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CapabilityConfiguration {}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct InitiateRequestBody {
name: String,
platform: String,
capabilities: HashMap<String, CapabilityConfiguration>,
mode: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeRequestBody {
client_id: String,
token: String,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeResponse {
private: String,
certificate: String,
id: String,
}
pub fn generate_authorization_header(context: &DropRemoteContext) -> String {
let auth = if let Some(auth) = &context.auth {
auth
} else {
return "".to_owned();
};
let nonce = Utc::now().timestamp_millis().to_string();
let signature = sign_nonce(auth.private.clone(), nonce.clone()).unwrap();
format!("Nonce {} {} {}", auth.client_id, nonce, signature)
}
pub async fn fetch_user(context: &DropRemoteContext) -> Result<Vec<u8>, RemoteAccessError> {
let response =
make_authenticated_get(context, generate_url(context, &["/api/v1/client/user"], &[])?).await?;
if response.status() != 200 {
let err: ServerError = response.json().await?;
warn!("{err:?}");
if err.status_message == "Nonce expired" {
return Err(RemoteAccessError::OutOfSync);
}
return Err(RemoteAccessError::InvalidResponse(err));
}
response
.bytes()
.await
.map_err(std::convert::Into::into)
.map(|v| v.to_vec())
}
pub async fn recieve_handshake_logic(
context: &mut DropRemoteContext,
path: String,
) -> Result<(), RemoteAccessError> {
let path_chunks: Vec<&str> = path.split('/').collect();
if path_chunks.len() != 3 {
// app.emit("auth/failed", ()).unwrap();
return Err(RemoteAccessError::HandshakeFailed(
"failed to parse token".to_string(),
));
}
let client_id = path_chunks.get(1).unwrap();
let token = path_chunks.get(2).unwrap();
let body = HandshakeRequestBody {
client_id: (*client_id).to_string(),
token: (*token).to_string(),
};
let endpoint = generate_url(context, &["/api/v1/client/auth/handshake"], &[])?;
let client = DROP_CLIENT_ASYNC.clone();
let response = client.post(endpoint).json(&body).send().await?;
debug!("handshake responsded with {}", response.status().as_u16());
if !response.status().is_success() {
return Err(RemoteAccessError::InvalidResponse(response.json().await?));
}
let response_struct: HandshakeResponse = response.json().await?;
let web_token = {
let header = generate_authorization_header(context);
let token = client
.post(generate_url(context, &["/api/v1/client/user/webtoken"], &[])?)
.header("Authorization", header)
.send()
.await
.unwrap();
token.text().await.unwrap()
};
context.auth = Some(DropRemoteAuth {
private: response_struct.private,
cert: response_struct.certificate,
client_id: response_struct.id,
web_token: web_token,
});
Ok(())
}
pub fn auth_initiate_logic(context: &DropRemoteContext, mode: String) -> Result<String, RemoteAccessError> {
let hostname = gethostname();
let endpoint = generate_url(context, &["/api/v1/client/auth/initiate"], &[])?;
let body = InitiateRequestBody {
name: format!("{} (Desktop)", hostname.into_string().unwrap()),
platform: env::consts::OS.to_string(),
capabilities: HashMap::from([
("peerAPI".to_owned(), CapabilityConfiguration {}),
("cloudSaves".to_owned(), CapabilityConfiguration {}),
]),
mode,
};
let client = DROP_CLIENT_SYNC.clone();
let response = client.post(endpoint.to_string()).json(&body).send()?;
if response.status() != 200 {
let data: ServerError = response.json()?;
error!("could not start handshake: {}", data.status_message);
return Err(RemoteAccessError::HandshakeFailed(data.status_message));
}
let response = response.text()?;
Ok(response)
}
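For illustration only (the values are placeholders): the Authorization header built by generate_authorization_header is a space-separated triple.
// "Nonce <client id> <millisecond timestamp> <signature over the nonce>"
let header = format!("Nonce {} {} {}", "client-1234", 1726500000000_i64, "<signature hex>");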

View File

@ -6,18 +6,17 @@ use std::{
};
use bitcode::{Decode, DecodeOwned, Encode};
-use database::{Database, borrow_db_checked};
+use drop_consts::CACHE_DIR;
+use drop_errors::remote_access_error::RemoteAccessError;
use http::{Response, header::CONTENT_TYPE, response::Builder as ResponseBuilder};
-use crate::error::{CacheError, RemoteAccessError};
#[macro_export]
macro_rules! offline {
($var:expr, $func1:expr, $func2:expr, $( $arg:expr ),* ) => {
-async move {
-if ::database::borrow_db_checked().settings.force_offline
-|| $var.lock().status == ::client::app_status::AppStatus::Offline {
+// TODO add offline mode back
+// || $var.lock().unwrap().status == AppStatus::Offline
+async move { if drop_database::borrow_db_checked().settings.force_offline {
$func2( $( $arg ), *).await
} else {
$func1( $( $arg ), *).await
@ -58,33 +57,33 @@ fn delete_sync(base: &Path, key: &str) -> io::Result<()> {
}
pub fn cache_object<D: Encode>(key: &str, data: &D) -> Result<(), RemoteAccessError> {
-cache_object_db(key, data, &borrow_db_checked())
+cache_object_db(key, data)
}
pub fn cache_object_db<D: Encode>(
key: &str,
data: &D,
-database: &Database,
) -> Result<(), RemoteAccessError> {
let bytes = bitcode::encode(data);
-write_sync(&database.cache_dir, key, bytes).map_err(RemoteAccessError::Cache)
+write_sync(&CACHE_DIR, key, bytes).map_err(RemoteAccessError::Cache)
}
pub fn get_cached_object<D: Encode + DecodeOwned>(key: &str) -> Result<D, RemoteAccessError> {
-get_cached_object_db::<D>(key, &borrow_db_checked())
+get_cached_object_db::<D>(key)
}
pub fn get_cached_object_db<D: DecodeOwned>(
key: &str,
-db: &Database,
) -> Result<D, RemoteAccessError> {
-let bytes = read_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
+let bytes = read_sync(&CACHE_DIR, key).map_err(RemoteAccessError::Cache)?;
let data =
bitcode::decode::<D>(&bytes).map_err(|e| RemoteAccessError::Cache(io::Error::other(e)))?;
Ok(data)
}
pub fn clear_cached_object(key: &str) -> Result<(), RemoteAccessError> {
-clear_cached_object_db(key, &borrow_db_checked())
+clear_cached_object_db(key)
}
-pub fn clear_cached_object_db(key: &str, db: &Database) -> Result<(), RemoteAccessError> {
-delete_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
+pub fn clear_cached_object_db(
+key: &str,
+) -> Result<(), RemoteAccessError> {
+delete_sync(&CACHE_DIR, key).map_err(RemoteAccessError::Cache)?;
Ok(())
}
@ -102,39 +101,30 @@ impl ObjectCache {
}
}
-impl TryFrom<Response<Vec<u8>>> for ObjectCache {
-type Error = CacheError;
-fn try_from(value: Response<Vec<u8>>) -> Result<Self, Self::Error> {
-Ok(ObjectCache {
+impl From<Response<Vec<u8>>> for ObjectCache {
+fn from(value: Response<Vec<u8>>) -> Self {
+ObjectCache {
content_type: value
.headers()
.get(CONTENT_TYPE)
-.ok_or(CacheError::HeaderNotFound(CONTENT_TYPE))?
+.unwrap()
.to_str()
-.map_err(CacheError::ParseError)?
+.unwrap()
.to_owned(),
body: value.body().clone(),
expiry: get_sys_time_in_secs() + 60 * 60 * 24,
-})
+}
}
}
-impl TryFrom<ObjectCache> for Response<Vec<u8>> {
-type Error = CacheError;
-fn try_from(value: ObjectCache) -> Result<Self, Self::Error> {
+impl From<ObjectCache> for Response<Vec<u8>> {
+fn from(value: ObjectCache) -> Self {
let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type);
-resp_builder
-.body(value.body)
-.map_err(CacheError::ConstructionError)
+resp_builder.body(value.body).unwrap()
}
}
-impl TryFrom<&ObjectCache> for Response<Vec<u8>> {
-type Error = CacheError;
-fn try_from(value: &ObjectCache) -> Result<Self, Self::Error> {
+impl From<&ObjectCache> for Response<Vec<u8>> {
+fn from(value: &ObjectCache) -> Self {
let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type.clone());
-resp_builder
-.body(value.body.clone())
-.map_err(CacheError::ConstructionError)
+resp_builder.body(value.body.clone()).unwrap()
}
}

View File

@ -0,0 +1,52 @@
use http::{header::CONTENT_TYPE, response::Builder as ResponseBuilder, Request};
use log::warn;
use tauri::UriSchemeResponder;
use crate::{requests::generate_url, utils::DROP_CLIENT_ASYNC, DropRemoteContext};
use super::{
auth::generate_authorization_header,
cache::{ObjectCache, cache_object, get_cached_object},
};
pub async fn fetch_object(context: &DropRemoteContext, request: Request<Vec<u8>>, responder: UriSchemeResponder) {
// Drop leading /
let object_id = &request.uri().path()[1..];
let cache_result = get_cached_object::<ObjectCache>(object_id);
if let Ok(cache_result) = &cache_result
&& !cache_result.has_expired()
{
responder.respond(cache_result.into());
return;
}
let header = generate_authorization_header(context);
let client = DROP_CLIENT_ASYNC.clone();
let url = generate_url(context, &["/api/v1/client/object", object_id], &[]).expect("failed to generate object url");
let response = client.get(url).header("Authorization", header).send().await;
if response.is_err() {
match cache_result {
Ok(cache_result) => responder.respond(cache_result.into()),
Err(e) => {
warn!("{e}");
}
}
return;
}
let response = response.unwrap();
let resp_builder = ResponseBuilder::new().header(
CONTENT_TYPE,
response.headers().get("Content-Type").unwrap(),
);
let data = Vec::from(response.bytes().await.unwrap());
let resp = resp_builder.body(data).unwrap();
if cache_result.is_err() || cache_result.unwrap().has_expired() {
cache_object::<ObjectCache>(object_id, &resp.clone().into()).unwrap();
}
responder.respond(resp);
}

View File

@ -0,0 +1,29 @@
use serde::{Deserialize, Serialize};
use url::Url;
pub mod auth;
pub mod cache;
pub mod fetch_object;
pub mod requests;
pub mod utils;
#[derive(Serialize, Deserialize, Clone)]
struct DropRemoteAuth {
private: String,
cert: String,
client_id: String,
web_token: String,
}
#[derive(Serialize, Deserialize, Clone)]
pub struct DropRemoteContext {
base_url: Url,
auth: Option<DropRemoteAuth>,
}
impl DropRemoteContext {
pub fn new(base_url: Url) -> Self {
DropRemoteContext { base_url, auth: None }
}
}

View File

@ -1,15 +1,14 @@
-use database::{DB, interface::DatabaseImpls};
+use drop_errors::remote_access_error::RemoteAccessError;
use url::Url;
-use crate::{
-auth::generate_authorization_header, error::RemoteAccessError, utils::DROP_CLIENT_ASYNC,
-};
+use crate::{auth::generate_authorization_header, utils::DROP_CLIENT_ASYNC, DropRemoteContext};
pub fn generate_url<T: AsRef<str>>(
+context: &DropRemoteContext,
path_components: &[T],
query: &[(T, T)],
) -> Result<Url, RemoteAccessError> {
-let mut base_url = DB.fetch_base_url();
+let mut base_url = context.base_url.clone();
for endpoint in path_components {
base_url = base_url.join(endpoint.as_ref())?;
}
@ -22,10 +21,10 @@ pub fn generate_url<T: AsRef<str>>(
Ok(base_url)
}
-pub async fn make_authenticated_get(url: Url) -> Result<reqwest::Response, reqwest::Error> {
+pub async fn make_authenticated_get(context: &DropRemoteContext, url: Url) -> Result<reqwest::Response, reqwest::Error> {
DROP_CLIENT_ASYNC
.get(url)
-.header("Authorization", generate_authorization_header())
+.header("Authorization", generate_authorization_header(context))
.send()
.await
}
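A small sketch (the server URL is an assumption) of what the context-based generate_url produces for the version lookup used earlier in on_game_complete:
let context = DropRemoteContext::new(Url::parse("https://drop.example.com/").unwrap());
let url = generate_url(
    &context,
    &["/api/v1/client/game/version"],
    &[("id", "GAME_ID"), ("version", "1.0.0")],
).unwrap();
// e.g. https://drop.example.com/api/v1/client/game/version?id=GAME_ID&version=1.0.0
println!("{url}");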

View File

@ -0,0 +1,71 @@
use std::{
fs::{self, File},
io::Read,
sync::LazyLock,
};
use drop_consts::DATA_ROOT_DIR;
use log::{debug, info};
use reqwest::Certificate;
static DROP_CERT_BUNDLE: LazyLock<Vec<Certificate>> = LazyLock::new(fetch_certificates);
pub static DROP_CLIENT_SYNC: LazyLock<reqwest::blocking::Client> = LazyLock::new(get_client_sync);
pub static DROP_CLIENT_ASYNC: LazyLock<reqwest::Client> = LazyLock::new(get_client_async);
pub static DROP_CLIENT_WS_CLIENT: LazyLock<reqwest::Client> = LazyLock::new(get_client_ws);
fn fetch_certificates() -> Vec<Certificate> {
let certificate_dir = DATA_ROOT_DIR.join("certificates");
let mut certs = Vec::new();
match fs::read_dir(certificate_dir) {
Ok(c) => {
for entry in c {
match entry {
Ok(c) => {
let mut buf = Vec::new();
File::open(c.path()).unwrap().read_to_end(&mut buf).unwrap();
for cert in Certificate::from_pem_bundle(&buf).unwrap() {
certs.push(cert);
}
info!(
"added {} certificate(s) from {}",
certs.len(),
c.file_name().into_string().unwrap()
);
}
Err(_) => todo!(),
}
}
}
Err(e) => {
debug!("not loading certificates due to error: {e}");
}
};
certs
}
pub fn get_client_sync() -> reqwest::blocking::Client {
let mut client = reqwest::blocking::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client.use_rustls_tls().build().unwrap()
}
pub fn get_client_async() -> reqwest::Client {
let mut client = reqwest::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client.use_rustls_tls().build().unwrap()
}
pub fn get_client_ws() -> reqwest::Client {
let mut client = reqwest::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client.use_rustls_tls().http1_only().build().unwrap()
}
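A sketch of the practical consequence of fetch_certificates above (paths and helper name are assumptions): trusting a self-signed Drop server certificate amounts to placing a PEM bundle in the certificates directory before the clients are built.
use std::fs;

fn install_server_certificate(pem_path: &std::path::Path) -> std::io::Result<()> {
    // fetch_certificates loads every bundle found under <DATA_ROOT_DIR>/certificates.
    let cert_dir = DATA_ROOT_DIR.join("certificates");
    fs::create_dir_all(&cert_dir)?;
    fs::copy(pem_path, cert_dir.join("server.pem"))?;
    Ok(())
}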

59
src-tauri/src/auth.rs Normal file
View File

@ -0,0 +1,59 @@
use std::sync::Mutex;
use drop_database::{borrow_db_checked, runtime_models::User};
use drop_errors::remote_access_error::RemoteAccessError;
use drop_remote::{auth::{fetch_user, recieve_handshake_logic}, cache::{cache_object, clear_cached_object, get_cached_object}};
use log::warn;
use tauri::{AppHandle, Emitter as _, Manager as _};
use crate::{AppState, AppStatus};
pub async fn setup() -> (AppStatus, Option<User>) {
let auth = {
let data = borrow_db_checked();
data.auth.clone()
};
if auth.is_some() {
let user_result = match fetch_user().await {
Ok(data) => data,
Err(RemoteAccessError::FetchError(_)) => {
let user = get_cached_object::<User>("user").unwrap();
return (AppStatus::Offline, Some(user));
}
Err(_) => return (AppStatus::SignedInNeedsReauth, None),
};
cache_object("user", &user_result).unwrap();
return (AppStatus::SignedIn, Some(user_result));
}
(AppStatus::SignedOut, None)
}
pub async fn recieve_handshake(app: AppHandle, path: String) {
// Tell the app we're processing
app.emit("auth/processing", ()).unwrap();
let handshake_result = recieve_handshake_logic(path).await;
if let Err(e) = handshake_result {
warn!("error with authentication: {e}");
app.emit("auth/failed", e.to_string()).unwrap();
return;
}
let app_state = app.state::<Mutex<AppState>>();
let (app_status, user) = setup().await;
let mut state_lock = app_state.lock().unwrap();
state_lock.status = app_status;
state_lock.user = user;
let _ = clear_cached_object("collections");
let _ = clear_cached_object("library");
drop(state_lock);
app.emit("auth/finished", ()).unwrap();
}
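For illustration only (the values are placeholders): the deep-link path handed to recieve_handshake is expected to split into exactly three segments.
// "<route>/<client_id>/<token>"
let path = "handshake/CLIENT_ID/TOKEN";
let chunks: Vec<&str> = path.split('/').collect();
assert_eq!(chunks.len(), 3);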

View File

@ -1,82 +0,0 @@
use std::sync::nonpoison::Mutex;
use database::{borrow_db_checked, borrow_db_mut_checked};
use download_manager::DOWNLOAD_MANAGER;
use log::{debug, error};
use tauri::AppHandle;
use tauri_plugin_autostart::ManagerExt;
use tauri_plugin_opener::OpenerExt;
use crate::AppState;
#[tauri::command]
pub fn fetch_state(state: tauri::State<'_, Mutex<AppState>>) -> Result<String, String> {
let guard = state.lock();
let cloned_state = serde_json::to_string(&guard.clone()).map_err(|e| e.to_string())?;
drop(guard);
Ok(cloned_state)
}
#[tauri::command]
pub fn quit(app: tauri::AppHandle) {
cleanup_and_exit(&app);
}
pub fn cleanup_and_exit(app: &AppHandle) {
debug!("cleaning up and exiting application");
match DOWNLOAD_MANAGER.ensure_terminated() {
Ok(res) => match res {
Ok(()) => debug!("download manager terminated correctly"),
Err(()) => error!("download manager failed to terminate correctly"),
},
Err(e) => panic!("{e:?}"),
}
app.exit(0);
}
#[tauri::command]
pub fn toggle_autostart(app: AppHandle, enabled: bool) -> Result<(), String> {
let manager = app.autolaunch();
if enabled {
manager.enable().map_err(|e| e.to_string())?;
debug!("enabled autostart");
} else {
manager.disable().map_err(|e| e.to_string())?;
debug!("eisabled autostart");
}
// Store the state in DB
let mut db_handle = borrow_db_mut_checked();
db_handle.settings.autostart = enabled;
Ok(())
}
#[tauri::command]
pub fn get_autostart_enabled(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
let db_handle = borrow_db_checked();
let db_state = db_handle.settings.autostart;
drop(db_handle);
// Get actual system state
let manager = app.autolaunch();
let system_state = manager.is_enabled()?;
// If they don't match, sync to DB state
if db_state != system_state {
if db_state {
manager.enable()?;
} else {
manager.disable()?;
}
}
Ok(db_state)
}
#[tauri::command]
pub fn open_fs(path: String, app_handle: AppHandle) -> Result<(), tauri_plugin_opener::Error> {
app_handle
.opener()
.open_path(path, None::<&str>)
}

View File

@ -0,0 +1,75 @@
use drop_database::{borrow_db_checked, borrow_db_mut_checked};
use log::debug;
use tauri::AppHandle;
use tauri_plugin_autostart::ManagerExt;
pub fn toggle_autostart_logic(app: AppHandle, enabled: bool) -> Result<(), String> {
let manager = app.autolaunch();
if enabled {
manager.enable().map_err(|e| e.to_string())?;
debug!("enabled autostart");
} else {
manager.disable().map_err(|e| e.to_string())?;
debug!("eisabled autostart");
}
// Store the state in DB
let mut db_handle = borrow_db_mut_checked();
db_handle.settings.autostart = enabled;
drop(db_handle);
Ok(())
}
pub fn get_autostart_enabled_logic(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
// First check DB state
let db_handle = borrow_db_checked();
let db_state = db_handle.settings.autostart;
drop(db_handle);
// Get actual system state
let manager = app.autolaunch();
let system_state = manager.is_enabled()?;
// If they don't match, sync to DB state
if db_state != system_state {
if db_state {
manager.enable()?;
} else {
manager.disable()?;
}
}
Ok(db_state)
}
// New function to sync state on startup
pub fn sync_autostart_on_startup(app: &AppHandle) -> Result<(), String> {
let db_handle = borrow_db_checked();
let should_be_enabled = db_handle.settings.autostart;
drop(db_handle);
let manager = app.autolaunch();
let current_state = manager.is_enabled().map_err(|e| e.to_string())?;
if current_state != should_be_enabled {
if should_be_enabled {
manager.enable().map_err(|e| e.to_string())?;
debug!("synced autostart: enabled");
} else {
manager.disable().map_err(|e| e.to_string())?;
debug!("synced autostart: disabled");
}
}
Ok(())
}
#[tauri::command]
pub fn toggle_autostart(app: AppHandle, enabled: bool) -> Result<(), String> {
toggle_autostart_logic(app, enabled)
}
#[tauri::command]
pub fn get_autostart_enabled(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
get_autostart_enabled_logic(app)
}
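A minimal sketch (assumed wiring, not from this changeset) of calling sync_autostart_on_startup from Tauri's setup hook so the DB and system autostart state agree at launch:
// Inside the existing tauri::Builder chain:
.setup(|app| {
    if let Err(e) = sync_autostart_on_startup(app.handle()) {
        log::warn!("could not sync autostart state: {e}");
    }
    Ok(())
})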

View File

@ -0,0 +1,23 @@
use log::{debug, error};
use tauri::AppHandle;
use crate::AppState;
#[tauri::command]
pub fn quit(app: tauri::AppHandle, state: tauri::State<'_, std::sync::Mutex<AppState>>) {
cleanup_and_exit(&app, &state);
}
pub fn cleanup_and_exit(app: &AppHandle, state: &tauri::State<'_, std::sync::Mutex<AppState>>) {
debug!("cleaning up and exiting application");
let download_manager = state.lock().unwrap().download_manager.clone();
match download_manager.ensure_terminated() {
Ok(res) => match res {
Ok(()) => debug!("download manager terminated correctly"),
Err(()) => error!("download manager failed to terminate correctly"),
},
Err(e) => panic!("{e:?}"),
}
app.exit(0);
}

View File

@ -0,0 +1,11 @@
use crate::AppState;
#[tauri::command]
pub fn fetch_state(
state: tauri::State<'_, std::sync::Mutex<AppState>>,
) -> Result<String, String> {
let guard = state.lock().unwrap();
let cloned_state = serde_json::to_string(&guard.clone()).map_err(|e| e.to_string())?;
drop(guard);
Ok(cloned_state)
}

Some files were not shown because too many files have changed in this diff.