chore: Major refactoring

Still needs a massive go-over, because nothing should reference tauri in any of the workspaces except the original one. The process manager has been refactored as an example.

Signed-off-by: quexeky <git@quexeky.dev>
quexeky
2025-10-09 07:46:17 +11:00
parent cc57ca7076
commit 59f040bc8b
97 changed files with 14473 additions and 1063 deletions
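
The intent described in the commit message is that workspace crates such as process, remote, utils and database expose plain Rust APIs with no tauri types, while thin #[tauri::command] wrappers stay in the original src-tauri crate. A minimal sketch of that split, using a hypothetical kill_game function (names are illustrative, not the actual process-manager API):

// process/src/lib.rs — hypothetical sketch: no tauri imports in the workspace crate.
pub fn kill_game(game_id: &str) -> Result<(), String> {
    // platform-specific process handling lives here, independent of the UI layer
    let _ = game_id;
    Ok(())
}

// src-tauri (the original crate) — owns the tauri dependency and only wraps the logic.
#[tauri::command]
fn kill_game_command(game_id: String) -> Result<(), String> {
    process::kill_game(&game_id)
}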

src-tauri/Cargo.lock (generated)

@ -740,7 +740,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02260d489095346e5cafd04dea8e8cb54d1d74fcd759022a9b72986ebe9a1257"
dependencies = [
"serde",
"toml",
"toml 0.8.22",
]
[[package]]
@ -808,6 +808,16 @@ dependencies = [
"windows-link",
]
[[package]]
name = "client"
version = "0.1.0"
dependencies = [
"database 0.5.0",
"log",
"tauri",
"tauri-plugin-autostart",
]
[[package]]
name = "combine"
version = "4.6.7"
@ -1059,6 +1069,16 @@ version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476"
[[package]]
name = "database"
version = "0.1.0"
[[package]]
name = "database"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4851b30af1e6d2db292e4873b486fa2ad00a6fb6466da44f827e18bdac62f50"
[[package]]
name = "der-parser"
version = "9.0.0"
@ -1237,9 +1257,9 @@ dependencies = [
[[package]]
name = "dlopen2"
version = "0.7.0"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1297103d2bbaea85724fcee6294c2d50b1081f9ad47d0f6f6f61eda65315a6"
checksum = "b54f373ccf864bf587a89e880fb7610f8d73f3045f13580948ccbcaff26febff"
dependencies = [
"dlopen2_derive",
"libc",
@ -1292,6 +1312,8 @@ dependencies = [
"bytes",
"cacache 13.1.0",
"chrono",
"client",
"database 0.1.0",
"deranged",
"dirs 6.0.0",
"droplet-rs",
@ -1311,9 +1333,11 @@ dependencies = [
"native_model",
"page_size",
"parking_lot 0.12.3",
"process",
"rand 0.9.1",
"rayon",
"regex",
"remote",
"reqwest 0.12.22",
"reqwest-middleware 0.4.2",
"reqwest-middleware-cache",
@ -1345,6 +1369,7 @@ dependencies = [
"umu-wrapper-lib",
"url",
"urlencoding",
"utils",
"uuid",
"walkdir",
"webbrowser",
@ -1423,7 +1448,7 @@ dependencies = [
"cc",
"memchr",
"rustc_version",
"toml",
"toml 0.8.22",
"vswhom",
"winreg 0.52.0",
]
@ -2113,7 +2138,7 @@ dependencies = [
"futures-sink",
"futures-util",
"http 0.2.12",
"indexmap 2.9.0",
"indexmap 2.11.4",
"slab",
"tokio",
"tokio-util",
@ -2132,7 +2157,7 @@ dependencies = [
"futures-core",
"futures-sink",
"http 1.3.1",
"indexmap 2.9.0",
"indexmap 2.11.4",
"slab",
"tokio",
"tokio-util",
@ -2592,13 +2617,14 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.9.0"
version = "2.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5"
dependencies = [
"equivalent",
"hashbrown 0.15.3",
"serde",
"serde_core",
]
[[package]]
@ -2775,7 +2801,7 @@ checksum = "02cb977175687f33fa4afa0c95c112b987ea1443e5a51c8f8ff27dc618270cc2"
dependencies = [
"cssparser",
"html5ever",
"indexmap 2.9.0",
"indexmap 2.11.4",
"selectors",
]
@ -2881,9 +2907,9 @@ dependencies = [
[[package]]
name = "log"
version = "0.4.27"
version = "0.4.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
dependencies = [
"serde",
"value-bag",
@ -3437,6 +3463,16 @@ dependencies = [
"objc2-core-foundation",
]
[[package]]
name = "objc2-javascript-core"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9052cb1bb50a4c161d934befcf879526fb87ae9a68858f241e693ca46225cf5a"
dependencies = [
"objc2 0.6.1",
"objc2-core-foundation",
]
[[package]]
name = "objc2-metal"
version = "0.2.2"
@ -3473,6 +3509,17 @@ dependencies = [
"objc2-foundation 0.3.1",
]
[[package]]
name = "objc2-security"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1f8e0ef3ab66b08c42644dcb34dba6ec0a574bbd8adbb8bdbdc7a2779731a44"
dependencies = [
"bitflags 2.9.1",
"objc2 0.6.1",
"objc2-core-foundation",
]
[[package]]
name = "objc2-ui-kit"
version = "0.3.1"
@ -3497,6 +3544,8 @@ dependencies = [
"objc2-app-kit",
"objc2-core-foundation",
"objc2-foundation 0.3.1",
"objc2-javascript-core",
"objc2-security",
]
[[package]]
@ -3943,7 +3992,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eac26e981c03a6e53e0aee43c113e3202f5581d5360dae7bd2c70e800dd0451d"
dependencies = [
"base64 0.22.1",
"indexmap 2.9.0",
"indexmap 2.11.4",
"quick-xml",
"serde",
"time",
@ -4074,6 +4123,10 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "process"
version = "0.1.0"
[[package]]
name = "quick-xml"
version = "0.32.0"
@ -4384,6 +4437,10 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "remote"
version = "0.1.0"
[[package]]
name = "reqwest"
version = "0.11.27"
@ -4852,10 +4909,11 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.219"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [
"serde_core",
"serde_derive",
]
@ -4881,10 +4939,19 @@ dependencies = [
]
[[package]]
name = "serde_derive"
version = "1.0.219"
name = "serde_core"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
@ -4934,6 +5001,15 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_spanned"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5417783452c2be558477e104686f7de5dae53dba813c28435e0e70f82d9b04ee"
dependencies = [
"serde_core",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
@ -4956,7 +5032,7 @@ dependencies = [
"chrono",
"hex 0.4.3",
"indexmap 1.9.3",
"indexmap 2.9.0",
"indexmap 2.11.4",
"serde",
"serde_derive",
"serde_json",
@ -4982,7 +5058,7 @@ version = "0.9.34+deprecated"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
dependencies = [
"indexmap 2.9.0",
"indexmap 2.11.4",
"itoa",
"ryu",
"serde",
@ -4991,9 +5067,9 @@ dependencies = [
[[package]]
name = "serialize-to-javascript"
version = "0.1.1"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9823f2d3b6a81d98228151fdeaf848206a7855a7a042bbf9bf870449a66cafb"
checksum = "04f3666a07a197cdb77cdf306c32be9b7f598d7060d50cfd4d5aa04bfd92f6c5"
dependencies = [
"serde",
"serde_json",
@ -5002,13 +5078,13 @@ dependencies = [
[[package]]
name = "serialize-to-javascript-impl"
version = "0.1.1"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74064874e9f6a15f04c1f3cb627902d0e6b410abbf36668afa873c61889f1763"
checksum = "772ee033c0916d670af7860b6e1ef7d658a4629a6d0b4c8c3e67f09b3765b75d"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.101",
]
[[package]]
@ -5424,17 +5500,18 @@ dependencies = [
"cfg-expr",
"heck 0.5.0",
"pkg-config",
"toml",
"toml 0.8.22",
"version-compare",
]
[[package]]
name = "tao"
version = "0.34.0"
version = "0.34.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49c380ca75a231b87b6c9dd86948f035012e7171d1a7c40a9c2890489a7ffd8a"
checksum = "959469667dbcea91e5485fc48ba7dd6023face91bb0f1a14681a70f99847c3f7"
dependencies = [
"bitflags 2.9.1",
"block2 0.6.1",
"core-foundation 0.10.1",
"core-graphics",
"crossbeam-channel",
@ -5506,12 +5583,13 @@ dependencies = [
[[package]]
name = "tauri"
version = "2.7.0"
version = "2.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "352a4bc7bf6c25f5624227e3641adf475a6535707451b09bb83271df8b7a6ac7"
checksum = "d4d1d3b3dc4c101ac989fd7db77e045cc6d91a25349cd410455cb5c57d510c1c"
dependencies = [
"anyhow",
"bytes",
"cookie",
"dirs 6.0.0",
"dunce",
"embed_plist",
@ -5530,6 +5608,7 @@ dependencies = [
"objc2-app-kit",
"objc2-foundation 0.3.1",
"objc2-ui-kit",
"objc2-web-kit",
"percent-encoding",
"plist",
"raw-window-handle",
@ -5557,9 +5636,9 @@ dependencies = [
[[package]]
name = "tauri-build"
version = "2.3.1"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "182d688496c06bf08ea896459bf483eb29cdff35c1c4c115fb14053514303064"
checksum = "9c432ccc9ff661803dab74c6cd78de11026a578a9307610bbc39d3c55be7943f"
dependencies = [
"anyhow",
"cargo_toml",
@ -5573,15 +5652,15 @@ dependencies = [
"serde_json",
"tauri-utils",
"tauri-winres",
"toml",
"toml 0.9.7",
"walkdir",
]
[[package]]
name = "tauri-codegen"
version = "2.3.1"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b54a99a6cd8e01abcfa61508177e6096a4fe2681efecee9214e962f2f073ae4a"
checksum = "1ab3a62cf2e6253936a8b267c2e95839674e7439f104fa96ad0025e149d54d8a"
dependencies = [
"base64 0.22.1",
"brotli",
@ -5606,9 +5685,9 @@ dependencies = [
[[package]]
name = "tauri-macros"
version = "2.3.2"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7945b14dc45e23532f2ded6e120170bbdd4af5ceaa45784a6b33d250fbce3f9e"
checksum = "4368ea8094e7045217edb690f493b55b30caf9f3e61f79b4c24b6db91f07995e"
dependencies = [
"heck 0.5.0",
"proc-macro2",
@ -5620,9 +5699,9 @@ dependencies = [
[[package]]
name = "tauri-plugin"
version = "2.3.0"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d9a0bd00bf1930ad1a604d08b0eb6b2a9c1822686d65d7f4731a7723b8901d3"
checksum = "9946a3cede302eac0c6eb6c6070ac47b1768e326092d32efbb91f21ed58d978f"
dependencies = [
"anyhow",
"glob",
@ -5631,15 +5710,15 @@ dependencies = [
"serde",
"serde_json",
"tauri-utils",
"toml",
"toml 0.9.7",
"walkdir",
]
[[package]]
name = "tauri-plugin-autostart"
version = "2.3.0"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c58593aafcb03892dbf9998b35a96ead3b8e597435c7af46aff1654d076d5d03"
checksum = "062cdcd483d5e3148c9a64dabf8c574e239e2aa1193cf208d95cf89a676f87a5"
dependencies = [
"auto-launch",
"serde",
@ -5705,7 +5784,7 @@ dependencies = [
"tauri-plugin",
"tauri-utils",
"thiserror 2.0.12",
"toml",
"toml 0.8.22",
"url",
]
@ -5733,9 +5812,9 @@ dependencies = [
[[package]]
name = "tauri-plugin-os"
version = "2.2.1"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "424f19432397850c2ddd42aa58078630c15287bbce3866eb1d90e7dbee680637"
checksum = "77a1c77ebf6f20417ab2a74e8c310820ba52151406d0c80fbcea7df232e3f6ba"
dependencies = [
"gethostname",
"log",
@ -5788,9 +5867,9 @@ dependencies = [
[[package]]
name = "tauri-runtime"
version = "2.7.1"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b1cc885be806ea15ff7b0eb47098a7b16323d9228876afda329e34e2d6c4676"
checksum = "d4cfc9ad45b487d3fded5a4731a567872a4812e9552e3964161b08edabf93846"
dependencies = [
"cookie",
"dpi",
@ -5799,20 +5878,23 @@ dependencies = [
"jni",
"objc2 0.6.1",
"objc2-ui-kit",
"objc2-web-kit",
"raw-window-handle",
"serde",
"serde_json",
"tauri-utils",
"thiserror 2.0.12",
"url",
"webkit2gtk",
"webview2-com",
"windows",
]
[[package]]
name = "tauri-runtime-wry"
version = "2.7.2"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe653a2fbbef19fe898efc774bc52c8742576342a33d3d028c189b57eb1d2439"
checksum = "c1fe9d48bd122ff002064e88cfcd7027090d789c4302714e68fcccba0f4b7807"
dependencies = [
"gtk",
"http 1.3.1",
@ -5837,9 +5919,9 @@ dependencies = [
[[package]]
name = "tauri-utils"
version = "2.6.0"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9330c15cabfe1d9f213478c9e8ec2b0c76dab26bb6f314b8ad1c8a568c1d186e"
checksum = "41a3852fdf9a4f8fbeaa63dc3e9a85284dd6ef7200751f0bd66ceee30c93f212"
dependencies = [
"anyhow",
"brotli",
@ -5866,7 +5948,7 @@ dependencies = [
"serde_with",
"swift-rs",
"thiserror 2.0.12",
"toml",
"toml 0.9.7",
"url",
"urlpattern",
"uuid",
@ -5880,8 +5962,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8d321dbc6f998d825ab3f0d62673e810c861aac2d0de2cc2c395328f1d113b4"
dependencies = [
"embed-resource",
"indexmap 2.9.0",
"toml",
"indexmap 2.11.4",
"toml 0.8.22",
]
[[package]]
@ -6105,11 +6187,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05ae329d1f08c4d17a59bed7ff5b5a769d062e64a62d34a3261b219e62cd5aae"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"serde_spanned 0.6.8",
"toml_datetime 0.6.9",
"toml_edit 0.22.26",
]
[[package]]
name = "toml"
version = "0.9.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00e5e5d9bf2475ac9d4f0d9edab68cc573dc2fd644b0dba36b0c30a92dd9eaa0"
dependencies = [
"indexmap 2.11.4",
"serde_core",
"serde_spanned 1.0.2",
"toml_datetime 0.7.2",
"toml_parser",
"toml_writer",
"winnow 0.7.13",
]
[[package]]
name = "toml_datetime"
version = "0.6.9"
@ -6119,14 +6216,23 @@ dependencies = [
"serde",
]
[[package]]
name = "toml_datetime"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32f1085dec27c2b6632b04c80b3bb1b4300d6495d1e129693bdda7d91e72eec1"
dependencies = [
"serde_core",
]
[[package]]
name = "toml_edit"
version = "0.19.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
dependencies = [
"indexmap 2.9.0",
"toml_datetime",
"indexmap 2.11.4",
"toml_datetime 0.6.9",
"winnow 0.5.40",
]
@ -6136,8 +6242,8 @@ version = "0.20.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81"
dependencies = [
"indexmap 2.9.0",
"toml_datetime",
"indexmap 2.11.4",
"toml_datetime 0.6.9",
"winnow 0.5.40",
]
@ -6147,12 +6253,21 @@ version = "0.22.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e"
dependencies = [
"indexmap 2.9.0",
"indexmap 2.11.4",
"serde",
"serde_spanned",
"toml_datetime",
"serde_spanned 0.6.8",
"toml_datetime 0.6.9",
"toml_write",
"winnow 0.7.10",
"winnow 0.7.13",
]
[[package]]
name = "toml_parser"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4cf893c33be71572e0e9aa6dd15e6677937abd686b066eac3f8cd3531688a627"
dependencies = [
"winnow 0.7.13",
]
[[package]]
@ -6161,6 +6276,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076"
[[package]]
name = "toml_writer"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d163a63c116ce562a22cda521fcc4d79152e7aba014456fb5eb442f6d6a10109"
[[package]]
name = "tower"
version = "0.5.2"
@ -6454,6 +6575,10 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
[[package]]
name = "utils"
version = "0.1.0"
[[package]]
name = "uuid"
version = "1.17.0"
@ -7202,9 +7327,9 @@ dependencies = [
[[package]]
name = "winnow"
version = "0.7.10"
version = "0.7.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06928c8748d81b05c9be96aad92e1b6ff01833332f281e8cfca3be4b35fc9ec"
checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
dependencies = [
"memchr",
]
@ -7255,14 +7380,15 @@ checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
[[package]]
name = "wry"
version = "0.52.1"
version = "0.53.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12a714d9ba7075aae04a6e50229d6109e3d584774b99a6a8c60de1698ca111b9"
checksum = "6d78ec082b80fa088569a970d043bb3050abaabf4454101d44514ee8d9a8c9f6"
dependencies = [
"base64 0.22.1",
"block2 0.6.1",
"cookie",
"crossbeam-channel",
"dirs 6.0.0",
"dpi",
"dunce",
"gdkx11",
@ -7431,7 +7557,7 @@ dependencies = [
"tracing",
"uds_windows",
"windows-sys 0.59.0",
"winnow 0.7.10",
"winnow 0.7.13",
"zbus_macros",
"zbus_names",
"zvariant",
@ -7460,7 +7586,7 @@ checksum = "7be68e64bf6ce8db94f63e72f0c7eb9a60d733f7e0499e628dfab0f84d6bcb97"
dependencies = [
"serde",
"static_assertions",
"winnow 0.7.10",
"winnow 0.7.13",
"zvariant",
]
@ -7582,7 +7708,7 @@ dependencies = [
"enumflags2",
"serde",
"url",
"winnow 0.7.10",
"winnow 0.7.13",
"zvariant_derive",
"zvariant_utils",
]
@ -7611,5 +7737,5 @@ dependencies = [
"serde",
"static_assertions",
"syn 2.0.101",
"winnow 0.7.10",
"winnow 0.7.13",
]


@ -78,6 +78,14 @@ futures-core = "0.3.31"
bytes = "1.10.1"
# tailscale = { path = "./tailscale" }
# Workspaces
client = { path = "../client" }
database = { path = "../database" }
process = { path = "../process" }
remote = { path = "../remote" }
utils = { path = "../utils" }
[dependencies.dynfmt]
version = "0.1.5"
features = ["curly"]


@ -1,8 +1,15 @@
use log::{debug, error};
use tauri::AppHandle;
use crate::{lock, AppState};
#[tauri::command]
pub fn fetch_state(
state: tauri::State<'_, std::sync::Mutex<AppState<'_>>>,
) -> Result<String, String> {
let guard = lock!(state);
let cloned_state = serde_json::to_string(&guard.clone()).map_err(|e| e.to_string())?;
drop(guard);
Ok(cloned_state)
}
#[tauri::command]
pub fn quit(app: tauri::AppHandle, state: tauri::State<'_, std::sync::Mutex<AppState<'_>>>) {
cleanup_and_exit(&app, &state);
@ -21,3 +28,13 @@ pub fn cleanup_and_exit(app: &AppHandle, state: &tauri::State<'_, std::sync::Mut
app.exit(0);
}
#[tauri::command]
pub fn toggle_autostart(app: AppHandle, enabled: bool) -> Result<(), String> {
toggle_autostart_logic(app, enabled)
}
#[tauri::command]
pub fn get_autostart_enabled(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
get_autostart_enabled_logic(app)
}
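
The commands above are thin wrappers around the corresponding *_logic functions. A hedged sketch of how they would be registered with the Tauri builder (assumed Tauri v2 wiring; the actual builder lives elsewhere in the crate and is not part of this hunk):

// Assumed builder setup; command names taken from the hunk above.
fn run_app() {
    tauri::Builder::default()
        .invoke_handler(tauri::generate_handler![
            fetch_state,
            quit,
            toggle_autostart,
            get_autostart_enabled
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}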


@ -1,75 +0,0 @@
use crate::database::db::{borrow_db_checked, borrow_db_mut_checked};
use log::debug;
use tauri::AppHandle;
use tauri_plugin_autostart::ManagerExt;
pub fn toggle_autostart_logic(app: AppHandle, enabled: bool) -> Result<(), String> {
let manager = app.autolaunch();
if enabled {
manager.enable().map_err(|e| e.to_string())?;
debug!("enabled autostart");
} else {
manager.disable().map_err(|e| e.to_string())?;
debug!("eisabled autostart");
}
// Store the state in DB
let mut db_handle = borrow_db_mut_checked();
db_handle.settings.autostart = enabled;
drop(db_handle);
Ok(())
}
pub fn get_autostart_enabled_logic(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
// First check DB state
let db_handle = borrow_db_checked();
let db_state = db_handle.settings.autostart;
drop(db_handle);
// Get actual system state
let manager = app.autolaunch();
let system_state = manager.is_enabled()?;
// If they don't match, sync to DB state
if db_state != system_state {
if db_state {
manager.enable()?;
} else {
manager.disable()?;
}
}
Ok(db_state)
}
// New function to sync state on startup
pub fn sync_autostart_on_startup(app: &AppHandle) -> Result<(), String> {
let db_handle = borrow_db_checked();
let should_be_enabled = db_handle.settings.autostart;
drop(db_handle);
let manager = app.autolaunch();
let current_state = manager.is_enabled().map_err(|e| e.to_string())?;
if current_state != should_be_enabled {
if should_be_enabled {
manager.enable().map_err(|e| e.to_string())?;
debug!("synced autostart: enabled");
} else {
manager.disable().map_err(|e| e.to_string())?;
debug!("synced autostart: disabled");
}
}
Ok(())
}
#[tauri::command]
pub fn toggle_autostart(app: AppHandle, enabled: bool) -> Result<(), String> {
toggle_autostart_logic(app, enabled)
}
#[tauri::command]
pub fn get_autostart_enabled(app: AppHandle) -> Result<bool, tauri_plugin_autostart::Error> {
get_autostart_enabled_logic(app)
}


@ -1,11 +0,0 @@
use crate::{lock, AppState};
#[tauri::command]
pub fn fetch_state(
state: tauri::State<'_, std::sync::Mutex<AppState<'_>>>,
) -> Result<String, String> {
let guard = lock!(state);
let cloned_state = serde_json::to_string(&guard.clone()).map_err(|e| e.to_string())?;
drop(guard);
Ok(cloned_state)
}


@ -1,3 +0,0 @@
pub mod autostart;
pub mod cleanup;
pub mod commands;


@ -1,102 +0,0 @@
use std::{collections::HashMap, path::PathBuf, str::FromStr};
use log::warn;
use crate::{database::db::{GameVersion, DATA_ROOT_DIR}, error::backup_error::BackupError, process::process_manager::Platform};
use super::path::CommonPath;
pub struct BackupManager<'a> {
pub current_platform: Platform,
pub sources: HashMap<(Platform, Platform), &'a (dyn BackupHandler + Sync + Send)>,
}
impl BackupManager<'_> {
pub fn new() -> Self {
BackupManager {
#[cfg(target_os = "windows")]
current_platform: Platform::Windows,
#[cfg(target_os = "macos")]
current_platform: Platform::MacOs,
#[cfg(target_os = "linux")]
current_platform: Platform::Linux,
sources: HashMap::from([
// Current platform to target platform
(
(Platform::Windows, Platform::Windows),
&WindowsBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
(
(Platform::Linux, Platform::Linux),
&LinuxBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
(
(Platform::MacOs, Platform::MacOs),
&MacBackupManager {} as &(dyn BackupHandler + Sync + Send),
),
]),
}
}
}
pub trait BackupHandler: Send + Sync {
fn root_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(DATA_ROOT_DIR.lock().unwrap().join("games")) }
fn game_translate(&self, _path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(PathBuf::from_str(&game.game_id).unwrap()) }
fn base_translate(&self, path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(self.root_translate(path, game)?.join(self.game_translate(path, game)?)) }
fn home_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { let c = CommonPath::Home.get().ok_or(BackupError::NotFound); println!("{:?}", c); c }
fn store_user_id_translate(&self, _path: &PathBuf, game: &GameVersion) -> Result<PathBuf, BackupError> { PathBuf::from_str(&game.game_id).map_err(|_| BackupError::ParseError) }
fn os_user_name_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(PathBuf::from_str(&whoami::username()).unwrap()) }
fn win_app_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winAppData>"); Err(BackupError::InvalidSystem) }
fn win_local_app_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winLocalAppData>"); Err(BackupError::InvalidSystem) }
fn win_local_app_data_low_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winLocalAppDataLow>"); Err(BackupError::InvalidSystem) }
fn win_documents_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winDocuments>"); Err(BackupError::InvalidSystem) }
fn win_public_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winPublic>"); Err(BackupError::InvalidSystem) }
fn win_program_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winProgramData>"); Err(BackupError::InvalidSystem) }
fn win_dir_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected Windows Reference in Backup <winDir>"); Err(BackupError::InvalidSystem) }
fn xdg_data_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected XDG Reference in Backup <xdgData>"); Err(BackupError::InvalidSystem) }
fn xdg_config_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> { warn!("Unexpected XDG Reference in Backup <xdgConfig>"); Err(BackupError::InvalidSystem) }
fn skip_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> { Ok(PathBuf::new()) }
}
pub struct LinuxBackupManager {}
impl BackupHandler for LinuxBackupManager {
fn xdg_config_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::Config.get().ok_or(BackupError::NotFound)?)
}
fn xdg_data_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::Data.get().ok_or(BackupError::NotFound)?)
}
}
pub struct WindowsBackupManager {}
impl BackupHandler for WindowsBackupManager {
fn win_app_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::Config.get().ok_or(BackupError::NotFound)?)
}
fn win_local_app_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::DataLocal.get().ok_or(BackupError::NotFound)?)
}
fn win_local_app_data_low_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::DataLocalLow.get().ok_or(BackupError::NotFound)?)
}
fn win_dir_translate(&self, _path: &PathBuf,_game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str("C:/Windows").unwrap())
}
fn win_documents_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::Document.get().ok_or(BackupError::NotFound)?)
}
fn win_program_data_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(PathBuf::from_str("C:/ProgramData").unwrap())
}
fn win_public_translate(&self, _path: &PathBuf, _game: &GameVersion) -> Result<PathBuf, BackupError> {
Ok(CommonPath::Public.get().ok_or(BackupError::NotFound)?)
}
}
pub struct MacBackupManager {}
impl BackupHandler for MacBackupManager {}
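
The manager keys its handlers on a (current platform, target platform) pair, and the selected handler translates individual placeholders. A minimal lookup sketch in the context of this module (hypothetical helper; game is any GameVersion value):

fn resolve_home(game: &GameVersion) -> Result<PathBuf, BackupError> {
    let manager = BackupManager::new();
    let platform = manager.current_platform.clone();
    // Same-platform backup/restore, e.g. (Linux, Linux).
    let handler = manager
        .sources
        .get(&(platform.clone(), platform))
        .ok_or(BackupError::NotFound)?;
    // "<home>" resolves to the current user's home directory.
    handler.home_translate(&PathBuf::from("<home>"), game)
}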


@ -1,6 +0,0 @@
use crate::process::process_manager::Platform;
#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub enum Condition {
Os(Platform)
}


@ -1,35 +0,0 @@
use crate::database::db::GameVersion;
use super::conditions::{Condition};
#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct CloudSaveMetadata {
pub files: Vec<GameFile>,
pub game_version: GameVersion,
pub save_id: String,
}
#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub struct GameFile {
pub path: String,
pub id: Option<String>,
pub data_type: DataType,
pub tags: Vec<Tag>,
pub conditions: Vec<Condition>
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize)]
pub enum DataType {
Registry,
File,
Other
}
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum Tag {
Config,
Save,
#[default]
#[serde(other)]
Other,
}


@ -1,7 +0,0 @@
pub mod conditions;
pub mod metadata;
pub mod resolver;
pub mod placeholder;
pub mod normalise;
pub mod path;
pub mod backup_manager;


@ -1,162 +0,0 @@
use std::sync::LazyLock;
use regex::Regex;
use crate::process::process_manager::Platform;
use super::placeholder::*;
pub fn normalize(path: &str, os: Platform) -> String {
let mut path = path.trim().trim_end_matches(['/', '\\']).replace('\\', "/");
if path == "~" || path.starts_with("~/") {
path = path.replacen('~', HOME, 1);
}
static CONSECUTIVE_SLASHES: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"/{2,}").unwrap());
static UNNECESSARY_DOUBLE_STAR_1: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"([^/*])\*{2,}").unwrap());
static UNNECESSARY_DOUBLE_STAR_2: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\*{2,}([^/*])").unwrap());
static ENDING_WILDCARD: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(/\*)+$").unwrap());
static ENDING_DOT: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(/\.)$").unwrap());
static INTERMEDIATE_DOT: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(/\./)").unwrap());
static BLANK_SEGMENT: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(/\s+/)").unwrap());
static APP_DATA: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%appdata%").unwrap());
static APP_DATA_ROAMING: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%userprofile%/AppData/Roaming").unwrap());
static APP_DATA_LOCAL: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%localappdata%").unwrap());
static APP_DATA_LOCAL_2: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%userprofile%/AppData/Local/").unwrap());
static USER_PROFILE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%userprofile%").unwrap());
static DOCUMENTS: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?i)%userprofile%/Documents").unwrap());
for (pattern, replacement) in [
(&CONSECUTIVE_SLASHES, "/"),
(&UNNECESSARY_DOUBLE_STAR_1, "${1}*"),
(&UNNECESSARY_DOUBLE_STAR_2, "*${1}"),
(&ENDING_WILDCARD, ""),
(&ENDING_DOT, ""),
(&INTERMEDIATE_DOT, "/"),
(&BLANK_SEGMENT, "/"),
(&APP_DATA, WIN_APP_DATA),
(&APP_DATA_ROAMING, WIN_APP_DATA),
(&APP_DATA_LOCAL, WIN_LOCAL_APP_DATA),
(&APP_DATA_LOCAL_2, &format!("{}/", WIN_LOCAL_APP_DATA)),
(&USER_PROFILE, HOME),
(&DOCUMENTS, WIN_DOCUMENTS),
] {
path = pattern.replace_all(&path, replacement).to_string();
}
if os == Platform::Windows {
let documents_2: Regex = Regex::new(r"(?i)<home>/Documents").unwrap();
#[allow(clippy::single_element_loop)]
for (pattern, replacement) in [(&documents_2, WIN_DOCUMENTS)] {
path = pattern.replace_all(&path, replacement).to_string();
}
}
for (pattern, replacement) in [
("{64BitSteamID}", STORE_USER_ID),
("{Steam3AccountID}", STORE_USER_ID),
] {
path = path.replace(pattern, replacement);
}
path
}
fn too_broad(path: &str) -> bool {
println!("Path: {}", path);
use {BASE, HOME, ROOT, STORE_USER_ID, WIN_APP_DATA, WIN_DIR, WIN_DOCUMENTS, XDG_CONFIG, XDG_DATA};
let path_lower = path.to_lowercase();
for item in ALL {
if path == *item {
return true;
}
}
for item in AVOID_WILDCARDS {
if path.starts_with(&format!("{}/*", item)) || path.starts_with(&format!("{}/{}", item, STORE_USER_ID)) {
return true;
}
}
// These paths are present whether or not the game is installed.
// If possible, they should be narrowed down on the wiki.
for item in [
format!("{}/{}", BASE, STORE_USER_ID), // because `<storeUserId>` is handled as `*`
format!("{}/Documents", HOME),
format!("{}/Saved Games", HOME),
format!("{}/AppData", HOME),
format!("{}/AppData/Local", HOME),
format!("{}/AppData/Local/Packages", HOME),
format!("{}/AppData/LocalLow", HOME),
format!("{}/AppData/Roaming", HOME),
format!("{}/Documents/My Games", HOME),
format!("{}/Library/Application Support", HOME),
format!("{}/Library/Application Support/UserData", HOME),
format!("{}/Library/Preferences", HOME),
format!("{}/.renpy", HOME),
format!("{}/.renpy/persistent", HOME),
format!("{}/Library", HOME),
format!("{}/Library/RenPy", HOME),
format!("{}/Telltale Games", HOME),
format!("{}/config", ROOT),
format!("{}/MMFApplications", WIN_APP_DATA),
format!("{}/RenPy", WIN_APP_DATA),
format!("{}/RenPy/persistent", WIN_APP_DATA),
format!("{}/win.ini", WIN_DIR),
format!("{}/SysWOW64", WIN_DIR),
format!("{}/My Games", WIN_DOCUMENTS),
format!("{}/Telltale Games", WIN_DOCUMENTS),
format!("{}/unity3d", XDG_CONFIG),
format!("{}/unity3d", XDG_DATA),
"C:/Program Files".to_string(),
"C:/Program Files (x86)".to_string(),
] {
let item = item.to_lowercase();
if path_lower == item
|| path_lower.starts_with(&format!("{}/*", item))
|| path_lower.starts_with(&format!("{}/{}", item, STORE_USER_ID.to_lowercase()))
|| path_lower.starts_with(&format!("{}/savesdir", item))
{
return true;
}
}
// Drive letters:
let drives: Regex = Regex::new(r"^[a-zA-Z]:$").unwrap();
if drives.is_match(path) {
return true;
}
// Colon not for a drive letter
if path.get(2..).is_some_and(|path| path.contains(':')) {
return true;
}
// Root:
if path == "/" {
return true;
}
// Relative path wildcard:
if path.starts_with('*') {
return true;
}
false
}
pub fn usable(path: &str) -> bool {
let unprintable: Regex = Regex::new(r"(\p{Cc}|\p{Cf})").unwrap();
!path.is_empty()
&& !path.contains("{{")
&& !path.starts_with("./")
&& !path.starts_with("../")
&& !too_broad(path)
&& !unprintable.is_match(path)
}
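
For reference, a couple of hedged examples of what normalize is expected to produce, derived from the replacement table above rather than from any test in the repository:

// Illustrative expectations only.
assert_eq!(
    normalize(r"%APPDATA%\MyGame\saves\", Platform::Windows),
    "<winAppData>/MyGame/saves"
);
assert_eq!(
    normalize("~/.config/mygame//save.dat", Platform::Linux),
    "<home>/.config/mygame/save.dat"
);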


@ -1,48 +0,0 @@
use std::{path::PathBuf, sync::LazyLock};
pub enum CommonPath {
Config,
Data,
DataLocal,
DataLocalLow,
Document,
Home,
Public,
SavedGames,
}
impl CommonPath {
pub fn get(&self) -> Option<PathBuf> {
static CONFIG: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::config_dir());
static DATA: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::data_dir());
static DATA_LOCAL: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::data_local_dir());
static DOCUMENT: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::document_dir());
static HOME: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::home_dir());
static PUBLIC: LazyLock<Option<PathBuf>> = LazyLock::new(|| dirs::public_dir());
#[cfg(windows)]
static DATA_LOCAL_LOW: LazyLock<Option<PathBuf>> = LazyLock::new(|| {
known_folders::get_known_folder_path(known_folders::KnownFolder::LocalAppDataLow)
});
#[cfg(not(windows))]
static DATA_LOCAL_LOW: Option<PathBuf> = None;
#[cfg(windows)]
static SAVED_GAMES: LazyLock<Option<PathBuf>> = LazyLock::new(|| {
known_folders::get_known_folder_path(known_folders::KnownFolder::SavedGames)
});
#[cfg(not(windows))]
static SAVED_GAMES: Option<PathBuf> = None;
match self {
Self::Config => CONFIG.clone(),
Self::Data => DATA.clone(),
Self::DataLocal => DATA_LOCAL.clone(),
Self::DataLocalLow => DATA_LOCAL_LOW.clone(),
Self::Document => DOCUMENT.clone(),
Self::Home => HOME.clone(),
Self::Public => PUBLIC.clone(),
Self::SavedGames => SAVED_GAMES.clone(),
}
}
}


@ -1,51 +0,0 @@
use std::sync::LazyLock;
pub const ALL: &[&str] = &[
ROOT,
GAME,
BASE,
HOME,
STORE_USER_ID,
OS_USER_NAME,
WIN_APP_DATA,
WIN_LOCAL_APP_DATA,
WIN_DOCUMENTS,
WIN_PUBLIC,
WIN_PROGRAM_DATA,
WIN_DIR,
XDG_DATA,
XDG_CONFIG,
];
/// These are paths where `<placeholder>/*/` is suspicious.
pub const AVOID_WILDCARDS: &[&str] = &[
ROOT,
HOME,
WIN_APP_DATA,
WIN_LOCAL_APP_DATA,
WIN_DOCUMENTS,
WIN_PUBLIC,
WIN_PROGRAM_DATA,
WIN_DIR,
XDG_DATA,
XDG_CONFIG,
];
pub const ROOT: &str = "<root>"; // a directory where games are installed (configured in backup tool)
pub const GAME: &str = "<game>"; // an installDir (if defined) or the game's canonical name in the manifest
pub const BASE: &str = "<base>"; // shorthand for <root>/<game> (unless overridden by store-specific rules)
pub const HOME: &str = "<home>"; // current user's home directory in the OS (~)
pub const STORE_USER_ID: &str = "<storeUserId>"; // a store-specific id from the manifest, corresponding to the root's store type
pub const OS_USER_NAME: &str = "<osUserName>"; // current user's name in the OS
pub const WIN_APP_DATA: &str = "<winAppData>"; // %APPDATA% on Windows
pub const WIN_LOCAL_APP_DATA: &str = "<winLocalAppData>"; // %LOCALAPPDATA% on Windows
pub const WIN_LOCAL_APP_DATA_LOW: &str = "<winLocalAppDataLow>"; // <home>/AppData/LocalLow on Windows
pub const WIN_DOCUMENTS: &str = "<winDocuments>"; // <home>/Documents (f.k.a. <home>/My Documents) or a localized equivalent on Windows
pub const WIN_PUBLIC: &str = "<winPublic>"; // %PUBLIC% on Windows
pub const WIN_PROGRAM_DATA: &str = "<winProgramData>"; // %PROGRAMDATA% on Windows
pub const WIN_DIR: &str = "<winDir>"; // %WINDIR% on Windows
pub const XDG_DATA: &str = "<xdgData>"; // $XDG_DATA_HOME on Linux
pub const XDG_CONFIG: &str = "<xdgConfig>"; // $XDG_CONFIG_HOME on Linux
pub const SKIP: &str = "<skip>"; // a segment to ignore (translates to an empty path)
pub static OS_USERNAME: LazyLock<String> = LazyLock::new(|| whoami::username());


@ -1,262 +0,0 @@
use std::{
fs::{self, create_dir_all, File},
io::{self, ErrorKind, Read, Write},
path::{Path, PathBuf},
thread::sleep,
time::Duration,
};
use super::{
backup_manager::BackupHandler, conditions::Condition, metadata::GameFile, placeholder::*,
};
use log::{debug, warn};
use rustix::path::Arg;
use tempfile::tempfile;
use crate::{
database::db::GameVersion, error::backup_error::BackupError, process::process_manager::Platform,
};
use super::{backup_manager::BackupManager, metadata::CloudSaveMetadata, normalise::normalize};
pub fn resolve(meta: &mut CloudSaveMetadata) -> File {
let f = File::create_new("save").unwrap();
let compressor = zstd::Encoder::new(f, 22).unwrap();
let mut tarball = tar::Builder::new(compressor);
let manager = BackupManager::new();
for file in meta.files.iter_mut() {
let id = uuid::Uuid::new_v4().to_string();
let os = match file
.conditions
.iter()
.find_map(|p| match p {
super::conditions::Condition::Os(os) => Some(os),
_ => None,
})
.cloned()
{
Some(os) => os,
None => {
warn!(
"File {:?} could not be backed up because it did not provide an OS",
&file
);
continue;
}
};
let handler = match manager.sources.get(&(manager.current_platform, os)) {
Some(h) => *h,
None => continue,
};
let t_path = PathBuf::from(normalize(&file.path, os));
println!("{:?}", &t_path);
let path = parse_path(t_path, handler, &meta.game_version).unwrap();
let f = std::fs::metadata(&path).unwrap(); // TODO: Fix unwrap here
if f.is_dir() {
tarball.append_dir_all(&id, path).unwrap();
} else if f.is_file() {
tarball
.append_file(&id, &mut File::open(path).unwrap())
.unwrap();
}
file.id = Some(id);
}
let binding = serde_json::to_string(meta).unwrap();
let serialized = binding.as_bytes();
let mut file = tempfile().unwrap();
file.write(serialized).unwrap();
tarball.append_file("metadata", &mut file).unwrap();
tarball.into_inner().unwrap().finish().unwrap()
}
pub fn extract(file: PathBuf) -> Result<(), BackupError> {
let tmpdir = tempfile::tempdir().unwrap();
// Reopen the file for reading
let file = File::open(file).unwrap();
let decompressor = zstd::Decoder::new(file).unwrap();
let mut f = tar::Archive::new(decompressor);
f.unpack(tmpdir.path()).unwrap();
let path = tmpdir.path();
let mut manifest = File::open(path.join("metadata")).unwrap();
let mut manifest_slice = Vec::new();
manifest.read_to_end(&mut manifest_slice).unwrap();
let manifest: CloudSaveMetadata = serde_json::from_slice(&manifest_slice).unwrap();
for file in manifest.files {
let current_path = path.join(file.id.as_ref().unwrap());
let manager = BackupManager::new();
let os = match file
.conditions
.iter()
.find_map(|p| match p {
super::conditions::Condition::Os(os) => Some(os),
_ => None,
})
.cloned()
{
Some(os) => os,
None => {
warn!(
"File {:?} could not be replaced up because it did not provide an OS",
&file
);
continue;
}
};
let handler = match manager.sources.get(&(manager.current_platform, os)) {
Some(h) => *h,
None => continue,
};
let new_path = parse_path(file.path.into(), handler, &manifest.game_version)?;
create_dir_all(&new_path.parent().unwrap()).unwrap();
println!(
"Current path {:?} copying to {:?}",
&current_path, &new_path
);
copy_item(current_path, new_path).unwrap();
}
Ok(())
}
pub fn copy_item<P: AsRef<Path>>(src: P, dest: P) -> io::Result<()> {
let src_path = src.as_ref();
let dest_path = dest.as_ref();
let metadata = fs::metadata(&src_path)?;
if metadata.is_file() {
// Ensure the parent directory of the destination exists for a file copy
if let Some(parent) = dest_path.parent() {
fs::create_dir_all(parent)?;
}
fs::copy(&src_path, &dest_path)?;
} else if metadata.is_dir() {
// For directories, we call the recursive helper function.
// The destination for the recursive copy is the `dest_path` itself.
copy_dir_recursive(&src_path, &dest_path)?;
} else {
// Handle other file types like symlinks if necessary,
// for now, return an error or skip.
return Err(io::Error::new(
io::ErrorKind::Other,
format!("Source {:?} is neither a file nor a directory", src_path),
));
}
Ok(())
}
fn copy_dir_recursive(src: &Path, dest: &Path) -> io::Result<()> {
fs::create_dir_all(&dest)?;
for entry in fs::read_dir(src)? {
let entry = entry?;
let entry_path = entry.path();
let entry_file_name = match entry_path.file_name() {
Some(name) => name,
None => continue, // Skip if somehow there's no file name
};
let dest_entry_path = dest.join(entry_file_name);
let metadata = entry.metadata()?;
if metadata.is_file() {
debug!(
"Writing file {} to {}",
entry_path.display(),
dest_entry_path.display()
);
fs::copy(&entry_path, &dest_entry_path)?;
} else if metadata.is_dir() {
copy_dir_recursive(&entry_path, &dest_entry_path)?;
}
// Ignore other types like symlinks for this basic implementation
}
Ok(())
}
pub fn parse_path(
path: PathBuf,
backup_handler: &dyn BackupHandler,
game: &GameVersion,
) -> Result<PathBuf, BackupError> {
println!("Parsing: {:?}", &path);
let mut s = PathBuf::new();
for component in path.components() {
match component.as_str().unwrap() {
ROOT => s.push(backup_handler.root_translate(&path, game)?),
GAME => s.push(backup_handler.game_translate(&path, game)?),
BASE => s.push(backup_handler.base_translate(&path, game)?),
HOME => s.push(backup_handler.home_translate(&path, game)?),
STORE_USER_ID => s.push(backup_handler.store_user_id_translate(&path, game)?),
OS_USER_NAME => s.push(backup_handler.os_user_name_translate(&path, game)?),
WIN_APP_DATA => s.push(backup_handler.win_app_data_translate(&path, game)?),
WIN_LOCAL_APP_DATA => s.push(backup_handler.win_local_app_data_translate(&path, game)?),
WIN_LOCAL_APP_DATA_LOW => {
s.push(backup_handler.win_local_app_data_low_translate(&path, game)?)
}
WIN_DOCUMENTS => s.push(backup_handler.win_documents_translate(&path, game)?),
WIN_PUBLIC => s.push(backup_handler.win_public_translate(&path, game)?),
WIN_PROGRAM_DATA => s.push(backup_handler.win_program_data_translate(&path, game)?),
WIN_DIR => s.push(backup_handler.win_dir_translate(&path, game)?),
XDG_DATA => s.push(backup_handler.xdg_data_translate(&path, game)?),
XDG_CONFIG => s.push(backup_handler.xdg_config_translate(&path, game)?),
SKIP => s.push(backup_handler.skip_translate(&path, game)?),
_ => s.push(PathBuf::from(component.as_os_str())),
}
}
println!("Final line: {:?}", &s);
Ok(s)
}
pub fn test() {
let mut meta = CloudSaveMetadata {
files: vec![
GameFile {
path: String::from("<home>/favicon.png"),
id: None,
data_type: super::metadata::DataType::File,
tags: Vec::new(),
conditions: vec![Condition::Os(Platform::Linux)],
},
GameFile {
path: String::from("<home>/Documents/Pixel Art"),
id: None,
data_type: super::metadata::DataType::File,
tags: Vec::new(),
conditions: vec![Condition::Os(Platform::Linux)],
},
],
game_version: GameVersion {
game_id: String::new(),
version_name: String::new(),
platform: Platform::Linux,
launch_command: String::new(),
launch_args: Vec::new(),
launch_command_template: String::new(),
setup_command: String::new(),
setup_args: Vec::new(),
setup_command_template: String::new(),
only_setup: true,
version_index: 0,
delta: false,
umu_id_override: None,
},
save_id: String::from("aaaaaaa"),
};
//resolve(&mut meta);
extract("save".into()).unwrap();
}


@ -1,223 +0,0 @@
use std::{
fs::{self, create_dir_all},
mem::ManuallyDrop,
ops::{Deref, DerefMut},
path::PathBuf,
sync::{Arc, LazyLock, RwLockReadGuard, RwLockWriteGuard},
};
use chrono::Utc;
use log::{debug, error, info, warn};
use rustbreak::{DeSerError, DeSerializer, PathDatabase, RustbreakError};
use serde::{Serialize, de::DeserializeOwned};
use url::Url;
use crate::DB;
use super::models::data::Database;
#[cfg(not(debug_assertions))]
static DATA_ROOT_PREFIX: &'static str = "drop";
#[cfg(debug_assertions)]
static DATA_ROOT_PREFIX: &str = "drop-debug";
pub static DATA_ROOT_DIR: LazyLock<Arc<PathBuf>> = LazyLock::new(|| {
Arc::new(
dirs::data_dir()
.expect("Failed to get data dir")
.join(DATA_ROOT_PREFIX),
)
});
// Custom serializer (native_model-based) to support everything we need
#[derive(Debug, Default, Clone)]
pub struct DropDatabaseSerializer;
impl<T: native_model::Model + Serialize + DeserializeOwned> DeSerializer<T>
for DropDatabaseSerializer
{
fn serialize(&self, val: &T) -> rustbreak::error::DeSerResult<Vec<u8>> {
native_model::encode(val)
.map_err(|e| DeSerError::Internal(e.to_string()))
}
fn deserialize<R: std::io::Read>(&self, mut s: R) -> rustbreak::error::DeSerResult<T> {
let mut buf = Vec::new();
s.read_to_end(&mut buf)
.map_err(|e| rustbreak::error::DeSerError::Other(e.into()))?;
let (val, _version) = native_model::decode(buf)
.map_err(|e| DeSerError::Internal(e.to_string()))?;
Ok(val)
}
}
pub type DatabaseInterface =
rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer>;
pub trait DatabaseImpls {
fn set_up_database() -> DatabaseInterface;
fn database_is_set_up(&self) -> bool;
fn fetch_base_url(&self) -> Url;
}
impl DatabaseImpls for DatabaseInterface {
fn set_up_database() -> DatabaseInterface {
let db_path = DATA_ROOT_DIR.join("drop.db");
let games_base_dir = DATA_ROOT_DIR.join("games");
let logs_root_dir = DATA_ROOT_DIR.join("logs");
let cache_dir = DATA_ROOT_DIR.join("cache");
let pfx_dir = DATA_ROOT_DIR.join("pfx");
debug!("creating data directory at {DATA_ROOT_DIR:?}");
create_dir_all(DATA_ROOT_DIR.as_path()).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
DATA_ROOT_DIR.display(),
e
)
});
create_dir_all(&games_base_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
games_base_dir.display(),
e
)
});
create_dir_all(&logs_root_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
logs_root_dir.display(),
e
)
});
create_dir_all(&cache_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
cache_dir.display(),
e
)
});
create_dir_all(&pfx_dir).unwrap_or_else(|e| {
panic!(
"Failed to create directory {} with error {}",
pfx_dir.display(),
e
)
});
let exists = fs::exists(db_path.clone()).unwrap_or_else(|e| {
panic!(
"Failed to find if {} exists with error {}",
db_path.display(),
e
)
});
if exists {
match PathDatabase::load_from_path(db_path.clone()) {
Ok(db) => db,
Err(e) => handle_invalid_database(e, db_path, games_base_dir, cache_dir),
}
} else {
let default = Database::new(games_base_dir, None, cache_dir);
debug!("Creating database at path {}", db_path.display());
PathDatabase::create_at_path(db_path, default).expect("Database could not be created")
}
}
fn database_is_set_up(&self) -> bool {
!borrow_db_checked().base_url.is_empty()
}
fn fetch_base_url(&self) -> Url {
let handle = borrow_db_checked();
Url::parse(&handle.base_url)
.unwrap_or_else(|_| panic!("Failed to parse base url {}", handle.base_url))
}
}
// TODO: Make the error relevant rather than just assume that it's a Deserialize error
fn handle_invalid_database(
_e: RustbreakError,
db_path: PathBuf,
games_base_dir: PathBuf,
cache_dir: PathBuf,
) -> rustbreak::Database<Database, rustbreak::backend::PathBackend, DropDatabaseSerializer> {
warn!("{_e}");
let new_path = {
let time = Utc::now().timestamp();
let mut base = db_path.clone();
base.set_file_name(format!("drop.db.backup-{time}"));
base
};
info!("old database stored at: {}", new_path.to_string_lossy());
fs::rename(&db_path, &new_path).unwrap_or_else(|e| {
panic!(
"Could not rename database {} to {} with error {}",
db_path.display(),
new_path.display(),
e
)
});
let db = Database::new(games_base_dir, Some(new_path), cache_dir);
PathDatabase::create_at_path(db_path, db).expect("Database could not be created")
}
// To automatically save the database upon drop
pub struct DBRead<'a>(RwLockReadGuard<'a, Database>);
pub struct DBWrite<'a>(ManuallyDrop<RwLockWriteGuard<'a, Database>>);
impl<'a> Deref for DBWrite<'a> {
type Target = Database;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<'a> DerefMut for DBWrite<'a> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<'a> Deref for DBRead<'a> {
type Target = Database;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Drop for DBWrite<'_> {
fn drop(&mut self) {
unsafe {
ManuallyDrop::drop(&mut self.0);
}
match DB.save() {
Ok(()) => {}
Err(e) => {
error!("database failed to save with error {e}");
panic!("database failed to save with error {e}")
}
}
}
}
pub fn borrow_db_checked<'a>() -> DBRead<'a> {
match DB.borrow_data() {
Ok(data) => DBRead(data),
Err(e) => {
error!("database borrow failed with error {e}");
panic!("database borrow failed with error {e}");
}
}
}
pub fn borrow_db_mut_checked<'a>() -> DBWrite<'a> {
match DB.borrow_data_mut() {
Ok(data) => DBWrite(ManuallyDrop::new(data)),
Err(e) => {
error!("database borrow mut failed with error {e}");
panic!("database borrow mut failed with error {e}");
}
}
}
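
The guard types above exist so that dropping a write borrow persists the database. A small usage sketch, mirroring how the autostart setting is toggled elsewhere in this commit:

// Hypothetical call site; DBWrite saves (and panics on failure) when it goes out of scope.
{
    let mut db_handle = borrow_db_mut_checked();
    db_handle.settings.autostart = true;
} // guard dropped here -> DB.save() runs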


@ -1,21 +0,0 @@
use serde::Serialize;
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SystemData {
client_id: String,
base_url: String,
data_dir: String,
log_level: String,
}
impl SystemData {
pub fn new(client_id: String, base_url: String, data_dir: String, log_level: String) -> Self {
Self {
client_id,
base_url,
data_dir,
log_level,
}
}
}


@ -1,5 +0,0 @@
pub mod commands;
pub mod db;
pub mod debug;
pub mod models;
pub mod scan;


@ -1,362 +0,0 @@
pub mod data {
use std::{hash::Hash, path::PathBuf};
use native_model::native_model;
use serde::{Deserialize, Serialize};
// NOTE: Within each version, you should NEVER use these types.
// Declare it using the actual version that it is from, i.e. v1::Settings rather than just Settings from here
pub type GameVersion = v1::GameVersion;
pub type Database = v3::Database;
pub type Settings = v1::Settings;
pub type DatabaseAuth = v1::DatabaseAuth;
pub type GameDownloadStatus = v2::GameDownloadStatus;
pub type ApplicationTransientStatus = v1::ApplicationTransientStatus;
/**
* Need to be universally accessible by the ID, and the version is just a couple sprinkles on top
*/
pub type DownloadableMetadata = v1::DownloadableMetadata;
pub type DownloadType = v1::DownloadType;
pub type DatabaseApplications = v2::DatabaseApplications;
// pub type DatabaseCompatInfo = v2::DatabaseCompatInfo;
use std::collections::HashMap;
impl PartialEq for DownloadableMetadata {
fn eq(&self, other: &Self) -> bool {
self.id == other.id && self.download_type == other.download_type
}
}
impl Hash for DownloadableMetadata {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.id.hash(state);
self.download_type.hash(state);
}
}
mod v1 {
use crate::process::process_manager::Platform;
use serde_with::serde_as;
use std::{collections::HashMap, path::PathBuf};
use super::{Deserialize, Serialize, native_model};
fn default_template() -> String {
"{}".to_owned()
}
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(rename_all = "camelCase")]
#[native_model(id = 2, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
pub struct GameVersion {
pub game_id: String,
pub version_name: String,
pub platform: Platform,
pub launch_command: String,
pub launch_args: Vec<String>,
#[serde(default = "default_template")]
pub launch_command_template: String,
pub setup_command: String,
pub setup_args: Vec<String>,
#[serde(default = "default_template")]
pub setup_command_template: String,
pub only_setup: bool,
pub version_index: usize,
pub delta: bool,
pub umu_id_override: Option<String>,
}
#[serde_as]
#[derive(Serialize, Clone, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
#[native_model(id = 3, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
pub struct DatabaseApplications {
pub install_dirs: Vec<PathBuf>,
// Guaranteed to exist if the game also exists in the app state map
pub game_statuses: HashMap<String, GameDownloadStatus>,
pub game_versions: HashMap<String, HashMap<String, GameVersion>>,
pub installed_game_version: HashMap<String, DownloadableMetadata>,
#[serde(skip)]
pub transient_statuses: HashMap<DownloadableMetadata, ApplicationTransientStatus>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(rename_all = "camelCase")]
#[native_model(id = 4, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
pub struct Settings {
pub autostart: bool,
pub max_download_threads: usize,
pub force_offline: bool, // ... other settings ...
}
impl Default for Settings {
fn default() -> Self {
Self {
autostart: false,
max_download_threads: 4,
force_offline: false,
}
}
}
// Strings are version names for a particular game
#[derive(Serialize, Clone, Deserialize)]
#[serde(tag = "type")]
#[native_model(id = 5, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
pub enum GameDownloadStatus {
Remote {},
SetupRequired {
version_name: String,
install_dir: String,
},
Installed {
version_name: String,
install_dir: String,
},
}
// Stuff that shouldn't be synced to disk
#[derive(Clone, Serialize, Deserialize, Debug)]
pub enum ApplicationTransientStatus {
Queued { version_name: String },
Downloading { version_name: String },
Uninstalling {},
Updating { version_name: String },
Validating { version_name: String },
Running {},
}
#[derive(serde::Serialize, Clone, Deserialize)]
#[native_model(id = 6, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
pub struct DatabaseAuth {
pub private: String,
pub cert: String,
pub client_id: String,
pub web_token: Option<String>,
}
#[native_model(id = 8, version = 1)]
#[derive(
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Clone, Copy,
)]
pub enum DownloadType {
Game,
Tool,
Dlc,
Mod,
}
#[native_model(id = 7, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
#[derive(Debug, Eq, PartialOrd, Ord, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DownloadableMetadata {
pub id: String,
pub version: Option<String>,
pub download_type: DownloadType,
}
impl DownloadableMetadata {
pub fn new(id: String, version: Option<String>, download_type: DownloadType) -> Self {
Self {
id,
version,
download_type,
}
}
}
#[native_model(id = 1, version = 1)]
#[derive(Serialize, Deserialize, Clone, Default)]
pub struct Database {
#[serde(default)]
pub settings: Settings,
pub auth: Option<DatabaseAuth>,
pub base_url: String,
pub applications: DatabaseApplications,
pub prev_database: Option<PathBuf>,
pub cache_dir: PathBuf,
}
}
mod v2 {
use std::{collections::HashMap, path::PathBuf};
use serde_with::serde_as;
use super::{
Deserialize, Serialize, native_model, v1,
};
#[native_model(id = 1, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from = v1::Database)]
#[derive(Serialize, Deserialize, Clone, Default)]
pub struct Database {
#[serde(default)]
pub settings: v1::Settings,
pub auth: Option<v1::DatabaseAuth>,
pub base_url: String,
pub applications: v1::DatabaseApplications,
#[serde(skip)]
pub prev_database: Option<PathBuf>,
pub cache_dir: PathBuf,
pub compat_info: Option<DatabaseCompatInfo>,
}
#[native_model(id = 9, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
#[derive(Serialize, Deserialize, Clone, Default)]
pub struct DatabaseCompatInfo {
pub umu_installed: bool,
}
impl From<v1::Database> for Database {
fn from(value: v1::Database) -> Self {
Self {
settings: value.settings,
auth: value.auth,
base_url: value.base_url,
applications: value.applications,
prev_database: value.prev_database,
cache_dir: value.cache_dir,
compat_info: None,
}
}
}
// Strings are version names for a particular game
#[derive(Serialize, Clone, Deserialize, Debug)]
#[serde(tag = "type")]
#[native_model(id = 5, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from = v1::GameDownloadStatus)]
pub enum GameDownloadStatus {
Remote {},
SetupRequired {
version_name: String,
install_dir: String,
},
Installed {
version_name: String,
install_dir: String,
},
PartiallyInstalled {
version_name: String,
install_dir: String,
},
}
impl From<v1::GameDownloadStatus> for GameDownloadStatus {
fn from(value: v1::GameDownloadStatus) -> Self {
match value {
v1::GameDownloadStatus::Remote {} => Self::Remote {},
v1::GameDownloadStatus::SetupRequired {
version_name,
install_dir,
} => Self::SetupRequired {
version_name,
install_dir,
},
v1::GameDownloadStatus::Installed {
version_name,
install_dir,
} => Self::Installed {
version_name,
install_dir,
},
}
}
}
#[serde_as]
#[derive(Serialize, Clone, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
#[native_model(id = 3, version = 2, with = native_model::rmp_serde_1_3::RmpSerde, from=v1::DatabaseApplications)]
pub struct DatabaseApplications {
pub install_dirs: Vec<PathBuf>,
// Guaranteed to exist if the game also exists in the app state map
pub game_statuses: HashMap<String, GameDownloadStatus>,
pub game_versions: HashMap<String, HashMap<String, v1::GameVersion>>,
pub installed_game_version: HashMap<String, v1::DownloadableMetadata>,
#[serde(skip)]
pub transient_statuses: HashMap<v1::DownloadableMetadata, v1::ApplicationTransientStatus>,
}
impl From<v1::DatabaseApplications> for DatabaseApplications {
fn from(value: v1::DatabaseApplications) -> Self {
Self {
game_statuses: value
.game_statuses
.into_iter()
.map(|x| (x.0, x.1.into()))
.collect::<HashMap<String, GameDownloadStatus>>(),
install_dirs: value.install_dirs,
game_versions: value.game_versions,
installed_game_version: value.installed_game_version,
transient_statuses: value.transient_statuses,
}
}
}
}
mod v3 {
use std::path::PathBuf;
use super::{
Deserialize, Serialize,
native_model, v2, v1,
};
#[native_model(id = 1, version = 3, with = native_model::rmp_serde_1_3::RmpSerde, from = v2::Database)]
#[derive(Serialize, Deserialize, Clone, Default)]
pub struct Database {
#[serde(default)]
pub settings: v1::Settings,
pub auth: Option<v1::DatabaseAuth>,
pub base_url: String,
pub applications: v2::DatabaseApplications,
#[serde(skip)]
pub prev_database: Option<PathBuf>,
pub cache_dir: PathBuf,
pub compat_info: Option<v2::DatabaseCompatInfo>,
}
impl From<v2::Database> for Database {
fn from(value: v2::Database) -> Self {
Self {
settings: value.settings,
auth: value.auth,
base_url: value.base_url,
applications: value.applications.into(),
prev_database: value.prev_database,
cache_dir: value.cache_dir,
compat_info: None,
}
}
}
}
impl Database {
pub fn new<T: Into<PathBuf>>(
games_base_dir: T,
prev_database: Option<PathBuf>,
cache_dir: PathBuf,
) -> Self {
Self {
applications: DatabaseApplications {
install_dirs: vec![games_base_dir.into()],
game_statuses: HashMap::new(),
game_versions: HashMap::new(),
installed_game_version: HashMap::new(),
transient_statuses: HashMap::new(),
},
prev_database,
base_url: String::new(),
auth: None,
settings: Settings::default(),
cache_dir,
compat_info: None,
}
}
}
}

View File

@ -1,52 +0,0 @@
use std::fs;
use log::warn;
use crate::{
database::{
db::borrow_db_mut_checked,
models::data::{DownloadType, DownloadableMetadata},
},
games::{
downloads::drop_data::{DropData, DROP_DATA_PATH},
library::set_partially_installed_db,
},
};
pub fn scan_install_dirs() {
let mut db_lock = borrow_db_mut_checked();
for install_dir in db_lock.applications.install_dirs.clone() {
let Ok(files) = fs::read_dir(install_dir) else {
continue;
};
for game in files.into_iter().flatten() {
let drop_data_file = game.path().join(DROP_DATA_PATH);
if !drop_data_file.exists() {
continue;
}
let game_id = game.file_name().display().to_string();
let Ok(drop_data) = DropData::read(&game.path()) else {
warn!(
".dropdata exists for {}, but couldn't read it. is it corrupted?",
game.file_name().display()
);
continue;
};
if db_lock.applications.game_statuses.contains_key(&game_id) {
continue;
}
let metadata = DownloadableMetadata::new(
drop_data.game_id,
Some(drop_data.game_version),
DownloadType::Game,
);
set_partially_installed_db(
&mut db_lock,
&metadata,
drop_data.base_path.to_str().unwrap().to_string(),
None,
);
}
}
}

View File

@ -1,7 +1,5 @@
use std::sync::Mutex;
use crate::{AppState, database::models::data::DownloadableMetadata, lock};
#[tauri::command]
pub fn pause_downloads(state: tauri::State<'_, Mutex<AppState>>) {
lock!(state).download_manager.pause_downloads();

View File

@ -1,393 +0,0 @@
use std::{
collections::HashMap,
sync::{
Arc, Mutex,
mpsc::{Receiver, Sender, channel},
},
thread::{JoinHandle, spawn},
};
use log::{debug, error, info, warn};
use tauri::{AppHandle, Emitter};
use crate::{
app_emit, database::models::data::DownloadableMetadata, download_manager::download_manager_frontend::DownloadStatus, error::application_download_error::ApplicationDownloadError, games::library::{QueueUpdateEvent, QueueUpdateEventQueueData, StatsUpdateEvent}, lock, send
};
use super::{
download_manager_frontend::{DownloadManager, DownloadManagerSignal, DownloadManagerStatus},
downloadable::Downloadable,
util::{
download_thread_control_flag::{DownloadThreadControl, DownloadThreadControlFlag},
progress_object::ProgressObject,
queue::Queue,
},
};
pub type DownloadAgent = Arc<Box<dyn Downloadable + Send + Sync>>;
pub type CurrentProgressObject = Arc<Mutex<Option<Arc<ProgressObject>>>>;
/*
Welcome to the download manager, the most overengineered, glorious piece of bullshit.
The download manager takes a queue of ids and their associated
DownloadAgents, and then, one-by-one, executes them. It provides an interface
to interact with the currently downloading agent, and manage the queue.
When the DownloadManager is initialised, it hands back a reference through which
instructions can be issued (the DownloadManagerInterface), but other than that,
it runs without any sort of interruption.
It does this by opening up two data structures. The primary one is the
command_receiver, an mpsc (multi-producer, single-consumer) channel which allows
commands to be sent from the Interface and queued up for the Manager to process.
These have been mapped in the DownloadManagerSignal docs.
The other way to interact with the DownloadManager is via the download_queue,
which is just a collection of ids which may be rearranged to suit
whichever download queue order is required.
+----------------------------------------------------------------------------+
| DO NOT ATTEMPT TO ADD OR REMOVE FROM THE QUEUE WITHOUT USING SIGNALS!! |
| THIS WILL CAUSE A DESYNC BETWEEN THE DOWNLOAD AGENT REGISTRY AND THE QUEUE |
| WHICH HAS NOT BEEN ACCOUNTED FOR |
+----------------------------------------------------------------------------+
This download queue does not actually own any of the DownloadAgents. It is
simply an id-based reference system. The actual Agents are stored in the
download_agent_registry HashMap, as ordering is no issue here. This is why
appending or removing from the download_queue must be done via signals.
Behold, my madness - quexeky
*/
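// An illustrative sketch (not code from this module): `agent` stands in for any
// Arc<Box<dyn Downloadable + Send + Sync>> built elsewhere. All queue mutation
// goes through signals, issued here via the DownloadManager interface:
//
//     download_manager.queue_download(agent)?; // sends Queue, then Go
//     download_manager.cancel(meta);           // sends Cancel for `meta`
//
// Both calls only emit DownloadManagerSignals; the manager thread applies them,
// which is what keeps the registry and the queue in sync.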
pub struct DownloadManagerBuilder {
download_agent_registry: HashMap<DownloadableMetadata, DownloadAgent>,
download_queue: Queue,
command_receiver: Receiver<DownloadManagerSignal>,
sender: Sender<DownloadManagerSignal>,
progress: CurrentProgressObject,
status: Arc<Mutex<DownloadManagerStatus>>,
app_handle: AppHandle,
current_download_thread: Mutex<Option<JoinHandle<()>>>,
active_control_flag: Option<DownloadThreadControl>,
}
impl DownloadManagerBuilder {
pub fn build(app_handle: AppHandle) -> DownloadManager {
let queue = Queue::new();
let (command_sender, command_receiver) = channel();
let active_progress = Arc::new(Mutex::new(None));
let status = Arc::new(Mutex::new(DownloadManagerStatus::Empty));
let manager = Self {
download_agent_registry: HashMap::new(),
download_queue: queue.clone(),
command_receiver,
status: status.clone(),
sender: command_sender.clone(),
progress: active_progress.clone(),
app_handle,
current_download_thread: Mutex::new(None),
active_control_flag: None,
};
let terminator = spawn(|| manager.manage_queue());
DownloadManager::new(terminator, queue, active_progress, command_sender)
}
fn set_status(&self, status: DownloadManagerStatus) {
*lock!(self.status) = status;
}
fn remove_and_cleanup_front_download(&mut self, meta: &DownloadableMetadata) -> DownloadAgent {
self.download_queue.pop_front();
let download_agent = self.download_agent_registry.remove(meta).unwrap();
self.cleanup_current_download();
download_agent
}
// CAREFUL WITH THIS FUNCTION
// Make sure the download thread is terminated
fn cleanup_current_download(&mut self) {
self.active_control_flag = None;
*lock!(self.progress) = None;
let mut download_thread_lock = lock!(self.current_download_thread);
if let Some(unfinished_thread) = download_thread_lock.take()
&& !unfinished_thread.is_finished()
{
unfinished_thread.join().unwrap();
}
drop(download_thread_lock);
}
fn stop_and_wait_current_download(&self) -> bool {
self.set_status(DownloadManagerStatus::Paused);
if let Some(current_flag) = &self.active_control_flag {
current_flag.set(DownloadThreadControlFlag::Stop);
}
let mut download_thread_lock = lock!(self.current_download_thread);
if let Some(current_download_thread) = download_thread_lock.take() {
return current_download_thread.join().is_ok();
};
true
}
fn manage_queue(mut self) -> Result<(), ()> {
loop {
let signal = match self.command_receiver.recv() {
Ok(signal) => signal,
Err(_) => return Err(()),
};
match signal {
DownloadManagerSignal::Go => {
self.manage_go_signal();
}
DownloadManagerSignal::Stop => {
self.manage_stop_signal();
}
DownloadManagerSignal::Completed(meta) => {
self.manage_completed_signal(meta);
}
DownloadManagerSignal::Queue(download_agent) => {
self.manage_queue_signal(download_agent);
}
DownloadManagerSignal::Error(e) => {
self.manage_error_signal(e);
}
DownloadManagerSignal::UpdateUIQueue => {
self.push_ui_queue_update();
}
DownloadManagerSignal::UpdateUIStats(kbs, time) => {
self.push_ui_stats_update(kbs, time);
}
DownloadManagerSignal::Finish => {
self.stop_and_wait_current_download();
return Ok(());
}
DownloadManagerSignal::Cancel(meta) => {
self.manage_cancel_signal(&meta);
}
}
}
}
fn manage_queue_signal(&mut self, download_agent: DownloadAgent) {
debug!("got signal Queue");
let meta = download_agent.metadata();
debug!("queue metadata: {meta:?}");
if self.download_queue.exists(meta.clone()) {
warn!("download with same ID already exists");
return;
}
download_agent.on_queued(&self.app_handle);
self.download_queue.append(meta.clone());
self.download_agent_registry.insert(meta, download_agent);
send!(self.sender, DownloadManagerSignal::UpdateUIQueue);
}
fn manage_go_signal(&mut self) {
debug!("got signal Go");
if self.download_agent_registry.is_empty() {
debug!(
"Download agent registry: {:?}",
self.download_agent_registry.len()
);
return;
}
debug!("current download queue: {:?}", self.download_queue.read());
let agent_data = if let Some(agent_data) = self.download_queue.read().front() {
agent_data.clone()
} else {
return;
};
let download_agent = self
.download_agent_registry
.get(&agent_data)
.unwrap()
.clone();
let status = download_agent.status();
// This download is already going
if status != DownloadStatus::Queued {
return;
}
// Ensure all others are marked as queued
for agent in self.download_agent_registry.values() {
if agent.metadata() != agent_data && agent.status() != DownloadStatus::Queued {
agent.on_queued(&self.app_handle);
}
}
info!("starting download for {agent_data:?}");
self.active_control_flag = Some(download_agent.control_flag());
let sender = self.sender.clone();
let mut download_thread_lock = lock!(self.current_download_thread);
let app_handle = self.app_handle.clone();
*download_thread_lock = Some(spawn(move || {
loop {
let download_result = match download_agent.download(&app_handle) {
// Ok(true) is for completed and exited properly
Ok(v) => v,
Err(e) => {
error!("download {:?} has error {}", download_agent.metadata(), &e);
download_agent.on_error(&app_handle, &e);
send!(sender, DownloadManagerSignal::Error(e));
return;
}
};
// If the download gets cancelled, return immediately;
// on_cancelled has already been called for us earlier
if !download_result {
return;
}
if download_agent.control_flag().get() == DownloadThreadControlFlag::Stop {
return;
}
let validate_result = match download_agent.validate(&app_handle) {
Ok(v) => v,
Err(e) => {
error!(
"download {:?} has validation error {}",
download_agent.metadata(),
&e
);
download_agent.on_error(&app_handle, &e);
send!(sender, DownloadManagerSignal::Error(e));
return;
}
};
if download_agent.control_flag().get() == DownloadThreadControlFlag::Stop {
return;
}
if validate_result {
download_agent.on_complete(&app_handle);
send!(sender, DownloadManagerSignal::Completed(download_agent.metadata()));
send!(sender, DownloadManagerSignal::UpdateUIQueue);
return;
}
}
}));
self.set_status(DownloadManagerStatus::Downloading);
let active_control_flag = self.active_control_flag.clone().unwrap();
active_control_flag.set(DownloadThreadControlFlag::Go);
}
fn manage_stop_signal(&mut self) {
debug!("got signal Stop");
if let Some(active_control_flag) = self.active_control_flag.clone() {
self.set_status(DownloadManagerStatus::Paused);
active_control_flag.set(DownloadThreadControlFlag::Stop);
}
}
fn manage_completed_signal(&mut self, meta: DownloadableMetadata) {
debug!("got signal Completed");
if let Some(interface) = self.download_queue.read().front()
&& interface == &meta
{
self.remove_and_cleanup_front_download(&meta);
}
self.push_ui_queue_update();
send!(self.sender, DownloadManagerSignal::Go);
}
fn manage_error_signal(&mut self, error: ApplicationDownloadError) {
debug!("got signal Error");
if let Some(metadata) = self.download_queue.read().front()
&& let Some(current_agent) = self.download_agent_registry.get(metadata)
{
current_agent.on_error(&self.app_handle, &error);
self.stop_and_wait_current_download();
self.remove_and_cleanup_front_download(metadata);
}
self.push_ui_queue_update();
self.set_status(DownloadManagerStatus::Error);
}
fn manage_cancel_signal(&mut self, meta: &DownloadableMetadata) {
debug!("got signal Cancel");
// If the current download is the one we're tryna cancel
if let Some(current_metadata) = self.download_queue.read().front()
&& current_metadata == meta
&& let Some(current_download) = self.download_agent_registry.get(current_metadata)
{
self.set_status(DownloadManagerStatus::Paused);
current_download.on_cancelled(&self.app_handle);
self.stop_and_wait_current_download();
self.download_queue.pop_front();
self.cleanup_current_download();
self.download_agent_registry.remove(meta);
debug!("current download queue: {:?}", self.download_queue.read());
}
// else just cancel it
else if let Some(download_agent) = self.download_agent_registry.get(meta) {
let index = self.download_queue.get_by_meta(meta);
if let Some(index) = index {
download_agent.on_cancelled(&self.app_handle);
let _ = self.download_queue.edit().remove(index);
let removed = self.download_agent_registry.remove(meta);
debug!(
"removed {:?} from queue {:?}",
removed.map(|x| x.metadata()),
self.download_queue.read()
);
}
}
self.sender.send(DownloadManagerSignal::Go).unwrap();
self.push_ui_queue_update();
}
fn push_ui_stats_update(&self, kbs: usize, time: usize) {
let event_data = StatsUpdateEvent { speed: kbs, time };
app_emit!(self.app_handle, "update_stats", event_data);
}
fn push_ui_queue_update(&self) {
let queue = &self.download_queue.read();
let queue_objs = queue
.iter()
.map(|key| {
let val = self.download_agent_registry.get(key).unwrap();
QueueUpdateEventQueueData {
meta: DownloadableMetadata::clone(key),
status: val.status(),
progress: val.progress().get_progress(),
current: val.progress().sum(),
max: val.progress().get_max(),
}
})
.collect();
let event_data = QueueUpdateEvent { queue: queue_objs };
app_emit!(self.app_handle, "update_queue", event_data);
}
}

View File

@ -1,183 +0,0 @@
use std::{
any::Any,
collections::VecDeque,
fmt::Debug,
sync::{
Mutex, MutexGuard,
mpsc::{SendError, Sender},
},
thread::JoinHandle,
};
use log::{debug, info};
use serde::Serialize;
use crate::{
database::models::data::DownloadableMetadata,
error::application_download_error::ApplicationDownloadError, lock, send,
};
use super::{
download_manager_builder::{CurrentProgressObject, DownloadAgent},
util::queue::Queue,
};
pub enum DownloadManagerSignal {
/// Resumes (or starts) the `DownloadManager`
Go,
/// Pauses the `DownloadManager`
Stop,
/// Called when a `DownloadAgent` has fully completed a download.
Completed(DownloadableMetadata),
/// Generates and appends a `DownloadAgent`
/// to the registry and queue
Queue(DownloadAgent),
/// Tells the Manager to stop the current
/// download, sync everything to disk, and
/// then exit
Finish,
/// Stops, removes, and tells a download to cleanup
Cancel(DownloadableMetadata),
/// Any error which occurs in the agent
Error(ApplicationDownloadError),
/// Pushes UI update
UpdateUIQueue,
UpdateUIStats(usize, usize), //kb/s and seconds
}
#[derive(Debug)]
pub enum DownloadManagerStatus {
Downloading,
Paused,
Empty,
Error,
}
impl Serialize for DownloadManagerStatus {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&format!["{self:?}"])
}
}
#[derive(Serialize, Clone, Debug, PartialEq)]
pub enum DownloadStatus {
Queued,
Downloading,
Validating,
Error,
}
/// Accessible front-end for the `DownloadManager`
///
/// The system works entirely through signals, both internally and externally,
/// all of which are accessible through the `DownloadManagerSignal` type, but
/// should not be used directly. Rather, signals are abstracted through this
/// interface.
///
/// The actual download queue may be accessed through the `.edit()` function,
/// which provides raw access to the underlying queue.
/// THIS EDITING IS BLOCKING!!!
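///
/// A minimal usage sketch (illustrative only; `manager` and `agent` are
/// placeholders for values constructed elsewhere via the builder):
/// ```ignore
/// manager.queue_download(agent)?;  // sends Queue, then Go
/// manager.pause_downloads();       // sends Stop
/// manager.resume_downloads();      // sends Go
/// ```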
pub struct DownloadManager {
terminator: Mutex<Option<JoinHandle<Result<(), ()>>>>,
download_queue: Queue,
progress: CurrentProgressObject,
command_sender: Sender<DownloadManagerSignal>,
}
#[allow(dead_code)]
impl DownloadManager {
pub fn new(
terminator: JoinHandle<Result<(), ()>>,
download_queue: Queue,
progress: CurrentProgressObject,
command_sender: Sender<DownloadManagerSignal>,
) -> Self {
Self {
terminator: Mutex::new(Some(terminator)),
download_queue,
progress,
command_sender,
}
}
pub fn queue_download(
&self,
download: DownloadAgent,
) -> Result<(), SendError<DownloadManagerSignal>> {
info!("creating download with meta {:?}", download.metadata());
self.command_sender
.send(DownloadManagerSignal::Queue(download))?;
self.command_sender.send(DownloadManagerSignal::Go)
}
pub fn edit(&self) -> MutexGuard<'_, VecDeque<DownloadableMetadata>> {
self.download_queue.edit()
}
pub fn read_queue(&self) -> VecDeque<DownloadableMetadata> {
self.download_queue.read()
}
pub fn get_current_download_progress(&self) -> Option<f64> {
let progress_object = (*lock!(self.progress)).clone()?;
Some(progress_object.get_progress())
}
pub fn rearrange_string(&self, meta: &DownloadableMetadata, new_index: usize) {
let mut queue = self.edit();
let current_index = get_index_from_id(&mut queue, meta).expect("Failed to get meta index from id");
let to_move = queue.remove(current_index).expect("Failed to remove meta at index from queue");
queue.insert(new_index, to_move);
send!(self.command_sender, DownloadManagerSignal::UpdateUIQueue);
}
pub fn cancel(&self, meta: DownloadableMetadata) {
send!(self.command_sender, DownloadManagerSignal::Cancel(meta));
}
pub fn rearrange(&self, current_index: usize, new_index: usize) {
if current_index == new_index {
return;
}
let needs_pause = current_index == 0 || new_index == 0;
if needs_pause {
send!(self.command_sender, DownloadManagerSignal::Stop);
}
debug!("moving download at index {current_index} to index {new_index}");
let mut queue = self.edit();
let to_move = queue.remove(current_index).expect("Failed to remove download at current_index from queue");
queue.insert(new_index, to_move);
drop(queue);
if needs_pause {
send!(self.command_sender, DownloadManagerSignal::Go);
}
send!(self.command_sender, DownloadManagerSignal::UpdateUIQueue);
send!(self.command_sender, DownloadManagerSignal::Go);
}
pub fn pause_downloads(&self) {
send!(self.command_sender, DownloadManagerSignal::Stop);
}
pub fn resume_downloads(&self) {
send!(self.command_sender, DownloadManagerSignal::Go);
}
pub fn ensure_terminated(&self) -> Result<Result<(), ()>, Box<dyn Any + Send>> {
send!(self.command_sender, DownloadManagerSignal::Finish);
let terminator = lock!(self.terminator).take();
terminator.unwrap().join()
}
pub fn get_sender(&self) -> Sender<DownloadManagerSignal> {
self.command_sender.clone()
}
}
/// Takes in the locked value from `.edit()` and attempts to
/// get the index of whatever id is passed in
fn get_index_from_id(
queue: &mut MutexGuard<'_, VecDeque<DownloadableMetadata>>,
meta: &DownloadableMetadata,
) -> Option<usize> {
queue
.iter()
.position(|download_agent| download_agent == meta)
}

View File

@ -1,33 +0,0 @@
use std::sync::Arc;
use tauri::AppHandle;
use crate::{
database::models::data::DownloadableMetadata,
error::application_download_error::ApplicationDownloadError,
};
use super::{
download_manager_frontend::DownloadStatus,
util::{download_thread_control_flag::DownloadThreadControl, progress_object::ProgressObject},
};
/**
* Downloadables are responsible for managing their specific object's download state
* e.g., the GameDownloadAgent is responsible for pushing game updates
*
* But the download manager manages the queue state
*/
pub trait Downloadable: Send + Sync {
fn download(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError>;
fn validate(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError>;
fn progress(&self) -> Arc<ProgressObject>;
fn control_flag(&self) -> DownloadThreadControl;
fn status(&self) -> DownloadStatus;
fn metadata(&self) -> DownloadableMetadata;
fn on_queued(&self, app_handle: &AppHandle);
fn on_error(&self, app_handle: &AppHandle, error: &ApplicationDownloadError);
fn on_complete(&self, app_handle: &AppHandle);
fn on_cancelled(&self, app_handle: &AppHandle);
}
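// A minimal illustrative implementor (a sketch; `NoopDownload` is not part of
// this codebase). It treats every download as already complete, which shows the
// shape of the contract without doing any real work:
struct NoopDownload {
meta: DownloadableMetadata,
flag: DownloadThreadControl,
progress: Arc<ProgressObject>,
}
impl Downloadable for NoopDownload {
fn download(&self, _app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
Ok(true) // nothing to fetch; report a clean completion
}
fn validate(&self, _app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
Ok(true) // nothing to check either
}
fn progress(&self) -> Arc<ProgressObject> {
self.progress.clone()
}
fn control_flag(&self) -> DownloadThreadControl {
self.flag.clone()
}
fn status(&self) -> DownloadStatus {
DownloadStatus::Queued
}
fn metadata(&self) -> DownloadableMetadata {
self.meta.clone()
}
fn on_queued(&self, _app_handle: &AppHandle) {}
fn on_error(&self, _app_handle: &AppHandle, _error: &ApplicationDownloadError) {}
fn on_complete(&self, _app_handle: &AppHandle) {}
fn on_cancelled(&self, _app_handle: &AppHandle) {}
}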

View File

@ -1,5 +0,0 @@
pub mod commands;
pub mod download_manager_builder;
pub mod download_manager_frontend;
pub mod downloadable;
pub mod util;

View File

@ -1,46 +0,0 @@
use std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
};
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub enum DownloadThreadControlFlag {
Stop,
Go,
}
/// Go => true
/// Stop => false
impl From<DownloadThreadControlFlag> for bool {
fn from(value: DownloadThreadControlFlag) -> Self {
match value {
DownloadThreadControlFlag::Go => true,
DownloadThreadControlFlag::Stop => false,
}
}
}
/// true => Go
/// false => Stop
impl From<bool> for DownloadThreadControlFlag {
fn from(value: bool) -> Self {
if value { DownloadThreadControlFlag::Go } else { DownloadThreadControlFlag::Stop }
}
}
#[derive(Clone)]
pub struct DownloadThreadControl {
inner: Arc<AtomicBool>,
}
impl DownloadThreadControl {
pub fn new(flag: DownloadThreadControlFlag) -> Self {
Self {
inner: Arc::new(AtomicBool::new(flag.into())),
}
}
pub fn get(&self) -> DownloadThreadControlFlag {
self.inner.load(Ordering::Acquire).into()
}
pub fn set(&self, flag: DownloadThreadControlFlag) {
self.inner.store(flag.into(), Ordering::Release);
}
}
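// Illustrative usage (a sketch, not code from this module): clones share the
// same underlying AtomicBool, so a worker thread can poll get() while the
// manager flips the flag:
//
//     let flag = DownloadThreadControl::new(DownloadThreadControlFlag::Go);
//     let worker_flag = flag.clone();
//     // in the worker: if worker_flag.get() == DownloadThreadControlFlag::Stop { return; }
//     flag.set(DownloadThreadControlFlag::Stop);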

View File

@ -1,4 +0,0 @@
pub mod download_thread_control_flag;
pub mod progress_object;
pub mod queue;
pub mod rolling_progress_updates;

View File

@ -1,155 +0,0 @@
use std::{
sync::{
Arc, Mutex,
atomic::{AtomicUsize, Ordering},
mpsc::Sender,
},
time::{Duration, Instant},
};
use atomic_instant_full::AtomicInstant;
use throttle_my_fn::throttle;
use crate::{download_manager::download_manager_frontend::DownloadManagerSignal, lock, send};
use super::rolling_progress_updates::RollingProgressWindow;
#[derive(Clone)]
pub struct ProgressObject {
max: Arc<Mutex<usize>>,
progress_instances: Arc<Mutex<Vec<Arc<AtomicUsize>>>>,
start: Arc<Mutex<Instant>>,
sender: Sender<DownloadManagerSignal>,
//last_update: Arc<RwLock<Instant>>,
last_update_time: Arc<AtomicInstant>,
bytes_last_update: Arc<AtomicUsize>,
rolling: RollingProgressWindow<1000>,
}
#[derive(Clone)]
pub struct ProgressHandle {
progress: Arc<AtomicUsize>,
progress_object: Arc<ProgressObject>,
}
impl ProgressHandle {
pub fn new(progress: Arc<AtomicUsize>, progress_object: Arc<ProgressObject>) -> Self {
Self {
progress,
progress_object,
}
}
pub fn set(&self, amount: usize) {
self.progress.store(amount, Ordering::Release);
}
pub fn add(&self, amount: usize) {
self.progress
.fetch_add(amount, std::sync::atomic::Ordering::AcqRel);
calculate_update(&self.progress_object);
}
pub fn skip(&self, amount: usize) {
self.progress
.fetch_add(amount, std::sync::atomic::Ordering::Acquire);
// Offset the bytes at last offset by this amount
self.progress_object
.bytes_last_update
.fetch_add(amount, Ordering::Acquire);
// Don't fire an update
}
}
impl ProgressObject {
pub fn new(max: usize, length: usize, sender: Sender<DownloadManagerSignal>) -> Self {
let arr = Mutex::new((0..length).map(|_| Arc::new(AtomicUsize::new(0))).collect());
Self {
max: Arc::new(Mutex::new(max)),
progress_instances: Arc::new(arr),
start: Arc::new(Mutex::new(Instant::now())),
sender,
last_update_time: Arc::new(AtomicInstant::now()),
bytes_last_update: Arc::new(AtomicUsize::new(0)),
rolling: RollingProgressWindow::new(),
}
}
pub fn set_time_now(&self) {
*lock!(self.start) = Instant::now();
}
pub fn sum(&self) -> usize {
lock!(self.progress_instances)
.iter()
.map(|instance| instance.load(Ordering::Acquire))
.sum()
}
pub fn reset(&self) {
self.set_time_now();
self.bytes_last_update.store(0, Ordering::Release);
self.rolling.reset();
lock!(self.progress_instances)
.iter()
.for_each(|x| x.store(0, Ordering::SeqCst));
}
pub fn get_max(&self) -> usize {
*lock!(self.max)
}
pub fn set_max(&self, new_max: usize) {
*lock!(self.max) = new_max;
}
pub fn set_size(&self, length: usize) {
*lock!(self.progress_instances) =
(0..length).map(|_| Arc::new(AtomicUsize::new(0))).collect();
}
pub fn get_progress(&self) -> f64 {
self.sum() as f64 / self.get_max() as f64
}
pub fn get(&self, index: usize) -> Arc<AtomicUsize> {
lock!(self.progress_instances)[index].clone()
}
fn update_window(&self, kilobytes_per_second: usize) {
self.rolling.update(kilobytes_per_second);
}
}
#[throttle(1, Duration::from_millis(20))]
pub fn calculate_update(progress: &ProgressObject) {
let last_update_time = progress
.last_update_time
.swap(Instant::now(), Ordering::SeqCst);
let time_since_last_update = Instant::now().duration_since(last_update_time).as_millis_f64();
let current_bytes_downloaded = progress.sum();
let max = progress.get_max();
let bytes_at_last_update = progress
.bytes_last_update
.swap(current_bytes_downloaded, Ordering::Acquire);
let bytes_since_last_update = current_bytes_downloaded.saturating_sub(bytes_at_last_update) as f64;
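// bytes per millisecond is numerically the same as kilobytes (1000 B) per second,
// e.g. 5_000_000 bytes over 2_000 ms -> 2_500 kB/s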
let kilobytes_per_second = bytes_since_last_update / time_since_last_update;
let bytes_remaining = max.saturating_sub(current_bytes_downloaded); // bytes
progress.update_window(kilobytes_per_second as usize);
push_update(progress, bytes_remaining);
}
#[throttle(1, Duration::from_millis(250))]
pub fn push_update(progress: &ProgressObject, bytes_remaining: usize) {
let average_speed = progress.rolling.get_average();
let time_remaining = (bytes_remaining / 1000) / average_speed.max(1);
update_ui(progress, average_speed, time_remaining);
update_queue(progress);
}
fn update_ui(progress_object: &ProgressObject, kilobytes_per_second: usize, time_remaining: usize) {
send!(
progress_object.sender,
DownloadManagerSignal::UpdateUIStats(kilobytes_per_second, time_remaining)
);
}
fn update_queue(progress: &ProgressObject) {
send!(progress.sender, DownloadManagerSignal::UpdateUIQueue)
}

View File

@ -1,44 +0,0 @@
use std::{
collections::VecDeque,
sync::{Arc, Mutex, MutexGuard},
};
use crate::{database::models::data::DownloadableMetadata, lock};
#[derive(Clone)]
pub struct Queue {
inner: Arc<Mutex<VecDeque<DownloadableMetadata>>>,
}
#[allow(dead_code)]
impl Default for Queue {
fn default() -> Self {
Self::new()
}
}
impl Queue {
pub fn new() -> Self {
Self {
inner: Arc::new(Mutex::new(VecDeque::new())),
}
}
pub fn read(&self) -> VecDeque<DownloadableMetadata> {
lock!(self.inner).clone()
}
pub fn edit(&self) -> MutexGuard<'_, VecDeque<DownloadableMetadata>> {
lock!(self.inner)
}
pub fn pop_front(&self) -> Option<DownloadableMetadata> {
self.edit().pop_front()
}
pub fn exists(&self, meta: DownloadableMetadata) -> bool {
self.read().contains(&meta)
}
pub fn append(&self, interface: DownloadableMetadata) {
self.edit().push_back(interface);
}
pub fn get_by_meta(&self, meta: &DownloadableMetadata) -> Option<usize> {
self.read().iter().position(|data| data == meta)
}
}

View File

@ -1,43 +0,0 @@
use std::sync::{
Arc,
atomic::{AtomicUsize, Ordering},
};
#[derive(Clone)]
pub struct RollingProgressWindow<const S: usize> {
window: Arc<[AtomicUsize; S]>,
current: Arc<AtomicUsize>,
}
impl<const S: usize> RollingProgressWindow<S> {
pub fn new() -> Self {
Self {
window: Arc::new([(); S].map(|()| AtomicUsize::new(0))),
current: Arc::new(AtomicUsize::new(0)),
}
}
pub fn update(&self, kilobytes_per_second: usize) {
let index = self.current.fetch_add(1, Ordering::SeqCst);
let current = &self.window[index % S];
current.store(kilobytes_per_second, Ordering::SeqCst);
}
pub fn get_average(&self) -> usize {
let current = self.current.load(Ordering::SeqCst);
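// Only average slots that have actually been written; once the window has
// wrapped around (current >= S), every slot is included.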
let valid = self
.window
.iter()
.enumerate()
.filter(|(i, _)| i < &current)
.map(|(_, x)| x.load(Ordering::Acquire))
.collect::<Vec<usize>>();
let amount = valid.len();
let sum = valid.into_iter().sum::<usize>();
sum / amount
}
pub fn reset(&self) {
self.window
.iter()
.for_each(|x| x.store(0, Ordering::Release));
self.current.store(0, Ordering::Release);
}
}

View File

@ -1,55 +0,0 @@
use std::{
fmt::{Display, Formatter},
io, sync::Arc,
};
use serde_with::SerializeDisplay;
use humansize::{format_size, BINARY};
use super::remote_access_error::RemoteAccessError;
// TODO: Rename / separate from downloads
#[derive(Debug, SerializeDisplay)]
pub enum ApplicationDownloadError {
NotInitialized,
Communication(RemoteAccessError),
DiskFull(u64, u64),
#[allow(dead_code)]
Checksum,
Lock,
IoError(Arc<io::Error>),
DownloadError(RemoteAccessError),
}
impl Display for ApplicationDownloadError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
ApplicationDownloadError::NotInitialized => write!(f, "Download not initialized, did something go wrong?"),
ApplicationDownloadError::DiskFull(required, available) => write!(
f,
"Game requires {}, {} remaining left on disk.",
format_size(*required, BINARY),
format_size(*available, BINARY),
),
ApplicationDownloadError::Communication(error) => write!(f, "{error}"),
ApplicationDownloadError::Lock => write!(
f,
"failed to acquire lock. Something has gone very wrong internally. Please restart the application"
),
ApplicationDownloadError::Checksum => {
write!(f, "checksum failed to validate for download")
}
ApplicationDownloadError::IoError(error) => write!(f, "io error: {error}"),
ApplicationDownloadError::DownloadError(error) => write!(
f,
"Download failed with error {error:?}"
),
}
}
}
impl From<io::Error> for ApplicationDownloadError {
fn from(value: io::Error) -> Self {
ApplicationDownloadError::IoError(Arc::new(value))
}
}

View File

@ -1,26 +0,0 @@
use std::fmt::Display;
use http::{header::ToStrError, HeaderName};
use serde_with::SerializeDisplay;
use crate::error::remote_access_error::RemoteAccessError;
#[derive(Debug, SerializeDisplay)]
pub enum CacheError {
HeaderNotFound(HeaderName),
ParseError(ToStrError),
Remote(RemoteAccessError),
ConstructionError(http::Error)
}
impl Display for CacheError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
CacheError::HeaderNotFound(header_name) => format!("Could not find header {header_name} in cache"),
CacheError::ParseError(to_str_error) => format!("Could not parse cache with error {to_str_error}"),
CacheError::Remote(remote_access_error) => format!("Cache got remote access error: {remote_access_error}"),
CacheError::ConstructionError(error) => format!("Could not construct cache body with error {error}"),
};
write!(f, "{s}")
}
}

View File

@ -1,10 +0,0 @@
use serde::Deserialize;
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DropServerError {
pub status_code: usize,
pub status_message: String,
// pub message: String,
// pub url: String,
}

View File

@ -1,21 +0,0 @@
use std::fmt::{Display};
use serde_with::SerializeDisplay;
#[derive(SerializeDisplay)]
pub enum LibraryError {
MetaNotFound(String),
VersionNotFound(String),
}
impl Display for LibraryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", match self {
LibraryError::MetaNotFound(id) => {
format!("Could not locate any installed version of game ID {id} in the database")
}
LibraryError::VersionNotFound(game_id) => {
format!("Could not locate any installed version for game id {game_id} in the database")
}
})
}
}

View File

@ -1,33 +0,0 @@
use std::{fmt::Display, io::Error};
use serde_with::SerializeDisplay;
#[derive(SerializeDisplay)]
pub enum ProcessError {
NotInstalled,
AlreadyRunning,
InvalidID,
InvalidVersion,
IOError(Error),
FormatError(String), // String errors supremacy
InvalidPlatform,
OpenerError(tauri_plugin_opener::Error),
InvalidArguments(String)
}
impl Display for ProcessError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
ProcessError::NotInstalled => "Game not installed",
ProcessError::AlreadyRunning => "Game already running",
ProcessError::InvalidID => "Invalid game ID",
ProcessError::InvalidVersion => "Invalid game version",
ProcessError::IOError(error) => &error.to_string(),
ProcessError::InvalidPlatform => "This game cannot be played on the current platform",
ProcessError::FormatError(error) => &format!("Could not format template: {error:?}"),
ProcessError::OpenerError(error) => &format!("Could not open directory: {error:?}"),
ProcessError::InvalidArguments(arguments) => &format!("Invalid arguments in command {arguments}"),
};
write!(f, "{s}")
}
}

View File

@ -1,106 +0,0 @@
use std::{
error::Error,
fmt::{Display, Formatter},
sync::Arc,
};
use http::StatusCode;
use serde_with::SerializeDisplay;
use url::ParseError;
use super::drop_server_error::DropServerError;
#[derive(Debug, SerializeDisplay)]
pub enum RemoteAccessError {
FetchError(Arc<reqwest::Error>),
FetchErrorWS(Arc<reqwest_websocket::Error>),
ParsingError(ParseError),
InvalidEndpoint,
HandshakeFailed(String),
GameNotFound(String),
InvalidResponse(DropServerError),
UnparseableResponse(String),
ManifestDownloadFailed(StatusCode, String),
OutOfSync,
Cache(std::io::Error),
CorruptedState,
}
impl Display for RemoteAccessError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
RemoteAccessError::FetchError(error) => {
if error.is_connect() {
return write!(
f,
"Failed to connect to Drop server. Check if you access Drop through a browser, and then try again."
);
}
write!(
f,
"{}: {}",
error,
error
.source()
.map(std::string::ToString::to_string)
.unwrap_or("Unknown error".to_string())
)
}
RemoteAccessError::FetchErrorWS(error) => write!(
f,
"{}: {}",
error,
error
.source()
.map(std::string::ToString::to_string)
.unwrap_or("Unknown error".to_string())
),
RemoteAccessError::ParsingError(parse_error) => {
write!(f, "{parse_error}")
}
RemoteAccessError::InvalidEndpoint => write!(f, "invalid drop endpoint"),
RemoteAccessError::HandshakeFailed(message) => {
write!(f, "failed to complete handshake: {message}")
}
RemoteAccessError::GameNotFound(id) => write!(f, "could not find game on server: {id}"),
RemoteAccessError::InvalidResponse(error) => write!(
f,
"server returned an invalid response: {}, {}",
error.status_code, error.status_message
),
RemoteAccessError::UnparseableResponse(error) => {
write!(f, "server returned an invalid response: {error}")
}
RemoteAccessError::ManifestDownloadFailed(status, response) => {
write!(f, "failed to download game manifest: {status} {response}")
}
RemoteAccessError::OutOfSync => write!(
f,
"server's and client's time are out of sync. Please ensure they are within at least 30 seconds of each other"
),
RemoteAccessError::Cache(error) => write!(f, "Cache Error: {error}"),
RemoteAccessError::CorruptedState => write!(
f,
"Drop encountered a corrupted internal state. Please report this to the developers, with details of reproduction."
),
}
}
}
impl From<reqwest::Error> for RemoteAccessError {
fn from(err: reqwest::Error) -> Self {
RemoteAccessError::FetchError(Arc::new(err))
}
}
impl From<reqwest_websocket::Error> for RemoteAccessError {
fn from(err: reqwest_websocket::Error) -> Self {
RemoteAccessError::FetchErrorWS(Arc::new(err))
}
}
impl From<ParseError> for RemoteAccessError {
fn from(err: ParseError) -> Self {
RemoteAccessError::ParsingError(err)
}
}
impl std::error::Error for RemoteAccessError {}

318
src-tauri/src/games.rs Normal file
View File

@ -0,0 +1,318 @@
use std::sync::Mutex;
use tauri::AppHandle;
use crate::{
AppState,
database::{
db::borrow_db_checked,
models::data::GameVersion,
},
error::{library_error::LibraryError, remote_access_error::RemoteAccessError},
games::library::{
fetch_game_logic_offline, fetch_library_logic_offline, get_current_meta,
uninstall_game_logic,
},
offline,
};
use super::{
library::{
FetchGameStruct, Game, fetch_game_logic, fetch_game_version_options_logic,
fetch_library_logic,
},
state::{GameStatusManager, GameStatusWithTransient},
};
#[tauri::command]
pub async fn fetch_library(
state: tauri::State<'_, Mutex<AppState<'_>>>,
hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
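// The offline! macro (imported from the crate root) presumably selects between
// the online and offline implementations based on the app's connectivity state,
// forwarding the remaining arguments to whichever one runs.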
offline!(
state,
fetch_library_logic,
fetch_library_logic_offline,
state,
hard_refresh
).await
}
pub async fn fetch_library_logic(
state: tauri::State<'_, Mutex<AppState<'_>>>,
hard_fresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let do_hard_refresh = hard_fresh.unwrap_or(false);
if !do_hard_refresh && let Ok(library) = get_cached_object("library") {
return Ok(library);
}
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/user/library"], &[])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() != 200 {
let err = response.json().await.unwrap_or(DropServerError {
status_code: 500,
status_message: "Invalid response from server.".to_owned(),
});
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let mut games: Vec<Game> = response.json().await?;
let mut handle = lock!(state);
let mut db_handle = borrow_db_mut_checked();
for game in &games {
handle.games.insert(game.id.clone(), game.clone());
if !db_handle.applications.game_statuses.contains_key(&game.id) {
db_handle
.applications
.game_statuses
.insert(game.id.clone(), GameDownloadStatus::Remote {});
}
}
// Add games that are installed but no longer in library
for meta in db_handle.applications.installed_game_version.values() {
if games.iter().any(|e| e.id == meta.id) {
continue;
}
// We should always have a cache of the object
// Pass db_handle because otherwise we get a deadlock
let game = match get_cached_object_db::<Game>(&meta.id.clone(), &db_handle) {
Ok(game) => game,
Err(err) => {
warn!(
"{} is installed, but encountered error fetching its error: {}.",
meta.id, err
);
continue;
}
};
games.push(game);
}
drop(handle);
drop(db_handle);
cache_object("library", &games)?;
Ok(games)
}
pub async fn fetch_library_logic_offline(
_state: tauri::State<'_, Mutex<AppState<'_>>>,
_hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let mut games: Vec<Game> = get_cached_object("library")?;
let db_handle = borrow_db_checked();
games.retain(|game| {
matches!(
&db_handle
.applications
.game_statuses
.get(&game.id)
.unwrap_or(&GameDownloadStatus::Remote {}),
GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }
)
});
Ok(games)
}
pub async fn fetch_game_logic(
id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
let version = {
let state_handle = lock!(state);
let db_lock = borrow_db_checked();
let metadata_option = db_lock.applications.installed_game_version.get(&id);
let version = match metadata_option {
None => None,
Some(metadata) => db_lock
.applications
.game_versions
.get(&metadata.id)
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
.cloned(),
};
let game = state_handle.games.get(&id);
if let Some(game) = game {
let status = GameStatusManager::fetch_state(&id, &db_lock);
let data = FetchGameStruct {
game: game.clone(),
status,
version,
};
cache_object_db(&id, game, &db_lock)?;
return Ok(data);
}
version
};
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/game/", &id], &[])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() == 404 {
let offline_fetch = fetch_game_logic_offline(id.clone(), state).await;
if let Ok(fetch_data) = offline_fetch {
return Ok(fetch_data);
}
return Err(RemoteAccessError::GameNotFound(id));
}
if response.status() != 200 {
let err = response.json().await?;
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let game: Game = response.json().await?;
let mut state_handle = lock!(state);
state_handle.games.insert(id.clone(), game.clone());
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.game_statuses
.entry(id.clone())
.or_insert(GameDownloadStatus::Remote {});
let status = GameStatusManager::fetch_state(&id, &db_handle);
drop(db_handle);
let data = FetchGameStruct {
game: game.clone(),
status,
version,
};
cache_object(&id, &game)?;
Ok(data)
}
pub async fn fetch_game_version_options_logic(
game_id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/game/versions"], &[("id", &game_id)])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() != 200 {
let err = response.json().await?;
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let data: Vec<GameVersion> = response.json().await?;
let state_lock = lock!(state);
let process_manager_lock = lock!(state_lock.process_manager);
let data: Vec<GameVersion> = data
.into_iter()
.filter(|v| process_manager_lock.valid_platform(&v.platform, &state_lock))
.collect();
drop(process_manager_lock);
drop(state_lock);
Ok(data)
}
pub async fn fetch_game_logic_offline(
id: String,
_state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
let db_handle = borrow_db_checked();
let metadata_option = db_handle.applications.installed_game_version.get(&id);
let version = match metadata_option {
None => None,
Some(metadata) => db_handle
.applications
.game_versions
.get(&metadata.id)
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
.cloned(),
};
let status = GameStatusManager::fetch_state(&id, &db_handle);
let game = get_cached_object::<Game>(&id)?;
drop(db_handle);
Ok(FetchGameStruct {
game,
status,
version,
})
}
#[tauri::command]
pub async fn fetch_game(
game_id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
offline!(
state,
fetch_game_logic,
fetch_game_logic_offline,
game_id,
state
).await
}
#[tauri::command]
pub fn fetch_game_status(id: String) -> GameStatusWithTransient {
let db_handle = borrow_db_checked();
GameStatusManager::fetch_state(&id, &db_handle)
}
#[tauri::command]
pub fn uninstall_game(game_id: String, app_handle: AppHandle) -> Result<(), LibraryError> {
let meta = match get_current_meta(&game_id) {
Some(data) => data,
None => return Err(LibraryError::MetaNotFound(game_id)),
};
uninstall_game_logic(meta, &app_handle);
Ok(())
}
#[tauri::command]
pub async fn fetch_game_version_options(
game_id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
fetch_game_version_options_logic(game_id, state).await
}

View File

@ -1,24 +0,0 @@
use bitcode::{Decode, Encode};
use serde::{Deserialize, Serialize};
use crate::games::library::Game;
pub type Collections = Vec<Collection>;
#[derive(Serialize, Deserialize, Debug, Clone, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct Collection {
id: String,
name: String,
is_default: bool,
user_id: String,
entries: Vec<CollectionObject>,
}
#[derive(Serialize, Deserialize, Debug, Clone, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct CollectionObject {
collection_id: String,
game_id: String,
game: Game,
}

View File

@ -1,2 +0,0 @@
pub mod collection;
pub mod commands;

View File

@ -1,78 +0,0 @@
use std::sync::Mutex;
use tauri::AppHandle;
use crate::{
AppState,
database::{
db::borrow_db_checked,
models::data::GameVersion,
},
error::{library_error::LibraryError, remote_access_error::RemoteAccessError},
games::library::{
fetch_game_logic_offline, fetch_library_logic_offline, get_current_meta,
uninstall_game_logic,
},
offline,
};
use super::{
library::{
FetchGameStruct, Game, fetch_game_logic, fetch_game_version_options_logic,
fetch_library_logic,
},
state::{GameStatusManager, GameStatusWithTransient},
};
#[tauri::command]
pub async fn fetch_library(
state: tauri::State<'_, Mutex<AppState<'_>>>,
hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
offline!(
state,
fetch_library_logic,
fetch_library_logic_offline,
state,
hard_refresh
).await
}
#[tauri::command]
pub async fn fetch_game(
game_id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
offline!(
state,
fetch_game_logic,
fetch_game_logic_offline,
game_id,
state
).await
}
#[tauri::command]
pub fn fetch_game_status(id: String) -> GameStatusWithTransient {
let db_handle = borrow_db_checked();
GameStatusManager::fetch_state(&id, &db_handle)
}
#[tauri::command]
pub fn uninstall_game(game_id: String, app_handle: AppHandle) -> Result<(), LibraryError> {
let meta = match get_current_meta(&game_id) {
Some(data) => data,
None => return Err(LibraryError::MetaNotFound(game_id)),
};
uninstall_game_logic(meta, &app_handle);
Ok(())
}
#[tauri::command]
pub async fn fetch_game_version_options(
game_id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
fetch_game_version_options_logic(game_id, state).await
}

View File

@ -1,709 +0,0 @@
use crate::auth::generate_authorization_header;
use crate::database::db::{borrow_db_checked, borrow_db_mut_checked};
use crate::database::models::data::{
ApplicationTransientStatus, DownloadType, DownloadableMetadata,
};
use crate::download_manager::download_manager_frontend::{DownloadManagerSignal, DownloadStatus};
use crate::download_manager::downloadable::Downloadable;
use crate::download_manager::util::download_thread_control_flag::{
DownloadThreadControl, DownloadThreadControlFlag,
};
use crate::download_manager::util::progress_object::{ProgressHandle, ProgressObject};
use crate::error::application_download_error::ApplicationDownloadError;
use crate::error::remote_access_error::RemoteAccessError;
use crate::games::downloads::manifest::{
DownloadBucket, DownloadContext, DownloadDrop, DropManifest, DropValidateContext, ManifestBody,
};
use crate::games::downloads::validate::validate_game_chunk;
use crate::games::library::{on_game_complete, push_game_update, set_partially_installed};
use crate::games::state::GameStatusManager;
use crate::process::utils::get_disk_available;
use crate::remote::requests::generate_url;
use crate::remote::utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC};
use crate::{app_emit, lock, send};
use log::{debug, error, info, warn};
use rayon::ThreadPoolBuilder;
use std::collections::{HashMap, HashSet};
use std::fs::{OpenOptions, create_dir_all};
use std::io;
use std::path::{Path, PathBuf};
use std::sync::mpsc::Sender;
use std::sync::{Arc, Mutex};
use std::time::Instant;
use tauri::{AppHandle, Emitter};
#[cfg(target_os = "linux")]
use rustix::fs::{FallocateFlags, fallocate};
use super::download_logic::download_game_bucket;
use super::drop_data::DropData;
static RETRY_COUNT: usize = 3;
const TARGET_BUCKET_SIZE: usize = 63 * 1000 * 1000;
const MAX_FILES_PER_BUCKET: usize = (1024 / 4) - 1;
pub struct GameDownloadAgent {
pub id: String,
pub version: String,
pub control_flag: DownloadThreadControl,
buckets: Mutex<Vec<DownloadBucket>>,
context_map: Mutex<HashMap<String, bool>>,
pub manifest: Mutex<Option<DropManifest>>,
pub progress: Arc<ProgressObject>,
sender: Sender<DownloadManagerSignal>,
pub dropdata: DropData,
status: Mutex<DownloadStatus>,
}
impl GameDownloadAgent {
pub async fn new_from_index(
id: String,
version: String,
target_download_dir: usize,
sender: Sender<DownloadManagerSignal>,
) -> Result<Self, ApplicationDownloadError> {
let base_dir = {
let db_lock = borrow_db_checked();
db_lock.applications.install_dirs[target_download_dir].clone()
};
Self::new(id, version, base_dir, sender).await
}
pub async fn new(
id: String,
version: String,
base_dir: PathBuf,
sender: Sender<DownloadManagerSignal>,
) -> Result<Self, ApplicationDownloadError> {
// Don't run by default
let control_flag = DownloadThreadControl::new(DownloadThreadControlFlag::Stop);
let base_dir_path = Path::new(&base_dir);
let data_base_dir_path = base_dir_path.join(id.clone());
let stored_manifest =
DropData::generate(id.clone(), version.clone(), data_base_dir_path.clone());
let context_lock = stored_manifest.contexts.lock().unwrap().clone();
let result = Self {
id,
version,
control_flag,
manifest: Mutex::new(None),
buckets: Mutex::new(Vec::new()),
context_map: Mutex::new(HashMap::new()),
progress: Arc::new(ProgressObject::new(0, 0, sender.clone())),
sender,
dropdata: stored_manifest,
status: Mutex::new(DownloadStatus::Queued),
};
result.ensure_manifest_exists().await?;
let required_space = lock!(result.manifest)
.as_ref()
.unwrap()
.values()
.map(|e| {
e.lengths
.iter()
.enumerate()
.filter(|(i, _)| *context_lock.get(&e.checksums[*i]).unwrap_or(&false))
.map(|(_, v)| v)
.sum::<usize>()
})
.sum::<usize>() as u64;
let available_space = get_disk_available(data_base_dir_path)? as u64;
if required_space > available_space {
return Err(ApplicationDownloadError::DiskFull(
required_space,
available_space,
));
}
Ok(result)
}
// Blocking
pub fn setup_download(&self, app_handle: &AppHandle) -> Result<(), ApplicationDownloadError> {
let mut db_lock = borrow_db_mut_checked();
let status = ApplicationTransientStatus::Downloading {
version_name: self.version.clone(),
};
db_lock
.applications
.transient_statuses
.insert(self.metadata(), status.clone());
// Don't use GameStatusManager because this game isn't installed
push_game_update(app_handle, &self.metadata().id, None, (None, Some(status)));
if !self.check_manifest_exists() {
return Err(ApplicationDownloadError::NotInitialized);
}
self.ensure_buckets()?;
self.control_flag.set(DownloadThreadControlFlag::Go);
Ok(())
}
// Blocking
pub fn download(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
self.setup_download(app_handle)?;
let timer = Instant::now();
info!("beginning download for {}...", self.metadata().id);
let res = self.run().map_err(ApplicationDownloadError::Communication);
debug!(
"{} took {}ms to download",
self.id,
timer.elapsed().as_millis()
);
res
}
pub fn check_manifest_exists(&self) -> bool {
lock!(self.manifest).is_some()
}
pub async fn ensure_manifest_exists(&self) -> Result<(), ApplicationDownloadError> {
if lock!(self.manifest).is_some() {
return Ok(());
}
self.download_manifest().await
}
async fn download_manifest(&self) -> Result<(), ApplicationDownloadError> {
let client = DROP_CLIENT_ASYNC.clone();
let url = generate_url(
&["/api/v1/client/game/manifest"],
&[("id", &self.id), ("version", &self.version)],
)
.map_err(ApplicationDownloadError::Communication)?;
let response = client
.get(url)
.header("Authorization", generate_authorization_header())
.send()
.await
.map_err(|e| ApplicationDownloadError::Communication(e.into()))?;
if response.status() != 200 {
return Err(ApplicationDownloadError::Communication(
RemoteAccessError::ManifestDownloadFailed(
response.status(),
response.text().await.unwrap(),
),
));
}
let manifest_download: DropManifest = response
.json()
.await
.map_err(|e| ApplicationDownloadError::Communication(e.into()))?;
if let Ok(mut manifest) = self.manifest.lock() {
*manifest = Some(manifest_download);
return Ok(());
}
Err(ApplicationDownloadError::Lock)
}
// Sets it up for both download and validate
fn setup_progress(&self) {
let buckets = lock!(self.buckets);
let chunk_count = buckets.iter().map(|e| e.drops.len()).sum();
let total_length = buckets
.iter()
.map(|bucket| bucket.drops.iter().map(|e| e.length).sum::<usize>())
.sum();
self.progress.set_max(total_length);
self.progress.set_size(chunk_count);
self.progress.reset();
}
pub fn ensure_buckets(&self) -> Result<(), ApplicationDownloadError> {
if lock!(self.buckets).is_empty() {
self.generate_buckets()?;
}
*lock!(self.context_map) = self.dropdata.get_contexts();
Ok(())
}
pub fn generate_buckets(&self) -> Result<(), ApplicationDownloadError> {
let manifest = lock!(self.manifest)
.clone()
.ok_or(ApplicationDownloadError::NotInitialized)?;
let game_id = self.id.clone();
let base_path = Path::new(&self.dropdata.base_path);
create_dir_all(base_path)?;
let mut buckets = Vec::new();
let mut current_buckets = HashMap::<String, DownloadBucket>::new();
let mut current_bucket_sizes = HashMap::<String, usize>::new();
for (raw_path, chunk) in manifest {
let path = base_path.join(Path::new(&raw_path));
let container = path
.parent()
.ok_or(ApplicationDownloadError::IoError(Arc::new(io::Error::new(
io::ErrorKind::NotFound,
"no parent directory",
))))?;
create_dir_all(container)?;
let already_exists = path.exists();
let file = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.truncate(false)
.open(&path)?;
let mut file_running_offset = 0;
for (index, length) in chunk.lengths.iter().enumerate() {
let drop = DownloadDrop {
filename: raw_path.to_string(),
start: file_running_offset,
length: *length,
checksum: chunk.checksums[index].clone(),
permissions: chunk.permissions,
path: path.clone(),
index,
};
file_running_offset += *length;
if *length >= TARGET_BUCKET_SIZE {
// They get their own bucket
buckets.push(DownloadBucket {
game_id: game_id.clone(),
version: chunk.version_name.clone(),
drops: vec![drop],
});
continue;
}
let current_bucket_size = current_bucket_sizes
.entry(chunk.version_name.clone())
.or_insert_with(|| 0);
let c_version_name = chunk.version_name.clone();
let c_game_id = game_id.clone();
let current_bucket = current_buckets
.entry(chunk.version_name.clone())
.or_insert_with(|| DownloadBucket {
game_id: c_game_id,
version: c_version_name,
drops: vec![],
});
if (*current_bucket_size + length >= TARGET_BUCKET_SIZE
|| current_bucket.drops.len() >= MAX_FILES_PER_BUCKET)
&& !current_bucket.drops.is_empty()
{
// Move current bucket into list and make a new one
buckets.push(current_bucket.clone());
*current_bucket = DownloadBucket {
game_id: game_id.clone(),
version: chunk.version_name.clone(),
drops: vec![],
};
*current_bucket_size = 0;
}
current_bucket.drops.push(drop);
*current_bucket_size += *length;
}
#[cfg(target_os = "linux")]
if file_running_offset > 0 && !already_exists {
let _ = fallocate(file, FallocateFlags::empty(), 0, file_running_offset as u64);
}
}
for (_, bucket) in current_buckets.into_iter() {
if !bucket.drops.is_empty() {
buckets.push(bucket);
}
}
info!("buckets: {}", buckets.len());
let existing_contexts = self.dropdata.get_contexts();
self.dropdata.set_contexts(
&buckets
.iter()
.flat_map(|x| x.drops.iter().map(|v| v.checksum.clone()))
.map(|x| {
let contains = existing_contexts.get(&x).unwrap_or(&false);
(x, *contains)
})
.collect::<Vec<(String, bool)>>(),
);
*lock!(self.buckets) = buckets;
Ok(())
}
fn run(&self) -> Result<bool, RemoteAccessError> {
self.setup_progress();
let max_download_threads = borrow_db_checked().settings.max_download_threads;
debug!(
"downloading game: {} with {} threads",
self.id, max_download_threads
);
let pool = ThreadPoolBuilder::new()
.num_threads(max_download_threads)
.build()
.unwrap_or_else(|_| {
panic!("failed to build thread pool with {max_download_threads} threads")
});
let buckets = lock!(self.buckets);
let mut download_contexts = HashMap::<String, DownloadContext>::new();
let versions = buckets
.iter()
.map(|e| &e.version)
.collect::<HashSet<_>>()
.into_iter()
.cloned()
.collect::<Vec<String>>();
info!("downloading across these versions: {versions:?}");
let completed_contexts = Arc::new(boxcar::Vec::new());
let completed_indexes_loop_arc = completed_contexts.clone();
for version in versions {
let download_context = DROP_CLIENT_SYNC
.post(generate_url(&["/api/v2/client/context"], &[])?)
.json(&ManifestBody {
game: self.id.clone(),
version: version.clone(),
})
.header("Authorization", generate_authorization_header())
.send()?;
if download_context.status() != 200 {
return Err(RemoteAccessError::InvalidResponse(download_context.json()?));
}
let download_context = download_context.json::<DownloadContext>()?;
info!(
"download context: ({}) {}",
&version, download_context.context
);
download_contexts.insert(version, download_context);
}
let download_contexts = &download_contexts;
pool.scope(|scope| {
let context_map = lock!(self.context_map);
for (index, bucket) in buckets.iter().enumerate() {
let mut bucket = (*bucket).clone();
let completed_contexts = completed_indexes_loop_arc.clone();
let progress = self.progress.get(index);
let progress_handle = ProgressHandle::new(progress, self.progress.clone());
// If we've done this one already, skip it
// Note to future DecDuck, DropData gets loaded into context_map
let todo_drops = bucket
.drops
.into_iter()
.filter(|e| {
let todo = !*context_map.get(&e.checksum).unwrap_or(&false);
if !todo {
progress_handle.skip(e.length);
}
todo
})
.collect::<Vec<DownloadDrop>>();
if todo_drops.is_empty() {
continue;
};
bucket.drops = todo_drops;
let sender = self.sender.clone();
let download_context = download_contexts
.get(&bucket.version)
.unwrap_or_else(|| panic!("Could not get bucket version {}. Corrupted state.", bucket.version));
scope.spawn(move |_| {
// Up to RETRY_COUNT attempts
for i in 0..RETRY_COUNT {
let loop_progress_handle = progress_handle.clone();
match download_game_bucket(
&bucket,
download_context,
&self.control_flag,
loop_progress_handle,
) {
Ok(true) => {
for drop in bucket.drops {
completed_contexts.push(drop.checksum);
}
return;
}
Ok(false) => return,
Err(e) => {
warn!("game download agent error: {e}");
let retry = matches!(
&e,
ApplicationDownloadError::Communication(_)
| ApplicationDownloadError::Checksum
| ApplicationDownloadError::Lock
| ApplicationDownloadError::IoError(_)
);
if i == RETRY_COUNT - 1 || !retry {
warn!("retry logic failed, not re-attempting.");
send!(sender, DownloadManagerSignal::Error(e));
return;
}
}
}
}
});
}
});
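// Merge this run's completed checksums back into the context map and persist them, so an interrupted download can resume where it left off.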
let newly_completed = completed_contexts.clone();
let completed_lock_len = {
let mut context_map_lock = lock!(self.context_map);
for (_, item) in newly_completed.iter() {
context_map_lock.insert(item.clone(), true);
}
context_map_lock.values().filter(|x| **x).count()
};
let context_map_lock = lock!(self.context_map);
let contexts = buckets
.iter()
.flat_map(|x| x.drops.iter().map(|e| e.checksum.clone()))
.map(|x| {
let completed = context_map_lock.get(&x).unwrap_or(&false);
(x, *completed)
})
.collect::<Vec<(String, bool)>>();
drop(context_map_lock);
self.dropdata.set_contexts(&contexts);
self.dropdata.write();
// If there are any contexts left which are false
if !contexts.iter().all(|x| x.1) {
info!(
"download agent for {} exited without completing ({}/{}) ({} buckets)",
self.id.clone(),
completed_lock_len,
contexts.len(),
buckets.len()
);
return Ok(false);
}
Ok(true)
}
fn setup_validate(&self, app_handle: &AppHandle) {
self.setup_progress();
self.control_flag.set(DownloadThreadControlFlag::Go);
let status = ApplicationTransientStatus::Validating {
version_name: self.version.clone(),
};
let mut db_lock = borrow_db_mut_checked();
db_lock
.applications
.transient_statuses
.insert(self.metadata(), status.clone());
push_game_update(app_handle, &self.metadata().id, None, (None, Some(status)));
}
pub fn validate(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
self.setup_validate(app_handle);
let buckets = lock!(self.buckets);
let contexts: Vec<DropValidateContext> = buckets
.clone()
.into_iter()
.flat_map(|e| -> Vec<DropValidateContext> { e.into() })
.collect();
let max_download_threads = borrow_db_checked().settings.max_download_threads;
info!("{} validation contexts", contexts.len());
let pool = ThreadPoolBuilder::new()
.num_threads(max_download_threads)
.build()
.unwrap_or_else(|_| {
panic!("failed to build thread pool with {max_download_threads} threads")
});
let invalid_chunks = Arc::new(boxcar::Vec::new());
pool.scope(|scope| {
for (index, context) in contexts.iter().enumerate() {
let current_progress = self.progress.get(index);
let progress_handle = ProgressHandle::new(current_progress, self.progress.clone());
let invalid_chunks_scoped = invalid_chunks.clone();
let sender = self.sender.clone();
scope.spawn(move |_| {
match validate_game_chunk(context, &self.control_flag, progress_handle) {
Ok(true) => {}
Ok(false) => {
invalid_chunks_scoped.push(context.checksum.clone());
}
Err(e) => {
error!("{e}");
send!(sender, DownloadManagerSignal::Error(e));
}
}
});
}
});
// If any chunks failed validation, mark them incomplete so they get re-downloaded
if !invalid_chunks.is_empty() {
info!("validation of game id {} failed", self.id);
for context in invalid_chunks.iter() {
self.dropdata.set_context(context.1.clone(), false);
}
self.dropdata.write();
return Ok(false);
}
Ok(true)
}
pub fn cancel(&self, app_handle: &AppHandle) {
// See docs on usage
set_partially_installed(
&self.metadata(),
self.dropdata.base_path.display().to_string(),
Some(app_handle),
);
self.dropdata.write();
}
}
impl Downloadable for GameDownloadAgent {
fn download(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
*lock!(self.status) = DownloadStatus::Downloading;
self.download(app_handle)
}
fn validate(&self, app_handle: &AppHandle) -> Result<bool, ApplicationDownloadError> {
*lock!(self.status) = DownloadStatus::Validating;
self.validate(app_handle)
}
fn progress(&self) -> Arc<ProgressObject> {
self.progress.clone()
}
fn control_flag(&self) -> DownloadThreadControl {
self.control_flag.clone()
}
fn metadata(&self) -> DownloadableMetadata {
DownloadableMetadata {
id: self.id.clone(),
version: Some(self.version.clone()),
download_type: DownloadType::Game,
}
}
fn on_queued(&self, app_handle: &tauri::AppHandle) {
*self.status.lock().unwrap() = DownloadStatus::Queued;
let mut db_lock = borrow_db_mut_checked();
let status = ApplicationTransientStatus::Queued {
version_name: self.version.clone(),
};
db_lock
.applications
.transient_statuses
.insert(self.metadata(), status.clone());
push_game_update(app_handle, &self.id, None, (None, Some(status)));
}
fn on_error(&self, app_handle: &tauri::AppHandle, error: &ApplicationDownloadError) {
*lock!(self.status) = DownloadStatus::Error;
app_emit!(app_handle, "download_error", error.to_string());
error!("error while managing download: {error:?}");
let mut handle = borrow_db_mut_checked();
handle
.applications
.transient_statuses
.remove(&self.metadata());
push_game_update(
app_handle,
&self.id,
None,
GameStatusManager::fetch_state(&self.id, &handle),
);
}
fn on_complete(&self, app_handle: &tauri::AppHandle) {
match on_game_complete(
&self.metadata(),
self.dropdata.base_path.to_string_lossy().to_string(),
app_handle,
) {
Ok(_) => {}
Err(e) => {
error!("could not mark game as complete: {e}");
send!(self.sender, DownloadManagerSignal::Error(ApplicationDownloadError::DownloadError(e)));
}
}
}
fn on_cancelled(&self, app_handle: &tauri::AppHandle) {
info!("cancelled {}", self.id);
self.cancel(app_handle);
}
fn status(&self) -> DownloadStatus {
lock!(self.status).clone()
}
}

View File

@ -1,292 +0,0 @@
use crate::download_manager::util::download_thread_control_flag::{
DownloadThreadControl, DownloadThreadControlFlag,
};
use crate::download_manager::util::progress_object::ProgressHandle;
use crate::error::application_download_error::ApplicationDownloadError;
use crate::error::drop_server_error::DropServerError;
use crate::error::remote_access_error::RemoteAccessError;
use crate::games::downloads::manifest::{ChunkBody, DownloadBucket, DownloadContext, DownloadDrop};
use crate::remote::auth::generate_authorization_header;
use crate::remote::requests::generate_url;
use crate::remote::utils::DROP_CLIENT_SYNC;
use log::{debug, info, warn};
use md5::{Context, Digest};
use reqwest::blocking::Response;
use std::fs::{Permissions, set_permissions};
use std::io::Read;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::sync::Arc;
use std::time::Instant;
use std::{
fs::{File, OpenOptions},
io::{self, BufWriter, Seek, SeekFrom, Write},
path::PathBuf,
};
static MAX_PACKET_LENGTH: usize = 4096 * 4;
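// How many bytes are copied between checks of the pause/stop control flag.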
static BUMP_SIZE: usize = 4096 * 16;
pub struct DropWriter<W: Write> {
hasher: Context,
destination: BufWriter<W>,
progress: ProgressHandle,
}
impl DropWriter<File> {
fn new(path: PathBuf, progress: ProgressHandle) -> Result<Self, io::Error> {
let destination = OpenOptions::new()
.write(true)
.create(true)
.truncate(false)
.open(&path)?;
Ok(Self {
destination: BufWriter::with_capacity(1024 * 1024, destination),
hasher: Context::new(),
progress,
})
}
fn finish(mut self) -> io::Result<Digest> {
self.flush()?;
Ok(self.hasher.compute())
}
}
// Write automatically pushes to file and hasher
impl Write for DropWriter<File> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.hasher
.write_all(buf)
.map_err(|e| io::Error::other(format!("Unable to write to hasher: {e}")))?;
let bytes_written = self.destination.write(buf)?;
self.progress.add(bytes_written);
Ok(bytes_written)
}
fn flush(&mut self) -> io::Result<()> {
self.hasher.flush()?;
self.destination.flush()
}
}
// Seek moves around destination output
impl Seek for DropWriter<File> {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
self.destination.seek(pos)
}
}
pub struct DropDownloadPipeline<'a, R: Read, W: Write> {
pub source: R,
pub drops: Vec<DownloadDrop>,
pub destination: Vec<DropWriter<W>>,
pub control_flag: &'a DownloadThreadControl,
#[allow(dead_code)]
progress: ProgressHandle,
}
impl<'a> DropDownloadPipeline<'a, Response, File> {
fn new(
source: Response,
drops: Vec<DownloadDrop>,
control_flag: &'a DownloadThreadControl,
progress: ProgressHandle,
) -> Result<Self, io::Error> {
Ok(Self {
source,
destination: drops
.iter()
.map(|drop| DropWriter::new(drop.path.clone(), progress.clone()))
.try_collect()?,
drops,
control_flag,
progress,
})
}
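// Streams the single response body into each destination writer in drop order, seeking to per-drop offsets and checking the control flag every BUMP_SIZE bytes.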
fn copy(&mut self) -> Result<bool, io::Error> {
let mut copy_buffer = [0u8; MAX_PACKET_LENGTH];
for (index, drop) in self.drops.iter().enumerate() {
let destination = self
.destination
.get_mut(index)
.ok_or(io::Error::other("no destination"))?;
let mut remaining = drop.length;
if drop.start != 0 {
destination.seek(SeekFrom::Start(drop.start as u64))?;
}
let mut last_bump = 0;
loop {
let size = MAX_PACKET_LENGTH.min(remaining);
let size = self.source.read(&mut copy_buffer[0..size]).inspect_err(|_| {
info!("got error from {}", drop.filename);
})?;
remaining -= size;
last_bump += size;
destination.write_all(&copy_buffer[0..size])?;
if last_bump > BUMP_SIZE {
last_bump -= BUMP_SIZE;
if self.control_flag.get() == DownloadThreadControlFlag::Stop {
return Ok(false);
}
}
if remaining == 0 {
break;
};
}
if self.control_flag.get() == DownloadThreadControlFlag::Stop {
return Ok(false);
}
}
Ok(true)
}
#[allow(dead_code)]
fn debug_skip_checksum(self) {
self.destination
.into_iter()
.for_each(|mut e| e.flush().unwrap());
}
fn finish(self) -> Result<Vec<Digest>, io::Error> {
let checksums = self
.destination
.into_iter()
.map(|e| e.finish())
.try_collect()?;
Ok(checksums)
}
}
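// Downloads every drop in a bucket with one streamed request to /api/v2/client/chunk; the response body is the drops' data concatenated in order, described by the Content-Lengths header.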
pub fn download_game_bucket(
bucket: &DownloadBucket,
ctx: &DownloadContext,
control_flag: &DownloadThreadControl,
progress: ProgressHandle,
) -> Result<bool, ApplicationDownloadError> {
// If we're paused
if control_flag.get() == DownloadThreadControlFlag::Stop {
progress.set(0);
return Ok(false);
}
let start = Instant::now();
let header = generate_authorization_header();
let url = generate_url(&["/api/v2/client/chunk"], &[])
.map_err(ApplicationDownloadError::Communication)?;
let body = ChunkBody::create(ctx, &bucket.drops);
let response = DROP_CLIENT_SYNC
.post(url)
.json(&body)
.header("Authorization", header)
.send()
.map_err(|e| ApplicationDownloadError::Communication(e.into()))?;
if response.status() != 200 {
info!("chunk request got status code: {}", response.status());
let raw_res = response.text().map_err(|e| {
ApplicationDownloadError::Communication(RemoteAccessError::FetchError(e.into()))
})?;
info!("{raw_res}");
if let Ok(err) = serde_json::from_str::<DropServerError>(&raw_res) {
return Err(ApplicationDownloadError::Communication(
RemoteAccessError::InvalidResponse(err),
));
}
return Err(ApplicationDownloadError::Communication(
RemoteAccessError::UnparseableResponse(raw_res),
));
}
let lengths = response
.headers()
.get("Content-Lengths")
.ok_or(ApplicationDownloadError::Communication(
RemoteAccessError::UnparseableResponse("missing Content-Lengths header".to_owned()),
))?
.to_str()
.map_err(|e| {
ApplicationDownloadError::Communication(RemoteAccessError::UnparseableResponse(
e.to_string(),
))
})?;
for (i, raw_length) in lengths.split(",").enumerate() {
let length = raw_length.parse::<usize>().unwrap_or(0);
let Some(drop) = bucket.drops.get(i) else {
warn!("invalid number of Content-Lengths recieved: {i}, {lengths}");
return Err(ApplicationDownloadError::DownloadError(
RemoteAccessError::InvalidResponse(DropServerError {
status_code: 400,
status_message: format!(
"invalid number of Content-Lengths recieved: {i}, {lengths}"
),
}),
));
};
if drop.length != length {
warn!(
"for {}, expected {}, got {} ({})",
drop.filename, drop.length, raw_length, length
);
return Err(ApplicationDownloadError::DownloadError(
RemoteAccessError::InvalidResponse(DropServerError {
status_code: 400,
status_message: format!(
"for {}, expected {}, got {} ({})",
drop.filename, drop.length, raw_length, length
),
}),
));
}
}
let timestep = start.elapsed().as_millis();
debug!("took {}ms to start downloading", timestep);
let mut pipeline =
DropDownloadPipeline::new(response, bucket.drops.clone(), control_flag, progress)
.map_err(|e| ApplicationDownloadError::IoError(Arc::new(e)))?;
let completed = pipeline
.copy()
.map_err(|e| ApplicationDownloadError::IoError(Arc::new(e)))?;
if !completed {
return Ok(false);
}
// If we complete the file, set the permissions (if on Linux)
#[cfg(unix)]
{
for drop in bucket.drops.iter() {
let permissions = Permissions::from_mode(drop.permissions);
set_permissions(drop.path.clone(), permissions)
.map_err(|e| ApplicationDownloadError::IoError(Arc::new(e)))?;
}
}
let checksums = pipeline
.finish()
.map_err(|e| ApplicationDownloadError::IoError(Arc::new(e)))?;
for (index, drop) in bucket.drops.iter().enumerate() {
let res = hex::encode(**checksums.get(index).unwrap());
if res != drop.checksum {
warn!("context didn't match... doing nothing because we will validate later.");
// return Ok(false);
// return Err(ApplicationDownloadError::Checksum);
}
}
Ok(true)
}

View File

@ -1,89 +0,0 @@
use std::{
collections::HashMap, fs::File, io::{self, Read, Write}, path::{Path, PathBuf}
};
use log::error;
use native_model::{Decode, Encode};
use crate::lock;
pub type DropData = v1::DropData;
pub static DROP_DATA_PATH: &str = ".dropdata";
pub mod v1 {
use std::{collections::HashMap, path::PathBuf, sync::Mutex};
use native_model::native_model;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug)]
#[native_model(id = 9, version = 1, with = native_model::rmp_serde_1_3::RmpSerde)]
pub struct DropData {
pub game_id: String,
pub game_version: String,
pub contexts: Mutex<HashMap<String, bool>>,
pub base_path: PathBuf,
}
impl DropData {
pub fn new(game_id: String, game_version: String, base_path: PathBuf) -> Self {
Self {
base_path,
game_id,
game_version,
contexts: Mutex::new(HashMap::new()),
}
}
}
}
impl DropData {
pub fn generate(game_id: String, game_version: String, base_path: PathBuf) -> Self {
match DropData::read(&base_path) {
Ok(v) => v,
Err(_) => DropData::new(game_id, game_version, base_path),
}
}
pub fn read(base_path: &Path) -> Result<Self, io::Error> {
let mut file = File::open(base_path.join(DROP_DATA_PATH))?;
let mut s = Vec::new();
file.read_to_end(&mut s)?;
native_model::rmp_serde_1_3::RmpSerde::decode(s).map_err(|e| {
io::Error::new(
io::ErrorKind::InvalidData,
format!("Failed to decode drop data: {e}"),
)
})
}
pub fn write(&self) {
let manifest_raw = match native_model::rmp_serde_1_3::RmpSerde::encode(&self) {
Ok(data) => data,
Err(_) => return,
};
let mut file = match File::create(self.base_path.join(DROP_DATA_PATH)) {
Ok(file) => file,
Err(e) => {
error!("{e}");
return;
}
};
match file.write_all(&manifest_raw) {
Ok(()) => {}
Err(e) => error!("{e}"),
}
}
pub fn set_contexts(&self, completed_contexts: &[(String, bool)]) {
*lock!(self.contexts) = completed_contexts.iter().map(|s| (s.0.clone(), s.1)).collect();
}
pub fn set_context(&self, context: String, state: bool) {
lock!(self.contexts).entry(context).insert_entry(state);
}
pub fn get_contexts(&self) -> HashMap<String, bool> {
lock!(self.contexts).clone()
}
}

View File

@ -1,98 +0,0 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
#[derive(Debug, Clone, Serialize)]
// Drops go in buckets
pub struct DownloadDrop {
pub index: usize,
pub filename: String,
pub path: PathBuf,
pub start: usize,
pub length: usize,
pub checksum: String,
pub permissions: u32,
}
#[derive(Debug, Clone, Serialize)]
pub struct DownloadBucket {
pub game_id: String,
pub version: String,
pub drops: Vec<DownloadDrop>,
}
#[derive(Deserialize)]
pub struct DownloadContext {
pub context: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ChunkBodyFile {
filename: String,
chunk_index: usize,
}
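// ChunkBody serializes (camelCase) to something like {"context":"...","files":[{"filename":"...","chunkIndex":0}]} (values are illustrative only).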
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ChunkBody {
pub context: String,
pub files: Vec<ChunkBodyFile>,
}
#[derive(Serialize)]
pub struct ManifestBody {
pub game: String,
pub version: String,
}
impl ChunkBody {
pub fn create(context: &DownloadContext, drops: &[DownloadDrop]) -> ChunkBody {
Self {
context: context.context.clone(),
files: drops
.iter()
.map(|e| ChunkBodyFile {
filename: e.filename.clone(),
chunk_index: e.index,
})
.collect(),
}
}
}
pub type DropManifest = HashMap<String, DropChunk>;
#[derive(Serialize, Deserialize, Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct DropChunk {
pub permissions: u32,
pub ids: Vec<String>,
pub checksums: Vec<String>,
pub lengths: Vec<usize>,
pub version_name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DropValidateContext {
pub index: usize,
pub offset: usize,
pub path: PathBuf,
pub checksum: String,
pub length: usize,
}
impl From<DownloadBucket> for Vec<DropValidateContext> {
fn from(value: DownloadBucket) -> Self {
value
.drops
.into_iter()
.map(|e| DropValidateContext {
index: e.index,
offset: e.start,
path: e.path,
checksum: e.checksum,
length: e.length,
})
.collect()
}
}

View File

@ -1,6 +0,0 @@
pub mod commands;
pub mod download_agent;
mod download_logic;
pub mod drop_data;
mod manifest;
pub mod validate;

View File

@ -1,102 +0,0 @@
use std::{
fs::File,
io::{self, BufWriter, Read, Seek, SeekFrom, Write},
};
use log::debug;
use md5::Context;
use crate::{
download_manager::util::{
download_thread_control_flag::{DownloadThreadControl, DownloadThreadControlFlag},
progress_object::ProgressHandle,
},
error::application_download_error::ApplicationDownloadError,
games::downloads::manifest::DropValidateContext,
};
pub fn validate_game_chunk(
ctx: &DropValidateContext,
control_flag: &DownloadThreadControl,
progress: ProgressHandle,
) -> Result<bool, ApplicationDownloadError> {
debug!(
"Starting chunk validation {}, {}, {} #{}",
ctx.path.display(), ctx.index, ctx.offset, ctx.checksum
);
// If we're paused
if control_flag.get() == DownloadThreadControlFlag::Stop {
progress.set(0);
return Ok(false);
}
let Ok(mut source) = File::open(&ctx.path) else {
return Ok(false);
};
if ctx.offset != 0 {
source
.seek(SeekFrom::Start(ctx.offset as u64))
.expect("Failed to seek to file offset");
}
let mut hasher = md5::Context::new();
let completed =
validate_copy(&mut source, &mut hasher, ctx.length, control_flag, progress)?;
if !completed {
return Ok(false);
}
let res = hex::encode(hasher.compute().0);
if res != ctx.checksum {
return Ok(false);
}
debug!(
"Successfully finished verification #{}, copied {} bytes",
ctx.checksum, ctx.length
);
Ok(true)
}
fn validate_copy(
source: &mut File,
dest: &mut Context,
size: usize,
control_flag: &DownloadThreadControl,
progress: ProgressHandle,
) -> Result<bool, io::Error> {
let copy_buf_size = 512;
let mut copy_buf = vec![0; copy_buf_size];
let mut buf_writer = BufWriter::with_capacity(1024 * 1024, dest);
let mut total_bytes = 0;
loop {
if control_flag.get() == DownloadThreadControlFlag::Stop {
buf_writer.flush()?;
return Ok(false);
}
let mut bytes_read = source.read(&mut copy_buf)?;
total_bytes += bytes_read;
// If we read over (likely), truncate our read to
// the right size
if total_bytes > size {
let over = total_bytes - size;
bytes_read -= over;
total_bytes = size;
}
buf_writer.write_all(&copy_buf[0..bytes_read])?;
progress.add(bytes_read);
if total_bytes >= size {
break;
}
}
buf_writer.flush()?;
Ok(true)
}

View File

@ -1,588 +0,0 @@
use std::fs::remove_dir_all;
use std::sync::Mutex;
use std::thread::spawn;
use log::{debug, error, warn};
use serde::{Deserialize, Serialize};
use tauri::AppHandle;
use tauri::Emitter;
use crate::AppState;
use crate::app_emit;
use crate::database::db::{borrow_db_checked, borrow_db_mut_checked};
use crate::database::models::data::Database;
use crate::database::models::data::{
ApplicationTransientStatus, DownloadableMetadata, GameDownloadStatus, GameVersion,
};
use crate::download_manager::download_manager_frontend::DownloadStatus;
use crate::error::drop_server_error::DropServerError;
use crate::error::library_error::LibraryError;
use crate::error::remote_access_error::RemoteAccessError;
use crate::games::state::{GameStatusManager, GameStatusWithTransient};
use crate::lock;
use crate::remote::auth::generate_authorization_header;
use crate::remote::cache::cache_object_db;
use crate::remote::cache::{cache_object, get_cached_object, get_cached_object_db};
use crate::remote::requests::generate_url;
use crate::remote::utils::DROP_CLIENT_ASYNC;
use crate::remote::utils::DROP_CLIENT_SYNC;
use bitcode::{Decode, Encode};
#[derive(Serialize, Deserialize, Debug)]
pub struct FetchGameStruct {
game: Game,
status: GameStatusWithTransient,
version: Option<GameVersion>,
}
#[derive(Serialize, Deserialize, Clone, Debug, Default, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct Game {
id: String,
m_name: String,
m_short_description: String,
m_description: String,
// mDevelopers
// mPublishers
m_icon_object_id: String,
m_banner_object_id: String,
m_cover_object_id: String,
m_image_library_object_ids: Vec<String>,
m_image_carousel_object_ids: Vec<String>,
}
#[derive(serde::Serialize, Clone)]
pub struct GameUpdateEvent {
pub game_id: String,
pub status: (
Option<GameDownloadStatus>,
Option<ApplicationTransientStatus>,
),
pub version: Option<GameVersion>,
}
#[derive(Serialize, Clone)]
pub struct QueueUpdateEventQueueData {
pub meta: DownloadableMetadata,
pub status: DownloadStatus,
pub progress: f64,
pub current: usize,
pub max: usize,
}
#[derive(serde::Serialize, Clone)]
pub struct QueueUpdateEvent {
pub queue: Vec<QueueUpdateEventQueueData>,
}
#[derive(serde::Serialize, Clone)]
pub struct StatsUpdateEvent {
pub speed: usize,
pub time: usize,
}
pub async fn fetch_library_logic(
state: tauri::State<'_, Mutex<AppState<'_>>>,
hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let do_hard_refresh = hard_refresh.unwrap_or(false);
if !do_hard_refresh && let Ok(library) = get_cached_object("library") {
return Ok(library);
}
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/user/library"], &[])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() != 200 {
let err = response.json().await.unwrap_or(DropServerError {
status_code: 500,
status_message: "Invalid response from server.".to_owned(),
});
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let mut games: Vec<Game> = response.json().await?;
let mut handle = lock!(state);
let mut db_handle = borrow_db_mut_checked();
for game in &games {
handle.games.insert(game.id.clone(), game.clone());
if !db_handle.applications.game_statuses.contains_key(&game.id) {
db_handle
.applications
.game_statuses
.insert(game.id.clone(), GameDownloadStatus::Remote {});
}
}
// Add games that are installed but no longer in library
for meta in db_handle.applications.installed_game_version.values() {
if games.iter().any(|e| e.id == meta.id) {
continue;
}
// We should always have a cache of the object
// Pass db_handle because otherwise we get a deadlock
let game = match get_cached_object_db::<Game>(&meta.id.clone(), &db_handle) {
Ok(game) => game,
Err(err) => {
warn!(
"{} is installed, but encountered error fetching its error: {}.",
meta.id, err
);
continue;
}
};
games.push(game);
}
drop(handle);
drop(db_handle);
cache_object("library", &games)?;
Ok(games)
}
pub async fn fetch_library_logic_offline(
_state: tauri::State<'_, Mutex<AppState<'_>>>,
_hard_refresh: Option<bool>,
) -> Result<Vec<Game>, RemoteAccessError> {
let mut games: Vec<Game> = get_cached_object("library")?;
let db_handle = borrow_db_checked();
games.retain(|game| {
matches!(
&db_handle
.applications
.game_statuses
.get(&game.id)
.unwrap_or(&GameDownloadStatus::Remote {}),
GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }
)
});
Ok(games)
}
pub async fn fetch_game_logic(
id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
let version = {
let state_handle = lock!(state);
let db_lock = borrow_db_checked();
let metadata_option = db_lock.applications.installed_game_version.get(&id);
let version = match metadata_option {
None => None,
Some(metadata) => db_lock
.applications
.game_versions
.get(&metadata.id)
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
.cloned(),
};
let game = state_handle.games.get(&id);
if let Some(game) = game {
let status = GameStatusManager::fetch_state(&id, &db_lock);
let data = FetchGameStruct {
game: game.clone(),
status,
version,
};
cache_object_db(&id, game, &db_lock)?;
return Ok(data);
}
version
};
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/game/", &id], &[])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() == 404 {
let offline_fetch = fetch_game_logic_offline(id.clone(), state).await;
if let Ok(fetch_data) = offline_fetch {
return Ok(fetch_data);
}
return Err(RemoteAccessError::GameNotFound(id));
}
if response.status() != 200 {
let err = response.json().await?;
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let game: Game = response.json().await?;
let mut state_handle = lock!(state);
state_handle.games.insert(id.clone(), game.clone());
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.game_statuses
.entry(id.clone())
.or_insert(GameDownloadStatus::Remote {});
let status = GameStatusManager::fetch_state(&id, &db_handle);
drop(db_handle);
let data = FetchGameStruct {
game: game.clone(),
status,
version,
};
cache_object(&id, &game)?;
Ok(data)
}
pub async fn fetch_game_logic_offline(
id: String,
_state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<FetchGameStruct, RemoteAccessError> {
let db_handle = borrow_db_checked();
let metadata_option = db_handle.applications.installed_game_version.get(&id);
let version = match metadata_option {
None => None,
Some(metadata) => db_handle
.applications
.game_versions
.get(&metadata.id)
.map(|v| v.get(metadata.version.as_ref().unwrap()).unwrap())
.cloned(),
};
let status = GameStatusManager::fetch_state(&id, &db_handle);
let game = get_cached_object::<Game>(&id)?;
drop(db_handle);
Ok(FetchGameStruct {
game,
status,
version,
})
}
pub async fn fetch_game_version_options_logic(
game_id: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<Vec<GameVersion>, RemoteAccessError> {
let client = DROP_CLIENT_ASYNC.clone();
let response = generate_url(&["/api/v1/client/game/versions"], &[("id", &game_id)])?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()
.await?;
if response.status() != 200 {
let err = response.json().await?;
warn!("{err:?}");
return Err(RemoteAccessError::InvalidResponse(err));
}
let data: Vec<GameVersion> = response.json().await?;
let state_lock = lock!(state);
let process_manager_lock = lock!(state_lock.process_manager);
let data: Vec<GameVersion> = data
.into_iter()
.filter(|v| process_manager_lock.valid_platform(&v.platform, &state_lock))
.collect();
drop(process_manager_lock);
drop(state_lock);
Ok(data)
}
/**
* Called by:
* - on_cancel, when cancelled, for obvious reasons
* - when downloading, so that if Drop unexpectedly quits, we can resume the download (hidden by the "Downloading..." transient state, though)
* - when scanning, to import the game
*/
pub fn set_partially_installed(
meta: &DownloadableMetadata,
install_dir: String,
app_handle: Option<&AppHandle>,
) {
set_partially_installed_db(&mut borrow_db_mut_checked(), meta, install_dir, app_handle);
}
pub fn set_partially_installed_db(
db_lock: &mut Database,
meta: &DownloadableMetadata,
install_dir: String,
app_handle: Option<&AppHandle>,
) {
db_lock.applications.transient_statuses.remove(meta);
db_lock.applications.game_statuses.insert(
meta.id.clone(),
GameDownloadStatus::PartiallyInstalled {
version_name: meta.version.as_ref().unwrap().clone(),
install_dir,
},
);
db_lock
.applications
.installed_game_version
.insert(meta.id.clone(), meta.clone());
if let Some(app_handle) = app_handle {
push_game_update(
app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, db_lock),
);
}
}
pub fn uninstall_game_logic(meta: DownloadableMetadata, app_handle: &AppHandle) {
debug!("triggered uninstall for agent");
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});
push_game_update(
app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, &db_handle),
);
let previous_state = db_handle.applications.game_statuses.get(&meta.id).cloned();
let previous_state = if let Some(state) = previous_state {
state
} else {
warn!("uninstall job doesn't have previous state, failing silently");
return;
};
if let Some((_, install_dir)) = match previous_state {
GameDownloadStatus::Installed {
version_name,
install_dir,
} => Some((version_name, install_dir)),
GameDownloadStatus::SetupRequired {
version_name,
install_dir,
} => Some((version_name, install_dir)),
GameDownloadStatus::PartiallyInstalled {
version_name,
install_dir,
} => Some((version_name, install_dir)),
_ => None,
} {
db_handle
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Uninstalling {});
drop(db_handle);
let app_handle = app_handle.clone();
spawn(move || {
if let Err(e) = remove_dir_all(install_dir) {
error!("{e}");
} else {
let mut db_handle = borrow_db_mut_checked();
db_handle.applications.transient_statuses.remove(&meta);
db_handle
.applications
.installed_game_version
.remove(&meta.id);
db_handle
.applications
.game_statuses
.insert(meta.id.clone(), GameDownloadStatus::Remote {});
let _ = db_handle.applications.transient_statuses.remove(&meta);
push_game_update(
&app_handle,
&meta.id,
None,
GameStatusManager::fetch_state(&meta.id, &db_handle),
);
debug!("uninstalled game id {}", &meta.id);
app_emit!(app_handle, "update_library", ());
}
});
} else {
warn!("invalid previous state for uninstall, failing silently.");
}
}
pub fn get_current_meta(game_id: &String) -> Option<DownloadableMetadata> {
borrow_db_checked()
.applications
.installed_game_version
.get(game_id)
.cloned()
}
pub fn on_game_complete(
meta: &DownloadableMetadata,
install_dir: String,
app_handle: &AppHandle,
) -> Result<(), RemoteAccessError> {
// Fetch game version information from remote
if meta.version.is_none() {
return Err(RemoteAccessError::GameNotFound(meta.id.clone()));
}
let client = DROP_CLIENT_SYNC.clone();
let response = generate_url(
&["/api/v1/client/game/version"],
&[
("id", &meta.id),
("version", meta.version.as_ref().unwrap()),
],
)?;
let response = client
.get(response)
.header("Authorization", generate_authorization_header())
.send()?;
let game_version: GameVersion = response.json()?;
let mut handle = borrow_db_mut_checked();
handle
.applications
.game_versions
.entry(meta.id.clone())
.or_default()
.insert(meta.version.clone().unwrap(), game_version.clone());
handle
.applications
.installed_game_version
.insert(meta.id.clone(), meta.clone());
drop(handle);
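// Versions that ship a setup command stay SetupRequired until that setup has been run successfully.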
let status = if game_version.setup_command.is_empty() {
GameDownloadStatus::Installed {
version_name: meta.version.clone().unwrap(),
install_dir,
}
} else {
GameDownloadStatus::SetupRequired {
version_name: meta.version.clone().unwrap(),
install_dir,
}
};
let mut db_handle = borrow_db_mut_checked();
db_handle
.applications
.game_statuses
.insert(meta.id.clone(), status.clone());
drop(db_handle);
app_emit!(
app_handle,
&format!("update_game/{}", meta.id),
GameUpdateEvent {
game_id: meta.id.clone(),
status: (Some(status), None),
version: Some(game_version),
}
);
Ok(())
}
pub fn push_game_update(
app_handle: &AppHandle,
game_id: &String,
version: Option<GameVersion>,
status: GameStatusWithTransient,
) {
if let Some(GameDownloadStatus::Installed { .. } | GameDownloadStatus::SetupRequired { .. }) =
&status.0
&& version.is_none()
{
panic!("pushed game for installed game that doesn't have version information");
}
app_emit!(
app_handle,
&format!("update_game/{game_id}"),
GameUpdateEvent {
game_id: game_id.clone(),
status,
version,
}
);
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FrontendGameOptions {
launch_string: String,
}
#[tauri::command]
pub fn update_game_configuration(
game_id: String,
options: FrontendGameOptions,
) -> Result<(), LibraryError> {
let mut handle = borrow_db_mut_checked();
let installed_version = handle
.applications
.installed_game_version
.get(&game_id)
.ok_or(LibraryError::MetaNotFound(game_id))?;
let id = installed_version.id.clone();
let version = installed_version.version.clone().ok_or(LibraryError::VersionNotFound(id.clone()))?;
let mut existing_configuration = handle
.applications
.game_versions
.get(&id)
.unwrap()
.get(&version)
.unwrap()
.clone();
// Add more options in here
existing_configuration.launch_command_template = options.launch_string;
// Add no more options past here
handle
.applications
.game_versions
.get_mut(&id)
.unwrap()
.insert(version.to_string(), existing_configuration);
Ok(())
}

View File

@ -1,5 +0,0 @@
pub mod collections;
pub mod commands;
pub mod downloads;
pub mod library;
pub mod state;

View File

@ -1,35 +0,0 @@
use crate::database::models::data::{
ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata, GameDownloadStatus,
};
pub type GameStatusWithTransient = (
Option<GameDownloadStatus>,
Option<ApplicationTransientStatus>,
);
pub struct GameStatusManager {}
impl GameStatusManager {
pub fn fetch_state(game_id: &String, database: &Database) -> GameStatusWithTransient {
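// A transient (in-flight) status always takes precedence over the persisted download status.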
let online_state = database
.applications
.transient_statuses
.get(&DownloadableMetadata {
id: game_id.to_string(),
download_type: DownloadType::Game,
version: None,
})
.cloned();
let offline_state = database.applications.game_statuses.get(game_id).cloned();
if online_state.is_some() {
return (None, online_state);
}
if offline_state.is_some() {
return (offline_state, None);
}
(None, None)
}
}

View File

@ -5,117 +5,13 @@
#![feature(iterator_try_collect)]
#![deny(clippy::all)]
mod database;
mod games;
mod client;
mod download_manager;
mod error;
mod process;
mod remote;
mod utils;
use crate::database::scan::scan_install_dirs;
use crate::process::commands::open_process_logs;
use crate::process::process_handlers::UMU_LAUNCHER_EXECUTABLE;
use crate::remote::commands::auth_initiate_code;
use crate::remote::fetch_object::fetch_object_wrapper;
use crate::remote::server_proto::handle_server_proto_wrapper;
use crate::{database::db::DatabaseImpls, games::downloads::commands::resume_download};
use bitcode::{Decode, Encode};
use client::commands::fetch_state;
use client::{
autostart::{get_autostart_enabled, sync_autostart_on_startup, toggle_autostart},
cleanup::{cleanup_and_exit, quit},
};
use database::commands::{
add_download_dir, delete_download_dir, fetch_download_dir_stats, fetch_settings,
fetch_system_data, update_settings,
};
use database::db::{DATA_ROOT_DIR, DatabaseInterface, borrow_db_checked, borrow_db_mut_checked};
use database::models::data::GameDownloadStatus;
use download_manager::commands::{
cancel_game, move_download_in_queue, pause_downloads, resume_downloads,
};
use download_manager::download_manager_builder::DownloadManagerBuilder;
use download_manager::download_manager_frontend::DownloadManager;
use games::collections::commands::{
add_game_to_collection, create_collection, delete_collection, delete_game_in_collection,
fetch_collection, fetch_collections,
};
use games::commands::{
fetch_game, fetch_game_status, fetch_game_version_options, fetch_library, uninstall_game,
};
use games::downloads::commands::download_game;
use games::library::{Game, update_game_configuration};
use log::{LevelFilter, debug, info, warn};
use log4rs::Config;
use log4rs::append::console::ConsoleAppender;
use log4rs::append::file::FileAppender;
use log4rs::config::{Appender, Root};
use log4rs::encode::pattern::PatternEncoder;
use process::commands::{kill_game, launch_game};
use process::process_manager::ProcessManager;
use remote::auth::{self, recieve_handshake};
use remote::commands::{
auth_initiate, fetch_drop_object, gen_drop_url, manual_recieve_handshake, retry_connect,
sign_out, use_remote,
};
use remote::server_proto::handle_server_proto_offline_wrapper;
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::io::Write;
use std::panic::PanicHookInfo;
use std::path::Path;
use std::str::FromStr;
use std::sync::Arc;
use std::time::SystemTime;
use std::{
collections::HashMap,
sync::{LazyLock, Mutex},
};
use std::{env, panic};
use tauri::menu::{Menu, MenuItem, PredefinedMenuItem};
use tauri::tray::TrayIconBuilder;
use tauri::{AppHandle, Manager, RunEvent, WindowEvent};
use tauri_plugin_deep_link::DeepLinkExt;
use tauri_plugin_dialog::DialogExt;
#[derive(Clone, Copy, Serialize, Eq, PartialEq)]
pub enum AppStatus {
NotConfigured,
Offline,
ServerError,
SignedOut,
SignedIn,
SignedInNeedsReauth,
ServerUnavailable,
}
#[derive(Clone, Serialize, Deserialize, Encode, Decode)]
#[serde(rename_all = "camelCase")]
pub struct User {
id: String,
username: String,
admin: bool,
display_name: String,
profile_picture_object_id: String,
}
#[derive(Clone)]
pub struct CompatInfo {
umu_installed: bool,
}
fn create_new_compat_info() -> Option<CompatInfo> {
#[cfg(target_os = "windows")]
return None;
let has_umu_installed = UMU_LAUNCHER_EXECUTABLE.is_some();
Some(CompatInfo {
umu_installed: has_umu_installed,
})
}
#[derive(Clone, Serialize)]
#[serde(rename_all = "camelCase")]
@ -164,7 +60,7 @@ async fn setup(handle: AppHandle) -> AppState<'static> {
log4rs::init_config(config).expect("Failed to initialise log4rs");
let games = HashMap::new();
let download_manager = Arc::new(DownloadManagerBuilder::build(handle.clone()));
let process_manager = Arc::new(Mutex::new(ProcessManager::new(handle.clone())));
let compat_info = create_new_compat_info();
@ -249,8 +145,6 @@ async fn setup(handle: AppHandle) -> AppState<'static> {
}
}
pub static DB: LazyLock<DatabaseInterface> = LazyLock::new(DatabaseInterface::set_up_database);
pub fn custom_panic_handler(e: &PanicHookInfo) -> Option<()> {
let crash_file = DATA_ROOT_DIR.join(format!(
"crash-{}.log",
@ -385,17 +279,20 @@ pub fn run() {
}
};
if let Some("handshake") = url.host_str() {
tauri::async_runtime::spawn(recieve_handshake(
handle.clone(),
url.path().to_string(),
));
}
});
let open_menu_item = MenuItem::with_id(app, "open", "Open", true, None::<&str>).expect("Failed to generate open menu item");
let sep = PredefinedMenuItem::separator(app).expect("Failed to generate menu separator item");
let open_menu_item = MenuItem::with_id(app, "open", "Open", true, None::<&str>)
.expect("Failed to generate open menu item");
let quit_menu_item = MenuItem::with_id(app, "quit", "Quit", true, None::<&str>).expect("Failed to generate quit menu item");
let sep = PredefinedMenuItem::separator(app)
.expect("Failed to generate menu separator item");
let quit_menu_item = MenuItem::with_id(app, "quit", "Quit", true, None::<&str>)
.expect("Failed to generate quit menu item");
let menu = Menu::with_items(
app,
@ -414,7 +311,11 @@ pub fn run() {
run_on_tray(|| {
TrayIconBuilder::new()
.icon(app.default_window_icon().expect("Failed to get default window icon").clone())
.icon(
app.default_window_icon()
.expect("Failed to get default window icon")
.clone(),
)
.menu(&menu)
.on_menu_event(|app, event| match event.id.as_ref() {
"open" => {
@ -511,3 +412,94 @@ fn run_on_tray<T: FnOnce()>(f: T) {
(f)();
}
}
// TODO: Refactor
pub async fn recieve_handshake(app: AppHandle, path: String) {
// Tell the app we're processing
app_emit!(app, "auth/processing", ());
let handshake_result = recieve_handshake_logic(&app, path).await;
if let Err(e) = handshake_result {
warn!("error with authentication: {e}");
app_emit!(app, "auth/failed", e.to_string());
return;
}
let app_state = app.state::<Mutex<AppState>>();
let (app_status, user) = remote::setup().await;
let mut state_lock = lock!(app_state);
state_lock.status = app_status;
state_lock.user = user;
let _ = clear_cached_object("collections");
let _ = clear_cached_object("library");
drop(state_lock);
app_emit!(app, "auth/finished", ());
}
// TODO: Refactor
async fn recieve_handshake_logic(app: &AppHandle, path: String) -> Result<(), RemoteAccessError> {
let path_chunks: Vec<&str> = path.split('/').collect();
if path_chunks.len() != 3 {
app_emit!(app, "auth/failed", ());
return Err(RemoteAccessError::HandshakeFailed(
"failed to parse token".to_string(),
));
}
let base_url = {
let handle = borrow_db_checked();
Url::parse(handle.base_url.as_str())?
};
let client_id = path_chunks
.get(1)
.expect("Failed to get client id from path chunks");
let token = path_chunks
.get(2)
.expect("Failed to get token from path chunks");
let body = HandshakeRequestBody {
client_id: (client_id).to_string(),
token: (token).to_string(),
};
let endpoint = base_url.join("/api/v1/client/auth/handshake")?;
let client = DROP_CLIENT_ASYNC.clone();
let response = client.post(endpoint).json(&body).send().await?;
debug!("handshake responsded with {}", response.status().as_u16());
if !response.status().is_success() {
return Err(RemoteAccessError::InvalidResponse(response.json().await?));
}
let response_struct: HandshakeResponse = response.json().await?;
{
let mut handle = borrow_db_mut_checked();
handle.auth = Some(DatabaseAuth {
private: response_struct.private,
cert: response_struct.certificate,
client_id: response_struct.id,
web_token: None,
});
}
let web_token = {
let header = generate_authorization_header();
let token = client
.post(base_url.join("/api/v1/client/user/webtoken")?)
.header("Authorization", header)
.send()
.await?;
token.text().await?
};
let mut handle = borrow_db_mut_checked();
handle.auth.as_mut().unwrap().web_token = Some(web_token);
Ok(())
}

View File

@ -1,6 +1,6 @@
use std::sync::Mutex;
use crate::{error::process_error::ProcessError, lock, AppState};
use crate::{AppState, error::process_error::ProcessError, lock};
#[tauri::command]
pub fn launch_game(
@ -46,5 +46,13 @@ pub fn open_process_logs(
) -> Result<(), ProcessError> {
let state_lock = lock!(state);
let mut process_manager_lock = lock!(state_lock.process_manager);
let dir = process_manager_lock.get_log_dir(game_id);
state
.handle()
.opener()
.open_path(dir.display().to_string(), None::<&str>)
.map_err(ProcessError::OpenerError)?;
process_manager_lock.open_process_logs(game_id)
}

View File

@ -1,33 +0,0 @@
use std::collections::HashMap;
use dynfmt::{Argument, FormatArgs};
pub struct DropFormatArgs {
positional: Vec<String>,
map: HashMap<&'static str, String>,
}
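// Positional {0} holds the fully built launch string; {dir}, {exe} and {abs_exe} are exposed as named keys for launch_command_template.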
impl DropFormatArgs {
pub fn new(launch_string: String, working_dir: &String, executable_name: &String, absolute_executable_name: String) -> Self {
let mut positional = Vec::new();
let mut map: HashMap<&'static str, String> = HashMap::new();
positional.push(launch_string);
map.insert("dir", working_dir.to_string());
map.insert("exe", executable_name.to_string());
map.insert("abs_exe", absolute_executable_name);
Self { positional, map }
}
}
impl FormatArgs for DropFormatArgs {
fn get_index(&self, index: usize) -> Result<Option<dynfmt::Argument<'_>>, ()> {
Ok(self.positional.get(index).map(|arg| arg as Argument<'_>))
}
fn get_key(&self, key: &str) -> Result<Option<dynfmt::Argument<'_>>, ()> {
Ok(self.map.get(key).map(|arg| arg as Argument<'_>))
}
}

View File

@ -1,5 +0,0 @@
pub mod commands;
pub mod process_manager;
pub mod process_handlers;
pub mod format;
pub mod utils;

View File

@ -1,148 +0,0 @@
use std::{
ffi::OsStr,
path::PathBuf,
process::{Command, Stdio},
sync::LazyLock,
};
use log::{debug, info};
use crate::{
AppState,
database::models::data::{Database, DownloadableMetadata, GameVersion},
error::process_error::ProcessError,
process::process_manager::{Platform, ProcessHandler},
};
pub struct NativeGameLauncher;
impl ProcessHandler for NativeGameLauncher {
fn create_launch_process(
&self,
_meta: &DownloadableMetadata,
launch_command: String,
args: Vec<String>,
_game_version: &GameVersion,
_current_dir: &str,
) -> Result<String, ProcessError> {
Ok(format!("\"{}\" {}", launch_command, args.join(" ")))
}
fn valid_for_platform(&self, _db: &Database, _state: &AppState, _target: &Platform) -> bool {
true
}
}
pub static UMU_LAUNCHER_EXECUTABLE: LazyLock<Option<PathBuf>> = LazyLock::new(|| {
let x = get_umu_executable();
info!("{:?}", &x);
x
});
const UMU_BASE_LAUNCHER_EXECUTABLE: &str = "umu-run";
const UMU_INSTALL_DIRS: [&str; 4] = ["/app/share", "/usr/local/share", "/usr/share", "/opt"];
fn get_umu_executable() -> Option<PathBuf> {
if check_executable_exists(UMU_BASE_LAUNCHER_EXECUTABLE) {
return Some(PathBuf::from(UMU_BASE_LAUNCHER_EXECUTABLE));
}
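// Fall back to well-known install prefixes when umu-run is not resolvable via PATH.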
for dir in UMU_INSTALL_DIRS {
let p = PathBuf::from(dir).join(UMU_BASE_LAUNCHER_EXECUTABLE);
if check_executable_exists(&p) {
return Some(p);
}
}
None
}
fn check_executable_exists<P: AsRef<OsStr>>(exec: P) -> bool {
let has_umu_installed = Command::new(exec).stdout(Stdio::null()).output();
has_umu_installed.is_ok()
}
pub struct UMULauncher;
impl ProcessHandler for UMULauncher {
fn create_launch_process(
&self,
_meta: &DownloadableMetadata,
launch_command: String,
args: Vec<String>,
game_version: &GameVersion,
_current_dir: &str,
) -> Result<String, ProcessError> {
debug!("Game override: \"{:?}\"", &game_version.umu_id_override);
let game_id = match &game_version.umu_id_override {
Some(game_override) => {
if game_override.is_empty() {
game_version.game_id.clone()
} else {
game_override.clone()
}
}
None => game_version.game_id.clone(),
};
Ok(format!(
"GAMEID={game_id} {umu:?} \"{launch}\" {args}",
umu = UMU_LAUNCHER_EXECUTABLE.as_ref().expect("Failed to get UMU_LAUNCHER_EXECUTABLE as ref"),
launch = launch_command,
args = args.join(" ")
))
}
fn valid_for_platform(&self, _db: &Database, state: &AppState, _target: &Platform) -> bool {
let Some(ref compat_info) = state.compat_info else {
return false;
};
compat_info.umu_installed
}
}
pub struct AsahiMuvmLauncher;
impl ProcessHandler for AsahiMuvmLauncher {
fn create_launch_process(
&self,
meta: &DownloadableMetadata,
launch_command: String,
args: Vec<String>,
game_version: &GameVersion,
current_dir: &str,
) -> Result<String, ProcessError> {
let umu_launcher = UMULauncher {};
let umu_string = umu_launcher.create_launch_process(
meta,
launch_command,
args,
game_version,
current_dir,
)?;
let mut args_cmd = umu_string
.split("umu-run")
.collect::<Vec<&str>>()
.into_iter();
let args = args_cmd
.next()
.ok_or(ProcessError::InvalidArguments(umu_string.clone()))?
.trim();
let cmd = format!("umu-run{}", args_cmd.next().ok_or(ProcessError::InvalidArguments(umu_string.clone()))?);
Ok(format!("{args} muvm -- {cmd}"))
}
#[allow(unreachable_code)]
#[allow(unused_variables)]
fn valid_for_platform(&self, _db: &Database, state: &AppState, _target: &Platform) -> bool {
#[cfg(not(target_os = "linux"))]
return false;
#[cfg(not(target_arch = "aarch64"))]
return false;
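// A 16 KiB page size is used as the signal that this is an Asahi Linux (Apple Silicon) system, where games are wrapped in muvm.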
let page_size = page_size::get();
if page_size != 16384 {
return false;
}
let Some(ref compat_info) = state.compat_info else {
return false;
};
compat_info.umu_installed
}
}

View File

@ -1,480 +0,0 @@
use std::{
collections::HashMap,
fs::{OpenOptions, create_dir_all},
io::{self},
path::PathBuf,
process::{Command, ExitStatus},
str::FromStr,
sync::{Arc, Mutex},
thread::spawn,
time::{Duration, SystemTime},
};
use dynfmt::Format;
use dynfmt::SimpleCurlyFormat;
use log::{debug, info, warn};
use serde::{Deserialize, Serialize};
use shared_child::SharedChild;
use tauri::{AppHandle, Emitter, Manager};
use tauri_plugin_opener::OpenerExt;
use crate::{
AppState,
database::{
db::{DATA_ROOT_DIR, borrow_db_checked, borrow_db_mut_checked},
models::data::{
ApplicationTransientStatus, Database, DownloadType, DownloadableMetadata,
GameDownloadStatus, GameVersion,
},
},
error::process_error::ProcessError,
games::{library::push_game_update, state::GameStatusManager},
process::{
format::DropFormatArgs,
process_handlers::{AsahiMuvmLauncher, NativeGameLauncher, UMULauncher},
},
lock,
};
pub struct RunningProcess {
handle: Arc<SharedChild>,
start: SystemTime,
manually_killed: bool,
}
pub struct ProcessManager<'a> {
current_platform: Platform,
log_output_dir: PathBuf,
processes: HashMap<String, RunningProcess>,
app_handle: AppHandle,
game_launchers: Vec<(
(Platform, Platform),
&'a (dyn ProcessHandler + Sync + Send + 'static),
)>,
}
impl ProcessManager<'_> {
pub fn new(app_handle: AppHandle) -> Self {
let log_output_dir = DATA_ROOT_DIR.join("logs");
ProcessManager {
#[cfg(target_os = "windows")]
current_platform: Platform::Windows,
#[cfg(target_os = "macos")]
current_platform: Platform::MacOs,
#[cfg(target_os = "linux")]
current_platform: Platform::Linux,
app_handle,
processes: HashMap::new(),
log_output_dir,
game_launchers: vec![
// Current platform to target platform
(
(Platform::Windows, Platform::Windows),
&NativeGameLauncher {} as &(dyn ProcessHandler + Sync + Send + 'static),
),
(
(Platform::Linux, Platform::Linux),
&NativeGameLauncher {} as &(dyn ProcessHandler + Sync + Send + 'static),
),
(
(Platform::MacOs, Platform::MacOs),
&NativeGameLauncher {} as &(dyn ProcessHandler + Sync + Send + 'static),
),
(
(Platform::Linux, Platform::Windows),
&AsahiMuvmLauncher {} as &(dyn ProcessHandler + Sync + Send + 'static),
),
(
(Platform::Linux, Platform::Windows),
&UMULauncher {} as &(dyn ProcessHandler + Sync + Send + 'static),
),
],
}
}
pub fn kill_game(&mut self, game_id: String) -> Result<(), io::Error> {
match self.processes.get_mut(&game_id) {
Some(process) => {
process.manually_killed = true;
process.handle.kill()?;
process.handle.wait()?;
Ok(())
}
None => Err(io::Error::new(
io::ErrorKind::NotFound,
"Game ID not running",
)),
}
}
fn get_log_dir(&self, game_id: String) -> PathBuf {
self.log_output_dir.join(game_id)
}
pub fn open_process_logs(&mut self, game_id: String) -> Result<(), ProcessError> {
let dir = self.get_log_dir(game_id);
self.app_handle
.opener()
.open_path(dir.display().to_string(), None::<&str>)
.map_err(ProcessError::OpenerError)?;
Ok(())
}
fn on_process_finish(&mut self, game_id: String, result: Result<ExitStatus, std::io::Error>) {
if !self.processes.contains_key(&game_id) {
warn!(
"process on_finish was called, but game_id is no longer valid. finished with result: {result:?}"
);
return;
}
debug!("process for {:?} exited with {:?}", &game_id, result);
let process = match self.processes.remove(&game_id) {
Some(process) => process,
None => {
info!("Attempted to stop process {game_id} which didn't exist");
return;
}
};
let mut db_handle = borrow_db_mut_checked();
let meta = db_handle
.applications
.installed_game_version
.get(&game_id)
.cloned()
.unwrap_or_else(|| panic!("Could not get installed version of {}", &game_id));
db_handle.applications.transient_statuses.remove(&meta);
let current_state = db_handle.applications.game_statuses.get(&game_id).cloned();
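// A clean exit of the setup command promotes the game from SetupRequired to Installed.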
if let Some(GameDownloadStatus::SetupRequired {
version_name,
install_dir,
}) = current_state
&& let Ok(exit_code) = result
&& exit_code.success()
{
db_handle.applications.game_statuses.insert(
game_id.clone(),
GameDownloadStatus::Installed {
version_name: version_name.to_string(),
install_dir: install_dir.to_string(),
},
);
}
let elapsed = process.start.elapsed().unwrap_or(Duration::ZERO);
// If we started and ended really quickly, something might've gone wrong
// Or if the status isn't 0
// Or if it's an error
if !process.manually_killed
&& (elapsed.as_secs() <= 2 || result.map_or(true, |r| !r.success()))
{
warn!("drop detected that the game {game_id} may have failed to launch properly");
let _ = self.app_handle.emit("launch_external_error", &game_id);
}
let version_data = match db_handle.applications.game_versions.get(&game_id) {
// This unwrap should be resolved by making the version hashmap accept an Option rather than just a String
Some(res) => res.get(&meta.version.unwrap()).expect("Failed to get game version from installed game versions. Is the database corrupted?"),
None => todo!(),
};
let status = GameStatusManager::fetch_state(&game_id, &db_handle);
push_game_update(
&self.app_handle,
&game_id,
Some(version_data.clone()),
status,
);
}
fn fetch_process_handler(
&self,
db_lock: &Database,
state: &AppState,
target_platform: &Platform,
) -> Result<&(dyn ProcessHandler + Send + Sync), ProcessError> {
Ok(self
.game_launchers
.iter()
.find(|e| {
let (e_current, e_target) = e.0;
e_current == self.current_platform
&& e_target == *target_platform
&& e.1.valid_for_platform(db_lock, state, target_platform)
})
.ok_or(ProcessError::InvalidPlatform)?
.1)
}
pub fn valid_platform(&self, platform: &Platform, state: &AppState) -> bool {
let db_lock = borrow_db_checked();
let process_handler = self.fetch_process_handler(&db_lock, state, platform);
process_handler.is_ok()
}
pub fn launch_process(
&mut self,
game_id: String,
state: &AppState,
) -> Result<(), ProcessError> {
if self.processes.contains_key(&game_id) {
return Err(ProcessError::AlreadyRunning);
}
let version = match borrow_db_checked()
.applications
.game_statuses
.get(&game_id)
.cloned()
{
Some(GameDownloadStatus::Installed { version_name, .. }) => version_name,
Some(GameDownloadStatus::SetupRequired { version_name, .. }) => version_name,
_ => return Err(ProcessError::NotInstalled),
};
let meta = DownloadableMetadata {
id: game_id.clone(),
version: Some(version.clone()),
download_type: DownloadType::Game,
};
let mut db_lock = borrow_db_mut_checked();
let game_status = db_lock
.applications
.game_statuses
.get(&game_id)
.ok_or(ProcessError::NotInstalled)?;
let (version_name, install_dir) = match game_status {
GameDownloadStatus::Installed {
version_name,
install_dir,
} => (version_name, install_dir),
GameDownloadStatus::SetupRequired {
version_name,
install_dir,
} => (version_name, install_dir),
_ => return Err(ProcessError::NotInstalled),
};
debug!(
"Launching process {:?} with version {:?}",
&game_id,
db_lock.applications.game_versions.get(&game_id)
);
let game_version = db_lock
.applications
.game_versions
.get(&game_id)
.ok_or(ProcessError::InvalidID)?
.get(version_name)
.ok_or(ProcessError::InvalidVersion)?;
// TODO: refactor this path with open_process_logs
let game_log_folder = &self.get_log_dir(game_id);
create_dir_all(game_log_folder).map_err(ProcessError::IOError)?;
let current_time = chrono::offset::Local::now();
let log_file = OpenOptions::new()
.write(true)
.truncate(true)
.read(true)
.create(true)
.open(game_log_folder.join(format!("{}-{}.log", &version, current_time.timestamp())))
.map_err(ProcessError::IOError)?;
let error_file = OpenOptions::new()
.write(true)
.truncate(true)
.read(true)
.create(true)
.open(game_log_folder.join(format!(
"{}-{}-error.log",
&version,
current_time.timestamp()
)))
.map_err(ProcessError::IOError)?;
let target_platform = game_version.platform;
let process_handler = self.fetch_process_handler(&db_lock, state, &target_platform)?;
let (launch, args) = match game_status {
GameDownloadStatus::Installed {
version_name: _,
install_dir: _,
} => (&game_version.launch_command, &game_version.launch_args),
GameDownloadStatus::SetupRequired {
version_name: _,
install_dir: _,
} => (&game_version.setup_command, &game_version.setup_args),
GameDownloadStatus::PartiallyInstalled {
version_name: _,
install_dir: _,
} => unreachable!("Game registered as 'Partially Installed'"),
GameDownloadStatus::Remote {} => unreachable!("Game registered as 'Remote'"),
};
#[allow(clippy::unwrap_used)]
let launch = PathBuf::from_str(install_dir).unwrap().join(launch);
let launch = launch.display().to_string();
let launch_string = process_handler.create_launch_process(
&meta,
launch.to_string(),
args.clone(),
game_version,
install_dir,
)?;
let format_args = DropFormatArgs::new(
launch_string,
install_dir,
&game_version.launch_command,
launch.to_string(),
);
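// The user-editable launch_command_template is expanded with SimpleCurlyFormat; {0} is the launch string built above and the named keys come from DropFormatArgs.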
let launch_string = SimpleCurlyFormat
.format(&game_version.launch_command_template, format_args)
.map_err(|e| ProcessError::FormatError(e.to_string()))?
.to_string();
#[cfg(target_os = "windows")]
use std::os::windows::process::CommandExt;
#[cfg(target_os = "windows")]
let mut command = Command::new("cmd");
#[cfg(target_os = "windows")]
command.raw_arg(format!("/C \"{}\"", &launch_string));
info!("launching (in {install_dir}): {launch_string}",);
#[cfg(unix)]
let mut command: Command = Command::new("sh");
#[cfg(unix)]
command.args(vec!["-c", &launch_string]);
debug!("final launch string:\n\n{launch_string}\n");
command
.stderr(error_file)
.stdout(log_file)
.env_remove("RUST_LOG")
.current_dir(install_dir);
let child = command.spawn().map_err(ProcessError::IOError)?;
let launch_process_handle =
Arc::new(SharedChild::new(child).map_err(ProcessError::IOError)?);
db_lock
.applications
.transient_statuses
.insert(meta.clone(), ApplicationTransientStatus::Running {});
push_game_update(
&self.app_handle,
&meta.id,
None,
(None, Some(ApplicationTransientStatus::Running {})),
);
let wait_thread_handle = launch_process_handle.clone();
let wait_thread_apphandle = self.app_handle.clone();
let wait_thread_game_id = meta.clone();
spawn(move || {
let result: Result<ExitStatus, std::io::Error> = launch_process_handle.wait();
let app_state = wait_thread_apphandle.state::<Mutex<AppState>>();
let app_state_handle = lock!(app_state);
let mut process_manager_handle = app_state_handle
.process_manager
.lock()
.expect("Failed to lock onto process manager");
process_manager_handle.on_process_finish(wait_thread_game_id.id, result);
// As everything goes out of scope, they should get dropped
// But just to be explicit about it
drop(process_manager_handle);
drop(app_state_handle);
});
self.processes.insert(
meta.id,
RunningProcess {
handle: wait_thread_handle,
start: SystemTime::now(),
manually_killed: false,
},
);
Ok(())
}
}
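// Hedged usage sketch (added for illustration, not part of the original source):
// how launch_process might be driven from a command handler. The command name
// and error mapping below are assumptions.
//
//     #[tauri::command]
//     fn launch_game(game_id: String, state: tauri::State<'_, Mutex<AppState>>) -> Result<(), String> {
//         let state_lock = lock!(state);
//         let mut process_manager = lock!(state_lock.process_manager);
//         process_manager
//             .launch_process(game_id, &state_lock)
//             .map_err(|e| e.to_string())
//     }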
#[derive(Eq, Hash, PartialEq, Serialize, Deserialize, Clone, Copy, Debug)]
pub enum Platform {
Windows,
Linux,
MacOs,
}
impl Platform {
#[cfg(target_os = "windows")]
pub const HOST: Platform = Self::Windows;
#[cfg(target_os = "macos")]
pub const HOST: Platform = Self::MacOs;
#[cfg(target_os = "linux")]
pub const HOST: Platform = Self::Linux;
pub fn is_case_sensitive(&self) -> bool {
match self {
Self::Windows | Self::MacOs => false,
Self::Linux => true,
}
}
}
impl From<&str> for Platform {
fn from(value: &str) -> Self {
match value.to_lowercase().trim() {
"windows" => Self::Windows,
"linux" => Self::Linux,
"mac" | "macos" => Self::MacOs,
_ => unimplemented!(),
}
}
}
impl From<whoami::Platform> for Platform {
fn from(value: whoami::Platform) -> Self {
match value {
whoami::Platform::Windows => Platform::Windows,
whoami::Platform::Linux => Platform::Linux,
whoami::Platform::MacOS => Platform::MacOs,
_ => unimplemented!(),
}
}
}
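// Hedged example (added for illustration, not part of the original source): a
// minimal check of the Platform helpers defined above.
#[cfg(test)]
mod platform_example {
    use super::Platform;

    #[test]
    fn parses_and_classifies_platforms() {
        // `From<&str>` lowercases and trims its input.
        assert_eq!(Platform::from(" Windows "), Platform::Windows);
        // Only Linux paths are treated as case sensitive.
        assert!(Platform::Linux.is_case_sensitive());
        assert!(!Platform::MacOs.is_case_sensitive());
        // `HOST` resolves at compile time to the current target platform.
        let _host: Platform = Platform::HOST;
    }
}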
pub trait ProcessHandler: Send + 'static {
fn create_launch_process(
&self,
meta: &DownloadableMetadata,
launch_command: String,
args: Vec<String>,
game_version: &GameVersion,
current_dir: &str,
) -> Result<String, ProcessError>;
fn valid_for_platform(&self, db: &Database, state: &AppState, target: &Platform) -> bool;
}
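// Hedged sketch (added for illustration, not part of the original source): the
// shape a ProcessHandler implementation is expected to take. The struct name
// and pass-through behaviour are assumptions; real handlers (e.g. a
// compatibility-layer wrapper) would build a different command line.
#[allow(dead_code)]
struct NativeProcessHandler;

impl ProcessHandler for NativeProcessHandler {
    fn create_launch_process(
        &self,
        _meta: &DownloadableMetadata,
        launch_command: String,
        args: Vec<String>,
        _game_version: &GameVersion,
        _current_dir: &str,
    ) -> Result<String, ProcessError> {
        // A native handler can simply join the executable and its arguments;
        // wrapper handlers would prepend their own binary here.
        Ok(std::iter::once(launch_command)
            .chain(args)
            .collect::<Vec<_>>()
            .join(" "))
    }

    fn valid_for_platform(&self, _db: &Database, _state: &AppState, target: &Platform) -> bool {
        // Only valid when the requested target matches the host platform.
        *target == Platform::HOST
    }
}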

View File

@ -1,27 +0,0 @@
use std::{path::PathBuf, sync::Arc};
use futures_lite::io;
use sysinfo::{Disk, DiskRefreshKind, Disks};
use crate::error::application_download_error::ApplicationDownloadError;
pub fn get_disk_available(mount_point: PathBuf) -> Result<u64, ApplicationDownloadError> {
let disks = Disks::new_with_refreshed_list_specifics(DiskRefreshKind::nothing().with_storage());
let mut disk_iter = disks.into_iter().collect::<Vec<&Disk>>();
disk_iter.sort_by(|a, b| {
b.mount_point()
.to_string_lossy()
.len()
.cmp(&a.mount_point().to_string_lossy().len())
});
for disk in disk_iter {
if mount_point.starts_with(disk.mount_point()) {
return Ok(disk.available_space());
}
}
Err(ApplicationDownloadError::IoError(Arc::new(io::Error::other(
"could not find disk of path",
))))
}
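// Hedged usage sketch (added for illustration; `required_bytes` is a
// hypothetical parameter): guard a download against insufficient free space on
// the disk that backs the install directory.
#[allow(dead_code)]
pub fn has_space_for(
    install_dir: PathBuf,
    required_bytes: u64,
) -> Result<bool, ApplicationDownloadError> {
    Ok(get_disk_available(install_dir)? >= required_bytes)
}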

View File

@ -31,7 +31,33 @@ pub async fn use_remote(
url: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<(), RemoteAccessError> {
use_remote_logic(url, state).await
debug!("connecting to url {url}");
let base_url = Url::parse(&url)?;
// Test Drop url
let test_endpoint = base_url.join("/api/v1")?;
let client = DROP_CLIENT_ASYNC.clone();
let response = client
.get(test_endpoint.to_string())
.timeout(Duration::from_secs(3))
.send()
.await?;
let result: DropHealthcheck = response.json().await?;
if result.app_name != "Drop" {
warn!("user entered drop endpoint that connected, but wasn't identified as Drop");
return Err(RemoteAccessError::InvalidEndpoint);
}
let mut app_state = lock!(state);
app_state.status = AppStatus::SignedOut;
drop(app_state);
let mut db_state = borrow_db_mut_checked();
db_state.base_url = base_url.to_string();
Ok(())
}
#[tauri::command]

View File

@ -1,228 +0,0 @@
use std::{collections::HashMap, env, sync::Mutex};
use chrono::Utc;
use droplet_rs::ssl::sign_nonce;
use gethostname::gethostname;
use log::{debug, error, warn};
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter, Manager};
use url::Url;
use crate::{
    app_emit,
    database::{
        db::{borrow_db_checked, borrow_db_mut_checked},
        models::data::DatabaseAuth,
    },
    error::{drop_server_error::DropServerError, remote_access_error::RemoteAccessError},
    lock,
    remote::{
        cache::clear_cached_object,
        requests::make_authenticated_get,
        utils::{DROP_CLIENT_ASYNC, DROP_CLIENT_SYNC},
    },
    AppState, AppStatus, User,
};
use super::{
cache::{cache_object, get_cached_object},
requests::generate_url,
};
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CapabilityConfiguration {}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct InitiateRequestBody {
name: String,
platform: String,
capabilities: HashMap<String, CapabilityConfiguration>,
mode: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeRequestBody {
client_id: String,
token: String,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct HandshakeResponse {
private: String,
certificate: String,
id: String,
}
pub fn generate_authorization_header() -> String {
let certs = {
let db = borrow_db_checked();
db.auth.clone().expect("Authorisation not initialised")
};
let nonce = Utc::now().timestamp_millis().to_string();
let signature =
sign_nonce(certs.private, nonce.clone()).expect("Failed to generate authorisation header");
format!("Nonce {} {} {}", certs.client_id, nonce, signature)
}
pub async fn fetch_user() -> Result<User, RemoteAccessError> {
let response = make_authenticated_get(generate_url(&["/api/v1/client/user"], &[])?).await?;
if response.status() != 200 {
let err: DropServerError = response.json().await?;
warn!("{err:?}");
if err.status_message == "Nonce expired" {
return Err(RemoteAccessError::OutOfSync);
}
return Err(RemoteAccessError::InvalidResponse(err));
}
response
.json::<User>()
.await
.map_err(std::convert::Into::into)
}
async fn recieve_handshake_logic(app: &AppHandle, path: String) -> Result<(), RemoteAccessError> {
let path_chunks: Vec<&str> = path.split('/').collect();
if path_chunks.len() != 3 {
app_emit!(app, "auth/failed", ());
return Err(RemoteAccessError::HandshakeFailed(
"failed to parse token".to_string(),
));
}
let base_url = {
let handle = borrow_db_checked();
Url::parse(handle.base_url.as_str())?
};
let client_id = path_chunks
.get(1)
.expect("Failed to get client id from path chunks");
let token = path_chunks
.get(2)
.expect("Failed to get token from path chunks");
let body = HandshakeRequestBody {
client_id: (client_id).to_string(),
token: (token).to_string(),
};
let endpoint = base_url.join("/api/v1/client/auth/handshake")?;
let client = DROP_CLIENT_ASYNC.clone();
let response = client.post(endpoint).json(&body).send().await?;
debug!("handshake responsded with {}", response.status().as_u16());
if !response.status().is_success() {
return Err(RemoteAccessError::InvalidResponse(response.json().await?));
}
let response_struct: HandshakeResponse = response.json().await?;
{
let mut handle = borrow_db_mut_checked();
handle.auth = Some(DatabaseAuth {
private: response_struct.private,
cert: response_struct.certificate,
client_id: response_struct.id,
web_token: None,
});
}
let web_token = {
let header = generate_authorization_header();
let token = client
.post(base_url.join("/api/v1/client/user/webtoken")?)
.header("Authorization", header)
.send()
.await?;
token.text().await?
};
let mut handle = borrow_db_mut_checked();
handle.auth.as_mut().unwrap().web_token = Some(web_token);
Ok(())
}
pub async fn recieve_handshake(app: AppHandle, path: String) {
// Tell the app we're processing
app_emit!(app, "auth/processing", ());
let handshake_result = recieve_handshake_logic(&app, path).await;
if let Err(e) = handshake_result {
warn!("error with authentication: {e}");
app_emit!(app, "auth/failed", e.to_string());
return;
}
let app_state = app.state::<Mutex<AppState>>();
let (app_status, user) = setup().await;
let mut state_lock = lock!(app_state);
state_lock.status = app_status;
state_lock.user = user;
let _ = clear_cached_object("collections");
let _ = clear_cached_object("library");
drop(state_lock);
app_emit!(app, "auth/finished", ());
}
pub fn auth_initiate_logic(mode: String) -> Result<String, RemoteAccessError> {
let base_url = {
let db_lock = borrow_db_checked();
Url::parse(&db_lock.base_url.clone())?
};
let hostname = gethostname();
let endpoint = base_url.join("/api/v1/client/auth/initiate")?;
let body = InitiateRequestBody {
name: format!("{} (Desktop)", hostname.display()),
platform: env::consts::OS.to_string(),
capabilities: HashMap::from([
("peerAPI".to_owned(), CapabilityConfiguration {}),
("cloudSaves".to_owned(), CapabilityConfiguration {}),
]),
mode,
};
let client = DROP_CLIENT_SYNC.clone();
let response = client.post(endpoint.to_string()).json(&body).send()?;
if response.status() != 200 {
let data: DropServerError = response.json()?;
error!("could not start handshake: {}", data.status_message);
return Err(RemoteAccessError::HandshakeFailed(data.status_message));
}
let response = response.text()?;
Ok(response)
}
pub async fn setup() -> (AppStatus, Option<User>) {
let auth = {
let data = borrow_db_checked();
data.auth.clone()
};
if auth.is_some() {
let user_result = match fetch_user().await {
Ok(data) => data,
Err(RemoteAccessError::FetchError(_)) => {
let user = get_cached_object::<User>("user").ok();
return (AppStatus::Offline, user);
}
Err(_) => return (AppStatus::SignedInNeedsReauth, None),
};
if let Err(e) = cache_object("user", &user_result) {
warn!("Could not cache user object with error {e}");
}
return (AppStatus::SignedIn, Some(user_result));
}
(AppStatus::SignedOut, None)
}

View File

@ -1,139 +0,0 @@
use std::{
fs::File,
io::{self, Write},
path::{Path, PathBuf},
time::SystemTime,
};
use crate::{
database::{db::borrow_db_checked, models::data::Database},
error::{cache_error::CacheError, remote_access_error::RemoteAccessError},
};
use bitcode::{Decode, DecodeOwned, Encode};
use http::{header::CONTENT_TYPE, response::Builder as ResponseBuilder, Response};
#[macro_export]
macro_rules! offline {
($var:expr, $func1:expr, $func2:expr, $( $arg:expr ),* ) => {
async move { if $crate::borrow_db_checked().settings.force_offline || $crate::lock!($var).status == $crate::AppStatus::Offline {
$func2( $( $arg ), *).await
} else {
$func1( $( $arg ), *).await
}
}
}
}
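// Hedged usage sketch (illustration only; `state`, `fetch_library_online`,
// `fetch_library_cached` and `game_id` are hypothetical names): the macro
// expands to an async block that calls the second function when the app is
// forced offline or currently offline, and the first function otherwise.
//
//     let library = offline!(state, fetch_library_online, fetch_library_cached, game_id).await?;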
fn get_sys_time_in_secs() -> u64 {
match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) {
Ok(n) => n.as_secs(),
Err(_) => panic!("SystemTime before UNIX EPOCH!"),
}
}
fn get_cache_path(base: &Path, key: &str) -> PathBuf {
let key_hash = hex::encode(md5::compute(key.as_bytes()).0);
base.join(key_hash)
}
fn write_sync(base: &Path, key: &str, data: Vec<u8>) -> io::Result<()> {
let cache_path = get_cache_path(base, key);
let mut file = File::create(cache_path)?;
file.write_all(&data)?;
Ok(())
}
fn read_sync(base: &Path, key: &str) -> io::Result<Vec<u8>> {
let cache_path = get_cache_path(base, key);
let file = std::fs::read(cache_path)?;
Ok(file)
}
fn delete_sync(base: &Path, key: &str) -> io::Result<()> {
let cache_path = get_cache_path(base, key);
std::fs::remove_file(cache_path)?;
Ok(())
}
pub fn cache_object<D: Encode>(key: &str, data: &D) -> Result<(), RemoteAccessError> {
cache_object_db(key, data, &borrow_db_checked())
}
pub fn cache_object_db<D: Encode>(
key: &str,
data: &D,
database: &Database,
) -> Result<(), RemoteAccessError> {
let bytes = bitcode::encode(data);
write_sync(&database.cache_dir, key, bytes).map_err(RemoteAccessError::Cache)
}
pub fn get_cached_object<D: Encode + DecodeOwned>(key: &str) -> Result<D, RemoteAccessError> {
get_cached_object_db::<D>(key, &borrow_db_checked())
}
pub fn get_cached_object_db<D: DecodeOwned>(
key: &str,
db: &Database,
) -> Result<D, RemoteAccessError> {
let bytes = read_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
let data =
bitcode::decode::<D>(&bytes).map_err(|e| RemoteAccessError::Cache(io::Error::other(e)))?;
Ok(data)
}
pub fn clear_cached_object(key: &str) -> Result<(), RemoteAccessError> {
clear_cached_object_db(key, &borrow_db_checked())
}
pub fn clear_cached_object_db(
key: &str,
db: &Database,
) -> Result<(), RemoteAccessError> {
delete_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
Ok(())
}
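// Hedged round-trip sketch (illustration only; `UserSettings` is a hypothetical
// type): any value that implements Encode + DecodeOwned can be cached under a
// string key, read back, and cleared later.
//
//     #[derive(Encode, Decode)]
//     struct UserSettings { dark_mode: bool }
//
//     cache_object("settings", &UserSettings { dark_mode: true })?;
//     let restored: UserSettings = get_cached_object("settings")?;
//     clear_cached_object("settings")?;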
#[derive(Encode, Decode)]
pub struct ObjectCache {
content_type: String,
body: Vec<u8>,
expiry: u64,
}
impl ObjectCache {
pub fn has_expired(&self) -> bool {
let current = get_sys_time_in_secs();
self.expiry < current
}
}
impl TryFrom<Response<Vec<u8>>> for ObjectCache {
type Error = CacheError;
fn try_from(value: Response<Vec<u8>>) -> Result<Self, Self::Error> {
Ok(ObjectCache {
content_type: value
.headers()
.get(CONTENT_TYPE)
.ok_or(CacheError::HeaderNotFound(CONTENT_TYPE))?
.to_str()
.map_err(CacheError::ParseError)?
.to_owned(),
body: value.body().clone(),
expiry: get_sys_time_in_secs() + 60 * 60 * 24,
})
}
}
impl TryFrom<ObjectCache> for Response<Vec<u8>> {
type Error = CacheError;
fn try_from(value: ObjectCache) -> Result<Self, Self::Error> {
let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type);
resp_builder.body(value.body).map_err(CacheError::ConstructionError)
}
}
impl TryFrom<&ObjectCache> for Response<Vec<u8>> {
type Error = CacheError;
fn try_from(value: &ObjectCache) -> Result<Self, Self::Error> {
let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type.clone());
resp_builder.body(value.body.clone()).map_err(CacheError::ConstructionError)
}
}

View File

@ -1,75 +0,0 @@
use http::{header::CONTENT_TYPE, response::Builder as ResponseBuilder, Response};
use log::{debug, warn};
use tauri::UriSchemeResponder;
use crate::{database::db::DatabaseImpls, error::cache_error::CacheError, remote::utils::DROP_CLIENT_ASYNC, DB};
use super::{
auth::generate_authorization_header,
cache::{ObjectCache, cache_object, get_cached_object},
};
pub async fn fetch_object_wrapper(request: http::Request<Vec<u8>>, responder: UriSchemeResponder) {
match fetch_object(request).await {
Ok(r) => responder.respond(r),
Err(e) => {
warn!("Cache error: {e}");
responder.respond(Response::builder().status(500).body(Vec::new()).expect("Failed to build error response"));
}
};
}
pub async fn fetch_object(request: http::Request<Vec<u8>>) -> Result<Response<Vec<u8>>, CacheError>
{
// Drop leading /
let object_id = &request.uri().path()[1..];
let cache_result = get_cached_object::<ObjectCache>(object_id);
if let Ok(cache_result) = &cache_result
&& !cache_result.has_expired()
{
return cache_result.try_into();
}
let header = generate_authorization_header();
let client = DROP_CLIENT_ASYNC.clone();
let url = format!("{}api/v1/client/object/{object_id}", DB.fetch_base_url());
let response = client.get(url).header("Authorization", header).send().await;
match response {
Ok(r) => {
let resp_builder = ResponseBuilder::new().header(
CONTENT_TYPE,
r.headers()
.get("Content-Type")
.expect("Failed get Content-Type header"),
);
let data = match r.bytes().await {
Ok(data) => Vec::from(data),
Err(e) => {
warn!(
"Could not get data from cache object {object_id} with error {e}",
);
Vec::new()
}
};
let resp = resp_builder.body(data).expect("Failed to build object cache response body");
if cache_result.map_or(true, |x| x.has_expired()) {
cache_object::<ObjectCache>(object_id, &resp.clone().try_into()?)
.expect("Failed to create cached object");
}
Ok(resp)
}
Err(e) => {
debug!("Object fetch failed with error {e}. Attempting to download from cache");
match cache_result {
Ok(cache_result) => cache_result.try_into(),
Err(e) => {
warn!("{e}");
Err(CacheError::Remote(e))
}
}
}
}
}

View File

@ -1,8 +0,0 @@
pub mod auth;
#[macro_use]
pub mod cache;
pub mod commands;
pub mod fetch_object;
pub mod requests;
pub mod server_proto;
pub mod utils;

View File

@ -1,33 +0,0 @@
use url::Url;
use crate::{
DB,
database::db::DatabaseImpls,
error::remote_access_error::RemoteAccessError,
remote::{auth::generate_authorization_header, utils::DROP_CLIENT_ASYNC},
};
pub fn generate_url<T: AsRef<str>>(
path_components: &[T],
query: &[(T, T)],
) -> Result<Url, RemoteAccessError> {
let mut base_url = DB.fetch_base_url();
for endpoint in path_components {
base_url = base_url.join(endpoint.as_ref())?;
}
{
let mut queries = base_url.query_pairs_mut();
for (param, val) in query {
queries.append_pair(param.as_ref(), val.as_ref());
}
}
Ok(base_url)
}
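// Hedged usage sketch (illustration only; `game_id` is a hypothetical value):
// build an endpoint from path segments and query parameters, then issue an
// authenticated GET with the helper below.
//
//     let url = generate_url(&["/api/v1/client/game/", game_id.as_str()], &[("page", "1")])?;
//     let response = make_authenticated_get(url).await?;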
pub async fn make_authenticated_get(url: Url) -> Result<reqwest::Response, reqwest::Error> {
DROP_CLIENT_ASYNC
.get(url)
.header("Authorization", generate_authorization_header())
.send()
.await
}

View File

@ -1,91 +0,0 @@
use std::str::FromStr;
use http::{uri::PathAndQuery, Request, Response, StatusCode, Uri};
use log::{error, warn};
use tauri::UriSchemeResponder;
use crate::{database::db::borrow_db_checked, remote::utils::DROP_CLIENT_SYNC, utils::webbrowser_open::webbrowser_open};
pub async fn handle_server_proto_offline_wrapper(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
responder.respond(match handle_server_proto_offline(request).await {
Ok(res) => res,
Err(_) => unreachable!()
});
}
pub async fn handle_server_proto_offline(_request: Request<Vec<u8>>) -> Result<Response<Vec<u8>>, StatusCode> {
Ok(Response::builder()
.status(StatusCode::NOT_FOUND)
.body(Vec::new())
.expect("Failed to build error response for proto offline"))
}
pub async fn handle_server_proto_wrapper(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
match handle_server_proto(request).await {
Ok(r) => responder.respond(r),
Err(e) => {
warn!("Cache error: {e}");
responder.respond(Response::builder().status(e).body(Vec::new()).expect("Failed to build error response"));
}
}
}
async fn handle_server_proto(request: Request<Vec<u8>>) -> Result<Response<Vec<u8>>, StatusCode> {
let db_handle = borrow_db_checked();
let auth = match db_handle.auth.as_ref() {
Some(auth) => auth,
None => {
error!("Could not find auth in database");
return Err(StatusCode::UNAUTHORIZED)
}
};
let web_token = match &auth.web_token {
Some(token) => token,
None => return Err(StatusCode::UNAUTHORIZED),
};
let remote_uri = db_handle.base_url.parse::<Uri>().expect("Failed to parse base url");
let path = request.uri().path();
let mut new_uri = request.uri().clone().into_parts();
new_uri.path_and_query =
Some(PathAndQuery::from_str(&format!("{path}?noWrapper=true")).expect("Failed to parse request path in proto"));
new_uri.authority = remote_uri.authority().cloned();
new_uri.scheme = remote_uri.scheme().cloned();
let err_msg = &format!("Failed to build new uri from parts {new_uri:?}");
let new_uri = Uri::from_parts(new_uri).expect(err_msg);
let whitelist_prefix = ["/store", "/api", "/_", "/fonts"];
if whitelist_prefix.iter().all(|f| !path.starts_with(f)) {
webbrowser_open(new_uri.to_string());
return Ok(Response::new(Vec::new()))
}
let client = DROP_CLIENT_SYNC.clone();
let response = match client
.request(request.method().clone(), new_uri.to_string())
.header("Authorization", format!("Bearer {web_token}"))
.headers(request.headers().clone())
.send() {
Ok(response) => response,
Err(e) => {
warn!("Could not send response. Got {e} when sending");
return Err(e.status().unwrap_or(StatusCode::BAD_REQUEST))
},
};
let response_status = response.status();
let response_body = match response.bytes() {
Ok(bytes) => bytes,
Err(e) => return Err(e.status().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR)),
};
let http_response = Response::builder()
.status(response_status)
.body(response_body.to_vec())
.expect("Failed to build server proto response");
Ok(http_response)
}

View File

@ -1,145 +0,0 @@
use std::{
fs::{self, File},
io::Read,
sync::{LazyLock, Mutex},
time::Duration,
};
use log::{debug, info, warn};
use reqwest::Certificate;
use serde::Deserialize;
use url::Url;
use crate::{
    database::db::{borrow_db_mut_checked, DATA_ROOT_DIR},
    error::remote_access_error::RemoteAccessError,
    lock, AppState, AppStatus,
};
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct DropHealthcheck {
app_name: String,
}
static DROP_CERT_BUNDLE: LazyLock<Vec<Certificate>> = LazyLock::new(fetch_certificates);
pub static DROP_CLIENT_SYNC: LazyLock<reqwest::blocking::Client> = LazyLock::new(get_client_sync);
pub static DROP_CLIENT_ASYNC: LazyLock<reqwest::Client> = LazyLock::new(get_client_async);
pub static DROP_CLIENT_WS_CLIENT: LazyLock<reqwest::Client> = LazyLock::new(get_client_ws);
fn fetch_certificates() -> Vec<Certificate> {
let certificate_dir = DATA_ROOT_DIR.join("certificates");
let mut certs = Vec::new();
match fs::read_dir(certificate_dir) {
Ok(c) => {
for entry in c {
match entry {
Ok(c) => {
let mut buf = Vec::new();
match File::open(c.path()) {
Ok(f) => f,
Err(e) => {
warn!(
"Failed to open file at {} with error {}",
c.path().display(),
e
);
continue;
}
}
.read_to_end(&mut buf)
.unwrap_or_else(|e| panic!(
"Failed to read to end of certificate file {} with error {}",
c.path().display(),
e
));
match Certificate::from_pem_bundle(&buf) {
Ok(certificates) => {
for cert in certificates {
certs.push(cert);
}
info!(
"added {} certificate(s) from {}",
certs.len(),
c.file_name().display()
);
}
Err(e) => warn!(
"Invalid certificate file {} with error {}",
c.path().display(),
e
),
}
}
Err(_) => todo!(),
}
}
}
Err(e) => {
debug!("not loading certificates due to error: {e}");
}
};
certs
}
pub fn get_client_sync() -> reqwest::blocking::Client {
let mut client = reqwest::blocking::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client.use_rustls_tls().build().expect("Failed to build synchronous client")
}
pub fn get_client_async() -> reqwest::Client {
let mut client = reqwest::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client.use_rustls_tls().build().expect("Failed to build asynchronous client")
}
pub fn get_client_ws() -> reqwest::Client {
let mut client = reqwest::ClientBuilder::new();
for cert in DROP_CERT_BUNDLE.iter() {
client = client.add_root_certificate(cert.clone());
}
client
.use_rustls_tls()
.http1_only()
.build()
.expect("Failed to build websocket client")
}
pub async fn use_remote_logic(
url: String,
state: tauri::State<'_, Mutex<AppState<'_>>>,
) -> Result<(), RemoteAccessError> {
debug!("connecting to url {url}");
let base_url = Url::parse(&url)?;
// Test Drop url
let test_endpoint = base_url.join("/api/v1")?;
let client = DROP_CLIENT_ASYNC.clone();
let response = client
.get(test_endpoint.to_string())
.timeout(Duration::from_secs(3))
.send()
.await?;
let result: DropHealthcheck = response.json().await?;
if result.app_name != "Drop" {
warn!("user entered drop endpoint that connected, but wasn't identified as Drop");
return Err(RemoteAccessError::InvalidEndpoint);
}
let mut app_state = lock!(state);
app_state.status = AppStatus::SignedOut;
drop(app_state);
let mut db_state = borrow_db_mut_checked();
db_state.base_url = base_url.to_string();
Ok(())
}

View File

@ -1,6 +0,0 @@
#[macro_export]
macro_rules! app_emit {
($app:expr, $event:expr, $p:expr) => {
$app.emit($event, $p).expect(&format!("Failed to emit event {}", $event));
};
}
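// Hedged usage sketch (illustration only; the event name and payload are
// hypothetical): emit an event to the frontend, panicking with a descriptive
// message if emission fails.
//
//     app_emit!(app_handle, "download/progress", 42u32);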

View File

@ -1,6 +0,0 @@
#[macro_export]
macro_rules! send {
($download_manager:expr, $signal:expr) => {
$download_manager.send($signal).unwrap_or_else(|_| panic!("Failed to send signal {} to the download manager", stringify!($signal)))
};
}
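// Hedged usage sketch (illustration only; `DownloadManagerSignal::Cancel` is a
// hypothetical variant): forward a signal to the download manager, panicking
// with the stringified signal if the channel is closed.
//
//     send!(state.download_manager, DownloadManagerSignal::Cancel(meta));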

View File

@ -1,6 +0,0 @@
#[macro_export]
macro_rules! lock {
($mutex:expr) => {
$mutex.lock().unwrap_or_else(|_| panic!("Failed to lock onto {}", stringify!($mutex)))
};
}
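// Hedged usage sketch (illustration only): lock a Mutex-wrapped value,
// panicking with a message that names the locked expression if the lock is
// poisoned.
//
//     let state = app.state::<Mutex<AppState>>();
//     let state_lock = lock!(state);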

View File

@ -1,4 +0,0 @@
mod app_emit;
mod download_manager_send;
mod lock;
pub mod webbrowser_open;

View File

@ -1,7 +0,0 @@
use log::warn;
pub fn webbrowser_open<T: AsRef<str>>(url: T) {
if let Err(e) = webbrowser::open(url.as_ref()) {
warn!("Could not open web browser to url {} with error {}", url.as_ref(), e);
};
}

View File

@ -7,7 +7,7 @@
"beforeDevCommand": "yarn --cwd main dev --port 1432",
"devUrl": "http://localhost:1432/",
"beforeBuildCommand": "yarn build",
"frontendDist": "../.output"
"frontendDist": "../../.output"
},
"app": {
"security": {