Mirror of https://github.com/Drop-OSS/drop-app.git (synced 2025-11-16 01:31:22 +10:00)
refactor into workspaces (#157)

* chore: Major refactoring. Still needs a massive go-over, because there shouldn't be anything referencing tauri in any of the workspaces except the original one. The process manager has been refactored as an example.
* fix: Remove tauri dependency from process
* refactor: Improvements to src-tauri
* refactor: Builds, but some logic still left to move back
* refactor: Finish refactor
* chore: Run cargo clippy && cargo fmt
* refactor: Move everything into src-tauri

Signed-off-by: quexeky <git@quexeky.dev>
src-tauri/remote/Cargo.toml (new file, 23 lines)
@@ -0,0 +1,23 @@
[package]
name = "remote"
version = "0.1.0"
edition = "2024"

[dependencies]
bitcode = "0.6.7"
chrono = "0.4.42"
client = { version = "0.1.0", path = "../client" }
database = { version = "0.1.0", path = "../database" }
droplet-rs = "0.7.3"
gethostname = "1.0.2"
hex = "0.4.3"
http = "1.3.1"
log = "0.4.28"
md5 = "0.8.0"
reqwest = "0.12.23"
reqwest-websocket = "0.5.1"
serde = "1.0.228"
serde_with = "3.15.0"
tauri = "2.8.5"
url = "2.5.7"
utils = { version = "0.1.0", path = "../utils" }
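Note: the workspace root manifest is not part of this diff. For orientation only, a root Cargo.toml matching the path dependencies above might contain something like the following sketch; the member list is inferred from the `path = "../…"` entries and the commit message, not taken from the repository.

# Sketch only — the actual workspace members are not shown in this commit.
[workspace]
members = ["client", "database", "process", "remote", "utils"]
resolver = "2"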
src-tauri/remote/src/auth.rs (new file, 152 lines)
@@ -0,0 +1,152 @@
use std::{collections::HashMap, env};

use chrono::Utc;
use client::{app_status::AppStatus, user::User};
use database::{DatabaseAuth, interface::borrow_db_checked};
use droplet_rs::ssl::sign_nonce;
use gethostname::gethostname;
use log::{error, warn};
use serde::{Deserialize, Serialize};
use url::Url;

use crate::{
    error::{DropServerError, RemoteAccessError},
    requests::make_authenticated_get,
    utils::DROP_CLIENT_SYNC,
};

use super::{
    cache::{cache_object, get_cached_object},
    requests::generate_url,
};

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CapabilityConfiguration {}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct InitiateRequestBody {
    name: String,
    platform: String,
    capabilities: HashMap<String, CapabilityConfiguration>,
    mode: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct HandshakeRequestBody {
    client_id: String,
    token: String,
}

impl HandshakeRequestBody {
    pub fn new(client_id: String, token: String) -> Self {
        Self { client_id, token }
    }
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HandshakeResponse {
    private: String,
    certificate: String,
    id: String,
}

impl From<HandshakeResponse> for DatabaseAuth {
    fn from(value: HandshakeResponse) -> Self {
        DatabaseAuth::new(value.private, value.certificate, value.id, None)
    }
}

pub fn generate_authorization_header() -> String {
    let certs = {
        let db = borrow_db_checked();
        db.auth.clone().expect("Authorisation not initialised")
    };

    let nonce = Utc::now().timestamp_millis().to_string();

    let signature =
        sign_nonce(certs.private, nonce.clone()).expect("Failed to generate authorisation header");

    format!("Nonce {} {} {}", certs.client_id, nonce, signature)
}

pub async fn fetch_user() -> Result<User, RemoteAccessError> {
    let response = make_authenticated_get(generate_url(&["/api/v1/client/user"], &[])?).await?;
    if response.status() != 200 {
        let err: DropServerError = response.json().await?;
        warn!("{err:?}");

        if err.status_message == "Nonce expired" {
            return Err(RemoteAccessError::OutOfSync);
        }

        return Err(RemoteAccessError::InvalidResponse(err));
    }

    response
        .json::<User>()
        .await
        .map_err(std::convert::Into::into)
}

pub fn auth_initiate_logic(mode: String) -> Result<String, RemoteAccessError> {
    let base_url = {
        let db_lock = borrow_db_checked();
        Url::parse(&db_lock.base_url.clone())?
    };

    let hostname = gethostname();

    let endpoint = base_url.join("/api/v1/client/auth/initiate")?;
    let body = InitiateRequestBody {
        name: format!("{} (Desktop)", hostname.display()),
        platform: env::consts::OS.to_string(),
        capabilities: HashMap::from([
            ("peerAPI".to_owned(), CapabilityConfiguration {}),
            ("cloudSaves".to_owned(), CapabilityConfiguration {}),
        ]),
        mode,
    };

    let client = DROP_CLIENT_SYNC.clone();
    let response = client.post(endpoint.to_string()).json(&body).send()?;

    if response.status() != 200 {
        let data: DropServerError = response.json()?;
        error!("could not start handshake: {}", data.status_message);

        return Err(RemoteAccessError::HandshakeFailed(data.status_message));
    }

    let response = response.text()?;

    Ok(response)
}

pub async fn setup() -> (AppStatus, Option<User>) {
    let auth = {
        let data = borrow_db_checked();
        data.auth.clone()
    };

    if auth.is_some() {
        let user_result = match fetch_user().await {
            Ok(data) => data,
            Err(RemoteAccessError::FetchError(_)) => {
                let user = get_cached_object::<User>("user").ok();
                return (AppStatus::Offline, user);
            }
            Err(_) => return (AppStatus::SignedInNeedsReauth, None),
        };
        if let Err(e) = cache_object("user", &user_result) {
            warn!("Could not cache user object with error {e}");
        }
        return (AppStatus::SignedIn, Some(user_result));
    }

    (AppStatus::SignedOut, None)
}
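Note: a rough illustration of how this module's API might be consumed from the main `src-tauri` crate follows. It is a sketch only; the calling code is not part of this diff, and it assumes the crate is imported as `remote` and that an async runtime is already running.

// Sketch only: assumes the workspace crate is depended on as `remote`
// and that a Tauri/Tokio async runtime is driving this future.
use client::app_status::AppStatus;

async fn example_startup() {
    // Resolve the initial app state and fetch (or cache-restore) the user.
    let (status, user) = remote::setup().await;
    if matches!(status, AppStatus::SignedIn | AppStatus::Offline) {
        log::info!("startup resolved; cached user present: {}", user.is_some());
    }

    // Any later request can reuse the nonce-signed header.
    let _auth_header = remote::auth::generate_authorization_header();
}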
src-tauri/remote/src/cache.rs (new file, 140 lines)
@@ -0,0 +1,140 @@
use std::{
    fs::File,
    io::{self, Write},
    path::{Path, PathBuf},
    time::SystemTime,
};

use bitcode::{Decode, DecodeOwned, Encode};
use database::{Database, borrow_db_checked};
use http::{Response, header::CONTENT_TYPE, response::Builder as ResponseBuilder};

use crate::error::{CacheError, RemoteAccessError};

#[macro_export]
macro_rules! offline {
    ($var:expr, $func1:expr, $func2:expr, $( $arg:expr ),* ) => {

        async move {
            if ::database::borrow_db_checked().settings.force_offline
                || $var.lock().status == ::client::app_status::AppStatus::Offline {
                $func2( $( $arg ), *).await
            } else {
                $func1( $( $arg ), *).await
            }
        }
    }
}

fn get_sys_time_in_secs() -> u64 {
    match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) {
        Ok(n) => n.as_secs(),
        Err(_) => panic!("SystemTime before UNIX EPOCH!"),
    }
}

fn get_cache_path(base: &Path, key: &str) -> PathBuf {
    let key_hash = hex::encode(md5::compute(key.as_bytes()).0);
    base.join(key_hash)
}

fn write_sync(base: &Path, key: &str, data: Vec<u8>) -> io::Result<()> {
    let cache_path = get_cache_path(base, key);
    let mut file = File::create(cache_path)?;
    file.write_all(&data)?;
    Ok(())
}

fn read_sync(base: &Path, key: &str) -> io::Result<Vec<u8>> {
    let cache_path = get_cache_path(base, key);
    let file = std::fs::read(cache_path)?;
    Ok(file)
}

fn delete_sync(base: &Path, key: &str) -> io::Result<()> {
    let cache_path = get_cache_path(base, key);
    std::fs::remove_file(cache_path)?;
    Ok(())
}

pub fn cache_object<D: Encode>(key: &str, data: &D) -> Result<(), RemoteAccessError> {
    cache_object_db(key, data, &borrow_db_checked())
}
pub fn cache_object_db<D: Encode>(
    key: &str,
    data: &D,
    database: &Database,
) -> Result<(), RemoteAccessError> {
    let bytes = bitcode::encode(data);
    write_sync(&database.cache_dir, key, bytes).map_err(RemoteAccessError::Cache)
}
pub fn get_cached_object<D: Encode + DecodeOwned>(key: &str) -> Result<D, RemoteAccessError> {
    get_cached_object_db::<D>(key, &borrow_db_checked())
}
pub fn get_cached_object_db<D: DecodeOwned>(
    key: &str,
    db: &Database,
) -> Result<D, RemoteAccessError> {
    let bytes = read_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
    let data =
        bitcode::decode::<D>(&bytes).map_err(|e| RemoteAccessError::Cache(io::Error::other(e)))?;
    Ok(data)
}
pub fn clear_cached_object(key: &str) -> Result<(), RemoteAccessError> {
    clear_cached_object_db(key, &borrow_db_checked())
}
pub fn clear_cached_object_db(key: &str, db: &Database) -> Result<(), RemoteAccessError> {
    delete_sync(&db.cache_dir, key).map_err(RemoteAccessError::Cache)?;
    Ok(())
}

#[derive(Encode, Decode)]
pub struct ObjectCache {
    content_type: String,
    body: Vec<u8>,
    expiry: u64,
}

impl ObjectCache {
    pub fn has_expired(&self) -> bool {
        let current = get_sys_time_in_secs();
        self.expiry < current
    }
}

impl TryFrom<Response<Vec<u8>>> for ObjectCache {
    type Error = CacheError;

    fn try_from(value: Response<Vec<u8>>) -> Result<Self, Self::Error> {
        Ok(ObjectCache {
            content_type: value
                .headers()
                .get(CONTENT_TYPE)
                .ok_or(CacheError::HeaderNotFound(CONTENT_TYPE))?
                .to_str()
                .map_err(CacheError::ParseError)?
                .to_owned(),
            body: value.body().clone(),
            expiry: get_sys_time_in_secs() + 60 * 60 * 24,
        })
    }
}
impl TryFrom<ObjectCache> for Response<Vec<u8>> {
    type Error = CacheError;
    fn try_from(value: ObjectCache) -> Result<Self, Self::Error> {
        let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type);
        resp_builder
            .body(value.body)
            .map_err(CacheError::ConstructionError)
    }
}
impl TryFrom<&ObjectCache> for Response<Vec<u8>> {
    type Error = CacheError;

    fn try_from(value: &ObjectCache) -> Result<Self, Self::Error> {
        let resp_builder = ResponseBuilder::new().header(CONTENT_TYPE, value.content_type.clone());
        resp_builder
            .body(value.body.clone())
            .map_err(CacheError::ConstructionError)
    }
}
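Note: the `offline!` macro above selects between an online and an offline implementation based on the force-offline setting and the current `AppStatus`. A usage sketch follows; `AppState`, `load_user_online`, and `load_user_offline` are hypothetical names, and the state is assumed to sit behind a lock whose `lock()` returns the guard directly (e.g. a parking_lot mutex), matching the `$var.lock().status` access inside the macro.

// Hypothetical caller (not part of this commit). `app_state` stands in for
// whatever synchronised state the app keeps its AppStatus in.
async fn load_user(
    app_state: &parking_lot::Mutex<AppState>,
    id: String,
) -> Result<User, RemoteAccessError> {
    // The macro expands to an async block that awaits whichever branch applies.
    offline!(app_state, load_user_online, load_user_offline, id).await
}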
src-tauri/remote/src/error.rs (new file, 143 lines)
@@ -0,0 +1,143 @@
use std::{
    error::Error,
    fmt::{Display, Formatter},
    sync::Arc,
};

use http::{HeaderName, StatusCode, header::ToStrError};
use serde_with::SerializeDisplay;
use url::ParseError;

use serde::Deserialize;

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DropServerError {
    pub status_code: usize,
    pub status_message: String,
    // pub message: String,
    // pub url: String,
}

#[derive(Debug, SerializeDisplay)]
pub enum RemoteAccessError {
    FetchError(Arc<reqwest::Error>),
    FetchErrorWS(Arc<reqwest_websocket::Error>),
    ParsingError(ParseError),
    InvalidEndpoint,
    HandshakeFailed(String),
    GameNotFound(String),
    InvalidResponse(DropServerError),
    UnparseableResponse(String),
    ManifestDownloadFailed(StatusCode, String),
    OutOfSync,
    Cache(std::io::Error),
    CorruptedState,
}

impl Display for RemoteAccessError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            RemoteAccessError::FetchError(error) => {
                if error.is_connect() {
                    return write!(
                        f,
                        "Failed to connect to Drop server. Check if you access Drop through a browser, and then try again."
                    );
                }

                write!(
                    f,
                    "{}: {}",
                    error,
                    error
                        .source()
                        .map(std::string::ToString::to_string)
                        .unwrap_or("Unknown error".to_string())
                )
            }
            RemoteAccessError::FetchErrorWS(error) => write!(
                f,
                "{}: {}",
                error,
                error
                    .source()
                    .map(std::string::ToString::to_string)
                    .unwrap_or("Unknown error".to_string())
            ),
            RemoteAccessError::ParsingError(parse_error) => {
                write!(f, "{parse_error}")
            }
            RemoteAccessError::InvalidEndpoint => write!(f, "invalid drop endpoint"),
            RemoteAccessError::HandshakeFailed(message) => {
                write!(f, "failed to complete handshake: {message}")
            }
            RemoteAccessError::GameNotFound(id) => write!(f, "could not find game on server: {id}"),
            RemoteAccessError::InvalidResponse(error) => write!(
                f,
                "server returned an invalid response: {}, {}",
                error.status_code, error.status_message
            ),
            RemoteAccessError::UnparseableResponse(error) => {
                write!(f, "server returned an invalid response: {error}")
            }
            RemoteAccessError::ManifestDownloadFailed(status, response) => {
                write!(f, "failed to download game manifest: {status} {response}")
            }
            RemoteAccessError::OutOfSync => write!(
                f,
                "server's and client's time are out of sync. Please ensure they are within at least 30 seconds of each other"
            ),
            RemoteAccessError::Cache(error) => write!(f, "Cache Error: {error}"),
            RemoteAccessError::CorruptedState => write!(
                f,
                "Drop encountered a corrupted internal state. Please report this to the developers, with details of reproduction."
            ),
        }
    }
}

impl From<reqwest::Error> for RemoteAccessError {
    fn from(err: reqwest::Error) -> Self {
        RemoteAccessError::FetchError(Arc::new(err))
    }
}
impl From<reqwest_websocket::Error> for RemoteAccessError {
    fn from(err: reqwest_websocket::Error) -> Self {
        RemoteAccessError::FetchErrorWS(Arc::new(err))
    }
}
impl From<ParseError> for RemoteAccessError {
    fn from(err: ParseError) -> Self {
        RemoteAccessError::ParsingError(err)
    }
}
impl std::error::Error for RemoteAccessError {}

#[derive(Debug, SerializeDisplay)]
pub enum CacheError {
    HeaderNotFound(HeaderName),
    ParseError(ToStrError),
    Remote(RemoteAccessError),
    ConstructionError(http::Error),
}

impl Display for CacheError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let s = match self {
            CacheError::HeaderNotFound(header_name) => {
                format!("Could not find header {header_name} in cache")
            }
            CacheError::ParseError(to_str_error) => {
                format!("Could not parse cache with error {to_str_error}")
            }
            CacheError::Remote(remote_access_error) => {
                format!("Cache got remote access error: {remote_access_error}")
            }
            CacheError::ConstructionError(error) => {
                format!("Could not construct cache body with error {error}")
            }
        };
        write!(f, "{s}")
    }
}
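Note: the `From` impls above are what let the rest of the crate use `?` directly on `reqwest`, `reqwest_websocket`, and `url` errors. A minimal sketch (the function itself is illustrative, not from this commit):

// Illustrative only: url::ParseError converts into
// RemoteAccessError::ParsingError through the From impl above.
fn parse_endpoint(raw: &str) -> Result<url::Url, RemoteAccessError> {
    Ok(url::Url::parse(raw)?)
}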
src-tauri/remote/src/fetch_object.rs (new file, 82 lines)
@@ -0,0 +1,82 @@
use database::{DB, interface::DatabaseImpls};
use http::{Response, header::CONTENT_TYPE, response::Builder as ResponseBuilder};
use log::{debug, warn};
use tauri::UriSchemeResponder;

use crate::{error::CacheError, utils::DROP_CLIENT_ASYNC};

use super::{
    auth::generate_authorization_header,
    cache::{ObjectCache, cache_object, get_cached_object},
};

pub async fn fetch_object_wrapper(request: http::Request<Vec<u8>>, responder: UriSchemeResponder) {
    match fetch_object(request).await {
        Ok(r) => responder.respond(r),
        Err(e) => {
            warn!("Cache error: {e}");
            responder.respond(
                Response::builder()
                    .status(500)
                    .body(Vec::new())
                    .expect("Failed to build error response"),
            );
        }
    };
}

pub async fn fetch_object(
    request: http::Request<Vec<u8>>,
) -> Result<Response<Vec<u8>>, CacheError> {
    // Drop leading /
    let object_id = &request.uri().path()[1..];

    let cache_result = get_cached_object::<ObjectCache>(object_id);
    if let Ok(cache_result) = &cache_result
        && !cache_result.has_expired()
    {
        return cache_result.try_into();
    }

    let header = generate_authorization_header();
    let client = DROP_CLIENT_ASYNC.clone();
    let url = format!("{}api/v1/client/object/{object_id}", DB.fetch_base_url());
    let response = client.get(url).header("Authorization", header).send().await;

    match response {
        Ok(r) => {
            let resp_builder = ResponseBuilder::new().header(
                CONTENT_TYPE,
                r.headers()
                    .get("Content-Type")
                    .expect("Failed get Content-Type header"),
            );
            let data = match r.bytes().await {
                Ok(data) => Vec::from(data),
                Err(e) => {
                    warn!("Could not get data from cache object {object_id} with error {e}",);
                    Vec::new()
                }
            };
            let resp = resp_builder
                .body(data)
                .expect("Failed to build object cache response body");
            if cache_result.map_or(true, |x| x.has_expired()) {
                cache_object::<ObjectCache>(object_id, &resp.clone().try_into()?)
                    .expect("Failed to create cached object");
            }

            Ok(resp)
        }
        Err(e) => {
            debug!("Object fetch failed with error {e}. Attempting to download from cache");
            match cache_result {
                Ok(cache_result) => cache_result.try_into(),
                Err(e) => {
                    warn!("{e}");
                    Err(CacheError::Remote(e))
                }
            }
        }
    }
}
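Note: `fetch_object_wrapper` is shaped for Tauri's custom URI scheme handling. A hedged wiring sketch follows; the `object` scheme name and the exact builder call are assumptions about the host crate and are not part of this diff.

// Sketch: assumes Tauri 2's register_asynchronous_uri_scheme_protocol and a
// hypothetical "object" scheme name registered by the main src-tauri crate.
fn register_object_protocol(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<tauri::Wry> {
    builder.register_asynchronous_uri_scheme_protocol("object", |_ctx, request, responder| {
        // Hand the request off to the async handler without blocking the webview.
        tauri::async_runtime::spawn(remote::fetch_object::fetch_object_wrapper(
            request, responder,
        ));
    })
}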
src-tauri/remote/src/lib.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
pub mod auth;
#[macro_use]
pub mod cache;
pub mod error;
pub mod fetch_object;
pub mod requests;
pub mod server_proto;
pub mod utils;

pub use auth::setup;
src-tauri/remote/src/requests.rs (new file, 31 lines)
@@ -0,0 +1,31 @@
use database::{DB, interface::DatabaseImpls};
use url::Url;

use crate::{
    auth::generate_authorization_header, error::RemoteAccessError, utils::DROP_CLIENT_ASYNC,
};

pub fn generate_url<T: AsRef<str>>(
    path_components: &[T],
    query: &[(T, T)],
) -> Result<Url, RemoteAccessError> {
    let mut base_url = DB.fetch_base_url();
    for endpoint in path_components {
        base_url = base_url.join(endpoint.as_ref())?;
    }
    {
        let mut queries = base_url.query_pairs_mut();
        for (param, val) in query {
            queries.append_pair(param.as_ref(), val.as_ref());
        }
    }
    Ok(base_url)
}

pub async fn make_authenticated_get(url: Url) -> Result<reqwest::Response, reqwest::Error> {
    DROP_CLIENT_ASYNC
        .get(url)
        .header("Authorization", generate_authorization_header())
        .send()
        .await
}
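Note: `generate_url` joins path components onto the configured base URL and appends query pairs, and `make_authenticated_get` attaches the signed nonce header. A small usage sketch follows; the endpoint path and query key are illustrative, not from this commit.

// Illustrative only; assumes this sits alongside crate::requests so both
// helpers are in scope. Errors convert via the From impls in error.rs.
async fn fetch_news(page: &str) -> Result<reqwest::Response, RemoteAccessError> {
    let url = generate_url(&["/api/v1/client/news"], &[("page", page)])?;
    Ok(make_authenticated_get(url).await?)
}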
src-tauri/remote/src/server_proto.rs (new file, 108 lines)
@@ -0,0 +1,108 @@
use std::str::FromStr;

use database::borrow_db_checked;
use http::{Request, Response, StatusCode, Uri, uri::PathAndQuery};
use log::{error, warn};
use tauri::UriSchemeResponder;
use utils::webbrowser_open::webbrowser_open;

use crate::utils::DROP_CLIENT_SYNC;

pub async fn handle_server_proto_offline_wrapper(
    request: Request<Vec<u8>>,
    responder: UriSchemeResponder,
) {
    responder.respond(match handle_server_proto_offline(request).await {
        Ok(res) => res,
        Err(_) => unreachable!(),
    });
}

pub async fn handle_server_proto_offline(
    _request: Request<Vec<u8>>,
) -> Result<Response<Vec<u8>>, StatusCode> {
    Ok(Response::builder()
        .status(StatusCode::NOT_FOUND)
        .body(Vec::new())
        .expect("Failed to build error response for proto offline"))
}

pub async fn handle_server_proto_wrapper(request: Request<Vec<u8>>, responder: UriSchemeResponder) {
    match handle_server_proto(request).await {
        Ok(r) => responder.respond(r),
        Err(e) => {
            warn!("Cache error: {e}");
            responder.respond(
                Response::builder()
                    .status(e)
                    .body(Vec::new())
                    .expect("Failed to build error response"),
            );
        }
    }
}

async fn handle_server_proto(request: Request<Vec<u8>>) -> Result<Response<Vec<u8>>, StatusCode> {
    let db_handle = borrow_db_checked();
    let auth = match db_handle.auth.as_ref() {
        Some(auth) => auth,
        None => {
            error!("Could not find auth in database");
            return Err(StatusCode::UNAUTHORIZED);
        }
    };
    let web_token = match &auth.web_token {
        Some(token) => token,
        None => return Err(StatusCode::UNAUTHORIZED),
    };
    let remote_uri = db_handle
        .base_url
        .parse::<Uri>()
        .expect("Failed to parse base url");

    let path = request.uri().path();

    let mut new_uri = request.uri().clone().into_parts();
    new_uri.path_and_query = Some(
        PathAndQuery::from_str(&format!("{path}?noWrapper=true"))
            .expect("Failed to parse request path in proto"),
    );
    new_uri.authority = remote_uri.authority().cloned();
    new_uri.scheme = remote_uri.scheme().cloned();
    let err_msg = &format!("Failed to build new uri from parts {new_uri:?}");
    let new_uri = Uri::from_parts(new_uri).expect(err_msg);

    let whitelist_prefix = ["/store", "/api", "/_", "/fonts"];

    if whitelist_prefix.iter().all(|f| !path.starts_with(f)) {
        webbrowser_open(new_uri.to_string());
        return Ok(Response::new(Vec::new()));
    }

    let client = DROP_CLIENT_SYNC.clone();
    let response = match client
        .request(request.method().clone(), new_uri.to_string())
        .header("Authorization", format!("Bearer {web_token}"))
        .headers(request.headers().clone())
        .send()
    {
        Ok(response) => response,
        Err(e) => {
            warn!("Could not send response. Got {e} when sending");
            return Err(e.status().unwrap_or(StatusCode::BAD_REQUEST));
        }
    };

    let response_status = response.status();
    let response_body = match response.bytes() {
        Ok(bytes) => bytes,
        Err(e) => return Err(e.status().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR)),
    };

    let http_response = Response::builder()
        .status(response_status)
        .body(response_body.to_vec())
        .expect("Failed to build server proto response");

    Ok(http_response)
}
src-tauri/remote/src/utils.rs (new file, 119 lines)
@@ -0,0 +1,119 @@
use std::{
    fs::{self, File},
    io::Read,
    sync::LazyLock,
};

use database::db::DATA_ROOT_DIR;
use log::{debug, info, warn};
use reqwest::Certificate;
use serde::Deserialize;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DropHealthcheck {
    app_name: String,
}
impl DropHealthcheck {
    pub fn app_name(&self) -> &String {
        &self.app_name
    }
}
static DROP_CERT_BUNDLE: LazyLock<Vec<Certificate>> = LazyLock::new(fetch_certificates);
pub static DROP_CLIENT_SYNC: LazyLock<reqwest::blocking::Client> = LazyLock::new(get_client_sync);
pub static DROP_CLIENT_ASYNC: LazyLock<reqwest::Client> = LazyLock::new(get_client_async);
pub static DROP_CLIENT_WS_CLIENT: LazyLock<reqwest::Client> = LazyLock::new(get_client_ws);

fn fetch_certificates() -> Vec<Certificate> {
    let certificate_dir = DATA_ROOT_DIR.join("certificates");

    let mut certs = Vec::new();
    match fs::read_dir(certificate_dir) {
        Ok(c) => {
            for entry in c {
                match entry {
                    Ok(c) => {
                        let mut buf = Vec::new();
                        match File::open(c.path()) {
                            Ok(f) => f,
                            Err(e) => {
                                warn!(
                                    "Failed to open file at {} with error {}",
                                    c.path().display(),
                                    e
                                );
                                continue;
                            }
                        }
                        .read_to_end(&mut buf)
                        .unwrap_or_else(|e| {
                            panic!(
                                "Failed to read to end of certificate file {} with error {}",
                                c.path().display(),
                                e
                            )
                        });

                        match Certificate::from_pem_bundle(&buf) {
                            Ok(certificates) => {
                                for cert in certificates {
                                    certs.push(cert);
                                }
                                info!(
                                    "added {} certificate(s) from {}",
                                    certs.len(),
                                    c.file_name().display()
                                );
                            }
                            Err(e) => warn!(
                                "Invalid certificate file {} with error {}",
                                c.path().display(),
                                e
                            ),
                        }
                    }
                    Err(_) => todo!(),
                }
            }
        }
        Err(e) => {
            debug!("not loading certificates due to error: {e}");
        }
    };
    certs
}

pub fn get_client_sync() -> reqwest::blocking::Client {
    let mut client = reqwest::blocking::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client
        .use_rustls_tls()
        .build()
        .expect("Failed to build synchronous client")
}
pub fn get_client_async() -> reqwest::Client {
    let mut client = reqwest::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client
        .use_rustls_tls()
        .build()
        .expect("Failed to build asynchronous client")
}
pub fn get_client_ws() -> reqwest::Client {
    let mut client = reqwest::ClientBuilder::new();

    for cert in DROP_CERT_BUNDLE.iter() {
        client = client.add_root_certificate(cert.clone());
    }
    client
        .use_rustls_tls()
        .http1_only()
        .build()
        .expect("Failed to build websocket client")
}