4 Commits

SHA1 Message Date
0431eebaa7 fix: remove lua tests 2025-08-25 13:02:00 +10:00
e66a6581cb fix: temporarily remove luajit for compilation reasons 2025-08-25 12:43:23 +10:00
817c3cf503 feat: script backend, fixes 2025-08-25 12:35:12 +10:00
0d01809fd0 feat: no panik 2025-08-25 12:20:51 +10:00
12 changed files with 285 additions and 264 deletions

Cargo.lock (generated)

@@ -37,6 +37,12 @@ version = "0.2.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
 
+[[package]]
+name = "anyhow"
+version = "1.0.99"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
+
 [[package]]
 name = "arrayvec"
 version = "0.7.6"
@@ -282,16 +288,6 @@ dependencies = [
  "static_assertions",
 ]
 
-[[package]]
-name = "bstr"
-version = "1.12.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
-dependencies = [
- "memchr",
- "serde",
-]
-
 [[package]]
 name = "bumpalo"
 version = "3.19.0"
@@ -477,12 +473,12 @@ dependencies = [
 name = "droplet"
 version = "0.7.0"
 dependencies = [
+ "anyhow",
  "boa_engine",
  "dyn-clone",
  "flate2",
  "hex",
  "md5",
- "mlua",
  "napi",
  "napi-build",
  "napi-derive",
@@ -893,38 +889,13 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
-[[package]]
-name = "mlua"
-version = "0.11.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab2fea92b2adabd51808311b101551d6e3f8602b65e9fae51f7ad5b3d500f4cd"
-dependencies = [
- "bstr",
- "either",
- "mlua-sys",
- "num-traits",
- "parking_lot",
- "rustc-hash",
- "rustversion",
-]
-
-[[package]]
-name = "mlua-sys"
-version = "0.8.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d4dc9cfc5a7698899802e97480617d9726f7da78c910db989d4d0fd4991d900"
-dependencies = [
- "cc",
- "cfg-if",
- "pkg-config",
-]
-
 [[package]]
 name = "napi"
 version = "3.0.0-beta.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ca1763658b41abbdf10caaa63b74e58f4ec62d52b889a558e0af6f3638cc9426"
 dependencies = [
+ "anyhow",
  "bitflags",
  "ctor",
  "futures-core",
@@ -1096,16 +1067,6 @@ dependencies = [
  "portable-atomic",
 ]
 
-[[package]]
-name = "parking_lot"
-version = "0.12.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13"
-dependencies = [
- "lock_api",
- "parking_lot_core",
-]
-
 [[package]]
 name = "parking_lot_core"
 version = "0.9.11"
@@ -1183,12 +1144,6 @@ version = "0.2.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
 
-[[package]]
-name = "pkg-config"
-version = "0.3.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
-
 [[package]]
 name = "pollster"
 version = "0.4.0"


@@ -9,11 +9,7 @@ crate-type = ["cdylib"]
 [dependencies]
 # Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
-napi = { version = "3.0.0-beta.11", default-features = false, features = [
-  "napi6",
-  "async",
-  "web_stream",
-] }
+napi = { version = "3.0.0-beta.11", default-features = false, features = ["napi6", "async", "web_stream", "error_anyhow"] }
 napi-derive = "3.0.0-beta.11"
 hex = "0.4.3"
 md5 = "0.7.0"
@@ -27,9 +23,10 @@ rawzip = "0.3.0"
 dyn-clone = "1.0.20"
 flate2 = "1.1.2"
 rhai = "1.22.2"
-mlua = { version = "0.11.2", features = ["luajit"] }
+# mlua = { version = "0.11.2", features = ["luajit"] }
 boa_engine = "0.20.0"
 serde_json = "1.0.143"
+anyhow = "1.0.99"
 
 [package.metadata.patch]
 crates = ["rawzip"]
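
Editor's note: the new `error_anyhow` feature is what lets the exported functions later in this diff return `anyhow::Result` directly; napi-rs then surfaces the error as a thrown JS exception. A minimal sketch of the effect — the function below is hypothetical, not part of this PR:

    use anyhow::{anyhow, Result};

    // Hypothetical exported function: with `error_anyhow` enabled, returning
    // anyhow::Result is accepted by #[napi], and an Err becomes a JS exception.
    #[napi]
    pub fn parse_port(input: String) -> Result<u16> {
        input
            .parse::<u16>()
            .map_err(|e| anyhow!("not a valid port: {e}"))
    }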


@@ -93,3 +93,47 @@ test.skip("performance test", async (t) => {
   fs.rmSync(dirName, { recursive: true });
 });
+
+test("special characters", async (t) => {
+  // Setup test dir
+  const dirName = "./.test/sc";
+  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
+  fs.mkdirSync(dirName, { recursive: true });
+
+  // Config
+  const fileNames = ["Technická podpora.rtf", "Servicio técnico.rtf"];
+
+  for (let i = 0; i < fileNames.length; i++) {
+    const fileName = path.join(dirName, fileNames[i]);
+    fs.writeFileSync(fileName, i.toString());
+  }
+
+  const dropletHandler = new DropletHandler();
+  const manifest = JSON.parse(
+    await new Promise((r, e) =>
+      generateManifest(
+        dropletHandler,
+        dirName,
+        (_, __) => {},
+        (_, __) => {},
+        (err, manifest) => (err ? e(err) : r(manifest))
+      )
+    )
+  );
+
+  // Check the first few checksums
+  const checksums = [
+    "cfcd208495d565ef66e7dff9f98764da",
+    "c4ca4238a0b923820dcc509a6f75849b",
+  ];
+
+  for (let index in checksums) {
+    const entry = manifest[fileNames[index]];
+    if (!entry) return t.fail(`manifest missing file ${index}`);
+    const checksum = entry.checksums[0];
+
+    t.is(checksum, checksums[index], `checksums do not match for ${index}`);
+  }
+
+  fs.rmSync(dirName, { recursive: true });
+});


@@ -1,7 +1,7 @@
 import test from "ava";
 import { ScriptEngine } from "../index.js";
 
-test("lua syntax fail", (t) => {
+test.skip("lua syntax fail", (t) => {
   const scriptEngine = new ScriptEngine();
   const luaIshCode = `
@@ -46,7 +46,7 @@ test("js", (t) => {
   t.pass();
 });
 
-test("lua", (t) => {
+test.skip("lua", (t) => {
   const scriptEngine = new ScriptEngine();
   const luaModule = `


@@ -1,6 +1,6 @@
 {
   "name": "@drop-oss/droplet",
-  "version": "2.3.1",
+  "version": "3.0.1",
   "main": "index.js",
   "types": "index.d.ts",
   "napi": {
@@ -46,7 +46,7 @@
     "build": "napi build --platform --release",
     "build:debug": "napi build --platform",
     "prepublishOnly": "napi prepublish -t npm",
-    "test": "ava",
+    "test": "ava ",
     "universal": "napi universalize",
     "version": "napi version"
   },


@@ -1,10 +1,13 @@
-#![deny(clippy::all)]
+#![deny(clippy::unwrap_used)]
+#![deny(clippy::expect_used)]
+#![deny(clippy::panic)]
 #![feature(trait_alias)]
 
 pub mod manifest;
+pub mod script;
 pub mod ssl;
 pub mod version;
-pub mod script;
 
 #[macro_use]
 extern crate napi_derive;
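
Editor's note: swapping `deny(clippy::all)` for `deny(clippy::unwrap_used)`, `clippy::expect_used`, and `clippy::panic` is what forces the `.unwrap()` removals throughout the rest of this diff. A minimal illustration (hypothetical function, not from this PR):

    // Under #![deny(clippy::unwrap_used)] the commented-out line is a hard
    // build error, so fallible calls must thread errors with `?` instead.
    fn read_len(path: &str) -> anyhow::Result<u64> {
        // let len = std::fs::metadata(path).unwrap().len(); // denied by lint
        let len = std::fs::metadata(path)?.len();
        Ok(len)
    }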


@@ -35,7 +35,7 @@ pub fn generate_manifest<'a>(
     progress_sfn: ThreadsafeFunction<i32>,
     log_sfn: ThreadsafeFunction<String>,
     callback_sfn: ThreadsafeFunction<String>,
-) -> Result<()> {
+) -> anyhow::Result<()> {
     let backend: &mut Box<dyn VersionBackend + Send> = droplet_handler
         .create_backend_for_path(dir)
         .ok_or(napi::Error::from_reason(
@@ -49,83 +49,94 @@ pub fn generate_manifest<'a>(
     unsafe { std::mem::transmute(backend) };
 
     thread::spawn(move || {
-        let files = backend.list_files();
-        // Filepath to chunk data
-        let mut chunks: HashMap<String, ChunkData> = HashMap::new();
-        let total: i32 = files.len() as i32;
-        let mut i: i32 = 0;
-        let mut buf = [0u8; 1024 * 16];
-
-        for version_file in files {
-            let mut reader = backend.reader(&version_file, 0, 0).unwrap();
-
-            let mut chunk_data = ChunkData {
-                permissions: version_file.permission,
-                ids: Vec::new(),
-                checksums: Vec::new(),
-                lengths: Vec::new(),
-            };
-
-            let mut chunk_index = 0;
-            loop {
-                let mut length = 0;
-                let mut buffer: Vec<u8> = Vec::new();
-                let mut file_empty = false;
-
-                loop {
-                    let read = reader.read(&mut buf).unwrap();
-                    length += read;
-
-                    // If we're out of data, add this chunk and then move onto the next file
-                    if read == 0 {
-                        file_empty = true;
-                        break;
-                    }
-
-                    buffer.extend_from_slice(&buf[0..read]);
-
-                    if length >= CHUNK_SIZE {
-                        break;
-                    }
-                }
-
-                let chunk_id = Uuid::new_v4();
-                let checksum = md5::compute(buffer).0;
-                let checksum_string = hex::encode(checksum);
-
-                chunk_data.ids.push(chunk_id.to_string());
-                chunk_data.checksums.push(checksum_string);
-                chunk_data.lengths.push(length);
-
-                let log_str = format!(
-                    "Processed chunk {} for {}",
-                    chunk_index, &version_file.relative_filename
-                );
-                log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
-
-                chunk_index += 1;
-
-                if file_empty {
-                    break;
-                }
-            }
-            chunks.insert(version_file.relative_filename, chunk_data);
-
-            i += 1;
-            let progress = i * 100 / total;
-            progress_sfn.call(Ok(progress), ThreadsafeFunctionCallMode::Blocking);
-        }
-
-        callback_sfn.call(
-            Ok(json!(chunks).to_string()),
-            ThreadsafeFunctionCallMode::Blocking,
-        );
+        let callback_borrow = &callback_sfn;
+        let mut inner = move || -> Result<()> {
+            let files = backend.list_files()?;
+            // Filepath to chunk data
+            let mut chunks: HashMap<String, ChunkData> = HashMap::new();
+            let total: i32 = files.len() as i32;
+            let mut i: i32 = 0;
+            let mut buf = [0u8; 1024 * 16];
+
+            for version_file in files {
+                let mut reader = backend.reader(&version_file, 0, 0)?;
+
+                let mut chunk_data = ChunkData {
+                    permissions: version_file.permission,
+                    ids: Vec::new(),
+                    checksums: Vec::new(),
+                    lengths: Vec::new(),
+                };
+
+                let mut chunk_index = 0;
+                loop {
+                    let mut length = 0;
+                    let mut buffer: Vec<u8> = Vec::new();
+                    let mut file_empty = false;
+
+                    loop {
+                        let read = reader.read(&mut buf)?;
+                        length += read;
+
+                        // If we're out of data, add this chunk and then move onto the next file
+                        if read == 0 {
+                            file_empty = true;
+                            break;
+                        }
+
+                        buffer.extend_from_slice(&buf[0..read]);
+
+                        if length >= CHUNK_SIZE {
+                            break;
+                        }
+                    }
+
+                    let chunk_id = Uuid::new_v4();
+                    let checksum = md5::compute(buffer).0;
+                    let checksum_string = hex::encode(checksum);
+
+                    chunk_data.ids.push(chunk_id.to_string());
+                    chunk_data.checksums.push(checksum_string);
+                    chunk_data.lengths.push(length);
+
+                    let log_str = format!(
+                        "Processed chunk {} for {}",
+                        chunk_index, &version_file.relative_filename
+                    );
+                    log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
+
+                    chunk_index += 1;
+
+                    if file_empty {
+                        break;
+                    }
+                }
+                chunks.insert(version_file.relative_filename, chunk_data);
+
+                i += 1;
+                let progress = i * 100 / total;
+                progress_sfn.call(Ok(progress), ThreadsafeFunctionCallMode::Blocking);
+            }
+
+            callback_borrow.call(
+                Ok(json!(chunks).to_string()),
+                ThreadsafeFunctionCallMode::Blocking,
+            );
+
+            Ok(())
+        };
+
+        let result = inner();
+        if let Err(generate_err) = result {
+            callback_borrow.call(Err(generate_err), ThreadsafeFunctionCallMode::Blocking);
+        }
     });
 
     Ok(())
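
Editor's note: the shape of this refactor — an `inner` closure returning `Result` so `?` works inside the spawned thread, with any error forwarded through the same threadsafe callback — is a common pattern. A stripped-down sketch with simplified, hypothetical names (not the crate's actual API):

    use std::thread;

    // Run the fallible body in a closure so `?` can be used, then push either
    // outcome through the single reporting channel the JS side listens on.
    fn spawn_worker<F>(report: F)
    where
        F: Fn(anyhow::Result<String>) + Send + 'static,
    {
        thread::spawn(move || {
            let inner = || -> anyhow::Result<String> {
                // ... chunk, hash, and serialize here, failing with `?` ...
                Ok(String::from("{}"))
            };
            report(inner());
        });
    }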


@@ -1,5 +1,5 @@
 use boa_engine::{Context, JsValue, Source};
-use mlua::{FromLuaMulti, Function, Lua};
+// use mlua::{FromLuaMulti, Function, Lua};
 use napi::Result;
 use rhai::AST;
@@ -14,14 +14,14 @@ pub struct Script(ScriptInner);
 pub enum ScriptInner {
     Rhai { script: AST },
-    Lua { script: Function },
+    // Lua { script: Function },
     Javascript { script: boa_engine::Script },
 }
 
 #[napi]
 pub struct ScriptEngine {
     rhai_engine: rhai::Engine,
-    lua_engine: Lua,
+    // lua_engine: Lua,
     js_engine: Context,
 }
@@ -31,13 +31,13 @@ impl ScriptEngine {
     pub fn new() -> Self {
         ScriptEngine {
             rhai_engine: rhai::Engine::new(),
-            lua_engine: Lua::new(),
+            // lua_engine: Lua::new(),
             js_engine: Context::default(),
         }
     }
 
     #[napi]
-    pub fn build_rahi_script(&self, content: String) -> Result<Script> {
+    pub fn build_rhai_script(&self, content: String) -> Result<Script> {
         let script = self
             .rhai_engine
             .compile(content.clone())
@@ -45,6 +45,7 @@ impl ScriptEngine {
         Ok(Script(ScriptInner::Rhai { script }))
     }
 
+    /*
     #[napi]
     pub fn build_lua_script(&self, content: String) -> Result<Script> {
         let func = self
@@ -54,6 +55,7 @@ impl ScriptEngine {
             .map_err(|e| napi::Error::from_reason(e.to_string()))?;
         Ok(Script(ScriptInner::Lua { script: func }))
     }
+    */
 
     #[napi]
     pub fn build_js_script(&mut self, content: String) -> Result<Script> {
@@ -76,6 +78,7 @@ impl ScriptEngine {
         Ok(v)
     }
 
+    /*
     fn execute_lua_script<T>(&self, function: &Function) -> Result<T>
     where
         T: FromLuaMulti,
@@ -85,6 +88,7 @@ impl ScriptEngine {
             .map_err(|e| napi::Error::from_reason(e.to_string()))?;
         Ok(v)
     }
+    */
 
     fn execute_js_script(&mut self, func: &boa_engine::Script) -> Result<JsValue> {
         let v = func
@@ -100,9 +104,9 @@ impl ScriptEngine {
             ScriptInner::Rhai { script } => {
                 self.execute_rhai_script::<()>(script)?;
             }
-            ScriptInner::Lua { script } => {
-                self.execute_lua_script::<()>(script)?;
-            }
+            /*ScriptInner::Lua { script } => {
+                self.execute_lua_script::<()>(script)?;
+            }*/
             ScriptInner::Javascript { script } => {
                 self.execute_js_script(script)?;
             }
@@ -114,14 +118,15 @@ impl ScriptEngine {
     pub fn fetch_strings(&mut self, script: &mut Script) -> Result<Vec<String>> {
         Ok(match &script.0 {
             ScriptInner::Rhai { script } => self.execute_rhai_script(script)?,
-            ScriptInner::Lua { script } => self.execute_lua_script(script)?,
+            //ScriptInner::Lua { script } => self.execute_lua_script(script)?,
             ScriptInner::Javascript { script } => {
                 let v = self.execute_js_script(script)?;
                 serde_json::from_value(
                     v.to_json(&mut self.js_engine)
                         .map_err(|e| napi::Error::from_reason(e.to_string()))?,
-                ).map_err(|e| napi::Error::from_reason(e.to_string()))?
+                )
+                .map_err(|e| napi::Error::from_reason(e.to_string()))?
             }
         })
     }
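
Editor's note: this hunk also fixes the typo `build_rahi_script` to `build_rhai_script`. A purely illustrative Rust-side sketch of the compile entry point shown in this diff (the Rhai source string and `demo` function are made up):

    // Compile a Rhai snippet into a Script handle; running it goes through
    // the engine's execute path shown above.
    fn demo() -> napi::Result<()> {
        let engine = ScriptEngine::new();
        let _script = engine.build_rhai_script("print(\"hello\");".to_string())?;
        Ok(())
    }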


@@ -1,4 +1,4 @@
-use napi::Error;
+use anyhow::anyhow;
 use rcgen::{
     CertificateParams, DistinguishedName, IsCa, KeyPair, KeyUsagePurpose, PublicKeyData,
     SubjectPublicKeyInfo,
@@ -10,7 +10,7 @@ use x509_parser::parse_x509_certificate;
 use x509_parser::pem::Pem;
 
 #[napi]
-pub fn generate_root_ca() -> Result<Vec<String>, Error> {
+pub fn generate_root_ca() -> anyhow::Result<Vec<String>> {
     let mut params = CertificateParams::default();
 
     let mut name = DistinguishedName::new();
@@ -22,7 +22,7 @@ pub fn generate_root_ca() -> Result<Vec<String>, Error> {
     params.not_before = OffsetDateTime::now_utc();
     params.not_after = OffsetDateTime::now_utc()
         .checked_add(Duration::days(365 * 1000))
-        .unwrap();
+        .ok_or(anyhow!("failed to calculate end date"))?;
 
     params.is_ca = IsCa::Ca(rcgen::BasicConstraints::Unconstrained);
@@ -32,9 +32,8 @@ pub fn generate_root_ca() -> Result<Vec<String>, Error> {
         KeyUsagePurpose::DigitalSignature,
     ];
 
-    let key_pair = KeyPair::generate().map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let certificate = CertificateParams::self_signed(params, &key_pair)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+    let key_pair = KeyPair::generate()?;
+    let certificate = CertificateParams::self_signed(params, &key_pair)?;
 
     // Returns certificate, then private key
     Ok(vec![certificate.pem(), key_pair.serialize_pem()])
@@ -46,13 +45,10 @@ pub fn generate_client_certificate(
     _client_name: String,
     root_ca: String,
     root_ca_private: String,
-) -> Result<Vec<String>, Error> {
-    let root_key_pair =
-        KeyPair::from_pem(&root_ca_private).map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let certificate_params = CertificateParams::from_ca_cert_pem(&root_ca)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let root_ca = CertificateParams::self_signed(certificate_params, &root_key_pair)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+) -> anyhow::Result<Vec<String>> {
+    let root_key_pair = KeyPair::from_pem(&root_ca_private)?;
+    let certificate_params = CertificateParams::from_ca_cert_pem(&root_ca)?;
+    let root_ca = CertificateParams::self_signed(certificate_params, &root_key_pair)?;
 
     let mut params = CertificateParams::default();
@@ -66,28 +62,24 @@ pub fn generate_client_certificate(
         KeyUsagePurpose::DataEncipherment,
     ];
 
-    let key_pair = KeyPair::generate_for(&rcgen::PKCS_ECDSA_P384_SHA384)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let certificate = CertificateParams::signed_by(params, &key_pair, &root_ca, &root_key_pair)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+    let key_pair = KeyPair::generate_for(&rcgen::PKCS_ECDSA_P384_SHA384)?;
+    let certificate = CertificateParams::signed_by(params, &key_pair, &root_ca, &root_key_pair)?;
 
     // Returns certificate, then private key
     Ok(vec![certificate.pem(), key_pair.serialize_pem()])
 }
 
 #[napi]
-pub fn verify_client_certificate(client_cert: String, root_ca: String) -> Result<bool, Error> {
+pub fn verify_client_certificate(client_cert: String, root_ca: String) -> anyhow::Result<bool> {
     let root_ca = Pem::iter_from_buffer(root_ca.as_bytes())
         .next()
-        .unwrap()
-        .unwrap();
-    let root_ca = root_ca.parse_x509().unwrap();
+        .ok_or(anyhow!("no certificates in root ca"))??;
+    let root_ca = root_ca.parse_x509()?;
 
     let client_cert = Pem::iter_from_buffer(client_cert.as_bytes())
         .next()
-        .unwrap()
-        .unwrap();
-    let client_cert = client_cert.parse_x509().unwrap();
+        .ok_or(anyhow!("No client certs in chain."))??;
+    let client_cert = client_cert.parse_x509()?;
 
     let valid = root_ca
         .verify_signature(Some(client_cert.public_key()))
@@ -97,31 +89,33 @@ pub fn verify_client_certificate(client_cert: String, root_ca: String) -> Result
 }
 
 #[napi]
-pub fn sign_nonce(private_key: String, nonce: String) -> Result<String, Error> {
+pub fn sign_nonce(private_key: String, nonce: String) -> anyhow::Result<String> {
     let rng = SystemRandom::new();
 
-    let key_pair = KeyPair::from_pem(&private_key).unwrap();
+    let key_pair = KeyPair::from_pem(&private_key)?;
     let key_pair = EcdsaKeyPair::from_pkcs8(
         &ring::signature::ECDSA_P384_SHA384_FIXED_SIGNING,
         &key_pair.serialize_der(),
         &rng,
     )
-    .unwrap();
+    .map_err(|e| napi::Error::from_reason(e.to_string()))?;
 
-    let signature = key_pair.sign(&rng, nonce.as_bytes()).unwrap();
+    let signature = key_pair
+        .sign(&rng, nonce.as_bytes())
+        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
     let hex_signature = hex::encode(signature);
 
     Ok(hex_signature)
 }
 
 #[napi]
-pub fn verify_nonce(public_cert: String, nonce: String, signature: String) -> Result<bool, Error> {
-    let (_, pem) = x509_parser::pem::parse_x509_pem(public_cert.as_bytes()).unwrap();
-    let (_, spki) = parse_x509_certificate(&pem.contents).unwrap();
-    let public_key = SubjectPublicKeyInfo::from_der(spki.public_key().raw).unwrap();
+pub fn verify_nonce(public_cert: String, nonce: String, signature: String) -> anyhow::Result<bool> {
+    let (_, pem) = x509_parser::pem::parse_x509_pem(public_cert.as_bytes())?;
+    let (_, spki) = parse_x509_certificate(&pem.contents)?;
+    let public_key = SubjectPublicKeyInfo::from_der(spki.public_key().raw)?;
 
-    let raw_signature = hex::decode(signature).unwrap();
+    let raw_signature = hex::decode(signature)?;
 
     let valid = ring::signature::ECDSA_P384_SHA384_FIXED
         .verify(


@@ -7,26 +7,29 @@ use std::{
     sync::Arc,
 };
 
+use anyhow::anyhow;
 use flate2::read::DeflateDecoder;
 use rawzip::{
-    CompressionMethod, FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry,
-    ZipVerifier, RECOMMENDED_BUFFER_SIZE,
+    CompressionMethod, FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry, ZipVerifier,
+    RECOMMENDED_BUFFER_SIZE,
 };
 
 use crate::version::types::{MinimumFileObject, VersionBackend, VersionFile};
 
-pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
-    if metadata(path).unwrap().is_dir() {
-        let paths = fs::read_dir(path).unwrap();
+pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) -> napi::Result<()> {
+    if metadata(path)?.is_dir() {
+        let paths = fs::read_dir(path)?;
         for path_result in paths {
-            let full_path = path_result.unwrap().path();
-            if metadata(&full_path).unwrap().is_dir() {
-                _list_files(vec, &full_path);
+            let full_path = path_result?.path();
+            if metadata(&full_path)?.is_dir() {
+                _list_files(vec, &full_path)?;
             } else {
                 vec.push(full_path);
             }
         }
-    }
+    };
+    Ok(())
 }
 
 #[derive(Clone)]
@@ -34,23 +37,26 @@ pub struct PathVersionBackend {
     pub base_dir: PathBuf,
 }
 impl VersionBackend for PathVersionBackend {
-    fn list_files(&mut self) -> Vec<VersionFile> {
+    fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
         let mut vec = Vec::new();
-        _list_files(&mut vec, &self.base_dir);
+        _list_files(&mut vec, &self.base_dir)?;
 
         let mut results = Vec::new();
 
         for pathbuf in vec.iter() {
-            let relative = pathbuf.strip_prefix(self.base_dir.clone()).unwrap();
+            let relative = pathbuf.strip_prefix(self.base_dir.clone())?;
 
             results.push(
-                self
-                    .peek_file(relative.to_str().unwrap().to_owned())
-                    .unwrap(),
+                self.peek_file(
+                    relative
+                        .to_str()
+                        .ok_or(napi::Error::from_reason("Could not parse path"))?
+                        .to_owned(),
+                )?,
             );
         }
 
-        results
+        Ok(results)
     }
 
     fn reader(
@@ -58,28 +64,28 @@ impl VersionBackend for PathVersionBackend {
         file: &VersionFile,
         start: u64,
         end: u64,
-    ) -> Option<Box<dyn MinimumFileObject + 'static>> {
-        let mut file = File::open(self.base_dir.join(file.relative_filename.clone())).ok()?;
+    ) -> anyhow::Result<Box<dyn MinimumFileObject + 'static>> {
+        let mut file = File::open(self.base_dir.join(file.relative_filename.clone()))?;
 
         if start != 0 {
-            file.seek(SeekFrom::Start(start)).ok()?;
+            file.seek(SeekFrom::Start(start))?;
         }
 
         if end != 0 {
-            return Some(Box::new(file.take(end - start)));
+            return Ok(Box::new(file.take(end - start)));
         }
 
-        return Some(Box::new(file));
+        Ok(Box::new(file))
     }
 
-    fn peek_file(&mut self, sub_path: String) -> Option<VersionFile> {
+    fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
         let pathbuf = self.base_dir.join(sub_path.clone());
         if !pathbuf.exists() {
-            return None;
+            return Err(anyhow!("Path doesn't exist."));
         };
 
-        let file = File::open(pathbuf.clone()).unwrap();
-        let metadata = file.try_clone().unwrap().metadata().unwrap();
+        let file = File::open(pathbuf.clone())?;
+        let metadata = file.try_clone()?.metadata()?;
         let permission_object = metadata.permissions();
         let permissions = {
             let perm: u32;
@@ -94,7 +100,7 @@ impl VersionBackend for PathVersionBackend {
             perm
         };
 
-        Some(VersionFile {
+        Ok(VersionFile {
             relative_filename: sub_path,
             permission: permissions,
             size: metadata.len(),
@@ -107,11 +113,11 @@ pub struct ZipVersionBackend {
     archive: Arc<ZipArchive<FileReader>>,
 }
 impl ZipVersionBackend {
-    pub fn new(archive: File) -> Self {
-        let archive = ZipArchive::from_file(archive, &mut [0u8; RECOMMENDED_BUFFER_SIZE]).unwrap();
-        Self {
+    pub fn new(archive: File) -> anyhow::Result<Self> {
+        let archive = ZipArchive::from_file(archive, &mut [0u8; RECOMMENDED_BUFFER_SIZE])?;
+        Ok(Self {
             archive: Arc::new(archive),
-        }
+        })
     }
 
     pub fn new_entry<'archive>(
@@ -120,27 +126,26 @@ impl ZipVersionBackend {
         compression_method: CompressionMethod,
         start: u64,
         end: u64,
-    ) -> ZipFileWrapper<'archive> {
+    ) -> anyhow::Result<ZipFileWrapper<'archive>> {
         let deflater: Box<dyn Read + Send + 'archive> = match compression_method {
             CompressionMethod::Store => Box::new(entry.reader()),
             CompressionMethod::Deflate => Box::new(DeflateDecoder::new(entry.reader())),
             CompressionMethod::Deflate64 => Box::new(DeflateDecoder::new(entry.reader())),
-            _ => panic!(
-                "unsupported decompression algorithm: {:?}",
-                compression_method
-            ),
+            _ => Err(anyhow!(
+                "unsupported decompression algorithm: {compression_method:?}"
+            ))?,
         };
 
         let mut verifier = entry.verifying_reader(deflater);
 
         if start != 0 {
-            io::copy(&mut (&mut verifier).take(start), &mut Sink::default()).unwrap();
+            io::copy(&mut (&mut verifier).take(start), &mut Sink::default())?;
         }
 
-        ZipFileWrapper {
+        Ok(ZipFileWrapper {
             reader: verifier,
             limit: (end - start) as usize,
             current: 0,
-        }
+        })
     }
 }
@@ -159,10 +164,8 @@ impl<'a> Read for ZipFileWrapper<'a> {
         let has_limit = self.limit != 0;
 
         // End this stream if the read is the right size
-        if has_limit {
-            if self.current >= self.limit {
-                return Ok(0);
-            }
+        if has_limit && self.current >= self.limit {
+            return Ok(0);
         }
 
         let read = self.reader.read(buf)?;
@@ -173,7 +176,7 @@ impl<'a> Read for ZipFileWrapper<'a> {
                 return Ok(read - over);
             }
         }
-        return Ok(read);
+        Ok(read)
     }
 }
 //impl<'a> MinimumFileObject for ZipFileWrapper<'a> {}
@@ -182,40 +185,40 @@ impl ZipVersionBackend {
     fn find_wayfinder(
         &mut self,
         filename: &str,
-    ) -> Option<(ZipArchiveEntryWayfinder, CompressionMethod)> {
+    ) -> anyhow::Result<(ZipArchiveEntryWayfinder, CompressionMethod)> {
         let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
         let mut entries = self.archive.entries(read_buffer);
 
         let entry = loop {
-            if let Some(v) = entries.next_entry().unwrap() {
-                if v.file_path().try_normalize().unwrap().as_ref() == filename {
-                    break Some(v);
+            if let Some(v) = entries.next_entry()? {
+                if v.file_path().try_normalize()?.as_ref() == filename {
+                    break Ok(v);
                 }
             } else {
-                break None;
+                break Err(anyhow!("failed to fetch zip file header."));
            }
        }?;
 
        let wayfinder = entry.wayfinder();
 
-        Some((wayfinder, entry.compression_method()))
+        Ok((wayfinder, entry.compression_method()))
     }
 }
 impl VersionBackend for ZipVersionBackend {
-    fn list_files(&mut self) -> Vec<VersionFile> {
+    fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
         let mut results = Vec::new();
         let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
         let mut budget_iterator = self.archive.entries(read_buffer);
-        while let Some(entry) = budget_iterator.next_entry().unwrap() {
+        while let Some(entry) = budget_iterator.next_entry()? {
             if entry.is_dir() {
                 continue;
             }
             results.push(VersionFile {
-                relative_filename: String::from(entry.file_path().try_normalize().unwrap()),
+                relative_filename: String::from(entry.file_path().try_normalize()?),
                 permission: entry.mode().permissions(),
                 size: entry.uncompressed_size_hint(),
             });
         }
-        results
+        Ok(results)
     }
 
     fn reader(
@@ -223,19 +226,21 @@ impl VersionBackend for ZipVersionBackend {
         file: &VersionFile,
         start: u64,
         end: u64,
-    ) -> Option<Box<dyn MinimumFileObject + '_>> {
+    ) -> anyhow::Result<Box<dyn MinimumFileObject + '_>> {
         let (wayfinder, compression_method) = self.find_wayfinder(&file.relative_filename)?;
-        let local_entry = self.archive.get_entry(wayfinder).unwrap();
+        let local_entry = self
+            .archive
+            .get_entry(wayfinder)?;
 
-        let wrapper = self.new_entry(local_entry, compression_method, start, end);
+        let wrapper = self.new_entry(local_entry, compression_method, start, end)?;
 
-        Some(Box::new(wrapper) as Box<dyn MinimumFileObject>)
+        Ok(Box::new(wrapper) as Box<dyn MinimumFileObject>)
     }
 
-    fn peek_file(&mut self, sub_path: String) -> Option<VersionFile> {
+    fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
        let (entry, _) = self.find_wayfinder(&sub_path)?;
 
-        Some(VersionFile {
+        Ok(VersionFile {
             relative_filename: sub_path,
             permission: 0,
             size: entry.uncompressed_size_hint(),


@@ -1,6 +1,4 @@
-use std::{
-    fmt::Debug, io::Read
-};
+use std::{fmt::Debug, io::Read};
 
 use dyn_clone::DynClone;
 use tokio::io::{self, AsyncRead};
@@ -12,7 +10,7 @@ pub struct VersionFile {
     pub size: u64,
 }
 
 pub trait MinimumFileObject: Read + Send {}
 impl<T: Read + Send> MinimumFileObject for T {}
 
 // Intentionally not a generic, because of types in read_file
@@ -30,16 +28,27 @@ impl<'a> AsyncRead for ReadToAsyncRead<'a> {
     ) -> std::task::Poll<io::Result<()>> {
         let mut read_buf = [0u8; ASYNC_READ_BUFFER_SIZE];
         let read_size = ASYNC_READ_BUFFER_SIZE.min(buf.remaining());
-        let read = self.inner.read(&mut read_buf[0..read_size]).unwrap();
-        buf.put_slice(&read_buf[0..read]);
-        std::task::Poll::Ready(Ok(()))
+        match self.inner.read(&mut read_buf[0..read_size]) {
+            Ok(read) => {
+                buf.put_slice(&read_buf[0..read]);
+                std::task::Poll::Ready(Ok(()))
+            }
+            Err(err) => {
+                std::task::Poll::Ready(Err(err))
+            },
+        }
     }
 }
 
 pub trait VersionBackend: DynClone {
-    fn list_files(&mut self) -> Vec<VersionFile>;
-    fn peek_file(&mut self, sub_path: String) -> Option<VersionFile>;
-    fn reader(&mut self, file: &VersionFile, start: u64, end: u64) -> Option<Box<dyn MinimumFileObject + '_>>;
+    fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>>;
+    fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile>;
+    fn reader(
+        &mut self,
+        file: &VersionFile,
+        start: u64,
+        end: u64,
+    ) -> anyhow::Result<Box<dyn MinimumFileObject + '_>>;
 }
 
 dyn_clone::clone_trait_object!(VersionBackend);
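
Editor's note: with every `VersionBackend` method now returning `anyhow::Result`, callers can chain the whole lookup with `?`. A small hypothetical consumer (not part of this diff):

    use anyhow::anyhow;

    // Hypothetical helper: fetch one file's size through any backend,
    // propagating listing errors instead of unwrapping.
    fn first_file_size(backend: &mut dyn VersionBackend) -> anyhow::Result<u64> {
        let files = backend.list_files()?;
        let first = files.first().ok_or(anyhow!("backend has no files"))?;
        Ok(first.size)
    }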


@@ -1,7 +1,6 @@
-use std::{
-    collections::HashMap, fs::File, path::Path
-};
+use std::{collections::HashMap, fs::File, path::Path};
 
+use anyhow::anyhow;
 use napi::{bindgen_prelude::*, sys::napi_value__, tokio_stream::StreamExt};
 use tokio_util::codec::{BytesCodec, FramedRead};
@@ -15,7 +14,7 @@ use crate::version::{
  */
 pub fn create_backend_constructor<'a>(
     path: &Path,
-) -> Option<Box<dyn FnOnce() -> Box<dyn VersionBackend + Send + 'a>>> {
+) -> Option<Box<dyn FnOnce() -> Result<Box<dyn VersionBackend + Send + 'a>>>> {
     if !path.exists() {
         return None;
     }
@@ -23,12 +22,14 @@ pub fn create_backend_constructor<'a>(
     let is_directory = path.is_dir();
     if is_directory {
         let base_dir = path.to_path_buf();
-        return Some(Box::new(move || Box::new(PathVersionBackend { base_dir })));
+        return Some(Box::new(move || {
+            Ok(Box::new(PathVersionBackend { base_dir }))
+        }));
     };
 
     if path.to_string_lossy().ends_with(".zip") {
-        let f = File::open(path.to_path_buf()).unwrap();
-        return Some(Box::new(|| Box::new(ZipVersionBackend::new(f))));
+        let f = File::open(path.to_path_buf()).ok()?;
+        return Some(Box::new(|| Ok(Box::new(ZipVersionBackend::new(f)?))));
     }
 
     None
@@ -58,10 +59,13 @@ impl<'a> DropletHandler<'a> {
         let fs_path = Path::new(&path);
         let constructor = create_backend_constructor(fs_path)?;
 
-        let existing_backend = self.backend_cache.entry(path).or_insert_with(|| {
-            let backend = constructor();
-            backend
-        });
+        let existing_backend = match self.backend_cache.entry(path) {
+            std::collections::hash_map::Entry::Occupied(occupied_entry) => occupied_entry.into_mut(),
+            std::collections::hash_map::Entry::Vacant(vacant_entry) => {
+                let backend = constructor().ok()?;
+                vacant_entry.insert(backend)
+            }
+        };
 
         Some(existing_backend)
     }
@@ -80,7 +84,7 @@ impl<'a> DropletHandler<'a> {
         let backend = self
             .create_backend_for_path(path)
             .ok_or(napi::Error::from_reason("No backend for path"))?;
-        let files = backend.list_files();
+        let files = backend.list_files()?;
         Ok(files.into_iter().map(|e| e.relative_filename).collect())
     }
@@ -90,11 +94,9 @@ impl<'a> DropletHandler<'a> {
             .create_backend_for_path(path)
             .ok_or(napi::Error::from_reason("No backend for path"))?;
 
-        let file = backend
-            .peek_file(sub_path)
-            .ok_or(napi::Error::from_reason("Can't find file to peek"))?;
+        let file = backend.peek_file(sub_path)?;
 
-        return Ok(file.size.try_into().unwrap());
+        Ok(file.size)
     }
 
     #[napi]
@@ -106,28 +108,24 @@ impl<'a> DropletHandler<'a> {
         env: Env,
         start: Option<BigInt>,
         end: Option<BigInt>,
-    ) -> Result<JsDropStreamable> {
+    ) -> anyhow::Result<JsDropStreamable> {
         let stream = reference.share_with(env, |handler| {
             let backend = handler
                 .create_backend_for_path(path)
-                .ok_or(napi::Error::from_reason("Failed to create backend."))?;
+                .ok_or(anyhow!("Failed to create backend."))?;
             let version_file = VersionFile {
                 relative_filename: sub_path,
                 permission: 0, // Shouldn't matter
                 size: 0,       // Shouldn't matter
             };
             // Use `?` operator for cleaner error propagation from `Option`
-            let reader = backend
-                .reader(
-                    &version_file,
-                    start.map(|e| e.get_u64().1).unwrap_or(0),
-                    end.map(|e| e.get_u64().1).unwrap_or(0),
-                )
-                .ok_or(napi::Error::from_reason("Failed to create reader."))?;
+            let reader = backend.reader(
+                &version_file,
+                start.map(|e| e.get_u64().1).unwrap_or(0),
+                end.map(|e| e.get_u64().1).unwrap_or(0),
+            )?;
 
-            let async_reader = ReadToAsyncRead {
-                inner: reader,
-            };
+            let async_reader = ReadToAsyncRead { inner: reader };
 
             // Create a FramedRead stream with BytesCodec for chunking
             let stream = FramedRead::new(async_reader, BytesCodec::new())
@@ -137,12 +135,12 @@ impl<'a> DropletHandler<'a> {
                 // Apply Result::map to transform Ok(BytesMut) to Ok(Vec<u8>)
                 .map(|bytes| bytes.to_vec())
                 // Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
-                .map_err(|e| napi::Error::from(e)) // napi::Error implements From<tokio::io::Error>
+                .map_err(napi::Error::from) // napi::Error implements From<tokio::io::Error>
             });
 
             // Create the napi-rs ReadableStream from the tokio_stream::Stream
             // The unwrap() here means if stream creation fails, it will panic.
             // For a production system, consider returning Result<Option<...>> and handling this.
-            Ok(ReadableStream::create_with_stream_bytes(&env, stream).unwrap())
+            ReadableStream::create_with_stream_bytes(&env, stream)
         })?;
 
         Ok(JsDropStreamable { inner: stream })
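
Editor's note: the cache change above from `or_insert_with` to a match over `Entry` exists because the constructor can now fail, and `or_insert_with` cannot early-return from the enclosing function. A generic sketch of the same pattern with hypothetical types:

    use std::collections::{hash_map::Entry, HashMap};

    // Insert-or-reuse where creation is fallible: or_insert_with can't
    // propagate the failure, but a match over Entry can.
    fn get_or_create<'a>(
        cache: &'a mut HashMap<String, Vec<u8>>,
        key: String,
        make: impl FnOnce() -> Option<Vec<u8>>,
    ) -> Option<&'a mut Vec<u8>> {
        match cache.entry(key) {
            Entry::Occupied(o) => Some(o.into_mut()),
            Entry::Vacant(v) => Some(v.insert(make()?)),
        }
    }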