4 Commits

SHA1 Message Date
0431eebaa7 fix: remove lua tests 2025-08-25 13:02:00 +10:00
e66a6581cb fix: temporarily remove luajit for compilation reasons 2025-08-25 12:43:23 +10:00
817c3cf503 feat: script backend, fixes 2025-08-25 12:35:12 +10:00
0d01809fd0 feat: no panik 2025-08-25 12:20:51 +10:00
12 changed files with 285 additions and 264 deletions

Cargo.lock (generated)
View File

@@ -37,6 +37,12 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "anyhow"
version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
[[package]]
name = "arrayvec"
version = "0.7.6"
@@ -282,16 +288,6 @@ dependencies = [
"static_assertions",
]
[[package]]
name = "bstr"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
dependencies = [
"memchr",
"serde",
]
[[package]]
name = "bumpalo"
version = "3.19.0"
@@ -477,12 +473,12 @@ dependencies = [
name = "droplet"
version = "0.7.0"
dependencies = [
"anyhow",
"boa_engine",
"dyn-clone",
"flate2",
"hex",
"md5",
"mlua",
"napi",
"napi-build",
"napi-derive",
@@ -893,38 +889,13 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "mlua"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab2fea92b2adabd51808311b101551d6e3f8602b65e9fae51f7ad5b3d500f4cd"
dependencies = [
"bstr",
"either",
"mlua-sys",
"num-traits",
"parking_lot",
"rustc-hash",
"rustversion",
]
[[package]]
name = "mlua-sys"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d4dc9cfc5a7698899802e97480617d9726f7da78c910db989d4d0fd4991d900"
dependencies = [
"cc",
"cfg-if",
"pkg-config",
]
[[package]]
name = "napi"
version = "3.0.0-beta.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca1763658b41abbdf10caaa63b74e58f4ec62d52b889a558e0af6f3638cc9426"
dependencies = [
"anyhow",
"bitflags",
"ctor",
"futures-core",
@@ -1096,16 +1067,6 @@ dependencies = [
"portable-atomic",
]
[[package]]
name = "parking_lot"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13"
dependencies = [
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.9.11"
@@ -1183,12 +1144,6 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
[[package]]
name = "pkg-config"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
[[package]]
name = "pollster"
version = "0.4.0"

View File

@@ -9,11 +9,7 @@ crate-type = ["cdylib"]
[dependencies]
# Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
napi = { version = "3.0.0-beta.11", default-features = false, features = [
"napi6",
"async",
"web_stream",
] }
napi = { version = "3.0.0-beta.11", default-features = false, features = ["napi6", "async", "web_stream", "error_anyhow"] }
napi-derive = "3.0.0-beta.11"
hex = "0.4.3"
md5 = "0.7.0"
@@ -27,9 +23,10 @@ rawzip = "0.3.0"
dyn-clone = "1.0.20"
flate2 = "1.1.2"
rhai = "1.22.2"
mlua = { version = "0.11.2", features = ["luajit"] }
# mlua = { version = "0.11.2", features = ["luajit"] }
boa_engine = "0.20.0"
serde_json = "1.0.143"
anyhow = "1.0.99"
[package.metadata.patch]
crates = ["rawzip"]
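The new error_anyhow feature is what lets the exported functions below return anyhow::Result directly: napi-rs converts a returned anyhow::Error into a thrown JavaScript exception. A minimal sketch of the pattern this unlocks, with a hypothetical exported function (not part of this crate):

use anyhow::{anyhow, Context};

// Hypothetical example: any anyhow::Error returned here surfaces
// as a thrown exception on the Node.js side via `error_anyhow`.
#[napi]
pub fn read_manifest_version(raw: String) -> anyhow::Result<u32> {
    let value: serde_json::Value =
        serde_json::from_str(&raw).context("manifest is not valid JSON")?;
    let version = value["version"]
        .as_u64()
        .ok_or(anyhow!("manifest missing numeric `version` field"))?;
    Ok(version as u32)
}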

View File

@@ -93,3 +93,47 @@ test.skip("performance test", async (t) => {
fs.rmSync(dirName, { recursive: true });
});
test("special characters", async (t) => {
// Setup test dir
const dirName = "./.test/sc";
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
fs.mkdirSync(dirName, { recursive: true });
// Config
const fileNames = ["Technická podpora.rtf", "Servicio técnico.rtf"];
for (let i = 0; i < fileNames.length; i++) {
const fileName = path.join(dirName, fileNames[i]);
fs.writeFileSync(fileName, i.toString());
}
const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
dirName,
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
)
);
// Check the first few checksums
const checksums = [
"cfcd208495d565ef66e7dff9f98764da",
"c4ca4238a0b923820dcc509a6f75849b",
];
for (let index in checksums) {
const entry = manifest[fileNames[index]];
if (!entry) return t.fail(`manifest missing file ${index}`);
const checksum = entry.checksums[0];
t.is(checksum, checksums[index], `checksums do not match for ${index}`);
}
fs.rmSync(dirName, { recursive: true });
});
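The expected checksums above are not magic values: the test writes "0" and "1" as the file contents, and each file fits in a single chunk, so the manifest checksum is simply the MD5 digest of that content. A quick check with the same md5 crate the manifest code uses:

fn main() {
    // MD5 of the literal file contents written by the test.
    assert_eq!(
        format!("{:x}", md5::compute(b"0")),
        "cfcd208495d565ef66e7dff9f98764da"
    );
    assert_eq!(
        format!("{:x}", md5::compute(b"1")),
        "c4ca4238a0b923820dcc509a6f75849b"
    );
}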

View File

@@ -1,7 +1,7 @@
import test from "ava";
import { ScriptEngine } from "../index.js";
test("lua syntax fail", (t) => {
test.skip("lua syntax fail", (t) => {
const scriptEngine = new ScriptEngine();
const luaIshCode = `
@@ -46,7 +46,7 @@ test("js", (t) => {
t.pass();
});
test("lua", (t) => {
test.skip("lua", (t) => {
const scriptEngine = new ScriptEngine();
const luaModule = `

View File

@@ -1,6 +1,6 @@
{
"name": "@drop-oss/droplet",
"version": "2.3.1",
"version": "3.0.1",
"main": "index.js",
"types": "index.d.ts",
"napi": {
@@ -46,7 +46,7 @@
"build": "napi build --platform --release",
"build:debug": "napi build --platform",
"prepublishOnly": "napi prepublish -t npm",
"test": "ava",
"test": "ava ",
"universal": "napi universalize",
"version": "napi version"
},

View File

@@ -1,10 +1,13 @@
#![deny(clippy::all)]
#![deny(clippy::unwrap_used)]
#![deny(clippy::expect_used)]
#![deny(clippy::panic)]
#![feature(trait_alias)]
pub mod manifest;
pub mod script;
pub mod ssl;
pub mod version;
pub mod script;
#[macro_use]
extern crate napi_derive;
extern crate napi_derive;
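These crate-level lints are the enforcement behind the "no panik" commit: under cargo clippy, every unwrap(), expect(), and panic! in the crate becomes a hard error, which is what forces the fallible rewrites in the rest of this diff. A small illustration (hypothetical helper, not from this repo):

#![deny(clippy::unwrap_used)]
#![deny(clippy::panic)]

fn parse_port(s: &str) -> anyhow::Result<u16> {
    // let port: u16 = s.parse().unwrap(); // rejected: clippy::unwrap_used
    // if s.is_empty() { panic!("empty") } // rejected: clippy::panic
    Ok(s.parse()?) // propagate the error instead of panicking
}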

View File

@@ -35,7 +35,7 @@ pub fn generate_manifest<'a>(
progress_sfn: ThreadsafeFunction<i32>,
log_sfn: ThreadsafeFunction<String>,
callback_sfn: ThreadsafeFunction<String>,
) -> Result<()> {
) -> anyhow::Result<()> {
let backend: &mut Box<dyn VersionBackend + Send> = droplet_handler
.create_backend_for_path(dir)
.ok_or(napi::Error::from_reason(
@@ -49,83 +49,94 @@ pub fn generate_manifest<'a>(
unsafe { std::mem::transmute(backend) };
thread::spawn(move || {
let files = backend.list_files();
let callback_borrow = &callback_sfn;
// Filepath to chunk data
let mut chunks: HashMap<String, ChunkData> = HashMap::new();
let mut inner = move || -> Result<()> {
let files = backend.list_files()?;
let total: i32 = files.len() as i32;
let mut i: i32 = 0;
// Filepath to chunk data
let mut chunks: HashMap<String, ChunkData> = HashMap::new();
let mut buf = [0u8; 1024 * 16];
let total: i32 = files.len() as i32;
let mut i: i32 = 0;
for version_file in files {
let mut reader = backend.reader(&version_file, 0, 0).unwrap();
let mut buf = [0u8; 1024 * 16];
let mut chunk_data = ChunkData {
permissions: version_file.permission,
ids: Vec::new(),
checksums: Vec::new(),
lengths: Vec::new(),
};
for version_file in files {
let mut reader = backend.reader(&version_file, 0, 0)?;
let mut chunk_index = 0;
loop {
let mut length = 0;
let mut buffer: Vec<u8> = Vec::new();
let mut file_empty = false;
let mut chunk_data = ChunkData {
permissions: version_file.permission,
ids: Vec::new(),
checksums: Vec::new(),
lengths: Vec::new(),
};
let mut chunk_index = 0;
loop {
let read = reader.read(&mut buf).unwrap();
let mut length = 0;
let mut buffer: Vec<u8> = Vec::new();
let mut file_empty = false;
length += read;
loop {
let read = reader.read(&mut buf)?;
// If we're out of data, add this chunk and then move onto the next file
if read == 0 {
file_empty = true;
break;
length += read;
// If we're out of data, add this chunk and then move onto the next file
if read == 0 {
file_empty = true;
break;
}
buffer.extend_from_slice(&buf[0..read]);
if length >= CHUNK_SIZE {
break;
}
}
buffer.extend_from_slice(&buf[0..read]);
let chunk_id = Uuid::new_v4();
let checksum = md5::compute(buffer).0;
let checksum_string = hex::encode(checksum);
if length >= CHUNK_SIZE {
chunk_data.ids.push(chunk_id.to_string());
chunk_data.checksums.push(checksum_string);
chunk_data.lengths.push(length);
let log_str = format!(
"Processed chunk {} for {}",
chunk_index, &version_file.relative_filename
);
log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
chunk_index += 1;
if file_empty {
break;
}
}
let chunk_id = Uuid::new_v4();
let checksum = md5::compute(buffer).0;
let checksum_string = hex::encode(checksum);
chunks.insert(version_file.relative_filename, chunk_data);
chunk_data.ids.push(chunk_id.to_string());
chunk_data.checksums.push(checksum_string);
chunk_data.lengths.push(length);
let log_str = format!(
"Processed chunk {} for {}",
chunk_index, &version_file.relative_filename
);
log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
chunk_index += 1;
if file_empty {
break;
}
i += 1;
let progress = i * 100 / total;
progress_sfn.call(Ok(progress), ThreadsafeFunctionCallMode::Blocking);
}
chunks.insert(version_file.relative_filename, chunk_data);
callback_borrow.call(
Ok(json!(chunks).to_string()),
ThreadsafeFunctionCallMode::Blocking,
);
i += 1;
let progress = i * 100 / total;
progress_sfn.call(Ok(progress), ThreadsafeFunctionCallMode::Blocking);
Ok(())
};
let result = inner();
if let Err(generate_err) = result {
callback_borrow.call(Err(generate_err), ThreadsafeFunctionCallMode::Blocking);
}
callback_sfn.call(
Ok(json!(chunks).to_string()),
ThreadsafeFunctionCallMode::Blocking,
);
});
Ok(())
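The reworked worker wraps all fallible work in an inner closure so `?` can be used inside thread::spawn, then delivers any error to JavaScript through the callback instead of panicking. A condensed sketch of the shape (everything except the inner-closure idiom is hypothetical):

use std::thread;

fn spawn_worker(on_done: impl Fn(Result<String, String>) + Send + 'static) {
    thread::spawn(move || {
        // The fallible body lives in a closure so `?` is available in the thread.
        let inner = || -> anyhow::Result<String> {
            let data = std::fs::read_to_string("manifest.json")?; // hypothetical input
            Ok(data)
        };
        // Mirrors callback_borrow.call(Err(...)) above: the error reaches
        // the JS callback rather than unwinding the thread.
        match inner() {
            Ok(v) => on_done(Ok(v)),
            Err(e) => on_done(Err(e.to_string())),
        }
    });
}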

View File

@@ -1,5 +1,5 @@
use boa_engine::{Context, JsValue, Source};
use mlua::{FromLuaMulti, Function, Lua};
// use mlua::{FromLuaMulti, Function, Lua};
use napi::Result;
use rhai::AST;
@@ -14,14 +14,14 @@ pub struct Script(ScriptInner);
pub enum ScriptInner {
Rhai { script: AST },
Lua { script: Function },
// Lua { script: Function },
Javascript { script: boa_engine::Script },
}
#[napi]
pub struct ScriptEngine {
rhai_engine: rhai::Engine,
lua_engine: Lua,
// lua_engine: Lua,
js_engine: Context,
}
@@ -31,13 +31,13 @@ impl ScriptEngine {
pub fn new() -> Self {
ScriptEngine {
rhai_engine: rhai::Engine::new(),
lua_engine: Lua::new(),
// lua_engine: Lua::new(),
js_engine: Context::default(),
}
}
#[napi]
pub fn build_rahi_script(&self, content: String) -> Result<Script> {
pub fn build_rhai_script(&self, content: String) -> Result<Script> {
let script = self
.rhai_engine
.compile(content.clone())
@@ -45,6 +45,7 @@ impl ScriptEngine {
Ok(Script(ScriptInner::Rhai { script }))
}
/*
#[napi]
pub fn build_lua_script(&self, content: String) -> Result<Script> {
let func = self
@@ -54,6 +55,7 @@ impl ScriptEngine {
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(Script(ScriptInner::Lua { script: func }))
}
*/
#[napi]
pub fn build_js_script(&mut self, content: String) -> Result<Script> {
@@ -76,6 +78,7 @@ impl ScriptEngine {
Ok(v)
}
/*
fn execute_lua_script<T>(&self, function: &Function) -> Result<T>
where
T: FromLuaMulti,
@@ -85,6 +88,7 @@ impl ScriptEngine {
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(v)
}
*/
fn execute_js_script(&mut self, func: &boa_engine::Script) -> Result<JsValue> {
let v = func
@@ -100,9 +104,9 @@ impl ScriptEngine {
ScriptInner::Rhai { script } => {
self.execute_rhai_script::<()>(script)?;
}
ScriptInner::Lua { script } => {
/*ScriptInner::Lua { script } => {
self.execute_lua_script::<()>(script)?;
}
}*/
ScriptInner::Javascript { script } => {
self.execute_js_script(script)?;
}
@@ -114,14 +118,15 @@ impl ScriptEngine {
pub fn fetch_strings(&mut self, script: &mut Script) -> Result<Vec<String>> {
Ok(match &script.0 {
ScriptInner::Rhai { script } => self.execute_rhai_script(script)?,
ScriptInner::Lua { script } => self.execute_lua_script(script)?,
//ScriptInner::Lua { script } => self.execute_lua_script(script)?,
ScriptInner::Javascript { script } => {
let v = self.execute_js_script(script)?;
serde_json::from_value(
v.to_json(&mut self.js_engine)
.map_err(|e| napi::Error::from_reason(e.to_string()))?,
).map_err(|e| napi::Error::from_reason(e.to_string()))?
)
.map_err(|e| napi::Error::from_reason(e.to_string()))?
}
})
}

View File

@@ -1,4 +1,4 @@
use napi::Error;
use anyhow::anyhow;
use rcgen::{
CertificateParams, DistinguishedName, IsCa, KeyPair, KeyUsagePurpose, PublicKeyData,
SubjectPublicKeyInfo,
@@ -10,7 +10,7 @@ use x509_parser::parse_x509_certificate;
use x509_parser::pem::Pem;
#[napi]
pub fn generate_root_ca() -> Result<Vec<String>, Error> {
pub fn generate_root_ca() -> anyhow::Result<Vec<String>> {
let mut params = CertificateParams::default();
let mut name = DistinguishedName::new();
@@ -22,7 +22,7 @@ pub fn generate_root_ca() -> Result<Vec<String>, Error> {
params.not_before = OffsetDateTime::now_utc();
params.not_after = OffsetDateTime::now_utc()
.checked_add(Duration::days(365 * 1000))
.unwrap();
.ok_or(anyhow!("failed to calculate end date"))?;
params.is_ca = IsCa::Ca(rcgen::BasicConstraints::Unconstrained);
@@ -32,9 +32,8 @@ pub fn generate_root_ca() -> Result<Vec<String>, Error> {
KeyUsagePurpose::DigitalSignature,
];
let key_pair = KeyPair::generate().map_err(|e| napi::Error::from_reason(e.to_string()))?;
let certificate = CertificateParams::self_signed(params, &key_pair)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let key_pair = KeyPair::generate()?;
let certificate = CertificateParams::self_signed(params, &key_pair)?;
// Returns certificate, then private key
Ok(vec![certificate.pem(), key_pair.serialize_pem()])
@@ -46,13 +45,10 @@ pub fn generate_client_certificate(
_client_name: String,
root_ca: String,
root_ca_private: String,
) -> Result<Vec<String>, Error> {
let root_key_pair =
KeyPair::from_pem(&root_ca_private).map_err(|e| napi::Error::from_reason(e.to_string()))?;
let certificate_params = CertificateParams::from_ca_cert_pem(&root_ca)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let root_ca = CertificateParams::self_signed(certificate_params, &root_key_pair)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
) -> anyhow::Result<Vec<String>> {
let root_key_pair = KeyPair::from_pem(&root_ca_private)?;
let certificate_params = CertificateParams::from_ca_cert_pem(&root_ca)?;
let root_ca = CertificateParams::self_signed(certificate_params, &root_key_pair)?;
let mut params = CertificateParams::default();
@@ -66,28 +62,24 @@ pub fn generate_client_certificate(
KeyUsagePurpose::DataEncipherment,
];
let key_pair = KeyPair::generate_for(&rcgen::PKCS_ECDSA_P384_SHA384)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let certificate = CertificateParams::signed_by(params, &key_pair, &root_ca, &root_key_pair)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let key_pair = KeyPair::generate_for(&rcgen::PKCS_ECDSA_P384_SHA384)?;
let certificate = CertificateParams::signed_by(params, &key_pair, &root_ca, &root_key_pair)?;
// Returns certificate, then private key
Ok(vec![certificate.pem(), key_pair.serialize_pem()])
}
#[napi]
pub fn verify_client_certificate(client_cert: String, root_ca: String) -> Result<bool, Error> {
pub fn verify_client_certificate(client_cert: String, root_ca: String) -> anyhow::Result<bool> {
let root_ca = Pem::iter_from_buffer(root_ca.as_bytes())
.next()
.unwrap()
.unwrap();
let root_ca = root_ca.parse_x509().unwrap();
.ok_or(anyhow!("no certificates in root ca"))??;
let root_ca = root_ca.parse_x509()?;
let client_cert = Pem::iter_from_buffer(client_cert.as_bytes())
.next()
.unwrap()
.unwrap();
let client_cert = client_cert.parse_x509().unwrap();
.ok_or(anyhow!("No client certs in chain."))??;
let client_cert = client_cert.parse_x509()?;
let valid = root_ca
.verify_signature(Some(client_cert.public_key()))
@@ -97,31 +89,33 @@ pub fn verify_client_certificate(client_cert: String, root_ca: String) -> Result<bool, Error> {
}
#[napi]
pub fn sign_nonce(private_key: String, nonce: String) -> Result<String, Error> {
pub fn sign_nonce(private_key: String, nonce: String) -> anyhow::Result<String> {
let rng = SystemRandom::new();
let key_pair = KeyPair::from_pem(&private_key).unwrap();
let key_pair = KeyPair::from_pem(&private_key)?;
let key_pair = EcdsaKeyPair::from_pkcs8(
&ring::signature::ECDSA_P384_SHA384_FIXED_SIGNING,
&key_pair.serialize_der(),
&rng,
)
.unwrap();
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let signature = key_pair.sign(&rng, nonce.as_bytes()).unwrap();
let signature = key_pair
.sign(&rng, nonce.as_bytes())
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let hex_signature = hex::encode(signature);
Ok(hex_signature)
}
#[napi]
pub fn verify_nonce(public_cert: String, nonce: String, signature: String) -> Result<bool, Error> {
let (_, pem) = x509_parser::pem::parse_x509_pem(public_cert.as_bytes()).unwrap();
let (_, spki) = parse_x509_certificate(&pem.contents).unwrap();
let public_key = SubjectPublicKeyInfo::from_der(spki.public_key().raw).unwrap();
pub fn verify_nonce(public_cert: String, nonce: String, signature: String) -> anyhow::Result<bool> {
let (_, pem) = x509_parser::pem::parse_x509_pem(public_cert.as_bytes())?;
let (_, spki) = parse_x509_certificate(&pem.contents)?;
let public_key = SubjectPublicKeyInfo::from_der(spki.public_key().raw)?;
let raw_signature = hex::decode(signature).unwrap();
let raw_signature = hex::decode(signature)?;
let valid = ring::signature::ECDSA_P384_SHA384_FIXED
.verify(

View File

@@ -7,26 +7,29 @@ use std::{
sync::Arc,
};
use anyhow::anyhow;
use flate2::read::DeflateDecoder;
use rawzip::{
CompressionMethod, FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry,
ZipVerifier, RECOMMENDED_BUFFER_SIZE,
CompressionMethod, FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry, ZipVerifier,
RECOMMENDED_BUFFER_SIZE,
};
use crate::version::types::{MinimumFileObject, VersionBackend, VersionFile};
pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
if metadata(path).unwrap().is_dir() {
let paths = fs::read_dir(path).unwrap();
pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) -> napi::Result<()> {
if metadata(path)?.is_dir() {
let paths = fs::read_dir(path)?;
for path_result in paths {
let full_path = path_result.unwrap().path();
if metadata(&full_path).unwrap().is_dir() {
_list_files(vec, &full_path);
let full_path = path_result?.path();
if metadata(&full_path)?.is_dir() {
_list_files(vec, &full_path)?;
} else {
vec.push(full_path);
}
}
}
};
Ok(())
}
#[derive(Clone)]
@@ -34,23 +37,26 @@ pub struct PathVersionBackend {
pub base_dir: PathBuf,
}
impl VersionBackend for PathVersionBackend {
fn list_files(&mut self) -> Vec<VersionFile> {
fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
let mut vec = Vec::new();
_list_files(&mut vec, &self.base_dir);
_list_files(&mut vec, &self.base_dir)?;
let mut results = Vec::new();
for pathbuf in vec.iter() {
let relative = pathbuf.strip_prefix(self.base_dir.clone()).unwrap();
let relative = pathbuf.strip_prefix(self.base_dir.clone())?;
results.push(
self
.peek_file(relative.to_str().unwrap().to_owned())
.unwrap(),
self.peek_file(
relative
.to_str()
.ok_or(napi::Error::from_reason("Could not parse path"))?
.to_owned(),
)?,
);
}
results
Ok(results)
}
fn reader(
@@ -58,28 +64,28 @@ impl VersionBackend for PathVersionBackend {
file: &VersionFile,
start: u64,
end: u64,
) -> Option<Box<dyn MinimumFileObject + 'static>> {
let mut file = File::open(self.base_dir.join(file.relative_filename.clone())).ok()?;
) -> anyhow::Result<Box<dyn MinimumFileObject + 'static>> {
let mut file = File::open(self.base_dir.join(file.relative_filename.clone()))?;
if start != 0 {
file.seek(SeekFrom::Start(start)).ok()?;
file.seek(SeekFrom::Start(start))?;
}
if end != 0 {
return Some(Box::new(file.take(end - start)));
return Ok(Box::new(file.take(end - start)));
}
return Some(Box::new(file));
Ok(Box::new(file))
}
fn peek_file(&mut self, sub_path: String) -> Option<VersionFile> {
fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
let pathbuf = self.base_dir.join(sub_path.clone());
if !pathbuf.exists() {
return None;
return Err(anyhow!("Path doesn't exist."));
};
let file = File::open(pathbuf.clone()).unwrap();
let metadata = file.try_clone().unwrap().metadata().unwrap();
let file = File::open(pathbuf.clone())?;
let metadata = file.try_clone()?.metadata()?;
let permission_object = metadata.permissions();
let permissions = {
let perm: u32;
@@ -94,7 +100,7 @@ impl VersionBackend for PathVersionBackend {
perm
};
Some(VersionFile {
Ok(VersionFile {
relative_filename: sub_path,
permission: permissions,
size: metadata.len(),
@@ -107,11 +113,11 @@ pub struct ZipVersionBackend {
archive: Arc<ZipArchive<FileReader>>,
}
impl ZipVersionBackend {
pub fn new(archive: File) -> Self {
let archive = ZipArchive::from_file(archive, &mut [0u8; RECOMMENDED_BUFFER_SIZE]).unwrap();
Self {
pub fn new(archive: File) -> anyhow::Result<Self> {
let archive = ZipArchive::from_file(archive, &mut [0u8; RECOMMENDED_BUFFER_SIZE])?;
Ok(Self {
archive: Arc::new(archive),
}
})
}
pub fn new_entry<'archive>(
@@ -120,27 +126,26 @@ impl ZipVersionBackend {
compression_method: CompressionMethod,
start: u64,
end: u64,
) -> ZipFileWrapper<'archive> {
) -> anyhow::Result<ZipFileWrapper<'archive>> {
let deflater: Box<dyn Read + Send + 'archive> = match compression_method {
CompressionMethod::Store => Box::new(entry.reader()),
CompressionMethod::Deflate => Box::new(DeflateDecoder::new(entry.reader())),
CompressionMethod::Deflate64 => Box::new(DeflateDecoder::new(entry.reader())),
_ => panic!(
"unsupported decompression algorithm: {:?}",
compression_method
),
_ => Err(anyhow!(
"unsupported decompression algorithm: {compression_method:?}"
))?,
};
let mut verifier = entry.verifying_reader(deflater);
if start != 0 {
io::copy(&mut (&mut verifier).take(start), &mut Sink::default()).unwrap();
io::copy(&mut (&mut verifier).take(start), &mut Sink::default())?;
}
ZipFileWrapper {
Ok(ZipFileWrapper {
reader: verifier,
limit: (end - start) as usize,
current: 0,
}
})
}
}
@@ -159,10 +164,8 @@ impl<'a> Read for ZipFileWrapper<'a> {
let has_limit = self.limit != 0;
// End this stream if the read is the right size
if has_limit {
if self.current >= self.limit {
return Ok(0);
}
if has_limit && self.current >= self.limit {
return Ok(0);
}
let read = self.reader.read(buf)?;
@@ -173,7 +176,7 @@ impl<'a> Read for ZipFileWrapper<'a> {
return Ok(read - over);
}
}
return Ok(read);
Ok(read)
}
}
//impl<'a> MinimumFileObject for ZipFileWrapper<'a> {}
@@ -182,40 +185,40 @@ impl ZipVersionBackend {
fn find_wayfinder(
&mut self,
filename: &str,
) -> Option<(ZipArchiveEntryWayfinder, CompressionMethod)> {
) -> anyhow::Result<(ZipArchiveEntryWayfinder, CompressionMethod)> {
let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
let mut entries = self.archive.entries(read_buffer);
let entry = loop {
if let Some(v) = entries.next_entry().unwrap() {
if v.file_path().try_normalize().unwrap().as_ref() == filename {
break Some(v);
if let Some(v) = entries.next_entry()? {
if v.file_path().try_normalize()?.as_ref() == filename {
break Ok(v);
}
} else {
break None;
break Err(anyhow!("failed to fetch zip file header."));
}
}?;
let wayfinder = entry.wayfinder();
Some((wayfinder, entry.compression_method()))
Ok((wayfinder, entry.compression_method()))
}
}
impl VersionBackend for ZipVersionBackend {
fn list_files(&mut self) -> Vec<VersionFile> {
fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
let mut results = Vec::new();
let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
let mut budget_iterator = self.archive.entries(read_buffer);
while let Some(entry) = budget_iterator.next_entry().unwrap() {
while let Some(entry) = budget_iterator.next_entry()? {
if entry.is_dir() {
continue;
}
results.push(VersionFile {
relative_filename: String::from(entry.file_path().try_normalize().unwrap()),
relative_filename: String::from(entry.file_path().try_normalize()?),
permission: entry.mode().permissions(),
size: entry.uncompressed_size_hint(),
});
}
results
Ok(results)
}
fn reader(
@@ -223,19 +226,21 @@ impl VersionBackend for ZipVersionBackend {
file: &VersionFile,
start: u64,
end: u64,
) -> Option<Box<dyn MinimumFileObject + '_>> {
) -> anyhow::Result<Box<dyn MinimumFileObject + '_>> {
let (wayfinder, compression_method) = self.find_wayfinder(&file.relative_filename)?;
let local_entry = self.archive.get_entry(wayfinder).unwrap();
let local_entry = self
.archive
.get_entry(wayfinder)?;
let wrapper = self.new_entry(local_entry, compression_method, start, end);
let wrapper = self.new_entry(local_entry, compression_method, start, end)?;
Some(Box::new(wrapper) as Box<dyn MinimumFileObject>)
Ok(Box::new(wrapper) as Box<dyn MinimumFileObject>)
}
fn peek_file(&mut self, sub_path: String) -> Option<VersionFile> {
fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
let (entry, _) = self.find_wayfinder(&sub_path)?;
Some(VersionFile {
Ok(VersionFile {
relative_filename: sub_path,
permission: 0,
size: entry.uncompressed_size_hint(),

View File

@@ -1,6 +1,4 @@
use std::{
fmt::Debug, io::Read
};
use std::{fmt::Debug, io::Read};
use dyn_clone::DynClone;
use tokio::io::{self, AsyncRead};
@@ -12,7 +10,7 @@ pub struct VersionFile {
pub size: u64,
}
pub trait MinimumFileObject: Read + Send {}
pub trait MinimumFileObject: Read + Send {}
impl<T: Read + Send> MinimumFileObject for T {}
// Intentionally not a generic, because of types in read_file
@@ -30,16 +28,27 @@ impl<'a> AsyncRead for ReadToAsyncRead<'a> {
) -> std::task::Poll<io::Result<()>> {
let mut read_buf = [0u8; ASYNC_READ_BUFFER_SIZE];
let read_size = ASYNC_READ_BUFFER_SIZE.min(buf.remaining());
let read = self.inner.read(&mut read_buf[0..read_size]).unwrap();
buf.put_slice(&read_buf[0..read]);
std::task::Poll::Ready(Ok(()))
match self.inner.read(&mut read_buf[0..read_size]) {
Ok(read) => {
buf.put_slice(&read_buf[0..read]);
std::task::Poll::Ready(Ok(()))
}
Err(err) => {
std::task::Poll::Ready(Err(err))
},
}
}
}
pub trait VersionBackend: DynClone {
fn list_files(&mut self) -> Vec<VersionFile>;
fn peek_file(&mut self, sub_path: String) -> Option<VersionFile>;
fn reader(&mut self, file: &VersionFile, start: u64, end: u64) -> Option<Box<dyn MinimumFileObject + '_>>;
fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>>;
fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile>;
fn reader(
&mut self,
file: &VersionFile,
start: u64,
end: u64,
) -> anyhow::Result<Box<dyn MinimumFileObject + '_>>;
}
dyn_clone::clone_trait_object!(VersionBackend);
dyn_clone::clone_trait_object!(VersionBackend);
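For illustration, here is a toy in-memory implementation of the now-fallible trait; the type and its data are hypothetical (the real implementations are PathVersionBackend and ZipVersionBackend above):

use anyhow::anyhow;
use std::io::{Cursor, Read};

#[derive(Clone)]
struct MemoryBackend {
    files: Vec<(String, Vec<u8>)>,
}

impl VersionBackend for MemoryBackend {
    fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
        Ok(self
            .files
            .iter()
            .map(|(name, data)| VersionFile {
                relative_filename: name.clone(),
                permission: 0,
                size: data.len() as u64,
            })
            .collect())
    }

    fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
        let (_, data) = self
            .files
            .iter()
            .find(|(name, _)| *name == sub_path)
            .ok_or(anyhow!("Path doesn't exist."))?;
        Ok(VersionFile {
            relative_filename: sub_path,
            permission: 0,
            size: data.len() as u64,
        })
    }

    fn reader(
        &mut self,
        file: &VersionFile,
        start: u64,
        end: u64,
    ) -> anyhow::Result<Box<dyn MinimumFileObject + '_>> {
        let (_, data) = self
            .files
            .iter()
            .find(|(name, _)| *name == file.relative_filename)
            .ok_or(anyhow!("Path doesn't exist."))?;
        let mut cursor = Cursor::new(data.clone());
        cursor.set_position(start);
        // Mirror the real backends: end == 0 means "read to EOF".
        if end != 0 {
            return Ok(Box::new(cursor.take(end - start)));
        }
        Ok(Box::new(cursor))
    }
}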

View File

@@ -1,7 +1,6 @@
use std::{
collections::HashMap, fs::File, path::Path
};
use std::{collections::HashMap, fs::File, path::Path};
use anyhow::anyhow;
use napi::{bindgen_prelude::*, sys::napi_value__, tokio_stream::StreamExt};
use tokio_util::codec::{BytesCodec, FramedRead};
@@ -15,7 +14,7 @@ use crate::version::{
*/
pub fn create_backend_constructor<'a>(
path: &Path,
) -> Option<Box<dyn FnOnce() -> Box<dyn VersionBackend + Send + 'a>>> {
) -> Option<Box<dyn FnOnce() -> Result<Box<dyn VersionBackend + Send + 'a>>>> {
if !path.exists() {
return None;
}
@@ -23,12 +22,14 @@ pub fn create_backend_constructor<'a>(
let is_directory = path.is_dir();
if is_directory {
let base_dir = path.to_path_buf();
return Some(Box::new(move || Box::new(PathVersionBackend { base_dir })));
return Some(Box::new(move || {
Ok(Box::new(PathVersionBackend { base_dir }))
}));
};
if path.to_string_lossy().ends_with(".zip") {
let f = File::open(path.to_path_buf()).unwrap();
return Some(Box::new(|| Box::new(ZipVersionBackend::new(f))));
let f = File::open(path.to_path_buf()).ok()?;
return Some(Box::new(|| Ok(Box::new(ZipVersionBackend::new(f)?))));
}
None
@@ -58,10 +59,13 @@ impl<'a> DropletHandler<'a> {
let fs_path = Path::new(&path);
let constructor = create_backend_constructor(fs_path)?;
let existing_backend = self.backend_cache.entry(path).or_insert_with(|| {
let backend = constructor();
backend
});
let existing_backend = match self.backend_cache.entry(path) {
std::collections::hash_map::Entry::Occupied(occupied_entry) => occupied_entry.into_mut(),
std::collections::hash_map::Entry::Vacant(vacant_entry) => {
let backend = constructor().ok()?;
vacant_entry.insert(backend)
}
};
Some(existing_backend)
}
@@ -80,7 +84,7 @@ impl<'a> DropletHandler<'a> {
let backend = self
.create_backend_for_path(path)
.ok_or(napi::Error::from_reason("No backend for path"))?;
let files = backend.list_files();
let files = backend.list_files()?;
Ok(files.into_iter().map(|e| e.relative_filename).collect())
}
@@ -90,11 +94,9 @@ impl<'a> DropletHandler<'a> {
.create_backend_for_path(path)
.ok_or(napi::Error::from_reason("No backend for path"))?;
let file = backend
.peek_file(sub_path)
.ok_or(napi::Error::from_reason("Can't find file to peek"))?;
let file = backend.peek_file(sub_path)?;
return Ok(file.size.try_into().unwrap());
Ok(file.size)
}
#[napi]
@@ -106,28 +108,24 @@ impl<'a> DropletHandler<'a> {
env: Env,
start: Option<BigInt>,
end: Option<BigInt>,
) -> Result<JsDropStreamable> {
) -> anyhow::Result<JsDropStreamable> {
let stream = reference.share_with(env, |handler| {
let backend = handler
.create_backend_for_path(path)
.ok_or(napi::Error::from_reason("Failed to create backend."))?;
.ok_or(anyhow!("Failed to create backend."))?;
let version_file = VersionFile {
relative_filename: sub_path,
permission: 0, // Shouldn't matter
size: 0, // Shouldn't matter
};
// Use `?` operator for cleaner error propagation from `Option`
let reader = backend
.reader(
&version_file,
start.map(|e| e.get_u64().1).unwrap_or(0),
end.map(|e| e.get_u64().1).unwrap_or(0),
)
.ok_or(napi::Error::from_reason("Failed to create reader."))?;
let reader = backend.reader(
&version_file,
start.map(|e| e.get_u64().1).unwrap_or(0),
end.map(|e| e.get_u64().1).unwrap_or(0),
)?;
let async_reader = ReadToAsyncRead {
inner: reader,
};
let async_reader = ReadToAsyncRead { inner: reader };
// Create a FramedRead stream with BytesCodec for chunking
let stream = FramedRead::new(async_reader, BytesCodec::new())
@@ -137,12 +135,12 @@ impl<'a> DropletHandler<'a> {
// Apply Result::map to transform Ok(BytesMut) to Ok(Vec<u8>)
.map(|bytes| bytes.to_vec())
// Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
.map_err(|e| napi::Error::from(e)) // napi::Error implements From<tokio::io::Error>
.map_err(napi::Error::from) // napi::Error implements From<tokio::io::Error>
});
// Create the napi-rs ReadableStream from the tokio_stream::Stream
// The unwrap() here means if stream creation fails, it will panic.
// For a production system, consider returning Result<Option<...>> and handling this.
Ok(ReadableStream::create_with_stream_bytes(&env, stream).unwrap())
ReadableStream::create_with_stream_bytes(&env, stream)
})?;
Ok(JsDropStreamable { inner: stream })