8 Commits

SHA1 Message Date
efab43720f fix: 7z and streams 2025-11-20 13:39:05 +11:00
894f2b354a fix: 7z 2025-11-20 11:38:11 +11:00
416cada9f4 fix: unix permissions properly fixed with 7z 2025-10-28 19:31:59 +11:00
97312585db fix: fix to unix permissions with 7z 2025-10-28 19:29:25 +11:00
538aa3bb57 fix: update license 2025-10-14 12:11:24 +11:00
7ec09bee1e feat: fix 7zip integration 2025-10-13 11:29:30 +11:00
96c1b15de7 remove unneeded deps 2025-10-02 17:14:26 +10:00
bd6d7060fd feat: the 7z update 2025-10-02 17:06:58 +10:00
12 changed files with 160 additions and 214 deletions

Cargo.lock

@@ -364,15 +364,6 @@ dependencies = [
"unicode-segmentation",
]
[[package]]
name = "crc32fast"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
dependencies = [
"cfg-if",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.21"
@@ -476,13 +467,11 @@ dependencies = [
"anyhow",
"boa_engine",
"dyn-clone",
"flate2",
"hex",
"md5",
"napi",
"napi-build",
"napi-derive",
"rawzip",
"rcgen",
"rhai",
"ring",
@@ -536,16 +525,6 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8eb564c5c7423d25c886fb561d1e4ee69f72354d16918afa32c08811f6b6a55"
[[package]]
name = "flate2"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d"
dependencies = [
"crc32fast",
"miniz_oxide",
]
[[package]]
name = "foldhash"
version = "0.1.5"
@@ -1263,12 +1242,6 @@ dependencies = [
"getrandom 0.3.3",
]
[[package]]
name = "rawzip"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e512201a808c46ad2a0c810057db306d66b58d5516304548d2445a53db933499"
[[package]]
name = "rcgen"
version = "0.13.2"


@@ -19,18 +19,13 @@ webpki = "0.22.4"
ring = "0.17.14"
tokio = { version = "1.45.1", features = ["fs", "io-util"] }
tokio-util = { version = "0.7.15", features = ["codec"] }
rawzip = "0.3.0"
dyn-clone = "1.0.20"
flate2 = "1.1.2"
rhai = "1.22.2"
# mlua = { version = "0.11.2", features = ["luajit"] }
boa_engine = "0.20.0"
serde_json = "1.0.143"
anyhow = "1.0.99"
[package.metadata.patch]
crates = ["rawzip"]
[dependencies.x509-parser]
version = "0.17.0"
features = ["verify"]


@@ -67,7 +67,7 @@ test("read file", async (t) => {
let finalString = "";
for await (const chunk of stream.getStream()) {
for await (const chunk of stream) {
// Do something with each 'chunk'
finalString += String.fromCharCode.apply(null, chunk);
}
@@ -94,7 +94,7 @@ test("read file offset", async (t) => {
let finalString = "";
for await (const chunk of stream.getStream()) {
for await (const chunk of stream) {
// Do something with each 'chunk'
finalString += String.fromCharCode.apply(null, chunk);
}
@@ -121,7 +121,7 @@ test.skip("zip speed test", async (t) => {
const timeThreshold = BigInt(1_000_000_000);
let runningTotal = 0;
let runningTime = BigInt(0);
for await (const chunk of stream.getStream()) {
for await (const chunk of stream) {
// Do something with each 'chunk'
const currentTime = process.hrtime.bigint();
const timeDiff = currentTime - lastTime;
@@ -146,55 +146,61 @@ test.skip("zip speed test", async (t) => {
t.pass();
});
test.skip("zip manifest test", async (t) => {
test("zip manifest test", async (t) => {
const zipFiles = fs.readdirSync("./assets").filter((v) => v.endsWith(".zip"));
const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
"./assets/TheGame.zip",
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
)
);
for (const [filename, data] of Object.entries(manifest)) {
let start = 0;
for (const [chunkIndex, length] of data.lengths.entries()) {
const hash = createHash("md5");
const stream = (
await dropletHandler.readFile(
"./assets/TheGame.zip",
filename,
BigInt(start),
BigInt(start + length)
for (const zipFile of zipFiles) {
console.log("generating manifest for " + zipFile);
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
"./assets/" + zipFile,
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
).getStream();
)
);
let streamLength = 0;
await stream.pipeTo(
new WritableStream({
write(chunk) {
streamLength += chunk.length;
hash.update(chunk);
},
})
);
for (const [filename, data] of Object.entries(manifest)) {
let start = 0;
for (const [chunkIndex, length] of data.lengths.entries()) {
const hash = createHash("md5");
const stream = (
await dropletHandler.readFile(
"./assets/" + zipFile,
filename,
BigInt(start),
BigInt(start + length)
)
);
console.log(stream);
if (streamLength != length)
return t.fail(
`stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
let streamLength = 0;
await stream.pipeTo(
new WritableStream({
write(chunk) {
streamLength += chunk.length;
hash.update(chunk);
},
})
);
const digest = hash.digest("hex");
if (data.checksums[chunkIndex] != digest)
return t.fail(
`checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
);
if (streamLength != length)
return t.fail(
`stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
);
start += length;
const digest = hash.digest("hex");
if (data.checksums[chunkIndex] != digest)
return t.fail(
`checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
);
start += length;
}
}
}


@@ -1,4 +1,4 @@
# yes "droplet is awesome" | dd of=./setup.exe bs=1024 count=1000000
dd if=/dev/random of=./setup.exe bs=1024 count=1000000
zip TheGame.zip setup.exe
zip TheGame.zip setup.exe "test file.txt"
rm setup.exe

index.d.ts

@@ -8,11 +8,7 @@ export declare class DropletHandler {
hasBackendForPath(path: string): boolean
listFiles(path: string): Array<string>
peekFile(path: string, subPath: string): bigint
readFile(path: string, subPath: string, start?: bigint | undefined | null, end?: bigint | undefined | null): JsDropStreamable
}
export declare class JsDropStreamable {
getStream(): any
readFile(path: string, subPath: string, start?: bigint | undefined | null, end?: bigint | undefined | null): ReadableStream
}
export declare class Script {
@@ -21,8 +17,7 @@ export declare class Script {
export declare class ScriptEngine {
constructor()
buildRahiScript(content: string): Script
buildLuaScript(content: string): Script
buildRhaiScript(content: string): Script
buildJsScript(content: string): Script
execute(script: Script): void
fetchStrings(script: Script): Array<string>


@@ -377,7 +377,6 @@ if (!nativeBinding) {
module.exports = nativeBinding
module.exports.DropletHandler = nativeBinding.DropletHandler
module.exports.JsDropStreamable = nativeBinding.JsDropStreamable
module.exports.Script = nativeBinding.Script
module.exports.ScriptEngine = nativeBinding.ScriptEngine
module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc


@@ -1,6 +1,6 @@
{
"name": "@drop-oss/droplet",
"version": "3.0.1",
"version": "3.4.0",
"main": "index.js",
"types": "index.d.ts",
"napi": {
@@ -20,7 +20,7 @@
]
}
},
"license": "MIT",
"license": "AGPL-3.0-only",
"devDependencies": {
"@napi-rs/cli": "3.0.0-alpha.91",
"@types/node": "^22.13.10",


@@ -2,6 +2,7 @@
#![deny(clippy::expect_used)]
#![deny(clippy::panic)]
#![feature(trait_alias)]
#![feature(iterator_try_collect)]
pub mod manifest;


@@ -48,6 +48,8 @@ pub fn generate_manifest<'a>(
let backend: &'static mut Box<dyn VersionBackend + Send> =
unsafe { std::mem::transmute(backend) };
let required_single_file = backend.require_whole_files();
thread::spawn(move || {
let callback_borrow = &callback_sfn;
@@ -91,7 +93,7 @@ pub fn generate_manifest<'a>(
buffer.extend_from_slice(&buf[0..read]);
if length >= CHUNK_SIZE {
if length >= CHUNK_SIZE && !required_single_file {
break;
}
}
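Only two fragments of the chunking change are visible above: `require_whole_files()` is captured as `required_single_file` before the worker thread starts, and the `CHUNK_SIZE` cut-off is bypassed when it is set. A minimal sketch of the resulting read loop, with an illustrative chunk size and a hypothetical `read_chunk` helper rather than the crate's actual code:

use std::io::Read;

const CHUNK_SIZE: usize = 1024 * 1024; // illustrative; the real constant is defined in manifest.rs

// Sketch: when `required_single_file` is true (the 7z-backed zip backend),
// the loop only stops at end-of-file, so one manifest chunk always covers a
// whole entry; otherwise it cuts at CHUNK_SIZE as before.
fn read_chunk(reader: &mut impl Read, required_single_file: bool) -> std::io::Result<Vec<u8>> {
    let mut buffer = Vec::new();
    let mut buf = [0u8; 8192];
    loop {
        let read = reader.read(&mut buf)?;
        if read == 0 {
            break; // end of file
        }
        buffer.extend_from_slice(&buf[..read]);
        if buffer.len() >= CHUNK_SIZE && !required_single_file {
            break; // fixed-size chunk for backends that can serve byte ranges
        }
    }
    Ok(buffer)
}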


@@ -1,18 +1,15 @@
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
cell::LazyCell,
fs::{self, metadata, File},
io::{self, Read, Seek, SeekFrom, Sink},
io::{self, BufRead, BufReader, Read, Seek, SeekFrom, Sink},
path::{Path, PathBuf},
sync::Arc,
process::{Child, ChildStdout, Command, Stdio},
sync::{Arc, LazyLock},
};
use anyhow::anyhow;
use flate2::read::DeflateDecoder;
use rawzip::{
CompressionMethod, FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry, ZipVerifier,
RECOMMENDED_BUFFER_SIZE,
};
use crate::version::types::{MinimumFileObject, VersionBackend, VersionFile};
@@ -106,118 +103,98 @@ impl VersionBackend for PathVersionBackend {
size: metadata.len(),
})
}
fn require_whole_files(&self) -> bool {
false
}
}
pub static SEVEN_ZIP_INSTALLED: LazyLock<bool> =
LazyLock::new(|| Command::new("7z").output().is_ok());
#[derive(Clone)]
pub struct ZipVersionBackend {
archive: Arc<ZipArchive<FileReader>>,
path: String,
}
impl ZipVersionBackend {
pub fn new(archive: File) -> anyhow::Result<Self> {
let archive = ZipArchive::from_file(archive, &mut [0u8; RECOMMENDED_BUFFER_SIZE])?;
pub fn new(path: PathBuf) -> anyhow::Result<Self> {
Ok(Self {
archive: Arc::new(archive),
})
}
pub fn new_entry<'archive>(
&self,
entry: ZipEntry<'archive, FileReader>,
compression_method: CompressionMethod,
start: u64,
end: u64,
) -> anyhow::Result<ZipFileWrapper<'archive>> {
let deflater: Box<dyn Read + Send + 'archive> = match compression_method {
CompressionMethod::Store => Box::new(entry.reader()),
CompressionMethod::Deflate => Box::new(DeflateDecoder::new(entry.reader())),
CompressionMethod::Deflate64 => Box::new(DeflateDecoder::new(entry.reader())),
_ => Err(anyhow!(
"unsupported decompression algorithm: {compression_method:?}"
))?,
};
let mut verifier = entry.verifying_reader(deflater);
if start != 0 {
io::copy(&mut (&mut verifier).take(start), &mut Sink::default())?;
}
Ok(ZipFileWrapper {
reader: verifier,
limit: (end - start) as usize,
current: 0,
path: path.to_str().expect("invalid utf path").to_owned(),
})
}
}
pub struct ZipFileWrapper<'archive> {
reader: ZipVerifier<'archive, Box<dyn Read + Send + 'archive>, FileReader>,
limit: usize,
current: usize,
pub struct ZipFileWrapper {
command: Child,
reader: BufReader<ChildStdout>,
}
impl ZipFileWrapper {
pub fn new(mut command: Child) -> Self {
let stdout = command
.stdout
.take()
.expect("failed to access stdout of 7z");
let reader = BufReader::new(stdout);
ZipFileWrapper { command, reader }
}
}
/**
* This read implemention is a result of debugging hell
* It should probably be replaced with a .take() call.
*/
impl<'a> Read for ZipFileWrapper<'a> {
impl Read for ZipFileWrapper {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
let has_limit = self.limit != 0;
// End this stream if the read is the right size
if has_limit && self.current >= self.limit {
return Ok(0);
}
let read = self.reader.read(buf)?;
if self.limit != 0 {
self.current += read;
if self.current > self.limit {
let over = self.current - self.limit;
return Ok(read - over);
}
}
Ok(read)
self.reader.read(buf)
}
}
//impl<'a> MinimumFileObject for ZipFileWrapper<'a> {}
impl ZipVersionBackend {
fn find_wayfinder(
&mut self,
filename: &str,
) -> anyhow::Result<(ZipArchiveEntryWayfinder, CompressionMethod)> {
let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
let mut entries = self.archive.entries(read_buffer);
let entry = loop {
if let Some(v) = entries.next_entry()? {
if v.file_path().try_normalize()?.as_ref() == filename {
break Ok(v);
}
} else {
break Err(anyhow!("failed to fetch zip file header."));
}
}?;
let wayfinder = entry.wayfinder();
Ok((wayfinder, entry.compression_method()))
impl Drop for ZipFileWrapper {
fn drop(&mut self) {
self.command.wait().expect("failed to wait for 7z exit");
}
}
impl VersionBackend for ZipVersionBackend {
fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
let mut list_command = Command::new("7z");
list_command.args(vec!["l", "-ba", &self.path]);
let result = list_command.output()?;
if !result.status.success() {
return Err(anyhow!(
"failed to list files: code {:?}",
result.status.code()
));
}
let raw_result = String::from_utf8(result.stdout)?;
let files = raw_result
.split("\n")
.filter(|v| v.len() > 0)
.map(|v| v.split(" ").filter(|v| v.len() > 0));
let mut results = Vec::new();
let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
let mut budget_iterator = self.archive.entries(read_buffer);
while let Some(entry) = budget_iterator.next_entry()? {
if entry.is_dir() {
for file in files {
let values = file.collect::<Vec<&str>>();
let mut iter = values.iter();
let (date, time, attrs, size, compress, name) = (
iter.next().expect("failed to read date"),
iter.next().expect("failed to read time"),
iter.next().expect("failed to read attrs"),
iter.next().expect("failed to read size"),
iter.next().expect("failed to read compress"),
iter.collect::<Vec<&&str>>(),
);
if attrs.starts_with("D") {
continue;
}
results.push(VersionFile {
relative_filename: String::from(entry.file_path().try_normalize()?),
permission: entry.mode().permissions(),
size: entry.uncompressed_size_hint(),
relative_filename: name.into_iter().map(|v| *v).fold(String::new(), |a, b| a + b + " ").trim_end().to_owned(),
permission: 0o744, // owner r/w/x, everyone else, read
size: size.parse().unwrap(),
});
}
Ok(results)
}
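`list_files` now shells out to `7z l -ba <archive>` and splits each output line on whitespace. A rough sketch of that parsing, assuming the usual `date time attrs size compressed name` column layout (the exact columns can differ between 7z builds, and consecutive spaces inside a filename are collapsed by this approach):

// Sketch only: parse one line of `7z l -ba` output into (name, size),
// skipping directory entries (attribute string starting with 'D').
fn parse_7z_list_line(line: &str) -> Option<(String, u64)> {
    let mut cols = line.split_whitespace();
    let _date = cols.next()?;
    let _time = cols.next()?;
    let attrs = cols.next()?;
    let size: u64 = cols.next()?.parse().ok()?;
    let _compressed = cols.next()?;
    // Whatever remains is the filename; rejoin it so names containing spaces survive.
    let name = cols.collect::<Vec<_>>().join(" ");
    if attrs.starts_with('D') || name.is_empty() {
        return None;
    }
    Some((name, size))
}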
@@ -227,23 +204,26 @@ impl VersionBackend for ZipVersionBackend {
start: u64,
end: u64,
) -> anyhow::Result<Box<dyn MinimumFileObject + '_>> {
let (wayfinder, compression_method) = self.find_wayfinder(&file.relative_filename)?;
let local_entry = self
.archive
.get_entry(wayfinder)?;
let wrapper = self.new_entry(local_entry, compression_method, start, end)?;
Ok(Box::new(wrapper) as Box<dyn MinimumFileObject>)
let mut read_command = Command::new("7z");
read_command.args(vec!["e", "-so", &self.path, &file.relative_filename]);
let output = read_command
.stdout(Stdio::piped())
.spawn()
.expect("failed to spawn 7z");
Ok(Box::new(ZipFileWrapper::new(output)))
}
fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
let (entry, _) = self.find_wayfinder(&sub_path)?;
let files = self.list_files()?;
let file = files
.iter()
.find(|v| v.relative_filename == sub_path)
.expect("file not found");
Ok(VersionFile {
relative_filename: sub_path,
permission: 0,
size: entry.uncompressed_size_hint(),
})
Ok(file.clone())
}
fn require_whole_files(&self) -> bool {
true
}
}
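`reader` now streams an entry by spawning `7z e -so <archive> <name>` and wrapping the child's stdout; because 7z writes each entry front-to-back and cannot seek, the backend answers `require_whole_files()` with `true` instead of honouring `start`/`end` itself, and the wrapper reaps the child in `Drop`. A small usage sketch under those assumptions (names are illustrative):

use std::io;
use std::process::{Command, Stdio};

// Sketch: stream one entry through `7z e -so` and count its bytes.
fn measure_entry(archive: &str, entry: &str) -> anyhow::Result<u64> {
    let mut child = Command::new("7z")
        .args(["e", "-so", archive, entry])
        .stdout(Stdio::piped())
        .spawn()?;
    let mut stdout = child
        .stdout
        .take()
        .ok_or_else(|| anyhow::anyhow!("failed to capture 7z stdout"))?;
    // Drain the pipe; a real caller would hash or forward these bytes instead.
    let total = io::copy(&mut stdout, &mut io::sink())?;
    // Reap the child so it does not linger as a zombie (ZipFileWrapper does this in Drop).
    child.wait()?;
    Ok(total)
}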


@@ -41,6 +41,7 @@ impl<'a> AsyncRead for ReadToAsyncRead<'a> {
}
pub trait VersionBackend: DynClone {
fn require_whole_files(&self) -> bool;
fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>>;
fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile>;
fn reader(


@@ -1,11 +1,16 @@
use std::{collections::HashMap, fs::File, path::Path};
use std::{
collections::HashMap,
fs::File,
path::Path,
process::{Command, ExitStatus},
};
use anyhow::anyhow;
use napi::{bindgen_prelude::*, sys::napi_value__, tokio_stream::StreamExt};
use tokio_util::codec::{BytesCodec, FramedRead};
use crate::version::{
backends::{PathVersionBackend, ZipVersionBackend},
backends::{PathVersionBackend, ZipVersionBackend, SEVEN_ZIP_INSTALLED},
types::{ReadToAsyncRead, VersionBackend, VersionFile},
};
@@ -27,9 +32,14 @@ pub fn create_backend_constructor<'a>(
}));
};
if path.to_string_lossy().ends_with(".zip") {
let f = File::open(path.to_path_buf()).ok()?;
return Some(Box::new(|| Ok(Box::new(ZipVersionBackend::new(f)?))));
if *SEVEN_ZIP_INSTALLED {
let mut test = Command::new("7z");
test.args(vec!["t", path.to_str().expect("invalid utf path")]);
let status = test.status().ok()?;
if status.code().unwrap_or(1) == 0 {
let buf = path.to_path_buf();
return Some(Box::new(move || Ok(Box::new(ZipVersionBackend::new(buf)?))));
}
}
None
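The constructor only offers the zip backend when `7z` is on `PATH` (probed once through `SEVEN_ZIP_INSTALLED`) and the archive passes `7z t`. A consolidated sketch of that gate; note that `output().is_ok()` only proves the binary could be spawned, not that the invocation succeeded:

use std::path::Path;
use std::process::Command;
use std::sync::LazyLock;

// Probe once per process; `7z` with no arguments exits non-zero but still spawns.
static SEVEN_ZIP_AVAILABLE: LazyLock<bool> =
    LazyLock::new(|| Command::new("7z").output().is_ok());

// Sketch of the gate in create_backend_constructor: only hand out the
// 7z-backed backend if the binary exists and the archive passes `7z t`.
fn archive_is_usable(path: &Path) -> bool {
    if !*SEVEN_ZIP_AVAILABLE {
        return false;
    }
    let Some(path_str) = path.to_str() else {
        return false;
    };
    Command::new("7z")
        .args(["t", path_str])
        .status()
        .map(|status| status.success())
        .unwrap_or(false)
}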
@@ -99,7 +109,7 @@ impl<'a> DropletHandler<'a> {
Ok(file.size)
}
#[napi]
#[napi(ts_return_type = "ReadableStream")]
pub fn read_file(
&mut self,
reference: Reference<DropletHandler<'static>>,
@@ -108,7 +118,7 @@ impl<'a> DropletHandler<'a> {
env: Env,
start: Option<BigInt>,
end: Option<BigInt>,
) -> anyhow::Result<JsDropStreamable> {
) -> anyhow::Result<*mut napi_value__> {
let stream = reference.share_with(env, |handler| {
let backend = handler
.create_backend_for_path(path)
@@ -137,25 +147,9 @@ impl<'a> DropletHandler<'a> {
// Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
.map_err(napi::Error::from) // napi::Error implements From<tokio::io::Error>
});
// Create the napi-rs ReadableStream from the tokio_stream::Stream
// The unwrap() here means if stream creation fails, it will panic.
// For a production system, consider returning Result<Option<...>> and handling this.
ReadableStream::create_with_stream_bytes(&env, stream)
})?;
Ok(JsDropStreamable { inner: stream })
}
}
#[napi]
pub struct JsDropStreamable {
inner: SharedReference<DropletHandler<'static>, ReadableStream<'static, BufferSlice<'static>>>,
}
#[napi]
impl JsDropStreamable {
#[napi]
pub fn get_stream(&self) -> *mut napi_value__ {
self.inner.raw()
Ok(stream.raw())
}
}
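With `JsDropStreamable` gone, `read_file` hands the web `ReadableStream` back directly (hence the `ts_return_type` override and the tests iterating `stream` instead of `stream.getStream()`). The byte stream itself comes from bridging the blocking backend reader into Tokio via the crate's `ReadToAsyncRead` adapter and `FramedRead` + `BytesCodec`; a minimal sketch of that last step, using `tokio_stream`'s `StreamExt` (re-exported here through `napi`) and leaving the adapter itself out:

use tokio_stream::StreamExt;
use tokio_util::codec::{BytesCodec, FramedRead};

// Sketch: turn any AsyncRead into a stream of BytesMut chunks, the shape
// the handler feeds to ReadableStream::create_with_stream_bytes after
// mapping errors with `.map_err(napi::Error::from)`.
async fn count_chunks<R>(reader: R) -> std::io::Result<u64>
where
    R: tokio::io::AsyncRead + Unpin,
{
    let mut frames = FramedRead::new(reader, BytesCodec::new());
    let mut chunks = 0u64;
    while let Some(chunk) = frames.next().await {
        let _bytes = chunk?; // bytes::BytesMut
        chunks += 1;
    }
    Ok(chunks)
}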