5 Commits

SHA1 Message Date
efab43720f fix: 7z and streams 2025-11-20 13:39:05 +11:00
894f2b354a fix: 7z 2025-11-20 11:38:11 +11:00
416cada9f4 fix: unix permissions properly fixed with 7z 2025-10-28 19:31:59 +11:00
97312585db fix: fix to unix permissions with 7z 2025-10-28 19:29:25 +11:00
538aa3bb57 fix: update license 2025-10-14 12:11:24 +11:00
7 changed files with 84 additions and 91 deletions

View File

@@ -67,7 +67,7 @@ test("read file", async (t) => {
let finalString = "";
for await (const chunk of stream.getStream()) {
for await (const chunk of stream) {
// Do something with each 'chunk'
finalString += String.fromCharCode.apply(null, chunk);
}
@@ -94,7 +94,7 @@ test("read file offset", async (t) => {
let finalString = "";
for await (const chunk of stream.getStream()) {
for await (const chunk of stream) {
// Do something with each 'chunk'
finalString += String.fromCharCode.apply(null, chunk);
}
@@ -121,7 +121,7 @@ test.skip("zip speed test", async (t) => {
const timeThreshold = BigInt(1_000_000_000);
let runningTotal = 0;
let runningTime = BigInt(0);
for await (const chunk of stream.getStream()) {
for await (const chunk of stream) {
// Do something with each 'chunk'
const currentTime = process.hrtime.bigint();
const timeDiff = currentTime - lastTime;
@@ -146,55 +146,61 @@ test.skip("zip speed test", async (t) => {
t.pass();
});
test.skip("zip manifest test", async (t) => {
test("zip manifest test", async (t) => {
const zipFiles = fs.readdirSync("./assets").filter((v) => v.endsWith(".zip"));
const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
"./assets/TheGame.zip",
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
)
);
for (const [filename, data] of Object.entries(manifest)) {
let start = 0;
for (const [chunkIndex, length] of data.lengths.entries()) {
const hash = createHash("md5");
const stream = (
await dropletHandler.readFile(
"./assets/TheGame.zip",
filename,
BigInt(start),
BigInt(start + length)
for (const zipFile of zipFiles) {
console.log("generating manifest for " + zipFile);
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
"./assets/" + zipFile,
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
).getStream();
)
);
let streamLength = 0;
await stream.pipeTo(
new WritableStream({
write(chunk) {
streamLength += chunk.length;
hash.update(chunk);
},
})
);
for (const [filename, data] of Object.entries(manifest)) {
let start = 0;
for (const [chunkIndex, length] of data.lengths.entries()) {
const hash = createHash("md5");
const stream = (
await dropletHandler.readFile(
"./assets/" + zipFile,
filename,
BigInt(start),
BigInt(start + length)
)
);
console.log(stream);
if (streamLength != length)
return t.fail(
`stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
let streamLength = 0;
await stream.pipeTo(
new WritableStream({
write(chunk) {
streamLength += chunk.length;
hash.update(chunk);
},
})
);
const digest = hash.digest("hex");
if (data.checksums[chunkIndex] != digest)
return t.fail(
`checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
);
if (streamLength != length)
return t.fail(
`stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
);
start += length;
const digest = hash.digest("hex");
if (data.checksums[chunkIndex] != digest)
return t.fail(
`checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
);
start += length;
}
}
}
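
Note on the API shape exercised by these tests: readFile now hands back a web ReadableStream directly, with no JsDropStreamable wrapper or getStream() call in between. A minimal consumption sketch in TypeScript (archive and entry names taken from the asset setup script below; error handling omitted):

import { DropletHandler } from "@drop-oss/droplet";

async function readEntry(): Promise<number> {
  const dropletHandler = new DropletHandler();

  // readFile returns the ReadableStream itself; the start/end offsets are optional.
  const stream = dropletHandler.readFile(
    "./assets/TheGame.zip", // archive on disk
    "setup.exe",            // entry inside the archive
    BigInt(0),
    BigInt(1024)
  );

  let total = 0;
  for await (const chunk of stream) {
    total += chunk.length; // chunks arrive as byte arrays
  }
  return total;
}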

View File

@@ -1,4 +1,4 @@
# yes "droplet is awesome" | dd of=./setup.exe bs=1024 count=1000000
dd if=/dev/random of=./setup.exe bs=1024 count=1000000
zip TheGame.zip setup.exe
zip TheGame.zip setup.exe "test file.txt"
rm setup.exe

index.d.ts (vendored, 6 lines changed)
View File

@@ -8,11 +8,7 @@ export declare class DropletHandler {
hasBackendForPath(path: string): boolean
listFiles(path: string): Array<string>
peekFile(path: string, subPath: string): bigint
readFile(path: string, subPath: string, start?: bigint | undefined | null, end?: bigint | undefined | null): JsDropStreamable
}
export declare class JsDropStreamable {
getStream(): any
readFile(path: string, subPath: string, start?: bigint | undefined | null, end?: bigint | undefined | null): ReadableStream
}
export declare class Script {

View File

@@ -377,7 +377,6 @@ if (!nativeBinding) {
module.exports = nativeBinding
module.exports.DropletHandler = nativeBinding.DropletHandler
module.exports.JsDropStreamable = nativeBinding.JsDropStreamable
module.exports.Script = nativeBinding.Script
module.exports.ScriptEngine = nativeBinding.ScriptEngine
module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc

View File

@@ -1,6 +1,6 @@
{
"name": "@drop-oss/droplet",
"version": "3.2.0",
"version": "3.4.0",
"main": "index.js",
"types": "index.d.ts",
"napi": {
@@ -20,7 +20,7 @@
]
}
},
"license": "MIT",
"license": "AGPL-3.0-only",
"devDependencies": {
"@napi-rs/cli": "3.0.0-alpha.91",
"@types/node": "^22.13.10",

View File

@@ -126,12 +126,15 @@ impl ZipVersionBackend {
pub struct ZipFileWrapper {
command: Child,
reader: BufReader<ChildStdout>
reader: BufReader<ChildStdout>,
}
impl ZipFileWrapper {
pub fn new(mut command: Child) -> Self {
let stdout = command.stdout.take().expect("failed to access stdout of 7z");
let stdout = command
.stdout
.take()
.expect("failed to access stdout of 7z");
let reader = BufReader::new(stdout);
ZipFileWrapper { command, reader }
}
@@ -148,9 +151,9 @@ impl Read for ZipFileWrapper {
}
impl Drop for ZipFileWrapper {
fn drop(&mut self) {
self.command.wait().expect("failed to wait for 7z exit");
}
fn drop(&mut self) {
self.command.wait().expect("failed to wait for 7z exit");
}
}
impl VersionBackend for ZipVersionBackend {
@@ -165,25 +168,29 @@ impl VersionBackend for ZipVersionBackend {
));
}
let raw_result = String::from_utf8(result.stdout)?;
let files = raw_result.split("\n").filter(|v| v.len() > 0).map(|v| v.split(" ").filter(|v| v.len() > 0));
let files = raw_result
.split("\n")
.filter(|v| v.len() > 0)
.map(|v| v.split(" ").filter(|v| v.len() > 0));
let mut results = Vec::new();
for file in files {
let mut values = file.collect::<Vec<&str>>();
values.reverse();
let values = file.collect::<Vec<&str>>();
let mut iter = values.iter();
let (name, compress, size, attrs) = (
iter.next().expect("failed to fetch name"),
iter.next().expect("failed to read compressed size"),
iter.next().expect("failed to read file size"),
iter.next().expect("failed to fetch attrs")
let (date, time, attrs, size, compress, name) = (
iter.next().expect("failed to read date"),
iter.next().expect("failed to read time"),
iter.next().expect("failed to read attrs"),
iter.next().expect("failed to read size"),
iter.next().expect("failed to read compress"),
iter.collect::<Vec<&&str>>(),
);
if attrs.starts_with("D") {
continue;
}
results.push(VersionFile {
relative_filename: name.to_owned().to_owned(),
permission: 0,
relative_filename: name.into_iter().map(|v| *v).fold(String::new(), |a, b| a + b + " ").trim_end().to_owned(),
permission: 0o744, // owner r/w/x, everyone else, read
size: size.parse().unwrap(),
});
}
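
The listing fix above reads the 7z columns in their printed order (Date, Time, Attr, Size, Compressed, Name) and treats everything after the fifth column as the name, so entries whose names contain spaces parse correctly; directories are skipped via the D attribute and files get 0o744. A TypeScript sketch of the same column handling, assuming the usual "7z l" layout (exact spacing varies between 7z versions):

// Example listing line; the trailing name can itself contain spaces.
const line = "2025-11-20 13:39:05 ....A      1024000       512345  test file.txt";

const [date, time, attrs, size, compressed, ...nameParts] = line
  .split(" ")
  .filter((part) => part.length > 0);

if (!attrs.startsWith("D")) {
  const entry = {
    relativeFilename: nameParts.join(" "), // rejoin a name that was split on spaces
    permission: 0o744,                     // owner rwx, everyone else read-only
    size: Number(size),
  };
  console.log(entry, date, time, compressed);
}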
@@ -199,7 +206,10 @@ impl VersionBackend for ZipVersionBackend {
) -> anyhow::Result<Box<dyn MinimumFileObject + '_>> {
let mut read_command = Command::new("7z");
read_command.args(vec!["e", "-so", &self.path, &file.relative_filename]);
let output = read_command.stdout(Stdio::piped()).spawn().expect("failed to spawn 7z");
let output = read_command
.stdout(Stdio::piped())
.spawn()
.expect("failed to spawn 7z");
Ok(Box::new(ZipFileWrapper::new(output)))
}
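
Reading an entry still shells out to 7z, extracting a single file to stdout with "7z e -so" and streaming the bytes as they arrive; the wrapper above then reaps the child process in Drop. A rough Node-side equivalent of that read path, assuming 7z is on PATH:

import { spawn } from "node:child_process";
import type { Readable } from "node:stream";

function streamEntry(archive: string, entry: string): Readable {
  // "7z e -so" writes the extracted entry to stdout instead of to disk.
  const child = spawn("7z", ["e", "-so", archive, entry], {
    stdio: ["ignore", "pipe", "inherit"],
  });
  child.on("close", (code) => {
    // Mirrors the Rust Drop impl: make sure the 7z process gets waited on.
    if (code !== 0) console.error("7z exited with code " + code);
  });
  if (!child.stdout) throw new Error("7z stdout was not piped");
  return child.stdout;
}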

View File

@@ -38,9 +38,7 @@ pub fn create_backend_constructor<'a>(
let status = test.status().ok()?;
if status.code().unwrap_or(1) == 0 {
let buf = path.to_path_buf();
return Some(Box::new(move || {
Ok(Box::new(ZipVersionBackend::new(buf)?))
}));
return Some(Box::new(move || Ok(Box::new(ZipVersionBackend::new(buf)?))));
}
}
@@ -111,7 +109,7 @@ impl<'a> DropletHandler<'a> {
Ok(file.size)
}
#[napi]
#[napi(ts_return_type = "ReadableStream")]
pub fn read_file(
&mut self,
reference: Reference<DropletHandler<'static>>,
@@ -120,7 +118,7 @@ impl<'a> DropletHandler<'a> {
env: Env,
start: Option<BigInt>,
end: Option<BigInt>,
) -> anyhow::Result<JsDropStreamable> {
) -> anyhow::Result<*mut napi_value__> {
let stream = reference.share_with(env, |handler| {
let backend = handler
.create_backend_for_path(path)
@@ -149,25 +147,9 @@ impl<'a> DropletHandler<'a> {
// Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
.map_err(napi::Error::from) // napi::Error implements From<tokio::io::Error>
});
// Create the napi-rs ReadableStream from the tokio_stream::Stream
// The unwrap() here means if stream creation fails, it will panic.
// For a production system, consider returning Result<Option<...>> and handling this.
ReadableStream::create_with_stream_bytes(&env, stream)
})?;
Ok(JsDropStreamable { inner: stream })
}
}
#[napi]
pub struct JsDropStreamable {
inner: SharedReference<DropletHandler<'static>, ReadableStream<'static, BufferSlice<'static>>>,
}
#[napi]
impl JsDropStreamable {
#[napi]
pub fn get_stream(&self) -> *mut napi_value__ {
self.inner.raw()
Ok(stream.raw())
}
}
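
On the JavaScript side the raw napi handle surfaces as an ordinary web ReadableStream (see the ts_return_type annotation and the regenerated index.d.ts), so it composes with the standard stream utilities. A small sketch that copies an archive entry to disk, assuming Node 18+ and a hypothetical output path:

import fs from "node:fs";
import { Writable } from "node:stream";
import { DropletHandler } from "@drop-oss/droplet";

async function extractEntry(): Promise<void> {
  const handler = new DropletHandler();
  const stream = handler.readFile("./assets/TheGame.zip", "test file.txt");

  // Bridge the web ReadableStream into a Node write stream for the copy.
  await stream.pipeTo(Writable.toWeb(fs.createWriteStream("./out/test file.txt")));
}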