9 Commits

Author SHA1 Message Date
ba35ca9a14 feat: start of scripting engine 2025-08-24 13:50:44 +10:00
ae4648845e feat: add support for partially deflated zips 2025-08-17 11:21:09 +10:00
bd30464a08 fix: manifest generation with multiple chunks 2025-08-15 21:56:33 +10:00
c67cca4ee0 fix: remove debug println 2025-08-15 21:41:48 +10:00
cae208a3e0 fix: zip read sizing 2025-08-15 21:30:25 +10:00
4276b9d668 fix: skip zip test 2025-08-15 19:47:50 +10:00
4fb9bb7563 fix: manifest sizing for slow backends 2025-08-15 16:49:18 +10:00
913dc2f58d feat: add zip speed test 2025-08-15 12:17:10 +10:00
7ec5e9f215 fix: zip file reader offset 2025-08-13 16:22:48 +10:00
16 changed files with 1410 additions and 106 deletions

923
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -16,7 +16,6 @@ napi = { version = "3.0.0-beta.11", default-features = false, features = [
] }
napi-derive = "3.0.0-beta.11"
hex = "0.4.3"
serde_json = "1.0.128"
md5 = "0.7.0"
time-macros = "0.2.22"
time = "0.3.41"
@ -27,6 +26,10 @@ tokio-util = { version = "0.7.15", features = ["codec"] }
rawzip = "0.3.0"
dyn-clone = "1.0.20"
flate2 = "1.1.2"
rhai = "1.22.2"
mlua = { version = "0.11.2", features = ["luajit"] }
boa_engine = "0.20.0"
serde_json = "1.0.143"
[package.metadata.patch]
crates = ["rawzip"]

22
__test__/debug.spec.mjs Normal file
View File

@ -0,0 +1,22 @@
import test from "ava";
import { DropletHandler, generateManifest } from "../index.js";

/**
 * Debug harness: generates a manifest for the test zip asset and parses it.
 * Skipped by default — enable locally when debugging manifest generation.
 */
test.skip("debug", async (t) => {
  const handler = new DropletHandler();
  console.log("created handler");

  // Adapt the callback-style generateManifest API to a Promise,
  // then parse the JSON payload it produces.
  const rawManifest = await new Promise((resolve, reject) => {
    generateManifest(
      handler,
      "./assets/TheGame.zip",
      (_, __) => {},
      (_, __) => {},
      (err, manifest) => {
        if (err) {
          reject(err);
        } else {
          resolve(manifest);
        }
      }
    );
  });
  const manifest = JSON.parse(rawManifest);

  return t.pass();
});

62
__test__/script.spec.mjs Normal file
View File

@ -0,0 +1,62 @@
import test from "ava";
import { ScriptEngine } from "../index.js";

// Invalid Lua (unterminated string literal) must be rejected at compile time.
test("lua syntax fail", (t) => {
  const scriptEngine = new ScriptEngine();
  const luaIshCode = `
print("hello world);
`;
  // t.throws replaces the manual try/catch + t.fail pattern.
  t.throws(() => scriptEngine.buildLuaScript(luaIshCode));
});

// Invalid JS (unterminated string literal) must be rejected at parse time.
test("js syntax fail", (t) => {
  const scriptEngine = new ScriptEngine();
  const jsIshCode = `
const v = "hello world;
`;
  t.throws(() => scriptEngine.buildJsScript(jsIshCode));
});

// A valid JS script whose completion value is an array of strings
// should compile, and fetchStrings should return exactly those strings.
test("js", (t) => {
  const scriptEngine = new ScriptEngine();
  const jsModule = `
const v = "1" + "2";
["1", "2", "3", v]
`;
  const script = scriptEngine.buildJsScript(jsModule);
  t.deepEqual(scriptEngine.fetchStrings(script), ["1", "2", "3", "12"]);
});

// A valid Lua chunk returning a table of strings should compile,
// and fetchStrings should return those strings in order.
test("lua", (t) => {
  const scriptEngine = new ScriptEngine();
  const luaModule = `
local arr = {"1", "2"};
return arr;
`;
  const script = scriptEngine.buildLuaScript(luaModule);
  t.deepEqual(scriptEngine.fetchStrings(script), ["1", "2"]);
});

View File

@ -1,6 +1,8 @@
import test from "ava";
import fs from "node:fs";
import path from "path";
import { createHash } from "node:crypto";
import prettyBytes from "pretty-bytes";
import droplet, { DropletHandler, generateManifest } from "../index.js";
@ -56,7 +58,12 @@ test("read file", async (t) => {
const dropletHandler = new DropletHandler();
const stream = dropletHandler.readFile(dirName, "TESTFILE");
const stream = dropletHandler.readFile(
dirName,
"TESTFILE",
BigInt(0),
BigInt(testString.length)
);
let finalString = "";
@ -78,7 +85,12 @@ test("read file offset", async (t) => {
fs.writeFileSync(dirName + "/TESTFILE", testString);
const dropletHandler = new DropletHandler();
const stream = dropletHandler.readFile(dirName, "TESTFILE", BigInt(1), BigInt(4));
const stream = dropletHandler.readFile(
dirName,
"TESTFILE",
BigInt(1),
BigInt(4)
);
let finalString = "";
@ -96,10 +108,45 @@ test("read file offset", async (t) => {
fs.rmSync(dirName, { recursive: true });
});
test("zip file reader", async (t) => {
return t.pass();
test.skip("zip speed test", async (t) => {
t.timeout(100_000_000);
const dropletHandler = new DropletHandler();
t.timeout(10_000);
const stream = dropletHandler.readFile("./assets/TheGame.zip", "setup.exe");
let totalRead = 0;
let totalSeconds = 0;
let lastTime = process.hrtime.bigint();
const timeThreshold = BigInt(1_000_000_000);
let runningTotal = 0;
let runningTime = BigInt(0);
for await (const chunk of stream.getStream()) {
// Do something with each 'chunk'
const currentTime = process.hrtime.bigint();
const timeDiff = currentTime - lastTime;
lastTime = currentTime;
runningTime += timeDiff;
runningTotal += chunk.length;
if (runningTime >= timeThreshold) {
console.log(`${prettyBytes(runningTotal)}/s`);
totalRead += runningTotal;
totalSeconds += 1;
runningTime = BigInt(0);
runningTotal = 0;
}
}
const roughAverage = totalRead / totalSeconds;
console.log(`total rough average: ${prettyBytes(roughAverage)}/s`);
t.pass();
});
test.skip("zip manifest test", async (t) => {
const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
await new Promise((r, e) =>
@ -113,20 +160,61 @@ test("zip file reader", async (t) => {
)
);
const stream = dropletHandler.readFile(
for (const [filename, data] of Object.entries(manifest)) {
let start = 0;
for (const [chunkIndex, length] of data.lengths.entries()) {
const hash = createHash("md5");
const stream = (
await dropletHandler.readFile(
"./assets/TheGame.zip",
"setup.exe",
BigInt(10),
BigInt(20)
filename,
BigInt(start),
BigInt(start + length)
)
).getStream();
let streamLength = 0;
await stream.pipeTo(
new WritableStream({
write(chunk) {
streamLength += chunk.length;
hash.update(chunk);
},
})
);
if (streamLength != length)
return t.fail(
`stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
);
let finalString = "";
for await (const chunk of stream.getStream()) {
// Do something with each 'chunk'
finalString = String.fromCharCode.apply(null, chunk);
if(finalString.length > 100) break;
const digest = hash.digest("hex");
if (data.checksums[chunkIndex] != digest)
return t.fail(
`checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
);
start += length;
}
}
t.pass();
});
test.skip("partially compress zip test", async (t) => {
const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
"./assets/my horror game.zip",
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
)
);
return t.pass();
});

View File

@ -1,3 +1,4 @@
yes "droplet is awesome" | dd of=./setup.exe bs=1024 count=1000000
# yes "droplet is awesome" | dd of=./setup.exe bs=1024 count=1000000
dd if=/dev/random of=./setup.exe bs=1024 count=1000000
zip TheGame.zip setup.exe
rm setup.exe

13
index.d.ts vendored
View File

@ -15,6 +15,19 @@ export declare class JsDropStreamable {
getStream(): any
}
export declare class Script {
}
export declare class ScriptEngine {
constructor()
buildRahiScript(content: string): Script
buildLuaScript(content: string): Script
buildJsScript(content: string): Script
execute(script: Script): void
fetchStrings(script: Script): Array<string>
}
export declare function callAltThreadFunc(tsfn: ((err: Error | null, ) => any)): void
export declare function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string>

View File

@ -378,6 +378,8 @@ if (!nativeBinding) {
module.exports = nativeBinding
module.exports.DropletHandler = nativeBinding.DropletHandler
module.exports.JsDropStreamable = nativeBinding.JsDropStreamable
module.exports.Script = nativeBinding.Script
module.exports.ScriptEngine = nativeBinding.ScriptEngine
module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc
module.exports.generateClientCertificate = nativeBinding.generateClientCertificate
module.exports.generateManifest = nativeBinding.generateManifest

View File

@ -1,6 +1,6 @@
{
"name": "@drop-oss/droplet",
"version": "2.0.1",
"version": "2.3.1",
"main": "index.js",
"types": "index.d.ts",
"napi": {
@ -24,7 +24,9 @@
"devDependencies": {
"@napi-rs/cli": "3.0.0-alpha.91",
"@types/node": "^22.13.10",
"ava": "^6.2.0"
"ava": "^6.2.0",
"pretty-bytes": "^7.0.1",
"tsimp": "^2.0.12"
},
"ava": {
"timeout": "3m",
@ -51,8 +53,5 @@
"packageManager": "yarn@4.7.0",
"repository": {
"url": "git+https://github.com/Drop-OSS/droplet.git"
},
"dependencies": {
"tsimp": "^2.0.12"
}
}

View File

@ -4,6 +4,7 @@
pub mod manifest;
pub mod ssl;
pub mod version;
pub mod script;
#[macro_use]
extern crate napi_derive;

View File

@ -1,10 +1,4 @@
use std::{
collections::HashMap,
io::{BufRead, BufReader},
path::Path,
sync::Arc,
thread,
};
use std::{collections::HashMap, sync::Arc, thread};
use napi::{
threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
@ -42,10 +36,18 @@ pub fn generate_manifest<'a>(
log_sfn: ThreadsafeFunction<String>,
callback_sfn: ThreadsafeFunction<String>,
) -> Result<()> {
let backend: &mut Box<dyn VersionBackend + Send> =
droplet_handler.create_backend_for_path(dir).ok_or(napi::Error::from_reason("Could not create backend for path."))?;
let backend: &mut Box<dyn VersionBackend + Send> = droplet_handler
.create_backend_for_path(dir)
.ok_or(napi::Error::from_reason(
"Could not create backend for path.",
))?;
// This is unsafe (obviously),
// but it's fine as long as the DropletHandler doesn't get
// dropped while we're generating the manifest.
let backend: &'static mut Box<dyn VersionBackend + Send> =
unsafe { std::mem::transmute(backend) };
thread::spawn(move || {
let files = backend.list_files();
@ -55,9 +57,10 @@ pub fn generate_manifest<'a>(
let total: i32 = files.len() as i32;
let mut i: i32 = 0;
let mut buf = [0u8; 1024 * 16];
for version_file in files {
let raw_reader = backend.reader(&version_file).unwrap();
let mut reader = BufReader::with_capacity(CHUNK_SIZE, raw_reader);
let mut reader = backend.reader(&version_file, 0, 0).unwrap();
let mut chunk_data = ChunkData {
permissions: version_file.permission,
@ -68,14 +71,28 @@ pub fn generate_manifest<'a>(
let mut chunk_index = 0;
loop {
let mut length = 0;
let mut buffer: Vec<u8> = Vec::new();
reader.fill_buf().unwrap().clone_into(&mut buffer);
let length = buffer.len();
let mut file_empty = false;
if length == 0 {
loop {
let read = reader.read(&mut buf).unwrap();
length += read;
// If we're out of data, add this chunk and then move onto the next file
if read == 0 {
file_empty = true;
break;
}
buffer.extend_from_slice(&buf[0..read]);
if length >= CHUNK_SIZE {
break;
}
}
let chunk_id = Uuid::new_v4();
let checksum = md5::compute(buffer).0;
let checksum_string = hex::encode(checksum);
@ -88,10 +105,14 @@ pub fn generate_manifest<'a>(
"Processed chunk {} for {}",
chunk_index, &version_file.relative_filename
);
log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
reader.consume(length);
chunk_index += 1;
if file_empty {
break;
}
}
chunks.insert(version_file.relative_filename, chunk_data);

128
src/script/mod.rs Normal file
View File

@ -0,0 +1,128 @@
use boa_engine::{Context, JsValue, Source};
use mlua::{FromLuaMulti, Function, Lua};
use napi::Result;
use rhai::AST;
/// Which scripting language a script targets.
/// NOTE(review): this enum is not referenced by the visible code in this
/// module (`ScriptInner` carries the language instead) — confirm it is
/// used elsewhere before removing.
pub enum ScriptType {
    Rhai,
    Lua,
    Javascript,
}

/// Opaque handle to a compiled script, exposed to JavaScript via napi.
#[napi]
pub struct Script(ScriptInner);

/// Per-engine compiled representation wrapped by `Script`.
pub enum ScriptInner {
    Rhai { script: AST },
    Lua { script: Function },
    Javascript { script: boa_engine::Script },
}

/// Holds one engine instance for each supported scripting language.
#[napi]
pub struct ScriptEngine {
    rhai_engine: rhai::Engine,
    lua_engine: Lua,
    js_engine: Context,
}
#[napi]
impl ScriptEngine {
#[napi(constructor)]
pub fn new() -> Self {
ScriptEngine {
rhai_engine: rhai::Engine::new(),
lua_engine: Lua::new(),
js_engine: Context::default(),
}
}
#[napi]
pub fn build_rahi_script(&self, content: String) -> Result<Script> {
let script = self
.rhai_engine
.compile(content.clone())
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(Script(ScriptInner::Rhai { script }))
}
#[napi]
pub fn build_lua_script(&self, content: String) -> Result<Script> {
let func = self
.lua_engine
.load(content.clone())
.into_function()
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(Script(ScriptInner::Lua { script: func }))
}
#[napi]
pub fn build_js_script(&mut self, content: String) -> Result<Script> {
let source = Source::from_bytes(content.as_bytes());
let script = boa_engine::Script::parse(source, None, &mut self.js_engine)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(Script(ScriptInner::Javascript { script }))
}
fn execute_rhai_script<T>(&self, ast: &AST) -> Result<T>
where
T: Clone + 'static,
{
let v = self
.rhai_engine
.eval_ast::<T>(ast)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(v)
}
fn execute_lua_script<T>(&self, function: &Function) -> Result<T>
where
T: FromLuaMulti,
{
let v = function
.call::<T>(())
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(v)
}
fn execute_js_script(&mut self, func: &boa_engine::Script) -> Result<JsValue> {
let v = func
.evaluate(&mut self.js_engine)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(v)
}
#[napi]
pub fn execute(&mut self, script: &mut Script) -> Result<()> {
match &script.0 {
ScriptInner::Rhai { script } => {
self.execute_rhai_script::<()>(script)?;
}
ScriptInner::Lua { script } => {
self.execute_lua_script::<()>(script)?;
}
ScriptInner::Javascript { script } => {
self.execute_js_script(script)?;
}
};
Ok(())
}
#[napi]
pub fn fetch_strings(&mut self, script: &mut Script) -> Result<Vec<String>> {
Ok(match &script.0 {
ScriptInner::Rhai { script } => self.execute_rhai_script(script)?,
ScriptInner::Lua { script } => self.execute_lua_script(script)?,
ScriptInner::Javascript { script } => {
let v = self.execute_js_script(script)?;
serde_json::from_value(
v.to_json(&mut self.js_engine)
.map_err(|e| napi::Error::from_reason(e.to_string()))?,
).map_err(|e| napi::Error::from_reason(e.to_string()))?
}
})
}
}

View File

@ -2,17 +2,18 @@
use std::os::unix::fs::PermissionsExt;
use std::{
fs::{self, metadata, File},
io::{self, Read, Sink},
io::{self, Read, Seek, SeekFrom, Sink},
path::{Path, PathBuf},
sync::Arc,
};
use flate2::read::DeflateDecoder;
use rawzip::{
FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry, ZipReader, RECOMMENDED_BUFFER_SIZE,
CompressionMethod, FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry,
ZipVerifier, RECOMMENDED_BUFFER_SIZE,
};
use crate::version::types::{MinimumFileObject, Skippable, VersionBackend, VersionFile};
use crate::version::types::{MinimumFileObject, VersionBackend, VersionFile};
pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
if metadata(path).unwrap().is_dir() {
@ -52,8 +53,21 @@ impl VersionBackend for PathVersionBackend {
results
}
fn reader(&mut self, file: &VersionFile) -> Option<Box<dyn MinimumFileObject + 'static>> {
let file = File::open(self.base_dir.join(file.relative_filename.clone())).ok()?;
fn reader(
&mut self,
file: &VersionFile,
start: u64,
end: u64,
) -> Option<Box<dyn MinimumFileObject + 'static>> {
let mut file = File::open(self.base_dir.join(file.relative_filename.clone())).ok()?;
if start != 0 {
file.seek(SeekFrom::Start(start)).ok()?;
}
if end != 0 {
return Some(Box::new(file.take(end - start)));
}
return Some(Box::new(file));
}
@ -103,31 +117,72 @@ impl ZipVersionBackend {
pub fn new_entry<'archive>(
&self,
entry: ZipEntry<'archive, FileReader>,
compression_method: CompressionMethod,
start: u64,
end: u64,
) -> ZipFileWrapper<'archive> {
let deflater = DeflateDecoder::new(entry.reader());
ZipFileWrapper { reader: deflater }
let deflater: Box<dyn Read + Send + 'archive> = match compression_method {
CompressionMethod::Store => Box::new(entry.reader()),
CompressionMethod::Deflate => Box::new(DeflateDecoder::new(entry.reader())),
CompressionMethod::Deflate64 => Box::new(DeflateDecoder::new(entry.reader())),
_ => panic!(
"unsupported decompression algorithm: {:?}",
compression_method
),
};
let mut verifier = entry.verifying_reader(deflater);
if start != 0 {
io::copy(&mut (&mut verifier).take(start), &mut Sink::default()).unwrap();
}
ZipFileWrapper {
reader: verifier,
limit: (end - start) as usize,
current: 0,
}
}
}
pub struct ZipFileWrapper<'archive> {
reader: DeflateDecoder<ZipReader<'archive, FileReader>>,
reader: ZipVerifier<'archive, Box<dyn Read + Send + 'archive>, FileReader>,
limit: usize,
current: usize,
}
/**
 * This read implementation is a result of debugging hell.
 * It should probably be replaced with a .take() call.
 */
impl<'a> Read for ZipFileWrapper<'a> {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
let has_limit = self.limit != 0;
// End this stream if the read is the right size
if has_limit {
if self.current >= self.limit {
return Ok(0);
}
}
let read = self.reader.read(buf)?;
Ok(read)
if self.limit != 0 {
self.current += read;
if self.current > self.limit {
let over = self.current - self.limit;
return Ok(read - over);
}
}
return Ok(read);
}
}
impl<'a> Skippable for ZipFileWrapper<'a> {
fn skip(&mut self, amount: u64) {
io::copy(&mut self.reader.by_ref().take(amount), &mut Sink::default()).unwrap();
}
}
impl<'a> MinimumFileObject for ZipFileWrapper<'a> {}
//impl<'a> MinimumFileObject for ZipFileWrapper<'a> {}
impl ZipVersionBackend {
fn find_wayfinder(&mut self, filename: &str) -> Option<ZipArchiveEntryWayfinder> {
fn find_wayfinder(
&mut self,
filename: &str,
) -> Option<(ZipArchiveEntryWayfinder, CompressionMethod)> {
let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
let mut entries = self.archive.entries(read_buffer);
let entry = loop {
@ -142,7 +197,7 @@ impl ZipVersionBackend {
let wayfinder = entry.wayfinder();
Some(wayfinder)
Some((wayfinder, entry.compression_method()))
}
}
impl VersionBackend for ZipVersionBackend {
@ -163,17 +218,22 @@ impl VersionBackend for ZipVersionBackend {
results
}
fn reader(&mut self, file: &VersionFile) -> Option<Box<dyn MinimumFileObject + '_>> {
let wayfinder = self.find_wayfinder(&file.relative_filename)?;
fn reader(
&mut self,
file: &VersionFile,
start: u64,
end: u64,
) -> Option<Box<dyn MinimumFileObject + '_>> {
let (wayfinder, compression_method) = self.find_wayfinder(&file.relative_filename)?;
let local_entry = self.archive.get_entry(wayfinder).unwrap();
let wrapper = self.new_entry(local_entry);
let wrapper = self.new_entry(local_entry, compression_method, start, end);
Some(Box::new(wrapper))
Some(Box::new(wrapper) as Box<dyn MinimumFileObject>)
}
fn peek_file(&mut self, sub_path: String) -> Option<VersionFile> {
let entry = self.find_wayfinder(&sub_path)?;
let (entry, _) = self.find_wayfinder(&sub_path)?;
Some(VersionFile {
relative_filename: sub_path,

View File

@ -1,6 +1,5 @@
use std::{
fmt::Debug,
io::{Read, Seek, SeekFrom},
fmt::Debug, io::Read
};
use dyn_clone::DynClone;
@ -13,36 +12,26 @@ pub struct VersionFile {
pub size: u64,
}
pub trait Skippable {
fn skip(&mut self, amount: u64);
}
impl<T> Skippable for T
where
T: Seek,
{
fn skip(&mut self, amount: u64) {
self.seek(SeekFrom::Start(amount)).unwrap();
}
}
pub trait MinimumFileObject: Read + Send + Skippable {}
impl<T: Read + Send + Seek> MinimumFileObject for T {}
pub trait MinimumFileObject: Read + Send {}
impl<T: Read + Send> MinimumFileObject for T {}
// Intentionally not a generic, because of types in read_file
pub struct ReadToAsyncRead<'a> {
pub inner: Box<dyn Read + Send + 'a>,
}
const ASYNC_READ_BUFFER_SIZE: usize = 8128;
impl<'a> AsyncRead for ReadToAsyncRead<'a> {
fn poll_read(
mut self: std::pin::Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
buf: &mut tokio::io::ReadBuf<'_>,
) -> std::task::Poll<io::Result<()>> {
let mut read_buf = [0u8; 8192];
let var_name = self.inner.read(&mut read_buf).unwrap();
let amount = var_name.min(buf.remaining());
buf.put_slice(&read_buf[0..amount]);
let mut read_buf = [0u8; ASYNC_READ_BUFFER_SIZE];
let read_size = ASYNC_READ_BUFFER_SIZE.min(buf.remaining());
let read = self.inner.read(&mut read_buf[0..read_size]).unwrap();
buf.put_slice(&read_buf[0..read]);
std::task::Poll::Ready(Ok(()))
}
}
@ -50,7 +39,7 @@ impl<'a> AsyncRead for ReadToAsyncRead<'a> {
pub trait VersionBackend: DynClone {
fn list_files(&mut self) -> Vec<VersionFile>;
fn peek_file(&mut self, sub_path: String) -> Option<VersionFile>;
fn reader(&mut self, file: &VersionFile) -> Option<Box<dyn MinimumFileObject + '_>>;
fn reader(&mut self, file: &VersionFile, start: u64, end: u64) -> Option<Box<dyn MinimumFileObject + '_>>;
}
dyn_clone::clone_trait_object!(VersionBackend);

View File

@ -1,4 +1,6 @@
use std::{collections::HashMap, fs::File, io::Read, path::Path};
use std::{
collections::HashMap, fs::File, path::Path
};
use napi::{bindgen_prelude::*, sys::napi_value__, tokio_stream::StreamExt};
use tokio_util::codec::{BytesCodec, FramedRead};
@ -115,20 +117,16 @@ impl<'a> DropletHandler<'a> {
size: 0, // Shouldn't matter
};
// Use `?` operator for cleaner error propagation from `Option`
let mut reader = backend.reader(&version_file).ok_or(napi::Error::from_reason("Failed to create reader."))?;
let reader = backend
.reader(
&version_file,
start.map(|e| e.get_u64().1).unwrap_or(0),
end.map(|e| e.get_u64().1).unwrap_or(0),
)
.ok_or(napi::Error::from_reason("Failed to create reader."))?;
if let Some(skip) = start.clone() {
reader.skip(skip.get_u64().1.into());
// io::copy(&mut reader.by_ref().take(skip.into()), &mut io::sink()).unwrap();
}
let async_reader = if let Some(limit) = end {
let amount = limit.get_u64().1 - start.map_or(Some(0), |v| Some(v.get_u64().1)).unwrap();
ReadToAsyncRead {
inner: Box::new(reader.take(amount.into())),
}
} else {
ReadToAsyncRead { inner: reader }
let async_reader = ReadToAsyncRead {
inner: reader,
};
// Create a FramedRead stream with BytesCodec for chunking
@ -147,9 +145,7 @@ impl<'a> DropletHandler<'a> {
Ok(ReadableStream::create_with_stream_bytes(&env, stream).unwrap())
})?;
Ok(JsDropStreamable {
inner: stream,
})
Ok(JsDropStreamable { inner: stream })
}
}

View File

@ -12,6 +12,7 @@ __metadata:
"@napi-rs/cli": "npm:3.0.0-alpha.91"
"@types/node": "npm:^22.13.10"
ava: "npm:^6.2.0"
pretty-bytes: "npm:^7.0.1"
tsimp: "npm:^2.0.12"
languageName: unknown
linkType: soft
@ -2432,6 +2433,13 @@ __metadata:
languageName: node
linkType: hard
"pretty-bytes@npm:^7.0.1":
version: 7.0.1
resolution: "pretty-bytes@npm:7.0.1"
checksum: 10c0/14ffb503d2de3588042c722848062a4897e6faece1694e0c83ba5669ec003d73311d946d50d2b3c6099a6a306760011b8446ee3cf9cf86eca13a454a8f1c47cb
languageName: node
linkType: hard
"pretty-ms@npm:^9.1.0":
version: 9.2.0
resolution: "pretty-ms@npm:9.2.0"