diff --git a/__test__/utils.spec.mjs b/__test__/utils.spec.mjs
index 30f551d..8827970 100644
--- a/__test__/utils.spec.mjs
+++ b/__test__/utils.spec.mjs
@@ -67,7 +67,7 @@ test("read file", async (t) => {

   let finalString = "";

-  for await (const chunk of stream.getStream()) {
+  for await (const chunk of stream) {
     // Do something with each 'chunk'
     finalString += String.fromCharCode.apply(null, chunk);
   }
@@ -94,7 +94,7 @@ test("read file offset", async (t) => {

   let finalString = "";

-  for await (const chunk of stream.getStream()) {
+  for await (const chunk of stream) {
     // Do something with each 'chunk'
     finalString += String.fromCharCode.apply(null, chunk);
   }
@@ -121,7 +121,7 @@ test.skip("zip speed test", async (t) => {
   const timeThreshold = BigInt(1_000_000_000);
   let runningTotal = 0;
   let runningTime = BigInt(0);
-  for await (const chunk of stream.getStream()) {
+  for await (const chunk of stream) {
     // Do something with each 'chunk'
     const currentTime = process.hrtime.bigint();
     const timeDiff = currentTime - lastTime;
@@ -147,55 +147,60 @@ test.skip("zip speed test", async (t) => {
 });

 test("zip manifest test", async (t) => {
+  const zipFiles = fs.readdirSync("./assets").filter((v) => v.endsWith(".zip"));
   const dropletHandler = new DropletHandler();
-  const manifest = JSON.parse(
-    await new Promise((r, e) =>
-      generateManifest(
-        dropletHandler,
-        "./assets/TheGame.zip",
-        (_, __) => {},
-        (_, __) => {},
-        (err, manifest) => (err ? e(err) : r(manifest))
-      )
-    )
-  );
-  for (const [filename, data] of Object.entries(manifest)) {
-    console.log(filename);
-    let start = 0;
-    for (const [chunkIndex, length] of data.lengths.entries()) {
-      const hash = createHash("md5");
-      const stream = (
-        await dropletHandler.readFile(
-          "./assets/TheGame.zip",
-          filename,
-          BigInt(start),
-          BigInt(start + length)
+  for (const zipFile of zipFiles) {
+    console.log("generating manifest for " + zipFile);
+    const manifest = JSON.parse(
+      await new Promise((r, e) =>
+        generateManifest(
+          dropletHandler,
+          "./assets/" + zipFile,
+          (_, __) => {},
+          (_, __) => {},
+          (err, manifest) => (err ? e(err) : r(manifest))
         )
-      ).getStream();
+      )
+    );

-      let streamLength = 0;
-      await stream.pipeTo(
-        new WritableStream({
-          write(chunk) {
-            streamLength += chunk.length;
-            hash.update(chunk);
-          },
-        })
-      );
+    for (const [filename, data] of Object.entries(manifest)) {
+      let start = 0;
+      for (const [chunkIndex, length] of data.lengths.entries()) {
+        const hash = createHash("md5");
+        const stream = (
+          await dropletHandler.readFile(
+            "./assets/" + zipFile,
+            filename,
+            BigInt(start),
+            BigInt(start + length)
+          )
+        );
+        console.log(stream);

-      if (streamLength != length)
-        return t.fail(
-          `stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
+        let streamLength = 0;
+        await stream.pipeTo(
+          new WritableStream({
+            write(chunk) {
+              streamLength += chunk.length;
+              hash.update(chunk);
+            },
+          })
         );

-      const digest = hash.digest("hex");
-      if (data.checksums[chunkIndex] != digest)
-        return t.fail(
-          `checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
-        );
+        if (streamLength != length)
+          return t.fail(
+            `stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
+          );

-      start += length;
+        const digest = hash.digest("hex");
+        if (data.checksums[chunkIndex] != digest)
+          return t.fail(
+            `checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
+          );
+
+        start += length;
+      }
     }
   }
diff --git a/index.d.ts b/index.d.ts
index eced819..8265e98 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -8,11 +8,7 @@ export declare class DropletHandler {
   hasBackendForPath(path: string): boolean
   listFiles(path: string): Array
   peekFile(path: string, subPath: string): bigint
-  readFile(path: string, subPath: string, start?: bigint | undefined | null, end?: bigint | undefined | null): JsDropStreamable
-}
-
-export declare class JsDropStreamable {
-  getStream(): any
+  readFile(path: string, subPath: string, start?: bigint | undefined | null, end?: bigint | undefined | null): ReadableStream
 }

 export declare class Script {
diff --git a/index.js b/index.js
index 957f892..62fa75d 100644
--- a/index.js
+++ b/index.js
@@ -377,7 +377,6 @@ if (!nativeBinding) {

 module.exports = nativeBinding
 module.exports.DropletHandler = nativeBinding.DropletHandler
-module.exports.JsDropStreamable = nativeBinding.JsDropStreamable
 module.exports.Script = nativeBinding.Script
 module.exports.ScriptEngine = nativeBinding.ScriptEngine
 module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc
diff --git a/package.json b/package.json
index bda848c..652f4cc 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@drop-oss/droplet",
-  "version": "3.3.0",
+  "version": "3.4.0",
   "main": "index.js",
   "types": "index.d.ts",
   "napi": {
diff --git a/src/version/utils.rs b/src/version/utils.rs
index b375c34..7288465 100644
--- a/src/version/utils.rs
+++ b/src/version/utils.rs
@@ -38,9 +38,7 @@ pub fn create_backend_constructor<'a>(
         let status = test.status().ok()?;
         if status.code().unwrap_or(1) == 0 {
             let buf = path.to_path_buf();
-            return Some(Box::new(move || {
-                Ok(Box::new(ZipVersionBackend::new(buf)?))
-            }));
+            return Some(Box::new(move || Ok(Box::new(ZipVersionBackend::new(buf)?))));
         }
     }

@@ -111,7 +109,7 @@ impl<'a> DropletHandler<'a> {
         Ok(file.size)
     }

-    #[napi]
+    #[napi(ts_return_type = "ReadableStream")]
     pub fn read_file(
         &mut self,
         reference: Reference<DropletHandler<'static>>,
@@ -120,7 +118,7 @@
         env: Env,
         start: Option<BigInt>,
         end: Option<BigInt>,
-    ) -> anyhow::Result<JsDropStreamable> {
+    ) -> anyhow::Result<*mut napi_value__> {
         let stream = reference.share_with(env, |handler| {
             let backend = handler
                 .create_backend_for_path(path)
@@ -149,25 +147,9 @@ impl<'a> DropletHandler<'a> {
                 // Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
                 .map_err(napi::Error::from) // napi::Error implements From
             });
-            // Create the napi-rs ReadableStream from the tokio_stream::Stream
-            // The unwrap() here means if stream creation fails, it will panic.
-            // For a production system, consider returning Result> and handling this.
             ReadableStream::create_with_stream_bytes(&env, stream)
         })?;

-        Ok(JsDropStreamable { inner: stream })
-    }
-}
-
-#[napi]
-pub struct JsDropStreamable {
-    inner: SharedReference<DropletHandler<'static>, ReadableStream<'static, BufferSlice<'static>>>,
-}
-
-#[napi]
-impl JsDropStreamable {
-    #[napi]
-    pub fn get_stream(&self) -> *mut napi_value__ {
-        self.inner.raw()
+        Ok(stream.raw())
     }
 }
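Usage note (not part of the patch): with JsDropStreamable removed, DropletHandler.readFile() hands back a web ReadableStream directly, typed via #[napi(ts_return_type = "ReadableStream")]. A minimal consumer sketch in the style of the updated tests; the archive path reuses the old test asset and the sub-path is purely hypothetical:

import { DropletHandler } from "@drop-oss/droplet";

const dropletHandler = new DropletHandler();

// readFile() now yields a ReadableStream of byte chunks, so it can be
// iterated directly or piped into a WritableStream -- no getStream() call.
const stream = await dropletHandler.readFile(
  "./assets/TheGame.zip", // zip asset used by the earlier tests
  "game/data.bin",        // hypothetical sub-path inside the archive
  BigInt(0),              // optional start offset
  BigInt(1024)            // optional end offset
);

let received = 0;
for await (const chunk of stream) {
  received += chunk.length; // each chunk is a typed array of file bytes
}
console.log(`read ${received} bytes`);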