From 4fb9bb75633decb6e0428e9091cfaaad82c0db3e Mon Sep 17 00:00:00 2001 From: DecDuck Date: Fri, 15 Aug 2025 16:49:18 +1000 Subject: [PATCH] fix: manifest sizing for slow backends --- __test__/debug.spec.mjs | 22 ++++++++++++++++++++ __test__/utils.spec.mjs | 28 +++++++++++++++++++++++-- package.json | 2 +- src/manifest.rs | 46 ++++++++++++++++++++++++++++------------- 4 files changed, 81 insertions(+), 17 deletions(-) create mode 100644 __test__/debug.spec.mjs diff --git a/__test__/debug.spec.mjs b/__test__/debug.spec.mjs new file mode 100644 index 0000000..5df0565 --- /dev/null +++ b/__test__/debug.spec.mjs @@ -0,0 +1,22 @@ +import test from "ava"; +import { DropletHandler, generateManifest } from "../index.js"; + +test.skip("debug", async (t) => { + const handler = new DropletHandler(); + + console.log("created handler"); + + const manifest = JSON.parse( + await new Promise((r, e) => + generateManifest( + handler, + "./assets/TheGame.zip", + (_, __) => {}, + (_, __) => {}, + (err, manifest) => (err ? 
e(err) : r(manifest)) + ) + ) + ); + + return t.pass(); +}); diff --git a/__test__/utils.spec.mjs b/__test__/utils.spec.mjs index eb30ca1..890b4c1 100644 --- a/__test__/utils.spec.mjs +++ b/__test__/utils.spec.mjs @@ -102,7 +102,7 @@ test("read file offset", async (t) => { fs.rmSync(dirName, { recursive: true }); }); -test("zip speed test", async (t) => { +test.skip("zip speed test", async (t) => { t.timeout(100_000_000); const dropletHandler = new DropletHandler(); @@ -135,7 +135,31 @@ test("zip speed test", async (t) => { const roughAverage = totalRead / totalSeconds; - console.log(`total rough average: ${prettyBytes(roughAverage)}/s`) + console.log(`total rough average: ${prettyBytes(roughAverage)}/s`); + + t.pass(); +}); + +test("zip manifest test", async (t) => { + const dropletHandler = new DropletHandler(); + const manifest = JSON.parse( + await new Promise((r, e) => + generateManifest( + dropletHandler, + "./assets/TheGame.zip", + (_, __) => {}, + (_, __) => {}, + (err, manifest) => (err ? 
e(err) : r(manifest)) + ) + ) + ); + + const file = manifest[Object.keys(manifest).at(0)]; + const amount = file.ids.length; + + if(amount > 20) { + return t.fail(`Zip manifest has ${amount} chunks, more than 20`); + } t.pass(); }); diff --git a/package.json b/package.json index 269ed27..e1e5af4 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@drop-oss/droplet", - "version": "2.0.2", + "version": "2.1.0", "main": "index.js", "types": "index.d.ts", "napi": { diff --git a/src/manifest.rs b/src/manifest.rs index ef6049a..9b3b68a 100644 --- a/src/manifest.rs +++ b/src/manifest.rs @@ -1,7 +1,6 @@ use std::{ collections::HashMap, io::{BufRead, BufReader}, - path::Path, sync::Arc, thread, }; @@ -42,11 +41,11 @@ pub fn generate_manifest<'a>( log_sfn: ThreadsafeFunction, callback_sfn: ThreadsafeFunction, ) -> Result<()> { - let backend: &mut Box = - droplet_handler.create_backend_for_path(dir).ok_or(napi::Error::from_reason("Could not create backend for path."))?; - let backend: &'static mut Box = - unsafe { std::mem::transmute(backend) }; - thread::spawn(move || { + let backend: &mut Box = droplet_handler + .create_backend_for_path(dir) + .ok_or(napi::Error::from_reason( + "Could not create backend for path.", + ))?; let files = backend.list_files(); // Filepath to chunk data @@ -56,8 +55,8 @@ pub fn generate_manifest<'a>( let mut i: i32 = 0; for version_file in files { - let raw_reader = backend.reader(&version_file).unwrap(); - let mut reader = BufReader::with_capacity(CHUNK_SIZE, raw_reader); + let reader = backend.reader(&version_file).unwrap(); + let mut reader = BufReader::with_capacity(8128, reader); let mut chunk_data = ChunkData { permissions: version_file.permission, @@ -68,12 +67,28 @@ pub fn generate_manifest<'a>( let mut chunk_index = 0; loop { + let mut length = 0; let mut buffer: Vec = Vec::new(); - reader.fill_buf().unwrap().clone_into(&mut buffer); - let length = buffer.len(); + let mut file_empty = false; - if length == 0 { - 
break; + loop { + let read_buf = reader.fill_buf().unwrap(); + let buf_length = read_buf.len(); + + // If we're out of data, add this chunk and then move onto the next file + if buf_length == 0 { + file_empty = true; + break; + } + + let take = buf_length.min(CHUNK_SIZE - length); + buffer.extend_from_slice(&read_buf[..take]); + reader.consume(take); + length += take; + + if length >= CHUNK_SIZE { + break; + } } let chunk_id = Uuid::new_v4(); @@ -88,10 +103,14 @@ "Processed chunk {} for {}", chunk_index, &version_file.relative_filename ); + log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking); - reader.consume(length); chunk_index += 1; + + if file_empty { + break; + } } chunks.insert(version_file.relative_filename, chunk_data); @@ -105,7 +124,6 @@ Ok(json!(chunks).to_string()), ThreadsafeFunctionCallMode::Blocking, ); - }); Ok(()) }