3 Commits

Author SHA1 Message Date
ba35ca9a14 feat: start of scripting engine 2025-08-24 13:50:44 +10:00
ae4648845e feat: add support for partially deflated zips 2025-08-17 11:21:09 +10:00
bd30464a08 fix: manifest generation with multiple chunks 2025-08-15 21:56:33 +10:00
11 changed files with 1194 additions and 25 deletions

Cargo.lock (generated): 923 changes

File diff suppressed because it is too large.

Cargo.toml

@@ -16,7 +16,6 @@ napi = { version = "3.0.0-beta.11", default-features = false, features = [
] }
napi-derive = "3.0.0-beta.11"
hex = "0.4.3"
serde_json = "1.0.128"
md5 = "0.7.0"
time-macros = "0.2.22"
time = "0.3.41"
@@ -27,6 +26,10 @@ tokio-util = { version = "0.7.15", features = ["codec"] }
rawzip = "0.3.0"
dyn-clone = "1.0.20"
flate2 = "1.1.2"
rhai = "1.22.2"
mlua = { version = "0.11.2", features = ["luajit"] }
boa_engine = "0.20.0"
serde_json = "1.0.143"
[package.metadata.patch]
crates = ["rawzip"]

__test__/script.spec.mjs (new file, 62 lines)

@@ -0,0 +1,62 @@
import test from "ava";
import { ScriptEngine } from "../index.js";
test("lua syntax fail", (t) => {
const scriptEngine = new ScriptEngine();
const luaIshCode = `
print("hello world);
`;
try {
const script = scriptEngine.buildLuaScript(luaIshCode);
} catch {
return t.pass();
}
t.fail();
});
test("js syntax fail", (t) => {
const scriptEngine = new ScriptEngine();
const jsIshCode = `
const v = "hello world;
`;
try {
const script = scriptEngine.buildJsScript(jsIshCode);
} catch {
return t.pass();
}
t.fail();
});
test("js", (t) => {
const scriptEngine = new ScriptEngine();
const jsModule = `
const v = "1" + "2";
["1", "2", "3", v]
`;
const script = scriptEngine.buildJsScript(jsModule);
scriptEngine.fetchStrings(script);
t.pass();
});
test("lua", (t) => {
const scriptEngine = new ScriptEngine();
const luaModule = `
local arr = {"1", "2"};
return arr;
`;
const script = scriptEngine.buildLuaScript(luaModule);
scriptEngine.fetchStrings(script);
t.pass();
});


@@ -1,6 +1,7 @@
import test from "ava";
import fs from "node:fs";
import path from "path";
+import { createHash } from "node:crypto";
import prettyBytes from "pretty-bytes";
import droplet, { DropletHandler, generateManifest } from "../index.js";
@@ -57,7 +58,12 @@ test("read file", async (t) => {
const dropletHandler = new DropletHandler();
-const stream = dropletHandler.readFile(dirName, "TESTFILE", BigInt(0), BigInt(testString.length));
+const stream = dropletHandler.readFile(
+dirName,
+"TESTFILE",
+BigInt(0),
+BigInt(testString.length)
+);
let finalString = "";
@@ -157,6 +163,7 @@ test.skip("zip manifest test", async (t) => {
for (const [filename, data] of Object.entries(manifest)) {
let start = 0;
for (const [chunkIndex, length] of data.lengths.entries()) {
+const hash = createHash("md5");
const stream = (
await dropletHandler.readFile(
"./assets/TheGame.zip",
@@ -171,6 +178,7 @@ test.skip("zip manifest test", async (t) => {
new WritableStream({
write(chunk) {
streamLength += chunk.length;
+hash.update(chunk);
},
})
);
@@ -180,9 +188,33 @@ test.skip("zip manifest test", async (t) => {
`stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
);
+const digest = hash.digest("hex");
+if (data.checksums[chunkIndex] != digest)
+return t.fail(
+`checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
+);
start += length;
}
}
t.pass();
});
test.skip("partially compress zip test", async (t) => {
const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
"./assets/my horror game.zip",
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
)
);
return t.pass();
});

index.d.ts (vendored): 13 changes

@@ -15,6 +15,19 @@ export declare class JsDropStreamable {
getStream(): any
}
+export declare class Script {
+}
+export declare class ScriptEngine {
+constructor()
+buildRhaiScript(content: string): Script
+buildLuaScript(content: string): Script
+buildJsScript(content: string): Script
+execute(script: Script): void
+fetchStrings(script: Script): Array<string>
+}
export declare function callAltThreadFunc(tsfn: ((err: Error | null, ) => any)): void
export declare function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string>

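A minimal usage sketch of the ScriptEngine API declared above, written against the exported bindings; the script sources here are illustrative and not taken from the repository's tests:

import { ScriptEngine } from "@drop-oss/droplet";

const engine = new ScriptEngine();

// build* compiles the source up front and throws if it fails to parse.
const listing = engine.buildLuaScript(`return { "a.txt", "b.txt" }`);

// fetchStrings runs the script and returns its result as an array of strings.
const names = engine.fetchStrings(listing); // ["a.txt", "b.txt"]

// execute runs a script purely for its side effects and discards the result.
const setup = engine.buildJsScript(`const banner = "hello " + "world";`);
engine.execute(setup);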
index.js

@@ -378,6 +378,8 @@ if (!nativeBinding) {
module.exports = nativeBinding
module.exports.DropletHandler = nativeBinding.DropletHandler
module.exports.JsDropStreamable = nativeBinding.JsDropStreamable
+module.exports.Script = nativeBinding.Script
+module.exports.ScriptEngine = nativeBinding.ScriptEngine
module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc
module.exports.generateClientCertificate = nativeBinding.generateClientCertificate
module.exports.generateManifest = nativeBinding.generateManifest

package.json

@@ -1,6 +1,6 @@
{
"name": "@drop-oss/droplet",
"version": "2.2.1",
"version": "2.3.1",
"main": "index.js",
"types": "index.d.ts",
"napi": {

src/lib.rs

@@ -4,6 +4,7 @@
pub mod manifest;
pub mod ssl;
pub mod version;
+pub mod script;
#[macro_use]
extern crate napi_derive;


@@ -80,17 +80,17 @@ pub fn generate_manifest<'a>(
length += read;
-if length >= CHUNK_SIZE {
-break;
-}
// If we're out of data, add this chunk and then move onto the next file
if read == 0 {
file_empty = true;
break;
}
-buffer.extend_from_slice(&buf[..read]);
+buffer.extend_from_slice(&buf[0..read]);
+if length >= CHUNK_SIZE {
+break;
+}
}
let chunk_id = Uuid::new_v4();

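The hunk above appends the last read into the chunk buffer before checking the chunk-size limit, so files that span multiple chunks no longer lose the final read of each chunk. A rough consumer-side check of that behaviour might look like the sketch below; the fixture directory is hypothetical, and the manifest shape (per-file lengths and checksums) follows the tests earlier in this diff:

import fs from "node:fs";
import path from "node:path";
import { DropletHandler, generateManifest } from "@drop-oss/droplet";

const versionDir = "./assets/SomeGameDir"; // hypothetical multi-chunk fixture

const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
  await new Promise((resolve, reject) =>
    generateManifest(
      dropletHandler,
      versionDir,
      () => {},
      () => {},
      (err, json) => (err ? reject(err) : resolve(json))
    )
  )
);

// Each file's chunk lengths should sum to its size on disk,
// even when the file is split across several chunks.
for (const [filename, data] of Object.entries(manifest)) {
  const expected = fs.statSync(path.join(versionDir, filename)).size;
  const actual = data.lengths.reduce((sum, length) => sum + length, 0);
  if (actual !== expected) {
    throw new Error(`${filename}: chunks sum to ${actual}, expected ${expected}`);
  }
}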
src/script/mod.rs (new file, 128 lines)

@@ -0,0 +1,128 @@
use boa_engine::{Context, JsValue, Source};
use mlua::{FromLuaMulti, Function, Lua};
use napi::Result;
use rhai::AST;
pub enum ScriptType {
Rhai,
Lua,
Javascript,
}
#[napi]
pub struct Script(ScriptInner);
pub enum ScriptInner {
Rhai { script: AST },
Lua { script: Function },
Javascript { script: boa_engine::Script },
}
#[napi]
pub struct ScriptEngine {
rhai_engine: rhai::Engine,
lua_engine: Lua,
js_engine: Context,
}
#[napi]
impl ScriptEngine {
#[napi(constructor)]
pub fn new() -> Self {
ScriptEngine {
rhai_engine: rhai::Engine::new(),
lua_engine: Lua::new(),
js_engine: Context::default(),
}
}
#[napi]
pub fn build_rhai_script(&self, content: String) -> Result<Script> {
let script = self
.rhai_engine
.compile(content.clone())
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(Script(ScriptInner::Rhai { script }))
}
#[napi]
pub fn build_lua_script(&self, content: String) -> Result<Script> {
let func = self
.lua_engine
.load(content.clone())
.into_function()
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(Script(ScriptInner::Lua { script: func }))
}
#[napi]
pub fn build_js_script(&mut self, content: String) -> Result<Script> {
let source = Source::from_bytes(content.as_bytes());
let script = boa_engine::Script::parse(source, None, &mut self.js_engine)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(Script(ScriptInner::Javascript { script }))
}
fn execute_rhai_script<T>(&self, ast: &AST) -> Result<T>
where
T: Clone + 'static,
{
let v = self
.rhai_engine
.eval_ast::<T>(ast)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(v)
}
fn execute_lua_script<T>(&self, function: &Function) -> Result<T>
where
T: FromLuaMulti,
{
let v = function
.call::<T>(())
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(v)
}
fn execute_js_script(&mut self, func: &boa_engine::Script) -> Result<JsValue> {
let v = func
.evaluate(&mut self.js_engine)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(v)
}
#[napi]
pub fn execute(&mut self, script: &mut Script) -> Result<()> {
match &script.0 {
ScriptInner::Rhai { script } => {
self.execute_rhai_script::<()>(script)?;
}
ScriptInner::Lua { script } => {
self.execute_lua_script::<()>(script)?;
}
ScriptInner::Javascript { script } => {
self.execute_js_script(script)?;
}
};
Ok(())
}
#[napi]
pub fn fetch_strings(&mut self, script: &mut Script) -> Result<Vec<String>> {
Ok(match &script.0 {
ScriptInner::Rhai { script } => self.execute_rhai_script(script)?,
ScriptInner::Lua { script } => self.execute_lua_script(script)?,
ScriptInner::Javascript { script } => {
let v = self.execute_js_script(script)?;
serde_json::from_value(
v.to_json(&mut self.js_engine)
.map_err(|e| napi::Error::from_reason(e.to_string()))?,
).map_err(|e| napi::Error::from_reason(e.to_string()))?
}
})
}
}


@@ -9,7 +9,8 @@ use std::{
use flate2::read::DeflateDecoder;
use rawzip::{
-FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry, ZipReader, RECOMMENDED_BUFFER_SIZE,
+CompressionMethod, FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry,
+ZipVerifier, RECOMMENDED_BUFFER_SIZE,
};
use crate::version::types::{MinimumFileObject, VersionBackend, VersionFile};
@@ -116,15 +117,27 @@ impl ZipVersionBackend {
pub fn new_entry<'archive>(
&self,
entry: ZipEntry<'archive, FileReader>,
+compression_method: CompressionMethod,
start: u64,
end: u64,
) -> ZipFileWrapper<'archive> {
-let mut deflater = DeflateDecoder::new(entry.reader());
+let deflater: Box<dyn Read + Send + 'archive> = match compression_method {
+CompressionMethod::Store => Box::new(entry.reader()),
+CompressionMethod::Deflate => Box::new(DeflateDecoder::new(entry.reader())),
+CompressionMethod::Deflate64 => Box::new(DeflateDecoder::new(entry.reader())),
+_ => panic!(
+"unsupported decompression algorithm: {:?}",
+compression_method
+),
+};
+let mut verifier = entry.verifying_reader(deflater);
if start != 0 {
-io::copy(&mut (&mut deflater).take(start), &mut Sink::default()).unwrap();
+io::copy(&mut (&mut verifier).take(start), &mut Sink::default()).unwrap();
}
ZipFileWrapper {
-reader: deflater,
+reader: verifier,
limit: (end - start) as usize,
current: 0,
}
@@ -132,7 +145,7 @@ impl ZipVersionBackend {
}
pub struct ZipFileWrapper<'archive> {
-reader: DeflateDecoder<ZipReader<'archive, FileReader>>,
+reader: ZipVerifier<'archive, Box<dyn Read + Send + 'archive>, FileReader>,
limit: usize,
current: usize,
}
@@ -163,9 +176,13 @@ impl<'a> Read for ZipFileWrapper<'a> {
return Ok(read);
}
}
//impl<'a> MinimumFileObject for ZipFileWrapper<'a> {}
impl ZipVersionBackend {
-fn find_wayfinder(&mut self, filename: &str) -> Option<ZipArchiveEntryWayfinder> {
+fn find_wayfinder(
+&mut self,
+filename: &str,
+) -> Option<(ZipArchiveEntryWayfinder, CompressionMethod)> {
let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
let mut entries = self.archive.entries(read_buffer);
let entry = loop {
@@ -180,7 +197,7 @@ impl ZipVersionBackend {
let wayfinder = entry.wayfinder();
-Some(wayfinder)
+Some((wayfinder, entry.compression_method()))
}
}
impl VersionBackend for ZipVersionBackend {
@@ -207,16 +224,16 @@ impl VersionBackend for ZipVersionBackend {
start: u64,
end: u64,
) -> Option<Box<dyn MinimumFileObject + '_>> {
-let wayfinder = self.find_wayfinder(&file.relative_filename)?;
+let (wayfinder, compression_method) = self.find_wayfinder(&file.relative_filename)?;
let local_entry = self.archive.get_entry(wayfinder).unwrap();
-let wrapper = self.new_entry(local_entry, start, end);
+let wrapper = self.new_entry(local_entry, compression_method, start, end);
-Some(Box::new(wrapper))
+Some(Box::new(wrapper) as Box<dyn MinimumFileObject>)
}
fn peek_file(&mut self, sub_path: String) -> Option<VersionFile> {
-let entry = self.find_wayfinder(&sub_path)?;
+let (entry, _) = self.find_wayfinder(&sub_path)?;
Some(VersionFile {
relative_filename: sub_path,