Mirror of https://github.com/Drop-OSS/droplet.git, synced 2025-11-13 00:02:46 +10:00

Compare commits (10 commits)

96c1b15de7
bd6d7060fd
0431eebaa7
e66a6581cb
817c3cf503
0d01809fd0
ba35ca9a14
ae4648845e
bd30464a08
c67cca4ee0
Cargo.lock (generated, 893 changed lines): file diff suppressed because it is too large.

Cargo.toml (17 changed lines):
@@ -9,14 +9,9 @@ crate-type = ["cdylib"]
 [dependencies]
 # Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
-napi = { version = "3.0.0-beta.11", default-features = false, features = [
-  "napi6",
-  "async",
-  "web_stream",
-] }
+napi = { version = "3.0.0-beta.11", default-features = false, features = ["napi6", "async", "web_stream", "error_anyhow"] }
 napi-derive = "3.0.0-beta.11"
 hex = "0.4.3"
-serde_json = "1.0.128"
 md5 = "0.7.0"
 time-macros = "0.2.22"
 time = "0.3.41"
@@ -24,12 +19,12 @@ webpki = "0.22.4"
 ring = "0.17.14"
 tokio = { version = "1.45.1", features = ["fs", "io-util"] }
 tokio-util = { version = "0.7.15", features = ["codec"] }
-rawzip = "0.3.0"
 dyn-clone = "1.0.20"
-flate2 = "1.1.2"
-
-[package.metadata.patch]
-crates = ["rawzip"]
+rhai = "1.22.2"
+# mlua = { version = "0.11.2", features = ["luajit"] }
+boa_engine = "0.20.0"
+serde_json = "1.0.143"
+anyhow = "1.0.99"
 
 [dependencies.x509-parser]
 version = "0.17.0"
__test__/… (test spec; file name not preserved in the mirror):

@@ -93,3 +93,47 @@ test.skip("performance test", async (t) => {
   fs.rmSync(dirName, { recursive: true });
 });
 
+test("special characters", async (t) => {
+  // Setup test dir
+  const dirName = "./.test/sc";
+  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
+  fs.mkdirSync(dirName, { recursive: true });
+
+  // Config
+  const fileNames = ["Technická podpora.rtf", "Servicio técnico.rtf"];
+
+  for (let i = 0; i < fileNames.length; i++) {
+    const fileName = path.join(dirName, fileNames[i]);
+    fs.writeFileSync(fileName, i.toString());
+  }
+
+  const dropletHandler = new DropletHandler();
+
+  const manifest = JSON.parse(
+    await new Promise((r, e) =>
+      generateManifest(
+        dropletHandler,
+        dirName,
+        (_, __) => {},
+        (_, __) => {},
+        (err, manifest) => (err ? e(err) : r(manifest))
+      )
+    )
+  );
+
+  // Check the first few checksums
+  const checksums = [
+    "cfcd208495d565ef66e7dff9f98764da",
+    "c4ca4238a0b923820dcc509a6f75849b",
+  ];
+  for (let index in checksums) {
+    const entry = manifest[fileNames[index]];
+    if (!entry) return t.fail(`manifest missing file ${index}`);
+
+    const checksum = entry.checksums[0];
+    t.is(checksum, checksums[index], `checksums do not match for ${index}`);
+  }
+
+  fs.rmSync(dirName, { recursive: true });
+});
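A note on those two constants: each generated file contains just its index as a string ("0" and "1" via i.toString()), so the expected first-chunk checksums are the md5 digests of those one-byte strings. A quick way to confirm with node:crypto (illustrative snippet, not part of the suite):

import { createHash } from "node:crypto";

console.log(createHash("md5").update("0").digest("hex")); // cfcd208495d565ef66e7dff9f98764da
console.log(createHash("md5").update("1").digest("hex")); // c4ca4238a0b923820dcc509a6f75849b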
__test__/script.spec.mjs (new file, 62 lines):
@@ -0,0 +1,62 @@
+import test from "ava";
+import { ScriptEngine } from "../index.js";
+
+test.skip("lua syntax fail", (t) => {
+  const scriptEngine = new ScriptEngine();
+
+  const luaIshCode = `
+  print("hello world);
+  `;
+
+  try {
+    const script = scriptEngine.buildLuaScript(luaIshCode);
+  } catch {
+    return t.pass();
+  }
+  t.fail();
+});
+
+test("js syntax fail", (t) => {
+  const scriptEngine = new ScriptEngine();
+
+  const jsIshCode = `
+  const v = "hello world;
+  `;
+
+  try {
+    const script = scriptEngine.buildJsScript(jsIshCode);
+  } catch {
+    return t.pass();
+  }
+  t.fail();
+});
+
+test("js", (t) => {
+  const scriptEngine = new ScriptEngine();
+
+  const jsModule = `
+  const v = "1" + "2";
+  ["1", "2", "3", v]
+  `;
+
+  const script = scriptEngine.buildJsScript(jsModule);
+
+  scriptEngine.fetchStrings(script);
+
+  t.pass();
+});
+
+test.skip("lua", (t) => {
+  const scriptEngine = new ScriptEngine();
+
+  const luaModule = `
+  local arr = {"1", "2"};
+  return arr;
+  `;
+
+  const script = scriptEngine.buildLuaScript(luaModule);
+
+  scriptEngine.fetchStrings(script);
+
+  t.pass();
+});
__test__/… (test spec; file name not preserved in the mirror):

@@ -1,6 +1,7 @@
 import test from "ava";
 import fs from "node:fs";
 import path from "path";
+import { createHash } from "node:crypto";
 import prettyBytes from "pretty-bytes";
 
 import droplet, { DropletHandler, generateManifest } from "../index.js";
@@ -57,7 +58,12 @@ test("read file", async (t) => {
 
   const dropletHandler = new DropletHandler();
 
-  const stream = dropletHandler.readFile(dirName, "TESTFILE", BigInt(0), BigInt(testString.length));
+  const stream = dropletHandler.readFile(
+    dirName,
+    "TESTFILE",
+    BigInt(0),
+    BigInt(testString.length)
+  );
 
   let finalString = "";
 
@@ -157,6 +163,7 @@ test.skip("zip manifest test", async (t) => {
   for (const [filename, data] of Object.entries(manifest)) {
     let start = 0;
     for (const [chunkIndex, length] of data.lengths.entries()) {
+      const hash = createHash("md5");
       const stream = (
         await dropletHandler.readFile(
           "./assets/TheGame.zip",
@@ -171,6 +178,7 @@ test.skip("zip manifest test", async (t) => {
         new WritableStream({
           write(chunk) {
             streamLength += chunk.length;
+            hash.update(chunk);
           },
         })
       );
@@ -180,9 +188,33 @@ test.skip("zip manifest test", async (t) => {
         `stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
       );
 
+      const digest = hash.digest("hex");
+      if (data.checksums[chunkIndex] != digest)
+        return t.fail(
+          `checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
+        );
+
       start += length;
     }
   }
+
   t.pass();
 });
+
+test.skip("partially compress zip test", async (t) => {
+  const dropletHandler = new DropletHandler();
+
+  const manifest = JSON.parse(
+    await new Promise((r, e) =>
+      generateManifest(
+        dropletHandler,
+        "./assets/my horror game.zip",
+        (_, __) => {},
+        (_, __) => {},
+        (err, manifest) => (err ? e(err) : r(manifest))
+      )
+    )
+  );
+
+  return t.pass();
+});
index.d.ts (vendored, 12 changed lines):
@@ -15,6 +15,18 @@ export declare class JsDropStreamable {
   getStream(): any
 }
 
+export declare class Script {
+
+}
+
+export declare class ScriptEngine {
+  constructor()
+  buildRhaiScript(content: string): Script
+  buildJsScript(content: string): Script
+  execute(script: Script): void
+  fetchStrings(script: Script): Array<string>
+}
+
 export declare function callAltThreadFunc(tsfn: ((err: Error | null, ) => any)): void
 
 export declare function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string>
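Taken together, the new typings suggest a call pattern like the sketch below (assuming a local build of the binding; the script body is lifted from the "js" test above, and src/script/mod.rs below shows that fetchStrings evaluates the script and deserializes its final expression):

import { ScriptEngine } from "@drop-oss/droplet";

const engine = new ScriptEngine();

// buildJsScript parses without evaluating, so malformed source throws here.
const script = engine.buildJsScript(`
  const v = "1" + "2";
  ["1", "2", "3", v]
`);

// Evaluates the script and returns its final expression as strings;
// with the source above this should yield ["1", "2", "3", "12"].
const strings = engine.fetchStrings(script);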
index.js (2 changed lines):
@@ -378,6 +378,8 @@ if (!nativeBinding) {
 module.exports = nativeBinding
 module.exports.DropletHandler = nativeBinding.DropletHandler
 module.exports.JsDropStreamable = nativeBinding.JsDropStreamable
+module.exports.Script = nativeBinding.Script
+module.exports.ScriptEngine = nativeBinding.ScriptEngine
 module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc
 module.exports.generateClientCertificate = nativeBinding.generateClientCertificate
 module.exports.generateManifest = nativeBinding.generateManifest
package.json:

@@ -1,6 +1,6 @@
 {
   "name": "@drop-oss/droplet",
-  "version": "2.2.0",
+  "version": "3.1.0",
   "main": "index.js",
   "types": "index.d.ts",
   "napi": {
@@ -46,7 +46,7 @@
     "build": "napi build --platform --release",
     "build:debug": "napi build --platform",
     "prepublishOnly": "napi prepublish -t npm",
-    "test": "ava",
+    "test": "ava ",
     "universal": "napi universalize",
     "version": "napi version"
   },
src/lib.rs:

@@ -1,9 +1,14 @@
-#![deny(clippy::all)]
+#![deny(clippy::unwrap_used)]
+#![deny(clippy::expect_used)]
+#![deny(clippy::panic)]
 #![feature(trait_alias)]
+#![feature(iterator_try_collect)]
+
 
 pub mod manifest;
+pub mod script;
 pub mod ssl;
 pub mod version;
 
 #[macro_use]
 extern crate napi_derive;
src/manifest.rs (129 changed lines):
@@ -35,7 +35,7 @@ pub fn generate_manifest<'a>(
     progress_sfn: ThreadsafeFunction<i32>,
     log_sfn: ThreadsafeFunction<String>,
     callback_sfn: ThreadsafeFunction<String>,
-) -> Result<()> {
+) -> anyhow::Result<()> {
     let backend: &mut Box<dyn VersionBackend + Send> = droplet_handler
         .create_backend_for_path(dir)
         .ok_or(napi::Error::from_reason(
@@ -48,86 +48,97 @@ pub fn generate_manifest<'a>(
     let backend: &'static mut Box<dyn VersionBackend + Send> =
         unsafe { std::mem::transmute(backend) };
 
+    let required_single_file = backend.require_whole_files();
+
     thread::spawn(move || {
-        let files = backend.list_files();
-
-        // Filepath to chunk data
-        let mut chunks: HashMap<String, ChunkData> = HashMap::new();
-
-        let total: i32 = files.len() as i32;
-        let mut i: i32 = 0;
-
-        let mut buf = [0u8; 1024 * 16];
-
-        for version_file in files {
-            let mut reader = backend.reader(&version_file, 0, 0).unwrap();
-
-            let mut chunk_data = ChunkData {
-                permissions: version_file.permission,
-                ids: Vec::new(),
-                checksums: Vec::new(),
-                lengths: Vec::new(),
-            };
-
-            let mut chunk_index = 0;
-            loop {
-                let mut length = 0;
-                let mut buffer: Vec<u8> = Vec::new();
-                let mut file_empty = false;
-
-                loop {
-                    let read = reader.read(&mut buf).unwrap();
-                    length += read;
-
-                    if length >= CHUNK_SIZE {
-                        break;
-                    }
-
-                    // If we're out of data, add this chunk and then move onto the next file
-                    if read == 0 {
-                        file_empty = true;
-                        break;
-                    }
-
-                    buffer.extend_from_slice(&buf[..read]);
-                }
-
-                println!("created chunk of size {}", length);
-
-                let chunk_id = Uuid::new_v4();
-                let checksum = md5::compute(buffer).0;
-                let checksum_string = hex::encode(checksum);
-
-                chunk_data.ids.push(chunk_id.to_string());
-                chunk_data.checksums.push(checksum_string);
-                chunk_data.lengths.push(length);
-
-                let log_str = format!(
-                    "Processed chunk {} for {}",
-                    chunk_index, &version_file.relative_filename
-                );
-
-                log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
-
-                chunk_index += 1;
-
-                if file_empty {
-                    break;
-                }
-            }
-
-            chunks.insert(version_file.relative_filename, chunk_data);
-
-            i += 1;
-            let progress = i * 100 / total;
-            progress_sfn.call(Ok(progress), ThreadsafeFunctionCallMode::Blocking);
-        }
-
-        callback_sfn.call(
-            Ok(json!(chunks).to_string()),
-            ThreadsafeFunctionCallMode::Blocking,
-        );
+        let callback_borrow = &callback_sfn;
+
+        let mut inner = move || -> Result<()> {
+            let files = backend.list_files()?;
+
+            // Filepath to chunk data
+            let mut chunks: HashMap<String, ChunkData> = HashMap::new();
+
+            let total: i32 = files.len() as i32;
+            let mut i: i32 = 0;
+
+            let mut buf = [0u8; 1024 * 16];
+
+            for version_file in files {
+                let mut reader = backend.reader(&version_file, 0, 0)?;
+
+                let mut chunk_data = ChunkData {
+                    permissions: version_file.permission,
+                    ids: Vec::new(),
+                    checksums: Vec::new(),
+                    lengths: Vec::new(),
+                };
+
+                let mut chunk_index = 0;
+                loop {
+                    let mut length = 0;
+                    let mut buffer: Vec<u8> = Vec::new();
+                    let mut file_empty = false;
+
+                    loop {
+                        let read = reader.read(&mut buf)?;
+
+                        length += read;
+
+                        // If we're out of data, add this chunk and then move onto the next file
+                        if read == 0 {
+                            file_empty = true;
+                            break;
+                        }
+
+                        buffer.extend_from_slice(&buf[0..read]);
+
+                        if length >= CHUNK_SIZE && !required_single_file {
+                            break;
+                        }
+                    }
+
+                    let chunk_id = Uuid::new_v4();
+                    let checksum = md5::compute(buffer).0;
+                    let checksum_string = hex::encode(checksum);
+
+                    chunk_data.ids.push(chunk_id.to_string());
+                    chunk_data.checksums.push(checksum_string);
+                    chunk_data.lengths.push(length);
+
+                    let log_str = format!(
+                        "Processed chunk {} for {}",
+                        chunk_index, &version_file.relative_filename
+                    );
+
+                    log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
+
+                    chunk_index += 1;
+
+                    if file_empty {
+                        break;
+                    }
+                }
+
+                chunks.insert(version_file.relative_filename, chunk_data);
+
+                i += 1;
+                let progress = i * 100 / total;
+                progress_sfn.call(Ok(progress), ThreadsafeFunctionCallMode::Blocking);
+            }
+
+            callback_borrow.call(
+                Ok(json!(chunks).to_string()),
+                ThreadsafeFunctionCallMode::Blocking,
+            );
+
+            Ok(())
+        };
+
+        let result = inner();
+        if let Err(generate_err) = result {
+            callback_borrow.call(Err(generate_err), ThreadsafeFunctionCallMode::Blocking);
+        }
     });
 
     Ok(())
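Because the worker body now lives in the inner closure, any error it returns is handed to callback_sfn instead of panicking the spawned thread. On the JS side, the promise wrapper the tests already use therefore rejects instead of hanging; a minimal sketch of that wiring (the directory path is hypothetical):

import { DropletHandler, generateManifest } from "@drop-oss/droplet";

function manifestFor(handler, path) {
  return new Promise((resolve, reject) =>
    generateManifest(
      handler,
      path,
      (_err, _progress) => {}, // progress callback, receives i * 100 / total
      (_err, _line) => {},     // log callback, receives "Processed chunk …" lines
      // The final callback now carries generation errors as well as the manifest JSON.
      (err, manifest) => (err ? reject(err) : resolve(JSON.parse(manifest)))
    )
  );
}

const manifest = await manifestFor(new DropletHandler(), "./some/dir");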
src/script/mod.rs (new file, 133 lines):
@@ -0,0 +1,133 @@
+use boa_engine::{Context, JsValue, Source};
+// use mlua::{FromLuaMulti, Function, Lua};
+use napi::Result;
+use rhai::AST;
+
+pub enum ScriptType {
+    Rhai,
+    Lua,
+    Javascript,
+}
+
+#[napi]
+pub struct Script(ScriptInner);
+
+pub enum ScriptInner {
+    Rhai { script: AST },
+    // Lua { script: Function },
+    Javascript { script: boa_engine::Script },
+}
+
+#[napi]
+pub struct ScriptEngine {
+    rhai_engine: rhai::Engine,
+    // lua_engine: Lua,
+    js_engine: Context,
+}
+
+#[napi]
+impl ScriptEngine {
+    #[napi(constructor)]
+    pub fn new() -> Self {
+        ScriptEngine {
+            rhai_engine: rhai::Engine::new(),
+            // lua_engine: Lua::new(),
+            js_engine: Context::default(),
+        }
+    }
+
+    #[napi]
+    pub fn build_rhai_script(&self, content: String) -> Result<Script> {
+        let script = self
+            .rhai_engine
+            .compile(content.clone())
+            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+        Ok(Script(ScriptInner::Rhai { script }))
+    }
+
+    /*
+    #[napi]
+    pub fn build_lua_script(&self, content: String) -> Result<Script> {
+        let func = self
+            .lua_engine
+            .load(content.clone())
+            .into_function()
+            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+        Ok(Script(ScriptInner::Lua { script: func }))
+    }
+    */
+
+    #[napi]
+    pub fn build_js_script(&mut self, content: String) -> Result<Script> {
+        let source = Source::from_bytes(content.as_bytes());
+        let script = boa_engine::Script::parse(source, None, &mut self.js_engine)
+            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+
+        Ok(Script(ScriptInner::Javascript { script }))
+    }
+
+    fn execute_rhai_script<T>(&self, ast: &AST) -> Result<T>
+    where
+        T: Clone + 'static,
+    {
+        let v = self
+            .rhai_engine
+            .eval_ast::<T>(ast)
+            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+
+        Ok(v)
+    }
+
+    /*
+    fn execute_lua_script<T>(&self, function: &Function) -> Result<T>
+    where
+        T: FromLuaMulti,
+    {
+        let v = function
+            .call::<T>(())
+            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+        Ok(v)
+    }
+    */
+
+    fn execute_js_script(&mut self, func: &boa_engine::Script) -> Result<JsValue> {
+        let v = func
+            .evaluate(&mut self.js_engine)
+            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+
+        Ok(v)
+    }
+
+    #[napi]
+    pub fn execute(&mut self, script: &mut Script) -> Result<()> {
+        match &script.0 {
+            ScriptInner::Rhai { script } => {
+                self.execute_rhai_script::<()>(script)?;
+            }
+            /*ScriptInner::Lua { script } => {
+                self.execute_lua_script::<()>(script)?;
+            }*/
+            ScriptInner::Javascript { script } => {
+                self.execute_js_script(script)?;
+            }
+        };
+        Ok(())
+    }
+
+    #[napi]
+    pub fn fetch_strings(&mut self, script: &mut Script) -> Result<Vec<String>> {
+        Ok(match &script.0 {
+            ScriptInner::Rhai { script } => self.execute_rhai_script(script)?,
+            //ScriptInner::Lua { script } => self.execute_lua_script(script)?,
+            ScriptInner::Javascript { script } => {
+                let v = self.execute_js_script(script)?;
+
+                serde_json::from_value(
+                    v.to_json(&mut self.js_engine)
+                        .map_err(|e| napi::Error::from_reason(e.to_string()))?,
+                )
+                .map_err(|e| napi::Error::from_reason(e.to_string()))?
+            }
+        })
+    }
+}
src/ssl.rs (60 changed lines):
@@ -1,4 +1,4 @@
-use napi::Error;
+use anyhow::anyhow;
 use rcgen::{
     CertificateParams, DistinguishedName, IsCa, KeyPair, KeyUsagePurpose, PublicKeyData,
     SubjectPublicKeyInfo,
@@ -10,7 +10,7 @@ use x509_parser::parse_x509_certificate;
 use x509_parser::pem::Pem;
 
 #[napi]
-pub fn generate_root_ca() -> Result<Vec<String>, Error> {
+pub fn generate_root_ca() -> anyhow::Result<Vec<String>> {
     let mut params = CertificateParams::default();
 
     let mut name = DistinguishedName::new();
@@ -22,7 +22,7 @@ pub fn generate_root_ca() -> Result<Vec<String>, Error> {
     params.not_before = OffsetDateTime::now_utc();
     params.not_after = OffsetDateTime::now_utc()
         .checked_add(Duration::days(365 * 1000))
-        .unwrap();
+        .ok_or(anyhow!("failed to calculate end date"))?;
 
     params.is_ca = IsCa::Ca(rcgen::BasicConstraints::Unconstrained);
 
@@ -32,9 +32,8 @@ pub fn generate_root_ca() -> Result<Vec<String>, Error> {
         KeyUsagePurpose::DigitalSignature,
     ];
 
-    let key_pair = KeyPair::generate().map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let certificate = CertificateParams::self_signed(params, &key_pair)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+    let key_pair = KeyPair::generate()?;
+    let certificate = CertificateParams::self_signed(params, &key_pair)?;
 
     // Returns certificate, then private key
     Ok(vec![certificate.pem(), key_pair.serialize_pem()])
@@ -46,13 +45,10 @@ pub fn generate_client_certificate(
     _client_name: String,
     root_ca: String,
     root_ca_private: String,
-) -> Result<Vec<String>, Error> {
-    let root_key_pair =
-        KeyPair::from_pem(&root_ca_private).map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let certificate_params = CertificateParams::from_ca_cert_pem(&root_ca)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let root_ca = CertificateParams::self_signed(certificate_params, &root_key_pair)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+) -> anyhow::Result<Vec<String>> {
+    let root_key_pair = KeyPair::from_pem(&root_ca_private)?;
+    let certificate_params = CertificateParams::from_ca_cert_pem(&root_ca)?;
+    let root_ca = CertificateParams::self_signed(certificate_params, &root_key_pair)?;
 
     let mut params = CertificateParams::default();
 
@@ -66,28 +62,24 @@ pub fn generate_client_certificate(
         KeyUsagePurpose::DataEncipherment,
     ];
 
-    let key_pair = KeyPair::generate_for(&rcgen::PKCS_ECDSA_P384_SHA384)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let certificate = CertificateParams::signed_by(params, &key_pair, &root_ca, &root_key_pair)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+    let key_pair = KeyPair::generate_for(&rcgen::PKCS_ECDSA_P384_SHA384)?;
+    let certificate = CertificateParams::signed_by(params, &key_pair, &root_ca, &root_key_pair)?;
 
     // Returns certificate, then private key
     Ok(vec![certificate.pem(), key_pair.serialize_pem()])
 }
 
 #[napi]
-pub fn verify_client_certificate(client_cert: String, root_ca: String) -> Result<bool, Error> {
+pub fn verify_client_certificate(client_cert: String, root_ca: String) -> anyhow::Result<bool> {
     let root_ca = Pem::iter_from_buffer(root_ca.as_bytes())
         .next()
-        .unwrap()
-        .unwrap();
-    let root_ca = root_ca.parse_x509().unwrap();
+        .ok_or(anyhow!("no certificates in root ca"))??;
+    let root_ca = root_ca.parse_x509()?;
 
     let client_cert = Pem::iter_from_buffer(client_cert.as_bytes())
         .next()
-        .unwrap()
-        .unwrap();
-    let client_cert = client_cert.parse_x509().unwrap();
+        .ok_or(anyhow!("No client certs in chain."))??;
+    let client_cert = client_cert.parse_x509()?;
 
     let valid = root_ca
         .verify_signature(Some(client_cert.public_key()))
@@ -97,31 +89,33 @@ pub fn verify_client_certificate(client_cert: String, root_ca: String) -> Result<bool, Error> {
 }
 
 #[napi]
-pub fn sign_nonce(private_key: String, nonce: String) -> Result<String, Error> {
+pub fn sign_nonce(private_key: String, nonce: String) -> anyhow::Result<String> {
     let rng = SystemRandom::new();
 
-    let key_pair = KeyPair::from_pem(&private_key).unwrap();
+    let key_pair = KeyPair::from_pem(&private_key)?;
 
     let key_pair = EcdsaKeyPair::from_pkcs8(
         &ring::signature::ECDSA_P384_SHA384_FIXED_SIGNING,
         &key_pair.serialize_der(),
         &rng,
     )
-    .unwrap();
+    .map_err(|e| napi::Error::from_reason(e.to_string()))?;
 
-    let signature = key_pair.sign(&rng, nonce.as_bytes()).unwrap();
+    let signature = key_pair
+        .sign(&rng, nonce.as_bytes())
+        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
     let hex_signature = hex::encode(signature);
 
     Ok(hex_signature)
 }
 
 #[napi]
-pub fn verify_nonce(public_cert: String, nonce: String, signature: String) -> Result<bool, Error> {
+pub fn verify_nonce(public_cert: String, nonce: String, signature: String) -> anyhow::Result<bool> {
-    let (_, pem) = x509_parser::pem::parse_x509_pem(public_cert.as_bytes()).unwrap();
-    let (_, spki) = parse_x509_certificate(&pem.contents).unwrap();
-    let public_key = SubjectPublicKeyInfo::from_der(spki.public_key().raw).unwrap();
+    let (_, pem) = x509_parser::pem::parse_x509_pem(public_cert.as_bytes())?;
+    let (_, spki) = parse_x509_certificate(&pem.contents)?;
+    let public_key = SubjectPublicKeyInfo::from_der(spki.public_key().raw)?;
 
-    let raw_signature = hex::decode(signature).unwrap();
+    let raw_signature = hex::decode(signature)?;
 
     let valid = ring::signature::ECDSA_P384_SHA384_FIXED
         .verify(
|
|||||||
#[cfg(unix)]
|
#[cfg(unix)]
|
||||||
use std::os::unix::fs::PermissionsExt;
|
use std::os::unix::fs::PermissionsExt;
|
||||||
use std::{
|
use std::{
|
||||||
|
cell::LazyCell,
|
||||||
fs::{self, metadata, File},
|
fs::{self, metadata, File},
|
||||||
io::{self, Read, Seek, SeekFrom, Sink},
|
io::{self, BufRead, BufReader, Read, Seek, SeekFrom, Sink},
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
sync::Arc,
|
process::{Child, ChildStdout, Command, Stdio},
|
||||||
|
sync::{Arc, LazyLock},
|
||||||
};
|
};
|
||||||
|
|
||||||
use flate2::read::DeflateDecoder;
|
use anyhow::anyhow;
|
||||||
use rawzip::{
|
|
||||||
FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry, ZipReader, RECOMMENDED_BUFFER_SIZE,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::version::types::{MinimumFileObject, VersionBackend, VersionFile};
|
use crate::version::types::{MinimumFileObject, VersionBackend, VersionFile};
|
||||||
|
|
||||||
pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
|
pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) -> napi::Result<()> {
|
||||||
if metadata(path).unwrap().is_dir() {
|
if metadata(path)?.is_dir() {
|
||||||
let paths = fs::read_dir(path).unwrap();
|
let paths = fs::read_dir(path)?;
|
||||||
for path_result in paths {
|
for path_result in paths {
|
||||||
let full_path = path_result.unwrap().path();
|
let full_path = path_result?.path();
|
||||||
if metadata(&full_path).unwrap().is_dir() {
|
if metadata(&full_path)?.is_dir() {
|
||||||
_list_files(vec, &full_path);
|
_list_files(vec, &full_path)?;
|
||||||
} else {
|
} else {
|
||||||
vec.push(full_path);
|
vec.push(full_path);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
@ -33,23 +34,26 @@ pub struct PathVersionBackend {
|
|||||||
pub base_dir: PathBuf,
|
pub base_dir: PathBuf,
|
||||||
}
|
}
|
||||||
impl VersionBackend for PathVersionBackend {
|
impl VersionBackend for PathVersionBackend {
|
||||||
fn list_files(&mut self) -> Vec<VersionFile> {
|
fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
|
||||||
let mut vec = Vec::new();
|
let mut vec = Vec::new();
|
||||||
_list_files(&mut vec, &self.base_dir);
|
_list_files(&mut vec, &self.base_dir)?;
|
||||||
|
|
||||||
let mut results = Vec::new();
|
let mut results = Vec::new();
|
||||||
|
|
||||||
for pathbuf in vec.iter() {
|
for pathbuf in vec.iter() {
|
||||||
let relative = pathbuf.strip_prefix(self.base_dir.clone()).unwrap();
|
let relative = pathbuf.strip_prefix(self.base_dir.clone())?;
|
||||||
|
|
||||||
results.push(
|
results.push(
|
||||||
self
|
self.peek_file(
|
||||||
.peek_file(relative.to_str().unwrap().to_owned())
|
relative
|
||||||
.unwrap(),
|
.to_str()
|
||||||
|
.ok_or(napi::Error::from_reason("Could not parse path"))?
|
||||||
|
.to_owned(),
|
||||||
|
)?,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
results
|
Ok(results)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn reader(
|
fn reader(
|
||||||
@ -57,28 +61,28 @@ impl VersionBackend for PathVersionBackend {
|
|||||||
file: &VersionFile,
|
file: &VersionFile,
|
||||||
start: u64,
|
start: u64,
|
||||||
end: u64,
|
end: u64,
|
||||||
) -> Option<Box<dyn MinimumFileObject + 'static>> {
|
) -> anyhow::Result<Box<dyn MinimumFileObject + 'static>> {
|
||||||
let mut file = File::open(self.base_dir.join(file.relative_filename.clone())).ok()?;
|
let mut file = File::open(self.base_dir.join(file.relative_filename.clone()))?;
|
||||||
|
|
||||||
if start != 0 {
|
if start != 0 {
|
||||||
file.seek(SeekFrom::Start(start)).ok()?;
|
file.seek(SeekFrom::Start(start))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
if end != 0 {
|
if end != 0 {
|
||||||
return Some(Box::new(file.take(end - start)));
|
return Ok(Box::new(file.take(end - start)));
|
||||||
}
|
}
|
||||||
|
|
||||||
return Some(Box::new(file));
|
Ok(Box::new(file))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn peek_file(&mut self, sub_path: String) -> Option<VersionFile> {
|
fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
|
||||||
let pathbuf = self.base_dir.join(sub_path.clone());
|
let pathbuf = self.base_dir.join(sub_path.clone());
|
||||||
if !pathbuf.exists() {
|
if !pathbuf.exists() {
|
||||||
return None;
|
return Err(anyhow!("Path doesn't exist."));
|
||||||
};
|
};
|
||||||
|
|
||||||
let file = File::open(pathbuf.clone()).unwrap();
|
let file = File::open(pathbuf.clone())?;
|
||||||
let metadata = file.try_clone().unwrap().metadata().unwrap();
|
let metadata = file.try_clone()?.metadata()?;
|
||||||
let permission_object = metadata.permissions();
|
let permission_object = metadata.permissions();
|
||||||
let permissions = {
|
let permissions = {
|
||||||
let perm: u32;
|
let perm: u32;
|
||||||
@ -93,112 +97,95 @@ impl VersionBackend for PathVersionBackend {
|
|||||||
perm
|
perm
|
||||||
};
|
};
|
||||||
|
|
||||||
Some(VersionFile {
|
Ok(VersionFile {
|
||||||
relative_filename: sub_path,
|
relative_filename: sub_path,
|
||||||
permission: permissions,
|
permission: permissions,
|
||||||
size: metadata.len(),
|
size: metadata.len(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn require_whole_files(&self) -> bool {
|
||||||
|
false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub static SEVEN_ZIP_INSTALLED: LazyLock<bool> =
|
||||||
|
LazyLock::new(|| Command::new("7z").output().is_ok());
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct ZipVersionBackend {
|
pub struct ZipVersionBackend {
|
||||||
archive: Arc<ZipArchive<FileReader>>,
|
path: String,
|
||||||
}
|
}
|
||||||
impl ZipVersionBackend {
|
impl ZipVersionBackend {
|
||||||
pub fn new(archive: File) -> Self {
|
pub fn new(path: PathBuf) -> anyhow::Result<Self> {
|
||||||
let archive = ZipArchive::from_file(archive, &mut [0u8; RECOMMENDED_BUFFER_SIZE]).unwrap();
|
Ok(Self {
|
||||||
Self {
|
path: path.to_str().expect("invalid utf path").to_owned(),
|
||||||
archive: Arc::new(archive),
|
})
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn new_entry<'archive>(
|
|
||||||
&self,
|
|
||||||
entry: ZipEntry<'archive, FileReader>,
|
|
||||||
start: u64,
|
|
||||||
end: u64,
|
|
||||||
) -> ZipFileWrapper<'archive> {
|
|
||||||
let mut deflater = DeflateDecoder::new(entry.reader());
|
|
||||||
if start != 0 {
|
|
||||||
io::copy(&mut (&mut deflater).take(start), &mut Sink::default()).unwrap();
|
|
||||||
}
|
|
||||||
ZipFileWrapper {
|
|
||||||
reader: deflater,
|
|
||||||
limit: (end - start) as usize,
|
|
||||||
current: 0,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct ZipFileWrapper<'archive> {
|
pub struct ZipFileWrapper {
|
||||||
reader: DeflateDecoder<ZipReader<'archive, FileReader>>,
|
command: Child,
|
||||||
limit: usize,
|
reader: BufReader<ChildStdout>
|
||||||
current: usize,
|
}
|
||||||
|
|
||||||
|
impl ZipFileWrapper {
|
||||||
|
pub fn new(mut command: Child) -> Self {
|
||||||
|
let stdout = command.stdout.take().expect("failed to access stdout of 7z");
|
||||||
|
let reader = BufReader::new(stdout);
|
||||||
|
ZipFileWrapper { command, reader }
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This read implemention is a result of debugging hell
|
* This read implemention is a result of debugging hell
|
||||||
* It should probably be replaced with a .take() call.
|
* It should probably be replaced with a .take() call.
|
||||||
*/
|
*/
|
||||||
impl<'a> Read for ZipFileWrapper<'a> {
|
impl Read for ZipFileWrapper {
|
||||||
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
|
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
|
||||||
let has_limit = self.limit != 0;
|
self.reader.read(buf)
|
||||||
|
|
||||||
// End this stream if the read is the right size
|
|
||||||
if has_limit {
|
|
||||||
if self.current >= self.limit {
|
|
||||||
return Ok(0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let read = self.reader.read(buf)?;
|
|
||||||
if self.limit != 0 {
|
|
||||||
self.current += read;
|
|
||||||
if self.current > self.limit {
|
|
||||||
let over = self.current - self.limit;
|
|
||||||
return Ok(read - over);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return Ok(read);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ZipVersionBackend {
|
impl Drop for ZipFileWrapper {
|
||||||
fn find_wayfinder(&mut self, filename: &str) -> Option<ZipArchiveEntryWayfinder> {
|
fn drop(&mut self) {
|
||||||
let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
|
self.command.wait().expect("failed to wait for 7z exit");
|
||||||
let mut entries = self.archive.entries(read_buffer);
|
}
|
||||||
let entry = loop {
|
|
||||||
if let Some(v) = entries.next_entry().unwrap() {
|
|
||||||
if v.file_path().try_normalize().unwrap().as_ref() == filename {
|
|
||||||
break Some(v);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
break None;
|
|
||||||
}
|
|
||||||
}?;
|
|
||||||
|
|
||||||
let wayfinder = entry.wayfinder();
|
|
||||||
|
|
||||||
Some(wayfinder)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl VersionBackend for ZipVersionBackend {
|
impl VersionBackend for ZipVersionBackend {
|
||||||
fn list_files(&mut self) -> Vec<VersionFile> {
|
fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
|
||||||
|
let mut list_command = Command::new("7z");
|
||||||
|
list_command.args(vec!["l", "-ba", &self.path]);
|
||||||
|
let result = list_command.output()?;
|
||||||
|
if !result.status.success() {
|
||||||
|
return Err(anyhow!(
|
||||||
|
"failed to list files: code {:?}",
|
||||||
|
result.status.code()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
let raw_result = String::from_utf8(result.stdout)?;
|
||||||
|
let files = raw_result.split("\n").filter(|v| v.len() > 0).map(|v| v.split(" ").filter(|v| v.len() > 0));
|
||||||
let mut results = Vec::new();
|
let mut results = Vec::new();
|
||||||
let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
|
|
||||||
let mut budget_iterator = self.archive.entries(read_buffer);
|
for mut file in files {
|
||||||
while let Some(entry) = budget_iterator.next_entry().unwrap() {
|
let (date, time, attrs, size, compress, name) = (
|
||||||
if entry.is_dir() {
|
file.next().unwrap(),
|
||||||
continue;
|
file.next().unwrap(),
|
||||||
}
|
file.next().unwrap(),
|
||||||
|
file.next().unwrap(),
|
||||||
|
file.next().unwrap(),
|
||||||
|
file.next().unwrap(),
|
||||||
|
);
|
||||||
|
println!("got line: {} {} {} {} {} {}", date, time, attrs, size, compress, name);
|
||||||
results.push(VersionFile {
|
results.push(VersionFile {
|
||||||
relative_filename: String::from(entry.file_path().try_normalize().unwrap()),
|
relative_filename: name.to_owned(),
|
||||||
permission: entry.mode().permissions(),
|
permission: 0,
|
||||||
size: entry.uncompressed_size_hint(),
|
size: size.parse().unwrap(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
results
|
|
||||||
|
Ok(results)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn reader(
|
fn reader(
|
||||||
@ -206,22 +193,24 @@ impl VersionBackend for ZipVersionBackend {
|
|||||||
file: &VersionFile,
|
file: &VersionFile,
|
||||||
start: u64,
|
start: u64,
|
||||||
end: u64,
|
end: u64,
|
||||||
) -> Option<Box<dyn MinimumFileObject + '_>> {
|
) -> anyhow::Result<Box<dyn MinimumFileObject + '_>> {
|
||||||
let wayfinder = self.find_wayfinder(&file.relative_filename)?;
|
let mut read_command = Command::new("7z");
|
||||||
let local_entry = self.archive.get_entry(wayfinder).unwrap();
|
read_command.args(vec!["e", "-so", &self.path, &file.relative_filename]);
|
||||||
|
let output = read_command.stdout(Stdio::piped()).spawn().expect("failed to spawn 7z");
|
||||||
let wrapper = self.new_entry(local_entry, start, end);
|
Ok(Box::new(ZipFileWrapper::new(output)))
|
||||||
|
|
||||||
Some(Box::new(wrapper))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn peek_file(&mut self, sub_path: String) -> Option<VersionFile> {
|
fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
|
||||||
let entry = self.find_wayfinder(&sub_path)?;
|
let files = self.list_files()?;
|
||||||
|
let file = files
|
||||||
|
.iter()
|
||||||
|
.find(|v| v.relative_filename == sub_path)
|
||||||
|
.expect("file not found");
|
||||||
|
|
||||||
Some(VersionFile {
|
Ok(file.clone())
|
||||||
relative_filename: sub_path,
|
}
|
||||||
permission: 0,
|
|
||||||
size: entry.uncompressed_size_hint(),
|
fn require_whole_files(&self) -> bool {
|
||||||
})
|
true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
src/version/types.rs:

@@ -1,6 +1,4 @@
-use std::{
-    fmt::Debug, io::Read
-};
+use std::{fmt::Debug, io::Read};
 
 use dyn_clone::DynClone;
 use tokio::io::{self, AsyncRead};
@@ -12,7 +10,7 @@ pub struct VersionFile {
     pub size: u64,
 }
 
 pub trait MinimumFileObject: Read + Send {}
 impl<T: Read + Send> MinimumFileObject for T {}
 
 // Intentionally not a generic, because of types in read_file
@@ -30,16 +28,28 @@ impl<'a> AsyncRead for ReadToAsyncRead<'a> {
     ) -> std::task::Poll<io::Result<()>> {
         let mut read_buf = [0u8; ASYNC_READ_BUFFER_SIZE];
         let read_size = ASYNC_READ_BUFFER_SIZE.min(buf.remaining());
-        let read = self.inner.read(&mut read_buf[0..read_size]).unwrap();
-        buf.put_slice(&read_buf[0..read]);
-        std::task::Poll::Ready(Ok(()))
+        match self.inner.read(&mut read_buf[0..read_size]) {
+            Ok(read) => {
+                buf.put_slice(&read_buf[0..read]);
+                std::task::Poll::Ready(Ok(()))
+            }
+            Err(err) => {
+                std::task::Poll::Ready(Err(err))
+            },
+        }
     }
 }
 
 pub trait VersionBackend: DynClone {
-    fn list_files(&mut self) -> Vec<VersionFile>;
-    fn peek_file(&mut self, sub_path: String) -> Option<VersionFile>;
-    fn reader(&mut self, file: &VersionFile, start: u64, end: u64) -> Option<Box<dyn MinimumFileObject + '_>>;
+    fn require_whole_files(&self) -> bool;
+    fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>>;
+    fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile>;
+    fn reader(
+        &mut self,
+        file: &VersionFile,
+        start: u64,
+        end: u64,
+    ) -> anyhow::Result<Box<dyn MinimumFileObject + '_>>;
 }
 
 dyn_clone::clone_trait_object!(VersionBackend);
src/version/… (file name not preserved in the mirror):

@@ -1,12 +1,16 @@
 use std::{
-    collections::HashMap, fs::File, path::Path
+    collections::HashMap,
+    fs::File,
+    path::Path,
+    process::{Command, ExitStatus},
 };
 
+use anyhow::anyhow;
 use napi::{bindgen_prelude::*, sys::napi_value__, tokio_stream::StreamExt};
 use tokio_util::codec::{BytesCodec, FramedRead};
 
 use crate::version::{
-    backends::{PathVersionBackend, ZipVersionBackend},
+    backends::{PathVersionBackend, ZipVersionBackend, SEVEN_ZIP_INSTALLED},
     types::{ReadToAsyncRead, VersionBackend, VersionFile},
 };
@@ -15,7 +19,7 @@ use crate::version::{
 */
 pub fn create_backend_constructor<'a>(
     path: &Path,
-) -> Option<Box<dyn FnOnce() -> Box<dyn VersionBackend + Send + 'a>>> {
+) -> Option<Box<dyn FnOnce() -> Result<Box<dyn VersionBackend + Send + 'a>>>> {
     if !path.exists() {
         return None;
     }
@@ -23,12 +27,21 @@ pub fn create_backend_constructor<'a>(
     let is_directory = path.is_dir();
     if is_directory {
         let base_dir = path.to_path_buf();
-        return Some(Box::new(move || Box::new(PathVersionBackend { base_dir })));
+        return Some(Box::new(move || {
+            Ok(Box::new(PathVersionBackend { base_dir }))
+        }));
     };
 
-    if path.to_string_lossy().ends_with(".zip") {
-        let f = File::open(path.to_path_buf()).unwrap();
-        return Some(Box::new(|| Box::new(ZipVersionBackend::new(f))));
+    if *SEVEN_ZIP_INSTALLED {
+        let mut test = Command::new("7z");
+        test.args(vec!["t", path.to_str().expect("invalid utf path")]);
+        let status = test.status().ok()?;
+        if status.code().unwrap_or(1) == 0 {
+            let buf = path.to_path_buf();
+            return Some(Box::new(move || {
+                Ok(Box::new(ZipVersionBackend::new(buf)?))
+            }));
+        }
     }
 
     None
@@ -58,10 +71,13 @@ impl<'a> DropletHandler<'a> {
         let fs_path = Path::new(&path);
         let constructor = create_backend_constructor(fs_path)?;
 
-        let existing_backend = self.backend_cache.entry(path).or_insert_with(|| {
-            let backend = constructor();
-            backend
-        });
+        let existing_backend = match self.backend_cache.entry(path) {
+            std::collections::hash_map::Entry::Occupied(occupied_entry) => occupied_entry.into_mut(),
+            std::collections::hash_map::Entry::Vacant(vacant_entry) => {
+                let backend = constructor().ok()?;
+                vacant_entry.insert(backend)
+            }
+        };
 
         Some(existing_backend)
     }
@@ -80,7 +96,7 @@ impl<'a> DropletHandler<'a> {
         let backend = self
             .create_backend_for_path(path)
             .ok_or(napi::Error::from_reason("No backend for path"))?;
-        let files = backend.list_files();
+        let files = backend.list_files()?;
         Ok(files.into_iter().map(|e| e.relative_filename).collect())
     }
 
@@ -90,11 +106,9 @@ impl<'a> DropletHandler<'a> {
             .create_backend_for_path(path)
             .ok_or(napi::Error::from_reason("No backend for path"))?;
 
-        let file = backend
-            .peek_file(sub_path)
-            .ok_or(napi::Error::from_reason("Can't find file to peek"))?;
+        let file = backend.peek_file(sub_path)?;
 
-        return Ok(file.size.try_into().unwrap());
+        Ok(file.size)
     }
 
     #[napi]
@@ -106,28 +120,24 @@ impl<'a> DropletHandler<'a> {
         env: Env,
         start: Option<BigInt>,
         end: Option<BigInt>,
-    ) -> Result<JsDropStreamable> {
+    ) -> anyhow::Result<JsDropStreamable> {
         let stream = reference.share_with(env, |handler| {
             let backend = handler
                 .create_backend_for_path(path)
-                .ok_or(napi::Error::from_reason("Failed to create backend."))?;
+                .ok_or(anyhow!("Failed to create backend."))?;
             let version_file = VersionFile {
                 relative_filename: sub_path,
                 permission: 0, // Shouldn't matter
                 size: 0, // Shouldn't matter
             };
             // Use `?` operator for cleaner error propagation from `Option`
-            let reader = backend
-                .reader(
-                    &version_file,
-                    start.map(|e| e.get_u64().1).unwrap_or(0),
-                    end.map(|e| e.get_u64().1).unwrap_or(0),
-                )
-                .ok_or(napi::Error::from_reason("Failed to create reader."))?;
+            let reader = backend.reader(
+                &version_file,
+                start.map(|e| e.get_u64().1).unwrap_or(0),
+                end.map(|e| e.get_u64().1).unwrap_or(0),
+            )?;
 
-            let async_reader = ReadToAsyncRead {
-                inner: reader,
-            };
+            let async_reader = ReadToAsyncRead { inner: reader };
 
             // Create a FramedRead stream with BytesCodec for chunking
             let stream = FramedRead::new(async_reader, BytesCodec::new())
@@ -137,12 +147,12 @@ impl<'a> DropletHandler<'a> {
                 // Apply Result::map to transform Ok(BytesMut) to Ok(Vec<u8>)
                 .map(|bytes| bytes.to_vec())
                 // Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
-                .map_err(|e| napi::Error::from(e)) // napi::Error implements From<tokio::io::Error>
+                .map_err(napi::Error::from) // napi::Error implements From<tokio::io::Error>
         });
         // Create the napi-rs ReadableStream from the tokio_stream::Stream
         // The unwrap() here means if stream creation fails, it will panic.
         // For a production system, consider returning Result<Option<...>> and handling this.
-        Ok(ReadableStream::create_with_stream_bytes(&env, stream).unwrap())
+        ReadableStream::create_with_stream_bytes(&env, stream)
     })?;
 
     Ok(JsDropStreamable { inner: stream })
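With backend selection now gated on a "7z t" probe rather than a ".zip" suffix, any archive 7-Zip can verify is served by the zip backend. A consumer-side sketch (archive path and member name are hypothetical; per src/version/backends.rs above, listing runs "7z l -ba" and reading streams "7z e -so" through ZipFileWrapper):

import { DropletHandler } from "@drop-oss/droplet";

const handler = new DropletHandler();

// start = end = 0 asks the backend for the whole file (see reader() above).
const streamable = await handler.readFile(
  "./assets/TheGame.zip", // any archive that `7z t` accepts
  "game/data.bin",        // hypothetical member path
  BigInt(0),
  BigInt(0)
);

await streamable.getStream().pipeTo(
  new WritableStream({
    write(chunk) {
      // consume decompressed bytes as 7z streams them to stdout
    },
  })
);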