Mirror of https://github.com/Drop-OSS/droplet.git (synced 2025-11-12 15:52:47 +10:00)

Compare commits — 47 commits
| SHA1 |
|---|
| 96c1b15de7 |
| bd6d7060fd |
| 0431eebaa7 |
| e66a6581cb |
| 817c3cf503 |
| 0d01809fd0 |
| ba35ca9a14 |
| ae4648845e |
| bd30464a08 |
| c67cca4ee0 |
| cae208a3e0 |
| 4276b9d668 |
| 4fb9bb7563 |
| 913dc2f58d |
| 7ec5e9f215 |
| b67a67d809 |
| 87b19a5c8c |
| dc3a420986 |
| 1665033fd9 |
| 2969d64c45 |
| e525ff44bb |
| 52a685391a |
| 535d5a4062 |
| 450734f5c9 |
| 20e2eda381 |
| 04d3f2dd8c |
| 59ca57ee1b |
| 8f4b2a6c6d |
| 7c3e6fe63c |
| 204902951e |
| b3011c517d |
| 74a54eb9ac |
| 89e94e3afd |
| 169d471bb7 |
| 076dc60155 |
| 48e5b97a4e |
| c1aaf8adcd |
| fe43f79062 |
| 30b9c4a1cc |
| 42f770aed9 |
| 4670df4127 |
| e33eaebe1a |
| f954f23410 |
| 3632687001 |
| 90817487ed |
| 98b84c64d4 |
| d3186cdd5f |
.github/workflows/CI.yml (vendored) — 34 lines changed

```diff
@@ -12,12 +12,9 @@ permissions:
       - main
     tags-ignore:
       - "**"
-    paths-ignore:
-      - "**/*.md"
-      - LICENSE
-      - "**/*.gitignore"
-      - .editorconfig
-      - docs/**
+    paths:
+      - package.json
+      - .github/workflows/*
   pull_request: null
 jobs:
   build:
@@ -46,15 +43,6 @@ jobs:
           target: aarch64-unknown-linux-gnu
           docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
           build: yarn build --target aarch64-unknown-linux-gnu
-        - host: ubuntu-latest
-          target: armv7-unknown-linux-gnueabihf
-          setup: |
-            sudo apt-get update
-            sudo apt-get install gcc-arm-linux-gnueabihf -y
-          build: yarn build --target armv7-unknown-linux-gnueabihf
-        - host: ubuntu-latest
-          target: armv7-unknown-linux-musleabihf
-          build: yarn build --target armv7-unknown-linux-musleabihf
         - host: ubuntu-latest
           target: aarch64-unknown-linux-musl
           docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
@@ -71,7 +59,7 @@ jobs:
            sudo apt-get update
            sudo apt-get install gcc-riscv64-linux-gnu -y
          build: yarn build --target riscv64gc-unknown-linux-gnu
-    name: stable - ${{ matrix.settings.target }} - node@20
+    name: nightly - ${{ matrix.settings.target }} - node@20
     runs-on: ${{ matrix.settings.host }}
     steps:
       - uses: actions/checkout@v4
@@ -82,7 +70,7 @@ jobs:
           node-version: 20
           cache: yarn
       - name: Install
-        uses: dtolnay/rust-toolchain@stable
+        uses: dtolnay/rust-toolchain@nightly
        if: ${{ !matrix.settings.docker }}
        with:
          toolchain: nightly
@@ -106,14 +94,20 @@ jobs:
        if: ${{ matrix.settings.setup }}
        shell: bash
      - name: Install dependencies
-        run: yarn install
+        run: |-
+          cargo install patch-crate &&
+          cargo patch-crate &&
+          yarn install
      - name: Build in docker
        uses: addnab/docker-run-action@v3
        if: ${{ matrix.settings.docker }}
        with:
          image: ${{ matrix.settings.docker }}
          options: "--user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build"
-          run: ${{ matrix.settings.build }}
+          run: |-
+            rustup default nightly &&
+            rustup target add ${{ matrix.settings.target }} &&
+            ${{ matrix.settings.build }}
      - name: Build
        run: ${{ matrix.settings.build }}
        if: ${{ !matrix.settings.docker }}
@@ -358,6 +352,8 @@ jobs:
        with:
          name: bindings-aarch64-apple-darwin
          path: artifacts
+      - name: Move artifacts
+        run: mv artifacts/* .
      - name: Combine binaries
        run: yarn universal
      - name: Upload artifact
```
.gitignore (vendored) — 6 lines changed

```diff
@@ -186,7 +186,6 @@ $RECYCLE.BIN/
 #Added by cargo
 
 /target
-Cargo.lock
 
 .pnp.*
 .yarn/*
@@ -201,4 +200,7 @@ test.mjs
 manifest.json
 
 # JetBrains
 .idea
+
+assets/*
+!assets/generate.sh
```
Cargo.lock (generated, new file) — 2103 lines. File diff suppressed because it is too large.
Cargo.toml — 17 lines changed

```diff
@@ -9,21 +9,22 @@ crate-type = ["cdylib"]
 
 [dependencies]
 # Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
-napi = { version = "3.0.0-alpha.33", default-features = false, features = [
-  "napi4",
-  "async",
-  "web_stream",
-] }
-napi-derive = "3.0.0-alpha.33"
+napi = { version = "3.0.0-beta.11", default-features = false, features = ["napi6", "async", "web_stream", "error_anyhow"] }
+napi-derive = "3.0.0-beta.11"
 hex = "0.4.3"
-serde_json = "1.0.128"
 md5 = "0.7.0"
 time-macros = "0.2.22"
 time = "0.3.41"
 webpki = "0.22.4"
 ring = "0.17.14"
-tokio = { version = "1.45.1", features = ["fs"] }
+tokio = { version = "1.45.1", features = ["fs", "io-util"] }
 tokio-util = { version = "0.7.15", features = ["codec"] }
+dyn-clone = "1.0.20"
+rhai = "1.22.2"
+# mlua = { version = "0.11.2", features = ["luajit"] }
+boa_engine = "0.20.0"
+serde_json = "1.0.143"
+anyhow = "1.0.99"
 
 [dependencies.x509-parser]
 version = "0.17.0"
```
__test__/debug.spec.mjs (new file) — 22 lines

```js
import test from "ava";
import { DropletHandler, generateManifest } from "../index.js";

test.skip("debug", async (t) => {
  const handler = new DropletHandler();

  console.log("created handler");

  const manifest = JSON.parse(
    await new Promise((r, e) =>
      generateManifest(
        handler,
        "./assets/TheGame.zip",
        (_, __) => {},
        (_, __) => {},
        (err, manifest) => (err ? e(err) : r(manifest))
      )
    )
  );

  return t.pass();
});
```
@ -2,7 +2,7 @@ import test from "ava";
|
|||||||
import fs from "node:fs";
|
import fs from "node:fs";
|
||||||
import path from "path";
|
import path from "path";
|
||||||
|
|
||||||
import { generateManifest, listFiles } from "../index.js";
|
import { DropletHandler, generateManifest } from "../index.js";
|
||||||
|
|
||||||
test("numerous small file", async (t) => {
|
test("numerous small file", async (t) => {
|
||||||
// Setup test dir
|
// Setup test dir
|
||||||
@ -18,9 +18,12 @@ test("numerous small file", async (t) => {
|
|||||||
fs.writeFileSync(fileName, i.toString());
|
fs.writeFileSync(fileName, i.toString());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const dropletHandler = new DropletHandler();
|
||||||
|
|
||||||
const manifest = JSON.parse(
|
const manifest = JSON.parse(
|
||||||
await new Promise((r, e) =>
|
await new Promise((r, e) =>
|
||||||
generateManifest(
|
generateManifest(
|
||||||
|
dropletHandler,
|
||||||
dirName,
|
dirName,
|
||||||
(_, __) => {},
|
(_, __) => {},
|
||||||
(_, __) => {},
|
(_, __) => {},
|
||||||
@ -51,5 +54,86 @@ test("numerous small file", async (t) => {
|
|||||||
t.is(entry.lengths[0], i.toString().length);
|
t.is(entry.lengths[0], i.toString().length);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fs.rmSync(dirName, { recursive: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
test.skip("performance test", async (t) => {
|
||||||
|
t.timeout(5 * 60 * 1000);
|
||||||
|
const dirName = "./.test/pt";
|
||||||
|
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
|
||||||
|
fs.mkdirSync(dirName, { recursive: true });
|
||||||
|
|
||||||
|
const fileSize = 1 * 1000 * 1000 * 1000; // 1GB
|
||||||
|
|
||||||
|
const randomStream = fs.createReadStream("/dev/random", {
|
||||||
|
start: 0,
|
||||||
|
end: fileSize,
|
||||||
|
});
|
||||||
|
const outputStream = fs.createWriteStream(path.join(dirName, "file.bin"));
|
||||||
|
await new Promise((r) => {
|
||||||
|
randomStream.pipe(outputStream);
|
||||||
|
randomStream.on("end", r);
|
||||||
|
});
|
||||||
|
|
||||||
|
const dropletHandler = new DropletHandler();
|
||||||
|
|
||||||
|
const start = Date.now();
|
||||||
|
await new Promise((r, e) =>
|
||||||
|
generateManifest(
|
||||||
|
dropletHandler,
|
||||||
|
dirName,
|
||||||
|
(_, __) => {},
|
||||||
|
(_, __) => {},
|
||||||
|
(err, manifest) => (err ? e(err) : r(manifest))
|
||||||
|
)
|
||||||
|
);
|
||||||
|
const end = Date.now();
|
||||||
|
|
||||||
|
t.pass(`Took ${end - start}ms to process ${fileSize / (1000 * 1000)}MB`);
|
||||||
|
|
||||||
|
fs.rmSync(dirName, { recursive: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
test("special characters", async (t) => {
|
||||||
|
// Setup test dir
|
||||||
|
const dirName = "./.test/sc";
|
||||||
|
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
|
||||||
|
fs.mkdirSync(dirName, { recursive: true });
|
||||||
|
|
||||||
|
// Config
|
||||||
|
const fileNames = ["Technická podpora.rtf", "Servicio técnico.rtf"];
|
||||||
|
|
||||||
|
for (let i = 0; i < fileNames.length; i++) {
|
||||||
|
const fileName = path.join(dirName, fileNames[i]);
|
||||||
|
fs.writeFileSync(fileName, i.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
const dropletHandler = new DropletHandler();
|
||||||
|
|
||||||
|
const manifest = JSON.parse(
|
||||||
|
await new Promise((r, e) =>
|
||||||
|
generateManifest(
|
||||||
|
dropletHandler,
|
||||||
|
dirName,
|
||||||
|
(_, __) => {},
|
||||||
|
(_, __) => {},
|
||||||
|
(err, manifest) => (err ? e(err) : r(manifest))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
// Check the first few checksums
|
||||||
|
const checksums = [
|
||||||
|
"cfcd208495d565ef66e7dff9f98764da",
|
||||||
|
"c4ca4238a0b923820dcc509a6f75849b",
|
||||||
|
];
|
||||||
|
for (let index in checksums) {
|
||||||
|
const entry = manifest[fileNames[index]];
|
||||||
|
if (!entry) return t.fail(`manifest missing file ${index}`);
|
||||||
|
|
||||||
|
const checksum = entry.checksums[0];
|
||||||
|
t.is(checksum, checksums[index], `checksums do not match for ${index}`);
|
||||||
|
}
|
||||||
|
|
||||||
fs.rmSync(dirName, { recursive: true });
|
fs.rmSync(dirName, { recursive: true });
|
||||||
});
|
});
|
||||||
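For reference, the assertions in these tests pin down the manifest shape: a JSON object keyed by relative filename, where each entry carries parallel `checksums` (md5 per chunk) and `lengths` arrays. A sketch with illustrative values (the `cfcd…` digest is the md5 of the one-byte file "0" written above; any other fields the backend may emit are not shown):

```js
// Illustrative parsed-manifest entry — not real generator output.
const manifest = {
  "Technická podpora.rtf": {
    checksums: ["cfcd208495d565ef66e7dff9f98764da"], // md5 of each chunk
    lengths: [1], // byte length of each chunk, parallel to `checksums`
  },
};
```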
__test__/script.spec.mjs (new file) — 62 lines

```js
import test from "ava";
import { ScriptEngine } from "../index.js";

test.skip("lua syntax fail", (t) => {
  const scriptEngine = new ScriptEngine();

  const luaIshCode = `
    print("hello world);
  `;

  try {
    const script = scriptEngine.buildLuaScript(luaIshCode);
  } catch {
    return t.pass();
  }
  t.fail();
});

test("js syntax fail", (t) => {
  const scriptEngine = new ScriptEngine();

  const jsIshCode = `
    const v = "hello world;
  `;

  try {
    const script = scriptEngine.buildJsScript(jsIshCode);
  } catch {
    return t.pass();
  }
  t.fail();
});

test("js", (t) => {
  const scriptEngine = new ScriptEngine();

  const jsModule = `
    const v = "1" + "2";
    ["1", "2", "3", v]
  `;

  const script = scriptEngine.buildJsScript(jsModule);

  scriptEngine.fetchStrings(script);

  t.pass();
});

test.skip("lua", (t) => {
  const scriptEngine = new ScriptEngine();

  const luaModule = `
    local arr = {"1", "2"};
    return arr;
  `;

  const script = scriptEngine.buildLuaScript(luaModule);

  scriptEngine.fetchStrings(script);

  t.pass();
});
```
```diff
@@ -1,8 +1,10 @@
 import test from "ava";
 import fs from "node:fs";
 import path from "path";
+import { createHash } from "node:crypto";
+import prettyBytes from "pretty-bytes";
 
-import droplet from "../index.js";
+import droplet, { DropletHandler, generateManifest } from "../index.js";
 
 test("check alt thread util", async (t) => {
   let endtime1, endtime2;
@@ -23,20 +25,49 @@ test("check alt thread util", async (t) => {
   t.pass();
 });
 
+test("list files", async (t) => {
+  const dirName = "./.listfiles";
+  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
+  fs.mkdirSync(dirName, { recursive: true });
+  fs.mkdirSync(dirName + "/subdir", { recursive: true });
+  fs.mkdirSync(dirName + "/subddir", { recursive: true });
+
+  fs.writeFileSync(dirName + "/root.txt", "root");
+  fs.writeFileSync(dirName + "/subdir/one.txt", "the first subdir");
+  fs.writeFileSync(dirName + "/subddir/two.txt", "the second");
+
+  const dropletHandler = new DropletHandler();
+  const files = dropletHandler.listFiles(dirName);
+
+  t.assert(
+    files.sort().join("\n"),
+    ["root.txt", "subddir/two.txt", "subdir/one.txt"].join("\n")
+  );
+
+  fs.rmSync(dirName, { recursive: true });
+});
+
 test("read file", async (t) => {
   const dirName = "./.test2";
   if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
   fs.mkdirSync(dirName, { recursive: true });
 
-  const testString = "g'day what's up my koala bros\n".repeat(10000);
+  const testString = "g'day what's up my koala bros\n".repeat(1000);
 
-  fs.writeFileSync("./.test2/TESTFILE", testString);
+  fs.writeFileSync(dirName + "/TESTFILE", testString);
 
-  const stream = droplet.readFile("./.test2", "TESTFILE");
+  const dropletHandler = new DropletHandler();
+
+  const stream = dropletHandler.readFile(
+    dirName,
+    "TESTFILE",
+    BigInt(0),
+    BigInt(testString.length)
+  );
+
   let finalString = "";
 
-  for await (const chunk of stream) {
+  for await (const chunk of stream.getStream()) {
     // Do something with each 'chunk'
     finalString += String.fromCharCode.apply(null, chunk);
   }
@@ -44,3 +75,146 @@ test("read file", async (t) => {
   t.assert(finalString == testString, "file strings don't match");
   fs.rmSync(dirName, { recursive: true });
 });
+
+test("read file offset", async (t) => {
+  const dirName = "./.test3";
+  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
+  fs.mkdirSync(dirName, { recursive: true });
+
+  const testString = "0123456789";
+  fs.writeFileSync(dirName + "/TESTFILE", testString);
+
+  const dropletHandler = new DropletHandler();
+  const stream = dropletHandler.readFile(
+    dirName,
+    "TESTFILE",
+    BigInt(1),
+    BigInt(4)
+  );
+
+  let finalString = "";
+
+  for await (const chunk of stream.getStream()) {
+    // Do something with each 'chunk'
+    finalString += String.fromCharCode.apply(null, chunk);
+  }
+
+  const expectedString = testString.slice(1, 4);
+
+  t.assert(
+    finalString == expectedString,
+    `file strings don't match: ${finalString} vs ${expectedString}`
+  );
+  fs.rmSync(dirName, { recursive: true });
+});
+
+test.skip("zip speed test", async (t) => {
+  t.timeout(100_000_000);
+  const dropletHandler = new DropletHandler();
+
+  const stream = dropletHandler.readFile("./assets/TheGame.zip", "setup.exe");
+
+  let totalRead = 0;
+  let totalSeconds = 0;
+
+  let lastTime = process.hrtime.bigint();
+  const timeThreshold = BigInt(1_000_000_000);
+  let runningTotal = 0;
+  let runningTime = BigInt(0);
+  for await (const chunk of stream.getStream()) {
+    // Do something with each 'chunk'
+    const currentTime = process.hrtime.bigint();
+    const timeDiff = currentTime - lastTime;
+    lastTime = currentTime;
+    runningTime += timeDiff;
+
+    runningTotal += chunk.length;
+
+    if (runningTime >= timeThreshold) {
+      console.log(`${prettyBytes(runningTotal)}/s`);
+      totalRead += runningTotal;
+      totalSeconds += 1;
+      runningTime = BigInt(0);
+      runningTotal = 0;
+    }
+  }
+
+  const roughAverage = totalRead / totalSeconds;
+
+  console.log(`total rough average: ${prettyBytes(roughAverage)}/s`);
+
+  t.pass();
+});
+
+test.skip("zip manifest test", async (t) => {
+  const dropletHandler = new DropletHandler();
+  const manifest = JSON.parse(
+    await new Promise((r, e) =>
+      generateManifest(
+        dropletHandler,
+        "./assets/TheGame.zip",
+        (_, __) => {},
+        (_, __) => {},
+        (err, manifest) => (err ? e(err) : r(manifest))
+      )
+    )
+  );
+
+  for (const [filename, data] of Object.entries(manifest)) {
+    let start = 0;
+    for (const [chunkIndex, length] of data.lengths.entries()) {
+      const hash = createHash("md5");
+      const stream = (
+        await dropletHandler.readFile(
+          "./assets/TheGame.zip",
+          filename,
+          BigInt(start),
+          BigInt(start + length)
+        )
+      ).getStream();
+
+      let streamLength = 0;
+      await stream.pipeTo(
+        new WritableStream({
+          write(chunk) {
+            streamLength += chunk.length;
+            hash.update(chunk);
+          },
+        })
+      );
+
+      if (streamLength != length)
+        return t.fail(
+          `stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
+        );
+
+      const digest = hash.digest("hex");
+      if (data.checksums[chunkIndex] != digest)
+        return t.fail(
+          `checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
+        );
+
+      start += length;
+    }
+  }
+
+  t.pass();
+});
+
+test.skip("partially compress zip test", async (t) => {
+  const dropletHandler = new DropletHandler();
+
+  const manifest = JSON.parse(
+    await new Promise((r, e) =>
+      generateManifest(
+        dropletHandler,
+        "./assets/my horror game.zip",
+        (_, __) => {},
+        (_, __) => {},
+        (err, manifest) => (err ? e(err) : r(manifest))
+      )
+    )
+  );
+
+  return t.pass();
+});
```
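The skipped "zip manifest test" above doubles as a recipe for verifying a stored build against its manifest. A trimmed sketch of the same loop as a reusable helper (the helper name and standalone shape are assumptions for illustration, not part of the API):

```js
import { createHash } from "node:crypto";

// Verify every chunk of one manifest entry by re-reading its byte range
// and comparing md5 digests. `handler` is a DropletHandler; `entry` is a
// manifest value with parallel `lengths` and `checksums` arrays.
async function verifyEntry(handler, archivePath, filename, entry) {
  let start = 0;
  for (const [i, length] of entry.lengths.entries()) {
    const hash = createHash("md5");
    const stream = handler
      .readFile(archivePath, filename, BigInt(start), BigInt(start + length))
      .getStream();
    // getStream() yields a web ReadableStream, so pipeTo works directly.
    await stream.pipeTo(
      new WritableStream({ write: (chunk) => hash.update(chunk) })
    );
    if (hash.digest("hex") !== entry.checksums[i]) return false;
    start += length;
  }
  return true;
}
```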
assets/generate.sh (new file, executable) — 4 lines

```sh
# yes "droplet is awesome" | dd of=./setup.exe bs=1024 count=1000000
dd if=/dev/random of=./setup.exe bs=1024 count=1000000
zip TheGame.zip setup.exe
rm setup.exe
```
index.d.ts (vendored) — 54 lines changed

```diff
@@ -1,16 +1,42 @@
-/* tslint:disable */
-/* eslint-disable */
-
 /* auto-generated by NAPI-RS */
+/* eslint-disable */
+/**
+ * Persistent object so we can cache things between commands
+ */
+export declare class DropletHandler {
+  constructor()
+  hasBackendForPath(path: string): boolean
+  listFiles(path: string): Array<string>
+  peekFile(path: string, subPath: string): bigint
+  readFile(path: string, subPath: string, start?: bigint | undefined | null, end?: bigint | undefined | null): JsDropStreamable
+}
 
-function hasBackendForPath(path: string): boolean
-function listFiles(path: string): Array<string>
-function readFile(path: string, subPath: string): ReadableStream<Buffer> | null
-function callAltThreadFunc(tsfn: ((err: Error | null, ) => any)): void
-function generateManifest(dir: string, progressSfn: ((err: Error | null, arg: number) => any), logSfn: ((err: Error | null, arg: string) => any), callbackSfn: ((err: Error | null, arg: string) => any)): void
-function generateRootCa(): Array<string>
-function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string>
-function verifyClientCertificate(clientCert: string, rootCa: string): boolean
-function signNonce(privateKey: string, nonce: string): string
-function verifyNonce(publicCert: string, nonce: string, signature: string): boolean
-undefinedundefined
+export declare class JsDropStreamable {
+  getStream(): any
+}
+
+export declare class Script {
+
+}
+
+export declare class ScriptEngine {
+  constructor()
+  buildRhaiScript(content: string): Script
+  buildJsScript(content: string): Script
+  execute(script: Script): void
+  fetchStrings(script: Script): Array<string>
+}
+
+export declare function callAltThreadFunc(tsfn: ((err: Error | null, ) => any)): void
+
+export declare function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string>
+
+export declare function generateManifest(dropletHandler: DropletHandler, dir: string, progressSfn: ((err: Error | null, arg: number) => any), logSfn: ((err: Error | null, arg: string) => any), callbackSfn: ((err: Error | null, arg: string) => any)): void
+
+export declare function generateRootCa(): Array<string>
+
+export declare function signNonce(privateKey: string, nonce: string): string
+
+export declare function verifyClientCertificate(clientCert: string, rootCa: string): boolean
+
+export declare function verifyNonce(publicCert: string, nonce: string, signature: string): boolean
```
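Taken together with the tests, the reshaped surface is used roughly like this; a minimal sketch, assuming a placeholder `./game` path and the callback-to-promise wrapping the test suite uses:

```js
import { DropletHandler, generateManifest } from "@drop-oss/droplet";

const handler = new DropletHandler();

// generateManifest reports progress and log lines through callbacks and
// delivers the manifest JSON through a final callback, so it promisifies:
const manifest = JSON.parse(
  await new Promise((resolve, reject) =>
    generateManifest(
      handler,
      "./game", // placeholder: a directory (or zip archive) with a backend
      (_err, _progress) => {},
      (_err, _logLine) => {},
      (err, json) => (err ? reject(err) : resolve(json))
    )
  )
);

// readFile now takes an optional BigInt byte range and returns a
// JsDropStreamable; the readable stream itself is behind getStream().
const streamable = handler.readFile("./game", "file.bin", BigInt(0), BigInt(64));
for await (const chunk of streamable.getStream()) {
  // consume chunk
}
```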
index.js — 660 lines changed (@@ -1,325 +1,389 @@)

The generated loader was regenerated with the napi-rs 3 template: the napi-rs 2 platform `switch` with `existsSync` probing and a single `loadError` gives way to a `requireNative()` helper, a `loadErrors` array, layered musl detection, an explicit `NAPI_RS_NATIVE_LIBRARY_PATH` override, and a WASI fallback. The old top-level named exports (`hasBackendForPath`, `listFiles`, `readFile`, and the stray `module.exports.undefined = undefined`) are gone; exports are now re-read from `nativeBinding`. The regenerated file, with the repetitive per-target branches condensed to the linux-x64 pattern they all share:

```js
// prettier-ignore
/* eslint-disable */
// @ts-nocheck
/* auto-generated by NAPI-RS */

const { createRequire } = require('node:module')
require = createRequire(__filename)

const { readFileSync } = require('node:fs')
let nativeBinding = null
const loadErrors = []

const isMusl = () => {
  let musl = false
  if (process.platform === 'linux') {
    musl = isMuslFromFilesystem()
    if (musl === null) {
      musl = isMuslFromReport()
    }
    if (musl === null) {
      musl = isMuslFromChildProcess()
    }
  }
  return musl
}

const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')

const isMuslFromFilesystem = () => {
  try {
    return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
  } catch {
    return null
  }
}

const isMuslFromReport = () => {
  let report = null
  if (typeof process.report?.getReport === 'function') {
    process.report.excludeNetwork = true
    report = process.report.getReport()
  }
  if (!report) {
    return null
  }
  if (report.header && report.header.glibcVersionRuntime) {
    return false
  }
  if (Array.isArray(report.sharedObjects)) {
    if (report.sharedObjects.some(isFileMusl)) {
      return true
    }
  }
  return false
}

const isMuslFromChildProcess = () => {
  try {
    return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
  } catch (e) {
    // If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
    return false
  }
}

function requireNative() {
  if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
    try {
      nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
    } catch (err) {
      loadErrors.push(err)
    }
  } else if (process.platform === 'linux') {
    if (process.arch === 'x64') {
      if (isMusl()) {
        try {
          return require('./droplet.linux-x64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@drop-oss/droplet-linux-x64-musl')
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./droplet.linux-x64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@drop-oss/droplet-linux-x64-gnu')
        } catch (e) {
          loadErrors.push(e)
        }
      }
    }
    // [linux arm64/arm/riscv64 (gnu and musl variants), ppc64-gnu and s390x-gnu
    //  branches follow the same local-file-then-package pattern shown above]
  }
  // [android (arm64, arm), win32 (x64, ia32, arm64), darwin (universal, x64, arm64)
  //  and freebsd (x64, arm64) branches likewise try './droplet.<target>.node' first,
  //  then the matching '@drop-oss/droplet-<target>' package, pushing failures onto
  //  loadErrors; unsupported OS/arch combinations push an `Unsupported ...` Error]
}

nativeBinding = requireNative()

if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
  try {
    nativeBinding = require('./droplet.wasi.cjs')
  } catch (err) {
    if (process.env.NAPI_RS_FORCE_WASI) {
      loadErrors.push(err)
    }
  }
  if (!nativeBinding) {
    try {
      nativeBinding = require('@drop-oss/droplet-wasm32-wasi')
    } catch (err) {
      if (process.env.NAPI_RS_FORCE_WASI) {
        loadErrors.push(err)
      }
    }
  }
}

if (!nativeBinding) {
  if (loadErrors.length > 0) {
    throw new Error(
      `Cannot find native binding. ` +
        `npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
        'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
      { cause: loadErrors }
    )
  }
  throw new Error(`Failed to load native binding`)
}

module.exports = nativeBinding
module.exports.DropletHandler = nativeBinding.DropletHandler
module.exports.JsDropStreamable = nativeBinding.JsDropStreamable
module.exports.Script = nativeBinding.Script
module.exports.ScriptEngine = nativeBinding.ScriptEngine
module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc
module.exports.generateClientCertificate = nativeBinding.generateClientCertificate
module.exports.generateManifest = nativeBinding.generateManifest
module.exports.generateRootCa = nativeBinding.generateRootCa
module.exports.signNonce = nativeBinding.signNonce
module.exports.verifyClientCertificate = nativeBinding.verifyClientCertificate
module.exports.verifyNonce = nativeBinding.verifyNonce
```
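Two escape hatches in the regenerated loader are worth knowing: `NAPI_RS_NATIVE_LIBRARY_PATH` points the loader at an explicit binary before any platform detection runs, and `NAPI_RS_FORCE_WASI` exercises the WASI fallback even when a native binding exists. A usage sketch (paths are placeholders):

```js
// Run as: NAPI_RS_NATIVE_LIBRARY_PATH=/tmp/droplet.linux-x64-gnu.node node app.js
// or:     NAPI_RS_FORCE_WASI=1 node app.js
// The loader consults these before (or instead of) its per-platform probing.
const droplet = require("./index.js"); // "@drop-oss/droplet" when installed
console.log(typeof droplet.generateManifest); // "function" once a binding loads
```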
Deleted: `@drop-oss/droplet-linux-arm-gnueabihf` npm package (README.md, 3 lines; package.json, 21 lines)

```diff
@@ -1,3 +0,0 @@
-# `@drop-oss/droplet-linux-arm-gnueabihf`
-
-This is the **armv7-unknown-linux-gnueabihf** binary for `@drop-oss/droplet`
@@ -1,21 +0,0 @@
-{
-  "name": "@drop-oss/droplet-linux-arm-gnueabihf",
-  "version": "0.0.0",
-  "os": [
-    "linux"
-  ],
-  "cpu": [
-    "arm"
-  ],
-  "main": "droplet.linux-arm-gnueabihf.node",
-  "files": [
-    "droplet.linux-arm-gnueabihf.node"
-  ],
-  "license": "MIT",
-  "engines": {
-    "node": ">= 10"
-  },
-  "repository": {
-    "url": "https://github.com/Drop-OSS/droplet"
-  }
-}
```

Deleted: `@drop-oss/droplet-linux-arm-musleabihf` npm package (README.md, 3 lines; package.json, 21 lines)

```diff
@@ -1,3 +0,0 @@
-# `@drop-oss/droplet-linux-arm-musleabihf`
-
-This is the **armv7-unknown-linux-musleabihf** binary for `@drop-oss/droplet`
@@ -1,21 +0,0 @@
-{
-  "name": "@drop-oss/droplet-linux-arm-musleabihf",
-  "version": "0.0.0",
-  "os": [
-    "linux"
-  ],
-  "cpu": [
-    "arm"
-  ],
-  "main": "droplet.linux-arm-musleabihf.node",
-  "files": [
-    "droplet.linux-arm-musleabihf.node"
-  ],
-  "license": "MIT",
-  "engines": {
-    "node": ">= 10"
-  },
-  "repository": {
-    "url": "https://github.com/Drop-OSS/droplet"
-  }
-}
```
package.json — 32 lines changed

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@drop-oss/droplet",
-  "version": "1.2.0",
+  "version": "3.1.0",
   "main": "index.js",
   "types": "index.d.ts",
   "napi": {
@@ -8,25 +8,35 @@
     "triples": {
       "additional": [
         "aarch64-apple-darwin",
+        "x86_64-apple-darwin",
+        "universal-apple-darwin",
         "aarch64-unknown-linux-gnu",
         "aarch64-unknown-linux-musl",
-        "aarch64-pc-windows-msvc",
-        "armv7-unknown-linux-gnueabihf",
-        "armv7-unknown-linux-musleabihf",
+        "x86_64-unknown-linux-gnu",
         "x86_64-unknown-linux-musl",
-        "universal-apple-darwin",
-        "riscv64gc-unknown-linux-gnu"
+        "riscv64gc-unknown-linux-gnu",
+        "aarch64-pc-windows-msvc",
+        "x86_64-pc-windows-msvc"
       ]
     }
   },
   "license": "MIT",
   "devDependencies": {
-    "@napi-rs/cli": "2.18.4",
+    "@napi-rs/cli": "3.0.0-alpha.91",
     "@types/node": "^22.13.10",
-    "ava": "^6.2.0"
+    "ava": "^6.2.0",
+    "pretty-bytes": "^7.0.1",
+    "tsimp": "^2.0.12"
   },
   "ava": {
-    "timeout": "3m"
+    "timeout": "3m",
+    "extensions": [
+      "cjs",
+      "mjs",
+      "js",
+      "ts",
+      "mts"
+    ]
   },
   "engines": {
     "node": ">= 10"
@@ -36,8 +46,8 @@
     "build": "napi build --platform --release",
     "build:debug": "napi build --platform",
     "prepublishOnly": "napi prepublish -t npm",
-    "test": "ava",
-    "universal": "napi universal",
+    "test": "ava ",
+    "universal": "napi universalize",
     "version": "napi version"
   },
   "packageManager": "yarn@4.7.0",
```
@ -1,165 +0,0 @@
|
|||||||
#[cfg(unix)]
|
|
||||||
use std::os::unix::fs::PermissionsExt;
|
|
||||||
use std::{
|
|
||||||
fs::{self, metadata, File},
|
|
||||||
io::{self, BufReader, ErrorKind, Read},
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
task::Poll,
|
|
||||||
};
|
|
||||||
|
|
||||||
use napi::{
|
|
||||||
bindgen_prelude::*,
|
|
||||||
tokio_stream::{Stream, StreamExt},
|
|
||||||
};
|
|
||||||
use tokio_util::{
|
|
||||||
bytes::BytesMut,
|
|
||||||
codec::{BytesCodec, FramedRead},
|
|
||||||
};
|
|
||||||
|
|
||||||
fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
|
|
||||||
if metadata(path).unwrap().is_dir() {
|
|
||||||
let paths = fs::read_dir(path).unwrap();
|
|
||||||
for path_result in paths {
|
|
||||||
let full_path = path_result.unwrap().path();
|
|
||||||
if metadata(&full_path).unwrap().is_dir() {
|
|
||||||
_list_files(vec, &full_path);
|
|
||||||
} else {
|
|
||||||
vec.push(full_path);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct VersionFile {
|
|
||||||
pub relative_filename: String,
|
|
||||||
pub permission: u32,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait VersionBackend: 'static {
|
|
||||||
fn list_files(&self, path: &Path) -> Vec<VersionFile>;
|
|
||||||
fn reader(&self, file: &VersionFile) -> Option<File>;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct PathVersionBackend {
|
|
||||||
pub base_dir: PathBuf,
|
|
||||||
}
|
|
||||||
impl VersionBackend for PathVersionBackend {
|
|
||||||
fn list_files(&self, path: &Path) -> Vec<VersionFile> {
|
|
||||||
let mut vec = Vec::new();
|
|
||||||
_list_files(&mut vec, path);
|
|
||||||
|
|
||||||
let mut results = Vec::new();
|
|
||||||
|
|
||||||
for pathbuf in vec.iter() {
|
|
||||||
            let file = File::open(pathbuf.clone()).unwrap();
            let relative = pathbuf.strip_prefix(path).unwrap();
            let metadata = file.try_clone().unwrap().metadata().unwrap();
            let permission_object = metadata.permissions();
            let permissions = {
                let perm: u32;
                #[cfg(target_family = "unix")]
                {
                    perm = permission_object.mode();
                }
                #[cfg(not(target_family = "unix"))]
                {
                    perm = 0
                }
                perm
            };

            results.push(VersionFile {
                relative_filename: relative.to_string_lossy().to_string(),
                permission: permissions,
            });
        }

        results
    }

    fn reader(&self, file: &VersionFile) -> Option<File> {
        let file = File::open(self.base_dir.join(file.relative_filename.clone())).ok()?;

        return Some(file);
    }
}

// Todo implementation for archives
// Split into a separate impl for each type of archive
pub struct ArchiveVersionBackend {}
impl VersionBackend for ArchiveVersionBackend {
    fn list_files(&self, path: &Path) -> Vec<VersionFile> {
        todo!()
    }

    fn reader(&self, file: &VersionFile) -> Option<File> {
        todo!()
    }
}

pub fn create_backend_for_path(path: &Path) -> Option<Box<(dyn VersionBackend)>> {
    let is_directory = path.is_dir();
    if is_directory {
        return Some(Box::new(PathVersionBackend {
            base_dir: path.to_path_buf(),
        }));
    };

    /*
        Insert checks for whatever backend you like
    */

    None
}

#[napi]
pub fn has_backend_for_path(path: String) -> bool {
    let path = Path::new(&path);

    let has_backend = create_backend_for_path(path).is_some();

    has_backend
}

#[napi]
pub fn list_files(path: String) -> Vec<String> {
    let path = Path::new(&path);
    let backend = create_backend_for_path(path).unwrap();
    let files = backend.list_files(path);
    files.into_iter().map(|e| e.relative_filename).collect()
}

#[napi]
pub fn read_file(
    path: String,
    sub_path: String,
    env: &Env,
) -> Option<ReadableStream<'static, BufferSlice<'static>>> {
    let path = Path::new(&path);
    let backend = create_backend_for_path(path).unwrap();
    let version_file = VersionFile {
        relative_filename: sub_path,
        permission: 0, // Shouldn't matter
    };
    // Use `?` operator for cleaner error propagation from `Option`
    let reader = backend.reader(&version_file)?;

    // Convert std::fs::File to tokio::fs::File for async operations
    let reader = tokio::fs::File::from_std(reader);

    // Create a FramedRead stream with BytesCodec for chunking
    let stream = FramedRead::new(reader, BytesCodec::new())
        // Use StreamExt::map to transform each Result item
        .map(|result_item| {
            result_item
                // Apply Result::map to transform Ok(BytesMut) to Ok(Vec<u8>)
                .map(|bytes| bytes.to_vec())
                // Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
                .map_err(|e| napi::Error::from(e)) // napi::Error implements From<tokio::io::Error>
        });
    // Create the napi-rs ReadableStream from the tokio_stream::Stream
    // The unwrap() here means if stream creation fails, it will panic.
    // For a production system, consider returning Result<Option<...>> and handling this.
    Some(ReadableStream::create_with_stream_bytes(env, stream).unwrap())
}
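The cfg-gated permission block above is the pattern this codebase uses wherever it touches file modes: `PermissionsExt::mode()` exists only on Unix targets, so everything else gets a zero placeholder. A minimal standalone sketch of the same idea (the helper name `file_mode` is ours, not the repo's):

use std::fs::Metadata;

#[cfg(target_family = "unix")]
fn file_mode(metadata: &Metadata) -> u32 {
    use std::os::unix::fs::PermissionsExt;
    // Full POSIX mode bits, e.g. 0o100644 for a regular rw-r--r-- file.
    metadata.permissions().mode()
}

#[cfg(not(target_family = "unix"))]
fn file_mode(_metadata: &Metadata) -> u32 {
    // No POSIX modes on this target; store a placeholder, as above.
    0
}

fn main() -> std::io::Result<()> {
    let metadata = std::fs::metadata("Cargo.toml")?;
    println!("mode: {:o}", file_mode(&metadata));
    Ok(())
}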
src/lib.rs (12 changed lines)
@@ -1,8 +1,14 @@
 #![deny(clippy::all)]
+#![deny(clippy::unwrap_used)]
+#![deny(clippy::expect_used)]
+#![deny(clippy::panic)]
+#![feature(trait_alias)]
+#![feature(iterator_try_collect)]
 
-pub mod file_utils;
 pub mod manifest;
+pub mod script;
 pub mod ssl;
+pub mod version;
 
 #[macro_use]
 extern crate napi_derive;
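The new crate attributes make clippy treat every `unwrap()`, `expect()`, and `panic!()` as a hard error, which is what drives the `?`-based rewrites in the files below; the two `#![feature(...)]` gates additionally require a nightly compiler. A sketch of the kind of change `clippy::unwrap_used` forces (our own illustration, not code from this diff):

// Rejected by `cargo clippy` under #![deny(clippy::unwrap_used)]:
//     let pem = std::fs::read_to_string("root-ca.pem").unwrap();
// Accepted: propagate the failure to the caller instead.
fn load_root_ca() -> std::io::Result<String> {
    std::fs::read_to_string("root-ca.pem")
}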
src/manifest.rs (153 changed lines)
@@ -1,19 +1,13 @@
-use std::{
-    collections::HashMap, fs::File, io::{BufRead, BufReader}, path::Path, rc::Rc, sync::Arc, thread
-};
-
-#[cfg(unix)]
-use std::os::unix::fs::PermissionsExt;
+use std::{collections::HashMap, sync::Arc, thread};
 
 use napi::{
-    bindgen_prelude::Function,
     threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
-    Env, Error, Result,
+    Result,
 };
 use serde_json::json;
 use uuid::Uuid;
 
-use crate::file_utils::create_backend_for_path;
+use crate::version::{types::VersionBackend, utils::DropletHandler};
 
 const CHUNK_SIZE: usize = 1024 * 1024 * 64;
 
@@ -35,73 +29,116 @@ pub fn call_alt_thread_func(tsfn: Arc<ThreadsafeFunction<()>>) -> Result<(), String> {
 }
 
 #[napi]
-pub fn generate_manifest(
+pub fn generate_manifest<'a>(
+    droplet_handler: &mut DropletHandler,
     dir: String,
     progress_sfn: ThreadsafeFunction<i32>,
     log_sfn: ThreadsafeFunction<String>,
     callback_sfn: ThreadsafeFunction<String>,
-) -> Result<(), String> {
+) -> anyhow::Result<()> {
+    let backend: &mut Box<dyn VersionBackend + Send> = droplet_handler
+        .create_backend_for_path(dir)
+        .ok_or(napi::Error::from_reason(
+            "Could not create backend for path.",
+        ))?;
+
+    // This is unsafe (obviously)
+    // But it's allg as long the DropletHandler doesn't get
+    // dropped while we're generating the manifest.
+    let backend: &'static mut Box<dyn VersionBackend + Send> =
+        unsafe { std::mem::transmute(backend) };
+
+    let required_single_file = backend.require_whole_files();
+
     thread::spawn(move || {
-        let base_dir = Path::new(&dir);
-        let backend = create_backend_for_path(base_dir).unwrap();
-        let files = backend.list_files(base_dir);
-
-        // Filepath to chunk data
-        let mut chunks: HashMap<String, ChunkData> = HashMap::new();
-
-        let total: i32 = files.len() as i32;
-        let mut i: i32 = 0;
-
-        for version_file in files {
-            let mut raw_reader = backend.reader(&version_file).unwrap();
-            let mut reader = BufReader::with_capacity(CHUNK_SIZE, raw_reader);
-
-            let mut chunk_data = ChunkData {
-                permissions: version_file.permission,
-                ids: Vec::new(),
-                checksums: Vec::new(),
-                lengths: Vec::new(),
-            };
-
-            let mut chunk_index = 0;
-            loop {
-                let mut buffer: Vec<u8> = Vec::new();
-                reader.fill_buf().unwrap().clone_into(&mut buffer);
-                let length = buffer.len();
-
-                if length == 0 {
-                    break;
-                }
-
-                let chunk_id = Uuid::new_v4();
-                let checksum = md5::compute(buffer).0;
-                let checksum_string = hex::encode(checksum);
-
-                chunk_data.ids.push(chunk_id.to_string());
-                chunk_data.checksums.push(checksum_string);
-                chunk_data.lengths.push(length);
-
-                let log_str = format!(
-                    "Processed chunk {} for {}",
-                    chunk_index, &version_file.relative_filename
-                );
-                log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
-
-                reader.consume(length);
-                chunk_index += 1;
-            }
-
-            chunks.insert(version_file.relative_filename, chunk_data);
-
-            i += 1;
-            let progress = i * 100 / total;
-            progress_sfn.call(Ok(progress), ThreadsafeFunctionCallMode::Blocking);
-        }
-
-        callback_sfn.call(
-            Ok(json!(chunks).to_string()),
-            ThreadsafeFunctionCallMode::Blocking,
-        );
+        let callback_borrow = &callback_sfn;
+
+        let mut inner = move || -> Result<()> {
+            let files = backend.list_files()?;
+
+            // Filepath to chunk data
+            let mut chunks: HashMap<String, ChunkData> = HashMap::new();
+
+            let total: i32 = files.len() as i32;
+            let mut i: i32 = 0;
+
+            let mut buf = [0u8; 1024 * 16];
+
+            for version_file in files {
+                let mut reader = backend.reader(&version_file, 0, 0)?;
+
+                let mut chunk_data = ChunkData {
+                    permissions: version_file.permission,
+                    ids: Vec::new(),
+                    checksums: Vec::new(),
+                    lengths: Vec::new(),
+                };
+
+                let mut chunk_index = 0;
+                loop {
+                    let mut length = 0;
+                    let mut buffer: Vec<u8> = Vec::new();
+                    let mut file_empty = false;
+
+                    loop {
+                        let read = reader.read(&mut buf)?;
+
+                        length += read;
+
+                        // If we're out of data, add this chunk and then move onto the next file
+                        if read == 0 {
+                            file_empty = true;
+                            break;
+                        }
+
+                        buffer.extend_from_slice(&buf[0..read]);
+
+                        if length >= CHUNK_SIZE && !required_single_file {
+                            break;
+                        }
+                    }
+
+                    let chunk_id = Uuid::new_v4();
+                    let checksum = md5::compute(buffer).0;
+                    let checksum_string = hex::encode(checksum);
+
+                    chunk_data.ids.push(chunk_id.to_string());
+                    chunk_data.checksums.push(checksum_string);
+                    chunk_data.lengths.push(length);
+
+                    let log_str = format!(
+                        "Processed chunk {} for {}",
+                        chunk_index, &version_file.relative_filename
+                    );
+
+                    log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
+
+                    chunk_index += 1;
+
+                    if file_empty {
+                        break;
+                    }
+                }
+
+                chunks.insert(version_file.relative_filename, chunk_data);
+
+                i += 1;
+                let progress = i * 100 / total;
+                progress_sfn.call(Ok(progress), ThreadsafeFunctionCallMode::Blocking);
+            }
+
+            callback_borrow.call(
+                Ok(json!(chunks).to_string()),
+                ThreadsafeFunctionCallMode::Blocking,
+            );
+
+            Ok(())
+        };
+
+        let result = inner();
+        if let Err(generate_err) = result {
+            callback_borrow.call(Err(generate_err), ThreadsafeFunctionCallMode::Blocking);
+        }
     });
 
     Ok(())
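The rewritten loop above reads each file through a small 16 KiB buffer and cuts a chunk record every CHUNK_SIZE bytes, unless the backend demands whole files. A self-contained sketch of that chunking scheme, assuming the same `md5`, `hex`, and `uuid` crates this diff uses; `chunk_stats` is our name, and unlike `generate_manifest` it skips the trailing empty chunk:

use std::io::Read;

const CHUNK_SIZE: usize = 1024 * 1024 * 64;

// One (chunk id, md5 hex, length) record per chunk, like ChunkData's
// parallel ids/checksums/lengths vectors.
fn chunk_stats(mut reader: impl Read) -> std::io::Result<Vec<(String, String, usize)>> {
    let mut out = Vec::new();
    let mut buf = [0u8; 1024 * 16];
    loop {
        let mut chunk: Vec<u8> = Vec::new();
        loop {
            let read = reader.read(&mut buf)?;
            if read == 0 {
                break;
            }
            chunk.extend_from_slice(&buf[..read]);
            if chunk.len() >= CHUNK_SIZE {
                break;
            }
        }
        if chunk.is_empty() {
            break;
        }
        let id = uuid::Uuid::new_v4().to_string();
        let checksum = hex::encode(md5::compute(&chunk).0);
        out.push((id, checksum, chunk.len()));
    }
    Ok(out)
}

fn main() -> std::io::Result<()> {
    let data = vec![0u8; 80 * 1024 * 1024]; // 80 MiB -> one 64 MiB chunk, one 16 MiB chunk
    for (id, md5sum, len) in chunk_stats(&data[..])? {
        println!("{id} {md5sum} {len}");
    }
    Ok(())
}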
src/script/mod.rs (new file, 133 lines)
@@ -0,0 +1,133 @@
use boa_engine::{Context, JsValue, Source};
// use mlua::{FromLuaMulti, Function, Lua};
use napi::Result;
use rhai::AST;

pub enum ScriptType {
    Rhai,
    Lua,
    Javascript,
}

#[napi]
pub struct Script(ScriptInner);

pub enum ScriptInner {
    Rhai { script: AST },
    // Lua { script: Function },
    Javascript { script: boa_engine::Script },
}

#[napi]
pub struct ScriptEngine {
    rhai_engine: rhai::Engine,
    // lua_engine: Lua,
    js_engine: Context,
}

#[napi]
impl ScriptEngine {
    #[napi(constructor)]
    pub fn new() -> Self {
        ScriptEngine {
            rhai_engine: rhai::Engine::new(),
            // lua_engine: Lua::new(),
            js_engine: Context::default(),
        }
    }

    #[napi]
    pub fn build_rhai_script(&self, content: String) -> Result<Script> {
        let script = self
            .rhai_engine
            .compile(content.clone())
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
        Ok(Script(ScriptInner::Rhai { script }))
    }

    /*
    #[napi]
    pub fn build_lua_script(&self, content: String) -> Result<Script> {
        let func = self
            .lua_engine
            .load(content.clone())
            .into_function()
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
        Ok(Script(ScriptInner::Lua { script: func }))
    }
    */

    #[napi]
    pub fn build_js_script(&mut self, content: String) -> Result<Script> {
        let source = Source::from_bytes(content.as_bytes());
        let script = boa_engine::Script::parse(source, None, &mut self.js_engine)
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;

        Ok(Script(ScriptInner::Javascript { script }))
    }

    fn execute_rhai_script<T>(&self, ast: &AST) -> Result<T>
    where
        T: Clone + 'static,
    {
        let v = self
            .rhai_engine
            .eval_ast::<T>(ast)
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;

        Ok(v)
    }

    /*
    fn execute_lua_script<T>(&self, function: &Function) -> Result<T>
    where
        T: FromLuaMulti,
    {
        let v = function
            .call::<T>(())
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
        Ok(v)
    }
    */

    fn execute_js_script(&mut self, func: &boa_engine::Script) -> Result<JsValue> {
        let v = func
            .evaluate(&mut self.js_engine)
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;

        Ok(v)
    }

    #[napi]
    pub fn execute(&mut self, script: &mut Script) -> Result<()> {
        match &script.0 {
            ScriptInner::Rhai { script } => {
                self.execute_rhai_script::<()>(script)?;
            }
            /*ScriptInner::Lua { script } => {
                self.execute_lua_script::<()>(script)?;
            }*/
            ScriptInner::Javascript { script } => {
                self.execute_js_script(script)?;
            }
        };
        Ok(())
    }

    #[napi]
    pub fn fetch_strings(&mut self, script: &mut Script) -> Result<Vec<String>> {
        Ok(match &script.0 {
            ScriptInner::Rhai { script } => self.execute_rhai_script(script)?,
            //ScriptInner::Lua { script } => self.execute_lua_script(script)?,
            ScriptInner::Javascript { script } => {
                let v = self.execute_js_script(script)?;

                serde_json::from_value(
                    v.to_json(&mut self.js_engine)
                        .map_err(|e| napi::Error::from_reason(e.to_string()))?,
                )
                .map_err(|e| napi::Error::from_reason(e.to_string()))?
            }
        })
    }
}
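Taken together, the engine compiles a script once and evaluates it later, with `fetch_strings` bridging script values back into `Vec<String>`. A sketch of the intended call pattern, assuming the `ScriptEngine` and `Script` types above are in scope; the script bodies are our own examples:

fn demo() -> napi::Result<()> {
    let mut engine = ScriptEngine::new();

    // JavaScript via boa: parse once, evaluate, pull strings back out
    // through JsValue::to_json + serde_json.
    let mut js = engine.build_js_script(r#"["setup.exe", "data.pak"]"#.to_string())?;
    let names = engine.fetch_strings(&mut js)?;
    assert_eq!(names, vec!["setup.exe".to_string(), "data.pak".to_string()]);

    // Rhai: compile to an AST, then evaluate for its side effects.
    let mut rhai = engine.build_rhai_script(r#"print("hello from rhai");"#.to_string())?;
    engine.execute(&mut rhai)?;

    Ok(())
}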
src/ssl.rs (60 changed lines)
@@ -1,4 +1,4 @@
-use napi::Error;
+use anyhow::anyhow;
 use rcgen::{
     CertificateParams, DistinguishedName, IsCa, KeyPair, KeyUsagePurpose, PublicKeyData,
     SubjectPublicKeyInfo,
@@ -10,7 +10,7 @@ use x509_parser::parse_x509_certificate;
 use x509_parser::pem::Pem;
 
 #[napi]
-pub fn generate_root_ca() -> Result<Vec<String>, Error> {
+pub fn generate_root_ca() -> anyhow::Result<Vec<String>> {
     let mut params = CertificateParams::default();
 
     let mut name = DistinguishedName::new();
@@ -22,7 +22,7 @@ pub fn generate_root_ca() -> Result<Vec<String>, Error> {
     params.not_before = OffsetDateTime::now_utc();
     params.not_after = OffsetDateTime::now_utc()
         .checked_add(Duration::days(365 * 1000))
-        .unwrap();
+        .ok_or(anyhow!("failed to calculate end date"))?;
 
     params.is_ca = IsCa::Ca(rcgen::BasicConstraints::Unconstrained);
 
@@ -32,9 +32,8 @@ pub fn generate_root_ca() -> Result<Vec<String>, Error> {
         KeyUsagePurpose::DigitalSignature,
     ];
 
-    let key_pair = KeyPair::generate().map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let certificate = CertificateParams::self_signed(params, &key_pair)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+    let key_pair = KeyPair::generate()?;
+    let certificate = CertificateParams::self_signed(params, &key_pair)?;
 
     // Returns certificate, then private key
     Ok(vec![certificate.pem(), key_pair.serialize_pem()])
@@ -46,13 +45,10 @@ pub fn generate_client_certificate(
     _client_name: String,
     root_ca: String,
     root_ca_private: String,
-) -> Result<Vec<String>, Error> {
-    let root_key_pair =
-        KeyPair::from_pem(&root_ca_private).map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let certificate_params = CertificateParams::from_ca_cert_pem(&root_ca)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let root_ca = CertificateParams::self_signed(certificate_params, &root_key_pair)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+) -> anyhow::Result<Vec<String>> {
+    let root_key_pair = KeyPair::from_pem(&root_ca_private)?;
+    let certificate_params = CertificateParams::from_ca_cert_pem(&root_ca)?;
+    let root_ca = CertificateParams::self_signed(certificate_params, &root_key_pair)?;
 
     let mut params = CertificateParams::default();
 
@@ -66,28 +62,24 @@ pub fn generate_client_certificate(
         KeyUsagePurpose::DataEncipherment,
     ];
 
-    let key_pair = KeyPair::generate_for(&rcgen::PKCS_ECDSA_P384_SHA384)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
-    let certificate = CertificateParams::signed_by(params, &key_pair, &root_ca, &root_key_pair)
-        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
+    let key_pair = KeyPair::generate_for(&rcgen::PKCS_ECDSA_P384_SHA384)?;
+    let certificate = CertificateParams::signed_by(params, &key_pair, &root_ca, &root_key_pair)?;
 
     // Returns certificate, then private key
     Ok(vec![certificate.pem(), key_pair.serialize_pem()])
 }
 
 #[napi]
-pub fn verify_client_certificate(client_cert: String, root_ca: String) -> Result<bool, Error> {
+pub fn verify_client_certificate(client_cert: String, root_ca: String) -> anyhow::Result<bool> {
     let root_ca = Pem::iter_from_buffer(root_ca.as_bytes())
         .next()
-        .unwrap()
-        .unwrap();
-    let root_ca = root_ca.parse_x509().unwrap();
+        .ok_or(anyhow!("no certificates in root ca"))??;
+    let root_ca = root_ca.parse_x509()?;
 
     let client_cert = Pem::iter_from_buffer(client_cert.as_bytes())
         .next()
-        .unwrap()
-        .unwrap();
-    let client_cert = client_cert.parse_x509().unwrap();
+        .ok_or(anyhow!("No client certs in chain."))??;
+    let client_cert = client_cert.parse_x509()?;
 
     let valid = root_ca
         .verify_signature(Some(client_cert.public_key()))
@@ -97,31 +89,33 @@ pub fn verify_client_certificate(client_cert: String, root_ca: String) -> Result<bool, Error> {
 }
 
 #[napi]
-pub fn sign_nonce(private_key: String, nonce: String) -> Result<String, Error> {
+pub fn sign_nonce(private_key: String, nonce: String) -> anyhow::Result<String> {
     let rng = SystemRandom::new();
 
-    let key_pair = KeyPair::from_pem(&private_key).unwrap();
+    let key_pair = KeyPair::from_pem(&private_key)?;
 
     let key_pair = EcdsaKeyPair::from_pkcs8(
         &ring::signature::ECDSA_P384_SHA384_FIXED_SIGNING,
        &key_pair.serialize_der(),
         &rng,
     )
-    .unwrap();
+    .map_err(|e| napi::Error::from_reason(e.to_string()))?;
 
-    let signature = key_pair.sign(&rng, nonce.as_bytes()).unwrap();
+    let signature = key_pair
+        .sign(&rng, nonce.as_bytes())
+        .map_err(|e| napi::Error::from_reason(e.to_string()))?;
     let hex_signature = hex::encode(signature);
 
     Ok(hex_signature)
 }
 
 #[napi]
-pub fn verify_nonce(public_cert: String, nonce: String, signature: String) -> Result<bool, Error> {
-    let (_, pem) = x509_parser::pem::parse_x509_pem(public_cert.as_bytes()).unwrap();
-    let (_, spki) = parse_x509_certificate(&pem.contents).unwrap();
-    let public_key = SubjectPublicKeyInfo::from_der(spki.public_key().raw).unwrap();
+pub fn verify_nonce(public_cert: String, nonce: String, signature: String) -> anyhow::Result<bool> {
+    let (_, pem) = x509_parser::pem::parse_x509_pem(public_cert.as_bytes())?;
+    let (_, spki) = parse_x509_certificate(&pem.contents)?;
+    let public_key = SubjectPublicKeyInfo::from_der(spki.public_key().raw)?;
 
-    let raw_signature = hex::decode(signature).unwrap();
+    let raw_signature = hex::decode(signature)?;
 
     let valid = ring::signature::ECDSA_P384_SHA384_FIXED
         .verify(
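With every function now returning `anyhow::Result`, the whole certificate lifecycle chains with `?`. A sketch of the round trip, assuming the functions above are in scope and relying on their documented return order of certificate first, private key second:

fn demo() -> anyhow::Result<()> {
    // Root CA, then a client certificate signed by it.
    let root = generate_root_ca()?;
    let (root_cert, root_key) = (root[0].clone(), root[1].clone());

    let client = generate_client_certificate("client-1".to_string(), root_cert.clone(), root_key)?;
    let (client_cert, client_key) = (client[0].clone(), client[1].clone());

    // Check the client certificate against the root CA.
    let cert_ok = verify_client_certificate(client_cert.clone(), root_cert)?;
    println!("client certificate verified: {cert_ok}");

    // The client proves key possession by signing a nonce.
    let nonce = "one-time-challenge".to_string();
    let signature = sign_nonce(client_key, nonce.clone())?;
    let nonce_ok = verify_nonce(client_cert, nonce, signature)?;
    println!("nonce signature verified: {nonce_ok}");

    Ok(())
}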
src/version/backends.rs (new file, 216 lines)
@@ -0,0 +1,216 @@
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
    cell::LazyCell,
    fs::{self, metadata, File},
    io::{self, BufRead, BufReader, Read, Seek, SeekFrom, Sink},
    path::{Path, PathBuf},
    process::{Child, ChildStdout, Command, Stdio},
    sync::{Arc, LazyLock},
};

use anyhow::anyhow;

use crate::version::types::{MinimumFileObject, VersionBackend, VersionFile};

pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) -> napi::Result<()> {
    if metadata(path)?.is_dir() {
        let paths = fs::read_dir(path)?;
        for path_result in paths {
            let full_path = path_result?.path();
            if metadata(&full_path)?.is_dir() {
                _list_files(vec, &full_path)?;
            } else {
                vec.push(full_path);
            }
        }
    };

    Ok(())
}

#[derive(Clone)]
pub struct PathVersionBackend {
    pub base_dir: PathBuf,
}
impl VersionBackend for PathVersionBackend {
    fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
        let mut vec = Vec::new();
        _list_files(&mut vec, &self.base_dir)?;

        let mut results = Vec::new();

        for pathbuf in vec.iter() {
            let relative = pathbuf.strip_prefix(self.base_dir.clone())?;

            results.push(
                self.peek_file(
                    relative
                        .to_str()
                        .ok_or(napi::Error::from_reason("Could not parse path"))?
                        .to_owned(),
                )?,
            );
        }

        Ok(results)
    }

    fn reader(
        &mut self,
        file: &VersionFile,
        start: u64,
        end: u64,
    ) -> anyhow::Result<Box<dyn MinimumFileObject + 'static>> {
        let mut file = File::open(self.base_dir.join(file.relative_filename.clone()))?;

        if start != 0 {
            file.seek(SeekFrom::Start(start))?;
        }

        if end != 0 {
            return Ok(Box::new(file.take(end - start)));
        }

        Ok(Box::new(file))
    }

    fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
        let pathbuf = self.base_dir.join(sub_path.clone());
        if !pathbuf.exists() {
            return Err(anyhow!("Path doesn't exist."));
        };

        let file = File::open(pathbuf.clone())?;
        let metadata = file.try_clone()?.metadata()?;
        let permission_object = metadata.permissions();
        let permissions = {
            let perm: u32;
            #[cfg(target_family = "unix")]
            {
                perm = permission_object.mode();
            }
            #[cfg(not(target_family = "unix"))]
            {
                perm = 0
            }
            perm
        };

        Ok(VersionFile {
            relative_filename: sub_path,
            permission: permissions,
            size: metadata.len(),
        })
    }

    fn require_whole_files(&self) -> bool {
        false
    }
}

pub static SEVEN_ZIP_INSTALLED: LazyLock<bool> =
    LazyLock::new(|| Command::new("7z").output().is_ok());

#[derive(Clone)]
pub struct ZipVersionBackend {
    path: String,
}
impl ZipVersionBackend {
    pub fn new(path: PathBuf) -> anyhow::Result<Self> {
        Ok(Self {
            path: path.to_str().expect("invalid utf path").to_owned(),
        })
    }
}

pub struct ZipFileWrapper {
    command: Child,
    reader: BufReader<ChildStdout>,
}

impl ZipFileWrapper {
    pub fn new(mut command: Child) -> Self {
        let stdout = command.stdout.take().expect("failed to access stdout of 7z");
        let reader = BufReader::new(stdout);
        ZipFileWrapper { command, reader }
    }
}

/**
 * This read implemention is a result of debugging hell
 * It should probably be replaced with a .take() call.
 */
impl Read for ZipFileWrapper {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        self.reader.read(buf)
    }
}

impl Drop for ZipFileWrapper {
    fn drop(&mut self) {
        self.command.wait().expect("failed to wait for 7z exit");
    }
}

impl VersionBackend for ZipVersionBackend {
    fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
        let mut list_command = Command::new("7z");
        list_command.args(vec!["l", "-ba", &self.path]);
        let result = list_command.output()?;
        if !result.status.success() {
            return Err(anyhow!(
                "failed to list files: code {:?}",
                result.status.code()
            ));
        }
        let raw_result = String::from_utf8(result.stdout)?;
        let files = raw_result
            .split("\n")
            .filter(|v| v.len() > 0)
            .map(|v| v.split(" ").filter(|v| v.len() > 0));
        let mut results = Vec::new();

        for mut file in files {
            let (date, time, attrs, size, compress, name) = (
                file.next().unwrap(),
                file.next().unwrap(),
                file.next().unwrap(),
                file.next().unwrap(),
                file.next().unwrap(),
                file.next().unwrap(),
            );
            println!("got line: {} {} {} {} {} {}", date, time, attrs, size, compress, name);
            results.push(VersionFile {
                relative_filename: name.to_owned(),
                permission: 0,
                size: size.parse().unwrap(),
            });
        }

        Ok(results)
    }

    fn reader(
        &mut self,
        file: &VersionFile,
        start: u64,
        end: u64,
    ) -> anyhow::Result<Box<dyn MinimumFileObject + '_>> {
        let mut read_command = Command::new("7z");
        read_command.args(vec!["e", "-so", &self.path, &file.relative_filename]);
        let output = read_command.stdout(Stdio::piped()).spawn().expect("failed to spawn 7z");
        Ok(Box::new(ZipFileWrapper::new(output)))
    }

    fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
        let files = self.list_files()?;
        let file = files
            .iter()
            .find(|v| v.relative_filename == sub_path)
            .expect("file not found");

        Ok(file.clone())
    }

    fn require_whole_files(&self) -> bool {
        true
    }
}
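PathVersionBackend's ranged reads come from plain std::io building blocks: `Seek` to skip to `start`, then `Read::take` to cap the stream at `end`. A standalone sketch of that technique (`ranged_reader` is our name; like the code above, it treats `end == 0` as "read to EOF" and assumes `end >= start`):

use std::fs::File;
use std::io::{Read, Seek, SeekFrom};

fn ranged_reader(path: &str, start: u64, end: u64) -> std::io::Result<Box<dyn Read>> {
    let mut file = File::open(path)?;
    if start != 0 {
        // Skip straight to the requested offset.
        file.seek(SeekFrom::Start(start))?;
    }
    if end != 0 {
        // Hand back a reader that stops after end - start bytes.
        return Ok(Box::new(file.take(end - start)));
    }
    Ok(Box::new(file))
}

fn main() -> std::io::Result<()> {
    // Read bytes 16..256 of this crate's manifest as a demonstration.
    let mut slice = Vec::new();
    ranged_reader("Cargo.toml", 16, 256)?.read_to_end(&mut slice)?;
    println!("read {} bytes", slice.len());
    Ok(())
}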
src/version/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
pub mod utils;
pub mod types;
pub mod backends;
src/version/types.rs (new file, 55 lines)
@@ -0,0 +1,55 @@
use std::{fmt::Debug, io::Read};

use dyn_clone::DynClone;
use tokio::io::{self, AsyncRead};

#[derive(Debug, Clone)]
pub struct VersionFile {
    pub relative_filename: String,
    pub permission: u32,
    pub size: u64,
}

pub trait MinimumFileObject: Read + Send {}
impl<T: Read + Send> MinimumFileObject for T {}

// Intentionally not a generic, because of types in read_file
pub struct ReadToAsyncRead<'a> {
    pub inner: Box<dyn Read + Send + 'a>,
}

const ASYNC_READ_BUFFER_SIZE: usize = 8128;

impl<'a> AsyncRead for ReadToAsyncRead<'a> {
    fn poll_read(
        mut self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
        buf: &mut tokio::io::ReadBuf<'_>,
    ) -> std::task::Poll<io::Result<()>> {
        let mut read_buf = [0u8; ASYNC_READ_BUFFER_SIZE];
        let read_size = ASYNC_READ_BUFFER_SIZE.min(buf.remaining());
        match self.inner.read(&mut read_buf[0..read_size]) {
            Ok(read) => {
                buf.put_slice(&read_buf[0..read]);
                std::task::Poll::Ready(Ok(()))
            }
            Err(err) => std::task::Poll::Ready(Err(err)),
        }
    }
}

pub trait VersionBackend: DynClone {
    fn require_whole_files(&self) -> bool;
    fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>>;
    fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile>;
    fn reader(
        &mut self,
        file: &VersionFile,
        start: u64,
        end: u64,
    ) -> anyhow::Result<Box<dyn MinimumFileObject + '_>>;
}

dyn_clone::clone_trait_object!(VersionBackend);
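`ReadToAsyncRead` is the shim that lets a blocking backend reader feed the async `FramedRead` pipeline in utils.rs below: each `poll_read` performs one synchronous read of at most 8128 bytes and immediately reports ready. A usage sketch, assuming the type above is in scope; note the inner read really does block inside `poll_read`:

use tokio::io::AsyncReadExt;

#[tokio::main]
async fn main() -> std::io::Result<()> {
    // Any blocking Read + Send works; a byte slice stands in for a backend reader.
    let data: &[u8] = b"hello droplet";
    let mut reader = ReadToAsyncRead {
        inner: Box::new(data),
    };

    let mut out = Vec::new();
    reader.read_to_end(&mut out).await?;
    assert_eq!(out, b"hello droplet");
    Ok(())
}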
src/version/utils.rs (new file, 173 lines)
@@ -0,0 +1,173 @@
use std::{
    collections::HashMap,
    fs::File,
    path::Path,
    process::{Command, ExitStatus},
};

use anyhow::anyhow;
use napi::{bindgen_prelude::*, sys::napi_value__, tokio_stream::StreamExt};
use tokio_util::codec::{BytesCodec, FramedRead};

use crate::version::{
    backends::{PathVersionBackend, ZipVersionBackend, SEVEN_ZIP_INSTALLED},
    types::{ReadToAsyncRead, VersionBackend, VersionFile},
};

/**
 * Append new backends here
 */
pub fn create_backend_constructor<'a>(
    path: &Path,
) -> Option<Box<dyn FnOnce() -> Result<Box<dyn VersionBackend + Send + 'a>>>> {
    if !path.exists() {
        return None;
    }

    let is_directory = path.is_dir();
    if is_directory {
        let base_dir = path.to_path_buf();
        return Some(Box::new(move || {
            Ok(Box::new(PathVersionBackend { base_dir }))
        }));
    };

    if *SEVEN_ZIP_INSTALLED {
        let mut test = Command::new("7z");
        test.args(vec!["t", path.to_str().expect("invalid utf path")]);
        let status = test.status().ok()?;
        if status.code().unwrap_or(1) == 0 {
            let buf = path.to_path_buf();
            return Some(Box::new(move || {
                Ok(Box::new(ZipVersionBackend::new(buf)?))
            }));
        }
    }

    None
}

/**
 * Persistent object so we can cache things between commands
 */
#[napi(js_name = "DropletHandler")]
pub struct DropletHandler<'a> {
    backend_cache: HashMap<String, Box<dyn VersionBackend + Send + 'a>>,
}

#[napi]
impl<'a> DropletHandler<'a> {
    #[napi(constructor)]
    pub fn new() -> Self {
        DropletHandler {
            backend_cache: HashMap::new(),
        }
    }

    pub fn create_backend_for_path(
        &mut self,
        path: String,
    ) -> Option<&mut Box<dyn VersionBackend + Send + 'a>> {
        let fs_path = Path::new(&path);
        let constructor = create_backend_constructor(fs_path)?;

        let existing_backend = match self.backend_cache.entry(path) {
            std::collections::hash_map::Entry::Occupied(occupied_entry) => occupied_entry.into_mut(),
            std::collections::hash_map::Entry::Vacant(vacant_entry) => {
                let backend = constructor().ok()?;
                vacant_entry.insert(backend)
            }
        };

        Some(existing_backend)
    }

    #[napi]
    pub fn has_backend_for_path(&self, path: String) -> bool {
        let path = Path::new(&path);

        let has_backend = create_backend_constructor(path).is_some();

        has_backend
    }

    #[napi]
    pub fn list_files(&mut self, path: String) -> Result<Vec<String>> {
        let backend = self
            .create_backend_for_path(path)
            .ok_or(napi::Error::from_reason("No backend for path"))?;
        let files = backend.list_files()?;
        Ok(files.into_iter().map(|e| e.relative_filename).collect())
    }

    #[napi]
    pub fn peek_file(&mut self, path: String, sub_path: String) -> Result<u64> {
        let backend = self
            .create_backend_for_path(path)
            .ok_or(napi::Error::from_reason("No backend for path"))?;

        let file = backend.peek_file(sub_path)?;

        Ok(file.size)
    }

    #[napi]
    pub fn read_file(
        &mut self,
        reference: Reference<DropletHandler<'static>>,
        path: String,
        sub_path: String,
        env: Env,
        start: Option<BigInt>,
        end: Option<BigInt>,
    ) -> anyhow::Result<JsDropStreamable> {
        let stream = reference.share_with(env, |handler| {
            let backend = handler
                .create_backend_for_path(path)
                .ok_or(anyhow!("Failed to create backend."))?;
            let version_file = VersionFile {
                relative_filename: sub_path,
                permission: 0, // Shouldn't matter
                size: 0,       // Shouldn't matter
            };
            // Use `?` operator for cleaner error propagation from `Option`
            let reader = backend.reader(
                &version_file,
                start.map(|e| e.get_u64().1).unwrap_or(0),
                end.map(|e| e.get_u64().1).unwrap_or(0),
            )?;

            let async_reader = ReadToAsyncRead { inner: reader };

            // Create a FramedRead stream with BytesCodec for chunking
            let stream = FramedRead::new(async_reader, BytesCodec::new())
                // Use StreamExt::map to transform each Result item
                .map(|result_item| {
                    result_item
                        // Apply Result::map to transform Ok(BytesMut) to Ok(Vec<u8>)
                        .map(|bytes| bytes.to_vec())
                        // Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
                        .map_err(napi::Error::from) // napi::Error implements From<tokio::io::Error>
                });
            // Create the napi-rs ReadableStream from the tokio_stream::Stream
            ReadableStream::create_with_stream_bytes(&env, stream)
        })?;

        Ok(JsDropStreamable { inner: stream })
    }
}

#[napi]
pub struct JsDropStreamable {
    inner: SharedReference<DropletHandler<'static>, ReadableStream<'static, BufferSlice<'static>>>,
}

#[napi]
impl JsDropStreamable {
    #[napi]
    pub fn get_stream(&self) -> *mut napi_value__ {
        self.inner.raw()
    }
}
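DropletHandler is the piece the Node side holds onto: the first call for a path runs `create_backend_constructor` and caches the resulting backend, so later `list_files`, `peek_file`, and `read_file` calls on the same path reuse it. A sketch of that flow from the Rust side, assuming the types above are in scope (`./game-build` is a hypothetical directory):

fn demo() -> napi::Result<()> {
    let mut handler = DropletHandler::new();

    // First call constructs and caches a PathVersionBackend for this path.
    let files = handler.list_files("./game-build".to_string())?;

    // Subsequent calls hit backend_cache instead of re-probing the path.
    for name in &files {
        let size = handler.peek_file("./game-build".to_string(), name.clone())?;
        println!("{name}: {size} bytes");
    }

    Ok(())
}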