Mirror of https://github.com/Drop-OSS/droplet.git (synced 2025-11-12 07:42:48 +10:00)

Compare commits: v1.1.2...ba35ca9a14 (46 commits)
Commit SHAs in this range, newest first:

ba35ca9a14, ae4648845e, bd30464a08, c67cca4ee0, cae208a3e0, 4276b9d668, 4fb9bb7563, 913dc2f58d, 7ec5e9f215, b67a67d809, 87b19a5c8c, dc3a420986, 1665033fd9, 2969d64c45, e525ff44bb, 52a685391a, 535d5a4062, 450734f5c9, 20e2eda381, 04d3f2dd8c, 59ca57ee1b, 8f4b2a6c6d, 7c3e6fe63c, 204902951e, b3011c517d, 74a54eb9ac, 89e94e3afd, 169d471bb7, 076dc60155, 48e5b97a4e, c1aaf8adcd, fe43f79062, 30b9c4a1cc, 42f770aed9, 4670df4127, e33eaebe1a, f954f23410, 3632687001, 90817487ed, 98b84c64d4, d3186cdd5f, bb678b4b3a, cc94798962, 7811818a72, b6910e717b, 45a26c7156
.github/workflows/CI.yml (vendored), 34 changed lines

@@ -12,12 +9,9 @@ permissions:
       - main
     tags-ignore:
       - "**"
-    paths-ignore:
-      - "**/*.md"
-      - LICENSE
-      - "**/*.gitignore"
-      - .editorconfig
-      - docs/**
+    paths:
+      - package.json
+      - .github/workflows/*
   pull_request: null
 jobs:
   build:
@@ -46,15 +43,6 @@ jobs:
           target: aarch64-unknown-linux-gnu
           docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
           build: yarn build --target aarch64-unknown-linux-gnu
-        - host: ubuntu-latest
-          target: armv7-unknown-linux-gnueabihf
-          setup: |
-            sudo apt-get update
-            sudo apt-get install gcc-arm-linux-gnueabihf -y
-          build: yarn build --target armv7-unknown-linux-gnueabihf
-        - host: ubuntu-latest
-          target: armv7-unknown-linux-musleabihf
-          build: yarn build --target armv7-unknown-linux-musleabihf
         - host: ubuntu-latest
           target: aarch64-unknown-linux-musl
           docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
@@ -71,7 +59,7 @@ jobs:
            sudo apt-get update
            sudo apt-get install gcc-riscv64-linux-gnu -y
          build: yarn build --target riscv64gc-unknown-linux-gnu
-    name: stable - ${{ matrix.settings.target }} - node@20
+    name: nightly - ${{ matrix.settings.target }} - node@20
     runs-on: ${{ matrix.settings.host }}
     steps:
       - uses: actions/checkout@v4
@@ -82,7 +70,7 @@ jobs:
           node-version: 20
           cache: yarn
       - name: Install
-        uses: dtolnay/rust-toolchain@stable
+        uses: dtolnay/rust-toolchain@nightly
         if: ${{ !matrix.settings.docker }}
         with:
           toolchain: nightly
@@ -106,14 +94,20 @@ jobs:
         if: ${{ matrix.settings.setup }}
         shell: bash
       - name: Install dependencies
-        run: yarn install
+        run: |-
+          cargo install patch-crate &&
+          cargo patch-crate &&
+          yarn install
       - name: Build in docker
         uses: addnab/docker-run-action@v3
         if: ${{ matrix.settings.docker }}
         with:
           image: ${{ matrix.settings.docker }}
           options: "--user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build"
-          run: ${{ matrix.settings.build }}
+          run: |-
+            rustup default nightly &&
+            rustup target add ${{ matrix.settings.target }} &&
+            ${{ matrix.settings.build }}
       - name: Build
         run: ${{ matrix.settings.build }}
         if: ${{ !matrix.settings.docker }}
@@ -358,6 +352,8 @@ jobs:
         with:
           name: bindings-aarch64-apple-darwin
           path: artifacts
       - name: Move artifacts
         run: mv artifacts/* .
+      - name: Combine binaries
+        run: yarn universal
       - name: Upload artifact
.gitignore (vendored), 8 changed lines

@@ -9,7 +9,7 @@ npm-debug.log*
 yarn-debug.log*
 yarn-error.log*
 lerna-debug.log*
-.test
+.test*
 .tsimp
 
 # Diagnostic reports (https://nodejs.org/api/report.html)
@@ -186,7 +186,6 @@ $RECYCLE.BIN/
 #Added by cargo
 
 /target
-Cargo.lock
 
 .pnp.*
 .yarn/*
@@ -201,4 +200,7 @@ test.mjs
 manifest.json
 
 # JetBrains
-.idea
+.idea
+
+assets/*
+!assets/generate.sh
Cargo.lock (generated, new file), 2175 lines. File diff suppressed because it is too large.
Cargo.toml, 20 changed lines

@@ -9,18 +9,30 @@ crate-type = ["cdylib"]
 
 [dependencies]
 # Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
-napi = { version = "2.12.2", default-features = false, features = [
-  "napi4",
+napi = { version = "3.0.0-beta.11", default-features = false, features = [
+  "napi6",
+  "async",
+  "web_stream",
 ] }
-napi-derive = "2.12.2"
+napi-derive = "3.0.0-beta.11"
 hex = "0.4.3"
-serde_json = "1.0.128"
 md5 = "0.7.0"
 time-macros = "0.2.22"
 time = "0.3.41"
 webpki = "0.22.4"
 ring = "0.17.14"
+tokio = { version = "1.45.1", features = ["fs", "io-util"] }
+tokio-util = { version = "0.7.15", features = ["codec"] }
+rawzip = "0.3.0"
+dyn-clone = "1.0.20"
+flate2 = "1.1.2"
+rhai = "1.22.2"
+mlua = { version = "0.11.2", features = ["luajit"] }
+boa_engine = "0.20.0"
+serde_json = "1.0.143"
+
+[package.metadata.patch]
+crates = ["rawzip"]
 
 [dependencies.x509-parser]
 version = "0.17.0"
__test__/debug.spec.mjs (new file), 22 lines

@@ -0,0 +1,22 @@
import test from "ava";
import { DropletHandler, generateManifest } from "../index.js";

test.skip("debug", async (t) => {
  const handler = new DropletHandler();

  console.log("created handler");

  const manifest = JSON.parse(
    await new Promise((r, e) =>
      generateManifest(
        handler,
        "./assets/TheGame.zip",
        (_, __) => {},
        (_, __) => {},
        (err, manifest) => (err ? e(err) : r(manifest))
      )
    )
  );

  return t.pass();
});
@@ -2,7 +2,7 @@ import test from "ava";
 import fs from "node:fs";
 import path from "path";
 
-import { generateManifest, listFiles } from "../index.js";
+import { DropletHandler, generateManifest } from "../index.js";
 
 test("numerous small file", async (t) => {
   // Setup test dir
@@ -18,9 +18,12 @@ test("numerous small file", async (t) => {
     fs.writeFileSync(fileName, i.toString());
   }
 
+  const dropletHandler = new DropletHandler();
+
   const manifest = JSON.parse(
     await new Promise((r, e) =>
       generateManifest(
+        dropletHandler,
         dirName,
         (_, __) => {},
         (_, __) => {},
@@ -52,4 +55,41 @@ test("numerous small file", async (t) => {
   }
 
   fs.rmSync(dirName, { recursive: true });
-});
+});
+
+test.skip("performance test", async (t) => {
+  t.timeout(5 * 60 * 1000);
+  const dirName = "./.test/pt";
+  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
+  fs.mkdirSync(dirName, { recursive: true });
+
+  const fileSize = 1 * 1000 * 1000 * 1000; // 1GB
+
+  const randomStream = fs.createReadStream("/dev/random", {
+    start: 0,
+    end: fileSize,
+  });
+  const outputStream = fs.createWriteStream(path.join(dirName, "file.bin"));
+  await new Promise((r) => {
+    randomStream.pipe(outputStream);
+    randomStream.on("end", r);
+  });
+
+  const dropletHandler = new DropletHandler();
+
+  const start = Date.now();
+  await new Promise((r, e) =>
+    generateManifest(
+      dropletHandler,
+      dirName,
+      (_, __) => {},
+      (_, __) => {},
+      (err, manifest) => (err ? e(err) : r(manifest))
+    )
+  );
+  const end = Date.now();
+
+  t.pass(`Took ${end - start}ms to process ${fileSize / (1000 * 1000)}MB`);
+
+  fs.rmSync(dirName, { recursive: true });
+});
__test__/script.spec.mjs (new file), 62 lines

@@ -0,0 +1,62 @@
import test from "ava";
import { ScriptEngine } from "../index.js";

test("lua syntax fail", (t) => {
  const scriptEngine = new ScriptEngine();

  const luaIshCode = `
  print("hello world);
  `;

  try {
    const script = scriptEngine.buildLuaScript(luaIshCode);
  } catch {
    return t.pass();
  }
  t.fail();
});

test("js syntax fail", (t) => {
  const scriptEngine = new ScriptEngine();

  const jsIshCode = `
  const v = "hello world;
  `;

  try {
    const script = scriptEngine.buildJsScript(jsIshCode);
  } catch {
    return t.pass();
  }
  t.fail();
});

test("js", (t) => {
  const scriptEngine = new ScriptEngine();

  const jsModule = `
  const v = "1" + "2";
  ["1", "2", "3", v]
  `;

  const script = scriptEngine.buildJsScript(jsModule);

  scriptEngine.fetchStrings(script);

  t.pass();
});

test("lua", (t) => {
  const scriptEngine = new ScriptEngine();

  const luaModule = `
  local arr = {"1", "2"};
  return arr;
  `;

  const script = scriptEngine.buildLuaScript(luaModule);

  scriptEngine.fetchStrings(script);

  t.pass();
});
__test__/utils.spec.mjs (new file), 220 lines

@@ -0,0 +1,220 @@
import test from "ava";
import fs from "node:fs";
import path from "path";
import { createHash } from "node:crypto";
import prettyBytes from "pretty-bytes";

import droplet, { DropletHandler, generateManifest } from "../index.js";

test("check alt thread util", async (t) => {
  let endtime1, endtime2;

  droplet.callAltThreadFunc(async () => {
    await new Promise((r) => setTimeout(r, 100));
    endtime1 = Date.now();
  });

  await new Promise((r) => setTimeout(r, 500));
  endtime2 = Date.now();

  const difference = endtime2 - endtime1;
  if (difference >= 600) {
    t.fail("likely isn't multithreaded, difference: " + difference);
  }

  t.pass();
});

test("list files", async (t) => {
  const dirName = "./.listfiles";
  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
  fs.mkdirSync(dirName, { recursive: true });
  fs.mkdirSync(dirName + "/subdir", { recursive: true });
  fs.mkdirSync(dirName + "/subddir", { recursive: true });

  fs.writeFileSync(dirName + "/root.txt", "root");
  fs.writeFileSync(dirName + "/subdir/one.txt", "the first subdir");
  fs.writeFileSync(dirName + "/subddir/two.txt", "the second");

  const dropletHandler = new DropletHandler();
  const files = dropletHandler.listFiles(dirName);

  t.assert(
    files.sort().join("\n"),
    ["root.txt", "subddir/two.txt", "subdir/one.txt"].join("\n")
  );

  fs.rmSync(dirName, { recursive: true });
});

test("read file", async (t) => {
  const dirName = "./.test2";
  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
  fs.mkdirSync(dirName, { recursive: true });

  const testString = "g'day what's up my koala bros\n".repeat(1000);

  fs.writeFileSync(dirName + "/TESTFILE", testString);

  const dropletHandler = new DropletHandler();

  const stream = dropletHandler.readFile(
    dirName,
    "TESTFILE",
    BigInt(0),
    BigInt(testString.length)
  );

  let finalString = "";

  for await (const chunk of stream.getStream()) {
    // Do something with each 'chunk'
    finalString += String.fromCharCode.apply(null, chunk);
  }

  t.assert(finalString == testString, "file strings don't match");
  fs.rmSync(dirName, { recursive: true });
});

test("read file offset", async (t) => {
  const dirName = "./.test3";
  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
  fs.mkdirSync(dirName, { recursive: true });

  const testString = "0123456789";
  fs.writeFileSync(dirName + "/TESTFILE", testString);

  const dropletHandler = new DropletHandler();
  const stream = dropletHandler.readFile(
    dirName,
    "TESTFILE",
    BigInt(1),
    BigInt(4)
  );

  let finalString = "";

  for await (const chunk of stream.getStream()) {
    // Do something with each 'chunk'
    finalString += String.fromCharCode.apply(null, chunk);
  }

  const expectedString = testString.slice(1, 4);

  t.assert(
    finalString == expectedString,
    `file strings don't match: ${finalString} vs ${expectedString}`
  );
  fs.rmSync(dirName, { recursive: true });
});

test.skip("zip speed test", async (t) => {
  t.timeout(100_000_000);
  const dropletHandler = new DropletHandler();

  const stream = dropletHandler.readFile("./assets/TheGame.zip", "setup.exe");

  let totalRead = 0;
  let totalSeconds = 0;

  let lastTime = process.hrtime.bigint();
  const timeThreshold = BigInt(1_000_000_000);
  let runningTotal = 0;
  let runningTime = BigInt(0);
  for await (const chunk of stream.getStream()) {
    // Do something with each 'chunk'
    const currentTime = process.hrtime.bigint();
    const timeDiff = currentTime - lastTime;
    lastTime = currentTime;
    runningTime += timeDiff;

    runningTotal += chunk.length;

    if (runningTime >= timeThreshold) {
      console.log(`${prettyBytes(runningTotal)}/s`);
      totalRead += runningTotal;
      totalSeconds += 1;
      runningTime = BigInt(0);
      runningTotal = 0;
    }
  }

  const roughAverage = totalRead / totalSeconds;

  console.log(`total rough average: ${prettyBytes(roughAverage)}/s`);

  t.pass();
});

test.skip("zip manifest test", async (t) => {
  const dropletHandler = new DropletHandler();
  const manifest = JSON.parse(
    await new Promise((r, e) =>
      generateManifest(
        dropletHandler,
        "./assets/TheGame.zip",
        (_, __) => {},
        (_, __) => {},
        (err, manifest) => (err ? e(err) : r(manifest))
      )
    )
  );

  for (const [filename, data] of Object.entries(manifest)) {
    let start = 0;
    for (const [chunkIndex, length] of data.lengths.entries()) {
      const hash = createHash("md5");
      const stream = (
        await dropletHandler.readFile(
          "./assets/TheGame.zip",
          filename,
          BigInt(start),
          BigInt(start + length)
        )
      ).getStream();

      let streamLength = 0;
      await stream.pipeTo(
        new WritableStream({
          write(chunk) {
            streamLength += chunk.length;
            hash.update(chunk);
          },
        })
      );

      if (streamLength != length)
        return t.fail(
          `stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
        );

      const digest = hash.digest("hex");
      if (data.checksums[chunkIndex] != digest)
        return t.fail(
          `checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
        );

      start += length;
    }
  }

  t.pass();
});

test.skip("partially compress zip test", async (t) => {
  const dropletHandler = new DropletHandler();

  const manifest = JSON.parse(
    await new Promise((r, e) =>
      generateManifest(
        dropletHandler,
        "./assets/my horror game.zip",
        (_, __) => {},
        (_, __) => {},
        (err, manifest) => (err ? e(err) : r(manifest))
      )
    )
  );

  return t.pass();
});
assets/generate.sh (new executable file), 4 lines

@@ -0,0 +1,4 @@
# yes "droplet is awesome" | dd of=./setup.exe bs=1024 count=1000000
dd if=/dev/random of=./setup.exe bs=1024 count=1000000
zip TheGame.zip setup.exe
rm setup.exe
index.d.ts (vendored), 47 changed lines

@@ -1,14 +1,43 @@
 /* tslint:disable */
 /* eslint-disable */
 
 /* auto-generated by NAPI-RS */
+/* eslint-disable */
+/**
+ * Persistent object so we can cache things between commands
+ */
+export declare class DropletHandler {
+  constructor()
+  hasBackendForPath(path: string): boolean
+  listFiles(path: string): Array<string>
+  peekFile(path: string, subPath: string): bigint
+  readFile(path: string, subPath: string, start?: bigint | undefined | null, end?: bigint | undefined | null): JsDropStreamable
+}
+
+export declare class JsDropStreamable {
+  getStream(): any
+}
+
+export declare class Script {
+
+}
+
+export declare class ScriptEngine {
+  constructor()
+  buildRahiScript(content: string): Script
+  buildLuaScript(content: string): Script
+  buildJsScript(content: string): Script
+  execute(script: Script): void
+  fetchStrings(script: Script): Array<string>
+}
+
+export declare function callAltThreadFunc(tsfn: ((err: Error | null, ) => any)): void
 
-export declare function hasBackendForPath(path: string): boolean
-export declare function listFiles(path: string): Array<string>
-export declare function callAltThreadFunc(callback: (...args: any[]) => any): void
-export declare function generateManifest(dir: string, progress: (...args: any[]) => any, log: (...args: any[]) => any, callback: (...args: any[]) => any): void
-export declare function generateRootCa(): Array<string>
 export declare function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string>
-export declare function verifyClientCertificate(clientCert: string, rootCa: string): boolean
+
+export declare function generateManifest(dropletHandler: DropletHandler, dir: string, progressSfn: ((err: Error | null, arg: number) => any), logSfn: ((err: Error | null, arg: string) => any), callbackSfn: ((err: Error | null, arg: string) => any)): void
+
+export declare function generateRootCa(): Array<string>
+
+export declare function signNonce(privateKey: string, nonce: string): string
+
+export declare function verifyClientCertificate(clientCert: string, rootCa: string): boolean
+
+export declare function verifyNonce(publicCert: string, nonce: string, signature: string): boolean
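Taken together, the regenerated typings move the path helpers onto a persistent DropletHandler and give generateManifest explicit (err, value) threadsafe callbacks. A minimal usage sketch based only on the declarations above and the test suite (the zip fixture path comes from assets/generate.sh; error handling is elided):

    import { DropletHandler, generateManifest } from "@drop-oss/droplet";

    const handler = new DropletHandler();

    // listFiles/peekFile/readFile now hang off the handler, so backends
    // (plain directories or zip archives) can be cached between calls.
    console.log(handler.listFiles("./assets/TheGame.zip"));

    // generateManifest reports through three (err, value) callbacks:
    // progress (number), log (string) and completion (JSON string).
    const manifest = JSON.parse(
      await new Promise((resolve, reject) =>
        generateManifest(
          handler,
          "./assets/TheGame.zip",
          (_err, percent) => console.log("progress", percent),
          (_err, line) => console.log(line),
          (err, json) => (err ? reject(err) : resolve(json))
        )
      )
    );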
index.js, 658 changed lines

@@ -1,323 +1,389 @@
The NAPI-RS v2 platform-switch loader was replaced wholesale by the v3 requireNative() loader. Removed (old loader):

/* tslint:disable */
/* eslint-disable */
/* prettier-ignore */

/* auto-generated by NAPI-RS */

const { existsSync, readFileSync } = require('fs')
const { join } = require('path')

const { platform, arch } = process

let nativeBinding = null
let localFileExisted = false
let loadError = null

function isMusl() {
  // For Node 10
  if (!process.report || typeof process.report.getReport !== 'function') {
    try {
      const lddPath = require('child_process').execSync('which ldd').toString().trim()
      return readFileSync(lddPath, 'utf8').includes('musl')
    } catch (e) {
      return true
    }
  } else {
    const { glibcVersionRuntime } = process.report.getReport().header
    return !glibcVersionRuntime
  }
}

switch (platform) {
  case 'android':
    switch (arch) {
      case 'arm64':
        localFileExisted = existsSync(join(__dirname, 'droplet.android-arm64.node'))
        try {
          if (localFileExisted) {
            nativeBinding = require('./droplet.android-arm64.node')
          } else {
            nativeBinding = require('@drop-oss/droplet-android-arm64')
          }
        } catch (e) {
          loadError = e
        }
        break
      case 'arm':
        localFileExisted = existsSync(join(__dirname, 'droplet.android-arm-eabi.node'))
        try {
          if (localFileExisted) {
            nativeBinding = require('./droplet.android-arm-eabi.node')
          } else {
            nativeBinding = require('@drop-oss/droplet-android-arm-eabi')
          }
        } catch (e) {
          loadError = e
        }
        break
      default:
        throw new Error(`Unsupported architecture on Android ${arch}`)
    }
    break
  case 'win32':
    switch (arch) {
      case 'x64':
        localFileExisted = existsSync(
          join(__dirname, 'droplet.win32-x64-msvc.node')
        )
        try {
          if (localFileExisted) {
            nativeBinding = require('./droplet.win32-x64-msvc.node')
          } else {
            nativeBinding = require('@drop-oss/droplet-win32-x64-msvc')
          }
        } catch (e) {
          loadError = e
        }
        break
      case 'ia32':
        localFileExisted = existsSync(
          join(__dirname, 'droplet.win32-ia32-msvc.node')
        )
        try {
          if (localFileExisted) {
            nativeBinding = require('./droplet.win32-ia32-msvc.node')
          } else {
            nativeBinding = require('@drop-oss/droplet-win32-ia32-msvc')
          }
        } catch (e) {
          loadError = e
        }
        break
      case 'arm64':
        localFileExisted = existsSync(
          join(__dirname, 'droplet.win32-arm64-msvc.node')
        )
        try {
          if (localFileExisted) {
            nativeBinding = require('./droplet.win32-arm64-msvc.node')
          } else {
            nativeBinding = require('@drop-oss/droplet-win32-arm64-msvc')
          }
        } catch (e) {
          loadError = e
        }
        break
      default:
        throw new Error(`Unsupported architecture on Windows: ${arch}`)
    }
    break
  case 'darwin':
    localFileExisted = existsSync(join(__dirname, 'droplet.darwin-universal.node'))
    try {
      if (localFileExisted) {
        nativeBinding = require('./droplet.darwin-universal.node')
      } else {
        nativeBinding = require('@drop-oss/droplet-darwin-universal')
      }
      break
    } catch {}
    switch (arch) {
      case 'x64':
        localFileExisted = existsSync(join(__dirname, 'droplet.darwin-x64.node'))
        try {
          if (localFileExisted) {
            nativeBinding = require('./droplet.darwin-x64.node')
          } else {
            nativeBinding = require('@drop-oss/droplet-darwin-x64')
          }
        } catch (e) {
          loadError = e
        }
        break
      case 'arm64':
        localFileExisted = existsSync(
          join(__dirname, 'droplet.darwin-arm64.node')
        )
        try {
          if (localFileExisted) {
            nativeBinding = require('./droplet.darwin-arm64.node')
          } else {
            nativeBinding = require('@drop-oss/droplet-darwin-arm64')
          }
        } catch (e) {
          loadError = e
        }
        break
      default:
        throw new Error(`Unsupported architecture on macOS: ${arch}`)
    }
    break
  case 'freebsd':
    if (arch !== 'x64') {
      throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
    }
    localFileExisted = existsSync(join(__dirname, 'droplet.freebsd-x64.node'))
    try {
      if (localFileExisted) {
        nativeBinding = require('./droplet.freebsd-x64.node')
      } else {
        nativeBinding = require('@drop-oss/droplet-freebsd-x64')
      }
    } catch (e) {
      loadError = e
    }
    break
  case 'linux':
    switch (arch) {
      case 'x64':
        if (isMusl()) {
          localFileExisted = existsSync(
            join(__dirname, 'droplet.linux-x64-musl.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./droplet.linux-x64-musl.node')
            } else {
              nativeBinding = require('@drop-oss/droplet-linux-x64-musl')
            }
          } catch (e) {
            loadError = e
          }
        } else {
          localFileExisted = existsSync(
            join(__dirname, 'droplet.linux-x64-gnu.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./droplet.linux-x64-gnu.node')
            } else {
              nativeBinding = require('@drop-oss/droplet-linux-x64-gnu')
            }
          } catch (e) {
            loadError = e
          }
        }
        break
      case 'arm64':
        if (isMusl()) {
          localFileExisted = existsSync(
            join(__dirname, 'droplet.linux-arm64-musl.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./droplet.linux-arm64-musl.node')
            } else {
              nativeBinding = require('@drop-oss/droplet-linux-arm64-musl')
            }
          } catch (e) {
            loadError = e
          }
        } else {
          localFileExisted = existsSync(
            join(__dirname, 'droplet.linux-arm64-gnu.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./droplet.linux-arm64-gnu.node')
            } else {
              nativeBinding = require('@drop-oss/droplet-linux-arm64-gnu')
            }
          } catch (e) {
            loadError = e
          }
        }
        break
      case 'arm':
        if (isMusl()) {
          localFileExisted = existsSync(
            join(__dirname, 'droplet.linux-arm-musleabihf.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./droplet.linux-arm-musleabihf.node')
            } else {
              nativeBinding = require('@drop-oss/droplet-linux-arm-musleabihf')
            }
          } catch (e) {
            loadError = e
          }
        } else {
          localFileExisted = existsSync(
            join(__dirname, 'droplet.linux-arm-gnueabihf.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./droplet.linux-arm-gnueabihf.node')
            } else {
              nativeBinding = require('@drop-oss/droplet-linux-arm-gnueabihf')
            }
          } catch (e) {
            loadError = e
          }
        }
        break
      case 'riscv64':
        if (isMusl()) {
          localFileExisted = existsSync(
            join(__dirname, 'droplet.linux-riscv64-musl.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./droplet.linux-riscv64-musl.node')
            } else {
              nativeBinding = require('@drop-oss/droplet-linux-riscv64-musl')
            }
          } catch (e) {
            loadError = e
          }
        } else {
          localFileExisted = existsSync(
            join(__dirname, 'droplet.linux-riscv64-gnu.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./droplet.linux-riscv64-gnu.node')
            } else {
              nativeBinding = require('@drop-oss/droplet-linux-riscv64-gnu')
            }
          } catch (e) {
            loadError = e
          }
        }
        break
      case 's390x':
        localFileExisted = existsSync(
          join(__dirname, 'droplet.linux-s390x-gnu.node')
        )
        try {
          if (localFileExisted) {
            nativeBinding = require('./droplet.linux-s390x-gnu.node')
          } else {
            nativeBinding = require('@drop-oss/droplet-linux-s390x-gnu')
          }
        } catch (e) {
          loadError = e
        }
        break
      default:
        throw new Error(`Unsupported architecture on Linux: ${arch}`)
    }
    break
  default:
    throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}

if (!nativeBinding) {
  if (loadError) {
    throw loadError
  }
  throw new Error(`Failed to load native binding`)
}

const { hasBackendForPath, listFiles, callAltThreadFunc, generateManifest, generateRootCa, generateClientCertificate, verifyClientCertificate, signNonce, verifyNonce } = nativeBinding

module.exports.hasBackendForPath = hasBackendForPath
module.exports.listFiles = listFiles
module.exports.callAltThreadFunc = callAltThreadFunc
module.exports.generateManifest = generateManifest
module.exports.generateRootCa = generateRootCa
module.exports.generateClientCertificate = generateClientCertificate
module.exports.verifyClientCertificate = verifyClientCertificate
module.exports.signNonce = signNonce
module.exports.verifyNonce = verifyNonce

Added (new loader):

// prettier-ignore
/* eslint-disable */
// @ts-nocheck
/* auto-generated by NAPI-RS */

const { createRequire } = require('node:module')
require = createRequire(__filename)

const { readFileSync } = require('node:fs')
let nativeBinding = null
const loadErrors = []

const isMusl = () => {
  let musl = false
  if (process.platform === 'linux') {
    musl = isMuslFromFilesystem()
    if (musl === null) {
      musl = isMuslFromReport()
    }
    if (musl === null) {
      musl = isMuslFromChildProcess()
    }
  }
  return musl
}

const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')

const isMuslFromFilesystem = () => {
  try {
    return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
  } catch {
    return null
  }
}

const isMuslFromReport = () => {
  let report = null
  if (typeof process.report?.getReport === 'function') {
    process.report.excludeNetwork = true
    report = process.report.getReport()
  }
  if (!report) {
    return null
  }
  if (report.header && report.header.glibcVersionRuntime) {
    return false
  }
  if (Array.isArray(report.sharedObjects)) {
    if (report.sharedObjects.some(isFileMusl)) {
      return true
    }
  }
  return false
}

const isMuslFromChildProcess = () => {
  try {
    return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
  } catch (e) {
    // If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
    return false
  }
}

function requireNative() {
  if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
    try {
      nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
    } catch (err) {
      loadErrors.push(err)
    }
  } else if (process.platform === 'android') {
    if (process.arch === 'arm64') {
      try {
        return require('./droplet.android-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-android-arm64')
      } catch (e) {
        loadErrors.push(e)
      }

    } else if (process.arch === 'arm') {
      try {
        return require('./droplet.android-arm-eabi.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-android-arm-eabi')
      } catch (e) {
        loadErrors.push(e)
      }

    } else {
      loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
    }
  } else if (process.platform === 'win32') {
    if (process.arch === 'x64') {
      try {
        return require('./droplet.win32-x64-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-win32-x64-msvc')
      } catch (e) {
        loadErrors.push(e)
      }

    } else if (process.arch === 'ia32') {
      try {
        return require('./droplet.win32-ia32-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-win32-ia32-msvc')
      } catch (e) {
        loadErrors.push(e)
      }

    } else if (process.arch === 'arm64') {
      try {
        return require('./droplet.win32-arm64-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-win32-arm64-msvc')
      } catch (e) {
        loadErrors.push(e)
      }

    } else {
      loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
    }
  } else if (process.platform === 'darwin') {
    try {
      return require('./droplet.darwin-universal.node')
    } catch (e) {
      loadErrors.push(e)
    }
    try {
      return require('@drop-oss/droplet-darwin-universal')
    } catch (e) {
      loadErrors.push(e)
    }

    if (process.arch === 'x64') {
      try {
        return require('./droplet.darwin-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-darwin-x64')
      } catch (e) {
        loadErrors.push(e)
      }

    } else if (process.arch === 'arm64') {
      try {
        return require('./droplet.darwin-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-darwin-arm64')
      } catch (e) {
        loadErrors.push(e)
      }

    } else {
      loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
    }
  } else if (process.platform === 'freebsd') {
    if (process.arch === 'x64') {
      try {
        return require('./droplet.freebsd-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-freebsd-x64')
      } catch (e) {
        loadErrors.push(e)
      }

    } else if (process.arch === 'arm64') {
      try {
        return require('./droplet.freebsd-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-freebsd-arm64')
      } catch (e) {
        loadErrors.push(e)
      }

    } else {
      loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
    }
  } else if (process.platform === 'linux') {
    if (process.arch === 'x64') {
      if (isMusl()) {
        try {
          return require('./droplet.linux-x64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@drop-oss/droplet-linux-x64-musl')
        } catch (e) {
          loadErrors.push(e)
        }

      } else {
        try {
          return require('./droplet.linux-x64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@drop-oss/droplet-linux-x64-gnu')
        } catch (e) {
          loadErrors.push(e)
        }

      }
    } else if (process.arch === 'arm64') {
      if (isMusl()) {
        try {
          return require('./droplet.linux-arm64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@drop-oss/droplet-linux-arm64-musl')
        } catch (e) {
          loadErrors.push(e)
        }

      } else {
        try {
          return require('./droplet.linux-arm64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@drop-oss/droplet-linux-arm64-gnu')
        } catch (e) {
          loadErrors.push(e)
        }

      }
    } else if (process.arch === 'arm') {
      if (isMusl()) {
        try {
          return require('./droplet.linux-arm-musleabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@drop-oss/droplet-linux-arm-musleabihf')
        } catch (e) {
          loadErrors.push(e)
        }

      } else {
        try {
          return require('./droplet.linux-arm-gnueabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@drop-oss/droplet-linux-arm-gnueabihf')
        } catch (e) {
          loadErrors.push(e)
        }

      }
    } else if (process.arch === 'riscv64') {
      if (isMusl()) {
        try {
          return require('./droplet.linux-riscv64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@drop-oss/droplet-linux-riscv64-musl')
        } catch (e) {
          loadErrors.push(e)
        }

      } else {
        try {
          return require('./droplet.linux-riscv64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@drop-oss/droplet-linux-riscv64-gnu')
        } catch (e) {
          loadErrors.push(e)
        }

      }
    } else if (process.arch === 'ppc64') {
      try {
        return require('./droplet.linux-ppc64-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-linux-ppc64-gnu')
      } catch (e) {
        loadErrors.push(e)
      }

    } else if (process.arch === 's390x') {
      try {
        return require('./droplet.linux-s390x-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@drop-oss/droplet-linux-s390x-gnu')
      } catch (e) {
        loadErrors.push(e)
      }

    } else {
      loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
    }
  } else {
    loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
  }
}

nativeBinding = requireNative()

if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
  try {
    nativeBinding = require('./droplet.wasi.cjs')
  } catch (err) {
    if (process.env.NAPI_RS_FORCE_WASI) {
      loadErrors.push(err)
    }
  }
  if (!nativeBinding) {
    try {
      nativeBinding = require('@drop-oss/droplet-wasm32-wasi')
    } catch (err) {
      if (process.env.NAPI_RS_FORCE_WASI) {
        loadErrors.push(err)
      }
    }
  }
}

if (!nativeBinding) {
  if (loadErrors.length > 0) {
    throw new Error(
      `Cannot find native binding. ` +
        `npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
        'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
      { cause: loadErrors }
    )
  }
  throw new Error(`Failed to load native binding`)
}

module.exports = nativeBinding
module.exports.DropletHandler = nativeBinding.DropletHandler
module.exports.JsDropStreamable = nativeBinding.JsDropStreamable
module.exports.Script = nativeBinding.Script
module.exports.ScriptEngine = nativeBinding.ScriptEngine
module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc
module.exports.generateClientCertificate = nativeBinding.generateClientCertificate
module.exports.generateManifest = nativeBinding.generateManifest
module.exports.generateRootCa = nativeBinding.generateRootCa
module.exports.signNonce = nativeBinding.signNonce
module.exports.verifyClientCertificate = nativeBinding.verifyClientCertificate
module.exports.verifyNonce = nativeBinding.verifyNonce
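The rewritten loader above adds two environment-variable escape hatches: NAPI_RS_NATIVE_LIBRARY_PATH short-circuits platform detection entirely, and NAPI_RS_FORCE_WASI prefers the wasm32-wasi fallback even when a native binary exists. A hedged sketch of exercising them (the .node path shown is hypothetical):

    // Run with an explicitly built binary, bypassing platform detection:
    //   NAPI_RS_NATIVE_LIBRARY_PATH=./droplet.linux-x64-gnu.node node main.js
    // Or force the WASI fallback:
    //   NAPI_RS_FORCE_WASI=1 node main.js
    const { DropletHandler } = require("@drop-oss/droplet");
    console.log(new DropletHandler().hasBackendForPath("."));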
The README and package.json for the two dropped armv7 platform sub-packages were deleted.

(deleted file) @@ -1,3 +0,0 @@
# `@drop-oss/droplet-linux-arm-gnueabihf`

This is the **armv7-unknown-linux-gnueabihf** binary for `@drop-oss/droplet`

(deleted file) @@ -1,21 +0,0 @@
{
  "name": "@drop-oss/droplet-linux-arm-gnueabihf",
  "version": "0.0.0",
  "os": [
    "linux"
  ],
  "cpu": [
    "arm"
  ],
  "main": "droplet.linux-arm-gnueabihf.node",
  "files": [
    "droplet.linux-arm-gnueabihf.node"
  ],
  "license": "MIT",
  "engines": {
    "node": ">= 10"
  },
  "repository": {
    "url": "https://github.com/Drop-OSS/droplet"
  }
}

(deleted file) @@ -1,3 +0,0 @@
# `@drop-oss/droplet-linux-arm-musleabihf`

This is the **armv7-unknown-linux-musleabihf** binary for `@drop-oss/droplet`

(deleted file) @@ -1,21 +0,0 @@
{
  "name": "@drop-oss/droplet-linux-arm-musleabihf",
  "version": "0.0.0",
  "os": [
    "linux"
  ],
  "cpu": [
    "arm"
  ],
  "main": "droplet.linux-arm-musleabihf.node",
  "files": [
    "droplet.linux-arm-musleabihf.node"
  ],
  "license": "MIT",
  "engines": {
    "node": ">= 10"
  },
  "repository": {
    "url": "https://github.com/Drop-OSS/droplet"
  }
}
package.json, 30 changed lines

@@ -1,6 +1,6 @@
 {
   "name": "@drop-oss/droplet",
-  "version": "1.1.2",
+  "version": "2.3.1",
   "main": "index.js",
   "types": "index.d.ts",
   "napi": {
@@ -8,25 +8,35 @@
     "triples": {
       "additional": [
         "aarch64-apple-darwin",
         "x86_64-apple-darwin",
+        "universal-apple-darwin",
         "aarch64-unknown-linux-gnu",
         "aarch64-unknown-linux-musl",
-        "aarch64-pc-windows-msvc",
-        "armv7-unknown-linux-gnueabihf",
-        "armv7-unknown-linux-musleabihf",
         "x86_64-unknown-linux-gnu",
         "x86_64-unknown-linux-musl",
-        "universal-apple-darwin",
-        "riscv64gc-unknown-linux-gnu"
+        "riscv64gc-unknown-linux-gnu",
+        "aarch64-pc-windows-msvc",
+        "x86_64-pc-windows-msvc"
       ]
     }
   },
   "license": "MIT",
   "devDependencies": {
-    "@napi-rs/cli": "^2.18.4",
+    "@napi-rs/cli": "3.0.0-alpha.91",
     "@types/node": "^22.13.10",
-    "ava": "^6.2.0"
+    "ava": "^6.2.0",
+    "pretty-bytes": "^7.0.1",
+    "tsimp": "^2.0.12"
   },
   "ava": {
-    "timeout": "3m"
+    "timeout": "3m",
+    "extensions": [
+      "cjs",
+      "mjs",
+      "js",
+      "ts",
+      "mts"
+    ]
   },
   "engines": {
     "node": ">= 10"
@@ -37,7 +47,7 @@
     "build:debug": "napi build --platform",
     "prepublishOnly": "napi prepublish -t npm",
     "test": "ava",
-    "universal": "napi universal",
+    "universal": "napi universalize",
     "version": "napi version"
   },
   "packageManager": "yarn@4.7.0",
(deleted file; the corresponding `pub mod file_utils;` is removed from lib.rs below) @@ -1,123 +0,0 @@
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
    fs::{self, metadata, File},
    io::BufReader,
    path::{Path, PathBuf},
};

const CHUNK_SIZE: usize = 1024 * 1024 * 64;

fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
    if metadata(path).unwrap().is_dir() {
        let paths = fs::read_dir(path).unwrap();
        for path_result in paths {
            let full_path = path_result.unwrap().path();
            if metadata(&full_path).unwrap().is_dir() {
                _list_files(vec, &full_path);
            } else {
                vec.push(full_path);
            }
        }
    }
}

pub struct VersionFile {
    pub relative_filename: String,
    pub permission: u32,
}

pub trait VersionBackend: 'static {
    fn list_files(&self, path: &Path) -> Vec<VersionFile>;
    fn reader(&self, file: &VersionFile) -> BufReader<File>;
}

pub struct PathVersionBackend {
    pub base_dir: PathBuf,
}
impl VersionBackend for PathVersionBackend {
    fn list_files(&self, path: &Path) -> Vec<VersionFile> {
        let mut vec = Vec::new();
        _list_files(&mut vec, path);

        let mut results = Vec::new();

        for pathbuf in vec.iter() {
            let file = File::open(pathbuf.clone()).unwrap();
            let relative = pathbuf.strip_prefix(path).unwrap();
            let metadata = file.try_clone().unwrap().metadata().unwrap();
            let permission_object = metadata.permissions();
            let permissions = {
                let perm: u32;
                #[cfg(target_family = "unix")]
                {
                    perm = permission_object.mode();
                }
                #[cfg(not(target_family = "unix"))]
                {
                    perm = 0
                }
                perm
            };

            results.push(VersionFile {
                relative_filename: relative.to_string_lossy().to_string(),
                permission: permissions,
            });
        }

        results
    }

    fn reader(&self, file: &VersionFile) -> BufReader<File> {
        let file = File::open(self.base_dir.join(file.relative_filename.clone())).unwrap();
        let reader = BufReader::with_capacity(CHUNK_SIZE, file);
        return reader;
    }
}

// Todo implementation for archives
// Split into a separate impl for each type of archive
pub struct ArchiveVersionBackend {}
impl VersionBackend for ArchiveVersionBackend {
    fn list_files(&self, path: &Path) -> Vec<VersionFile> {
        todo!()
    }

    fn reader(&self, file: &VersionFile) -> BufReader<File> {
        todo!()
    }
}

pub fn create_backend_for_path(path: &Path) -> Option<Box<(dyn VersionBackend)>> {
    let is_directory = path.is_dir();
    if is_directory {
        return Some(Box::new(PathVersionBackend {
            base_dir: path.to_path_buf(),
        }));
    };

    /*
    Insert checks for whatever backend you like
    */

    None
}

#[napi]
pub fn has_backend_for_path(path: String) -> bool {
    let path = Path::new(&path);

    let has_backend = create_backend_for_path(path).is_some();

    has_backend
}

#[napi]
pub fn list_files(path: String) -> Vec<String> {
    let path = Path::new(&path);
    let backend = create_backend_for_path(path).unwrap();
    let files = backend.list_files(path);
    files.into_iter().map(|e| e.relative_filename).collect()
}
@@ -1,8 +1,10 @@
 #![deny(clippy::all)]
+#![feature(trait_alias)]
 
-pub mod file_utils;
 pub mod manifest;
 pub mod ssl;
+pub mod version;
+pub mod script;
 
 #[macro_use]
 extern crate napi_derive;
src/manifest.rs, 103 changed lines

@@ -1,22 +1,15 @@
-use std::{
-    collections::HashMap,
-    fs::File,
-    io::{BufRead, BufReader},
-    path::Path,
-    thread,
-};
-
-#[cfg(unix)]
-use std::os::unix::fs::PermissionsExt;
+use std::{collections::HashMap, sync::Arc, thread};
 
 use napi::{
-    threadsafe_function::{ErrorStrategy, ThreadsafeFunction, ThreadsafeFunctionCallMode},
-    Error, JsFunction,
+    threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
+    Result,
 };
 use serde_json::json;
 use uuid::Uuid;
 
-use crate::file_utils::create_backend_for_path;
+use crate::version::{types::VersionBackend, utils::DropletHandler};
+
+const CHUNK_SIZE: usize = 1024 * 1024 * 64;
 
 #[derive(serde::Serialize)]
 struct ChunkData {
@@ -27,43 +20,36 @@ struct ChunkData {
 }
 
 #[napi]
-pub fn call_alt_thread_func(callback: JsFunction) -> Result<(), Error> {
-    let tsfn: ThreadsafeFunction<u32, ErrorStrategy::CalleeHandled> = callback
-        .create_threadsafe_function(0, |ctx| {
-            ctx.env.create_uint32(ctx.value + 1).map(|v| vec![v])
-        })?;
-    let tsfn = tsfn.clone();
+pub fn call_alt_thread_func(tsfn: Arc<ThreadsafeFunction<()>>) -> Result<(), String> {
+    let tsfn_cloned = tsfn.clone();
     thread::spawn(move || {
-        tsfn.call(Ok(0), ThreadsafeFunctionCallMode::NonBlocking);
+        tsfn_cloned.call(Ok(()), ThreadsafeFunctionCallMode::Blocking);
    });
    Ok(())
 }
 
 #[napi]
-pub fn generate_manifest(
+pub fn generate_manifest<'a>(
+    droplet_handler: &mut DropletHandler,
     dir: String,
-    progress: JsFunction,
-    log: JsFunction,
-    callback: JsFunction,
-) -> Result<(), Error> {
-    let progress_sfn: ThreadsafeFunction<i32, ErrorStrategy::CalleeHandled> = progress
-        .create_threadsafe_function(0, |ctx| ctx.env.create_int32(ctx.value).map(|v| vec![v]))
-        .unwrap();
-    let log_sfn: ThreadsafeFunction<String, ErrorStrategy::CalleeHandled> = log
-        .create_threadsafe_function(0, |ctx| {
-            ctx.env.create_string_from_std(ctx.value).map(|v| vec![v])
-        })
-        .unwrap();
-    let callback_sfn: ThreadsafeFunction<String, ErrorStrategy::CalleeHandled> = callback
-        .create_threadsafe_function(0, |ctx| {
-            ctx.env.create_string_from_std(ctx.value).map(|v| vec![v])
-        })
-        .unwrap();
+    progress_sfn: ThreadsafeFunction<i32>,
+    log_sfn: ThreadsafeFunction<String>,
+    callback_sfn: ThreadsafeFunction<String>,
+) -> Result<()> {
+    let backend: &mut Box<dyn VersionBackend + Send> = droplet_handler
+        .create_backend_for_path(dir)
+        .ok_or(napi::Error::from_reason(
+            "Could not create backend for path.",
+        ))?;
+
+    // This is unsafe (obviously)
+    // But it's allg as long the DropletHandler doesn't get
+    // dropped while we're generating the manifest.
+    let backend: &'static mut Box<dyn VersionBackend + Send> =
+        unsafe { std::mem::transmute(backend) };
 
     thread::spawn(move || {
-        let base_dir = Path::new(&dir);
-        let backend = create_backend_for_path(base_dir).unwrap();
-        let files = backend.list_files(base_dir);
+        let files = backend.list_files();
 
         // Filepath to chunk data
         let mut chunks: HashMap<String, ChunkData> = HashMap::new();
@@ -71,8 +57,10 @@ pub fn generate_manifest(
         let total: i32 = files.len() as i32;
         let mut i: i32 = 0;
 
+        let mut buf = [0u8; 1024 * 16];
+
         for version_file in files {
-            let mut reader = backend.reader(&version_file);
+            let mut reader = backend.reader(&version_file, 0, 0).unwrap();
 
             let mut chunk_data = ChunkData {
                 permissions: version_file.permission,
@@ -83,12 +71,26 @@ pub fn generate_manifest(
 
             let mut chunk_index = 0;
             loop {
+                let mut length = 0;
                 let mut buffer: Vec<u8> = Vec::new();
-                reader.fill_buf().unwrap().clone_into(&mut buffer);
-                let length = buffer.len();
+                let mut file_empty = false;
 
-                if length == 0 {
-                    break;
+                loop {
+                    let read = reader.read(&mut buf).unwrap();
+
+                    length += read;
+
+                    // If we're out of data, add this chunk and then move onto the next file
+                    if read == 0 {
+                        file_empty = true;
+                        break;
+                    }
+
+                    buffer.extend_from_slice(&buf[0..read]);
+
+                    if length >= CHUNK_SIZE {
+                        break;
+                    }
                 }
 
                 let chunk_id = Uuid::new_v4();
@@ -101,13 +103,16 @@ pub fn generate_manifest(
 
                 let log_str = format!(
                     "Processed chunk {} for {}",
-                    chunk_index,
-                    &version_file.relative_filename
+                    chunk_index, &version_file.relative_filename
                 );
 
                 log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
 
-                reader.consume(length);
                 chunk_index += 1;
+
+                if file_empty {
+                    break;
+                }
             }
 
             chunks.insert(version_file.relative_filename, chunk_data);
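The serialized manifest maps each relative filename to a ChunkData record, { permissions, ids, checksums, lengths }. A hedged sketch of walking that structure from JS, mirroring the "zip manifest test" above (the verifyEntry helper name is hypothetical):

    import { createHash } from "node:crypto";

    // Verify one file's chunks against its manifest entry.
    // `data` follows the ChunkData shape serialized above.
    async function verifyEntry(handler, dir, filename, data) {
      let start = 0;
      for (const [i, length] of data.lengths.entries()) {
        const hash = createHash("md5");
        const stream = handler
          .readFile(dir, filename, BigInt(start), BigInt(start + length))
          .getStream();
        await stream.pipeTo(
          new WritableStream({ write: (chunk) => hash.update(chunk) })
        );
        if (hash.digest("hex") !== data.checksums[i]) return false;
        start += length;
      }
      return true;
    }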
src/script/mod.rs (new file), 128 lines

@@ -0,0 +1,128 @@
use boa_engine::{Context, JsValue, Source};
use mlua::{FromLuaMulti, Function, Lua};
use napi::Result;
use rhai::AST;

pub enum ScriptType {
    Rhai,
    Lua,
    Javascript,
}

#[napi]
pub struct Script(ScriptInner);

pub enum ScriptInner {
    Rhai { script: AST },
    Lua { script: Function },
    Javascript { script: boa_engine::Script },
}

#[napi]
pub struct ScriptEngine {
    rhai_engine: rhai::Engine,
    lua_engine: Lua,
    js_engine: Context,
}

#[napi]
impl ScriptEngine {
    #[napi(constructor)]
    pub fn new() -> Self {
        ScriptEngine {
            rhai_engine: rhai::Engine::new(),
            lua_engine: Lua::new(),
            js_engine: Context::default(),
        }
    }

    #[napi]
    pub fn build_rahi_script(&self, content: String) -> Result<Script> {
        let script = self
            .rhai_engine
            .compile(content.clone())
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
        Ok(Script(ScriptInner::Rhai { script }))
    }

    #[napi]
    pub fn build_lua_script(&self, content: String) -> Result<Script> {
        let func = self
            .lua_engine
            .load(content.clone())
            .into_function()
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
        Ok(Script(ScriptInner::Lua { script: func }))
    }

    #[napi]
    pub fn build_js_script(&mut self, content: String) -> Result<Script> {
        let source = Source::from_bytes(content.as_bytes());
        let script = boa_engine::Script::parse(source, None, &mut self.js_engine)
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;

        Ok(Script(ScriptInner::Javascript { script }))
    }

    fn execute_rhai_script<T>(&self, ast: &AST) -> Result<T>
    where
        T: Clone + 'static,
    {
        let v = self
            .rhai_engine
            .eval_ast::<T>(ast)
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;

        Ok(v)
    }

    fn execute_lua_script<T>(&self, function: &Function) -> Result<T>
    where
        T: FromLuaMulti,
    {
        let v = function
            .call::<T>(())
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;
        Ok(v)
    }

    fn execute_js_script(&mut self, func: &boa_engine::Script) -> Result<JsValue> {
        let v = func
            .evaluate(&mut self.js_engine)
            .map_err(|e| napi::Error::from_reason(e.to_string()))?;

        Ok(v)
    }

    #[napi]
    pub fn execute(&mut self, script: &mut Script) -> Result<()> {
        match &script.0 {
            ScriptInner::Rhai { script } => {
                self.execute_rhai_script::<()>(script)?;
            }
            ScriptInner::Lua { script } => {
                self.execute_lua_script::<()>(script)?;
            }
            ScriptInner::Javascript { script } => {
                self.execute_js_script(script)?;
            }
        };
        Ok(())
    }

    #[napi]
    pub fn fetch_strings(&mut self, script: &mut Script) -> Result<Vec<String>> {
        Ok(match &script.0 {
            ScriptInner::Rhai { script } => self.execute_rhai_script(script)?,
            ScriptInner::Lua { script } => self.execute_lua_script(script)?,
            ScriptInner::Javascript { script } => {
                let v = self.execute_js_script(script)?;

                serde_json::from_value(
                    v.to_json(&mut self.js_engine)
                        .map_err(|e| napi::Error::from_reason(e.to_string()))?,
                )
                .map_err(|e| napi::Error::from_reason(e.to_string()))?
            }
        })
    }
}
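On the JS side this surfaces as the ScriptEngine class exercised in __test__/script.spec.mjs. Note that the generated binding keeps the build_rahi_script spelling (apparently a typo for Rhai), so the exported method is buildRahiScript. A minimal sketch using only calls from the test suite:

    import { ScriptEngine } from "@drop-oss/droplet";

    const engine = new ScriptEngine();

    // Each build* method compiles source into an opaque Script handle;
    // compile errors throw, which is what the "syntax fail" tests rely on.
    const lua = engine.buildLuaScript(`local arr = {"1", "2"}; return arr;`);
    const js = engine.buildJsScript(`const v = "1" + "2"; ["1", "2", "3", v]`);

    // fetchStrings executes the script and coerces its result to Array<string>.
    console.log(engine.fetchStrings(lua)); // expected: ["1", "2"]
    console.log(engine.fetchStrings(js)); // expected: ["1", "2", "3", "12"]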
244
src/version/backends.rs
Normal file
244
src/version/backends.rs
Normal file
@ -0,0 +1,244 @@
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
    fs::{self, metadata, File},
    io::{self, Read, Seek, SeekFrom, Sink},
    path::{Path, PathBuf},
    sync::Arc,
};

use flate2::read::DeflateDecoder;
use rawzip::{
    CompressionMethod, FileReader, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry,
    ZipVerifier, RECOMMENDED_BUFFER_SIZE,
};

use crate::version::types::{MinimumFileObject, VersionBackend, VersionFile};

/// Recursively collects every file beneath `path` into `vec`.
/// Unwraps on I/O errors, so callers should hand it a readable tree.
pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
    if metadata(path).unwrap().is_dir() {
        let paths = fs::read_dir(path).unwrap();
        for path_result in paths {
            let full_path = path_result.unwrap().path();
            if metadata(&full_path).unwrap().is_dir() {
                _list_files(vec, &full_path);
            } else {
                vec.push(full_path);
            }
        }
    }
}

#[derive(Clone)]
pub struct PathVersionBackend {
    pub base_dir: PathBuf,
}
impl VersionBackend for PathVersionBackend {
    fn list_files(&mut self) -> Vec<VersionFile> {
        let mut vec = Vec::new();
        _list_files(&mut vec, &self.base_dir);

        let mut results = Vec::new();

        for pathbuf in vec.iter() {
            let relative = pathbuf.strip_prefix(self.base_dir.clone()).unwrap();

            results.push(
                self
                    .peek_file(relative.to_str().unwrap().to_owned())
                    .unwrap(),
            );
        }

        results
    }

    fn reader(
        &mut self,
        file: &VersionFile,
        start: u64,
        end: u64,
    ) -> Option<Box<dyn MinimumFileObject + 'static>> {
        let mut file = File::open(self.base_dir.join(file.relative_filename.clone())).ok()?;

        if start != 0 {
            file.seek(SeekFrom::Start(start)).ok()?;
        }

        if end != 0 {
            return Some(Box::new(file.take(end - start)));
        }

        Some(Box::new(file))
    }

    fn peek_file(&mut self, sub_path: String) -> Option<VersionFile> {
        let pathbuf = self.base_dir.join(sub_path.clone());
        if !pathbuf.exists() {
            return None;
        };

        let file = File::open(pathbuf.clone()).unwrap();
        let metadata = file.try_clone().unwrap().metadata().unwrap();
        let permission_object = metadata.permissions();
        let permissions = {
            let perm: u32;
            #[cfg(target_family = "unix")]
            {
                perm = permission_object.mode();
            }
            #[cfg(not(target_family = "unix"))]
            {
                // Unix mode bits do not exist on this platform; report 0.
                perm = 0;
            }
            perm
        };

        Some(VersionFile {
            relative_filename: sub_path,
            permission: permissions,
            size: metadata.len(),
        })
    }
}
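// Illustrative only (not part of the diff): walking a directory tree through
// the trait methods above, assuming a readable ./game directory exists.
fn dump_dir_listing() {
    let mut backend = PathVersionBackend { base_dir: "./game".into() };
    for f in backend.list_files() {
        println!("{} ({} bytes, mode {:o})", f.relative_filename, f.size, f.permission);
    }
}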
#[derive(Clone)]
pub struct ZipVersionBackend {
    archive: Arc<ZipArchive<FileReader>>,
}
impl ZipVersionBackend {
    pub fn new(archive: File) -> Self {
        let archive = ZipArchive::from_file(archive, &mut [0u8; RECOMMENDED_BUFFER_SIZE]).unwrap();
        Self {
            archive: Arc::new(archive),
        }
    }

    pub fn new_entry<'archive>(
        &self,
        entry: ZipEntry<'archive, FileReader>,
        compression_method: CompressionMethod,
        start: u64,
        end: u64,
    ) -> ZipFileWrapper<'archive> {
        let deflater: Box<dyn Read + Send + 'archive> = match compression_method {
            CompressionMethod::Store => Box::new(entry.reader()),
            CompressionMethod::Deflate => Box::new(DeflateDecoder::new(entry.reader())),
            // NOTE: Deflate64 entries are routed through the plain Deflate decoder.
            CompressionMethod::Deflate64 => Box::new(DeflateDecoder::new(entry.reader())),
            _ => panic!(
                "unsupported decompression algorithm: {:?}",
                compression_method
            ),
        };

        let mut verifier = entry.verifying_reader(deflater);
        // Zip streams aren't seekable: skip to `start` by draining into a Sink.
        if start != 0 {
            io::copy(&mut (&mut verifier).take(start), &mut Sink::default()).unwrap();
        }

        ZipFileWrapper {
            reader: verifier,
            limit: (end - start) as usize,
            current: 0,
        }
    }
}
pub struct ZipFileWrapper<'archive> {
    reader: ZipVerifier<'archive, Box<dyn Read + Send + 'archive>, FileReader>,
    limit: usize,
    current: usize,
}

/**
 * This read implementation is a result of debugging hell.
 * It should probably be replaced with a .take() call.
 */
impl<'a> Read for ZipFileWrapper<'a> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let has_limit = self.limit != 0;

        // End this stream once we've handed out `limit` bytes.
        if has_limit && self.current >= self.limit {
            return Ok(0);
        }

        let read = self.reader.read(buf)?;
        if has_limit {
            self.current += read;
            // Clamp the final chunk so we never report bytes past the limit.
            if self.current > self.limit {
                let over = self.current - self.limit;
                return Ok(read - over);
            }
        }
        Ok(read)
    }
}
//impl<'a> MinimumFileObject for ZipFileWrapper<'a> {}
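// As the doc comment above suggests, the manual limit bookkeeping could likely
// be replaced with io::Take. A hedged sketch of that alternative, keeping the
// convention that end == 0 means "no limit"; `limited_reader` is hypothetical:
fn limited_reader<'a, R: Read + 'a>(reader: R, start: u64, end: u64) -> Box<dyn Read + 'a> {
    // `start` bytes are assumed to be skipped already, as new_entry does today.
    if end != 0 {
        Box::new(reader.take(end - start))
    } else {
        Box::new(reader)
    }
}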
impl ZipVersionBackend {
    /// Linearly scans the central directory for `filename`, returning the
    /// wayfinder needed to re-open that entry plus its compression method.
    fn find_wayfinder(
        &mut self,
        filename: &str,
    ) -> Option<(ZipArchiveEntryWayfinder, CompressionMethod)> {
        let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
        let mut entries = self.archive.entries(read_buffer);
        let entry = loop {
            if let Some(v) = entries.next_entry().unwrap() {
                if v.file_path().try_normalize().unwrap().as_ref() == filename {
                    break Some(v);
                }
            } else {
                break None;
            }
        }?;

        let wayfinder = entry.wayfinder();

        Some((wayfinder, entry.compression_method()))
    }
}
impl VersionBackend for ZipVersionBackend {
    fn list_files(&mut self) -> Vec<VersionFile> {
        let mut results = Vec::new();
        let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
        let mut budget_iterator = self.archive.entries(read_buffer);
        while let Some(entry) = budget_iterator.next_entry().unwrap() {
            if entry.is_dir() {
                continue;
            }
            results.push(VersionFile {
                relative_filename: String::from(entry.file_path().try_normalize().unwrap()),
                permission: entry.mode().permissions(),
                size: entry.uncompressed_size_hint(),
            });
        }
        results
    }

    fn reader(
        &mut self,
        file: &VersionFile,
        start: u64,
        end: u64,
    ) -> Option<Box<dyn MinimumFileObject + '_>> {
        let (wayfinder, compression_method) = self.find_wayfinder(&file.relative_filename)?;
        let local_entry = self.archive.get_entry(wayfinder).unwrap();

        let wrapper = self.new_entry(local_entry, compression_method, start, end);

        Some(Box::new(wrapper) as Box<dyn MinimumFileObject>)
    }

    fn peek_file(&mut self, sub_path: String) -> Option<VersionFile> {
        let (entry, _) = self.find_wayfinder(&sub_path)?;

        Some(VersionFile {
            relative_filename: sub_path,
            // The central-directory lookup here doesn't surface mode bits.
            permission: 0,
            size: entry.uncompressed_size_hint(),
        })
    }
}
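A short usage sketch of the zip range-read path above. The archive path and entry name are assumptions for illustration; the trait import mirrors this file's own:

use std::{fs::File, io::Read};
use crate::version::types::VersionBackend;

fn read_entry_range() -> Vec<u8> {
    let file = File::open("data/pack.zip").unwrap();
    let mut backend = ZipVersionBackend::new(file);
    let vf = backend.peek_file("assets/logo.png".to_string()).unwrap();
    // Bytes [16, 64) of the decompressed entry; the wrapper treats limit 0 as unlimited.
    let mut reader = backend.reader(&vf, 16, 64).unwrap();
    let mut chunk = Vec::new();
    reader.read_to_end(&mut chunk).unwrap();
    chunk
}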
3
src/version/mod.rs
Normal file
@ -0,0 +1,3 @@
pub mod utils;
pub mod types;
pub mod backends;
45
src/version/types.rs
Normal file
@ -0,0 +1,45 @@
use std::{fmt::Debug, io::Read};

use dyn_clone::DynClone;
use tokio::io::{self, AsyncRead};

#[derive(Debug, Clone)]
pub struct VersionFile {
    pub relative_filename: String,
    pub permission: u32,
    pub size: u64,
}

pub trait MinimumFileObject: Read + Send {}
impl<T: Read + Send> MinimumFileObject for T {}

// Intentionally not a generic, because of types in read_file
pub struct ReadToAsyncRead<'a> {
    pub inner: Box<dyn Read + Send + 'a>,
}

const ASYNC_READ_BUFFER_SIZE: usize = 8128;

impl<'a> AsyncRead for ReadToAsyncRead<'a> {
    fn poll_read(
        mut self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
        buf: &mut tokio::io::ReadBuf<'_>,
    ) -> std::task::Poll<io::Result<()>> {
        // Performs a synchronous read on the inner reader and resolves
        // immediately; read errors currently panic via unwrap.
        let mut read_buf = [0u8; ASYNC_READ_BUFFER_SIZE];
        let read_size = ASYNC_READ_BUFFER_SIZE.min(buf.remaining());
        let read = self.inner.read(&mut read_buf[0..read_size]).unwrap();
        buf.put_slice(&read_buf[0..read]);
        std::task::Poll::Ready(Ok(()))
    }
}

pub trait VersionBackend: DynClone {
    fn list_files(&mut self) -> Vec<VersionFile>;
    fn peek_file(&mut self, sub_path: String) -> Option<VersionFile>;
    fn reader(&mut self, file: &VersionFile, start: u64, end: u64) -> Option<Box<dyn MinimumFileObject + '_>>;
}

dyn_clone::clone_trait_object!(VersionBackend);
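A minimal sketch of the Read-to-AsyncRead bridge above in use, assuming a tokio runtime is available:

use tokio::io::AsyncReadExt;

async fn drain(reader: Box<dyn std::io::Read + Send>) -> std::io::Result<Vec<u8>> {
    let mut async_reader = ReadToAsyncRead { inner: reader };
    let mut out = Vec::new();
    // poll_read resolves immediately, so this behaves like a blocking read loop.
    async_reader.read_to_end(&mut out).await?;
    Ok(out)
}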
163
src/version/utils.rs
Normal file
@ -0,0 +1,163 @@
use std::{collections::HashMap, fs::File, path::Path};

use napi::{bindgen_prelude::*, sys::napi_value__, tokio_stream::StreamExt};
use tokio_util::codec::{BytesCodec, FramedRead};

use crate::version::{
    backends::{PathVersionBackend, ZipVersionBackend},
    types::{ReadToAsyncRead, VersionBackend, VersionFile},
};

/**
 * Append new backends here.
 */
pub fn create_backend_constructor<'a>(
    path: &Path,
) -> Option<Box<dyn FnOnce() -> Box<dyn VersionBackend + Send + 'a>>> {
    if !path.exists() {
        return None;
    }

    let is_directory = path.is_dir();
    if is_directory {
        let base_dir = path.to_path_buf();
        return Some(Box::new(move || Box::new(PathVersionBackend { base_dir })));
    };

    if path.to_string_lossy().ends_with(".zip") {
        let f = File::open(path.to_path_buf()).unwrap();
        return Some(Box::new(|| Box::new(ZipVersionBackend::new(f))));
    }

    None
}
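// Sketch of what an additional arm inside create_backend_constructor might
// look like; TarVersionBackend is hypothetical and not part of this crate:
//
//     if path.to_string_lossy().ends_with(".tar") {
//         let f = File::open(path.to_path_buf()).unwrap();
//         return Some(Box::new(|| Box::new(TarVersionBackend::new(f))));
//     }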
/**
 * Persistent object so we can cache things between commands.
 */
#[napi(js_name = "DropletHandler")]
pub struct DropletHandler<'a> {
    backend_cache: HashMap<String, Box<dyn VersionBackend + Send + 'a>>,
}

#[napi]
impl<'a> DropletHandler<'a> {
    #[napi(constructor)]
    pub fn new() -> Self {
        DropletHandler {
            backend_cache: HashMap::new(),
        }
    }

    pub fn create_backend_for_path(
        &mut self,
        path: String,
    ) -> Option<&mut Box<dyn VersionBackend + Send + 'a>> {
        let fs_path = Path::new(&path);
        let constructor = create_backend_constructor(fs_path)?;

        // Only run the constructor on a cache miss; hits reuse the stored backend.
        let existing_backend = self.backend_cache.entry(path).or_insert_with(constructor);

        Some(existing_backend)
    }
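    // Sketch (not part of the crate): repeated lookups for the same path reuse
    // the cached backend instead of re-opening it. "./build.zip" is illustrative.
    //
    //     let mut handler = DropletHandler::new();
    //     assert!(handler.create_backend_for_path("./build.zip".into()).is_some());
    //     assert!(handler.create_backend_for_path("./build.zip".into()).is_some()); // cache hit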
    #[napi]
    pub fn has_backend_for_path(&self, path: String) -> bool {
        let path = Path::new(&path);

        create_backend_constructor(path).is_some()
    }

    #[napi]
    pub fn list_files(&mut self, path: String) -> Result<Vec<String>> {
        let backend = self
            .create_backend_for_path(path)
            .ok_or(napi::Error::from_reason("No backend for path"))?;
        let files = backend.list_files();
        Ok(files.into_iter().map(|e| e.relative_filename).collect())
    }

    #[napi]
    pub fn peek_file(&mut self, path: String, sub_path: String) -> Result<u64> {
        let backend = self
            .create_backend_for_path(path)
            .ok_or(napi::Error::from_reason("No backend for path"))?;

        let file = backend
            .peek_file(sub_path)
            .ok_or(napi::Error::from_reason("Can't find file to peek"))?;

        Ok(file.size)
    }

    #[napi]
    pub fn read_file(
        &mut self,
        reference: Reference<DropletHandler<'static>>,
        path: String,
        sub_path: String,
        env: Env,
        start: Option<BigInt>,
        end: Option<BigInt>,
    ) -> Result<JsDropStreamable> {
        let stream = reference.share_with(env, |handler| {
            let backend = handler
                .create_backend_for_path(path)
                .ok_or(napi::Error::from_reason("Failed to create backend."))?;
            let version_file = VersionFile {
                relative_filename: sub_path,
                permission: 0, // Shouldn't matter
                size: 0,       // Shouldn't matter
            };
            // Use the `?` operator for cleaner error propagation from `Option`.
            let reader = backend
                .reader(
                    &version_file,
                    start.map(|e| e.get_u64().1).unwrap_or(0),
                    end.map(|e| e.get_u64().1).unwrap_or(0),
                )
                .ok_or(napi::Error::from_reason("Failed to create reader."))?;

            let async_reader = ReadToAsyncRead { inner: reader };

            // Create a FramedRead stream with BytesCodec for chunking.
            let stream = FramedRead::new(async_reader, BytesCodec::new())
                // Use StreamExt::map to transform each Result item.
                .map(|result_item| {
                    result_item
                        // Map Ok(BytesMut) to Ok(Vec<u8>).
                        .map(|bytes| bytes.to_vec())
                        // Map Err(std::io::Error) to Err(napi::Error);
                        // napi::Error implements From<std::io::Error>.
                        .map_err(napi::Error::from)
                });
            // Create the napi-rs ReadableStream from the tokio_stream::Stream.
            // The unwrap() means stream creation failure panics; a production
            // system might surface this as a Result instead.
            Ok(ReadableStream::create_with_stream_bytes(&env, stream).unwrap())
        })?;

        Ok(JsDropStreamable { inner: stream })
    }
}

#[napi]
pub struct JsDropStreamable {
    inner: SharedReference<DropletHandler<'static>, ReadableStream<'static, BufferSlice<'static>>>,
}

#[napi]
impl JsDropStreamable {
    #[napi]
    pub fn get_stream(&self) -> *mut napi_value__ {
        self.inner.raw()
    }
}