56 Commits

Author SHA1 Message Date
416cada9f4 fix: unix permissions properly fixed with 7z 2025-10-28 19:31:59 +11:00
97312585db fix: fix to unix permissions with 7z 2025-10-28 19:29:25 +11:00
538aa3bb57 fix: update license 2025-10-14 12:11:24 +11:00
7ec09bee1e feat: fix 7zip integration 2025-10-13 11:29:30 +11:00
96c1b15de7 remove unneeded deps 2025-10-02 17:14:26 +10:00
bd6d7060fd feat: the 7z update 2025-10-02 17:06:58 +10:00
0431eebaa7 fix: remove lua tests 2025-08-25 13:02:00 +10:00
e66a6581cb fix: temporary remove luajit for compliation reasons 2025-08-25 12:43:23 +10:00
817c3cf503 feat: script backend, fixes 2025-08-25 12:35:12 +10:00
0d01809fd0 feat: no panik 2025-08-25 12:20:51 +10:00
ba35ca9a14 feat: start of scripting engine 2025-08-24 13:50:44 +10:00
ae4648845e feat: add support for partially deflated zips 2025-08-17 11:21:09 +10:00
bd30464a08 fix: manifest generation with multiple chunks 2025-08-15 21:56:33 +10:00
c67cca4ee0 fix: remove debug println 2025-08-15 21:41:48 +10:00
cae208a3e0 fix: zip read sizing 2025-08-15 21:30:25 +10:00
4276b9d668 fix: skip zip test 2025-08-15 19:47:50 +10:00
4fb9bb7563 fix: manifest sizing for slow backends 2025-08-15 16:49:18 +10:00
913dc2f58d feat: add zip speed test 2025-08-15 12:17:10 +10:00
7ec5e9f215 fix: zip file reader offset 2025-08-13 16:22:48 +10:00
b67a67d809 fix: bump version 2025-08-13 11:38:09 +10:00
87b19a5c8c fix: test 2025-08-13 11:37:41 +10:00
dc3a420986 feat: performance improvements, fix zip 2025-08-13 11:35:50 +10:00
1665033fd9 test: add subdir tests 2025-07-18 22:46:42 +10:00
2969d64c45 feat: move to bigints for larger file sizes 2025-07-14 15:17:38 +10:00
e525ff44bb Merge pull request #3 from nickbabcock/rawzip-0.3
Bump rawzip to 0.3
2025-07-13 23:08:10 +10:00
52a685391a Bump rawzip to 0.3
No need for any patches ;)
2025-07-13 07:46:36 -05:00
535d5a4062 i give up, bump all versions 2025-07-02 20:54:06 +10:00
450734f5c9 bump version 2025-07-02 20:45:58 +10:00
20e2eda381 fix: regenerate lockfile 2025-07-02 20:45:02 +10:00
04d3f2dd8c fix: revert napi update 2025-07-02 20:33:53 +10:00
59ca57ee1b fix: bump napi version and commit lockfile 2025-07-02 20:20:19 +10:00
8f4b2a6c6d feat: add file peaking, 1.5.0 2025-07-02 18:03:35 +10:00
7c3e6fe63c fix: add target setup 2025-07-02 13:47:08 +10:00
204902951e fix: nightly toolchain for docker builds 2025-07-02 13:39:17 +10:00
b3011c517d fix: skip tests, move to nightly 2025-07-02 12:29:58 +10:00
74a54eb9ac fix: bump @napi-rs/cli to alpha version 2025-07-02 12:22:31 +10:00
89e94e3afd fix: install patch crate before patching 2025-07-02 12:07:00 +10:00
169d471bb7 fix: patch crate on build 2025-07-02 12:05:57 +10:00
076dc60155 version bump to 1.4.0 2025-07-02 11:56:22 +10:00
48e5b97a4e feat: zip file reading 2025-07-02 11:55:04 +10:00
c1aaf8adcd feat: work on version backend system 2025-07-01 22:51:22 +10:00
fe43f79062 chore: bump version and add test 2025-05-30 20:55:53 +10:00
30b9c4a1cc bump version 2025-05-29 09:30:10 +10:00
42f770aed9 feat: Add file start and end to read_file function
Signed-off-by: quexeky <git@quexeky.dev>
2025-05-28 22:32:37 +10:00
4670df4127 fix: add windows target 2025-05-28 21:01:16 +10:00
e33eaebe1a fix: refix macos universalisation 2025-05-28 20:52:30 +10:00
f954f23410 fix: longshot fix: add x86_64-unknown-linux-gnu to the list of ABI targets 2025-05-28 20:47:59 +10:00
3632687001 fix: again, macos universalisation 2025-05-28 20:33:56 +10:00
90817487ed fix: universalisation for macos 2025-05-28 20:27:00 +10:00
98b84c64d4 fix: remove problematic builds 2025-05-28 19:58:26 +10:00
d3186cdd5f fix: types 2025-05-28 17:07:12 +10:00
bb678b4b3a fix: tests 2025-05-28 16:48:07 +10:00
cc94798962 feat: add file reader 2025-05-28 15:03:45 +10:00
7811818a72 Merge branch 'borked-reader' 2025-05-28 14:55:05 +10:00
b6910e717b fix: Changed FramedRead to work with ReadableStream
Signed-off-by: quexeky <git@quexeky.dev>
2025-05-28 14:52:42 +10:00
45a26c7156 inprogress: handoff to quexeky 2025-05-28 13:53:28 +10:00
26 changed files with 5054 additions and 646 deletions

GitHub Actions CI workflow (.github/workflows/)

@@ -12,12 +12,9 @@ permissions:
- main
tags-ignore:
- "**"
paths-ignore:
- "**/*.md"
- LICENSE
- "**/*.gitignore"
- .editorconfig
- docs/**
paths:
- package.json
- .github/workflows/*
pull_request: null
jobs:
build:
@@ -46,15 +43,6 @@ jobs:
target: aarch64-unknown-linux-gnu
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
build: yarn build --target aarch64-unknown-linux-gnu
- host: ubuntu-latest
target: armv7-unknown-linux-gnueabihf
setup: |
sudo apt-get update
sudo apt-get install gcc-arm-linux-gnueabihf -y
build: yarn build --target armv7-unknown-linux-gnueabihf
- host: ubuntu-latest
target: armv7-unknown-linux-musleabihf
build: yarn build --target armv7-unknown-linux-musleabihf
- host: ubuntu-latest
target: aarch64-unknown-linux-musl
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
@@ -71,7 +59,7 @@ jobs:
sudo apt-get update
sudo apt-get install gcc-riscv64-linux-gnu -y
build: yarn build --target riscv64gc-unknown-linux-gnu
name: stable - ${{ matrix.settings.target }} - node@20
name: nightly - ${{ matrix.settings.target }} - node@20
runs-on: ${{ matrix.settings.host }}
steps:
- uses: actions/checkout@v4
@@ -82,7 +70,7 @@ jobs:
node-version: 20
cache: yarn
- name: Install
uses: dtolnay/rust-toolchain@stable
uses: dtolnay/rust-toolchain@nightly
if: ${{ !matrix.settings.docker }}
with:
toolchain: nightly
@@ -106,14 +94,20 @@ jobs:
if: ${{ matrix.settings.setup }}
shell: bash
- name: Install dependencies
run: yarn install
run: |-
cargo install patch-crate &&
cargo patch-crate &&
yarn install
- name: Build in docker
uses: addnab/docker-run-action@v3
if: ${{ matrix.settings.docker }}
with:
image: ${{ matrix.settings.docker }}
options: "--user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build"
run: ${{ matrix.settings.build }}
run: |-
rustup default nightly &&
rustup target add ${{ matrix.settings.target }} &&
${{ matrix.settings.build }}
- name: Build
run: ${{ matrix.settings.build }}
if: ${{ !matrix.settings.docker }}
@@ -358,6 +352,8 @@ jobs:
with:
name: bindings-aarch64-apple-darwin
path: artifacts
- name: Move artifacts
run: mv artifacts/* .
- name: Combine binaries
run: yarn universal
- name: Upload artifact

.gitignore (8 changes)

@@ -9,7 +9,7 @@ npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.test
.test*
.tsimp
# Diagnostic reports (https://nodejs.org/api/report.html)
@@ -186,7 +186,6 @@ $RECYCLE.BIN/
#Added by cargo
/target
Cargo.lock
.pnp.*
.yarn/*
@@ -201,4 +200,7 @@ test.mjs
manifest.json
# JetBrains
.idea
.idea
assets/*
!assets/generate.sh

Cargo.lock (generated, new file, 2103 lines): diff suppressed because it is too large.

Cargo.toml

@@ -9,18 +9,22 @@ crate-type = ["cdylib"]
[dependencies]
# Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
napi = { version = "2.12.2", default-features = false, features = [
"napi4",
"async",
] }
napi-derive = "2.12.2"
napi = { version = "3.0.0-beta.11", default-features = false, features = ["napi6", "async", "web_stream", "error_anyhow"] }
napi-derive = "3.0.0-beta.11"
hex = "0.4.3"
serde_json = "1.0.128"
md5 = "0.7.0"
time-macros = "0.2.22"
time = "0.3.41"
webpki = "0.22.4"
ring = "0.17.14"
tokio = { version = "1.45.1", features = ["fs", "io-util"] }
tokio-util = { version = "0.7.15", features = ["codec"] }
dyn-clone = "1.0.20"
rhai = "1.22.2"
# mlua = { version = "0.11.2", features = ["luajit"] }
boa_engine = "0.20.0"
serde_json = "1.0.143"
anyhow = "1.0.99"
[dependencies.x509-parser]
version = "0.17.0"

__test__/debug.spec.mjs (new file, 22 lines)

@@ -0,0 +1,22 @@
import test from "ava";
import { DropletHandler, generateManifest } from "../index.js";
test.skip("debug", async (t) => {
const handler = new DropletHandler();
console.log("created handler");
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
handler,
"./assets/TheGame.zip",
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
)
);
return t.pass();
});


@@ -2,7 +2,7 @@ import test from "ava";
import fs from "node:fs";
import path from "path";
import { generateManifest, listFiles } from "../index.js";
import { DropletHandler, generateManifest } from "../index.js";
test("numerous small file", async (t) => {
// Setup test dir
@@ -18,9 +18,12 @@ test("numerous small file", async (t) => {
fs.writeFileSync(fileName, i.toString());
}
const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
dirName,
(_, __) => {},
(_, __) => {},
@@ -51,5 +54,86 @@ test("numerous small file", async (t) => {
t.is(entry.lengths[0], i.toString().length);
}
fs.rmSync(dirName, { recursive: true });
});
test.skip("performance test", async (t) => {
t.timeout(5 * 60 * 1000);
const dirName = "./.test/pt";
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
fs.mkdirSync(dirName, { recursive: true });
const fileSize = 1 * 1000 * 1000 * 1000; // 1GB
const randomStream = fs.createReadStream("/dev/random", {
start: 0,
end: fileSize,
});
const outputStream = fs.createWriteStream(path.join(dirName, "file.bin"));
await new Promise((r) => {
randomStream.pipe(outputStream);
randomStream.on("end", r);
});
const dropletHandler = new DropletHandler();
const start = Date.now();
await new Promise((r, e) =>
generateManifest(
dropletHandler,
dirName,
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
);
const end = Date.now();
t.pass(`Took ${end - start}ms to process ${fileSize / (1000 * 1000)}MB`);
fs.rmSync(dirName, { recursive: true });
});
test("special characters", async (t) => {
// Setup test dir
const dirName = "./.test/sc";
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
fs.mkdirSync(dirName, { recursive: true });
// Config
const fileNames = ["Technická podpora.rtf", "Servicio técnico.rtf"];
for (let i = 0; i < fileNames.length; i++) {
const fileName = path.join(dirName, fileNames[i]);
fs.writeFileSync(fileName, i.toString());
}
const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
dirName,
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
)
);
// Check the first few checksums
const checksums = [
"cfcd208495d565ef66e7dff9f98764da",
"c4ca4238a0b923820dcc509a6f75849b",
];
for (let index in checksums) {
const entry = manifest[fileNames[index]];
if (!entry) return t.fail(`manifest missing file ${index}`);
const checksum = entry.checksums[0];
t.is(checksum, checksums[index], `checksums do not match for ${index}`);
}
fs.rmSync(dirName, { recursive: true });
});

__test__/script.spec.mjs (new file, 62 lines)

@@ -0,0 +1,62 @@
import test from "ava";
import { ScriptEngine } from "../index.js";
test.skip("lua syntax fail", (t) => {
const scriptEngine = new ScriptEngine();
const luaIshCode = `
print("hello world);
`;
try {
const script = scriptEngine.buildLuaScript(luaIshCode);
} catch {
return t.pass();
}
t.fail();
});
test("js syntax fail", (t) => {
const scriptEngine = new ScriptEngine();
const jsIshCode = `
const v = "hello world;
`;
try {
const script = scriptEngine.buildJsScript(jsIshCode);
} catch {
return t.pass();
}
t.fail();
});
test("js", (t) => {
const scriptEngine = new ScriptEngine();
const jsModule = `
const v = "1" + "2";
["1", "2", "3", v]
`;
const script = scriptEngine.buildJsScript(jsModule);
scriptEngine.fetchStrings(script);
t.pass();
});
test.skip("lua", (t) => {
const scriptEngine = new ScriptEngine();
const luaModule = `
local arr = {"1", "2"};
return arr;
`;
const script = scriptEngine.buildLuaScript(luaModule);
scriptEngine.fetchStrings(script);
t.pass();
});

__test__/utils.spec.mjs (new file, 220 lines)

@@ -0,0 +1,220 @@
import test from "ava";
import fs from "node:fs";
import path from "path";
import { createHash } from "node:crypto";
import prettyBytes from "pretty-bytes";
import droplet, { DropletHandler, generateManifest } from "../index.js";
test("check alt thread util", async (t) => {
let endtime1, endtime2;
droplet.callAltThreadFunc(async () => {
await new Promise((r) => setTimeout(r, 100));
endtime1 = Date.now();
});
await new Promise((r) => setTimeout(r, 500));
endtime2 = Date.now();
const difference = endtime2 - endtime1;
if (difference >= 600) {
t.fail("likely isn't multithreaded, difference: " + difference);
}
t.pass();
});
test("list files", async (t) => {
const dirName = "./.listfiles";
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
fs.mkdirSync(dirName, { recursive: true });
fs.mkdirSync(dirName + "/subdir", { recursive: true });
fs.mkdirSync(dirName + "/subddir", { recursive: true });
fs.writeFileSync(dirName + "/root.txt", "root");
fs.writeFileSync(dirName + "/subdir/one.txt", "the first subdir");
fs.writeFileSync(dirName + "/subddir/two.txt", "the second");
const dropletHandler = new DropletHandler();
const files = dropletHandler.listFiles(dirName);
t.assert(
files.sort().join("\n"),
["root.txt", "subddir/two.txt", "subdir/one.txt"].join("\n")
);
fs.rmSync(dirName, { recursive: true });
});
test("read file", async (t) => {
const dirName = "./.test2";
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
fs.mkdirSync(dirName, { recursive: true });
const testString = "g'day what's up my koala bros\n".repeat(1000);
fs.writeFileSync(dirName + "/TESTFILE", testString);
const dropletHandler = new DropletHandler();
const stream = dropletHandler.readFile(
dirName,
"TESTFILE",
BigInt(0),
BigInt(testString.length)
);
let finalString = "";
for await (const chunk of stream.getStream()) {
// Do something with each 'chunk'
finalString += String.fromCharCode.apply(null, chunk);
}
t.assert(finalString == testString, "file strings don't match");
fs.rmSync(dirName, { recursive: true });
});
test("read file offset", async (t) => {
const dirName = "./.test3";
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
fs.mkdirSync(dirName, { recursive: true });
const testString = "0123456789";
fs.writeFileSync(dirName + "/TESTFILE", testString);
const dropletHandler = new DropletHandler();
const stream = dropletHandler.readFile(
dirName,
"TESTFILE",
BigInt(1),
BigInt(4)
);
let finalString = "";
for await (const chunk of stream.getStream()) {
// Do something with each 'chunk'
finalString += String.fromCharCode.apply(null, chunk);
}
const expectedString = testString.slice(1, 4);
t.assert(
finalString == expectedString,
`file strings don't match: ${finalString} vs ${expectedString}`
);
fs.rmSync(dirName, { recursive: true });
});
test.skip("zip speed test", async (t) => {
t.timeout(100_000_000);
const dropletHandler = new DropletHandler();
const stream = dropletHandler.readFile("./assets/TheGame.zip", "setup.exe");
let totalRead = 0;
let totalSeconds = 0;
let lastTime = process.hrtime.bigint();
const timeThreshold = BigInt(1_000_000_000);
let runningTotal = 0;
let runningTime = BigInt(0);
for await (const chunk of stream.getStream()) {
// Do something with each 'chunk'
const currentTime = process.hrtime.bigint();
const timeDiff = currentTime - lastTime;
lastTime = currentTime;
runningTime += timeDiff;
runningTotal += chunk.length;
if (runningTime >= timeThreshold) {
console.log(`${prettyBytes(runningTotal)}/s`);
totalRead += runningTotal;
totalSeconds += 1;
runningTime = BigInt(0);
runningTotal = 0;
}
}
const roughAverage = totalRead / totalSeconds;
console.log(`total rough average: ${prettyBytes(roughAverage)}/s`);
t.pass();
});
test.skip("zip manifest test", async (t) => {
const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
"./assets/TheGame.zip",
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
)
);
for (const [filename, data] of Object.entries(manifest)) {
let start = 0;
for (const [chunkIndex, length] of data.lengths.entries()) {
const hash = createHash("md5");
const stream = (
await dropletHandler.readFile(
"./assets/TheGame.zip",
filename,
BigInt(start),
BigInt(start + length)
)
).getStream();
let streamLength = 0;
await stream.pipeTo(
new WritableStream({
write(chunk) {
streamLength += chunk.length;
hash.update(chunk);
},
})
);
if (streamLength != length)
return t.fail(
`stream length for chunk index ${chunkIndex} was not expected: real: ${streamLength} vs expected: ${length}`
);
const digest = hash.digest("hex");
if (data.checksums[chunkIndex] != digest)
return t.fail(
`checksums did not match for chunk index ${chunkIndex}: real: ${digest} vs expected: ${data.checksums[chunkIndex]}`
);
start += length;
}
}
t.pass();
});
test.skip("partially compress zip test", async (t) => {
const dropletHandler = new DropletHandler();
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
dropletHandler,
"./assets/my horror game.zip",
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
)
);
return t.pass();
});

assets/generate.sh (new executable file, 4 lines)

@@ -0,0 +1,4 @@
# yes "droplet is awesome" | dd of=./setup.exe bs=1024 count=1000000
dd if=/dev/random of=./setup.exe bs=1024 count=1000000
zip TheGame.zip setup.exe
rm setup.exe

index.d.ts (46 changes)

@@ -1,14 +1,42 @@
/* tslint:disable */
/* eslint-disable */
/* auto-generated by NAPI-RS */
/* eslint-disable */
/**
* Persistent object so we can cache things between commands
*/
export declare class DropletHandler {
constructor()
hasBackendForPath(path: string): boolean
listFiles(path: string): Array<string>
peekFile(path: string, subPath: string): bigint
readFile(path: string, subPath: string, start?: bigint | undefined | null, end?: bigint | undefined | null): JsDropStreamable
}
export declare class JsDropStreamable {
getStream(): any
}
export declare class Script {
}
export declare class ScriptEngine {
constructor()
buildRhaiScript(content: string): Script
buildJsScript(content: string): Script
execute(script: Script): void
fetchStrings(script: Script): Array<string>
}
export declare function callAltThreadFunc(tsfn: ((err: Error | null, ) => any)): void
export declare function hasBackendForPath(path: string): boolean
export declare function listFiles(path: string): Array<string>
export declare function callAltThreadFunc(callback: (...args: any[]) => any): void
export declare function generateManifest(dir: string, progress: (...args: any[]) => any, log: (...args: any[]) => any, callback: (...args: any[]) => any): void
export declare function generateRootCa(): Array<string>
export declare function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string>
export declare function verifyClientCertificate(clientCert: string, rootCa: string): boolean
export declare function generateManifest(dropletHandler: DropletHandler, dir: string, progressSfn: ((err: Error | null, arg: number) => any), logSfn: ((err: Error | null, arg: string) => any), callbackSfn: ((err: Error | null, arg: string) => any)): void
export declare function generateRootCa(): Array<string>
export declare function signNonce(privateKey: string, nonce: string): string
export declare function verifyClientCertificate(clientCert: string, rootCa: string): boolean
export declare function verifyNonce(publicCert: string, nonce: string, signature: string): boolean
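
These declarations are the whole public surface after the DropletHandler refactor. A minimal sketch of driving it from an ES module, following the promise-wrapping pattern the test specs use; the package name comes from package.json below, while "./game" is a hypothetical path:

import { DropletHandler, generateManifest } from "@drop-oss/droplet";

const handler = new DropletHandler();

// generateManifest reports through thread-safe callbacks, so wrap it in a
// Promise exactly the way the specs do.
const manifest = JSON.parse(
  await new Promise((resolve, reject) =>
    generateManifest(
      handler,
      "./game",              // hypothetical directory (or archive) to index
      (_err, percent) => {}, // progress callback, 0-100
      (_err, line) => {},    // log line callback
      (err, json) => (err ? reject(err) : resolve(json))
    )
  )
);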

index.js (658 changes)

@@ -1,323 +1,389 @@
/* tslint:disable */
// prettier-ignore
/* eslint-disable */
/* prettier-ignore */
// @ts-nocheck
/* auto-generated by NAPI-RS */
const { existsSync, readFileSync } = require('fs')
const { join } = require('path')
const { platform, arch } = process
const { createRequire } = require('node:module')
require = createRequire(__filename)
const { readFileSync } = require('node:fs')
let nativeBinding = null
let localFileExisted = false
let loadError = null
const loadErrors = []
function isMusl() {
// For Node 10
if (!process.report || typeof process.report.getReport !== 'function') {
try {
const lddPath = require('child_process').execSync('which ldd').toString().trim()
return readFileSync(lddPath, 'utf8').includes('musl')
} catch (e) {
return true
const isMusl = () => {
let musl = false
if (process.platform === 'linux') {
musl = isMuslFromFilesystem()
if (musl === null) {
musl = isMuslFromReport()
}
} else {
const { glibcVersionRuntime } = process.report.getReport().header
return !glibcVersionRuntime
if (musl === null) {
musl = isMuslFromChildProcess()
}
}
return musl
}
const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')
const isMuslFromFilesystem = () => {
try {
return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
} catch {
return null
}
}
switch (platform) {
case 'android':
switch (arch) {
case 'arm64':
localFileExisted = existsSync(join(__dirname, 'droplet.android-arm64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./droplet.android-arm64.node')
} else {
nativeBinding = require('@drop-oss/droplet-android-arm64')
}
} catch (e) {
loadError = e
}
break
case 'arm':
localFileExisted = existsSync(join(__dirname, 'droplet.android-arm-eabi.node'))
try {
if (localFileExisted) {
nativeBinding = require('./droplet.android-arm-eabi.node')
} else {
nativeBinding = require('@drop-oss/droplet-android-arm-eabi')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Android ${arch}`)
const isMuslFromReport = () => {
let report = null
if (typeof process.report?.getReport === 'function') {
process.report.excludeNetwork = true
report = process.report.getReport()
}
if (!report) {
return null
}
if (report.header && report.header.glibcVersionRuntime) {
return false
}
if (Array.isArray(report.sharedObjects)) {
if (report.sharedObjects.some(isFileMusl)) {
return true
}
break
case 'win32':
switch (arch) {
case 'x64':
localFileExisted = existsSync(
join(__dirname, 'droplet.win32-x64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.win32-x64-msvc.node')
} else {
nativeBinding = require('@drop-oss/droplet-win32-x64-msvc')
}
} catch (e) {
loadError = e
}
break
case 'ia32':
localFileExisted = existsSync(
join(__dirname, 'droplet.win32-ia32-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.win32-ia32-msvc.node')
} else {
nativeBinding = require('@drop-oss/droplet-win32-ia32-msvc')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'droplet.win32-arm64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.win32-arm64-msvc.node')
} else {
nativeBinding = require('@drop-oss/droplet-win32-arm64-msvc')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
break
case 'darwin':
localFileExisted = existsSync(join(__dirname, 'droplet.darwin-universal.node'))
}
return false
}
const isMuslFromChildProcess = () => {
try {
return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
} catch (e) {
// If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
return false
}
}
function requireNative() {
if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
try {
if (localFileExisted) {
nativeBinding = require('./droplet.darwin-universal.node')
} else {
nativeBinding = require('@drop-oss/droplet-darwin-universal')
nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
} catch (err) {
loadErrors.push(err)
}
} else if (process.platform === 'android') {
if (process.arch === 'arm64') {
try {
return require('./droplet.android-arm64.node')
} catch (e) {
loadErrors.push(e)
}
break
} catch {}
switch (arch) {
case 'x64':
localFileExisted = existsSync(join(__dirname, 'droplet.darwin-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./droplet.darwin-x64.node')
} else {
nativeBinding = require('@drop-oss/droplet-darwin-x64')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'droplet.darwin-arm64.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.darwin-arm64.node')
} else {
nativeBinding = require('@drop-oss/droplet-darwin-arm64')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on macOS: ${arch}`)
try {
return require('@drop-oss/droplet-android-arm64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm') {
try {
return require('./droplet.android-arm-eabi.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-android-arm-eabi')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
}
break
case 'freebsd':
if (arch !== 'x64') {
throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
} else if (process.platform === 'win32') {
if (process.arch === 'x64') {
try {
return require('./droplet.win32-x64-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-win32-x64-msvc')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'ia32') {
try {
return require('./droplet.win32-ia32-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-win32-ia32-msvc')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./droplet.win32-arm64-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-win32-arm64-msvc')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
}
localFileExisted = existsSync(join(__dirname, 'droplet.freebsd-x64.node'))
} else if (process.platform === 'darwin') {
try {
if (localFileExisted) {
nativeBinding = require('./droplet.freebsd-x64.node')
} else {
nativeBinding = require('@drop-oss/droplet-freebsd-x64')
return require('./droplet.darwin-universal.node')
} catch (e) {
loadErrors.push(e)
}
} catch (e) {
loadError = e
try {
return require('@drop-oss/droplet-darwin-universal')
} catch (e) {
loadErrors.push(e)
}
if (process.arch === 'x64') {
try {
return require('./droplet.darwin-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-darwin-x64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./droplet.darwin-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-darwin-arm64')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
}
break
case 'linux':
switch (arch) {
case 'x64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-x64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-x64-musl.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-x64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-x64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-x64-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-x64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-arm64-musl.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-arm64-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm-musleabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-arm-musleabihf.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm-musleabihf')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm-gnueabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-arm-gnueabihf.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm-gnueabihf')
}
} catch (e) {
loadError = e
}
}
break
case 'riscv64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-riscv64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-riscv64-musl.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-riscv64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-riscv64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-riscv64-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-riscv64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 's390x':
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-s390x-gnu.node')
)
} else if (process.platform === 'freebsd') {
if (process.arch === 'x64') {
try {
return require('./droplet.freebsd-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-freebsd-x64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./droplet.freebsd-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-freebsd-arm64')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
}
} else if (process.platform === 'linux') {
if (process.arch === 'x64') {
if (isMusl()) {
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-s390x-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-s390x-gnu')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Linux: ${arch}`)
return require('./droplet.linux-x64-musl.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-x64-musl')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
return require('./droplet.linux-x64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-x64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'arm64') {
if (isMusl()) {
try {
return require('./droplet.linux-arm64-musl.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-arm64-musl')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
return require('./droplet.linux-arm64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-arm64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'arm') {
if (isMusl()) {
try {
return require('./droplet.linux-arm-musleabihf.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-arm-musleabihf')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
return require('./droplet.linux-arm-gnueabihf.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-arm-gnueabihf')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'riscv64') {
if (isMusl()) {
try {
return require('./droplet.linux-riscv64-musl.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-riscv64-musl')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
return require('./droplet.linux-riscv64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-riscv64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'ppc64') {
try {
return require('./droplet.linux-ppc64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-ppc64-gnu')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 's390x') {
try {
return require('./droplet.linux-s390x-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-s390x-gnu')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
}
break
default:
throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
} else {
loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
}
}
nativeBinding = requireNative()
if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
try {
nativeBinding = require('./droplet.wasi.cjs')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
loadErrors.push(err)
}
}
if (!nativeBinding) {
try {
nativeBinding = require('@drop-oss/droplet-wasm32-wasi')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
loadErrors.push(err)
}
}
}
}
if (!nativeBinding) {
if (loadError) {
throw loadError
if (loadErrors.length > 0) {
throw new Error(
`Cannot find native binding. ` +
`npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
{ cause: loadErrors }
)
}
throw new Error(`Failed to load native binding`)
}
const { hasBackendForPath, listFiles, callAltThreadFunc, generateManifest, generateRootCa, generateClientCertificate, verifyClientCertificate, signNonce, verifyNonce } = nativeBinding
module.exports.hasBackendForPath = hasBackendForPath
module.exports.listFiles = listFiles
module.exports.callAltThreadFunc = callAltThreadFunc
module.exports.generateManifest = generateManifest
module.exports.generateRootCa = generateRootCa
module.exports.generateClientCertificate = generateClientCertificate
module.exports.verifyClientCertificate = verifyClientCertificate
module.exports.signNonce = signNonce
module.exports.verifyNonce = verifyNonce
module.exports = nativeBinding
module.exports.DropletHandler = nativeBinding.DropletHandler
module.exports.JsDropStreamable = nativeBinding.JsDropStreamable
module.exports.Script = nativeBinding.Script
module.exports.ScriptEngine = nativeBinding.ScriptEngine
module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc
module.exports.generateClientCertificate = nativeBinding.generateClientCertificate
module.exports.generateManifest = nativeBinding.generateManifest
module.exports.generateRootCa = nativeBinding.generateRootCa
module.exports.signNonce = nativeBinding.signNonce
module.exports.verifyClientCertificate = nativeBinding.verifyClientCertificate
module.exports.verifyNonce = nativeBinding.verifyNonce
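
The rewritten loader also honours two environment variables that are useful when debugging bindings, as read at the top of requireNative() and in the WASI fallback. A small sketch, with a hypothetical .node path:

// Set before the first require of the package (CJS shown, matching index.js).
// Force one specific native binary:
process.env.NAPI_RS_NATIVE_LIBRARY_PATH = "/tmp/droplet.linux-x64-gnu.node";
// or force the WASI fallback even when a native binding loads:
// process.env.NAPI_RS_FORCE_WASI = "1";
const droplet = require("./index.js");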

README for @drop-oss/droplet-linux-arm-gnueabihf (deleted)

@@ -1,3 +0,0 @@
# `@drop-oss/droplet-linux-arm-gnueabihf`
This is the **armv7-unknown-linux-gnueabihf** binary for `@drop-oss/droplet`

package.json for @drop-oss/droplet-linux-arm-gnueabihf (deleted)

@@ -1,21 +0,0 @@
{
"name": "@drop-oss/droplet-linux-arm-gnueabihf",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm"
],
"main": "droplet.linux-arm-gnueabihf.node",
"files": [
"droplet.linux-arm-gnueabihf.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
}
}

README for @drop-oss/droplet-linux-arm-musleabihf (deleted)

@@ -1,3 +0,0 @@
# `@drop-oss/droplet-linux-arm-musleabihf`
This is the **armv7-unknown-linux-musleabihf** binary for `@drop-oss/droplet`

package.json for @drop-oss/droplet-linux-arm-musleabihf (deleted)

@@ -1,21 +0,0 @@
{
"name": "@drop-oss/droplet-linux-arm-musleabihf",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm"
],
"main": "droplet.linux-arm-musleabihf.node",
"files": [
"droplet.linux-arm-musleabihf.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
}
}

package.json

@@ -1,6 +1,6 @@
{
"name": "@drop-oss/droplet",
"version": "1.1.2",
"version": "3.2.2",
"main": "index.js",
"types": "index.d.ts",
"napi": {
@@ -8,25 +8,35 @@
"triples": {
"additional": [
"aarch64-apple-darwin",
"x86_64-apple-darwin",
"universal-apple-darwin",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"aarch64-pc-windows-msvc",
"armv7-unknown-linux-gnueabihf",
"armv7-unknown-linux-musleabihf",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
"universal-apple-darwin",
"riscv64gc-unknown-linux-gnu"
"riscv64gc-unknown-linux-gnu",
"aarch64-pc-windows-msvc",
"x86_64-pc-windows-msvc"
]
}
},
"license": "MIT",
"license": "AGPL-3.0-only",
"devDependencies": {
"@napi-rs/cli": "^2.18.4",
"@napi-rs/cli": "3.0.0-alpha.91",
"@types/node": "^22.13.10",
"ava": "^6.2.0"
"ava": "^6.2.0",
"pretty-bytes": "^7.0.1",
"tsimp": "^2.0.12"
},
"ava": {
"timeout": "3m"
"timeout": "3m",
"extensions": [
"cjs",
"mjs",
"js",
"ts",
"mts"
]
},
"engines": {
"node": ">= 10"
@@ -36,8 +46,8 @@
"build": "napi build --platform --release",
"build:debug": "napi build --platform",
"prepublishOnly": "napi prepublish -t npm",
"test": "ava",
"universal": "napi universal",
"test": "ava ",
"universal": "napi universalize",
"version": "napi version"
},
"packageManager": "yarn@4.7.0",

src/file_utils.rs (deleted)

@@ -1,123 +0,0 @@
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
fs::{self, metadata, File},
io::BufReader,
path::{Path, PathBuf},
};
const CHUNK_SIZE: usize = 1024 * 1024 * 64;
fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
if metadata(path).unwrap().is_dir() {
let paths = fs::read_dir(path).unwrap();
for path_result in paths {
let full_path = path_result.unwrap().path();
if metadata(&full_path).unwrap().is_dir() {
_list_files(vec, &full_path);
} else {
vec.push(full_path);
}
}
}
}
pub struct VersionFile {
pub relative_filename: String,
pub permission: u32,
}
pub trait VersionBackend: 'static {
fn list_files(&self, path: &Path) -> Vec<VersionFile>;
fn reader(&self, file: &VersionFile) -> BufReader<File>;
}
pub struct PathVersionBackend {
pub base_dir: PathBuf,
}
impl VersionBackend for PathVersionBackend {
fn list_files(&self, path: &Path) -> Vec<VersionFile> {
let mut vec = Vec::new();
_list_files(&mut vec, path);
let mut results = Vec::new();
for pathbuf in vec.iter() {
let file = File::open(pathbuf.clone()).unwrap();
let relative = pathbuf.strip_prefix(path).unwrap();
let metadata = file.try_clone().unwrap().metadata().unwrap();
let permission_object = metadata.permissions();
let permissions = {
let perm: u32;
#[cfg(target_family = "unix")]
{
perm = permission_object.mode();
}
#[cfg(not(target_family = "unix"))]
{
perm = 0
}
perm
};
results.push(VersionFile {
relative_filename: relative.to_string_lossy().to_string(),
permission: permissions,
});
}
results
}
fn reader(&self, file: &VersionFile) -> BufReader<File> {
let file = File::open(self.base_dir.join(file.relative_filename.clone())).unwrap();
let reader = BufReader::with_capacity(CHUNK_SIZE, file);
return reader;
}
}
// Todo implementation for archives
// Split into a separate impl for each type of archive
pub struct ArchiveVersionBackend {}
impl VersionBackend for ArchiveVersionBackend {
fn list_files(&self, path: &Path) -> Vec<VersionFile> {
todo!()
}
fn reader(&self, file: &VersionFile) -> BufReader<File> {
todo!()
}
}
pub fn create_backend_for_path(path: &Path) -> Option<Box<(dyn VersionBackend)>> {
let is_directory = path.is_dir();
if is_directory {
return Some(Box::new(PathVersionBackend {
base_dir: path.to_path_buf(),
}));
};
/*
Insert checks for whatever backend you like
*/
None
}
#[napi]
pub fn has_backend_for_path(path: String) -> bool {
let path = Path::new(&path);
let has_backend = create_backend_for_path(path).is_some();
has_backend
}
#[napi]
pub fn list_files(path: String) -> Vec<String> {
let path = Path::new(&path);
let backend = create_backend_for_path(path).unwrap();
let files = backend.list_files(path);
files.into_iter().map(|e| e.relative_filename).collect()
}

src/lib.rs

@@ -1,8 +1,14 @@
#![deny(clippy::all)]
#![deny(clippy::unwrap_used)]
#![deny(clippy::expect_used)]
#![deny(clippy::panic)]
#![feature(trait_alias)]
#![feature(iterator_try_collect)]
pub mod file_utils;
pub mod manifest;
pub mod script;
pub mod ssl;
pub mod version;
#[macro_use]
extern crate napi_derive;
extern crate napi_derive;

src/manifest.rs

@@ -1,22 +1,15 @@
use std::{
collections::HashMap,
fs::File,
io::{BufRead, BufReader},
path::Path,
thread,
};
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{collections::HashMap, sync::Arc, thread};
use napi::{
threadsafe_function::{ErrorStrategy, ThreadsafeFunction, ThreadsafeFunctionCallMode},
Error, JsFunction,
threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
Result,
};
use serde_json::json;
use uuid::Uuid;
use crate::file_utils::create_backend_for_path;
use crate::version::{types::VersionBackend, utils::DropletHandler};
const CHUNK_SIZE: usize = 1024 * 1024 * 64;
#[derive(serde::Serialize)]
struct ChunkData {
@@ -27,100 +20,125 @@ struct ChunkData {
}
#[napi]
pub fn call_alt_thread_func(callback: JsFunction) -> Result<(), Error> {
let tsfn: ThreadsafeFunction<u32, ErrorStrategy::CalleeHandled> = callback
.create_threadsafe_function(0, |ctx| {
ctx.env.create_uint32(ctx.value + 1).map(|v| vec![v])
})?;
let tsfn = tsfn.clone();
pub fn call_alt_thread_func(tsfn: Arc<ThreadsafeFunction<()>>) -> Result<(), String> {
let tsfn_cloned = tsfn.clone();
thread::spawn(move || {
tsfn.call(Ok(0), ThreadsafeFunctionCallMode::NonBlocking);
tsfn_cloned.call(Ok(()), ThreadsafeFunctionCallMode::Blocking);
});
Ok(())
}
#[napi]
pub fn generate_manifest(
pub fn generate_manifest<'a>(
droplet_handler: &mut DropletHandler,
dir: String,
progress: JsFunction,
log: JsFunction,
callback: JsFunction,
) -> Result<(), Error> {
let progress_sfn: ThreadsafeFunction<i32, ErrorStrategy::CalleeHandled> = progress
.create_threadsafe_function(0, |ctx| ctx.env.create_int32(ctx.value).map(|v| vec![v]))
.unwrap();
let log_sfn: ThreadsafeFunction<String, ErrorStrategy::CalleeHandled> = log
.create_threadsafe_function(0, |ctx| {
ctx.env.create_string_from_std(ctx.value).map(|v| vec![v])
})
.unwrap();
let callback_sfn: ThreadsafeFunction<String, ErrorStrategy::CalleeHandled> = callback
.create_threadsafe_function(0, |ctx| {
ctx.env.create_string_from_std(ctx.value).map(|v| vec![v])
})
.unwrap();
progress_sfn: ThreadsafeFunction<i32>,
log_sfn: ThreadsafeFunction<String>,
callback_sfn: ThreadsafeFunction<String>,
) -> anyhow::Result<()> {
let backend: &mut Box<dyn VersionBackend + Send> = droplet_handler
.create_backend_for_path(dir)
.ok_or(napi::Error::from_reason(
"Could not create backend for path.",
))?;
// This is unsafe (obviously), but it's sound as long as the DropletHandler
// doesn't get dropped while we're generating the manifest.
let backend: &'static mut Box<dyn VersionBackend + Send> =
unsafe { std::mem::transmute(backend) };
let required_single_file = backend.require_whole_files();
thread::spawn(move || {
let base_dir = Path::new(&dir);
let backend = create_backend_for_path(base_dir).unwrap();
let files = backend.list_files(base_dir);
let callback_borrow = &callback_sfn;
// Filepath to chunk data
let mut chunks: HashMap<String, ChunkData> = HashMap::new();
let mut inner = move || -> Result<()> {
let files = backend.list_files()?;
let total: i32 = files.len() as i32;
let mut i: i32 = 0;
// Filepath to chunk data
let mut chunks: HashMap<String, ChunkData> = HashMap::new();
for version_file in files {
let mut reader = backend.reader(&version_file);
let total: i32 = files.len() as i32;
let mut i: i32 = 0;
let mut chunk_data = ChunkData {
permissions: version_file.permission,
ids: Vec::new(),
checksums: Vec::new(),
lengths: Vec::new(),
};
let mut buf = [0u8; 1024 * 16];
let mut chunk_index = 0;
loop {
let mut buffer: Vec<u8> = Vec::new();
reader.fill_buf().unwrap().clone_into(&mut buffer);
let length = buffer.len();
for version_file in files {
let mut reader = backend.reader(&version_file, 0, 0)?;
if length == 0 {
break;
let mut chunk_data = ChunkData {
permissions: version_file.permission,
ids: Vec::new(),
checksums: Vec::new(),
lengths: Vec::new(),
};
let mut chunk_index = 0;
loop {
let mut length = 0;
let mut buffer: Vec<u8> = Vec::new();
let mut file_empty = false;
loop {
let read = reader.read(&mut buf)?;
length += read;
// If we're out of data, add this chunk and then move onto the next file
if read == 0 {
file_empty = true;
break;
}
buffer.extend_from_slice(&buf[0..read]);
if length >= CHUNK_SIZE && !required_single_file {
break;
}
}
let chunk_id = Uuid::new_v4();
let checksum = md5::compute(buffer).0;
let checksum_string = hex::encode(checksum);
chunk_data.ids.push(chunk_id.to_string());
chunk_data.checksums.push(checksum_string);
chunk_data.lengths.push(length);
let log_str = format!(
"Processed chunk {} for {}",
chunk_index, &version_file.relative_filename
);
log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
chunk_index += 1;
if file_empty {
break;
}
}
let chunk_id = Uuid::new_v4();
let checksum = md5::compute(buffer).0;
let checksum_string = hex::encode(checksum);
chunks.insert(version_file.relative_filename, chunk_data);
chunk_data.ids.push(chunk_id.to_string());
chunk_data.checksums.push(checksum_string);
chunk_data.lengths.push(length);
let log_str = format!(
"Processed chunk {} for {}",
chunk_index,
&version_file.relative_filename
);
log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
reader.consume(length);
chunk_index += 1;
i += 1;
let progress = i * 100 / total;
progress_sfn.call(Ok(progress), ThreadsafeFunctionCallMode::Blocking);
}
chunks.insert(version_file.relative_filename, chunk_data);
callback_borrow.call(
Ok(json!(chunks).to_string()),
ThreadsafeFunctionCallMode::Blocking,
);
i += 1;
let progress = i * 100 / total;
progress_sfn.call(Ok(progress), ThreadsafeFunctionCallMode::Blocking);
Ok(())
};
let result = inner();
if let Err(generate_err) = result {
callback_borrow.call(Err(generate_err), ThreadsafeFunctionCallMode::Blocking);
}
callback_sfn.call(
Ok(json!(chunks).to_string()),
ThreadsafeFunctionCallMode::Blocking,
);
});
Ok(())
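
The manifest JSON delivered to callback_sfn maps each relative filename to its serialized ChunkData, i.e. an object with permissions, ids, checksums, and lengths arrays, one element per chunk. A hedged sketch of re-verifying one file against that structure from JS, mirroring the zip manifest test above:

import { createHash } from "node:crypto";

// Re-hash every chunk of one file and compare against its manifest entry.
async function verifyFile(handler, manifest, dir, filename) {
  const entry = manifest[filename]; // { permissions, ids, checksums, lengths }
  let start = 0;
  for (const [i, length] of entry.lengths.entries()) {
    const hash = createHash("md5");
    const stream = handler
      .readFile(dir, filename, BigInt(start), BigInt(start + length))
      .getStream();
    await stream.pipeTo(
      new WritableStream({ write: (chunk) => hash.update(chunk) })
    );
    if (hash.digest("hex") !== entry.checksums[i]) {
      throw new Error(`chunk ${i} of ${filename} failed its checksum`);
    }
    start += length;
  }
}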

src/script/mod.rs (new file, 133 lines)

@@ -0,0 +1,133 @@
use boa_engine::{Context, JsValue, Source};
// use mlua::{FromLuaMulti, Function, Lua};
use napi::Result;
use rhai::AST;
pub enum ScriptType {
Rhai,
Lua,
Javascript,
}
#[napi]
pub struct Script(ScriptInner);
pub enum ScriptInner {
Rhai { script: AST },
// Lua { script: Function },
Javascript { script: boa_engine::Script },
}
#[napi]
pub struct ScriptEngine {
rhai_engine: rhai::Engine,
// lua_engine: Lua,
js_engine: Context,
}
#[napi]
impl ScriptEngine {
#[napi(constructor)]
pub fn new() -> Self {
ScriptEngine {
rhai_engine: rhai::Engine::new(),
// lua_engine: Lua::new(),
js_engine: Context::default(),
}
}
#[napi]
pub fn build_rhai_script(&self, content: String) -> Result<Script> {
let script = self
.rhai_engine
.compile(content.clone())
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(Script(ScriptInner::Rhai { script }))
}
/*
#[napi]
pub fn build_lua_script(&self, content: String) -> Result<Script> {
let func = self
.lua_engine
.load(content.clone())
.into_function()
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(Script(ScriptInner::Lua { script: func }))
}
*/
#[napi]
pub fn build_js_script(&mut self, content: String) -> Result<Script> {
let source = Source::from_bytes(content.as_bytes());
let script = boa_engine::Script::parse(source, None, &mut self.js_engine)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(Script(ScriptInner::Javascript { script }))
}
fn execute_rhai_script<T>(&self, ast: &AST) -> Result<T>
where
T: Clone + 'static,
{
let v = self
.rhai_engine
.eval_ast::<T>(ast)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(v)
}
/*
fn execute_lua_script<T>(&self, function: &Function) -> Result<T>
where
T: FromLuaMulti,
{
let v = function
.call::<T>(())
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(v)
}
*/
fn execute_js_script(&mut self, func: &boa_engine::Script) -> Result<JsValue> {
let v = func
.evaluate(&mut self.js_engine)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
Ok(v)
}
#[napi]
pub fn execute(&mut self, script: &mut Script) -> Result<()> {
match &script.0 {
ScriptInner::Rhai { script } => {
self.execute_rhai_script::<()>(script)?;
}
/*ScriptInner::Lua { script } => {
self.execute_lua_script::<()>(script)?;
}*/
ScriptInner::Javascript { script } => {
self.execute_js_script(script)?;
}
};
Ok(())
}
#[napi]
pub fn fetch_strings(&mut self, script: &mut Script) -> Result<Vec<String>> {
Ok(match &script.0 {
ScriptInner::Rhai { script } => self.execute_rhai_script(script)?,
//ScriptInner::Lua { script } => self.execute_lua_script(script)?,
ScriptInner::Javascript { script } => {
let v = self.execute_js_script(script)?;
serde_json::from_value(
v.to_json(&mut self.js_engine)
.map_err(|e| napi::Error::from_reason(e.to_string()))?,
)
.map_err(|e| napi::Error::from_reason(e.to_string()))?
}
})
}
}
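
A short usage sketch of the engine from the JS side, mirroring __test__/script.spec.mjs; note that fetchStrings expects the script's final expression to evaluate to an array of strings:

import { ScriptEngine } from "@drop-oss/droplet";

const engine = new ScriptEngine();

// Boa evaluates the script's final expression, and fetchStrings turns
// that value into an Array<string>.
const script = engine.buildJsScript(`
  const v = "hello" + " world";
  [v, "second"]
`);
console.log(engine.fetchStrings(script)); // => [ 'hello world', 'second' ]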

src/ssl.rs

@@ -1,4 +1,4 @@
use napi::Error;
use anyhow::anyhow;
use rcgen::{
CertificateParams, DistinguishedName, IsCa, KeyPair, KeyUsagePurpose, PublicKeyData,
SubjectPublicKeyInfo,
@@ -10,7 +10,7 @@ use x509_parser::parse_x509_certificate;
use x509_parser::pem::Pem;
#[napi]
pub fn generate_root_ca() -> Result<Vec<String>, Error> {
pub fn generate_root_ca() -> anyhow::Result<Vec<String>> {
let mut params = CertificateParams::default();
let mut name = DistinguishedName::new();
@@ -22,7 +22,7 @@ pub fn generate_root_ca() -> Result<Vec<String>, Error> {
params.not_before = OffsetDateTime::now_utc();
params.not_after = OffsetDateTime::now_utc()
.checked_add(Duration::days(365 * 1000))
.unwrap();
.ok_or(anyhow!("failed to calculate end date"))?;
params.is_ca = IsCa::Ca(rcgen::BasicConstraints::Unconstrained);
@@ -32,9 +32,8 @@ pub fn generate_root_ca() -> Result<Vec<String>, Error> {
KeyUsagePurpose::DigitalSignature,
];
let key_pair = KeyPair::generate().map_err(|e| napi::Error::from_reason(e.to_string()))?;
let certificate = CertificateParams::self_signed(params, &key_pair)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let key_pair = KeyPair::generate()?;
let certificate = CertificateParams::self_signed(params, &key_pair)?;
// Returns certificate, then private key
Ok(vec![certificate.pem(), key_pair.serialize_pem()])
@@ -46,13 +45,10 @@ pub fn generate_client_certificate(
_client_name: String,
root_ca: String,
root_ca_private: String,
) -> Result<Vec<String>, Error> {
let root_key_pair =
KeyPair::from_pem(&root_ca_private).map_err(|e| napi::Error::from_reason(e.to_string()))?;
let certificate_params = CertificateParams::from_ca_cert_pem(&root_ca)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let root_ca = CertificateParams::self_signed(certificate_params, &root_key_pair)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
) -> anyhow::Result<Vec<String>> {
let root_key_pair = KeyPair::from_pem(&root_ca_private)?;
let certificate_params = CertificateParams::from_ca_cert_pem(&root_ca)?;
let root_ca = CertificateParams::self_signed(certificate_params, &root_key_pair)?;
let mut params = CertificateParams::default();
@@ -66,28 +62,24 @@ pub fn generate_client_certificate(
KeyUsagePurpose::DataEncipherment,
];
let key_pair = KeyPair::generate_for(&rcgen::PKCS_ECDSA_P384_SHA384)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let certificate = CertificateParams::signed_by(params, &key_pair, &root_ca, &root_key_pair)
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let key_pair = KeyPair::generate_for(&rcgen::PKCS_ECDSA_P384_SHA384)?;
let certificate = CertificateParams::signed_by(params, &key_pair, &root_ca, &root_key_pair)?;
// Returns certificate, then private key
Ok(vec![certificate.pem(), key_pair.serialize_pem()])
}
#[napi]
pub fn verify_client_certificate(client_cert: String, root_ca: String) -> Result<bool, Error> {
pub fn verify_client_certificate(client_cert: String, root_ca: String) -> anyhow::Result<bool> {
let root_ca = Pem::iter_from_buffer(root_ca.as_bytes())
.next()
.unwrap()
.unwrap();
let root_ca = root_ca.parse_x509().unwrap();
.ok_or(anyhow!("no certificates in root ca"))??;
let root_ca = root_ca.parse_x509()?;
let client_cert = Pem::iter_from_buffer(client_cert.as_bytes())
.next()
.unwrap()
.unwrap();
let client_cert = client_cert.parse_x509().unwrap();
.ok_or(anyhow!("No client certs in chain."))??;
let client_cert = client_cert.parse_x509()?;
let valid = root_ca
.verify_signature(Some(client_cert.public_key()))
@@ -97,31 +89,33 @@ pub fn verify_client_certificate(client_cert: String, root_ca: String) -> Result
}
#[napi]
pub fn sign_nonce(private_key: String, nonce: String) -> Result<String, Error> {
pub fn sign_nonce(private_key: String, nonce: String) -> anyhow::Result<String> {
let rng = SystemRandom::new();
let key_pair = KeyPair::from_pem(&private_key).unwrap();
let key_pair = KeyPair::from_pem(&private_key)?;
let key_pair = EcdsaKeyPair::from_pkcs8(
&ring::signature::ECDSA_P384_SHA384_FIXED_SIGNING,
&key_pair.serialize_der(),
&rng,
)
.unwrap();
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let signature = key_pair.sign(&rng, nonce.as_bytes()).unwrap();
let signature = key_pair
.sign(&rng, nonce.as_bytes())
.map_err(|e| napi::Error::from_reason(e.to_string()))?;
let hex_signature = hex::encode(signature);
Ok(hex_signature)
}
#[napi]
pub fn verify_nonce(public_cert: String, nonce: String, signature: String) -> Result<bool, Error> {
let (_, pem) = x509_parser::pem::parse_x509_pem(public_cert.as_bytes()).unwrap();
let (_, spki) = parse_x509_certificate(&pem.contents).unwrap();
let public_key = SubjectPublicKeyInfo::from_der(spki.public_key().raw).unwrap();
pub fn verify_nonce(public_cert: String, nonce: String, signature: String) -> anyhow::Result<bool> {
let (_, pem) = x509_parser::pem::parse_x509_pem(public_cert.as_bytes())?;
let (_, spki) = parse_x509_certificate(&pem.contents)?;
let public_key = SubjectPublicKeyInfo::from_der(spki.public_key().raw)?;
let raw_signature = hex::decode(signature).unwrap();
let raw_signature = hex::decode(signature)?;
let valid = ring::signature::ECDSA_P384_SHA384_FIXED
.verify(

src/version/backends.rs (new file, 219 lines)

@@ -0,0 +1,219 @@
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
cell::LazyCell,
fs::{self, metadata, File},
io::{self, BufRead, BufReader, Read, Seek, SeekFrom, Sink},
path::{Path, PathBuf},
process::{Child, ChildStdout, Command, Stdio},
sync::{Arc, LazyLock},
};
use anyhow::anyhow;
use crate::version::types::{MinimumFileObject, VersionBackend, VersionFile};
pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) -> napi::Result<()> {
if metadata(path)?.is_dir() {
let paths = fs::read_dir(path)?;
for path_result in paths {
let full_path = path_result?.path();
if metadata(&full_path)?.is_dir() {
_list_files(vec, &full_path)?;
} else {
vec.push(full_path);
}
}
};
Ok(())
}
#[derive(Clone)]
pub struct PathVersionBackend {
pub base_dir: PathBuf,
}
impl VersionBackend for PathVersionBackend {
fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
let mut vec = Vec::new();
_list_files(&mut vec, &self.base_dir)?;
let mut results = Vec::new();
for pathbuf in vec.iter() {
let relative = pathbuf.strip_prefix(self.base_dir.clone())?;
results.push(
self.peek_file(
relative
.to_str()
.ok_or(napi::Error::from_reason("Could not parse path"))?
.to_owned(),
)?,
);
}
Ok(results)
}
fn reader(
&mut self,
file: &VersionFile,
start: u64,
end: u64,
) -> anyhow::Result<Box<dyn MinimumFileObject + 'static>> {
let mut file = File::open(self.base_dir.join(file.relative_filename.clone()))?;
if start != 0 {
file.seek(SeekFrom::Start(start))?;
}
if end != 0 {
return Ok(Box::new(file.take(end - start)));
}
Ok(Box::new(file))
}
fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
let pathbuf = self.base_dir.join(sub_path.clone());
if !pathbuf.exists() {
return Err(anyhow!("Path doesn't exist."));
};
let file = File::open(pathbuf.clone())?;
let metadata = file.try_clone()?.metadata()?;
let permission_object = metadata.permissions();
let permissions = {
let perm: u32;
#[cfg(target_family = "unix")]
{
perm = permission_object.mode();
}
#[cfg(not(target_family = "unix"))]
{
perm = 0
}
perm
};
Ok(VersionFile {
relative_filename: sub_path,
permission: permissions,
size: metadata.len(),
})
}
fn require_whole_files(&self) -> bool {
false
}
}
pub static SEVEN_ZIP_INSTALLED: LazyLock<bool> =
LazyLock::new(|| Command::new("7z").output().is_ok());
#[derive(Clone)]
pub struct ZipVersionBackend {
path: String,
}
impl ZipVersionBackend {
pub fn new(path: PathBuf) -> anyhow::Result<Self> {
Ok(Self {
path: path.to_str().expect("invalid utf path").to_owned(),
})
}
}
pub struct ZipFileWrapper {
command: Child,
reader: BufReader<ChildStdout>
}
impl ZipFileWrapper {
pub fn new(mut command: Child) -> Self {
let stdout = command.stdout.take().expect("failed to access stdout of 7z");
let reader = BufReader::new(stdout);
ZipFileWrapper { command, reader }
}
}
/**
 * This Read implementation is the result of debugging hell.
 * It should probably be replaced with a `.take()` call.
 */
impl Read for ZipFileWrapper {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
self.reader.read(buf)
}
}
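// A possible `.take()`-based replacement the comment above hints at (untested
// sketch, not the shipped code): bound stdout by the file's known size inside
// `ZipVersionBackend::reader` instead of wrapping `Read` by hand.
//
//     let stdout = child.stdout.take().expect("failed to access stdout of 7z");
//     let reader = BufReader::new(stdout).take(file.size);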
impl Drop for ZipFileWrapper {
    fn drop(&mut self) {
        // Kill before waiting so a partially-consumed stream can't leave 7z
        // blocked on a full pipe, and never panic inside Drop (a panic here
        // during unwinding would abort the process).
        let _ = self.command.kill();
        let _ = self.command.wait();
    }
}
impl VersionBackend for ZipVersionBackend {
fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
let mut list_command = Command::new("7z");
list_command.args(vec!["l", "-ba", &self.path]);
let result = list_command.output()?;
if !result.status.success() {
return Err(anyhow!(
"failed to list files: code {:?}",
result.status.code()
));
}
let raw_result = String::from_utf8(result.stdout)?;
let files = raw_result
    .split('\n')
    .filter(|v| !v.is_empty())
    .map(|v| v.split(' ').filter(|v| !v.is_empty()));
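// `7z l -ba` prints one entry per line, roughly:
//     2025-10-02 17:06:58 ....A        12345         4321  dir/file.txt
// Reversing the whitespace-split tokens yields Name, Compressed, Size, Attrs
// from the right; note this assumes entry names contain no spaces.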
let mut results = Vec::new();
for file in files {
let mut values = file.collect::<Vec<&str>>();
values.reverse();
let mut iter = values.iter();
let (name, _compress, size, attrs) = (
    iter.next().ok_or_else(|| anyhow!("failed to fetch name"))?,
    iter.next().ok_or_else(|| anyhow!("failed to read compressed size"))?,
    iter.next().ok_or_else(|| anyhow!("failed to read file size"))?,
    iter.next().ok_or_else(|| anyhow!("failed to fetch attrs"))?,
);
if attrs.starts_with("D") {
continue;
}
results.push(VersionFile {
relative_filename: name.to_owned().to_owned(),
permission: 0o744, // owner rwx; everyone else read-only
size: size.parse()?,
});
}
Ok(results)
}
fn reader(
&mut self,
file: &VersionFile,
start: u64,
end: u64,
) -> anyhow::Result<Box<dyn MinimumFileObject + '_>> {
let mut read_command = Command::new("7z");
read_command.args(vec!["e", "-so", &self.path, &file.relative_filename]);
let output = read_command.stdout(Stdio::piped()).spawn()?;
Ok(Box::new(ZipFileWrapper::new(output)))
}
fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
let files = self.list_files()?;
let file = files
    .iter()
    .find(|v| v.relative_filename == sub_path)
    .ok_or_else(|| anyhow!("file not found: {sub_path}"))?;
Ok(file.clone())
}
fn require_whole_files(&self) -> bool {
true
}
}

3
src/version/mod.rs Normal file

@@ -0,0 +1,3 @@
pub mod utils;
pub mod types;
pub mod backends;

55
src/version/types.rs Normal file

@@ -0,0 +1,55 @@
use std::{fmt::Debug, io::Read};
use dyn_clone::DynClone;
use tokio::io::{self, AsyncRead};
#[derive(Debug, Clone)]
pub struct VersionFile {
pub relative_filename: String,
pub permission: u32,
pub size: u64,
}
pub trait MinimumFileObject: Read + Send {}
impl<T: Read + Send> MinimumFileObject for T {}
// Intentionally not a generic, because of types in read_file
pub struct ReadToAsyncRead<'a> {
pub inner: Box<dyn Read + Send + 'a>,
}
const ASYNC_READ_BUFFER_SIZE: usize = 8128;
impl<'a> AsyncRead for ReadToAsyncRead<'a> {
fn poll_read(
mut self: std::pin::Pin<&mut Self>,
_cx: &mut std::task::Context<'_>,
buf: &mut tokio::io::ReadBuf<'_>,
) -> std::task::Poll<io::Result<()>> {
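// NOTE: `inner` is a blocking `Read`, so this read runs synchronously on
// the async runtime thread; acceptable for local files and pipes, but it
// can stall the executor if the source blocks for long.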
let mut read_buf = [0u8; ASYNC_READ_BUFFER_SIZE];
let read_size = ASYNC_READ_BUFFER_SIZE.min(buf.remaining());
match self.inner.read(&mut read_buf[0..read_size]) {
Ok(read) => {
buf.put_slice(&read_buf[0..read]);
std::task::Poll::Ready(Ok(()))
}
Err(err) => {
std::task::Poll::Ready(Err(err))
},
}
}
}
pub trait VersionBackend: DynClone {
fn require_whole_files(&self) -> bool;
fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>>;
fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile>;
fn reader(
&mut self,
file: &VersionFile,
start: u64,
end: u64,
) -> anyhow::Result<Box<dyn MinimumFileObject + '_>>;
}
dyn_clone::clone_trait_object!(VersionBackend);
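A minimal sketch (hypothetical, not part of the crate) of what implementing this trait takes: a cloneable type whose `reader` treats `end == 0` as "read to EOF", matching the backends in backends.rs.

use std::io::{Cursor, Read};

#[derive(Clone)]
struct MemoryBackend {
    files: Vec<(String, Vec<u8>)>,
}

impl VersionBackend for MemoryBackend {
    fn require_whole_files(&self) -> bool {
        false
    }
    fn list_files(&mut self) -> anyhow::Result<Vec<VersionFile>> {
        Ok(self
            .files
            .iter()
            .map(|(name, data)| VersionFile {
                relative_filename: name.clone(),
                permission: 0,
                size: data.len() as u64,
            })
            .collect())
    }
    fn peek_file(&mut self, sub_path: String) -> anyhow::Result<VersionFile> {
        self.list_files()?
            .into_iter()
            .find(|f| f.relative_filename == sub_path)
            .ok_or_else(|| anyhow::anyhow!("file not found: {sub_path}"))
    }
    fn reader(
        &mut self,
        file: &VersionFile,
        start: u64,
        end: u64,
    ) -> anyhow::Result<Box<dyn MinimumFileObject + '_>> {
        let (_, data) = self
            .files
            .iter()
            .find(|(name, _)| *name == file.relative_filename)
            .ok_or_else(|| anyhow::anyhow!("file not found"))?;
        let mut cursor = Cursor::new(data.clone());
        cursor.set_position(start);
        Ok(if end != 0 {
            Box::new(cursor.take(end - start))
        } else {
            Box::new(cursor)
        })
    }
}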

173
src/version/utils.rs Normal file

@@ -0,0 +1,173 @@
use std::{
collections::HashMap,
fs::File,
path::Path,
process::{Command, ExitStatus},
};
use anyhow::anyhow;
use napi::{bindgen_prelude::*, sys::napi_value__, tokio_stream::StreamExt};
use tokio_util::codec::{BytesCodec, FramedRead};
use crate::version::{
backends::{PathVersionBackend, ZipVersionBackend, SEVEN_ZIP_INSTALLED},
types::{ReadToAsyncRead, VersionBackend, VersionFile},
};
/**
* Append new backends here
*/
pub fn create_backend_constructor<'a>(
path: &Path,
) -> Option<Box<dyn FnOnce() -> Result<Box<dyn VersionBackend + Send + 'a>>>> {
if !path.exists() {
return None;
}
let is_directory = path.is_dir();
if is_directory {
let base_dir = path.to_path_buf();
return Some(Box::new(move || {
Ok(Box::new(PathVersionBackend { base_dir }))
}));
};
if *SEVEN_ZIP_INSTALLED {
let mut test = Command::new("7z");
test.args(["t", path.to_str()?]);
let status = test.status().ok()?;
if status.success() {
let buf = path.to_path_buf();
return Some(Box::new(move || {
Ok(Box::new(ZipVersionBackend::new(buf)?))
}));
}
}
None
}
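// Hypothetical sketch of "appending a new backend": another guarded arm
// before the final `None`, here for an imagined `TarVersionBackend`.
//
//     if path.extension().is_some_and(|e| e == "tar") {
//         let buf = path.to_path_buf();
//         return Some(Box::new(move || {
//             Ok(Box::new(TarVersionBackend::new(buf)?))
//         }));
//     }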
/**
* Persistent object so we can cache things between commands
*/
#[napi(js_name = "DropletHandler")]
pub struct DropletHandler<'a> {
backend_cache: HashMap<String, Box<dyn VersionBackend + Send + 'a>>,
}
#[napi]
impl<'a> DropletHandler<'a> {
#[napi(constructor)]
pub fn new() -> Self {
DropletHandler {
backend_cache: HashMap::new(),
}
}
pub fn create_backend_for_path(
&mut self,
path: String,
) -> Option<&mut Box<dyn VersionBackend + Send + 'a>> {
let fs_path = Path::new(&path);
let constructor = create_backend_constructor(fs_path)?;
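// Note: the constructor (including the `7z t` integrity probe above) is
// rebuilt on every call, even when the backend is already cached; only
// `constructor()` itself is skipped on a cache hit.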
let existing_backend = match self.backend_cache.entry(path) {
std::collections::hash_map::Entry::Occupied(occupied_entry) => occupied_entry.into_mut(),
std::collections::hash_map::Entry::Vacant(vacant_entry) => {
let backend = constructor().ok()?;
vacant_entry.insert(backend)
}
};
Some(existing_backend)
}
#[napi]
pub fn has_backend_for_path(&self, path: String) -> bool {
create_backend_constructor(Path::new(&path)).is_some()
}
#[napi]
pub fn list_files(&mut self, path: String) -> Result<Vec<String>> {
let backend = self
.create_backend_for_path(path)
.ok_or(napi::Error::from_reason("No backend for path"))?;
let files = backend.list_files()?;
Ok(files.into_iter().map(|e| e.relative_filename).collect())
}
#[napi]
pub fn peek_file(&mut self, path: String, sub_path: String) -> Result<u64> {
let backend = self
.create_backend_for_path(path)
.ok_or(napi::Error::from_reason("No backend for path"))?;
let file = backend.peek_file(sub_path)?;
Ok(file.size)
}
#[napi]
pub fn read_file(
&mut self,
reference: Reference<DropletHandler<'static>>,
path: String,
sub_path: String,
env: Env,
start: Option<BigInt>,
end: Option<BigInt>,
) -> anyhow::Result<JsDropStreamable> {
let stream = reference.share_with(env, |handler| {
let backend = handler
.create_backend_for_path(path)
.ok_or(anyhow!("Failed to create backend."))?;
let version_file = VersionFile {
relative_filename: sub_path,
permission: 0, // Shouldn't matter
size: 0, // Shouldn't matter
};
// `start`/`end` of 0 mean "from the beginning" / "to EOF" respectively.
let reader = backend.reader(
&version_file,
start.map(|e| e.get_u64().1).unwrap_or(0),
end.map(|e| e.get_u64().1).unwrap_or(0),
)?;
let async_reader = ReadToAsyncRead { inner: reader };
// Create a FramedRead stream with BytesCodec for chunking
let stream = FramedRead::new(async_reader, BytesCodec::new())
// Use StreamExt::map to transform each Result item
.map(|result_item| {
result_item
// Apply Result::map to transform Ok(BytesMut) to Ok(Vec<u8>)
.map(|bytes| bytes.to_vec())
// Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
.map_err(napi::Error::from) // napi::Error implements From<tokio::io::Error>
});
// Create the napi-rs ReadableStream from the tokio_stream::Stream; any
// creation error propagates to the caller via `?` on `share_with`.
ReadableStream::create_with_stream_bytes(&env, stream)
})?;
Ok(JsDropStreamable { inner: stream })
}
}
#[napi]
pub struct JsDropStreamable {
inner: SharedReference<DropletHandler<'static>, ReadableStream<'static, BufferSlice<'static>>>,
}
#[napi]
impl JsDropStreamable {
#[napi]
pub fn get_stream(&self) -> *mut napi_value__ {
self.inner.raw()
}
}

1393
yarn.lock

File diff suppressed because it is too large