mirror of https://github.com/Drop-OSS/droplet.git (synced 2025-11-12 15:52:47 +10:00)
Compare commits
32 Commits
| SHA1 |
|---|
| 535d5a4062 |
| 450734f5c9 |
| 20e2eda381 |
| 04d3f2dd8c |
| 59ca57ee1b |
| 8f4b2a6c6d |
| 7c3e6fe63c |
| 204902951e |
| b3011c517d |
| 74a54eb9ac |
| 89e94e3afd |
| 169d471bb7 |
| 076dc60155 |
| 48e5b97a4e |
| c1aaf8adcd |
| fe43f79062 |
| 30b9c4a1cc |
| 42f770aed9 |
| 4670df4127 |
| e33eaebe1a |
| f954f23410 |
| 3632687001 |
| 90817487ed |
| 98b84c64d4 |
| d3186cdd5f |
| bb678b4b3a |
| cc94798962 |
| 7811818a72 |
| b6910e717b |
| 45a26c7156 |
| 16b78bca17 |
| 4ac19b8be0 |
.github/workflows/CI.yml (34 lines changed, vendored)
@@ -12,12 +12,9 @@ permissions:
- main
tags-ignore:
- "**"
paths-ignore:
- "**/*.md"
- LICENSE
- "**/*.gitignore"
- .editorconfig
- docs/**
paths:
- package.json
- .github/workflows/*
pull_request: null
jobs:
build:
@@ -46,15 +43,6 @@ jobs:
target: aarch64-unknown-linux-gnu
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
build: yarn build --target aarch64-unknown-linux-gnu
- host: ubuntu-latest
target: armv7-unknown-linux-gnueabihf
setup: |
sudo apt-get update
sudo apt-get install gcc-arm-linux-gnueabihf -y
build: yarn build --target armv7-unknown-linux-gnueabihf
- host: ubuntu-latest
target: armv7-unknown-linux-musleabihf
build: yarn build --target armv7-unknown-linux-musleabihf
- host: ubuntu-latest
target: aarch64-unknown-linux-musl
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
@@ -71,7 +59,7 @@ jobs:
sudo apt-get update
sudo apt-get install gcc-riscv64-linux-gnu -y
build: yarn build --target riscv64gc-unknown-linux-gnu
name: stable - ${{ matrix.settings.target }} - node@20
name: nightly - ${{ matrix.settings.target }} - node@20
runs-on: ${{ matrix.settings.host }}
steps:
- uses: actions/checkout@v4
@@ -82,7 +70,7 @@ jobs:
node-version: 20
cache: yarn
- name: Install
uses: dtolnay/rust-toolchain@stable
uses: dtolnay/rust-toolchain@nightly
if: ${{ !matrix.settings.docker }}
with:
toolchain: nightly
@@ -106,14 +94,20 @@ jobs:
if: ${{ matrix.settings.setup }}
shell: bash
- name: Install dependencies
run: yarn install
run: |-
cargo install patch-crate &&
cargo patch-crate &&
yarn install
- name: Build in docker
uses: addnab/docker-run-action@v3
if: ${{ matrix.settings.docker }}
with:
image: ${{ matrix.settings.docker }}
options: "--user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build"
run: ${{ matrix.settings.build }}
run: |-
rustup default nightly &&
rustup target add ${{ matrix.settings.target }} &&
${{ matrix.settings.build }}
- name: Build
run: ${{ matrix.settings.build }}
if: ${{ !matrix.settings.docker }}
@@ -358,6 +352,8 @@ jobs:
with:
name: bindings-aarch64-apple-darwin
path: artifacts
- name: Move artifacts
run: mv artifacts/* .
- name: Combine binaries
run: yarn universal
- name: Upload artifact
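Note on the toolchain change above: the stable → nightly switch (dtolnay/rust-toolchain@nightly, plus `rustup default nightly` in the docker step) pairs with a change later in this diff, where lib.rs enables `#![feature(trait_alias)]`; feature gates compile only on nightly. A minimal sketch of what that gate permits, with a hypothetical alias name not taken from the diff:

#![feature(trait_alias)]

// Nightly-only: a trait alias names a bundle of bounds under one identifier.
trait ReadSend = std::io::Read + Send;

fn consume(_reader: impl ReadSend) {}

fn main() {
    consume(std::io::empty()); // std::io::empty() is Read + Send
}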
.gitignore (6 lines changed, vendored)
@@ -9,7 +9,7 @@ npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.test
.test*
.tsimp

# Diagnostic reports (https://nodejs.org/api/report.html)
@@ -186,7 +186,6 @@ $RECYCLE.BIN/
#Added by cargo

/target
Cargo.lock

.pnp.*
.yarn/*
@@ -202,3 +201,6 @@ manifest.json

# JetBrains
.idea

assets/*
!assets/generate.sh
Cargo.lock (1235 lines, generated, new file)
File diff suppressed because it is too large.
Cargo.toml (14 lines changed)
@@ -9,11 +9,12 @@ crate-type = ["cdylib"]

[dependencies]
# Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
napi = { version = "2.12.2", default-features = false, features = [
napi = { version = "3.0.0-beta.11", default-features = false, features = [
  "napi4",
  "async",
  "web_stream",
] }
napi-derive = "2.12.2"
napi-derive = "3.0.0-beta.11"
hex = "0.4.3"
serde_json = "1.0.128"
md5 = "0.7.0"
@@ -21,6 +22,15 @@ time-macros = "0.2.22"
time = "0.3.41"
webpki = "0.22.4"
ring = "0.17.14"
tokio = { version = "1.45.1", features = ["fs", "io-util"] }
tokio-util = { version = "0.7.15", features = ["codec"] }
rawzip = "0.2.0"

[package.metadata.patch]
crates = ["rawzip"]

[patch.crates-io]
rawzip = { path = "./target/patch/rawzip-0.2.0" }

[dependencies.x509-parser]
version = "0.17.0"
@@ -2,7 +2,7 @@ import test from "ava";
import fs from "node:fs";
import path from "path";

import { generateManifest } from "../index.js";
import { generateManifest, listFiles } from "../index.js";

test("numerous small file", async (t) => {
  // Setup test dir
@@ -53,3 +53,38 @@ test("numerous small file", async (t) => {

  fs.rmSync(dirName, { recursive: true });
});

test.skip("performance test", async (t) => {
  t.timeout(5 * 60 * 1000);
  return t.pass();
  const dirName = "./.test/pt";
  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
  fs.mkdirSync(dirName, { recursive: true });

  const fileSize = 1 * 1000 * 1000 * 1000; // 1GB

  const randomStream = fs.createReadStream("/dev/random", {
    start: 0,
    end: fileSize,
  });
  const outputStream = fs.createWriteStream(path.join(dirName, "file.bin"));
  await new Promise((r) => {
    randomStream.pipe(outputStream);
    randomStream.on("end", r);
  });

  const start = Date.now();
  await new Promise((r, e) =>
    generateManifest(
      dirName,
      (_, __) => {},
      (_, __) => {},
      (err, manifest) => (err ? e(err) : r(manifest))
    )
  );
  const end = Date.now();

  t.pass(`Took ${end - start}ms to process ${fileSize / (1000 * 1000)}MB`);

  fs.rmSync(dirName, { recursive: true });
});
__test__/utils.spec.mjs (100 lines, new file)
@@ -0,0 +1,100 @@
import test from "ava";
import fs from "node:fs";
import path from "path";

import droplet, { generateManifest } from "../index.js";

test("check alt thread util", async (t) => {
  let endtime1, endtime2;

  droplet.callAltThreadFunc(async () => {
    await new Promise((r) => setTimeout(r, 100));
    endtime1 = Date.now();
  });

  await new Promise((r) => setTimeout(r, 500));
  endtime2 = Date.now();

  const difference = endtime2 - endtime1;
  if (difference >= 600) {
    t.fail("likely isn't multithreaded, difference: " + difference);
  }

  t.pass();
});

test("read file", async (t) => {
  const dirName = "./.test2";
  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
  fs.mkdirSync(dirName, { recursive: true });

  const testString = "g'day what's up my koala bros\n".repeat(1000);

  fs.writeFileSync(dirName + "/TESTFILE", testString);

  const stream = droplet.readFile(dirName, "TESTFILE");

  let finalString = "";

  for await (const chunk of stream) {
    // Do something with each 'chunk'
    finalString += String.fromCharCode.apply(null, chunk);
  }

  t.assert(finalString == testString, "file strings don't match");
  fs.rmSync(dirName, { recursive: true });
});

test("read file offset", async (t) => {
  const dirName = "./.test3";
  if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
  fs.mkdirSync(dirName, { recursive: true });

  const testString = "0123456789";
  fs.writeFileSync(dirName + "/TESTFILE", testString);

  const stream = droplet.readFile(dirName, "TESTFILE", 1, 4);

  let finalString = "";

  for await (const chunk of stream) {
    // Do something with each 'chunk'
    finalString += String.fromCharCode.apply(null, chunk);
  }

  const expectedString = testString.slice(1, 4);

  t.assert(
    finalString == expectedString,
    `file strings don't match: ${finalString} vs ${expectedString}`
  );
  fs.rmSync(dirName, { recursive: true });
});

test("zip file reader", async (t) => {
  return t.pass();
  const manifest = JSON.parse(
    await new Promise((r, e) =>
      generateManifest(
        "./assets/TheGame.zip",
        (_, __) => {},
        (_, __) => {},
        (err, manifest) => (err ? e(err) : r(manifest))
      )
    )
  );

  console.log(manifest);

  return t.pass();
  const stream = droplet.readFile("./assets/TheGame.zip", "TheGame/setup.exe");

  let finalString;
  for await (const chunk of stream) {
    console.log(`read chunk ${chunk}`);
    // Do something with each 'chunk'
    finalString += String.fromCharCode.apply(null, chunk);
  }

  console.log(finalString);
});
assets/generate.sh (3 lines, new executable file)
@@ -0,0 +1,3 @@
dd if=/dev/random of=./setup.exe bs=1024 count=1000000
zip TheGame.zip setup.exe
rm setup.exe
index.d.ts (29 lines changed, vendored)
@@ -1,13 +1,26 @@
/* tslint:disable */
/* eslint-disable */

/* auto-generated by NAPI-RS */
/* eslint-disable */
export declare function callAltThreadFunc(tsfn: ((err: Error | null, ) => any)): void

export declare function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string>

export declare function generateManifest(dir: string, progressSfn: ((err: Error | null, arg: number) => any), logSfn: ((err: Error | null, arg: string) => any), callbackSfn: ((err: Error | null, arg: string) => any)): void

export declare function generateRootCa(): Array<string>

export declare function hasBackendForPath(path: string): boolean
export declare function callAltThreadFunc(callback: (...args: any[]) => any): void
export declare function generateManifest(dir: string, progress: (...args: any[]) => any, log: (...args: any[]) => any, callback: (...args: any[]) => any): void
export declare function generateRootCa(): Array<string>
export declare function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string>
export declare function verifyClientCertificate(clientCert: string, rootCa: string): boolean

export declare function listFiles(path: string): Array<string>

/**
 * This is inefficient, but is used in attempt to keep the interface simple
 */
export declare function peekFile(path: string, subPath: string): number

export declare function readFile(path: string, subPath: string, start?: number | undefined | null, end?: number | undefined | null): ReadableStream<Buffer> | null

export declare function signNonce(privateKey: string, nonce: string): string

export declare function verifyClientCertificate(clientCert: string, rootCa: string): boolean

export declare function verifyNonce(publicCert: string, nonce: string, signature: string): boolean
index.js (551 lines changed)
@@ -1,322 +1,389 @@
/* tslint:disable */
// prettier-ignore
/* eslint-disable */
/* prettier-ignore */

// @ts-nocheck
/* auto-generated by NAPI-RS */

const { existsSync, readFileSync } = require('fs')
const { join } = require('path')

const { platform, arch } = process
const { createRequire } = require('node:module')
require = createRequire(__filename)

const { readFileSync } = require('node:fs')
let nativeBinding = null
let localFileExisted = false
let loadError = null
const loadErrors = []

function isMusl() {
// For Node 10
if (!process.report || typeof process.report.getReport !== 'function') {
const isMusl = () => {
let musl = false
if (process.platform === 'linux') {
musl = isMuslFromFilesystem()
if (musl === null) {
musl = isMuslFromReport()
}
if (musl === null) {
musl = isMuslFromChildProcess()
}
}
return musl
}

const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')

const isMuslFromFilesystem = () => {
try {
const lddPath = require('child_process').execSync('which ldd').toString().trim()
return readFileSync(lddPath, 'utf8').includes('musl')
} catch (e) {
return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
} catch {
return null
}
}

const isMuslFromReport = () => {
let report = null
if (typeof process.report?.getReport === 'function') {
process.report.excludeNetwork = true
report = process.report.getReport()
}
if (!report) {
return null
}
if (report.header && report.header.glibcVersionRuntime) {
return false
}
if (Array.isArray(report.sharedObjects)) {
if (report.sharedObjects.some(isFileMusl)) {
return true
}
} else {
const { glibcVersionRuntime } = process.report.getReport().header
return !glibcVersionRuntime
}
return false
}

const isMuslFromChildProcess = () => {
try {
return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
} catch (e) {
// If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
return false
}
}

switch (platform) {
case 'android':
switch (arch) {
case 'arm64':
localFileExisted = existsSync(join(__dirname, 'droplet.android-arm64.node'))
function requireNative() {
if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
try {
if (localFileExisted) {
nativeBinding = require('./droplet.android-arm64.node')
} else {
nativeBinding = require('@drop-oss/droplet-android-arm64')
nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
} catch (err) {
loadErrors.push(err)
}
} else if (process.platform === 'android') {
if (process.arch === 'arm64') {
try {
return require('./droplet.android-arm64.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
case 'arm':
localFileExisted = existsSync(join(__dirname, 'droplet.android-arm-eabi.node'))
try {
if (localFileExisted) {
nativeBinding = require('./droplet.android-arm-eabi.node')
} else {
nativeBinding = require('@drop-oss/droplet-android-arm-eabi')
}
return require('@drop-oss/droplet-android-arm64')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
default:
throw new Error(`Unsupported architecture on Android ${arch}`)
}
break
case 'win32':
switch (arch) {
case 'x64':
localFileExisted = existsSync(
join(__dirname, 'droplet.win32-x64-msvc.node')
)

} else if (process.arch === 'arm') {
try {
if (localFileExisted) {
nativeBinding = require('./droplet.win32-x64-msvc.node')
} else {
nativeBinding = require('@drop-oss/droplet-win32-x64-msvc')
}
return require('./droplet.android-arm-eabi.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
case 'ia32':
localFileExisted = existsSync(
join(__dirname, 'droplet.win32-ia32-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.win32-ia32-msvc.node')
} else {
nativeBinding = require('@drop-oss/droplet-win32-ia32-msvc')
}
return require('@drop-oss/droplet-android-arm-eabi')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'droplet.win32-arm64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.win32-arm64-msvc.node')

} else {
nativeBinding = require('@drop-oss/droplet-win32-arm64-msvc')
loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
}
} else if (process.platform === 'win32') {
if (process.arch === 'x64') {
try {
return require('./droplet.win32-x64-msvc.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
default:
throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
break
case 'darwin':
localFileExisted = existsSync(join(__dirname, 'droplet.darwin-universal.node'))
try {
if (localFileExisted) {
nativeBinding = require('./droplet.darwin-universal.node')
} else {
nativeBinding = require('@drop-oss/droplet-darwin-universal')
}
break
} catch {}
switch (arch) {
case 'x64':
localFileExisted = existsSync(join(__dirname, 'droplet.darwin-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./droplet.darwin-x64.node')
} else {
nativeBinding = require('@drop-oss/droplet-darwin-x64')
}
return require('@drop-oss/droplet-win32-x64-msvc')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'droplet.darwin-arm64.node')
)

} else if (process.arch === 'ia32') {
try {
if (localFileExisted) {
nativeBinding = require('./droplet.darwin-arm64.node')
} else {
nativeBinding = require('@drop-oss/droplet-darwin-arm64')
}
return require('./droplet.win32-ia32-msvc.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
default:
throw new Error(`Unsupported architecture on macOS: ${arch}`)
}
break
case 'freebsd':
if (arch !== 'x64') {
throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
}
localFileExisted = existsSync(join(__dirname, 'droplet.freebsd-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./droplet.freebsd-x64.node')
} else {
nativeBinding = require('@drop-oss/droplet-freebsd-x64')
}
return require('@drop-oss/droplet-win32-ia32-msvc')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
case 'linux':
switch (arch) {
case 'x64':

} else if (process.arch === 'arm64') {
try {
return require('./droplet.win32-arm64-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-win32-arm64-msvc')
} catch (e) {
loadErrors.push(e)
}

} else {
loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
}
} else if (process.platform === 'darwin') {
try {
return require('./droplet.darwin-universal.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-darwin-universal')
} catch (e) {
loadErrors.push(e)
}

if (process.arch === 'x64') {
try {
return require('./droplet.darwin-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-darwin-x64')
} catch (e) {
loadErrors.push(e)
}

} else if (process.arch === 'arm64') {
try {
return require('./droplet.darwin-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-darwin-arm64')
} catch (e) {
loadErrors.push(e)
}

} else {
loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
}
} else if (process.platform === 'freebsd') {
if (process.arch === 'x64') {
try {
return require('./droplet.freebsd-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-freebsd-x64')
} catch (e) {
loadErrors.push(e)
}

} else if (process.arch === 'arm64') {
try {
return require('./droplet.freebsd-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-freebsd-arm64')
} catch (e) {
loadErrors.push(e)
}

} else {
loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
}
} else if (process.platform === 'linux') {
if (process.arch === 'x64') {
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-x64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-x64-musl.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-x64-musl')
}
return require('./droplet.linux-x64-musl.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-x64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-x64-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-x64-gnu')
}
return require('@drop-oss/droplet-linux-x64-musl')
} catch (e) {
loadError = e
loadErrors.push(e)
}

} else {
try {
return require('./droplet.linux-x64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
break
case 'arm64':
try {
return require('@drop-oss/droplet-linux-x64-gnu')
} catch (e) {
loadErrors.push(e)
}

}
} else if (process.arch === 'arm64') {
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-arm64-musl.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm64-musl')
}
return require('./droplet.linux-arm64-musl.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-arm64-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm64-gnu')
}
return require('@drop-oss/droplet-linux-arm64-musl')
} catch (e) {
loadError = e
loadErrors.push(e)
}

} else {
try {
return require('./droplet.linux-arm64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
break
case 'arm':
try {
return require('@drop-oss/droplet-linux-arm64-gnu')
} catch (e) {
loadErrors.push(e)
}

}
} else if (process.arch === 'arm') {
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm-musleabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-arm-musleabihf.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm-musleabihf')
}
return require('./droplet.linux-arm-musleabihf.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm-gnueabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-arm-gnueabihf.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm-gnueabihf')
}
return require('@drop-oss/droplet-linux-arm-musleabihf')
} catch (e) {
loadError = e
loadErrors.push(e)
}

} else {
try {
return require('./droplet.linux-arm-gnueabihf.node')
} catch (e) {
loadErrors.push(e)
}
break
case 'riscv64':
try {
return require('@drop-oss/droplet-linux-arm-gnueabihf')
} catch (e) {
loadErrors.push(e)
}

}
} else if (process.arch === 'riscv64') {
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-riscv64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-riscv64-musl.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-riscv64-musl')
}
return require('./droplet.linux-riscv64-musl.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-riscv64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-riscv64-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-riscv64-gnu')
}
return require('@drop-oss/droplet-linux-riscv64-musl')
} catch (e) {
loadError = e
loadErrors.push(e)
}
}
break
case 's390x':
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-s390x-gnu.node')
)

} else {
try {
if (localFileExisted) {
nativeBinding = require('./droplet.linux-s390x-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-s390x-gnu')
}
return require('./droplet.linux-riscv64-gnu.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-riscv64-gnu')
} catch (e) {
loadErrors.push(e)
}

}
} else if (process.arch === 'ppc64') {
try {
return require('./droplet.linux-ppc64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-ppc64-gnu')
} catch (e) {
loadErrors.push(e)
}

} else if (process.arch === 's390x') {
try {
return require('./droplet.linux-s390x-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-s390x-gnu')
} catch (e) {
loadErrors.push(e)
}

} else {
loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
}
} else {
loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
}
}

nativeBinding = requireNative()

if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
try {
nativeBinding = require('./droplet.wasi.cjs')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
loadErrors.push(err)
}
}
if (!nativeBinding) {
try {
nativeBinding = require('@drop-oss/droplet-wasm32-wasi')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
loadErrors.push(err)
}
}
break
default:
throw new Error(`Unsupported architecture on Linux: ${arch}`)
}
break
default:
throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}

if (!nativeBinding) {
if (loadError) {
throw loadError
if (loadErrors.length > 0) {
throw new Error(
`Cannot find native binding. ` +
`npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
{ cause: loadErrors }
)
}
throw new Error(`Failed to load native binding`)
}

const { hasBackendForPath, callAltThreadFunc, generateManifest, generateRootCa, generateClientCertificate, verifyClientCertificate, signNonce, verifyNonce } = nativeBinding

module.exports.hasBackendForPath = hasBackendForPath
module.exports.callAltThreadFunc = callAltThreadFunc
module.exports.generateManifest = generateManifest
module.exports.generateRootCa = generateRootCa
module.exports.generateClientCertificate = generateClientCertificate
module.exports.verifyClientCertificate = verifyClientCertificate
module.exports.signNonce = signNonce
module.exports.verifyNonce = verifyNonce
module.exports = nativeBinding
module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc
module.exports.generateClientCertificate = nativeBinding.generateClientCertificate
module.exports.generateManifest = nativeBinding.generateManifest
module.exports.generateRootCa = nativeBinding.generateRootCa
module.exports.hasBackendForPath = nativeBinding.hasBackendForPath
module.exports.listFiles = nativeBinding.listFiles
module.exports.peekFile = nativeBinding.peekFile
module.exports.readFile = nativeBinding.readFile
module.exports.signNonce = nativeBinding.signNonce
module.exports.verifyClientCertificate = nativeBinding.verifyClientCertificate
module.exports.verifyNonce = nativeBinding.verifyNonce
@@ -1,3 +0,0 @@
# `@drop-oss/droplet-linux-arm-gnueabihf`

This is the **armv7-unknown-linux-gnueabihf** binary for `@drop-oss/droplet`
@@ -1,21 +0,0 @@
{
  "name": "@drop-oss/droplet-linux-arm-gnueabihf",
  "version": "0.0.0",
  "os": [
    "linux"
  ],
  "cpu": [
    "arm"
  ],
  "main": "droplet.linux-arm-gnueabihf.node",
  "files": [
    "droplet.linux-arm-gnueabihf.node"
  ],
  "license": "MIT",
  "engines": {
    "node": ">= 10"
  },
  "repository": {
    "url": "https://github.com/Drop-OSS/droplet"
  }
}
@@ -1,3 +0,0 @@
# `@drop-oss/droplet-linux-arm-musleabihf`

This is the **armv7-unknown-linux-musleabihf** binary for `@drop-oss/droplet`
@@ -1,21 +0,0 @@
{
  "name": "@drop-oss/droplet-linux-arm-musleabihf",
  "version": "0.0.0",
  "os": [
    "linux"
  ],
  "cpu": [
    "arm"
  ],
  "main": "droplet.linux-arm-musleabihf.node",
  "files": [
    "droplet.linux-arm-musleabihf.node"
  ],
  "license": "MIT",
  "engines": {
    "node": ">= 10"
  },
  "repository": {
    "url": "https://github.com/Drop-OSS/droplet"
  }
}
package.json (17 lines changed)
@@ -1,6 +1,6 @@
{
  "name": "@drop-oss/droplet",
  "version": "1.1.0",
  "version": "1.5.3",
  "main": "index.js",
  "types": "index.d.ts",
  "napi": {
@@ -8,20 +8,21 @@
    "triples": {
      "additional": [
        "aarch64-apple-darwin",
        "x86_64-apple-darwin",
        "universal-apple-darwin",
        "aarch64-unknown-linux-gnu",
        "aarch64-unknown-linux-musl",
        "aarch64-pc-windows-msvc",
        "armv7-unknown-linux-gnueabihf",
        "armv7-unknown-linux-musleabihf",
        "x86_64-unknown-linux-gnu",
        "x86_64-unknown-linux-musl",
        "universal-apple-darwin",
        "riscv64gc-unknown-linux-gnu"
        "riscv64gc-unknown-linux-gnu",
        "aarch64-pc-windows-msvc",
        "x86_64-pc-windows-msvc"
      ]
    }
  },
  "license": "MIT",
  "devDependencies": {
    "@napi-rs/cli": "^2.18.4",
    "@napi-rs/cli": "3.0.0-alpha.91",
    "@types/node": "^22.13.10",
    "ava": "^6.2.0"
  },
@@ -37,7 +38,7 @@
    "build:debug": "napi build --platform",
    "prepublishOnly": "napi prepublish -t npm",
    "test": "ava",
    "universal": "napi universal",
    "universal": "napi universalize",
    "version": "napi version"
  },
  "packageManager": "yarn@4.7.0",
patches/rawzip+0.2.0.patch (26 lines, new file)
@@ -0,0 +1,26 @@
diff --git a/src/archive.rs b/src/archive.rs
index 1203015..837c405 100644
--- a/src/archive.rs
+++ b/src/archive.rs
@@ -275,7 +275,7 @@ impl<'data> Iterator for ZipSliceEntries<'data> {
 /// ```
 #[derive(Debug, Clone)]
 pub struct ZipArchive<R> {
-    pub(crate) reader: R,
+    pub reader: R,
     pub(crate) comment: ZipString,
     pub(crate) eocd: EndOfCentralDirectory,
 }
@@ -431,9 +431,9 @@ where
 #[derive(Debug, Clone)]
 pub struct ZipEntry<'archive, R> {
     archive: &'archive ZipArchive<R>,
-    body_offset: u64,
-    body_end_offset: u64,
-    entry: ZipArchiveEntryWayfinder,
+    pub body_offset: u64,
+    pub body_end_offset: u64,
+    pub entry: ZipArchiveEntryWayfinder,
 }

 impl<'archive, R> ZipEntry<'archive, R>
@@ -1,120 +0,0 @@
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
    fs::{self, metadata, File},
    io::BufReader,
    path::{Path, PathBuf},
};

fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
    if metadata(path).unwrap().is_dir() {
        let paths = fs::read_dir(path).unwrap();
        for path_result in paths {
            let full_path = path_result.unwrap().path();
            if metadata(&full_path).unwrap().is_dir() {
                _list_files(vec, &full_path);
            } else {
                vec.push(full_path);
            }
        }
    }
}

pub struct VersionFile {
    pub relative_filename: String,
    pub permission: u32,
}

pub trait VersionBackend: 'static {
    fn list_files(&self, path: &Path) -> Vec<VersionFile>;
    fn reader(&self, file: &VersionFile) -> BufReader<File>;
}

pub struct PathVersionBackend {
    pub base_dir: PathBuf,
}
impl VersionBackend for PathVersionBackend {
    fn list_files(&self, path: &Path) -> Vec<VersionFile> {
        let mut vec = Vec::new();
        _list_files(&mut vec, path);

        let mut results = Vec::new();

        for pathbuf in vec.iter() {
            let file = File::open(pathbuf.clone()).unwrap();
            let relative = pathbuf.strip_prefix(path).unwrap();
            let metadata = file.try_clone().unwrap().metadata().unwrap();
            let permission_object = metadata.permissions();
            let permissions = {
                let perm: u32;
                #[cfg(target_family = "unix")]
                {
                    perm = permission_object.mode();
                }
                #[cfg(not(target_family = "unix"))]
                {
                    perm = 0
                }
                perm
            };

            results.push(VersionFile {
                relative_filename: relative.to_string_lossy().to_string(),
                permission: permissions,
            });
        }

        results
    }

    fn reader(&self, file: &VersionFile) -> BufReader<File> {
        let file = File::open(self.base_dir.join(file.relative_filename.clone())).unwrap();
        let reader = BufReader::with_capacity(4096, file);
        return reader;
    }
}

// Todo implementation for archives
// Split into a separate impl for each type of archive
pub struct ArchiveVersionBackend {}
impl VersionBackend for ArchiveVersionBackend {
    fn list_files(&self, path: &Path) -> Vec<VersionFile> {
        todo!()
    }

    fn reader(&self, file: &VersionFile) -> BufReader<File> {
        todo!()
    }
}

pub fn create_backend_for_path(path: &Path) -> Option<Box<(dyn VersionBackend)>> {
    let is_directory = path.is_dir();
    if is_directory {
        return Some(Box::new(PathVersionBackend {
            base_dir: path.to_path_buf(),
        }));
    };

    /*
    Insert checks for whatever backend you like
    */

    None
}

#[napi]
pub fn has_backend_for_path(path: String) -> bool {
    let path = Path::new(&path);

    let has_backend = create_backend_for_path(path).is_some();

    has_backend
}

#[napi]
pub fn list_files(path: String) -> Vec<String> {
    let path = Path::new(&path);
    let backend = create_backend_for_path(path).unwrap();
    let files = backend.list_files(path);
    files.into_iter().map(|e| e.relative_filename).collect()
}
@@ -1,8 +1,9 @@
#![deny(clippy::all)]
#![feature(trait_alias)]

pub mod file_utils;
pub mod manifest;
pub mod ssl;
pub mod version;

#[macro_use]
extern crate napi_derive;
@@ -1,22 +1,20 @@
use std::{
    collections::HashMap,
    fs::File,
    io::{BufRead, BufReader},
    path::Path,
    sync::Arc,
    thread,
};

#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;

use napi::{
    threadsafe_function::{ErrorStrategy, ThreadsafeFunction, ThreadsafeFunctionCallMode},
    Error, JsFunction,
    threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
    Result,
};
use serde_json::json;
use uuid::Uuid;

use crate::file_utils::create_backend_for_path;
use crate::version::utils::create_backend_for_path;

const CHUNK_SIZE: usize = 1024 * 1024 * 64;

@@ -29,14 +27,10 @@ struct ChunkData {
}

#[napi]
pub fn call_alt_thread_func(callback: JsFunction) -> Result<(), Error> {
    let tsfn: ThreadsafeFunction<u32, ErrorStrategy::CalleeHandled> = callback
        .create_threadsafe_function(0, |ctx| {
            ctx.env.create_uint32(ctx.value + 1).map(|v| vec![v])
        })?;
    let tsfn = tsfn.clone();
pub fn call_alt_thread_func(tsfn: Arc<ThreadsafeFunction<()>>) -> Result<(), String> {
    let tsfn_cloned = tsfn.clone();
    thread::spawn(move || {
        tsfn.call(Ok(0), ThreadsafeFunctionCallMode::NonBlocking);
        tsfn_cloned.call(Ok(()), ThreadsafeFunctionCallMode::Blocking);
    });
    Ok(())
}
@@ -44,28 +38,14 @@ pub fn call_alt_thread_func(callback: JsFunction) -> Result<(), Error> {
#[napi]
pub fn generate_manifest(
    dir: String,
    progress: JsFunction,
    log: JsFunction,
    callback: JsFunction,
) -> Result<(), Error> {
    let progress_sfn: ThreadsafeFunction<i32, ErrorStrategy::CalleeHandled> = progress
        .create_threadsafe_function(0, |ctx| ctx.env.create_int32(ctx.value).map(|v| vec![v]))
        .unwrap();
    let log_sfn: ThreadsafeFunction<String, ErrorStrategy::CalleeHandled> = log
        .create_threadsafe_function(0, |ctx| {
            ctx.env.create_string_from_std(ctx.value).map(|v| vec![v])
        })
        .unwrap();
    let callback_sfn: ThreadsafeFunction<String, ErrorStrategy::CalleeHandled> = callback
        .create_threadsafe_function(0, |ctx| {
            ctx.env.create_string_from_std(ctx.value).map(|v| vec![v])
        })
        .unwrap();

    progress_sfn: ThreadsafeFunction<i32>,
    log_sfn: ThreadsafeFunction<String>,
    callback_sfn: ThreadsafeFunction<String>,
) -> Result<(), String> {
    thread::spawn(move || {
        let base_dir = Path::new(&dir);
        let backend = create_backend_for_path(base_dir).unwrap();
        let files = backend.list_files(base_dir);
        let mut backend = create_backend_for_path(base_dir).unwrap();
        let files = backend.list_files();

        // Filepath to chunk data
        let mut chunks: HashMap<String, ChunkData> = HashMap::new();
@@ -74,7 +54,8 @@ pub fn generate_manifest(
        let mut i: i32 = 0;

        for version_file in files {
            let mut reader = backend.reader(&version_file);
            let raw_reader = backend.reader(&version_file).unwrap();
            let mut reader = BufReader::with_capacity(CHUNK_SIZE, raw_reader);

            let mut chunk_data = ChunkData {
                permissions: version_file.permission,
@@ -103,8 +84,7 @@
            let log_str = format!(
                "Processed chunk {} for {}",
                chunk_index,
                &version_file.relative_filename
                chunk_index, &version_file.relative_filename
            );
            log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
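The hunks above replace the napi v2 JsFunction-to-TSFN boilerplate with v3's typed ThreadsafeFunction parameters, and wrap the backend reader in a BufReader sized to CHUNK_SIZE. The chunk loop itself is mostly unchanged context hidden by the diff; below is a minimal sketch of fixed-size chunk hashing consistent with the visible pieces (the 64 MiB CHUNK_SIZE and the md5 dependency in Cargo.toml). It is an illustration, not the repository's exact loop, which also reports progress and log lines through the threadsafe functions.

use std::io::Read;

const CHUNK_SIZE: usize = 1024 * 1024 * 64;

// Sketch: hash a reader in fixed 64 MiB chunks, returning (md5 hex, length) pairs.
// Assumption: droplet's manifest records a hash per chunk; md5 is in Cargo.toml.
fn chunk_hashes<R: Read>(mut reader: R) -> std::io::Result<Vec<(String, usize)>> {
    let mut chunks = Vec::new();
    let mut buf = vec![0u8; CHUNK_SIZE];
    loop {
        // Read::read may return short reads, so fill each chunk manually.
        let mut filled = 0;
        while filled < CHUNK_SIZE {
            let n = reader.read(&mut buf[filled..])?;
            if n == 0 {
                break; // EOF
            }
            filled += n;
        }
        if filled == 0 {
            break;
        }
        chunks.push((format!("{:x}", md5::compute(&buf[..filled])), filled));
        if filled < CHUNK_SIZE {
            break;
        }
    }
    Ok(chunks)
}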
src/version/backends.rs (151 lines, new file)
@@ -0,0 +1,151 @@
use core::arch;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
    fs::File,
    io::{self, Read, Seek},
    path::PathBuf,
    pin::Pin,
    rc::Rc,
    sync::Arc,
};

use rawzip::{
    FileReader, ReaderAt, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry, RECOMMENDED_BUFFER_SIZE,
};

use crate::version::{
    types::{MinimumFileObject, Skippable, VersionBackend, VersionFile},
    utils::_list_files,
};

pub struct PathVersionBackend {
    pub base_dir: PathBuf,
}
impl VersionBackend for PathVersionBackend {
    fn list_files(&mut self) -> Vec<VersionFile> {
        let mut vec = Vec::new();
        _list_files(&mut vec, &self.base_dir);

        let mut results = Vec::new();

        for pathbuf in vec.iter() {
            let file = File::open(pathbuf.clone()).unwrap();
            let relative = pathbuf.strip_prefix(self.base_dir.clone()).unwrap();
            let metadata = file.try_clone().unwrap().metadata().unwrap();
            let permission_object = metadata.permissions();
            let permissions = {
                let perm: u32;
                #[cfg(target_family = "unix")]
                {
                    perm = permission_object.mode();
                }
                #[cfg(not(target_family = "unix"))]
                {
                    perm = 0
                }
                perm
            };

            results.push(VersionFile {
                relative_filename: relative.to_string_lossy().to_string(),
                permission: permissions,
                size: metadata.len(),
            });
        }

        results
    }

    fn reader(&mut self, file: &VersionFile) -> Option<Box<(dyn MinimumFileObject + 'static)>> {
        let file = File::open(self.base_dir.join(file.relative_filename.clone())).ok()?;

        return Some(Box::new(file));
    }
}

pub struct ZipVersionBackend {
    archive: Arc<ZipArchive<FileReader>>,
}
impl ZipVersionBackend {
    pub fn new(archive: File) -> Self {
        let archive = ZipArchive::from_file(archive, &mut [0u8; RECOMMENDED_BUFFER_SIZE]).unwrap();
        Self {
            archive: Arc::new(archive),
        }
    }

    pub fn new_entry(&self, entry: ZipEntry<'_, FileReader>) -> ZipFileWrapper {
        ZipFileWrapper {
            archive: self.archive.clone(),
            wayfinder: entry.entry,
            offset: entry.body_offset,
            end_offset: entry.body_end_offset,
        }
    }
}

pub struct ZipFileWrapper {
    pub archive: Arc<ZipArchive<FileReader>>,
    wayfinder: ZipArchiveEntryWayfinder,
    offset: u64,
    end_offset: u64,
}

impl Read for ZipFileWrapper {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let read_size = buf.len().min((self.end_offset - self.offset) as usize);
        let read = self
            .archive
            .reader
            .read_at(&mut buf[..read_size], self.offset)?;
        self.offset += read as u64;
        Ok(read)
    }
}
impl Skippable for ZipFileWrapper {
    fn skip(&mut self, amount: u64) {
        self.offset += amount;
    }
}
impl MinimumFileObject for ZipFileWrapper {}

impl VersionBackend for ZipVersionBackend {
    fn list_files(&mut self) -> Vec<VersionFile> {
        let mut results = Vec::new();
        let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
        let mut budget_iterator = self.archive.entries(read_buffer);
        while let Some(entry) = budget_iterator.next_entry().unwrap() {
            if entry.is_dir() {
                continue;
            }
            results.push(VersionFile {
                relative_filename: entry.file_safe_path().unwrap().to_string(),
                permission: 744, // apparently ZIPs with permissions are not supported by this library, so we let the owner do anything
                size: entry.uncompressed_size_hint(),
            });
        }
        results
    }

    fn reader(&mut self, file: &VersionFile) -> Option<Box<(dyn MinimumFileObject)>> {
        let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
        let mut entries = self.archive.entries(read_buffer);
        let entry = loop {
            if let Some(v) = entries.next_entry().unwrap() {
                if v.file_safe_path().unwrap().to_string() == file.relative_filename {
                    break Some(v);
                }
            } else {
                break None;
            }
        }?;

        let wayfinder = entry.wayfinder();
        let local_entry = self.archive.get_entry(wayfinder).unwrap();

        let wrapper = self.new_entry(local_entry);

        Some(Box::new(wrapper))
    }
}
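`ZipVersionBackend::reader` rescans the central directory from the start for every file it opens, and the index.d.ts comment on `peekFile` concedes the interface favors simplicity over speed. A possible refinement, sketched under the assumption that `ZipArchiveEntryWayfinder` is a small `Copy` value type, is to scan once and remember where each entry lives:

use std::collections::HashMap;
use rawzip::{FileReader, ZipArchive, ZipArchiveEntryWayfinder, RECOMMENDED_BUFFER_SIZE};

// Hypothetical index (not in the diff): one central-directory scan up front,
// then reader() can jump straight to archive.get_entry(wayfinder).
pub struct ZipEntryIndex {
    by_name: HashMap<String, ZipArchiveEntryWayfinder>,
}

impl ZipEntryIndex {
    pub fn build(archive: &ZipArchive<FileReader>) -> Self {
        let mut by_name = HashMap::new();
        let buf = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
        let mut entries = archive.entries(buf);
        while let Some(entry) = entries.next_entry().unwrap() {
            if entry.is_dir() {
                continue;
            }
            by_name.insert(entry.file_safe_path().unwrap().to_string(), entry.wayfinder());
        }
        Self { by_name }
    }

    // Look up a file without rescanning; assumes ZipArchiveEntryWayfinder: Copy.
    pub fn find(&self, name: &str) -> Option<ZipArchiveEntryWayfinder> {
        self.by_name.get(name).copied()
    }
}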
src/version/mod.rs (3 lines, new file)
@@ -0,0 +1,3 @@
pub mod utils;
pub mod types;
pub mod backends;
src/version/types.rs (52 lines, new file)
@@ -0,0 +1,52 @@
use std::{
    fmt::Debug, io::{Read, Seek, SeekFrom}
};

use tokio::io::{self, AsyncRead};

#[derive(Debug, Clone)]
pub struct VersionFile {
    pub relative_filename: String,
    pub permission: u32,
    pub size: u64,
}

pub trait Skippable {
    fn skip(&mut self, amount: u64);
}
impl<T> Skippable for T
where
    T: Seek,
{
    fn skip(&mut self, amount: u64) {
        self.seek(SeekFrom::Start(amount)).unwrap();
    }
}

pub trait MinimumFileObject: Read + Send + Skippable {}
impl<T: Read + Send + Seek> MinimumFileObject for T {}

// Intentionally not a generic, because of types in read_file
pub struct ReadToAsyncRead {
    pub inner: Box<(dyn Read + Send)>,
    pub backend: Box<(dyn VersionBackend + Send)>,
}

impl AsyncRead for ReadToAsyncRead {
    fn poll_read(
        mut self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
        buf: &mut tokio::io::ReadBuf<'_>,
    ) -> std::task::Poll<io::Result<()>> {
        let mut read_buf = [0u8; 8192];
        let var_name = self.inner.read(&mut read_buf).unwrap();
        let amount = var_name;
        buf.put_slice(&read_buf[0..amount]);
        std::task::Poll::Ready(Ok(()))
    }
}

pub trait VersionBackend {
    fn list_files(&mut self) -> Vec<VersionFile>;
    fn reader(&mut self, file: &VersionFile) -> Option<Box<(dyn MinimumFileObject)>>;
}
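One subtlety in `Skippable` above: the blanket impl for `T: Seek` seeks to the absolute position `SeekFrom::Start(amount)`, while `ZipFileWrapper::skip` in backends.rs advances a relative offset (`self.offset += amount`). The two only agree when `skip` is called once on a fresh reader, which is how `read_file` uses it today. If `skip` ever needs to be relative in general, a sketch like this would align the `Seek`-based impl with the zip wrapper's semantics:

use std::io::{Seek, SeekFrom};

// Sketch: relative skip for seekable readers, matching the zip wrapper's
// `offset += amount` behaviour even after earlier reads have moved the cursor.
fn skip_relative<T: Seek>(reader: &mut T, amount: u64) -> std::io::Result<u64> {
    reader.seek(SeekFrom::Current(amount as i64))
}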
src/version/utils.rs (131 lines, new file)
@@ -0,0 +1,131 @@
use std::{
    fs::{self, metadata, File},
    io::Read,
    path::{Path, PathBuf},
};

use napi::{bindgen_prelude::*, tokio_stream::StreamExt};
use tokio_util::codec::{BytesCodec, FramedRead};

use crate::version::{
    backends::{PathVersionBackend, ZipVersionBackend},
    types::{ReadToAsyncRead, VersionBackend, VersionFile},
};

pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
    if metadata(path).unwrap().is_dir() {
        let paths = fs::read_dir(path).unwrap();
        for path_result in paths {
            let full_path = path_result.unwrap().path();
            if metadata(&full_path).unwrap().is_dir() {
                _list_files(vec, &full_path);
            } else {
                vec.push(full_path);
            }
        }
    }
}

pub fn create_backend_for_path<'a>(path: &Path) -> Option<Box<(dyn VersionBackend + Send + 'a)>> {
    let is_directory = path.is_dir();
    if is_directory {
        return Some(Box::new(PathVersionBackend {
            base_dir: path.to_path_buf(),
        }));
    };

    if path.to_string_lossy().ends_with(".zip") {
        let f = File::open(path.to_path_buf()).unwrap();
        return Some(Box::new(ZipVersionBackend::new(f)));
    }

    None
}

#[napi]
pub fn has_backend_for_path(path: String) -> bool {
    let path = Path::new(&path);

    let has_backend = create_backend_for_path(path).is_some();

    has_backend
}

#[napi]
pub fn list_files(path: String) -> Result<Vec<String>> {
    let path = Path::new(&path);
    let mut backend =
        create_backend_for_path(path).ok_or(napi::Error::from_reason("No backend for path"))?;
    let files = backend.list_files();
    Ok(files.into_iter().map(|e| e.relative_filename).collect())
}

/**
 * This is inefficient, but is used in attempt to keep the interface simple
 */
#[napi]
pub fn peek_file(path: String, sub_path: String) -> Result<u32> {
    let path = Path::new(&path);
    let mut backend =
        create_backend_for_path(path).ok_or(napi::Error::from_reason("No backend for path"))?;
    let files = backend.list_files();

    let file = files
        .iter()
        .find(|e| e.relative_filename == sub_path)
        .ok_or(napi::Error::from_reason("Can't find file to peek"))?;
    return Ok(file.size.try_into().unwrap());
}
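`peek_file` narrows the `u64` size to `u32` with `try_into().unwrap()`, which panics for entries of 4 GiB or more — plausible for game installers (the generate.sh test asset is already around 1 GB). A hedged alternative, sketched below with a hypothetical function name that is not part of the published API, reports the size as `i64`, which napi maps to a plain JS number and stays exact below 2^53:

// Hypothetical variant of peek_file that cannot panic on large entries.
#[napi]
pub fn peek_file_size(path: String, sub_path: String) -> Result<i64> {
    let path = Path::new(&path);
    let mut backend =
        create_backend_for_path(path).ok_or(napi::Error::from_reason("No backend for path"))?;
    let file = backend
        .list_files()
        .into_iter()
        .find(|e| e.relative_filename == sub_path)
        .ok_or(napi::Error::from_reason("Can't find file to peek"))?;
    // u64 -> i64 is lossless for any realistic file size (< 2^63 bytes).
    Ok(file.size as i64)
}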
#[napi]
pub fn read_file(
    path: String,
    sub_path: String,
    env: &Env,
    start: Option<u32>,
    end: Option<u32>,
) -> Option<ReadableStream<'_, BufferSlice<'_>>> {
    let path = Path::new(&path);
    let mut backend = create_backend_for_path(path).unwrap();
    let version_file = VersionFile {
        relative_filename: sub_path,
        permission: 0, // Shouldn't matter
        size: 0,       // Shouldn't matter
    };
    // Use `?` operator for cleaner error propagation from `Option`
    let mut reader = backend.reader(&version_file)?;

    // Skip the 'start' amount of bytes without seek
    if let Some(skip) = start {
        reader.skip(skip.into());
        // io::copy(&mut reader.by_ref().take(skip.into()), &mut io::sink()).unwrap();
    }

    let async_reader = if let Some(limit) = end {
        let amount = limit - start.or(Some(0)).unwrap();
        ReadToAsyncRead {
            inner: Box::new(reader.take(amount.into())),
            backend,
        }
    } else {
        ReadToAsyncRead {
            inner: reader,
            backend,
        }
    };

    // Create a FramedRead stream with BytesCodec for chunking
    let stream = FramedRead::new(async_reader, BytesCodec::new())
        // Use StreamExt::map to transform each Result item
        .map(|result_item| {
            result_item
                // Apply Result::map to transform Ok(BytesMut) to Ok(Vec<u8>)
                .map(|bytes| bytes.to_vec())
                // Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
                .map_err(|e| napi::Error::from(e)) // napi::Error implements From<tokio::io::Error>
        });
    // Create the napi-rs ReadableStream from the tokio_stream::Stream
    // The unwrap() here means if stream creation fails, it will panic.
    // For a production system, consider returning Result<Option<...>> and handling this.
    Some(ReadableStream::create_with_stream_bytes(env, stream).unwrap())
}