50 Commits

Author SHA1 Message Date
2969d64c45 feat: move to bigints for larger file sizes 2025-07-14 15:17:38 +10:00
e525ff44bb Merge pull request #3 from nickbabcock/rawzip-0.3
Bump rawzip to 0.3
2025-07-13 23:08:10 +10:00
52a685391a Bump rawzip to 0.3
No need for any patches ;)
2025-07-13 07:46:36 -05:00
535d5a4062 i give up, bump all versions 2025-07-02 20:54:06 +10:00
450734f5c9 bump version 2025-07-02 20:45:58 +10:00
20e2eda381 fix: regenerate lockfile 2025-07-02 20:45:02 +10:00
04d3f2dd8c fix: revert napi update 2025-07-02 20:33:53 +10:00
59ca57ee1b fix: bump napi version and commit lockfile 2025-07-02 20:20:19 +10:00
8f4b2a6c6d feat: add file peaking, 1.5.0 2025-07-02 18:03:35 +10:00
7c3e6fe63c fix: add target setup 2025-07-02 13:47:08 +10:00
204902951e fix: nightly toolchain for docker builds 2025-07-02 13:39:17 +10:00
b3011c517d fix: skip tests, move to nightly 2025-07-02 12:29:58 +10:00
74a54eb9ac fix: bump @napi-rs/cli to alpha version 2025-07-02 12:22:31 +10:00
89e94e3afd fix: install patch crate before patching 2025-07-02 12:07:00 +10:00
169d471bb7 fix: patch crate on build 2025-07-02 12:05:57 +10:00
076dc60155 version bump to 1.4.0 2025-07-02 11:56:22 +10:00
48e5b97a4e feat: zip file reading 2025-07-02 11:55:04 +10:00
c1aaf8adcd feat: work on version backend system 2025-07-01 22:51:22 +10:00
fe43f79062 chore: bump version and add test 2025-05-30 20:55:53 +10:00
30b9c4a1cc bump version 2025-05-29 09:30:10 +10:00
42f770aed9 feat: Add file start and end to read_file function
Signed-off-by: quexeky <git@quexeky.dev>
2025-05-28 22:32:37 +10:00
4670df4127 fix: add windows target 2025-05-28 21:01:16 +10:00
e33eaebe1a fix: refix macos universalisation 2025-05-28 20:52:30 +10:00
f954f23410 fix: longshot fix: add x86_64-unknown-linux-gnu to the list of ABI targets 2025-05-28 20:47:59 +10:00
3632687001 fix: again, macos universalisation 2025-05-28 20:33:56 +10:00
90817487ed fix: universalisation for macos 2025-05-28 20:27:00 +10:00
98b84c64d4 fix: remove problematic builds 2025-05-28 19:58:26 +10:00
d3186cdd5f fix: types 2025-05-28 17:07:12 +10:00
bb678b4b3a fix: tests 2025-05-28 16:48:07 +10:00
cc94798962 feat: add file reader 2025-05-28 15:03:45 +10:00
7811818a72 Merge branch 'borked-reader' 2025-05-28 14:55:05 +10:00
b6910e717b fix: Changed FramedRead to work with ReadableStream
Signed-off-by: quexeky <git@quexeky.dev>
2025-05-28 14:52:42 +10:00
45a26c7156 inprogress: handoff to quexeky 2025-05-28 13:53:28 +10:00
16b78bca17 fix: chunk size 2025-05-27 10:34:44 +10:00
4ac19b8be0 fix: update index.js & index.d.ts 2025-05-26 17:20:03 +10:00
072a1584a0 feat: add list files command 2025-05-26 15:02:41 +10:00
6b5356627a fix: remove unnecessary size parameter causing windows build issues 2025-05-26 14:03:26 +10:00
7ede73e87c feat: move to backend-based manifest 2025-05-26 13:56:24 +10:00
921eb02132 fix: finish removing 32-bit windows 2025-03-31 09:25:30 +11:00
a95d0cea26 fix: bump version and remove freebsd 2025-03-31 09:15:56 +11:00
277651df09 fix: temporarily remove docker-based builds from publish stage 2025-03-31 09:07:44 +11:00
7896f11207 fix: temporarily remove docker-basd tests 2025-03-31 09:06:17 +11:00
0c3196ba7d fix: remove android builds 2025-03-30 18:18:41 +11:00
a0791d71a9 fix: package json 2025-03-30 13:14:51 +11:00
a8b8d4bb3d fix: remove android builds from package.json 2025-03-30 13:12:45 +11:00
59dd17a156 fix: add repo urls 2025-03-30 12:56:15 +11:00
2c28c605ba fix: add repo url 2025-03-30 12:37:41 +11:00
987defaafb fix: update container path 2025-03-29 22:56:21 +11:00
7815338e9e fix: update to use AWS docker mirror 2025-03-29 22:51:44 +11:00
7bb0689cbd fix: update namespace from @drop to @drop-oss 2025-03-29 22:44:30 +11:00
50 changed files with 3649 additions and 785 deletions

View File

@ -12,12 +12,9 @@ permissions:
- main - main
tags-ignore: tags-ignore:
- "**" - "**"
paths-ignore: paths:
- "**/*.md" - package.json
- LICENSE - .github/workflows/*
- "**/*.gitignore"
- .editorconfig
- docs/**
pull_request: null pull_request: null
jobs: jobs:
build: build:
@ -46,21 +43,6 @@ jobs:
target: aarch64-unknown-linux-gnu target: aarch64-unknown-linux-gnu
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64 docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
build: yarn build --target aarch64-unknown-linux-gnu build: yarn build --target aarch64-unknown-linux-gnu
- host: ubuntu-latest
target: armv7-unknown-linux-gnueabihf
setup: |
sudo apt-get update
sudo apt-get install gcc-arm-linux-gnueabihf -y
build: yarn build --target armv7-unknown-linux-gnueabihf
- host: ubuntu-latest
target: armv7-unknown-linux-musleabihf
build: yarn build --target armv7-unknown-linux-musleabihf
- host: ubuntu-latest
target: aarch64-linux-android
build: yarn build --target aarch64-linux-android
- host: ubuntu-latest
target: armv7-linux-androideabi
build: yarn build --target armv7-linux-androideabi
- host: ubuntu-latest - host: ubuntu-latest
target: aarch64-unknown-linux-musl target: aarch64-unknown-linux-musl
docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
@ -77,7 +59,7 @@ jobs:
sudo apt-get update sudo apt-get update
sudo apt-get install gcc-riscv64-linux-gnu -y sudo apt-get install gcc-riscv64-linux-gnu -y
build: yarn build --target riscv64gc-unknown-linux-gnu build: yarn build --target riscv64gc-unknown-linux-gnu
name: stable - ${{ matrix.settings.target }} - node@20 name: nightly - ${{ matrix.settings.target }} - node@20
runs-on: ${{ matrix.settings.host }} runs-on: ${{ matrix.settings.host }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -88,7 +70,7 @@ jobs:
node-version: 20 node-version: 20
cache: yarn cache: yarn
- name: Install - name: Install
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@nightly
if: ${{ !matrix.settings.docker }} if: ${{ !matrix.settings.docker }}
with: with:
toolchain: nightly toolchain: nightly
@ -112,14 +94,20 @@ jobs:
if: ${{ matrix.settings.setup }} if: ${{ matrix.settings.setup }}
shell: bash shell: bash
- name: Install dependencies - name: Install dependencies
run: yarn install run: |-
cargo install patch-crate &&
cargo patch-crate &&
yarn install
- name: Build in docker - name: Build in docker
uses: addnab/docker-run-action@v3 uses: addnab/docker-run-action@v3
if: ${{ matrix.settings.docker }} if: ${{ matrix.settings.docker }}
with: with:
image: ${{ matrix.settings.docker }} image: ${{ matrix.settings.docker }}
options: "--user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build" options: "--user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build"
run: ${{ matrix.settings.build }} run: |-
rustup default nightly &&
rustup target add ${{ matrix.settings.target }} &&
${{ matrix.settings.build }}
- name: Build - name: Build
run: ${{ matrix.settings.build }} run: ${{ matrix.settings.build }}
if: ${{ !matrix.settings.docker }} if: ${{ !matrix.settings.docker }}
@ -166,180 +154,180 @@ jobs:
shell: bash shell: bash
- name: Test bindings - name: Test bindings
run: yarn test run: yarn test
test-linux-x64-gnu-binding: # test-linux-x64-gnu-binding:
name: Test bindings on Linux-x64-gnu - node@${{ matrix.node }} # name: Test bindings on Linux-x64-gnu - node@${{ matrix.node }}
needs: # needs:
- build # - build
strategy: # strategy:
fail-fast: false # fail-fast: false
matrix: # matrix:
node: # node:
- "18" # - "18"
- "20" # - "20"
runs-on: ubuntu-latest # runs-on: ubuntu-latest
steps: # steps:
- uses: actions/checkout@v4 # - uses: actions/checkout@v4
- name: Setup node # - name: Setup node
uses: actions/setup-node@v4 # uses: actions/setup-node@v4
with: # with:
node-version: ${{ matrix.node }} # node-version: ${{ matrix.node }}
cache: yarn # cache: yarn
- name: Install dependencies # - name: Install dependencies
run: yarn install # run: yarn install
- name: Download artifacts # - name: Download artifacts
uses: actions/download-artifact@v4 # uses: actions/download-artifact@v4
with: # with:
name: bindings-x86_64-unknown-linux-gnu # name: bindings-x86_64-unknown-linux-gnu
path: . # path: .
- name: List packages # - name: List packages
run: ls -R . # run: ls -R .
shell: bash # shell: bash
- name: Test bindings # - name: Test bindings
run: docker run --rm -v $(pwd):/build -w /build node:${{ matrix.node }}-slim yarn test # run: docker run --rm -v $(pwd):/build -w /build public.ecr.aws/docker/library/node:${{ matrix.node }}-slim yarn test
test-linux-x64-musl-binding: # test-linux-x64-musl-binding:
name: Test bindings on x86_64-unknown-linux-musl - node@${{ matrix.node }} # name: Test bindings on x86_64-unknown-linux-musl - node@${{ matrix.node }}
needs: # needs:
- build # - build
strategy: # strategy:
fail-fast: false # fail-fast: false
matrix: # matrix:
node: # node:
- "18" # - "18"
- "20" # - "20"
runs-on: ubuntu-latest # runs-on: ubuntu-latest
steps: # steps:
- uses: actions/checkout@v4 # - uses: actions/checkout@v4
- name: Setup node # - name: Setup node
uses: actions/setup-node@v4 # uses: actions/setup-node@v4
with: # with:
node-version: ${{ matrix.node }} # node-version: ${{ matrix.node }}
cache: yarn # cache: yarn
- name: Install dependencies # - name: Install dependencies
run: | # run: |
yarn config set supportedArchitectures.libc "musl" # yarn config set supportedArchitectures.libc "musl"
yarn install # yarn install
- name: Download artifacts # - name: Download artifacts
uses: actions/download-artifact@v4 # uses: actions/download-artifact@v4
with: # with:
name: bindings-x86_64-unknown-linux-musl # name: bindings-x86_64-unknown-linux-musl
path: . # path: .
- name: List packages # - name: List packages
run: ls -R . # run: ls -R .
shell: bash # shell: bash
- name: Test bindings # - name: Test bindings
run: docker run --rm -v $(pwd):/build -w /build node:${{ matrix.node }}-alpine yarn test # run: docker run --rm -v $(pwd):/build -w /build public.ecr.aws/docker/library/node:${{ matrix.node }}-alpine yarn test
test-linux-aarch64-gnu-binding: # test-linux-aarch64-gnu-binding:
name: Test bindings on aarch64-unknown-linux-gnu - node@${{ matrix.node }} # name: Test bindings on aarch64-unknown-linux-gnu - node@${{ matrix.node }}
needs: # needs:
- build # - build
strategy: # strategy:
fail-fast: false # fail-fast: false
matrix: # matrix:
node: # node:
- "18" # - "18"
- "20" # - "20"
runs-on: ubuntu-latest # runs-on: ubuntu-latest
steps: # steps:
- uses: actions/checkout@v4 # - uses: actions/checkout@v4
- name: Download artifacts # - name: Download artifacts
uses: actions/download-artifact@v4 # uses: actions/download-artifact@v4
with: # with:
name: bindings-aarch64-unknown-linux-gnu # name: bindings-aarch64-unknown-linux-gnu
path: . # path: .
- name: List packages # - name: List packages
run: ls -R . # run: ls -R .
shell: bash # shell: bash
- name: Install dependencies # - name: Install dependencies
run: | # run: |
yarn config set supportedArchitectures.cpu "arm64" # yarn config set supportedArchitectures.cpu "arm64"
yarn config set supportedArchitectures.libc "glibc" # yarn config set supportedArchitectures.libc "glibc"
yarn install # yarn install
- name: Set up QEMU # - name: Set up QEMU
uses: docker/setup-qemu-action@v3 # uses: docker/setup-qemu-action@v3
with: # with:
platforms: arm64 # platforms: arm64
- run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes # - run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- name: Setup and run tests # - name: Setup and run tests
uses: addnab/docker-run-action@v3 # uses: addnab/docker-run-action@v3
with: # with:
image: node:${{ matrix.node }}-slim # image: public.ecr.aws/docker/library/node:${{ matrix.node }}-slim
options: "--platform linux/arm64 -v ${{ github.workspace }}:/build -w /build" # options: "--platform linux/arm64 -v ${{ github.workspace }}:/build -w /build"
run: | # run: |
set -e # set -e
yarn test # yarn test
ls -la # ls -la
test-linux-aarch64-musl-binding: # test-linux-aarch64-musl-binding:
name: Test bindings on aarch64-unknown-linux-musl - node@${{ matrix.node }} # name: Test bindings on aarch64-unknown-linux-musl - node@${{ matrix.node }}
needs: # needs:
- build # - build
runs-on: ubuntu-latest # runs-on: ubuntu-latest
steps: # steps:
- uses: actions/checkout@v4 # - uses: actions/checkout@v4
- name: Download artifacts # - name: Download artifacts
uses: actions/download-artifact@v4 # uses: actions/download-artifact@v4
with: # with:
name: bindings-aarch64-unknown-linux-musl # name: bindings-aarch64-unknown-linux-musl
path: . # path: .
- name: List packages # - name: List packages
run: ls -R . # run: ls -R .
shell: bash # shell: bash
- name: Install dependencies # - name: Install dependencies
run: | # run: |
yarn config set supportedArchitectures.cpu "arm64" # yarn config set supportedArchitectures.cpu "arm64"
yarn config set supportedArchitectures.libc "musl" # yarn config set supportedArchitectures.libc "musl"
yarn install # yarn install
- name: Set up QEMU # - name: Set up QEMU
uses: docker/setup-qemu-action@v3 # uses: docker/setup-qemu-action@v3
with: # with:
platforms: arm64 # platforms: arm64
- run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes # - run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- name: Setup and run tests # - name: Setup and run tests
uses: addnab/docker-run-action@v3 # uses: addnab/docker-run-action@v3
with: # with:
image: node:lts-alpine # image: node:lts-alpine
options: "--platform linux/arm64 -v ${{ github.workspace }}:/build -w /build" # options: "--platform linux/arm64 -v ${{ github.workspace }}:/build -w /build"
run: | # run: |
set -e # set -e
yarn test # yarn test
test-linux-arm-gnueabihf-binding: # test-linux-arm-gnueabihf-binding:
name: Test bindings on armv7-unknown-linux-gnueabihf - node@${{ matrix.node }} # name: Test bindings on armv7-unknown-linux-gnueabihf - node@${{ matrix.node }}
needs: # needs:
- build # - build
strategy: # strategy:
fail-fast: false # fail-fast: false
matrix: # matrix:
node: # node:
- "18" # - "18"
- "20" # - "20"
runs-on: ubuntu-latest # runs-on: ubuntu-latest
steps: # steps:
- uses: actions/checkout@v4 # - uses: actions/checkout@v4
- name: Download artifacts # - name: Download artifacts
uses: actions/download-artifact@v4 # uses: actions/download-artifact@v4
with: # with:
name: bindings-armv7-unknown-linux-gnueabihf # name: bindings-armv7-unknown-linux-gnueabihf
path: . # path: .
- name: List packages # - name: List packages
run: ls -R . # run: ls -R .
shell: bash # shell: bash
- name: Install dependencies # - name: Install dependencies
run: | # run: |
yarn config set supportedArchitectures.cpu "arm" # yarn config set supportedArchitectures.cpu "arm"
yarn install # yarn install
- name: Set up QEMU # - name: Set up QEMU
uses: docker/setup-qemu-action@v3 # uses: docker/setup-qemu-action@v3
with: # with:
platforms: arm # platforms: arm
- run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes # - run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- name: Setup and run tests # - name: Setup and run tests
uses: addnab/docker-run-action@v3 # uses: addnab/docker-run-action@v3
with: # with:
image: node:${{ matrix.node }}-bullseye-slim # image: public.ecr.aws/docker/library/node:${{ matrix.node }}-bullseye-slim
options: "--platform linux/arm/v7 -v ${{ github.workspace }}:/build -w /build" # options: "--platform linux/arm/v7 -v ${{ github.workspace }}:/build -w /build"
run: | # run: |
set -e # set -e
yarn test # yarn test
ls -la # ls -la
universal-macOS: universal-macOS:
name: Build universal macOS binary name: Build universal macOS binary
needs: needs:
@ -364,6 +352,8 @@ jobs:
with: with:
name: bindings-aarch64-apple-darwin name: bindings-aarch64-apple-darwin
path: artifacts path: artifacts
- name: Move artifacts
run: mv artifacts/* .
- name: Combine binaries - name: Combine binaries
run: yarn universal run: yarn universal
- name: Upload artifact - name: Upload artifact
@ -377,11 +367,11 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: needs:
- test-macOS-windows-binding - test-macOS-windows-binding
- test-linux-x64-gnu-binding # - test-linux-x64-gnu-binding
- test-linux-x64-musl-binding # - test-linux-x64-musl-binding
- test-linux-aarch64-gnu-binding # - test-linux-aarch64-gnu-binding
- test-linux-aarch64-musl-binding # - test-linux-aarch64-musl-binding
- test-linux-arm-gnueabihf-binding # - test-linux-arm-gnueabihf-binding
- universal-macOS - universal-macOS
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4

6
.gitignore vendored
View File

@ -9,7 +9,7 @@ npm-debug.log*
yarn-debug.log* yarn-debug.log*
yarn-error.log* yarn-error.log*
lerna-debug.log* lerna-debug.log*
.test .test*
.tsimp .tsimp
# Diagnostic reports (https://nodejs.org/api/report.html) # Diagnostic reports (https://nodejs.org/api/report.html)
@ -186,7 +186,6 @@ $RECYCLE.BIN/
#Added by cargo #Added by cargo
/target /target
Cargo.lock
.pnp.* .pnp.*
.yarn/* .yarn/*
@ -202,3 +201,6 @@ manifest.json
# JetBrains # JetBrains
.idea .idea
assets/*
!assets/generate.sh

1237
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -9,11 +9,12 @@ crate-type = ["cdylib"]
[dependencies] [dependencies]
# Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix # Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
napi = { version = "2.12.2", default-features = false, features = [ napi = { version = "3.0.0-beta.11", default-features = false, features = [
"napi4", "napi6",
"async", "async",
"web_stream",
] } ] }
napi-derive = "2.12.2" napi-derive = "3.0.0-beta.11"
hex = "0.4.3" hex = "0.4.3"
serde_json = "1.0.128" serde_json = "1.0.128"
md5 = "0.7.0" md5 = "0.7.0"
@ -21,6 +22,12 @@ time-macros = "0.2.22"
time = "0.3.41" time = "0.3.41"
webpki = "0.22.4" webpki = "0.22.4"
ring = "0.17.14" ring = "0.17.14"
tokio = { version = "1.45.1", features = ["fs", "io-util"] }
tokio-util = { version = "0.7.15", features = ["codec"] }
rawzip = "0.3.0"
[package.metadata.patch]
crates = ["rawzip"]
[dependencies.x509-parser] [dependencies.x509-parser]
version = "0.17.0" version = "0.17.0"

View File

@ -2,7 +2,7 @@ import test from "ava";
import fs from "node:fs"; import fs from "node:fs";
import path from "path"; import path from "path";
import { generateManifest } from "../index.js"; import { generateManifest, listFiles } from "../index.js";
test("numerous small file", async (t) => { test("numerous small file", async (t) => {
// Setup test dir // Setup test dir
@ -53,3 +53,38 @@ test("numerous small file", async (t) => {
fs.rmSync(dirName, { recursive: true }); fs.rmSync(dirName, { recursive: true });
}); });
test.skip("performance test", async (t) => {
t.timeout(5 * 60 * 1000);
return t.pass();
const dirName = "./.test/pt";
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
fs.mkdirSync(dirName, { recursive: true });
const fileSize = 1 * 1000 * 1000 * 1000; // 1GB
const randomStream = fs.createReadStream("/dev/random", {
start: 0,
end: fileSize,
});
const outputStream = fs.createWriteStream(path.join(dirName, "file.bin"));
await new Promise((r) => {
randomStream.pipe(outputStream);
randomStream.on("end", r);
});
const start = Date.now();
await new Promise((r, e) =>
generateManifest(
dirName,
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
);
const end = Date.now();
t.pass(`Took ${end - start}ms to process ${fileSize / (1000 * 1000)}MB`);
fs.rmSync(dirName, { recursive: true });
});

100
__test__/utils.spec.mjs Normal file
View File

@ -0,0 +1,100 @@
import test from "ava";
import fs from "node:fs";
import path from "path";
import droplet, { generateManifest } from "../index.js";
test("check alt thread util", async (t) => {
let endtime1, endtime2;
droplet.callAltThreadFunc(async () => {
await new Promise((r) => setTimeout(r, 100));
endtime1 = Date.now();
});
await new Promise((r) => setTimeout(r, 500));
endtime2 = Date.now();
const difference = endtime2 - endtime1;
if (difference >= 600) {
t.fail("likely isn't multithreaded, difference: " + difference);
}
t.pass();
});
test("read file", async (t) => {
const dirName = "./.test2";
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
fs.mkdirSync(dirName, { recursive: true });
const testString = "g'day what's up my koala bros\n".repeat(1000);
fs.writeFileSync(dirName + "/TESTFILE", testString);
const stream = droplet.readFile(dirName, "TESTFILE");
let finalString = "";
for await (const chunk of stream) {
// Do something with each 'chunk'
finalString += String.fromCharCode.apply(null, chunk);
}
t.assert(finalString == testString, "file strings don't match");
fs.rmSync(dirName, { recursive: true });
});
test("read file offset", async (t) => {
const dirName = "./.test3";
if (fs.existsSync(dirName)) fs.rmSync(dirName, { recursive: true });
fs.mkdirSync(dirName, { recursive: true });
const testString = "0123456789";
fs.writeFileSync(dirName + "/TESTFILE", testString);
const stream = droplet.readFile(dirName, "TESTFILE", BigInt(1), BigInt(4));
let finalString = "";
for await (const chunk of stream) {
// Do something with each 'chunk'
finalString += String.fromCharCode.apply(null, chunk);
}
const expectedString = testString.slice(1, 4);
t.assert(
finalString == expectedString,
`file strings don't match: ${finalString} vs ${expectedString}`
);
fs.rmSync(dirName, { recursive: true });
});
test("zip file reader", async (t) => {
return t.pass();
const manifest = JSON.parse(
await new Promise((r, e) =>
generateManifest(
"./assets/TheGame.zip",
(_, __) => {},
(_, __) => {},
(err, manifest) => (err ? e(err) : r(manifest))
)
)
);
console.log(manifest);
return t.pass();
const stream = droplet.readFile("./assets/TheGame.zip", "TheGame/setup.exe");
let finalString;
for await (const chunk of stream) {
console.log(`read chunk ${chunk}`);
// Do something with each 'chunk'
finalString += String.fromCharCode.apply(null, chunk);
}
console.log(finalString);
});

3
assets/generate.sh Executable file
View File

@ -0,0 +1,3 @@
dd if=/dev/random of=./setup.exe bs=1024 count=1000000
zip TheGame.zip setup.exe
rm setup.exe

28
index.d.ts vendored
View File

@ -1,12 +1,26 @@
/* tslint:disable */
/* eslint-disable */
/* auto-generated by NAPI-RS */ /* auto-generated by NAPI-RS */
/* eslint-disable */
export declare function callAltThreadFunc(tsfn: ((err: Error | null, ) => any)): void
export declare function callAltThreadFunc(callback: (...args: any[]) => any): void
export declare function generateManifest(dir: string, progress: (...args: any[]) => any, log: (...args: any[]) => any, callback: (...args: any[]) => any): void
export declare function generateRootCa(): Array<string>
export declare function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string> export declare function generateClientCertificate(clientId: string, clientName: string, rootCa: string, rootCaPrivate: string): Array<string>
export declare function verifyClientCertificate(clientCert: string, rootCa: string): boolean
export declare function generateManifest(dir: string, progressSfn: ((err: Error | null, arg: number) => any), logSfn: ((err: Error | null, arg: string) => any), callbackSfn: ((err: Error | null, arg: string) => any)): void
export declare function generateRootCa(): Array<string>
export declare function hasBackendForPath(path: string): boolean
export declare function listFiles(path: string): Array<string>
/**
* This is inefficient, but is used in attempt to keep the interface simple
*/
export declare function peekFile(path: string, subPath: string): bigint
export declare function readFile(path: string, subPath: string, start?: bigint | undefined | null, end?: bigint | undefined | null): ReadableStream<Buffer> | null
export declare function signNonce(privateKey: string, nonce: string): string export declare function signNonce(privateKey: string, nonce: string): string
export declare function verifyClientCertificate(clientCert: string, rootCa: string): boolean
export declare function verifyNonce(publicCert: string, nonce: string, signature: string): boolean export declare function verifyNonce(publicCert: string, nonce: string, signature: string): boolean

550
index.js
View File

@ -1,321 +1,389 @@
/* tslint:disable */ // prettier-ignore
/* eslint-disable */ /* eslint-disable */
/* prettier-ignore */ // @ts-nocheck
/* auto-generated by NAPI-RS */ /* auto-generated by NAPI-RS */
const { existsSync, readFileSync } = require('fs') const { createRequire } = require('node:module')
const { join } = require('path') require = createRequire(__filename)
const { platform, arch } = process
const { readFileSync } = require('node:fs')
let nativeBinding = null let nativeBinding = null
let localFileExisted = false const loadErrors = []
let loadError = null
function isMusl() { const isMusl = () => {
// For Node 10 let musl = false
if (!process.report || typeof process.report.getReport !== 'function') { if (process.platform === 'linux') {
musl = isMuslFromFilesystem()
if (musl === null) {
musl = isMuslFromReport()
}
if (musl === null) {
musl = isMuslFromChildProcess()
}
}
return musl
}
const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')
const isMuslFromFilesystem = () => {
try { try {
const lddPath = require('child_process').execSync('which ldd').toString().trim() return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
return readFileSync(lddPath, 'utf8').includes('musl') } catch {
} catch (e) { return null
}
}
const isMuslFromReport = () => {
let report = null
if (typeof process.report?.getReport === 'function') {
process.report.excludeNetwork = true
report = process.report.getReport()
}
if (!report) {
return null
}
if (report.header && report.header.glibcVersionRuntime) {
return false
}
if (Array.isArray(report.sharedObjects)) {
if (report.sharedObjects.some(isFileMusl)) {
return true return true
} }
} else { }
const { glibcVersionRuntime } = process.report.getReport().header return false
return !glibcVersionRuntime }
const isMuslFromChildProcess = () => {
try {
return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
} catch (e) {
// If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
return false
} }
} }
switch (platform) { function requireNative() {
case 'android': if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
switch (arch) {
case 'arm64':
localFileExisted = existsSync(join(__dirname, 'droplet.android-arm64.node'))
try { try {
if (localFileExisted) { nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
nativeBinding = require('./droplet.android-arm64.node') } catch (err) {
} else { loadErrors.push(err)
nativeBinding = require('@drop-oss/droplet-android-arm64')
} }
} else if (process.platform === 'android') {
if (process.arch === 'arm64') {
try {
return require('./droplet.android-arm64.node')
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
break
case 'arm':
localFileExisted = existsSync(join(__dirname, 'droplet.android-arm-eabi.node'))
try { try {
if (localFileExisted) { return require('@drop-oss/droplet-android-arm64')
nativeBinding = require('./droplet.android-arm-eabi.node')
} else {
nativeBinding = require('@drop-oss/droplet-android-arm-eabi')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
break
default: } else if (process.arch === 'arm') {
throw new Error(`Unsupported architecture on Android ${arch}`)
}
break
case 'win32':
switch (arch) {
case 'x64':
localFileExisted = existsSync(
join(__dirname, 'droplet.win32-x64-msvc.node')
)
try { try {
if (localFileExisted) { return require('./droplet.android-arm-eabi.node')
nativeBinding = require('./droplet.win32-x64-msvc.node')
} else {
nativeBinding = require('@drop-oss/droplet-win32-x64-msvc')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
break
case 'ia32':
localFileExisted = existsSync(
join(__dirname, 'droplet.win32-ia32-msvc.node')
)
try { try {
if (localFileExisted) { return require('@drop-oss/droplet-android-arm-eabi')
nativeBinding = require('./droplet.win32-ia32-msvc.node')
} else {
nativeBinding = require('@drop-oss/droplet-win32-ia32-msvc')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'droplet.win32-arm64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./droplet.win32-arm64-msvc.node')
} else { } else {
nativeBinding = require('@drop-oss/droplet-win32-arm64-msvc') loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
} }
} else if (process.platform === 'win32') {
if (process.arch === 'x64') {
try {
return require('./droplet.win32-x64-msvc.node')
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
break
default:
throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
break
case 'darwin':
localFileExisted = existsSync(join(__dirname, 'droplet.darwin-universal.node'))
try { try {
if (localFileExisted) { return require('@drop-oss/droplet-win32-x64-msvc')
nativeBinding = require('./droplet.darwin-universal.node')
} else {
nativeBinding = require('@drop-oss/droplet-darwin-universal')
}
break
} catch {}
switch (arch) {
case 'x64':
localFileExisted = existsSync(join(__dirname, 'droplet.darwin-x64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./droplet.darwin-x64.node')
} else {
nativeBinding = require('@drop-oss/droplet-darwin-x64')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
break
case 'arm64': } else if (process.arch === 'ia32') {
localFileExisted = existsSync(
join(__dirname, 'droplet.darwin-arm64.node')
)
try { try {
if (localFileExisted) { return require('./droplet.win32-ia32-msvc.node')
nativeBinding = require('./droplet.darwin-arm64.node')
} else {
nativeBinding = require('@drop-oss/droplet-darwin-arm64')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
break
default:
throw new Error(`Unsupported architecture on macOS: ${arch}`)
}
break
case 'freebsd':
if (arch !== 'x64') {
throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
}
localFileExisted = existsSync(join(__dirname, 'droplet.freebsd-x64.node'))
try { try {
if (localFileExisted) { return require('@drop-oss/droplet-win32-ia32-msvc')
nativeBinding = require('./droplet.freebsd-x64.node')
} else {
nativeBinding = require('@drop-oss/droplet-freebsd-x64')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
break
case 'linux': } else if (process.arch === 'arm64') {
switch (arch) { try {
case 'x64': return require('./droplet.win32-arm64-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-win32-arm64-msvc')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
}
} else if (process.platform === 'darwin') {
try {
return require('./droplet.darwin-universal.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-darwin-universal')
} catch (e) {
loadErrors.push(e)
}
if (process.arch === 'x64') {
try {
return require('./droplet.darwin-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-darwin-x64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./droplet.darwin-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-darwin-arm64')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
}
} else if (process.platform === 'freebsd') {
if (process.arch === 'x64') {
try {
return require('./droplet.freebsd-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-freebsd-x64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./droplet.freebsd-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-freebsd-arm64')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
}
} else if (process.platform === 'linux') {
if (process.arch === 'x64') {
if (isMusl()) { if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-x64-musl.node')
)
try { try {
if (localFileExisted) { return require('./droplet.linux-x64-musl.node')
nativeBinding = require('./droplet.linux-x64-musl.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-x64-musl')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-x64-gnu.node')
)
try { try {
if (localFileExisted) { return require('@drop-oss/droplet-linux-x64-musl')
nativeBinding = require('./droplet.linux-x64-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-x64-gnu')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
} else {
try {
return require('./droplet.linux-x64-gnu.node')
} catch (e) {
loadErrors.push(e)
} }
break try {
case 'arm64': return require('@drop-oss/droplet-linux-x64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'arm64') {
if (isMusl()) { if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm64-musl.node')
)
try { try {
if (localFileExisted) { return require('./droplet.linux-arm64-musl.node')
nativeBinding = require('./droplet.linux-arm64-musl.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm64-musl')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm64-gnu.node')
)
try { try {
if (localFileExisted) { return require('@drop-oss/droplet-linux-arm64-musl')
nativeBinding = require('./droplet.linux-arm64-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm64-gnu')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
} else {
try {
return require('./droplet.linux-arm64-gnu.node')
} catch (e) {
loadErrors.push(e)
} }
break try {
case 'arm': return require('@drop-oss/droplet-linux-arm64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'arm') {
if (isMusl()) { if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm-musleabihf.node')
)
try { try {
if (localFileExisted) { return require('./droplet.linux-arm-musleabihf.node')
nativeBinding = require('./droplet.linux-arm-musleabihf.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm-musleabihf')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-arm-gnueabihf.node')
)
try { try {
if (localFileExisted) { return require('@drop-oss/droplet-linux-arm-musleabihf')
nativeBinding = require('./droplet.linux-arm-gnueabihf.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-arm-gnueabihf')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
} else {
try {
return require('./droplet.linux-arm-gnueabihf.node')
} catch (e) {
loadErrors.push(e)
} }
break try {
case 'riscv64': return require('@drop-oss/droplet-linux-arm-gnueabihf')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'riscv64') {
if (isMusl()) { if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-riscv64-musl.node')
)
try { try {
if (localFileExisted) { return require('./droplet.linux-riscv64-musl.node')
nativeBinding = require('./droplet.linux-riscv64-musl.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-riscv64-musl')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
} else {
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-riscv64-gnu.node')
)
try { try {
if (localFileExisted) { return require('@drop-oss/droplet-linux-riscv64-musl')
nativeBinding = require('./droplet.linux-riscv64-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-riscv64-gnu')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
} }
}
break } else {
case 's390x':
localFileExisted = existsSync(
join(__dirname, 'droplet.linux-s390x-gnu.node')
)
try { try {
if (localFileExisted) { return require('./droplet.linux-riscv64-gnu.node')
nativeBinding = require('./droplet.linux-s390x-gnu.node')
} else {
nativeBinding = require('@drop-oss/droplet-linux-s390x-gnu')
}
} catch (e) { } catch (e) {
loadError = e loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-riscv64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'ppc64') {
try {
return require('./droplet.linux-ppc64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-ppc64-gnu')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 's390x') {
try {
return require('./droplet.linux-s390x-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@drop-oss/droplet-linux-s390x-gnu')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
}
} else {
loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
}
}
nativeBinding = requireNative()
if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
try {
nativeBinding = require('./droplet.wasi.cjs')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
loadErrors.push(err)
}
}
if (!nativeBinding) {
try {
nativeBinding = require('@drop-oss/droplet-wasm32-wasi')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
loadErrors.push(err)
}
} }
break
default:
throw new Error(`Unsupported architecture on Linux: ${arch}`)
} }
break
default:
throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
} }
if (!nativeBinding) { if (!nativeBinding) {
if (loadError) { if (loadErrors.length > 0) {
throw loadError throw new Error(
`Cannot find native binding. ` +
`npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
{ cause: loadErrors }
)
} }
throw new Error(`Failed to load native binding`) throw new Error(`Failed to load native binding`)
} }
const { callAltThreadFunc, generateManifest, generateRootCa, generateClientCertificate, verifyClientCertificate, signNonce, verifyNonce } = nativeBinding module.exports = nativeBinding
module.exports.callAltThreadFunc = nativeBinding.callAltThreadFunc
module.exports.callAltThreadFunc = callAltThreadFunc module.exports.generateClientCertificate = nativeBinding.generateClientCertificate
module.exports.generateManifest = generateManifest module.exports.generateManifest = nativeBinding.generateManifest
module.exports.generateRootCa = generateRootCa module.exports.generateRootCa = nativeBinding.generateRootCa
module.exports.generateClientCertificate = generateClientCertificate module.exports.hasBackendForPath = nativeBinding.hasBackendForPath
module.exports.verifyClientCertificate = verifyClientCertificate module.exports.listFiles = nativeBinding.listFiles
module.exports.signNonce = signNonce module.exports.peekFile = nativeBinding.peekFile
module.exports.verifyNonce = verifyNonce module.exports.readFile = nativeBinding.readFile
module.exports.signNonce = nativeBinding.signNonce
module.exports.verifyClientCertificate = nativeBinding.verifyClientCertificate
module.exports.verifyNonce = nativeBinding.verifyNonce

View File

@ -1,3 +0,0 @@
# `@drop/droplet-android-arm-eabi`
This is the **armv7-linux-androideabi** binary for `@drop/droplet`

View File

@ -1,18 +0,0 @@
{
"name": "@drop/droplet-android-arm-eabi",
"version": "0.0.0",
"os": [
"android"
],
"cpu": [
"arm"
],
"main": "droplet.android-arm-eabi.node",
"files": [
"droplet.android-arm-eabi.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View File

@ -1,3 +0,0 @@
# `@drop/droplet-android-arm64`
This is the **aarch64-linux-android** binary for `@drop/droplet`

View File

@ -1,18 +0,0 @@
{
"name": "@drop/droplet-android-arm64",
"version": "0.0.0",
"os": [
"android"
],
"cpu": [
"arm64"
],
"main": "droplet.android-arm64.node",
"files": [
"droplet.android-arm64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View File

@ -1,3 +1,3 @@
# `@drop/droplet-darwin-arm64` # `@drop-oss/droplet-darwin-arm64`
This is the **aarch64-apple-darwin** binary for `@drop/droplet` This is the **aarch64-apple-darwin** binary for `@drop-oss/droplet`

View File

@ -1,5 +1,5 @@
{ {
"name": "@drop/droplet-darwin-arm64", "name": "@drop-oss/droplet-darwin-arm64",
"version": "0.0.0", "version": "0.0.0",
"os": [ "os": [
"darwin" "darwin"
@ -14,5 +14,8 @@
"license": "MIT", "license": "MIT",
"engines": { "engines": {
"node": ">= 10" "node": ">= 10"
},
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
} }
} }

View File

@ -1,3 +1,3 @@
# `@drop/droplet-darwin-universal` # `@drop-oss/droplet-darwin-universal`
This is the **universal-apple-darwin** binary for `@drop/droplet` This is the **universal-apple-darwin** binary for `@drop-oss/droplet`

View File

@ -1,5 +1,5 @@
{ {
"name": "@drop/droplet-darwin-universal", "name": "@drop-oss/droplet-darwin-universal",
"version": "0.0.0", "version": "0.0.0",
"os": [ "os": [
"darwin" "darwin"
@ -11,5 +11,8 @@
"license": "MIT", "license": "MIT",
"engines": { "engines": {
"node": ">= 10" "node": ">= 10"
},
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
} }
} }

View File

@ -1,3 +1,3 @@
# `@drop/droplet-darwin-x64` # `@drop-oss/droplet-darwin-x64`
This is the **x86_64-apple-darwin** binary for `@drop/droplet` This is the **x86_64-apple-darwin** binary for `@drop-oss/droplet`

View File

@ -1,5 +1,5 @@
{ {
"name": "@drop/droplet-darwin-x64", "name": "@drop-oss/droplet-darwin-x64",
"version": "0.0.0", "version": "0.0.0",
"os": [ "os": [
"darwin" "darwin"
@ -14,5 +14,8 @@
"license": "MIT", "license": "MIT",
"engines": { "engines": {
"node": ">= 10" "node": ">= 10"
},
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
} }
} }

View File

@ -1,3 +0,0 @@
# `@drop/droplet-freebsd-x64`
This is the **x86_64-unknown-freebsd** binary for `@drop/droplet`

View File

@ -1,18 +0,0 @@
{
"name": "@drop/droplet-freebsd-x64",
"version": "0.0.0",
"os": [
"freebsd"
],
"cpu": [
"x64"
],
"main": "droplet.freebsd-x64.node",
"files": [
"droplet.freebsd-x64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View File

@ -1,3 +0,0 @@
# `@drop/droplet-linux-arm-gnueabihf`
This is the **armv7-unknown-linux-gnueabihf** binary for `@drop/droplet`

View File

@ -1,18 +0,0 @@
{
"name": "@drop/droplet-linux-arm-gnueabihf",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm"
],
"main": "droplet.linux-arm-gnueabihf.node",
"files": [
"droplet.linux-arm-gnueabihf.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View File

@ -1,3 +0,0 @@
# `@drop/droplet-linux-arm-musleabihf`
This is the **armv7-unknown-linux-musleabihf** binary for `@drop/droplet`

View File

@ -1,18 +0,0 @@
{
"name": "@drop/droplet-linux-arm-musleabihf",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm"
],
"main": "droplet.linux-arm-musleabihf.node",
"files": [
"droplet.linux-arm-musleabihf.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View File

@ -1,3 +1,3 @@
# `@drop/droplet-linux-arm64-gnu` # `@drop-oss/droplet-linux-arm64-gnu`
This is the **aarch64-unknown-linux-gnu** binary for `@drop/droplet` This is the **aarch64-unknown-linux-gnu** binary for `@drop-oss/droplet`

View File

@ -1,5 +1,5 @@
{ {
"name": "@drop/droplet-linux-arm64-gnu", "name": "@drop-oss/droplet-linux-arm64-gnu",
"version": "0.0.0", "version": "0.0.0",
"os": [ "os": [
"linux" "linux"
@ -17,5 +17,8 @@
}, },
"libc": [ "libc": [
"glibc" "glibc"
] ],
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
}
} }

View File

@ -1,3 +1,3 @@
# `@drop/droplet-linux-arm64-musl` # `@drop-oss/droplet-linux-arm64-musl`
This is the **aarch64-unknown-linux-musl** binary for `@drop/droplet` This is the **aarch64-unknown-linux-musl** binary for `@drop-oss/droplet`

View File

@ -1,5 +1,5 @@
{ {
"name": "@drop/droplet-linux-arm64-musl", "name": "@drop-oss/droplet-linux-arm64-musl",
"version": "0.0.0", "version": "0.0.0",
"os": [ "os": [
"linux" "linux"
@ -17,5 +17,8 @@
}, },
"libc": [ "libc": [
"musl" "musl"
] ],
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
}
} }

View File

@ -1,3 +1,3 @@
# `@drop/droplet-linux-riscv64-gnu` # `@drop-oss/droplet-linux-riscv64-gnu`
This is the **riscv64gc-unknown-linux-gnu** binary for `@drop/droplet` This is the **riscv64gc-unknown-linux-gnu** binary for `@drop-oss/droplet`

View File

@ -1,5 +1,5 @@
{ {
"name": "@drop/droplet-linux-riscv64-gnu", "name": "@drop-oss/droplet-linux-riscv64-gnu",
"version": "0.0.0", "version": "0.0.0",
"os": [ "os": [
"linux" "linux"
@ -17,5 +17,8 @@
}, },
"libc": [ "libc": [
"glibc" "glibc"
] ],
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
}
} }

View File

@ -1,3 +1,3 @@
# `@drop/droplet-linux-x64-gnu` # `@drop-oss/droplet-linux-x64-gnu`
This is the **x86_64-unknown-linux-gnu** binary for `@drop/droplet` This is the **x86_64-unknown-linux-gnu** binary for `@drop-oss/droplet`

View File

@ -1,5 +1,5 @@
{ {
"name": "@drop/droplet-linux-x64-gnu", "name": "@drop-oss/droplet-linux-x64-gnu",
"version": "0.0.0", "version": "0.0.0",
"os": [ "os": [
"linux" "linux"
@ -17,5 +17,8 @@
}, },
"libc": [ "libc": [
"glibc" "glibc"
] ],
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
}
} }

View File

@ -1,3 +1,3 @@
# `@drop/droplet-linux-x64-musl` # `@drop-oss/droplet-linux-x64-musl`
This is the **x86_64-unknown-linux-musl** binary for `@drop/droplet` This is the **x86_64-unknown-linux-musl** binary for `@drop-oss/droplet`

View File

@ -1,5 +1,5 @@
{ {
"name": "@drop/droplet-linux-x64-musl", "name": "@drop-oss/droplet-linux-x64-musl",
"version": "0.0.0", "version": "0.0.0",
"os": [ "os": [
"linux" "linux"
@ -17,5 +17,8 @@
}, },
"libc": [ "libc": [
"musl" "musl"
] ],
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
}
} }

View File

@ -1,3 +1,3 @@
# `@drop/droplet-win32-arm64-msvc` # `@drop-oss/droplet-win32-arm64-msvc`
This is the **aarch64-pc-windows-msvc** binary for `@drop/droplet` This is the **aarch64-pc-windows-msvc** binary for `@drop-oss/droplet`

View File

@ -1,5 +1,5 @@
{ {
"name": "@drop/droplet-win32-arm64-msvc", "name": "@drop-oss/droplet-win32-arm64-msvc",
"version": "0.0.0", "version": "0.0.0",
"os": [ "os": [
"win32" "win32"
@ -14,5 +14,8 @@
"license": "MIT", "license": "MIT",
"engines": { "engines": {
"node": ">= 10" "node": ">= 10"
},
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
} }
} }

View File

@ -1,3 +0,0 @@
# `@drop/droplet-win32-ia32-msvc`
This is the **i686-pc-windows-msvc** binary for `@drop/droplet`

View File

@ -1,18 +0,0 @@
{
"name": "@drop/droplet-win32-ia32-msvc",
"version": "0.0.0",
"os": [
"win32"
],
"cpu": [
"ia32"
],
"main": "droplet.win32-ia32-msvc.node",
"files": [
"droplet.win32-ia32-msvc.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View File

@ -1,3 +1,3 @@
# `@drop/droplet-win32-x64-msvc` # `@drop-oss/droplet-win32-x64-msvc`
This is the **x86_64-pc-windows-msvc** binary for `@drop/droplet` This is the **x86_64-pc-windows-msvc** binary for `@drop-oss/droplet`

View File

@ -1,5 +1,5 @@
{ {
"name": "@drop/droplet-win32-x64-msvc", "name": "@drop-oss/droplet-win32-x64-msvc",
"version": "0.0.0", "version": "0.0.0",
"os": [ "os": [
"win32" "win32"
@ -14,5 +14,8 @@
"license": "MIT", "license": "MIT",
"engines": { "engines": {
"node": ">= 10" "node": ">= 10"
},
"repository": {
"url": "https://github.com/Drop-OSS/droplet"
} }
} }

View File

@ -1,6 +1,6 @@
{ {
"name": "@drop-oss/droplet", "name": "@drop-oss/droplet",
"version": "0.7.0", "version": "1.6.0",
"main": "index.js", "main": "index.js",
"types": "index.d.ts", "types": "index.d.ts",
"napi": { "napi": {
@ -8,24 +8,21 @@
"triples": { "triples": {
"additional": [ "additional": [
"aarch64-apple-darwin", "aarch64-apple-darwin",
"aarch64-linux-android", "x86_64-apple-darwin",
"universal-apple-darwin",
"aarch64-unknown-linux-gnu", "aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl", "aarch64-unknown-linux-musl",
"aarch64-pc-windows-msvc", "x86_64-unknown-linux-gnu",
"armv7-unknown-linux-gnueabihf",
"armv7-unknown-linux-musleabihf",
"x86_64-unknown-linux-musl", "x86_64-unknown-linux-musl",
"x86_64-unknown-freebsd", "riscv64gc-unknown-linux-gnu",
"i686-pc-windows-msvc", "aarch64-pc-windows-msvc",
"armv7-linux-androideabi", "x86_64-pc-windows-msvc"
"universal-apple-darwin",
"riscv64gc-unknown-linux-gnu"
] ]
} }
}, },
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
"@napi-rs/cli": "^2.18.4", "@napi-rs/cli": "3.0.0-alpha.91",
"@types/node": "^22.13.10", "@types/node": "^22.13.10",
"ava": "^6.2.0" "ava": "^6.2.0"
}, },
@ -41,8 +38,11 @@
"build:debug": "napi build --platform", "build:debug": "napi build --platform",
"prepublishOnly": "napi prepublish -t npm", "prepublishOnly": "napi prepublish -t npm",
"test": "ava", "test": "ava",
"universal": "napi universal", "universal": "napi universalize",
"version": "napi version" "version": "napi version"
}, },
"packageManager": "yarn@4.7.0" "packageManager": "yarn@4.7.0",
"repository": {
"url": "git+https://github.com/Drop-OSS/droplet.git"
}
} }

View File

@ -1,24 +0,0 @@
use std::{
fs::{self, metadata},
path::{Path, PathBuf},
};
/// Depth-first helper: pushes the path of every non-directory entry found
/// below `path` onto `vec`. Panics (via `unwrap`) on unreadable entries.
fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
    // A non-directory root (or leaf) terminates the recursion immediately.
    if !metadata(path).unwrap().is_dir() {
        return;
    }
    for entry in fs::read_dir(path).unwrap() {
        let child = entry.unwrap().path();
        if metadata(&child).unwrap().is_dir() {
            _list_files(vec, &child);
        } else {
            vec.push(child);
        }
    }
}

/// Recursively walk `path` and return the full paths of all files beneath it.
pub fn list_files(path: &Path) -> Vec<PathBuf> {
    let mut collected = Vec::new();
    _list_files(&mut collected, path);
    collected
}

View File

@ -1,8 +1,9 @@
#![deny(clippy::all)] #![deny(clippy::all)]
#![feature(trait_alias)]
pub mod file_utils;
pub mod manifest; pub mod manifest;
pub mod ssl; pub mod ssl;
pub mod version;
#[macro_use] #[macro_use]
extern crate napi_derive; extern crate napi_derive;

View File

@ -1,22 +1,20 @@
use std::{ use std::{
collections::HashMap, collections::HashMap,
fs::File,
io::{BufRead, BufReader}, io::{BufRead, BufReader},
path::Path, path::Path,
sync::Arc,
thread, thread,
}; };
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use napi::{ use napi::{
threadsafe_function::{ErrorStrategy, ThreadsafeFunction, ThreadsafeFunctionCallMode}, threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode},
Error, JsFunction, Result,
}; };
use serde_json::json; use serde_json::json;
use uuid::Uuid; use uuid::Uuid;
use crate::file_utils::list_files; use crate::version::utils::create_backend_for_path;
const CHUNK_SIZE: usize = 1024 * 1024 * 64; const CHUNK_SIZE: usize = 1024 * 1024 * 64;
@ -29,14 +27,10 @@ struct ChunkData {
} }
#[napi] #[napi]
pub fn call_alt_thread_func(callback: JsFunction) -> Result<(), Error> { pub fn call_alt_thread_func(tsfn: Arc<ThreadsafeFunction<()>>) -> Result<(), String> {
let tsfn: ThreadsafeFunction<u32, ErrorStrategy::CalleeHandled> = callback let tsfn_cloned = tsfn.clone();
.create_threadsafe_function(0, |ctx| {
ctx.env.create_uint32(ctx.value + 1).map(|v| vec![v])
})?;
let tsfn = tsfn.clone();
thread::spawn(move || { thread::spawn(move || {
tsfn.call(Ok(0), ThreadsafeFunctionCallMode::NonBlocking); tsfn_cloned.call(Ok(()), ThreadsafeFunctionCallMode::Blocking);
}); });
Ok(()) Ok(())
} }
@ -44,27 +38,14 @@ pub fn call_alt_thread_func(callback: JsFunction) -> Result<(), Error> {
#[napi] #[napi]
pub fn generate_manifest( pub fn generate_manifest(
dir: String, dir: String,
progress: JsFunction, progress_sfn: ThreadsafeFunction<i32>,
log: JsFunction, log_sfn: ThreadsafeFunction<String>,
callback: JsFunction, callback_sfn: ThreadsafeFunction<String>,
) -> Result<(), Error> { ) -> Result<(), String> {
let progress_sfn: ThreadsafeFunction<i32, ErrorStrategy::CalleeHandled> = progress
.create_threadsafe_function(0, |ctx| ctx.env.create_int32(ctx.value).map(|v| vec![v]))
.unwrap();
let log_sfn: ThreadsafeFunction<String, ErrorStrategy::CalleeHandled> = log
.create_threadsafe_function(0, |ctx| {
ctx.env.create_string_from_std(ctx.value).map(|v| vec![v])
})
.unwrap();
let callback_sfn: ThreadsafeFunction<String, ErrorStrategy::CalleeHandled> = callback
.create_threadsafe_function(0, |ctx| {
ctx.env.create_string_from_std(ctx.value).map(|v| vec![v])
})
.unwrap();
thread::spawn(move || { thread::spawn(move || {
let base_dir = Path::new(&dir); let base_dir = Path::new(&dir);
let files = list_files(base_dir); let mut backend = create_backend_for_path(base_dir).unwrap();
let files = backend.list_files();
// Filepath to chunk data // Filepath to chunk data
let mut chunks: HashMap<String, ChunkData> = HashMap::new(); let mut chunks: HashMap<String, ChunkData> = HashMap::new();
@ -72,27 +53,12 @@ pub fn generate_manifest(
let total: i32 = files.len() as i32; let total: i32 = files.len() as i32;
let mut i: i32 = 0; let mut i: i32 = 0;
for file_path in files { for version_file in files {
let file = File::open(file_path.clone()).unwrap(); let raw_reader= backend.reader(&version_file).unwrap();
let relative = file_path.strip_prefix(base_dir).unwrap(); let mut reader = BufReader::with_capacity(CHUNK_SIZE, raw_reader);
let permission_object = file.try_clone().unwrap().metadata().unwrap().permissions();
let permissions = {
let perm: u32;
#[cfg(target_family = "unix")]
{
perm = permission_object.mode();
}
#[cfg(not(target_family = "unix"))]
{
perm = 0
}
perm
};
let mut reader = BufReader::with_capacity(CHUNK_SIZE, file);
let mut chunk_data = ChunkData { let mut chunk_data = ChunkData {
permissions, permissions: version_file.permission,
ids: Vec::new(), ids: Vec::new(),
checksums: Vec::new(), checksums: Vec::new(),
lengths: Vec::new(), lengths: Vec::new(),
@ -118,8 +84,7 @@ pub fn generate_manifest(
let log_str = format!( let log_str = format!(
"Processed chunk {} for {}", "Processed chunk {} for {}",
chunk_index, chunk_index, &version_file.relative_filename
relative.to_str().unwrap()
); );
log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking); log_sfn.call(Ok(log_str), ThreadsafeFunctionCallMode::Blocking);
@ -127,7 +92,7 @@ pub fn generate_manifest(
chunk_index += 1; chunk_index += 1;
} }
chunks.insert(relative.to_str().unwrap().to_string(), chunk_data); chunks.insert(version_file.relative_filename, chunk_data);
i += 1; i += 1;
let progress = i * 100 / total; let progress = i * 100 / total;

156
src/version/backends.rs Normal file
View File

@ -0,0 +1,156 @@
use core::arch;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
fs::File,
io::{self, Read, Seek},
path::PathBuf,
pin::Pin,
rc::Rc,
sync::Arc,
};
use rawzip::{
FileReader, ReaderAt, ZipArchive, ZipArchiveEntryWayfinder, ZipEntry, RECOMMENDED_BUFFER_SIZE,
};
use crate::version::{
types::{MinimumFileObject, Skippable, VersionBackend, VersionFile},
utils::_list_files,
};
/// Version backend that serves files straight from a directory tree on disk.
pub struct PathVersionBackend {
    // Root directory; every VersionFile path this backend hands out is
    // relative to it.
    pub base_dir: PathBuf,
}
impl VersionBackend for PathVersionBackend {
    /// Walk `base_dir` and describe every file beneath it.
    ///
    /// Each `VersionFile` records the path relative to `base_dir`, the Unix
    /// permission bits (0 on non-Unix targets) and the file size in bytes.
    /// Panics (via `unwrap`) if a path cannot be stat'ed.
    fn list_files(&mut self) -> Vec<VersionFile> {
        let mut vec = Vec::new();
        _list_files(&mut vec, &self.base_dir);

        let mut results = Vec::new();
        for pathbuf in vec.iter() {
            let relative = pathbuf.strip_prefix(&self.base_dir).unwrap();
            // Stat the path directly: the previous version opened each file
            // and cloned the handle (`File::open` + `try_clone`) just to
            // reach its metadata.
            let metadata = std::fs::metadata(pathbuf).unwrap();
            #[cfg(target_family = "unix")]
            let permissions = metadata.permissions().mode();
            // Windows has no Unix mode bits; preserve the old 0 sentinel.
            #[cfg(not(target_family = "unix"))]
            let permissions = 0u32;
            results.push(VersionFile {
                relative_filename: relative.to_string_lossy().to_string(),
                permission: permissions,
                size: metadata.len(),
            });
        }
        results
    }

    /// Open the file `file` refers to, rooted at `base_dir`.
    /// Returns `None` when the file cannot be opened.
    fn reader(&mut self, file: &VersionFile) -> Option<Box<(dyn MinimumFileObject + 'static)>> {
        let handle = File::open(self.base_dir.join(&file.relative_filename)).ok()?;
        Some(Box::new(handle))
    }
}
/// Version backend that serves files out of a `.zip` archive via `rawzip`.
pub struct ZipVersionBackend {
    // Shared so each per-entry reader (`ZipFileWrapper`) can hold its own
    // handle to the archive.
    archive: Arc<ZipArchive<FileReader>>,
}
impl ZipVersionBackend {
    /// Parse `archive` as a zip file.
    ///
    /// Panics (via `unwrap`) if the file is not a readable zip archive.
    pub fn new(archive: File) -> Self {
        let archive = ZipArchive::from_file(archive, &mut [0u8; RECOMMENDED_BUFFER_SIZE]).unwrap();
        Self {
            archive: Arc::new(archive),
        }
    }
    /// Build a `ZipFileWrapper` that reads `entry`'s stored byte range
    /// directly from the shared archive handle.
    ///
    /// NOTE(review): `compressed_data_range` covers the *stored* bytes, so
    /// for deflate-compressed entries the wrapper would stream compressed
    /// data — confirm entries are stored uncompressed or are decompressed
    /// downstream.
    pub fn new_entry(
        &self,
        entry: ZipEntry<'_, FileReader>,
        wayfinder: ZipArchiveEntryWayfinder,
    ) -> ZipFileWrapper {
        let (offset, end_offset) = entry.compressed_data_range();
        ZipFileWrapper {
            archive: self.archive.clone(),
            wayfinder,
            offset,
            end_offset,
        }
    }
}
/// A readable view over a single zip entry's byte range.
pub struct ZipFileWrapper {
    pub archive: Arc<ZipArchive<FileReader>>,
    // Locator for this entry within the archive.
    wayfinder: ZipArchiveEntryWayfinder,
    // Current absolute offset into the archive file; starts at the entry's
    // data start and advances as bytes are read.
    offset: u64,
    // Absolute offset one past the entry's last data byte.
    end_offset: u64,
}
impl Read for ZipFileWrapper {
    // Positioned read against the shared archive handle, clamped to the
    // entry's end so we never read into the next entry. Returns Ok(0)
    // (EOF) once `offset` reaches `end_offset`.
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let read_size = buf.len().min((self.end_offset - self.offset) as usize);
        let read = self
            .archive
            .get_ref()
            .read_at(&mut buf[..read_size], self.offset)?;
        self.offset += read as u64;
        Ok(read)
    }
}
impl Skippable for ZipFileWrapper {
    // Advances *relative* to the current position. NOTE(review): the
    // `Seek`-based blanket impl of `Skippable` seeks to an *absolute*
    // position instead; the two agree only when `skip` is called once
    // before any reads — confirm callers rely on exactly that.
    fn skip(&mut self, amount: u64) {
        self.offset += amount;
    }
}
impl MinimumFileObject for ZipFileWrapper {}
impl VersionBackend for ZipVersionBackend {
    /// List every non-directory entry in the archive as a `VersionFile`.
    ///
    /// Panics (via `unwrap`) on a corrupt central directory or a
    /// non-normalizable entry path.
    fn list_files(&mut self) -> Vec<VersionFile> {
        let mut results = Vec::new();
        let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
        let mut budget_iterator = self.archive.entries(read_buffer);
        while let Some(entry) = budget_iterator.next_entry().unwrap() {
            // Directories carry no data; only files become VersionFiles.
            if entry.is_dir() {
                continue;
            }
            results.push(VersionFile {
                relative_filename: String::from(entry.file_path().try_normalize().unwrap()),
                permission: entry.mode().permissions(),
                // Size as recorded in the central directory — a hint, not a
                // verified value.
                size: entry.uncompressed_size_hint(),
            });
        }
        results
    }
    /// Locate `file` by its normalized path and return a reader over its
    /// stored data range.
    ///
    /// NOTE(review): this re-scans the central directory on every call —
    /// O(entries) per lookup; consider caching wayfinders keyed by path.
    fn reader(&mut self, file: &VersionFile) -> Option<Box<(dyn MinimumFileObject)>> {
        let read_buffer = &mut [0u8; RECOMMENDED_BUFFER_SIZE];
        let mut entries = self.archive.entries(read_buffer);
        // Linear scan for the matching entry; `?` bails with None if absent.
        let entry = loop {
            if let Some(v) = entries.next_entry().unwrap() {
                if v.file_path().try_normalize().unwrap().as_ref() == &file.relative_filename {
                    break Some(v);
                }
            } else {
                break None;
            }
        }?;
        let wayfinder = entry.wayfinder();
        let local_entry = self.archive.get_entry(wayfinder).unwrap();
        let wrapper = self.new_entry(local_entry, wayfinder);
        Some(Box::new(wrapper))
    }
}

3
src/version/mod.rs Normal file
View File

@ -0,0 +1,3 @@
pub mod utils;
pub mod types;
pub mod backends;

52
src/version/types.rs Normal file
View File

@ -0,0 +1,52 @@
use std::{
fmt::Debug, io::{Read, Seek, SeekFrom}
};
use tokio::io::{self, AsyncRead};
/// Metadata for a single file inside a version backend (directory or zip).
#[derive(Debug, Clone)]
pub struct VersionFile {
    // Path relative to the backend root, used as the lookup key in `reader`.
    pub relative_filename: String,
    // Unix permission bits as reported by the backend.
    pub permission: u32,
    // File size in bytes (for zip backends this comes from the central
    // directory size hint).
    pub size: u64,
}
/// Forward-skip support for readers that may not implement `Seek`
/// (e.g. streaming zip entry readers).
pub trait Skippable {
    /// Advances the read position forward by `amount` bytes relative to
    /// the current position.
    fn skip(&mut self, amount: u64);
}
/// Blanket impl: any seekable reader can skip by seeking forward.
impl<T> Skippable for T
where
    T: Seek,
{
    fn skip(&mut self, amount: u64) {
        // SeekFrom::Current keeps `skip` *relative*, matching both the
        // trait contract and the manual ZipFileWrapper impl. The previous
        // SeekFrom::Start(amount) treated the argument as an absolute
        // position, silently rewinding any reader that had already
        // consumed bytes or been skipped once before.
        let delta = i64::try_from(amount).expect("skip amount exceeds i64::MAX");
        self.seek(SeekFrom::Current(delta)).unwrap();
    }
}
/// Minimum capability set a backend reader must provide: blocking reads,
/// cross-thread transfer, and forward skipping.
pub trait MinimumFileObject: Read + Send + Skippable {}
// Blanket impl: every Read + Send + Seek type qualifies automatically
// (Seek supplies Skippable via the blanket impl above).
impl<T: Read + Send + Seek> MinimumFileObject for T {}
// Intentionally not a generic, because of types in read_file
/// Adapts a blocking `Read` into tokio's `AsyncRead` for streaming back
/// to JavaScript.
pub struct ReadToAsyncRead {
    // The synchronous reader being adapted.
    pub inner: Box<(dyn Read + Send)>,
    // Presumably kept here so the backend (and whatever resources the
    // reader borrows from it) stays alive while the stream is consumed;
    // it is never read directly — TODO confirm.
    pub backend: Box<(dyn VersionBackend + Send)>,
}
impl AsyncRead for ReadToAsyncRead {
    /// Bridges one blocking `read` call per poll.
    ///
    /// NOTE(review): the inner read blocks the async executor thread;
    /// a `spawn_blocking`-based bridge would be the fully correct fix,
    /// but is a larger change.
    fn poll_read(
        mut self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
        buf: &mut tokio::io::ReadBuf<'_>,
    ) -> std::task::Poll<io::Result<()>> {
        let mut read_buf = [0u8; 8192];
        // Never read more than the caller's buffer can hold: put_slice
        // panics if the slice exceeds buf.remaining().
        let limit = buf.remaining().min(read_buf.len());
        if limit == 0 {
            return std::task::Poll::Ready(Ok(()));
        }
        // Propagate I/O errors to the stream instead of panicking across
        // the FFI boundary (the previous unwrap aborted the N-API call).
        match self.inner.read(&mut read_buf[..limit]) {
            Ok(amount) => {
                buf.put_slice(&read_buf[..amount]);
                std::task::Poll::Ready(Ok(()))
            }
            Err(e) => std::task::Poll::Ready(Err(e)),
        }
    }
}
/// Common interface over version sources (plain directories, zip archives).
pub trait VersionBackend {
    /// Enumerates every file the backend exposes.
    fn list_files(&mut self) -> Vec<VersionFile>;
    /// Opens a reader for `file` (matched by `relative_filename`),
    /// or `None` if the file does not exist in this backend.
    fn reader(&mut self, file: &VersionFile) -> Option<Box<(dyn MinimumFileObject)>>;
}

131
src/version/utils.rs Normal file
View File

@ -0,0 +1,131 @@
use std::{
fs::{self, metadata, File},
io::Read,
path::{Path, PathBuf},
};
use napi::{bindgen_prelude::*, tokio_stream::StreamExt};
use tokio_util::codec::{BytesCodec, FramedRead};
use crate::version::{
backends::{PathVersionBackend, ZipVersionBackend},
types::{ReadToAsyncRead, VersionBackend, VersionFile},
};
/// Recursively collects every file path under `path` into `vec`.
/// Directories themselves are not recorded, only the files inside them;
/// a non-directory `path` is ignored. Panics on I/O errors.
pub fn _list_files(vec: &mut Vec<PathBuf>, path: &Path) {
    // Guard clause: only directories are traversed.
    if !metadata(path).unwrap().is_dir() {
        return;
    }
    for dir_entry in fs::read_dir(path).unwrap() {
        let child = dir_entry.unwrap().path();
        // metadata() follows symlinks, so a symlinked directory is recursed.
        if metadata(&child).unwrap().is_dir() {
            _list_files(vec, &child);
        } else {
            vec.push(child);
        }
    }
}
/// Picks the backend implementation for `path`:
/// a directory gets `PathVersionBackend`, a `.zip` file gets
/// `ZipVersionBackend`, anything else has no backend (`None`).
pub fn create_backend_for_path<'a>(path: &Path) -> Option<Box<(dyn VersionBackend + Send + 'a)>> {
    if path.is_dir() {
        return Some(Box::new(PathVersionBackend {
            base_dir: path.to_path_buf(),
        }));
    }
    // Match on the lossy path text so a ".zip" suffix is recognized even
    // for paths that are not valid UTF-8.
    if path.to_string_lossy().ends_with(".zip") {
        let file = File::open(path).unwrap();
        return Some(Box::new(ZipVersionBackend::new(file)));
    }
    None
}
/// Returns true when `path` can be served by some backend
/// (i.e. it is a directory or a `.zip` archive).
#[napi]
pub fn has_backend_for_path(path: String) -> bool {
    create_backend_for_path(Path::new(&path)).is_some()
}
#[napi]
pub fn list_files(path: String) -> Result<Vec<String>> {
let path = Path::new(&path);
let mut backend =
create_backend_for_path(path).ok_or(napi::Error::from_reason("No backend for path"))?;
let files = backend.list_files();
Ok(files.into_iter().map(|e| e.relative_filename).collect())
}
/**
* This is inefficient, but is used in attempt to keep the interface simple
*/
#[napi]
pub fn peek_file(path: String, sub_path: String) -> Result<u64> {
let path = Path::new(&path);
let mut backend =
create_backend_for_path(path).ok_or(napi::Error::from_reason("No backend for path"))?;
let files = backend.list_files();
let file = files
.iter()
.find(|e| e.relative_filename == sub_path)
.ok_or(napi::Error::from_reason("Can't find file to peek"))?;
return Ok(file.size.try_into().unwrap());
}
#[napi]
pub fn read_file(
path: String,
sub_path: String,
env: &Env,
start: Option<BigInt>,
end: Option<BigInt>,
) -> Option<ReadableStream<'_, BufferSlice<'_>>> {
let path = Path::new(&path);
let mut backend = create_backend_for_path(path).unwrap();
let version_file = VersionFile {
relative_filename: sub_path,
permission: 0, // Shouldn't matter
size: 0, // Shouldn't matter
};
// Use `?` operator for cleaner error propagation from `Option`
let mut reader = backend.reader(&version_file)?;
// Skip the 'start' amount of bytes without seek
if let Some(skip) = start.clone() {
reader.skip(skip.get_u64().1.into());
// io::copy(&mut reader.by_ref().take(skip.into()), &mut io::sink()).unwrap();
}
let async_reader = if let Some(limit) = end {
let amount = limit.get_u64().1 - start.map_or(Some(0), |v| Some(v.get_u64().1)).unwrap();
ReadToAsyncRead {
inner: Box::new(reader.take(amount.into())),
backend,
}
} else {
ReadToAsyncRead {
inner: reader,
backend,
}
};
// Create a FramedRead stream with BytesCodec for chunking
let stream = FramedRead::new(async_reader, BytesCodec::new())
// Use StreamExt::map to transform each Result item
.map(|result_item| {
result_item
// Apply Result::map to transform Ok(BytesMut) to Ok(Vec<u8>)
.map(|bytes| bytes.to_vec())
// Apply Result::map_err to transform Err(std::io::Error) to Err(napi::Error)
.map_err(|e| napi::Error::from(e)) // napi::Error implements From<tokio::io::Error>
});
// Create the napi-rs ReadableStream from the tokio_stream::Stream
// The unwrap() here means if stream creation fails, it will panic.
// For a production system, consider returning Result<Option<...>> and handling this.
Some(ReadableStream::create_with_stream_bytes(env, stream).unwrap())
}

1234
yarn.lock

File diff suppressed because it is too large Load Diff