.cargo/config.toml  +4 -1
.env  +1
···
+DATABASE_URL=sqlite://dev.db
.github/CODEOWNERS  +4
.github/FUNDING.yml  +1
···
+github: [mrshmllow]
.github/actions/setup-nix/action.yml  +4 -29
···
 description: |
   Sets up the Nix environment for wire, removing unnecessary bloat and installing Nix along with proper
   substituters being set
-inputs:
-  cachixToken:
-    description: Cachix token
-    required: true
-  withQEMU:
-    description: Enable QEMU
-    default: false
 runs:
   using: "composite"
   steps:
-    - uses: wimpysworld/nothing-but-nix@main
-      with:
-        hatchet-protocol: "carve"
     - name: Generate nix.conf
       shell: bash
       id: config
···
           echo 'config<<EOF'
           echo "system-features = nixos-test benchmark big-parallel kvm"

-          if [ "${{ inputs.withQEMU }}" = "true" ]; then
-            echo "extra-platforms = aarch64-linux i686-linux"
-          fi
+          echo "substituters = https://cache.nixos.org?priority=1 https://cache.althaea.zone?priority=2 https://cache.garnix.io?priority=3"
+          echo "trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= cache.garnix.io:CTFPyKSLcx5RMJKfLo5EEPUObbA78b0YQ2DTCJXqr9g= cache.althaea.zone:BelRpa863X9q3Y+AOnl5SM7QFzre3qb+5I7g2s/mqHI="

           echo EOF
         } >> "$GITHUB_OUTPUT"
-    - uses: cachix/install-nix-action@v31
+    - uses: cachix/install-nix-action@4e002c8ec80594ecd40e759629461e26c8abed15
       with:
         nix_path: nixpkgs=channel:nixos-unstable
         extra_nix_config: ${{ steps.config.outputs.config }}
     - name: Sanity check nix.conf
-      if: ${{ inputs.withQEMU == 'true' && runner.debug == '1' }}
+      if: ${{ runner.debug == '1' }}
       shell: bash
       run: cat /etc/nix/nix.conf
-    - name: Register binfmt
-      if: ${{ inputs.withQEMU == 'true' }}
-      shell: bash
-      run: |
-        docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
-    - name: Sanity check binfmt
-      if: ${{ inputs.withQEMU == 'true' && runner.debug == '1' }}
-      shell: bash
-      run: |
-        cat /proc/sys/fs/binfmt_misc/qemu-aarch64
-    - uses: cachix/cachix-action@v16
-      with:
-        name: wires
-        authToken: "${{ inputs.cachixToken }}"
.github/dependabot.yml  -17
···
-version: 2
-updates:
-  - package-ecosystem: "cargo"
-    target-branch: main
-    directory: "/"
-    schedule:
-      interval: "daily"
-  - package-ecosystem: "npm"
-    target-branch: main
-    directory: "doc/"
-    schedule:
-      interval: "daily"
-  - package-ecosystem: "github-actions"
-    target-branch: main
-    directory: "/"
-    schedule:
-      interval: "weekly"
.github/labeler.yml  +1 -1
.github/workflows/autofix.yml  +66
···
+---
+name: autofix.ci
+on:
+  pull_request:
+  push:
+
+permissions:
+  contents: read
+
+jobs:
+  check-changes:
+    runs-on: ubuntu-latest
+    outputs:
+      docs-pnpm: ${{ steps.filter.outputs.docs-pnpm }}
+    steps:
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
+        with:
+          persist-credentials: false
+      - uses: dorny/paths-filter@668c092af3649c4b664c54e4b704aa46782f6f7c
+        id: filter
+        with:
+          filters: |
+            docs-pnpm:
+              - 'doc/pnpm-lock.yaml'
+  autofix:
+    runs-on: blacksmith-2vcpu-ubuntu-2404
+    needs: check-changes
+    env:
+      UV_CACHE_DIR: /tmp/.uv-cache
+    steps:
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
+        with:
+          persist-credentials: false
+      - uses: ./.github/actions/setup-nix
+      - name: Cache Cargo
+        uses: actions/cache@v5
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
+      - name: Cache uv
+        uses: actions/cache@v5
+        with:
+          path: /tmp/.uv-cache
+          key: uv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }}
+          restore-keys: |
+            uv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }}
+            uv-${{ runner.os }}
+      - name: setup sqlx
+        run: nix develop -L -v -c sqlx database setup --source ./crates/core/src/cache/migrations/
+      - name: autofix sqlx
+        run: nix develop -L -v -c cargo sqlx prepare --workspace
+      - name: clippy --fix
+        run: nix develop -L -v -c cargo clippy --fix
+      - name: pre-commit run
+        run: nix develop -L -v -c pre-commit run --all-files
+        continue-on-error: true
+      - name: Upgrade Hash
+        if: ${{ needs.check-changes.outputs.docs-pnpm == 'true' }}
+        run: bash ./doc/upgrade.sh
+      - name: Minimise uv cache
+        run: nix develop -L -v -c uv cache prune --ci
+      - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
.github/workflows/build.yml  -56
···
----
-name: "Build Packages"
-on:
-  pull_request:
-  push:
-jobs:
-  pre-job:
-    continue-on-error: true
-    runs-on: ubuntu-latest
-    outputs:
-      should_skip: ${{ steps.skip_check.outputs.should_skip }}
-    steps:
-      - id: skip_check
-        uses: fkirc/skip-duplicate-actions@v5
-        with:
-          concurrent_skipping: "same_content_newer"
-          cancel_others: "true"
-  agent:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        # https://github.com/nix-systems/default-linux
-        system: ["x86_64-linux", "aarch64-linux"]
-    needs: pre-job
-    if: needs.pre-job.outputs.should_skip != 'true'
-    steps:
-      - uses: actions/checkout@v4
-      - uses: ./.github/actions/setup-nix
-        with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-          withQEMU: true
-      - run: nix build .#packages.${{ matrix.system }}.agent --print-build-logs
-  wire:
-    runs-on: ubuntu-latest
-    needs: agent
-    strategy:
-      matrix:
-        # https://github.com/nix-systems/default-linux
-        system: ["x86_64-linux", "aarch64-linux"]
-    steps:
-      - uses: actions/checkout@v4
-      - uses: ./.github/actions/setup-nix
-        with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-          withQEMU: true
-      - run: nix build .#packages.${{ matrix.system }}.wire --print-build-logs
-  docs:
-    runs-on: ubuntu-latest
-    needs: pre-job
-    if: needs.pre-job.outputs.should_skip != 'true'
-    steps:
-      - uses: actions/checkout@v4
-      - uses: ./.github/actions/setup-nix
-        with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - run: nix build .#docs
.github/workflows/clean-pr-env.yml  -23
···
-name: Clean PR Environment
-on:
-  pull_request:
-    types:
-      - closed
-jobs:
-  cleanup-gh:
-    runs-on: ubuntu-latest
-    permissions: write-all
-    steps:
-      - uses: actions/checkout@v4
-      - name: get github app token
-        uses: navikt/github-app-token-generator@793caf0d755fb4d6e88150825f680f188535cb48
-        id: get-token
-        with:
-          app-id: ${{ secrets.GH_APP_CLEANER_ID }}
-          private-key: ${{ secrets.GH_APP_CLEANER_PRIVATE_KEY }}
-      - name: delete pr environment
-        uses: strumwolf/delete-deployment-environment@v3.0.0
-        with:
-          token: ${{ steps.get-token.outputs.token }}
-          environment: pr-${{ github.event.number }}
-          ref: ${{ github.ref_name }}
.github/workflows/follow-nixpkgs.yml  +10 -6
···
   pre-job:
     continue-on-error: true
     runs-on: ubuntu-latest
+    permissions: {}
     outputs:
       number: ${{ steps.skip_check.outputs.number }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
+        with:
+          persist-credentials: false
       - id: skip_check
         run: |
           echo "number=$(gh pr list --label flake-lock-update --state open --json id | jq 'length')" >> "$GITHUB_OUTPUT"
···
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   update:
     runs-on: ubuntu-latest
+    permissions: {}
     needs: pre-job
     if: needs.pre-job.outputs.number == '0'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
         with:
           ssh-key: ${{ secrets.FOLLOW_NIXPKGS_PRIVATE_KEY }}
           ref: main
+          persist-credentials: false
       - uses: ./.github/actions/setup-nix
-        with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
       - run: |
           git config user.name 'github-actions[bot]'
           git config user.email 'github-actions[bot]@users.noreply.github.com'
···
         run: echo "date=$(date +'%Y-%m-%d')" >> "$GITHUB_OUTPUT"
       - name: Create Pull Request
         id: cpr
-        uses: peter-evans/create-pull-request@v7
+        uses: peter-evans/create-pull-request@0979079bc20c05bbbb590a56c21c4e2b1d1f1bbe
         with:
           title: Update flake.lock ${{ steps.date.outputs.date }}
           labels: flake-lock-update
           branch: ci/flake-update
       - name: Enable automerge
         if: steps.cpr.outputs.pull-request-number
-        run: gh pr merge --squash --auto "${{ steps.cpr.outputs.pull-request-number }}"
+        run: gh pr merge --squash --auto "${STEPS_CPR_OUTPUTS_PULL_REQUEST_NUMBER}"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          STEPS_CPR_OUTPUTS_PULL_REQUEST_NUMBER: ${{ steps.cpr.outputs.pull-request-number }}
.github/workflows/labeler.yml  +2 -2
.github/workflows/pages.yml  +13 -6
···
 name: "Pages"
 on:
   push:
-    branches: [main]
+    branches:
+      - stable
+      - trunk
   workflow_dispatch:
 jobs:
   pre-job:
     continue-on-error: true
     runs-on: ubuntu-latest
+    permissions: {}
     outputs:
       should_skip: ${{ steps.skip_check.outputs.should_skip }}
     steps:
       - id: skip_check
-        uses: fkirc/skip-duplicate-actions@v5
+        uses: fkirc/skip-duplicate-actions@04a1aebece824b56e6ad6a401d015479cd1c50b3
   deploy:
     runs-on: ubuntu-latest
+    permissions: {}
     environment:
       name: production
       url: https://wire.althaea.zone/
     if: github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_skip != 'true'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
+        with:
+          persist-credentials: false
       - uses: ./.github/actions/setup-nix
-        with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
       - run: nix build .#docs
+        if: github.ref == 'refs/heads/stable'
+      - run: nix build .#docs-unstable
+        if: github.ref != 'refs/heads/stable'
       - name: Deploy to Cloudflare Pages
         id: deployment
-        uses: cloudflare/wrangler-action@v3
+        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65
         with:
           apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
.github/workflows/pr-preview.yml  +17 -12
···
     runs-on: ubuntu-latest
     outputs:
       number: ${{ steps.find-pr.outputs.number }}
+    permissions: {}
     if: ${{ github.actor != 'dependabot[bot]' }}
     steps:
-      - uses: jwalton/gh-find-current-pr@master
+      - uses: jwalton/gh-find-current-pr@89ee5799558265a1e0e31fab792ebb4ee91c016b
         id: find-pr
         with:
           state: all
   base-ref:
     runs-on: ubuntu-latest
+    permissions: {}
     needs: get-pr
     outputs:
       base-ref: ${{ steps.base-ref.outputs.base-ref }}
···
       - name: Locate Base Ref
         id: base-ref
         run: |
-          echo "base-ref=$(gh api /repos/${{ github.repository }}/pulls/${{ needs.get-pr.outputs.number }} | jq -r '.base.ref')" >> "$GITHUB_OUTPUT"
+          echo "base-ref=$(gh api /repos/${{ github.repository }}/pulls/${NEEDS_GET_PR_OUTPUTS_NUMBER} | jq -r '.base.ref')" >> "$GITHUB_OUTPUT"
         env:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          NEEDS_GET_PR_OUTPUTS_NUMBER: ${{ needs.get-pr.outputs.number }}
   eval-base:
     runs-on: ubuntu-latest
+    permissions: {}
     needs: base-ref
     outputs:
       drv: ${{ steps.drv.outputs.drv }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
         with:
           ref: ${{ needs.base-ref.outputs.base-ref }}
+          persist-credentials: false
       - uses: ./.github/actions/setup-nix
-        with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
       - id: drv
         run: echo "drv=$(nix eval .#docs --json)" >> "$GITHUB_OUTPUT"
   eval-head:
     runs-on: ubuntu-latest
+    permissions: {}
     needs: get-pr
     outputs:
       drv: ${{ steps.drv.outputs.drv }}
     if: ${{ needs.get-pr.outputs.number != '' }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
+        with:
+          persist-credentials: false
       - uses: ./.github/actions/setup-nix
-        with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
       - id: drv
         run: echo "drv=$(nix eval .#docs --json)" >> "$GITHUB_OUTPUT"
   deploy:
     runs-on: ubuntu-latest
+    permissions: {}
     needs:
       - eval-head
       - eval-base
···
     # skip if nothing changed in the docs package
     if: needs.eval-head.outputs.drv != needs.eval-base.outputs.drv
     steps:
-      - uses: actions/checkout@v4
-      - uses: ./.github/actions/setup-nix
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
         with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
+          persist-credentials: false
+      - uses: ./.github/actions/setup-nix
       - run: nix build .#docs
       - name: Deploy to Cloudflare Pages
         id: deployment
-        uses: cloudflare/wrangler-action@v3
+        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65
         with:
           apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
.github/workflows/test.yml  +14 -65
···
   pre-job:
     continue-on-error: true
     runs-on: ubuntu-latest
+    permissions: {}
     outputs:
       should_skip: ${{ steps.skip_check.outputs.should_skip }}
     steps:
       - id: skip_check
-        uses: fkirc/skip-duplicate-actions@v5
+        uses: fkirc/skip-duplicate-actions@04a1aebece824b56e6ad6a401d015479cd1c50b3
         with:
           concurrent_skipping: "same_content_newer"
           cancel_others: "true"
-  pre-commit:
+  test:
     runs-on: ubuntu-latest
     needs: pre-job
+    permissions:
+      contents: read
     if: needs.pre-job.outputs.should_skip != 'true'
     steps:
-      - uses: actions/checkout@v4
-      - uses: ./.github/actions/setup-nix
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
         with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - name: Build
-        run: nix develop --print-build-logs -v --command pre-commit run --all-files
-  nextest:
-    runs-on: ubuntu-latest
-    needs: pre-job
-    if: needs.pre-job.outputs.should_skip != 'true'
-    steps:
-      - uses: actions/checkout@v4
+          persist-credentials: false
       - uses: ./.github/actions/setup-nix
-        with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - uses: actions/cache@v4
-        with:
-          path: |
-            ~/.cargo/registry
-            ~/.cargo/git
-            target
-          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
-      - name: Nextest
-        run: nix develop --print-build-logs -v --command cargo nextest run
-  find-vm-tests:
-    runs-on: ubuntu-latest
-    needs: pre-job
-    if: needs.pre-job.outputs.should_skip != 'true'
-    outputs:
-      tests: ${{ steps.tests.outputs.tests }}
-    steps:
-      - uses: actions/checkout@v4
-      - name: Cache Tests
-        id: cache
-        uses: actions/cache@v4
-        with:
-          key: ${{ runner.os }}-tests-${{ hashFiles('tests/nix/**', 'wire/**', 'runtime/**', 'flake.*') }}
-          path: tests.json
-      - uses: ./.github/actions/setup-nix
-        with:
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - name: find tests
-        if: steps.cache.outputs.cache-hit != 'true'
+      - name: Build Tests
+        run: nix build .#cargo-tests -L -vv
+      - name: Cargo Tests
         run: |
-          nix eval --impure --json --expr \
-            'with builtins; filter ((import <nixpkgs>{}).lib.hasPrefix "vm-") (attrNames (getFlake "${{ github.workspace }}").checks.x86_64-linux)' \
-            > tests.json
-      - name: set output
-        id: tests
-        run: |
-          echo "tests=$(cat tests.json)" >> "$GITHUB_OUTPUT"
-  vm-tests:
-    runs-on: ubuntu-latest
-    needs: find-vm-tests
-    strategy:
-      matrix:
-        test: ${{ fromJSON(needs.find-vm-tests.outputs.tests) }}
-    steps:
-      - uses: actions/checkout@v4
-      - uses: ./.github/actions/setup-nix
-        with:
-          withQEMU: true
-          cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
-      - name: Build
-        timeout-minutes: 60
-        run: nix build .#checks.x86_64-linux.${{ matrix.test }} --print-build-logs
+          nix develop \
+            --print-build-logs \
+            -v \
+            --command result/bin/run-tests
.gitignore  +9
.sqlx/query-1ab95659223cbc7e012f538080c4be8b4774a0354348e6de53e6ea7aadfe8819.json  +12
···
+{
+  "db_name": "SQLite",
+  "query": "delete from inspection_cache\nwhere\n blob_id in (\n select\n id\n from\n inspection_blobs\n where\n schema_version != $1\n )\n or ROWID in (\n select\n ROWID\n from\n inspection_cache\n order by\n ROWID desc\n limit\n -1\n offset\n 30\n )",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Right": 1
+    },
+    "nullable": []
+  },
+  "hash": "1ab95659223cbc7e012f538080c4be8b4774a0354348e6de53e6ea7aadfe8819"
+}
.sqlx/query-61b79ce83349770c0b4e474471cdee067214b88444cb68d6d3560f4be835b3a8.json  +12
···
+{
+  "db_name": "SQLite",
+  "query": "\n insert into\n inspection_cache (store_path, hash, blob_id)\n values\n ($1, $2, $3)\n ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Right": 3
+    },
+    "nullable": []
+  },
+  "hash": "61b79ce83349770c0b4e474471cdee067214b88444cb68d6d3560f4be835b3a8"
+}
.sqlx/query-810100e8d8c033a0d0b77fe8e90a212cd2c1e8718ee347fba3076bcd10f19198.json  +26
···
+{
+  "db_name": "SQLite",
+  "query": "\n select\n inspection_blobs.json_value,\n inspection_cache.store_path\n from\n inspection_blobs\n join inspection_cache on inspection_cache.blob_id = inspection_blobs.id\n where\n inspection_cache.store_path = $1\n and inspection_cache.hash = $2\n and inspection_blobs.schema_version = $3\n limit\n 1\n ",
+  "describe": {
+    "columns": [
+      {
+        "name": "json_value",
+        "ordinal": 0,
+        "type_info": "Blob"
+      },
+      {
+        "name": "store_path",
+        "ordinal": 1,
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Right": 3
+    },
+    "nullable": [
+      false,
+      false
+    ]
+  },
+  "hash": "810100e8d8c033a0d0b77fe8e90a212cd2c1e8718ee347fba3076bcd10f19198"
+}
.sqlx/query-a5459b4c5f879509d29c3b6dcf85d1da89e4bd9380b8763edc5d16cbaa302d3f.json  +12
···
+{
+  "db_name": "SQLite",
+  "query": "delete from inspection_blobs\nwhere\n not exists (\n select\n 1\n from\n inspection_cache\n where\n inspection_cache.blob_id = inspection_blobs.id\n )",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Right": 0
+    },
+    "nullable": []
+  },
+  "hash": "a5459b4c5f879509d29c3b6dcf85d1da89e4bd9380b8763edc5d16cbaa302d3f"
+}
.sqlx/query-cdea7b20c482f4127bacb5c58755d4fdd4dca2066b9c06950be60bc790569335.json  +20
···
+{
+  "db_name": "SQLite",
+  "query": "\n insert into inspection_blobs (json_value, schema_version)\n values ($1, $2)\n on conflict(json_value)\n do update set json_value = excluded.json_value\n returning inspection_blobs.id\n ",
+  "describe": {
+    "columns": [
+      {
+        "name": "id",
+        "ordinal": 0,
+        "type_info": "Integer"
+      }
+    ],
+    "parameters": {
+      "Right": 2
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "cdea7b20c482f4127bacb5c58755d4fdd4dca2066b9c06950be60bc790569335"
+}
CHANGELOG.md  +154 -3
···
 
 ## [Unreleased] - yyyy-mm-dd
 
+## [v1.1.1] - 2025-01-05
+
+### Fixed
+
+- Fix a bug where wire was attempting to SSH to the local machine when `buildOnTarget` &
+  `allowLocalDeployment` where true.
+
+## [v1.1.0] - 2025-12-31
+
+### Added
+
+- Add a `--substitute-on-destination` argument.
+- Add the `meta.nodeSpecialArgs` meta option.
+- Add `wire build`, a new command to build nodes offline.
+  It is distinct from `wire apply build`, as it will not ping
+  or push the result, making it useful for CI.
+
+### Changed
+
+- Build store paths will be output to stdout
+
+### Fixed
+
+- Fix invalidated caches not actually returning `None`.
+
+## [v1.0.0] - 2025-12-17
+
+### Added
+
+- SIGINT signal handling.
+
+### Changed
+
+- Invalidate caches that reference garbage collected paths.
+
+### Fixed
+
+- Fix key filtering logic.
+
+## [v1.0.0-beta.0] - 2025-12-02
+
+### Added
+
+- Implement `meta.nodeNixpkgs`.
+- Add caching of hive evaluation for flakes.
+
+### Changed
+
+- Run tests against 25.11.
+
+## [v1.0.0-alpha.1] - 2025-11-24
+
+### Added
+
+- Add `--handle-unreachable`. You can use `--handle-unreachable ignore` to
+  ignore unreachable nodes in the status of the deployment.
+- Add a basic progress bar.
+
+### Changed
+
+- Revert "Wire will now attempt to use SSH ControlMaster by default.".
+- Change the `show` subcommand to look nicer now.
+- Change the `build` step to always build remotely when the node is
+  going to be applied locally.
+
+## [v1.0.0-alpha.0] - 2025-10-22
+
+### Added
+
+- Add `--ssh-accept-host` argument.
+- Add `--on -` syntax to the `--on` argument.
+  Passing `-` will now read additional apply targets from stdin.
+- Add `{key.name}-key.{path,service}` systemd units.
+- Added `--flake` argument as an alias for `--path`.
+- A terminal bell will be output if a sudo / ssh prompt is ever printed.
+- Added a real tutorial, and separated many how-to guides.
+  The tutorial leads the user through creating and deploying a wire Hive.
+- Add `config.nixpkgs.flake.source` by default if `meta.nixpkgs` ends
+  with `-source` at priority 1000 (default).
+
+### Fixed
+
+- Fix bug where `--non-interactive` was inversed.
+- Fix a bug where `./result` links where being created.
+- Fix passing `sources.nixpkgs` directly from npins to `meta.nixpkgs`.
+- Fix nodes that will be applied locally running the `push` and `cleanup`
+  steps.
+
+### Changed
+
+- Improve logging from interactive commands (absence of `--non-interactive`).
+- Changed `--path` argument to support flakerefs (`github:foo/bar`,
+  `git+file:///...`, `https://.../main.tar.gz`, etc).
+- Changed SSH arguments to use ControlMaster by default.
+- Compile-out logs with level `tracing_level::TRACE` in release builds.
+- Improve aata integrity of keys.
+- Unknown SSH keys will be immediately rejected unless `--ssh-accept-host` is passed.
+- Changed evaluation to be ran in parallel with other steps until
+  the .drv is required.
+
+## [0.5.0] - 2025-09-18
+
+### Added
+
+- Added `--reboot`. wire will wait for the node to reconnect after rebooting.
+  wire will refuse to reboot localhost. Keys post-activation will be applied
+  after rebooting!
+- Most errors now have error codes and documentation links.
+- Added the global flag `--non-interactive`.
+- wire now creates its own PTY to interface with openssh's PTY to allow for
+  interactive sudo authentication on both remote and local targets.
+
+  Using a wheel user as `deployment.target.user` is no longer necessary
+  (if you like entering your password a lot).
+
+  A non-wheel user combined with `--non-interactive` will likely fail.
+
+- Added `deployment.keys.environment` to give key commands environment variables.
+
+### Changed
+
+- `wire inspect/show --json` will no longer use a pretty print.
+- wire will now wait for the node to reconnect if activation failed (excluding
+  dry-activate).
+- Nix logs with the `Talkative` and `Chatty` level have been moved to
+  `tracing_level::TRACE`.
+- Error messages have been greatly improved.
+
+### Fixed
+
+- Some bugs to do with step execution were fixed.
+
+## [0.4.0] - 2025-07-10
+
+### Added
+
+- Nodes may now fail without stopping the entire hive from continuing. A summary
+  of errors will be presented at the end of the apply process.
+- wire will now ping the node before it proceeds executing.
+- wire will now properly respect `deployment.target.hosts`.
+- wire will now attempt each target host in order until a valid one is found.
+
+### Changed
+
+- wire now directly evaluates your hive instead of shipping extra nix code along with its binary.
+  You must now use `outputs.makeHive { ... }` instead of a raw attribute.
+  This can be obtained with npins or a flake input.
+- The expected flake output name has changed from `outputs.colmena` to `outputs.wire`.
+
+## [0.3.0] - 2025-06-20
+
 ### Added
 
 - Run tests against `unstable` and `25.05` by @mrshmllow in https://github.com/wires-org/wire/pull/176.
···
 ### Changed
 
 - Dependency Updates.
-- Wire now compiles and includes key agents for multiple architectures, currently only linux.
+- wire now compiles and includes key agents for multiple architectures, currently only linux.
 - There is a new package output, `wire-small`, for testing purposes.
   It only compiles the key agent for the host that builds `wire-small`.
 - `--no-progress` now defaults to true if stdin does not refer to a tty (unix pipelines, in CI).
-- Added an error for the internal hive evluation parse failure.
+- Added an error for the internal hive evaluation parse failure.
 - The `inspect` command now has `show` as an alias.
 - Remove `log` command as there are currently no plans to implement the feature
 - The `completions` command is now hidden from the help page
 
 ### Fixed
 
-- A non-existant key owner user/group would not default to gid/uid `0`.
+- A non-existent key owner user/group would not default to gid/uid `0`.
 - Keys can now be deployed to localhost.
 
 ## [0.2.0] - 2025-04-21
COPYING  +661
···
+[new file: the full text of the GNU Affero General Public License, Version 3, 19 November 2007; truncated in this excerpt]
585
+
later version.
586
+
587
+
15. Disclaimer of Warranty.
588
+
589
+
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590
+
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591
+
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592
+
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593
+
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594
+
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595
+
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596
+
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597
+
598
+
16. Limitation of Liability.
599
+
600
+
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601
+
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602
+
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603
+
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604
+
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605
+
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606
+
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607
+
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608
+
SUCH DAMAGES.
609
+
610
+
17. Interpretation of Sections 15 and 16.
611
+
612
+
If the disclaimer of warranty and limitation of liability provided
613
+
above cannot be given local legal effect according to their terms,
614
+
reviewing courts shall apply local law that most closely approximates
615
+
an absolute waiver of all civil liability in connection with the
616
+
Program, unless a warranty or assumption of liability accompanies a
617
+
copy of the Program in return for a fee.
618
+
619
+
END OF TERMS AND CONDITIONS
620
+
621
+
How to Apply These Terms to Your New Programs
622
+
623
+
If you develop a new program, and you want it to be of the greatest
624
+
possible use to the public, the best way to achieve this is to make it
625
+
free software which everyone can redistribute and change under these terms.
626
+
627
+
To do so, attach the following notices to the program. It is safest
628
+
to attach them to the start of each source file to most effectively
629
+
state the exclusion of warranty; and each file should have at least
630
+
the "copyright" line and a pointer to where the full notice is found.
631
+
632
+
<one line to give the program's name and a brief idea of what it does.>
633
+
Copyright (C) <year> <name of author>
634
+
635
+
This program is free software: you can redistribute it and/or modify
636
+
it under the terms of the GNU Affero General Public License as published
637
+
by the Free Software Foundation, either version 3 of the License, or
638
+
(at your option) any later version.
639
+
640
+
This program is distributed in the hope that it will be useful,
641
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
642
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643
+
GNU Affero General Public License for more details.
644
+
645
+
You should have received a copy of the GNU Affero General Public License
646
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
647
+
648
+
Also add information on how to contact you by electronic and paper mail.
649
+
650
+
If your software can interact with users remotely through a computer
651
+
network, you should also make sure that it provides a way for users to
652
+
get its source. For example, if your program is a web application, its
653
+
interface could display a "Source" link that leads users to an archive
654
+
of the code. There are many ways you could offer source, and different
655
+
solutions will be better for different programs; see section 13 for the
656
+
specific requirements.
657
+
658
+
You should also get your employer (if you work as a programmer) or school,
659
+
if any, to sign a "copyright disclaimer" for the program, if necessary.
660
+
For more information on this, and how to apply and follow the GNU AGPL, see
661
+
<https://www.gnu.org/licenses/>.
+2088
-378
Cargo.lock
···
19
19
20
20
[[package]]
21
21
name = "aho-corasick"
22
-
version = "1.1.3"
22
+
version = "1.1.4"
23
23
source = "registry+https://github.com/rust-lang/crates.io-index"
24
-
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
24
+
checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
25
25
dependencies = [
26
26
"memchr",
27
27
]
28
28
29
29
[[package]]
30
+
name = "allocator-api2"
31
+
version = "0.2.21"
32
+
source = "registry+https://github.com/rust-lang/crates.io-index"
33
+
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
34
+
35
+
[[package]]
30
36
name = "anstream"
31
-
version = "0.6.19"
37
+
version = "0.6.20"
32
38
source = "registry+https://github.com/rust-lang/crates.io-index"
33
-
checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933"
39
+
checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192"
34
40
dependencies = [
35
41
"anstyle",
36
42
"anstyle-parse",
···
58
64
59
65
[[package]]
60
66
name = "anstyle-query"
61
-
version = "1.1.3"
67
+
version = "1.1.4"
62
68
source = "registry+https://github.com/rust-lang/crates.io-index"
63
-
checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9"
69
+
checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2"
64
70
dependencies = [
65
-
"windows-sys 0.59.0",
71
+
"windows-sys 0.60.2",
66
72
]
67
73
68
74
[[package]]
69
75
name = "anstyle-wincon"
70
-
version = "3.0.9"
76
+
version = "3.0.10"
71
77
source = "registry+https://github.com/rust-lang/crates.io-index"
72
-
checksum = "403f75924867bb1033c59fbf0797484329750cfbe3c4325cd33127941fabc882"
78
+
checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a"
73
79
dependencies = [
74
80
"anstyle",
75
81
"once_cell_polyfill",
76
-
"windows-sys 0.59.0",
82
+
"windows-sys 0.60.2",
77
83
]
78
84
79
85
[[package]]
80
86
name = "anyhow"
81
-
version = "1.0.98"
87
+
version = "1.0.100"
82
88
source = "registry+https://github.com/rust-lang/crates.io-index"
83
-
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
89
+
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
84
90
85
91
[[package]]
86
-
name = "arrayvec"
87
-
version = "0.7.6"
88
-
source = "registry+https://github.com/rust-lang/crates.io-index"
89
-
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
90
-
91
-
[[package]]
92
-
name = "async-trait"
93
-
version = "0.1.88"
92
+
name = "atoi"
93
+
version = "2.0.0"
94
94
source = "registry+https://github.com/rust-lang/crates.io-index"
95
-
checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
95
+
checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528"
96
96
dependencies = [
97
-
"proc-macro2",
98
-
"quote",
99
-
"syn 2.0.103",
97
+
"num-traits",
100
98
]
101
99
102
100
[[package]]
···
117
115
"miniz_oxide",
118
116
"object",
119
117
"rustc-demangle",
120
-
"windows-targets",
118
+
"windows-targets 0.52.6",
119
+
]
120
+
121
+
[[package]]
122
+
name = "backtrace-ext"
123
+
version = "0.2.1"
124
+
source = "registry+https://github.com/rust-lang/crates.io-index"
125
+
checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50"
126
+
dependencies = [
127
+
"backtrace",
121
128
]
122
129
123
130
[[package]]
124
-
name = "beef"
125
-
version = "0.5.2"
131
+
name = "base64"
132
+
version = "0.22.1"
133
+
source = "registry+https://github.com/rust-lang/crates.io-index"
134
+
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
135
+
136
+
[[package]]
137
+
name = "base64ct"
138
+
version = "1.8.0"
139
+
source = "registry+https://github.com/rust-lang/crates.io-index"
140
+
checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba"
141
+
142
+
[[package]]
143
+
name = "bitflags"
144
+
version = "1.3.2"
126
145
source = "registry+https://github.com/rust-lang/crates.io-index"
127
-
checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1"
146
+
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
128
147
129
148
[[package]]
130
149
name = "bitflags"
131
150
version = "2.9.1"
132
151
source = "registry+https://github.com/rust-lang/crates.io-index"
133
152
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
153
+
dependencies = [
154
+
"serde",
155
+
]
134
156
135
157
[[package]]
136
158
name = "bitmaps"
···
142
164
]
143
165
144
166
[[package]]
145
-
name = "bumpalo"
146
-
version = "3.18.1"
167
+
name = "block-buffer"
168
+
version = "0.10.4"
169
+
source = "registry+https://github.com/rust-lang/crates.io-index"
170
+
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
171
+
dependencies = [
172
+
"generic-array",
173
+
]
174
+
175
+
[[package]]
176
+
name = "bstr"
177
+
version = "1.12.0"
178
+
source = "registry+https://github.com/rust-lang/crates.io-index"
179
+
checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
180
+
dependencies = [
181
+
"memchr",
182
+
"regex-automata",
183
+
"serde",
184
+
]
185
+
186
+
[[package]]
187
+
name = "byteorder"
188
+
version = "1.5.0"
147
189
source = "registry+https://github.com/rust-lang/crates.io-index"
148
-
checksum = "793db76d6187cd04dff33004d8e6c9cc4e05cd330500379d2394209271b4aeee"
190
+
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
149
191
150
192
[[package]]
151
193
name = "bytes"
···
154
196
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
155
197
156
198
[[package]]
199
+
name = "cc"
200
+
version = "1.2.41"
201
+
source = "registry+https://github.com/rust-lang/crates.io-index"
202
+
checksum = "ac9fe6cdbb24b6ade63616c0a0688e45bb56732262c158df3c0c4bea4ca47cb7"
203
+
dependencies = [
204
+
"find-msvc-tools",
205
+
"jobserver",
206
+
"libc",
207
+
"shlex",
208
+
]
209
+
210
+
[[package]]
157
211
name = "cfg-if"
158
212
version = "1.0.1"
159
213
source = "registry+https://github.com/rust-lang/crates.io-index"
···
161
215
162
216
[[package]]
163
217
name = "cfg_aliases"
218
+
version = "0.1.1"
219
+
source = "registry+https://github.com/rust-lang/crates.io-index"
220
+
checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
221
+
222
+
[[package]]
223
+
name = "cfg_aliases"
164
224
version = "0.2.1"
165
225
source = "registry+https://github.com/rust-lang/crates.io-index"
166
226
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
167
227
168
228
[[package]]
169
229
name = "clap"
170
-
version = "4.5.40"
230
+
version = "4.5.53"
171
231
source = "registry+https://github.com/rust-lang/crates.io-index"
172
-
checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f"
232
+
checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
173
233
dependencies = [
174
234
"clap_builder",
175
235
"clap_derive",
···
195
255
196
256
[[package]]
197
257
name = "clap-verbosity-flag"
198
-
version = "3.0.3"
258
+
version = "3.0.4"
199
259
source = "registry+https://github.com/rust-lang/crates.io-index"
200
-
checksum = "eeab6a5cdfc795a05538422012f20a5496f050223c91be4e5420bfd13c641fb1"
260
+
checksum = "9d92b1fab272fe943881b77cc6e920d6543e5b1bfadbd5ed81c7c5a755742394"
201
261
dependencies = [
202
262
"clap",
203
263
"log",
···
205
265
206
266
[[package]]
207
267
name = "clap_builder"
208
-
version = "4.5.40"
268
+
version = "4.5.53"
209
269
source = "registry+https://github.com/rust-lang/crates.io-index"
210
-
checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e"
270
+
checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
211
271
dependencies = [
212
272
"anstream",
213
273
"anstyle",
···
217
277
218
278
[[package]]
219
279
name = "clap_complete"
220
-
version = "4.5.54"
280
+
version = "4.5.62"
221
281
source = "registry+https://github.com/rust-lang/crates.io-index"
222
-
checksum = "aad5b1b4de04fead402672b48897030eec1f3bfe1550776322f59f6d6e6a5677"
282
+
checksum = "004eef6b14ce34759aa7de4aea3217e368f463f46a3ed3764ca4b5a4404003b4"
223
283
dependencies = [
224
284
"clap",
285
+
"clap_lex",
286
+
"is_executable",
287
+
"shlex",
225
288
]
226
289
227
290
[[package]]
228
291
name = "clap_derive"
229
-
version = "4.5.40"
292
+
version = "4.5.49"
230
293
source = "registry+https://github.com/rust-lang/crates.io-index"
231
-
checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce"
294
+
checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671"
232
295
dependencies = [
233
296
"heck",
234
297
"proc-macro2",
235
298
"quote",
236
-
"syn 2.0.103",
299
+
"syn 2.0.111",
237
300
]
238
301
239
302
[[package]]
···
249
312
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
250
313
251
314
[[package]]
252
-
name = "console"
253
-
version = "0.15.11"
315
+
name = "concurrent-queue"
316
+
version = "2.5.0"
317
+
source = "registry+https://github.com/rust-lang/crates.io-index"
318
+
checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973"
319
+
dependencies = [
320
+
"crossbeam-utils",
321
+
]
322
+
323
+
[[package]]
324
+
name = "const-oid"
325
+
version = "0.9.6"
326
+
source = "registry+https://github.com/rust-lang/crates.io-index"
327
+
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
328
+
329
+
[[package]]
330
+
name = "convert_case"
331
+
version = "0.10.0"
332
+
source = "registry+https://github.com/rust-lang/crates.io-index"
333
+
checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9"
334
+
dependencies = [
335
+
"unicode-segmentation",
336
+
]
337
+
338
+
[[package]]
339
+
name = "cpufeatures"
340
+
version = "0.2.17"
254
341
source = "registry+https://github.com/rust-lang/crates.io-index"
255
-
checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8"
342
+
checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
256
343
dependencies = [
257
-
"encode_unicode",
258
344
"libc",
259
-
"once_cell",
260
-
"unicode-width 0.2.1",
261
-
"windows-sys 0.59.0",
345
+
]
346
+
347
+
[[package]]
348
+
name = "crc"
349
+
version = "3.3.0"
350
+
source = "registry+https://github.com/rust-lang/crates.io-index"
351
+
checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675"
352
+
dependencies = [
353
+
"crc-catalog",
354
+
]
355
+
356
+
[[package]]
357
+
name = "crc-catalog"
358
+
version = "2.4.0"
359
+
source = "registry+https://github.com/rust-lang/crates.io-index"
360
+
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
361
+
362
+
[[package]]
363
+
name = "crossbeam-queue"
364
+
version = "0.3.12"
365
+
source = "registry+https://github.com/rust-lang/crates.io-index"
366
+
checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115"
367
+
dependencies = [
368
+
"crossbeam-utils",
369
+
]
370
+
371
+
[[package]]
372
+
name = "crossbeam-utils"
373
+
version = "0.8.21"
374
+
source = "registry+https://github.com/rust-lang/crates.io-index"
375
+
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
376
+
377
+
[[package]]
378
+
name = "crypto-common"
379
+
version = "0.1.6"
380
+
source = "registry+https://github.com/rust-lang/crates.io-index"
381
+
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
382
+
dependencies = [
383
+
"generic-array",
384
+
"typenum",
385
+
]
386
+
387
+
[[package]]
388
+
name = "curve25519-dalek"
389
+
version = "4.1.3"
390
+
source = "registry+https://github.com/rust-lang/crates.io-index"
391
+
checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be"
392
+
dependencies = [
393
+
"cfg-if",
394
+
"cpufeatures",
395
+
"curve25519-dalek-derive",
396
+
"digest",
397
+
"fiat-crypto",
398
+
"rustc_version",
399
+
"subtle",
400
+
"zeroize",
401
+
]
402
+
403
+
[[package]]
404
+
name = "curve25519-dalek-derive"
405
+
version = "0.1.1"
406
+
source = "registry+https://github.com/rust-lang/crates.io-index"
407
+
checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
408
+
dependencies = [
409
+
"proc-macro2",
410
+
"quote",
411
+
"syn 2.0.111",
412
+
]
413
+
414
+
[[package]]
415
+
name = "darling"
416
+
version = "0.21.3"
417
+
source = "registry+https://github.com/rust-lang/crates.io-index"
418
+
checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0"
419
+
dependencies = [
420
+
"darling_core",
421
+
"darling_macro",
422
+
]
423
+
424
+
[[package]]
425
+
name = "darling_core"
426
+
version = "0.21.3"
427
+
source = "registry+https://github.com/rust-lang/crates.io-index"
428
+
checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4"
429
+
dependencies = [
430
+
"fnv",
431
+
"ident_case",
432
+
"proc-macro2",
433
+
"quote",
434
+
"strsim",
435
+
"syn 2.0.111",
436
+
]
437
+
438
+
[[package]]
439
+
name = "darling_macro"
440
+
version = "0.21.3"
441
+
source = "registry+https://github.com/rust-lang/crates.io-index"
442
+
checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81"
443
+
dependencies = [
444
+
"darling_core",
445
+
"quote",
446
+
"syn 2.0.111",
447
+
]
448
+
449
+
[[package]]
450
+
name = "data-encoding"
451
+
version = "2.9.0"
452
+
source = "registry+https://github.com/rust-lang/crates.io-index"
453
+
checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476"
454
+
455
+
[[package]]
456
+
name = "der"
457
+
version = "0.7.10"
458
+
source = "registry+https://github.com/rust-lang/crates.io-index"
459
+
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
460
+
dependencies = [
461
+
"const-oid",
462
+
"pem-rfc7468",
463
+
"zeroize",
262
464
]
263
465
264
466
[[package]]
265
467
name = "derive_more"
266
-
version = "2.0.1"
468
+
version = "2.1.0"
267
469
source = "registry+https://github.com/rust-lang/crates.io-index"
268
-
checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
470
+
checksum = "10b768e943bed7bf2cab53df09f4bc34bfd217cdb57d971e769874c9a6710618"
269
471
dependencies = [
270
472
"derive_more-impl",
271
473
]
272
474
273
475
[[package]]
274
476
name = "derive_more-impl"
275
-
version = "2.0.1"
477
+
version = "2.1.0"
276
478
source = "registry+https://github.com/rust-lang/crates.io-index"
277
-
checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
479
+
checksum = "6d286bfdaf75e988b4a78e013ecd79c581e06399ab53fbacd2d916c2f904f30b"
278
480
dependencies = [
481
+
"convert_case",
279
482
"proc-macro2",
280
483
"quote",
281
-
"syn 2.0.103",
484
+
"rustc_version",
485
+
"syn 2.0.111",
282
486
"unicode-xid",
283
487
]
284
488
···
299
503
]
300
504
301
505
[[package]]
302
-
name = "either"
303
-
version = "1.15.0"
506
+
name = "digest"
507
+
version = "0.10.7"
508
+
source = "registry+https://github.com/rust-lang/crates.io-index"
509
+
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
510
+
dependencies = [
511
+
"block-buffer",
512
+
"const-oid",
513
+
"crypto-common",
514
+
"subtle",
515
+
]
516
+
517
+
[[package]]
518
+
name = "displaydoc"
519
+
version = "0.2.5"
520
+
source = "registry+https://github.com/rust-lang/crates.io-index"
521
+
checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
522
+
dependencies = [
523
+
"proc-macro2",
524
+
"quote",
525
+
"syn 2.0.111",
526
+
]
527
+
528
+
[[package]]
529
+
name = "dotenvy"
530
+
version = "0.15.7"
531
+
source = "registry+https://github.com/rust-lang/crates.io-index"
532
+
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
533
+
534
+
[[package]]
535
+
name = "downcast-rs"
536
+
version = "1.2.1"
537
+
source = "registry+https://github.com/rust-lang/crates.io-index"
538
+
checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2"
539
+
540
+
[[package]]
541
+
name = "ed25519"
542
+
version = "2.2.3"
304
543
source = "registry+https://github.com/rust-lang/crates.io-index"
305
-
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
544
+
checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53"
545
+
dependencies = [
546
+
"pkcs8",
547
+
"signature",
548
+
]
306
549
307
550
[[package]]
308
-
name = "encode_unicode"
309
-
version = "1.0.0"
551
+
name = "ed25519-dalek"
552
+
version = "2.2.0"
310
553
source = "registry+https://github.com/rust-lang/crates.io-index"
311
-
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
554
+
checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9"
555
+
dependencies = [
556
+
"curve25519-dalek",
557
+
"ed25519",
558
+
"serde",
559
+
"sha2",
560
+
"subtle",
561
+
"zeroize",
562
+
]
563
+
564
+
[[package]]
565
+
name = "either"
566
+
version = "1.15.0"
567
+
source = "registry+https://github.com/rust-lang/crates.io-index"
568
+
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
569
+
dependencies = [
570
+
"serde",
571
+
]
312
572
313
573
[[package]]
314
574
name = "enum-display-derive"
···
322
582
]
323
583
324
584
[[package]]
585
+
name = "enum_dispatch"
586
+
version = "0.3.13"
587
+
source = "registry+https://github.com/rust-lang/crates.io-index"
588
+
checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd"
589
+
dependencies = [
590
+
"once_cell",
591
+
"proc-macro2",
592
+
"quote",
593
+
"syn 2.0.111",
594
+
]
595
+
596
+
[[package]]
325
597
name = "equivalent"
326
598
version = "1.0.2"
327
599
source = "registry+https://github.com/rust-lang/crates.io-index"
···
329
601
330
602
[[package]]
331
603
name = "errno"
332
-
version = "0.3.12"
604
+
version = "0.3.13"
333
605
source = "registry+https://github.com/rust-lang/crates.io-index"
334
-
checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18"
606
+
checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
335
607
dependencies = [
336
608
"libc",
337
-
"windows-sys 0.59.0",
609
+
"windows-sys 0.52.0",
610
+
]
611
+
612
+
[[package]]
613
+
name = "etcetera"
614
+
version = "0.8.0"
615
+
source = "registry+https://github.com/rust-lang/crates.io-index"
616
+
checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943"
617
+
dependencies = [
618
+
"cfg-if",
619
+
"home",
620
+
"windows-sys 0.48.0",
621
+
]
622
+
623
+
[[package]]
624
+
name = "event-listener"
625
+
version = "5.4.1"
626
+
source = "registry+https://github.com/rust-lang/crates.io-index"
627
+
checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab"
628
+
dependencies = [
629
+
"concurrent-queue",
630
+
"parking",
631
+
"pin-project-lite",
338
632
]
339
633
340
634
[[package]]
···
344
638
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
345
639
346
640
[[package]]
641
+
name = "fiat-crypto"
642
+
version = "0.2.9"
643
+
source = "registry+https://github.com/rust-lang/crates.io-index"
644
+
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
645
+
646
+
[[package]]
647
+
name = "filedescriptor"
648
+
version = "0.8.3"
649
+
source = "registry+https://github.com/rust-lang/crates.io-index"
650
+
checksum = "e40758ed24c9b2eeb76c35fb0aebc66c626084edd827e07e1552279814c6682d"
651
+
dependencies = [
652
+
"libc",
653
+
"thiserror 1.0.69",
654
+
"winapi",
655
+
]
656
+
657
+
[[package]]
658
+
name = "find-msvc-tools"
659
+
version = "0.1.4"
660
+
source = "registry+https://github.com/rust-lang/crates.io-index"
661
+
checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127"
662
+
663
+
[[package]]
347
664
name = "fixedbitset"
348
665
version = "0.5.7"
349
666
source = "registry+https://github.com/rust-lang/crates.io-index"
350
667
checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
351
668
352
669
[[package]]
670
+
name = "flume"
671
+
version = "0.11.1"
672
+
source = "registry+https://github.com/rust-lang/crates.io-index"
673
+
checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095"
674
+
dependencies = [
675
+
"futures-core",
676
+
"futures-sink",
677
+
"spin",
678
+
]
679
+
680
+
[[package]]
353
681
name = "fnv"
354
682
version = "1.0.7"
355
683
source = "registry+https://github.com/rust-lang/crates.io-index"
356
684
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
357
685
358
686
[[package]]
687
+
name = "foldhash"
688
+
version = "0.1.5"
689
+
source = "registry+https://github.com/rust-lang/crates.io-index"
690
+
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
691
+
692
+
[[package]]
693
+
name = "form_urlencoded"
694
+
version = "1.2.2"
695
+
source = "registry+https://github.com/rust-lang/crates.io-index"
696
+
checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
697
+
dependencies = [
698
+
"percent-encoding",
699
+
]
700
+
701
+
[[package]]
359
702
name = "fuchsia-cprng"
360
703
version = "0.1.1"
361
704
source = "registry+https://github.com/rust-lang/crates.io-index"
···
404
747
]
405
748
406
749
[[package]]
750
+
name = "futures-intrusive"
751
+
version = "0.5.0"
752
+
source = "registry+https://github.com/rust-lang/crates.io-index"
753
+
checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f"
754
+
dependencies = [
755
+
"futures-core",
756
+
"lock_api",
757
+
"parking_lot",
758
+
]
759
+
760
+
[[package]]
407
761
name = "futures-io"
408
762
version = "0.3.31"
409
763
source = "registry+https://github.com/rust-lang/crates.io-index"
···
417
771
dependencies = [
418
772
"proc-macro2",
419
773
"quote",
420
-
"syn 2.0.103",
774
+
"syn 2.0.111",
421
775
]
422
776
423
777
[[package]]
···
451
805
]
452
806
453
807
[[package]]
808
+
name = "generic-array"
809
+
version = "0.14.9"
810
+
source = "registry+https://github.com/rust-lang/crates.io-index"
811
+
checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2"
812
+
dependencies = [
813
+
"typenum",
814
+
"version_check",
815
+
]
816
+
817
+
[[package]]
454
818
name = "gethostname"
455
-
version = "1.0.2"
819
+
version = "1.1.0"
456
820
source = "registry+https://github.com/rust-lang/crates.io-index"
457
-
checksum = "fc257fdb4038301ce4b9cd1b3b51704509692bb3ff716a410cbd07925d9dae55"
821
+
checksum = "1bd49230192a3797a9a4d6abe9b3eed6f7fa4c8a8a4947977c6f80025f92cbd8"
458
822
dependencies = [
459
823
"rustix",
460
-
"windows-targets",
824
+
"windows-link 0.2.1",
825
+
]
826
+
827
+
[[package]]
828
+
name = "getrandom"
829
+
version = "0.2.16"
830
+
source = "registry+https://github.com/rust-lang/crates.io-index"
831
+
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
832
+
dependencies = [
833
+
"cfg-if",
834
+
"libc",
835
+
"wasi 0.11.1+wasi-snapshot-preview1",
461
836
]
462
837
463
838
[[package]]
···
479
854
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
480
855
481
856
[[package]]
857
+
name = "gjson"
858
+
version = "0.8.1"
859
+
source = "registry+https://github.com/rust-lang/crates.io-index"
860
+
checksum = "43503cc176394dd30a6525f5f36e838339b8b5619be33ed9a7783841580a97b6"
861
+
862
+
[[package]]
482
863
name = "hashbrown"
483
-
version = "0.15.4"
864
+
version = "0.15.5"
484
865
source = "registry+https://github.com/rust-lang/crates.io-index"
485
-
checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
866
+
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
867
+
dependencies = [
868
+
"allocator-api2",
869
+
"equivalent",
870
+
"foldhash",
871
+
]
872
+
873
+
[[package]]
874
+
name = "hashlink"
875
+
version = "0.10.0"
876
+
source = "registry+https://github.com/rust-lang/crates.io-index"
877
+
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
878
+
dependencies = [
879
+
"hashbrown",
880
+
]
486
881
487
882
[[package]]
488
883
name = "heck"
···
491
886
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
492
887
493
888
[[package]]
889
+
name = "hermit-abi"
890
+
version = "0.5.2"
891
+
source = "registry+https://github.com/rust-lang/crates.io-index"
892
+
checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c"
893
+
894
+
[[package]]
895
+
name = "hex"
896
+
version = "0.4.3"
897
+
source = "registry+https://github.com/rust-lang/crates.io-index"
898
+
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
899
+
900
+
[[package]]
901
+
name = "hkdf"
902
+
version = "0.12.4"
903
+
source = "registry+https://github.com/rust-lang/crates.io-index"
904
+
checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7"
905
+
dependencies = [
906
+
"hmac",
907
+
]
908
+
909
+
[[package]]
910
+
name = "hmac"
911
+
version = "0.12.1"
912
+
source = "registry+https://github.com/rust-lang/crates.io-index"
913
+
checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
914
+
dependencies = [
915
+
"digest",
916
+
]
917
+
918
+
[[package]]
919
+
name = "home"
920
+
version = "0.5.12"
921
+
source = "registry+https://github.com/rust-lang/crates.io-index"
922
+
checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d"
923
+
dependencies = [
924
+
"windows-sys 0.61.2",
925
+
]
926
+
927
+
[[package]]
928
+
name = "icu_collections"
929
+
version = "2.0.0"
930
+
source = "registry+https://github.com/rust-lang/crates.io-index"
931
+
checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
932
+
dependencies = [
933
+
"displaydoc",
934
+
"potential_utf",
935
+
"yoke",
936
+
"zerofrom",
937
+
"zerovec",
938
+
]
939
+
940
+
[[package]]
941
+
name = "icu_locale_core"
942
+
version = "2.0.0"
943
+
source = "registry+https://github.com/rust-lang/crates.io-index"
944
+
checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
945
+
dependencies = [
946
+
"displaydoc",
947
+
"litemap",
948
+
"tinystr",
949
+
"writeable",
950
+
"zerovec",
951
+
]
952
+
953
+
[[package]]
954
+
name = "icu_normalizer"
955
+
version = "2.0.0"
956
+
source = "registry+https://github.com/rust-lang/crates.io-index"
957
+
checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
958
+
dependencies = [
959
+
"displaydoc",
960
+
"icu_collections",
961
+
"icu_normalizer_data",
962
+
"icu_properties",
963
+
"icu_provider",
964
+
"smallvec",
965
+
"zerovec",
966
+
]
967
+
968
+
[[package]]
969
+
name = "icu_normalizer_data"
970
+
version = "2.0.0"
971
+
source = "registry+https://github.com/rust-lang/crates.io-index"
972
+
checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
973
+
974
+
[[package]]
975
+
name = "icu_properties"
976
+
version = "2.0.1"
977
+
source = "registry+https://github.com/rust-lang/crates.io-index"
978
+
checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
979
+
dependencies = [
980
+
"displaydoc",
981
+
"icu_collections",
982
+
"icu_locale_core",
983
+
"icu_properties_data",
984
+
"icu_provider",
985
+
"potential_utf",
986
+
"zerotrie",
987
+
"zerovec",
988
+
]
989
+
990
+
[[package]]
991
+
name = "icu_properties_data"
992
+
version = "2.0.1"
993
+
source = "registry+https://github.com/rust-lang/crates.io-index"
994
+
checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
995
+
996
+
[[package]]
997
+
name = "icu_provider"
998
+
version = "2.0.0"
999
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1000
+
checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
1001
+
dependencies = [
1002
+
"displaydoc",
1003
+
"icu_locale_core",
1004
+
"stable_deref_trait",
1005
+
"tinystr",
1006
+
"writeable",
1007
+
"yoke",
1008
+
"zerofrom",
1009
+
"zerotrie",
1010
+
"zerovec",
1011
+
]
1012
+
1013
+
[[package]]
1014
+
name = "ident_case"
1015
+
version = "1.0.1"
1016
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1017
+
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
1018
+
1019
+
[[package]]
1020
+
name = "idna"
1021
+
version = "1.1.0"
1022
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1023
+
checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
1024
+
dependencies = [
1025
+
"idna_adapter",
1026
+
"smallvec",
1027
+
"utf8_iter",
1028
+
]
1029
+
1030
+
[[package]]
1031
+
name = "idna_adapter"
1032
+
version = "1.2.1"
1033
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1034
+
checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
1035
+
dependencies = [
1036
+
"icu_normalizer",
1037
+
"icu_properties",
1038
+
]
1039
+
1040
+
[[package]]
494
1041
name = "im"
495
1042
version = "15.1.0"
496
1043
source = "registry+https://github.com/rust-lang/crates.io-index"
···
507
1054
508
1055
[[package]]
509
1056
name = "indexmap"
510
-
version = "2.9.0"
1057
+
version = "2.10.0"
511
1058
source = "registry+https://github.com/rust-lang/crates.io-index"
512
-
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
1059
+
checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
513
1060
dependencies = [
514
1061
"equivalent",
515
1062
"hashbrown",
516
1063
]
517
1064
518
1065
[[package]]
519
-
name = "indicatif"
520
-
version = "0.17.11"
1066
+
name = "is-terminal"
1067
+
version = "0.4.16"
521
1068
source = "registry+https://github.com/rust-lang/crates.io-index"
522
-
checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235"
1069
+
checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
523
1070
dependencies = [
524
-
"console",
525
-
"number_prefix",
526
-
"portable-atomic",
527
-
"unicode-width 0.2.1",
528
-
"vt100",
529
-
"web-time",
1071
+
"hermit-abi",
1072
+
"libc",
1073
+
"windows-sys 0.59.0",
1074
+
]
1075
+
1076
+
[[package]]
1077
+
name = "is_ci"
1078
+
version = "1.2.0"
1079
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1080
+
checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45"
1081
+
1082
+
[[package]]
1083
+
name = "is_executable"
1084
+
version = "1.0.5"
1085
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1086
+
checksum = "baabb8b4867b26294d818bf3f651a454b6901431711abb96e296245888d6e8c4"
1087
+
dependencies = [
1088
+
"windows-sys 0.60.2",
530
1089
]
531
1090
532
1091
[[package]]
···
551
1110
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
552
1111
553
1112
[[package]]
554
-
name = "js-sys"
555
-
version = "0.3.77"
1113
+
name = "jobserver"
1114
+
version = "0.1.34"
556
1115
source = "registry+https://github.com/rust-lang/crates.io-index"
557
-
checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
1116
+
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
558
1117
dependencies = [
559
-
"once_cell",
560
-
"wasm-bindgen",
1118
+
"getrandom 0.3.3",
1119
+
"libc",
561
1120
]
562
1121
563
1122
[[package]]
564
-
name = "key_agent"
565
-
version = "0.3.0"
1123
+
name = "lazy_static"
1124
+
version = "1.5.0"
1125
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1126
+
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
566
1127
dependencies = [
567
-
"anyhow",
568
-
"nix",
569
-
"prost",
570
-
"prost-build",
571
-
"tokio",
1128
+
"spin",
572
1129
]
573
1130
574
1131
[[package]]
575
-
name = "lazy_static"
576
-
version = "1.5.0"
1132
+
name = "libc"
1133
+
version = "0.2.175"
577
1134
source = "registry+https://github.com/rust-lang/crates.io-index"
578
-
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
1135
+
checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"
579
1136
580
1137
[[package]]
581
-
name = "lib"
582
-
version = "0.3.0"
1138
+
name = "libm"
1139
+
version = "0.2.15"
1140
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1141
+
checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
1142
+
1143
+
[[package]]
1144
+
name = "libmimalloc-sys"
1145
+
version = "0.1.44"
1146
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1147
+
checksum = "667f4fec20f29dfc6bc7357c582d91796c169ad7e2fce709468aefeb2c099870"
583
1148
dependencies = [
584
-
"async-trait",
585
-
"derive_more",
586
-
"futures",
587
-
"gethostname",
588
-
"im",
589
-
"key_agent",
590
-
"nix",
591
-
"prost",
592
-
"regex",
593
-
"serde",
594
-
"serde-query",
595
-
"serde_json",
596
-
"serde_repr",
597
-
"tempdir",
598
-
"thiserror",
599
-
"tokio",
600
-
"tracing",
601
-
"tracing-indicatif",
602
-
"tracing-subscriber",
1149
+
"cc",
1150
+
"libc",
1151
+
]
1152
+
1153
+
[[package]]
1154
+
name = "libredox"
1155
+
version = "0.1.10"
1156
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1157
+
checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb"
1158
+
dependencies = [
1159
+
"bitflags 2.9.1",
1160
+
"libc",
1161
+
"redox_syscall",
603
1162
]
604
1163
605
1164
[[package]]
606
-
name = "libc"
607
-
version = "0.2.174"
1165
+
name = "libsqlite3-sys"
1166
+
version = "0.30.1"
608
1167
source = "registry+https://github.com/rust-lang/crates.io-index"
609
-
checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
1168
+
checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
1169
+
dependencies = [
1170
+
"cc",
1171
+
"pkg-config",
1172
+
"vcpkg",
1173
+
]
610
1174
611
1175
[[package]]
612
1176
name = "linux-raw-sys"
613
1177
version = "0.9.4"
614
1178
source = "registry+https://github.com/rust-lang/crates.io-index"
615
1179
checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
1180
+
1181
+
[[package]]
1182
+
name = "litemap"
1183
+
version = "0.8.0"
1184
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1185
+
checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
616
1186
617
1187
[[package]]
618
1188
name = "lock_api"
···
631
1201
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
632
1202
633
1203
[[package]]
634
-
name = "logos"
635
-
version = "0.12.1"
1204
+
name = "md-5"
1205
+
version = "0.10.6"
636
1206
source = "registry+https://github.com/rust-lang/crates.io-index"
637
-
checksum = "bf8b031682c67a8e3d5446840f9573eb7fe26efe7ec8d195c9ac4c0647c502f1"
1207
+
checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
638
1208
dependencies = [
639
-
"logos-derive",
1209
+
"cfg-if",
1210
+
"digest",
640
1211
]
641
1212
642
1213
[[package]]
643
-
name = "logos-derive"
644
-
version = "0.12.1"
1214
+
name = "memchr"
1215
+
version = "2.7.5"
1216
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1217
+
checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
1218
+
1219
+
[[package]]
1220
+
name = "miette"
1221
+
version = "7.6.0"
645
1222
source = "registry+https://github.com/rust-lang/crates.io-index"
646
-
checksum = "a1d849148dbaf9661a6151d1ca82b13bb4c4c128146a88d05253b38d4e2f496c"
1223
+
checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7"
647
1224
dependencies = [
648
-
"beef",
649
-
"fnv",
1225
+
"backtrace",
1226
+
"backtrace-ext",
1227
+
"cfg-if",
1228
+
"miette-derive",
1229
+
"owo-colors",
1230
+
"supports-color 3.0.2",
1231
+
"supports-hyperlinks",
1232
+
"supports-unicode",
1233
+
"terminal_size",
1234
+
"textwrap",
1235
+
"unicode-width 0.1.14",
1236
+
]
1237
+
1238
+
[[package]]
1239
+
name = "miette-derive"
1240
+
version = "7.6.0"
1241
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1242
+
checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b"
1243
+
dependencies = [
650
1244
"proc-macro2",
651
1245
"quote",
652
-
"regex-syntax 0.6.29",
653
-
"syn 1.0.109",
1246
+
"syn 2.0.111",
654
1247
]
655
1248
656
1249
[[package]]
657
-
name = "memchr"
658
-
version = "2.7.5"
1250
+
name = "mimalloc"
1251
+
version = "0.1.48"
659
1252
source = "registry+https://github.com/rust-lang/crates.io-index"
660
-
checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
1253
+
checksum = "e1ee66a4b64c74f4ef288bcbb9192ad9c3feaad75193129ac8509af543894fd8"
1254
+
dependencies = [
1255
+
"libmimalloc-sys",
1256
+
]
661
1257
662
1258
[[package]]
663
1259
name = "miniz_oxide"
···
693
1289
694
1290
[[package]]
695
1291
name = "nix"
1292
+
version = "0.28.0"
1293
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1294
+
checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
1295
+
dependencies = [
1296
+
"bitflags 2.9.1",
1297
+
"cfg-if",
1298
+
"cfg_aliases 0.1.1",
1299
+
"libc",
1300
+
]
1301
+
1302
+
[[package]]
1303
+
name = "nix"
696
1304
version = "0.30.1"
697
1305
source = "registry+https://github.com/rust-lang/crates.io-index"
698
1306
checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
699
1307
dependencies = [
700
-
"bitflags",
1308
+
"bitflags 2.9.1",
701
1309
"cfg-if",
702
-
"cfg_aliases",
1310
+
"cfg_aliases 0.2.1",
703
1311
"libc",
704
1312
]
705
1313
706
1314
[[package]]
1315
+
name = "nix-compat"
1316
+
version = "0.1.0"
1317
+
source = "git+https://git.snix.dev/snix/snix.git#4aaef4cdf6f7766eedcfe1b5bad8f1c4e4d05c12"
1318
+
dependencies = [
1319
+
"bitflags 2.9.1",
1320
+
"bstr",
1321
+
"bytes",
1322
+
"data-encoding",
1323
+
"ed25519",
1324
+
"ed25519-dalek",
1325
+
"futures",
1326
+
"mimalloc",
1327
+
"nix-compat-derive",
1328
+
"nom",
1329
+
"num_enum",
1330
+
"pin-project-lite",
1331
+
"serde",
1332
+
"serde_json",
1333
+
"serde_with",
1334
+
"sha2",
1335
+
"thiserror 2.0.17",
1336
+
"tokio",
1337
+
"tracing",
1338
+
"url",
1339
+
]
1340
+
1341
+
[[package]]
1342
+
name = "nix-compat-derive"
1343
+
version = "0.1.0"
1344
+
source = "git+https://git.snix.dev/snix/snix.git#4aaef4cdf6f7766eedcfe1b5bad8f1c4e4d05c12"
1345
+
dependencies = [
1346
+
"proc-macro2",
1347
+
"quote",
1348
+
"syn 2.0.111",
1349
+
]
1350
+
1351
+
[[package]]
1352
+
name = "nom"
1353
+
version = "8.0.0"
1354
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1355
+
checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405"
1356
+
dependencies = [
1357
+
"memchr",
1358
+
]
1359
+
1360
+
[[package]]
707
1361
name = "nu-ansi-term"
708
-
version = "0.46.0"
1362
+
version = "0.50.1"
709
1363
source = "registry+https://github.com/rust-lang/crates.io-index"
710
-
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
1364
+
checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399"
1365
+
dependencies = [
1366
+
"windows-sys 0.52.0",
1367
+
]
1368
+
1369
+
[[package]]
1370
+
name = "num-bigint-dig"
1371
+
version = "0.8.6"
1372
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1373
+
checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7"
1374
+
dependencies = [
1375
+
"lazy_static",
1376
+
"libm",
1377
+
"num-integer",
1378
+
"num-iter",
1379
+
"num-traits",
1380
+
"rand 0.8.5",
1381
+
"smallvec",
1382
+
"zeroize",
1383
+
]
1384
+
1385
+
[[package]]
1386
+
name = "num-integer"
1387
+
version = "0.1.46"
1388
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1389
+
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
1390
+
dependencies = [
1391
+
"num-traits",
1392
+
]
1393
+
1394
+
[[package]]
1395
+
name = "num-iter"
1396
+
version = "0.1.45"
1397
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1398
+
checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
711
1399
dependencies = [
712
-
"overload",
713
-
"winapi",
1400
+
"autocfg",
1401
+
"num-integer",
1402
+
"num-traits",
714
1403
]
715
1404
716
1405
[[package]]
···
720
1409
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
721
1410
dependencies = [
722
1411
"autocfg",
1412
+
"libm",
723
1413
]
724
1414
725
1415
[[package]]
726
-
name = "number_prefix"
727
-
version = "0.4.0"
1416
+
name = "num_enum"
1417
+
version = "0.7.5"
728
1418
source = "registry+https://github.com/rust-lang/crates.io-index"
729
-
checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
1419
+
checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c"
1420
+
dependencies = [
1421
+
"num_enum_derive",
1422
+
"rustversion",
1423
+
]
1424
+
1425
+
[[package]]
1426
+
name = "num_enum_derive"
1427
+
version = "0.7.5"
1428
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1429
+
checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7"
1430
+
dependencies = [
1431
+
"proc-macro-crate",
1432
+
"proc-macro2",
1433
+
"quote",
1434
+
"syn 2.0.111",
1435
+
]
1436
+
1437
+
[[package]]
1438
+
name = "numtoa"
1439
+
version = "0.2.4"
1440
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1441
+
checksum = "6aa2c4e539b869820a2b82e1aef6ff40aa85e65decdd5185e83fb4b1249cd00f"
730
1442
731
1443
[[package]]
732
1444
name = "object"
···
750
1462
checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
751
1463
752
1464
[[package]]
753
-
name = "overload"
754
-
version = "0.1.1"
1465
+
name = "owo-colors"
1466
+
version = "4.2.3"
755
1467
source = "registry+https://github.com/rust-lang/crates.io-index"
756
-
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
1468
+
checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52"
1469
+
dependencies = [
1470
+
"supports-color 2.1.0",
1471
+
"supports-color 3.0.2",
1472
+
]
1473
+
1474
+
[[package]]
1475
+
name = "parking"
1476
+
version = "2.2.1"
1477
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1478
+
checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
757
1479
758
1480
[[package]]
759
1481
name = "parking_lot"
···
775
1497
"libc",
776
1498
"redox_syscall",
777
1499
"smallvec",
778
-
"windows-targets",
1500
+
"windows-targets 0.52.6",
1501
+
]
1502
+
1503
+
[[package]]
1504
+
name = "pem-rfc7468"
1505
+
version = "0.7.0"
1506
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1507
+
checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412"
1508
+
dependencies = [
1509
+
"base64ct",
779
1510
]
1511
+
1512
+
[[package]]
1513
+
name = "percent-encoding"
1514
+
version = "2.3.2"
1515
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1516
+
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
780
1517
781
1518
[[package]]
782
1519
name = "petgraph"
···
801
1538
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
802
1539
803
1540
[[package]]
804
-
name = "portable-atomic"
805
-
version = "1.11.1"
1541
+
name = "pkcs1"
1542
+
version = "0.7.5"
806
1543
source = "registry+https://github.com/rust-lang/crates.io-index"
807
-
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
1544
+
checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f"
1545
+
dependencies = [
1546
+
"der",
1547
+
"pkcs8",
1548
+
"spki",
1549
+
]
808
1550
809
1551
[[package]]
810
-
name = "prettyplease"
811
-
version = "0.2.34"
1552
+
name = "pkcs8"
1553
+
version = "0.10.2"
812
1554
source = "registry+https://github.com/rust-lang/crates.io-index"
813
-
checksum = "6837b9e10d61f45f987d50808f83d1ee3d206c66acf650c3e4ae2e1f6ddedf55"
1555
+
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
814
1556
dependencies = [
815
-
"proc-macro2",
816
-
"syn 2.0.103",
1557
+
"der",
1558
+
"spki",
817
1559
]
818
1560
819
1561
[[package]]
820
-
name = "proc-macro-error"
821
-
version = "1.0.4"
1562
+
name = "pkg-config"
1563
+
version = "0.3.32"
822
1564
source = "registry+https://github.com/rust-lang/crates.io-index"
823
-
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
1565
+
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
1566
+
1567
+
[[package]]
1568
+
name = "portable-pty"
1569
+
version = "0.9.0"
1570
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1571
+
checksum = "b4a596a2b3d2752d94f51fac2d4a96737b8705dddd311a32b9af47211f08671e"
824
1572
dependencies = [
825
-
"proc-macro-error-attr",
826
-
"proc-macro2",
827
-
"quote",
828
-
"syn 1.0.109",
829
-
"version_check",
1573
+
"anyhow",
1574
+
"bitflags 1.3.2",
1575
+
"downcast-rs",
1576
+
"filedescriptor",
1577
+
"lazy_static",
1578
+
"libc",
1579
+
"log",
1580
+
"nix 0.28.0",
1581
+
"serial2",
1582
+
"shared_library",
1583
+
"shell-words",
1584
+
"winapi",
1585
+
"winreg",
830
1586
]
831
1587
832
1588
[[package]]
833
-
name = "proc-macro-error-attr"
834
-
version = "1.0.4"
1589
+
name = "potential_utf"
1590
+
version = "0.1.3"
1591
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1592
+
checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a"
1593
+
dependencies = [
1594
+
"zerovec",
1595
+
]
1596
+
1597
+
[[package]]
1598
+
name = "ppv-lite86"
1599
+
version = "0.2.21"
1600
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1601
+
checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
1602
+
dependencies = [
1603
+
"zerocopy",
1604
+
]
1605
+
1606
+
[[package]]
1607
+
name = "prettyplease"
1608
+
version = "0.2.36"
835
1609
source = "registry+https://github.com/rust-lang/crates.io-index"
836
-
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
1610
+
checksum = "ff24dfcda44452b9816fff4cd4227e1bb73ff5a2f1bc1105aa92fb8565ce44d2"
837
1611
dependencies = [
838
1612
"proc-macro2",
839
-
"quote",
840
-
"version_check",
1613
+
"syn 2.0.111",
1614
+
]
1615
+
1616
+
[[package]]
1617
+
name = "proc-macro-crate"
1618
+
version = "3.4.0"
1619
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1620
+
checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983"
1621
+
dependencies = [
1622
+
"toml_edit",
841
1623
]
842
1624
843
1625
[[package]]
844
1626
name = "proc-macro2"
845
-
version = "1.0.95"
1627
+
version = "1.0.103"
846
1628
source = "registry+https://github.com/rust-lang/crates.io-index"
847
-
checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
1629
+
checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
848
1630
dependencies = [
849
1631
"unicode-ident",
850
1632
]
···
875
1657
"prost",
876
1658
"prost-types",
877
1659
"regex",
878
-
"syn 2.0.103",
1660
+
"syn 2.0.111",
879
1661
"tempfile",
880
1662
]
881
1663
···
889
1671
"itertools",
890
1672
"proc-macro2",
891
1673
"quote",
892
-
"syn 2.0.103",
1674
+
"syn 2.0.111",
893
1675
]
894
1676
895
1677
[[package]]
···
930
1712
]
931
1713
932
1714
[[package]]
1715
+
name = "rand"
1716
+
version = "0.8.5"
1717
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1718
+
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
1719
+
dependencies = [
1720
+
"libc",
1721
+
"rand_chacha 0.3.1",
1722
+
"rand_core 0.6.4",
1723
+
]
1724
+
1725
+
[[package]]
1726
+
name = "rand"
1727
+
version = "0.9.2"
1728
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1729
+
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
1730
+
dependencies = [
1731
+
"rand_chacha 0.9.0",
1732
+
"rand_core 0.9.3",
1733
+
]
1734
+
1735
+
[[package]]
1736
+
name = "rand_chacha"
1737
+
version = "0.3.1"
1738
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1739
+
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
1740
+
dependencies = [
1741
+
"ppv-lite86",
1742
+
"rand_core 0.6.4",
1743
+
]
1744
+
1745
+
[[package]]
1746
+
name = "rand_chacha"
1747
+
version = "0.9.0"
1748
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1749
+
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
1750
+
dependencies = [
1751
+
"ppv-lite86",
1752
+
"rand_core 0.9.3",
1753
+
]
1754
+
1755
+
[[package]]
933
1756
name = "rand_core"
934
1757
version = "0.3.1"
935
1758
source = "registry+https://github.com/rust-lang/crates.io-index"
···
949
1772
version = "0.6.4"
950
1773
source = "registry+https://github.com/rust-lang/crates.io-index"
951
1774
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
1775
+
dependencies = [
1776
+
"getrandom 0.2.16",
1777
+
]
1778
+
1779
+
[[package]]
1780
+
name = "rand_core"
1781
+
version = "0.9.3"
1782
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1783
+
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
1784
+
dependencies = [
1785
+
"getrandom 0.3.3",
1786
+
]
952
1787
953
1788
[[package]]
954
1789
name = "rand_xoshiro"
···
970
1805
971
1806
[[package]]
972
1807
name = "redox_syscall"
973
-
version = "0.5.13"
1808
+
version = "0.5.17"
974
1809
source = "registry+https://github.com/rust-lang/crates.io-index"
975
-
checksum = "0d04b7d0ee6b4a0207a0a7adb104d23ecb0b47d6beae7152d0fa34b692b29fd6"
1810
+
checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77"
976
1811
dependencies = [
977
-
"bitflags",
1812
+
"bitflags 2.9.1",
978
1813
]
979
1814
980
1815
[[package]]
981
1816
name = "regex"
982
-
version = "1.11.1"
1817
+
version = "1.12.2"
983
1818
source = "registry+https://github.com/rust-lang/crates.io-index"
984
-
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
1819
+
checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
985
1820
dependencies = [
986
1821
"aho-corasick",
987
1822
"memchr",
988
1823
"regex-automata",
989
-
"regex-syntax 0.8.5",
1824
+
"regex-syntax",
990
1825
]
991
1826
992
1827
[[package]]
993
1828
name = "regex-automata"
994
-
version = "0.4.9"
1829
+
version = "0.4.13"
995
1830
source = "registry+https://github.com/rust-lang/crates.io-index"
996
-
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
1831
+
checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
997
1832
dependencies = [
998
1833
"aho-corasick",
999
1834
"memchr",
1000
-
"regex-syntax 0.8.5",
1835
+
"regex-syntax",
1001
1836
]
1002
1837
1003
1838
[[package]]
1004
1839
name = "regex-syntax"
1005
-
version = "0.6.29"
1006
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1007
-
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
1008
-
1009
-
[[package]]
1010
-
name = "regex-syntax"
1011
1840
version = "0.8.5"
1012
1841
source = "registry+https://github.com/rust-lang/crates.io-index"
1013
1842
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
···
1022
1851
]
1023
1852
1024
1853
[[package]]
1854
+
name = "rsa"
1855
+
version = "0.9.9"
1856
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1857
+
checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88"
1858
+
dependencies = [
1859
+
"const-oid",
1860
+
"digest",
1861
+
"num-bigint-dig",
1862
+
"num-integer",
1863
+
"num-traits",
1864
+
"pkcs1",
1865
+
"pkcs8",
1866
+
"rand_core 0.6.4",
1867
+
"signature",
1868
+
"spki",
1869
+
"subtle",
1870
+
"zeroize",
1871
+
]
1872
+
1873
+
[[package]]
1025
1874
name = "rustc-demangle"
1026
-
version = "0.1.25"
1875
+
version = "0.1.26"
1027
1876
source = "registry+https://github.com/rust-lang/crates.io-index"
1028
-
checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f"
1877
+
checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace"
1029
1878
1030
1879
[[package]]
1031
1880
name = "rustc-hash"
···
1034
1883
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
1035
1884
1036
1885
[[package]]
1886
+
name = "rustc_version"
1887
+
version = "0.4.1"
1888
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1889
+
checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
1890
+
dependencies = [
1891
+
"semver",
1892
+
]
1893
+
1894
+
[[package]]
1037
1895
name = "rustix"
1038
-
version = "1.0.7"
1896
+
version = "1.0.8"
1039
1897
source = "registry+https://github.com/rust-lang/crates.io-index"
1040
-
checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266"
1898
+
checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
1041
1899
dependencies = [
1042
-
"bitflags",
1900
+
"bitflags 2.9.1",
1043
1901
"errno",
1044
1902
"libc",
1045
1903
"linux-raw-sys",
1046
-
"windows-sys 0.59.0",
1904
+
"windows-sys 0.52.0",
1047
1905
]
1048
1906
1049
1907
[[package]]
1908
+
name = "rustversion"
1909
+
version = "1.0.22"
1910
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1911
+
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
1912
+
1913
+
[[package]]
1050
1914
name = "ryu"
1051
1915
version = "1.0.20"
1052
1916
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1059
1923
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
1060
1924
1061
1925
[[package]]
1926
+
name = "semver"
1927
+
version = "1.0.27"
1928
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1929
+
checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
1930
+
1931
+
[[package]]
1062
1932
name = "serde"
1063
-
version = "1.0.219"
1933
+
version = "1.0.228"
1064
1934
source = "registry+https://github.com/rust-lang/crates.io-index"
1065
-
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
1935
+
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
1066
1936
dependencies = [
1937
+
"serde_core",
1067
1938
"serde_derive",
1068
1939
]
1069
1940
1070
1941
[[package]]
1071
-
name = "serde-query"
1072
-
version = "0.2.0"
1942
+
name = "serde_core"
1943
+
version = "1.0.228"
1073
1944
source = "registry+https://github.com/rust-lang/crates.io-index"
1074
-
checksum = "eccf6e0453b6f1981f159a1da3e4c16427447921f282eff3bbe40cec28aeaf5f"
1945
+
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
1075
1946
dependencies = [
1076
-
"serde",
1077
-
"serde-query-derive",
1947
+
"serde_derive",
1078
1948
]
1079
1949
1080
1950
[[package]]
1081
-
name = "serde-query-core"
1082
-
version = "0.2.0"
1951
+
name = "serde_derive"
1952
+
version = "1.0.228"
1083
1953
source = "registry+https://github.com/rust-lang/crates.io-index"
1084
-
checksum = "350922b83e64ef1ac841b6c47a95d6cc1677735e5cad058eac0fb32e80796122"
1954
+
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
1085
1955
dependencies = [
1086
-
"logos",
1087
-
"proc-macro-error",
1088
1956
"proc-macro2",
1089
1957
"quote",
1090
-
"syn 1.0.109",
1958
+
"syn 2.0.111",
1959
+
]
1960
+
1961
+
[[package]]
1962
+
name = "serde_json"
1963
+
version = "1.0.145"
1964
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1965
+
checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
1966
+
dependencies = [
1967
+
"itoa",
1968
+
"memchr",
1969
+
"ryu",
1970
+
"serde",
1971
+
"serde_core",
1972
+
]
1973
+
1974
+
[[package]]
1975
+
name = "serde_urlencoded"
1976
+
version = "0.7.1"
1977
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1978
+
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
1979
+
dependencies = [
1980
+
"form_urlencoded",
1981
+
"itoa",
1982
+
"ryu",
1983
+
"serde",
1091
1984
]
1092
1985
1093
1986
[[package]]
1094
-
name = "serde-query-derive"
1095
-
version = "0.2.0"
1987
+
name = "serde_with"
1988
+
version = "3.15.0"
1096
1989
source = "registry+https://github.com/rust-lang/crates.io-index"
1097
-
checksum = "c56cc536c2da20c38f9c134d5a313e2b996f63fcc0540d25d3d3daeb1d04bb8f"
1990
+
checksum = "6093cd8c01b25262b84927e0f7151692158fab02d961e04c979d3903eba7ecc5"
1098
1991
dependencies = [
1099
-
"proc-macro-error",
1100
-
"quote",
1101
-
"serde-query-core",
1102
-
"syn 1.0.109",
1992
+
"serde_core",
1993
+
"serde_with_macros",
1103
1994
]
1104
1995
1105
1996
[[package]]
1106
-
name = "serde_derive"
1107
-
version = "1.0.219"
1997
+
name = "serde_with_macros"
1998
+
version = "3.15.0"
1108
1999
source = "registry+https://github.com/rust-lang/crates.io-index"
1109
-
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
2000
+
checksum = "a7e6c180db0816026a61afa1cff5344fb7ebded7e4d3062772179f2501481c27"
1110
2001
dependencies = [
2002
+
"darling",
1111
2003
"proc-macro2",
1112
2004
"quote",
1113
-
"syn 2.0.103",
2005
+
"syn 2.0.111",
2006
+
]
2007
+
2008
+
[[package]]
2009
+
name = "serial2"
2010
+
version = "0.2.31"
2011
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2012
+
checksum = "26e1e5956803a69ddd72ce2de337b577898801528749565def03515f82bad5bb"
2013
+
dependencies = [
2014
+
"cfg-if",
2015
+
"libc",
2016
+
"winapi",
1114
2017
]
1115
2018
1116
2019
[[package]]
1117
-
name = "serde_json"
1118
-
version = "1.0.140"
2020
+
name = "sha1"
2021
+
version = "0.10.6"
1119
2022
source = "registry+https://github.com/rust-lang/crates.io-index"
1120
-
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
2023
+
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
1121
2024
dependencies = [
1122
-
"itoa",
1123
-
"memchr",
1124
-
"ryu",
1125
-
"serde",
2025
+
"cfg-if",
2026
+
"cpufeatures",
2027
+
"digest",
1126
2028
]
1127
2029
1128
2030
[[package]]
1129
-
name = "serde_repr"
1130
-
version = "0.1.20"
2031
+
name = "sha2"
2032
+
version = "0.10.9"
1131
2033
source = "registry+https://github.com/rust-lang/crates.io-index"
1132
-
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
2034
+
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
1133
2035
dependencies = [
1134
-
"proc-macro2",
1135
-
"quote",
1136
-
"syn 2.0.103",
2036
+
"cfg-if",
2037
+
"cpufeatures",
2038
+
"digest",
1137
2039
]
1138
2040
1139
2041
[[package]]
···
1146
2048
]
1147
2049
1148
2050
[[package]]
2051
+
name = "shared_library"
2052
+
version = "0.1.9"
2053
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2054
+
checksum = "5a9e7e0f2bfae24d8a5b5a66c5b257a83c7412304311512a0c054cd5e619da11"
2055
+
dependencies = [
2056
+
"lazy_static",
2057
+
"libc",
2058
+
]
2059
+
2060
+
[[package]]
2061
+
name = "shell-words"
2062
+
version = "1.1.0"
2063
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2064
+
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
2065
+
2066
+
[[package]]
2067
+
name = "shlex"
2068
+
version = "1.3.0"
2069
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2070
+
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
2071
+
2072
+
[[package]]
2073
+
name = "signal-hook"
2074
+
version = "0.3.18"
2075
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2076
+
checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2"
2077
+
dependencies = [
2078
+
"libc",
2079
+
"signal-hook-registry",
2080
+
]
2081
+
2082
+
[[package]]
1149
2083
name = "signal-hook-registry"
1150
-
version = "1.4.5"
2084
+
version = "1.4.6"
1151
2085
source = "registry+https://github.com/rust-lang/crates.io-index"
1152
-
checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410"
2086
+
checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b"
1153
2087
dependencies = [
1154
2088
"libc",
1155
2089
]
1156
2090
1157
2091
[[package]]
2092
+
name = "signal-hook-tokio"
2093
+
version = "0.3.1"
2094
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2095
+
checksum = "213241f76fb1e37e27de3b6aa1b068a2c333233b59cca6634f634b80a27ecf1e"
2096
+
dependencies = [
2097
+
"futures-core",
2098
+
"libc",
2099
+
"signal-hook",
2100
+
"tokio",
2101
+
]
2102
+
2103
+
[[package]]
2104
+
name = "signature"
2105
+
version = "2.2.0"
2106
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2107
+
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
2108
+
dependencies = [
2109
+
"digest",
2110
+
"rand_core 0.6.4",
2111
+
]
2112
+
2113
+
[[package]]
1158
2114
name = "sized-chunks"
1159
2115
version = "0.6.5"
1160
2116
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1166
2122
1167
2123
[[package]]
1168
2124
name = "slab"
1169
-
version = "0.4.10"
2125
+
version = "0.4.11"
1170
2126
source = "registry+https://github.com/rust-lang/crates.io-index"
1171
-
checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d"
2127
+
checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589"
1172
2128
1173
2129
[[package]]
1174
2130
name = "smallvec"
1175
2131
version = "1.15.1"
1176
2132
source = "registry+https://github.com/rust-lang/crates.io-index"
1177
2133
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
2134
+
dependencies = [
2135
+
"serde",
2136
+
]
1178
2137
1179
2138
[[package]]
1180
2139
name = "socket2"
1181
-
version = "0.5.10"
2140
+
version = "0.6.0"
1182
2141
source = "registry+https://github.com/rust-lang/crates.io-index"
1183
-
checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678"
2142
+
checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807"
1184
2143
dependencies = [
1185
2144
"libc",
1186
-
"windows-sys 0.52.0",
2145
+
"windows-sys 0.59.0",
2146
+
]
2147
+
2148
+
[[package]]
2149
+
name = "spin"
2150
+
version = "0.9.8"
2151
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2152
+
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
2153
+
dependencies = [
2154
+
"lock_api",
2155
+
]
2156
+
2157
+
[[package]]
2158
+
name = "spki"
2159
+
version = "0.7.3"
2160
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2161
+
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
2162
+
dependencies = [
2163
+
"base64ct",
2164
+
"der",
2165
+
]
2166
+
2167
+
[[package]]
2168
+
name = "sqlx"
2169
+
version = "0.8.6"
2170
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2171
+
checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc"
2172
+
dependencies = [
2173
+
"sqlx-core",
2174
+
"sqlx-macros",
2175
+
"sqlx-mysql",
2176
+
"sqlx-postgres",
2177
+
"sqlx-sqlite",
2178
+
]
2179
+
2180
+
[[package]]
2181
+
name = "sqlx-core"
2182
+
version = "0.8.6"
2183
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2184
+
checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6"
2185
+
dependencies = [
2186
+
"base64",
2187
+
"bytes",
2188
+
"crc",
2189
+
"crossbeam-queue",
2190
+
"either",
2191
+
"event-listener",
2192
+
"futures-core",
2193
+
"futures-intrusive",
2194
+
"futures-io",
2195
+
"futures-util",
2196
+
"hashbrown",
2197
+
"hashlink",
2198
+
"indexmap",
2199
+
"log",
2200
+
"memchr",
2201
+
"once_cell",
2202
+
"percent-encoding",
2203
+
"serde",
2204
+
"serde_json",
2205
+
"sha2",
2206
+
"smallvec",
2207
+
"thiserror 2.0.17",
2208
+
"tokio",
2209
+
"tokio-stream",
2210
+
"tracing",
2211
+
"url",
2212
+
]
2213
+
2214
+
[[package]]
2215
+
name = "sqlx-macros"
2216
+
version = "0.8.6"
2217
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2218
+
checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d"
2219
+
dependencies = [
2220
+
"proc-macro2",
2221
+
"quote",
2222
+
"sqlx-core",
2223
+
"sqlx-macros-core",
2224
+
"syn 2.0.111",
2225
+
]
2226
+
2227
+
[[package]]
2228
+
name = "sqlx-macros-core"
2229
+
version = "0.8.6"
2230
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2231
+
checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
2232
+
dependencies = [
2233
+
"dotenvy",
2234
+
"either",
2235
+
"heck",
2236
+
"hex",
2237
+
"once_cell",
2238
+
"proc-macro2",
2239
+
"quote",
2240
+
"serde",
2241
+
"serde_json",
2242
+
"sha2",
2243
+
"sqlx-core",
2244
+
"sqlx-mysql",
2245
+
"sqlx-postgres",
2246
+
"sqlx-sqlite",
2247
+
"syn 2.0.111",
2248
+
"tokio",
2249
+
"url",
2250
+
]
2251
+
2252
+
[[package]]
2253
+
name = "sqlx-mysql"
2254
+
version = "0.8.6"
2255
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2256
+
checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
2257
+
dependencies = [
2258
+
"atoi",
2259
+
"base64",
2260
+
"bitflags 2.9.1",
2261
+
"byteorder",
2262
+
"bytes",
2263
+
"crc",
2264
+
"digest",
2265
+
"dotenvy",
2266
+
"either",
2267
+
"futures-channel",
2268
+
"futures-core",
2269
+
"futures-io",
2270
+
"futures-util",
2271
+
"generic-array",
2272
+
"hex",
2273
+
"hkdf",
2274
+
"hmac",
2275
+
"itoa",
2276
+
"log",
2277
+
"md-5",
2278
+
"memchr",
2279
+
"once_cell",
2280
+
"percent-encoding",
2281
+
"rand 0.8.5",
2282
+
"rsa",
2283
+
"serde",
2284
+
"sha1",
2285
+
"sha2",
2286
+
"smallvec",
2287
+
"sqlx-core",
2288
+
"stringprep",
2289
+
"thiserror 2.0.17",
2290
+
"tracing",
2291
+
"whoami",
2292
+
]
2293
+
2294
+
[[package]]
2295
+
name = "sqlx-postgres"
2296
+
version = "0.8.6"
2297
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2298
+
checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
2299
+
dependencies = [
2300
+
"atoi",
2301
+
"base64",
2302
+
"bitflags 2.9.1",
2303
+
"byteorder",
2304
+
"crc",
2305
+
"dotenvy",
2306
+
"etcetera",
2307
+
"futures-channel",
2308
+
"futures-core",
2309
+
"futures-util",
2310
+
"hex",
2311
+
"hkdf",
2312
+
"hmac",
2313
+
"home",
2314
+
"itoa",
2315
+
"log",
2316
+
"md-5",
2317
+
"memchr",
2318
+
"once_cell",
2319
+
"rand 0.8.5",
2320
+
"serde",
2321
+
"serde_json",
2322
+
"sha2",
2323
+
"smallvec",
2324
+
"sqlx-core",
2325
+
"stringprep",
2326
+
"thiserror 2.0.17",
2327
+
"tracing",
2328
+
"whoami",
2329
+
]
2330
+
2331
+
[[package]]
2332
+
name = "sqlx-sqlite"
2333
+
version = "0.8.6"
2334
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2335
+
checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea"
2336
+
dependencies = [
2337
+
"atoi",
2338
+
"flume",
2339
+
"futures-channel",
2340
+
"futures-core",
2341
+
"futures-executor",
2342
+
"futures-intrusive",
2343
+
"futures-util",
2344
+
"libsqlite3-sys",
2345
+
"log",
2346
+
"percent-encoding",
2347
+
"serde",
2348
+
"serde_urlencoded",
2349
+
"sqlx-core",
2350
+
"thiserror 2.0.17",
2351
+
"tracing",
2352
+
"url",
2353
+
]
2354
+
2355
+
[[package]]
2356
+
name = "stable_deref_trait"
2357
+
version = "1.2.1"
2358
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2359
+
checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
2360
+
2361
+
[[package]]
2362
+
name = "stringprep"
2363
+
version = "0.1.5"
2364
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2365
+
checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1"
2366
+
dependencies = [
2367
+
"unicode-bidi",
2368
+
"unicode-normalization",
2369
+
"unicode-properties",
2370
+
]
2371
+
2372
+
[[package]]
2373
+
name = "strip-ansi-escapes"
2374
+
version = "0.2.1"
2375
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2376
+
checksum = "2a8f8038e7e7969abb3f1b7c2a811225e9296da208539e0f79c5251d6cac0025"
2377
+
dependencies = [
2378
+
"vte",
1187
2379
]
1188
2380
1189
2381
[[package]]
···
1193
2385
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
1194
2386
1195
2387
[[package]]
2388
+
name = "subtle"
2389
+
version = "2.6.1"
2390
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2391
+
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
2392
+
2393
+
[[package]]
2394
+
name = "supports-color"
2395
+
version = "2.1.0"
2396
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2397
+
checksum = "d6398cde53adc3c4557306a96ce67b302968513830a77a95b2b17305d9719a89"
2398
+
dependencies = [
2399
+
"is-terminal",
2400
+
"is_ci",
2401
+
]
2402
+
2403
+
[[package]]
2404
+
name = "supports-color"
2405
+
version = "3.0.2"
2406
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2407
+
checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6"
2408
+
dependencies = [
2409
+
"is_ci",
2410
+
]
2411
+
2412
+
[[package]]
2413
+
name = "supports-hyperlinks"
2414
+
version = "3.1.0"
2415
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2416
+
checksum = "804f44ed3c63152de6a9f90acbea1a110441de43006ea51bcce8f436196a288b"
2417
+
2418
+
[[package]]
2419
+
name = "supports-unicode"
2420
+
version = "3.0.0"
2421
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2422
+
checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2"
2423
+
2424
+
[[package]]
1196
2425
name = "syn"
1197
2426
version = "1.0.109"
1198
2427
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1205
2434
1206
2435
[[package]]
1207
2436
name = "syn"
1208
-
version = "2.0.103"
2437
+
version = "2.0.111"
1209
2438
source = "registry+https://github.com/rust-lang/crates.io-index"
1210
-
checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8"
2439
+
checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
1211
2440
dependencies = [
1212
2441
"proc-macro2",
1213
2442
"quote",
1214
2443
"unicode-ident",
2444
+
]
2445
+
2446
+
[[package]]
2447
+
name = "synstructure"
2448
+
version = "0.13.2"
2449
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2450
+
checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
2451
+
dependencies = [
2452
+
"proc-macro2",
2453
+
"quote",
2454
+
"syn 2.0.111",
1215
2455
]
1216
2456
1217
2457
[[package]]
···
1220
2460
source = "registry+https://github.com/rust-lang/crates.io-index"
1221
2461
checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8"
1222
2462
dependencies = [
1223
-
"rand",
2463
+
"rand 0.4.6",
1224
2464
"remove_dir_all",
1225
2465
]
1226
2466
···
1231
2471
checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1"
1232
2472
dependencies = [
1233
2473
"fastrand",
1234
-
"getrandom",
2474
+
"getrandom 0.3.3",
1235
2475
"once_cell",
1236
2476
"rustix",
1237
2477
"windows-sys 0.59.0",
1238
2478
]
1239
2479
1240
2480
[[package]]
2481
+
name = "terminal_size"
2482
+
version = "0.4.2"
2483
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2484
+
checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed"
2485
+
dependencies = [
2486
+
"rustix",
2487
+
"windows-sys 0.59.0",
2488
+
]
2489
+
2490
+
[[package]]
2491
+
name = "termion"
2492
+
version = "4.0.6"
2493
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2494
+
checksum = "f44138a9ae08f0f502f24104d82517ef4da7330c35acd638f1f29d3cd5475ecb"
2495
+
dependencies = [
2496
+
"libc",
2497
+
"numtoa",
2498
+
]
2499
+
2500
+
[[package]]
2501
+
name = "textwrap"
2502
+
version = "0.16.2"
2503
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2504
+
checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057"
2505
+
dependencies = [
2506
+
"unicode-linebreak",
2507
+
"unicode-width 0.2.1",
2508
+
]
2509
+
2510
+
[[package]]
1241
2511
name = "thiserror"
1242
-
version = "2.0.12"
2512
+
version = "1.0.69"
1243
2513
source = "registry+https://github.com/rust-lang/crates.io-index"
1244
-
checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
2514
+
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
1245
2515
dependencies = [
1246
-
"thiserror-impl",
2516
+
"thiserror-impl 1.0.69",
2517
+
]
2518
+
2519
+
[[package]]
2520
+
name = "thiserror"
2521
+
version = "2.0.17"
2522
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2523
+
checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
2524
+
dependencies = [
2525
+
"thiserror-impl 2.0.17",
1247
2526
]
1248
2527
1249
2528
[[package]]
1250
2529
name = "thiserror-impl"
1251
-
version = "2.0.12"
2530
+
version = "1.0.69"
1252
2531
source = "registry+https://github.com/rust-lang/crates.io-index"
1253
-
checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
2532
+
checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
1254
2533
dependencies = [
1255
2534
"proc-macro2",
1256
2535
"quote",
1257
-
"syn 2.0.103",
2536
+
"syn 2.0.111",
2537
+
]
2538
+
2539
+
[[package]]
2540
+
name = "thiserror-impl"
2541
+
version = "2.0.17"
2542
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2543
+
checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
2544
+
dependencies = [
2545
+
"proc-macro2",
2546
+
"quote",
2547
+
"syn 2.0.111",
1258
2548
]
1259
2549
1260
2550
[[package]]
···
1273
2563
]
1274
2564
1275
2565
[[package]]
2566
+
name = "tinystr"
2567
+
version = "0.8.1"
2568
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2569
+
checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
2570
+
dependencies = [
2571
+
"displaydoc",
2572
+
"zerovec",
2573
+
]
2574
+
2575
+
[[package]]
2576
+
name = "tinyvec"
2577
+
version = "1.10.0"
2578
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2579
+
checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa"
2580
+
dependencies = [
2581
+
"tinyvec_macros",
2582
+
]
2583
+
2584
+
[[package]]
2585
+
name = "tinyvec_macros"
2586
+
version = "0.1.1"
2587
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2588
+
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
2589
+
2590
+
[[package]]
1276
2591
name = "tokio"
1277
-
version = "1.45.1"
2592
+
version = "1.48.0"
1278
2593
source = "registry+https://github.com/rust-lang/crates.io-index"
1279
-
checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
2594
+
checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
1280
2595
dependencies = [
1281
-
"backtrace",
1282
2596
"bytes",
1283
2597
"libc",
1284
2598
"mio",
···
1287
2601
"signal-hook-registry",
1288
2602
"socket2",
1289
2603
"tokio-macros",
1290
-
"windows-sys 0.52.0",
2604
+
"windows-sys 0.61.2",
1291
2605
]
1292
2606
1293
2607
[[package]]
1294
2608
name = "tokio-macros"
1295
-
version = "2.5.0"
2609
+
version = "2.6.0"
1296
2610
source = "registry+https://github.com/rust-lang/crates.io-index"
1297
-
checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
2611
+
checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
1298
2612
dependencies = [
1299
2613
"proc-macro2",
1300
2614
"quote",
1301
-
"syn 2.0.103",
2615
+
"syn 2.0.111",
2616
+
]
2617
+
2618
+
[[package]]
2619
+
name = "tokio-stream"
2620
+
version = "0.1.17"
2621
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2622
+
checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
2623
+
dependencies = [
2624
+
"futures-core",
2625
+
"pin-project-lite",
2626
+
"tokio",
2627
+
]
2628
+
2629
+
[[package]]
2630
+
name = "tokio-util"
2631
+
version = "0.7.17"
2632
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2633
+
checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594"
2634
+
dependencies = [
2635
+
"bytes",
2636
+
"futures-core",
2637
+
"futures-sink",
2638
+
"pin-project-lite",
2639
+
"tokio",
2640
+
]
2641
+
2642
+
[[package]]
2643
+
name = "toml_datetime"
2644
+
version = "0.7.3"
2645
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2646
+
checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533"
2647
+
dependencies = [
2648
+
"serde_core",
2649
+
]
2650
+
2651
+
[[package]]
2652
+
name = "toml_edit"
2653
+
version = "0.23.5"
2654
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2655
+
checksum = "c2ad0b7ae9cfeef5605163839cb9221f453399f15cfb5c10be9885fcf56611f9"
2656
+
dependencies = [
2657
+
"indexmap",
2658
+
"toml_datetime",
2659
+
"toml_parser",
2660
+
"winnow",
2661
+
]
2662
+
2663
+
[[package]]
2664
+
name = "toml_parser"
2665
+
version = "1.0.4"
2666
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2667
+
checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e"
2668
+
dependencies = [
2669
+
"winnow",
1302
2670
]
1303
2671
1304
2672
[[package]]
1305
2673
name = "tracing"
1306
-
version = "0.1.41"
2674
+
version = "0.1.44"
1307
2675
source = "registry+https://github.com/rust-lang/crates.io-index"
1308
-
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
2676
+
checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
1309
2677
dependencies = [
2678
+
"log",
1310
2679
"pin-project-lite",
1311
2680
"tracing-attributes",
1312
2681
"tracing-core",
···
1314
2683
1315
2684
[[package]]
1316
2685
name = "tracing-attributes"
1317
-
version = "0.1.30"
2686
+
version = "0.1.31"
1318
2687
source = "registry+https://github.com/rust-lang/crates.io-index"
1319
-
checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
2688
+
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
1320
2689
dependencies = [
1321
2690
"proc-macro2",
1322
2691
"quote",
1323
-
"syn 2.0.103",
2692
+
"syn 2.0.111",
1324
2693
]
1325
2694
1326
2695
[[package]]
1327
2696
name = "tracing-core"
1328
-
version = "0.1.34"
2697
+
version = "0.1.36"
1329
2698
source = "registry+https://github.com/rust-lang/crates.io-index"
1330
-
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
2699
+
checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
1331
2700
dependencies = [
1332
2701
"once_cell",
1333
2702
"valuable",
1334
2703
]
1335
2704
1336
2705
[[package]]
1337
-
name = "tracing-indicatif"
1338
-
version = "0.3.9"
1339
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1340
-
checksum = "8201ca430e0cd893ef978226fd3516c06d9c494181c8bf4e5b32e30ed4b40aa1"
1341
-
dependencies = [
1342
-
"indicatif",
1343
-
"tracing",
1344
-
"tracing-core",
1345
-
"tracing-subscriber",
1346
-
]
1347
-
1348
-
[[package]]
1349
2706
name = "tracing-log"
1350
2707
version = "0.2.0"
1351
2708
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1358
2715
1359
2716
[[package]]
1360
2717
name = "tracing-subscriber"
1361
-
version = "0.3.19"
2718
+
version = "0.3.22"
1362
2719
source = "registry+https://github.com/rust-lang/crates.io-index"
1363
-
checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
2720
+
checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
1364
2721
dependencies = [
1365
2722
"nu-ansi-term",
1366
2723
"sharded-slab",
···
1377
2734
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
1378
2735
1379
2736
[[package]]
2737
+
name = "unicode-bidi"
2738
+
version = "0.3.18"
2739
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2740
+
checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
2741
+
2742
+
[[package]]
1380
2743
name = "unicode-ident"
1381
2744
version = "1.0.18"
1382
2745
source = "registry+https://github.com/rust-lang/crates.io-index"
1383
2746
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
1384
2747
1385
2748
[[package]]
2749
+
name = "unicode-linebreak"
2750
+
version = "0.1.5"
2751
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2752
+
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
2753
+
2754
+
[[package]]
2755
+
name = "unicode-normalization"
2756
+
version = "0.1.25"
2757
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2758
+
checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8"
2759
+
dependencies = [
2760
+
"tinyvec",
2761
+
]
2762
+
2763
+
[[package]]
2764
+
name = "unicode-properties"
2765
+
version = "0.1.4"
2766
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2767
+
checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d"
2768
+
2769
+
[[package]]
2770
+
name = "unicode-segmentation"
2771
+
version = "1.12.0"
2772
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2773
+
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
2774
+
2775
+
[[package]]
1386
2776
name = "unicode-width"
1387
2777
version = "0.1.14"
1388
2778
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1401
2791
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
1402
2792
1403
2793
[[package]]
2794
+
name = "url"
2795
+
version = "2.5.7"
2796
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2797
+
checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b"
2798
+
dependencies = [
2799
+
"form_urlencoded",
2800
+
"idna",
2801
+
"percent-encoding",
2802
+
"serde",
2803
+
]
2804
+
2805
+
[[package]]
2806
+
name = "utf8_iter"
2807
+
version = "1.0.4"
2808
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2809
+
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
2810
+
2811
+
[[package]]
1404
2812
name = "utf8parse"
1405
2813
version = "0.2.2"
1406
2814
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1413
2821
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
1414
2822
1415
2823
[[package]]
2824
+
name = "vcpkg"
2825
+
version = "0.2.15"
2826
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2827
+
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
2828
+
2829
+
[[package]]
1416
2830
name = "version_check"
1417
2831
version = "0.9.5"
1418
2832
source = "registry+https://github.com/rust-lang/crates.io-index"
1419
2833
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
1420
2834
1421
2835
[[package]]
1422
-
name = "vt100"
1423
-
version = "0.15.2"
1424
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1425
-
checksum = "84cd863bf0db7e392ba3bd04994be3473491b31e66340672af5d11943c6274de"
1426
-
dependencies = [
1427
-
"itoa",
1428
-
"log",
1429
-
"unicode-width 0.1.14",
1430
-
"vte",
1431
-
]
1432
-
1433
-
[[package]]
1434
2836
name = "vte"
1435
-
version = "0.11.1"
1436
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1437
-
checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197"
1438
-
dependencies = [
1439
-
"arrayvec",
1440
-
"utf8parse",
1441
-
"vte_generate_state_changes",
1442
-
]
1443
-
1444
-
[[package]]
1445
-
name = "vte_generate_state_changes"
1446
-
version = "0.1.2"
2837
+
version = "0.14.1"
1447
2838
source = "registry+https://github.com/rust-lang/crates.io-index"
1448
-
checksum = "2e369bee1b05d510a7b4ed645f5faa90619e05437111783ea5848f28d97d3c2e"
2839
+
checksum = "231fdcd7ef3037e8330d8e17e61011a2c244126acc0a982f4040ac3f9f0bc077"
1449
2840
dependencies = [
1450
-
"proc-macro2",
1451
-
"quote",
2841
+
"memchr",
1452
2842
]
1453
2843
1454
2844
[[package]]
···
1467
2857
]
1468
2858
1469
2859
[[package]]
1470
-
name = "wasm-bindgen"
1471
-
version = "0.2.100"
1472
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1473
-
checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
1474
-
dependencies = [
1475
-
"cfg-if",
1476
-
"once_cell",
1477
-
"wasm-bindgen-macro",
1478
-
]
1479
-
1480
-
[[package]]
1481
-
name = "wasm-bindgen-backend"
1482
-
version = "0.2.100"
2860
+
name = "wasite"
2861
+
version = "0.1.0"
1483
2862
source = "registry+https://github.com/rust-lang/crates.io-index"
1484
-
checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
1485
-
dependencies = [
1486
-
"bumpalo",
1487
-
"log",
1488
-
"proc-macro2",
1489
-
"quote",
1490
-
"syn 2.0.103",
1491
-
"wasm-bindgen-shared",
1492
-
]
2863
+
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
1493
2864
1494
2865
[[package]]
1495
-
name = "wasm-bindgen-macro"
1496
-
version = "0.2.100"
2866
+
name = "whoami"
2867
+
version = "1.6.1"
1497
2868
source = "registry+https://github.com/rust-lang/crates.io-index"
1498
-
checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
2869
+
checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d"
1499
2870
dependencies = [
1500
-
"quote",
1501
-
"wasm-bindgen-macro-support",
1502
-
]
1503
-
1504
-
[[package]]
1505
-
name = "wasm-bindgen-macro-support"
1506
-
version = "0.2.100"
1507
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1508
-
checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
1509
-
dependencies = [
1510
-
"proc-macro2",
1511
-
"quote",
1512
-
"syn 2.0.103",
1513
-
"wasm-bindgen-backend",
1514
-
"wasm-bindgen-shared",
1515
-
]
1516
-
1517
-
[[package]]
1518
-
name = "wasm-bindgen-shared"
1519
-
version = "0.2.100"
1520
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1521
-
checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
1522
-
dependencies = [
1523
-
"unicode-ident",
1524
-
]
1525
-
1526
-
[[package]]
1527
-
name = "web-time"
1528
-
version = "1.1.0"
1529
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1530
-
checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
1531
-
dependencies = [
1532
-
"js-sys",
1533
-
"wasm-bindgen",
2871
+
"libredox",
2872
+
"wasite",
1534
2873
]
1535
2874
1536
2875
[[package]]
···
1556
2895
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
1557
2896
1558
2897
[[package]]
2898
+
name = "windows-link"
2899
+
version = "0.1.3"
2900
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2901
+
checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
2902
+
2903
+
[[package]]
2904
+
name = "windows-link"
2905
+
version = "0.2.1"
2906
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2907
+
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
2908
+
2909
+
[[package]]
2910
+
name = "windows-sys"
2911
+
version = "0.48.0"
2912
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2913
+
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
2914
+
dependencies = [
2915
+
"windows-targets 0.48.5",
2916
+
]
2917
+
2918
+
[[package]]
1559
2919
name = "windows-sys"
1560
2920
version = "0.52.0"
1561
2921
source = "registry+https://github.com/rust-lang/crates.io-index"
1562
2922
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
1563
2923
dependencies = [
1564
-
"windows-targets",
2924
+
"windows-targets 0.52.6",
1565
2925
]
1566
2926
1567
2927
[[package]]
···
1570
2930
source = "registry+https://github.com/rust-lang/crates.io-index"
1571
2931
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
1572
2932
dependencies = [
1573
-
"windows-targets",
2933
+
"windows-targets 0.52.6",
2934
+
]
2935
+
2936
+
[[package]]
2937
+
name = "windows-sys"
2938
+
version = "0.60.2"
2939
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2940
+
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
2941
+
dependencies = [
2942
+
"windows-targets 0.53.3",
2943
+
]
2944
+
2945
+
[[package]]
2946
+
name = "windows-sys"
2947
+
version = "0.61.2"
2948
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2949
+
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
2950
+
dependencies = [
2951
+
"windows-link 0.2.1",
2952
+
]
2953
+
2954
+
[[package]]
2955
+
name = "windows-targets"
2956
+
version = "0.48.5"
2957
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2958
+
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
2959
+
dependencies = [
2960
+
"windows_aarch64_gnullvm 0.48.5",
2961
+
"windows_aarch64_msvc 0.48.5",
2962
+
"windows_i686_gnu 0.48.5",
2963
+
"windows_i686_msvc 0.48.5",
2964
+
"windows_x86_64_gnu 0.48.5",
2965
+
"windows_x86_64_gnullvm 0.48.5",
2966
+
"windows_x86_64_msvc 0.48.5",
1574
2967
]
1575
2968
1576
2969
[[package]]
···
1579
2972
source = "registry+https://github.com/rust-lang/crates.io-index"
1580
2973
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
1581
2974
dependencies = [
1582
-
"windows_aarch64_gnullvm",
1583
-
"windows_aarch64_msvc",
1584
-
"windows_i686_gnu",
1585
-
"windows_i686_gnullvm",
1586
-
"windows_i686_msvc",
1587
-
"windows_x86_64_gnu",
1588
-
"windows_x86_64_gnullvm",
1589
-
"windows_x86_64_msvc",
2975
+
"windows_aarch64_gnullvm 0.52.6",
2976
+
"windows_aarch64_msvc 0.52.6",
2977
+
"windows_i686_gnu 0.52.6",
2978
+
"windows_i686_gnullvm 0.52.6",
2979
+
"windows_i686_msvc 0.52.6",
2980
+
"windows_x86_64_gnu 0.52.6",
2981
+
"windows_x86_64_gnullvm 0.52.6",
2982
+
"windows_x86_64_msvc 0.52.6",
1590
2983
]
1591
2984
1592
2985
[[package]]
2986
+
name = "windows-targets"
2987
+
version = "0.53.3"
2988
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2989
+
checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91"
2990
+
dependencies = [
2991
+
"windows-link 0.1.3",
2992
+
"windows_aarch64_gnullvm 0.53.0",
2993
+
"windows_aarch64_msvc 0.53.0",
2994
+
"windows_i686_gnu 0.53.0",
2995
+
"windows_i686_gnullvm 0.53.0",
2996
+
"windows_i686_msvc 0.53.0",
2997
+
"windows_x86_64_gnu 0.53.0",
2998
+
"windows_x86_64_gnullvm 0.53.0",
2999
+
"windows_x86_64_msvc 0.53.0",
3000
+
]
3001
+
3002
+
[[package]]
3003
+
name = "windows_aarch64_gnullvm"
3004
+
version = "0.48.5"
3005
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3006
+
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
3007
+
3008
+
[[package]]
1593
3009
name = "windows_aarch64_gnullvm"
1594
3010
version = "0.52.6"
1595
3011
source = "registry+https://github.com/rust-lang/crates.io-index"
1596
3012
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
1597
3013
1598
3014
[[package]]
3015
+
name = "windows_aarch64_gnullvm"
3016
+
version = "0.53.0"
3017
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3018
+
checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
3019
+
3020
+
[[package]]
3021
+
name = "windows_aarch64_msvc"
3022
+
version = "0.48.5"
3023
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3024
+
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
3025
+
3026
+
[[package]]
1599
3027
name = "windows_aarch64_msvc"
1600
3028
version = "0.52.6"
1601
3029
source = "registry+https://github.com/rust-lang/crates.io-index"
1602
3030
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
1603
3031
1604
3032
[[package]]
3033
+
name = "windows_aarch64_msvc"
3034
+
version = "0.53.0"
3035
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3036
+
checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
3037
+
3038
+
[[package]]
3039
+
name = "windows_i686_gnu"
3040
+
version = "0.48.5"
3041
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3042
+
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
3043
+
3044
+
[[package]]
1605
3045
name = "windows_i686_gnu"
1606
3046
version = "0.52.6"
1607
3047
source = "registry+https://github.com/rust-lang/crates.io-index"
1608
3048
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
1609
3049
1610
3050
[[package]]
3051
+
name = "windows_i686_gnu"
3052
+
version = "0.53.0"
3053
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3054
+
checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
3055
+
3056
+
[[package]]
1611
3057
name = "windows_i686_gnullvm"
1612
3058
version = "0.52.6"
1613
3059
source = "registry+https://github.com/rust-lang/crates.io-index"
1614
3060
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
1615
3061
1616
3062
[[package]]
3063
+
name = "windows_i686_gnullvm"
3064
+
version = "0.53.0"
3065
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3066
+
checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
3067
+
3068
+
[[package]]
3069
+
name = "windows_i686_msvc"
3070
+
version = "0.48.5"
3071
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3072
+
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
3073
+
3074
+
[[package]]
1617
3075
name = "windows_i686_msvc"
1618
3076
version = "0.52.6"
1619
3077
source = "registry+https://github.com/rust-lang/crates.io-index"
1620
3078
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
1621
3079
1622
3080
[[package]]
3081
+
name = "windows_i686_msvc"
3082
+
version = "0.53.0"
3083
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3084
+
checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
3085
+
3086
+
[[package]]
3087
+
name = "windows_x86_64_gnu"
3088
+
version = "0.48.5"
3089
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3090
+
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
3091
+
3092
+
[[package]]
1623
3093
name = "windows_x86_64_gnu"
1624
3094
version = "0.52.6"
1625
3095
source = "registry+https://github.com/rust-lang/crates.io-index"
1626
3096
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
3097
+
3098
+
[[package]]
3099
+
name = "windows_x86_64_gnu"
3100
+
version = "0.53.0"
3101
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3102
+
checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
3103
+
3104
+
[[package]]
3105
+
name = "windows_x86_64_gnullvm"
3106
+
version = "0.48.5"
3107
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3108
+
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
1627
3109
1628
3110
[[package]]
1629
3111
name = "windows_x86_64_gnullvm"
···
1632
3114
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
1633
3115
1634
3116
[[package]]
3117
+
name = "windows_x86_64_gnullvm"
3118
+
version = "0.53.0"
3119
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3120
+
checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
3121
+
3122
+
[[package]]
3123
+
name = "windows_x86_64_msvc"
3124
+
version = "0.48.5"
3125
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3126
+
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
3127
+
3128
+
[[package]]
1635
3129
name = "windows_x86_64_msvc"
1636
3130
version = "0.52.6"
1637
3131
source = "registry+https://github.com/rust-lang/crates.io-index"
1638
3132
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
1639
3133
1640
3134
[[package]]
3135
+
name = "windows_x86_64_msvc"
3136
+
version = "0.53.0"
3137
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3138
+
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
3139
+
3140
+
[[package]]
3141
+
name = "winnow"
3142
+
version = "0.7.13"
3143
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3144
+
checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
3145
+
dependencies = [
3146
+
"memchr",
3147
+
]
3148
+
3149
+
[[package]]
3150
+
name = "winreg"
3151
+
version = "0.10.1"
3152
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3153
+
checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d"
3154
+
dependencies = [
3155
+
"winapi",
3156
+
]
3157
+
3158
+
[[package]]
1641
3159
name = "wire"
1642
-
version = "0.3.0"
3160
+
version = "1.1.1"
1643
3161
dependencies = [
1644
-
"anyhow",
1645
3162
"clap",
1646
3163
"clap-markdown",
1647
3164
"clap-num",
···
1650
3167
"dhat",
1651
3168
"enum-display-derive",
1652
3169
"futures",
3170
+
"itertools",
3171
+
"miette",
3172
+
"owo-colors",
3173
+
"serde_json",
3174
+
"signal-hook",
3175
+
"signal-hook-tokio",
3176
+
"thiserror 2.0.17",
3177
+
"tokio",
3178
+
"tracing",
3179
+
"tracing-log",
3180
+
"tracing-subscriber",
3181
+
"wire-core",
3182
+
]
3183
+
3184
+
[[package]]
3185
+
name = "wire-core"
3186
+
version = "1.1.1"
3187
+
dependencies = [
3188
+
"aho-corasick",
3189
+
"anyhow",
3190
+
"base64",
3191
+
"derive_more",
3192
+
"enum_dispatch",
3193
+
"futures",
3194
+
"gethostname",
3195
+
"gjson",
1653
3196
"im",
1654
-
"indicatif",
1655
3197
"itertools",
1656
-
"lib",
3198
+
"miette",
3199
+
"nix 0.30.1",
3200
+
"nix-compat",
3201
+
"num_enum",
3202
+
"owo-colors",
3203
+
"portable-pty",
3204
+
"proc-macro2",
3205
+
"prost",
3206
+
"rand 0.9.2",
1657
3207
"serde",
1658
3208
"serde_json",
3209
+
"sha2",
3210
+
"sqlx",
3211
+
"strip-ansi-escapes",
3212
+
"syn 2.0.111",
3213
+
"tempdir",
3214
+
"termion",
3215
+
"thiserror 2.0.17",
1659
3216
"tokio",
3217
+
"tokio-util",
1660
3218
"tracing",
1661
-
"tracing-indicatif",
1662
-
"tracing-log",
1663
-
"tracing-subscriber",
3219
+
"wire-key-agent",
3220
+
"zstd",
3221
+
]
3222
+
3223
+
[[package]]
3224
+
name = "wire-key-agent"
3225
+
version = "1.1.1"
3226
+
dependencies = [
3227
+
"anyhow",
3228
+
"base64",
3229
+
"futures-util",
3230
+
"nix 0.30.1",
3231
+
"prost",
3232
+
"prost-build",
3233
+
"sha2",
3234
+
"tokio",
3235
+
"tokio-util",
1664
3236
]
1665
3237
1666
3238
[[package]]
···
1669
3241
source = "registry+https://github.com/rust-lang/crates.io-index"
1670
3242
checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
1671
3243
dependencies = [
1672
-
"bitflags",
3244
+
"bitflags 2.9.1",
3245
+
]
3246
+
3247
+
[[package]]
3248
+
name = "writeable"
3249
+
version = "0.6.1"
3250
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3251
+
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
3252
+
3253
+
[[package]]
3254
+
name = "yoke"
3255
+
version = "0.8.0"
3256
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3257
+
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
3258
+
dependencies = [
3259
+
"serde",
3260
+
"stable_deref_trait",
3261
+
"yoke-derive",
3262
+
"zerofrom",
3263
+
]
3264
+
3265
+
[[package]]
3266
+
name = "yoke-derive"
3267
+
version = "0.8.0"
3268
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3269
+
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
3270
+
dependencies = [
3271
+
"proc-macro2",
3272
+
"quote",
3273
+
"syn 2.0.111",
3274
+
"synstructure",
3275
+
]
3276
+
3277
+
[[package]]
3278
+
name = "zerocopy"
3279
+
version = "0.8.26"
3280
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3281
+
checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f"
3282
+
dependencies = [
3283
+
"zerocopy-derive",
3284
+
]
3285
+
3286
+
[[package]]
3287
+
name = "zerocopy-derive"
3288
+
version = "0.8.26"
3289
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3290
+
checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181"
3291
+
dependencies = [
3292
+
"proc-macro2",
3293
+
"quote",
3294
+
"syn 2.0.111",
3295
+
]
3296
+
3297
+
[[package]]
3298
+
name = "zerofrom"
3299
+
version = "0.1.6"
3300
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3301
+
checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
3302
+
dependencies = [
3303
+
"zerofrom-derive",
3304
+
]
3305
+
3306
+
[[package]]
3307
+
name = "zerofrom-derive"
3308
+
version = "0.1.6"
3309
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3310
+
checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
3311
+
dependencies = [
3312
+
"proc-macro2",
3313
+
"quote",
3314
+
"syn 2.0.111",
3315
+
"synstructure",
3316
+
]
3317
+
3318
+
[[package]]
3319
+
name = "zeroize"
3320
+
version = "1.8.2"
3321
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3322
+
checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
3323
+
3324
+
[[package]]
3325
+
name = "zerotrie"
3326
+
version = "0.2.2"
3327
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3328
+
checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
3329
+
dependencies = [
3330
+
"displaydoc",
3331
+
"yoke",
3332
+
"zerofrom",
3333
+
]
3334
+
3335
+
[[package]]
3336
+
name = "zerovec"
3337
+
version = "0.11.4"
3338
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3339
+
checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b"
3340
+
dependencies = [
3341
+
"yoke",
3342
+
"zerofrom",
3343
+
"zerovec-derive",
3344
+
]
3345
+
3346
+
[[package]]
3347
+
name = "zerovec-derive"
3348
+
version = "0.11.1"
3349
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3350
+
checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
3351
+
dependencies = [
3352
+
"proc-macro2",
3353
+
"quote",
3354
+
"syn 2.0.111",
3355
+
]
3356
+
3357
+
[[package]]
3358
+
name = "zstd"
3359
+
version = "0.13.3"
3360
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3361
+
checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
3362
+
dependencies = [
3363
+
"zstd-safe",
3364
+
]
3365
+
3366
+
[[package]]
3367
+
name = "zstd-safe"
3368
+
version = "7.2.4"
3369
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3370
+
checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
3371
+
dependencies = [
3372
+
"zstd-sys",
3373
+
]
3374
+
3375
+
[[package]]
3376
+
name = "zstd-sys"
3377
+
version = "2.0.16+zstd.1.5.7"
3378
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3379
+
checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
3380
+
dependencies = [
3381
+
"cc",
3382
+
"pkg-config",
1673
3383
]
+42
-13
Cargo.toml
···
1
1
[workspace]
2
-
members = ["wire/key_agent", "wire/lib", "wire/cli"]
2
+
members = ["crates/key_agent", "crates/core", "crates/cli"]
3
3
resolver = "2"
4
-
package.edition = "2021"
5
-
package.version = "0.3.0"
4
+
package.edition = "2024"
5
+
package.version = "1.1.1"
6
6
7
7
[workspace.metadata.crane]
8
8
name = "wire"
9
+
10
+
[workspace.lints.clippy]
11
+
pedantic = { level = "deny", priority = -1 }
12
+
missing_const_for_fn = "deny"
13
+
14
+
# annoying to deal with
15
+
missing_errors_doc = "allow"
16
+
missing_panics_doc = "allow"
9
17
10
18
[workspace.dependencies]
11
-
clap = { version = "4.5.40", features = ["derive", "string"] }
12
-
clap-verbosity-flag = "3.0.3"
13
-
serde = { version = "1.0.219", features = ["derive", "rc"] }
14
-
serde_json = { version = "1.0.140" }
15
-
tokio = { version = "1.45.1", features = ["full"] }
16
-
tracing = "0.1.41"
17
-
tracing-indicatif = "0.3.9"
19
+
futures-util = { version = "0.3.31", features = ["sink", "std"] }
20
+
clap = { version = "4.5.51", features = ["derive", "string", "cargo"] }
21
+
clap-verbosity-flag = "3.0.4"
22
+
serde = { version = "1.0.228", features = ["derive", "rc"] }
23
+
tokio = { version = "1.48.0", features = ["full"] }
24
+
tracing = { version = "0.1.41", features = ["release_max_level_debug"] }
18
25
tracing-log = "0.2.0"
19
-
tracing-subscriber = "0.3.19"
26
+
tracing-subscriber = "0.3.20"
20
27
im = { version = "15.1.0", features = ["serde"] }
21
-
anyhow = "1.0.98"
28
+
anyhow = "1.0.100"
22
29
prost = "0.14.1"
23
-
nix = { version = "0.30.1", features = ["user"] }
30
+
nix = { version = "0.30.1", features = ["user", "poll", "term"] }
31
+
miette = { version = "7.6.0", features = ["fancy"] }
32
+
thiserror = "2.0.17"
33
+
sha2 = "0.10.9"
34
+
tokio-util = { version = "0.7.17", features = ["codec"] }
35
+
base64 = "0.22.1"
36
+
nix-compat = { git = "https://git.snix.dev/snix/snix.git", features = [
37
+
"serde",
38
+
"flakeref",
39
+
] }
40
+
# simd-json = { version = "0.17.0", features = [
41
+
# "serde_impl",
42
+
# # swar-number-parsing is disabled because nix never outputs
43
+
# # floats.
44
+
# # "swar-number-parsing",
45
+
# "runtime-detection",
46
+
# "hints",
47
+
# ] }
48
+
serde_json = { version = "1.0.145" }
49
+
owo-colors = { version = "4.2.3", features = ["supports-colors"] }
50
+
51
+
[profile.dev.package.sqlx-macros]
52
+
opt-level = 3
-661
LICENSE
···
1
-
GNU AFFERO GENERAL PUBLIC LICENSE
2
-
Version 3, 19 November 2007
3
-
4
-
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5
-
Everyone is permitted to copy and distribute verbatim copies
6
-
of this license document, but changing it is not allowed.
7
-
8
-
Preamble
9
-
10
-
The GNU Affero General Public License is a free, copyleft license for
11
-
software and other kinds of works, specifically designed to ensure
12
-
cooperation with the community in the case of network server software.
13
-
14
-
The licenses for most software and other practical works are designed
15
-
to take away your freedom to share and change the works. By contrast,
16
-
our General Public Licenses are intended to guarantee your freedom to
17
-
share and change all versions of a program--to make sure it remains free
18
-
software for all its users.
19
-
20
-
When we speak of free software, we are referring to freedom, not
21
-
price. Our General Public Licenses are designed to make sure that you
22
-
have the freedom to distribute copies of free software (and charge for
23
-
them if you wish), that you receive source code or can get it if you
24
-
want it, that you can change the software or use pieces of it in new
25
-
free programs, and that you know you can do these things.
26
-
27
-
Developers that use our General Public Licenses protect your rights
28
-
with two steps: (1) assert copyright on the software, and (2) offer
29
-
you this License which gives you legal permission to copy, distribute
30
-
and/or modify the software.
31
-
32
-
A secondary benefit of defending all users' freedom is that
33
-
improvements made in alternate versions of the program, if they
34
-
receive widespread use, become available for other developers to
35
-
incorporate. Many developers of free software are heartened and
36
-
encouraged by the resulting cooperation. However, in the case of
37
-
software used on network servers, this result may fail to come about.
38
-
The GNU General Public License permits making a modified version and
39
-
letting the public access it on a server without ever releasing its
40
-
source code to the public.
41
-
42
-
The GNU Affero General Public License is designed specifically to
43
-
ensure that, in such cases, the modified source code becomes available
44
-
to the community. It requires the operator of a network server to
45
-
provide the source code of the modified version running there to the
46
-
users of that server. Therefore, public use of a modified version, on
47
-
a publicly accessible server, gives the public access to the source
48
-
code of the modified version.
49
-
50
-
An older license, called the Affero General Public License and
51
-
published by Affero, was designed to accomplish similar goals. This is
52
-
a different license, not a version of the Affero GPL, but Affero has
53
-
released a new version of the Affero GPL which permits relicensing under
54
-
this license.
55
-
56
-
The precise terms and conditions for copying, distribution and
57
-
modification follow.
58
-
59
-
TERMS AND CONDITIONS
60
-
61
-
0. Definitions.
62
-
63
-
"This License" refers to version 3 of the GNU Affero General Public License.
64
-
65
-
"Copyright" also means copyright-like laws that apply to other kinds of
66
-
works, such as semiconductor masks.
67
-
68
-
"The Program" refers to any copyrightable work licensed under this
69
-
License. Each licensee is addressed as "you". "Licensees" and
70
-
"recipients" may be individuals or organizations.
71
-
72
-
To "modify" a work means to copy from or adapt all or part of the work
73
-
in a fashion requiring copyright permission, other than the making of an
74
-
exact copy. The resulting work is called a "modified version" of the
75
-
earlier work or a work "based on" the earlier work.
76
-
77
-
A "covered work" means either the unmodified Program or a work based
78
-
on the Program.
79
-
80
-
To "propagate" a work means to do anything with it that, without
81
-
permission, would make you directly or secondarily liable for
82
-
infringement under applicable copyright law, except executing it on a
83
-
computer or modifying a private copy. Propagation includes copying,
84
-
distribution (with or without modification), making available to the
85
-
public, and in some countries other activities as well.
86
-
87
-
To "convey" a work means any kind of propagation that enables other
88
-
parties to make or receive copies. Mere interaction with a user through
89
-
a computer network, with no transfer of a copy, is not conveying.
90
-
91
-
An interactive user interface displays "Appropriate Legal Notices"
92
-
to the extent that it includes a convenient and prominently visible
93
-
feature that (1) displays an appropriate copyright notice, and (2)
94
-
tells the user that there is no warranty for the work (except to the
95
-
extent that warranties are provided), that licensees may convey the
96
-
work under this License, and how to view a copy of this License. If
97
-
the interface presents a list of user commands or options, such as a
98
-
menu, a prominent item in the list meets this criterion.
99
-
100
-
1. Source Code.
101
-
102
-
The "source code" for a work means the preferred form of the work
103
-
for making modifications to it. "Object code" means any non-source
104
-
form of a work.
105
-
106
-
A "Standard Interface" means an interface that either is an official
107
-
standard defined by a recognized standards body, or, in the case of
108
-
interfaces specified for a particular programming language, one that
109
-
is widely used among developers working in that language.
110
-
111
-
The "System Libraries" of an executable work include anything, other
112
-
than the work as a whole, that (a) is included in the normal form of
113
-
packaging a Major Component, but which is not part of that Major
114
-
Component, and (b) serves only to enable use of the work with that
115
-
Major Component, or to implement a Standard Interface for which an
116
-
implementation is available to the public in source code form. A
117
-
"Major Component", in this context, means a major essential component
118
-
(kernel, window system, and so on) of the specific operating system
119
-
(if any) on which the executable work runs, or a compiler used to
120
-
produce the work, or an object code interpreter used to run it.
121
-
122
-
The "Corresponding Source" for a work in object code form means all
123
-
the source code needed to generate, install, and (for an executable
124
-
work) run the object code and to modify the work, including scripts to
125
-
control those activities. However, it does not include the work's
126
-
System Libraries, or general-purpose tools or generally available free
127
-
programs which are used unmodified in performing those activities but
128
-
which are not part of the work. For example, Corresponding Source
129
-
includes interface definition files associated with source files for
130
-
the work, and the source code for shared libraries and dynamically
131
-
linked subprograms that the work is specifically designed to require,
132
-
such as by intimate data communication or control flow between those
133
-
subprograms and other parts of the work.
134
-
135
-
The Corresponding Source need not include anything that users
136
-
can regenerate automatically from other parts of the Corresponding
137
-
Source.
138
-
139
-
The Corresponding Source for a work in source code form is that
140
-
same work.
141
-
142
-
2. Basic Permissions.
143
-
144
-
All rights granted under this License are granted for the term of
145
-
copyright on the Program, and are irrevocable provided the stated
146
-
conditions are met. This License explicitly affirms your unlimited
147
-
permission to run the unmodified Program. The output from running a
148
-
covered work is covered by this License only if the output, given its
149
-
content, constitutes a covered work. This License acknowledges your
150
-
rights of fair use or other equivalent, as provided by copyright law.
151
-
152
-
You may make, run and propagate covered works that you do not
153
-
convey, without conditions so long as your license otherwise remains
154
-
in force. You may convey covered works to others for the sole purpose
155
-
of having them make modifications exclusively for you, or provide you
156
-
with facilities for running those works, provided that you comply with
157
-
the terms of this License in conveying all material for which you do
158
-
not control copyright. Those thus making or running the covered works
159
-
for you must do so exclusively on your behalf, under your direction
160
-
and control, on terms that prohibit them from making any copies of
161
-
your copyrighted material outside their relationship with you.
162
-
163
-
Conveying under any other circumstances is permitted solely under
164
-
the conditions stated below. Sublicensing is not allowed; section 10
165
-
makes it unnecessary.
166
-
167
-
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168
-
169
-
No covered work shall be deemed part of an effective technological
170
-
measure under any applicable law fulfilling obligations under article
171
-
11 of the WIPO copyright treaty adopted on 20 December 1996, or
172
-
similar laws prohibiting or restricting circumvention of such
173
-
measures.
174
-
175
-
When you convey a covered work, you waive any legal power to forbid
176
-
circumvention of technological measures to the extent such circumvention
177
-
is effected by exercising rights under this License with respect to
178
-
the covered work, and you disclaim any intention to limit operation or
179
-
modification of the work as a means of enforcing, against the work's
180
-
users, your or third parties' legal rights to forbid circumvention of
181
-
technological measures.
182
-
183
-
4. Conveying Verbatim Copies.
184
-
185
-
You may convey verbatim copies of the Program's source code as you
186
-
receive it, in any medium, provided that you conspicuously and
187
-
appropriately publish on each copy an appropriate copyright notice;
188
-
keep intact all notices stating that this License and any
189
-
non-permissive terms added in accord with section 7 apply to the code;
190
-
keep intact all notices of the absence of any warranty; and give all
191
-
recipients a copy of this License along with the Program.
192
-
193
-
You may charge any price or no price for each copy that you convey,
194
-
and you may offer support or warranty protection for a fee.
195
-
196
-
5. Conveying Modified Source Versions.
197
-
198
-
You may convey a work based on the Program, or the modifications to
199
-
produce it from the Program, in the form of source code under the
200
-
terms of section 4, provided that you also meet all of these conditions:
201
-
202
-
a) The work must carry prominent notices stating that you modified
203
-
it, and giving a relevant date.
204
-
205
-
b) The work must carry prominent notices stating that it is
206
-
released under this License and any conditions added under section
207
-
7. This requirement modifies the requirement in section 4 to
208
-
"keep intact all notices".
209
-
210
-
c) You must license the entire work, as a whole, under this
211
-
License to anyone who comes into possession of a copy. This
212
-
License will therefore apply, along with any applicable section 7
213
-
additional terms, to the whole of the work, and all its parts,
214
-
regardless of how they are packaged. This License gives no
215
-
permission to license the work in any other way, but it does not
216
-
invalidate such permission if you have separately received it.
217
-
218
-
d) If the work has interactive user interfaces, each must display
219
-
Appropriate Legal Notices; however, if the Program has interactive
220
-
interfaces that do not display Appropriate Legal Notices, your
221
-
work need not make them do so.
222
-
223
-
A compilation of a covered work with other separate and independent
224
-
works, which are not by their nature extensions of the covered work,
225
-
and which are not combined with it such as to form a larger program,
226
-
in or on a volume of a storage or distribution medium, is called an
227
-
"aggregate" if the compilation and its resulting copyright are not
228
-
used to limit the access or legal rights of the compilation's users
229
-
beyond what the individual works permit. Inclusion of a covered work
230
-
in an aggregate does not cause this License to apply to the other
231
-
parts of the aggregate.
232
-
233
-
6. Conveying Non-Source Forms.
234
-
235
-
You may convey a covered work in object code form under the terms
236
-
of sections 4 and 5, provided that you also convey the
237
-
machine-readable Corresponding Source under the terms of this License,
238
-
in one of these ways:
239
-
240
-
a) Convey the object code in, or embodied in, a physical product
241
-
(including a physical distribution medium), accompanied by the
242
-
Corresponding Source fixed on a durable physical medium
243
-
customarily used for software interchange.
244
-
245
-
b) Convey the object code in, or embodied in, a physical product
246
-
(including a physical distribution medium), accompanied by a
247
-
written offer, valid for at least three years and valid for as
248
-
long as you offer spare parts or customer support for that product
249
-
model, to give anyone who possesses the object code either (1) a
250
-
copy of the Corresponding Source for all the software in the
251
-
product that is covered by this License, on a durable physical
252
-
medium customarily used for software interchange, for a price no
253
-
more than your reasonable cost of physically performing this
254
-
conveying of source, or (2) access to copy the
255
-
Corresponding Source from a network server at no charge.
256
-
257
-
c) Convey individual copies of the object code with a copy of the
258
-
written offer to provide the Corresponding Source. This
259
-
alternative is allowed only occasionally and noncommercially, and
260
-
only if you received the object code with such an offer, in accord
261
-
with subsection 6b.
262
-
263
-
d) Convey the object code by offering access from a designated
264
-
place (gratis or for a charge), and offer equivalent access to the
265
-
Corresponding Source in the same way through the same place at no
266
-
further charge. You need not require recipients to copy the
267
-
Corresponding Source along with the object code. If the place to
268
-
copy the object code is a network server, the Corresponding Source
269
-
may be on a different server (operated by you or a third party)
270
-
that supports equivalent copying facilities, provided you maintain
271
-
clear directions next to the object code saying where to find the
272
-
Corresponding Source. Regardless of what server hosts the
273
-
Corresponding Source, you remain obligated to ensure that it is
274
-
available for as long as needed to satisfy these requirements.
275
-
276
-
e) Convey the object code using peer-to-peer transmission, provided
277
-
you inform other peers where the object code and Corresponding
278
-
Source of the work are being offered to the general public at no
279
-
charge under subsection 6d.
280
-
281
-
A separable portion of the object code, whose source code is excluded
282
-
from the Corresponding Source as a System Library, need not be
283
-
included in conveying the object code work.
284
-
285
-
A "User Product" is either (1) a "consumer product", which means any
286
-
tangible personal property which is normally used for personal, family,
287
-
or household purposes, or (2) anything designed or sold for incorporation
288
-
into a dwelling. In determining whether a product is a consumer product,
289
-
doubtful cases shall be resolved in favor of coverage. For a particular
290
-
product received by a particular user, "normally used" refers to a
291
-
typical or common use of that class of product, regardless of the status
292
-
of the particular user or of the way in which the particular user
293
-
actually uses, or expects or is expected to use, the product. A product
294
-
is a consumer product regardless of whether the product has substantial
295
-
commercial, industrial or non-consumer uses, unless such uses represent
296
-
the only significant mode of use of the product.
297
-
298
-
"Installation Information" for a User Product means any methods,
299
-
procedures, authorization keys, or other information required to install
300
-
and execute modified versions of a covered work in that User Product from
301
-
a modified version of its Corresponding Source. The information must
302
-
suffice to ensure that the continued functioning of the modified object
303
-
code is in no case prevented or interfered with solely because
304
-
modification has been made.
305
-
306
-
If you convey an object code work under this section in, or with, or
307
-
specifically for use in, a User Product, and the conveying occurs as
308
-
part of a transaction in which the right of possession and use of the
309
-
User Product is transferred to the recipient in perpetuity or for a
310
-
fixed term (regardless of how the transaction is characterized), the
311
-
Corresponding Source conveyed under this section must be accompanied
312
-
by the Installation Information. But this requirement does not apply
313
-
if neither you nor any third party retains the ability to install
314
-
modified object code on the User Product (for example, the work has
315
-
been installed in ROM).
316
-
317
-
The requirement to provide Installation Information does not include a
318
-
requirement to continue to provide support service, warranty, or updates
319
-
for a work that has been modified or installed by the recipient, or for
320
-
the User Product in which it has been modified or installed. Access to a
321
-
network may be denied when the modification itself materially and
322
-
adversely affects the operation of the network or violates the rules and
323
-
protocols for communication across the network.
324
-
325
-
Corresponding Source conveyed, and Installation Information provided,
326
-
in accord with this section must be in a format that is publicly
327
-
documented (and with an implementation available to the public in
328
-
source code form), and must require no special password or key for
329
-
unpacking, reading or copying.
330
-
331
-
7. Additional Terms.
332
-
333
-
"Additional permissions" are terms that supplement the terms of this
334
-
License by making exceptions from one or more of its conditions.
335
-
Additional permissions that are applicable to the entire Program shall
336
-
be treated as though they were included in this License, to the extent
337
-
that they are valid under applicable law. If additional permissions
338
-
apply only to part of the Program, that part may be used separately
339
-
under those permissions, but the entire Program remains governed by
340
-
this License without regard to the additional permissions.
341
-
342
-
When you convey a copy of a covered work, you may at your option
343
-
remove any additional permissions from that copy, or from any part of
344
-
it. (Additional permissions may be written to require their own
345
-
removal in certain cases when you modify the work.) You may place
346
-
additional permissions on material, added by you to a covered work,
347
-
for which you have or can give appropriate copyright permission.
348
-
349
-
Notwithstanding any other provision of this License, for material you
350
-
add to a covered work, you may (if authorized by the copyright holders of
351
-
that material) supplement the terms of this License with terms:
352
-
353
-
a) Disclaiming warranty or limiting liability differently from the
354
-
terms of sections 15 and 16 of this License; or
355
-
356
-
b) Requiring preservation of specified reasonable legal notices or
357
-
author attributions in that material or in the Appropriate Legal
358
-
Notices displayed by works containing it; or
359
-
360
-
c) Prohibiting misrepresentation of the origin of that material, or
361
-
requiring that modified versions of such material be marked in
362
-
reasonable ways as different from the original version; or
363
-
364
-
d) Limiting the use for publicity purposes of names of licensors or
365
-
authors of the material; or
366
-
367
-
e) Declining to grant rights under trademark law for use of some
368
-
trade names, trademarks, or service marks; or
369
-
370
-
f) Requiring indemnification of licensors and authors of that
371
-
material by anyone who conveys the material (or modified versions of
372
-
it) with contractual assumptions of liability to the recipient, for
373
-
any liability that these contractual assumptions directly impose on
374
-
those licensors and authors.
375
-
376
-
All other non-permissive additional terms are considered "further
377
-
restrictions" within the meaning of section 10. If the Program as you
378
-
received it, or any part of it, contains a notice stating that it is
379
-
governed by this License along with a term that is a further
380
-
restriction, you may remove that term. If a license document contains
381
-
a further restriction but permits relicensing or conveying under this
382
-
License, you may add to a covered work material governed by the terms
383
-
of that license document, provided that the further restriction does
384
-
not survive such relicensing or conveying.
385
-
386
-
If you add terms to a covered work in accord with this section, you
387
-
must place, in the relevant source files, a statement of the
388
-
additional terms that apply to those files, or a notice indicating
389
-
where to find the applicable terms.
390
-
391
-
Additional terms, permissive or non-permissive, may be stated in the
392
-
form of a separately written license, or stated as exceptions;
393
-
the above requirements apply either way.
394
-
395
-
8. Termination.
396
-
397
-
You may not propagate or modify a covered work except as expressly
398
-
provided under this License. Any attempt otherwise to propagate or
399
-
modify it is void, and will automatically terminate your rights under
400
-
this License (including any patent licenses granted under the third
401
-
paragraph of section 11).
402
-
403
-
However, if you cease all violation of this License, then your
404
-
license from a particular copyright holder is reinstated (a)
405
-
provisionally, unless and until the copyright holder explicitly and
406
-
finally terminates your license, and (b) permanently, if the copyright
407
-
holder fails to notify you of the violation by some reasonable means
408
-
prior to 60 days after the cessation.
409
-
410
-
Moreover, your license from a particular copyright holder is
411
-
reinstated permanently if the copyright holder notifies you of the
412
-
violation by some reasonable means, this is the first time you have
413
-
received notice of violation of this License (for any work) from that
414
-
copyright holder, and you cure the violation prior to 30 days after
415
-
your receipt of the notice.
416
-
417
-
Termination of your rights under this section does not terminate the
418
-
licenses of parties who have received copies or rights from you under
419
-
this License. If your rights have been terminated and not permanently
420
-
reinstated, you do not qualify to receive new licenses for the same
421
-
material under section 10.
422
-
423
-
9. Acceptance Not Required for Having Copies.
424
-
425
-
You are not required to accept this License in order to receive or
426
-
run a copy of the Program. Ancillary propagation of a covered work
427
-
occurring solely as a consequence of using peer-to-peer transmission
428
-
to receive a copy likewise does not require acceptance. However,
429
-
nothing other than this License grants you permission to propagate or
430
-
modify any covered work. These actions infringe copyright if you do
431
-
not accept this License. Therefore, by modifying or propagating a
432
-
covered work, you indicate your acceptance of this License to do so.
433
-
434
-
10. Automatic Licensing of Downstream Recipients.
435
-
436
-
Each time you convey a covered work, the recipient automatically
437
-
receives a license from the original licensors, to run, modify and
438
-
propagate that work, subject to this License. You are not responsible
439
-
for enforcing compliance by third parties with this License.
440
-
441
-
An "entity transaction" is a transaction transferring control of an
442
-
organization, or substantially all assets of one, or subdividing an
443
-
organization, or merging organizations. If propagation of a covered
444
-
work results from an entity transaction, each party to that
445
-
transaction who receives a copy of the work also receives whatever
446
-
licenses to the work the party's predecessor in interest had or could
447
-
give under the previous paragraph, plus a right to possession of the
448
-
Corresponding Source of the work from the predecessor in interest, if
449
-
the predecessor has it or can get it with reasonable efforts.
450
-
451
-
You may not impose any further restrictions on the exercise of the
452
-
rights granted or affirmed under this License. For example, you may
453
-
not impose a license fee, royalty, or other charge for exercise of
454
-
rights granted under this License, and you may not initiate litigation
455
-
(including a cross-claim or counterclaim in a lawsuit) alleging that
456
-
any patent claim is infringed by making, using, selling, offering for
457
-
sale, or importing the Program or any portion of it.
458
-
459
-
11. Patents.
460
-
461
-
A "contributor" is a copyright holder who authorizes use under this
462
-
License of the Program or a work on which the Program is based. The
463
-
work thus licensed is called the contributor's "contributor version".
464
-
465
-
A contributor's "essential patent claims" are all patent claims
466
-
owned or controlled by the contributor, whether already acquired or
467
-
hereafter acquired, that would be infringed by some manner, permitted
468
-
by this License, of making, using, or selling its contributor version,
469
-
but do not include claims that would be infringed only as a
470
-
consequence of further modification of the contributor version. For
471
-
purposes of this definition, "control" includes the right to grant
472
-
patent sublicenses in a manner consistent with the requirements of
473
-
this License.
474
-
475
-
Each contributor grants you a non-exclusive, worldwide, royalty-free
476
-
patent license under the contributor's essential patent claims, to
477
-
make, use, sell, offer for sale, import and otherwise run, modify and
478
-
propagate the contents of its contributor version.
479
-
480
-
In the following three paragraphs, a "patent license" is any express
481
-
agreement or commitment, however denominated, not to enforce a patent
482
-
(such as an express permission to practice a patent or covenant not to
483
-
sue for patent infringement). To "grant" such a patent license to a
484
-
party means to make such an agreement or commitment not to enforce a
485
-
patent against the party.
486
-
487
-
If you convey a covered work, knowingly relying on a patent license,
488
-
and the Corresponding Source of the work is not available for anyone
489
-
to copy, free of charge and under the terms of this License, through a
490
-
publicly available network server or other readily accessible means,
491
-
then you must either (1) cause the Corresponding Source to be so
492
-
available, or (2) arrange to deprive yourself of the benefit of the
493
-
patent license for this particular work, or (3) arrange, in a manner
494
-
consistent with the requirements of this License, to extend the patent
495
-
license to downstream recipients. "Knowingly relying" means you have
496
-
actual knowledge that, but for the patent license, your conveying the
497
-
covered work in a country, or your recipient's use of the covered work
498
-
in a country, would infringe one or more identifiable patents in that
499
-
country that you have reason to believe are valid.
500
-
501
-
If, pursuant to or in connection with a single transaction or
502
-
arrangement, you convey, or propagate by procuring conveyance of, a
503
-
covered work, and grant a patent license to some of the parties
504
-
receiving the covered work authorizing them to use, propagate, modify
505
-
or convey a specific copy of the covered work, then the patent license
506
-
you grant is automatically extended to all recipients of the covered
507
-
work and works based on it.
508
-
509
-
A patent license is "discriminatory" if it does not include within
510
-
the scope of its coverage, prohibits the exercise of, or is
511
-
conditioned on the non-exercise of one or more of the rights that are
512
-
specifically granted under this License. You may not convey a covered
513
-
work if you are a party to an arrangement with a third party that is
514
-
in the business of distributing software, under which you make payment
515
-
to the third party based on the extent of your activity of conveying
516
-
the work, and under which the third party grants, to any of the
517
-
parties who would receive the covered work from you, a discriminatory
518
-
patent license (a) in connection with copies of the covered work
519
-
conveyed by you (or copies made from those copies), or (b) primarily
520
-
for and in connection with specific products or compilations that
521
-
contain the covered work, unless you entered into that arrangement,
522
-
or that patent license was granted, prior to 28 March 2007.
523
-
524
-
Nothing in this License shall be construed as excluding or limiting
525
-
any implied license or other defenses to infringement that may
526
-
otherwise be available to you under applicable patent law.
527
-
528
-
12. No Surrender of Others' Freedom.
529
-
530
-
If conditions are imposed on you (whether by court order, agreement or
531
-
otherwise) that contradict the conditions of this License, they do not
532
-
excuse you from the conditions of this License. If you cannot convey a
533
-
covered work so as to satisfy simultaneously your obligations under this
534
-
License and any other pertinent obligations, then as a consequence you may
535
-
not convey it at all. For example, if you agree to terms that obligate you
536
-
to collect a royalty for further conveying from those to whom you convey
537
-
the Program, the only way you could satisfy both those terms and this
538
-
License would be to refrain entirely from conveying the Program.
539
-
540
-
13. Remote Network Interaction; Use with the GNU General Public License.
541
-
542
-
Notwithstanding any other provision of this License, if you modify the
543
-
Program, your modified version must prominently offer all users
544
-
interacting with it remotely through a computer network (if your version
545
-
supports such interaction) an opportunity to receive the Corresponding
546
-
Source of your version by providing access to the Corresponding Source
547
-
from a network server at no charge, through some standard or customary
548
-
means of facilitating copying of software. This Corresponding Source
549
-
shall include the Corresponding Source for any work covered by version 3
550
-
of the GNU General Public License that is incorporated pursuant to the
551
-
following paragraph.
552
-
553
-
Notwithstanding any other provision of this License, you have
554
-
permission to link or combine any covered work with a work licensed
555
-
under version 3 of the GNU General Public License into a single
556
-
combined work, and to convey the resulting work. The terms of this
557
-
License will continue to apply to the part which is the covered work,
558
-
but the work with which it is combined will remain governed by version
559
-
3 of the GNU General Public License.
560
-
561
-
14. Revised Versions of this License.
562
-
563
-
The Free Software Foundation may publish revised and/or new versions of
564
-
the GNU Affero General Public License from time to time. Such new versions
565
-
will be similar in spirit to the present version, but may differ in detail to
566
-
address new problems or concerns.
567
-
568
-
Each version is given a distinguishing version number. If the
569
-
Program specifies that a certain numbered version of the GNU Affero General
570
-
Public License "or any later version" applies to it, you have the
571
-
option of following the terms and conditions either of that numbered
572
-
version or of any later version published by the Free Software
573
-
Foundation. If the Program does not specify a version number of the
574
-
GNU Affero General Public License, you may choose any version ever published
575
-
by the Free Software Foundation.
576
-
577
-
If the Program specifies that a proxy can decide which future
578
-
versions of the GNU Affero General Public License can be used, that proxy's
579
-
public statement of acceptance of a version permanently authorizes you
580
-
to choose that version for the Program.
581
-
582
-
Later license versions may give you additional or different
583
-
permissions. However, no additional obligations are imposed on any
584
-
author or copyright holder as a result of your choosing to follow a
585
-
later version.
586
-
587
-
15. Disclaimer of Warranty.
588
-
589
-
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590
-
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591
-
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592
-
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593
-
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594
-
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595
-
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596
-
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597
-
598
-
16. Limitation of Liability.
599
-
600
-
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601
-
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602
-
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603
-
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604
-
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605
-
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606
-
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607
-
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608
-
SUCH DAMAGES.
609
-
610
-
17. Interpretation of Sections 15 and 16.
611
-
612
-
If the disclaimer of warranty and limitation of liability provided
613
-
above cannot be given local legal effect according to their terms,
614
-
reviewing courts shall apply local law that most closely approximates
615
-
an absolute waiver of all civil liability in connection with the
616
-
Program, unless a warranty or assumption of liability accompanies a
617
-
copy of the Program in return for a fee.
618
-
619
-
END OF TERMS AND CONDITIONS
620
-
621
-
How to Apply These Terms to Your New Programs
622
-
623
-
If you develop a new program, and you want it to be of the greatest
624
-
possible use to the public, the best way to achieve this is to make it
625
-
free software which everyone can redistribute and change under these terms.
626
-
627
-
To do so, attach the following notices to the program. It is safest
628
-
to attach them to the start of each source file to most effectively
629
-
state the exclusion of warranty; and each file should have at least
630
-
the "copyright" line and a pointer to where the full notice is found.
631
-
632
-
<one line to give the program's name and a brief idea of what it does.>
633
-
Copyright (C) <year> <name of author>
634
-
635
-
This program is free software: you can redistribute it and/or modify
636
-
it under the terms of the GNU Affero General Public License as published
637
-
by the Free Software Foundation, either version 3 of the License, or
638
-
(at your option) any later version.
639
-
640
-
This program is distributed in the hope that it will be useful,
641
-
but WITHOUT ANY WARRANTY; without even the implied warranty of
642
-
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643
-
GNU Affero General Public License for more details.
644
-
645
-
You should have received a copy of the GNU Affero General Public License
646
-
along with this program. If not, see <https://www.gnu.org/licenses/>.
647
-
648
-
Also add information on how to contact you by electronic and paper mail.
649
-
650
-
If your software can interact with users remotely through a computer
651
-
network, you should also make sure that it provides a way for users to
652
-
get its source. For example, if your program is a web application, its
653
-
interface could display a "Source" link that leads users to an archive
654
-
of the code. There are many ways you could offer source, and different
655
-
solutions will be better for different programs; see section 13 for the
656
-
specific requirements.
657
-
658
-
You should also get your employer (if you work as a programmer) or school,
659
-
if any, to sign a "copyright disclaimer" for the program, if necessary.
660
-
For more information on this, and how to apply and follow the GNU AGPL, see
661
-
<https://www.gnu.org/licenses/>.
+5
-41
README.md
+5
-41
README.md
···
1
-
# wire
2
-
3
-

4
-

5
-

6
-
7
-
wire is a tool to deploy nixos systems. its configuration is a superset of colmena however it is not a fork.
1
+

2
+

3
+

8
4
9
-
Read the [The Guide](https://wire.althaea.zone/guide/wire.html), or continue reading this readme for development information.
5
+
wire is a tool to deploy nixos systems. its usage is inspired by colmena however it is not a fork.
10
6
11
-
## Tree Layout
12
-
13
-
```
14
-
wire
15
-
โโโ wire
16
-
โ โโโ lib
17
-
โ โ โโโ Rust library containing business logic, consumed by `wire`
18
-
โ โโโ cli
19
-
โ โ โโโ Rust binary, using `lib`
20
-
โ โโโ key_agent
21
-
โ โโโ Rust binary ran on a target node. recieves key file bytes and metadata w/ protobuf over SSH stdin
22
-
โโโ doc
23
-
โ โโโ a [vitepress](https://vitepress.dev/) site
24
-
โโโ runtime
25
-
โ โโโ Nix files used during runtime to evaluate nodes
26
-
โโโtests
27
-
โโโ Directories used during cargo & NixOS VM testing
28
-
```
7
+
Read the [The Tutorial](https://wire.althaea.zone/tutorial/overview.html), [Guides](https://wire.althaea.zone/guides/installation.html), or continue reading this readme for development information.
29
8
30
9
## Development
31
10
32
11
Please use `nix develop` for access to the development environment and to ensure
33
12
your changes are ran against the defined git hooks. For simplicity, you may wish
34
13
to use [direnv](https://github.com/direnv/direnv).
35
-
36
-
### Testing
37
-
38
-
#### dhat profiling
39
-
40
-
```sh
41
-
$ just build-dhat
42
-
```
43
-
44
-
#### Testing
45
-
46
-
```sh
47
-
$ cargo test
48
-
$ nix flake check
49
-
```
+24
bench/README.md
+24
bench/README.md
···
1
+
# Bench
2
+
3
+
This directory contains a little tool to run hyperfine against wire and colmena, deploying the exact same hive.
4
+
5
+
The hive can be found in `default.nix`.
6
+
7
+
Run the test with `nix run .#checks.x86_64-linux.bench.driverInteractive -vvv -L
8
+
--show-trace --impure`
9
+
10
+
Then run `test_script()`
11
+
12
+
No idea why running the test directly breaks it....
13
+
14
+
You can adjust the number of nodes in `num-nodes.nix`
15
+
16
+
The hive has around 20 nodes and 200 keys each. 80% of the keys are pre-activation, 20% post-activation.
17
+
18
+
| Command | Mean [s] | Min [s] | Max [s] | Relative |
19
+
| :----------------------- | --------------: | ------: | ------: | ----------: |
20
+
| `wire@HEAD - flake` | 89.825 ± 22.941 | 78.190 | 130.831 | 1.00 |
21
+
| `wire@stable - flake` | 133.664 ± 0.303 | 133.219 | 134.044 | 1.49 ± 0.38 |
22
+
| `colmena@pinned - flake` | 131.544 ± 1.076 | 130.330 | 133.211 | 1.46 ± 0.37 |
23
+
| `wire@stable - hive.nix` | 133.070 ± 0.805 | 132.166 | 134.209 | 1.48 ± 0.38 |
24
+
| `wire@HEAD - hive.nix` | 130.287 ± 1.456 | 128.980 | 132.699 | 1.45 ± 0.37 |
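For context on the `Relative` column: hyperfine reports it as each command's mean divided by the fastest mean (here `wire@HEAD - flake`). Below is a minimal sketch, assuming hyperfine's usual `--export-json` layout (a top-level `results` array with `command` and `mean` fields) and the workspace's `serde_json`, of recomputing that ratio (without the propagated error) from the `run.json` the benchmark VM copies out:

```rust
// Recompute the "Relative" column from hyperfine's JSON export.
// Assumed schema: { "results": [ { "command": "...", "mean": <seconds>, ... } ] }
use std::fs;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let raw = fs::read_to_string("run.json")?;
    let json: serde_json::Value = serde_json::from_str(&raw)?;
    let results = json["results"].as_array().cloned().unwrap_or_default();

    // The fastest mean becomes the 1.00 baseline.
    let baseline = results
        .iter()
        .filter_map(|r| r["mean"].as_f64())
        .fold(f64::INFINITY, f64::min);

    for r in &results {
        let cmd = r["command"].as_str().unwrap_or("?");
        let mean = r["mean"].as_f64().unwrap_or(f64::NAN);
        println!("{cmd}: {mean:.3} s ({:.2}x baseline)", mean / baseline);
    }
    Ok(())
}
```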
+4
bench/colmena/hive.nix
+4
bench/colmena/hive.nix
+11
bench/colmena-flake/flake.nix
+11
bench/colmena-flake/flake.nix
+36
bench/default.nix
+36
bench/default.nix
···
1
+
{ flake }:
2
+
let
3
+
nixpkgs = import flake.inputs.nixpkgs {
4
+
system = "x86_64-linux";
5
+
};
6
+
7
+
vmNode =
8
+
index:
9
+
nixpkgs.lib.nameValuePair "node_${builtins.toString index}" {
10
+
imports = [
11
+
./vm.nix
12
+
flake.checks."x86_64-linux"."bench".nodes."node_${builtins.toString index}".system.build.networkConfig
13
+
];
14
+
15
+
_module.args = {
16
+
index = builtins.toString index;
17
+
};
18
+
19
+
deployment.keys = builtins.listToAttrs (
20
+
builtins.map (
21
+
index:
22
+
nixpkgs.lib.nameValuePair "key-${builtins.toString index}" {
23
+
keyFile = ./key.txt;
24
+
# 80% of keys pre activation, 20% post activation.
25
+
uploadAt = if index <= (200 * 0.8) then "pre-activation" else "post-activation";
26
+
}
27
+
) (nixpkgs.lib.range 0 200)
28
+
);
29
+
30
+
nixpkgs.hostPlatform = "x86_64-linux";
31
+
};
32
+
in
33
+
{
34
+
meta.nixpkgs = nixpkgs;
35
+
}
36
+
// builtins.listToAttrs (builtins.map vmNode (nixpkgs.lib.range 0 (import ./num-nodes.nix)))
+1
bench/key.txt
+1
bench/key.txt
···
1
+
blixwqqujqfyfpsqcgnazvfmgoowggimroqngrbzvvawxprfrewrxgawcnkqqhlwkesyrpykabsshmyxjishnfphequzekyxtsacwqpruugcarendkmswnzercczxympaaddadqsxwtxbghvdjjtqrdfvmelnlloiczpafnccdognkfpugcbibutxsxxkucfdmewfqnqrjukoigckktqhdoaimhnpcpmdlkqkrsuakmyymzmoxxkwofpxgdpwxogjtrexratgzkgbrouiwuobefuweuhtlyyuigwniyqhvmsqxcxwcrbjgnmypbthuxzukkuiptbqhbgvbnktwthztrfjetthpguagswaqfpcykshvkzechxyruchddaqjribnajpvboopqnbstxqnuyawsrsqnljcwwzegcothixqltfmxssectaamgscngwejmadtmqclfuqukfxehwlitnhnhcfzsnandlyathmyxjvhxauvvjpkzxzkswgiyizkyvgmhlzildlxxrkejssslqodzjhbifddvqomqvbwyodnafluyctzyvnnpkslhnuhwnpjqnrtakqeezknstjjtijrnxsvbvwnmwjxoiwdysevzqvvbscjwvhvrzgmsosjbupchvsijhzemltvbpolsrgrcnlsvpqdxhrgmfhozzajavbshnzzdndcsjzvoahosxonpgragvqivolulvjybhnytnvrddicgdcdhbltlkbdzyyhltymvuhozkjrtsraucpufqpzkoexpsioagkblbouspmmwukeoprmbbbtpxlduxleaooaoynzfqmenxrpospatazsgsvghocochzqwaqzyypkarzuyhtgxlfoizntygobpbdikuhgmwupinslzazgbnngsekukaovfmhsvqgleulkywmxemebgyrilarghiyvaxwpdxkzpttrjkfrabdlqyqmrwyrvjqmtaghsoernqkbbhvkkzegdpadgoibladfjumvbckaxgleduswvxwproweukscpbjqnzibvpopariarmgwspfxfijljnkdmifynlpbcfbtuvxbupjuuzpaopadpszrltyrlotsdhiqduqqqdmnxxrzhtnwgenshdwnljcziylvmrjschfgifjumjrxyvfqgmywbtgpfnmnbhrjkbmczzabrbsqggcgsrvvlweejqyckvezcpqawiqvjxrdbrphcnakwjjgrgmonuyejnjojmvpghdhzmtmcdthnzgvbihsgowgelxbwmpevzcvkslmzcoqbmhtiyjjzjrfxgzoidqulslrfjnnbyzglvpulucpytanlyywptrwzvkrrzrzlgbhsjsqrvixrzkevfgxtoshzpugridbtyzporfburhwmccglhqdbgxqgqipuzefsrwhmtfmqdmmtcttcfpvqmxunhyofmktispmtgibgolhwaugvwtiiyrxlligmctgzhpajcwqsguoymxesdufzbivzvidnnccozkkyrqauxwknjarmtfgdeiqopozgxhlidoiyxdvigookwhukhnaizmxlfdwsioxgmgxegayzuxwanujnqfexhzqtgubqtbmozouighccucdgxteesnitceqhjxlemnoccxioalsuhlnchrhdejffupgkapwyhhynzmalsycgokkzckbufcegsjrnuuaxdqqhmuehftjbvemozitbtuleycbxcbavtqicsqgkmsclxqhzhqnsbzqbnbqwoehqyjnmrvowkfejcrrxnfqxgmnlfucruowgsoakjhrjnawlcnqkbyvlccymsyfaaoiyubzwrkcxzjxsvdtipwyuwzqdjminswqxlmymzijiyhmcgqvcyshzkjbhbwncqguehlacmmzlshxjfvvjftqlpqqdbdbrnxdutjeuzvzjveoxotpwfzysbvkavfkiyhnttfitpfqwniixyzapbwevyyyibjlzbscyppbmmldnzqesykqjjhwgpvlugsedjwjwxjdgzkhytwkhzsopdkznyxhwctwwqryqtdivpbhxmxnfcumowbfzoqpmtmdwwfgsezhdkldejqnwgululpnbwlrmtkdvyzzsmxakthtrnhwcttkkrzicxyslroovbptajumzzryjwqwgxfgfogfnhgawprylaypbkxxhejpdzxasgeypacicudzlacdzrxpqqxujnmxyossjfiglycnqutqfcifdsfbnewgemphzwflfpcdyusakecerfzlhsyqhmgklshdghnsxazjcfkbaciqobngxztbpyaeriolrzlcqevdkrcszjshkxtndetsbfoppqmmijlchvsdxzsouasnpgkovfykruteyykzsnkbesnjdreqfujyeawwwqvhboszotkiyrazknxrzymneedaalzibwpizlfqahaozrnydrcfgcklzpccychatkwcsbmwcbtwjexakjqygiwdjpbwrpzjntmtykdylfcqywotfsesesjlbmzanruxooslcybgbsoytcjlneacdyxbwdkhisoistamrqwaugebbzapujycvthhzrjaexwnsfnvdhzwaxawwqekwsuuacemehxmctvjefpdyzzhcxvmtivvdmspgxvvdmkersxfqfmegujbauernsckhqveqcgkkymgmicjgjdpuvpylqpzwcemawhgcxoebsvagwvwxytiqfnnbyfuzpindozdcgeriemnjptwgwpoyfibndyrimnhlxtpplkxzqflsxxihluqeiyxcxbvpsijtdvevswohsfjpvyhciwoljbslamdlxvabqnrxjdfxihderjllbxqflldskprfkyuwwyatkdkdznqdaaveujtmnivvfwgabpespniwljolxluiohgrcawiulcsctkihxjluryxfldnbioqghwzhrkcusdzzttmxvzbprtquymjomftiybhzuxcvqlbkaochlyyapjthrgcujxjpcogqblfpblydnreiolwrvxdczjefsurtylstjzcgemgliiyndsyotojyjopvmlzlogmiievgrzltdotktosehiexbqgfwsfrilqlhununycgzrrdhjnethzkjsaluhgflzxwvmxyiylulrbnqozlrsaytyvkngcxtokwzolnrfmmfpnnojqriwjtwmemsaberjjipwomuptonsfqiojqcczxunwoeqnvvzltqaptkjwgjwfilrptyyexbsabrugibvknhmydvknlbjamvwbfiiaemcaxqpmvujhrjqkeabbicrwtweyfwfapmnqinthozpkdiofukezcgfmtbspczfiervzeptdpnxfzzuiwtwlcbamlblcbjbtaljfvmyfbohtuvtttqdqmzchkmpzmtqzupzqqivjgcwlxxcuexykpslrylraxzsqrvbmfscsnsxaywzryskllgkyiudkhvxlajtonhukromstlnljtgaspwymmyunxxvigepcxwwlgxcmfwagvlnlgevdcbenfenogywefabmdb
udpemiwvdxiqvncplehtbvrjkqloihgebbzshlnrjvbzqbbblkzzifaraviubwehgbjvzdrnyvpdvlyvmvfzzuwgfthresdrwyypmgnxexuhvsdhtdefqeyhfwtefkbxxaucezxsantnntnljvspsatbtdywjvhrxabxicsxqixypsliqofexlztcearqjruxndflxljrmkltbqvlwfeznttkeakdjhbdqzdyebfgwlgxnndsfsgzevhuaifikyqgvnndqtwpizwtazekveutwuwjkklwekkilcrsixzvqhceqiwcdqhdhpkqtrgwzztbwfvywicemwttlypyfwojluidadztbqqxvzfebdakwcquqfnnjelzjbnjmjqrfioiuhrqmcrmkcrjxzkymayunmxchbjeppfgugrsguamzbjuaebolaskixgddkuxfkredmtbvmfgijfckymxkfjoyriihizwvxkayruichaxxrmxqpvcwohcyhmgejjekybsleqepgmtqgrrsmwtqsdgtjaicaanwaggkukdhxadbnedgnilohnlbugmsbsjstqwwpsjqxyezoaeqitgdbtnlpnicqrhmlklokbhyurgdkflpvkwlhkkznqwsjjywesuopyylvjgzgfbsgwopjaiokdvfkgyzsjdtoytbxrinjazyhvytkpyrjybzgtknyddqywgyiwqtarpgmarpqnpndumisfzcmarhhqikmepfmpcvpfyougjujqjtfkbghwgxfrlklbiuqipppzfdlrnatenccjgssgszemfbtrguikiheejhgsrchjolozflgrrgnkzfwbspzabcsvarosquqhsqqxammqrqxklvazseeyzpofkkgpsmpetmwducilvwxoxnjhupccpvllajckrbhssktylvyffwoaaqajgupzkjaxedebwibkmpcudnkrageojafqrkbmydccghgmtnuuyiewasdoqzmfplspbqlhaoxvaypccwonetojkaasiiykeeckqmhanrfepbzdseycieuedvilmqwnteeqrseofabfvlpozbcedfuxpilrydpesnwclfzocfaehgphigecvraooopwtaxiysdhxhdoybsmanpckxhbqhzhcpnvcngdmfoquvrddhwoyuuprbqoillrvmxwyjvukjczzdqhlcrsssdpnvepbyirpxwymcqvzpmrddizjsrqmawdqhbymcdtfcjpbxlsqvlkyecroiivzqctvizzcadafztndwvfqrbrbntpbqcjlbwjuhlhwbeivdmimyeewizqtzlfwdcnsdfaupdewbgwfnkrzcunhobzjqgfppnuubcvbysbmrgjzjwhcfrxdodfsyijgprzfprryctwpawqdjpgljhrifspdbmczcrhlhwnbcchnxopkaksbcyxq
+1
bench/num-nodes.nix
+1
bench/num-nodes.nix
···
1
+
1
+169
bench/runner.nix
+169
bench/runner.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
{
5
+
lib,
6
+
inputs,
7
+
...
8
+
}:
9
+
let
10
+
inherit (lib)
11
+
mapAttrsToList
12
+
flatten
13
+
;
14
+
in
15
+
{
16
+
config.perSystem =
17
+
{
18
+
pkgs,
19
+
self',
20
+
system,
21
+
...
22
+
}:
23
+
let
24
+
benchDirFileset = lib.fileset.toSource {
25
+
root = ../..;
26
+
fileset = lib.fileset.union ./. (
27
+
lib.fileset.fileFilter (
28
+
file: (file.hasExt "nix") || (file.hasExt "txt") || (file.hasExt "lock")
29
+
) ../.
30
+
);
31
+
};
32
+
33
+
nodes =
34
+
builtins.listToAttrs (
35
+
builtins.map (index: {
36
+
value = {
37
+
imports = [
38
+
./vm.nix
39
+
];
40
+
41
+
_module.args = {
42
+
index = builtins.toString index;
43
+
};
44
+
};
45
+
name = "node_${builtins.toString index}";
46
+
}) (lib.range 0 (import ./num-nodes.nix))
47
+
)
48
+
// {
49
+
deployer = {
50
+
imports = [
51
+
./vm.nix
52
+
];
53
+
54
+
environment.systemPackages = [
55
+
pkgs.git
56
+
57
+
(pkgs.writeShellScriptBin "setup-benchmark" ''
58
+
mkdir -p $HOME/wire
59
+
cp -r ${benchDirFileset}/*-source/* $HOME/wire
60
+
61
+
cp -r $HOME/wire/bench/wire-flake $HOME/wire-flake
62
+
cp -r $HOME/wire/bench/colmena-flake $HOME/colmena-flake
63
+
64
+
chmod -R +w $HOME/*
65
+
66
+
cd $HOME/wire
67
+
git init .
68
+
git add -A
69
+
'')
70
+
71
+
(pkgs.writeShellScriptBin "run-benchmark" ''
72
+
bench_dir=$HOME/wire/bench
73
+
74
+
wire_args="apply test --path $bench_dir/wire -vv --ssh-accept-host -p 10"
75
+
wire_args_flake="apply test --path $HOME/wire-flake -vv --ssh-accept-host -p 10"
76
+
77
+
colmena_args="apply test --config $bench_dir/colmena/hive.nix -v -p 10"
78
+
colmena_args_flake="apply test --config $HOME/colmena-flake/flake.nix -v -p 10"
79
+
80
+
${lib.getExe pkgs.hyperfine} --warmup 1 --show-output --runs 5 \
81
+
--export-markdown stats.md \
82
+
--export-json run.json \
83
+
"${lib.getExe self'.packages.wire-small} $wire_args_flake" -n "wire@HEAD - flake" \
84
+
"${lib.getExe' inputs.colmena_benchmarking.packages.x86_64-linux.colmena "colmena"} $colmena_args_flake" \
85
+
-n "colmena@pinned - flake" \
86
+
"${lib.getExe self'.packages.wire-small} $wire_args" -n "wire@HEAD - hive.nix"
87
+
'')
88
+
];
89
+
90
+
_module.args = {
91
+
index = "deployer";
92
+
};
93
+
};
94
+
};
95
+
96
+
evalConfig = import (pkgs.path + "/nixos/lib/eval-config.nix");
97
+
98
+
evalVM =
99
+
module:
100
+
evalConfig {
101
+
inherit system;
102
+
modules = [ module ];
103
+
};
104
+
in
105
+
{
106
+
checks.bench = pkgs.testers.runNixOSTest {
107
+
inherit nodes;
108
+
109
+
name = "benchmark";
110
+
111
+
defaults =
112
+
_:
113
+
let
114
+
# hive = builtins.scopedImport {
115
+
# __nixPath = _b: null;
116
+
# __findFile = _path: name: if name == "nixpkgs" then pkgs.path else throw "oops!!";
117
+
# } "${injectedFlakeDir}/${path}/hive.nix";
118
+
119
+
# fetch **all** dependencies of a flake
120
+
# it's called fetchLayer because my naming skills are awful
121
+
fetchLayer =
122
+
input:
123
+
let
124
+
subLayers = if input ? inputs then map fetchLayer (builtins.attrValues input.inputs) else [ ];
125
+
in
126
+
[
127
+
input.outPath
128
+
]
129
+
++ subLayers;
130
+
in
131
+
{
132
+
virtualisation.additionalPaths = flatten [
133
+
(mapAttrsToList (_: val: (evalVM val).config.system.build.toplevel.drvPath) nodes)
134
+
(mapAttrsToList (_: fetchLayer) inputs)
135
+
];
136
+
137
+
nix.settings.experimental-features = [
138
+
"nix-command"
139
+
"flakes"
140
+
];
141
+
};
142
+
node.specialArgs = {
143
+
snakeOil = import "${pkgs.path}/nixos/tests/ssh-keys.nix" pkgs;
144
+
inherit (self'.packages) wire-small-dev;
145
+
};
146
+
skipTypeCheck = true;
147
+
testScript = ''
148
+
start_all()
149
+
150
+
for i in range(0,${builtins.toString (import ./num-nodes.nix)}):
151
+
machine = globals().get(f"node_{i}")
152
+
machine.wait_for_unit("sshd.service") # type: ignore
153
+
154
+
node_deployer.succeed("setup-benchmark");
155
+
node_deployer.succeed("run-benchmark");
156
+
157
+
node_deployer.copy_from_vm("run.json")
158
+
node_deployer.copy_from_vm("stats.json")
159
+
'';
160
+
};
161
+
};
162
+
}
163
+
164
+
# "${
165
+
# lib.getExe (builtins.getFlake "github:mrshmllow/wire/stable").packages.${system}.wire-small
166
+
# } $wire_args" -n "wire@stable - hive.nix" \
167
+
# "${
168
+
# lib.getExe (builtins.getFlake "github:mrshmllow/wire/stable").packages.${system}.wire-small
169
+
# } $wire_args_flake" -n "wire@stable - flake" \
+67
bench/vm.nix
+67
bench/vm.nix
···
1
+
{
2
+
lib,
3
+
index,
4
+
modulesPath,
5
+
pkgs,
6
+
...
7
+
}:
8
+
let
9
+
flake = import ../default.nix;
10
+
snakeOil = import "${pkgs.path}/nixos/tests/ssh-keys.nix" pkgs;
11
+
in
12
+
{
13
+
imports = [
14
+
"${flake.inputs.nixpkgs}/nixos/modules/virtualisation/qemu-vm.nix"
15
+
"${modulesPath}/virtualisation/qemu-vm.nix"
16
+
"${modulesPath}/testing/test-instrumentation.nix"
17
+
];
18
+
19
+
networking.hostName = "node_${index}";
20
+
21
+
boot = {
22
+
loader = {
23
+
systemd-boot.enable = true;
24
+
efi.canTouchEfiVariables = true;
25
+
};
26
+
};
27
+
28
+
environment.variables.XDG_RUNTIME_DIR = "/tmp";
29
+
30
+
services = {
31
+
openssh = {
32
+
enable = true;
33
+
settings = {
34
+
PermitRootLogin = "without-password";
35
+
};
36
+
};
37
+
38
+
getty.autologinUser = "root";
39
+
};
40
+
41
+
virtualisation = {
42
+
graphics = false;
43
+
# useBootLoader = true;
44
+
45
+
diskSize = 5024;
46
+
memorySize = 4096;
47
+
};
48
+
49
+
# It's important to note that you should never ever use this configuration
50
+
# for production. You are risking a MITM attack with this!
51
+
programs.ssh.extraConfig = ''
52
+
Host *
53
+
StrictHostKeyChecking no
54
+
UserKnownHostsFile /dev/null
55
+
'';
56
+
57
+
users.users.root.openssh.authorizedKeys.keys = [ snakeOil.snakeOilEd25519PublicKey ];
58
+
systemd.tmpfiles.rules = [
59
+
"C+ /root/.ssh/id_ed25519 600 - - - ${snakeOil.snakeOilEd25519PrivateKey}"
60
+
];
61
+
62
+
nix = {
63
+
nixPath = [ "nixpkgs=${pkgs.path}" ];
64
+
settings.substituters = lib.mkForce [ ];
65
+
package = pkgs.lix;
66
+
};
67
+
}
+5
bench/wire/hive.nix
+5
bench/wire/hive.nix
+9
bench/wire-flake/flake.nix
+9
bench/wire-flake/flake.nix
+3
buildbot-nix.toml
+3
buildbot-nix.toml
+32
crates/cli/Cargo.toml
+32
crates/cli/Cargo.toml
···
1
+
[package]
2
+
name = "wire"
3
+
version.workspace = true
4
+
edition.workspace = true
5
+
6
+
[lints]
7
+
workspace = true
8
+
9
+
[features]
10
+
dhat-heap = []
11
+
12
+
[dependencies]
13
+
clap = { workspace = true }
14
+
clap-verbosity-flag = { workspace = true }
15
+
tokio = { workspace = true }
16
+
tracing = { workspace = true }
17
+
tracing-log = { workspace = true }
18
+
tracing-subscriber = { workspace = true }
19
+
wire-core = { path = "../core" }
20
+
serde_json = { workspace = true }
21
+
miette = { workspace = true }
22
+
thiserror = { workspace = true }
23
+
enum-display-derive = "0.1.1"
24
+
futures = "0.3.31"
25
+
clap-num = "1.2.0"
26
+
clap-markdown = "0.1.5"
27
+
itertools = "0.14.0"
28
+
dhat = "0.3.2"
29
+
clap_complete = { version = "4.5.60", features = ["unstable-dynamic"] }
30
+
owo-colors = { workspace = true }
31
+
signal-hook-tokio = { version = "0.3.1", features = ["futures-v0_3"] }
32
+
signal-hook = "0.3.18"
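The `dhat` dependency and the `dhat-heap` feature above indicate heap profiling is compiled in on demand. A minimal sketch of the conventional wiring behind such a feature gate (the standard pattern from dhat's documentation, not necessarily wire's exact entrypoint):

```rust
// Only swap in dhat's tracking allocator when the dhat-heap feature is enabled,
// so release builds keep the default allocator.
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;

fn main() {
    // The profiler writes dhat-heap.json on drop, at normal program exit.
    #[cfg(feature = "dhat-heap")]
    let _profiler = dhat::Profiler::new_heap();

    // ... rest of the CLI entrypoint
}
```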
+98
crates/cli/default.nix
+98
crates/cli/default.nix
···
1
+
{ getSystem, inputs, ... }:
2
+
{
3
+
perSystem =
4
+
{
5
+
pkgs,
6
+
lib,
7
+
self',
8
+
buildRustProgram,
9
+
system,
10
+
...
11
+
}:
12
+
let
13
+
cleanSystem = system: lib.replaceStrings [ "-" ] [ "_" ] system;
14
+
agents = lib.strings.concatMapStrings (
15
+
system: "--set WIRE_KEY_AGENT_${cleanSystem system} ${(getSystem system).packages.agent} "
16
+
) (import inputs.linux-systems);
17
+
in
18
+
{
19
+
packages = {
20
+
default = self'.packages.wire;
21
+
wire-unwrapped = buildRustProgram {
22
+
name = "wire";
23
+
pname = "wire";
24
+
cargoExtraArgs = "-p wire";
25
+
doCheck = true;
26
+
nativeBuildInputs = [
27
+
pkgs.installShellFiles
28
+
pkgs.sqlx-cli
29
+
];
30
+
preBuild = ''
31
+
export DATABASE_URL=sqlite:./db.sqlite3
32
+
sqlx database create
33
+
sqlx migrate run --source ./crates/core/src/cache/migrations/
34
+
'';
35
+
postInstall = ''
36
+
installShellCompletion --cmd wire \
37
+
--bash <(COMPLETE=bash $out/bin/wire) \
38
+
--fish <(COMPLETE=fish $out/bin/wire) \
39
+
--zsh <(COMPLETE=zsh $out/bin/wire)
40
+
'';
41
+
};
42
+
43
+
wire-unwrapped-dev = self'.packages.wire-unwrapped.overrideAttrs {
44
+
CARGO_PROFILE = "dev";
45
+
};
46
+
47
+
wire-unwrapped-perf = buildRustProgram {
48
+
name = "wire";
49
+
pname = "wire";
50
+
CARGO_PROFILE = "profiling";
51
+
cargoExtraArgs = "-p wire";
52
+
};
53
+
54
+
wire = pkgs.symlinkJoin {
55
+
name = "wire";
56
+
paths = [ self'.packages.wire-unwrapped ];
57
+
nativeBuildInputs = [
58
+
pkgs.makeWrapper
59
+
];
60
+
postBuild = ''
61
+
wrapProgram $out/bin/wire ${agents}
62
+
'';
63
+
meta.mainProgram = "wire";
64
+
};
65
+
66
+
wire-small = pkgs.symlinkJoin {
67
+
name = "wire";
68
+
paths = [ self'.packages.wire-unwrapped ];
69
+
nativeBuildInputs = [
70
+
pkgs.makeWrapper
71
+
];
72
+
postBuild = ''
73
+
wrapProgram $out/bin/wire --set WIRE_KEY_AGENT_${cleanSystem system} ${self'.packages.agent}
74
+
'';
75
+
meta.mainProgram = "wire";
76
+
};
77
+
78
+
wire-dev = self'.packages.wire.overrideAttrs {
79
+
paths = [ self'.packages.wire-unwrapped-dev ];
80
+
};
81
+
82
+
wire-small-dev = self'.packages.wire-small.overrideAttrs {
83
+
paths = [ self'.packages.wire-unwrapped-dev ];
84
+
};
85
+
86
+
wire-small-perf = self'.packages.wire-small.overrideAttrs {
87
+
paths = [ self'.packages.wire-unwrapped-perf ];
88
+
};
89
+
90
+
wire-diagnostics-md = self'.packages.wire-unwrapped.overrideAttrs {
91
+
DIAGNOSTICS_MD_OUTPUT = "/build/source";
92
+
installPhase = ''
93
+
mv /build/source/DIAGNOSTICS.md $out
94
+
'';
95
+
};
96
+
};
97
+
};
98
+
}
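The `postInstall` hook above generates shell completions by re-running the freshly built binary with `COMPLETE=<shell>`, which is the convention used by clap_complete's dynamic completion support (the `unstable-dynamic` feature pulled in by the CLI crate). A minimal sketch of the Rust side, with `Cli` standing in as a hypothetical parser type rather than wire's actual one:

```rust
use clap::{CommandFactory, Parser};
use clap_complete::CompleteEnv;

/// Hypothetical parser, for illustration only.
#[derive(Parser)]
struct Cli {
    #[arg(long)]
    verbose: bool,
}

fn main() {
    // If COMPLETE=<shell> is set (as in the postInstall hook), this prints the
    // completion script / candidates and exits; otherwise it is a no-op and
    // normal argument parsing proceeds.
    CompleteEnv::with_factory(|| Cli::command()).complete();

    let _args = Cli::parse();
}
```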
+341
crates/cli/src/apply.rs
+341
crates/cli/src/apply.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use futures::{FutureExt, StreamExt};
5
+
use itertools::{Either, Itertools};
6
+
use miette::{Diagnostic, IntoDiagnostic, Result};
7
+
use std::any::Any;
8
+
use std::collections::HashSet;
9
+
use std::io::{Read, stderr};
10
+
use std::sync::Arc;
11
+
use std::sync::atomic::AtomicBool;
12
+
use thiserror::Error;
13
+
use tracing::{error, info};
14
+
use wire_core::hive::node::{Context, GoalExecutor, Name, Node, Objective, StepState};
15
+
use wire_core::hive::{Hive, HiveLocation};
16
+
use wire_core::status::STATUS;
17
+
use wire_core::{SubCommandModifiers, errors::HiveLibError};
18
+
19
+
use crate::cli::{ApplyTarget, CommonVerbArgs, Partitions};
20
+
21
+
#[derive(Debug, Error, Diagnostic)]
22
+
#[error("node {} failed to apply", .0)]
23
+
struct NodeError(
24
+
Name,
25
+
#[source]
26
+
#[diagnostic_source]
27
+
HiveLibError,
28
+
);
29
+
30
+
#[derive(Debug, Error, Diagnostic)]
31
+
#[error("{} node(s) failed to apply.", .0.len())]
32
+
struct NodeErrors(#[related] Vec<NodeError>);
33
+
34
+
// returns Names and Tags
35
+
fn read_apply_targets_from_stdin() -> Result<(Vec<String>, Vec<Name>)> {
36
+
let mut buf = String::new();
37
+
let mut stdin = std::io::stdin().lock();
38
+
stdin.read_to_string(&mut buf).into_diagnostic()?;
39
+
40
+
Ok(buf
41
+
.split_whitespace()
42
+
.map(|x| ApplyTarget::from(x.to_string()))
43
+
.fold((Vec::new(), Vec::new()), |(mut tags, mut names), target| {
44
+
match target {
45
+
ApplyTarget::Node(name) => names.push(name),
46
+
ApplyTarget::Tag(tag) => tags.push(tag),
47
+
ApplyTarget::Stdin => {}
48
+
}
49
+
(tags, names)
50
+
}))
51
+
}
52
+
53
+
fn resolve_targets(
54
+
on: &[ApplyTarget],
55
+
modifiers: &mut SubCommandModifiers,
56
+
) -> (HashSet<String>, HashSet<Name>) {
57
+
on.iter().fold(
58
+
(HashSet::new(), HashSet::new()),
59
+
|(mut tags, mut names), target| {
60
+
match target {
61
+
ApplyTarget::Tag(tag) => {
62
+
tags.insert(tag.clone());
63
+
}
64
+
ApplyTarget::Node(name) => {
65
+
names.insert(name.clone());
66
+
}
67
+
ApplyTarget::Stdin => {
68
+
// implies non_interactive
69
+
modifiers.non_interactive = true;
70
+
71
+
let (found_tags, found_names) = read_apply_targets_from_stdin().unwrap();
72
+
names.extend(found_names);
73
+
tags.extend(found_tags);
74
+
}
75
+
}
76
+
(tags, names)
77
+
},
78
+
)
79
+
}
80
+
81
+
fn partition_arr<T>(arr: Vec<T>, partition: &Partitions) -> Vec<T>
82
+
where
83
+
T: Any + Clone,
84
+
{
85
+
if arr.is_empty() {
86
+
return arr;
87
+
}
88
+
89
+
let items_per_chunk = arr.len().div_ceil(partition.maximum);
90
+
91
+
arr.chunks(items_per_chunk)
92
+
.nth(partition.current - 1)
93
+
.unwrap_or(&[])
94
+
.to_vec()
95
+
}
96
+
97
+
pub async fn apply<F>(
98
+
hive: &mut Hive,
99
+
should_shutdown: Arc<AtomicBool>,
100
+
location: HiveLocation,
101
+
args: CommonVerbArgs,
102
+
partition: Partitions,
103
+
make_objective: F,
104
+
mut modifiers: SubCommandModifiers,
105
+
) -> Result<()>
106
+
where
107
+
F: Fn(&Name, &Node) -> Objective,
108
+
{
109
+
let location = Arc::new(location);
110
+
111
+
let (tags, names) = resolve_targets(&args.on, &mut modifiers);
112
+
113
+
let selected_names: Vec<_> = hive
114
+
.nodes
115
+
.iter()
116
+
.filter(|(name, node)| {
117
+
args.on.is_empty()
118
+
|| names.contains(name)
119
+
|| node.tags.iter().any(|tag| tags.contains(tag))
120
+
})
121
+
.sorted_by_key(|(name, _)| *name)
122
+
.map(|(name, _)| name.clone())
123
+
.collect();
124
+
125
+
let num_selected = selected_names.len();
126
+
127
+
let partitioned_names = partition_arr(selected_names, &partition);
128
+
129
+
if num_selected != partitioned_names.len() {
130
+
info!(
131
+
"Partitioning reduced selected number of nodes from {num_selected} to {}",
132
+
partitioned_names.len()
133
+
);
134
+
}
135
+
136
+
STATUS
137
+
.lock()
138
+
.add_many(&partitioned_names.iter().collect::<Vec<_>>());
139
+
140
+
let mut set = hive
141
+
.nodes
142
+
.iter_mut()
143
+
.filter(|(name, _)| partitioned_names.contains(name))
144
+
.map(|(name, node)| {
145
+
info!("Resolved {:?} to include {}", args.on, name);
146
+
147
+
let objective = make_objective(name, node);
148
+
149
+
let context = Context {
150
+
node,
151
+
name,
152
+
objective,
153
+
state: StepState::default(),
154
+
hive_location: location.clone(),
155
+
modifiers,
156
+
should_quit: should_shutdown.clone(),
157
+
};
158
+
159
+
GoalExecutor::new(context)
160
+
.execute()
161
+
.map(move |result| (name, result))
162
+
})
163
+
.peekable();
164
+
165
+
if set.peek().is_none() {
166
+
error!("There are no nodes selected for deployment");
167
+
}
168
+
169
+
let futures = futures::stream::iter(set).buffer_unordered(args.parallel);
170
+
let result = futures.collect::<Vec<_>>().await;
171
+
let (successful, errors): (Vec<_>, Vec<_>) =
172
+
result
173
+
.into_iter()
174
+
.partition_map(|(name, result)| match result {
175
+
Ok(..) => Either::Left(name),
176
+
Err(err) => Either::Right((name, err)),
177
+
});
178
+
179
+
if !successful.is_empty() {
180
+
info!(
181
+
"Successfully applied goal to {} node(s): {:?}",
182
+
successful.len(),
183
+
successful
184
+
);
185
+
}
186
+
187
+
if !errors.is_empty() {
188
+
// clear the status bar if we are about to print error messages
189
+
STATUS.lock().clear(&mut stderr());
190
+
191
+
return Err(NodeErrors(
192
+
errors
193
+
.into_iter()
194
+
.map(|(name, error)| NodeError(name.clone(), error))
195
+
.collect(),
196
+
)
197
+
.into());
198
+
}
199
+
200
+
Ok(())
201
+
}
202
+
203
+
#[cfg(test)]
204
+
mod tests {
205
+
use super::*;
206
+
207
+
#[test]
208
+
#[allow(clippy::too_many_lines)]
209
+
fn test_partitioning() {
210
+
let arr = (1..=10).collect::<Vec<_>>();
211
+
assert_eq!(arr, partition_arr(arr.clone(), &Partitions::default()));
212
+
213
+
assert_eq!(
214
+
vec![1, 2, 3, 4, 5],
215
+
partition_arr(
216
+
arr.clone(),
217
+
&Partitions {
218
+
current: 1,
219
+
maximum: 2
220
+
}
221
+
)
222
+
);
223
+
assert_eq!(
224
+
vec![6, 7, 8, 9, 10],
225
+
partition_arr(
226
+
arr,
227
+
&Partitions {
228
+
current: 2,
229
+
maximum: 2
230
+
}
231
+
)
232
+
);
233
+
234
+
// test an odd number of items
235
+
let arr = (1..10).collect::<Vec<_>>();
236
+
assert_eq!(
237
+
arr.clone(),
238
+
partition_arr(arr.clone(), &Partitions::default())
239
+
);
240
+
241
+
assert_eq!(
242
+
vec![1, 2, 3, 4, 5],
243
+
partition_arr(
244
+
arr.clone(),
245
+
&Partitions {
246
+
current: 1,
247
+
maximum: 2
248
+
}
249
+
)
250
+
);
251
+
assert_eq!(
252
+
vec![6, 7, 8, 9],
253
+
partition_arr(
254
+
arr.clone(),
255
+
&Partitions {
256
+
current: 2,
257
+
maximum: 2
258
+
}
259
+
)
260
+
);
261
+
262
+
// test large number of partitions
263
+
let arr = (1..=10).collect::<Vec<_>>();
264
+
assert_eq!(
265
+
arr.clone(),
266
+
partition_arr(arr.clone(), &Partitions::default())
267
+
);
268
+
269
+
for i in 1..=10 {
270
+
assert_eq!(
271
+
vec![i],
272
+
partition_arr(
273
+
arr.clone(),
274
+
&Partitions {
275
+
current: i,
276
+
maximum: 10
277
+
}
278
+
)
279
+
);
280
+
281
+
assert_eq!(
282
+
vec![i],
283
+
partition_arr(
284
+
arr.clone(),
285
+
&Partitions {
286
+
current: i,
287
+
maximum: 15
288
+
}
289
+
)
290
+
);
291
+
}
292
+
293
+
// stretching thin with more partitions than items will start to leave the higher-numbered ones empty
294
+
assert_eq!(
295
+
Vec::<usize>::new(),
296
+
partition_arr(
297
+
arr,
298
+
&Partitions {
299
+
current: 11,
300
+
maximum: 15
301
+
}
302
+
)
303
+
);
304
+
305
+
// test the above holds for a lot of numbers
306
+
for i in 1..1000 {
307
+
let arr: Vec<usize> = (0..i).collect();
308
+
let total = arr.len();
309
+
310
+
assert_eq!(
311
+
arr.clone(),
312
+
partition_arr(arr.clone(), &Partitions::default()),
313
+
);
314
+
315
+
let buckets = 2;
316
+
let chunk_size = total.div_ceil(buckets);
317
+
let split_index = std::cmp::min(chunk_size, total);
318
+
319
+
assert_eq!(
320
+
&arr.clone()[..split_index],
321
+
partition_arr(
322
+
arr.clone(),
323
+
&Partitions {
324
+
current: 1,
325
+
maximum: 2
326
+
}
327
+
),
328
+
);
329
+
assert_eq!(
330
+
&arr.clone()[split_index..],
331
+
partition_arr(
332
+
arr.clone(),
333
+
&Partitions {
334
+
current: 2,
335
+
maximum: 2
336
+
}
337
+
),
338
+
);
339
+
}
340
+
}
341
+
}
+398
crates/cli/src/cli.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use clap::builder::PossibleValue;
5
+
use clap::{Args, Parser, Subcommand, ValueEnum};
6
+
use clap::{ValueHint, crate_version};
7
+
use clap_complete::CompletionCandidate;
8
+
use clap_complete::engine::ArgValueCompleter;
9
+
use clap_num::number_range;
10
+
use clap_verbosity_flag::InfoLevel;
11
+
use tokio::runtime::Handle;
12
+
use wire_core::SubCommandModifiers;
13
+
use wire_core::commands::common::get_hive_node_names;
14
+
use wire_core::hive::node::{Goal as HiveGoal, HandleUnreachable, Name, SwitchToConfigurationGoal};
15
+
use wire_core::hive::{Hive, get_hive_location};
16
+
17
+
use std::io::IsTerminal;
18
+
use std::{
19
+
fmt::{self, Display, Formatter},
20
+
sync::Arc,
21
+
};
22
+
23
+
#[allow(clippy::struct_excessive_bools)]
24
+
#[derive(Parser)]
25
+
#[command(
26
+
name = "wire",
27
+
bin_name = "wire",
28
+
about = "a tool to deploy nixos systems",
29
+
version = format!("{}\nDebug: Hive::SCHEMA_VERSION {}", crate_version!(), Hive::SCHEMA_VERSION)
30
+
)]
31
+
pub struct Cli {
32
+
#[command(subcommand)]
33
+
pub command: Commands,
34
+
35
+
#[command(flatten)]
36
+
pub verbose: clap_verbosity_flag::Verbosity<InfoLevel>,
37
+
38
+
/// Path or flake reference
39
+
#[arg(long, global = true, default_value = std::env::current_dir().unwrap().into_os_string(), visible_alias("flake"))]
40
+
pub path: String,
41
+
42
+
/// Hide progress bars.
43
+
///
44
+
/// Defaults to true if stdin does not refer to a tty (unix pipelines, in CI).
45
+
#[arg(long, global = true, default_value_t = !std::io::stdin().is_terminal())]
46
+
pub no_progress: bool,
47
+
48
+
/// Never accept user input.
49
+
///
50
+
/// Defaults to true if stdin does not refer to a tty (unix pipelines, in CI).
51
+
#[arg(long, global = true, default_value_t = !std::io::stdin().is_terminal())]
52
+
pub non_interactive: bool,
53
+
54
+
/// Show trace logs
55
+
#[arg(long, global = true, default_value_t = false)]
56
+
pub show_trace: bool,
57
+
58
+
#[cfg(debug_assertions)]
59
+
#[arg(long, hide = true, global = true)]
60
+
pub markdown_help: bool,
61
+
}
62
+
63
+
#[derive(Clone, Debug)]
64
+
pub enum ApplyTarget {
65
+
Node(Name),
66
+
Tag(String),
67
+
Stdin,
68
+
}
69
+
70
+
impl From<String> for ApplyTarget {
71
+
fn from(value: String) -> Self {
72
+
if value == "-" {
73
+
return ApplyTarget::Stdin;
74
+
}
75
+
76
+
if let Some(stripped) = value.strip_prefix("@") {
77
+
ApplyTarget::Tag(stripped.to_string())
78
+
} else {
79
+
ApplyTarget::Node(Name(Arc::from(value.as_str())))
80
+
}
81
+
}
82
+
}
83
+
84
+
impl Display for ApplyTarget {
85
+
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
86
+
match self {
87
+
ApplyTarget::Node(name) => name.fmt(f),
88
+
ApplyTarget::Tag(tag) => write!(f, "@{tag}"),
89
+
ApplyTarget::Stdin => write!(f, "#stdin"),
90
+
}
91
+
}
92
+
}
93
+
94
+
fn more_than_zero(s: &str) -> Result<usize, String> {
95
+
number_range(s, 1, usize::MAX)
96
+
}
97
+
98
+
fn parse_partitions(s: &str) -> Result<Partitions, String> {
99
+
let parts: [&str; 2] = s
100
+
.split('/')
101
+
.collect::<Vec<_>>()
102
+
.try_into()
103
+
.map_err(|_| "partition must contain exactly one '/'")?;
104
+
105
+
let (current, maximum) =
106
+
std::array::from_fn(|i| parts[i].parse::<usize>().map_err(|x| x.to_string())).into();
107
+
let (current, maximum) = (current?, maximum?);
108
+
109
+
if current > maximum {
110
+
return Err("current is more than total".to_string());
111
+
}
112
+
113
+
if current == 0 || maximum == 0 {
114
+
return Err("partition segments cannot be 0.".to_string());
115
+
}
116
+
117
+
Ok(Partitions { current, maximum })
118
+
}
119
+
120
+
#[derive(Clone)]
121
+
pub enum HandleUnreachableArg {
122
+
Ignore,
123
+
FailNode,
124
+
}
125
+
126
+
impl Display for HandleUnreachableArg {
127
+
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
128
+
match self {
129
+
Self::Ignore => write!(f, "ignore"),
130
+
Self::FailNode => write!(f, "fail-node"),
131
+
}
132
+
}
133
+
}
134
+
135
+
impl clap::ValueEnum for HandleUnreachableArg {
136
+
fn value_variants<'a>() -> &'a [Self] {
137
+
&[Self::Ignore, Self::FailNode]
138
+
}
139
+
140
+
fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
141
+
match self {
142
+
Self::Ignore => Some(PossibleValue::new("ignore")),
143
+
Self::FailNode => Some(PossibleValue::new("fail-node")),
144
+
}
145
+
}
146
+
}
147
+
148
+
impl From<HandleUnreachableArg> for HandleUnreachable {
149
+
fn from(value: HandleUnreachableArg) -> Self {
150
+
match value {
151
+
HandleUnreachableArg::Ignore => Self::Ignore,
152
+
HandleUnreachableArg::FailNode => Self::FailNode,
153
+
}
154
+
}
155
+
}
156
+
157
+
#[derive(Args)]
158
+
pub struct CommonVerbArgs {
159
+
/// List of literal node names, a literal `-`, or `@` prefixed tags.
160
+
///
161
+
/// `-` will read additional values from stdin, separated by whitespace.
162
+
/// Any `-` implies `--non-interactive`.
163
+
#[arg(short, long, value_name = "NODE | @TAG | `-`", num_args = 1.., add = ArgValueCompleter::new(node_names_completer), value_hint = ValueHint::Unknown)]
164
+
pub on: Vec<ApplyTarget>,
165
+
166
+
#[arg(short, long, default_value_t = 10, value_parser=more_than_zero)]
167
+
pub parallel: usize,
168
+
}
169
+
170
+
#[allow(clippy::struct_excessive_bools)]
171
+
#[derive(Args)]
172
+
pub struct ApplyArgs {
173
+
#[command(flatten)]
174
+
pub common: CommonVerbArgs,
175
+
176
+
#[arg(value_enum, default_value_t)]
177
+
pub goal: Goal,
178
+
179
+
/// Skip key uploads. noop when [GOAL] = Keys
180
+
#[arg(short, long, default_value_t = false)]
181
+
pub no_keys: bool,
182
+
183
+
/// Overrides deployment.buildOnTarget.
184
+
#[arg(short, long, value_name = "NODE")]
185
+
pub always_build_local: Vec<String>,
186
+
187
+
/// Reboot the nodes after activation
188
+
#[arg(short, long, default_value_t = false)]
189
+
pub reboot: bool,
190
+
191
+
/// Enable `--substitute-on-destination` in Nix subcommands.
192
+
#[arg(short, long, default_value_t = true)]
193
+
pub substitute_on_destination: bool,
194
+
195
+
/// How to handle an unreachable node in the ping step.
196
+
///
197
+
/// This only affects the ping step.
198
+
/// wire will still fail the node if it becomes unreachable after activation
199
+
#[arg(long, default_value_t = HandleUnreachableArg::FailNode)]
200
+
pub handle_unreachable: HandleUnreachableArg,
201
+
202
+
/// Unconditionally accept SSH host keys [!!]
203
+
///
204
+
/// Sets `StrictHostKeyChecking` to `no`.
205
+
/// Vulnerable to man-in-the-middle attacks, use with caution.
206
+
#[arg(long, default_value_t = false)]
207
+
pub ssh_accept_host: bool,
208
+
}
209
+
210
+
#[derive(Clone, Debug)]
211
+
pub struct Partitions {
212
+
pub current: usize,
213
+
pub maximum: usize,
214
+
}
215
+
216
+
impl Default for Partitions {
217
+
fn default() -> Self {
218
+
Self {
219
+
current: 1,
220
+
maximum: 1,
221
+
}
222
+
}
223
+
}
224
+
225
+
#[derive(Args)]
226
+
pub struct BuildArgs {
227
+
#[command(flatten)]
228
+
pub common: CommonVerbArgs,
229
+
230
+
/// Partition builds into buckets.
231
+
///
232
+
/// In the format of `current/total`, where 1 <= current <= total.
233
+
#[arg(short = 'P', default_value="1/1", long, value_parser=parse_partitions)]
234
+
pub partition: Option<Partitions>,
235
+
}
236
+
237
+
#[derive(Subcommand)]
238
+
pub enum Commands {
239
+
/// Deploy nodes
240
+
Apply(ApplyArgs),
241
+
/// Build nodes offline
242
+
///
243
+
/// This is distinct from `wire apply build`, as it will not ping or push
244
+
/// the result, making it useful for CI.
245
+
///
246
+
/// Additionally, you may partition the build jobs into buckets.
247
+
Build(BuildArgs),
248
+
/// Inspect hive
249
+
#[clap(visible_alias = "show")]
250
+
Inspect {
251
+
#[arg(value_enum, default_value_t)]
252
+
selection: Inspection,
253
+
254
+
/// Return in JSON format
255
+
#[arg(short, long, default_value_t = false)]
256
+
json: bool,
257
+
},
258
+
}
259
+
260
+
#[derive(Clone, Debug, Default, ValueEnum, Display)]
261
+
pub enum Inspection {
262
+
/// Output all data wire has on the entire hive
263
+
#[default]
264
+
Full,
265
+
/// Only output a list of node names
266
+
Names,
267
+
}
268
+
269
+
#[derive(Clone, Debug, Default, ValueEnum, Display)]
270
+
pub enum Goal {
271
+
/// Make the configuration the boot default and activate now
272
+
#[default]
273
+
Switch,
274
+
/// Build the configuration & push the results
275
+
Build,
276
+
/// Copy the system derivation to the remote hosts
277
+
Push,
278
+
/// Push deployment keys to the remote hosts
279
+
Keys,
280
+
/// Activate the system profile on next boot
281
+
Boot,
282
+
/// Activate the configuration, but don't make it the boot default
283
+
Test,
284
+
/// Show what would be done if this configuration were activated.
285
+
DryActivate,
286
+
}
287
+
288
+
impl TryFrom<Goal> for HiveGoal {
289
+
type Error = miette::Error;
290
+
291
+
fn try_from(value: Goal) -> Result<Self, Self::Error> {
292
+
match value {
293
+
Goal::Build => Ok(HiveGoal::Build),
294
+
Goal::Push => Ok(HiveGoal::Push),
295
+
Goal::Boot => Ok(HiveGoal::SwitchToConfiguration(
296
+
SwitchToConfigurationGoal::Boot,
297
+
)),
298
+
Goal::Switch => Ok(HiveGoal::SwitchToConfiguration(
299
+
SwitchToConfigurationGoal::Switch,
300
+
)),
301
+
Goal::Test => Ok(HiveGoal::SwitchToConfiguration(
302
+
SwitchToConfigurationGoal::Test,
303
+
)),
304
+
Goal::DryActivate => Ok(HiveGoal::SwitchToConfiguration(
305
+
SwitchToConfigurationGoal::DryActivate,
306
+
)),
307
+
Goal::Keys => Ok(HiveGoal::Keys),
308
+
}
309
+
}
310
+
}
311
+
312
+
pub trait ToSubCommandModifiers {
313
+
fn to_subcommand_modifiers(&self) -> SubCommandModifiers;
314
+
}
315
+
316
+
impl ToSubCommandModifiers for Cli {
317
+
fn to_subcommand_modifiers(&self) -> SubCommandModifiers {
318
+
SubCommandModifiers {
319
+
show_trace: self.show_trace,
320
+
non_interactive: self.non_interactive,
321
+
ssh_accept_host: match &self.command {
322
+
Commands::Apply(args) if args.ssh_accept_host => {
323
+
wire_core::StrictHostKeyChecking::No
324
+
}
325
+
_ => wire_core::StrictHostKeyChecking::default(),
326
+
},
327
+
}
328
+
}
329
+
}
330
+
331
+
fn node_names_completer(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> {
332
+
tokio::task::block_in_place(|| {
333
+
let handle = Handle::current();
334
+
let modifiers = SubCommandModifiers::default();
335
+
let mut completions = vec![];
336
+
337
+
if current.is_empty() || current == "-" {
338
+
completions.push(
339
+
CompletionCandidate::new("-").help(Some("Read stdin as --on arguments".into())),
340
+
);
341
+
}
342
+
343
+
let Ok(current_dir) = std::env::current_dir() else {
344
+
return completions;
345
+
};
346
+
347
+
let Ok(hive_location) = handle.block_on(get_hive_location(
348
+
current_dir.display().to_string(),
349
+
modifiers,
350
+
)) else {
351
+
return completions;
352
+
};
353
+
354
+
let Some(current) = current.to_str() else {
355
+
return completions;
356
+
};
357
+
358
+
if current.starts_with('@') {
359
+
return vec![];
360
+
}
361
+
362
+
if let Ok(names) =
363
+
handle.block_on(async { get_hive_node_names(&hive_location, modifiers).await })
364
+
{
365
+
for name in names {
366
+
if name.starts_with(current) {
367
+
completions.push(CompletionCandidate::new(name));
368
+
}
369
+
}
370
+
}
371
+
372
+
completions
373
+
})
374
+
}
375
+
376
+
#[cfg(test)]
377
+
mod tests {
378
+
use std::assert_matches::assert_matches;
379
+
380
+
use crate::cli::{Partitions, parse_partitions};
381
+
382
+
#[test]
383
+
fn test_partition_parsing() {
384
+
assert_matches!(parse_partitions(""), Err(..));
385
+
assert_matches!(parse_partitions("/"), Err(..));
386
+
assert_matches!(parse_partitions(" / "), Err(..));
387
+
assert_matches!(parse_partitions("abc/"), Err(..));
388
+
assert_matches!(parse_partitions("abc"), Err(..));
389
+
assert_matches!(parse_partitions("1/1"), Ok(Partitions {
390
+
current,
391
+
maximum
392
+
}) if current == 1 && maximum == 1);
393
+
assert_matches!(parse_partitions("0/1"), Err(..));
394
+
assert_matches!(parse_partitions("-11/1"), Err(..));
395
+
assert_matches!(parse_partitions("100/99"), Err(..));
396
+
assert_matches!(parse_partitions("5/10"), Ok(Partitions { current, maximum }) if current == 5 && maximum == 10);
397
+
}
398
+
}
+173
crates/cli/src/main.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#![deny(clippy::pedantic)]
5
+
#![feature(sync_nonpoison)]
6
+
#![feature(nonpoison_mutex)]
7
+
#![feature(assert_matches)]
8
+
9
+
use std::process::Command;
10
+
use std::sync::Arc;
11
+
use std::sync::atomic::AtomicBool;
12
+
13
+
use crate::cli::Cli;
14
+
use crate::cli::Partitions;
15
+
use crate::cli::ToSubCommandModifiers;
16
+
use crate::sigint::handle_signals;
17
+
use crate::tracing_setup::setup_logging;
18
+
use clap::CommandFactory;
19
+
use clap::Parser;
20
+
use clap_complete::CompleteEnv;
21
+
use miette::IntoDiagnostic;
22
+
use miette::Result;
23
+
use signal_hook::consts::SIGINT;
24
+
use signal_hook_tokio::Signals;
25
+
use tracing::error;
26
+
use tracing::warn;
27
+
use wire_core::cache::InspectionCache;
28
+
use wire_core::commands::common::get_hive_node_names;
29
+
use wire_core::hive::Hive;
30
+
use wire_core::hive::get_hive_location;
31
+
use wire_core::hive::node::ApplyObjective;
32
+
use wire_core::hive::node::Objective;
33
+
use wire_core::hive::node::should_apply_locally;
34
+
35
+
#[macro_use]
36
+
extern crate enum_display_derive;
37
+
38
+
mod apply;
39
+
mod cli;
40
+
mod sigint;
41
+
mod tracing_setup;
42
+
43
+
#[cfg(feature = "dhat-heap")]
44
+
#[global_allocator]
45
+
static ALLOC: dhat::Alloc = dhat::Alloc;
46
+
47
+
#[tokio::main]
48
+
async fn main() -> Result<()> {
49
+
#[cfg(feature = "dhat-heap")]
50
+
let _profiler = dhat::Profiler::new_heap();
51
+
CompleteEnv::with_factory(Cli::command).complete();
52
+
53
+
let args = Cli::parse();
54
+
55
+
let modifiers = args.to_subcommand_modifiers();
56
+
// disable progress when running in inspect mode.
57
+
setup_logging(
58
+
&args.verbose,
59
+
!matches!(args.command, cli::Commands::Inspect { .. }) && !&args.no_progress,
60
+
);
61
+
62
+
#[cfg(debug_assertions)]
63
+
if args.markdown_help {
64
+
clap_markdown::print_help_markdown::<Cli>();
65
+
return Ok(());
66
+
}
67
+
68
+
if !check_nix_available() {
69
+
miette::bail!("Nix is not available on this system.");
70
+
}
71
+
72
+
let signals = Signals::new([SIGINT]).into_diagnostic()?;
73
+
let signals_handle = signals.handle();
74
+
let should_shutdown = Arc::new(AtomicBool::new(false));
75
+
let signals_task = tokio::spawn(handle_signals(signals, should_shutdown.clone()));
76
+
77
+
let location = get_hive_location(args.path, modifiers).await?;
78
+
let cache = InspectionCache::new().await;
79
+
80
+
match args.command {
81
+
cli::Commands::Apply(apply_args) => {
82
+
let mut hive = Hive::new_from_path(&location, cache.clone(), modifiers).await?;
83
+
let goal: wire_core::hive::node::Goal = apply_args.goal.clone().try_into().unwrap();
84
+
85
+
// Respect user's --always-build-local arg
86
+
hive.force_always_local(apply_args.always_build_local)?;
87
+
88
+
apply::apply(
89
+
&mut hive,
90
+
should_shutdown,
91
+
location,
92
+
apply_args.common,
93
+
Partitions::default(),
94
+
|name, node| {
95
+
Objective::Apply(ApplyObjective {
96
+
goal,
97
+
no_keys: apply_args.no_keys,
98
+
reboot: apply_args.reboot,
99
+
substitute_on_destination: apply_args.substitute_on_destination,
100
+
should_apply_locally: should_apply_locally(
101
+
node.allow_local_deployment,
102
+
&name.0,
103
+
),
104
+
handle_unreachable: apply_args.handle_unreachable.clone().into(),
105
+
})
106
+
},
107
+
modifiers,
108
+
)
109
+
.await?;
110
+
}
111
+
cli::Commands::Build(build_args) => {
112
+
let mut hive = Hive::new_from_path(&location, cache.clone(), modifiers).await?;
113
+
114
+
apply::apply(
115
+
&mut hive,
116
+
should_shutdown,
117
+
location,
118
+
build_args.common,
119
+
build_args.partition.unwrap_or_default(),
120
+
|_name, _node| Objective::BuildLocally,
121
+
modifiers,
122
+
)
123
+
.await?;
124
+
}
125
+
cli::Commands::Inspect { json, selection } => println!("{}", {
126
+
match selection {
127
+
cli::Inspection::Full => {
128
+
let hive = Hive::new_from_path(&location, cache.clone(), modifiers).await?;
129
+
if json {
130
+
serde_json::to_string(&hive).into_diagnostic()?
131
+
} else {
132
+
warn!("use --json to output something scripting suitable");
133
+
format!("{hive}")
134
+
}
135
+
}
136
+
cli::Inspection::Names => {
137
+
serde_json::to_string(&get_hive_node_names(&location, modifiers).await?)
138
+
.into_diagnostic()?
139
+
}
140
+
}
141
+
}),
142
+
}
143
+
144
+
if let Some(cache) = cache {
145
+
cache.gc().await.into_diagnostic()?;
146
+
}
147
+
148
+
signals_handle.close();
149
+
signals_task.await.into_diagnostic()?;
150
+
151
+
Ok(())
152
+
}
153
+
154
+
fn check_nix_available() -> bool {
155
+
match Command::new("nix")
156
+
.stdout(std::process::Stdio::null())
157
+
.stderr(std::process::Stdio::null())
158
+
.spawn()
159
+
{
160
+
Ok(_) => true,
161
+
Err(e) => {
162
+
if let std::io::ErrorKind::NotFound = e.kind() {
163
+
false
164
+
} else {
165
+
error!(
166
+
"Something weird happened checking for nix availability, {}",
167
+
e
168
+
);
169
+
false
170
+
}
171
+
}
172
+
}
173
+
}
+21
crates/cli/src/sigint.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::sync::{Arc, atomic::AtomicBool};
5
+
6
+
use signal_hook::consts::SIGINT;
7
+
use signal_hook_tokio::Signals;
8
+
9
+
use futures::stream::StreamExt;
10
+
use tracing::info;
11
+
12
+
pub(crate) async fn handle_signals(mut signals: Signals, should_shutdown: Arc<AtomicBool>) {
13
+
while let Some(signal) = signals.next().await {
14
+
if let SIGINT = signal
15
+
&& !should_shutdown.load(std::sync::atomic::Ordering::Relaxed)
16
+
{
17
+
info!("Received SIGINT, attempting to shut down executor tasks.");
18
+
should_shutdown.store(true, std::sync::atomic::Ordering::Relaxed);
19
+
}
20
+
}
21
+
}
+284
crates/cli/src/tracing_setup.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::{
5
+
collections::VecDeque,
6
+
io::{self, Stderr, Write, stderr},
7
+
time::Duration,
8
+
};
9
+
10
+
use clap_verbosity_flag::{LogLevel, Verbosity};
11
+
use owo_colors::{OwoColorize, Stream, Style};
12
+
use tracing::{Level, Subscriber};
13
+
use tracing_log::AsTrace;
14
+
use tracing_subscriber::{
15
+
Layer,
16
+
field::{RecordFields, VisitFmt},
17
+
fmt::{
18
+
FormatEvent, FormatFields, FormattedFields,
19
+
format::{self, DefaultFields, DefaultVisitor, Format, Full},
20
+
},
21
+
layer::{Context, SubscriberExt},
22
+
registry::LookupSpan,
23
+
util::SubscriberInitExt,
24
+
};
25
+
use wire_core::{STDIN_CLOBBER_LOCK, status::STATUS};
26
+
27
+
/// The non-clobbering writer ensures that log lines are held while interactive
28
+
/// prompts are shown to the user. If logs were shown, they would "clobber" the
29
+
/// sudo / ssh prompt.
30
+
///
31
+
/// Additionally, the `STDIN_CLOBBER_LOCK` is used to ensure that no two
32
+
/// interactive prompts are shown at the same time.
33
+
struct NonClobberingWriter {
34
+
queue: VecDeque<Vec<u8>>,
35
+
stderr: Stderr,
36
+
}
37
+
38
+
impl NonClobberingWriter {
39
+
fn new() -> Self {
40
+
NonClobberingWriter {
41
+
queue: VecDeque::with_capacity(100),
42
+
stderr: stderr(),
43
+
}
44
+
}
45
+
46
+
/// expects the caller to write the status line
47
+
fn dump_previous(&mut self) -> Result<(), io::Error> {
48
+
STATUS.lock().clear(&mut self.stderr);
49
+
50
+
for buf in self.queue.iter().rev() {
51
+
self.stderr.write(buf).map(|_| ())?;
52
+
}
53
+
54
+
Ok(())
55
+
}
56
+
}
57
+
58
+
impl Write for NonClobberingWriter {
59
+
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
60
+
if let 1.. = STDIN_CLOBBER_LOCK.available_permits() {
61
+
self.dump_previous().map(|()| 0)?;
62
+
63
+
STATUS.lock().write_above_status(buf, &mut self.stderr)
64
+
} else {
65
+
self.queue.push_front(buf.to_vec());
66
+
67
+
Ok(buf.len())
68
+
}
69
+
}
70
+
71
+
fn flush(&mut self) -> std::io::Result<()> {
72
+
self.stderr.flush()
73
+
}
74
+
}
75
+
76
+
/// Handles event formatting, which falls back to the default formatter
77
+
/// passed.
78
+
struct WireEventFormat(Format<Full, ()>);
79
+
/// Formats the node's name with `WireFieldVisitor`
80
+
struct WireFieldFormat;
81
+
struct WireFieldVisitor<'a>(DefaultVisitor<'a>);
82
+
/// `WireLayer` injects `WireFieldFormat` as an extension on the event
83
+
struct WireLayer;
84
+
85
+
impl<'a> WireFieldVisitor<'a> {
86
+
fn new(writer: format::Writer<'a>, is_empty: bool) -> Self {
87
+
Self(DefaultVisitor::new(writer, is_empty))
88
+
}
89
+
}
90
+
91
+
impl<'writer> FormatFields<'writer> for WireFieldFormat {
92
+
fn format_fields<R: RecordFields>(
93
+
&self,
94
+
writer: format::Writer<'writer>,
95
+
fields: R,
96
+
) -> std::fmt::Result {
97
+
let mut v = WireFieldVisitor::new(writer, true);
98
+
fields.record(&mut v);
99
+
Ok(())
100
+
}
101
+
}
102
+
103
+
impl tracing::field::Visit for WireFieldVisitor<'_> {
104
+
fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn std::fmt::Debug) {
105
+
if field.name() == "node" {
106
+
let _ = write!(
107
+
self.0.writer(),
108
+
"{:?}",
109
+
value.if_supports_color(Stream::Stderr, |text| text.bold())
110
+
);
111
+
}
112
+
}
113
+
}
114
+
115
+
const fn get_style(level: Level) -> Style {
116
+
let mut style = Style::new();
117
+
118
+
style = match level {
119
+
Level::TRACE => style.purple(),
120
+
Level::DEBUG => style.blue(),
121
+
Level::INFO => style.green(),
122
+
Level::WARN => style.yellow(),
123
+
Level::ERROR => style.red(),
124
+
};
125
+
126
+
style
127
+
}
128
+
129
+
const fn fmt_level(level: Level) -> &'static str {
130
+
match level {
131
+
Level::TRACE => "TRACE",
132
+
Level::DEBUG => "DEBUG",
133
+
Level::INFO => " INFO",
134
+
Level::WARN => " WARN",
135
+
Level::ERROR => "ERROR",
136
+
}
137
+
}
138
+
139
+
impl<S, N> FormatEvent<S, N> for WireEventFormat
140
+
where
141
+
S: Subscriber + for<'a> LookupSpan<'a>,
142
+
N: for<'a> FormatFields<'a> + 'static,
143
+
{
144
+
fn format_event(
145
+
&self,
146
+
ctx: &tracing_subscriber::fmt::FmtContext<'_, S, N>,
147
+
mut writer: tracing_subscriber::fmt::format::Writer<'_>,
148
+
event: &tracing::Event<'_>,
149
+
) -> std::fmt::Result {
150
+
let metadata = event.metadata();
151
+
152
+
// skip events without an "event_scope"
153
+
let Some(scope) = ctx.event_scope() else {
154
+
return self.0.format_event(ctx, writer, event);
155
+
};
156
+
157
+
// skip spans without a parent
158
+
let Some(parent) = scope.last() else {
159
+
return self.0.format_event(ctx, writer, event);
160
+
};
161
+
162
+
// skip spans that don't refer to the goal step executor
163
+
if parent.name() != "execute" {
164
+
return self.0.format_event(ctx, writer, event);
165
+
}
166
+
167
+
// skip spans that don't refer to a specific node being executed
168
+
if parent.fields().field("node").is_none() {
169
+
return self.0.format_event(ctx, writer, event);
170
+
}
171
+
172
+
let style = get_style(*metadata.level());
173
+
174
+
// write the log level with colour
175
+
write!(
176
+
writer,
177
+
"{} ",
178
+
fmt_level(*metadata.level()).if_supports_color(Stream::Stderr, |x| { x.style(style) })
179
+
)?;
180
+
181
+
// extract the formatted node name into a string
182
+
let parent_ext = parent.extensions();
183
+
let node_name = &parent_ext
184
+
.get::<FormattedFields<WireFieldFormat>>()
185
+
.unwrap();
186
+
187
+
write!(writer, "{node_name}")?;
188
+
189
+
// write the step name
190
+
if let Some(step) = ctx.event_scope().unwrap().from_root().nth(1) {
191
+
write!(writer, " {}", step.name().italic())?;
192
+
}
193
+
194
+
write!(writer, " | ")?;
195
+
196
+
// write the default fields, including the actual message and other data
197
+
let mut fields = FormattedFields::<DefaultFields>::new(String::new());
198
+
199
+
ctx.format_fields(fields.as_writer(), event)?;
200
+
201
+
write!(writer, "{fields}")?;
202
+
writeln!(writer)?;
203
+
204
+
Ok(())
205
+
}
206
+
}
207
+
208
+
impl<S> Layer<S> for WireLayer
209
+
where
210
+
S: Subscriber + for<'a> LookupSpan<'a>,
211
+
{
212
+
fn on_new_span(
213
+
&self,
214
+
attrs: &tracing::span::Attributes<'_>,
215
+
id: &tracing::span::Id,
216
+
ctx: Context<'_, S>,
217
+
) {
218
+
let span = ctx.span(id).unwrap();
219
+
220
+
if span.extensions().get::<WireFieldFormat>().is_some() {
221
+
return;
222
+
}
223
+
224
+
let mut fields = FormattedFields::<WireFieldFormat>::new(String::new());
225
+
if WireFieldFormat
226
+
.format_fields(fields.as_writer(), attrs)
227
+
.is_ok()
228
+
{
229
+
span.extensions_mut().insert(fields);
230
+
}
231
+
}
232
+
}
233
+
234
+
async fn status_tick_worker() {
235
+
let mut interval = tokio::time::interval(Duration::from_secs(1));
236
+
let mut stderr = stderr();
237
+
238
+
loop {
239
+
interval.tick().await;
240
+
241
+
if STDIN_CLOBBER_LOCK.available_permits() < 1 {
242
+
continue;
243
+
}
244
+
245
+
let mut status = STATUS.lock();
246
+
247
+
status.clear(&mut stderr);
248
+
status.write_status(&mut stderr);
249
+
}
250
+
}
251
+
252
+
/// Set up logging for the application
253
+
/// Uses `WireFieldFormat` if -v was never passed
254
+
pub fn setup_logging<L: LogLevel>(verbosity: &Verbosity<L>, show_progress: bool) {
255
+
let filter = verbosity.log_level_filter().as_trace();
256
+
let registry = tracing_subscriber::registry();
257
+
258
+
STATUS.lock().show_progress(show_progress);
259
+
260
+
// spawn worker to tick the status bar
261
+
if show_progress {
262
+
tokio::spawn(status_tick_worker());
263
+
}
264
+
265
+
if verbosity.is_present() {
266
+
let layer = tracing_subscriber::fmt::layer()
267
+
.without_time()
268
+
.with_target(false)
269
+
.with_writer(NonClobberingWriter::new)
270
+
.with_filter(filter);
271
+
272
+
registry.with(layer).init();
273
+
return;
274
+
}
275
+
276
+
let event_formatter = WireEventFormat(format::format().without_time().with_target(false));
277
+
278
+
let layer = tracing_subscriber::fmt::layer()
279
+
.event_format(event_formatter)
280
+
.with_writer(NonClobberingWriter::new)
281
+
.with_filter(filter);
282
+
283
+
registry.with(layer).with(WireLayer).init();
284
+
}
+51
crates/core/Cargo.toml
···
1
+
[package]
2
+
name = "wire-core"
3
+
version.workspace = true
4
+
edition.workspace = true
5
+
6
+
[lints]
7
+
workspace = true
8
+
9
+
[features]
10
+
no_web_tests = []
11
+
12
+
[dependencies]
13
+
tokio = { workspace = true }
14
+
serde = { workspace = true }
15
+
serde_json = { workspace = true }
16
+
tracing = { workspace = true }
17
+
im = { workspace = true }
18
+
thiserror = "2.0.17"
19
+
derive_more = { version = "2.0.1", features = ["display"] }
20
+
wire-key-agent = { path = "../key_agent" }
21
+
futures = "0.3.31"
22
+
prost = { workspace = true }
23
+
gethostname = "1.1.0"
24
+
nix.workspace = true
25
+
miette = { workspace = true }
26
+
rand = "0.9.2"
27
+
tokio-util = { workspace = true }
28
+
portable-pty = "0.9.0"
29
+
anyhow.workspace = true
30
+
itertools = "0.14.0"
31
+
enum_dispatch = "0.3.13"
32
+
sha2 = { workspace = true }
33
+
base64 = { workspace = true }
34
+
nix-compat = { workspace = true }
35
+
strip-ansi-escapes = "0.2.1"
36
+
aho-corasick = "1.1.4"
37
+
num_enum = "0.7.5"
38
+
gjson = "0.8.1"
39
+
owo-colors = { workspace = true }
40
+
termion = "4.0.6"
41
+
sqlx = { version = "0.8", features = ["runtime-tokio", "sqlite"] }
42
+
zstd = "0.13.3"
43
+
44
+
[dev-dependencies]
45
+
tempdir = "0.3"
46
+
47
+
[build-dependencies]
48
+
miette = { workspace = true }
49
+
syn = "2.0.109"
50
+
proc-macro2 = "1.0.103"
51
+
itertools = "0.14.0"
+206
crates/core/build.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use miette::{Context, IntoDiagnostic as _, Result, miette};
5
+
use std::fmt::Write;
6
+
use std::{
7
+
env,
8
+
fmt::{self, Display, Formatter},
9
+
fs::{self},
10
+
path::Path,
11
+
};
12
+
13
+
use itertools::Itertools;
14
+
use proc_macro2::TokenTree;
15
+
use syn::{Expr, Item, ItemEnum, Lit, Meta, MetaList, MetaNameValue, parse_file};
16
+
17
+
macro_rules! p {
18
+
($($tokens: tt)*) => {
19
+
println!("cargo::warning={}", format!($($tokens)*))
20
+
}
21
+
}
22
+
23
+
#[derive(Debug)]
24
+
struct DerivedError {
25
+
code: Option<String>,
26
+
help: Option<String>,
27
+
message: Option<String>,
28
+
doc_string: String,
29
+
}
30
+
31
+
impl Display for DerivedError {
32
+
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
33
+
write!(
34
+
f,
35
+
"## `{code}` {{#{code}}}
36
+
37
+
{doc}
38
+
{message}
39
+
{help}",
40
+
doc = self.doc_string,
41
+
code = self.code.as_ref().unwrap(),
42
+
help = match &self.help {
43
+
Some(help) => format!(
44
+
"
45
+
::: tip HELP
46
+
{help}
47
+
:::"
48
+
),
49
+
None => String::new(),
50
+
},
51
+
message = match &self.message {
52
+
Some(message) => format!(
53
+
"
54
+
```txt [message]
55
+
{message}
56
+
```"
57
+
),
58
+
None => String::new(),
59
+
}
60
+
)
61
+
}
62
+
}
63
+
64
+
impl DerivedError {
65
+
fn get_error(&mut self, list: &MetaList) -> Result<(), miette::Error> {
66
+
if list.path.segments.last().unwrap().ident != "error" {
67
+
return Err(miette!("Not an error"));
68
+
}
69
+
70
+
self.message = Some(
71
+
list.tokens
72
+
.clone()
73
+
.into_iter()
74
+
.filter(|tok| matches!(tok, TokenTree::Literal(tok) if tok.to_string().starts_with('"')))
75
+
.map(|tok| tok.to_string())
76
+
.join(""),
77
+
);
78
+
79
+
Err(miette!("No error msg found"))
80
+
}
81
+
82
+
fn update_diagnostic(&mut self, list: &MetaList) -> Result<(), miette::Error> {
83
+
if list.path.segments.last().unwrap().ident != "diagnostic" {
84
+
return Err(miette!("Not a diagnostic"));
85
+
}
86
+
87
+
let vec: Vec<_> = list.tokens.clone().into_iter().collect();
88
+
89
+
// Find `diagnostic(code(x::y::z))`
90
+
let code: Option<String> = if let Some((_, TokenTree::Group(group))) =
91
+
vec.iter().tuple_windows().find(|(ident, group)| {
92
+
matches!(ident, TokenTree::Ident(ident) if ident == "code")
93
+
&& matches!(group, TokenTree::Group(..))
94
+
}) {
95
+
Some(group.stream().to_string().replace(' ', ""))
96
+
} else {
97
+
None
98
+
};
99
+
100
+
// Find `diagnostic(help("hi"))`
101
+
let help: Option<String> = if let Some((_, TokenTree::Group(group))) =
102
+
vec.iter().tuple_windows().find(|(ident, group)| {
103
+
matches!(ident, TokenTree::Ident(ident) if ident == "help")
104
+
&& matches!(group, TokenTree::Group(..))
105
+
}) {
106
+
Some(group.stream().to_string())
107
+
} else {
108
+
None
109
+
};
110
+
111
+
if let Some(code) = code {
112
+
self.code = Some(code);
113
+
self.help = help;
114
+
return Ok(());
115
+
}
116
+
117
+
Err(miette!("Had no code."))
118
+
}
119
+
120
+
fn update_from_list(&mut self, list: &MetaList) {
121
+
let _ = self.get_error(list);
122
+
let _ = self.update_diagnostic(list);
123
+
}
124
+
125
+
fn update_from_namevalue(&mut self, list: MetaNameValue) -> Result<(), miette::Error> {
126
+
if list.path.segments.last().unwrap().ident != "doc" {
127
+
return Err(miette!("Not a doc string"));
128
+
}
129
+
130
+
if let Expr::Lit(lit) = list.value
131
+
&& let Lit::Str(str) = lit.lit
132
+
{
133
+
let _ = write!(self.doc_string, "{}\n\n", &str.value()[1..]);
134
+
}
135
+
136
+
Ok(())
137
+
}
138
+
}
139
+
140
+
fn main() -> Result<()> {
141
+
println!("cargo:rerun-if-changed=src/errors.rs");
142
+
143
+
let manifest_dir = env::var("CARGO_MANIFEST_DIR").into_diagnostic()?;
144
+
let Ok(md_out_dir) = env::var("DIAGNOSTICS_MD_OUTPUT") else {
145
+
return Ok(());
146
+
};
147
+
148
+
let src_path = Path::new(&manifest_dir).join("src/errors.rs");
149
+
let src = fs::read_to_string(&src_path)
150
+
.into_diagnostic()
151
+
.wrap_err("reading errors.rs")?;
152
+
153
+
let syntax_tree = parse_file(&src)
154
+
.into_diagnostic()
155
+
.wrap_err("parsing errors.rs")?;
156
+
let mut entries: Vec<DerivedError> = Vec::new();
157
+
158
+
for item in &syntax_tree.items {
159
+
if let Item::Enum(ItemEnum { variants, .. }) = item {
160
+
for variant in variants {
161
+
let mut entry = DerivedError {
162
+
code: None,
163
+
help: None,
164
+
message: None,
165
+
doc_string: String::new(),
166
+
};
167
+
168
+
for attribute in variant.attrs.clone() {
169
+
match attribute.meta {
170
+
Meta::List(list) => {
171
+
entry.update_from_list(&list);
172
+
}
173
+
Meta::NameValue(nv) => {
174
+
let _ = entry.update_from_namevalue(nv);
175
+
}
176
+
Meta::Path(_) => {}
177
+
}
178
+
}
179
+
180
+
if entry.code.is_some() {
181
+
entries.push(entry);
182
+
}
183
+
}
184
+
}
185
+
}
186
+
187
+
fs::create_dir_all(Path::new(&md_out_dir))
188
+
.into_diagnostic()
189
+
.wrap_err("creating target directory")?;
190
+
fs::write(
191
+
Path::new(&md_out_dir).join("DIAGNOSTICS.md"),
192
+
entries
193
+
.iter()
194
+
.map(std::string::ToString::to_string)
195
+
.join("\n\n"),
196
+
)
197
+
.into_diagnostic()
198
+
.wrap_err("writing DIAGNOSTICS.md")?;
199
+
200
+
p!(
201
+
"wrote to {:?}",
202
+
Path::new(&md_out_dir).join("DIAGNOSTICS.md")
203
+
);
204
+
205
+
Ok(())
206
+
}
+13
crates/core/src/cache/migrations/20251124234730_init.sql
···
1
+
create table hive_inspection (
2
+
id integer primary key autoincrement,
3
+
json_value text not null unique
4
+
) strict;
5
+
6
+
create table cached_inspection (
7
+
store_path text,
8
+
hash text,
9
+
10
+
inspection_id integer references hive_inspection(id) not null,
11
+
12
+
primary key (store_path, hash)
13
+
) strict;
+16
crates/core/src/cache/migrations/20251126222409_blobs.sql
···
1
+
create table inspection_blobs (
2
+
id integer primary key autoincrement,
3
+
json_value blob not null unique,
4
+
schema_version integer not null
5
+
) strict;
6
+
7
+
create table inspection_cache (
8
+
store_path text,
9
+
hash text,
10
+
blob_id integer references inspection_blobs (id) not null,
11
+
primary key (store_path, hash)
12
+
) strict;
13
+
14
+
drop table cached_inspection;
15
+
16
+
drop table hive_inspection;
+237
crates/core/src/cache/mod.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::{
5
+
env,
6
+
path::{Path, PathBuf},
7
+
};
8
+
9
+
use sqlx::{
10
+
Pool, Sqlite,
11
+
migrate::Migrator,
12
+
sqlite::{SqliteConnectOptions, SqlitePoolOptions},
13
+
};
14
+
use tokio::fs::create_dir_all;
15
+
use tracing::{debug, error, trace};
16
+
17
+
use crate::hive::{FlakePrefetch, Hive};
18
+
19
+
#[derive(Clone)]
20
+
pub struct InspectionCache {
21
+
pool: Pool<Sqlite>,
22
+
}
23
+
24
+
static MIGRATOR: Migrator = sqlx::migrate!("src/cache/migrations");
25
+
26
+
async fn get_cache_directory() -> Option<PathBuf> {
27
+
let home = PathBuf::from(
28
+
env::var("HOME")
29
+
.inspect_err(|_| error!("HOME env var not found"))
30
+
.ok()?,
31
+
);
32
+
33
+
trace!(home = ?home);
34
+
35
+
let cache_home = env::var("XDG_CACHE_HOME")
36
+
.inspect_err(|_| debug!("XDG_CACHE_HOME not found"))
37
+
.ok()
38
+
.map(PathBuf::from)
39
+
.unwrap_or(home.join(".cache"));
40
+
41
+
let cache_directory = cache_home.join("wire");
42
+
43
+
trace!(cache_directory = ?cache_directory);
44
+
45
+
let _ = create_dir_all(&cache_directory).await;
46
+
47
+
Some(cache_directory)
48
+
}
49
+
50
+
impl InspectionCache {
51
+
pub async fn new() -> Option<Self> {
52
+
let cache_path = get_cache_directory().await?.join("inspect.db");
53
+
debug!(cache_path = ?cache_path);
54
+
55
+
let pool = SqlitePoolOptions::new()
56
+
.max_connections(1)
57
+
.connect_with(
58
+
SqliteConnectOptions::new()
59
+
.filename(cache_path)
60
+
.create_if_missing(true),
61
+
)
62
+
.await
63
+
.inspect_err(|x| error!("failed to open cache db: {x}"))
64
+
.ok()?;
65
+
66
+
MIGRATOR
67
+
.run(&pool)
68
+
.await
69
+
.inspect_err(|err| error!("failed to run cache migrations: {err:?}"))
70
+
.ok()?;
71
+
72
+
Some(Self { pool })
73
+
}
74
+
75
+
fn cache_invalid(store_path: &String) -> bool {
76
+
let path = Path::new(store_path);
77
+
78
+
// possible TOCTOU
79
+
!path.exists()
80
+
}
81
+
82
+
pub async fn get_hive(&self, prefetch: &FlakePrefetch) -> Option<Hive> {
83
+
struct Query {
84
+
json_value: Vec<u8>,
85
+
store_path: String,
86
+
}
87
+
88
+
let cached_blob = sqlx::query_as!(
89
+
Query,
90
+
"
91
+
select
92
+
inspection_blobs.json_value,
93
+
inspection_cache.store_path
94
+
from
95
+
inspection_blobs
96
+
join inspection_cache on inspection_cache.blob_id = inspection_blobs.id
97
+
where
98
+
inspection_cache.store_path = $1
99
+
and inspection_cache.hash = $2
100
+
and inspection_blobs.schema_version = $3
101
+
limit
102
+
1
103
+
",
104
+
prefetch.store_path,
105
+
prefetch.hash,
106
+
Hive::SCHEMA_VERSION
107
+
)
108
+
.fetch_optional(&self.pool)
109
+
.await
110
+
.inspect_err(|x| error!("failed to fetch cached hive: {x}"))
111
+
.ok()??;
112
+
113
+
// the cached path may have been garbage collected, discard it
114
+
// it is quite hard to replicate this bug but it's occurred to me
115
+
// at least once
116
+
if Self::cache_invalid(&cached_blob.store_path) {
117
+
trace!("discarding cache that does not exist in the nix store");
118
+
return None;
119
+
}
120
+
121
+
trace!(
122
+
"read {} bytes of zstd data from cache",
123
+
cached_blob.json_value.len()
124
+
);
125
+
126
+
let json_string = zstd::decode_all(cached_blob.json_value.as_slice())
127
+
.inspect_err(|err| error!("failed to decode cached zstd data: {err}"))
128
+
.ok()?;
129
+
130
+
trace!(
131
+
"inflated {} > {} in decoding",
132
+
cached_blob.json_value.len(),
133
+
json_string.len()
134
+
);
135
+
136
+
serde_json::from_slice(&json_string)
137
+
.inspect_err(|err| {
138
+
error!("could not use cached evaluation: {err}");
139
+
})
140
+
.ok()
141
+
}
142
+
143
+
pub async fn store_hive(&self, prefetch: &FlakePrefetch, json_value: &String) {
144
+
let Ok(json_value) = zstd::encode_all(json_value.as_bytes(), 0)
145
+
.inspect_err(|err| error!("failed to encode data w/ zstd: {err}"))
146
+
else {
147
+
return;
148
+
};
149
+
150
+
let hive_inspection = sqlx::query_scalar!(
151
+
"
152
+
insert into inspection_blobs (json_value, schema_version)
153
+
values ($1, $2)
154
+
on conflict(json_value)
155
+
do update set json_value = excluded.json_value
156
+
returning inspection_blobs.id
157
+
",
158
+
json_value,
159
+
Hive::SCHEMA_VERSION
160
+
)
161
+
.fetch_one(&self.pool)
162
+
.await
163
+
.inspect_err(|x| error!("could not insert hive_inspection: {x}"));
164
+
165
+
let Ok(blob_id) = hive_inspection else {
166
+
return;
167
+
};
168
+
169
+
let cached_inspection = sqlx::query!(
170
+
"
171
+
insert into
172
+
inspection_cache (store_path, hash, blob_id)
173
+
values
174
+
($1, $2, $3)
175
+
",
176
+
prefetch.store_path,
177
+
prefetch.hash,
178
+
blob_id
179
+
)
180
+
.execute(&self.pool)
181
+
.await;
182
+
183
+
if let Err(err) = cached_inspection {
184
+
error!("could not insert cached_inspection: {err}");
185
+
}
186
+
}
187
+
188
+
pub async fn gc(&self) -> Result<(), sqlx::Error> {
189
+
// keep newest 30 AND
190
+
// delete caches that refer to a blob w/ wrong schema
191
+
sqlx::query!(
192
+
"delete from inspection_cache
193
+
where
194
+
blob_id in (
195
+
select
196
+
id
197
+
from
198
+
inspection_blobs
199
+
where
200
+
schema_version != $1
201
+
)
202
+
or ROWID in (
203
+
select
204
+
ROWID
205
+
from
206
+
inspection_cache
207
+
order by
208
+
ROWID desc
209
+
limit
210
+
-1
211
+
offset
212
+
30
213
+
)",
214
+
Hive::SCHEMA_VERSION
215
+
)
216
+
.execute(&self.pool)
217
+
.await?;
218
+
219
+
// delete orphaned blobs
220
+
sqlx::query!(
221
+
"delete from inspection_blobs
222
+
where
223
+
not exists (
224
+
select
225
+
1
226
+
from
227
+
inspection_cache
228
+
where
229
+
inspection_cache.blob_id = inspection_blobs.id
230
+
)"
231
+
)
232
+
.execute(&self.pool)
233
+
.await?;
234
+
235
+
Ok(())
236
+
}
237
+
}
+74
crates/core/src/commands/builder.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt;
5
+
6
+
pub(crate) struct CommandStringBuilder {
7
+
command: String,
8
+
}
9
+
10
+
impl CommandStringBuilder {
11
+
pub(crate) fn nix() -> Self {
12
+
Self {
13
+
command: "nix".to_string(),
14
+
}
15
+
}
16
+
17
+
pub(crate) fn new<S: AsRef<str>>(s: S) -> Self {
18
+
Self {
19
+
command: s.as_ref().trim().to_string(),
20
+
}
21
+
}
22
+
23
+
pub(crate) fn arg<S: AsRef<str>>(&mut self, argument: S) {
24
+
let argument = argument.as_ref().trim();
25
+
self.command.push(' ');
26
+
self.command.push_str(argument);
27
+
}
28
+
29
+
pub(crate) fn opt_arg<S: AsRef<str>>(&mut self, opt: bool, argument: S) {
30
+
if !opt {
31
+
return;
32
+
}
33
+
34
+
self.arg(argument);
35
+
}
36
+
37
+
pub(crate) fn args<S: AsRef<str>>(&mut self, arguments: &[S]) {
38
+
for arg in arguments {
39
+
self.arg(arg);
40
+
}
41
+
}
42
+
}
43
+
44
+
impl fmt::Display for CommandStringBuilder {
45
+
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
46
+
write!(f, "{}", self.command)
47
+
}
48
+
}
49
+
50
+
impl AsRef<str> for CommandStringBuilder {
51
+
fn as_ref(&self) -> &str {
52
+
&self.command
53
+
}
54
+
}
55
+
56
+
#[cfg(test)]
57
+
mod tests {
58
+
use crate::commands::builder::CommandStringBuilder;
59
+
60
+
#[test]
61
+
fn command_builder() {
62
+
let mut builder = CommandStringBuilder::new("a");
63
+
builder.arg(" b ");
64
+
builder.args(&[" c ", "d", "e"]);
65
+
builder.opt_arg(false, "f");
66
+
builder.opt_arg(true, "g");
67
+
68
+
assert_eq!(
69
+
builder.to_string(),
70
+
std::convert::AsRef::<str>::as_ref(&builder)
71
+
);
72
+
assert_eq!(builder.to_string(), "a b c d e g");
73
+
}
74
+
}
+177
crates/core/src/commands/common.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::collections::HashMap;
5
+
6
+
use tracing::instrument;
7
+
8
+
use crate::{
9
+
EvalGoal, SubCommandModifiers,
10
+
commands::{
11
+
CommandArguments, Either, WireCommandChip, builder::CommandStringBuilder, run_command,
12
+
run_command_with_env,
13
+
},
14
+
errors::{CommandError, HiveInitialisationError, HiveLibError},
15
+
hive::{
16
+
HiveLocation,
17
+
node::{Context, Objective, Push},
18
+
},
19
+
};
20
+
21
+
fn get_common_copy_path_help(error: &CommandError) -> Option<String> {
22
+
if let CommandError::CommandFailed { logs, .. } = error
23
+
&& (logs.contains("error: unexpected end-of-file"))
24
+
{
25
+
Some("wire requires the deploying user or wire binary cache is trusted on the remote server. if you're attempting to make that change, skip keys with --no-keys. please read https://wire.althaea.zone/guides/keys for more information".to_string())
26
+
} else {
27
+
None
28
+
}
29
+
}
30
+
31
+
pub async fn push(context: &Context<'_>, push: Push<'_>) -> Result<(), HiveLibError> {
32
+
let mut command_string = CommandStringBuilder::nix();
33
+
34
+
command_string.args(&["--extra-experimental-features", "nix-command", "copy"]);
35
+
if let Objective::Apply(apply_objective) = context.objective {
36
+
command_string.opt_arg(
37
+
apply_objective.substitute_on_destination,
38
+
"--substitute-on-destination",
39
+
);
40
+
}
41
+
command_string.arg("--to");
42
+
command_string.args(&[
43
+
format!(
44
+
"ssh://{user}@{host}",
45
+
user = context.node.target.user,
46
+
host = context.node.target.get_preferred_host()?,
47
+
),
48
+
match push {
49
+
Push::Derivation(drv) => format!("{drv} --derivation"),
50
+
Push::Path(path) => path.clone(),
51
+
},
52
+
]);
53
+
54
+
let child = run_command_with_env(
55
+
&CommandArguments::new(command_string, context.modifiers)
56
+
.mode(crate::commands::ChildOutputMode::Nix),
57
+
HashMap::from([(
58
+
"NIX_SSHOPTS".into(),
59
+
context
60
+
.node
61
+
.target
62
+
.create_ssh_opts(context.modifiers, false)?,
63
+
)]),
64
+
)
65
+
.await?;
66
+
67
+
let status = child.wait_till_success().await;
68
+
69
+
let help = if let Err(ref error) = status {
70
+
get_common_copy_path_help(error).map(Box::new)
71
+
} else {
72
+
None
73
+
};
74
+
75
+
status.map_err(|error| HiveLibError::NixCopyError {
76
+
name: context.name.clone(),
77
+
path: push.to_string(),
78
+
error: Box::new(error),
79
+
help,
80
+
})?;
81
+
82
+
Ok(())
83
+
}
84
+
85
+
fn get_common_command_help(error: &CommandError) -> Option<String> {
86
+
if let CommandError::CommandFailed { logs, .. } = error
87
+
// marshmallow: you're using this repo as a hive you idiot
88
+
&& (logs.contains("attribute 'inspect' missing")
89
+
// using a flake that does not provide `wire`
90
+
|| logs.contains("does not provide attribute 'packages.x86_64-linux.wire'")
91
+
// using a file called `hive.nix` that is not actually a hive
92
+
|| logs.contains("attribute 'inspect' in selection path"))
93
+
{
94
+
Some("Double check this `--path` or `--flake` is a wire hive. You may be pointing to the wrong directory.".to_string())
95
+
} else {
96
+
None
97
+
}
98
+
}
99
+
100
+
pub async fn get_hive_node_names(
101
+
location: &HiveLocation,
102
+
modifiers: SubCommandModifiers,
103
+
) -> Result<Vec<String>, HiveLibError> {
104
+
let output = evaluate_hive_attribute(location, &EvalGoal::Names, modifiers).await?;
105
+
serde_json::from_str(&output).map_err(|err| {
106
+
HiveLibError::HiveInitialisationError(HiveInitialisationError::ParseEvaluateError(err))
107
+
})
108
+
}
109
+
110
+
/// Evaluates the hive in flakeref with regard to the given goal,
111
+
/// and returns stdout.
112
+
#[instrument(ret(level = tracing::Level::TRACE), skip_all)]
113
+
pub async fn evaluate_hive_attribute(
114
+
location: &HiveLocation,
115
+
goal: &EvalGoal<'_>,
116
+
modifiers: SubCommandModifiers,
117
+
) -> Result<String, HiveLibError> {
118
+
let attribute = match location {
119
+
HiveLocation::Flake { uri, .. } => {
120
+
format!(
121
+
"{uri}#wire --apply \"hive: {}\"",
122
+
match goal {
123
+
EvalGoal::Inspect => "hive.inspect".to_string(),
124
+
EvalGoal::Names => "hive.names".to_string(),
125
+
EvalGoal::GetTopLevel(node) => format!("hive.topLevels.{node}"),
126
+
}
127
+
)
128
+
}
129
+
HiveLocation::HiveNix(path) => {
130
+
format!(
131
+
"--file {} {}",
132
+
&path.to_string_lossy(),
133
+
match goal {
134
+
EvalGoal::Inspect => "inspect".to_string(),
135
+
EvalGoal::Names => "names".to_string(),
136
+
EvalGoal::GetTopLevel(node) => format!("topLevels.{node}"),
137
+
}
138
+
)
139
+
}
140
+
};
141
+
142
+
let mut command_string = CommandStringBuilder::nix();
143
+
command_string.args(&[
144
+
"--extra-experimental-features",
145
+
"nix-command",
146
+
"--extra-experimental-features",
147
+
"flakes",
148
+
"eval",
149
+
"--json",
150
+
]);
151
+
command_string.opt_arg(modifiers.show_trace, "--show-trace");
152
+
command_string.arg(&attribute);
153
+
154
+
let child = run_command(
155
+
&CommandArguments::new(command_string, modifiers)
156
+
.mode(crate::commands::ChildOutputMode::Nix),
157
+
)
158
+
.await?;
159
+
160
+
let status = child.wait_till_success().await;
161
+
162
+
let help = if let Err(ref error) = status {
163
+
get_common_command_help(error).map(Box::new)
164
+
} else {
165
+
None
166
+
};
167
+
168
+
status
169
+
.map_err(|source| HiveLibError::NixEvalError {
170
+
attribute,
171
+
source,
172
+
help,
173
+
})
174
+
.map(|x| match x {
175
+
Either::Left((_, stdout)) | Either::Right((_, stdout)) => stdout,
176
+
})
177
+
}
+240
crates/core/src/commands/mod.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use crate::commands::pty::{InteractiveChildChip, interactive_command_with_env};
5
+
use std::{collections::HashMap, str::from_utf8, sync::LazyLock};
6
+
7
+
use aho_corasick::AhoCorasick;
8
+
use gjson::Value;
9
+
use itertools::Itertools;
10
+
use nix_compat::log::{AT_NIX_PREFIX, VerbosityLevel};
11
+
use num_enum::TryFromPrimitive;
12
+
use tracing::{debug, error, info, trace, warn};
13
+
14
+
use crate::{
15
+
SubCommandModifiers,
16
+
commands::noninteractive::{NonInteractiveChildChip, non_interactive_command_with_env},
17
+
errors::{CommandError, HiveLibError},
18
+
hive::node::{Node, Target},
19
+
};
20
+
21
+
pub(crate) mod builder;
22
+
pub mod common;
23
+
pub(crate) mod noninteractive;
24
+
pub(crate) mod pty;
25
+
26
+
#[derive(Copy, Clone, Debug)]
27
+
pub(crate) enum ChildOutputMode {
28
+
Nix,
29
+
Generic,
30
+
Interactive,
31
+
}
32
+
33
+
#[derive(Debug)]
34
+
pub enum Either<L, R> {
35
+
Left(L),
36
+
Right(R),
37
+
}
38
+
39
+
#[derive(Debug)]
40
+
pub(crate) struct CommandArguments<'t, S: AsRef<str>> {
41
+
modifiers: SubCommandModifiers,
42
+
target: Option<&'t Target>,
43
+
output_mode: ChildOutputMode,
44
+
command_string: S,
45
+
keep_stdin_open: bool,
46
+
privilege_escalation_command: Option<String>,
47
+
log_stdout: bool,
48
+
}
49
+
50
+
static AHO_CORASICK: LazyLock<AhoCorasick> = LazyLock::new(|| {
51
+
AhoCorasick::builder()
52
+
.ascii_case_insensitive(false)
53
+
.match_kind(aho_corasick::MatchKind::LeftmostFirst)
54
+
.build([AT_NIX_PREFIX])
55
+
.unwrap()
56
+
});
57
+
58
+
impl<'a, S: AsRef<str>> CommandArguments<'a, S> {
59
+
pub(crate) const fn new(command_string: S, modifiers: SubCommandModifiers) -> Self {
60
+
Self {
61
+
command_string,
62
+
keep_stdin_open: false,
63
+
privilege_escalation_command: None,
64
+
log_stdout: false,
65
+
target: None,
66
+
output_mode: ChildOutputMode::Generic,
67
+
modifiers,
68
+
}
69
+
}
70
+
71
+
pub(crate) const fn execute_on_remote(mut self, target: Option<&'a Target>) -> Self {
72
+
self.target = target;
73
+
self
74
+
}
75
+
76
+
pub(crate) const fn mode(mut self, mode: ChildOutputMode) -> Self {
77
+
self.output_mode = mode;
78
+
self
79
+
}
80
+
81
+
pub(crate) const fn keep_stdin_open(mut self) -> Self {
82
+
self.keep_stdin_open = true;
83
+
self
84
+
}
85
+
86
+
pub(crate) fn elevated(mut self, node: &Node) -> Self {
87
+
self.privilege_escalation_command =
88
+
Some(node.privilege_escalation_command.iter().join(" "));
89
+
self
90
+
}
91
+
92
+
pub(crate) const fn is_elevated(&self) -> bool {
93
+
self.privilege_escalation_command.is_some()
94
+
}
95
+
96
+
pub(crate) const fn log_stdout(mut self) -> Self {
97
+
self.log_stdout = true;
98
+
self
99
+
}
100
+
}
101
+
102
+
pub(crate) async fn run_command<S: AsRef<str>>(
103
+
arguments: &CommandArguments<'_, S>,
104
+
) -> Result<Either<InteractiveChildChip, NonInteractiveChildChip>, HiveLibError> {
105
+
run_command_with_env(arguments, HashMap::new()).await
106
+
}
107
+
108
+
pub(crate) async fn run_command_with_env<S: AsRef<str>>(
109
+
arguments: &CommandArguments<'_, S>,
110
+
envs: HashMap<String, String>,
111
+
) -> Result<Either<InteractiveChildChip, NonInteractiveChildChip>, HiveLibError> {
112
+
// use the non-interactive command runner when forced
113
+
// ... or when there is no reason for interactivity (the command is local and unprivileged)
114
+
if arguments.modifiers.non_interactive
115
+
|| (arguments.target.is_none() && !arguments.is_elevated())
116
+
{
117
+
return Ok(Either::Right(non_interactive_command_with_env(
118
+
arguments, envs,
119
+
)?));
120
+
}
121
+
122
+
Ok(Either::Left(
123
+
interactive_command_with_env(arguments, envs).await?,
124
+
))
125
+
}
126
+
127
+
pub(crate) trait WireCommandChip {
128
+
type ExitStatus;
129
+
130
+
async fn wait_till_success(self) -> Result<Self::ExitStatus, CommandError>;
131
+
async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError>;
132
+
}
133
+
134
+
type ExitStatus = Either<(portable_pty::ExitStatus, String), (std::process::ExitStatus, String)>;
135
+
136
+
impl WireCommandChip for Either<InteractiveChildChip, NonInteractiveChildChip> {
137
+
type ExitStatus = ExitStatus;
138
+
139
+
async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError> {
140
+
match self {
141
+
Self::Left(left) => left.write_stdin(data).await,
142
+
Self::Right(right) => right.write_stdin(data).await,
143
+
}
144
+
}
145
+
146
+
async fn wait_till_success(self) -> Result<Self::ExitStatus, CommandError> {
147
+
match self {
148
+
Self::Left(left) => left.wait_till_success().await.map(Either::Left),
149
+
Self::Right(right) => right.wait_till_success().await.map(Either::Right),
150
+
}
151
+
}
152
+
}
153
+
154
+
fn trace_gjson_str<'a>(log: &'a Value<'a>, msg: &'a str) -> Option<String> {
155
+
if msg.is_empty() {
156
+
return None;
157
+
}
158
+
159
+
let level = log.get("level");
160
+
161
+
if !level.exists() {
162
+
return None;
163
+
}
164
+
165
+
let level = match VerbosityLevel::try_from_primitive(level.u64()) {
166
+
Ok(level) => level,
167
+
Err(err) => {
168
+
error!("nix log `level` did not match to a VerbosityLevel: {err:?}");
169
+
return None;
170
+
}
171
+
};
172
+
173
+
let msg = strip_ansi_escapes::strip_str(msg);
174
+
175
+
match level {
176
+
VerbosityLevel::Info => info!("{msg}"),
177
+
VerbosityLevel::Warn | VerbosityLevel::Notice => warn!("{msg}"),
178
+
VerbosityLevel::Error => error!("{msg}"),
179
+
VerbosityLevel::Debug => debug!("{msg}"),
180
+
VerbosityLevel::Vomit | VerbosityLevel::Talkative | VerbosityLevel::Chatty => {
181
+
trace!("{msg}");
182
+
}
183
+
}
184
+
185
+
if matches!(
186
+
level,
187
+
VerbosityLevel::Error | VerbosityLevel::Warn | VerbosityLevel::Notice
188
+
) {
189
+
return Some(msg);
190
+
}
191
+
192
+
None
193
+
}
194
+
195
+
impl ChildOutputMode {
196
+
/// this function is by far the biggest hotspot in the whole tree
197
+
/// Returns a string if this log is notable enough to be stored as an error message
198
+
fn trace_slice(self, line: &mut [u8]) -> Option<String> {
199
+
let slice = match self {
200
+
Self::Generic | Self::Interactive => {
201
+
let string = String::from_utf8_lossy(line);
202
+
let stripped = strip_ansi_escapes::strip_str(&string);
203
+
warn!("{stripped}");
204
+
return Some(string.to_string());
205
+
}
206
+
Self::Nix => {
207
+
let position = AHO_CORASICK.find(&line).map(|x| &mut line[x.end()..]);
208
+
209
+
if let Some(json_buf) = position {
210
+
json_buf
211
+
} else {
212
+
// usually happens when ssh is outputting something
213
+
warn!("{}", String::from_utf8_lossy(line));
214
+
return None;
215
+
}
216
+
}
217
+
};
218
+
219
+
let Ok(str) = from_utf8(slice) else {
220
+
error!("nix log was not valid utf8!");
221
+
return None;
222
+
};
223
+
224
+
let log = gjson::parse(str);
225
+
226
+
let text = log.get("text");
227
+
228
+
if text.exists() {
229
+
return trace_gjson_str(&log, text.str());
230
+
}
231
+
232
+
let text = log.get("msg");
233
+
234
+
if text.exists() {
235
+
return trace_gjson_str(&log, text.str());
236
+
}
237
+
238
+
None
239
+
}
240
+
}
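Taken together, `CommandArguments`, `run_command`, and the `WireCommandChip` trait are the surface the rest of the crate drives commands through. A minimal sketch of a caller elsewhere in the crate, assuming the pub(crate) items above stay as shown; the command string, `target`, and `modifiers` are placeholder values:

    use crate::SubCommandModifiers;
    use crate::commands::{ChildOutputMode, CommandArguments, Either, WireCommandChip, run_command};
    use crate::errors::HiveLibError;
    use crate::hive::node::Target;

    async fn remote_nix_version(
        target: &Target,
        modifiers: SubCommandModifiers,
    ) -> Result<String, HiveLibError> {
        // Run over SSH, treat output as generic lines, echo stdout into tracing.
        let arguments = CommandArguments::new("nix --version", modifiers)
            .execute_on_remote(Some(target))
            .mode(ChildOutputMode::Generic)
            .log_stdout();

        // run_command picks the PTY or the non-interactive runner for us.
        let child = run_command(&arguments).await?;

        // Both runners hand back (exit status, collected stdout) on success.
        match child.wait_till_success().await.map_err(HiveLibError::CommandError)? {
            Either::Left((_, stdout)) | Either::Right((_, stdout)) => Ok(stdout),
        }
    }

The same pattern appears in `commands/common.rs` above for `nix eval`.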
+199
crates/core/src/commands/noninteractive.rs
+199
crates/core/src/commands/noninteractive.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::{
5
+
collections::{HashMap, VecDeque},
6
+
process::ExitStatus,
7
+
sync::Arc,
8
+
};
9
+
10
+
use crate::{
11
+
SubCommandModifiers,
12
+
commands::{ChildOutputMode, CommandArguments, WireCommandChip},
13
+
errors::{CommandError, HiveLibError},
14
+
hive::node::Target,
15
+
};
16
+
use itertools::Itertools;
17
+
use tokio::{
18
+
io::{AsyncWriteExt, BufReader},
19
+
process::{Child, ChildStdin, Command},
20
+
sync::Mutex,
21
+
task::JoinSet,
22
+
};
23
+
use tracing::{Instrument, debug, instrument, trace};
24
+
25
+
pub(crate) struct NonInteractiveChildChip {
26
+
error_collection: Arc<Mutex<VecDeque<String>>>,
27
+
stdout_collection: Arc<Mutex<VecDeque<String>>>,
28
+
child: Child,
29
+
joinset: JoinSet<()>,
30
+
original_command: String,
31
+
stdin: ChildStdin,
32
+
}
33
+
34
+
#[instrument(skip_all, name = "run", fields(elevated = %arguments.is_elevated()))]
35
+
pub(crate) fn non_interactive_command_with_env<S: AsRef<str>>(
36
+
arguments: &CommandArguments<S>,
37
+
envs: HashMap<String, String>,
38
+
) -> Result<NonInteractiveChildChip, HiveLibError> {
39
+
let mut command = if let Some(target) = arguments.target {
40
+
create_sync_ssh_command(target, arguments.modifiers)?
41
+
} else {
42
+
let mut command = Command::new("sh");
43
+
44
+
command.arg("-c");
45
+
46
+
command
47
+
};
48
+
49
+
let command_string = format!(
50
+
"{command_string}{extra}",
51
+
command_string = arguments.command_string.as_ref(),
52
+
extra = match arguments.output_mode {
53
+
ChildOutputMode::Generic | ChildOutputMode::Interactive => "",
54
+
ChildOutputMode::Nix => " --log-format internal-json",
55
+
}
56
+
);
57
+
58
+
let command_string = if let Some(escalation_command) = &arguments.privilege_escalation_command {
59
+
format!("{escalation_command} sh -c '{command_string}'")
60
+
} else {
61
+
command_string
62
+
};
63
+
64
+
debug!("{command_string}");
65
+
66
+
command.arg(&command_string);
67
+
command.stdin(std::process::Stdio::piped());
68
+
command.stderr(std::process::Stdio::piped());
69
+
command.stdout(std::process::Stdio::piped());
70
+
command.kill_on_drop(true);
71
+
// command.env_clear();
72
+
command.envs(envs);
73
+
74
+
let mut child = command.spawn().unwrap();
75
+
let error_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
76
+
let stdout_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
77
+
let stdin = child.stdin.take().unwrap();
78
+
79
+
let stdout_handle = child
80
+
.stdout
81
+
.take()
82
+
.ok_or(HiveLibError::CommandError(CommandError::NoHandle))?;
83
+
let stderr_handle = child
84
+
.stderr
85
+
.take()
86
+
.ok_or(HiveLibError::CommandError(CommandError::NoHandle))?;
87
+
88
+
let mut joinset = JoinSet::new();
89
+
let output_mode = Arc::new(arguments.output_mode);
90
+
91
+
joinset.spawn(
92
+
handle_io(
93
+
stderr_handle,
94
+
output_mode.clone(),
95
+
error_collection.clone(),
96
+
true,
97
+
true,
98
+
)
99
+
.in_current_span(),
100
+
);
101
+
joinset.spawn(
102
+
handle_io(
103
+
stdout_handle,
104
+
output_mode.clone(),
105
+
stdout_collection.clone(),
106
+
false,
107
+
arguments.log_stdout,
108
+
)
109
+
.in_current_span(),
110
+
);
111
+
112
+
Ok(NonInteractiveChildChip {
113
+
error_collection,
114
+
stdout_collection,
115
+
child,
116
+
joinset,
117
+
original_command: arguments.command_string.as_ref().to_string(),
118
+
stdin,
119
+
})
120
+
}
121
+
122
+
impl WireCommandChip for NonInteractiveChildChip {
123
+
type ExitStatus = (ExitStatus, String);
124
+
125
+
async fn wait_till_success(mut self) -> Result<Self::ExitStatus, CommandError> {
126
+
let status = self.child.wait().await.unwrap();
127
+
let _ = self.joinset.join_all().await;
128
+
129
+
if !status.success() {
130
+
let logs = self.error_collection.lock().await.iter().rev().join("\n");
131
+
132
+
return Err(CommandError::CommandFailed {
133
+
command_ran: self.original_command,
134
+
logs,
135
+
code: match status.code() {
136
+
Some(code) => format!("code {code}"),
137
+
None => "no exit code".to_string(),
138
+
},
139
+
reason: "known-status",
140
+
});
141
+
}
142
+
143
+
let stdout = self.stdout_collection.lock().await.iter().rev().join("\n");
144
+
145
+
Ok((status, stdout))
146
+
}
147
+
148
+
async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError> {
149
+
trace!("Writing {} bytes", data.len());
150
+
self.stdin.write_all(&data).await.unwrap();
151
+
Ok(())
152
+
}
153
+
}
154
+
155
+
#[instrument(skip_all, name = "log")]
156
+
pub async fn handle_io<R>(
157
+
reader: R,
158
+
output_mode: Arc<ChildOutputMode>,
159
+
collection: Arc<Mutex<VecDeque<String>>>,
160
+
is_error: bool,
161
+
should_log: bool,
162
+
) where
163
+
R: tokio::io::AsyncRead + Unpin,
164
+
{
165
+
let mut io_reader = tokio::io::AsyncBufReadExt::lines(BufReader::new(reader));
166
+
167
+
while let Some(line) = io_reader.next_line().await.unwrap() {
168
+
let mut line = line.into_bytes();
169
+
170
+
let log = if should_log {
171
+
Some(output_mode.trace_slice(&mut line))
172
+
} else {
173
+
None
174
+
};
175
+
176
+
if !is_error {
177
+
let mut queue = collection.lock().await;
178
+
queue.push_front(String::from_utf8_lossy(&line).to_string());
179
+
} else if let Some(error_msg) = log.flatten() {
180
+
let mut queue = collection.lock().await;
181
+
queue.push_front(error_msg);
182
+
// keep at most the 20 most recent messages at the front; drop the rest.
183
+
queue.truncate(20);
184
+
}
185
+
}
186
+
187
+
debug!("io_handler: goodbye!");
188
+
}
189
+
190
+
fn create_sync_ssh_command(
191
+
target: &Target,
192
+
modifiers: SubCommandModifiers,
193
+
) -> Result<Command, HiveLibError> {
194
+
let mut command = Command::new("ssh");
195
+
command.args(target.create_ssh_args(modifiers, true, false)?);
196
+
command.arg(target.get_preferred_host()?.to_string());
197
+
198
+
Ok(command)
199
+
}
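The runner above never shells out with argument vectors; it builds one flat string and hands it to `sh -c` (or to `ssh`). The wrapping reduces to plain formatting, sketched here with a hypothetical base command and `sudo` as the escalation command:

    fn main() {
        let base = "nix build .#toplevel";
        let escalation = "sudo";

        // ChildOutputMode::Nix appends the structured-log flag.
        let with_flag = format!("{base} --log-format internal-json");

        // Elevated commands are additionally wrapped in `sh -c '...'`.
        let wrapped = format!("{escalation} sh -c '{with_flag}'");

        assert_eq!(
            wrapped,
            "sudo sh -c 'nix build .#toplevel --log-format internal-json'"
        );
    }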
+102
crates/core/src/commands/pty/input.rs
+102
crates/core/src/commands/pty/input.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::os::fd::{AsFd, OwnedFd};
5
+
6
+
use nix::{
7
+
poll::{PollFd, PollFlags, PollTimeout, poll},
8
+
unistd::read,
9
+
};
10
+
use tracing::{Span, debug, error, instrument, trace};
11
+
12
+
use crate::{
13
+
commands::pty::{MasterWriter, THREAD_BEGAN_SIGNAL, THREAD_QUIT_SIGNAL},
14
+
errors::CommandError,
15
+
};
16
+
17
+
/// Exits when the quit signal is written to `cancel_pipe_r`.
18
+
/// A pipe is used to cancel the function.
19
+
#[instrument(skip_all, level = "trace", parent = span)]
20
+
pub(super) fn watch_stdin_from_user(
21
+
cancel_pipe_r: &OwnedFd,
22
+
mut master_writer: MasterWriter,
23
+
write_pipe_r: &OwnedFd,
24
+
span: Span,
25
+
) -> Result<(), CommandError> {
26
+
const WRITER_POSITION: usize = 0;
27
+
const SIGNAL_POSITION: usize = 1;
28
+
const USER_POSITION: usize = 2;
29
+
30
+
let mut buffer = [0u8; 1024];
31
+
let stdin = std::io::stdin();
32
+
let mut cancel_pipe_buf = [0u8; 1];
33
+
34
+
let user_stdin_fd = stdin.as_fd();
35
+
let cancel_pipe_r_fd = cancel_pipe_r.as_fd();
36
+
37
+
let mut all_fds = vec![
38
+
PollFd::new(write_pipe_r.as_fd(), PollFlags::POLLIN),
39
+
PollFd::new(cancel_pipe_r.as_fd(), PollFlags::POLLIN),
40
+
PollFd::new(user_stdin_fd, PollFlags::POLLIN),
41
+
];
42
+
43
+
loop {
44
+
match poll(&mut all_fds, PollTimeout::NONE) {
45
+
Ok(0) => {} // timeout, impossible
46
+
Ok(_) => {
47
+
// The user stdin pipe can be removed
48
+
if all_fds.get(USER_POSITION).is_some()
49
+
&& let Some(events) = all_fds[USER_POSITION].revents()
50
+
&& events.contains(PollFlags::POLLIN)
51
+
{
52
+
trace!("Got stdin from user...");
53
+
let n = read(user_stdin_fd, &mut buffer).map_err(CommandError::PosixPipe)?;
54
+
master_writer
55
+
.write_all(&buffer[..n])
56
+
.map_err(CommandError::WritingMasterStdout)?;
57
+
master_writer
58
+
.flush()
59
+
.map_err(CommandError::WritingMasterStdout)?;
60
+
}
61
+
62
+
if let Some(events) = all_fds[WRITER_POSITION].revents()
63
+
&& events.contains(PollFlags::POLLIN)
64
+
{
65
+
trace!("Got stdin from writer...");
66
+
let n = read(write_pipe_r, &mut buffer).map_err(CommandError::PosixPipe)?;
67
+
master_writer
68
+
.write_all(&buffer[..n])
69
+
.map_err(CommandError::WritingMasterStdout)?;
70
+
master_writer
71
+
.flush()
72
+
.map_err(CommandError::WritingMasterStdout)?;
73
+
}
74
+
75
+
if let Some(events) = all_fds[SIGNAL_POSITION].revents()
76
+
&& events.contains(PollFlags::POLLIN)
77
+
{
78
+
let n = read(cancel_pipe_r_fd, &mut cancel_pipe_buf)
79
+
.map_err(CommandError::PosixPipe)?;
80
+
let message = &cancel_pipe_buf[..n];
81
+
82
+
trace!("Got byte from signal pipe: {message:?}");
83
+
84
+
if message == THREAD_QUIT_SIGNAL {
85
+
return Ok(());
86
+
}
87
+
88
+
if message == THREAD_BEGAN_SIGNAL {
89
+
all_fds.remove(USER_POSITION);
90
+
}
91
+
}
92
+
}
93
+
Err(e) => {
94
+
error!("Poll error: {e}");
95
+
break;
96
+
}
97
+
}
98
+
}
99
+
100
+
debug!("stdin_thread: goodbye");
101
+
Ok(())
102
+
}
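The watcher is controlled purely through file descriptors: the parent never joins the blocking thread, it writes a single byte down the cancel pipe and lets `poll()` wake up. A sketch of that signalling under the fd-based nix API used above; the constant mirrors `THREAD_QUIT_SIGNAL` from `pty/mod.rs`:

    use std::os::fd::OwnedFd;
    use nix::unistd::{pipe, write};

    // Mirrors the module-internal constant defined in pty/mod.rs.
    const THREAD_QUIT_SIGNAL: &[u8; 1] = b"q";

    fn cancel_watcher() -> nix::Result<(OwnedFd, OwnedFd)> {
        // The read end would be handed to watch_stdin_from_user as cancel_pipe_r;
        // one byte written to the write end wakes its poll() loop and stops it.
        let (read_end, write_end) = pipe()?;
        write(&write_end, THREAD_QUIT_SIGNAL)?;
        Ok((read_end, write_end))
    }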
+63
crates/core/src/commands/pty/logbuffer.rs
+63
crates/core/src/commands/pty/logbuffer.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
/// Split into its own struct so it can be tested more easily
5
+
pub(crate) struct LogBuffer {
6
+
buffer: Vec<u8>,
7
+
}
8
+
9
+
impl LogBuffer {
10
+
pub const fn new() -> Self {
11
+
Self { buffer: Vec::new() }
12
+
}
13
+
14
+
pub fn process_slice(&mut self, slice: &[u8]) {
15
+
self.buffer.extend_from_slice(slice);
16
+
}
17
+
18
+
pub fn next_line(&mut self) -> Option<Vec<u8>> {
19
+
let line_end = self.buffer.iter().position(|x| *x == b'\n')?;
20
+
21
+
let drained = self.buffer.drain(..line_end).collect();
22
+
self.buffer.remove(0);
23
+
Some(drained)
24
+
}
25
+
26
+
#[cfg(test)]
27
+
fn take_lines(&mut self) -> Vec<Vec<u8>> {
28
+
let mut lines = vec![];
29
+
30
+
while let Some(line) = self.next_line() {
31
+
lines.push(line);
32
+
}
33
+
34
+
lines
35
+
}
36
+
}
37
+
38
+
#[cfg(test)]
39
+
mod tests {
40
+
use super::*;
41
+
42
+
#[test]
43
+
fn test_split_line_processing() {
44
+
let mut log_buffer = LogBuffer::new();
45
+
46
+
log_buffer.process_slice(b"Writing key KeySpec { destination: \"/et");
47
+
log_buffer.process_slice(b"c/keys/buildbot.aws.key\", user: \"buildbot\", group: \"buildbot-worker\", permissions: 384, length: 32, last: false, crc: 1370815231 }, 32 bytes of data");
48
+
log_buffer.process_slice(b"\n");
49
+
log_buffer.process_slice(b"xxx");
50
+
log_buffer.process_slice(b"xx_WIRE");
51
+
log_buffer.process_slice(b"_QUIT\n");
52
+
let lines = log_buffer.take_lines();
53
+
assert_eq!(lines.len(), 2);
54
+
assert_eq!(
55
+
String::from_utf8_lossy(lines.first().unwrap()),
56
+
"Writing key KeySpec { destination: \"/etc/keys/buildbot.aws.key\", user: \"buildbot\", group: \"buildbot-worker\", permissions: 384, length: 32, last: false, crc: 1370815231 }, 32 bytes of data"
57
+
);
58
+
assert_eq!(lines.get(1), Some(&"xxxxx_WIRE_QUIT".as_bytes().to_vec()));
59
+
60
+
// taking leaves none
61
+
assert_eq!(log_buffer.take_lines().len(), 0);
62
+
}
63
+
}
+566
crates/core/src/commands/pty/mod.rs
+566
crates/core/src/commands/pty/mod.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use crate::commands::pty::output::{WatchStdoutArguments, handle_pty_stdout};
5
+
use crate::status::STATUS;
6
+
use aho_corasick::PatternID;
7
+
use itertools::Itertools;
8
+
use nix::sys::termios::{LocalFlags, SetArg, Termios, tcgetattr, tcsetattr};
9
+
use nix::unistd::pipe;
10
+
use nix::unistd::write as posix_write;
11
+
use portable_pty::{CommandBuilder, NativePtySystem, PtyPair, PtySize};
12
+
use rand::distr::Alphabetic;
13
+
use std::collections::VecDeque;
14
+
use std::io::stderr;
15
+
use std::sync::{LazyLock, Mutex};
16
+
use std::{
17
+
io::{Read, Write},
18
+
os::fd::{AsFd, OwnedFd},
19
+
sync::Arc,
20
+
};
21
+
use tokio::sync::{oneshot, watch};
22
+
use tracing::instrument;
23
+
use tracing::{Span, debug, trace};
24
+
25
+
use crate::commands::CommandArguments;
26
+
use crate::commands::pty::input::watch_stdin_from_user;
27
+
use crate::errors::CommandError;
28
+
use crate::{SubCommandModifiers, acquire_stdin_lock};
29
+
use crate::{
30
+
commands::{ChildOutputMode, WireCommandChip},
31
+
errors::HiveLibError,
32
+
hive::node::Target,
33
+
};
34
+
35
+
mod input;
36
+
mod logbuffer;
37
+
mod output;
38
+
39
+
type MasterWriter = Box<dyn Write + Send>;
40
+
type MasterReader = Box<dyn Read + Send>;
41
+
42
+
/// the underlying command began
43
+
const THREAD_BEGAN_SIGNAL: &[u8; 1] = b"b";
44
+
const THREAD_QUIT_SIGNAL: &[u8; 1] = b"q";
45
+
46
+
type Child = Box<dyn portable_pty::Child + Send + Sync>;
47
+
48
+
pub(crate) struct InteractiveChildChip {
49
+
child: Child,
50
+
51
+
cancel_stdin_pipe_w: OwnedFd,
52
+
write_stdin_pipe_w: OwnedFd,
53
+
54
+
stderr_collection: Arc<Mutex<VecDeque<String>>>,
55
+
stdout_collection: Arc<Mutex<VecDeque<String>>>,
56
+
57
+
original_command: String,
58
+
59
+
status_receiver: watch::Receiver<Status>,
60
+
stdout_handle: tokio::task::JoinHandle<Result<(), CommandError>>,
61
+
}
62
+
63
+
/// Sets terminal options while user interaction with the PTY takes place,
64
+
/// and restores the original attributes when dropped
65
+
struct StdinTermiosAttrGuard(Termios);
66
+
67
+
#[derive(Debug)]
68
+
enum Status {
69
+
Running,
70
+
Done { success: bool },
71
+
}
72
+
73
+
#[derive(Debug)]
74
+
enum SearchFindings {
75
+
None,
76
+
Started,
77
+
Terminate,
78
+
}
79
+
80
+
static STARTED_PATTERN: LazyLock<PatternID> = LazyLock::new(|| PatternID::must(0));
81
+
static SUCCEEDED_PATTERN: LazyLock<PatternID> = LazyLock::new(|| PatternID::must(1));
82
+
static FAILED_PATTERN: LazyLock<PatternID> = LazyLock::new(|| PatternID::must(2));
83
+
84
+
/// Prefixes each STDOUT line with `#` so it can be told apart from stderr; stdout is far less common than stderr.
85
+
const IO_SUBS: &str = "1> >(while IFS= read -r line; do echo \"#$line\"; done)";
86
+
87
+
fn create_ending_segment<S: AsRef<str>>(
88
+
arguments: &CommandArguments<'_, S>,
89
+
needles: &Needles,
90
+
) -> String {
91
+
let Needles {
92
+
succeed,
93
+
fail,
94
+
start,
95
+
} = needles;
96
+
97
+
format!(
98
+
"echo -e '{succeed}' || echo '{failed}'",
99
+
succeed = if matches!(arguments.output_mode, ChildOutputMode::Interactive) {
100
+
format!(
101
+
"{start}\\n{succeed}",
102
+
start = String::from_utf8_lossy(start),
103
+
succeed = String::from_utf8_lossy(succeed)
104
+
)
105
+
} else {
106
+
String::from_utf8_lossy(succeed).to_string()
107
+
},
108
+
failed = String::from_utf8_lossy(fail)
109
+
)
110
+
}
111
+
112
+
fn create_starting_segment<S: AsRef<str>>(
113
+
arguments: &CommandArguments<'_, S>,
114
+
start_needle: &Arc<Vec<u8>>,
115
+
) -> String {
116
+
if matches!(arguments.output_mode, ChildOutputMode::Interactive) {
117
+
String::new()
118
+
} else {
119
+
format!(
120
+
"echo '{start}' && ",
121
+
start = String::from_utf8_lossy(start_needle)
122
+
)
123
+
}
124
+
}
125
+
126
+
#[instrument(skip_all, name = "run-int", fields(elevated = %arguments.is_elevated(), mode = ?arguments.output_mode))]
127
+
pub(crate) async fn interactive_command_with_env<S: AsRef<str>>(
128
+
arguments: &CommandArguments<'_, S>,
129
+
envs: std::collections::HashMap<String, String>,
130
+
) -> Result<InteractiveChildChip, HiveLibError> {
131
+
print_authenticate_warning(arguments)?;
132
+
133
+
let needles = create_needles();
134
+
let pty_system = NativePtySystem::default();
135
+
let pty_pair = portable_pty::PtySystem::openpty(&pty_system, PtySize::default()).unwrap();
136
+
setup_master(&pty_pair)?;
137
+
138
+
let command_string = &format!(
139
+
"{starting}{command} {flags} {IO_SUBS} && {ending}",
140
+
command = arguments.command_string.as_ref(),
141
+
flags = match arguments.output_mode {
142
+
ChildOutputMode::Nix => "--log-format internal-json",
143
+
ChildOutputMode::Generic | ChildOutputMode::Interactive => "",
144
+
},
145
+
starting = create_starting_segment(arguments, &needles.start),
146
+
ending = create_ending_segment(arguments, &needles)
147
+
);
148
+
149
+
debug!("{command_string}");
150
+
151
+
let mut command = build_command(arguments, command_string)?;
152
+
153
+
// give command all env vars
154
+
for (key, value) in envs {
155
+
command.env(key, value);
156
+
}
157
+
158
+
let clobber_guard = acquire_stdin_lock().await;
159
+
let _guard = StdinTermiosAttrGuard::new().map_err(HiveLibError::CommandError)?;
160
+
let child = pty_pair
161
+
.slave
162
+
.spawn_command(command)
163
+
.map_err(|x| HiveLibError::CommandError(CommandError::PortablePty(x)))?;
164
+
165
+
// Release any handles owned by the slave: we don't need it now
166
+
// that we've spawned the child.
167
+
drop(pty_pair.slave);
168
+
169
+
let reader = pty_pair
170
+
.master
171
+
.try_clone_reader()
172
+
.map_err(|x| HiveLibError::CommandError(CommandError::PortablePty(x)))?;
173
+
let master_writer = pty_pair
174
+
.master
175
+
.take_writer()
176
+
.map_err(|x| HiveLibError::CommandError(CommandError::PortablePty(x)))?;
177
+
178
+
let stderr_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
179
+
let stdout_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
180
+
let (began_tx, began_rx) = oneshot::channel::<()>();
181
+
let (status_sender, status_receiver) = watch::channel(Status::Running);
182
+
183
+
let stdout_handle = {
184
+
let arguments = WatchStdoutArguments {
185
+
began_tx,
186
+
reader,
187
+
needles,
188
+
output_mode: arguments.output_mode,
189
+
stderr_collection: stderr_collection.clone(),
190
+
stdout_collection: stdout_collection.clone(),
191
+
span: Span::current(),
192
+
log_stdout: arguments.log_stdout,
193
+
status_sender,
194
+
};
195
+
196
+
tokio::task::spawn_blocking(move || handle_pty_stdout(arguments))
197
+
};
198
+
199
+
let (write_stdin_pipe_r, write_stdin_pipe_w) =
200
+
pipe().map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
201
+
let (cancel_stdin_pipe_r, cancel_stdin_pipe_w) =
202
+
pipe().map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
203
+
204
+
tokio::task::spawn_blocking(move || {
205
+
watch_stdin_from_user(
206
+
&cancel_stdin_pipe_r,
207
+
master_writer,
208
+
&write_stdin_pipe_r,
209
+
Span::current(),
210
+
)
211
+
});
212
+
213
+
debug!("Setup threads");
214
+
215
+
let () = began_rx
216
+
.await
217
+
.map_err(|x| HiveLibError::CommandError(CommandError::OneshotRecvError(x)))?;
218
+
219
+
drop(clobber_guard);
220
+
221
+
if arguments.keep_stdin_open {
222
+
trace!("Sending THREAD_BEGAN_SIGNAL");
223
+
224
+
posix_write(&cancel_stdin_pipe_w, THREAD_BEGAN_SIGNAL)
225
+
.map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
226
+
} else {
227
+
trace!("Sending THREAD_QUIT_SIGNAL");
228
+
229
+
posix_write(&cancel_stdin_pipe_w, THREAD_QUIT_SIGNAL)
230
+
.map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
231
+
}
232
+
233
+
Ok(InteractiveChildChip {
234
+
child,
235
+
cancel_stdin_pipe_w,
236
+
write_stdin_pipe_w,
237
+
stderr_collection,
238
+
stdout_collection,
239
+
original_command: arguments.command_string.as_ref().to_string(),
240
+
status_receiver,
241
+
stdout_handle,
242
+
})
243
+
}
244
+
245
+
fn print_authenticate_warning<S: AsRef<str>>(
246
+
arguments: &CommandArguments<S>,
247
+
) -> Result<(), HiveLibError> {
248
+
if !arguments.is_elevated() {
249
+
return Ok(());
250
+
}
251
+
252
+
let _ = STATUS.lock().write_above_status(
253
+
&format!(
254
+
"{} | Authenticate for \"sudo {}\":\n",
255
+
arguments
256
+
.target
257
+
.map_or(Ok("localhost (!)".to_string()), |target| Ok(format!(
258
+
"{}@{}:{}",
259
+
target.user,
260
+
target.get_preferred_host()?,
261
+
target.port
262
+
)))?,
263
+
arguments.command_string.as_ref()
264
+
)
265
+
.into_bytes(),
266
+
&mut stderr(),
267
+
);
268
+
269
+
Ok(())
270
+
}
271
+
272
+
struct Needles {
273
+
succeed: Arc<Vec<u8>>,
274
+
fail: Arc<Vec<u8>>,
275
+
start: Arc<Vec<u8>>,
276
+
}
277
+
278
+
fn create_needles() -> Needles {
279
+
let tmp_prefix = rand::distr::SampleString::sample_string(&Alphabetic, &mut rand::rng(), 5);
280
+
281
+
Needles {
282
+
succeed: Arc::new(format!("{tmp_prefix}_W_Q").as_bytes().to_vec()),
283
+
fail: Arc::new(format!("{tmp_prefix}_W_F").as_bytes().to_vec()),
284
+
start: Arc::new(format!("{tmp_prefix}_W_S").as_bytes().to_vec()),
285
+
}
286
+
}
287
+
288
+
fn setup_master(pty_pair: &PtyPair) -> Result<(), HiveLibError> {
289
+
if let Some(fd) = pty_pair.master.as_raw_fd() {
290
+
// convert raw fd to a BorrowedFd
291
+
// safe as `fd` is dropped well before `pty_pair.master`
292
+
let fd = unsafe { std::os::unix::io::BorrowedFd::borrow_raw(fd) };
293
+
let mut termios =
294
+
tcgetattr(fd).map_err(|x| HiveLibError::CommandError(CommandError::TermAttrs(x)))?;
295
+
296
+
termios.local_flags &= !LocalFlags::ECHO;
297
+
// Key agent does not work well without canonical mode
298
+
termios.local_flags &= !LocalFlags::ICANON;
299
+
// Actually quit
300
+
termios.local_flags &= !LocalFlags::ISIG;
301
+
302
+
tcsetattr(fd, SetArg::TCSANOW, &termios)
303
+
.map_err(|x| HiveLibError::CommandError(CommandError::TermAttrs(x)))?;
304
+
}
305
+
306
+
Ok(())
307
+
}
308
+
309
+
fn build_command<S: AsRef<str>>(
310
+
arguments: &CommandArguments<'_, S>,
311
+
command_string: &String,
312
+
) -> Result<CommandBuilder, HiveLibError> {
313
+
let mut command = if let Some(target) = arguments.target {
314
+
let mut command = create_int_ssh_command(target, arguments.modifiers)?;
315
+
316
+
// force ssh to use our pseudo terminal
317
+
command.arg("-tt");
318
+
319
+
command
320
+
} else {
321
+
let mut command = portable_pty::CommandBuilder::new("sh");
322
+
323
+
command.arg("-c");
324
+
325
+
command
326
+
};
327
+
328
+
if arguments.is_elevated() {
329
+
command.arg(format!("sudo -u root -- sh -c '{command_string}'"));
330
+
} else {
331
+
command.arg(command_string);
332
+
}
333
+
334
+
Ok(command)
335
+
}
336
+
337
+
impl WireCommandChip for InteractiveChildChip {
338
+
type ExitStatus = (portable_pty::ExitStatus, String);
339
+
340
+
#[instrument(skip_all)]
341
+
async fn wait_till_success(mut self) -> Result<Self::ExitStatus, CommandError> {
342
+
drop(self.write_stdin_pipe_w);
343
+
344
+
let exit_status = tokio::task::spawn_blocking(move || self.child.wait())
345
+
.await
346
+
.map_err(CommandError::JoinError)?
347
+
.map_err(CommandError::WaitForStatus)?;
348
+
349
+
debug!("exit_status: {exit_status:?}");
350
+
351
+
self.stdout_handle
352
+
.await
353
+
.map_err(|_| CommandError::ThreadPanic)??;
354
+
355
+
let status = self
356
+
.status_receiver
357
+
.wait_for(|value| matches!(value, Status::Done { .. }))
358
+
.await
359
+
.unwrap();
360
+
361
+
let _ = posix_write(&self.cancel_stdin_pipe_w, THREAD_QUIT_SIGNAL);
362
+
363
+
if let Status::Done { success: true } = *status {
364
+
let logs = self
365
+
.stdout_collection
366
+
.lock()
367
+
.unwrap()
368
+
.iter()
369
+
.rev()
370
+
.map(|x| x.trim())
371
+
.join("\n");
372
+
373
+
return Ok((exit_status, logs));
374
+
}
375
+
376
+
debug!("child did not succeed");
377
+
378
+
let logs = self
379
+
.stderr_collection
380
+
.lock()
381
+
.unwrap()
382
+
.iter()
383
+
.rev()
384
+
.join("\n");
385
+
386
+
Err(CommandError::CommandFailed {
387
+
command_ran: self.original_command,
388
+
logs,
389
+
code: format!("code {}", exit_status.exit_code()),
390
+
reason: match *status {
391
+
Status::Done { .. } => "marked-unsuccessful",
392
+
Status::Running => "child-crashed-before-succeeding",
393
+
},
394
+
})
395
+
}
396
+
397
+
async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError> {
398
+
trace!("Writing {} bytes to stdin", data.len());
399
+
400
+
posix_write(&self.write_stdin_pipe_w, &data)
401
+
.map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
402
+
403
+
Ok(())
404
+
}
405
+
}
406
+
407
+
impl StdinTermiosAttrGuard {
408
+
fn new() -> Result<Self, CommandError> {
409
+
let stdin = std::io::stdin();
410
+
let stdin_fd = stdin.as_fd();
411
+
412
+
let mut termios = tcgetattr(stdin_fd).map_err(CommandError::TermAttrs)?;
413
+
let original_termios = termios.clone();
414
+
415
+
termios.local_flags &= !(LocalFlags::ECHO | LocalFlags::ICANON);
416
+
tcsetattr(stdin_fd, SetArg::TCSANOW, &termios).map_err(CommandError::TermAttrs)?;
417
+
418
+
Ok(StdinTermiosAttrGuard(original_termios))
419
+
}
420
+
}
421
+
422
+
impl Drop for StdinTermiosAttrGuard {
423
+
fn drop(&mut self) {
424
+
let stdin = std::io::stdin();
425
+
let stdin_fd = stdin.as_fd();
426
+
427
+
let _ = tcsetattr(stdin_fd, SetArg::TCSANOW, &self.0);
428
+
}
429
+
}
430
+
431
+
fn create_int_ssh_command(
432
+
target: &Target,
433
+
modifiers: SubCommandModifiers,
434
+
) -> Result<portable_pty::CommandBuilder, HiveLibError> {
435
+
let mut command = portable_pty::CommandBuilder::new("ssh");
436
+
command.args(target.create_ssh_args(modifiers, false, false)?);
437
+
command.arg(target.get_preferred_host()?.to_string());
438
+
Ok(command)
439
+
}
440
+
441
+
#[cfg(test)]
442
+
mod tests {
443
+
use aho_corasick::AhoCorasick;
444
+
use tokio::sync::oneshot::error::TryRecvError;
445
+
446
+
use crate::commands::pty::output::handle_rawmode_data;
447
+
448
+
use super::*;
449
+
use std::assert_matches::assert_matches;
450
+
451
+
#[test]
452
+
fn test_rawmode_data() {
453
+
let aho_corasick = AhoCorasick::builder()
454
+
.ascii_case_insensitive(false)
455
+
.match_kind(aho_corasick::MatchKind::LeftmostFirst)
456
+
.build(["START_NEEDLE", "SUCCEEDED_NEEDLE", "FAILED_NEEDLE"])
457
+
.unwrap();
458
+
let mut stderr = vec![];
459
+
let (began_tx, mut began_rx) = oneshot::channel::<()>();
460
+
let mut began_tx = Some(began_tx);
461
+
let (status_sender, _) = watch::channel(Status::Running);
462
+
463
+
// each "Bla" is 4 bytes.
464
+
let buffer = "bla bla bla START_NEEDLE bla bla bla".as_bytes();
465
+
let mut raw_mode_buffer = vec![];
466
+
467
+
// handle 1 "bla"
468
+
assert_matches!(
469
+
handle_rawmode_data(
470
+
&mut stderr,
471
+
buffer,
472
+
4,
473
+
&mut raw_mode_buffer,
474
+
&aho_corasick,
475
+
&status_sender,
476
+
&mut began_tx
477
+
),
478
+
Ok(SearchFindings::None)
479
+
);
480
+
assert_matches!(began_rx.try_recv(), Err(TryRecvError::Empty));
481
+
assert!(began_tx.is_some());
482
+
assert_eq!(raw_mode_buffer, b"bla ");
483
+
assert_matches!(*status_sender.borrow(), Status::Running);
484
+
485
+
let buffer = &buffer[4..];
486
+
487
+
// handle 2 "bla"'s and half a "START_NEEDLE"
488
+
let n = 4 + 4 + 6;
489
+
assert_matches!(
490
+
handle_rawmode_data(
491
+
&mut stderr,
492
+
buffer,
493
+
n,
494
+
&mut raw_mode_buffer,
495
+
&aho_corasick,
496
+
&status_sender,
497
+
&mut began_tx
498
+
),
499
+
Ok(SearchFindings::None)
500
+
);
501
+
assert_matches!(began_rx.try_recv(), Err(TryRecvError::Empty));
502
+
assert!(began_tx.is_some());
503
+
assert_matches!(*status_sender.borrow(), Status::Running);
504
+
assert_eq!(raw_mode_buffer, b"bla bla bla START_");
505
+
506
+
let buffer = &buffer[n..];
507
+
508
+
// handle rest of the data
509
+
let n = buffer.len();
510
+
assert_matches!(
511
+
handle_rawmode_data(
512
+
&mut stderr,
513
+
buffer,
514
+
n,
515
+
&mut raw_mode_buffer,
516
+
&aho_corasick,
517
+
&status_sender,
518
+
&mut began_tx
519
+
),
520
+
Ok(SearchFindings::Started)
521
+
);
522
+
assert_matches!(began_rx.try_recv(), Ok(()));
523
+
assert_matches!(began_tx, None);
524
+
assert_eq!(raw_mode_buffer, b"bla bla bla START_NEEDLE bla bla bla");
525
+
assert_matches!(*status_sender.borrow(), Status::Running);
526
+
527
+
// test failed needle
528
+
let buffer = "bla FAILED_NEEDLE bla".as_bytes();
529
+
let mut raw_mode_buffer = vec![];
530
+
531
+
let n = buffer.len();
532
+
assert_matches!(
533
+
handle_rawmode_data(
534
+
&mut stderr,
535
+
buffer,
536
+
n,
537
+
&mut raw_mode_buffer,
538
+
&aho_corasick,
539
+
&status_sender,
540
+
&mut began_tx
541
+
),
542
+
Ok(SearchFindings::Terminate)
543
+
);
544
+
assert_matches!(*status_sender.borrow(), Status::Done { success: false });
545
+
546
+
// test succeed needle
547
+
let buffer = "bla SUCCEEDED_NEEDLE bla".as_bytes();
548
+
let mut raw_mode_buffer = vec![];
549
+
let (status_sender, _) = watch::channel(Status::Running);
550
+
551
+
let n = buffer.len();
552
+
assert_matches!(
553
+
handle_rawmode_data(
554
+
&mut stderr,
555
+
buffer,
556
+
n,
557
+
&mut raw_mode_buffer,
558
+
&aho_corasick,
559
+
&status_sender,
560
+
&mut began_tx
561
+
),
562
+
Ok(SearchFindings::Terminate)
563
+
);
564
+
assert_matches!(*status_sender.borrow(), Status::Done { success: true });
565
+
}
566
+
}
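Rather than trusting the child's exit status alone, the PTY runner brackets the shell line with randomly prefixed sentinel strings ("needles"): one echoed before the command starts, one on success, one on failure, while `IO_SUBS` tags stdout lines with `#`. A sketch of the string the code above assembles, with hypothetical needles standing in for the random prefix:

    fn main() {
        // Hypothetical needles; the real ones use a random 5-letter prefix.
        let (start, succeed, fail) = ("abcde_W_S", "abcde_W_Q", "abcde_W_F");

        // Mirrors IO_SUBS: stdout lines get a '#' prefix so the reader can
        // separate them from stderr, which shares the single PTY stream.
        let io_subs = r##"1> >(while IFS= read -r line; do echo "#$line"; done)"##;

        // Nix-mode commands echo the start needle first and end by echoing
        // either the succeed or the fail needle.
        let command = format!(
            "echo '{start}' && nix build --log-format internal-json {io_subs} && echo -e '{succeed}' || echo '{fail}'"
        );
        println!("{command}");
    }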
+264
crates/core/src/commands/pty/output.rs
+264
crates/core/src/commands/pty/output.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use crate::{
5
+
commands::{
6
+
ChildOutputMode,
7
+
pty::{
8
+
FAILED_PATTERN, Needles, STARTED_PATTERN, SUCCEEDED_PATTERN, SearchFindings, Status,
9
+
logbuffer::LogBuffer,
10
+
},
11
+
},
12
+
errors::CommandError,
13
+
};
14
+
use aho_corasick::AhoCorasick;
15
+
use std::{
16
+
collections::VecDeque,
17
+
io::Write,
18
+
sync::{Arc, Mutex},
19
+
};
20
+
use tokio::sync::{oneshot, watch};
21
+
use tracing::{Span, debug, instrument};
22
+
23
+
pub(super) struct WatchStdoutArguments {
24
+
pub began_tx: oneshot::Sender<()>,
25
+
pub reader: super::MasterReader,
26
+
pub needles: Needles,
27
+
pub output_mode: ChildOutputMode,
28
+
pub stderr_collection: Arc<Mutex<VecDeque<String>>>,
29
+
pub stdout_collection: Arc<Mutex<VecDeque<String>>>,
30
+
pub status_sender: watch::Sender<Status>,
31
+
pub span: Span,
32
+
pub log_stdout: bool,
33
+
}
34
+
35
+
/// Handles data from the PTY, and logs or prompts the user depending on the state
36
+
/// of the command.
37
+
///
38
+
/// Emits a message on the `began_tx` when the command is considered started.
39
+
///
40
+
/// Records stderr and stdout when it is considered notable (all stdout, last few stderr messages)
41
+
#[instrument(skip_all, name = "log", parent = arguments.span)]
42
+
pub(super) fn handle_pty_stdout(arguments: WatchStdoutArguments) -> Result<(), CommandError> {
43
+
let WatchStdoutArguments {
44
+
began_tx,
45
+
mut reader,
46
+
needles,
47
+
output_mode,
48
+
stdout_collection,
49
+
stderr_collection,
50
+
status_sender,
51
+
log_stdout,
52
+
..
53
+
} = arguments;
54
+
55
+
let aho_corasick = AhoCorasick::builder()
56
+
.ascii_case_insensitive(false)
57
+
.match_kind(aho_corasick::MatchKind::LeftmostFirst)
58
+
.build([
59
+
needles.start.as_ref(),
60
+
needles.succeed.as_ref(),
61
+
needles.fail.as_ref(),
62
+
])
63
+
.unwrap();
64
+
65
+
let mut buffer = [0u8; 1024];
66
+
let mut stderr = std::io::stderr();
67
+
let mut began = false;
68
+
let mut log_buffer = LogBuffer::new();
69
+
let mut raw_mode_buffer = Vec::new();
70
+
let mut belled = false;
71
+
let mut began_tx = Some(began_tx);
72
+
73
+
'outer: loop {
74
+
match reader.read(&mut buffer) {
75
+
Ok(0) => break 'outer,
76
+
Ok(n) => {
77
+
// this block is responsible for outputting the "raw" data,
78
+
// mostly sudo prompts.
79
+
if !began {
80
+
let findings = handle_rawmode_data(
81
+
&mut stderr,
82
+
&buffer,
83
+
n,
84
+
&mut raw_mode_buffer,
85
+
&aho_corasick,
86
+
&status_sender,
87
+
&mut began_tx,
88
+
)?;
89
+
90
+
match findings {
91
+
SearchFindings::Terminate => break 'outer,
92
+
SearchFindings::Started => {
93
+
began = true;
94
+
continue;
95
+
}
96
+
SearchFindings::None => {}
97
+
}
98
+
99
+
if belled {
100
+
continue;
101
+
}
102
+
103
+
stderr
104
+
.write(b"\x07") // bell
105
+
.map_err(CommandError::WritingClientStderr)?;
106
+
stderr.flush().map_err(CommandError::WritingClientStderr)?;
107
+
108
+
belled = true;
109
+
110
+
continue;
111
+
}
112
+
113
+
log_buffer.process_slice(&buffer[..n]);
114
+
115
+
while let Some(mut line) = log_buffer.next_line() {
116
+
let findings =
117
+
search_string(&aho_corasick, &line, &status_sender, &mut began_tx);
118
+
119
+
match findings {
120
+
SearchFindings::Terminate => break 'outer,
121
+
SearchFindings::Started => {
122
+
began = true;
123
+
continue;
124
+
}
125
+
SearchFindings::None => {}
126
+
}
127
+
128
+
handle_normal_data(
129
+
&stderr_collection,
130
+
&stdout_collection,
131
+
&mut line,
132
+
log_stdout,
133
+
output_mode,
134
+
);
135
+
}
136
+
}
137
+
Err(e) => {
138
+
eprintln!("Error reading from PTY: {e}");
139
+
break;
140
+
}
141
+
}
142
+
}
143
+
144
+
began_tx.map(|began_tx| began_tx.send(()));
145
+
146
+
// failsafe if there were errors or the reader stopped
147
+
if matches!(*status_sender.borrow(), Status::Running) {
148
+
status_sender.send_replace(Status::Done { success: false });
149
+
}
150
+
151
+
debug!("stdout: goodbye");
152
+
153
+
Ok(())
154
+
}
155
+
156
+
/// handles raw data, prints to stderr when a prompt is detected
157
+
pub(super) fn handle_rawmode_data<W: std::io::Write>(
158
+
stderr: &mut W,
159
+
buffer: &[u8],
160
+
n: usize,
161
+
raw_mode_buffer: &mut Vec<u8>,
162
+
aho_corasick: &AhoCorasick,
163
+
status_sender: &watch::Sender<Status>,
164
+
began_tx: &mut Option<oneshot::Sender<()>>,
165
+
) -> Result<SearchFindings, CommandError> {
166
+
raw_mode_buffer.extend_from_slice(&buffer[..n]);
167
+
168
+
let findings = search_string(aho_corasick, raw_mode_buffer, status_sender, began_tx);
169
+
170
+
if matches!(
171
+
findings,
172
+
SearchFindings::Started | SearchFindings::Terminate
173
+
) {
174
+
return Ok(findings);
175
+
}
176
+
177
+
stderr
178
+
.write_all(&buffer[..n])
179
+
.map_err(CommandError::WritingClientStderr)?;
180
+
181
+
stderr.flush().map_err(CommandError::WritingClientStderr)?;
182
+
183
+
Ok(findings)
184
+
}
185
+
186
+
/// handles data when the command is considered "started", logs and records errors as appropriate
187
+
fn handle_normal_data(
188
+
stderr_collection: &Arc<Mutex<VecDeque<String>>>,
189
+
stdout_collection: &Arc<Mutex<VecDeque<String>>>,
190
+
line: &mut [u8],
191
+
log_stdout: bool,
192
+
output_mode: ChildOutputMode,
193
+
) {
194
+
if line.starts_with(b"#") {
195
+
let stripped = &mut line[1..];
196
+
197
+
if log_stdout {
198
+
output_mode.trace_slice(stripped);
199
+
}
200
+
201
+
let mut queue = stdout_collection.lock().unwrap();
202
+
queue.push_front(String::from_utf8_lossy(stripped).to_string());
203
+
return;
204
+
}
205
+
206
+
let log = output_mode.trace_slice(line);
207
+
208
+
if let Some(error_msg) = log {
209
+
let mut queue = stderr_collection.lock().unwrap();
210
+
211
+
// keep at most the 20 most recent messages at the front; drop the rest.
212
+
queue.push_front(error_msg);
213
+
queue.truncate(20);
214
+
}
215
+
}
216
+
217
+
/// Searches the haystack for the needles and reports whether the command started, terminated, or neither
218
+
fn search_string(
219
+
aho_corasick: &AhoCorasick,
220
+
haystack: &[u8],
221
+
status_sender: &watch::Sender<Status>,
222
+
began_tx: &mut Option<oneshot::Sender<()>>,
223
+
) -> SearchFindings {
224
+
let searched = aho_corasick
225
+
.find_iter(haystack)
226
+
.map(|x| x.pattern())
227
+
.collect::<Vec<_>>();
228
+
229
+
let started = if searched.contains(&STARTED_PATTERN) {
230
+
debug!("start needle was found, switching mode...");
231
+
if let Some(began_tx) = began_tx.take() {
232
+
let _ = began_tx.send(());
233
+
}
234
+
true
235
+
} else {
236
+
false
237
+
};
238
+
239
+
let succeeded = if searched.contains(&SUCCEEDED_PATTERN) {
240
+
debug!("succeed needle was found, marking child as succeeding.");
241
+
status_sender.send_replace(Status::Done { success: true });
242
+
true
243
+
} else {
244
+
false
245
+
};
246
+
247
+
let failed = if searched.contains(&FAILED_PATTERN) {
248
+
debug!("failed needle was found, elevated child did not succeed.");
249
+
status_sender.send_replace(Status::Done { success: false });
250
+
true
251
+
} else {
252
+
false
253
+
};
254
+
255
+
if succeeded || failed {
256
+
return SearchFindings::Terminate;
257
+
}
258
+
259
+
if started {
260
+
return SearchFindings::Started;
261
+
}
262
+
263
+
SearchFindings::None
264
+
}
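Once the start needle has been seen, every complete line is routed by its first byte: `#`-prefixed lines are the command's real stdout (tagged by `IO_SUBS`), everything else is stderr or Nix log output headed for `trace_slice` and the bounded error queue. A small sketch of that routing decision on hypothetical lines:

    fn main() {
        // Hypothetical lines as they come back from the PTY.
        for line in ["#/nix/store/abc-toplevel", "warning: Git tree is dirty"] {
            let bytes = line.as_bytes();
            if bytes.starts_with(b"#") {
                // stdout: strip the marker, keep the whole line for the caller
                println!("stdout: {}", String::from_utf8_lossy(&bytes[1..]));
            } else {
                // stderr/log: traced and, if notable, kept in the last-20 queue
                println!("stderr: {}", String::from_utf8_lossy(bytes));
            }
        }
    }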
+376
crates/core/src/errors.rs
+376
crates/core/src/errors.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#![allow(unused_assignments)]
5
+
6
+
use std::{num::ParseIntError, path::PathBuf, process::ExitStatus, sync::mpsc::RecvError};
7
+
8
+
use miette::{Diagnostic, SourceSpan};
9
+
use nix_compat::flakeref::{FlakeRef, FlakeRefError};
10
+
use thiserror::Error;
11
+
use tokio::task::JoinError;
12
+
13
+
use crate::hive::node::{Name, SwitchToConfigurationGoal};
14
+
15
+
#[cfg(debug_assertions)]
16
+
const DOCS_URL: &str = "http://localhost:5173/reference/errors.html";
17
+
#[cfg(not(debug_assertions))]
18
+
const DOCS_URL: &str = "https://wire.althaea.zone/reference/errors.html";
19
+
20
+
#[derive(Debug, Diagnostic, Error)]
21
+
pub enum KeyError {
22
+
#[diagnostic(
23
+
code(wire::key::File),
24
+
url("{DOCS_URL}#{}", self.code().unwrap())
25
+
)]
26
+
#[error("error reading file")]
27
+
File(#[source] std::io::Error),
28
+
29
+
#[diagnostic(
30
+
code(wire::key::SpawningCommand),
31
+
help("Ensure wire has the correct $PATH for this command"),
32
+
url("{DOCS_URL}#{}", self.code().unwrap())
33
+
)]
34
+
#[error("error spawning key command")]
35
+
CommandSpawnError {
36
+
#[source]
37
+
error: std::io::Error,
38
+
39
+
#[source_code]
40
+
command: String,
41
+
42
+
#[label(primary, "Program ran")]
43
+
command_span: Option<SourceSpan>,
44
+
},
45
+
46
+
#[diagnostic(
47
+
code(wire::key::Resolving),
48
+
url("{DOCS_URL}#{}", self.code().unwrap())
49
+
)]
50
+
#[error("Error resolving key command child process")]
51
+
CommandResolveError {
52
+
#[source]
53
+
error: std::io::Error,
54
+
55
+
#[source_code]
56
+
command: String,
57
+
},
58
+
59
+
#[diagnostic(
60
+
code(wire::key::CommandExit),
61
+
url("{DOCS_URL}#{}", self.code().unwrap())
62
+
)]
63
+
#[error("key command failed with status {}: {}", .0,.1)]
64
+
CommandError(ExitStatus, String),
65
+
66
+
#[diagnostic(
67
+
code(wire::key::Empty),
68
+
url("{DOCS_URL}#{}", self.code().unwrap())
69
+
)]
70
+
#[error("Command list empty")]
71
+
Empty,
72
+
73
+
#[diagnostic(
74
+
code(wire::key::ParseKeyPermissions),
75
+
help("Refer to the documentation for the format of key file permissions."),
76
+
url("{DOCS_URL}#{}", self.code().unwrap())
77
+
)]
78
+
#[error("Failed to parse key permissions")]
79
+
ParseKeyPermissions(#[source] ParseIntError),
80
+
}
81
+
82
+
#[derive(Debug, Diagnostic, Error)]
83
+
pub enum ActivationError {
84
+
#[diagnostic(
85
+
code(wire::activation::SwitchToConfiguration),
86
+
url("{DOCS_URL}#{}", self.code().unwrap())
87
+
)]
88
+
#[error("failed to run switch-to-configuration {0} on node {1}")]
89
+
SwitchToConfigurationError(SwitchToConfigurationGoal, Name, #[source] CommandError),
90
+
}
91
+
92
+
#[derive(Debug, Diagnostic, Error)]
93
+
pub enum NetworkError {
94
+
#[diagnostic(
95
+
code(wire::network::HostUnreachable),
96
+
help(
97
+
"If you failed due to a fault in DNS, note that a node can have multiple targets defined."
98
+
),
99
+
url("{DOCS_URL}#{}", self.code().unwrap())
100
+
)]
101
+
#[error("Cannot reach host {host}")]
102
+
HostUnreachable {
103
+
host: String,
104
+
#[source]
105
+
source: CommandError,
106
+
},
107
+
108
+
#[diagnostic(
109
+
code(wire::network::HostUnreachableAfterReboot),
110
+
url("{DOCS_URL}#{}", self.code().unwrap())
111
+
)]
112
+
#[error("Failed to get regain connection to {0} after activation.")]
113
+
HostUnreachableAfterReboot(String),
114
+
115
+
#[diagnostic(
116
+
code(wire::network::HostsExhausted),
117
+
url("{DOCS_URL}#{}", self.code().unwrap())
118
+
)]
119
+
#[error("Ran out of contactable hosts")]
120
+
HostsExhausted,
121
+
}
122
+
123
+
#[derive(Debug, Diagnostic, Error)]
124
+
pub enum HiveInitialisationError {
125
+
#[diagnostic(
126
+
code(wire::hive_init::NoHiveFound),
127
+
help(
128
+
"Double check the path is correct. You can adjust the hive path with `--path` when the hive lies outside of the CWD."
129
+
),
130
+
url("{DOCS_URL}#{}", self.code().unwrap())
131
+
)]
132
+
#[error("No hive could be found in {}", .0.display())]
133
+
NoHiveFound(PathBuf),
134
+
135
+
#[diagnostic(
136
+
code(wire::hive_init::Parse),
137
+
help("If you cannot resolve this problem, please create an issue."),
138
+
url("{DOCS_URL}#{}", self.code().unwrap())
139
+
)]
140
+
#[error("Failed to parse internal wire json.")]
141
+
ParseEvaluateError(#[source] serde_json::Error),
142
+
143
+
#[diagnostic(
144
+
code(wire::hive_init::ParsePrefetch),
145
+
help("please create an issue."),
146
+
url("{DOCS_URL}#{}", self.code().unwrap())
147
+
)]
148
+
#[error("Failed to parse `nix flake prefetch --json`.")]
149
+
ParsePrefetchError(#[source] serde_json::Error),
150
+
151
+
#[diagnostic(
152
+
code(wire::hive_init::NodeDoesNotExist),
153
+
help("Please create an issue!"),
154
+
url("{DOCS_URL}#{}", self.code().unwrap())
155
+
)]
156
+
#[error("node {0} not exist in hive")]
157
+
NodeDoesNotExist(String),
158
+
}
159
+
160
+
#[derive(Debug, Diagnostic, Error)]
161
+
pub enum HiveLocationError {
162
+
#[diagnostic(
163
+
code(wire::hive_location::MalformedPath),
164
+
url("{DOCS_URL}#{}", self.code().unwrap())
165
+
)]
166
+
#[error("Path was malformed: {}", .0.display())]
167
+
MalformedPath(PathBuf),
168
+
169
+
#[diagnostic(
170
+
code(wire::hive_location::Malformed),
171
+
url("{DOCS_URL}#{}", self.code().unwrap())
172
+
)]
173
+
#[error("--path was malformed")]
174
+
Malformed(#[source] FlakeRefError),
175
+
176
+
#[diagnostic(
177
+
code(wire::hive_location::TypeUnsupported),
178
+
url("{DOCS_URL}#{}", self.code().unwrap())
179
+
)]
180
+
#[error("The flakref had an unsupported type: {:#?}", .0)]
181
+
TypeUnsupported(Box<FlakeRef>),
182
+
}
183
+
184
+
#[derive(Debug, Diagnostic, Error)]
185
+
pub enum CommandError {
186
+
#[diagnostic(
187
+
code(wire::command::TermAttrs),
188
+
url("{DOCS_URL}#{}", self.code().unwrap())
189
+
)]
190
+
#[error("Failed to set PTY attrs")]
191
+
TermAttrs(#[source] nix::errno::Errno),
192
+
193
+
#[diagnostic(
194
+
code(wire::command::PosixPipe),
195
+
url("{DOCS_URL}#{}", self.code().unwrap())
196
+
)]
197
+
#[error("There was an error in regards to a pipe")]
198
+
PosixPipe(#[source] nix::errno::Errno),
199
+
200
+
/// Error wrapped around `portable_pty`'s anyhow
201
+
/// errors
202
+
#[diagnostic(
203
+
code(wire::command::PortablePty),
204
+
url("{DOCS_URL}#{}", self.code().unwrap())
205
+
)]
206
+
#[error("There was an error from the portable_pty crate")]
207
+
PortablePty(#[source] anyhow::Error),
208
+
209
+
#[diagnostic(
210
+
code(wire::command::Joining),
211
+
url("{DOCS_URL}#{}", self.code().unwrap())
212
+
)]
213
+
#[error("Failed to join on some tokio task")]
214
+
JoinError(#[source] JoinError),
215
+
216
+
#[diagnostic(
217
+
code(wire::command::WaitForStatus),
218
+
url("{DOCS_URL}#{}", self.code().unwrap())
219
+
)]
220
+
#[error("Failed to wait for the child's status")]
221
+
WaitForStatus(#[source] std::io::Error),
222
+
223
+
#[diagnostic(
224
+
code(wire::detached::NoHandle),
225
+
help("This should never happen, please create an issue!"),
226
+
url("{DOCS_URL}#{}", self.code().unwrap())
227
+
)]
228
+
#[error("There was no handle to child io")]
229
+
NoHandle,
230
+
231
+
#[diagnostic(
232
+
code(wire::command::WritingClientStdout),
233
+
url("{DOCS_URL}#{}", self.code().unwrap())
234
+
)]
235
+
#[error("Failed to write to client stderr.")]
236
+
WritingClientStderr(#[source] std::io::Error),
237
+
238
+
#[diagnostic(
239
+
code(wire::command::WritingMasterStdin),
240
+
url("{DOCS_URL}#{}", self.code().unwrap())
241
+
)]
242
+
#[error("Failed to write to PTY master stdout.")]
243
+
WritingMasterStdout(#[source] std::io::Error),
244
+
245
+
#[diagnostic(
246
+
code(wire::command::Recv),
247
+
url("{DOCS_URL}#{}", self.code().unwrap()),
248
+
help("please create an issue!"),
249
+
)]
250
+
#[error("Failed to receive a message from the begin channel")]
251
+
RecvError(#[source] RecvError),
252
+
253
+
#[diagnostic(
254
+
code(wire::command::ThreadPanic),
255
+
url("{DOCS_URL}#{}", self.code().unwrap()),
256
+
help("please create an issue!"),
257
+
)]
258
+
#[error("Thread panicked")]
259
+
ThreadPanic,
260
+
261
+
#[diagnostic(
262
+
code(wire::command::CommandFailed),
263
+
url("{DOCS_URL}#{}", self.code().unwrap()),
264
+
help("`nix` commands are filtered, run with -vvv to view all"),
265
+
)]
266
+
#[error("{command_ran} failed ({reason}) with {code} (last 20 lines):\n{logs}")]
267
+
CommandFailed {
268
+
command_ran: String,
269
+
logs: String,
270
+
code: String,
271
+
reason: &'static str,
272
+
},
273
+
274
+
#[diagnostic(
275
+
code(wire::command::RuntimeDirectory),
276
+
url("{DOCS_URL}#{}", self.code().unwrap())
277
+
)]
278
+
#[error("error creating $XDG_RUNTIME_DIR/wire")]
279
+
RuntimeDirectory(#[source] std::io::Error),
280
+
281
+
#[diagnostic(
282
+
code(wire::command::RuntimeDirectoryMissing),
283
+
url("{DOCS_URL}#{}", self.code().unwrap())
284
+
)]
285
+
#[error("$XDG_RUNTIME_DIR could not be used.")]
286
+
RuntimeDirectoryMissing(#[source] std::env::VarError),
287
+
288
+
#[diagnostic(
289
+
code(wire::command::OneshotRecvError),
290
+
url("{DOCS_URL}#{}", self.code().unwrap())
291
+
)]
292
+
#[error("Error waiting for begin message")]
293
+
OneshotRecvError(#[source] tokio::sync::oneshot::error::RecvError),
294
+
}
295
+
296
+
#[derive(Debug, Diagnostic, Error)]
297
+
pub enum HiveLibError {
298
+
#[error(transparent)]
299
+
#[diagnostic(transparent)]
300
+
HiveInitialisationError(HiveInitialisationError),
301
+
302
+
#[error(transparent)]
303
+
#[diagnostic(transparent)]
304
+
NetworkError(NetworkError),
305
+
306
+
#[error(transparent)]
307
+
#[diagnostic(transparent)]
308
+
ActivationError(ActivationError),
309
+
310
+
#[error(transparent)]
311
+
#[diagnostic(transparent)]
312
+
CommandError(CommandError),
313
+
314
+
#[error(transparent)]
315
+
#[diagnostic(transparent)]
316
+
HiveLocationError(HiveLocationError),
317
+
318
+
#[error("Failed to apply key {}", .0)]
319
+
KeyError(
320
+
String,
321
+
#[source]
322
+
#[diagnostic_source]
323
+
KeyError,
324
+
),
325
+
326
+
#[diagnostic(
327
+
code(wire::BuildNode),
328
+
url("{DOCS_URL}#{}", self.code().unwrap())
329
+
)]
330
+
#[error("failed to build node {name}")]
331
+
NixBuildError {
332
+
name: Name,
333
+
#[source]
334
+
source: CommandError,
335
+
},
336
+
337
+
#[diagnostic(
338
+
code(wire::CopyPath),
339
+
url("{DOCS_URL}#{}", self.code().unwrap())
340
+
)]
341
+
#[error("failed to copy path {path} to node {name}")]
342
+
NixCopyError {
343
+
name: Name,
344
+
path: String,
345
+
#[source]
346
+
error: Box<CommandError>,
347
+
#[help]
348
+
help: Option<Box<String>>,
349
+
},
350
+
351
+
#[diagnostic(code(wire::Evaluate))]
352
+
#[error("failed to evaluate `{attribute}` from the context of a hive.")]
353
+
NixEvalError {
354
+
attribute: String,
355
+
356
+
#[source]
357
+
source: CommandError,
358
+
359
+
#[help]
360
+
help: Option<Box<String>>,
361
+
},
362
+
363
+
#[diagnostic(
364
+
code(wire::Encoding),
365
+
url("{DOCS_URL}#{}", self.code().unwrap())
366
+
)]
367
+
#[error("error encoding length delimited data")]
368
+
Encoding(#[source] std::io::Error),
369
+
370
+
#[diagnostic(
371
+
code(wire::SIGINT),
372
+
url("{DOCS_URL}#{}", self.code().unwrap())
373
+
)]
374
+
#[error("SIGINT received, shut down")]
375
+
Sigint,
376
+
}
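Every variant carries a `#[diagnostic]` code whose anchor is appended to `DOCS_URL`, so the rendered error links straight to the matching entry on the errors reference page. A sketch of constructing one variant and letting miette render it; the `Report` wrapper is an assumption about how the binary surfaces these errors, and the field values are made up:

    use miette::Report;

    // Inside this crate the path is crate::errors::CommandError.
    use crate::errors::CommandError;

    fn main() {
        let err = CommandError::CommandFailed {
            command_ran: "nix build .#toplevel".to_string(),
            logs: "error: attribute 'toplevel' missing".to_string(),
            code: "code 1".to_string(),
            reason: "known-status",
        };

        // miette prints the message, the wire::command::CommandFailed code,
        // the help text, and the docs URL from the #[diagnostic] attribute.
        eprintln!("{:?}", Report::new(err));
    }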
+480
crates/core/src/hive/mod.rs
+480
crates/core/src/hive/mod.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use itertools::Itertools;
5
+
use nix_compat::flakeref::FlakeRef;
6
+
use node::{Name, Node};
7
+
use owo_colors::{OwoColorize, Stream};
8
+
use serde::de::Error;
9
+
use serde::{Deserialize, Deserializer, Serialize};
10
+
use std::collections::HashMap;
11
+
use std::collections::hash_map::OccupiedEntry;
12
+
use std::ffi::OsStr;
13
+
use std::fmt::Display;
14
+
use std::fs;
15
+
use std::path::PathBuf;
16
+
use std::str::FromStr;
17
+
use std::sync::Arc;
18
+
use tracing::{debug, info, instrument};
19
+
20
+
use crate::cache::InspectionCache;
21
+
use crate::commands::builder::CommandStringBuilder;
22
+
use crate::commands::common::evaluate_hive_attribute;
23
+
use crate::commands::{CommandArguments, Either, WireCommandChip, run_command};
24
+
use crate::errors::{HiveInitialisationError, HiveLocationError};
25
+
use crate::{EvalGoal, HiveLibError, SubCommandModifiers};
26
+
pub mod node;
27
+
pub mod steps;
28
+
29
+
#[derive(Serialize, Deserialize, Debug, PartialEq)]
30
+
#[serde(deny_unknown_fields)]
31
+
pub struct Hive {
32
+
pub nodes: HashMap<Name, Node>,
33
+
34
+
#[serde(deserialize_with = "check_schema_version", rename = "_schema")]
35
+
pub schema: u32,
36
+
}
37
+
38
+
pub enum Action<'a> {
39
+
Inspect,
40
+
EvaluateNode(OccupiedEntry<'a, String, Node>),
41
+
}
42
+
43
+
fn check_schema_version<'de, D: Deserializer<'de>>(d: D) -> Result<u32, D::Error> {
44
+
let version = u32::deserialize(d)?;
45
+
if version != Hive::SCHEMA_VERSION {
46
+
return Err(D::Error::custom(
47
+
"Version mismatch for Hive. Please ensure the binary and your wire input match!",
48
+
));
49
+
}
50
+
Ok(version)
51
+
}
52
+
53
+
impl Hive {
54
+
pub const SCHEMA_VERSION: u32 = 1;
55
+
56
+
#[instrument(skip_all, name = "eval_hive")]
57
+
pub async fn new_from_path(
58
+
location: &HiveLocation,
59
+
cache: Option<InspectionCache>,
60
+
modifiers: SubCommandModifiers,
61
+
) -> Result<Hive, HiveLibError> {
62
+
info!("evaluating hive {location:?}");
63
+
64
+
if let Some(ref cache) = cache
65
+
&& let HiveLocation::Flake { prefetch, .. } = location
66
+
&& let Some(hive) = cache.get_hive(prefetch).await
67
+
{
68
+
return Ok(hive);
69
+
}
70
+
71
+
let output = evaluate_hive_attribute(location, &EvalGoal::Inspect, modifiers).await?;
72
+
73
+
let hive: Hive = serde_json::from_str(&output).map_err(|err| {
74
+
HiveLibError::HiveInitialisationError(HiveInitialisationError::ParseEvaluateError(err))
75
+
})?;
76
+
77
+
if let Some(cache) = cache
78
+
&& let HiveLocation::Flake { prefetch, .. } = location
79
+
{
80
+
cache.store_hive(prefetch, &output).await;
81
+
}
82
+
83
+
Ok(hive)
84
+
}
85
+
86
+
/// # Errors
87
+
///
88
+
/// Returns an error if a node in nodes does not exist in the hive.
89
+
pub fn force_always_local(&mut self, nodes: Vec<String>) -> Result<(), HiveLibError> {
90
+
for node in nodes {
91
+
info!("Forcing a local build for {node}");
92
+
93
+
self.nodes
94
+
.get_mut(&Name(Arc::from(node.clone())))
95
+
.ok_or(HiveLibError::HiveInitialisationError(
96
+
HiveInitialisationError::NodeDoesNotExist(node.clone()),
97
+
))?
98
+
.build_remotely = false;
99
+
}
100
+
101
+
Ok(())
102
+
}
103
+
}
104
+
105
+
impl Display for Hive {
106
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
107
+
for (name, node) in &self.nodes {
108
+
writeln!(
109
+
f,
110
+
"Node {} {}:\n",
111
+
name.bold(),
112
+
format!("({})", node.host_platform)
113
+
.italic()
114
+
.if_supports_color(Stream::Stdout, |x| x.dimmed()),
115
+
)?;
116
+
117
+
if !node.tags.is_empty() {
118
+
write!(f, " > {}", "Tags:".bold())?;
119
+
writeln!(f, " {:?}", node.tags)?;
120
+
}
121
+
122
+
write!(f, " > {}", "Connection:".bold())?;
123
+
writeln!(f, " {{{}}}", node.target)?;
124
+
125
+
write!(
126
+
f,
127
+
" > {} {}{}",
128
+
"Build remotely".bold(),
129
+
"`deployment.buildOnTarget`"
130
+
.if_supports_color(Stream::Stdout, |x| x.dimmed())
131
+
.italic(),
132
+
":".bold()
133
+
)?;
134
+
writeln!(f, " {}", node.build_remotely)?;
135
+
136
+
write!(
137
+
f,
138
+
" > {} {}{}",
139
+
"Local apply allowed".bold(),
140
+
"`deployment.allowLocalDeployment`"
141
+
.if_supports_color(Stream::Stdout, |x| x.dimmed())
142
+
.italic(),
143
+
":".bold()
144
+
)?;
145
+
writeln!(f, " {}", node.allow_local_deployment)?;
146
+
147
+
if !node.keys.is_empty() {
148
+
write!(f, " > {}", "Keys:".bold())?;
149
+
writeln!(f, " {} key(s)", node.keys.len())?;
150
+
151
+
for key in &node.keys {
152
+
writeln!(f, " > {key}")?;
153
+
}
154
+
}
155
+
156
+
writeln!(f)?;
157
+
}
158
+
159
+
let total_keys = self
160
+
.nodes
161
+
.values()
162
+
.flat_map(|node| node.keys.iter())
163
+
.collect::<Vec<_>>();
164
+
let distinct_keys = self
165
+
.nodes
166
+
.values()
167
+
.flat_map(|node| node.keys.iter())
168
+
.unique()
169
+
.collect::<Vec<_>>()
170
+
.len();
171
+
172
+
write!(f, "{}", "Summary:".bold())?;
173
+
writeln!(
174
+
f,
175
+
" {} total node(s), totalling {} keys ({distinct_keys} distinct).",
176
+
self.nodes.len(),
177
+
total_keys.len()
178
+
)?;
179
+
writeln!(
180
+
f,
181
+
"{}",
182
+
"Note: Listed connections are tried from Left to Right".italic(),
183
+
)?;
184
+
185
+
Ok(())
186
+
}
187
+
}
188
+
189
+
#[derive(Debug, PartialEq, Eq, Deserialize)]
190
+
pub struct FlakePrefetch {
191
+
pub(crate) hash: String,
192
+
#[serde(rename = "storePath")]
193
+
pub(crate) store_path: String,
194
+
}
195
+
196
+
#[derive(Debug, PartialEq, Eq)]
197
+
pub enum HiveLocation {
198
+
HiveNix(PathBuf),
199
+
Flake {
200
+
uri: String,
201
+
prefetch: FlakePrefetch,
202
+
},
203
+
}
204
+
205
+
impl HiveLocation {
206
+
async fn get_flake(
207
+
uri: String,
208
+
modifiers: SubCommandModifiers,
209
+
) -> Result<HiveLocation, HiveLibError> {
210
+
let mut command_string = CommandStringBuilder::nix();
211
+
command_string.args(&[
212
+
"flake",
213
+
"prefetch",
214
+
"--extra-experimental-features",
215
+
"nix-command",
216
+
"--extra-experimental-features",
217
+
"flakes",
218
+
"--json",
219
+
]);
220
+
command_string.arg(&uri);
221
+
222
+
let command = run_command(
223
+
&CommandArguments::new(command_string, modifiers)
224
+
.mode(crate::commands::ChildOutputMode::Generic),
225
+
)
226
+
.await?;
227
+
228
+
let result = command
229
+
.wait_till_success()
230
+
.await
231
+
.map_err(HiveLibError::CommandError)?;
232
+
233
+
debug!(hash_json = ?result);
234
+
235
+
let prefetch = serde_json::from_str(&match result {
236
+
Either::Left((.., output)) | Either::Right((.., output)) => output,
237
+
})
238
+
.map_err(|x| {
239
+
HiveLibError::HiveInitialisationError(HiveInitialisationError::ParsePrefetchError(x))
240
+
})?;
241
+
242
+
debug!(prefetch = ?prefetch);
243
+
244
+
Ok(HiveLocation::Flake { uri, prefetch })
245
+
}
246
+
}
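// `get_flake` above shells out to `nix flake prefetch --json` and deserialises the
// `hash`/`storePath` fields into a `FlakePrefetch`; that prefetch also serves as the
// cache key used by `Hive::new_from_path`.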
247
+
248
+
pub async fn get_hive_location(
249
+
path: String,
250
+
modifiers: SubCommandModifiers,
251
+
) -> Result<HiveLocation, HiveLibError> {
252
+
let flakeref = FlakeRef::from_str(&path);
253
+
254
+
let path_to_location = async |path: PathBuf| {
255
+
Ok(match path.file_name().and_then(OsStr::to_str) {
256
+
Some("hive.nix") => HiveLocation::HiveNix(path.clone()),
257
+
Some(_) => {
258
+
if fs::metadata(path.join("flake.nix")).is_ok() {
259
+
HiveLocation::get_flake(path.display().to_string(), modifiers).await?
260
+
} else {
261
+
HiveLocation::HiveNix(path.join("hive.nix"))
262
+
}
263
+
}
264
+
None => {
265
+
return Err(HiveLibError::HiveLocationError(
266
+
HiveLocationError::MalformedPath(path.clone()),
267
+
));
268
+
}
269
+
})
270
+
};
271
+
272
+
match flakeref {
273
+
Err(nix_compat::flakeref::FlakeRefError::UrlParseError(_err)) => {
274
+
let path = PathBuf::from(path);
275
+
Ok(path_to_location(path).await?)
276
+
}
277
+
Ok(FlakeRef::Path { path, .. }) => Ok(path_to_location(path).await?),
278
+
Ok(
279
+
FlakeRef::Git { .. }
280
+
| FlakeRef::GitHub { .. }
281
+
| FlakeRef::GitLab { .. }
282
+
| FlakeRef::Tarball { .. }
283
+
| FlakeRef::Mercurial { .. }
284
+
| FlakeRef::SourceHut { .. },
285
+
) => Ok(HiveLocation::get_flake(path, modifiers).await?),
286
+
Err(err) => Err(HiveLibError::HiveLocationError(
287
+
HiveLocationError::Malformed(err),
288
+
)),
289
+
Ok(flakeref) => Err(HiveLibError::HiveLocationError(
290
+
HiveLocationError::TypeUnsupported(Box::new(flakeref)),
291
+
)),
292
+
}
293
+
}
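// Resolution order in `get_hive_location`: a path ending in `hive.nix` is used as-is,
// any other directory is probed for `flake.nix` before falling back to `<dir>/hive.nix`,
// and recognised remote flake refs (git, github, gitlab, tarball, mercurial, sourcehut)
// are prefetched; everything else is rejected as malformed or unsupported.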
294
+
295
+
#[cfg(test)]
296
+
mod tests {
297
+
use im::vector;
298
+
299
+
use crate::{
300
+
errors::CommandError,
301
+
get_test_path,
302
+
hive::steps::keys::{Key, Source, UploadKeyAt},
303
+
location,
304
+
test_support::make_flake_sandbox,
305
+
};
306
+
307
+
use super::*;
308
+
use std::{assert_matches::assert_matches, env};
309
+
310
+
// flake should always come before hive.nix
311
+
#[tokio::test]
312
+
async fn test_hive_dot_nix_priority() {
313
+
let location = location!(get_test_path!());
314
+
315
+
assert_matches!(location, HiveLocation::Flake { .. });
316
+
}
317
+
318
+
#[tokio::test]
319
+
#[cfg_attr(feature = "no_web_tests", ignore)]
320
+
async fn test_hive_file() {
321
+
let location = location!(get_test_path!());
322
+
323
+
let hive = Hive::new_from_path(&location, None, SubCommandModifiers::default())
324
+
.await
325
+
.unwrap();
326
+
327
+
let node = Node {
328
+
target: node::Target::from_host("192.168.122.96"),
329
+
..Default::default()
330
+
};
331
+
332
+
let mut nodes = HashMap::new();
333
+
nodes.insert(Name("node-a".into()), node);
334
+
335
+
assert_eq!(
336
+
hive,
337
+
Hive {
338
+
nodes,
339
+
schema: Hive::SCHEMA_VERSION
340
+
}
341
+
);
342
+
}
343
+
344
+
#[tokio::test]
345
+
#[cfg_attr(feature = "no_web_tests", ignore)]
346
+
async fn non_trivial_hive() {
347
+
let location = location!(get_test_path!());
348
+
349
+
let hive = Hive::new_from_path(&location, None, SubCommandModifiers::default())
350
+
.await
351
+
.unwrap();
352
+
353
+
let node = Node {
354
+
target: node::Target::from_host("name"),
355
+
keys: vector![Key {
356
+
name: "different-than-a".into(),
357
+
dest_dir: "/run/keys/".into(),
358
+
path: "/run/keys/different-than-a".into(),
359
+
group: "root".into(),
360
+
user: "root".into(),
361
+
permissions: "0600".into(),
362
+
source: Source::String("hi".into()),
363
+
upload_at: UploadKeyAt::PreActivation,
364
+
environment: im::HashMap::new()
365
+
}],
366
+
build_remotely: true,
367
+
..Default::default()
368
+
};
369
+
370
+
let mut nodes = HashMap::new();
371
+
nodes.insert(Name("node-a".into()), node);
372
+
373
+
assert_eq!(
374
+
hive,
375
+
Hive {
376
+
nodes,
377
+
schema: Hive::SCHEMA_VERSION
378
+
}
379
+
);
380
+
}
381
+
382
+
#[tokio::test]
383
+
#[cfg_attr(feature = "no_web_tests", ignore)]
384
+
async fn flake_hive() {
385
+
let tmp_dir = make_flake_sandbox(&get_test_path!()).unwrap();
386
+
387
+
let location = get_hive_location(
388
+
tmp_dir.path().display().to_string(),
389
+
SubCommandModifiers::default(),
390
+
)
391
+
.await
392
+
.unwrap();
393
+
let hive = Hive::new_from_path(&location, None, SubCommandModifiers::default())
394
+
.await
395
+
.unwrap();
396
+
397
+
let mut nodes = HashMap::new();
398
+
399
+
// a merged node
400
+
nodes.insert(Name("node-a".into()), Node::from_host("node-a"));
401
+
// a non-merged node
402
+
nodes.insert(Name("node-b".into()), Node::from_host("node-b"));
403
+
404
+
assert_eq!(
405
+
hive,
406
+
Hive {
407
+
nodes,
408
+
schema: Hive::SCHEMA_VERSION
409
+
}
410
+
);
411
+
412
+
tmp_dir.close().unwrap();
413
+
}
414
+
415
+
#[tokio::test]
416
+
async fn no_nixpkgs() {
417
+
let location = location!(get_test_path!());
418
+
419
+
assert_matches!(
420
+
Hive::new_from_path(&location, None, SubCommandModifiers::default()).await,
421
+
Err(HiveLibError::NixEvalError {
422
+
source: CommandError::CommandFailed {
423
+
logs,
424
+
..
425
+
},
426
+
..
427
+
})
428
+
if logs.contains("makeHive called without meta.nixpkgs specified")
429
+
);
430
+
}
431
+
432
+
#[tokio::test]
433
+
async fn _keys_should_fail() {
434
+
let location = location!(get_test_path!());
435
+
436
+
assert_matches!(
437
+
Hive::new_from_path(&location, None, SubCommandModifiers::default()).await,
438
+
Err(HiveLibError::NixEvalError {
439
+
source: CommandError::CommandFailed {
440
+
logs,
441
+
..
442
+
},
443
+
..
444
+
})
445
+
if logs.contains("The option `deployment._keys' is read-only, but it's set multiple times.")
446
+
);
447
+
}
448
+
449
+
#[tokio::test]
450
+
async fn test_force_always_local() {
451
+
let mut location: PathBuf = env::var("WIRE_TEST_DIR").unwrap().into();
452
+
location.push("non_trivial_hive");
453
+
let location = location!(location);
454
+
455
+
let mut hive = Hive::new_from_path(&location, None, SubCommandModifiers::default())
456
+
.await
457
+
.unwrap();
458
+
459
+
assert_matches!(
460
+
hive.force_always_local(vec!["non-existent".to_string()]),
461
+
Err(HiveLibError::HiveInitialisationError(
462
+
HiveInitialisationError::NodeDoesNotExist(node)
463
+
)) if node == "non-existent"
464
+
);
465
+
466
+
for node in hive.nodes.values() {
467
+
assert!(node.build_remotely);
468
+
}
469
+
470
+
assert_matches!(hive.force_always_local(vec!["node-a".to_string()]), Ok(()));
471
+
472
+
assert!(
473
+
!hive
474
+
.nodes
475
+
.get(&Name("node-a".into()))
476
+
.unwrap()
477
+
.build_remotely
478
+
);
479
+
}
480
+
}
+939
crates/core/src/hive/node.rs
+939
crates/core/src/hive/node.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#![allow(clippy::missing_errors_doc)]
5
+
use enum_dispatch::enum_dispatch;
6
+
use gethostname::gethostname;
7
+
use serde::{Deserialize, Serialize};
8
+
use std::assert_matches::debug_assert_matches;
9
+
use std::fmt::Display;
10
+
use std::sync::Arc;
11
+
use std::sync::atomic::AtomicBool;
12
+
use tokio::sync::oneshot;
13
+
use tracing::{Instrument, Level, Span, debug, error, event, instrument, trace};
14
+
15
+
use crate::commands::builder::CommandStringBuilder;
16
+
use crate::commands::common::evaluate_hive_attribute;
17
+
use crate::commands::{CommandArguments, WireCommandChip, run_command};
18
+
use crate::errors::NetworkError;
19
+
use crate::hive::HiveLocation;
20
+
use crate::hive::steps::build::Build;
21
+
use crate::hive::steps::cleanup::CleanUp;
22
+
use crate::hive::steps::evaluate::Evaluate;
23
+
use crate::hive::steps::keys::{Key, Keys, PushKeyAgent, UploadKeyAt};
24
+
use crate::hive::steps::ping::Ping;
25
+
use crate::hive::steps::push::{PushBuildOutput, PushEvaluatedOutput};
26
+
use crate::status::STATUS;
27
+
use crate::{EvalGoal, StrictHostKeyChecking, SubCommandModifiers};
28
+
29
+
use super::HiveLibError;
30
+
use super::steps::activate::SwitchToConfiguration;
31
+
32
+
#[derive(
33
+
Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq, PartialOrd, Ord, derive_more::Display,
34
+
)]
35
+
pub struct Name(pub Arc<str>);
36
+
37
+
#[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq)]
38
+
pub struct Target {
39
+
pub hosts: Vec<Arc<str>>,
40
+
pub user: Arc<str>,
41
+
pub port: u32,
42
+
43
+
#[serde(skip)]
44
+
current_host: usize,
45
+
}
46
+
47
+
impl Target {
48
+
#[instrument(ret(level = tracing::Level::DEBUG), skip_all)]
49
+
pub fn create_ssh_opts(
50
+
&self,
51
+
modifiers: SubCommandModifiers,
52
+
master: bool,
53
+
) -> Result<String, HiveLibError> {
54
+
self.create_ssh_args(modifiers, false, master)
55
+
.map(|x| x.join(" "))
56
+
}
57
+
58
+
#[instrument(ret(level = tracing::Level::DEBUG))]
59
+
pub fn create_ssh_args(
60
+
&self,
61
+
modifiers: SubCommandModifiers,
62
+
non_interactive_forced: bool,
63
+
master: bool,
64
+
) -> Result<Vec<String>, HiveLibError> {
65
+
let mut vector = vec![
66
+
"-l".to_string(),
67
+
self.user.to_string(),
68
+
"-p".to_string(),
69
+
self.port.to_string(),
70
+
];
71
+
let mut options = vec![
72
+
format!(
73
+
"StrictHostKeyChecking={}",
74
+
match modifiers.ssh_accept_host {
75
+
StrictHostKeyChecking::AcceptNew => "accept-new",
76
+
StrictHostKeyChecking::No => "no",
77
+
}
78
+
)
79
+
.to_string(),
80
+
];
81
+
82
+
options.extend(["PasswordAuthentication=no".to_string()]);
83
+
options.extend(["KbdInteractiveAuthentication=no".to_string()]);
84
+
85
+
vector.push("-o".to_string());
86
+
vector.extend(options.into_iter().intersperse("-o".to_string()));
87
+
88
+
Ok(vector)
89
+
}
90
+
}
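// For reference, the argument list built above looks like (see `test_ssh_opts` below):
// ["-l", "root", "-p", "22", "-o", "StrictHostKeyChecking=accept-new",
//  "-o", "PasswordAuthentication=no", "-o", "KbdInteractiveAuthentication=no"]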
91
+
92
+
#[cfg(test)]
93
+
impl Default for Target {
94
+
fn default() -> Self {
95
+
Target {
96
+
hosts: vec!["NAME".into()],
97
+
user: "root".into(),
98
+
port: 22,
99
+
current_host: 0,
100
+
}
101
+
}
102
+
}
103
+
104
+
#[cfg(test)]
105
+
impl<'a> Context<'a> {
106
+
fn create_test_context(
107
+
hive_location: HiveLocation,
108
+
name: &'a Name,
109
+
node: &'a mut Node,
110
+
) -> Self {
111
+
Context {
112
+
name,
113
+
node,
114
+
hive_location: Arc::new(hive_location),
115
+
modifiers: SubCommandModifiers::default(),
116
+
objective: Objective::Apply(ApplyObjective {
117
+
goal: Goal::SwitchToConfiguration(SwitchToConfigurationGoal::Switch),
118
+
no_keys: false,
119
+
reboot: false,
120
+
should_apply_locally: false,
121
+
substitute_on_destination: false,
122
+
handle_unreachable: HandleUnreachable::default(),
123
+
}),
124
+
state: StepState::default(),
125
+
should_quit: Arc::new(AtomicBool::new(false)),
126
+
}
127
+
}
128
+
}
129
+
130
+
impl Target {
131
+
pub fn get_preferred_host(&self) -> Result<&Arc<str>, HiveLibError> {
132
+
self.hosts
133
+
.get(self.current_host)
134
+
.ok_or(HiveLibError::NetworkError(NetworkError::HostsExhausted))
135
+
}
136
+
137
+
pub const fn host_failed(&mut self) {
138
+
self.current_host += 1;
139
+
}
140
+
141
+
#[cfg(test)]
142
+
#[must_use]
143
+
pub fn from_host(host: &str) -> Self {
144
+
Target {
145
+
hosts: vec![host.into()],
146
+
..Default::default()
147
+
}
148
+
}
149
+
}
150
+
151
+
impl Display for Target {
152
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
153
+
let hosts = itertools::Itertools::join(
154
+
&mut self
155
+
.hosts
156
+
.iter()
157
+
.map(|host| format!("{}@{host}:{}", self.user, self.port)),
158
+
", ",
159
+
);
160
+
161
+
write!(f, "{hosts}")
162
+
}
163
+
}
164
+
165
+
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
166
+
pub struct Node {
167
+
#[serde(rename = "target")]
168
+
pub target: Target,
169
+
170
+
#[serde(rename = "buildOnTarget")]
171
+
pub build_remotely: bool,
172
+
173
+
#[serde(rename = "allowLocalDeployment")]
174
+
pub allow_local_deployment: bool,
175
+
176
+
#[serde(default)]
177
+
pub tags: im::HashSet<String>,
178
+
179
+
#[serde(rename(deserialize = "_keys", serialize = "keys"))]
180
+
pub keys: im::Vector<Key>,
181
+
182
+
#[serde(rename(deserialize = "_hostPlatform", serialize = "host_platform"))]
183
+
pub host_platform: Arc<str>,
184
+
185
+
#[serde(rename(
186
+
deserialize = "privilegeEscalationCommand",
187
+
serialize = "privilege_escalation_command"
188
+
))]
189
+
pub privilege_escalation_command: im::Vector<Arc<str>>,
190
+
}
191
+
192
+
#[cfg(test)]
193
+
impl Default for Node {
194
+
fn default() -> Self {
195
+
Node {
196
+
target: Target::default(),
197
+
keys: im::Vector::new(),
198
+
tags: im::HashSet::new(),
199
+
privilege_escalation_command: vec!["sudo".into(), "--".into()].into(),
200
+
allow_local_deployment: true,
201
+
build_remotely: false,
202
+
host_platform: "x86_64-linux".into(),
203
+
}
204
+
}
205
+
}
206
+
207
+
impl Node {
208
+
#[cfg(test)]
209
+
#[must_use]
210
+
pub fn from_host(host: &str) -> Self {
211
+
Node {
212
+
target: Target::from_host(host),
213
+
..Default::default()
214
+
}
215
+
}
216
+
217
+
/// Tests the connection to a node
218
+
pub async fn ping(&self, modifiers: SubCommandModifiers) -> Result<(), HiveLibError> {
219
+
let host = self.target.get_preferred_host()?;
220
+
221
+
let mut command_string = CommandStringBuilder::new("ssh");
222
+
command_string.arg(format!("{}@{host}", self.target.user));
223
+
command_string.arg(self.target.create_ssh_opts(modifiers, true)?);
224
+
command_string.arg("exit");
225
+
226
+
let output = run_command(
227
+
&CommandArguments::new(command_string, modifiers)
228
+
.log_stdout()
229
+
.mode(crate::commands::ChildOutputMode::Interactive),
230
+
)
231
+
.await?;
232
+
233
+
output.wait_till_success().await.map_err(|source| {
234
+
HiveLibError::NetworkError(NetworkError::HostUnreachable {
235
+
host: host.to_string(),
236
+
source,
237
+
})
238
+
})?;
239
+
240
+
Ok(())
241
+
}
242
+
}
243
+
244
+
#[must_use]
245
+
pub fn should_apply_locally(allow_local_deployment: bool, name: &str) -> bool {
246
+
*name == *gethostname() && allow_local_deployment
247
+
}
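// A node is applied locally only when its name matches this machine's hostname *and*
// the node opts in via `deployment.allowLocalDeployment`.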
248
+
249
+
#[derive(derive_more::Display)]
250
+
pub enum Push<'a> {
251
+
Derivation(&'a Derivation),
252
+
Path(&'a String),
253
+
}
254
+
255
+
#[derive(Deserialize, Clone, Debug)]
256
+
pub struct Derivation(String);
257
+
258
+
impl Display for Derivation {
259
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
260
+
self.0.fmt(f).and_then(|()| write!(f, "^*"))
261
+
}
262
+
}
263
+
264
+
#[derive(derive_more::Display, Debug, Clone, Copy)]
265
+
pub enum SwitchToConfigurationGoal {
266
+
Switch,
267
+
Boot,
268
+
Test,
269
+
DryActivate,
270
+
}
271
+
272
+
#[derive(derive_more::Display, Clone, Copy)]
273
+
pub enum Goal {
274
+
SwitchToConfiguration(SwitchToConfigurationGoal),
275
+
Build,
276
+
Push,
277
+
Keys,
278
+
}
279
+
280
+
// TODO: Get rid of this allow and resolve it
281
+
#[allow(clippy::struct_excessive_bools)]
282
+
#[derive(Clone, Copy)]
283
+
pub struct ApplyObjective {
284
+
pub goal: Goal,
285
+
pub no_keys: bool,
286
+
pub reboot: bool,
287
+
pub should_apply_locally: bool,
288
+
pub substitute_on_destination: bool,
289
+
pub handle_unreachable: HandleUnreachable,
290
+
}
291
+
292
+
#[derive(Clone, Copy)]
293
+
pub enum Objective {
294
+
Apply(ApplyObjective),
295
+
BuildLocally,
296
+
}
297
+
298
+
#[enum_dispatch]
299
+
pub(crate) trait ExecuteStep: Send + Sync + Display + std::fmt::Debug {
300
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError>;
301
+
302
+
fn should_execute(&self, context: &Context) -> bool;
303
+
}
304
+
305
+
// may include other options such as FailAll in the future
306
+
#[non_exhaustive]
307
+
#[derive(Clone, Copy, Default)]
308
+
pub enum HandleUnreachable {
309
+
Ignore,
310
+
#[default]
311
+
FailNode,
312
+
}
313
+
314
+
#[derive(Default)]
315
+
pub struct StepState {
316
+
pub evaluation: Option<Derivation>,
317
+
pub evaluation_rx: Option<oneshot::Receiver<Result<Derivation, HiveLibError>>>,
318
+
pub build: Option<String>,
319
+
pub key_agent_directory: Option<String>,
320
+
}
321
+
322
+
pub struct Context<'a> {
323
+
pub name: &'a Name,
324
+
pub node: &'a mut Node,
325
+
pub hive_location: Arc<HiveLocation>,
326
+
pub modifiers: SubCommandModifiers,
327
+
pub state: StepState,
328
+
pub should_quit: Arc<AtomicBool>,
329
+
pub objective: Objective,
330
+
}
331
+
332
+
#[enum_dispatch(ExecuteStep)]
333
+
#[derive(Debug, PartialEq)]
334
+
enum Step {
335
+
Ping,
336
+
PushKeyAgent,
337
+
Keys,
338
+
Evaluate,
339
+
PushEvaluatedOutput,
340
+
Build,
341
+
PushBuildOutput,
342
+
SwitchToConfiguration,
343
+
CleanUp,
344
+
}
345
+
346
+
impl Display for Step {
347
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
348
+
match self {
349
+
Self::Ping(step) => step.fmt(f),
350
+
Self::PushKeyAgent(step) => step.fmt(f),
351
+
Self::Keys(step) => step.fmt(f),
352
+
Self::Evaluate(step) => step.fmt(f),
353
+
Self::PushEvaluatedOutput(step) => step.fmt(f),
354
+
Self::Build(step) => step.fmt(f),
355
+
Self::PushBuildOutput(step) => step.fmt(f),
356
+
Self::SwitchToConfiguration(step) => step.fmt(f),
357
+
Self::CleanUp(step) => step.fmt(f),
358
+
}
359
+
}
360
+
}
361
+
362
+
pub struct GoalExecutor<'a> {
363
+
steps: Vec<Step>,
364
+
context: Context<'a>,
365
+
}
366
+
367
+
/// Returns `Err` if the application should shut down.
368
+
fn app_shutdown_guard(context: &Context) -> Result<(), HiveLibError> {
369
+
if context
370
+
.should_quit
371
+
.load(std::sync::atomic::Ordering::Relaxed)
372
+
{
373
+
return Err(HiveLibError::Sigint);
374
+
}
375
+
376
+
Ok(())
377
+
}
378
+
379
+
impl<'a> GoalExecutor<'a> {
380
+
#[must_use]
381
+
pub fn new(context: Context<'a>) -> Self {
382
+
Self {
383
+
steps: vec![
384
+
Step::Ping(Ping),
385
+
Step::PushKeyAgent(PushKeyAgent),
386
+
Step::Keys(Keys {
387
+
filter: UploadKeyAt::NoFilter,
388
+
}),
389
+
Step::Keys(Keys {
390
+
filter: UploadKeyAt::PreActivation,
391
+
}),
392
+
Step::Evaluate(super::steps::evaluate::Evaluate),
393
+
Step::PushEvaluatedOutput(super::steps::push::PushEvaluatedOutput),
394
+
Step::Build(super::steps::build::Build),
395
+
Step::PushBuildOutput(super::steps::push::PushBuildOutput),
396
+
Step::SwitchToConfiguration(SwitchToConfiguration),
397
+
Step::Keys(Keys {
398
+
filter: UploadKeyAt::PostActivation,
399
+
}),
400
+
],
401
+
context,
402
+
}
403
+
}
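// The vector above is the full pipeline in execution order; `execute` later drops any
// step whose `should_execute` returns false for the current objective, which is exactly
// what the `order_*` tests below assert on.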
404
+
405
+
#[instrument(skip_all, name = "eval")]
406
+
async fn evaluate_task(
407
+
tx: oneshot::Sender<Result<Derivation, HiveLibError>>,
408
+
hive_location: Arc<HiveLocation>,
409
+
name: Name,
410
+
modifiers: SubCommandModifiers,
411
+
) {
412
+
let output =
413
+
evaluate_hive_attribute(&hive_location, &EvalGoal::GetTopLevel(&name), modifiers)
414
+
.await
415
+
.map(|output| {
416
+
serde_json::from_str::<Derivation>(&output).expect("failed to parse derivation")
417
+
});
418
+
419
+
debug!(output = ?output, done = true);
420
+
421
+
let _ = tx.send(output);
422
+
}
423
+
424
+
#[instrument(skip_all, fields(node = %self.context.name))]
425
+
pub async fn execute(mut self) -> Result<(), HiveLibError> {
426
+
app_shutdown_guard(&self.context)?;
427
+
428
+
let (tx, rx) = oneshot::channel();
429
+
self.context.state.evaluation_rx = Some(rx);
430
+
431
+
// The name of this span should never be changed without updating
432
+
// `wire/cli/tracing_setup.rs`
433
+
debug_assert_matches!(Span::current().metadata().unwrap().name(), "execute");
434
+
// This span should always have a `node` field, as required by the same file
435
+
debug_assert!(
436
+
Span::current()
437
+
.metadata()
438
+
.unwrap()
439
+
.fields()
440
+
.field("node")
441
+
.is_some()
442
+
);
443
+
444
+
let spawn_evaluator = match self.context.objective {
445
+
Objective::Apply(apply_objective) => !matches!(apply_objective.goal, Goal::Keys),
446
+
Objective::BuildLocally => true,
447
+
};
448
+
449
+
if spawn_evaluator {
450
+
tokio::spawn(
451
+
GoalExecutor::evaluate_task(
452
+
tx,
453
+
self.context.hive_location.clone(),
454
+
self.context.name.clone(),
455
+
self.context.modifiers,
456
+
)
457
+
.in_current_span(),
458
+
);
459
+
}
460
+
461
+
let steps = self
462
+
.steps
463
+
.iter()
464
+
.filter(|step| step.should_execute(&self.context))
465
+
.inspect(|step| {
466
+
trace!("Will execute step `{step}` for {}", self.context.name);
467
+
})
468
+
.collect::<Vec<_>>();
469
+
let length = steps.len();
470
+
471
+
for (position, step) in steps.iter().enumerate() {
472
+
app_shutdown_guard(&self.context)?;
473
+
474
+
event!(
475
+
Level::INFO,
476
+
step = step.to_string(),
477
+
progress = format!("{}/{length}", position + 1)
478
+
);
479
+
480
+
STATUS
481
+
.lock()
482
+
.set_node_step(self.context.name, step.to_string());
483
+
484
+
if let Err(err) = step.execute(&mut self.context).await.inspect_err(|_| {
485
+
error!("Failed to execute `{step}`");
486
+
}) {
487
+
// discard error from cleanup
488
+
let _ = CleanUp.execute(&mut self.context).await;
489
+
490
+
if let Objective::Apply(apply_objective) = self.context.objective
491
+
&& matches!(step, Step::Ping(..))
492
+
&& matches!(
493
+
apply_objective.handle_unreachable,
494
+
HandleUnreachable::Ignore,
495
+
)
496
+
{
497
+
return Ok(());
498
+
}
499
+
500
+
STATUS.lock().mark_node_failed(self.context.name);
501
+
502
+
return Err(err);
503
+
}
504
+
}
505
+
506
+
STATUS.lock().mark_node_succeeded(self.context.name);
507
+
508
+
Ok(())
509
+
}
510
+
}
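// Failure handling in `execute`: a failed step triggers a best-effort `CleanUp`, an
// unreachable host is tolerated when the failing step was the ping and
// `HandleUnreachable::Ignore` is set, and any other error marks the node as failed.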
511
+
512
+
#[cfg(test)]
513
+
mod tests {
514
+
use rand::distr::Alphabetic;
515
+
516
+
use super::*;
517
+
use crate::{
518
+
function_name, get_test_path,
519
+
hive::{Hive, get_hive_location},
520
+
location,
521
+
};
522
+
use std::{assert_matches::assert_matches, path::PathBuf};
523
+
use std::{collections::HashMap, env};
524
+
525
+
fn get_steps(goal_executor: GoalExecutor) -> std::vec::Vec<Step> {
526
+
goal_executor
527
+
.steps
528
+
.into_iter()
529
+
.filter(|step| step.should_execute(&goal_executor.context))
530
+
.collect::<Vec<_>>()
531
+
}
532
+
533
+
#[tokio::test]
534
+
#[cfg_attr(feature = "no_web_tests", ignore)]
535
+
async fn default_values_match() {
536
+
let mut path = get_test_path!();
537
+
538
+
let location =
539
+
get_hive_location(path.display().to_string(), SubCommandModifiers::default())
540
+
.await
541
+
.unwrap();
542
+
let hive = Hive::new_from_path(&location, None, SubCommandModifiers::default())
543
+
.await
544
+
.unwrap();
545
+
546
+
let node = Node::default();
547
+
548
+
let mut nodes = HashMap::new();
549
+
nodes.insert(Name("NAME".into()), node);
550
+
551
+
path.push("hive.nix");
552
+
553
+
assert_eq!(
554
+
hive,
555
+
Hive {
556
+
nodes,
557
+
schema: Hive::SCHEMA_VERSION
558
+
}
559
+
);
560
+
}
561
+
562
+
#[tokio::test]
563
+
async fn order_build_locally() {
564
+
let location = location!(get_test_path!());
565
+
let mut node = Node {
566
+
build_remotely: false,
567
+
..Default::default()
568
+
};
569
+
let name = &Name(function_name!().into());
570
+
let executor = GoalExecutor::new(Context::create_test_context(location, name, &mut node));
571
+
let steps = get_steps(executor);
572
+
573
+
assert_eq!(
574
+
steps,
575
+
vec![
576
+
Ping.into(),
577
+
PushKeyAgent.into(),
578
+
Keys {
579
+
filter: UploadKeyAt::PreActivation
580
+
}
581
+
.into(),
582
+
crate::hive::steps::evaluate::Evaluate.into(),
583
+
crate::hive::steps::build::Build.into(),
584
+
crate::hive::steps::push::PushBuildOutput.into(),
585
+
SwitchToConfiguration.into(),
586
+
Keys {
587
+
filter: UploadKeyAt::PostActivation
588
+
}
589
+
.into(),
590
+
]
591
+
);
592
+
}
593
+
594
+
#[tokio::test]
595
+
async fn order_keys_only() {
596
+
let location = location!(get_test_path!());
597
+
let mut node = Node::default();
598
+
let name = &Name(function_name!().into());
599
+
let mut context = Context::create_test_context(location, name, &mut node);
600
+
601
+
let Objective::Apply(ref mut apply_objective) = context.objective else {
602
+
unreachable!()
603
+
};
604
+
605
+
apply_objective.goal = Goal::Keys;
606
+
607
+
let executor = GoalExecutor::new(context);
608
+
let steps = get_steps(executor);
609
+
610
+
assert_eq!(
611
+
steps,
612
+
vec![
613
+
Ping.into(),
614
+
PushKeyAgent.into(),
615
+
Keys {
616
+
filter: UploadKeyAt::NoFilter
617
+
}
618
+
.into(),
619
+
]
620
+
);
621
+
}
622
+
623
+
#[tokio::test]
624
+
async fn order_build() {
625
+
let location = location!(get_test_path!());
626
+
let mut node = Node::default();
627
+
let name = &Name(function_name!().into());
628
+
let mut context = Context::create_test_context(location, name, &mut node);
629
+
630
+
let Objective::Apply(ref mut apply_objective) = context.objective else {
631
+
unreachable!()
632
+
};
633
+
apply_objective.goal = Goal::Build;
634
+
635
+
let executor = GoalExecutor::new(context);
636
+
let steps = get_steps(executor);
637
+
638
+
assert_eq!(
639
+
steps,
640
+
vec![
641
+
Ping.into(),
642
+
crate::hive::steps::evaluate::Evaluate.into(),
643
+
crate::hive::steps::build::Build.into(),
644
+
crate::hive::steps::push::PushBuildOutput.into(),
645
+
]
646
+
);
647
+
}
648
+
649
+
#[tokio::test]
650
+
async fn order_push_only() {
651
+
let location = location!(get_test_path!());
652
+
let mut node = Node::default();
653
+
let name = &Name(function_name!().into());
654
+
let mut context = Context::create_test_context(location, name, &mut node);
655
+
656
+
let Objective::Apply(ref mut apply_objective) = context.objective else {
657
+
unreachable!()
658
+
};
659
+
apply_objective.goal = Goal::Push;
660
+
661
+
let executor = GoalExecutor::new(context);
662
+
let steps = get_steps(executor);
663
+
664
+
assert_eq!(
665
+
steps,
666
+
vec![
667
+
Ping.into(),
668
+
crate::hive::steps::evaluate::Evaluate.into(),
669
+
crate::hive::steps::push::PushEvaluatedOutput.into(),
670
+
]
671
+
);
672
+
}
673
+
674
+
#[tokio::test]
675
+
async fn order_remote_build() {
676
+
let location = location!(get_test_path!());
677
+
let mut node = Node {
678
+
build_remotely: true,
679
+
..Default::default()
680
+
};
681
+
682
+
let name = &Name(function_name!().into());
683
+
let executor = GoalExecutor::new(Context::create_test_context(location, name, &mut node));
684
+
let steps = get_steps(executor);
685
+
686
+
assert_eq!(
687
+
steps,
688
+
vec![
689
+
Ping.into(),
690
+
PushKeyAgent.into(),
691
+
Keys {
692
+
filter: UploadKeyAt::PreActivation
693
+
}
694
+
.into(),
695
+
crate::hive::steps::evaluate::Evaluate.into(),
696
+
crate::hive::steps::push::PushEvaluatedOutput.into(),
697
+
crate::hive::steps::build::Build.into(),
698
+
SwitchToConfiguration.into(),
699
+
Keys {
700
+
filter: UploadKeyAt::PostActivation
701
+
}
702
+
.into(),
703
+
]
704
+
);
705
+
}
706
+
707
+
#[tokio::test]
708
+
async fn order_nokeys() {
709
+
let location = location!(get_test_path!());
710
+
let mut node = Node::default();
711
+
712
+
let name = &Name(function_name!().into());
713
+
let mut context = Context::create_test_context(location, name, &mut node);
714
+
715
+
let Objective::Apply(ref mut apply_objective) = context.objective else {
716
+
unreachable!()
717
+
};
718
+
apply_objective.no_keys = true;
719
+
720
+
let executor = GoalExecutor::new(context);
721
+
let steps = get_steps(executor);
722
+
723
+
assert_eq!(
724
+
steps,
725
+
vec![
726
+
Ping.into(),
727
+
crate::hive::steps::evaluate::Evaluate.into(),
728
+
crate::hive::steps::build::Build.into(),
729
+
crate::hive::steps::push::PushBuildOutput.into(),
730
+
SwitchToConfiguration.into(),
731
+
]
732
+
);
733
+
}
734
+
735
+
#[tokio::test]
736
+
async fn order_should_apply_locally() {
737
+
let location = location!(get_test_path!());
738
+
let mut node = Node::default();
739
+
740
+
let name = &Name(function_name!().into());
741
+
let mut context = Context::create_test_context(location, name, &mut node);
742
+
743
+
let Objective::Apply(ref mut apply_objective) = context.objective else {
744
+
unreachable!()
745
+
};
746
+
apply_objective.no_keys = true;
747
+
apply_objective.should_apply_locally = true;
748
+
749
+
let executor = GoalExecutor::new(context);
750
+
let steps = get_steps(executor);
751
+
752
+
assert_eq!(
753
+
steps,
754
+
vec![
755
+
crate::hive::steps::evaluate::Evaluate.into(),
756
+
crate::hive::steps::build::Build.into(),
757
+
SwitchToConfiguration.into(),
758
+
]
759
+
);
760
+
}
761
+
762
+
#[tokio::test]
763
+
async fn order_build_only() {
764
+
let location = location!(get_test_path!());
765
+
let mut node = Node::default();
766
+
767
+
let name = &Name(function_name!().into());
768
+
let mut context = Context::create_test_context(location, name, &mut node);
769
+
770
+
context.objective = Objective::BuildLocally;
771
+
772
+
let executor = GoalExecutor::new(context);
773
+
let steps = get_steps(executor);
774
+
775
+
assert_eq!(
776
+
steps,
777
+
vec![
778
+
crate::hive::steps::evaluate::Evaluate.into(),
779
+
crate::hive::steps::build::Build.into()
780
+
]
781
+
);
782
+
}
783
+
784
+
#[test]
785
+
fn target_fails_increments() {
786
+
let mut target = Target::from_host("localhost");
787
+
788
+
assert_eq!(target.current_host, 0);
789
+
790
+
for i in 0..100 {
791
+
target.host_failed();
792
+
assert_eq!(target.current_host, i + 1);
793
+
}
794
+
}
795
+
796
+
#[test]
797
+
fn get_preferred_host_fails() {
798
+
let mut target = Target {
799
+
hosts: vec![
800
+
"un.reachable.1".into(),
801
+
"un.reachable.2".into(),
802
+
"un.reachable.3".into(),
803
+
"un.reachable.4".into(),
804
+
"un.reachable.5".into(),
805
+
],
806
+
..Default::default()
807
+
};
808
+
809
+
assert_ne!(
810
+
target.get_preferred_host().unwrap().to_string(),
811
+
"un.reachable.5"
812
+
);
813
+
814
+
for i in 1..=5 {
815
+
assert_eq!(
816
+
target.get_preferred_host().unwrap().to_string(),
817
+
format!("un.reachable.{i}")
818
+
);
819
+
target.host_failed();
820
+
}
821
+
822
+
for _ in 0..5 {
823
+
assert_matches!(
824
+
target.get_preferred_host(),
825
+
Err(HiveLibError::NetworkError(NetworkError::HostsExhausted))
826
+
);
827
+
}
828
+
}
829
+
830
+
#[test]
831
+
fn test_ssh_opts() {
832
+
let target = Target::from_host("hello-world");
833
+
let subcommand_modifiers = SubCommandModifiers {
834
+
non_interactive: false,
835
+
..Default::default()
836
+
};
837
+
let tmp = format!(
838
+
"/tmp/{}",
839
+
rand::distr::SampleString::sample_string(&Alphabetic, &mut rand::rng(), 10)
840
+
);
841
+
842
+
std::fs::create_dir(&tmp).unwrap();
843
+
844
+
unsafe { env::set_var("XDG_RUNTIME_DIR", &tmp) }
845
+
846
+
let args = [
847
+
"-l".to_string(),
848
+
target.user.to_string(),
849
+
"-p".to_string(),
850
+
target.port.to_string(),
851
+
"-o".to_string(),
852
+
"StrictHostKeyChecking=accept-new".to_string(),
853
+
"-o".to_string(),
854
+
"PasswordAuthentication=no".to_string(),
855
+
"-o".to_string(),
856
+
"KbdInteractiveAuthentication=no".to_string(),
857
+
];
858
+
859
+
assert_eq!(
860
+
target
861
+
.create_ssh_args(subcommand_modifiers, false, false)
862
+
.unwrap(),
863
+
args
864
+
);
865
+
assert_eq!(
866
+
target.create_ssh_opts(subcommand_modifiers, false).unwrap(),
867
+
args.join(" ")
868
+
);
869
+
870
+
assert_eq!(
871
+
target
872
+
.create_ssh_args(subcommand_modifiers, false, true)
873
+
.unwrap(),
874
+
[
875
+
"-l".to_string(),
876
+
target.user.to_string(),
877
+
"-p".to_string(),
878
+
target.port.to_string(),
879
+
"-o".to_string(),
880
+
"StrictHostKeyChecking=accept-new".to_string(),
881
+
"-o".to_string(),
882
+
"PasswordAuthentication=no".to_string(),
883
+
"-o".to_string(),
884
+
"KbdInteractiveAuthentication=no".to_string(),
885
+
]
886
+
);
887
+
888
+
assert_eq!(
889
+
target
890
+
.create_ssh_args(subcommand_modifiers, true, true)
891
+
.unwrap(),
892
+
[
893
+
"-l".to_string(),
894
+
target.user.to_string(),
895
+
"-p".to_string(),
896
+
target.port.to_string(),
897
+
"-o".to_string(),
898
+
"StrictHostKeyChecking=accept-new".to_string(),
899
+
"-o".to_string(),
900
+
"PasswordAuthentication=no".to_string(),
901
+
"-o".to_string(),
902
+
"KbdInteractiveAuthentication=no".to_string(),
903
+
]
904
+
);
905
+
906
+
// forced non-interactive is the same as --non-interactive
907
+
assert_eq!(
908
+
target
909
+
.create_ssh_args(subcommand_modifiers, true, false)
910
+
.unwrap(),
911
+
target
912
+
.create_ssh_args(
913
+
SubCommandModifiers {
914
+
non_interactive: true,
915
+
..Default::default()
916
+
},
917
+
false,
918
+
false
919
+
)
920
+
.unwrap()
921
+
);
922
+
}
923
+
924
+
#[tokio::test]
925
+
async fn context_quits_sigint() {
926
+
let location = location!(get_test_path!());
927
+
let mut node = Node::default();
928
+
929
+
let name = &Name(function_name!().into());
930
+
let context = Context::create_test_context(location, name, &mut node);
931
+
context
932
+
.should_quit
933
+
.store(true, std::sync::atomic::Ordering::Relaxed);
934
+
let executor = GoalExecutor::new(context);
935
+
let status = executor.execute().await;
936
+
937
+
assert_matches!(status, Err(HiveLibError::Sigint));
938
+
}
939
+
}
+219
crates/core/src/hive/steps/activate.rs
+219
crates/core/src/hive/steps/activate.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use tracing::{error, info, instrument, warn};
7
+
8
+
use crate::{
9
+
HiveLibError,
10
+
commands::{CommandArguments, WireCommandChip, builder::CommandStringBuilder, run_command},
11
+
errors::{ActivationError, NetworkError},
12
+
hive::node::{Context, ExecuteStep, Goal, Objective, SwitchToConfigurationGoal},
13
+
};
14
+
15
+
#[derive(Debug, PartialEq)]
16
+
pub struct SwitchToConfiguration;
17
+
18
+
impl Display for SwitchToConfiguration {
19
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
20
+
write!(f, "switch-to-configuration")
21
+
}
22
+
}
23
+
24
+
async fn wait_for_ping(ctx: &Context<'_>) -> Result<(), HiveLibError> {
25
+
let host = ctx.node.target.get_preferred_host()?;
26
+
let mut result = ctx.node.ping(ctx.modifiers).await;
27
+
28
+
for num in 0..2 {
29
+
warn!("Trying to ping {host} (attempt {}/3)", num + 1);
30
+
31
+
result = ctx.node.ping(ctx.modifiers).await;
32
+
33
+
if result.is_ok() {
34
+
info!("Regained connection to {} via {host}", ctx.name);
35
+
36
+
break;
37
+
}
38
+
}
39
+
40
+
result
41
+
}
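// `wait_for_ping` re-runs the node's own `ping` a couple of times after activation or
// a reboot and returns the last result, letting callers decide whether a lost
// connection is fatal.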
42
+
43
+
async fn set_profile(
44
+
goal: SwitchToConfigurationGoal,
45
+
built_path: &String,
46
+
ctx: &Context<'_>,
47
+
) -> Result<(), HiveLibError> {
48
+
info!("Setting profiles in anticipation for switch-to-configuration {goal}");
49
+
50
+
let mut command_string = CommandStringBuilder::new("nix-env");
51
+
command_string.args(&["-p", "/nix/var/nix/profiles/system", "--set"]);
52
+
command_string.arg(built_path);
53
+
54
+
let Objective::Apply(apply_objective) = ctx.objective else {
55
+
unreachable!()
56
+
};
57
+
58
+
let child = run_command(
59
+
&CommandArguments::new(command_string, ctx.modifiers)
60
+
.mode(crate::commands::ChildOutputMode::Nix)
61
+
.execute_on_remote(if apply_objective.should_apply_locally {
62
+
None
63
+
} else {
64
+
Some(&ctx.node.target)
65
+
})
66
+
.elevated(ctx.node),
67
+
)
68
+
.await?;
69
+
70
+
let _ = child
71
+
.wait_till_success()
72
+
.await
73
+
.map_err(HiveLibError::CommandError)?;
74
+
75
+
info!("Set system profile");
76
+
77
+
Ok(())
78
+
}
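// Equivalent shell invocation of the profile update above, run on the target (or
// locally when applying locally) and elevated via the node's escalation command:
//
//     nix-env -p /nix/var/nix/profiles/system --set <built_path>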
79
+
80
+
impl ExecuteStep for SwitchToConfiguration {
81
+
fn should_execute(&self, ctx: &Context) -> bool {
82
+
let Objective::Apply(apply_objective) = ctx.objective else {
83
+
return false;
84
+
};
85
+
86
+
matches!(apply_objective.goal, Goal::SwitchToConfiguration(..))
87
+
}
88
+
89
+
#[allow(clippy::too_many_lines)]
90
+
#[instrument(skip_all, name = "activate")]
91
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
92
+
let built_path = ctx.state.build.as_ref().unwrap();
93
+
94
+
let Objective::Apply(apply_objective) = ctx.objective else {
95
+
unreachable!()
96
+
};
97
+
98
+
let Goal::SwitchToConfiguration(goal) = &apply_objective.goal else {
99
+
unreachable!("Cannot reach as guarded by should_execute")
100
+
};
101
+
102
+
if matches!(
103
+
goal,
104
+
// switch profile if switch or boot
105
+
// https://github.com/NixOS/nixpkgs/blob/a2c92aa34735a04010671e3378e2aa2d109b2a72/pkgs/by-name/ni/nixos-rebuild-ng/src/nixos_rebuild/services.py#L224
106
+
SwitchToConfigurationGoal::Switch | SwitchToConfigurationGoal::Boot
107
+
) {
108
+
set_profile(*goal, built_path, ctx).await?;
109
+
}
110
+
111
+
info!("Running switch-to-configuration {goal}");
112
+
113
+
let mut command_string =
114
+
CommandStringBuilder::new(format!("{built_path}/bin/switch-to-configuration"));
115
+
command_string.arg(match goal {
116
+
SwitchToConfigurationGoal::Switch => "switch",
117
+
SwitchToConfigurationGoal::Boot => "boot",
118
+
SwitchToConfigurationGoal::Test => "test",
119
+
SwitchToConfigurationGoal::DryActivate => "dry-activate",
120
+
});
121
+
122
+
let child = run_command(
123
+
&CommandArguments::new(command_string, ctx.modifiers)
124
+
.execute_on_remote(if apply_objective.should_apply_locally {
125
+
None
126
+
} else {
127
+
Some(&ctx.node.target)
128
+
})
129
+
.elevated(ctx.node)
130
+
.log_stdout(),
131
+
)
132
+
.await?;
133
+
134
+
let result = child.wait_till_success().await;
135
+
136
+
match result {
137
+
Ok(_) => {
138
+
if !apply_objective.reboot {
139
+
return Ok(());
140
+
}
141
+
142
+
if apply_objective.should_apply_locally {
143
+
error!("Refusing to reboot local machine!");
144
+
145
+
return Ok(());
146
+
}
147
+
148
+
warn!("Rebooting {name}!", name = ctx.name);
149
+
150
+
let reboot = run_command(
151
+
&CommandArguments::new("reboot now", ctx.modifiers)
152
+
.log_stdout()
153
+
.execute_on_remote(Some(&ctx.node.target))
154
+
.elevated(ctx.node),
155
+
)
156
+
.await?;
157
+
158
+
// consume result, impossible to know if the machine failed to reboot or we
159
+
// simply disconnected
160
+
let _ = reboot
161
+
.wait_till_success()
162
+
.await
163
+
.map_err(HiveLibError::CommandError)?;
164
+
165
+
info!("Rebooted {name}, waiting to reconnect...", name = ctx.name);
166
+
167
+
if wait_for_ping(ctx).await.is_ok() {
168
+
return Ok(());
169
+
}
170
+
171
+
error!(
172
+
"Failed to get regain connection to {name} via {host} after reboot.",
173
+
name = ctx.name,
174
+
host = ctx.node.target.get_preferred_host()?
175
+
);
176
+
177
+
return Err(HiveLibError::NetworkError(
178
+
NetworkError::HostUnreachableAfterReboot(
179
+
ctx.node.target.get_preferred_host()?.to_string(),
180
+
),
181
+
));
182
+
}
183
+
Err(error) => {
184
+
warn!(
185
+
"Activation command for {name} exited unsuccessfully.",
186
+
name = ctx.name
187
+
);
188
+
189
+
// Bail if the command couldn't have broken the system
190
+
// and don't try to regain connection to localhost
191
+
if matches!(goal, SwitchToConfigurationGoal::DryActivate)
192
+
|| apply_objective.should_apply_locally
193
+
{
194
+
return Err(HiveLibError::ActivationError(
195
+
ActivationError::SwitchToConfigurationError(*goal, ctx.name.clone(), error),
196
+
));
197
+
}
198
+
199
+
if wait_for_ping(ctx).await.is_ok() {
200
+
return Err(HiveLibError::ActivationError(
201
+
ActivationError::SwitchToConfigurationError(*goal, ctx.name.clone(), error),
202
+
));
203
+
}
204
+
205
+
error!(
206
+
"Failed to get regain connection to {name} via {host} after {goal} activation.",
207
+
name = ctx.name,
208
+
host = ctx.node.target.get_preferred_host()?
209
+
);
210
+
211
+
return Err(HiveLibError::NetworkError(
212
+
NetworkError::HostUnreachableAfterReboot(
213
+
ctx.node.target.get_preferred_host()?.to_string(),
214
+
),
215
+
));
216
+
}
217
+
}
218
+
}
219
+
}
+89
crates/core/src/hive/steps/build.rs
+89
crates/core/src/hive/steps/build.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use tracing::{info, instrument};
7
+
8
+
use crate::{
9
+
HiveLibError,
10
+
commands::{
11
+
CommandArguments, Either, WireCommandChip, builder::CommandStringBuilder,
12
+
run_command_with_env,
13
+
},
14
+
hive::node::{Context, ExecuteStep, Goal, Objective},
15
+
};
16
+
17
+
#[derive(Debug, PartialEq)]
18
+
pub struct Build;
19
+
20
+
impl Display for Build {
21
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
22
+
write!(f, "Build the node")
23
+
}
24
+
}
25
+
26
+
impl ExecuteStep for Build {
27
+
fn should_execute(&self, ctx: &Context) -> bool {
28
+
match ctx.objective {
29
+
Objective::Apply(apply_objective) => {
30
+
!matches!(apply_objective.goal, Goal::Keys | Goal::Push)
31
+
}
32
+
Objective::BuildLocally => true,
33
+
}
34
+
}
35
+
36
+
#[instrument(skip_all, name = "build")]
37
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
38
+
let top_level = ctx.state.evaluation.as_ref().unwrap();
39
+
40
+
let mut command_string = CommandStringBuilder::nix();
41
+
command_string.args(&[
42
+
"--extra-experimental-features",
43
+
"nix-command",
44
+
"build",
45
+
"--print-build-logs",
46
+
"--no-link",
47
+
"--print-out-paths",
48
+
]);
49
+
command_string.arg(top_level.to_string());
50
+
51
+
let status = run_command_with_env(
52
+
&CommandArguments::new(command_string, ctx.modifiers)
53
+
// build remotely if asked for AND we aren't applying locally
54
+
.execute_on_remote(
55
+
if ctx.node.build_remotely
56
+
&& let Objective::Apply(apply_objective) = ctx.objective
57
+
&& !apply_objective.should_apply_locally
58
+
{
59
+
Some(&ctx.node.target)
60
+
} else {
61
+
None
62
+
},
63
+
)
64
+
.mode(crate::commands::ChildOutputMode::Nix)
65
+
.log_stdout(),
66
+
std::collections::HashMap::new(),
67
+
)
68
+
.await?
69
+
.wait_till_success()
70
+
.await
71
+
.map_err(|source| HiveLibError::NixBuildError {
72
+
name: ctx.name.clone(),
73
+
source,
74
+
})?;
75
+
76
+
let stdout = match status {
77
+
Either::Left((_, stdout)) | Either::Right((_, stdout)) => stdout,
78
+
};
79
+
80
+
info!("Built output: {stdout:?}");
81
+
82
+
// print built path to stdout
83
+
println!("{stdout}");
84
+
85
+
ctx.state.build = Some(stdout);
86
+
87
+
Ok(())
88
+
}
89
+
}
+28
crates/core/src/hive/steps/cleanup.rs
+28
crates/core/src/hive/steps/cleanup.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use crate::{
7
+
errors::HiveLibError,
8
+
hive::node::{Context, ExecuteStep},
9
+
};
10
+
11
+
#[derive(PartialEq, Debug)]
12
+
pub(crate) struct CleanUp;
13
+
14
+
impl Display for CleanUp {
15
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
16
+
write!(f, "Clean up")
17
+
}
18
+
}
19
+
20
+
impl ExecuteStep for CleanUp {
21
+
fn should_execute(&self, _ctx: &Context) -> bool {
22
+
false
23
+
}
24
+
25
+
async fn execute(&self, _ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
26
+
Ok(())
27
+
}
28
+
}
+38
crates/core/src/hive/steps/evaluate.rs
+38
crates/core/src/hive/steps/evaluate.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use tracing::instrument;
7
+
8
+
use crate::{
9
+
HiveLibError,
10
+
hive::node::{Context, ExecuteStep, Goal, Objective},
11
+
};
12
+
13
+
#[derive(Debug, PartialEq)]
14
+
pub struct Evaluate;
15
+
16
+
impl Display for Evaluate {
17
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
18
+
write!(f, "Evaluate the node")
19
+
}
20
+
}
21
+
22
+
impl ExecuteStep for Evaluate {
23
+
fn should_execute(&self, ctx: &Context) -> bool {
24
+
match ctx.objective {
25
+
Objective::Apply(apply_objective) => !matches!(apply_objective.goal, Goal::Keys),
26
+
Objective::BuildLocally => true,
27
+
}
28
+
}
29
+
30
+
#[instrument(skip_all, name = "eval")]
31
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
32
+
let rx = ctx.state.evaluation_rx.take().unwrap();
33
+
34
+
ctx.state.evaluation = Some(rx.await.unwrap()?);
35
+
36
+
Ok(())
37
+
}
38
+
}
+441
crates/core/src/hive/steps/keys.rs
+441
crates/core/src/hive/steps/keys.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use base64::Engine;
5
+
use base64::prelude::BASE64_STANDARD;
6
+
use futures::future::join_all;
7
+
use im::Vector;
8
+
use itertools::{Itertools, Position};
9
+
use owo_colors::OwoColorize;
10
+
use prost::Message;
11
+
use prost::bytes::BytesMut;
12
+
use serde::{Deserialize, Serialize};
13
+
use sha2::{Digest, Sha256};
14
+
use std::env;
15
+
use std::fmt::Display;
16
+
use std::io::Cursor;
17
+
use std::iter::Peekable;
18
+
use std::path::PathBuf;
19
+
use std::pin::Pin;
20
+
use std::process::Stdio;
21
+
use std::str::from_utf8;
22
+
use std::vec::IntoIter;
23
+
use tokio::io::AsyncReadExt as _;
24
+
use tokio::process::Command;
25
+
use tokio::{fs::File, io::AsyncRead};
26
+
use tokio_util::codec::LengthDelimitedCodec;
27
+
use tracing::{debug, instrument};
28
+
29
+
use crate::HiveLibError;
30
+
use crate::commands::builder::CommandStringBuilder;
31
+
use crate::commands::common::push;
32
+
use crate::commands::{CommandArguments, WireCommandChip, run_command};
33
+
use crate::errors::KeyError;
34
+
use crate::hive::node::{Context, ExecuteStep, Goal, Objective, Push, SwitchToConfigurationGoal};
35
+
36
+
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
37
+
#[serde(tag = "t", content = "c")]
38
+
pub enum Source {
39
+
String(String),
40
+
Path(PathBuf),
41
+
Command(Vec<String>),
42
+
}
43
+
44
+
#[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq)]
45
+
pub enum UploadKeyAt {
46
+
#[serde(rename = "pre-activation")]
47
+
PreActivation,
48
+
#[serde(rename = "post-activation")]
49
+
PostActivation,
50
+
#[serde(skip)]
51
+
NoFilter,
52
+
}
53
+
54
+
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
55
+
pub struct Key {
56
+
pub name: String,
57
+
#[serde(rename = "destDir")]
58
+
pub dest_dir: String,
59
+
pub path: PathBuf,
60
+
pub group: String,
61
+
pub user: String,
62
+
pub permissions: String,
63
+
pub source: Source,
64
+
#[serde(rename = "uploadAt")]
65
+
pub upload_at: UploadKeyAt,
66
+
#[serde(default)]
67
+
pub environment: im::HashMap<String, String>,
68
+
}
69
+
70
+
impl Display for Key {
71
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
72
+
write!(
73
+
f,
74
+
"{} {} {}:{} {}",
75
+
match self.source {
76
+
Source::String(_) => "Literal",
77
+
Source::Path(_) => "Path",
78
+
Source::Command(_) => "Command",
79
+
}
80
+
.if_supports_color(owo_colors::Stream::Stdout, |x| x.dimmed()),
81
+
[self.dest_dir.clone(), self.name.clone()]
82
+
.iter()
83
+
.collect::<PathBuf>()
84
+
.display(),
85
+
self.user,
86
+
self.group,
87
+
self.permissions,
88
+
)
89
+
}
90
+
}
91
+
92
+
#[cfg(test)]
93
+
impl Default for Key {
94
+
fn default() -> Self {
95
+
use im::HashMap;
96
+
97
+
Self {
98
+
name: "key".into(),
99
+
dest_dir: "/somewhere/".into(),
100
+
path: "key".into(),
101
+
group: "root".into(),
102
+
user: "root".into(),
103
+
permissions: "0600".into(),
104
+
source: Source::String("test key".into()),
105
+
upload_at: UploadKeyAt::PreActivation,
106
+
environment: HashMap::new(),
107
+
}
108
+
}
109
+
}
110
+
111
+
fn get_u32_permission(key: &Key) -> Result<u32, KeyError> {
112
+
u32::from_str_radix(&key.permissions, 8).map_err(KeyError::ParseKeyPermissions)
113
+
}
114
+
115
+
async fn create_reader(key: &'_ Key) -> Result<Pin<Box<dyn AsyncRead + Send + '_>>, KeyError> {
116
+
match &key.source {
117
+
Source::Path(path) => Ok(Box::pin(File::open(path).await.map_err(KeyError::File)?)),
118
+
Source::String(string) => Ok(Box::pin(Cursor::new(string))),
119
+
Source::Command(args) => {
120
+
let output = Command::new(args.first().ok_or(KeyError::Empty)?)
121
+
.args(&args[1..])
122
+
.stdin(Stdio::null())
123
+
.stdout(Stdio::piped())
124
+
.stderr(Stdio::piped())
125
+
.envs(key.environment.clone())
126
+
.spawn()
127
+
.map_err(|err| KeyError::CommandSpawnError {
128
+
error: err,
129
+
command: args.join(" "),
130
+
command_span: Some((0..args.first().unwrap().len()).into()),
131
+
})?
132
+
.wait_with_output()
133
+
.await
134
+
.map_err(|err| KeyError::CommandResolveError {
135
+
error: err,
136
+
command: args.join(" "),
137
+
})?;
138
+
139
+
if output.status.success() {
140
+
return Ok(Box::pin(Cursor::new(output.stdout)));
141
+
}
142
+
143
+
Err(KeyError::CommandError(
144
+
output.status,
145
+
from_utf8(&output.stderr).unwrap().to_string(),
146
+
))
147
+
}
148
+
}
149
+
}
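// `create_reader` yields one `AsyncRead` per key source: the file behind `Source::Path`,
// an in-memory cursor for `Source::String`, or the captured stdout of a successful
// `Source::Command` run (a failing command surfaces its stderr via `KeyError::CommandError`).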
150
+
151
+
async fn process_key(key: &Key) -> Result<(wire_key_agent::keys::KeySpec, Vec<u8>), KeyError> {
152
+
let mut reader = create_reader(key).await?;
153
+
154
+
let mut buf = Vec::new();
155
+
156
+
reader
157
+
.read_to_end(&mut buf)
158
+
.await
159
+
.expect("failed to read into buffer");
160
+
161
+
let destination: PathBuf = [key.dest_dir.clone(), key.name.clone()].iter().collect();
162
+
163
+
debug!("Staging push to {}", destination.clone().display());
164
+
165
+
Ok((
166
+
wire_key_agent::keys::KeySpec {
167
+
length: buf
168
+
.len()
169
+
.try_into()
170
+
.expect("Failed to convert usize buf length to i32"),
171
+
user: key.user.clone(),
172
+
group: key.group.clone(),
173
+
permissions: get_u32_permission(key)?,
174
+
destination: destination.into_os_string().into_string().unwrap(),
175
+
digest: Sha256::digest(&buf).to_vec(),
176
+
last: false,
177
+
},
178
+
buf,
179
+
))
180
+
}
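// The `KeySpec` built above carries what the remote key agent needs to install the
// secret: payload length, owner/group, octal permissions, destination path and a
// SHA-256 digest (presumably checked on the receiving end); the raw bytes are returned
// alongside it.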
181
+
182
+
#[derive(Debug, PartialEq)]
183
+
pub struct Keys {
184
+
pub filter: UploadKeyAt,
185
+
}
186
+
#[derive(Debug, PartialEq)]
187
+
pub struct PushKeyAgent;
188
+
189
+
impl Display for Keys {
190
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
191
+
write!(f, "Upload key @ {:?}", self.filter)
192
+
}
193
+
}
194
+
195
+
impl Display for PushKeyAgent {
196
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
197
+
write!(f, "Push the key agent")
198
+
}
199
+
}
200
+
201
+
pub struct SimpleLengthDelimWriter<F> {
202
+
codec: LengthDelimitedCodec,
203
+
write_fn: F,
204
+
}
205
+
206
+
impl<F> SimpleLengthDelimWriter<F>
207
+
where
208
+
F: AsyncFnMut(Vec<u8>) -> Result<(), HiveLibError>,
209
+
{
210
+
fn new(write_fn: F) -> Self {
211
+
Self {
212
+
codec: LengthDelimitedCodec::new(),
213
+
write_fn,
214
+
}
215
+
}
216
+
217
+
async fn send(&mut self, data: prost::bytes::Bytes) -> Result<(), HiveLibError> {
218
+
let mut buffer = BytesMut::new();
219
+
tokio_util::codec::Encoder::encode(&mut self.codec, data, &mut buffer)
220
+
.map_err(HiveLibError::Encoding)?;
221
+
222
+
(self.write_fn)(buffer.to_vec()).await?;
223
+
Ok(())
224
+
}
225
+
}
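// `SimpleLengthDelimWriter` length-prefixes every frame with `LengthDelimitedCodec`
// before handing the bytes to the supplied async callback; in `Keys::execute` below
// that callback writes straight into the key agent's stdin.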
226
+
227
+
impl ExecuteStep for Keys {
228
+
fn should_execute(&self, ctx: &Context) -> bool {
229
+
let Objective::Apply(apply_objective) = ctx.objective else {
230
+
return false;
231
+
};
232
+
233
+
if apply_objective.no_keys {
234
+
return false;
235
+
}
236
+
237
+
// should execute if no filter, and the goal is keys.
238
+
// otherwise, only execute for the pre/post-activation filters when the goal is switch
239
+
matches!(
240
+
(&self.filter, &apply_objective.goal),
241
+
(UploadKeyAt::NoFilter, Goal::Keys)
242
+
| (
243
+
UploadKeyAt::PreActivation | UploadKeyAt::PostActivation,
244
+
Goal::SwitchToConfiguration(SwitchToConfigurationGoal::Switch)
245
+
)
246
+
)
247
+
}
248
+
249
+
#[instrument(skip_all, name = "keys")]
250
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
251
+
let agent_directory = ctx.state.key_agent_directory.as_ref().unwrap();
252
+
253
+
let mut keys = self.select_keys(&ctx.node.keys).await?;
254
+
255
+
if keys.peek().is_none() {
256
+
debug!("Had no keys to push, ending KeyStep early.");
257
+
return Ok(());
258
+
}
259
+
260
+
let command_string =
261
+
CommandStringBuilder::new(format!("{agent_directory}/bin/wire-key-agent"));
262
+
263
+
let Objective::Apply(apply_objective) = ctx.objective else {
264
+
unreachable!()
265
+
};
266
+
267
+
let mut child = run_command(
268
+
&CommandArguments::new(command_string, ctx.modifiers)
269
+
.execute_on_remote(if apply_objective.should_apply_locally {
270
+
None
271
+
} else {
272
+
Some(&ctx.node.target)
273
+
})
274
+
.elevated(ctx.node)
275
+
.keep_stdin_open()
276
+
.log_stdout(),
277
+
)
278
+
.await?;
279
+
280
+
let mut writer = SimpleLengthDelimWriter::new(async |data| child.write_stdin(data).await);
281
+
282
+
for (position, (mut spec, buf)) in keys.with_position() {
283
+
if matches!(position, Position::Last | Position::Only) {
284
+
spec.last = true;
285
+
}
286
+
287
+
debug!("Writing spec & buf for {:?}", spec);
288
+
289
+
writer
290
+
.send(BASE64_STANDARD.encode(spec.encode_to_vec()).into())
291
+
.await?;
292
+
writer.send(BASE64_STANDARD.encode(buf).into()).await?;
293
+
}
294
+
295
+
let status = child
296
+
.wait_till_success()
297
+
.await
298
+
.map_err(HiveLibError::CommandError)?;
299
+
300
+
debug!("status: {status:?}");
301
+
302
+
Ok(())
303
+
}
304
+
}
305
+
306
+
impl Keys {
307
+
async fn select_keys(
308
+
&self,
309
+
keys: &Vector<Key>,
310
+
) -> Result<Peekable<IntoIter<(wire_key_agent::keys::KeySpec, std::vec::Vec<u8>)>>, HiveLibError>
311
+
{
312
+
let futures = keys
313
+
.iter()
314
+
.filter(|key| self.filter == UploadKeyAt::NoFilter || (key.upload_at == self.filter))
315
+
.map(|key| async move {
316
+
process_key(key)
317
+
.await
318
+
.map_err(|err| HiveLibError::KeyError(key.name.clone(), err))
319
+
});
320
+
321
+
Ok(join_all(futures)
322
+
.await
323
+
.into_iter()
324
+
.collect::<Result<Vec<_>, HiveLibError>>()?
325
+
.into_iter()
326
+
.peekable())
327
+
}
328
+
}
329
+
330
+
impl ExecuteStep for PushKeyAgent {
331
+
fn should_execute(&self, ctx: &Context) -> bool {
332
+
let Objective::Apply(apply_objective) = ctx.objective else {
333
+
return false;
334
+
};
335
+
336
+
if apply_objective.no_keys {
337
+
return false;
338
+
}
339
+
340
+
matches!(
341
+
&apply_objective.goal,
342
+
Goal::Keys | Goal::SwitchToConfiguration(SwitchToConfigurationGoal::Switch)
343
+
)
344
+
}
345
+
346
+
#[instrument(skip_all, name = "push_agent")]
347
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
348
+
let arg_name = format!(
349
+
"WIRE_KEY_AGENT_{platform}",
350
+
platform = ctx.node.host_platform.replace('-', "_")
351
+
);
352
+
353
+
let agent_directory = match env::var_os(&arg_name) {
354
+
Some(agent) => agent.into_string().unwrap(),
355
+
None => panic!(
356
+
"{arg_name} environment variable not set! \n
357
+
wire was not built with the ability to deploy keys to this platform. \n
358
+
Please create an issue: https://github.com/forallsys/wire/issues/new?template=bug_report.md"
359
+
),
360
+
};
361
+
362
+
let Objective::Apply(apply_objective) = ctx.objective else {
363
+
unreachable!()
364
+
};
365
+
366
+
if !apply_objective.should_apply_locally {
367
+
push(ctx, Push::Path(&agent_directory)).await?;
368
+
}
369
+
370
+
ctx.state.key_agent_directory = Some(agent_directory);
371
+
372
+
Ok(())
373
+
}
374
+
}
375
+
376
+
#[cfg(test)]
377
+
mod tests {
378
+
use im::Vector;
379
+
380
+
use crate::hive::steps::keys::{Key, Keys, UploadKeyAt, process_key};
381
+
382
+
fn new_key(upload_at: &UploadKeyAt) -> Key {
383
+
Key {
384
+
upload_at: upload_at.clone(),
385
+
source: super::Source::String(match upload_at {
386
+
UploadKeyAt::PreActivation => "pre".into(),
387
+
UploadKeyAt::PostActivation => "post".into(),
388
+
UploadKeyAt::NoFilter => "none".into(),
389
+
}),
390
+
..Default::default()
391
+
}
392
+
}
393
+
394
+
#[tokio::test]
395
+
async fn key_filtering() {
396
+
let keys = Vector::from(vec![
397
+
new_key(&UploadKeyAt::PreActivation),
398
+
new_key(&UploadKeyAt::PostActivation),
399
+
new_key(&UploadKeyAt::PreActivation),
400
+
new_key(&UploadKeyAt::PostActivation),
401
+
]);
402
+
403
+
for (_, buf) in (Keys {
404
+
filter: crate::hive::steps::keys::UploadKeyAt::PreActivation,
405
+
})
406
+
.select_keys(&keys)
407
+
.await
408
+
.unwrap()
409
+
{
410
+
assert_eq!(String::from_utf8_lossy(&buf), "pre");
411
+
}
412
+
413
+
for (_, buf) in (Keys {
414
+
filter: crate::hive::steps::keys::UploadKeyAt::PostActivation,
415
+
})
416
+
.select_keys(&keys)
417
+
.await
418
+
.unwrap()
419
+
{
420
+
assert_eq!(String::from_utf8_lossy(&buf), "post");
421
+
}
422
+
423
+
// test that NoFilter processes all keys.
424
+
let processed_all =
425
+
futures::future::join_all(keys.iter().map(async |x| process_key(x).await))
426
+
.await
427
+
.iter()
428
+
.flatten()
429
+
.cloned()
430
+
.collect::<Vec<_>>();
431
+
let no_filter = (Keys {
432
+
filter: crate::hive::steps::keys::UploadKeyAt::NoFilter,
433
+
})
434
+
.select_keys(&keys)
435
+
.await
436
+
.unwrap()
437
+
.collect::<Vec<_>>();
438
+
439
+
assert_eq!(processed_all, no_filter);
440
+
}
441
+
}
+10
crates/core/src/hive/steps/mod.rs
+10
crates/core/src/hive/steps/mod.rs
+58
crates/core/src/hive/steps/ping.rs
+58
crates/core/src/hive/steps/ping.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use tracing::{Level, event, instrument};
7
+
8
+
use crate::{
9
+
HiveLibError,
10
+
hive::node::{Context, ExecuteStep, Objective},
11
+
};
12
+
13
+
#[derive(Debug, PartialEq)]
14
+
pub struct Ping;
15
+
16
+
impl Display for Ping {
17
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
18
+
write!(f, "Ping node")
19
+
}
20
+
}
21
+
22
+
impl ExecuteStep for Ping {
23
+
fn should_execute(&self, ctx: &Context) -> bool {
24
+
let Objective::Apply(apply_objective) = ctx.objective else {
25
+
return false;
26
+
};
27
+
28
+
!apply_objective.should_apply_locally
29
+
}
30
+
31
+
#[instrument(skip_all, name = "ping")]
32
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
33
+
loop {
34
+
event!(
35
+
Level::INFO,
36
+
status = "attempting",
37
+
host = ctx.node.target.get_preferred_host()?.to_string()
38
+
);
39
+
40
+
if ctx.node.ping(ctx.modifiers).await.is_ok() {
41
+
event!(
42
+
Level::INFO,
43
+
status = "success",
44
+
host = ctx.node.target.get_preferred_host()?.to_string()
45
+
);
46
+
return Ok(());
47
+
}
48
+
49
+
// the `?` on `get_preferred_host` takes us out once we run out of hosts
50
+
event!(
51
+
Level::WARN,
52
+
status = "failed to ping",
53
+
host = ctx.node.target.get_preferred_host()?.to_string()
54
+
);
55
+
ctx.node.target.host_failed();
56
+
}
57
+
}
58
+
}
+84
crates/core/src/hive/steps/push.rs
+84
crates/core/src/hive/steps/push.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use tracing::instrument;
7
+
8
+
use crate::{
9
+
HiveLibError,
10
+
commands::common::push,
11
+
hive::node::{Context, ExecuteStep, Goal, Objective},
12
+
};
13
+
14
+
#[derive(Debug, PartialEq)]
15
+
pub struct PushEvaluatedOutput;
16
+
#[derive(Debug, PartialEq)]
17
+
pub struct PushBuildOutput;
18
+
19
+
impl Display for PushEvaluatedOutput {
20
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
21
+
write!(f, "Push the evaluated output")
22
+
}
23
+
}
24
+
25
+
impl Display for PushBuildOutput {
26
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
27
+
write!(f, "Push the build output")
28
+
}
29
+
}
30
+
31
+
impl ExecuteStep for PushEvaluatedOutput {
32
+
fn should_execute(&self, ctx: &Context) -> bool {
33
+
let Objective::Apply(apply_objective) = ctx.objective else {
34
+
return false;
35
+
};
36
+
37
+
!matches!(apply_objective.goal, Goal::Keys)
38
+
&& !apply_objective.should_apply_locally
39
+
&& (ctx.node.build_remotely | matches!(apply_objective.goal, Goal::Push))
40
+
}
41
+
42
+
#[instrument(skip_all, name = "push_eval")]
43
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
44
+
let top_level = ctx.state.evaluation.as_ref().unwrap();
45
+
46
+
push(ctx, crate::hive::node::Push::Derivation(top_level)).await?;
47
+
48
+
Ok(())
49
+
}
50
+
}
51
+
52
+
impl ExecuteStep for PushBuildOutput {
53
+
fn should_execute(&self, ctx: &Context) -> bool {
54
+
let Objective::Apply(apply_objective) = ctx.objective else {
55
+
return false;
56
+
};
57
+
58
+
if matches!(apply_objective.goal, Goal::Keys | Goal::Push) {
59
+
// skip if we are not building
60
+
return false;
61
+
}
62
+
63
+
if ctx.node.build_remotely {
64
+
// skip if we are building remotely
65
+
return false;
66
+
}
67
+
68
+
if apply_objective.should_apply_locally {
69
+
// skip step if we are applying locally
70
+
return false;
71
+
}
72
+
73
+
true
74
+
}
75
+
76
+
#[instrument(skip_all, name = "push_build")]
77
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
78
+
let built_path = ctx.state.build.as_ref().unwrap();
79
+
80
+
push(ctx, crate::hive::node::Push::Path(built_path)).await?;
81
+
82
+
Ok(())
83
+
}
84
+
}
+71
crates/core/src/lib.rs
+71
crates/core/src/lib.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#![feature(assert_matches)]
5
+
#![feature(iter_intersperse)]
6
+
#![feature(sync_nonpoison)]
7
+
#![feature(nonpoison_mutex)]
8
+
9
+
use std::{
10
+
io::{IsTerminal, stderr},
11
+
sync::LazyLock,
12
+
};
13
+
14
+
use tokio::sync::{AcquireError, Semaphore, SemaphorePermit};
15
+
16
+
use crate::{errors::HiveLibError, hive::node::Name, status::STATUS};
17
+
18
+
pub mod cache;
19
+
pub mod commands;
20
+
pub mod hive;
21
+
pub mod status;
22
+
23
+
#[cfg(test)]
24
+
mod test_macros;
25
+
26
+
#[cfg(test)]
27
+
mod test_support;
28
+
29
+
pub mod errors;
30
+
31
+
#[derive(Clone, Debug, Copy, Default)]
32
+
pub enum StrictHostKeyChecking {
33
+
/// do not accept new host. dangerous!
34
+
No,
35
+
36
+
/// accept-new, default
37
+
#[default]
38
+
AcceptNew,
39
+
}
40
+
41
+
#[derive(Debug, Clone, Copy)]
42
+
pub struct SubCommandModifiers {
43
+
pub show_trace: bool,
44
+
pub non_interactive: bool,
45
+
pub ssh_accept_host: StrictHostKeyChecking,
46
+
}
47
+
48
+
impl Default for SubCommandModifiers {
49
+
fn default() -> Self {
50
+
SubCommandModifiers {
51
+
show_trace: false,
52
+
non_interactive: !std::io::stdin().is_terminal(),
53
+
ssh_accept_host: StrictHostKeyChecking::default(),
54
+
}
55
+
}
56
+
}
57
+
58
+
pub enum EvalGoal<'a> {
59
+
Inspect,
60
+
Names,
61
+
GetTopLevel(&'a Name),
62
+
}
63
+
64
+
pub static STDIN_CLOBBER_LOCK: LazyLock<Semaphore> = LazyLock::new(|| Semaphore::new(1));
65
+
66
+
pub async fn acquire_stdin_lock<'a>() -> Result<SemaphorePermit<'a>, AcquireError> {
67
+
let result = STDIN_CLOBBER_LOCK.acquire().await?;
68
+
STATUS.lock().wipe_out(&mut stderr());
69
+
70
+
Ok(result)
71
+
}
+173
crates/core/src/status.rs
+173
crates/core/src/status.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use owo_colors::OwoColorize;
5
+
use std::{fmt::Write, time::Instant};
6
+
use termion::{clear, cursor};
7
+
8
+
use crate::{STDIN_CLOBBER_LOCK, hive::node::Name};
9
+
10
+
use std::{
11
+
collections::HashMap,
12
+
sync::{LazyLock, nonpoison::Mutex},
13
+
};
14
+
15
+
#[derive(Default)]
16
+
pub enum NodeStatus {
17
+
#[default]
18
+
Pending,
19
+
Running(String),
20
+
Succeeded,
21
+
Failed,
22
+
}
23
+
24
+
pub struct Status {
25
+
statuses: HashMap<String, NodeStatus>,
26
+
began: Instant,
27
+
show_progress: bool,
28
+
}
29
+
30
+
/// global status used for the progress bar in the cli crate
31
+
pub static STATUS: LazyLock<Mutex<Status>> = LazyLock::new(|| Mutex::new(Status::new()));
32
+
33
+
impl Status {
34
+
fn new() -> Self {
35
+
Self {
36
+
statuses: HashMap::default(),
37
+
began: Instant::now(),
38
+
show_progress: false,
39
+
}
40
+
}
41
+
42
+
pub const fn show_progress(&mut self, show_progress: bool) {
43
+
self.show_progress = show_progress;
44
+
}
45
+
46
+
pub fn add_many(&mut self, names: &[&Name]) {
47
+
self.statuses.extend(
48
+
names
49
+
.iter()
50
+
.map(|name| (name.0.to_string(), NodeStatus::Pending)),
51
+
);
52
+
}
53
+
54
+
pub fn set_node_step(&mut self, node: &Name, step: String) {
55
+
self.statuses
56
+
.insert(node.0.to_string(), NodeStatus::Running(step));
57
+
}
58
+
59
+
pub fn mark_node_failed(&mut self, node: &Name) {
60
+
self.statuses.insert(node.0.to_string(), NodeStatus::Failed);
61
+
}
62
+
63
+
pub fn mark_node_succeeded(&mut self, node: &Name) {
64
+
self.statuses
65
+
.insert(node.0.to_string(), NodeStatus::Succeeded);
66
+
}
67
+
68
+
#[must_use]
69
+
fn num_finished(&self) -> usize {
70
+
self.statuses
71
+
.iter()
72
+
.filter(|(_, status)| matches!(status, NodeStatus::Succeeded | NodeStatus::Failed))
73
+
.count()
74
+
}
75
+
76
+
#[must_use]
77
+
fn num_running(&self) -> usize {
78
+
self.statuses
79
+
.iter()
80
+
.filter(|(_, status)| matches!(status, NodeStatus::Running(..)))
81
+
.count()
82
+
}
83
+
84
+
#[must_use]
85
+
fn num_failed(&self) -> usize {
86
+
self.statuses
87
+
.iter()
88
+
.filter(|(_, status)| matches!(status, NodeStatus::Failed))
89
+
.count()
90
+
}
91
+
92
+
#[must_use]
93
+
pub fn get_msg(&self) -> String {
94
+
if self.statuses.is_empty() {
95
+
return String::new();
96
+
}
97
+
98
+
let mut msg = format!("[{} / {}", self.num_finished(), self.statuses.len(),);
99
+
100
+
let num_failed = self.num_failed();
101
+
let num_running = self.num_running();
102
+
103
+
let failed = if num_failed >= 1 {
104
+
Some(format!("{} Failed", num_failed.red()))
105
+
} else {
106
+
None
107
+
};
108
+
109
+
let running = if num_running >= 1 {
110
+
Some(format!("{} Deploying", num_running.blue()))
111
+
} else {
112
+
None
113
+
};
114
+
115
+
let _ = match (failed, running) {
116
+
(None, None) => write!(&mut msg, ""),
117
+
(Some(message), None) | (None, Some(message)) => write!(&mut msg, " ({message})"),
118
+
(Some(failed), Some(running)) => write!(&mut msg, " ({failed}, {running})"),
119
+
};
120
+
121
+
let _ = write!(&mut msg, "]");
122
+
123
+
let _ = write!(&mut msg, " {}s", self.began.elapsed().as_secs());
124
+
125
+
msg
126
+
}
127
+
128
+
pub fn clear<T: std::io::Write>(&self, writer: &mut T) {
129
+
if !self.show_progress {
130
+
return;
131
+
}
132
+
133
+
let _ = write!(writer, "{}", cursor::Save);
134
+
// let _ = write!(writer, "{}", cursor::Down(1));
135
+
let _ = write!(writer, "{}", cursor::Left(999));
136
+
let _ = write!(writer, "{}", clear::CurrentLine);
137
+
}
138
+
139
+
/// used when there is an interactive prompt
140
+
pub fn wipe_out<T: std::io::Write>(&self, writer: &mut T) {
141
+
if !self.show_progress {
142
+
return;
143
+
}
144
+
145
+
let _ = write!(writer, "{}", cursor::Save);
146
+
let _ = write!(writer, "{}", cursor::Left(999));
147
+
let _ = write!(writer, "{}", clear::CurrentLine);
148
+
let _ = writer.flush();
149
+
}
150
+
151
+
pub fn write_status<T: std::io::Write>(&mut self, writer: &mut T) {
152
+
if self.show_progress {
153
+
let _ = write!(writer, "{}", self.get_msg());
154
+
}
155
+
}
156
+
157
+
pub fn write_above_status<T: std::io::Write>(
158
+
&mut self,
159
+
buf: &[u8],
160
+
writer: &mut T,
161
+
) -> std::io::Result<usize> {
162
+
if STDIN_CLOBBER_LOCK.available_permits() != 1 {
163
+
// skip
164
+
return Ok(0);
165
+
}
166
+
167
+
self.clear(writer);
168
+
let written = writer.write(buf)?;
169
+
self.write_status(writer);
170
+
171
+
Ok(written)
172
+
}
173
+
}
+43
crates/core/src/test_macros.rs
+43
crates/core/src/test_macros.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#[macro_export]
5
+
macro_rules! function_name {
6
+
() => {{
7
+
fn f() {}
8
+
fn type_name_of<T>(_: T) -> &'static str {
9
+
std::any::type_name::<T>()
10
+
}
11
+
let name = type_name_of(f);
12
+
// closure for async functions
13
+
&name[..name.len() - 3]
14
+
}};
15
+
}
16
+
17
+
#[macro_export]
18
+
macro_rules! get_test_path {
19
+
() => {{
20
+
let mut path: PathBuf = env::var("WIRE_TEST_DIR").unwrap().into();
21
+
let full_name = $crate::function_name!();
22
+
let function_name = full_name
23
+
.trim_end_matches("::{{closure}}")
24
+
.split("::")
25
+
.last()
26
+
.unwrap();
27
+
path.push(function_name);
28
+
29
+
path
30
+
}};
31
+
}
32
+
33
+
#[macro_export]
34
+
macro_rules! location {
35
+
($path:expr) => {{
36
+
$crate::hive::get_hive_location(
37
+
$path.display().to_string(),
38
+
$crate::SubCommandModifiers::default(),
39
+
)
40
+
.await
41
+
.unwrap()
42
+
}};
43
+
}
+67
crates/core/src/test_support.rs
+67
crates/core/src/test_support.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::{
5
+
fs::{self, create_dir},
6
+
io,
7
+
path::Path,
8
+
process::Command,
9
+
};
10
+
11
+
use tempdir::TempDir;
12
+
13
+
pub fn make_flake_sandbox(path: &Path) -> Result<TempDir, io::Error> {
14
+
let tmp_dir = TempDir::new("wire-test")?;
15
+
16
+
Command::new("git")
17
+
.args(["init", "-b", "tmp"])
18
+
.current_dir(tmp_dir.path())
19
+
.status()?;
20
+
21
+
for entry in fs::read_dir(path)? {
22
+
let entry = entry?;
23
+
24
+
fs::copy(entry.path(), tmp_dir.as_ref().join(entry.file_name()))?;
25
+
}
26
+
27
+
let root = path.parent().unwrap().parent().unwrap().parent().unwrap();
28
+
29
+
create_dir(tmp_dir.as_ref().join("module/"))?;
30
+
31
+
fs::copy(
32
+
root.join(Path::new("runtime/evaluate.nix")),
33
+
tmp_dir.as_ref().join("evaluate.nix"),
34
+
)?;
35
+
fs::copy(
36
+
root.join(Path::new("runtime/module/config.nix")),
37
+
tmp_dir.as_ref().join("module/config.nix"),
38
+
)?;
39
+
fs::copy(
40
+
root.join(Path::new("runtime/module/options.nix")),
41
+
tmp_dir.as_ref().join("module/options.nix"),
42
+
)?;
43
+
fs::copy(
44
+
root.join(Path::new("runtime/module/default.nix")),
45
+
tmp_dir.as_ref().join("module/default.nix"),
46
+
)?;
47
+
fs::copy(
48
+
root.join(Path::new("runtime/makeHive.nix")),
49
+
tmp_dir.as_ref().join("makeHive.nix"),
50
+
)?;
51
+
fs::copy(
52
+
root.join(Path::new("flake.lock")),
53
+
tmp_dir.as_ref().join("flake.lock"),
54
+
)?;
55
+
56
+
Command::new("git")
57
+
.args(["add", "-A"])
58
+
.current_dir(tmp_dir.path())
59
+
.status()?;
60
+
61
+
Command::new("nix")
62
+
.args(["flake", "lock"])
63
+
.current_dir(tmp_dir.path())
64
+
.status()?;
65
+
66
+
Ok(tmp_dir)
67
+
}
+20
crates/key_agent/Cargo.toml
+20
crates/key_agent/Cargo.toml
···
1
+
[package]
2
+
name = "wire-key-agent"
3
+
edition.workspace = true
4
+
version.workspace = true
5
+
6
+
[dependencies]
7
+
tokio = { workspace = true }
8
+
tokio-util = { workspace = true }
9
+
anyhow = { workspace = true }
10
+
prost = { workspace = true }
11
+
nix = { workspace = true }
12
+
futures-util = { workspace = true }
13
+
sha2 = { workspace = true }
14
+
base64 = { workspace = true }
15
+
16
+
[build-dependencies]
17
+
prost-build = "0.14"
18
+
19
+
[lints]
20
+
workspace = true
+8
crates/key_agent/build.rs
+8
crates/key_agent/build.rs
+17
crates/key_agent/default.nix
+17
crates/key_agent/default.nix
+17
crates/key_agent/src/keys.proto
+17
crates/key_agent/src/keys.proto
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
syntax = "proto3";
5
+
6
+
package key_agent.keys;
7
+
8
+
message KeySpec {
9
+
string destination = 1;
10
+
string user = 2;
11
+
string group = 3;
12
+
uint32 permissions = 4;
13
+
uint32 length = 5;
14
+
bool last = 6;
15
+
/// Sha256 digest
16
+
bytes digest = 7;
17
+
}
+6
crates/key_agent/src/lib.rs
+6
crates/key_agent/src/lib.rs
+94
crates/key_agent/src/main.rs
+94
crates/key_agent/src/main.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#![deny(clippy::pedantic)]
5
+
use base64::Engine;
6
+
use base64::prelude::BASE64_STANDARD;
7
+
use futures_util::stream::StreamExt;
8
+
use nix::unistd::{Group, User};
9
+
use prost::Message;
10
+
use prost::bytes::Bytes;
11
+
use sha2::{Digest, Sha256};
12
+
use std::os::unix::fs::PermissionsExt;
13
+
use std::os::unix::fs::chown;
14
+
use std::path::{Path, PathBuf};
15
+
use tokio::fs::File;
16
+
use tokio::io::AsyncWriteExt;
17
+
use tokio_util::codec::{FramedRead, LengthDelimitedCodec};
18
+
use wire_key_agent::keys::KeySpec;
19
+
20
+
fn create_path(key_path: &Path) -> Result<(), anyhow::Error> {
21
+
let prefix = key_path.parent().unwrap();
22
+
std::fs::create_dir_all(prefix)?;
23
+
24
+
Ok(())
25
+
}
26
+
27
+
fn pretty_keyspec(spec: &KeySpec) -> String {
28
+
format!(
29
+
"{} {}:{} {}",
30
+
spec.destination, spec.user, spec.group, spec.permissions
31
+
)
32
+
}
33
+
34
+
#[tokio::main]
35
+
async fn main() -> Result<(), anyhow::Error> {
36
+
let stdin = tokio::io::stdin();
37
+
38
+
let mut framed = FramedRead::new(stdin, LengthDelimitedCodec::new());
39
+
40
+
while let Some(spec_bytes) = framed.next().await {
41
+
let spec_bytes = Bytes::from(BASE64_STANDARD.decode(spec_bytes?)?);
42
+
let spec = KeySpec::decode(spec_bytes)?;
43
+
44
+
let key_bytes = BASE64_STANDARD.decode(
45
+
framed
46
+
.next()
47
+
.await
48
+
.expect("expected key_bytes to come after spec_bytes")?,
49
+
)?;
50
+
51
+
let digest = Sha256::digest(&key_bytes).to_vec();
52
+
53
+
println!(
54
+
"Writing {}, {:?} bytes of data",
55
+
pretty_keyspec(&spec),
56
+
key_bytes.len()
57
+
);
58
+
59
+
if digest != spec.digest {
60
+
return Err(anyhow::anyhow!(
61
+
"digest of {spec:?} did not match {digest:?}! Please create an issue!"
62
+
));
63
+
}
64
+
65
+
let path = PathBuf::from(&spec.destination);
66
+
create_path(&path)?;
67
+
68
+
let mut file = File::create(path).await?;
69
+
let mut permissions = file.metadata().await?.permissions();
70
+
71
+
permissions.set_mode(spec.permissions);
72
+
file.set_permissions(permissions).await?;
73
+
74
+
let user = User::from_name(&spec.user)?;
75
+
let group = Group::from_name(&spec.group)?;
76
+
77
+
chown(
78
+
spec.destination,
79
+
// Default uid/gid to 0. This is then wrapped around an Option again for
80
+
// the function.
81
+
Some(user.map_or(0, |user| user.uid.into())),
82
+
Some(group.map_or(0, |group| group.gid.into())),
83
+
)?;
84
+
85
+
file.write_all(&key_bytes).await?;
86
+
87
+
// last key, goodbye
88
+
if spec.last {
89
+
break;
90
+
}
91
+
}
92
+
93
+
Ok(())
94
+
}
+13
-12
default.nix
+13
-12
default.nix
···
1
-
(import (
2
-
let
3
-
lock = builtins.fromJSON (builtins.readFile ./flake.lock);
4
-
nodeName = lock.nodes.root.inputs.flake-compat;
5
-
in
6
-
fetchTarball {
7
-
url =
8
-
lock.nodes.${nodeName}.locked.url
9
-
or "https://github.com/edolstra/flake-compat/archive/${lock.nodes.${nodeName}.locked.rev}.tar.gz";
10
-
sha256 = lock.nodes.${nodeName}.locked.narHash;
11
-
}
12
-
) { src = ./.; }).defaultNix
1
+
let
2
+
lockFile = builtins.fromJSON (builtins.readFile ./flake.lock);
3
+
flake-compat-node = lockFile.nodes.${lockFile.nodes.root.inputs.flake-compat};
4
+
flake-compat = builtins.fetchTarball {
5
+
inherit (flake-compat-node.locked) url;
6
+
sha256 = flake-compat-node.locked.narHash;
7
+
};
8
+
9
+
flake = import flake-compat {
10
+
src = ./.;
11
+
};
12
+
in
13
+
flake.defaultNix
+108
-21
doc/.vitepress/config.ts
+108
-21
doc/.vitepress/config.ts
···
7
7
localIconLoader,
8
8
} from "vitepress-plugin-group-icons";
9
9
10
+
const MODE = (process.env.MODE ?? "unstable") as "unstable" | "stable";
11
+
10
12
// https://vitepress.dev/reference/site-config
11
13
export default defineConfig({
12
-
title: "wire",
14
+
title: MODE === "stable" ? "wire" : "wire (unstable!)",
13
15
description: "a tool to deploy nixos systems",
14
16
themeConfig: {
15
17
search: {
16
18
provider: "local",
17
19
},
18
20
21
+
footer: {
22
+
message:
23
+
'Released under the <a href="https://github.com/forallsys/wire/blob/trunk/COPYING">AGPL-3.0 License</a>.',
24
+
copyright: "Copyright 2024-2025 wire Contributors",
25
+
},
26
+
19
27
// https://vitepress.dev/reference/default-theme-config
20
28
nav: [
21
29
{ text: "Home", link: "/" },
22
-
{ text: "Guide", link: "/guide/wire" },
30
+
{ text: "Tutorial", link: "/tutorial/overview" },
31
+
{ text: "Guides", link: "/guides/installation" },
23
32
{ text: "Reference", link: "/reference/cli" },
24
33
{
25
-
text: pkg.version,
34
+
text: MODE === "stable" ? pkg.version : `Unstable (${pkg.version})`,
26
35
items: [
36
+
MODE === "unstable"
37
+
? {
38
+
text: `View Stable`,
39
+
link: "https://wire.althaea.zone",
40
+
}
41
+
: {
42
+
text: "View Unstable",
43
+
link: "https://trunk.wire-docs.pages.dev",
44
+
},
27
45
{
28
46
text: "Changelog",
29
-
link: "https://github.com/wires-org/wire/blob/main/CHANGELOG.md",
47
+
link: "https://github.com/forallsys/wire/blob/trunk/CHANGELOG.md",
48
+
},
49
+
{
50
+
text: "CI Server",
51
+
link: "https://buildbot.althaea.zone/#/projects/1",
30
52
},
31
53
],
32
54
},
33
55
],
34
56
35
57
sidebar: {
36
-
"/guide/": [
58
+
"/": [
37
59
{
38
-
text: "Introduction",
60
+
text: "Tutorial",
61
+
collapsed: false,
39
62
items: [
40
-
{ text: "What is Wire?", link: "/guide/wire" },
41
-
{ text: "Getting Started", link: "/guide/getting-started" },
42
-
{ text: "Targeting Nodes", link: "/guide/targeting" },
63
+
{ text: "Overview", link: "/tutorial/overview" },
64
+
{
65
+
text: "Part One",
66
+
items: [
67
+
{
68
+
text: "Nix Setup",
69
+
link: "/tutorial/part-one/nix-setup",
70
+
},
71
+
{
72
+
text: "Preparing Repo & Shell",
73
+
link: "/tutorial/part-one/repo-setup",
74
+
},
75
+
{
76
+
text: "Creating a Virtual Machine",
77
+
link: "/tutorial/part-one/vm-setup",
78
+
},
79
+
{
80
+
text: "Basic Hive & Deployment",
81
+
link: "/tutorial/part-one/basic-hive",
82
+
},
83
+
],
84
+
},
85
+
{
86
+
text: "Part Two",
87
+
items: [
88
+
{
89
+
text: "Basic Deployment Keys",
90
+
link: "/tutorial/part-two/basic-keys",
91
+
},
92
+
{
93
+
text: "Encrypted Deployment Keys",
94
+
link: "/tutorial/part-two/encryption",
95
+
},
96
+
],
97
+
},
43
98
],
44
99
},
45
100
{
46
-
text: "Features",
101
+
text: "How-To Guides",
102
+
collapsed: false,
47
103
items: [
48
-
{ text: "Secret management", link: "/guide/keys" },
49
-
{ text: "Parallelism", link: "/guide/parallelism" },
50
-
{ text: "hive.default", link: "/guide/hive-default" },
51
-
{ text: "Magic Rollback", link: "/guide/magic-rollback" },
104
+
{ text: "Install wire", link: "/guides/installation" },
105
+
{
106
+
text: "Write a Hive",
107
+
link: "/guides/writing-a-hive",
108
+
},
109
+
{ text: "Migrate to wire", link: "/guides/migrate" },
110
+
{
111
+
text: "Use Flakes",
112
+
link: "/guides/flakes/overview",
113
+
items: [
114
+
{
115
+
text: "Keep Using nixos-rebuild",
116
+
link: "/guides/flakes/nixos-rebuild",
117
+
},
118
+
],
119
+
},
120
+
{ text: "Apply your Config", link: "/guides/apply" },
121
+
{ text: "Target Nodes", link: "/guides/targeting" },
122
+
{ text: "Build in CI", link: "/guides/build-in-ci" },
123
+
{
124
+
text: "Features",
125
+
items: [
126
+
{
127
+
text: "Use a non-root user",
128
+
link: "/guides/non-root-user",
129
+
},
130
+
{ text: "Manage Secrets", link: "/guides/keys" },
131
+
{ text: "Use Parallelism", link: "/guides/parallelism" },
132
+
{ text: "Use hive.default", link: "/guides/hive-default" },
133
+
],
134
+
},
52
135
],
53
136
},
54
-
{
55
-
text: "Use cases",
56
-
items: [{ text: "Tailscale", link: "/guide/tailscale" }],
57
-
},
137
+
{ text: "CLI & Module Reference", link: "/reference/cli.html" },
58
138
],
59
139
"/reference/": [
60
140
{
···
63
143
{ text: "CLI", link: "/reference/cli" },
64
144
{ text: "Meta Options", link: "/reference/meta" },
65
145
{ text: "Module Options", link: "/reference/module" },
146
+
{ text: "Error Codes", link: "/reference/errors" },
66
147
],
67
148
},
68
149
],
69
150
},
70
151
71
152
editLink: {
72
-
pattern: "https://github.com/wires-org/wire/edit/main/doc/:path",
153
+
pattern: "https://github.com/forallsys/wire/edit/trunk/doc/:path",
73
154
text: "Edit this page on GitHub",
74
155
},
75
156
76
157
socialLinks: [
77
-
{ icon: "github", link: "https://github.com/wires-org/wire" },
158
+
{ icon: "github", link: "https://github.com/forallsys/wire" },
78
159
],
79
160
},
80
161
markdown: {
81
162
config: (md) => {
82
163
md.use(markdownItFootnote);
83
-
md.use(groupIconMdPlugin);
164
+
md.use(groupIconMdPlugin, {
165
+
titleBar: { includeSnippet: true },
166
+
});
84
167
},
85
168
},
86
169
vite: {
···
90
173
nixos: "vscode-icons:file-type-nix",
91
174
"configuration.nix": "vscode-icons:file-type-nix",
92
175
"hive.nix": "vscode-icons:file-type-nix",
176
+
"flake.nix": "vscode-icons:file-type-nix",
93
177
"module.nix": "vscode-icons:file-type-nix",
178
+
"vm.nix": "vscode-icons:file-type-nix",
179
+
"shell.nix": "vscode-icons:file-type-nix",
180
+
"secrets.nix": "vscode-icons:file-type-nix",
94
181
home: localIconLoader(import.meta.url, "../assets/homemanager.svg"),
95
182
".conf": "vscode-icons:file-type-config",
96
183
},
+2
-34
doc/.vitepress/theme/index.ts
+2
-34
doc/.vitepress/theme/index.ts
···
1
1
import DefaultTheme from "vitepress/theme";
2
2
import "virtual:group-icons.css";
3
-
import giscusTalk from "vitepress-plugin-comment-with-giscus";
4
-
import { EnhanceAppContext, useData, useRoute } from "vitepress";
5
-
import { toRefs } from "vue";
3
+
import { EnhanceAppContext } from "vitepress";
4
+
import "./style.css";
6
5
7
6
export default {
8
7
...DefaultTheme,
9
8
enhanceApp(ctx: EnhanceAppContext) {
10
9
DefaultTheme.enhanceApp(ctx);
11
-
},
12
-
setup() {
13
-
const { frontmatter } = toRefs(useData());
14
-
const route = useRoute();
15
-
16
-
giscusTalk(
17
-
{
18
-
repo: "wires-org/wire",
19
-
repoId: "R_kgDOMQQbzw",
20
-
category: "giscus", // default: `General`
21
-
categoryId: "DIC_kwDOMQQbz84Co4vv",
22
-
mapping: "pathname",
23
-
inputPosition: "top",
24
-
lang: "en",
25
-
// i18n setting (Note: This configuration will override the default language set by lang)
26
-
// Configured as an object with key-value pairs inside:
27
-
// [your i18n configuration name]: [corresponds to the language pack name in Giscus]
28
-
locales: {
29
-
"en-US": "en",
30
-
},
31
-
homePageShowComment: false,
32
-
lightTheme: "light",
33
-
darkTheme: "transparent_dark",
34
-
},
35
-
{
36
-
frontmatter,
37
-
route,
38
-
},
39
-
// Default to false for all pages
40
-
false,
41
-
);
42
10
},
43
11
};
+1
-1
doc/README.md
+1
-1
doc/README.md
+10
-1
doc/default.nix
+10
-1
doc/default.nix
···
6
6
...
7
7
}:
8
8
{
9
-
packages.docs = pkgs.callPackage ./package.nix { inherit (self'.packages) wire-small; };
9
+
packages = {
10
+
docs = pkgs.callPackage ./package.nix {
11
+
mode = "stable";
12
+
inherit (self'.packages) wire-small-dev wire-diagnostics-md;
13
+
};
14
+
15
+
docs-unstable = pkgs.callPackage ./package.nix {
16
+
inherit (self'.packages) wire-small-dev wire-diagnostics-md;
17
+
};
18
+
};
10
19
};
11
20
}
-113
doc/guide/getting-started.md
-113
doc/guide/getting-started.md
···
1
-
---
2
-
comment: true
3
-
title: Getting Started
4
-
description: Getting started with Wire Tool!
5
-
---
6
-
7
-
# {{ $frontmatter.title }}
8
-
9
-
## Installation
10
-
11
-
Wire can be heavy to compile. You should enable the substituter `wires.cachix.org`.
12
-
13
-
::: code-group
14
-
15
-
<<< @/snippets/getting-started/cache.nix [module.nix]
16
-
<<< @/snippets/getting-started/nix.conf
17
-
18
-
:::
19
-
20
-
### Supported Nix & NixOS versions
21
-
22
-
Wire is currently _tested_ against `unstable`, `24.11` and `25.05`.
23
-
For each channel, it is tested against the given channel's `pkgs.lix`.
24
-
25
-
There is currently a bug when our VM tests are ran with nixcpp. Nixcpp will try
26
-
to download a file in a network sandbox, whereas Lix will not. We don't know
27
-
how to solve it. Please see (#126)[https://github.com/wires-org/wire/issues/126]
28
-
29
-
### NixOS / Home Manager
30
-
31
-
::: code-group
32
-
33
-
<<< @/snippets/getting-started/nixos.flake.nix [flake.nix (NixOS)]
34
-
<<< @/snippets/getting-started/hm.flake.nix [flake.nix (Home Manager)]
35
-
<<< @/snippets/getting-started/configuration.nix
36
-
<<< @/snippets/getting-started/home.nix
37
-
38
-
:::
39
-
40
-
## Your First Hive
41
-
42
-
Wire groups your machines into _nodes_, which are NixOS configurations with
43
-
additional information for deployment. Start by creating a `hive.nix` in the same directory as your
44
-
`configuration.nix`.
45
-
46
-
A `hive.nix` is an attribute set with NixOS configurations, each with a unique
47
-
name. Add a node for your local machine:
48
-
49
-
```nix:line-numbers [hive.nix]
50
-
{
51
-
meta.nixpkgs = import <nixpkgs> {};
52
-
53
-
my-local-machine = {
54
-
imports = [./configuration.nix];
55
-
56
-
# If you don't know, find this value by running
57
-
# `nix eval --expr 'builtins.currentSystem' --impure`
58
-
nixpkgs.hostPlatform = "x86_64-linux";
59
-
};
60
-
}
61
-
```
62
-
63
-
Now, assuming your host machine is currently `my-local-machine`, simply running
64
-
[`wire apply`](/reference/cli.html#wire-apply) will evaluate, build, and
65
-
activate your system, which would be the equivalent of `nixos-rebuild switch`.
66
-
67
-
```sh
68
-
wire apply switch -v
69
-
```
70
-
71
-
### A Remote Machine
72
-
73
-
Lets add another node to your hive! This one is an example of a remote machine.
74
-
75
-
```nix:line-numbers [hive.nix]
76
-
{
77
-
meta.nixpkgs = import <nixpkgs> {};
78
-
79
-
my-local-machine = {
80
-
imports = [./local-machine/configuration.nix];
81
-
nixpkgs.hostPlatform = "x86_64-linux";
82
-
};
83
-
84
-
my-remote-machine = {
85
-
deployment = {
86
-
# buildOnTarget defaults to `false`, enable this
87
-
# if the machine is strong enough to build itself.
88
-
buildOnTarget = true;
89
-
target = {
90
-
# Some IP or host that this node is reachable by ssh under,
91
-
# defaults to "my-remote-machine" (node name).
92
-
host = "10.1.1.2";
93
-
# A user you can non-interactively login through ssh by,
94
-
# defaults to "root".
95
-
user = "root";
96
-
};
97
-
};
98
-
imports = [./remote-machine/configuration.nix];
99
-
nixpkgs.hostPlatform = "x86_64-linux";
100
-
};
101
-
}
102
-
```
103
-
104
-
> [!TIP]
105
-
> Read more options in [the reference](/reference/module#deployment-target) to adjust options such as
106
-
> ssh port.
107
-
108
-
To deploy the node `my-remote-machine`, lets use `wire apply` again. Wire will
109
-
apply both nodes in the hive at once, one local and one remote:
110
-
111
-
```sh
112
-
wire apply switch -v
113
-
```
-53
doc/guide/hive-default.md
-53
doc/guide/hive-default.md
···
1
-
---
2
-
comment: true
3
-
title: hive.default
4
-
description: Deduplicate options with default node configuration.
5
-
---
6
-
7
-
# `{{ $frontmatter.title }}`
8
-
9
-
{{ $frontmatter.description }}
10
-
11
-
## Introduction
12
-
13
-
At the top level of a hive wire reserves the `defaults` attribute. It's applied
14
-
to every node.
15
-
16
-
::: warning
17
-
18
-
`defaults` must not rely on modules that a node imports, but a
19
-
node may rely on modules that default imports.
20
-
21
-
:::
22
-
23
-
```nix:line-numbers [hive.nix]
24
-
{
25
-
meta.nixpkgs = import <nixpkgs> {};
26
-
27
-
defaults = {
28
-
# name of the node that defaults is being applied to
29
-
name,
30
-
# attribute set of all nodes
31
-
nodes,
32
-
...
33
-
}: {
34
-
import = [
35
-
./default-module.nix
36
-
37
-
# module that is imported for all nodes
38
-
some-flake.nixosModules.default
39
-
];
40
-
41
-
# default configuration
42
-
# may or may not utilise `name` or `nodes`
43
-
};
44
-
45
-
node-a = {
46
-
# some config
47
-
};
48
-
49
-
node-b = {
50
-
# some more config
51
-
};
52
-
}
53
-
```
-184
doc/guide/keys.md
-184
doc/guide/keys.md
···
1
-
---
2
-
comment: true
3
-
title: Secret Management
4
-
description: Keys, files, and other out-of-store paths with Wire Tool.
5
-
---
6
-
7
-
# {{ $frontmatter.title }}
8
-
9
-
{{ $frontmatter.description }}
10
-
11
-
::: warning
12
-
13
-
Pushing keys to your local machine is currently unimplemented and is planned for
14
-
v2.0.0.
15
-
16
-
:::
17
-
18
-
## Introduction
19
-
20
-
Wire Tool is very unopinionated as to how you encrypt your secrets, Wire only
21
-
handles pushing and setting up permissions of your key files.
22
-
23
-
The `source` of your key can be a literal string (unencrypted), a path
24
-
(unencrypted), or a command that wire runs to evaluate the key. Programs that
25
-
work well with wire keys include:
26
-
27
-
- GPG
28
-
- [Age](https://github.com/FiloSottile/age)
29
-
- Anything that non-interactively decrypts to `stdout`.
30
-
31
-
### A Trivial "Key"
32
-
33
-
```nix:line-numbers [hive.nix]
34
-
{
35
-
meta.nixpkgs = import <nixpkgs> {};
36
-
37
-
node-1 = {
38
-
deployment.key."file.txt" = {
39
-
source = ''
40
-
Hello World!
41
-
'';
42
-
};
43
-
};
44
-
}
45
-
```
46
-
47
-
```sh
48
-
[user@node-1]$ cat /run/keys/file.txt
49
-
Hello World!
50
-
```
51
-
52
-
### Encrypting with GPG
53
-
54
-
```nix:line-numbers [hive.nix]
55
-
{
56
-
meta.nixpkgs = import <nixpkgs> {};
57
-
58
-
node-1 = {
59
-
deployment.key."file.txt" = {
60
-
source = [
61
-
"gpg"
62
-
"--decrypt"
63
-
"${./secrets/file.txt.gpg}"
64
-
];
65
-
};
66
-
};
67
-
}
68
-
```
69
-
70
-
```sh
71
-
[user@node-1]$ cat /run/keys/file.txt
72
-
Hello World!
73
-
```
74
-
75
-
### A Plain Text File
76
-
77
-
```nix:line-numbers [hive.nix]
78
-
{
79
-
meta.nixpkgs = import <nixpkgs> {};
80
-
81
-
node-1 = {
82
-
deployment.key."file.txt" = {
83
-
# using this syntax will enter the file into the store, readable by
84
-
# anyone!
85
-
source = ./file.txt;
86
-
};
87
-
};
88
-
}
89
-
```
90
-
91
-
## Persistence
92
-
93
-
Wire defaults `destDir` to `/run/keys`. `/run/` is held in memory and will not
94
-
persist past reboot. Change
95
-
[`deployment.key.<name>.destDir`](/reference/module#deployment-keys-name-destdir)
96
-
to something like `/etc/keys` if you need secrets every time the machine boots.
97
-
98
-
## Upload Order
99
-
100
-
By default Wire will upload keys before the system is activated. You can
101
-
force Wire to upload the key after the system is activated by setting
102
-
[`deployment.keys.<name>.uploadAt`](/reference/module#deployment-keys-name-uploadat)
103
-
to `post-activation`.
104
-
105
-
## Permissions and Ownership
106
-
107
-
Wire secrets are owned by user & group `root` (`0600`). You can change these
108
-
with the `user` and `group` option.
109
-
110
-
```nix:line-numbers [hive.nix]
111
-
{
112
-
meta.nixpkgs = import <nixpkgs> {};
113
-
114
-
node-1 = {
115
-
deployment.key."file.txt" = {
116
-
source = [
117
-
"gpg"
118
-
"--decrypt"
119
-
"${./secrets/file.txt.gpg}"
120
-
];
121
-
122
-
user = "my-user";
123
-
group = "my-group";
124
-
};
125
-
};
126
-
}
127
-
```
128
-
129
-
## Further Examples
130
-
131
-
### Using Keys With Services
132
-
133
-
You can access the full absolute path of any key with
134
-
`config.deployment.keys.<name>.path` (auto-generated and read-only).
135
-
Here's an example with the Tailscale service:
136
-
137
-
```nix:line-numbers [hive.nix]
138
-
{
139
-
meta.nixpkgs = import <nixpkgs> {};
140
-
141
-
node-1 = {config, ...}: {
142
-
services.tailscale = {
143
-
enable = true;
144
-
# use deployment key path directly
145
-
authKeyFile = config.deployment.keys."tailscale.key".path;
146
-
};
147
-
148
-
deployment.keys."tailscale.key" = {
149
-
keyCommand = ["gpg" "--decrypt" "${./secrets/tailscale.key.gpg}"];
150
-
};
151
-
};
152
-
}
153
-
```
154
-
155
-
### Scoping a Key to a service account
156
-
157
-
Additionally you can scope the key to the user that the service runs under, to
158
-
further reduce duplication using the `config` argument. Here's an example of
159
-
providing a certificate that is only readable by the caddy service.
160
-
161
-
```nix:line-numbers [hive.nix]
162
-
{
163
-
meta.nixpkgs = import <nixpkgs> {};
164
-
165
-
some-web-server = {config, ...}: {
166
-
deployment.keys."some.host.pem" = {
167
-
keyCommand = ["gpg" "--decrypt" "${./some.host.pem.gpg}"];
168
-
destDir = "/etc/keys";
169
-
170
-
# inherit the user and group that caddy runs under
171
-
# the key will only readable by the caddy service
172
-
inherit (config.services.caddy) user group;
173
-
};
174
-
175
-
# ^^ repeat for `some.host.key`
176
-
177
-
services.caddy = {
178
-
virtualHosts."https://some.host".extraConfig = ''
179
-
tls ${config.deployment.keys."some.host.pem".path} ${config.deployment.keys."some.host.key".path}
180
-
'';
181
-
};
182
-
};
183
-
}
184
-
```
-32
doc/guide/parallelism.md
-32
doc/guide/parallelism.md
···
1
-
---
2
-
comment: true
3
-
title: Parallelism
4
-
description: A deeper dive into parallelism with Wire Tool.
5
-
---
6
-
7
-
# {{ $frontmatter.title }}
8
-
9
-
{{ $frontmatter.description }}
10
-
11
-
## Controlling CPU Usage
12
-
13
-
Wire evaluates, builds, pushes, and deploys each node completely independently
14
-
from each other. Internally Wire calls this process a "node execution".
15
-
16
-
The default number of parallel _node executions_ is `10`, which can be
17
-
controlled with the `-p` / `--parallel` argument.
18
-
19
-
```sh
20
-
wire apply -p <NUMBER>
21
-
```
22
-
23
-
## Interaction with Nix's `max-jobs`
24
-
25
-
Nix has an overall derivation build limit and core limit.
26
-
If executing a node fills Nix's `max-jobs` all other nodes will bottleneck. You
27
-
should read [the relevant
28
-
documentation](https://nix.dev/manual/nix/2.28/advanced-topics/cores-vs-jobs) to fine tune these settings.
29
-
30
-
When a Node is built remotely due to
31
-
[`deployment.buildOnTarget`](/reference/module.html#deployment-buildontarget)
32
-
that node will not push up the _local machine's_ max-jobs limit.
-1
doc/guide/tailscale.md
-1
doc/guide/tailscale.md
···
1
-
# Wire & Tailscale
-76
doc/guide/targeting.md
-76
doc/guide/targeting.md
···
1
-
---
2
-
comment: true
3
-
title: Targeting Nodes
4
-
description: Tags, nodes, and how to target them with Wire Tool.
5
-
---
6
-
7
-
# {{ $frontmatter.title }}
8
-
9
-
{{ $frontmatter.description }}
10
-
11
-
## The Basics
12
-
13
-
Nodes can have _tags_, which allows you to easily target multiple, related
14
-
nodes for deployment.
15
-
16
-
```nix:line-numbers [hive.nix]
17
-
{
18
-
meta.nixpkgs = import <nixpkgs> {};
19
-
20
-
node-1 = {
21
-
# ...
22
-
deployment.tags = ["cloud"];
23
-
};
24
-
node-2 = {
25
-
# ...
26
-
deployment.tags = ["cloud", "virtual"];
27
-
};
28
-
node-3 = {
29
-
# ...
30
-
deployment.tags = ["on-prem"];
31
-
};
32
-
node-4 = {
33
-
# ...
34
-
deployment.tags = ["virtual"];
35
-
};
36
-
node-5 = {
37
-
# Untagged
38
-
};
39
-
}
40
-
```
41
-
42
-
To target all nodes with a specific tag, prefix tags with an `@`.
43
-
For example, to deploy only nodes with the `cloud` tag, use
44
-
45
-
```sh
46
-
wire apply --on @cloud
47
-
```
48
-
49
-
## Further Examples
50
-
51
-
::: info
52
-
53
-
Other operations such as an `--ignore` argument are unimplemented as of wire `v0.2.0`.
54
-
55
-
:::
56
-
57
-
### Mixing Tags with Node Names
58
-
59
-
`--on` without an `@` prefix interprets as a literal node name. You can mix tags
60
-
and node names with `--on`:
61
-
62
-
```sh
63
-
wire apply --on @cloud node-5
64
-
```
65
-
66
-
This will deploy all nodes in `@cloud`, alongside the node `node-5`.
67
-
68
-
### Targeting Many Tags (Union)
69
-
70
-
You can specify many tags together:
71
-
72
-
```sh
73
-
wire apply --on @cloud @on-prem
74
-
```
75
-
76
-
This is a union between `@cloud` and `@on-prem`.
-49
doc/guide/wire.md
-49
doc/guide/wire.md
···
1
-
---
2
-
comment: true
3
-
---
4
-
5
-
# What is Wire?
6
-
7
-
<p style="display: flex; gap: 8px">
8
-
<a href="https://github.com/wires-org/wire/actions/workflows/test.yml?query=branch%3Amain">
9
-
<img alt="Test Suite Status" src="https://img.shields.io/github/actions/workflow/status/wires-org/wire/test.yml?branch=main&style=flat-square&label=Test%20Suite">
10
-
</a>
11
-
12
-
<a href="https://github.com/wires-org/wire/actions/workflows/build.yml?query=branch%3Amain">
13
-
<img alt="Build Status" src="https://img.shields.io/github/actions/workflow/status/wires-org/wire/build.yml?branch=main&style=flat-square&label=Builds">
14
-
</a>
15
-
16
-
<a href="https://github.com/wires-org/wire/actions/workflows/pages.yml?query=branch%3Amain">
17
-
<img alt="Documentation Status" src="https://img.shields.io/github/actions/workflow/status/wires-org/wire/pages.yml?branch=main&style=flat-square&label=Documentation">
18
-
</a>
19
-
</p>
20
-
21
-
Wire is a tool to deploy NixOS systems. Its configuration is a superset[^1] of [colmena](https://colmena.cli.rs/), however it is **not** a fork.
22
-
23
-
[^1]: Any colmena configuration will continue to work with wire, but wire has additional ergonomic changes you can take advantage of.
24
-
25
-
::: warning
26
-
Wire is alpha software, please use at your own risk. Many features listed in this documentation may not be complete / implemented.
27
-
:::
28
-
29
-
<div class="tip custom-block" style="padding-top: 8px">
30
-
31
-
Ready? Skip to the [Quickstart](./getting-started).
32
-
33
-
</div>
34
-
35
-
## Why Wire?
36
-
37
-
::: info
38
-
The following is the goal for a stable release and not fully implemented.
39
-
:::
40
-
41
-
| Features | Wire | Colmena |
42
-
| --------------------- | ---------------------------- | ---------------------------------------------------------------------------------------------------------- |
43
-
| Secret Management | :white_check_mark: | :white_check_mark: |
44
-
| Parallel Evaluation | :white_check_mark: | [Experimental](https://colmena.cli.rs/unstable/features/parallelism.html#parallel-evaluation-experimental) |
45
-
| Node Tagging | :white_check_mark: | :white_check_mark: |
46
-
| `jq` pipeline support | :white_check_mark: | :x:[^2] |
47
-
| Magic Rollback | :white_check_mark: (Planned) | :x: |
48
-
49
-
[^2]: You need to write custom nix code to use Colmena hive metadata inside environments like CI pipelines, bash scripting, etc., which requires a knowledge of its internals.
+83
doc/guides/apply.md
+83
doc/guides/apply.md
···
1
+
---
2
+
comment: true
3
+
title: Apply your Config
4
+
description: How to apply a node with wire.
5
+
---
6
+
7
+
# Apply your Config
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## What does it mean to 'apply'?
12
+
13
+
Once you have created a hive, you can now "apply" your
14
+
configuration to nodes in your hive. Simply, "applying" is the term used by wire to describe **deploying the
15
+
config**.
16
+
17
+
::: info
18
+
Applying a node typically involves pushing keys,
19
+
evaluating the node's NixOS system, building the node's NixOS system, and running
20
+
`switch-to-configuration`, depending on which specific goal is used.
21
+
:::
22
+
23
+
The simplest way to apply is to run:
24
+
25
+
```sh
26
+
$ wire apply switch
27
+
```
28
+
29
+
This will `switch` to each node's NixOS system in your hive and push
30
+
secrets (the equivalent of `nixos-rebuild switch`).
31
+
32
+
## Apply goals
33
+
34
+
`wire apply` accepts a goal. Goals include verbs that will be familiar to
35
+
`nixos-rebuild` users such as `switch`, `boot`, and `test`, alongside additional verbs
36
+
like `keys` and `push`.
37
+
38
+
### `wire apply keys`
39
+
40
+
wire will push all deployment keys to nodes, and do nothing else. While running
41
+
this goal, the option
42
+
[`deployment.keys.<name>.uploadAt`](/reference/module#deployment-keys-name-uploadat)
43
+
has no effect and all keys will be pushed. Read [the secret management guide](./keys)
44
+
to learn more about wire deployment keys.
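For example, pushing every key to a single node; the node name `node-1` is purely illustrative, and the goal is simply combined with the `--on` flag described later on this page:

```sh
$ wire apply keys --on node-1
```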
45
+
46
+
### `wire apply push`
47
+
48
+
wire will "push" (equivalent to [`nix
49
+
copy`](https://nix.dev/manual/nix/2.18/command-ref/new-cli/nix3-copy)) the
50
+
`.drv` file that can produce the node's NixOS system when built.
51
+
52
+
### `wire apply build`
53
+
54
+
Sister to `wire apply push`, wire will build the
55
+
node's NixOS system and ensure the output path exists on the node. Depending on
56
+
[`deployment.buildOnTarget`](/reference/module#deployment-buildontarget), the
57
+
`.drv` file may be built on the machine invoking wire or the node itself.
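As a quick illustration, the two goals can be run back to back; nothing here is new, it simply chains the goals described above:

```sh
$ wire apply push
$ wire apply build
```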
58
+
59
+
### `wire apply [switch|boot|test|dry-activate]`
60
+
61
+
Type `wire apply --help` or
62
+
[read the reference](../reference/cli#wire-apply) to learn more.
63
+
64
+
## Applying locally
65
+
66
+
If `deployment.allowLocalDeployment` is `true` and the hostname of the machine
67
+
invoking wire matches a node's name, wire will apply that node to the local
68
+
machine. Goals like `push` and `build` won't actually "push" anything, as
69
+
the paths already exist on the local machine.
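A minimal sketch of a hive that opts into local applies, following the npins style used in the other guides; the node name `my-laptop` is hypothetical and must match the local machine's hostname:

```nix:line-numbers [hive.nix]
let
  sources = import ./npins;
  wire = import sources.wire;
in wire.makeHive {
  meta.nixpkgs = import sources.nixpkgs { };

  # hypothetical node; its name must match the hostname of the machine invoking wire
  my-laptop = {
    deployment.allowLocalDeployment = true;
    nixpkgs.hostPlatform = "x86_64-linux";
  };
}
```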
70
+
71
+
When applying to your local machine, wire can interactively run `sudo`!
72
+
wire will prompt for your password, meaning wire can be run as any user in
73
+
the `wheel` group.
74
+
75
+
## Applying specific nodes
76
+
77
+
Use the `--on` argument to specify which nodes in your hive to apply:
78
+
79
+
```sh
80
+
$ wire apply --on node-a
81
+
```
82
+
83
+
Further examples, including how you can utilise tags, can be found on the [Targeting Nodes](./targeting) page.
+36
doc/guides/build-in-ci.md
+36
doc/guides/build-in-ci.md
···
1
+
---
2
+
comment: true
3
+
title: Build in CI
4
+
---
5
+
6
+
# Build in CI
7
+
8
+
## The `wire build` command <Badge type="tip" text="^1.1.0" />
9
+
10
+
`wire build` builds nodes locally. It is distinct from
11
+
`wire apply build`, as it will not ping or push the result,
12
+
making it useful for CI.
13
+
14
+
It accepts the same `--on` argument as `wire apply` does.
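For example (the `@cloud` tag is hypothetical; any node name or `@tag` selector accepted by `wire apply --on` works here as well):

```sh
wire build --on @cloud
```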
15
+
16
+
## Partitioning builds
17
+
18
+
`wire build` accepts a `--partition` option inspired by
19
+
[cargo-nextest](https://nexte.st/docs/ci-features/partitioning/), which splits
20
+
selected nodes into buckets to be built separately.
21
+
22
+
It accepts values in the format `--partition current/total`, where 1 ≤ current ≤ total.
23
+
24
+
For example, these two commands will build the entire hive in two invocations:
25
+
26
+
```sh
27
+
wire build --partition 1/2
28
+
29
+
# later or synchronously:
30
+
31
+
wire build --partition 2/2
32
+
```
33
+
34
+
## Example: Build in Github Actions
35
+
36
+
<<< @/snippets/guides/example-action.yml [.github/workflows/build.yml]
+42
doc/guides/flakes/nixos-rebuild.md
+42
doc/guides/flakes/nixos-rebuild.md
···
1
+
---
2
+
comment: true
3
+
title: Keep Using nixos-rebuild
4
+
description: How to combine outputs.nixosConfigurations with outputs.wire
5
+
---
6
+
7
+
# Keep Using nixos-rebuild
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## An Example
12
+
13
+
You can provide `makeHive` with your `nixosConfigurations` using the `inherit`
14
+
nix keyword. `makeHive` will merge any nodes and nixosConfigurations that share
15
+
the same name together.
16
+
17
+
::: tip
18
+
You should include the wire module, which will provide the `deployment` options, even if nixos-rebuild can't directly use them.
19
+
:::
20
+
21
+
::: code-group
22
+
<<< @/snippets/getting-started/flake-merged.nix [flake.nix]
23
+
:::
24
+
25
+
Now, if we run `wire show`, you will see that wire only finds
26
+
the `nixosConfigurations` entries that also match a node in the hive.
27
+
`some-other-host` is not included in the hive unless specified in `makeHive`.
28
+
29
+
```
30
+
$ wire show
31
+
Node node-a (x86_64-linux):
32
+
33
+
> Connection: {root@node-a:22}
34
+
> Build remotely `deployment.buildOnTarget`: false
35
+
> Local apply allowed `deployment.allowLocalDeployment`: true
36
+
37
+
Summary: 1 total node(s), totalling 0 keys (0 distinct).
38
+
Note: Listed connections are tried from Left to Right
39
+
40
+
```
41
+
42
+
This way, you can continue using `nixos-rebuild` and wire at the same time.
+40
doc/guides/flakes/overview.md
+40
doc/guides/flakes/overview.md
···
1
+
---
2
+
comment: true
3
+
title: Use Flakes
4
+
description: How to output a hive from a flake.
5
+
---
6
+
7
+
# Use Flakes
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Output a hive
12
+
13
+
::: tip
14
+
If you have skipped ahead, please read the previous page to understand the
15
+
concept of a hive.
16
+
:::
17
+
18
+
You can use wire with a flake by outputting a hive with the `wire` flake output.
19
+
Just like when using a `hive.nix`, you must provide `meta.nixpkgs`, which will
20
+
come from an input.
21
+
22
+
::: code-group
23
+
<<< @/snippets/getting-started/flake.nix [flake.nix]
24
+
:::
25
+
26
+
```
27
+
$ nix flake show
28
+
git+file:///some/path
29
+
└───wire: unknown
30
+
31
+
$ wire show
32
+
Node node-a (x86_64-linux):
33
+
34
+
> Connection: {root@node-a:22}
35
+
> Build remotely `deployment.buildOnTarget`: false
36
+
> Local apply allowed `deployment.allowLocalDeployment`: true
37
+
38
+
Summary: 1 total node(s), totalling 0 keys (0 distinct).
39
+
Note: Listed connections are tried from Left to Right
40
+
```
+56
doc/guides/hive-default.md
+56
doc/guides/hive-default.md
···
1
+
---
2
+
comment: true
3
+
title: Use hive.default
4
+
description: Deduplicate options with default node configuration.
5
+
---
6
+
7
+
# Use `hive.default`
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Introduction
12
+
13
+
At the top level of a hive, wire reserves the `defaults` attribute. It's applied
14
+
to every node.
15
+
16
+
::: warning
17
+
18
+
`defaults` must not rely on modules that a node imports, but a
19
+
node may rely on modules that `defaults` imports.
20
+
21
+
:::
22
+
23
+
```nix:line-numbers [hive.nix]
24
+
let
25
+
sources = import ./npins;
26
+
wire = import sources.wire;
27
+
in wire.makeHive {
28
+
meta.nixpkgs = import sources.nixpkgs { };
29
+
30
+
defaults = {
31
+
# name of the node that defaults is being applied to
32
+
name,
33
+
# attribute set of all nodes
34
+
nodes,
35
+
...
36
+
}: {
37
+
imports = [
38
+
./default-module.nix
39
+
40
+
# module that is imported for all nodes
41
+
some-flake.nixosModules.default
42
+
];
43
+
44
+
# default configuration
45
+
# may or may not utilise `name` or `nodes`
46
+
};
47
+
48
+
node-a = {
49
+
# some config
50
+
};
51
+
52
+
node-b = {
53
+
# some more config
54
+
};
55
+
}
56
+
```
+60
doc/guides/installation.md
+60
doc/guides/installation.md
···
1
+
---
2
+
comment: true
3
+
title: Install wire
4
+
description: How to install wire tool.
5
+
---
6
+
7
+
# Install wire
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
::: info
12
+
13
+
The `wire` binary and the `wire.makeHive` function are tightly coupled, so it is
14
+
recommended that you use the same version for both.
15
+
16
+
:::
17
+
18
+
It is recommended that you use either a tagged version of wire or the `stable` branch, which tracks the latest stable tag.
19
+
20
+
## Binary Cache
21
+
22
+
You should enable the [garnix binary cache](https://garnix.io/docs/caching) _before_
23
+
continuing; otherwise you will be compiling from source:
24
+
25
+
::: code-group
26
+
<<< @/snippets/tutorial/cache.conf [nix.conf]
27
+
<<< @/snippets/tutorial/cache.nix [configuration.nix]
28
+
:::
29
+
30
+
## Installation through flakes
31
+
32
+
When using flakes, you should install wire through the same input you create
33
+
your hive from, sourced from the `stable` branch.
34
+
35
+
::: code-group
36
+
<<< @/snippets/guides/installation/flake.nix [flake.nix]
37
+
:::
38
+
39
+
## Installation through npins
40
+
41
+
With npins you may allow it to use release tags instead of the `stable`
42
+
branch.
43
+
44
+
Using npins specifically is not required; you can pin your sources in any way
45
+
you'd like.
46
+
47
+
```sh
48
+
$ npins add github forallsys wire --branch stable
49
+
```
50
+
51
+
Alternatively, you can use a tag instead:
52
+
53
+
```sh
54
+
$ npins add github forallsys wire --at v1.1.1
55
+
```
56
+
57
+
Then, use this pinned version of wire for both your `hive.nix` and `shell.nix`:
58
+
59
+
<<< @/snippets/guides/installation/shell.nix{8} [shell.nix]
60
+
<<< @/snippets/guides/installation/hive.nix [hive.nix]
+259
doc/guides/keys.md
+259
doc/guides/keys.md
···
1
+
---
2
+
comment: true
3
+
title: Manage Secrets
4
+
description: Manage keys, secrets, files, and other out-of-store paths with wire Tool.
5
+
---
6
+
7
+
# Manage Secrets
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Introduction
12
+
13
+
wire Tool is very unopinionated as to how you encrypt your secrets; wire only
14
+
handles pushing and setting up permissions of your key files.
15
+
16
+
The `source` of your key can be a literal string (unencrypted), a path
17
+
(unencrypted), or a command that wire runs to evaluate the key. Programs that
18
+
work well with wire keys include:
19
+
20
+
- GPG
21
+
- [Age](https://github.com/FiloSottile/age)
22
+
- Anything that non-interactively decrypts to `stdout`.
23
+
24
+
### Prerequisites
25
+
26
+
wire uses a Rust binary to receive encrypted key data, so your deploying
27
+
user must be trusted or you must add garnix as a trusted public key:
28
+
29
+
```nix
30
+
{ config, ... }:
31
+
{
32
+
nix.settings.trusted-users = [
33
+
config.deployment.target.user # [!code ++]
34
+
];
35
+
}
36
+
```
37
+
38
+
Otherwise, you may see errors such as:
39
+
40
+
```
41
+
error: cannot add path '/nix/store/...-wire-tool-key_agent-x86_64-linux-...' because it lacks a signature by a trusted key
42
+
```
43
+
44
+
This is a requirement because `nix copy` is used to copy the binary.
45
+
As a benefit of this approach, key deployments are significantly faster!
46
+
47
+
### A Trivial "Key"
48
+
49
+
```nix:line-numbers [hive.nix]
50
+
let
51
+
sources = import ./npins;
52
+
wire = import sources.wire;
53
+
in wire.makeHive {
54
+
meta.nixpkgs = import sources.nixpkgs { };
55
+
56
+
node-1 = {
57
+
deployment.key."file.txt" = {
58
+
source = ''
59
+
Hello World!
60
+
'';
61
+
};
62
+
};
63
+
}
64
+
```
65
+
66
+
```sh
67
+
[user@node-1]$ cat /run/keys/file.txt
68
+
Hello World!
69
+
```
70
+
71
+
### Encrypting with GPG
72
+
73
+
```nix:line-numbers [hive.nix]
74
+
let
75
+
sources = import ./npins;
76
+
wire = import sources.wire;
77
+
in wire.makeHive {
78
+
meta.nixpkgs = import sources.nixpkgs { };
79
+
80
+
node-1 = {
81
+
deployment.key."file.txt" = {
82
+
source = [
83
+
"gpg"
84
+
"--decrypt"
85
+
"${./secrets/file.txt.gpg}"
86
+
];
87
+
};
88
+
};
89
+
}
90
+
```
91
+
92
+
```sh
93
+
[user@node-1]$ cat /run/keys/file.txt
94
+
Hello World!
95
+
```
96
+
97
+
### Encrypting with KeepassXC
98
+
99
+
Here is a simple example of extracting a KeepassXC attachment into a wire key.
100
+
You must pass the password through stdin as the command must be non-interactive.
101
+
Note that `--stdout` is important, as wire expects the command to output the key to stdout.
102
+
103
+
```nix:line-numbers [hive.nix]
104
+
let
105
+
sources = import ./npins;
106
+
wire = import sources.wire;
107
+
in wire.makeHive {
108
+
meta.nixpkgs = import sources.nixpkgs { };
109
+
110
+
node-1 = {
111
+
deployment.key."file.txt" = {
112
+
source = [
113
+
"bash"
114
+
"-c"
115
+
''cat ~/pass | keepassxc-cli attachment-export --stdout ~/.local/share/keepass/database.kdbx test 'file.txt'''
116
+
];
117
+
};
118
+
};
119
+
}
120
+
```
121
+
122
+
```sh
123
+
[user@node-1]$ cat /run/keys/file.txt
124
+
Hello World!
125
+
```
126
+
127
+
### A Plain Text File
128
+
129
+
```nix:line-numbers [hive.nix]
130
+
let
131
+
sources = import ./npins;
132
+
wire = import sources.wire;
133
+
in wire.makeHive {
134
+
meta.nixpkgs = import sources.nixpkgs { };
135
+
136
+
node-1 = {
137
+
deployment.key."file.txt" = {
138
+
# using this syntax will enter the file into the store, readable by
139
+
# anyone!
140
+
source = ./file.txt;
141
+
};
142
+
};
143
+
}
144
+
```
145
+
146
+
## Persistence
147
+
148
+
wire defaults `destDir` to `/run/keys`. `/run/` is held in memory and will not
149
+
persist past reboot. Change
150
+
[`deployment.key.<name>.destDir`](/reference/module#deployment-keys-name-destdir)
151
+
to something like `/etc/keys` if you need secrets every time the machine boots.
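A minimal sketch, using a gpg-decrypted key as elsewhere on this page, with a persistent destination:

```nix:line-numbers [hive.nix]
let
  sources = import ./npins;
  wire = import sources.wire;
in wire.makeHive {
  meta.nixpkgs = import sources.nixpkgs { };

  node-1 = {
    deployment.keys."file.txt" = {
      keyCommand = ["gpg" "--decrypt" "${./secrets/file.txt.gpg}"];
      # written outside of /run so the key survives reboots
      destDir = "/etc/keys";
    };
  };
}
```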
152
+
153
+
## Upload Order
154
+
155
+
By default wire will upload keys before the system is activated. You can
156
+
force wire to upload the key after the system is activated by setting
157
+
[`deployment.keys.<name>.uploadAt`](/reference/module#deployment-keys-name-uploadat)
158
+
to `post-activation`.
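
For example, a minimal sketch; the key name and contents are illustrative:

```nix
{
  deployment.keys."after-switch.txt" = {
    source = ''
      Only needed once the new system is live.
    '';
    # upload this key after the new system has been activated
    uploadAt = "post-activation";
  };
}
```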
159
+
160
+
## Permissions and Ownership
161
+
162
+
wire secrets are owned by user and group `root`, with mode `0600`. You can change these
163
+
with the `user` and `group` options.
164
+
165
+
```nix:line-numbers [hive.nix]
166
+
let
167
+
sources = import ./npins;
168
+
wire = import sources.wire;
169
+
in wire.makeHive {
170
+
meta.nixpkgs = import sources.nixpkgs { };
171
+
172
+
node-1 = {
173
+
deployment.key."file.txt" = {
174
+
source = [
175
+
"gpg"
176
+
"--decrypt"
177
+
"${./secrets/file.txt.gpg}"
178
+
];
179
+
180
+
user = "my-user";
181
+
group = "my-group";
182
+
};
183
+
};
184
+
}
185
+
```
186
+
187
+
## Further Examples
188
+
189
+
### Using Keys With Services
190
+
191
+
You can access the full absolute path of any key with
192
+
`config.deployment.keys.<name>.path` (auto-generated and read-only).
193
+
194
+
Keys also have a `config.deployment.keys.<name>.service` property
195
+
(auto-generated and read-only), which represents a systemd service that you can
196
+
`require`, telling systemd there is a hard dependency on that key for the
197
+
service to run.
198
+
199
+
Here's an example with the Tailscale service:
200
+
201
+
```nix:line-numbers [hive.nix]
202
+
let
203
+
sources = import ./npins;
204
+
wire = import sources.wire;
205
+
in wire.makeHive {
206
+
meta.nixpkgs = import sources.nixpkgs { };
207
+
208
+
node-1 = {config, ...}: {
209
+
services.tailscale = {
210
+
enable = true;
211
+
# use deployment key path directly
212
+
authKeyFile = config.deployment.keys."tailscale.key".path;
213
+
};
214
+
215
+
deployment.keys."tailscale.key" = {
216
+
keyCommand = ["gpg" "--decrypt" "${./secrets/tailscale.key.gpg}"];
217
+
};
218
+
219
+
# The service will not start unless the key exists.
220
+
systemd.services.tailscaled-autoconnect.requires = [
221
+
config.deployment.keys."tailscale.key".service
222
+
];
223
+
};
224
+
}
225
+
```
226
+
227
+
### Scoping a Key to a Service Account
228
+
229
+
Additionally, you can scope the key to the user that the service runs under, to
230
+
further reduce duplication using the `config` argument. Here's an example of
231
+
providing a certificate that is only readable by the caddy service.
232
+
233
+
```nix:line-numbers [hive.nix]
234
+
let
235
+
sources = import ./npins;
236
+
wire = import sources.wire;
237
+
in wire.makeHive {
238
+
meta.nixpkgs = import sources.nixpkgs { };
239
+
240
+
some-web-server = {config, ...}: {
241
+
deployment.keys."some.host.pem" = {
242
+
keyCommand = ["gpg" "--decrypt" "${./some.host.pem.gpg}"];
243
+
destDir = "/etc/keys";
244
+
245
+
# inherit the user and group that caddy runs under
246
+
# the key will only be readable by the caddy service
247
+
inherit (config.services.caddy) user group;
248
+
};
249
+
250
+
# ^^ repeat for `some.host.key`
251
+
252
+
services.caddy = {
253
+
virtualHosts."https://some.host".extraConfig = ''
254
+
tls ${config.deployment.keys."some.host.pem".path} ${config.deployment.keys."some.host.key".path}
255
+
'';
256
+
};
257
+
};
258
+
}
259
+
```
+88
doc/guides/migrate.md
+88
doc/guides/migrate.md
···
1
+
---
2
+
comment: true
3
+
title: Migrate to wire
4
+
description: How-to migrate from other tools to wire tool.
5
+
---
6
+
7
+
# Migrate to wire
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
Migrate from...
12
+
13
+
- [Colmena](#from-colmena)
14
+
- [`nixos-rebuild`](#from-nixos-rebuild)
15
+
16
+
## From Colmena
17
+
18
+
If you're familiar with Colmena, wire will hopefully come quickly to you! (or,
19
+
at least that was the intention when writing it!). There are a few changes you
20
+
should know about:
21
+
22
+
- [You don't have to use a root user](/guides/non-root-user.html)
23
+
- `apply-local` does not exist; `apply` will apply locally when appropriate
24
+
- [Many options have been aliased to nicer names](/reference/module.html)
25
+
(i.e., `deployment.targetUser` <=> `deployment.target.user`)
26
+
- You may pass a list of hosts to `deployment.targetHost` (no more fiddling with
27
+
your hive whenever DNS is down, for example; see the sketch after this list)
28
+
- `--path` optionally takes a flakeref! You can pass `--path github:foo/bar`,
29
+
`--path git+file:///...`, `--path https://.../main.tar.gz`, etc.
30
+
(plain paths like `--path ~/my-hive` still work as always)
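
As a sketch of the host-list point above, with made-up addresses:

```nix
{
  # either address can be used to reach this node
  deployment.targetHost = [
    "node-1.example.com"
    "192.0.2.1"
  ];
}
```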
31
+
32
+
::: tip
33
+
You should also follow [installation](/guides/installation) to install the
34
+
binary.
35
+
:::
36
+
37
+
### Convert a Hive as a Flake
38
+
39
+
```nix [flake.nix]
40
+
{
41
+
inputs = {
42
+
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
43
+
colmena.url = "github:zhaofengli/colmena"; # [!code --]
44
+
wire.url = "github:forallsys/wire/stable"; # [!code ++]
45
+
};
46
+
outputs =
47
+
{ nixpkgs, colmena, ... }:
48
+
{
49
+
colmenaHive = colmena.lib.makeHive { # [!code --]
50
+
wire = wire.lib.makeHive { # [!code ++]
51
+
# ..
52
+
};
53
+
};
54
+
}
55
+
```
56
+
57
+
### Convert a Hive with npins
58
+
59
+
::: tip
60
+
You should also follow [installation](/guides/installation) to setup
61
+
npins and install the binary.
62
+
:::
63
+
64
+
Unlike Colmena, you must call `makeHive` directly even in non-flake hives.
65
+
66
+
```nix [hive.nix]
67
+
let
68
+
sources = import ./npins;
69
+
wire = import sources.wire;
70
+
in
71
+
{ # [!code --]
72
+
wire.makeHive { # [!code ++]
73
+
74
+
meta.nixpkgs = <nixpkgs>; # [!code --]
75
+
meta.nixpkgs = import sources.nixpkgs { }; # [!code ++]
76
+
77
+
# ...
78
+
}
79
+
```
80
+
81
+
Replacing `<nixpkgs>` with a pinned source is optional, but you should
82
+
probably use one if you ask me \:)
83
+
84
+
## From `nixos-rebuild`
85
+
86
+
You can keep using `nixos-rebuild` alongside wire!
87
+
88
+
Follow the instructions in [the relevant page](/guides/flakes/nixos-rebuild.html).
+78
doc/guides/non-root-user.md
+78
doc/guides/non-root-user.md
···
1
+
---
2
+
comment: true
3
+
title: Use a non-root user
4
+
description: Deploy without root permissions with wire.
5
+
---
6
+
7
+
# Use a non-root user
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Deploying User Requirements
12
+
13
+
For deployment commands to succeed, the user defined in `deployment.target.user` must meet the following criteria:
14
+
15
+
1. Essential Config
16
+
17
+
- **Sudo Access**: The user must be in the `wheel` group (a sudo user)
18
+
- **SSH Key Authentication**: The user must be authenticated through SSH keys,
19
+
as password-based SSH auth is not supported.
20
+
21
+
**Why?** Wire can prompt you for your `sudo` password, but not your `ssh` password.
22
+
23
+
2. Deploying with Secrets
24
+
25
+
- **Trusted User**: The user must be listed in the `trusted-users` nix config.
26
+
27
+
If the user is not trusted, wire will fail in the key deployment stage.
28
+
29
+
For setting up a trusted user, see [Manage Secrets - Prerequisites](/guides/keys.html#prerequisites).
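
A minimal sketch of a NixOS user that satisfies all of the above; the username
and SSH key are placeholders:

```nix
{
  users.users.deploy-user = {
    isNormalUser = true;
    # membership in wheel provides the sudo access wire needs
    extraGroups = [ "wheel" ];
    # key-based SSH auth only; wire cannot prompt for an SSH password
    openssh.authorizedKeys.keys = [
      "ssh-ed25519 AAAA... deploy-user@laptop"
    ];
  };

  # only required when deploying secrets, see the prerequisites linked above
  nix.settings.trusted-users = [ "deploy-user" ];
}
```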
30
+
31
+
## Changing the user
32
+
33
+
By default, the target user is set to `root`:
34
+
35
+
```nix
36
+
{
37
+
deployment.target.user = "root";
38
+
}
39
+
```
40
+
41
+
But it can be any user you want, so long as it meets the requirements above.
42
+
43
+
```nix
44
+
{
45
+
deployment.target.user = "root"; # [!code --]
46
+
deployment.target.user = "deploy-user"; # [!code ++]
47
+
}
48
+
```
49
+
50
+
After this change, wire will prompt you for sudo authentication, and tell you
51
+
the exact command wire wants to run with elevated privileges:
52
+
53
+
```sh{6}
54
+
$ wire apply keys --on media
55
+
INFO eval_hive: evaluating hive Flake("/path/to/hive")
56
+
...
57
+
INFO media | step="Upload key @ NoFilter" progress="3/4"
58
+
deploy-user@node:22 | Authenticate for "sudo /nix/store/.../bin/key_agent":
59
+
[sudo] password for deploy-user:
60
+
```
61
+
62
+
## Using alternative privilege escalation
63
+
64
+
You may change the privilege escalation command with the
65
+
[deployment.privilegeEscalationCommand](/reference/module.html#deployment-privilegeescalationcommand)
66
+
option.
67
+
68
+
For example, doas:
69
+
70
+
```nix
71
+
{
72
+
deployment.privilegeEscalationCommand = [
73
+
"sudo" # [!code --]
74
+
"--" # [!code --]
75
+
"doas" # [!code ++]
76
+
];
77
+
}
78
+
```
+32
doc/guides/parallelism.md
+32
doc/guides/parallelism.md
···
1
+
---
2
+
comment: true
3
+
title: Use Parallelism
4
+
description: How to use parallelism with wire Tool.
5
+
---
6
+
7
+
# Use Parallelism
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Controlling CPU Usage
12
+
13
+
wire evaluates, builds, pushes, and deploys each node completely independently
14
+
of the others. Internally, wire calls this process a "node execution".
15
+
16
+
The default number of parallel _node executions_ is `10`, which can be
17
+
controlled with the `-p` / `--parallel` argument.
18
+
19
+
```sh
20
+
$ wire apply -p <NUMBER>
21
+
```
22
+
23
+
## Interaction with Nix's `max-jobs`
24
+
25
+
Nix has an overall derivation build limit (`max-jobs`) and a per-build core limit (`cores`).
26
+
If executing a node fills Nix's `max-jobs`, all other nodes will bottleneck. You
27
+
should read [the relevant
28
+
documentation](https://nix.dev/manual/nix/2.28/advanced-topics/cores-vs-jobs) to fine-tune these settings.
29
+
30
+
When a node is built remotely due to
31
+
[`deployment.buildOnTarget`](/reference/module.html#deployment-buildontarget)
32
+
that node's build will not count against the _local machine's_ `max-jobs` limit.
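
As a rough sketch, the Nix limits can be pinned down in your node configuration;
the numbers below are arbitrary and should be tuned for your hardware:

```nix
{
  nix.settings = {
    # how many derivations Nix will build at once on this machine
    max-jobs = 4;
    # how many cores each of those builds may use
    cores = 2;
  };
}
```

Combined with `--parallel`, this bounds how much load a deployment can put on
the build machine.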
+102
doc/guides/targeting.md
+102
doc/guides/targeting.md
···
1
+
---
2
+
comment: true
3
+
title: Target Nodes
4
+
description: Tags, nodes, and how to target them with wire Tool.
5
+
---
6
+
7
+
# Target Nodes
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Targeting Specific Nodes
12
+
13
+
`wire apply --on` without an `@` prefix is interpreted as a literal node name. For
14
+
example:
15
+
16
+
```sh
17
+
$ wire apply switch --on node-a,node-b
18
+
```
19
+
20
+
Will run switch-to-configuration on node-a and node-b.
21
+
22
+
## Reading from Stdin
23
+
24
+
Passing `--on -` will read whitespace-separated nodes and tags from stdin. This
25
+
can be combined with normal `--on` usage.
26
+
27
+
For example:
28
+
29
+
```sh
30
+
$ echo "node-a node-b" | wire apply --on @other --on -
31
+
```
32
+
33
+
Will apply on `node-a`, `node-b`, and all nodes with the tag `@other`.
34
+
35
+
## Tag Basics
36
+
37
+
Nodes can have _tags_, which allow you to easily target multiple related
38
+
nodes for deployment.
39
+
40
+
```nix:line-numbers{9,13,17,21} [hive.nix]
41
+
let
42
+
sources = import ./npins;
43
+
wire = import sources.wire;
44
+
in wire.makeHive {
45
+
meta.nixpkgs = import sources.nixpkgs { };
46
+
47
+
node-1 = {
48
+
# ...
49
+
deployment.tags = ["cloud"];
50
+
};
51
+
node-2 = {
52
+
# ...
53
+
deployment.tags = ["cloud" "virtual"];
54
+
};
55
+
node-3 = {
56
+
# ...
57
+
deployment.tags = ["on-prem"];
58
+
};
59
+
node-4 = {
60
+
# ...
61
+
deployment.tags = ["virtual"];
62
+
};
63
+
node-5 = {
64
+
# Untagged
65
+
};
66
+
}
67
+
```
68
+
69
+
To target all nodes with a specific tag, prefix tags with an `@`.
70
+
For example, to deploy only nodes with the `cloud` tag, use:
71
+
72
+
```sh
73
+
$ wire apply --on @cloud
74
+
```
75
+
76
+
## Further Examples
77
+
78
+
::: info
79
+
80
+
Other operations such as an `--ignore` argument are unimplemented as of wire `v0.2.0`.
81
+
82
+
:::
83
+
84
+
### Mixing Tags with Node Names
85
+
86
+
You can mix tags and node names with `--on`:
87
+
88
+
```sh
89
+
$ wire apply --on @cloud --on node-5
90
+
```
91
+
92
+
This will deploy all nodes in `@cloud`, alongside the node `node-5`.
93
+
94
+
### Targeting Many Tags (Union)
95
+
96
+
You can specify many tags together:
97
+
98
+
```sh
99
+
$ wire apply --on @cloud @on-prem
100
+
```
101
+
102
+
This is a union between `@cloud` and `@on-prem`.
+116
doc/guides/writing-a-hive.md
+116
doc/guides/writing-a-hive.md
···
1
+
---
2
+
comment: true
3
+
title: Write a Hive
4
+
---
5
+
6
+
# Write a Hive
7
+
8
+
## Anatomy of a Hive
9
+
10
+
A "Hive" is the attribute set that you pass to `wire.makeHive`. It has the
11
+
following layout:
12
+
13
+
```nix
14
+
wire.makeHive {
15
+
# `meta`
16
+
# type: attrset
17
+
meta = {
18
+
# `meta.nixpkgs` tells wire how to get nixpkgs.
19
+
# type: "A path or an instance of nixpkgs."
20
+
nixpkgs = <nixpkgs>;
21
+
22
+
# `meta.specialArgs` are specialArgs passed to each node and to `defaults`
23
+
# type: attrset
24
+
specialArgs = { };
25
+
26
+
# `meta.nodeSpecialArgs` lets you override `meta.specialArgs` per-node.
27
+
# type: attrset of attrset
28
+
nodeSpecialArgs = { };
29
+
30
+
# `meta.nodeNixpkgs` lets you override nixpkgs per-node.
31
+
# type: attrset of "A path or an instance of nixpkgs."
32
+
nodeNixpkgs = { };
33
+
};
34
+
35
+
# `defaults` is a module applied to every node
36
+
# type: NixOS Module
37
+
defaults = { ... }: { };
38
+
39
+
# Any other attributes are nodes.
40
+
<node-name> = { ... }: { };
41
+
}
42
+
```
43
+
44
+
### `<node-name>`
45
+
46
+
Other attributes are NixOS modules that describe a system. They automatically
47
+
have `defaults` and the wire NixOS module imported.
48
+
49
+
They are also passed the `name` and `nodes` arguments: `name` is the node's name as a string, and `nodes` is an attribute set of every node in the hive.
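
For instance, a small sketch that uses `name`; the exact shape of `nodes` is
only hinted at in a comment, since it is not covered here:

```nix
node-1 = { name, nodes, ... }: {
  # `name` is the string "node-1" inside this module
  networking.hostName = name;

  # `nodes` holds every node in the hive (e.g. nodes.node-2),
  # which lets one node's configuration refer to another's
};
```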
50
+
51
+
### `meta`
52
+
53
+
There is more detailed information about `meta` in [the
54
+
reference](/reference/meta.html).
55
+
56
+
### `defaults`
57
+
58
+
De-duplicate options with default node configuration.
59
+
60
+
At the top level of a hive, wire reserves the `defaults` attribute. It's applied
61
+
to every node.
62
+
63
+
## Example
64
+
65
+
There is more detailed information about the special options for nodes in [the
66
+
reference](/reference/module.html).
67
+
68
+
```nix:line-numbers [hive.nix]
69
+
{
70
+
meta.nixpkgs = import some-sources-or-inputs.nixpkgs { };
71
+
72
+
defaults = {
73
+
# name of the node that defaults is being applied to
74
+
name,
75
+
# attribute set of all nodes
76
+
nodes,
77
+
pkgs,
78
+
...
79
+
}: {
80
+
imports = [
81
+
./default-module.nix
82
+
83
+
# module that is imported for all nodes
84
+
some-flake.nixosModules.default
85
+
];
86
+
87
+
# all nodes should include vim!
88
+
environment.systemPackages = [ pkgs.vim ];
89
+
};
90
+
91
+
node-a = {
92
+
# name of this node
93
+
name,
94
+
# attribute set of all nodes
95
+
nodes,
96
+
pkgs,
97
+
...
98
+
}: {
99
+
imports = [
100
+
# import the hardware-config and all your extra stuff
101
+
./node-a
102
+
];
103
+
104
+
deployment = {
105
+
target.host = "192.0.2.1";
106
+
tags = [ "x86" ];
107
+
};
108
+
};
109
+
110
+
# as many nodes as you'd like...
111
+
112
+
node-g = {
113
+
# some more config
114
+
};
115
+
}
116
+
```
+19
-11
doc/index.md
+19
-11
doc/index.md
···
5
5
hero:
6
6
name: wire
7
7
text: a tool to deploy nixos systems
8
-
# tagline: My great project tagline
9
8
actions:
10
9
- theme: brand
11
-
text: Read Guide
12
-
link: /guide/wire
10
+
text: Read Tutorial
11
+
link: /tutorial/overview
12
+
- theme: alt
13
+
text: How-to Guides
14
+
link: /guides/installation
13
15
- theme: alt
14
-
text: Reference
15
-
link: /reference/cli
16
+
text: Sources
17
+
link: https://github.com/forallsys/wire.git
16
18
17
19
features:
18
-
- title: Parallelism
19
-
details: Build and deploy many nodes at once
20
-
- title: Secret management
21
-
details: Fast & Unopinionated secret management
22
-
- title: Node Tagging & CI Friendly
23
-
details: Pipe data through jq
20
+
- title: Deploy in Parallel
21
+
details: Build and deploy many nodes at once.
22
+
link: /guides/parallelism
23
+
icon: ๐ฝ
24
+
- title: Manage Secrets
25
+
details: Fast & unopinionated secret management. Bring your own GPG, Age, or any other encryption tool.
26
+
link: /guides/keys
27
+
icon: ๐
28
+
- title: Deploy as Any User
29
+
details: Non-root deployments and interactive authentication are fully supported.
30
+
link: /guides/non-root-user
31
+
icon: 🧑‍💻
24
32
---
+1
-1
doc/options.nix
+1
-1
doc/options.nix
+4
-5
doc/package.json
+4
-5
doc/package.json
···
1
1
{
2
2
"name": "wire-docs",
3
-
"version": "0.3.0",
3
+
"version": "1.1.1",
4
4
"type": "module",
5
5
"devDependencies": {
6
-
"vitepress": "^1.6.3",
7
-
"vue": "^3.5.17"
6
+
"vitepress": "^1.6.4",
7
+
"vue": "^3.5.25"
8
8
},
9
9
"scripts": {
10
10
"dev": "vitepress dev .",
···
18
18
},
19
19
"dependencies": {
20
20
"markdown-it-footnote": "^4.0.0",
21
-
"vitepress-plugin-comment-with-giscus": "^1.1.15",
22
-
"vitepress-plugin-group-icons": "^1.6.0"
21
+
"vitepress-plugin-group-icons": "^1.6.5"
23
22
}
24
23
}
+9
-4
doc/package.nix
+9
-4
doc/package.nix
···
2
2
lib,
3
3
nixosOptionsDoc,
4
4
runCommand,
5
-
wire-small,
5
+
wire-small-dev,
6
+
wire-diagnostics-md,
6
7
nix,
7
8
nodejs,
8
9
pnpm,
9
10
stdenv,
11
+
mode ? "unstable",
10
12
...
11
13
}:
12
14
let
13
15
eval = lib.evalModules {
14
16
modules = [
15
-
../runtime/module.nix
17
+
../runtime/module/options.nix
16
18
{
17
19
options._module.args = lib.mkOption {
18
20
internal = true;
···
41
43
inherit (pkg) version;
42
44
pname = pkg.name;
43
45
nativeBuildInputs = [
44
-
wire-small
46
+
wire-small-dev
45
47
nodejs
46
48
pnpm.configHook
47
49
nix
···
49
51
src = ./.;
50
52
pnpmDeps = pnpm.fetchDeps {
51
53
inherit (finalAttrs) pname version src;
52
-
hash = "sha256-6+cgOk9ixnOfLsXLMmyR6I9+goZCo5r4+A7Iy5Hjv2M=";
54
+
fetcherVersion = 1;
55
+
hash = "sha256-ydgb5NCFsYaDbmLjBqu91MqKj/I3TKpNLjOvyP+aY8o=";
53
56
};
54
57
patchPhase = ''
55
58
cat ${optionsDoc} >> ./reference/module.md
59
+
cat ${wire-diagnostics-md} >> ./reference/errors.md
56
60
wire inspect --markdown-help > ./reference/cli.md
57
61
'';
58
62
buildPhase = "pnpm run build > build.log 2>&1";
···
62
66
nix-instantiate --eval --strict ./snippets > /dev/null
63
67
'';
64
68
DEBUG = "*";
69
+
MODE = mode;
65
70
})
+447
-598
doc/pnpm-lock.yaml
+447
-598
doc/pnpm-lock.yaml
···
11
11
markdown-it-footnote:
12
12
specifier: ^4.0.0
13
13
version: 4.0.0
14
-
vitepress-plugin-comment-with-giscus:
15
-
specifier: ^1.1.15
16
-
version: 1.1.15(vue@3.5.17)
17
14
vitepress-plugin-group-icons:
18
-
specifier: ^1.6.0
19
-
version: 1.6.0(markdown-it@14.1.0)(vite@5.4.19)
15
+
specifier: ^1.6.5
16
+
version: 1.6.5(vite@5.4.21)
20
17
devDependencies:
21
18
vitepress:
22
-
specifier: ^1.6.3
23
-
version: 1.6.3(@algolia/client-search@5.28.0)(postcss@8.5.6)(search-insights@2.17.3)
19
+
specifier: ^1.6.4
20
+
version: 1.6.4(@algolia/client-search@5.46.0)(postcss@8.5.6)(search-insights@2.17.3)
24
21
vue:
25
-
specifier: ^3.5.17
26
-
version: 3.5.17
22
+
specifier: ^3.5.25
23
+
version: 3.5.26
27
24
28
25
packages:
26
+
27
+
'@algolia/abtesting@1.12.0':
28
+
resolution: {integrity: sha512-EfW0bfxjPs+C7ANkJDw2TATntfBKsFiy7APh+KO0pQ8A6HYa5I0NjFuCGCXWfzzzLXNZta3QUl3n5Kmm6aJo9Q==}
29
+
engines: {node: '>= 14.0.0'}
29
30
30
31
'@algolia/autocomplete-core@1.17.7':
31
32
resolution: {integrity: sha512-BjiPOW6ks90UKl7TwMv7oNQMnzU+t/wk9mgIDi6b1tXpUek7MW0lbNOUHpvam9pe3lVCf4xPFT+lK7s+e+fs7Q==}
···
47
48
'@algolia/client-search': '>= 4.9.1 < 6'
48
49
algoliasearch: '>= 4.9.1 < 6'
49
50
50
-
'@algolia/client-abtesting@5.28.0':
51
-
resolution: {integrity: sha512-oGMaBCIpvz3n+4rCz/73ldo/Dw95YFx6+MAQkNiCfsgolB2tduaiZvNOvdkm86eKqSKDDBGBo54GQXZ5YX6Bjg==}
51
+
'@algolia/client-abtesting@5.46.0':
52
+
resolution: {integrity: sha512-eG5xV8rujK4ZIHXrRshvv9O13NmU/k42Rnd3w43iKH5RaQ2zWuZO6Q7XjaoJjAFVCsJWqRbXzbYyPGrbF3wGNg==}
52
53
engines: {node: '>= 14.0.0'}
53
54
54
-
'@algolia/client-analytics@5.28.0':
55
-
resolution: {integrity: sha512-G+TTdNnuwUSy8evolyNE3I74uSIXPU4LLDnJmB4d6TkLvvzMAjwsMBuHHjwYpw37+c4tH0dT4u+39cyxrZNojg==}
55
+
'@algolia/client-analytics@5.46.0':
56
+
resolution: {integrity: sha512-AYh2uL8IUW9eZrbbT+wZElyb7QkkeV3US2NEKY7doqMlyPWE8lErNfkVN1NvZdVcY4/SVic5GDbeDz2ft8YIiQ==}
56
57
engines: {node: '>= 14.0.0'}
57
58
58
-
'@algolia/client-common@5.28.0':
59
-
resolution: {integrity: sha512-lqa0Km1/YWfPplNB8jX9kstaCl2LO6ziQAJEBtHxw2sJp/mlxJIAuudBUbEhoUrKQvI7N4erNYawl6ejic7gfw==}
59
+
'@algolia/client-common@5.46.0':
60
+
resolution: {integrity: sha512-0emZTaYOeI9WzJi0TcNd2k3SxiN6DZfdWc2x2gHt855Jl9jPUOzfVTL6gTvCCrOlT4McvpDGg5nGO+9doEjjig==}
60
61
engines: {node: '>= 14.0.0'}
61
62
62
-
'@algolia/client-insights@5.28.0':
63
-
resolution: {integrity: sha512-pGsDrlnt0UMXDjQuIpKQSfl7PVx+KcqcwVgkgITwQ45akckTwmbpaV4rZF2k3wgIbOECFZGnpArWF5cSrE4T3g==}
63
+
'@algolia/client-insights@5.46.0':
64
+
resolution: {integrity: sha512-wrBJ8fE+M0TDG1As4DDmwPn2TXajrvmvAN72Qwpuv8e2JOKNohF7+JxBoF70ZLlvP1A1EiH8DBu+JpfhBbNphQ==}
64
65
engines: {node: '>= 14.0.0'}
65
66
66
-
'@algolia/client-personalization@5.28.0':
67
-
resolution: {integrity: sha512-d/Uot/LH8YJeFyqpAmTN/LxueqV5mLD5K4aAKTDVP4CBNNubX4Z+0sveRcxWQZiORVLrs5zR1G5Buxmab2Xb9w==}
67
+
'@algolia/client-personalization@5.46.0':
68
+
resolution: {integrity: sha512-LnkeX4p0ENt0DoftDJJDzQQJig/sFQmD1eQifl/iSjhUOGUIKC/7VTeXRcKtQB78naS8njUAwpzFvxy1CDDXDQ==}
68
69
engines: {node: '>= 14.0.0'}
69
70
70
-
'@algolia/client-query-suggestions@5.28.0':
71
-
resolution: {integrity: sha512-XygCxyxJ5IwqsTrzpsAG2O/lr8GsnMA3ih7wzbXtot+ZyAhzDUFwlQSjCCmjACNbrBEaIvtiGbjX/z+HZd902Q==}
71
+
'@algolia/client-query-suggestions@5.46.0':
72
+
resolution: {integrity: sha512-aF9tc4ex/smypXw+W3lBPB1jjKoaGHpZezTqofvDOI/oK1dR2sdTpFpK2Ru+7IRzYgwtRqHF3znmTlyoNs9dpA==}
72
73
engines: {node: '>= 14.0.0'}
73
74
74
-
'@algolia/client-search@5.28.0':
75
-
resolution: {integrity: sha512-zLEddu9TEwFT/YUJkA3oUwqQYHeGEj64fi0WyVRq+siJVfxt4AYkFfcMBcSr2iR1Wo9Mk10IPOhk3DUr0TSncg==}
75
+
'@algolia/client-search@5.46.0':
76
+
resolution: {integrity: sha512-22SHEEVNjZfFWkFks3P6HilkR3rS7a6GjnCIqR22Zz4HNxdfT0FG+RE7efTcFVfLUkTTMQQybvaUcwMrHXYa7Q==}
76
77
engines: {node: '>= 14.0.0'}
77
78
78
-
'@algolia/ingestion@1.28.0':
79
-
resolution: {integrity: sha512-dmkoSQ+bzC5ryDu2J4MTRDxuh5rZg6sHNawgBfSC/iNttEzeogCyvdxg+uWMErJuSlZk9oENykhETMkSFurwpQ==}
79
+
'@algolia/ingestion@1.46.0':
80
+
resolution: {integrity: sha512-2LT0/Z+/sFwEpZLH6V17WSZ81JX2uPjgvv5eNlxgU7rPyup4NXXfuMbtCJ+6uc4RO/LQpEJd3Li59ke3wtyAsA==}
80
81
engines: {node: '>= 14.0.0'}
81
82
82
-
'@algolia/monitoring@1.28.0':
83
-
resolution: {integrity: sha512-XwVpkxc2my2rNUWbHo4Dk1Mx/JOrq6CLOAC3dmIrMt2Le2bIPMIDA6Iyjz4F4kXvp7H8q1R26cRMlYmhL31Jlg==}
83
+
'@algolia/monitoring@1.46.0':
84
+
resolution: {integrity: sha512-uivZ9wSWZ8mz2ZU0dgDvQwvVZV8XBv6lYBXf8UtkQF3u7WeTqBPeU8ZoeTyLpf0jAXCYOvc1mAVmK0xPLuEwOQ==}
84
85
engines: {node: '>= 14.0.0'}
85
86
86
-
'@algolia/recommend@5.28.0':
87
-
resolution: {integrity: sha512-MVqY7zIw0TdQUExefGthydLXccbe5CHH/uOxIG8/QiSD0ZmAmg95UwfmJiJBfuXGGi/cmCrW3JQiDbAM9vx6PA==}
87
+
'@algolia/recommend@5.46.0':
88
+
resolution: {integrity: sha512-O2BB8DuySuddgOAbhyH4jsGbL+KyDGpzJRtkDZkv091OMomqIA78emhhMhX9d/nIRrzS1wNLWB/ix7Hb2eV5rg==}
88
89
engines: {node: '>= 14.0.0'}
89
90
90
-
'@algolia/requester-browser-xhr@5.28.0':
91
-
resolution: {integrity: sha512-RfxbCinf+coQgxRkDKmRiB/ovOt3Fz0md84LmogsQIabrJVKoQrFON4Vc9YdK2bTTn6iBHtnezm0puNTk+n3SA==}
91
+
'@algolia/requester-browser-xhr@5.46.0':
92
+
resolution: {integrity: sha512-eW6xyHCyYrJD0Kjk9Mz33gQ40LfWiEA51JJTVfJy3yeoRSw/NXhAL81Pljpa0qslTs6+LO/5DYPZddct6HvISQ==}
92
93
engines: {node: '>= 14.0.0'}
93
94
94
-
'@algolia/requester-fetch@5.28.0':
95
-
resolution: {integrity: sha512-85ZBqPTQ5tjiZ925V89ttE/vUJXpJjy2cCF7PAWq9v32JGGF+v+mDm8NiEBRk9AS7+4klb/uR80KBdcg5bO7cA==}
95
+
'@algolia/requester-fetch@5.46.0':
96
+
resolution: {integrity: sha512-Vn2+TukMGHy4PIxmdvP667tN/MhS7MPT8EEvEhS6JyFLPx3weLcxSa1F9gVvrfHWCUJhLWoMVJVB2PT8YfRGcw==}
96
97
engines: {node: '>= 14.0.0'}
97
98
98
-
'@algolia/requester-node-http@5.28.0':
99
-
resolution: {integrity: sha512-U3F4WeExiKx1Ig6OxO9dDzzk04HKgtEn47TwjgKmGSDPFM7WZ5KyP1EAZEbfd3/nw6hp0z9RKdTfMql6Sd1/2Q==}
99
+
'@algolia/requester-node-http@5.46.0':
100
+
resolution: {integrity: sha512-xaqXyna5yBZ+r1SJ9my/DM6vfTqJg9FJgVydRJ0lnO+D5NhqGW/qaRG/iBGKr/d4fho34el6WakV7BqJvrl/HQ==}
100
101
engines: {node: '>= 14.0.0'}
101
102
102
103
'@antfu/install-pkg@1.1.0':
103
104
resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==}
104
105
105
-
'@antfu/utils@8.1.1':
106
-
resolution: {integrity: sha512-Mex9nXf9vR6AhcXmMrlz/HVgYYZpVGJ6YlPgwl7UnaFpnshXs6EK/oa5Gpf3CzENMjkvEx2tQtntGnb7UtSTOQ==}
107
-
108
106
'@babel/helper-string-parser@7.27.1':
109
107
resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==}
110
108
engines: {node: '>=6.9.0'}
111
109
112
-
'@babel/helper-validator-identifier@7.27.1':
113
-
resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==}
110
+
'@babel/helper-validator-identifier@7.28.5':
111
+
resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==}
114
112
engines: {node: '>=6.9.0'}
115
113
116
-
'@babel/parser@7.27.5':
117
-
resolution: {integrity: sha512-OsQd175SxWkGlzbny8J3K8TnnDD0N3lrIUtB92xwyRpzaenGZhxDvxN/JgU00U3CDZNj9tPuDJ5H0WS4Nt3vKg==}
114
+
'@babel/parser@7.28.5':
115
+
resolution: {integrity: sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==}
118
116
engines: {node: '>=6.0.0'}
119
117
hasBin: true
120
118
121
-
'@babel/types@7.27.6':
122
-
resolution: {integrity: sha512-ETyHEk2VHHvl9b9jZP5IHPavHYk57EhanlRRuae9XCpb/j5bDCbPPMOBfCWhnl/7EDJz0jEMCi/RhccCE8r1+Q==}
119
+
'@babel/types@7.28.5':
120
+
resolution: {integrity: sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==}
123
121
engines: {node: '>=6.9.0'}
124
122
125
123
'@docsearch/css@3.8.2':
···
283
281
cpu: [x64]
284
282
os: [win32]
285
283
286
-
'@giscus/vue@2.4.0':
287
-
resolution: {integrity: sha512-QOxKHgsMT91myyQagP2v20YYAei1ByZuc3qcaYxbHx4AwOeyVrybDIuRFwG9YDv6OraC86jYnU4Ixd37ddC/0A==}
288
-
peerDependencies:
289
-
vue: '>=3.2.0'
284
+
'@iconify-json/logos@1.2.10':
285
+
resolution: {integrity: sha512-qxaXKJ6fu8jzTMPQdHtNxlfx6tBQ0jXRbHZIYy5Ilh8Lx9US9FsAdzZWUR8MXV8PnWTKGDFO4ZZee9VwerCyMA==}
290
286
291
-
'@iconify-json/logos@1.2.4':
292
-
resolution: {integrity: sha512-XC4If5D/hbaZvUkTV8iaZuGlQCyG6CNOlaAaJaGa13V5QMYwYjgtKk3vPP8wz3wtTVNVEVk3LRx1fOJz+YnSMw==}
287
+
'@iconify-json/simple-icons@1.2.63':
288
+
resolution: {integrity: sha512-xZl2UWCwE58VlqZ+pDPmaUhE2tq8MVSTJRr4/9nzzHlDdjJ0Ud1VxNXPrwTSgESKY29iCQw3S0r2nJTSNNngHw==}
293
289
294
-
'@iconify-json/simple-icons@1.2.39':
295
-
resolution: {integrity: sha512-XlhW73c4dHvUrwWckVY76HDjnaZ2fWKD6hNZtd5kuv23GC0g3Lu0MXnYscpkIYOeiXO+Gtlw8FM53J7C84mCtA==}
296
-
297
-
'@iconify-json/vscode-icons@1.2.23':
298
-
resolution: {integrity: sha512-gFTcKecKra2/b5SbGDgHGI/l8CuikHyBPmqGlK+YCmS8AK72dtDQbUekdoACsju/3TYS37QvdPoOQwnyx2LdYg==}
290
+
'@iconify-json/vscode-icons@1.2.37':
291
+
resolution: {integrity: sha512-HLRdU6nZks4N8x3JYz6j+b3+hcUCvYvlTLwGzM3xyXfTJyDSA2cAdWcEXfoA4hQMJGA+zCDSPAWFelFptH5Kbw==}
299
292
300
293
'@iconify/types@2.0.0':
301
294
resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==}
302
295
303
-
'@iconify/utils@2.3.0':
304
-
resolution: {integrity: sha512-GmQ78prtwYW6EtzXRU1rY+KwOKfz32PD7iJh6Iyqw68GiKuoZ2A6pRtzWONz5VQJbp50mEjXh/7NkumtrAgRKA==}
296
+
'@iconify/utils@3.1.0':
297
+
resolution: {integrity: sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw==}
305
298
306
-
'@jridgewell/sourcemap-codec@1.5.0':
307
-
resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==}
299
+
'@jridgewell/sourcemap-codec@1.5.5':
300
+
resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==}
308
301
309
-
'@lit-labs/ssr-dom-shim@1.3.0':
310
-
resolution: {integrity: sha512-nQIWonJ6eFAvUUrSlwyHDm/aE8PBDu5kRpL0vHMg6K8fK3Diq1xdPjTnsJSwxABhaZ+5eBi1btQB5ShUTKo4nQ==}
311
-
312
-
'@lit/reactive-element@2.1.0':
313
-
resolution: {integrity: sha512-L2qyoZSQClcBmq0qajBVbhYEcG6iK0XfLn66ifLe/RfC0/ihpc+pl0Wdn8bJ8o+hj38cG0fGXRgSS20MuXn7qA==}
314
-
315
-
'@rollup/rollup-android-arm-eabi@4.43.0':
316
-
resolution: {integrity: sha512-Krjy9awJl6rKbruhQDgivNbD1WuLb8xAclM4IR4cN5pHGAs2oIMMQJEiC3IC/9TZJ+QZkmZhlMO/6MBGxPidpw==}
302
+
'@rollup/rollup-android-arm-eabi@4.53.5':
303
+
resolution: {integrity: sha512-iDGS/h7D8t7tvZ1t6+WPK04KD0MwzLZrG0se1hzBjSi5fyxlsiggoJHwh18PCFNn7tG43OWb6pdZ6Y+rMlmyNQ==}
317
304
cpu: [arm]
318
305
os: [android]
319
306
320
-
'@rollup/rollup-android-arm64@4.43.0':
321
-
resolution: {integrity: sha512-ss4YJwRt5I63454Rpj+mXCXicakdFmKnUNxr1dLK+5rv5FJgAxnN7s31a5VchRYxCFWdmnDWKd0wbAdTr0J5EA==}
307
+
'@rollup/rollup-android-arm64@4.53.5':
308
+
resolution: {integrity: sha512-wrSAViWvZHBMMlWk6EJhvg8/rjxzyEhEdgfMMjREHEq11EtJ6IP6yfcCH57YAEca2Oe3FNCE9DSTgU70EIGmVw==}
322
309
cpu: [arm64]
323
310
os: [android]
324
311
325
-
'@rollup/rollup-darwin-arm64@4.43.0':
326
-
resolution: {integrity: sha512-eKoL8ykZ7zz8MjgBenEF2OoTNFAPFz1/lyJ5UmmFSz5jW+7XbH1+MAgCVHy72aG59rbuQLcJeiMrP8qP5d/N0A==}
312
+
'@rollup/rollup-darwin-arm64@4.53.5':
313
+
resolution: {integrity: sha512-S87zZPBmRO6u1YXQLwpveZm4JfPpAa6oHBX7/ghSiGH3rz/KDgAu1rKdGutV+WUI6tKDMbaBJomhnT30Y2t4VQ==}
327
314
cpu: [arm64]
328
315
os: [darwin]
329
316
330
-
'@rollup/rollup-darwin-x64@4.43.0':
331
-
resolution: {integrity: sha512-SYwXJgaBYW33Wi/q4ubN+ldWC4DzQY62S4Ll2dgfr/dbPoF50dlQwEaEHSKrQdSjC6oIe1WgzosoaNoHCdNuMg==}
317
+
'@rollup/rollup-darwin-x64@4.53.5':
318
+
resolution: {integrity: sha512-YTbnsAaHo6VrAczISxgpTva8EkfQus0VPEVJCEaboHtZRIb6h6j0BNxRBOwnDciFTZLDPW5r+ZBmhL/+YpTZgA==}
332
319
cpu: [x64]
333
320
os: [darwin]
334
321
335
-
'@rollup/rollup-freebsd-arm64@4.43.0':
336
-
resolution: {integrity: sha512-SV+U5sSo0yujrjzBF7/YidieK2iF6E7MdF6EbYxNz94lA+R0wKl3SiixGyG/9Klab6uNBIqsN7j4Y/Fya7wAjQ==}
322
+
'@rollup/rollup-freebsd-arm64@4.53.5':
323
+
resolution: {integrity: sha512-1T8eY2J8rKJWzaznV7zedfdhD1BqVs1iqILhmHDq/bqCUZsrMt+j8VCTHhP0vdfbHK3e1IQ7VYx3jlKqwlf+vw==}
337
324
cpu: [arm64]
338
325
os: [freebsd]
339
326
340
-
'@rollup/rollup-freebsd-x64@4.43.0':
341
-
resolution: {integrity: sha512-J7uCsiV13L/VOeHJBo5SjasKiGxJ0g+nQTrBkAsmQBIdil3KhPnSE9GnRon4ejX1XDdsmK/l30IYLiAaQEO0Cg==}
327
+
'@rollup/rollup-freebsd-x64@4.53.5':
328
+
resolution: {integrity: sha512-sHTiuXyBJApxRn+VFMaw1U+Qsz4kcNlxQ742snICYPrY+DDL8/ZbaC4DVIB7vgZmp3jiDaKA0WpBdP0aqPJoBQ==}
342
329
cpu: [x64]
343
330
os: [freebsd]
344
331
345
-
'@rollup/rollup-linux-arm-gnueabihf@4.43.0':
346
-
resolution: {integrity: sha512-gTJ/JnnjCMc15uwB10TTATBEhK9meBIY+gXP4s0sHD1zHOaIh4Dmy1X9wup18IiY9tTNk5gJc4yx9ctj/fjrIw==}
332
+
'@rollup/rollup-linux-arm-gnueabihf@4.53.5':
333
+
resolution: {integrity: sha512-dV3T9MyAf0w8zPVLVBptVlzaXxka6xg1f16VAQmjg+4KMSTWDvhimI/Y6mp8oHwNrmnmVl9XxJ/w/mO4uIQONA==}
347
334
cpu: [arm]
348
335
os: [linux]
349
336
350
-
'@rollup/rollup-linux-arm-musleabihf@4.43.0':
351
-
resolution: {integrity: sha512-ZJ3gZynL1LDSIvRfz0qXtTNs56n5DI2Mq+WACWZ7yGHFUEirHBRt7fyIk0NsCKhmRhn7WAcjgSkSVVxKlPNFFw==}
337
+
'@rollup/rollup-linux-arm-musleabihf@4.53.5':
338
+
resolution: {integrity: sha512-wIGYC1x/hyjP+KAu9+ewDI+fi5XSNiUi9Bvg6KGAh2TsNMA3tSEs+Sh6jJ/r4BV/bx/CyWu2ue9kDnIdRyafcQ==}
352
339
cpu: [arm]
353
340
os: [linux]
354
341
355
-
'@rollup/rollup-linux-arm64-gnu@4.43.0':
356
-
resolution: {integrity: sha512-8FnkipasmOOSSlfucGYEu58U8cxEdhziKjPD2FIa0ONVMxvl/hmONtX/7y4vGjdUhjcTHlKlDhw3H9t98fPvyA==}
342
+
'@rollup/rollup-linux-arm64-gnu@4.53.5':
343
+
resolution: {integrity: sha512-Y+qVA0D9d0y2FRNiG9oM3Hut/DgODZbU9I8pLLPwAsU0tUKZ49cyV1tzmB/qRbSzGvY8lpgGkJuMyuhH7Ma+Vg==}
357
344
cpu: [arm64]
358
345
os: [linux]
359
346
360
-
'@rollup/rollup-linux-arm64-musl@4.43.0':
361
-
resolution: {integrity: sha512-KPPyAdlcIZ6S9C3S2cndXDkV0Bb1OSMsX0Eelr2Bay4EsF9yi9u9uzc9RniK3mcUGCLhWY9oLr6er80P5DE6XA==}
347
+
'@rollup/rollup-linux-arm64-musl@4.53.5':
348
+
resolution: {integrity: sha512-juaC4bEgJsyFVfqhtGLz8mbopaWD+WeSOYr5E16y+1of6KQjc0BpwZLuxkClqY1i8sco+MdyoXPNiCkQou09+g==}
362
349
cpu: [arm64]
363
350
os: [linux]
364
351
365
-
'@rollup/rollup-linux-loongarch64-gnu@4.43.0':
366
-
resolution: {integrity: sha512-HPGDIH0/ZzAZjvtlXj6g+KDQ9ZMHfSP553za7o2Odegb/BEfwJcR0Sw0RLNpQ9nC6Gy8s+3mSS9xjZ0n3rhcYg==}
352
+
'@rollup/rollup-linux-loong64-gnu@4.53.5':
353
+
resolution: {integrity: sha512-rIEC0hZ17A42iXtHX+EPJVL/CakHo+tT7W0pbzdAGuWOt2jxDFh7A/lRhsNHBcqL4T36+UiAgwO8pbmn3dE8wA==}
367
354
cpu: [loong64]
368
355
os: [linux]
369
356
370
-
'@rollup/rollup-linux-powerpc64le-gnu@4.43.0':
371
-
resolution: {integrity: sha512-gEmwbOws4U4GLAJDhhtSPWPXUzDfMRedT3hFMyRAvM9Mrnj+dJIFIeL7otsv2WF3D7GrV0GIewW0y28dOYWkmw==}
357
+
'@rollup/rollup-linux-ppc64-gnu@4.53.5':
358
+
resolution: {integrity: sha512-T7l409NhUE552RcAOcmJHj3xyZ2h7vMWzcwQI0hvn5tqHh3oSoclf9WgTl+0QqffWFG8MEVZZP1/OBglKZx52Q==}
372
359
cpu: [ppc64]
373
360
os: [linux]
374
361
375
-
'@rollup/rollup-linux-riscv64-gnu@4.43.0':
376
-
resolution: {integrity: sha512-XXKvo2e+wFtXZF/9xoWohHg+MuRnvO29TI5Hqe9xwN5uN8NKUYy7tXUG3EZAlfchufNCTHNGjEx7uN78KsBo0g==}
362
+
'@rollup/rollup-linux-riscv64-gnu@4.53.5':
363
+
resolution: {integrity: sha512-7OK5/GhxbnrMcxIFoYfhV/TkknarkYC1hqUw1wU2xUN3TVRLNT5FmBv4KkheSG2xZ6IEbRAhTooTV2+R5Tk0lQ==}
377
364
cpu: [riscv64]
378
365
os: [linux]
379
366
380
-
'@rollup/rollup-linux-riscv64-musl@4.43.0':
381
-
resolution: {integrity: sha512-ruf3hPWhjw6uDFsOAzmbNIvlXFXlBQ4nk57Sec8E8rUxs/AI4HD6xmiiasOOx/3QxS2f5eQMKTAwk7KHwpzr/Q==}
367
+
'@rollup/rollup-linux-riscv64-musl@4.53.5':
368
+
resolution: {integrity: sha512-GwuDBE/PsXaTa76lO5eLJTyr2k8QkPipAyOrs4V/KJufHCZBJ495VCGJol35grx9xryk4V+2zd3Ri+3v7NPh+w==}
382
369
cpu: [riscv64]
383
370
os: [linux]
384
371
385
-
'@rollup/rollup-linux-s390x-gnu@4.43.0':
386
-
resolution: {integrity: sha512-QmNIAqDiEMEvFV15rsSnjoSmO0+eJLoKRD9EAa9rrYNwO/XRCtOGM3A5A0X+wmG+XRrw9Fxdsw+LnyYiZWWcVw==}
372
+
'@rollup/rollup-linux-s390x-gnu@4.53.5':
373
+
resolution: {integrity: sha512-IAE1Ziyr1qNfnmiQLHBURAD+eh/zH1pIeJjeShleII7Vj8kyEm2PF77o+lf3WTHDpNJcu4IXJxNO0Zluro8bOw==}
387
374
cpu: [s390x]
388
375
os: [linux]
389
376
390
-
'@rollup/rollup-linux-x64-gnu@4.43.0':
391
-
resolution: {integrity: sha512-jAHr/S0iiBtFyzjhOkAics/2SrXE092qyqEg96e90L3t9Op8OTzS6+IX0Fy5wCt2+KqeHAkti+eitV0wvblEoQ==}
377
+
'@rollup/rollup-linux-x64-gnu@4.53.5':
378
+
resolution: {integrity: sha512-Pg6E+oP7GvZ4XwgRJBuSXZjcqpIW3yCBhK4BcsANvb47qMvAbCjR6E+1a/U2WXz1JJxp9/4Dno3/iSJLcm5auw==}
392
379
cpu: [x64]
393
380
os: [linux]
394
381
395
-
'@rollup/rollup-linux-x64-musl@4.43.0':
396
-
resolution: {integrity: sha512-3yATWgdeXyuHtBhrLt98w+5fKurdqvs8B53LaoKD7P7H7FKOONLsBVMNl9ghPQZQuYcceV5CDyPfyfGpMWD9mQ==}
382
+
'@rollup/rollup-linux-x64-musl@4.53.5':
383
+
resolution: {integrity: sha512-txGtluxDKTxaMDzUduGP0wdfng24y1rygUMnmlUJ88fzCCULCLn7oE5kb2+tRB+MWq1QDZT6ObT5RrR8HFRKqg==}
397
384
cpu: [x64]
398
385
os: [linux]
399
386
400
-
'@rollup/rollup-win32-arm64-msvc@4.43.0':
401
-
resolution: {integrity: sha512-wVzXp2qDSCOpcBCT5WRWLmpJRIzv23valvcTwMHEobkjippNf+C3ys/+wf07poPkeNix0paTNemB2XrHr2TnGw==}
387
+
'@rollup/rollup-openharmony-arm64@4.53.5':
388
+
resolution: {integrity: sha512-3DFiLPnTxiOQV993fMc+KO8zXHTcIjgaInrqlG8zDp1TlhYl6WgrOHuJkJQ6M8zHEcntSJsUp1XFZSY8C1DYbg==}
389
+
cpu: [arm64]
390
+
os: [openharmony]
391
+
392
+
'@rollup/rollup-win32-arm64-msvc@4.53.5':
393
+
resolution: {integrity: sha512-nggc/wPpNTgjGg75hu+Q/3i32R00Lq1B6N1DO7MCU340MRKL3WZJMjA9U4K4gzy3dkZPXm9E1Nc81FItBVGRlA==}
402
394
cpu: [arm64]
403
395
os: [win32]
404
396
405
-
'@rollup/rollup-win32-ia32-msvc@4.43.0':
406
-
resolution: {integrity: sha512-fYCTEyzf8d+7diCw8b+asvWDCLMjsCEA8alvtAutqJOJp/wL5hs1rWSqJ1vkjgW0L2NB4bsYJrpKkiIPRR9dvw==}
397
+
'@rollup/rollup-win32-ia32-msvc@4.53.5':
398
+
resolution: {integrity: sha512-U/54pTbdQpPLBdEzCT6NBCFAfSZMvmjr0twhnD9f4EIvlm9wy3jjQ38yQj1AGznrNO65EWQMgm/QUjuIVrYF9w==}
407
399
cpu: [ia32]
408
400
os: [win32]
409
401
410
-
'@rollup/rollup-win32-x64-msvc@4.43.0':
411
-
resolution: {integrity: sha512-SnGhLiE5rlK0ofq8kzuDkM0g7FN1s5VYY+YSMTibP7CqShxCQvqtNxTARS4xX4PFJfHjG0ZQYX9iGzI3FQh5Aw==}
402
+
'@rollup/rollup-win32-x64-gnu@4.53.5':
403
+
resolution: {integrity: sha512-2NqKgZSuLH9SXBBV2dWNRCZmocgSOx8OJSdpRaEcRlIfX8YrKxUT6z0F1NpvDVhOsl190UFTRh2F2WDWWCYp3A==}
404
+
cpu: [x64]
405
+
os: [win32]
406
+
407
+
'@rollup/rollup-win32-x64-msvc@4.53.5':
408
+
resolution: {integrity: sha512-JRpZUhCfhZ4keB5v0fe02gQJy05GqboPOaxvjugW04RLSYYoB/9t2lx2u/tMs/Na/1NXfY8QYjgRljRpN+MjTQ==}
412
409
cpu: [x64]
413
410
os: [win32]
414
411
···
436
433
'@shikijs/vscode-textmate@10.0.2':
437
434
resolution: {integrity: sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==}
438
435
439
-
'@types/estree@1.0.7':
440
-
resolution: {integrity: sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==}
436
+
'@types/estree@1.0.8':
437
+
resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==}
441
438
442
439
'@types/hast@3.0.4':
443
440
resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==}
···
453
450
454
451
'@types/mdurl@2.0.0':
455
452
resolution: {integrity: sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==}
456
-
457
-
'@types/trusted-types@2.0.7':
458
-
resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==}
459
453
460
454
'@types/unist@3.0.3':
461
455
resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==}
···
473
467
vite: ^5.0.0 || ^6.0.0
474
468
vue: ^3.2.25
475
469
476
-
'@vue/compiler-core@3.5.17':
477
-
resolution: {integrity: sha512-Xe+AittLbAyV0pabcN7cP7/BenRBNcteM4aSDCtRvGw0d9OL+HG1u/XHLY/kt1q4fyMeZYXyIYrsHuPSiDPosA==}
470
+
'@vue/compiler-core@3.5.26':
471
+
resolution: {integrity: sha512-vXyI5GMfuoBCnv5ucIT7jhHKl55Y477yxP6fc4eUswjP8FG3FFVFd41eNDArR+Uk3QKn2Z85NavjaxLxOC19/w==}
478
472
479
-
'@vue/compiler-dom@3.5.17':
480
-
resolution: {integrity: sha512-+2UgfLKoaNLhgfhV5Ihnk6wB4ljyW1/7wUIog2puUqajiC29Lp5R/IKDdkebh9jTbTogTbsgB+OY9cEWzG95JQ==}
473
+
'@vue/compiler-dom@3.5.26':
474
+
resolution: {integrity: sha512-y1Tcd3eXs834QjswshSilCBnKGeQjQXB6PqFn/1nxcQw4pmG42G8lwz+FZPAZAby6gZeHSt/8LMPfZ4Rb+Bd/A==}
481
475
482
-
'@vue/compiler-sfc@3.5.17':
483
-
resolution: {integrity: sha512-rQQxbRJMgTqwRugtjw0cnyQv9cP4/4BxWfTdRBkqsTfLOHWykLzbOc3C4GGzAmdMDxhzU/1Ija5bTjMVrddqww==}
476
+
'@vue/compiler-sfc@3.5.26':
477
+
resolution: {integrity: sha512-egp69qDTSEZcf4bGOSsprUr4xI73wfrY5oRs6GSgXFTiHrWj4Y3X5Ydtip9QMqiCMCPVwLglB9GBxXtTadJ3mA==}
484
478
485
-
'@vue/compiler-ssr@3.5.17':
486
-
resolution: {integrity: sha512-hkDbA0Q20ZzGgpj5uZjb9rBzQtIHLS78mMilwrlpWk2Ep37DYntUz0PonQ6kr113vfOEdM+zTBuJDaceNIW0tQ==}
479
+
'@vue/compiler-ssr@3.5.26':
480
+
resolution: {integrity: sha512-lZT9/Y0nSIRUPVvapFJEVDbEXruZh2IYHMk2zTtEgJSlP5gVOqeWXH54xDKAaFS4rTnDeDBQUYDtxKyoW9FwDw==}
487
481
488
-
'@vue/devtools-api@7.7.7':
489
-
resolution: {integrity: sha512-lwOnNBH2e7x1fIIbVT7yF5D+YWhqELm55/4ZKf45R9T8r9dE2AIOy8HKjfqzGsoTHFbWbr337O4E0A0QADnjBg==}
482
+
'@vue/devtools-api@7.7.9':
483
+
resolution: {integrity: sha512-kIE8wvwlcZ6TJTbNeU2HQNtaxLx3a84aotTITUuL/4bzfPxzajGBOoqjMhwZJ8L9qFYDU/lAYMEEm11dnZOD6g==}
490
484
491
-
'@vue/devtools-kit@7.7.7':
492
-
resolution: {integrity: sha512-wgoZtxcTta65cnZ1Q6MbAfePVFxfM+gq0saaeytoph7nEa7yMXoi6sCPy4ufO111B9msnw0VOWjPEFCXuAKRHA==}
485
+
'@vue/devtools-kit@7.7.9':
486
+
resolution: {integrity: sha512-PyQ6odHSgiDVd4hnTP+aDk2X4gl2HmLDfiyEnn3/oV+ckFDuswRs4IbBT7vacMuGdwY/XemxBoh302ctbsptuA==}
493
487
494
-
'@vue/devtools-shared@7.7.7':
495
-
resolution: {integrity: sha512-+udSj47aRl5aKb0memBvcUG9koarqnxNM5yjuREvqwK6T3ap4mn3Zqqc17QrBFTqSMjr3HK1cvStEZpMDpfdyw==}
488
+
'@vue/devtools-shared@7.7.9':
489
+
resolution: {integrity: sha512-iWAb0v2WYf0QWmxCGy0seZNDPdO3Sp5+u78ORnyeonS6MT4PC7VPrryX2BpMJrwlDeaZ6BD4vP4XKjK0SZqaeA==}
496
490
497
-
'@vue/reactivity@3.5.17':
498
-
resolution: {integrity: sha512-l/rmw2STIscWi7SNJp708FK4Kofs97zc/5aEPQh4bOsReD/8ICuBcEmS7KGwDj5ODQLYWVN2lNibKJL1z5b+Lw==}
491
+
'@vue/reactivity@3.5.26':
492
+
resolution: {integrity: sha512-9EnYB1/DIiUYYnzlnUBgwU32NNvLp/nhxLXeWRhHUEeWNTn1ECxX8aGO7RTXeX6PPcxe3LLuNBFoJbV4QZ+CFQ==}
499
493
500
-
'@vue/runtime-core@3.5.17':
501
-
resolution: {integrity: sha512-QQLXa20dHg1R0ri4bjKeGFKEkJA7MMBxrKo2G+gJikmumRS7PTD4BOU9FKrDQWMKowz7frJJGqBffYMgQYS96Q==}
494
+
'@vue/runtime-core@3.5.26':
495
+
resolution: {integrity: sha512-xJWM9KH1kd201w5DvMDOwDHYhrdPTrAatn56oB/LRG4plEQeZRQLw0Bpwih9KYoqmzaxF0OKSn6swzYi84e1/Q==}
502
496
503
-
'@vue/runtime-dom@3.5.17':
504
-
resolution: {integrity: sha512-8El0M60TcwZ1QMz4/os2MdlQECgGoVHPuLnQBU3m9h3gdNRW9xRmI8iLS4t/22OQlOE6aJvNNlBiCzPHur4H9g==}
497
+
'@vue/runtime-dom@3.5.26':
498
+
resolution: {integrity: sha512-XLLd/+4sPC2ZkN/6+V4O4gjJu6kSDbHAChvsyWgm1oGbdSO3efvGYnm25yCjtFm/K7rrSDvSfPDgN1pHgS4VNQ==}
505
499
506
-
'@vue/server-renderer@3.5.17':
507
-
resolution: {integrity: sha512-BOHhm8HalujY6lmC3DbqF6uXN/K00uWiEeF22LfEsm9Q93XeJ/plHTepGwf6tqFcF7GA5oGSSAAUock3VvzaCA==}
500
+
'@vue/server-renderer@3.5.26':
501
+
resolution: {integrity: sha512-TYKLXmrwWKSodyVuO1WAubucd+1XlLg4set0YoV+Hu8Lo79mp/YMwWV5mC5FgtsDxX3qo1ONrxFaTP1OQgy1uA==}
508
502
peerDependencies:
509
-
vue: 3.5.17
503
+
vue: 3.5.26
504
+
505
+
'@vue/shared@3.5.25':
506
+
resolution: {integrity: sha512-AbOPdQQnAnzs58H2FrrDxYj/TJfmeS2jdfEEhgiKINy+bnOANmVizIEgq1r+C5zsbs6l1CCQxtcj71rwNQ4jWg==}
510
507
511
-
'@vue/shared@3.5.17':
512
-
resolution: {integrity: sha512-CabR+UN630VnsJO/jHWYBC1YVXyMq94KKp6iF5MQgZJs5I8cmjw6oVMO1oDbtBkENSHSSn/UadWlW/OAgdmKrg==}
508
+
'@vue/shared@3.5.26':
509
+
resolution: {integrity: sha512-7Z6/y3uFI5PRoKeorTOSXKcDj0MSasfNNltcslbFrPpcw6aXRUALq4IfJlaTRspiWIUOEZbrpM+iQGmCOiWe4A==}
513
510
514
511
'@vueuse/core@12.8.2':
515
512
resolution: {integrity: sha512-HbvCmZdzAu3VGi/pWYm5Ut+Kd9mn1ZHnn4L5G8kOQTPs/IwIAmJoBrmYk2ckLArgMXZj0AW3n5CAejLUO+PhdQ==}
···
566
563
engines: {node: '>=0.4.0'}
567
564
hasBin: true
568
565
569
-
algoliasearch@5.28.0:
570
-
resolution: {integrity: sha512-FCRzwW+/TJFQIfo+DxObo2gfn4+0aGa7sVQgCN1/ojKqrhb/7Scnuyi4FBS0zvNCgOZBMms+Ci2hyQwsgAqIzg==}
566
+
algoliasearch@5.46.0:
567
+
resolution: {integrity: sha512-7ML6fa2K93FIfifG3GMWhDEwT5qQzPTmoHKCTvhzGEwdbQ4n0yYUWZlLYT75WllTGJCJtNUI0C1ybN4BCegqvg==}
571
568
engines: {node: '>= 14.0.0'}
572
569
573
-
argparse@2.0.1:
574
-
resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
575
-
576
-
birpc@2.4.0:
577
-
resolution: {integrity: sha512-5IdNxTyhXHv2UlgnPHQ0h+5ypVmkrYHzL8QT+DwFZ//2N/oNV8Ch+BCRmTJ3x6/z9Axo/cXYBc9eprsUVK/Jsg==}
570
+
birpc@2.9.0:
571
+
resolution: {integrity: sha512-KrayHS5pBi69Xi9JmvoqrIgYGDkD6mcSe/i6YKi3w5kekCLzrX4+nawcXqrj2tIp50Kw/mT/s3p+GVK0A0sKxw==}
578
572
579
573
ccount@2.0.1:
580
574
resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==}
···
591
585
confbox@0.1.8:
592
586
resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==}
593
587
594
-
confbox@0.2.2:
595
-
resolution: {integrity: sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==}
596
-
597
-
copy-anything@3.0.5:
598
-
resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==}
599
-
engines: {node: '>=12.13'}
600
-
601
-
csstype@3.1.3:
602
-
resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==}
588
+
copy-anything@4.0.5:
589
+
resolution: {integrity: sha512-7Vv6asjS4gMOuILabD3l739tsaxFQmC+a7pLZm02zyvs8p977bL3zEgq3yDk5rn9B0PbYgIv++jmHcuUab4RhA==}
590
+
engines: {node: '>=18'}
603
591
604
-
debug@4.4.1:
605
-
resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==}
606
-
engines: {node: '>=6.0'}
607
-
peerDependencies:
608
-
supports-color: '*'
609
-
peerDependenciesMeta:
610
-
supports-color:
611
-
optional: true
592
+
csstype@3.2.3:
593
+
resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==}
612
594
613
595
dequal@2.0.3:
614
596
resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==}
···
620
602
emoji-regex-xs@1.0.0:
621
603
resolution: {integrity: sha512-LRlerrMYoIDrT6jgpeZ2YYl/L8EulRTt5hQcYjy5AInh7HWXKimpqx68aknBFpGL2+/IcogTcaydJEgaTmOpDg==}
622
604
623
-
entities@4.5.0:
624
-
resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==}
605
+
entities@7.0.0:
606
+
resolution: {integrity: sha512-FDWG5cmEYf2Z00IkYRhbFrwIwvdFKH07uV8dvNy0omp/Qb1xcyCWp2UDtcwJF4QZZvk0sLudP6/hAu42TaqVhQ==}
625
607
engines: {node: '>=0.12'}
626
608
627
609
esbuild@0.21.5:
···
632
614
estree-walker@2.0.2:
633
615
resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==}
634
616
635
-
exsolve@1.0.6:
636
-
resolution: {integrity: sha512-Q05uIdxhPBVBwK29gcPsl2K220xSBy52TZQPdeYWE0zOs8jM+yJ6y5h7jm6cpAo1p+OOMZRIj/Ftku4EQQBLnQ==}
637
-
638
-
focus-trap@7.6.5:
639
-
resolution: {integrity: sha512-7Ke1jyybbbPZyZXFxEftUtxFGLMpE2n6A+z//m4CRDlj0hW+o3iYSmh8nFlYMurOiJVDmJRilUQtJr08KfIxlg==}
617
+
focus-trap@7.6.6:
618
+
resolution: {integrity: sha512-v/Z8bvMCajtx4mEXmOo7QEsIzlIOqRXTIwgUfsFOF9gEsespdbD0AkPIka1bSXZ8Y8oZ+2IVDQZePkTfEHZl7Q==}
640
619
641
620
fsevents@2.3.3:
642
621
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
643
622
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
644
623
os: [darwin]
645
624
646
-
giscus@1.6.0:
647
-
resolution: {integrity: sha512-Zrsi8r4t1LVW950keaWcsURuZUQwUaMKjvJgTCY125vkW6OiEBkatE7ScJDbpqKHdZwb///7FVC21SE3iFK3PQ==}
648
-
649
-
globals@15.15.0:
650
-
resolution: {integrity: sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==}
651
-
engines: {node: '>=18'}
652
-
653
625
hast-util-to-html@9.0.5:
654
626
resolution: {integrity: sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==}
655
627
···
662
634
html-void-elements@3.0.0:
663
635
resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==}
664
636
665
-
is-what@4.1.16:
666
-
resolution: {integrity: sha512-ZhMwEosbFJkA0YhFnNDgTM4ZxDRsS6HqTo7qsZM08fehyRYIYa0yHu5R6mgo1n/8MgaPBXiPimPD77baVFYg+A==}
667
-
engines: {node: '>=12.13'}
637
+
is-what@5.5.0:
638
+
resolution: {integrity: sha512-oG7cgbmg5kLYae2N5IVd3jm2s+vldjxJzK1pcu9LfpGuQ93MQSzo0okvRna+7y5ifrD+20FE8FvjusyGaz14fw==}
639
+
engines: {node: '>=18'}
668
640
669
-
kolorist@1.8.0:
670
-
resolution: {integrity: sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==}
671
-
672
-
linkify-it@5.0.0:
673
-
resolution: {integrity: sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==}
674
-
675
-
lit-element@4.2.0:
676
-
resolution: {integrity: sha512-MGrXJVAI5x+Bfth/pU9Kst1iWID6GHDLEzFEnyULB/sFiRLgkd8NPK/PeeXxktA3T6EIIaq8U3KcbTU5XFcP2Q==}
677
-
678
-
lit-html@3.3.0:
679
-
resolution: {integrity: sha512-RHoswrFAxY2d8Cf2mm4OZ1DgzCoBKUKSPvA1fhtSELxUERq2aQQ2h05pO9j81gS1o7RIRJ+CePLogfyahwmynw==}
680
-
681
-
lit@3.3.0:
682
-
resolution: {integrity: sha512-DGVsqsOIHBww2DqnuZzW7QsuCdahp50ojuDaBPC7jUDRpYoH0z7kHBBYZewRzer75FwtrkmkKk7iOAwSaWdBmw==}
683
-
684
-
local-pkg@1.1.1:
685
-
resolution: {integrity: sha512-WunYko2W1NcdfAFpuLUoucsgULmgDBRkdxHxWQ7mK0cQqwPiy8E1enjuRBrhLtZkB5iScJ1XIPdhVEFK8aOLSg==}
686
-
engines: {node: '>=14'}
687
-
688
-
magic-string@0.30.17:
689
-
resolution: {integrity: sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==}
641
+
magic-string@0.30.21:
642
+
resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==}
690
643
691
644
mark.js@8.11.1:
692
645
resolution: {integrity: sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==}
···
694
647
markdown-it-footnote@4.0.0:
695
648
resolution: {integrity: sha512-WYJ7urf+khJYl3DqofQpYfEYkZKbmXmwxQV8c8mO/hGIhgZ1wOe7R4HLFNwqx7TjILbnC98fuyeSsin19JdFcQ==}
696
649
697
-
markdown-it@14.1.0:
698
-
resolution: {integrity: sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==}
699
-
hasBin: true
700
-
701
-
mdast-util-to-hast@13.2.0:
702
-
resolution: {integrity: sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==}
703
-
704
-
mdurl@2.0.0:
705
-
resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==}
650
+
mdast-util-to-hast@13.2.1:
651
+
resolution: {integrity: sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==}
706
652
707
653
micromark-util-character@2.1.1:
708
654
resolution: {integrity: sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==}
···
719
665
micromark-util-types@2.0.2:
720
666
resolution: {integrity: sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==}
721
667
722
-
minisearch@7.1.2:
723
-
resolution: {integrity: sha512-R1Pd9eF+MD5JYDDSPAp/q1ougKglm14uEkPMvQ/05RGmx6G9wvmLTrTI/Q5iPNJLYqNdsDQ7qTGIcNWR+FrHmA==}
668
+
minisearch@7.2.0:
669
+
resolution: {integrity: sha512-dqT2XBYUOZOiC5t2HRnwADjhNS2cecp9u+TJRiJ1Qp/f5qjkeT5APcGPjHw+bz89Ms8Jp+cG4AlE+QZ/QnDglg==}
724
670
725
671
mitt@3.0.1:
726
672
resolution: {integrity: sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==}
727
673
728
-
mlly@1.7.4:
729
-
resolution: {integrity: sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==}
730
-
731
-
ms@2.1.3:
732
-
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
674
+
mlly@1.8.0:
675
+
resolution: {integrity: sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==}
733
676
734
677
nanoid@3.3.11:
735
678
resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==}
···
739
682
oniguruma-to-es@3.1.1:
740
683
resolution: {integrity: sha512-bUH8SDvPkH3ho3dvwJwfonjlQ4R80vjyvrU8YpxuROddv55vAEJrTuCuCVUhhsHbtlD9tGGbaNApGQckXhS8iQ==}
741
684
742
-
package-manager-detector@1.3.0:
743
-
resolution: {integrity: sha512-ZsEbbZORsyHuO00lY1kV3/t72yp6Ysay6Pd17ZAlNGuGwmWDLCJxFpRs0IzfXfj1o4icJOkUEioexFHzyPurSQ==}
685
+
package-manager-detector@1.6.0:
686
+
resolution: {integrity: sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==}
744
687
745
688
pathe@2.0.3:
746
689
resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==}
···
754
697
pkg-types@1.3.1:
755
698
resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==}
756
699
757
-
pkg-types@2.1.0:
758
-
resolution: {integrity: sha512-wmJwA+8ihJixSoHKxZJRBQG1oY8Yr9pGLzRmSsNms0iNWyHHAlZCa7mmKiFR10YPZuz/2k169JiS/inOjBCZ2A==}
759
-
760
700
postcss@8.5.6:
761
701
resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==}
762
702
engines: {node: ^10 || ^12 || >=14}
763
703
764
-
preact@10.26.9:
765
-
resolution: {integrity: sha512-SSjF9vcnF27mJK1XyFMNJzFd5u3pQiATFqoaDy03XuN00u4ziveVVEGt5RKJrDR8MHE/wJo9Nnad56RLzS2RMA==}
704
+
preact@10.28.0:
705
+
resolution: {integrity: sha512-rytDAoiXr3+t6OIP3WGlDd0ouCUG1iCWzkcY3++Nreuoi17y6T5i/zRhe6uYfoVcxq6YU+sBtJouuRDsq8vvqA==}
766
706
767
707
property-information@7.1.0:
768
708
resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==}
769
709
770
-
punycode.js@2.3.1:
771
-
resolution: {integrity: sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==}
772
-
engines: {node: '>=6'}
773
-
774
-
quansync@0.2.10:
775
-
resolution: {integrity: sha512-t41VRkMYbkHyCYmOvx/6URnN80H7k4X0lLdBMGsz+maAwrJQYB1djpV6vHrQIBE0WBSGqhtEHrK9U3DWWH8v7A==}
776
-
777
710
regex-recursion@6.0.2:
778
711
resolution: {integrity: sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==}
779
712
780
713
regex-utilities@2.3.0:
781
714
resolution: {integrity: sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==}
782
715
783
-
regex@6.0.1:
784
-
resolution: {integrity: sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA==}
716
+
regex@6.1.0:
717
+
resolution: {integrity: sha512-6VwtthbV4o/7+OaAF9I5L5V3llLEsoPyq9P1JVXkedTP33c7MfCG0/5NOPcSJn0TzXcG9YUrR0gQSWioew3LDg==}
785
718
786
719
rfdc@1.4.1:
787
720
resolution: {integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==}
788
721
789
-
rollup@4.43.0:
790
-
resolution: {integrity: sha512-wdN2Kd3Twh8MAEOEJZsuxuLKCsBEo4PVNLK6tQWAn10VhsVewQLzcucMgLolRlhFybGxfclbPeEYBaP6RvUFGg==}
722
+
rollup@4.53.5:
723
+
resolution: {integrity: sha512-iTNAbFSlRpcHeeWu73ywU/8KuU/LZmNCSxp6fjQkJBD3ivUb8tpDrXhIxEzA05HlYMEwmtaUnb3RP+YNv162OQ==}
791
724
engines: {node: '>=18.0.0', npm: '>=8.0.0'}
792
725
hasBin: true
793
726
···
811
744
stringify-entities@4.0.4:
812
745
resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==}
813
746
814
-
superjson@2.2.2:
815
-
resolution: {integrity: sha512-5JRxVqC8I8NuOUjzBbvVJAKNM8qoVuH0O77h4WInc/qC2q5IreqKxYwgkga3PfA22OayK2ikceb/B26dztPl+Q==}
747
+
superjson@2.2.6:
748
+
resolution: {integrity: sha512-H+ue8Zo4vJmV2nRjpx86P35lzwDT3nItnIsocgumgr0hHMQ+ZGq5vrERg9kJBo5AWGmxZDhzDo+WVIJqkB0cGA==}
816
749
engines: {node: '>=16'}
817
750
818
-
tabbable@6.2.0:
819
-
resolution: {integrity: sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==}
751
+
tabbable@6.3.0:
752
+
resolution: {integrity: sha512-EIHvdY5bPLuWForiR/AN2Bxngzpuwn1is4asboytXtpTgsArc+WmSJKVLlhdh71u7jFcryDqB2A8lQvj78MkyQ==}
820
753
821
-
tinyexec@1.0.1:
822
-
resolution: {integrity: sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==}
754
+
tinyexec@1.0.2:
755
+
resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==}
756
+
engines: {node: '>=18'}
823
757
824
758
trim-lines@3.0.1:
825
759
resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==}
826
760
827
-
uc.micro@2.1.0:
828
-
resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==}
829
-
830
761
ufo@1.6.1:
831
762
resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==}
832
763
833
-
unist-util-is@6.0.0:
834
-
resolution: {integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==}
764
+
unist-util-is@6.0.1:
765
+
resolution: {integrity: sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==}
835
766
836
767
unist-util-position@5.0.0:
837
768
resolution: {integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==}
···
839
770
unist-util-stringify-position@4.0.0:
840
771
resolution: {integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==}
841
772
842
-
unist-util-visit-parents@6.0.1:
843
-
resolution: {integrity: sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==}
773
+
unist-util-visit-parents@6.0.2:
774
+
resolution: {integrity: sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==}
844
775
845
776
unist-util-visit@5.0.0:
846
777
resolution: {integrity: sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==}
847
778
848
-
vfile-message@4.0.2:
849
-
resolution: {integrity: sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==}
779
+
vfile-message@4.0.3:
780
+
resolution: {integrity: sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==}
850
781
851
782
vfile@6.0.3:
852
783
resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==}
853
784
854
-
vite@5.4.19:
855
-
resolution: {integrity: sha512-qO3aKv3HoQC8QKiNSTuUM1l9o/XX3+c+VTgLHbJWHZGeTPVAg2XwazI9UWzoxjIJCGCV2zU60uqMzjeLZuULqA==}
785
+
vite@5.4.21:
786
+
resolution: {integrity: sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==}
856
787
engines: {node: ^18.0.0 || >=20.0.0}
857
788
hasBin: true
858
789
peerDependencies:
···
882
813
terser:
883
814
optional: true
884
815
885
-
vitepress-plugin-comment-with-giscus@1.1.15:
886
-
resolution: {integrity: sha512-1DJjgN+7SYvn5ZkjuSXPmz7nlqfcrh4qCGGviiZghA2ELXnaO2m9WY7m+RisPSaqCn90xqe0JbO2T4NMq8iUBg==}
887
-
888
-
vitepress-plugin-group-icons@1.6.0:
889
-
resolution: {integrity: sha512-+nxuVETpFkOYR5qHdvj3M5otWusJyS3ozEvVf1aQaE5Oz5e6NR0naYKTtH0Zf3Qss4vnhqaYt2Lq4jUTn9JVuA==}
816
+
vitepress-plugin-group-icons@1.6.5:
817
+
resolution: {integrity: sha512-+pg4+GKDq2fLqKb1Sat5p1p4SuIZ5tEPxu8HjpwoeecZ/VaXKy6Bdf0wyjedjaTAyZQzXbvyavJegqAcQ+B0VA==}
890
818
peerDependencies:
891
-
markdown-it: '>=14'
892
819
vite: '>=3'
820
+
peerDependenciesMeta:
821
+
vite:
822
+
optional: true
893
823
894
-
vitepress@1.6.3:
895
-
resolution: {integrity: sha512-fCkfdOk8yRZT8GD9BFqusW3+GggWYZ/rYncOfmgcDtP3ualNHCAg+Robxp2/6xfH1WwPHtGpPwv7mbA3qomtBw==}
824
+
vitepress@1.6.4:
825
+
resolution: {integrity: sha512-+2ym1/+0VVrbhNyRoFFesVvBvHAVMZMK0rw60E3X/5349M1GuVdKeazuksqopEdvkKwKGs21Q729jX81/bkBJg==}
896
826
hasBin: true
897
827
peerDependencies:
898
828
markdown-it-mathjax3: ^4
···
903
833
postcss:
904
834
optional: true
905
835
906
-
vue@3.5.17:
907
-
resolution: {integrity: sha512-LbHV3xPN9BeljML+Xctq4lbz2lVHCR6DtbpTf5XIO6gugpXUN49j2QQPcMj086r9+AkJ0FfUT8xjulKKBkkr9g==}
836
+
vue@3.5.26:
837
+
resolution: {integrity: sha512-SJ/NTccVyAoNUJmkM9KUqPcYlY+u8OVL1X5EW9RIs3ch5H2uERxyyIUI4MRxVCSOiEcupX9xNGde1tL9ZKpimA==}
908
838
peerDependencies:
909
839
typescript: '*'
910
840
peerDependenciesMeta:
···
916
846
917
847
snapshots:
918
848
919
-
'@algolia/autocomplete-core@1.17.7(@algolia/client-search@5.28.0)(algoliasearch@5.28.0)(search-insights@2.17.3)':
849
+
'@algolia/abtesting@1.12.0':
920
850
dependencies:
921
-
'@algolia/autocomplete-plugin-algolia-insights': 1.17.7(@algolia/client-search@5.28.0)(algoliasearch@5.28.0)(search-insights@2.17.3)
922
-
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.28.0)(algoliasearch@5.28.0)
851
+
'@algolia/client-common': 5.46.0
852
+
'@algolia/requester-browser-xhr': 5.46.0
853
+
'@algolia/requester-fetch': 5.46.0
854
+
'@algolia/requester-node-http': 5.46.0
855
+
856
+
'@algolia/autocomplete-core@1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)(search-insights@2.17.3)':
857
+
dependencies:
858
+
'@algolia/autocomplete-plugin-algolia-insights': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)(search-insights@2.17.3)
859
+
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)
923
860
transitivePeerDependencies:
924
861
- '@algolia/client-search'
925
862
- algoliasearch
926
863
- search-insights
927
864
928
-
'@algolia/autocomplete-plugin-algolia-insights@1.17.7(@algolia/client-search@5.28.0)(algoliasearch@5.28.0)(search-insights@2.17.3)':
865
+
'@algolia/autocomplete-plugin-algolia-insights@1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)(search-insights@2.17.3)':
929
866
dependencies:
930
-
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.28.0)(algoliasearch@5.28.0)
867
+
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)
931
868
search-insights: 2.17.3
932
869
transitivePeerDependencies:
933
870
- '@algolia/client-search'
934
871
- algoliasearch
935
872
936
-
'@algolia/autocomplete-preset-algolia@1.17.7(@algolia/client-search@5.28.0)(algoliasearch@5.28.0)':
873
+
'@algolia/autocomplete-preset-algolia@1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)':
937
874
dependencies:
938
-
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.28.0)(algoliasearch@5.28.0)
939
-
'@algolia/client-search': 5.28.0
940
-
algoliasearch: 5.28.0
875
+
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)
876
+
'@algolia/client-search': 5.46.0
877
+
algoliasearch: 5.46.0
941
878
942
-
'@algolia/autocomplete-shared@1.17.7(@algolia/client-search@5.28.0)(algoliasearch@5.28.0)':
879
+
'@algolia/autocomplete-shared@1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)':
943
880
dependencies:
944
-
'@algolia/client-search': 5.28.0
945
-
algoliasearch: 5.28.0
881
+
'@algolia/client-search': 5.46.0
882
+
algoliasearch: 5.46.0
946
883
947
-
'@algolia/client-abtesting@5.28.0':
884
+
'@algolia/client-abtesting@5.46.0':
948
885
dependencies:
949
-
'@algolia/client-common': 5.28.0
950
-
'@algolia/requester-browser-xhr': 5.28.0
951
-
'@algolia/requester-fetch': 5.28.0
952
-
'@algolia/requester-node-http': 5.28.0
886
+
'@algolia/client-common': 5.46.0
887
+
'@algolia/requester-browser-xhr': 5.46.0
888
+
'@algolia/requester-fetch': 5.46.0
889
+
'@algolia/requester-node-http': 5.46.0
953
890
954
-
'@algolia/client-analytics@5.28.0':
891
+
'@algolia/client-analytics@5.46.0':
955
892
dependencies:
956
-
'@algolia/client-common': 5.28.0
957
-
'@algolia/requester-browser-xhr': 5.28.0
958
-
'@algolia/requester-fetch': 5.28.0
959
-
'@algolia/requester-node-http': 5.28.0
893
+
'@algolia/client-common': 5.46.0
894
+
'@algolia/requester-browser-xhr': 5.46.0
895
+
'@algolia/requester-fetch': 5.46.0
896
+
'@algolia/requester-node-http': 5.46.0
960
897
961
-
'@algolia/client-common@5.28.0': {}
898
+
'@algolia/client-common@5.46.0': {}
962
899
963
-
'@algolia/client-insights@5.28.0':
900
+
'@algolia/client-insights@5.46.0':
964
901
dependencies:
965
-
'@algolia/client-common': 5.28.0
966
-
'@algolia/requester-browser-xhr': 5.28.0
967
-
'@algolia/requester-fetch': 5.28.0
968
-
'@algolia/requester-node-http': 5.28.0
902
+
'@algolia/client-common': 5.46.0
903
+
'@algolia/requester-browser-xhr': 5.46.0
904
+
'@algolia/requester-fetch': 5.46.0
905
+
'@algolia/requester-node-http': 5.46.0
969
906
970
-
'@algolia/client-personalization@5.28.0':
907
+
'@algolia/client-personalization@5.46.0':
971
908
dependencies:
972
-
'@algolia/client-common': 5.28.0
973
-
'@algolia/requester-browser-xhr': 5.28.0
974
-
'@algolia/requester-fetch': 5.28.0
975
-
'@algolia/requester-node-http': 5.28.0
909
+
'@algolia/client-common': 5.46.0
910
+
'@algolia/requester-browser-xhr': 5.46.0
911
+
'@algolia/requester-fetch': 5.46.0
912
+
'@algolia/requester-node-http': 5.46.0
976
913
977
-
'@algolia/client-query-suggestions@5.28.0':
914
+
'@algolia/client-query-suggestions@5.46.0':
978
915
dependencies:
979
-
'@algolia/client-common': 5.28.0
980
-
'@algolia/requester-browser-xhr': 5.28.0
981
-
'@algolia/requester-fetch': 5.28.0
982
-
'@algolia/requester-node-http': 5.28.0
916
+
'@algolia/client-common': 5.46.0
917
+
'@algolia/requester-browser-xhr': 5.46.0
918
+
'@algolia/requester-fetch': 5.46.0
919
+
'@algolia/requester-node-http': 5.46.0
983
920
984
-
'@algolia/client-search@5.28.0':
921
+
'@algolia/client-search@5.46.0':
985
922
dependencies:
986
-
'@algolia/client-common': 5.28.0
987
-
'@algolia/requester-browser-xhr': 5.28.0
988
-
'@algolia/requester-fetch': 5.28.0
989
-
'@algolia/requester-node-http': 5.28.0
923
+
'@algolia/client-common': 5.46.0
924
+
'@algolia/requester-browser-xhr': 5.46.0
925
+
'@algolia/requester-fetch': 5.46.0
926
+
'@algolia/requester-node-http': 5.46.0
990
927
991
-
'@algolia/ingestion@1.28.0':
928
+
'@algolia/ingestion@1.46.0':
992
929
dependencies:
993
-
'@algolia/client-common': 5.28.0
994
-
'@algolia/requester-browser-xhr': 5.28.0
995
-
'@algolia/requester-fetch': 5.28.0
996
-
'@algolia/requester-node-http': 5.28.0
930
+
'@algolia/client-common': 5.46.0
931
+
'@algolia/requester-browser-xhr': 5.46.0
932
+
'@algolia/requester-fetch': 5.46.0
933
+
'@algolia/requester-node-http': 5.46.0
997
934
998
-
'@algolia/monitoring@1.28.0':
935
+
'@algolia/monitoring@1.46.0':
999
936
dependencies:
1000
-
'@algolia/client-common': 5.28.0
1001
-
'@algolia/requester-browser-xhr': 5.28.0
1002
-
'@algolia/requester-fetch': 5.28.0
1003
-
'@algolia/requester-node-http': 5.28.0
937
+
'@algolia/client-common': 5.46.0
938
+
'@algolia/requester-browser-xhr': 5.46.0
939
+
'@algolia/requester-fetch': 5.46.0
940
+
'@algolia/requester-node-http': 5.46.0
1004
941
1005
-
'@algolia/recommend@5.28.0':
942
+
'@algolia/recommend@5.46.0':
1006
943
dependencies:
1007
-
'@algolia/client-common': 5.28.0
1008
-
'@algolia/requester-browser-xhr': 5.28.0
1009
-
'@algolia/requester-fetch': 5.28.0
1010
-
'@algolia/requester-node-http': 5.28.0
944
+
'@algolia/client-common': 5.46.0
945
+
'@algolia/requester-browser-xhr': 5.46.0
946
+
'@algolia/requester-fetch': 5.46.0
947
+
'@algolia/requester-node-http': 5.46.0
1011
948
1012
-
'@algolia/requester-browser-xhr@5.28.0':
949
+
'@algolia/requester-browser-xhr@5.46.0':
1013
950
dependencies:
1014
-
'@algolia/client-common': 5.28.0
951
+
'@algolia/client-common': 5.46.0
1015
952
1016
-
'@algolia/requester-fetch@5.28.0':
953
+
'@algolia/requester-fetch@5.46.0':
1017
954
dependencies:
1018
-
'@algolia/client-common': 5.28.0
955
+
'@algolia/client-common': 5.46.0
1019
956
1020
-
'@algolia/requester-node-http@5.28.0':
957
+
'@algolia/requester-node-http@5.46.0':
1021
958
dependencies:
1022
-
'@algolia/client-common': 5.28.0
959
+
'@algolia/client-common': 5.46.0
1023
960
1024
961
'@antfu/install-pkg@1.1.0':
1025
962
dependencies:
1026
-
package-manager-detector: 1.3.0
1027
-
tinyexec: 1.0.1
1028
-
1029
-
'@antfu/utils@8.1.1': {}
963
+
package-manager-detector: 1.6.0
964
+
tinyexec: 1.0.2
1030
965
1031
966
'@babel/helper-string-parser@7.27.1': {}
1032
967
1033
-
'@babel/helper-validator-identifier@7.27.1': {}
968
+
'@babel/helper-validator-identifier@7.28.5': {}
1034
969
1035
-
'@babel/parser@7.27.5':
970
+
'@babel/parser@7.28.5':
1036
971
dependencies:
1037
-
'@babel/types': 7.27.6
972
+
'@babel/types': 7.28.5
1038
973
1039
-
'@babel/types@7.27.6':
974
+
'@babel/types@7.28.5':
1040
975
dependencies:
1041
976
'@babel/helper-string-parser': 7.27.1
1042
-
'@babel/helper-validator-identifier': 7.27.1
977
+
'@babel/helper-validator-identifier': 7.28.5
1043
978
1044
979
'@docsearch/css@3.8.2': {}
1045
980
1046
-
'@docsearch/js@3.8.2(@algolia/client-search@5.28.0)(search-insights@2.17.3)':
981
+
'@docsearch/js@3.8.2(@algolia/client-search@5.46.0)(search-insights@2.17.3)':
1047
982
dependencies:
1048
-
'@docsearch/react': 3.8.2(@algolia/client-search@5.28.0)(search-insights@2.17.3)
1049
-
preact: 10.26.9
983
+
'@docsearch/react': 3.8.2(@algolia/client-search@5.46.0)(search-insights@2.17.3)
984
+
preact: 10.28.0
1050
985
transitivePeerDependencies:
1051
986
- '@algolia/client-search'
1052
987
- '@types/react'
···
1054
989
- react-dom
1055
990
- search-insights
1056
991
1057
-
'@docsearch/react@3.8.2(@algolia/client-search@5.28.0)(search-insights@2.17.3)':
992
+
'@docsearch/react@3.8.2(@algolia/client-search@5.46.0)(search-insights@2.17.3)':
1058
993
dependencies:
1059
-
'@algolia/autocomplete-core': 1.17.7(@algolia/client-search@5.28.0)(algoliasearch@5.28.0)(search-insights@2.17.3)
1060
-
'@algolia/autocomplete-preset-algolia': 1.17.7(@algolia/client-search@5.28.0)(algoliasearch@5.28.0)
994
+
'@algolia/autocomplete-core': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)(search-insights@2.17.3)
995
+
'@algolia/autocomplete-preset-algolia': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)
1061
996
'@docsearch/css': 3.8.2
1062
-
algoliasearch: 5.28.0
997
+
algoliasearch: 5.46.0
1063
998
optionalDependencies:
1064
999
search-insights: 2.17.3
1065
1000
transitivePeerDependencies:
···
1134
1069
'@esbuild/win32-x64@0.21.5':
1135
1070
optional: true
1136
1071
1137
-
'@giscus/vue@2.4.0(vue@3.5.17)':
1138
-
dependencies:
1139
-
giscus: 1.6.0
1140
-
vue: 3.5.17
1141
-
1142
-
'@iconify-json/logos@1.2.4':
1072
+
'@iconify-json/logos@1.2.10':
1143
1073
dependencies:
1144
1074
'@iconify/types': 2.0.0
1145
1075
1146
-
'@iconify-json/simple-icons@1.2.39':
1076
+
'@iconify-json/simple-icons@1.2.63':
1147
1077
dependencies:
1148
1078
'@iconify/types': 2.0.0
1149
1079
1150
-
'@iconify-json/vscode-icons@1.2.23':
1080
+
'@iconify-json/vscode-icons@1.2.37':
1151
1081
dependencies:
1152
1082
'@iconify/types': 2.0.0
1153
1083
1154
1084
'@iconify/types@2.0.0': {}
1155
1085
1156
-
'@iconify/utils@2.3.0':
1086
+
'@iconify/utils@3.1.0':
1157
1087
dependencies:
1158
1088
'@antfu/install-pkg': 1.1.0
1159
-
'@antfu/utils': 8.1.1
1160
1089
'@iconify/types': 2.0.0
1161
-
debug: 4.4.1
1162
-
globals: 15.15.0
1163
-
kolorist: 1.8.0
1164
-
local-pkg: 1.1.1
1165
-
mlly: 1.7.4
1166
-
transitivePeerDependencies:
1167
-
- supports-color
1090
+
mlly: 1.8.0
1168
1091
1169
-
'@jridgewell/sourcemap-codec@1.5.0': {}
1092
+
'@jridgewell/sourcemap-codec@1.5.5': {}
1170
1093
1171
-
'@lit-labs/ssr-dom-shim@1.3.0': {}
1094
+
'@rollup/rollup-android-arm-eabi@4.53.5':
1095
+
optional: true
1172
1096
1173
-
'@lit/reactive-element@2.1.0':
1174
-
dependencies:
1175
-
'@lit-labs/ssr-dom-shim': 1.3.0
1097
+
'@rollup/rollup-android-arm64@4.53.5':
1098
+
optional: true
1176
1099
1177
-
'@rollup/rollup-android-arm-eabi@4.43.0':
1100
+
'@rollup/rollup-darwin-arm64@4.53.5':
1178
1101
optional: true
1179
1102
1180
-
'@rollup/rollup-android-arm64@4.43.0':
1103
+
'@rollup/rollup-darwin-x64@4.53.5':
1181
1104
optional: true
1182
1105
1183
-
'@rollup/rollup-darwin-arm64@4.43.0':
1106
+
'@rollup/rollup-freebsd-arm64@4.53.5':
1184
1107
optional: true
1185
1108
1186
-
'@rollup/rollup-darwin-x64@4.43.0':
1109
+
'@rollup/rollup-freebsd-x64@4.53.5':
1187
1110
optional: true
1188
1111
1189
-
'@rollup/rollup-freebsd-arm64@4.43.0':
1112
+
'@rollup/rollup-linux-arm-gnueabihf@4.53.5':
1190
1113
optional: true
1191
1114
1192
-
'@rollup/rollup-freebsd-x64@4.43.0':
1115
+
'@rollup/rollup-linux-arm-musleabihf@4.53.5':
1193
1116
optional: true
1194
1117
1195
-
'@rollup/rollup-linux-arm-gnueabihf@4.43.0':
1118
+
'@rollup/rollup-linux-arm64-gnu@4.53.5':
1196
1119
optional: true
1197
1120
1198
-
'@rollup/rollup-linux-arm-musleabihf@4.43.0':
1121
+
'@rollup/rollup-linux-arm64-musl@4.53.5':
1199
1122
optional: true
1200
1123
1201
-
'@rollup/rollup-linux-arm64-gnu@4.43.0':
1124
+
'@rollup/rollup-linux-loong64-gnu@4.53.5':
1202
1125
optional: true
1203
1126
1204
-
'@rollup/rollup-linux-arm64-musl@4.43.0':
1127
+
'@rollup/rollup-linux-ppc64-gnu@4.53.5':
1205
1128
optional: true
1206
1129
1207
-
'@rollup/rollup-linux-loongarch64-gnu@4.43.0':
1130
+
'@rollup/rollup-linux-riscv64-gnu@4.53.5':
1208
1131
optional: true
1209
1132
1210
-
'@rollup/rollup-linux-powerpc64le-gnu@4.43.0':
1133
+
'@rollup/rollup-linux-riscv64-musl@4.53.5':
1211
1134
optional: true
1212
1135
1213
-
'@rollup/rollup-linux-riscv64-gnu@4.43.0':
1136
+
'@rollup/rollup-linux-s390x-gnu@4.53.5':
1214
1137
optional: true
1215
1138
1216
-
'@rollup/rollup-linux-riscv64-musl@4.43.0':
1139
+
'@rollup/rollup-linux-x64-gnu@4.53.5':
1217
1140
optional: true
1218
1141
1219
-
'@rollup/rollup-linux-s390x-gnu@4.43.0':
1142
+
'@rollup/rollup-linux-x64-musl@4.53.5':
1220
1143
optional: true
1221
1144
1222
-
'@rollup/rollup-linux-x64-gnu@4.43.0':
1145
+
'@rollup/rollup-openharmony-arm64@4.53.5':
1223
1146
optional: true
1224
1147
1225
-
'@rollup/rollup-linux-x64-musl@4.43.0':
1148
+
'@rollup/rollup-win32-arm64-msvc@4.53.5':
1226
1149
optional: true
1227
1150
1228
-
'@rollup/rollup-win32-arm64-msvc@4.43.0':
1151
+
'@rollup/rollup-win32-ia32-msvc@4.53.5':
1229
1152
optional: true
1230
1153
1231
-
'@rollup/rollup-win32-ia32-msvc@4.43.0':
1154
+
'@rollup/rollup-win32-x64-gnu@4.53.5':
1232
1155
optional: true
1233
1156
1234
-
'@rollup/rollup-win32-x64-msvc@4.43.0':
1157
+
'@rollup/rollup-win32-x64-msvc@4.53.5':
1235
1158
optional: true
1236
1159
1237
1160
'@shikijs/core@2.5.0':
···
1274
1197
1275
1198
'@shikijs/vscode-textmate@10.0.2': {}
1276
1199
1277
-
'@types/estree@1.0.7': {}
1200
+
'@types/estree@1.0.8': {}
1278
1201
1279
1202
'@types/hast@3.0.4':
1280
1203
dependencies:
···
1292
1215
'@types/unist': 3.0.3
1293
1216
1294
1217
'@types/mdurl@2.0.0': {}
1295
-
1296
-
'@types/trusted-types@2.0.7': {}
1297
1218
1298
1219
'@types/unist@3.0.3': {}
1299
1220
···
1301
1222
1302
1223
'@ungap/structured-clone@1.3.0': {}
1303
1224
1304
-
'@vitejs/plugin-vue@5.2.4(vite@5.4.19)(vue@3.5.17)':
1225
+
'@vitejs/plugin-vue@5.2.4(vite@5.4.21)(vue@3.5.26)':
1305
1226
dependencies:
1306
-
vite: 5.4.19
1307
-
vue: 3.5.17
1227
+
vite: 5.4.21
1228
+
vue: 3.5.26
1308
1229
1309
-
'@vue/compiler-core@3.5.17':
1230
+
'@vue/compiler-core@3.5.26':
1310
1231
dependencies:
1311
-
'@babel/parser': 7.27.5
1312
-
'@vue/shared': 3.5.17
1313
-
entities: 4.5.0
1232
+
'@babel/parser': 7.28.5
1233
+
'@vue/shared': 3.5.26
1234
+
entities: 7.0.0
1314
1235
estree-walker: 2.0.2
1315
1236
source-map-js: 1.2.1
1316
1237
1317
-
'@vue/compiler-dom@3.5.17':
1238
+
'@vue/compiler-dom@3.5.26':
1318
1239
dependencies:
1319
-
'@vue/compiler-core': 3.5.17
1320
-
'@vue/shared': 3.5.17
1240
+
'@vue/compiler-core': 3.5.26
1241
+
'@vue/shared': 3.5.26
1321
1242
1322
-
'@vue/compiler-sfc@3.5.17':
1243
+
'@vue/compiler-sfc@3.5.26':
1323
1244
dependencies:
1324
-
'@babel/parser': 7.27.5
1325
-
'@vue/compiler-core': 3.5.17
1326
-
'@vue/compiler-dom': 3.5.17
1327
-
'@vue/compiler-ssr': 3.5.17
1328
-
'@vue/shared': 3.5.17
1245
+
'@babel/parser': 7.28.5
1246
+
'@vue/compiler-core': 3.5.26
1247
+
'@vue/compiler-dom': 3.5.26
1248
+
'@vue/compiler-ssr': 3.5.26
1249
+
'@vue/shared': 3.5.26
1329
1250
estree-walker: 2.0.2
1330
-
magic-string: 0.30.17
1251
+
magic-string: 0.30.21
1331
1252
postcss: 8.5.6
1332
1253
source-map-js: 1.2.1
1333
1254
1334
-
'@vue/compiler-ssr@3.5.17':
1255
+
'@vue/compiler-ssr@3.5.26':
1335
1256
dependencies:
1336
-
'@vue/compiler-dom': 3.5.17
1337
-
'@vue/shared': 3.5.17
1257
+
'@vue/compiler-dom': 3.5.26
1258
+
'@vue/shared': 3.5.26
1338
1259
1339
-
'@vue/devtools-api@7.7.7':
1260
+
'@vue/devtools-api@7.7.9':
1340
1261
dependencies:
1341
-
'@vue/devtools-kit': 7.7.7
1262
+
'@vue/devtools-kit': 7.7.9
1342
1263
1343
-
'@vue/devtools-kit@7.7.7':
1264
+
'@vue/devtools-kit@7.7.9':
1344
1265
dependencies:
1345
-
'@vue/devtools-shared': 7.7.7
1346
-
birpc: 2.4.0
1266
+
'@vue/devtools-shared': 7.7.9
1267
+
birpc: 2.9.0
1347
1268
hookable: 5.5.3
1348
1269
mitt: 3.0.1
1349
1270
perfect-debounce: 1.0.0
1350
1271
speakingurl: 14.0.1
1351
-
superjson: 2.2.2
1272
+
superjson: 2.2.6
1352
1273
1353
-
'@vue/devtools-shared@7.7.7':
1274
+
'@vue/devtools-shared@7.7.9':
1354
1275
dependencies:
1355
1276
rfdc: 1.4.1
1356
1277
1357
-
'@vue/reactivity@3.5.17':
1278
+
'@vue/reactivity@3.5.26':
1358
1279
dependencies:
1359
-
'@vue/shared': 3.5.17
1280
+
'@vue/shared': 3.5.26
1360
1281
1361
-
'@vue/runtime-core@3.5.17':
1282
+
'@vue/runtime-core@3.5.26':
1362
1283
dependencies:
1363
-
'@vue/reactivity': 3.5.17
1364
-
'@vue/shared': 3.5.17
1284
+
'@vue/reactivity': 3.5.26
1285
+
'@vue/shared': 3.5.26
1365
1286
1366
-
'@vue/runtime-dom@3.5.17':
1287
+
'@vue/runtime-dom@3.5.26':
1367
1288
dependencies:
1368
-
'@vue/reactivity': 3.5.17
1369
-
'@vue/runtime-core': 3.5.17
1370
-
'@vue/shared': 3.5.17
1371
-
csstype: 3.1.3
1289
+
'@vue/reactivity': 3.5.26
1290
+
'@vue/runtime-core': 3.5.26
1291
+
'@vue/shared': 3.5.26
1292
+
csstype: 3.2.3
1372
1293
1373
-
'@vue/server-renderer@3.5.17(vue@3.5.17)':
1294
+
'@vue/server-renderer@3.5.26(vue@3.5.26)':
1374
1295
dependencies:
1375
-
'@vue/compiler-ssr': 3.5.17
1376
-
'@vue/shared': 3.5.17
1377
-
vue: 3.5.17
1296
+
'@vue/compiler-ssr': 3.5.26
1297
+
'@vue/shared': 3.5.26
1298
+
vue: 3.5.26
1378
1299
1379
-
'@vue/shared@3.5.17': {}
1300
+
'@vue/shared@3.5.25': {}
1301
+
1302
+
'@vue/shared@3.5.26': {}
1380
1303
1381
1304
'@vueuse/core@12.8.2':
1382
1305
dependencies:
1383
1306
'@types/web-bluetooth': 0.0.21
1384
1307
'@vueuse/metadata': 12.8.2
1385
1308
'@vueuse/shared': 12.8.2
1386
-
vue: 3.5.17
1309
+
vue: 3.5.26
1387
1310
transitivePeerDependencies:
1388
1311
- typescript
1389
1312
1390
-
'@vueuse/integrations@12.8.2(focus-trap@7.6.5)':
1313
+
'@vueuse/integrations@12.8.2(focus-trap@7.6.6)':
1391
1314
dependencies:
1392
1315
'@vueuse/core': 12.8.2
1393
1316
'@vueuse/shared': 12.8.2
1394
-
vue: 3.5.17
1317
+
vue: 3.5.26
1395
1318
optionalDependencies:
1396
-
focus-trap: 7.6.5
1319
+
focus-trap: 7.6.6
1397
1320
transitivePeerDependencies:
1398
1321
- typescript
1399
1322
···
1401
1324
1402
1325
'@vueuse/shared@12.8.2':
1403
1326
dependencies:
1404
-
vue: 3.5.17
1327
+
vue: 3.5.26
1405
1328
transitivePeerDependencies:
1406
1329
- typescript
1407
1330
1408
1331
acorn@8.15.0: {}
1409
1332
1410
-
algoliasearch@5.28.0:
1333
+
algoliasearch@5.46.0:
1411
1334
dependencies:
1412
-
'@algolia/client-abtesting': 5.28.0
1413
-
'@algolia/client-analytics': 5.28.0
1414
-
'@algolia/client-common': 5.28.0
1415
-
'@algolia/client-insights': 5.28.0
1416
-
'@algolia/client-personalization': 5.28.0
1417
-
'@algolia/client-query-suggestions': 5.28.0
1418
-
'@algolia/client-search': 5.28.0
1419
-
'@algolia/ingestion': 1.28.0
1420
-
'@algolia/monitoring': 1.28.0
1421
-
'@algolia/recommend': 5.28.0
1422
-
'@algolia/requester-browser-xhr': 5.28.0
1423
-
'@algolia/requester-fetch': 5.28.0
1424
-
'@algolia/requester-node-http': 5.28.0
1335
+
'@algolia/abtesting': 1.12.0
1336
+
'@algolia/client-abtesting': 5.46.0
1337
+
'@algolia/client-analytics': 5.46.0
1338
+
'@algolia/client-common': 5.46.0
1339
+
'@algolia/client-insights': 5.46.0
1340
+
'@algolia/client-personalization': 5.46.0
1341
+
'@algolia/client-query-suggestions': 5.46.0
1342
+
'@algolia/client-search': 5.46.0
1343
+
'@algolia/ingestion': 1.46.0
1344
+
'@algolia/monitoring': 1.46.0
1345
+
'@algolia/recommend': 5.46.0
1346
+
'@algolia/requester-browser-xhr': 5.46.0
1347
+
'@algolia/requester-fetch': 5.46.0
1348
+
'@algolia/requester-node-http': 5.46.0
1425
1349
1426
-
argparse@2.0.1: {}
1427
-
1428
-
birpc@2.4.0: {}
1350
+
birpc@2.9.0: {}
1429
1351
1430
1352
ccount@2.0.1: {}
1431
1353
···
1437
1359
1438
1360
confbox@0.1.8: {}
1439
1361
1440
-
confbox@0.2.2: {}
1441
-
1442
-
copy-anything@3.0.5:
1362
+
copy-anything@4.0.5:
1443
1363
dependencies:
1444
-
is-what: 4.1.16
1364
+
is-what: 5.5.0
1445
1365
1446
-
csstype@3.1.3: {}
1447
-
1448
-
debug@4.4.1:
1449
-
dependencies:
1450
-
ms: 2.1.3
1366
+
csstype@3.2.3: {}
1451
1367
1452
1368
dequal@2.0.3: {}
1453
1369
···
1457
1373
1458
1374
emoji-regex-xs@1.0.0: {}
1459
1375
1460
-
entities@4.5.0: {}
1376
+
entities@7.0.0: {}
1461
1377
1462
1378
esbuild@0.21.5:
1463
1379
optionalDependencies:
···
1487
1403
1488
1404
estree-walker@2.0.2: {}
1489
1405
1490
-
exsolve@1.0.6: {}
1491
-
1492
-
focus-trap@7.6.5:
1406
+
focus-trap@7.6.6:
1493
1407
dependencies:
1494
-
tabbable: 6.2.0
1408
+
tabbable: 6.3.0
1495
1409
1496
1410
fsevents@2.3.3:
1497
1411
optional: true
1498
-
1499
-
giscus@1.6.0:
1500
-
dependencies:
1501
-
lit: 3.3.0
1502
-
1503
-
globals@15.15.0: {}
1504
1412
1505
1413
hast-util-to-html@9.0.5:
1506
1414
dependencies:
···
1510
1418
comma-separated-tokens: 2.0.3
1511
1419
hast-util-whitespace: 3.0.0
1512
1420
html-void-elements: 3.0.0
1513
-
mdast-util-to-hast: 13.2.0
1421
+
mdast-util-to-hast: 13.2.1
1514
1422
property-information: 7.1.0
1515
1423
space-separated-tokens: 2.0.2
1516
1424
stringify-entities: 4.0.4
···
1524
1432
1525
1433
html-void-elements@3.0.0: {}
1526
1434
1527
-
is-what@4.1.16: {}
1528
-
1529
-
kolorist@1.8.0: {}
1435
+
is-what@5.5.0: {}
1530
1436
1531
-
linkify-it@5.0.0:
1437
+
magic-string@0.30.21:
1532
1438
dependencies:
1533
-
uc.micro: 2.1.0
1534
-
1535
-
lit-element@4.2.0:
1536
-
dependencies:
1537
-
'@lit-labs/ssr-dom-shim': 1.3.0
1538
-
'@lit/reactive-element': 2.1.0
1539
-
lit-html: 3.3.0
1540
-
1541
-
lit-html@3.3.0:
1542
-
dependencies:
1543
-
'@types/trusted-types': 2.0.7
1544
-
1545
-
lit@3.3.0:
1546
-
dependencies:
1547
-
'@lit/reactive-element': 2.1.0
1548
-
lit-element: 4.2.0
1549
-
lit-html: 3.3.0
1550
-
1551
-
local-pkg@1.1.1:
1552
-
dependencies:
1553
-
mlly: 1.7.4
1554
-
pkg-types: 2.1.0
1555
-
quansync: 0.2.10
1556
-
1557
-
magic-string@0.30.17:
1558
-
dependencies:
1559
-
'@jridgewell/sourcemap-codec': 1.5.0
1439
+
'@jridgewell/sourcemap-codec': 1.5.5
1560
1440
1561
1441
mark.js@8.11.1: {}
1562
1442
1563
1443
markdown-it-footnote@4.0.0: {}
1564
1444
1565
-
markdown-it@14.1.0:
1566
-
dependencies:
1567
-
argparse: 2.0.1
1568
-
entities: 4.5.0
1569
-
linkify-it: 5.0.0
1570
-
mdurl: 2.0.0
1571
-
punycode.js: 2.3.1
1572
-
uc.micro: 2.1.0
1573
-
1574
-
mdast-util-to-hast@13.2.0:
1445
+
mdast-util-to-hast@13.2.1:
1575
1446
dependencies:
1576
1447
'@types/hast': 3.0.4
1577
1448
'@types/mdast': 4.0.4
···
1582
1453
unist-util-position: 5.0.0
1583
1454
unist-util-visit: 5.0.0
1584
1455
vfile: 6.0.3
1585
-
1586
-
mdurl@2.0.0: {}
1587
1456
1588
1457
micromark-util-character@2.1.1:
1589
1458
dependencies:
···
1602
1471
1603
1472
micromark-util-types@2.0.2: {}
1604
1473
1605
-
minisearch@7.1.2: {}
1474
+
minisearch@7.2.0: {}
1606
1475
1607
1476
mitt@3.0.1: {}
1608
1477
1609
-
mlly@1.7.4:
1478
+
mlly@1.8.0:
1610
1479
dependencies:
1611
1480
acorn: 8.15.0
1612
1481
pathe: 2.0.3
1613
1482
pkg-types: 1.3.1
1614
1483
ufo: 1.6.1
1615
1484
1616
-
ms@2.1.3: {}
1617
-
1618
1485
nanoid@3.3.11: {}
1619
1486
1620
1487
oniguruma-to-es@3.1.1:
1621
1488
dependencies:
1622
1489
emoji-regex-xs: 1.0.0
1623
-
regex: 6.0.1
1490
+
regex: 6.1.0
1624
1491
regex-recursion: 6.0.2
1625
1492
1626
-
package-manager-detector@1.3.0: {}
1493
+
package-manager-detector@1.6.0: {}
1627
1494
1628
1495
pathe@2.0.3: {}
1629
1496
···
1634
1501
pkg-types@1.3.1:
1635
1502
dependencies:
1636
1503
confbox: 0.1.8
1637
-
mlly: 1.7.4
1638
-
pathe: 2.0.3
1639
-
1640
-
pkg-types@2.1.0:
1641
-
dependencies:
1642
-
confbox: 0.2.2
1643
-
exsolve: 1.0.6
1504
+
mlly: 1.8.0
1644
1505
pathe: 2.0.3
1645
1506
1646
1507
postcss@8.5.6:
···
1649
1510
picocolors: 1.1.1
1650
1511
source-map-js: 1.2.1
1651
1512
1652
-
preact@10.26.9: {}
1513
+
preact@10.28.0: {}
1653
1514
1654
1515
property-information@7.1.0: {}
1655
-
1656
-
punycode.js@2.3.1: {}
1657
-
1658
-
quansync@0.2.10: {}
1659
1516
1660
1517
regex-recursion@6.0.2:
1661
1518
dependencies:
···
1663
1520
1664
1521
regex-utilities@2.3.0: {}
1665
1522
1666
-
regex@6.0.1:
1523
+
regex@6.1.0:
1667
1524
dependencies:
1668
1525
regex-utilities: 2.3.0
1669
1526
1670
1527
rfdc@1.4.1: {}
1671
1528
1672
-
rollup@4.43.0:
1529
+
rollup@4.53.5:
1673
1530
dependencies:
1674
-
'@types/estree': 1.0.7
1531
+
'@types/estree': 1.0.8
1675
1532
optionalDependencies:
1676
-
'@rollup/rollup-android-arm-eabi': 4.43.0
1677
-
'@rollup/rollup-android-arm64': 4.43.0
1678
-
'@rollup/rollup-darwin-arm64': 4.43.0
1679
-
'@rollup/rollup-darwin-x64': 4.43.0
1680
-
'@rollup/rollup-freebsd-arm64': 4.43.0
1681
-
'@rollup/rollup-freebsd-x64': 4.43.0
1682
-
'@rollup/rollup-linux-arm-gnueabihf': 4.43.0
1683
-
'@rollup/rollup-linux-arm-musleabihf': 4.43.0
1684
-
'@rollup/rollup-linux-arm64-gnu': 4.43.0
1685
-
'@rollup/rollup-linux-arm64-musl': 4.43.0
1686
-
'@rollup/rollup-linux-loongarch64-gnu': 4.43.0
1687
-
'@rollup/rollup-linux-powerpc64le-gnu': 4.43.0
1688
-
'@rollup/rollup-linux-riscv64-gnu': 4.43.0
1689
-
'@rollup/rollup-linux-riscv64-musl': 4.43.0
1690
-
'@rollup/rollup-linux-s390x-gnu': 4.43.0
1691
-
'@rollup/rollup-linux-x64-gnu': 4.43.0
1692
-
'@rollup/rollup-linux-x64-musl': 4.43.0
1693
-
'@rollup/rollup-win32-arm64-msvc': 4.43.0
1694
-
'@rollup/rollup-win32-ia32-msvc': 4.43.0
1695
-
'@rollup/rollup-win32-x64-msvc': 4.43.0
1533
+
'@rollup/rollup-android-arm-eabi': 4.53.5
1534
+
'@rollup/rollup-android-arm64': 4.53.5
1535
+
'@rollup/rollup-darwin-arm64': 4.53.5
1536
+
'@rollup/rollup-darwin-x64': 4.53.5
1537
+
'@rollup/rollup-freebsd-arm64': 4.53.5
1538
+
'@rollup/rollup-freebsd-x64': 4.53.5
1539
+
'@rollup/rollup-linux-arm-gnueabihf': 4.53.5
1540
+
'@rollup/rollup-linux-arm-musleabihf': 4.53.5
1541
+
'@rollup/rollup-linux-arm64-gnu': 4.53.5
1542
+
'@rollup/rollup-linux-arm64-musl': 4.53.5
1543
+
'@rollup/rollup-linux-loong64-gnu': 4.53.5
1544
+
'@rollup/rollup-linux-ppc64-gnu': 4.53.5
1545
+
'@rollup/rollup-linux-riscv64-gnu': 4.53.5
1546
+
'@rollup/rollup-linux-riscv64-musl': 4.53.5
1547
+
'@rollup/rollup-linux-s390x-gnu': 4.53.5
1548
+
'@rollup/rollup-linux-x64-gnu': 4.53.5
1549
+
'@rollup/rollup-linux-x64-musl': 4.53.5
1550
+
'@rollup/rollup-openharmony-arm64': 4.53.5
1551
+
'@rollup/rollup-win32-arm64-msvc': 4.53.5
1552
+
'@rollup/rollup-win32-ia32-msvc': 4.53.5
1553
+
'@rollup/rollup-win32-x64-gnu': 4.53.5
1554
+
'@rollup/rollup-win32-x64-msvc': 4.53.5
1696
1555
fsevents: 2.3.3
1697
1556
1698
1557
search-insights@2.17.3: {}
···
1719
1578
character-entities-html4: 2.1.0
1720
1579
character-entities-legacy: 3.0.0
1721
1580
1722
-
superjson@2.2.2:
1581
+
superjson@2.2.6:
1723
1582
dependencies:
1724
-
copy-anything: 3.0.5
1583
+
copy-anything: 4.0.5
1725
1584
1726
-
tabbable@6.2.0: {}
1585
+
tabbable@6.3.0: {}
1727
1586
1728
-
tinyexec@1.0.1: {}
1587
+
tinyexec@1.0.2: {}
1729
1588
1730
1589
trim-lines@3.0.1: {}
1731
1590
1732
-
uc.micro@2.1.0: {}
1733
-
1734
1591
ufo@1.6.1: {}
1735
1592
1736
-
unist-util-is@6.0.0:
1593
+
unist-util-is@6.0.1:
1737
1594
dependencies:
1738
1595
'@types/unist': 3.0.3
1739
1596
···
1745
1602
dependencies:
1746
1603
'@types/unist': 3.0.3
1747
1604
1748
-
unist-util-visit-parents@6.0.1:
1605
+
unist-util-visit-parents@6.0.2:
1749
1606
dependencies:
1750
1607
'@types/unist': 3.0.3
1751
-
unist-util-is: 6.0.0
1608
+
unist-util-is: 6.0.1
1752
1609
1753
1610
unist-util-visit@5.0.0:
1754
1611
dependencies:
1755
1612
'@types/unist': 3.0.3
1756
-
unist-util-is: 6.0.0
1757
-
unist-util-visit-parents: 6.0.1
1613
+
unist-util-is: 6.0.1
1614
+
unist-util-visit-parents: 6.0.2
1758
1615
1759
-
vfile-message@4.0.2:
1616
+
vfile-message@4.0.3:
1760
1617
dependencies:
1761
1618
'@types/unist': 3.0.3
1762
1619
unist-util-stringify-position: 4.0.0
···
1764
1621
vfile@6.0.3:
1765
1622
dependencies:
1766
1623
'@types/unist': 3.0.3
1767
-
vfile-message: 4.0.2
1624
+
vfile-message: 4.0.3
1768
1625
1769
-
vite@5.4.19:
1626
+
vite@5.4.21:
1770
1627
dependencies:
1771
1628
esbuild: 0.21.5
1772
1629
postcss: 8.5.6
1773
-
rollup: 4.43.0
1630
+
rollup: 4.53.5
1774
1631
optionalDependencies:
1775
1632
fsevents: 2.3.3
1776
1633
1777
-
vitepress-plugin-comment-with-giscus@1.1.15(vue@3.5.17):
1634
+
vitepress-plugin-group-icons@1.6.5(vite@5.4.21):
1778
1635
dependencies:
1779
-
'@giscus/vue': 2.4.0(vue@3.5.17)
1780
-
transitivePeerDependencies:
1781
-
- vue
1782
-
1783
-
vitepress-plugin-group-icons@1.6.0(markdown-it@14.1.0)(vite@5.4.19):
1784
-
dependencies:
1785
-
'@iconify-json/logos': 1.2.4
1786
-
'@iconify-json/vscode-icons': 1.2.23
1787
-
'@iconify/utils': 2.3.0
1788
-
markdown-it: 14.1.0
1789
-
vite: 5.4.19
1790
-
transitivePeerDependencies:
1791
-
- supports-color
1636
+
'@iconify-json/logos': 1.2.10
1637
+
'@iconify-json/vscode-icons': 1.2.37
1638
+
'@iconify/utils': 3.1.0
1639
+
optionalDependencies:
1640
+
vite: 5.4.21
1792
1641
1793
-
vitepress@1.6.3(@algolia/client-search@5.28.0)(postcss@8.5.6)(search-insights@2.17.3):
1642
+
vitepress@1.6.4(@algolia/client-search@5.46.0)(postcss@8.5.6)(search-insights@2.17.3):
1794
1643
dependencies:
1795
1644
'@docsearch/css': 3.8.2
1796
-
'@docsearch/js': 3.8.2(@algolia/client-search@5.28.0)(search-insights@2.17.3)
1797
-
'@iconify-json/simple-icons': 1.2.39
1645
+
'@docsearch/js': 3.8.2(@algolia/client-search@5.46.0)(search-insights@2.17.3)
1646
+
'@iconify-json/simple-icons': 1.2.63
1798
1647
'@shikijs/core': 2.5.0
1799
1648
'@shikijs/transformers': 2.5.0
1800
1649
'@shikijs/types': 2.5.0
1801
1650
'@types/markdown-it': 14.1.2
1802
-
'@vitejs/plugin-vue': 5.2.4(vite@5.4.19)(vue@3.5.17)
1803
-
'@vue/devtools-api': 7.7.7
1804
-
'@vue/shared': 3.5.17
1651
+
'@vitejs/plugin-vue': 5.2.4(vite@5.4.21)(vue@3.5.26)
1652
+
'@vue/devtools-api': 7.7.9
1653
+
'@vue/shared': 3.5.25
1805
1654
'@vueuse/core': 12.8.2
1806
-
'@vueuse/integrations': 12.8.2(focus-trap@7.6.5)
1807
-
focus-trap: 7.6.5
1655
+
'@vueuse/integrations': 12.8.2(focus-trap@7.6.6)
1656
+
focus-trap: 7.6.6
1808
1657
mark.js: 8.11.1
1809
-
minisearch: 7.1.2
1658
+
minisearch: 7.2.0
1810
1659
shiki: 2.5.0
1811
-
vite: 5.4.19
1812
-
vue: 3.5.17
1660
+
vite: 5.4.21
1661
+
vue: 3.5.26
1813
1662
optionalDependencies:
1814
1663
postcss: 8.5.6
1815
1664
transitivePeerDependencies:
···
1839
1688
- typescript
1840
1689
- universal-cookie
1841
1690
1842
-
vue@3.5.17:
1691
+
vue@3.5.26:
1843
1692
dependencies:
1844
-
'@vue/compiler-dom': 3.5.17
1845
-
'@vue/compiler-sfc': 3.5.17
1846
-
'@vue/runtime-dom': 3.5.17
1847
-
'@vue/server-renderer': 3.5.17(vue@3.5.17)
1848
-
'@vue/shared': 3.5.17
1693
+
'@vue/compiler-dom': 3.5.26
1694
+
'@vue/compiler-sfc': 3.5.26
1695
+
'@vue/runtime-dom': 3.5.26
1696
+
'@vue/server-renderer': 3.5.26(vue@3.5.26)
1697
+
'@vue/shared': 3.5.26
1849
1698
1850
1699
zwitch@2.0.4: {}
+9
doc/reference/errors.md
+9
doc/reference/errors.md
+64
-9
doc/reference/meta.md
+64
-9
doc/reference/meta.md
···
1
1
---
2
2
comment: true
3
3
title: Meta Options
4
-
description: Wire hive meta options.
4
+
description: wire hive meta options.
5
5
---
6
6
7
-
# {{ $frontmatter.title }}
7
+
# Meta Options
8
8
9
9
{{ $frontmatter.description }}
10
10
11
11
## meta.nixpkgs
12
12
13
-
Tells wire how to get `nixpkgs`. You can omit this option in a flake to
14
-
default to `inputs.nixpkgs`.
13
+
Tells wire how to get `nixpkgs`.
15
14
16
15
_Type:_ A path or an instance of `nixpkgs`.
17
16
18
-
_Default (`hive.nix`):_ `null`
19
-
20
-
_Default (flake attribute):_ `inputs.nixpkgs.outPath`
17
+
_Default:_ `null`
21
18
22
19
_Examples:_
23
20
···
27
24
28
25
meta.nixpkgs = <nixpkgs>;
29
26
30
-
meta.nixpkgs = import <nixpkgs> {};
27
+
meta.nixpkgs = import <nixpkgs> { };
28
+
29
+
meta.nixpkgs = import sources.nixpkgs { };
30
+
31
+
meta.nixpkgs = inputs.nixpkgs.outPath;
31
32
32
33
meta.nixpkgs = inputs.other-nixpkgs.outPath;
33
34
}
···
39
40
40
41
::: tip
41
42
42
-
Wire always passes `name` (name of the node)
43
+
wire always passes `name` (name of the node)
43
44
and `nodes` (attribute set of all nodes) as args, even if `meta.specialArgs =
44
45
{ }`.
45
46
···
59
60
};
60
61
}
61
62
```
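Since wire passes `name`, `nodes`, and your `specialArgs` to each node as ordinary module arguments, a node module can simply take them in its argument set. A minimal illustrative sketch (the node body here is hypothetical, not part of the example above):

```nix
{
  node-a =
    { name, nodes, ... }:
    {
      # `name` is this node's attribute name in the hive,
      # `nodes` is the attribute set of all nodes.
      networking.hostName = name;
    };
}
```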
63
+
64
+
## meta.nodeSpecialArgs
65
+
66
+
Extra `specialArgs` to override `meta.specialArgs` for each node.
67
+
68
+
_Type:_ attribute set of attribute sets
69
+
70
+
_Default:_ `{ }`
71
+
72
+
_Example:_
73
+
74
+
```nix
75
+
{
76
+
meta.nodeSpecialArgs = {
77
+
node-a = {
  extra-property = "some-value";
};
78
+
};
79
+
}
80
+
```
81
+
82
+
## meta.nodeNixpkgs
83
+
84
+
Per-node nixpkgs to override `meta.nixpkgs`.
85
+
86
+
See `meta.nixpkgs` examples for possible values.
87
+
88
+
_Type:_ attribute set of paths or instances of `nixpkgs`
89
+
90
+
_Default:_ `{ }`
91
+
92
+
_Example:_
93
+
94
+
```nix
95
+
{
96
+
meta = {
97
+
nixpkgs = import <nixpkgs> { };
98
+
99
+
nodeNixpkgs = {
100
+
node-b = import <special-nixpkgs> { };
101
+
};
102
+
};
103
+
104
+
node-a =
105
+
{ pkgs, ... }:
106
+
{
107
+
# uses <nixpkgs> (meta.nixpkgs)
108
+
};
109
+
110
+
node-b =
111
+
{ pkgs, ... }:
112
+
{
113
+
# uses <special-nixpkgs> (meta.nodeNixpkgs.node-b)
114
+
};
115
+
}
116
+
```
-3
doc/snippets/default.nix
-3
doc/snippets/default.nix
···
1
1
{
2
-
getting-started-hm = import ./getting-started/home.nix;
3
-
getting-started-hm-flake = import ./getting-started/hm.flake.nix;
4
2
getting-started-nixos = import ./getting-started/configuration.nix;
5
3
getting-started-nixos-flake = import ./getting-started/nixos.flake.nix;
6
-
getting-started-cache = import ./getting-started/cache.nix;
7
4
}
-12
doc/snippets/getting-started/cache.nix
-12
doc/snippets/getting-started/cache.nix
+3
-2
doc/snippets/getting-started/configuration.nix
+3
-2
doc/snippets/getting-started/configuration.nix
···
1
1
{system, ...}: let
2
-
wire = import ( # [!code ++]
3
-
builtins.fetchTarball "https://github.com/wires-org/wire/archive/refs/heads/main.tar.gz" # [!code ++]
2
+
wire = import (
3
+
# [!code ++]
4
+
builtins.fetchTarball "https://github.com/forallsys/wire/archive/refs/heads/trunk.tar.gz" # [!code ++]
4
5
); # [!code ++]
5
6
in {
6
7
environment.systemPackages = [
+54
doc/snippets/getting-started/flake-merged.nix
+54
doc/snippets/getting-started/flake-merged.nix
···
1
+
{
2
+
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
3
+
inputs.wire.url = "github:forallsys/wire";
4
+
5
+
outputs = {
6
+
self,
7
+
nixpkgs,
8
+
wire,
9
+
...
10
+
} @ inputs: {
11
+
wire = wire.makeHive {
12
+
# Give wire our nixosConfigurations
13
+
inherit (self) nixosConfigurations;
14
+
15
+
meta = {
16
+
nixpkgs = import nixpkgs {localSystem = "x86_64-linux";};
17
+
};
18
+
19
+
node-a.deployment = {
20
+
tags = [
21
+
# some tags
22
+
];
23
+
24
+
# ...
25
+
};
26
+
};
27
+
28
+
nixosConfigurations = {
29
+
node-a = nixpkgs.lib.nixosSystem {
30
+
system = "x86_64-linux";
31
+
specialArgs = {inherit inputs;};
32
+
modules = [
33
+
wire.nixosModules.default
34
+
{
35
+
nixpkgs.hostPlatform = "x86_64-linux";
36
+
37
+
# you can put deployment options here too!
38
+
deployment.target = "some-hostname";
39
+
}
40
+
];
41
+
};
42
+
43
+
some-other-host = nixpkgs.lib.nixosSystem {
44
+
system = "x86_64-linux";
45
+
specialArgs = {inherit inputs;};
46
+
modules = [
47
+
{
48
+
nixpkgs.hostPlatform = "x86_64-linux";
49
+
}
50
+
];
51
+
};
52
+
};
53
+
};
54
+
}
+31
doc/snippets/getting-started/flake.nix
+31
doc/snippets/getting-started/flake.nix
···
1
+
{
2
+
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
3
+
inputs.wire.url = "github:forallsys/wire";
4
+
5
+
outputs = inputs @ {
6
+
nixpkgs,
7
+
wire,
8
+
...
9
+
}: {
10
+
wire = wire.makeHive {
11
+
meta = {
12
+
nixpkgs = import nixpkgs {
13
+
localSystem = "x86_64-linux";
14
+
};
15
+
specialArgs = {
16
+
inherit inputs;
17
+
};
18
+
};
19
+
20
+
defaults = {
21
+
# ...
22
+
};
23
+
24
+
node-a = {
25
+
nixpkgs.hostPlatform = "x86_64-linux";
26
+
27
+
# ...
28
+
};
29
+
};
30
+
};
31
+
}
-34
doc/snippets/getting-started/hm.flake.nix
-34
doc/snippets/getting-started/hm.flake.nix
···
1
-
{
2
-
inputs = {
3
-
# ...
4
-
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
5
-
home-manager = {
6
-
url = "github:nix-community/home-manager";
7
-
inputs.nixpkgs.follows = "nixpkgs";
8
-
};
9
-
wire.url = "github:wires-org/wire"; # [!code ++]
10
-
};
11
-
12
-
outputs = {
13
-
# ...
14
-
nixpkgs,
15
-
home-manager,
16
-
wire, # [!code ++]
17
-
...
18
-
}: let
19
-
system = "x86_64-linux";
20
-
pkgs = nixpkgs.legacyPackages.${system};
21
-
in {
22
-
homeConfigurations.my-user = home-manager.lib.homeManagerConfiguration {
23
-
inherit pkgs;
24
-
modules = [
25
-
# ...
26
-
{
27
-
home.packages = [
28
-
wire.packages.${system}.wire # [!code ++]
29
-
];
30
-
}
31
-
];
32
-
};
33
-
};
34
-
}
-11
doc/snippets/getting-started/home.nix
-11
doc/snippets/getting-started/home.nix
-2
doc/snippets/getting-started/nix.conf
-2
doc/snippets/getting-started/nix.conf
+1
-1
doc/snippets/getting-started/nixos.flake.nix
+1
-1
doc/snippets/getting-started/nixos.flake.nix
+40
doc/snippets/guides/example-action.yml
+40
doc/snippets/guides/example-action.yml
···
1
+
name: Build
2
+
3
+
on:
4
+
push:
5
+
branches: [main]
6
+
7
+
jobs:
8
+
build-partitioned:
9
+
name: Build Partitioned
10
+
runs-on: ubuntu-latest
11
+
permissions: {}
12
+
strategy:
13
+
matrix:
14
+
# Break into 4 partitions
15
+
partition: [1, 2, 3, 4]
16
+
steps:
17
+
- uses: actions/checkout@v6
18
+
with:
19
+
persist-credentials: false
20
+
# This will likely be required if you have multiple architectures
21
+
# in your hive.
22
+
- name: Set up QEMU
23
+
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130
24
+
- uses: cachix/install-nix-action@4e002c8ec80594ecd40e759629461e26c8abed15
25
+
with:
26
+
nix_path: nixpkgs=channel:nixos-unstable
27
+
extra_nix_config: |
28
+
# Install binary cache as described in the install wire guide
29
+
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= cache.garnix.io:CTFPyKSLcx5RMJKfLo5EEPUObbA78b0YQ2DTCJXqr9g=
30
+
substituters = https://cache.nixos.org/ https://cache.garnix.io
31
+
32
+
# Again, include additional architectures if you have multiple
33
+
# architectures in your hive
34
+
extra-platforms = aarch64-linux i686-linux
35
+
# Uses wire from your shell (as described in the install wire guide).
36
+
- name: Build partition ${{ matrix.partition }}
37
+
run: |
  nix develop -Lvc wire \
38
+
build \
39
+
--parallel 1 \
40
+
--partition ${{ matrix.partition }}/4
+344
doc/snippets/guides/installation/flake.lock
+344
doc/snippets/guides/installation/flake.lock
···
1
+
{
2
+
"nodes": {
3
+
"crane": {
4
+
"locked": {
5
+
"lastModified": 1759893430,
6
+
"narHash": "sha256-yAy4otLYm9iZ+NtQwTMEbqHwswSFUbhn7x826RR6djw=",
7
+
"owner": "ipetkov",
8
+
"repo": "crane",
9
+
"rev": "1979a2524cb8c801520bd94c38bb3d5692419d93",
10
+
"type": "github"
11
+
},
12
+
"original": {
13
+
"owner": "ipetkov",
14
+
"repo": "crane",
15
+
"type": "github"
16
+
}
17
+
},
18
+
"fenix": {
19
+
"inputs": {
20
+
"nixpkgs": [
21
+
"wire",
22
+
"nixpkgs"
23
+
],
24
+
"rust-analyzer-src": "rust-analyzer-src"
25
+
},
26
+
"locked": {
27
+
"lastModified": 1760424233,
28
+
"narHash": "sha256-8jLfVik1ccwmacVW5BlprmsuK534rT5HjdPhkSaew44=",
29
+
"owner": "nix-community",
30
+
"repo": "fenix",
31
+
"rev": "48a763cdc0b2d07199a021de99c2ca50af76e49f",
32
+
"type": "github"
33
+
},
34
+
"original": {
35
+
"owner": "nix-community",
36
+
"repo": "fenix",
37
+
"type": "github"
38
+
}
39
+
},
40
+
"flake-compat": {
41
+
"locked": {
42
+
"lastModified": 1747046372,
43
+
"narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=",
44
+
"owner": "edolstra",
45
+
"repo": "flake-compat",
46
+
"rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
47
+
"type": "github"
48
+
},
49
+
"original": {
50
+
"owner": "edolstra",
51
+
"repo": "flake-compat",
52
+
"type": "github"
53
+
}
54
+
},
55
+
"flake-compat_2": {
56
+
"flake": false,
57
+
"locked": {
58
+
"lastModified": 1747046372,
59
+
"narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=",
60
+
"owner": "edolstra",
61
+
"repo": "flake-compat",
62
+
"rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
63
+
"type": "github"
64
+
},
65
+
"original": {
66
+
"owner": "edolstra",
67
+
"repo": "flake-compat",
68
+
"type": "github"
69
+
}
70
+
},
71
+
"flake-parts": {
72
+
"inputs": {
73
+
"nixpkgs-lib": "nixpkgs-lib"
74
+
},
75
+
"locked": {
76
+
"lastModified": 1759362264,
77
+
"narHash": "sha256-wfG0S7pltlYyZTM+qqlhJ7GMw2fTF4mLKCIVhLii/4M=",
78
+
"owner": "hercules-ci",
79
+
"repo": "flake-parts",
80
+
"rev": "758cf7296bee11f1706a574c77d072b8a7baa881",
81
+
"type": "github"
82
+
},
83
+
"original": {
84
+
"owner": "hercules-ci",
85
+
"repo": "flake-parts",
86
+
"type": "github"
87
+
}
88
+
},
89
+
"git-hooks": {
90
+
"inputs": {
91
+
"flake-compat": "flake-compat_2",
92
+
"gitignore": "gitignore",
93
+
"nixpkgs": "nixpkgs_2"
94
+
},
95
+
"locked": {
96
+
"lastModified": 1760392170,
97
+
"narHash": "sha256-WftxJgr2MeDDFK47fQKywzC72L2jRc/PWcyGdjaDzkw=",
98
+
"owner": "cachix",
99
+
"repo": "git-hooks.nix",
100
+
"rev": "46d55f0aeb1d567a78223e69729734f3dca25a85",
101
+
"type": "github"
102
+
},
103
+
"original": {
104
+
"owner": "cachix",
105
+
"repo": "git-hooks.nix",
106
+
"type": "github"
107
+
}
108
+
},
109
+
"gitignore": {
110
+
"inputs": {
111
+
"nixpkgs": [
112
+
"wire",
113
+
"git-hooks",
114
+
"nixpkgs"
115
+
]
116
+
},
117
+
"locked": {
118
+
"lastModified": 1709087332,
119
+
"narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
120
+
"owner": "hercules-ci",
121
+
"repo": "gitignore.nix",
122
+
"rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
123
+
"type": "github"
124
+
},
125
+
"original": {
126
+
"owner": "hercules-ci",
127
+
"repo": "gitignore.nix",
128
+
"type": "github"
129
+
}
130
+
},
131
+
"linux-systems": {
132
+
"locked": {
133
+
"lastModified": 1689347949,
134
+
"narHash": "sha256-12tWmuL2zgBgZkdoB6qXZsgJEH9LR3oUgpaQq2RbI80=",
135
+
"owner": "nix-systems",
136
+
"repo": "default-linux",
137
+
"rev": "31732fcf5e8fea42e59c2488ad31a0e651500f68",
138
+
"type": "github"
139
+
},
140
+
"original": {
141
+
"owner": "nix-systems",
142
+
"repo": "default-linux",
143
+
"type": "github"
144
+
}
145
+
},
146
+
"nixpkgs": {
147
+
"locked": {
148
+
"lastModified": 1760524057,
149
+
"narHash": "sha256-EVAqOteLBFmd7pKkb0+FIUyzTF61VKi7YmvP1tw4nEw=",
150
+
"owner": "NixOS",
151
+
"repo": "nixpkgs",
152
+
"rev": "544961dfcce86422ba200ed9a0b00dd4b1486ec5",
153
+
"type": "github"
154
+
},
155
+
"original": {
156
+
"owner": "NixOS",
157
+
"ref": "nixos-unstable",
158
+
"repo": "nixpkgs",
159
+
"type": "github"
160
+
}
161
+
},
162
+
"nixpkgs-lib": {
163
+
"locked": {
164
+
"lastModified": 1754788789,
165
+
"narHash": "sha256-x2rJ+Ovzq0sCMpgfgGaaqgBSwY+LST+WbZ6TytnT9Rk=",
166
+
"owner": "nix-community",
167
+
"repo": "nixpkgs.lib",
168
+
"rev": "a73b9c743612e4244d865a2fdee11865283c04e6",
169
+
"type": "github"
170
+
},
171
+
"original": {
172
+
"owner": "nix-community",
173
+
"repo": "nixpkgs.lib",
174
+
"type": "github"
175
+
}
176
+
},
177
+
"nixpkgs_2": {
178
+
"locked": {
179
+
"lastModified": 1759070547,
180
+
"narHash": "sha256-JVZl8NaVRYb0+381nl7LvPE+A774/dRpif01FKLrYFQ=",
181
+
"owner": "NixOS",
182
+
"repo": "nixpkgs",
183
+
"rev": "647e5c14cbd5067f44ac86b74f014962df460840",
184
+
"type": "github"
185
+
},
186
+
"original": {
187
+
"owner": "NixOS",
188
+
"ref": "nixpkgs-unstable",
189
+
"repo": "nixpkgs",
190
+
"type": "github"
191
+
}
192
+
},
193
+
"nixpkgs_3": {
194
+
"locked": {
195
+
"lastModified": 1760284886,
196
+
"narHash": "sha256-TK9Kr0BYBQ/1P5kAsnNQhmWWKgmZXwUQr4ZMjCzWf2c=",
197
+
"owner": "NixOS",
198
+
"repo": "nixpkgs",
199
+
"rev": "cf3f5c4def3c7b5f1fc012b3d839575dbe552d43",
200
+
"type": "github"
201
+
},
202
+
"original": {
203
+
"owner": "NixOS",
204
+
"ref": "nixos-unstable",
205
+
"repo": "nixpkgs",
206
+
"type": "github"
207
+
}
208
+
},
209
+
"nixpkgs_4": {
210
+
"locked": {
211
+
"lastModified": 1754340878,
212
+
"narHash": "sha256-lgmUyVQL9tSnvvIvBp7x1euhkkCho7n3TMzgjdvgPoU=",
213
+
"owner": "nixos",
214
+
"repo": "nixpkgs",
215
+
"rev": "cab778239e705082fe97bb4990e0d24c50924c04",
216
+
"type": "github"
217
+
},
218
+
"original": {
219
+
"owner": "nixos",
220
+
"ref": "nixpkgs-unstable",
221
+
"repo": "nixpkgs",
222
+
"type": "github"
223
+
}
224
+
},
225
+
"nixpkgs_current_stable": {
226
+
"locked": {
227
+
"lastModified": 1760139962,
228
+
"narHash": "sha256-4xggC56Rub3WInz5eD7EZWXuLXpNvJiUPahGtMkwtuc=",
229
+
"owner": "NixOS",
230
+
"repo": "nixpkgs",
231
+
"rev": "7e297ddff44a3cc93673bb38d0374df8d0ad73e4",
232
+
"type": "github"
233
+
},
234
+
"original": {
235
+
"owner": "NixOS",
236
+
"ref": "nixos-25.05",
237
+
"repo": "nixpkgs",
238
+
"type": "github"
239
+
}
240
+
},
241
+
"root": {
242
+
"inputs": {
243
+
"nixpkgs": "nixpkgs",
244
+
"systems": "systems",
245
+
"wire": "wire"
246
+
}
247
+
},
248
+
"rust-analyzer-src": {
249
+
"flake": false,
250
+
"locked": {
251
+
"lastModified": 1760260966,
252
+
"narHash": "sha256-pOVvZz/aa+laeaUKyE6PtBevdo4rywMwjhWdSZE/O1c=",
253
+
"owner": "rust-lang",
254
+
"repo": "rust-analyzer",
255
+
"rev": "c5181dbbe33af6f21b9d83e02fdb6fda298a3b65",
256
+
"type": "github"
257
+
},
258
+
"original": {
259
+
"owner": "rust-lang",
260
+
"ref": "nightly",
261
+
"repo": "rust-analyzer",
262
+
"type": "github"
263
+
}
264
+
},
265
+
"systems": {
266
+
"locked": {
267
+
"lastModified": 1681028828,
268
+
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
269
+
"owner": "nix-systems",
270
+
"repo": "default",
271
+
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
272
+
"type": "github"
273
+
},
274
+
"original": {
275
+
"owner": "nix-systems",
276
+
"repo": "default",
277
+
"type": "github"
278
+
}
279
+
},
280
+
"systems_2": {
281
+
"locked": {
282
+
"lastModified": 1681028828,
283
+
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
284
+
"owner": "nix-systems",
285
+
"repo": "default",
286
+
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
287
+
"type": "github"
288
+
},
289
+
"original": {
290
+
"owner": "nix-systems",
291
+
"repo": "default",
292
+
"type": "github"
293
+
}
294
+
},
295
+
"treefmt-nix": {
296
+
"inputs": {
297
+
"nixpkgs": "nixpkgs_4"
298
+
},
299
+
"locked": {
300
+
"lastModified": 1760120816,
301
+
"narHash": "sha256-gq9rdocpmRZCwLS5vsHozwB6b5nrOBDNc2kkEaTXHfg=",
302
+
"owner": "numtide",
303
+
"repo": "treefmt-nix",
304
+
"rev": "761ae7aff00907b607125b2f57338b74177697ed",
305
+
"type": "github"
306
+
},
307
+
"original": {
308
+
"owner": "numtide",
309
+
"repo": "treefmt-nix",
310
+
"type": "github"
311
+
}
312
+
},
313
+
"wire": {
314
+
"inputs": {
315
+
"crane": "crane",
316
+
"fenix": "fenix",
317
+
"flake-compat": "flake-compat",
318
+
"flake-parts": "flake-parts",
319
+
"git-hooks": "git-hooks",
320
+
"linux-systems": "linux-systems",
321
+
"nixpkgs": "nixpkgs_3",
322
+
"nixpkgs_current_stable": "nixpkgs_current_stable",
323
+
"systems": "systems_2",
324
+
"treefmt-nix": "treefmt-nix"
325
+
},
326
+
"locked": {
327
+
"lastModified": 1758104393,
328
+
"narHash": "sha256-ddXhp8hDFMKUiVf+V5Q71Ehfw8o1kGofxN9cAljyfOI=",
329
+
"owner": "mrshmllow",
330
+
"repo": "wire",
331
+
"rev": "6c99f40273aa16dc6603375dc2c867b9265a289a",
332
+
"type": "github"
333
+
},
334
+
"original": {
335
+
"owner": "mrshmllow",
336
+
"ref": "v0.5.0",
337
+
"repo": "wire",
338
+
"type": "github"
339
+
}
340
+
}
341
+
},
342
+
"root": "root",
343
+
"version": 7
344
+
}
+38
doc/snippets/guides/installation/flake.nix
+38
doc/snippets/guides/installation/flake.nix
···
1
+
{
2
+
inputs = {
3
+
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
4
+
wire.url = "github:forallsys/wire/stable";
5
+
6
+
# alternatively, you can use a tag instead:
7
+
# wire.url = "github:forallsys/wire/v1.1.1";
8
+
9
+
systems.url = "github:nix-systems/default";
10
+
};
11
+
12
+
outputs = {
13
+
nixpkgs,
14
+
wire,
15
+
systems,
16
+
...
17
+
}: let
18
+
forAllSystems = nixpkgs.lib.genAttrs (import systems);
19
+
in {
20
+
wire = wire.makeHive {
21
+
meta.nixpkgs = import nixpkgs {localSystem = "x86_64-linux";};
22
+
23
+
# Continue to next How-To guide to fill this section
24
+
};
25
+
26
+
devShells = forAllSystems (
27
+
system: let
28
+
pkgs = nixpkgs.legacyPackages.${system};
29
+
in {
30
+
default = pkgs.mkShell {
31
+
buildInputs = [
32
+
wire.packages.${system}.wire
33
+
];
34
+
};
35
+
}
36
+
);
37
+
};
38
+
}
+10
doc/snippets/guides/installation/hive.nix
+10
doc/snippets/guides/installation/hive.nix
+11
doc/snippets/guides/installation/shell.nix
+11
doc/snippets/guides/installation/shell.nix
+3
doc/snippets/tutorial/cache.conf
+3
doc/snippets/tutorial/cache.conf
+12
doc/snippets/tutorial/cache.nix
+12
doc/snippets/tutorial/cache.nix
+2
doc/snippets/tutorial/extra-experimental-features.conf
+2
doc/snippets/tutorial/extra-experimental-features.conf
+7
doc/snippets/tutorial/extra-experimental-features.nix
+7
doc/snippets/tutorial/extra-experimental-features.nix
+46
doc/tutorial/overview.md
+46
doc/tutorial/overview.md
···
1
+
---
2
+
comment: false
3
+
title: wire Tutorial Overview
4
+
description: In this tutorial we will create and deploy a wire Hive.
5
+
---
6
+
7
+
# wire Tutorial Overview
8
+
9
+
wire is a tool to deploy NixOS systems. Its usage is inspired by [colmena](https://colmena.cli.rs/). In many places its configuration attempts to remain a superset[^1] of colmena's; however, it is **not** a fork.
10
+
11
+
[^1]: A lot of your colmena module options will continue to work with wire, but wire has additional ergonomic changes you can take advantage of.
12
+
13
+
---
14
+
15
+
In this tutorial we will create and deploy a wire Hive. Along the way we will
16
+
encounter [npins](https://github.com/andir/npins), simple NixOS
17
+
configurations, virtual machines, and deployment keys.
18
+
19
+
You'll need at least ~10 GB of free disk space to complete this tutorial.
20
+
21
+
<div class="tip custom-block" style="padding-top: 8px">
22
+
23
+
Ready? Skip to [Nix Setup](./part-one/nix-setup).
24
+
25
+
</div>
26
+
27
+
## What features does wire have?
28
+
29
+
| Features | wire | Colmena |
30
+
| ------------------------------------------------------------- | ------------------ | ------------------ |
31
+
| [Node Tagging](/guides/targeting.html#tag-basics) | :white_check_mark: | :white_check_mark: |
32
+
| [Secret Management](/guides/keys.html) | :white_check_mark: | :white_check_mark: |
33
+
| [Parallel Deployment](/guides/parallelism.html) | :white_check_mark: | :white_check_mark: |
34
+
| Remote Builds | :white_check_mark: | :white_check_mark: |
35
+
| [Key Services](/guides/keys.html#using-keys-with-services) | :white_check_mark: | :white_check_mark: |
36
+
| [Pipeline Support](/guides/targeting.html#reading-from-stdin) | :white_check_mark: | :x:[^2] |
37
+
| [Non-Root Deployments](/guides/non-root-user) | :white_check_mark: | :x:[^3] |
38
+
| `--path` accepts flakerefs | :white_check_mark: | :x: |
39
+
| REPL & Eval expressions | :x: | :white_check_mark: |
40
+
| Adhoc remote command execution[^4] | :x: | :white_check_mark: |
41
+
42
+
[^2]: You need to write custom Nix code to use Colmena hive metadata inside environments like CI pipelines, bash scripts, etc., which requires knowledge of its internals. Colmena recently gained the [eval feature](https://colmena.cli.rs/unstable/features/eval.html), which has improved the situation since wire was first started.
43
+
44
+
[^3]: See https://github.com/zhaofengli/colmena/issues/120
45
+
46
+
[^4]: wire lacks an equivalent to `colmena exec`.
+188
doc/tutorial/part-one/basic-hive.md
+188
doc/tutorial/part-one/basic-hive.md
···
1
+
---
2
+
comment: true
3
+
title: Basic Hive & Deployment
4
+
description: Creating a basic hive and deploying changes to the virtual machine.
5
+
---
6
+
7
+
# Basic Hive & Deployment
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Editing `hive.nix`
12
+
13
+
Open a text editor and edit `hive.nix`. You should copy this example, which imports
14
+
the npins sources we added. It also calls `makeHive`, and gives wire `nixpkgs`
15
+
from npins as well.
16
+
17
+
```nix:line-numbers [hive.nix]
18
+
let
19
+
# import npins sources
20
+
sources = import ./npins;
21
+
# import `wire` from npins sources
22
+
wire = import sources.wire;
23
+
in
24
+
wire.makeHive {
25
+
# give wire nixpkgs from npins
26
+
meta.nixpkgs = import sources.nixpkgs { };
27
+
28
+
# we'll edit this part
29
+
}
30
+
```
31
+
32
+
Let's check out what wire sees with `wire show`.
33
+
34
+
```sh
35
+
[nix-shell]$ wire show
36
+
INFO eval_hive: evaluating hive HiveNix("/home/marsh/scratch/wire-tutorial/hive.nix")
37
+
WARN use --json to output something scripting suitable
38
+
Summary: 0 total node(s), totalling 0 keys (0 distinct).
39
+
Note: Listed connections are tried from Left to Right
40
+
41
+
```
42
+
43
+
The `Summary: 0 total node(s)` line means there are no nodes in our hive yet.
44
+
45
+
## Adding The First Node
46
+
47
+
Let's add the virtual machine as a node to the hive with the name
48
+
`virtual-machine`. Additionally, we will add `deployment.target`, recalling that we
49
+
forwarded the virtual machine's sshd (`virtual-machine:22`) to `localhost:2222`:
50
+
51
+
```nix:line-numbers [hive.nix]
52
+
let
53
+
sources = import ./npins;
54
+
wire = import sources.wire;
55
+
in
56
+
wire.makeHive {
57
+
meta.nixpkgs = import sources.nixpkgs { };
58
+
59
+
virtual-machine = { pkgs, ... }: { # [!code ++]
60
+
deployment.target = { # [!code ++]
61
+
port = 2222; # [!code ++]
62
+
hosts = [ "localhost" ]; # [!code ++]
63
+
}; # [!code ++]
64
+
65
+
nixpkgs.hostPlatform = "x86_64-linux"; # [!code ++]
66
+
}; # [!code ++]
67
+
}
68
+
```
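As an aside, the forward from `localhost:2222` to `virtual-machine:22` recalled above is the kind of thing the earlier `vm.nix` typically sets up through the qemu-vm module's `virtualisation.forwardPorts` option. A hypothetical sketch, not necessarily the exact snippet from the previous step:

```nix
{
  # Forward host port 2222 to the guest's sshd on port 22.
  virtualisation.forwardPorts = [
    {
      from = "host";
      host.port = 2222;
      guest.port = 22;
    }
  ];
}
```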
69
+
70
+
## A naive `wire apply`
71
+
72
+
If we try to run `wire apply` on our hive at this stage, it likely won't work.
73
+
If you've used NixOS before, you'll notice that many important options are
74
+
missing. But let's try anyway:
75
+
76
+
```sh
77
+
[nix-shell]$ wire apply
78
+
ERROR apply{goal=Switch on=}:goal{node=virtual-machine}: lib::hive::node: Failed to execute `Evaluate the node`
79
+
Error: × 1 node(s) failed to apply.
80
+
81
+
Error:
82
+
× node virtual-machine failed to apply
83
+
├─▶ wire::Evaluate
84
+
│
85
+
│ × failed to evaluate `--file /home/marsh/scratch/wire-tutorial/hive.nix topLevels.virtual-machine` from the context
86
+
│ │ of a hive.
87
+
│
88
+
╰─▶ nix --extra-experimental-features nix-command --extra-experimental-features flakes eval --json --file /home/marsh/scratch/
89
+
wire-tutorial/hive.nix topLevels.virtual-machine --log-format internal-json failed (reason: known-status) with code 1 (last 20
90
+
lines):
91
+
error:
92
+
… while evaluating '(evaluateNode node).config.system.build.toplevel' to select 'drvPath' on it
93
+
at /nix/store/5pfz0v479gnciac17rcqi2gwyz8pl4s0-source/runtime/evaluate.nix:65:23:
94
+
64|
95
+
65| getTopLevel = node: (evaluateNode node).config.system.build.toplevel.drvPath;
96
+
| ^
97
+
66| in
98
+
99
+
… while calling the 'head' builtin
100
+
at /nix/store/n3d1ricw0cb5jd8vvfym6ig0mw7x7sv9-source/lib/attrsets.nix:1701:13:
101
+
1700| if length values == 1 || pred here (elemAt values 1) (head values) then
102
+
1701| head values
103
+
| ^
104
+
1702| else
105
+
106
+
(stack trace truncated; use '--show-trace' to show the full trace)
107
+
108
+
error:
109
+
Failed assertions:
110
+
- The ‘fileSystems’ option does not specify your root file system.
111
+
- You must set the option ‘boot.loader.grub.devices’ or 'boot.loader.grub.mirroredBoots' to make the system bootable.
112
+
trace: evaluation warning: system.stateVersion is not set, defaulting to 25.11. Read why this matters on https://nixos.org/
113
+
manual/nixos/stable/options.html#opt-system.stateVersion.
114
+
115
+
```
116
+
117
+
The command complained that we did not define any `fileSystems` or a boot loader.
118
+
The `${sources.nixpkgs}/nixos/modules/virtualisation/qemu-vm.nix` imported in
119
+
`vm.nix` does
120
+
extra work to make our virtual machine bootable, which we are currently missing.
121
+
122
+
## Importing `vm.nix`
123
+
124
+
Let's import our `vm.nix` into this hive to fix our evaluation errors.
125
+
Additionally, add a new package such as `vim` to our configuration:
126
+
127
+
```nix:line-numbers [hive.nix]
128
+
let
129
+
sources = import ./npins;
130
+
wire = import sources.wire;
131
+
in
132
+
wire.makeHive {
133
+
meta.nixpkgs = import sources.nixpkgs { };
134
+
135
+
virtual-machine = { pkgs, ... }: {
136
+
deployment.target = {
137
+
port = 2222;
138
+
hosts = [ "localhost" ];
139
+
};
140
+
141
+
imports = [ # [!code ++]
142
+
./vm.nix # [!code ++]
143
+
]; # [!code ++]
144
+
145
+
environment.systemPackages = [ pkgs.vim ]; # [!code ++]
146
+
147
+
nixpkgs.hostPlatform = "x86_64-linux";
148
+
};
149
+
}
150
+
```
151
+
152
+
## Our first deploy
153
+
154
+
Trying our basic `wire apply` again with these changes:
155
+
156
+
```sh
157
+
[nix-shell]$ wire apply
158
+
...
159
+
INFO lib::nix_log: stopping the following units: boot.mount
160
+
INFO lib::nix_log: NOT restarting the following changed units: systemd-fsck@dev-disk-by\x2dlabel-ESP.service
161
+
INFO lib::nix_log: activating the configuration...
162
+
INFO lib::nix_log: setting up /etc...
163
+
INFO lib::nix_log: restarting systemd...
164
+
INFO lib::nix_log: reloading user units for root...
165
+
INFO lib::nix_log: restarting sysinit-reactivation.target
166
+
INFO lib::nix_log: reloading the following units: dbus.service
167
+
INFO lib::nix_log: the following new units were started: boot.automount, sysinit-reactivation.target, systemd-tmpfiles-resetup.service
168
+
INFO apply{goal=Switch on=}:goal{node=virtual-machine}: lib::hive::node: Executing step `Upload key @ PostActivation`
169
+
INFO apply{goal=Switch on=}: wire::apply: Successfully applied goal to 1 node(s): [Name("virtual-machine")]
170
+
```
171
+
172
+
Now, let's confirm these changes were applied to the virtual machine by executing
173
+
`vim` in the virtual machine window:
174
+
175
+
```sh [Virtual Machine]
176
+
[root@wire-tutorial:~]# vim --version
177
+
VIM - Vi IMproved 9.1 (2024 Jan 02, compiled Jan 01 1980 00:00:00)
178
+
```
179
+
180
+
Nice! You successfully deployed a new NixOS configuration to a **remote host**!
181
+
182
+
::: info
183
+
This followed the common steps of adding the node's `deployment.target` details and
184
+
importing its pre-existing NixOS configuration (in this case, `vm.nix`), a
185
+
pattern you'll be using a lot if you choose to adopt wire.
186
+
:::
187
+
188
+
In the next section, we'll cover how to deploy secrets / keys to our remote node.
+40
doc/tutorial/part-one/nix-setup.md
+40
doc/tutorial/part-one/nix-setup.md
···
1
+
---
2
+
comment: true
3
+
title: Nix Setup
4
+
description: Installing npins, nix, and enabling the binary cache.
5
+
---
6
+
7
+
# Nix Setup
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
::: warning
12
+
This page is for the purposes of the **Tutorial**.
13
+
You should read [How-to Guides - Install wire](/guides/installation.html) for installing wire for
14
+
regular use.
15
+
:::
16
+
17
+
## Nix Installation
18
+
19
+
You should install Nix if you do not have it on your system already.
20
+
There are detailed steps to installing Nix on [nix.dev](https://nix.dev/install-nix).
21
+
22
+
By the end of the installation, you should see something like this:
23
+
24
+
```sh
25
+
$ nix --version
26
+
nix (Nix) 2.11.0
27
+
```
28
+
29
+
## Binary Cache
30
+
31
+
Because wire can be heavy to compile, it is distributed with a [binary
32
+
cache](https://wiki.nixos.org/wiki/Binary_Cache).
33
+
34
+
You must enable the [garnix binary cache](https://garnix.io/docs/caching) or you
35
+
will be compiling everything from source:
36
+
37
+
::: code-group
38
+
<<< @/snippets/tutorial/cache.conf [nix.conf]
39
+
<<< @/snippets/tutorial/cache.nix [configuration.nix]
40
+
:::
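
If you prefer to see it inline rather than opening the snippets, a minimal `configuration.nix` sketch looks roughly like this; it assumes the cache URL `https://cache.garnix.io`, and the `<public key>` placeholder must be replaced with the key from the [garnix caching docs](https://garnix.io/docs/caching):

```nix
{
  nix.settings = {
    # appended to the default substituters (cache.nixos.org)
    extra-substituters = [ "https://cache.garnix.io" ];
    # replace the placeholder with the real key from the garnix docs
    extra-trusted-public-keys = [ "cache.garnix.io:<public key>" ];
  };
}
```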
+116
doc/tutorial/part-one/repo-setup.md
+116
doc/tutorial/part-one/repo-setup.md
···
1
+
---
2
+
comment: true
3
+
title: Preparing Repo & Shell
4
+
description: Adding npins sources and a nix development shell.
5
+
---
6
+
7
+
# Preparing Repo & Shell
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Initialising with Git & `npins`
12
+
13
+
First, let's create an ad hoc shell to bring these two tools into our `$PATH`.
14
+
15
+
```sh
16
+
$ nix-shell -p git npins
17
+
[nix-shell]$ git --version
18
+
git version 2.51.0
19
+
[nix-shell]$ npins --version
20
+
npins 0.3.1
21
+
```
22
+
23
+
Great! Now let's use Git & `npins` to create a new Git repo and initialise it.
24
+
`npins init` may take a while to download `nixpkgs`.
25
+
26
+
```sh
27
+
[nix-shell]$ git init wire-tutorial
28
+
Initialized empty Git repository in /home/.../wire-tutorial/.git/
29
+
[nix-shell]$ cd wire-tutorial/
30
+
[nix-shell]$ npins init
31
+
[INFO ] Welcome to npins!
32
+
[INFO ] Creating `npins` directory
33
+
[INFO ] Writing default.nix
34
+
[INFO ] Writing initial lock file (empty)
35
+
[INFO ] Successfully written initial files to 'npins/sources.json'.
36
+
```
37
+
38
+
This has created a pinned version of `nixpkgs` for us to use in our wire hive.
39
+
40
+
## Adding wire as a dependency
41
+
42
+
We now need to tell `npins` to use `forallsys/wire` as a dependency.
43
+
44
+
```sh
45
+
[nix-shell]$ npins add github forallsys wire --branch stable
46
+
[INFO ] Adding 'wire' โฆ
47
+
repository: https://github.com/forallsys/wire.git
48
+
pre_releases: false
49
+
submodules: false
50
+
version: v0.4.0
51
+
revision: f33d80c15b17c85d557d533441609a59a2210941
52
+
hash: 0wgah341hvjpvppkgwjrj50rvzf56ccmjz720xsl3mw38h9nn6sr
53
+
frozen: false
54
+
```
55
+
56
+
Great, now let's confirm the two dependencies we have added to this `npins`
57
+
project:
58
+
59
+
```sh
60
+
[nix-shell]$ npins show
61
+
nixpkgs: (git repository)
62
+
repository: https://github.com/pkpbynum/nixpkgs.git
63
+
branch: pb/disk-size-bootloader
64
+
submodules: false
65
+
revision: da2060bdc1c9bc35acc4eafa265ba6b6c64f9926
66
+
url: https://github.com/pkpbynum/nixpkgs/archive/da2060bdc1c9bc35acc4eafa265ba6b6c64f9926.tar.gz
67
+
hash: 0j07gvnm7c5mzw1313asa8limzbmsbnsd02dcw22ing8fg3vbb7g
68
+
frozen: false
69
+
70
+
wire: (git release tag)
71
+
repository: https://github.com/forallsys/wire.git
72
+
pre_releases: false
73
+
submodules: false
74
+
version: v0.4.0
75
+
revision: f33d80c15b17c85d557d533441609a59a2210941
76
+
hash: 0wgah341hvjpvppkgwjrj50rvzf56ccmjz720xsl3mw38h9nn6sr
77
+
frozen: false
78
+
```
79
+
80
+
## Creating a `shell.nix`
81
+
82
+
Open a text editor to edit `shell.nix` in the `wire-tutorial` directory.
83
+
84
+
```nix:line-numbers [shell.nix]
85
+
let
86
+
sources = import ./npins;
87
+
pkgs = import sources.nixpkgs { };
88
+
wire = import sources.wire;
89
+
in
90
+
pkgs.mkShell {
91
+
packages = [
92
+
wire.packages.x86_64-linux.wire-small
93
+
pkgs.npins
94
+
pkgs.git
95
+
];
96
+
97
+
shellHook = ''
98
+
export NIX_PATH="nixpkgs=${sources.nixpkgs.outPath}"
99
+
'';
100
+
}
101
+
```
102
+
103
+
You should now `exit` to quit the old shell, and
104
+
enter a new shell with `nix-shell`. Since we added wire as a package, our new
105
+
shell should have wire in the `$PATH`:
106
+
107
+
```sh
108
+
[nix-shell]$ exit
109
+
exit
110
+
$ cd wire-tutorial/
111
+
$ nix-shell
112
+
[nix-shell]$ wire --version
113
+
wire 0.5.0
114
+
Debug: Hive::SCHEMA_VERSION 0
115
+
116
+
```
+161
doc/tutorial/part-one/vm-setup.md
+161
doc/tutorial/part-one/vm-setup.md
···
1
+
---
2
+
comment: true
3
+
title: Creating a Virtual Machine
4
+
description: Creating a NixOS virtual machine to use as a deployment target.
5
+
---
6
+
7
+
# Creating a Virtual Machine
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Creating a `vm.nix`
12
+
13
+
For this step, you'll need your ssh public key, which you can obtain from
14
+
`ssh-add -L`.
15
+
16
+
Open a text editor and edit `vm.nix`. Place in it this basic NixOS
17
+
virtual machine configuration, which enables openssh and forwards its port 22:
18
+
19
+
```nix:line-numbers [vm.nix]
20
+
let
21
+
sources = import ./npins;
22
+
in
23
+
{
24
+
imports = [ "${sources.nixpkgs}/nixos/modules/virtualisation/qemu-vm.nix" ];
25
+
26
+
networking.hostName = "wire-tutorial";
27
+
28
+
users.users.root = {
29
+
initialPassword = "root";
30
+
openssh.authorizedKeys.keys = [
31
+
# I made this a nix syntax error so you're forced to deal with it!
32
+
<your ssh public-key as a string>
33
+
];
34
+
};
35
+
36
+
boot = {
37
+
loader = {
38
+
systemd-boot.enable = true;
39
+
efi.canTouchEfiVariables = true;
40
+
};
41
+
42
+
kernelParams = [ "console=ttyS0" ];
43
+
44
+
growPartition = true;
45
+
};
46
+
47
+
# enable openssh
48
+
services = {
49
+
openssh = {
50
+
enable = true;
51
+
settings.PermitRootLogin = "yes";
52
+
};
53
+
54
+
getty.autologinUser = "root";
55
+
};
56
+
57
+
virtualisation = {
58
+
graphics = false;
59
+
useBootLoader = true;
60
+
61
+
# use a 5gb disk
62
+
diskSize = 5 * 1024;
63
+
64
+
# grow the filesystem to fit the 5 gb we reserved
65
+
fileSystems."/".autoResize = true;
66
+
67
+
# forward `openssh` port 22 to localhost:2222.
68
+
forwardPorts = [
69
+
{
70
+
from = "host";
71
+
host.port = 2222;
72
+
guest.port = 22;
73
+
}
74
+
];
75
+
};
76
+
77
+
system.stateVersion = "23.11";
78
+
}
79
+
```
80
+
81
+
If you like, you may take a moment to understand each line of this
82
+
configuration.
83
+
84
+
## Building & Running the virtual machine
85
+
86
+
Open a separate Terminal tab/window/instance, ensuring you enter the development
87
+
shell with `nix-shell`.
88
+
Then, build the virtual machine with a bootloader,
89
+
taking our `vm.nix` as the NixOS configuration.
90
+
91
+
```sh
92
+
$ nix-shell
93
+
[nix-shell]$ nix-build '<nixpkgs/nixos>' -A vmWithBootLoader -I nixos-config=./vm.nix
94
+
```
95
+
96
+
::: tip HELP
97
+
98
+
If you got an error such as
99
+
100
+
```
101
+
error: The option `...' in `...' is already declared in `...'.
102
+
```
103
+
104
+
make sure you ran the above command in the `nix-shell`!
105
+
106
+
:::
107
+
108
+
Building the virtual machine can take some time, but once it completes, start it
109
+
by running:
110
+
111
+
```sh
112
+
[nix-shell]$ ./result/bin/run-wire-tutorial-vm
113
+
```
114
+
115
+
You will see boot-up logs fly across the screen and eventually you will be placed
116
+
into a shell inside the virtual machine.
117
+
118
+
```sh [Virtual Machine]
119
+
running activation script...
120
+
setting up /etc...
121
+
122
+
Welcome to NixOS 25.11 (Xantusia)!
123
+
124
+
[ OK ] Created slice Slice /system/getty.
125
+
[ OK ] Created slice Slice /system/modprobe.
126
+
...
127
+
<<< Welcome to NixOS 25.11pre861972.88cef159e47c (x86_64) - hvc0 >>>
128
+
129
+
Run 'nixos-help' for the NixOS manual.
130
+
131
+
wire-tutorial login: root (automatic login)
132
+
133
+
[root@wire-tutorial:~]#
134
+
135
+
```
136
+
137
+
::: details
138
+
Further details on how the above commands work can be found at
139
+
[nix.dev](https://nix.dev/tutorials/nixos/nixos-configuration-on-vm.html#creating-a-qemu-based-virtual-machine-from-a-nixos-configuration)
140
+
:::
141
+
142
+
## Summary
143
+
144
+
Congratulations, you created a virtual machine in your terminal.
145
+
We'll be deploying to this virtual machine, so keep the
146
+
terminal instance open.
147
+
148
+
::: info
149
+
From now on, commands run inside the virtual machine will be preceded by the
150
+
following prompt:
151
+
152
+
```sh [Virtual Machine]
153
+
[root@wire-tutorial:~]#
154
+
155
+
```
156
+
157
+
:::
158
+
159
+
::: tip
160
+
If you ever want to quit the virtual machine, run the command `poweroff`.
161
+
:::
+142
doc/tutorial/part-two/basic-keys.md
+142
doc/tutorial/part-two/basic-keys.md
···
1
+
---
2
+
comment: true
3
+
title: Deployment Keys Basics
4
+
description: Deploy some basic secrets with the wire tool.
5
+
---
6
+
7
+
# Deployment Keys Basics
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Creating a `secrets.nix`
12
+
13
+
Let's create a NixOS module that will contain our secret keys, and import it:
14
+
15
+
```nix:line-numbers [hive.nix]
16
+
let
17
+
sources = import ./npins;
18
+
wire = import sources.wire;
19
+
in
20
+
wire.makeHive {
21
+
meta.nixpkgs = import sources.nixpkgs { };
22
+
23
+
virtual-machine = { pkgs, ... }: {
24
+
deployment.target = {
25
+
port = 2222;
26
+
hosts = [ "localhost" ];
27
+
};
28
+
29
+
imports = [
30
+
./vm.nix
31
+
./secrets.nix # [!code ++]
32
+
];
33
+
34
+
environment.systemPackages = [ pkgs.vim ];
35
+
36
+
nixpkgs.hostPlatform = "x86_64-linux";
37
+
};
38
+
}
39
+
```
40
+
41
+
```nix:line-numbers [secrets.nix]
42
+
{
43
+
deployment.keys = {
44
+
# the key's unique name is `"basic.txt"`.
45
+
"basic.txt" = {
46
+
# In this key's case, the source is a literal string:
47
+
source = ''
48
+
Hello World
49
+
'';
50
+
};
51
+
};
52
+
}
53
+
```
54
+
55
+
::: details
56
+
Further details on the `deployment.keys` options can be found
57
+
[in the reference](/reference/module.html#deployment-keys)
58
+
:::
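
Beyond `source`, each key also exposes placement and ownership options (see the reference linked above). A minimal sketch follows; the key name `service.env` and the `my-service` user and group are made up for illustration:

```nix
{
  deployment.keys."service.env" = {
    source = ''
      GREETING=hello
    '';

    # the defaults are destDir = "/run/keys/", user = group = "root",
    # permissions = "0600" and uploadAt = "pre-activation"
    destDir = "/etc/keys/"; # anything outside /run/keys/ persists past reboots
    user = "my-service"; # made-up user for illustration
    group = "my-service";
    permissions = "0640";
    uploadAt = "post-activation"; # upload after the new system is activated
  };
}
```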
59
+
60
+
Once we deploy this new configuration to the virtual machine,
61
+
`/run/keys/basic.txt` will be created with the contents of the key.
62
+
63
+
```sh
64
+
[nix-shell]$ wire apply keys
65
+
WARN lib::nix_log: Store URL: ssh://root@localhost
66
+
(root@localhost) Password:
67
+
68
+
```
69
+
70
+
```sh [Virtual Machine]
71
+
[root@wire-tutorial:~]# cat /run/keys/basic.txt
72
+
Hello World
73
+
74
+
```
75
+
76
+
You successfully deployed your first, albeit not-so-secret, secret key! Let's
77
+
move on from literal-text keys and use something a bit more powerful.
78
+
79
+
## File-sourced keys <Badge type="info">Optional</Badge>
80
+
81
+
This section is optional to try, but you can also pass `deployment.keys.<name>.source`
82
+
a file path. Its contents are read and treated as literal text.
83
+
84
+
```sh
85
+
$ echo hello world > very-important-secret.txt
86
+
```
87
+
88
+
```nix:line-numbers [secrets.nix]
89
+
{
90
+
deployment.keys = {
91
+
# ...
92
+
93
+
"very-important-secret.txt" = { # [!code ++]
94
+
source = ./very-important-secret.txt; # [!code ++]
95
+
}; # [!code ++]
96
+
};
97
+
}
98
+
```
99
+
100
+
```sh [Virtual Machine]
101
+
[root@wire-tutorial:~]# cat /run/keys/very-important-secret.txt
102
+
hello world
103
+
104
+
```
105
+
106
+
## Command-sourced keys
107
+
108
+
Command-sourced keys are where the real power of wire keys lies. By passing a
109
+
list of strings, wire will execute them as a command and create a key out of its `stdout`.
110
+
111
+
Because the command's output is never written to the nix store, these can be
112
+
considered real secrets.
113
+
114
+
To create a basic example, update your `secrets.nix` to include a secret that
115
+
echos "hello world":
116
+
117
+
```nix:line-numbers [secrets.nix]
118
+
{
119
+
deployment.keys = {
120
+
# ...
121
+
122
+
"command.txt" = { # [!code ++]
123
+
source = [ # [!code ++]
124
+
"echo" # [!code ++]
125
+
"hello world" # [!code ++]
126
+
]; # [!code ++]
127
+
}; # [!code ++]
128
+
};
129
+
}
130
+
```
131
+
132
+
After a quick `wire apply keys`, the `/run/keys/command.txt` file is
133
+
created:
134
+
135
+
```sh [Virtual Machine]
136
+
[root@wire-tutorial:~]# cat /run/keys/command.txt
137
+
hello world
138
+
139
+
```
140
+
141
+
Hopefully you can see the potential of command-sourced keys, as these are the
142
+
basic building block of how we achieve encrypted secrets with wire.
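
Command-sourced keys can also be given environment variables via `deployment.keys.<name>.environment`. A small sketch, with a made-up variable name, might look like this:

```nix
{
  deployment.keys."from-env.txt" = {
    # printenv prints the value of SECRET_GREETING, which wire provides
    # to the command via the `environment` option below
    source = [
      "printenv"
      "SECRET_GREETING"
    ];
    environment = {
      SECRET_GREETING = "hello from an environment variable";
    };
  };
}
```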
+107
doc/tutorial/part-two/encryption.md
+107
doc/tutorial/part-two/encryption.md
···
1
+
---
2
+
comment: true
3
+
title: Encrypted Deployment Keys
4
+
description: Deploy an age-encrypted secret with the wire tool.
5
+
---
6
+
7
+
# Encrypted Deployment Keys
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
::: tip
12
+
For this tutorial we will be using [`age`](https://github.com/FiloSottile/age),
13
+
but other encryption CLI tools, such as GnuPG, work just as well.
14
+
:::
15
+
16
+
## Installing age
17
+
18
+
Alter your `shell.nix` to include age:
19
+
20
+
```nix:line-numbers [shell.nix]
21
+
let
22
+
sources = import ./npins;
23
+
pkgs = import sources.nixpkgs { };
24
+
wire = import sources.wire;
25
+
in
26
+
pkgs.mkShell {
27
+
packages = [
28
+
wire.packages.x86_64-linux.wire-small
29
+
pkgs.npins
30
+
pkgs.git
31
+
pkgs.age # [!code ++]
32
+
];
33
+
34
+
shellHook = ''
35
+
export NIX_PATH="nixpkgs=${sources.nixpkgs.outPath}"
36
+
'';
37
+
}
38
+
```
39
+
40
+
Quit and re-open your shell, and confirm age is now available:
41
+
42
+
```sh
43
+
[nix-shell]$ exit
44
+
exit
45
+
$ nix-shell
46
+
[nix-shell]$ age --version
47
+
1.2.1
48
+
49
+
```
50
+
51
+
## Encrypting a secret
52
+
53
+
First create an age private key:
54
+
55
+
```sh
56
+
[nix-shell]$ age-keygen -o key.txt
57
+
Public key: age1j08s3kmr8zw4w8k99vs4nut5mg03dm8nfuaajuekdyzlujxply5qwsv4g0
58
+
59
+
```
60
+
61
+
::: details
62
+
Further details on how age works can be found in the
63
+
[age manual](https://man.archlinux.org/man/age.1.en.txt).
64
+
:::
65
+
66
+
Now, let's encrypt the string `"encrypted string!"` with age and save it to the
67
+
file `top-secret.age`.
68
+
69
+
We will use a pipeline to echo the plaintext string into
70
+
age, use `age-keygen -y` to give age the public key we generated, and then we
71
+
use the redirection operator to save the encrypted data to `top-secret.age`.
72
+
73
+
```sh
74
+
[nix-shell]$ echo "encrypted string!" | age --encrypt --recipient $(age-keygen -y key.txt) > top-secret.age
75
+
```
76
+
77
+
## Adding an age-encrypted key
78
+
79
+
Now, let's combine our previous command-sourced key with `age`. Pass the
80
+
arguments `age --decrypt --identity key.txt ./top-secret.age` to wire:
81
+
82
+
```nix:line-numbers [secrets.nix]
83
+
{
84
+
deployment.keys = {
85
+
# ...
86
+
87
+
"top-secret" = { # [!code ++]
88
+
source = [ # [!code ++]
89
+
"age" # [!code ++]
90
+
"--decrypt" # [!code ++]
91
+
"--identity" # [!code ++]
92
+
"key.txt" # [!code ++]
93
+
"${./top-secret.age}" # [!code ++]
94
+
]; # [!code ++]
95
+
}; # [!code ++]
96
+
};
97
+
}
98
+
```
99
+
100
+
One `wire apply keys` later, and you have successfully deployed an encrypted
101
+
key:
102
+
103
+
```sh [Virtual Machine]
104
+
[root@wire-tutorial:~]# cat /run/keys/top-secret
105
+
encrypted string!
106
+
107
+
```
+14
doc/upgrade.sh
+14
doc/upgrade.sh
···
1
+
#!/usr/bin/env bash
2
+
3
+
set -e -x
4
+
5
+
pushd doc/
6
+
7
+
# https://datatracker.ietf.org/doc/html/rfc4648#section-4
8
+
sed -i -r -e 's|sha256-[a-zA-Z0-9+/]+=|sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=|g' package.nix
9
+
10
+
NEW="$(nix build .#docs 2>&1 | sed -n -r 's/got:\s+(sha256-[a-zA-Z0-9+\\/]+=)/\1/p' | awk '{$1=$1;print}')"
11
+
12
+
echo "new: $NEW"
13
+
14
+
sed -i -r -e "s|sha256-[a-zA-Z0-9+/]+=|$NEW|g" package.nix
+131
-47
flake.lock
+131
-47
flake.lock
···
1
1
{
2
2
"nodes": {
3
+
"colmena_benchmarking": {
4
+
"inputs": {
5
+
"flake-compat": "flake-compat",
6
+
"flake-utils": "flake-utils",
7
+
"nixpkgs": "nixpkgs",
8
+
"stable": "stable"
9
+
},
10
+
"locked": {
11
+
"lastModified": 1684127108,
12
+
"narHash": "sha256-01bfuSY4gnshhtqA1EJCw2CMsKkAx+dHS+sEpQ2+EAQ=",
13
+
"owner": "zhaofengli",
14
+
"repo": "colmena",
15
+
"rev": "5fdd743a11e7291bd8ac1e169d62ba6156c99be4",
16
+
"type": "github"
17
+
},
18
+
"original": {
19
+
"owner": "zhaofengli",
20
+
"ref": "v0.4.0",
21
+
"repo": "colmena",
22
+
"type": "github"
23
+
}
24
+
},
3
25
"crane": {
4
26
"locked": {
5
-
"lastModified": 1748970125,
6
-
"narHash": "sha256-UDyigbDGv8fvs9aS95yzFfOKkEjx1LO3PL3DsKopohA=",
27
+
"lastModified": 1763938834,
28
+
"narHash": "sha256-j8iB0Yr4zAvQLueCZ5abxfk6fnG/SJ5JnGUziETjwfg=",
7
29
"owner": "ipetkov",
8
30
"repo": "crane",
9
-
"rev": "323b5746d89e04b22554b061522dfce9e4c49b18",
31
+
"rev": "d9e753122e51cee64eb8d2dddfe11148f339f5a2",
10
32
"type": "github"
11
33
},
12
34
"original": {
···
23
45
"rust-analyzer-src": "rust-analyzer-src"
24
46
},
25
47
"locked": {
26
-
"lastModified": 1750142293,
27
-
"narHash": "sha256-D2IwLkYYgsaXu8asJdGoNGhYkRgmW7fvxi4BmVUrkys=",
48
+
"lastModified": 1764571808,
49
+
"narHash": "sha256-+oo9W5rz03TjfpNqDSLEQwgKiuBbjrHdORyTHli2RuM=",
28
50
"owner": "nix-community",
29
51
"repo": "fenix",
30
-
"rev": "a266cb2d1beda20f750fc8e484e57224c8671926",
52
+
"rev": "df3c2e78ec13418f85c1f26e77a50f865ec57d38",
31
53
"type": "github"
32
54
},
33
55
"original": {
···
37
59
}
38
60
},
39
61
"flake-compat": {
62
+
"flake": false,
40
63
"locked": {
41
-
"lastModified": 1747046372,
42
-
"narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=",
64
+
"lastModified": 1650374568,
65
+
"narHash": "sha256-Z+s0J8/r907g149rllvwhb4pKi8Wam5ij0st8PwAh+E=",
43
66
"owner": "edolstra",
44
67
"repo": "flake-compat",
45
-
"rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
68
+
"rev": "b4a34015c698c7793d592d66adbab377907a2be8",
46
69
"type": "github"
47
70
},
48
71
"original": {
···
52
75
}
53
76
},
54
77
"flake-compat_2": {
78
+
"locked": {
79
+
"lastModified": 1751685974,
80
+
"narHash": "sha256-NKw96t+BgHIYzHUjkTK95FqYRVKB8DHpVhefWSz/kTw=",
81
+
"rev": "549f2762aebeff29a2e5ece7a7dc0f955281a1d1",
82
+
"type": "tarball",
83
+
"url": "https://git.lix.systems/api/v1/repos/lix-project/flake-compat/archive/549f2762aebeff29a2e5ece7a7dc0f955281a1d1.tar.gz?rev=549f2762aebeff29a2e5ece7a7dc0f955281a1d1"
84
+
},
85
+
"original": {
86
+
"type": "tarball",
87
+
"url": "https://git.lix.systems/lix-project/flake-compat/archive/main.tar.gz"
88
+
}
89
+
},
90
+
"flake-compat_3": {
55
91
"flake": false,
56
92
"locked": {
57
-
"lastModified": 1696426674,
58
-
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
93
+
"lastModified": 1761588595,
94
+
"narHash": "sha256-XKUZz9zewJNUj46b4AJdiRZJAvSZ0Dqj2BNfXvFlJC4=",
59
95
"owner": "edolstra",
60
96
"repo": "flake-compat",
61
-
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
97
+
"rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
62
98
"type": "github"
63
99
},
64
100
"original": {
···
72
108
"nixpkgs-lib": "nixpkgs-lib"
73
109
},
74
110
"locked": {
75
-
"lastModified": 1749398372,
76
-
"narHash": "sha256-tYBdgS56eXYaWVW3fsnPQ/nFlgWi/Z2Ymhyu21zVM98=",
111
+
"lastModified": 1763759067,
112
+
"narHash": "sha256-LlLt2Jo/gMNYAwOgdRQBrsRoOz7BPRkzvNaI/fzXi2Q=",
77
113
"owner": "hercules-ci",
78
114
"repo": "flake-parts",
79
-
"rev": "9305fe4e5c2a6fcf5ba6a3ff155720fbe4076569",
115
+
"rev": "2cccadc7357c0ba201788ae99c4dfa90728ef5e0",
80
116
"type": "github"
81
117
},
82
118
"original": {
···
85
121
"type": "github"
86
122
}
87
123
},
124
+
"flake-utils": {
125
+
"locked": {
126
+
"lastModified": 1659877975,
127
+
"narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=",
128
+
"owner": "numtide",
129
+
"repo": "flake-utils",
130
+
"rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0",
131
+
"type": "github"
132
+
},
133
+
"original": {
134
+
"owner": "numtide",
135
+
"repo": "flake-utils",
136
+
"type": "github"
137
+
}
138
+
},
88
139
"git-hooks": {
89
140
"inputs": {
90
-
"flake-compat": "flake-compat_2",
141
+
"flake-compat": "flake-compat_3",
91
142
"gitignore": "gitignore",
92
-
"nixpkgs": "nixpkgs"
143
+
"nixpkgs": "nixpkgs_2"
93
144
},
94
145
"locked": {
95
-
"lastModified": 1749636823,
96
-
"narHash": "sha256-WUaIlOlPLyPgz9be7fqWJA5iG6rHcGRtLERSCfUDne4=",
146
+
"lastModified": 1763988335,
147
+
"narHash": "sha256-QlcnByMc8KBjpU37rbq5iP7Cp97HvjRP0ucfdh+M4Qc=",
97
148
"owner": "cachix",
98
149
"repo": "git-hooks.nix",
99
-
"rev": "623c56286de5a3193aa38891a6991b28f9bab056",
150
+
"rev": "50b9238891e388c9fdc6a5c49e49c42533a1b5ce",
100
151
"type": "github"
101
152
},
102
153
"original": {
···
143
194
},
144
195
"nixpkgs": {
145
196
"locked": {
146
-
"lastModified": 1730768919,
147
-
"narHash": "sha256-8AKquNnnSaJRXZxc5YmF/WfmxiHX6MMZZasRP6RRQkE=",
197
+
"lastModified": 1683408522,
198
+
"narHash": "sha256-9kcPh6Uxo17a3kK3XCHhcWiV1Yu1kYj22RHiymUhMkU=",
148
199
"owner": "NixOS",
149
200
"repo": "nixpkgs",
150
-
"rev": "a04d33c0c3f1a59a2c1cb0c6e34cd24500e5a1dc",
201
+
"rev": "897876e4c484f1e8f92009fd11b7d988a121a4e7",
151
202
"type": "github"
152
203
},
153
204
"original": {
154
205
"owner": "NixOS",
155
-
"ref": "nixpkgs-unstable",
206
+
"ref": "nixos-unstable",
156
207
"repo": "nixpkgs",
157
208
"type": "github"
158
209
}
159
210
},
160
211
"nixpkgs-lib": {
161
212
"locked": {
162
-
"lastModified": 1748740939,
163
-
"narHash": "sha256-rQaysilft1aVMwF14xIdGS3sj1yHlI6oKQNBRTF40cc=",
213
+
"lastModified": 1761765539,
214
+
"narHash": "sha256-b0yj6kfvO8ApcSE+QmA6mUfu8IYG6/uU28OFn4PaC8M=",
164
215
"owner": "nix-community",
165
216
"repo": "nixpkgs.lib",
166
-
"rev": "656a64127e9d791a334452c6b6606d17539476e2",
217
+
"rev": "719359f4562934ae99f5443f20aa06c2ffff91fc",
167
218
"type": "github"
168
219
},
169
220
"original": {
···
174
225
},
175
226
"nixpkgs_2": {
176
227
"locked": {
177
-
"lastModified": 1749794982,
178
-
"narHash": "sha256-Kh9K4taXbVuaLC0IL+9HcfvxsSUx8dPB5s5weJcc9pc=",
228
+
"lastModified": 1759417375,
229
+
"narHash": "sha256-O7eHcgkQXJNygY6AypkF9tFhsoDQjpNEojw3eFs73Ow=",
230
+
"owner": "NixOS",
231
+
"repo": "nixpkgs",
232
+
"rev": "dc704e6102e76aad573f63b74c742cd96f8f1e6c",
233
+
"type": "github"
234
+
},
235
+
"original": {
179
236
"owner": "NixOS",
237
+
"ref": "nixpkgs-unstable",
180
238
"repo": "nixpkgs",
181
-
"rev": "ee930f9755f58096ac6e8ca94a1887e0534e2d81",
239
+
"type": "github"
240
+
}
241
+
},
242
+
"nixpkgs_3": {
243
+
"locked": {
244
+
"lastModified": 1764517877,
245
+
"narHash": "sha256-pp3uT4hHijIC8JUK5MEqeAWmParJrgBVzHLNfJDZxg4=",
246
+
"owner": "NixOS",
247
+
"repo": "nixpkgs",
248
+
"rev": "2d293cbfa5a793b4c50d17c05ef9e385b90edf6c",
182
249
"type": "github"
183
250
},
184
251
"original": {
···
188
255
"type": "github"
189
256
}
190
257
},
191
-
"nixpkgs_3": {
258
+
"nixpkgs_4": {
192
259
"locked": {
193
-
"lastModified": 1747958103,
194
-
"narHash": "sha256-qmmFCrfBwSHoWw7cVK4Aj+fns+c54EBP8cGqp/yK410=",
260
+
"lastModified": 1761236834,
261
+
"narHash": "sha256-+pthv6hrL5VLW2UqPdISGuLiUZ6SnAXdd2DdUE+fV2Q=",
195
262
"owner": "nixos",
196
263
"repo": "nixpkgs",
197
-
"rev": "fe51d34885f7b5e3e7b59572796e1bcb427eccb1",
264
+
"rev": "d5faa84122bc0a1fd5d378492efce4e289f8eac1",
198
265
"type": "github"
199
266
},
200
267
"original": {
···
206
273
},
207
274
"nixpkgs_current_stable": {
208
275
"locked": {
209
-
"lastModified": 1749857119,
210
-
"narHash": "sha256-tG5xUn3hFaPpAHYIvr2F88b+ovcIO5k1HqajFy7ZFPM=",
276
+
"lastModified": 1764522689,
277
+
"narHash": "sha256-SqUuBFjhl/kpDiVaKLQBoD8TLD+/cTUzzgVFoaHrkqY=",
211
278
"owner": "NixOS",
212
279
"repo": "nixpkgs",
213
-
"rev": "5f4f306bea96741f1588ea4f450b2a2e29f42b98",
280
+
"rev": "8bb5646e0bed5dbd3ab08c7a7cc15b75ab4e1d0f",
214
281
"type": "github"
215
282
},
216
283
"original": {
217
284
"owner": "NixOS",
218
-
"ref": "nixos-25.05",
285
+
"ref": "nixos-25.11",
219
286
"repo": "nixpkgs",
220
287
"type": "github"
221
288
}
222
289
},
223
290
"root": {
224
291
"inputs": {
292
+
"colmena_benchmarking": "colmena_benchmarking",
225
293
"crane": "crane",
226
294
"fenix": "fenix",
227
-
"flake-compat": "flake-compat",
295
+
"flake-compat": "flake-compat_2",
228
296
"flake-parts": "flake-parts",
229
297
"git-hooks": "git-hooks",
230
298
"linux-systems": "linux-systems",
231
-
"nixpkgs": "nixpkgs_2",
299
+
"nixpkgs": "nixpkgs_3",
232
300
"nixpkgs_current_stable": "nixpkgs_current_stable",
233
301
"systems": "systems",
234
302
"treefmt-nix": "treefmt-nix"
···
237
305
"rust-analyzer-src": {
238
306
"flake": false,
239
307
"locked": {
240
-
"lastModified": 1750093996,
241
-
"narHash": "sha256-Nw/TcDo3OgsEgyZ651iCcTILGaQRxBfCdgI9pVOD6rk=",
308
+
"lastModified": 1764525349,
309
+
"narHash": "sha256-vR3vU9AwzMsBvjNeeG2inA5W/2MwseFk5NIIrLFEMHk=",
242
310
"owner": "rust-lang",
243
311
"repo": "rust-analyzer",
244
-
"rev": "2c25e436c717d5f6b264dbb9b8f459d65384a253",
312
+
"rev": "d646b23f000d099d845f999c2c1e05b15d9cdc78",
245
313
"type": "github"
246
314
},
247
315
"original": {
···
251
319
"type": "github"
252
320
}
253
321
},
322
+
"stable": {
323
+
"locked": {
324
+
"lastModified": 1669735802,
325
+
"narHash": "sha256-qtG/o/i5ZWZLmXw108N2aPiVsxOcidpHJYNkT45ry9Q=",
326
+
"owner": "NixOS",
327
+
"repo": "nixpkgs",
328
+
"rev": "731cc710aeebecbf45a258e977e8b68350549522",
329
+
"type": "github"
330
+
},
331
+
"original": {
332
+
"owner": "NixOS",
333
+
"ref": "nixos-22.11",
334
+
"repo": "nixpkgs",
335
+
"type": "github"
336
+
}
337
+
},
254
338
"systems": {
255
339
"locked": {
256
340
"lastModified": 1681028828,
···
268
352
},
269
353
"treefmt-nix": {
270
354
"inputs": {
271
-
"nixpkgs": "nixpkgs_3"
355
+
"nixpkgs": "nixpkgs_4"
272
356
},
273
357
"locked": {
274
-
"lastModified": 1749194973,
275
-
"narHash": "sha256-eEy8cuS0mZ2j/r/FE0/LYBSBcIs/MKOIVakwHVuqTfk=",
358
+
"lastModified": 1762938485,
359
+
"narHash": "sha256-AlEObg0syDl+Spi4LsZIBrjw+snSVU4T8MOeuZJUJjM=",
276
360
"owner": "numtide",
277
361
"repo": "treefmt-nix",
278
-
"rev": "a05be418a1af1198ca0f63facb13c985db4cb3c5",
362
+
"rev": "5b4ee75aeefd1e2d5a1cc43cf6ba65eba75e83e4",
279
363
"type": "github"
280
364
},
281
365
"original": {
+31
-6
flake.nix
+31
-6
flake.nix
···
1
1
{
2
2
inputs = {
3
3
flake-parts.url = "github:hercules-ci/flake-parts";
4
-
flake-compat.url = "github:edolstra/flake-compat";
4
+
flake-compat.url = "https://git.lix.systems/lix-project/flake-compat/archive/main.tar.gz";
5
5
git-hooks.url = "github:cachix/git-hooks.nix";
6
6
systems.url = "github:nix-systems/default";
7
7
crane.url = "github:ipetkov/crane";
···
14
14
linux-systems.url = "github:nix-systems/default-linux";
15
15
16
16
# testing inputs
17
-
nixpkgs_current_stable.url = "github:NixOS/nixpkgs/nixos-25.05";
17
+
nixpkgs_current_stable.url = "github:NixOS/nixpkgs/nixos-25.11";
18
+
19
+
# benchmarking
20
+
colmena_benchmarking.url = "github:zhaofengli/colmena/v0.4.0";
18
21
};
19
22
outputs =
20
23
{
···
33
36
./nix/hooks.nix # pre-commit hooks
34
37
./nix/utils.nix # utility functions
35
38
./nix/shells.nix
36
-
./nix/checks.nix
37
-
./wire/cli
38
-
./wire/key_agent
39
+
./nix/tests.nix
40
+
./crates/cli
41
+
./crates/key_agent
39
42
./doc
40
43
./tests/nix
44
+
./runtime
45
+
./bench/runner.nix
41
46
];
42
47
systems = import systems;
43
48
49
+
flake = {
50
+
makeHive = import ./runtime/makeHive.nix;
51
+
hydraJobs =
52
+
let
53
+
inherit (inputs.nixpkgs) lib;
54
+
in
55
+
{
56
+
packages = {
57
+
inherit (self.packages.x86_64-linux) docs;
58
+
}
59
+
// lib.genAttrs [ "x86_64-linux" "aarch64-linux" ] (system: {
60
+
inherit (self.packages.${system}) wire wire-small cargo-tests;
61
+
});
62
+
63
+
tests = lib.filterAttrs (n: _: (lib.hasPrefix "vm" n)) self.checks.x86_64-linux;
64
+
inherit (self) devShells;
65
+
};
66
+
};
67
+
44
68
perSystem =
45
69
{
46
70
pkgs,
···
62
86
# docs only
63
87
alejandra.enable = true;
64
88
rustfmt.enable = true;
65
-
just.enable = true;
66
89
prettier.enable = true;
67
90
protolint.enable = true;
68
91
taplo.enable = true;
92
+
ruff-format.enable = true;
69
93
};
70
94
settings.formatter = {
71
95
nixfmt.excludes = [ "doc/snippets/*.nix" ];
96
+
prettier.excludes = [ ".sqlx/*" ];
72
97
alejandra = {
73
98
includes = lib.mkForce [ "doc/snippets/*.nix" ];
74
99
};
+15
garnix.yaml
+15
garnix.yaml
···
1
+
builds:
2
+
- exclude: []
3
+
include:
4
+
- packages.x86_64-linux.docs
5
+
- packages.x86_64-linux.docs-unstable
6
+
- packages.*.wire
7
+
- packages.*.wire-small
8
+
branch: trunk
9
+
- exclude: []
10
+
include:
11
+
- packages.x86_64-linux.docs
12
+
- packages.x86_64-linux.docs-unstable
13
+
- packages.*.wire
14
+
- packages.*.wire-small
15
+
branch: stable
-7
justfile
-7
justfile
···
1
-
# vim: set ft=make :
2
-
3
-
build-dhat:
4
-
cargo build --profile profiling --features dhat-heap
5
-
@echo 'dhat binaries in target/profiling'
6
-
@echo 'Example:'
7
-
@echo 'WIRE_RUNTIME=/nix/store/...-runtime WIRE_KEY_AGENT=/nix/store/...-key_agent-0.1.0 PROJECT/target/profiling/wire apply ...'
-21
nix/checks.nix
-21
nix/checks.nix
···
1
-
{
2
-
perSystem =
3
-
{
4
-
craneLib,
5
-
commonArgs,
6
-
...
7
-
}:
8
-
{
9
-
checks.wire-nextest = craneLib.cargoNextest (
10
-
{
11
-
partitions = 2;
12
-
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
13
-
cargoNextestPartitionsExtraArgs = builtins.concatStringsSep " " [
14
-
"--no-tests pass"
15
-
];
16
-
17
-
}
18
-
// commonArgs
19
-
);
20
-
};
21
-
}
+27
nix/hooks.nix
+27
nix/hooks.nix
···
4
4
toolchain,
5
5
config,
6
6
lib,
7
+
pkgs,
7
8
...
8
9
}:
9
10
{
···
15
16
enable = true;
16
17
settings.edit = true;
17
18
};
19
+
zizmor.enable = true;
18
20
clippy = {
19
21
enable = true;
22
+
settings.extraArgs = "--tests";
20
23
packageOverrides = {
21
24
inherit (toolchain) cargo clippy;
22
25
};
23
26
};
27
+
ruff.enable = true;
24
28
cargo-check = {
25
29
enable = true;
26
30
package = toolchain.cargo;
···
29
33
enable = true;
30
34
name = "nix fmt";
31
35
entry = "${lib.getExe config.formatter} --no-cache";
36
+
};
37
+
ty = {
38
+
enable = true;
39
+
name = "ty check";
40
+
files = "\\.py$";
41
+
entry = lib.getExe (
42
+
pkgs.writeShellScriptBin "ty-check" ''
43
+
cd tests/nix
44
+
${lib.getExe pkgs.uv} run ty check
45
+
''
46
+
);
47
+
};
48
+
machete = {
49
+
enable = true;
50
+
name = "cargo-machete";
51
+
files = "\\.(rs|toml)$";
52
+
entry = lib.getExe pkgs.cargo-machete;
53
+
};
54
+
typos = {
55
+
enable = true;
56
+
settings = {
57
+
configPath = "typos.toml";
58
+
};
32
59
};
33
60
34
61
};
+6
-3
nix/shells.nix
+6
-3
nix/shells.nix
···
18
18
cfg.settings.enabledPackages
19
19
cfg.settings.package
20
20
21
-
pkgs.just
22
-
pkgs.cargo-nextest
23
21
pkgs.pnpm
22
+
pkgs.nodejs
23
+
pkgs.sqlx-cli
24
+
pkgs.sqlite
25
+
pkgs.turso
26
+
pkgs.zstd
27
+
pkgs.uv
24
28
];
25
29
26
30
PROTOC = lib.getExe pkgs.protobuf;
27
31
shellHook = builtins.concatStringsSep "\n" [
28
32
cfg.installationScript
29
33
''
30
-
export WIRE_RUNTIME=$(realpath ./runtime)
31
34
export WIRE_TEST_DIR=$(realpath ./tests/rust)
32
35
''
33
36
];
+38
nix/tests.nix
+38
nix/tests.nix
···
1
+
{
2
+
perSystem =
3
+
{
4
+
craneLib,
5
+
pkgs,
6
+
commonArgs,
7
+
...
8
+
}:
9
+
let
10
+
tests = craneLib.buildPackage (
11
+
{
12
+
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
13
+
doCheck = false;
14
+
15
+
doNotPostBuildInstallCargoBinaries = true;
16
+
17
+
buildPhase = ''
18
+
cargo test --no-run
19
+
'';
20
+
21
+
installPhaseCommand = ''
22
+
mkdir -p $out
23
+
cp $(ls target/debug/deps/{wire,lib,key_agent}-* | grep -v "\.d") $out
24
+
'';
25
+
}
26
+
// commonArgs
27
+
);
28
+
in
29
+
{
30
+
packages.cargo-tests = pkgs.writeShellScriptBin "run-tests" ''
31
+
set -e
32
+
for item in "${tests}"/*; do
33
+
echo "running $item"
34
+
"$item"
35
+
done
36
+
'';
37
+
};
38
+
}
+3
-2
nix/utils.nix
+3
-2
nix/utils.nix
···
18
18
src = toSource {
19
19
root = ../.;
20
20
fileset = unions [
21
-
../wire
21
+
../.cargo
22
+
../.sqlx
23
+
../crates
22
24
../Cargo.toml
23
25
../Cargo.lock
24
26
];
···
27
29
commonArgs = {
28
30
inherit src;
29
31
strictDeps = true;
30
-
WIRE_RUNTIME = ../runtime;
31
32
WIRE_TEST_DIR = ../tests/rust;
32
33
PROTOC = lib.getExe pkgs.protobuf;
33
34
};
+4
renovate.json
+4
renovate.json
+61
-27
runtime/evaluate.nix
+61
-27
runtime/evaluate.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
hive,
3
-
path,
4
6
nixosConfigurations ? { },
5
-
nixpkgs ? null,
6
7
}:
7
8
let
8
-
module = import ./module.nix;
9
+
module = import ./module;
9
10
10
-
mergedHive =
11
-
{
12
-
meta = { };
11
+
mergedHive = {
12
+
meta = { };
13
13
14
-
defaults = { };
15
-
}
16
-
// hive
17
-
# Map nixosConfigurations into nodes
18
-
// (builtins.mapAttrs (name: value: {
19
-
imports =
20
-
value._module.args.modules
21
-
# Include any custom stuff within `colmena`
22
-
++ [ hive.${name} or { } ];
23
-
}) nixosConfigurations);
14
+
defaults = { };
15
+
}
16
+
// hive
17
+
# Map nixosConfigurations into nodes
18
+
// (builtins.mapAttrs (name: value: {
19
+
imports =
20
+
value._module.args.modules
21
+
# Include any custom stuff within `colmena`
22
+
++ [ hive.${name} or { } ];
23
+
}) nixosConfigurations);
24
24
25
25
nodeNames = builtins.filter (
26
26
name:
27
27
!builtins.elem name [
28
28
"meta"
29
29
"defaults"
30
+
"-"
30
31
]
31
32
) (builtins.filter (name: builtins.hasAttr name hive) (builtins.attrNames mergedHive));
32
33
33
-
resolvedNixpkgs =
34
+
resolveNixpkgs =
35
+
value: help:
36
+
# support `<nixpkgs>`
37
+
if builtins.isPath value then
38
+
import value { }
39
+
# support npins sources passed directly
40
+
else if value ? "outPath" then
41
+
import value { }
42
+
# support `import <nixpkgs> { }`
43
+
else if builtins.isAttrs value then
44
+
value
45
+
else
46
+
builtins.abort "${help} was not a path, { outPath, .. }, or attrset. Was type: ${builtins.typeOf value}";
47
+
48
+
hiveGlobalNixpkgs =
34
49
if mergedHive.meta ? "nixpkgs" then
35
-
# support '<nixpkgs>' and 'import <nixpkgs> {}'
36
-
if builtins.isPath mergedHive.meta.nixpkgs then
37
-
import mergedHive.meta.nixpkgs { }
50
+
(resolveNixpkgs mergedHive.meta.nixpkgs "meta.nixpkgs")
51
+
else
52
+
builtins.abort "makeHive called without meta.nixpkgs specified.";
53
+
54
+
getNodeNixpkgs =
55
+
name:
56
+
if mergedHive.meta ? "nodeNixpkgs" then
57
+
if mergedHive.meta.nodeNixpkgs ? "${name}" then
58
+
(resolveNixpkgs mergedHive.meta.nodeNixpkgs.${name} "meta.nodeNixpkgs.${name}")
38
59
else
39
-
mergedHive.meta.nixpkgs
60
+
hiveGlobalNixpkgs
40
61
else
41
-
import nixpkgs { };
62
+
hiveGlobalNixpkgs;
63
+
64
+
nixpkgsIsFlake = nixpkgs: nixpkgs.lib.hasSuffix "-source" nixpkgs.path;
42
65
43
66
evaluateNode =
44
67
name:
45
68
let
46
-
evalConfig = import (resolvedNixpkgs.path + "/nixos/lib/eval-config.nix");
69
+
nixpkgs = getNodeNixpkgs name;
70
+
evalConfig = import (nixpkgs.path + "/nixos/lib/eval-config.nix");
47
71
in
48
72
evalConfig {
49
73
modules = [
···
51
75
52
76
mergedHive.defaults
53
77
mergedHive.${name}
54
-
];
78
+
]
79
+
++ (nixpkgs.lib.optional (nixpkgsIsFlake nixpkgs) {
80
+
config.nixpkgs.flake.source = nixpkgs.lib.mkDefault nixpkgs.path;
81
+
});
55
82
system = null;
56
83
specialArgs = {
57
84
inherit name nodes;
58
-
} // mergedHive.meta.specialArgs or { };
85
+
}
86
+
// mergedHive.meta.specialArgs or { }
87
+
// mergedHive.meta.nodeSpecialArgs.${name} or { };
59
88
};
89
+
60
90
nodes = builtins.listToAttrs (
61
91
map (name: {
62
92
inherit name;
···
67
97
getTopLevel = node: (evaluateNode node).config.system.build.toplevel.drvPath;
68
98
in
69
99
rec {
70
-
inherit evaluateNode getTopLevel nodes;
100
+
inherit nodes;
71
101
102
+
topLevels = builtins.mapAttrs (name: _: getTopLevel name) nodes;
72
103
inspect = {
73
-
inherit path;
104
+
_schema = 1;
105
+
74
106
nodes = builtins.mapAttrs (_: v: v.config.deployment) nodes;
75
107
};
108
+
109
+
names = nodeNames;
76
110
}
+14
runtime/makeHive.nix
+14
runtime/makeHive.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
{
5
+
nixosConfigurations ? { },
6
+
...
7
+
}@hive:
8
+
import ./evaluate.nix {
9
+
inherit
10
+
nixosConfigurations
11
+
;
12
+
13
+
hive = builtins.removeAttrs hive [ "nixosConfigurations" ];
14
+
}
+79
runtime/module/config.nix
+79
runtime/module/config.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
{
5
+
pkgs,
6
+
lib,
7
+
config,
8
+
...
9
+
}:
10
+
{
11
+
config = {
12
+
systemd = {
13
+
paths = lib.mapAttrs' (
14
+
_name: value:
15
+
lib.nameValuePair "${value.name}-key" {
16
+
description = "Monitor changes to ${value.path}. You should Require ${value.service} instead of this.";
17
+
pathConfig = {
18
+
PathExists = value.path;
19
+
PathChanged = value.path;
20
+
Unit = "${value.name}-key.service";
21
+
};
22
+
}
23
+
) config.deployment.keys;
24
+
25
+
services = lib.mapAttrs' (
26
+
_name: value:
27
+
lib.nameValuePair "${value.name}-key" {
28
+
description = "Service that requires ${value.path}";
29
+
path = [
30
+
pkgs.inotify-tools
31
+
pkgs.coreutils
32
+
];
33
+
script = ''
34
+
MSG="Key ${value.path} exists."
35
+
systemd-notify --ready --status="$MSG"
36
+
37
+
echo "waiting to fail if the key is removed..."
38
+
39
+
while inotifywait -e delete_self "${value.path}"; do
40
+
MSG="Key ${value.path} no longer exists."
41
+
42
+
systemd-notify --status="$MSG"
43
+
echo $MSG
44
+
45
+
exit 1
46
+
done
47
+
'';
48
+
unitConfig = {
49
+
ConditionPathExists = value.path;
50
+
};
51
+
serviceConfig = {
52
+
Type = "simple";
53
+
Restart = "no";
54
+
NotifyAccess = "all";
55
+
RemainAfterExit = "yes";
56
+
};
57
+
}
58
+
) config.deployment.keys;
59
+
};
60
+
61
+
deployment = {
62
+
_keys = lib.mapAttrsToList (
63
+
_: value:
64
+
value
65
+
// {
66
+
source = {
67
+
# Attach type to internally tag serde enum
68
+
t = builtins.replaceStrings [ "path" "string" "list" ] [ "Path" "String" "Command" ] (
69
+
builtins.typeOf value.source
70
+
);
71
+
c = value.source;
72
+
};
73
+
}
74
+
) config.deployment.keys;
75
+
76
+
_hostPlatform = config.nixpkgs.hostPlatform.system;
77
+
};
78
+
};
79
+
}
+6
runtime/module/default.nix
+6
runtime/module/default.nix
+218
runtime/module/options.nix
+218
runtime/module/options.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
{
5
+
lib,
6
+
name,
7
+
...
8
+
}:
9
+
let
10
+
inherit (lib) types;
11
+
in
12
+
{
13
+
imports =
14
+
let
15
+
inherit (lib) mkAliasOptionModule;
16
+
in
17
+
[
18
+
(mkAliasOptionModule [ "deployment" "targetHost" ] [ "deployment" "target" "hosts" ])
19
+
(mkAliasOptionModule [ "deployment" "targetUser" ] [ "deployment" "target" "user" ])
20
+
(mkAliasOptionModule [ "deployment" "targetPort" ] [ "deployment" "target" "port" ])
21
+
];
22
+
23
+
options.deployment = {
24
+
target = lib.mkOption {
25
+
type = types.submodule {
26
+
imports = [
27
+
(lib.mkAliasOptionModule [ "host" ] [ "hosts" ])
28
+
];
29
+
options = {
30
+
hosts = lib.mkOption {
31
+
type = types.coercedTo types.str lib.singleton (types.listOf types.str);
32
+
description = "IPs or hostnames to attempt to connect to. They are tried in order.";
33
+
default = lib.singleton name;
34
+
apply = lib.unique;
35
+
};
36
+
user = lib.mkOption {
37
+
type = types.str;
38
+
description = "User to use for SSH. The user must be atleast `wheel` and must use an SSH key or similar
39
+
non-interactive login method. More information can be found at https://wire.althaea.zone/guides/non-root-user";
40
+
default = "root";
41
+
};
42
+
port = lib.mkOption {
43
+
type = types.int;
44
+
default = 22;
45
+
description = "SSH port to use.";
46
+
};
47
+
};
48
+
};
49
+
description = "Describes the target for this node";
50
+
default = { };
51
+
};
52
+
53
+
buildOnTarget = lib.mkOption {
54
+
type = types.bool;
55
+
default = false;
56
+
description = "Whether to build the system on the target host or not.";
57
+
};
58
+
59
+
allowLocalDeployment = lib.mkOption {
60
+
type = types.bool;
61
+
default = true;
62
+
description = "Whether to allow or deny this node being applied to localhost when the host's hostname matches the
63
+
node's name.";
64
+
};
65
+
66
+
tags = lib.mkOption {
67
+
type = types.listOf types.str;
68
+
default = [ ];
69
+
description = "Tags for node.";
70
+
example = [
71
+
"arm"
72
+
"cloud"
73
+
];
74
+
};
75
+
76
+
privilegeEscalationCommand = lib.mkOption {
77
+
type = types.listOf types.str;
78
+
description = "Command to elevate.";
79
+
default = [
80
+
"sudo"
81
+
"--"
82
+
];
83
+
};
84
+
85
+
replaceUnknownProfiles = lib.mkOption {
86
+
type = types.bool;
87
+
description = "No-op, colmena compatibility";
88
+
default = true;
89
+
};
90
+
91
+
sshOptions = lib.mkOption {
92
+
type = types.listOf types.str;
93
+
description = "No-op, colmena compatibility";
94
+
default = [ ];
95
+
};
96
+
97
+
_keys = lib.mkOption {
98
+
internal = true;
99
+
readOnly = true;
100
+
};
101
+
102
+
_hostPlatform = lib.mkOption {
103
+
internal = true;
104
+
readOnly = true;
105
+
};
106
+
107
+
keys = lib.mkOption {
108
+
type = types.attrsOf (
109
+
types.submodule (
110
+
{
111
+
name,
112
+
config,
113
+
...
114
+
}:
115
+
{
116
+
imports =
117
+
let
118
+
inherit (lib) mkAliasOptionModule;
119
+
in
120
+
[
121
+
(mkAliasOptionModule [ "keyFile" ] [ "source" ])
122
+
(mkAliasOptionModule [ "keyCommand" ] [ "source" ])
123
+
(mkAliasOptionModule [ "text" ] [ "source" ])
124
+
];
125
+
options = {
126
+
name = lib.mkOption {
127
+
type = types.str;
128
+
default = name;
129
+
description = "Filename of the secret.";
130
+
};
131
+
destDir = lib.mkOption {
132
+
type = types.path;
133
+
default = "/run/keys/";
134
+
description = "Destination directory for the secret. Change this to something other than `/run/keys/` for keys to persist past reboots.";
135
+
};
136
+
path = lib.mkOption {
137
+
internal = true;
138
+
type = types.path;
139
+
default =
140
+
if lib.hasSuffix "/" config.destDir then
141
+
"${config.destDir}${config.name}"
142
+
else
143
+
"${config.destDir}/${config.name}";
144
+
description = "Path that the key is deployed to.";
145
+
};
146
+
service = lib.mkOption {
147
+
internal = true;
148
+
type = types.str;
149
+
default = "${config.name}-key.service";
150
+
description = "Name of the systemd service that represents this key.";
151
+
};
152
+
group = lib.mkOption {
153
+
type = types.str;
154
+
default = "root";
155
+
description = "Group to own the key. If this group does not exist this will silently fail and the key will be owned by gid 0.";
156
+
};
157
+
user = lib.mkOption {
158
+
type = types.str;
159
+
default = "root";
160
+
description = "User to own the key. If this user does not exist this will silently fail and the key will be owned by uid 0.";
161
+
};
162
+
permissions = lib.mkOption {
163
+
type = types.str;
164
+
default = "0600";
165
+
description = "Unix Octal permissions, in string format, for the key.";
166
+
};
167
+
source = lib.mkOption {
168
+
type = types.oneOf [
169
+
types.str
170
+
types.path
171
+
(types.listOf types.str)
172
+
];
173
+
description = "Source of the key. Either a path to a file, a literal string, or a command to generate the key.";
174
+
};
175
+
uploadAt = lib.mkOption {
176
+
type = types.enum [
177
+
"pre-activation"
178
+
"post-activation"
179
+
];
180
+
default = "pre-activation";
181
+
description = "When to upload the key. Either `pre-activation` or `post-activation`.";
182
+
};
183
+
environment = lib.mkOption {
184
+
type = types.attrsOf types.str;
185
+
default = { };
186
+
description = "Key-Value environment variables to use when creating the key if the key source is a command.";
187
+
};
188
+
};
189
+
}
190
+
)
191
+
);
192
+
description = "Secrets to be deployed to the node.";
193
+
default = { };
194
+
example = {
195
+
"wireless.env" = {
196
+
source = [
197
+
"gpg"
198
+
"--decrypt"
199
+
"secrets/wireless.env.gpg"
200
+
];
201
+
destDir = "/etc/keys/";
202
+
};
203
+
204
+
"arbfile.txt" = {
205
+
source = ./arbfile.txt;
206
+
destDir = "/etc/arbs/";
207
+
};
208
+
209
+
"arberfile.txt" = {
210
+
source = ''
211
+
Hello World
212
+
'';
213
+
destDir = "/etc/arbs/";
214
+
};
215
+
};
216
+
};
217
+
};
218
+
}
-199
runtime/module.nix
-199
runtime/module.nix
···
1
-
{
2
-
lib,
3
-
name,
4
-
config,
5
-
...
6
-
}:
7
-
let
8
-
inherit (lib) types;
9
-
in
10
-
{
11
-
imports =
12
-
let
13
-
inherit (lib) mkAliasOptionModule;
14
-
in
15
-
[
16
-
(mkAliasOptionModule [ "deployment" "targetHost" ] [ "deployment" "target" "host" ])
17
-
(mkAliasOptionModule [ "deployment" "targetUser" ] [ "deployment" "target" "user" ])
18
-
(mkAliasOptionModule [ "deployment" "targetPort" ] [ "deployment" "target" "port" ])
19
-
];
20
-
21
-
options.deployment = {
22
-
target = lib.mkOption {
23
-
type = types.submodule {
24
-
options = {
25
-
host = lib.mkOption {
26
-
type = types.str;
27
-
description = "Host to connect to.";
28
-
default = name;
29
-
};
30
-
hosts = lib.mkOption {
31
-
type = types.listOf types.str;
32
-
description = "Additional hosts to attempt to connect to, if `deployment.target.host` cannot be reached.";
33
-
default = lib.singleton name;
34
-
apply = list: lib.unique ([ name ] ++ list);
35
-
};
36
-
user = lib.mkOption {
37
-
type = types.str;
38
-
description = "User to use for ssh.";
39
-
default = "root";
40
-
};
41
-
port = lib.mkOption {
42
-
type = types.int;
43
-
default = 22;
44
-
description = "SSH port to use.";
45
-
};
46
-
};
47
-
};
48
-
description = "Describes the target for this node";
49
-
default = { };
50
-
};
51
-
52
-
buildOnTarget = lib.mkOption {
53
-
type = types.bool;
54
-
default = false;
55
-
description = "Whether to build the system on the target host or not.";
56
-
};
57
-
58
-
allowLocalDeployment = lib.mkOption {
59
-
type = types.bool;
60
-
default = true;
61
-
description = "No-op, colmena compatibility right now.";
62
-
};
63
-
64
-
tags = lib.mkOption {
65
-
type = types.listOf types.str;
66
-
default = [ ];
67
-
description = "Tags for node.";
68
-
example = [
69
-
"arm"
70
-
"cloud"
71
-
];
72
-
};
73
-
74
-
_keys = lib.mkOption {
75
-
internal = true;
76
-
readOnly = true;
77
-
};
78
-
79
-
_hostPlatform = lib.mkOption {
80
-
internal = true;
81
-
readOnly = true;
82
-
};
83
-
84
-
keys = lib.mkOption {
85
-
type = types.attrsOf (
86
-
types.submodule (
87
-
{
88
-
name,
89
-
config,
90
-
...
91
-
}:
92
-
{
93
-
imports =
94
-
let
95
-
inherit (lib) mkAliasOptionModule;
96
-
in
97
-
[
98
-
(mkAliasOptionModule [ "keyFile" ] [ "source" ])
99
-
(mkAliasOptionModule [ "keyCommand" ] [ "source" ])
100
-
(mkAliasOptionModule [ "text" ] [ "source" ])
101
-
];
102
-
options = {
103
-
name = lib.mkOption {
104
-
type = types.str;
105
-
default = name;
106
-
description = "Filename of the secret.";
107
-
};
108
-
destDir = lib.mkOption {
109
-
type = types.path;
110
-
default = "/run/keys/";
111
-
description = "Destination directory for the secret. Change this to something other than `/run/keys/` for keys to persist past reboots.";
112
-
};
113
-
path = lib.mkOption {
114
-
internal = true;
115
-
type = types.path;
116
-
default = "${config.destDir}/${config.name}";
117
-
};
118
-
group = lib.mkOption {
119
-
type = types.str;
120
-
default = "root";
121
-
description = "Group to own the key. If this group does not exist this will silently fail and the key will be owned by gid 0.";
122
-
};
123
-
user = lib.mkOption {
124
-
type = types.str;
125
-
default = "root";
126
-
description = "User to own the key. If this user does not exist this will silently fail and the key will be owned by uid 0.";
127
-
};
128
-
permissions = lib.mkOption {
129
-
type = types.str;
130
-
default = "0600";
131
-
description = "Unix Octal permissions, in string format, for the key.";
132
-
};
133
-
source = lib.mkOption {
134
-
type = types.oneOf [
135
-
types.str
136
-
types.path
137
-
(types.listOf types.str)
138
-
];
139
-
description = "Source of the key. Either a path to a file, a literal string, or a command to generate the key.";
140
-
};
141
-
uploadAt = lib.mkOption {
142
-
type = types.enum [
143
-
"pre-activation"
144
-
"post-activation"
145
-
];
146
-
default = "pre-activation";
147
-
description = "When to upload the key. Either `pre-activation` or `post-activation`.";
148
-
};
149
-
};
150
-
}
151
-
)
152
-
);
153
-
description = "Secrets to be deployed to the node.";
154
-
default = { };
155
-
example = {
156
-
"wireless.env" = {
157
-
source = [
158
-
"gpg"
159
-
"--decrypt"
160
-
"secrets/wireless.env.gpg"
161
-
];
162
-
destDir = "/etc/keys/";
163
-
};
164
-
165
-
"arbfile.txt" = {
166
-
source = ./arbfile.txt;
167
-
destDir = "/etc/arbs/";
168
-
};
169
-
170
-
"arberfile.txt" = {
171
-
source = ''
172
-
Hello World
173
-
'';
174
-
destDir = "/etc/arbs/";
175
-
};
176
-
};
177
-
};
178
-
};
179
-
180
-
config = {
181
-
deployment = {
182
-
_keys = lib.mapAttrsToList (
183
-
_: value:
184
-
value
185
-
// {
186
-
source = {
187
-
# Attach type to internally tag serde enum
188
-
t = builtins.replaceStrings [ "path" "string" "list" ] [ "Path" "String" "Command" ] (
189
-
builtins.typeOf value.source
190
-
);
191
-
c = value.source;
192
-
};
193
-
}
194
-
) config.deployment.keys;
195
-
196
-
_hostPlatform = config.nixpkgs.hostPlatform.system;
197
-
};
198
-
};
199
-
}
+1
tests/nix/.python-version
+1
tests/nix/.python-version
···
1
+
3.13
+37
-25
tests/nix/default.nix
+37
-25
tests/nix/default.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
self,
3
6
config,
···
20
23
lazyAttrsOf
21
24
;
22
25
cfg = config.wire.testing;
26
+
27
+
stripTyping =
28
+
value:
29
+
let
30
+
split = builtins.split "(from typing import TYPE_CHECKING|# typing-end)" value;
31
+
in
32
+
(builtins.elemAt split 0) + (builtins.elemAt split 4);
23
33
in
24
34
{
25
35
imports = [
26
36
./suite/test_remote_deploy
27
37
./suite/test_local_deploy
28
38
./suite/test_keys
39
+
./suite/test_stdin
29
40
];
30
41
options.wire.testing = mkOption {
31
42
type = attrsOf (
···
40
51
type = lines;
41
52
default = '''';
42
53
description = "test script for runNixOSTest";
54
+
apply = stripTyping;
43
55
};
44
56
testDir = mkOption {
45
57
default = "${self}/tests/nix/suite/${name}";
···
77
89
{
78
90
testName,
79
91
opts,
80
-
nix,
81
92
nixpkgs,
82
93
}:
83
94
let
84
95
# TODO: Update once #126 is solved.
85
96
nixPackage = nixpkgs.legacyPackages.lix;
86
-
sanitizeName =
97
+
sanitiseName =
87
98
str: lib.strings.sanitizeDerivationName (builtins.replaceStrings [ "." ] [ "_" ] str);
88
-
identifier = sanitizeName "${nixpkgs.legacyPackages.lib.trivial.release}-${nixPackage.name}";
99
+
identifier = sanitiseName "${nixpkgs.legacyPackages.lib.trivial.release}-${nixPackage.name}";
89
100
path = "tests/nix/suite/${testName}";
101
+
102
+
flakeDirFileset = lib.fileset.toSource {
103
+
root = ../..;
104
+
fileset = lib.fileset.union ./. (
105
+
lib.fileset.fileFilter (file: (file.hasExt "nix") || (file.hasExt "lock")) ../..
106
+
);
107
+
};
108
+
90
109
injectedFlakeDir = pkgs.runCommand "injected-flake-dir" { } ''
91
-
cp -r ${../..} $out
110
+
cp -r ${flakeDirFileset} $out
92
111
chmod -R +w $out
93
112
substituteInPlace $out/${path}/hive.nix --replace-fail @IDENT@ ${identifier}
94
113
'';
···
101
120
defaults =
102
121
{
103
122
pkgs,
104
-
evaluateHive,
105
123
...
106
124
}:
107
125
let
108
-
hive = evaluateHive {
109
-
nixpkgs = pkgs.path;
110
-
path = injectedFlakeDir;
111
-
hive = builtins.scopedImport {
112
-
__nixPath = _b: null;
113
-
__findFile = path: name: if name == "nixpkgs" then pkgs.path else throw "oops!!";
114
-
} "${injectedFlakeDir}/${path}/hive.nix";
115
-
};
126
+
hive = builtins.scopedImport {
127
+
__nixPath = _b: null;
128
+
__findFile = _path: name: if name == "nixpkgs" then pkgs.path else throw "oops!!";
129
+
} "${injectedFlakeDir}/${path}/hive.nix";
116
130
nodes = mapAttrsToList (_: val: val.config.system.build.toplevel.drvPath) hive.nodes;
117
131
# fetch **all** dependencies of a flake
118
132
# it's called fetchLayer because my naming skills are awful
···
135
149
};
136
150
137
151
environment.systemPackages = [ pkgs.ripgrep ];
152
+
environment.variables.XDG_RUNTIME_DIR = "/tmp";
138
153
virtualisation.memorySize = 4096;
139
154
virtualisation.additionalPaths = flatten [
140
155
injectedFlakeDir
···
143
158
];
144
159
};
145
160
node.specialArgs = {
146
-
evaluateHive = import "${self}/runtime/evaluate.nix";
147
161
testName = name;
148
162
snakeOil = import "${pkgs.path}/nixos/tests/ssh-keys.nix" pkgs;
149
163
inherit (opts) testDir;
150
-
inherit (self'.packages) wire-small;
164
+
inherit (self'.packages) wire-small-dev;
151
165
};
152
166
# NOTE: there is surely a better way of doing this in a more
153
167
# "controlled" manner, but until a need is asked for, this will remain
154
168
# as is.
155
-
testScript =
156
-
''
157
-
start_all()
169
+
testScript = ''
170
+
start_all()
158
171
159
-
TEST_DIR="${injectedFlakeDir}/${path}"
172
+
TEST_DIR="${injectedFlakeDir}/${path}"
160
173
161
-
${builtins.readFile ./tools.py}
162
-
''
163
-
+ lib.concatStringsSep "\n" (mapAttrsToList (_: value: value._wire.testScript) value.nodes)
164
-
+ opts.testScript;
174
+
${stripTyping (builtins.readFile ./tools/__init__.py)}
175
+
''
176
+
+ lib.concatStringsSep "\n" (mapAttrsToList (_: value: value._wire.testScript) value.nodes)
177
+
+ opts.testScript;
165
178
};
166
179
};
167
180
in
···
169
182
checks = builtins.listToAttrs (
170
183
builtins.map (
171
184
{
172
-
nix,
173
185
nixpkgs,
174
186
testName,
187
+
...
175
188
}:
176
189
let
177
190
opts = cfg.${testName};
···
180
193
inherit
181
194
testName
182
195
opts
183
-
nix
184
196
nixpkgs
185
197
;
186
198
}
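The stripTyping helper above keeps only the text before the "from typing import TYPE_CHECKING" marker and after the "# typing-end" marker, so the type-checking preamble in each script.py never reaches the generated testScript. A minimal Python sketch of the same split, assuming those two marker strings (the strip_typing name and the use of re.split are illustrative, not part of this change):

import re

def strip_typing(script: str) -> str:
    # with a capturing group, re.split keeps the markers in the result:
    # [before, marker1, between, marker2, after]
    parts = re.split(r"(from typing import TYPE_CHECKING|# typing-end)", script)
    # keep only the text before the first marker and after the second,
    # mirroring elemAt 0 + elemAt 4 of builtins.split in the Nix helper
    return parts[0] + parts[4]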
+18
tests/nix/pyproject.toml
+18
tests/nix/pyproject.toml
···
1
+
[project]
2
+
name = "wire-vm-tests"
3
+
version = "0.0.0"
4
+
requires-python = ">=3.13"
5
+
dependencies = [
6
+
"colorama>=0.4.6",
7
+
"ipython>=9.8.0",
8
+
"junit-xml>=1.9",
9
+
"nixos-test-driver",
10
+
"ptpython>=3.0.32",
11
+
"remote-pdb>=2.1.0",
12
+
]
13
+
14
+
[tool.uv.sources]
15
+
nixos-test-driver = { git = "https://github.com/NixOS/nixpkgs", subdirectory = "nixos/lib/test-driver/src", branch = "nixos-25.11" }
16
+
17
+
[dependency-groups]
18
+
dev = ["ty>=0.0.4"]
+4
-58
tests/nix/suite/test_keys/default.nix
+4
-58
tests/nix/suite/test_keys/default.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
wire.testing.test_keys = {
3
6
nodes.deployer = {
···
7
10
nodes.receiver = {
8
11
_wire.receiver = true;
9
12
};
10
-
testScript = ''
11
-
deployer_so = collect_store_objects(deployer)
12
-
receiver_so = collect_store_objects(receiver)
13
-
14
-
# build all nodes without any keys
15
-
deployer.succeed(f"wire apply --no-progress --on receiver --path {TEST_DIR}/hive.nix --no-keys -vvv >&2")
16
-
17
-
receiver.wait_for_unit("sshd.service")
18
-
19
-
# --no-keys should never push a key
20
-
receiver.fail("test -f /run/keys/source_string")
21
-
deployer.fail("test -f /run/keys/source_string")
22
-
23
-
def test_keys(target, target_object):
24
-
deployer.succeed(f"wire apply keys --on {target} --no-progress --path {TEST_DIR}/hive.nix -vvv >&2")
25
-
26
-
keys = [
27
-
("/run/keys/source_string", "hello_world_source", "root root 600"),
28
-
("/etc/keys/file", "hello_world_file", "root root 644"),
29
-
("/home/owner/some/deep/path/command", "hello_world_command", "owner owner 644"),
30
-
]
31
-
32
-
for path, value, permissions in keys:
33
-
# test existence & value
34
-
source_string = target_object.succeed(f"cat {path}")
35
-
assert value in source_string, f"{path} has correct contents ({target})"
36
-
37
-
stat = target_object.succeed(f"stat -c '%U %G %a' {path}").rstrip()
38
-
assert permissions == stat, f"{path} has correct permissions ({target})"
39
-
40
-
def perform_routine(target, target_object):
41
-
test_keys(target, target_object)
42
-
43
-
# Mess with the keys to make sure that every push refreshes the permissions
44
-
target_object.succeed("echo 'incorrect_value' > /run/keys/source_string")
45
-
target_object.succeed("chown 600 /etc/keys/file")
46
-
# Test having a key that doesn't exist mixed with keys that do
47
-
target_object.succeed("rm /home/owner/some/deep/path/command")
48
-
49
-
# Test keys twice to ensure the operation is idempotent,
50
-
# especially around directory creation.
51
-
test_keys(target, target_object)
52
-
53
-
perform_routine("receiver", receiver)
54
-
perform_routine("deployer", deployer)
55
-
56
-
new_deployer_store_objects = collect_store_objects(deployer).difference(deployer_so)
57
-
new_receiver_store_objects = collect_store_objects(receiver).difference(receiver_so)
58
-
59
-
# no one should have any keys introduced by the operation
60
-
for node, objects in [
61
-
(deployer, new_deployer_store_objects),
62
-
(receiver, new_receiver_store_objects),
63
-
]:
64
-
assert_store_not_posioned(node, "hello_world_source", objects)
65
-
assert_store_not_posioned(node, "hello_world_file", objects)
66
-
assert_store_not_posioned(node, "hello_world_command", objects)
67
-
'';
13
+
testScript = builtins.readFile ./script.py;
68
14
};
69
15
}
+16
-3
tests/nix/suite/test_keys/hive.nix
+16
-3
tests/nix/suite/test_keys/hive.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
let
2
-
mkHiveNode = import ../utils.nix { testName = "test_keys-@IDENT@"; };
5
+
inherit (import ../utils.nix { testName = "test_keys-@IDENT@"; }) makeHive mkHiveNode;
3
6
in
4
-
{
5
-
meta.nixpkgs = import <nixpkgs> { system = "x86_64-linux"; };
7
+
makeHive {
8
+
meta.nixpkgs = import <nixpkgs> { localSystem = "x86_64-linux"; };
6
9
defaults = {
7
10
deployment.keys = {
8
11
source_string = {
12
+
# key with different name to attr name
13
+
name = "source_string_name";
9
14
source = ''
10
15
hello_world_source
11
16
'';
···
27
32
user = "owner";
28
33
group = "owner";
29
34
destDir = "/home/owner/some/deep/path";
35
+
};
36
+
environment = {
37
+
source = [
38
+
"bash"
39
+
"-c"
40
+
"echo $ENV_STRING"
41
+
];
42
+
environment.ENV_STRING = "string_from_environment";
30
43
};
31
44
};
32
45
+123
tests/nix/suite/test_keys/script.py
+123
tests/nix/suite/test_keys/script.py
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
from typing import TYPE_CHECKING
5
+
6
+
if TYPE_CHECKING:
7
+
from test_driver.machine import Machine
8
+
from tools import collect_store_objects, assert_store_not_poisoned
9
+
10
+
deployer: Machine = None # type: ignore[invalid-assignment]
11
+
receiver: Machine = None # type: ignore[invalid-assignment]
12
+
TEST_DIR = ""
13
+
14
+
# typing-end
15
+
16
+
deployer_so = collect_store_objects(deployer)
17
+
receiver_so = collect_store_objects(receiver)
18
+
19
+
# build receiver with no keys
20
+
deployer.succeed(
21
+
f"wire apply --no-progress --on receiver --path {TEST_DIR}/hive.nix --no-keys --ssh-accept-host -vvv >&2"
22
+
)
23
+
24
+
receiver.wait_for_unit("sshd.service")
25
+
26
+
# --no-keys should never push a key
27
+
receiver.fail("test -f /run/keys/source_string_name")
28
+
deployer.fail("test -f /run/keys/source_string_name")
29
+
30
+
# key services are created
31
+
receiver.succeed("systemctl cat source_string_name-key.service")
32
+
33
+
_, is_failed = receiver.execute("systemctl is-failed source_string_name-key.service")
34
+
assert is_failed == "inactive\n", (
35
+
f"source_string_name-key.service must be inactive before key exists ({is_failed})"
36
+
)
37
+
38
+
39
+
def test_keys(target, target_object, non_interactive):
40
+
if non_interactive:
41
+
deployer.succeed(
42
+
f"wire apply keys --on {target} --no-progress --path {TEST_DIR}/hive.nix --non-interactive --ssh-accept-host -vvv >&2"
43
+
)
44
+
else:
45
+
deployer.succeed(
46
+
f"wire apply keys --on {target} --no-progress --path {TEST_DIR}/hive.nix --ssh-accept-host -vvv >&2"
47
+
)
48
+
49
+
keys = [
50
+
(
51
+
"/run/keys/source_string_name",
52
+
"hello_world_source",
53
+
"root root 600",
54
+
"source_string_name",
55
+
),
56
+
("/etc/keys/file", "hello_world_file", "root root 644", "file"),
57
+
(
58
+
"/home/owner/some/deep/path/command",
59
+
"hello_world_command",
60
+
"owner owner 644",
61
+
"command",
62
+
),
63
+
(
64
+
"/run/keys/environment",
65
+
"string_from_environment",
66
+
"root root 600",
67
+
"environment",
68
+
),
69
+
]
70
+
71
+
for path, value, permissions, name in keys:
72
+
# test existence & value
73
+
source_string = target_object.succeed(f"cat {path}")
74
+
assert value in source_string, f"{path} has correct contents ({target})"
75
+
76
+
stat = target_object.succeed(f"stat -c '%U %G %a' {path}").rstrip()
77
+
assert permissions == stat, f"{path} has correct permissions ({target})"
78
+
79
+
80
+
def perform_routine(target, target_object, non_interactive):
81
+
test_keys(target, target_object, non_interactive)
82
+
83
+
# only check systemd units on receiver since deployer applies are one-time only
84
+
if target == "receiver":
85
+
target_object.succeed("systemctl start source_string_name-key.path")
86
+
target_object.succeed("systemctl start command-key.path")
87
+
target_object.wait_for_unit("source_string_name-key.service")
88
+
target_object.wait_for_unit("command-key.service")
89
+
90
+
# Mess with the keys to make sure that every push refreshes the permissions
91
+
target_object.succeed("echo 'incorrect_value' > /run/keys/source_string")
92
+
target_object.succeed("chown 600 /etc/keys/file")
93
+
# Test having a key that doesn't exist mixed with keys that do
94
+
target_object.succeed("rm /home/owner/some/deep/path/command")
95
+
96
+
if target == "receiver":
97
+
_, is_failed = target_object.execute("systemctl is-active command-key.service")
98
+
assert is_failed == "failed\n", (
99
+
f"command-key.service is failed after deletion ({is_failed})"
100
+
)
101
+
102
+
# Test keys twice to ensure the operation is idempotent,
103
+
# especially around directory creation.
104
+
test_keys(target, target_object, non_interactive)
105
+
106
+
107
+
perform_routine("receiver", receiver, True)
108
+
perform_routine("deployer", deployer, True)
109
+
perform_routine("receiver", receiver, False)
110
+
perform_routine("deployer", deployer, False)
111
+
112
+
new_deployer_store_objects = collect_store_objects(deployer).difference(deployer_so)
113
+
new_receiver_store_objects = collect_store_objects(receiver).difference(receiver_so)
114
+
115
+
# no one should have any keys introduced by the operation
116
+
for node, objects in [
117
+
(deployer, new_deployer_store_objects),
118
+
(receiver, new_receiver_store_objects),
119
+
]:
120
+
assert_store_not_poisoned(node, "hello_world_source", objects)
121
+
assert_store_not_poisoned(node, "hello_world_file", objects)
122
+
assert_store_not_poisoned(node, "hello_world_command", objects)
123
+
assert_store_not_poisoned(node, "string_from_environment", objects)
+4
-4
tests/nix/suite/test_local_deploy/default.nix
+4
-4
tests/nix/suite/test_local_deploy/default.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
wire.testing.test_local_deploy = {
3
6
nodes.deployer = {
4
7
_wire.deployer = true;
5
8
_wire.receiver = true;
6
9
};
7
-
testScript = ''
8
-
deployer.succeed(f"wire apply --on deployer --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2")
9
-
deployer.succeed("test -f /etc/a")
10
-
'';
10
+
testScript = builtins.readFile ./script.py;
11
11
};
12
12
}
+6
-3
tests/nix/suite/test_local_deploy/hive.nix
+6
-3
tests/nix/suite/test_local_deploy/hive.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
let
2
-
mkHiveNode = import ../utils.nix { testName = "test_local_deploy-@IDENT@"; };
5
+
inherit (import ../utils.nix { testName = "test_local_deploy-@IDENT@"; }) makeHive mkHiveNode;
3
6
in
4
-
{
5
-
meta.nixpkgs = import <nixpkgs> { system = "x86_64-linux"; };
7
+
makeHive {
8
+
meta.nixpkgs = import <nixpkgs> { localSystem = "x86_64-linux"; };
6
9
deployer = mkHiveNode { hostname = "deployer"; } {
7
10
environment.etc."a".text = "b";
8
11
};
+17
tests/nix/suite/test_local_deploy/script.py
+17
tests/nix/suite/test_local_deploy/script.py
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
from typing import TYPE_CHECKING
5
+
6
+
if TYPE_CHECKING:
7
+
from test_driver.machine import Machine
8
+
9
+
deployer: Machine = None # type: ignore[invalid-assignment]
10
+
TEST_DIR = ""
11
+
12
+
# typing-end
13
+
14
+
deployer.succeed(
15
+
f"wire apply --on deployer --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2"
16
+
)
17
+
deployer.succeed("test -f /etc/a")
+4
-10
tests/nix/suite/test_remote_deploy/default.nix
+4
-10
tests/nix/suite/test_remote_deploy/default.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
wire.testing.test_remote_deploy = {
3
6
nodes.deployer = {
···
6
9
nodes.receiver = {
7
10
_wire.receiver = true;
8
11
};
9
-
testScript = ''
10
-
deployer_so = collect_store_objects(deployer)
11
-
receiver_so = collect_store_objects(receiver)
12
-
13
-
deployer.succeed(f"wire apply --on receiver --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2")
14
-
15
-
receiver.wait_for_unit("sshd.service")
16
-
17
-
receiver.succeed("test -f /etc/a")
18
-
'';
12
+
testScript = builtins.readFile ./script.py;
19
13
};
20
14
}
+50
-4
tests/nix/suite/test_remote_deploy/hive.nix
+50
-4
tests/nix/suite/test_remote_deploy/hive.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
let
2
-
mkHiveNode = import ../utils.nix { testName = "test_remote_deploy-@IDENT@"; };
5
+
inherit (import ../utils.nix { testName = "test_remote_deploy-@IDENT@"; }) makeHive mkHiveNode;
3
6
in
4
-
{
5
-
meta.nixpkgs = import <nixpkgs> { system = "x86_64-linux"; };
7
+
makeHive {
8
+
meta = {
9
+
nixpkgs = import <nixpkgs> { localSystem = "x86_64-linux"; };
10
+
11
+
specialArgs = {
12
+
message = "second";
13
+
};
14
+
15
+
nodeSpecialArgs = {
16
+
receiver-third.message = "third";
17
+
};
18
+
};
19
+
6
20
receiver = mkHiveNode { hostname = "receiver"; } {
7
-
environment.etc."a".text = "b";
21
+
environment.etc."identity".text = "first";
22
+
23
+
# test node pinging
24
+
deployment.target.hosts = [
25
+
"unreachable-1"
26
+
"unreachable-2"
27
+
"unreachable-3"
28
+
"unreachable-4"
29
+
"receiver"
30
+
];
31
+
};
32
+
33
+
receiver-second = mkHiveNode { hostname = "receiver"; } (
34
+
{ message, ... }:
35
+
{
36
+
environment.etc."identity".text = message;
37
+
deployment.target.host = "receiver";
38
+
}
39
+
);
40
+
41
+
receiver-third = mkHiveNode { hostname = "receiver"; } (
42
+
{ message, ... }:
43
+
{
44
+
environment.etc."identity".text = message;
45
+
deployment.target.host = "receiver";
46
+
}
47
+
);
48
+
49
+
receiver-unreachable = mkHiveNode { hostname = "receiver"; } {
50
+
# test node pinging
51
+
deployment.target.hosts = [
52
+
"completely-unreachable"
53
+
];
8
54
};
9
55
}
+63
tests/nix/suite/test_remote_deploy/script.py
+63
tests/nix/suite/test_remote_deploy/script.py
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
from typing import TYPE_CHECKING
5
+
from typing import Callable, ContextManager
6
+
7
+
if TYPE_CHECKING:
8
+
from test_driver.machine import Machine
9
+
10
+
deployer: Machine = None # type: ignore[invalid-assignment]
11
+
receiver: Machine = None # type: ignore[invalid-assignment]
12
+
13
+
TEST_DIR = ""
14
+
15
+
# https://github.com/NixOS/nixpkgs/blob/d10d9933b1c206f9b2950e5e1d68268c5ed0a3c7/nixos/lib/test-script-prepend.py#L43
16
+
subtest: Callable[[str], ContextManager[None]] = None # type: ignore[invalid-assignment]
17
+
18
+
# typing-end
19
+
20
+
with subtest("Test unreachable hosts"):
21
+
deployer.fail(
22
+
f"wire apply --on receiver-unreachable --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2"
23
+
)
24
+
25
+
with subtest("Check basic apply: Interactive"):
26
+
deployer.succeed(
27
+
f"wire apply --on receiver --no-progress --path {TEST_DIR}/hive.nix --no-keys --ssh-accept-host -vvv >&2"
28
+
)
29
+
30
+
identity = receiver.succeed("cat /etc/identity")
31
+
assert identity == "first", "Identity of first apply wasn't as expected"
32
+
33
+
with subtest("Check basic apply: NonInteractive"):
34
+
deployer.succeed(
35
+
f"wire apply --on receiver-third --no-progress --path {TEST_DIR}/hive.nix --no-keys --ssh-accept-host --non-interactive -vvv >&2"
36
+
)
37
+
38
+
identity = receiver.succeed("cat /etc/identity")
39
+
assert identity == "third", "Identity of non-interactive apply wasn't as expected"
40
+
41
+
with subtest("Check boot apply"):
42
+
first_system = receiver.succeed("readlink -f /run/current-system")
43
+
44
+
deployer.succeed(
45
+
f"wire apply boot --on receiver-second --no-progress --path {TEST_DIR}/hive.nix --no-keys --ssh-accept-host -vvv >&2"
46
+
)
47
+
48
+
_first_system = receiver.succeed("readlink -f /run/current-system")
49
+
assert first_system == _first_system, (
50
+
"apply boot without --reboot changed /run/current-system"
51
+
)
52
+
53
+
# with subtest("Check /etc/identity after reboot"):
54
+
# receiver.reboot()
55
+
#
56
+
# identity = receiver.succeed("cat /etc/identity")
57
+
# assert identity == "second", "Identity didn't change after second apply"
58
+
59
+
# with subtest("Check --reboot"):
60
+
# deployer.succeed(f"wire apply boot --on receiver-third --no-progress --path {TEST_DIR}/hive.nix --reboot --no-keys -vvv >&2")
61
+
#
62
+
# identity = receiver.succeed("cat /etc/identity")
63
+
# assert identity == "third", "Identity didn't change after third apply"
+12
tests/nix/suite/test_stdin/default.nix
+12
tests/nix/suite/test_stdin/default.nix
+13
tests/nix/suite/test_stdin/hive.nix
+13
tests/nix/suite/test_stdin/hive.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
let
5
+
inherit (import ../utils.nix { testName = "test_stdin-@IDENT@"; }) makeHive mkHiveNode;
6
+
in
7
+
makeHive {
8
+
meta.nixpkgs = import <nixpkgs> { localSystem = "x86_64-linux"; };
9
+
deployer = mkHiveNode { hostname = "deployer"; } {
10
+
deployment.tags = [ "tag" ];
11
+
environment.etc."a".text = "b";
12
+
};
13
+
}
+17
tests/nix/suite/test_stdin/script.py
+17
tests/nix/suite/test_stdin/script.py
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
from typing import TYPE_CHECKING
5
+
6
+
if TYPE_CHECKING:
7
+
from test_driver.machine import Machine
8
+
9
+
deployer: Machine = None # type: ignore[invalid-assignment]
10
+
TEST_DIR = ""
11
+
12
+
# typing-end
13
+
14
+
deployer.succeed(
15
+
f"echo @tag | wire apply --on deployer --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2"
16
+
)
17
+
deployer.succeed("test -f /etc/a")
+4
-1
tests/nix/suite/utils.nix
+4
-1
tests/nix/suite/utils.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{ testName }:
2
5
let
3
6
# Use the flake-compat code in project root to access the tests which are
···
46
49
];
47
50
};
48
51
49
-
__functor = self: self.mkHiveNode;
52
+
inherit (flake) makeHive;
50
53
}
+5
-2
tests/nix/test-opts.nix
+5
-2
tests/nix/test-opts.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
lib,
3
6
snakeOil,
4
-
wire-small,
7
+
wire-small-dev,
5
8
config,
6
9
pkgs,
7
10
...
···
33
36
"C+ /root/.ssh/id_ed25519 600 - - - ${snakeOil.snakeOilEd25519PrivateKey}"
34
37
];
35
38
environment.systemPackages = [
36
-
wire-small
39
+
wire-small-dev
37
40
pkgs.ripgrep
38
41
];
39
42
# It's important to note that you should never ever use this configuration
+20
tests/nix/tools/__init__.py
+20
tests/nix/tools/__init__.py
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
from typing import TYPE_CHECKING
5
+
6
+
if TYPE_CHECKING:
7
+
from test_driver.machine import Machine
8
+
9
+
# typing-end
10
+
11
+
12
+
def collect_store_objects(machine: Machine) -> set[str]:
13
+
return set(machine.succeed("ls /nix/store").strip().split("\n"))
14
+
15
+
16
+
def assert_store_not_poisoned(machine: Machine, poison: str, objects: set[str]):
17
+
paths = list(map(lambda n: f"/nix/store/{n}", objects))
18
+
19
+
machine.succeed("which rg")
20
+
machine.fail(f"rg '{poison}' {' '.join(paths)}")
-9
tests/nix/tools.py
-9
tests/nix/tools.py
···
1
-
def collect_store_objects(machine: Machine) -> set[str]:
2
-
return set(machine.succeed("ls /nix/store").strip().split("\n"))
3
-
4
-
5
-
def assert_store_not_posioned(machine: Machine, poison: str, objects: set[str]):
6
-
paths = list(map(lambda n: f"/nix/store/{n}", objects))
7
-
8
-
machine.succeed("which rg")
9
-
machine.fail(f"rg '{poison}' {" ".join(paths)}")
+303
tests/nix/uv.lock
+303
tests/nix/uv.lock
···
1
+
version = 1
2
+
revision = 3
3
+
requires-python = ">=3.13"
4
+
5
+
[[package]]
6
+
name = "appdirs"
7
+
version = "1.4.4"
8
+
source = { registry = "https://pypi.org/simple" }
9
+
sdist = { url = "https://files.pythonhosted.org/packages/d7/d8/05696357e0311f5b5c316d7b95f46c669dd9c15aaeecbb48c7d0aeb88c40/appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", size = 13470, upload-time = "2020-05-11T07:59:51.037Z" }
10
+
wheels = [
11
+
{ url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566, upload-time = "2020-05-11T07:59:49.499Z" },
12
+
]
13
+
14
+
[[package]]
15
+
name = "asttokens"
16
+
version = "3.0.1"
17
+
source = { registry = "https://pypi.org/simple" }
18
+
sdist = { url = "https://files.pythonhosted.org/packages/be/a5/8e3f9b6771b0b408517c82d97aed8f2036509bc247d46114925e32fe33f0/asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7", size = 62308, upload-time = "2025-11-15T16:43:48.578Z" }
19
+
wheels = [
20
+
{ url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" },
21
+
]
22
+
23
+
[[package]]
24
+
name = "colorama"
25
+
version = "0.4.6"
26
+
source = { registry = "https://pypi.org/simple" }
27
+
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
28
+
wheels = [
29
+
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
30
+
]
31
+
32
+
[[package]]
33
+
name = "decorator"
34
+
version = "5.2.1"
35
+
source = { registry = "https://pypi.org/simple" }
36
+
sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" }
37
+
wheels = [
38
+
{ url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" },
39
+
]
40
+
41
+
[[package]]
42
+
name = "executing"
43
+
version = "2.2.1"
44
+
source = { registry = "https://pypi.org/simple" }
45
+
sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" }
46
+
wheels = [
47
+
{ url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" },
48
+
]
49
+
50
+
[[package]]
51
+
name = "ipython"
52
+
version = "9.8.0"
53
+
source = { registry = "https://pypi.org/simple" }
54
+
dependencies = [
55
+
{ name = "colorama", marker = "sys_platform == 'win32'" },
56
+
{ name = "decorator" },
57
+
{ name = "ipython-pygments-lexers" },
58
+
{ name = "jedi" },
59
+
{ name = "matplotlib-inline" },
60
+
{ name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
61
+
{ name = "prompt-toolkit" },
62
+
{ name = "pygments" },
63
+
{ name = "stack-data" },
64
+
{ name = "traitlets" },
65
+
]
66
+
sdist = { url = "https://files.pythonhosted.org/packages/12/51/a703c030f4928646d390b4971af4938a1b10c9dfce694f0d99a0bb073cb2/ipython-9.8.0.tar.gz", hash = "sha256:8e4ce129a627eb9dd221c41b1d2cdaed4ef7c9da8c17c63f6f578fe231141f83", size = 4424940, upload-time = "2025-12-03T10:18:24.353Z" }
67
+
wheels = [
68
+
{ url = "https://files.pythonhosted.org/packages/f1/df/8ee1c5dd1e3308b5d5b2f2dfea323bb2f3827da8d654abb6642051199049/ipython-9.8.0-py3-none-any.whl", hash = "sha256:ebe6d1d58d7d988fbf23ff8ff6d8e1622cfdb194daf4b7b73b792c4ec3b85385", size = 621374, upload-time = "2025-12-03T10:18:22.335Z" },
69
+
]
70
+
71
+
[[package]]
72
+
name = "ipython-pygments-lexers"
73
+
version = "1.1.1"
74
+
source = { registry = "https://pypi.org/simple" }
75
+
dependencies = [
76
+
{ name = "pygments" },
77
+
]
78
+
sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" }
79
+
wheels = [
80
+
{ url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" },
81
+
]
82
+
83
+
[[package]]
84
+
name = "jedi"
85
+
version = "0.19.2"
86
+
source = { registry = "https://pypi.org/simple" }
87
+
dependencies = [
88
+
{ name = "parso" },
89
+
]
90
+
sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" }
91
+
wheels = [
92
+
{ url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" },
93
+
]
94
+
95
+
[[package]]
96
+
name = "junit-xml"
97
+
version = "1.9"
98
+
source = { registry = "https://pypi.org/simple" }
99
+
dependencies = [
100
+
{ name = "six" },
101
+
]
102
+
sdist = { url = "https://files.pythonhosted.org/packages/98/af/bc988c914dd1ea2bc7540ecc6a0265c2b6faccc6d9cdb82f20e2094a8229/junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f", size = 7349, upload-time = "2023-01-24T18:42:00.836Z" }
103
+
wheels = [
104
+
{ url = "https://files.pythonhosted.org/packages/2a/93/2d896b5fd3d79b4cadd8882c06650e66d003f465c9d12c488d92853dff78/junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732", size = 7130, upload-time = "2020-02-22T20:41:37.661Z" },
105
+
]
106
+
107
+
[[package]]
108
+
name = "matplotlib-inline"
109
+
version = "0.2.1"
110
+
source = { registry = "https://pypi.org/simple" }
111
+
dependencies = [
112
+
{ name = "traitlets" },
113
+
]
114
+
sdist = { url = "https://files.pythonhosted.org/packages/c7/74/97e72a36efd4ae2bccb3463284300f8953f199b5ffbc04cbbb0ec78f74b1/matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe", size = 8110, upload-time = "2025-10-23T09:00:22.126Z" }
115
+
wheels = [
116
+
{ url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516, upload-time = "2025-10-23T09:00:20.675Z" },
117
+
]
118
+
119
+
[[package]]
120
+
name = "nixos-test-driver"
121
+
version = "0.0.0"
122
+
source = { git = "https://github.com/NixOS/nixpkgs?subdirectory=nixos%2Flib%2Ftest-driver%2Fsrc&branch=nixos-25.11#c6f52ebd45e5925c188d1a20119978aa4ffd5ef6" }
123
+
124
+
[[package]]
125
+
name = "parso"
126
+
version = "0.8.5"
127
+
source = { registry = "https://pypi.org/simple" }
128
+
sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" }
129
+
wheels = [
130
+
{ url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" },
131
+
]
132
+
133
+
[[package]]
134
+
name = "pexpect"
135
+
version = "4.9.0"
136
+
source = { registry = "https://pypi.org/simple" }
137
+
dependencies = [
138
+
{ name = "ptyprocess" },
139
+
]
140
+
sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
141
+
wheels = [
142
+
{ url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
143
+
]
144
+
145
+
[[package]]
146
+
name = "prompt-toolkit"
147
+
version = "3.0.52"
148
+
source = { registry = "https://pypi.org/simple" }
149
+
dependencies = [
150
+
{ name = "wcwidth" },
151
+
]
152
+
sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" }
153
+
wheels = [
154
+
{ url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" },
155
+
]
156
+
157
+
[[package]]
158
+
name = "ptpython"
159
+
version = "3.0.32"
160
+
source = { registry = "https://pypi.org/simple" }
161
+
dependencies = [
162
+
{ name = "appdirs" },
163
+
{ name = "jedi" },
164
+
{ name = "prompt-toolkit" },
165
+
{ name = "pygments" },
166
+
]
167
+
sdist = { url = "https://files.pythonhosted.org/packages/b6/8c/7e904ceeb512b4530c7ca1d918d3565d694a1fa7df337cdfc36a16347d68/ptpython-3.0.32.tar.gz", hash = "sha256:11651778236de95c582b42737294e50a66ba4a21fa01c0090ea70815af478fe0", size = 74080, upload-time = "2025-11-20T21:20:48.27Z" }
168
+
wheels = [
169
+
{ url = "https://files.pythonhosted.org/packages/4c/ac/0e35e5d7afd47ab0e2c71293ed2ad18df91a2a4a008c0ff59c2f22def377/ptpython-3.0.32-py3-none-any.whl", hash = "sha256:16435d323e5fc0a685d5f4dc5bb4494fb68ac68736689cd1247e1eda9369b616", size = 68099, upload-time = "2025-11-20T21:20:46.634Z" },
170
+
]
171
+
172
+
[[package]]
173
+
name = "ptyprocess"
174
+
version = "0.7.0"
175
+
source = { registry = "https://pypi.org/simple" }
176
+
sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" }
177
+
wheels = [
178
+
{ url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" },
179
+
]
180
+
181
+
[[package]]
182
+
name = "pure-eval"
183
+
version = "0.2.3"
184
+
source = { registry = "https://pypi.org/simple" }
185
+
sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" }
186
+
wheels = [
187
+
{ url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" },
188
+
]
189
+
190
+
[[package]]
191
+
name = "pygments"
192
+
version = "2.19.2"
193
+
source = { registry = "https://pypi.org/simple" }
194
+
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
195
+
wheels = [
196
+
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
197
+
]
198
+
199
+
[[package]]
200
+
name = "remote-pdb"
201
+
version = "2.1.0"
202
+
source = { registry = "https://pypi.org/simple" }
203
+
sdist = { url = "https://files.pythonhosted.org/packages/e4/b5/4944cac06fd9fc4a2e168313ec220aa25ed96ce83947b63eea5b4045b22d/remote-pdb-2.1.0.tar.gz", hash = "sha256:2d70c6f41e0eabf0165e8f1be58f82aa7a605aaeab8f2aefeb9ce246431091c1", size = 22295, upload-time = "2020-07-24T13:31:32.985Z" }
204
+
wheels = [
205
+
{ url = "https://files.pythonhosted.org/packages/71/c5/d208c66344bb785d800adb61aef512290d3473052b9e7697890f0547aff2/remote_pdb-2.1.0-py2.py3-none-any.whl", hash = "sha256:94f73a92ac1248cf16189211011f97096bdada8a7baac8c79372663bbb57b5d0", size = 6304, upload-time = "2020-07-24T13:31:31.535Z" },
206
+
]
207
+
208
+
[[package]]
209
+
name = "six"
210
+
version = "1.17.0"
211
+
source = { registry = "https://pypi.org/simple" }
212
+
sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
213
+
wheels = [
214
+
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
215
+
]
216
+
217
+
[[package]]
218
+
name = "stack-data"
219
+
version = "0.6.3"
220
+
source = { registry = "https://pypi.org/simple" }
221
+
dependencies = [
222
+
{ name = "asttokens" },
223
+
{ name = "executing" },
224
+
{ name = "pure-eval" },
225
+
]
226
+
sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" }
227
+
wheels = [
228
+
{ url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" },
229
+
]
230
+
231
+
[[package]]
232
+
name = "traitlets"
233
+
version = "5.14.3"
234
+
source = { registry = "https://pypi.org/simple" }
235
+
sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" }
236
+
wheels = [
237
+
{ url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" },
238
+
]
239
+
240
+
[[package]]
241
+
name = "ty"
242
+
version = "0.0.4"
243
+
source = { registry = "https://pypi.org/simple" }
244
+
sdist = { url = "https://files.pythonhosted.org/packages/48/d9/97d5808e851f790e58f8a54efb5c7b9f404640baf9e295f424846040b316/ty-0.0.4.tar.gz", hash = "sha256:2ea47a0089d74730658ec4e988c8ef476a1e9bd92df3e56709c4003c2895ff3b", size = 4780289, upload-time = "2025-12-19T00:13:53.12Z" }
245
+
wheels = [
246
+
{ url = "https://files.pythonhosted.org/packages/b1/94/b32a962243cc8a16e8dc74cf1fe75e8bb013d0e13e71bb540e2c86214b61/ty-0.0.4-py3-none-linux_armv6l.whl", hash = "sha256:5225da65a8d1defeb21ee9d74298b1b97c6cbab36e235a310c1430d9079e4b6a", size = 9762399, upload-time = "2025-12-19T00:14:11.261Z" },
247
+
{ url = "https://files.pythonhosted.org/packages/d1/d2/7c76e0c22ddfc2fcd4a3458a65f87ce074070eb1c68c07ee475cc2b6ea68/ty-0.0.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f87770d7988f470b795a2043185082fa959dbe1979a11b4bfe20f1214d37bd6e", size = 9590410, upload-time = "2025-12-19T00:13:55.759Z" },
248
+
{ url = "https://files.pythonhosted.org/packages/a5/84/de4b1fc85669faca3622071d5a3f3ec7bfb239971f368c28fae461d3398a/ty-0.0.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ecf68b8ea48674a289d733b4786aecc259242a2d9a920b3ec8583db18c67496a", size = 9131113, upload-time = "2025-12-19T00:14:08.593Z" },
249
+
{ url = "https://files.pythonhosted.org/packages/a7/ff/b5bf385b6983be56a470856bbcbac1b7e816bcd765a7e9d39ab2399e387d/ty-0.0.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efc396d76a57e527393cae4ee8faf23b93be3df9e93202f39925721a7a2bb7b8", size = 9599152, upload-time = "2025-12-19T00:13:40.484Z" },
250
+
{ url = "https://files.pythonhosted.org/packages/36/d6/9880ba106f2f20d13e6a5dca5d5ca44bfb3782936ee67ff635f89a2959c0/ty-0.0.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c893b968d2f9964a4d4db9992c9ba66b01f411b1f48dffcde08622e19cd6ab97", size = 9585368, upload-time = "2025-12-19T00:14:00.994Z" },
251
+
{ url = "https://files.pythonhosted.org/packages/3f/53/503cfc18bc4c7c4e02f89dd43debc41a6e343b41eb43df658dfb493a386d/ty-0.0.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:526c925b80d68a53c165044d2370fcfc0def1f119f7b7e483ee61d24da6fb891", size = 9998412, upload-time = "2025-12-19T00:14:18.653Z" },
252
+
{ url = "https://files.pythonhosted.org/packages/1d/bd/dd2d3e29834da5add2eda0ab5b433171ce9ce9a248c364d2e237f82073d7/ty-0.0.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:857f605a7fa366b6c6e6f38abc311d0606be513c2bee8977b5c8fd4bde1a82d5", size = 10853890, upload-time = "2025-12-19T00:13:50.891Z" },
253
+
{ url = "https://files.pythonhosted.org/packages/07/fe/28ba3be1672e6b8df46e43de66a02dc076ffba7853d391a5466421886225/ty-0.0.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4cc981aa3ebdac2c233421b1e58c80b0df6a8e6e6fa8b9e69fbdfd2f82768af", size = 10587263, upload-time = "2025-12-19T00:14:21.577Z" },
254
+
{ url = "https://files.pythonhosted.org/packages/26/9c/bb598772043f686afe5bc26cb386020709c1a0bcc164bc22ad9da2b4f55d/ty-0.0.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b03b2708b0bf67c76424a860f848aebaa4772c05529170c3761bfcaea93ec199", size = 10401204, upload-time = "2025-12-19T00:13:43.453Z" },
255
+
{ url = "https://files.pythonhosted.org/packages/ac/18/71765e9d63669bf09461c3fea84a7a63232ccb0e83b84676f07b987fc217/ty-0.0.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:469890e885544beb129c21e2f8f15321f0573d094aec13da68593c5f86389ff9", size = 10129713, upload-time = "2025-12-19T00:14:13.725Z" },
256
+
{ url = "https://files.pythonhosted.org/packages/c3/2d/c03eba570aa85e9c361de5ed36d60b9ab139e93ee91057f455ab4af48e54/ty-0.0.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:abfd928d09567e12068aeca875e920def3badf1978896f474aa4b85b552703c4", size = 9586203, upload-time = "2025-12-19T00:14:03.423Z" },
257
+
{ url = "https://files.pythonhosted.org/packages/61/f1/8c3c82a8df69bd4417c77be4f895d043db26dd47bfcc90b33dc109cd0096/ty-0.0.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:44b8e94f9d64df12eae4cf8031c5ca9a4c610b57092b26ad3d68d91bcc7af122", size = 9608230, upload-time = "2025-12-19T00:13:58.252Z" },
258
+
{ url = "https://files.pythonhosted.org/packages/51/0c/d8ba3a85c089c246ef6bd49d0f0b40bc0f9209bb819e8c02ccbea5cb4d57/ty-0.0.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9d6a439813e21a06769daf858105818c385d88018929d4a56970d4ddd5cd3df2", size = 9725125, upload-time = "2025-12-19T00:14:05.996Z" },
259
+
{ url = "https://files.pythonhosted.org/packages/4d/38/e30f64ad1e40905c766576ec70cffc69163591a5842ce14652672f6ab394/ty-0.0.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c3cfcf26cfe6c828e91d7a529cc2dda37bc3b51ba06909c9be07002a6584af52", size = 10237174, upload-time = "2025-12-19T00:14:23.858Z" },
260
+
{ url = "https://files.pythonhosted.org/packages/cb/d7/8d650aa0be8936dd3ed74e2b0655230e2904caa6077c30c16a089b523cff/ty-0.0.4-py3-none-win32.whl", hash = "sha256:58bbf70dd27af6b00dedbdebeec92d5993aa238664f96fa5c0064930f7a0d30b", size = 9188434, upload-time = "2025-12-19T00:13:45.875Z" },
261
+
{ url = "https://files.pythonhosted.org/packages/82/d7/9fc0c81cf0b0d281ac9c18bfbdb4d6bae2173503ba79e40b210ab41c2c8b/ty-0.0.4-py3-none-win_amd64.whl", hash = "sha256:7c2db0f96218f08c140bd9d3fcbb1b3c8c5c4f0c9b0a5624487f0a2bf4b76163", size = 10019313, upload-time = "2025-12-19T00:14:15.968Z" },
262
+
{ url = "https://files.pythonhosted.org/packages/5f/b8/3e3246738eed1cd695c5964a401f3b9c757d20ac21fdae06281af9f40ef6/ty-0.0.4-py3-none-win_arm64.whl", hash = "sha256:69f14fc98e4a847afa9f8c5d5234d008820dbc09c7dcdb3ac1ba16628f5132df", size = 9561857, upload-time = "2025-12-19T00:13:48.382Z" },
263
+
]
264
+
265
+
[[package]]
266
+
name = "wcwidth"
267
+
version = "0.2.14"
268
+
source = { registry = "https://pypi.org/simple" }
269
+
sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" }
270
+
wheels = [
271
+
{ url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" },
272
+
]
273
+
274
+
[[package]]
275
+
name = "wire-vm-tests"
276
+
version = "0.0.0"
277
+
source = { virtual = "." }
278
+
dependencies = [
279
+
{ name = "colorama" },
280
+
{ name = "ipython" },
281
+
{ name = "junit-xml" },
282
+
{ name = "nixos-test-driver" },
283
+
{ name = "ptpython" },
284
+
{ name = "remote-pdb" },
285
+
]
286
+
287
+
[package.dev-dependencies]
288
+
dev = [
289
+
{ name = "ty" },
290
+
]
291
+
292
+
[package.metadata]
293
+
requires-dist = [
294
+
{ name = "colorama", specifier = ">=0.4.6" },
295
+
{ name = "ipython", specifier = ">=9.8.0" },
296
+
{ name = "junit-xml", specifier = ">=1.9" },
297
+
{ name = "nixos-test-driver", git = "https://github.com/NixOS/nixpkgs?subdirectory=nixos%2Flib%2Ftest-driver%2Fsrc&branch=nixos-25.11" },
298
+
{ name = "ptpython", specifier = ">=3.0.32" },
299
+
{ name = "remote-pdb", specifier = ">=2.1.0" },
300
+
]
301
+
302
+
[package.metadata.requires-dev]
303
+
dev = [{ name = "ty", specifier = ">=0.0.4" }]
+9
-1
tests/rust/_keys_should_fail/hive.nix
+9
-1
tests/rust/_keys_should_fail/hive.nix
···
1
-
{
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
let
5
+
inherit (import ../../..) makeHive;
6
+
in
7
+
makeHive {
2
8
meta = {
3
9
nixpkgs = <nixpkgs>;
4
10
};
···
10
16
source = "hi";
11
17
}
12
18
];
19
+
20
+
nixpkgs.hostPlatform = "x86_64-linux";
13
21
};
14
22
}
+7
-1
tests/rust/default_values_match/hive.nix
+7
-1
tests/rust/default_values_match/hive.nix
-1
tests/rust/flake_hive/flake.lock
-1
tests/rust/flake_hive/flake.lock
···
1
-
../../../flake.lock
+11
-2
tests/rust/flake_hive/flake.nix
+11
-2
tests/rust/flake_hive/flake.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
3
6
4
7
outputs =
5
-
{ nixpkgs, ... }@inputs:
8
+
{ nixpkgs, self, ... }@inputs:
9
+
let
10
+
makeHive = import ./makeHive.nix;
11
+
in
6
12
{
7
-
colmena = {
13
+
wire = makeHive {
14
+
inherit (self) nixosConfigurations;
15
+
meta.nixpkgs = import nixpkgs { localSystem = "x86_64-linux"; };
16
+
8
17
node-a = { };
9
18
node-b = {
10
19
nixpkgs.hostPlatform = "x86_64-linux";
+7
-1
tests/rust/no_nixpkgs/hive.nix
+7
-1
tests/rust/no_nixpkgs/hive.nix
+9
-1
tests/rust/non_trivial_hive/hive.nix
+9
-1
tests/rust/non_trivial_hive/hive.nix
···
1
-
{
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
let
5
+
inherit (import ../../..) makeHive;
6
+
in
7
+
makeHive {
2
8
meta = {
3
9
nixpkgs = <nixpkgs>;
4
10
};
···
28
34
name = "different-than-a";
29
35
source = "hi";
30
36
};
37
+
38
+
deployment.buildOnTarget = true;
31
39
32
40
nixpkgs.hostPlatform = "x86_64-linux";
33
41
};
+8
-1
tests/rust/test_hive_dot_nix_priority/hive.nix
+8
-1
tests/rust/test_hive_dot_nix_priority/hive.nix
+7
-1
tests/rust/test_hive_file/hive.nix
+7
-1
tests/rust/test_hive_file/hive.nix
+24
typos.toml
+24
typos.toml
···
1
+
[files]
2
+
extend-exclude = ["COPYING"]
3
+
4
+
[default]
5
+
locale = "en-au"
6
+
7
+
[type.nix]
8
+
# nixpkgs
9
+
extend-ignore-re = ["authorizedKeys", "sanitizeDerivationName"]
10
+
11
+
[type.md]
12
+
extend-ignore-re = ["authorizedKeys", "Initialized empty Git"]
13
+
14
+
[type.rust.extend-words]
15
+
# serde
16
+
serialize = "serialize"
17
+
18
+
[type.yaml.extend-words]
19
+
# github
20
+
labeler = "labeler"
21
+
22
+
[type.ts.extend-words]
23
+
# vite
24
+
optimize = "optimize"
-29
wire/cli/Cargo.toml
-29
wire/cli/Cargo.toml
···
1
-
[package]
2
-
name = "wire"
3
-
version.workspace = true
4
-
edition.workspace = true
5
-
6
-
[features]
7
-
dhat-heap = []
8
-
9
-
[dependencies]
10
-
clap = { workspace = true }
11
-
clap-verbosity-flag = { workspace = true }
12
-
serde = { workspace = true }
13
-
tokio = { workspace = true }
14
-
tracing = { workspace = true }
15
-
tracing-indicatif = { workspace = true }
16
-
tracing-log = { workspace = true }
17
-
tracing-subscriber = { workspace = true }
18
-
lib = { path = "../lib" }
19
-
serde_json = { workspace = true }
20
-
anyhow = { workspace = true }
21
-
indicatif = "0.17.8"
22
-
enum-display-derive = "0.1.1"
23
-
im = { workspace = true }
24
-
futures = "0.3.31"
25
-
clap-num = "1.2.0"
26
-
clap-markdown = "0.1.5"
27
-
itertools = "0.14.0"
28
-
dhat = "0.3.2"
29
-
clap_complete = "4.5.54"
-64
wire/cli/default.nix
-64
wire/cli/default.nix
···
1
-
{ getSystem, inputs, ... }:
2
-
{
3
-
perSystem =
4
-
{
5
-
pkgs,
6
-
lib,
7
-
self',
8
-
buildRustProgram,
9
-
system,
10
-
...
11
-
}:
12
-
let
13
-
postBuild = ''
14
-
wrapProgram $out/bin/wire \
15
-
--set WIRE_RUNTIME ${../../runtime} \
16
-
'';
17
-
cleanSystem = system: lib.replaceStrings [ "-" ] [ "_" ] system;
18
-
agents = lib.strings.concatMapStrings (
19
-
system: "--set WIRE_KEY_AGENT_${cleanSystem system} ${(getSystem system).packages.agent} "
20
-
) (import inputs.linux-systems);
21
-
in
22
-
{
23
-
packages = {
24
-
default = self'.packages.wire;
25
-
wire-unwrapped = buildRustProgram {
26
-
name = "wire";
27
-
pname = "wire";
28
-
cargoExtraArgs = "-p wire";
29
-
doCheck = true;
30
-
nativeBuildInputs = [ pkgs.installShellFiles ];
31
-
postInstall = ''
32
-
installShellCompletion --cmd wire \
33
-
--bash <($out/bin/wire completions bash) \
34
-
--fish <($out/bin/wire completions fish) \
35
-
--zsh <($out/bin/wire completions zsh)
36
-
'';
37
-
};
38
-
39
-
wire = pkgs.symlinkJoin {
40
-
name = "wire";
41
-
paths = [ self'.packages.wire-unwrapped ];
42
-
nativeBuildInputs = [
43
-
pkgs.makeWrapper
44
-
];
45
-
postBuild = ''
46
-
${postBuild} ${agents}
47
-
'';
48
-
meta.mainProgram = "wire";
49
-
};
50
-
51
-
wire-small = pkgs.symlinkJoin {
52
-
name = "wire";
53
-
paths = [ self'.packages.wire-unwrapped ];
54
-
nativeBuildInputs = [
55
-
pkgs.makeWrapper
56
-
];
57
-
postBuild = ''
58
-
${postBuild} --set WIRE_KEY_AGENT_${cleanSystem system} ${self'.packages.agent}
59
-
'';
60
-
meta.mainProgram = "wire";
61
-
};
62
-
};
63
-
};
64
-
}
-80
wire/cli/src/apply.rs
-80
wire/cli/src/apply.rs
···
1
-
use futures::StreamExt;
2
-
use indicatif::ProgressStyle;
3
-
use itertools::Itertools;
4
-
use lib::hive::Hive;
5
-
use lib::hive::node::{Context, Goal, GoalExecutor, StepState};
6
-
use lib::{HiveLibError, SubCommandModifiers};
7
-
use std::collections::HashSet;
8
-
use tracing::{Span, error, info, instrument};
9
-
use tracing_indicatif::span_ext::IndicatifSpanExt;
10
-
11
-
use crate::cli::ApplyTarget;
12
-
13
-
#[instrument(skip_all, fields(goal = %goal, on = %on.iter().join(", ")))]
14
-
pub async fn apply(
15
-
hive: &mut Hive,
16
-
goal: Goal,
17
-
on: Vec<ApplyTarget>,
18
-
parallel: usize,
19
-
no_keys: bool,
20
-
always_build_local: Vec<String>,
21
-
modifiers: SubCommandModifiers,
22
-
) -> Result<(), HiveLibError> {
23
-
let header_span = Span::current();
24
-
header_span.pb_set_style(&ProgressStyle::default_bar());
25
-
header_span.pb_set_length(1);
26
-
27
-
// Respect user's --always-build-local arg
28
-
hive.force_always_local(always_build_local)?;
29
-
30
-
let header_span_enter = header_span.enter();
31
-
32
-
let (tags, names) = on.iter().fold(
33
-
(HashSet::new(), HashSet::new()),
34
-
|(mut tags, mut names), target| {
35
-
match target {
36
-
ApplyTarget::Tag(tag) => tags.insert(tag.clone()),
37
-
ApplyTarget::Node(name) => names.insert(name.clone()),
38
-
};
39
-
(tags, names)
40
-
},
41
-
);
42
-
43
-
let mut set = hive
44
-
.nodes
45
-
.iter()
46
-
.filter(|(name, node)| {
47
-
on.is_empty() || names.contains(name) || node.tags.iter().any(|tag| tags.contains(tag))
48
-
})
49
-
.map(|node| {
50
-
let path = hive.path.clone();
51
-
let span = header_span.clone();
52
-
53
-
info!("Resolved {on:?} to include {}", node.0);
54
-
55
-
let context = Context {
56
-
node: node.1,
57
-
name: node.0,
58
-
goal,
59
-
state: StepState::default(),
60
-
no_keys,
61
-
hivepath: path,
62
-
modifiers,
63
-
};
64
-
65
-
GoalExecutor::new(context).execute(span)
66
-
})
67
-
.peekable();
68
-
69
-
if set.peek().is_none() {
70
-
error!("There are no nodes selected for deployment");
71
-
}
72
-
73
-
let futures = futures::stream::iter(set).buffer_unordered(parallel);
74
-
let result: Result<(), _> = futures.collect::<Vec<_>>().await.into_iter().collect();
75
-
76
-
std::mem::drop(header_span_enter);
77
-
std::mem::drop(header_span);
78
-
79
-
result
80
-
}
-168
wire/cli/src/cli.rs
-168
wire/cli/src/cli.rs
···
1
-
use clap::{Parser, Subcommand, ValueEnum};
2
-
use clap_complete::Shell;
3
-
use clap_num::number_range;
4
-
use clap_verbosity_flag::WarnLevel;
5
-
use lib::SubCommandModifiers;
6
-
use lib::hive::node::{Goal as HiveGoal, Name, SwitchToConfigurationGoal};
7
-
use std::io::IsTerminal;
8
-
9
-
use std::{
10
-
fmt::{self, Display, Formatter},
11
-
sync::Arc,
12
-
};
13
-
14
-
#[derive(Parser)]
15
-
#[command(
16
-
name = "wire",
17
-
bin_name = "wire",
18
-
about = "a tool to deploy nixos systems",
19
-
version
20
-
)]
21
-
pub struct Cli {
22
-
#[command(subcommand)]
23
-
pub command: Commands,
24
-
25
-
#[command(flatten)]
26
-
pub verbose: clap_verbosity_flag::Verbosity<WarnLevel>,
27
-
28
-
/// Path to directory containing hive
29
-
#[arg(long, global = true, default_value = std::env::current_dir().unwrap().into_os_string())]
30
-
pub path: std::path::PathBuf,
31
-
32
-
/// Hide progress bars. Defaults to true if stdin does not refer to a tty (unix pipelines, in CI).
33
-
#[arg(long, global = true, default_value_t = !std::io::stdin().is_terminal())]
34
-
pub no_progress: bool,
35
-
36
-
    /// Show trace logs
    #[arg(long, global = true, default_value_t = false)]
    pub show_trace: bool,

    #[arg(long, hide = true, global = true)]
    pub markdown_help: bool,
}

#[derive(Clone, Debug)]
pub enum ApplyTarget {
    Node(Name),
    Tag(String),
}

impl From<String> for ApplyTarget {
    fn from(value: String) -> Self {
        if let Some(stripped) = value.strip_prefix("@") {
            ApplyTarget::Tag(stripped.to_string())
        } else {
            ApplyTarget::Node(Name(Arc::from(value.as_str())))
        }
    }
}

impl Display for ApplyTarget {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            ApplyTarget::Node(name) => name.fmt(f),
            ApplyTarget::Tag(tag) => write!(f, "@{tag}"),
        }
    }
}

fn more_than_zero(s: &str) -> Result<usize, String> {
    number_range(s, 1, usize::MAX)
}

#[derive(Subcommand)]
pub enum Commands {
    /// Deploy nodes
    Apply {
        #[arg(value_enum, default_value_t)]
        goal: Goal,

        /// List of literal node names or `@` prefixed tags.
        #[arg(short, long, value_name = "NODE | @TAG", num_args = 1..)]
        on: Vec<ApplyTarget>,

        #[arg(short, long, default_value_t = 10, value_parser=more_than_zero)]
        parallel: usize,

        /// Skip key uploads. noop when [GOAL] = Keys
        #[arg(short, long, default_value_t = false)]
        no_keys: bool,

        /// Overrides deployment.buildOnTarget.
        #[arg(short, long, value_name = "NODE")]
        always_build_local: Vec<String>,
    },
    /// Inspect hive
    #[clap(visible_alias = "show")]
    Inspect {
        /// Include liveliness
        #[arg(short, long, default_value_t = false)]
        online: bool,

        /// Return in JSON format
        #[arg(short, long, default_value_t = false)]
        json: bool,
    },
    /// Generates shell completions
    #[clap(hide = true)]
    Completions {
        #[arg()]
        // Shell to generate completions for
        shell: Shell,
    },
}

#[derive(Clone, Debug, Default, ValueEnum, Display)]
pub enum Goal {
    /// Make the configuration the boot default and activate now
    #[default]
    Switch,
    /// Build the configuration but do nothing with it
    Build,
    /// Copy system derivation to remote hosts
    Push,
    /// Push deployment keys to remote hosts
    Keys,
    /// Activate system profile on next boot
    Boot,
    /// Activate the configuration, but don't make it the boot default
    Test,
    /// Show what would be done if this configuration were activated.
    DryActivate,
}

impl TryFrom<Goal> for HiveGoal {
    type Error = anyhow::Error;

    fn try_from(value: Goal) -> Result<Self, Self::Error> {
        match value {
            Goal::Build => Ok(HiveGoal::Build),
            Goal::Push => Ok(HiveGoal::Push),
            Goal::Boot => Ok(HiveGoal::SwitchToConfiguration(
                SwitchToConfigurationGoal::Boot,
            )),
            Goal::Switch => Ok(HiveGoal::SwitchToConfiguration(
                SwitchToConfigurationGoal::Switch,
            )),
            Goal::Test => Ok(HiveGoal::SwitchToConfiguration(
                SwitchToConfigurationGoal::Test,
            )),
            Goal::DryActivate => Ok(HiveGoal::SwitchToConfiguration(
                SwitchToConfigurationGoal::DryActivate,
            )),
            Goal::Keys => Ok(HiveGoal::Keys),
        }
    }
}

pub trait ToSubCommandModifiers {
    fn to_subcommand_modifiers(&self) -> SubCommandModifiers;
}

impl ToSubCommandModifiers for Cli {
    fn to_subcommand_modifiers(&self) -> SubCommandModifiers {
        SubCommandModifiers {
            show_trace: self.show_trace,
        }
    }
}
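A minimal sketch (test name hypothetical) of the `From<String>` conversion above: a leading `@` selects a tag, anything else becomes a literal node name.

#[test]
fn apply_target_parsing_sketch() {
    let tag = ApplyTarget::from(String::from("@web"));
    let node = ApplyTarget::from(String::from("node-a"));
    assert!(matches!(tag, ApplyTarget::Tag(ref t) if t == "web"));
    assert!(matches!(node, ApplyTarget::Node(_)));
    // Display restores the `@` prefix for tags
    assert_eq!(tag.to_string(), "@web");
}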
-106 wire/cli/src/main.rs
···
#![deny(clippy::pedantic)]
#![allow(clippy::missing_panics_doc)]
use crate::cli::Cli;
use crate::cli::ToSubCommandModifiers;
use anyhow::Ok;
use clap::CommandFactory;
use clap::Parser;
use clap_complete::generate;
use clap_verbosity_flag::{Verbosity, WarnLevel};
use indicatif::style::ProgressStyle;
use lib::hive::Hive;
use tracing::warn;
use tracing_indicatif::IndicatifLayer;
use tracing_log::AsTrace;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::{Layer, Registry};

#[macro_use]
extern crate enum_display_derive;

mod apply;
mod cli;

#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;

#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    #[cfg(feature = "dhat-heap")]
    let _profiler = dhat::Profiler::new_heap();

    let args = Cli::parse();

    let modifiers = args.to_subcommand_modifiers();
    setup_logging(args.no_progress, &args.verbose);

    if args.markdown_help {
        clap_markdown::print_help_markdown::<Cli>();
        return Ok(());
    }

    match args.command {
        cli::Commands::Apply {
            goal,
            on,
            parallel,
            no_keys,
            always_build_local,
        } => {
            let mut hive = Hive::new_from_path(args.path.as_path(), modifiers).await?;
            apply::apply(
                &mut hive,
                goal.try_into()?,
                on,
                parallel,
                no_keys,
                always_build_local,
                modifiers,
            )
            .await?;
        }
        cli::Commands::Inspect { online: _, json } => println!("{}", {
            let hive = Hive::new_from_path(args.path.as_path(), modifiers).await?;
            if json {
                serde_json::to_string_pretty(&hive)?
            } else {
                warn!("use --json to output something scripting suitable");
                format!("{hive:#?}")
            }
        }),
        cli::Commands::Completions { shell } => {
            let mut cmd = Cli::command();
            let name = cmd.clone();
            generate(shell, &mut cmd, name.get_name(), &mut std::io::stdout());
        }
    }

    Ok(())
}

pub fn setup_logging(no_progress: bool, verbosity: &Verbosity<WarnLevel>) {
    let layer = tracing_subscriber::fmt::layer::<Registry>().without_time();
    let filter = verbosity.log_level_filter().as_trace();
    let registry = tracing_subscriber::registry();

    if no_progress {
        let layer = layer.with_filter(filter);

        registry.with(layer).init();
    } else {
        let indicatif_layer = IndicatifLayer::new().with_progress_style(
            ProgressStyle::with_template(
                "{span_child_prefix}[{spinner}] {span_name}{{{span_fields}}} {wide_msg}",
            )
            .expect("Failed to create progress style"),
        );

        let layer = layer
            .with_writer(indicatif_layer.get_stderr_writer())
            .with_filter(filter);

        registry.with(layer).with(indicatif_layer).init();
    }
}
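A hedged sketch of the verbosity wiring above, assuming clap-verbosity-flag's `Verbosity::new(verbose, quiet)` constructor and the `log` crate being available: with `WarnLevel` as the default, each `-v` raises the filter one step (none = WARN, `-v` = INFO, `-vv` = DEBUG).

#[test]
fn verbosity_mapping_sketch() {
    use clap_verbosity_flag::{Verbosity, WarnLevel};

    // Equivalent to passing `-vv` on the command line
    let verbosity = Verbosity::<WarnLevel>::new(2, 0);
    assert_eq!(verbosity.log_level_filter(), log::LevelFilter::Debug);
}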
-13 wire/key_agent/Cargo.toml
···
[package]
name = "key_agent"
edition.workspace = true
version.workspace = true

[dependencies]
tokio = { workspace = true }
anyhow = { workspace = true }
prost = { workspace = true }
nix = { workspace = true }

[build-dependencies]
prost-build = "0.14"
-5 wire/key_agent/build.rs
-17 wire/key_agent/default.nix
-15 wire/key_agent/src/keys.proto
-3 wire/key_agent/src/lib.rs
-70 wire/key_agent/src/main.rs
···
#![deny(clippy::pedantic)]
use nix::unistd::{Group, User};
use prost::Message;
use std::env;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use std::{
    io::{Cursor, Read},
    os::unix::fs::chown,
};
use tokio::fs::File;
use tokio::io::AsyncWriteExt;

use key_agent::keys::Keys;

fn create_path(key_path: &Path) -> Result<(), anyhow::Error> {
    let prefix = key_path.parent().unwrap();
    std::fs::create_dir_all(prefix)?;

    Ok(())
}

#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    let mut stdin = std::io::stdin();
    let length: usize = env::args().nth(1).expect("failed to grab arg").parse()?;
    let mut msg_buf = vec![0u8; length];

    stdin.read_exact(&mut msg_buf)?;

    let msg = Keys::decode(&mut Cursor::new(&msg_buf))?;

    println!("{msg:?}");

    for key in msg.keys {
        let path = PathBuf::from(&key.destination);
        create_path(&path)?;

        let mut file = File::create(path).await?;
        let mut permissions = file.metadata().await?.permissions();

        permissions.set_mode(key.permissions);
        file.set_permissions(permissions).await?;

        let user = User::from_name(&key.user)?;
        let group = Group::from_name(&key.group)?;

        chown(
            key.destination,
            // Default uid/gid to 0. This is then wrapped around an Option again for
            // the function.
            Some(user.map_or(0, |user| user.uid.into())),
            Some(group.map_or(0, |group| group.gid.into())),
        )?;

        let mut file_buf = vec![
            0u8;
            key.length
                .try_into()
                .expect("failed to convert size to usize")
        ];

        stdin.read_exact(&mut file_buf)?;
        file.write_all(&file_buf).await?;

        println!("Wrote to {file:?}");
    }

    Ok(())
}
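A sender-side sketch of the stdin framing the agent expects; the Key field set is assumed from the usage above and the values are made up. argv[1] carries the byte length of the encoded Keys message, the message is written first, then each key's raw contents (`key.length` bytes each) in order.

fn framing_sketch() -> Vec<u8> {
    use prost::Message;

    let secret = b"hunter2".to_vec();
    let msg = key_agent::keys::Keys {
        keys: vec![key_agent::keys::Key {
            length: secret.len().try_into().unwrap(),
            user: "root".into(),
            group: "root".into(),
            permissions: 0o600,
            destination: "/run/keys/example".into(),
        }],
    };
    // Spawn `key_agent <header.len()>`, write `header`, then `secret`, to its stdin.
    let header = msg.encode_to_vec();
    header
}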
-30 wire/lib/Cargo.toml
···
[package]
name = "lib"
version.workspace = true
edition.workspace = true

[features]
no_web_tests = []

[dependencies]
tokio = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tracing = { workspace = true }
tracing-indicatif = { workspace = true }
tracing-subscriber = { workspace = true }
im = { workspace = true }
serde-query = "0.2.0"
thiserror = "2.0.12"
serde_repr = "0.1.20"
regex = "1.11.1"
derive_more = { version = "2.0.1", features = ["display"] }
key_agent = { path = "../key_agent" }
futures = "0.3.31"
prost = { workspace = true }
gethostname = "1.0.2"
async-trait = "0.1.88"
nix = { workspace = true }

[dev-dependencies]
tempdir = "0.3"
-227 wire/lib/src/hive/mod.rs
···
use node::{Name, Node};
use std::collections::HashMap;
use std::collections::hash_map::OccupiedEntry;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tracing::{debug, error, info, instrument, trace};

use serde::{Deserialize, Serialize};

use crate::nix::{EvalGoal, get_eval_command};
use crate::{HiveLibError, SubCommandModifiers};
pub mod node;
pub mod steps;

#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Hive {
    pub nodes: HashMap<Name, Node>,
    pub path: PathBuf,
}

pub enum Action<'a> {
    Inspect,
    EvaluateNode(OccupiedEntry<'a, String, Node>),
}

impl Hive {
    #[instrument]
    pub async fn new_from_path(
        path: &Path,
        modifiers: SubCommandModifiers,
    ) -> Result<Hive, HiveLibError> {
        info!("Searching upwards for hive in {}", path.display());
        let filepath = find_hive(path).ok_or(HiveLibError::NoHiveFound(path.to_path_buf()))?;
        info!("Using hive {}", filepath.display());

        let command = get_eval_command(&filepath, &EvalGoal::Inspect, modifiers)
            .output()
            .await
            .map_err(HiveLibError::NixExecError)?;

        let stdout = String::from_utf8_lossy(&command.stdout);
        let stderr = String::from_utf8_lossy(&command.stderr);

        debug!("Output of nix eval: {stdout}");

        if command.status.success() {
            let hive: Hive =
                serde_json::from_str(&stdout).map_err(HiveLibError::ParseEvaluateError)?;

            return Ok(hive);
        }

        Err(HiveLibError::NixEvalError(
            stderr
                .split('\n')
                .map(std::string::ToString::to_string)
                .collect(),
        ))
    }

    /// # Errors
    ///
    /// Returns an error if a node in nodes does not exist in the hive.
    pub fn force_always_local(&mut self, nodes: Vec<String>) -> Result<(), HiveLibError> {
        for node in nodes {
            info!("Forcing a local build for {node}");

            self.nodes
                .get_mut(&Name(Arc::from(node.clone())))
                .ok_or(HiveLibError::NodeDoesNotExist(node.to_string()))?
                .build_remotely = false;
        }

        Ok(())
    }
}

fn find_hive(path: &Path) -> Option<PathBuf> {
    trace!("Searching for hive in {}", path.display());
    let filepath_flake = path.join("flake.nix");

    if filepath_flake.is_file() {
        return Some(filepath_flake);
    }
    let filepath_hive = path.join("hive.nix");

    if filepath_hive.is_file() {
        return Some(filepath_hive);
    }

    if let Some(parent) = path.parent() {
        return find_hive(parent);
    }

    error!("No hive found");
    None
}

#[cfg(test)]
mod tests {
    use im::vector;

    use crate::{
        get_test_path,
        hive::steps::keys::{Key, Source, UploadKeyAt},
        test_support::make_flake_sandbox,
    };

    use super::*;
    use std::env;

    #[test]
    fn test_hive_dot_nix_priority() {
        let path = get_test_path!();

        let hive = find_hive(&path).unwrap();

        assert!(hive.ends_with("flake.nix"));
    }

    #[tokio::test]
    #[cfg_attr(feature = "no_web_tests", ignore)]
    async fn test_hive_file() {
        let mut path = get_test_path!();

        let hive = Hive::new_from_path(&path, SubCommandModifiers::default())
            .await
            .unwrap();

        let node = Node {
            target: node::Target {
                host: "192.168.122.96".into(),
                user: "root".into(),
                port: 22,
            },
            ..Default::default()
        };

        let mut nodes = HashMap::new();
        nodes.insert(Name("node-a".into()), node);

        path.push("hive.nix");

        assert_eq!(hive, Hive { nodes, path });
    }

    #[tokio::test]
    #[cfg_attr(feature = "no_web_tests", ignore)]
    async fn non_trivial_hive() {
        let mut path = get_test_path!();

        let hive = Hive::new_from_path(&path, SubCommandModifiers::default())
            .await
            .unwrap();

        let node = Node {
            target: node::Target {
                host: "name".into(),
                user: "root".into(),
                port: 22,
            },
            keys: vector![Key {
                name: "different-than-a".into(),
                dest_dir: "/run/keys/".into(),
                path: "/run/keys/different-than-a".into(),
                group: "root".into(),
                user: "root".into(),
                permissions: "0600".into(),
                source: Source::String("hi".into()),
                upload_at: UploadKeyAt::PreActivation,
            }],
            ..Default::default()
        };

        let mut nodes = HashMap::new();
        nodes.insert(Name("node-a".into()), node);

        path.push("hive.nix");

        assert_eq!(hive, Hive { nodes, path });
    }

    #[tokio::test]
    #[cfg_attr(feature = "no_web_tests", ignore)]
    async fn flake_hive() {
        let tmp_dir = make_flake_sandbox(&get_test_path!()).unwrap();

        let hive = Hive::new_from_path(tmp_dir.path(), SubCommandModifiers::default())
            .await
            .unwrap();

        let mut nodes = HashMap::new();

        // a merged node
        nodes.insert(Name("node-a".into()), Node::from_host("node-a"));
        // a non-merged node
        nodes.insert(Name("node-b".into()), Node::from_host("node-b"));
        // omit a node called system-c

        let mut path = tmp_dir.path().to_path_buf();
        path.push("flake.nix");

        assert_eq!(hive, Hive { nodes, path });

        tmp_dir.close().unwrap();
    }

    #[tokio::test]
    async fn no_nixpkgs() {
        let path = get_test_path!();

        assert!(matches!(
            Hive::new_from_path(&path, SubCommandModifiers::default()).await,
            Err(HiveLibError::NixEvalError(..))
        ));
    }

    #[tokio::test]
    async fn _keys_should_fail() {
        let path = get_test_path!();

        assert!(matches!(
            Hive::new_from_path(&path, SubCommandModifiers::default()).await,
            Err(HiveLibError::NixEvalError(..))
        ));
    }
}
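A small sketch of the search order in `find_hive` (the path is hypothetical): `flake.nix` wins over `hive.nix` in the same directory, and the lookup walks parent directories until one of them matches.

fn locate_hive_sketch() -> Option<std::path::PathBuf> {
    // e.g. Some("/srv/hive/flake.nix") if that is the first match on the way up
    find_hive(std::path::Path::new("/srv/hive/deeply/nested/dir"))
}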
-262
wire/lib/src/hive/node.rs
-262
wire/lib/src/hive/node.rs
···
1
-
#![allow(clippy::missing_errors_doc)]
2
-
use async_trait::async_trait;
3
-
use gethostname::gethostname;
4
-
use serde::{Deserialize, Serialize};
5
-
use std::fmt::Display;
6
-
use std::path::PathBuf;
7
-
use std::sync::Arc;
8
-
use tokio::process::Command;
9
-
use tracing::{Instrument, Span, error, info, instrument, trace};
10
-
use tracing_indicatif::span_ext::IndicatifSpanExt;
11
-
12
-
use crate::SubCommandModifiers;
13
-
use crate::hive::steps::keys::{Key, KeysStep, PushKeyAgentStep, UploadKeyAt};
14
-
use crate::nix::StreamTracing;
15
-
16
-
use super::HiveLibError;
17
-
use super::steps::activate::SwitchToConfigurationStep;
18
-
19
-
#[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq, derive_more::Display)]
20
-
pub struct Name(pub Arc<str>);
21
-
22
-
#[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq)]
23
-
pub struct Target {
24
-
#[serde(rename = "host")]
25
-
pub host: Arc<str>,
26
-
27
-
#[serde(rename = "user")]
28
-
pub user: Arc<str>,
29
-
30
-
#[serde(rename = "port")]
31
-
pub port: u32,
32
-
}
33
-
34
-
#[cfg(test)]
35
-
impl Default for Target {
36
-
fn default() -> Self {
37
-
Target {
38
-
host: "NAME".into(),
39
-
user: "root".into(),
40
-
port: 22,
41
-
}
42
-
}
43
-
}
44
-
45
-
#[cfg(test)]
46
-
impl Target {
47
-
fn from_host(host: &str) -> Self {
48
-
Target {
49
-
host: host.into(),
50
-
..Default::default()
51
-
}
52
-
}
53
-
}
54
-
55
-
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
56
-
pub struct Node {
57
-
#[serde(rename = "target")]
58
-
pub target: Target,
59
-
60
-
#[serde(rename = "buildOnTarget")]
61
-
pub build_remotely: bool,
62
-
63
-
#[serde(rename = "allowLocalDeployment")]
64
-
pub allow_local_deployment: bool,
65
-
66
-
#[serde(default)]
67
-
pub tags: im::HashSet<String>,
68
-
69
-
#[serde(rename(deserialize = "_keys", serialize = "keys"))]
70
-
pub keys: im::Vector<Key>,
71
-
72
-
#[serde(rename(deserialize = "_hostPlatform", serialize = "host_platform"))]
73
-
pub host_platform: Arc<str>,
74
-
}
75
-
76
-
#[cfg(test)]
77
-
impl Default for Node {
78
-
fn default() -> Self {
79
-
Node {
80
-
target: Target::default(),
81
-
keys: im::Vector::new(),
82
-
tags: im::HashSet::new(),
83
-
allow_local_deployment: true,
84
-
build_remotely: false,
85
-
host_platform: "x86_64-linux".into(),
86
-
}
87
-
}
88
-
}
89
-
90
-
#[cfg(test)]
91
-
impl Node {
92
-
pub fn from_host(host: &str) -> Self {
93
-
Node {
94
-
target: Target::from_host(host),
95
-
..Default::default()
96
-
}
97
-
}
98
-
}
99
-
100
-
pub fn should_apply_locally(allow_local_deployment: bool, name: &str) -> bool {
101
-
*name == *gethostname() && allow_local_deployment
102
-
}
103
-
104
-
#[derive(derive_more::Display)]
105
-
pub enum Push<'a> {
106
-
Derivation(&'a Derivation),
107
-
Path(&'a String),
108
-
}
109
-
110
-
#[derive(Deserialize, Debug)]
111
-
pub struct Derivation(String);
112
-
113
-
impl Display for Derivation {
114
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
115
-
self.0.fmt(f).and_then(|()| write!(f, "^*"))
116
-
}
117
-
}
118
-
119
-
#[derive(derive_more::Display, Debug, Clone, Copy)]
120
-
pub enum SwitchToConfigurationGoal {
121
-
Switch,
122
-
Boot,
123
-
Test,
124
-
DryActivate,
125
-
}
126
-
127
-
#[derive(derive_more::Display, Clone, Copy)]
128
-
pub enum Goal {
129
-
SwitchToConfiguration(SwitchToConfigurationGoal),
130
-
Build,
131
-
Push,
132
-
Keys,
133
-
}
134
-
135
-
#[async_trait]
136
-
pub trait ExecuteStep: Send + Sync + Display {
137
-
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError>;
138
-
139
-
fn should_execute(&self, context: &Context) -> bool;
140
-
}
141
-
142
-
#[derive(Default)]
143
-
pub struct StepState {
144
-
pub evaluation: Option<Derivation>,
145
-
pub build: Option<String>,
146
-
pub key_agent_directory: Option<String>,
147
-
}
148
-
149
-
pub struct Context<'a> {
150
-
pub name: &'a Name,
151
-
pub node: &'a Node,
152
-
pub hivepath: PathBuf,
153
-
pub modifiers: SubCommandModifiers,
154
-
pub no_keys: bool,
155
-
pub state: StepState,
156
-
pub goal: Goal,
157
-
}
158
-
159
-
pub struct GoalExecutor<'a> {
160
-
steps: Vec<Box<dyn ExecuteStep>>,
161
-
context: Context<'a>,
162
-
}
163
-
164
-
impl<'a> GoalExecutor<'a> {
165
-
pub fn new(context: Context<'a>) -> Self {
166
-
Self {
167
-
steps: vec![
168
-
Box::new(PushKeyAgentStep),
169
-
Box::new(KeysStep {
170
-
filter: UploadKeyAt::NoFilter,
171
-
}),
172
-
Box::new(KeysStep {
173
-
filter: UploadKeyAt::PreActivation,
174
-
}),
175
-
Box::new(super::steps::evaluate::Step),
176
-
Box::new(super::steps::push::EvaluatedOutputStep),
177
-
Box::new(super::steps::build::Step),
178
-
Box::new(super::steps::push::BuildOutputStep),
179
-
Box::new(SwitchToConfigurationStep),
180
-
Box::new(KeysStep {
181
-
filter: UploadKeyAt::PostActivation,
182
-
}),
183
-
],
184
-
context,
185
-
}
186
-
}
187
-
188
-
#[instrument(skip_all, name = "goal", fields(node = %self.context.name))]
189
-
pub async fn execute(mut self, span: Span) -> Result<(), HiveLibError> {
190
-
let steps = self
191
-
.steps
192
-
.iter()
193
-
.filter(|step| step.should_execute(&self.context))
194
-
.inspect(|step| trace!("Will execute step `{step}` for {}", self.context.name))
195
-
.collect::<Vec<_>>();
196
-
197
-
span.pb_inc_length(steps.len().try_into().unwrap());
198
-
199
-
for step in steps {
200
-
info!("Executing step `{step}`");
201
-
202
-
step.execute(&mut self.context).await.inspect_err(|_| {
203
-
error!("Failed to execute `{step}`");
204
-
})?;
205
-
206
-
span.pb_inc(1);
207
-
}
208
-
209
-
Ok(())
210
-
}
211
-
}
212
-
213
-
pub async fn push(node: &Node, name: &Name, push: Push<'_>) -> Result<(), HiveLibError> {
214
-
let mut command = Command::new("nix");
215
-
216
-
command
217
-
.args(["--extra-experimental-features", "nix-command"])
218
-
.arg("copy")
219
-
.arg("--substitute-on-destination")
220
-
.arg("--to")
221
-
.arg(format!("ssh://{}@{}", node.target.user, node.target.host))
222
-
.env("NIX_SSHOPTS", format!("-p {}", node.target.port));
223
-
224
-
match push {
225
-
Push::Derivation(drv) => command.args([drv.to_string(), "--derivation".to_string()]),
226
-
Push::Path(path) => command.arg(path),
227
-
};
228
-
229
-
let (status, _stdout, stderr_vec) = command.execute(true).in_current_span().await?;
230
-
231
-
if !status.success() {
232
-
return Err(HiveLibError::NixCopyError(name.clone(), stderr_vec));
233
-
}
234
-
235
-
Ok(())
236
-
}
237
-
238
-
#[cfg(test)]
239
-
mod tests {
240
-
use super::*;
241
-
use crate::{get_test_path, hive::Hive};
242
-
use std::{collections::HashMap, env};
243
-
244
-
#[tokio::test]
245
-
#[cfg_attr(feature = "no_web_tests", ignore)]
246
-
async fn default_values_match() {
247
-
let mut path = get_test_path!();
248
-
249
-
let hive = Hive::new_from_path(&path, SubCommandModifiers::default())
250
-
.await
251
-
.unwrap();
252
-
253
-
let node = Node::default();
254
-
255
-
let mut nodes = HashMap::new();
256
-
nodes.insert(Name("NAME".into()), node);
257
-
258
-
path.push("hive.nix");
259
-
260
-
assert_eq!(hive, Hive { nodes, path });
261
-
}
262
-
}
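A sketch of the local-deployment guard `should_apply_locally` defined above: a node is applied on this machine only when its name equals the local hostname and it opts in via `allowLocalDeployment` (the foreign node name is made up).

#[test]
fn local_apply_guard_sketch() {
    let hostname = gethostname::gethostname().to_string_lossy().to_string();
    assert!(should_apply_locally(true, &hostname));
    assert!(!should_apply_locally(false, &hostname));
    assert!(!should_apply_locally(true, "some-other-node"));
}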
-127
wire/lib/src/hive/steps/activate.rs
-127
wire/lib/src/hive/steps/activate.rs
···
1
-
use std::{fmt::Display, process::Output};
2
-
3
-
use async_trait::async_trait;
4
-
use tokio::process::Command;
5
-
use tracing::{Instrument, info, instrument, warn};
6
-
use tracing_indicatif::suspend_tracing_indicatif;
7
-
8
-
use crate::{
9
-
HiveLibError, create_ssh_command,
10
-
hive::node::{Context, ExecuteStep, Goal, SwitchToConfigurationGoal, should_apply_locally},
11
-
nix::StreamTracing,
12
-
};
13
-
14
-
pub struct SwitchToConfigurationStep;
15
-
16
-
impl Display for SwitchToConfigurationStep {
17
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
18
-
write!(f, "Switch to configuration")
19
-
}
20
-
}
21
-
22
-
pub(crate) fn get_elevation(reason: &str) -> Result<Output, HiveLibError> {
23
-
info!("Attempting to elevate for {reason}.");
24
-
suspend_tracing_indicatif(|| {
25
-
let mut command = std::process::Command::new("sudo");
26
-
command.arg("-v").output()
27
-
})
28
-
.map_err(HiveLibError::FailedToElevate)
29
-
}
30
-
31
-
#[async_trait]
32
-
impl ExecuteStep for SwitchToConfigurationStep {
33
-
fn should_execute(&self, ctx: &Context) -> bool {
34
-
matches!(ctx.goal, Goal::SwitchToConfiguration(..))
35
-
}
36
-
37
-
#[instrument(skip_all, name = "switch")]
38
-
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
39
-
let built_path = ctx.state.build.as_ref().unwrap();
40
-
41
-
let Goal::SwitchToConfiguration(goal) = &ctx.goal else {
42
-
unreachable!("Cannot reach as guarded by should_execute")
43
-
};
44
-
45
-
if !matches!(goal, SwitchToConfigurationGoal::DryActivate) {
46
-
info!("Setting profiles in anticipation for switch-to-configuration {goal}");
47
-
48
-
let mut env_command =
49
-
if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
50
-
// Refresh sudo timeout
51
-
warn!("Running nix-env ON THIS MACHINE for node {0}", ctx.name);
52
-
get_elevation("nix-env")?;
53
-
let mut command = Command::new("sudo");
54
-
command.arg("nix-env");
55
-
command
56
-
} else {
57
-
let mut command = create_ssh_command(&ctx.node.target, true);
58
-
command.arg("nix-env");
59
-
command
60
-
};
61
-
62
-
env_command.args(["-p", "/nix/var/nix/profiles/system/", "--set", built_path]);
63
-
64
-
let (status, _, stderr_vec) = env_command.execute(true).in_current_span().await?;
65
-
66
-
if !status.success() {
67
-
let stderr: Vec<String> = stderr_vec
68
-
.into_iter()
69
-
.map(|l| l.to_string())
70
-
.filter(|s| !s.is_empty())
71
-
.collect();
72
-
73
-
return Err(HiveLibError::NixEnvError(ctx.name.clone(), stderr));
74
-
}
75
-
76
-
info!("Set system profile");
77
-
}
78
-
79
-
info!("Running switch-to-configuration {goal}");
80
-
81
-
let cmd = format!("{built_path}/bin/switch-to-configuration");
82
-
83
-
let mut command =
84
-
if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
85
-
// Refresh sudo timeout
86
-
warn!(
87
-
"Running switch-to-configuration {goal:?} ON THIS MACHINE for node {0}",
88
-
ctx.name
89
-
);
90
-
get_elevation("switch-to-configuration")?;
91
-
let mut command = Command::new("sudo");
92
-
command.arg(cmd);
93
-
command
94
-
} else {
95
-
let mut command = create_ssh_command(&ctx.node.target, true);
96
-
command.arg(cmd);
97
-
command
98
-
};
99
-
100
-
command.arg(match goal {
101
-
SwitchToConfigurationGoal::Switch => "switch",
102
-
SwitchToConfigurationGoal::Boot => "boot",
103
-
SwitchToConfigurationGoal::Test => "test",
104
-
SwitchToConfigurationGoal::DryActivate => "dry-activate",
105
-
});
106
-
107
-
let (status, _, stderr_vec) = command.execute(true).in_current_span().await?;
108
-
109
-
if status.success() {
110
-
info!("Done");
111
-
112
-
return Ok(());
113
-
}
114
-
115
-
let stderr: Vec<String> = stderr_vec
116
-
.into_iter()
117
-
.map(|l| l.to_string())
118
-
.filter(|s| !s.is_empty())
119
-
.collect();
120
-
121
-
Err(HiveLibError::SwitchToConfigurationError(
122
-
*goal,
123
-
ctx.name.clone(),
124
-
stderr,
125
-
))
126
-
}
127
-
}
-71 wire/lib/src/hive/steps/build.rs
···
use std::fmt::Display;

use async_trait::async_trait;
use tokio::process::Command;
use tracing::{Instrument, info, instrument};

use crate::{
    HiveLibError, create_ssh_command,
    hive::node::{Context, ExecuteStep, Goal},
    nix::StreamTracing,
};

pub struct Step;

impl Display for Step {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Build the node")
    }
}

#[async_trait]
impl ExecuteStep for Step {
    fn should_execute(&self, ctx: &Context) -> bool {
        !matches!(ctx.goal, Goal::Keys | Goal::Push)
    }

    #[instrument(skip_all, name = "build")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        let top_level = ctx.state.evaluation.as_ref().unwrap();

        let mut command = if ctx.node.build_remotely {
            let mut command = create_ssh_command(&ctx.node.target, false);
            command.arg("nix");
            command
        } else {
            Command::new("nix")
        };

        command
            .args(["--extra-experimental-features", "nix-command"])
            .arg("build")
            .arg("--print-build-logs")
            .arg("--print-out-paths")
            .arg(top_level.to_string());

        let (status, stdout, stderr_vec) = command.execute(true).in_current_span().await?;

        if status.success() {
            info!("Built output: {stdout:?}");

            let stdout = stdout
                .into_iter()
                .map(|l| l.to_string())
                .filter(|s| !s.is_empty())
                .collect::<Vec<String>>()
                .join("\n");

            ctx.state.build = Some(stdout);

            return Ok(());
        }

        let stderr: Vec<String> = stderr_vec
            .into_iter()
            .map(|l| l.to_string())
            .filter(|s| !s.is_empty())
            .collect();

        Err(HiveLibError::NixBuildError(ctx.name.clone(), stderr))
    }
}
-53 wire/lib/src/hive/steps/evaluate.rs
···
use std::fmt::Display;

use async_trait::async_trait;
use tracing::{Instrument, instrument};

use crate::{
    HiveLibError,
    hive::node::{Context, Derivation, ExecuteStep, Goal},
    nix::{EvalGoal, StreamTracing, get_eval_command},
};

pub struct Step;

impl Display for Step {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Evaluate the node")
    }
}

#[async_trait]
impl ExecuteStep for Step {
    fn should_execute(&self, ctx: &Context) -> bool {
        !matches!(ctx.goal, Goal::Keys)
    }

    #[instrument(skip_all, name = "eval")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        let mut command = get_eval_command(
            &ctx.hivepath,
            &EvalGoal::GetTopLevel(ctx.name),
            ctx.modifiers,
        );

        let (status, stdout_vec, stderr) = command.execute(true).in_current_span().await?;

        if status.success() {
            let stdout: Vec<String> = stdout_vec
                .into_iter()
                .map(|l| l.to_string())
                .filter(|s| !s.is_empty())
                .collect();

            let derivation: Derivation =
                serde_json::from_str(&stdout.join("\n")).expect("failed to parse derivation");

            ctx.state.evaluation = Some(derivation);

            return Ok(());
        }

        Err(HiveLibError::NixEvalInteralError(ctx.name.clone(), stderr))
    }
}
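A sketch of the hand-off performed above (the store path is hypothetical): `nix eval --json` prints the derivation path as a JSON string, the `Derivation` newtype deserializes it directly, and its `Display` impl appends `^*` so later nix commands address every output.

#[test]
fn evaluated_derivation_sketch() {
    let json = r#""/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-nixos-system-node-a.drv""#;
    let derivation: Derivation = serde_json::from_str(json).unwrap();
    assert_eq!(
        derivation.to_string(),
        "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-nixos-system-node-a.drv^*"
    );
}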
-313
wire/lib/src/hive/steps/keys.rs
-313
wire/lib/src/hive/steps/keys.rs
···
1
-
use async_trait::async_trait;
2
-
use futures::future::join_all;
3
-
use prost::Message;
4
-
use serde::{Deserialize, Serialize};
5
-
use std::env;
6
-
use std::fmt::Display;
7
-
use std::io::Cursor;
8
-
use std::pin::Pin;
9
-
use std::process::{ExitStatus, Stdio};
10
-
use std::str::from_utf8;
11
-
use std::{num::ParseIntError, path::PathBuf};
12
-
use thiserror::Error;
13
-
use tokio::io::{AsyncReadExt as _, AsyncWriteExt};
14
-
use tokio::process::Command;
15
-
use tokio::{fs::File, io::AsyncRead};
16
-
use tracing::{debug, info, trace, warn};
17
-
18
-
use crate::hive::node::{
19
-
Context, ExecuteStep, Goal, Push, SwitchToConfigurationGoal, push, should_apply_locally,
20
-
};
21
-
use crate::hive::steps::activate::get_elevation;
22
-
use crate::{HiveLibError, create_ssh_command};
23
-
24
-
#[derive(Debug, Error)]
25
-
pub enum KeyError {
26
-
#[error("error reading file")]
27
-
File(#[source] std::io::Error),
28
-
29
-
#[error("error spawning command")]
30
-
CommandSpawnError(#[source] std::io::Error),
31
-
32
-
#[error("key command failed with status {}: {}", .0,.1)]
33
-
CommandError(ExitStatus, String),
34
-
35
-
#[error("Command list empty")]
36
-
Empty,
37
-
38
-
#[error("Failed to parse key permissions")]
39
-
ParseKeyPermissions(#[source] ParseIntError),
40
-
}
41
-
42
-
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
43
-
#[serde(tag = "t", content = "c")]
44
-
pub enum Source {
45
-
String(String),
46
-
Path(PathBuf),
47
-
Command(Vec<String>),
48
-
}
49
-
50
-
#[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq)]
51
-
pub enum UploadKeyAt {
52
-
#[serde(rename = "pre-activation")]
53
-
PreActivation,
54
-
#[serde(rename = "post-activation")]
55
-
PostActivation,
56
-
#[serde(skip)]
57
-
NoFilter,
58
-
}
59
-
60
-
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
61
-
pub struct Key {
62
-
pub name: String,
63
-
#[serde(rename = "destDir")]
64
-
pub dest_dir: String,
65
-
pub path: PathBuf,
66
-
pub group: String,
67
-
pub user: String,
68
-
pub permissions: String,
69
-
pub source: Source,
70
-
#[serde(rename = "uploadAt")]
71
-
pub upload_at: UploadKeyAt,
72
-
}
73
-
74
-
fn should_execute(filter: &UploadKeyAt, ctx: &crate::hive::node::Context) -> bool {
75
-
if ctx.no_keys {
76
-
return false;
77
-
}
78
-
79
-
// should execute if no filter, and the goal is keys.
80
-
// otherwise, only execute if the goal is switch
81
-
matches!(
82
-
(filter, &ctx.goal),
83
-
(UploadKeyAt::NoFilter, Goal::Keys)
84
-
| (
85
-
_,
86
-
Goal::SwitchToConfiguration(SwitchToConfigurationGoal::Switch)
87
-
)
88
-
)
89
-
}
90
-
91
-
fn get_u32_permission(key: &Key) -> Result<u32, KeyError> {
92
-
u32::from_str_radix(&key.permissions, 8).map_err(KeyError::ParseKeyPermissions)
93
-
}
94
-
95
-
async fn create_reader(
96
-
source: &'_ Source,
97
-
) -> Result<Pin<Box<dyn AsyncRead + Send + '_>>, KeyError> {
98
-
match source {
99
-
Source::Path(path) => Ok(Box::pin(File::open(path).await.map_err(KeyError::File)?)),
100
-
Source::String(string) => Ok(Box::pin(Cursor::new(string))),
101
-
Source::Command(args) => {
102
-
let output = Command::new(args.first().ok_or(KeyError::Empty)?)
103
-
.args(&args[1..])
104
-
.stdin(Stdio::null())
105
-
.stdout(Stdio::piped())
106
-
.stderr(Stdio::piped())
107
-
.spawn()
108
-
.map_err(KeyError::CommandSpawnError)?
109
-
.wait_with_output()
110
-
.await
111
-
.map_err(KeyError::CommandSpawnError)?;
112
-
113
-
if output.status.success() {
114
-
return Ok(Box::pin(Cursor::new(output.stdout)));
115
-
}
116
-
117
-
Err(KeyError::CommandError(
118
-
output.status,
119
-
from_utf8(&output.stderr).unwrap().to_string(),
120
-
))
121
-
}
122
-
}
123
-
}
124
-
125
-
async fn copy_buffer<T: AsyncWriteExt + Unpin>(
126
-
reader: &mut T,
127
-
buf: &[u8],
128
-
) -> Result<(), HiveLibError> {
129
-
reader
130
-
.write_all(buf)
131
-
.await
132
-
.map_err(HiveLibError::BufferOperationError)?;
133
-
reader
134
-
.flush()
135
-
.await
136
-
.map_err(HiveLibError::BufferOperationError)
137
-
}
138
-
139
-
async fn copy_buffers<T: AsyncWriteExt + Unpin>(
140
-
reader: &mut T,
141
-
bufs: Vec<Vec<u8>>,
142
-
) -> Result<(), HiveLibError> {
143
-
for (index, buf) in bufs.iter().enumerate() {
144
-
trace!("Pushing buf {}", index);
145
-
copy_buffer(reader, buf).await?;
146
-
}
147
-
148
-
Ok(())
149
-
}
150
-
151
-
async fn process_key(key: &Key) -> Result<(key_agent::keys::Key, Vec<u8>), KeyError> {
152
-
let mut reader = create_reader(&key.source).await?;
153
-
154
-
let mut buf = Vec::new();
155
-
156
-
reader
157
-
.read_to_end(&mut buf)
158
-
.await
159
-
.expect("failed to read into buffer");
160
-
161
-
let destination: PathBuf = [key.dest_dir.clone(), key.name.clone()].iter().collect();
162
-
163
-
debug!(
164
-
"Staging push to {}",
165
-
destination.clone().into_os_string().into_string().unwrap()
166
-
);
167
-
168
-
Ok((
169
-
key_agent::keys::Key {
170
-
length: buf
171
-
.len()
172
-
.try_into()
173
-
.expect("Failed to conver usize buf length to i32"),
174
-
user: key.user.clone(),
175
-
group: key.group.clone(),
176
-
permissions: get_u32_permission(key)?,
177
-
destination: destination.into_os_string().into_string().unwrap(),
178
-
},
179
-
buf,
180
-
))
181
-
}
182
-
183
-
pub struct KeysStep {
184
-
pub filter: UploadKeyAt,
185
-
}
186
-
pub struct PushKeyAgentStep;
187
-
188
-
impl Display for KeysStep {
189
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
190
-
write!(f, "Upload key @ {:?}", self.filter)
191
-
}
192
-
}
193
-
194
-
impl Display for PushKeyAgentStep {
195
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
196
-
write!(f, "Push the key agent")
197
-
}
198
-
}
199
-
200
-
#[async_trait]
201
-
impl ExecuteStep for KeysStep {
202
-
fn should_execute(&self, ctx: &Context) -> bool {
203
-
should_execute(&self.filter, ctx)
204
-
}
205
-
206
-
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
207
-
let agent_directory = ctx.state.key_agent_directory.as_ref().unwrap();
208
-
209
-
let futures = ctx
210
-
.node
211
-
.keys
212
-
.iter()
213
-
.filter(|key| {
214
-
self.filter == UploadKeyAt::NoFilter
215
-
|| (self.filter != UploadKeyAt::NoFilter && key.upload_at != self.filter)
216
-
})
217
-
.map(|key| async move { process_key(key).await });
218
-
219
-
let (keys, bufs): (Vec<key_agent::keys::Key>, Vec<Vec<u8>>) = join_all(futures)
220
-
.await
221
-
.into_iter()
222
-
.collect::<Result<Vec<_>, KeyError>>()
223
-
.map_err(HiveLibError::KeyError)?
224
-
.into_iter()
225
-
.unzip();
226
-
227
-
let msg = key_agent::keys::Keys { keys };
228
-
229
-
trace!("Sending message {msg:?}");
230
-
231
-
let buf = msg.encode_to_vec();
232
-
233
-
let mut command =
234
-
if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
235
-
warn!("Placing keys locally for node {0}", ctx.name);
236
-
get_elevation("wire key agent")?;
237
-
Command::new("sudo")
238
-
} else {
239
-
create_ssh_command(&ctx.node.target, true)
240
-
};
241
-
242
-
let mut child = command
243
-
.args([
244
-
format!("{agent_directory}/bin/key_agent"),
245
-
buf.len().to_string(),
246
-
])
247
-
.stdout(Stdio::piped())
248
-
.stderr(Stdio::piped())
249
-
.stdin(Stdio::piped())
250
-
.spawn()
251
-
.map_err(HiveLibError::SpawnFailed)?;
252
-
253
-
// take() stdin so it will be dropped out of block
254
-
if let Some(mut stdin) = child.stdin.take() {
255
-
trace!("Pushing msg");
256
-
copy_buffer(&mut stdin, &buf).await?;
257
-
copy_buffers(&mut stdin, bufs).await?;
258
-
}
259
-
260
-
let output = child
261
-
.wait_with_output()
262
-
.await
263
-
.map_err(HiveLibError::SpawnFailed)?;
264
-
265
-
if output.status.success() {
266
-
info!("Successfully pushed keys to {}", ctx.name);
267
-
trace!("Agent stdout: {}", String::from_utf8_lossy(&output.stdout));
268
-
269
-
return Ok(());
270
-
}
271
-
272
-
let stderr = String::from_utf8_lossy(&output.stderr);
273
-
274
-
Err(HiveLibError::KeyCommandError(
275
-
ctx.name.clone(),
276
-
stderr
277
-
.split('\n')
278
-
.map(std::string::ToString::to_string)
279
-
.collect(),
280
-
))
281
-
}
282
-
}
283
-
284
-
#[async_trait]
285
-
impl ExecuteStep for PushKeyAgentStep {
286
-
fn should_execute(&self, ctx: &Context) -> bool {
287
-
should_execute(&UploadKeyAt::NoFilter, ctx)
288
-
}
289
-
290
-
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
291
-
let arg_name = format!(
292
-
"WIRE_KEY_AGENT_{platform}",
293
-
platform = ctx.node.host_platform.replace('-', "_")
294
-
);
295
-
296
-
let agent_directory = match env::var_os(&arg_name) {
297
-
Some(agent) => agent.into_string().unwrap(),
298
-
None => panic!(
299
-
"{arg_name} environment variable not set! \n
300
-
Wire was not built with the ability to deploy keys to this platform. \n
301
-
Please create an issue: https://github.com/wires-org/wire/issues/new?template=bug_report.md"
302
-
),
303
-
};
304
-
305
-
if !should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
306
-
push(ctx.node, ctx.name, Push::Path(&agent_directory)).await?;
307
-
}
308
-
309
-
ctx.state.key_agent_directory = Some(agent_directory);
310
-
311
-
Ok(())
312
-
}
313
-
}
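A sketch of the permission handling above: keys declare their mode as an octal string, and `get_u32_permission` parses it with `from_str_radix(.., 8)` before the agent applies it via `set_mode`.

#[test]
fn octal_permission_sketch() {
    assert_eq!(u32::from_str_radix("0600", 8).unwrap(), 0o600);
    assert_eq!(u32::from_str_radix("0644", 8).unwrap(), 0o644);
}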
-5 wire/lib/src/hive/steps/mod.rs
-82 wire/lib/src/hive/steps/push.rs
···
use std::fmt::Display;

use async_trait::async_trait;
use tracing::{instrument, warn};

use crate::{
    HiveLibError,
    hive::node::{Context, ExecuteStep, Goal, push, should_apply_locally},
};

pub struct EvaluatedOutputStep;
pub struct BuildOutputStep;

impl Display for EvaluatedOutputStep {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Push the evaluated output")
    }
}

impl Display for BuildOutputStep {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Push the build output")
    }
}

#[async_trait]
impl ExecuteStep for EvaluatedOutputStep {
    fn should_execute(&self, ctx: &Context) -> bool {
        !matches!(ctx.goal, Goal::Keys) && ctx.node.build_remotely
    }

    #[instrument(skip_all, name = "push_eval")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        let top_level = ctx.state.evaluation.as_ref().unwrap();

        push(
            ctx.node,
            ctx.name,
            crate::hive::node::Push::Derivation(top_level),
        ).await.inspect_err(|_| {
            if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
                warn!("Remote push failed, but this node matches our local hostname ({0}). Perhaps you want to apply this node locally? Use `--always-build-local {0}` to override deployment.buildOnTarget", ctx.name.to_string());
            } else {
                warn!("Use `--always-build-local {0}` to override deployment.buildOnTarget and force {0} to build locally", ctx.name.to_string());
            }
        })
    }
}

#[async_trait]
impl ExecuteStep for BuildOutputStep {
    fn should_execute(&self, ctx: &Context) -> bool {
        if matches!(ctx.goal, Goal::Keys | Goal::Push) {
            // skip if we are not building
            return false;
        }

        if ctx.node.build_remotely {
            // skip if we are building remotely
            return false;
        }

        if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.0) {
            // skip step if we are applying locally
            return false;
        }

        true
    }

    #[instrument(skip_all, name = "push_build")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        let built_path = ctx.state.build.as_ref().unwrap();

        push(
            ctx.node,
            ctx.name,
            crate::hive::node::Push::Path(built_path),
        )
        .await
    }
}
-119 wire/lib/src/lib.rs
···
#![feature(let_chains)]
#![deny(clippy::pedantic)]
#![allow(
    clippy::missing_errors_doc,
    clippy::must_use_candidate,
    clippy::missing_panics_doc
)]
use hive::{
    node::{Name, SwitchToConfigurationGoal, Target},
    steps::keys::KeyError,
};
use nix_log::{NixLog, Trace};
use std::path::PathBuf;
use thiserror::Error;
use tokio::{process::Command, task::JoinError};

pub mod hive;
mod nix;
mod nix_log;

#[cfg(test)]
mod test_macros;

#[cfg(test)]
mod test_support;

fn create_ssh_command(target: &Target, sudo: bool) -> Command {
    let mut command = Command::new("ssh");

    command
        .args(["-l", target.user.as_ref()])
        .arg(target.host.as_ref())
        .args(["-p", &target.port.to_string()]);

    if sudo && target.user != "root".into() {
        command.args(["sudo", "-H", "--"]);
    }

    command
}

fn format_error_lines(lines: &[String]) -> String {
    lines
        .iter()
        .rev()
        .take(20)
        .rev()
        .cloned()
        .collect::<Vec<_>>()
        .join("\n")
}

#[derive(Debug, Error)]
pub enum HiveLibError {
    #[error("no hive could be found in {}", .0.display())]
    NoHiveFound(PathBuf),

    #[error("failed to execute nix command")]
    NixExecError(#[source] tokio::io::Error),

    #[error("failed to evaluate nix expression (last 20 lines):\n{}", format_error_lines(.0))]
    NixEvalError(Vec<String>),

    #[error(
        "failed to evaluate node {0} (filtered logs, run with -vvv to see all):\n{log}",
        log = .1.iter().filter(|l| l.is_error()).map(std::string::ToString::to_string).collect::<Vec<String>>().join("\n"))
    ]
    NixEvalInteralError(Name, Vec<NixLog>),

    #[error(
        "failed to copy drv to node {0} (filtered logs, run with -vvv to see all):\n{log}",
        log = .1.iter().filter(|l| l.is_error()).map(std::string::ToString::to_string).collect::<Vec<String>>().join("\n"))
    ]
    NixCopyError(Name, Vec<NixLog>),

    #[error("failed to build node {0} (last 20 lines):\n{lines}", lines = format_error_lines(.1))]
    NixBuildError(Name, Vec<String>),

    #[error("failed to run switch-to-configuration {0} on node {1} (last 20 lines):\n{lines}", lines = format_error_lines(.2))]
    SwitchToConfigurationError(SwitchToConfigurationGoal, Name, Vec<String>),

    #[error("failed to run nix-env on node {0} (last 20 lines):\n{lines}", lines = format_error_lines(.1))]
    NixEnvError(Name, Vec<String>),

    #[error("failed to push keys to {0} (last 20 lines):\n{lines}", lines = format_error_lines(.1))]
    KeyCommandError(Name, Vec<String>),

    #[error("failed to push a key")]
    KeyError(#[source] KeyError),

    #[error("node {0} does not exist in hive")]
    NodeDoesNotExist(String),

    #[error("failed to execute command")]
    SpawnFailed(#[source] tokio::io::Error),

    #[error("failed to join task")]
    JoinError(#[source] JoinError),

    #[error("there was no handle to io on the child process")]
    NoHandle,

    #[error("failed to parse nix log \"{0}\"")]
    ParseLogError(String, #[source] serde_json::Error),

    #[error("failed to parse internal wire json. please create an issue!")]
    ParseEvaluateError(#[source] serde_json::Error),

    #[error("an operation failed in regards to buffers")]
    BufferOperationError(#[source] tokio::io::Error),

    #[error("failed to elevate")]
    FailedToElevate(#[source] std::io::Error),
}

#[derive(Debug, Default, Clone, Copy)]
pub struct SubCommandModifiers {
    pub show_trace: bool,
}
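A sketch of the truncation used by the error messages above: `format_error_lines` keeps only the last 20 lines of a log, in their original order.

#[test]
fn error_tail_sketch() {
    let lines: Vec<String> = (1..=25).map(|i| format!("line {i}")).collect();
    let tail = format_error_lines(&lines);
    assert!(tail.starts_with("line 6"));
    assert!(tail.ends_with("line 25"));
}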
-179
wire/lib/src/nix.rs
-179
wire/lib/src/nix.rs
···
1
-
use regex::Regex;
2
-
use std::env;
3
-
use std::path::Path;
4
-
use std::process::{Command, ExitStatus};
5
-
use std::sync::LazyLock;
6
-
use tokio::io::BufReader;
7
-
use tokio::io::{AsyncBufReadExt, AsyncRead};
8
-
use tracing::{Instrument, Span, error, info, trace};
9
-
use tracing_indicatif::span_ext::IndicatifSpanExt;
10
-
11
-
use crate::hive::node::Name;
12
-
use crate::nix_log::{Action, Internal, NixLog, Trace};
13
-
use crate::{HiveLibError, SubCommandModifiers};
14
-
15
-
static DIGEST_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[0-9a-z]{32}").unwrap());
16
-
17
-
pub enum EvalGoal<'a> {
18
-
Inspect,
19
-
GetTopLevel(&'a Name),
20
-
}
21
-
22
-
fn check_nix_available() -> bool {
23
-
match Command::new("nix")
24
-
.stdout(std::process::Stdio::null())
25
-
.stderr(std::process::Stdio::null())
26
-
.spawn()
27
-
{
28
-
Ok(_) => true,
29
-
Err(e) => {
30
-
if let std::io::ErrorKind::NotFound = e.kind() {
31
-
false
32
-
} else {
33
-
error!(
34
-
"Something weird happened checking for nix availability, {}",
35
-
e
36
-
);
37
-
false
38
-
}
39
-
}
40
-
}
41
-
}
42
-
43
-
pub fn get_eval_command(
44
-
path: &Path,
45
-
goal: &EvalGoal,
46
-
modifiers: SubCommandModifiers,
47
-
) -> tokio::process::Command {
48
-
let runtime = match env::var_os("WIRE_RUNTIME") {
49
-
Some(runtime) => runtime.into_string().unwrap(),
50
-
None => panic!("WIRE_RUNTIME environment variable not set"),
51
-
};
52
-
53
-
assert!(check_nix_available(), "nix is not available on this system");
54
-
55
-
let canon_path = path.canonicalize().unwrap();
56
-
57
-
let mut command = tokio::process::Command::new("nix");
58
-
command.args(["--extra-experimental-features", "nix-command"]);
59
-
command.args(["--extra-experimental-features", "flakes"]);
60
-
command.args(["eval", "--json", "--impure"]);
61
-
if modifiers.show_trace {
62
-
command.arg("--show-trace");
63
-
}
64
-
command.args(["--expr"]);
65
-
66
-
command.arg(format!(
67
-
"let flake = {flake}; evaluate = import {runtime}/evaluate.nix; hive = evaluate {{hive = \
68
-
{hive}; path = {path}; nixosConfigurations = {nixosConfigurations}; nixpkgs = \
69
-
{nixpkgs};}}; in {goal}",
70
-
flake = if canon_path.ends_with("flake.nix") {
71
-
format!(
72
-
"(builtins.getFlake \"git+file://{path}\")",
73
-
path = canon_path.parent().unwrap().to_str().unwrap(),
74
-
)
75
-
} else {
76
-
"null".to_string()
77
-
},
78
-
hive = if canon_path.ends_with("flake.nix") {
79
-
"flake.colmena".to_string()
80
-
} else {
81
-
format!("import {path}", path = canon_path.to_str().unwrap())
82
-
},
83
-
nixosConfigurations = if canon_path.ends_with("flake.nix") {
84
-
"flake.nixosConfigurations or {}".to_string()
85
-
} else {
86
-
"{}".to_string()
87
-
},
88
-
nixpkgs = if canon_path.ends_with("flake.nix") {
89
-
"flake.inputs.nixpkgs.outPath or null".to_string()
90
-
} else {
91
-
"null".to_string()
92
-
},
93
-
path = canon_path.to_str().unwrap(),
94
-
goal = match goal {
95
-
EvalGoal::Inspect => "hive.inspect".to_string(),
96
-
EvalGoal::GetTopLevel(node) => format!("hive.getTopLevel \"{node}\""),
97
-
}
98
-
));
99
-
100
-
command
101
-
}
102
-
103
-
pub async fn handle_io<R>(reader: R, should_trace: bool) -> Result<Vec<NixLog>, HiveLibError>
104
-
where
105
-
R: AsyncRead + Unpin,
106
-
{
107
-
let mut io_reader = BufReader::new(reader).lines();
108
-
let mut collect = Vec::new();
109
-
110
-
while let Some(line) = io_reader
111
-
.next_line()
112
-
.await
113
-
.map_err(HiveLibError::SpawnFailed)?
114
-
{
115
-
let log = serde_json::from_str::<Internal>(line.strip_prefix("@nix ").unwrap_or(&line))
116
-
.map(NixLog::Internal)
117
-
.unwrap_or(NixLog::Raw(line.to_string()));
118
-
119
-
// Throw out stop logs
120
-
if let NixLog::Internal(Internal {
121
-
action: Action::Stop,
122
-
}) = log
123
-
{
124
-
continue;
125
-
}
126
-
127
-
if cfg!(debug_assertions) {
128
-
trace!(line);
129
-
}
130
-
131
-
if should_trace {
132
-
match log {
133
-
NixLog::Raw(ref string) => info!("{string}"),
134
-
NixLog::Internal(ref internal) => internal.trace(),
135
-
}
136
-
137
-
Span::current().pb_set_message(&DIGEST_RE.replace_all(&log.to_string(), "โฆ"));
138
-
}
139
-
140
-
collect.push(log);
141
-
}
142
-
143
-
Ok(collect)
144
-
}
145
-
146
-
pub trait StreamTracing {
147
-
async fn execute(
148
-
&mut self,
149
-
log_stderr: bool,
150
-
) -> Result<(ExitStatus, Vec<NixLog>, Vec<NixLog>), HiveLibError>;
151
-
}
152
-
153
-
impl StreamTracing for tokio::process::Command {
154
-
async fn execute(
155
-
&mut self,
156
-
log_stderr: bool,
157
-
) -> Result<(ExitStatus, Vec<NixLog>, Vec<NixLog>), HiveLibError> {
158
-
let mut child = self
159
-
.args(["--log-format", "internal-json"])
160
-
.stderr(std::process::Stdio::piped())
161
-
.stdout(std::process::Stdio::piped())
162
-
.spawn()
163
-
.map_err(HiveLibError::SpawnFailed)?;
164
-
165
-
let stdout_handle = child.stdout.take().ok_or(HiveLibError::NoHandle)?;
166
-
let stderr_handle = child.stderr.take().ok_or(HiveLibError::NoHandle)?;
167
-
168
-
let stderr_task = tokio::spawn(handle_io(stderr_handle, log_stderr).in_current_span());
169
-
let stdout_task = tokio::spawn(handle_io(stdout_handle, false));
170
-
171
-
let handle =
172
-
tokio::spawn(async move { child.wait().await.map_err(HiveLibError::SpawnFailed) });
173
-
174
-
let (result, stdout, stderr) =
175
-
tokio::try_join!(handle, stdout_task, stderr_task).map_err(HiveLibError::JoinError)?;
176
-
177
-
Ok((result?, stdout?, stderr?))
178
-
}
179
-
}
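A sketch of the digest masking above (the store path is made up): `DIGEST_RE` matches the 32-character store hash so the progress message stays short and stable.

#[test]
fn digest_masking_sketch() {
    use regex::Regex;

    let re = Regex::new(r"[0-9a-z]{32}").unwrap();
    let msg = "copying path '/nix/store/0123456789abcdfghijklmnpqrsvwxyz-hello-2.12'";
    assert_eq!(
        re.replace_all(msg, "…"),
        "copying path '/nix/store/…-hello-2.12'"
    );
}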
-119 wire/lib/src/nix_log.rs
···
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};
use std::fmt::{Debug, Display};
use tracing::{Level as tracing_level, event, info};

#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "action")]
pub enum Action {
    #[serde(rename = "msg", alias = "start")]
    Message {
        level: Level,
        #[serde(rename = "msg", alias = "text")]
        message: Option<String>,
    },
    #[serde(rename = "stop", alias = "result")]
    Stop,
}

#[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug)]
#[repr(u8)]
pub enum Level {
    Error = 0,
    Warn = 1,
    Notice = 2,
    Info = 3,
    Talkative = 4,
    Chatty = 5,
    Debug = 6,
    Vomit = 7,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Internal {
    #[serde(flatten)]
    pub action: Action,
}

#[derive(Debug)]
pub enum NixLog {
    Internal(Internal),
    Raw(String),
}

pub(crate) trait Trace {
    fn trace(&self);
    fn is_error(&self) -> bool;
}

impl Trace for Internal {
    fn trace(&self) {
        match &self.action {
            Action::Message { level, message } => {
                let text = match message {
                    Some(text) if text.is_empty() => return,
                    None => return,
                    Some(text) => text,
                };

                match level {
                    Level::Info | Level::Talkative | Level::Chatty => {
                        event!(tracing_level::INFO, "{text}");
                    }
                    Level::Warn | Level::Notice => event!(tracing_level::WARN, "{text}"),
                    Level::Error => event!(tracing_level::ERROR, "{text}"),
                    Level::Debug => event!(tracing_level::DEBUG, "{text}"),
                    Level::Vomit => event!(tracing_level::TRACE, "{text}"),
                }
            }
            Action::Stop => {}
        }
    }

    fn is_error(&self) -> bool {
        matches!(&self.action, Action::Message { level, message: _ } if matches!(level, Level::Error))
    }
}

impl Trace for NixLog {
    fn trace(&self) {
        match self {
            NixLog::Internal(line) => line.trace(),
            NixLog::Raw(line) => info!("{line}"),
        }
    }

    fn is_error(&self) -> bool {
        match self {
            NixLog::Internal(line) => line.is_error(),
            NixLog::Raw(..) => false,
        }
    }
}

impl Display for Internal {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.action {
            Action::Message { level, message } => {
                write!(
                    f,
                    "{level:?}: {}",
                    match message {
                        Some(message) => message,
                        None => "Nix log without text",
                    }
                )
            }
            Action::Stop => write!(f, ""),
        }
    }
}

impl Display for NixLog {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self {
            NixLog::Internal(line) => Display::fmt(&line, f),
            NixLog::Raw(line) => Display::fmt(&line, f),
        }
    }
}
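A sketch of how a typical `@nix` line maps onto the types above (the message text is made up): the internally tagged `action` field selects the `Message` variant and numeric level 3 decodes as `Info`.

#[test]
fn internal_log_parsing_sketch() {
    let line = r#"{"action":"msg","level":3,"msg":"building the system closure"}"#;
    let parsed: Internal = serde_json::from_str(line).unwrap();
    assert!(matches!(
        parsed.action,
        Action::Message { level: Level::Info, .. }
    ));
}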
-23 wire/lib/src/test_macros.rs
···
#[macro_export]
macro_rules! function_name {
    () => {{
        fn f() {}
        fn type_name_of<T>(_: T) -> &'static str {
            std::any::type_name::<T>()
        }
        let name = type_name_of(f);
        // closure for async functions
        &name[..name.len() - 3].trim_end_matches("::{{closure}}")
    }};
}

#[macro_export]
macro_rules! get_test_path {
    () => {{
        let mut path: PathBuf = env::var("WIRE_TEST_DIR").unwrap().into();
        let full_name = $crate::function_name!();
        let function_name = full_name.split("::").last().unwrap();
        path.push(function_name);
        path
    }};
}
-30 wire/lib/src/test_support.rs
···
use std::{fs, io, path::Path, process::Command};

use tempdir::TempDir;

pub fn make_flake_sandbox(path: &Path) -> Result<TempDir, io::Error> {
    let tmp_dir = TempDir::new("wire-test")?;

    Command::new("git")
        .args(["init", "-b", "tmp"])
        .current_dir(tmp_dir.path())
        .status()?;

    for entry in fs::read_dir(path)? {
        let entry = entry?;

        fs::copy(entry.path(), tmp_dir.as_ref().join(entry.file_name()))?;
    }

    Command::new("git")
        .args(["add", "-A"])
        .current_dir(tmp_dir.path())
        .status()?;

    Command::new("nix")
        .args(["flake", "lock"])
        .current_dir(tmp_dir.path())
        .status()?;

    Ok(tmp_dir)
}
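A usage sketch for the helper above (the fixture path is hypothetical): a fixture directory is copied into a fresh temporary git repository so `builtins.getFlake` sees a committed, locked flake.

#[test]
fn sandbox_usage_sketch() {
    use std::path::Path;

    let sandbox = make_flake_sandbox(Path::new("tests/fixtures/flake_hive")).unwrap();
    assert!(sandbox.path().join("flake.nix").exists());
    sandbox.close().unwrap();
}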