+4
-1
.cargo/config.toml
+4
-1
.cargo/config.toml
+1
.env
+1
.env
···
1
+
DATABASE_URL=sqlite://dev.db
+1
.github/FUNDING.yml
+1
.github/FUNDING.yml
···
1
+
github: [mrshmllow]
+4
-29
.github/actions/setup-nix/action.yml
+4
-29
.github/actions/setup-nix/action.yml
···
3
3
description: |
4
4
Sets up the Nix environment for wire, removing unnecessary bloat and installing Nix along with proper
5
5
substituters being set
6
-
inputs:
7
-
cachixToken:
8
-
description: Cachix token
9
-
required: true
10
-
withQEMU:
11
-
description: Enable QEMU
12
-
default: false
13
6
runs:
14
7
using: "composite"
15
8
steps:
16
-
- uses: wimpysworld/nothing-but-nix@main
17
-
with:
18
-
hatchet-protocol: "carve"
19
9
- name: Generate nix.conf
20
10
shell: bash
21
11
id: config
···
24
14
echo 'config<<EOF'
25
15
echo "system-features = nixos-test benchmark big-parallel kvm"
26
16
27
-
if [ "${{ inputs.withQEMU }}" = "true" ]; then
28
-
echo "extra-platforms = aarch64-linux i686-linux"
29
-
fi
17
+
echo "substituters = https://cache.nixos.org?priority=1 https://cache.althaea.zone?priority=2 https://cache.garnix.io?priority=3"
18
+
echo "trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= cache.garnix.io:CTFPyKSLcx5RMJKfLo5EEPUObbA78b0YQ2DTCJXqr9g= cache.althaea.zone:BelRpa863X9q3Y+AOnl5SM7QFzre3qb+5I7g2s/mqHI="
30
19
31
20
echo EOF
32
21
} >> "$GITHUB_OUTPUT"
33
-
- uses: cachix/install-nix-action@v31
22
+
- uses: cachix/install-nix-action@4e002c8ec80594ecd40e759629461e26c8abed15
34
23
with:
35
24
nix_path: nixpkgs=channel:nixos-unstable
36
25
extra_nix_config: ${{ steps.config.outputs.config }}
37
26
- name: Sanity check nix.conf
38
-
if: ${{ inputs.withQEMU == 'true' && runner.debug == '1' }}
27
+
if: ${{ runner.debug == '1' }}
39
28
shell: bash
40
29
run: cat /etc/nix/nix.conf
41
-
- name: Register binfmt
42
-
if: ${{ inputs.withQEMU == 'true' }}
43
-
shell: bash
44
-
run: |
45
-
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
46
-
- name: Sanity check binfmt
47
-
if: ${{ inputs.withQEMU == 'true' && runner.debug == '1' }}
48
-
shell: bash
49
-
run: |
50
-
cat /proc/sys/fs/binfmt_misc/qemu-aarch64
51
-
- uses: cachix/cachix-action@v16
52
-
with:
53
-
name: wires
54
-
authToken: "${{ inputs.cachixToken }}"
-17
.github/dependabot.yml
-17
.github/dependabot.yml
···
1
-
version: 2
2
-
updates:
3
-
- package-ecosystem: "cargo"
4
-
target-branch: main
5
-
directory: "/"
6
-
schedule:
7
-
interval: "daily"
8
-
- package-ecosystem: "npm"
9
-
target-branch: main
10
-
directory: "doc/"
11
-
schedule:
12
-
interval: "daily"
13
-
- package-ecosystem: "github-actions"
14
-
target-branch: main
15
-
directory: "/"
16
-
schedule:
17
-
interval: "weekly"
+1
-1
.github/labeler.yml
+1
-1
.github/labeler.yml
+30
-13
.github/workflows/autofix.yml
+30
-13
.github/workflows/autofix.yml
···
3
3
on:
4
4
pull_request:
5
5
push:
6
-
branches: ["main"]
7
6
8
7
permissions:
9
8
contents: read
···
14
13
outputs:
15
14
docs-pnpm: ${{ steps.filter.outputs.docs-pnpm }}
16
15
steps:
17
-
- uses: actions/checkout@v5
18
-
- uses: dorny/paths-filter@v3
16
+
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
17
+
with:
18
+
persist-credentials: false
19
+
- uses: dorny/paths-filter@668c092af3649c4b664c54e4b704aa46782f6f7c
19
20
id: filter
20
21
with:
21
22
filters: |
22
23
docs-pnpm:
23
24
- 'doc/pnpm-lock.yaml'
24
25
autofix:
25
-
runs-on: ubuntu-latest
26
+
runs-on: blacksmith-2vcpu-ubuntu-2404
26
27
needs: check-changes
28
+
env:
29
+
UV_CACHE_DIR: /tmp/.uv-cache
27
30
steps:
28
-
- uses: actions/checkout@v5
29
-
- uses: ./.github/actions/setup-nix
31
+
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
30
32
with:
31
-
cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
33
+
persist-credentials: false
34
+
- uses: ./.github/actions/setup-nix
32
35
- name: Cache Cargo
33
-
uses: actions/cache@v4
36
+
uses: actions/cache@v5
34
37
with:
35
38
path: |
36
39
~/.cargo/bin/
···
38
41
~/.cargo/registry/cache/
39
42
~/.cargo/git/db/
40
43
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
44
+
- name: Cache uv
45
+
uses: actions/cache@v5
46
+
with:
47
+
path: /tmp/.uv-cache
48
+
key: uv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }}
49
+
restore-keys: |
50
+
uv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }}
51
+
uv-${{ runner.os }}
52
+
- name: setup sqlx
53
+
run: nix develop -L -v -c sqlx database setup --source ./crates/core/src/cache/migrations/
54
+
- name: autofix sqlx
55
+
run: nix develop -L -v -c cargo sqlx prepare --workspace
56
+
- name: clippy --fix
57
+
run: nix develop -L -v -c cargo clippy --fix
58
+
- name: pre-commit run
59
+
run: nix develop -L -v -c pre-commit run --all-files
60
+
continue-on-error: true
41
61
- name: Upgrade Hash
42
62
if: ${{ needs.check-changes.outputs.docs-pnpm == 'true' }}
43
63
run: bash ./doc/upgrade.sh
44
-
- name: clippy --fix
45
-
run: nix develop --print-build-logs -v --command cargo clippy --fix
46
-
- name: nix fmt
47
-
run: nix develop --print-build-logs -v --command pre-commit run --all-files
48
-
continue-on-error: true
64
+
- name: Minimise uv cache
65
+
run: nix develop -L -v -c uv cache prune --ci
49
66
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
-23
.github/workflows/clean-pr-env.yml
-23
.github/workflows/clean-pr-env.yml
···
1
-
name: Clean PR Environment
2
-
on:
3
-
pull_request:
4
-
types:
5
-
- closed
6
-
jobs:
7
-
cleanup-gh:
8
-
runs-on: ubuntu-latest
9
-
permissions: write-all
10
-
steps:
11
-
- uses: actions/checkout@v5
12
-
- name: get github app token
13
-
uses: navikt/github-app-token-generator@793caf0d755fb4d6e88150825f680f188535cb48
14
-
id: get-token
15
-
with:
16
-
app-id: ${{ secrets.GH_APP_CLEANER_ID }}
17
-
private-key: ${{ secrets.GH_APP_CLEANER_PRIVATE_KEY }}
18
-
- name: delete pr environment
19
-
uses: strumwolf/delete-deployment-environment@v3.0.0
20
-
with:
21
-
token: ${{ steps.get-token.outputs.token }}
22
-
environment: pr-${{ github.event.number }}
23
-
ref: ${{ github.ref_name }}
+10
-6
.github/workflows/follow-nixpkgs.yml
+10
-6
.github/workflows/follow-nixpkgs.yml
···
8
8
pre-job:
9
9
continue-on-error: true
10
10
runs-on: ubuntu-latest
11
+
permissions: {}
11
12
outputs:
12
13
number: ${{ steps.skip_check.outputs.number }}
13
14
steps:
14
-
- uses: actions/checkout@v5
15
+
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
16
+
with:
17
+
persist-credentials: false
15
18
- id: skip_check
16
19
run: |
17
20
echo "number=$(gh pr list --label flake-lock-update --state open --json id | jq 'length')" >> "$GITHUB_OUTPUT"
···
19
22
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
20
23
update:
21
24
runs-on: ubuntu-latest
25
+
permissions: {}
22
26
needs: pre-job
23
27
if: needs.pre-job.outputs.number == '0'
24
28
steps:
25
-
- uses: actions/checkout@v5
29
+
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
26
30
with:
27
31
ssh-key: ${{ secrets.FOLLOW_NIXPKGS_PRIVATE_KEY }}
28
32
ref: main
33
+
persist-credentials: false
29
34
- uses: ./.github/actions/setup-nix
30
-
with:
31
-
cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
32
35
- run: |
33
36
git config user.name 'github-actions[bot]'
34
37
git config user.email 'github-actions[bot]@users.noreply.github.com'
···
38
41
run: echo "date=$(date +'%Y-%m-%d')" >> "$GITHUB_OUTPUT"
39
42
- name: Create Pull Request
40
43
id: cpr
41
-
uses: peter-evans/create-pull-request@v7
44
+
uses: peter-evans/create-pull-request@0979079bc20c05bbbb590a56c21c4e2b1d1f1bbe
42
45
with:
43
46
title: Update flake.lock ${{ steps.date.outputs.date }}
44
47
labels: flake-lock-update
45
48
branch: ci/flake-update
46
49
- name: Enable automerge
47
50
if: steps.cpr.outputs.pull-request-number
48
-
run: gh pr merge --squash --auto "${{ steps.cpr.outputs.pull-request-number }}"
51
+
run: gh pr merge --squash --auto "${STEPS_CPR_OUTPUTS_PULL_REQUEST_NUMBER}"
49
52
env:
50
53
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
54
+
STEPS_CPR_OUTPUTS_PULL_REQUEST_NUMBER: ${{ steps.cpr.outputs.pull-request-number }}
+1
-1
.github/workflows/labeler.yml
+1
-1
.github/workflows/labeler.yml
+13
-6
.github/workflows/pages.yml
+13
-6
.github/workflows/pages.yml
···
2
2
name: "Pages"
3
3
on:
4
4
push:
5
-
branches: [main]
5
+
branches:
6
+
- stable
7
+
- trunk
6
8
workflow_dispatch:
7
9
jobs:
8
10
pre-job:
9
11
continue-on-error: true
10
12
runs-on: ubuntu-latest
13
+
permissions: {}
11
14
outputs:
12
15
should_skip: ${{ steps.skip_check.outputs.should_skip }}
13
16
steps:
14
17
- id: skip_check
15
-
uses: fkirc/skip-duplicate-actions@v5
18
+
uses: fkirc/skip-duplicate-actions@04a1aebece824b56e6ad6a401d015479cd1c50b3
16
19
deploy:
17
20
runs-on: ubuntu-latest
21
+
permissions: {}
18
22
environment:
19
23
name: production
20
24
url: https://wire.althaea.zone/
21
25
if: github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_skip != 'true'
22
26
steps:
23
-
- uses: actions/checkout@v5
27
+
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
28
+
with:
29
+
persist-credentials: false
24
30
- uses: ./.github/actions/setup-nix
25
-
with:
26
-
cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
27
31
- run: nix build .#docs
32
+
if: github.ref == 'refs/heads/stable'
33
+
- run: nix build .#docs-unstable
34
+
if: github.ref != 'refs/heads/stable'
28
35
- name: Deploy to Cloudflare Pages
29
36
id: deployment
30
-
uses: cloudflare/wrangler-action@v3
37
+
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65
31
38
with:
32
39
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
33
40
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+17
-12
.github/workflows/pr-preview.yml
+17
-12
.github/workflows/pr-preview.yml
···
7
7
runs-on: ubuntu-latest
8
8
outputs:
9
9
number: ${{ steps.find-pr.outputs.number }}
10
+
permissions: {}
10
11
if: ${{ github.actor != 'dependabot[bot]' }}
11
12
steps:
12
-
- uses: jwalton/gh-find-current-pr@master
13
+
- uses: jwalton/gh-find-current-pr@89ee5799558265a1e0e31fab792ebb4ee91c016b
13
14
id: find-pr
14
15
with:
15
16
state: all
16
17
base-ref:
17
18
runs-on: ubuntu-latest
19
+
permissions: {}
18
20
needs: get-pr
19
21
outputs:
20
22
base-ref: ${{ steps.base-ref.outputs.base-ref }}
···
24
26
- name: Locate Base Ref
25
27
id: base-ref
26
28
run: |
27
-
echo "base-ref=$(gh api /repos/${{ github.repository }}/pulls/${{ needs.get-pr.outputs.number }} | jq -r '.base.ref')" >> "$GITHUB_OUTPUT"
29
+
echo "base-ref=$(gh api /repos/${{ github.repository }}/pulls/${NEEDS_GET_PR_OUTPUTS_NUMBER} | jq -r '.base.ref')" >> "$GITHUB_OUTPUT"
28
30
env:
29
31
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
32
+
NEEDS_GET_PR_OUTPUTS_NUMBER: ${{ needs.get-pr.outputs.number }}
30
33
eval-base:
31
34
runs-on: ubuntu-latest
35
+
permissions: {}
32
36
needs: base-ref
33
37
outputs:
34
38
drv: ${{ steps.drv.outputs.drv }}
35
39
steps:
36
-
- uses: actions/checkout@v5
40
+
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
37
41
with:
38
42
ref: ${{ needs.base-ref.outputs.base-ref }}
43
+
persist-credentials: false
39
44
- uses: ./.github/actions/setup-nix
40
-
with:
41
-
cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
42
45
- id: drv
43
46
run: echo "drv=$(nix eval .#docs --json)" >> "$GITHUB_OUTPUT"
44
47
eval-head:
45
48
runs-on: ubuntu-latest
49
+
permissions: {}
46
50
needs: get-pr
47
51
outputs:
48
52
drv: ${{ steps.drv.outputs.drv }}
49
53
if: ${{ needs.get-pr.outputs.number != '' }}
50
54
steps:
51
-
- uses: actions/checkout@v5
55
+
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
56
+
with:
57
+
persist-credentials: false
52
58
- uses: ./.github/actions/setup-nix
53
-
with:
54
-
cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
55
59
- id: drv
56
60
run: echo "drv=$(nix eval .#docs --json)" >> "$GITHUB_OUTPUT"
57
61
deploy:
58
62
runs-on: ubuntu-latest
63
+
permissions: {}
59
64
needs:
60
65
- eval-head
61
66
- eval-base
···
66
71
# skip if nothing changed in the docs package
67
72
if: needs.eval-head.outputs.drv != needs.eval-base.outputs.drv
68
73
steps:
69
-
- uses: actions/checkout@v5
70
-
- uses: ./.github/actions/setup-nix
74
+
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
71
75
with:
72
-
cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
76
+
persist-credentials: false
77
+
- uses: ./.github/actions/setup-nix
73
78
- run: nix build .#docs
74
79
- name: Deploy to Cloudflare Pages
75
80
id: deployment
76
-
uses: cloudflare/wrangler-action@v3
81
+
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65
77
82
with:
78
83
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
79
84
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+14
-14
.github/workflows/test.yml
+14
-14
.github/workflows/test.yml
···
7
7
pre-job:
8
8
continue-on-error: true
9
9
runs-on: ubuntu-latest
10
+
permissions: {}
10
11
outputs:
11
12
should_skip: ${{ steps.skip_check.outputs.should_skip }}
12
13
steps:
13
14
- id: skip_check
14
-
uses: fkirc/skip-duplicate-actions@v5
15
+
uses: fkirc/skip-duplicate-actions@04a1aebece824b56e6ad6a401d015479cd1c50b3
15
16
with:
16
17
concurrent_skipping: "same_content_newer"
17
18
cancel_others: "true"
18
-
nextest:
19
+
test:
19
20
runs-on: ubuntu-latest
20
21
needs: pre-job
21
22
permissions:
22
23
contents: read
23
24
if: needs.pre-job.outputs.should_skip != 'true'
24
25
steps:
25
-
- uses: actions/checkout@v5
26
-
- uses: ./.github/actions/setup-nix
27
-
with:
28
-
cachixToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
29
-
- uses: actions/cache@v4
26
+
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
30
27
with:
31
-
path: |
32
-
~/.cargo/registry
33
-
~/.cargo/git
34
-
target
35
-
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
36
-
- name: Nextest
37
-
run: nix develop --print-build-logs -v --command cargo nextest run
28
+
persist-credentials: false
29
+
- uses: ./.github/actions/setup-nix
30
+
- name: Build Tests
31
+
run: nix build .#cargo-tests -L -vv
32
+
- name: Cargo Tests
33
+
run: |
34
+
nix develop \
35
+
--print-build-logs \
36
+
-v \
37
+
--command result/bin/run-tests
+9
.gitignore
+9
.gitignore
+12
.sqlx/query-1ab95659223cbc7e012f538080c4be8b4774a0354348e6de53e6ea7aadfe8819.json
+12
.sqlx/query-1ab95659223cbc7e012f538080c4be8b4774a0354348e6de53e6ea7aadfe8819.json
···
1
+
{
2
+
"db_name": "SQLite",
3
+
"query": "delete from inspection_cache\nwhere\n blob_id in (\n select\n id\n from\n inspection_blobs\n where\n schema_version != $1\n )\n or ROWID in (\n select\n ROWID\n from\n inspection_cache\n order by\n ROWID desc\n limit\n -1\n offset\n 30\n )",
4
+
"describe": {
5
+
"columns": [],
6
+
"parameters": {
7
+
"Right": 1
8
+
},
9
+
"nullable": []
10
+
},
11
+
"hash": "1ab95659223cbc7e012f538080c4be8b4774a0354348e6de53e6ea7aadfe8819"
12
+
}
+12
.sqlx/query-61b79ce83349770c0b4e474471cdee067214b88444cb68d6d3560f4be835b3a8.json
+12
.sqlx/query-61b79ce83349770c0b4e474471cdee067214b88444cb68d6d3560f4be835b3a8.json
···
1
+
{
2
+
"db_name": "SQLite",
3
+
"query": "\n insert into\n inspection_cache (store_path, hash, blob_id)\n values\n ($1, $2, $3)\n ",
4
+
"describe": {
5
+
"columns": [],
6
+
"parameters": {
7
+
"Right": 3
8
+
},
9
+
"nullable": []
10
+
},
11
+
"hash": "61b79ce83349770c0b4e474471cdee067214b88444cb68d6d3560f4be835b3a8"
12
+
}
+26
.sqlx/query-810100e8d8c033a0d0b77fe8e90a212cd2c1e8718ee347fba3076bcd10f19198.json
+26
.sqlx/query-810100e8d8c033a0d0b77fe8e90a212cd2c1e8718ee347fba3076bcd10f19198.json
···
1
+
{
2
+
"db_name": "SQLite",
3
+
"query": "\n select\n inspection_blobs.json_value,\n inspection_cache.store_path\n from\n inspection_blobs\n join inspection_cache on inspection_cache.blob_id = inspection_blobs.id\n where\n inspection_cache.store_path = $1\n and inspection_cache.hash = $2\n and inspection_blobs.schema_version = $3\n limit\n 1\n ",
4
+
"describe": {
5
+
"columns": [
6
+
{
7
+
"name": "json_value",
8
+
"ordinal": 0,
9
+
"type_info": "Blob"
10
+
},
11
+
{
12
+
"name": "store_path",
13
+
"ordinal": 1,
14
+
"type_info": "Text"
15
+
}
16
+
],
17
+
"parameters": {
18
+
"Right": 3
19
+
},
20
+
"nullable": [
21
+
false,
22
+
false
23
+
]
24
+
},
25
+
"hash": "810100e8d8c033a0d0b77fe8e90a212cd2c1e8718ee347fba3076bcd10f19198"
26
+
}
+12
.sqlx/query-a5459b4c5f879509d29c3b6dcf85d1da89e4bd9380b8763edc5d16cbaa302d3f.json
+12
.sqlx/query-a5459b4c5f879509d29c3b6dcf85d1da89e4bd9380b8763edc5d16cbaa302d3f.json
···
1
+
{
2
+
"db_name": "SQLite",
3
+
"query": "delete from inspection_blobs\nwhere\n not exists (\n select\n 1\n from\n inspection_cache\n where\n inspection_cache.blob_id = inspection_blobs.id\n )",
4
+
"describe": {
5
+
"columns": [],
6
+
"parameters": {
7
+
"Right": 0
8
+
},
9
+
"nullable": []
10
+
},
11
+
"hash": "a5459b4c5f879509d29c3b6dcf85d1da89e4bd9380b8763edc5d16cbaa302d3f"
12
+
}
+20
.sqlx/query-cdea7b20c482f4127bacb5c58755d4fdd4dca2066b9c06950be60bc790569335.json
+20
.sqlx/query-cdea7b20c482f4127bacb5c58755d4fdd4dca2066b9c06950be60bc790569335.json
···
1
+
{
2
+
"db_name": "SQLite",
3
+
"query": "\n insert into inspection_blobs (json_value, schema_version)\n values ($1, $2)\n on conflict(json_value)\n do update set json_value = excluded.json_value\n returning inspection_blobs.id\n ",
4
+
"describe": {
5
+
"columns": [
6
+
{
7
+
"name": "id",
8
+
"ordinal": 0,
9
+
"type_info": "Integer"
10
+
}
11
+
],
12
+
"parameters": {
13
+
"Right": 2
14
+
},
15
+
"nullable": [
16
+
false
17
+
]
18
+
},
19
+
"hash": "cdea7b20c482f4127bacb5c58755d4fdd4dca2066b9c06950be60bc790569335"
20
+
}
+113
-11
CHANGELOG.md
+113
-11
CHANGELOG.md
···
7
7
8
8
## [Unreleased] - yyyy-mm-dd
9
9
10
+
## [v1.1.1] - 2025-01-05
11
+
12
+
### Fixed
13
+
14
+
- Fix a bug where wire was attempting to SSH to the local machine when `buildOnTarget` &
15
+
`allowLocalDeployment` where true.
16
+
17
+
## [v1.1.0] - 2025-12-31
18
+
10
19
### Added
11
20
12
-
- Added `--reboot`. Wire will wait for the node to reconnect after rebooting.
13
-
Wire will refuse to reboot localhost. Keys post-activation will be applied
21
+
- Add a `--substitute-on-destination` argument.
22
+
- Add the `meta.nodeSpecialArgs` meta option.
23
+
- Add `wire build`, a new command to build nodes offline.
24
+
It is distinct from `wire apply build`, as it will not ping
25
+
or push the result, making it useful for CI.
26
+
27
+
### Changed
28
+
29
+
- Build store paths will be output to stdout
30
+
31
+
### Fixed
32
+
33
+
- Fix invalidated caches not actually returning `None`.
34
+
35
+
## [v1.0.0] - 2025-12-17
36
+
37
+
### Added
38
+
39
+
- SIGINT signal handling.
40
+
41
+
### Changed
42
+
43
+
- Invalidate caches that reference garbage collected paths.
44
+
45
+
### Fixed
46
+
47
+
- Fix key filtering logic.
48
+
49
+
## [v1.0.0-beta.0] - 2025-12-02
50
+
51
+
### Added
52
+
53
+
- Implement `meta.nodeNixpkgs`.
54
+
- Add caching of hive evaluation for flakes.
55
+
56
+
### Changed
57
+
58
+
- Run tests against 25.11.
59
+
60
+
## [v1.0.0-alpha.1] - 2025-11-24
61
+
62
+
### Added
63
+
64
+
- Add `--handle-unreachable`. You can use `--handle-unreachable ignore` to
65
+
ignore unreachable nodes in the status of the deployment.
66
+
- Add a basic progress bar.
67
+
68
+
### Changed
69
+
70
+
- Revert "Wire will now attempt to use SSH ControlMaster by default.".
71
+
- Change the `show` subcommand to look nicer now.
72
+
- Change the `build` step to always build remotely when the node is
73
+
going to be applied locally.
74
+
75
+
## [v1.0.0-alpha.0] - 2025-10-22
76
+
77
+
### Added
78
+
79
+
- Add `--ssh-accept-host` argument.
80
+
- Add `--on -` syntax to the `--on` argument.
81
+
Passing `-` will now read additional apply targets from stdin.
82
+
- Add `{key.name}-key.{path,service}` systemd units.
83
+
- Added `--flake` argument as an alias for `--path`.
84
+
- A terminal bell will be output if a sudo / ssh prompt is ever printed.
85
+
- Added a real tutorial, and separated many how-to guides.
86
+
The tutorial leads the user through creating and deploying a wire Hive.
87
+
- Add `config.nixpkgs.flake.source` by default if `meta.nixpkgs` ends
88
+
with `-source` at priority 1000 (default).
89
+
90
+
### Fixed
91
+
92
+
- Fix bug where `--non-interactive` was inversed.
93
+
- Fix a bug where `./result` links where being created.
94
+
- Fix passing `sources.nixpkgs` directly from npins to `meta.nixpkgs`.
95
+
- Fix nodes that will be applied locally running the `push` and `cleanup`
96
+
steps.
97
+
98
+
### Changed
99
+
100
+
- Improve logging from interactive commands (absence of `--non-interactive`).
101
+
- Changed `--path` argument to support flakerefs (`github:foo/bar`,
102
+
`git+file:///...`, `https://.../main.tar.gz`, etc).
103
+
- Changed SSH arguments to use ControlMaster by default.
104
+
- Compile-out logs with level `tracing_level::TRACE` in release builds.
105
+
- Improve aata integrity of keys.
106
+
- Unknown SSH keys will be immediately rejected unless `--ssh-accept-host` is passed.
107
+
- Changed evaluation to be ran in parallel with other steps until
108
+
the .drv is required.
109
+
110
+
## [0.5.0] - 2025-09-18
111
+
112
+
### Added
113
+
114
+
- Added `--reboot`. wire will wait for the node to reconnect after rebooting.
115
+
wire will refuse to reboot localhost. Keys post-activation will be applied
14
116
after rebooting!
15
117
- Most errors now have error codes and documentation links.
16
118
- Added the global flag `--non-interactive`.
17
-
- Wire now creates its own PTY to interface with openssh's PTY to allow for
119
+
- wire now creates its own PTY to interface with openssh's PTY to allow for
18
120
interactive sudo authentication on both remote and local targets.
19
121
20
122
Using a wheel user as `deployment.target.user` is no longer necessary
···
27
129
### Changed
28
130
29
131
- `wire inspect/show --json` will no longer use a pretty print.
30
-
- Wire will now wait for the node to reconnect if activation failed (excluding
132
+
- wire will now wait for the node to reconnect if activation failed (excluding
31
133
dry-activate).
32
134
- Nix logs with the `Talkative` and `Chatty` level have been moved to
33
135
`tracing_level::TRACE`.
···
43
145
44
146
- Nodes may now fail without stopping the entire hive from continuing. A summary
45
147
of errors will be presented at the end of the apply process.
46
-
- Wire will now ping the node before it proceeds executing.
47
-
- Wire will now properly respect `deployment.target.hosts`.
48
-
- Wire will now attempt each target host in order until a valid one is found.
148
+
- wire will now ping the node before it proceeds executing.
149
+
- wire will now properly respect `deployment.target.hosts`.
150
+
- wire will now attempt each target host in order until a valid one is found.
49
151
50
152
### Changed
51
153
52
-
- Wire now directly evaluates your hive instead of shipping extra nix code along with its binary.
154
+
- wire now directly evaluates your hive instead of shipping extra nix code along with its binary.
53
155
You must now use `outputs.makeHive { ... }` instead of a raw attribute.
54
156
This can be obtained with npins or a flake input.
55
157
- The expected flake output name has changed from `outputs.colmena` to `outputs.wire`.
···
63
165
### Changed
64
166
65
167
- Dependency Updates.
66
-
- Wire now compiles and includes key agents for multiple architectures, currently only linux.
168
+
- wire now compiles and includes key agents for multiple architectures, currently only linux.
67
169
- There is a new package output, `wire-small`, for testing purposes.
68
170
It only compiles the key agent for the host that builds `wire-small`.
69
171
- `--no-progress` now defaults to true if stdin does not refer to a tty (unix pipelines, in CI).
70
-
- Added an error for the internal hive evluation parse failure.
172
+
- Added an error for the internal hive evaluation parse failure.
71
173
- The `inspect` command now has `show` as an alias.
72
174
- Remove `log` command as there are currently no plans to implement the feature
73
175
- The `completions` command is now hidden from the help page
74
176
75
177
### Fixed
76
178
77
-
- A non-existant key owner user/group would not default to gid/uid `0`.
179
+
- A non-existent key owner user/group would not default to gid/uid `0`.
78
180
- Keys can now be deployed to localhost.
79
181
80
182
## [0.2.0] - 2025-04-21
+661
COPYING
+661
COPYING
···
1
+
GNU AFFERO GENERAL PUBLIC LICENSE
2
+
Version 3, 19 November 2007
3
+
4
+
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5
+
Everyone is permitted to copy and distribute verbatim copies
6
+
of this license document, but changing it is not allowed.
7
+
8
+
Preamble
9
+
10
+
The GNU Affero General Public License is a free, copyleft license for
11
+
software and other kinds of works, specifically designed to ensure
12
+
cooperation with the community in the case of network server software.
13
+
14
+
The licenses for most software and other practical works are designed
15
+
to take away your freedom to share and change the works. By contrast,
16
+
our General Public Licenses are intended to guarantee your freedom to
17
+
share and change all versions of a program--to make sure it remains free
18
+
software for all its users.
19
+
20
+
When we speak of free software, we are referring to freedom, not
21
+
price. Our General Public Licenses are designed to make sure that you
22
+
have the freedom to distribute copies of free software (and charge for
23
+
them if you wish), that you receive source code or can get it if you
24
+
want it, that you can change the software or use pieces of it in new
25
+
free programs, and that you know you can do these things.
26
+
27
+
Developers that use our General Public Licenses protect your rights
28
+
with two steps: (1) assert copyright on the software, and (2) offer
29
+
you this License which gives you legal permission to copy, distribute
30
+
and/or modify the software.
31
+
32
+
A secondary benefit of defending all users' freedom is that
33
+
improvements made in alternate versions of the program, if they
34
+
receive widespread use, become available for other developers to
35
+
incorporate. Many developers of free software are heartened and
36
+
encouraged by the resulting cooperation. However, in the case of
37
+
software used on network servers, this result may fail to come about.
38
+
The GNU General Public License permits making a modified version and
39
+
letting the public access it on a server without ever releasing its
40
+
source code to the public.
41
+
42
+
The GNU Affero General Public License is designed specifically to
43
+
ensure that, in such cases, the modified source code becomes available
44
+
to the community. It requires the operator of a network server to
45
+
provide the source code of the modified version running there to the
46
+
users of that server. Therefore, public use of a modified version, on
47
+
a publicly accessible server, gives the public access to the source
48
+
code of the modified version.
49
+
50
+
An older license, called the Affero General Public License and
51
+
published by Affero, was designed to accomplish similar goals. This is
52
+
a different license, not a version of the Affero GPL, but Affero has
53
+
released a new version of the Affero GPL which permits relicensing under
54
+
this license.
55
+
56
+
The precise terms and conditions for copying, distribution and
57
+
modification follow.
58
+
59
+
TERMS AND CONDITIONS
60
+
61
+
0. Definitions.
62
+
63
+
"This License" refers to version 3 of the GNU Affero General Public License.
64
+
65
+
"Copyright" also means copyright-like laws that apply to other kinds of
66
+
works, such as semiconductor masks.
67
+
68
+
"The Program" refers to any copyrightable work licensed under this
69
+
License. Each licensee is addressed as "you". "Licensees" and
70
+
"recipients" may be individuals or organizations.
71
+
72
+
To "modify" a work means to copy from or adapt all or part of the work
73
+
in a fashion requiring copyright permission, other than the making of an
74
+
exact copy. The resulting work is called a "modified version" of the
75
+
earlier work or a work "based on" the earlier work.
76
+
77
+
A "covered work" means either the unmodified Program or a work based
78
+
on the Program.
79
+
80
+
To "propagate" a work means to do anything with it that, without
81
+
permission, would make you directly or secondarily liable for
82
+
infringement under applicable copyright law, except executing it on a
83
+
computer or modifying a private copy. Propagation includes copying,
84
+
distribution (with or without modification), making available to the
85
+
public, and in some countries other activities as well.
86
+
87
+
To "convey" a work means any kind of propagation that enables other
88
+
parties to make or receive copies. Mere interaction with a user through
89
+
a computer network, with no transfer of a copy, is not conveying.
90
+
91
+
An interactive user interface displays "Appropriate Legal Notices"
92
+
to the extent that it includes a convenient and prominently visible
93
+
feature that (1) displays an appropriate copyright notice, and (2)
94
+
tells the user that there is no warranty for the work (except to the
95
+
extent that warranties are provided), that licensees may convey the
96
+
work under this License, and how to view a copy of this License. If
97
+
the interface presents a list of user commands or options, such as a
98
+
menu, a prominent item in the list meets this criterion.
99
+
100
+
1. Source Code.
101
+
102
+
The "source code" for a work means the preferred form of the work
103
+
for making modifications to it. "Object code" means any non-source
104
+
form of a work.
105
+
106
+
A "Standard Interface" means an interface that either is an official
107
+
standard defined by a recognized standards body, or, in the case of
108
+
interfaces specified for a particular programming language, one that
109
+
is widely used among developers working in that language.
110
+
111
+
The "System Libraries" of an executable work include anything, other
112
+
than the work as a whole, that (a) is included in the normal form of
113
+
packaging a Major Component, but which is not part of that Major
114
+
Component, and (b) serves only to enable use of the work with that
115
+
Major Component, or to implement a Standard Interface for which an
116
+
implementation is available to the public in source code form. A
117
+
"Major Component", in this context, means a major essential component
118
+
(kernel, window system, and so on) of the specific operating system
119
+
(if any) on which the executable work runs, or a compiler used to
120
+
produce the work, or an object code interpreter used to run it.
121
+
122
+
The "Corresponding Source" for a work in object code form means all
123
+
the source code needed to generate, install, and (for an executable
124
+
work) run the object code and to modify the work, including scripts to
125
+
control those activities. However, it does not include the work's
126
+
System Libraries, or general-purpose tools or generally available free
127
+
programs which are used unmodified in performing those activities but
128
+
which are not part of the work. For example, Corresponding Source
129
+
includes interface definition files associated with source files for
130
+
the work, and the source code for shared libraries and dynamically
131
+
linked subprograms that the work is specifically designed to require,
132
+
such as by intimate data communication or control flow between those
133
+
subprograms and other parts of the work.
134
+
135
+
The Corresponding Source need not include anything that users
136
+
can regenerate automatically from other parts of the Corresponding
137
+
Source.
138
+
139
+
The Corresponding Source for a work in source code form is that
140
+
same work.
141
+
142
+
2. Basic Permissions.
143
+
144
+
All rights granted under this License are granted for the term of
145
+
copyright on the Program, and are irrevocable provided the stated
146
+
conditions are met. This License explicitly affirms your unlimited
147
+
permission to run the unmodified Program. The output from running a
148
+
covered work is covered by this License only if the output, given its
149
+
content, constitutes a covered work. This License acknowledges your
150
+
rights of fair use or other equivalent, as provided by copyright law.
151
+
152
+
You may make, run and propagate covered works that you do not
153
+
convey, without conditions so long as your license otherwise remains
154
+
in force. You may convey covered works to others for the sole purpose
155
+
of having them make modifications exclusively for you, or provide you
156
+
with facilities for running those works, provided that you comply with
157
+
the terms of this License in conveying all material for which you do
158
+
not control copyright. Those thus making or running the covered works
159
+
for you must do so exclusively on your behalf, under your direction
160
+
and control, on terms that prohibit them from making any copies of
161
+
your copyrighted material outside their relationship with you.
162
+
163
+
Conveying under any other circumstances is permitted solely under
164
+
the conditions stated below. Sublicensing is not allowed; section 10
165
+
makes it unnecessary.
166
+
167
+
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168
+
169
+
No covered work shall be deemed part of an effective technological
170
+
measure under any applicable law fulfilling obligations under article
171
+
11 of the WIPO copyright treaty adopted on 20 December 1996, or
172
+
similar laws prohibiting or restricting circumvention of such
173
+
measures.
174
+
175
+
When you convey a covered work, you waive any legal power to forbid
176
+
circumvention of technological measures to the extent such circumvention
177
+
is effected by exercising rights under this License with respect to
178
+
the covered work, and you disclaim any intention to limit operation or
179
+
modification of the work as a means of enforcing, against the work's
180
+
users, your or third parties' legal rights to forbid circumvention of
181
+
technological measures.
182
+
183
+
4. Conveying Verbatim Copies.
184
+
185
+
You may convey verbatim copies of the Program's source code as you
186
+
receive it, in any medium, provided that you conspicuously and
187
+
appropriately publish on each copy an appropriate copyright notice;
188
+
keep intact all notices stating that this License and any
189
+
non-permissive terms added in accord with section 7 apply to the code;
190
+
keep intact all notices of the absence of any warranty; and give all
191
+
recipients a copy of this License along with the Program.
192
+
193
+
You may charge any price or no price for each copy that you convey,
194
+
and you may offer support or warranty protection for a fee.
195
+
196
+
5. Conveying Modified Source Versions.
197
+
198
+
You may convey a work based on the Program, or the modifications to
199
+
produce it from the Program, in the form of source code under the
200
+
terms of section 4, provided that you also meet all of these conditions:
201
+
202
+
a) The work must carry prominent notices stating that you modified
203
+
it, and giving a relevant date.
204
+
205
+
b) The work must carry prominent notices stating that it is
206
+
released under this License and any conditions added under section
207
+
7. This requirement modifies the requirement in section 4 to
208
+
"keep intact all notices".
209
+
210
+
c) You must license the entire work, as a whole, under this
211
+
License to anyone who comes into possession of a copy. This
212
+
License will therefore apply, along with any applicable section 7
213
+
additional terms, to the whole of the work, and all its parts,
214
+
regardless of how they are packaged. This License gives no
215
+
permission to license the work in any other way, but it does not
216
+
invalidate such permission if you have separately received it.
217
+
218
+
d) If the work has interactive user interfaces, each must display
219
+
Appropriate Legal Notices; however, if the Program has interactive
220
+
interfaces that do not display Appropriate Legal Notices, your
221
+
work need not make them do so.
222
+
223
+
A compilation of a covered work with other separate and independent
224
+
works, which are not by their nature extensions of the covered work,
225
+
and which are not combined with it such as to form a larger program,
226
+
in or on a volume of a storage or distribution medium, is called an
227
+
"aggregate" if the compilation and its resulting copyright are not
228
+
used to limit the access or legal rights of the compilation's users
229
+
beyond what the individual works permit. Inclusion of a covered work
230
+
in an aggregate does not cause this License to apply to the other
231
+
parts of the aggregate.
232
+
233
+
6. Conveying Non-Source Forms.
234
+
235
+
You may convey a covered work in object code form under the terms
236
+
of sections 4 and 5, provided that you also convey the
237
+
machine-readable Corresponding Source under the terms of this License,
238
+
in one of these ways:
239
+
240
+
a) Convey the object code in, or embodied in, a physical product
241
+
(including a physical distribution medium), accompanied by the
242
+
Corresponding Source fixed on a durable physical medium
243
+
customarily used for software interchange.
244
+
245
+
b) Convey the object code in, or embodied in, a physical product
246
+
(including a physical distribution medium), accompanied by a
247
+
written offer, valid for at least three years and valid for as
248
+
long as you offer spare parts or customer support for that product
249
+
model, to give anyone who possesses the object code either (1) a
250
+
copy of the Corresponding Source for all the software in the
251
+
product that is covered by this License, on a durable physical
252
+
medium customarily used for software interchange, for a price no
253
+
more than your reasonable cost of physically performing this
254
+
conveying of source, or (2) access to copy the
255
+
Corresponding Source from a network server at no charge.
256
+
257
+
c) Convey individual copies of the object code with a copy of the
258
+
written offer to provide the Corresponding Source. This
259
+
alternative is allowed only occasionally and noncommercially, and
260
+
only if you received the object code with such an offer, in accord
261
+
with subsection 6b.
262
+
263
+
d) Convey the object code by offering access from a designated
264
+
place (gratis or for a charge), and offer equivalent access to the
265
+
Corresponding Source in the same way through the same place at no
266
+
further charge. You need not require recipients to copy the
267
+
Corresponding Source along with the object code. If the place to
268
+
copy the object code is a network server, the Corresponding Source
269
+
may be on a different server (operated by you or a third party)
270
+
that supports equivalent copying facilities, provided you maintain
271
+
clear directions next to the object code saying where to find the
272
+
Corresponding Source. Regardless of what server hosts the
273
+
Corresponding Source, you remain obligated to ensure that it is
274
+
available for as long as needed to satisfy these requirements.
275
+
276
+
e) Convey the object code using peer-to-peer transmission, provided
277
+
you inform other peers where the object code and Corresponding
278
+
Source of the work are being offered to the general public at no
279
+
charge under subsection 6d.
280
+
281
+
A separable portion of the object code, whose source code is excluded
282
+
from the Corresponding Source as a System Library, need not be
283
+
included in conveying the object code work.
284
+
285
+
A "User Product" is either (1) a "consumer product", which means any
286
+
tangible personal property which is normally used for personal, family,
287
+
or household purposes, or (2) anything designed or sold for incorporation
288
+
into a dwelling. In determining whether a product is a consumer product,
289
+
doubtful cases shall be resolved in favor of coverage. For a particular
290
+
product received by a particular user, "normally used" refers to a
291
+
typical or common use of that class of product, regardless of the status
292
+
of the particular user or of the way in which the particular user
293
+
actually uses, or expects or is expected to use, the product. A product
294
+
is a consumer product regardless of whether the product has substantial
295
+
commercial, industrial or non-consumer uses, unless such uses represent
296
+
the only significant mode of use of the product.
297
+
298
+
"Installation Information" for a User Product means any methods,
299
+
procedures, authorization keys, or other information required to install
300
+
and execute modified versions of a covered work in that User Product from
301
+
a modified version of its Corresponding Source. The information must
302
+
suffice to ensure that the continued functioning of the modified object
303
+
code is in no case prevented or interfered with solely because
304
+
modification has been made.
305
+
306
+
If you convey an object code work under this section in, or with, or
307
+
specifically for use in, a User Product, and the conveying occurs as
308
+
part of a transaction in which the right of possession and use of the
309
+
User Product is transferred to the recipient in perpetuity or for a
310
+
fixed term (regardless of how the transaction is characterized), the
311
+
Corresponding Source conveyed under this section must be accompanied
312
+
by the Installation Information. But this requirement does not apply
313
+
if neither you nor any third party retains the ability to install
314
+
modified object code on the User Product (for example, the work has
315
+
been installed in ROM).
316
+
317
+
The requirement to provide Installation Information does not include a
318
+
requirement to continue to provide support service, warranty, or updates
319
+
for a work that has been modified or installed by the recipient, or for
320
+
the User Product in which it has been modified or installed. Access to a
321
+
network may be denied when the modification itself materially and
322
+
adversely affects the operation of the network or violates the rules and
323
+
protocols for communication across the network.
324
+
325
+
Corresponding Source conveyed, and Installation Information provided,
326
+
in accord with this section must be in a format that is publicly
327
+
documented (and with an implementation available to the public in
328
+
source code form), and must require no special password or key for
329
+
unpacking, reading or copying.
330
+
331
+
7. Additional Terms.
332
+
333
+
"Additional permissions" are terms that supplement the terms of this
334
+
License by making exceptions from one or more of its conditions.
335
+
Additional permissions that are applicable to the entire Program shall
336
+
be treated as though they were included in this License, to the extent
337
+
that they are valid under applicable law. If additional permissions
338
+
apply only to part of the Program, that part may be used separately
339
+
under those permissions, but the entire Program remains governed by
340
+
this License without regard to the additional permissions.
341
+
342
+
When you convey a copy of a covered work, you may at your option
343
+
remove any additional permissions from that copy, or from any part of
344
+
it. (Additional permissions may be written to require their own
345
+
removal in certain cases when you modify the work.) You may place
346
+
additional permissions on material, added by you to a covered work,
347
+
for which you have or can give appropriate copyright permission.
348
+
349
+
Notwithstanding any other provision of this License, for material you
350
+
add to a covered work, you may (if authorized by the copyright holders of
351
+
that material) supplement the terms of this License with terms:
352
+
353
+
a) Disclaiming warranty or limiting liability differently from the
354
+
terms of sections 15 and 16 of this License; or
355
+
356
+
b) Requiring preservation of specified reasonable legal notices or
357
+
author attributions in that material or in the Appropriate Legal
358
+
Notices displayed by works containing it; or
359
+
360
+
c) Prohibiting misrepresentation of the origin of that material, or
361
+
requiring that modified versions of such material be marked in
362
+
reasonable ways as different from the original version; or
363
+
364
+
d) Limiting the use for publicity purposes of names of licensors or
365
+
authors of the material; or
366
+
367
+
e) Declining to grant rights under trademark law for use of some
368
+
trade names, trademarks, or service marks; or
369
+
370
+
f) Requiring indemnification of licensors and authors of that
371
+
material by anyone who conveys the material (or modified versions of
372
+
it) with contractual assumptions of liability to the recipient, for
373
+
any liability that these contractual assumptions directly impose on
374
+
those licensors and authors.
375
+
376
+
All other non-permissive additional terms are considered "further
377
+
restrictions" within the meaning of section 10. If the Program as you
378
+
received it, or any part of it, contains a notice stating that it is
379
+
governed by this License along with a term that is a further
380
+
restriction, you may remove that term. If a license document contains
381
+
a further restriction but permits relicensing or conveying under this
382
+
License, you may add to a covered work material governed by the terms
383
+
of that license document, provided that the further restriction does
384
+
not survive such relicensing or conveying.
385
+
386
+
If you add terms to a covered work in accord with this section, you
387
+
must place, in the relevant source files, a statement of the
388
+
additional terms that apply to those files, or a notice indicating
389
+
where to find the applicable terms.
390
+
391
+
Additional terms, permissive or non-permissive, may be stated in the
392
+
form of a separately written license, or stated as exceptions;
393
+
the above requirements apply either way.
394
+
395
+
8. Termination.
396
+
397
+
You may not propagate or modify a covered work except as expressly
398
+
provided under this License. Any attempt otherwise to propagate or
399
+
modify it is void, and will automatically terminate your rights under
400
+
this License (including any patent licenses granted under the third
401
+
paragraph of section 11).
402
+
403
+
However, if you cease all violation of this License, then your
404
+
license from a particular copyright holder is reinstated (a)
405
+
provisionally, unless and until the copyright holder explicitly and
406
+
finally terminates your license, and (b) permanently, if the copyright
407
+
holder fails to notify you of the violation by some reasonable means
408
+
prior to 60 days after the cessation.
409
+
410
+
Moreover, your license from a particular copyright holder is
411
+
reinstated permanently if the copyright holder notifies you of the
412
+
violation by some reasonable means, this is the first time you have
413
+
received notice of violation of this License (for any work) from that
414
+
copyright holder, and you cure the violation prior to 30 days after
415
+
your receipt of the notice.
416
+
417
+
Termination of your rights under this section does not terminate the
418
+
licenses of parties who have received copies or rights from you under
419
+
this License. If your rights have been terminated and not permanently
420
+
reinstated, you do not qualify to receive new licenses for the same
421
+
material under section 10.
422
+
423
+
9. Acceptance Not Required for Having Copies.
424
+
425
+
You are not required to accept this License in order to receive or
426
+
run a copy of the Program. Ancillary propagation of a covered work
427
+
occurring solely as a consequence of using peer-to-peer transmission
428
+
to receive a copy likewise does not require acceptance. However,
429
+
nothing other than this License grants you permission to propagate or
430
+
modify any covered work. These actions infringe copyright if you do
431
+
not accept this License. Therefore, by modifying or propagating a
432
+
covered work, you indicate your acceptance of this License to do so.
433
+
434
+
10. Automatic Licensing of Downstream Recipients.
435
+
436
+
Each time you convey a covered work, the recipient automatically
437
+
receives a license from the original licensors, to run, modify and
438
+
propagate that work, subject to this License. You are not responsible
439
+
for enforcing compliance by third parties with this License.
440
+
441
+
An "entity transaction" is a transaction transferring control of an
442
+
organization, or substantially all assets of one, or subdividing an
443
+
organization, or merging organizations. If propagation of a covered
444
+
work results from an entity transaction, each party to that
445
+
transaction who receives a copy of the work also receives whatever
446
+
licenses to the work the party's predecessor in interest had or could
447
+
give under the previous paragraph, plus a right to possession of the
448
+
Corresponding Source of the work from the predecessor in interest, if
449
+
the predecessor has it or can get it with reasonable efforts.
450
+
451
+
You may not impose any further restrictions on the exercise of the
452
+
rights granted or affirmed under this License. For example, you may
453
+
not impose a license fee, royalty, or other charge for exercise of
454
+
rights granted under this License, and you may not initiate litigation
455
+
(including a cross-claim or counterclaim in a lawsuit) alleging that
456
+
any patent claim is infringed by making, using, selling, offering for
457
+
sale, or importing the Program or any portion of it.
458
+
459
+
11. Patents.
460
+
461
+
A "contributor" is a copyright holder who authorizes use under this
462
+
License of the Program or a work on which the Program is based. The
463
+
work thus licensed is called the contributor's "contributor version".
464
+
465
+
A contributor's "essential patent claims" are all patent claims
466
+
owned or controlled by the contributor, whether already acquired or
467
+
hereafter acquired, that would be infringed by some manner, permitted
468
+
by this License, of making, using, or selling its contributor version,
469
+
but do not include claims that would be infringed only as a
470
+
consequence of further modification of the contributor version. For
471
+
purposes of this definition, "control" includes the right to grant
472
+
patent sublicenses in a manner consistent with the requirements of
473
+
this License.
474
+
475
+
Each contributor grants you a non-exclusive, worldwide, royalty-free
476
+
patent license under the contributor's essential patent claims, to
477
+
make, use, sell, offer for sale, import and otherwise run, modify and
478
+
propagate the contents of its contributor version.
479
+
480
+
In the following three paragraphs, a "patent license" is any express
481
+
agreement or commitment, however denominated, not to enforce a patent
482
+
(such as an express permission to practice a patent or covenant not to
483
+
sue for patent infringement). To "grant" such a patent license to a
484
+
party means to make such an agreement or commitment not to enforce a
485
+
patent against the party.
486
+
487
+
If you convey a covered work, knowingly relying on a patent license,
488
+
and the Corresponding Source of the work is not available for anyone
489
+
to copy, free of charge and under the terms of this License, through a
490
+
publicly available network server or other readily accessible means,
491
+
then you must either (1) cause the Corresponding Source to be so
492
+
available, or (2) arrange to deprive yourself of the benefit of the
493
+
patent license for this particular work, or (3) arrange, in a manner
494
+
consistent with the requirements of this License, to extend the patent
495
+
license to downstream recipients. "Knowingly relying" means you have
496
+
actual knowledge that, but for the patent license, your conveying the
497
+
covered work in a country, or your recipient's use of the covered work
498
+
in a country, would infringe one or more identifiable patents in that
499
+
country that you have reason to believe are valid.
500
+
501
+
If, pursuant to or in connection with a single transaction or
502
+
arrangement, you convey, or propagate by procuring conveyance of, a
503
+
covered work, and grant a patent license to some of the parties
504
+
receiving the covered work authorizing them to use, propagate, modify
505
+
or convey a specific copy of the covered work, then the patent license
506
+
you grant is automatically extended to all recipients of the covered
507
+
work and works based on it.
508
+
509
+
A patent license is "discriminatory" if it does not include within
510
+
the scope of its coverage, prohibits the exercise of, or is
511
+
conditioned on the non-exercise of one or more of the rights that are
512
+
specifically granted under this License. You may not convey a covered
513
+
work if you are a party to an arrangement with a third party that is
514
+
in the business of distributing software, under which you make payment
515
+
to the third party based on the extent of your activity of conveying
516
+
the work, and under which the third party grants, to any of the
517
+
parties who would receive the covered work from you, a discriminatory
518
+
patent license (a) in connection with copies of the covered work
519
+
conveyed by you (or copies made from those copies), or (b) primarily
520
+
for and in connection with specific products or compilations that
521
+
contain the covered work, unless you entered into that arrangement,
522
+
or that patent license was granted, prior to 28 March 2007.
523
+
524
+
Nothing in this License shall be construed as excluding or limiting
525
+
any implied license or other defenses to infringement that may
526
+
otherwise be available to you under applicable patent law.
527
+
528
+
12. No Surrender of Others' Freedom.
529
+
530
+
If conditions are imposed on you (whether by court order, agreement or
531
+
otherwise) that contradict the conditions of this License, they do not
532
+
excuse you from the conditions of this License. If you cannot convey a
533
+
covered work so as to satisfy simultaneously your obligations under this
534
+
License and any other pertinent obligations, then as a consequence you may
535
+
not convey it at all. For example, if you agree to terms that obligate you
536
+
to collect a royalty for further conveying from those to whom you convey
537
+
the Program, the only way you could satisfy both those terms and this
538
+
License would be to refrain entirely from conveying the Program.
539
+
540
+
13. Remote Network Interaction; Use with the GNU General Public License.
541
+
542
+
Notwithstanding any other provision of this License, if you modify the
543
+
Program, your modified version must prominently offer all users
544
+
interacting with it remotely through a computer network (if your version
545
+
supports such interaction) an opportunity to receive the Corresponding
546
+
Source of your version by providing access to the Corresponding Source
547
+
from a network server at no charge, through some standard or customary
548
+
means of facilitating copying of software. This Corresponding Source
549
+
shall include the Corresponding Source for any work covered by version 3
550
+
of the GNU General Public License that is incorporated pursuant to the
551
+
following paragraph.
552
+
553
+
Notwithstanding any other provision of this License, you have
554
+
permission to link or combine any covered work with a work licensed
555
+
under version 3 of the GNU General Public License into a single
556
+
combined work, and to convey the resulting work. The terms of this
557
+
License will continue to apply to the part which is the covered work,
558
+
but the work with which it is combined will remain governed by version
559
+
3 of the GNU General Public License.
560
+
561
+
14. Revised Versions of this License.
562
+
563
+
The Free Software Foundation may publish revised and/or new versions of
564
+
the GNU Affero General Public License from time to time. Such new versions
565
+
will be similar in spirit to the present version, but may differ in detail to
566
+
address new problems or concerns.
567
+
568
+
Each version is given a distinguishing version number. If the
569
+
Program specifies that a certain numbered version of the GNU Affero General
570
+
Public License "or any later version" applies to it, you have the
571
+
option of following the terms and conditions either of that numbered
572
+
version or of any later version published by the Free Software
573
+
Foundation. If the Program does not specify a version number of the
574
+
GNU Affero General Public License, you may choose any version ever published
575
+
by the Free Software Foundation.
576
+
577
+
If the Program specifies that a proxy can decide which future
578
+
versions of the GNU Affero General Public License can be used, that proxy's
579
+
public statement of acceptance of a version permanently authorizes you
580
+
to choose that version for the Program.
581
+
582
+
Later license versions may give you additional or different
583
+
permissions. However, no additional obligations are imposed on any
584
+
author or copyright holder as a result of your choosing to follow a
585
+
later version.
586
+
587
+
15. Disclaimer of Warranty.
588
+
589
+
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590
+
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591
+
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592
+
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593
+
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594
+
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595
+
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596
+
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597
+
598
+
16. Limitation of Liability.
599
+
600
+
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601
+
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602
+
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603
+
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604
+
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605
+
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606
+
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607
+
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608
+
SUCH DAMAGES.
609
+
610
+
17. Interpretation of Sections 15 and 16.
611
+
612
+
If the disclaimer of warranty and limitation of liability provided
613
+
above cannot be given local legal effect according to their terms,
614
+
reviewing courts shall apply local law that most closely approximates
615
+
an absolute waiver of all civil liability in connection with the
616
+
Program, unless a warranty or assumption of liability accompanies a
617
+
copy of the Program in return for a fee.
618
+
619
+
END OF TERMS AND CONDITIONS
620
+
621
+
How to Apply These Terms to Your New Programs
622
+
623
+
If you develop a new program, and you want it to be of the greatest
624
+
possible use to the public, the best way to achieve this is to make it
625
+
free software which everyone can redistribute and change under these terms.
626
+
627
+
To do so, attach the following notices to the program. It is safest
628
+
to attach them to the start of each source file to most effectively
629
+
state the exclusion of warranty; and each file should have at least
630
+
the "copyright" line and a pointer to where the full notice is found.
631
+
632
+
<one line to give the program's name and a brief idea of what it does.>
633
+
Copyright (C) <year> <name of author>
634
+
635
+
This program is free software: you can redistribute it and/or modify
636
+
it under the terms of the GNU Affero General Public License as published
637
+
by the Free Software Foundation, either version 3 of the License, or
638
+
(at your option) any later version.
639
+
640
+
This program is distributed in the hope that it will be useful,
641
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
642
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643
+
GNU Affero General Public License for more details.
644
+
645
+
You should have received a copy of the GNU Affero General Public License
646
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
647
+
648
+
Also add information on how to contact you by electronic and paper mail.
649
+
650
+
If your software can interact with users remotely through a computer
651
+
network, you should also make sure that it provides a way for users to
652
+
get its source. For example, if your program is a web application, its
653
+
interface could display a "Source" link that leads users to an archive
654
+
of the code. There are many ways you could offer source, and different
655
+
solutions will be better for different programs; see section 13 for the
656
+
specific requirements.
657
+
658
+
You should also get your employer (if you work as a programmer) or school,
659
+
if any, to sign a "copyright disclaimer" for the program, if necessary.
660
+
For more information on this, and how to apply and follow the GNU AGPL, see
661
+
<https://www.gnu.org/licenses/>.
+1682
-324
Cargo.lock
+1682
-324
Cargo.lock
···
19
19
20
20
[[package]]
21
21
name = "aho-corasick"
22
-
version = "1.1.3"
22
+
version = "1.1.4"
23
23
source = "registry+https://github.com/rust-lang/crates.io-index"
24
-
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
24
+
checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
25
25
dependencies = [
26
26
"memchr",
27
27
]
28
+
29
+
[[package]]
30
+
name = "allocator-api2"
31
+
version = "0.2.21"
32
+
source = "registry+https://github.com/rust-lang/crates.io-index"
33
+
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
28
34
29
35
[[package]]
30
36
name = "anstream"
···
78
84
79
85
[[package]]
80
86
name = "anyhow"
81
-
version = "1.0.99"
87
+
version = "1.0.100"
82
88
source = "registry+https://github.com/rust-lang/crates.io-index"
83
-
checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
89
+
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
90
+
91
+
[[package]]
92
+
name = "atoi"
93
+
version = "2.0.0"
94
+
source = "registry+https://github.com/rust-lang/crates.io-index"
95
+
checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528"
96
+
dependencies = [
97
+
"num-traits",
98
+
]
84
99
85
100
[[package]]
86
101
name = "autocfg"
···
113
128
]
114
129
115
130
[[package]]
116
-
name = "beef"
117
-
version = "0.5.2"
131
+
name = "base64"
132
+
version = "0.22.1"
133
+
source = "registry+https://github.com/rust-lang/crates.io-index"
134
+
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
135
+
136
+
[[package]]
137
+
name = "base64ct"
138
+
version = "1.8.0"
118
139
source = "registry+https://github.com/rust-lang/crates.io-index"
119
-
checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1"
140
+
checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba"
120
141
121
142
[[package]]
122
143
name = "bitflags"
···
129
150
version = "2.9.1"
130
151
source = "registry+https://github.com/rust-lang/crates.io-index"
131
152
checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
153
+
dependencies = [
154
+
"serde",
155
+
]
132
156
133
157
[[package]]
134
158
name = "bitmaps"
···
140
164
]
141
165
142
166
[[package]]
143
-
name = "bumpalo"
144
-
version = "3.19.0"
167
+
name = "block-buffer"
168
+
version = "0.10.4"
169
+
source = "registry+https://github.com/rust-lang/crates.io-index"
170
+
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
171
+
dependencies = [
172
+
"generic-array",
173
+
]
174
+
175
+
[[package]]
176
+
name = "bstr"
177
+
version = "1.12.0"
145
178
source = "registry+https://github.com/rust-lang/crates.io-index"
146
-
checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
179
+
checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
180
+
dependencies = [
181
+
"memchr",
182
+
"regex-automata",
183
+
"serde",
184
+
]
185
+
186
+
[[package]]
187
+
name = "byteorder"
188
+
version = "1.5.0"
189
+
source = "registry+https://github.com/rust-lang/crates.io-index"
190
+
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
147
191
148
192
[[package]]
149
193
name = "bytes"
150
194
version = "1.10.1"
151
195
source = "registry+https://github.com/rust-lang/crates.io-index"
152
196
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
197
+
198
+
[[package]]
199
+
name = "cc"
200
+
version = "1.2.41"
201
+
source = "registry+https://github.com/rust-lang/crates.io-index"
202
+
checksum = "ac9fe6cdbb24b6ade63616c0a0688e45bb56732262c158df3c0c4bea4ca47cb7"
203
+
dependencies = [
204
+
"find-msvc-tools",
205
+
"jobserver",
206
+
"libc",
207
+
"shlex",
208
+
]
153
209
154
210
[[package]]
155
211
name = "cfg-if"
···
171
227
172
228
[[package]]
173
229
name = "clap"
174
-
version = "4.5.47"
230
+
version = "4.5.53"
175
231
source = "registry+https://github.com/rust-lang/crates.io-index"
176
-
checksum = "7eac00902d9d136acd712710d71823fb8ac8004ca445a89e73a41d45aa712931"
232
+
checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
177
233
dependencies = [
178
234
"clap_builder",
179
235
"clap_derive",
···
209
265
210
266
[[package]]
211
267
name = "clap_builder"
212
-
version = "4.5.47"
268
+
version = "4.5.53"
213
269
source = "registry+https://github.com/rust-lang/crates.io-index"
214
-
checksum = "2ad9bbf750e73b5884fb8a211a9424a1906c1e156724260fdae972f31d70e1d6"
270
+
checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
215
271
dependencies = [
216
272
"anstream",
217
273
"anstyle",
···
221
277
222
278
[[package]]
223
279
name = "clap_complete"
224
-
version = "4.5.58"
280
+
version = "4.5.62"
225
281
source = "registry+https://github.com/rust-lang/crates.io-index"
226
-
checksum = "75bf0b32ad2e152de789bb635ea4d3078f6b838ad7974143e99b99f45a04af4a"
282
+
checksum = "004eef6b14ce34759aa7de4aea3217e368f463f46a3ed3764ca4b5a4404003b4"
227
283
dependencies = [
228
284
"clap",
285
+
"clap_lex",
286
+
"is_executable",
287
+
"shlex",
229
288
]
230
289
231
290
[[package]]
232
291
name = "clap_derive"
233
-
version = "4.5.47"
292
+
version = "4.5.49"
234
293
source = "registry+https://github.com/rust-lang/crates.io-index"
235
-
checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c"
294
+
checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671"
236
295
dependencies = [
237
296
"heck",
238
297
"proc-macro2",
239
298
"quote",
240
-
"syn 2.0.106",
299
+
"syn 2.0.111",
241
300
]
242
301
243
302
[[package]]
···
253
312
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
254
313
255
314
[[package]]
256
-
name = "console"
257
-
version = "0.16.0"
315
+
name = "concurrent-queue"
316
+
version = "2.5.0"
317
+
source = "registry+https://github.com/rust-lang/crates.io-index"
318
+
checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973"
319
+
dependencies = [
320
+
"crossbeam-utils",
321
+
]
322
+
323
+
[[package]]
324
+
name = "const-oid"
325
+
version = "0.9.6"
326
+
source = "registry+https://github.com/rust-lang/crates.io-index"
327
+
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
328
+
329
+
[[package]]
330
+
name = "convert_case"
331
+
version = "0.10.0"
332
+
source = "registry+https://github.com/rust-lang/crates.io-index"
333
+
checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9"
334
+
dependencies = [
335
+
"unicode-segmentation",
336
+
]
337
+
338
+
[[package]]
339
+
name = "cpufeatures"
340
+
version = "0.2.17"
258
341
source = "registry+https://github.com/rust-lang/crates.io-index"
259
-
checksum = "2e09ced7ebbccb63b4c65413d821f2e00ce54c5ca4514ddc6b3c892fdbcbc69d"
342
+
checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
260
343
dependencies = [
261
-
"encode_unicode",
262
344
"libc",
263
-
"once_cell",
264
-
"unicode-width 0.2.1",
265
-
"windows-sys 0.60.2",
345
+
]
346
+
347
+
[[package]]
348
+
name = "crc"
349
+
version = "3.3.0"
350
+
source = "registry+https://github.com/rust-lang/crates.io-index"
351
+
checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675"
352
+
dependencies = [
353
+
"crc-catalog",
354
+
]
355
+
356
+
[[package]]
357
+
name = "crc-catalog"
358
+
version = "2.4.0"
359
+
source = "registry+https://github.com/rust-lang/crates.io-index"
360
+
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
361
+
362
+
[[package]]
363
+
name = "crossbeam-queue"
364
+
version = "0.3.12"
365
+
source = "registry+https://github.com/rust-lang/crates.io-index"
366
+
checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115"
367
+
dependencies = [
368
+
"crossbeam-utils",
369
+
]
370
+
371
+
[[package]]
372
+
name = "crossbeam-utils"
373
+
version = "0.8.21"
374
+
source = "registry+https://github.com/rust-lang/crates.io-index"
375
+
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
376
+
377
+
[[package]]
378
+
name = "crypto-common"
379
+
version = "0.1.6"
380
+
source = "registry+https://github.com/rust-lang/crates.io-index"
381
+
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
382
+
dependencies = [
383
+
"generic-array",
384
+
"typenum",
385
+
]
386
+
387
+
[[package]]
388
+
name = "curve25519-dalek"
389
+
version = "4.1.3"
390
+
source = "registry+https://github.com/rust-lang/crates.io-index"
391
+
checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be"
392
+
dependencies = [
393
+
"cfg-if",
394
+
"cpufeatures",
395
+
"curve25519-dalek-derive",
396
+
"digest",
397
+
"fiat-crypto",
398
+
"rustc_version",
399
+
"subtle",
400
+
"zeroize",
401
+
]
402
+
403
+
[[package]]
404
+
name = "curve25519-dalek-derive"
405
+
version = "0.1.1"
406
+
source = "registry+https://github.com/rust-lang/crates.io-index"
407
+
checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
408
+
dependencies = [
409
+
"proc-macro2",
410
+
"quote",
411
+
"syn 2.0.111",
412
+
]
413
+
414
+
[[package]]
415
+
name = "darling"
416
+
version = "0.21.3"
417
+
source = "registry+https://github.com/rust-lang/crates.io-index"
418
+
checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0"
419
+
dependencies = [
420
+
"darling_core",
421
+
"darling_macro",
422
+
]
423
+
424
+
[[package]]
425
+
name = "darling_core"
426
+
version = "0.21.3"
427
+
source = "registry+https://github.com/rust-lang/crates.io-index"
428
+
checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4"
429
+
dependencies = [
430
+
"fnv",
431
+
"ident_case",
432
+
"proc-macro2",
433
+
"quote",
434
+
"strsim",
435
+
"syn 2.0.111",
436
+
]
437
+
438
+
[[package]]
439
+
name = "darling_macro"
440
+
version = "0.21.3"
441
+
source = "registry+https://github.com/rust-lang/crates.io-index"
442
+
checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81"
443
+
dependencies = [
444
+
"darling_core",
445
+
"quote",
446
+
"syn 2.0.111",
447
+
]
448
+
449
+
[[package]]
450
+
name = "data-encoding"
451
+
version = "2.9.0"
452
+
source = "registry+https://github.com/rust-lang/crates.io-index"
453
+
checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476"
454
+
455
+
[[package]]
456
+
name = "der"
457
+
version = "0.7.10"
458
+
source = "registry+https://github.com/rust-lang/crates.io-index"
459
+
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
460
+
dependencies = [
461
+
"const-oid",
462
+
"pem-rfc7468",
463
+
"zeroize",
266
464
]
267
465
268
466
[[package]]
269
467
name = "derive_more"
270
-
version = "2.0.1"
468
+
version = "2.1.0"
271
469
source = "registry+https://github.com/rust-lang/crates.io-index"
272
-
checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
470
+
checksum = "10b768e943bed7bf2cab53df09f4bc34bfd217cdb57d971e769874c9a6710618"
273
471
dependencies = [
274
472
"derive_more-impl",
275
473
]
276
474
277
475
[[package]]
278
476
name = "derive_more-impl"
279
-
version = "2.0.1"
477
+
version = "2.1.0"
280
478
source = "registry+https://github.com/rust-lang/crates.io-index"
281
-
checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
479
+
checksum = "6d286bfdaf75e988b4a78e013ecd79c581e06399ab53fbacd2d916c2f904f30b"
282
480
dependencies = [
481
+
"convert_case",
283
482
"proc-macro2",
284
483
"quote",
285
-
"syn 2.0.106",
484
+
"rustc_version",
485
+
"syn 2.0.111",
286
486
"unicode-xid",
287
487
]
288
488
···
303
503
]
304
504
305
505
[[package]]
506
+
name = "digest"
507
+
version = "0.10.7"
508
+
source = "registry+https://github.com/rust-lang/crates.io-index"
509
+
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
510
+
dependencies = [
511
+
"block-buffer",
512
+
"const-oid",
513
+
"crypto-common",
514
+
"subtle",
515
+
]
516
+
517
+
[[package]]
518
+
name = "displaydoc"
519
+
version = "0.2.5"
520
+
source = "registry+https://github.com/rust-lang/crates.io-index"
521
+
checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
522
+
dependencies = [
523
+
"proc-macro2",
524
+
"quote",
525
+
"syn 2.0.111",
526
+
]
527
+
528
+
[[package]]
529
+
name = "dotenvy"
530
+
version = "0.15.7"
531
+
source = "registry+https://github.com/rust-lang/crates.io-index"
532
+
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
533
+
534
+
[[package]]
306
535
name = "downcast-rs"
307
536
version = "1.2.1"
308
537
source = "registry+https://github.com/rust-lang/crates.io-index"
309
538
checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2"
310
539
311
540
[[package]]
312
-
name = "either"
313
-
version = "1.15.0"
541
+
name = "ed25519"
542
+
version = "2.2.3"
314
543
source = "registry+https://github.com/rust-lang/crates.io-index"
315
-
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
544
+
checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53"
545
+
dependencies = [
546
+
"pkcs8",
547
+
"signature",
548
+
]
316
549
317
550
[[package]]
318
-
name = "encode_unicode"
319
-
version = "1.0.0"
551
+
name = "ed25519-dalek"
552
+
version = "2.2.0"
320
553
source = "registry+https://github.com/rust-lang/crates.io-index"
321
-
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
554
+
checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9"
555
+
dependencies = [
556
+
"curve25519-dalek",
557
+
"ed25519",
558
+
"serde",
559
+
"sha2",
560
+
"subtle",
561
+
"zeroize",
562
+
]
563
+
564
+
[[package]]
565
+
name = "either"
566
+
version = "1.15.0"
567
+
source = "registry+https://github.com/rust-lang/crates.io-index"
568
+
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
569
+
dependencies = [
570
+
"serde",
571
+
]
322
572
323
573
[[package]]
324
574
name = "enum-display-derive"
···
340
590
"once_cell",
341
591
"proc-macro2",
342
592
"quote",
343
-
"syn 2.0.106",
593
+
"syn 2.0.111",
344
594
]
345
595
346
596
[[package]]
···
356
606
checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
357
607
dependencies = [
358
608
"libc",
359
-
"windows-sys 0.60.2",
609
+
"windows-sys 0.52.0",
610
+
]
611
+
612
+
[[package]]
613
+
name = "etcetera"
614
+
version = "0.8.0"
615
+
source = "registry+https://github.com/rust-lang/crates.io-index"
616
+
checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943"
617
+
dependencies = [
618
+
"cfg-if",
619
+
"home",
620
+
"windows-sys 0.48.0",
621
+
]
622
+
623
+
[[package]]
624
+
name = "event-listener"
625
+
version = "5.4.1"
626
+
source = "registry+https://github.com/rust-lang/crates.io-index"
627
+
checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab"
628
+
dependencies = [
629
+
"concurrent-queue",
630
+
"parking",
631
+
"pin-project-lite",
360
632
]
361
633
362
634
[[package]]
···
366
638
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
367
639
368
640
[[package]]
641
+
name = "fiat-crypto"
642
+
version = "0.2.9"
643
+
source = "registry+https://github.com/rust-lang/crates.io-index"
644
+
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
645
+
646
+
[[package]]
369
647
name = "filedescriptor"
370
648
version = "0.8.3"
371
649
source = "registry+https://github.com/rust-lang/crates.io-index"
···
375
653
"thiserror 1.0.69",
376
654
"winapi",
377
655
]
656
+
657
+
[[package]]
658
+
name = "find-msvc-tools"
659
+
version = "0.1.4"
660
+
source = "registry+https://github.com/rust-lang/crates.io-index"
661
+
checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127"
378
662
379
663
[[package]]
380
664
name = "fixedbitset"
···
383
667
checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
384
668
385
669
[[package]]
670
+
name = "flume"
671
+
version = "0.11.1"
672
+
source = "registry+https://github.com/rust-lang/crates.io-index"
673
+
checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095"
674
+
dependencies = [
675
+
"futures-core",
676
+
"futures-sink",
677
+
"spin",
678
+
]
679
+
680
+
[[package]]
386
681
name = "fnv"
387
682
version = "1.0.7"
388
683
source = "registry+https://github.com/rust-lang/crates.io-index"
389
684
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
390
685
391
686
[[package]]
687
+
name = "foldhash"
688
+
version = "0.1.5"
689
+
source = "registry+https://github.com/rust-lang/crates.io-index"
690
+
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
691
+
692
+
[[package]]
693
+
name = "form_urlencoded"
694
+
version = "1.2.2"
695
+
source = "registry+https://github.com/rust-lang/crates.io-index"
696
+
checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
697
+
dependencies = [
698
+
"percent-encoding",
699
+
]
700
+
701
+
[[package]]
392
702
name = "fuchsia-cprng"
393
703
version = "0.1.1"
394
704
source = "registry+https://github.com/rust-lang/crates.io-index"
···
437
747
]
438
748
439
749
[[package]]
750
+
name = "futures-intrusive"
751
+
version = "0.5.0"
752
+
source = "registry+https://github.com/rust-lang/crates.io-index"
753
+
checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f"
754
+
dependencies = [
755
+
"futures-core",
756
+
"lock_api",
757
+
"parking_lot",
758
+
]
759
+
760
+
[[package]]
440
761
name = "futures-io"
441
762
version = "0.3.31"
442
763
source = "registry+https://github.com/rust-lang/crates.io-index"
···
450
771
dependencies = [
451
772
"proc-macro2",
452
773
"quote",
453
-
"syn 2.0.106",
774
+
"syn 2.0.111",
454
775
]
455
776
456
777
[[package]]
···
484
805
]
485
806
486
807
[[package]]
808
+
name = "generic-array"
809
+
version = "0.14.9"
810
+
source = "registry+https://github.com/rust-lang/crates.io-index"
811
+
checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2"
812
+
dependencies = [
813
+
"typenum",
814
+
"version_check",
815
+
]
816
+
817
+
[[package]]
487
818
name = "gethostname"
488
-
version = "1.0.2"
819
+
version = "1.1.0"
489
820
source = "registry+https://github.com/rust-lang/crates.io-index"
490
-
checksum = "fc257fdb4038301ce4b9cd1b3b51704509692bb3ff716a410cbd07925d9dae55"
821
+
checksum = "1bd49230192a3797a9a4d6abe9b3eed6f7fa4c8a8a4947977c6f80025f92cbd8"
491
822
dependencies = [
492
823
"rustix",
493
-
"windows-targets 0.52.6",
824
+
"windows-link 0.2.1",
825
+
]
826
+
827
+
[[package]]
828
+
name = "getrandom"
829
+
version = "0.2.16"
830
+
source = "registry+https://github.com/rust-lang/crates.io-index"
831
+
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
832
+
dependencies = [
833
+
"cfg-if",
834
+
"libc",
835
+
"wasi 0.11.1+wasi-snapshot-preview1",
494
836
]
495
837
496
838
[[package]]
···
512
854
checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
513
855
514
856
[[package]]
857
+
name = "gjson"
858
+
version = "0.8.1"
859
+
source = "registry+https://github.com/rust-lang/crates.io-index"
860
+
checksum = "43503cc176394dd30a6525f5f36e838339b8b5619be33ed9a7783841580a97b6"
861
+
862
+
[[package]]
515
863
name = "hashbrown"
516
864
version = "0.15.5"
517
865
source = "registry+https://github.com/rust-lang/crates.io-index"
518
866
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
867
+
dependencies = [
868
+
"allocator-api2",
869
+
"equivalent",
870
+
"foldhash",
871
+
]
872
+
873
+
[[package]]
874
+
name = "hashlink"
875
+
version = "0.10.0"
876
+
source = "registry+https://github.com/rust-lang/crates.io-index"
877
+
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
878
+
dependencies = [
879
+
"hashbrown",
880
+
]
519
881
520
882
[[package]]
521
883
name = "heck"
···
524
886
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
525
887
526
888
[[package]]
889
+
name = "hermit-abi"
890
+
version = "0.5.2"
891
+
source = "registry+https://github.com/rust-lang/crates.io-index"
892
+
checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c"
893
+
894
+
[[package]]
895
+
name = "hex"
896
+
version = "0.4.3"
897
+
source = "registry+https://github.com/rust-lang/crates.io-index"
898
+
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
899
+
900
+
[[package]]
901
+
name = "hkdf"
902
+
version = "0.12.4"
903
+
source = "registry+https://github.com/rust-lang/crates.io-index"
904
+
checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7"
905
+
dependencies = [
906
+
"hmac",
907
+
]
908
+
909
+
[[package]]
910
+
name = "hmac"
911
+
version = "0.12.1"
912
+
source = "registry+https://github.com/rust-lang/crates.io-index"
913
+
checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
914
+
dependencies = [
915
+
"digest",
916
+
]
917
+
918
+
[[package]]
919
+
name = "home"
920
+
version = "0.5.12"
921
+
source = "registry+https://github.com/rust-lang/crates.io-index"
922
+
checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d"
923
+
dependencies = [
924
+
"windows-sys 0.61.2",
925
+
]
926
+
927
+
[[package]]
928
+
name = "icu_collections"
929
+
version = "2.0.0"
930
+
source = "registry+https://github.com/rust-lang/crates.io-index"
931
+
checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
932
+
dependencies = [
933
+
"displaydoc",
934
+
"potential_utf",
935
+
"yoke",
936
+
"zerofrom",
937
+
"zerovec",
938
+
]
939
+
940
+
[[package]]
941
+
name = "icu_locale_core"
942
+
version = "2.0.0"
943
+
source = "registry+https://github.com/rust-lang/crates.io-index"
944
+
checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
945
+
dependencies = [
946
+
"displaydoc",
947
+
"litemap",
948
+
"tinystr",
949
+
"writeable",
950
+
"zerovec",
951
+
]
952
+
953
+
[[package]]
954
+
name = "icu_normalizer"
955
+
version = "2.0.0"
956
+
source = "registry+https://github.com/rust-lang/crates.io-index"
957
+
checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
958
+
dependencies = [
959
+
"displaydoc",
960
+
"icu_collections",
961
+
"icu_normalizer_data",
962
+
"icu_properties",
963
+
"icu_provider",
964
+
"smallvec",
965
+
"zerovec",
966
+
]
967
+
968
+
[[package]]
969
+
name = "icu_normalizer_data"
970
+
version = "2.0.0"
971
+
source = "registry+https://github.com/rust-lang/crates.io-index"
972
+
checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
973
+
974
+
[[package]]
975
+
name = "icu_properties"
976
+
version = "2.0.1"
977
+
source = "registry+https://github.com/rust-lang/crates.io-index"
978
+
checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
979
+
dependencies = [
980
+
"displaydoc",
981
+
"icu_collections",
982
+
"icu_locale_core",
983
+
"icu_properties_data",
984
+
"icu_provider",
985
+
"potential_utf",
986
+
"zerotrie",
987
+
"zerovec",
988
+
]
989
+
990
+
[[package]]
991
+
name = "icu_properties_data"
992
+
version = "2.0.1"
993
+
source = "registry+https://github.com/rust-lang/crates.io-index"
994
+
checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
995
+
996
+
[[package]]
997
+
name = "icu_provider"
998
+
version = "2.0.0"
999
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1000
+
checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
1001
+
dependencies = [
1002
+
"displaydoc",
1003
+
"icu_locale_core",
1004
+
"stable_deref_trait",
1005
+
"tinystr",
1006
+
"writeable",
1007
+
"yoke",
1008
+
"zerofrom",
1009
+
"zerotrie",
1010
+
"zerovec",
1011
+
]
1012
+
1013
+
[[package]]
1014
+
name = "ident_case"
1015
+
version = "1.0.1"
1016
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1017
+
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
1018
+
1019
+
[[package]]
1020
+
name = "idna"
1021
+
version = "1.1.0"
1022
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1023
+
checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
1024
+
dependencies = [
1025
+
"idna_adapter",
1026
+
"smallvec",
1027
+
"utf8_iter",
1028
+
]
1029
+
1030
+
[[package]]
1031
+
name = "idna_adapter"
1032
+
version = "1.2.1"
1033
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1034
+
checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
1035
+
dependencies = [
1036
+
"icu_normalizer",
1037
+
"icu_properties",
1038
+
]
1039
+
1040
+
[[package]]
527
1041
name = "im"
528
1042
version = "15.1.0"
529
1043
source = "registry+https://github.com/rust-lang/crates.io-index"
···
549
1063
]
550
1064
551
1065
[[package]]
552
-
name = "indicatif"
553
-
version = "0.18.0"
1066
+
name = "is-terminal"
1067
+
version = "0.4.16"
554
1068
source = "registry+https://github.com/rust-lang/crates.io-index"
555
-
checksum = "70a646d946d06bedbbc4cac4c218acf4bbf2d87757a784857025f4d447e4e1cd"
1069
+
checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
556
1070
dependencies = [
557
-
"console",
558
-
"portable-atomic",
559
-
"unicode-width 0.2.1",
560
-
"unit-prefix",
561
-
"web-time",
562
-
]
563
-
564
-
[[package]]
565
-
name = "io-uring"
566
-
version = "0.7.9"
567
-
source = "registry+https://github.com/rust-lang/crates.io-index"
568
-
checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4"
569
-
dependencies = [
570
-
"bitflags 2.9.1",
571
-
"cfg-if",
1071
+
"hermit-abi",
572
1072
"libc",
1073
+
"windows-sys 0.59.0",
573
1074
]
574
1075
575
1076
[[package]]
···
579
1080
checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45"
580
1081
581
1082
[[package]]
1083
+
name = "is_executable"
1084
+
version = "1.0.5"
1085
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1086
+
checksum = "baabb8b4867b26294d818bf3f651a454b6901431711abb96e296245888d6e8c4"
1087
+
dependencies = [
1088
+
"windows-sys 0.60.2",
1089
+
]
1090
+
1091
+
[[package]]
582
1092
name = "is_terminal_polyfill"
583
1093
version = "1.70.1"
584
1094
source = "registry+https://github.com/rust-lang/crates.io-index"
···
600
1110
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
601
1111
602
1112
[[package]]
603
-
name = "js-sys"
604
-
version = "0.3.77"
1113
+
name = "jobserver"
1114
+
version = "0.1.34"
605
1115
source = "registry+https://github.com/rust-lang/crates.io-index"
606
-
checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
1116
+
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
607
1117
dependencies = [
608
-
"once_cell",
609
-
"wasm-bindgen",
1118
+
"getrandom 0.3.3",
1119
+
"libc",
610
1120
]
611
1121
612
1122
[[package]]
613
-
name = "key_agent"
614
-
version = "0.5.0"
1123
+
name = "lazy_static"
1124
+
version = "1.5.0"
1125
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1126
+
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
615
1127
dependencies = [
616
-
"anyhow",
617
-
"nix 0.30.1",
618
-
"prost",
619
-
"prost-build",
620
-
"tokio",
1128
+
"spin",
621
1129
]
622
1130
623
1131
[[package]]
624
-
name = "lazy_static"
625
-
version = "1.5.0"
1132
+
name = "libc"
1133
+
version = "0.2.175"
1134
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1135
+
checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"
1136
+
1137
+
[[package]]
1138
+
name = "libm"
1139
+
version = "0.2.15"
626
1140
source = "registry+https://github.com/rust-lang/crates.io-index"
627
-
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
1141
+
checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
1142
+
1143
+
[[package]]
1144
+
name = "libmimalloc-sys"
1145
+
version = "0.1.44"
1146
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1147
+
checksum = "667f4fec20f29dfc6bc7357c582d91796c169ad7e2fce709468aefeb2c099870"
1148
+
dependencies = [
1149
+
"cc",
1150
+
"libc",
1151
+
]
628
1152
629
1153
[[package]]
630
-
name = "lib"
631
-
version = "0.5.0"
1154
+
name = "libredox"
1155
+
version = "0.1.10"
1156
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1157
+
checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb"
632
1158
dependencies = [
633
-
"anyhow",
634
-
"derive_more",
635
-
"enum_dispatch",
636
-
"futures",
637
-
"gethostname",
638
-
"im",
639
-
"itertools",
640
-
"key_agent",
641
-
"miette",
642
-
"nix 0.30.1",
643
-
"portable-pty",
644
-
"proc-macro2",
645
-
"prost",
646
-
"rand 0.9.2",
647
-
"regex",
648
-
"serde",
649
-
"serde-query",
650
-
"serde_json",
651
-
"serde_repr",
652
-
"syn 2.0.106",
653
-
"tempdir",
654
-
"thiserror 2.0.16",
655
-
"tokio",
656
-
"tokio-util",
657
-
"tracing",
658
-
"tracing-subscriber",
659
-
"visibility",
1159
+
"bitflags 2.9.1",
1160
+
"libc",
1161
+
"redox_syscall",
660
1162
]
661
1163
662
1164
[[package]]
663
-
name = "libc"
664
-
version = "0.2.175"
1165
+
name = "libsqlite3-sys"
1166
+
version = "0.30.1"
665
1167
source = "registry+https://github.com/rust-lang/crates.io-index"
666
-
checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"
1168
+
checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
1169
+
dependencies = [
1170
+
"cc",
1171
+
"pkg-config",
1172
+
"vcpkg",
1173
+
]
667
1174
668
1175
[[package]]
669
1176
name = "linux-raw-sys"
670
1177
version = "0.9.4"
671
1178
source = "registry+https://github.com/rust-lang/crates.io-index"
672
1179
checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
1180
+
1181
+
[[package]]
1182
+
name = "litemap"
1183
+
version = "0.8.0"
1184
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1185
+
checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
673
1186
674
1187
[[package]]
675
1188
name = "lock_api"
···
688
1201
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
689
1202
690
1203
[[package]]
691
-
name = "logos"
692
-
version = "0.12.1"
1204
+
name = "md-5"
1205
+
version = "0.10.6"
693
1206
source = "registry+https://github.com/rust-lang/crates.io-index"
694
-
checksum = "bf8b031682c67a8e3d5446840f9573eb7fe26efe7ec8d195c9ac4c0647c502f1"
1207
+
checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
695
1208
dependencies = [
696
-
"logos-derive",
697
-
]
698
-
699
-
[[package]]
700
-
name = "logos-derive"
701
-
version = "0.12.1"
702
-
source = "registry+https://github.com/rust-lang/crates.io-index"
703
-
checksum = "a1d849148dbaf9661a6151d1ca82b13bb4c4c128146a88d05253b38d4e2f496c"
704
-
dependencies = [
705
-
"beef",
706
-
"fnv",
707
-
"proc-macro2",
708
-
"quote",
709
-
"regex-syntax 0.6.29",
710
-
"syn 1.0.109",
1209
+
"cfg-if",
1210
+
"digest",
711
1211
]
712
1212
713
1213
[[package]]
···
727
1227
"cfg-if",
728
1228
"miette-derive",
729
1229
"owo-colors",
730
-
"supports-color",
1230
+
"supports-color 3.0.2",
731
1231
"supports-hyperlinks",
732
1232
"supports-unicode",
733
1233
"terminal_size",
···
743
1243
dependencies = [
744
1244
"proc-macro2",
745
1245
"quote",
746
-
"syn 2.0.106",
1246
+
"syn 2.0.111",
1247
+
]
1248
+
1249
+
[[package]]
1250
+
name = "mimalloc"
1251
+
version = "0.1.48"
1252
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1253
+
checksum = "e1ee66a4b64c74f4ef288bcbb9192ad9c3feaad75193129ac8509af543894fd8"
1254
+
dependencies = [
1255
+
"libmimalloc-sys",
747
1256
]
748
1257
749
1258
[[package]]
···
803
1312
]
804
1313
805
1314
[[package]]
1315
+
name = "nix-compat"
1316
+
version = "0.1.0"
1317
+
source = "git+https://git.snix.dev/snix/snix.git#4aaef4cdf6f7766eedcfe1b5bad8f1c4e4d05c12"
1318
+
dependencies = [
1319
+
"bitflags 2.9.1",
1320
+
"bstr",
1321
+
"bytes",
1322
+
"data-encoding",
1323
+
"ed25519",
1324
+
"ed25519-dalek",
1325
+
"futures",
1326
+
"mimalloc",
1327
+
"nix-compat-derive",
1328
+
"nom",
1329
+
"num_enum",
1330
+
"pin-project-lite",
1331
+
"serde",
1332
+
"serde_json",
1333
+
"serde_with",
1334
+
"sha2",
1335
+
"thiserror 2.0.17",
1336
+
"tokio",
1337
+
"tracing",
1338
+
"url",
1339
+
]
1340
+
1341
+
[[package]]
1342
+
name = "nix-compat-derive"
1343
+
version = "0.1.0"
1344
+
source = "git+https://git.snix.dev/snix/snix.git#4aaef4cdf6f7766eedcfe1b5bad8f1c4e4d05c12"
1345
+
dependencies = [
1346
+
"proc-macro2",
1347
+
"quote",
1348
+
"syn 2.0.111",
1349
+
]
1350
+
1351
+
[[package]]
1352
+
name = "nom"
1353
+
version = "8.0.0"
1354
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1355
+
checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405"
1356
+
dependencies = [
1357
+
"memchr",
1358
+
]
1359
+
1360
+
[[package]]
806
1361
name = "nu-ansi-term"
807
1362
version = "0.50.1"
808
1363
source = "registry+https://github.com/rust-lang/crates.io-index"
···
812
1367
]
813
1368
814
1369
[[package]]
1370
+
name = "num-bigint-dig"
1371
+
version = "0.8.6"
1372
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1373
+
checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7"
1374
+
dependencies = [
1375
+
"lazy_static",
1376
+
"libm",
1377
+
"num-integer",
1378
+
"num-iter",
1379
+
"num-traits",
1380
+
"rand 0.8.5",
1381
+
"smallvec",
1382
+
"zeroize",
1383
+
]
1384
+
1385
+
[[package]]
1386
+
name = "num-integer"
1387
+
version = "0.1.46"
1388
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1389
+
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
1390
+
dependencies = [
1391
+
"num-traits",
1392
+
]
1393
+
1394
+
[[package]]
1395
+
name = "num-iter"
1396
+
version = "0.1.45"
1397
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1398
+
checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
1399
+
dependencies = [
1400
+
"autocfg",
1401
+
"num-integer",
1402
+
"num-traits",
1403
+
]
1404
+
1405
+
[[package]]
815
1406
name = "num-traits"
816
1407
version = "0.2.19"
817
1408
source = "registry+https://github.com/rust-lang/crates.io-index"
818
1409
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
819
1410
dependencies = [
820
1411
"autocfg",
1412
+
"libm",
821
1413
]
822
1414
823
1415
[[package]]
1416
+
name = "num_enum"
1417
+
version = "0.7.5"
1418
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1419
+
checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c"
1420
+
dependencies = [
1421
+
"num_enum_derive",
1422
+
"rustversion",
1423
+
]
1424
+
1425
+
[[package]]
1426
+
name = "num_enum_derive"
1427
+
version = "0.7.5"
1428
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1429
+
checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7"
1430
+
dependencies = [
1431
+
"proc-macro-crate",
1432
+
"proc-macro2",
1433
+
"quote",
1434
+
"syn 2.0.111",
1435
+
]
1436
+
1437
+
[[package]]
1438
+
name = "numtoa"
1439
+
version = "0.2.4"
1440
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1441
+
checksum = "6aa2c4e539b869820a2b82e1aef6ff40aa85e65decdd5185e83fb4b1249cd00f"
1442
+
1443
+
[[package]]
824
1444
name = "object"
825
1445
version = "0.36.7"
826
1446
source = "registry+https://github.com/rust-lang/crates.io-index"
···
843
1463
844
1464
[[package]]
845
1465
name = "owo-colors"
846
-
version = "4.2.2"
1466
+
version = "4.2.3"
847
1467
source = "registry+https://github.com/rust-lang/crates.io-index"
848
-
checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e"
1468
+
checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52"
1469
+
dependencies = [
1470
+
"supports-color 2.1.0",
1471
+
"supports-color 3.0.2",
1472
+
]
1473
+
1474
+
[[package]]
1475
+
name = "parking"
1476
+
version = "2.2.1"
1477
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1478
+
checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
849
1479
850
1480
[[package]]
851
1481
name = "parking_lot"
···
871
1501
]
872
1502
873
1503
[[package]]
1504
+
name = "pem-rfc7468"
1505
+
version = "0.7.0"
1506
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1507
+
checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412"
1508
+
dependencies = [
1509
+
"base64ct",
1510
+
]
1511
+
1512
+
[[package]]
1513
+
name = "percent-encoding"
1514
+
version = "2.3.2"
1515
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1516
+
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
1517
+
1518
+
[[package]]
874
1519
name = "petgraph"
875
1520
version = "0.7.1"
876
1521
source = "registry+https://github.com/rust-lang/crates.io-index"
···
893
1538
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
894
1539
895
1540
[[package]]
896
-
name = "portable-atomic"
897
-
version = "1.11.1"
1541
+
name = "pkcs1"
1542
+
version = "0.7.5"
1543
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1544
+
checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f"
1545
+
dependencies = [
1546
+
"der",
1547
+
"pkcs8",
1548
+
"spki",
1549
+
]
1550
+
1551
+
[[package]]
1552
+
name = "pkcs8"
1553
+
version = "0.10.2"
1554
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1555
+
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
1556
+
dependencies = [
1557
+
"der",
1558
+
"spki",
1559
+
]
1560
+
1561
+
[[package]]
1562
+
name = "pkg-config"
1563
+
version = "0.3.32"
898
1564
source = "registry+https://github.com/rust-lang/crates.io-index"
899
-
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
1565
+
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
900
1566
901
1567
[[package]]
902
1568
name = "portable-pty"
···
917
1583
"shell-words",
918
1584
"winapi",
919
1585
"winreg",
1586
+
]
1587
+
1588
+
[[package]]
1589
+
name = "potential_utf"
1590
+
version = "0.1.3"
1591
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1592
+
checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a"
1593
+
dependencies = [
1594
+
"zerovec",
920
1595
]
921
1596
922
1597
[[package]]
···
935
1610
checksum = "ff24dfcda44452b9816fff4cd4227e1bb73ff5a2f1bc1105aa92fb8565ce44d2"
936
1611
dependencies = [
937
1612
"proc-macro2",
938
-
"syn 2.0.106",
1613
+
"syn 2.0.111",
939
1614
]
940
1615
941
1616
[[package]]
942
-
name = "proc-macro-error"
943
-
version = "1.0.4"
1617
+
name = "proc-macro-crate"
1618
+
version = "3.4.0"
944
1619
source = "registry+https://github.com/rust-lang/crates.io-index"
945
-
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
1620
+
checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983"
946
1621
dependencies = [
947
-
"proc-macro-error-attr",
948
-
"proc-macro2",
949
-
"quote",
950
-
"syn 1.0.109",
951
-
"version_check",
952
-
]
953
-
954
-
[[package]]
955
-
name = "proc-macro-error-attr"
956
-
version = "1.0.4"
957
-
source = "registry+https://github.com/rust-lang/crates.io-index"
958
-
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
959
-
dependencies = [
960
-
"proc-macro2",
961
-
"quote",
962
-
"version_check",
1622
+
"toml_edit",
963
1623
]
964
1624
965
1625
[[package]]
966
1626
name = "proc-macro2"
967
-
version = "1.0.101"
1627
+
version = "1.0.103"
968
1628
source = "registry+https://github.com/rust-lang/crates.io-index"
969
-
checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
1629
+
checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8"
970
1630
dependencies = [
971
1631
"unicode-ident",
972
1632
]
···
997
1657
"prost",
998
1658
"prost-types",
999
1659
"regex",
1000
-
"syn 2.0.106",
1660
+
"syn 2.0.111",
1001
1661
"tempfile",
1002
1662
]
1003
1663
···
1011
1671
"itertools",
1012
1672
"proc-macro2",
1013
1673
"quote",
1014
-
"syn 2.0.106",
1674
+
"syn 2.0.111",
1015
1675
]
1016
1676
1017
1677
[[package]]
···
1053
1713
1054
1714
[[package]]
1055
1715
name = "rand"
1716
+
version = "0.8.5"
1717
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1718
+
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
1719
+
dependencies = [
1720
+
"libc",
1721
+
"rand_chacha 0.3.1",
1722
+
"rand_core 0.6.4",
1723
+
]
1724
+
1725
+
[[package]]
1726
+
name = "rand"
1056
1727
version = "0.9.2"
1057
1728
source = "registry+https://github.com/rust-lang/crates.io-index"
1058
1729
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
1059
1730
dependencies = [
1060
-
"rand_chacha",
1731
+
"rand_chacha 0.9.0",
1061
1732
"rand_core 0.9.3",
1062
1733
]
1063
1734
1064
1735
[[package]]
1065
1736
name = "rand_chacha"
1737
+
version = "0.3.1"
1738
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1739
+
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
1740
+
dependencies = [
1741
+
"ppv-lite86",
1742
+
"rand_core 0.6.4",
1743
+
]
1744
+
1745
+
[[package]]
1746
+
name = "rand_chacha"
1066
1747
version = "0.9.0"
1067
1748
source = "registry+https://github.com/rust-lang/crates.io-index"
1068
1749
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
···
1091
1772
version = "0.6.4"
1092
1773
source = "registry+https://github.com/rust-lang/crates.io-index"
1093
1774
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
1775
+
dependencies = [
1776
+
"getrandom 0.2.16",
1777
+
]
1094
1778
1095
1779
[[package]]
1096
1780
name = "rand_core"
···
1098
1782
source = "registry+https://github.com/rust-lang/crates.io-index"
1099
1783
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
1100
1784
dependencies = [
1101
-
"getrandom",
1785
+
"getrandom 0.3.3",
1102
1786
]
1103
1787
1104
1788
[[package]]
···
1130
1814
1131
1815
[[package]]
1132
1816
name = "regex"
1133
-
version = "1.11.2"
1817
+
version = "1.12.2"
1134
1818
source = "registry+https://github.com/rust-lang/crates.io-index"
1135
-
checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912"
1819
+
checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
1136
1820
dependencies = [
1137
1821
"aho-corasick",
1138
1822
"memchr",
1139
1823
"regex-automata",
1140
-
"regex-syntax 0.8.5",
1824
+
"regex-syntax",
1141
1825
]
1142
1826
1143
1827
[[package]]
1144
1828
name = "regex-automata"
1145
-
version = "0.4.9"
1829
+
version = "0.4.13"
1146
1830
source = "registry+https://github.com/rust-lang/crates.io-index"
1147
-
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
1831
+
checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
1148
1832
dependencies = [
1149
1833
"aho-corasick",
1150
1834
"memchr",
1151
-
"regex-syntax 0.8.5",
1835
+
"regex-syntax",
1152
1836
]
1153
1837
1154
1838
[[package]]
1155
1839
name = "regex-syntax"
1156
-
version = "0.6.29"
1157
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1158
-
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
1159
-
1160
-
[[package]]
1161
-
name = "regex-syntax"
1162
1840
version = "0.8.5"
1163
1841
source = "registry+https://github.com/rust-lang/crates.io-index"
1164
1842
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
···
1173
1851
]
1174
1852
1175
1853
[[package]]
1854
+
name = "rsa"
1855
+
version = "0.9.9"
1856
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1857
+
checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88"
1858
+
dependencies = [
1859
+
"const-oid",
1860
+
"digest",
1861
+
"num-bigint-dig",
1862
+
"num-integer",
1863
+
"num-traits",
1864
+
"pkcs1",
1865
+
"pkcs8",
1866
+
"rand_core 0.6.4",
1867
+
"signature",
1868
+
"spki",
1869
+
"subtle",
1870
+
"zeroize",
1871
+
]
1872
+
1873
+
[[package]]
1176
1874
name = "rustc-demangle"
1177
1875
version = "0.1.26"
1178
1876
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1185
1883
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
1186
1884
1187
1885
[[package]]
1886
+
name = "rustc_version"
1887
+
version = "0.4.1"
1888
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1889
+
checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
1890
+
dependencies = [
1891
+
"semver",
1892
+
]
1893
+
1894
+
[[package]]
1188
1895
name = "rustix"
1189
1896
version = "1.0.8"
1190
1897
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1194
1901
"errno",
1195
1902
"libc",
1196
1903
"linux-raw-sys",
1197
-
"windows-sys 0.60.2",
1904
+
"windows-sys 0.52.0",
1198
1905
]
1199
1906
1200
1907
[[package]]
1908
+
name = "rustversion"
1909
+
version = "1.0.22"
1910
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1911
+
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
1912
+
1913
+
[[package]]
1201
1914
name = "ryu"
1202
1915
version = "1.0.20"
1203
1916
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1210
1923
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
1211
1924
1212
1925
[[package]]
1926
+
name = "semver"
1927
+
version = "1.0.27"
1928
+
source = "registry+https://github.com/rust-lang/crates.io-index"
1929
+
checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
1930
+
1931
+
[[package]]
1213
1932
name = "serde"
1214
-
version = "1.0.225"
1933
+
version = "1.0.228"
1215
1934
source = "registry+https://github.com/rust-lang/crates.io-index"
1216
-
checksum = "fd6c24dee235d0da097043389623fb913daddf92c76e9f5a1db88607a0bcbd1d"
1935
+
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
1217
1936
dependencies = [
1218
1937
"serde_core",
1219
1938
"serde_derive",
1220
1939
]
1221
1940
1222
1941
[[package]]
1223
-
name = "serde-query"
1224
-
version = "0.2.0"
1942
+
name = "serde_core"
1943
+
version = "1.0.228"
1225
1944
source = "registry+https://github.com/rust-lang/crates.io-index"
1226
-
checksum = "eccf6e0453b6f1981f159a1da3e4c16427447921f282eff3bbe40cec28aeaf5f"
1945
+
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
1227
1946
dependencies = [
1228
-
"serde",
1229
-
"serde-query-derive",
1947
+
"serde_derive",
1230
1948
]
1231
1949
1232
1950
[[package]]
1233
-
name = "serde-query-core"
1234
-
version = "0.2.0"
1951
+
name = "serde_derive"
1952
+
version = "1.0.228"
1235
1953
source = "registry+https://github.com/rust-lang/crates.io-index"
1236
-
checksum = "350922b83e64ef1ac841b6c47a95d6cc1677735e5cad058eac0fb32e80796122"
1954
+
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
1237
1955
dependencies = [
1238
-
"logos",
1239
-
"proc-macro-error",
1240
1956
"proc-macro2",
1241
1957
"quote",
1242
-
"syn 1.0.109",
1958
+
"syn 2.0.111",
1243
1959
]
1244
1960
1245
1961
[[package]]
1246
-
name = "serde-query-derive"
1247
-
version = "0.2.0"
1962
+
name = "serde_json"
1963
+
version = "1.0.145"
1248
1964
source = "registry+https://github.com/rust-lang/crates.io-index"
1249
-
checksum = "c56cc536c2da20c38f9c134d5a313e2b996f63fcc0540d25d3d3daeb1d04bb8f"
1965
+
checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
1250
1966
dependencies = [
1251
-
"proc-macro-error",
1252
-
"quote",
1253
-
"serde-query-core",
1254
-
"syn 1.0.109",
1967
+
"itoa",
1968
+
"memchr",
1969
+
"ryu",
1970
+
"serde",
1971
+
"serde_core",
1255
1972
]
1256
1973
1257
1974
[[package]]
1258
-
name = "serde_core"
1259
-
version = "1.0.225"
1975
+
name = "serde_urlencoded"
1976
+
version = "0.7.1"
1260
1977
source = "registry+https://github.com/rust-lang/crates.io-index"
1261
-
checksum = "659356f9a0cb1e529b24c01e43ad2bdf520ec4ceaf83047b83ddcc2251f96383"
1978
+
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
1262
1979
dependencies = [
1263
-
"serde_derive",
1980
+
"form_urlencoded",
1981
+
"itoa",
1982
+
"ryu",
1983
+
"serde",
1264
1984
]
1265
1985
1266
1986
[[package]]
1267
-
name = "serde_derive"
1268
-
version = "1.0.225"
1987
+
name = "serde_with"
1988
+
version = "3.15.0"
1269
1989
source = "registry+https://github.com/rust-lang/crates.io-index"
1270
-
checksum = "0ea936adf78b1f766949a4977b91d2f5595825bd6ec079aa9543ad2685fc4516"
1271
-
dependencies = [
1272
-
"proc-macro2",
1273
-
"quote",
1274
-
"syn 2.0.106",
1275
-
]
1276
-
1277
-
[[package]]
1278
-
name = "serde_json"
1279
-
version = "1.0.145"
1280
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1281
-
checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
1990
+
checksum = "6093cd8c01b25262b84927e0f7151692158fab02d961e04c979d3903eba7ecc5"
1282
1991
dependencies = [
1283
-
"itoa",
1284
-
"memchr",
1285
-
"ryu",
1286
-
"serde",
1287
1992
"serde_core",
1993
+
"serde_with_macros",
1288
1994
]
1289
1995
1290
1996
[[package]]
1291
-
name = "serde_repr"
1292
-
version = "0.1.20"
1997
+
name = "serde_with_macros"
1998
+
version = "3.15.0"
1293
1999
source = "registry+https://github.com/rust-lang/crates.io-index"
1294
-
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
2000
+
checksum = "a7e6c180db0816026a61afa1cff5344fb7ebded7e4d3062772179f2501481c27"
1295
2001
dependencies = [
2002
+
"darling",
1296
2003
"proc-macro2",
1297
2004
"quote",
1298
-
"syn 2.0.106",
2005
+
"syn 2.0.111",
1299
2006
]
1300
2007
1301
2008
[[package]]
···
1310
2017
]
1311
2018
1312
2019
[[package]]
2020
+
name = "sha1"
2021
+
version = "0.10.6"
2022
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2023
+
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
2024
+
dependencies = [
2025
+
"cfg-if",
2026
+
"cpufeatures",
2027
+
"digest",
2028
+
]
2029
+
2030
+
[[package]]
2031
+
name = "sha2"
2032
+
version = "0.10.9"
2033
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2034
+
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
2035
+
dependencies = [
2036
+
"cfg-if",
2037
+
"cpufeatures",
2038
+
"digest",
2039
+
]
2040
+
2041
+
[[package]]
1313
2042
name = "sharded-slab"
1314
2043
version = "0.1.7"
1315
2044
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1335
2064
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
1336
2065
1337
2066
[[package]]
2067
+
name = "shlex"
2068
+
version = "1.3.0"
2069
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2070
+
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
2071
+
2072
+
[[package]]
2073
+
name = "signal-hook"
2074
+
version = "0.3.18"
2075
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2076
+
checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2"
2077
+
dependencies = [
2078
+
"libc",
2079
+
"signal-hook-registry",
2080
+
]
2081
+
2082
+
[[package]]
1338
2083
name = "signal-hook-registry"
1339
2084
version = "1.4.6"
1340
2085
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1344
2089
]
1345
2090
1346
2091
[[package]]
2092
+
name = "signal-hook-tokio"
2093
+
version = "0.3.1"
2094
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2095
+
checksum = "213241f76fb1e37e27de3b6aa1b068a2c333233b59cca6634f634b80a27ecf1e"
2096
+
dependencies = [
2097
+
"futures-core",
2098
+
"libc",
2099
+
"signal-hook",
2100
+
"tokio",
2101
+
]
2102
+
2103
+
[[package]]
2104
+
name = "signature"
2105
+
version = "2.2.0"
2106
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2107
+
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
2108
+
dependencies = [
2109
+
"digest",
2110
+
"rand_core 0.6.4",
2111
+
]
2112
+
2113
+
[[package]]
1347
2114
name = "sized-chunks"
1348
2115
version = "0.6.5"
1349
2116
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1364
2131
version = "1.15.1"
1365
2132
source = "registry+https://github.com/rust-lang/crates.io-index"
1366
2133
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
2134
+
dependencies = [
2135
+
"serde",
2136
+
]
1367
2137
1368
2138
[[package]]
1369
2139
name = "socket2"
···
1376
2146
]
1377
2147
1378
2148
[[package]]
2149
+
name = "spin"
2150
+
version = "0.9.8"
2151
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2152
+
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
2153
+
dependencies = [
2154
+
"lock_api",
2155
+
]
2156
+
2157
+
[[package]]
2158
+
name = "spki"
2159
+
version = "0.7.3"
2160
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2161
+
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
2162
+
dependencies = [
2163
+
"base64ct",
2164
+
"der",
2165
+
]
2166
+
2167
+
[[package]]
2168
+
name = "sqlx"
2169
+
version = "0.8.6"
2170
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2171
+
checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc"
2172
+
dependencies = [
2173
+
"sqlx-core",
2174
+
"sqlx-macros",
2175
+
"sqlx-mysql",
2176
+
"sqlx-postgres",
2177
+
"sqlx-sqlite",
2178
+
]
2179
+
2180
+
[[package]]
2181
+
name = "sqlx-core"
2182
+
version = "0.8.6"
2183
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2184
+
checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6"
2185
+
dependencies = [
2186
+
"base64",
2187
+
"bytes",
2188
+
"crc",
2189
+
"crossbeam-queue",
2190
+
"either",
2191
+
"event-listener",
2192
+
"futures-core",
2193
+
"futures-intrusive",
2194
+
"futures-io",
2195
+
"futures-util",
2196
+
"hashbrown",
2197
+
"hashlink",
2198
+
"indexmap",
2199
+
"log",
2200
+
"memchr",
2201
+
"once_cell",
2202
+
"percent-encoding",
2203
+
"serde",
2204
+
"serde_json",
2205
+
"sha2",
2206
+
"smallvec",
2207
+
"thiserror 2.0.17",
2208
+
"tokio",
2209
+
"tokio-stream",
2210
+
"tracing",
2211
+
"url",
2212
+
]
2213
+
2214
+
[[package]]
2215
+
name = "sqlx-macros"
2216
+
version = "0.8.6"
2217
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2218
+
checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d"
2219
+
dependencies = [
2220
+
"proc-macro2",
2221
+
"quote",
2222
+
"sqlx-core",
2223
+
"sqlx-macros-core",
2224
+
"syn 2.0.111",
2225
+
]
2226
+
2227
+
[[package]]
2228
+
name = "sqlx-macros-core"
2229
+
version = "0.8.6"
2230
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2231
+
checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
2232
+
dependencies = [
2233
+
"dotenvy",
2234
+
"either",
2235
+
"heck",
2236
+
"hex",
2237
+
"once_cell",
2238
+
"proc-macro2",
2239
+
"quote",
2240
+
"serde",
2241
+
"serde_json",
2242
+
"sha2",
2243
+
"sqlx-core",
2244
+
"sqlx-mysql",
2245
+
"sqlx-postgres",
2246
+
"sqlx-sqlite",
2247
+
"syn 2.0.111",
2248
+
"tokio",
2249
+
"url",
2250
+
]
2251
+
2252
+
[[package]]
2253
+
name = "sqlx-mysql"
2254
+
version = "0.8.6"
2255
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2256
+
checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
2257
+
dependencies = [
2258
+
"atoi",
2259
+
"base64",
2260
+
"bitflags 2.9.1",
2261
+
"byteorder",
2262
+
"bytes",
2263
+
"crc",
2264
+
"digest",
2265
+
"dotenvy",
2266
+
"either",
2267
+
"futures-channel",
2268
+
"futures-core",
2269
+
"futures-io",
2270
+
"futures-util",
2271
+
"generic-array",
2272
+
"hex",
2273
+
"hkdf",
2274
+
"hmac",
2275
+
"itoa",
2276
+
"log",
2277
+
"md-5",
2278
+
"memchr",
2279
+
"once_cell",
2280
+
"percent-encoding",
2281
+
"rand 0.8.5",
2282
+
"rsa",
2283
+
"serde",
2284
+
"sha1",
2285
+
"sha2",
2286
+
"smallvec",
2287
+
"sqlx-core",
2288
+
"stringprep",
2289
+
"thiserror 2.0.17",
2290
+
"tracing",
2291
+
"whoami",
2292
+
]
2293
+
2294
+
[[package]]
2295
+
name = "sqlx-postgres"
2296
+
version = "0.8.6"
2297
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2298
+
checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
2299
+
dependencies = [
2300
+
"atoi",
2301
+
"base64",
2302
+
"bitflags 2.9.1",
2303
+
"byteorder",
2304
+
"crc",
2305
+
"dotenvy",
2306
+
"etcetera",
2307
+
"futures-channel",
2308
+
"futures-core",
2309
+
"futures-util",
2310
+
"hex",
2311
+
"hkdf",
2312
+
"hmac",
2313
+
"home",
2314
+
"itoa",
2315
+
"log",
2316
+
"md-5",
2317
+
"memchr",
2318
+
"once_cell",
2319
+
"rand 0.8.5",
2320
+
"serde",
2321
+
"serde_json",
2322
+
"sha2",
2323
+
"smallvec",
2324
+
"sqlx-core",
2325
+
"stringprep",
2326
+
"thiserror 2.0.17",
2327
+
"tracing",
2328
+
"whoami",
2329
+
]
2330
+
2331
+
[[package]]
2332
+
name = "sqlx-sqlite"
2333
+
version = "0.8.6"
2334
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2335
+
checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea"
2336
+
dependencies = [
2337
+
"atoi",
2338
+
"flume",
2339
+
"futures-channel",
2340
+
"futures-core",
2341
+
"futures-executor",
2342
+
"futures-intrusive",
2343
+
"futures-util",
2344
+
"libsqlite3-sys",
2345
+
"log",
2346
+
"percent-encoding",
2347
+
"serde",
2348
+
"serde_urlencoded",
2349
+
"sqlx-core",
2350
+
"thiserror 2.0.17",
2351
+
"tracing",
2352
+
"url",
2353
+
]
2354
+
2355
+
[[package]]
2356
+
name = "stable_deref_trait"
2357
+
version = "1.2.1"
2358
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2359
+
checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
2360
+
2361
+
[[package]]
2362
+
name = "stringprep"
2363
+
version = "0.1.5"
2364
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2365
+
checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1"
2366
+
dependencies = [
2367
+
"unicode-bidi",
2368
+
"unicode-normalization",
2369
+
"unicode-properties",
2370
+
]
2371
+
2372
+
[[package]]
2373
+
name = "strip-ansi-escapes"
2374
+
version = "0.2.1"
2375
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2376
+
checksum = "2a8f8038e7e7969abb3f1b7c2a811225e9296da208539e0f79c5251d6cac0025"
2377
+
dependencies = [
2378
+
"vte",
2379
+
]
2380
+
2381
+
[[package]]
1379
2382
name = "strsim"
1380
2383
version = "0.11.1"
1381
2384
source = "registry+https://github.com/rust-lang/crates.io-index"
1382
2385
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
1383
2386
1384
2387
[[package]]
2388
+
name = "subtle"
2389
+
version = "2.6.1"
2390
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2391
+
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
2392
+
2393
+
[[package]]
2394
+
name = "supports-color"
2395
+
version = "2.1.0"
2396
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2397
+
checksum = "d6398cde53adc3c4557306a96ce67b302968513830a77a95b2b17305d9719a89"
2398
+
dependencies = [
2399
+
"is-terminal",
2400
+
"is_ci",
2401
+
]
2402
+
2403
+
[[package]]
1385
2404
name = "supports-color"
1386
2405
version = "3.0.2"
1387
2406
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1415
2434
1416
2435
[[package]]
1417
2436
name = "syn"
1418
-
version = "2.0.106"
2437
+
version = "2.0.111"
1419
2438
source = "registry+https://github.com/rust-lang/crates.io-index"
1420
-
checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
2439
+
checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
1421
2440
dependencies = [
1422
2441
"proc-macro2",
1423
2442
"quote",
···
1425
2444
]
1426
2445
1427
2446
[[package]]
2447
+
name = "synstructure"
2448
+
version = "0.13.2"
2449
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2450
+
checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
2451
+
dependencies = [
2452
+
"proc-macro2",
2453
+
"quote",
2454
+
"syn 2.0.111",
2455
+
]
2456
+
2457
+
[[package]]
1428
2458
name = "tempdir"
1429
2459
version = "0.3.7"
1430
2460
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1441
2471
checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1"
1442
2472
dependencies = [
1443
2473
"fastrand",
1444
-
"getrandom",
2474
+
"getrandom 0.3.3",
1445
2475
"once_cell",
1446
2476
"rustix",
1447
2477
"windows-sys 0.59.0",
···
1458
2488
]
1459
2489
1460
2490
[[package]]
2491
+
name = "termion"
2492
+
version = "4.0.6"
2493
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2494
+
checksum = "f44138a9ae08f0f502f24104d82517ef4da7330c35acd638f1f29d3cd5475ecb"
2495
+
dependencies = [
2496
+
"libc",
2497
+
"numtoa",
2498
+
]
2499
+
2500
+
[[package]]
1461
2501
name = "textwrap"
1462
2502
version = "0.16.2"
1463
2503
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1478
2518
1479
2519
[[package]]
1480
2520
name = "thiserror"
1481
-
version = "2.0.16"
2521
+
version = "2.0.17"
1482
2522
source = "registry+https://github.com/rust-lang/crates.io-index"
1483
-
checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0"
2523
+
checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
1484
2524
dependencies = [
1485
-
"thiserror-impl 2.0.16",
2525
+
"thiserror-impl 2.0.17",
1486
2526
]
1487
2527
1488
2528
[[package]]
···
1493
2533
dependencies = [
1494
2534
"proc-macro2",
1495
2535
"quote",
1496
-
"syn 2.0.106",
2536
+
"syn 2.0.111",
1497
2537
]
1498
2538
1499
2539
[[package]]
1500
2540
name = "thiserror-impl"
1501
-
version = "2.0.16"
2541
+
version = "2.0.17"
1502
2542
source = "registry+https://github.com/rust-lang/crates.io-index"
1503
-
checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960"
2543
+
checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
1504
2544
dependencies = [
1505
2545
"proc-macro2",
1506
2546
"quote",
1507
-
"syn 2.0.106",
2547
+
"syn 2.0.111",
1508
2548
]
1509
2549
1510
2550
[[package]]
···
1523
2563
]
1524
2564
1525
2565
[[package]]
2566
+
name = "tinystr"
2567
+
version = "0.8.1"
2568
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2569
+
checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
2570
+
dependencies = [
2571
+
"displaydoc",
2572
+
"zerovec",
2573
+
]
2574
+
2575
+
[[package]]
2576
+
name = "tinyvec"
2577
+
version = "1.10.0"
2578
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2579
+
checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa"
2580
+
dependencies = [
2581
+
"tinyvec_macros",
2582
+
]
2583
+
2584
+
[[package]]
2585
+
name = "tinyvec_macros"
2586
+
version = "0.1.1"
2587
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2588
+
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
2589
+
2590
+
[[package]]
1526
2591
name = "tokio"
1527
-
version = "1.47.1"
2592
+
version = "1.48.0"
1528
2593
source = "registry+https://github.com/rust-lang/crates.io-index"
1529
-
checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038"
2594
+
checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
1530
2595
dependencies = [
1531
-
"backtrace",
1532
2596
"bytes",
1533
-
"io-uring",
1534
2597
"libc",
1535
2598
"mio",
1536
2599
"parking_lot",
1537
2600
"pin-project-lite",
1538
2601
"signal-hook-registry",
1539
-
"slab",
1540
2602
"socket2",
1541
2603
"tokio-macros",
1542
-
"windows-sys 0.59.0",
2604
+
"windows-sys 0.61.2",
1543
2605
]
1544
2606
1545
2607
[[package]]
1546
2608
name = "tokio-macros"
1547
-
version = "2.5.0"
2609
+
version = "2.6.0"
1548
2610
source = "registry+https://github.com/rust-lang/crates.io-index"
1549
-
checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
2611
+
checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
1550
2612
dependencies = [
1551
2613
"proc-macro2",
1552
2614
"quote",
1553
-
"syn 2.0.106",
2615
+
"syn 2.0.111",
2616
+
]
2617
+
2618
+
[[package]]
2619
+
name = "tokio-stream"
2620
+
version = "0.1.17"
2621
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2622
+
checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
2623
+
dependencies = [
2624
+
"futures-core",
2625
+
"pin-project-lite",
2626
+
"tokio",
1554
2627
]
1555
2628
1556
2629
[[package]]
1557
2630
name = "tokio-util"
1558
-
version = "0.7.16"
2631
+
version = "0.7.17"
1559
2632
source = "registry+https://github.com/rust-lang/crates.io-index"
1560
-
checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5"
2633
+
checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594"
1561
2634
dependencies = [
1562
2635
"bytes",
1563
2636
"futures-core",
···
1567
2640
]
1568
2641
1569
2642
[[package]]
2643
+
name = "toml_datetime"
2644
+
version = "0.7.3"
2645
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2646
+
checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533"
2647
+
dependencies = [
2648
+
"serde_core",
2649
+
]
2650
+
2651
+
[[package]]
2652
+
name = "toml_edit"
2653
+
version = "0.23.5"
2654
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2655
+
checksum = "c2ad0b7ae9cfeef5605163839cb9221f453399f15cfb5c10be9885fcf56611f9"
2656
+
dependencies = [
2657
+
"indexmap",
2658
+
"toml_datetime",
2659
+
"toml_parser",
2660
+
"winnow",
2661
+
]
2662
+
2663
+
[[package]]
2664
+
name = "toml_parser"
2665
+
version = "1.0.4"
2666
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2667
+
checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e"
2668
+
dependencies = [
2669
+
"winnow",
2670
+
]
2671
+
2672
+
[[package]]
1570
2673
name = "tracing"
1571
-
version = "0.1.41"
2674
+
version = "0.1.44"
1572
2675
source = "registry+https://github.com/rust-lang/crates.io-index"
1573
-
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
2676
+
checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
1574
2677
dependencies = [
2678
+
"log",
1575
2679
"pin-project-lite",
1576
2680
"tracing-attributes",
1577
2681
"tracing-core",
···
1579
2683
1580
2684
[[package]]
1581
2685
name = "tracing-attributes"
1582
-
version = "0.1.30"
2686
+
version = "0.1.31"
1583
2687
source = "registry+https://github.com/rust-lang/crates.io-index"
1584
-
checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
2688
+
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
1585
2689
dependencies = [
1586
2690
"proc-macro2",
1587
2691
"quote",
1588
-
"syn 2.0.106",
2692
+
"syn 2.0.111",
1589
2693
]
1590
2694
1591
2695
[[package]]
1592
2696
name = "tracing-core"
1593
-
version = "0.1.34"
2697
+
version = "0.1.36"
1594
2698
source = "registry+https://github.com/rust-lang/crates.io-index"
1595
-
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
2699
+
checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
1596
2700
dependencies = [
1597
2701
"once_cell",
1598
2702
"valuable",
···
1611
2715
1612
2716
[[package]]
1613
2717
name = "tracing-subscriber"
1614
-
version = "0.3.20"
2718
+
version = "0.3.22"
1615
2719
source = "registry+https://github.com/rust-lang/crates.io-index"
1616
-
checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
2720
+
checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
1617
2721
dependencies = [
1618
2722
"nu-ansi-term",
1619
2723
"sharded-slab",
···
1630
2734
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
1631
2735
1632
2736
[[package]]
2737
+
name = "unicode-bidi"
2738
+
version = "0.3.18"
2739
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2740
+
checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
2741
+
2742
+
[[package]]
1633
2743
name = "unicode-ident"
1634
2744
version = "1.0.18"
1635
2745
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1642
2752
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
1643
2753
1644
2754
[[package]]
2755
+
name = "unicode-normalization"
2756
+
version = "0.1.25"
2757
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2758
+
checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8"
2759
+
dependencies = [
2760
+
"tinyvec",
2761
+
]
2762
+
2763
+
[[package]]
2764
+
name = "unicode-properties"
2765
+
version = "0.1.4"
2766
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2767
+
checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d"
2768
+
2769
+
[[package]]
2770
+
name = "unicode-segmentation"
2771
+
version = "1.12.0"
2772
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2773
+
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
2774
+
2775
+
[[package]]
1645
2776
name = "unicode-width"
1646
2777
version = "0.1.14"
1647
2778
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1660
2791
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
1661
2792
1662
2793
[[package]]
1663
-
name = "unit-prefix"
1664
-
version = "0.5.1"
2794
+
name = "url"
2795
+
version = "2.5.7"
2796
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2797
+
checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b"
2798
+
dependencies = [
2799
+
"form_urlencoded",
2800
+
"idna",
2801
+
"percent-encoding",
2802
+
"serde",
2803
+
]
2804
+
2805
+
[[package]]
2806
+
name = "utf8_iter"
2807
+
version = "1.0.4"
1665
2808
source = "registry+https://github.com/rust-lang/crates.io-index"
1666
-
checksum = "323402cff2dd658f39ca17c789b502021b3f18707c91cdf22e3838e1b4023817"
2809
+
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
1667
2810
1668
2811
[[package]]
1669
2812
name = "utf8parse"
···
1678
2821
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
1679
2822
1680
2823
[[package]]
2824
+
name = "vcpkg"
2825
+
version = "0.2.15"
2826
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2827
+
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
2828
+
2829
+
[[package]]
1681
2830
name = "version_check"
1682
2831
version = "0.9.5"
1683
2832
source = "registry+https://github.com/rust-lang/crates.io-index"
1684
2833
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
1685
2834
1686
2835
[[package]]
1687
-
name = "visibility"
1688
-
version = "0.1.1"
2836
+
name = "vte"
2837
+
version = "0.14.1"
1689
2838
source = "registry+https://github.com/rust-lang/crates.io-index"
1690
-
checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91"
2839
+
checksum = "231fdcd7ef3037e8330d8e17e61011a2c244126acc0a982f4040ac3f9f0bc077"
1691
2840
dependencies = [
1692
-
"proc-macro2",
1693
-
"quote",
1694
-
"syn 2.0.106",
2841
+
"memchr",
1695
2842
]
1696
2843
1697
2844
[[package]]
···
1710
2857
]
1711
2858
1712
2859
[[package]]
1713
-
name = "wasm-bindgen"
1714
-
version = "0.2.100"
2860
+
name = "wasite"
2861
+
version = "0.1.0"
1715
2862
source = "registry+https://github.com/rust-lang/crates.io-index"
1716
-
checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
1717
-
dependencies = [
1718
-
"cfg-if",
1719
-
"once_cell",
1720
-
"wasm-bindgen-macro",
1721
-
]
2863
+
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
1722
2864
1723
2865
[[package]]
1724
-
name = "wasm-bindgen-backend"
1725
-
version = "0.2.100"
2866
+
name = "whoami"
2867
+
version = "1.6.1"
1726
2868
source = "registry+https://github.com/rust-lang/crates.io-index"
1727
-
checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
2869
+
checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d"
1728
2870
dependencies = [
1729
-
"bumpalo",
1730
-
"log",
1731
-
"proc-macro2",
1732
-
"quote",
1733
-
"syn 2.0.106",
1734
-
"wasm-bindgen-shared",
1735
-
]
1736
-
1737
-
[[package]]
1738
-
name = "wasm-bindgen-macro"
1739
-
version = "0.2.100"
1740
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1741
-
checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
1742
-
dependencies = [
1743
-
"quote",
1744
-
"wasm-bindgen-macro-support",
1745
-
]
1746
-
1747
-
[[package]]
1748
-
name = "wasm-bindgen-macro-support"
1749
-
version = "0.2.100"
1750
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1751
-
checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
1752
-
dependencies = [
1753
-
"proc-macro2",
1754
-
"quote",
1755
-
"syn 2.0.106",
1756
-
"wasm-bindgen-backend",
1757
-
"wasm-bindgen-shared",
1758
-
]
1759
-
1760
-
[[package]]
1761
-
name = "wasm-bindgen-shared"
1762
-
version = "0.2.100"
1763
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1764
-
checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
1765
-
dependencies = [
1766
-
"unicode-ident",
1767
-
]
1768
-
1769
-
[[package]]
1770
-
name = "web-time"
1771
-
version = "1.1.0"
1772
-
source = "registry+https://github.com/rust-lang/crates.io-index"
1773
-
checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
1774
-
dependencies = [
1775
-
"js-sys",
1776
-
"wasm-bindgen",
2871
+
"libredox",
2872
+
"wasite",
1777
2873
]
1778
2874
1779
2875
[[package]]
···
1805
2901
checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
1806
2902
1807
2903
[[package]]
2904
+
name = "windows-link"
2905
+
version = "0.2.1"
2906
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2907
+
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
2908
+
2909
+
[[package]]
2910
+
name = "windows-sys"
2911
+
version = "0.48.0"
2912
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2913
+
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
2914
+
dependencies = [
2915
+
"windows-targets 0.48.5",
2916
+
]
2917
+
2918
+
[[package]]
1808
2919
name = "windows-sys"
1809
2920
version = "0.52.0"
1810
2921
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1832
2943
]
1833
2944
1834
2945
[[package]]
2946
+
name = "windows-sys"
2947
+
version = "0.61.2"
2948
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2949
+
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
2950
+
dependencies = [
2951
+
"windows-link 0.2.1",
2952
+
]
2953
+
2954
+
[[package]]
2955
+
name = "windows-targets"
2956
+
version = "0.48.5"
2957
+
source = "registry+https://github.com/rust-lang/crates.io-index"
2958
+
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
2959
+
dependencies = [
2960
+
"windows_aarch64_gnullvm 0.48.5",
2961
+
"windows_aarch64_msvc 0.48.5",
2962
+
"windows_i686_gnu 0.48.5",
2963
+
"windows_i686_msvc 0.48.5",
2964
+
"windows_x86_64_gnu 0.48.5",
2965
+
"windows_x86_64_gnullvm 0.48.5",
2966
+
"windows_x86_64_msvc 0.48.5",
2967
+
]
2968
+
2969
+
[[package]]
1835
2970
name = "windows-targets"
1836
2971
version = "0.52.6"
1837
2972
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1853
2988
source = "registry+https://github.com/rust-lang/crates.io-index"
1854
2989
checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91"
1855
2990
dependencies = [
1856
-
"windows-link",
2991
+
"windows-link 0.1.3",
1857
2992
"windows_aarch64_gnullvm 0.53.0",
1858
2993
"windows_aarch64_msvc 0.53.0",
1859
2994
"windows_i686_gnu 0.53.0",
···
1866
3001
1867
3002
[[package]]
1868
3003
name = "windows_aarch64_gnullvm"
3004
+
version = "0.48.5"
3005
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3006
+
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
3007
+
3008
+
[[package]]
3009
+
name = "windows_aarch64_gnullvm"
1869
3010
version = "0.52.6"
1870
3011
source = "registry+https://github.com/rust-lang/crates.io-index"
1871
3012
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
···
1878
3019
1879
3020
[[package]]
1880
3021
name = "windows_aarch64_msvc"
3022
+
version = "0.48.5"
3023
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3024
+
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
3025
+
3026
+
[[package]]
3027
+
name = "windows_aarch64_msvc"
1881
3028
version = "0.52.6"
1882
3029
source = "registry+https://github.com/rust-lang/crates.io-index"
1883
3030
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
···
1887
3034
version = "0.53.0"
1888
3035
source = "registry+https://github.com/rust-lang/crates.io-index"
1889
3036
checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
3037
+
3038
+
[[package]]
3039
+
name = "windows_i686_gnu"
3040
+
version = "0.48.5"
3041
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3042
+
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
1890
3043
1891
3044
[[package]]
1892
3045
name = "windows_i686_gnu"
···
1914
3067
1915
3068
[[package]]
1916
3069
name = "windows_i686_msvc"
3070
+
version = "0.48.5"
3071
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3072
+
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
3073
+
3074
+
[[package]]
3075
+
name = "windows_i686_msvc"
1917
3076
version = "0.52.6"
1918
3077
source = "registry+https://github.com/rust-lang/crates.io-index"
1919
3078
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
···
1926
3085
1927
3086
[[package]]
1928
3087
name = "windows_x86_64_gnu"
3088
+
version = "0.48.5"
3089
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3090
+
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
3091
+
3092
+
[[package]]
3093
+
name = "windows_x86_64_gnu"
1929
3094
version = "0.52.6"
1930
3095
source = "registry+https://github.com/rust-lang/crates.io-index"
1931
3096
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
···
1938
3103
1939
3104
[[package]]
1940
3105
name = "windows_x86_64_gnullvm"
3106
+
version = "0.48.5"
3107
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3108
+
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
3109
+
3110
+
[[package]]
3111
+
name = "windows_x86_64_gnullvm"
1941
3112
version = "0.52.6"
1942
3113
source = "registry+https://github.com/rust-lang/crates.io-index"
1943
3114
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
···
1950
3121
1951
3122
[[package]]
1952
3123
name = "windows_x86_64_msvc"
3124
+
version = "0.48.5"
3125
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3126
+
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
3127
+
3128
+
[[package]]
3129
+
name = "windows_x86_64_msvc"
1953
3130
version = "0.52.6"
1954
3131
source = "registry+https://github.com/rust-lang/crates.io-index"
1955
3132
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
···
1961
3138
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
1962
3139
1963
3140
[[package]]
3141
+
name = "winnow"
3142
+
version = "0.7.13"
3143
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3144
+
checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
3145
+
dependencies = [
3146
+
"memchr",
3147
+
]
3148
+
3149
+
[[package]]
1964
3150
name = "winreg"
1965
3151
version = "0.10.1"
1966
3152
source = "registry+https://github.com/rust-lang/crates.io-index"
···
1971
3157
1972
3158
[[package]]
1973
3159
name = "wire"
1974
-
version = "0.5.0"
3160
+
version = "1.1.1"
1975
3161
dependencies = [
1976
3162
"clap",
1977
3163
"clap-markdown",
···
1981
3167
"dhat",
1982
3168
"enum-display-derive",
1983
3169
"futures",
1984
-
"im",
1985
-
"indicatif",
1986
3170
"itertools",
1987
-
"lib",
1988
3171
"miette",
1989
-
"serde",
3172
+
"owo-colors",
1990
3173
"serde_json",
1991
-
"thiserror 2.0.16",
3174
+
"signal-hook",
3175
+
"signal-hook-tokio",
3176
+
"thiserror 2.0.17",
1992
3177
"tokio",
1993
3178
"tracing",
1994
3179
"tracing-log",
1995
3180
"tracing-subscriber",
3181
+
"wire-core",
3182
+
]
3183
+
3184
+
[[package]]
3185
+
name = "wire-core"
3186
+
version = "1.1.1"
3187
+
dependencies = [
3188
+
"aho-corasick",
3189
+
"anyhow",
3190
+
"base64",
3191
+
"derive_more",
3192
+
"enum_dispatch",
3193
+
"futures",
3194
+
"gethostname",
3195
+
"gjson",
3196
+
"im",
3197
+
"itertools",
3198
+
"miette",
3199
+
"nix 0.30.1",
3200
+
"nix-compat",
3201
+
"num_enum",
3202
+
"owo-colors",
3203
+
"portable-pty",
3204
+
"proc-macro2",
3205
+
"prost",
3206
+
"rand 0.9.2",
3207
+
"serde",
3208
+
"serde_json",
3209
+
"sha2",
3210
+
"sqlx",
3211
+
"strip-ansi-escapes",
3212
+
"syn 2.0.111",
3213
+
"tempdir",
3214
+
"termion",
3215
+
"thiserror 2.0.17",
3216
+
"tokio",
3217
+
"tokio-util",
3218
+
"tracing",
3219
+
"wire-key-agent",
3220
+
"zstd",
3221
+
]
3222
+
3223
+
[[package]]
3224
+
name = "wire-key-agent"
3225
+
version = "1.1.1"
3226
+
dependencies = [
3227
+
"anyhow",
3228
+
"base64",
3229
+
"futures-util",
3230
+
"nix 0.30.1",
3231
+
"prost",
3232
+
"prost-build",
3233
+
"sha2",
3234
+
"tokio",
3235
+
"tokio-util",
1996
3236
]
1997
3237
1998
3238
[[package]]
···
2005
3245
]
2006
3246
2007
3247
[[package]]
3248
+
name = "writeable"
3249
+
version = "0.6.1"
3250
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3251
+
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
3252
+
3253
+
[[package]]
3254
+
name = "yoke"
3255
+
version = "0.8.0"
3256
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3257
+
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
3258
+
dependencies = [
3259
+
"serde",
3260
+
"stable_deref_trait",
3261
+
"yoke-derive",
3262
+
"zerofrom",
3263
+
]
3264
+
3265
+
[[package]]
3266
+
name = "yoke-derive"
3267
+
version = "0.8.0"
3268
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3269
+
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
3270
+
dependencies = [
3271
+
"proc-macro2",
3272
+
"quote",
3273
+
"syn 2.0.111",
3274
+
"synstructure",
3275
+
]
3276
+
3277
+
[[package]]
2008
3278
name = "zerocopy"
2009
3279
version = "0.8.26"
2010
3280
source = "registry+https://github.com/rust-lang/crates.io-index"
···
2021
3291
dependencies = [
2022
3292
"proc-macro2",
2023
3293
"quote",
2024
-
"syn 2.0.106",
3294
+
"syn 2.0.111",
3295
+
]
3296
+
3297
+
[[package]]
3298
+
name = "zerofrom"
3299
+
version = "0.1.6"
3300
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3301
+
checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
3302
+
dependencies = [
3303
+
"zerofrom-derive",
3304
+
]
3305
+
3306
+
[[package]]
3307
+
name = "zerofrom-derive"
3308
+
version = "0.1.6"
3309
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3310
+
checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
3311
+
dependencies = [
3312
+
"proc-macro2",
3313
+
"quote",
3314
+
"syn 2.0.111",
3315
+
"synstructure",
3316
+
]
3317
+
3318
+
[[package]]
3319
+
name = "zeroize"
3320
+
version = "1.8.2"
3321
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3322
+
checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
3323
+
3324
+
[[package]]
3325
+
name = "zerotrie"
3326
+
version = "0.2.2"
3327
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3328
+
checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
3329
+
dependencies = [
3330
+
"displaydoc",
3331
+
"yoke",
3332
+
"zerofrom",
3333
+
]
3334
+
3335
+
[[package]]
3336
+
name = "zerovec"
3337
+
version = "0.11.4"
3338
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3339
+
checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b"
3340
+
dependencies = [
3341
+
"yoke",
3342
+
"zerofrom",
3343
+
"zerovec-derive",
3344
+
]
3345
+
3346
+
[[package]]
3347
+
name = "zerovec-derive"
3348
+
version = "0.11.1"
3349
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3350
+
checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
3351
+
dependencies = [
3352
+
"proc-macro2",
3353
+
"quote",
3354
+
"syn 2.0.111",
3355
+
]
3356
+
3357
+
[[package]]
3358
+
name = "zstd"
3359
+
version = "0.13.3"
3360
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3361
+
checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
3362
+
dependencies = [
3363
+
"zstd-safe",
3364
+
]
3365
+
3366
+
[[package]]
3367
+
name = "zstd-safe"
3368
+
version = "7.2.4"
3369
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3370
+
checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
3371
+
dependencies = [
3372
+
"zstd-sys",
3373
+
]
3374
+
3375
+
[[package]]
3376
+
name = "zstd-sys"
3377
+
version = "2.0.16+zstd.1.5.7"
3378
+
source = "registry+https://github.com/rust-lang/crates.io-index"
3379
+
checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
3380
+
dependencies = [
3381
+
"cc",
3382
+
"pkg-config",
2025
3383
]
+38
-10
Cargo.toml
+38
-10
Cargo.toml
···
1
1
[workspace]
2
-
members = ["wire/key_agent", "wire/lib", "wire/cli"]
2
+
members = ["crates/key_agent", "crates/core", "crates/cli"]
3
3
resolver = "2"
4
-
package.edition = "2021"
5
-
package.version = "0.5.0"
4
+
package.edition = "2024"
5
+
package.version = "1.1.1"
6
6
7
7
[workspace.metadata.crane]
8
8
name = "wire"
9
9
10
+
[workspace.lints.clippy]
11
+
pedantic = { level = "deny", priority = -1 }
12
+
missing_const_for_fn = "deny"
13
+
14
+
# annoying to deal with
15
+
missing_errors_doc = "allow"
16
+
missing_panics_doc = "allow"
17
+
10
18
[workspace.dependencies]
11
-
clap = { version = "4.5.47", features = ["derive", "string", "cargo"] }
19
+
futures-util = { version = "0.3.31", features = ["sink", "std"] }
20
+
clap = { version = "4.5.51", features = ["derive", "string", "cargo"] }
12
21
clap-verbosity-flag = "3.0.4"
13
-
serde = { version = "1.0.225", features = ["derive", "rc"] }
14
-
serde_json = { version = "1.0.145" }
15
-
tokio = { version = "1.47.1", features = ["full"] }
16
-
tracing = "0.1.41"
22
+
serde = { version = "1.0.228", features = ["derive", "rc"] }
23
+
tokio = { version = "1.48.0", features = ["full"] }
24
+
tracing = { version = "0.1.41", features = ["release_max_level_debug"] }
17
25
tracing-log = "0.2.0"
18
26
tracing-subscriber = "0.3.20"
19
27
im = { version = "15.1.0", features = ["serde"] }
20
-
anyhow = "1.0.98"
28
+
anyhow = "1.0.100"
21
29
prost = "0.14.1"
22
30
nix = { version = "0.30.1", features = ["user", "poll", "term"] }
23
31
miette = { version = "7.6.0", features = ["fancy"] }
24
-
thiserror = "2.0.16"
32
+
thiserror = "2.0.17"
33
+
sha2 = "0.10.9"
34
+
tokio-util = { version = "0.7.17", features = ["codec"] }
35
+
base64 = "0.22.1"
36
+
nix-compat = { git = "https://git.snix.dev/snix/snix.git", features = [
37
+
"serde",
38
+
"flakeref",
39
+
] }
40
+
# simd-json = { version = "0.17.0", features = [
41
+
# "serde_impl",
42
+
# # swar-number-parsing is disabled because nix never outputs
43
+
# # floats.
44
+
# # "swar-number-parsing",
45
+
# "runtime-detection",
46
+
# "hints",
47
+
# ] }
48
+
serde_json = { version = "1.0.145" }
49
+
owo-colors = { version = "4.2.3", features = ["supports-colors"] }
50
+
51
+
[profile.dev.package.sqlx-macros]
52
+
opt-level = 3
-661
LICENSE
-661
LICENSE
···
1
-
GNU AFFERO GENERAL PUBLIC LICENSE
2
-
Version 3, 19 November 2007
3
-
4
-
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5
-
Everyone is permitted to copy and distribute verbatim copies
6
-
of this license document, but changing it is not allowed.
7
-
8
-
Preamble
9
-
10
-
The GNU Affero General Public License is a free, copyleft license for
11
-
software and other kinds of works, specifically designed to ensure
12
-
cooperation with the community in the case of network server software.
13
-
14
-
The licenses for most software and other practical works are designed
15
-
to take away your freedom to share and change the works. By contrast,
16
-
our General Public Licenses are intended to guarantee your freedom to
17
-
share and change all versions of a program--to make sure it remains free
18
-
software for all its users.
19
-
20
-
When we speak of free software, we are referring to freedom, not
21
-
price. Our General Public Licenses are designed to make sure that you
22
-
have the freedom to distribute copies of free software (and charge for
23
-
them if you wish), that you receive source code or can get it if you
24
-
want it, that you can change the software or use pieces of it in new
25
-
free programs, and that you know you can do these things.
26
-
27
-
Developers that use our General Public Licenses protect your rights
28
-
with two steps: (1) assert copyright on the software, and (2) offer
29
-
you this License which gives you legal permission to copy, distribute
30
-
and/or modify the software.
31
-
32
-
A secondary benefit of defending all users' freedom is that
33
-
improvements made in alternate versions of the program, if they
34
-
receive widespread use, become available for other developers to
35
-
incorporate. Many developers of free software are heartened and
36
-
encouraged by the resulting cooperation. However, in the case of
37
-
software used on network servers, this result may fail to come about.
38
-
The GNU General Public License permits making a modified version and
39
-
letting the public access it on a server without ever releasing its
40
-
source code to the public.
41
-
42
-
The GNU Affero General Public License is designed specifically to
43
-
ensure that, in such cases, the modified source code becomes available
44
-
to the community. It requires the operator of a network server to
45
-
provide the source code of the modified version running there to the
46
-
users of that server. Therefore, public use of a modified version, on
47
-
a publicly accessible server, gives the public access to the source
48
-
code of the modified version.
49
-
50
-
An older license, called the Affero General Public License and
51
-
published by Affero, was designed to accomplish similar goals. This is
52
-
a different license, not a version of the Affero GPL, but Affero has
53
-
released a new version of the Affero GPL which permits relicensing under
54
-
this license.
55
-
56
-
The precise terms and conditions for copying, distribution and
57
-
modification follow.
58
-
59
-
TERMS AND CONDITIONS
60
-
61
-
0. Definitions.
62
-
63
-
"This License" refers to version 3 of the GNU Affero General Public License.
64
-
65
-
"Copyright" also means copyright-like laws that apply to other kinds of
66
-
works, such as semiconductor masks.
67
-
68
-
"The Program" refers to any copyrightable work licensed under this
69
-
License. Each licensee is addressed as "you". "Licensees" and
70
-
"recipients" may be individuals or organizations.
71
-
72
-
To "modify" a work means to copy from or adapt all or part of the work
73
-
in a fashion requiring copyright permission, other than the making of an
74
-
exact copy. The resulting work is called a "modified version" of the
75
-
earlier work or a work "based on" the earlier work.
76
-
77
-
A "covered work" means either the unmodified Program or a work based
78
-
on the Program.
79
-
80
-
To "propagate" a work means to do anything with it that, without
81
-
permission, would make you directly or secondarily liable for
82
-
infringement under applicable copyright law, except executing it on a
83
-
computer or modifying a private copy. Propagation includes copying,
84
-
distribution (with or without modification), making available to the
85
-
public, and in some countries other activities as well.
86
-
87
-
To "convey" a work means any kind of propagation that enables other
88
-
parties to make or receive copies. Mere interaction with a user through
89
-
a computer network, with no transfer of a copy, is not conveying.
90
-
91
-
An interactive user interface displays "Appropriate Legal Notices"
92
-
to the extent that it includes a convenient and prominently visible
93
-
feature that (1) displays an appropriate copyright notice, and (2)
94
-
tells the user that there is no warranty for the work (except to the
95
-
extent that warranties are provided), that licensees may convey the
96
-
work under this License, and how to view a copy of this License. If
97
-
the interface presents a list of user commands or options, such as a
98
-
menu, a prominent item in the list meets this criterion.
99
-
100
-
1. Source Code.
101
-
102
-
The "source code" for a work means the preferred form of the work
103
-
for making modifications to it. "Object code" means any non-source
104
-
form of a work.
105
-
106
-
A "Standard Interface" means an interface that either is an official
107
-
standard defined by a recognized standards body, or, in the case of
108
-
interfaces specified for a particular programming language, one that
109
-
is widely used among developers working in that language.
110
-
111
-
The "System Libraries" of an executable work include anything, other
112
-
than the work as a whole, that (a) is included in the normal form of
113
-
packaging a Major Component, but which is not part of that Major
114
-
Component, and (b) serves only to enable use of the work with that
115
-
Major Component, or to implement a Standard Interface for which an
116
-
implementation is available to the public in source code form. A
117
-
"Major Component", in this context, means a major essential component
118
-
(kernel, window system, and so on) of the specific operating system
119
-
(if any) on which the executable work runs, or a compiler used to
120
-
produce the work, or an object code interpreter used to run it.
121
-
122
-
The "Corresponding Source" for a work in object code form means all
123
-
the source code needed to generate, install, and (for an executable
124
-
work) run the object code and to modify the work, including scripts to
125
-
control those activities. However, it does not include the work's
126
-
System Libraries, or general-purpose tools or generally available free
127
-
programs which are used unmodified in performing those activities but
128
-
which are not part of the work. For example, Corresponding Source
129
-
includes interface definition files associated with source files for
130
-
the work, and the source code for shared libraries and dynamically
131
-
linked subprograms that the work is specifically designed to require,
132
-
such as by intimate data communication or control flow between those
133
-
subprograms and other parts of the work.
134
-
135
-
The Corresponding Source need not include anything that users
136
-
can regenerate automatically from other parts of the Corresponding
137
-
Source.
138
-
139
-
The Corresponding Source for a work in source code form is that
140
-
same work.
141
-
142
-
2. Basic Permissions.
143
-
144
-
All rights granted under this License are granted for the term of
145
-
copyright on the Program, and are irrevocable provided the stated
146
-
conditions are met. This License explicitly affirms your unlimited
147
-
permission to run the unmodified Program. The output from running a
148
-
covered work is covered by this License only if the output, given its
149
-
content, constitutes a covered work. This License acknowledges your
150
-
rights of fair use or other equivalent, as provided by copyright law.
151
-
152
-
You may make, run and propagate covered works that you do not
153
-
convey, without conditions so long as your license otherwise remains
154
-
in force. You may convey covered works to others for the sole purpose
155
-
of having them make modifications exclusively for you, or provide you
156
-
with facilities for running those works, provided that you comply with
157
-
the terms of this License in conveying all material for which you do
158
-
not control copyright. Those thus making or running the covered works
159
-
for you must do so exclusively on your behalf, under your direction
160
-
and control, on terms that prohibit them from making any copies of
161
-
your copyrighted material outside their relationship with you.
162
-
163
-
Conveying under any other circumstances is permitted solely under
164
-
the conditions stated below. Sublicensing is not allowed; section 10
165
-
makes it unnecessary.
166
-
167
-
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168
-
169
-
No covered work shall be deemed part of an effective technological
170
-
measure under any applicable law fulfilling obligations under article
171
-
11 of the WIPO copyright treaty adopted on 20 December 1996, or
172
-
similar laws prohibiting or restricting circumvention of such
173
-
measures.
174
-
175
-
When you convey a covered work, you waive any legal power to forbid
176
-
circumvention of technological measures to the extent such circumvention
177
-
is effected by exercising rights under this License with respect to
178
-
the covered work, and you disclaim any intention to limit operation or
179
-
modification of the work as a means of enforcing, against the work's
180
-
users, your or third parties' legal rights to forbid circumvention of
181
-
technological measures.
182
-
183
-
4. Conveying Verbatim Copies.
184
-
185
-
You may convey verbatim copies of the Program's source code as you
186
-
receive it, in any medium, provided that you conspicuously and
187
-
appropriately publish on each copy an appropriate copyright notice;
188
-
keep intact all notices stating that this License and any
189
-
non-permissive terms added in accord with section 7 apply to the code;
190
-
keep intact all notices of the absence of any warranty; and give all
191
-
recipients a copy of this License along with the Program.
192
-
193
-
You may charge any price or no price for each copy that you convey,
194
-
and you may offer support or warranty protection for a fee.
195
-
196
-
5. Conveying Modified Source Versions.
197
-
198
-
You may convey a work based on the Program, or the modifications to
199
-
produce it from the Program, in the form of source code under the
200
-
terms of section 4, provided that you also meet all of these conditions:
201
-
202
-
a) The work must carry prominent notices stating that you modified
203
-
it, and giving a relevant date.
204
-
205
-
b) The work must carry prominent notices stating that it is
206
-
released under this License and any conditions added under section
207
-
7. This requirement modifies the requirement in section 4 to
208
-
"keep intact all notices".
209
-
210
-
c) You must license the entire work, as a whole, under this
211
-
License to anyone who comes into possession of a copy. This
212
-
License will therefore apply, along with any applicable section 7
213
-
additional terms, to the whole of the work, and all its parts,
214
-
regardless of how they are packaged. This License gives no
215
-
permission to license the work in any other way, but it does not
216
-
invalidate such permission if you have separately received it.
217
-
218
-
d) If the work has interactive user interfaces, each must display
219
-
Appropriate Legal Notices; however, if the Program has interactive
220
-
interfaces that do not display Appropriate Legal Notices, your
221
-
work need not make them do so.
222
-
223
-
A compilation of a covered work with other separate and independent
224
-
works, which are not by their nature extensions of the covered work,
225
-
and which are not combined with it such as to form a larger program,
226
-
in or on a volume of a storage or distribution medium, is called an
227
-
"aggregate" if the compilation and its resulting copyright are not
228
-
used to limit the access or legal rights of the compilation's users
229
-
beyond what the individual works permit. Inclusion of a covered work
230
-
in an aggregate does not cause this License to apply to the other
231
-
parts of the aggregate.
232
-
233
-
6. Conveying Non-Source Forms.
234
-
235
-
You may convey a covered work in object code form under the terms
236
-
of sections 4 and 5, provided that you also convey the
237
-
machine-readable Corresponding Source under the terms of this License,
238
-
in one of these ways:
239
-
240
-
a) Convey the object code in, or embodied in, a physical product
241
-
(including a physical distribution medium), accompanied by the
242
-
Corresponding Source fixed on a durable physical medium
243
-
customarily used for software interchange.
244
-
245
-
b) Convey the object code in, or embodied in, a physical product
246
-
(including a physical distribution medium), accompanied by a
247
-
written offer, valid for at least three years and valid for as
248
-
long as you offer spare parts or customer support for that product
249
-
model, to give anyone who possesses the object code either (1) a
250
-
copy of the Corresponding Source for all the software in the
251
-
product that is covered by this License, on a durable physical
252
-
medium customarily used for software interchange, for a price no
253
-
more than your reasonable cost of physically performing this
254
-
conveying of source, or (2) access to copy the
255
-
Corresponding Source from a network server at no charge.
256
-
257
-
c) Convey individual copies of the object code with a copy of the
258
-
written offer to provide the Corresponding Source. This
259
-
alternative is allowed only occasionally and noncommercially, and
260
-
only if you received the object code with such an offer, in accord
261
-
with subsection 6b.
262
-
263
-
d) Convey the object code by offering access from a designated
264
-
place (gratis or for a charge), and offer equivalent access to the
265
-
Corresponding Source in the same way through the same place at no
266
-
further charge. You need not require recipients to copy the
267
-
Corresponding Source along with the object code. If the place to
268
-
copy the object code is a network server, the Corresponding Source
269
-
may be on a different server (operated by you or a third party)
270
-
that supports equivalent copying facilities, provided you maintain
271
-
clear directions next to the object code saying where to find the
272
-
Corresponding Source. Regardless of what server hosts the
273
-
Corresponding Source, you remain obligated to ensure that it is
274
-
available for as long as needed to satisfy these requirements.
275
-
276
-
e) Convey the object code using peer-to-peer transmission, provided
277
-
you inform other peers where the object code and Corresponding
278
-
Source of the work are being offered to the general public at no
279
-
charge under subsection 6d.
280
-
281
-
A separable portion of the object code, whose source code is excluded
282
-
from the Corresponding Source as a System Library, need not be
283
-
included in conveying the object code work.
284
-
285
-
A "User Product" is either (1) a "consumer product", which means any
286
-
tangible personal property which is normally used for personal, family,
287
-
or household purposes, or (2) anything designed or sold for incorporation
288
-
into a dwelling. In determining whether a product is a consumer product,
289
-
doubtful cases shall be resolved in favor of coverage. For a particular
290
-
product received by a particular user, "normally used" refers to a
291
-
typical or common use of that class of product, regardless of the status
292
-
of the particular user or of the way in which the particular user
293
-
actually uses, or expects or is expected to use, the product. A product
294
-
is a consumer product regardless of whether the product has substantial
295
-
commercial, industrial or non-consumer uses, unless such uses represent
296
-
the only significant mode of use of the product.
297
-
298
-
"Installation Information" for a User Product means any methods,
299
-
procedures, authorization keys, or other information required to install
300
-
and execute modified versions of a covered work in that User Product from
301
-
a modified version of its Corresponding Source. The information must
302
-
suffice to ensure that the continued functioning of the modified object
303
-
code is in no case prevented or interfered with solely because
304
-
modification has been made.
305
-
306
-
If you convey an object code work under this section in, or with, or
307
-
specifically for use in, a User Product, and the conveying occurs as
308
-
part of a transaction in which the right of possession and use of the
309
-
User Product is transferred to the recipient in perpetuity or for a
310
-
fixed term (regardless of how the transaction is characterized), the
311
-
Corresponding Source conveyed under this section must be accompanied
312
-
by the Installation Information. But this requirement does not apply
313
-
if neither you nor any third party retains the ability to install
314
-
modified object code on the User Product (for example, the work has
315
-
been installed in ROM).
316
-
317
-
The requirement to provide Installation Information does not include a
318
-
requirement to continue to provide support service, warranty, or updates
319
-
for a work that has been modified or installed by the recipient, or for
320
-
the User Product in which it has been modified or installed. Access to a
321
-
network may be denied when the modification itself materially and
322
-
adversely affects the operation of the network or violates the rules and
323
-
protocols for communication across the network.
324
-
325
-
Corresponding Source conveyed, and Installation Information provided,
326
-
in accord with this section must be in a format that is publicly
327
-
documented (and with an implementation available to the public in
328
-
source code form), and must require no special password or key for
329
-
unpacking, reading or copying.
330
-
331
-
7. Additional Terms.
332
-
333
-
"Additional permissions" are terms that supplement the terms of this
334
-
License by making exceptions from one or more of its conditions.
335
-
Additional permissions that are applicable to the entire Program shall
336
-
be treated as though they were included in this License, to the extent
337
-
that they are valid under applicable law. If additional permissions
338
-
apply only to part of the Program, that part may be used separately
339
-
under those permissions, but the entire Program remains governed by
340
-
this License without regard to the additional permissions.
341
-
342
-
When you convey a copy of a covered work, you may at your option
343
-
remove any additional permissions from that copy, or from any part of
344
-
it. (Additional permissions may be written to require their own
345
-
removal in certain cases when you modify the work.) You may place
346
-
additional permissions on material, added by you to a covered work,
347
-
for which you have or can give appropriate copyright permission.
348
-
349
-
Notwithstanding any other provision of this License, for material you
350
-
add to a covered work, you may (if authorized by the copyright holders of
351
-
that material) supplement the terms of this License with terms:
352
-
353
-
a) Disclaiming warranty or limiting liability differently from the
354
-
terms of sections 15 and 16 of this License; or
355
-
356
-
b) Requiring preservation of specified reasonable legal notices or
357
-
author attributions in that material or in the Appropriate Legal
358
-
Notices displayed by works containing it; or
359
-
360
-
c) Prohibiting misrepresentation of the origin of that material, or
361
-
requiring that modified versions of such material be marked in
362
-
reasonable ways as different from the original version; or
363
-
364
-
d) Limiting the use for publicity purposes of names of licensors or
365
-
authors of the material; or
366
-
367
-
e) Declining to grant rights under trademark law for use of some
368
-
trade names, trademarks, or service marks; or
369
-
370
-
f) Requiring indemnification of licensors and authors of that
371
-
material by anyone who conveys the material (or modified versions of
372
-
it) with contractual assumptions of liability to the recipient, for
373
-
any liability that these contractual assumptions directly impose on
374
-
those licensors and authors.
375
-
376
-
All other non-permissive additional terms are considered "further
377
-
restrictions" within the meaning of section 10. If the Program as you
378
-
received it, or any part of it, contains a notice stating that it is
379
-
governed by this License along with a term that is a further
380
-
restriction, you may remove that term. If a license document contains
381
-
a further restriction but permits relicensing or conveying under this
382
-
License, you may add to a covered work material governed by the terms
383
-
of that license document, provided that the further restriction does
384
-
not survive such relicensing or conveying.
385
-
386
-
If you add terms to a covered work in accord with this section, you
387
-
must place, in the relevant source files, a statement of the
388
-
additional terms that apply to those files, or a notice indicating
389
-
where to find the applicable terms.
390
-
391
-
Additional terms, permissive or non-permissive, may be stated in the
392
-
form of a separately written license, or stated as exceptions;
393
-
the above requirements apply either way.
394
-
395
-
8. Termination.
396
-
397
-
You may not propagate or modify a covered work except as expressly
398
-
provided under this License. Any attempt otherwise to propagate or
399
-
modify it is void, and will automatically terminate your rights under
400
-
this License (including any patent licenses granted under the third
401
-
paragraph of section 11).
402
-
403
-
However, if you cease all violation of this License, then your
404
-
license from a particular copyright holder is reinstated (a)
405
-
provisionally, unless and until the copyright holder explicitly and
406
-
finally terminates your license, and (b) permanently, if the copyright
407
-
holder fails to notify you of the violation by some reasonable means
408
-
prior to 60 days after the cessation.
409
-
410
-
Moreover, your license from a particular copyright holder is
411
-
reinstated permanently if the copyright holder notifies you of the
412
-
violation by some reasonable means, this is the first time you have
413
-
received notice of violation of this License (for any work) from that
414
-
copyright holder, and you cure the violation prior to 30 days after
415
-
your receipt of the notice.
416
-
417
-
Termination of your rights under this section does not terminate the
418
-
licenses of parties who have received copies or rights from you under
419
-
this License. If your rights have been terminated and not permanently
420
-
reinstated, you do not qualify to receive new licenses for the same
421
-
material under section 10.
422
-
423
-
9. Acceptance Not Required for Having Copies.
424
-
425
-
You are not required to accept this License in order to receive or
426
-
run a copy of the Program. Ancillary propagation of a covered work
427
-
occurring solely as a consequence of using peer-to-peer transmission
428
-
to receive a copy likewise does not require acceptance. However,
429
-
nothing other than this License grants you permission to propagate or
430
-
modify any covered work. These actions infringe copyright if you do
431
-
not accept this License. Therefore, by modifying or propagating a
432
-
covered work, you indicate your acceptance of this License to do so.
433
-
434
-
10. Automatic Licensing of Downstream Recipients.
435
-
436
-
Each time you convey a covered work, the recipient automatically
437
-
receives a license from the original licensors, to run, modify and
438
-
propagate that work, subject to this License. You are not responsible
439
-
for enforcing compliance by third parties with this License.
440
-
441
-
An "entity transaction" is a transaction transferring control of an
442
-
organization, or substantially all assets of one, or subdividing an
443
-
organization, or merging organizations. If propagation of a covered
444
-
work results from an entity transaction, each party to that
445
-
transaction who receives a copy of the work also receives whatever
446
-
licenses to the work the party's predecessor in interest had or could
447
-
give under the previous paragraph, plus a right to possession of the
448
-
Corresponding Source of the work from the predecessor in interest, if
449
-
the predecessor has it or can get it with reasonable efforts.
450
-
451
-
You may not impose any further restrictions on the exercise of the
452
-
rights granted or affirmed under this License. For example, you may
453
-
not impose a license fee, royalty, or other charge for exercise of
454
-
rights granted under this License, and you may not initiate litigation
455
-
(including a cross-claim or counterclaim in a lawsuit) alleging that
456
-
any patent claim is infringed by making, using, selling, offering for
457
-
sale, or importing the Program or any portion of it.
458
-
459
-
11. Patents.
460
-
461
-
A "contributor" is a copyright holder who authorizes use under this
462
-
License of the Program or a work on which the Program is based. The
463
-
work thus licensed is called the contributor's "contributor version".
464
-
465
-
A contributor's "essential patent claims" are all patent claims
466
-
owned or controlled by the contributor, whether already acquired or
467
-
hereafter acquired, that would be infringed by some manner, permitted
468
-
by this License, of making, using, or selling its contributor version,
469
-
but do not include claims that would be infringed only as a
470
-
consequence of further modification of the contributor version. For
471
-
purposes of this definition, "control" includes the right to grant
472
-
patent sublicenses in a manner consistent with the requirements of
473
-
this License.
474
-
475
-
Each contributor grants you a non-exclusive, worldwide, royalty-free
476
-
patent license under the contributor's essential patent claims, to
477
-
make, use, sell, offer for sale, import and otherwise run, modify and
478
-
propagate the contents of its contributor version.
479
-
480
-
In the following three paragraphs, a "patent license" is any express
481
-
agreement or commitment, however denominated, not to enforce a patent
482
-
(such as an express permission to practice a patent or covenant not to
483
-
sue for patent infringement). To "grant" such a patent license to a
484
-
party means to make such an agreement or commitment not to enforce a
485
-
patent against the party.
486
-
487
-
If you convey a covered work, knowingly relying on a patent license,
488
-
and the Corresponding Source of the work is not available for anyone
489
-
to copy, free of charge and under the terms of this License, through a
490
-
publicly available network server or other readily accessible means,
491
-
then you must either (1) cause the Corresponding Source to be so
492
-
available, or (2) arrange to deprive yourself of the benefit of the
493
-
patent license for this particular work, or (3) arrange, in a manner
494
-
consistent with the requirements of this License, to extend the patent
495
-
license to downstream recipients. "Knowingly relying" means you have
496
-
actual knowledge that, but for the patent license, your conveying the
497
-
covered work in a country, or your recipient's use of the covered work
498
-
in a country, would infringe one or more identifiable patents in that
499
-
country that you have reason to believe are valid.
500
-
501
-
If, pursuant to or in connection with a single transaction or
502
-
arrangement, you convey, or propagate by procuring conveyance of, a
503
-
covered work, and grant a patent license to some of the parties
504
-
receiving the covered work authorizing them to use, propagate, modify
505
-
or convey a specific copy of the covered work, then the patent license
506
-
you grant is automatically extended to all recipients of the covered
507
-
work and works based on it.
508
-
509
-
A patent license is "discriminatory" if it does not include within
510
-
the scope of its coverage, prohibits the exercise of, or is
511
-
conditioned on the non-exercise of one or more of the rights that are
512
-
specifically granted under this License. You may not convey a covered
513
-
work if you are a party to an arrangement with a third party that is
514
-
in the business of distributing software, under which you make payment
515
-
to the third party based on the extent of your activity of conveying
516
-
the work, and under which the third party grants, to any of the
517
-
parties who would receive the covered work from you, a discriminatory
518
-
patent license (a) in connection with copies of the covered work
519
-
conveyed by you (or copies made from those copies), or (b) primarily
520
-
for and in connection with specific products or compilations that
521
-
contain the covered work, unless you entered into that arrangement,
522
-
or that patent license was granted, prior to 28 March 2007.
523
-
524
-
Nothing in this License shall be construed as excluding or limiting
525
-
any implied license or other defenses to infringement that may
526
-
otherwise be available to you under applicable patent law.
527
-
528
-
12. No Surrender of Others' Freedom.
529
-
530
-
If conditions are imposed on you (whether by court order, agreement or
531
-
otherwise) that contradict the conditions of this License, they do not
532
-
excuse you from the conditions of this License. If you cannot convey a
533
-
covered work so as to satisfy simultaneously your obligations under this
534
-
License and any other pertinent obligations, then as a consequence you may
535
-
not convey it at all. For example, if you agree to terms that obligate you
536
-
to collect a royalty for further conveying from those to whom you convey
537
-
the Program, the only way you could satisfy both those terms and this
538
-
License would be to refrain entirely from conveying the Program.
539
-
540
-
13. Remote Network Interaction; Use with the GNU General Public License.
541
-
542
-
Notwithstanding any other provision of this License, if you modify the
543
-
Program, your modified version must prominently offer all users
544
-
interacting with it remotely through a computer network (if your version
545
-
supports such interaction) an opportunity to receive the Corresponding
546
-
Source of your version by providing access to the Corresponding Source
547
-
from a network server at no charge, through some standard or customary
548
-
means of facilitating copying of software. This Corresponding Source
549
-
shall include the Corresponding Source for any work covered by version 3
550
-
of the GNU General Public License that is incorporated pursuant to the
551
-
following paragraph.
552
-
553
-
Notwithstanding any other provision of this License, you have
554
-
permission to link or combine any covered work with a work licensed
555
-
under version 3 of the GNU General Public License into a single
556
-
combined work, and to convey the resulting work. The terms of this
557
-
License will continue to apply to the part which is the covered work,
558
-
but the work with which it is combined will remain governed by version
559
-
3 of the GNU General Public License.
560
-
561
-
14. Revised Versions of this License.
562
-
563
-
The Free Software Foundation may publish revised and/or new versions of
564
-
the GNU Affero General Public License from time to time. Such new versions
565
-
will be similar in spirit to the present version, but may differ in detail to
566
-
address new problems or concerns.
567
-
568
-
Each version is given a distinguishing version number. If the
569
-
Program specifies that a certain numbered version of the GNU Affero General
570
-
Public License "or any later version" applies to it, you have the
571
-
option of following the terms and conditions either of that numbered
572
-
version or of any later version published by the Free Software
573
-
Foundation. If the Program does not specify a version number of the
574
-
GNU Affero General Public License, you may choose any version ever published
575
-
by the Free Software Foundation.
576
-
577
-
If the Program specifies that a proxy can decide which future
578
-
versions of the GNU Affero General Public License can be used, that proxy's
579
-
public statement of acceptance of a version permanently authorizes you
580
-
to choose that version for the Program.
581
-
582
-
Later license versions may give you additional or different
583
-
permissions. However, no additional obligations are imposed on any
584
-
author or copyright holder as a result of your choosing to follow a
585
-
later version.
586
-
587
-
15. Disclaimer of Warranty.
588
-
589
-
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590
-
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591
-
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592
-
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593
-
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594
-
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595
-
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596
-
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597
-
598
-
16. Limitation of Liability.
599
-
600
-
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601
-
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602
-
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603
-
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604
-
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605
-
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606
-
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607
-
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608
-
SUCH DAMAGES.
609
-
610
-
17. Interpretation of Sections 15 and 16.
611
-
612
-
If the disclaimer of warranty and limitation of liability provided
613
-
above cannot be given local legal effect according to their terms,
614
-
reviewing courts shall apply local law that most closely approximates
615
-
an absolute waiver of all civil liability in connection with the
616
-
Program, unless a warranty or assumption of liability accompanies a
617
-
copy of the Program in return for a fee.
618
-
619
-
END OF TERMS AND CONDITIONS
620
-
621
-
How to Apply These Terms to Your New Programs
622
-
623
-
If you develop a new program, and you want it to be of the greatest
624
-
possible use to the public, the best way to achieve this is to make it
625
-
free software which everyone can redistribute and change under these terms.
626
-
627
-
To do so, attach the following notices to the program. It is safest
628
-
to attach them to the start of each source file to most effectively
629
-
state the exclusion of warranty; and each file should have at least
630
-
the "copyright" line and a pointer to where the full notice is found.
631
-
632
-
<one line to give the program's name and a brief idea of what it does.>
633
-
Copyright (C) <year> <name of author>
634
-
635
-
This program is free software: you can redistribute it and/or modify
636
-
it under the terms of the GNU Affero General Public License as published
637
-
by the Free Software Foundation, either version 3 of the License, or
638
-
(at your option) any later version.
639
-
640
-
This program is distributed in the hope that it will be useful,
641
-
but WITHOUT ANY WARRANTY; without even the implied warranty of
642
-
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643
-
GNU Affero General Public License for more details.
644
-
645
-
You should have received a copy of the GNU Affero General Public License
646
-
along with this program. If not, see <https://www.gnu.org/licenses/>.
647
-
648
-
Also add information on how to contact you by electronic and paper mail.
649
-
650
-
If your software can interact with users remotely through a computer
651
-
network, you should also make sure that it provides a way for users to
652
-
get its source. For example, if your program is a web application, its
653
-
interface could display a "Source" link that leads users to an archive
654
-
of the code. There are many ways you could offer source, and different
655
-
solutions will be better for different programs; see section 13 for the
656
-
specific requirements.
657
-
658
-
You should also get your employer (if you work as a programmer) or school,
659
-
if any, to sign a "copyright disclaimer" for the program, if necessary.
660
-
For more information on this, and how to apply and follow the GNU AGPL, see
661
-
<https://www.gnu.org/licenses/>.
+4
-40
README.md
+4
-40
README.md
···
1
-
# wire
2
-
3
-

4
-

5
-

1
+

2
+

3
+

6
4
7
5
wire is a tool to deploy nixos systems. its usage is inspired by colmena however it is not a fork.
8
6
9
-
Read the [The Guide](https://wire.althaea.zone/guide/wire.html), or continue reading this readme for development information.
10
-
11
-
## Tree Layout
12
-
13
-
```
14
-
wire
15
-
โโโ wire
16
-
โ โโโ lib
17
-
โ โ โโโ Rust library containing business logic, consumed by `wire`
18
-
โ โโโ cli
19
-
โ โ โโโ Rust binary, using `lib`
20
-
โ โโโ key_agent
21
-
โ โโโ Rust binary run on a target node. Receives key file bytes and metadata w/ protobuf over SSH stdin
22
-
โโโ doc
23
-
โ โโโ a [vitepress](https://vitepress.dev/) site
24
-
โโโ runtime
25
-
โ โโโ Nix files used during runtime to evaluate nodes
26
-
โโโtests
27
-
โโโ Directories used during cargo & NixOS VM testing
28
-
```
7
+
Read [The Tutorial](https://wire.althaea.zone/tutorial/overview.html), the [Guides](https://wire.althaea.zone/guides/installation.html), or continue reading this readme for development information.
29
8
30
9
## Development
31
10
32
11
Please use `nix develop` for access to the development environment and to ensure
33
12
your changes are run against the defined git hooks. For simplicity, you may wish
34
13
to use [direnv](https://github.com/direnv/direnv).
35
-
36
-
### Testing
37
-
38
-
#### dhat profiling
39
-
40
-
```sh
41
-
$ just build-dhat
42
-
```
43
-
44
-
#### Testing
45
-
46
-
```sh
47
-
$ cargo test
48
-
$ nix flake check
49
-
```
+24
bench/README.md
+24
bench/README.md
···
1
+
# Bench
2
+
3
+
This directory contains a little tool to run hyperfine against wire and colmena, deploying the exact same hive.
4
+
5
+
The hive can be found in `default.nix`.
6
+
7
+
Run the test with `nix run .#checks.x86_64-linux.bench.driverInteractive -vvv -L
8
+
--show-trace --impure`
9
+
10
+
Then run `test_script()`
11
+
12
+
No idea why running the test directly breaks it....
13
+
14
+
You can adjust the number of nodes in `num-nodes.nix`
15
+
16
+
The hive has around 20 nodes and 200 keys each. 80% of the keys are pre-activation, 20% post-activation.
17
+
18
+
| Command | Mean [s] | Min [s] | Max [s] | Relative |
19
+
| :----------------------- | --------------: | ------: | ------: | ----------: |
20
+
| `wire@HEAD - flake` | 89.825 ± 22.941 | 78.190 | 130.831 | 1.00 |
21
+
| `wire@stable - flake` | 133.664 ± 0.303 | 133.219 | 134.044 | 1.49 ± 0.38 |
22
+
| `colmena@pinned - flake` | 131.544 ± 1.076 | 130.330 | 133.211 | 1.46 ± 0.37 |
23
+
| `wire@stable - hive.nix` | 133.070 ± 0.805 | 132.166 | 134.209 | 1.48 ± 0.38 |
24
+
| `wire@HEAD - hive.nix` | 130.287 ± 1.456 | 128.980 | 132.699 | 1.45 ± 0.37 |
+4
bench/colmena/hive.nix
+4
bench/colmena/hive.nix
+11
bench/colmena-flake/flake.nix
+11
bench/colmena-flake/flake.nix
+36
bench/default.nix
+36
bench/default.nix
···
1
+
{ flake }:
2
+
let
3
+
nixpkgs = import flake.inputs.nixpkgs {
4
+
system = "x86_64-linux";
5
+
};
6
+
7
+
vmNode =
8
+
index:
9
+
nixpkgs.lib.nameValuePair "node_${builtins.toString index}" {
10
+
imports = [
11
+
./vm.nix
12
+
flake.checks."x86_64-linux"."bench".nodes."node_${builtins.toString index}".system.build.networkConfig
13
+
];
14
+
15
+
_module.args = {
16
+
index = builtins.toString index;
17
+
};
18
+
19
+
deployment.keys = builtins.listToAttrs (
20
+
builtins.map (
21
+
index:
22
+
nixpkgs.lib.nameValuePair "key-${builtins.toString index}" {
23
+
keyFile = ./key.txt;
24
+
# 80% of keys pre activation, 20% post activation.
25
+
uploadAt = if index <= (200 * 0.8) then "pre-activation" else "post-activation";
26
+
}
27
+
) (nixpkgs.lib.range 0 200)
28
+
);
29
+
30
+
nixpkgs.hostPlatform = "x86_64-linux";
31
+
};
32
+
in
33
+
{
34
+
meta.nixpkgs = nixpkgs;
35
+
}
36
+
// builtins.listToAttrs (builtins.map vmNode (nixpkgs.lib.range 0 (import ./num-nodes.nix)))
+1
bench/key.txt
+1
bench/key.txt
···
1
+
blixwqqujqfyfpsqcgnazvfmgoowggimroqngrbzvvawxprfrewrxgawcnkqqhlwkesyrpykabsshmyxjishnfphequzekyxtsacwqpruugcarendkmswnzercczxympaaddadqsxwtxbghvdjjtqrdfvmelnlloiczpafnccdognkfpugcbibutxsxxkucfdmewfqnqrjukoigckktqhdoaimhnpcpmdlkqkrsuakmyymzmoxxkwofpxgdpwxogjtrexratgzkgbrouiwuobefuweuhtlyyuigwniyqhvmsqxcxwcrbjgnmypbthuxzukkuiptbqhbgvbnktwthztrfjetthpguagswaqfpcykshvkzechxyruchddaqjribnajpvboopqnbstxqnuyawsrsqnljcwwzegcothixqltfmxssectaamgscngwejmadtmqclfuqukfxehwlitnhnhcfzsnandlyathmyxjvhxauvvjpkzxzkswgiyizkyvgmhlzildlxxrkejssslqodzjhbifddvqomqvbwyodnafluyctzyvnnpkslhnuhwnpjqnrtakqeezknstjjtijrnxsvbvwnmwjxoiwdysevzqvvbscjwvhvrzgmsosjbupchvsijhzemltvbpolsrgrcnlsvpqdxhrgmfhozzajavbshnzzdndcsjzvoahosxonpgragvqivolulvjybhnytnvrddicgdcdhbltlkbdzyyhltymvuhozkjrtsraucpufqpzkoexpsioagkblbouspmmwukeoprmbbbtpxlduxleaooaoynzfqmenxrpospatazsgsvghocochzqwaqzyypkarzuyhtgxlfoizntygobpbdikuhgmwupinslzazgbnngsekukaovfmhsvqgleulkywmxemebgyrilarghiyvaxwpdxkzpttrjkfrabdlqyqmrwyrvjqmtaghsoernqkbbhvkkzegdpadgoibladfjumvbckaxgleduswvxwproweukscpbjqnzibvpopariarmgwspfxfijljnkdmifynlpbcfbtuvxbupjuuzpaopadpszrltyrlotsdhiqduqqqdmnxxrzhtnwgenshdwnljcziylvmrjschfgifjumjrxyvfqgmywbtgpfnmnbhrjkbmczzabrbsqggcgsrvvlweejqyckvezcpqawiqvjxrdbrphcnakwjjgrgmonuyejnjojmvpghdhzmtmcdthnzgvbihsgowgelxbwmpevzcvkslmzcoqbmhtiyjjzjrfxgzoidqulslrfjnnbyzglvpulucpytanlyywptrwzvkrrzrzlgbhsjsqrvixrzkevfgxtoshzpugridbtyzporfburhwmccglhqdbgxqgqipuzefsrwhmtfmqdmmtcttcfpvqmxunhyofmktispmtgibgolhwaugvwtiiyrxlligmctgzhpajcwqsguoymxesdufzbivzvidnnccozkkyrqauxwknjarmtfgdeiqopozgxhlidoiyxdvigookwhukhnaizmxlfdwsioxgmgxegayzuxwanujnqfexhzqtgubqtbmozouighccucdgxteesnitceqhjxlemnoccxioalsuhlnchrhdejffupgkapwyhhynzmalsycgokkzckbufcegsjrnuuaxdqqhmuehftjbvemozitbtuleycbxcbavtqicsqgkmsclxqhzhqnsbzqbnbqwoehqyjnmrvowkfejcrrxnfqxgmnlfucruowgsoakjhrjnawlcnqkbyvlccymsyfaaoiyubzwrkcxzjxsvdtipwyuwzqdjminswqxlmymzijiyhmcgqvcyshzkjbhbwncqguehlacmmzlshxjfvvjftqlpqqdbdbrnxdutjeuzvzjveoxotpwfzysbvkavfkiyhnttfitpfqwniixyzapbwevyyyibjlzbscyppbmmldn
zqesykqjjhwgpvlugsedjwjwxjdgzkhytwkhzsopdkznyxhwctwwqryqtdivpbhxmxnfcumowbfzoqpmtmdwwfgsezhdkldejqnwgululpnbwlrmtkdvyzzsmxakthtrnhwcttkkrzicxyslroovbptajumzzryjwqwgxfgfogfnhgawprylaypbkxxhejpdzxasgeypacicudzlacdzrxpqqxujnmxyossjfiglycnqutqfcifdsfbnewgemphzwflfpcdyusakecerfzlhsyqhmgklshdghnsxazjcfkbaciqobngxztbpyaeriolrzlcqevdkrcszjshkxtndetsbfoppqmmijlchvsdxzsouasnpgkovfykruteyykzsnkbesnjdreqfujyeawwwqvhboszotkiyrazknxrzymneedaalzibwpizlfqahaozrnydrcfgcklzpccychatkwcsbmwcbtwjexakjqygiwdjpbwrpzjntmtykdylfcqywotfsesesjlbmzanruxooslcybgbsoytcjlneacdyxbwdkhisoistamrqwaugebbzapujycvthhzrjaexwnsfnvdhzwaxawwqekwsuuacemehxmctvjefpdyzzhcxvmtivvdmspgxvvdmkersxfqfmegujbauernsckhqveqcgkkymgmicjgjdpuvpylqpzwcemawhgcxoebsvagwvwxytiqfnnbyfuzpindozdcgeriemnjptwgwpoyfibndyrimnhlxtpplkxzqflsxxihluqeiyxcxbvpsijtdvevswohsfjpvyhciwoljbslamdlxvabqnrxjdfxihderjllbxqflldskprfkyuwwyatkdkdznqdaaveujtmnivvfwgabpespniwljolxluiohgrcawiulcsctkihxjluryxfldnbioqghwzhrkcusdzzttmxvzbprtquymjomftiybhzuxcvqlbkaochlyyapjthrgcujxjpcogqblfpblydnreiolwrvxdczjefsurtylstjzcgemgliiyndsyotojyjopvmlzlogmiievgrzltdotktosehiexbqgfwsfrilqlhununycgzrrdhjnethzkjsaluhgflzxwvmxyiylulrbnqozlrsaytyvkngcxtokwzolnrfmmfpnnojqriwjtwmemsaberjjipwomuptonsfqiojqcczxunwoeqnvvzltqaptkjwgjwfilrptyyexbsabrugibvknhmydvknlbjamvwbfiiaemcaxqpmvujhrjqkeabbicrwtweyfwfapmnqinthozpkdiofukezcgfmtbspczfiervzeptdpnxfzzuiwtwlcbamlblcbjbtaljfvmyfbohtuvtttqdqmzchkmpzmtqzupzqqivjgcwlxxcuexykpslrylraxzsqrvbmfscsnsxaywzryskllgkyiudkhvxlajtonhukromstlnljtgaspwymmyunxxvigepcxwwlgxcmfwagvlnlgevdcbenfenogywefabmdbudpemiwvdxiqvncplehtbvrjkqloihgebbzshlnrjvbzqbbblkzzifaraviubwehgbjvzdrnyvpdvlyvmvfzzuwgfthresdrwyypmgnxexuhvsdhtdefqeyhfwtefkbxxaucezxsantnntnljvspsatbtdywjvhrxabxicsxqixypsliqofexlztcearqjruxndflxljrmkltbqvlwfeznttkeakdjhbdqzdyebfgwlgxnndsfsgzevhuaifikyqgvnndqtwpizwtazekveutwuwjkklwekkilcrsixzvqhceqiwcdqhdhpkqtrgwzztbwfvywicemwttlypyfwojluidadztbqqxvzfebdakwcquqfnnjelzjbnjmjqrfioiuhrqmcrmkcrjxzkymayunmxchbjeppfgugrsguamzbjuaebolaskixgddkux
fkredmtbvmfgijfckymxkfjoyriihizwvxkayruichaxxrmxqpvcwohcyhmgejjekybsleqepgmtqgrrsmwtqsdgtjaicaanwaggkukdhxadbnedgnilohnlbugmsbsjstqwwpsjqxyezoaeqitgdbtnlpnicqrhmlklokbhyurgdkflpvkwlhkkznqwsjjywesuopyylvjgzgfbsgwopjaiokdvfkgyzsjdtoytbxrinjazyhvytkpyrjybzgtknyddqywgyiwqtarpgmarpqnpndumisfzcmarhhqikmepfmpcvpfyougjujqjtfkbghwgxfrlklbiuqipppzfdlrnatenccjgssgszemfbtrguikiheejhgsrchjolozflgrrgnkzfwbspzabcsvarosquqhsqqxammqrqxklvazseeyzpofkkgpsmpetmwducilvwxoxnjhupccpvllajckrbhssktylvyffwoaaqajgupzkjaxedebwibkmpcudnkrageojafqrkbmydccghgmtnuuyiewasdoqzmfplspbqlhaoxvaypccwonetojkaasiiykeeckqmhanrfepbzdseycieuedvilmqwnteeqrseofabfvlpozbcedfuxpilrydpesnwclfzocfaehgphigecvraooopwtaxiysdhxhdoybsmanpckxhbqhzhcpnvcngdmfoquvrddhwoyuuprbqoillrvmxwyjvukjczzdqhlcrsssdpnvepbyirpxwymcqvzpmrddizjsrqmawdqhbymcdtfcjpbxlsqvlkyecroiivzqctvizzcadafztndwvfqrbrbntpbqcjlbwjuhlhwbeivdmimyeewizqtzlfwdcnsdfaupdewbgwfnkrzcunhobzjqgfppnuubcvbysbmrgjzjwhcfrxdodfsyijgprzfprryctwpawqdjpgljhrifspdbmczcrhlhwnbcchnxopkaksbcyxq
+1
bench/num-nodes.nix
+1
bench/num-nodes.nix
···
1
+
1
+169
bench/runner.nix
+169
bench/runner.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
{
5
+
lib,
6
+
inputs,
7
+
...
8
+
}:
9
+
let
10
+
inherit (lib)
11
+
mapAttrsToList
12
+
flatten
13
+
;
14
+
in
15
+
{
16
+
config.perSystem =
17
+
{
18
+
pkgs,
19
+
self',
20
+
system,
21
+
...
22
+
}:
23
+
let
24
+
benchDirFileset = lib.fileset.toSource {
25
+
root = ../..;
26
+
fileset = lib.fileset.union ./. (
27
+
lib.fileset.fileFilter (
28
+
file: (file.hasExt "nix") || (file.hasExt "txt") || (file.hasExt "lock")
29
+
) ../.
30
+
);
31
+
};
32
+
33
+
nodes =
34
+
builtins.listToAttrs (
35
+
builtins.map (index: {
36
+
value = {
37
+
imports = [
38
+
./vm.nix
39
+
];
40
+
41
+
_module.args = {
42
+
index = builtins.toString index;
43
+
};
44
+
};
45
+
name = "node_${builtins.toString index}";
46
+
}) (lib.range 0 (import ./num-nodes.nix))
47
+
)
48
+
// {
49
+
deployer = {
50
+
imports = [
51
+
./vm.nix
52
+
];
53
+
54
+
environment.systemPackages = [
55
+
pkgs.git
56
+
57
+
(pkgs.writeShellScriptBin "setup-benchmark" ''
58
+
mkdir -p $HOME/wire
59
+
cp -r ${benchDirFileset}/*-source/* $HOME/wire
60
+
61
+
cp -r $HOME/wire/bench/wire-flake $HOME/wire-flake
62
+
cp -r $HOME/wire/bench/colmena-flake $HOME/colmena-flake
63
+
64
+
chmod -R +w $HOME/*
65
+
66
+
cd $HOME/wire
67
+
git init .
68
+
git add -A
69
+
'')
70
+
71
+
(pkgs.writeShellScriptBin "run-benchmark" ''
72
+
bench_dir=$HOME/wire/bench
73
+
74
+
wire_args="apply test --path $bench_dir/wire -vv --ssh-accept-host -p 10"
75
+
wire_args_flake="apply test --path $HOME/wire-flake -vv --ssh-accept-host -p 10"
76
+
77
+
colmena_args="apply test --config $bench_dir/colmena/hive.nix -v -p 10"
78
+
colmena_args_flake="apply test --config $HOME/colmena-flake/flake.nix -v -p 10"
79
+
80
+
${lib.getExe pkgs.hyperfine} --warmup 1 --show-output --runs 5 \
81
+
--export-markdown stats.md \
82
+
--export-json run.json \
83
+
"${lib.getExe self'.packages.wire-small} $wire_args_flake" -n "wire@HEAD - flake" \
84
+
"${lib.getExe' inputs.colmena_benchmarking.packages.x86_64-linux.colmena "colmena"} $colmena_args_flake" \
85
+
-n "colmena@pinned - flake" \
86
+
"${lib.getExe self'.packages.wire-small} $wire_args" -n "wire@HEAD - hive.nix"
87
+
'')
88
+
];
89
+
90
+
_module.args = {
91
+
index = "deployer";
92
+
};
93
+
};
94
+
};
95
+
96
+
evalConfig = import (pkgs.path + "/nixos/lib/eval-config.nix");
97
+
98
+
evalVM =
99
+
module:
100
+
evalConfig {
101
+
inherit system;
102
+
modules = [ module ];
103
+
};
104
+
in
105
+
{
106
+
checks.bench = pkgs.testers.runNixOSTest {
107
+
inherit nodes;
108
+
109
+
name = "benchmark";
110
+
111
+
defaults =
112
+
_:
113
+
let
114
+
# hive = builtins.scopedImport {
115
+
# __nixPath = _b: null;
116
+
# __findFile = _path: name: if name == "nixpkgs" then pkgs.path else throw "oops!!";
117
+
# } "${injectedFlakeDir}/${path}/hive.nix";
118
+
119
+
# fetch **all** dependencies of a flake
120
+
# it's called fetchLayer because my naming skills are awful
121
+
fetchLayer =
122
+
input:
123
+
let
124
+
subLayers = if input ? inputs then map fetchLayer (builtins.attrValues input.inputs) else [ ];
125
+
in
126
+
[
127
+
input.outPath
128
+
]
129
+
++ subLayers;
130
+
in
131
+
{
132
+
virtualisation.additionalPaths = flatten [
133
+
(mapAttrsToList (_: val: (evalVM val).config.system.build.toplevel.drvPath) nodes)
134
+
(mapAttrsToList (_: fetchLayer) inputs)
135
+
];
136
+
137
+
nix.settings.experimental-features = [
138
+
"nix-command"
139
+
"flakes"
140
+
];
141
+
};
142
+
node.specialArgs = {
143
+
snakeOil = import "${pkgs.path}/nixos/tests/ssh-keys.nix" pkgs;
144
+
inherit (self'.packages) wire-small-dev;
145
+
};
146
+
skipTypeCheck = true;
147
+
testScript = ''
148
+
start_all()
149
+
150
+
for i in range(0,${builtins.toString (import ./num-nodes.nix)}):
151
+
machine = globals().get(f"node_{i}")
152
+
machine.wait_for_unit("sshd.service") # type: ignore
153
+
154
+
node_deployer.succeed("setup-benchmark");
155
+
node_deployer.succeed("run-benchmark");
156
+
157
+
node_deployer.copy_from_vm("run.json")
158
+
node_deployer.copy_from_vm("stats.json")
159
+
'';
160
+
};
161
+
};
162
+
}
163
+
164
+
# "${
165
+
# lib.getExe (builtins.getFlake "github:mrshmllow/wire/stable").packages.${system}.wire-small
166
+
# } $wire_args" -n "wire@stable - hive.nix" \
167
+
# "${
168
+
# lib.getExe (builtins.getFlake "github:mrshmllow/wire/stable").packages.${system}.wire-small
169
+
# } $wire_args_flake" -n "wire@stable - flake" \
+67
bench/vm.nix
+67
bench/vm.nix
···
1
+
{
2
+
lib,
3
+
index,
4
+
modulesPath,
5
+
pkgs,
6
+
...
7
+
}:
8
+
let
9
+
flake = import ../default.nix;
10
+
snakeOil = import "${pkgs.path}/nixos/tests/ssh-keys.nix" pkgs;
11
+
in
12
+
{
13
+
imports = [
14
+
"${flake.inputs.nixpkgs}/nixos/modules/virtualisation/qemu-vm.nix"
15
+
"${modulesPath}/virtualisation/qemu-vm.nix"
16
+
"${modulesPath}/testing/test-instrumentation.nix"
17
+
];
18
+
19
+
networking.hostName = "node_${index}";
20
+
21
+
boot = {
22
+
loader = {
23
+
systemd-boot.enable = true;
24
+
efi.canTouchEfiVariables = true;
25
+
};
26
+
};
27
+
28
+
environment.variables.XDG_RUNTIME_DIR = "/tmp";
29
+
30
+
services = {
31
+
openssh = {
32
+
enable = true;
33
+
settings = {
34
+
PermitRootLogin = "without-password";
35
+
};
36
+
};
37
+
38
+
getty.autologinUser = "root";
39
+
};
40
+
41
+
virtualisation = {
42
+
graphics = false;
43
+
# useBootLoader = true;
44
+
45
+
diskSize = 5024;
46
+
memorySize = 4096;
47
+
};
48
+
49
+
# It's important to note that you should never ever use this configuration
50
+
# for production. You are risking a MITM attack with this!
51
+
programs.ssh.extraConfig = ''
52
+
Host *
53
+
StrictHostKeyChecking no
54
+
UserKnownHostsFile /dev/null
55
+
'';
56
+
57
+
users.users.root.openssh.authorizedKeys.keys = [ snakeOil.snakeOilEd25519PublicKey ];
58
+
systemd.tmpfiles.rules = [
59
+
"C+ /root/.ssh/id_ed25519 600 - - - ${snakeOil.snakeOilEd25519PrivateKey}"
60
+
];
61
+
62
+
nix = {
63
+
nixPath = [ "nixpkgs=${pkgs.path}" ];
64
+
settings.substituters = lib.mkForce [ ];
65
+
package = pkgs.lix;
66
+
};
67
+
}
+5
bench/wire/hive.nix
+5
bench/wire/hive.nix
+9
bench/wire-flake/flake.nix
+9
bench/wire-flake/flake.nix
+32
crates/cli/Cargo.toml
+32
crates/cli/Cargo.toml
···
1
+
[package]
2
+
name = "wire"
3
+
version.workspace = true
4
+
edition.workspace = true
5
+
6
+
[lints]
7
+
workspace = true
8
+
9
+
[features]
10
+
dhat-heap = []
11
+
12
+
[dependencies]
13
+
clap = { workspace = true }
14
+
clap-verbosity-flag = { workspace = true }
15
+
tokio = { workspace = true }
16
+
tracing = { workspace = true }
17
+
tracing-log = { workspace = true }
18
+
tracing-subscriber = { workspace = true }
19
+
wire-core = { path = "../core" }
20
+
serde_json = { workspace = true }
21
+
miette = { workspace = true }
22
+
thiserror = { workspace = true }
23
+
enum-display-derive = "0.1.1"
24
+
futures = "0.3.31"
25
+
clap-num = "1.2.0"
26
+
clap-markdown = "0.1.5"
27
+
itertools = "0.14.0"
28
+
dhat = "0.3.2"
29
+
clap_complete = { version = "4.5.60", features = ["unstable-dynamic"] }
30
+
owo-colors = { workspace = true }
31
+
signal-hook-tokio = { version = "0.3.1", features = ["futures-v0_3"] }
32
+
signal-hook = "0.3.18"
+98
crates/cli/default.nix
+98
crates/cli/default.nix
···
1
+
{ getSystem, inputs, ... }:
2
+
{
3
+
perSystem =
4
+
{
5
+
pkgs,
6
+
lib,
7
+
self',
8
+
buildRustProgram,
9
+
system,
10
+
...
11
+
}:
12
+
let
13
+
cleanSystem = system: lib.replaceStrings [ "-" ] [ "_" ] system;
14
+
agents = lib.strings.concatMapStrings (
15
+
system: "--set WIRE_KEY_AGENT_${cleanSystem system} ${(getSystem system).packages.agent} "
16
+
) (import inputs.linux-systems);
17
+
in
18
+
{
19
+
packages = {
20
+
default = self'.packages.wire;
21
+
wire-unwrapped = buildRustProgram {
22
+
name = "wire";
23
+
pname = "wire";
24
+
cargoExtraArgs = "-p wire";
25
+
doCheck = true;
26
+
nativeBuildInputs = [
27
+
pkgs.installShellFiles
28
+
pkgs.sqlx-cli
29
+
];
30
+
preBuild = ''
31
+
export DATABASE_URL=sqlite:./db.sqlite3
32
+
sqlx database create
33
+
sqlx migrate run --source ./crates/core/src/cache/migrations/
34
+
'';
35
+
postInstall = ''
36
+
installShellCompletion --cmd wire \
37
+
--bash <(COMPLETE=bash $out/bin/wire) \
38
+
--fish <(COMPLETE=fish $out/bin/wire) \
39
+
--zsh <(COMPLETE=zsh $out/bin/wire)
40
+
'';
41
+
};
42
+
43
+
wire-unwrapped-dev = self'.packages.wire-unwrapped.overrideAttrs {
44
+
CARGO_PROFILE = "dev";
45
+
};
46
+
47
+
wire-unwrapped-perf = buildRustProgram {
48
+
name = "wire";
49
+
pname = "wire";
50
+
CARGO_PROFILE = "profiling";
51
+
cargoExtraArgs = "-p wire";
52
+
};
53
+
54
+
wire = pkgs.symlinkJoin {
55
+
name = "wire";
56
+
paths = [ self'.packages.wire-unwrapped ];
57
+
nativeBuildInputs = [
58
+
pkgs.makeWrapper
59
+
];
60
+
postBuild = ''
61
+
wrapProgram $out/bin/wire ${agents}
62
+
'';
63
+
meta.mainProgram = "wire";
64
+
};
65
+
66
+
wire-small = pkgs.symlinkJoin {
67
+
name = "wire";
68
+
paths = [ self'.packages.wire-unwrapped ];
69
+
nativeBuildInputs = [
70
+
pkgs.makeWrapper
71
+
];
72
+
postBuild = ''
73
+
wrapProgram $out/bin/wire --set WIRE_KEY_AGENT_${cleanSystem system} ${self'.packages.agent}
74
+
'';
75
+
meta.mainProgram = "wire";
76
+
};
77
+
78
+
wire-dev = self'.packages.wire.overrideAttrs {
79
+
paths = [ self'.packages.wire-unwrapped-dev ];
80
+
};
81
+
82
+
wire-small-dev = self'.packages.wire-small.overrideAttrs {
83
+
paths = [ self'.packages.wire-unwrapped-dev ];
84
+
};
85
+
86
+
wire-small-perf = self'.packages.wire-small.overrideAttrs {
87
+
paths = [ self'.packages.wire-unwrapped-perf ];
88
+
};
89
+
90
+
wire-diagnostics-md = self'.packages.wire-unwrapped.overrideAttrs {
91
+
DIAGNOSTICS_MD_OUTPUT = "/build/source";
92
+
installPhase = ''
93
+
mv /build/source/DIAGNOSTICS.md $out
94
+
'';
95
+
};
96
+
};
97
+
};
98
+
}
+341
crates/cli/src/apply.rs
+341
crates/cli/src/apply.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use futures::{FutureExt, StreamExt};
5
+
use itertools::{Either, Itertools};
6
+
use miette::{Diagnostic, IntoDiagnostic, Result};
7
+
use std::any::Any;
8
+
use std::collections::HashSet;
9
+
use std::io::{Read, stderr};
10
+
use std::sync::Arc;
11
+
use std::sync::atomic::AtomicBool;
12
+
use thiserror::Error;
13
+
use tracing::{error, info};
14
+
use wire_core::hive::node::{Context, GoalExecutor, Name, Node, Objective, StepState};
15
+
use wire_core::hive::{Hive, HiveLocation};
16
+
use wire_core::status::STATUS;
17
+
use wire_core::{SubCommandModifiers, errors::HiveLibError};
18
+
19
+
use crate::cli::{ApplyTarget, CommonVerbArgs, Partitions};
20
+
21
+
#[derive(Debug, Error, Diagnostic)]
22
+
#[error("node {} failed to apply", .0)]
23
+
struct NodeError(
24
+
Name,
25
+
#[source]
26
+
#[diagnostic_source]
27
+
HiveLibError,
28
+
);
29
+
30
+
#[derive(Debug, Error, Diagnostic)]
31
+
#[error("{} node(s) failed to apply.", .0.len())]
32
+
struct NodeErrors(#[related] Vec<NodeError>);
33
+
34
+
// returns Names and Tags
35
+
fn read_apply_targets_from_stdin() -> Result<(Vec<String>, Vec<Name>)> {
36
+
let mut buf = String::new();
37
+
let mut stdin = std::io::stdin().lock();
38
+
stdin.read_to_string(&mut buf).into_diagnostic()?;
39
+
40
+
Ok(buf
41
+
.split_whitespace()
42
+
.map(|x| ApplyTarget::from(x.to_string()))
43
+
.fold((Vec::new(), Vec::new()), |(mut tags, mut names), target| {
44
+
match target {
45
+
ApplyTarget::Node(name) => names.push(name),
46
+
ApplyTarget::Tag(tag) => tags.push(tag),
47
+
ApplyTarget::Stdin => {}
48
+
}
49
+
(tags, names)
50
+
}))
51
+
}
52
+
53
+
fn resolve_targets(
54
+
on: &[ApplyTarget],
55
+
modifiers: &mut SubCommandModifiers,
56
+
) -> (HashSet<String>, HashSet<Name>) {
57
+
on.iter().fold(
58
+
(HashSet::new(), HashSet::new()),
59
+
|(mut tags, mut names), target| {
60
+
match target {
61
+
ApplyTarget::Tag(tag) => {
62
+
tags.insert(tag.clone());
63
+
}
64
+
ApplyTarget::Node(name) => {
65
+
names.insert(name.clone());
66
+
}
67
+
ApplyTarget::Stdin => {
68
+
// implies non_interactive
69
+
modifiers.non_interactive = true;
70
+
71
+
let (found_tags, found_names) = read_apply_targets_from_stdin().unwrap();
72
+
names.extend(found_names);
73
+
tags.extend(found_tags);
74
+
}
75
+
}
76
+
(tags, names)
77
+
},
78
+
)
79
+
}
80
+
81
+
fn partition_arr<T>(arr: Vec<T>, partition: &Partitions) -> Vec<T>
82
+
where
83
+
T: Any + Clone,
84
+
{
85
+
if arr.is_empty() {
86
+
return arr;
87
+
}
88
+
89
+
let items_per_chunk = arr.len().div_ceil(partition.maximum);
90
+
91
+
arr.chunks(items_per_chunk)
92
+
.nth(partition.current - 1)
93
+
.unwrap_or(&[])
94
+
.to_vec()
95
+
}
96
+
97
+
pub async fn apply<F>(
98
+
hive: &mut Hive,
99
+
should_shutdown: Arc<AtomicBool>,
100
+
location: HiveLocation,
101
+
args: CommonVerbArgs,
102
+
partition: Partitions,
103
+
make_objective: F,
104
+
mut modifiers: SubCommandModifiers,
105
+
) -> Result<()>
106
+
where
107
+
F: Fn(&Name, &Node) -> Objective,
108
+
{
109
+
let location = Arc::new(location);
110
+
111
+
let (tags, names) = resolve_targets(&args.on, &mut modifiers);
112
+
113
+
let selected_names: Vec<_> = hive
114
+
.nodes
115
+
.iter()
116
+
.filter(|(name, node)| {
117
+
args.on.is_empty()
118
+
|| names.contains(name)
119
+
|| node.tags.iter().any(|tag| tags.contains(tag))
120
+
})
121
+
.sorted_by_key(|(name, _)| *name)
122
+
.map(|(name, _)| name.clone())
123
+
.collect();
124
+
125
+
let num_selected = selected_names.len();
126
+
127
+
let partitioned_names = partition_arr(selected_names, &partition);
128
+
129
+
if num_selected != partitioned_names.len() {
130
+
info!(
131
+
"Partitioning reduced selected number of nodes from {num_selected} to {}",
132
+
partitioned_names.len()
133
+
);
134
+
}
135
+
136
+
STATUS
137
+
.lock()
138
+
.add_many(&partitioned_names.iter().collect::<Vec<_>>());
139
+
140
+
let mut set = hive
141
+
.nodes
142
+
.iter_mut()
143
+
.filter(|(name, _)| partitioned_names.contains(name))
144
+
.map(|(name, node)| {
145
+
info!("Resolved {:?} to include {}", args.on, name);
146
+
147
+
let objective = make_objective(name, node);
148
+
149
+
let context = Context {
150
+
node,
151
+
name,
152
+
objective,
153
+
state: StepState::default(),
154
+
hive_location: location.clone(),
155
+
modifiers,
156
+
should_quit: should_shutdown.clone(),
157
+
};
158
+
159
+
GoalExecutor::new(context)
160
+
.execute()
161
+
.map(move |result| (name, result))
162
+
})
163
+
.peekable();
164
+
165
+
if set.peek().is_none() {
166
+
error!("There are no nodes selected for deployment");
167
+
}
168
+
169
+
let futures = futures::stream::iter(set).buffer_unordered(args.parallel);
170
+
let result = futures.collect::<Vec<_>>().await;
171
+
let (successful, errors): (Vec<_>, Vec<_>) =
172
+
result
173
+
.into_iter()
174
+
.partition_map(|(name, result)| match result {
175
+
Ok(..) => Either::Left(name),
176
+
Err(err) => Either::Right((name, err)),
177
+
});
178
+
179
+
if !successful.is_empty() {
180
+
info!(
181
+
"Successfully applied goal to {} node(s): {:?}",
182
+
successful.len(),
183
+
successful
184
+
);
185
+
}
186
+
187
+
if !errors.is_empty() {
188
+
// clear the status bar if we are about to print error messages
189
+
STATUS.lock().clear(&mut stderr());
190
+
191
+
return Err(NodeErrors(
192
+
errors
193
+
.into_iter()
194
+
.map(|(name, error)| NodeError(name.clone(), error))
195
+
.collect(),
196
+
)
197
+
.into());
198
+
}
199
+
200
+
Ok(())
201
+
}
202
+
203
+
#[cfg(test)]
204
+
mod tests {
205
+
use super::*;
206
+
207
+
#[test]
208
+
#[allow(clippy::too_many_lines)]
209
+
fn test_partitioning() {
210
+
let arr = (1..=10).collect::<Vec<_>>();
211
+
assert_eq!(arr, partition_arr(arr.clone(), &Partitions::default()));
212
+
213
+
assert_eq!(
214
+
vec![1, 2, 3, 4, 5],
215
+
partition_arr(
216
+
arr.clone(),
217
+
&Partitions {
218
+
current: 1,
219
+
maximum: 2
220
+
}
221
+
)
222
+
);
223
+
assert_eq!(
224
+
vec![6, 7, 8, 9, 10],
225
+
partition_arr(
226
+
arr,
227
+
&Partitions {
228
+
current: 2,
229
+
maximum: 2
230
+
}
231
+
)
232
+
);
233
+
234
+
// test odd number
235
+
let arr = (1..10).collect::<Vec<_>>();
236
+
assert_eq!(
237
+
arr.clone(),
238
+
partition_arr(arr.clone(), &Partitions::default())
239
+
);
240
+
241
+
assert_eq!(
242
+
vec![1, 2, 3, 4, 5],
243
+
partition_arr(
244
+
arr.clone(),
245
+
&Partitions {
246
+
current: 1,
247
+
maximum: 2
248
+
}
249
+
)
250
+
);
251
+
assert_eq!(
252
+
vec![6, 7, 8, 9],
253
+
partition_arr(
254
+
arr.clone(),
255
+
&Partitions {
256
+
current: 2,
257
+
maximum: 2
258
+
}
259
+
)
260
+
);
261
+
262
+
// test large number of partitions
263
+
let arr = (1..=10).collect::<Vec<_>>();
264
+
assert_eq!(
265
+
arr.clone(),
266
+
partition_arr(arr.clone(), &Partitions::default())
267
+
);
268
+
269
+
for i in 1..=10 {
270
+
assert_eq!(
271
+
vec![i],
272
+
partition_arr(
273
+
arr.clone(),
274
+
&Partitions {
275
+
current: i,
276
+
maximum: 10
277
+
}
278
+
)
279
+
);
280
+
281
+
assert_eq!(
282
+
vec![i],
283
+
partition_arr(
284
+
arr.clone(),
285
+
&Partitions {
286
+
current: i,
287
+
maximum: 15
288
+
}
289
+
)
290
+
);
291
+
}
292
+
293
+
// stretching thin with higher partitions will start to leave higher ones empty
294
+
assert_eq!(
295
+
Vec::<usize>::new(),
296
+
partition_arr(
297
+
arr,
298
+
&Partitions {
299
+
current: 11,
300
+
maximum: 15
301
+
}
302
+
)
303
+
);
304
+
305
+
// test the above holds for a lot of numbers
306
+
for i in 1..1000 {
307
+
let arr: Vec<usize> = (0..i).collect();
308
+
let total = arr.len();
309
+
310
+
assert_eq!(
311
+
arr.clone(),
312
+
partition_arr(arr.clone(), &Partitions::default()),
313
+
);
314
+
315
+
let buckets = 2;
316
+
let chunk_size = total.div_ceil(buckets);
317
+
let split_index = std::cmp::min(chunk_size, total);
318
+
319
+
assert_eq!(
320
+
&arr.clone()[..split_index],
321
+
partition_arr(
322
+
arr.clone(),
323
+
&Partitions {
324
+
current: 1,
325
+
maximum: 2
326
+
}
327
+
),
328
+
);
329
+
assert_eq!(
330
+
&arr.clone()[split_index..],
331
+
partition_arr(
332
+
arr.clone(),
333
+
&Partitions {
334
+
current: 2,
335
+
maximum: 2
336
+
}
337
+
),
338
+
);
339
+
}
340
+
}
341
+
}
+398
crates/cli/src/cli.rs
+398
crates/cli/src/cli.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use clap::builder::PossibleValue;
5
+
use clap::{Args, Parser, Subcommand, ValueEnum};
6
+
use clap::{ValueHint, crate_version};
7
+
use clap_complete::CompletionCandidate;
8
+
use clap_complete::engine::ArgValueCompleter;
9
+
use clap_num::number_range;
10
+
use clap_verbosity_flag::InfoLevel;
11
+
use tokio::runtime::Handle;
12
+
use wire_core::SubCommandModifiers;
13
+
use wire_core::commands::common::get_hive_node_names;
14
+
use wire_core::hive::node::{Goal as HiveGoal, HandleUnreachable, Name, SwitchToConfigurationGoal};
15
+
use wire_core::hive::{Hive, get_hive_location};
16
+
17
+
use std::io::IsTerminal;
18
+
use std::{
19
+
fmt::{self, Display, Formatter},
20
+
sync::Arc,
21
+
};
22
+
23
+
#[allow(clippy::struct_excessive_bools)]
24
+
#[derive(Parser)]
25
+
#[command(
26
+
name = "wire",
27
+
bin_name = "wire",
28
+
about = "a tool to deploy nixos systems",
29
+
version = format!("{}\nDebug: Hive::SCHEMA_VERSION {}", crate_version!(), Hive::SCHEMA_VERSION)
30
+
)]
31
+
pub struct Cli {
32
+
#[command(subcommand)]
33
+
pub command: Commands,
34
+
35
+
#[command(flatten)]
36
+
pub verbose: clap_verbosity_flag::Verbosity<InfoLevel>,
37
+
38
+
/// Path or flake reference
39
+
#[arg(long, global = true, default_value = std::env::current_dir().unwrap().into_os_string(), visible_alias("flake"))]
40
+
pub path: String,
41
+
42
+
/// Hide progress bars.
43
+
///
44
+
/// Defaults to true if stdin does not refer to a tty (unix pipelines, in CI).
45
+
#[arg(long, global = true, default_value_t = !std::io::stdin().is_terminal())]
46
+
pub no_progress: bool,
47
+
48
+
/// Never accept user input.
49
+
///
50
+
/// Defaults to true if stdin does not refer to a tty (unix pipelines, in CI).
51
+
#[arg(long, global = true, default_value_t = !std::io::stdin().is_terminal())]
52
+
pub non_interactive: bool,
53
+
54
+
/// Show trace logs
55
+
#[arg(long, global = true, default_value_t = false)]
56
+
pub show_trace: bool,
57
+
58
+
#[cfg(debug_assertions)]
59
+
#[arg(long, hide = true, global = true)]
60
+
pub markdown_help: bool,
61
+
}
62
+
63
+
#[derive(Clone, Debug)]
64
+
pub enum ApplyTarget {
65
+
Node(Name),
66
+
Tag(String),
67
+
Stdin,
68
+
}
69
+
70
+
impl From<String> for ApplyTarget {
71
+
fn from(value: String) -> Self {
72
+
if value == "-" {
73
+
return ApplyTarget::Stdin;
74
+
}
75
+
76
+
if let Some(stripped) = value.strip_prefix("@") {
77
+
ApplyTarget::Tag(stripped.to_string())
78
+
} else {
79
+
ApplyTarget::Node(Name(Arc::from(value.as_str())))
80
+
}
81
+
}
82
+
}
83
+
84
+
impl Display for ApplyTarget {
85
+
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
86
+
match self {
87
+
ApplyTarget::Node(name) => name.fmt(f),
88
+
ApplyTarget::Tag(tag) => write!(f, "@{tag}"),
89
+
ApplyTarget::Stdin => write!(f, "#stdin"),
90
+
}
91
+
}
92
+
}
93
+
94
+
fn more_than_zero(s: &str) -> Result<usize, String> {
95
+
number_range(s, 1, usize::MAX)
96
+
}
97
+
98
+
fn parse_partitions(s: &str) -> Result<Partitions, String> {
99
+
let parts: [&str; 2] = s
100
+
.split('/')
101
+
.collect::<Vec<_>>()
102
+
.try_into()
103
+
.map_err(|_| "partition must contain exactly one '/'")?;
104
+
105
+
let (current, maximum) =
106
+
std::array::from_fn(|i| parts[i].parse::<usize>().map_err(|x| x.to_string())).into();
107
+
let (current, maximum) = (current?, maximum?);
108
+
109
+
if current > maximum {
110
+
return Err("current is more than total".to_string());
111
+
}
112
+
113
+
if current == 0 || maximum == 0 {
114
+
return Err("partition segments cannot be 0.".to_string());
115
+
}
116
+
117
+
Ok(Partitions { current, maximum })
118
+
}
119
+
120
+
#[derive(Clone)]
121
+
pub enum HandleUnreachableArg {
122
+
Ignore,
123
+
FailNode,
124
+
}
125
+
126
+
impl Display for HandleUnreachableArg {
127
+
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
128
+
match self {
129
+
Self::Ignore => write!(f, "ignore"),
130
+
Self::FailNode => write!(f, "fail-node"),
131
+
}
132
+
}
133
+
}
134
+
135
+
impl clap::ValueEnum for HandleUnreachableArg {
136
+
fn value_variants<'a>() -> &'a [Self] {
137
+
&[Self::Ignore, Self::FailNode]
138
+
}
139
+
140
+
fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
141
+
match self {
142
+
Self::Ignore => Some(PossibleValue::new("ignore")),
143
+
Self::FailNode => Some(PossibleValue::new("fail-node")),
144
+
}
145
+
}
146
+
}
147
+
148
+
impl From<HandleUnreachableArg> for HandleUnreachable {
149
+
fn from(value: HandleUnreachableArg) -> Self {
150
+
match value {
151
+
HandleUnreachableArg::Ignore => Self::Ignore,
152
+
HandleUnreachableArg::FailNode => Self::FailNode,
153
+
}
154
+
}
155
+
}
156
+
157
+
#[derive(Args)]
158
+
pub struct CommonVerbArgs {
159
+
/// List of literal node names, a literal `-`, or `@` prefixed tags.
160
+
///
161
+
/// `-` will read additional values from stdin, separated by whitespace.
162
+
/// Any `-` implies `--non-interactive`.
163
+
#[arg(short, long, value_name = "NODE | @TAG | `-`", num_args = 1.., add = ArgValueCompleter::new(node_names_completer), value_hint = ValueHint::Unknown)]
164
+
pub on: Vec<ApplyTarget>,
165
+
166
+
#[arg(short, long, default_value_t = 10, value_parser=more_than_zero)]
167
+
pub parallel: usize,
168
+
}
169
+
170
+
#[allow(clippy::struct_excessive_bools)]
171
+
#[derive(Args)]
172
+
pub struct ApplyArgs {
173
+
#[command(flatten)]
174
+
pub common: CommonVerbArgs,
175
+
176
+
#[arg(value_enum, default_value_t)]
177
+
pub goal: Goal,
178
+
179
+
/// Skip key uploads. noop when [GOAL] = Keys
180
+
#[arg(short, long, default_value_t = false)]
181
+
pub no_keys: bool,
182
+
183
+
/// Overrides deployment.buildOnTarget.
184
+
#[arg(short, long, value_name = "NODE")]
185
+
pub always_build_local: Vec<String>,
186
+
187
+
/// Reboot the nodes after activation
188
+
#[arg(short, long, default_value_t = false)]
189
+
pub reboot: bool,
190
+
191
+
/// Enable `--substitute-on-destination` in Nix subcommands.
192
+
#[arg(short, long, default_value_t = true)]
193
+
pub substitute_on_destination: bool,
194
+
195
+
/// How to handle an unreachable node in the ping step.
196
+
///
197
+
/// This only effects the ping step.
198
+
/// wire will still fail the node if it becomes unreachable after activation
199
+
#[arg(long, default_value_t = HandleUnreachableArg::FailNode)]
200
+
pub handle_unreachable: HandleUnreachableArg,
201
+
202
+
/// Unconditionally accept SSH host keys [!!]
203
+
///
204
+
/// Sets `StrictHostKeyChecking` to `no`.
205
+
/// Vulnerable to man-in-the-middle attacks, use with caution.
206
+
#[arg(long, default_value_t = false)]
207
+
pub ssh_accept_host: bool,
208
+
}
209
+
210
+
#[derive(Clone, Debug)]
211
+
pub struct Partitions {
212
+
pub current: usize,
213
+
pub maximum: usize,
214
+
}
215
+
216
+
impl Default for Partitions {
217
+
fn default() -> Self {
218
+
Self {
219
+
current: 1,
220
+
maximum: 1,
221
+
}
222
+
}
223
+
}
224
+
225
+
#[derive(Args)]
226
+
pub struct BuildArgs {
227
+
#[command(flatten)]
228
+
pub common: CommonVerbArgs,
229
+
230
+
/// Partition builds into buckets.
231
+
///
232
+
/// In the format of `current/total`, where 1 <= current <= total.
233
+
#[arg(short = 'P', default_value="1/1", long, value_parser=parse_partitions)]
234
+
pub partition: Option<Partitions>,
235
+
}
236
+
237
+
#[derive(Subcommand)]
238
+
pub enum Commands {
239
+
/// Deploy nodes
240
+
Apply(ApplyArgs),
241
+
/// Build nodes offline
242
+
///
243
+
/// This is distinct from `wire apply build`, as it will not ping or push
244
+
/// the result, making it useful for CI.
245
+
///
246
+
/// Additionally, you may partition the build jobs into buckets.
247
+
Build(BuildArgs),
248
+
/// Inspect hive
249
+
#[clap(visible_alias = "show")]
250
+
Inspect {
251
+
#[arg(value_enum, default_value_t)]
252
+
selection: Inspection,
253
+
254
+
/// Return in JSON format
255
+
#[arg(short, long, default_value_t = false)]
256
+
json: bool,
257
+
},
258
+
}
259
+
260
+
#[derive(Clone, Debug, Default, ValueEnum, Display)]
261
+
pub enum Inspection {
262
+
/// Output all data wire has on the entire hive
263
+
#[default]
264
+
Full,
265
+
/// Only output a list of node names
266
+
Names,
267
+
}
268
+
269
+
#[derive(Clone, Debug, Default, ValueEnum, Display)]
270
+
pub enum Goal {
271
+
/// Make the configuration the boot default and activate now
272
+
#[default]
273
+
Switch,
274
+
/// Build the configuration & push the results
275
+
Build,
276
+
/// Copy the system derivation to the remote hosts
277
+
Push,
278
+
/// Push deployment keys to the remote hosts
279
+
Keys,
280
+
/// Activate the system profile on next boot
281
+
Boot,
282
+
/// Activate the configuration, but don't make it the boot default
283
+
Test,
284
+
/// Show what would be done if this configuration were activated.
285
+
DryActivate,
286
+
}
287
+
288
+
impl TryFrom<Goal> for HiveGoal {
289
+
type Error = miette::Error;
290
+
291
+
fn try_from(value: Goal) -> Result<Self, Self::Error> {
292
+
match value {
293
+
Goal::Build => Ok(HiveGoal::Build),
294
+
Goal::Push => Ok(HiveGoal::Push),
295
+
Goal::Boot => Ok(HiveGoal::SwitchToConfiguration(
296
+
SwitchToConfigurationGoal::Boot,
297
+
)),
298
+
Goal::Switch => Ok(HiveGoal::SwitchToConfiguration(
299
+
SwitchToConfigurationGoal::Switch,
300
+
)),
301
+
Goal::Test => Ok(HiveGoal::SwitchToConfiguration(
302
+
SwitchToConfigurationGoal::Test,
303
+
)),
304
+
Goal::DryActivate => Ok(HiveGoal::SwitchToConfiguration(
305
+
SwitchToConfigurationGoal::DryActivate,
306
+
)),
307
+
Goal::Keys => Ok(HiveGoal::Keys),
308
+
}
309
+
}
310
+
}
311
+
312
+
pub trait ToSubCommandModifiers {
313
+
fn to_subcommand_modifiers(&self) -> SubCommandModifiers;
314
+
}
315
+
316
+
impl ToSubCommandModifiers for Cli {
317
+
fn to_subcommand_modifiers(&self) -> SubCommandModifiers {
318
+
SubCommandModifiers {
319
+
show_trace: self.show_trace,
320
+
non_interactive: self.non_interactive,
321
+
ssh_accept_host: match &self.command {
322
+
Commands::Apply(args) if args.ssh_accept_host => {
323
+
wire_core::StrictHostKeyChecking::No
324
+
}
325
+
_ => wire_core::StrictHostKeyChecking::default(),
326
+
},
327
+
}
328
+
}
329
+
}
330
+
331
+
fn node_names_completer(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> {
332
+
tokio::task::block_in_place(|| {
333
+
let handle = Handle::current();
334
+
let modifiers = SubCommandModifiers::default();
335
+
let mut completions = vec![];
336
+
337
+
if current.is_empty() || current == "-" {
338
+
completions.push(
339
+
CompletionCandidate::new("-").help(Some("Read stdin as --on arguments".into())),
340
+
);
341
+
}
342
+
343
+
let Ok(current_dir) = std::env::current_dir() else {
344
+
return completions;
345
+
};
346
+
347
+
let Ok(hive_location) = handle.block_on(get_hive_location(
348
+
current_dir.display().to_string(),
349
+
modifiers,
350
+
)) else {
351
+
return completions;
352
+
};
353
+
354
+
let Some(current) = current.to_str() else {
355
+
return completions;
356
+
};
357
+
358
+
if current.starts_with('@') {
359
+
return vec![];
360
+
}
361
+
362
+
if let Ok(names) =
363
+
handle.block_on(async { get_hive_node_names(&hive_location, modifiers).await })
364
+
{
365
+
for name in names {
366
+
if name.starts_with(current) {
367
+
completions.push(CompletionCandidate::new(name));
368
+
}
369
+
}
370
+
}
371
+
372
+
completions
373
+
})
374
+
}
375
+
376
+
#[cfg(test)]
377
+
mod tests {
378
+
use std::assert_matches::assert_matches;
379
+
380
+
use crate::cli::{Partitions, parse_partitions};
381
+
382
+
#[test]
383
+
fn test_partition_parsing() {
384
+
assert_matches!(parse_partitions(""), Err(..));
385
+
assert_matches!(parse_partitions("/"), Err(..));
386
+
assert_matches!(parse_partitions(" / "), Err(..));
387
+
assert_matches!(parse_partitions("abc/"), Err(..));
388
+
assert_matches!(parse_partitions("abc"), Err(..));
389
+
assert_matches!(parse_partitions("1/1"), Ok(Partitions {
390
+
current,
391
+
maximum
392
+
}) if current == 1 && maximum == 1);
393
+
assert_matches!(parse_partitions("0/1"), Err(..));
394
+
assert_matches!(parse_partitions("-11/1"), Err(..));
395
+
assert_matches!(parse_partitions("100/99"), Err(..));
396
+
assert_matches!(parse_partitions("5/10"), Ok(Partitions { current, maximum }) if current == 5 && maximum == 10);
397
+
}
398
+
}
+173
crates/cli/src/main.rs
+173
crates/cli/src/main.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#![deny(clippy::pedantic)]
5
+
#![feature(sync_nonpoison)]
6
+
#![feature(nonpoison_mutex)]
7
+
#![feature(assert_matches)]
8
+
9
+
use std::process::Command;
10
+
use std::sync::Arc;
11
+
use std::sync::atomic::AtomicBool;
12
+
13
+
use crate::cli::Cli;
14
+
use crate::cli::Partitions;
15
+
use crate::cli::ToSubCommandModifiers;
16
+
use crate::sigint::handle_signals;
17
+
use crate::tracing_setup::setup_logging;
18
+
use clap::CommandFactory;
19
+
use clap::Parser;
20
+
use clap_complete::CompleteEnv;
21
+
use miette::IntoDiagnostic;
22
+
use miette::Result;
23
+
use signal_hook::consts::SIGINT;
24
+
use signal_hook_tokio::Signals;
25
+
use tracing::error;
26
+
use tracing::warn;
27
+
use wire_core::cache::InspectionCache;
28
+
use wire_core::commands::common::get_hive_node_names;
29
+
use wire_core::hive::Hive;
30
+
use wire_core::hive::get_hive_location;
31
+
use wire_core::hive::node::ApplyObjective;
32
+
use wire_core::hive::node::Objective;
33
+
use wire_core::hive::node::should_apply_locally;
34
+
35
+
#[macro_use]
36
+
extern crate enum_display_derive;
37
+
38
+
mod apply;
39
+
mod cli;
40
+
mod sigint;
41
+
mod tracing_setup;
42
+
43
+
#[cfg(feature = "dhat-heap")]
44
+
#[global_allocator]
45
+
static ALLOC: dhat::Alloc = dhat::Alloc;
46
+
47
+
#[tokio::main]
48
+
async fn main() -> Result<()> {
49
+
#[cfg(feature = "dhat-heap")]
50
+
let _profiler = dhat::Profiler::new_heap();
51
+
CompleteEnv::with_factory(Cli::command).complete();
52
+
53
+
let args = Cli::parse();
54
+
55
+
let modifiers = args.to_subcommand_modifiers();
56
+
// disable progress when running inspect mode.
57
+
setup_logging(
58
+
&args.verbose,
59
+
!matches!(args.command, cli::Commands::Inspect { .. }) && !&args.no_progress,
60
+
);
61
+
62
+
#[cfg(debug_assertions)]
63
+
if args.markdown_help {
64
+
clap_markdown::print_help_markdown::<Cli>();
65
+
return Ok(());
66
+
}
67
+
68
+
if !check_nix_available() {
69
+
miette::bail!("Nix is not available on this system.");
70
+
}
71
+
72
+
let signals = Signals::new([SIGINT]).into_diagnostic()?;
73
+
let signals_handle = signals.handle();
74
+
let should_shutdown = Arc::new(AtomicBool::new(false));
75
+
let signals_task = tokio::spawn(handle_signals(signals, should_shutdown.clone()));
76
+
77
+
let location = get_hive_location(args.path, modifiers).await?;
78
+
let cache = InspectionCache::new().await;
79
+
80
+
match args.command {
81
+
cli::Commands::Apply(apply_args) => {
82
+
let mut hive = Hive::new_from_path(&location, cache.clone(), modifiers).await?;
83
+
let goal: wire_core::hive::node::Goal = apply_args.goal.clone().try_into().unwrap();
84
+
85
+
// Respect user's --always-build-local arg
86
+
hive.force_always_local(apply_args.always_build_local)?;
87
+
88
+
apply::apply(
89
+
&mut hive,
90
+
should_shutdown,
91
+
location,
92
+
apply_args.common,
93
+
Partitions::default(),
94
+
|name, node| {
95
+
Objective::Apply(ApplyObjective {
96
+
goal,
97
+
no_keys: apply_args.no_keys,
98
+
reboot: apply_args.reboot,
99
+
substitute_on_destination: apply_args.substitute_on_destination,
100
+
should_apply_locally: should_apply_locally(
101
+
node.allow_local_deployment,
102
+
&name.0,
103
+
),
104
+
handle_unreachable: apply_args.handle_unreachable.clone().into(),
105
+
})
106
+
},
107
+
modifiers,
108
+
)
109
+
.await?;
110
+
}
111
+
cli::Commands::Build(build_args) => {
112
+
let mut hive = Hive::new_from_path(&location, cache.clone(), modifiers).await?;
113
+
114
+
apply::apply(
115
+
&mut hive,
116
+
should_shutdown,
117
+
location,
118
+
build_args.common,
119
+
build_args.partition.unwrap_or_default(),
120
+
|_name, _node| Objective::BuildLocally,
121
+
modifiers,
122
+
)
123
+
.await?;
124
+
}
125
+
cli::Commands::Inspect { json, selection } => println!("{}", {
126
+
match selection {
127
+
cli::Inspection::Full => {
128
+
let hive = Hive::new_from_path(&location, cache.clone(), modifiers).await?;
129
+
if json {
130
+
serde_json::to_string(&hive).into_diagnostic()?
131
+
} else {
132
+
warn!("use --json to output something scripting suitable");
133
+
format!("{hive}")
134
+
}
135
+
}
136
+
cli::Inspection::Names => {
137
+
serde_json::to_string(&get_hive_node_names(&location, modifiers).await?)
138
+
.into_diagnostic()?
139
+
}
140
+
}
141
+
}),
142
+
}
143
+
144
+
if let Some(cache) = cache {
145
+
cache.gc().await.into_diagnostic()?;
146
+
}
147
+
148
+
signals_handle.close();
149
+
signals_task.await.into_diagnostic()?;
150
+
151
+
Ok(())
152
+
}
153
+
154
+
fn check_nix_available() -> bool {
155
+
match Command::new("nix")
156
+
.stdout(std::process::Stdio::null())
157
+
.stderr(std::process::Stdio::null())
158
+
.spawn()
159
+
{
160
+
Ok(_) => true,
161
+
Err(e) => {
162
+
if let std::io::ErrorKind::NotFound = e.kind() {
163
+
false
164
+
} else {
165
+
error!(
166
+
"Something weird happened checking for nix availability, {}",
167
+
e
168
+
);
169
+
false
170
+
}
171
+
}
172
+
}
173
+
}
+21
crates/cli/src/sigint.rs
+21
crates/cli/src/sigint.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::sync::{Arc, atomic::AtomicBool};
5
+
6
+
use signal_hook::consts::SIGINT;
7
+
use signal_hook_tokio::Signals;
8
+
9
+
use futures::stream::StreamExt;
10
+
use tracing::info;
11
+
12
+
pub(crate) async fn handle_signals(mut signals: Signals, should_shutdown: Arc<AtomicBool>) {
13
+
while let Some(signal) = signals.next().await {
14
+
if let SIGINT = signal
15
+
&& !should_shutdown.load(std::sync::atomic::Ordering::Relaxed)
16
+
{
17
+
info!("Received SIGINT, attempting to shut down executor tasks.");
18
+
should_shutdown.store(true, std::sync::atomic::Ordering::Relaxed);
19
+
}
20
+
}
21
+
}
+284
crates/cli/src/tracing_setup.rs
+284
crates/cli/src/tracing_setup.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::{
5
+
collections::VecDeque,
6
+
io::{self, Stderr, Write, stderr},
7
+
time::Duration,
8
+
};
9
+
10
+
use clap_verbosity_flag::{LogLevel, Verbosity};
11
+
use owo_colors::{OwoColorize, Stream, Style};
12
+
use tracing::{Level, Subscriber};
13
+
use tracing_log::AsTrace;
14
+
use tracing_subscriber::{
15
+
Layer,
16
+
field::{RecordFields, VisitFmt},
17
+
fmt::{
18
+
FormatEvent, FormatFields, FormattedFields,
19
+
format::{self, DefaultFields, DefaultVisitor, Format, Full},
20
+
},
21
+
layer::{Context, SubscriberExt},
22
+
registry::LookupSpan,
23
+
util::SubscriberInitExt,
24
+
};
25
+
use wire_core::{STDIN_CLOBBER_LOCK, status::STATUS};
26
+
27
+
/// The non-clobbering writer ensures that log lines are held while interactive
28
+
/// prompts are shown to the user. If logs where shown, they would "clobber" the
29
+
/// sudo / ssh prompt.
30
+
///
31
+
/// Additionally, the `STDIN_CLOBBER_LOCK` is used to ensure that no two
32
+
/// interactive prompts are shown at the same time.
33
+
struct NonClobberingWriter {
34
+
queue: VecDeque<Vec<u8>>,
35
+
stderr: Stderr,
36
+
}
37
+
38
+
impl NonClobberingWriter {
39
+
fn new() -> Self {
40
+
NonClobberingWriter {
41
+
queue: VecDeque::with_capacity(100),
42
+
stderr: stderr(),
43
+
}
44
+
}
45
+
46
+
/// expects the caller to write the status line
47
+
fn dump_previous(&mut self) -> Result<(), io::Error> {
48
+
STATUS.lock().clear(&mut self.stderr);
49
+
50
+
for buf in self.queue.iter().rev() {
51
+
self.stderr.write(buf).map(|_| ())?;
52
+
}
53
+
54
+
Ok(())
55
+
}
56
+
}
57
+
58
+
impl Write for NonClobberingWriter {
59
+
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
60
+
if let 1.. = STDIN_CLOBBER_LOCK.available_permits() {
61
+
self.dump_previous().map(|()| 0)?;
62
+
63
+
STATUS.lock().write_above_status(buf, &mut self.stderr)
64
+
} else {
65
+
self.queue.push_front(buf.to_vec());
66
+
67
+
Ok(buf.len())
68
+
}
69
+
}
70
+
71
+
fn flush(&mut self) -> std::io::Result<()> {
72
+
self.stderr.flush()
73
+
}
74
+
}
75
+
76
+
/// Handles event formatting, which falls back to the default formatter
77
+
/// passed.
78
+
struct WireEventFormat(Format<Full, ()>);
79
+
/// Formats the node's name with `WireFieldVisitor`
80
+
struct WireFieldFormat;
81
+
struct WireFieldVisitor<'a>(DefaultVisitor<'a>);
82
+
/// `WireLayer` injects `WireFieldFormat` as an extension on the event
83
+
struct WireLayer;
84
+
85
+
impl<'a> WireFieldVisitor<'a> {
86
+
fn new(writer: format::Writer<'a>, is_empty: bool) -> Self {
87
+
Self(DefaultVisitor::new(writer, is_empty))
88
+
}
89
+
}
90
+
91
+
impl<'writer> FormatFields<'writer> for WireFieldFormat {
92
+
fn format_fields<R: RecordFields>(
93
+
&self,
94
+
writer: format::Writer<'writer>,
95
+
fields: R,
96
+
) -> std::fmt::Result {
97
+
let mut v = WireFieldVisitor::new(writer, true);
98
+
fields.record(&mut v);
99
+
Ok(())
100
+
}
101
+
}
102
+
103
+
impl tracing::field::Visit for WireFieldVisitor<'_> {
104
+
fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn std::fmt::Debug) {
105
+
if field.name() == "node" {
106
+
let _ = write!(
107
+
self.0.writer(),
108
+
"{:?}",
109
+
value.if_supports_color(Stream::Stderr, |text| text.bold())
110
+
);
111
+
}
112
+
}
113
+
}
114
+
115
+
const fn get_style(level: Level) -> Style {
116
+
let mut style = Style::new();
117
+
118
+
style = match level {
119
+
Level::TRACE => style.purple(),
120
+
Level::DEBUG => style.blue(),
121
+
Level::INFO => style.green(),
122
+
Level::WARN => style.yellow(),
123
+
Level::ERROR => style.red(),
124
+
};
125
+
126
+
style
127
+
}
128
+
129
+
const fn fmt_level(level: Level) -> &'static str {
130
+
match level {
131
+
Level::TRACE => "TRACE",
132
+
Level::DEBUG => "DEBUG",
133
+
Level::INFO => " INFO",
134
+
Level::WARN => " WARN",
135
+
Level::ERROR => "ERROR",
136
+
}
137
+
}
138
+
139
+
impl<S, N> FormatEvent<S, N> for WireEventFormat
140
+
where
141
+
S: Subscriber + for<'a> LookupSpan<'a>,
142
+
N: for<'a> FormatFields<'a> + 'static,
143
+
{
144
+
fn format_event(
145
+
&self,
146
+
ctx: &tracing_subscriber::fmt::FmtContext<'_, S, N>,
147
+
mut writer: tracing_subscriber::fmt::format::Writer<'_>,
148
+
event: &tracing::Event<'_>,
149
+
) -> std::fmt::Result {
150
+
let metadata = event.metadata();
151
+
152
+
// skip events without an "event_scope"
153
+
let Some(scope) = ctx.event_scope() else {
154
+
return self.0.format_event(ctx, writer, event);
155
+
};
156
+
157
+
// skip spans without a parent
158
+
let Some(parent) = scope.last() else {
159
+
return self.0.format_event(ctx, writer, event);
160
+
};
161
+
162
+
// skip spans that dont refer to the goal step executor
163
+
if parent.name() != "execute" {
164
+
return self.0.format_event(ctx, writer, event);
165
+
}
166
+
167
+
// skip spans that dont refer to a specific node being executed
168
+
if parent.fields().field("node").is_none() {
169
+
return self.0.format_event(ctx, writer, event);
170
+
}
171
+
172
+
let style = get_style(*metadata.level());
173
+
174
+
// write the log level with colour
175
+
write!(
176
+
writer,
177
+
"{} ",
178
+
fmt_level(*metadata.level()).if_supports_color(Stream::Stderr, |x| { x.style(style) })
179
+
)?;
180
+
181
+
// extract the formatted node name into a string
182
+
let parent_ext = parent.extensions();
183
+
let node_name = &parent_ext
184
+
.get::<FormattedFields<WireFieldFormat>>()
185
+
.unwrap();
186
+
187
+
write!(writer, "{node_name}")?;
188
+
189
+
// write the step name
190
+
if let Some(step) = ctx.event_scope().unwrap().from_root().nth(1) {
191
+
write!(writer, " {}", step.name().italic())?;
192
+
}
193
+
194
+
write!(writer, " | ")?;
195
+
196
+
// write the default fields, including the actual message and other data
197
+
let mut fields = FormattedFields::<DefaultFields>::new(String::new());
198
+
199
+
ctx.format_fields(fields.as_writer(), event)?;
200
+
201
+
write!(writer, "{fields}")?;
202
+
writeln!(writer)?;
203
+
204
+
Ok(())
205
+
}
206
+
}
207
+
208
+
impl<S> Layer<S> for WireLayer
209
+
where
210
+
S: Subscriber + for<'a> LookupSpan<'a>,
211
+
{
212
+
fn on_new_span(
213
+
&self,
214
+
attrs: &tracing::span::Attributes<'_>,
215
+
id: &tracing::span::Id,
216
+
ctx: Context<'_, S>,
217
+
) {
218
+
let span = ctx.span(id).unwrap();
219
+
220
+
if span.extensions().get::<WireFieldFormat>().is_some() {
221
+
return;
222
+
}
223
+
224
+
let mut fields = FormattedFields::<WireFieldFormat>::new(String::new());
225
+
if WireFieldFormat
226
+
.format_fields(fields.as_writer(), attrs)
227
+
.is_ok()
228
+
{
229
+
span.extensions_mut().insert(fields);
230
+
}
231
+
}
232
+
}
233
+
234
+
async fn status_tick_worker() {
235
+
let mut interval = tokio::time::interval(Duration::from_secs(1));
236
+
let mut stderr = stderr();
237
+
238
+
loop {
239
+
interval.tick().await;
240
+
241
+
if STDIN_CLOBBER_LOCK.available_permits() < 1 {
242
+
continue;
243
+
}
244
+
245
+
let mut status = STATUS.lock();
246
+
247
+
status.clear(&mut stderr);
248
+
status.write_status(&mut stderr);
249
+
}
250
+
}
251
+
252
+
/// Set up logging for the application
253
+
/// Uses `WireFieldFormat` if -v was never passed
254
+
pub fn setup_logging<L: LogLevel>(verbosity: &Verbosity<L>, show_progress: bool) {
255
+
let filter = verbosity.log_level_filter().as_trace();
256
+
let registry = tracing_subscriber::registry();
257
+
258
+
STATUS.lock().show_progress(show_progress);
259
+
260
+
// spawn worker to tick the status bar
261
+
if show_progress {
262
+
tokio::spawn(status_tick_worker());
263
+
}
264
+
265
+
if verbosity.is_present() {
266
+
let layer = tracing_subscriber::fmt::layer()
267
+
.without_time()
268
+
.with_target(false)
269
+
.with_writer(NonClobberingWriter::new)
270
+
.with_filter(filter);
271
+
272
+
registry.with(layer).init();
273
+
return;
274
+
}
275
+
276
+
let event_formatter = WireEventFormat(format::format().without_time().with_target(false));
277
+
278
+
let layer = tracing_subscriber::fmt::layer()
279
+
.event_format(event_formatter)
280
+
.with_writer(NonClobberingWriter::new)
281
+
.with_filter(filter);
282
+
283
+
registry.with(layer).with(WireLayer).init();
284
+
}
+51
crates/core/Cargo.toml
+51
crates/core/Cargo.toml
···
1
+
[package]
2
+
name = "wire-core"
3
+
version.workspace = true
4
+
edition.workspace = true
5
+
6
+
[lints]
7
+
workspace = true
8
+
9
+
[features]
10
+
no_web_tests = []
11
+
12
+
[dependencies]
13
+
tokio = { workspace = true }
14
+
serde = { workspace = true }
15
+
serde_json = { workspace = true }
16
+
tracing = { workspace = true }
17
+
im = { workspace = true }
18
+
thiserror = "2.0.17"
19
+
derive_more = { version = "2.0.1", features = ["display"] }
20
+
wire-key-agent = { path = "../key_agent" }
21
+
futures = "0.3.31"
22
+
prost = { workspace = true }
23
+
gethostname = "1.1.0"
24
+
nix.workspace = true
25
+
miette = { workspace = true }
26
+
rand = "0.9.2"
27
+
tokio-util = { workspace = true }
28
+
portable-pty = "0.9.0"
29
+
anyhow.workspace = true
30
+
itertools = "0.14.0"
31
+
enum_dispatch = "0.3.13"
32
+
sha2 = { workspace = true }
33
+
base64 = { workspace = true }
34
+
nix-compat = { workspace = true }
35
+
strip-ansi-escapes = "0.2.1"
36
+
aho-corasick = "1.1.4"
37
+
num_enum = "0.7.5"
38
+
gjson = "0.8.1"
39
+
owo-colors = { workspace = true }
40
+
termion = "4.0.6"
41
+
sqlx = { version = "0.8", features = ["runtime-tokio", "sqlite"] }
42
+
zstd = "0.13.3"
43
+
44
+
[dev-dependencies]
45
+
tempdir = "0.3"
46
+
47
+
[build-dependencies]
48
+
miette = { workspace = true }
49
+
syn = "2.0.109"
50
+
proc-macro2 = "1.0.103"
51
+
itertools = "0.14.0"
+206
crates/core/build.rs
+206
crates/core/build.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use miette::{Context, IntoDiagnostic as _, Result, miette};
5
+
use std::fmt::Write;
6
+
use std::{
7
+
env,
8
+
fmt::{self, Display, Formatter},
9
+
fs::{self},
10
+
path::Path,
11
+
};
12
+
13
+
use itertools::Itertools;
14
+
use proc_macro2::TokenTree;
15
+
use syn::{Expr, Item, ItemEnum, Lit, Meta, MetaList, MetaNameValue, parse_file};
16
+
17
+
macro_rules! p {
18
+
($($tokens: tt)*) => {
19
+
println!("cargo::warning={}", format!($($tokens)*))
20
+
}
21
+
}
22
+
23
+
#[derive(Debug)]
24
+
struct DerivedError {
25
+
code: Option<String>,
26
+
help: Option<String>,
27
+
message: Option<String>,
28
+
doc_string: String,
29
+
}
30
+
31
+
impl Display for DerivedError {
32
+
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
33
+
write!(
34
+
f,
35
+
"## `{code}` {{#{code}}}
36
+
37
+
{doc}
38
+
{message}
39
+
{help}",
40
+
doc = self.doc_string,
41
+
code = self.code.as_ref().unwrap(),
42
+
help = match &self.help {
43
+
Some(help) => format!(
44
+
"
45
+
::: tip HELP
46
+
{help}
47
+
:::"
48
+
),
49
+
None => String::new(),
50
+
},
51
+
message = match &self.message {
52
+
Some(message) => format!(
53
+
"
54
+
```txt [message]
55
+
{message}
56
+
```"
57
+
),
58
+
None => String::new(),
59
+
}
60
+
)
61
+
}
62
+
}
63
+
64
+
impl DerivedError {
65
+
fn get_error(&mut self, list: &MetaList) -> Result<(), miette::Error> {
66
+
if list.path.segments.last().unwrap().ident != "error" {
67
+
return Err(miette!("Not an error"));
68
+
}
69
+
70
+
self.message = Some(
71
+
list.tokens
72
+
.clone()
73
+
.into_iter()
74
+
.filter(|tok| matches!(tok, TokenTree::Literal(tok) if tok.to_string().starts_with('"')))
75
+
.map(|tok| tok.to_string())
76
+
.join(""),
77
+
);
78
+
79
+
Err(miette!("No error msg found"))
80
+
}
81
+
82
+
fn update_diagnostic(&mut self, list: &MetaList) -> Result<(), miette::Error> {
83
+
if list.path.segments.last().unwrap().ident != "diagnostic" {
84
+
return Err(miette!("Not a diagnostic"));
85
+
}
86
+
87
+
let vec: Vec<_> = list.tokens.clone().into_iter().collect();
88
+
89
+
// Find `diagnostic(code(x::y::z))`
90
+
let code: Option<String> = if let Some((_, TokenTree::Group(group))) =
91
+
vec.iter().tuple_windows().find(|(ident, group)| {
92
+
matches!(ident, TokenTree::Ident(ident) if ident == "code")
93
+
&& matches!(group, TokenTree::Group(..))
94
+
}) {
95
+
Some(group.stream().to_string().replace(' ', ""))
96
+
} else {
97
+
None
98
+
};
99
+
100
+
// Find `diagnostic(help("hi"))`
101
+
let help: Option<String> = if let Some((_, TokenTree::Group(group))) =
102
+
vec.iter().tuple_windows().find(|(ident, group)| {
103
+
matches!(ident, TokenTree::Ident(ident) if ident == "help")
104
+
&& matches!(group, TokenTree::Group(..))
105
+
}) {
106
+
Some(group.stream().to_string())
107
+
} else {
108
+
None
109
+
};
110
+
111
+
if let Some(code) = code {
112
+
self.code = Some(code);
113
+
self.help = help;
114
+
return Ok(());
115
+
}
116
+
117
+
Err(miette!("Had no code."))
118
+
}
119
+
120
+
fn update_from_list(&mut self, list: &MetaList) {
121
+
let _ = self.get_error(list);
122
+
let _ = self.update_diagnostic(list);
123
+
}
124
+
125
+
fn update_from_namevalue(&mut self, list: MetaNameValue) -> Result<(), miette::Error> {
126
+
if list.path.segments.last().unwrap().ident != "doc" {
127
+
return Err(miette!("Not a doc string"));
128
+
}
129
+
130
+
if let Expr::Lit(lit) = list.value
131
+
&& let Lit::Str(str) = lit.lit
132
+
{
133
+
let _ = write!(self.doc_string, "{}\n\n", &str.value()[1..]);
134
+
}
135
+
136
+
Ok(())
137
+
}
138
+
}
139
+
140
+
fn main() -> Result<()> {
141
+
println!("cargo:rerun-if-changed=src/errors.rs");
142
+
143
+
let manifest_dir = env::var("CARGO_MANIFEST_DIR").into_diagnostic()?;
144
+
let Ok(md_out_dir) = env::var("DIAGNOSTICS_MD_OUTPUT") else {
145
+
return Ok(());
146
+
};
147
+
148
+
let src_path = Path::new(&manifest_dir).join("src/errors.rs");
149
+
let src = fs::read_to_string(&src_path)
150
+
.into_diagnostic()
151
+
.wrap_err("reading errors.rs")?;
152
+
153
+
let syntax_tree = parse_file(&src)
154
+
.into_diagnostic()
155
+
.wrap_err("parsing errors.rs")?;
156
+
let mut entries: Vec<DerivedError> = Vec::new();
157
+
158
+
for item in &syntax_tree.items {
159
+
if let Item::Enum(ItemEnum { variants, .. }) = item {
160
+
for variant in variants {
161
+
let mut entry = DerivedError {
162
+
code: None,
163
+
help: None,
164
+
message: None,
165
+
doc_string: String::new(),
166
+
};
167
+
168
+
for attribute in variant.attrs.clone() {
169
+
match attribute.meta {
170
+
Meta::List(list) => {
171
+
entry.update_from_list(&list);
172
+
}
173
+
Meta::NameValue(nv) => {
174
+
let _ = entry.update_from_namevalue(nv);
175
+
}
176
+
Meta::Path(_) => {}
177
+
}
178
+
}
179
+
180
+
if entry.code.is_some() {
181
+
entries.push(entry);
182
+
}
183
+
}
184
+
}
185
+
}
186
+
187
+
fs::create_dir_all(Path::new(&md_out_dir))
188
+
.into_diagnostic()
189
+
.wrap_err("creating target directory")?;
190
+
fs::write(
191
+
Path::new(&md_out_dir).join("DIAGNOSTICS.md"),
192
+
entries
193
+
.iter()
194
+
.map(std::string::ToString::to_string)
195
+
.join("\n\n"),
196
+
)
197
+
.into_diagnostic()
198
+
.wrap_err("writing DIAGNOSTICS.md")?;
199
+
200
+
p!(
201
+
"wrote to {:?}",
202
+
Path::new(&md_out_dir).join("DIAGNOSTICS.md")
203
+
);
204
+
205
+
Ok(())
206
+
}
+13
crates/core/src/cache/migrations/20251124234730_init.sql
+13
crates/core/src/cache/migrations/20251124234730_init.sql
···
1
+
create table hive_inspection (
2
+
id integer primary key autoincrement,
3
+
json_value text not null unique
4
+
) strict;
5
+
6
+
create table cached_inspection (
7
+
store_path text,
8
+
hash text,
9
+
10
+
inspection_id integer references hive_inspection(id) not null,
11
+
12
+
primary key (store_path, hash)
13
+
) strict;
+16
crates/core/src/cache/migrations/20251126222409_blobs.sql
+16
crates/core/src/cache/migrations/20251126222409_blobs.sql
···
1
+
create table inspection_blobs (
2
+
id integer primary key autoincrement,
3
+
json_value blob not null unique,
4
+
schema_version integer not null
5
+
) strict;
6
+
7
+
create table inspection_cache (
8
+
store_path text,
9
+
hash text,
10
+
blob_id integer references inspection_blobs (id) not null,
11
+
primary key (store_path, hash)
12
+
) strict;
13
+
14
+
drop table cached_inspection;
15
+
16
+
drop table hive_inspection;
+237
crates/core/src/cache/mod.rs
+237
crates/core/src/cache/mod.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::{
5
+
env,
6
+
path::{Path, PathBuf},
7
+
};
8
+
9
+
use sqlx::{
10
+
Pool, Sqlite,
11
+
migrate::Migrator,
12
+
sqlite::{SqliteConnectOptions, SqlitePoolOptions},
13
+
};
14
+
use tokio::fs::create_dir_all;
15
+
use tracing::{debug, error, trace};
16
+
17
+
use crate::hive::{FlakePrefetch, Hive};
18
+
19
+
#[derive(Clone)]
20
+
pub struct InspectionCache {
21
+
pool: Pool<Sqlite>,
22
+
}
23
+
24
+
static MIGRATOR: Migrator = sqlx::migrate!("src/cache/migrations");
25
+
26
+
async fn get_cache_directory() -> Option<PathBuf> {
27
+
let home = PathBuf::from(
28
+
env::var("HOME")
29
+
.inspect_err(|_| error!("HOME env var not found"))
30
+
.ok()?,
31
+
);
32
+
33
+
trace!(home = ?home);
34
+
35
+
let cache_home = env::var("XDG_CACHE_HOME")
36
+
.inspect_err(|_| debug!("XDG_CACHE_HOME not found"))
37
+
.ok()
38
+
.map(PathBuf::from)
39
+
.unwrap_or(home.join(".cache"));
40
+
41
+
let cache_directory = cache_home.join("wire");
42
+
43
+
trace!(cache_directory = ?cache_directory);
44
+
45
+
let _ = create_dir_all(&cache_directory).await;
46
+
47
+
Some(cache_directory)
48
+
}
49
+
50
+
impl InspectionCache {
51
+
pub async fn new() -> Option<Self> {
52
+
let cache_path = get_cache_directory().await?.join("inspect.db");
53
+
debug!(cache_path = ?cache_path);
54
+
55
+
let pool = SqlitePoolOptions::new()
56
+
.max_connections(1)
57
+
.connect_with(
58
+
SqliteConnectOptions::new()
59
+
.filename(cache_path)
60
+
.create_if_missing(true),
61
+
)
62
+
.await
63
+
.inspect_err(|x| error!("failed to open cache db: {x}"))
64
+
.ok()?;
65
+
66
+
MIGRATOR
67
+
.run(&pool)
68
+
.await
69
+
.inspect_err(|err| error!("failed to run cache migrations: {err:?}"))
70
+
.ok()?;
71
+
72
+
Some(Self { pool })
73
+
}
74
+
75
+
fn cache_invalid(store_path: &String) -> bool {
76
+
let path = Path::new(store_path);
77
+
78
+
// possible TOCTOU
79
+
!path.exists()
80
+
}
81
+
82
+
pub async fn get_hive(&self, prefetch: &FlakePrefetch) -> Option<Hive> {
83
+
struct Query {
84
+
json_value: Vec<u8>,
85
+
store_path: String,
86
+
}
87
+
88
+
let cached_blob = sqlx::query_as!(
89
+
Query,
90
+
"
91
+
select
92
+
inspection_blobs.json_value,
93
+
inspection_cache.store_path
94
+
from
95
+
inspection_blobs
96
+
join inspection_cache on inspection_cache.blob_id = inspection_blobs.id
97
+
where
98
+
inspection_cache.store_path = $1
99
+
and inspection_cache.hash = $2
100
+
and inspection_blobs.schema_version = $3
101
+
limit
102
+
1
103
+
",
104
+
prefetch.store_path,
105
+
prefetch.hash,
106
+
Hive::SCHEMA_VERSION
107
+
)
108
+
.fetch_optional(&self.pool)
109
+
.await
110
+
.inspect_err(|x| error!("failed to fetch cached hive: {x}"))
111
+
.ok()??;
112
+
113
+
// the cached path may of been garbage collected, discard it
114
+
// it is quite hard to replicate this bug but its occurred to me
115
+
// atleast once
116
+
if Self::cache_invalid(&cached_blob.store_path) {
117
+
trace!("discarding cache that does not exist in the nix store");
118
+
return None;
119
+
}
120
+
121
+
trace!(
122
+
"read {} bytes of zstd data from cache",
123
+
cached_blob.json_value.len()
124
+
);
125
+
126
+
let json_string = zstd::decode_all(cached_blob.json_value.as_slice())
127
+
.inspect_err(|err| error!("failed to decode cached zstd data: {err}"))
128
+
.ok()?;
129
+
130
+
trace!(
131
+
"inflated {} > {} in decoding",
132
+
cached_blob.json_value.len(),
133
+
json_string.len()
134
+
);
135
+
136
+
serde_json::from_slice(&json_string)
137
+
.inspect_err(|err| {
138
+
error!("could not use cached evaluation: {err}");
139
+
})
140
+
.ok()
141
+
}
142
+
143
+
pub async fn store_hive(&self, prefetch: &FlakePrefetch, json_value: &String) {
144
+
let Ok(json_value) = zstd::encode_all(json_value.as_bytes(), 0)
145
+
.inspect_err(|err| error!("failed to encode data w/ zstd: {err}"))
146
+
else {
147
+
return;
148
+
};
149
+
150
+
let hive_inspection = sqlx::query_scalar!(
151
+
"
152
+
insert into inspection_blobs (json_value, schema_version)
153
+
values ($1, $2)
154
+
on conflict(json_value)
155
+
do update set json_value = excluded.json_value
156
+
returning inspection_blobs.id
157
+
",
158
+
json_value,
159
+
Hive::SCHEMA_VERSION
160
+
)
161
+
.fetch_one(&self.pool)
162
+
.await
163
+
.inspect_err(|x| error!("could not insert hive_inspection: {x}"));
164
+
165
+
let Ok(blob_id) = hive_inspection else {
166
+
return;
167
+
};
168
+
169
+
let cached_inspection = sqlx::query!(
170
+
"
171
+
insert into
172
+
inspection_cache (store_path, hash, blob_id)
173
+
values
174
+
($1, $2, $3)
175
+
",
176
+
prefetch.store_path,
177
+
prefetch.hash,
178
+
blob_id
179
+
)
180
+
.execute(&self.pool)
181
+
.await;
182
+
183
+
if let Err(err) = cached_inspection {
184
+
error!("could not insert cached_inspection: {err}");
185
+
}
186
+
}
187
+
188
+
pub async fn gc(&self) -> Result<(), sqlx::Error> {
189
+
// keep newest 30 AND
190
+
// delete caches that refer to a blob w/ wrong schema
191
+
sqlx::query!(
192
+
"delete from inspection_cache
193
+
where
194
+
blob_id in (
195
+
select
196
+
id
197
+
from
198
+
inspection_blobs
199
+
where
200
+
schema_version != $1
201
+
)
202
+
or ROWID in (
203
+
select
204
+
ROWID
205
+
from
206
+
inspection_cache
207
+
order by
208
+
ROWID desc
209
+
limit
210
+
-1
211
+
offset
212
+
30
213
+
)",
214
+
Hive::SCHEMA_VERSION
215
+
)
216
+
.execute(&self.pool)
217
+
.await?;
218
+
219
+
// delete orphaned blobs
220
+
sqlx::query!(
221
+
"delete from inspection_blobs
222
+
where
223
+
not exists (
224
+
select
225
+
1
226
+
from
227
+
inspection_cache
228
+
where
229
+
inspection_cache.blob_id = inspection_blobs.id
230
+
)"
231
+
)
232
+
.execute(&self.pool)
233
+
.await?;
234
+
235
+
Ok(())
236
+
}
237
+
}
+74
crates/core/src/commands/builder.rs
+74
crates/core/src/commands/builder.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt;
5
+
6
+
/// Incrementally builds a shell command string from whitespace-trimmed parts.
pub(crate) struct CommandStringBuilder {
    // The accumulated command; parts are joined with single spaces.
    command: String,
}

impl CommandStringBuilder {
    /// Shorthand for a builder whose first word is the `nix` executable.
    pub(crate) fn nix() -> Self {
        Self::new("nix")
    }

    /// Creates a builder whose first word is `s`, trimmed of surrounding
    /// whitespace.
    pub(crate) fn new<S: AsRef<str>>(s: S) -> Self {
        Self {
            command: s.as_ref().trim().to_string(),
        }
    }

    /// Appends a single argument, trimmed of surrounding whitespace.
    ///
    /// Empty (or whitespace-only) arguments are ignored: appending them would
    /// leave a dangling space in the rendered command string.
    pub(crate) fn arg<S: AsRef<str>>(&mut self, argument: S) {
        let argument = argument.as_ref().trim();

        if argument.is_empty() {
            return;
        }

        self.command.push(' ');
        self.command.push_str(argument);
    }

    /// Appends `argument` only when `opt` is true.
    pub(crate) fn opt_arg<S: AsRef<str>>(&mut self, opt: bool, argument: S) {
        if opt {
            self.arg(argument);
        }
    }

    /// Appends every argument in `arguments`, in order.
    pub(crate) fn args<S: AsRef<str>>(&mut self, arguments: &[S]) {
        for arg in arguments {
            self.arg(arg);
        }
    }
}

impl fmt::Display for CommandStringBuilder {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.command)
    }
}

impl AsRef<str> for CommandStringBuilder {
    fn as_ref(&self) -> &str {
        &self.command
    }
}
55
+
56
+
#[cfg(test)]
mod tests {
    use super::*;

    /// Arguments are trimmed, joined with single spaces, and `opt_arg` only
    /// appends when its flag is set; `Display` and `AsRef` agree.
    #[test]
    fn command_builder() {
        let mut builder = CommandStringBuilder::new("a");
        builder.arg(" b ");
        builder.args(&[" c ", "d", "e"]);
        builder.opt_arg(false, "f");
        builder.opt_arg(true, "g");

        let rendered = builder.to_string();
        let borrowed: &str = builder.as_ref();

        assert_eq!(rendered, borrowed);
        assert_eq!(rendered, "a b c d e g");
    }
}
+177
crates/core/src/commands/common.rs
+177
crates/core/src/commands/common.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::collections::HashMap;
5
+
6
+
use tracing::instrument;
7
+
8
+
use crate::{
9
+
EvalGoal, SubCommandModifiers,
10
+
commands::{
11
+
CommandArguments, Either, WireCommandChip, builder::CommandStringBuilder, run_command,
12
+
run_command_with_env,
13
+
},
14
+
errors::{CommandError, HiveInitialisationError, HiveLibError},
15
+
hive::{
16
+
HiveLocation,
17
+
node::{Context, Objective, Push},
18
+
},
19
+
};
20
+
21
+
fn get_common_copy_path_help(error: &CommandError) -> Option<String> {
22
+
if let CommandError::CommandFailed { logs, .. } = error
23
+
&& (logs.contains("error: unexpected end-of-file"))
24
+
{
25
+
Some("wire requires the deploying user or wire binary cache is trusted on the remote server. if you're attempting to make that change, skip keys with --no-keys. please read https://wire.althaea.zone/guides/keys for more information".to_string())
26
+
} else {
27
+
None
28
+
}
29
+
}
30
+
31
+
/// Pushes a derivation or built store path to `context.node`'s target over
/// SSH using `nix copy`.
///
/// SSH options are handed to the underlying `ssh` invocation through the
/// `NIX_SSHOPTS` environment variable, as `nix copy` expects.
///
/// # Errors
/// Returns [`HiveLibError::NixCopyError`] when the copy fails, attaching a
/// trust/keys hint (see `get_common_copy_path_help`) when the logs match a
/// known signature.
pub async fn push(context: &Context<'_>, push: Push<'_>) -> Result<(), HiveLibError> {
    let mut command_string = CommandStringBuilder::nix();

    command_string.args(&["--extra-experimental-features", "nix-command", "copy"]);
    if let Objective::Apply(apply_objective) = context.objective {
        // substitution-on-destination is only configurable for apply runs
        command_string.opt_arg(
            apply_objective.substitute_on_destination,
            "--substitute-on-destination",
        );
    }
    command_string.arg("--to");
    command_string.args(&[
        format!(
            "ssh://{user}@{host}",
            user = context.node.target.user,
            host = context.node.target.get_preferred_host()?,
        ),
        match push {
            // copying a .drv itself requires the --derivation flag
            Push::Derivation(drv) => format!("{drv} --derivation"),
            Push::Path(path) => path.clone(),
        },
    ]);

    let child = run_command_with_env(
        &CommandArguments::new(command_string, context.modifiers)
            .mode(crate::commands::ChildOutputMode::Nix),
        HashMap::from([(
            "NIX_SSHOPTS".into(),
            context
                .node
                .target
                .create_ssh_opts(context.modifiers, false)?,
        )]),
    )
    .await?;

    let status = child.wait_till_success().await;

    // derive the help hint from the error before `status` is consumed below
    let help = if let Err(ref error) = status {
        get_common_copy_path_help(error).map(Box::new)
    } else {
        None
    };

    status.map_err(|error| HiveLibError::NixCopyError {
        name: context.name.clone(),
        path: push.to_string(),
        error: Box::new(error),
        help,
    })?;

    Ok(())
}
84
+
85
+
fn get_common_command_help(error: &CommandError) -> Option<String> {
86
+
if let CommandError::CommandFailed { logs, .. } = error
87
+
// marshmallow: your using this repo as a hive you idiot
88
+
&& (logs.contains("attribute 'inspect' missing")
89
+
// using a flake that does not provide `wire`
90
+
|| logs.contains("does not provide attribute 'packages.x86_64-linux.wire'")
91
+
// using a file called `hive.nix` that is not actually a hive
92
+
|| logs.contains("attribute 'inspect' in selection path"))
93
+
{
94
+
Some("Double check this `--path` or `--flake` is a wire hive. You may be pointing to the wrong directory.".to_string())
95
+
} else {
96
+
None
97
+
}
98
+
}
99
+
100
+
/// Returns the list of node names defined by the hive at `location`.
///
/// Evaluates the hive's `names` attribute and deserializes the resulting
/// JSON array of strings.
///
/// # Errors
/// Propagates evaluation failures; wraps JSON deserialization failures in
/// [`HiveInitialisationError::ParseEvaluateError`].
pub async fn get_hive_node_names(
    location: &HiveLocation,
    modifiers: SubCommandModifiers,
) -> Result<Vec<String>, HiveLibError> {
    let output = evaluate_hive_attribute(location, &EvalGoal::Names, modifiers).await?;
    serde_json::from_str(&output).map_err(|err| {
        HiveLibError::HiveInitialisationError(HiveInitialisationError::ParseEvaluateError(err))
    })
}
109
+
110
+
/// Evaluates the hive in flakeref with regards to the given goal,
/// and returns stdout.
///
/// Builds a `nix eval --json` invocation whose attribute expression depends
/// on where the hive lives:
/// - flake: evaluates `<uri>#wire`, applying a lambda that selects the goal
///   (`hive.inspect`, `hive.names`, or `hive.topLevels.<node>`);
/// - `hive.nix` file: passes `--file <path>` and selects the bare attribute.
///
/// # Errors
/// Returns [`HiveLibError::NixEvalError`] on failure, attaching a "is this
/// actually a wire hive?" hint when the logs match known signatures.
#[instrument(ret(level = tracing::Level::TRACE), skip_all)]
pub async fn evaluate_hive_attribute(
    location: &HiveLocation,
    goal: &EvalGoal<'_>,
    modifiers: SubCommandModifiers,
) -> Result<String, HiveLibError> {
    let attribute = match location {
        HiveLocation::Flake { uri, .. } => {
            format!(
                "{uri}#wire --apply \"hive: {}\"",
                match goal {
                    EvalGoal::Inspect => "hive.inspect".to_string(),
                    EvalGoal::Names => "hive.names".to_string(),
                    EvalGoal::GetTopLevel(node) => format!("hive.topLevels.{node}"),
                }
            )
        }
        HiveLocation::HiveNix(path) => {
            format!(
                "--file {} {}",
                &path.to_string_lossy(),
                match goal {
                    EvalGoal::Inspect => "inspect".to_string(),
                    EvalGoal::Names => "names".to_string(),
                    EvalGoal::GetTopLevel(node) => format!("topLevels.{node}"),
                }
            )
        }
    };

    let mut command_string = CommandStringBuilder::nix();
    command_string.args(&[
        "--extra-experimental-features",
        "nix-command",
        "--extra-experimental-features",
        "flakes",
        "eval",
        "--json",
    ]);
    command_string.opt_arg(modifiers.show_trace, "--show-trace");
    command_string.arg(&attribute);

    let child = run_command(
        &CommandArguments::new(command_string, modifiers)
            .mode(crate::commands::ChildOutputMode::Nix),
    )
    .await?;

    let status = child.wait_till_success().await;

    // derive the help hint from the error before `status` is consumed below
    let help = if let Err(ref error) = status {
        get_common_command_help(error).map(Box::new)
    } else {
        None
    };

    status
        .map_err(|source| HiveLibError::NixEvalError {
            attribute,
            source,
            help,
        })
        // either runner variant carries (exit status, stdout); keep stdout
        .map(|x| match x {
            Either::Left((_, stdout)) | Either::Right((_, stdout)) => stdout,
        })
}
+240
crates/core/src/commands/mod.rs
+240
crates/core/src/commands/mod.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use crate::commands::pty::{InteractiveChildChip, interactive_command_with_env};
5
+
use std::{collections::HashMap, str::from_utf8, sync::LazyLock};
6
+
7
+
use aho_corasick::AhoCorasick;
8
+
use gjson::Value;
9
+
use itertools::Itertools;
10
+
use nix_compat::log::{AT_NIX_PREFIX, VerbosityLevel};
11
+
use num_enum::TryFromPrimitive;
12
+
use tracing::{debug, error, info, trace, warn};
13
+
14
+
use crate::{
15
+
SubCommandModifiers,
16
+
commands::noninteractive::{NonInteractiveChildChip, non_interactive_command_with_env},
17
+
errors::{CommandError, HiveLibError},
18
+
hive::node::{Node, Target},
19
+
};
20
+
21
+
pub(crate) mod builder;
22
+
pub mod common;
23
+
pub(crate) mod noninteractive;
24
+
pub(crate) mod pty;
25
+
26
+
/// How a child process's output should be interpreted and traced.
#[derive(Copy, Clone, Debug)]
pub(crate) enum ChildOutputMode {
    /// Nix's `--log-format internal-json` stream: lines after the `@nix`
    /// prefix are parsed as JSON and re-logged at their embedded level.
    Nix,
    /// Plain output, logged line-by-line.
    Generic,
    /// An interactive command; the PTY runner defers the start needle for
    /// these (see `create_starting_segment` / `create_ending_segment`).
    Interactive,
}
32
+
33
+
/// A minimal two-variant sum type (akin to `either::Either`), used here to
/// unify the interactive (PTY) and non-interactive child-process runners.
#[derive(Debug)]
pub enum Either<L, R> {
    Left(L),
    Right(R),
}
38
+
39
+
/// Everything needed to spawn a wire-managed child process.
/// Construct with [`CommandArguments::new`] and refine with the builder-style
/// methods (`mode`, `elevated`, `execute_on_remote`, ...).
#[derive(Debug)]
pub(crate) struct CommandArguments<'t, S: AsRef<str>> {
    // Global subcommand flags (e.g. forcing non-interactive execution).
    modifiers: SubCommandModifiers,
    // When set, the command runs on this remote target over SSH.
    target: Option<&'t Target>,
    // How the child's output stream is parsed/logged.
    output_mode: ChildOutputMode,
    // The shell command to execute.
    command_string: S,
    // Keep the child's stdin open after startup (for later `write_stdin`).
    keep_stdin_open: bool,
    // When set, the command is wrapped with this privilege escalation prefix.
    privilege_escalation_command: Option<String>,
    // Also trace the child's stdout (stderr is always traced).
    log_stdout: bool,
}
49
+
50
+
// Shared matcher locating the `AT_NIX_PREFIX` marker (which precedes Nix
// internal-json log payloads) inside an output line. Built once lazily since
// automaton construction is comparatively expensive.
static AHO_CORASICK: LazyLock<AhoCorasick> = LazyLock::new(|| {
    AhoCorasick::builder()
        .ascii_case_insensitive(false)
        .match_kind(aho_corasick::MatchKind::LeftmostFirst)
        .build([AT_NIX_PREFIX])
        .unwrap()
});
57
+
58
+
impl<'a, S: AsRef<str>> CommandArguments<'a, S> {
    /// Creates arguments with conservative defaults: local execution, no
    /// elevation, stdin closed after startup, generic output parsing.
    pub(crate) const fn new(command_string: S, modifiers: SubCommandModifiers) -> Self {
        Self {
            command_string,
            keep_stdin_open: false,
            privilege_escalation_command: None,
            log_stdout: false,
            target: None,
            output_mode: ChildOutputMode::Generic,
            modifiers,
        }
    }

    /// Runs the command on `target` over SSH when `Some`; locally when `None`.
    pub(crate) const fn execute_on_remote(mut self, target: Option<&'a Target>) -> Self {
        self.target = target;
        self
    }

    /// Sets how the child's output is parsed and traced.
    pub(crate) const fn mode(mut self, mode: ChildOutputMode) -> Self {
        self.output_mode = mode;
        self
    }

    /// Keeps the child's stdin open so data can be written to it later.
    pub(crate) const fn keep_stdin_open(mut self) -> Self {
        self.keep_stdin_open = true;
        self
    }

    /// Wraps the command with `node`'s privilege escalation command
    /// (its parts joined into a single space-separated string).
    pub(crate) fn elevated(mut self, node: &Node) -> Self {
        self.privilege_escalation_command =
            Some(node.privilege_escalation_command.iter().join(" "));
        self
    }

    /// Whether a privilege escalation command has been configured.
    pub(crate) const fn is_elevated(&self) -> bool {
        self.privilege_escalation_command.is_some()
    }

    /// Also traces the child's stdout (stderr is always traced).
    pub(crate) const fn log_stdout(mut self) -> Self {
        self.log_stdout = true;
        self
    }
}
101
+
102
+
/// Runs a command with no extra environment variables.
/// See [`run_command_with_env`] for how the runner is selected.
pub(crate) async fn run_command<S: AsRef<str>>(
    arguments: &CommandArguments<'_, S>,
) -> Result<Either<InteractiveChildChip, NonInteractiveChildChip>, HiveLibError> {
    run_command_with_env(arguments, HashMap::new()).await
}
107
+
108
+
pub(crate) async fn run_command_with_env<S: AsRef<str>>(
109
+
arguments: &CommandArguments<'_, S>,
110
+
envs: HashMap<String, String>,
111
+
) -> Result<Either<InteractiveChildChip, NonInteractiveChildChip>, HiveLibError> {
112
+
// use the non interactive command runner when forced
113
+
// ... or when there is no reason for interactivity, local and unprivileged
114
+
if arguments.modifiers.non_interactive
115
+
|| (arguments.target.is_none() && !arguments.is_elevated())
116
+
{
117
+
return Ok(Either::Right(non_interactive_command_with_env(
118
+
arguments, envs,
119
+
)?));
120
+
}
121
+
122
+
Ok(Either::Left(
123
+
interactive_command_with_env(arguments, envs).await?,
124
+
))
125
+
}
126
+
127
+
/// Common interface over the interactive (PTY) and non-interactive child
/// process handles.
pub(crate) trait WireCommandChip {
    /// Value produced by a successful wait (for the implementations here:
    /// exit status paired with captured stdout).
    type ExitStatus;

    /// Waits for the child to finish; a non-success exit becomes a
    /// [`CommandError`] describing the failure.
    async fn wait_till_success(self) -> Result<Self::ExitStatus, CommandError>;
    /// Writes raw bytes to the child's stdin.
    async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError>;
}
133
+
134
+
/// (exit status, captured stdout), tagged by which runner produced it.
type ExitStatus = Either<(portable_pty::ExitStatus, String), (std::process::ExitStatus, String)>;

/// Dispatches each chip method to whichever runner variant is held.
impl WireCommandChip for Either<InteractiveChildChip, NonInteractiveChildChip> {
    type ExitStatus = ExitStatus;

    async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError> {
        match self {
            Self::Left(left) => left.write_stdin(data).await,
            Self::Right(right) => right.write_stdin(data).await,
        }
    }

    async fn wait_till_success(self) -> Result<Self::ExitStatus, CommandError> {
        match self {
            // re-wrap so the caller can tell which runner produced the status
            Self::Left(left) => left.wait_till_success().await.map(Either::Left),
            Self::Right(right) => right.wait_till_success().await.map(Either::Right),
        }
    }
}
153
+
154
+
/// Re-emits a parsed Nix JSON log message at the tracing level encoded in
/// the log's `level` field.
///
/// Returns the ANSI-stripped message for levels worth keeping as error
/// context (`Error`, `Warn`, `Notice`); returns `None` otherwise, or when
/// the message is empty / carries no `level` field.
fn trace_gjson_str<'a>(log: &'a Value<'a>, msg: &'a str) -> Option<String> {
    if msg.is_empty() {
        return None;
    }

    let level = log.get("level");

    if !level.exists() {
        return None;
    }

    // `level` is a raw integer in the JSON; map it onto Nix's verbosity enum
    let level = match VerbosityLevel::try_from_primitive(level.u64()) {
        Ok(level) => level,
        Err(err) => {
            error!("nix log `level` did not match to a VerbosityLevel: {err:?}");
            return None;
        }
    };

    let msg = strip_ansi_escapes::strip_str(msg);

    match level {
        VerbosityLevel::Info => info!("{msg}"),
        VerbosityLevel::Warn | VerbosityLevel::Notice => warn!("{msg}"),
        VerbosityLevel::Error => error!("{msg}"),
        VerbosityLevel::Debug => debug!("{msg}"),
        VerbosityLevel::Vomit | VerbosityLevel::Talkative | VerbosityLevel::Chatty => {
            trace!("{msg}");
        }
    }

    if matches!(
        level,
        VerbosityLevel::Error | VerbosityLevel::Warn | VerbosityLevel::Notice
    ) {
        return Some(msg);
    }

    None
}
194
+
195
+
impl ChildOutputMode {
    /// this function is by far the biggest hotspot in the whole tree
    /// Returns a string if this log is notable to be stored as an error message
    ///
    /// `Generic`/`Interactive`: the whole line is logged (ANSI-stripped) and
    /// returned. `Nix`: everything after the `AT_NIX_PREFIX` marker is parsed
    /// as JSON and its `text` or `msg` field is traced via [`trace_gjson_str`].
    ///
    /// NOTE(review): the Generic branch logs the stripped text but returns
    /// the *unstripped* line — confirm stored error context is meant to keep
    /// ANSI escape codes.
    fn trace_slice(self, line: &mut [u8]) -> Option<String> {
        let slice = match self {
            Self::Generic | Self::Interactive => {
                let string = String::from_utf8_lossy(line);
                let stripped = strip_ansi_escapes::strip_str(&string);
                warn!("{stripped}");
                return Some(string.to_string());
            }
            Self::Nix => {
                // slice off everything up to and including the `@nix` marker
                let position = AHO_CORASICK.find(&line).map(|x| &mut line[x.end()..]);

                if let Some(json_buf) = position {
                    json_buf
                } else {
                    // usually happens when ssh is outputting something
                    warn!("{}", String::from_utf8_lossy(line));
                    return None;
                }
            }
        };

        let Ok(str) = from_utf8(slice) else {
            error!("nix log was not valid utf8!");
            return None;
        };

        let log = gjson::parse(str);

        // some messages carry `text`, others `msg`; try both in that order
        let text = log.get("text");

        if text.exists() {
            return trace_gjson_str(&log, text.str());
        }

        let text = log.get("msg");

        if text.exists() {
            return trace_gjson_str(&log, text.str());
        }

        None
    }
}
+199
crates/core/src/commands/noninteractive.rs
+199
crates/core/src/commands/noninteractive.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::{
5
+
collections::{HashMap, VecDeque},
6
+
process::ExitStatus,
7
+
sync::Arc,
8
+
};
9
+
10
+
use crate::{
11
+
SubCommandModifiers,
12
+
commands::{ChildOutputMode, CommandArguments, WireCommandChip},
13
+
errors::{CommandError, HiveLibError},
14
+
hive::node::Target,
15
+
};
16
+
use itertools::Itertools;
17
+
use tokio::{
18
+
io::{AsyncWriteExt, BufReader},
19
+
process::{Child, ChildStdin, Command},
20
+
sync::Mutex,
21
+
task::JoinSet,
22
+
};
23
+
use tracing::{Instrument, debug, instrument, trace};
24
+
25
+
/// Handle to a child spawned without a PTY (plain pipes for stdio).
pub(crate) struct NonInteractiveChildChip {
    // Notable stderr lines, newest first (capped at 20 by `handle_io`).
    error_collection: Arc<Mutex<VecDeque<String>>>,
    // All stdout lines, newest first.
    stdout_collection: Arc<Mutex<VecDeque<String>>>,
    child: Child,
    // Tasks pumping the child's stdout/stderr into the collections above.
    joinset: JoinSet<()>,
    // The command as given, kept for error reporting.
    original_command: String,
    stdin: ChildStdin,
}
33
+
34
+
/// Spawns `arguments.command_string` without a PTY: locally via `sh -c`, or
/// remotely by handing the string to an `ssh` invocation.
///
/// Nix-mode commands get ` --log-format internal-json` appended so their
/// output can be parsed; elevated commands are wrapped as
/// `<escalation> sh -c '<command>'`.
///
/// NOTE(review): the escalated form single-quotes the command without
/// escaping embedded `'` — confirm command strings never contain one.
#[instrument(skip_all, name = "run", fields(elevated = %arguments.is_elevated()))]
pub(crate) fn non_interactive_command_with_env<S: AsRef<str>>(
    arguments: &CommandArguments<S>,
    envs: HashMap<String, String>,
) -> Result<NonInteractiveChildChip, HiveLibError> {
    let mut command = if let Some(target) = arguments.target {
        create_sync_ssh_command(target, arguments.modifiers)?
    } else {
        let mut command = Command::new("sh");

        command.arg("-c");

        command
    };

    let command_string = format!(
        "{command_string}{extra}",
        command_string = arguments.command_string.as_ref(),
        extra = match arguments.output_mode {
            ChildOutputMode::Generic | ChildOutputMode::Interactive => "",
            ChildOutputMode::Nix => " --log-format internal-json",
        }
    );

    let command_string = if let Some(escalation_command) = &arguments.privilege_escalation_command {
        format!("{escalation_command} sh -c '{command_string}'")
    } else {
        command_string
    };

    debug!("{command_string}");

    command.arg(&command_string);
    command.stdin(std::process::Stdio::piped());
    command.stderr(std::process::Stdio::piped());
    command.stdout(std::process::Stdio::piped());
    // ensure the child dies with us rather than outliving a cancelled run
    command.kill_on_drop(true);
    // NOTE(review): env_clear() left disabled — confirm the child is meant
    // to inherit the parent's full environment.
    // command.env_clear();
    command.envs(envs);

    // NOTE(review): spawn failure (e.g. missing `sh`/`ssh` binary) panics
    // here instead of surfacing as a HiveLibError — consider mapping it.
    let mut child = command.spawn().unwrap();
    let error_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
    let stdout_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
    // stdin was configured as piped above, so take() cannot return None
    let stdin = child.stdin.take().unwrap();

    let stdout_handle = child
        .stdout
        .take()
        .ok_or(HiveLibError::CommandError(CommandError::NoHandle))?;
    let stderr_handle = child
        .stderr
        .take()
        .ok_or(HiveLibError::CommandError(CommandError::NoHandle))?;

    let mut joinset = JoinSet::new();
    let output_mode = Arc::new(arguments.output_mode);

    // stderr: always traced, notable lines kept as error context
    joinset.spawn(
        handle_io(
            stderr_handle,
            output_mode.clone(),
            error_collection.clone(),
            true,
            true,
        )
        .in_current_span(),
    );
    // stdout: always collected, traced only when requested
    joinset.spawn(
        handle_io(
            stdout_handle,
            output_mode.clone(),
            stdout_collection.clone(),
            false,
            arguments.log_stdout,
        )
        .in_current_span(),
    );

    Ok(NonInteractiveChildChip {
        error_collection,
        stdout_collection,
        child,
        joinset,
        original_command: arguments.command_string.as_ref().to_string(),
        stdin,
    })
}
121
+
122
+
impl WireCommandChip for NonInteractiveChildChip {
    /// Exit status paired with the child's full stdout.
    type ExitStatus = (ExitStatus, String);

    async fn wait_till_success(mut self) -> Result<Self::ExitStatus, CommandError> {
        // NOTE(review): wait() failure panics rather than becoming a
        // CommandError — confirm that's acceptable here.
        let status = self.child.wait().await.unwrap();
        // let the io pumps drain whatever is left of stdout/stderr first
        let _ = self.joinset.join_all().await;

        if !status.success() {
            // collections are newest-first; reverse to chronological order
            let logs = self.error_collection.lock().await.iter().rev().join("\n");

            return Err(CommandError::CommandFailed {
                command_ran: self.original_command,
                logs,
                code: match status.code() {
                    Some(code) => format!("code {code}"),
                    // no code: e.g. terminated by a signal
                    None => "no exit code".to_string(),
                },
                reason: "known-status",
            });
        }

        let stdout = self.stdout_collection.lock().await.iter().rev().join("\n");

        Ok((status, stdout))
    }

    async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError> {
        trace!("Writing {} bytes", data.len());
        // NOTE(review): write failure panics despite the Result return type
        self.stdin.write_all(&data).await.unwrap();
        Ok(())
    }
}
154
+
155
+
/// Pumps one of the child's output streams line-by-line into `collection`.
///
/// stdout (`is_error == false`): every line is stored newest-first,
/// unbounded, since callers need the complete output.
/// stderr (`is_error == true`): only lines `trace_slice` deems notable are
/// stored, capped at the 20 newest, as context for error reporting.
/// Lines are traced only when `should_log` is set.
#[instrument(skip_all, name = "log")]
pub async fn handle_io<R>(
    reader: R,
    output_mode: Arc<ChildOutputMode>,
    collection: Arc<Mutex<VecDeque<String>>>,
    is_error: bool,
    should_log: bool,
) where
    R: tokio::io::AsyncRead + Unpin,
{
    let mut io_reader = tokio::io::AsyncBufReadExt::lines(BufReader::new(reader));

    // NOTE(review): next_line() errors (invalid UTF-8, I/O failure) panic here
    while let Some(line) = io_reader.next_line().await.unwrap() {
        let mut line = line.into_bytes();

        let log = if should_log {
            Some(output_mode.trace_slice(&mut line))
        } else {
            None
        };

        if !is_error {
            let mut queue = collection.lock().await;
            queue.push_front(String::from_utf8_lossy(&line).to_string());
        } else if let Some(error_msg) = log.flatten() {
            let mut queue = collection.lock().await;
            queue.push_front(error_msg);
            // add at most 20 message to the front, drop the rest.
            queue.truncate(20);
        }
    }

    debug!("io_handler: goodbye!");
}
189
+
190
+
fn create_sync_ssh_command(
191
+
target: &Target,
192
+
modifiers: SubCommandModifiers,
193
+
) -> Result<Command, HiveLibError> {
194
+
let mut command = Command::new("ssh");
195
+
command.args(target.create_ssh_args(modifiers, true, false)?);
196
+
command.arg(target.get_preferred_host()?.to_string());
197
+
198
+
Ok(command)
199
+
}
+102
crates/core/src/commands/pty/input.rs
+102
crates/core/src/commands/pty/input.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::os::fd::{AsFd, OwnedFd};
5
+
6
+
use nix::{
7
+
poll::{PollFd, PollFlags, PollTimeout, poll},
8
+
unistd::read,
9
+
};
10
+
use tracing::{Span, debug, error, instrument, trace};
11
+
12
+
use crate::{
13
+
commands::pty::{MasterWriter, THREAD_BEGAN_SIGNAL, THREAD_QUIT_SIGNAL},
14
+
errors::CommandError,
15
+
};
16
+
17
+
/// Exits on any data written to `cancel_pipe_r`
/// A pipe is used to cancel the function.
///
/// Forwards bytes into the PTY master from two sources, multiplexed with
/// `poll(2)`:
/// - the user's real stdin (until `THREAD_BEGAN_SIGNAL` removes it),
/// - an internal pipe (`write_pipe_r`) fed by `write_stdin`.
/// The signal pipe controls lifetime: `THREAD_QUIT_SIGNAL` returns, while
/// `THREAD_BEGAN_SIGNAL` stops forwarding the user's stdin but keeps the
/// internal write pipe serviced.
#[instrument(skip_all, level = "trace", parent = span)]
pub(super) fn watch_stdin_from_user(
    cancel_pipe_r: &OwnedFd,
    mut master_writer: MasterWriter,
    write_pipe_r: &OwnedFd,
    span: Span,
) -> Result<(), CommandError> {
    // Indices into `all_fds`. USER_POSITION is last on purpose: removing it
    // leaves WRITER_POSITION and SIGNAL_POSITION valid.
    const WRITER_POSITION: usize = 0;
    const SIGNAL_POSITION: usize = 1;
    const USER_POSITION: usize = 2;

    let mut buffer = [0u8; 1024];
    let stdin = std::io::stdin();
    let mut cancel_pipe_buf = [0u8; 1];

    let user_stdin_fd = stdin.as_fd();
    let cancel_pipe_r_fd = cancel_pipe_r.as_fd();

    let mut all_fds = vec![
        PollFd::new(write_pipe_r.as_fd(), PollFlags::POLLIN),
        PollFd::new(cancel_pipe_r.as_fd(), PollFlags::POLLIN),
        PollFd::new(user_stdin_fd, PollFlags::POLLIN),
    ];

    loop {
        match poll(&mut all_fds, PollTimeout::NONE) {
            Ok(0) => {} // timeout, impossible
            Ok(_) => {
                // The user stdin pipe can be removed
                if all_fds.get(USER_POSITION).is_some()
                    && let Some(events) = all_fds[USER_POSITION].revents()
                    && events.contains(PollFlags::POLLIN)
                {
                    trace!("Got stdin from user...");
                    let n = read(user_stdin_fd, &mut buffer).map_err(CommandError::PosixPipe)?;
                    master_writer
                        .write_all(&buffer[..n])
                        .map_err(CommandError::WritingMasterStdout)?;
                    master_writer
                        .flush()
                        .map_err(CommandError::WritingMasterStdout)?;
                }

                if let Some(events) = all_fds[WRITER_POSITION].revents()
                    && events.contains(PollFlags::POLLIN)
                {
                    trace!("Got stdin from writer...");
                    let n = read(write_pipe_r, &mut buffer).map_err(CommandError::PosixPipe)?;
                    master_writer
                        .write_all(&buffer[..n])
                        .map_err(CommandError::WritingMasterStdout)?;
                    master_writer
                        .flush()
                        .map_err(CommandError::WritingMasterStdout)?;
                }

                if let Some(events) = all_fds[SIGNAL_POSITION].revents()
                    && events.contains(PollFlags::POLLIN)
                {
                    let n = read(cancel_pipe_r_fd, &mut cancel_pipe_buf)
                        .map_err(CommandError::PosixPipe)?;
                    let message = &cancel_pipe_buf[..n];

                    trace!("Got byte from signal pipe: {message:?}");

                    if message == THREAD_QUIT_SIGNAL {
                        return Ok(());
                    }

                    if message == THREAD_BEGAN_SIGNAL {
                        // stop forwarding the user's stdin; the write and
                        // signal pipes (indices 0 and 1) remain valid
                        all_fds.remove(USER_POSITION);
                    }
                }
            }
            Err(e) => {
                // NOTE(review): a poll error exits the loop yet the function
                // still returns Ok(()) — confirm that's intended.
                error!("Poll error: {e}");
                break;
            }
        }
    }

    debug!("stdin_thread: goodbye");
    Ok(())
}
+63
crates/core/src/commands/pty/logbuffer.rs
+63
crates/core/src/commands/pty/logbuffer.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
/// Split into its own struct to be tested nicer
///
/// Accumulates raw bytes from a stream and yields complete `\n`-terminated
/// lines (without the terminator) as they become available.
pub(crate) struct LogBuffer {
    // Bytes received so far that have not yet been emitted as a full line.
    buffer: Vec<u8>,
}

impl LogBuffer {
    pub const fn new() -> Self {
        Self { buffer: Vec::new() }
    }

    /// Appends a chunk of raw bytes; call `next_line` to drain full lines.
    pub fn process_slice(&mut self, slice: &[u8]) {
        self.buffer.extend_from_slice(slice);
    }

    /// Pops the next complete line, excluding its trailing `\n`.
    ///
    /// Returns `None` when no full line is buffered yet. Draining the
    /// newline together with the line costs a single front-shift of the
    /// buffer, instead of the two O(n) shifts that a `drain(..end)` followed
    /// by `remove(0)` would pay.
    pub fn next_line(&mut self) -> Option<Vec<u8>> {
        let line_end = self.buffer.iter().position(|&x| x == b'\n')?;

        let mut line: Vec<u8> = self.buffer.drain(..=line_end).collect();
        line.pop(); // drop the '\n' terminator itself

        Some(line)
    }

    // Test helper: drains every currently-complete line.
    #[cfg(test)]
    fn take_lines(&mut self) -> Vec<Vec<u8>> {
        let mut lines = vec![];

        while let Some(line) = self.next_line() {
            lines.push(line);
        }

        lines
    }
}
37
+
38
+
#[cfg(test)]
mod tests {
    use super::*;

    // Lines split across multiple slices must reassemble exactly; the byte
    // fixtures below are intentionally reproduced verbatim.
    #[test]
    fn test_split_line_processing() {
        let mut log_buffer = LogBuffer::new();

        log_buffer.process_slice(b"Writing key KeySpec { destination: \"/et");
        log_buffer.process_slice(b"c/keys/buildbot.aws.key\", user: \"buildbot\", group: \"buildbot-worker\", permissions: 384, length: 32, last: false, crc: 1370815231 }, 32 bytes of data");
        log_buffer.process_slice(b"\n");
        log_buffer.process_slice(b"xxx");
        log_buffer.process_slice(b"xx_WIRE");
        log_buffer.process_slice(b"_QUIT\n");
        let lines = log_buffer.take_lines();
        assert_eq!(lines.len(), 2);
        assert_eq!(
            String::from_utf8_lossy(lines.first().unwrap()),
            "Writing key KeySpec { destination: \"/etc/keys/buildbot.aws.key\", user: \"buildbot\", group: \"buildbot-worker\", permissions: 384, length: 32, last: false, crc: 1370815231 }, 32 bytes of data"
        );
        assert_eq!(lines.get(1), Some(&"xxxxx_WIRE_QUIT".as_bytes().to_vec()));

        // taking leaves none
        assert_eq!(log_buffer.take_lines().len(), 0);
    }
}
+566
crates/core/src/commands/pty/mod.rs
+566
crates/core/src/commands/pty/mod.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use crate::commands::pty::output::{WatchStdoutArguments, handle_pty_stdout};
5
+
use crate::status::STATUS;
6
+
use aho_corasick::PatternID;
7
+
use itertools::Itertools;
8
+
use nix::sys::termios::{LocalFlags, SetArg, Termios, tcgetattr, tcsetattr};
9
+
use nix::unistd::pipe;
10
+
use nix::unistd::write as posix_write;
11
+
use portable_pty::{CommandBuilder, NativePtySystem, PtyPair, PtySize};
12
+
use rand::distr::Alphabetic;
13
+
use std::collections::VecDeque;
14
+
use std::io::stderr;
15
+
use std::sync::{LazyLock, Mutex};
16
+
use std::{
17
+
io::{Read, Write},
18
+
os::fd::{AsFd, OwnedFd},
19
+
sync::Arc,
20
+
};
21
+
use tokio::sync::{oneshot, watch};
22
+
use tracing::instrument;
23
+
use tracing::{Span, debug, trace};
24
+
25
+
use crate::commands::CommandArguments;
26
+
use crate::commands::pty::input::watch_stdin_from_user;
27
+
use crate::errors::CommandError;
28
+
use crate::{SubCommandModifiers, acquire_stdin_lock};
29
+
use crate::{
30
+
commands::{ChildOutputMode, WireCommandChip},
31
+
errors::HiveLibError,
32
+
hive::node::Target,
33
+
};
34
+
35
+
mod input;
36
+
mod logbuffer;
37
+
mod output;
38
+
39
+
// Writer/reader halves of the PTY master, as boxed by portable_pty.
type MasterWriter = Box<dyn Write + Send>;
type MasterReader = Box<dyn Read + Send>;

/// the underlying command began
const THREAD_BEGAN_SIGNAL: &[u8; 1] = b"b";
/// tells the stdin-forwarding thread to exit
const THREAD_QUIT_SIGNAL: &[u8; 1] = b"q";

type Child = Box<dyn portable_pty::Child + Send + Sync>;
47
+
48
+
/// Handle to a child running inside a PTY.
pub(crate) struct InteractiveChildChip {
    child: Child,

    // Write ends of the pipes driving the stdin-forwarding thread:
    // the signal pipe (began/quit) and the stdin-injection pipe.
    cancel_stdin_pipe_w: OwnedFd,
    write_stdin_pipe_w: OwnedFd,

    // Notable stderr lines / stdout lines collected by the output thread.
    stderr_collection: Arc<Mutex<VecDeque<String>>>,
    stdout_collection: Arc<Mutex<VecDeque<String>>>,

    // The command as given, kept for error reporting.
    original_command: String,

    // Observes the needle-derived Running/Done state from the output thread.
    status_receiver: watch::Receiver<Status>,
    stdout_handle: tokio::task::JoinHandle<Result<(), CommandError>>,
}
62
+
63
+
/// sets and reverts terminal options (the terminal user interaction is performed)
/// reverts data when dropped
///
/// Holds the stdin `Termios` captured before modification so it can be
/// restored when the guard is dropped.
struct StdinTermiosAttrGuard(Termios);
66
+
67
+
/// Lifecycle of the PTY command, as inferred from needles in its output.
#[derive(Debug)]
enum Status {
    Running,
    Done { success: bool },
}

/// What scanning a chunk of PTY output for needles turned up.
#[derive(Debug)]
enum SearchFindings {
    /// No needle present.
    None,
    /// The start needle was seen: the wrapped command's output begins.
    Started,
    /// A succeed/fail needle was seen: the wrapped command finished.
    Terminate,
}
79
+
80
+
// Pattern indices of the start/succeed/fail needles within the aho-corasick
// automaton. NOTE(review): ordering must match needle insertion in
// `create_needles` (not visible here) — confirm.
static STARTED_PATTERN: LazyLock<PatternID> = LazyLock::new(|| PatternID::must(0));
static SUCCEEDED_PATTERN: LazyLock<PatternID> = LazyLock::new(|| PatternID::must(1));
static FAILED_PATTERN: LazyLock<PatternID> = LazyLock::new(|| PatternID::must(2));

/// substitutes STDOUT with #$line. stdout is far less common than stderr.
const IO_SUBS: &str = "1> >(while IFS= read -r line; do echo \"#$line\"; done)";
86
+
87
+
/// Renders the shell tail appended after the wrapped command:
/// `echo -e '<succeed>' || echo '<failed>'`, so the output scanner can tell
/// success from failure by which needle appears.
///
/// In `Interactive` mode the start needle is emitted together with the
/// success needle, since `create_starting_segment` emits nothing up front
/// for that mode.
fn create_ending_segment<S: AsRef<str>>(
    arguments: &CommandArguments<'_, S>,
    needles: &Needles,
) -> String {
    let Needles {
        succeed,
        fail,
        start,
    } = needles;

    format!(
        "echo -e '{succeed}' || echo '{failed}'",
        succeed = if matches!(arguments.output_mode, ChildOutputMode::Interactive) {
            // interactive: the start marker is deferred to completion
            format!(
                "{start}\\n{succeed}",
                start = String::from_utf8_lossy(start),
                succeed = String::from_utf8_lossy(succeed)
            )
        } else {
            String::from_utf8_lossy(succeed).to_string()
        },
        failed = String::from_utf8_lossy(fail)
    )
}
111
+
112
+
fn create_starting_segment<S: AsRef<str>>(
113
+
arguments: &CommandArguments<'_, S>,
114
+
start_needle: &Arc<Vec<u8>>,
115
+
) -> String {
116
+
if matches!(arguments.output_mode, ChildOutputMode::Interactive) {
117
+
String::new()
118
+
} else {
119
+
format!(
120
+
"echo '{start}' && ",
121
+
start = String::from_utf8_lossy(start_needle)
122
+
)
123
+
}
124
+
}
125
+
126
+
/// Spawns `arguments.command_string` inside a PTY, wrapping it with start /
/// succeed / fail needles so a background reader thread can track its
/// lifecycle while the user interacts with it.
///
/// Side effects: holds the process-wide stdin lock during startup, and
/// alters terminal attributes for the call's duration (restored when the
/// termios guard drops).
#[instrument(skip_all, name = "run-int", fields(elevated = %arguments.is_elevated(), mode = ?arguments.output_mode))]
pub(crate) async fn interactive_command_with_env<S: AsRef<str>>(
    arguments: &CommandArguments<'_, S>,
    envs: std::collections::HashMap<String, String>,
) -> Result<InteractiveChildChip, HiveLibError> {
    print_authenticate_warning(arguments)?;

    let needles = create_needles();
    let pty_system = NativePtySystem::default();
    // NOTE(review): openpty failure panics rather than mapping to
    // CommandError::PortablePty like the calls below — confirm.
    let pty_pair = portable_pty::PtySystem::openpty(&pty_system, PtySize::default()).unwrap();
    setup_master(&pty_pair)?;

    // full line: [start-echo &&] command [nix flags] <stdout-subst> && <needle echoes>
    let command_string = &format!(
        "{starting}{command} {flags} {IO_SUBS} && {ending}",
        command = arguments.command_string.as_ref(),
        flags = match arguments.output_mode {
            ChildOutputMode::Nix => "--log-format internal-json",
            ChildOutputMode::Generic | ChildOutputMode::Interactive => "",
        },
        starting = create_starting_segment(arguments, &needles.start),
        ending = create_ending_segment(arguments, &needles)
    );

    debug!("{command_string}");

    let mut command = build_command(arguments, command_string)?;

    // give command all env vars
    for (key, value) in envs {
        command.env(key, value);
    }

    let clobber_guard = acquire_stdin_lock().await;
    let _guard = StdinTermiosAttrGuard::new().map_err(HiveLibError::CommandError)?;
    let child = pty_pair
        .slave
        .spawn_command(command)
        .map_err(|x| HiveLibError::CommandError(CommandError::PortablePty(x)))?;

    // Release any handles owned by the slave: we don't need it now
    // that we've spawned the child.
    drop(pty_pair.slave);

    let reader = pty_pair
        .master
        .try_clone_reader()
        .map_err(|x| HiveLibError::CommandError(CommandError::PortablePty(x)))?;
    let master_writer = pty_pair
        .master
        .take_writer()
        .map_err(|x| HiveLibError::CommandError(CommandError::PortablePty(x)))?;

    let stderr_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
    let stdout_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
    // fired by the output thread once the start needle is observed
    let (began_tx, began_rx) = oneshot::channel::<()>();
    let (status_sender, status_receiver) = watch::channel(Status::Running);

    let stdout_handle = {
        let arguments = WatchStdoutArguments {
            began_tx,
            reader,
            needles,
            output_mode: arguments.output_mode,
            stderr_collection: stderr_collection.clone(),
            stdout_collection: stdout_collection.clone(),
            span: Span::current(),
            log_stdout: arguments.log_stdout,
            status_sender,
        };

        // blocking thread: the PTY reader is a synchronous `Read`
        tokio::task::spawn_blocking(move || handle_pty_stdout(arguments))
    };

    // write pipe: lets `write_stdin` inject bytes into the PTY;
    // cancel pipe: signals the forwarding thread (began / quit)
    let (write_stdin_pipe_r, write_stdin_pipe_w) =
        pipe().map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
    let (cancel_stdin_pipe_r, cancel_stdin_pipe_w) =
        pipe().map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;

    tokio::task::spawn_blocking(move || {
        watch_stdin_from_user(
            &cancel_stdin_pipe_r,
            master_writer,
            &write_stdin_pipe_r,
            Span::current(),
        )
    });

    debug!("Setup threads");

    // block until the start needle confirms the command actually began
    let () = began_rx
        .await
        .map_err(|x| HiveLibError::CommandError(CommandError::OneshotRecvError(x)))?;

    // startup is over; release the process-wide stdin lock
    drop(clobber_guard);

    if arguments.keep_stdin_open {
        trace!("Sending THREAD_BEGAN_SIGNAL");

        posix_write(&cancel_stdin_pipe_w, THREAD_BEGAN_SIGNAL)
            .map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
    } else {
        trace!("Sending THREAD_QUIT_SIGNAL");

        posix_write(&cancel_stdin_pipe_w, THREAD_QUIT_SIGNAL)
            .map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
    }

    Ok(InteractiveChildChip {
        child,
        cancel_stdin_pipe_w,
        write_stdin_pipe_w,
        stderr_collection,
        stdout_collection,
        original_command: arguments.command_string.as_ref().to_string(),
        status_receiver,
        stdout_handle,
    })
}
244
+
245
+
fn print_authenticate_warning<S: AsRef<str>>(
246
+
arguments: &CommandArguments<S>,
247
+
) -> Result<(), HiveLibError> {
248
+
if !arguments.is_elevated() {
249
+
return Ok(());
250
+
}
251
+
252
+
let _ = STATUS.lock().write_above_status(
253
+
&format!(
254
+
"{} | Authenticate for \"sudo {}\":\n",
255
+
arguments
256
+
.target
257
+
.map_or(Ok("localhost (!)".to_string()), |target| Ok(format!(
258
+
"{}@{}:{}",
259
+
target.user,
260
+
target.get_preferred_host()?,
261
+
target.port
262
+
)))?,
263
+
arguments.command_string.as_ref()
264
+
)
265
+
.into_bytes(),
266
+
&mut stderr(),
267
+
);
268
+
269
+
Ok(())
270
+
}
271
+
272
+
/// Randomised sentinel byte strings ("needles") woven around the remote
/// command so the PTY reader can detect lifecycle events in the raw
/// output stream (see `create_needles` and the segment builders that
/// embed them in the command string).
struct Needles {
    /// Marker printed when the command finished successfully.
    succeed: Arc<Vec<u8>>,
    /// Marker printed when the command failed.
    fail: Arc<Vec<u8>>,
    /// Marker printed once the command has actually started.
    start: Arc<Vec<u8>>,
}
277
+
278
+
fn create_needles() -> Needles {
279
+
let tmp_prefix = rand::distr::SampleString::sample_string(&Alphabetic, &mut rand::rng(), 5);
280
+
281
+
Needles {
282
+
succeed: Arc::new(format!("{tmp_prefix}_W_Q").as_bytes().to_vec()),
283
+
fail: Arc::new(format!("{tmp_prefix}_W_F").as_bytes().to_vec()),
284
+
start: Arc::new(format!("{tmp_prefix}_W_S").as_bytes().to_vec()),
285
+
}
286
+
}
287
+
288
+
/// Configures the PTY master's terminal attributes so the child's I/O
/// can be proxied: disables echo, canonical mode, and signal generation.
///
/// A no-op if the master does not expose a raw fd.
fn setup_master(pty_pair: &PtyPair) -> Result<(), HiveLibError> {
    if let Some(fd) = pty_pair.master.as_raw_fd() {
        // convert raw fd to a BorrowedFd
        // safe as `fd` is dropped well before `pty_pair.master`
        let fd = unsafe { std::os::unix::io::BorrowedFd::borrow_raw(fd) };
        let mut termios =
            tcgetattr(fd).map_err(|x| HiveLibError::CommandError(CommandError::TermAttrs(x)))?;

        // Don't echo typed input (e.g. sudo passwords) back to the user.
        termios.local_flags &= !LocalFlags::ECHO;
        // Key agent does not work well without canonical mode
        // NOTE(review): this line *clears* ICANON (disables canonical
        // mode), which appears to contradict the comment above — confirm
        // the intended behaviour.
        termios.local_flags &= !LocalFlags::ICANON;
        // Actually quit
        termios.local_flags &= !LocalFlags::ISIG;

        tcsetattr(fd, SetArg::TCSANOW, &termios)
            .map_err(|x| HiveLibError::CommandError(CommandError::TermAttrs(x)))?;
    }

    Ok(())
}
308
+
309
+
/// Builds the PTY command used to execute `command_string`: over SSH
/// when `arguments.target` is set, otherwise locally via `sh -c`.
///
/// When the command is elevated it is wrapped in
/// `sudo -u root -- sh -c '…'`.
///
/// The parameter was `&String`; `&str` is the idiomatic borrow and is
/// backward compatible (callers passing `&String` deref-coerce).
fn build_command<S: AsRef<str>>(
    arguments: &CommandArguments<'_, S>,
    command_string: &str,
) -> Result<CommandBuilder, HiveLibError> {
    let mut command = if let Some(target) = arguments.target {
        let mut command = create_int_ssh_command(target, arguments.modifiers)?;

        // force ssh to use our pseudo terminal
        command.arg("-tt");

        command
    } else {
        let mut command = portable_pty::CommandBuilder::new("sh");

        command.arg("-c");

        command
    };

    if arguments.is_elevated() {
        // NOTE(review): `command_string` is embedded inside single quotes;
        // a `'` inside it would break out of the quoting. The string is
        // built internally today, but verify it can never contain one.
        command.arg(format!("sudo -u root -- sh -c '{command_string}'"));
    } else {
        command.arg(command_string);
    }

    Ok(command)
}
336
+
337
+
impl WireCommandChip for InteractiveChildChip {
    /// The child's PTY exit status paired with its collected stdout.
    type ExitStatus = (portable_pty::ExitStatus, String);

    #[instrument(skip_all)]
    async fn wait_till_success(mut self) -> Result<Self::ExitStatus, CommandError> {
        // Close our write end so the child observes EOF on stdin.
        drop(self.write_stdin_pipe_w);

        // `child.wait()` blocks, so run it off the async runtime.
        // NOTE(review): the closure captures only `self.child` (edition
        // 2021 disjoint closure capture), leaving the other fields of
        // `self` usable below — confirm the crate edition guarantees this.
        let exit_status = tokio::task::spawn_blocking(move || self.child.wait())
            .await
            .map_err(CommandError::JoinError)?
            .map_err(CommandError::WaitForStatus)?;

        debug!("exit_status: {exit_status:?}");

        // Join the stdout-watcher task: outer `?` is the join error
        // (panic), inner `?` is the watcher's own result.
        self.stdout_handle
            .await
            .map_err(|_| CommandError::ThreadPanic)??;

        // The watcher sets a terminal status before exiting (see the
        // failsafe in `handle_pty_stdout`), so this wait resolves;
        // hence the `unwrap`.
        let status = self
            .status_receiver
            .wait_for(|value| matches!(value, Status::Done { .. }))
            .await
            .unwrap();

        // Best-effort: tell the stdin-forwarding thread to stop.
        let _ = posix_write(&self.cancel_stdin_pipe_w, THREAD_QUIT_SIGNAL);

        if let Status::Done { success: true } = *status {
            // Lines are stored newest-first; reverse into chronological
            // order before joining.
            let logs = self
                .stdout_collection
                .lock()
                .unwrap()
                .iter()
                .rev()
                .map(|x| x.trim())
                .join("\n");

            return Ok((exit_status, logs));
        }

        debug!("child did not succeed");

        // Failure path: surface the recent stderr lines instead.
        let logs = self
            .stderr_collection
            .lock()
            .unwrap()
            .iter()
            .rev()
            .join("\n");

        Err(CommandError::CommandFailed {
            command_ran: self.original_command,
            logs,
            code: format!("code {}", exit_status.exit_code()),
            reason: match *status {
                Status::Done { .. } => "marked-unsuccessful",
                Status::Running => "child-crashed-before-succeeding",
            },
        })
    }

    /// Forwards `data` to the child's stdin via the stdin pipe.
    async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError> {
        trace!("Writing {} bytes to stdin", data.len());

        posix_write(&self.write_stdin_pipe_w, &data)
            .map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;

        Ok(())
    }
}
406
+
407
+
impl StdinTermiosAttrGuard {
408
+
fn new() -> Result<Self, CommandError> {
409
+
let stdin = std::io::stdin();
410
+
let stdin_fd = stdin.as_fd();
411
+
412
+
let mut termios = tcgetattr(stdin_fd).map_err(CommandError::TermAttrs)?;
413
+
let original_termios = termios.clone();
414
+
415
+
termios.local_flags &= !(LocalFlags::ECHO | LocalFlags::ICANON);
416
+
tcsetattr(stdin_fd, SetArg::TCSANOW, &termios).map_err(CommandError::TermAttrs)?;
417
+
418
+
Ok(StdinTermiosAttrGuard(original_termios))
419
+
}
420
+
}
421
+
422
+
impl Drop for StdinTermiosAttrGuard {
423
+
fn drop(&mut self) {
424
+
let stdin = std::io::stdin();
425
+
let stdin_fd = stdin.as_fd();
426
+
427
+
let _ = tcsetattr(stdin_fd, SetArg::TCSANOW, &self.0);
428
+
}
429
+
}
430
+
431
+
fn create_int_ssh_command(
432
+
target: &Target,
433
+
modifiers: SubCommandModifiers,
434
+
) -> Result<portable_pty::CommandBuilder, HiveLibError> {
435
+
let mut command = portable_pty::CommandBuilder::new("ssh");
436
+
command.args(target.create_ssh_args(modifiers, false, false)?);
437
+
command.arg(target.get_preferred_host()?.to_string());
438
+
Ok(command)
439
+
}
440
+
441
+
#[cfg(test)]
mod tests {
    use aho_corasick::AhoCorasick;
    use tokio::sync::oneshot::error::TryRecvError;

    use crate::commands::pty::output::handle_rawmode_data;

    use super::*;
    use std::assert_matches::assert_matches;

    /// Drives `handle_rawmode_data` through its full lifecycle using
    /// fixed needle strings: data before the start needle, a needle
    /// split across reads, the start match itself, then the failed and
    /// succeeded needles with fresh buffers/status channels.
    #[test]
    fn test_rawmode_data() {
        let aho_corasick = AhoCorasick::builder()
            .ascii_case_insensitive(false)
            .match_kind(aho_corasick::MatchKind::LeftmostFirst)
            .build(["START_NEEDLE", "SUCCEEDED_NEEDLE", "FAILED_NEEDLE"])
            .unwrap();
        let mut stderr = vec![];
        let (began_tx, mut began_rx) = oneshot::channel::<()>();
        let mut began_tx = Some(began_tx);
        let (status_sender, _) = watch::channel(Status::Running);

        // each "Bla" is 4 bytes.
        let buffer = "bla bla bla START_NEEDLE bla bla bla".as_bytes();
        let mut raw_mode_buffer = vec![];

        // handle 1 "bla": no needle yet, channel untouched, raw data buffered.
        assert_matches!(
            handle_rawmode_data(
                &mut stderr,
                buffer,
                4,
                &mut raw_mode_buffer,
                &aho_corasick,
                &status_sender,
                &mut began_tx
            ),
            Ok(SearchFindings::None)
        );
        assert_matches!(began_rx.try_recv(), Err(TryRecvError::Empty));
        assert!(began_tx.is_some());
        assert_eq!(raw_mode_buffer, b"bla ");
        assert_matches!(*status_sender.borrow(), Status::Running);

        let buffer = &buffer[4..];

        // handle 2 "bla"'s and half a "START_NEEDLE": a partially-read
        // needle must not trigger a match.
        let n = 4 + 4 + 6;
        assert_matches!(
            handle_rawmode_data(
                &mut stderr,
                buffer,
                n,
                &mut raw_mode_buffer,
                &aho_corasick,
                &status_sender,
                &mut began_tx
            ),
            Ok(SearchFindings::None)
        );
        assert_matches!(began_rx.try_recv(), Err(TryRecvError::Empty));
        assert!(began_tx.is_some());
        assert_matches!(*status_sender.borrow(), Status::Running);
        assert_eq!(raw_mode_buffer, b"bla bla bla START_");

        let buffer = &buffer[n..];

        // handle rest of the data: completes the start needle, so the
        // begin channel fires and the sender is consumed.
        let n = buffer.len();
        assert_matches!(
            handle_rawmode_data(
                &mut stderr,
                buffer,
                n,
                &mut raw_mode_buffer,
                &aho_corasick,
                &status_sender,
                &mut began_tx
            ),
            Ok(SearchFindings::Started)
        );
        assert_matches!(began_rx.try_recv(), Ok(()));
        assert_matches!(began_tx, None);
        assert_eq!(raw_mode_buffer, b"bla bla bla START_NEEDLE bla bla bla");
        assert_matches!(*status_sender.borrow(), Status::Running);

        // test failed needle: terminates and marks the child unsuccessful.
        let buffer = "bla FAILED_NEEDLE bla".as_bytes();
        let mut raw_mode_buffer = vec![];

        let n = buffer.len();
        assert_matches!(
            handle_rawmode_data(
                &mut stderr,
                buffer,
                n,
                &mut raw_mode_buffer,
                &aho_corasick,
                &status_sender,
                &mut began_tx
            ),
            Ok(SearchFindings::Terminate)
        );
        assert_matches!(*status_sender.borrow(), Status::Done { success: false });

        // test succeed needle: terminates and marks the child successful
        // (fresh status channel so the previous failure doesn't leak in).
        let buffer = "bla SUCCEEDED_NEEDLE bla".as_bytes();
        let mut raw_mode_buffer = vec![];
        let (status_sender, _) = watch::channel(Status::Running);

        let n = buffer.len();
        assert_matches!(
            handle_rawmode_data(
                &mut stderr,
                buffer,
                n,
                &mut raw_mode_buffer,
                &aho_corasick,
                &status_sender,
                &mut began_tx
            ),
            Ok(SearchFindings::Terminate)
        );
        assert_matches!(*status_sender.borrow(), Status::Done { success: true });
    }
}
+264
crates/core/src/commands/pty/output.rs
+264
crates/core/src/commands/pty/output.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use crate::{
5
+
commands::{
6
+
ChildOutputMode,
7
+
pty::{
8
+
FAILED_PATTERN, Needles, STARTED_PATTERN, SUCCEEDED_PATTERN, SearchFindings, Status,
9
+
logbuffer::LogBuffer,
10
+
},
11
+
},
12
+
errors::CommandError,
13
+
};
14
+
use aho_corasick::AhoCorasick;
15
+
use std::{
16
+
collections::VecDeque,
17
+
io::Write,
18
+
sync::{Arc, Mutex},
19
+
};
20
+
use tokio::sync::{oneshot, watch};
21
+
use tracing::{Span, debug, instrument};
22
+
23
+
/// Everything the blocking stdout-watcher needs to process PTY output
/// for one child; consumed by `handle_pty_stdout`.
pub(super) struct WatchStdoutArguments {
    /// Fired once when the command is considered started.
    pub began_tx: oneshot::Sender<()>,
    /// Reader side of the PTY master.
    pub reader: super::MasterReader,
    /// Sentinel byte strings marking start/success/failure in the stream.
    pub needles: Needles,
    /// How child output should be interpreted and traced.
    pub output_mode: ChildOutputMode,
    /// Recent notable stderr lines (newest at the front, capped at 20).
    pub stderr_collection: Arc<Mutex<VecDeque<String>>>,
    /// Captured stdout lines (newest at the front).
    pub stdout_collection: Arc<Mutex<VecDeque<String>>>,
    /// Broadcasts the child's lifecycle status.
    pub status_sender: watch::Sender<Status>,
    /// Tracing span the watcher attaches its logs to.
    pub span: Span,
    /// Whether captured stdout lines should also be traced.
    pub log_stdout: bool,
}
34
+
35
+
/// Handles data from the PTY, and logs or prompts the user depending on the state
36
+
/// of the command.
37
+
///
38
+
/// Emits a message on the `began_tx` when the command is considered started.
39
+
///
40
+
/// Records stderr and stdout when it is considered notable (all stdout, last few stderr messages)
41
+
#[instrument(skip_all, name = "log", parent = arguments.span)]
42
+
pub(super) fn handle_pty_stdout(arguments: WatchStdoutArguments) -> Result<(), CommandError> {
43
+
let WatchStdoutArguments {
44
+
began_tx,
45
+
mut reader,
46
+
needles,
47
+
output_mode,
48
+
stdout_collection,
49
+
stderr_collection,
50
+
status_sender,
51
+
log_stdout,
52
+
..
53
+
} = arguments;
54
+
55
+
let aho_corasick = AhoCorasick::builder()
56
+
.ascii_case_insensitive(false)
57
+
.match_kind(aho_corasick::MatchKind::LeftmostFirst)
58
+
.build([
59
+
needles.start.as_ref(),
60
+
needles.succeed.as_ref(),
61
+
needles.fail.as_ref(),
62
+
])
63
+
.unwrap();
64
+
65
+
let mut buffer = [0u8; 1024];
66
+
let mut stderr = std::io::stderr();
67
+
let mut began = false;
68
+
let mut log_buffer = LogBuffer::new();
69
+
let mut raw_mode_buffer = Vec::new();
70
+
let mut belled = false;
71
+
let mut began_tx = Some(began_tx);
72
+
73
+
'outer: loop {
74
+
match reader.read(&mut buffer) {
75
+
Ok(0) => break 'outer,
76
+
Ok(n) => {
77
+
// this block is responsible for outputting the "raw" data,
78
+
// mostly sudo prompts.
79
+
if !began {
80
+
let findings = handle_rawmode_data(
81
+
&mut stderr,
82
+
&buffer,
83
+
n,
84
+
&mut raw_mode_buffer,
85
+
&aho_corasick,
86
+
&status_sender,
87
+
&mut began_tx,
88
+
)?;
89
+
90
+
match findings {
91
+
SearchFindings::Terminate => break 'outer,
92
+
SearchFindings::Started => {
93
+
began = true;
94
+
continue;
95
+
}
96
+
SearchFindings::None => {}
97
+
}
98
+
99
+
if belled {
100
+
continue;
101
+
}
102
+
103
+
stderr
104
+
.write(b"\x07") // bell
105
+
.map_err(CommandError::WritingClientStderr)?;
106
+
stderr.flush().map_err(CommandError::WritingClientStderr)?;
107
+
108
+
belled = true;
109
+
110
+
continue;
111
+
}
112
+
113
+
log_buffer.process_slice(&buffer[..n]);
114
+
115
+
while let Some(mut line) = log_buffer.next_line() {
116
+
let findings =
117
+
search_string(&aho_corasick, &line, &status_sender, &mut began_tx);
118
+
119
+
match findings {
120
+
SearchFindings::Terminate => break 'outer,
121
+
SearchFindings::Started => {
122
+
began = true;
123
+
continue;
124
+
}
125
+
SearchFindings::None => {}
126
+
}
127
+
128
+
handle_normal_data(
129
+
&stderr_collection,
130
+
&stdout_collection,
131
+
&mut line,
132
+
log_stdout,
133
+
output_mode,
134
+
);
135
+
}
136
+
}
137
+
Err(e) => {
138
+
eprintln!("Error reading from PTY: {e}");
139
+
break;
140
+
}
141
+
}
142
+
}
143
+
144
+
began_tx.map(|began_tx| began_tx.send(()));
145
+
146
+
// failsafe if there were errors or the reader stopped
147
+
if matches!(*status_sender.borrow(), Status::Running) {
148
+
status_sender.send_replace(Status::Done { success: false });
149
+
}
150
+
151
+
debug!("stdout: goodbye");
152
+
153
+
Ok(())
154
+
}
155
+
156
+
/// handles raw data, prints to stderr when a prompt is detected
157
+
pub(super) fn handle_rawmode_data<W: std::io::Write>(
158
+
stderr: &mut W,
159
+
buffer: &[u8],
160
+
n: usize,
161
+
raw_mode_buffer: &mut Vec<u8>,
162
+
aho_corasick: &AhoCorasick,
163
+
status_sender: &watch::Sender<Status>,
164
+
began_tx: &mut Option<oneshot::Sender<()>>,
165
+
) -> Result<SearchFindings, CommandError> {
166
+
raw_mode_buffer.extend_from_slice(&buffer[..n]);
167
+
168
+
let findings = search_string(aho_corasick, raw_mode_buffer, status_sender, began_tx);
169
+
170
+
if matches!(
171
+
findings,
172
+
SearchFindings::Started | SearchFindings::Terminate
173
+
) {
174
+
return Ok(findings);
175
+
}
176
+
177
+
stderr
178
+
.write_all(&buffer[..n])
179
+
.map_err(CommandError::WritingClientStderr)?;
180
+
181
+
stderr.flush().map_err(CommandError::WritingClientStderr)?;
182
+
183
+
Ok(findings)
184
+
}
185
+
186
+
/// handles data when the command is considered "started", logs and records errors as appropriate
187
+
fn handle_normal_data(
188
+
stderr_collection: &Arc<Mutex<VecDeque<String>>>,
189
+
stdout_collection: &Arc<Mutex<VecDeque<String>>>,
190
+
line: &mut [u8],
191
+
log_stdout: bool,
192
+
output_mode: ChildOutputMode,
193
+
) {
194
+
if line.starts_with(b"#") {
195
+
let stripped = &mut line[1..];
196
+
197
+
if log_stdout {
198
+
output_mode.trace_slice(stripped);
199
+
}
200
+
201
+
let mut queue = stdout_collection.lock().unwrap();
202
+
queue.push_front(String::from_utf8_lossy(stripped).to_string());
203
+
return;
204
+
}
205
+
206
+
let log = output_mode.trace_slice(line);
207
+
208
+
if let Some(error_msg) = log {
209
+
let mut queue = stderr_collection.lock().unwrap();
210
+
211
+
// add at most 20 message to the front, drop the rest.
212
+
queue.push_front(error_msg);
213
+
queue.truncate(20);
214
+
}
215
+
}
216
+
217
+
/// returns true if the command is considered stopped
218
+
fn search_string(
219
+
aho_corasick: &AhoCorasick,
220
+
haystack: &[u8],
221
+
status_sender: &watch::Sender<Status>,
222
+
began_tx: &mut Option<oneshot::Sender<()>>,
223
+
) -> SearchFindings {
224
+
let searched = aho_corasick
225
+
.find_iter(haystack)
226
+
.map(|x| x.pattern())
227
+
.collect::<Vec<_>>();
228
+
229
+
let started = if searched.contains(&STARTED_PATTERN) {
230
+
debug!("start needle was found, switching mode...");
231
+
if let Some(began_tx) = began_tx.take() {
232
+
let _ = began_tx.send(());
233
+
}
234
+
true
235
+
} else {
236
+
false
237
+
};
238
+
239
+
let succeeded = if searched.contains(&SUCCEEDED_PATTERN) {
240
+
debug!("succeed needle was found, marking child as succeeding.");
241
+
status_sender.send_replace(Status::Done { success: true });
242
+
true
243
+
} else {
244
+
false
245
+
};
246
+
247
+
let failed = if searched.contains(&FAILED_PATTERN) {
248
+
debug!("failed needle was found, elevated child did not succeed.");
249
+
status_sender.send_replace(Status::Done { success: false });
250
+
true
251
+
} else {
252
+
false
253
+
};
254
+
255
+
if succeeded || failed {
256
+
return SearchFindings::Terminate;
257
+
}
258
+
259
+
if started {
260
+
return SearchFindings::Started;
261
+
}
262
+
263
+
SearchFindings::None
264
+
}
+376
crates/core/src/errors.rs
+376
crates/core/src/errors.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#![allow(unused_assignments)]
5
+
6
+
use std::{num::ParseIntError, path::PathBuf, process::ExitStatus, sync::mpsc::RecvError};
7
+
8
+
use miette::{Diagnostic, SourceSpan};
9
+
use nix_compat::flakeref::{FlakeRef, FlakeRefError};
10
+
use thiserror::Error;
11
+
use tokio::task::JoinError;
12
+
13
+
use crate::hive::node::{Name, SwitchToConfigurationGoal};
14
+
15
+
#[cfg(debug_assertions)]
16
+
const DOCS_URL: &str = "http://localhost:5173/reference/errors.html";
17
+
#[cfg(not(debug_assertions))]
18
+
const DOCS_URL: &str = "https://wire.althaea.zone/reference/errors.html";
19
+
20
+
/// Errors raised while applying a key to a node.
#[derive(Debug, Diagnostic, Error)]
pub enum KeyError {
    /// Reading the key's source file from disk failed.
    #[diagnostic(
        code(wire::key::File),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("error reading file")]
    File(#[source] std::io::Error),

    /// The key-generating command could not be spawned.
    #[diagnostic(
        code(wire::key::SpawningCommand),
        help("Ensure wire has the correct $PATH for this command"),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("error spawning key command")]
    CommandSpawnError {
        #[source]
        error: std::io::Error,

        // Full command line, rendered as miette source code so the
        // failing program can be highlighted via `command_span`.
        #[source_code]
        command: String,

        #[label(primary, "Program ran")]
        command_span: Option<SourceSpan>,
    },

    /// The spawned key command's child process could not be resolved.
    #[diagnostic(
        code(wire::key::Resolving),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("Error resolving key command child process")]
    CommandResolveError {
        #[source]
        error: std::io::Error,

        #[source_code]
        command: String,
    },

    /// The key command ran but exited with a non-success status; the
    /// string carries its captured output.
    #[diagnostic(
        code(wire::key::CommandExit),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("key command failed with status {}: {}", .0,.1)]
    CommandError(ExitStatus, String),

    /// The configured command list was empty.
    #[diagnostic(
        code(wire::key::Empty),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("Command list empty")]
    Empty,

    /// The key's permission string could not be parsed as an integer.
    #[diagnostic(
        code(wire::key::ParseKeyPermissions),
        help("Refer to the documentation for the format of key file permissions."),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("Failed to parse key permissions")]
    ParseKeyPermissions(#[source] ParseIntError),
}
81
+
82
+
/// Errors raised while activating a built configuration on a node.
#[derive(Debug, Diagnostic, Error)]
pub enum ActivationError {
    /// Running `switch-to-configuration <goal>` failed on the named node.
    #[diagnostic(
        code(wire::activation::SwitchToConfiguration),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("failed to run switch-to-configuration {0} on node {1}")]
    SwitchToConfigurationError(SwitchToConfigurationGoal, Name, #[source] CommandError),
}
91
+
92
+
#[derive(Debug, Diagnostic, Error)]
93
+
pub enum NetworkError {
94
+
#[diagnostic(
95
+
code(wire::network::HostUnreachable),
96
+
help(
97
+
"If you failed due to a fault in DNS, note that a node can have multiple targets defined."
98
+
),
99
+
url("{DOCS_URL}#{}", self.code().unwrap())
100
+
)]
101
+
#[error("Cannot reach host {host}")]
102
+
HostUnreachable {
103
+
host: String,
104
+
#[source]
105
+
source: CommandError,
106
+
},
107
+
108
+
#[diagnostic(
109
+
code(wire::network::HostUnreachableAfterReboot),
110
+
url("{DOCS_URL}#{}", self.code().unwrap())
111
+
)]
112
+
#[error("Failed to get regain connection to {0} after activation.")]
113
+
HostUnreachableAfterReboot(String),
114
+
115
+
#[diagnostic(
116
+
code(wire::network::HostsExhausted),
117
+
url("{DOCS_URL}#{}", self.code().unwrap())
118
+
)]
119
+
#[error("Ran out of contactable hosts")]
120
+
HostsExhausted,
121
+
}
122
+
123
+
#[derive(Debug, Diagnostic, Error)]
124
+
pub enum HiveInitialisationError {
125
+
#[diagnostic(
126
+
code(wire::hive_init::NoHiveFound),
127
+
help(
128
+
"Double check the path is correct. You can adjust the hive path with `--path` when the hive lies outside of the CWD."
129
+
),
130
+
url("{DOCS_URL}#{}", self.code().unwrap())
131
+
)]
132
+
#[error("No hive could be found in {}", .0.display())]
133
+
NoHiveFound(PathBuf),
134
+
135
+
#[diagnostic(
136
+
code(wire::hive_init::Parse),
137
+
help("If you cannot resolve this problem, please create an issue."),
138
+
url("{DOCS_URL}#{}", self.code().unwrap())
139
+
)]
140
+
#[error("Failed to parse internal wire json.")]
141
+
ParseEvaluateError(#[source] serde_json::Error),
142
+
143
+
#[diagnostic(
144
+
code(wire::hive_init::ParsePrefetch),
145
+
help("please create an issue."),
146
+
url("{DOCS_URL}#{}", self.code().unwrap())
147
+
)]
148
+
#[error("Failed to parse `nix flake prefetch --json`.")]
149
+
ParsePrefetchError(#[source] serde_json::Error),
150
+
151
+
#[diagnostic(
152
+
code(wire::hive_init::NodeDoesNotExist),
153
+
help("Please create an issue!"),
154
+
url("{DOCS_URL}#{}", self.code().unwrap())
155
+
)]
156
+
#[error("node {0} not exist in hive")]
157
+
NodeDoesNotExist(String),
158
+
}
159
+
160
+
#[derive(Debug, Diagnostic, Error)]
161
+
pub enum HiveLocationError {
162
+
#[diagnostic(
163
+
code(wire::hive_location::MalformedPath),
164
+
url("{DOCS_URL}#{}", self.code().unwrap())
165
+
)]
166
+
#[error("Path was malformed: {}", .0.display())]
167
+
MalformedPath(PathBuf),
168
+
169
+
#[diagnostic(
170
+
code(wire::hive_location::Malformed),
171
+
url("{DOCS_URL}#{}", self.code().unwrap())
172
+
)]
173
+
#[error("--path was malformed")]
174
+
Malformed(#[source] FlakeRefError),
175
+
176
+
#[diagnostic(
177
+
code(wire::hive_location::TypeUnsupported),
178
+
url("{DOCS_URL}#{}", self.code().unwrap())
179
+
)]
180
+
#[error("The flakref had an unsupported type: {:#?}", .0)]
181
+
TypeUnsupported(Box<FlakeRef>),
182
+
}
183
+
184
+
#[derive(Debug, Diagnostic, Error)]
185
+
pub enum CommandError {
186
+
#[diagnostic(
187
+
code(wire::command::TermAttrs),
188
+
url("{DOCS_URL}#{}", self.code().unwrap())
189
+
)]
190
+
#[error("Failed to set PTY attrs")]
191
+
TermAttrs(#[source] nix::errno::Errno),
192
+
193
+
#[diagnostic(
194
+
code(wire::command::PosixPipe),
195
+
url("{DOCS_URL}#{}", self.code().unwrap())
196
+
)]
197
+
#[error("There was an error in regards to a pipe")]
198
+
PosixPipe(#[source] nix::errno::Errno),
199
+
200
+
/// Error wrapped around `portable_pty`'s anyhow
201
+
/// errors
202
+
#[diagnostic(
203
+
code(wire::command::PortablePty),
204
+
url("{DOCS_URL}#{}", self.code().unwrap())
205
+
)]
206
+
#[error("There was an error from the portable_pty crate")]
207
+
PortablePty(#[source] anyhow::Error),
208
+
209
+
#[diagnostic(
210
+
code(wire::command::Joining),
211
+
url("{DOCS_URL}#{}", self.code().unwrap())
212
+
)]
213
+
#[error("Failed to join on some tokio task")]
214
+
JoinError(#[source] JoinError),
215
+
216
+
#[diagnostic(
217
+
code(wire::command::WaitForStatus),
218
+
url("{DOCS_URL}#{}", self.code().unwrap())
219
+
)]
220
+
#[error("Failed to wait for the child's status")]
221
+
WaitForStatus(#[source] std::io::Error),
222
+
223
+
#[diagnostic(
224
+
code(wire::detached::NoHandle),
225
+
help("This should never happen, please create an issue!"),
226
+
url("{DOCS_URL}#{}", self.code().unwrap())
227
+
)]
228
+
#[error("There was no handle to child io")]
229
+
NoHandle,
230
+
231
+
#[diagnostic(
232
+
code(wire::command::WritingClientStdout),
233
+
url("{DOCS_URL}#{}", self.code().unwrap())
234
+
)]
235
+
#[error("Failed to write to client stderr.")]
236
+
WritingClientStderr(#[source] std::io::Error),
237
+
238
+
#[diagnostic(
239
+
code(wire::command::WritingMasterStdin),
240
+
url("{DOCS_URL}#{}", self.code().unwrap())
241
+
)]
242
+
#[error("Failed to write to PTY master stdout.")]
243
+
WritingMasterStdout(#[source] std::io::Error),
244
+
245
+
#[diagnostic(
246
+
code(wire::command::Recv),
247
+
url("{DOCS_URL}#{}", self.code().unwrap()),
248
+
help("please create an issue!"),
249
+
)]
250
+
#[error("Failed to receive a message from the begin channel")]
251
+
RecvError(#[source] RecvError),
252
+
253
+
#[diagnostic(
254
+
code(wire::command::ThreadPanic),
255
+
url("{DOCS_URL}#{}", self.code().unwrap()),
256
+
help("please create an issue!"),
257
+
)]
258
+
#[error("Thread panicked")]
259
+
ThreadPanic,
260
+
261
+
#[diagnostic(
262
+
code(wire::command::CommandFailed),
263
+
url("{DOCS_URL}#{}", self.code().unwrap()),
264
+
help("`nix` commands are filtered, run with -vvv to view all"),
265
+
)]
266
+
#[error("{command_ran} failed ({reason}) with {code} (last 20 lines):\n{logs}")]
267
+
CommandFailed {
268
+
command_ran: String,
269
+
logs: String,
270
+
code: String,
271
+
reason: &'static str,
272
+
},
273
+
274
+
#[diagnostic(
275
+
code(wire::command::RuntimeDirectory),
276
+
url("{DOCS_URL}#{}", self.code().unwrap())
277
+
)]
278
+
#[error("error creating $XDG_RUNTIME_DIR/wire")]
279
+
RuntimeDirectory(#[source] std::io::Error),
280
+
281
+
#[diagnostic(
282
+
code(wire::command::RuntimeDirectoryMissing),
283
+
url("{DOCS_URL}#{}", self.code().unwrap())
284
+
)]
285
+
#[error("$XDG_RUNTIME_DIR could not be used.")]
286
+
RuntimeDirectoryMissing(#[source] std::env::VarError),
287
+
288
+
#[diagnostic(
289
+
code(wire::command::OneshotRecvError),
290
+
url("{DOCS_URL}#{}", self.code().unwrap())
291
+
)]
292
+
#[error("Error waiting for begin message")]
293
+
OneshotRecvError(#[source] tokio::sync::oneshot::error::RecvError),
294
+
}
295
+
296
+
/// Top-level error type for the wire core library; mostly a transparent
/// wrapper around the domain-specific error enums above.
#[derive(Debug, Diagnostic, Error)]
pub enum HiveLibError {
    /// See [`HiveInitialisationError`].
    #[error(transparent)]
    #[diagnostic(transparent)]
    HiveInitialisationError(HiveInitialisationError),

    /// See [`NetworkError`].
    #[error(transparent)]
    #[diagnostic(transparent)]
    NetworkError(NetworkError),

    /// See [`ActivationError`].
    #[error(transparent)]
    #[diagnostic(transparent)]
    ActivationError(ActivationError),

    /// See [`CommandError`].
    #[error(transparent)]
    #[diagnostic(transparent)]
    CommandError(CommandError),

    /// See [`HiveLocationError`].
    #[error(transparent)]
    #[diagnostic(transparent)]
    HiveLocationError(HiveLocationError),

    /// Applying the named key failed; wraps the underlying [`KeyError`].
    #[error("Failed to apply key {}", .0)]
    KeyError(
        String,
        #[source]
        #[diagnostic_source]
        KeyError,
    ),

    /// Building the node's configuration with nix failed.
    #[diagnostic(
        code(wire::BuildNode),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("failed to build node {name}")]
    NixBuildError {
        name: Name,
        #[source]
        source: CommandError,
    },

    /// Copying a store path to the node failed.
    #[diagnostic(
        code(wire::CopyPath),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("failed to copy path {path} to node {name}")]
    NixCopyError {
        name: Name,
        path: String,
        #[source]
        error: Box<CommandError>,
        #[help]
        help: Option<Box<String>>,
    },

    /// Evaluating a hive attribute with nix failed.
    #[diagnostic(code(wire::Evaluate))]
    #[error("failed to evaluate `{attribute}` from the context of a hive.")]
    NixEvalError {
        attribute: String,

        #[source]
        source: CommandError,

        #[help]
        help: Option<Box<String>>,
    },

    /// Length-delimited encoding of internal data failed.
    #[diagnostic(
        code(wire::Encoding),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("error encoding length delimited data")]
    Encoding(#[source] std::io::Error),

    /// A SIGINT was received and wire shut down.
    #[diagnostic(
        code(wire::SIGINT),
        url("{DOCS_URL}#{}", self.code().unwrap())
    )]
    #[error("SIGINT received, shut down")]
    Sigint,
}
+480
crates/core/src/hive/mod.rs
+480
crates/core/src/hive/mod.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use itertools::Itertools;
5
+
use nix_compat::flakeref::FlakeRef;
6
+
use node::{Name, Node};
7
+
use owo_colors::{OwoColorize, Stream};
8
+
use serde::de::Error;
9
+
use serde::{Deserialize, Deserializer, Serialize};
10
+
use std::collections::HashMap;
11
+
use std::collections::hash_map::OccupiedEntry;
12
+
use std::ffi::OsStr;
13
+
use std::fmt::Display;
14
+
use std::fs;
15
+
use std::path::PathBuf;
16
+
use std::str::FromStr;
17
+
use std::sync::Arc;
18
+
use tracing::{debug, info, instrument};
19
+
20
+
use crate::cache::InspectionCache;
21
+
use crate::commands::builder::CommandStringBuilder;
22
+
use crate::commands::common::evaluate_hive_attribute;
23
+
use crate::commands::{CommandArguments, Either, WireCommandChip, run_command};
24
+
use crate::errors::{HiveInitialisationError, HiveLocationError};
25
+
use crate::{EvalGoal, HiveLibError, SubCommandModifiers};
26
+
pub mod node;
27
+
pub mod steps;
28
+
29
+
/// Parsed representation of a wire hive: the full set of deployable
/// nodes plus the schema version used to validate compatibility between
/// the binary and the evaluated nix input.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(deny_unknown_fields)]
pub struct Hive {
    /// All nodes in the hive, keyed by node name.
    pub nodes: HashMap<Name, Node>,

    /// Schema version of the evaluated hive JSON; must equal
    /// `Hive::SCHEMA_VERSION` (enforced during deserialization by
    /// `check_schema_version`).
    #[serde(deserialize_with = "check_schema_version", rename = "_schema")]
    pub schema: u32,
}
37
+
38
+
/// An evaluation action to perform against a hive.
pub enum Action<'a> {
    /// Inspect the hive as a whole.
    Inspect,
    /// Evaluate a single node, referenced by its occupied map entry.
    EvaluateNode(OccupiedEntry<'a, String, Node>),
}
42
+
43
+
fn check_schema_version<'de, D: Deserializer<'de>>(d: D) -> Result<u32, D::Error> {
44
+
let version = u32::deserialize(d)?;
45
+
if version != Hive::SCHEMA_VERSION {
46
+
return Err(D::Error::custom(
47
+
"Version mismatch for Hive. Please ensure the binary and your wire input match!",
48
+
));
49
+
}
50
+
Ok(version)
51
+
}
52
+
53
+
impl Hive {
54
+
pub const SCHEMA_VERSION: u32 = 1;
55
+
56
+
#[instrument(skip_all, name = "eval_hive")]
57
+
pub async fn new_from_path(
58
+
location: &HiveLocation,
59
+
cache: Option<InspectionCache>,
60
+
modifiers: SubCommandModifiers,
61
+
) -> Result<Hive, HiveLibError> {
62
+
info!("evaluating hive {location:?}");
63
+
64
+
if let Some(ref cache) = cache
65
+
&& let HiveLocation::Flake { prefetch, .. } = location
66
+
&& let Some(hive) = cache.get_hive(prefetch).await
67
+
{
68
+
return Ok(hive);
69
+
}
70
+
71
+
let output = evaluate_hive_attribute(location, &EvalGoal::Inspect, modifiers).await?;
72
+
73
+
let hive: Hive = serde_json::from_str(&output).map_err(|err| {
74
+
HiveLibError::HiveInitialisationError(HiveInitialisationError::ParseEvaluateError(err))
75
+
})?;
76
+
77
+
if let Some(cache) = cache
78
+
&& let HiveLocation::Flake { prefetch, .. } = location
79
+
{
80
+
cache.store_hive(prefetch, &output).await;
81
+
}
82
+
83
+
Ok(hive)
84
+
}
85
+
86
+
/// # Errors
87
+
///
88
+
/// Returns an error if a node in nodes does not exist in the hive.
89
+
pub fn force_always_local(&mut self, nodes: Vec<String>) -> Result<(), HiveLibError> {
90
+
for node in nodes {
91
+
info!("Forcing a local build for {node}");
92
+
93
+
self.nodes
94
+
.get_mut(&Name(Arc::from(node.clone())))
95
+
.ok_or(HiveLibError::HiveInitialisationError(
96
+
HiveInitialisationError::NodeDoesNotExist(node.clone()),
97
+
))?
98
+
.build_remotely = false;
99
+
}
100
+
101
+
Ok(())
102
+
}
103
+
}
104
+
105
+
impl Display for Hive {
    /// Pretty-prints every node (platform, tags, connection, build/deploy
    /// policy, key listing) followed by a hive-wide summary block.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for (name, node) in &self.nodes {
            // Header: bold node name plus its (dimmed, italic) host platform.
            writeln!(
                f,
                "Node {} {}:\n",
                name.bold(),
                format!("({})", node.host_platform)
                    .italic()
                    .if_supports_color(Stream::Stdout, |x| x.dimmed()),
            )?;

            // Tags line is omitted entirely when the node has no tags.
            if !node.tags.is_empty() {
                write!(f, " > {}", "Tags:".bold())?;
                writeln!(f, " {:?}", node.tags)?;
            }

            write!(f, " > {}", "Connection:".bold())?;
            writeln!(f, " {{{}}}", node.target)?;

            // Policy flags, each annotated with the deployment option it mirrors.
            write!(
                f,
                " > {} {}{}",
                "Build remotely".bold(),
                "`deployment.buildOnTarget`"
                    .if_supports_color(Stream::Stdout, |x| x.dimmed())
                    .italic(),
                ":".bold()
            )?;
            writeln!(f, " {}", node.build_remotely)?;

            write!(
                f,
                " > {} {}{}",
                "Local apply allowed".bold(),
                "`deployment.allowLocalDeployment`"
                    .if_supports_color(Stream::Stdout, |x| x.dimmed())
                    .italic(),
                ":".bold()
            )?;
            writeln!(f, " {}", node.allow_local_deployment)?;

            // Key summary plus one line per key, only when keys exist.
            if !node.keys.is_empty() {
                write!(f, " > {}", "Keys:".bold())?;
                writeln!(f, " {} key(s)", node.keys.len())?;

                for key in &node.keys {
                    writeln!(f, " > {key}")?;
                }
            }

            writeln!(f)?;
        }

        // Totals: all keys across nodes, and the count of distinct keys.
        let total_keys = self
            .nodes
            .values()
            .flat_map(|node| node.keys.iter())
            .collect::<Vec<_>>();
        let distinct_keys = self
            .nodes
            .values()
            .flat_map(|node| node.keys.iter())
            .unique()
            .collect::<Vec<_>>()
            .len();

        write!(f, "{}", "Summary:".bold())?;
        writeln!(
            f,
            " {} total node(s), totalling {} keys ({distinct_keys} distinct).",
            self.nodes.len(),
            total_keys.len()
        )?;
        writeln!(
            f,
            "{}",
            "Note: Listed connections are tried from Left to Right".italic(),
        )?;

        Ok(())
    }
}
188
+
189
+
/// Parsed output of `nix flake prefetch --json`.
#[derive(Debug, PartialEq, Eq, Deserialize)]
pub struct FlakePrefetch {
    // NAR hash of the prefetched flake; used as the inspection-cache key.
    pub(crate) hash: String,
    #[serde(rename = "storePath")]
    pub(crate) store_path: String,
}

/// Where a hive lives on disk: either a plain `hive.nix` file or a
/// (pre-fetched) flake.
#[derive(Debug, PartialEq, Eq)]
pub enum HiveLocation {
    /// Path to a `hive.nix` file.
    HiveNix(PathBuf),
    /// A flake URI together with its prefetch result.
    Flake {
        uri: String,
        prefetch: FlakePrefetch,
    },
}
204
+
205
+
impl HiveLocation {
    /// Prefetches the flake at `uri` via `nix flake prefetch --json` and
    /// wraps the result into [`HiveLocation::Flake`].
    async fn get_flake(
        uri: String,
        modifiers: SubCommandModifiers,
    ) -> Result<HiveLocation, HiveLibError> {
        let mut command_string = CommandStringBuilder::nix();
        // Experimental features are passed explicitly so prefetching works
        // even when the user's nix.conf does not enable them.
        command_string.args(&[
            "flake",
            "prefetch",
            "--extra-experimental-features",
            "nix-command",
            "--extra-experimental-features",
            "flakes",
            "--json",
        ]);
        command_string.arg(&uri);

        let command = run_command(
            &CommandArguments::new(command_string, modifiers)
                .mode(crate::commands::ChildOutputMode::Generic),
        )
        .await?;

        let result = command
            .wait_till_success()
            .await
            .map_err(HiveLibError::CommandError)?;

        debug!(hash_json = ?result);

        // Either variant carries the captured output in its last tuple slot.
        let prefetch = serde_json::from_str(&match result {
            Either::Left((.., output)) | Either::Right((.., output)) => output,
        })
        .map_err(|x| {
            HiveLibError::HiveInitialisationError(HiveInitialisationError::ParsePrefetchError(x))
        })?;

        debug!(prefetch = ?prefetch);

        Ok(HiveLocation::Flake { uri, prefetch })
    }
}
247
+
248
+
/// Resolves a user-supplied `path` (filesystem path or flake URI) into a
/// [`HiveLocation`].
///
/// Resolution rules for filesystem paths: an explicit `hive.nix` wins; a
/// directory containing `flake.nix` is treated as a flake; otherwise
/// `hive.nix` is assumed inside the directory.
///
/// # Errors
///
/// Returns an error for malformed paths/flakerefs or unsupported flakeref types.
pub async fn get_hive_location(
    path: String,
    modifiers: SubCommandModifiers,
) -> Result<HiveLocation, HiveLibError> {
    let flakeref = FlakeRef::from_str(&path);

    // Shared closure: map a concrete filesystem path to a location.
    let path_to_location = async |path: PathBuf| {
        Ok(match path.file_name().and_then(OsStr::to_str) {
            Some("hive.nix") => HiveLocation::HiveNix(path.clone()),
            Some(_) => {
                // flake.nix takes priority over an implied hive.nix.
                if fs::metadata(path.join("flake.nix")).is_ok() {
                    HiveLocation::get_flake(path.display().to_string(), modifiers).await?
                } else {
                    HiveLocation::HiveNix(path.join("hive.nix"))
                }
            }
            None => {
                return Err(HiveLibError::HiveLocationError(
                    HiveLocationError::MalformedPath(path.clone()),
                ));
            }
        })
    };

    match flakeref {
        // A URL-parse failure means the input was a bare filesystem path.
        Err(nix_compat::flakeref::FlakeRefError::UrlParseError(_err)) => {
            let path = PathBuf::from(path);
            Ok(path_to_location(path).await?)
        }
        Ok(FlakeRef::Path { path, .. }) => Ok(path_to_location(path).await?),
        // Remote-ish flakeref types are all prefetched the same way.
        Ok(
            FlakeRef::Git { .. }
            | FlakeRef::GitHub { .. }
            | FlakeRef::GitLab { .. }
            | FlakeRef::Tarball { .. }
            | FlakeRef::Mercurial { .. }
            | FlakeRef::SourceHut { .. },
        ) => Ok(HiveLocation::get_flake(path, modifiers).await?),
        Err(err) => Err(HiveLibError::HiveLocationError(
            HiveLocationError::Malformed(err),
        )),
        // Parsed fine, but a flakeref type wire does not support.
        Ok(flakeref) => Err(HiveLibError::HiveLocationError(
            HiveLocationError::TypeUnsupported(Box::new(flakeref)),
        )),
    }
}
294
+
295
+
// Integration tests for hive location resolution, evaluation, and
// `force_always_local`. Tests marked `no_web_tests` need network access.
#[cfg(test)]
mod tests {
    use im::vector;

    use crate::{
        errors::CommandError,
        get_test_path,
        hive::steps::keys::{Key, Source, UploadKeyAt},
        location,
        test_support::make_flake_sandbox,
    };

    use super::*;
    use std::{assert_matches::assert_matches, env};

    // flake should always come before hive.nix
    #[tokio::test]
    async fn test_hive_dot_nix_priority() {
        let location = location!(get_test_path!());

        assert_matches!(location, HiveLocation::Flake { .. });
    }

    #[tokio::test]
    #[cfg_attr(feature = "no_web_tests", ignore)]
    async fn test_hive_file() {
        let location = location!(get_test_path!());

        let hive = Hive::new_from_path(&location, None, SubCommandModifiers::default())
            .await
            .unwrap();

        let node = Node {
            target: node::Target::from_host("192.168.122.96"),
            ..Default::default()
        };

        let mut nodes = HashMap::new();
        nodes.insert(Name("node-a".into()), node);

        assert_eq!(
            hive,
            Hive {
                nodes,
                schema: Hive::SCHEMA_VERSION
            }
        );
    }

    // Exercises a hive with keys, tags, and buildOnTarget set.
    #[tokio::test]
    #[cfg_attr(feature = "no_web_tests", ignore)]
    async fn non_trivial_hive() {
        let location = location!(get_test_path!());

        let hive = Hive::new_from_path(&location, None, SubCommandModifiers::default())
            .await
            .unwrap();

        let node = Node {
            target: node::Target::from_host("name"),
            keys: vector![Key {
                name: "different-than-a".into(),
                dest_dir: "/run/keys/".into(),
                path: "/run/keys/different-than-a".into(),
                group: "root".into(),
                user: "root".into(),
                permissions: "0600".into(),
                source: Source::String("hi".into()),
                upload_at: UploadKeyAt::PreActivation,
                environment: im::HashMap::new()
            }],
            build_remotely: true,
            ..Default::default()
        };

        let mut nodes = HashMap::new();
        nodes.insert(Name("node-a".into()), node);

        assert_eq!(
            hive,
            Hive {
                nodes,
                schema: Hive::SCHEMA_VERSION
            }
        );
    }

    #[tokio::test]
    #[cfg_attr(feature = "no_web_tests", ignore)]
    async fn flake_hive() {
        let tmp_dir = make_flake_sandbox(&get_test_path!()).unwrap();

        let location = get_hive_location(
            tmp_dir.path().display().to_string(),
            SubCommandModifiers::default(),
        )
        .await
        .unwrap();
        let hive = Hive::new_from_path(&location, None, SubCommandModifiers::default())
            .await
            .unwrap();

        let mut nodes = HashMap::new();

        // a merged node
        nodes.insert(Name("node-a".into()), Node::from_host("node-a"));
        // a non-merged node
        nodes.insert(Name("node-b".into()), Node::from_host("node-b"));

        assert_eq!(
            hive,
            Hive {
                nodes,
                schema: Hive::SCHEMA_VERSION
            }
        );

        tmp_dir.close().unwrap();
    }

    // A hive without meta.nixpkgs must fail evaluation with a clear message.
    #[tokio::test]
    async fn no_nixpkgs() {
        let location = location!(get_test_path!());

        assert_matches!(
            Hive::new_from_path(&location, None, SubCommandModifiers::default()).await,
            Err(HiveLibError::NixEvalError {
                source: CommandError::CommandFailed {
                    logs,
                    ..
                },
                ..
            })
            if logs.contains("makeHive called without meta.nixpkgs specified")
        );
    }

    // Setting the read-only deployment._keys option must be rejected.
    #[tokio::test]
    async fn _keys_should_fail() {
        let location = location!(get_test_path!());

        assert_matches!(
            Hive::new_from_path(&location, None, SubCommandModifiers::default()).await,
            Err(HiveLibError::NixEvalError {
                source: CommandError::CommandFailed {
                    logs,
                    ..
                },
                ..
            })
            if logs.contains("The option `deployment._keys' is read-only, but it's set multiple times.")
        );
    }

    #[tokio::test]
    async fn test_force_always_local() {
        let mut location: PathBuf = env::var("WIRE_TEST_DIR").unwrap().into();
        location.push("non_trivial_hive");
        let location = location!(location);

        let mut hive = Hive::new_from_path(&location, None, SubCommandModifiers::default())
            .await
            .unwrap();

        // Unknown node names must surface a NodeDoesNotExist error...
        assert_matches!(
            hive.force_always_local(vec!["non-existent".to_string()]),
            Err(HiveLibError::HiveInitialisationError(
                HiveInitialisationError::NodeDoesNotExist(node)
            )) if node == "non-existent"
        );

        // ...without mutating any existing node along the way.
        for node in hive.nodes.values() {
            assert!(node.build_remotely);
        }

        assert_matches!(hive.force_always_local(vec!["node-a".to_string()]), Ok(()));

        assert!(
            !hive
                .nodes
                .get(&Name("node-a".into()))
                .unwrap()
                .build_remotely
        );
    }
}
+939
crates/core/src/hive/node.rs
+939
crates/core/src/hive/node.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#![allow(clippy::missing_errors_doc)]
5
+
use enum_dispatch::enum_dispatch;
6
+
use gethostname::gethostname;
7
+
use serde::{Deserialize, Serialize};
8
+
use std::assert_matches::debug_assert_matches;
9
+
use std::fmt::Display;
10
+
use std::sync::Arc;
11
+
use std::sync::atomic::AtomicBool;
12
+
use tokio::sync::oneshot;
13
+
use tracing::{Instrument, Level, Span, debug, error, event, instrument, trace};
14
+
15
+
use crate::commands::builder::CommandStringBuilder;
16
+
use crate::commands::common::evaluate_hive_attribute;
17
+
use crate::commands::{CommandArguments, WireCommandChip, run_command};
18
+
use crate::errors::NetworkError;
19
+
use crate::hive::HiveLocation;
20
+
use crate::hive::steps::build::Build;
21
+
use crate::hive::steps::cleanup::CleanUp;
22
+
use crate::hive::steps::evaluate::Evaluate;
23
+
use crate::hive::steps::keys::{Key, Keys, PushKeyAgent, UploadKeyAt};
24
+
use crate::hive::steps::ping::Ping;
25
+
use crate::hive::steps::push::{PushBuildOutput, PushEvaluatedOutput};
26
+
use crate::status::STATUS;
27
+
use crate::{EvalGoal, StrictHostKeyChecking, SubCommandModifiers};
28
+
29
+
use super::HiveLibError;
30
+
use super::steps::activate::SwitchToConfiguration;
31
+
32
+
/// A node's name within the hive; cheap to clone (`Arc<str>`).
#[derive(
    Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq, PartialOrd, Ord, derive_more::Display,
)]
pub struct Name(pub Arc<str>);

/// SSH connection target for a node: candidate hosts plus user and port.
#[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq)]
pub struct Target {
    /// Candidate hosts, tried in order (see `host_failed`).
    pub hosts: Vec<Arc<str>>,
    pub user: Arc<str>,
    pub port: u32,

    // Runtime-only cursor into `hosts`; not part of the wire format.
    #[serde(skip)]
    current_host: usize,
}
46
+
47
+
impl Target {
    /// Builds the SSH option arguments as a single space-joined string
    /// (convenience wrapper over [`Target::create_ssh_args`]).
    #[instrument(ret(level = tracing::Level::DEBUG), skip_all)]
    pub fn create_ssh_opts(
        &self,
        modifiers: SubCommandModifiers,
        master: bool,
    ) -> Result<String, HiveLibError> {
        self.create_ssh_args(modifiers, false, master)
            .map(|x| x.join(" "))
    }

    /// Builds the argument vector passed to `ssh`: login user, port, and
    /// `-o` options (host-key policy, password auth disabled).
    ///
    /// NOTE(review): `non_interactive_forced` and `master` are currently
    /// unused and the function never returns `Err` — presumably reserved
    /// for future options; confirm before relying on them.
    #[instrument(ret(level = tracing::Level::DEBUG))]
    pub fn create_ssh_args(
        &self,
        modifiers: SubCommandModifiers,
        non_interactive_forced: bool,
        master: bool,
    ) -> Result<Vec<String>, HiveLibError> {
        let mut vector = vec![
            "-l".to_string(),
            self.user.to_string(),
            "-p".to_string(),
            self.port.to_string(),
        ];
        let mut options = vec![
            format!(
                "StrictHostKeyChecking={}",
                match modifiers.ssh_accept_host {
                    StrictHostKeyChecking::AcceptNew => "accept-new",
                    StrictHostKeyChecking::No => "no",
                }
            )
            .to_string(),
        ];

        // Force key-based auth only; never fall back to interactive prompts.
        options.extend(["PasswordAuthentication=no".to_string()]);
        options.extend(["KbdInteractiveAuthentication=no".to_string()]);

        // Each option needs its own `-o` flag: -o A -o B ...
        vector.push("-o".to_string());
        vector.extend(options.into_iter().intersperse("-o".to_string()));

        Ok(vector)
    }
}
91
+
92
+
// Test-only default target: single placeholder host, root over port 22.
#[cfg(test)]
impl Default for Target {
    fn default() -> Self {
        Target {
            hosts: vec!["NAME".into()],
            user: "root".into(),
            port: 22,
            current_host: 0,
        }
    }
}

#[cfg(test)]
impl<'a> Context<'a> {
    /// Builds a Context with a default apply-switch objective, for step
    /// ordering tests.
    fn create_test_context(
        hive_location: HiveLocation,
        name: &'a Name,
        node: &'a mut Node,
    ) -> Self {
        Context {
            name,
            node,
            hive_location: Arc::new(hive_location),
            modifiers: SubCommandModifiers::default(),
            objective: Objective::Apply(ApplyObjective {
                goal: Goal::SwitchToConfiguration(SwitchToConfigurationGoal::Switch),
                no_keys: false,
                reboot: false,
                should_apply_locally: false,
                substitute_on_destination: false,
                handle_unreachable: HandleUnreachable::default(),
            }),
            state: StepState::default(),
            should_quit: Arc::new(AtomicBool::new(false)),
        }
    }
}
129
+
130
+
impl Target {
    /// Returns the currently preferred host, or `HostsExhausted` once
    /// `host_failed` has advanced past the last candidate.
    pub fn get_preferred_host(&self) -> Result<&Arc<str>, HiveLibError> {
        self.hosts
            .get(self.current_host)
            .ok_or(HiveLibError::NetworkError(NetworkError::HostsExhausted))
    }

    /// Advances to the next candidate host after a connection failure.
    pub const fn host_failed(&mut self) {
        self.current_host += 1;
    }

    /// Test helper: a target with exactly one host and default user/port.
    #[cfg(test)]
    #[must_use]
    pub fn from_host(host: &str) -> Self {
        Target {
            hosts: vec![host.into()],
            ..Default::default()
        }
    }
}
150
+
151
+
impl Display for Target {
    /// Renders every candidate host as `user@host:port`, comma-separated,
    /// in the order they will be attempted.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let rendered: Vec<String> = self
            .hosts
            .iter()
            .map(|host| format!("{}@{host}:{}", self.user, self.port))
            .collect();

        write!(f, "{}", rendered.join(", "))
    }
}
164
+
165
+
/// One deployable machine as described by the hive's `deployment` options.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
pub struct Node {
    #[serde(rename = "target")]
    pub target: Target,

    // Mirrors `deployment.buildOnTarget`.
    #[serde(rename = "buildOnTarget")]
    pub build_remotely: bool,

    // Mirrors `deployment.allowLocalDeployment`.
    #[serde(rename = "allowLocalDeployment")]
    pub allow_local_deployment: bool,

    #[serde(default)]
    pub tags: im::HashSet<String>,

    // Incoming JSON uses the internal `_keys` field name.
    #[serde(rename(deserialize = "_keys", serialize = "keys"))]
    pub keys: im::Vector<Key>,

    // e.g. "x86_64-linux"; emitted by the Nix side as `_hostPlatform`.
    #[serde(rename(deserialize = "_hostPlatform", serialize = "host_platform"))]
    pub host_platform: Arc<str>,

    #[serde(rename(
        deserialize = "privilegeEscalationCommand",
        serialize = "privilege_escalation_command"
    ))]
    pub privilege_escalation_command: im::Vector<Arc<str>>,
}

// Test-only default node: local-deployable, builds locally, sudo escalation.
#[cfg(test)]
impl Default for Node {
    fn default() -> Self {
        Node {
            target: Target::default(),
            keys: im::Vector::new(),
            tags: im::HashSet::new(),
            privilege_escalation_command: vec!["sudo".into(), "--".into()].into(),
            allow_local_deployment: true,
            build_remotely: false,
            host_platform: "x86_64-linux".into(),
        }
    }
}
206
+
207
+
impl Node {
    /// Test helper: a default node targeting a single host.
    #[cfg(test)]
    #[must_use]
    pub fn from_host(host: &str) -> Self {
        Node {
            target: Target::from_host(host),
            ..Default::default()
        }
    }

    /// Tests the connection to a node
    ///
    /// Runs `ssh <user>@<host> <opts> exit` against the currently preferred
    /// host and maps failure to `NetworkError::HostUnreachable`.
    pub async fn ping(&self, modifiers: SubCommandModifiers) -> Result<(), HiveLibError> {
        let host = self.target.get_preferred_host()?;

        let mut command_string = CommandStringBuilder::new("ssh");
        command_string.arg(format!("{}@{host}", self.target.user));
        command_string.arg(self.target.create_ssh_opts(modifiers, true)?);
        // `exit` makes the session terminate immediately on success.
        command_string.arg("exit");

        let output = run_command(
            &CommandArguments::new(command_string, modifiers)
                .log_stdout()
                .mode(crate::commands::ChildOutputMode::Interactive),
        )
        .await?;

        output.wait_till_success().await.map_err(|source| {
            HiveLibError::NetworkError(NetworkError::HostUnreachable {
                host: host.to_string(),
                source,
            })
        })?;

        Ok(())
    }
}
243
+
244
+
#[must_use]
245
+
pub fn should_apply_locally(allow_local_deployment: bool, name: &str) -> bool {
246
+
*name == *gethostname() && allow_local_deployment
247
+
}
248
+
249
+
/// What gets copied to a target: an evaluated derivation or a plain store path.
#[derive(derive_more::Display)]
pub enum Push<'a> {
    Derivation(&'a Derivation),
    Path(&'a String),
}

/// A `.drv` store path as returned by evaluation (JSON string).
#[derive(Deserialize, Clone, Debug)]
pub struct Derivation(String);

impl Display for Derivation {
    /// Appends `^*` to the path — the Nix installable syntax selecting all
    /// outputs of the derivation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f).and_then(|()| write!(f, "^*"))
    }
}
263
+
264
+
/// The `switch-to-configuration` subcommand to run on the target.
#[derive(derive_more::Display, Debug, Clone, Copy)]
pub enum SwitchToConfigurationGoal {
    Switch,
    Boot,
    Test,
    DryActivate,
}

/// How far the deployment pipeline should run.
#[derive(derive_more::Display, Clone, Copy)]
pub enum Goal {
    /// Full activation with the given switch goal.
    SwitchToConfiguration(SwitchToConfigurationGoal),
    /// Stop after building.
    Build,
    /// Stop after pushing the evaluated output.
    Push,
    /// Only upload keys.
    Keys,
}

// TODO: Get rid of this allow and resolve it
#[allow(clippy::struct_excessive_bools)]
#[derive(Clone, Copy)]
pub struct ApplyObjective {
    pub goal: Goal,
    /// Skip all key-upload steps.
    pub no_keys: bool,
    pub reboot: bool,
    /// Apply on this machine instead of over SSH.
    pub should_apply_locally: bool,
    pub substitute_on_destination: bool,
    /// Policy when the initial ping fails.
    pub handle_unreachable: HandleUnreachable,
}

/// Top-level intent for a node: deploy it, or just build locally.
#[derive(Clone, Copy)]
pub enum Objective {
    Apply(ApplyObjective),
    BuildLocally,
}
297
+
298
+
/// A single unit of the deployment pipeline, dispatched via `enum_dispatch`
/// over [`Step`].
#[enum_dispatch]
pub(crate) trait ExecuteStep: Send + Sync + Display + std::fmt::Debug {
    /// Runs the step, reading/writing shared progress through `ctx.state`.
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError>;

    /// Whether this step applies under the context's objective/goal.
    fn should_execute(&self, context: &Context) -> bool;
}

// may include other options such as FailAll in the future
#[non_exhaustive]
#[derive(Clone, Copy, Default)]
pub enum HandleUnreachable {
    /// Treat an unreachable host as success and skip the node.
    Ignore,
    #[default]
    FailNode,
}

/// Mutable state threaded between steps of one node's pipeline.
#[derive(Default)]
pub struct StepState {
    pub evaluation: Option<Derivation>,
    // Receives the result of the concurrently-spawned evaluation task.
    pub evaluation_rx: Option<oneshot::Receiver<Result<Derivation, HiveLibError>>>,
    pub build: Option<String>,
    pub key_agent_directory: Option<String>,
}

/// Everything a step needs: the node being deployed, the hive location,
/// CLI modifiers, shared state, and the global shutdown flag.
pub struct Context<'a> {
    pub name: &'a Name,
    pub node: &'a mut Node,
    pub hive_location: Arc<HiveLocation>,
    pub modifiers: SubCommandModifiers,
    pub state: StepState,
    // Set externally (e.g. on SIGINT) to abort between steps.
    pub should_quit: Arc<AtomicBool>,
    pub objective: Objective,
}
331
+
332
+
/// Closed set of pipeline steps; `enum_dispatch` forwards `ExecuteStep`
/// calls to the wrapped unit structs.
#[enum_dispatch(ExecuteStep)]
#[derive(Debug, PartialEq)]
enum Step {
    Ping,
    PushKeyAgent,
    Keys,
    Evaluate,
    PushEvaluatedOutput,
    Build,
    PushBuildOutput,
    SwitchToConfiguration,
    CleanUp,
}

impl Display for Step {
    // Delegates to each variant's own Display (enum_dispatch does not
    // forward Display, so this is spelled out manually).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Ping(step) => step.fmt(f),
            Self::PushKeyAgent(step) => step.fmt(f),
            Self::Keys(step) => step.fmt(f),
            Self::Evaluate(step) => step.fmt(f),
            Self::PushEvaluatedOutput(step) => step.fmt(f),
            Self::Build(step) => step.fmt(f),
            Self::PushBuildOutput(step) => step.fmt(f),
            Self::SwitchToConfiguration(step) => step.fmt(f),
            Self::CleanUp(step) => step.fmt(f),
        }
    }
}

/// Runs the ordered step pipeline for one node.
pub struct GoalExecutor<'a> {
    steps: Vec<Step>,
    context: Context<'a>,
}
366
+
367
+
/// returns Err if the application should shut down.
368
+
fn app_shutdown_guard(context: &Context) -> Result<(), HiveLibError> {
369
+
if context
370
+
.should_quit
371
+
.load(std::sync::atomic::Ordering::Relaxed)
372
+
{
373
+
return Err(HiveLibError::Sigint);
374
+
}
375
+
376
+
Ok(())
377
+
}
378
+
379
+
impl<'a> GoalExecutor<'a> {
    /// Builds the full, ordered step pipeline; `should_execute` filters it
    /// per-objective at run time.
    #[must_use]
    pub fn new(context: Context<'a>) -> Self {
        Self {
            steps: vec![
                Step::Ping(Ping),
                Step::PushKeyAgent(PushKeyAgent),
                Step::Keys(Keys {
                    filter: UploadKeyAt::NoFilter,
                }),
                Step::Keys(Keys {
                    filter: UploadKeyAt::PreActivation,
                }),
                Step::Evaluate(super::steps::evaluate::Evaluate),
                Step::PushEvaluatedOutput(super::steps::push::PushEvaluatedOutput),
                Step::Build(super::steps::build::Build),
                Step::PushBuildOutput(super::steps::push::PushBuildOutput),
                Step::SwitchToConfiguration(SwitchToConfiguration),
                Step::Keys(Keys {
                    filter: UploadKeyAt::PostActivation,
                }),
            ],
            context,
        }
    }

    /// Background task: evaluates the node's toplevel and sends the parsed
    /// [`Derivation`] (or the error) through `tx`.
    #[instrument(skip_all, name = "eval")]
    async fn evaluate_task(
        tx: oneshot::Sender<Result<Derivation, HiveLibError>>,
        hive_location: Arc<HiveLocation>,
        name: Name,
        modifiers: SubCommandModifiers,
    ) {
        let output =
            evaluate_hive_attribute(&hive_location, &EvalGoal::GetTopLevel(&name), modifiers)
                .await
                .map(|output| {
                    serde_json::from_str::<Derivation>(&output).expect("failed to parse derivation")
                });

        debug!(output = ?output, done = true);

        // Receiver may already be dropped (node failed/aborted); ignore.
        let _ = tx.send(output);
    }

    /// Runs every applicable step in order, updating global STATUS and
    /// honouring SIGINT between steps. Consumes the executor.
    #[instrument(skip_all, fields(node = %self.context.name))]
    pub async fn execute(mut self) -> Result<(), HiveLibError> {
        app_shutdown_guard(&self.context)?;

        let (tx, rx) = oneshot::channel();
        self.context.state.evaluation_rx = Some(rx);

        // The name of this span should never be changed without updating
        // `wire/cli/tracing_setup.rs`
        debug_assert_matches!(Span::current().metadata().unwrap().name(), "execute");
        // This span should always have a `node` field by the same file
        debug_assert!(
            Span::current()
                .metadata()
                .unwrap()
                .fields()
                .field("node")
                .is_some()
        );

        // Evaluation is only needed when something will be built/pushed.
        let spawn_evaluator = match self.context.objective {
            Objective::Apply(apply_objective) => !matches!(apply_objective.goal, Goal::Keys),
            Objective::BuildLocally => true,
        };

        if spawn_evaluator {
            // Evaluate concurrently with the early (ping/keys) steps.
            tokio::spawn(
                GoalExecutor::evaluate_task(
                    tx,
                    self.context.hive_location.clone(),
                    self.context.name.clone(),
                    self.context.modifiers,
                )
                .in_current_span(),
            );
        }

        let steps = self
            .steps
            .iter()
            .filter(|step| step.should_execute(&self.context))
            .inspect(|step| {
                trace!("Will execute step `{step}` for {}", self.context.name);
            })
            .collect::<Vec<_>>();
        let length = steps.len();

        for (position, step) in steps.iter().enumerate() {
            app_shutdown_guard(&self.context)?;

            event!(
                Level::INFO,
                step = step.to_string(),
                progress = format!("{}/{length}", position + 1)
            );

            STATUS
                .lock()
                .set_node_step(self.context.name, step.to_string());

            if let Err(err) = step.execute(&mut self.context).await.inspect_err(|_| {
                error!("Failed to execute `{step}`");
            }) {
                // discard error from cleanup
                let _ = CleanUp.execute(&mut self.context).await;

                // A failed ping may be tolerated when the objective says so.
                if let Objective::Apply(apply_objective) = self.context.objective
                    && matches!(step, Step::Ping(..))
                    && matches!(
                        apply_objective.handle_unreachable,
                        HandleUnreachable::Ignore,
                    )
                {
                    return Ok(());
                }

                STATUS.lock().mark_node_failed(self.context.name);

                return Err(err);
            }
        }

        STATUS.lock().mark_node_succeeded(self.context.name);

        Ok(())
    }
}
511
+
512
+
#[cfg(test)]
513
+
mod tests {
514
+
use rand::distr::Alphabetic;
515
+
516
+
use super::*;
517
+
use crate::{
518
+
function_name, get_test_path,
519
+
hive::{Hive, get_hive_location},
520
+
location,
521
+
};
522
+
use std::{assert_matches::assert_matches, path::PathBuf};
523
+
use std::{collections::HashMap, env};
524
+
525
+
fn get_steps(goal_executor: GoalExecutor) -> std::vec::Vec<Step> {
526
+
goal_executor
527
+
.steps
528
+
.into_iter()
529
+
.filter(|step| step.should_execute(&goal_executor.context))
530
+
.collect::<Vec<_>>()
531
+
}
532
+
533
+
#[tokio::test]
534
+
#[cfg_attr(feature = "no_web_tests", ignore)]
535
+
async fn default_values_match() {
536
+
let mut path = get_test_path!();
537
+
538
+
let location =
539
+
get_hive_location(path.display().to_string(), SubCommandModifiers::default())
540
+
.await
541
+
.unwrap();
542
+
let hive = Hive::new_from_path(&location, None, SubCommandModifiers::default())
543
+
.await
544
+
.unwrap();
545
+
546
+
let node = Node::default();
547
+
548
+
let mut nodes = HashMap::new();
549
+
nodes.insert(Name("NAME".into()), node);
550
+
551
+
path.push("hive.nix");
552
+
553
+
assert_eq!(
554
+
hive,
555
+
Hive {
556
+
nodes,
557
+
schema: Hive::SCHEMA_VERSION
558
+
}
559
+
);
560
+
}
561
+
562
+
#[tokio::test]
563
+
async fn order_build_locally() {
564
+
let location = location!(get_test_path!());
565
+
let mut node = Node {
566
+
build_remotely: false,
567
+
..Default::default()
568
+
};
569
+
let name = &Name(function_name!().into());
570
+
let executor = GoalExecutor::new(Context::create_test_context(location, name, &mut node));
571
+
let steps = get_steps(executor);
572
+
573
+
assert_eq!(
574
+
steps,
575
+
vec![
576
+
Ping.into(),
577
+
PushKeyAgent.into(),
578
+
Keys {
579
+
filter: UploadKeyAt::PreActivation
580
+
}
581
+
.into(),
582
+
crate::hive::steps::evaluate::Evaluate.into(),
583
+
crate::hive::steps::build::Build.into(),
584
+
crate::hive::steps::push::PushBuildOutput.into(),
585
+
SwitchToConfiguration.into(),
586
+
Keys {
587
+
filter: UploadKeyAt::PostActivation
588
+
}
589
+
.into(),
590
+
]
591
+
);
592
+
}
593
+
594
+
#[tokio::test]
595
+
async fn order_keys_only() {
596
+
let location = location!(get_test_path!());
597
+
let mut node = Node::default();
598
+
let name = &Name(function_name!().into());
599
+
let mut context = Context::create_test_context(location, name, &mut node);
600
+
601
+
let Objective::Apply(ref mut apply_objective) = context.objective else {
602
+
unreachable!()
603
+
};
604
+
605
+
apply_objective.goal = Goal::Keys;
606
+
607
+
let executor = GoalExecutor::new(context);
608
+
let steps = get_steps(executor);
609
+
610
+
assert_eq!(
611
+
steps,
612
+
vec![
613
+
Ping.into(),
614
+
PushKeyAgent.into(),
615
+
Keys {
616
+
filter: UploadKeyAt::NoFilter
617
+
}
618
+
.into(),
619
+
]
620
+
);
621
+
}
622
+
623
+
#[tokio::test]
624
+
async fn order_build() {
625
+
let location = location!(get_test_path!());
626
+
let mut node = Node::default();
627
+
let name = &Name(function_name!().into());
628
+
let mut context = Context::create_test_context(location, name, &mut node);
629
+
630
+
let Objective::Apply(ref mut apply_objective) = context.objective else {
631
+
unreachable!()
632
+
};
633
+
apply_objective.goal = Goal::Build;
634
+
635
+
let executor = GoalExecutor::new(context);
636
+
let steps = get_steps(executor);
637
+
638
+
assert_eq!(
639
+
steps,
640
+
vec![
641
+
Ping.into(),
642
+
crate::hive::steps::evaluate::Evaluate.into(),
643
+
crate::hive::steps::build::Build.into(),
644
+
crate::hive::steps::push::PushBuildOutput.into(),
645
+
]
646
+
);
647
+
}
648
+
649
+
#[tokio::test]
650
+
async fn order_push_only() {
651
+
let location = location!(get_test_path!());
652
+
let mut node = Node::default();
653
+
let name = &Name(function_name!().into());
654
+
let mut context = Context::create_test_context(location, name, &mut node);
655
+
656
+
let Objective::Apply(ref mut apply_objective) = context.objective else {
657
+
unreachable!()
658
+
};
659
+
apply_objective.goal = Goal::Push;
660
+
661
+
let executor = GoalExecutor::new(context);
662
+
let steps = get_steps(executor);
663
+
664
+
assert_eq!(
665
+
steps,
666
+
vec![
667
+
Ping.into(),
668
+
crate::hive::steps::evaluate::Evaluate.into(),
669
+
crate::hive::steps::push::PushEvaluatedOutput.into(),
670
+
]
671
+
);
672
+
}
673
+
674
+
// Verifies the step order when the node builds remotely: keys wrap
// activation, and the evaluated drv (not a local build output) is pushed
// so the target can build it.
#[tokio::test]
async fn order_remote_build() {
    let location = location!(get_test_path!());
    let mut node = Node {
        build_remotely: true,
        ..Default::default()
    };

    let name = &Name(function_name!().into());
    let executor = GoalExecutor::new(Context::create_test_context(location, name, &mut node));
    let steps = get_steps(executor);

    assert_eq!(
        steps,
        vec![
            Ping.into(),
            PushKeyAgent.into(),
            Keys {
                filter: UploadKeyAt::PreActivation
            }
            .into(),
            crate::hive::steps::evaluate::Evaluate.into(),
            crate::hive::steps::push::PushEvaluatedOutput.into(),
            crate::hive::steps::build::Build.into(),
            SwitchToConfiguration.into(),
            Keys {
                filter: UploadKeyAt::PostActivation
            }
            .into(),
        ]
    );
}

// With keys disabled, the key-agent/upload steps disappear but the locally
// built output is still pushed and activated.
#[tokio::test]
async fn order_nokeys() {
    let location = location!(get_test_path!());
    let mut node = Node::default();

    let name = &Name(function_name!().into());
    let mut context = Context::create_test_context(location, name, &mut node);

    let Objective::Apply(ref mut apply_objective) = context.objective else {
        unreachable!()
    };
    apply_objective.no_keys = true;

    let executor = GoalExecutor::new(context);
    let steps = get_steps(executor);

    assert_eq!(
        steps,
        vec![
            Ping.into(),
            crate::hive::steps::evaluate::Evaluate.into(),
            crate::hive::steps::build::Build.into(),
            crate::hive::steps::push::PushBuildOutput.into(),
            SwitchToConfiguration.into(),
        ]
    );
}

// A local apply needs no ping and no pushes: evaluate, build, activate.
#[tokio::test]
async fn order_should_apply_locally() {
    let location = location!(get_test_path!());
    let mut node = Node::default();

    let name = &Name(function_name!().into());
    let mut context = Context::create_test_context(location, name, &mut node);

    let Objective::Apply(ref mut apply_objective) = context.objective else {
        unreachable!()
    };
    apply_objective.no_keys = true;
    apply_objective.should_apply_locally = true;

    let executor = GoalExecutor::new(context);
    let steps = get_steps(executor);

    assert_eq!(
        steps,
        vec![
            crate::hive::steps::evaluate::Evaluate.into(),
            crate::hive::steps::build::Build.into(),
            SwitchToConfiguration.into(),
        ]
    );
}

// The BuildLocally objective evaluates and builds — nothing else.
#[tokio::test]
async fn order_build_only() {
    let location = location!(get_test_path!());
    let mut node = Node::default();

    let name = &Name(function_name!().into());
    let mut context = Context::create_test_context(location, name, &mut node);

    context.objective = Objective::BuildLocally;

    let executor = GoalExecutor::new(context);
    let steps = get_steps(executor);

    assert_eq!(
        steps,
        vec![
            crate::hive::steps::evaluate::Evaluate.into(),
            crate::hive::steps::build::Build.into()
        ]
    );
}
783
+
784
+
// Every host_failed() call advances the current-host cursor by exactly one.
#[test]
fn target_fails_increments() {
    let mut target = Target::from_host("localhost");

    assert_eq!(target.current_host, 0);

    for i in 0..100 {
        target.host_failed();
        assert_eq!(target.current_host, i + 1);
    }
}

// Hosts are tried in declared order; once every host has failed,
// get_preferred_host reports HostsExhausted (and keeps doing so).
#[test]
fn get_preferred_host_fails() {
    let mut target = Target {
        hosts: vec![
            "un.reachable.1".into(),
            "un.reachable.2".into(),
            "un.reachable.3".into(),
            "un.reachable.4".into(),
            "un.reachable.5".into(),
        ],
        ..Default::default()
    };

    // Sanity: the first preferred host is not the last entry.
    assert_ne!(
        target.get_preferred_host().unwrap().to_string(),
        "un.reachable.5"
    );

    for i in 1..=5 {
        assert_eq!(
            target.get_preferred_host().unwrap().to_string(),
            format!("un.reachable.{i}")
        );
        target.host_failed();
    }

    for _ in 0..5 {
        assert_matches!(
            target.get_preferred_host(),
            Err(HiveLibError::NetworkError(NetworkError::HostsExhausted))
        );
    }
}
829
+
830
+
// create_ssh_args and create_ssh_opts must agree, and the non-interactive
// behaviour must be identical whether forced by argument or by modifier.
#[test]
fn test_ssh_opts() {
    let target = Target::from_host("hello-world");
    let subcommand_modifiers = SubCommandModifiers {
        non_interactive: false,
        ..Default::default()
    };
    // Random scratch dir so parallel test runs don't collide.
    // NOTE(review): the directory is never removed — confirm leaking it
    // under /tmp is acceptable for the test environment.
    let tmp = format!(
        "/tmp/{}",
        rand::distr::SampleString::sample_string(&Alphabetic, &mut rand::rng(), 10)
    );

    std::fs::create_dir(&tmp).unwrap();

    // SAFETY: mutating the process environment is unsynchronized; this
    // test is assumed to be the only reader/writer of XDG_RUNTIME_DIR —
    // TODO confirm no other test touches it concurrently.
    unsafe { env::set_var("XDG_RUNTIME_DIR", &tmp) }

    // Baseline interactive argument set.
    let args = [
        "-l".to_string(),
        target.user.to_string(),
        "-p".to_string(),
        target.port.to_string(),
        "-o".to_string(),
        "StrictHostKeyChecking=accept-new".to_string(),
        "-o".to_string(),
        "PasswordAuthentication=no".to_string(),
        "-o".to_string(),
        "KbdInteractiveAuthentication=no".to_string(),
    ];

    assert_eq!(
        target
            .create_ssh_args(subcommand_modifiers, false, false)
            .unwrap(),
        args
    );
    // The stringified opts are the same args joined by spaces.
    assert_eq!(
        target.create_ssh_opts(subcommand_modifiers, false).unwrap(),
        args.join(" ")
    );

    assert_eq!(
        target
            .create_ssh_args(subcommand_modifiers, false, true)
            .unwrap(),
        [
            "-l".to_string(),
            target.user.to_string(),
            "-p".to_string(),
            target.port.to_string(),
            "-o".to_string(),
            "StrictHostKeyChecking=accept-new".to_string(),
            "-o".to_string(),
            "PasswordAuthentication=no".to_string(),
            "-o".to_string(),
            "KbdInteractiveAuthentication=no".to_string(),
        ]
    );

    assert_eq!(
        target
            .create_ssh_args(subcommand_modifiers, true, true)
            .unwrap(),
        [
            "-l".to_string(),
            target.user.to_string(),
            "-p".to_string(),
            target.port.to_string(),
            "-o".to_string(),
            "StrictHostKeyChecking=accept-new".to_string(),
            "-o".to_string(),
            "PasswordAuthentication=no".to_string(),
            "-o".to_string(),
            "KbdInteractiveAuthentication=no".to_string(),
        ]
    );

    // forced non interactive is the same as --non-interactive
    assert_eq!(
        target
            .create_ssh_args(subcommand_modifiers, true, false)
            .unwrap(),
        target
            .create_ssh_args(
                SubCommandModifiers {
                    non_interactive: true,
                    ..Default::default()
                },
                false,
                false
            )
            .unwrap()
    );
}
923
+
924
+
// A context whose should_quit flag is already set must abort execution
// with a Sigint error before running any step.
#[tokio::test]
async fn context_quits_sigint() {
    let location = location!(get_test_path!());
    let mut node = Node::default();

    let name = &Name(function_name!().into());
    let context = Context::create_test_context(location, name, &mut node);
    context
        .should_quit
        .store(true, std::sync::atomic::Ordering::Relaxed);
    let executor = GoalExecutor::new(context);
    let status = executor.execute().await;

    assert_matches!(status, Err(HiveLibError::Sigint));
}
939
+
}
+219
crates/core/src/hive/steps/activate.rs
+219
crates/core/src/hive/steps/activate.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use tracing::{error, info, instrument, warn};
7
+
8
+
use crate::{
9
+
HiveLibError,
10
+
commands::{CommandArguments, WireCommandChip, builder::CommandStringBuilder, run_command},
11
+
errors::{ActivationError, NetworkError},
12
+
hive::node::{Context, ExecuteStep, Goal, Objective, SwitchToConfigurationGoal},
13
+
};
14
+
15
+
/// Step that activates a built system closure by invoking its
/// `switch-to-configuration` script on the node.
#[derive(Debug, PartialEq)]
pub struct SwitchToConfiguration;

impl Display for SwitchToConfiguration {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("switch-to-configuration")
    }
}
23
+
24
+
async fn wait_for_ping(ctx: &Context<'_>) -> Result<(), HiveLibError> {
25
+
let host = ctx.node.target.get_preferred_host()?;
26
+
let mut result = ctx.node.ping(ctx.modifiers).await;
27
+
28
+
for num in 0..2 {
29
+
warn!("Trying to ping {host} (attempt {}/3)", num + 1);
30
+
31
+
result = ctx.node.ping(ctx.modifiers).await;
32
+
33
+
if result.is_ok() {
34
+
info!("Regained connection to {} via {host}", ctx.name);
35
+
36
+
break;
37
+
}
38
+
}
39
+
40
+
result
41
+
}
42
+
43
+
async fn set_profile(
44
+
goal: SwitchToConfigurationGoal,
45
+
built_path: &String,
46
+
ctx: &Context<'_>,
47
+
) -> Result<(), HiveLibError> {
48
+
info!("Setting profiles in anticipation for switch-to-configuration {goal}");
49
+
50
+
let mut command_string = CommandStringBuilder::new("nix-env");
51
+
command_string.args(&["-p", "/nix/var/nix/profiles/system", "--set"]);
52
+
command_string.arg(built_path);
53
+
54
+
let Objective::Apply(apply_objective) = ctx.objective else {
55
+
unreachable!()
56
+
};
57
+
58
+
let child = run_command(
59
+
&CommandArguments::new(command_string, ctx.modifiers)
60
+
.mode(crate::commands::ChildOutputMode::Nix)
61
+
.execute_on_remote(if apply_objective.should_apply_locally {
62
+
None
63
+
} else {
64
+
Some(&ctx.node.target)
65
+
})
66
+
.elevated(ctx.node),
67
+
)
68
+
.await?;
69
+
70
+
let _ = child
71
+
.wait_till_success()
72
+
.await
73
+
.map_err(HiveLibError::CommandError)?;
74
+
75
+
info!("Set system profile");
76
+
77
+
Ok(())
78
+
}
79
+
80
+
impl ExecuteStep for SwitchToConfiguration {
    // Only runs for an Apply objective whose goal is one of the
    // switch-to-configuration variants (switch/boot/test/dry-activate).
    fn should_execute(&self, ctx: &Context) -> bool {
        let Objective::Apply(apply_objective) = ctx.objective else {
            return false;
        };

        matches!(apply_objective.goal, Goal::SwitchToConfiguration(..))
    }

    /// Activates the built system closure on the node: optionally sets the
    /// system profile, runs `switch-to-configuration <goal>`, optionally
    /// reboots, and tries to distinguish "activation broke the host" from
    /// an ordinary command failure by re-pinging afterwards.
    ///
    /// Expects `ctx.state.build` to have been populated by the Build step.
    #[allow(clippy::too_many_lines)]
    #[instrument(skip_all, name = "activate")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        // Invariant: the Build step ran earlier and stored the store path.
        let built_path = ctx.state.build.as_ref().unwrap();

        let Objective::Apply(apply_objective) = ctx.objective else {
            unreachable!()
        };

        let Goal::SwitchToConfiguration(goal) = &apply_objective.goal else {
            unreachable!("Cannot reach as guarded by should_execute")
        };

        if matches!(
            goal,
            // switch profile if switch or boot
            // https://github.com/NixOS/nixpkgs/blob/a2c92aa34735a04010671e3378e2aa2d109b2a72/pkgs/by-name/ni/nixos-rebuild-ng/src/nixos_rebuild/services.py#L224
            SwitchToConfigurationGoal::Switch | SwitchToConfigurationGoal::Boot
        ) {
            set_profile(*goal, built_path, ctx).await?;
        }

        info!("Running switch-to-configuration {goal}");

        let mut command_string =
            CommandStringBuilder::new(format!("{built_path}/bin/switch-to-configuration"));
        command_string.arg(match goal {
            SwitchToConfigurationGoal::Switch => "switch",
            SwitchToConfigurationGoal::Boot => "boot",
            SwitchToConfigurationGoal::Test => "test",
            SwitchToConfigurationGoal::DryActivate => "dry-activate",
        });

        let child = run_command(
            &CommandArguments::new(command_string, ctx.modifiers)
                .execute_on_remote(if apply_objective.should_apply_locally {
                    None
                } else {
                    Some(&ctx.node.target)
                })
                .elevated(ctx.node)
                .log_stdout(),
        )
        .await?;

        let result = child.wait_till_success().await;

        match result {
            Ok(_) => {
                // Activation succeeded; reboot only if requested, and never
                // reboot the machine wire itself is running on.
                if !apply_objective.reboot {
                    return Ok(());
                }

                if apply_objective.should_apply_locally {
                    error!("Refusing to reboot local machine!");

                    return Ok(());
                }

                warn!("Rebooting {name}!", name = ctx.name);

                let reboot = run_command(
                    &CommandArguments::new("reboot now", ctx.modifiers)
                        .log_stdout()
                        .execute_on_remote(Some(&ctx.node.target))
                        .elevated(ctx.node),
                )
                .await?;

                // consume result, impossible to know if the machine failed to reboot or we
                // simply disconnected
                // NOTE(review): the `?` below *propagates* a failure despite
                // the comment above — a disconnect mid-reboot may surface as
                // a CommandError instead of falling through to the reconnect
                // wait. Confirm whether this should be `let _ = ...await;`.
                let _ = reboot
                    .wait_till_success()
                    .await
                    .map_err(HiveLibError::CommandError)?;

                info!("Rebooted {name}, waiting to reconnect...", name = ctx.name);

                if wait_for_ping(ctx).await.is_ok() {
                    return Ok(());
                }

                error!(
                    "Failed to get regain connection to {name} via {host} after reboot.",
                    name = ctx.name,
                    host = ctx.node.target.get_preferred_host()?
                );

                return Err(HiveLibError::NetworkError(
                    NetworkError::HostUnreachableAfterReboot(
                        ctx.node.target.get_preferred_host()?.to_string(),
                    ),
                ));
            }
            Err(error) => {
                warn!(
                    "Activation command for {name} exited unsuccessfully.",
                    name = ctx.name
                );

                // Bail if the command couldn't have broken the system,
                // and don't try to regain connection to localhost.
                if matches!(goal, SwitchToConfigurationGoal::DryActivate)
                    || apply_objective.should_apply_locally
                {
                    return Err(HiveLibError::ActivationError(
                        ActivationError::SwitchToConfigurationError(*goal, ctx.name.clone(), error),
                    ));
                }

                // Host still answers: the activation itself failed, report
                // that rather than a network problem.
                if wait_for_ping(ctx).await.is_ok() {
                    return Err(HiveLibError::ActivationError(
                        ActivationError::SwitchToConfigurationError(*goal, ctx.name.clone(), error),
                    ));
                }

                error!(
                    "Failed to get regain connection to {name} via {host} after {goal} activation.",
                    name = ctx.name,
                    host = ctx.node.target.get_preferred_host()?
                );

                // NOTE(review): this path reuses HostUnreachableAfterReboot
                // even though no reboot happened — confirm whether a
                // dedicated error variant is warranted.
                return Err(HiveLibError::NetworkError(
                    NetworkError::HostUnreachableAfterReboot(
                        ctx.node.target.get_preferred_host()?.to_string(),
                    ),
                ));
            }
        }
    }
}
+89
crates/core/src/hive/steps/build.rs
+89
crates/core/src/hive/steps/build.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use tracing::{info, instrument};
7
+
8
+
use crate::{
9
+
HiveLibError,
10
+
commands::{
11
+
CommandArguments, Either, WireCommandChip, builder::CommandStringBuilder,
12
+
run_command_with_env,
13
+
},
14
+
hive::node::{Context, ExecuteStep, Goal, Objective},
15
+
};
16
+
17
+
/// Step that runs `nix build` on the node's evaluated derivation.
#[derive(Debug, PartialEq)]
pub struct Build;

impl Display for Build {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Build the node")
    }
}
25
+
26
+
impl ExecuteStep for Build {
    // A build is needed unless the goal only uploads keys or only pushes
    // the evaluation; a plain BuildLocally objective always builds.
    fn should_execute(&self, ctx: &Context) -> bool {
        match ctx.objective {
            Objective::Apply(apply_objective) => {
                !matches!(apply_objective.goal, Goal::Keys | Goal::Push)
            }
            Objective::BuildLocally => true,
        }
    }

    /// Builds the evaluated derivation with `nix build` — on the target
    /// host when the node is configured to build remotely and the apply is
    /// not local — and records the resulting out path in the step state.
    #[instrument(skip_all, name = "build")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        // Invariant: the Evaluate step ran earlier and stored the drv.
        let top_level = ctx.state.evaluation.as_ref().unwrap();

        let mut command_string = CommandStringBuilder::nix();
        command_string.args(&[
            "--extra-experimental-features",
            "nix-command",
            "build",
            "--print-build-logs",
            "--no-link",
            "--print-out-paths",
        ]);
        command_string.arg(top_level.to_string());

        let status = run_command_with_env(
            &CommandArguments::new(command_string, ctx.modifiers)
                // build remotely if asked for AND we arent applying locally
                .execute_on_remote(
                    if ctx.node.build_remotely
                        && let Objective::Apply(apply_objective) = ctx.objective
                        && !apply_objective.should_apply_locally
                    {
                        Some(&ctx.node.target)
                    } else {
                        None
                    },
                )
                .mode(crate::commands::ChildOutputMode::Nix)
                .log_stdout(),
            std::collections::HashMap::new(),
        )
        .await?
        .wait_till_success()
        .await
        .map_err(|source| HiveLibError::NixBuildError {
            name: ctx.name.clone(),
            source,
        })?;

        // Either variant carries the command's captured stdout — the
        // `--print-out-paths` output.
        let stdout = match status {
            Either::Left((_, stdout)) | Either::Right((_, stdout)) => stdout,
        };

        info!("Built output: {stdout:?}");

        // print built path to stdout
        println!("{stdout}");

        ctx.state.build = Some(stdout);

        Ok(())
    }
}
+28
crates/core/src/hive/steps/cleanup.rs
+28
crates/core/src/hive/steps/cleanup.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use crate::{
7
+
errors::HiveLibError,
8
+
hive::node::{Context, ExecuteStep},
9
+
};
10
+
11
+
/// Placeholder step reserved for post-deploy clean-up work.
#[derive(PartialEq, Debug)]
pub(crate) struct CleanUp;

impl Display for CleanUp {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Clean up")
    }
}
19
+
20
+
impl ExecuteStep for CleanUp {
    // Permanently disabled: this step is never scheduled by the executor.
    fn should_execute(&self, _ctx: &Context) -> bool {
        false
    }

    // No-op; kept so the type satisfies ExecuteStep.
    async fn execute(&self, _ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        Ok(())
    }
}
+38
crates/core/src/hive/steps/evaluate.rs
+38
crates/core/src/hive/steps/evaluate.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use tracing::instrument;
7
+
8
+
use crate::{
9
+
HiveLibError,
10
+
hive::node::{Context, ExecuteStep, Goal, Objective},
11
+
};
12
+
13
+
/// Step that resolves the node's Nix evaluation into a derivation.
#[derive(Debug, PartialEq)]
pub struct Evaluate;

impl Display for Evaluate {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Evaluate the node")
    }
}
21
+
22
+
impl ExecuteStep for Evaluate {
    // Evaluation is needed for everything except a keys-only apply.
    fn should_execute(&self, ctx: &Context) -> bool {
        match ctx.objective {
            Objective::Apply(apply_objective) => !matches!(apply_objective.goal, Goal::Keys),
            Objective::BuildLocally => true,
        }
    }

    /// Awaits this node's evaluation result from the shared evaluation
    /// channel and stores the derivation in the step state.
    #[instrument(skip_all, name = "eval")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        // Invariant: the receiver is installed before the executor runs and
        // this step executes at most once, so take() yields Some.
        let rx = ctx.state.evaluation_rx.take().unwrap();

        // NOTE(review): `rx.await.unwrap()` panics if the evaluator task is
        // dropped without sending — confirm the sender cannot be cancelled,
        // or map that case to a HiveLibError.
        ctx.state.evaluation = Some(rx.await.unwrap()?);

        Ok(())
    }
}
+441
crates/core/src/hive/steps/keys.rs
+441
crates/core/src/hive/steps/keys.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use base64::Engine;
5
+
use base64::prelude::BASE64_STANDARD;
6
+
use futures::future::join_all;
7
+
use im::Vector;
8
+
use itertools::{Itertools, Position};
9
+
use owo_colors::OwoColorize;
10
+
use prost::Message;
11
+
use prost::bytes::BytesMut;
12
+
use serde::{Deserialize, Serialize};
13
+
use sha2::{Digest, Sha256};
14
+
use std::env;
15
+
use std::fmt::Display;
16
+
use std::io::Cursor;
17
+
use std::iter::Peekable;
18
+
use std::path::PathBuf;
19
+
use std::pin::Pin;
20
+
use std::process::Stdio;
21
+
use std::str::from_utf8;
22
+
use std::vec::IntoIter;
23
+
use tokio::io::AsyncReadExt as _;
24
+
use tokio::process::Command;
25
+
use tokio::{fs::File, io::AsyncRead};
26
+
use tokio_util::codec::LengthDelimitedCodec;
27
+
use tracing::{debug, instrument};
28
+
29
+
use crate::HiveLibError;
30
+
use crate::commands::builder::CommandStringBuilder;
31
+
use crate::commands::common::push;
32
+
use crate::commands::{CommandArguments, WireCommandChip, run_command};
33
+
use crate::errors::KeyError;
34
+
use crate::hive::node::{Context, ExecuteStep, Goal, Objective, Push, SwitchToConfigurationGoal};
35
+
36
+
/// Where a key's secret material comes from.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
#[serde(tag = "t", content = "c")]
pub enum Source {
    /// Literal key material embedded in the hive.
    String(String),
    /// A file on the deploying machine.
    Path(PathBuf),
    /// A command (argv) whose stdout is the key material.
    Command(Vec<String>),
}

/// When, relative to activation, a key should be uploaded.
#[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq)]
pub enum UploadKeyAt {
    #[serde(rename = "pre-activation")]
    PreActivation,
    #[serde(rename = "post-activation")]
    PostActivation,
    /// Internal-only marker meaning "select every key"; never serialized.
    #[serde(skip)]
    NoFilter,
}

/// A single secret to place on the node.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
pub struct Key {
    // File name the key is written as, inside `dest_dir`.
    pub name: String,
    // Directory on the node the key is written into.
    #[serde(rename = "destDir")]
    pub dest_dir: String,
    // Path as declared in the hive — presumably the source-side path;
    // TODO confirm against the hive evaluation code.
    pub path: PathBuf,
    // Owning group on the node.
    pub group: String,
    // Owning user on the node.
    pub user: String,
    // Octal permission string, e.g. "0600" (parsed by get_u32_permission).
    pub permissions: String,
    pub source: Source,
    #[serde(rename = "uploadAt")]
    pub upload_at: UploadKeyAt,
    // Extra environment for resolving `Source::Command`.
    #[serde(default)]
    pub environment: im::HashMap<String, String>,
}
69
+
70
+
impl Display for Key {
    // Renders as "<SourceKind> <dest path> <user>:<group> <permissions>",
    // dimming the source kind when stdout supports color.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "{} {} {}:{} {}",
            match self.source {
                Source::String(_) => "Literal",
                Source::Path(_) => "Path",
                Source::Command(_) => "Command",
            }
            .if_supports_color(owo_colors::Stream::Stdout, |x| x.dimmed()),
            [self.dest_dir.clone(), self.name.clone()]
                .iter()
                .collect::<PathBuf>()
                .display(),
            self.user,
            self.group,
            self.permissions,
        )
    }
}
91
+
92
+
// Test-only convenience: a literal, root-owned, 0600 key.
#[cfg(test)]
impl Default for Key {
    fn default() -> Self {
        use im::HashMap;

        Self {
            name: "key".into(),
            dest_dir: "/somewhere/".into(),
            path: "key".into(),
            group: "root".into(),
            user: "root".into(),
            permissions: "0600".into(),
            source: Source::String("test key".into()),
            upload_at: UploadKeyAt::PreActivation,
            environment: HashMap::new(),
        }
    }
}
110
+
111
+
/// Parses a key's octal permission string (e.g. "0600") into a numeric
/// file mode.
fn get_u32_permission(key: &Key) -> Result<u32, KeyError> {
    u32::from_str_radix(&key.permissions, 8).map_err(KeyError::ParseKeyPermissions)
}
114
+
115
+
async fn create_reader(key: &'_ Key) -> Result<Pin<Box<dyn AsyncRead + Send + '_>>, KeyError> {
116
+
match &key.source {
117
+
Source::Path(path) => Ok(Box::pin(File::open(path).await.map_err(KeyError::File)?)),
118
+
Source::String(string) => Ok(Box::pin(Cursor::new(string))),
119
+
Source::Command(args) => {
120
+
let output = Command::new(args.first().ok_or(KeyError::Empty)?)
121
+
.args(&args[1..])
122
+
.stdin(Stdio::null())
123
+
.stdout(Stdio::piped())
124
+
.stderr(Stdio::piped())
125
+
.envs(key.environment.clone())
126
+
.spawn()
127
+
.map_err(|err| KeyError::CommandSpawnError {
128
+
error: err,
129
+
command: args.join(" "),
130
+
command_span: Some((0..args.first().unwrap().len()).into()),
131
+
})?
132
+
.wait_with_output()
133
+
.await
134
+
.map_err(|err| KeyError::CommandResolveError {
135
+
error: err,
136
+
command: args.join(" "),
137
+
})?;
138
+
139
+
if output.status.success() {
140
+
return Ok(Box::pin(Cursor::new(output.stdout)));
141
+
}
142
+
143
+
Err(KeyError::CommandError(
144
+
output.status,
145
+
from_utf8(&output.stderr).unwrap().to_string(),
146
+
))
147
+
}
148
+
}
149
+
}
150
+
151
+
async fn process_key(key: &Key) -> Result<(wire_key_agent::keys::KeySpec, Vec<u8>), KeyError> {
152
+
let mut reader = create_reader(key).await?;
153
+
154
+
let mut buf = Vec::new();
155
+
156
+
reader
157
+
.read_to_end(&mut buf)
158
+
.await
159
+
.expect("failed to read into buffer");
160
+
161
+
let destination: PathBuf = [key.dest_dir.clone(), key.name.clone()].iter().collect();
162
+
163
+
debug!("Staging push to {}", destination.clone().display());
164
+
165
+
Ok((
166
+
wire_key_agent::keys::KeySpec {
167
+
length: buf
168
+
.len()
169
+
.try_into()
170
+
.expect("Failed to convert usize buf length to i32"),
171
+
user: key.user.clone(),
172
+
group: key.group.clone(),
173
+
permissions: get_u32_permission(key)?,
174
+
destination: destination.into_os_string().into_string().unwrap(),
175
+
digest: Sha256::digest(&buf).to_vec(),
176
+
last: false,
177
+
},
178
+
buf,
179
+
))
180
+
}
181
+
182
+
/// Step that uploads the keys matching `filter` to the node.
#[derive(Debug, PartialEq)]
pub struct Keys {
    // Which upload phase to select; `NoFilter` selects every key.
    pub filter: UploadKeyAt,
}
/// Step that pushes the platform-specific key agent binary to the node.
#[derive(Debug, PartialEq)]
pub struct PushKeyAgent;

impl Display for Keys {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Upload key @ {:?}", self.filter)
    }
}

impl Display for PushKeyAgent {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Push the key agent")
    }
}
200
+
201
+
/// Minimal length-delimited frame writer that forwards each encoded frame
/// to an arbitrary async sink (here: the remote key agent's stdin).
pub struct SimpleLengthDelimWriter<F> {
    // Frames payloads with a length prefix the agent can decode.
    codec: LengthDelimitedCodec,
    // Receives the fully framed bytes of each message.
    write_fn: F,
}

impl<F> SimpleLengthDelimWriter<F>
where
    F: AsyncFnMut(Vec<u8>) -> Result<(), HiveLibError>,
{
    fn new(write_fn: F) -> Self {
        Self {
            codec: LengthDelimitedCodec::new(),
            write_fn,
        }
    }

    /// Length-prefixes `data` and hands the framed bytes to `write_fn`.
    async fn send(&mut self, data: prost::bytes::Bytes) -> Result<(), HiveLibError> {
        let mut buffer = BytesMut::new();
        tokio_util::codec::Encoder::encode(&mut self.codec, data, &mut buffer)
            .map_err(HiveLibError::Encoding)?;

        (self.write_fn)(buffer.to_vec()).await?;
        Ok(())
    }
}
226
+
227
+
impl ExecuteStep for Keys {
    fn should_execute(&self, ctx: &Context) -> bool {
        let Objective::Apply(apply_objective) = ctx.objective else {
            return false;
        };

        if apply_objective.no_keys {
            return false;
        }

        // should execute if no filter, and the goal is keys.
        // otherwise, only execute if the goal is switch and non-nofilter
        matches!(
            (&self.filter, &apply_objective.goal),
            (UploadKeyAt::NoFilter, Goal::Keys)
                | (
                    UploadKeyAt::PreActivation | UploadKeyAt::PostActivation,
                    Goal::SwitchToConfiguration(SwitchToConfigurationGoal::Switch)
                )
        )
    }

    /// Streams the selected keys to the key agent over its stdin as
    /// base64-encoded, length-delimited frames: one spec frame followed by
    /// one payload frame per key, with the final spec flagged `last`.
    #[instrument(skip_all, name = "keys")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        // Invariant: PushKeyAgent ran earlier and recorded the agent path.
        let agent_directory = ctx.state.key_agent_directory.as_ref().unwrap();

        let mut keys = self.select_keys(&ctx.node.keys).await?;

        if keys.peek().is_none() {
            debug!("Had no keys to push, ending KeyStep early.");
            return Ok(());
        }

        let command_string =
            CommandStringBuilder::new(format!("{agent_directory}/bin/wire-key-agent"));

        let Objective::Apply(apply_objective) = ctx.objective else {
            unreachable!()
        };

        // Keep stdin open: the agent reads frames until the `last` spec.
        let mut child = run_command(
            &CommandArguments::new(command_string, ctx.modifiers)
                .execute_on_remote(if apply_objective.should_apply_locally {
                    None
                } else {
                    Some(&ctx.node.target)
                })
                .elevated(ctx.node)
                .keep_stdin_open()
                .log_stdout(),
        )
        .await?;

        let mut writer = SimpleLengthDelimWriter::new(async |data| child.write_stdin(data).await);

        for (position, (mut spec, buf)) in keys.with_position() {
            // The agent stops reading after a spec marked `last`.
            if matches!(position, Position::Last | Position::Only) {
                spec.last = true;
            }

            debug!("Writing spec & buf for {:?}", spec);

            writer
                .send(BASE64_STANDARD.encode(spec.encode_to_vec()).into())
                .await?;
            writer.send(BASE64_STANDARD.encode(buf).into()).await?;
        }

        let status = child
            .wait_till_success()
            .await
            .map_err(HiveLibError::CommandError)?;

        debug!("status: {status:?}");

        Ok(())
    }
}
305
+
306
+
impl Keys {
    /// Resolves and returns the keys matching this step's filter
    /// (`NoFilter` selects every key), reading all sources concurrently.
    /// Fails on the first key whose source cannot be resolved.
    async fn select_keys(
        &self,
        keys: &Vector<Key>,
    ) -> Result<Peekable<IntoIter<(wire_key_agent::keys::KeySpec, std::vec::Vec<u8>)>>, HiveLibError>
    {
        let futures = keys
            .iter()
            .filter(|key| self.filter == UploadKeyAt::NoFilter || (key.upload_at == self.filter))
            .map(|key| async move {
                process_key(key)
                    .await
                    .map_err(|err| HiveLibError::KeyError(key.name.clone(), err))
            });

        // Peekable lets the caller detect the empty case cheaply.
        Ok(join_all(futures)
            .await
            .into_iter()
            .collect::<Result<Vec<_>, HiveLibError>>()?
            .into_iter()
            .peekable())
    }
}
329
+
330
+
impl ExecuteStep for PushKeyAgent {
    // The agent is needed whenever keys will be uploaded: a keys-only goal
    // or a full switch — unless keys are disabled.
    fn should_execute(&self, ctx: &Context) -> bool {
        let Objective::Apply(apply_objective) = ctx.objective else {
            return false;
        };

        if apply_objective.no_keys {
            return false;
        }

        matches!(
            &apply_objective.goal,
            Goal::Keys | Goal::SwitchToConfiguration(SwitchToConfigurationGoal::Switch)
        )
    }

    /// Locates the key agent built for the node's platform — via the
    /// `WIRE_KEY_AGENT_<platform>` environment variable baked in at build
    /// time — and pushes it to the node for a remote apply.
    ///
    /// # Panics
    ///
    /// Panics when wire was built without a key agent for this platform.
    #[instrument(skip_all, name = "push_agent")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        let arg_name = format!(
            "WIRE_KEY_AGENT_{platform}",
            platform = ctx.node.host_platform.replace('-', "_")
        );

        let agent_directory = match env::var_os(&arg_name) {
            Some(agent) => agent.into_string().unwrap(),
            None => panic!(
                "{arg_name} environment variable not set! \n
wire was not built with the ability to deploy keys to this platform. \n
Please create an issue: https://github.com/forallsys/wire/issues/new?template=bug_report.md"
            ),
        };

        let Objective::Apply(apply_objective) = ctx.objective else {
            unreachable!()
        };

        // A local apply already has the agent on disk; no push needed.
        if !apply_objective.should_apply_locally {
            push(ctx, Push::Path(&agent_directory)).await?;
        }

        ctx.state.key_agent_directory = Some(agent_directory);

        Ok(())
    }
}
375
+
376
+
#[cfg(test)]
mod tests {
    use im::Vector;

    use crate::hive::steps::keys::{Key, Keys, UploadKeyAt, process_key};

    // Builds a key whose payload encodes its upload phase, so filtering
    // can be asserted by inspecting the content.
    fn new_key(upload_at: &UploadKeyAt) -> Key {
        Key {
            upload_at: upload_at.clone(),
            source: super::Source::String(match upload_at {
                UploadKeyAt::PreActivation => "pre".into(),
                UploadKeyAt::PostActivation => "post".into(),
                UploadKeyAt::NoFilter => "none".into(),
            }),
            ..Default::default()
        }
    }

    // select_keys must honour the phase filter, and NoFilter must select
    // every key in original order.
    #[tokio::test]
    async fn key_filtering() {
        let keys = Vector::from(vec![
            new_key(&UploadKeyAt::PreActivation),
            new_key(&UploadKeyAt::PostActivation),
            new_key(&UploadKeyAt::PreActivation),
            new_key(&UploadKeyAt::PostActivation),
        ]);

        for (_, buf) in (Keys {
            filter: crate::hive::steps::keys::UploadKeyAt::PreActivation,
        })
        .select_keys(&keys)
        .await
        .unwrap()
        {
            assert_eq!(String::from_utf8_lossy(&buf), "pre");
        }

        for (_, buf) in (Keys {
            filter: crate::hive::steps::keys::UploadKeyAt::PostActivation,
        })
        .select_keys(&keys)
        .await
        .unwrap()
        {
            assert_eq!(String::from_utf8_lossy(&buf), "post");
        }

        // test that NoFilter processes all keys.
        let processed_all =
            futures::future::join_all(keys.iter().map(async |x| process_key(x).await))
                .await
                .iter()
                .flatten()
                .cloned()
                .collect::<Vec<_>>();
        let no_filter = (Keys {
            filter: crate::hive::steps::keys::UploadKeyAt::NoFilter,
        })
        .select_keys(&keys)
        .await
        .unwrap()
        .collect::<Vec<_>>();

        assert_eq!(processed_all, no_filter);
    }
}
+10
crates/core/src/hive/steps/mod.rs
+10
crates/core/src/hive/steps/mod.rs
+58
crates/core/src/hive/steps/ping.rs
+58
crates/core/src/hive/steps/ping.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use tracing::{Level, event, instrument};
7
+
8
+
use crate::{
9
+
HiveLibError,
10
+
hive::node::{Context, ExecuteStep, Objective},
11
+
};
12
+
13
+
/// Step that verifies the node is reachable before doing real work.
#[derive(Debug, PartialEq)]
pub struct Ping;

impl Display for Ping {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Ping node")
    }
}
21
+
22
+
impl ExecuteStep for Ping {
    // Pinging only makes sense for a remote apply; a local apply contacts
    // no host.
    fn should_execute(&self, ctx: &Context) -> bool {
        let Objective::Apply(apply_objective) = ctx.objective else {
            return false;
        };

        !apply_objective.should_apply_locally
    }

    /// Tries the target's hosts in preference order until one answers,
    /// marking each unreachable host failed and moving to the next.
    ///
    /// # Errors
    ///
    /// Terminates with `HostsExhausted` (via `get_preferred_host()?`) once
    /// every host has been marked failed.
    #[instrument(skip_all, name = "ping")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        loop {
            event!(
                Level::INFO,
                status = "attempting",
                host = ctx.node.target.get_preferred_host()?.to_string()
            );

            if ctx.node.ping(ctx.modifiers).await.is_ok() {
                event!(
                    Level::INFO,
                    status = "success",
                    host = ctx.node.target.get_preferred_host()?.to_string()
                );
                return Ok(());
            }

            // ? will take us out if we ran out of hosts
            event!(
                Level::WARN,
                status = "failed to ping",
                host = ctx.node.target.get_preferred_host()?.to_string()
            );
            ctx.node.target.host_failed();
        }
    }
}
+84
crates/core/src/hive/steps/push.rs
+84
crates/core/src/hive/steps/push.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::fmt::Display;
5
+
6
+
use tracing::instrument;
7
+
8
+
use crate::{
9
+
HiveLibError,
10
+
commands::common::push,
11
+
hive::node::{Context, ExecuteStep, Goal, Objective},
12
+
};
13
+
14
+
/// Step that copies the evaluated derivation closure to the target,
/// ahead of a remote build or an explicit push.
#[derive(Debug, PartialEq)]
pub struct PushEvaluatedOutput;

/// Step that copies a locally built closure to the target before
/// activation.
#[derive(Debug, PartialEq)]
pub struct PushBuildOutput;

impl Display for PushEvaluatedOutput {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Push the evaluated output")
    }
}

impl Display for PushBuildOutput {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Push the build output")
    }
}
30
+
31
+
impl ExecuteStep for PushEvaluatedOutput {
32
+
fn should_execute(&self, ctx: &Context) -> bool {
33
+
let Objective::Apply(apply_objective) = ctx.objective else {
34
+
return false;
35
+
};
36
+
37
+
!matches!(apply_objective.goal, Goal::Keys)
38
+
&& !apply_objective.should_apply_locally
39
+
&& (ctx.node.build_remotely | matches!(apply_objective.goal, Goal::Push))
40
+
}
41
+
42
+
#[instrument(skip_all, name = "push_eval")]
43
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
44
+
let top_level = ctx.state.evaluation.as_ref().unwrap();
45
+
46
+
push(ctx, crate::hive::node::Push::Derivation(top_level)).await?;
47
+
48
+
Ok(())
49
+
}
50
+
}
51
+
52
+
impl ExecuteStep for PushBuildOutput {
53
+
fn should_execute(&self, ctx: &Context) -> bool {
54
+
let Objective::Apply(apply_objective) = ctx.objective else {
55
+
return false;
56
+
};
57
+
58
+
if matches!(apply_objective.goal, Goal::Keys | Goal::Push) {
59
+
// skip if we are not building
60
+
return false;
61
+
}
62
+
63
+
if ctx.node.build_remotely {
64
+
// skip if we are building remotely
65
+
return false;
66
+
}
67
+
68
+
if apply_objective.should_apply_locally {
69
+
// skip step if we are applying locally
70
+
return false;
71
+
}
72
+
73
+
true
74
+
}
75
+
76
+
#[instrument(skip_all, name = "push_build")]
77
+
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
78
+
let built_path = ctx.state.build.as_ref().unwrap();
79
+
80
+
push(ctx, crate::hive::node::Push::Path(built_path)).await?;
81
+
82
+
Ok(())
83
+
}
84
+
}
+71
crates/core/src/lib.rs
+71
crates/core/src/lib.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#![feature(assert_matches)]
5
+
#![feature(iter_intersperse)]
6
+
#![feature(sync_nonpoison)]
7
+
#![feature(nonpoison_mutex)]
8
+
9
+
use std::{
10
+
io::{IsTerminal, stderr},
11
+
sync::LazyLock,
12
+
};
13
+
14
+
use tokio::sync::{AcquireError, Semaphore, SemaphorePermit};
15
+
16
+
use crate::{errors::HiveLibError, hive::node::Name, status::STATUS};
17
+
18
+
pub mod cache;
19
+
pub mod commands;
20
+
pub mod hive;
21
+
pub mod status;
22
+
23
+
#[cfg(test)]
24
+
mod test_macros;
25
+
26
+
#[cfg(test)]
27
+
mod test_support;
28
+
29
+
pub mod errors;
30
+
31
+
#[derive(Clone, Debug, Copy, Default)]
32
+
pub enum StrictHostKeyChecking {
33
+
/// do not accept new host. dangerous!
34
+
No,
35
+
36
+
/// accept-new, default
37
+
#[default]
38
+
AcceptNew,
39
+
}
40
+
41
+
#[derive(Debug, Clone, Copy)]
42
+
pub struct SubCommandModifiers {
43
+
pub show_trace: bool,
44
+
pub non_interactive: bool,
45
+
pub ssh_accept_host: StrictHostKeyChecking,
46
+
}
47
+
48
+
impl Default for SubCommandModifiers {
49
+
fn default() -> Self {
50
+
SubCommandModifiers {
51
+
show_trace: false,
52
+
non_interactive: !std::io::stdin().is_terminal(),
53
+
ssh_accept_host: StrictHostKeyChecking::default(),
54
+
}
55
+
}
56
+
}
57
+
58
+
pub enum EvalGoal<'a> {
59
+
Inspect,
60
+
Names,
61
+
GetTopLevel(&'a Name),
62
+
}
63
+
64
+
pub static STDIN_CLOBBER_LOCK: LazyLock<Semaphore> = LazyLock::new(|| Semaphore::new(1));
65
+
66
+
pub async fn acquire_stdin_lock<'a>() -> Result<SemaphorePermit<'a>, AcquireError> {
67
+
let result = STDIN_CLOBBER_LOCK.acquire().await?;
68
+
STATUS.lock().wipe_out(&mut stderr());
69
+
70
+
Ok(result)
71
+
}
+173
crates/core/src/status.rs
+173
crates/core/src/status.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use owo_colors::OwoColorize;
5
+
use std::{fmt::Write, time::Instant};
6
+
use termion::{clear, cursor};
7
+
8
+
use crate::{STDIN_CLOBBER_LOCK, hive::node::Name};
9
+
10
+
use std::{
11
+
collections::HashMap,
12
+
sync::{LazyLock, nonpoison::Mutex},
13
+
};
14
+
15
+
#[derive(Default)]
16
+
pub enum NodeStatus {
17
+
#[default]
18
+
Pending,
19
+
Running(String),
20
+
Succeeded,
21
+
Failed,
22
+
}
23
+
24
+
pub struct Status {
25
+
statuses: HashMap<String, NodeStatus>,
26
+
began: Instant,
27
+
show_progress: bool,
28
+
}
29
+
30
+
/// global status used for the progress bar in the cli crate
31
+
pub static STATUS: LazyLock<Mutex<Status>> = LazyLock::new(|| Mutex::new(Status::new()));
32
+
33
+
impl Status {
34
+
fn new() -> Self {
35
+
Self {
36
+
statuses: HashMap::default(),
37
+
began: Instant::now(),
38
+
show_progress: false,
39
+
}
40
+
}
41
+
42
+
pub const fn show_progress(&mut self, show_progress: bool) {
43
+
self.show_progress = show_progress;
44
+
}
45
+
46
+
pub fn add_many(&mut self, names: &[&Name]) {
47
+
self.statuses.extend(
48
+
names
49
+
.iter()
50
+
.map(|name| (name.0.to_string(), NodeStatus::Pending)),
51
+
);
52
+
}
53
+
54
+
pub fn set_node_step(&mut self, node: &Name, step: String) {
55
+
self.statuses
56
+
.insert(node.0.to_string(), NodeStatus::Running(step));
57
+
}
58
+
59
+
pub fn mark_node_failed(&mut self, node: &Name) {
60
+
self.statuses.insert(node.0.to_string(), NodeStatus::Failed);
61
+
}
62
+
63
+
pub fn mark_node_succeeded(&mut self, node: &Name) {
64
+
self.statuses
65
+
.insert(node.0.to_string(), NodeStatus::Succeeded);
66
+
}
67
+
68
+
#[must_use]
69
+
fn num_finished(&self) -> usize {
70
+
self.statuses
71
+
.iter()
72
+
.filter(|(_, status)| matches!(status, NodeStatus::Succeeded | NodeStatus::Failed))
73
+
.count()
74
+
}
75
+
76
+
#[must_use]
77
+
fn num_running(&self) -> usize {
78
+
self.statuses
79
+
.iter()
80
+
.filter(|(_, status)| matches!(status, NodeStatus::Running(..)))
81
+
.count()
82
+
}
83
+
84
+
#[must_use]
85
+
fn num_failed(&self) -> usize {
86
+
self.statuses
87
+
.iter()
88
+
.filter(|(_, status)| matches!(status, NodeStatus::Failed))
89
+
.count()
90
+
}
91
+
92
+
#[must_use]
93
+
pub fn get_msg(&self) -> String {
94
+
if self.statuses.is_empty() {
95
+
return String::new();
96
+
}
97
+
98
+
let mut msg = format!("[{} / {}", self.num_finished(), self.statuses.len(),);
99
+
100
+
let num_failed = self.num_failed();
101
+
let num_running = self.num_running();
102
+
103
+
let failed = if num_failed >= 1 {
104
+
Some(format!("{} Failed", num_failed.red()))
105
+
} else {
106
+
None
107
+
};
108
+
109
+
let running = if num_running >= 1 {
110
+
Some(format!("{} Deploying", num_running.blue()))
111
+
} else {
112
+
None
113
+
};
114
+
115
+
let _ = match (failed, running) {
116
+
(None, None) => write!(&mut msg, ""),
117
+
(Some(message), None) | (None, Some(message)) => write!(&mut msg, " ({message})"),
118
+
(Some(failed), Some(running)) => write!(&mut msg, " ({failed}, {running})"),
119
+
};
120
+
121
+
let _ = write!(&mut msg, "]");
122
+
123
+
let _ = write!(&mut msg, " {}s", self.began.elapsed().as_secs());
124
+
125
+
msg
126
+
}
127
+
128
+
pub fn clear<T: std::io::Write>(&self, writer: &mut T) {
129
+
if !self.show_progress {
130
+
return;
131
+
}
132
+
133
+
let _ = write!(writer, "{}", cursor::Save);
134
+
// let _ = write!(writer, "{}", cursor::Down(1));
135
+
let _ = write!(writer, "{}", cursor::Left(999));
136
+
let _ = write!(writer, "{}", clear::CurrentLine);
137
+
}
138
+
139
+
/// used when there is an interactive prompt
140
+
pub fn wipe_out<T: std::io::Write>(&self, writer: &mut T) {
141
+
if !self.show_progress {
142
+
return;
143
+
}
144
+
145
+
let _ = write!(writer, "{}", cursor::Save);
146
+
let _ = write!(writer, "{}", cursor::Left(999));
147
+
let _ = write!(writer, "{}", clear::CurrentLine);
148
+
let _ = writer.flush();
149
+
}
150
+
151
+
pub fn write_status<T: std::io::Write>(&mut self, writer: &mut T) {
152
+
if self.show_progress {
153
+
let _ = write!(writer, "{}", self.get_msg());
154
+
}
155
+
}
156
+
157
+
pub fn write_above_status<T: std::io::Write>(
158
+
&mut self,
159
+
buf: &[u8],
160
+
writer: &mut T,
161
+
) -> std::io::Result<usize> {
162
+
if STDIN_CLOBBER_LOCK.available_permits() != 1 {
163
+
// skip
164
+
return Ok(0);
165
+
}
166
+
167
+
self.clear(writer);
168
+
let written = writer.write(buf)?;
169
+
self.write_status(writer);
170
+
171
+
Ok(written)
172
+
}
173
+
}
+43
crates/core/src/test_macros.rs
+43
crates/core/src/test_macros.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#[macro_export]
5
+
macro_rules! function_name {
6
+
() => {{
7
+
fn f() {}
8
+
fn type_name_of<T>(_: T) -> &'static str {
9
+
std::any::type_name::<T>()
10
+
}
11
+
let name = type_name_of(f);
12
+
// closure for async functions
13
+
&name[..name.len() - 3]
14
+
}};
15
+
}
16
+
17
+
#[macro_export]
18
+
macro_rules! get_test_path {
19
+
() => {{
20
+
let mut path: PathBuf = env::var("WIRE_TEST_DIR").unwrap().into();
21
+
let full_name = $crate::function_name!();
22
+
let function_name = full_name
23
+
.trim_end_matches("::{{closure}}")
24
+
.split("::")
25
+
.last()
26
+
.unwrap();
27
+
path.push(function_name);
28
+
29
+
path
30
+
}};
31
+
}
32
+
33
+
#[macro_export]
34
+
macro_rules! location {
35
+
($path:expr) => {{
36
+
$crate::hive::get_hive_location(
37
+
$path.display().to_string(),
38
+
$crate::SubCommandModifiers::default(),
39
+
)
40
+
.await
41
+
.unwrap()
42
+
}};
43
+
}
+67
crates/core/src/test_support.rs
+67
crates/core/src/test_support.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
use std::{
5
+
fs::{self, create_dir},
6
+
io,
7
+
path::Path,
8
+
process::Command,
9
+
};
10
+
11
+
use tempdir::TempDir;
12
+
13
+
pub fn make_flake_sandbox(path: &Path) -> Result<TempDir, io::Error> {
14
+
let tmp_dir = TempDir::new("wire-test")?;
15
+
16
+
Command::new("git")
17
+
.args(["init", "-b", "tmp"])
18
+
.current_dir(tmp_dir.path())
19
+
.status()?;
20
+
21
+
for entry in fs::read_dir(path)? {
22
+
let entry = entry?;
23
+
24
+
fs::copy(entry.path(), tmp_dir.as_ref().join(entry.file_name()))?;
25
+
}
26
+
27
+
let root = path.parent().unwrap().parent().unwrap().parent().unwrap();
28
+
29
+
create_dir(tmp_dir.as_ref().join("module/"))?;
30
+
31
+
fs::copy(
32
+
root.join(Path::new("runtime/evaluate.nix")),
33
+
tmp_dir.as_ref().join("evaluate.nix"),
34
+
)?;
35
+
fs::copy(
36
+
root.join(Path::new("runtime/module/config.nix")),
37
+
tmp_dir.as_ref().join("module/config.nix"),
38
+
)?;
39
+
fs::copy(
40
+
root.join(Path::new("runtime/module/options.nix")),
41
+
tmp_dir.as_ref().join("module/options.nix"),
42
+
)?;
43
+
fs::copy(
44
+
root.join(Path::new("runtime/module/default.nix")),
45
+
tmp_dir.as_ref().join("module/default.nix"),
46
+
)?;
47
+
fs::copy(
48
+
root.join(Path::new("runtime/makeHive.nix")),
49
+
tmp_dir.as_ref().join("makeHive.nix"),
50
+
)?;
51
+
fs::copy(
52
+
root.join(Path::new("flake.lock")),
53
+
tmp_dir.as_ref().join("flake.lock"),
54
+
)?;
55
+
56
+
Command::new("git")
57
+
.args(["add", "-A"])
58
+
.current_dir(tmp_dir.path())
59
+
.status()?;
60
+
61
+
Command::new("nix")
62
+
.args(["flake", "lock"])
63
+
.current_dir(tmp_dir.path())
64
+
.status()?;
65
+
66
+
Ok(tmp_dir)
67
+
}
+20
crates/key_agent/Cargo.toml
+20
crates/key_agent/Cargo.toml
···
1
+
[package]
2
+
name = "wire-key-agent"
3
+
edition.workspace = true
4
+
version.workspace = true
5
+
6
+
[dependencies]
7
+
tokio = { workspace = true }
8
+
tokio-util = { workspace = true }
9
+
anyhow = { workspace = true }
10
+
prost = { workspace = true }
11
+
nix = { workspace = true }
12
+
futures-util = { workspace = true }
13
+
sha2 = { workspace = true }
14
+
base64 = { workspace = true }
15
+
16
+
[build-dependencies]
17
+
prost-build = "0.14"
18
+
19
+
[lints]
20
+
workspace = true
+8
crates/key_agent/build.rs
+8
crates/key_agent/build.rs
+17
crates/key_agent/default.nix
+17
crates/key_agent/default.nix
+17
crates/key_agent/src/keys.proto
+17
crates/key_agent/src/keys.proto
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
syntax = "proto3";
5
+
6
+
package key_agent.keys;
7
+
8
+
message KeySpec {
9
+
string destination = 1;
10
+
string user = 2;
11
+
string group = 3;
12
+
uint32 permissions = 4;
13
+
uint32 length = 5;
14
+
bool last = 6;
15
+
/// Sha256 digest
16
+
bytes digest = 7;
17
+
}
+6
crates/key_agent/src/lib.rs
+6
crates/key_agent/src/lib.rs
+94
crates/key_agent/src/main.rs
+94
crates/key_agent/src/main.rs
···
1
+
// SPDX-License-Identifier: AGPL-3.0-or-later
2
+
// Copyright 2024-2025 wire Contributors
3
+
4
+
#![deny(clippy::pedantic)]
5
+
use base64::Engine;
6
+
use base64::prelude::BASE64_STANDARD;
7
+
use futures_util::stream::StreamExt;
8
+
use nix::unistd::{Group, User};
9
+
use prost::Message;
10
+
use prost::bytes::Bytes;
11
+
use sha2::{Digest, Sha256};
12
+
use std::os::unix::fs::PermissionsExt;
13
+
use std::os::unix::fs::chown;
14
+
use std::path::{Path, PathBuf};
15
+
use tokio::fs::File;
16
+
use tokio::io::AsyncWriteExt;
17
+
use tokio_util::codec::{FramedRead, LengthDelimitedCodec};
18
+
use wire_key_agent::keys::KeySpec;
19
+
20
+
fn create_path(key_path: &Path) -> Result<(), anyhow::Error> {
21
+
let prefix = key_path.parent().unwrap();
22
+
std::fs::create_dir_all(prefix)?;
23
+
24
+
Ok(())
25
+
}
26
+
27
+
fn pretty_keyspec(spec: &KeySpec) -> String {
28
+
format!(
29
+
"{} {}:{} {}",
30
+
spec.destination, spec.user, spec.group, spec.permissions
31
+
)
32
+
}
33
+
34
+
#[tokio::main]
35
+
async fn main() -> Result<(), anyhow::Error> {
36
+
let stdin = tokio::io::stdin();
37
+
38
+
let mut framed = FramedRead::new(stdin, LengthDelimitedCodec::new());
39
+
40
+
while let Some(spec_bytes) = framed.next().await {
41
+
let spec_bytes = Bytes::from(BASE64_STANDARD.decode(spec_bytes?)?);
42
+
let spec = KeySpec::decode(spec_bytes)?;
43
+
44
+
let key_bytes = BASE64_STANDARD.decode(
45
+
framed
46
+
.next()
47
+
.await
48
+
.expect("expected key_bytes to come after spec_bytes")?,
49
+
)?;
50
+
51
+
let digest = Sha256::digest(&key_bytes).to_vec();
52
+
53
+
println!(
54
+
"Writing {}, {:?} bytes of data",
55
+
pretty_keyspec(&spec),
56
+
key_bytes.len()
57
+
);
58
+
59
+
if digest != spec.digest {
60
+
return Err(anyhow::anyhow!(
61
+
"digest of {spec:?} did not match {digest:?}! Please create an issue!"
62
+
));
63
+
}
64
+
65
+
let path = PathBuf::from(&spec.destination);
66
+
create_path(&path)?;
67
+
68
+
let mut file = File::create(path).await?;
69
+
let mut permissions = file.metadata().await?.permissions();
70
+
71
+
permissions.set_mode(spec.permissions);
72
+
file.set_permissions(permissions).await?;
73
+
74
+
let user = User::from_name(&spec.user)?;
75
+
let group = Group::from_name(&spec.group)?;
76
+
77
+
chown(
78
+
spec.destination,
79
+
// Default uid/gid to 0. This is then wrapped around an Option again for
80
+
// the function.
81
+
Some(user.map_or(0, |user| user.uid.into())),
82
+
Some(group.map_or(0, |group| group.gid.into())),
83
+
)?;
84
+
85
+
file.write_all(&key_bytes).await?;
86
+
87
+
// last key, goobye
88
+
if spec.last {
89
+
break;
90
+
}
91
+
}
92
+
93
+
Ok(())
94
+
}
+13
-12
default.nix
+13
-12
default.nix
···
1
-
(import (
2
-
let
3
-
lock = builtins.fromJSON (builtins.readFile ./flake.lock);
4
-
nodeName = lock.nodes.root.inputs.flake-compat;
5
-
in
6
-
fetchTarball {
7
-
url =
8
-
lock.nodes.${nodeName}.locked.url
9
-
or "https://github.com/edolstra/flake-compat/archive/${lock.nodes.${nodeName}.locked.rev}.tar.gz";
10
-
sha256 = lock.nodes.${nodeName}.locked.narHash;
11
-
}
12
-
) { src = ./.; }).defaultNix
1
+
let
2
+
lockFile = builtins.fromJSON (builtins.readFile ./flake.lock);
3
+
flake-compat-node = lockFile.nodes.${lockFile.nodes.root.inputs.flake-compat};
4
+
flake-compat = builtins.fetchTarball {
5
+
inherit (flake-compat-node.locked) url;
6
+
sha256 = flake-compat-node.locked.narHash;
7
+
};
8
+
9
+
flake = import flake-compat {
10
+
src = ./.;
11
+
};
12
+
in
13
+
flake.defaultNix
+164
-88
doc/.vitepress/config.ts
+164
-88
doc/.vitepress/config.ts
···
1
1
import { defineConfig } from "vitepress";
2
2
import pkg from "../package.json";
3
3
import markdownItFootnote from "markdown-it-footnote";
4
-
import { withMermaid } from "vitepress-plugin-mermaid";
5
4
import {
6
5
groupIconMdPlugin,
7
6
groupIconVitePlugin,
8
7
localIconLoader,
9
8
} from "vitepress-plugin-group-icons";
10
9
10
+
const MODE = (process.env.MODE ?? "unstable") as "unstable" | "stable";
11
+
11
12
// https://vitepress.dev/reference/site-config
12
-
export default withMermaid(
13
-
defineConfig({
14
-
title: "wire",
15
-
description: "a tool to deploy nixos systems",
16
-
themeConfig: {
17
-
search: {
18
-
provider: "local",
19
-
},
13
+
export default defineConfig({
14
+
title: MODE === "stable" ? "wire" : "wire (unstable!)",
15
+
description: "a tool to deploy nixos systems",
16
+
themeConfig: {
17
+
search: {
18
+
provider: "local",
19
+
},
20
20
21
-
// https://vitepress.dev/reference/default-theme-config
22
-
nav: [
23
-
{ text: "Home", link: "/" },
24
-
{ text: "Guide", link: "/guide/wire" },
25
-
{ text: "Reference", link: "/reference/cli" },
26
-
{
27
-
text: pkg.version,
28
-
items: [
29
-
{
30
-
text: "Changelog",
31
-
link: "https://github.com/wires-org/wire/blob/main/CHANGELOG.md",
32
-
},
33
-
],
34
-
},
35
-
],
21
+
footer: {
22
+
message:
23
+
'Released under the <a href="https://github.com/forallsys/wire/blob/trunk/COPYING">AGPL-3.0 License</a>.',
24
+
copyright: "Copyright 2024-2025 wire Contributors",
25
+
},
36
26
37
-
sidebar: {
38
-
"/guide/": [
39
-
{
40
-
text: "Introduction",
41
-
items: [
42
-
{ text: "What is Wire?", link: "/guide/wire" },
43
-
{ text: "Getting Started", link: "/guide/getting-started" },
44
-
{ text: "Flakes", link: "/guide/flakes" },
45
-
{ text: "Applying Your Config", link: "/guide/apply" },
46
-
{ text: "Targeting Nodes", link: "/guide/targeting" },
47
-
],
48
-
},
49
-
{
50
-
text: "Features",
51
-
items: [
52
-
{ text: "Secret management", link: "/guide/keys" },
53
-
{ text: "Parallelism", link: "/guide/parallelism" },
54
-
{ text: "hive.default", link: "/guide/hive-default" },
55
-
{ text: "Magic Rollback", link: "/guide/magic-rollback" },
56
-
],
57
-
},
27
+
// https://vitepress.dev/reference/default-theme-config
28
+
nav: [
29
+
{ text: "Home", link: "/" },
30
+
{ text: "Tutorial", link: "/tutorial/overview" },
31
+
{ text: "Guides", link: "/guides/installation" },
32
+
{ text: "Reference", link: "/reference/cli" },
33
+
{
34
+
text: MODE === "stable" ? pkg.version : `Unstable (${pkg.version})`,
35
+
items: [
36
+
MODE === "unstable"
37
+
? {
38
+
text: `View Stable`,
39
+
link: "https://wire.althaea.zone",
40
+
}
41
+
: {
42
+
text: "View Unstable",
43
+
link: "https://trunk.wire-docs.pages.dev",
44
+
},
58
45
{
59
-
text: "Use cases",
60
-
items: [{ text: "Tailscale", link: "/guide/tailscale" }],
46
+
text: "Changelog",
47
+
link: "https://github.com/forallsys/wire/blob/trunk/CHANGELOG.md",
61
48
},
62
-
],
63
-
"/reference/": [
64
49
{
65
-
text: "Reference",
66
-
items: [
67
-
{ text: "CLI", link: "/reference/cli" },
68
-
{ text: "Meta Options", link: "/reference/meta" },
69
-
{ text: "Module Options", link: "/reference/module" },
70
-
{ text: "Error Codes", link: "/reference/errors" },
71
-
],
50
+
text: "CI Server",
51
+
link: "https://buildbot.althaea.zone/#/projects/1",
72
52
},
73
53
],
74
54
},
55
+
],
75
56
76
-
editLink: {
77
-
pattern: "https://github.com/wires-org/wire/edit/main/doc/:path",
78
-
text: "Edit this page on GitHub",
79
-
},
80
-
81
-
socialLinks: [
82
-
{ icon: "github", link: "https://github.com/wires-org/wire" },
57
+
sidebar: {
58
+
"/": [
59
+
{
60
+
text: "Tutorial",
61
+
collapsed: false,
62
+
items: [
63
+
{ text: "Overview", link: "/tutorial/overview" },
64
+
{
65
+
text: "Part One",
66
+
items: [
67
+
{
68
+
text: "Nix Setup",
69
+
link: "/tutorial/part-one/nix-setup",
70
+
},
71
+
{
72
+
text: "Preparing Repo & Shell",
73
+
link: "/tutorial/part-one/repo-setup",
74
+
},
75
+
{
76
+
text: "Creating a Virtual Machine",
77
+
link: "/tutorial/part-one/vm-setup",
78
+
},
79
+
{
80
+
text: "Basic Hive & Deployment",
81
+
link: "/tutorial/part-one/basic-hive",
82
+
},
83
+
],
84
+
},
85
+
{
86
+
text: "Part Two",
87
+
items: [
88
+
{
89
+
text: "Basic Deployment Keys",
90
+
link: "/tutorial/part-two/basic-keys",
91
+
},
92
+
{
93
+
text: "Encrypted Deployment Keys",
94
+
link: "/tutorial/part-two/encryption",
95
+
},
96
+
],
97
+
},
98
+
],
99
+
},
100
+
{
101
+
text: "How-To Guides",
102
+
collapsed: false,
103
+
items: [
104
+
{ text: "Install wire", link: "/guides/installation" },
105
+
{
106
+
text: "Write a Hive",
107
+
link: "/guides/writing-a-hive",
108
+
},
109
+
{ text: "Migrate to wire", link: "/guides/migrate" },
110
+
{
111
+
text: "Use Flakes",
112
+
link: "/guides/flakes/overview",
113
+
items: [
114
+
{
115
+
text: "Keep Using nixos-rebuild",
116
+
link: "/guides/flakes/nixos-rebuild",
117
+
},
118
+
],
119
+
},
120
+
{ text: "Apply your Config", link: "/guides/apply" },
121
+
{ text: "Target Nodes", link: "/guides/targeting" },
122
+
{ text: "Build in CI", link: "/guides/build-in-ci" },
123
+
{
124
+
text: "Features",
125
+
items: [
126
+
{
127
+
text: "Use a non-root user",
128
+
link: "/guides/non-root-user",
129
+
},
130
+
{ text: "Manage Secrets", link: "/guides/keys" },
131
+
{ text: "Use Parallelism", link: "/guides/parallelism" },
132
+
{ text: "Use hive.default", link: "/guides/hive-default" },
133
+
],
134
+
},
135
+
],
136
+
},
137
+
{ text: "CLI & Module Reference", link: "/reference/cli.html" },
138
+
],
139
+
"/reference/": [
140
+
{
141
+
text: "Reference",
142
+
items: [
143
+
{ text: "CLI", link: "/reference/cli" },
144
+
{ text: "Meta Options", link: "/reference/meta" },
145
+
{ text: "Module Options", link: "/reference/module" },
146
+
{ text: "Error Codes", link: "/reference/errors" },
147
+
],
148
+
},
83
149
],
84
150
},
85
-
markdown: {
86
-
config: (md) => {
87
-
md.use(markdownItFootnote);
88
-
md.use(groupIconMdPlugin);
89
-
},
151
+
152
+
editLink: {
153
+
pattern: "https://github.com/forallsys/wire/edit/trunk/doc/:path",
154
+
text: "Edit this page on GitHub",
90
155
},
91
-
vite: {
92
-
// https://github.com/mermaid-js/mermaid/issues/4320#issuecomment-1653050539
93
-
optimizeDeps: {
94
-
include: ["mermaid"],
95
-
},
96
-
plugins: [
97
-
groupIconVitePlugin({
98
-
customIcon: {
99
-
nixos: "vscode-icons:file-type-nix",
100
-
"configuration.nix": "vscode-icons:file-type-nix",
101
-
"hive.nix": "vscode-icons:file-type-nix",
102
-
"flake.nix": "vscode-icons:file-type-nix",
103
-
"module.nix": "vscode-icons:file-type-nix",
104
-
home: localIconLoader(import.meta.url, "../assets/homemanager.svg"),
105
-
".conf": "vscode-icons:file-type-config",
106
-
},
107
-
}),
108
-
],
156
+
157
+
socialLinks: [
158
+
{ icon: "github", link: "https://github.com/forallsys/wire" },
159
+
],
160
+
},
161
+
markdown: {
162
+
config: (md) => {
163
+
md.use(markdownItFootnote);
164
+
md.use(groupIconMdPlugin, {
165
+
titleBar: { includeSnippet: true },
166
+
});
109
167
},
110
-
}),
111
-
);
168
+
},
169
+
vite: {
170
+
plugins: [
171
+
groupIconVitePlugin({
172
+
customIcon: {
173
+
nixos: "vscode-icons:file-type-nix",
174
+
"configuration.nix": "vscode-icons:file-type-nix",
175
+
"hive.nix": "vscode-icons:file-type-nix",
176
+
"flake.nix": "vscode-icons:file-type-nix",
177
+
"module.nix": "vscode-icons:file-type-nix",
178
+
"vm.nix": "vscode-icons:file-type-nix",
179
+
"shell.nix": "vscode-icons:file-type-nix",
180
+
"secrets.nix": "vscode-icons:file-type-nix",
181
+
home: localIconLoader(import.meta.url, "../assets/homemanager.svg"),
182
+
".conf": "vscode-icons:file-type-config",
183
+
},
184
+
}),
185
+
],
186
+
},
187
+
});
+2
-34
doc/.vitepress/theme/index.ts
+2
-34
doc/.vitepress/theme/index.ts
···
1
1
import DefaultTheme from "vitepress/theme";
2
2
import "virtual:group-icons.css";
3
-
import giscusTalk from "vitepress-plugin-comment-with-giscus";
4
-
import { EnhanceAppContext, useData, useRoute } from "vitepress";
5
-
import { toRefs } from "vue";
3
+
import { EnhanceAppContext } from "vitepress";
4
+
import "./style.css";
6
5
7
6
export default {
8
7
...DefaultTheme,
9
8
enhanceApp(ctx: EnhanceAppContext) {
10
9
DefaultTheme.enhanceApp(ctx);
11
-
},
12
-
setup() {
13
-
const { frontmatter } = toRefs(useData());
14
-
const route = useRoute();
15
-
16
-
giscusTalk(
17
-
{
18
-
repo: "wires-org/wire",
19
-
repoId: "R_kgDOMQQbzw",
20
-
category: "giscus", // default: `General`
21
-
categoryId: "DIC_kwDOMQQbz84Co4vv",
22
-
mapping: "pathname",
23
-
inputPosition: "top",
24
-
lang: "en",
25
-
// i18n setting (Note: This configuration will override the default language set by lang)
26
-
// Configured as an object with key-value pairs inside:
27
-
// [your i18n configuration name]: [corresponds to the language pack name in Giscus]
28
-
locales: {
29
-
"en-US": "en",
30
-
},
31
-
homePageShowComment: false,
32
-
lightTheme: "light",
33
-
darkTheme: "transparent_dark",
34
-
},
35
-
{
36
-
frontmatter,
37
-
route,
38
-
},
39
-
// Default to false for all pages
40
-
false,
41
-
);
42
10
},
43
11
};
+1
-1
doc/README.md
+1
-1
doc/README.md
+9
-2
doc/default.nix
+9
-2
doc/default.nix
···
6
6
...
7
7
}:
8
8
{
9
-
packages.docs = pkgs.callPackage ./package.nix {
10
-
inherit (self'.packages) wire-small wire-dignostics-md;
9
+
packages = {
10
+
docs = pkgs.callPackage ./package.nix {
11
+
mode = "stable";
12
+
inherit (self'.packages) wire-small-dev wire-diagnostics-md;
13
+
};
14
+
15
+
docs-unstable = pkgs.callPackage ./package.nix {
16
+
inherit (self'.packages) wire-small-dev wire-diagnostics-md;
17
+
};
11
18
};
12
19
};
13
20
}
-116
doc/guide/apply.md
-116
doc/guide/apply.md
···
1
-
---
2
-
comment: true
3
-
title: Applying Your Config
4
-
description: How to apply a node with wire.
5
-
---
6
-
7
-
# {{ $frontmatter.title }}
8
-
9
-
{{ $frontmatter.description }}
10
-
11
-
## What does it mean to 'apply'?
12
-
13
-
Once you have [created a hive](./getting-started), you can now "apply" your
14
-
configuration to nodes in your hive. Simply, "applying" is the term used by wire to describe **deploying the
15
-
config**.
16
-
17
-
::: info
18
-
Applying a node typically involves pushing keys,
19
-
evaluating the node's NixOS system, building the node's NixOS system, and running
20
-
`switch-to-configuration`, depending on which specific goal is used.
21
-
:::
22
-
23
-
The simplest way to apply is simply running:
24
-
25
-
```sh
26
-
wire apply switch
27
-
```
28
-
29
-
Which will `switch` to each node's NixOS system in your hive and push
30
-
secrets (the equivalent to `nixos-rebuild`'s `nixos-rebuild switch`).
31
-
32
-
::: details Apply Goal Flowchart
33
-
The following is an illustrative flowchart of how each step in the apply execution is ran.
34
-
35
-
Depending on the specific goal certain steps will not run, for example the
36
-
Switch to Configuration step will never run if the goal is `build`.
37
-
38
-
```mermaid
39
-
flowchart TD
40
-
A(Test Connection) --> |IP / Hostname| B(Push Keys)
41
-
42
-
C(Evaluate NixOS System)
43
-
44
-
B --> C
45
-
C -->|.drv Path| local
46
-
C -->|.drv Path| remote
47
-
48
-
subgraph remote[Remote Node]
49
-
D(Push To Node)
50
-
D --> E(Build NixOS System)
51
-
E -->|Built System| H(Push To Node)
52
-
end
53
-
54
-
subgraph local[Local Node]
55
-
direction RL
56
-
G(Build NixOS System Locally)
57
-
end
58
-
59
-
G --> F(Switch To Configuration)
60
-
H --> F
61
-
```
62
-
63
-
:::
64
-
65
-
## Apply goals
66
-
67
-
`wire apply` accepts a goal, which include verbs which will be familiar to
68
-
`nixos-rebuild` users such as `switch`, `boot`, and `test`, alongside additional verbs
69
-
like `keys` and `push`.
70
-
71
-
### `wire apply keys`
72
-
73
-
Wire will push all deployment keys to nodes, and do nothing else. While running
74
-
this goal, option
75
-
[`deployment.keys.<name>.uploadAt`](/reference/module#deployment-keys-name-uploadat)
76
-
has no effect and all keys will be pushed. Read [the secret management guide](./keys)
77
-
to learn more about wire deployment keys.
78
-
79
-
### `wire apply push`
80
-
81
-
Wire will "push" (equivalent to [`nix
82
-
copy`](https://nix.dev/manual/nix/2.18/command-ref/new-cli/nix3-copy)) the
83
-
`.drv` file that can produce the node's NixOS system when built.
84
-
85
-
### `wire apply build`
86
-
87
-
Sister to `wire apply push`, wire will build the
88
-
node's NixOS system and ensure the output path exists on the node. Depending on
89
-
[`deployment.buildOnTarget`](/reference/module#deployment-buildontarget), the
90
-
`.drv` file may be built on the machine invoking wire or the node itself.
91
-
92
-
### `wire apply [switch|boot|test|dry-activate]`
93
-
94
-
Type `wire apply --help` or
95
-
[read the reference](../reference/cli#wire-apply) to read more.
96
-
97
-
## Applying locally
98
-
99
-
If `deployment.allowLocalDeployment` is `true`, and the machine invoking wire's
100
-
host name is equivalent to a node's name, wire will apply that node to the local
101
-
machine. Goals like `push` and `build`, wont actually "push" anything as
102
-
the paths already exists on the local machine.
103
-
104
-
When applying to your local machine, wire can interactively run `sudo`!
105
-
Wire will prompt for your password, meaning wire can be ran as any user in
106
-
the `wheel` group.
107
-
108
-
## Applying specific nodes
109
-
110
-
Use the `--on` argument to specify which nodes in your hive to apply:
111
-
112
-
```sh
113
-
wire apply --on node-a
114
-
```
115
-
116
-
Further examples, including how you can utilise tags, can be found on the [Targeting Nodes](./targeting) page.
-75
doc/guide/flakes.md
-75
doc/guide/flakes.md
···
1
-
---
2
-
comment: true
3
-
title: Flakes
4
-
description: Learn how to output a hive from a flake.
5
-
---
6
-
7
-
# {{ $frontmatter.title }}
8
-
9
-
{{ $frontmatter.description }}
10
-
11
-
## Output a hive
12
-
13
-
::: tip
14
-
If you have skipped ahead, please read the previous page to understand the
15
-
concept of a hive.
16
-
:::
17
-
18
-
You can use wire with a flake by outputting a hive with the `wire` flake output.
19
-
Just like when using a `hive.nix`, you must provide `meta.nixpkgs` which will
20
-
come from an input.
21
-
22
-
::: code-group
23
-
<<< @/snippets/getting-started/flake.nix [flake.nix]
24
-
:::
25
-
26
-
```
27
-
โฏ nix flake show
28
-
git+file:///some/path
29
-
โโโโcolmena: unknown
30
-
```
31
-
32
-
## How to keep using `nixos-rebuild`
33
-
34
-
You can provide `makeHive` with your `nixosConfigurations` with the `inherit`
35
-
nix keyword. `makeHive` will merge any nodes and nixosConfigurations that share
36
-
the same name together.
37
-
38
-
::: tip
39
-
It should be noted that there are a few downsides. For example, you cannot access `config.deployment` from `nixosConfigurations`. For this reason it would be best practice to limit configuration in `colmena` to simply defining keys and deployment options.
40
-
:::
41
-
42
-
::: code-group
43
-
<<< @/snippets/getting-started/flake-merged.nix [flake.nix]
44
-
:::
45
-
46
-
Now, if we run `wire show`, you will see that wire only finds
47
-
the `nixosConfigurations`-es that also match a node in the hive.
48
-
49
-
```
50
-
โฏ nix run ~/Projects/wire#wire-small -- show
51
-
Hive {
52
-
nodes: {
53
-
Name(
54
-
"node-a",
55
-
): Node {
56
-
target: Target {
57
-
hosts: [
58
-
"node-a",
59
-
],
60
-
user: "root",
61
-
port: 22,
62
-
current_host: 0,
63
-
},
64
-
build_remotely: false,
65
-
allow_local_deployment: true,
66
-
tags: {},
67
-
keys: [],
68
-
host_platform: "x86_64-linux",
69
-
},
70
-
},
71
-
schema: 0,
72
-
}
73
-
```
74
-
75
-
This way, you can continue using `nixos-rebuild` and wire at the same time.
-112
doc/guide/getting-started.md
-112
doc/guide/getting-started.md
···
1
-
---
2
-
comment: true
3
-
title: Getting Started
4
-
description: Getting started with Wire Tool!
5
-
---
6
-
7
-
# {{ $frontmatter.title }}
8
-
9
-
## Installation
10
-
11
-
Wire can be heavy to compile. You should enable the substituter `wires.cachix.org`.
12
-
13
-
::: code-group
14
-
15
-
<<< @/snippets/getting-started/cache.nix [module.nix]
16
-
<<< @/snippets/getting-started/nix.conf
17
-
18
-
:::
19
-
20
-
### Supported Nix & NixOS versions
21
-
22
-
Wire is currently _tested_ against `unstable`, `24.11` and `25.05`.
23
-
For each channel, it is tested against the given channel's `pkgs.lix`.
24
-
25
-
There is currently a bug when our VM tests are ran with nixcpp. Nixcpp will try
26
-
to download a file in a network sandbox, whereas Lix will not. We don't know
27
-
how to solve it. Please see [#126](https://github.com/wires-org/wire/issues/126)
28
-
29
-
### NixOS / Home Manager
30
-
31
-
::: code-group
32
-
33
-
<<< @/snippets/getting-started/nixos.flake.nix [flake.nix (NixOS)]
34
-
<<< @/snippets/getting-started/hm.flake.nix [flake.nix (Home Manager)]
35
-
<<< @/snippets/getting-started/configuration.nix
36
-
<<< @/snippets/getting-started/home.nix
37
-
38
-
:::
39
-
40
-
## Your First Hive
41
-
42
-
Wire groups your machines into _nodes_, which are NixOS configurations with
43
-
additional information for deployment. Start by creating a `hive.nix` in the same directory as your
44
-
`configuration.nix`.
45
-
46
-
::: info
47
-
48
-
To include wire in these examples, we are using
49
-
[npins](https://github.com/andir/npins). To create this setup you
50
-
would run `npins add github wires-org wire`.
51
-
52
-
:::
53
-
54
-
A `hive.nix` is an attribute set with NixOS configurations, each with a unique
55
-
name. Add a node for your local machine:
56
-
57
-
```nix:line-numbers [hive.nix]
58
-
let
59
-
sources = import ./npins;
60
-
wire = import sources.wire;
61
-
in wire.makeHive {
62
-
meta.nixpkgs = import sources.nixpkgs { };
63
-
64
-
my-local-machine = {
65
-
imports = [./configuration.nix];
66
-
67
-
# If you don't know, find this value by running
68
-
# `nix eval --expr 'builtins.currentSystem' --impure`
69
-
nixpkgs.hostPlatform = "x86_64-linux";
70
-
};
71
-
}
72
-
```
73
-
74
-
### A Remote Machine
75
-
76
-
Lets add another node to your hive! This one is an example of a remote machine.
77
-
78
-
```nix:line-numbers [hive.nix]
79
-
let
80
-
sources = import ./npins;
81
-
wire = import sources.wire;
82
-
in wire.makeHive {
83
-
meta.nixpkgs = import sources.nixpkgs { };
84
-
85
-
my-local-machine = {
86
-
imports = [./local-machine/configuration.nix];
87
-
nixpkgs.hostPlatform = "x86_64-linux";
88
-
};
89
-
90
-
my-remote-machine = {
91
-
deployment = {
92
-
# buildOnTarget defaults to `false`, enable this
93
-
# if the machine is strong enough to build itself.
94
-
buildOnTarget = true;
95
-
target = {
96
-
# Some IP or host that this node is reachable by ssh under,
97
-
# defaults to "my-remote-machine" (node name).
98
-
host = "10.1.1.2";
99
-
# A user you can non-interactively login through ssh by,
100
-
# defaults to "root".
101
-
user = "root";
102
-
};
103
-
};
104
-
imports = [./remote-machine/configuration.nix];
105
-
nixpkgs.hostPlatform = "x86_64-linux";
106
-
};
107
-
}
108
-
```
109
-
110
-
> [!TIP]
111
-
> Read more options in [the reference](/reference/module#deployment-target) to adjust options such as
112
-
> ssh port.
-56
doc/guide/hive-default.md
-56
doc/guide/hive-default.md
···
1
-
---
2
-
comment: true
3
-
title: hive.default
4
-
description: Deduplicate options with default node configuration.
5
-
---
6
-
7
-
# `{{ $frontmatter.title }}`
8
-
9
-
{{ $frontmatter.description }}
10
-
11
-
## Introduction
12
-
13
-
At the top level of a hive wire reserves the `defaults` attribute. It's applied
14
-
to every node.
15
-
16
-
::: warning
17
-
18
-
`defaults` must not rely on modules that a node imports, but a
19
-
node may rely on modules that default imports.
20
-
21
-
:::
22
-
23
-
```nix:line-numbers [hive.nix]
24
-
let
25
-
sources = import ./npins;
26
-
wire = import sources.wire;
27
-
in wire.makeHive {
28
-
meta.nixpkgs = import sources.nixpkgs { };
29
-
30
-
defaults = {
31
-
# name of the node that defaults is being applied to
32
-
name,
33
-
# attribute set of all nodes
34
-
nodes,
35
-
...
36
-
}: {
37
-
import = [
38
-
./default-module.nix
39
-
40
-
# module that is imported for all nodes
41
-
some-flake.nixosModules.default
42
-
];
43
-
44
-
# default configuration
45
-
# may or may not utilise `name` or `nodes`
46
-
};
47
-
48
-
node-a = {
49
-
# some config
50
-
};
51
-
52
-
node-b = {
53
-
# some more config
54
-
};
55
-
}
56
-
```
-225
doc/guide/keys.md
-225
doc/guide/keys.md
···
1
-
---
2
-
comment: true
3
-
title: Secret Management
4
-
description: Keys, files, and other out-of-store paths with Wire Tool.
5
-
---
6
-
7
-
# {{ $frontmatter.title }}
8
-
9
-
{{ $frontmatter.description }}
10
-
11
-
## Introduction
12
-
13
-
Wire Tool is very unopinionated as to how you encrypt your secrets, Wire only
14
-
handles pushing and setting up permissions of your key files.
15
-
16
-
The `source` of your key can be a literal string (unencrypted), a path
17
-
(unencrypted), or a command that wire runs to evaluate the key. Programs that
18
-
work well with wire keys include:
19
-
20
-
- GPG
21
-
- [Age](https://github.com/FiloSottile/age)
22
-
- Anything that non-interactively decrypts to `stdout`.
23
-
24
-
### A Trivial "Key"
25
-
26
-
```nix:line-numbers [hive.nix]
27
-
let
28
-
sources = import ./npins;
29
-
wire = import sources.wire;
30
-
in wire.makeHive {
31
-
meta.nixpkgs = import sources.nixpkgs { };
32
-
33
-
node-1 = {
34
-
deployment.key."file.txt" = {
35
-
source = ''
36
-
Hello World!
37
-
'';
38
-
};
39
-
};
40
-
}
41
-
```
42
-
43
-
```sh
44
-
[user@node-1]$ cat /run/keys/file.txt
45
-
Hello World!
46
-
```
47
-
48
-
### Encrypting with GPG
49
-
50
-
```nix:line-numbers [hive.nix]
51
-
let
52
-
sources = import ./npins;
53
-
wire = import sources.wire;
54
-
in wire.makeHive {
55
-
meta.nixpkgs = import sources.nixpkgs { };
56
-
57
-
node-1 = {
58
-
deployment.key."file.txt" = {
59
-
source = [
60
-
"gpg"
61
-
"--decrypt"
62
-
"${./secrets/file.txt.gpg}"
63
-
];
64
-
};
65
-
};
66
-
}
67
-
```
68
-
69
-
```sh
70
-
[user@node-1]$ cat /run/keys/file.txt
71
-
Hello World!
72
-
```
73
-
74
-
### Encrypting with KeepassXC
75
-
76
-
A simple example of extracting a KeepassXC attachment into a wire key.
77
-
You must pass the password through stdin as the command must be non-interactive.
78
-
Note that the `--stdout` is important as wire expects the command to output the key to stdout.
79
-
80
-
```nix:line-numbers [hive.nix]
81
-
let
82
-
sources = import ./npins;
83
-
wire = import sources.wire;
84
-
in wire.makeHive {
85
-
meta.nixpkgs = import sources.nixpkgs { };
86
-
87
-
node-1 = {
88
-
deployment.key."file.txt" = {
89
-
source = [
90
-
"bash"
91
-
"-c"
92
-
''cat ~/pass | keepassxc-cli attachment-export --stdout ~/.local/share/keepass/database.kdbx test 'file.txt'''
93
-
];
94
-
};
95
-
};
96
-
}
97
-
```
98
-
99
-
```sh
100
-
[user@node-1]$ cat /run/keys/file.txt
101
-
Hello World!
102
-
```
103
-
104
-
### A Plain Text File
105
-
106
-
```nix:line-numbers [hive.nix]
107
-
let
108
-
sources = import ./npins;
109
-
wire = import sources.wire;
110
-
in wire.makeHive {
111
-
meta.nixpkgs = import sources.nixpkgs { };
112
-
113
-
node-1 = {
114
-
deployment.key."file.txt" = {
115
-
# using this syntax will enter the file into the store, readable by
116
-
# anyone!
117
-
source = ./file.txt;
118
-
};
119
-
};
120
-
}
121
-
```
122
-
123
-
## Persistence
124
-
125
-
Wire defaults `destDir` to `/run/keys`. `/run/` is held in memory and will not
126
-
persist past reboot. Change
127
-
[`deployment.key.<name>.destDir`](/reference/module#deployment-keys-name-destdir)
128
-
to something like `/etc/keys` if you need secrets every time the machine boots.
129
-
130
-
## Upload Order
131
-
132
-
By default Wire will upload keys before the system is activated. You can
133
-
force Wire to upload the key after the system is activated by setting
134
-
[`deployment.keys.<name>.uploadAt`](/reference/module#deployment-keys-name-uploadat)
135
-
to `post-activation`.
136
-
137
-
## Permissions and Ownership
138
-
139
-
Wire secrets are owned by user & group `root` (`0600`). You can change these
140
-
with the `user` and `group` option.
141
-
142
-
```nix:line-numbers [hive.nix]
143
-
let
144
-
sources = import ./npins;
145
-
wire = import sources.wire;
146
-
in wire.makeHive {
147
-
meta.nixpkgs = import sources.nixpkgs { };
148
-
149
-
node-1 = {
150
-
deployment.key."file.txt" = {
151
-
source = [
152
-
"gpg"
153
-
"--decrypt"
154
-
"${./secrets/file.txt.gpg}"
155
-
];
156
-
157
-
user = "my-user";
158
-
group = "my-group";
159
-
};
160
-
};
161
-
}
162
-
```
163
-
164
-
## Further Examples
165
-
166
-
### Using Keys With Services
167
-
168
-
You can access the full absolute path of any key with
169
-
`config.deployment.keys.<name>.path` (auto-generated and read-only).
170
-
Here's an example with the Tailscale service:
171
-
172
-
```nix:line-numbers [hive.nix]
173
-
let
174
-
sources = import ./npins;
175
-
wire = import sources.wire;
176
-
in wire.makeHive {
177
-
meta.nixpkgs = import sources.nixpkgs { };
178
-
179
-
node-1 = {config, ...}: {
180
-
services.tailscale = {
181
-
enable = true;
182
-
# use deployment key path directly
183
-
authKeyFile = config.deployment.keys."tailscale.key".path;
184
-
};
185
-
186
-
deployment.keys."tailscale.key" = {
187
-
keyCommand = ["gpg" "--decrypt" "${./secrets/tailscale.key.gpg}"];
188
-
};
189
-
};
190
-
}
191
-
```
192
-
193
-
### Scoping a Key to a service account
194
-
195
-
Additionally you can scope the key to the user that the service runs under, to
196
-
further reduce duplication using the `config` argument. Here's an example of
197
-
providing a certificate that is only readable by the caddy service.
198
-
199
-
```nix:line-numbers [hive.nix]
200
-
let
201
-
sources = import ./npins;
202
-
wire = import sources.wire;
203
-
in wire.makeHive {
204
-
meta.nixpkgs = import sources.nixpkgs { };
205
-
206
-
some-web-server = {config, ...}: {
207
-
deployment.keys."some.host.pem" = {
208
-
keyCommand = ["gpg" "--decrypt" "${./some.host.pem.gpg}"];
209
-
destDir = "/etc/keys";
210
-
211
-
# inherit the user and group that caddy runs under
212
-
# the key will only readable by the caddy service
213
-
inherit (config.services.caddy) user group;
214
-
};
215
-
216
-
# ^^ repeat for `some.host.key`
217
-
218
-
services.caddy = {
219
-
virtualHosts."https://some.host".extraConfig = ''
220
-
tls ${config.deployment.keys."some.host.pem".path} ${config.deployment.keys."some.host.key".path}
221
-
'';
222
-
};
223
-
};
224
-
}
225
-
```
-32
doc/guide/parallelism.md
-32
doc/guide/parallelism.md
···
1
-
---
2
-
comment: true
3
-
title: Parallelism
4
-
description: A deeper dive into parallelism with Wire Tool.
5
-
---
6
-
7
-
# {{ $frontmatter.title }}
8
-
9
-
{{ $frontmatter.description }}
10
-
11
-
## Controlling CPU Usage
12
-
13
-
Wire evaluates, builds, pushes, and deploys each node completely independently
14
-
from each other. Internally Wire calls this process a "node execution".
15
-
16
-
The default number of parallel _node executions_ is `10`, which can be
17
-
controlled with the `-p` / `--parallel` argument.
18
-
19
-
```sh
20
-
wire apply -p <NUMBER>
21
-
```
22
-
23
-
## Interaction with Nix's `max-jobs`
24
-
25
-
Nix has an overall derivation build limit and core limit.
26
-
If executing a node fills Nix's `max-jobs` all other nodes will bottleneck. You
27
-
should read [the relevant
28
-
documentation](https://nix.dev/manual/nix/2.28/advanced-topics/cores-vs-jobs) to fine tune these settings.
29
-
30
-
When a Node is built remotely due to
31
-
[`deployment.buildOnTarget`](/reference/module.html#deployment-buildontarget)
32
-
that node will not push up the _local machine's_ max-jobs limit.
-1
doc/guide/tailscale.md
-1
doc/guide/tailscale.md
···
1
-
# Wire & Tailscale
-89
doc/guide/targeting.md
-89
doc/guide/targeting.md
···
1
-
---
2
-
comment: true
3
-
title: Targeting Nodes
4
-
description: Tags, nodes, and how to target them with Wire Tool.
5
-
---
6
-
7
-
# {{ $frontmatter.title }}
8
-
9
-
{{ $frontmatter.description }}
10
-
11
-
## Targeting Specific Nodes
12
-
13
-
`wire apply --on` without an `@` prefix interprets as a literal node name. For
14
-
example:
15
-
16
-
```sh
17
-
wire apply switch --on node-a,node-b
18
-
```
19
-
20
-
Will switch-to-configuration on node a, and node b.
21
-
22
-
## Tag Basics
23
-
24
-
Nodes can have _tags_, which allows you to easily target multiple, related
25
-
nodes for deployment.
26
-
27
-
```nix:line-numbers{9,13,17,21} [hive.nix]
28
-
let
29
-
sources = import ./npins;
30
-
wire = import sources.wire;
31
-
in wire.makeHive {
32
-
meta.nixpkgs = import sources.nixpkgs { };
33
-
34
-
node-1 = {
35
-
# ...
36
-
deployment.tags = ["cloud"];
37
-
};
38
-
node-2 = {
39
-
# ...
40
-
deployment.tags = ["cloud", "virtual"];
41
-
};
42
-
node-3 = {
43
-
# ...
44
-
deployment.tags = ["on-prem"];
45
-
};
46
-
node-4 = {
47
-
# ...
48
-
deployment.tags = ["virtual"];
49
-
};
50
-
node-5 = {
51
-
# Untagged
52
-
};
53
-
}
54
-
```
55
-
56
-
To target all nodes with a specific tag, prefix tags with an `@`.
57
-
For example, to deploy only nodes with the `cloud` tag, use
58
-
59
-
```sh
60
-
wire apply --on @cloud
61
-
```
62
-
63
-
## Further Examples
64
-
65
-
::: info
66
-
67
-
Other operations such as an `--ignore` argument are unimplemented as of wire `v0.2.0`.
68
-
69
-
:::
70
-
71
-
### Mixing Tags with Node Names
72
-
73
-
You can mix tags and node names with `--on`:
74
-
75
-
```sh
76
-
wire apply --on @cloud node-5
77
-
```
78
-
79
-
This will deploy all nodes in `@cloud`, alongside the node `node-5`.
80
-
81
-
### Targeting Many Tags (Union)
82
-
83
-
You can specify many tags together:
84
-
85
-
```sh
86
-
wire apply --on @cloud @on-prem
87
-
```
88
-
89
-
This is a union between `@cloud` and `@on-prem`.
-49
doc/guide/wire.md
-49
doc/guide/wire.md
···
1
-
---
2
-
comment: true
3
-
---
4
-
5
-
# What is Wire?
6
-
7
-
<p style="display: flex; gap: 8px">
8
-
<a href="https://github.com/wires-org/wire/actions/workflows/test.yml?query=branch%3Amain">
9
-
<img alt="Rust Tests Status" src="https://img.shields.io/github/actions/workflow/status/wires-org/wire/test.yml?branch=main&style=flat-square&label=Rust%20Tests">
10
-
</a>
11
-
12
-
<a href="https://hydra.althaea.zone/jobset/wire/main">
13
-
<img alt="BuildBot Build & VM Test Status" src="https://img.shields.io/github/checks-status/wires-org/wire/main?style=flat-square&label=BuildBot%20Build%20%26%20VM%20Tests">
14
-
</a>
15
-
16
-
<a href="https://github.com/wires-org/wire/actions/workflows/pages.yml?query=branch%3Amain">
17
-
<img alt="Documentation Status" src="https://img.shields.io/github/actions/workflow/status/wires-org/wire/pages.yml?branch=main&style=flat-square&label=Documentation">
18
-
</a>
19
-
</p>
20
-
21
-
Wire is a tool to deploy NixOS systems. Its usage is inspired by [colmena](https://colmena.cli.rs/). In many places it's configuration attempts to remain a superset[^1] of colmena, however it is **not** a fork.
22
-
23
-
[^1]: A lot of your colmena module options will continue to work with wire, but wire has additional ergonomic changes you can take advantage of.
24
-
25
-
::: warning
26
-
Wire is alpha software, please use at your own risk. Many features listed in this documentation may not be complete / implemented.
27
-
:::
28
-
29
-
<div class="tip custom-block" style="padding-top: 8px">
30
-
31
-
Ready? Skip to the [Quickstart](./getting-started).
32
-
33
-
</div>
34
-
35
-
## Why Wire?
36
-
37
-
::: info
38
-
The following is the goal for a stable release and not fully implemented.
39
-
:::
40
-
41
-
| Features | Wire | Colmena |
42
-
| --------------------- | ---------------------------- | ---------------------------------------------------------------------------------------------------------- |
43
-
| Secret Management | :white_check_mark: | :white_check_mark: |
44
-
| Parallel Evaluation | :white_check_mark: | [Experimental](https://colmena.cli.rs/unstable/features/parallelism.html#parallel-evaluation-experimental) |
45
-
| Node Tagging | :white_check_mark: | :white_check_mark: |
46
-
| `jq` pipeline support | :white_check_mark: | :x:[^2] |
47
-
| Magic Rollback | :white_check_mark: (Planned) | :x: |
48
-
49
-
[^2]: You need to write custom nix code to use Colmena hive metadata inside environments like CI pipelines, bash scripting, etc., which requires a knowledge of its internals.
+83
doc/guides/apply.md
+83
doc/guides/apply.md
···
1
+
---
2
+
comment: true
3
+
title: Apply your Config
4
+
description: How to apply a node with wire.
5
+
---
6
+
7
+
# Apply your Config
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## What does it mean to 'apply'?
12
+
13
+
Once you have created a hive, you can now "apply" your
14
+
configuration to nodes in your hive. Simply, "applying" is the term used by wire to describe **deploying the
15
+
config**.
16
+
17
+
::: info
18
+
Applying a node typically involves pushing keys,
19
+
evaluating the node's NixOS system, building the node's NixOS system, and running
20
+
`switch-to-configuration`, depending on which specific goal is used.
21
+
:::
22
+
23
+
The simplest way to apply is simply running:
24
+
25
+
```sh
26
+
$ wire apply switch
27
+
```
28
+
29
+
Which will `switch` to each node's NixOS system in your hive and push
30
+
secrets (the equivalent to `nixos-rebuild`'s `nixos-rebuild switch`).
31
+
32
+
## Apply goals
33
+
34
+
`wire apply` accepts a goal, which include verbs which will be familiar to
35
+
`nixos-rebuild` users such as `switch`, `boot`, and `test`, alongside additional verbs
36
+
like `keys` and `push`.
37
+
38
+
### `wire apply keys`
39
+
40
+
wire will push all deployment keys to nodes, and do nothing else. While running
41
+
this goal, option
42
+
[`deployment.keys.<name>.uploadAt`](/reference/module#deployment-keys-name-uploadat)
43
+
has no effect and all keys will be pushed. Read [the secret management guide](./keys)
44
+
to learn more about wire deployment keys.
45
+
46
+
### `wire apply push`
47
+
48
+
wire will "push" (equivalent to [`nix
49
+
copy`](https://nix.dev/manual/nix/2.18/command-ref/new-cli/nix3-copy)) the
50
+
`.drv` file that can produce the node's NixOS system when built.
51
+
52
+
### `wire apply build`
53
+
54
+
Sister to `wire apply push`, wire will build the
55
+
node's NixOS system and ensure the output path exists on the node. Depending on
56
+
[`deployment.buildOnTarget`](/reference/module#deployment-buildontarget), the
57
+
`.drv` file may be built on the machine invoking wire or the node itself.
58
+
59
+
### `wire apply [switch|boot|test|dry-activate]`
60
+
61
+
Type `wire apply --help` or
62
+
[read the reference](../reference/cli#wire-apply) to read more.
63
+
64
+
## Applying locally
65
+
66
+
If `deployment.allowLocalDeployment` is `true`, and the machine invoking wire's
67
+
host name is equivalent to a node's name, wire will apply that node to the local
68
+
machine. Goals like `push` and `build` won't actually "push" anything, as
69
+
the paths already exist on the local machine.
70
+
71
+
When applying to your local machine, wire can interactively run `sudo`!
72
+
wire will prompt for your password, meaning wire can be run as any user in
73
+
the `wheel` group.
74
+
75
+
## Applying specific nodes
76
+
77
+
Use the `--on` argument to specify which nodes in your hive to apply:
78
+
79
+
```sh
80
+
$ wire apply --on node-a
81
+
```
82
+
83
+
Further examples, including how you can utilise tags, can be found on the [Targeting Nodes](./targeting) page.
+36
doc/guides/build-in-ci.md
+36
doc/guides/build-in-ci.md
···
1
+
---
2
+
comment: true
3
+
title: Build in CI
4
+
---
5
+
6
+
# Build in CI
7
+
8
+
## The `wire build` command <Badge type="tip" text="^1.1.0" />
9
+
10
+
`wire build` builds nodes locally. It is distinct from
11
+
`wire apply build`, as it will not ping or push the result,
12
+
making it useful for CI.
13
+
14
+
It accepts the same `--on` argument as `wire apply` does.
15
+
16
+
## Partitioning builds
17
+
18
+
`wire build` accepts a `--partition` option inspired by
19
+
[cargo-nextest](https://nexte.st/docs/ci-features/partitioning/), which splits
20
+
selected nodes into buckets to be built separately.
21
+
22
+
It accepts values in the format `--partition current/total`, where 1 ≤ current ≤ total.
23
+
24
+
For example, these two commands will build the entire hive in two invocations:
25
+
26
+
```sh
27
+
wire build --partition 1/2
28
+
29
+
# later or synchronously:
30
+
31
+
wire build --partition 2/2
32
+
```
33
+
34
+
## Example: Build in Github Actions
35
+
36
+
<<< @/snippets/guides/example-action.yml [.github/workflows/build.yml]
+42
doc/guides/flakes/nixos-rebuild.md
+42
doc/guides/flakes/nixos-rebuild.md
···
1
+
---
2
+
comment: true
3
+
title: Keep Using nixos-rebuild
4
+
description: How to combine outputs.nixosConfigurations with outputs.wire
5
+
---
6
+
7
+
# Keep Using nixos-rebuild
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## An Example
12
+
13
+
You can provide `makeHive` with your `nixosConfigurations` with the `inherit`
14
+
nix keyword. `makeHive` will merge any nodes and nixosConfigurations that share
15
+
the same name together.
16
+
17
+
::: tip
18
+
You should include the wire module, which will provide the `deployment` options, even if nixos-rebuild can't directly use them.
19
+
:::
20
+
21
+
::: code-group
22
+
<<< @/snippets/getting-started/flake-merged.nix [flake.nix]
23
+
:::
24
+
25
+
Now, if we run `wire show`, you will see that wire only finds
26
+
the `nixosConfigurations`-es that also match a node in the hive.
27
+
`some-other-host` is not included in the hive unless specified in `makeHive`.
28
+
29
+
```
30
+
$ wire show
31
+
Node node-a (x86_64-linux):
32
+
33
+
> Connection: {root@node-a:22}
34
+
> Build remotely `deployment.buildOnTarget`: false
35
+
> Local apply allowed `deployment.allowLocalDeployment`: true
36
+
37
+
Summary: 1 total node(s), totalling 0 keys (0 distinct).
38
+
Note: Listed connections are tried from Left to Right
39
+
40
+
```
41
+
42
+
This way, you can continue using `nixos-rebuild` and wire at the same time.
+40
doc/guides/flakes/overview.md
+40
doc/guides/flakes/overview.md
···
1
+
---
2
+
comment: true
3
+
title: Use Flakes
4
+
description: How to output a hive from a flake.
5
+
---
6
+
7
+
# Use Flakes
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Output a hive
12
+
13
+
::: tip
14
+
If you have skipped ahead, please read the previous page to understand the
15
+
concept of a hive.
16
+
:::
17
+
18
+
You can use wire with a flake by outputting a hive with the `wire` flake output.
19
+
Just like when using a `hive.nix`, you must provide `meta.nixpkgs` which will
20
+
come from an input.
21
+
22
+
::: code-group
23
+
<<< @/snippets/getting-started/flake.nix [flake.nix]
24
+
:::
25
+
26
+
```
27
+
$ nix flake show
28
+
git+file:///some/path
29
+
└───wire: unknown
30
+
31
+
$ wire show
32
+
Node node-a (x86_64-linux):
33
+
34
+
> Connection: {root@node-a:22}
35
+
> Build remotely `deployment.buildOnTarget`: false
36
+
> Local apply allowed `deployment.allowLocalDeployment`: true
37
+
38
+
Summary: 1 total node(s), totalling 0 keys (0 distinct).
39
+
Note: Listed connections are tried from Left to Right
40
+
```
+56
doc/guides/hive-default.md
+56
doc/guides/hive-default.md
···
1
+
---
2
+
comment: true
3
+
title: Use hive.default
4
+
description: Deduplicate options with default node configuration.
5
+
---
6
+
7
+
# Use `hive.default`
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Introduction
12
+
13
+
At the top level of a hive wire reserves the `defaults` attribute. It's applied
14
+
to every node.
15
+
16
+
::: warning
17
+
18
+
`defaults` must not rely on modules that a node imports, but a
19
+
node may rely on modules that `defaults` imports.
20
+
21
+
:::
22
+
23
+
```nix:line-numbers [hive.nix]
24
+
let
25
+
sources = import ./npins;
26
+
wire = import sources.wire;
27
+
in wire.makeHive {
28
+
meta.nixpkgs = import sources.nixpkgs { };
29
+
30
+
defaults = {
31
+
# name of the node that defaults is being applied to
32
+
name,
33
+
# attribute set of all nodes
34
+
nodes,
35
+
...
36
+
}: {
37
+
import = [
38
+
./default-module.nix
39
+
40
+
# module that is imported for all nodes
41
+
some-flake.nixosModules.default
42
+
];
43
+
44
+
# default configuration
45
+
# may or may not utilise `name` or `nodes`
46
+
};
47
+
48
+
node-a = {
49
+
# some config
50
+
};
51
+
52
+
node-b = {
53
+
# some more config
54
+
};
55
+
}
56
+
```
+60
doc/guides/installation.md
+60
doc/guides/installation.md
···
1
+
---
2
+
comment: true
3
+
title: Install wire
4
+
description: How to install wire tool.
5
+
---
6
+
7
+
# Install wire
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
::: info
12
+
13
+
The `wire` binary and the `wire.makeHive` function are tightly coupled, so it is
14
+
recommended that you use the same version for both.
15
+
16
+
:::
17
+
18
+
It is recommended you stick to either using a tagged version of wire, or the `stable` branch which tracks the latest stable tag.
19
+
20
+
## Binary Cache
21
+
22
+
You should enable the [garnix binary cache](https://garnix.io/docs/caching) _before_
23
+
continuing otherwise you will be compiling from source:
24
+
25
+
::: code-group
26
+
<<< @/snippets/tutorial/cache.conf [nix.conf]
27
+
<<< @/snippets/tutorial/cache.nix [configuration.nix]
28
+
:::
29
+
30
+
## Installation through flakes
31
+
32
+
When using flakes, you should install wire through the same input you create
33
+
your hive from, sourced from the `stable` branch.
34
+
35
+
::: code-group
36
+
<<< @/snippets/guides/installation/flake.nix [flake.nix]
37
+
:::
38
+
39
+
## Installation through npins
40
+
41
+
With npins you may allow it to use release tags instead of the `stable`
42
+
branch.
43
+
44
+
Using npins specifically is not required, you can pin your sources in any way
45
+
you'd like, really.
46
+
47
+
```sh
48
+
$ npins add github forallsys wire --branch stable
49
+
```
50
+
51
+
Alternatively, you can use a tag instead:
52
+
53
+
```sh
54
+
$ npins add github forallsys wire --at v1.1.1
55
+
```
56
+
57
+
Then, use this pinned version of wire for both your `hive.nix` and `shell.nix`:
58
+
59
+
<<< @/snippets/guides/installation/shell.nix{8} [shell.nix]
60
+
<<< @/snippets/guides/installation/hive.nix [hive.nix]
+259
doc/guides/keys.md
+259
doc/guides/keys.md
···
1
+
---
2
+
comment: true
3
+
title: Manage Secrets
4
+
description: Manage keys, secrets, files, and other out-of-store paths with wire Tool.
5
+
---
6
+
7
+
# Manage Secrets
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Introduction
12
+
13
+
wire Tool is very unopinionated as to how you encrypt your secrets, wire only
14
+
handles pushing and setting up permissions of your key files.
15
+
16
+
The `source` of your key can be a literal string (unencrypted), a path
17
+
(unencrypted), or a command that wire runs to evaluate the key. Programs that
18
+
work well with wire keys include:
19
+
20
+
- GPG
21
+
- [Age](https://github.com/FiloSottile/age)
22
+
- Anything that non-interactively decrypts to `stdout`.
23
+
24
+
### Prerequisites
25
+
26
+
wire uses a Rust binary to receive encrypted key data, so your deploying
27
+
user must be trusted, or you must add the garnix cache's public key to `trusted-public-keys`:
28
+
29
+
```nix
30
+
{ config, ... }:
31
+
{
32
+
nix.settings.trusted-users = [
33
+
config.deployment.target.user # [!code ++]
34
+
];
35
+
}
36
+
```
37
+
38
+
Otherwise, you may see errors such as:
39
+
40
+
```
41
+
error: cannot add path '/nix/store/...-wire-tool-key_agent-x86_64-linux-...' because it lacks a signature by a trusted key
42
+
```
43
+
44
+
This is a requirement because `nix copy` is used to copy the binary.
45
+
As a benefit to this approach, key deployments are significantly faster!
46
+
47
+
### A Trivial "Key"
48
+
49
+
```nix:line-numbers [hive.nix]
50
+
let
51
+
sources = import ./npins;
52
+
wire = import sources.wire;
53
+
in wire.makeHive {
54
+
meta.nixpkgs = import sources.nixpkgs { };
55
+
56
+
node-1 = {
57
+
deployment.key."file.txt" = {
58
+
source = ''
59
+
Hello World!
60
+
'';
61
+
};
62
+
};
63
+
}
64
+
```
65
+
66
+
```sh
67
+
[user@node-1]$ cat /run/keys/file.txt
68
+
Hello World!
69
+
```
70
+
71
+
### Encrypting with GPG
72
+
73
+
```nix:line-numbers [hive.nix]
74
+
let
75
+
sources = import ./npins;
76
+
wire = import sources.wire;
77
+
in wire.makeHive {
78
+
meta.nixpkgs = import sources.nixpkgs { };
79
+
80
+
node-1 = {
81
+
deployment.key."file.txt" = {
82
+
source = [
83
+
"gpg"
84
+
"--decrypt"
85
+
"${./secrets/file.txt.gpg}"
86
+
];
87
+
};
88
+
};
89
+
}
90
+
```
91
+
92
+
```sh
93
+
[user@node-1]$ cat /run/keys/file.txt
94
+
Hello World!
95
+
```
96
+
97
+
### Encrypting with KeepassXC
98
+
99
+
A simple example of extracting a KeepassXC attachment into a wire key.
100
+
You must pass the password through stdin as the command must be non-interactive.
101
+
Note that the `--stdout` is important as wire expects the command to output the key to stdout.
102
+
103
+
```nix:line-numbers [hive.nix]
104
+
let
105
+
sources = import ./npins;
106
+
wire = import sources.wire;
107
+
in wire.makeHive {
108
+
meta.nixpkgs = import sources.nixpkgs { };
109
+
110
+
node-1 = {
111
+
deployment.key."file.txt" = {
112
+
source = [
113
+
"bash"
114
+
"-c"
115
+
''cat ~/pass | keepassxc-cli attachment-export --stdout ~/.local/share/keepass/database.kdbx test 'file.txt'''
116
+
];
117
+
};
118
+
};
119
+
}
120
+
```
121
+
122
+
```sh
123
+
[user@node-1]$ cat /run/keys/file.txt
124
+
Hello World!
125
+
```
126
+
127
+
### A Plain Text File
128
+
129
+
```nix:line-numbers [hive.nix]
130
+
let
131
+
sources = import ./npins;
132
+
wire = import sources.wire;
133
+
in wire.makeHive {
134
+
meta.nixpkgs = import sources.nixpkgs { };
135
+
136
+
node-1 = {
137
+
deployment.key."file.txt" = {
138
+
# using this syntax will enter the file into the store, readable by
139
+
# anyone!
140
+
source = ./file.txt;
141
+
};
142
+
};
143
+
}
144
+
```
145
+
146
+
## Persistence
147
+
148
+
wire defaults `destDir` to `/run/keys`. `/run/` is held in memory and will not
149
+
persist past reboot. Change
150
+
[`deployment.key.<name>.destDir`](/reference/module#deployment-keys-name-destdir)
151
+
to something like `/etc/keys` if you need secrets every time the machine boots.
152
+
153
+
## Upload Order
154
+
155
+
By default wire will upload keys before the system is activated. You can
156
+
force wire to upload the key after the system is activated by setting
157
+
[`deployment.keys.<name>.uploadAt`](/reference/module#deployment-keys-name-uploadat)
158
+
to `post-activation`.
159
+
160
+
## Permissions and Ownership
161
+
162
+
wire secrets are owned by user & group `root` (`0600`). You can change these
163
+
with the `user` and `group` option.
164
+
165
+
```nix:line-numbers [hive.nix]
166
+
let
167
+
sources = import ./npins;
168
+
wire = import sources.wire;
169
+
in wire.makeHive {
170
+
meta.nixpkgs = import sources.nixpkgs { };
171
+
172
+
node-1 = {
173
+
deployment.key."file.txt" = {
174
+
source = [
175
+
"gpg"
176
+
"--decrypt"
177
+
"${./secrets/file.txt.gpg}"
178
+
];
179
+
180
+
user = "my-user";
181
+
group = "my-group";
182
+
};
183
+
};
184
+
}
185
+
```
186
+
187
+
## Further Examples
188
+
189
+
### Using Keys With Services
190
+
191
+
You can access the full absolute path of any key with
192
+
`config.deployment.keys.<name>.path` (auto-generated and read-only).
193
+
194
+
Keys also have a `config.deployment.keys.<name>.service` property
195
+
(auto-generated and read-only), which represent systemd services that you can
196
+
`require`, telling systemd there is a hard-dependency on that key for the
197
+
service to run.
198
+
199
+
Here's an example with the Tailscale service:
200
+
201
+
```nix:line-numbers [hive.nix]
202
+
let
203
+
sources = import ./npins;
204
+
wire = import sources.wire;
205
+
in wire.makeHive {
206
+
meta.nixpkgs = import sources.nixpkgs { };
207
+
208
+
node-1 = {config, ...}: {
209
+
services.tailscale = {
210
+
enable = true;
211
+
# use deployment key path directly
212
+
authKeyFile = config.deployment.keys."tailscale.key".path;
213
+
};
214
+
215
+
deployment.keys."tailscale.key" = {
216
+
keyCommand = ["gpg" "--decrypt" "${./secrets/tailscale.key.gpg}"];
217
+
};
218
+
219
+
# The service will not start unless the key exists.
220
+
systemd.services.tailscaled-autoconnect.requires = [
221
+
config.deployment.keys."tailscale.key".service
222
+
];
223
+
};
224
+
}
225
+
```
226
+
227
+
### Scoping a Key to a service account
228
+
229
+
Additionally you can scope the key to the user that the service runs under, to
230
+
further reduce duplication using the `config` argument. Here's an example of
231
+
providing a certificate that is only readable by the caddy service.
232
+
233
+
```nix:line-numbers [hive.nix]
234
+
let
235
+
sources = import ./npins;
236
+
wire = import sources.wire;
237
+
in wire.makeHive {
238
+
meta.nixpkgs = import sources.nixpkgs { };
239
+
240
+
some-web-server = {config, ...}: {
241
+
deployment.keys."some.host.pem" = {
242
+
keyCommand = ["gpg" "--decrypt" "${./some.host.pem.gpg}"];
243
+
destDir = "/etc/keys";
244
+
245
+
# inherit the user and group that caddy runs under
246
+
# the key will only be readable by the caddy service
247
+
inherit (config.services.caddy) user group;
248
+
};
249
+
250
+
# ^^ repeat for `some.host.key`
251
+
252
+
services.caddy = {
253
+
virtualHosts."https://some.host".extraConfig = ''
254
+
tls ${config.deployment.keys."some.host.pem".path} ${config.deployment.keys."some.host.key".path}
255
+
'';
256
+
};
257
+
};
258
+
}
259
+
```
+88
doc/guides/migrate.md
+88
doc/guides/migrate.md
···
1
+
---
2
+
comment: true
3
+
title: Migrate to wire
4
+
description: How-to migrate from other tools to wire tool.
5
+
---
6
+
7
+
# Migrate to wire
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
Migrate from...
12
+
13
+
- [Colmena](#from-colmena)
14
+
- [`nixos-rebuild`](#from-nixos-rebuild)
15
+
16
+
## From Colmena
17
+
18
+
If you're familiar with colmena, wire will hopefully come quickly to you! (or,
19
+
at least that was the intention when writing it!). There are a few changes you
20
+
should know:
21
+
22
+
- [You don't have to use a root user](/guides/non-root-user.html)
23
+
- `apply-local` does not exist, `apply` will apply locally when appropriate
24
+
- [Many options have been aliased to nicer names](/reference/module.html)
25
+
(ie, `deployment.targetUser` <=> `deployment.target.user`)
26
+
- You may pass a list of hosts to `deployment.targetHost` (no more fiddling with
27
+
your hive whenever DNS is down, for example)
28
+
- `--path` optionally takes a flakeref! You can pass `--path github:foo/bar`,
29
+
`--path git+file:///...`, `--path https://.../main.tar.gz`, etc.
30
+
(plain paths like `--path ~/my-hive` still work as always)
31
+
32
+
::: tip
33
+
You should also follow [installation](/guides/installation) to install the
34
+
binary.
35
+
:::
36
+
37
+
### Convert a Hive as a Flake
38
+
39
+
```nix [flake.nix]
40
+
{
41
+
inputs = {
42
+
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
43
+
colmena.url = "github:zhaofengli/colmena"; # [!code --]
44
+
wire.url = "github:forallsys/wire/stable"; # [!code ++]
45
+
};
46
+
outputs =
47
+
{ nixpkgs, colmena, ... }:
48
+
{
49
+
colmenaHive = colmena.lib.makeHive { # [!code --]
50
+
wire = wire.lib.makeHive { # [!code ++]
51
+
# ..
52
+
};
53
+
};
54
+
}
55
+
```
56
+
57
+
### Convert a Hive with npins
58
+
59
+
::: tip
60
+
You should also follow [installation](/guides/installation) to setup
61
+
npins and install the binary.
62
+
:::
63
+
64
+
Unlike colmena, you must call `makeHive` directly even in non-flake hives.
65
+
66
+
```nix [hive.nix]
67
+
let
68
+
sources = import ./npins;
69
+
wire = import sources.wire;
70
+
in
71
+
{ # [!code --]
72
+
wire.makeHive { # [!code ++]
73
+
74
+
meta.nixpkgs = <nixpkgs>; # [!code --]
75
+
meta.nixpkgs = import sources.nixpkgs { }; # [!code ++]
76
+
77
+
# ...
78
+
}
79
+
```
80
+
81
+
Replacing `<nixpkgs>` with a pinned source is optional, but you should
82
+
probably use one if you ask me \:)
83
+
84
+
## From `nixos-rebuild`
85
+
86
+
You can keep using `nixos-rebuild` alongside wire!
87
+
88
+
Follow the instructions in [the relevant page](/guides/flakes/nixos-rebuild.html).
+78
doc/guides/non-root-user.md
+78
doc/guides/non-root-user.md
···
1
+
---
2
+
comment: true
3
+
title: Use a non-root user
4
+
description: Deploy without root permissions with wire.
5
+
---
6
+
7
+
# Use a non-root user
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Deploying User Requirements
12
+
13
+
For deployment commands to succeed, the user defined in `deployment.target.user` must meet the following criteria:
14
+
15
+
1. Essential Config
16
+
17
+
- **Sudo Access**: The user must be `wheel` (A sudo user)
18
+
- **SSH Key Authentication**: The user must be authenticated through SSH keys,
19
+
and password-based SSH auth is not supported.
20
+
21
+
**Why?** Wire can prompt you for your `sudo` password, but not your `ssh` password.
22
+
23
+
2. Deploying with Secrets
24
+
25
+
- **Trusted User**: The user must be listed in the `trusted-users` nix config.
26
+
27
+
If the user is not trusted, wire will fail in the key deployment stage.
28
+
29
+
For setting up a trusted user, see [Manage Secrets - Prerequisites](/guides/keys.html#prerequisites).
30
+
31
+
## Changing the user
32
+
33
+
By default, the target is set to root:
34
+
35
+
```nix
36
+
{
37
+
deployment.target.user = "root";
38
+
}
39
+
```
40
+
41
+
But it can be any user you want so long as it fits the requirements above.
42
+
43
+
```nix
44
+
{
45
+
deployment.target.user = "root"; # [!code --]
46
+
deployment.target.user = "deploy-user"; # [!code ++]
47
+
}
48
+
```
49
+
50
+
After this change, wire will prompt you for sudo authentication, and tell you
51
+
the exact command wire wants privileged:
52
+
53
+
```sh{6}
54
+
$ wire apply keys --on media
55
+
INFO eval_hive: evaluating hive Flake("/path/to/hive")
56
+
...
57
+
INFO media | step="Upload key @ NoFilter" progress="3/4"
58
+
deploy-user@node:22 | Authenticate for "sudo /nix/store/.../bin/key_agent":
59
+
[sudo] password for deploy-user:
60
+
```
61
+
62
+
## Using alternative privilege escalation
63
+
64
+
You may change the privilege escalation command with the
65
+
[deployment.privilegeEscalationCommand](/reference/module.html#deployment-privilegeescalationcommand)
66
+
option.
67
+
68
+
For example, doas:
69
+
70
+
```nix
71
+
{
72
+
deployment.privilegeEscalationCommand = [
73
+
"sudo" # [!code --]
74
+
"--" # [!code --]
75
+
"doas" # [!code ++]
76
+
];
77
+
}
78
+
```
+32
doc/guides/parallelism.md
+32
doc/guides/parallelism.md
···
1
+
---
2
+
comment: true
3
+
title: Use Parallelism
4
+
description: How to use parallelism with wire Tool.
5
+
---
6
+
7
+
# Use Parallelism
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Controlling CPU Usage
12
+
13
+
wire evaluates, builds, pushes, and deploys each node completely independently
14
+
from each other. Internally wire calls this process a "node execution".
15
+
16
+
The default number of parallel _node executions_ is `10`, which can be
17
+
controlled with the `-p` / `--parallel` argument.
18
+
19
+
```sh
20
+
$ wire apply -p <NUMBER>
21
+
```
22
+
23
+
## Interaction with Nix's `max-jobs`
24
+
25
+
Nix has an overall derivation build limit and core limit.
26
+
If executing a node fills Nix's `max-jobs` all other nodes will bottleneck. You
27
+
should read [the relevant
28
+
documentation](https://nix.dev/manual/nix/2.28/advanced-topics/cores-vs-jobs) to fine tune these settings.
29
+
30
+
When a Node is built remotely due to
31
+
[`deployment.buildOnTarget`](/reference/module.html#deployment-buildontarget)
32
+
that node will not count toward the _local machine's_ max-jobs limit.
+102
doc/guides/targeting.md
+102
doc/guides/targeting.md
···
1
+
---
2
+
comment: true
3
+
title: Target Nodes
4
+
description: Tags, nodes, and how to target them with wire Tool.
5
+
---
6
+
7
+
# Target Nodes
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Targeting Specific Nodes
12
+
13
+
`wire apply --on` without an `@` prefix is interpreted as a literal node name. For
14
+
example:
15
+
16
+
```sh
17
+
$ wire apply switch --on node-a,node-b
18
+
```
19
+
20
+
Will switch-to-configuration on node a, and node b.
21
+
22
+
## Reading from Stdin
23
+
24
+
Passing `--on -` will read whitespace-separated nodes and tags from stdin. This
25
+
can be combined with normal `--on` usage.
26
+
27
+
For example:
28
+
29
+
```sh
30
+
$ echo "node-a node-b" | wire apply --on @other --on -
31
+
```
32
+
33
+
Will apply on `node-a`, `node-b`, and all nodes with the tag `@other`.
34
+
35
+
## Tag Basics
36
+
37
+
Nodes can have _tags_, which allows you to easily target multiple, related
38
+
nodes for deployment.
39
+
40
+
```nix:line-numbers{9,13,17,21} [hive.nix]
41
+
let
42
+
sources = import ./npins;
43
+
wire = import sources.wire;
44
+
in wire.makeHive {
45
+
meta.nixpkgs = import sources.nixpkgs { };
46
+
47
+
node-1 = {
48
+
# ...
49
+
deployment.tags = ["cloud"];
50
+
};
51
+
node-2 = {
52
+
# ...
53
+
deployment.tags = ["cloud" "virtual"];
54
+
};
55
+
node-3 = {
56
+
# ...
57
+
deployment.tags = ["on-prem"];
58
+
};
59
+
node-4 = {
60
+
# ...
61
+
deployment.tags = ["virtual"];
62
+
};
63
+
node-5 = {
64
+
# Untagged
65
+
};
66
+
}
67
+
```
68
+
69
+
To target all nodes with a specific tag, prefix tags with an `@`.
70
+
For example, to deploy only nodes with the `cloud` tag, use
71
+
72
+
```sh
73
+
$ wire apply --on @cloud
74
+
```
75
+
76
+
## Further Examples
77
+
78
+
::: info
79
+
80
+
Other operations such as an `--ignore` argument are unimplemented as of wire `v0.2.0`.
81
+
82
+
:::
83
+
84
+
### Mixing Tags with Node Names
85
+
86
+
You can mix tags and node names with `--on`:
87
+
88
+
```sh
89
+
$ wire apply --on @cloud --on node-5
90
+
```
91
+
92
+
This will deploy all nodes in `@cloud`, alongside the node `node-5`.
93
+
94
+
### Targeting Many Tags (Union)
95
+
96
+
You can specify many tags together:
97
+
98
+
```sh
99
+
$ wire apply --on @cloud @on-prem
100
+
```
101
+
102
+
This is a union between `@cloud` and `@on-prem`.
+116
doc/guides/writing-a-hive.md
+116
doc/guides/writing-a-hive.md
···
1
+
---
2
+
comment: true
3
+
title: Write a Hive
4
+
---
5
+
6
+
# Write a Hive
7
+
8
+
## Anatomy of a Hive
9
+
10
+
A "Hive" is the attribute set that you pass to `wire.makeHive`. It has the
11
+
following layout:
12
+
13
+
```nix
14
+
wire.makeHive {
15
+
# `meta`
16
+
# type: attrset
17
+
meta = {
18
+
# `meta.nixpkgs` tells wire how to get nixpkgs.
19
+
# type: "A path or an instance of nixpkgs."
20
+
nixpkgs = <nixpkgs>;
21
+
22
+
# `meta.specialArgs` are specialArgs to pass to each node & default
23
+
# type: attrset
24
+
specialArgs = { };
25
+
26
+
# `meta.nodeSpecialArgs` lets you override `meta.specialArgs` per-node.
27
+
# type: attrset of attrset
28
+
nodeSpecialArgs = { };
29
+
30
+
# `meta.nodeNixpkgs` lets you override nixpkgs per-node.
31
+
# type: attrset of "A path or an instance of nixpkgs."
32
+
nodeNixpkgs = { };
33
+
};
34
+
35
+
# `defaults` is a module applied to every node
36
+
# type: NixOS Module
37
+
defaults = { ... }: { };
38
+
39
+
# Any other attributes are nodes.
40
+
<node-name> = { ... }: { };
41
+
}
42
+
```
43
+
44
+
### `<node-name>`
45
+
46
+
Other attributes are NixOS modules that describe a system. They automatically
47
+
have `defaults` and the wire NixOS module imported.
48
+
49
+
They also have the `name` and `nodes` attributes passed to them, `name` being a string of the node's name, and `nodes` being an attribute set of every node in the hive.
50
+
51
+
### `meta`
52
+
53
+
There is more detailed information about `meta` in [the
54
+
reference](/reference/meta.html).
55
+
56
+
### `defaults`
57
+
58
+
De-duplicate options with default node configuration.
59
+
60
+
At the top level of a hive wire reserves the `defaults` attribute. It's applied
61
+
to every node.
62
+
63
+
## Example
64
+
65
+
There is more detailed information about the special options for nodes in [the
66
+
reference](/reference/module.html).
67
+
68
+
```nix:line-numbers [hive.nix]
69
+
{
70
+
meta.nixpkgs = import some-sources-or-inputs.nixpkgs { };
71
+
72
+
defaults = {
73
+
# name of the node that defaults is being applied to
74
+
name,
75
+
# attribute set of all nodes
76
+
nodes,
77
+
pkgs,
78
+
...
79
+
}: {
80
+
imports = [
81
+
./default-module.nix
82
+
83
+
# module that is imported for all nodes
84
+
some-flake.nixosModules.default
85
+
];
86
+
87
+
# all nodes should include vim!
88
+
environment.systemPackages = [ pkgs.vim ];
89
+
};
90
+
91
+
node-a = {
92
+
# name of the node that defaults is being applied to
93
+
name,
94
+
# attribute set of all nodes
95
+
nodes,
96
+
pkgs,
97
+
...
98
+
}: {
99
+
imports = [
100
+
# import the hardware-config and all your extra stuff
101
+
./node-a
102
+
];
103
+
104
+
deployment = {
105
+
target.host = "192.0.2.1";
106
+
tags = [ "x86" ];
107
+
};
108
+
};
109
+
110
+
# as many nodes as you'd like...
111
+
112
+
node-g = {
113
+
# some more config
114
+
};
115
+
}
116
+
```
+19
-11
doc/index.md
+19
-11
doc/index.md
···
5
5
hero:
6
6
name: wire
7
7
text: a tool to deploy nixos systems
8
-
# tagline: My great project tagline
9
8
actions:
10
9
- theme: brand
11
-
text: Read Guide
12
-
link: /guide/wire
10
+
text: Read Tutorial
11
+
link: /tutorial/overview
12
+
- theme: alt
13
+
text: How-to Guides
14
+
link: /guides/installation
13
15
- theme: alt
14
-
text: Reference
15
-
link: /reference/cli
16
+
text: Sources
17
+
link: https://github.com/forallsys/wire.git
16
18
17
19
features:
18
-
- title: Parallelism
19
-
details: Build and deploy many nodes at once
20
-
- title: Secret management
21
-
details: Fast & Unopinionated secret management
22
-
- title: Node Tagging & CI Friendly
23
-
details: Pipe data through jq
20
+
- title: Deploy in Parallel
21
+
details: Build and deploy many nodes at once.
22
+
link: /guides/parallelism
23
+
icon: ๐ฝ
24
+
- title: Manage Secrets
25
+
details: Fast & Unopinionated secret management. Bring your own GPG, Age, or any other encryption tool.
26
+
link: /guides/keys
27
+
icon: ๐
28
+
- title: Deploy as Any User
29
+
details: Non-root deployments and interactive authentication is fully supported.
30
+
link: /guides/non-root-user
31
+
icon: ๐งโ๐ป
24
32
---
+1
-1
doc/options.nix
+1
-1
doc/options.nix
+3
-6
doc/package.json
+3
-6
doc/package.json
···
1
1
{
2
2
"name": "wire-docs",
3
-
"version": "0.5.0",
3
+
"version": "1.1.1",
4
4
"type": "module",
5
5
"devDependencies": {
6
-
"mermaid": "^11.11.0",
7
6
"vitepress": "^1.6.4",
8
-
"vitepress-plugin-mermaid": "^2.0.17",
9
-
"vue": "^3.5.21"
7
+
"vue": "^3.5.25"
10
8
},
11
9
"scripts": {
12
10
"dev": "vitepress dev .",
···
20
18
},
21
19
"dependencies": {
22
20
"markdown-it-footnote": "^4.0.0",
23
-
"vitepress-plugin-comment-with-giscus": "^1.1.15",
24
-
"vitepress-plugin-group-icons": "^1.6.3"
21
+
"vitepress-plugin-group-icons": "^1.6.5"
25
22
}
26
23
}
+8
-6
doc/package.nix
+8
-6
doc/package.nix
···
2
2
lib,
3
3
nixosOptionsDoc,
4
4
runCommand,
5
-
wire-small,
6
-
wire-dignostics-md,
5
+
wire-small-dev,
6
+
wire-diagnostics-md,
7
7
nix,
8
8
nodejs,
9
9
pnpm,
10
10
stdenv,
11
+
mode ? "unstable",
11
12
...
12
13
}:
13
14
let
14
15
eval = lib.evalModules {
15
16
modules = [
16
-
../runtime/module.nix
17
+
../runtime/module/options.nix
17
18
{
18
19
options._module.args = lib.mkOption {
19
20
internal = true;
···
42
43
inherit (pkg) version;
43
44
pname = pkg.name;
44
45
nativeBuildInputs = [
45
-
wire-small
46
+
wire-small-dev
46
47
nodejs
47
48
pnpm.configHook
48
49
nix
···
51
52
pnpmDeps = pnpm.fetchDeps {
52
53
inherit (finalAttrs) pname version src;
53
54
fetcherVersion = 1;
54
-
hash = "sha256-mOlPgdZrG9UeDVLDNl6aCnlpb6/V5kkf4u1/W6ROEmQ=";
55
+
hash = "sha256-ydgb5NCFsYaDbmLjBqu91MqKj/I3TKpNLjOvyP+aY8o=";
55
56
};
56
57
patchPhase = ''
57
58
cat ${optionsDoc} >> ./reference/module.md
58
-
cat ${wire-dignostics-md} >> ./reference/errors.md
59
+
cat ${wire-diagnostics-md} >> ./reference/errors.md
59
60
wire inspect --markdown-help > ./reference/cli.md
60
61
'';
61
62
buildPhase = "pnpm run build > build.log 2>&1";
···
65
66
nix-instantiate --eval --strict ./snippets > /dev/null
66
67
'';
67
68
DEBUG = "*";
69
+
MODE = mode;
68
70
})
+419
-1494
doc/pnpm-lock.yaml
+419
-1494
doc/pnpm-lock.yaml
···
11
11
markdown-it-footnote:
12
12
specifier: ^4.0.0
13
13
version: 4.0.0
14
-
vitepress-plugin-comment-with-giscus:
15
-
specifier: ^1.1.15
16
-
version: 1.1.15(vue@3.5.21)
17
14
vitepress-plugin-group-icons:
18
-
specifier: ^1.6.3
19
-
version: 1.6.3(markdown-it@14.1.0)(vite@5.4.19)
15
+
specifier: ^1.6.5
16
+
version: 1.6.5(vite@5.4.21)
20
17
devDependencies:
21
-
mermaid:
22
-
specifier: ^11.11.0
23
-
version: 11.11.0
24
18
vitepress:
25
19
specifier: ^1.6.4
26
-
version: 1.6.4(@algolia/client-search@5.35.0)(postcss@8.5.6)(search-insights@2.17.3)
27
-
vitepress-plugin-mermaid:
28
-
specifier: ^2.0.17
29
-
version: 2.0.17(mermaid@11.11.0)(vitepress@1.6.4(@algolia/client-search@5.35.0)(postcss@8.5.6)(search-insights@2.17.3))
20
+
version: 1.6.4(@algolia/client-search@5.46.0)(postcss@8.5.6)(search-insights@2.17.3)
30
21
vue:
31
-
specifier: ^3.5.21
32
-
version: 3.5.21
22
+
specifier: ^3.5.25
23
+
version: 3.5.26
33
24
34
25
packages:
35
26
36
-
'@algolia/abtesting@1.1.0':
37
-
resolution: {integrity: sha512-sEyWjw28a/9iluA37KLGu8vjxEIlb60uxznfTUmXImy7H5NvbpSO6yYgmgH5KiD7j+zTUUihiST0jEP12IoXow==}
27
+
'@algolia/abtesting@1.12.0':
28
+
resolution: {integrity: sha512-EfW0bfxjPs+C7ANkJDw2TATntfBKsFiy7APh+KO0pQ8A6HYa5I0NjFuCGCXWfzzzLXNZta3QUl3n5Kmm6aJo9Q==}
38
29
engines: {node: '>= 14.0.0'}
39
30
40
31
'@algolia/autocomplete-core@1.17.7':
···
57
48
'@algolia/client-search': '>= 4.9.1 < 6'
58
49
algoliasearch: '>= 4.9.1 < 6'
59
50
60
-
'@algolia/client-abtesting@5.35.0':
61
-
resolution: {integrity: sha512-uUdHxbfHdoppDVflCHMxRlj49/IllPwwQ2cQ8DLC4LXr3kY96AHBpW0dMyi6ygkn2MtFCc6BxXCzr668ZRhLBQ==}
51
+
'@algolia/client-abtesting@5.46.0':
52
+
resolution: {integrity: sha512-eG5xV8rujK4ZIHXrRshvv9O13NmU/k42Rnd3w43iKH5RaQ2zWuZO6Q7XjaoJjAFVCsJWqRbXzbYyPGrbF3wGNg==}
62
53
engines: {node: '>= 14.0.0'}
63
54
64
-
'@algolia/client-analytics@5.35.0':
65
-
resolution: {integrity: sha512-SunAgwa9CamLcRCPnPHx1V2uxdQwJGqb1crYrRWktWUdld0+B2KyakNEeVn5lln4VyeNtW17Ia7V7qBWyM/Skw==}
55
+
'@algolia/client-analytics@5.46.0':
56
+
resolution: {integrity: sha512-AYh2uL8IUW9eZrbbT+wZElyb7QkkeV3US2NEKY7doqMlyPWE8lErNfkVN1NvZdVcY4/SVic5GDbeDz2ft8YIiQ==}
66
57
engines: {node: '>= 14.0.0'}
67
58
68
-
'@algolia/client-common@5.35.0':
69
-
resolution: {integrity: sha512-ipE0IuvHu/bg7TjT2s+187kz/E3h5ssfTtjpg1LbWMgxlgiaZIgTTbyynM7NfpSJSKsgQvCQxWjGUO51WSCu7w==}
59
+
'@algolia/client-common@5.46.0':
60
+
resolution: {integrity: sha512-0emZTaYOeI9WzJi0TcNd2k3SxiN6DZfdWc2x2gHt855Jl9jPUOzfVTL6gTvCCrOlT4McvpDGg5nGO+9doEjjig==}
70
61
engines: {node: '>= 14.0.0'}
71
62
72
-
'@algolia/client-insights@5.35.0':
73
-
resolution: {integrity: sha512-UNbCXcBpqtzUucxExwTSfAe8gknAJ485NfPN6o1ziHm6nnxx97piIbcBQ3edw823Tej2Wxu1C0xBY06KgeZ7gA==}
63
+
'@algolia/client-insights@5.46.0':
64
+
resolution: {integrity: sha512-wrBJ8fE+M0TDG1As4DDmwPn2TXajrvmvAN72Qwpuv8e2JOKNohF7+JxBoF70ZLlvP1A1EiH8DBu+JpfhBbNphQ==}
74
65
engines: {node: '>= 14.0.0'}
75
66
76
-
'@algolia/client-personalization@5.35.0':
77
-
resolution: {integrity: sha512-/KWjttZ6UCStt4QnWoDAJ12cKlQ+fkpMtyPmBgSS2WThJQdSV/4UWcqCUqGH7YLbwlj3JjNirCu3Y7uRTClxvA==}
67
+
'@algolia/client-personalization@5.46.0':
68
+
resolution: {integrity: sha512-LnkeX4p0ENt0DoftDJJDzQQJig/sFQmD1eQifl/iSjhUOGUIKC/7VTeXRcKtQB78naS8njUAwpzFvxy1CDDXDQ==}
78
69
engines: {node: '>= 14.0.0'}
79
70
80
-
'@algolia/client-query-suggestions@5.35.0':
81
-
resolution: {integrity: sha512-8oCuJCFf/71IYyvQQC+iu4kgViTODbXDk3m7yMctEncRSRV+u2RtDVlpGGfPlJQOrAY7OONwJlSHkmbbm2Kp/w==}
71
+
'@algolia/client-query-suggestions@5.46.0':
72
+
resolution: {integrity: sha512-aF9tc4ex/smypXw+W3lBPB1jjKoaGHpZezTqofvDOI/oK1dR2sdTpFpK2Ru+7IRzYgwtRqHF3znmTlyoNs9dpA==}
82
73
engines: {node: '>= 14.0.0'}
83
74
84
-
'@algolia/client-search@5.35.0':
85
-
resolution: {integrity: sha512-FfmdHTrXhIduWyyuko1YTcGLuicVbhUyRjO3HbXE4aP655yKZgdTIfMhZ/V5VY9bHuxv/fGEh3Od1Lvv2ODNTg==}
75
+
'@algolia/client-search@5.46.0':
76
+
resolution: {integrity: sha512-22SHEEVNjZfFWkFks3P6HilkR3rS7a6GjnCIqR22Zz4HNxdfT0FG+RE7efTcFVfLUkTTMQQybvaUcwMrHXYa7Q==}
86
77
engines: {node: '>= 14.0.0'}
87
78
88
-
'@algolia/ingestion@1.35.0':
89
-
resolution: {integrity: sha512-gPzACem9IL1Co8mM1LKMhzn1aSJmp+Vp434An4C0OBY4uEJRcqsLN3uLBlY+bYvFg8C8ImwM9YRiKczJXRk0XA==}
79
+
'@algolia/ingestion@1.46.0':
80
+
resolution: {integrity: sha512-2LT0/Z+/sFwEpZLH6V17WSZ81JX2uPjgvv5eNlxgU7rPyup4NXXfuMbtCJ+6uc4RO/LQpEJd3Li59ke3wtyAsA==}
90
81
engines: {node: '>= 14.0.0'}
91
82
92
-
'@algolia/monitoring@1.35.0':
93
-
resolution: {integrity: sha512-w9MGFLB6ashI8BGcQoVt7iLgDIJNCn4OIu0Q0giE3M2ItNrssvb8C0xuwJQyTy1OFZnemG0EB1OvXhIHOvQwWw==}
83
+
'@algolia/monitoring@1.46.0':
84
+
resolution: {integrity: sha512-uivZ9wSWZ8mz2ZU0dgDvQwvVZV8XBv6lYBXf8UtkQF3u7WeTqBPeU8ZoeTyLpf0jAXCYOvc1mAVmK0xPLuEwOQ==}
94
85
engines: {node: '>= 14.0.0'}
95
86
96
-
'@algolia/recommend@5.35.0':
97
-
resolution: {integrity: sha512-AhrVgaaXAb8Ue0u2nuRWwugt0dL5UmRgS9LXe0Hhz493a8KFeZVUE56RGIV3hAa6tHzmAV7eIoqcWTQvxzlJeQ==}
87
+
'@algolia/recommend@5.46.0':
88
+
resolution: {integrity: sha512-O2BB8DuySuddgOAbhyH4jsGbL+KyDGpzJRtkDZkv091OMomqIA78emhhMhX9d/nIRrzS1wNLWB/ix7Hb2eV5rg==}
98
89
engines: {node: '>= 14.0.0'}
99
90
100
-
'@algolia/requester-browser-xhr@5.35.0':
101
-
resolution: {integrity: sha512-diY415KLJZ6x1Kbwl9u96Jsz0OstE3asjXtJ9pmk1d+5gPuQ5jQyEsgC+WmEXzlec3iuVszm8AzNYYaqw6B+Zw==}
91
+
'@algolia/requester-browser-xhr@5.46.0':
92
+
resolution: {integrity: sha512-eW6xyHCyYrJD0Kjk9Mz33gQ40LfWiEA51JJTVfJy3yeoRSw/NXhAL81Pljpa0qslTs6+LO/5DYPZddct6HvISQ==}
102
93
engines: {node: '>= 14.0.0'}
103
94
104
-
'@algolia/requester-fetch@5.35.0':
105
-
resolution: {integrity: sha512-uydqnSmpAjrgo8bqhE9N1wgcB98psTRRQXcjc4izwMB7yRl9C8uuAQ/5YqRj04U0mMQ+fdu2fcNF6m9+Z1BzDQ==}
95
+
'@algolia/requester-fetch@5.46.0':
96
+
resolution: {integrity: sha512-Vn2+TukMGHy4PIxmdvP667tN/MhS7MPT8EEvEhS6JyFLPx3weLcxSa1F9gVvrfHWCUJhLWoMVJVB2PT8YfRGcw==}
106
97
engines: {node: '>= 14.0.0'}
107
98
108
-
'@algolia/requester-node-http@5.35.0':
109
-
resolution: {integrity: sha512-RgLX78ojYOrThJHrIiPzT4HW3yfQa0D7K+MQ81rhxqaNyNBu4F1r+72LNHYH/Z+y9I1Mrjrd/c/Ue5zfDgAEjQ==}
99
+
'@algolia/requester-node-http@5.46.0':
100
+
resolution: {integrity: sha512-xaqXyna5yBZ+r1SJ9my/DM6vfTqJg9FJgVydRJ0lnO+D5NhqGW/qaRG/iBGKr/d4fho34el6WakV7BqJvrl/HQ==}
110
101
engines: {node: '>= 14.0.0'}
111
102
112
103
'@antfu/install-pkg@1.1.0':
113
104
resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==}
114
105
115
-
'@antfu/utils@9.2.0':
116
-
resolution: {integrity: sha512-Oq1d9BGZakE/FyoEtcNeSwM7MpDO2vUBi11RWBZXf75zPsbUVWmUs03EqkRFrcgbXyKTas0BdZWC1wcuSoqSAw==}
117
-
118
106
'@babel/helper-string-parser@7.27.1':
119
107
resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==}
120
108
engines: {node: '>=6.9.0'}
121
109
122
-
'@babel/helper-validator-identifier@7.27.1':
123
-
resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==}
110
+
'@babel/helper-validator-identifier@7.28.5':
111
+
resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==}
124
112
engines: {node: '>=6.9.0'}
125
113
126
-
'@babel/parser@7.28.3':
127
-
resolution: {integrity: sha512-7+Ey1mAgYqFAx2h0RuoxcQT5+MlG3GTV0TQrgr7/ZliKsm/MNDxVVutlWaziMq7wJNAz8MTqz55XLpWvva6StA==}
114
+
'@babel/parser@7.28.5':
115
+
resolution: {integrity: sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==}
128
116
engines: {node: '>=6.0.0'}
129
117
hasBin: true
130
118
131
-
'@babel/types@7.28.2':
132
-
resolution: {integrity: sha512-ruv7Ae4J5dUYULmeXw1gmb7rYRz57OWCPM57pHojnLq/3Z1CK2lNSLTCVjxVk1F/TZHwOZZrOWi0ur95BbLxNQ==}
119
+
'@babel/types@7.28.5':
120
+
resolution: {integrity: sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==}
133
121
engines: {node: '>=6.9.0'}
134
-
135
-
'@braintree/sanitize-url@6.0.4':
136
-
resolution: {integrity: sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==}
137
-
138
-
'@braintree/sanitize-url@7.1.1':
139
-
resolution: {integrity: sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==}
140
-
141
-
'@chevrotain/cst-dts-gen@11.0.3':
142
-
resolution: {integrity: sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==}
143
-
144
-
'@chevrotain/gast@11.0.3':
145
-
resolution: {integrity: sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==}
146
-
147
-
'@chevrotain/regexp-to-ast@11.0.3':
148
-
resolution: {integrity: sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==}
149
-
150
-
'@chevrotain/types@11.0.3':
151
-
resolution: {integrity: sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==}
152
-
153
-
'@chevrotain/utils@11.0.3':
154
-
resolution: {integrity: sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==}
155
122
156
123
'@docsearch/css@3.8.2':
157
124
resolution: {integrity: sha512-y05ayQFyUmCXze79+56v/4HpycYF3uFqB78pLPrSV5ZKAlDuIAAJNhaRi8tTdRNXh05yxX/TyNnzD6LwSM89vQ==}
···
314
281
cpu: [x64]
315
282
os: [win32]
316
283
317
-
'@giscus/vue@2.4.0':
318
-
resolution: {integrity: sha512-QOxKHgsMT91myyQagP2v20YYAei1ByZuc3qcaYxbHx4AwOeyVrybDIuRFwG9YDv6OraC86jYnU4Ixd37ddC/0A==}
319
-
peerDependencies:
320
-
vue: '>=3.2.0'
321
-
322
-
'@iconify-json/logos@1.2.9':
323
-
resolution: {integrity: sha512-G6VCdFnwZcrT6Eveq3m43oJfLw/CX8plwFcE+2jgv3fiGB64pTmnU7Yd1MNZ/eA+/Re2iEDhuCfSNOWTHwwK8w==}
284
+
'@iconify-json/logos@1.2.10':
285
+
resolution: {integrity: sha512-qxaXKJ6fu8jzTMPQdHtNxlfx6tBQ0jXRbHZIYy5Ilh8Lx9US9FsAdzZWUR8MXV8PnWTKGDFO4ZZee9VwerCyMA==}
324
286
325
-
'@iconify-json/simple-icons@1.2.47':
326
-
resolution: {integrity: sha512-wa/2O7G4sBmwSEWWLh5C+HeY00lVOoWYRKJOYQtk7lAbQrHUReD1ijiGOyTynV1YavxtNueL1CBA1UZmYJfOrQ==}
287
+
'@iconify-json/simple-icons@1.2.63':
288
+
resolution: {integrity: sha512-xZl2UWCwE58VlqZ+pDPmaUhE2tq8MVSTJRr4/9nzzHlDdjJ0Ud1VxNXPrwTSgESKY29iCQw3S0r2nJTSNNngHw==}
327
289
328
-
'@iconify-json/vscode-icons@1.2.29':
329
-
resolution: {integrity: sha512-ByqO3YPYs0n7hakQ/ZUXltJQnYibeOv41H1AdciOs7Pmba5/OsKKK1/oOjcBmvXrYuENO+IvIzORYkl6sFXgqA==}
290
+
'@iconify-json/vscode-icons@1.2.37':
291
+
resolution: {integrity: sha512-HLRdU6nZks4N8x3JYz6j+b3+hcUCvYvlTLwGzM3xyXfTJyDSA2cAdWcEXfoA4hQMJGA+zCDSPAWFelFptH5Kbw==}
330
292
331
293
'@iconify/types@2.0.0':
332
294
resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==}
333
295
334
-
'@iconify/utils@3.0.1':
335
-
resolution: {integrity: sha512-A78CUEnFGX8I/WlILxJCuIJXloL0j/OJ9PSchPAfCargEIKmUBWvvEMmKWB5oONwiUqlNt+5eRufdkLxeHIWYw==}
296
+
'@iconify/utils@3.1.0':
297
+
resolution: {integrity: sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw==}
336
298
337
299
'@jridgewell/sourcemap-codec@1.5.5':
338
300
resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==}
339
301
340
-
'@lit-labs/ssr-dom-shim@1.4.0':
341
-
resolution: {integrity: sha512-ficsEARKnmmW5njugNYKipTm4SFnbik7CXtoencDZzmzo/dQ+2Q0bgkzJuoJP20Aj0F+izzJjOqsnkd6F/o1bw==}
342
-
343
-
'@lit/reactive-element@2.1.1':
344
-
resolution: {integrity: sha512-N+dm5PAYdQ8e6UlywyyrgI2t++wFGXfHx+dSJ1oBrg6FAxUj40jId++EaRm80MKX5JnlH1sBsyZ5h0bcZKemCg==}
345
-
346
-
'@mermaid-js/mermaid-mindmap@9.3.0':
347
-
resolution: {integrity: sha512-IhtYSVBBRYviH1Ehu8gk69pMDF8DSRqXBRDMWrEfHoaMruHeaP2DXA3PBnuwsMaCdPQhlUUcy/7DBLAEIXvCAw==}
348
-
349
-
'@mermaid-js/parser@0.6.2':
350
-
resolution: {integrity: sha512-+PO02uGF6L6Cs0Bw8RpGhikVvMWEysfAyl27qTlroUB8jSWr1lL0Sf6zi78ZxlSnmgSY2AMMKVgghnN9jTtwkQ==}
351
-
352
-
'@rollup/rollup-android-arm-eabi@4.46.2':
353
-
resolution: {integrity: sha512-Zj3Hl6sN34xJtMv7Anwb5Gu01yujyE/cLBDB2gnHTAHaWS1Z38L7kuSG+oAh0giZMqG060f/YBStXtMH6FvPMA==}
302
+
'@rollup/rollup-android-arm-eabi@4.53.5':
303
+
resolution: {integrity: sha512-iDGS/h7D8t7tvZ1t6+WPK04KD0MwzLZrG0se1hzBjSi5fyxlsiggoJHwh18PCFNn7tG43OWb6pdZ6Y+rMlmyNQ==}
354
304
cpu: [arm]
355
305
os: [android]
356
306
357
-
'@rollup/rollup-android-arm64@4.46.2':
358
-
resolution: {integrity: sha512-nTeCWY83kN64oQ5MGz3CgtPx8NSOhC5lWtsjTs+8JAJNLcP3QbLCtDDgUKQc/Ro/frpMq4SHUaHN6AMltcEoLQ==}
307
+
'@rollup/rollup-android-arm64@4.53.5':
308
+
resolution: {integrity: sha512-wrSAViWvZHBMMlWk6EJhvg8/rjxzyEhEdgfMMjREHEq11EtJ6IP6yfcCH57YAEca2Oe3FNCE9DSTgU70EIGmVw==}
359
309
cpu: [arm64]
360
310
os: [android]
361
311
362
-
'@rollup/rollup-darwin-arm64@4.46.2':
363
-
resolution: {integrity: sha512-HV7bW2Fb/F5KPdM/9bApunQh68YVDU8sO8BvcW9OngQVN3HHHkw99wFupuUJfGR9pYLLAjcAOA6iO+evsbBaPQ==}
312
+
'@rollup/rollup-darwin-arm64@4.53.5':
313
+
resolution: {integrity: sha512-S87zZPBmRO6u1YXQLwpveZm4JfPpAa6oHBX7/ghSiGH3rz/KDgAu1rKdGutV+WUI6tKDMbaBJomhnT30Y2t4VQ==}
364
314
cpu: [arm64]
365
315
os: [darwin]
366
316
367
-
'@rollup/rollup-darwin-x64@4.46.2':
368
-
resolution: {integrity: sha512-SSj8TlYV5nJixSsm/y3QXfhspSiLYP11zpfwp6G/YDXctf3Xkdnk4woJIF5VQe0of2OjzTt8EsxnJDCdHd2xMA==}
317
+
'@rollup/rollup-darwin-x64@4.53.5':
318
+
resolution: {integrity: sha512-YTbnsAaHo6VrAczISxgpTva8EkfQus0VPEVJCEaboHtZRIb6h6j0BNxRBOwnDciFTZLDPW5r+ZBmhL/+YpTZgA==}
369
319
cpu: [x64]
370
320
os: [darwin]
371
321
372
-
'@rollup/rollup-freebsd-arm64@4.46.2':
373
-
resolution: {integrity: sha512-ZyrsG4TIT9xnOlLsSSi9w/X29tCbK1yegE49RYm3tu3wF1L/B6LVMqnEWyDB26d9Ecx9zrmXCiPmIabVuLmNSg==}
322
+
'@rollup/rollup-freebsd-arm64@4.53.5':
323
+
resolution: {integrity: sha512-1T8eY2J8rKJWzaznV7zedfdhD1BqVs1iqILhmHDq/bqCUZsrMt+j8VCTHhP0vdfbHK3e1IQ7VYx3jlKqwlf+vw==}
374
324
cpu: [arm64]
375
325
os: [freebsd]
376
326
377
-
'@rollup/rollup-freebsd-x64@4.46.2':
378
-
resolution: {integrity: sha512-pCgHFoOECwVCJ5GFq8+gR8SBKnMO+xe5UEqbemxBpCKYQddRQMgomv1104RnLSg7nNvgKy05sLsY51+OVRyiVw==}
327
+
'@rollup/rollup-freebsd-x64@4.53.5':
328
+
resolution: {integrity: sha512-sHTiuXyBJApxRn+VFMaw1U+Qsz4kcNlxQ742snICYPrY+DDL8/ZbaC4DVIB7vgZmp3jiDaKA0WpBdP0aqPJoBQ==}
379
329
cpu: [x64]
380
330
os: [freebsd]
381
331
382
-
'@rollup/rollup-linux-arm-gnueabihf@4.46.2':
383
-
resolution: {integrity: sha512-EtP8aquZ0xQg0ETFcxUbU71MZlHaw9MChwrQzatiE8U/bvi5uv/oChExXC4mWhjiqK7azGJBqU0tt5H123SzVA==}
332
+
'@rollup/rollup-linux-arm-gnueabihf@4.53.5':
333
+
resolution: {integrity: sha512-dV3T9MyAf0w8zPVLVBptVlzaXxka6xg1f16VAQmjg+4KMSTWDvhimI/Y6mp8oHwNrmnmVl9XxJ/w/mO4uIQONA==}
384
334
cpu: [arm]
385
335
os: [linux]
386
336
387
-
'@rollup/rollup-linux-arm-musleabihf@4.46.2':
388
-
resolution: {integrity: sha512-qO7F7U3u1nfxYRPM8HqFtLd+raev2K137dsV08q/LRKRLEc7RsiDWihUnrINdsWQxPR9jqZ8DIIZ1zJJAm5PjQ==}
337
+
'@rollup/rollup-linux-arm-musleabihf@4.53.5':
338
+
resolution: {integrity: sha512-wIGYC1x/hyjP+KAu9+ewDI+fi5XSNiUi9Bvg6KGAh2TsNMA3tSEs+Sh6jJ/r4BV/bx/CyWu2ue9kDnIdRyafcQ==}
389
339
cpu: [arm]
390
340
os: [linux]
391
341
392
-
'@rollup/rollup-linux-arm64-gnu@4.46.2':
393
-
resolution: {integrity: sha512-3dRaqLfcOXYsfvw5xMrxAk9Lb1f395gkoBYzSFcc/scgRFptRXL9DOaDpMiehf9CO8ZDRJW2z45b6fpU5nwjng==}
342
+
'@rollup/rollup-linux-arm64-gnu@4.53.5':
343
+
resolution: {integrity: sha512-Y+qVA0D9d0y2FRNiG9oM3Hut/DgODZbU9I8pLLPwAsU0tUKZ49cyV1tzmB/qRbSzGvY8lpgGkJuMyuhH7Ma+Vg==}
394
344
cpu: [arm64]
395
345
os: [linux]
396
346
397
-
'@rollup/rollup-linux-arm64-musl@4.46.2':
398
-
resolution: {integrity: sha512-fhHFTutA7SM+IrR6lIfiHskxmpmPTJUXpWIsBXpeEwNgZzZZSg/q4i6FU4J8qOGyJ0TR+wXBwx/L7Ho9z0+uDg==}
347
+
'@rollup/rollup-linux-arm64-musl@4.53.5':
348
+
resolution: {integrity: sha512-juaC4bEgJsyFVfqhtGLz8mbopaWD+WeSOYr5E16y+1of6KQjc0BpwZLuxkClqY1i8sco+MdyoXPNiCkQou09+g==}
399
349
cpu: [arm64]
400
350
os: [linux]
401
351
402
-
'@rollup/rollup-linux-loongarch64-gnu@4.46.2':
403
-
resolution: {integrity: sha512-i7wfGFXu8x4+FRqPymzjD+Hyav8l95UIZ773j7J7zRYc3Xsxy2wIn4x+llpunexXe6laaO72iEjeeGyUFmjKeA==}
352
+
'@rollup/rollup-linux-loong64-gnu@4.53.5':
353
+
resolution: {integrity: sha512-rIEC0hZ17A42iXtHX+EPJVL/CakHo+tT7W0pbzdAGuWOt2jxDFh7A/lRhsNHBcqL4T36+UiAgwO8pbmn3dE8wA==}
404
354
cpu: [loong64]
405
355
os: [linux]
406
356
407
-
'@rollup/rollup-linux-ppc64-gnu@4.46.2':
408
-
resolution: {integrity: sha512-B/l0dFcHVUnqcGZWKcWBSV2PF01YUt0Rvlurci5P+neqY/yMKchGU8ullZvIv5e8Y1C6wOn+U03mrDylP5q9Yw==}
357
+
'@rollup/rollup-linux-ppc64-gnu@4.53.5':
358
+
resolution: {integrity: sha512-T7l409NhUE552RcAOcmJHj3xyZ2h7vMWzcwQI0hvn5tqHh3oSoclf9WgTl+0QqffWFG8MEVZZP1/OBglKZx52Q==}
409
359
cpu: [ppc64]
410
360
os: [linux]
411
361
412
-
'@rollup/rollup-linux-riscv64-gnu@4.46.2':
413
-
resolution: {integrity: sha512-32k4ENb5ygtkMwPMucAb8MtV8olkPT03oiTxJbgkJa7lJ7dZMr0GCFJlyvy+K8iq7F/iuOr41ZdUHaOiqyR3iQ==}
362
+
'@rollup/rollup-linux-riscv64-gnu@4.53.5':
363
+
resolution: {integrity: sha512-7OK5/GhxbnrMcxIFoYfhV/TkknarkYC1hqUw1wU2xUN3TVRLNT5FmBv4KkheSG2xZ6IEbRAhTooTV2+R5Tk0lQ==}
414
364
cpu: [riscv64]
415
365
os: [linux]
416
366
417
-
'@rollup/rollup-linux-riscv64-musl@4.46.2':
418
-
resolution: {integrity: sha512-t5B2loThlFEauloaQkZg9gxV05BYeITLvLkWOkRXogP4qHXLkWSbSHKM9S6H1schf/0YGP/qNKtiISlxvfmmZw==}
367
+
'@rollup/rollup-linux-riscv64-musl@4.53.5':
368
+
resolution: {integrity: sha512-GwuDBE/PsXaTa76lO5eLJTyr2k8QkPipAyOrs4V/KJufHCZBJ495VCGJol35grx9xryk4V+2zd3Ri+3v7NPh+w==}
419
369
cpu: [riscv64]
420
370
os: [linux]
421
371
422
-
'@rollup/rollup-linux-s390x-gnu@4.46.2':
423
-
resolution: {integrity: sha512-YKjekwTEKgbB7n17gmODSmJVUIvj8CX7q5442/CK80L8nqOUbMtf8b01QkG3jOqyr1rotrAnW6B/qiHwfcuWQA==}
372
+
'@rollup/rollup-linux-s390x-gnu@4.53.5':
373
+
resolution: {integrity: sha512-IAE1Ziyr1qNfnmiQLHBURAD+eh/zH1pIeJjeShleII7Vj8kyEm2PF77o+lf3WTHDpNJcu4IXJxNO0Zluro8bOw==}
424
374
cpu: [s390x]
425
375
os: [linux]
426
376
427
-
'@rollup/rollup-linux-x64-gnu@4.46.2':
428
-
resolution: {integrity: sha512-Jj5a9RUoe5ra+MEyERkDKLwTXVu6s3aACP51nkfnK9wJTraCC8IMe3snOfALkrjTYd2G1ViE1hICj0fZ7ALBPA==}
377
+
'@rollup/rollup-linux-x64-gnu@4.53.5':
378
+
resolution: {integrity: sha512-Pg6E+oP7GvZ4XwgRJBuSXZjcqpIW3yCBhK4BcsANvb47qMvAbCjR6E+1a/U2WXz1JJxp9/4Dno3/iSJLcm5auw==}
429
379
cpu: [x64]
430
380
os: [linux]
431
381
432
-
'@rollup/rollup-linux-x64-musl@4.46.2':
433
-
resolution: {integrity: sha512-7kX69DIrBeD7yNp4A5b81izs8BqoZkCIaxQaOpumcJ1S/kmqNFjPhDu1LHeVXv0SexfHQv5cqHsxLOjETuqDuA==}
382
+
'@rollup/rollup-linux-x64-musl@4.53.5':
383
+
resolution: {integrity: sha512-txGtluxDKTxaMDzUduGP0wdfng24y1rygUMnmlUJ88fzCCULCLn7oE5kb2+tRB+MWq1QDZT6ObT5RrR8HFRKqg==}
434
384
cpu: [x64]
435
385
os: [linux]
436
386
437
-
'@rollup/rollup-win32-arm64-msvc@4.46.2':
438
-
resolution: {integrity: sha512-wiJWMIpeaak/jsbaq2HMh/rzZxHVW1rU6coyeNNpMwk5isiPjSTx0a4YLSlYDwBH/WBvLz+EtsNqQScZTLJy3g==}
387
+
'@rollup/rollup-openharmony-arm64@4.53.5':
388
+
resolution: {integrity: sha512-3DFiLPnTxiOQV993fMc+KO8zXHTcIjgaInrqlG8zDp1TlhYl6WgrOHuJkJQ6M8zHEcntSJsUp1XFZSY8C1DYbg==}
389
+
cpu: [arm64]
390
+
os: [openharmony]
391
+
392
+
'@rollup/rollup-win32-arm64-msvc@4.53.5':
393
+
resolution: {integrity: sha512-nggc/wPpNTgjGg75hu+Q/3i32R00Lq1B6N1DO7MCU340MRKL3WZJMjA9U4K4gzy3dkZPXm9E1Nc81FItBVGRlA==}
439
394
cpu: [arm64]
440
395
os: [win32]
441
396
442
-
'@rollup/rollup-win32-ia32-msvc@4.46.2':
443
-
resolution: {integrity: sha512-gBgaUDESVzMgWZhcyjfs9QFK16D8K6QZpwAaVNJxYDLHWayOta4ZMjGm/vsAEy3hvlS2GosVFlBlP9/Wb85DqQ==}
397
+
'@rollup/rollup-win32-ia32-msvc@4.53.5':
398
+
resolution: {integrity: sha512-U/54pTbdQpPLBdEzCT6NBCFAfSZMvmjr0twhnD9f4EIvlm9wy3jjQ38yQj1AGznrNO65EWQMgm/QUjuIVrYF9w==}
444
399
cpu: [ia32]
445
400
os: [win32]
446
401
447
-
'@rollup/rollup-win32-x64-msvc@4.46.2':
448
-
resolution: {integrity: sha512-CvUo2ixeIQGtF6WvuB87XWqPQkoFAFqW+HUo/WzHwuHDvIwZCtjdWXoYCcr06iKGydiqTclC4jU/TNObC/xKZg==}
402
+
'@rollup/rollup-win32-x64-gnu@4.53.5':
403
+
resolution: {integrity: sha512-2NqKgZSuLH9SXBBV2dWNRCZmocgSOx8OJSdpRaEcRlIfX8YrKxUT6z0F1NpvDVhOsl190UFTRh2F2WDWWCYp3A==}
404
+
cpu: [x64]
405
+
os: [win32]
406
+
407
+
'@rollup/rollup-win32-x64-msvc@4.53.5':
408
+
resolution: {integrity: sha512-JRpZUhCfhZ4keB5v0fe02gQJy05GqboPOaxvjugW04RLSYYoB/9t2lx2u/tMs/Na/1NXfY8QYjgRljRpN+MjTQ==}
449
409
cpu: [x64]
450
410
os: [win32]
451
411
···
473
433
'@shikijs/vscode-textmate@10.0.2':
474
434
resolution: {integrity: sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==}
475
435
476
-
'@types/d3-array@3.2.1':
477
-
resolution: {integrity: sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==}
478
-
479
-
'@types/d3-axis@3.0.6':
480
-
resolution: {integrity: sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==}
481
-
482
-
'@types/d3-brush@3.0.6':
483
-
resolution: {integrity: sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==}
484
-
485
-
'@types/d3-chord@3.0.6':
486
-
resolution: {integrity: sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==}
487
-
488
-
'@types/d3-color@3.1.3':
489
-
resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==}
490
-
491
-
'@types/d3-contour@3.0.6':
492
-
resolution: {integrity: sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==}
493
-
494
-
'@types/d3-delaunay@6.0.4':
495
-
resolution: {integrity: sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==}
496
-
497
-
'@types/d3-dispatch@3.0.7':
498
-
resolution: {integrity: sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==}
499
-
500
-
'@types/d3-drag@3.0.7':
501
-
resolution: {integrity: sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==}
502
-
503
-
'@types/d3-dsv@3.0.7':
504
-
resolution: {integrity: sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==}
505
-
506
-
'@types/d3-ease@3.0.2':
507
-
resolution: {integrity: sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==}
508
-
509
-
'@types/d3-fetch@3.0.7':
510
-
resolution: {integrity: sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==}
511
-
512
-
'@types/d3-force@3.0.10':
513
-
resolution: {integrity: sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==}
514
-
515
-
'@types/d3-format@3.0.4':
516
-
resolution: {integrity: sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==}
517
-
518
-
'@types/d3-geo@3.1.0':
519
-
resolution: {integrity: sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==}
520
-
521
-
'@types/d3-hierarchy@3.1.7':
522
-
resolution: {integrity: sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==}
523
-
524
-
'@types/d3-interpolate@3.0.4':
525
-
resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==}
526
-
527
-
'@types/d3-path@3.1.1':
528
-
resolution: {integrity: sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==}
529
-
530
-
'@types/d3-polygon@3.0.2':
531
-
resolution: {integrity: sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==}
532
-
533
-
'@types/d3-quadtree@3.0.6':
534
-
resolution: {integrity: sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==}
535
-
536
-
'@types/d3-random@3.0.3':
537
-
resolution: {integrity: sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==}
538
-
539
-
'@types/d3-scale-chromatic@3.1.0':
540
-
resolution: {integrity: sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==}
541
-
542
-
'@types/d3-scale@4.0.9':
543
-
resolution: {integrity: sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==}
544
-
545
-
'@types/d3-selection@3.0.11':
546
-
resolution: {integrity: sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==}
547
-
548
-
'@types/d3-shape@3.1.7':
549
-
resolution: {integrity: sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==}
550
-
551
-
'@types/d3-time-format@4.0.3':
552
-
resolution: {integrity: sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==}
553
-
554
-
'@types/d3-time@3.0.4':
555
-
resolution: {integrity: sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==}
556
-
557
-
'@types/d3-timer@3.0.2':
558
-
resolution: {integrity: sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==}
559
-
560
-
'@types/d3-transition@3.0.9':
561
-
resolution: {integrity: sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==}
562
-
563
-
'@types/d3-zoom@3.0.8':
564
-
resolution: {integrity: sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==}
565
-
566
-
'@types/d3@7.4.3':
567
-
resolution: {integrity: sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==}
568
-
569
436
'@types/estree@1.0.8':
570
437
resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==}
571
-
572
-
'@types/geojson@7946.0.16':
573
-
resolution: {integrity: sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==}
574
438
575
439
'@types/hast@3.0.4':
576
440
resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==}
···
587
451
'@types/mdurl@2.0.0':
588
452
resolution: {integrity: sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==}
589
453
590
-
'@types/trusted-types@2.0.7':
591
-
resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==}
592
-
593
454
'@types/unist@3.0.3':
594
455
resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==}
595
456
···
606
467
vite: ^5.0.0 || ^6.0.0
607
468
vue: ^3.2.25
608
469
609
-
'@vue/compiler-core@3.5.21':
610
-
resolution: {integrity: sha512-8i+LZ0vf6ZgII5Z9XmUvrCyEzocvWT+TeR2VBUVlzIH6Tyv57E20mPZ1bCS+tbejgUgmjrEh7q/0F0bibskAmw==}
470
+
'@vue/compiler-core@3.5.26':
471
+
resolution: {integrity: sha512-vXyI5GMfuoBCnv5ucIT7jhHKl55Y477yxP6fc4eUswjP8FG3FFVFd41eNDArR+Uk3QKn2Z85NavjaxLxOC19/w==}
611
472
612
-
'@vue/compiler-dom@3.5.21':
613
-
resolution: {integrity: sha512-jNtbu/u97wiyEBJlJ9kmdw7tAr5Vy0Aj5CgQmo+6pxWNQhXZDPsRr1UWPN4v3Zf82s2H3kF51IbzZ4jMWAgPlQ==}
473
+
'@vue/compiler-dom@3.5.26':
474
+
resolution: {integrity: sha512-y1Tcd3eXs834QjswshSilCBnKGeQjQXB6PqFn/1nxcQw4pmG42G8lwz+FZPAZAby6gZeHSt/8LMPfZ4Rb+Bd/A==}
614
475
615
-
'@vue/compiler-sfc@3.5.21':
616
-
resolution: {integrity: sha512-SXlyk6I5eUGBd2v8Ie7tF6ADHE9kCR6mBEuPyH1nUZ0h6Xx6nZI29i12sJKQmzbDyr2tUHMhhTt51Z6blbkTTQ==}
476
+
'@vue/compiler-sfc@3.5.26':
477
+
resolution: {integrity: sha512-egp69qDTSEZcf4bGOSsprUr4xI73wfrY5oRs6GSgXFTiHrWj4Y3X5Ydtip9QMqiCMCPVwLglB9GBxXtTadJ3mA==}
617
478
618
-
'@vue/compiler-ssr@3.5.21':
619
-
resolution: {integrity: sha512-vKQ5olH5edFZdf5ZrlEgSO1j1DMA4u23TVK5XR1uMhvwnYvVdDF0nHXJUblL/GvzlShQbjhZZ2uvYmDlAbgo9w==}
479
+
'@vue/compiler-ssr@3.5.26':
480
+
resolution: {integrity: sha512-lZT9/Y0nSIRUPVvapFJEVDbEXruZh2IYHMk2zTtEgJSlP5gVOqeWXH54xDKAaFS4rTnDeDBQUYDtxKyoW9FwDw==}
620
481
621
-
'@vue/devtools-api@7.7.7':
622
-
resolution: {integrity: sha512-lwOnNBH2e7x1fIIbVT7yF5D+YWhqELm55/4ZKf45R9T8r9dE2AIOy8HKjfqzGsoTHFbWbr337O4E0A0QADnjBg==}
482
+
'@vue/devtools-api@7.7.9':
483
+
resolution: {integrity: sha512-kIE8wvwlcZ6TJTbNeU2HQNtaxLx3a84aotTITUuL/4bzfPxzajGBOoqjMhwZJ8L9qFYDU/lAYMEEm11dnZOD6g==}
623
484
624
-
'@vue/devtools-kit@7.7.7':
625
-
resolution: {integrity: sha512-wgoZtxcTta65cnZ1Q6MbAfePVFxfM+gq0saaeytoph7nEa7yMXoi6sCPy4ufO111B9msnw0VOWjPEFCXuAKRHA==}
485
+
'@vue/devtools-kit@7.7.9':
486
+
resolution: {integrity: sha512-PyQ6odHSgiDVd4hnTP+aDk2X4gl2HmLDfiyEnn3/oV+ckFDuswRs4IbBT7vacMuGdwY/XemxBoh302ctbsptuA==}
626
487
627
-
'@vue/devtools-shared@7.7.7':
628
-
resolution: {integrity: sha512-+udSj47aRl5aKb0memBvcUG9koarqnxNM5yjuREvqwK6T3ap4mn3Zqqc17QrBFTqSMjr3HK1cvStEZpMDpfdyw==}
488
+
'@vue/devtools-shared@7.7.9':
489
+
resolution: {integrity: sha512-iWAb0v2WYf0QWmxCGy0seZNDPdO3Sp5+u78ORnyeonS6MT4PC7VPrryX2BpMJrwlDeaZ6BD4vP4XKjK0SZqaeA==}
629
490
630
-
'@vue/reactivity@3.5.21':
631
-
resolution: {integrity: sha512-3ah7sa+Cwr9iiYEERt9JfZKPw4A2UlbY8RbbnH2mGCE8NwHkhmlZt2VsH0oDA3P08X3jJd29ohBDtX+TbD9AsA==}
491
+
'@vue/reactivity@3.5.26':
492
+
resolution: {integrity: sha512-9EnYB1/DIiUYYnzlnUBgwU32NNvLp/nhxLXeWRhHUEeWNTn1ECxX8aGO7RTXeX6PPcxe3LLuNBFoJbV4QZ+CFQ==}
632
493
633
-
'@vue/runtime-core@3.5.21':
634
-
resolution: {integrity: sha512-+DplQlRS4MXfIf9gfD1BOJpk5RSyGgGXD/R+cumhe8jdjUcq/qlxDawQlSI8hCKupBlvM+3eS1se5xW+SuNAwA==}
494
+
'@vue/runtime-core@3.5.26':
495
+
resolution: {integrity: sha512-xJWM9KH1kd201w5DvMDOwDHYhrdPTrAatn56oB/LRG4plEQeZRQLw0Bpwih9KYoqmzaxF0OKSn6swzYi84e1/Q==}
635
496
636
-
'@vue/runtime-dom@3.5.21':
637
-
resolution: {integrity: sha512-3M2DZsOFwM5qI15wrMmNF5RJe1+ARijt2HM3TbzBbPSuBHOQpoidE+Pa+XEaVN+czbHf81ETRoG1ltztP2em8w==}
497
+
'@vue/runtime-dom@3.5.26':
498
+
resolution: {integrity: sha512-XLLd/+4sPC2ZkN/6+V4O4gjJu6kSDbHAChvsyWgm1oGbdSO3efvGYnm25yCjtFm/K7rrSDvSfPDgN1pHgS4VNQ==}
638
499
639
-
'@vue/server-renderer@3.5.21':
640
-
resolution: {integrity: sha512-qr8AqgD3DJPJcGvLcJKQo2tAc8OnXRcfxhOJCPF+fcfn5bBGz7VCcO7t+qETOPxpWK1mgysXvVT/j+xWaHeMWA==}
500
+
'@vue/server-renderer@3.5.26':
501
+
resolution: {integrity: sha512-TYKLXmrwWKSodyVuO1WAubucd+1XlLg4set0YoV+Hu8Lo79mp/YMwWV5mC5FgtsDxX3qo1ONrxFaTP1OQgy1uA==}
641
502
peerDependencies:
642
-
vue: 3.5.21
503
+
vue: 3.5.26
643
504
644
-
'@vue/shared@3.5.18':
645
-
resolution: {integrity: sha512-cZy8Dq+uuIXbxCZpuLd2GJdeSO/lIzIspC2WtkqIpje5QyFbvLaI5wZtdUjLHjGZrlVX6GilejatWwVYYRc8tA==}
505
+
'@vue/shared@3.5.25':
506
+
resolution: {integrity: sha512-AbOPdQQnAnzs58H2FrrDxYj/TJfmeS2jdfEEhgiKINy+bnOANmVizIEgq1r+C5zsbs6l1CCQxtcj71rwNQ4jWg==}
646
507
647
-
'@vue/shared@3.5.21':
648
-
resolution: {integrity: sha512-+2k1EQpnYuVuu3N7atWyG3/xoFWIVJZq4Mz8XNOdScFI0etES75fbny/oU4lKWk/577P1zmg0ioYvpGEDZ3DLw==}
508
+
'@vue/shared@3.5.26':
509
+
resolution: {integrity: sha512-7Z6/y3uFI5PRoKeorTOSXKcDj0MSasfNNltcslbFrPpcw6aXRUALq4IfJlaTRspiWIUOEZbrpM+iQGmCOiWe4A==}
649
510
650
511
'@vueuse/core@12.8.2':
651
512
resolution: {integrity: sha512-HbvCmZdzAu3VGi/pWYm5Ut+Kd9mn1ZHnn4L5G8kOQTPs/IwIAmJoBrmYk2ckLArgMXZj0AW3n5CAejLUO+PhdQ==}
···
702
563
engines: {node: '>=0.4.0'}
703
564
hasBin: true
704
565
705
-
algoliasearch@5.35.0:
706
-
resolution: {integrity: sha512-Y+moNhsqgLmvJdgTsO4GZNgsaDWv8AOGAaPeIeHKlDn/XunoAqYbA+XNpBd1dW8GOXAUDyxC9Rxc7AV4kpFcIg==}
566
+
algoliasearch@5.46.0:
567
+
resolution: {integrity: sha512-7ML6fa2K93FIfifG3GMWhDEwT5qQzPTmoHKCTvhzGEwdbQ4n0yYUWZlLYT75WllTGJCJtNUI0C1ybN4BCegqvg==}
707
568
engines: {node: '>= 14.0.0'}
708
569
709
-
argparse@2.0.1:
710
-
resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
711
-
712
-
birpc@2.5.0:
713
-
resolution: {integrity: sha512-VSWO/W6nNQdyP520F1mhf+Lc2f8pjGQOtoHHm7Ze8Go1kX7akpVIrtTa0fn+HB0QJEDVacl6aO08YE0PgXfdnQ==}
570
+
birpc@2.9.0:
571
+
resolution: {integrity: sha512-KrayHS5pBi69Xi9JmvoqrIgYGDkD6mcSe/i6YKi3w5kekCLzrX4+nawcXqrj2tIp50Kw/mT/s3p+GVK0A0sKxw==}
714
572
715
573
ccount@2.0.1:
716
574
resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==}
···
721
579
character-entities-legacy@3.0.0:
722
580
resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==}
723
581
724
-
chevrotain-allstar@0.3.1:
725
-
resolution: {integrity: sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==}
726
-
peerDependencies:
727
-
chevrotain: ^11.0.0
728
-
729
-
chevrotain@11.0.3:
730
-
resolution: {integrity: sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==}
731
-
732
582
comma-separated-tokens@2.0.3:
733
583
resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==}
734
584
735
-
commander@7.2.0:
736
-
resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==}
737
-
engines: {node: '>= 10'}
738
-
739
-
commander@8.3.0:
740
-
resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==}
741
-
engines: {node: '>= 12'}
742
-
743
585
confbox@0.1.8:
744
586
resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==}
745
587
746
-
confbox@0.2.2:
747
-
resolution: {integrity: sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==}
588
+
copy-anything@4.0.5:
589
+
resolution: {integrity: sha512-7Vv6asjS4gMOuILabD3l739tsaxFQmC+a7pLZm02zyvs8p977bL3zEgq3yDk5rn9B0PbYgIv++jmHcuUab4RhA==}
590
+
engines: {node: '>=18'}
748
591
749
-
copy-anything@3.0.5:
750
-
resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==}
751
-
engines: {node: '>=12.13'}
752
-
753
-
cose-base@1.0.3:
754
-
resolution: {integrity: sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==}
755
-
756
-
cose-base@2.2.0:
757
-
resolution: {integrity: sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==}
758
-
759
-
csstype@3.1.3:
760
-
resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==}
761
-
762
-
cytoscape-cose-bilkent@4.1.0:
763
-
resolution: {integrity: sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==}
764
-
peerDependencies:
765
-
cytoscape: ^3.2.0
766
-
767
-
cytoscape-fcose@2.2.0:
768
-
resolution: {integrity: sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==}
769
-
peerDependencies:
770
-
cytoscape: ^3.2.0
771
-
772
-
cytoscape@3.33.1:
773
-
resolution: {integrity: sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==}
774
-
engines: {node: '>=0.10'}
775
-
776
-
d3-array@2.12.1:
777
-
resolution: {integrity: sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==}
778
-
779
-
d3-array@3.2.4:
780
-
resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==}
781
-
engines: {node: '>=12'}
782
-
783
-
d3-axis@3.0.0:
784
-
resolution: {integrity: sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==}
785
-
engines: {node: '>=12'}
786
-
787
-
d3-brush@3.0.0:
788
-
resolution: {integrity: sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==}
789
-
engines: {node: '>=12'}
790
-
791
-
d3-chord@3.0.1:
792
-
resolution: {integrity: sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==}
793
-
engines: {node: '>=12'}
794
-
795
-
d3-color@3.1.0:
796
-
resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==}
797
-
engines: {node: '>=12'}
798
-
799
-
d3-contour@4.0.2:
800
-
resolution: {integrity: sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==}
801
-
engines: {node: '>=12'}
802
-
803
-
d3-delaunay@6.0.4:
804
-
resolution: {integrity: sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==}
805
-
engines: {node: '>=12'}
806
-
807
-
d3-dispatch@3.0.1:
808
-
resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==}
809
-
engines: {node: '>=12'}
810
-
811
-
d3-drag@3.0.0:
812
-
resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==}
813
-
engines: {node: '>=12'}
814
-
815
-
d3-dsv@3.0.1:
816
-
resolution: {integrity: sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==}
817
-
engines: {node: '>=12'}
818
-
hasBin: true
819
-
820
-
d3-ease@3.0.1:
821
-
resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==}
822
-
engines: {node: '>=12'}
823
-
824
-
d3-fetch@3.0.1:
825
-
resolution: {integrity: sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==}
826
-
engines: {node: '>=12'}
827
-
828
-
d3-force@3.0.0:
829
-
resolution: {integrity: sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==}
830
-
engines: {node: '>=12'}
831
-
832
-
d3-format@3.1.0:
833
-
resolution: {integrity: sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==}
834
-
engines: {node: '>=12'}
835
-
836
-
d3-geo@3.1.1:
837
-
resolution: {integrity: sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==}
838
-
engines: {node: '>=12'}
839
-
840
-
d3-hierarchy@3.1.2:
841
-
resolution: {integrity: sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==}
842
-
engines: {node: '>=12'}
843
-
844
-
d3-interpolate@3.0.1:
845
-
resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==}
846
-
engines: {node: '>=12'}
847
-
848
-
d3-path@1.0.9:
849
-
resolution: {integrity: sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==}
850
-
851
-
d3-path@3.1.0:
852
-
resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==}
853
-
engines: {node: '>=12'}
854
-
855
-
d3-polygon@3.0.1:
856
-
resolution: {integrity: sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==}
857
-
engines: {node: '>=12'}
858
-
859
-
d3-quadtree@3.0.1:
860
-
resolution: {integrity: sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==}
861
-
engines: {node: '>=12'}
862
-
863
-
d3-random@3.0.1:
864
-
resolution: {integrity: sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==}
865
-
engines: {node: '>=12'}
866
-
867
-
d3-sankey@0.12.3:
868
-
resolution: {integrity: sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==}
869
-
870
-
d3-scale-chromatic@3.1.0:
871
-
resolution: {integrity: sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==}
872
-
engines: {node: '>=12'}
873
-
874
-
d3-scale@4.0.2:
875
-
resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==}
876
-
engines: {node: '>=12'}
877
-
878
-
d3-selection@3.0.0:
879
-
resolution: {integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==}
880
-
engines: {node: '>=12'}
881
-
882
-
d3-shape@1.3.7:
883
-
resolution: {integrity: sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==}
884
-
885
-
d3-shape@3.2.0:
886
-
resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==}
887
-
engines: {node: '>=12'}
888
-
889
-
d3-time-format@4.1.0:
890
-
resolution: {integrity: sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==}
891
-
engines: {node: '>=12'}
892
-
893
-
d3-time@3.1.0:
894
-
resolution: {integrity: sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==}
895
-
engines: {node: '>=12'}
896
-
897
-
d3-timer@3.0.1:
898
-
resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==}
899
-
engines: {node: '>=12'}
900
-
901
-
d3-transition@3.0.1:
902
-
resolution: {integrity: sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==}
903
-
engines: {node: '>=12'}
904
-
peerDependencies:
905
-
d3-selection: 2 - 3
906
-
907
-
d3-zoom@3.0.0:
908
-
resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==}
909
-
engines: {node: '>=12'}
910
-
911
-
d3@7.9.0:
912
-
resolution: {integrity: sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==}
913
-
engines: {node: '>=12'}
914
-
915
-
dagre-d3-es@7.0.11:
916
-
resolution: {integrity: sha512-tvlJLyQf834SylNKax8Wkzco/1ias1OPw8DcUMDE7oUIoSEW25riQVuiu/0OWEFqT0cxHT3Pa9/D82Jr47IONw==}
917
-
918
-
dayjs@1.11.18:
919
-
resolution: {integrity: sha512-zFBQ7WFRvVRhKcWoUh+ZA1g2HVgUbsZm9sbddh8EC5iv93sui8DVVz1Npvz+r6meo9VKfa8NyLWBsQK1VvIKPA==}
920
-
921
-
debug@4.4.1:
922
-
resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==}
923
-
engines: {node: '>=6.0'}
924
-
peerDependencies:
925
-
supports-color: '*'
926
-
peerDependenciesMeta:
927
-
supports-color:
928
-
optional: true
929
-
930
-
delaunator@5.0.1:
931
-
resolution: {integrity: sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==}
592
+
csstype@3.2.3:
593
+
resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==}
932
594
933
595
dequal@2.0.3:
934
596
resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==}
···
936
598
937
599
devlop@1.1.0:
938
600
resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==}
939
-
940
-
dompurify@3.2.6:
941
-
resolution: {integrity: sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ==}
942
601
943
602
emoji-regex-xs@1.0.0:
944
603
resolution: {integrity: sha512-LRlerrMYoIDrT6jgpeZ2YYl/L8EulRTt5hQcYjy5AInh7HWXKimpqx68aknBFpGL2+/IcogTcaydJEgaTmOpDg==}
945
604
946
-
entities@4.5.0:
947
-
resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==}
605
+
entities@7.0.0:
606
+
resolution: {integrity: sha512-FDWG5cmEYf2Z00IkYRhbFrwIwvdFKH07uV8dvNy0omp/Qb1xcyCWp2UDtcwJF4QZZvk0sLudP6/hAu42TaqVhQ==}
948
607
engines: {node: '>=0.12'}
949
608
950
609
esbuild@0.21.5:
···
955
614
estree-walker@2.0.2:
956
615
resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==}
957
616
958
-
exsolve@1.0.7:
959
-
resolution: {integrity: sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==}
960
-
961
-
focus-trap@7.6.5:
962
-
resolution: {integrity: sha512-7Ke1jyybbbPZyZXFxEftUtxFGLMpE2n6A+z//m4CRDlj0hW+o3iYSmh8nFlYMurOiJVDmJRilUQtJr08KfIxlg==}
617
+
focus-trap@7.6.6:
618
+
resolution: {integrity: sha512-v/Z8bvMCajtx4mEXmOo7QEsIzlIOqRXTIwgUfsFOF9gEsespdbD0AkPIka1bSXZ8Y8oZ+2IVDQZePkTfEHZl7Q==}
963
619
964
620
fsevents@2.3.3:
965
621
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
966
622
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
967
623
os: [darwin]
968
624
969
-
giscus@1.6.0:
970
-
resolution: {integrity: sha512-Zrsi8r4t1LVW950keaWcsURuZUQwUaMKjvJgTCY125vkW6OiEBkatE7ScJDbpqKHdZwb///7FVC21SE3iFK3PQ==}
971
-
972
-
globals@15.15.0:
973
-
resolution: {integrity: sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==}
974
-
engines: {node: '>=18'}
975
-
976
-
hachure-fill@0.5.2:
977
-
resolution: {integrity: sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==}
978
-
979
625
hast-util-to-html@9.0.5:
980
626
resolution: {integrity: sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==}
981
627
···
988
634
html-void-elements@3.0.0:
989
635
resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==}
990
636
991
-
iconv-lite@0.6.3:
992
-
resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
993
-
engines: {node: '>=0.10.0'}
994
-
995
-
internmap@1.0.1:
996
-
resolution: {integrity: sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==}
997
-
998
-
internmap@2.0.3:
999
-
resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==}
1000
-
engines: {node: '>=12'}
1001
-
1002
-
is-what@4.1.16:
1003
-
resolution: {integrity: sha512-ZhMwEosbFJkA0YhFnNDgTM4ZxDRsS6HqTo7qsZM08fehyRYIYa0yHu5R6mgo1n/8MgaPBXiPimPD77baVFYg+A==}
1004
-
engines: {node: '>=12.13'}
1005
-
1006
-
katex@0.16.22:
1007
-
resolution: {integrity: sha512-XCHRdUw4lf3SKBaJe4EvgqIuWwkPSo9XoeO8GjQW94Bp7TWv9hNhzZjZ+OH9yf1UmLygb7DIT5GSFQiyt16zYg==}
1008
-
hasBin: true
637
+
is-what@5.5.0:
638
+
resolution: {integrity: sha512-oG7cgbmg5kLYae2N5IVd3jm2s+vldjxJzK1pcu9LfpGuQ93MQSzo0okvRna+7y5ifrD+20FE8FvjusyGaz14fw==}
639
+
engines: {node: '>=18'}
1009
640
1010
-
khroma@2.1.0:
1011
-
resolution: {integrity: sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==}
1012
-
1013
-
kolorist@1.8.0:
1014
-
resolution: {integrity: sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==}
1015
-
1016
-
langium@3.3.1:
1017
-
resolution: {integrity: sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==}
1018
-
engines: {node: '>=16.0.0'}
1019
-
1020
-
layout-base@1.0.2:
1021
-
resolution: {integrity: sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==}
1022
-
1023
-
layout-base@2.0.1:
1024
-
resolution: {integrity: sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==}
1025
-
1026
-
linkify-it@5.0.0:
1027
-
resolution: {integrity: sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==}
1028
-
1029
-
lit-element@4.2.1:
1030
-
resolution: {integrity: sha512-WGAWRGzirAgyphK2urmYOV72tlvnxw7YfyLDgQ+OZnM9vQQBQnumQ7jUJe6unEzwGU3ahFOjuz1iz1jjrpCPuw==}
1031
-
1032
-
lit-html@3.3.1:
1033
-
resolution: {integrity: sha512-S9hbyDu/vs1qNrithiNyeyv64c9yqiW9l+DBgI18fL+MTvOtWoFR0FWiyq1TxaYef5wNlpEmzlXoBlZEO+WjoA==}
1034
-
1035
-
lit@3.3.1:
1036
-
resolution: {integrity: sha512-Ksr/8L3PTapbdXJCk+EJVB78jDodUMaP54gD24W186zGRARvwrsPfS60wae/SSCTCNZVPd1chXqio1qHQmu4NA==}
1037
-
1038
-
local-pkg@1.1.2:
1039
-
resolution: {integrity: sha512-arhlxbFRmoQHl33a0Zkle/YWlmNwoyt6QNZEIJcqNbdrsix5Lvc4HyyI3EnwxTYlZYc32EbYrQ8SzEZ7dqgg9A==}
1040
-
engines: {node: '>=14'}
1041
-
1042
-
lodash-es@4.17.21:
1043
-
resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==}
1044
-
1045
-
magic-string@0.30.18:
1046
-
resolution: {integrity: sha512-yi8swmWbO17qHhwIBNeeZxTceJMeBvWJaId6dyvTSOwTipqeHhMhOrz6513r1sOKnpvQ7zkhlG8tPrpilwTxHQ==}
641
+
magic-string@0.30.21:
642
+
resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==}
1047
643
1048
644
mark.js@8.11.1:
1049
645
resolution: {integrity: sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==}
···
1051
647
markdown-it-footnote@4.0.0:
1052
648
resolution: {integrity: sha512-WYJ7urf+khJYl3DqofQpYfEYkZKbmXmwxQV8c8mO/hGIhgZ1wOe7R4HLFNwqx7TjILbnC98fuyeSsin19JdFcQ==}
1053
649
1054
-
markdown-it@14.1.0:
1055
-
resolution: {integrity: sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==}
1056
-
hasBin: true
1057
-
1058
-
marked@15.0.12:
1059
-
resolution: {integrity: sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA==}
1060
-
engines: {node: '>= 18'}
1061
-
hasBin: true
1062
-
1063
-
mdast-util-to-hast@13.2.0:
1064
-
resolution: {integrity: sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==}
1065
-
1066
-
mdurl@2.0.0:
1067
-
resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==}
1068
-
1069
-
mermaid@11.11.0:
1070
-
resolution: {integrity: sha512-9lb/VNkZqWTRjVgCV+l1N+t4kyi94y+l5xrmBmbbxZYkfRl5hEDaTPMOcaWKCl1McG8nBEaMlWwkcAEEgjhBgg==}
650
+
mdast-util-to-hast@13.2.1:
651
+
resolution: {integrity: sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==}
1071
652
1072
653
micromark-util-character@2.1.1:
1073
654
resolution: {integrity: sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==}
···
1084
665
micromark-util-types@2.0.2:
1085
666
resolution: {integrity: sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==}
1086
667
1087
-
minisearch@7.1.2:
1088
-
resolution: {integrity: sha512-R1Pd9eF+MD5JYDDSPAp/q1ougKglm14uEkPMvQ/05RGmx6G9wvmLTrTI/Q5iPNJLYqNdsDQ7qTGIcNWR+FrHmA==}
668
+
minisearch@7.2.0:
669
+
resolution: {integrity: sha512-dqT2XBYUOZOiC5t2HRnwADjhNS2cecp9u+TJRiJ1Qp/f5qjkeT5APcGPjHw+bz89Ms8Jp+cG4AlE+QZ/QnDglg==}
1089
670
1090
671
mitt@3.0.1:
1091
672
resolution: {integrity: sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==}
···
1093
674
mlly@1.8.0:
1094
675
resolution: {integrity: sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==}
1095
676
1096
-
ms@2.1.3:
1097
-
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
1098
-
1099
677
nanoid@3.3.11:
1100
678
resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==}
1101
679
engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
1102
680
hasBin: true
1103
681
1104
-
non-layered-tidy-tree-layout@2.0.2:
1105
-
resolution: {integrity: sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==}
1106
-
1107
682
oniguruma-to-es@3.1.1:
1108
683
resolution: {integrity: sha512-bUH8SDvPkH3ho3dvwJwfonjlQ4R80vjyvrU8YpxuROddv55vAEJrTuCuCVUhhsHbtlD9tGGbaNApGQckXhS8iQ==}
1109
684
1110
-
package-manager-detector@1.3.0:
1111
-
resolution: {integrity: sha512-ZsEbbZORsyHuO00lY1kV3/t72yp6Ysay6Pd17ZAlNGuGwmWDLCJxFpRs0IzfXfj1o4icJOkUEioexFHzyPurSQ==}
1112
-
1113
-
path-data-parser@0.1.0:
1114
-
resolution: {integrity: sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==}
685
+
package-manager-detector@1.6.0:
686
+
resolution: {integrity: sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==}
1115
687
1116
688
pathe@2.0.3:
1117
689
resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==}
···
1124
696
1125
697
pkg-types@1.3.1:
1126
698
resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==}
1127
-
1128
-
pkg-types@2.3.0:
1129
-
resolution: {integrity: sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==}
1130
-
1131
-
points-on-curve@0.2.0:
1132
-
resolution: {integrity: sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==}
1133
-
1134
-
points-on-path@0.2.1:
1135
-
resolution: {integrity: sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==}
1136
699
1137
700
postcss@8.5.6:
1138
701
resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==}
1139
702
engines: {node: ^10 || ^12 || >=14}
1140
703
1141
-
preact@10.27.0:
1142
-
resolution: {integrity: sha512-/DTYoB6mwwgPytiqQTh/7SFRL98ZdiD8Sk8zIUVOxtwq4oWcwrcd1uno9fE/zZmUaUrFNYzbH14CPebOz9tZQw==}
704
+
preact@10.28.0:
705
+
resolution: {integrity: sha512-rytDAoiXr3+t6OIP3WGlDd0ouCUG1iCWzkcY3++Nreuoi17y6T5i/zRhe6uYfoVcxq6YU+sBtJouuRDsq8vvqA==}
1143
706
1144
707
property-information@7.1.0:
1145
708
resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==}
1146
709
1147
-
punycode.js@2.3.1:
1148
-
resolution: {integrity: sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==}
1149
-
engines: {node: '>=6'}
1150
-
1151
-
quansync@0.2.11:
1152
-
resolution: {integrity: sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==}
1153
-
1154
710
regex-recursion@6.0.2:
1155
711
resolution: {integrity: sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==}
1156
712
1157
713
regex-utilities@2.3.0:
1158
714
resolution: {integrity: sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==}
1159
715
1160
-
regex@6.0.1:
1161
-
resolution: {integrity: sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA==}
716
+
regex@6.1.0:
717
+
resolution: {integrity: sha512-6VwtthbV4o/7+OaAF9I5L5V3llLEsoPyq9P1JVXkedTP33c7MfCG0/5NOPcSJn0TzXcG9YUrR0gQSWioew3LDg==}
1162
718
1163
719
rfdc@1.4.1:
1164
720
resolution: {integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==}
1165
721
1166
-
robust-predicates@3.0.2:
1167
-
resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==}
1168
-
1169
-
rollup@4.46.2:
1170
-
resolution: {integrity: sha512-WMmLFI+Boh6xbop+OAGo9cQ3OgX9MIg7xOQjn+pTCwOkk+FNDAeAemXkJ3HzDJrVXleLOFVa1ipuc1AmEx1Dwg==}
722
+
rollup@4.53.5:
723
+
resolution: {integrity: sha512-iTNAbFSlRpcHeeWu73ywU/8KuU/LZmNCSxp6fjQkJBD3ivUb8tpDrXhIxEzA05HlYMEwmtaUnb3RP+YNv162OQ==}
1171
724
engines: {node: '>=18.0.0', npm: '>=8.0.0'}
1172
725
hasBin: true
1173
726
1174
-
roughjs@4.6.6:
1175
-
resolution: {integrity: sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==}
1176
-
1177
-
rw@1.3.3:
1178
-
resolution: {integrity: sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==}
1179
-
1180
-
safer-buffer@2.1.2:
1181
-
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
1182
-
1183
727
search-insights@2.17.3:
1184
728
resolution: {integrity: sha512-RQPdCYTa8A68uM2jwxoY842xDhvx3E5LFL1LxvxCNMev4o5mLuokczhzjAgGwUZBAmOKZknArSxLKmXtIi2AxQ==}
1185
729
···
1200
744
stringify-entities@4.0.4:
1201
745
resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==}
1202
746
1203
-
stylis@4.3.6:
1204
-
resolution: {integrity: sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==}
1205
-
1206
-
superjson@2.2.2:
1207
-
resolution: {integrity: sha512-5JRxVqC8I8NuOUjzBbvVJAKNM8qoVuH0O77h4WInc/qC2q5IreqKxYwgkga3PfA22OayK2ikceb/B26dztPl+Q==}
747
+
superjson@2.2.6:
748
+
resolution: {integrity: sha512-H+ue8Zo4vJmV2nRjpx86P35lzwDT3nItnIsocgumgr0hHMQ+ZGq5vrERg9kJBo5AWGmxZDhzDo+WVIJqkB0cGA==}
1208
749
engines: {node: '>=16'}
1209
750
1210
-
tabbable@6.2.0:
1211
-
resolution: {integrity: sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==}
751
+
tabbable@6.3.0:
752
+
resolution: {integrity: sha512-EIHvdY5bPLuWForiR/AN2Bxngzpuwn1is4asboytXtpTgsArc+WmSJKVLlhdh71u7jFcryDqB2A8lQvj78MkyQ==}
1212
753
1213
-
tinyexec@1.0.1:
1214
-
resolution: {integrity: sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==}
754
+
tinyexec@1.0.2:
755
+
resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==}
756
+
engines: {node: '>=18'}
1215
757
1216
758
trim-lines@3.0.1:
1217
759
resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==}
1218
760
1219
-
ts-dedent@2.2.0:
1220
-
resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==}
1221
-
engines: {node: '>=6.10'}
1222
-
1223
-
uc.micro@2.1.0:
1224
-
resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==}
1225
-
1226
761
ufo@1.6.1:
1227
762
resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==}
1228
763
1229
-
unist-util-is@6.0.0:
1230
-
resolution: {integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==}
764
+
unist-util-is@6.0.1:
765
+
resolution: {integrity: sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==}
1231
766
1232
767
unist-util-position@5.0.0:
1233
768
resolution: {integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==}
···
1235
770
unist-util-stringify-position@4.0.0:
1236
771
resolution: {integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==}
1237
772
1238
-
unist-util-visit-parents@6.0.1:
1239
-
resolution: {integrity: sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==}
773
+
unist-util-visit-parents@6.0.2:
774
+
resolution: {integrity: sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==}
1240
775
1241
776
unist-util-visit@5.0.0:
1242
777
resolution: {integrity: sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==}
1243
778
1244
-
uuid@11.1.0:
1245
-
resolution: {integrity: sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==}
1246
-
hasBin: true
1247
-
1248
779
vfile-message@4.0.3:
1249
780
resolution: {integrity: sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==}
1250
781
1251
782
vfile@6.0.3:
1252
783
resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==}
1253
784
1254
-
vite@5.4.19:
1255
-
resolution: {integrity: sha512-qO3aKv3HoQC8QKiNSTuUM1l9o/XX3+c+VTgLHbJWHZGeTPVAg2XwazI9UWzoxjIJCGCV2zU60uqMzjeLZuULqA==}
785
+
vite@5.4.21:
786
+
resolution: {integrity: sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==}
1256
787
engines: {node: ^18.0.0 || >=20.0.0}
1257
788
hasBin: true
1258
789
peerDependencies:
···
1282
813
terser:
1283
814
optional: true
1284
815
1285
-
vitepress-plugin-comment-with-giscus@1.1.15:
1286
-
resolution: {integrity: sha512-1DJjgN+7SYvn5ZkjuSXPmz7nlqfcrh4qCGGviiZghA2ELXnaO2m9WY7m+RisPSaqCn90xqe0JbO2T4NMq8iUBg==}
1287
-
1288
-
vitepress-plugin-group-icons@1.6.3:
1289
-
resolution: {integrity: sha512-bvPD4lhraLJw3rPtLhUIVsOvNfnHnF+F1LH7BKHekEzeZ4uqdTdqnwEyaT580AoKjjT6/F8En6hVJj7takPKDA==}
816
+
vitepress-plugin-group-icons@1.6.5:
817
+
resolution: {integrity: sha512-+pg4+GKDq2fLqKb1Sat5p1p4SuIZ5tEPxu8HjpwoeecZ/VaXKy6Bdf0wyjedjaTAyZQzXbvyavJegqAcQ+B0VA==}
1290
818
peerDependencies:
1291
-
markdown-it: '>=14'
1292
819
vite: '>=3'
1293
-
1294
-
vitepress-plugin-mermaid@2.0.17:
1295
-
resolution: {integrity: sha512-IUzYpwf61GC6k0XzfmAmNrLvMi9TRrVRMsUyCA8KNXhg/mQ1VqWnO0/tBVPiX5UoKF1mDUwqn5QV4qAJl6JnUg==}
1296
-
peerDependencies:
1297
-
mermaid: 10 || 11
1298
-
vitepress: ^1.0.0 || ^1.0.0-alpha
820
+
peerDependenciesMeta:
821
+
vite:
822
+
optional: true
1299
823
1300
824
vitepress@1.6.4:
1301
825
resolution: {integrity: sha512-+2ym1/+0VVrbhNyRoFFesVvBvHAVMZMK0rw60E3X/5349M1GuVdKeazuksqopEdvkKwKGs21Q729jX81/bkBJg==}
···
1309
833
postcss:
1310
834
optional: true
1311
835
1312
-
vscode-jsonrpc@8.2.0:
1313
-
resolution: {integrity: sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==}
1314
-
engines: {node: '>=14.0.0'}
1315
-
1316
-
vscode-languageserver-protocol@3.17.5:
1317
-
resolution: {integrity: sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==}
1318
-
1319
-
vscode-languageserver-textdocument@1.0.12:
1320
-
resolution: {integrity: sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==}
1321
-
1322
-
vscode-languageserver-types@3.17.5:
1323
-
resolution: {integrity: sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==}
1324
-
1325
-
vscode-languageserver@9.0.1:
1326
-
resolution: {integrity: sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==}
1327
-
hasBin: true
1328
-
1329
-
vscode-uri@3.0.8:
1330
-
resolution: {integrity: sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==}
1331
-
1332
-
vue@3.5.21:
1333
-
resolution: {integrity: sha512-xxf9rum9KtOdwdRkiApWL+9hZEMWE90FHh8yS1+KJAiWYh+iGWV1FquPjoO9VUHQ+VIhsCXNNyZ5Sf4++RVZBA==}
836
+
vue@3.5.26:
837
+
resolution: {integrity: sha512-SJ/NTccVyAoNUJmkM9KUqPcYlY+u8OVL1X5EW9RIs3ch5H2uERxyyIUI4MRxVCSOiEcupX9xNGde1tL9ZKpimA==}
1334
838
peerDependencies:
1335
839
typescript: '*'
1336
840
peerDependenciesMeta:
···
1342
846
1343
847
snapshots:
1344
848
1345
-
'@algolia/abtesting@1.1.0':
849
+
'@algolia/abtesting@1.12.0':
1346
850
dependencies:
1347
-
'@algolia/client-common': 5.35.0
1348
-
'@algolia/requester-browser-xhr': 5.35.0
1349
-
'@algolia/requester-fetch': 5.35.0
1350
-
'@algolia/requester-node-http': 5.35.0
851
+
'@algolia/client-common': 5.46.0
852
+
'@algolia/requester-browser-xhr': 5.46.0
853
+
'@algolia/requester-fetch': 5.46.0
854
+
'@algolia/requester-node-http': 5.46.0
1351
855
1352
-
'@algolia/autocomplete-core@1.17.7(@algolia/client-search@5.35.0)(algoliasearch@5.35.0)(search-insights@2.17.3)':
856
+
'@algolia/autocomplete-core@1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)(search-insights@2.17.3)':
1353
857
dependencies:
1354
-
'@algolia/autocomplete-plugin-algolia-insights': 1.17.7(@algolia/client-search@5.35.0)(algoliasearch@5.35.0)(search-insights@2.17.3)
1355
-
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.35.0)(algoliasearch@5.35.0)
858
+
'@algolia/autocomplete-plugin-algolia-insights': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)(search-insights@2.17.3)
859
+
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)
1356
860
transitivePeerDependencies:
1357
861
- '@algolia/client-search'
1358
862
- algoliasearch
1359
863
- search-insights
1360
864
1361
-
'@algolia/autocomplete-plugin-algolia-insights@1.17.7(@algolia/client-search@5.35.0)(algoliasearch@5.35.0)(search-insights@2.17.3)':
865
+
'@algolia/autocomplete-plugin-algolia-insights@1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)(search-insights@2.17.3)':
1362
866
dependencies:
1363
-
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.35.0)(algoliasearch@5.35.0)
867
+
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)
1364
868
search-insights: 2.17.3
1365
869
transitivePeerDependencies:
1366
870
- '@algolia/client-search'
1367
871
- algoliasearch
1368
872
1369
-
'@algolia/autocomplete-preset-algolia@1.17.7(@algolia/client-search@5.35.0)(algoliasearch@5.35.0)':
873
+
'@algolia/autocomplete-preset-algolia@1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)':
1370
874
dependencies:
1371
-
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.35.0)(algoliasearch@5.35.0)
1372
-
'@algolia/client-search': 5.35.0
1373
-
algoliasearch: 5.35.0
875
+
'@algolia/autocomplete-shared': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)
876
+
'@algolia/client-search': 5.46.0
877
+
algoliasearch: 5.46.0
1374
878
1375
-
'@algolia/autocomplete-shared@1.17.7(@algolia/client-search@5.35.0)(algoliasearch@5.35.0)':
879
+
'@algolia/autocomplete-shared@1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)':
1376
880
dependencies:
1377
-
'@algolia/client-search': 5.35.0
1378
-
algoliasearch: 5.35.0
881
+
'@algolia/client-search': 5.46.0
882
+
algoliasearch: 5.46.0
1379
883
1380
-
'@algolia/client-abtesting@5.35.0':
884
+
'@algolia/client-abtesting@5.46.0':
1381
885
dependencies:
1382
-
'@algolia/client-common': 5.35.0
1383
-
'@algolia/requester-browser-xhr': 5.35.0
1384
-
'@algolia/requester-fetch': 5.35.0
1385
-
'@algolia/requester-node-http': 5.35.0
886
+
'@algolia/client-common': 5.46.0
887
+
'@algolia/requester-browser-xhr': 5.46.0
888
+
'@algolia/requester-fetch': 5.46.0
889
+
'@algolia/requester-node-http': 5.46.0
1386
890
1387
-
'@algolia/client-analytics@5.35.0':
891
+
'@algolia/client-analytics@5.46.0':
1388
892
dependencies:
1389
-
'@algolia/client-common': 5.35.0
1390
-
'@algolia/requester-browser-xhr': 5.35.0
1391
-
'@algolia/requester-fetch': 5.35.0
1392
-
'@algolia/requester-node-http': 5.35.0
893
+
'@algolia/client-common': 5.46.0
894
+
'@algolia/requester-browser-xhr': 5.46.0
895
+
'@algolia/requester-fetch': 5.46.0
896
+
'@algolia/requester-node-http': 5.46.0
1393
897
1394
-
'@algolia/client-common@5.35.0': {}
898
+
'@algolia/client-common@5.46.0': {}
1395
899
1396
-
'@algolia/client-insights@5.35.0':
900
+
'@algolia/client-insights@5.46.0':
1397
901
dependencies:
1398
-
'@algolia/client-common': 5.35.0
1399
-
'@algolia/requester-browser-xhr': 5.35.0
1400
-
'@algolia/requester-fetch': 5.35.0
1401
-
'@algolia/requester-node-http': 5.35.0
902
+
'@algolia/client-common': 5.46.0
903
+
'@algolia/requester-browser-xhr': 5.46.0
904
+
'@algolia/requester-fetch': 5.46.0
905
+
'@algolia/requester-node-http': 5.46.0
1402
906
1403
-
'@algolia/client-personalization@5.35.0':
907
+
'@algolia/client-personalization@5.46.0':
1404
908
dependencies:
1405
-
'@algolia/client-common': 5.35.0
1406
-
'@algolia/requester-browser-xhr': 5.35.0
1407
-
'@algolia/requester-fetch': 5.35.0
1408
-
'@algolia/requester-node-http': 5.35.0
909
+
'@algolia/client-common': 5.46.0
910
+
'@algolia/requester-browser-xhr': 5.46.0
911
+
'@algolia/requester-fetch': 5.46.0
912
+
'@algolia/requester-node-http': 5.46.0
1409
913
1410
-
'@algolia/client-query-suggestions@5.35.0':
914
+
'@algolia/client-query-suggestions@5.46.0':
1411
915
dependencies:
1412
-
'@algolia/client-common': 5.35.0
1413
-
'@algolia/requester-browser-xhr': 5.35.0
1414
-
'@algolia/requester-fetch': 5.35.0
1415
-
'@algolia/requester-node-http': 5.35.0
916
+
'@algolia/client-common': 5.46.0
917
+
'@algolia/requester-browser-xhr': 5.46.0
918
+
'@algolia/requester-fetch': 5.46.0
919
+
'@algolia/requester-node-http': 5.46.0
1416
920
1417
-
'@algolia/client-search@5.35.0':
921
+
'@algolia/client-search@5.46.0':
1418
922
dependencies:
1419
-
'@algolia/client-common': 5.35.0
1420
-
'@algolia/requester-browser-xhr': 5.35.0
1421
-
'@algolia/requester-fetch': 5.35.0
1422
-
'@algolia/requester-node-http': 5.35.0
923
+
'@algolia/client-common': 5.46.0
924
+
'@algolia/requester-browser-xhr': 5.46.0
925
+
'@algolia/requester-fetch': 5.46.0
926
+
'@algolia/requester-node-http': 5.46.0
1423
927
1424
-
'@algolia/ingestion@1.35.0':
928
+
'@algolia/ingestion@1.46.0':
1425
929
dependencies:
1426
-
'@algolia/client-common': 5.35.0
1427
-
'@algolia/requester-browser-xhr': 5.35.0
1428
-
'@algolia/requester-fetch': 5.35.0
1429
-
'@algolia/requester-node-http': 5.35.0
930
+
'@algolia/client-common': 5.46.0
931
+
'@algolia/requester-browser-xhr': 5.46.0
932
+
'@algolia/requester-fetch': 5.46.0
933
+
'@algolia/requester-node-http': 5.46.0
1430
934
1431
-
'@algolia/monitoring@1.35.0':
935
+
'@algolia/monitoring@1.46.0':
1432
936
dependencies:
1433
-
'@algolia/client-common': 5.35.0
1434
-
'@algolia/requester-browser-xhr': 5.35.0
1435
-
'@algolia/requester-fetch': 5.35.0
1436
-
'@algolia/requester-node-http': 5.35.0
937
+
'@algolia/client-common': 5.46.0
938
+
'@algolia/requester-browser-xhr': 5.46.0
939
+
'@algolia/requester-fetch': 5.46.0
940
+
'@algolia/requester-node-http': 5.46.0
1437
941
1438
-
'@algolia/recommend@5.35.0':
942
+
'@algolia/recommend@5.46.0':
1439
943
dependencies:
1440
-
'@algolia/client-common': 5.35.0
1441
-
'@algolia/requester-browser-xhr': 5.35.0
1442
-
'@algolia/requester-fetch': 5.35.0
1443
-
'@algolia/requester-node-http': 5.35.0
944
+
'@algolia/client-common': 5.46.0
945
+
'@algolia/requester-browser-xhr': 5.46.0
946
+
'@algolia/requester-fetch': 5.46.0
947
+
'@algolia/requester-node-http': 5.46.0
1444
948
1445
-
'@algolia/requester-browser-xhr@5.35.0':
949
+
'@algolia/requester-browser-xhr@5.46.0':
1446
950
dependencies:
1447
-
'@algolia/client-common': 5.35.0
951
+
'@algolia/client-common': 5.46.0
1448
952
1449
-
'@algolia/requester-fetch@5.35.0':
953
+
'@algolia/requester-fetch@5.46.0':
1450
954
dependencies:
1451
-
'@algolia/client-common': 5.35.0
955
+
'@algolia/client-common': 5.46.0
1452
956
1453
-
'@algolia/requester-node-http@5.35.0':
957
+
'@algolia/requester-node-http@5.46.0':
1454
958
dependencies:
1455
-
'@algolia/client-common': 5.35.0
959
+
'@algolia/client-common': 5.46.0
1456
960
1457
961
'@antfu/install-pkg@1.1.0':
1458
962
dependencies:
1459
-
package-manager-detector: 1.3.0
1460
-
tinyexec: 1.0.1
1461
-
1462
-
'@antfu/utils@9.2.0': {}
963
+
package-manager-detector: 1.6.0
964
+
tinyexec: 1.0.2
1463
965
1464
966
'@babel/helper-string-parser@7.27.1': {}
1465
967
1466
-
'@babel/helper-validator-identifier@7.27.1': {}
968
+
'@babel/helper-validator-identifier@7.28.5': {}
1467
969
1468
-
'@babel/parser@7.28.3':
970
+
'@babel/parser@7.28.5':
1469
971
dependencies:
1470
-
'@babel/types': 7.28.2
972
+
'@babel/types': 7.28.5
1471
973
1472
-
'@babel/types@7.28.2':
974
+
'@babel/types@7.28.5':
1473
975
dependencies:
1474
976
'@babel/helper-string-parser': 7.27.1
1475
-
'@babel/helper-validator-identifier': 7.27.1
1476
-
1477
-
'@braintree/sanitize-url@6.0.4':
1478
-
optional: true
1479
-
1480
-
'@braintree/sanitize-url@7.1.1': {}
1481
-
1482
-
'@chevrotain/cst-dts-gen@11.0.3':
1483
-
dependencies:
1484
-
'@chevrotain/gast': 11.0.3
1485
-
'@chevrotain/types': 11.0.3
1486
-
lodash-es: 4.17.21
1487
-
1488
-
'@chevrotain/gast@11.0.3':
1489
-
dependencies:
1490
-
'@chevrotain/types': 11.0.3
1491
-
lodash-es: 4.17.21
1492
-
1493
-
'@chevrotain/regexp-to-ast@11.0.3': {}
1494
-
1495
-
'@chevrotain/types@11.0.3': {}
1496
-
1497
-
'@chevrotain/utils@11.0.3': {}
977
+
'@babel/helper-validator-identifier': 7.28.5
1498
978
1499
979
'@docsearch/css@3.8.2': {}
1500
980
1501
-
'@docsearch/js@3.8.2(@algolia/client-search@5.35.0)(search-insights@2.17.3)':
981
+
'@docsearch/js@3.8.2(@algolia/client-search@5.46.0)(search-insights@2.17.3)':
1502
982
dependencies:
1503
-
'@docsearch/react': 3.8.2(@algolia/client-search@5.35.0)(search-insights@2.17.3)
1504
-
preact: 10.27.0
983
+
'@docsearch/react': 3.8.2(@algolia/client-search@5.46.0)(search-insights@2.17.3)
984
+
preact: 10.28.0
1505
985
transitivePeerDependencies:
1506
986
- '@algolia/client-search'
1507
987
- '@types/react'
···
1509
989
- react-dom
1510
990
- search-insights
1511
991
1512
-
'@docsearch/react@3.8.2(@algolia/client-search@5.35.0)(search-insights@2.17.3)':
992
+
'@docsearch/react@3.8.2(@algolia/client-search@5.46.0)(search-insights@2.17.3)':
1513
993
dependencies:
1514
-
'@algolia/autocomplete-core': 1.17.7(@algolia/client-search@5.35.0)(algoliasearch@5.35.0)(search-insights@2.17.3)
1515
-
'@algolia/autocomplete-preset-algolia': 1.17.7(@algolia/client-search@5.35.0)(algoliasearch@5.35.0)
994
+
'@algolia/autocomplete-core': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)(search-insights@2.17.3)
995
+
'@algolia/autocomplete-preset-algolia': 1.17.7(@algolia/client-search@5.46.0)(algoliasearch@5.46.0)
1516
996
'@docsearch/css': 3.8.2
1517
-
algoliasearch: 5.35.0
997
+
algoliasearch: 5.46.0
1518
998
optionalDependencies:
1519
999
search-insights: 2.17.3
1520
1000
transitivePeerDependencies:
···
1589
1069
'@esbuild/win32-x64@0.21.5':
1590
1070
optional: true
1591
1071
1592
-
'@giscus/vue@2.4.0(vue@3.5.21)':
1593
-
dependencies:
1594
-
giscus: 1.6.0
1595
-
vue: 3.5.21
1596
-
1597
-
'@iconify-json/logos@1.2.9':
1072
+
'@iconify-json/logos@1.2.10':
1598
1073
dependencies:
1599
1074
'@iconify/types': 2.0.0
1600
1075
1601
-
'@iconify-json/simple-icons@1.2.47':
1076
+
'@iconify-json/simple-icons@1.2.63':
1602
1077
dependencies:
1603
1078
'@iconify/types': 2.0.0
1604
1079
1605
-
'@iconify-json/vscode-icons@1.2.29':
1080
+
'@iconify-json/vscode-icons@1.2.37':
1606
1081
dependencies:
1607
1082
'@iconify/types': 2.0.0
1608
1083
1609
1084
'@iconify/types@2.0.0': {}
1610
1085
1611
-
'@iconify/utils@3.0.1':
1086
+
'@iconify/utils@3.1.0':
1612
1087
dependencies:
1613
1088
'@antfu/install-pkg': 1.1.0
1614
-
'@antfu/utils': 9.2.0
1615
1089
'@iconify/types': 2.0.0
1616
-
debug: 4.4.1
1617
-
globals: 15.15.0
1618
-
kolorist: 1.8.0
1619
-
local-pkg: 1.1.2
1620
1090
mlly: 1.8.0
1621
-
transitivePeerDependencies:
1622
-
- supports-color
1623
1091
1624
1092
'@jridgewell/sourcemap-codec@1.5.5': {}
1625
1093
1626
-
'@lit-labs/ssr-dom-shim@1.4.0': {}
1627
-
1628
-
'@lit/reactive-element@2.1.1':
1629
-
dependencies:
1630
-
'@lit-labs/ssr-dom-shim': 1.4.0
1631
-
1632
-
'@mermaid-js/mermaid-mindmap@9.3.0':
1633
-
dependencies:
1634
-
'@braintree/sanitize-url': 6.0.4
1635
-
cytoscape: 3.33.1
1636
-
cytoscape-cose-bilkent: 4.1.0(cytoscape@3.33.1)
1637
-
cytoscape-fcose: 2.2.0(cytoscape@3.33.1)
1638
-
d3: 7.9.0
1639
-
khroma: 2.1.0
1640
-
non-layered-tidy-tree-layout: 2.0.2
1094
+
'@rollup/rollup-android-arm-eabi@4.53.5':
1641
1095
optional: true
1642
1096
1643
-
'@mermaid-js/parser@0.6.2':
1644
-
dependencies:
1645
-
langium: 3.3.1
1097
+
'@rollup/rollup-android-arm64@4.53.5':
1098
+
optional: true
1646
1099
1647
-
'@rollup/rollup-android-arm-eabi@4.46.2':
1100
+
'@rollup/rollup-darwin-arm64@4.53.5':
1648
1101
optional: true
1649
1102
1650
-
'@rollup/rollup-android-arm64@4.46.2':
1103
+
'@rollup/rollup-darwin-x64@4.53.5':
1651
1104
optional: true
1652
1105
1653
-
'@rollup/rollup-darwin-arm64@4.46.2':
1106
+
'@rollup/rollup-freebsd-arm64@4.53.5':
1654
1107
optional: true
1655
1108
1656
-
'@rollup/rollup-darwin-x64@4.46.2':
1109
+
'@rollup/rollup-freebsd-x64@4.53.5':
1657
1110
optional: true
1658
1111
1659
-
'@rollup/rollup-freebsd-arm64@4.46.2':
1112
+
'@rollup/rollup-linux-arm-gnueabihf@4.53.5':
1660
1113
optional: true
1661
1114
1662
-
'@rollup/rollup-freebsd-x64@4.46.2':
1115
+
'@rollup/rollup-linux-arm-musleabihf@4.53.5':
1663
1116
optional: true
1664
1117
1665
-
'@rollup/rollup-linux-arm-gnueabihf@4.46.2':
1118
+
'@rollup/rollup-linux-arm64-gnu@4.53.5':
1666
1119
optional: true
1667
1120
1668
-
'@rollup/rollup-linux-arm-musleabihf@4.46.2':
1121
+
'@rollup/rollup-linux-arm64-musl@4.53.5':
1669
1122
optional: true
1670
1123
1671
-
'@rollup/rollup-linux-arm64-gnu@4.46.2':
1124
+
'@rollup/rollup-linux-loong64-gnu@4.53.5':
1672
1125
optional: true
1673
1126
1674
-
'@rollup/rollup-linux-arm64-musl@4.46.2':
1127
+
'@rollup/rollup-linux-ppc64-gnu@4.53.5':
1675
1128
optional: true
1676
1129
1677
-
'@rollup/rollup-linux-loongarch64-gnu@4.46.2':
1130
+
'@rollup/rollup-linux-riscv64-gnu@4.53.5':
1678
1131
optional: true
1679
1132
1680
-
'@rollup/rollup-linux-ppc64-gnu@4.46.2':
1133
+
'@rollup/rollup-linux-riscv64-musl@4.53.5':
1681
1134
optional: true
1682
1135
1683
-
'@rollup/rollup-linux-riscv64-gnu@4.46.2':
1136
+
'@rollup/rollup-linux-s390x-gnu@4.53.5':
1684
1137
optional: true
1685
1138
1686
-
'@rollup/rollup-linux-riscv64-musl@4.46.2':
1139
+
'@rollup/rollup-linux-x64-gnu@4.53.5':
1687
1140
optional: true
1688
1141
1689
-
'@rollup/rollup-linux-s390x-gnu@4.46.2':
1142
+
'@rollup/rollup-linux-x64-musl@4.53.5':
1690
1143
optional: true
1691
1144
1692
-
'@rollup/rollup-linux-x64-gnu@4.46.2':
1145
+
'@rollup/rollup-openharmony-arm64@4.53.5':
1693
1146
optional: true
1694
1147
1695
-
'@rollup/rollup-linux-x64-musl@4.46.2':
1148
+
'@rollup/rollup-win32-arm64-msvc@4.53.5':
1696
1149
optional: true
1697
1150
1698
-
'@rollup/rollup-win32-arm64-msvc@4.46.2':
1151
+
'@rollup/rollup-win32-ia32-msvc@4.53.5':
1699
1152
optional: true
1700
1153
1701
-
'@rollup/rollup-win32-ia32-msvc@4.46.2':
1154
+
'@rollup/rollup-win32-x64-gnu@4.53.5':
1702
1155
optional: true
1703
1156
1704
-
'@rollup/rollup-win32-x64-msvc@4.46.2':
1157
+
'@rollup/rollup-win32-x64-msvc@4.53.5':
1705
1158
optional: true
1706
1159
1707
1160
'@shikijs/core@2.5.0':
···
1744
1197
1745
1198
'@shikijs/vscode-textmate@10.0.2': {}
1746
1199
1747
-
'@types/d3-array@3.2.1': {}
1748
-
1749
-
'@types/d3-axis@3.0.6':
1750
-
dependencies:
1751
-
'@types/d3-selection': 3.0.11
1752
-
1753
-
'@types/d3-brush@3.0.6':
1754
-
dependencies:
1755
-
'@types/d3-selection': 3.0.11
1756
-
1757
-
'@types/d3-chord@3.0.6': {}
1758
-
1759
-
'@types/d3-color@3.1.3': {}
1760
-
1761
-
'@types/d3-contour@3.0.6':
1762
-
dependencies:
1763
-
'@types/d3-array': 3.2.1
1764
-
'@types/geojson': 7946.0.16
1765
-
1766
-
'@types/d3-delaunay@6.0.4': {}
1767
-
1768
-
'@types/d3-dispatch@3.0.7': {}
1769
-
1770
-
'@types/d3-drag@3.0.7':
1771
-
dependencies:
1772
-
'@types/d3-selection': 3.0.11
1773
-
1774
-
'@types/d3-dsv@3.0.7': {}
1775
-
1776
-
'@types/d3-ease@3.0.2': {}
1777
-
1778
-
'@types/d3-fetch@3.0.7':
1779
-
dependencies:
1780
-
'@types/d3-dsv': 3.0.7
1781
-
1782
-
'@types/d3-force@3.0.10': {}
1783
-
1784
-
'@types/d3-format@3.0.4': {}
1785
-
1786
-
'@types/d3-geo@3.1.0':
1787
-
dependencies:
1788
-
'@types/geojson': 7946.0.16
1789
-
1790
-
'@types/d3-hierarchy@3.1.7': {}
1791
-
1792
-
'@types/d3-interpolate@3.0.4':
1793
-
dependencies:
1794
-
'@types/d3-color': 3.1.3
1795
-
1796
-
'@types/d3-path@3.1.1': {}
1797
-
1798
-
'@types/d3-polygon@3.0.2': {}
1799
-
1800
-
'@types/d3-quadtree@3.0.6': {}
1801
-
1802
-
'@types/d3-random@3.0.3': {}
1803
-
1804
-
'@types/d3-scale-chromatic@3.1.0': {}
1805
-
1806
-
'@types/d3-scale@4.0.9':
1807
-
dependencies:
1808
-
'@types/d3-time': 3.0.4
1809
-
1810
-
'@types/d3-selection@3.0.11': {}
1811
-
1812
-
'@types/d3-shape@3.1.7':
1813
-
dependencies:
1814
-
'@types/d3-path': 3.1.1
1815
-
1816
-
'@types/d3-time-format@4.0.3': {}
1817
-
1818
-
'@types/d3-time@3.0.4': {}
1819
-
1820
-
'@types/d3-timer@3.0.2': {}
1821
-
1822
-
'@types/d3-transition@3.0.9':
1823
-
dependencies:
1824
-
'@types/d3-selection': 3.0.11
1825
-
1826
-
'@types/d3-zoom@3.0.8':
1827
-
dependencies:
1828
-
'@types/d3-interpolate': 3.0.4
1829
-
'@types/d3-selection': 3.0.11
1830
-
1831
-
'@types/d3@7.4.3':
1832
-
dependencies:
1833
-
'@types/d3-array': 3.2.1
1834
-
'@types/d3-axis': 3.0.6
1835
-
'@types/d3-brush': 3.0.6
1836
-
'@types/d3-chord': 3.0.6
1837
-
'@types/d3-color': 3.1.3
1838
-
'@types/d3-contour': 3.0.6
1839
-
'@types/d3-delaunay': 6.0.4
1840
-
'@types/d3-dispatch': 3.0.7
1841
-
'@types/d3-drag': 3.0.7
1842
-
'@types/d3-dsv': 3.0.7
1843
-
'@types/d3-ease': 3.0.2
1844
-
'@types/d3-fetch': 3.0.7
1845
-
'@types/d3-force': 3.0.10
1846
-
'@types/d3-format': 3.0.4
1847
-
'@types/d3-geo': 3.1.0
1848
-
'@types/d3-hierarchy': 3.1.7
1849
-
'@types/d3-interpolate': 3.0.4
1850
-
'@types/d3-path': 3.1.1
1851
-
'@types/d3-polygon': 3.0.2
1852
-
'@types/d3-quadtree': 3.0.6
1853
-
'@types/d3-random': 3.0.3
1854
-
'@types/d3-scale': 4.0.9
1855
-
'@types/d3-scale-chromatic': 3.1.0
1856
-
'@types/d3-selection': 3.0.11
1857
-
'@types/d3-shape': 3.1.7
1858
-
'@types/d3-time': 3.0.4
1859
-
'@types/d3-time-format': 4.0.3
1860
-
'@types/d3-timer': 3.0.2
1861
-
'@types/d3-transition': 3.0.9
1862
-
'@types/d3-zoom': 3.0.8
1863
-
1864
1200
'@types/estree@1.0.8': {}
1865
-
1866
-
'@types/geojson@7946.0.16': {}
1867
1201
1868
1202
'@types/hast@3.0.4':
1869
1203
dependencies:
···
1882
1216
1883
1217
'@types/mdurl@2.0.0': {}
1884
1218
1885
-
'@types/trusted-types@2.0.7': {}
1886
-
1887
1219
'@types/unist@3.0.3': {}
1888
1220
1889
1221
'@types/web-bluetooth@0.0.21': {}
1890
1222
1891
1223
'@ungap/structured-clone@1.3.0': {}
1892
1224
1893
-
'@vitejs/plugin-vue@5.2.4(vite@5.4.19)(vue@3.5.21)':
1225
+
'@vitejs/plugin-vue@5.2.4(vite@5.4.21)(vue@3.5.26)':
1894
1226
dependencies:
1895
-
vite: 5.4.19
1896
-
vue: 3.5.21
1227
+
vite: 5.4.21
1228
+
vue: 3.5.26
1897
1229
1898
-
'@vue/compiler-core@3.5.21':
1230
+
'@vue/compiler-core@3.5.26':
1899
1231
dependencies:
1900
-
'@babel/parser': 7.28.3
1901
-
'@vue/shared': 3.5.21
1902
-
entities: 4.5.0
1232
+
'@babel/parser': 7.28.5
1233
+
'@vue/shared': 3.5.26
1234
+
entities: 7.0.0
1903
1235
estree-walker: 2.0.2
1904
1236
source-map-js: 1.2.1
1905
1237
1906
-
'@vue/compiler-dom@3.5.21':
1238
+
'@vue/compiler-dom@3.5.26':
1907
1239
dependencies:
1908
-
'@vue/compiler-core': 3.5.21
1909
-
'@vue/shared': 3.5.21
1240
+
'@vue/compiler-core': 3.5.26
1241
+
'@vue/shared': 3.5.26
1910
1242
1911
-
'@vue/compiler-sfc@3.5.21':
1243
+
'@vue/compiler-sfc@3.5.26':
1912
1244
dependencies:
1913
-
'@babel/parser': 7.28.3
1914
-
'@vue/compiler-core': 3.5.21
1915
-
'@vue/compiler-dom': 3.5.21
1916
-
'@vue/compiler-ssr': 3.5.21
1917
-
'@vue/shared': 3.5.21
1245
+
'@babel/parser': 7.28.5
1246
+
'@vue/compiler-core': 3.5.26
1247
+
'@vue/compiler-dom': 3.5.26
1248
+
'@vue/compiler-ssr': 3.5.26
1249
+
'@vue/shared': 3.5.26
1918
1250
estree-walker: 2.0.2
1919
-
magic-string: 0.30.18
1251
+
magic-string: 0.30.21
1920
1252
postcss: 8.5.6
1921
1253
source-map-js: 1.2.1
1922
1254
1923
-
'@vue/compiler-ssr@3.5.21':
1255
+
'@vue/compiler-ssr@3.5.26':
1924
1256
dependencies:
1925
-
'@vue/compiler-dom': 3.5.21
1926
-
'@vue/shared': 3.5.21
1257
+
'@vue/compiler-dom': 3.5.26
1258
+
'@vue/shared': 3.5.26
1927
1259
1928
-
'@vue/devtools-api@7.7.7':
1260
+
'@vue/devtools-api@7.7.9':
1929
1261
dependencies:
1930
-
'@vue/devtools-kit': 7.7.7
1262
+
'@vue/devtools-kit': 7.7.9
1931
1263
1932
-
'@vue/devtools-kit@7.7.7':
1264
+
'@vue/devtools-kit@7.7.9':
1933
1265
dependencies:
1934
-
'@vue/devtools-shared': 7.7.7
1935
-
birpc: 2.5.0
1266
+
'@vue/devtools-shared': 7.7.9
1267
+
birpc: 2.9.0
1936
1268
hookable: 5.5.3
1937
1269
mitt: 3.0.1
1938
1270
perfect-debounce: 1.0.0
1939
1271
speakingurl: 14.0.1
1940
-
superjson: 2.2.2
1272
+
superjson: 2.2.6
1941
1273
1942
-
'@vue/devtools-shared@7.7.7':
1274
+
'@vue/devtools-shared@7.7.9':
1943
1275
dependencies:
1944
1276
rfdc: 1.4.1
1945
1277
1946
-
'@vue/reactivity@3.5.21':
1278
+
'@vue/reactivity@3.5.26':
1947
1279
dependencies:
1948
-
'@vue/shared': 3.5.21
1280
+
'@vue/shared': 3.5.26
1949
1281
1950
-
'@vue/runtime-core@3.5.21':
1282
+
'@vue/runtime-core@3.5.26':
1951
1283
dependencies:
1952
-
'@vue/reactivity': 3.5.21
1953
-
'@vue/shared': 3.5.21
1284
+
'@vue/reactivity': 3.5.26
1285
+
'@vue/shared': 3.5.26
1954
1286
1955
-
'@vue/runtime-dom@3.5.21':
1287
+
'@vue/runtime-dom@3.5.26':
1956
1288
dependencies:
1957
-
'@vue/reactivity': 3.5.21
1958
-
'@vue/runtime-core': 3.5.21
1959
-
'@vue/shared': 3.5.21
1960
-
csstype: 3.1.3
1289
+
'@vue/reactivity': 3.5.26
1290
+
'@vue/runtime-core': 3.5.26
1291
+
'@vue/shared': 3.5.26
1292
+
csstype: 3.2.3
1961
1293
1962
-
'@vue/server-renderer@3.5.21(vue@3.5.21)':
1294
+
'@vue/server-renderer@3.5.26(vue@3.5.26)':
1963
1295
dependencies:
1964
-
'@vue/compiler-ssr': 3.5.21
1965
-
'@vue/shared': 3.5.21
1966
-
vue: 3.5.21
1296
+
'@vue/compiler-ssr': 3.5.26
1297
+
'@vue/shared': 3.5.26
1298
+
vue: 3.5.26
1967
1299
1968
-
'@vue/shared@3.5.18': {}
1300
+
'@vue/shared@3.5.25': {}
1969
1301
1970
-
'@vue/shared@3.5.21': {}
1302
+
'@vue/shared@3.5.26': {}
1971
1303
1972
1304
'@vueuse/core@12.8.2':
1973
1305
dependencies:
1974
1306
'@types/web-bluetooth': 0.0.21
1975
1307
'@vueuse/metadata': 12.8.2
1976
1308
'@vueuse/shared': 12.8.2
1977
-
vue: 3.5.21
1309
+
vue: 3.5.26
1978
1310
transitivePeerDependencies:
1979
1311
- typescript
1980
1312
1981
-
'@vueuse/integrations@12.8.2(focus-trap@7.6.5)':
1313
+
'@vueuse/integrations@12.8.2(focus-trap@7.6.6)':
1982
1314
dependencies:
1983
1315
'@vueuse/core': 12.8.2
1984
1316
'@vueuse/shared': 12.8.2
1985
-
vue: 3.5.21
1317
+
vue: 3.5.26
1986
1318
optionalDependencies:
1987
-
focus-trap: 7.6.5
1319
+
focus-trap: 7.6.6
1988
1320
transitivePeerDependencies:
1989
1321
- typescript
1990
1322
···
1992
1324
1993
1325
'@vueuse/shared@12.8.2':
1994
1326
dependencies:
1995
-
vue: 3.5.21
1327
+
vue: 3.5.26
1996
1328
transitivePeerDependencies:
1997
1329
- typescript
1998
1330
1999
1331
acorn@8.15.0: {}
2000
1332
2001
-
algoliasearch@5.35.0:
1333
+
algoliasearch@5.46.0:
2002
1334
dependencies:
2003
-
'@algolia/abtesting': 1.1.0
2004
-
'@algolia/client-abtesting': 5.35.0
2005
-
'@algolia/client-analytics': 5.35.0
2006
-
'@algolia/client-common': 5.35.0
2007
-
'@algolia/client-insights': 5.35.0
2008
-
'@algolia/client-personalization': 5.35.0
2009
-
'@algolia/client-query-suggestions': 5.35.0
2010
-
'@algolia/client-search': 5.35.0
2011
-
'@algolia/ingestion': 1.35.0
2012
-
'@algolia/monitoring': 1.35.0
2013
-
'@algolia/recommend': 5.35.0
2014
-
'@algolia/requester-browser-xhr': 5.35.0
2015
-
'@algolia/requester-fetch': 5.35.0
2016
-
'@algolia/requester-node-http': 5.35.0
2017
-
2018
-
argparse@2.0.1: {}
1335
+
'@algolia/abtesting': 1.12.0
1336
+
'@algolia/client-abtesting': 5.46.0
1337
+
'@algolia/client-analytics': 5.46.0
1338
+
'@algolia/client-common': 5.46.0
1339
+
'@algolia/client-insights': 5.46.0
1340
+
'@algolia/client-personalization': 5.46.0
1341
+
'@algolia/client-query-suggestions': 5.46.0
1342
+
'@algolia/client-search': 5.46.0
1343
+
'@algolia/ingestion': 1.46.0
1344
+
'@algolia/monitoring': 1.46.0
1345
+
'@algolia/recommend': 5.46.0
1346
+
'@algolia/requester-browser-xhr': 5.46.0
1347
+
'@algolia/requester-fetch': 5.46.0
1348
+
'@algolia/requester-node-http': 5.46.0
2019
1349
2020
-
birpc@2.5.0: {}
1350
+
birpc@2.9.0: {}
2021
1351
2022
1352
ccount@2.0.1: {}
2023
1353
···
2025
1355
2026
1356
character-entities-legacy@3.0.0: {}
2027
1357
2028
-
chevrotain-allstar@0.3.1(chevrotain@11.0.3):
2029
-
dependencies:
2030
-
chevrotain: 11.0.3
2031
-
lodash-es: 4.17.21
2032
-
2033
-
chevrotain@11.0.3:
2034
-
dependencies:
2035
-
'@chevrotain/cst-dts-gen': 11.0.3
2036
-
'@chevrotain/gast': 11.0.3
2037
-
'@chevrotain/regexp-to-ast': 11.0.3
2038
-
'@chevrotain/types': 11.0.3
2039
-
'@chevrotain/utils': 11.0.3
2040
-
lodash-es: 4.17.21
2041
-
2042
1358
comma-separated-tokens@2.0.3: {}
2043
-
2044
-
commander@7.2.0: {}
2045
-
2046
-
commander@8.3.0: {}
2047
1359
2048
1360
confbox@0.1.8: {}
2049
1361
2050
-
confbox@0.2.2: {}
2051
-
2052
-
copy-anything@3.0.5:
2053
-
dependencies:
2054
-
is-what: 4.1.16
2055
-
2056
-
cose-base@1.0.3:
2057
-
dependencies:
2058
-
layout-base: 1.0.2
2059
-
2060
-
cose-base@2.2.0:
2061
-
dependencies:
2062
-
layout-base: 2.0.1
2063
-
2064
-
csstype@3.1.3: {}
2065
-
2066
-
cytoscape-cose-bilkent@4.1.0(cytoscape@3.33.1):
2067
-
dependencies:
2068
-
cose-base: 1.0.3
2069
-
cytoscape: 3.33.1
2070
-
2071
-
cytoscape-fcose@2.2.0(cytoscape@3.33.1):
2072
-
dependencies:
2073
-
cose-base: 2.2.0
2074
-
cytoscape: 3.33.1
2075
-
2076
-
cytoscape@3.33.1: {}
2077
-
2078
-
d3-array@2.12.1:
2079
-
dependencies:
2080
-
internmap: 1.0.1
2081
-
2082
-
d3-array@3.2.4:
2083
-
dependencies:
2084
-
internmap: 2.0.3
2085
-
2086
-
d3-axis@3.0.0: {}
2087
-
2088
-
d3-brush@3.0.0:
2089
-
dependencies:
2090
-
d3-dispatch: 3.0.1
2091
-
d3-drag: 3.0.0
2092
-
d3-interpolate: 3.0.1
2093
-
d3-selection: 3.0.0
2094
-
d3-transition: 3.0.1(d3-selection@3.0.0)
2095
-
2096
-
d3-chord@3.0.1:
2097
-
dependencies:
2098
-
d3-path: 3.1.0
2099
-
2100
-
d3-color@3.1.0: {}
2101
-
2102
-
d3-contour@4.0.2:
2103
-
dependencies:
2104
-
d3-array: 3.2.4
2105
-
2106
-
d3-delaunay@6.0.4:
2107
-
dependencies:
2108
-
delaunator: 5.0.1
2109
-
2110
-
d3-dispatch@3.0.1: {}
2111
-
2112
-
d3-drag@3.0.0:
2113
-
dependencies:
2114
-
d3-dispatch: 3.0.1
2115
-
d3-selection: 3.0.0
2116
-
2117
-
d3-dsv@3.0.1:
2118
-
dependencies:
2119
-
commander: 7.2.0
2120
-
iconv-lite: 0.6.3
2121
-
rw: 1.3.3
2122
-
2123
-
d3-ease@3.0.1: {}
2124
-
2125
-
d3-fetch@3.0.1:
2126
-
dependencies:
2127
-
d3-dsv: 3.0.1
2128
-
2129
-
d3-force@3.0.0:
2130
-
dependencies:
2131
-
d3-dispatch: 3.0.1
2132
-
d3-quadtree: 3.0.1
2133
-
d3-timer: 3.0.1
2134
-
2135
-
d3-format@3.1.0: {}
2136
-
2137
-
d3-geo@3.1.1:
2138
-
dependencies:
2139
-
d3-array: 3.2.4
2140
-
2141
-
d3-hierarchy@3.1.2: {}
2142
-
2143
-
d3-interpolate@3.0.1:
2144
-
dependencies:
2145
-
d3-color: 3.1.0
2146
-
2147
-
d3-path@1.0.9: {}
2148
-
2149
-
d3-path@3.1.0: {}
2150
-
2151
-
d3-polygon@3.0.1: {}
2152
-
2153
-
d3-quadtree@3.0.1: {}
2154
-
2155
-
d3-random@3.0.1: {}
2156
-
2157
-
d3-sankey@0.12.3:
2158
-
dependencies:
2159
-
d3-array: 2.12.1
2160
-
d3-shape: 1.3.7
2161
-
2162
-
d3-scale-chromatic@3.1.0:
2163
-
dependencies:
2164
-
d3-color: 3.1.0
2165
-
d3-interpolate: 3.0.1
2166
-
2167
-
d3-scale@4.0.2:
2168
-
dependencies:
2169
-
d3-array: 3.2.4
2170
-
d3-format: 3.1.0
2171
-
d3-interpolate: 3.0.1
2172
-
d3-time: 3.1.0
2173
-
d3-time-format: 4.1.0
2174
-
2175
-
d3-selection@3.0.0: {}
2176
-
2177
-
d3-shape@1.3.7:
2178
-
dependencies:
2179
-
d3-path: 1.0.9
2180
-
2181
-
d3-shape@3.2.0:
2182
-
dependencies:
2183
-
d3-path: 3.1.0
2184
-
2185
-
d3-time-format@4.1.0:
2186
-
dependencies:
2187
-
d3-time: 3.1.0
2188
-
2189
-
d3-time@3.1.0:
2190
-
dependencies:
2191
-
d3-array: 3.2.4
2192
-
2193
-
d3-timer@3.0.1: {}
2194
-
2195
-
d3-transition@3.0.1(d3-selection@3.0.0):
1362
+
copy-anything@4.0.5:
2196
1363
dependencies:
2197
-
d3-color: 3.1.0
2198
-
d3-dispatch: 3.0.1
2199
-
d3-ease: 3.0.1
2200
-
d3-interpolate: 3.0.1
2201
-
d3-selection: 3.0.0
2202
-
d3-timer: 3.0.1
1364
+
is-what: 5.5.0
2203
1365
2204
-
d3-zoom@3.0.0:
2205
-
dependencies:
2206
-
d3-dispatch: 3.0.1
2207
-
d3-drag: 3.0.0
2208
-
d3-interpolate: 3.0.1
2209
-
d3-selection: 3.0.0
2210
-
d3-transition: 3.0.1(d3-selection@3.0.0)
2211
-
2212
-
d3@7.9.0:
2213
-
dependencies:
2214
-
d3-array: 3.2.4
2215
-
d3-axis: 3.0.0
2216
-
d3-brush: 3.0.0
2217
-
d3-chord: 3.0.1
2218
-
d3-color: 3.1.0
2219
-
d3-contour: 4.0.2
2220
-
d3-delaunay: 6.0.4
2221
-
d3-dispatch: 3.0.1
2222
-
d3-drag: 3.0.0
2223
-
d3-dsv: 3.0.1
2224
-
d3-ease: 3.0.1
2225
-
d3-fetch: 3.0.1
2226
-
d3-force: 3.0.0
2227
-
d3-format: 3.1.0
2228
-
d3-geo: 3.1.1
2229
-
d3-hierarchy: 3.1.2
2230
-
d3-interpolate: 3.0.1
2231
-
d3-path: 3.1.0
2232
-
d3-polygon: 3.0.1
2233
-
d3-quadtree: 3.0.1
2234
-
d3-random: 3.0.1
2235
-
d3-scale: 4.0.2
2236
-
d3-scale-chromatic: 3.1.0
2237
-
d3-selection: 3.0.0
2238
-
d3-shape: 3.2.0
2239
-
d3-time: 3.1.0
2240
-
d3-time-format: 4.1.0
2241
-
d3-timer: 3.0.1
2242
-
d3-transition: 3.0.1(d3-selection@3.0.0)
2243
-
d3-zoom: 3.0.0
2244
-
2245
-
dagre-d3-es@7.0.11:
2246
-
dependencies:
2247
-
d3: 7.9.0
2248
-
lodash-es: 4.17.21
2249
-
2250
-
dayjs@1.11.18: {}
2251
-
2252
-
debug@4.4.1:
2253
-
dependencies:
2254
-
ms: 2.1.3
2255
-
2256
-
delaunator@5.0.1:
2257
-
dependencies:
2258
-
robust-predicates: 3.0.2
1366
+
csstype@3.2.3: {}
2259
1367
2260
1368
dequal@2.0.3: {}
2261
1369
···
2263
1371
dependencies:
2264
1372
dequal: 2.0.3
2265
1373
2266
-
dompurify@3.2.6:
2267
-
optionalDependencies:
2268
-
'@types/trusted-types': 2.0.7
2269
-
2270
1374
emoji-regex-xs@1.0.0: {}
2271
1375
2272
-
entities@4.5.0: {}
1376
+
entities@7.0.0: {}
2273
1377
2274
1378
esbuild@0.21.5:
2275
1379
optionalDependencies:
···
2299
1403
2300
1404
estree-walker@2.0.2: {}
2301
1405
2302
-
exsolve@1.0.7: {}
2303
-
2304
-
focus-trap@7.6.5:
1406
+
focus-trap@7.6.6:
2305
1407
dependencies:
2306
-
tabbable: 6.2.0
1408
+
tabbable: 6.3.0
2307
1409
2308
1410
fsevents@2.3.3:
2309
1411
optional: true
2310
1412
2311
-
giscus@1.6.0:
2312
-
dependencies:
2313
-
lit: 3.3.1
2314
-
2315
-
globals@15.15.0: {}
2316
-
2317
-
hachure-fill@0.5.2: {}
2318
-
2319
1413
hast-util-to-html@9.0.5:
2320
1414
dependencies:
2321
1415
'@types/hast': 3.0.4
···
2324
1418
comma-separated-tokens: 2.0.3
2325
1419
hast-util-whitespace: 3.0.0
2326
1420
html-void-elements: 3.0.0
2327
-
mdast-util-to-hast: 13.2.0
1421
+
mdast-util-to-hast: 13.2.1
2328
1422
property-information: 7.1.0
2329
1423
space-separated-tokens: 2.0.2
2330
1424
stringify-entities: 4.0.4
···
2338
1432
2339
1433
html-void-elements@3.0.0: {}
2340
1434
2341
-
iconv-lite@0.6.3:
2342
-
dependencies:
2343
-
safer-buffer: 2.1.2
2344
-
2345
-
internmap@1.0.1: {}
2346
-
2347
-
internmap@2.0.3: {}
2348
-
2349
-
is-what@4.1.16: {}
2350
-
2351
-
katex@0.16.22:
2352
-
dependencies:
2353
-
commander: 8.3.0
2354
-
2355
-
khroma@2.1.0: {}
2356
-
2357
-
kolorist@1.8.0: {}
2358
-
2359
-
langium@3.3.1:
2360
-
dependencies:
2361
-
chevrotain: 11.0.3
2362
-
chevrotain-allstar: 0.3.1(chevrotain@11.0.3)
2363
-
vscode-languageserver: 9.0.1
2364
-
vscode-languageserver-textdocument: 1.0.12
2365
-
vscode-uri: 3.0.8
2366
-
2367
-
layout-base@1.0.2: {}
2368
-
2369
-
layout-base@2.0.1: {}
2370
-
2371
-
linkify-it@5.0.0:
2372
-
dependencies:
2373
-
uc.micro: 2.1.0
2374
-
2375
-
lit-element@4.2.1:
2376
-
dependencies:
2377
-
'@lit-labs/ssr-dom-shim': 1.4.0
2378
-
'@lit/reactive-element': 2.1.1
2379
-
lit-html: 3.3.1
2380
-
2381
-
lit-html@3.3.1:
2382
-
dependencies:
2383
-
'@types/trusted-types': 2.0.7
2384
-
2385
-
lit@3.3.1:
2386
-
dependencies:
2387
-
'@lit/reactive-element': 2.1.1
2388
-
lit-element: 4.2.1
2389
-
lit-html: 3.3.1
2390
-
2391
-
local-pkg@1.1.2:
2392
-
dependencies:
2393
-
mlly: 1.8.0
2394
-
pkg-types: 2.3.0
2395
-
quansync: 0.2.11
2396
-
2397
-
lodash-es@4.17.21: {}
1435
+
is-what@5.5.0: {}
2398
1436
2399
-
magic-string@0.30.18:
1437
+
magic-string@0.30.21:
2400
1438
dependencies:
2401
1439
'@jridgewell/sourcemap-codec': 1.5.5
2402
1440
···
2404
1442
2405
1443
markdown-it-footnote@4.0.0: {}
2406
1444
2407
-
markdown-it@14.1.0:
2408
-
dependencies:
2409
-
argparse: 2.0.1
2410
-
entities: 4.5.0
2411
-
linkify-it: 5.0.0
2412
-
mdurl: 2.0.0
2413
-
punycode.js: 2.3.1
2414
-
uc.micro: 2.1.0
2415
-
2416
-
marked@15.0.12: {}
2417
-
2418
-
mdast-util-to-hast@13.2.0:
1445
+
mdast-util-to-hast@13.2.1:
2419
1446
dependencies:
2420
1447
'@types/hast': 3.0.4
2421
1448
'@types/mdast': 4.0.4
···
2427
1454
unist-util-visit: 5.0.0
2428
1455
vfile: 6.0.3
2429
1456
2430
-
mdurl@2.0.0: {}
2431
-
2432
-
mermaid@11.11.0:
2433
-
dependencies:
2434
-
'@braintree/sanitize-url': 7.1.1
2435
-
'@iconify/utils': 3.0.1
2436
-
'@mermaid-js/parser': 0.6.2
2437
-
'@types/d3': 7.4.3
2438
-
cytoscape: 3.33.1
2439
-
cytoscape-cose-bilkent: 4.1.0(cytoscape@3.33.1)
2440
-
cytoscape-fcose: 2.2.0(cytoscape@3.33.1)
2441
-
d3: 7.9.0
2442
-
d3-sankey: 0.12.3
2443
-
dagre-d3-es: 7.0.11
2444
-
dayjs: 1.11.18
2445
-
dompurify: 3.2.6
2446
-
katex: 0.16.22
2447
-
khroma: 2.1.0
2448
-
lodash-es: 4.17.21
2449
-
marked: 15.0.12
2450
-
roughjs: 4.6.6
2451
-
stylis: 4.3.6
2452
-
ts-dedent: 2.2.0
2453
-
uuid: 11.1.0
2454
-
transitivePeerDependencies:
2455
-
- supports-color
2456
-
2457
1457
micromark-util-character@2.1.1:
2458
1458
dependencies:
2459
1459
micromark-util-symbol: 2.0.1
···
2471
1471
2472
1472
micromark-util-types@2.0.2: {}
2473
1473
2474
-
minisearch@7.1.2: {}
1474
+
minisearch@7.2.0: {}
2475
1475
2476
1476
mitt@3.0.1: {}
2477
1477
···
2482
1482
pkg-types: 1.3.1
2483
1483
ufo: 1.6.1
2484
1484
2485
-
ms@2.1.3: {}
2486
-
2487
1485
nanoid@3.3.11: {}
2488
-
2489
-
non-layered-tidy-tree-layout@2.0.2:
2490
-
optional: true
2491
1486
2492
1487
oniguruma-to-es@3.1.1:
2493
1488
dependencies:
2494
1489
emoji-regex-xs: 1.0.0
2495
-
regex: 6.0.1
1490
+
regex: 6.1.0
2496
1491
regex-recursion: 6.0.2
2497
1492
2498
-
package-manager-detector@1.3.0: {}
2499
-
2500
-
path-data-parser@0.1.0: {}
1493
+
package-manager-detector@1.6.0: {}
2501
1494
2502
1495
pathe@2.0.3: {}
2503
1496
···
2511
1504
mlly: 1.8.0
2512
1505
pathe: 2.0.3
2513
1506
2514
-
pkg-types@2.3.0:
2515
-
dependencies:
2516
-
confbox: 0.2.2
2517
-
exsolve: 1.0.7
2518
-
pathe: 2.0.3
2519
-
2520
-
points-on-curve@0.2.0: {}
2521
-
2522
-
points-on-path@0.2.1:
2523
-
dependencies:
2524
-
path-data-parser: 0.1.0
2525
-
points-on-curve: 0.2.0
2526
-
2527
1507
postcss@8.5.6:
2528
1508
dependencies:
2529
1509
nanoid: 3.3.11
2530
1510
picocolors: 1.1.1
2531
1511
source-map-js: 1.2.1
2532
1512
2533
-
preact@10.27.0: {}
1513
+
preact@10.28.0: {}
2534
1514
2535
1515
property-information@7.1.0: {}
2536
1516
2537
-
punycode.js@2.3.1: {}
2538
-
2539
-
quansync@0.2.11: {}
2540
-
2541
1517
regex-recursion@6.0.2:
2542
1518
dependencies:
2543
1519
regex-utilities: 2.3.0
2544
1520
2545
1521
regex-utilities@2.3.0: {}
2546
1522
2547
-
regex@6.0.1:
1523
+
regex@6.1.0:
2548
1524
dependencies:
2549
1525
regex-utilities: 2.3.0
2550
1526
2551
1527
rfdc@1.4.1: {}
2552
1528
2553
-
robust-predicates@3.0.2: {}
2554
-
2555
-
rollup@4.46.2:
1529
+
rollup@4.53.5:
2556
1530
dependencies:
2557
1531
'@types/estree': 1.0.8
2558
1532
optionalDependencies:
2559
-
'@rollup/rollup-android-arm-eabi': 4.46.2
2560
-
'@rollup/rollup-android-arm64': 4.46.2
2561
-
'@rollup/rollup-darwin-arm64': 4.46.2
2562
-
'@rollup/rollup-darwin-x64': 4.46.2
2563
-
'@rollup/rollup-freebsd-arm64': 4.46.2
2564
-
'@rollup/rollup-freebsd-x64': 4.46.2
2565
-
'@rollup/rollup-linux-arm-gnueabihf': 4.46.2
2566
-
'@rollup/rollup-linux-arm-musleabihf': 4.46.2
2567
-
'@rollup/rollup-linux-arm64-gnu': 4.46.2
2568
-
'@rollup/rollup-linux-arm64-musl': 4.46.2
2569
-
'@rollup/rollup-linux-loongarch64-gnu': 4.46.2
2570
-
'@rollup/rollup-linux-ppc64-gnu': 4.46.2
2571
-
'@rollup/rollup-linux-riscv64-gnu': 4.46.2
2572
-
'@rollup/rollup-linux-riscv64-musl': 4.46.2
2573
-
'@rollup/rollup-linux-s390x-gnu': 4.46.2
2574
-
'@rollup/rollup-linux-x64-gnu': 4.46.2
2575
-
'@rollup/rollup-linux-x64-musl': 4.46.2
2576
-
'@rollup/rollup-win32-arm64-msvc': 4.46.2
2577
-
'@rollup/rollup-win32-ia32-msvc': 4.46.2
2578
-
'@rollup/rollup-win32-x64-msvc': 4.46.2
1533
+
'@rollup/rollup-android-arm-eabi': 4.53.5
1534
+
'@rollup/rollup-android-arm64': 4.53.5
1535
+
'@rollup/rollup-darwin-arm64': 4.53.5
1536
+
'@rollup/rollup-darwin-x64': 4.53.5
1537
+
'@rollup/rollup-freebsd-arm64': 4.53.5
1538
+
'@rollup/rollup-freebsd-x64': 4.53.5
1539
+
'@rollup/rollup-linux-arm-gnueabihf': 4.53.5
1540
+
'@rollup/rollup-linux-arm-musleabihf': 4.53.5
1541
+
'@rollup/rollup-linux-arm64-gnu': 4.53.5
1542
+
'@rollup/rollup-linux-arm64-musl': 4.53.5
1543
+
'@rollup/rollup-linux-loong64-gnu': 4.53.5
1544
+
'@rollup/rollup-linux-ppc64-gnu': 4.53.5
1545
+
'@rollup/rollup-linux-riscv64-gnu': 4.53.5
1546
+
'@rollup/rollup-linux-riscv64-musl': 4.53.5
1547
+
'@rollup/rollup-linux-s390x-gnu': 4.53.5
1548
+
'@rollup/rollup-linux-x64-gnu': 4.53.5
1549
+
'@rollup/rollup-linux-x64-musl': 4.53.5
1550
+
'@rollup/rollup-openharmony-arm64': 4.53.5
1551
+
'@rollup/rollup-win32-arm64-msvc': 4.53.5
1552
+
'@rollup/rollup-win32-ia32-msvc': 4.53.5
1553
+
'@rollup/rollup-win32-x64-gnu': 4.53.5
1554
+
'@rollup/rollup-win32-x64-msvc': 4.53.5
2579
1555
fsevents: 2.3.3
2580
-
2581
-
roughjs@4.6.6:
2582
-
dependencies:
2583
-
hachure-fill: 0.5.2
2584
-
path-data-parser: 0.1.0
2585
-
points-on-curve: 0.2.0
2586
-
points-on-path: 0.2.1
2587
-
2588
-
rw@1.3.3: {}
2589
-
2590
-
safer-buffer@2.1.2: {}
2591
1556
2592
1557
search-insights@2.17.3: {}
2593
1558
···
2613
1578
character-entities-html4: 2.1.0
2614
1579
character-entities-legacy: 3.0.0
2615
1580
2616
-
stylis@4.3.6: {}
2617
-
2618
-
superjson@2.2.2:
1581
+
superjson@2.2.6:
2619
1582
dependencies:
2620
-
copy-anything: 3.0.5
1583
+
copy-anything: 4.0.5
2621
1584
2622
-
tabbable@6.2.0: {}
1585
+
tabbable@6.3.0: {}
2623
1586
2624
-
tinyexec@1.0.1: {}
1587
+
tinyexec@1.0.2: {}
2625
1588
2626
1589
trim-lines@3.0.1: {}
2627
1590
2628
-
ts-dedent@2.2.0: {}
2629
-
2630
-
uc.micro@2.1.0: {}
2631
-
2632
1591
ufo@1.6.1: {}
2633
1592
2634
-
unist-util-is@6.0.0:
1593
+
unist-util-is@6.0.1:
2635
1594
dependencies:
2636
1595
'@types/unist': 3.0.3
2637
1596
···
2643
1602
dependencies:
2644
1603
'@types/unist': 3.0.3
2645
1604
2646
-
unist-util-visit-parents@6.0.1:
1605
+
unist-util-visit-parents@6.0.2:
2647
1606
dependencies:
2648
1607
'@types/unist': 3.0.3
2649
-
unist-util-is: 6.0.0
1608
+
unist-util-is: 6.0.1
2650
1609
2651
1610
unist-util-visit@5.0.0:
2652
1611
dependencies:
2653
1612
'@types/unist': 3.0.3
2654
-
unist-util-is: 6.0.0
2655
-
unist-util-visit-parents: 6.0.1
2656
-
2657
-
uuid@11.1.0: {}
1613
+
unist-util-is: 6.0.1
1614
+
unist-util-visit-parents: 6.0.2
2658
1615
2659
1616
vfile-message@4.0.3:
2660
1617
dependencies:
···
2666
1623
'@types/unist': 3.0.3
2667
1624
vfile-message: 4.0.3
2668
1625
2669
-
vite@5.4.19:
1626
+
vite@5.4.21:
2670
1627
dependencies:
2671
1628
esbuild: 0.21.5
2672
1629
postcss: 8.5.6
2673
-
rollup: 4.46.2
1630
+
rollup: 4.53.5
2674
1631
optionalDependencies:
2675
1632
fsevents: 2.3.3
2676
1633
2677
-
vitepress-plugin-comment-with-giscus@1.1.15(vue@3.5.21):
2678
-
dependencies:
2679
-
'@giscus/vue': 2.4.0(vue@3.5.21)
2680
-
transitivePeerDependencies:
2681
-
- vue
2682
-
2683
-
vitepress-plugin-group-icons@1.6.3(markdown-it@14.1.0)(vite@5.4.19):
1634
+
vitepress-plugin-group-icons@1.6.5(vite@5.4.21):
2684
1635
dependencies:
2685
-
'@iconify-json/logos': 1.2.9
2686
-
'@iconify-json/vscode-icons': 1.2.29
2687
-
'@iconify/utils': 3.0.1
2688
-
markdown-it: 14.1.0
2689
-
vite: 5.4.19
2690
-
transitivePeerDependencies:
2691
-
- supports-color
2692
-
2693
-
vitepress-plugin-mermaid@2.0.17(mermaid@11.11.0)(vitepress@1.6.4(@algolia/client-search@5.35.0)(postcss@8.5.6)(search-insights@2.17.3)):
2694
-
dependencies:
2695
-
mermaid: 11.11.0
2696
-
vitepress: 1.6.4(@algolia/client-search@5.35.0)(postcss@8.5.6)(search-insights@2.17.3)
1636
+
'@iconify-json/logos': 1.2.10
1637
+
'@iconify-json/vscode-icons': 1.2.37
1638
+
'@iconify/utils': 3.1.0
2697
1639
optionalDependencies:
2698
-
'@mermaid-js/mermaid-mindmap': 9.3.0
1640
+
vite: 5.4.21
2699
1641
2700
-
vitepress@1.6.4(@algolia/client-search@5.35.0)(postcss@8.5.6)(search-insights@2.17.3):
1642
+
vitepress@1.6.4(@algolia/client-search@5.46.0)(postcss@8.5.6)(search-insights@2.17.3):
2701
1643
dependencies:
2702
1644
'@docsearch/css': 3.8.2
2703
-
'@docsearch/js': 3.8.2(@algolia/client-search@5.35.0)(search-insights@2.17.3)
2704
-
'@iconify-json/simple-icons': 1.2.47
1645
+
'@docsearch/js': 3.8.2(@algolia/client-search@5.46.0)(search-insights@2.17.3)
1646
+
'@iconify-json/simple-icons': 1.2.63
2705
1647
'@shikijs/core': 2.5.0
2706
1648
'@shikijs/transformers': 2.5.0
2707
1649
'@shikijs/types': 2.5.0
2708
1650
'@types/markdown-it': 14.1.2
2709
-
'@vitejs/plugin-vue': 5.2.4(vite@5.4.19)(vue@3.5.21)
2710
-
'@vue/devtools-api': 7.7.7
2711
-
'@vue/shared': 3.5.18
1651
+
'@vitejs/plugin-vue': 5.2.4(vite@5.4.21)(vue@3.5.26)
1652
+
'@vue/devtools-api': 7.7.9
1653
+
'@vue/shared': 3.5.25
2712
1654
'@vueuse/core': 12.8.2
2713
-
'@vueuse/integrations': 12.8.2(focus-trap@7.6.5)
2714
-
focus-trap: 7.6.5
1655
+
'@vueuse/integrations': 12.8.2(focus-trap@7.6.6)
1656
+
focus-trap: 7.6.6
2715
1657
mark.js: 8.11.1
2716
-
minisearch: 7.1.2
1658
+
minisearch: 7.2.0
2717
1659
shiki: 2.5.0
2718
-
vite: 5.4.19
2719
-
vue: 3.5.21
1660
+
vite: 5.4.21
1661
+
vue: 3.5.26
2720
1662
optionalDependencies:
2721
1663
postcss: 8.5.6
2722
1664
transitivePeerDependencies:
···
2746
1688
- typescript
2747
1689
- universal-cookie
2748
1690
2749
-
vscode-jsonrpc@8.2.0: {}
2750
-
2751
-
vscode-languageserver-protocol@3.17.5:
1691
+
vue@3.5.26:
2752
1692
dependencies:
2753
-
vscode-jsonrpc: 8.2.0
2754
-
vscode-languageserver-types: 3.17.5
2755
-
2756
-
vscode-languageserver-textdocument@1.0.12: {}
2757
-
2758
-
vscode-languageserver-types@3.17.5: {}
2759
-
2760
-
vscode-languageserver@9.0.1:
2761
-
dependencies:
2762
-
vscode-languageserver-protocol: 3.17.5
2763
-
2764
-
vscode-uri@3.0.8: {}
2765
-
2766
-
vue@3.5.21:
2767
-
dependencies:
2768
-
'@vue/compiler-dom': 3.5.21
2769
-
'@vue/compiler-sfc': 3.5.21
2770
-
'@vue/runtime-dom': 3.5.21
2771
-
'@vue/server-renderer': 3.5.21(vue@3.5.21)
2772
-
'@vue/shared': 3.5.21
1693
+
'@vue/compiler-dom': 3.5.26
1694
+
'@vue/compiler-sfc': 3.5.26
1695
+
'@vue/runtime-dom': 3.5.26
1696
+
'@vue/server-renderer': 3.5.26(vue@3.5.26)
1697
+
'@vue/shared': 3.5.26
2773
1698
2774
1699
zwitch@2.0.4: {}
+1
-1
doc/reference/errors.md
+1
-1
doc/reference/errors.md
+58
-4
doc/reference/meta.md
+58
-4
doc/reference/meta.md
···
1
1
---
2
2
comment: true
3
3
title: Meta Options
4
-
description: Wire hive meta options.
4
+
description: wire hive meta options.
5
5
---
6
6
7
-
# {{ $frontmatter.title }}
7
+
# Meta Options
8
8
9
9
{{ $frontmatter.description }}
10
10
···
14
14
15
15
_Type:_ A path or an instance of `nixpkgs`.
16
16
17
-
_Default:_ `inputs.nixpkgs.outPath`
17
+
_Default:_ `null`
18
18
19
19
_Examples:_
20
20
···
40
40
41
41
::: tip
42
42
43
-
Wire always passes `name` (name of the node)
43
+
wire always passes `name` (name of the node)
44
44
and `nodes` (attribute set of all nodes) as args, even if `meta.specialArgs =
45
45
{ }`.
46
46
···
60
60
};
61
61
}
62
62
```
63
+
64
+
## meta.nodeSpecialArgs
65
+
66
+
Extra `specialArgs` to override `meta.specialArgs` for each node
67
+
68
+
_Type:_ attribute set of attribute set
69
+
70
+
_Default:_ `{ }`
71
+
72
+
_Example:_
73
+
74
+
```nix
75
+
{
76
+
meta.nodeSpecialArgs = {
77
+
extra-property = "some-value";
78
+
};
79
+
}
80
+
```
81
+
82
+
## meta.nodeNixpkgs
83
+
84
+
Per-node nixpkgs to override `meta.nixpkgs`.
85
+
86
+
See `meta.nixpkgs` examples for possible values.
87
+
88
+
_Type:_ attribute set of path or an instance of `nixpkgs`
89
+
90
+
_Default:_ `{ }`
91
+
92
+
_Example:_
93
+
94
+
```nix
95
+
{
96
+
meta = {
97
+
nixpkgs = import <nixpkgs> { };
98
+
99
+
nodeNixpkgs = {
100
+
node-b = import <special-nixpkgs> { };
101
+
};
102
+
};
103
+
104
+
node-a =
105
+
{ pkgs, ... }:
106
+
{
107
+
# uses <nixpkgs> (meta.nixpkgs)
108
+
};
109
+
110
+
node-b =
111
+
{ pkgs, ... }:
112
+
{
113
+
# uses <special-nixpkgs> (meta.nodeNixpkgs.node-b)
114
+
};
115
+
}
116
+
```
-3
doc/snippets/default.nix
-3
doc/snippets/default.nix
···
1
1
{
2
-
getting-started-hm = import ./getting-started/home.nix;
3
-
getting-started-hm-flake = import ./getting-started/hm.flake.nix;
4
2
getting-started-nixos = import ./getting-started/configuration.nix;
5
3
getting-started-nixos-flake = import ./getting-started/nixos.flake.nix;
6
-
getting-started-cache = import ./getting-started/cache.nix;
7
4
}
-12
doc/snippets/getting-started/cache.nix
-12
doc/snippets/getting-started/cache.nix
+3
-2
doc/snippets/getting-started/configuration.nix
+3
-2
doc/snippets/getting-started/configuration.nix
···
1
1
{system, ...}: let
2
-
wire = import ( # [!code ++]
3
-
builtins.fetchTarball "https://github.com/wires-org/wire/archive/refs/heads/main.tar.gz" # [!code ++]
2
+
wire = import (
3
+
# [!code ++]
4
+
builtins.fetchTarball "https://github.com/forallsys/wire/archive/refs/heads/trunk.tar.gz" # [!code ++]
4
5
); # [!code ++]
5
6
in {
6
7
environment.systemPackages = [
+12
-4
doc/snippets/getting-started/flake-merged.nix
+12
-4
doc/snippets/getting-started/flake-merged.nix
···
1
1
{
2
2
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
3
-
inputs.wire.url = "github:wires-org/wire";
3
+
inputs.wire.url = "github:forallsys/wire";
4
4
5
5
outputs = {
6
6
self,
···
9
9
...
10
10
} @ inputs: {
11
11
wire = wire.makeHive {
12
-
# Give wire our ninixosConfigurations
12
+
# Give wire our nixosConfigurations
13
13
inherit (self) nixosConfigurations;
14
14
15
15
meta = {
16
-
# ... from above
16
+
nixpkgs = import nixpkgs {localSystem = "x86_64-linux";};
17
17
};
18
18
19
19
node-a.deployment = {
20
+
tags = [
21
+
# some tags
22
+
];
23
+
20
24
# ...
21
25
};
22
26
};
···
26
30
system = "x86_64-linux";
27
31
specialArgs = {inherit inputs;};
28
32
modules = [
33
+
wire.nixosModules.default
29
34
{
30
35
nixpkgs.hostPlatform = "x86_64-linux";
36
+
37
+
# you can put deployment options here too!
38
+
deployment.target = "some-hostname";
31
39
}
32
40
];
33
41
};
34
42
35
-
node-b = nixpkgs.lib.nixosSystem {
43
+
some-other-host = nixpkgs.lib.nixosSystem {
36
44
system = "x86_64-linux";
37
45
specialArgs = {inherit inputs;};
38
46
modules = [
+3
-1
doc/snippets/getting-started/flake.nix
+3
-1
doc/snippets/getting-started/flake.nix
···
1
1
{
2
2
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
3
-
inputs.wire.url = "github:wires-org/wire";
3
+
inputs.wire.url = "github:forallsys/wire";
4
4
5
5
outputs = inputs @ {
6
6
nixpkgs,
···
22
22
};
23
23
24
24
node-a = {
25
+
nixpkgs.hostPlatform = "x86_64-linux";
26
+
25
27
# ...
26
28
};
27
29
};
-34
doc/snippets/getting-started/hm.flake.nix
-34
doc/snippets/getting-started/hm.flake.nix
···
1
-
{
2
-
inputs = {
3
-
# ...
4
-
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
5
-
home-manager = {
6
-
url = "github:nix-community/home-manager";
7
-
inputs.nixpkgs.follows = "nixpkgs";
8
-
};
9
-
wire.url = "github:wires-org/wire"; # [!code ++]
10
-
};
11
-
12
-
outputs = {
13
-
# ...
14
-
nixpkgs,
15
-
home-manager,
16
-
wire, # [!code ++]
17
-
...
18
-
}: let
19
-
system = "x86_64-linux";
20
-
pkgs = nixpkgs.legacyPackages.${system};
21
-
in {
22
-
homeConfigurations.my-user = home-manager.lib.homeManagerConfiguration {
23
-
inherit pkgs;
24
-
modules = [
25
-
# ...
26
-
{
27
-
home.packages = [
28
-
wire.packages.${system}.wire # [!code ++]
29
-
];
30
-
}
31
-
];
32
-
};
33
-
};
34
-
}
-11
doc/snippets/getting-started/home.nix
-11
doc/snippets/getting-started/home.nix
-2
doc/snippets/getting-started/nix.conf
-2
doc/snippets/getting-started/nix.conf
+1
-1
doc/snippets/getting-started/nixos.flake.nix
+1
-1
doc/snippets/getting-started/nixos.flake.nix
+40
doc/snippets/guides/example-action.yml
+40
doc/snippets/guides/example-action.yml
···
1
+
name: Build
2
+
3
+
on:
4
+
push:
5
+
branches: [main]
6
+
7
+
jobs:
8
+
build-partitioned:
9
+
name: Build Partitioned
10
+
runs-on: ubuntu-latest
11
+
permissions: {}
12
+
strategy:
13
+
matrix:
14
+
# Break into 4 partitions
15
+
partition: [1, 2, 3, 4]
16
+
steps:
17
+
- uses: actions/checkout@v6
18
+
with:
19
+
persist-credentials: false
20
+
# This will likely be required if you have multiple architectures
21
+
# in your hive.
22
+
- name: Set up QEMU
23
+
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130
24
+
- uses: cachix/install-nix-action@4e002c8ec80594ecd40e759629461e26c8abed15
25
+
with:
26
+
nix_path: nixpkgs=channel:nixos-unstable
27
+
extra_nix_config: |
28
+
# Install binary cache as described in the install wire guide
29
+
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= cache.garnix.io:CTFPyKSLcx5RMJKfLo5EEPUObbA78b0YQ2DTCJXqr9g=
30
+
substituters = https://cache.nixos.org/ https://cache.garnix.io
31
+
32
+
# Again, include additional architectures if you have multiple
33
+
# architectures in your hive
34
+
extra-platforms = aarch64-linux i686-linux
35
+
# Uses wire from your shell (as described in the install wire guide).
36
+
- name: Build partition ${{ matrix.partition }}
37
+
run: nix develop -Lvc wire \
38
+
build \
39
+
--parallel 1 \
40
+
--partition ${{ matrix.partition }}/4
+344
doc/snippets/guides/installation/flake.lock
+344
doc/snippets/guides/installation/flake.lock
···
1
+
{
2
+
"nodes": {
3
+
"crane": {
4
+
"locked": {
5
+
"lastModified": 1759893430,
6
+
"narHash": "sha256-yAy4otLYm9iZ+NtQwTMEbqHwswSFUbhn7x826RR6djw=",
7
+
"owner": "ipetkov",
8
+
"repo": "crane",
9
+
"rev": "1979a2524cb8c801520bd94c38bb3d5692419d93",
10
+
"type": "github"
11
+
},
12
+
"original": {
13
+
"owner": "ipetkov",
14
+
"repo": "crane",
15
+
"type": "github"
16
+
}
17
+
},
18
+
"fenix": {
19
+
"inputs": {
20
+
"nixpkgs": [
21
+
"wire",
22
+
"nixpkgs"
23
+
],
24
+
"rust-analyzer-src": "rust-analyzer-src"
25
+
},
26
+
"locked": {
27
+
"lastModified": 1760424233,
28
+
"narHash": "sha256-8jLfVik1ccwmacVW5BlprmsuK534rT5HjdPhkSaew44=",
29
+
"owner": "nix-community",
30
+
"repo": "fenix",
31
+
"rev": "48a763cdc0b2d07199a021de99c2ca50af76e49f",
32
+
"type": "github"
33
+
},
34
+
"original": {
35
+
"owner": "nix-community",
36
+
"repo": "fenix",
37
+
"type": "github"
38
+
}
39
+
},
40
+
"flake-compat": {
41
+
"locked": {
42
+
"lastModified": 1747046372,
43
+
"narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=",
44
+
"owner": "edolstra",
45
+
"repo": "flake-compat",
46
+
"rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
47
+
"type": "github"
48
+
},
49
+
"original": {
50
+
"owner": "edolstra",
51
+
"repo": "flake-compat",
52
+
"type": "github"
53
+
}
54
+
},
55
+
"flake-compat_2": {
56
+
"flake": false,
57
+
"locked": {
58
+
"lastModified": 1747046372,
59
+
"narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=",
60
+
"owner": "edolstra",
61
+
"repo": "flake-compat",
62
+
"rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
63
+
"type": "github"
64
+
},
65
+
"original": {
66
+
"owner": "edolstra",
67
+
"repo": "flake-compat",
68
+
"type": "github"
69
+
}
70
+
},
71
+
"flake-parts": {
72
+
"inputs": {
73
+
"nixpkgs-lib": "nixpkgs-lib"
74
+
},
75
+
"locked": {
76
+
"lastModified": 1759362264,
77
+
"narHash": "sha256-wfG0S7pltlYyZTM+qqlhJ7GMw2fTF4mLKCIVhLii/4M=",
78
+
"owner": "hercules-ci",
79
+
"repo": "flake-parts",
80
+
"rev": "758cf7296bee11f1706a574c77d072b8a7baa881",
81
+
"type": "github"
82
+
},
83
+
"original": {
84
+
"owner": "hercules-ci",
85
+
"repo": "flake-parts",
86
+
"type": "github"
87
+
}
88
+
},
89
+
"git-hooks": {
90
+
"inputs": {
91
+
"flake-compat": "flake-compat_2",
92
+
"gitignore": "gitignore",
93
+
"nixpkgs": "nixpkgs_2"
94
+
},
95
+
"locked": {
96
+
"lastModified": 1760392170,
97
+
"narHash": "sha256-WftxJgr2MeDDFK47fQKywzC72L2jRc/PWcyGdjaDzkw=",
98
+
"owner": "cachix",
99
+
"repo": "git-hooks.nix",
100
+
"rev": "46d55f0aeb1d567a78223e69729734f3dca25a85",
101
+
"type": "github"
102
+
},
103
+
"original": {
104
+
"owner": "cachix",
105
+
"repo": "git-hooks.nix",
106
+
"type": "github"
107
+
}
108
+
},
109
+
"gitignore": {
110
+
"inputs": {
111
+
"nixpkgs": [
112
+
"wire",
113
+
"git-hooks",
114
+
"nixpkgs"
115
+
]
116
+
},
117
+
"locked": {
118
+
"lastModified": 1709087332,
119
+
"narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
120
+
"owner": "hercules-ci",
121
+
"repo": "gitignore.nix",
122
+
"rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
123
+
"type": "github"
124
+
},
125
+
"original": {
126
+
"owner": "hercules-ci",
127
+
"repo": "gitignore.nix",
128
+
"type": "github"
129
+
}
130
+
},
131
+
"linux-systems": {
132
+
"locked": {
133
+
"lastModified": 1689347949,
134
+
"narHash": "sha256-12tWmuL2zgBgZkdoB6qXZsgJEH9LR3oUgpaQq2RbI80=",
135
+
"owner": "nix-systems",
136
+
"repo": "default-linux",
137
+
"rev": "31732fcf5e8fea42e59c2488ad31a0e651500f68",
138
+
"type": "github"
139
+
},
140
+
"original": {
141
+
"owner": "nix-systems",
142
+
"repo": "default-linux",
143
+
"type": "github"
144
+
}
145
+
},
146
+
"nixpkgs": {
147
+
"locked": {
148
+
"lastModified": 1760524057,
149
+
"narHash": "sha256-EVAqOteLBFmd7pKkb0+FIUyzTF61VKi7YmvP1tw4nEw=",
150
+
"owner": "NixOS",
151
+
"repo": "nixpkgs",
152
+
"rev": "544961dfcce86422ba200ed9a0b00dd4b1486ec5",
153
+
"type": "github"
154
+
},
155
+
"original": {
156
+
"owner": "NixOS",
157
+
"ref": "nixos-unstable",
158
+
"repo": "nixpkgs",
159
+
"type": "github"
160
+
}
161
+
},
162
+
"nixpkgs-lib": {
163
+
"locked": {
164
+
"lastModified": 1754788789,
165
+
"narHash": "sha256-x2rJ+Ovzq0sCMpgfgGaaqgBSwY+LST+WbZ6TytnT9Rk=",
166
+
"owner": "nix-community",
167
+
"repo": "nixpkgs.lib",
168
+
"rev": "a73b9c743612e4244d865a2fdee11865283c04e6",
169
+
"type": "github"
170
+
},
171
+
"original": {
172
+
"owner": "nix-community",
173
+
"repo": "nixpkgs.lib",
174
+
"type": "github"
175
+
}
176
+
},
177
+
"nixpkgs_2": {
178
+
"locked": {
179
+
"lastModified": 1759070547,
180
+
"narHash": "sha256-JVZl8NaVRYb0+381nl7LvPE+A774/dRpif01FKLrYFQ=",
181
+
"owner": "NixOS",
182
+
"repo": "nixpkgs",
183
+
"rev": "647e5c14cbd5067f44ac86b74f014962df460840",
184
+
"type": "github"
185
+
},
186
+
"original": {
187
+
"owner": "NixOS",
188
+
"ref": "nixpkgs-unstable",
189
+
"repo": "nixpkgs",
190
+
"type": "github"
191
+
}
192
+
},
193
+
"nixpkgs_3": {
194
+
"locked": {
195
+
"lastModified": 1760284886,
196
+
"narHash": "sha256-TK9Kr0BYBQ/1P5kAsnNQhmWWKgmZXwUQr4ZMjCzWf2c=",
197
+
"owner": "NixOS",
198
+
"repo": "nixpkgs",
199
+
"rev": "cf3f5c4def3c7b5f1fc012b3d839575dbe552d43",
200
+
"type": "github"
201
+
},
202
+
"original": {
203
+
"owner": "NixOS",
204
+
"ref": "nixos-unstable",
205
+
"repo": "nixpkgs",
206
+
"type": "github"
207
+
}
208
+
},
209
+
"nixpkgs_4": {
210
+
"locked": {
211
+
"lastModified": 1754340878,
212
+
"narHash": "sha256-lgmUyVQL9tSnvvIvBp7x1euhkkCho7n3TMzgjdvgPoU=",
213
+
"owner": "nixos",
214
+
"repo": "nixpkgs",
215
+
"rev": "cab778239e705082fe97bb4990e0d24c50924c04",
216
+
"type": "github"
217
+
},
218
+
"original": {
219
+
"owner": "nixos",
220
+
"ref": "nixpkgs-unstable",
221
+
"repo": "nixpkgs",
222
+
"type": "github"
223
+
}
224
+
},
225
+
"nixpkgs_current_stable": {
226
+
"locked": {
227
+
"lastModified": 1760139962,
228
+
"narHash": "sha256-4xggC56Rub3WInz5eD7EZWXuLXpNvJiUPahGtMkwtuc=",
229
+
"owner": "NixOS",
230
+
"repo": "nixpkgs",
231
+
"rev": "7e297ddff44a3cc93673bb38d0374df8d0ad73e4",
232
+
"type": "github"
233
+
},
234
+
"original": {
235
+
"owner": "NixOS",
236
+
"ref": "nixos-25.05",
237
+
"repo": "nixpkgs",
238
+
"type": "github"
239
+
}
240
+
},
241
+
"root": {
242
+
"inputs": {
243
+
"nixpkgs": "nixpkgs",
244
+
"systems": "systems",
245
+
"wire": "wire"
246
+
}
247
+
},
248
+
"rust-analyzer-src": {
249
+
"flake": false,
250
+
"locked": {
251
+
"lastModified": 1760260966,
252
+
"narHash": "sha256-pOVvZz/aa+laeaUKyE6PtBevdo4rywMwjhWdSZE/O1c=",
253
+
"owner": "rust-lang",
254
+
"repo": "rust-analyzer",
255
+
"rev": "c5181dbbe33af6f21b9d83e02fdb6fda298a3b65",
256
+
"type": "github"
257
+
},
258
+
"original": {
259
+
"owner": "rust-lang",
260
+
"ref": "nightly",
261
+
"repo": "rust-analyzer",
262
+
"type": "github"
263
+
}
264
+
},
265
+
"systems": {
266
+
"locked": {
267
+
"lastModified": 1681028828,
268
+
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
269
+
"owner": "nix-systems",
270
+
"repo": "default",
271
+
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
272
+
"type": "github"
273
+
},
274
+
"original": {
275
+
"owner": "nix-systems",
276
+
"repo": "default",
277
+
"type": "github"
278
+
}
279
+
},
280
+
"systems_2": {
281
+
"locked": {
282
+
"lastModified": 1681028828,
283
+
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
284
+
"owner": "nix-systems",
285
+
"repo": "default",
286
+
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
287
+
"type": "github"
288
+
},
289
+
"original": {
290
+
"owner": "nix-systems",
291
+
"repo": "default",
292
+
"type": "github"
293
+
}
294
+
},
295
+
"treefmt-nix": {
296
+
"inputs": {
297
+
"nixpkgs": "nixpkgs_4"
298
+
},
299
+
"locked": {
300
+
"lastModified": 1760120816,
301
+
"narHash": "sha256-gq9rdocpmRZCwLS5vsHozwB6b5nrOBDNc2kkEaTXHfg=",
302
+
"owner": "numtide",
303
+
"repo": "treefmt-nix",
304
+
"rev": "761ae7aff00907b607125b2f57338b74177697ed",
305
+
"type": "github"
306
+
},
307
+
"original": {
308
+
"owner": "numtide",
309
+
"repo": "treefmt-nix",
310
+
"type": "github"
311
+
}
312
+
},
313
+
"wire": {
314
+
"inputs": {
315
+
"crane": "crane",
316
+
"fenix": "fenix",
317
+
"flake-compat": "flake-compat",
318
+
"flake-parts": "flake-parts",
319
+
"git-hooks": "git-hooks",
320
+
"linux-systems": "linux-systems",
321
+
"nixpkgs": "nixpkgs_3",
322
+
"nixpkgs_current_stable": "nixpkgs_current_stable",
323
+
"systems": "systems_2",
324
+
"treefmt-nix": "treefmt-nix"
325
+
},
326
+
"locked": {
327
+
"lastModified": 1758104393,
328
+
"narHash": "sha256-ddXhp8hDFMKUiVf+V5Q71Ehfw8o1kGofxN9cAljyfOI=",
329
+
"owner": "mrshmllow",
330
+
"repo": "wire",
331
+
"rev": "6c99f40273aa16dc6603375dc2c867b9265a289a",
332
+
"type": "github"
333
+
},
334
+
"original": {
335
+
"owner": "mrshmllow",
336
+
"ref": "v0.5.0",
337
+
"repo": "wire",
338
+
"type": "github"
339
+
}
340
+
}
341
+
},
342
+
"root": "root",
343
+
"version": 7
344
+
}
+38
doc/snippets/guides/installation/flake.nix
+38
doc/snippets/guides/installation/flake.nix
···
1
+
{
2
+
inputs = {
3
+
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
4
+
wire.url = "github:forallsys/wire/stable";
5
+
6
+
# alternatively, you can use a tag instead:
7
+
# wire.url = "github:forallsys/wire/v1.1.1";
8
+
9
+
systems.url = "github:nix-systems/default";
10
+
};
11
+
12
+
outputs = {
13
+
nixpkgs,
14
+
wire,
15
+
systems,
16
+
...
17
+
}: let
18
+
forAllSystems = nixpkgs.lib.genAttrs (import systems);
19
+
in {
20
+
wire = wire.makeHive {
21
+
meta.nixpkgs = import nixpkgs {localSystem = "x86_64-linux";};
22
+
23
+
# Continue to next How-To guide to fill this section
24
+
};
25
+
26
+
devShells = forAllSystems (
27
+
system: let
28
+
pkgs = nixpkgs.legacyPackages.${system};
29
+
in {
30
+
default = pkgs.mkShell {
31
+
buildInputs = [
32
+
wire.packages.${system}.wire
33
+
];
34
+
};
35
+
}
36
+
);
37
+
};
38
+
}
+10
doc/snippets/guides/installation/hive.nix
+10
doc/snippets/guides/installation/hive.nix
+11
doc/snippets/guides/installation/shell.nix
+11
doc/snippets/guides/installation/shell.nix
+3
doc/snippets/tutorial/cache.conf
+3
doc/snippets/tutorial/cache.conf
+12
doc/snippets/tutorial/cache.nix
+12
doc/snippets/tutorial/cache.nix
+2
doc/snippets/tutorial/extra-experimental-features.conf
+2
doc/snippets/tutorial/extra-experimental-features.conf
+7
doc/snippets/tutorial/extra-experimental-features.nix
+7
doc/snippets/tutorial/extra-experimental-features.nix
+46
doc/tutorial/overview.md
+46
doc/tutorial/overview.md
···
1
+
---
2
+
comment: false
3
+
title: wire Tutorial Overview
4
+
description: In this tutorial we will create and deploy a wire Hive.
5
+
---
6
+
7
+
# wire Tutorial Overview
8
+
9
+
wire is a tool to deploy NixOS systems. Its usage is inspired by [colmena](https://colmena.cli.rs/). In many places its configuration attempts to remain a superset[^1] of colmena, however it is **not** a fork.
10
+
11
+
[^1]: A lot of your colmena module options will continue to work with wire, but wire has additional ergonomic changes you can take advantage of.
12
+
13
+
---
14
+
15
+
In this tutorial we will create and deploy a wire Hive. Along the way we will
16
+
encounter [npins](https://github.com/andir/npins), simple NixOS
17
+
configurations, virtual machines, and deployment keys.
18
+
19
+
You'll need at least ~10 GB of free disk space to complete this tutorial.
20
+
21
+
<div class="tip custom-block" style="padding-top: 8px">
22
+
23
+
Ready? Skip to [Nix Setup](./part-one/nix-setup).
24
+
25
+
</div>
26
+
27
+
## What features does wire have?
28
+
29
+
| Features | wire | Colmena |
30
+
| ------------------------------------------------------------- | ------------------ | ------------------ |
31
+
| [Node Tagging](/guides/targeting.html#tag-basics) | :white_check_mark: | :white_check_mark: |
32
+
| [Secret Management](/guides/keys.html) | :white_check_mark: | :white_check_mark: |
33
+
| [Parallel Deployment](/guides/parallelism.html) | :white_check_mark: | :white_check_mark: |
34
+
| Remote Builds | :white_check_mark: | :white_check_mark: |
35
+
| [Key Services](/guides/keys.html#using-keys-with-services) | :white_check_mark: | :white_check_mark: |
36
+
| [Pipeline Support](/guides/targeting.html#reading-from-stdin) | :white_check_mark: | :x:[^2] |
37
+
| [Non-Root Deployments](/guides/non-root-user) | :white_check_mark: | :x:[^3] |
38
+
| `--path` accepts flakerefs | :white_check_mark: | :x: |
39
+
| REPL & Eval expressions | :x: | :white_check_mark: |
40
+
| Adhoc remote command execution[^4] | :x: | :white_check_mark: |
41
+
42
+
[^2]: You need to write custom nix code to use Colmena hive metadata inside environments like CI pipelines, bash scripting, etc., which requires knowledge of its internals. Recently it gained the [eval feature](https://colmena.cli.rs/unstable/features/eval.html) which has improved the situation since wire was first started.
43
+
44
+
[^3]: See https://github.com/zhaofengli/colmena/issues/120
45
+
46
+
[^4]: wire lacks an equivalent to `colmena exec`.
+188
doc/tutorial/part-one/basic-hive.md
+188
doc/tutorial/part-one/basic-hive.md
···
1
+
---
2
+
comment: true
3
+
title: Basic Hive & Deployment
4
+
description: Creating a basic hive and deploying changes to the virtual machine.
5
+
---
6
+
7
+
# Basic Hive & Deployment
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Editing `hive.nix`
12
+
13
+
Open a text editor and edit `hive.nix`. You should copy this example, which imports
14
+
the npins sources we added. It also calls `makeHive`, and gives wire `nixpkgs`
15
+
from npins as well.
16
+
17
+
```nix:line-numbers [hive.nix]
18
+
let
19
+
# import npins sources
20
+
sources = import ./npins;
21
+
# import `wire` from npins sources
22
+
wire = import sources.wire;
23
+
in
24
+
wire.makeHive {
25
+
# give wire nixpkgs from npins
26
+
meta.nixpkgs = import sources.nixpkgs { };
27
+
28
+
# we'll edit this part
29
+
}
30
+
```
31
+
32
+
Let's check out what wire sees with `wire show`.
33
+
34
+
```sh
35
+
[nix-shell]$ wire show
36
+
INFO eval_hive: evaluating hive HiveNix("/home/marsh/scratch/wire-tutorial/hive.nix")
37
+
WARN use --json to output something scripting suitable
38
+
Summary: 0 total node(s), totalling 0 keys (0 distinct).
39
+
Note: Listed connections are tried from Left to Right
40
+
41
+
```
42
+
43
+
The line `Summary: 0 total node(s)` means there are no nodes in our hive.
44
+
45
+
## Adding The First Node
46
+
47
+
Let's add the virtual machine as a node to the hive with the name
48
+
`virtual-machine`. Additionally, we will add `deployment.target`, recalling we
49
+
forwarded sshd `virtual-machine:22` to the port `localhost:2222`:
50
+
51
+
```nix:line-numbers [hive.nix]
52
+
let
53
+
sources = import ./npins;
54
+
wire = import sources.wire;
55
+
in
56
+
wire.makeHive {
57
+
meta.nixpkgs = import sources.nixpkgs { };
58
+
59
+
virtual-machine = { pkgs, ... }: { # [!code ++]
60
+
deployment.target = { # [!code ++]
61
+
port = 2222; # [!code ++]
62
+
hosts = [ "localhost" ]; # [!code ++]
63
+
}; # [!code ++]
64
+
65
+
nixpkgs.hostPlatform = "x86_64-linux"; # [!code ++]
66
+
}; # [!code ++]
67
+
}
68
+
```
69
+
70
+
## A naive `wire apply`
71
+
72
+
If we tried to run `wire apply` on our hive at this stage, it likely won't work.
73
+
If you've used NixOS before, you'll notice that many important options are
74
+
missing. But let's try anyway:
75
+
76
+
```sh
77
+
[nix-shell]$ wire apply
78
+
ERROR apply{goal=Switch on=}:goal{node=virtual-machine}: lib::hive::node: Failed to execute `Evaluate the node`
79
+
Error: × 1 node(s) failed to apply.
80
+
81
+
Error:
82
+
× node virtual-machine failed to apply
83
+
├─▶ wire::Evaluate
84
+
│
85
+
│ × failed to evaluate `--file /home/marsh/scratch/wire-tutorial/hive.nix topLevels.virtual-machine` from the context
86
+
│ │ of a hive.
87
+
│
88
+
╰─▶ nix --extra-experimental-features nix-command --extra-experimental-features flakes eval --json --file /home/marsh/scratch/
89
+
wire-tutorial/hive.nix topLevels.virtual-machine --log-format internal-json failed (reason: known-status) with code 1 (last 20
90
+
lines):
91
+
error:
92
+
… while evaluating '(evaluateNode node).config.system.build.toplevel' to select 'drvPath' on it
93
+
at /nix/store/5pfz0v479gnciac17rcqi2gwyz8pl4s0-source/runtime/evaluate.nix:65:23:
94
+
64|
95
+
65| getTopLevel = node: (evaluateNode node).config.system.build.toplevel.drvPath;
96
+
| ^
97
+
66| in
98
+
99
+
… while calling the 'head' builtin
100
+
at /nix/store/n3d1ricw0cb5jd8vvfym6ig0mw7x7sv9-source/lib/attrsets.nix:1701:13:
101
+
1700| if length values == 1 || pred here (elemAt values 1) (head values) then
102
+
1701| head values
103
+
| ^
104
+
1702| else
105
+
106
+
(stack trace truncated; use '--show-trace' to show the full trace)
107
+
108
+
error:
109
+
Failed assertions:
110
+
- The ‘fileSystems’ option does not specify your root file system.
111
+
- You must set the option ‘boot.loader.grub.devices’ or 'boot.loader.grub.mirroredBoots' to make the system bootable.
112
+
trace: evaluation warning: system.stateVersion is not set, defaulting to 25.11. Read why this matters on https://nixos.org/
113
+
manual/nixos/stable/options.html#opt-system.stateVersion.
114
+
115
+
```
116
+
117
+
The command complained about not defining any fileSystems or a boot loader.
118
+
The `${sources.nixpkgs}/nixos/modules/virtualisation/qemu-vm.nix` imported in
119
+
`vm.nix` does
120
+
extra work to make our virtual machine work, which we are currently missing.
121
+
122
+
## Importing `vm.nix`
123
+
124
+
Let's import our `vm.nix` to this hive to fix our evaluation errors.
125
+
Additionally, add a new package such as `vim` to our configuration:
126
+
127
+
```nix:line-numbers [hive.nix]
128
+
let
129
+
sources = import ./npins;
130
+
wire = import sources.wire;
131
+
in
132
+
wire.makeHive {
133
+
meta.nixpkgs = import sources.nixpkgs { };
134
+
135
+
virtual-machine = { pkgs, ... }: {
136
+
deployment.target = {
137
+
port = 2222;
138
+
hosts = [ "localhost" ];
139
+
};
140
+
141
+
imports = [ # [!code ++]
142
+
./vm.nix # [!code ++]
143
+
]; # [!code ++]
144
+
145
+
environment.systemPackages = [ pkgs.vim ]; # [!code ++]
146
+
147
+
nixpkgs.hostPlatform = "x86_64-linux";
148
+
};
149
+
}
150
+
```
151
+
152
+
## Our first deploy
153
+
154
+
Trying our basic `wire apply` again with these changes:
155
+
156
+
```sh
157
+
[nix-shell]$ wire apply
158
+
...
159
+
INFO lib::nix_log: stopping the following units: boot.mount
160
+
INFO lib::nix_log: NOT restarting the following changed units: systemd-fsck@dev-disk-by\x2dlabel-ESP.service
161
+
INFO lib::nix_log: activating the configuration...
162
+
INFO lib::nix_log: setting up /etc...
163
+
INFO lib::nix_log: restarting systemd...
164
+
INFO lib::nix_log: reloading user units for root...
165
+
INFO lib::nix_log: restarting sysinit-reactivation.target
166
+
INFO lib::nix_log: reloading the following units: dbus.service
167
+
INFO lib::nix_log: the following new units were started: boot.automount, sysinit-reactivation.target, systemd-tmpfiles-resetup.service
168
+
INFO apply{goal=Switch on=}:goal{node=virtual-machine}: lib::hive::node: Executing step `Upload key @ PostActivation`
169
+
INFO apply{goal=Switch on=}: wire::apply: Successfully applied goal to 1 node(s): [Name("virtual-machine")]
170
+
```
171
+
172
+
Now, let's confirm these changes were applied to the virtual machine by executing
173
+
`vim` in the virtual machine window:
174
+
175
+
```sh [Virtual Machine]
176
+
[root@wire-tutorial:~]# vim --version
177
+
VIM - Vi IMproved 9.1 (2024 Jan 02, compiled Jan 01 1980 00:00:00)
178
+
```
179
+
180
+
Nice! You successfully deployed a new NixOS configuration to a **remote host**!
181
+
182
+
::: info
183
+
This followed common steps of adding the node's `deployment.target` details and
184
+
importing its pre-existing NixOS configuration (in this case, `vm.nix`), a
185
+
pattern you'll be using a lot if you choose to adopt wire.
186
+
:::
187
+
188
+
In the next section, we'll cover how to deploy secrets / keys to our remote node.
+40
doc/tutorial/part-one/nix-setup.md
+40
doc/tutorial/part-one/nix-setup.md
···
1
+
---
2
+
comment: true
3
+
title: Nix Setup
4
+
description: Installing npins, nix, and enabling the binary cache.
5
+
---
6
+
7
+
# Nix Setup
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
::: warning
12
+
This page is for the purposes of the **Tutorial**.
13
+
You should read [How-to Guides - Install wire](/guides/installation.html) for installing wire for
14
+
regular use.
15
+
:::
16
+
17
+
## Nix Installation
18
+
19
+
You should install nix if you do not have it on your system already.
20
+
There are detailed steps to installing Nix on [nix.dev](https://nix.dev/install-nix).
21
+
22
+
By the end of the installation, you should see something like this:
23
+
24
+
```sh
25
+
$ nix --version
26
+
nix (Nix) 2.11.0
27
+
```
28
+
29
+
## Binary Cache
30
+
31
+
Because wire can be heavy to compile, it is distributed with a [binary
32
+
cache](https://wiki.nixos.org/wiki/Binary_Cache).
33
+
34
+
You must enable the [garnix binary cache](https://garnix.io/docs/caching) or you
35
+
will be compiling everything from source:
36
+
37
+
::: code-group
38
+
<<< @/snippets/tutorial/cache.conf [nix.conf]
39
+
<<< @/snippets/tutorial/cache.nix [configuration.nix]
40
+
:::
+116
doc/tutorial/part-one/repo-setup.md
+116
doc/tutorial/part-one/repo-setup.md
···
1
+
---
2
+
comment: true
3
+
title: Preparing Repo & Shell
4
+
description: Adding npins sources and a nix development shell.
5
+
---
6
+
7
+
# Preparing Repo & Shell
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Initialising with Git & `npins`
12
+
13
+
First, let's create an ad hoc shell to bring these two tools into our $PATH.
14
+
15
+
```sh
16
+
$ nix-shell -p git npins
17
+
[nix-shell]$ git --version
18
+
git version 2.51.0
19
+
[nix-shell]$ npins --version
20
+
npins 0.3.1
21
+
```
22
+
23
+
Great! Now let's use Git & `npins` to create a new Git repo and initialise it.
24
+
`npins init` may take a while to download `nixpkgs`.
25
+
26
+
```sh
27
+
[nix-shell]$ git init wire-tutorial
28
+
Initialized empty Git repository in /home/.../wire-tutorial/.git/
29
+
[nix-shell]$ cd wire-tutorial/
30
+
[nix-shell]$ npins init
31
+
[INFO ] Welcome to npins!
32
+
[INFO ] Creating `npins` directory
33
+
[INFO ] Writing default.nix
34
+
[INFO ] Writing initial lock file (empty)
35
+
[INFO ] Successfully written initial files to 'npins/sources.json'.
36
+
```
37
+
38
+
This has created a pinned version of `nixpkgs` for us to use in our wire hive.
39
+
40
+
## Adding wire as a dependency
41
+
42
+
We now need to tell `npins` to use `forallsys/wire` as a dependency.
43
+
44
+
```sh
45
+
[nix-shell]$ npins add github forallsys wire --branch stable
46
+
[INFO ] Adding 'wire' …
47
+
repository: https://github.com/forallsys/wire.git
48
+
pre_releases: false
49
+
submodules: false
50
+
version: v0.4.0
51
+
revision: f33d80c15b17c85d557d533441609a59a2210941
52
+
hash: 0wgah341hvjpvppkgwjrj50rvzf56ccmjz720xsl3mw38h9nn6sr
53
+
frozen: false
54
+
```
55
+
56
+
Great, now lets confirm the two dependencies we have added to this `npins`
57
+
project:
58
+
59
+
```sh
60
+
[nix-shell]$ npins show
61
+
nixpkgs: (git repository)
62
+
repository: https://github.com/pkpbynum/nixpkgs.git
63
+
branch: pb/disk-size-bootloader
64
+
submodules: false
65
+
revision: da2060bdc1c9bc35acc4eafa265ba6b6c64f9926
66
+
url: https://github.com/pkpbynum/nixpkgs/archive/da2060bdc1c9bc35acc4eafa265ba6b6c64f9926.tar.gz
67
+
hash: 0j07gvnm7c5mzw1313asa8limzbmsbnsd02dcw22ing8fg3vbb7g
68
+
frozen: false
69
+
70
+
wire: (git release tag)
71
+
repository: https://github.com/forallsys/wire.git
72
+
pre_releases: false
73
+
submodules: false
74
+
version: v0.4.0
75
+
revision: f33d80c15b17c85d557d533441609a59a2210941
76
+
hash: 0wgah341hvjpvppkgwjrj50rvzf56ccmjz720xsl3mw38h9nn6sr
77
+
frozen: false
78
+
```
79
+
80
+
## Creating a `shell.nix`
81
+
82
+
Open a text editor to edit `shell.nix` in the `wire-tutorial` directory.
83
+
84
+
```nix:line-numbers [shell.nix]
85
+
let
86
+
sources = import ./npins;
87
+
pkgs = import sources.nixpkgs { };
88
+
wire = import sources.wire;
89
+
in
90
+
pkgs.mkShell {
91
+
packages = [
92
+
wire.packages.x86_64-linux.wire-small
93
+
pkgs.npins
94
+
pkgs.git
95
+
];
96
+
97
+
shellHook = ''
98
+
export NIX_PATH="nixpkgs=${sources.nixpkgs.outPath}"
99
+
'';
100
+
}
101
+
```
102
+
103
+
You should now `exit` to quit the old shell, and
104
+
enter a new shell with `nix-shell`. Since we added wire as a package, our new
105
+
shell should have wire in the $PATH:
106
+
107
+
```sh
108
+
[nix-shell]$ exit
109
+
exit
110
+
$ cd wire-tutorial/
111
+
$ nix-shell
112
+
[nix-shell]$ wire --version
113
+
wire 0.5.0
114
+
Debug: Hive::SCHEMA_VERSION 0
115
+
116
+
```
+161
doc/tutorial/part-one/vm-setup.md
+161
doc/tutorial/part-one/vm-setup.md
···
1
+
---
2
+
comment: true
3
+
title: Creating a Virtual Machine
4
+
description: Creating a NixOS virtual machine to use as a deployment target.
5
+
---
6
+
7
+
# Creating a Virtual Machine
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Creating a `vm.nix`
12
+
13
+
For this step, you'll need your ssh public key, which you can obtain from
14
+
`ssh-add -L`.
15
+
16
+
Open a text editor and edit `vm.nix`. Place in it this basic NixOS
17
+
virtual machine configuration, which enables openssh and forwards its port 22:
18
+
19
+
```nix:line-numbers [vm.nix]
20
+
let
21
+
sources = import ./npins;
22
+
in
23
+
{
24
+
imports = [ "${sources.nixpkgs}/nixos/modules/virtualisation/qemu-vm.nix" ];
25
+
26
+
networking.hostName = "wire-tutorial";
27
+
28
+
users.users.root = {
29
+
initialPassword = "root";
30
+
openssh.authorizedKeys.keys = [
31
+
# I made this a nix syntax error so you're forced to deal with it!
32
+
<your ssh public-key as a string>
33
+
];
34
+
};
35
+
36
+
boot = {
37
+
loader = {
38
+
systemd-boot.enable = true;
39
+
efi.canTouchEfiVariables = true;
40
+
};
41
+
42
+
kernelParams = [ "console=ttyS0" ];
43
+
44
+
growPartition = true;
45
+
};
46
+
47
+
# enable openssh
48
+
services = {
49
+
openssh = {
50
+
enable = true;
51
+
settings.PermitRootLogin = "yes";
52
+
};
53
+
54
+
getty.autologinUser = "root";
55
+
};
56
+
57
+
virtualisation = {
58
+
graphics = false;
59
+
useBootLoader = true;
60
+
61
+
# use a 5gb disk
62
+
diskSize = 5 * 1024;
63
+
64
+
# grow the filesystem to fit the 5 gb we reserved
65
+
fileSystems."/".autoResize = true;
66
+
67
+
# forward `openssh` port 22 to localhost:2222.
68
+
forwardPorts = [
69
+
{
70
+
from = "host";
71
+
host.port = 2222;
72
+
guest.port = 22;
73
+
}
74
+
];
75
+
};
76
+
77
+
system.stateVersion = "23.11";
78
+
}
79
+
```
80
+
81
+
If you like, you may take a moment to understand each line of this
82
+
configuration.
83
+
84
+
## Building & Running the virtual machine
85
+
86
+
Open a separate Terminal tab/window/instance, ensuring you enter the development
87
+
shell with `nix-shell`.
88
+
Then, build the virtual machine with a bootloader,
89
+
taking our `vm.nix` as the nixos configuration.
90
+
91
+
```sh
92
+
$ nix-shell
93
+
[nix-shell]$ nix-build '<nixpkgs/nixos>' -A vmWithBootLoader -I nixos-config=./vm.nix
94
+
```
95
+
96
+
::: tip HELP
97
+
98
+
If you got an error such as
99
+
100
+
```
101
+
error: The option `...' in `...' is already declared in `...'.
102
+
```
103
+
104
+
make sure you ran the above command in the `nix-shell`!
105
+
106
+
:::
107
+
108
+
Building the virtual machine can take some time, but once it completes, start it
109
+
by running:
110
+
111
+
```sh
112
+
[nix-shell]$ ./result/bin/run-wire-tutorial-vm
113
+
```
114
+
115
+
You will see boot-up logs fly across the screen and eventually you will be placed
116
+
into a shell inside the virtual machine.
117
+
118
+
```sh [Virtual Machine]
119
+
running activation script...
120
+
setting up /etc...
121
+
122
+
Welcome to NixOS 25.11 (Xantusia)!
123
+
124
+
[ OK ] Created slice Slice /system/getty.
125
+
[ OK ] Created slice Slice /system/modprobe.
126
+
...
127
+
<<< Welcome to NixOS 25.11pre861972.88cef159e47c (x86_64) - hvc0 >>>
128
+
129
+
Run 'nixos-help' for the NixOS manual.
130
+
131
+
wire-tutorial login: root (automatic login)
132
+
133
+
[root@wire-tutorial:~]#
134
+
135
+
```
136
+
137
+
::: details
138
+
Further details on how the above commands work can be found at
139
+
[nix.dev](https://nix.dev/tutorials/nixos/nixos-configuration-on-vm.html#creating-a-qemu-based-virtual-machine-from-a-nixos-configuration)
140
+
:::
141
+
142
+
## Summary
143
+
144
+
Congratulations, you created a virtual machine in your terminal.
145
+
We'll be deploying to this virtual machine, so keep the
146
+
terminal instance open.
147
+
148
+
::: info
149
+
From now on, commands run inside the virtual machine will be preceded by the
150
+
following prompt:
151
+
152
+
```sh [Virtual Machine]
153
+
[root@wire-tutorial:~]#
154
+
155
+
```
156
+
157
+
:::
158
+
159
+
::: tip
160
+
If you ever want to quit the virtual machine, run the command `poweroff`.
161
+
:::
+142
doc/tutorial/part-two/basic-keys.md
+142
doc/tutorial/part-two/basic-keys.md
···
1
+
---
2
+
comment: true
3
+
title: Deployment Keys Basics
4
+
description: Deploy some basic secrets with the wire tool.
5
+
---
6
+
7
+
# Deployment Keys Basics
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
## Creating a `secrets.nix`
12
+
13
+
Let's create a NixOS module that will contain our secret keys, and import it:
14
+
15
+
```nix:line-numbers [hive.nix]
16
+
let
17
+
sources = import ./npins;
18
+
wire = import sources.wire;
19
+
in
20
+
wire.makeHive {
21
+
meta.nixpkgs = import sources.nixpkgs { };
22
+
23
+
virtual-machine = { pkgs, ... }: {
24
+
deployment.target = {
25
+
port = 2222;
26
+
hosts = [ "localhost" ];
27
+
};
28
+
29
+
imports = [
30
+
./vm.nix
31
+
./secrets.nix # [!code ++]
32
+
];
33
+
34
+
environment.systemPackages = [ pkgs.vim ];
35
+
36
+
nixpkgs.hostPlatform = "x86_64-linux";
37
+
};
38
+
}
39
+
```
40
+
41
+
```nix:line-numbers [secrets.nix]
42
+
{
43
+
deployment.keys = {
44
+
# the key's unique name is `"basic.txt"`.
45
+
"basic.txt" = {
46
+
# In this key's case, the source is a literal string:
47
+
source = ''
48
+
Hello World
49
+
'';
50
+
};
51
+
};
52
+
}
53
+
```
54
+
55
+
::: details
56
+
Further details on the `deployment.keys` options can be found
57
+
[in the reference](/reference/module.html#deployment-keys)
58
+
:::
59
+
60
+
Once we deploy this new configuration to the virtual machine,
61
+
`/run/keys/basic.txt` will be created with the contents of the key.
62
+
63
+
```sh
64
+
[nix-shell]$ wire apply keys
65
+
WARN lib::nix_log: Store URL: ssh://root@localhost
66
+
(root@localhost) Password:
67
+
68
+
```
69
+
70
+
```sh [Virtual Machine]
71
+
[root@wire-tutorial:~]# cat /run/keys/basic.txt
72
+
Hello World
73
+
74
+
```
75
+
76
+
You successfully deployed your first, albeit not-so-secret, secret key! Let's
77
+
move on from literal-text keys and use something a bit more powerful.
78
+
79
+
## File-sourced keys <Badge type="info">Optional</Badge>
80
+
81
+
This section is optional to try, but you can also pass `deployment.keys.<name>.source`
82
+
a file path. Its contents are read and treated as literal text.
83
+
84
+
```sh
85
+
$ echo hello world > very-important-secret.txt
86
+
```
87
+
88
+
```nix:line-numbers [secrets.nix]
89
+
{
90
+
deployment.keys = {
91
+
# ...
92
+
93
+
"very-important-secret.txt" = { # [!code ++]
94
+
source = ./very-important-secret.txt; # [!code ++]
95
+
}; # [!code ++]
96
+
};
97
+
}
98
+
```
99
+
100
+
```sh [Virtual Machine]
101
+
[root@wire-tutorial:~]# cat /run/keys/very-important-secret.txt
102
+
hello world
103
+
104
+
```
105
+
106
+
## Command-sourced keys
107
+
108
+
Command-sourced keys are where the real power of wire keys lies. By passing a
109
+
list of strings, wire will execute them as a command and create a key out of its `stdout`.
110
+
111
+
Because the command's output is never written to the nix store, these can be
112
+
considered real secrets.
113
+
114
+
To create a basic example, update your `secrets.nix` to include a secret that
115
+
echos "hello world":
116
+
117
+
```nix:line-numbers [secrets.nix]
118
+
{
119
+
deployment.keys = {
120
+
# ...
121
+
122
+
"command.txt" = { # [!code ++]
123
+
source = [ # [!code ++]
124
+
"echo" # [!code ++]
125
+
"hello world" # [!code ++]
126
+
]; # [!code ++]
127
+
}; # [!code ++]
128
+
};
129
+
}
130
+
```
131
+
132
+
After a quick `wire apply keys`, the `/run/keys/command.txt` file is
133
+
created:
134
+
135
+
```sh [Virtual Machine]
136
+
[root@wire-tutorial:~]# cat /run/keys/command.txt
137
+
hello world
138
+
139
+
```
140
+
141
+
Hopefully you can see the potential of command-sourced keys, as these are the
142
+
basic building block of how we achieve encrypted secrets with wire.
+107
doc/tutorial/part-two/encryption.md
+107
doc/tutorial/part-two/encryption.md
···
1
+
---
2
+
comment: true
3
+
title: Encrypted Deployment Keys
4
+
description: Deploy an age-encrypted secret with the wire tool.
5
+
---
6
+
7
+
# Encrypted Deployment Keys
8
+
9
+
{{ $frontmatter.description }}
10
+
11
+
::: tip
12
+
For this tutorial we will be using [`age`](https://github.com/FiloSottile/age),
13
+
but other encryption CLI tools work just as well such as GnuPG.
14
+
:::
15
+
16
+
## Installing age
17
+
18
+
Alter your shell.nix to include age:
19
+
20
+
```nix:line-numbers [shell.nix]
21
+
let
22
+
sources = import ./npins;
23
+
pkgs = import sources.nixpkgs { };
24
+
wire = import sources.wire;
25
+
in
26
+
pkgs.mkShell {
27
+
packages = [
28
+
wire.packages.x86_64-linux.wire-small
29
+
pkgs.npins
30
+
pkgs.git
31
+
pkgs.age # [!code ++]
32
+
];
33
+
34
+
shellHook = ''
35
+
export NIX_PATH="nixpkgs=${sources.nixpkgs.outPath}"
36
+
'';
37
+
}
38
+
```
39
+
40
+
Quit and re-open your shell, and confirm age is now available:
41
+
42
+
```sh
43
+
[nix-shell]$ exit
44
+
exit
45
+
$ nix-shell
46
+
[nix-shell]$ age --version
47
+
1.2.1
48
+
49
+
```
50
+
51
+
## Encrypting a secret
52
+
53
+
First create an age private key:
54
+
55
+
```sh
56
+
[nix-shell]$ age-keygen -o key.txt
57
+
Public key: age1j08s3kmr8zw4w8k99vs4nut5mg03dm8nfuaajuekdyzlujxply5qwsv4g0
58
+
59
+
```
60
+
61
+
::: details
62
+
Further details on how age works can be found in the
63
+
[age manual](https://man.archlinux.org/man/age.1.en.txt).
64
+
:::
65
+
66
+
Now, let's encrypt the words `"encrypted string!"` with age and save it to the
67
+
file `top-secret.age`.
68
+
69
+
We will use a pipeline to echo the secret string into
70
+
age, and use `age-keygen -y` to give age the public key we generated, then we
71
+
use the redirection operator to save the encrypted data to `top-secret.age`.
72
+
73
+
```sh
74
+
[nix-shell]$ echo "encrypted string!" | age --encrypt --recipient $(age-keygen -y key.txt) > top-secret.age
75
+
```
76
+
77
+
## Adding an age-encrypted key
78
+
79
+
Now, lets combine our previous command-sourced key with `age`. Pass the
80
+
arguments `age --decrypt --identity key.txt ./top-secret.age` to wire:
81
+
82
+
```nix:line-numbers [secrets.nix]
83
+
{
84
+
deployment.keys = {
85
+
# ...
86
+
87
+
"top-secret" = { # [!code ++]
88
+
source = [ # [!code ++]
89
+
"age" # [!code ++]
90
+
"--decrypt" # [!code ++]
91
+
"--identity" # [!code ++]
92
+
"key.txt" # [!code ++]
93
+
"${./top-secret.age}" # [!code ++]
94
+
]; # [!code ++]
95
+
}; # [!code ++]
96
+
};
97
+
}
98
+
```
99
+
100
+
One `wire apply keys` later, and you have successfully deployed an encrypted
101
+
key:
102
+
103
+
```sh [Virtual Machine]
104
+
[root@wire-tutorial:~]# cat /run/keys/top-secret
105
+
encrypted string!
106
+
107
+
```
+131
-47
flake.lock
+131
-47
flake.lock
···
1
1
{
2
2
"nodes": {
3
+
"colmena_benchmarking": {
4
+
"inputs": {
5
+
"flake-compat": "flake-compat",
6
+
"flake-utils": "flake-utils",
7
+
"nixpkgs": "nixpkgs",
8
+
"stable": "stable"
9
+
},
10
+
"locked": {
11
+
"lastModified": 1684127108,
12
+
"narHash": "sha256-01bfuSY4gnshhtqA1EJCw2CMsKkAx+dHS+sEpQ2+EAQ=",
13
+
"owner": "zhaofengli",
14
+
"repo": "colmena",
15
+
"rev": "5fdd743a11e7291bd8ac1e169d62ba6156c99be4",
16
+
"type": "github"
17
+
},
18
+
"original": {
19
+
"owner": "zhaofengli",
20
+
"ref": "v0.4.0",
21
+
"repo": "colmena",
22
+
"type": "github"
23
+
}
24
+
},
3
25
"crane": {
4
26
"locked": {
5
-
"lastModified": 1756705356,
6
-
"narHash": "sha256-dpBFe8SqYKr7W6KN5QOVCr8N76SBKwTslzjw+4BVBVs=",
27
+
"lastModified": 1763938834,
28
+
"narHash": "sha256-j8iB0Yr4zAvQLueCZ5abxfk6fnG/SJ5JnGUziETjwfg=",
7
29
"owner": "ipetkov",
8
30
"repo": "crane",
9
-
"rev": "305707bbc27d83aa1039378e91d7dd816f4cac10",
31
+
"rev": "d9e753122e51cee64eb8d2dddfe11148f339f5a2",
10
32
"type": "github"
11
33
},
12
34
"original": {
···
23
45
"rust-analyzer-src": "rust-analyzer-src"
24
46
},
25
47
"locked": {
26
-
"lastModified": 1756795219,
27
-
"narHash": "sha256-tKBQtz1JLKWrCJUxVkHKR+YKmVpm0KZdJdPWmR2slQ8=",
48
+
"lastModified": 1764571808,
49
+
"narHash": "sha256-+oo9W5rz03TjfpNqDSLEQwgKiuBbjrHdORyTHli2RuM=",
28
50
"owner": "nix-community",
29
51
"repo": "fenix",
30
-
"rev": "80dbdab137f2809e3c823ed027e1665ce2502d74",
52
+
"rev": "df3c2e78ec13418f85c1f26e77a50f865ec57d38",
31
53
"type": "github"
32
54
},
33
55
"original": {
···
37
59
}
38
60
},
39
61
"flake-compat": {
62
+
"flake": false,
40
63
"locked": {
41
-
"lastModified": 1747046372,
42
-
"narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=",
64
+
"lastModified": 1650374568,
65
+
"narHash": "sha256-Z+s0J8/r907g149rllvwhb4pKi8Wam5ij0st8PwAh+E=",
43
66
"owner": "edolstra",
44
67
"repo": "flake-compat",
45
-
"rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
68
+
"rev": "b4a34015c698c7793d592d66adbab377907a2be8",
46
69
"type": "github"
47
70
},
48
71
"original": {
···
52
75
}
53
76
},
54
77
"flake-compat_2": {
78
+
"locked": {
79
+
"lastModified": 1751685974,
80
+
"narHash": "sha256-NKw96t+BgHIYzHUjkTK95FqYRVKB8DHpVhefWSz/kTw=",
81
+
"rev": "549f2762aebeff29a2e5ece7a7dc0f955281a1d1",
82
+
"type": "tarball",
83
+
"url": "https://git.lix.systems/api/v1/repos/lix-project/flake-compat/archive/549f2762aebeff29a2e5ece7a7dc0f955281a1d1.tar.gz?rev=549f2762aebeff29a2e5ece7a7dc0f955281a1d1"
84
+
},
85
+
"original": {
86
+
"type": "tarball",
87
+
"url": "https://git.lix.systems/lix-project/flake-compat/archive/main.tar.gz"
88
+
}
89
+
},
90
+
"flake-compat_3": {
55
91
"flake": false,
56
92
"locked": {
57
-
"lastModified": 1747046372,
58
-
"narHash": "sha256-CIVLLkVgvHYbgI2UpXvIIBJ12HWgX+fjA8Xf8PUmqCY=",
93
+
"lastModified": 1761588595,
94
+
"narHash": "sha256-XKUZz9zewJNUj46b4AJdiRZJAvSZ0Dqj2BNfXvFlJC4=",
59
95
"owner": "edolstra",
60
96
"repo": "flake-compat",
61
-
"rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
97
+
"rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
62
98
"type": "github"
63
99
},
64
100
"original": {
···
72
108
"nixpkgs-lib": "nixpkgs-lib"
73
109
},
74
110
"locked": {
75
-
"lastModified": 1756770412,
76
-
"narHash": "sha256-+uWLQZccFHwqpGqr2Yt5VsW/PbeJVTn9Dk6SHWhNRPw=",
111
+
"lastModified": 1763759067,
112
+
"narHash": "sha256-LlLt2Jo/gMNYAwOgdRQBrsRoOz7BPRkzvNaI/fzXi2Q=",
77
113
"owner": "hercules-ci",
78
114
"repo": "flake-parts",
79
-
"rev": "4524271976b625a4a605beefd893f270620fd751",
115
+
"rev": "2cccadc7357c0ba201788ae99c4dfa90728ef5e0",
80
116
"type": "github"
81
117
},
82
118
"original": {
···
85
121
"type": "github"
86
122
}
87
123
},
124
+
"flake-utils": {
125
+
"locked": {
126
+
"lastModified": 1659877975,
127
+
"narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=",
128
+
"owner": "numtide",
129
+
"repo": "flake-utils",
130
+
"rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0",
131
+
"type": "github"
132
+
},
133
+
"original": {
134
+
"owner": "numtide",
135
+
"repo": "flake-utils",
136
+
"type": "github"
137
+
}
138
+
},
88
139
"git-hooks": {
89
140
"inputs": {
90
-
"flake-compat": "flake-compat_2",
141
+
"flake-compat": "flake-compat_3",
91
142
"gitignore": "gitignore",
92
-
"nixpkgs": "nixpkgs"
143
+
"nixpkgs": "nixpkgs_2"
93
144
},
94
145
"locked": {
95
-
"lastModified": 1755960406,
96
-
"narHash": "sha256-RF7j6C1TmSTK9tYWO6CdEMtg6XZaUKcvZwOCD2SICZs=",
146
+
"lastModified": 1763988335,
147
+
"narHash": "sha256-QlcnByMc8KBjpU37rbq5iP7Cp97HvjRP0ucfdh+M4Qc=",
97
148
"owner": "cachix",
98
149
"repo": "git-hooks.nix",
99
-
"rev": "e891a93b193fcaf2fc8012d890dc7f0befe86ec2",
150
+
"rev": "50b9238891e388c9fdc6a5c49e49c42533a1b5ce",
100
151
"type": "github"
101
152
},
102
153
"original": {
···
143
194
},
144
195
"nixpkgs": {
145
196
"locked": {
146
-
"lastModified": 1754340878,
147
-
"narHash": "sha256-lgmUyVQL9tSnvvIvBp7x1euhkkCho7n3TMzgjdvgPoU=",
197
+
"lastModified": 1683408522,
198
+
"narHash": "sha256-9kcPh6Uxo17a3kK3XCHhcWiV1Yu1kYj22RHiymUhMkU=",
148
199
"owner": "NixOS",
149
200
"repo": "nixpkgs",
150
-
"rev": "cab778239e705082fe97bb4990e0d24c50924c04",
201
+
"rev": "897876e4c484f1e8f92009fd11b7d988a121a4e7",
151
202
"type": "github"
152
203
},
153
204
"original": {
154
205
"owner": "NixOS",
155
-
"ref": "nixpkgs-unstable",
206
+
"ref": "nixos-unstable",
156
207
"repo": "nixpkgs",
157
208
"type": "github"
158
209
}
159
210
},
160
211
"nixpkgs-lib": {
161
212
"locked": {
162
-
"lastModified": 1754788789,
163
-
"narHash": "sha256-x2rJ+Ovzq0sCMpgfgGaaqgBSwY+LST+WbZ6TytnT9Rk=",
213
+
"lastModified": 1761765539,
214
+
"narHash": "sha256-b0yj6kfvO8ApcSE+QmA6mUfu8IYG6/uU28OFn4PaC8M=",
164
215
"owner": "nix-community",
165
216
"repo": "nixpkgs.lib",
166
-
"rev": "a73b9c743612e4244d865a2fdee11865283c04e6",
217
+
"rev": "719359f4562934ae99f5443f20aa06c2ffff91fc",
167
218
"type": "github"
168
219
},
169
220
"original": {
···
174
225
},
175
226
"nixpkgs_2": {
176
227
"locked": {
177
-
"lastModified": 1756787288,
178
-
"narHash": "sha256-rw/PHa1cqiePdBxhF66V7R+WAP8WekQ0mCDG4CFqT8Y=",
228
+
"lastModified": 1759417375,
229
+
"narHash": "sha256-O7eHcgkQXJNygY6AypkF9tFhsoDQjpNEojw3eFs73Ow=",
230
+
"owner": "NixOS",
231
+
"repo": "nixpkgs",
232
+
"rev": "dc704e6102e76aad573f63b74c742cd96f8f1e6c",
233
+
"type": "github"
234
+
},
235
+
"original": {
179
236
"owner": "NixOS",
237
+
"ref": "nixpkgs-unstable",
180
238
"repo": "nixpkgs",
181
-
"rev": "d0fc30899600b9b3466ddb260fd83deb486c32f1",
239
+
"type": "github"
240
+
}
241
+
},
242
+
"nixpkgs_3": {
243
+
"locked": {
244
+
"lastModified": 1764517877,
245
+
"narHash": "sha256-pp3uT4hHijIC8JUK5MEqeAWmParJrgBVzHLNfJDZxg4=",
246
+
"owner": "NixOS",
247
+
"repo": "nixpkgs",
248
+
"rev": "2d293cbfa5a793b4c50d17c05ef9e385b90edf6c",
182
249
"type": "github"
183
250
},
184
251
"original": {
···
188
255
"type": "github"
189
256
}
190
257
},
191
-
"nixpkgs_3": {
258
+
"nixpkgs_4": {
192
259
"locked": {
193
-
"lastModified": 1754340878,
194
-
"narHash": "sha256-lgmUyVQL9tSnvvIvBp7x1euhkkCho7n3TMzgjdvgPoU=",
260
+
"lastModified": 1761236834,
261
+
"narHash": "sha256-+pthv6hrL5VLW2UqPdISGuLiUZ6SnAXdd2DdUE+fV2Q=",
195
262
"owner": "nixos",
196
263
"repo": "nixpkgs",
197
-
"rev": "cab778239e705082fe97bb4990e0d24c50924c04",
264
+
"rev": "d5faa84122bc0a1fd5d378492efce4e289f8eac1",
198
265
"type": "github"
199
266
},
200
267
"original": {
···
206
273
},
207
274
"nixpkgs_current_stable": {
208
275
"locked": {
209
-
"lastModified": 1756886854,
210
-
"narHash": "sha256-6tooT142NLcFjt24Gi4B0G1pgWLvfw7y93sYEfSHlLI=",
276
+
"lastModified": 1764522689,
277
+
"narHash": "sha256-SqUuBFjhl/kpDiVaKLQBoD8TLD+/cTUzzgVFoaHrkqY=",
211
278
"owner": "NixOS",
212
279
"repo": "nixpkgs",
213
-
"rev": "0e6684e6c5755325f801bda1751a8a4038145d7d",
280
+
"rev": "8bb5646e0bed5dbd3ab08c7a7cc15b75ab4e1d0f",
214
281
"type": "github"
215
282
},
216
283
"original": {
217
284
"owner": "NixOS",
218
-
"ref": "nixos-25.05",
285
+
"ref": "nixos-25.11",
219
286
"repo": "nixpkgs",
220
287
"type": "github"
221
288
}
222
289
},
223
290
"root": {
224
291
"inputs": {
292
+
"colmena_benchmarking": "colmena_benchmarking",
225
293
"crane": "crane",
226
294
"fenix": "fenix",
227
-
"flake-compat": "flake-compat",
295
+
"flake-compat": "flake-compat_2",
228
296
"flake-parts": "flake-parts",
229
297
"git-hooks": "git-hooks",
230
298
"linux-systems": "linux-systems",
231
-
"nixpkgs": "nixpkgs_2",
299
+
"nixpkgs": "nixpkgs_3",
232
300
"nixpkgs_current_stable": "nixpkgs_current_stable",
233
301
"systems": "systems",
234
302
"treefmt-nix": "treefmt-nix"
···
237
305
"rust-analyzer-src": {
238
306
"flake": false,
239
307
"locked": {
240
-
"lastModified": 1756597274,
241
-
"narHash": "sha256-wfaKRKsEVQDB7pQtAt04vRgFphkVscGRpSx3wG1l50E=",
308
+
"lastModified": 1764525349,
309
+
"narHash": "sha256-vR3vU9AwzMsBvjNeeG2inA5W/2MwseFk5NIIrLFEMHk=",
242
310
"owner": "rust-lang",
243
311
"repo": "rust-analyzer",
244
-
"rev": "21614ed2d3279a9aa1f15c88d293e65a98991b30",
312
+
"rev": "d646b23f000d099d845f999c2c1e05b15d9cdc78",
245
313
"type": "github"
246
314
},
247
315
"original": {
···
251
319
"type": "github"
252
320
}
253
321
},
322
+
"stable": {
323
+
"locked": {
324
+
"lastModified": 1669735802,
325
+
"narHash": "sha256-qtG/o/i5ZWZLmXw108N2aPiVsxOcidpHJYNkT45ry9Q=",
326
+
"owner": "NixOS",
327
+
"repo": "nixpkgs",
328
+
"rev": "731cc710aeebecbf45a258e977e8b68350549522",
329
+
"type": "github"
330
+
},
331
+
"original": {
332
+
"owner": "NixOS",
333
+
"ref": "nixos-22.11",
334
+
"repo": "nixpkgs",
335
+
"type": "github"
336
+
}
337
+
},
254
338
"systems": {
255
339
"locked": {
256
340
"lastModified": 1681028828,
···
268
352
},
269
353
"treefmt-nix": {
270
354
"inputs": {
271
-
"nixpkgs": "nixpkgs_3"
355
+
"nixpkgs": "nixpkgs_4"
272
356
},
273
357
"locked": {
274
-
"lastModified": 1756662192,
275
-
"narHash": "sha256-F1oFfV51AE259I85av+MAia221XwMHCOtZCMcZLK2Jk=",
358
+
"lastModified": 1762938485,
359
+
"narHash": "sha256-AlEObg0syDl+Spi4LsZIBrjw+snSVU4T8MOeuZJUJjM=",
276
360
"owner": "numtide",
277
361
"repo": "treefmt-nix",
278
-
"rev": "1aabc6c05ccbcbf4a635fb7a90400e44282f61c4",
362
+
"rev": "5b4ee75aeefd1e2d5a1cc43cf6ba65eba75e83e4",
279
363
"type": "github"
280
364
},
281
365
"original": {
+14
-9
flake.nix
+14
-9
flake.nix
···
1
1
{
2
2
inputs = {
3
3
flake-parts.url = "github:hercules-ci/flake-parts";
4
-
flake-compat.url = "github:edolstra/flake-compat";
4
+
flake-compat.url = "https://git.lix.systems/lix-project/flake-compat/archive/main.tar.gz";
5
5
git-hooks.url = "github:cachix/git-hooks.nix";
6
6
systems.url = "github:nix-systems/default";
7
7
crane.url = "github:ipetkov/crane";
···
14
14
linux-systems.url = "github:nix-systems/default-linux";
15
15
16
16
# testing inputs
17
-
nixpkgs_current_stable.url = "github:NixOS/nixpkgs/nixos-25.05";
17
+
nixpkgs_current_stable.url = "github:NixOS/nixpkgs/nixos-25.11";
18
+
19
+
# benchmarking
20
+
colmena_benchmarking.url = "github:zhaofengli/colmena/v0.4.0";
18
21
};
19
22
outputs =
20
23
{
21
24
self,
22
-
nixpkgs,
23
25
flake-parts,
24
26
systems,
25
27
git-hooks,
···
34
36
./nix/hooks.nix # pre-commit hooks
35
37
./nix/utils.nix # utility functions
36
38
./nix/shells.nix
37
-
./nix/checks.nix
38
-
./wire/cli
39
-
./wire/key_agent
39
+
./nix/tests.nix
40
+
./crates/cli
41
+
./crates/key_agent
40
42
./doc
41
43
./tests/nix
44
+
./runtime
45
+
./bench/runner.nix
42
46
];
43
47
systems = import systems;
44
48
45
49
flake = {
46
-
nixosModules.default = import ./runtime/module.nix;
47
50
makeHive = import ./runtime/makeHive.nix;
48
51
hydraJobs =
49
52
let
···
54
57
inherit (self.packages.x86_64-linux) docs;
55
58
}
56
59
// lib.genAttrs [ "x86_64-linux" "aarch64-linux" ] (system: {
57
-
inherit (self.packages.${system}) wire wire-small;
60
+
inherit (self.packages.${system}) wire wire-small cargo-tests;
58
61
});
59
62
60
63
tests = lib.filterAttrs (n: _: (lib.hasPrefix "vm" n)) self.checks.x86_64-linux;
64
+
inherit (self) devShells;
61
65
};
62
66
};
63
67
···
82
86
# docs only
83
87
alejandra.enable = true;
84
88
rustfmt.enable = true;
85
-
just.enable = true;
86
89
prettier.enable = true;
87
90
protolint.enable = true;
88
91
taplo.enable = true;
92
+
ruff-format.enable = true;
89
93
};
90
94
settings.formatter = {
91
95
nixfmt.excludes = [ "doc/snippets/*.nix" ];
96
+
prettier.excludes = [ ".sqlx/*" ];
92
97
alejandra = {
93
98
includes = lib.mkForce [ "doc/snippets/*.nix" ];
94
99
};
+15
garnix.yaml
+15
garnix.yaml
···
1
+
builds:
2
+
- exclude: []
3
+
include:
4
+
- packages.x86_64-linux.docs
5
+
- packages.x86_64-linux.docs-unstable
6
+
- packages.*.wire
7
+
- packages.*.wire-small
8
+
branch: trunk
9
+
- exclude: []
10
+
include:
11
+
- packages.x86_64-linux.docs
12
+
- packages.x86_64-linux.docs-unstable
13
+
- packages.*.wire
14
+
- packages.*.wire-small
15
+
branch: stable
-7
justfile
-7
justfile
-21
nix/checks.nix
-21
nix/checks.nix
···
1
-
{
2
-
perSystem =
3
-
{
4
-
craneLib,
5
-
commonArgs,
6
-
...
7
-
}:
8
-
{
9
-
checks.wire-nextest = craneLib.cargoNextest (
10
-
{
11
-
partitions = 2;
12
-
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
13
-
cargoNextestPartitionsExtraArgs = builtins.concatStringsSep " " [
14
-
"--no-tests pass"
15
-
];
16
-
17
-
}
18
-
// commonArgs
19
-
);
20
-
};
21
-
}
+26
nix/hooks.nix
+26
nix/hooks.nix
···
4
4
toolchain,
5
5
config,
6
6
lib,
7
+
pkgs,
7
8
...
8
9
}:
9
10
{
···
15
16
enable = true;
16
17
settings.edit = true;
17
18
};
19
+
zizmor.enable = true;
18
20
clippy = {
19
21
enable = true;
20
22
settings.extraArgs = "--tests";
···
22
24
inherit (toolchain) cargo clippy;
23
25
};
24
26
};
27
+
ruff.enable = true;
25
28
cargo-check = {
26
29
enable = true;
27
30
package = toolchain.cargo;
···
30
33
enable = true;
31
34
name = "nix fmt";
32
35
entry = "${lib.getExe config.formatter} --no-cache";
36
+
};
37
+
ty = {
38
+
enable = true;
39
+
name = "ty check";
40
+
files = "\\.py$";
41
+
entry = lib.getExe (
42
+
pkgs.writeShellScriptBin "ty-check" ''
43
+
cd tests/nix
44
+
${lib.getExe pkgs.uv} run ty check
45
+
''
46
+
);
47
+
};
48
+
machete = {
49
+
enable = true;
50
+
name = "cargo-machete";
51
+
files = "\\.(rs|toml)$";
52
+
entry = lib.getExe pkgs.cargo-machete;
53
+
};
54
+
typos = {
55
+
enable = true;
56
+
settings = {
57
+
configPath = "typos.toml";
58
+
};
33
59
};
34
60
35
61
};
+5
-2
nix/shells.nix
+5
-2
nix/shells.nix
+38
nix/tests.nix
+38
nix/tests.nix
···
1
+
{
2
+
perSystem =
3
+
{
4
+
craneLib,
5
+
pkgs,
6
+
commonArgs,
7
+
...
8
+
}:
9
+
let
10
+
tests = craneLib.buildPackage (
11
+
{
12
+
cargoArtifacts = craneLib.buildDepsOnly commonArgs;
13
+
doCheck = false;
14
+
15
+
doNotPostBuildInstallCargoBinaries = true;
16
+
17
+
buildPhase = ''
18
+
cargo test --no-run
19
+
'';
20
+
21
+
installPhaseCommand = ''
22
+
mkdir -p $out
23
+
cp $(ls target/debug/deps/{wire,lib,key_agent}-* | grep -v "\.d") $out
24
+
'';
25
+
}
26
+
// commonArgs
27
+
);
28
+
in
29
+
{
30
+
packages.cargo-tests = pkgs.writeShellScriptBin "run-tests" ''
31
+
set -e
32
+
for item in "${tests}"/*; do
33
+
echo "running $item"
34
+
"$item"
35
+
done
36
+
'';
37
+
};
38
+
}
+3
-1
nix/utils.nix
+3
-1
nix/utils.nix
+4
renovate.json
+4
renovate.json
+45
-11
runtime/evaluate.nix
+45
-11
runtime/evaluate.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
hive,
3
6
nixosConfigurations ? { },
4
7
}:
5
8
let
6
-
module = import ./module.nix;
9
+
module = import ./module;
7
10
8
11
mergedHive = {
9
12
meta = { };
···
24
27
!builtins.elem name [
25
28
"meta"
26
29
"defaults"
30
+
"-"
27
31
]
28
32
) (builtins.filter (name: builtins.hasAttr name hive) (builtins.attrNames mergedHive));
29
33
30
-
resolvedNixpkgs =
34
+
resolveNixpkgs =
35
+
value: help:
36
+
# support `<nixpkgs>`
37
+
if builtins.isPath value then
38
+
import value { }
39
+
# support npins sources passed directly
40
+
else if value ? "outPath" then
41
+
import value { }
42
+
# support `import <nixpkgs> { }`
43
+
else if builtins.isAttrs value then
44
+
value
45
+
else
46
+
builtins.abort "${help} was not a path, { outPath, .. }, or attrset. Was type: ${builtins.typeOf value}";
47
+
48
+
hiveGlobalNixpkgs =
31
49
if mergedHive.meta ? "nixpkgs" then
32
-
# support '<nixpkgs>' and 'import <nixpkgs> {}'
33
-
if builtins.isPath mergedHive.meta.nixpkgs then
34
-
import mergedHive.meta.nixpkgs { }
35
-
else
36
-
mergedHive.meta.nixpkgs
50
+
(resolveNixpkgs mergedHive.meta.nixpkgs "meta.nixpkgs")
37
51
else
38
52
builtins.abort "makeHive called without meta.nixpkgs specified.";
39
53
54
+
getNodeNixpkgs =
55
+
name:
56
+
if mergedHive.meta ? "nodeNixpkgs" then
57
+
if mergedHive.meta.nodeNixpkgs ? "${name}" then
58
+
(resolveNixpkgs mergedHive.meta.nodeNixpkgs.${name} "meta.nodeNixpkgs.${name}")
59
+
else
60
+
hiveGlobalNixpkgs
61
+
else
62
+
hiveGlobalNixpkgs;
63
+
64
+
nixpkgsIsFlake = nixpkgs: nixpkgs.lib.hasSuffix "-source" nixpkgs.path;
65
+
40
66
evaluateNode =
41
67
name:
42
68
let
43
-
evalConfig = import (resolvedNixpkgs.path + "/nixos/lib/eval-config.nix");
69
+
nixpkgs = getNodeNixpkgs name;
70
+
evalConfig = import (nixpkgs.path + "/nixos/lib/eval-config.nix");
44
71
in
45
72
evalConfig {
46
73
modules = [
···
48
75
49
76
mergedHive.defaults
50
77
mergedHive.${name}
51
-
];
78
+
]
79
+
++ (nixpkgs.lib.optional (nixpkgsIsFlake nixpkgs) {
80
+
config.nixpkgs.flake.source = nixpkgs.lib.mkDefault nixpkgs.path;
81
+
});
52
82
system = null;
53
83
specialArgs = {
54
84
inherit name nodes;
55
85
}
56
-
// mergedHive.meta.specialArgs or { };
86
+
// mergedHive.meta.specialArgs or { }
87
+
// mergedHive.meta.nodeSpecialArgs.${name} or { };
57
88
};
89
+
58
90
nodes = builtins.listToAttrs (
59
91
map (name: {
60
92
inherit name;
···
69
101
70
102
topLevels = builtins.mapAttrs (name: _: getTopLevel name) nodes;
71
103
inspect = {
72
-
_schema = 0;
104
+
_schema = 1;
73
105
74
106
nodes = builtins.mapAttrs (_: v: v.config.deployment) nodes;
75
107
};
108
+
109
+
names = nodeNames;
76
110
}
+3
runtime/makeHive.nix
+3
runtime/makeHive.nix
+79
runtime/module/config.nix
+79
runtime/module/config.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
{
5
+
pkgs,
6
+
lib,
7
+
config,
8
+
...
9
+
}:
10
+
{
11
+
config = {
12
+
systemd = {
13
+
paths = lib.mapAttrs' (
14
+
_name: value:
15
+
lib.nameValuePair "${value.name}-key" {
16
+
description = "Monitor changes to ${value.path}. You should Require ${value.service} instead of this.";
17
+
pathConfig = {
18
+
PathExists = value.path;
19
+
PathChanged = value.path;
20
+
Unit = "${value.name}-key.service";
21
+
};
22
+
}
23
+
) config.deployment.keys;
24
+
25
+
services = lib.mapAttrs' (
26
+
_name: value:
27
+
lib.nameValuePair "${value.name}-key" {
28
+
description = "Service that requires ${value.path}";
29
+
path = [
30
+
pkgs.inotify-tools
31
+
pkgs.coreutils
32
+
];
33
+
script = ''
34
+
MSG="Key ${value.path} exists."
35
+
systemd-notify --ready --status="$MSG"
36
+
37
+
echo "waiting to fail if the key is removed..."
38
+
39
+
while inotifywait -e delete_self "${value.path}"; do
40
+
MSG="Key ${value.path} no longer exists."
41
+
42
+
systemd-notify --status="$MSG"
43
+
echo $MSG
44
+
45
+
exit 1
46
+
done
47
+
'';
48
+
unitConfig = {
49
+
ConditionPathExists = value.path;
50
+
};
51
+
serviceConfig = {
52
+
Type = "simple";
53
+
Restart = "no";
54
+
NotifyAccess = "all";
55
+
RemainAfterExit = "yes";
56
+
};
57
+
}
58
+
) config.deployment.keys;
59
+
};
60
+
61
+
deployment = {
62
+
_keys = lib.mapAttrsToList (
63
+
_: value:
64
+
value
65
+
// {
66
+
source = {
67
+
# Attach type to internally tag serde enum
68
+
t = builtins.replaceStrings [ "path" "string" "list" ] [ "Path" "String" "Command" ] (
69
+
builtins.typeOf value.source
70
+
);
71
+
c = value.source;
72
+
};
73
+
}
74
+
) config.deployment.keys;
75
+
76
+
_hostPlatform = config.nixpkgs.hostPlatform.system;
77
+
};
78
+
};
79
+
}
+6
runtime/module/default.nix
+6
runtime/module/default.nix
+218
runtime/module/options.nix
+218
runtime/module/options.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
{
5
+
lib,
6
+
name,
7
+
...
8
+
}:
9
+
let
10
+
inherit (lib) types;
11
+
in
12
+
{
13
+
imports =
14
+
let
15
+
inherit (lib) mkAliasOptionModule;
16
+
in
17
+
[
18
+
(mkAliasOptionModule [ "deployment" "targetHost" ] [ "deployment" "target" "hosts" ])
19
+
(mkAliasOptionModule [ "deployment" "targetUser" ] [ "deployment" "target" "user" ])
20
+
(mkAliasOptionModule [ "deployment" "targetPort" ] [ "deployment" "target" "port" ])
21
+
];
22
+
23
+
options.deployment = {
24
+
target = lib.mkOption {
25
+
type = types.submodule {
26
+
imports = [
27
+
(lib.mkAliasOptionModule [ "host" ] [ "hosts" ])
28
+
];
29
+
options = {
30
+
hosts = lib.mkOption {
31
+
type = types.coercedTo types.str lib.singleton (types.listOf types.str);
32
+
description = "IPs or hostnames to attempt to connect to. They are tried in order.";
33
+
default = lib.singleton name;
34
+
apply = lib.unique;
35
+
};
36
+
user = lib.mkOption {
37
+
type = types.str;
38
+
description = "User to use for SSH. The user must be atleast `wheel` and must use an SSH key or similar
39
+
non-interactive login method. More information can be found at https://wire.althaea.zone/guides/non-root-user";
40
+
default = "root";
41
+
};
42
+
port = lib.mkOption {
43
+
type = types.int;
44
+
default = 22;
45
+
description = "SSH port to use.";
46
+
};
47
+
};
48
+
};
49
+
description = "Describes the target for this node";
50
+
default = { };
51
+
};
52
+
53
+
buildOnTarget = lib.mkOption {
54
+
type = types.bool;
55
+
default = false;
56
+
description = "Whether to build the system on the target host or not.";
57
+
};
58
+
59
+
allowLocalDeployment = lib.mkOption {
60
+
type = types.bool;
61
+
default = true;
62
+
description = "Whether to allow or deny this node being applied to localhost when the host's hostname matches the
63
+
node's name.";
64
+
};
65
+
66
+
tags = lib.mkOption {
67
+
type = types.listOf types.str;
68
+
default = [ ];
69
+
description = "Tags for node.";
70
+
example = [
71
+
"arm"
72
+
"cloud"
73
+
];
74
+
};
75
+
76
+
privilegeEscalationCommand = lib.mkOption {
77
+
type = types.listOf types.str;
78
+
description = "Command to elevate.";
79
+
default = [
80
+
"sudo"
81
+
"--"
82
+
];
83
+
};
84
+
85
+
replaceUnknownProfiles = lib.mkOption {
86
+
type = types.bool;
87
+
description = "No-op, colmena compatibility";
88
+
default = true;
89
+
};
90
+
91
+
sshOptions = lib.mkOption {
92
+
type = types.listOf types.str;
93
+
description = "No-op, colmena compatibility";
94
+
default = [ ];
95
+
};
96
+
97
+
_keys = lib.mkOption {
98
+
internal = true;
99
+
readOnly = true;
100
+
};
101
+
102
+
_hostPlatform = lib.mkOption {
103
+
internal = true;
104
+
readOnly = true;
105
+
};
106
+
107
+
keys = lib.mkOption {
108
+
type = types.attrsOf (
109
+
types.submodule (
110
+
{
111
+
name,
112
+
config,
113
+
...
114
+
}:
115
+
{
116
+
imports =
117
+
let
118
+
inherit (lib) mkAliasOptionModule;
119
+
in
120
+
[
121
+
(mkAliasOptionModule [ "keyFile" ] [ "source" ])
122
+
(mkAliasOptionModule [ "keyCommand" ] [ "source" ])
123
+
(mkAliasOptionModule [ "text" ] [ "source" ])
124
+
];
125
+
options = {
126
+
name = lib.mkOption {
127
+
type = types.str;
128
+
default = name;
129
+
description = "Filename of the secret.";
130
+
};
131
+
destDir = lib.mkOption {
132
+
type = types.path;
133
+
default = "/run/keys/";
134
+
description = "Destination directory for the secret. Change this to something other than `/run/keys/` for keys to persist past reboots.";
135
+
};
136
+
path = lib.mkOption {
137
+
internal = true;
138
+
type = types.path;
139
+
default =
140
+
if lib.hasSuffix "/" config.destDir then
141
+
"${config.destDir}${config.name}"
142
+
else
143
+
"${config.destDir}/${config.name}";
144
+
description = "Path that the key is deployed to.";
145
+
};
146
+
service = lib.mkOption {
147
+
internal = true;
148
+
type = types.str;
149
+
default = "${config.name}-key.service";
150
+
description = "Name of the systemd service that represents this key.";
151
+
};
152
+
group = lib.mkOption {
153
+
type = types.str;
154
+
default = "root";
155
+
description = "Group to own the key. If this group does not exist this will silently fail and the key will be owned by gid 0.";
156
+
};
157
+
user = lib.mkOption {
158
+
type = types.str;
159
+
default = "root";
160
+
description = "User to own the key. If this user does not exist this will silently fail and the key will be owned by uid 0.";
161
+
};
162
+
permissions = lib.mkOption {
163
+
type = types.str;
164
+
default = "0600";
165
+
description = "Unix Octal permissions, in string format, for the key.";
166
+
};
167
+
source = lib.mkOption {
168
+
type = types.oneOf [
169
+
types.str
170
+
types.path
171
+
(types.listOf types.str)
172
+
];
173
+
description = "Source of the key. Either a path to a file, a literal string, or a command to generate the key.";
174
+
};
175
+
uploadAt = lib.mkOption {
176
+
type = types.enum [
177
+
"pre-activation"
178
+
"post-activation"
179
+
];
180
+
default = "pre-activation";
181
+
description = "When to upload the key. Either `pre-activation` or `post-activation`.";
182
+
};
183
+
environment = lib.mkOption {
184
+
type = types.attrsOf types.str;
185
+
default = { };
186
+
description = "Key-Value environment variables to use when creating the key if the key source is a command.";
187
+
};
188
+
};
189
+
}
190
+
)
191
+
);
192
+
description = "Secrets to be deployed to the node.";
193
+
default = { };
194
+
example = {
195
+
"wireless.env" = {
196
+
source = [
197
+
"gpg"
198
+
"--decrypt"
199
+
"secrets/wireless.env.gpg"
200
+
];
201
+
destDir = "/etc/keys/";
202
+
};
203
+
204
+
"arbfile.txt" = {
205
+
source = ./arbfile.txt;
206
+
destDir = "/etc/arbs/";
207
+
};
208
+
209
+
"arberfile.txt" = {
210
+
source = ''
211
+
Hello World
212
+
'';
213
+
destDir = "/etc/arbs/";
214
+
};
215
+
};
216
+
};
217
+
};
218
+
}
-203
runtime/module.nix
-203
runtime/module.nix
···
1
-
{
2
-
lib,
3
-
name,
4
-
config,
5
-
...
6
-
}:
7
-
let
8
-
inherit (lib) types;
9
-
in
10
-
{
11
-
imports =
12
-
let
13
-
inherit (lib) mkAliasOptionModule;
14
-
in
15
-
[
16
-
(mkAliasOptionModule [ "deployment" "targetHost" ] [ "deployment" "target" "hosts" ])
17
-
(mkAliasOptionModule [ "deployment" "targetUser" ] [ "deployment" "target" "user" ])
18
-
(mkAliasOptionModule [ "deployment" "targetPort" ] [ "deployment" "target" "port" ])
19
-
];
20
-
21
-
options.deployment = {
22
-
target = lib.mkOption {
23
-
type = types.submodule {
24
-
imports = [
25
-
(lib.mkAliasOptionModule [ "host" ] [ "hosts" ])
26
-
];
27
-
options = {
28
-
hosts = lib.mkOption {
29
-
type = types.coercedTo types.str lib.singleton (types.listOf types.str);
30
-
description = "IPs or hostnames to attempt to connect to. They are tried in order.";
31
-
default = lib.singleton name;
32
-
apply = lib.unique;
33
-
};
34
-
user = lib.mkOption {
35
-
type = types.str;
36
-
description = "User to use for ssh.";
37
-
default = "root";
38
-
};
39
-
port = lib.mkOption {
40
-
type = types.int;
41
-
default = 22;
42
-
description = "SSH port to use.";
43
-
};
44
-
};
45
-
};
46
-
description = "Describes the target for this node";
47
-
default = { };
48
-
};
49
-
50
-
buildOnTarget = lib.mkOption {
51
-
type = types.bool;
52
-
default = false;
53
-
description = "Whether to build the system on the target host or not.";
54
-
};
55
-
56
-
allowLocalDeployment = lib.mkOption {
57
-
type = types.bool;
58
-
default = true;
59
-
description = "Whether to allow or deny this node being applied to localhost when the host's hostname matches the
60
-
node's name.";
61
-
};
62
-
63
-
tags = lib.mkOption {
64
-
type = types.listOf types.str;
65
-
default = [ ];
66
-
description = "Tags for node.";
67
-
example = [
68
-
"arm"
69
-
"cloud"
70
-
];
71
-
};
72
-
73
-
_keys = lib.mkOption {
74
-
internal = true;
75
-
readOnly = true;
76
-
};
77
-
78
-
_hostPlatform = lib.mkOption {
79
-
internal = true;
80
-
readOnly = true;
81
-
};
82
-
83
-
keys = lib.mkOption {
84
-
type = types.attrsOf (
85
-
types.submodule (
86
-
{
87
-
name,
88
-
config,
89
-
...
90
-
}:
91
-
{
92
-
imports =
93
-
let
94
-
inherit (lib) mkAliasOptionModule;
95
-
in
96
-
[
97
-
(mkAliasOptionModule [ "keyFile" ] [ "source" ])
98
-
(mkAliasOptionModule [ "keyCommand" ] [ "source" ])
99
-
(mkAliasOptionModule [ "text" ] [ "source" ])
100
-
];
101
-
options = {
102
-
name = lib.mkOption {
103
-
type = types.str;
104
-
default = name;
105
-
description = "Filename of the secret.";
106
-
};
107
-
destDir = lib.mkOption {
108
-
type = types.path;
109
-
default = "/run/keys/";
110
-
description = "Destination directory for the secret. Change this to something other than `/run/keys/` for keys to persist past reboots.";
111
-
};
112
-
path = lib.mkOption {
113
-
internal = true;
114
-
type = types.path;
115
-
default = "${config.destDir}/${config.name}";
116
-
};
117
-
group = lib.mkOption {
118
-
type = types.str;
119
-
default = "root";
120
-
description = "Group to own the key. If this group does not exist this will silently fail and the key will be owned by gid 0.";
121
-
};
122
-
user = lib.mkOption {
123
-
type = types.str;
124
-
default = "root";
125
-
description = "User to own the key. If this user does not exist this will silently fail and the key will be owned by uid 0.";
126
-
};
127
-
permissions = lib.mkOption {
128
-
type = types.str;
129
-
default = "0600";
130
-
description = "Unix Octal permissions, in string format, for the key.";
131
-
};
132
-
source = lib.mkOption {
133
-
type = types.oneOf [
134
-
types.str
135
-
types.path
136
-
(types.listOf types.str)
137
-
];
138
-
description = "Source of the key. Either a path to a file, a literal string, or a command to generate the key.";
139
-
};
140
-
uploadAt = lib.mkOption {
141
-
type = types.enum [
142
-
"pre-activation"
143
-
"post-activation"
144
-
];
145
-
default = "pre-activation";
146
-
description = "When to upload the key. Either `pre-activation` or `post-activation`.";
147
-
};
148
-
environment = lib.mkOption {
149
-
type = types.attrsOf types.str;
150
-
default = { };
151
-
description = "Key-Value environment variables to use when creating the key if the key source is a command.";
152
-
};
153
-
};
154
-
}
155
-
)
156
-
);
157
-
description = "Secrets to be deployed to the node.";
158
-
default = { };
159
-
example = {
160
-
"wireless.env" = {
161
-
source = [
162
-
"gpg"
163
-
"--decrypt"
164
-
"secrets/wireless.env.gpg"
165
-
];
166
-
destDir = "/etc/keys/";
167
-
};
168
-
169
-
"arbfile.txt" = {
170
-
source = ./arbfile.txt;
171
-
destDir = "/etc/arbs/";
172
-
};
173
-
174
-
"arberfile.txt" = {
175
-
source = ''
176
-
Hello World
177
-
'';
178
-
destDir = "/etc/arbs/";
179
-
};
180
-
};
181
-
};
182
-
};
183
-
184
-
config = {
185
-
deployment = {
186
-
_keys = lib.mapAttrsToList (
187
-
_: value:
188
-
value
189
-
// {
190
-
source = {
191
-
# Attach type to internally tag serde enum
192
-
t = builtins.replaceStrings [ "path" "string" "list" ] [ "Path" "String" "Command" ] (
193
-
builtins.typeOf value.source
194
-
);
195
-
c = value.source;
196
-
};
197
-
}
198
-
) config.deployment.keys;
199
-
200
-
_hostPlatform = config.nixpkgs.hostPlatform.system;
201
-
};
202
-
};
203
-
}
+1
tests/nix/.python-version
+1
tests/nix/.python-version
···
1
+
3.13
+19
-8
tests/nix/default.nix
+19
-8
tests/nix/default.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
self,
3
6
config,
···
20
23
lazyAttrsOf
21
24
;
22
25
cfg = config.wire.testing;
26
+
27
+
stripTyping =
28
+
value:
29
+
let
30
+
split = builtins.split "(from typing import TYPE_CHECKING|# typing-end)" value;
31
+
in
32
+
(builtins.elemAt split 0) + (builtins.elemAt split 4);
23
33
in
24
34
{
25
35
imports = [
26
36
./suite/test_remote_deploy
27
37
./suite/test_local_deploy
28
38
./suite/test_keys
39
+
./suite/test_stdin
29
40
];
30
41
options.wire.testing = mkOption {
31
42
type = attrsOf (
···
40
51
type = lines;
41
52
default = '''';
42
53
description = "test script for runNixOSTest";
54
+
apply = stripTyping;
43
55
};
44
56
testDir = mkOption {
45
57
default = "${self}/tests/nix/suite/${name}";
···
77
89
{
78
90
testName,
79
91
opts,
80
-
nix,
81
92
nixpkgs,
82
93
}:
83
94
let
84
95
# TODO: Update once #126 is solved.
85
96
nixPackage = nixpkgs.legacyPackages.lix;
86
-
sanitizeName =
97
+
sanitiseName =
87
98
str: lib.strings.sanitizeDerivationName (builtins.replaceStrings [ "." ] [ "_" ] str);
88
-
identifier = sanitizeName "${nixpkgs.legacyPackages.lib.trivial.release}-${nixPackage.name}";
99
+
identifier = sanitiseName "${nixpkgs.legacyPackages.lib.trivial.release}-${nixPackage.name}";
89
100
path = "tests/nix/suite/${testName}";
90
101
91
102
flakeDirFileset = lib.fileset.toSource {
···
114
125
let
115
126
hive = builtins.scopedImport {
116
127
__nixPath = _b: null;
117
-
__findFile = path: name: if name == "nixpkgs" then pkgs.path else throw "oops!!";
128
+
__findFile = _path: name: if name == "nixpkgs" then pkgs.path else throw "oops!!";
118
129
} "${injectedFlakeDir}/${path}/hive.nix";
119
130
nodes = mapAttrsToList (_: val: val.config.system.build.toplevel.drvPath) hive.nodes;
120
131
# fetch **all** dependencies of a flake
···
138
149
};
139
150
140
151
environment.systemPackages = [ pkgs.ripgrep ];
152
+
environment.variables.XDG_RUNTIME_DIR = "/tmp";
141
153
virtualisation.memorySize = 4096;
142
154
virtualisation.additionalPaths = flatten [
143
155
injectedFlakeDir
···
149
161
testName = name;
150
162
snakeOil = import "${pkgs.path}/nixos/tests/ssh-keys.nix" pkgs;
151
163
inherit (opts) testDir;
152
-
inherit (self'.packages) wire-small;
164
+
inherit (self'.packages) wire-small-dev;
153
165
};
154
166
# NOTE: there is surely a better way of doing this in a more
155
167
# "controlled" manner, but until a need is asked for, this will remain
···
159
171
160
172
TEST_DIR="${injectedFlakeDir}/${path}"
161
173
162
-
${builtins.readFile ./tools.py}
174
+
${stripTyping (builtins.readFile ./tools/__init__.py)}
163
175
''
164
176
+ lib.concatStringsSep "\n" (mapAttrsToList (_: value: value._wire.testScript) value.nodes)
165
177
+ opts.testScript;
···
170
182
checks = builtins.listToAttrs (
171
183
builtins.map (
172
184
{
173
-
nix,
174
185
nixpkgs,
175
186
testName,
187
+
...
176
188
}:
177
189
let
178
190
opts = cfg.${testName};
···
181
193
inherit
182
194
testName
183
195
opts
184
-
nix
185
196
nixpkgs
186
197
;
187
198
}
+18
tests/nix/pyproject.toml
+18
tests/nix/pyproject.toml
···
1
+
[project]
2
+
name = "wire-vm-tests"
3
+
version = "0.0.0"
4
+
requires-python = ">=3.13"
5
+
dependencies = [
6
+
"colorama>=0.4.6",
7
+
"ipython>=9.8.0",
8
+
"junit-xml>=1.9",
9
+
"nixos-test-driver",
10
+
"ptpython>=3.0.32",
11
+
"remote-pdb>=2.1.0",
12
+
]
13
+
14
+
[tool.uv.sources]
15
+
nixos-test-driver = { git = "https://github.com/NixOS/nixpkgs", subdirectory = "nixos/lib/test-driver/src", branch = "nixos-25.11" }
16
+
17
+
[dependency-groups]
18
+
dev = ["ty>=0.0.4"]
+4
-60
tests/nix/suite/test_keys/default.nix
+4
-60
tests/nix/suite/test_keys/default.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
wire.testing.test_keys = {
3
6
nodes.deployer = {
···
7
10
nodes.receiver = {
8
11
_wire.receiver = true;
9
12
};
10
-
testScript = ''
11
-
deployer_so = collect_store_objects(deployer)
12
-
receiver_so = collect_store_objects(receiver)
13
-
14
-
# build all nodes without any keys
15
-
deployer.succeed(f"wire apply --no-progress --on receiver --path {TEST_DIR}/hive.nix --no-keys -vvv >&2")
16
-
17
-
receiver.wait_for_unit("sshd.service")
18
-
19
-
# --no-keys should never push a key
20
-
receiver.fail("test -f /run/keys/source_string")
21
-
deployer.fail("test -f /run/keys/source_string")
22
-
23
-
def test_keys(target, target_object):
24
-
deployer.succeed(f"wire apply keys --on {target} --no-progress --path {TEST_DIR}/hive.nix -vvv >&2")
25
-
26
-
keys = [
27
-
("/run/keys/source_string", "hello_world_source", "root root 600"),
28
-
("/etc/keys/file", "hello_world_file", "root root 644"),
29
-
("/home/owner/some/deep/path/command", "hello_world_command", "owner owner 644"),
30
-
("/run/keys/environment", "string_from_environment", "root root 600"),
31
-
]
32
-
33
-
for path, value, permissions in keys:
34
-
# test existence & value
35
-
source_string = target_object.succeed(f"cat {path}")
36
-
assert value in source_string, f"{path} has correct contents ({target})"
37
-
38
-
stat = target_object.succeed(f"stat -c '%U %G %a' {path}").rstrip()
39
-
assert permissions == stat, f"{path} has correct permissions ({target})"
40
-
41
-
def perform_routine(target, target_object):
42
-
test_keys(target, target_object)
43
-
44
-
# Mess with the keys to make sure that every push refreshes the permissions
45
-
target_object.succeed("echo 'incorrect_value' > /run/keys/source_string")
46
-
target_object.succeed("chown 600 /etc/keys/file")
47
-
# Test having a key that doesn't exist mixed with keys that do
48
-
target_object.succeed("rm /home/owner/some/deep/path/command")
49
-
50
-
# Test keys twice to ensure the operation is idempotent,
51
-
# especially around directory creation.
52
-
test_keys(target, target_object)
53
-
54
-
perform_routine("receiver", receiver)
55
-
perform_routine("deployer", deployer)
56
-
57
-
new_deployer_store_objects = collect_store_objects(deployer).difference(deployer_so)
58
-
new_receiver_store_objects = collect_store_objects(receiver).difference(receiver_so)
59
-
60
-
# no one should have any keys introduced by the operation
61
-
for node, objects in [
62
-
(deployer, new_deployer_store_objects),
63
-
(receiver, new_receiver_store_objects),
64
-
]:
65
-
assert_store_not_posioned(node, "hello_world_source", objects)
66
-
assert_store_not_posioned(node, "hello_world_file", objects)
67
-
assert_store_not_posioned(node, "hello_world_command", objects)
68
-
assert_store_not_posioned(node, "string_from_environment", objects)
69
-
'';
13
+
testScript = builtins.readFile ./script.py;
70
14
};
71
15
}
+5
tests/nix/suite/test_keys/hive.nix
+5
tests/nix/suite/test_keys/hive.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
let
2
5
inherit (import ../utils.nix { testName = "test_keys-@IDENT@"; }) makeHive mkHiveNode;
3
6
in
···
6
9
defaults = {
7
10
deployment.keys = {
8
11
source_string = {
12
+
# key with different name to attr name
13
+
name = "source_string_name";
9
14
source = ''
10
15
hello_world_source
11
16
'';
+123
tests/nix/suite/test_keys/script.py
+123
tests/nix/suite/test_keys/script.py
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
from typing import TYPE_CHECKING
5
+
6
+
if TYPE_CHECKING:
7
+
from test_driver.machine import Machine
8
+
from tools import collect_store_objects, assert_store_not_poisoned
9
+
10
+
deployer: Machine = None # type: ignore[invalid-assignment]
11
+
receiver: Machine = None # type: ignore[invalid-assignment]
12
+
TEST_DIR = ""
13
+
14
+
# typing-end
15
+
16
+
deployer_so = collect_store_objects(deployer)
17
+
receiver_so = collect_store_objects(receiver)
18
+
19
+
# build receiver with no keys
20
+
deployer.succeed(
21
+
f"wire apply --no-progress --on receiver --path {TEST_DIR}/hive.nix --no-keys --ssh-accept-host -vvv >&2"
22
+
)
23
+
24
+
receiver.wait_for_unit("sshd.service")
25
+
26
+
# --no-keys should never push a key
27
+
receiver.fail("test -f /run/keys/source_string_name")
28
+
deployer.fail("test -f /run/keys/source_string_name")
29
+
30
+
# key services are created
31
+
receiver.succeed("systemctl cat source_string_name-key.service")
32
+
33
+
_, is_failed = receiver.execute("systemctl is-failed source_string_name-key.service")
34
+
assert is_failed == "inactive\n", (
35
+
f"source_string_name-key.service must be inactive before key exists ({is_failed})"
36
+
)
37
+
38
+
39
+
def test_keys(target, target_object, non_interactive):
40
+
if non_interactive:
41
+
deployer.succeed(
42
+
f"wire apply keys --on {target} --no-progress --path {TEST_DIR}/hive.nix --non-interactive --ssh-accept-host -vvv >&2"
43
+
)
44
+
else:
45
+
deployer.succeed(
46
+
f"wire apply keys --on {target} --no-progress --path {TEST_DIR}/hive.nix --ssh-accept-host -vvv >&2"
47
+
)
48
+
49
+
keys = [
50
+
(
51
+
"/run/keys/source_string_name",
52
+
"hello_world_source",
53
+
"root root 600",
54
+
"source_string_name",
55
+
),
56
+
("/etc/keys/file", "hello_world_file", "root root 644", "file"),
57
+
(
58
+
"/home/owner/some/deep/path/command",
59
+
"hello_world_command",
60
+
"owner owner 644",
61
+
"command",
62
+
),
63
+
(
64
+
"/run/keys/environment",
65
+
"string_from_environment",
66
+
"root root 600",
67
+
"environment",
68
+
),
69
+
]
70
+
71
+
for path, value, permissions, name in keys:
72
+
# test existence & value
73
+
source_string = target_object.succeed(f"cat {path}")
74
+
assert value in source_string, f"{path} has correct contents ({target})"
75
+
76
+
stat = target_object.succeed(f"stat -c '%U %G %a' {path}").rstrip()
77
+
assert permissions == stat, f"{path} has correct permissions ({target})"
78
+
79
+
80
+
def perform_routine(target, target_object, non_interactive):
81
+
test_keys(target, target_object, non_interactive)
82
+
83
+
# only check systemd units on receiver since deployer apply's are one time only
84
+
if target == "receiver":
85
+
target_object.succeed("systemctl start source_string_name-key.path")
86
+
target_object.succeed("systemctl start command-key.path")
87
+
target_object.wait_for_unit("source_string_name-key.service")
88
+
target_object.wait_for_unit("command-key.service")
89
+
90
+
# Mess with the keys to make sure that every push refreshes the permissions
91
+
target_object.succeed("echo 'incorrect_value' > /run/keys/source_string")
92
+
target_object.succeed("chown 600 /etc/keys/file")
93
+
# Test having a key that doesn't exist mixed with keys that do
94
+
target_object.succeed("rm /home/owner/some/deep/path/command")
95
+
96
+
if target == "receiver":
97
+
_, is_failed = target_object.execute("systemctl is-active command-key.service")
98
+
assert is_failed == "failed\n", (
99
+
f"command-key.service is failed after deletion ({is_failed})"
100
+
)
101
+
102
+
# Test keys twice to ensure the operation is idempotent,
103
+
# especially around directory creation.
104
+
test_keys(target, target_object, non_interactive)
105
+
106
+
107
+
perform_routine("receiver", receiver, True)
108
+
perform_routine("deployer", deployer, True)
109
+
perform_routine("receiver", receiver, False)
110
+
perform_routine("deployer", deployer, False)
111
+
112
+
new_deployer_store_objects = collect_store_objects(deployer).difference(deployer_so)
113
+
new_receiver_store_objects = collect_store_objects(receiver).difference(receiver_so)
114
+
115
+
# no one should have any keys introduced by the operation
116
+
for node, objects in [
117
+
(deployer, new_deployer_store_objects),
118
+
(receiver, new_receiver_store_objects),
119
+
]:
120
+
assert_store_not_poisoned(node, "hello_world_source", objects)
121
+
assert_store_not_poisoned(node, "hello_world_file", objects)
122
+
assert_store_not_poisoned(node, "hello_world_command", objects)
123
+
assert_store_not_poisoned(node, "string_from_environment", objects)
+4
-4
tests/nix/suite/test_local_deploy/default.nix
+4
-4
tests/nix/suite/test_local_deploy/default.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
wire.testing.test_local_deploy = {
3
6
nodes.deployer = {
4
7
_wire.deployer = true;
5
8
_wire.receiver = true;
6
9
};
7
-
testScript = ''
8
-
deployer.succeed(f"wire apply --on deployer --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2")
9
-
deployer.succeed("test -f /etc/a")
10
-
'';
10
+
testScript = builtins.readFile ./script.py;
11
11
};
12
12
}
+3
tests/nix/suite/test_local_deploy/hive.nix
+3
tests/nix/suite/test_local_deploy/hive.nix
+17
tests/nix/suite/test_local_deploy/script.py
+17
tests/nix/suite/test_local_deploy/script.py
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
from typing import TYPE_CHECKING
5
+
6
+
if TYPE_CHECKING:
7
+
from test_driver.machine import Machine
8
+
9
+
deployer: Machine = None # type: ignore[invalid-assignment]
10
+
TEST_DIR = ""
11
+
12
+
# typing-end
13
+
14
+
deployer.succeed(
15
+
f"wire apply --on deployer --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2"
16
+
)
17
+
deployer.succeed("test -f /etc/a")
+4
-30
tests/nix/suite/test_remote_deploy/default.nix
+4
-30
tests/nix/suite/test_remote_deploy/default.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
wire.testing.test_remote_deploy = {
3
6
nodes.deployer = {
···
6
9
nodes.receiver = {
7
10
_wire.receiver = true;
8
11
};
9
-
testScript = ''
10
-
with subtest("Test unreachable hosts"):
11
-
deployer.fail(f"wire apply --on receiver-unreachable --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2")
12
-
13
-
with subtest("Check basic apply"):
14
-
deployer.succeed(f"wire apply --on receiver --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2")
15
-
16
-
identity = receiver.succeed("cat /etc/identity")
17
-
assert identity == "first", "Identity of first apply wasn't as expected"
18
-
19
-
with subtest("Check boot apply"):
20
-
first_system = receiver.succeed("readlink -f /run/current-system")
21
-
22
-
deployer.succeed(f"wire apply boot --on receiver-second --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2")
23
-
24
-
_first_system = receiver.succeed("readlink -f /run/current-system")
25
-
assert first_system == _first_system, "apply boot without --reboot changed /run/current-system"
26
-
27
-
# with subtest("Check /etc/identity after reboot"):
28
-
# receiver.reboot()
29
-
#
30
-
# identity = receiver.succeed("cat /etc/identity")
31
-
# assert identity == "second", "Identity didn't change after second apply"
32
-
33
-
# with subtest("Check --reboot"):
34
-
# deployer.succeed(f"wire apply boot --on receiver-third --no-progress --path {TEST_DIR}/hive.nix --reboot --no-keys -vvv >&2")
35
-
#
36
-
# identity = receiver.succeed("cat /etc/identity")
37
-
# assert identity == "third", "Identity didn't change after third apply"
38
-
'';
12
+
testScript = builtins.readFile ./script.py;
39
13
};
40
14
}
+28
-9
tests/nix/suite/test_remote_deploy/hive.nix
+28
-9
tests/nix/suite/test_remote_deploy/hive.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
let
2
5
inherit (import ../utils.nix { testName = "test_keys-@IDENT@"; }) makeHive mkHiveNode;
3
6
in
4
7
makeHive {
5
-
meta.nixpkgs = import <nixpkgs> { localSystem = "x86_64-linux"; };
8
+
meta = {
9
+
nixpkgs = import <nixpkgs> { localSystem = "x86_64-linux"; };
10
+
11
+
specialArgs = {
12
+
message = "second";
13
+
};
14
+
15
+
nodeSpecialArgs = {
16
+
receiver-third.message = "third";
17
+
};
18
+
};
6
19
7
20
receiver = mkHiveNode { hostname = "receiver"; } {
8
21
environment.etc."identity".text = "first";
···
17
30
];
18
31
};
19
32
20
-
receiver-second = mkHiveNode { hostname = "receiver"; } {
21
-
environment.etc."identity".text = "second";
22
-
deployment.target.host = "receiver";
23
-
};
33
+
receiver-second = mkHiveNode { hostname = "receiver"; } (
34
+
{ message, ... }:
35
+
{
36
+
environment.etc."identity".text = message;
37
+
deployment.target.host = "receiver";
38
+
}
39
+
);
24
40
25
-
receiver-third = mkHiveNode { hostname = "receiver"; } {
26
-
environment.etc."identity".text = "third";
27
-
deployment.target.host = "receiver";
28
-
};
41
+
receiver-third = mkHiveNode { hostname = "receiver"; } (
42
+
{ message, ... }:
43
+
{
44
+
environment.etc."identity".text = message;
45
+
deployment.target.host = "receiver";
46
+
}
47
+
);
29
48
30
49
receiver-unreachable = mkHiveNode { hostname = "receiver"; } {
31
50
# test node pinging
+63
tests/nix/suite/test_remote_deploy/script.py
+63
tests/nix/suite/test_remote_deploy/script.py
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
from typing import TYPE_CHECKING
5
+
from typing import Callable, ContextManager
6
+
7
+
if TYPE_CHECKING:
8
+
from test_driver.machine import Machine
9
+
10
+
deployer: Machine = None # type: ignore[invalid-assignment]
11
+
receiver: Machine = None # type: ignore[invalid-assignment]
12
+
13
+
TEST_DIR = ""
14
+
15
+
# https://github.com/NixOS/nixpkgs/blob/d10d9933b1c206f9b2950e5e1d68268c5ed0a3c7/nixos/lib/test-script-prepend.py#L43
16
+
subtest: Callable[[str], ContextManager[None]] = None # type: ignore[invalid-assignment]
17
+
18
+
# typing-end
19
+
20
+
with subtest("Test unreachable hosts"):
21
+
deployer.fail(
22
+
f"wire apply --on receiver-unreachable --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2"
23
+
)
24
+
25
+
with subtest("Check basic apply: Interactive"):
26
+
deployer.succeed(
27
+
f"wire apply --on receiver --no-progress --path {TEST_DIR}/hive.nix --no-keys --ssh-accept-host -vvv >&2"
28
+
)
29
+
30
+
identity = receiver.succeed("cat /etc/identity")
31
+
assert identity == "first", "Identity of first apply wasn't as expected"
32
+
33
+
with subtest("Check basic apply: NonInteractive"):
34
+
deployer.succeed(
35
+
f"wire apply --on receiver-third --no-progress --path {TEST_DIR}/hive.nix --no-keys --ssh-accept-host --non-interactive -vvv >&2"
36
+
)
37
+
38
+
identity = receiver.succeed("cat /etc/identity")
39
+
assert identity == "third", "Identity of non-interactive apply wasn't as expected"
40
+
41
+
with subtest("Check boot apply"):
42
+
first_system = receiver.succeed("readlink -f /run/current-system")
43
+
44
+
deployer.succeed(
45
+
f"wire apply boot --on receiver-second --no-progress --path {TEST_DIR}/hive.nix --no-keys --ssh-accept-host -vvv >&2"
46
+
)
47
+
48
+
_first_system = receiver.succeed("readlink -f /run/current-system")
49
+
assert first_system == _first_system, (
50
+
"apply boot without --reboot changed /run/current-system"
51
+
)
52
+
53
+
# with subtest("Check /etc/identity after reboot"):
54
+
# receiver.reboot()
55
+
#
56
+
# identity = receiver.succeed("cat /etc/identity")
57
+
# assert identity == "second", "Identity didn't change after second apply"
58
+
59
+
# with subtest("Check --reboot"):
60
+
# deployer.succeed(f"wire apply boot --on receiver-third --no-progress --path {TEST_DIR}/hive.nix --reboot --no-keys -vvv >&2")
61
+
#
62
+
# identity = receiver.succeed("cat /etc/identity")
63
+
# assert identity == "third", "Identity didn't change after third apply"
+12
tests/nix/suite/test_stdin/default.nix
+12
tests/nix/suite/test_stdin/default.nix
+13
tests/nix/suite/test_stdin/hive.nix
+13
tests/nix/suite/test_stdin/hive.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
let
5
+
inherit (import ../utils.nix { testName = "test_keys-@IDENT@"; }) makeHive mkHiveNode;
6
+
in
7
+
makeHive {
8
+
meta.nixpkgs = import <nixpkgs> { localSystem = "x86_64-linux"; };
9
+
deployer = mkHiveNode { hostname = "deployer"; } {
10
+
deployment.tags = [ "tag" ];
11
+
environment.etc."a".text = "b";
12
+
};
13
+
}
+17
tests/nix/suite/test_stdin/script.py
+17
tests/nix/suite/test_stdin/script.py
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
from typing import TYPE_CHECKING
5
+
6
+
if TYPE_CHECKING:
7
+
from test_driver.machine import Machine
8
+
9
+
deployer: Machine = None # type: ignore[invalid-assignment]
10
+
TEST_DIR = ""
11
+
12
+
# typing-end
13
+
14
+
deployer.succeed(
15
+
f"echo @tag | wire apply --on deployer --no-progress --path {TEST_DIR}/hive.nix --no-keys -vvv >&2"
16
+
)
17
+
deployer.succeed("test -f /etc/a")
+3
tests/nix/suite/utils.nix
+3
tests/nix/suite/utils.nix
+5
-2
tests/nix/test-opts.nix
+5
-2
tests/nix/test-opts.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
{
2
5
lib,
3
6
snakeOil,
4
-
wire-small,
7
+
wire-small-dev,
5
8
config,
6
9
pkgs,
7
10
...
···
33
36
"C+ /root/.ssh/id_ed25519 600 - - - ${snakeOil.snakeOilEd25519PrivateKey}"
34
37
];
35
38
environment.systemPackages = [
36
-
wire-small
39
+
wire-small-dev
37
40
pkgs.ripgrep
38
41
];
39
42
# It's important to note that you should never ever use this configuration
+20
tests/nix/tools/__init__.py
+20
tests/nix/tools/__init__.py
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
4
+
from typing import TYPE_CHECKING
5
+
6
+
if TYPE_CHECKING:
7
+
from test_driver.machine import Machine
8
+
9
+
# typing-end
10
+
11
+
12
+
def collect_store_objects(machine: Machine) -> set[str]:
13
+
return set(machine.succeed("ls /nix/store").strip().split("\n"))
14
+
15
+
16
+
def assert_store_not_poisoned(machine: Machine, poison: str, objects: set[str]):
17
+
paths = list(map(lambda n: f"/nix/store/{n}", objects))
18
+
19
+
machine.succeed("which rg")
20
+
machine.fail(f"rg '{poison}' {' '.join(paths)}")
-9
tests/nix/tools.py
-9
tests/nix/tools.py
···
1
-
def collect_store_objects(machine: Machine) -> set[str]:
2
-
return set(machine.succeed("ls /nix/store").strip().split("\n"))
3
-
4
-
5
-
def assert_store_not_posioned(machine: Machine, poison: str, objects: set[str]):
6
-
paths = list(map(lambda n: f"/nix/store/{n}", objects))
7
-
8
-
machine.succeed("which rg")
9
-
machine.fail(f"rg '{poison}' {" ".join(paths)}")
+303
tests/nix/uv.lock
+303
tests/nix/uv.lock
···
1
+
version = 1
2
+
revision = 3
3
+
requires-python = ">=3.13"
4
+
5
+
[[package]]
6
+
name = "appdirs"
7
+
version = "1.4.4"
8
+
source = { registry = "https://pypi.org/simple" }
9
+
sdist = { url = "https://files.pythonhosted.org/packages/d7/d8/05696357e0311f5b5c316d7b95f46c669dd9c15aaeecbb48c7d0aeb88c40/appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", size = 13470, upload-time = "2020-05-11T07:59:51.037Z" }
10
+
wheels = [
11
+
{ url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566, upload-time = "2020-05-11T07:59:49.499Z" },
12
+
]
13
+
14
+
[[package]]
15
+
name = "asttokens"
16
+
version = "3.0.1"
17
+
source = { registry = "https://pypi.org/simple" }
18
+
sdist = { url = "https://files.pythonhosted.org/packages/be/a5/8e3f9b6771b0b408517c82d97aed8f2036509bc247d46114925e32fe33f0/asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7", size = 62308, upload-time = "2025-11-15T16:43:48.578Z" }
19
+
wheels = [
20
+
{ url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" },
21
+
]
22
+
23
+
[[package]]
24
+
name = "colorama"
25
+
version = "0.4.6"
26
+
source = { registry = "https://pypi.org/simple" }
27
+
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
28
+
wheels = [
29
+
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
30
+
]
31
+
32
+
[[package]]
33
+
name = "decorator"
34
+
version = "5.2.1"
35
+
source = { registry = "https://pypi.org/simple" }
36
+
sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" }
37
+
wheels = [
38
+
{ url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" },
39
+
]
40
+
41
+
[[package]]
42
+
name = "executing"
43
+
version = "2.2.1"
44
+
source = { registry = "https://pypi.org/simple" }
45
+
sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" }
46
+
wheels = [
47
+
{ url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" },
48
+
]
49
+
50
+
[[package]]
51
+
name = "ipython"
52
+
version = "9.8.0"
53
+
source = { registry = "https://pypi.org/simple" }
54
+
dependencies = [
55
+
{ name = "colorama", marker = "sys_platform == 'win32'" },
56
+
{ name = "decorator" },
57
+
{ name = "ipython-pygments-lexers" },
58
+
{ name = "jedi" },
59
+
{ name = "matplotlib-inline" },
60
+
{ name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
61
+
{ name = "prompt-toolkit" },
62
+
{ name = "pygments" },
63
+
{ name = "stack-data" },
64
+
{ name = "traitlets" },
65
+
]
66
+
sdist = { url = "https://files.pythonhosted.org/packages/12/51/a703c030f4928646d390b4971af4938a1b10c9dfce694f0d99a0bb073cb2/ipython-9.8.0.tar.gz", hash = "sha256:8e4ce129a627eb9dd221c41b1d2cdaed4ef7c9da8c17c63f6f578fe231141f83", size = 4424940, upload-time = "2025-12-03T10:18:24.353Z" }
67
+
wheels = [
68
+
{ url = "https://files.pythonhosted.org/packages/f1/df/8ee1c5dd1e3308b5d5b2f2dfea323bb2f3827da8d654abb6642051199049/ipython-9.8.0-py3-none-any.whl", hash = "sha256:ebe6d1d58d7d988fbf23ff8ff6d8e1622cfdb194daf4b7b73b792c4ec3b85385", size = 621374, upload-time = "2025-12-03T10:18:22.335Z" },
69
+
]
70
+
71
+
[[package]]
72
+
name = "ipython-pygments-lexers"
73
+
version = "1.1.1"
74
+
source = { registry = "https://pypi.org/simple" }
75
+
dependencies = [
76
+
{ name = "pygments" },
77
+
]
78
+
sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" }
79
+
wheels = [
80
+
{ url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" },
81
+
]
82
+
83
+
[[package]]
84
+
name = "jedi"
85
+
version = "0.19.2"
86
+
source = { registry = "https://pypi.org/simple" }
87
+
dependencies = [
88
+
{ name = "parso" },
89
+
]
90
+
sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" }
91
+
wheels = [
92
+
{ url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" },
93
+
]
94
+
95
+
[[package]]
96
+
name = "junit-xml"
97
+
version = "1.9"
98
+
source = { registry = "https://pypi.org/simple" }
99
+
dependencies = [
100
+
{ name = "six" },
101
+
]
102
+
sdist = { url = "https://files.pythonhosted.org/packages/98/af/bc988c914dd1ea2bc7540ecc6a0265c2b6faccc6d9cdb82f20e2094a8229/junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f", size = 7349, upload-time = "2023-01-24T18:42:00.836Z" }
103
+
wheels = [
104
+
{ url = "https://files.pythonhosted.org/packages/2a/93/2d896b5fd3d79b4cadd8882c06650e66d003f465c9d12c488d92853dff78/junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732", size = 7130, upload-time = "2020-02-22T20:41:37.661Z" },
105
+
]
106
+
107
+
[[package]]
108
+
name = "matplotlib-inline"
109
+
version = "0.2.1"
110
+
source = { registry = "https://pypi.org/simple" }
111
+
dependencies = [
112
+
{ name = "traitlets" },
113
+
]
114
+
sdist = { url = "https://files.pythonhosted.org/packages/c7/74/97e72a36efd4ae2bccb3463284300f8953f199b5ffbc04cbbb0ec78f74b1/matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe", size = 8110, upload-time = "2025-10-23T09:00:22.126Z" }
115
+
wheels = [
116
+
{ url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516, upload-time = "2025-10-23T09:00:20.675Z" },
117
+
]
118
+
119
+
[[package]]
120
+
name = "nixos-test-driver"
121
+
version = "0.0.0"
122
+
source = { git = "https://github.com/NixOS/nixpkgs?subdirectory=nixos%2Flib%2Ftest-driver%2Fsrc&branch=nixos-25.11#c6f52ebd45e5925c188d1a20119978aa4ffd5ef6" }
123
+
124
+
[[package]]
125
+
name = "parso"
126
+
version = "0.8.5"
127
+
source = { registry = "https://pypi.org/simple" }
128
+
sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" }
129
+
wheels = [
130
+
{ url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" },
131
+
]
132
+
133
+
[[package]]
134
+
name = "pexpect"
135
+
version = "4.9.0"
136
+
source = { registry = "https://pypi.org/simple" }
137
+
dependencies = [
138
+
{ name = "ptyprocess" },
139
+
]
140
+
sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
141
+
wheels = [
142
+
{ url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
143
+
]
144
+
145
+
[[package]]
146
+
name = "prompt-toolkit"
147
+
version = "3.0.52"
148
+
source = { registry = "https://pypi.org/simple" }
149
+
dependencies = [
150
+
{ name = "wcwidth" },
151
+
]
152
+
sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" }
153
+
wheels = [
154
+
{ url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" },
155
+
]
156
+
157
+
[[package]]
158
+
name = "ptpython"
159
+
version = "3.0.32"
160
+
source = { registry = "https://pypi.org/simple" }
161
+
dependencies = [
162
+
{ name = "appdirs" },
163
+
{ name = "jedi" },
164
+
{ name = "prompt-toolkit" },
165
+
{ name = "pygments" },
166
+
]
167
+
sdist = { url = "https://files.pythonhosted.org/packages/b6/8c/7e904ceeb512b4530c7ca1d918d3565d694a1fa7df337cdfc36a16347d68/ptpython-3.0.32.tar.gz", hash = "sha256:11651778236de95c582b42737294e50a66ba4a21fa01c0090ea70815af478fe0", size = 74080, upload-time = "2025-11-20T21:20:48.27Z" }
168
+
wheels = [
169
+
{ url = "https://files.pythonhosted.org/packages/4c/ac/0e35e5d7afd47ab0e2c71293ed2ad18df91a2a4a008c0ff59c2f22def377/ptpython-3.0.32-py3-none-any.whl", hash = "sha256:16435d323e5fc0a685d5f4dc5bb4494fb68ac68736689cd1247e1eda9369b616", size = 68099, upload-time = "2025-11-20T21:20:46.634Z" },
170
+
]
171
+
172
+
[[package]]
173
+
name = "ptyprocess"
174
+
version = "0.7.0"
175
+
source = { registry = "https://pypi.org/simple" }
176
+
sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" }
177
+
wheels = [
178
+
{ url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" },
179
+
]
180
+
181
+
[[package]]
182
+
name = "pure-eval"
183
+
version = "0.2.3"
184
+
source = { registry = "https://pypi.org/simple" }
185
+
sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" }
186
+
wheels = [
187
+
{ url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" },
188
+
]
189
+
190
+
[[package]]
191
+
name = "pygments"
192
+
version = "2.19.2"
193
+
source = { registry = "https://pypi.org/simple" }
194
+
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
195
+
wheels = [
196
+
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
197
+
]
198
+
199
+
[[package]]
200
+
name = "remote-pdb"
201
+
version = "2.1.0"
202
+
source = { registry = "https://pypi.org/simple" }
203
+
sdist = { url = "https://files.pythonhosted.org/packages/e4/b5/4944cac06fd9fc4a2e168313ec220aa25ed96ce83947b63eea5b4045b22d/remote-pdb-2.1.0.tar.gz", hash = "sha256:2d70c6f41e0eabf0165e8f1be58f82aa7a605aaeab8f2aefeb9ce246431091c1", size = 22295, upload-time = "2020-07-24T13:31:32.985Z" }
204
+
wheels = [
205
+
{ url = "https://files.pythonhosted.org/packages/71/c5/d208c66344bb785d800adb61aef512290d3473052b9e7697890f0547aff2/remote_pdb-2.1.0-py2.py3-none-any.whl", hash = "sha256:94f73a92ac1248cf16189211011f97096bdada8a7baac8c79372663bbb57b5d0", size = 6304, upload-time = "2020-07-24T13:31:31.535Z" },
206
+
]
207
+
208
+
[[package]]
209
+
name = "six"
210
+
version = "1.17.0"
211
+
source = { registry = "https://pypi.org/simple" }
212
+
sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
213
+
wheels = [
214
+
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
215
+
]
216
+
217
+
[[package]]
218
+
name = "stack-data"
219
+
version = "0.6.3"
220
+
source = { registry = "https://pypi.org/simple" }
221
+
dependencies = [
222
+
{ name = "asttokens" },
223
+
{ name = "executing" },
224
+
{ name = "pure-eval" },
225
+
]
226
+
sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" }
227
+
wheels = [
228
+
{ url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" },
229
+
]
230
+
231
+
[[package]]
232
+
name = "traitlets"
233
+
version = "5.14.3"
234
+
source = { registry = "https://pypi.org/simple" }
235
+
sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" }
236
+
wheels = [
237
+
{ url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" },
238
+
]
239
+
240
+
[[package]]
241
+
name = "ty"
242
+
version = "0.0.4"
243
+
source = { registry = "https://pypi.org/simple" }
244
+
sdist = { url = "https://files.pythonhosted.org/packages/48/d9/97d5808e851f790e58f8a54efb5c7b9f404640baf9e295f424846040b316/ty-0.0.4.tar.gz", hash = "sha256:2ea47a0089d74730658ec4e988c8ef476a1e9bd92df3e56709c4003c2895ff3b", size = 4780289, upload-time = "2025-12-19T00:13:53.12Z" }
245
+
wheels = [
246
+
{ url = "https://files.pythonhosted.org/packages/b1/94/b32a962243cc8a16e8dc74cf1fe75e8bb013d0e13e71bb540e2c86214b61/ty-0.0.4-py3-none-linux_armv6l.whl", hash = "sha256:5225da65a8d1defeb21ee9d74298b1b97c6cbab36e235a310c1430d9079e4b6a", size = 9762399, upload-time = "2025-12-19T00:14:11.261Z" },
247
+
{ url = "https://files.pythonhosted.org/packages/d1/d2/7c76e0c22ddfc2fcd4a3458a65f87ce074070eb1c68c07ee475cc2b6ea68/ty-0.0.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f87770d7988f470b795a2043185082fa959dbe1979a11b4bfe20f1214d37bd6e", size = 9590410, upload-time = "2025-12-19T00:13:55.759Z" },
248
+
{ url = "https://files.pythonhosted.org/packages/a5/84/de4b1fc85669faca3622071d5a3f3ec7bfb239971f368c28fae461d3398a/ty-0.0.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ecf68b8ea48674a289d733b4786aecc259242a2d9a920b3ec8583db18c67496a", size = 9131113, upload-time = "2025-12-19T00:14:08.593Z" },
249
+
{ url = "https://files.pythonhosted.org/packages/a7/ff/b5bf385b6983be56a470856bbcbac1b7e816bcd765a7e9d39ab2399e387d/ty-0.0.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efc396d76a57e527393cae4ee8faf23b93be3df9e93202f39925721a7a2bb7b8", size = 9599152, upload-time = "2025-12-19T00:13:40.484Z" },
250
+
{ url = "https://files.pythonhosted.org/packages/36/d6/9880ba106f2f20d13e6a5dca5d5ca44bfb3782936ee67ff635f89a2959c0/ty-0.0.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c893b968d2f9964a4d4db9992c9ba66b01f411b1f48dffcde08622e19cd6ab97", size = 9585368, upload-time = "2025-12-19T00:14:00.994Z" },
251
+
{ url = "https://files.pythonhosted.org/packages/3f/53/503cfc18bc4c7c4e02f89dd43debc41a6e343b41eb43df658dfb493a386d/ty-0.0.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:526c925b80d68a53c165044d2370fcfc0def1f119f7b7e483ee61d24da6fb891", size = 9998412, upload-time = "2025-12-19T00:14:18.653Z" },
252
+
{ url = "https://files.pythonhosted.org/packages/1d/bd/dd2d3e29834da5add2eda0ab5b433171ce9ce9a248c364d2e237f82073d7/ty-0.0.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:857f605a7fa366b6c6e6f38abc311d0606be513c2bee8977b5c8fd4bde1a82d5", size = 10853890, upload-time = "2025-12-19T00:13:50.891Z" },
253
+
{ url = "https://files.pythonhosted.org/packages/07/fe/28ba3be1672e6b8df46e43de66a02dc076ffba7853d391a5466421886225/ty-0.0.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4cc981aa3ebdac2c233421b1e58c80b0df6a8e6e6fa8b9e69fbdfd2f82768af", size = 10587263, upload-time = "2025-12-19T00:14:21.577Z" },
254
+
{ url = "https://files.pythonhosted.org/packages/26/9c/bb598772043f686afe5bc26cb386020709c1a0bcc164bc22ad9da2b4f55d/ty-0.0.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b03b2708b0bf67c76424a860f848aebaa4772c05529170c3761bfcaea93ec199", size = 10401204, upload-time = "2025-12-19T00:13:43.453Z" },
255
+
{ url = "https://files.pythonhosted.org/packages/ac/18/71765e9d63669bf09461c3fea84a7a63232ccb0e83b84676f07b987fc217/ty-0.0.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:469890e885544beb129c21e2f8f15321f0573d094aec13da68593c5f86389ff9", size = 10129713, upload-time = "2025-12-19T00:14:13.725Z" },
256
+
{ url = "https://files.pythonhosted.org/packages/c3/2d/c03eba570aa85e9c361de5ed36d60b9ab139e93ee91057f455ab4af48e54/ty-0.0.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:abfd928d09567e12068aeca875e920def3badf1978896f474aa4b85b552703c4", size = 9586203, upload-time = "2025-12-19T00:14:03.423Z" },
257
+
{ url = "https://files.pythonhosted.org/packages/61/f1/8c3c82a8df69bd4417c77be4f895d043db26dd47bfcc90b33dc109cd0096/ty-0.0.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:44b8e94f9d64df12eae4cf8031c5ca9a4c610b57092b26ad3d68d91bcc7af122", size = 9608230, upload-time = "2025-12-19T00:13:58.252Z" },
258
+
{ url = "https://files.pythonhosted.org/packages/51/0c/d8ba3a85c089c246ef6bd49d0f0b40bc0f9209bb819e8c02ccbea5cb4d57/ty-0.0.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9d6a439813e21a06769daf858105818c385d88018929d4a56970d4ddd5cd3df2", size = 9725125, upload-time = "2025-12-19T00:14:05.996Z" },
259
+
{ url = "https://files.pythonhosted.org/packages/4d/38/e30f64ad1e40905c766576ec70cffc69163591a5842ce14652672f6ab394/ty-0.0.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c3cfcf26cfe6c828e91d7a529cc2dda37bc3b51ba06909c9be07002a6584af52", size = 10237174, upload-time = "2025-12-19T00:14:23.858Z" },
260
+
{ url = "https://files.pythonhosted.org/packages/cb/d7/8d650aa0be8936dd3ed74e2b0655230e2904caa6077c30c16a089b523cff/ty-0.0.4-py3-none-win32.whl", hash = "sha256:58bbf70dd27af6b00dedbdebeec92d5993aa238664f96fa5c0064930f7a0d30b", size = 9188434, upload-time = "2025-12-19T00:13:45.875Z" },
261
+
{ url = "https://files.pythonhosted.org/packages/82/d7/9fc0c81cf0b0d281ac9c18bfbdb4d6bae2173503ba79e40b210ab41c2c8b/ty-0.0.4-py3-none-win_amd64.whl", hash = "sha256:7c2db0f96218f08c140bd9d3fcbb1b3c8c5c4f0c9b0a5624487f0a2bf4b76163", size = 10019313, upload-time = "2025-12-19T00:14:15.968Z" },
262
+
{ url = "https://files.pythonhosted.org/packages/5f/b8/3e3246738eed1cd695c5964a401f3b9c757d20ac21fdae06281af9f40ef6/ty-0.0.4-py3-none-win_arm64.whl", hash = "sha256:69f14fc98e4a847afa9f8c5d5234d008820dbc09c7dcdb3ac1ba16628f5132df", size = 9561857, upload-time = "2025-12-19T00:13:48.382Z" },
263
+
]
264
+
265
+
[[package]]
266
+
name = "wcwidth"
267
+
version = "0.2.14"
268
+
source = { registry = "https://pypi.org/simple" }
269
+
sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" }
270
+
wheels = [
271
+
{ url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" },
272
+
]
273
+
274
+
[[package]]
275
+
name = "wire-vm-tests"
276
+
version = "0.0.0"
277
+
source = { virtual = "." }
278
+
dependencies = [
279
+
{ name = "colorama" },
280
+
{ name = "ipython" },
281
+
{ name = "junit-xml" },
282
+
{ name = "nixos-test-driver" },
283
+
{ name = "ptpython" },
284
+
{ name = "remote-pdb" },
285
+
]
286
+
287
+
[package.dev-dependencies]
288
+
dev = [
289
+
{ name = "ty" },
290
+
]
291
+
292
+
[package.metadata]
293
+
requires-dist = [
294
+
{ name = "colorama", specifier = ">=0.4.6" },
295
+
{ name = "ipython", specifier = ">=9.8.0" },
296
+
{ name = "junit-xml", specifier = ">=1.9" },
297
+
{ name = "nixos-test-driver", git = "https://github.com/NixOS/nixpkgs?subdirectory=nixos%2Flib%2Ftest-driver%2Fsrc&branch=nixos-25.11" },
298
+
{ name = "ptpython", specifier = ">=3.0.32" },
299
+
{ name = "remote-pdb", specifier = ">=2.1.0" },
300
+
]
301
+
302
+
[package.metadata.requires-dev]
303
+
dev = [{ name = "ty", specifier = ">=0.0.4" }]
+3
tests/rust/_keys_should_fail/hive.nix
+3
tests/rust/_keys_should_fail/hive.nix
+3
tests/rust/default_values_match/hive.nix
+3
tests/rust/default_values_match/hive.nix
+3
tests/rust/flake_hive/flake.nix
+3
tests/rust/flake_hive/flake.nix
+3
tests/rust/no_nixpkgs/hive.nix
+3
tests/rust/no_nixpkgs/hive.nix
+5
tests/rust/non_trivial_hive/hive.nix
+5
tests/rust/non_trivial_hive/hive.nix
···
1
+
# SPDX-License-Identifier: AGPL-3.0-or-later
2
+
# Copyright 2024-2025 wire Contributors
3
+
1
4
let
2
5
inherit (import ../../..) makeHive;
3
6
in
···
31
34
name = "different-than-a";
32
35
source = "hi";
33
36
};
37
+
38
+
deployment.buildOnTarget = true;
34
39
35
40
nixpkgs.hostPlatform = "x86_64-linux";
36
41
};
+3
tests/rust/test_hive_dot_nix_priority/hive.nix
+3
tests/rust/test_hive_dot_nix_priority/hive.nix
+3
tests/rust/test_hive_file/hive.nix
+3
tests/rust/test_hive_file/hive.nix
+24
typos.toml
+24
typos.toml
···
1
+
[files]
2
+
extend-exclude = ["COPYING"]
3
+
4
+
[default]
5
+
locale = "en-au"
6
+
7
+
[type.nix]
8
+
# nixpkgs
9
+
extend-ignore-re = ["authorizedKeys", "sanitizeDerivationName"]
10
+
11
+
[type.md]
12
+
extend-ignore-re = ["authorizedKeys", "Initialized empty Git"]
13
+
14
+
[type.rust.extend-words]
15
+
# serde
16
+
serialize = "serialize"
17
+
18
+
[type.yaml.extend-words]
19
+
# github
20
+
labeler = "labeler"
21
+
22
+
[type.ts.extend-words]
23
+
# vite
24
+
optimize = "optimize"
-29
wire/cli/Cargo.toml
-29
wire/cli/Cargo.toml
···
1
-
[package]
2
-
name = "wire"
3
-
version.workspace = true
4
-
edition.workspace = true
5
-
6
-
[features]
7
-
dhat-heap = []
8
-
9
-
[dependencies]
10
-
clap = { workspace = true }
11
-
clap-verbosity-flag = { workspace = true }
12
-
serde = { workspace = true }
13
-
tokio = { workspace = true }
14
-
tracing = { workspace = true }
15
-
tracing-log = { workspace = true }
16
-
tracing-subscriber = { workspace = true }
17
-
lib = { path = "../lib" }
18
-
serde_json = { workspace = true }
19
-
miette = { workspace = true }
20
-
thiserror = { workspace = true }
21
-
indicatif = "0.18.0"
22
-
enum-display-derive = "0.1.1"
23
-
im = { workspace = true }
24
-
futures = "0.3.31"
25
-
clap-num = "1.2.0"
26
-
clap-markdown = "0.1.5"
27
-
itertools = "0.14.0"
28
-
dhat = "0.3.2"
29
-
clap_complete = "4.5.58"
-67
wire/cli/default.nix
-67
wire/cli/default.nix
···
1
-
{ getSystem, inputs, ... }:
2
-
{
3
-
perSystem =
4
-
{
5
-
pkgs,
6
-
lib,
7
-
self',
8
-
buildRustProgram,
9
-
system,
10
-
...
11
-
}:
12
-
let
13
-
cleanSystem = system: lib.replaceStrings [ "-" ] [ "_" ] system;
14
-
agents = lib.strings.concatMapStrings (
15
-
system: "--set WIRE_KEY_AGENT_${cleanSystem system} ${(getSystem system).packages.agent} "
16
-
) (import inputs.linux-systems);
17
-
in
18
-
{
19
-
packages = {
20
-
default = self'.packages.wire;
21
-
wire-unwrapped = buildRustProgram {
22
-
name = "wire";
23
-
pname = "wire";
24
-
cargoExtraArgs = "-p wire";
25
-
doCheck = true;
26
-
nativeBuildInputs = [ pkgs.installShellFiles ];
27
-
postInstall = ''
28
-
installShellCompletion --cmd wire \
29
-
--bash <($out/bin/wire completions bash) \
30
-
--fish <($out/bin/wire completions fish) \
31
-
--zsh <($out/bin/wire completions zsh)
32
-
'';
33
-
};
34
-
35
-
wire = pkgs.symlinkJoin {
36
-
name = "wire";
37
-
paths = [ self'.packages.wire-unwrapped ];
38
-
nativeBuildInputs = [
39
-
pkgs.makeWrapper
40
-
];
41
-
postBuild = ''
42
-
wrapProgram $out/bin/wire ${agents}
43
-
'';
44
-
meta.mainProgram = "wire";
45
-
};
46
-
47
-
wire-small = pkgs.symlinkJoin {
48
-
name = "wire";
49
-
paths = [ self'.packages.wire-unwrapped ];
50
-
nativeBuildInputs = [
51
-
pkgs.makeWrapper
52
-
];
53
-
postBuild = ''
54
-
wrapProgram $out/bin/wire --set WIRE_KEY_AGENT_${cleanSystem system} ${self'.packages.agent}
55
-
'';
56
-
meta.mainProgram = "wire";
57
-
};
58
-
59
-
wire-dignostics-md = self'.packages.wire-unwrapped.overrideAttrs {
60
-
DIAGNOSTICS_MD_OUTPUT = "/build/source";
61
-
installPhase = ''
62
-
mv /build/source/DIAGNOSTICS.md $out
63
-
'';
64
-
};
65
-
};
66
-
};
67
-
}
-121
wire/cli/src/apply.rs
-121
wire/cli/src/apply.rs
···
1
-
use futures::{FutureExt, StreamExt};
2
-
use itertools::{Either, Itertools};
3
-
use lib::hive::Hive;
4
-
use lib::hive::node::{Context, GoalExecutor, Name, StepState};
5
-
use lib::{SubCommandModifiers, errors::HiveLibError};
6
-
use miette::{Diagnostic, Result};
7
-
use std::collections::HashSet;
8
-
use std::path::PathBuf;
9
-
use std::sync::{Arc, Mutex};
10
-
use thiserror::Error;
11
-
use tracing::{Span, error, info, instrument};
12
-
13
-
use crate::cli::{ApplyArgs, ApplyTarget};
14
-
15
-
#[derive(Debug, Error, Diagnostic)]
16
-
#[error("node {} failed to apply", .0)]
17
-
struct NodeError(
18
-
Name,
19
-
#[source]
20
-
#[diagnostic_source]
21
-
HiveLibError,
22
-
);
23
-
24
-
#[derive(Debug, Error, Diagnostic)]
25
-
#[error("{} node(s) failed to apply.", .0.len())]
26
-
struct NodeErrors(#[related] Vec<NodeError>);
27
-
28
-
#[instrument(skip_all, fields(goal = %args.goal, on = %args.on.iter().join(", ")))]
29
-
pub async fn apply(
30
-
hive: &mut Hive,
31
-
args: ApplyArgs,
32
-
path: PathBuf,
33
-
modifiers: SubCommandModifiers,
34
-
clobber_lock: Arc<Mutex<()>>,
35
-
) -> Result<()> {
36
-
let header_span = Span::current();
37
-
38
-
// Respect user's --always-build-local arg
39
-
hive.force_always_local(args.always_build_local)?;
40
-
41
-
let header_span_enter = header_span.enter();
42
-
43
-
let (tags, names) = args.on.iter().fold(
44
-
(HashSet::new(), HashSet::new()),
45
-
|(mut tags, mut names), target| {
46
-
match target {
47
-
ApplyTarget::Tag(tag) => tags.insert(tag.clone()),
48
-
ApplyTarget::Node(name) => names.insert(name.clone()),
49
-
};
50
-
(tags, names)
51
-
},
52
-
);
53
-
54
-
let mut set = hive
55
-
.nodes
56
-
.iter_mut()
57
-
.filter(|(name, node)| {
58
-
args.on.is_empty()
59
-
|| names.contains(name)
60
-
|| node.tags.iter().any(|tag| tags.contains(tag))
61
-
})
62
-
.map(|node| {
63
-
let path = path.clone();
64
-
65
-
info!("Resolved {:?} to include {}", args.on, node.0);
66
-
67
-
let context = Context {
68
-
node: node.1,
69
-
name: node.0,
70
-
goal: args.goal.clone().try_into().unwrap(),
71
-
state: StepState::default(),
72
-
no_keys: args.no_keys,
73
-
hivepath: path,
74
-
modifiers,
75
-
reboot: args.reboot,
76
-
clobber_lock: clobber_lock.clone(),
77
-
};
78
-
79
-
GoalExecutor::new(context)
80
-
.execute()
81
-
.map(move |result| (node.0, result))
82
-
})
83
-
.peekable();
84
-
85
-
if set.peek().is_none() {
86
-
error!("There are no nodes selected for deployment");
87
-
}
88
-
89
-
let futures = futures::stream::iter(set).buffer_unordered(args.parallel);
90
-
let result = futures.collect::<Vec<_>>().await;
91
-
let (successful, errors): (Vec<_>, Vec<_>) =
92
-
result
93
-
.into_iter()
94
-
.partition_map(|(name, result)| match result {
95
-
Ok(..) => Either::Left(name),
96
-
Err(err) => Either::Right((name, err)),
97
-
});
98
-
99
-
if !successful.is_empty() {
100
-
info!(
101
-
"Successfully applied goal to {} node(s): {:?}",
102
-
successful.len(),
103
-
successful
104
-
);
105
-
}
106
-
107
-
std::mem::drop(header_span_enter);
108
-
std::mem::drop(header_span);
109
-
110
-
if !errors.is_empty() {
111
-
return Err(NodeErrors(
112
-
errors
113
-
.into_iter()
114
-
.map(|(name, error)| NodeError(name.clone(), error))
115
-
.collect(),
116
-
)
117
-
.into());
118
-
}
119
-
120
-
Ok(())
121
-
}
-185
wire/cli/src/cli.rs
-185
wire/cli/src/cli.rs
···
1
-
use clap::crate_version;
2
-
use clap::{Args, Parser, Subcommand, ValueEnum};
3
-
use clap_complete::Shell;
4
-
use clap_num::number_range;
5
-
use clap_verbosity_flag::WarnLevel;
6
-
use lib::SubCommandModifiers;
7
-
use lib::hive::Hive;
8
-
use lib::hive::node::{Goal as HiveGoal, Name, SwitchToConfigurationGoal};
9
-
10
-
use std::io::IsTerminal;
11
-
use std::{
12
-
fmt::{self, Display, Formatter},
13
-
sync::Arc,
14
-
};
15
-
16
-
#[allow(clippy::struct_excessive_bools)]
17
-
#[derive(Parser)]
18
-
#[command(
19
-
name = "wire",
20
-
bin_name = "wire",
21
-
about = "a tool to deploy nixos systems",
22
-
version = format!("{}\nDebug: Hive::SCHEMA_VERSION {}", crate_version!(), Hive::SCHEMA_VERSION)
23
-
)]
24
-
pub struct Cli {
25
-
#[command(subcommand)]
26
-
pub command: Commands,
27
-
28
-
#[command(flatten)]
29
-
pub verbose: clap_verbosity_flag::Verbosity<WarnLevel>,
30
-
31
-
/// Path to directory containing hive
32
-
#[arg(long, global = true, default_value = std::env::current_dir().unwrap().into_os_string())]
33
-
pub path: std::path::PathBuf,
34
-
35
-
// Unused until a solution to tracing-indicatif log deadlocking is found...
36
-
/// Hide progress bars. Defaults to true if stdin does not refer to a tty (unix pipelines, in CI).
37
-
#[arg(long, global = true, default_value_t = !std::io::stdin().is_terminal())]
38
-
pub no_progress: bool,
39
-
40
-
/// Never accept user input.
41
-
/// Defaults to true if stdin does not refer to a tty (unix pipelines, in CI).
42
-
#[arg(long, global = true, default_value_t = !std::io::stdin().is_terminal())]
43
-
pub non_interactive: bool,
44
-
45
-
/// Show trace logs
46
-
#[arg(long, global = true, default_value_t = false)]
47
-
pub show_trace: bool,
48
-
49
-
#[arg(long, hide = true, global = true)]
50
-
pub markdown_help: bool,
51
-
}
52
-
53
-
#[derive(Clone, Debug)]
54
-
pub enum ApplyTarget {
55
-
Node(Name),
56
-
Tag(String),
57
-
}
58
-
59
-
impl From<String> for ApplyTarget {
60
-
fn from(value: String) -> Self {
61
-
if let Some(stripped) = value.strip_prefix("@") {
62
-
ApplyTarget::Tag(stripped.to_string())
63
-
} else {
64
-
ApplyTarget::Node(Name(Arc::from(value.as_str())))
65
-
}
66
-
}
67
-
}
68
-
69
-
impl Display for ApplyTarget {
70
-
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
71
-
match self {
72
-
ApplyTarget::Node(name) => name.fmt(f),
73
-
ApplyTarget::Tag(tag) => write!(f, "@{tag}"),
74
-
}
75
-
}
76
-
}
77
-
78
-
fn more_than_zero(s: &str) -> Result<usize, String> {
79
-
number_range(s, 1, usize::MAX)
80
-
}
81
-
82
-
#[derive(Args)]
83
-
pub struct ApplyArgs {
84
-
#[arg(value_enum, default_value_t)]
85
-
pub goal: Goal,
86
-
87
-
/// List of literal node names or `@` prefixed tags.
88
-
#[arg(short, long, value_name = "NODE | @TAG", num_args = 1..)]
89
-
pub on: Vec<ApplyTarget>,
90
-
91
-
#[arg(short, long, default_value_t = 10, value_parser=more_than_zero)]
92
-
pub parallel: usize,
93
-
94
-
/// Skip key uploads. noop when [GOAL] = Keys
95
-
#[arg(short, long, default_value_t = false)]
96
-
pub no_keys: bool,
97
-
98
-
/// Overrides deployment.buildOnTarget.
99
-
#[arg(short, long, value_name = "NODE")]
100
-
pub always_build_local: Vec<String>,
101
-
102
-
/// Reboot the nodes after activation
103
-
#[arg(short, long, default_value_t = false)]
104
-
pub reboot: bool,
105
-
}
106
-
107
-
#[derive(Subcommand)]
108
-
pub enum Commands {
109
-
/// Deploy nodes
110
-
Apply(ApplyArgs),
111
-
/// Inspect hive
112
-
#[clap(visible_alias = "show")]
113
-
Inspect {
114
-
/// Include liveliness
115
-
#[arg(short, long, default_value_t = false)]
116
-
online: bool,
117
-
118
-
/// Return in JSON format
119
-
#[arg(short, long, default_value_t = false)]
120
-
json: bool,
121
-
},
122
-
/// Generates shell completions
123
-
#[clap(hide = true)]
124
-
Completions {
125
-
#[arg()]
126
-
// Shell to generate completions for
127
-
shell: Shell,
128
-
},
129
-
}
130
-
131
-
#[derive(Clone, Debug, Default, ValueEnum, Display)]
132
-
pub enum Goal {
133
-
/// Make the configuration the boot default and activate now
134
-
#[default]
135
-
Switch,
136
-
/// Build the configuration but do nothing with it
137
-
Build,
138
-
/// Copy system derivation to remote hosts
139
-
Push,
140
-
/// Push deployment keys to remote hosts
141
-
Keys,
142
-
/// Activate system profile on next boot
143
-
Boot,
144
-
/// Activate the configuration, but don't make it the boot default
145
-
Test,
146
-
/// Show what would be done if this configuration were activated.
147
-
DryActivate,
148
-
}
149
-
150
-
impl TryFrom<Goal> for HiveGoal {
151
-
type Error = miette::Error;
152
-
153
-
fn try_from(value: Goal) -> Result<Self, Self::Error> {
154
-
match value {
155
-
Goal::Build => Ok(HiveGoal::Build),
156
-
Goal::Push => Ok(HiveGoal::Push),
157
-
Goal::Boot => Ok(HiveGoal::SwitchToConfiguration(
158
-
SwitchToConfigurationGoal::Boot,
159
-
)),
160
-
Goal::Switch => Ok(HiveGoal::SwitchToConfiguration(
161
-
SwitchToConfigurationGoal::Switch,
162
-
)),
163
-
Goal::Test => Ok(HiveGoal::SwitchToConfiguration(
164
-
SwitchToConfigurationGoal::Test,
165
-
)),
166
-
Goal::DryActivate => Ok(HiveGoal::SwitchToConfiguration(
167
-
SwitchToConfigurationGoal::DryActivate,
168
-
)),
169
-
Goal::Keys => Ok(HiveGoal::Keys),
170
-
}
171
-
}
172
-
}
173
-
174
-
pub trait ToSubCommandModifiers {
175
-
fn to_subcommand_modifiers(&self) -> SubCommandModifiers;
176
-
}
177
-
178
-
impl ToSubCommandModifiers for Cli {
179
-
fn to_subcommand_modifiers(&self) -> SubCommandModifiers {
180
-
SubCommandModifiers {
181
-
show_trace: self.show_trace,
182
-
non_interactive: self.non_interactive,
183
-
}
184
-
}
185
-
}
-94
wire/cli/src/main.rs
-94
wire/cli/src/main.rs
···
1
-
#![deny(clippy::pedantic)]
2
-
#![allow(clippy::missing_panics_doc)]
3
-
use std::process::Command;
4
-
use std::sync::Arc;
5
-
use std::sync::Mutex;
6
-
7
-
use crate::cli::Cli;
8
-
use crate::cli::ToSubCommandModifiers;
9
-
use crate::tracing_setup::setup_logging;
10
-
use clap::CommandFactory;
11
-
use clap::Parser;
12
-
use clap_complete::generate;
13
-
use lib::hive::Hive;
14
-
use miette::IntoDiagnostic;
15
-
use miette::Result;
16
-
use tracing::error;
17
-
use tracing::warn;
18
-
19
-
#[macro_use]
20
-
extern crate enum_display_derive;
21
-
22
-
mod apply;
23
-
mod cli;
24
-
mod tracing_setup;
25
-
26
-
#[cfg(feature = "dhat-heap")]
27
-
#[global_allocator]
28
-
static ALLOC: dhat::Alloc = dhat::Alloc;
29
-
30
-
#[tokio::main]
31
-
async fn main() -> Result<()> {
32
-
#[cfg(feature = "dhat-heap")]
33
-
let _profiler = dhat::Profiler::new_heap();
34
-
let clobber_lock = Arc::new(Mutex::new(()));
35
-
36
-
let args = Cli::parse();
37
-
38
-
let modifiers = args.to_subcommand_modifiers();
39
-
setup_logging(args.verbose, clobber_lock.clone());
40
-
41
-
if args.markdown_help {
42
-
clap_markdown::print_help_markdown::<Cli>();
43
-
return Ok(());
44
-
}
45
-
46
-
if !matches!(args.command, cli::Commands::Completions { .. }) && !check_nix_available() {
47
-
miette::bail!("Nix is not availabile on this system.");
48
-
}
49
-
50
-
match args.command {
51
-
cli::Commands::Apply(apply_args) => {
52
-
let mut hive =
53
-
Hive::new_from_path(args.path.as_path(), modifiers, clobber_lock.clone()).await?;
54
-
apply::apply(&mut hive, apply_args, args.path, modifiers, clobber_lock).await?;
55
-
}
56
-
cli::Commands::Inspect { online: _, json } => println!("{}", {
57
-
let hive = Hive::new_from_path(args.path.as_path(), modifiers, clobber_lock).await?;
58
-
if json {
59
-
serde_json::to_string(&hive).into_diagnostic()?
60
-
} else {
61
-
warn!("use --json to output something scripting suitable");
62
-
format!("{hive:#?}")
63
-
}
64
-
}),
65
-
cli::Commands::Completions { shell } => {
66
-
let mut cmd = Cli::command();
67
-
let name = cmd.clone();
68
-
generate(shell, &mut cmd, name.get_name(), &mut std::io::stdout());
69
-
}
70
-
}
71
-
72
-
Ok(())
73
-
}
74
-
75
-
fn check_nix_available() -> bool {
76
-
match Command::new("nix")
77
-
.stdout(std::process::Stdio::null())
78
-
.stderr(std::process::Stdio::null())
79
-
.spawn()
80
-
{
81
-
Ok(_) => true,
82
-
Err(e) => {
83
-
if let std::io::ErrorKind::NotFound = e.kind() {
84
-
false
85
-
} else {
86
-
error!(
87
-
"Something weird happened checking for nix availability, {}",
88
-
e
89
-
);
90
-
false
91
-
}
92
-
}
93
-
}
94
-
}
-73
wire/cli/src/tracing_setup.rs
-73
wire/cli/src/tracing_setup.rs
···
1
-
use std::{
2
-
collections::VecDeque,
3
-
io::{self, Stderr, Write, stderr},
4
-
sync::{Arc, Mutex, TryLockError},
5
-
};
6
-
7
-
use clap_verbosity_flag::{Verbosity, WarnLevel};
8
-
use tracing_log::AsTrace;
9
-
use tracing_subscriber::{Layer, Registry, layer::SubscriberExt, util::SubscriberInitExt};
10
-
11
-
struct NonClobberingWriter {
12
-
clobber_lock: Arc<Mutex<()>>,
13
-
queue: VecDeque<Vec<u8>>,
14
-
stderr: Stderr,
15
-
}
16
-
17
-
impl NonClobberingWriter {
18
-
fn new(clobber_lock: Arc<Mutex<()>>) -> Self {
19
-
NonClobberingWriter {
20
-
clobber_lock,
21
-
queue: VecDeque::with_capacity(100),
22
-
stderr: stderr(),
23
-
}
24
-
}
25
-
26
-
fn dump_previous(&mut self) -> Result<(), io::Error> {
27
-
for buf in self.queue.iter().rev() {
28
-
self.stderr.write(buf).map(|_| ())?;
29
-
}
30
-
31
-
self.stderr.flush()?;
32
-
33
-
Ok(())
34
-
}
35
-
}
36
-
37
-
impl Write for NonClobberingWriter {
38
-
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
39
-
match self.clobber_lock.clone().try_lock() {
40
-
Ok(_) => {
41
-
self.dump_previous().map(|()| 0)?;
42
-
43
-
self.stderr.write(buf)
44
-
}
45
-
Err(e) => match e {
46
-
TryLockError::Poisoned(_) => {
47
-
panic!("Internal stdout clobber lock is posioned. Please create an issue.");
48
-
}
49
-
TryLockError::WouldBlock => {
50
-
self.queue.push_front(buf.to_vec());
51
-
52
-
Ok(buf.len())
53
-
}
54
-
},
55
-
}
56
-
}
57
-
58
-
fn flush(&mut self) -> std::io::Result<()> {
59
-
self.stderr.flush()
60
-
}
61
-
}
62
-
63
-
pub fn setup_logging(verbosity: Verbosity<WarnLevel>, clobber_lock: Arc<Mutex<()>>) {
64
-
let filter = verbosity.log_level_filter().as_trace();
65
-
let registry = tracing_subscriber::registry();
66
-
67
-
let layer = tracing_subscriber::fmt::layer::<Registry>()
68
-
.without_time()
69
-
.with_writer(move || NonClobberingWriter::new(clobber_lock.clone()))
70
-
.with_filter(filter);
71
-
72
-
registry.with(layer).init();
73
-
}
-13
wire/key_agent/Cargo.toml
-13
wire/key_agent/Cargo.toml
···
1
-
[package]
2
-
name = "key_agent"
3
-
edition.workspace = true
4
-
version.workspace = true
5
-
6
-
[dependencies]
7
-
tokio = { workspace = true }
8
-
anyhow = { workspace = true }
9
-
prost = { workspace = true }
10
-
nix = { workspace = true }
11
-
12
-
[build-dependencies]
13
-
prost-build = "0.14"
-5
wire/key_agent/build.rs
-5
wire/key_agent/build.rs
-17
wire/key_agent/default.nix
-17
wire/key_agent/default.nix
-15
wire/key_agent/src/keys.proto
-15
wire/key_agent/src/keys.proto
-3
wire/key_agent/src/lib.rs
-3
wire/key_agent/src/lib.rs
-70
wire/key_agent/src/main.rs
-70
wire/key_agent/src/main.rs
···
1
-
#![deny(clippy::pedantic)]
2
-
use nix::unistd::{Group, User};
3
-
use prost::Message;
4
-
use std::env;
5
-
use std::os::unix::fs::PermissionsExt;
6
-
use std::path::{Path, PathBuf};
7
-
use std::{
8
-
io::{Cursor, Read},
9
-
os::unix::fs::chown,
10
-
};
11
-
use tokio::fs::File;
12
-
use tokio::io::AsyncWriteExt;
13
-
14
-
use key_agent::keys::Keys;
15
-
16
-
fn create_path(key_path: &Path) -> Result<(), anyhow::Error> {
17
-
let prefix = key_path.parent().unwrap();
18
-
std::fs::create_dir_all(prefix)?;
19
-
20
-
Ok(())
21
-
}
22
-
23
-
#[tokio::main]
24
-
async fn main() -> Result<(), anyhow::Error> {
25
-
let mut stdin = std::io::stdin();
26
-
let length: usize = env::args().nth(1).expect("failed to grab arg").parse()?;
27
-
let mut msg_buf = vec![0u8; length];
28
-
29
-
stdin.read_exact(&mut msg_buf)?;
30
-
31
-
let msg = Keys::decode(&mut Cursor::new(&msg_buf))?;
32
-
33
-
println!("{msg:?}");
34
-
35
-
for key in msg.keys {
36
-
let path = PathBuf::from(&key.destination);
37
-
create_path(&path)?;
38
-
39
-
let mut file = File::create(path).await?;
40
-
let mut permissions = file.metadata().await?.permissions();
41
-
42
-
permissions.set_mode(key.permissions);
43
-
file.set_permissions(permissions).await?;
44
-
45
-
let user = User::from_name(&key.user)?;
46
-
let group = Group::from_name(&key.group)?;
47
-
48
-
chown(
49
-
key.destination,
50
-
// Default uid/gid to 0. This is then wrapped around an Option again for
51
-
// the function.
52
-
Some(user.map_or(0, |user| user.uid.into())),
53
-
Some(group.map_or(0, |group| group.gid.into())),
54
-
)?;
55
-
56
-
let mut file_buf = vec![
57
-
0u8;
58
-
key.length
59
-
.try_into()
60
-
.expect("failed to convert size to usize")
61
-
];
62
-
63
-
stdin.read_exact(&mut file_buf)?;
64
-
file.write_all(&file_buf).await?;
65
-
66
-
println!("Wrote to {file:?}");
67
-
}
68
-
69
-
Ok(())
70
-
}
-42
wire/lib/Cargo.toml
-42
wire/lib/Cargo.toml
···
1
-
[package]
2
-
name = "lib"
3
-
version.workspace = true
4
-
edition.workspace = true
5
-
6
-
[features]
7
-
no_web_tests = []
8
-
9
-
[dependencies]
10
-
tokio = { workspace = true }
11
-
serde = { workspace = true }
12
-
serde_json = { workspace = true }
13
-
tracing = { workspace = true }
14
-
tracing-subscriber = { workspace = true }
15
-
im = { workspace = true }
16
-
serde-query = "0.2.0"
17
-
thiserror = "2.0.16"
18
-
serde_repr = "0.1.20"
19
-
regex = "1.11.2"
20
-
derive_more = { version = "2.0.1", features = ["display"] }
21
-
key_agent = { path = "../key_agent" }
22
-
futures = "0.3.31"
23
-
prost = { workspace = true }
24
-
gethostname = "1.0.2"
25
-
nix.workspace = true
26
-
miette = { workspace = true }
27
-
rand = "0.9.2"
28
-
tokio-util = "0.7.16"
29
-
portable-pty = "0.9.0"
30
-
anyhow.workspace = true
31
-
itertools = "0.14.0"
32
-
enum_dispatch = "0.3.13"
33
-
34
-
[dev-dependencies]
35
-
tempdir = "0.3"
36
-
visibility = "0.1.1"
37
-
38
-
[build-dependencies]
39
-
miette = { workspace = true }
40
-
syn = "2.0.106"
41
-
proc-macro2 = "1.0.101"
42
-
itertools = "0.14.0"
-202
wire/lib/build.rs
-202
wire/lib/build.rs
···
1
-
use miette::{Context, IntoDiagnostic as _, Result, miette};
2
-
use std::{
3
-
env,
4
-
fmt::{self, Display, Formatter},
5
-
fs::{self},
6
-
path::Path,
7
-
};
8
-
9
-
use itertools::Itertools;
10
-
use proc_macro2::TokenTree;
11
-
use syn::{Expr, Item, ItemEnum, Lit, Meta, MetaList, MetaNameValue, parse_file};
12
-
13
-
macro_rules! p {
14
-
($($tokens: tt)*) => {
15
-
println!("cargo::warning={}", format!($($tokens)*))
16
-
}
17
-
}
18
-
19
-
#[derive(Debug)]
20
-
struct DerviedError {
21
-
code: Option<String>,
22
-
help: Option<String>,
23
-
message: Option<String>,
24
-
doc_string: String,
25
-
}
26
-
27
-
impl Display for DerviedError {
28
-
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
29
-
write!(
30
-
f,
31
-
"## `{code}` {{#{code}}}
32
-
33
-
{doc}
34
-
{message}
35
-
{help}",
36
-
doc = self.doc_string,
37
-
code = self.code.as_ref().unwrap(),
38
-
help = match &self.help {
39
-
Some(help) => format!(
40
-
"
41
-
::: tip HELP
42
-
{help}
43
-
:::"
44
-
),
45
-
None => "".to_string(),
46
-
},
47
-
message = match &self.message {
48
-
Some(message) => format!(
49
-
"
50
-
```txt [message]
51
-
{message}
52
-
```"
53
-
),
54
-
None => "".to_string(),
55
-
}
56
-
)
57
-
}
58
-
}
59
-
60
-
impl DerviedError {
61
-
fn get_error(&mut self, list: &MetaList) -> Result<(), miette::Error> {
62
-
if list.path.segments.last().unwrap().ident != "error" {
63
-
return Err(miette!("Not an error"));
64
-
}
65
-
66
-
self.message = Some(
67
-
list.tokens
68
-
.clone()
69
-
.into_iter()
70
-
.filter(|tok| matches!(tok, TokenTree::Literal(tok) if tok.to_string().starts_with("\"")))
71
-
.map(|tok| tok.to_string())
72
-
.join(""),
73
-
);
74
-
75
-
Err(miette!("No error msg found"))
76
-
}
77
-
78
-
fn update_diagnostic(&mut self, list: &MetaList) -> Result<(), miette::Error> {
79
-
if list.path.segments.last().unwrap().ident != "diagnostic" {
80
-
return Err(miette!("Not a diagnostic"));
81
-
}
82
-
83
-
let vec: Vec<_> = list.tokens.clone().into_iter().collect();
84
-
85
-
// Find `diagnostic(code(x::y::z))`
86
-
let code: Option<String> = if let Some((_, TokenTree::Group(group))) =
87
-
vec.iter().tuple_windows().find(|(ident, group)| {
88
-
matches!(ident, TokenTree::Ident(ident) if ident == "code")
89
-
&& matches!(group, TokenTree::Group(..))
90
-
}) {
91
-
Some(group.stream().to_string().replace(" ", ""))
92
-
} else {
93
-
None
94
-
};
95
-
96
-
// Find `diagnostic(help("hi"))`
97
-
let help: Option<String> = if let Some((_, TokenTree::Group(group))) =
98
-
vec.iter().tuple_windows().find(|(ident, group)| {
99
-
matches!(ident, TokenTree::Ident(ident) if ident == "help")
100
-
&& matches!(group, TokenTree::Group(..))
101
-
}) {
102
-
Some(group.stream().to_string())
103
-
} else {
104
-
None
105
-
};
106
-
107
-
if let Some(code) = code {
108
-
self.code = Some(code);
109
-
self.help = help;
110
-
return Ok(());
111
-
}
112
-
113
-
Err(miette!("Had no code."))
114
-
}
115
-
116
-
fn update_from_list(&mut self, list: MetaList) {
117
-
let _ = self.get_error(&list);
118
-
let _ = self.update_diagnostic(&list);
119
-
}
120
-
121
-
fn update_from_namevalue(&mut self, list: MetaNameValue) -> Result<(), miette::Error> {
122
-
if list.path.segments.last().unwrap().ident != "doc" {
123
-
return Err(miette!("Not a doc string"));
124
-
}
125
-
126
-
if let Expr::Lit(lit) = list.value {
127
-
if let Lit::Str(str) = lit.lit {
128
-
self.doc_string
129
-
.push_str(&format!("{}\n\n", &str.value()[1..]));
130
-
}
131
-
}
132
-
133
-
Ok(())
134
-
}
135
-
}
136
-
137
-
fn main() -> Result<()> {
138
-
println!("cargo:rerun-if-changed=src/errors.rs");
139
-
140
-
let manifest_dir = env::var("CARGO_MANIFEST_DIR").into_diagnostic()?;
141
-
let md_out_dir = if let Ok(path) = env::var("DIAGNOSTICS_MD_OUTPUT") {
142
-
path
143
-
} else {
144
-
return Ok(());
145
-
};
146
-
147
-
let src_path = Path::new(&manifest_dir).join("src/errors.rs");
148
-
let src = fs::read_to_string(&src_path)
149
-
.into_diagnostic()
150
-
.wrap_err("reading errors.rs")?;
151
-
152
-
let syntax_tree = parse_file(&src)
153
-
.into_diagnostic()
154
-
.wrap_err("parsing errors.rs")?;
155
-
let mut entries: Vec<DerviedError> = Vec::new();
156
-
157
-
for item in &syntax_tree.items {
158
-
if let Item::Enum(ItemEnum { variants, .. }) = item {
159
-
for variant in variants {
160
-
let mut entry = DerviedError {
161
-
code: None,
162
-
help: None,
163
-
message: None,
164
-
doc_string: String::new(),
165
-
};
166
-
167
-
for attribute in variant.attrs.clone() {
168
-
match attribute.meta {
169
-
Meta::List(list) => {
170
-
entry.update_from_list(list);
171
-
}
172
-
Meta::NameValue(nv) => {
173
-
let _ = entry.update_from_namevalue(nv);
174
-
}
175
-
_ => {}
176
-
}
177
-
}
178
-
179
-
if entry.code.is_some() {
180
-
entries.push(entry);
181
-
}
182
-
}
183
-
}
184
-
}
185
-
186
-
fs::create_dir_all(Path::new(&md_out_dir))
187
-
.into_diagnostic()
188
-
.wrap_err("creating target directory")?;
189
-
fs::write(
190
-
Path::new(&md_out_dir).join("DIAGNOSTICS.md"),
191
-
entries.iter().map(|x| x.to_string()).join("\n\n"),
192
-
)
193
-
.into_diagnostic()
194
-
.wrap_err("writing DIAGNOSTICS.md")?;
195
-
196
-
p!(
197
-
"wrote to {:?}",
198
-
Path::new(&md_out_dir).join("DIAGNOSTICS.md")
199
-
);
200
-
201
-
Ok(())
202
-
}
-109
wire/lib/src/commands/common.rs
-109
wire/lib/src/commands/common.rs
···
1
-
use std::{
2
-
collections::HashMap,
3
-
path::Path,
4
-
sync::{Arc, Mutex},
5
-
};
6
-
7
-
use crate::{
8
-
EvalGoal, SubCommandModifiers,
9
-
commands::{
10
-
ChildOutputMode, WireCommand, WireCommandChip, noninteractive::NonInteractiveCommand,
11
-
},
12
-
errors::{HiveInitializationError, HiveLibError},
13
-
hive::{
14
-
find_hive,
15
-
node::{Name, Node, Push},
16
-
},
17
-
};
18
-
19
-
pub async fn push(
20
-
node: &Node,
21
-
name: &Name,
22
-
push: Push<'_>,
23
-
clobber_lock: Arc<Mutex<()>>,
24
-
) -> Result<(), HiveLibError> {
25
-
let mut command = NonInteractiveCommand::spawn_new(None, ChildOutputMode::Nix).await?;
26
-
27
-
let command_string = format!(
28
-
"nix --extra-experimental-features nix-command \
29
-
copy --substitute-on-destination --to ssh://{user}@{host} {path}",
30
-
user = node.target.user,
31
-
host = node.target.get_preffered_host()?,
32
-
path = match push {
33
-
Push::Derivation(drv) => format!("{drv} --derivation"),
34
-
Push::Path(path) => path.clone(),
35
-
}
36
-
);
37
-
38
-
let child = command.run_command_with_env(
39
-
command_string,
40
-
false,
41
-
HashMap::from([("NIX_SSHOPTS".into(), format!("-p {}", node.target.port))]),
42
-
clobber_lock,
43
-
)?;
44
-
45
-
child
46
-
.wait_till_success()
47
-
.await
48
-
.map_err(|error| HiveLibError::NixCopyError {
49
-
name: name.clone(),
50
-
path: push.to_string(),
51
-
error: Box::new(error),
52
-
})?;
53
-
54
-
Ok(())
55
-
}
56
-
57
-
/// Evaluates the hive in path with regards to the given goal,
58
-
/// and returns stdout.
59
-
pub async fn evaluate_hive_attribute(
60
-
path: &Path,
61
-
goal: &EvalGoal<'_>,
62
-
modifiers: SubCommandModifiers,
63
-
clobber_lock: Arc<Mutex<()>>,
64
-
) -> Result<String, HiveLibError> {
65
-
let canon_path =
66
-
find_hive(&path.canonicalize().unwrap()).ok_or(HiveLibError::HiveInitializationError(
67
-
HiveInitializationError::NoHiveFound(path.to_path_buf()),
68
-
))?;
69
-
70
-
let mut command = NonInteractiveCommand::spawn_new(None, ChildOutputMode::Nix).await?;
71
-
let attribute = if canon_path.ends_with("flake.nix") {
72
-
format!(
73
-
"{}#wire --apply \"hive: {}\"",
74
-
canon_path.to_str().unwrap(),
75
-
match goal {
76
-
EvalGoal::Inspect => "hive.inspect".to_string(),
77
-
EvalGoal::GetTopLevel(node) => format!("hive.topLevels.{node}"),
78
-
}
79
-
)
80
-
} else {
81
-
format!(
82
-
"--file {} {}",
83
-
&canon_path.to_string_lossy(),
84
-
match goal {
85
-
EvalGoal::Inspect => "inspect".to_string(),
86
-
EvalGoal::GetTopLevel(node) => format!("topLevels.{node}"),
87
-
}
88
-
)
89
-
};
90
-
91
-
let command_string = format!(
92
-
"nix --extra-experimental-features nix-command \
93
-
--extra-experimental-features flakes \
94
-
eval --json {mods} {attribute}",
95
-
mods = if modifiers.show_trace {
96
-
"--show-trace"
97
-
} else {
98
-
""
99
-
},
100
-
);
101
-
102
-
let child = command.run_command(command_string, false, clobber_lock)?;
103
-
104
-
child
105
-
.wait_till_success()
106
-
.await
107
-
.map_err(|source| HiveLibError::NixEvalError { attribute, source })
108
-
.map(|(_, stdout)| stdout)
109
-
}
-533
wire/lib/src/commands/interactive.rs
-533
wire/lib/src/commands/interactive.rs
···
1
-
use nix::sys::termios::{LocalFlags, SetArg, Termios, tcgetattr, tcsetattr};
2
-
use nix::{
3
-
poll::{PollFd, PollFlags, PollTimeout, poll},
4
-
unistd::{pipe as posix_pipe, read as posix_read, write as posix_write},
5
-
};
6
-
use portable_pty::{NativePtySystem, PtySize};
7
-
use rand::distr::Alphabetic;
8
-
use std::collections::VecDeque;
9
-
use std::sync::mpsc::{self, Sender};
10
-
use std::sync::{Condvar, Mutex};
11
-
use std::thread::JoinHandle;
12
-
use std::{
13
-
io::{Read, Write},
14
-
os::fd::{AsFd, OwnedFd},
15
-
sync::Arc,
16
-
};
17
-
use tracing::{debug, error, info, trace};
18
-
19
-
use crate::errors::CommandError;
20
-
use crate::nix_log::NixLog;
21
-
use crate::{
22
-
commands::{ChildOutputMode, WireCommand, WireCommandChip},
23
-
errors::HiveLibError,
24
-
hive::node::Target,
25
-
};
26
-
27
-
type MasterWriter = Box<dyn Write + Send>;
28
-
type MasterReader = Box<dyn Read + Send>;
29
-
type Child = Box<dyn portable_pty::Child + Send + Sync>;
30
-
31
-
pub(crate) struct InteractiveCommand<'t> {
32
-
target: Option<&'t Target>,
33
-
output_mode: Arc<ChildOutputMode>,
34
-
succeed_needle: Arc<String>,
35
-
failed_needle: Arc<String>,
36
-
start_needle: Arc<String>,
37
-
}
38
-
39
-
pub(crate) struct InteractiveChildChip {
40
-
child: Child,
41
-
42
-
cancel_stdin_pipe_w: OwnedFd,
43
-
write_stdin_pipe_w: OwnedFd,
44
-
45
-
error_collection: Arc<Mutex<VecDeque<String>>>,
46
-
47
-
command_string: String,
48
-
49
-
completion_status: Arc<CompletionStatus>,
50
-
stdout_handle: JoinHandle<Result<(), CommandError>>,
51
-
}
52
-
53
-
struct StdinTermiosAttrGuard(Termios);
54
-
55
-
struct CompletionStatus {
56
-
completed: Mutex<bool>,
57
-
success: Mutex<Option<bool>>,
58
-
condvar: Condvar,
59
-
}
60
-
61
-
struct WatchStdinArguments {
62
-
began_tx: Sender<()>,
63
-
reader: MasterReader,
64
-
succeed_needle: Arc<String>,
65
-
failed_needle: Arc<String>,
66
-
start_needle: Arc<String>,
67
-
output_mode: Arc<ChildOutputMode>,
68
-
collection: Arc<Mutex<VecDeque<String>>>,
69
-
completion_status: Arc<CompletionStatus>,
70
-
}
71
-
72
-
/// the underlying command began
73
-
const THREAD_BEGAN_SIGNAL: &[u8; 1] = b"b";
74
-
const THREAD_QUIT_SIGNAL: &[u8; 1] = b"q";
75
-
76
-
impl<'t> WireCommand<'t> for InteractiveCommand<'t> {
77
-
type ChildChip = InteractiveChildChip;
78
-
79
-
async fn spawn_new(
80
-
target: Option<&'t Target>,
81
-
output_mode: ChildOutputMode,
82
-
) -> Result<InteractiveCommand<'t>, HiveLibError> {
83
-
let output_mode = Arc::new(output_mode);
84
-
let tmp_prefix = rand::distr::SampleString::sample_string(&Alphabetic, &mut rand::rng(), 5);
85
-
let succeed_needle = Arc::new(format!("{tmp_prefix}_WIRE_QUIT"));
86
-
let failed_needle = Arc::new(format!("{tmp_prefix}_WIRE_FAIL"));
87
-
let start_needle = Arc::new(format!("{tmp_prefix}_WIRE_START"));
88
-
89
-
Ok(Self {
90
-
target,
91
-
output_mode,
92
-
succeed_needle,
93
-
failed_needle,
94
-
start_needle,
95
-
})
96
-
}
97
-
98
-
#[allow(clippy::too_many_lines)]
99
-
fn run_command_with_env<S: AsRef<str>>(
100
-
&mut self,
101
-
command_string: S,
102
-
keep_stdin_open: bool,
103
-
envs: std::collections::HashMap<String, String>,
104
-
clobber_lock: Arc<Mutex<()>>,
105
-
) -> Result<Self::ChildChip, HiveLibError> {
106
-
if let Some(target) = self.target {
107
-
if target.user != "root".into() {
108
-
eprintln!(
109
-
"Non-root user: Please authenticate for \"sudo {}\"",
110
-
command_string.as_ref(),
111
-
);
112
-
}
113
-
}
114
-
115
-
let pty_system = NativePtySystem::default();
116
-
let pty_pair = portable_pty::PtySystem::openpty(&pty_system, PtySize::default()).unwrap();
117
-
118
-
if let Some(fd) = pty_pair.master.as_raw_fd() {
119
-
// convert raw fd to a BorrowedFd
120
-
// safe as `fd` is dropped well before `pty_pair.master`
121
-
let fd = unsafe { std::os::unix::io::BorrowedFd::borrow_raw(fd) };
122
-
let mut termios = tcgetattr(fd)
123
-
.map_err(|x| HiveLibError::CommandError(CommandError::TermAttrs(x)))?;
124
-
125
-
termios.local_flags &= !LocalFlags::ECHO;
126
-
// Key agent does not work well without canonical mode
127
-
termios.local_flags &= !LocalFlags::ICANON;
128
-
// Actually quit
129
-
termios.local_flags &= !LocalFlags::ISIG;
130
-
131
-
tcsetattr(fd, SetArg::TCSANOW, &termios)
132
-
.map_err(|x| HiveLibError::CommandError(CommandError::TermAttrs(x)))?;
133
-
}
134
-
135
-
let command_string = &format!(
136
-
"echo '{start}' && {command} {flags} && echo '{succeed}' || echo '{failed}'",
137
-
start = self.start_needle,
138
-
succeed = self.succeed_needle,
139
-
failed = self.failed_needle,
140
-
command = command_string.as_ref(),
141
-
flags = match *self.output_mode {
142
-
ChildOutputMode::Nix => "--log-format internal-json",
143
-
ChildOutputMode::Raw => "",
144
-
}
145
-
);
146
-
147
-
debug!("{command_string}");
148
-
149
-
let mut command = if let Some(target) = self.target {
150
-
let mut command = create_sync_ssh_command(target)?;
151
-
152
-
// force ssh to use our pesudo terminal
153
-
command.arg("-tt");
154
-
155
-
command
156
-
} else {
157
-
let mut command = portable_pty::CommandBuilder::new("sh");
158
-
159
-
command.arg("-c");
160
-
161
-
command
162
-
};
163
-
164
-
command.args([&format!("sudo -u root -- sh -c \"{command_string}\"")]);
165
-
166
-
// give command all env vars
167
-
for (key, value) in envs {
168
-
command.env(key, value);
169
-
}
170
-
171
-
let clobber_guard = clobber_lock.lock().unwrap();
172
-
let _guard = StdinTermiosAttrGuard::new().map_err(HiveLibError::CommandError)?;
173
-
let child = pty_pair
174
-
.slave
175
-
.spawn_command(command)
176
-
.map_err(|x| HiveLibError::CommandError(CommandError::PortablePty(x)))?;
177
-
178
-
// Release any handles owned by the slave: we don't need it now
179
-
// that we've spawned the child.
180
-
drop(pty_pair.slave);
181
-
182
-
let reader = pty_pair
183
-
.master
184
-
.try_clone_reader()
185
-
.map_err(|x| HiveLibError::CommandError(CommandError::PortablePty(x)))?;
186
-
let master_writer = pty_pair
187
-
.master
188
-
.take_writer()
189
-
.map_err(|x| HiveLibError::CommandError(CommandError::PortablePty(x)))?;
190
-
191
-
let error_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
192
-
let (began_tx, began_rx) = mpsc::channel::<()>();
193
-
let completion_status = Arc::new(CompletionStatus::new());
194
-
195
-
let stdout_handle = {
196
-
let arguments = WatchStdinArguments {
197
-
began_tx,
198
-
reader,
199
-
succeed_needle: self.succeed_needle.clone(),
200
-
failed_needle: self.failed_needle.clone(),
201
-
start_needle: self.start_needle.clone(),
202
-
output_mode: self.output_mode.clone(),
203
-
collection: error_collection.clone(),
204
-
completion_status: completion_status.clone(),
205
-
};
206
-
207
-
std::thread::spawn(move || dynamic_watch_sudo_stdout(arguments))
208
-
};
209
-
210
-
let (write_stdin_pipe_r, write_stdin_pipe_w) =
211
-
posix_pipe().map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
212
-
let (cancel_stdin_pipe_r, cancel_stdin_pipe_w) =
213
-
posix_pipe().map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
214
-
215
-
std::thread::spawn(move || {
216
-
watch_stdin_from_user(&cancel_stdin_pipe_r, master_writer, &write_stdin_pipe_r)
217
-
});
218
-
219
-
info!("Setup threads");
220
-
221
-
let () = began_rx
222
-
.recv()
223
-
.map_err(|x| HiveLibError::CommandError(CommandError::RecvError(x)))?;
224
-
225
-
drop(clobber_guard);
226
-
227
-
if keep_stdin_open {
228
-
trace!("Sending THREAD_BEGAN_SIGNAL");
229
-
230
-
posix_write(&cancel_stdin_pipe_w, THREAD_BEGAN_SIGNAL)
231
-
.map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
232
-
} else {
233
-
trace!("Sending THREAD_QUIT_SIGNAL");
234
-
235
-
posix_write(&cancel_stdin_pipe_w, THREAD_QUIT_SIGNAL)
236
-
.map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
237
-
}
238
-
239
-
Ok(InteractiveChildChip {
240
-
child,
241
-
cancel_stdin_pipe_w,
242
-
write_stdin_pipe_w,
243
-
error_collection,
244
-
command_string: command_string.clone(),
245
-
completion_status,
246
-
stdout_handle,
247
-
})
248
-
}
249
-
}
250
-
251
-
impl CompletionStatus {
252
-
fn new() -> Self {
253
-
CompletionStatus {
254
-
completed: Mutex::new(false),
255
-
success: Mutex::new(None),
256
-
condvar: Condvar::new(),
257
-
}
258
-
}
259
-
260
-
fn mark_completed(&self, was_successful: bool) {
261
-
let mut completed = self.completed.lock().unwrap();
262
-
let mut success = self.success.lock().unwrap();
263
-
264
-
*completed = true;
265
-
*success = Some(was_successful);
266
-
267
-
self.condvar.notify_all();
268
-
}
269
-
270
-
fn wait(&self) -> Option<bool> {
271
-
let mut completed = self.completed.lock().unwrap();
272
-
273
-
while !*completed {
274
-
completed = self.condvar.wait(completed).unwrap();
275
-
}
276
-
277
-
*self.success.lock().unwrap()
278
-
}
279
-
}
280
-
281
-
impl WireCommandChip for InteractiveChildChip {
282
-
type ExitStatus = portable_pty::ExitStatus;
283
-
284
-
async fn wait_till_success(mut self) -> Result<Self::ExitStatus, CommandError> {
285
-
info!("trying to grab status...");
286
-
287
-
drop(self.write_stdin_pipe_w);
288
-
289
-
let exit_status = tokio::task::spawn_blocking(move || self.child.wait())
290
-
.await
291
-
.map_err(CommandError::JoinError)?
292
-
.map_err(CommandError::WaitForStatus)?;
293
-
294
-
debug!("exit_status: {exit_status:?}");
295
-
296
-
self.stdout_handle
297
-
.join()
298
-
.map_err(|_| CommandError::ThreadPanic)??;
299
-
let success = self.completion_status.wait();
300
-
let _ = posix_write(&self.cancel_stdin_pipe_w, THREAD_QUIT_SIGNAL);
301
-
302
-
if let Some(true) = success {
303
-
return Ok(exit_status);
304
-
}
305
-
306
-
debug!("child did not succeed");
307
-
308
-
let mut collection = self.error_collection.lock().unwrap();
309
-
let logs = collection.make_contiguous().join("\n");
310
-
311
-
Err(CommandError::CommandFailed {
312
-
command_ran: self.command_string,
313
-
logs,
314
-
code: format!("code {}", exit_status.exit_code()),
315
-
reason: match success {
316
-
Some(_) => "marked-unsuccessful",
317
-
None => "child-crashed-before-succeeding",
318
-
},
319
-
})
320
-
}
321
-
322
-
async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError> {
323
-
trace!("Writing {} bytes to stdin", data.len());
324
-
325
-
posix_write(&self.write_stdin_pipe_w, &data)
326
-
.map_err(|x| HiveLibError::CommandError(CommandError::PosixPipe(x)))?;
327
-
328
-
Ok(())
329
-
}
330
-
}
331
-
332
-
impl StdinTermiosAttrGuard {
333
-
fn new() -> Result<Self, CommandError> {
334
-
let stdin = std::io::stdin();
335
-
let stdin_fd = stdin.as_fd();
336
-
337
-
let mut termios = tcgetattr(stdin_fd).map_err(CommandError::TermAttrs)?;
338
-
let original_termios = termios.clone();
339
-
340
-
termios.local_flags &= !(LocalFlags::ECHO | LocalFlags::ICANON);
341
-
tcsetattr(stdin_fd, SetArg::TCSANOW, &termios).map_err(CommandError::TermAttrs)?;
342
-
343
-
Ok(StdinTermiosAttrGuard(original_termios))
344
-
}
345
-
}
346
-
347
-
impl Drop for StdinTermiosAttrGuard {
348
-
fn drop(&mut self) {
349
-
let stdin = std::io::stdin();
350
-
let stdin_fd = stdin.as_fd();
351
-
352
-
let _ = tcsetattr(stdin_fd, SetArg::TCSANOW, &self.0);
353
-
}
354
-
}
355
-
356
-
fn create_sync_ssh_command(target: &Target) -> Result<portable_pty::CommandBuilder, HiveLibError> {
357
-
let mut command = portable_pty::CommandBuilder::new("ssh");
358
-
359
-
command.args(["-l", target.user.as_ref()]);
360
-
command.arg(target.get_preffered_host()?.as_ref());
361
-
command.args(["-p", &target.port.to_string()]);
362
-
363
-
Ok(command)
364
-
}
365
-
366
-
fn dynamic_watch_sudo_stdout(arguments: WatchStdinArguments) -> Result<(), CommandError> {
367
-
let WatchStdinArguments {
368
-
began_tx,
369
-
mut reader,
370
-
succeed_needle,
371
-
failed_needle,
372
-
start_needle,
373
-
output_mode,
374
-
collection,
375
-
completion_status,
376
-
} = arguments;
377
-
378
-
let mut buffer = [0u8; 1024];
379
-
let mut stdout = std::io::stdout();
380
-
let mut began = false;
381
-
382
-
'outer: loop {
383
-
match reader.read(&mut buffer) {
384
-
Ok(0) => break 'outer,
385
-
Ok(n) => {
386
-
let new_data = String::from_utf8_lossy(&buffer[..n]);
387
-
388
-
for line in new_data.split_inclusive('\n') {
389
-
trace!("line: {line}");
390
-
391
-
if line.contains(start_needle.as_ref()) {
392
-
debug!("{start_needle} was found, switching mode...");
393
-
let _ = began_tx.send(());
394
-
began = true;
395
-
continue;
396
-
}
397
-
398
-
if line.contains(succeed_needle.as_ref()) {
399
-
debug!("{succeed_needle} was found, marking child as succeeding.");
400
-
completion_status.mark_completed(true);
401
-
break 'outer;
402
-
}
403
-
404
-
if line.contains(failed_needle.as_ref()) {
405
-
debug!("{failed_needle} was found, elevated child did not succeed.");
406
-
completion_status.mark_completed(false);
407
-
break 'outer;
408
-
}
409
-
410
-
if began {
411
-
let log = output_mode.trace(line.to_string());
412
-
413
-
if let Some(NixLog::Internal(log)) = log {
414
-
if let Some(message) = log.get_errorish_message() {
415
-
let mut queue = collection.lock().unwrap();
416
-
// add at most 10 message to the front, drop the rest.
417
-
queue.push_front(message);
418
-
queue.truncate(10);
419
-
}
420
-
}
421
-
} else {
422
-
stdout
423
-
.write_all(new_data.as_bytes())
424
-
.map_err(CommandError::WritingClientStdout)?;
425
-
stdout.flush().map_err(CommandError::WritingClientStdout)?;
426
-
}
427
-
}
428
-
}
429
-
Err(e) => {
430
-
eprintln!("Error reading from PTY: {e}");
431
-
break;
432
-
}
433
-
}
434
-
}
435
-
436
-
let _ = began_tx.send(());
437
-
438
-
// failsafe if there were errors or the reader stopped
439
-
if !*completion_status.completed.lock().unwrap() {
440
-
completion_status.mark_completed(false);
441
-
}
442
-
443
-
debug!("stdout: goodbye");
444
-
445
-
Ok(())
446
-
}
447
-
448
-
/// Exits on any data written to `cancel_pipe_r`
449
-
fn watch_stdin_from_user(
450
-
cancel_pipe_r: &OwnedFd,
451
-
mut master_writer: MasterWriter,
452
-
write_pipe_r: &OwnedFd,
453
-
) -> Result<(), CommandError> {
454
-
const WRITER_POSITION: usize = 0;
455
-
const SIGNAL_POSITION: usize = 1;
456
-
const USER_POSITION: usize = 2;
457
-
458
-
let mut buffer = [0u8; 1024];
459
-
let stdin = std::io::stdin();
460
-
let mut cancel_pipe_buf = [0u8; 1];
461
-
462
-
let user_stdin_fd = std::os::fd::AsFd::as_fd(&stdin);
463
-
let cancel_pipe_r_fd = cancel_pipe_r.as_fd();
464
-
465
-
let mut all_fds = vec![
466
-
PollFd::new(write_pipe_r.as_fd(), PollFlags::POLLIN),
467
-
PollFd::new(cancel_pipe_r.as_fd(), PollFlags::POLLIN),
468
-
PollFd::new(user_stdin_fd, PollFlags::POLLIN),
469
-
];
470
-
471
-
loop {
472
-
match poll(&mut all_fds, PollTimeout::NONE) {
473
-
Ok(0) => {} // timeout, impossible
474
-
Ok(_) => {
475
-
// The user stdin pipe can be removed
476
-
if all_fds.get(USER_POSITION).is_some() {
477
-
if let Some(events) = all_fds[USER_POSITION].revents() {
478
-
if events.contains(PollFlags::POLLIN) {
479
-
trace!("Got stdin from user...");
480
-
let n = posix_read(user_stdin_fd, &mut buffer)
481
-
.map_err(CommandError::PosixPipe)?;
482
-
master_writer
483
-
.write_all(&buffer[..n])
484
-
.map_err(CommandError::WritingMasterStdout)?;
485
-
master_writer
486
-
.flush()
487
-
.map_err(CommandError::WritingMasterStdout)?;
488
-
}
489
-
}
490
-
}
491
-
492
-
if let Some(events) = all_fds[WRITER_POSITION].revents() {
493
-
if events.contains(PollFlags::POLLIN) {
494
-
trace!("Got stdin from writer...");
495
-
let n = posix_read(write_pipe_r, &mut buffer)
496
-
.map_err(CommandError::PosixPipe)?;
497
-
master_writer
498
-
.write_all(&buffer[..n])
499
-
.map_err(CommandError::WritingMasterStdout)?;
500
-
master_writer
501
-
.flush()
502
-
.map_err(CommandError::WritingMasterStdout)?;
503
-
}
504
-
}
505
-
506
-
if let Some(events) = all_fds[SIGNAL_POSITION].revents() {
507
-
if events.contains(PollFlags::POLLIN) {
508
-
let n = posix_read(cancel_pipe_r_fd, &mut cancel_pipe_buf)
509
-
.map_err(CommandError::PosixPipe)?;
510
-
let message = &cancel_pipe_buf[..n];
511
-
512
-
trace!("Got byte from signal pipe: {message:?}");
513
-
514
-
if message == THREAD_QUIT_SIGNAL {
515
-
return Ok(());
516
-
}
517
-
518
-
if message == THREAD_BEGAN_SIGNAL {
519
-
all_fds.remove(USER_POSITION);
520
-
}
521
-
}
522
-
}
523
-
}
524
-
Err(e) => {
525
-
error!("Poll error: {e}");
526
-
break;
527
-
}
528
-
}
529
-
}
530
-
531
-
debug!("stdin_thread: goodbye");
532
-
Ok(())
533
-
}
-156
wire/lib/src/commands/mod.rs
-156
wire/lib/src/commands/mod.rs
···
1
-
use std::{
2
-
collections::HashMap,
3
-
sync::{Arc, Mutex},
4
-
};
5
-
6
-
use itertools::Either;
7
-
8
-
use crate::{
9
-
SubCommandModifiers,
10
-
commands::{
11
-
interactive::{InteractiveChildChip, InteractiveCommand},
12
-
noninteractive::{NonInteractiveChildChip, NonInteractiveCommand},
13
-
},
14
-
errors::{CommandError, HiveLibError},
15
-
hive::node::Target,
16
-
nix_log::{Action, Internal, NixLog, Trace},
17
-
};
18
-
19
-
pub(crate) mod common;
20
-
pub(crate) mod interactive;
21
-
pub(crate) mod noninteractive;
22
-
23
-
#[derive(Copy, Clone)]
24
-
pub(crate) enum ChildOutputMode {
25
-
Raw,
26
-
Nix,
27
-
}
28
-
29
-
pub(crate) async fn get_elevated_command(
30
-
target: Option<&'_ Target>,
31
-
output_mode: ChildOutputMode,
32
-
modifiers: SubCommandModifiers,
33
-
) -> Result<Either<InteractiveCommand<'_>, NonInteractiveCommand<'_>>, HiveLibError> {
34
-
if modifiers.non_interactive {
35
-
return Ok(Either::Left(
36
-
InteractiveCommand::spawn_new(target, output_mode).await?,
37
-
));
38
-
}
39
-
40
-
return Ok(Either::Right(
41
-
NonInteractiveCommand::spawn_new(target, output_mode).await?,
42
-
));
43
-
}
44
-
45
-
pub(crate) trait WireCommand<'target>: Sized {
46
-
type ChildChip;
47
-
48
-
async fn spawn_new(
49
-
target: Option<&'target Target>,
50
-
output_mode: ChildOutputMode,
51
-
) -> Result<Self, HiveLibError>;
52
-
53
-
fn run_command<S: AsRef<str>>(
54
-
&mut self,
55
-
command_string: S,
56
-
keep_stdin_open: bool,
57
-
clobber_lock: Arc<Mutex<()>>,
58
-
) -> Result<Self::ChildChip, HiveLibError> {
59
-
self.run_command_with_env(
60
-
command_string,
61
-
keep_stdin_open,
62
-
std::collections::HashMap::new(),
63
-
clobber_lock,
64
-
)
65
-
}
66
-
67
-
fn run_command_with_env<S: AsRef<str>>(
68
-
&mut self,
69
-
command_string: S,
70
-
keep_stdin_open: bool,
71
-
args: HashMap<String, String>,
72
-
clobber_lock: Arc<Mutex<()>>,
73
-
) -> Result<Self::ChildChip, HiveLibError>;
74
-
}
75
-
76
-
pub(crate) trait WireCommandChip {
77
-
type ExitStatus;
78
-
79
-
async fn wait_till_success(self) -> Result<Self::ExitStatus, CommandError>;
80
-
async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError>;
81
-
}
82
-
83
-
impl WireCommand<'_> for Either<InteractiveCommand<'_>, NonInteractiveCommand<'_>> {
84
-
type ChildChip = Either<InteractiveChildChip, NonInteractiveChildChip>;
85
-
86
-
/// How'd you get here?
87
-
async fn spawn_new(
88
-
_target: Option<&'_ Target>,
89
-
_output_mode: ChildOutputMode,
90
-
) -> Result<Self, HiveLibError> {
91
-
unimplemented!()
92
-
}
93
-
94
-
fn run_command_with_env<S: AsRef<str>>(
95
-
&mut self,
96
-
command_string: S,
97
-
keep_stdin_open: bool,
98
-
args: HashMap<String, String>,
99
-
clobber_lock: Arc<Mutex<()>>,
100
-
) -> Result<Self::ChildChip, HiveLibError> {
101
-
match self {
102
-
Self::Left(left) => left
103
-
.run_command_with_env(command_string, keep_stdin_open, args, clobber_lock)
104
-
.map(Either::Left),
105
-
Self::Right(right) => right
106
-
.run_command_with_env(command_string, keep_stdin_open, args, clobber_lock)
107
-
.map(Either::Right),
108
-
}
109
-
}
110
-
}
111
-
112
-
impl WireCommandChip for Either<InteractiveChildChip, NonInteractiveChildChip> {
113
-
type ExitStatus = Either<portable_pty::ExitStatus, (std::process::ExitStatus, String)>;
114
-
115
-
async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError> {
116
-
match self {
117
-
Self::Left(left) => left.write_stdin(data).await,
118
-
Self::Right(right) => right.write_stdin(data).await,
119
-
}
120
-
}
121
-
122
-
async fn wait_till_success(self) -> Result<Self::ExitStatus, CommandError> {
123
-
match self {
124
-
Self::Left(left) => left.wait_till_success().await.map(Either::Left),
125
-
Self::Right(right) => right.wait_till_success().await.map(Either::Right),
126
-
}
127
-
}
128
-
}
129
-
130
-
impl ChildOutputMode {
131
-
fn trace(self, line: String) -> Option<NixLog> {
132
-
let log = match self {
133
-
ChildOutputMode::Nix => {
134
-
let log =
135
-
serde_json::from_str::<Internal>(line.strip_prefix("@nix ").unwrap_or(&line))
136
-
.map(NixLog::Internal)
137
-
.unwrap_or(NixLog::Raw(line));
138
-
139
-
// Throw out stop logs
140
-
if let NixLog::Internal(Internal {
141
-
action: Action::Stop,
142
-
}) = log
143
-
{
144
-
return None;
145
-
}
146
-
147
-
log
148
-
}
149
-
Self::Raw => NixLog::Raw(line),
150
-
};
151
-
152
-
log.trace();
153
-
154
-
Some(log)
155
-
}
156
-
}
-204
wire/lib/src/commands/noninteractive.rs
-204
wire/lib/src/commands/noninteractive.rs
···
1
-
use std::{
2
-
collections::{HashMap, VecDeque},
3
-
process::ExitStatus,
4
-
sync::Arc,
5
-
};
6
-
7
-
use itertools::Itertools;
8
-
use tokio::{
9
-
io::{AsyncWriteExt, BufReader},
10
-
process::{Child, ChildStdin, Command},
11
-
sync::Mutex,
12
-
task::JoinSet,
13
-
};
14
-
use tracing::{debug, trace};
15
-
16
-
use crate::{
17
-
commands::{ChildOutputMode, WireCommand, WireCommandChip},
18
-
errors::{CommandError, HiveLibError},
19
-
hive::node::Target,
20
-
nix_log::NixLog,
21
-
};
22
-
23
-
pub(crate) struct NonInteractiveCommand<'t> {
24
-
target: Option<&'t Target>,
25
-
output_mode: Arc<ChildOutputMode>,
26
-
}
27
-
28
-
pub(crate) struct NonInteractiveChildChip {
29
-
error_collection: Arc<Mutex<VecDeque<String>>>,
30
-
stdout_collection: Arc<Mutex<VecDeque<String>>>,
31
-
child: Child,
32
-
joinset: JoinSet<()>,
33
-
command_string: String,
34
-
stdin: ChildStdin,
35
-
}
36
-
37
-
impl<'t> WireCommand<'t> for NonInteractiveCommand<'t> {
38
-
type ChildChip = NonInteractiveChildChip;
39
-
40
-
/// If target is Some, then the command will be ran remotely.
41
-
/// Otherwise, the command is ran locally.
42
-
async fn spawn_new(
43
-
target: Option<&'t Target>,
44
-
output_mode: ChildOutputMode,
45
-
) -> Result<Self, crate::errors::HiveLibError> {
46
-
let output_mode = Arc::new(output_mode);
47
-
48
-
Ok(Self {
49
-
target,
50
-
output_mode,
51
-
})
52
-
}
53
-
54
-
fn run_command_with_env<S: AsRef<str>>(
55
-
&mut self,
56
-
command_string: S,
57
-
_keep_stdin_open: bool,
58
-
envs: HashMap<String, String>,
59
-
_clobber_lock: Arc<std::sync::Mutex<()>>,
60
-
) -> Result<Self::ChildChip, HiveLibError> {
61
-
let mut command = if let Some(target) = self.target {
62
-
create_sync_ssh_command(target)?
63
-
} else {
64
-
let mut command = Command::new("sh");
65
-
66
-
command.arg("-c");
67
-
68
-
command
69
-
};
70
-
71
-
let command_string = format!(
72
-
"{command_string}{extra}",
73
-
command_string = command_string.as_ref(),
74
-
extra = match *self.output_mode {
75
-
ChildOutputMode::Raw => "",
76
-
ChildOutputMode::Nix => " --log-format internal-json",
77
-
}
78
-
);
79
-
80
-
command.arg(&command_string);
81
-
command.stdin(std::process::Stdio::piped());
82
-
command.stderr(std::process::Stdio::piped());
83
-
command.stdout(std::process::Stdio::piped());
84
-
command.kill_on_drop(true);
85
-
// command.env_clear();
86
-
command.envs(envs);
87
-
88
-
let mut child = command.spawn().unwrap();
89
-
let error_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
90
-
let stdout_collection = Arc::new(Mutex::new(VecDeque::<String>::with_capacity(10)));
91
-
let stdin = child.stdin.take().unwrap();
92
-
93
-
let stdout_handle = child
94
-
.stdout
95
-
.take()
96
-
.ok_or(HiveLibError::CommandError(CommandError::NoHandle))?;
97
-
let stderr_handle = child
98
-
.stderr
99
-
.take()
100
-
.ok_or(HiveLibError::CommandError(CommandError::NoHandle))?;
101
-
102
-
let mut joinset = JoinSet::new();
103
-
104
-
joinset.spawn(handle_io(
105
-
stderr_handle,
106
-
self.output_mode.clone(),
107
-
error_collection.clone(),
108
-
false,
109
-
));
110
-
joinset.spawn(handle_io(
111
-
stdout_handle,
112
-
self.output_mode.clone(),
113
-
stdout_collection.clone(),
114
-
true,
115
-
));
116
-
117
-
Ok(NonInteractiveChildChip {
118
-
error_collection,
119
-
stdout_collection,
120
-
child,
121
-
joinset,
122
-
command_string,
123
-
stdin,
124
-
})
125
-
}
126
-
}
127
-
128
-
impl WireCommandChip for NonInteractiveChildChip {
129
-
type ExitStatus = (ExitStatus, String);
130
-
131
-
async fn wait_till_success(mut self) -> Result<Self::ExitStatus, CommandError> {
132
-
let status = self.child.wait().await.unwrap();
133
-
let _ = self.joinset.join_all().await;
134
-
135
-
if !status.success() {
136
-
let logs = self
137
-
.error_collection
138
-
.lock()
139
-
.await
140
-
.make_contiguous()
141
-
.join("\n");
142
-
143
-
return Err(CommandError::CommandFailed {
144
-
command_ran: self.command_string,
145
-
logs,
146
-
code: match status.code() {
147
-
Some(code) => format!("code {code}"),
148
-
None => "no exit code".to_string(),
149
-
},
150
-
reason: "known-status",
151
-
});
152
-
}
153
-
154
-
let stdout = self.stdout_collection.lock().await.iter().join("\n");
155
-
156
-
Ok((status, stdout))
157
-
}
158
-
159
-
async fn write_stdin(&mut self, data: Vec<u8>) -> Result<(), HiveLibError> {
160
-
trace!("Writing {} bytes", data.len());
161
-
self.stdin.write_all(&data).await.unwrap();
162
-
Ok(())
163
-
}
164
-
}
165
-
166
-
pub async fn handle_io<R>(
167
-
reader: R,
168
-
output_mode: Arc<ChildOutputMode>,
169
-
collection: Arc<Mutex<VecDeque<String>>>,
170
-
always_collect: bool,
171
-
) where
172
-
R: tokio::io::AsyncRead + Unpin,
173
-
{
174
-
let mut io_reader = tokio::io::AsyncBufReadExt::lines(BufReader::new(reader));
175
-
176
-
while let Some(line) = io_reader.next_line().await.unwrap() {
177
-
trace!("Got line: {line:?}");
178
-
let log = output_mode.trace(line.clone());
179
-
180
-
if always_collect {
181
-
let mut queue = collection.lock().await;
182
-
queue.push_front(line);
183
-
} else if let Some(NixLog::Internal(log)) = log {
184
-
if let Some(message) = log.get_errorish_message() {
185
-
let mut queue = collection.lock().await;
186
-
queue.push_front(message);
187
-
// add at most 10 message to the front, drop the rest.
188
-
queue.truncate(10);
189
-
}
190
-
}
191
-
}
192
-
193
-
debug!("io_handler: goodbye!");
194
-
}
195
-
196
-
fn create_sync_ssh_command(target: &Target) -> Result<Command, HiveLibError> {
197
-
let mut command = Command::new("ssh");
198
-
199
-
command.args(["-l", target.user.as_ref()]);
200
-
command.arg(target.get_preffered_host()?.as_ref());
201
-
command.args(["-p", &target.port.to_string()]);
202
-
203
-
Ok(command)
204
-
}
-294
wire/lib/src/errors.rs
-294
wire/lib/src/errors.rs
···
1
-
use std::{num::ParseIntError, path::PathBuf, process::ExitStatus, sync::mpsc::RecvError};
2
-
3
-
use miette::{Diagnostic, SourceSpan};
4
-
use thiserror::Error;
5
-
use tokio::task::JoinError;
6
-
7
-
use crate::hive::node::{Name, SwitchToConfigurationGoal};
8
-
9
-
#[cfg(debug_assertions)]
10
-
const DOCS_URL: &str = "http://localhost:5173/reference/errors.html";
11
-
#[cfg(not(debug_assertions))]
12
-
const DOCS_URL: &str = "https://wire.althaea.zone/reference/errors.html";
13
-
14
-
#[derive(Debug, Diagnostic, Error)]
15
-
pub enum KeyError {
16
-
#[diagnostic(
17
-
code(wire::key::File),
18
-
url("{DOCS_URL}#{}", self.code().unwrap())
19
-
)]
20
-
#[error("error reading file")]
21
-
File(#[source] std::io::Error),
22
-
23
-
#[diagnostic(
24
-
code(wire::key::SpawningCommand),
25
-
help("Ensure wire has the correct $PATH for this command"),
26
-
url("{DOCS_URL}#{}", self.code().unwrap())
27
-
)]
28
-
#[error("error spawning key command")]
29
-
CommandSpawnError {
30
-
#[source]
31
-
error: std::io::Error,
32
-
33
-
#[source_code]
34
-
command: String,
35
-
36
-
#[label(primary, "Program ran")]
37
-
command_span: Option<SourceSpan>,
38
-
},
39
-
40
-
#[diagnostic(
41
-
code(wire::key::Resolving),
42
-
url("{DOCS_URL}#{}", self.code().unwrap())
43
-
)]
44
-
#[error("Error resolving key command child process")]
45
-
CommandResolveError {
46
-
#[source]
47
-
error: std::io::Error,
48
-
49
-
#[source_code]
50
-
command: String,
51
-
},
52
-
53
-
#[diagnostic(
54
-
code(wire::key::CommandExit),
55
-
url("{DOCS_URL}#{}", self.code().unwrap())
56
-
)]
57
-
#[error("key command failed with status {}: {}", .0,.1)]
58
-
CommandError(ExitStatus, String),
59
-
60
-
#[diagnostic(
61
-
code(wire::key::Empty),
62
-
url("{DOCS_URL}#{}", self.code().unwrap())
63
-
)]
64
-
#[error("Command list empty")]
65
-
Empty,
66
-
67
-
#[diagnostic(
68
-
code(wire::key::ParseKeyPermissions),
69
-
help("Refer to the documentation for the format of key file permissions."),
70
-
url("{DOCS_URL}#{}", self.code().unwrap())
71
-
)]
72
-
#[error("Failed to parse key permissions")]
73
-
ParseKeyPermissions(#[source] ParseIntError),
74
-
}
75
-
76
-
#[derive(Debug, Diagnostic, Error)]
77
-
pub enum ActivationError {
78
-
#[diagnostic(
79
-
code(wire::activation::SwitchToConfiguration),
80
-
url("{DOCS_URL}#{}", self.code().unwrap())
81
-
)]
82
-
#[error("failed to run switch-to-configuration {0} on node {1}")]
83
-
SwitchToConfigurationError(SwitchToConfigurationGoal, Name, #[source] CommandError),
84
-
}
85
-
86
-
#[derive(Debug, Diagnostic, Error)]
87
-
pub enum NetworkError {
88
-
#[diagnostic(
89
-
code(wire::network::HostUnreachable),
90
-
help(
91
-
"If you failed due to a fault in DNS, note that a node can have multiple targets defined."
92
-
),
93
-
url("{DOCS_URL}#{}", self.code().unwrap())
94
-
)]
95
-
#[error("Cannot reach host {host}")]
96
-
HostUnreachable {
97
-
host: String,
98
-
#[source]
99
-
source: CommandError,
100
-
},
101
-
102
-
#[diagnostic(
103
-
code(wire::network::HostUnreachableAfterReboot),
104
-
url("{DOCS_URL}#{}", self.code().unwrap())
105
-
)]
106
-
#[error("Failed to get regain connection to {0} after activation.")]
107
-
HostUnreachableAfterReboot(String),
108
-
109
-
#[diagnostic(
110
-
code(wire::network::HostsExhausted),
111
-
url("{DOCS_URL}#{}", self.code().unwrap())
112
-
)]
113
-
#[error("Ran out of contactable hosts")]
114
-
HostsExhausted,
115
-
}
116
-
117
-
#[derive(Debug, Diagnostic, Error)]
118
-
pub enum HiveInitializationError {
119
-
#[diagnostic(
120
-
code(wire::hive_init::NoHiveFound),
121
-
help(
122
-
"Double check the path is correct. You can adjust the hive path with `--path` when the hive lies outside of the CWD."
123
-
),
124
-
url("{DOCS_URL}#{}", self.code().unwrap())
125
-
)]
126
-
#[error("No hive could be found in {}", .0.display())]
127
-
NoHiveFound(PathBuf),
128
-
129
-
#[diagnostic(
130
-
code(wire::hive_init::Parse),
131
-
help("Please create an issue!"),
132
-
url("{DOCS_URL}#{}", self.code().unwrap())
133
-
)]
134
-
#[error("Failed to parse internal wire json.")]
135
-
ParseEvaluateError(#[source] serde_json::Error),
136
-
137
-
#[diagnostic(
138
-
code(wire::hive_init::NodeDoesNotExist),
139
-
help("Please create an issue!"),
140
-
url("{DOCS_URL}#{}", self.code().unwrap())
141
-
)]
142
-
#[error("node {0} not exist in hive")]
143
-
NodeDoesNotExist(String),
144
-
}
145
-
146
-
#[derive(Debug, Diagnostic, Error)]
147
-
pub enum CommandError {
148
-
#[diagnostic(
149
-
code(wire::command::TermAttrs),
150
-
url("{DOCS_URL}#{}", self.code().unwrap())
151
-
)]
152
-
#[error("Failed to set PTY attrs")]
153
-
TermAttrs(#[source] nix::errno::Errno),
154
-
155
-
#[diagnostic(
156
-
code(wire::command::PosixPipe),
157
-
url("{DOCS_URL}#{}", self.code().unwrap())
158
-
)]
159
-
#[error("There was an error in regards to a pipe")]
160
-
PosixPipe(#[source] nix::errno::Errno),
161
-
162
-
/// Error wrapped around `portable_pty`'s anyhow
163
-
/// errors
164
-
#[diagnostic(
165
-
code(wire::command::PortablePty),
166
-
url("{DOCS_URL}#{}", self.code().unwrap())
167
-
)]
168
-
#[error("There was an error from the portable_pty crate")]
169
-
PortablePty(#[source] anyhow::Error),
170
-
171
-
#[diagnostic(
172
-
code(wire::command::Joining),
173
-
url("{DOCS_URL}#{}", self.code().unwrap())
174
-
)]
175
-
#[error("Failed to join on some tokio task")]
176
-
JoinError(#[source] JoinError),
177
-
178
-
#[diagnostic(
179
-
code(wire::command::WaitForStatus),
180
-
url("{DOCS_URL}#{}", self.code().unwrap())
181
-
)]
182
-
#[error("Failed to wait for the child's status")]
183
-
WaitForStatus(#[source] std::io::Error),
184
-
185
-
#[diagnostic(
186
-
code(wire::detatched::NoHandle),
187
-
help("This should never happen, please create an issue!"),
188
-
url("{DOCS_URL}#{}", self.code().unwrap())
189
-
)]
190
-
#[error("There was no handle to child io")]
191
-
NoHandle,
192
-
193
-
#[diagnostic(
194
-
code(wire::command::WritingClientStdout),
195
-
url("{DOCS_URL}#{}", self.code().unwrap())
196
-
)]
197
-
#[error("Failed to write to client stdout.")]
198
-
WritingClientStdout(#[source] std::io::Error),
199
-
200
-
#[diagnostic(
201
-
code(wire::command::WritingMasterStdin),
202
-
url("{DOCS_URL}#{}", self.code().unwrap())
203
-
)]
204
-
#[error("Failed to write to PTY master stdout.")]
205
-
WritingMasterStdout(#[source] std::io::Error),
206
-
207
-
#[diagnostic(
208
-
code(wire::command::Recv),
209
-
url("{DOCS_URL}#{}", self.code().unwrap()),
210
-
help("please create an issue!"),
211
-
)]
212
-
#[error("Failed to receive a message from the begin channel")]
213
-
RecvError(#[source] RecvError),
214
-
215
-
#[diagnostic(
216
-
code(wire::command::ThreadPanic),
217
-
url("{DOCS_URL}#{}", self.code().unwrap()),
218
-
help("please create an issue!"),
219
-
)]
220
-
#[error("Thread paniced")]
221
-
ThreadPanic,
222
-
223
-
#[diagnostic(
224
-
code(wire::command::CommandFailed),
225
-
url("{DOCS_URL}#{}", self.code().unwrap()),
226
-
help("`nix` commands are filtered, run with -vvv to view all"),
227
-
)]
228
-
#[error("{command_ran} failed (reason: {reason}) with {code} (last 20 lines):\n{logs}")]
229
-
CommandFailed {
230
-
command_ran: String,
231
-
logs: String,
232
-
code: String,
233
-
reason: &'static str,
234
-
},
235
-
}
236
-
237
-
#[derive(Debug, Diagnostic, Error)]
238
-
pub enum HiveLibError {
239
-
#[error(transparent)]
240
-
#[diagnostic(transparent)]
241
-
HiveInitializationError(HiveInitializationError),
242
-
243
-
#[error(transparent)]
244
-
#[diagnostic(transparent)]
245
-
NetworkError(NetworkError),
246
-
247
-
#[error(transparent)]
248
-
#[diagnostic(transparent)]
249
-
ActivationError(ActivationError),
250
-
251
-
#[error(transparent)]
252
-
#[diagnostic(transparent)]
253
-
CommandError(CommandError),
254
-
255
-
#[error("Failed to apply key {}", .0)]
256
-
KeyError(
257
-
String,
258
-
#[source]
259
-
#[diagnostic_source]
260
-
KeyError,
261
-
),
262
-
263
-
#[diagnostic(
264
-
code(wire::BuildNode),
265
-
url("{DOCS_URL}#{}", self.code().unwrap())
266
-
)]
267
-
#[error("failed to build node {name}")]
268
-
NixBuildError {
269
-
name: Name,
270
-
#[source]
271
-
source: CommandError,
272
-
},
273
-
274
-
#[diagnostic(
275
-
code(wire::CopyPath),
276
-
url("{DOCS_URL}#{}", self.code().unwrap())
277
-
)]
278
-
#[error("failed to copy path {path} to node {name}")]
279
-
NixCopyError {
280
-
name: Name,
281
-
path: String,
282
-
#[source]
283
-
error: Box<CommandError>,
284
-
},
285
-
286
-
#[diagnostic(code(wire::Evaluate))]
287
-
#[error("failed to evaluate `{attribute}` from the context of a hive.")]
288
-
NixEvalError {
289
-
attribute: String,
290
-
291
-
#[source]
292
-
source: CommandError,
293
-
},
294
-
}
-259
wire/lib/src/hive/mod.rs
-259
wire/lib/src/hive/mod.rs
···
1
-
use node::{Name, Node};
2
-
use serde::de::Error;
3
-
use serde::{Deserialize, Deserializer, Serialize};
4
-
use std::collections::HashMap;
5
-
use std::collections::hash_map::OccupiedEntry;
6
-
use std::path::{Path, PathBuf};
7
-
use std::sync::{Arc, Mutex};
8
-
use tracing::{error, info, instrument, trace};
9
-
10
-
use crate::commands::common::evaluate_hive_attribute;
11
-
use crate::errors::HiveInitializationError;
12
-
use crate::{EvalGoal, HiveLibError, SubCommandModifiers};
13
-
pub mod node;
14
-
pub mod steps;
15
-
16
-
#[derive(Serialize, Deserialize, Debug, PartialEq)]
17
-
#[serde(deny_unknown_fields)]
18
-
pub struct Hive {
19
-
pub nodes: HashMap<Name, Node>,
20
-
21
-
#[serde(deserialize_with = "check_schema_version", rename = "_schema")]
22
-
pub schema: u32,
23
-
}
24
-
25
-
pub enum Action<'a> {
26
-
Inspect,
27
-
EvaluateNode(OccupiedEntry<'a, String, Node>),
28
-
}
29
-
30
-
fn check_schema_version<'de, D: Deserializer<'de>>(d: D) -> Result<u32, D::Error> {
31
-
let version = u32::deserialize(d)?;
32
-
if version != Hive::SCHEMA_VERSION {
33
-
return Err(D::Error::custom(
34
-
"Version mismatch for Hive. Please ensure the binary and your wire input match!",
35
-
));
36
-
}
37
-
Ok(version)
38
-
}
39
-
40
-
impl Hive {
41
-
pub const SCHEMA_VERSION: u32 = 0;
42
-
43
-
#[instrument]
44
-
pub async fn new_from_path(
45
-
path: &Path,
46
-
modifiers: SubCommandModifiers,
47
-
clobber_lock: Arc<Mutex<()>>,
48
-
) -> Result<Hive, HiveLibError> {
49
-
info!("Searching upwards for hive in {}", path.display());
50
-
51
-
let output =
52
-
evaluate_hive_attribute(path, &EvalGoal::Inspect, modifiers, clobber_lock).await?;
53
-
54
-
let hive: Hive = serde_json::from_str(&output).map_err(|err| {
55
-
HiveLibError::HiveInitializationError(HiveInitializationError::ParseEvaluateError(err))
56
-
})?;
57
-
58
-
Ok(hive)
59
-
}
60
-
61
-
/// # Errors
62
-
///
63
-
/// Returns an error if a node in nodes does not exist in the hive.
64
-
pub fn force_always_local(&mut self, nodes: Vec<String>) -> Result<(), HiveLibError> {
65
-
for node in nodes {
66
-
info!("Forcing a local build for {node}");
67
-
68
-
self.nodes
69
-
.get_mut(&Name(Arc::from(node.clone())))
70
-
.ok_or(HiveLibError::HiveInitializationError(
71
-
HiveInitializationError::NodeDoesNotExist(node.clone()),
72
-
))?
73
-
.build_remotely = false;
74
-
}
75
-
76
-
Ok(())
77
-
}
78
-
}
79
-
80
-
pub fn find_hive(path: &Path) -> Option<PathBuf> {
81
-
trace!("Searching for hive in {}", path.display());
82
-
let filepath_flake = path.join("flake.nix");
83
-
84
-
if filepath_flake.is_file() {
85
-
return Some(filepath_flake);
86
-
}
87
-
let filepath_hive = path.join("hive.nix");
88
-
89
-
if filepath_hive.is_file() {
90
-
return Some(filepath_hive);
91
-
}
92
-
93
-
if let Some(parent) = path.parent() {
94
-
return find_hive(parent);
95
-
}
96
-
97
-
error!("No hive found");
98
-
None
99
-
}
100
-
101
-
#[cfg(test)]
102
-
mod tests {
103
-
use im::vector;
104
-
105
-
use crate::{
106
-
errors::CommandError,
107
-
get_test_path,
108
-
hive::steps::keys::{Key, Source, UploadKeyAt},
109
-
test_support::{get_clobber_lock, make_flake_sandbox},
110
-
};
111
-
112
-
use super::*;
113
-
use std::{assert_matches::assert_matches, env};
114
-
115
-
#[test]
116
-
fn test_hive_dot_nix_priority() {
117
-
let path = get_test_path!();
118
-
119
-
let hive = find_hive(&path).unwrap();
120
-
121
-
assert!(hive.ends_with("flake.nix"));
122
-
}
123
-
124
-
#[tokio::test]
125
-
#[cfg_attr(feature = "no_web_tests", ignore)]
126
-
async fn test_hive_file() {
127
-
let mut path = get_test_path!();
128
-
129
-
let hive = Hive::new_from_path(&path, SubCommandModifiers::default(), get_clobber_lock())
130
-
.await
131
-
.unwrap();
132
-
133
-
let node = Node {
134
-
target: node::Target::from_host("192.168.122.96"),
135
-
..Default::default()
136
-
};
137
-
138
-
let mut nodes = HashMap::new();
139
-
nodes.insert(Name("node-a".into()), node);
140
-
141
-
path.push("hive.nix");
142
-
143
-
assert_eq!(
144
-
hive,
145
-
Hive {
146
-
nodes,
147
-
schema: Hive::SCHEMA_VERSION
148
-
}
149
-
);
150
-
}
151
-
152
-
#[tokio::test]
153
-
#[cfg_attr(feature = "no_web_tests", ignore)]
154
-
async fn non_trivial_hive() {
155
-
let mut path = get_test_path!();
156
-
157
-
let hive = Hive::new_from_path(&path, SubCommandModifiers::default(), get_clobber_lock())
158
-
.await
159
-
.unwrap();
160
-
161
-
let node = Node {
162
-
target: node::Target::from_host("name"),
163
-
keys: vector![Key {
164
-
name: "different-than-a".into(),
165
-
dest_dir: "/run/keys/".into(),
166
-
path: "/run/keys/different-than-a".into(),
167
-
group: "root".into(),
168
-
user: "root".into(),
169
-
permissions: "0600".into(),
170
-
source: Source::String("hi".into()),
171
-
upload_at: UploadKeyAt::PreActivation,
172
-
environment: im::HashMap::new()
173
-
}],
174
-
..Default::default()
175
-
};
176
-
177
-
let mut nodes = HashMap::new();
178
-
nodes.insert(Name("node-a".into()), node);
179
-
180
-
path.push("hive.nix");
181
-
182
-
assert_eq!(
183
-
hive,
184
-
Hive {
185
-
nodes,
186
-
schema: Hive::SCHEMA_VERSION
187
-
}
188
-
);
189
-
}
190
-
191
-
    // A flake-based hive evaluates: merged and non-merged nodes appear,
    // and a node named system-c is deliberately omitted.
    #[tokio::test]
    #[cfg_attr(feature = "no_web_tests", ignore)]
    async fn flake_hive() {
        let tmp_dir = make_flake_sandbox(&get_test_path!()).unwrap();

        let hive = Hive::new_from_path(
            tmp_dir.path(),
            SubCommandModifiers::default(),
            get_clobber_lock(),
        )
        .await
        .unwrap();

        let mut nodes = HashMap::new();

        // a merged node
        nodes.insert(Name("node-a".into()), Node::from_host("node-a"));
        // a non-merged node
        nodes.insert(Name("node-b".into()), Node::from_host("node-b"));
        // omit a node called system-c

        let mut path = tmp_dir.path().to_path_buf();
        path.push("flake.nix");

        assert_eq!(
            hive,
            Hive {
                nodes,
                schema: Hive::SCHEMA_VERSION
            }
        );

        tmp_dir.close().unwrap();
    }
225
-
226
-
    // Evaluating a hive without meta.nixpkgs fails with the expected
    // diagnostic in the captured logs.
    #[tokio::test]
    async fn no_nixpkgs() {
        let path = get_test_path!();

        assert_matches!(
            Hive::new_from_path(&path, SubCommandModifiers::default(), get_clobber_lock()).await,
            Err(HiveLibError::NixEvalError {
                source: CommandError::CommandFailed {
                    logs,
                    ..
                },
                ..
            })
            if logs.contains("makeHive called without meta.nixpkgs specified")
        );
    }
242
-
243
-
    // Setting the read-only `deployment._keys` option directly must fail
    // evaluation with the module system's read-only error.
    #[tokio::test]
    async fn _keys_should_fail() {
        let path = get_test_path!();

        assert_matches!(
            Hive::new_from_path(&path, SubCommandModifiers::default(), get_clobber_lock()).await,
            Err(HiveLibError::NixEvalError {
                source: CommandError::CommandFailed {
                    logs,
                    ..
                },
                ..
            })
            if logs.contains("The option `deployment._keys' is read-only, but it's set multiple times.")
        );
    }
259
-
}
-479
wire/lib/src/hive/node.rs
-479
wire/lib/src/hive/node.rs
···
1
-
#![allow(clippy::missing_errors_doc)]
2
-
use enum_dispatch::enum_dispatch;
3
-
use gethostname::gethostname;
4
-
use serde::{Deserialize, Serialize};
5
-
use std::collections::HashMap;
6
-
use std::fmt::Display;
7
-
use std::path::PathBuf;
8
-
use std::sync::{Arc, Mutex};
9
-
use tracing::{error, info, instrument, trace};
10
-
11
-
use crate::SubCommandModifiers;
12
-
use crate::commands::noninteractive::NonInteractiveCommand;
13
-
use crate::commands::{ChildOutputMode, WireCommand, WireCommandChip};
14
-
use crate::errors::NetworkError;
15
-
use crate::hive::steps::build::Build;
16
-
use crate::hive::steps::evaluate::Evaluate;
17
-
use crate::hive::steps::keys::{Key, Keys, PushKeyAgent, UploadKeyAt};
18
-
use crate::hive::steps::ping::Ping;
19
-
use crate::hive::steps::push::{PushBuildOutput, PushEvaluatedOutput};
20
-
21
-
use super::HiveLibError;
22
-
use super::steps::activate::SwitchToConfiguration;
23
-
24
-
/// A node's name as it appears in the hive.
#[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq, derive_more::Display)]
pub struct Name(pub Arc<str>);
26
-
27
-
/// How to reach a node over SSH: an ordered list of candidate hosts plus
/// the user and port used to connect.
#[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq)]
pub struct Target {
    /// Candidate hosts, tried in order of preference.
    pub hosts: Vec<Arc<str>>,
    /// User for the `ssh://user@host` store URL.
    pub user: Arc<str>,
    /// SSH port, passed via `NIX_SSHOPTS`.
    pub port: u32,

    // Index into `hosts` of the host currently in use; advanced by
    // `host_failed`. Runtime-only state, never (de)serialized.
    #[serde(skip)]
    current_host: usize,
}
36
-
37
-
// Test-only default matching the fixture hives: single host "NAME",
// root user on the standard SSH port.
#[cfg(test)]
impl Default for Target {
    fn default() -> Self {
        Target {
            hosts: vec!["NAME".into()],
            user: "root".into(),
            port: 22,
            current_host: 0,
        }
    }
}
48
-
49
-
#[cfg(test)]
impl<'a> Context<'a> {
    /// Test helper: builds a `Context` with default modifiers, keys
    /// enabled, empty step state, a `Switch` goal, no reboot, and a fresh
    /// clobber lock.
    fn create_test_context(
        hivepath: std::path::PathBuf,
        name: &'a Name,
        node: &'a mut Node,
    ) -> Self {
        use crate::test_support::get_clobber_lock;

        Context {
            name,
            node,
            hivepath,
            modifiers: SubCommandModifiers::default(),
            no_keys: false,
            state: StepState::default(),
            goal: Goal::SwitchToConfiguration(SwitchToConfigurationGoal::Switch),
            reboot: false,
            clobber_lock: get_clobber_lock(),
        }
    }
}
71
-
72
-
impl Target {
    /// Returns the host currently being tried.
    ///
    /// ("preffered" is a typo for "preferred", kept as-is because the name
    /// is part of the public API.)
    ///
    /// # Errors
    /// Returns [`NetworkError::HostsExhausted`] once `host_failed` has
    /// advanced past the last configured host.
    pub fn get_preffered_host(&self) -> Result<&Arc<str>, HiveLibError> {
        self.hosts
            .get(self.current_host)
            .ok_or(HiveLibError::NetworkError(NetworkError::HostsExhausted))
    }

    /// Marks the current host as failed, advancing to the next candidate.
    pub fn host_failed(&mut self) {
        self.current_host += 1;
    }

    /// Test helper: a target with the single host `host` and default
    /// user/port.
    #[cfg(test)]
    pub fn from_host(host: &str) -> Self {
        Target {
            hosts: vec![host.into()],
            ..Default::default()
        }
    }
}
91
-
92
-
/// A single deployable machine in the hive, as evaluated from Nix.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
pub struct Node {
    /// How to reach the node over SSH.
    #[serde(rename = "target")]
    pub target: Target,

    /// Build the system closure on the target instead of locally.
    #[serde(rename = "buildOnTarget")]
    pub build_remotely: bool,

    /// Whether this node may be applied directly on the local machine when
    /// its name matches the local hostname.
    #[serde(rename = "allowLocalDeployment")]
    pub allow_local_deployment: bool,

    #[serde(default)]
    pub tags: im::HashSet<String>,

    /// Secret files to upload; read from the hive's `_keys` attribute.
    #[serde(rename(deserialize = "_keys", serialize = "keys"))]
    pub keys: im::Vector<Key>,

    /// Platform string of the node (e.g. "x86_64-linux"); selects which
    /// key agent binary to push.
    #[serde(rename(deserialize = "_hostPlatform", serialize = "host_platform"))]
    pub host_platform: Arc<str>,
}
112
-
113
-
// Test-only default mirroring the fixture hives' defaults: no keys or
// tags, local deployment allowed, local builds, x86_64-linux.
#[cfg(test)]
impl Default for Node {
    fn default() -> Self {
        Node {
            target: Target::default(),
            keys: im::Vector::new(),
            tags: im::HashSet::new(),
            allow_local_deployment: true,
            build_remotely: false,
            host_platform: "x86_64-linux".into(),
        }
    }
}
126
-
127
-
impl Node {
    /// Test helper: a default node whose target has the single host `host`.
    #[cfg(test)]
    pub fn from_host(host: &str) -> Self {
        Node {
            target: Target::from_host(host),
            ..Default::default()
        }
    }

    /// Checks reachability of the node's current preferred host by running
    /// `nix store ping --store ssh://user@host` locally; the SSH port is
    /// supplied through `NIX_SSHOPTS`.
    ///
    /// `clobber_lock` serializes commands that would otherwise clobber each
    /// other's output.
    ///
    /// # Errors
    /// Returns [`NetworkError::HostUnreachable`] when the ping command
    /// fails, or propagates an error when the host list is exhausted.
    pub async fn ping(&self, clobber_lock: Arc<Mutex<()>>) -> Result<(), HiveLibError> {
        let host = self.target.get_preffered_host()?;

        let command_string = format!(
            "nix --extra-experimental-features nix-command \
            store ping --store ssh://{}@{}",
            self.target.user, host
        );

        let mut command = NonInteractiveCommand::spawn_new(None, ChildOutputMode::Nix).await?;
        let output = command.run_command_with_env(
            command_string,
            false,
            HashMap::from([("NIX_SSHOPTS".into(), format!("-p {}", self.target.port))]),
            clobber_lock,
        )?;

        output.wait_till_success().await.map_err(|source| {
            HiveLibError::NetworkError(NetworkError::HostUnreachable {
                host: host.to_string(),
                source,
            })
        })?;

        Ok(())
    }
}
163
-
164
-
pub fn should_apply_locally(allow_local_deployment: bool, name: &str) -> bool {
165
-
*name == *gethostname() && allow_local_deployment
166
-
}
167
-
168
-
/// What to copy to a node: either an evaluated derivation (all outputs)
/// or a plain store path.
#[derive(derive_more::Display)]
pub enum Push<'a> {
    Derivation(&'a Derivation),
    Path(&'a String),
}
173
-
174
-
#[derive(Deserialize, Debug)]
175
-
pub struct Derivation(String);
176
-
177
-
impl Display for Derivation {
178
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
179
-
self.0.fmt(f).and_then(|()| write!(f, "^*"))
180
-
}
181
-
}
182
-
183
-
/// Subcommand passed to the generated `switch-to-configuration` script.
#[derive(derive_more::Display, Debug, Clone, Copy)]
pub enum SwitchToConfigurationGoal {
    Switch,
    Boot,
    Test,
    DryActivate,
}
190
-
191
-
/// Top-level goal of a deployment run; decides which steps execute.
#[derive(derive_more::Display, Clone, Copy)]
pub enum Goal {
    /// Build, push, and activate via `switch-to-configuration`.
    SwitchToConfiguration(SwitchToConfigurationGoal),
    /// Evaluate and build only.
    Build,
    /// Evaluate and push the evaluation to the target only.
    Push,
    /// Upload keys only.
    Keys,
}
198
-
199
-
/// A single unit of work in a deployment, dispatched over the `Step` enum.
#[enum_dispatch]
pub(crate) trait ExecuteStep: Send + Sync + Display + std::fmt::Debug {
    /// Performs the step, reading and writing shared state on `ctx`.
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError>;

    /// Whether this step applies for the context's goal and flags.
    fn should_execute(&self, context: &Context) -> bool;
}
205
-
206
-
/// Outputs produced by earlier steps and consumed by later ones.
#[derive(Default)]
pub struct StepState {
    /// Set by `Evaluate`: the node's top-level derivation.
    pub evaluation: Option<Derivation>,
    /// Set by `Build`: the built store path.
    pub build: Option<String>,
    /// Set by `PushKeyAgent`: directory containing the key agent binary.
    pub key_agent_directory: Option<String>,
}
212
-
213
-
/// Everything a step needs while executing a goal against one node.
pub struct Context<'a> {
    /// Name of the node being deployed.
    pub name: &'a Name,
    /// The node itself; mutable so steps can e.g. advance failed hosts.
    pub node: &'a mut Node,
    /// Path to the hive file being evaluated.
    pub hivepath: PathBuf,
    pub modifiers: SubCommandModifiers,
    /// Skip all key-related steps when set.
    pub no_keys: bool,
    /// Shared outputs passed between steps.
    pub state: StepState,
    pub goal: Goal,
    /// Reboot the node after a successful activation.
    pub reboot: bool,
    /// Serializes commands that would clobber each other's output.
    pub clobber_lock: Arc<Mutex<()>>,
}
224
-
225
-
// All deployment steps; `enum_dispatch` generates the `ExecuteStep`
// forwarding impl. Execution order is defined by `GoalExecutor::new`.
#[enum_dispatch(ExecuteStep)]
#[derive(Debug, PartialEq)]
enum Step {
    Ping,
    PushKeyAgent,
    Keys,
    Evaluate,
    PushEvaluatedOutput,
    Build,
    PushBuildOutput,
    SwitchToConfiguration,
}
237
-
238
-
impl Display for Step {
239
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
240
-
match self {
241
-
Self::Ping(step) => step.fmt(f),
242
-
Self::PushKeyAgent(step) => step.fmt(f),
243
-
Self::Keys(step) => step.fmt(f),
244
-
Self::Evaluate(step) => step.fmt(f),
245
-
Self::PushEvaluatedOutput(step) => step.fmt(f),
246
-
Self::Build(step) => step.fmt(f),
247
-
Self::PushBuildOutput(step) => step.fmt(f),
248
-
Self::SwitchToConfiguration(step) => step.fmt(f),
249
-
}
250
-
}
251
-
}
252
-
253
-
/// Runs the ordered list of deployment steps for one node.
pub struct GoalExecutor<'a> {
    // Canonical step order; filtered per-goal at execution time.
    steps: Vec<Step>,
    context: Context<'a>,
}
257
-
258
-
impl<'a> GoalExecutor<'a> {
    /// Builds the executor with the full, canonically ordered step list.
    /// `execute` later narrows it via each step's `should_execute`.
    pub fn new(context: Context<'a>) -> Self {
        Self {
            steps: vec![
                Step::Ping(Ping),
                Step::PushKeyAgent(PushKeyAgent),
                // NoFilter runs for the Keys goal; the Pre/Post pair
                // bracket activation during a Switch.
                Step::Keys(Keys {
                    filter: UploadKeyAt::NoFilter,
                }),
                Step::Keys(Keys {
                    filter: UploadKeyAt::PreActivation,
                }),
                Step::Evaluate(super::steps::evaluate::Evaluate),
                Step::PushEvaluatedOutput(super::steps::push::PushEvaluatedOutput),
                Step::Build(super::steps::build::Build),
                Step::PushBuildOutput(super::steps::push::PushBuildOutput),
                Step::SwitchToConfiguration(SwitchToConfiguration),
                Step::Keys(Keys {
                    filter: UploadKeyAt::PostActivation,
                }),
            ],
            context,
        }
    }

    /// Runs every applicable step in order, stopping at the first failure.
    #[instrument(skip_all, name = "goal", fields(node = %self.context.name))]
    pub async fn execute(mut self) -> Result<(), HiveLibError> {
        let steps = self
            .steps
            .iter()
            .filter(|step| step.should_execute(&self.context))
            .inspect(|step| {
                trace!("Will execute step `{step}` for {}", self.context.name);
            })
            .collect::<Vec<_>>();

        for step in steps {
            info!("Executing step `{step}`");

            step.execute(&mut self.context).await.inspect_err(|_| {
                error!("Failed to execute `{step}`");
            })?;
        }

        Ok(())
    }
}
305
-
306
-
#[cfg(test)]
307
-
mod tests {
308
-
use super::*;
309
-
use crate::{function_name, get_test_path, hive::Hive, test_support::get_clobber_lock};
310
-
use std::{collections::HashMap, env};
311
-
312
-
    // Applies each step's `should_execute` filter, mirroring
    // `GoalExecutor::execute`, and returns the surviving steps.
    fn get_steps(goal_executor: GoalExecutor) -> std::vec::Vec<Step> {
        goal_executor
            .steps
            .into_iter()
            .filter(|step| step.should_execute(&goal_executor.context))
            .collect::<Vec<_>>()
    }
319
-
320
-
    // A hive entry with no explicit deployment settings deserializes to
    // `Node::default()`.
    #[tokio::test]
    #[cfg_attr(feature = "no_web_tests", ignore)]
    async fn default_values_match() {
        let mut path = get_test_path!();

        let hive = Hive::new_from_path(&path, SubCommandModifiers::default(), get_clobber_lock())
            .await
            .unwrap();

        let node = Node::default();

        let mut nodes = HashMap::new();
        nodes.insert(Name("NAME".into()), node);

        path.push("hive.nix");

        assert_eq!(
            hive,
            Hive {
                nodes,
                schema: Hive::SCHEMA_VERSION
            }
        );
    }
344
-
345
-
    // Local builds omit the push-evaluation step: eval, build, push the
    // built output, then switch, bracketed by the key steps.
    #[tokio::test]
    async fn order_build_locally() {
        let path = get_test_path!();
        let mut node = Node {
            build_remotely: false,
            ..Default::default()
        };
        let name = &Name(function_name!().into());
        let executor = GoalExecutor::new(Context::create_test_context(path, name, &mut node));
        let steps = get_steps(executor);

        assert_eq!(
            steps,
            vec![
                Ping.into(),
                PushKeyAgent.into(),
                Keys {
                    filter: UploadKeyAt::PreActivation
                }
                .into(),
                crate::hive::steps::evaluate::Evaluate.into(),
                crate::hive::steps::build::Build.into(),
                crate::hive::steps::push::PushBuildOutput.into(),
                SwitchToConfiguration.into(),
                Keys {
                    filter: UploadKeyAt::PostActivation
                }
                .into()
            ]
        );
    }
376
-
377
-
    // The Keys goal only pings, pushes the agent, and uploads all keys.
    #[tokio::test]
    async fn order_keys_only() {
        let path = get_test_path!();
        let mut node = Node::default();
        let name = &Name(function_name!().into());
        let mut context = Context::create_test_context(path, name, &mut node);

        context.goal = Goal::Keys;

        let executor = GoalExecutor::new(context);
        let steps = get_steps(executor);

        assert_eq!(
            steps,
            vec![
                Ping.into(),
                PushKeyAgent.into(),
                Keys {
                    filter: UploadKeyAt::NoFilter
                }
                .into(),
            ]
        );
    }
401
-
402
-
    // The Build goal evaluates, builds, and pushes the built output —
    // no key handling and no activation.
    #[tokio::test]
    async fn order_build_only() {
        let path = get_test_path!();
        let mut node = Node::default();
        let name = &Name(function_name!().into());
        let mut context = Context::create_test_context(path, name, &mut node);

        context.goal = Goal::Build;

        let executor = GoalExecutor::new(context);
        let steps = get_steps(executor);

        assert_eq!(
            steps,
            vec![
                Ping.into(),
                crate::hive::steps::evaluate::Evaluate.into(),
                crate::hive::steps::build::Build.into(),
                crate::hive::steps::push::PushBuildOutput.into(),
            ]
        );
    }
424
-
425
-
    // The Push goal evaluates and pushes the evaluation only.
    #[tokio::test]
    async fn order_push_only() {
        let path = get_test_path!();
        let mut node = Node::default();
        let name = &Name(function_name!().into());
        let mut context = Context::create_test_context(path, name, &mut node);

        context.goal = Goal::Push;

        let executor = GoalExecutor::new(context);
        let steps = get_steps(executor);

        assert_eq!(
            steps,
            vec![
                Ping.into(),
                crate::hive::steps::evaluate::Evaluate.into(),
                crate::hive::steps::push::PushEvaluatedOutput.into(),
            ]
        );
    }
446
-
447
-
    // Remote builds push the evaluation before building on the target;
    // there is no separate push of the built output.
    #[tokio::test]
    async fn order_remote_build() {
        let path = get_test_path!();
        let mut node = Node {
            build_remotely: true,
            ..Default::default()
        };

        let name = &Name(function_name!().into());
        let executor = GoalExecutor::new(Context::create_test_context(path, name, &mut node));
        let steps = get_steps(executor);

        assert_eq!(
            steps,
            vec![
                Ping.into(),
                PushKeyAgent.into(),
                Keys {
                    filter: UploadKeyAt::PreActivation
                }
                .into(),
                crate::hive::steps::evaluate::Evaluate.into(),
                crate::hive::steps::push::PushEvaluatedOutput.into(),
                crate::hive::steps::build::Build.into(),
                SwitchToConfiguration.into(),
                Keys {
                    filter: UploadKeyAt::PostActivation
                }
                .into()
            ]
        );
    }
479
-
}
-200
wire/lib/src/hive/steps/activate.rs
-200
wire/lib/src/hive/steps/activate.rs
···
1
-
use std::fmt::Display;
2
-
3
-
use tracing::{error, info, instrument, warn};
4
-
5
-
use crate::{
6
-
HiveLibError,
7
-
commands::{ChildOutputMode, WireCommand, WireCommandChip, get_elevated_command},
8
-
errors::{ActivationError, NetworkError},
9
-
hive::node::{Context, ExecuteStep, Goal, SwitchToConfigurationGoal, should_apply_locally},
10
-
};
11
-
12
-
/// Marker step that activates the built configuration on the node.
#[derive(Debug, PartialEq)]
pub struct SwitchToConfiguration;

impl Display for SwitchToConfiguration {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Switch to configuration")
    }
}
20
-
21
-
pub async fn wait_for_ping(ctx: &Context<'_>) -> Result<(), HiveLibError> {
22
-
let host = ctx.node.target.get_preffered_host()?;
23
-
let mut result = ctx.node.ping(ctx.clobber_lock.clone()).await;
24
-
25
-
for num in 0..2 {
26
-
warn!("Trying to ping {host} (attempt {}/3)", num + 1);
27
-
28
-
result = ctx.node.ping(ctx.clobber_lock.clone()).await;
29
-
30
-
if result.is_ok() {
31
-
info!("Regained connection to {} via {host}", ctx.name);
32
-
33
-
break;
34
-
}
35
-
}
36
-
37
-
result
38
-
}
39
-
40
-
/// Points the system profile (`/nix/var/nix/profiles/system/`) at
/// `built_path` with `nix-env --set`, running elevated on the target (or
/// locally when the node deploys locally).
///
/// # Errors
/// Fails when the elevated command cannot be spawned or exits non-zero.
async fn set_profile(
    goal: SwitchToConfigurationGoal,
    built_path: &String,
    ctx: &Context<'_>,
) -> Result<(), HiveLibError> {
    info!("Setting profiles in anticipation for switch-to-configuration {goal}");

    let mut command = get_elevated_command(
        if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
            None
        } else {
            Some(&ctx.node.target)
        },
        ChildOutputMode::Nix,
        ctx.modifiers,
    )
    .await?;
    let command_string = format!("nix-env -p /nix/var/nix/profiles/system/ --set {built_path}");

    let child = command.run_command(command_string, false, ctx.clobber_lock.clone())?;

    let _ = child
        .wait_till_success()
        .await
        .map_err(HiveLibError::CommandError)?;

    info!("Set system profile");

    Ok(())
}
70
-
71
-
impl ExecuteStep for SwitchToConfiguration {
    fn should_execute(&self, ctx: &Context) -> bool {
        matches!(ctx.goal, Goal::SwitchToConfiguration(..))
    }

    /// Sets the system profile (except for dry-activate/boot), runs
    /// `switch-to-configuration <goal>` elevated, optionally reboots, and
    /// tries to regain the connection when activation may have severed it.
    #[instrument(skip_all, name = "switch")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        // Set by the Build step, which is guaranteed to run first for
        // switch goals.
        let built_path = ctx.state.build.as_ref().unwrap();

        let Goal::SwitchToConfiguration(goal) = &ctx.goal else {
            unreachable!("Cannot reach as guarded by should_execute")
        };

        // dry-activate must not touch the profile; boot defers it.
        if !matches!(
            goal,
            SwitchToConfigurationGoal::DryActivate | SwitchToConfigurationGoal::Boot
        ) {
            set_profile(*goal, built_path, ctx).await?;
        }

        info!("Running switch-to-configuration {goal}");

        let mut command = get_elevated_command(
            if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
                None
            } else {
                Some(&ctx.node.target)
            },
            ChildOutputMode::Nix,
            ctx.modifiers,
        )
        .await?;

        let command_string = format!(
            "{built_path}/bin/switch-to-configuration {}",
            match goal {
                SwitchToConfigurationGoal::Switch => "switch",
                SwitchToConfigurationGoal::Boot => "boot",
                SwitchToConfigurationGoal::Test => "test",
                SwitchToConfigurationGoal::DryActivate => "dry-activate",
            }
        );

        let child = command.run_command(command_string, false, ctx.clobber_lock.clone())?;

        let result = child.wait_till_success().await;

        match result {
            Ok(_) => {
                if !ctx.reboot {
                    return Ok(());
                }

                if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
                    error!("Refusing to reboot local machine!");

                    return Ok(());
                }

                let mut command = get_elevated_command(
                    Some(&ctx.node.target),
                    ChildOutputMode::Nix,
                    ctx.modifiers,
                )
                .await?;

                warn!("Rebooting {name}!", name = ctx.name);

                let reboot = command.run_command("reboot now", false, ctx.clobber_lock.clone())?;

                // consume result, impossible to know if the machine failed
                // to reboot or we simply disconnected
                let _ = reboot
                    .wait_till_success()
                    .await
                    .map_err(HiveLibError::CommandError)?;

                info!("Rebooted {name}, waiting to reconnect...", name = ctx.name);

                if wait_for_ping(ctx).await.is_ok() {
                    return Ok(());
                }

                error!(
                    "Failed to get regain connection to {name} via {host} after reboot.",
                    name = ctx.name,
                    host = ctx.node.target.get_preffered_host()?
                );

                return Err(HiveLibError::NetworkError(
                    NetworkError::HostUnreachableAfterReboot(
                        ctx.node.target.get_preffered_host()?.to_string(),
                    ),
                ));
            }
            Err(error) => {
                warn!(
                    "Activation command for {name} exited unsuccessfully.",
                    name = ctx.name
                );

                // Bail if the command couldn't have broken the system,
                // and don't try to regain connection to localhost
                if matches!(goal, SwitchToConfigurationGoal::DryActivate)
                    || should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string())
                {
                    return Err(HiveLibError::ActivationError(
                        ActivationError::SwitchToConfigurationError(*goal, ctx.name.clone(), error),
                    ));
                }

                // Activation can kill the SSH connection (e.g. network
                // changes); if the node comes back, treat it as success.
                if wait_for_ping(ctx).await.is_ok() {
                    return Ok(());
                }

                error!(
                    "Failed to get regain connection to {name} via {host} after {goal} activation.",
                    name = ctx.name,
                    host = ctx.node.target.get_preffered_host()?
                );

                // NOTE(review): this path did not reboot, yet reuses
                // `HostUnreachableAfterReboot` — confirm a dedicated
                // variant isn't warranted.
                return Err(HiveLibError::NetworkError(
                    NetworkError::HostUnreachableAfterReboot(
                        ctx.node.target.get_preffered_host()?.to_string(),
                    ),
                ));
            }
        }
    }
}
-60
wire/lib/src/hive/steps/build.rs
-60
wire/lib/src/hive/steps/build.rs
···
1
-
use std::fmt::Display;
2
-
3
-
use tracing::{info, instrument};
4
-
5
-
use crate::{
6
-
HiveLibError,
7
-
commands::{
8
-
ChildOutputMode, WireCommand, WireCommandChip, noninteractive::NonInteractiveCommand,
9
-
},
10
-
hive::node::{Context, ExecuteStep, Goal},
11
-
};
12
-
13
-
/// Marker step that builds the node's evaluated top-level derivation.
#[derive(Debug, PartialEq)]
pub struct Build;

impl Display for Build {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Build the node")
    }
}
21
-
22
-
impl ExecuteStep for Build {
    fn should_execute(&self, ctx: &Context) -> bool {
        // Needed for every goal except keys-only and push-evaluation-only.
        !matches!(ctx.goal, Goal::Keys | Goal::Push)
    }

    /// Runs `nix build` on the evaluated top-level derivation — on the
    /// target when `build_remotely` is set, otherwise locally — and
    /// records the resulting out path in `ctx.state.build`.
    #[instrument(skip_all, name = "build")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        // Set by the Evaluate step, which runs before Build.
        let top_level = ctx.state.evaluation.as_ref().unwrap();

        let command_string = format!(
            "nix --extra-experimental-features nix-command \
            build --print-build-logs --print-out-paths {top_level}"
        );

        let mut command = NonInteractiveCommand::spawn_new(
            if ctx.node.build_remotely {
                Some(&ctx.node.target)
            } else {
                None
            },
            ChildOutputMode::Nix,
        )
        .await?;

        let (_, stdout) = command
            .run_command(command_string, false, ctx.clobber_lock.clone())?
            .wait_till_success()
            .await
            .map_err(|source| HiveLibError::NixBuildError {
                name: ctx.name.clone(),
                source,
            })?;

        info!("Built output: {stdout:?}");
        ctx.state.build = Some(stdout);

        Ok(())
    }
}
-39
wire/lib/src/hive/steps/evaluate.rs
-39
wire/lib/src/hive/steps/evaluate.rs
···
1
-
use std::fmt::Display;
2
-
3
-
use tracing::instrument;
4
-
5
-
use crate::{
6
-
EvalGoal, HiveLibError,
7
-
commands::common::evaluate_hive_attribute,
8
-
hive::node::{Context, ExecuteStep, Goal},
9
-
};
10
-
11
-
/// Marker step that evaluates the node's top-level attribute.
#[derive(Debug, PartialEq)]
pub struct Evaluate;

impl Display for Evaluate {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Evaluate the node")
    }
}
19
-
20
-
impl ExecuteStep for Evaluate {
    fn should_execute(&self, ctx: &Context) -> bool {
        // Every goal except key upload needs an evaluation.
        !matches!(ctx.goal, Goal::Keys)
    }

    /// Evaluates the node's top-level derivation from the hive and stores
    /// it in `ctx.state.evaluation` for the push/build steps.
    #[instrument(skip_all, name = "eval")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        let output = evaluate_hive_attribute(
            &ctx.hivepath,
            &EvalGoal::GetTopLevel(ctx.name),
            ctx.modifiers,
            ctx.clobber_lock.clone(),
        )
        .await?;

        // NOTE(review): panics if the evaluator emits malformed JSON —
        // consider mapping into a HiveLibError instead.
        ctx.state.evaluation = serde_json::from_str(&output).expect("failed to parse derivation");

        Ok(())
    }
}
-275
wire/lib/src/hive/steps/keys.rs
-275
wire/lib/src/hive/steps/keys.rs
···
1
-
use futures::future::join_all;
2
-
use prost::Message;
3
-
use serde::{Deserialize, Serialize};
4
-
use std::env;
5
-
use std::fmt::Display;
6
-
use std::io::Cursor;
7
-
use std::path::PathBuf;
8
-
use std::pin::Pin;
9
-
use std::process::Stdio;
10
-
use std::str::from_utf8;
11
-
use tokio::io::AsyncReadExt as _;
12
-
use tokio::process::Command;
13
-
use tokio::{fs::File, io::AsyncRead};
14
-
use tracing::{debug, trace};
15
-
16
-
use crate::HiveLibError;
17
-
use crate::commands::common::push;
18
-
use crate::commands::{ChildOutputMode, WireCommand, WireCommandChip, get_elevated_command};
19
-
use crate::errors::KeyError;
20
-
use crate::hive::node::{
21
-
Context, ExecuteStep, Goal, Push, SwitchToConfigurationGoal, should_apply_locally,
22
-
};
23
-
24
-
/// Where a key's contents come from: an inline string, a file on disk, or
/// the stdout of a command (argv vector).
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
#[serde(tag = "t", content = "c")]
pub enum Source {
    String(String),
    Path(PathBuf),
    Command(Vec<String>),
}
31
-
32
-
/// When a key should be uploaded relative to activation.
#[derive(Serialize, Deserialize, Clone, Debug, Hash, Eq, PartialEq)]
pub enum UploadKeyAt {
    #[serde(rename = "pre-activation")]
    PreActivation,
    #[serde(rename = "post-activation")]
    PostActivation,
    // Internal sentinel used by the Keys step for the keys-only goal;
    // never (de)serialized.
    #[serde(skip)]
    NoFilter,
}
41
-
42
-
/// A single secret to place on the node.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
pub struct Key {
    /// File name of the key inside `dest_dir`.
    pub name: String,
    #[serde(rename = "destDir")]
    pub dest_dir: String,
    /// Full destination path of the key on the node.
    pub path: PathBuf,
    pub group: String,
    pub user: String,
    /// Octal permission string, e.g. "0600".
    pub permissions: String,
    pub source: Source,
    #[serde(rename = "uploadAt")]
    pub upload_at: UploadKeyAt,
    /// Extra environment for `Source::Command` invocations.
    #[serde(default)]
    pub environment: im::HashMap<String, String>,
}
57
-
58
-
/// Parses the key's octal permission string (e.g. "0600") into a `u32`.
fn get_u32_permission(key: &Key) -> Result<u32, KeyError> {
    u32::from_str_radix(&key.permissions, 8).map_err(KeyError::ParseKeyPermissions)
}
61
-
62
-
/// Opens an async reader over the key's contents according to its source:
/// the file itself, the inline string, or the stdout of the configured
/// command (run to completion with the key's extra environment).
///
/// # Errors
/// Fails when the file cannot be opened, the command cannot be spawned or
/// resolved, the argv vector is empty, or the command exits non-zero.
async fn create_reader(key: &'_ Key) -> Result<Pin<Box<dyn AsyncRead + Send + '_>>, KeyError> {
    match &key.source {
        Source::Path(path) => Ok(Box::pin(File::open(path).await.map_err(KeyError::File)?)),
        Source::String(string) => Ok(Box::pin(Cursor::new(string))),
        Source::Command(args) => {
            let output = Command::new(args.first().ok_or(KeyError::Empty)?)
                .args(&args[1..])
                .stdin(Stdio::null())
                .stdout(Stdio::piped())
                .stderr(Stdio::piped())
                .envs(key.environment.clone())
                .spawn()
                .map_err(|err| KeyError::CommandSpawnError {
                    error: err,
                    command: args.join(" "),
                    // span highlighting just the program name in diagnostics
                    command_span: Some((0..args.first().unwrap().len()).into()),
                })?
                .wait_with_output()
                .await
                .map_err(|err| KeyError::CommandResolveError {
                    error: err,
                    command: args.join(" "),
                })?;

            if output.status.success() {
                return Ok(Box::pin(Cursor::new(output.stdout)));
            }

            // NOTE(review): stderr is assumed to be valid UTF-8 here and
            // will panic otherwise.
            Err(KeyError::CommandError(
                output.status,
                from_utf8(&output.stderr).unwrap().to_string(),
            ))
        }
    }
}
97
-
98
-
async fn process_key(key: &Key) -> Result<(key_agent::keys::Key, Vec<u8>), KeyError> {
99
-
let mut reader = create_reader(key).await?;
100
-
101
-
let mut buf = Vec::new();
102
-
103
-
reader
104
-
.read_to_end(&mut buf)
105
-
.await
106
-
.expect("failed to read into buffer");
107
-
108
-
let destination: PathBuf = [key.dest_dir.clone(), key.name.clone()].iter().collect();
109
-
110
-
debug!(
111
-
"Staging push to {}",
112
-
destination.clone().into_os_string().into_string().unwrap()
113
-
);
114
-
115
-
Ok((
116
-
key_agent::keys::Key {
117
-
length: buf
118
-
.len()
119
-
.try_into()
120
-
.expect("Failed to conver usize buf length to i32"),
121
-
user: key.user.clone(),
122
-
group: key.group.clone(),
123
-
permissions: get_u32_permission(key)?,
124
-
destination: destination.into_os_string().into_string().unwrap(),
125
-
},
126
-
buf,
127
-
))
128
-
}
129
-
130
-
#[derive(Debug, PartialEq)]
131
-
pub struct Keys {
132
-
pub filter: UploadKeyAt,
133
-
}
134
-
#[derive(Debug, PartialEq)]
135
-
pub struct PushKeyAgent;
136
-
137
-
impl Display for Keys {
138
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
139
-
write!(f, "Upload key @ {:?}", self.filter)
140
-
}
141
-
}
142
-
143
-
impl Display for PushKeyAgent {
144
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
145
-
write!(f, "Push the key agent")
146
-
}
147
-
}
148
-
149
-
impl ExecuteStep for Keys {
    fn should_execute(&self, ctx: &Context) -> bool {
        if ctx.no_keys {
            return false;
        }

        // should execute if no filter, and the goal is keys.
        // otherwise, only execute if the goal is switch and non-nofilter
        matches!(
            (&self.filter, &ctx.goal),
            (UploadKeyAt::NoFilter, Goal::Keys)
                | (
                    UploadKeyAt::PreActivation | UploadKeyAt::PostActivation,
                    Goal::SwitchToConfiguration(SwitchToConfigurationGoal::Switch)
                )
        )
    }

    /// Reads every applicable key into memory, then streams a protobuf
    /// header followed by each key's raw bytes into the (elevated) key
    /// agent's stdin.
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        // Set by PushKeyAgent, which always runs before this step.
        let agent_directory = ctx.state.key_agent_directory.as_ref().unwrap();

        let futures = ctx
            .node
            .keys
            .iter()
            .filter(|key| {
                // NOTE(review): with a Pre/PostActivation filter this keeps
                // keys whose `upload_at` does NOT equal the filter — verify
                // the `!=` is intentional. Also, the
                // `self.filter != UploadKeyAt::NoFilter` guard is redundant:
                // it is always true when the first disjunct is false.
                self.filter == UploadKeyAt::NoFilter
                    || (self.filter != UploadKeyAt::NoFilter && key.upload_at != self.filter)
            })
            .map(|key| async move {
                process_key(key)
                    .await
                    .map_err(|err| HiveLibError::KeyError(key.name.clone(), err))
            });

        let (keys, bufs): (Vec<key_agent::keys::Key>, Vec<Vec<u8>>) = join_all(futures)
            .await
            .into_iter()
            .collect::<Result<Vec<_>, HiveLibError>>()?
            .into_iter()
            .unzip();

        if keys.is_empty() {
            debug!("Had no keys to push, ending KeyStep early.");
            return Ok(());
        }

        let msg = key_agent::keys::Keys { keys };

        trace!("Will send message {msg:?}");

        let buf = msg.encode_to_vec();

        let mut command = get_elevated_command(
            if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
                None
            } else {
                Some(&ctx.node.target)
            },
            ChildOutputMode::Raw,
            ctx.modifiers,
        )
        .await?;
        // The agent takes the protobuf message length as its argument.
        let command_string = format!("{agent_directory}/bin/key_agent {}", buf.len());

        let mut child = command.run_command(command_string, true, ctx.clobber_lock.clone())?;

        // Header first, then each key's raw contents in the same order as
        // the header's entries.
        child.write_stdin(buf).await?;

        for buf in bufs {
            trace!("Pushing buf");
            child.write_stdin(buf).await?;
        }

        let status = child
            .wait_till_success()
            .await
            .map_err(HiveLibError::CommandError)?;

        debug!("status: {status:?}");

        Ok(())
    }
}
233
-
234
-
impl ExecuteStep for PushKeyAgent {
    fn should_execute(&self, ctx: &Context) -> bool {
        if ctx.no_keys {
            return false;
        }

        matches!(
            &ctx.goal,
            Goal::Keys | Goal::SwitchToConfiguration(SwitchToConfigurationGoal::Switch)
        )
    }

    /// Locates the platform-specific key agent via the build-time
    /// `WIRE_KEY_AGENT_<platform>` environment variable, pushes it to the
    /// target for remote deployments, and records its directory for the
    /// `Keys` step.
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        let arg_name = format!(
            "WIRE_KEY_AGENT_{platform}",
            platform = ctx.node.host_platform.replace('-', "_")
        );

        let agent_directory = match env::var_os(&arg_name) {
            Some(agent) => agent.into_string().unwrap(),
            // Unsupported platform is a build configuration problem, not a
            // runtime condition the caller can recover from.
            None => panic!(
                "{arg_name} environment variable not set! \n
                Wire was not built with the ability to deploy keys to this platform. \n
                Please create an issue: https://github.com/wires-org/wire/issues/new?template=bug_report.md"
            ),
        };

        if !should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
            push(
                ctx.node,
                ctx.name,
                Push::Path(&agent_directory),
                ctx.clobber_lock.clone(),
            )
            .await?;
        }

        ctx.state.key_agent_directory = Some(agent_directory);

        Ok(())
    }
}
-6
wire/lib/src/hive/steps/mod.rs
-6
wire/lib/src/hive/steps/mod.rs
-41
wire/lib/src/hive/steps/ping.rs
-41
wire/lib/src/hive/steps/ping.rs
···
1
-
use std::fmt::Display;
2
-
3
-
use tracing::{info, instrument, warn};
4
-
5
-
use crate::{
6
-
HiveLibError,
7
-
hive::node::{Context, ExecuteStep, should_apply_locally},
8
-
};
9
-
10
-
/// Marker step that verifies the node is reachable before deploying.
#[derive(Debug, PartialEq)]
pub struct Ping;

impl Display for Ping {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Ping node")
    }
}
18
-
19
-
impl ExecuteStep for Ping {
    fn should_execute(&self, ctx: &Context) -> bool {
        // No point pinging a node we will apply on the local machine.
        !should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string())
    }

    /// Tries each configured host in turn until one answers a ping,
    /// erroring out once the host list is exhausted.
    #[instrument(skip_all, name = "ping")]
    async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
        loop {
            info!("Attempting host {}", ctx.node.target.get_preffered_host()?);

            if ctx.node.ping(ctx.clobber_lock.clone()).await.is_ok() {
                return Ok(());
            }

            warn!(
                "Failed to ping host {}",
                // ? will take us out if we ran out of hosts
                ctx.node.target.get_preffered_host()?
            );
            ctx.node.target.host_failed();
        }
    }
}
-85
wire/lib/src/hive/steps/push.rs
-85
wire/lib/src/hive/steps/push.rs
···
1
-
use std::fmt::Display;
2
-
3
-
use tracing::{instrument, warn};
4
-
5
-
use crate::{
6
-
HiveLibError,
7
-
commands::common::push,
8
-
hive::node::{Context, ExecuteStep, Goal, should_apply_locally},
9
-
};
10
-
11
-
#[derive(Debug, PartialEq)]
12
-
pub struct PushEvaluatedOutput;
13
-
#[derive(Debug, PartialEq)]
14
-
pub struct PushBuildOutput;
15
-
16
-
impl Display for PushEvaluatedOutput {
17
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
18
-
write!(f, "Push the evaluated output")
19
-
}
20
-
}
21
-
22
-
impl Display for PushBuildOutput {
23
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
24
-
write!(f, "Push the build output")
25
-
}
26
-
}
27
-
28
-
impl ExecuteStep for PushEvaluatedOutput {
29
-
fn should_execute(&self, ctx: &Context) -> bool {
30
-
!matches!(ctx.goal, Goal::Keys)
31
-
&& (ctx.node.build_remotely | matches!(ctx.goal, Goal::Push))
32
-
}
33
-
34
-
#[instrument(skip_all, name = "push_eval")]
35
-
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
36
-
let top_level = ctx.state.evaluation.as_ref().unwrap();
37
-
38
-
push(
39
-
ctx.node,
40
-
ctx.name,
41
-
crate::hive::node::Push::Derivation(top_level),
42
-
ctx.clobber_lock.clone()
43
-
).await.inspect_err(|_| {
44
-
if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.to_string()) {
45
-
warn!("Remote push failed, but this node matches our local hostname ({0}). Perhaps you want to apply this node locally? Use `--always-build-local {0}` to override deployment.buildOnTarget", ctx.name.to_string());
46
-
} else {
47
-
warn!("Use `--always-build-local {0}` to override deployment.buildOnTarget and force {0} to build locally", ctx.name.to_string());
48
-
}
49
-
})
50
-
}
51
-
}
52
-
53
-
impl ExecuteStep for PushBuildOutput {
54
-
fn should_execute(&self, ctx: &Context) -> bool {
55
-
if matches!(ctx.goal, Goal::Keys | Goal::Push) {
56
-
// skip if we are not building
57
-
return false;
58
-
}
59
-
60
-
if ctx.node.build_remotely {
61
-
// skip if we are building remotely
62
-
return false;
63
-
}
64
-
65
-
if should_apply_locally(ctx.node.allow_local_deployment, &ctx.name.0) {
66
-
// skip step if we are applying locally
67
-
return false;
68
-
}
69
-
70
-
true
71
-
}
72
-
73
-
#[instrument(skip_all, name = "push_build")]
74
-
async fn execute(&self, ctx: &mut Context<'_>) -> Result<(), HiveLibError> {
75
-
let built_path = ctx.state.build.as_ref().unwrap();
76
-
77
-
push(
78
-
ctx.node,
79
-
ctx.name,
80
-
crate::hive::node::Push::Path(built_path),
81
-
ctx.clobber_lock.clone(),
82
-
)
83
-
.await
84
-
}
85
-
}
-32
wire/lib/src/lib.rs
-32
wire/lib/src/lib.rs
···
1
-
#![deny(clippy::pedantic)]
2
-
#![allow(
3
-
clippy::missing_errors_doc,
4
-
clippy::must_use_candidate,
5
-
clippy::missing_panics_doc
6
-
)]
7
-
#![feature(assert_matches)]
8
-
9
-
use crate::{errors::HiveLibError, hive::node::Name};
10
-
11
-
pub mod commands;
12
-
pub mod hive;
13
-
mod nix_log;
14
-
15
-
#[cfg(test)]
16
-
mod test_macros;
17
-
18
-
#[cfg(test)]
19
-
mod test_support;
20
-
21
-
pub mod errors;
22
-
23
-
#[derive(Debug, Default, Clone, Copy)]
24
-
pub struct SubCommandModifiers {
25
-
pub show_trace: bool,
26
-
pub non_interactive: bool,
27
-
}
28
-
29
-
pub enum EvalGoal<'a> {
30
-
Inspect,
31
-
GetTopLevel(&'a Name),
32
-
}
-130
wire/lib/src/nix_log.rs
-130
wire/lib/src/nix_log.rs
···
1
-
use serde::{Deserialize, Serialize};
2
-
use serde_repr::{Deserialize_repr, Serialize_repr};
3
-
use std::fmt::{Debug, Display};
4
-
use tracing::{Level as tracing_level, event, info};
5
-
6
-
// static DIGEST_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[0-9a-z]{32}").unwrap());
7
-
8
-
#[derive(Serialize, Deserialize, Debug)]
9
-
#[serde(tag = "action")]
10
-
pub enum Action {
11
-
#[serde(rename = "msg", alias = "start")]
12
-
Message {
13
-
level: Level,
14
-
#[serde(rename = "msg", alias = "text")]
15
-
message: Option<String>,
16
-
},
17
-
#[serde(rename = "stop", alias = "result")]
18
-
Stop,
19
-
}
20
-
21
-
#[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug)]
22
-
#[repr(u8)]
23
-
pub enum Level {
24
-
Error = 0,
25
-
Warn = 1,
26
-
Notice = 2,
27
-
Info = 3,
28
-
Talkative = 4,
29
-
Chatty = 5,
30
-
Debug = 6,
31
-
Vomit = 7,
32
-
}
33
-
34
-
#[derive(Serialize, Deserialize, Debug)]
35
-
pub struct Internal {
36
-
#[serde(flatten)]
37
-
pub action: Action,
38
-
}
39
-
40
-
#[derive(Debug)]
41
-
pub enum NixLog {
42
-
Internal(Internal),
43
-
Raw(String),
44
-
}
45
-
46
-
pub(crate) trait Trace {
47
-
fn trace(&self);
48
-
}
49
-
50
-
impl Internal {
51
-
pub fn get_errorish_message(self) -> Option<String> {
52
-
if let Action::Message {
53
-
level: Level::Error | Level::Warn | Level::Notice,
54
-
message,
55
-
} = self.action
56
-
{
57
-
return message;
58
-
}
59
-
60
-
None
61
-
}
62
-
}
63
-
64
-
impl Trace for Internal {
65
-
fn trace(&self) {
66
-
match &self.action {
67
-
Action::Message { level, message } => {
68
-
let text = match message {
69
-
Some(text) if text.is_empty() => return,
70
-
None => return,
71
-
Some(text) => text,
72
-
};
73
-
74
-
match level {
75
-
Level::Info => event!(tracing_level::INFO, "{text}"),
76
-
Level::Warn | Level::Notice => event!(tracing_level::WARN, "{text}"),
77
-
Level::Error => event!(tracing_level::ERROR, "{text}"),
78
-
Level::Debug => event!(tracing_level::DEBUG, "{text}"),
79
-
Level::Vomit | Level::Talkative | Level::Chatty => {
80
-
event!(tracing_level::TRACE, "{text}");
81
-
}
82
-
}
83
-
}
84
-
Action::Stop => {}
85
-
}
86
-
}
87
-
}
88
-
89
-
impl Trace for NixLog {
90
-
fn trace(&self) {
91
-
match self {
92
-
NixLog::Internal(line) => {
93
-
line.trace();
94
-
95
-
// tracing_indicatif::span_ext::IndicatifSpanExt::pb_set_message(
96
-
// &Span::current(),
97
-
// &DIGEST_RE.replace_all(&line.to_string(), "โฆ"),
98
-
// );
99
-
}
100
-
NixLog::Raw(line) => info!("{line}"),
101
-
}
102
-
}
103
-
}
104
-
105
-
impl Display for Internal {
106
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
107
-
match &self.action {
108
-
Action::Message { level, message } => {
109
-
write!(
110
-
f,
111
-
"{level:?}: {}",
112
-
match message {
113
-
Some(message) => message,
114
-
None => "Nix log without text",
115
-
}
116
-
)
117
-
}
118
-
Action::Stop => write!(f, ""),
119
-
}
120
-
}
121
-
}
122
-
123
-
impl Display for NixLog {
124
-
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
125
-
match &self {
126
-
NixLog::Internal(line) => Display::fmt(&line, f),
127
-
NixLog::Raw(line) => Display::fmt(&line, f),
128
-
}
129
-
}
130
-
}
-27
wire/lib/src/test_macros.rs
-27
wire/lib/src/test_macros.rs
···
1
-
#[macro_export]
2
-
macro_rules! function_name {
3
-
() => {{
4
-
fn f() {}
5
-
fn type_name_of<T>(_: T) -> &'static str {
6
-
std::any::type_name::<T>()
7
-
}
8
-
let name = type_name_of(f);
9
-
// closure for async functions
10
-
&name[..name.len() - 3]
11
-
}};
12
-
}
13
-
14
-
#[macro_export]
15
-
macro_rules! get_test_path {
16
-
() => {{
17
-
let mut path: PathBuf = env::var("WIRE_TEST_DIR").unwrap().into();
18
-
let full_name = $crate::function_name!();
19
-
let function_name = full_name
20
-
.trim_end_matches("::{{closure}}")
21
-
.split("::")
22
-
.last()
23
-
.unwrap();
24
-
path.push(function_name);
25
-
path
26
-
}};
27
-
}
-58
wire/lib/src/test_support.rs
-58
wire/lib/src/test_support.rs
···
1
-
use std::{
2
-
fs, io,
3
-
path::Path,
4
-
process::Command,
5
-
sync::{Arc, Mutex},
6
-
};
7
-
8
-
use tempdir::TempDir;
9
-
10
-
pub fn make_flake_sandbox(path: &Path) -> Result<TempDir, io::Error> {
11
-
let tmp_dir = TempDir::new("wire-test")?;
12
-
13
-
Command::new("git")
14
-
.args(["init", "-b", "tmp"])
15
-
.current_dir(tmp_dir.path())
16
-
.status()?;
17
-
18
-
for entry in fs::read_dir(path)? {
19
-
let entry = entry?;
20
-
21
-
fs::copy(entry.path(), tmp_dir.as_ref().join(entry.file_name()))?;
22
-
}
23
-
24
-
let root = path.parent().unwrap().parent().unwrap().parent().unwrap();
25
-
26
-
fs::copy(
27
-
root.join(Path::new("runtime/evaluate.nix")),
28
-
tmp_dir.as_ref().join("evaluate.nix"),
29
-
)?;
30
-
fs::copy(
31
-
root.join(Path::new("runtime/module.nix")),
32
-
tmp_dir.as_ref().join("module.nix"),
33
-
)?;
34
-
fs::copy(
35
-
root.join(Path::new("runtime/makeHive.nix")),
36
-
tmp_dir.as_ref().join("makeHive.nix"),
37
-
)?;
38
-
fs::copy(
39
-
root.join(Path::new("flake.lock")),
40
-
tmp_dir.as_ref().join("flake.lock"),
41
-
)?;
42
-
43
-
Command::new("git")
44
-
.args(["add", "-A"])
45
-
.current_dir(tmp_dir.path())
46
-
.status()?;
47
-
48
-
Command::new("nix")
49
-
.args(["flake", "lock"])
50
-
.current_dir(tmp_dir.path())
51
-
.status()?;
52
-
53
-
Ok(tmp_dir)
54
-
}
55
-
56
-
pub fn get_clobber_lock() -> Arc<Mutex<()>> {
57
-
Arc::new(Mutex::new(()))
58
-
}