lol

Merge master into staging-next

authored by

github-actions[bot] and committed by
GitHub
6076dbb0 47de0ebf

+507 -130
+23 -2
nixos/doc/manual/development/writing-nixos-tests.xml
··· 274 274 </term> 275 275 <listitem> 276 276 <para> 277 - Execute a shell command, raising an exception if the exit status is not 278 - zero, otherwise returning the standard output. 277 + Execute a shell command, raising an exception if the exit status 278 + is not zero, otherwise returning the standard output. Commands 279 + are run with <literal>set -euo pipefail</literal> set: 280 + <itemizedlist> 281 + <listitem> 282 + <para> 283 + If several commands are separated by <literal>;</literal> 284 + and one fails, the command as a whole will fail. 285 + </para> 286 + </listitem> 287 + <listitem> 288 + <para> 289 + For pipelines, the last non-zero exit status will be 290 + returned (if there is one, zero will be returned 291 + otherwise). 292 + </para> 293 + </listitem> 294 + <listitem> 295 + <para> 296 + Dereferencing unset variables fails the command. 297 + </para> 298 + </listitem> 299 + </itemizedlist> 279 300 </para> 280 301 </listitem> 281 302 </varlistentry>
+9
nixos/doc/manual/release-notes/rl-2105.xml
··· 183 183 184 184 <listitem> 185 185 <para> 186 + Enabling wireless networking now requires specifying at least one network 187 + interface using <xref linkend="opt-networking.wireless.interfaces"/>. 188 + This is to avoid a race condition with the card initialisation (see 189 + <link xlink:href="https://github.com/NixOS/nixpkgs/issues/101963">issue 190 + #101963</link> for more information). 191 + </para> 192 + </listitem> 193 + <listitem> 194 + <para> 186 195 If you are using <option>services.udev.extraRules</option> to assign 187 196 custom names to network interfaces, this may stop working due to a change 188 197 in the initialisation of dhcpcd and systemd networkd. To avoid this, either
+1 -1
nixos/lib/test-driver/test-driver.py
··· 441 441 def execute(self, command: str) -> Tuple[int, str]: 442 442 self.connect() 443 443 444 - out_command = "( {} ); echo '|!=EOF' $?\n".format(command) 444 + out_command = "( set -euo pipefail; {} ); echo '|!=EOF' $?\n".format(command) 445 445 self.shell.send(out_command.encode()) 446 446 447 447 output = ""
+10 -17
nixos/modules/services/networking/wpa_supplicant.nix
··· 40 40 default = []; 41 41 example = [ "wlan0" "wlan1" ]; 42 42 description = '' 43 - The interfaces <command>wpa_supplicant</command> will use. If empty, it will 44 - automatically use all wireless interfaces. 43 + The interfaces <command>wpa_supplicant</command> will use. 45 44 ''; 46 45 }; 47 46 ··· 220 219 }; 221 220 222 221 config = mkIf cfg.enable { 223 - assertions = flip mapAttrsToList cfg.networks (name: cfg: { 222 + assertions = [ 223 + { assertion = cfg.interfaces != []; 224 + message = '' 225 + No network interfaces for wpa_supplicant have been configured. 226 + Please, specify at least one using networking.wireless.interfaces. 227 + ''; 228 + } 229 + ] ++ flip mapAttrsToList cfg.networks (name: cfg: { 224 230 assertion = with cfg; count (x: x != null) [ psk pskRaw auth ] <= 1; 225 231 message = ''options networking.wireless."${name}".{psk,pskRaw,auth} are mutually exclusive''; 226 232 }); ··· 255 261 then echo >&2 "<3>/etc/wpa_supplicant.conf present but ignored. Generated ${configFile} is used instead." 256 262 fi 257 263 iface_args="-s -u -D${cfg.driver} ${configStr}" 258 - ${if ifaces == [] then '' 259 - for i in $(cd /sys/class/net && echo *); do 260 - DEVTYPE= 261 - UEVENT_PATH=/sys/class/net/$i/uevent 262 - if [ -e "$UEVENT_PATH" ]; then 263 - source "$UEVENT_PATH" 264 - if [ "$DEVTYPE" = "wlan" -o -e /sys/class/net/$i/wireless ]; then 265 - args+="''${args:+ -N} -i$i $iface_args" 266 - fi 267 - fi 268 - done 269 - '' else '' 270 - args="${concatMapStringsSep " -N " (i: "-i${i} $iface_args") ifaces}" 271 - ''} 264 + args="${concatMapStringsSep " -N " (i: "-i${i} $iface_args") ifaces}" 272 265 exec wpa_supplicant $args 273 266 ''; 274 267 };
+2 -2
nixos/tests/calibre-web.nix
··· 36 36 default.wait_for_unit("calibre-web.service") 37 37 default.wait_for_open_port(${toString defaultPort}) 38 38 default.succeed( 39 - "curl --fail 'http://localhost:${toString defaultPort}/basicconfig' | grep -q 'Basic Configuration'" 39 + "curl --fail 'http://localhost:${toString defaultPort}/basicconfig' | grep 'Basic Configuration'" 40 40 ) 41 41 42 42 customized.succeed( ··· 46 46 customized.wait_for_unit("calibre-web.service") 47 47 customized.wait_for_open_port(${toString port}) 48 48 customized.succeed( 49 - "curl --fail -H X-User:admin 'http://localhost:${toString port}' | grep -q test-book" 49 + "curl --fail -H X-User:admin 'http://localhost:${toString port}' | grep test-book" 50 50 ) 51 51 ''; 52 52 }
+3 -3
nixos/tests/docker-tools.nix
··· 23 23 with subtest("includeStorePath"): 24 24 with subtest("assumption"): 25 25 docker.succeed("${examples.helloOnRoot} | docker load") 26 - docker.succeed("set -euo pipefail; docker run --rm hello | grep -i hello") 26 + docker.succeed("docker run --rm hello | grep -i hello") 27 27 docker.succeed("docker image rm hello:latest") 28 28 with subtest("includeStorePath = false; breaks example"): 29 29 docker.succeed("${examples.helloOnRootNoStore} | docker load") 30 - docker.fail("set -euo pipefail; docker run --rm hello | grep -i hello") 30 + docker.fail("docker run --rm hello | grep -i hello") 31 31 docker.succeed("docker image rm hello:latest") 32 32 with subtest("includeStorePath = false; works with mounted store"): 33 33 docker.succeed("${examples.helloOnRootNoStore} | docker load") 34 - docker.succeed("set -euo pipefail; docker run --rm --volume ${builtins.storeDir}:${builtins.storeDir}:ro hello | grep -i hello") 34 + docker.succeed("docker run --rm --volume ${builtins.storeDir}:${builtins.storeDir}:ro hello | grep -i hello") 35 35 docker.succeed("docker image rm hello:latest") 36 36 37 37 with subtest("Ensure Docker images use a stable date by default"):
+1 -1
nixos/tests/doh-proxy-rust.nix
··· 38 38 machine.wait_for_unit("doh-proxy-rust.service") 39 39 machine.wait_for_open_port(53) 40 40 machine.wait_for_open_port(3000) 41 - machine.succeed(f"curl --fail '{url}?dns={query}' | grep -qF {bin_ip}") 41 + machine.succeed(f"curl --fail '{url}?dns={query}' | grep -F {bin_ip}") 42 42 ''; 43 43 })
+1 -1
nixos/tests/elk.nix
··· 178 178 one.systemctl("stop logstash") 179 179 one.systemctl("start elasticsearch-curator") 180 180 one.wait_until_succeeds( 181 - '! curl --silent --show-error "${esUrl}/_cat/indices" | grep logstash | grep -q ^' 181 + '! curl --silent --show-error "${esUrl}/_cat/indices" | grep logstash | grep ^' 182 182 ) 183 183 ''; 184 184 }) {};
+1 -1
nixos/tests/gitlab.nix
··· 102 102 # `doSetup` is is true. 103 103 test = doSetup: '' 104 104 gitlab.succeed( 105 - "curl -isSf http://gitlab | grep -i location | grep -q http://gitlab/users/sign_in" 105 + "curl -isSf http://gitlab | grep -i location | grep http://gitlab/users/sign_in" 106 106 ) 107 107 gitlab.succeed( 108 108 "${pkgs.sudo}/bin/sudo -u gitlab -H gitlab-rake gitlab:check 1>&2"
+1 -1
nixos/tests/gocd-agent.nix
··· 42 42 "curl ${serverUrl} -H '${header}' | ${pkgs.jq}/bin/jq -e ._embedded.agents[0].uuid" 43 43 ) 44 44 agent.succeed( 45 - "curl ${serverUrl} -H '${header}' | ${pkgs.jq}/bin/jq -e ._embedded.agents[0].agent_state | grep -q Idle" 45 + "curl ${serverUrl} -H '${header}' | ${pkgs.jq}/bin/jq -e ._embedded.agents[0].agent_state | grep Idle" 46 46 ) 47 47 ''; 48 48 })
+4 -4
nixos/tests/grafana.nix
··· 74 74 declarativePlugins.wait_for_unit("grafana.service") 75 75 declarativePlugins.wait_for_open_port(3000) 76 76 declarativePlugins.succeed( 77 - "curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/plugins | grep -q grafana-clock-panel" 77 + "curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/plugins | grep grafana-clock-panel" 78 78 ) 79 79 declarativePlugins.shutdown() 80 80 ··· 82 82 sqlite.wait_for_unit("grafana.service") 83 83 sqlite.wait_for_open_port(3000) 84 84 sqlite.succeed( 85 - "curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/org/users | grep -q testadmin\@localhost" 85 + "curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/org/users | grep testadmin\@localhost" 86 86 ) 87 87 sqlite.shutdown() 88 88 ··· 92 92 postgresql.wait_for_open_port(3000) 93 93 postgresql.wait_for_open_port(5432) 94 94 postgresql.succeed( 95 - "curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/org/users | grep -q testadmin\@localhost" 95 + "curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/org/users | grep testadmin\@localhost" 96 96 ) 97 97 postgresql.shutdown() 98 98 ··· 102 102 mysql.wait_for_open_port(3000) 103 103 mysql.wait_for_open_port(3306) 104 104 mysql.succeed( 105 - "curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/org/users | grep -q testadmin\@localhost" 105 + "curl -sSfN -u testadmin:snakeoilpwd http://127.0.0.1:3000/api/org/users | grep testadmin\@localhost" 106 106 ) 107 107 mysql.shutdown() 108 108 '';
+6 -6
nixos/tests/miniflux.nix
··· 48 48 49 49 default.wait_for_unit("miniflux.service") 50 50 default.wait_for_open_port(${toString defaultPort}) 51 - default.succeed("curl --fail 'http://localhost:${toString defaultPort}/healthcheck' | grep -q OK") 51 + default.succeed("curl --fail 'http://localhost:${toString defaultPort}/healthcheck' | grep OK") 52 52 default.succeed( 53 - "curl 'http://localhost:${toString defaultPort}/v1/me' -u '${defaultUsername}:${defaultPassword}' -H Content-Type:application/json | grep -q '\"is_admin\":true'" 53 + "curl 'http://localhost:${toString defaultPort}/v1/me' -u '${defaultUsername}:${defaultPassword}' -H Content-Type:application/json | grep '\"is_admin\":true'" 54 54 ) 55 55 56 56 withoutSudo.wait_for_unit("miniflux.service") 57 57 withoutSudo.wait_for_open_port(${toString defaultPort}) 58 - withoutSudo.succeed("curl --fail 'http://localhost:${toString defaultPort}/healthcheck' | grep -q OK") 58 + withoutSudo.succeed("curl --fail 'http://localhost:${toString defaultPort}/healthcheck' | grep OK") 59 59 withoutSudo.succeed( 60 - "curl 'http://localhost:${toString defaultPort}/v1/me' -u '${defaultUsername}:${defaultPassword}' -H Content-Type:application/json | grep -q '\"is_admin\":true'" 60 + "curl 'http://localhost:${toString defaultPort}/v1/me' -u '${defaultUsername}:${defaultPassword}' -H Content-Type:application/json | grep '\"is_admin\":true'" 61 61 ) 62 62 63 63 customized.wait_for_unit("miniflux.service") 64 64 customized.wait_for_open_port(${toString port}) 65 - customized.succeed("curl --fail 'http://localhost:${toString port}/healthcheck' | grep -q OK") 65 + customized.succeed("curl --fail 'http://localhost:${toString port}/healthcheck' | grep OK") 66 66 customized.succeed( 67 - "curl 'http://localhost:${toString port}/v1/me' -u '${username}:${password}' -H Content-Type:application/json | grep -q '\"is_admin\":true'" 67 + "curl 'http://localhost:${toString port}/v1/me' -u '${username}:${password}' -H Content-Type:application/json | grep '\"is_admin\":true'" 68 68 ) 69 69 ''; 70 70 })
+1 -1
nixos/tests/nginx-variants.nix
··· 29 29 }; 30 30 } 31 31 ) 32 - [ "nginxStable" "nginxUnstable" "nginxShibboleth" "openresty" "tengine" ] 32 + [ "nginxStable" "nginxMainline" "nginxShibboleth" "openresty" "tengine" ] 33 33 )
+2 -2
nixos/tests/nginx.nix
··· 56 56 }; 57 57 58 58 specialisation.reloadRestartSystem.configuration = { 59 - services.nginx.package = pkgs.nginxUnstable; 59 + services.nginx.package = pkgs.nginxMainline; 60 60 }; 61 61 62 62 specialisation.reloadWithErrorsSystem.configuration = { 63 - services.nginx.package = pkgs.nginxUnstable; 63 + services.nginx.package = pkgs.nginxMainline; 64 64 services.nginx.virtualHosts."!@$$(#*%".locations."~@#*$*!)".proxyPass = ";;;"; 65 65 }; 66 66 };
+3 -3
nixos/tests/pomerium.nix
··· 88 88 89 89 with subtest("no authentication required"): 90 90 pomerium.succeed( 91 - "curl --resolve my.website:80:127.0.0.1 http://my.website | grep -q 'hello world'" 91 + "curl --resolve my.website:80:127.0.0.1 http://my.website | grep 'hello world'" 92 92 ) 93 93 94 94 with subtest("login required"): 95 95 pomerium.succeed( 96 - "curl -I --resolve login.required:80:127.0.0.1 http://login.required | grep -q pom-auth" 96 + "curl -I --resolve login.required:80:127.0.0.1 http://login.required | grep pom-auth" 97 97 ) 98 98 pomerium.succeed( 99 - "curl -L --resolve login.required:80:127.0.0.1 http://login.required | grep -q 'hello I am login page'" 99 + "curl -L --resolve login.required:80:127.0.0.1 http://login.required | grep 'hello I am login page'" 100 100 ) 101 101 ''; 102 102 })
+50 -50
nixos/tests/prometheus-exporters.nix
··· 71 71 wait_for_open_port(3551) 72 72 wait_for_unit("prometheus-apcupsd-exporter.service") 73 73 wait_for_open_port(9162) 74 - succeed("curl -sSf http://localhost:9162/metrics | grep -q 'apcupsd_info'") 74 + succeed("curl -sSf http://localhost:9162/metrics | grep 'apcupsd_info'") 75 75 ''; 76 76 }; 77 77 ··· 85 85 wait_for_unit("prometheus-artifactory-exporter.service") 86 86 wait_for_open_port(9531) 87 87 succeed( 88 - "curl -sSf http://localhost:9531/metrics | grep -q 'artifactory_up'" 88 + "curl -sSf http://localhost:9531/metrics | grep 'artifactory_up'" 89 89 ) 90 90 ''; 91 91 }; ··· 106 106 wait_for_unit("prometheus-bind-exporter.service") 107 107 wait_for_open_port(9119) 108 108 succeed( 109 - "curl -sSf http://localhost:9119/metrics | grep -q 'bind_query_recursions_total 0'" 109 + "curl -sSf http://localhost:9119/metrics | grep 'bind_query_recursions_total 0'" 110 110 ) 111 111 ''; 112 112 }; ··· 135 135 wait_for_unit("prometheus-bird-exporter.service") 136 136 wait_for_open_port(9324) 137 137 wait_until_succeeds( 138 - "curl -sSf http://localhost:9324/metrics | grep -q 'MyObviousTestString'" 138 + "curl -sSf http://localhost:9324/metrics | grep 'MyObviousTestString'" 139 139 ) 140 140 ''; 141 141 }; ··· 154 154 wait_for_unit("prometheus-bitcoin-exporter.service") 155 155 wait_for_unit("bitcoind-default.service") 156 156 wait_for_open_port(9332) 157 - succeed("curl -sSf http://localhost:9332/metrics | grep -q '^bitcoin_blocks '") 157 + succeed("curl -sSf http://localhost:9332/metrics | grep '^bitcoin_blocks '") 158 158 ''; 159 159 }; 160 160 ··· 172 172 wait_for_unit("prometheus-blackbox-exporter.service") 173 173 wait_for_open_port(9115) 174 174 succeed( 175 - "curl -sSf 'http://localhost:9115/probe?target=localhost&module=icmp_v6' | grep -q 'probe_success 1'" 175 + "curl -sSf 'http://localhost:9115/probe?target=localhost&module=icmp_v6' | grep 'probe_success 1'" 176 176 ) 177 177 ''; 178 178 }; ··· 204 204 "curl -sSfH 'Content-Type: application/json' -X 
POST --data @/tmp/data.json localhost:9103/collectd" 205 205 ) 206 206 succeed( 207 - "curl -sSf localhost:9103/metrics | grep -q 'collectd_testplugin_gauge{instance=\"testhost\"} 23'" 207 + "curl -sSf localhost:9103/metrics | grep 'collectd_testplugin_gauge{instance=\"testhost\"} 23'" 208 208 ) 209 209 ''; 210 210 }; ··· 220 220 exporterTest = '' 221 221 wait_for_unit("prometheus-dnsmasq-exporter.service") 222 222 wait_for_open_port(9153) 223 - succeed("curl -sSf http://localhost:9153/metrics | grep -q 'dnsmasq_leases 0'") 223 + succeed("curl -sSf http://localhost:9153/metrics | grep 'dnsmasq_leases 0'") 224 224 ''; 225 225 }; 226 226 ··· 235 235 wait_for_unit("prometheus-domain-exporter.service") 236 236 wait_for_open_port(9222) 237 237 succeed( 238 - "curl -sSf 'http://localhost:9222/probe?target=nixos.org' | grep -q 'domain_probe_success 0'" 238 + "curl -sSf 'http://localhost:9222/probe?target=nixos.org' | grep 'domain_probe_success 0'" 239 239 ) 240 240 ''; 241 241 }; ··· 254 254 wait_for_unit("prometheus-dovecot-exporter.service") 255 255 wait_for_open_port(9166) 256 256 succeed( 257 - "curl -sSf http://localhost:9166/metrics | grep -q 'dovecot_up{scope=\"global\"} 1'" 257 + "curl -sSf http://localhost:9166/metrics | grep 'dovecot_up{scope=\"global\"} 1'" 258 258 ) 259 259 ''; 260 260 }; ··· 268 268 wait_for_unit("prometheus-fritzbox-exporter.service") 269 269 wait_for_open_port(9133) 270 270 succeed( 271 - "curl -sSf http://localhost:9133/metrics | grep -q 'fritzbox_exporter_collect_errors 0'" 271 + "curl -sSf http://localhost:9133/metrics | grep 'fritzbox_exporter_collect_errors 0'" 272 272 ) 273 273 ''; 274 274 }; ··· 290 290 wait_for_unit("prometheus-jitsi-exporter.service") 291 291 wait_for_open_port(9700) 292 292 wait_until_succeeds( 293 - 'journalctl -eu prometheus-jitsi-exporter.service -o cat | grep -q "key=participants"' 293 + 'journalctl -eu prometheus-jitsi-exporter.service -o cat | grep "key=participants"' 294 294 ) 295 - succeed("curl -sSf 
'localhost:9700/metrics' | grep -q 'jitsi_participants 0'") 295 + succeed("curl -sSf 'localhost:9700/metrics' | grep 'jitsi_participants 0'") 296 296 ''; 297 297 }; 298 298 ··· 321 321 wait_for_unit("prometheus-json-exporter.service") 322 322 wait_for_open_port(7979) 323 323 succeed( 324 - "curl -sSf 'localhost:7979/probe?target=http://localhost' | grep -q 'json_test_metric 1'" 324 + "curl -sSf 'localhost:7979/probe?target=http://localhost' | grep 'json_test_metric 1'" 325 325 ) 326 326 ''; 327 327 }; ··· 426 426 wait_for_unit("knot.service") 427 427 wait_for_unit("prometheus-knot-exporter.service") 428 428 wait_for_open_port(9433) 429 - succeed("curl -sSf 'localhost:9433' | grep -q 'knot_server_zone_count 1.0'") 429 + succeed("curl -sSf 'localhost:9433' | grep 'knot_server_zone_count 1.0'") 430 430 ''; 431 431 }; 432 432 ··· 441 441 wait_for_unit("prometheus-keylight-exporter.service") 442 442 wait_for_open_port(9288) 443 443 succeed( 444 - "curl -sS --write-out '%{http_code}' -o /dev/null http://localhost:9288/metrics | grep -q '400'" 444 + "curl -sS --write-out '%{http_code}' -o /dev/null http://localhost:9288/metrics | grep '400'" 445 445 ) 446 446 succeed( 447 - "curl -sS --write-out '%{http_code}' -o /dev/null http://localhost:9288/metrics?target=nosuchdevice | grep -q '500'" 447 + "curl -sS --write-out '%{http_code}' -o /dev/null http://localhost:9288/metrics?target=nosuchdevice | grep '500'" 448 448 ) 449 449 ''; 450 450 }; ··· 489 489 wait_for_open_port(10009) 490 490 wait_for_unit("prometheus-lnd-exporter.service") 491 491 wait_for_open_port(9092) 492 - succeed("curl -sSf localhost:9092/metrics | grep -q '^promhttp_metric_handler'") 492 + succeed("curl -sSf localhost:9092/metrics | grep '^promhttp_metric_handler'") 493 493 ''; 494 494 }; 495 495 ··· 531 531 wait_for_unit("prometheus-mail-exporter.service") 532 532 wait_for_open_port(9225) 533 533 wait_until_succeeds( 534 - "curl -sSf http://localhost:9225/metrics | grep -q 
'mail_deliver_success{configname=\"testserver\"} 1'" 534 + "curl -sSf http://localhost:9225/metrics | grep 'mail_deliver_success{configname=\"testserver\"} 1'" 535 535 ) 536 536 ''; 537 537 }; ··· 571 571 wait_for_unit("prometheus-mikrotik-exporter.service") 572 572 wait_for_open_port(9436) 573 573 succeed( 574 - "curl -sSf http://localhost:9436/metrics | grep -q 'mikrotik_scrape_collector_success{device=\"router\"} 0'" 574 + "curl -sSf http://localhost:9436/metrics | grep 'mikrotik_scrape_collector_success{device=\"router\"} 0'" 575 575 ) 576 576 ''; 577 577 }; ··· 596 596 wait_for_unit("prometheus-modemmanager-exporter.service") 597 597 wait_for_open_port(9539) 598 598 succeed( 599 - "curl -sSf http://localhost:9539/metrics | grep -q 'modemmanager_info'" 599 + "curl -sSf http://localhost:9539/metrics | grep 'modemmanager_info'" 600 600 ) 601 601 ''; 602 602 }; ··· 634 634 wait_for_unit("nginx.service") 635 635 wait_for_unit("prometheus-nextcloud-exporter.service") 636 636 wait_for_open_port(9205) 637 - succeed("curl -sSf http://localhost:9205/metrics | grep -q 'nextcloud_up 1'") 637 + succeed("curl -sSf http://localhost:9205/metrics | grep 'nextcloud_up 1'") 638 638 ''; 639 639 }; 640 640 ··· 653 653 wait_for_unit("nginx.service") 654 654 wait_for_unit("prometheus-nginx-exporter.service") 655 655 wait_for_open_port(9113) 656 - succeed("curl -sSf http://localhost:9113/metrics | grep -q 'nginx_up 1'") 656 + succeed("curl -sSf http://localhost:9113/metrics | grep 'nginx_up 1'") 657 657 ''; 658 658 }; 659 659 ··· 708 708 succeed("curl http://localhost") 709 709 execute("sleep 1") 710 710 succeed( 711 - "curl -sSf http://localhost:9117/metrics | grep 'filelogger_http_response_count_total' | grep -q 1" 711 + "curl -sSf http://localhost:9117/metrics | grep 'filelogger_http_response_count_total' | grep 1" 712 712 ) 713 713 succeed("curl http://localhost:81") 714 714 execute("sleep 1") 715 715 succeed( 716 - "curl -sSf http://localhost:9117/metrics | grep 
'syslogger_http_response_count_total' | grep -q 1" 716 + "curl -sSf http://localhost:9117/metrics | grep 'syslogger_http_response_count_total' | grep 1" 717 717 ) 718 718 ''; 719 719 }; ··· 726 726 wait_for_unit("prometheus-node-exporter.service") 727 727 wait_for_open_port(9100) 728 728 succeed( 729 - "curl -sSf http://localhost:9100/metrics | grep -q 'node_exporter_build_info{.\\+} 1'" 729 + "curl -sSf http://localhost:9100/metrics | grep 'node_exporter_build_info{.\\+} 1'" 730 730 ) 731 731 ''; 732 732 }; ··· 786 786 wait_for_open_port(389) 787 787 wait_for_open_port(9330) 788 788 wait_until_succeeds( 789 - "curl -sSf http://localhost:9330/metrics | grep -q 'openldap_scrape{result=\"ok\"} 1'" 789 + "curl -sSf http://localhost:9330/metrics | grep 'openldap_scrape{result=\"ok\"} 1'" 790 790 ) 791 791 ''; 792 792 }; ··· 812 812 exporterTest = '' 813 813 wait_for_unit("openvpn-test.service") 814 814 wait_for_unit("prometheus-openvpn-exporter.service") 815 - succeed("curl -sSf http://localhost:9176/metrics | grep -q 'openvpn_up{.*} 1'") 815 + succeed("curl -sSf http://localhost:9176/metrics | grep 'openvpn_up{.*} 1'") 816 816 ''; 817 817 }; 818 818 ··· 828 828 wait_for_file("/var/lib/postfix/queue/public/showq") 829 829 wait_for_open_port(9154) 830 830 succeed( 831 - "curl -sSf http://localhost:9154/metrics | grep -q 'postfix_smtpd_connects_total 0'" 831 + "curl -sSf http://localhost:9154/metrics | grep 'postfix_smtpd_connects_total 0'" 832 832 ) 833 - succeed("curl -sSf http://localhost:9154/metrics | grep -q 'postfix_up{.*} 1'") 833 + succeed("curl -sSf http://localhost:9154/metrics | grep 'postfix_up{.*} 1'") 834 834 ''; 835 835 }; 836 836 ··· 847 847 wait_for_open_port(9187) 848 848 wait_for_unit("postgresql.service") 849 849 succeed( 850 - "curl -sSf http://localhost:9187/metrics | grep -q 'pg_exporter_last_scrape_error 0'" 850 + "curl -sSf http://localhost:9187/metrics | grep 'pg_exporter_last_scrape_error 0'" 851 851 ) 852 - succeed("curl -sSf 
http://localhost:9187/metrics | grep -q 'pg_up 1'") 852 + succeed("curl -sSf http://localhost:9187/metrics | grep 'pg_up 1'") 853 853 systemctl("stop postgresql.service") 854 854 succeed( 855 - "curl -sSf http://localhost:9187/metrics | grep -qv 'pg_exporter_last_scrape_error 0'" 855 + "curl -sSf http://localhost:9187/metrics | grep -v 'pg_exporter_last_scrape_error 0'" 856 856 ) 857 - succeed("curl -sSf http://localhost:9187/metrics | grep -q 'pg_up 0'") 857 + succeed("curl -sSf http://localhost:9187/metrics | grep 'pg_up 0'") 858 858 systemctl("start postgresql.service") 859 859 wait_for_unit("postgresql.service") 860 860 succeed( 861 - "curl -sSf http://localhost:9187/metrics | grep -q 'pg_exporter_last_scrape_error 0'" 861 + "curl -sSf http://localhost:9187/metrics | grep 'pg_exporter_last_scrape_error 0'" 862 862 ) 863 - succeed("curl -sSf http://localhost:9187/metrics | grep -q 'pg_up 1'") 863 + succeed("curl -sSf http://localhost:9187/metrics | grep 'pg_up 1'") 864 864 ''; 865 865 }; 866 866 ··· 893 893 wait_for_unit("prometheus-py-air-control-exporter.service") 894 894 wait_for_open_port(9896) 895 895 succeed( 896 - "curl -sSf http://localhost:9896/metrics | grep -q 'py_air_control_sampling_error_total'" 896 + "curl -sSf http://localhost:9896/metrics | grep 'py_air_control_sampling_error_total'" 897 897 ) 898 898 ''; 899 899 }; ··· 908 908 wait_for_unit("prometheus-redis-exporter.service") 909 909 wait_for_open_port(6379) 910 910 wait_for_open_port(9121) 911 - wait_until_succeeds("curl -sSf localhost:9121/metrics | grep -q 'redis_up 1'") 911 + wait_until_succeeds("curl -sSf localhost:9121/metrics | grep 'redis_up 1'") 912 912 ''; 913 913 }; 914 914 ··· 926 926 wait_for_open_port(11334) 927 927 wait_for_open_port(7980) 928 928 wait_until_succeeds( 929 - "curl -sSf 'localhost:7980/probe?target=http://localhost:11334/stat' | grep -q 'rspamd_scanned{host=\"rspamd\"} 0'" 929 + "curl -sSf 'localhost:7980/probe?target=http://localhost:11334/stat' | grep 
'rspamd_scanned{host=\"rspamd\"} 0'" 930 930 ) 931 931 ''; 932 932 }; ··· 957 957 wait_for_unit("prometheus-rtl_433-exporter.service") 958 958 wait_for_open_port(9550) 959 959 wait_until_succeeds( 960 - "curl -sSf localhost:9550/metrics | grep -q '{}'".format( 960 + "curl -sSf localhost:9550/metrics | grep '{}'".format( 961 961 'rtl_433_temperature_celsius{channel="3",id="55",location="",model="zopieux"} 18' 962 962 ) 963 963 ) ··· 973 973 wait_for_unit("prometheus-smokeping-exporter.service") 974 974 wait_for_open_port(9374) 975 975 wait_until_succeeds( 976 - "curl -sSf localhost:9374/metrics | grep '{}' | grep -qv ' 0$'".format( 976 + "curl -sSf localhost:9374/metrics | grep '{}' | grep -v ' 0$'".format( 977 977 'smokeping_requests_total{host="127.0.0.1",ip="127.0.0.1"} ' 978 978 ) 979 979 ) 980 980 wait_until_succeeds( 981 - "curl -sSf localhost:9374/metrics | grep -q '{}'".format( 981 + "curl -sSf localhost:9374/metrics | grep '{}'".format( 982 982 'smokeping_response_ttl{host="127.0.0.1",ip="127.0.0.1"}' 983 983 ) 984 984 ) ··· 996 996 exporterTest = '' 997 997 wait_for_unit("prometheus-snmp-exporter.service") 998 998 wait_for_open_port(9116) 999 - succeed("curl -sSf localhost:9116/metrics | grep -q 'snmp_request_errors_total 0'") 999 + succeed("curl -sSf localhost:9116/metrics | grep 'snmp_request_errors_total 0'") 1000 1000 ''; 1001 1001 }; 1002 1002 ··· 1040 1040 exporterTest = '' 1041 1041 wait_for_unit("prometheus-sql-exporter.service") 1042 1042 wait_for_open_port(9237) 1043 - succeed("curl http://localhost:9237/metrics | grep -c 'sql_points{' | grep -q 2") 1043 + succeed("curl http://localhost:9237/metrics | grep -c 'sql_points{' | grep 2") 1044 1044 ''; 1045 1045 }; 1046 1046 ··· 1063 1063 wait_for_open_port(80) 1064 1064 wait_for_unit("prometheus-surfboard-exporter.service") 1065 1065 wait_for_open_port(9239) 1066 - succeed("curl -sSf localhost:9239/metrics | grep -q 'surfboard_up 1'") 1066 + succeed("curl -sSf localhost:9239/metrics | grep 
'surfboard_up 1'") 1067 1067 ''; 1068 1068 }; 1069 1069 ··· 1076 1076 wait_for_unit("prometheus-systemd-exporter.service") 1077 1077 wait_for_open_port(9558) 1078 1078 succeed( 1079 - "curl -sSf localhost:9558/metrics | grep -q '{}'".format( 1079 + "curl -sSf localhost:9558/metrics | grep '{}'".format( 1080 1080 'systemd_unit_state{name="basic.target",state="active",type="target"} 1' 1081 1081 ) 1082 1082 ) ··· 1098 1098 wait_for_open_port(9051) 1099 1099 wait_for_unit("prometheus-tor-exporter.service") 1100 1100 wait_for_open_port(9130) 1101 - succeed("curl -sSf localhost:9130/metrics | grep -q 'tor_version{.\\+} 1'") 1101 + succeed("curl -sSf localhost:9130/metrics | grep 'tor_version{.\\+} 1'") 1102 1102 ''; 1103 1103 }; 1104 1104 ··· 1110 1110 wait_for_unit("prometheus-unifi-poller-exporter.service") 1111 1111 wait_for_open_port(9130) 1112 1112 succeed( 1113 - "curl -sSf localhost:9130/metrics | grep -q 'unifipoller_build_info{.\\+} 1'" 1113 + "curl -sSf localhost:9130/metrics | grep 'unifipoller_build_info{.\\+} 1'" 1114 1114 ) 1115 1115 ''; 1116 1116 }; ··· 1134 1134 wait_for_unit("unbound.service") 1135 1135 wait_for_unit("prometheus-unbound-exporter.service") 1136 1136 wait_for_open_port(9167) 1137 - succeed("curl -sSf localhost:9167/metrics | grep -q 'unbound_up 1'") 1137 + succeed("curl -sSf localhost:9167/metrics | grep 'unbound_up 1'") 1138 1138 ''; 1139 1139 }; 1140 1140 ··· 1163 1163 wait_for_unit("prometheus-varnish-exporter.service") 1164 1164 wait_for_open_port(6081) 1165 1165 wait_for_open_port(9131) 1166 - succeed("curl -sSf http://localhost:9131/metrics | grep -q 'varnish_up 1'") 1166 + succeed("curl -sSf http://localhost:9131/metrics | grep 'varnish_up 1'") 1167 1167 ''; 1168 1168 }; 1169 1169
+1 -1
nixos/tests/shiori.nix
··· 28 28 machine.wait_for_unit("shiori.service") 29 29 machine.wait_for_open_port(8080) 30 30 machine.succeed("curl --fail http://localhost:8080/") 31 - machine.succeed("curl --fail --location http://localhost:8080/ | grep -qi shiori") 31 + machine.succeed("curl --fail --location http://localhost:8080/ | grep -i shiori") 32 32 33 33 with subtest("login"): 34 34 auth_json = machine.succeed(
+3 -3
nixos/tests/wiki-js.nix
··· 119 119 120 120 with subtest("Setup"): 121 121 result = machine.succeed( 122 - "set -o pipefail; curl -sSf localhost:3000/finalize -X POST -d " 122 + "curl -sSf localhost:3000/finalize -X POST -d " 123 123 + "@${payloads.finalize} -H 'Content-Type: application/json' " 124 124 + "| jq .ok | xargs echo" 125 125 ) ··· 132 132 133 133 with subtest("Base functionality"): 134 134 auth = machine.succeed( 135 - "set -o pipefail; curl -sSf localhost:3000/graphql -X POST " 135 + "curl -sSf localhost:3000/graphql -X POST " 136 136 + "-d @${payloads.login} -H 'Content-Type: application/json' " 137 137 + "| jq '.[0].data.authentication.login.jwt' | xargs echo" 138 138 ).strip() ··· 140 140 assert auth 141 141 142 142 create = machine.succeed( 143 - "set -o pipefail; curl -sSf localhost:3000/graphql -X POST " 143 + "curl -sSf localhost:3000/graphql -X POST " 144 144 + "-d @${payloads.content} -H 'Content-Type: application/json' " 145 145 + f"-H 'Authorization: Bearer {auth}' " 146 146 + "| jq '.[0].data.pages.create.responseResult.succeeded'|xargs echo"
+4 -4
nixos/tests/xandikos.nix
··· 44 44 xandikos_default.wait_for_open_port(8080) 45 45 xandikos_default.succeed("curl --fail http://localhost:8080/") 46 46 xandikos_default.succeed( 47 - "curl -s --fail --location http://localhost:8080/ | grep -qi Xandikos" 47 + "curl -s --fail --location http://localhost:8080/ | grep -i Xandikos" 48 48 ) 49 49 xandikos_client.wait_for_unit("network.target") 50 50 xandikos_client.fail("curl --fail http://xandikos_default:8080/") ··· 55 55 xandikos_proxy.wait_for_open_port(8080) 56 56 xandikos_proxy.succeed("curl --fail http://localhost:8080/") 57 57 xandikos_proxy.succeed( 58 - "curl -s --fail --location http://localhost:8080/ | grep -qi Xandikos" 58 + "curl -s --fail --location http://localhost:8080/ | grep -i Xandikos" 59 59 ) 60 60 xandikos_client.wait_for_unit("network.target") 61 61 xandikos_client.fail("curl --fail http://xandikos_proxy:8080/") 62 62 xandikos_client.succeed( 63 - "curl -s --fail -u xandikos:snakeOilPassword -H 'Host: xandikos.local' http://xandikos_proxy/xandikos/ | grep -qi Xandikos" 63 + "curl -s --fail -u xandikos:snakeOilPassword -H 'Host: xandikos.local' http://xandikos_proxy/xandikos/ | grep -i Xandikos" 64 64 ) 65 65 xandikos_client.succeed( 66 - "curl -s --fail -u xandikos:snakeOilPassword -H 'Host: xandikos.local' http://xandikos_proxy/xandikos/user/ | grep -qi Xandikos" 66 + "curl -s --fail -u xandikos:snakeOilPassword -H 'Host: xandikos.local' http://xandikos_proxy/xandikos/user/ | grep -i Xandikos" 67 67 ) 68 68 ''; 69 69 }
+36
pkgs/applications/editors/emacs/elisp-packages/apheleia/default.nix
··· 1 + { stdenv, fetchFromGitHub, emacs, lib }: 2 + 3 + stdenv.mkDerivation { 4 + pname = "apheleia"; 5 + version = "2021-05-23"; 6 + 7 + src = fetchFromGitHub { 8 + owner = "raxod502"; 9 + repo = "apheleia"; 10 + rev = "f865c165dac606187a66b2b25a57d5099b452120"; 11 + sha256 = "sha256-n37jJsNOGhSjUtQysG3NVIjjayhbOa52iTXBc8SyKXE="; 12 + }; 13 + 14 + buildInputs = [ emacs ]; 15 + 16 + buildPhase = '' 17 + runHook preBuild 18 + emacs -L . --batch -f batch-byte-compile *.el 19 + runHook postBuild 20 + ''; 21 + 22 + installPhase = '' 23 + runHook preInstall 24 + install -d $out/share/emacs/site-lisp 25 + install *.el *.elc $out/share/emacs/site-lisp 26 + runHook postInstall 27 + ''; 28 + 29 + meta = { 30 + description = "Reformat buffer stably"; 31 + homepage = "https://github.com/raxod502/apheleia"; 32 + license = lib.licenses.mit; 33 + maintainers = with lib.maintainers; [ leungbk ]; 34 + platforms = emacs.meta.platforms; 35 + }; 36 + }
+46
pkgs/applications/editors/emacs/elisp-packages/evil-markdown/default.nix
··· 1 + { stdenv, fetchFromGitHub, emacs, emacsPackages, lib }: 2 + 3 + let 4 + runtimeDeps = with emacsPackages; [ 5 + evil 6 + markdown-mode 7 + ]; 8 + in 9 + stdenv.mkDerivation { 10 + pname = "evil-markdown"; 11 + version = "2020-06-01"; 12 + 13 + src = fetchFromGitHub { 14 + owner = "Somelauw"; 15 + repo = "evil-markdown"; 16 + rev = "064fe9b4767470472356d20bdd08e2f30ebbc9ac"; 17 + sha256 = "sha256-Kt2wxG1XCFowavVWtj0urM/yURKegonpZcxTy/+CrJY="; 18 + }; 19 + 20 + buildInputs = [ 21 + emacs 22 + ] ++ runtimeDeps; 23 + 24 + propagatedUserEnvPkgs = runtimeDeps; 25 + 26 + buildPhase = '' 27 + runHook preBuild 28 + emacs -L . --batch -f batch-byte-compile *.el 29 + runHook postBuild 30 + ''; 31 + 32 + installPhase = '' 33 + runHook preInstall 34 + install -d $out/share/emacs/site-lisp 35 + install *.el *.elc $out/share/emacs/site-lisp 36 + runHook postInstall 37 + ''; 38 + 39 + meta = { 40 + description = "Vim-like keybindings for markdown-mode"; 41 + homepage = "https://github.com/Somelauw/evil-markdown"; 42 + license = lib.licenses.gpl3Plus; 43 + maintainers = with lib.maintainers; [ leungbk ]; 44 + platforms = emacs.meta.platforms; 45 + }; 46 + }
+36
pkgs/applications/editors/emacs/elisp-packages/git-undo/default.nix
··· 1 + { stdenv, fetchFromGitHub, emacs, lib }: 2 + 3 + stdenv.mkDerivation { 4 + pname = "git-undo"; 5 + version = "2019-10-13"; 6 + 7 + src = fetchFromGitHub { 8 + owner = "jwiegley"; 9 + repo = "git-undo-el"; 10 + rev = "cf31e38e7889e6ade7d2d2b9f8719fd44f52feb5"; 11 + sha256 = "sha256-cVkK9EF6qQyVV3uVqnBEjF8e9nEx/8ixnM8PvxqCyYE="; 12 + }; 13 + 14 + buildInputs = [ emacs ]; 15 + 16 + buildPhase = '' 17 + runHook preBuild 18 + emacs -L . --batch -f batch-byte-compile *.el 19 + runHook postBuild 20 + ''; 21 + 22 + installPhase = '' 23 + runHook preInstall 24 + install -d $out/share/emacs/site-lisp 25 + install *.el *.elc $out/share/emacs/site-lisp 26 + runHook postInstall 27 + ''; 28 + 29 + meta = { 30 + description = "Revert region to most recent Git-historical version"; 31 + homepage = "https://github.com/jwiegley/git-undo-el"; 32 + license = lib.licenses.gpl2Plus; 33 + maintainers = with lib.maintainers; [ leungbk ]; 34 + platforms = emacs.meta.platforms; 35 + }; 36 + }
+36
pkgs/applications/editors/emacs/elisp-packages/isearch-plus/default.nix
··· 1 + { stdenv, fetchFromGitHub, emacs, lib }: 2 + 3 + stdenv.mkDerivation { 4 + pname = "isearch-plus"; 5 + version = "2021-01-01"; 6 + 7 + src = fetchFromGitHub { 8 + owner = "emacsmirror"; 9 + repo = "isearch-plus"; 10 + rev = "376a8f9f8a9666d7e61d125abcdb645847cb8619"; 11 + sha256 = "sha256-Kd5vpu+mI1tJPcsu7EpnnBcPVdVAijkAeTz+bLB3WlQ="; 12 + }; 13 + 14 + buildInputs = [ emacs ]; 15 + 16 + buildPhase = '' 17 + runHook preBuild 18 + emacs -L . --batch -f batch-byte-compile *.el 19 + runHook postBuild 20 + ''; 21 + 22 + installPhase = '' 23 + runHook preInstall 24 + install -d $out/share/emacs/site-lisp 25 + install *.el *.elc $out/share/emacs/site-lisp 26 + runHook postInstall 27 + ''; 28 + 29 + meta = { 30 + description = "Extensions to isearch"; 31 + homepage = "https://www.emacswiki.org/emacs/download/isearch%2b.el"; 32 + license = lib.licenses.gpl2Plus; 33 + maintainers = with lib.maintainers; [ leungbk ]; 34 + platforms = emacs.meta.platforms; 35 + }; 36 + }
+36
pkgs/applications/editors/emacs/elisp-packages/isearch-prop/default.nix
··· 1 + { stdenv, fetchFromGitHub, emacs, lib }: 2 + 3 + stdenv.mkDerivation { 4 + pname = "isearch-prop"; 5 + version = "2019-05-01"; 6 + 7 + src = fetchFromGitHub { 8 + owner = "emacsmirror"; 9 + repo = "isearch-prop"; 10 + rev = "4a2765f835dd115d472142da05215c4c748809f4"; 11 + sha256 = "sha256-A1Kt4nm7iRV9J5yaLupwiNL5g7ddZvQs79dggmqZ7Rk="; 12 + }; 13 + 14 + buildInputs = [ emacs ]; 15 + 16 + buildPhase = '' 17 + runHook preBuild 18 + emacs -L . --batch -f batch-byte-compile *.el 19 + runHook postBuild 20 + ''; 21 + 22 + installPhase = '' 23 + runHook preInstall 24 + install -d $out/share/emacs/site-lisp 25 + install *.el *.elc $out/share/emacs/site-lisp 26 + runHook postInstall 27 + ''; 28 + 29 + meta = { 30 + description = "Search text- or overlay-property contexts"; 31 + homepage = "https://www.emacswiki.org/emacs/download/isearch-prop.el"; 32 + license = lib.licenses.gpl3Plus; 33 + maintainers = with lib.maintainers; [ leungbk ]; 34 + platforms = emacs.meta.platforms; 35 + }; 36 + }
+14
pkgs/applications/editors/emacs/elisp-packages/manual-packages.nix
··· 65 65 }; 66 66 }; 67 67 68 + apheleia = callPackage ./apheleia {}; 69 + 68 70 emacspeak = callPackage ./emacspeak {}; 69 71 70 72 ess-R-object-popup = 71 73 callPackage ./ess-R-object-popup { }; 74 + 75 + evil-markdown = callPackage ./evil-markdown { }; 72 76 73 77 font-lock-plus = callPackage ./font-lock-plus { }; 74 78 ··· 88 92 }; 89 93 }; 90 94 95 + git-undo = callPackage ./git-undo { }; 96 + 91 97 haskell-unicode-input-method = melpaBuild { 92 98 pname = "emacs-haskell-unicode-input-method"; 93 99 version = "20110905.2307"; ··· 111 117 112 118 helm-words = callPackage ./helm-words { }; 113 119 120 + isearch-plus = callPackage ./isearch-plus { }; 121 + 122 + isearch-prop = callPackage ./isearch-prop { }; 123 + 114 124 jam-mode = callPackage ./jam-mode { }; 115 125 116 126 llvm-mode = trivialBuild { ··· 177 187 178 188 }; 179 189 190 + mu4e-patch = callPackage ./mu4e-patch { }; 191 + 180 192 org-mac-link = 181 193 callPackage ./org-mac-link { }; 182 194 ··· 205 217 sv-kalender = callPackage ./sv-kalender { }; 206 218 207 219 tramp = callPackage ./tramp { }; 220 + 221 + youtube-dl = callPackage ./youtube-dl { }; 208 222 209 223 zeitgeist = callPackage ./zeitgeist { }; 210 224
+38
pkgs/applications/editors/emacs/elisp-packages/mu4e-patch/default.nix
··· 1 + { stdenv, fetchFromGitHub, emacs, lib }: 2 + 3 + stdenv.mkDerivation { 4 + pname = "mu4e-patch"; 5 + version = "2019-05-09"; 6 + 7 + src = fetchFromGitHub { 8 + owner = "seanfarley"; 9 + repo = "mu4e-patch"; 10 + rev = "522da46c1653b1cacc79cde91d6534da7ae9517d"; 11 + sha256 = "sha256-1lV4dDuCdyCUXi/In2DzYJPEHuAc9Jfbz2ZecNZwn4I="; 12 + }; 13 + 14 + buildInputs = [ 15 + emacs 16 + ]; 17 + 18 + buildPhase = '' 19 + runHook preBuild 20 + emacs -L . --batch -f batch-byte-compile *.el 21 + runHook postBuild 22 + ''; 23 + 24 + installPhase = '' 25 + runHook preInstall 26 + install -d $out/share/emacs/site-lisp 27 + install *.el *.elc $out/share/emacs/site-lisp 28 + runHook postInstall 29 + ''; 30 + 31 + meta = { 32 + description = "Colorize patch emails in mu4e"; 33 + homepage = "https://github.com/seanfarley/mu4e-patch"; 34 + license = lib.licenses.gpl3Plus; 35 + maintainers = with lib.maintainers; [ leungbk ]; 36 + platforms = emacs.meta.platforms; 37 + }; 38 + }
+36
pkgs/applications/editors/emacs/elisp-packages/youtube-dl/default.nix
··· 1 + { stdenv, fetchFromGitHub, emacs, lib }: 2 + 3 + stdenv.mkDerivation { 4 + pname = "youtube-dl"; 5 + version = "2018-10-12"; 6 + 7 + src = fetchFromGitHub { 8 + owner = "skeeto"; 9 + repo = "youtube-dl-emacs"; 10 + rev = "af877b5bc4f01c04fccfa7d47a2c328926f20ef4"; 11 + sha256 = "sha256-Etl95rcoRACDPjcTPQqYK2L+w8OZbOrTrRT0JadMdH4="; 12 + }; 13 + 14 + buildInputs = [ emacs ]; 15 + 16 + buildPhase = '' 17 + runHook preBuild 18 + emacs -L . --batch -f batch-byte-compile *.el 19 + runHook postBuild 20 + ''; 21 + 22 + installPhase = '' 23 + runHook preInstall 24 + install -d $out/share/emacs/site-lisp 25 + install *.el *.elc $out/share/emacs/site-lisp 26 + runHook postInstall 27 + ''; 28 + 29 + meta = { 30 + description = "Emacs frontend to the youtube-dl utility"; 31 + homepage = "https://github.com/skeeto/youtube-dl-emacs"; 32 + license = lib.licenses.unlicense; 33 + maintainers = with lib.maintainers; [ leungbk ]; 34 + platforms = emacs.meta.platforms; 35 + }; 36 + }
+2 -2
pkgs/desktops/gnome/apps/gnome-boxes/default.nix
··· 54 54 55 55 stdenv.mkDerivation rec { 56 56 pname = "gnome-boxes"; 57 - version = "40.1"; 57 + version = "40.2"; 58 58 59 59 src = fetchurl { 60 60 url = "mirror://gnome/sources/${pname}/${lib.versions.major version}/${pname}-${version}.tar.xz"; 61 - sha256 = "seKPLH+3a/T7uGLQ1S6BG5TL6f8W8GdAiWRWhpCILvg="; 61 + sha256 = "hzN1mi2GpWNnWWpTSQRjO4HKqlxFpWNtsulZDHFK6Nk="; 62 62 }; 63 63 64 64 doCheck = true;
+2 -2
pkgs/desktops/gnome/apps/gnome-calendar/default.nix
··· 24 24 25 25 stdenv.mkDerivation rec { 26 26 pname = "gnome-calendar"; 27 - version = "40.1"; 27 + version = "40.2"; 28 28 29 29 src = fetchurl { 30 30 url = "mirror://gnome/sources/${pname}/${lib.versions.major version}/${pname}-${version}.tar.xz"; 31 - sha256 = "2M30n57uHDo8aZHDL4VjxKfE2w23ymPOUcyRjkM7M6U="; 31 + sha256 = "njcB/UoOWJgA0iUgN3BkTzHVI0ZV9UqDqF/wVW3X6jM="; 32 32 }; 33 33 34 34 patches = [
+2 -2
pkgs/desktops/gnome/apps/gnome-maps/default.nix
··· 29 29 30 30 stdenv.mkDerivation rec { 31 31 pname = "gnome-maps"; 32 - version = "40.1"; 32 + version = "40.2"; 33 33 34 34 src = fetchurl { 35 35 url = "mirror://gnome/sources/${pname}/${lib.versions.major version}/${pname}-${version}.tar.xz"; 36 - sha256 = "sha256-mAXUwFs6NpV0bTdisoFr/+bZ19VuF7y7nZ1B3C0CYxo="; 36 + sha256 = "sha256-g+gVAFTQxLWmPJoJvyx9+YmuaQ7Kwb5r97ExKqpjm9Q="; 37 37 }; 38 38 39 39 doCheck = true;
+2 -2
pkgs/desktops/gnome/core/epiphany/default.nix
··· 37 37 38 38 stdenv.mkDerivation rec { 39 39 pname = "epiphany"; 40 - version = "40.1"; 40 + version = "40.2"; 41 41 42 42 src = fetchurl { 43 43 url = "mirror://gnome/sources/${pname}/${lib.versions.major version}/${pname}-${version}.tar.xz"; 44 - sha256 = "1l0sb1xg16g4wg3z99xb0w2kbyczbn7q4mphs3w4lxq22xml4sk9"; 44 + sha256 = "dRGeIgZWV89w7ytgPU9zg1VzvQNPHmGMD2YkeP1saDU="; 45 45 }; 46 46 47 47 nativeBuildInputs = [
+2 -2
pkgs/desktops/gnome/core/evolution-data-server/default.nix
··· 6 6 7 7 stdenv.mkDerivation rec { 8 8 pname = "evolution-data-server"; 9 - version = "3.40.1"; 9 + version = "3.40.2"; 10 10 11 11 outputs = [ "out" "dev" ]; 12 12 13 13 src = fetchurl { 14 14 url = "mirror://gnome/sources/evolution-data-server/${lib.versions.majorMinor version}/${pname}-${version}.tar.xz"; 15 - sha256 = "08iykha7zhk21b3axsp3v1jfwda612v0m8rz8zlzppm5i8s5ziza"; 15 + sha256 = "7IKVFjnzKlzs6AqLC5qj9mt9MY4+4sHDUjTy4r3opBg="; 16 16 }; 17 17 18 18 patches = [
+2 -2
pkgs/desktops/gnome/core/gnome-software/default.nix
··· 43 43 44 44 stdenv.mkDerivation rec { 45 45 pname = "gnome-software"; 46 - version = "40.1"; 46 + version = "40.2"; 47 47 48 48 src = fetchurl { 49 49 url = "mirror://gnome/sources/gnome-software/${lib.versions.major version}/${pname}-${version}.tar.xz"; 50 - sha256 = "16q2902swxsjdxb1nj335sv1bb76rvq4w6dn4yszkwf3s0fd86in"; 50 + sha256 = "y9HdKguvw/U93kIAPEpKA3RsuNZNxdJ+uNvmc27nJ5Y="; 51 51 }; 52 52 53 53 patches = [
+71
pkgs/development/compilers/llvm/12/compiler-rt/darwin-targetconditionals.patch
··· 1 + diff --git a/lib/sanitizer_common/sanitizer_mac.cpp b/lib/sanitizer_common/sanitizer_mac.cpp 2 + --- a/lib/sanitizer_common/sanitizer_mac.cpp 3 + +++ b/lib/sanitizer_common/sanitizer_mac.cpp 4 + @@ -613,9 +613,15 @@ HandleSignalMode GetHandleSignalMode(int signum) { 5 + // Offset example: 6 + // XNU 17 -- macOS 10.13 -- iOS 11 -- tvOS 11 -- watchOS 4 7 + constexpr u16 GetOSMajorKernelOffset() { 8 + - if (TARGET_OS_OSX) return 4; 9 + - if (TARGET_OS_IOS || TARGET_OS_TV) return 6; 10 + - if (TARGET_OS_WATCH) return 13; 11 + +#if TARGET_OS_OSX 12 + + return 4; 13 + +#endif 14 + +#if TARGET_OS_IOS || TARGET_OS_TV 15 + + return 6; 16 + +#endif 17 + +#if TARGET_OS_WATCH 18 + + return 13; 19 + +#endif 20 + } 21 + 22 + using VersStr = char[64]; 23 + @@ -627,13 +633,13 @@ static uptr ApproximateOSVersionViaKernelVersion(VersStr vers) { 24 + u16 os_major = kernel_major - offset; 25 + 26 + const char *format = "%d.0"; 27 + - if (TARGET_OS_OSX) { 28 + - if (os_major >= 16) { // macOS 11+ 29 + - os_major -= 5; 30 + - } else { // macOS 10.15 and below 31 + - format = "10.%d"; 32 + - } 33 + +#if TARGET_OS_OSX 34 + + if (os_major >= 16) { // macOS 11+ 35 + + os_major -= 5; 36 + + } else { // macOS 10.15 and below 37 + + format = "10.%d"; 38 + } 39 + +#endif 40 + return internal_snprintf(vers, sizeof(VersStr), format, os_major); 41 + } 42 + 43 + @@ -681,15 +687,14 @@ void ParseVersion(const char *vers, u16 *major, u16 *minor) { 44 + // Aligned versions example: 45 + // macOS 10.15 -- iOS 13 -- tvOS 13 -- watchOS 6 46 + static void MapToMacos(u16 *major, u16 *minor) { 47 + - if (TARGET_OS_OSX) 48 + - return; 49 + - 50 + - if (TARGET_OS_IOS || TARGET_OS_TV) 51 + +#if !TARGET_OS_OSX 52 + +#if TARGET_OS_IOS || TARGET_OS_TV 53 + *major += 2; 54 + - else if (TARGET_OS_WATCH) 55 + +#elif TARGET_OS_WATCH 56 + *major += 9; 57 + - else 58 + +#else 59 + UNREACHABLE("unsupported platform"); 60 + +#endif 61 + 62 + if (*major >= 16) { // macOS 11+ 63 + *major -= 5; 64 + @@ -697,6 +702,7 
@@ static void MapToMacos(u16 *major, u16 *minor) { 65 + *minor = *major; 66 + *major = 10; 67 + } 68 + +#endif 69 + } 70 + 71 + static MacosVersion GetMacosAlignedVersionInternal() {
+2
pkgs/development/compilers/llvm/12/compiler-rt/default.nix
··· 59 59 # extra `/`. 60 60 ./normalize-var.patch 61 61 ]# ++ lib.optional stdenv.hostPlatform.isMusl ./sanitizers-nongnu.patch 62 + # Prevent a compilation error on darwin 63 + ++ lib.optional stdenv.hostPlatform.isDarwin ./darwin-targetconditionals.patch 62 64 ++ lib.optional stdenv.hostPlatform.isAarch32 ./armv7l.patch; 63 65 64 66 # TSAN requires XPC on Darwin, which we have no public/free source files for. We can depend on the Apple frameworks
+2 -2
pkgs/development/interpreters/erlang/R21.nix
··· 1 1 { mkDerivation }: 2 2 3 3 mkDerivation { 4 - version = "21.3.8.23"; 5 - sha256 = "sha256-zIEXn2HuXeRKHfXmm0AAv9rEqqc4gIgaYek0hSUK5YU="; 4 + version = "21.3.8.24"; 5 + sha256 = "sha256-FNs+M4KFFKzfb4EG513HtyQ9eRRtxSPMpYq0bmRgY3g="; 6 6 }
+2 -2
pkgs/development/libraries/cpp-utilities/default.nix
··· 7 7 8 8 stdenv.mkDerivation rec { 9 9 pname = "cpp-utilities"; 10 - version = "5.10.3"; 10 + version = "5.10.4"; 11 11 12 12 src = fetchFromGitHub { 13 13 owner = "Martchus"; 14 14 repo = pname; 15 15 rev = "v${version}"; 16 - sha256 = "sha256-bEdDRvm5W12wJnW4xC+AcRLevZ0H7C625eknKzNrLLU="; 16 + sha256 = "sha256-pZh/NbTzQR2kjMeauv1HcRn0hDBaCNRbaZ3+1qs5rxU="; 17 17 }; 18 18 19 19 nativeBuildInputs = [ cmake ];
+2 -2
pkgs/development/tools/go-toml/default.nix
··· 2 2 3 3 buildGoPackage rec { 4 4 pname = "go-toml"; 5 - version = "1.9.1"; 5 + version = "1.9.2"; 6 6 7 7 src = fetchFromGitHub { 8 8 owner = "pelletier"; 9 9 repo = pname; 10 10 rev = "v${version}"; 11 - sha256 = "sha256-O3gfQon8ktObLVED4A5sCSOw8K1NIXP3a5AsLq3Svb4="; 11 + sha256 = "sha256-x740f6I+szhq4mEsed4bsXcC8PvzF6PKFJNJ9SKMGIE="; 12 12 }; 13 13 14 14 goPackagePath = "github.com/pelletier/go-toml";
+2 -2
pkgs/development/tools/lazygit/default.nix
··· 2 2 3 3 buildGoModule rec { 4 4 pname = "lazygit"; 5 - version = "0.28.1"; 5 + version = "0.28.2"; 6 6 7 7 src = fetchFromGitHub { 8 8 owner = "jesseduffield"; 9 9 repo = pname; 10 10 rev = "v${version}"; 11 - sha256 = "sha256-tan8ksSyMIeDL7oRAWd3Qtz/sKbhAAfY7IknfwjyvgQ="; 11 + sha256 = "sha256-s5Ou0FhL9+2/xm7lKMG/3ya5P8idI0cgtJ28cV37pJQ="; 12 12 }; 13 13 14 14 vendorSha256 = null;
+3
pkgs/development/tools/parsing/tree-sitter/grammar.nix
··· 1 1 { stdenv 2 2 , tree-sitter 3 + , libcxx 4 + , lib 3 5 }: 4 6 5 7 # Build a parser grammar and put the resulting shared object in `$out/parser` ··· 27 29 "${source}/${location}" 28 30 ; 29 31 32 + NIX_CFLAGS_COMPILE = lib.optionalString stdenv.isDarwin "-I${lib.getDev libcxx}/include/c++/v1"; 30 33 buildInputs = [ tree-sitter ]; 31 34 32 35 dontUnpack = true;
+4 -4
pkgs/games/stockfish/default.nix
··· 12 12 if stdenv.isi686 then "x86-32" else 13 13 if stdenv.isAarch64 then "armv8" else 14 14 "unknown"; 15 - version = "12"; 15 + version = "13"; 16 16 17 - nnueFile = "nn-82215d0fd0df.nnue"; 17 + nnueFile = "nn-62ef826d1a6d.nnue"; 18 18 nnue = fetchurl { 19 19 name = nnueFile; 20 20 url = "https://tests.stockfishchess.org/api/nn/${nnueFile}"; 21 - sha256 = "1r4yqrh4di05syyhl84hqcz84djpbd605b27zhbxwg6zs07ms8c2"; 21 + sha256 = "0qsy9rr4zgxrpgwhwbi96z01a2560am2b00q2klbj4bd39nq5vv2"; 22 22 }; 23 23 in 24 24 ··· 28 28 29 29 src = fetchurl { 30 30 url = "https://github.com/official-stockfish/Stockfish/archive/sf_${version}.tar.gz"; 31 - sha256 = "16980aicm5i6i9252239q4f9bcxg1gnqkv6nphrmpz4drg8i3v6i"; 31 + sha256 = "0qhxp2w543psanzhzn8jhfafx8aip57v9nsvafbwa5xynchlgl8m"; 32 32 }; 33 33 34 34 # This addresses a linker issue with Darwin
+3 -3
pkgs/tools/misc/silicon/default.nix
··· 18 18 19 19 rustPlatform.buildRustPackage rec { 20 20 pname = "silicon"; 21 - version = "0.4.1"; 21 + version = "0.4.2"; 22 22 23 23 src = fetchFromGitHub { 24 24 owner = "Aloxaf"; 25 25 repo = "silicon"; 26 26 rev = "v${version}"; 27 - sha256 = "sha256-ci0gq4rOQHBmFPvhXZseIlwnqAWd06/qg/i/luhV79s="; 27 + sha256 = "sha256-k+p8AEEL1BBJTmPc58QoIk7EOzu8QKdG00RQ58EN3bg="; 28 28 }; 29 29 30 - cargoSha256 = "sha256-sUPOf9er+BOMqDJ8C6+Xjjqj6NQUV2JTzGA4yUWtDWM="; 30 + cargoSha256 = "sha256-vpegobS7lpRkt/oZePW9WggYeg0JXDte8fQP/bf7oAI="; 31 31 32 32 buildInputs = [ llvmPackages.libclang expat freetype fira-code ] 33 33 ++ lib.optionals stdenv.isLinux [ libxcb ]