Compare commits
395 Commits
v12.0.2...release-pl
Commit list: 04eb33b165 through 9d3662c3ea (only the SHA1 column of the commit table was captured; author, date, and message are missing).
.github/ISSUE_TEMPLATE/bug_report.md (14 changed lines, vendored)
@@ -2,7 +2,7 @@
name: Bug report
about: Topgrade is misbehaving
title: ''
labels: 'bug'
labels: 'C-bug'
assignees: ''

---
@@ -46,6 +46,18 @@ If you know the possible cause of the issue, please tell us.
Execute the erroneous command directly to see if the problem persists
-->
- [ ] Yes
- [ ] No

## Did you run topgrade through `Remote Execution`

- [ ] Yes
- [ ] No

If yes, does the issue still occur when you run topgrade directly in your
remote host

- [ ] Yes
- [ ] No

## Configuration file (Optional)
<!--

.github/ISSUE_TEMPLATE/feature_request.md (14 changed lines, vendored)
@@ -2,16 +2,20 @@
name: Feature request
about: Can you please support...?
title: ''
labels: ''
labels: 'C-feature request'
assignees: ''

---

## I want to suggest a new step
### Which tool is this about? Where is its repository?
### Which operating systems are supported by this tool?
### What should Topgrade do to figure out if the tool needs to be invoked?
### Which exact commands should Topgrade run?

* Which tool is this about? Where is its repository?
* Which operating systems are supported by this tool?
* What should Topgrade do to figure out if the tool needs to be invoked?
* Which exact commands should Topgrade run?
* Does it have a `--dry-run` option? i.e., print what should be done and exit
* Does it need the user to confirm the execution? And does it provide a `--yes`
  option to skip this step?

## I want to suggest some general feature
Topgrade should...

.github/PULL_REQUEST_TEMPLATE.md (20 changed lines, vendored)
@@ -1,13 +1,19 @@
## Standards checklist:
## What does this PR do

- [ ] The PR title is descriptive.

## Standards checklist

- [ ] The PR title is descriptive
- [ ] I have read `CONTRIBUTING.md`
- [ ] The code compiles (`cargo build`)
- [ ] The code passes rustfmt (`cargo fmt`)
- [ ] The code passes clippy (`cargo clippy`)
- [ ] The code passes tests (`cargo test`)
- [ ] *Optional:* I have tested the code myself
- [ ] I also tested that Topgrade skips the step where needed
- [ ] If this PR introduces new user-facing messages they are translated

## For new steps

- [ ] *Optional:* Topgrade skips this step where needed
- [ ] *Optional:* The `--dry-run` option works with this step
- [ ] *Optional:* The `--yes` option works with this step if it is supported by
  the underlying command

If you developed a feature or a bug fix for someone else and you do not have the
means to test it, please tag this person here.

.github/dependabot.yml (24 changed lines, new file, vendored)
@@ -0,0 +1,24 @@
# Set update schedule for GitHub Actions

version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

  - package-ecosystem: cargo
    directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
      time: "06:00"
      timezone: "UTC"
    versioning-strategy: increase
    labels: ["dependencies", "cargo"]
    commit-message:
      prefix: "deps(cargo)"
      include: "scope"
    groups:
      cargo-minor-patch:
        update-types: ["minor", "patch"]

.github/workflows/check-and-lint.yaml (91 changed lines, deleted, vendored)
@@ -1,91 +0,0 @@
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
name: CI
|
||||
|
||||
env:
|
||||
RUST_VER: '1.71.0'
|
||||
CROSS_VER: '0.2.5'
|
||||
CARGO_NET_RETRY: 3
|
||||
|
||||
jobs:
|
||||
fmt:
|
||||
name: Rustfmt
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: '${{ env.RUST_VER }}'
|
||||
components: rustfmt
|
||||
|
||||
- name: Run cargo fmt
|
||||
env:
|
||||
TERM: xterm-256color
|
||||
run: |
|
||||
cargo fmt --all -- --check
|
||||
|
||||
main:
|
||||
needs: fmt
|
||||
name: ${{ matrix.target_name }} (check, clippy)
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- target: x86_64-linux-android
|
||||
target_name: Android
|
||||
use_cross: true
|
||||
os: ubuntu-20.04
|
||||
|
||||
- target: x86_64-unknown-freebsd
|
||||
target_name: FreeBSD
|
||||
use_cross: true
|
||||
os: ubuntu-20.04
|
||||
|
||||
- target: x86_64-unknown-linux-gnu
|
||||
target_name: Linux
|
||||
os: ubuntu-20.04
|
||||
|
||||
- target: x86_64-apple-darwin
|
||||
target_name: macOS
|
||||
os: macos-11
|
||||
|
||||
- target: x86_64-unknown-netbsd
|
||||
target_name: NetBSD
|
||||
use_cross: true
|
||||
os: ubuntu-20.04
|
||||
|
||||
- target: x86_64-pc-windows-msvc
|
||||
target_name: Windows
|
||||
os: windows-2019
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: '${{ env.RUST_VER }}'
|
||||
components: clippy
|
||||
|
||||
- name: Setup Rust Cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
prefix-key: ${{ matrix.target }}
|
||||
|
||||
- name: Setup cross
|
||||
if: matrix.use_cross == true
|
||||
run: curl -fL --retry 3 https://github.com/cross-rs/cross/releases/download/v${{ env.CROSS_VER }}/cross-x86_64-unknown-linux-musl.tar.gz | tar vxz -C /usr/local/bin
|
||||
|
||||
- name: Run cargo check
|
||||
run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} check --locked --target ${{ matrix.target }}
|
||||
|
||||
- name: Run cargo clippy
|
||||
run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} clippy --locked --target ${{ matrix.target }} --all-features -- -D warnings
|
||||
.github/workflows/check-semver.yml (27 changed lines, deleted, vendored)
@@ -1,27 +0,0 @@
|
||||
on:
|
||||
release:
|
||||
types: [published, edited]
|
||||
|
||||
name: Check SemVer compliance
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: nightly-2022-08-03
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
|
||||
semver:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: install
|
||||
args: --git https://github.com/rust-lang/rust-semverver
|
||||
- run: eval "current_version=$(grep -e '^version = .*$' Cargo.toml | cut -d ' ' -f 3)"
|
||||
- run: cargo semver | tee semver_out
|
||||
- run: (head -n 1 semver_out | grep "\-> $current_version") || (echo "versioning mismatch" && return 1)
|
||||
@@ -1,4 +1,4 @@
name: Test Configuration File Creation
name: Check config file creation if not exists

on:
  pull_request:
@@ -7,15 +7,18 @@ env:
  CARGO_TERM_COLOR: always


permissions:
  contents: read

jobs:
  TestConfig:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v5.0.0
      - run: |
          CONFIG_PATH=~/.config/topgrade.toml;
          if [ -f "$CONFIG_PATH" ]; then rm $CONFIG_PATH; fi
          cargo build;
          ./target/debug/topgrade --dry-run --only system;
          TOPGRADE_SKIP_BRKC_NOTIFY=true ./target/debug/topgrade --dry-run --only system;
          stat $CONFIG_PATH;

.github/workflows/check_i18n.yml (25 changed lines, new file, vendored)
@@ -0,0 +1,25 @@
on:
  pull_request:
  push:
    branches:
      - main

name: Check i18n

permissions:
  contents: read

jobs:
  check_locale:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v5.0.0

      - name: Install checker
        # Build it with the dev profile as this is faster and the checker still works
        run: |
          cargo install --git https://github.com/topgrade-rs/topgrade_i18n_locale_checker --profile dev

      - name: Run the checker
        run: topgrade_i18n_locale_checker --locale-file ./locales/app.yml --rust-src-to-check ./src

.github/workflows/check_security_vulnerability.yml (35 changed lines, new file, vendored)
@@ -0,0 +1,35 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Check Security Vulnerability

on:
  pull_request:
  push:
    branches:
      - main

permissions:
  contents: read

jobs:
  lint:
    name: DevSkim
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v5.0.0

      - name: Run DevSkim scanner
        uses: microsoft/DevSkim-Action@4b5047945a44163b94642a1cecc0d93a3f428cc6 # v1.0.16

      - name: Upload DevSkim scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v4.31.0
        with:
          sarif_file: devskim-results.sarif

.github/workflows/ci.yml (149 changed lines, new file, vendored)
@@ -0,0 +1,149 @@
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
name: CI
|
||||
|
||||
env:
|
||||
CROSS_VER: '0.2.5'
|
||||
CARGO_NET_RETRY: 3
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
fmt:
|
||||
name: Rustfmt
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v5.0.0
|
||||
|
||||
- name: Run cargo fmt
|
||||
env:
|
||||
TERM: xterm-256color
|
||||
run: |
|
||||
rustup component add rustfmt
|
||||
cargo fmt --all -- --check
|
||||
|
||||
custom-checks:
|
||||
name: Custom checks
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v5.0.0
|
||||
|
||||
- name: Check if `Step` enum is sorted
|
||||
run: |
|
||||
ENUM_NAME="Step"
|
||||
FILE="src/step.rs"
|
||||
awk "/enum $ENUM_NAME/,/}/" "$FILE" | \
|
||||
grep -E '^\s*[A-Za-z_][A-Za-z0-9_]*\s*,?$' | \
|
||||
sed 's/[, ]//g' > original.txt
|
||||
sort original.txt > sorted.txt
|
||||
diff original.txt sorted.txt
|
||||
|
||||
- name: Check if `Step::run()`'s match is sorted
|
||||
run: |
|
||||
FILE="src/step.rs"
|
||||
awk '/[[:alpha:]] =>/{print $1}' $FILE > original.txt
|
||||
sort original.txt > sorted.txt
|
||||
diff original.txt sorted.txt
|
||||
|
||||
- name: Check if `default_steps` contains every step
|
||||
run: |
|
||||
# Extract all variants from enum Step
|
||||
all_variants=$(sed -n '/^pub enum Step {/,/^}/p' src/step.rs | grep -Po '^\s*\K[A-Z][A-Za-z0-9_]*' | sort)
|
||||
|
||||
# Extract variants used inside default_steps
|
||||
used_variants=$(sed -n '/^pub(crate) fn default_steps()/,/^}/p' src/step.rs | \
|
||||
grep -Po '\b[A-Z][A-Za-z0-9_]*\b' | \
|
||||
grep -Fx -f <(echo "$all_variants") | \
|
||||
sort)
|
||||
|
||||
# Check for missing variants
|
||||
missing=$(comm -23 <(echo "$all_variants") <(echo "$used_variants"))
|
||||
if [[ -z "$missing" ]]; then
|
||||
echo "All variants are used."
|
||||
else
|
||||
echo "Missing variants:"
|
||||
echo "$missing"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check for duplicates
|
||||
duplicates=$(echo "$used_variants" | uniq -c | awk '$1 > 1 {print $2}')
|
||||
if [[ -z "$duplicates" ]]; then
|
||||
echo "No duplicates found."
|
||||
else
|
||||
echo "Duplicates found:"
|
||||
echo "$duplicates"
|
||||
# We allow duplicates, but lets keep this check for potential future usefulness
|
||||
# exit 1
|
||||
fi
|
||||
|
||||
main:
|
||||
needs: [ fmt, custom-checks ]
|
||||
name: ${{ matrix.target_name }} (check, clippy)
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- target: x86_64-linux-android
|
||||
target_name: Android
|
||||
use_cross: true
|
||||
os: ubuntu-latest
|
||||
|
||||
- target: x86_64-unknown-freebsd
|
||||
target_name: FreeBSD
|
||||
use_cross: true
|
||||
os: ubuntu-latest
|
||||
|
||||
- target: x86_64-unknown-linux-gnu
|
||||
target_name: Linux
|
||||
os: ubuntu-latest
|
||||
|
||||
- target: x86_64-apple-darwin
|
||||
target_name: macOS-x86_64
|
||||
os: macos-13
|
||||
|
||||
- target: aarch64-apple-darwin
|
||||
target_name: macOS-aarch64
|
||||
os: macos-latest
|
||||
|
||||
- target: x86_64-unknown-netbsd
|
||||
target_name: NetBSD
|
||||
use_cross: true
|
||||
os: ubuntu-latest
|
||||
|
||||
- target: x86_64-pc-windows-msvc
|
||||
target_name: Windows
|
||||
os: windows-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v5.0.0
|
||||
|
||||
- name: Setup Rust Cache
|
||||
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
with:
|
||||
prefix-key: ${{ matrix.target }}
|
||||
|
||||
- name: Setup cross
|
||||
if: matrix.use_cross == true
|
||||
run: curl -fL --retry 3 https://github.com/cross-rs/cross/releases/download/v${{ env.CROSS_VER }}/cross-x86_64-unknown-linux-musl.tar.gz | tar vxz -C /usr/local/bin
|
||||
|
||||
- name: Run cargo/cross check
|
||||
run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} check --locked --target ${{ matrix.target }}
|
||||
|
||||
- name: Run cargo/cross clippy
|
||||
run: |
|
||||
rustup component add clippy
|
||||
${{ matrix.use_cross == true && 'cross' || 'cargo' }} clippy --locked --target ${{ matrix.target }} --all-features -- -D warnings
|
||||
|
||||
- name: Run cargo test
|
||||
# ONLY run test with cargo
|
||||
if: matrix.use_cross == false
|
||||
run: cargo test --locked --target ${{ matrix.target }}
|
||||
.github/workflows/code-coverage.yml (59 changed lines, deleted, vendored)
@@ -1,59 +0,0 @@
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
name: Test with Code Coverage
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Test
|
||||
env:
|
||||
PROJECT_NAME_UNDERSCORE: topgrade
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUSTFLAGS: -Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort
|
||||
RUSTDOCFLAGS: -Cpanic=abort
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: nightly
|
||||
override: true
|
||||
- name: Cache dependencies
|
||||
uses: actions/cache@v2
|
||||
env:
|
||||
cache-name: cache-dependencies
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/.crates.toml
|
||||
~/.cargo/.crates2.json
|
||||
~/.cargo/bin
|
||||
~/.cargo/registry/index
|
||||
~/.cargo/registry/cache
|
||||
target
|
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('Cargo.lock') }}
|
||||
- name: Generate test result and coverage report
|
||||
run: |
|
||||
cargo install cargo2junit grcov;
|
||||
cargo test $CARGO_OPTIONS -- -Z unstable-options --format json | cargo2junit > results.xml;
|
||||
zip -0 ccov.zip `find . \( -name "$PROJECT_NAME_UNDERSCORE*.gc*" \) -print`;
|
||||
grcov ccov.zip -s . -t lcov --llvm --ignore-not-existing --ignore "/*" --ignore "tests/*" -o lcov.info;
|
||||
- name: Upload test results
|
||||
uses: EnricoMi/publish-unit-test-result-action@v1
|
||||
with:
|
||||
check_name: Test Results
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: results.xml
|
||||
- name: Upload to CodeCov
|
||||
uses: codecov/codecov-action@v1
|
||||
with:
|
||||
# required for private repositories:
|
||||
# token: ${{ secrets.CODECOV_TOKEN }}
|
||||
files: ./lcov.info
|
||||
fail_ci_if_error: true
|
||||
.github/workflows/crates-publish.yml (29 changed lines, deleted, vendored)
@@ -1,29 +0,0 @@
|
||||
on:
|
||||
# workflow_run:
|
||||
# workflows: ["Check SemVer compliance"]
|
||||
# types:
|
||||
# - completed
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
name: Publish to crates.io on release
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: katyo/publish-crates@v1
|
||||
with:
|
||||
dry-run: true
|
||||
check-repo: ${{ github.event_name == 'push' }}
|
||||
registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
|
||||
ignore-unpublished-changes: true
|
||||
.github/workflows/create_release_assets.yml (277 changed lines, new file, vendored)
@@ -0,0 +1,277 @@
|
||||
name: Publish release files for CD native and non-cd-native environments
|
||||
|
||||
on:
|
||||
repository_dispatch:
|
||||
types: [ release-created ]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
# Publish release files for CD native environments
|
||||
native_build:
|
||||
permissions:
|
||||
# Use to sign the release artifacts
|
||||
id-token: write
|
||||
# Used to upload release artifacts
|
||||
contents: write
|
||||
# Used to generate artifact attestations
|
||||
attestations: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# Use the Ubuntu 22.04 image to link with a low version of glibc
|
||||
#
|
||||
# https://github.com/topgrade-rs/topgrade/issues/1095
|
||||
platform: [ ubuntu-22.04, macos-latest, macos-13, windows-latest ]
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
|
||||
- name: Install needed components
|
||||
run: |
|
||||
rustup component add rustfmt
|
||||
rustup component add clippy
|
||||
|
||||
- name: Install cargo-deb
|
||||
run: cargo install cargo-deb
|
||||
if: ${{ startsWith(matrix.platform, 'ubuntu-') }}
|
||||
shell: bash
|
||||
|
||||
- name: Check format
|
||||
run: cargo fmt --all -- --check
|
||||
|
||||
- name: Run clippy
|
||||
run: cargo clippy --all-targets --locked -- -D warnings
|
||||
|
||||
- name: Run clippy (All features)
|
||||
run: cargo clippy --all-targets --locked --all-features -- -D warnings
|
||||
|
||||
- name: Run tests
|
||||
run: cargo test
|
||||
|
||||
# Used `https://github.com/BurntSushi/ripgrep/blob/master/.github/workflows/release.yml`
|
||||
# as a reference.
|
||||
- name: Build debug binary to create release assets
|
||||
shell: bash
|
||||
run: |
|
||||
cargo build --all-features
|
||||
bin="target/debug/topgrade"
|
||||
echo "BIN=$bin" >> $GITHUB_ENV
|
||||
|
||||
- name: Create deployment directory
|
||||
shell: bash
|
||||
run: |
|
||||
dir=deployment/deb
|
||||
mkdir -p "$dir"
|
||||
echo "DEPLOY_DIR=$dir" >> $GITHUB_ENV
|
||||
|
||||
- name: Generate shell completions
|
||||
shell: bash
|
||||
run: |
|
||||
"$BIN" --gen-completion bash > "$DEPLOY_DIR/topgrade.bash"
|
||||
"$BIN" --gen-completion fish > "$DEPLOY_DIR/topgrade.fish"
|
||||
"$BIN" --gen-completion zsh > "$DEPLOY_DIR/_topgrade"
|
||||
|
||||
- name: Build in Release profile with all features enabled
|
||||
run: cargo build --release --all-features
|
||||
|
||||
- name: Rename Release (Unix)
|
||||
run: |
|
||||
cargo install default-target
|
||||
mkdir -p assets
|
||||
FILENAME=topgrade-${{ github.event.client_payload.tag }}-$(default-target)
|
||||
mv target/release/topgrade assets
|
||||
cd assets
|
||||
tar --format=ustar -czf $FILENAME.tar.gz topgrade
|
||||
rm topgrade
|
||||
ls .
|
||||
if: ${{ matrix.platform != 'windows-latest' }}
|
||||
shell: bash
|
||||
|
||||
- name: Build Debian-based system binary and create package
|
||||
# First remove the binary built by previous steps
|
||||
# because we don't want the auto-update feature,
|
||||
# then build the new binary without auto-updating.
|
||||
run: |
|
||||
rm -rf target/release
|
||||
cargo build --release
|
||||
cargo deb --no-build --no-strip
|
||||
if: ${{ startsWith(matrix.platform, 'ubuntu-') }}
|
||||
shell: bash
|
||||
|
||||
- name: Move Debian-based system package
|
||||
run: |
|
||||
mkdir -p assets
|
||||
mv target/debian/*.deb assets
|
||||
if: ${{ startsWith(matrix.platform, 'ubuntu-') }}
|
||||
shell: bash
|
||||
|
||||
- name: Rename Release (Windows)
|
||||
run: |
|
||||
cargo install default-target
|
||||
mkdir assets
|
||||
FILENAME=topgrade-${{ github.event.client_payload.tag }}-$(default-target)
|
||||
mv target/release/topgrade.exe assets/topgrade.exe
|
||||
cd assets
|
||||
powershell Compress-Archive -Path * -Destination ${FILENAME}.zip
|
||||
rm topgrade.exe
|
||||
ls .
|
||||
if: ${{ matrix.platform == 'windows-latest' }}
|
||||
shell: bash
|
||||
|
||||
- name: Upload assets
|
||||
run:
|
||||
gh release upload "${{ github.event.client_payload.tag }}" assets/*
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate artifact attestations
|
||||
uses: actions/attest-build-provenance@v3.0.0
|
||||
with:
|
||||
subject-path: assets/*
|
||||
|
||||
# Publish release files for non-CD-native environments
|
||||
cross_build:
|
||||
permissions:
|
||||
# Use to sign the release artifacts
|
||||
id-token: write
|
||||
# Used to upload release artifacts
|
||||
contents: write
|
||||
# Used to generate artifact attestations
|
||||
attestations: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target:
|
||||
[
|
||||
"aarch64-unknown-linux-gnu",
|
||||
"armv7-unknown-linux-gnueabihf",
|
||||
"x86_64-unknown-linux-musl",
|
||||
"aarch64-unknown-linux-musl",
|
||||
"x86_64-unknown-freebsd",
|
||||
]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
|
||||
- name: Install needed components
|
||||
run: |
|
||||
rustup component add rustfmt
|
||||
rustup component add clippy
|
||||
|
||||
- name: Install cargo-deb cross compilation dependencies
|
||||
run: sudo apt-get install libc6-arm64-cross libgcc-s1-arm64-cross
|
||||
if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' }}
|
||||
shell: bash
|
||||
|
||||
- name: Install cargo-deb cross compilation dependencies for armv7
|
||||
run: sudo apt-get install libc6-armhf-cross libgcc-s1-armhf-cross
|
||||
if: ${{ matrix.target == 'armv7-unknown-linux-gnueabihf' }}
|
||||
shell: bash
|
||||
|
||||
- name: Install cargo-deb
|
||||
run: cargo install cargo-deb
|
||||
if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'armv7-unknown-linux-gnueabihf' }}
|
||||
shell: bash
|
||||
|
||||
- name: install targets
|
||||
run: rustup target add ${{ matrix.target }}
|
||||
|
||||
- name: install cross
|
||||
uses: taiki-e/install-action@c5b1b6f479c32f356cc6f4ba672a47f63853b13b # v2.62.38
|
||||
with:
|
||||
tool: cross@0.2.5
|
||||
|
||||
- name: Check format
|
||||
run: cross fmt --all -- --check
|
||||
|
||||
- name: Run clippy
|
||||
run: cross clippy --all-targets --locked --target ${{matrix.target}} -- -D warnings
|
||||
|
||||
- name: Run clippy (All features)
|
||||
run: cross clippy --locked --all-features --target ${{matrix.target}} -- -D warnings
|
||||
|
||||
- name: Run tests
|
||||
run: cross test --target ${{matrix.target}}
|
||||
|
||||
# Used `https://github.com/BurntSushi/ripgrep/blob/master/.github/workflows/release.yml`
|
||||
# as a reference.
|
||||
- name: Build debug binary to create release assets
|
||||
shell: bash
|
||||
run: |
|
||||
# This build is not using the target arch since this binary is only needed in CI. It needs
|
||||
# to be the compiled for the runner since it has the run the binary to generate completion
|
||||
# scripts.
|
||||
cargo build --all-features
|
||||
bin="target/debug/topgrade"
|
||||
echo "BIN=$bin" >> $GITHUB_ENV
|
||||
|
||||
- name: Create deployment directory
|
||||
shell: bash
|
||||
run: |
|
||||
dir=deployment/deb
|
||||
mkdir -p "$dir"
|
||||
echo "DEPLOY_DIR=$dir" >> $GITHUB_ENV
|
||||
|
||||
- name: Generate shell completions
|
||||
shell: bash
|
||||
run: |
|
||||
"$BIN" --gen-completion bash > "$DEPLOY_DIR/topgrade.bash"
|
||||
"$BIN" --gen-completion fish > "$DEPLOY_DIR/topgrade.fish"
|
||||
"$BIN" --gen-completion zsh > "$DEPLOY_DIR/_topgrade"
|
||||
|
||||
- name: Build in Release profile with all features enabled
|
||||
run: cross build --release --all-features --target ${{matrix.target}}
|
||||
|
||||
- name: Rename Release
|
||||
run: |
|
||||
mkdir -p assets
|
||||
FILENAME=topgrade-${{ github.event.client_payload.tag }}-${{matrix.target}}
|
||||
mv target/${{matrix.target}}/release/topgrade assets
|
||||
cd assets
|
||||
tar --format=ustar -czf $FILENAME.tar.gz topgrade
|
||||
rm topgrade
|
||||
ls .
|
||||
|
||||
- name: Build Debian-based system package without autoupdate feature
|
||||
# First remove the binary built by previous steps
|
||||
# because we don't want the auto-update feature,
|
||||
# then build the new binary without auto-updating.
|
||||
run: |
|
||||
rm -rf target/${{matrix.target}}
|
||||
cross build --release --target ${{matrix.target}}
|
||||
cargo deb --target=${{matrix.target}} --no-build --no-strip
|
||||
if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'armv7-unknown-linux-gnueabihf' }}
|
||||
shell: bash
|
||||
|
||||
- name: Move Debian-based system package
|
||||
run: |
|
||||
mkdir -p assets
|
||||
mv target/${{matrix.target}}/debian/*.deb assets
|
||||
if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'armv7-unknown-linux-gnueabihf' }}
|
||||
shell: bash
|
||||
|
||||
|
||||
- name: Upload assets
|
||||
run:
|
||||
gh release upload "${{ github.event.client_payload.tag }}" assets/*
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate artifact attestations
|
||||
uses: actions/attest-build-provenance@v3.0.0
|
||||
with:
|
||||
subject-path: assets/*
|
||||
|
||||
triggers:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [ native_build, cross_build ]
|
||||
steps:
|
||||
- name: Trigger workflows
|
||||
run: |
|
||||
gh api repos/${{ github.repository }}/dispatches \
|
||||
-f "event_type=release-assets-built" \
|
||||
-F "client_payload[tag]=${{ github.event.client_payload.tag }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
.github/workflows/dependency-review.yml (22 changed lines, new file, vendored)
@@ -0,0 +1,22 @@
# Dependency Review Action
#
# This Action will scan dependency manifest files that change as part of a Pull Request,
# surfacing known-vulnerable versions of the packages declared or updated in the PR.
# Once installed, if the workflow run is marked as required,
# PRs introducing known-vulnerable packages will be blocked from merging.
#
# Source repository: https://github.com/actions/dependency-review-action
name: 'Dependency Review'
on: [pull_request]

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v5.0.0
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1

.github/workflows/release-cross.yml (70 changed lines, deleted, vendored)
@@ -1,70 +0,0 @@
|
||||
name: Publish release files for non-cd-native environments
|
||||
|
||||
on:
|
||||
# workflow_run:
|
||||
# workflows: ["Check SemVer compliance"]
|
||||
# types:
|
||||
# - completed
|
||||
release:
|
||||
types: [ created ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target: [ "aarch64-unknown-linux-gnu", "armv7-unknown-linux-gnueabihf", "x86_64-unknown-linux-musl", "aarch64-unknown-linux-musl", "x86_64-unknown-freebsd", ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
profile: minimal
|
||||
default: true
|
||||
override: true
|
||||
target: ${{ matrix.target }}
|
||||
components: rustfmt, clippy
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Check format
|
||||
with:
|
||||
use-cross: true
|
||||
command: fmt
|
||||
args: --all -- --check
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run clippy
|
||||
with:
|
||||
command: clippy
|
||||
use-cross: true
|
||||
args: --all-targets --locked --target ${{matrix.target}} -- -D warnings
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run clippy (All features)
|
||||
with:
|
||||
command: clippy
|
||||
use-cross: true
|
||||
args: --locked --all-features --target ${{matrix.target}} -- -D warnings
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run tests
|
||||
with:
|
||||
command: test
|
||||
use-cross: true
|
||||
args: --target ${{matrix.target}}
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Build
|
||||
with:
|
||||
command: build
|
||||
use-cross: true
|
||||
args: --release --all-features --target ${{matrix.target}}
|
||||
- name: Rename Release
|
||||
run: |
|
||||
mkdir assets
|
||||
FILENAME=topgrade-${{github.event.release.tag_name}}-${{matrix.target}}
|
||||
mv target/${{matrix.target}}/release/topgrade assets
|
||||
cd assets
|
||||
tar --format=ustar -czf $FILENAME.tar.gz topgrade
|
||||
rm topgrade
|
||||
ls .
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
files: assets/*
|
||||
.github/workflows/release-plz.yml (63 changed lines, new file, vendored)
@@ -0,0 +1,63 @@
name: Release-plz

on:
  push:
    branches:
      - main

jobs:

  # Release unpublished packages.
  release-plz-release:
    name: Release-plz release
    runs-on: ubuntu-latest
    environment: crates_io
    permissions:
      contents: write
      id-token: write # For trusted publishing
    steps:
      - &checkout
        name: Checkout repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 0
          persist-credentials: false
      - &install-rust
        name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable
      - name: Run release-plz
        id: release-plz
        uses: release-plz/action@v0.5
        with:
          command: release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Trigger workflows
        if: steps.release-plz.outputs.releases_created == 'true'
        run: |
          gh api repos/${{ github.repository }}/dispatches \
            -f "event_type=release-created" \
            -F "client_payload[tag]=${{ fromJSON(steps.release-plz.outputs.releases)[0].tag }}"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  # Create a PR with the new versions and changelog, preparing the next release.
  release-plz-pr:
    name: Release-plz PR
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    concurrency:
      group: release-plz-${{ github.ref }}
      cancel-in-progress: false
    steps:
      - *checkout
      - *install-rust
      - name: Run release-plz
        uses: release-plz/action@v0.5
        with:
          command: release-pr
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/release.yml (77 changed lines, deleted, vendored)
@@ -1,77 +0,0 @@
|
||||
name: Publish release files for CD native environments
|
||||
|
||||
on:
|
||||
# workflow_run:
|
||||
# workflows: ["Check SemVer compliance"]
|
||||
# types:
|
||||
# - completed
|
||||
release:
|
||||
types: [ created ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform: [ ubuntu-latest, macos-latest, windows-latest ]
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
profile: minimal
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Check format
|
||||
with:
|
||||
command: fmt
|
||||
args: --all -- --check
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run clippy
|
||||
with:
|
||||
command: clippy
|
||||
args: --all-targets --locked -- -D warnings
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run clippy (All features)
|
||||
with:
|
||||
command: clippy
|
||||
args: --all-targets --locked --all-features -- -D warnings
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run tests
|
||||
with:
|
||||
command: test
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Build
|
||||
with:
|
||||
command: build
|
||||
args: --release --all-features
|
||||
- name: Rename Release (Unix)
|
||||
run: |
|
||||
cargo install default-target
|
||||
mkdir assets
|
||||
FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
|
||||
mv target/release/topgrade assets
|
||||
cd assets
|
||||
tar --format=ustar -czf $FILENAME.tar.gz topgrade
|
||||
rm topgrade
|
||||
ls .
|
||||
if: ${{ matrix.platform != 'windows-latest' }}
|
||||
shell: bash
|
||||
- name: Rename Release (Windows)
|
||||
run: |
|
||||
cargo install default-target
|
||||
mkdir assets
|
||||
FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
|
||||
mv target/release/topgrade.exe assets/topgrade.exe
|
||||
cd assets
|
||||
powershell Compress-Archive -Path * -Destination ${FILENAME}.zip
|
||||
rm topgrade.exe
|
||||
ls .
|
||||
if: ${{ matrix.platform == 'windows-latest' }}
|
||||
shell: bash
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
files: assets/*
|
||||
.github/workflows/release_to_aur.yml (37 changed lines, new file, vendored)
@@ -0,0 +1,37 @@
name: Publish to AUR

on:
  repository_dispatch:
    types: [ release-assets-built ]

permissions:
  contents: read

jobs:
  aur-publish:
    runs-on: ubuntu-latest
    steps:
      - name: Determine version
        id: determine_version
        run: |
          # tag should be something like "v16.0.4", remove the prefix v here
          tag="${{ github.event.client_payload.tag }}"
          echo "version=${tag#v}" >> $GITHUB_OUTPUT

      - name: Publish source AUR package
        uses: varabyte/update-aur-package@572e31b1972fa289a27b1926c06a489eb89c7fd7
        with:
          version: ${{ steps.determine_version.outputs.version }}
          package_name: topgrade
          commit_username: "Thomas Schönauer"
          commit_email: t.schoenauer@hgs-wt.at
          ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}

      - name: Publish binary AUR package
        uses: varabyte/update-aur-package@572e31b1972fa289a27b1926c06a489eb89c7fd7
        with:
          version: ${{ steps.determine_version.outputs.version }}
          package_name: topgrade-bin
          commit_username: "Thomas Schönauer"
          commit_email: t.schoenauer@hgs-wt.at
          ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}

@@ -1,14 +1,11 @@
name: Publish to Homebrew

on:
  # workflow_run:
  # workflows: ["Check SemVer compliance"]
  # types:
  # - completed
  workflow_dispatch:
  push:
    tags:
      - "v*"
  repository_dispatch:
    types: [ release-created ]

permissions:
  contents: read

jobs:
  homebrew-publish:
@@ -16,10 +13,11 @@ jobs:
    steps:
      - name: Set up Homebrew
        id: set-up-homebrew
        uses: Homebrew/actions/setup-homebrew@master
        uses: Homebrew/actions/setup-homebrew@24a0b15df658487e137fcd20fba32757d41a9411 # master

      - name: Cache Homebrew Bundler RubyGems
        id: cache
        uses: actions/cache@v1
        uses: actions/cache@v4.3.0
        with:
          path: ${{ steps.set-up-homebrew.outputs.gems-path }}
          key: ${{ runner.os }}-rubygems-${{ steps.set-up-homebrew.outputs.gems-hash }}
@@ -28,8 +26,10 @@ jobs:
      - name: Install Homebrew Bundler RubyGems
        if: steps.cache.outputs.cache-hit != 'true'
        run: brew install-bundler-gems

      - name: Bump formulae
        uses: Homebrew/actions/bump-formulae@master
        uses: Homebrew/actions/bump-packages@24a0b15df658487e137fcd20fba32757d41a9411 # master
        continue-on-error: true
        with:
          # Custom GitHub access token with only the 'public_repo' scope enabled
          token: ${{secrets.HOMEBREW_ACCESS_TOKEN}}

.github/workflows/release_to_pypi.yml (107 changed lines, new file, vendored)
@@ -0,0 +1,107 @@
|
||||
name: Update PyPi
|
||||
|
||||
on:
|
||||
repository_dispatch:
|
||||
types: [ release-created ]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
# TODO: make linux/windows/macos/sdist a matrix. See how other workflows do it.
|
||||
linux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [x86_64, x86, aarch64]
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
- name: Build wheels
|
||||
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
args: --release --out dist
|
||||
manylinux: auto
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v5.0.0
|
||||
with:
|
||||
name: wheels-linux-${{ matrix.target }}
|
||||
path: dist
|
||||
|
||||
windows:
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [x64, x86]
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
- name: Build wheels
|
||||
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
args: --release --out dist
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v5.0.0
|
||||
with:
|
||||
name: wheels-windows-${{ matrix.target }}
|
||||
path: dist
|
||||
|
||||
macos:
|
||||
runs-on: macos-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [x86_64, aarch64]
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
- name: Build wheels
|
||||
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
args: --release --out dist
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v5.0.0
|
||||
with:
|
||||
name: wheels-macos-${{ matrix.target }}
|
||||
path: dist
|
||||
|
||||
sdist:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5.0.0
|
||||
- name: Build sdist
|
||||
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
|
||||
with:
|
||||
command: sdist
|
||||
args: --out dist
|
||||
- name: Upload sdist
|
||||
uses: actions/upload-artifact@v5.0.0
|
||||
with:
|
||||
name: wheels-sdist
|
||||
path: dist
|
||||
|
||||
release:
|
||||
name: Release
|
||||
runs-on: ubuntu-latest
|
||||
needs: [linux, windows, macos, sdist]
|
||||
permissions:
|
||||
# Use to sign the release artifacts
|
||||
id-token: write
|
||||
# Used to upload release artifacts
|
||||
contents: write
|
||||
# Used to generate artifact attestation
|
||||
attestations: write
|
||||
steps:
|
||||
- uses: actions/download-artifact@v6.0.0
|
||||
|
||||
- name: Generate artifact attestation
|
||||
uses: actions/attest-build-provenance@v3.0.0
|
||||
with:
|
||||
subject-path: 'wheels-*/*'
|
||||
|
||||
- name: Publish to PyPI
|
||||
uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
|
||||
env:
|
||||
MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
|
||||
with:
|
||||
command: upload
|
||||
args: --non-interactive --skip-existing wheels-*/*
|
||||
.github/workflows/release_to_winget.yml (18 changed lines, new file, vendored)
@@ -0,0 +1,18 @@
name: Publish to WinGet

on:
  repository_dispatch:
    types: [ release-created ]

permissions:
  contents: read

jobs:
  publish:
    runs-on: windows-latest
    steps:
      - uses: vedantmgoyal2009/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f # main
        with:
          identifier: topgrade-rs.topgrade
          max-versions-to-keep: 5 # keep only latest 5 versions
          token: ${{ secrets.WINGET_TOKEN }}

.github/workflows/scorecards.yml (76 changed lines, new file, vendored)
@@ -0,0 +1,76 @@
|
||||
# This workflow uses actions that are not certified by GitHub. They are provided
|
||||
# by a third-party and are governed by separate terms of service, privacy
|
||||
# policy, and support documentation.
|
||||
|
||||
name: Scorecard supply-chain security
|
||||
on:
|
||||
# For Branch-Protection check. Only the default branch is supported. See
|
||||
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
|
||||
branch_protection_rule:
|
||||
# To guarantee Maintained check is occasionally updated. See
|
||||
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
|
||||
schedule:
|
||||
- cron: '20 7 * * 2'
|
||||
push:
|
||||
branches: ["main"]
|
||||
|
||||
# Declare default permissions as read only.
|
||||
permissions: read-all
|
||||
|
||||
jobs:
|
||||
analysis:
|
||||
name: Scorecard analysis
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
# Needed to upload the results to code-scanning dashboard.
|
||||
security-events: write
|
||||
# Needed to publish results and get a badge (see publish_results below).
|
||||
id-token: write
|
||||
contents: read
|
||||
actions: read
|
||||
# To allow GraphQL ListCommits to work
|
||||
issues: read
|
||||
pull-requests: read
|
||||
# To detect SAST tools
|
||||
checks: read
|
||||
|
||||
steps:
|
||||
- name: "Checkout code"
|
||||
uses: actions/checkout@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: "Run analysis"
|
||||
uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3
|
||||
with:
|
||||
results_file: results.sarif
|
||||
results_format: sarif
|
||||
# (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
|
||||
# - you want to enable the Branch-Protection check on a *public* repository, or
|
||||
# - you are installing Scorecards on a *private* repository
|
||||
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
|
||||
# repo_token: ${{ secrets.SCORECARD_TOKEN }}
|
||||
|
||||
# Public repositories:
|
||||
# - Publish results to OpenSSF REST API for easy access by consumers
|
||||
# - Allows the repository to include the Scorecard badge.
|
||||
# - See https://github.com/ossf/scorecard-action#publishing-results.
|
||||
# For private repositories:
|
||||
# - `publish_results` will always be set to `false`, regardless
|
||||
# of the value entered here.
|
||||
publish_results: true
|
||||
|
||||
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
|
||||
# format to the repository Actions tab.
|
||||
- name: "Upload artifact"
|
||||
uses: actions/upload-artifact@v5.0.0
|
||||
with:
|
||||
name: SARIF file
|
||||
path: results.sarif
|
||||
retention-days: 5
|
||||
|
||||
# Upload the results to GitHub's code scanning dashboard.
|
||||
- name: "Upload to code-scanning"
|
||||
uses: github/codeql-action/upload-sarif@v4.31.0
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
.github/workflows/update_aur.yml (22 changed lines, deleted, vendored)
@@ -1,22 +0,0 @@
|
||||
name: Publish to AUR
|
||||
|
||||
on:
|
||||
# workflow_run:
|
||||
# workflows: ["Check SemVer compliance"]
|
||||
# types:
|
||||
# - completed
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
jobs:
|
||||
aur-publish:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Publish AUR package
|
||||
uses: ATiltedTree/create-aur-release@v1
|
||||
with:
|
||||
package_name: topgrade
|
||||
commit_username: "Thomas Schönauer"
|
||||
commit_email: t.schoenauer@hgs-wt.at
|
||||
ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
|
||||
.github/workflows/update_pypi.yml (99 changed lines, deleted, vendored)
@@ -1,99 +0,0 @@
|
||||
name: Update PyPi
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
linux:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [x86_64, x86, aarch64]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Build wheels
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
args: --release --out dist
|
||||
sccache: 'true'
|
||||
manylinux: auto
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
windows:
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [x64, x86]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Build wheels
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
args: --release --out dist
|
||||
sccache: 'true'
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
macos:
|
||||
runs-on: macos-latest
|
||||
strategy:
|
||||
matrix:
|
||||
target: [x86_64, aarch64]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Build wheels
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
target: ${{ matrix.target }}
|
||||
args: --release --out dist
|
||||
sccache: 'true'
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
sdist:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Build sdist
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
command: sdist
|
||||
args: --out dist
|
||||
- name: Upload sdist
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
|
||||
release:
|
||||
name: Release
|
||||
runs-on: ubuntu-latest
|
||||
if: "startsWith(github.ref, 'refs/tags/')"
|
||||
needs: [linux, windows, macos, sdist]
|
||||
steps:
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: wheels
|
||||
- name: Publish to PyPI
|
||||
uses: PyO3/maturin-action@v1
|
||||
env:
|
||||
MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
|
||||
with:
|
||||
command: upload
|
||||
args: --skip-existing *
|
||||
.gitignore (18 changed lines, vendored)
@@ -1,4 +1,20 @@
# JetBrains IDEs
.idea/

/target
# Visual Studio
.vs/

# Visual Studio Code
.vscode/

# Generic build outputs
/build

# Specific for some languages like Rust
/target

# LLVM profiling output
*.profraw

# Backup files for any .rs files in the project
**/*.rs.bk

.pre-commit-config.yaml (25 changed lines, new file)
@@ -0,0 +1,25 @@
repos:
  - repo: https://github.com/gitleaks/gitleaks
    rev: v8.28.0
    hooks:
      - id: gitleaks

  - repo: https://github.com/shellcheck-py/shellcheck-py
    rev: v0.11.0.1
    hooks:
      - id: shellcheck

  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v6.0.0
    hooks:
      - id: end-of-file-fixer
      - id: trailing-whitespace

  - repo: https://github.com/crate-ci/typos
    rev: v1.38.1
    hooks:
      - id: typos


ci:
  autoupdate_commit_msg: "chore(pre-commit): autoupdate"

20
.typos.toml
Normal file
@@ -0,0 +1,20 @@
|
||||
# Typos configuration (minimal, conservative)
|
||||
# Exclude locales and OS fingerprint data to avoid false positives
|
||||
# - Recognize a few project-specific proper nouns
|
||||
|
||||
[files]
|
||||
extend-exclude = [
|
||||
"src/steps/os/os_release/**",
|
||||
"locales/**",
|
||||
# Include only English locale files - TODO: split locales/app.yml into a separate English file
|
||||
# "!locales/en/**"
|
||||
]
|
||||
|
||||
[default]
|
||||
# Mark specific words as always valid by mapping them to themselves
|
||||
check-file = true
|
||||
check-filename = true
|
||||
|
||||
[default.extend-words]
|
||||
# Add project-specific terms that should not be flagged as typos
|
||||
# Example: topgrade = "topgrade"
|
||||
38
.vscode/launch.json
vendored
@@ -1,38 +0,0 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Topgrade",
|
||||
"console": "integratedTerminal",
|
||||
"cargo": {
|
||||
"args": [
|
||||
"build",
|
||||
"--bin=topgrade-rs",
|
||||
"--package=topgrade-rs"
|
||||
],
|
||||
"filter": {
|
||||
"name": "topgrade-rs",
|
||||
"kind": "bin"
|
||||
}
|
||||
},
|
||||
"args": [
|
||||
"--only",
|
||||
"${input:step}",
|
||||
"-v"
|
||||
],
|
||||
"cwd": "${workspaceFolder}"
|
||||
},
|
||||
],
|
||||
"inputs": [
|
||||
{
|
||||
"type": "promptString",
|
||||
"id": "step",
|
||||
"description": "step name",
|
||||
}
|
||||
]
|
||||
}
|
||||
14
.vscode/tasks.json
vendored
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"type": "cargo",
|
||||
"command": "clippy",
|
||||
"problemMatcher": [
|
||||
"$rustc"
|
||||
],
|
||||
"group": "test",
|
||||
"label": "rust: cargo clippy"
|
||||
}
|
||||
]
|
||||
}
|
||||
50
.vscode/topgrade.code-snippets
vendored
@@ -1,50 +0,0 @@
|
||||
{
|
||||
// Place your topgrade workspace snippets here. Each snippet is defined under a snippet name and has a scope, prefix, body and
|
||||
// description. Add comma separated ids of the languages where the snippet is applicable in the scope field. If scope
|
||||
// is left empty or omitted, the snippet gets applied to all languages. The prefix is what is
|
||||
// used to trigger the snippet and the body will be expanded and inserted. Possible variables are:
|
||||
// $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
|
||||
// Placeholders with the same ids are connected.
|
||||
// Example:
|
||||
// "Print to console": {
|
||||
// "scope": "javascript,typescript",
|
||||
// "prefix": "log",
|
||||
// "body": [
|
||||
// "console.log('$1');",
|
||||
// "$2"
|
||||
// ],
|
||||
// "description": "Log output to console"
|
||||
// }
|
||||
"Skip Step": {
|
||||
"scope": "rust",
|
||||
"prefix": "skipstep",
|
||||
"body": [
|
||||
"return Err(SkipStep(format!(\"$1\")).into());"
|
||||
]
|
||||
},
|
||||
"Step": {
|
||||
"scope": "rust",
|
||||
"prefix": "step",
|
||||
"body": [
|
||||
"pub fn $1(ctx: &ExecutionContext) -> Result<()> {",
|
||||
" $0",
|
||||
" Ok(())",
|
||||
"}"
|
||||
]
|
||||
},
|
||||
"Require Binary": {
|
||||
"scope": "rust",
|
||||
"prefix": "req",
|
||||
"description": "Require a binary to be installed",
|
||||
"body": [
|
||||
"let ${1:binary} = require(\"${1:binary}\")?;"
|
||||
]
|
||||
},
|
||||
"macos": {
|
||||
"scope": "rust",
|
||||
"prefix": "macos",
|
||||
"body": [
|
||||
"#[cfg(target_os = \"macos\")]"
|
||||
]
|
||||
}
|
||||
}
|
||||
0
BREAKINGCHANGES.md
Normal file
3
BREAKINGCHANGES_dev.md
Normal file
@@ -0,0 +1,3 @@
|
||||
1. The `jet_brains_toolbox` step was renamed to `jetbrains_toolbox`. If you're
|
||||
using the old name in your configuration file in the `disable` or `only`
|
||||
fields, simply change it to `jetbrains_toolbox`.
|
||||
139
CHANGELOG.md
Normal file
@@ -0,0 +1,139 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
## [16.1.0](https://github.com/topgrade-rs/topgrade/compare/v16.0.4...v16.1.0) - 2025-10-31
|
||||
|
||||
### Added
|
||||
|
||||
- *(deb-get)* Skip non-deb-get packages by passing --dg-only ([#1386](https://github.com/topgrade-rs/topgrade/pull/1386))
|
||||
- *(typst)* add typst step ([#1374](https://github.com/topgrade-rs/topgrade/pull/1374))
|
||||
- *(step)* Add atuin step ([#1367](https://github.com/topgrade-rs/topgrade/pull/1367))
|
||||
- *(nix)* support upgrading Determinate Nix ([#1366](https://github.com/topgrade-rs/topgrade/pull/1366))
|
||||
- *(sudo)* print warning if Windows Sudo is misconfigured
|
||||
- *(sudo)* print warning if steps were skipped due to missing sudo
|
||||
- *(sudo)* add SudoKind::Null
|
||||
- detect and warn if running as root
|
||||
- add `--no-tmux` flag ([#1328](https://github.com/topgrade-rs/topgrade/pull/1328))
|
||||
- add step for mandb - user and system (update man entries) ([#1319](https://github.com/topgrade-rs/topgrade/pull/1319))
|
||||
- support for pkgfile ([#1306](https://github.com/topgrade-rs/topgrade/pull/1306))
|
||||
- add "show_skipped" option in config file #1280 ([#1286](https://github.com/topgrade-rs/topgrade/pull/1286))
|
||||
- fix typos ([#1221](https://github.com/topgrade-rs/topgrade/pull/1221))
|
||||
- *(conda)* allow configuring additional envs to update ([#1048](https://github.com/topgrade-rs/topgrade/pull/1048))
|
||||
- *(step)* nix-helper ([#1045](https://github.com/topgrade-rs/topgrade/pull/1045))
|
||||
- *(winget)* winget uses sudo when `[windows] winget_use_sudo = true` ([#1061](https://github.com/topgrade-rs/topgrade/pull/1061))
|
||||
- suppress pixi release notes by default ([#1225](https://github.com/topgrade-rs/topgrade/pull/1225))
|
||||
|
||||
### Fixed
|
||||
|
||||
- *(freshclam)* run with sudo when running without sudo fails ([#1118](https://github.com/topgrade-rs/topgrade/pull/1118))
|
||||
- *(tldr)* move tldr to be a generic step ([#1370](https://github.com/topgrade-rs/topgrade/pull/1370))
|
||||
- *(nix)* fix nix upgrade command selection for profiles in XDG_STATE_HOME ([#1354](https://github.com/topgrade-rs/topgrade/pull/1354))
|
||||
- *(containers)* Docker update fails on M Macs due to platform / ([#1360](https://github.com/topgrade-rs/topgrade/pull/1360))
|
||||
- *(sudo)* reorder require_sudo() after print_separator()
|
||||
- *(sudo)* use require_sudo for windows commands
|
||||
- *(sudo)* prevent sudo_command = "sudo" finding gsudo
|
||||
- *(sudo)* set sudo flags depending on kind
|
||||
- skip gcloud update step if component manager is disabled ([#1237](https://github.com/topgrade-rs/topgrade/pull/1237))
|
||||
- *(i18n)* use double-quotes for translations with newlines
|
||||
- *(powershell)* run microsoft_store command directly
|
||||
- *(powershell)* remove mentions of USOClient
|
||||
- *(powershell)* execution policy check breaks when run in pwsh
|
||||
- *(powershell)* don't use sudo with Update-Module for pwsh
|
||||
- *(powershell)* add -Command to module update cmdline
|
||||
- *(tmux)* support all default `tpm` locations (xdg and both hardcoded locations) ([#1146](https://github.com/topgrade-rs/topgrade/pull/1146))
|
||||
- fixed the German translation for "y/n/s/q" ([#1220](https://github.com/topgrade-rs/topgrade/pull/1220))
|
||||
|
||||
### Other
|
||||
|
||||
- *(release)* switch to release-plz ([#1333](https://github.com/topgrade-rs/topgrade/pull/1333))
|
||||
- *(pre-commit)* Make pre-commit.ci use conventional commits ([#1388](https://github.com/topgrade-rs/topgrade/pull/1388))
|
||||
- *(pre-commit)* pre-commit autoupdate ([#1383](https://github.com/topgrade-rs/topgrade/pull/1383))
|
||||
- *(deps)* bump actions/upload-artifact from 4.6.2 to 5.0.0 ([#1382](https://github.com/topgrade-rs/topgrade/pull/1382))
|
||||
- *(deps)* bump github/codeql-action from 4.30.9 to 4.31.0 ([#1379](https://github.com/topgrade-rs/topgrade/pull/1379))
|
||||
- *(deps)* bump actions/download-artifact from 5.0.0 to 6.0.0 ([#1380](https://github.com/topgrade-rs/topgrade/pull/1380))
|
||||
- *(deps)* bump taiki-e/install-action from 2.62.33 to 2.62.38 ([#1381](https://github.com/topgrade-rs/topgrade/pull/1381))
|
||||
- *(pre-commit)* Fix pre-commit-config.yaml ([#1378](https://github.com/topgrade-rs/topgrade/pull/1378))
|
||||
- *(release)* Add .deb auto completion script ([#1353](https://github.com/topgrade-rs/topgrade/pull/1353))
|
||||
- *(deps)* bump github/codeql-action from 4.30.8 to 4.30.9 ([#1369](https://github.com/topgrade-rs/topgrade/pull/1369))
|
||||
- *(deps)* bump taiki-e/install-action from 2.62.28 to 2.62.33 ([#1368](https://github.com/topgrade-rs/topgrade/pull/1368))
|
||||
- *(deps)* bump actions/dependency-review-action from 4.8.0 to 4.8.1 ([#1362](https://github.com/topgrade-rs/topgrade/pull/1362))
|
||||
- *(deps)* bump softprops/action-gh-release from 2.3.4 to 2.4.1 ([#1364](https://github.com/topgrade-rs/topgrade/pull/1364))
|
||||
- *(deps)* bump taiki-e/install-action from 2.62.21 to 2.62.28 ([#1363](https://github.com/topgrade-rs/topgrade/pull/1363))
|
||||
- *(deps)* bump github/codeql-action from 3.30.6 to 4.30.8 ([#1365](https://github.com/topgrade-rs/topgrade/pull/1365))
|
||||
- *(deps)* bump github/codeql-action from 3.30.5 to 3.30.6 ([#1355](https://github.com/topgrade-rs/topgrade/pull/1355))
|
||||
- *(deps)* bump softprops/action-gh-release from 2.3.3 to 2.3.4 ([#1356](https://github.com/topgrade-rs/topgrade/pull/1356))
|
||||
- *(deps)* bump taiki-e/install-action from 2.62.13 to 2.62.21 ([#1357](https://github.com/topgrade-rs/topgrade/pull/1357))
|
||||
- *(deps)* bump ossf/scorecard-action from 2.4.2 to 2.4.3 ([#1358](https://github.com/topgrade-rs/topgrade/pull/1358))
|
||||
- *(deps)* bump actions/dependency-review-action from 4.7.3 to 4.8.0 ([#1350](https://github.com/topgrade-rs/topgrade/pull/1350))
|
||||
- *(deps)* bump github/codeql-action from 3.30.3 to 3.30.5 ([#1349](https://github.com/topgrade-rs/topgrade/pull/1349))
|
||||
- *(deps)* bump taiki-e/install-action from 2.62.1 to 2.62.13 ([#1351](https://github.com/topgrade-rs/topgrade/pull/1351))
|
||||
- *(deps)* bump actions/cache from 4.2.4 to 4.3.0 ([#1352](https://github.com/topgrade-rs/topgrade/pull/1352))
|
||||
- Fix WSL distribution name cleanup ([#1348](https://github.com/topgrade-rs/topgrade/pull/1348))
|
||||
- *(pyproject)* mark version as dynamic ([#1347](https://github.com/topgrade-rs/topgrade/pull/1347))
|
||||
- *(deps)* replace winapi with windows
|
||||
- *(sudo)* rename interactive to login_shell
|
||||
- Fix "WSL already reported" panic ([#1344](https://github.com/topgrade-rs/topgrade/pull/1344))
|
||||
- Move step logic out of Powershell struct ([#1345](https://github.com/topgrade-rs/topgrade/pull/1345))
|
||||
- *(deps)* bump taiki-e/install-action from 2.61.5 to 2.62.1 ([#1335](https://github.com/topgrade-rs/topgrade/pull/1335))
|
||||
- *(deps)* bump Swatinem/rust-cache from 2.8.0 to 2.8.1 ([#1336](https://github.com/topgrade-rs/topgrade/pull/1336))
|
||||
- Fixes for #1188; custom_commands broken ([#1332](https://github.com/topgrade-rs/topgrade/pull/1332))
|
||||
- use login shell when executing topgrade ([#1327](https://github.com/topgrade-rs/topgrade/pull/1327))
|
||||
- *(deps)* bump taiki-e/install-action from 2.60.0 to 2.61.5 ([#1325](https://github.com/topgrade-rs/topgrade/pull/1325))
|
||||
- *(deps)* bump github/codeql-action from 3.30.1 to 3.30.3 ([#1324](https://github.com/topgrade-rs/topgrade/pull/1324))
|
||||
- *(pre-commit)* add typos with conservative excludes; no content changes ([#1317](https://github.com/topgrade-rs/topgrade/pull/1317))
|
||||
- fix simple typos in code and comments (split var, whether, Extensions) ([#1318](https://github.com/topgrade-rs/topgrade/pull/1318))
|
||||
- *(deps)* bump github/codeql-action from 3.29.11 to 3.30.1 ([#1301](https://github.com/topgrade-rs/topgrade/pull/1301))
|
||||
- *(deps)* bump softprops/action-gh-release from 2.3.2 to 2.3.3 ([#1302](https://github.com/topgrade-rs/topgrade/pull/1302))
|
||||
- *(deps)* bump taiki-e/install-action from 2.58.21 to 2.60.0 ([#1303](https://github.com/topgrade-rs/topgrade/pull/1303))
|
||||
- *(deps)* bump actions/dependency-review-action from 4.7.2 to 4.7.3 ([#1304](https://github.com/topgrade-rs/topgrade/pull/1304))
|
||||
- *(deps)* bump actions/attest-build-provenance from 2.4.0 to 3.0.0 ([#1305](https://github.com/topgrade-rs/topgrade/pull/1305))
|
||||
- update tracing-subscriber to ~0.3.20 (ANSI escape injection fix, GHSA-xwfj-jgwm-7wp5) ([#1288](https://github.com/topgrade-rs/topgrade/pull/1288))
|
||||
- *(deps)* bump github/codeql-action from 3.29.8 to 3.29.11 ([#1281](https://github.com/topgrade-rs/topgrade/pull/1281))
|
||||
- *(deps)* bump actions/dependency-review-action from 4.7.1 to 4.7.2 ([#1282](https://github.com/topgrade-rs/topgrade/pull/1282))
|
||||
- *(deps)* bump taiki-e/install-action from 2.58.9 to 2.58.21 ([#1283](https://github.com/topgrade-rs/topgrade/pull/1283))
|
||||
- *(deps)* bump PyO3/maturin-action from 1.49.3 to 1.49.4 ([#1285](https://github.com/topgrade-rs/topgrade/pull/1285))
|
||||
- *(deps)* bump actions/cache from 4.2.3 to 4.2.4 ([#1284](https://github.com/topgrade-rs/topgrade/pull/1284))
|
||||
- Support "Insiders" versions of VSCode and VSCodium ([#1279](https://github.com/topgrade-rs/topgrade/pull/1279))
|
||||
- Sudo preserve env list argument is `--preserve-env` ([#1276](https://github.com/topgrade-rs/topgrade/pull/1276))
|
||||
- Clippy fixes from rust 1.91 nightly ([#1267](https://github.com/topgrade-rs/topgrade/pull/1267))
|
||||
- *(deps)* bump actions/checkout from 4.2.2 to 5.0.0 ([#1264](https://github.com/topgrade-rs/topgrade/pull/1264))
|
||||
- *(deps)* bump actions/download-artifact from 4.3.0 to 5.0.0 ([#1263](https://github.com/topgrade-rs/topgrade/pull/1263))
|
||||
- *(deps)* bump taiki-e/install-action from 2.58.0 to 2.58.9 ([#1261](https://github.com/topgrade-rs/topgrade/pull/1261))
|
||||
- *(deps)* bump ossf/scorecard-action from 2.4.0 to 2.4.2 ([#1262](https://github.com/topgrade-rs/topgrade/pull/1262))
|
||||
- *(deps)* bump github/codeql-action from 3.29.5 to 3.29.8 ([#1265](https://github.com/topgrade-rs/topgrade/pull/1265))
|
||||
- *(ci)* Dependabot, workflow security ([#1257](https://github.com/topgrade-rs/topgrade/pull/1257))
|
||||
- replace once_cell crate with std equivalent ([#1260](https://github.com/topgrade-rs/topgrade/pull/1260))
|
||||
- *(deps)* bump tokio from 1.38 to 1.47 ([#1256](https://github.com/topgrade-rs/topgrade/pull/1256))
|
||||
- *(app.yml)* fix fr language #1248
|
||||
- *(sudo)* add SudoKind::WinSudo
|
||||
- *(sudo)* add SudoExecuteOpts builder functions and preserve_env enum
|
||||
- *(yarn)* remove unnecessary Yarn::yarn field
|
||||
- *(apt)* extract detect_apt() function
|
||||
- route sudo usage through Sudo::execute*
|
||||
- move RunType::execute to ExecutionContext
|
||||
- *(powershell)* store powershell path directly
|
||||
- *(powershell)* cleanup and simplify code
|
||||
- Move step running into enum for dynamic ordering ([#1188](https://github.com/topgrade-rs/topgrade/pull/1188))
|
||||
- Generate artifact attestations for release assets ([#1216](https://github.com/topgrade-rs/topgrade/pull/1216))
|
||||
- windows update, use explicit reboot policy ([#1143](https://github.com/topgrade-rs/topgrade/pull/1143))
|
||||
- add Discord invite link to README ([#1203](https://github.com/topgrade-rs/topgrade/pull/1203))
|
||||
- Catch secondary uv self-update error ([#1201](https://github.com/topgrade-rs/topgrade/pull/1201))
|
||||
- Handle another format change in asdf version ([#1194](https://github.com/topgrade-rs/topgrade/pull/1194))
|
||||
- Preserve custom command order from config instead of sorting alphabetically ([#1182](https://github.com/topgrade-rs/topgrade/pull/1182))
|
||||
- Add support for multiple binary names and idea having multiple binaries ([#1167](https://github.com/topgrade-rs/topgrade/pull/1167))
|
||||
- fix the invalid action version ([#1185](https://github.com/topgrade-rs/topgrade/pull/1185))
|
||||
- allow us to re-run AUR CI ([#1184](https://github.com/topgrade-rs/topgrade/pull/1184))
|
||||
- Update Yazi upgrade step to use ya pkg. ([#1163](https://github.com/topgrade-rs/topgrade/pull/1163))
|
||||
- use the new tag name and specify shell to bash ([#1183](https://github.com/topgrade-rs/topgrade/pull/1183))
|
||||
- allow specifying tag when manually run 'create_release_assets.yml' ([#1180](https://github.com/topgrade-rs/topgrade/pull/1180))
|
||||
- fix homebrew ci, remove duplicate trigger event ([#1179](https://github.com/topgrade-rs/topgrade/pull/1179))
|
||||
- fix PyPI pipeline duplicate wheel name ([#1178](https://github.com/topgrade-rs/topgrade/pull/1178))
|
||||
- add event workflow_dispatch to release pipelines ([#1177](https://github.com/topgrade-rs/topgrade/pull/1177))
|
||||
- fix pipeline release to PyPI ([#1176](https://github.com/topgrade-rs/topgrade/pull/1176))
|
||||
- Install rustfmt and clippy where necessary ([#1171](https://github.com/topgrade-rs/topgrade/pull/1171))
|
||||
@@ -16,15 +16,15 @@ In `topgrade`'s term, package manager is called `step`.
|
||||
To add a new `step` to `topgrade`:
|
||||
|
||||
1. Add a new variant to
|
||||
[`enum Step`](https://github.com/topgrade-rs/topgrade/blob/cb7adc8ced8a77addf2cb051d18bba9f202ab866/src/config.rs#L100)
|
||||
[`enum Step`](https://github.com/topgrade-rs/topgrade/blob/main/src/step.rs)
|
||||
|
||||
```rust
|
||||
pub enum Step {
|
||||
// Existed steps
|
||||
// Existing steps
|
||||
// ...
|
||||
|
||||
// Your new step here!
|
||||
// You may want it to be sorted alphabetically because that looks great:)
|
||||
// Make sure it stays sorted alphabetically because that looks great :)
|
||||
Xxx,
|
||||
}
|
||||
```
|
||||
@@ -32,9 +32,9 @@ To add a new `step` to `topgrade`:
|
||||
2. Implement the update function
|
||||
|
||||
You need to find the appropriate location for this update function; it should be
|
||||
a file under [`src/steps`](https://github.com/topgrade-rs/topgrade/tree/master/src/steps),
|
||||
a file under [`src/steps`](https://github.com/topgrade-rs/topgrade/tree/main/src/steps),
|
||||
and the file names are self-explanatory; for example, `step`s related to `zsh` are
|
||||
placed in [`steps/zsh.rs`](https://github.com/topgrade-rs/topgrade/blob/master/src/steps/zsh.rs).
|
||||
placed in [`steps/zsh.rs`](https://github.com/topgrade-rs/topgrade/blob/main/src/steps/zsh.rs).
|
||||
|
||||
Then you implement the update function, and put it in the file where it belongs.
|
||||
|
||||
@@ -47,8 +47,7 @@ To add a new `step` to `topgrade`:
|
||||
print_separator("xxx");
|
||||
|
||||
// Invoke the new step to get things updated!
|
||||
ctx.run_type()
|
||||
.execute("xxx")
|
||||
ctx.execute(xxx)
|
||||
.arg(/* args required by this step */)
|
||||
.status_checked()
|
||||
}
|
||||
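For context, a complete step built from these pieces might look roughly like the sketch below. It is only an illustration: the module paths in the `use` lines and the `"update"` argument are assumptions, not the exact layout of the current tree.

```rust
use color_eyre::eyre::Result;

// Assumed module paths; adjust to wherever these helpers live in the tree.
use crate::execution_context::ExecutionContext;
use crate::terminal::print_separator;
use crate::utils::require;

/// Hypothetical step for a fictional `xxx` tool.
pub fn run_xxx(ctx: &ExecutionContext) -> Result<()> {
    // Resolve the binary first so the step is skipped when it is absent.
    let xxx = require("xxx")?;

    print_separator("xxx");

    // Run the tool's own update command and turn a non-zero exit status
    // into a step failure.
    ctx.execute(xxx).arg("update").status_checked()
}
```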
@@ -68,17 +67,17 @@ To add a new `step` to `topgrade`:
|
||||
Still, this is sufficient for most tools, but you may need some extra work
|
||||
for more complicated `step`s.
|
||||
|
||||
3. Finally, invoke that update function in `main.rs`
|
||||
3. Add a match arm to `Step::run()`
|
||||
|
||||
```rust
|
||||
runner.execute(Step::Xxx, "xxx", || ItsModule::run_xxx(&ctx))?;
|
||||
Xxx => runner.execute(*self, "xxx", || ItsModule::run_xxx(ctx))?
|
||||
```
|
||||
|
||||
We use [conditional compilation](https://doc.rust-lang.org/reference/conditional-compilation.html)
|
||||
to separate the steps. For example, a step that is Linux-only goes
|
||||
like this:
|
||||
|
||||
```
|
||||
```rust
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// Xxx is Linux-only
|
||||
@@ -86,21 +85,42 @@ To add a new `step` to `topgrade`:
|
||||
}
|
||||
```
|
||||
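If you have not used `cfg` attributes before, the mechanism is just compile-time gating; here is a standalone toy (not Topgrade code) showing the same idea:

```rust
// Toy example: the block is compiled only on Linux, exactly like a
// Linux-only step above; other platforms compile it away entirely.
fn main() {
    #[cfg(target_os = "linux")]
    {
        println!("running the Linux-only step");
    }

    #[cfg(not(target_os = "linux"))]
    println!("the Linux-only step does not exist in this build");
}
```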
|
||||
Congrats, you just added a new `step`:)
|
||||
4. Finally, add the step to `default_steps()` in `step.rs`
|
||||
```rust
|
||||
steps.push(Xxx)
|
||||
```
|
||||
Try to keep the conditional compilation the same as in the above step 3.
|
||||
|
||||
Congrats, you just added a new `step` :)
|
||||
|
||||
## Modification to the configuration entries
|
||||
|
||||
If your PR modifies the configuration options
|
||||
(in [`src/config.rs`](https://github.com/topgrade-rs/topgrade/blob/master/src/config.rs))
|
||||
(in [`src/config.rs`](https://github.com/topgrade-rs/topgrade/blob/main/src/config.rs))
|
||||
in one of the following ways:
|
||||
|
||||
1. Adding new options
|
||||
2. Changing the existing options
|
||||
|
||||
Be sure to apply your changes to
|
||||
[`config.example.toml`](https://github.com/topgrade-rs/topgrade/blob/master/config.example.toml),
|
||||
[`config.example.toml`](https://github.com/topgrade-rs/topgrade/blob/main/config.example.toml),
|
||||
and add some basic documentation guiding users on how to use these options.
|
||||
|
||||
## Breaking changes
|
||||
|
||||
If your PR introduces a breaking change, document it in [`BREAKINGCHANGES_dev.md`][bc_dev].
|
||||
It should be written in Markdown and wrapped at 80 columns, for example:
|
||||
|
||||
```md
|
||||
1. The configuration location has been updated to x.
|
||||
|
||||
2. The step x has been removed.
|
||||
|
||||
3. ...
|
||||
```
|
||||
|
||||
[bc_dev]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES_dev.md
|
||||
|
||||
## Before you submit your PR
|
||||
|
||||
Make sure your patch passes the following tests on your host:
|
||||
@@ -114,6 +134,24 @@ $ cargo test
|
||||
|
||||
Don't worry about other platforms, we have most of them covered in our CI.
|
||||
|
||||
## I18n
|
||||
|
||||
If your PR introduces user-facing messages, we need to ensure they are translated.
|
||||
Please add the translations to [`locales/app.yml`][app_yml]. For simple messages
|
||||
without arguments (e.g., "hello world"), we can simply translate them directly
|
||||
(Tip: ChatGPT or similar LLMs are good at translation). If a message contains
|
||||
arguments, e.g., "hello <NAME>", please follow this convention:
|
||||
|
||||
```yml
|
||||
"hello {name}": # key
|
||||
en: "hello %{name}" # translation
|
||||
```
|
||||
|
||||
Arguments in the key should be in format `{argument_name}`, and they will have
|
||||
a preceding `%` when used in translations.
|
||||
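On the Rust side the key is then used with the `t!` macro and the argument is passed by name, mirroring the call in `src/breaking_changes.rs` shown later in this diff (`t!("Topgrade {version_str} Breaking Changes", version_str = VERSION_STR)`). A minimal sketch, assuming the crate-level `rust_i18n::i18n!` setup that the crate already performs:

```rust
use rust_i18n::t;

// Hypothetical message with one argument; the key must match the entry
// added to locales/app.yml above.
fn greeting(name: &str) -> String {
    t!("hello {name}", name = name).to_string()
}
```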
|
||||
[app_yml]: https://github.com/topgrade-rs/topgrade/blob/main/locales/app.yml
|
||||
|
||||
## Some tips
|
||||
|
||||
1. Locale
|
||||
|
||||
2738
Cargo.lock
generated
File diff suppressed because it is too large
72
Cargo.toml
@@ -5,9 +5,10 @@ categories = ["os"]
|
||||
keywords = ["upgrade", "update"]
|
||||
license = "GPL-3.0"
|
||||
repository = "https://github.com/topgrade-rs/topgrade"
|
||||
version = "12.0.2"
|
||||
rust-version = "1.84.1"
|
||||
version = "16.1.0"
|
||||
authors = ["Roey Darwish Dror <roey.ghost@gmail.com>", "Thomas Schönauer <t.schoenauer@hgs-wt.at>"]
|
||||
exclude = ["doc/screenshot.gif"]
|
||||
exclude = ["doc/screenshot.gif", "BREAKINGCHANGES_dev.md"]
|
||||
edition = "2021"
|
||||
|
||||
readme = "README.md"
|
||||
@@ -22,54 +23,75 @@ path = "src/main.rs"
|
||||
[dependencies]
|
||||
home = "~0.5"
|
||||
etcetera = "~0.8"
|
||||
once_cell = "~1.17"
|
||||
serde = { version = "~1.0", features = ["derive"] }
|
||||
toml = "0.5"
|
||||
which_crate = { version = "~4.1", package = "which" }
|
||||
shellexpand = "~2.1"
|
||||
clap = { version = "~3.1", features = ["cargo", "derive"] }
|
||||
clap_complete = "~3.1"
|
||||
clap_mangen = "~0.1"
|
||||
walkdir = "~2.3"
|
||||
toml = "0.8"
|
||||
which_crate = { version = "~6.0", package = "which" }
|
||||
shellexpand = "~3.1"
|
||||
clap = { version = "~4.5", features = ["cargo", "derive"] }
|
||||
clap_complete = "~4.5"
|
||||
clap_mangen = "~0.2"
|
||||
walkdir = "~2.5"
|
||||
console = "~0.15"
|
||||
lazy_static = "~1.4"
|
||||
chrono = "~0.4"
|
||||
glob = "~0.3"
|
||||
strum = { version = "~0.24", features = ["derive"] }
|
||||
strum = { version = "~0.26", features = ["derive"] }
|
||||
thiserror = "~1.0"
|
||||
tempfile = "~3.6"
|
||||
tempfile = "~3.10"
|
||||
cfg-if = "~1.0"
|
||||
tokio = { version = "~1.18", features = ["process", "rt-multi-thread"] }
|
||||
tokio = { version = "~1.47", features = ["process", "rt-multi-thread"] }
|
||||
futures = "~0.3"
|
||||
regex = "~1.7"
|
||||
regex = "~1.10"
|
||||
semver = "~1.0"
|
||||
shell-words = "~1.1"
|
||||
color-eyre = "~0.6"
|
||||
tracing = { version = "~0.1", features = ["attributes", "log"] }
|
||||
tracing-subscriber = { version = "~0.3", features = ["env-filter", "time"] }
|
||||
tracing-subscriber = { version = "~0.3.20", features = ["env-filter", "time"] }
|
||||
merge = "~0.1"
|
||||
regex-split = "~0.1"
|
||||
notify-rust = "~4.8"
|
||||
notify-rust = "~4.11"
|
||||
wildmatch = "2.3.0"
|
||||
rust-i18n = "3.0.1"
|
||||
sys-locale = "0.3.1"
|
||||
jetbrains-toolbox-updater = "5.0.0"
|
||||
indexmap = { version = "2.9.0", features = ["serde"] }
|
||||
|
||||
[package.metadata.generate-rpm]
|
||||
assets = [{source = "target/release/topgrade", dest="/usr/bin/topgrade"}]
|
||||
assets = [{ source = "target/release/topgrade", dest = "/usr/bin/topgrade" }]
|
||||
|
||||
[package.metadata.generate-rpm.requires]
|
||||
git = "*"
|
||||
|
||||
[package.metadata.deb]
|
||||
depends = "$auto,git"
|
||||
name = "topgrade"
|
||||
maintainer = "Chris Gelatt <kreeblah@gmail.com>"
|
||||
copyright = "2024, Topgrade Team"
|
||||
license-file = ["LICENSE", "0"]
|
||||
depends = "$auto"
|
||||
extended-description = "Keeping your system up to date usually involves invoking multiple package managers. This results in big, non-portable shell one-liners saved in your shell. To remedy this, Topgrade detects which tools you use and runs the appropriate commands to update them."
|
||||
section = "utils"
|
||||
priority = "optional"
|
||||
default-features = true
|
||||
assets = [
|
||||
["target/release/topgrade", "usr/bin/", "755"],
|
||||
["README.md", "usr/share/doc/topgrade/README.md", "644"],
|
||||
# The man page is automatically generated by topgrade's build process in CI, so
|
||||
# these files aren't actually committed.
|
||||
["deployment/deb/topgrade.bash", "usr/share/bash-completion/completions/topgrade", "644"],
|
||||
["deployment/deb/topgrade.fish", "usr/share/fish/vendor_completions.d/topgrade.fish", "644"],
|
||||
["deployment/deb/_topgrade", "usr/share/zsh/vendor-completions/", "644"],
|
||||
]
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
libc = "~0.2"
|
||||
nix = "~0.24"
|
||||
rust-ini = "~0.19"
|
||||
self_update_crate = { version = "~0.30", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }
|
||||
nix = { version = "~0.29", features = ["hostname", "signal", "user"] }
|
||||
rust-ini = "~0.21"
|
||||
self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
self_update_crate = { version = "~0.30", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
|
||||
winapi = "~0.3"
|
||||
is_elevated = "~0.1"
|
||||
parselnk = "~0.1"
|
||||
self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
|
||||
windows = { version = "~0.62", features = ["Win32_System_Console"] }
|
||||
windows-registry = "~0.6"
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
|
||||
34
README.md
@@ -8,13 +8,9 @@
|
||||
<a href="https://aur.archlinux.org/packages/topgrade"><img alt="AUR" src="https://img.shields.io/aur/version/topgrade.svg"></a>
|
||||
<a href="https://formulae.brew.sh/formula/topgrade"><img alt="Homebrew" src="https://img.shields.io/homebrew/v/topgrade.svg"></a>
|
||||
|
||||
<img alt="Demo" src="doc/screenshot.gif" width="550px">
|
||||
<img alt="Demo" src="doc/topgrade_demo.gif">
|
||||
</div>
|
||||
|
||||
## Maintainers Wanted
|
||||
|
||||
I currently do not have enough time to maintain this project at the level it requires and deserves. For this reason I'm asking the community to help support the project, to work on resolving issues, and to create new features. Thanks for all your help.
|
||||
|
||||
|
||||
## Introduction
|
||||
|
||||
@@ -33,28 +29,33 @@ To remedy this, **Topgrade** detects which tools you use and runs the appropriat
|
||||
- NixOS: [Nixpkgs](https://search.nixos.org/packages?show=topgrade)
|
||||
- Void Linux: [XBPS](https://voidlinux.org/packages/?arch=x86_64&q=topgrade)
|
||||
- macOS: [Homebrew](https://formulae.brew.sh/formula/topgrade) or [MacPorts](https://ports.macports.org/port/topgrade/)
|
||||
- Windows: [Scoop](https://github.com/ScoopInstaller/Main/blob/master/bucket/topgrade.json)
|
||||
- Windows: [Chocolatey][choco], [Scoop][scoop] or [Winget][winget]
|
||||
- PyPi: [pip](https://pypi.org/project/topgrade/)
|
||||
- Fedora: [Copr](https://copr.fedorainfracloud.org/coprs/lilay/topgrade/)
|
||||
|
||||
[choco]: https://community.chocolatey.org/packages/topgrade
|
||||
[scoop]: https://scoop.sh/#/apps?q=topgrade
|
||||
[winget]: https://winstall.app/apps/topgrade-rs.topgrade
|
||||
|
||||
Other systems users can either use `cargo install` or the compiled binaries from the release page.
|
||||
The compiled binaries contain a self-upgrading feature.
|
||||
|
||||
> Currently, Topgrade requires Rust 1.84.1 or above. In general, Topgrade tracks
|
||||
> the latest stable toolchain.
|
||||
|
||||
## Usage
|
||||
|
||||
Just run `topgrade`.
|
||||
|
||||
Visit the documentation at [topgrade-rs.github.io](https://topgrade-rs.github.io/) for more information.
|
||||
|
||||
> **Warning**
|
||||
> Work in Progress
|
||||
|
||||
## Configuration
|
||||
|
||||
See `config.example.toml` for an example configuration file.
|
||||
|
||||
## Migration and Breaking Changes
|
||||
|
||||
Whenever there is a **breaking change**, the major version number will be bumped,
|
||||
and we will document these changes in the release notes. Please take a look at
|
||||
them when updating to a major release.
|
||||
|
||||
> Got a question? Feel free to open an issue or discussion!
|
||||
|
||||
### Configuration Path
|
||||
|
||||
#### `CONFIG_DIR` on each platform
|
||||
@@ -109,3 +110,8 @@ See [CONTRIBUTING.md](https://github.com/topgrade-rs/topgrade/blob/master/CONTRI
|
||||
- [ ] Add a proper testing framework to the code base.
|
||||
- [ ] Add unit tests for package managers.
|
||||
- [ ] Split up code into more maintainable parts, e.g. putting every Linux package manager in its own submodule of linux.rs.
|
||||
|
||||
## Discord server
|
||||
|
||||
Welcome to [join](https://discord.gg/Q8HGGWundY) our Discord server if you want
|
||||
to discuss Topgrade!
|
||||
|
||||
21
RELEASE_PROCEDURE.md
Normal file
@@ -0,0 +1,21 @@
|
||||
Non-major versions go via release-plz.
|
||||
|
||||
1. Bump the version number.
|
||||
|
||||
> If there are breaking changes, the major version number should be increased.
|
||||
|
||||
2. If the major version number gets bumped, update [SECURITY.md][SECURITY_file_link].
|
||||
|
||||
[SECURITY_file_link]: https://github.com/topgrade-rs/topgrade/blob/main/SECURITY.md
|
||||
|
||||
3. Overwrite [`BREAKINGCHANGES`][breaking_changes] with
|
||||
[`BREAKINGCHANGES_dev`][breaking_changes_dev], and create a new dev file:
|
||||
|
||||
```sh
|
||||
$ cd topgrade
|
||||
$ mv BREAKINGCHANGES_dev.md BREAKINGCHANGES.md
|
||||
$ touch BREAKINGCHANGES_dev.md
|
||||
```
|
||||
|
||||
[breaking_changes_dev]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES_dev.md
|
||||
[breaking_changes]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES.md
|
||||
SECURITY.md
@@ -6,6 +6,5 @@ We only support the latest major version and each subversion.
|
||||
|
||||
| Version | Supported |
|
||||
| -------- | ------------------ |
|
||||
| 10.0.x | :white_check_mark: |
|
||||
| < 10.0 | :x: |
|
||||
|
||||
| 16.0.x | :white_check_mark: |
|
||||
| < 16.0 | :x: |
|
||||
|
||||
35
build-all.sh
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env sh
|
||||
#!/usr/bin/env bash
|
||||
|
||||
build_function() {
|
||||
rustup update
|
||||
cargo install cross
|
||||
@@ -20,7 +21,7 @@ build_function() {
|
||||
|
||||
package_function() {
|
||||
|
||||
cd build
|
||||
cd build || exit 1
|
||||
mkdir x86_64-unknown-linux-gnu/
|
||||
mkdir x86_64-unknown-linux-musl/
|
||||
mkdir x86_64-unknown-freebsd/
|
||||
@@ -35,28 +36,28 @@ package_function() {
|
||||
cp ../target/aarch64-unknown-linux-musl/release/topgrade aarch64-unknown-linux-musl/topgrade
|
||||
cp ../target/x86_64-pc-windows-gnu/release/topgrade.exe x86_64-pc-windows-gnu/topgrade.exe
|
||||
|
||||
cd x86_64-unknown-linux-gnu/
|
||||
tar -czf ../topgrade-${ans}-x86_64-linux-gnu.tar.gz topgrade
|
||||
cd x86_64-unknown-linux-gnu/ || exit 1
|
||||
tar -czf "../topgrade-${ans}-x86_64-linux-gnu.tar.gz" topgrade
|
||||
cd ..
|
||||
|
||||
cd x86_64-unknown-linux-musl
|
||||
tar -czf ../topgrade-${ans}-x86_64-linux-musl.tar.gz topgrade
|
||||
cd x86_64-unknown-linux-musl/ || exit 1
|
||||
tar -czf "../topgrade-${ans}-x86_64-linux-musl.tar.gz" topgrade
|
||||
cd ..
|
||||
|
||||
cd x86_64-unknown-freebsd/
|
||||
tar -czf ../topgrade-${ans}-x86_64-freebsd.tar.gz topgrade
|
||||
cd x86_64-unknown-freebsd/ || exit 1
|
||||
tar -czf "../topgrade-${ans}-x86_64-freebsd.tar.gz" topgrade
|
||||
cd ..
|
||||
|
||||
cd aarch64-unknown-linux-gnu/
|
||||
tar -czf ../topgrade-${ans}-aarch64-linux-gnu.tar.gz topgrade
|
||||
cd aarch64-unknown-linux-gnu/ || exit 1
|
||||
tar -czf "../topgrade-${ans}-aarch64-linux-gnu.tar.gz" topgrade
|
||||
cd ..
|
||||
|
||||
cd aarch64-unknown-linux-musl/
|
||||
tar -czf ../topgrade-${ans}-aarch64-linux-musl.tar.gz topgrade
|
||||
cd aarch64-unknown-linux-musl/ || exit 1
|
||||
tar -czf "../topgrade-${ans}-aarch64-linux-musl.tar.gz" topgrade
|
||||
cd ..
|
||||
|
||||
cd x86_64-pc-windows-gnu/
|
||||
zip -q ../topgrade-${ans}-x86_64-windows.zip topgrade.exe
|
||||
cd x86_64-pc-windows-gnu/ || exit 1
|
||||
zip -q "../topgrade-${ans}-x86_64-windows.zip" topgrade.exe
|
||||
cd ..
|
||||
cd ..
|
||||
|
||||
@@ -65,17 +66,19 @@ package_function() {
|
||||
print_checksums() {
|
||||
|
||||
|
||||
cd build/
|
||||
sha256sum topgrade-${ans}-*
|
||||
cd build/ || exit 1
|
||||
sha256sum topgrade-"${ans}"-*
|
||||
cd ../
|
||||
}
|
||||
|
||||
while true; do
|
||||
|
||||
echo "You should always have a look on scripts you download from the internet."
|
||||
# shellcheck disable=SC2162
|
||||
read -p "Do you still want to proceed? (y/n) " yn
|
||||
|
||||
echo -n "Input version number: "
|
||||
# shellcheck disable=SC2162
|
||||
read ans
|
||||
mkdir build
|
||||
|
||||
|
||||
config.example.toml
@@ -2,168 +2,377 @@
|
||||
# [include] sections are processed in the order you write them
|
||||
# Files in $CONFIG_DIR/topgrade.d/ are automatically included before this file
|
||||
[include]
|
||||
#paths = ["/etc/topgrade.toml"]
|
||||
# paths = ["/etc/topgrade.toml"]
|
||||
|
||||
|
||||
[misc]
|
||||
# Don't ask for confirmations
|
||||
#assume_yes = true
|
||||
|
||||
# Disable specific steps - same options as the command line flag
|
||||
#disable = ["system", "emacs"]
|
||||
|
||||
# Ignore failures for these steps
|
||||
#ignore_failures = ["powershell"]
|
||||
|
||||
# Run specific steps - same options as the command line flag
|
||||
#only = ["system", "emacs"]
|
||||
|
||||
# Do not ask to retry failed steps (default: false)
|
||||
#no_retry = true
|
||||
|
||||
# Sudo command to be used
|
||||
#sudo_command = "sudo"
|
||||
# On Unix systems, Topgrade should not be run as root, it
|
||||
# will run commands with sudo or equivalent where needed.
|
||||
# Set this to true to suppress the warning and confirmation
|
||||
# prompt if Topgrade detects it is being run as root.
|
||||
# (default: false)
|
||||
# allow_root = false
|
||||
|
||||
# Run `sudo -v` to cache credentials at the start of the run
|
||||
# This avoids a blocking password prompt in the middle of an unattended run
|
||||
#pre_sudo = false
|
||||
# (default: false)
|
||||
# pre_sudo = false
|
||||
|
||||
# Run inside tmux
|
||||
#run_in_tmux = true
|
||||
# Sudo command to be used
|
||||
# sudo_command = "sudo"
|
||||
|
||||
# Disable specific steps - same options as the command line flag
|
||||
# disable = ["system", "emacs"]
|
||||
|
||||
# Ignore failures for these steps
|
||||
# ignore_failures = ["powershell"]
|
||||
|
||||
# List of remote machines with Topgrade installed on them
|
||||
#remote_topgrades = ["toothless", "pi", "parnas"]
|
||||
|
||||
# Arguments to pass to SSH when upgrading remote systems
|
||||
#ssh_arguments = "-o ConnectTimeout=2"
|
||||
# remote_topgrades = ["toothless", "pi", "parnas"]
|
||||
|
||||
# Path to Topgrade executable on remote machines
|
||||
#remote_topgrade_path = ".cargo/bin/topgrade"
|
||||
# remote_topgrade_path = ".cargo/bin/topgrade"
|
||||
|
||||
# Arguments to pass to SSH when upgrading remote systems
|
||||
# ssh_arguments = "-o ConnectTimeout=2"
|
||||
|
||||
# Arguments to pass tmux when pulling Repositories
|
||||
#tmux_arguments = "-S /var/tmux.sock"
|
||||
# tmux_arguments = "-S /var/tmux.sock"
|
||||
|
||||
# Do not set the terminal title
|
||||
#set_title = false
|
||||
# Do not set the terminal title (default: true)
|
||||
# set_title = true
|
||||
|
||||
# Display the time in step titles
|
||||
# Display the time in step titles (default: true)
|
||||
# display_time = true
|
||||
|
||||
# Cleanup temporary or old files
|
||||
#cleanup = true
|
||||
# Don't ask for confirmations (no default value)
|
||||
# assume_yes = true
|
||||
|
||||
# Skip sending a notification at the end of a run
|
||||
#skip_notify = true
|
||||
# Do not ask to retry failed steps (default: false)
|
||||
# no_retry = true
|
||||
|
||||
# Whether to self update (this is ignored if the binary has been built without self update support, available also via setting the environment variable TOPGRADE_NO_SELF_UPGRADE)
|
||||
#no_self_update = true
|
||||
# Show the reason for skipped steps (default: false)
|
||||
# This has no effect if the "only" option is specified
|
||||
# show_skipped = true
|
||||
|
||||
# Run inside tmux (default: false)
|
||||
# run_in_tmux = true
|
||||
|
||||
# Changes the way topgrade interacts with
|
||||
# the tmux session, creating the session
|
||||
# and only attaching to it if not inside tmux
|
||||
# (default: "attach_if_not_in_session", allowed values: "attach_if_not_in_session", "attach_always")
|
||||
# tmux_session_mode = "attach_if_not_in_session"
|
||||
|
||||
# Cleanup temporary or old files (default: false)
|
||||
# cleanup = true
|
||||
|
||||
# Send a notification for every step (default: false)
|
||||
# notify_each_step = false
|
||||
|
||||
# Skip sending a notification at the end of a run (default: false)
|
||||
# skip_notify = true
|
||||
|
||||
# The Bash-it branch to update (default: "stable")
|
||||
# bashit_branch = "stable"
|
||||
|
||||
# Run specific steps - same options as the command line flag
|
||||
# only = ["system", "emacs"]
|
||||
|
||||
# Whether to self update
|
||||
#
|
||||
# this will be ignored if the binary is built without self update support
|
||||
#
|
||||
# available also via setting the environment variable TOPGRADE_NO_SELF_UPGRADE
|
||||
# no_self_update = true
|
||||
|
||||
# Extra tracing filter directives
|
||||
# These are prepended to the `--log-filter` argument
|
||||
# See: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives
|
||||
# log_filters = ["topgrade::command=debug", "warn"]
|
||||
|
||||
# Extra Home Manager arguments
|
||||
#home_manager_arguments = ["--flake", "file"]
|
||||
|
||||
# Commands to run before anything
|
||||
[pre_commands]
|
||||
#"Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
# "Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
|
||||
|
||||
# Commands to run after anything
|
||||
[post_commands]
|
||||
#"Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
# "Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
|
||||
|
||||
# Custom commands
|
||||
[commands]
|
||||
#"Python Environment" = "~/dev/.env/bin/pip install -i https://pypi.python.org/simple -U --upgrade-strategy eager jupyter"
|
||||
#"Custom command using interactive shell (unix)" = "-i vim_upgrade"
|
||||
# "Python Environment" = "~/dev/.env/bin/pip install -i https://pypi.python.org/simple -U --upgrade-strategy eager jupyter"
|
||||
# "Custom command using interactive shell (unix)" = "-i vim_upgrade"
|
||||
|
||||
|
||||
[python]
|
||||
#enable_pip_review = true ###disabled by default
|
||||
#enable_pip_review_local = true ###disabled by default
|
||||
#enable_pipupgrade = true ###disabled by default
|
||||
#pipupgrade_arguments = "-y -u --pip-path pip" ###disabled by default
|
||||
# enable_pip_review = true ###disabled by default
|
||||
# enable_pip_review_local = true ###disabled by default
|
||||
# enable_pipupgrade = true ###disabled by default
|
||||
# pipupgrade_arguments = "-y -u --pip-path pip" ###disabled by default
|
||||
|
||||
# For the poetry step, by default, Topgrade skips its update if poetry is not
|
||||
# installed with the official script. This configuration entry forces Topgrade
|
||||
# to run the update in this case.
|
||||
#
|
||||
# (default: false)
|
||||
# poetry_force_self_update = true
|
||||
|
||||
|
||||
[conda]
|
||||
# Additional named conda environments to update (`conda env update -n env_name`)
|
||||
# env_names = [
|
||||
# "Toolbox",
|
||||
# "PyTorch"
|
||||
# ]
|
||||
# Additional conda environment paths to update (`conda env update -p env_path`)
|
||||
# env_paths = [
|
||||
# "~/webserver/.conda/",
|
||||
# "~/experiments/.conda/"
|
||||
# ]
|
||||
|
||||
|
||||
[composer]
|
||||
#self_update = true
|
||||
# self_update = true
|
||||
|
||||
|
||||
[brew]
|
||||
#greedy_cask = true
|
||||
#autoremove = true
|
||||
# For the BrewCask step
|
||||
# If `Repo Cask Upgrade` exists, then use the `-a` option.
|
||||
# Otherwise, use the `--greedy` option.
|
||||
# greedy_cask = true
|
||||
|
||||
# For the BrewCask step
|
||||
# If `Repo Cask Upgrade` does not exist, then use the `--greedy_latest` option.
|
||||
# NOTE: the above entry `greedy_cask` covers this entry; you can enable
|
||||
# both of them, and they won't clash with each other.
|
||||
# greedy_latest = true
|
||||
|
||||
# For the BrewCask step
|
||||
# If `Repo Cask Upgrade` does not exist, then use the `--greedy_auto_updates` option.
|
||||
# NOTE: the above entry `greedy_cask` covers this entry; you can enable
|
||||
# both of them, and they won't clash with each other.
|
||||
# greedy_auto_updates = true
|
||||
|
||||
# For the BrewFormula step
|
||||
# Execute `brew autoremove` after the step.
|
||||
# autoremove = true
|
||||
|
||||
# For the BrewFormula step
|
||||
# Upgrade formulae built from the HEAD branch; `brew upgrade --fetch-HEAD`
|
||||
# fetch_head = true
|
||||
|
||||
|
||||
[linux]
|
||||
# Arch Package Manager to use. Allowed values: autodetect, aura, garuda_update, pacman, pamac, paru, pikaur, trizen, yay.
|
||||
#arch_package_manager = "pacman"
|
||||
# Arch Package Manager to use.
|
||||
# Allowed values:
|
||||
# autodetect, aura, garuda_update, pacman, pamac, paru, pikaur, trizen, yay
|
||||
# arch_package_manager = "pacman"
|
||||
|
||||
# Arguments to pass yay (or paru) when updating packages
|
||||
#yay_arguments = "--nodevel"
|
||||
# yay_arguments = "--nodevel"
|
||||
|
||||
# Arguments to pass dnf when updating packages
|
||||
#dnf_arguments = "--refresh"
|
||||
#aura_aur_arguments = "-kx"
|
||||
#aura_pacman_arguments = ""
|
||||
#garuda_update_arguments = ""
|
||||
#show_arch_news = true
|
||||
#trizen_arguments = "--devel"
|
||||
#pikaur_arguments = ""
|
||||
#pamac_arguments = "--no-devel"
|
||||
#enable_tlmgr = true
|
||||
#emerge_sync_flags = "-q"
|
||||
#emerge_update_flags = "-uDNa --with-bdeps=y world"
|
||||
#redhat_distro_sync = false
|
||||
#suse_dup = false
|
||||
#rpm_ostree = false
|
||||
#nix_arguments = "--flake"
|
||||
# dnf_arguments = "--refresh"
|
||||
|
||||
# aura_aur_arguments = "-kx"
|
||||
|
||||
# aura_pacman_arguments = ""
|
||||
# garuda_update_arguments = ""
|
||||
|
||||
# show_arch_news = true
|
||||
|
||||
# trizen_arguments = "--devel"
|
||||
|
||||
# pikaur_arguments = ""
|
||||
|
||||
# pamac_arguments = "--no-devel"
|
||||
|
||||
# enable_tlmgr = true
|
||||
|
||||
# emerge_sync_flags = "-q"
|
||||
|
||||
# emerge_update_flags = "-uDNa --with-bdeps=y world"
|
||||
|
||||
# redhat_distro_sync = false
|
||||
|
||||
# suse_dup = false
|
||||
|
||||
# rpm_ostree = false
|
||||
|
||||
# For Fedora/CentOS/RHEL Atomic variants, if `bootc` is available and this configuration entry is set to true, use
|
||||
# it to do the update - Will also supersede rpm-ostree if enabled
|
||||
# (default: false)
|
||||
# bootc = false
|
||||
|
||||
# nix_arguments = "--flake"
|
||||
|
||||
# nix_env_arguments = "--prebuilt-only"
|
||||
|
||||
# Extra Home Manager arguments
|
||||
# home_manager_arguments = ["--flake", "file"]
|
||||
|
||||
|
||||
[mandb]
|
||||
# Enable the mandb step (to update manual entries).
|
||||
# Mandb is updated in the background by a service on most systems by default.
|
||||
# (default: false)
|
||||
# enable = true
|
||||
|
||||
|
||||
[git]
|
||||
#max_concurrency = 5
|
||||
# How many repos to pull at max in parallel
|
||||
# max_concurrency = 5
|
||||
|
||||
# Additional git repositories to pull
|
||||
#repos = [
|
||||
# repos = [
|
||||
# "~/src/*/",
|
||||
# "~/.config/something"
|
||||
#]
|
||||
# ]
|
||||
|
||||
# Don't pull the predefined git repos
|
||||
#pull_predefined = false
|
||||
# pull_predefined = false
|
||||
|
||||
# Arguments to pass Git when pulling Repositories
|
||||
#arguments = "--rebase --autostash"
|
||||
# arguments = "--rebase --autostash"
|
||||
|
||||
|
||||
[windows]
|
||||
# Manually select Windows updates
|
||||
#accept_all_updates = false
|
||||
#open_remotes_in_new_terminal = true
|
||||
#wsl_update_pre_release = true
|
||||
#wsl_update_use_web_download = true
|
||||
# accept_all_updates = false
|
||||
|
||||
# Controls whether to automatically reboot the computer when updates are
|
||||
# installed that request it. (default: "no", allowed values: "yes", "no", "ask")
|
||||
# updates_auto_reboot = "yes"
|
||||
|
||||
# open_remotes_in_new_terminal = true
|
||||
|
||||
# wsl_update_pre_release = true
|
||||
|
||||
# wsl_update_use_web_download = true
|
||||
|
||||
# The default for winget_install_silently is true,
|
||||
# this example turns off silent install.
|
||||
# winget_install_silently = false
|
||||
|
||||
# Causes Topgrade to rename itself during the run to allow package managers
|
||||
# to upgrade it. Use this only if you installed Topgrade by using a package
|
||||
# manager such as Scoop or Cargo
|
||||
#self_rename = true
|
||||
# self_rename = true
|
||||
|
||||
# Use sudo to elevate privileges for the Windows Package Manager (winget)
|
||||
# Only use this option if you want to run the Winget step in sudo-mode.
|
||||
# Running winget in sudo-mode is generally not recommended, as not every
|
||||
# package supports installing / upgrading in sudo-mode and it may cause issues
|
||||
# with some packages or may even cause the Winget-step to fail.
|
||||
# If any problems occur, please try running Topgrade without this option first
|
||||
# before reporting an issue.
|
||||
# (default: false)
|
||||
# winget_use_sudo = true
|
||||
|
||||
|
||||
[npm]
|
||||
# Use sudo if the NPM directory isn't owned by the current user
|
||||
#use_sudo = true
|
||||
# use_sudo = true
|
||||
|
||||
|
||||
[yarn]
|
||||
# Run `yarn global upgrade` with `sudo`
|
||||
#use_sudo = true
|
||||
# use_sudo = true
|
||||
|
||||
|
||||
[deno]
|
||||
# Upgrade deno executable to the given version.
|
||||
# version = "stable"
|
||||
|
||||
|
||||
[vim]
|
||||
# For `vim-plug`, execute `PlugUpdate!` instead of `PlugUpdate`
|
||||
#force_plug_update = true
|
||||
# force_plug_update = true
|
||||
|
||||
|
||||
[firmware]
|
||||
# Offer to update firmware; if false just check for and display available updates
|
||||
#upgrade = true
|
||||
# upgrade = true
|
||||
|
||||
|
||||
[vagrant]
|
||||
# Vagrant directories
|
||||
#directories = []
|
||||
# directories = []
|
||||
|
||||
# power on vagrant boxes if needed
|
||||
#power_on = true
|
||||
# power_on = true
|
||||
|
||||
# Always suspend vagrant boxes instead of powering off
|
||||
#always_suspend = true
|
||||
# always_suspend = true
|
||||
|
||||
|
||||
[flatpak]
|
||||
# Use sudo for updating the system-wide installation
|
||||
#use_sudo = true
|
||||
# use_sudo = true
|
||||
|
||||
|
||||
[distrobox]
|
||||
#use_root = false
|
||||
#containers = ["archlinux-latest"]
|
||||
# use_root = false
|
||||
|
||||
# containers = ["archlinux-latest"]
|
||||
[containers]
|
||||
# Specify the containers to ignore while updating (Wildcard supported)
|
||||
# ignored_containers = ["ghcr.io/rancher-sandbox/rancher-desktop/rdx-proxy:latest", "docker.io*"]
|
||||
# Specify the runtime to use for containers (default: "docker", allowed values: "docker", "podman")
|
||||
# runtime = "podman"
|
||||
|
||||
[lensfun]
|
||||
# If disabled, Topgrade invokes `lensfun-update-data` without root privilege,
|
||||
# then the update will be only available to you. Otherwise, `sudo` is required,
|
||||
# and the update will be installed system-wide, i.e., available to all users.
|
||||
# (default: false)
|
||||
# use_sudo = false
|
||||
|
||||
[julia]
|
||||
# If disabled, Topgrade invokes julia with the --startup-file=no CLI option.
|
||||
#
|
||||
# This may be desirable to avoid loading outdated packages with "using" directives
|
||||
# in the startup file, which might cause the update run to fail.
|
||||
# (default: true)
|
||||
# startup_file = true
|
||||
|
||||
[zigup]
|
||||
# Version strings passed to zigup.
|
||||
# These may be pinned versions such as "0.13.0" or branches such as "master".
|
||||
# Each one will be updated in its own zigup invocation.
|
||||
# (default: ["master"])
|
||||
# target_versions = ["master", "0.13.0"]
|
||||
|
||||
# Specifies the directory that the zig files will be installed to.
|
||||
# If defined, passed with the --install-dir command line flag.
|
||||
# If not defined, zigup will use its default behaviour.
|
||||
# (default: not defined)
|
||||
# install_dir = "~/.zig"
|
||||
|
||||
# Specifies the path of the symlink which will be set to point at the default compiler version.
|
||||
# If defined, passed with the --path-link command line flag.
|
||||
# If not defined, zigup will use its default behaviour.
|
||||
# This is not meaningful if set_default is not enabled.
|
||||
# (default: not defined)
|
||||
# path_link = "~/.bin/zig"
|
||||
|
||||
# If enabled, run `zigup clean` after updating all versions.
|
||||
# If enabled, each updated version above will be marked with `zigup keep`.
|
||||
# (default: false)
|
||||
# cleanup = false
|
||||
|
||||
[vscode]
|
||||
# If this is set and is a non-empty string, it specifies the profile the
|
||||
# extensions should be updated for.
|
||||
# (default: this won't be set by default)
|
||||
# profile = ""
|
||||
|
||||
[pixi]
|
||||
# Show the release notes of the latest pixi release
|
||||
# during the pixi step
|
||||
# (default: false)
|
||||
# include_release_notes = false
|
||||
|
||||
Binary file not shown.
|
Before: 718 KiB
BIN
doc/topgrade_demo.gif
Normal file
Binary file not shown.
|
After: 4.1 MiB
1428
locales/app.yml
Normal file
File diff suppressed because it is too large
pyproject.toml
@@ -4,6 +4,7 @@ build-backend = "maturin"
|
||||
|
||||
[project]
|
||||
name = "topgrade"
|
||||
dynamic = ["version"]
|
||||
requires-python = ">=3.7"
|
||||
classifiers = [
|
||||
"Programming Language :: Rust",
|
||||
|
||||
2
rust-toolchain.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[toolchain]
|
||||
channel = "1.84.1"
|
||||
171
src/breaking_changes.rs
Normal file
@@ -0,0 +1,171 @@
|
||||
//! Inform the users of the breaking changes introduced in this major release.
|
||||
//!
|
||||
//! Print the breaking changes and possibly a migration guide when:
|
||||
//! 1. The Topgrade being executed is a new major release
|
||||
//! 2. This is the first launch of that major release
|
||||
|
||||
use crate::terminal::print_separator;
|
||||
#[cfg(windows)]
|
||||
use crate::WINDOWS_DIRS;
|
||||
#[cfg(unix)]
|
||||
use crate::XDG_DIRS;
|
||||
use color_eyre::eyre::Result;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use rust_i18n::t;
|
||||
use std::{
|
||||
env::var,
|
||||
fs::{read_to_string, OpenOptions},
|
||||
io::Write,
|
||||
path::PathBuf,
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
/// Version string x.y.z
|
||||
static VERSION_STR: &str = env!("CARGO_PKG_VERSION");
|
||||
|
||||
/// Version info
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Version {
|
||||
_major: u64,
|
||||
minor: u64,
|
||||
patch: u64,
|
||||
}
|
||||
|
||||
impl FromStr for Version {
|
||||
type Err = std::convert::Infallible;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
const NOT_SEMVER: &str = "Topgrade version is not semantic";
|
||||
const NOT_NUMBER: &str = "Topgrade version is not dot-separated numbers";
|
||||
|
||||
let mut iter = s.split('.').take(3);
|
||||
let major = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
|
||||
let minor = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
|
||||
let patch = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
|
||||
|
||||
// They cannot be all 0s
|
||||
assert!(
|
||||
!(major == 0 && minor == 0 && patch == 0),
|
||||
"Version numbers cannot be all 0s"
|
||||
);
|
||||
|
||||
Ok(Self {
|
||||
_major: major,
|
||||
minor,
|
||||
patch,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Version {
|
||||
/// True if this version is a new major release.
|
||||
pub(crate) fn is_new_major_release(&self) -> bool {
|
||||
// We have already checked that they cannot all be zeros, so `self.major`
|
||||
// is guaranteed to be non-zero.
|
||||
self.minor == 0 && self.patch == 0
|
||||
}
|
||||
}
|
||||
|
||||
/// Topgrade's breaking changes
|
||||
///
|
||||
/// We store them in the compiled binary.
|
||||
pub(crate) static BREAKINGCHANGES: &str = include_str!("../BREAKINGCHANGES.md");
|
||||
|
||||
/// Return platform's data directory.
|
||||
fn data_dir() -> PathBuf {
|
||||
#[cfg(unix)]
|
||||
return XDG_DIRS.data_dir();
|
||||
|
||||
#[cfg(windows)]
|
||||
return WINDOWS_DIRS.data_dir();
|
||||
}
|
||||
|
||||
/// Return Topgrade's keep file path.
|
||||
///
|
||||
/// keep file is a file under the data directory containing a major version
|
||||
/// number, it will be created on first run and is used to check if an execution
|
||||
/// of Topgrade is the first run of a major release, for more details, see
|
||||
/// `first_run_of_major_release()`.
|
||||
fn keep_file_path() -> PathBuf {
|
||||
let keep_file = "topgrade_keep";
|
||||
data_dir().join(keep_file)
|
||||
}
|
||||
|
||||
/// If environment variable `TOPGRADE_SKIP_BRKC_NOTIFY` is set to `true`, then
|
||||
/// we won't notify the user of the breaking changes.
|
||||
pub(crate) fn should_skip() -> bool {
|
||||
if let Ok(var) = var("TOPGRADE_SKIP_BRKC_NOTIFY") {
|
||||
return var.as_str() == "true";
|
||||
}
|
||||
|
||||
false
|
||||
}

/// True if this is the first execution of a major release.
pub(crate) fn first_run_of_major_release() -> Result<bool> {
    let version = VERSION_STR.parse::<Version>().expect("should be a valid version");
    let keep_file = keep_file_path();

    // disable this lint here as the current code has better readability
    #[allow(clippy::collapsible_if)]
    if version.is_new_major_release() {
        if !keep_file.exists() || read_to_string(&keep_file)? != VERSION_STR {
            return Ok(true);
        }
    }

    Ok(false)
}

/// Print breaking changes to the user.
pub(crate) fn print_breaking_changes() {
    let header = format!(
        "{}",
        t!("Topgrade {version_str} Breaking Changes", version_str = VERSION_STR)
    );
    print_separator(header);
    let contents = if BREAKINGCHANGES.is_empty() {
        t!("No Breaking changes").to_string()
    } else {
        BREAKINGCHANGES.to_string()
    };
    println!("{contents}\n");
}

/// This function will ONLY be executed when the user has confirmed the breaking
/// changes; once confirmed, we write the keep file, which means the first run
/// of this major release is finished.
pub(crate) fn write_keep_file() -> Result<()> {
    std::fs::create_dir_all(data_dir())?;
    let keep_file = keep_file_path();

    let mut file = OpenOptions::new()
        .create(true)
        .write(true)
        .truncate(true)
        .open(keep_file)?;
    let _ = file.write(VERSION_STR.as_bytes())?;

    Ok(())
}
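
// Putting the pieces together, the intended call order is roughly the
// following (main.rs, further down in this compare view, drives the real flow,
// including the "Continue?" prompt):
//
//     if !should_skip() && first_run_of_major_release()? {
//         print_breaking_changes();
//         // only once the user confirms:
//         write_keep_file()?;
//     }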

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn is_new_major_release_works() {
        let first_major_release: Version = "1.0.0".parse().unwrap();
        let under_dev: Version = "0.1.0".parse().unwrap();

        assert!(first_major_release.is_new_major_release());
        assert!(!under_dev.is_new_major_release());
    }

    #[test]
    #[should_panic(expected = "Version numbers cannot be all 0s")]
    fn invalid_version() {
        let all_0 = "0.0.0";
        all_0.parse::<Version>().unwrap();
    }
}

@@ -10,6 +10,8 @@ use color_eyre::eyre::Context;

use crate::error::TopgradeError;

use tracing::debug;

/// Like [`Output`], but UTF-8 decoded.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Utf8Output {
@@ -43,13 +45,13 @@ impl TryFrom<&Output> for Utf8Output {
    type Error = eyre::Error;

    fn try_from(Output { status, stdout, stderr }: &Output) -> Result<Self, Self::Error> {
        let stdout = String::from_utf8(stdout.to_vec()).map_err(|err| {
        let stdout = String::from_utf8(stdout.clone()).map_err(|err| {
            eyre!(
                "Stdout contained invalid UTF-8: {}",
                String::from_utf8_lossy(err.as_bytes())
            )
        })?;
        let stderr = String::from_utf8(stderr.to_vec()).map_err(|err| {
        let stderr = String::from_utf8(stderr.clone()).map_err(|err| {
            eyre!(
                "Stderr contained invalid UTF-8: {}",
                String::from_utf8_lossy(err.as_bytes())
@@ -147,6 +149,7 @@ pub trait CommandExt {
    /// Like [`Command::spawn`], but gives a nice error message if the command fails to
    /// execute.
    #[track_caller]
    #[allow(dead_code)]
    fn spawn_checked(&mut self) -> eyre::Result<Self::Child>;
}

@@ -183,7 +186,7 @@ impl CommandExt for Command {
        let err = TopgradeError::ProcessFailedWithOutput(program, output.status, stderr.into_owned());

        let ret = Err(err).with_context(|| message);
        tracing::debug!("Command failed: {ret:?}");
        debug!("Command failed: {ret:?}");
        ret
    }
}
@@ -203,7 +206,7 @@ impl CommandExt for Command {
        let (program, _) = get_program_and_args(self);
        let err = TopgradeError::ProcessFailed(program, status);
        let ret = Err(err).with_context(|| format!("Command failed: `{command}`"));
        tracing::debug!("Command failed: {ret:?}");
        debug!("Command failed: {ret:?}");
        ret
    }
}
@@ -239,6 +242,6 @@ fn format_program_and_args(cmd: &Command) -> String {

fn log(cmd: &Command) -> String {
    let command = format_program_and_args(cmd);
    tracing::debug!("Executing command `{command}`");
    debug!("Executing command `{command}`");
    command
}

src/config.rs (870): File diff suppressed because it is too large
@@ -11,9 +11,9 @@ pub fn interrupted() -> bool {
/// Clears the interrupted flag
pub fn unset_interrupted() {
    debug_assert!(INTERRUPTED.load(Ordering::SeqCst));
    INTERRUPTED.store(false, Ordering::SeqCst)
    INTERRUPTED.store(false, Ordering::SeqCst);
}

pub fn set_interrupted() {
    INTERRUPTED.store(true, Ordering::SeqCst)
    INTERRUPTED.store(true, Ordering::SeqCst);
}

@@ -1,21 +1,17 @@
//! SIGINT handling in Unix systems.
use crate::ctrlc::interrupted::set_interrupted;
use nix::sys::signal;
use nix::sys::signal::{sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal};

/// Handle SIGINT. Set the interruption flag.
extern "C" fn handle_sigint(_: i32) {
    set_interrupted()
    set_interrupted();
}

/// Set the necessary signal handlers.
/// The function panics on failure.
pub fn set_handler() {
    let sig_action = signal::SigAction::new(
        signal::SigHandler::Handler(handle_sigint),
        signal::SaFlags::empty(),
        signal::SigSet::empty(),
    );
    let sig_action = SigAction::new(SigHandler::Handler(handle_sigint), SaFlags::empty(), SigSet::empty());
    unsafe {
        signal::sigaction(signal::SIGINT, &sig_action).unwrap();
        sigaction(Signal::SIGINT, &sig_action).unwrap();
    }
}

@@ -1,21 +1,21 @@
//! A stub for Ctrl + C handling.
use crate::ctrlc::interrupted::set_interrupted;
use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE};
use winapi::um::consoleapi::SetConsoleCtrlHandler;
use winapi::um::wincon::CTRL_C_EVENT;
use tracing::error;
use windows::core::BOOL;
use windows::Win32::System::Console::{SetConsoleCtrlHandler, CTRL_C_EVENT};

extern "system" fn handler(ctrl_type: DWORD) -> BOOL {
extern "system" fn handler(ctrl_type: u32) -> BOOL {
    match ctrl_type {
        CTRL_C_EVENT => {
            set_interrupted();
            TRUE
            true.into()
        }
        _ => FALSE,
        _ => false.into(),
    }
}

pub fn set_handler() {
    if 0 == unsafe { SetConsoleCtrlHandler(Some(handler), TRUE) } {
        tracing::error!("Cannot set a control C handler")
    if let Err(e) = unsafe { SetConsoleCtrlHandler(Some(handler), true) } {
        error!("Cannot set a control C handler: {e}")
    }
}

src/error.rs (112)
@@ -1,41 +1,129 @@
|
||||
use std::process::ExitStatus;
|
||||
use std::{fmt::Display, process::ExitStatus};
|
||||
|
||||
use rust_i18n::t;
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::sudo::SudoKind;
|
||||
|
||||
#[derive(Error, Debug, PartialEq, Eq)]
|
||||
pub enum TopgradeError {
|
||||
#[error("`{0}` failed: {1}")]
|
||||
ProcessFailed(String, ExitStatus),
|
||||
|
||||
#[error("`{0}` failed: {1}")]
|
||||
ProcessFailedWithOutput(String, ExitStatus, String),
|
||||
|
||||
#[error("Unknown Linux Distribution")]
|
||||
#[cfg(target_os = "linux")]
|
||||
UnknownLinuxDistribution,
|
||||
|
||||
#[error("File \"/etc/os-release\" does not exist or is empty")]
|
||||
#[cfg(target_os = "linux")]
|
||||
EmptyOSReleaseFile,
|
||||
|
||||
#[error("Failed getting the system package manager")]
|
||||
#[cfg(target_os = "linux")]
|
||||
FailedGettingPackageManager,
|
||||
}
|
||||
|
||||
impl Display for TopgradeError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
TopgradeError::ProcessFailed(process, exit_status) => {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
t!(
|
||||
"`{process}` failed: {exit_status}",
|
||||
process = process,
|
||||
exit_status = exit_status
|
||||
)
|
||||
)
|
||||
}
|
||||
TopgradeError::ProcessFailedWithOutput(process, exit_status, output) => {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
t!(
|
||||
"`{process}` failed: {exit_status} with {output}",
|
||||
process = process,
|
||||
exit_status = exit_status,
|
||||
output = output
|
||||
)
|
||||
)
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
TopgradeError::UnknownLinuxDistribution => write!(f, "{}", t!("Unknown Linux Distribution")),
|
||||
#[cfg(target_os = "linux")]
|
||||
TopgradeError::EmptyOSReleaseFile => {
|
||||
write!(f, "{}", t!("File \"/etc/os-release\" does not exist or is empty"))
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
TopgradeError::FailedGettingPackageManager => {
|
||||
write!(f, "{}", t!("Failed getting the system package manager"))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
#[error("A step failed")]
|
||||
pub struct StepFailed;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
#[error("Dry running")]
|
||||
pub struct DryRun();
|
||||
impl Display for StepFailed {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", t!("A step failed"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub struct UnsupportedSudo<'a> {
|
||||
pub sudo_kind: SudoKind,
|
||||
pub option: &'a str,
|
||||
}
|
||||
|
||||
impl Display for UnsupportedSudo<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
t!(
|
||||
"{sudo_kind} does not support the {option} option",
|
||||
sudo_kind = self.sudo_kind,
|
||||
option = self.option
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub struct MissingSudo();
|
||||
|
||||
impl Display for MissingSudo {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", t!("Could not find sudo"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub struct DryRun();
|
||||
|
||||
impl Display for DryRun {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", t!("Dry running"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
#[error("{0}")]
|
||||
pub struct SkipStep(pub String);
|
||||
|
||||
impl Display for SkipStep {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(windows, feature = "self-update"))]
|
||||
#[derive(Error, Debug)]
|
||||
#[error("Topgrade Upgraded")]
|
||||
pub struct Upgraded(pub ExitStatus);
|
||||
|
||||
#[cfg(all(windows, feature = "self-update"))]
|
||||
impl Display for Upgraded {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", t!("Topgrade Upgraded"))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,18 +1,52 @@
|
||||
#![allow(dead_code)]
|
||||
use crate::executor::RunType;
|
||||
use crate::git::Git;
|
||||
use crate::sudo::Sudo;
|
||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
||||
use crate::{config::Config, executor::Executor};
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
use std::env::var;
|
||||
use std::path::Path;
|
||||
use std::sync::Mutex;
|
||||
use std::ffi::OsStr;
|
||||
use std::process::Command;
|
||||
use std::sync::{LazyLock, Mutex};
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::error::MissingSudo;
|
||||
use crate::executor::{DryCommand, Executor};
|
||||
use crate::powershell::Powershell;
|
||||
#[cfg(target_os = "linux")]
|
||||
use crate::steps::linux::Distribution;
|
||||
use crate::sudo::Sudo;
|
||||
use crate::utils::require_option;
|
||||
|
||||
/// An enum telling whether Topgrade should perform dry runs or actually perform the steps.
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub enum RunType {
|
||||
/// Executing commands will just print the command with its argument.
|
||||
Dry,
|
||||
|
||||
/// Executing commands will perform actual execution.
|
||||
Wet,
|
||||
}
|
||||
|
||||
impl RunType {
|
||||
/// Create a new instance from a boolean telling whether to dry run.
|
||||
pub fn new(dry_run: bool) -> Self {
|
||||
if dry_run {
|
||||
RunType::Dry
|
||||
} else {
|
||||
RunType::Wet
|
||||
}
|
||||
}
|
||||
|
||||
/// Tells whether we're performing a dry run.
|
||||
pub fn dry(self) -> bool {
|
||||
match self {
|
||||
RunType::Dry => true,
|
||||
RunType::Wet => false,
|
||||
}
|
||||
}
|
||||
}
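
// A minimal usage sketch for the relocated `RunType` (illustrative only; the
// surrounding diff is the authoritative definition):
//
//     let run_type = RunType::new(/* dry_run */ true);
//     assert!(run_type.dry());
//     assert!(!RunType::new(false).dry());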
|
||||
|
||||
pub struct ExecutionContext<'a> {
|
||||
run_type: RunType,
|
||||
sudo: Option<Sudo>,
|
||||
git: &'a Git,
|
||||
config: &'a Config,
|
||||
/// Name of a tmux session to execute commands in, if any.
|
||||
/// This is used in `./steps/remote/ssh.rs`, where we want to run `topgrade` in a new
|
||||
@@ -20,38 +54,55 @@ pub struct ExecutionContext<'a> {
|
||||
tmux_session: Mutex<Option<String>>,
|
||||
/// True if topgrade is running under ssh.
|
||||
under_ssh: bool,
|
||||
#[cfg(target_os = "linux")]
|
||||
distribution: &'a Result<Distribution>,
|
||||
powershell: LazyLock<Option<Powershell>>,
|
||||
}
|
||||
|
||||
impl<'a> ExecutionContext<'a> {
|
||||
pub fn new(run_type: RunType, sudo: Option<Sudo>, git: &'a Git, config: &'a Config) -> Self {
|
||||
pub fn new(
|
||||
run_type: RunType,
|
||||
sudo: Option<Sudo>,
|
||||
config: &'a Config,
|
||||
#[cfg(target_os = "linux")] distribution: &'a Result<Distribution>,
|
||||
) -> Self {
|
||||
let under_ssh = var("SSH_CLIENT").is_ok() || var("SSH_TTY").is_ok();
|
||||
Self {
|
||||
run_type,
|
||||
sudo,
|
||||
git,
|
||||
config,
|
||||
tmux_session: Mutex::new(None),
|
||||
under_ssh,
|
||||
#[cfg(target_os = "linux")]
|
||||
distribution,
|
||||
powershell: LazyLock::new(Powershell::new),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn execute_elevated(&self, command: &Path, interactive: bool) -> Result<Executor> {
|
||||
let sudo = require_option(self.sudo.as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
Ok(sudo.execute_elevated(self, command, interactive))
|
||||
/// Create an instance of `Executor` that should run `program`.
|
||||
pub fn execute<S: AsRef<OsStr>>(&self, program: S) -> Executor {
|
||||
match self.run_type {
|
||||
RunType::Dry => Executor::Dry(DryCommand::new(program)),
|
||||
RunType::Wet => Executor::Wet(Command::new(program)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_type(&self) -> RunType {
|
||||
self.run_type
|
||||
}
|
||||
|
||||
pub fn git(&self) -> &Git {
|
||||
self.git
|
||||
}
|
||||
|
||||
pub fn sudo(&self) -> &Option<Sudo> {
|
||||
&self.sudo
|
||||
}
|
||||
|
||||
pub fn require_sudo(&self) -> Result<&Sudo> {
|
||||
if let Some(value) = self.sudo() {
|
||||
Ok(value)
|
||||
} else {
|
||||
Err(MissingSudo().into())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn config(&self) -> &Config {
|
||||
self.config
|
||||
}
|
||||
@@ -67,4 +118,17 @@ impl<'a> ExecutionContext<'a> {
|
||||
pub fn get_tmux_session(&self) -> Option<String> {
|
||||
self.tmux_session.lock().unwrap().clone()
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
pub fn distribution(&self) -> &Result<Distribution> {
|
||||
self.distribution
|
||||
}
|
||||
|
||||
pub fn powershell(&self) -> &Option<Powershell> {
|
||||
&self.powershell
|
||||
}
|
||||
|
||||
pub fn require_powershell(&self) -> Result<&Powershell> {
|
||||
require_option(self.powershell.as_ref(), t!("Powershell is not installed").to_string())
|
||||
}
|
||||
}
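
// Sketch of how a step is expected to use the context above. `some_step` is a
// hypothetical helper, not a function from the diff; `require_sudo()` surfaces
// `MissingSudo`, which the runner later maps to `StepResult::SkippedMissingSudo`
// instead of a hard failure:
//
//     fn some_step(ctx: &ExecutionContext) -> Result<()> {
//         let _sudo = ctx.require_sudo()?;
//         let mut command = ctx.execute("some-package-manager");
//         // configure arguments on `command` and run it...
//         Ok(())
//     }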
|
||||
|
||||
@@ -4,51 +4,12 @@ use std::path::Path;
|
||||
use std::process::{Child, Command, ExitStatus, Output};
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::error::DryRun;
|
||||
|
||||
/// An enum telling whether Topgrade should perform dry runs or actually perform the steps.
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub enum RunType {
|
||||
/// Executing commands will just print the command with its argument.
|
||||
Dry,
|
||||
|
||||
/// Executing commands will perform actual execution.
|
||||
Wet,
|
||||
}
|
||||
|
||||
impl RunType {
|
||||
/// Create a new instance from a boolean telling whether to dry run.
|
||||
pub fn new(dry_run: bool) -> Self {
|
||||
if dry_run {
|
||||
RunType::Dry
|
||||
} else {
|
||||
RunType::Wet
|
||||
}
|
||||
}
|
||||
|
||||
/// Create an instance of `Executor` that should run `program`.
|
||||
pub fn execute<S: AsRef<OsStr>>(self, program: S) -> Executor {
|
||||
match self {
|
||||
RunType::Dry => Executor::Dry(DryCommand {
|
||||
program: program.as_ref().into(),
|
||||
..Default::default()
|
||||
}),
|
||||
RunType::Wet => Executor::Wet(Command::new(program)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Tells whether we're performing a dry run.
|
||||
pub fn dry(self) -> bool {
|
||||
match self {
|
||||
RunType::Dry => true,
|
||||
RunType::Wet => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An enum providing a similar interface to `std::process::Command`.
|
||||
/// If the enum is set to `Wet`, execution will be performed with `std::process::Command`.
|
||||
/// If the enum is set to `Dry`, execution will just print the command with its arguments.
|
||||
@@ -151,7 +112,10 @@ impl Executor {
|
||||
let result = match self {
|
||||
Executor::Wet(c) => {
|
||||
debug!("Running {:?}", c);
|
||||
c.spawn_checked().map(ExecutorChild::Wet)?
|
||||
// We should use `spawn()` here rather than `spawn_checked()` since
|
||||
// their semantics and behaviors are different.
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
c.spawn().map(ExecutorChild::Wet)?
|
||||
}
|
||||
Executor::Dry(c) => {
|
||||
c.dry_run();
|
||||
@@ -165,7 +129,12 @@ impl Executor {
|
||||
/// See `std::process::Command::output`
|
||||
pub fn output(&mut self) -> Result<ExecutorOutput> {
|
||||
match self {
|
||||
Executor::Wet(c) => Ok(ExecutorOutput::Wet(c.output_checked()?)),
|
||||
Executor::Wet(c) => {
|
||||
// We should use `output()` here rather than `output_checked()` since
|
||||
// their semantics and behaviors are different.
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
Ok(ExecutorOutput::Wet(c.output()?))
|
||||
}
|
||||
Executor::Dry(c) => {
|
||||
c.dry_run();
|
||||
Ok(ExecutorOutput::Dry)
|
||||
@@ -179,7 +148,7 @@ impl Executor {
|
||||
pub fn status_checked_with_codes(&mut self, codes: &[i32]) -> Result<()> {
|
||||
match self {
|
||||
Executor::Wet(c) => c.status_checked_with(|status| {
|
||||
if status.success() || status.code().as_ref().map(|c| codes.contains(c)).unwrap_or(false) {
|
||||
if status.success() || status.code().as_ref().is_some_and(|c| codes.contains(c)) {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(())
|
||||
@@ -198,8 +167,7 @@ pub enum ExecutorOutput {
|
||||
Dry,
|
||||
}
|
||||
|
||||
/// A struct represending a command. Trying to execute it will just print its arguments.
|
||||
#[derive(Default)]
|
||||
/// A struct representing a command. Trying to execute it will just print its arguments.
|
||||
pub struct DryCommand {
|
||||
program: OsString,
|
||||
args: Vec<OsString>,
|
||||
@@ -207,19 +175,30 @@ pub struct DryCommand {
|
||||
}
|
||||
|
||||
impl DryCommand {
|
||||
pub fn new<S: AsRef<OsStr>>(program: S) -> Self {
|
||||
Self {
|
||||
program: program.as_ref().to_os_string(),
|
||||
args: Vec::new(),
|
||||
directory: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn dry_run(&self) {
|
||||
print!(
|
||||
"Dry running: {} {}",
|
||||
self.program.to_string_lossy(),
|
||||
shell_words::join(
|
||||
"{}",
|
||||
t!(
|
||||
"Dry running: {program_name} {arguments}",
|
||||
program_name = self.program.to_string_lossy(),
|
||||
arguments = shell_words::join(
|
||||
self.args
|
||||
.iter()
|
||||
.map(|a| String::from(a.to_string_lossy()))
|
||||
.collect::<Vec<String>>()
|
||||
)
|
||||
)
|
||||
);
|
||||
match &self.directory {
|
||||
Some(dir) => println!(" in {}", dir.to_string_lossy()),
|
||||
Some(dir) => println!(" {}", t!("in {directory}", directory = dir.to_string_lossy())),
|
||||
None => println!(),
|
||||
};
|
||||
}
|
||||
@@ -227,6 +206,7 @@ impl DryCommand {
|
||||
|
||||
/// The Result of spawn. Contains an actual `std::process::Child` if executed by a wet command.
|
||||
pub enum ExecutorChild {
|
||||
#[allow(unused)] // this type has not been used
|
||||
Wet(Child),
|
||||
Dry,
|
||||
}
|
||||
|
||||
src/main.rs (541)
@@ -6,51 +6,84 @@ use std::path::PathBuf;
|
||||
use std::process::exit;
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::breaking_changes::{first_run_of_major_release, print_breaking_changes, should_skip, write_keep_file};
|
||||
use clap::CommandFactory;
|
||||
use clap::{crate_version, Parser};
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use console::Key;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
#[cfg(windows)]
|
||||
use etcetera::base_strategy::Windows;
|
||||
use etcetera::base_strategy::{BaseStrategy, Xdg};
|
||||
use once_cell::sync::Lazy;
|
||||
#[cfg(unix)]
|
||||
use etcetera::base_strategy::Xdg;
|
||||
use rust_i18n::{i18n, t};
|
||||
use std::sync::LazyLock;
|
||||
use tracing::debug;
|
||||
|
||||
use self::config::{CommandLineArgs, Config, Step};
|
||||
use self::config::{CommandLineArgs, Config};
|
||||
use self::error::StepFailed;
|
||||
#[cfg(all(windows, feature = "self-update"))]
|
||||
use self::error::Upgraded;
|
||||
use self::runner::StepResult;
|
||||
#[allow(clippy::wildcard_imports)]
|
||||
use self::steps::{remote::*, *};
|
||||
use self::sudo::{Sudo, SudoCreateError, SudoKind};
|
||||
#[allow(clippy::wildcard_imports)]
|
||||
use self::terminal::*;
|
||||
use self::utils::{install_color_eyre, install_tracing, is_elevated, update_tracing};
|
||||
|
||||
mod breaking_changes;
|
||||
mod command;
|
||||
mod config;
|
||||
mod ctrlc;
|
||||
mod error;
|
||||
mod execution_context;
|
||||
mod executor;
|
||||
mod report;
|
||||
mod runner;
|
||||
#[cfg(windows)]
|
||||
mod self_renamer;
|
||||
#[cfg(feature = "self-update")]
|
||||
mod self_update;
|
||||
mod step;
|
||||
mod steps;
|
||||
mod sudo;
|
||||
mod terminal;
|
||||
mod utils;
|
||||
|
||||
pub static HOME_DIR: Lazy<PathBuf> = Lazy::new(|| home::home_dir().expect("No home directory"));
|
||||
pub static XDG_DIRS: Lazy<Xdg> = Lazy::new(|| Xdg::new().expect("No home directory"));
|
||||
#[cfg(windows)]
|
||||
pub static WINDOWS_DIRS: Lazy<Windows> = Lazy::new(|| Windows::new().expect("No home directory"));
|
||||
pub(crate) static HOME_DIR: LazyLock<PathBuf> = LazyLock::new(|| home::home_dir().expect("No home directory"));
|
||||
#[cfg(unix)]
|
||||
pub(crate) static XDG_DIRS: LazyLock<Xdg> = LazyLock::new(|| Xdg::new().expect("No home directory"));
|
||||
|
||||
#[cfg(windows)]
|
||||
pub(crate) static WINDOWS_DIRS: LazyLock<Windows> = LazyLock::new(|| Windows::new().expect("No home directory"));
|
||||
|
||||
// Init and load the i18n files
|
||||
i18n!("locales", fallback = "en");
|
||||
|
||||
#[allow(clippy::too_many_lines)]
|
||||
fn run() -> Result<()> {
|
||||
color_eyre::install()?;
|
||||
install_color_eyre()?;
|
||||
ctrlc::set_handler();
|
||||
|
||||
let opt = CommandLineArgs::parse();
|
||||
// Set up the logger with the filter directives from:
|
||||
// 1. CLI option `--log-filter`
|
||||
// 2. `debug` if the `--verbose` option is present
|
||||
// We do this because we need our logger to work while loading the
|
||||
// configuration file.
|
||||
//
|
||||
// When the configuration file is loaded, update the logger with the full
|
||||
// filter directives.
|
||||
//
|
||||
// For more info, see the comments in `CommandLineArgs::tracing_filter_directives()`
|
||||
// and `Config::tracing_filter_directives()`.
|
||||
let reload_handle = install_tracing(&opt.tracing_filter_directives())?;
|
||||
|
||||
// Get current system locale and set it as the default locale
|
||||
let system_locale = sys_locale::get_locale().unwrap_or("en".to_string());
|
||||
rust_i18n::set_locale(&system_locale);
|
||||
debug!("Current system locale is {system_locale}");
|
||||
|
||||
if let Some(shell) = opt.gen_completion {
|
||||
let cmd = &mut CommandLineArgs::command();
|
||||
@@ -64,12 +97,10 @@ fn run() -> Result<()> {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
install_tracing(&opt.tracing_filter_directives())?;
|
||||
|
||||
for env in opt.env_variables() {
|
||||
let mut splitted = env.split('=');
|
||||
let var = splitted.next().unwrap();
|
||||
let value = splitted.next().unwrap();
|
||||
let mut parts = env.split('=');
|
||||
let var = parts.next().unwrap();
|
||||
let value = parts.next().unwrap();
|
||||
env::set_var(var, value);
|
||||
}
|
||||
|
||||
@@ -84,53 +115,89 @@ fn run() -> Result<()> {
|
||||
}
|
||||
|
||||
let config = Config::load(opt)?;
|
||||
// Update the logger with the full filter directives.
|
||||
update_tracing(&reload_handle, &config.tracing_filter_directives())?;
|
||||
set_title(config.set_title());
|
||||
display_time(config.display_time());
|
||||
set_desktop_notifications(config.notify_each_step());
|
||||
|
||||
debug!("Version: {}", crate_version!());
|
||||
debug!("OS: {}", env!("TARGET"));
|
||||
debug!("{:?}", std::env::args());
|
||||
debug!("{:?}", env::args());
|
||||
debug!("Binary path: {:?}", std::env::current_exe());
|
||||
debug!("Self Update: {:?}", cfg!(feature = "self-update"));
|
||||
debug!("self-update Feature Enabled: {:?}", cfg!(feature = "self-update"));
|
||||
debug!("Configuration: {:?}", config);
|
||||
|
||||
if config.run_in_tmux() && env::var("TOPGRADE_INSIDE_TMUX").is_err() {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
tmux::run_in_tmux(config.tmux_arguments()?)?;
|
||||
tmux::run_in_tmux(config.tmux_config()?)?;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
let git = git::Git::new();
|
||||
let mut git_repos = git::Repositories::new(&git);
|
||||
let powershell = powershell::Powershell::new();
|
||||
let should_run_powershell = powershell.profile().is_some() && config.should_run(Step::Powershell);
|
||||
let emacs = emacs::Emacs::new();
|
||||
let elevated = is_elevated();
|
||||
|
||||
#[cfg(unix)]
|
||||
if !config.allow_root() && elevated {
|
||||
print_warning(t!(
|
||||
"Topgrade should not be run as root, it will run commands with sudo or equivalent where needed."
|
||||
));
|
||||
if !prompt_yesno(&t!("Continue?"))? {
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
let sudo = match config.sudo_command() {
|
||||
Some(kind) => Sudo::new(kind),
|
||||
None if elevated => Sudo::new(SudoKind::Null),
|
||||
None => Sudo::detect(),
|
||||
};
|
||||
debug!("Sudo: {:?}", sudo);
|
||||
|
||||
let (sudo, sudo_err) = match sudo {
|
||||
Ok(sudo) => (Some(sudo), None),
|
||||
Err(e) => (None, Some(e)),
|
||||
};
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
let distribution = linux::Distribution::detect();
|
||||
|
||||
let sudo = config.sudo_command().map_or_else(sudo::Sudo::detect, sudo::Sudo::new);
|
||||
let run_type = executor::RunType::new(config.dry_run());
|
||||
let ctx = execution_context::ExecutionContext::new(run_type, sudo, &git, &config);
|
||||
let run_type = execution_context::RunType::new(config.dry_run());
|
||||
let ctx = execution_context::ExecutionContext::new(
|
||||
run_type,
|
||||
sudo,
|
||||
&config,
|
||||
#[cfg(target_os = "linux")]
|
||||
&distribution,
|
||||
);
|
||||
let mut runner = runner::Runner::new(&ctx);
|
||||
|
||||
// If
|
||||
//
|
||||
// 1. the breaking changes notification shouldn't be skipped
|
||||
// 2. this is the first execution of a major release
|
||||
//
|
||||
// inform user of breaking changes
|
||||
if !should_skip() && first_run_of_major_release()? {
|
||||
print_breaking_changes();
|
||||
|
||||
if prompt_yesno(&t!("Continue?"))? {
|
||||
write_keep_file()?;
|
||||
} else {
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Self-Update step, this will execute only if:
|
||||
// 1. the `self-update` feature is enabled
|
||||
// 2. it is not disabled from configuration (env var/CLI opt/file)
|
||||
#[cfg(feature = "self-update")]
|
||||
{
|
||||
let config_self_upgrade = env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() && !config.no_self_update();
|
||||
let should_self_update = env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() && !config.no_self_update();
|
||||
|
||||
if !run_type.dry() && config_self_upgrade {
|
||||
let result = self_update::self_update();
|
||||
|
||||
if let Err(e) = &result {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
if e.downcast_ref::<Upgraded>().is_some() {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
print_warning(format!("Self update error: {e}"));
|
||||
}
|
||||
if should_self_update {
|
||||
runner.execute(step::Step::SelfUpdate, "Self Update", || self_update::self_update(&ctx))?;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -153,341 +220,89 @@ fn run() -> Result<()> {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(topgrades) = config.remote_topgrades() {
|
||||
for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(t)) {
|
||||
runner.execute(Step::Remotes, format!("Remote ({remote_topgrade})"), || {
|
||||
ssh::ssh_step(&ctx, remote_topgrade)
|
||||
})?;
|
||||
}
|
||||
for step in step::default_steps() {
|
||||
step.run(&mut runner, &ctx)?
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
runner.execute(Step::Wsl, "WSL", || windows::run_wsl_topgrade(&ctx))?;
|
||||
runner.execute(Step::WslUpdate, "WSL", || windows::update_wsl(&ctx))?;
|
||||
runner.execute(Step::Chocolatey, "Chocolatey", || windows::run_chocolatey(&ctx))?;
|
||||
runner.execute(Step::Scoop, "Scoop", || windows::run_scoop(&ctx))?;
|
||||
runner.execute(Step::Winget, "Winget", || windows::run_winget(&ctx))?;
|
||||
runner.execute(Step::System, "Windows update", || windows::windows_update(&ctx))?;
|
||||
}
|
||||
let mut failed = false;
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// NOTE: Due to breaking `nu` updates, `packer.nu` needs to be updated before `nu` get updated
|
||||
// by other package managers.
|
||||
runner.execute(Step::Shell, "packer.nu", || linux::run_packer_nu(&ctx))?;
|
||||
let report = runner.report();
|
||||
if !report.is_empty() {
|
||||
print_separator(t!("Summary"));
|
||||
|
||||
match &distribution {
|
||||
Ok(distribution) => {
|
||||
runner.execute(Step::System, "System update", || distribution.upgrade(&ctx))?;
|
||||
}
|
||||
Err(e) => {
|
||||
println!("Error detecting current distribution: {e}");
|
||||
}
|
||||
}
|
||||
runner.execute(Step::ConfigUpdate, "config-update", || linux::run_config_update(&ctx))?;
|
||||
let mut skipped_missing_sudo = false;
|
||||
|
||||
runner.execute(Step::BrewFormula, "Brew", || {
|
||||
unix::run_brew_formula(&ctx, unix::BrewVariant::Path)
|
||||
})?;
|
||||
|
||||
runner.execute(Step::AM, "am", || linux::run_am(&ctx))?;
|
||||
runner.execute(Step::AppMan, "appman", || linux::run_appman(&ctx))?;
|
||||
runner.execute(Step::DebGet, "deb-get", || linux::run_deb_get(&ctx))?;
|
||||
runner.execute(Step::Toolbx, "toolbx", || toolbx::run_toolbx(&ctx))?;
|
||||
runner.execute(Step::Flatpak, "Flatpak", || linux::run_flatpak(&ctx))?;
|
||||
runner.execute(Step::Snap, "snap", || linux::run_snap(&ctx))?;
|
||||
runner.execute(Step::Pacstall, "pacstall", || linux::run_pacstall(&ctx))?;
|
||||
runner.execute(Step::Pacdef, "pacdef", || linux::run_pacdef(&ctx))?;
|
||||
runner.execute(Step::Protonup, "protonup", || linux::run_protonup_update(&ctx))?;
|
||||
runner.execute(Step::Distrobox, "distrobox", || linux::run_distrobox_update(&ctx))?;
|
||||
runner.execute(Step::DkpPacman, "dkp-pacman", || linux::run_dkp_pacman_update(&ctx))?;
|
||||
runner.execute(Step::System, "pihole", || linux::run_pihole_update(&ctx))?;
|
||||
runner.execute(Step::Firmware, "Firmware upgrades", || linux::run_fwupdmgr(&ctx))?;
|
||||
runner.execute(Step::Restarts, "Restarts", || linux::run_needrestart(&ctx))?;
|
||||
for (key, result) in report {
|
||||
if !failed && result.failed() {
|
||||
failed = true;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
runner.execute(Step::BrewFormula, "Brew (ARM)", || {
|
||||
unix::run_brew_formula(&ctx, unix::BrewVariant::MacArm)
|
||||
})?;
|
||||
runner.execute(Step::BrewFormula, "Brew (Intel)", || {
|
||||
unix::run_brew_formula(&ctx, unix::BrewVariant::MacIntel)
|
||||
})?;
|
||||
runner.execute(Step::BrewFormula, "Brew", || {
|
||||
unix::run_brew_formula(&ctx, unix::BrewVariant::Path)
|
||||
})?;
|
||||
runner.execute(Step::BrewCask, "Brew Cask (ARM)", || {
|
||||
unix::run_brew_cask(&ctx, unix::BrewVariant::MacArm)
|
||||
})?;
|
||||
runner.execute(Step::BrewCask, "Brew Cask (Intel)", || {
|
||||
unix::run_brew_cask(&ctx, unix::BrewVariant::MacIntel)
|
||||
})?;
|
||||
runner.execute(Step::BrewCask, "Brew Cask", || {
|
||||
unix::run_brew_cask(&ctx, unix::BrewVariant::Path)
|
||||
})?;
|
||||
runner.execute(Step::Macports, "MacPorts", || macos::run_macports(&ctx))?;
|
||||
runner.execute(Step::Sparkle, "Sparkle", || macos::run_sparkle(&ctx))?;
|
||||
runner.execute(Step::Mas, "App Store", || macos::run_mas(&ctx))?;
|
||||
runner.execute(Step::System, "System upgrade", || macos::upgrade_macos(&ctx))?;
|
||||
if let StepResult::SkippedMissingSudo = result {
|
||||
skipped_missing_sudo = true;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "dragonfly")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "DragonFly BSD Packages", || {
|
||||
dragonfly::upgrade_packages(&ctx)
|
||||
})?;
|
||||
dragonfly::audit_packages(&ctx)?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "FreeBSD Packages", || freebsd::upgrade_packages(&ctx))?;
|
||||
runner.execute(Step::System, "FreeBSD Upgrade", || freebsd::upgrade_freebsd(&ctx))?;
|
||||
freebsd::audit_packages(&ctx)?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "openbsd")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "OpenBSD Packages", || openbsd::upgrade_packages(&ctx))?;
|
||||
runner.execute(Step::System, "OpenBSD Upgrade", || openbsd::upgrade_openbsd(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "android")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "Termux Packages", || android::upgrade_packages(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
runner.execute(Step::Yadm, "yadm", || unix::run_yadm(&ctx))?;
|
||||
runner.execute(Step::Nix, "nix", || unix::run_nix(&ctx))?;
|
||||
runner.execute(Step::Guix, "guix", || unix::run_guix(&ctx))?;
|
||||
runner.execute(Step::HomeManager, "home-manager", || unix::run_home_manager(&ctx))?;
|
||||
runner.execute(Step::Asdf, "asdf", || unix::run_asdf(&ctx))?;
|
||||
runner.execute(Step::Pkgin, "pkgin", || unix::run_pkgin(&ctx))?;
|
||||
runner.execute(Step::Bun, "bun", || unix::run_bun(&ctx))?;
|
||||
runner.execute(Step::Shell, "zr", || zsh::run_zr(&ctx))?;
|
||||
runner.execute(Step::Shell, "antibody", || zsh::run_antibody(&ctx))?;
|
||||
runner.execute(Step::Shell, "antidote", || zsh::run_antidote(&ctx))?;
|
||||
runner.execute(Step::Shell, "antigen", || zsh::run_antigen(&ctx))?;
|
||||
runner.execute(Step::Shell, "zgenom", || zsh::run_zgenom(&ctx))?;
|
||||
runner.execute(Step::Shell, "zplug", || zsh::run_zplug(&ctx))?;
|
||||
runner.execute(Step::Shell, "zinit", || zsh::run_zinit(&ctx))?;
|
||||
runner.execute(Step::Shell, "zi", || zsh::run_zi(&ctx))?;
|
||||
runner.execute(Step::Shell, "zim", || zsh::run_zim(&ctx))?;
|
||||
runner.execute(Step::Shell, "oh-my-zsh", || zsh::run_oh_my_zsh(&ctx))?;
|
||||
runner.execute(Step::Shell, "oh-my-bash", || unix::run_oh_my_bash(&ctx))?;
|
||||
runner.execute(Step::Shell, "fisher", || unix::run_fisher(&ctx))?;
|
||||
runner.execute(Step::Shell, "bash-it", || unix::run_bashit(&ctx))?;
|
||||
runner.execute(Step::Shell, "oh-my-fish", || unix::run_oh_my_fish(&ctx))?;
|
||||
runner.execute(Step::Shell, "fish-plug", || unix::run_fish_plug(&ctx))?;
|
||||
runner.execute(Step::Shell, "fundle", || unix::run_fundle(&ctx))?;
|
||||
runner.execute(Step::Tmux, "tmux", || tmux::run_tpm(&ctx))?;
|
||||
runner.execute(Step::Tldr, "TLDR", || unix::run_tldr(&ctx))?;
|
||||
runner.execute(Step::Pearl, "pearl", || unix::run_pearl(&ctx))?;
|
||||
#[cfg(not(any(target_os = "macos", target_os = "android")))]
|
||||
runner.execute(Step::GnomeShellExtensions, "Gnome Shell Extensions", || {
|
||||
unix::upgrade_gnome_extensions(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Sdkman, "SDKMAN!", || unix::run_sdkman(&ctx))?;
|
||||
runner.execute(Step::Rcm, "rcm", || unix::run_rcm(&ctx))?;
|
||||
runner.execute(Step::Maza, "maza", || unix::run_maza(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(not(any(
|
||||
target_os = "freebsd",
|
||||
target_os = "openbsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "dragonfly"
|
||||
)))]
|
||||
{
|
||||
runner.execute(Step::Atom, "apm", || generic::run_apm(&ctx))?;
|
||||
}
|
||||
|
||||
// The following update function should be executed on all OSes.
|
||||
runner.execute(Step::Fossil, "fossil", || generic::run_fossil(&ctx))?;
|
||||
runner.execute(Step::Rustup, "rustup", || generic::run_rustup(&ctx))?;
|
||||
runner.execute(Step::Juliaup, "juliaup", || generic::run_juliaup(&ctx))?;
|
||||
runner.execute(Step::Dotnet, ".NET", || generic::run_dotnet_upgrade(&ctx))?;
|
||||
runner.execute(Step::Choosenim, "choosenim", || generic::run_choosenim(&ctx))?;
|
||||
runner.execute(Step::Cargo, "cargo", || generic::run_cargo_update(&ctx))?;
|
||||
runner.execute(Step::Flutter, "Flutter", || generic::run_flutter_upgrade(&ctx))?;
|
||||
runner.execute(Step::Go, "go-global-update", || go::run_go_global_update(&ctx))?;
|
||||
runner.execute(Step::Go, "gup", || go::run_go_gup(&ctx))?;
|
||||
runner.execute(Step::Emacs, "Emacs", || emacs.upgrade(&ctx))?;
|
||||
runner.execute(Step::Opam, "opam", || generic::run_opam_update(&ctx))?;
|
||||
runner.execute(Step::Vcpkg, "vcpkg", || generic::run_vcpkg_update(&ctx))?;
|
||||
runner.execute(Step::Pipx, "pipx", || generic::run_pipx_update(&ctx))?;
|
||||
runner.execute(Step::Conda, "conda", || generic::run_conda_update(&ctx))?;
|
||||
runner.execute(Step::Mamba, "mamba", || generic::run_mamba_update(&ctx))?;
|
||||
runner.execute(Step::Pip3, "pip3", || generic::run_pip3_update(&ctx))?;
|
||||
runner.execute(Step::PipReview, "pip-review", || generic::run_pip_review_update(&ctx))?;
|
||||
runner.execute(Step::PipReviewLocal, "pip-review (local)", || {
|
||||
generic::run_pip_review_local_update(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Pipupgrade, "pipupgrade", || generic::run_pipupgrade_update(&ctx))?;
|
||||
runner.execute(Step::Ghcup, "ghcup", || generic::run_ghcup_update(&ctx))?;
|
||||
runner.execute(Step::Stack, "stack", || generic::run_stack_update(&ctx))?;
|
||||
runner.execute(Step::Tlmgr, "tlmgr", || generic::run_tlmgr_update(&ctx))?;
|
||||
runner.execute(Step::Myrepos, "myrepos", || generic::run_myrepos_update(&ctx))?;
|
||||
runner.execute(Step::Chezmoi, "chezmoi", || generic::run_chezmoi_update(&ctx))?;
|
||||
runner.execute(Step::Jetpack, "jetpack", || generic::run_jetpack(&ctx))?;
|
||||
runner.execute(Step::Vim, "vim", || vim::upgrade_vim(&ctx))?;
|
||||
runner.execute(Step::Vim, "Neovim", || vim::upgrade_neovim(&ctx))?;
|
||||
runner.execute(Step::Vim, "The Ultimate vimrc", || vim::upgrade_ultimate_vimrc(&ctx))?;
|
||||
runner.execute(Step::Vim, "voom", || vim::run_voom(&ctx))?;
|
||||
runner.execute(Step::Kakoune, "Kakoune", || kakoune::upgrade_kak_plug(&ctx))?;
|
||||
runner.execute(Step::Helix, "helix", || generic::run_helix_grammars(&ctx))?;
|
||||
runner.execute(Step::Node, "npm", || node::run_npm_upgrade(&ctx))?;
|
||||
runner.execute(Step::Yarn, "yarn", || node::run_yarn_upgrade(&ctx))?;
|
||||
runner.execute(Step::Pnpm, "pnpm", || node::run_pnpm_upgrade(&ctx))?;
|
||||
runner.execute(Step::Containers, "Containers", || containers::run_containers(&ctx))?;
|
||||
runner.execute(Step::Deno, "deno", || node::deno_upgrade(&ctx))?;
|
||||
runner.execute(Step::Composer, "composer", || generic::run_composer_update(&ctx))?;
|
||||
runner.execute(Step::Krew, "krew", || generic::run_krew_upgrade(&ctx))?;
|
||||
runner.execute(Step::Helm, "helm", || generic::run_helm_repo_update(&ctx))?;
|
||||
runner.execute(Step::Gem, "gem", || generic::run_gem(&ctx))?;
|
||||
runner.execute(Step::RubyGems, "rubygems", || generic::run_rubygems(&ctx))?;
|
||||
runner.execute(Step::Julia, "julia", || generic::update_julia_packages(&ctx))?;
|
||||
runner.execute(Step::Haxelib, "haxelib", || generic::run_haxelib_update(&ctx))?;
|
||||
runner.execute(Step::Sheldon, "sheldon", || generic::run_sheldon(&ctx))?;
|
||||
runner.execute(Step::Stew, "stew", || generic::run_stew(&ctx))?;
|
||||
runner.execute(Step::Rtcl, "rtcl", || generic::run_rtcl(&ctx))?;
|
||||
runner.execute(Step::Bin, "bin", || generic::bin_update(&ctx))?;
|
||||
runner.execute(Step::Gcloud, "gcloud", || generic::run_gcloud_components_update(&ctx))?;
|
||||
runner.execute(Step::Micro, "micro", || generic::run_micro(&ctx))?;
|
||||
runner.execute(Step::Raco, "raco", || generic::run_raco_update(&ctx))?;
|
||||
runner.execute(Step::Spicetify, "spicetify", || generic::spicetify_upgrade(&ctx))?;
|
||||
runner.execute(Step::GithubCliExtensions, "GitHub CLI Extensions", || {
|
||||
generic::run_ghcli_extensions_upgrade(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Bob, "Bob", || generic::run_bob(&ctx))?;
|
||||
|
||||
if config.use_predefined_git_repos() {
|
||||
if config.should_run(Step::Emacs) {
|
||||
if !emacs.is_doom() {
|
||||
if let Some(directory) = emacs.directory() {
|
||||
git_repos.insert_if_repo(directory);
|
||||
}
|
||||
}
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".doom.d"));
|
||||
}
|
||||
|
||||
if config.should_run(Step::Vim) {
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".vim"));
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".config/nvim"));
|
||||
}
|
||||
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".ideavimrc"));
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".intellimacs"));
|
||||
|
||||
if config.should_run(Step::Rcm) {
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".dotfiles"));
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
git_repos.insert_if_repo(zsh::zshrc());
|
||||
if config.should_run(Step::Tmux) {
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".tmux"));
|
||||
}
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".config/fish"));
|
||||
git_repos.insert_if_repo(XDG_DIRS.config_dir().join("openbox"));
|
||||
git_repos.insert_if_repo(XDG_DIRS.config_dir().join("bspwm"));
|
||||
git_repos.insert_if_repo(XDG_DIRS.config_dir().join("i3"));
|
||||
git_repos.insert_if_repo(XDG_DIRS.config_dir().join("sway"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
git_repos.insert_if_repo(
|
||||
WINDOWS_DIRS
|
||||
.cache_dir()
|
||||
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
|
||||
);
|
||||
|
||||
#[cfg(windows)]
|
||||
windows::insert_startup_scripts(&mut git_repos).ok();
|
||||
|
||||
if let Some(profile) = powershell.profile() {
|
||||
git_repos.insert_if_repo(profile);
|
||||
}
|
||||
}
|
||||
|
||||
if config.should_run(Step::GitRepos) {
|
||||
if let Some(custom_git_repos) = config.git_repos() {
|
||||
for git_repo in custom_git_repos {
|
||||
git_repos.glob_insert(git_repo);
|
||||
}
|
||||
}
|
||||
runner.execute(Step::GitRepos, "Git repositories", || {
|
||||
git.multi_pull_step(&git_repos, &ctx)
|
||||
})?;
|
||||
}
|
||||
|
||||
if should_run_powershell {
|
||||
runner.execute(Step::Powershell, "Powershell Modules Update", || {
|
||||
powershell.update_modules(&ctx)
|
||||
})?;
|
||||
}
|
||||
|
||||
if let Some(commands) = config.commands() {
|
||||
for (name, command) in commands {
|
||||
if config.should_run_custom_command(name) {
|
||||
runner.execute(Step::CustomCommands, name, || {
|
||||
generic::run_custom_command(name, command, &ctx)
|
||||
})?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if config.should_run(Step::Vagrant) {
|
||||
if let Ok(boxes) = vagrant::collect_boxes(&ctx) {
|
||||
for vagrant_box in boxes {
|
||||
runner.execute(Step::Vagrant, format!("Vagrant ({})", vagrant_box.smart_name()), || {
|
||||
vagrant::topgrade_vagrant_box(&ctx, &vagrant_box)
|
||||
})?;
|
||||
}
|
||||
}
|
||||
}
|
||||
runner.execute(Step::Vagrant, "Vagrant boxes", || vagrant::upgrade_vagrant_boxes(&ctx))?;
|
||||
|
||||
if !runner.report().data().is_empty() {
|
||||
print_separator("Summary");
|
||||
|
||||
for (key, result) in runner.report().data() {
|
||||
print_result(key, result);
|
||||
}
|
||||
|
||||
if skipped_missing_sudo {
|
||||
print_warning(t!(
|
||||
"\nSome steps were skipped as sudo or equivalent could not be found."
|
||||
));
|
||||
// Steps can only fail with SkippedMissingSudo if sudo is None,
|
||||
// therefore we must have a sudo_err
|
||||
match sudo_err.unwrap() {
|
||||
SudoCreateError::CannotFindBinary => {
|
||||
#[cfg(unix)]
|
||||
print_warning(t!(
|
||||
"Install one of `sudo`, `doas`, `pkexec`, `run0` or `please` to run these steps."
|
||||
));
|
||||
|
||||
// if this windows version supported Windows Sudo, the error would have been WinSudoDisabled
|
||||
#[cfg(windows)]
|
||||
print_warning(t!("Install gsudo to run these steps."));
|
||||
}
|
||||
#[cfg(windows)]
|
||||
SudoCreateError::WinSudoDisabled => {
|
||||
print_warning(t!(
|
||||
"Install gsudo or enable Windows Sudo to run these steps.\nFor Windows Sudo, the default 'In a new window' mode is not supported as it prevents Topgrade from waiting for commands to finish. Please configure it to use 'Inline' mode instead.\nGo to https://go.microsoft.com/fwlink/?linkid=2257346 to learn more."
|
||||
));
|
||||
}
|
||||
#[cfg(windows)]
|
||||
SudoCreateError::WinSudoNewWindowMode => {
|
||||
print_warning(t!(
|
||||
"Windows Sudo was found, but it is set to 'In a new window' mode, which prevents Topgrade from waiting for commands to finish. Please configure it to use 'Inline' mode instead.\nGo to https://go.microsoft.com/fwlink/?linkid=2257346 to learn more."
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
if let Ok(distribution) = &distribution {
|
||||
distribution.show_summary();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut post_command_failed = false;
|
||||
if let Some(commands) = config.post_commands() {
|
||||
for (name, command) in commands {
|
||||
if generic::run_custom_command(name, command, &ctx).is_err() {
|
||||
post_command_failed = true;
|
||||
let result = generic::run_custom_command(name, command, &ctx);
|
||||
if !failed && result.is_err() {
|
||||
failed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if config.keep_at_end() {
|
||||
print_info("\n(R)eboot\n(S)hell\n(Q)uit");
|
||||
print_info(t!("\n(R)eboot\n(S)hell\n(Q)uit"));
|
||||
loop {
|
||||
match get_key() {
|
||||
Ok(Key::Char('s')) | Ok(Key::Char('S')) => {
|
||||
Ok(Key::Char('s' | 'S')) => {
|
||||
run_shell().context("Failed to execute shell")?;
|
||||
}
|
||||
Ok(Key::Char('r')) | Ok(Key::Char('R')) => {
|
||||
reboot().context("Failed to reboot")?;
|
||||
Ok(Key::Char('r' | 'R')) => {
|
||||
println!("{}", t!("Rebooting..."));
|
||||
reboot(&ctx).context("Failed to reboot")?;
|
||||
}
|
||||
Ok(Key::Char('q')) | Ok(Key::Char('Q')) => (),
|
||||
Ok(Key::Char('q' | 'Q')) => (),
|
||||
_ => {
|
||||
continue;
|
||||
}
|
||||
@@ -496,16 +311,15 @@ fn run() -> Result<()> {
|
||||
}
|
||||
}
|
||||
|
||||
let failed = post_command_failed || runner.report().data().iter().any(|(_, result)| result.failed());
|
||||
|
||||
if !config.skip_notify() {
|
||||
notify_desktop(
|
||||
format!(
|
||||
"Topgrade finished {}",
|
||||
if failed { "with errors" } else { "successfully" }
|
||||
),
|
||||
if failed {
|
||||
t!("Topgrade finished with errors")
|
||||
} else {
|
||||
t!("Topgrade finished successfully")
|
||||
},
|
||||
Some(Duration::from_secs(10)),
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
if failed {
|
||||
@@ -538,32 +352,9 @@ fn main() {
|
||||
// The `Debug` implementation of `eyre::Result` prints a multi-line
|
||||
// error message that includes all the 'causes' added with
|
||||
// `.with_context(...)` calls.
|
||||
println!("Error: {error:?}");
|
||||
println!("{}", t!("Error: {error}", error = format!("{:?}", error)));
|
||||
}
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn install_tracing(filter_directives: &str) -> Result<()> {
|
||||
use tracing_subscriber::fmt;
|
||||
use tracing_subscriber::fmt::format::FmtSpan;
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
let env_filter = EnvFilter::try_new(filter_directives)
|
||||
.or_else(|_| EnvFilter::try_from_default_env())
|
||||
.or_else(|_| EnvFilter::try_new("info"))?;
|
||||
|
||||
let fmt_layer = fmt::layer()
|
||||
.with_target(false)
|
||||
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
|
||||
.without_time();
|
||||
|
||||
let registry = tracing_subscriber::registry();
|
||||
|
||||
registry.with(env_filter).with(fmt_layer).init();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,45 +0,0 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
pub enum StepResult {
|
||||
Success,
|
||||
Failure,
|
||||
Ignored,
|
||||
Skipped(String),
|
||||
}
|
||||
|
||||
impl StepResult {
|
||||
pub fn failed(&self) -> bool {
|
||||
match self {
|
||||
StepResult::Success | StepResult::Ignored | StepResult::Skipped(_) => false,
|
||||
StepResult::Failure => true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type CowString<'a> = Cow<'a, str>;
|
||||
type ReportData<'a> = Vec<(CowString<'a>, StepResult)>;
|
||||
pub struct Report<'a> {
|
||||
data: ReportData<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Report<'a> {
|
||||
pub fn new() -> Self {
|
||||
Self { data: Vec::new() }
|
||||
}
|
||||
|
||||
pub fn push_result<M>(&mut self, result: Option<(M, StepResult)>)
|
||||
where
|
||||
M: Into<CowString<'a>>,
|
||||
{
|
||||
if let Some((key, success)) = result {
|
||||
let key = key.into();
|
||||
|
||||
debug_assert!(!self.data.iter().any(|(k, _)| k == &key), "{key} already reported");
|
||||
self.data.push((key, success));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn data(&self) -> &ReportData<'a> {
|
||||
&self.data
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,36 @@
|
||||
use crate::ctrlc;
|
||||
use crate::error::{DryRun, SkipStep};
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::report::{Report, StepResult};
|
||||
use crate::terminal::print_error;
|
||||
use crate::{config::Step, terminal::should_retry};
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
use std::borrow::Cow;
|
||||
use std::fmt::Debug;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::ctrlc;
|
||||
use crate::error::{DryRun, MissingSudo, SkipStep};
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::step::Step;
|
||||
use crate::terminal::{print_error, print_warning, should_retry};
|
||||
|
||||
pub enum StepResult {
|
||||
Success,
|
||||
Failure,
|
||||
Ignored,
|
||||
SkippedMissingSudo,
|
||||
Skipped(String),
|
||||
}
|
||||
|
||||
impl StepResult {
|
||||
pub fn failed(&self) -> bool {
|
||||
use StepResult::*;
|
||||
|
||||
match self {
|
||||
Success | Ignored | Skipped(_) | SkippedMissingSudo => false,
|
||||
Failure => true,
|
||||
}
|
||||
}
|
||||
}
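
// For context, the summary loop in main.rs (earlier in this compare view)
// folds these results into a single exit status roughly like this:
//
//     let mut failed = false;
//     for (_key, result) in report {
//         if result.failed() {
//             failed = true;
//         }
//     }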
|
||||
|
||||
type Report<'a> = Vec<(Cow<'a, str>, StepResult)>;
|
||||
|
||||
pub struct Runner<'a> {
|
||||
ctx: &'a ExecutionContext<'a>,
|
||||
report: Report<'a>,
|
||||
@@ -18,32 +40,50 @@ impl<'a> Runner<'a> {
|
||||
pub fn new(ctx: &'a ExecutionContext) -> Runner<'a> {
|
||||
Runner {
|
||||
ctx,
|
||||
report: Report::new(),
|
||||
report: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn execute<F, M>(&mut self, step: Step, key: M, func: F) -> Result<()>
|
||||
fn push_result(&mut self, key: Cow<'a, str>, result: StepResult) {
|
||||
debug_assert!(!self.report.iter().any(|(k, _)| k == &key), "{key} already reported");
|
||||
self.report.push((key, result));
|
||||
}
|
||||
|
||||
pub fn execute<K, F>(&mut self, step: Step, key: K, func: F) -> Result<()>
|
||||
where
|
||||
K: Into<Cow<'a, str>> + Debug,
|
||||
F: Fn() -> Result<()>,
|
||||
M: Into<Cow<'a, str>> + Debug,
|
||||
{
|
||||
if !self.ctx.config().should_run(step) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let key = key.into();
|
||||
let key: Cow<'a, str> = key.into();
|
||||
debug!("Step {:?}", key);
|
||||
|
||||
// alter the `func` to put it in a span
|
||||
let func = || {
|
||||
let span =
|
||||
tracing::span!(parent: tracing::Span::none(), tracing::Level::TRACE, "step", step = ?step, key = %key);
|
||||
let _guard = span.enter();
|
||||
func()
|
||||
};
|
||||
|
||||
loop {
|
||||
match func() {
|
||||
Ok(()) => {
|
||||
self.report.push_result(Some((key, StepResult::Success)));
|
||||
self.push_result(key, StepResult::Success);
|
||||
break;
|
||||
}
|
||||
Err(e) if e.downcast_ref::<DryRun>().is_some() => break,
|
||||
Err(e) if e.downcast_ref::<MissingSudo>().is_some() => {
|
||||
print_warning(t!("Skipping step, sudo is required"));
|
||||
self.push_result(key, StepResult::SkippedMissingSudo);
|
||||
break;
|
||||
}
|
||||
Err(e) if e.downcast_ref::<SkipStep>().is_some() => {
|
||||
if self.ctx.config().verbose() || self.ctx.config().show_skipped() {
|
||||
self.report.push_result(Some((key, StepResult::Skipped(e.to_string()))));
|
||||
self.push_result(key, StepResult::Skipped(e.to_string()));
|
||||
}
|
||||
break;
|
||||
}
|
||||
@@ -64,14 +104,14 @@ impl<'a> Runner<'a> {
|
||||
};
|
||||
|
||||
if !should_retry {
|
||||
self.report.push_result(Some((
|
||||
self.push_result(
|
||||
key,
|
||||
if ignore_failure {
|
||||
StepResult::Ignored
|
||||
} else {
|
||||
StepResult::Failure
|
||||
},
|
||||
)));
|
||||
);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -81,7 +121,7 @@ impl<'a> Runner<'a> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn report(&self) -> &Report {
|
||||
pub fn report(&self) -> &Report<'_> {
|
||||
&self.report
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#![cfg(windows)]
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use std::{env::current_exe, fs, path::PathBuf};
|
||||
use tracing::{debug, error};
|
||||
|
||||
@@ -3,16 +3,26 @@ use std::env;
|
||||
use std::os::unix::process::CommandExt as _;
|
||||
use std::process::Command;
|
||||
|
||||
use crate::step::Step;
|
||||
use color_eyre::eyre::{bail, Result};
|
||||
use rust_i18n::t;
|
||||
use self_update_crate::backends::github::Update;
|
||||
use self_update_crate::update::UpdateStatus;
|
||||
|
||||
use super::terminal::*;
|
||||
use super::terminal::{print_info, print_separator};
|
||||
#[cfg(windows)]
|
||||
use crate::error::Upgraded;
|
||||
|
||||
pub fn self_update() -> Result<()> {
|
||||
print_separator("Self update");
|
||||
use crate::execution_context::ExecutionContext;
|
||||
|
||||
pub fn self_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator(t!("Self update"));
|
||||
|
||||
if ctx.run_type().dry() {
|
||||
println!("{}", t!("Would self-update"));
|
||||
Ok(())
|
||||
} else {
|
||||
let assume_yes = ctx.config().yes(Step::SelfUpdate);
|
||||
let current_exe = env::current_exe();
|
||||
|
||||
let target = self_update_crate::get_target();
|
||||
@@ -21,25 +31,25 @@ pub fn self_update() -> Result<()> {
|
||||
.repo_name("topgrade")
|
||||
.target(target)
|
||||
.bin_name(if cfg!(windows) { "topgrade.exe" } else { "topgrade" })
|
||||
.show_output(false)
|
||||
.show_output(true)
|
||||
.show_download_progress(true)
|
||||
.current_version(self_update_crate::cargo_crate_version!())
|
||||
.no_confirm(true)
|
||||
.no_confirm(assume_yes)
|
||||
.build()?
|
||||
.update_extended()?;
|
||||
|
||||
if let UpdateStatus::Updated(release) = &result {
|
||||
println!("\nTopgrade upgraded to {}:\n", release.version);
|
||||
println!("{}", t!("Topgrade upgraded to {version}:\n", version = release.version));
|
||||
if let Some(body) = &release.body {
|
||||
println!("{body}");
|
||||
}
|
||||
} else {
|
||||
println!("Topgrade is up-to-date");
|
||||
println!("{}", t!("Topgrade is up-to-date"));
|
||||
}
|
||||
|
||||
{
|
||||
if result.updated() {
|
||||
print_warning("Respawning...");
|
||||
print_info(t!("Respawning..."));
|
||||
let mut command = Command::new(current_exe?);
|
||||
command.args(env::args().skip(1)).env("TOPGRADE_NO_SELF_UPGRADE", "");
|
||||
|
||||
@@ -59,4 +69,5 @@ pub fn self_update() -> Result<()> {
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
src/step.rs (888, new file)
@@ -0,0 +1,888 @@
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::runner::Runner;
|
||||
use clap::ValueEnum;
|
||||
use color_eyre::Result;
|
||||
#[cfg(target_os = "linux")]
|
||||
use rust_i18n::t;
|
||||
use serde::Deserialize;
|
||||
use strum::{EnumCount, EnumIter, EnumString, VariantNames};
|
||||
|
||||
#[cfg(feature = "self-update")]
|
||||
use crate::self_update;
|
||||
use crate::steps::remote::vagrant;
|
||||
#[allow(clippy::wildcard_imports)]
|
||||
use crate::steps::*;
|
||||
use crate::utils::hostname;
|
||||
|
||||
#[derive(ValueEnum, EnumString, VariantNames, Debug, Clone, PartialEq, Eq, Deserialize, EnumIter, Copy, EnumCount)]
|
||||
#[clap(rename_all = "snake_case")]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[strum(serialize_all = "snake_case")]
|
||||
pub enum Step {
|
||||
AM,
|
||||
AndroidStudio,
|
||||
AppMan,
|
||||
Aqua,
|
||||
Asdf,
|
||||
Atom,
|
||||
Atuin,
|
||||
Audit,
|
||||
AutoCpufreq,
|
||||
Bin,
|
||||
Bob,
|
||||
BrewCask,
|
||||
BrewFormula,
|
||||
Bun,
|
||||
BunPackages,
|
||||
Cargo,
|
||||
Certbot,
|
||||
Chezmoi,
|
||||
Chocolatey,
|
||||
Choosenim,
|
||||
CinnamonSpices,
|
||||
ClamAvDb,
|
||||
Composer,
|
||||
Conda,
|
||||
ConfigUpdate,
|
||||
Containers,
|
||||
CustomCommands,
|
||||
DebGet,
|
||||
Deno,
|
||||
Distrobox,
|
||||
DkpPacman,
|
||||
Dotnet,
|
||||
Elan,
|
||||
Emacs,
|
||||
Firmware,
|
||||
Flatpak,
|
||||
Flutter,
|
||||
Fossil,
|
||||
Gcloud,
|
||||
Gem,
|
||||
Ghcup,
|
||||
GitRepos,
|
||||
GithubCliExtensions,
|
||||
GnomeShellExtensions,
|
||||
Go,
|
||||
Guix,
|
||||
Haxelib,
|
||||
Helix,
|
||||
Helm,
|
||||
HomeManager,
|
||||
// These names are miscapitalized on purpose, so the CLI name is
|
||||
// `jetbrains_pycharm` instead of `jet_brains_py_charm`.
|
||||
JetbrainsAqua,
|
||||
JetbrainsClion,
|
||||
JetbrainsDatagrip,
|
||||
JetbrainsDataspell,
|
||||
JetbrainsGateway,
|
||||
JetbrainsGoland,
|
||||
JetbrainsIdea,
|
||||
JetbrainsMps,
|
||||
JetbrainsPhpstorm,
|
||||
JetbrainsPycharm,
|
||||
JetbrainsRider,
|
||||
JetbrainsRubymine,
|
||||
JetbrainsRustrover,
|
||||
JetbrainsToolbox,
|
||||
JetbrainsWebstorm,
|
||||
Jetpack,
|
||||
Julia,
|
||||
Juliaup,
|
||||
Kakoune,
|
||||
Krew,
|
||||
Lensfun,
|
||||
Lure,
|
||||
Macports,
|
||||
Mamba,
|
||||
Mandb,
|
||||
Mas,
|
||||
Maza,
|
||||
Micro,
|
||||
MicrosoftStore,
|
||||
Miktex,
|
||||
Mise,
|
||||
Myrepos,
|
||||
Nix,
|
||||
NixHelper,
|
||||
Node,
|
||||
Opam,
|
||||
Pacdef,
|
||||
Pacstall,
|
||||
Pearl,
|
||||
Pip3,
|
||||
PipReview,
|
||||
PipReviewLocal,
|
||||
Pipupgrade,
|
||||
Pipx,
|
||||
Pipxu,
|
||||
Pixi,
|
||||
Pkg,
|
||||
Pkgfile,
|
||||
Pkgin,
|
||||
PlatformioCore,
|
||||
Pnpm,
|
||||
Poetry,
|
||||
Powershell,
|
||||
Protonup,
|
||||
Pyenv,
|
||||
Raco,
|
||||
Rcm,
|
||||
Remotes,
|
||||
Restarts,
|
||||
Rtcl,
|
||||
RubyGems,
|
||||
Rustup,
|
||||
Rye,
|
||||
Scoop,
|
||||
Sdkman,
|
||||
SelfUpdate,
|
||||
Sheldon,
|
||||
Shell,
|
||||
Snap,
|
||||
Sparkle,
|
||||
Spicetify,
|
||||
Stack,
|
||||
Stew,
|
||||
System,
|
||||
Tldr,
|
||||
Tlmgr,
|
||||
Tmux,
|
||||
Toolbx,
|
||||
Typst,
|
||||
Uv,
|
||||
Vagrant,
|
||||
Vcpkg,
|
||||
Vim,
|
||||
VoltaPackages,
|
||||
Vscode,
|
||||
VscodeInsiders,
|
||||
Vscodium,
|
||||
VscodiumInsiders,
|
||||
Waydroid,
|
||||
Winget,
|
||||
Wsl,
|
||||
WslUpdate,
|
||||
Xcodes,
|
||||
Yadm,
|
||||
Yarn,
|
||||
Yazi,
|
||||
Zigup,
|
||||
Zvm,
|
||||
}
|
||||
|
||||
impl Step {
|
||||
#[allow(clippy::too_many_lines)]
|
||||
pub fn run(&self, runner: &mut Runner, ctx: &ExecutionContext) -> Result<()> {
|
||||
use Step::*;
|
||||
|
||||
match *self {
|
||||
AM =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "am", || linux::run_am(ctx))?
|
||||
}
|
||||
AndroidStudio => runner.execute(*self, "Android Studio Plugins", || generic::run_android_studio(ctx))?,
|
||||
AppMan =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "appman", || linux::run_appman(ctx))?
|
||||
}
|
||||
Aqua => runner.execute(*self, "aqua", || generic::run_aqua(ctx))?,
|
||||
Asdf =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "asdf", || unix::run_asdf(ctx))?
|
||||
}
|
||||
Atom =>
|
||||
{
|
||||
#[cfg(not(any(
|
||||
target_os = "freebsd",
|
||||
target_os = "openbsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "dragonfly"
|
||||
)))]
|
||||
runner.execute(*self, "apm", || generic::run_apm(ctx))?
|
||||
}
|
||||
Atuin =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "atuin", || unix::run_atuin(ctx))?
|
||||
}
|
||||
Audit => {
|
||||
#[cfg(target_os = "dragonfly")]
|
||||
runner.execute(*self, "DragonFly Audit", || dragonfly::audit_packages(ctx))?;
|
||||
#[cfg(target_os = "freebsd")]
|
||||
runner.execute(*self, "FreeBSD Audit", || freebsd::audit_packages(ctx))?
|
||||
}
|
||||
AutoCpufreq =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "auto-cpufreq", || linux::run_auto_cpufreq(ctx))?
|
||||
}
|
||||
Bin => runner.execute(*self, "bin", || generic::bin_update(ctx))?,
|
||||
Bob => runner.execute(*self, "Bob", || generic::run_bob(ctx))?,
|
||||
BrewCask => {
|
||||
#[cfg(target_os = "macos")]
|
||||
runner.execute(*self, "Brew Cask", || unix::run_brew_cask(ctx, unix::BrewVariant::Path))?;
|
||||
#[cfg(target_os = "macos")]
|
||||
runner.execute(*self, "Brew Cask (Intel)", || {
|
||||
unix::run_brew_cask(ctx, unix::BrewVariant::MacIntel)
|
||||
})?;
|
||||
#[cfg(target_os = "macos")]
|
||||
runner.execute(*self, "Brew Cask (ARM)", || {
|
||||
unix::run_brew_cask(ctx, unix::BrewVariant::MacArm)
|
||||
})?
|
||||
}
|
||||
BrewFormula => {
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "Brew", || unix::run_brew_formula(ctx, unix::BrewVariant::Path))?;
|
||||
#[cfg(target_os = "macos")]
|
||||
runner.execute(*self, "Brew (ARM)", || {
|
||||
unix::run_brew_formula(ctx, unix::BrewVariant::MacArm)
|
||||
})?;
|
||||
#[cfg(target_os = "macos")]
|
||||
runner.execute(*self, "Brew (Intel)", || {
|
||||
unix::run_brew_formula(ctx, unix::BrewVariant::MacIntel)
|
||||
})?
|
||||
}
|
||||
Bun => runner.execute(*self, "bun", || generic::run_bun(ctx))?,
|
||||
BunPackages =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "bun-packages", || unix::run_bun_packages(ctx))?
|
||||
}
|
||||
Cargo => runner.execute(*self, "cargo", || generic::run_cargo_update(ctx))?,
|
||||
Certbot => runner.execute(*self, "Certbot", || generic::run_certbot(ctx))?,
|
||||
Chezmoi => runner.execute(*self, "chezmoi", || generic::run_chezmoi_update(ctx))?,
|
||||
Chocolatey =>
|
||||
{
|
||||
#[cfg(windows)]
|
||||
runner.execute(*self, "Chocolatey", || windows::run_chocolatey(ctx))?
|
||||
}
|
||||
Choosenim => runner.execute(*self, "choosenim", || generic::run_choosenim(ctx))?,
|
||||
CinnamonSpices =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "Cinnamon spices", || linux::run_cinnamon_spices_updater(ctx))?
|
||||
}
|
||||
ClamAvDb => runner.execute(*self, "ClamAV Databases", || generic::run_freshclam(ctx))?,
|
||||
Composer => runner.execute(*self, "composer", || generic::run_composer_update(ctx))?,
|
||||
Conda => runner.execute(*self, "conda", || generic::run_conda_update(ctx))?,
|
||||
ConfigUpdate =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "config-update", || linux::run_config_update(ctx))?
|
||||
}
|
||||
Containers => runner.execute(*self, "Containers", || containers::run_containers(ctx))?,
|
||||
CustomCommands => {
|
||||
if let Some(commands) = ctx.config().commands() {
|
||||
for (name, command) in commands
|
||||
.iter()
|
||||
.filter(|(n, _)| ctx.config().should_run_custom_command(n))
|
||||
{
|
||||
runner.execute(*self, name.clone(), || generic::run_custom_command(name, command, ctx))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
DebGet =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "deb-get", || linux::run_deb_get(ctx))?
|
||||
}
|
||||
Deno => runner.execute(*self, "deno", || node::deno_upgrade(ctx))?,
|
||||
Distrobox =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "distrobox", || linux::run_distrobox_update(ctx))?
|
||||
}
|
||||
DkpPacman =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "dkp-pacman", || linux::run_dkp_pacman_update(ctx))?
|
||||
}
|
||||
Dotnet => runner.execute(*self, ".NET", || generic::run_dotnet_upgrade(ctx))?,
|
||||
Elan => runner.execute(*self, "elan", || generic::run_elan(ctx))?,
|
||||
Emacs => runner.execute(*self, "Emacs", || emacs::Emacs::new().upgrade(ctx))?,
|
||||
Firmware =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "Firmware", || linux::run_fwupdmgr(ctx))?
|
||||
}
|
||||
Flatpak =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "Flatpak", || linux::run_flatpak(ctx))?
|
||||
}
|
||||
Flutter => runner.execute(*self, "Flutter", || generic::run_flutter_upgrade(ctx))?,
|
||||
Fossil => runner.execute(*self, "fossil", || generic::run_fossil(ctx))?,
|
||||
Gcloud => runner.execute(*self, "gcloud", || generic::run_gcloud_components_update(ctx))?,
|
||||
Gem => runner.execute(*self, "gem", || generic::run_gem(ctx))?,
|
||||
Ghcup => runner.execute(*self, "ghcup", || generic::run_ghcup_update(ctx))?,
|
||||
GitRepos => runner.execute(*self, "Git Repositories", || git::run_git_pull(ctx))?,
|
||||
GithubCliExtensions => runner.execute(*self, "GitHub CLI Extensions", || {
|
||||
generic::run_ghcli_extensions_upgrade(ctx)
|
||||
})?,
|
||||
GnomeShellExtensions =>
|
||||
{
|
||||
#[cfg(all(unix, not(any(target_os = "macos", target_os = "android"))))]
|
||||
runner.execute(*self, "Gnome Shell Extensions", || unix::upgrade_gnome_extensions(ctx))?
|
||||
}
|
||||
Go => {
|
||||
runner.execute(*self, "go-global-update", || go::run_go_global_update(ctx))?;
|
||||
runner.execute(*self, "gup", || go::run_go_gup(ctx))?
|
||||
}
|
||||
Guix =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "guix", || unix::run_guix(ctx))?
|
||||
}
|
||||
Haxelib => runner.execute(*self, "haxelib", || generic::run_haxelib_update(ctx))?,
|
||||
Helix => runner.execute(*self, "helix", || generic::run_helix_grammars(ctx))?,
|
||||
Helm => runner.execute(*self, "helm", || generic::run_helm_repo_update(ctx))?,
|
||||
HomeManager =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "home-manager", || unix::run_home_manager(ctx))?
|
||||
}
|
||||
JetbrainsAqua => runner.execute(*self, "JetBrains Aqua Plugins", || generic::run_jetbrains_aqua(ctx))?,
|
||||
JetbrainsClion => runner.execute(*self, "JetBrains CL", || generic::run_jetbrains_clion(ctx))?,
|
||||
JetbrainsDatagrip => {
|
||||
runner.execute(*self, "JetBrains DataGrip", || generic::run_jetbrains_datagrip(ctx))?
|
||||
}
|
||||
JetbrainsDataspell => runner.execute(*self, "JetBrains DataSpell Plugins", || {
|
||||
generic::run_jetbrains_dataspell(ctx)
|
||||
})?,
|
||||
JetbrainsGateway => runner.execute(*self, "JetBrains Gateway Plugins", || {
|
||||
generic::run_jetbrains_gateway(ctx)
|
||||
})?,
|
||||
JetbrainsGoland => {
|
||||
runner.execute(*self, "JetBrains GoLand Plugins", || generic::run_jetbrains_goland(ctx))?
|
||||
}
|
||||
JetbrainsIdea => runner.execute(*self, "JetBrains IntelliJ IDEA Plugins", || {
|
||||
generic::run_jetbrains_idea(ctx)
|
||||
})?,
|
||||
JetbrainsMps => runner.execute(*self, "JetBrains MPS Plugins", || generic::run_jetbrains_mps(ctx))?,
|
||||
JetbrainsPhpstorm => runner.execute(*self, "JetBrains PhpStorm Plugins", || {
|
||||
generic::run_jetbrains_phpstorm(ctx)
|
||||
})?,
|
||||
JetbrainsPycharm => runner.execute(*self, "JetBrains PyCharm Plugins", || {
|
||||
generic::run_jetbrains_pycharm(ctx)
|
||||
})?,
|
||||
JetbrainsRider => runner.execute(*self, "JetBrains Rider Plugins", || generic::run_jetbrains_rider(ctx))?,
|
||||
JetbrainsRubymine => runner.execute(*self, "JetBrains RubyMine Plugins", || {
|
||||
generic::run_jetbrains_rubymine(ctx)
|
||||
})?,
|
||||
JetbrainsRustrover => runner.execute(*self, "JetBrains RustRover Plugins", || {
|
||||
generic::run_jetbrains_rustrover(ctx)
|
||||
})?,
|
||||
JetbrainsToolbox => runner.execute(*self, "JetBrains Toolbox", || generic::run_jetbrains_toolbox(ctx))?,
|
||||
JetbrainsWebstorm => runner.execute(*self, "JetBrains WebStorm Plugins", || {
|
||||
generic::run_jetbrains_webstorm(ctx)
|
||||
})?,
|
||||
Jetpack => runner.execute(*self, "jetpack", || generic::run_jetpack(ctx))?,
|
||||
Julia => runner.execute(*self, "julia", || generic::update_julia_packages(ctx))?,
|
||||
Juliaup => runner.execute(*self, "juliaup", || generic::run_juliaup(ctx))?,
|
||||
Kakoune => runner.execute(*self, "Kakoune", || kakoune::upgrade_kak_plug(ctx))?,
|
||||
Krew => runner.execute(*self, "krew", || generic::run_krew_upgrade(ctx))?,
|
||||
Lensfun => runner.execute(*self, "Lensfun's database update", || {
|
||||
generic::run_lensfun_update_data(ctx)
|
||||
})?,
|
||||
Lure =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "LURE", || linux::run_lure_update(ctx))?
|
||||
}
|
||||
Macports =>
|
||||
{
|
||||
#[cfg(target_os = "macos")]
|
||||
runner.execute(*self, "MacPorts", || macos::run_macports(ctx))?
|
||||
}
|
||||
Mamba => runner.execute(*self, "mamba", || generic::run_mamba_update(ctx))?,
|
||||
Mandb =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "Manual Entries", || linux::run_mandb(ctx))?
|
||||
}
|
||||
Mas =>
|
||||
{
|
||||
#[cfg(target_os = "macos")]
|
||||
runner.execute(*self, "App Store", || macos::run_mas(ctx))?
|
||||
}
|
||||
Maza =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "maza", || unix::run_maza(ctx))?
|
||||
}
|
||||
Micro => runner.execute(*self, "micro", || generic::run_micro(ctx))?,
|
||||
MicrosoftStore =>
|
||||
{
|
||||
#[cfg(windows)]
|
||||
runner.execute(*self, "Microsoft Store", || windows::microsoft_store(ctx))?
|
||||
}
|
||||
Miktex => runner.execute(*self, "miktex", || generic::run_miktex_packages_update(ctx))?,
|
||||
Mise =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "mise", || unix::run_mise(ctx))?
|
||||
}
|
||||
Myrepos => runner.execute(*self, "myrepos", || generic::run_myrepos_update(ctx))?,
|
||||
Nix => {
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "nix", || unix::run_nix(ctx))?;
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "nix upgrade-nix", || unix::run_nix_self_upgrade(ctx))?
|
||||
}
|
||||
NixHelper =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "nh", || unix::run_nix_helper(ctx))?
|
||||
}
|
||||
Node => runner.execute(*self, "npm", || node::run_npm_upgrade(ctx))?,
|
||||
Opam => runner.execute(*self, "opam", || generic::run_opam_update(ctx))?,
|
||||
Pacdef =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "pacdef", || linux::run_pacdef(ctx))?
|
||||
}
|
||||
Pacstall =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "pacstall", || linux::run_pacstall(ctx))?
|
||||
}
|
||||
Pearl =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "pearl", || unix::run_pearl(ctx))?
|
||||
}
|
||||
Pip3 => runner.execute(*self, "pip3", || generic::run_pip3_update(ctx))?,
|
||||
PipReview => runner.execute(*self, "pip-review", || generic::run_pip_review_update(ctx))?,
|
||||
PipReviewLocal => runner.execute(*self, "pip-review (local)", || {
|
||||
generic::run_pip_review_local_update(ctx)
|
||||
})?,
|
||||
Pipupgrade => runner.execute(*self, "pipupgrade", || generic::run_pipupgrade_update(ctx))?,
|
||||
Pipx => runner.execute(*self, "pipx", || generic::run_pipx_update(ctx))?,
|
||||
Pipxu => runner.execute(*self, "pipxu", || generic::run_pipxu_update(ctx))?,
|
||||
Pixi => runner.execute(*self, "pixi", || generic::run_pixi_update(ctx))?,
|
||||
Pkg => {
|
||||
#[cfg(target_os = "dragonfly")]
|
||||
runner.execute(*self, "Dragonfly BSD Packages", || dragonfly::upgrade_packages(ctx))?;
|
||||
#[cfg(target_os = "freebsd")]
|
||||
runner.execute(*self, "FreeBSD Packages", || freebsd::upgrade_packages(ctx))?;
|
||||
#[cfg(target_os = "openbsd")]
|
||||
runner.execute(*self, "OpenBSD Packages", || openbsd::upgrade_packages(ctx))?;
|
||||
#[cfg(target_os = "android")]
|
||||
runner.execute(*self, "Termux Packages", || android::upgrade_packages(ctx))?
|
||||
}
|
||||
Pkgfile =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "pkgfile", || linux::run_pkgfile(ctx))?
|
||||
}
|
||||
Pkgin =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "pkgin", || unix::run_pkgin(ctx))?
|
||||
}
|
||||
PlatformioCore => runner.execute(*self, "PlatformIO Core", || generic::run_platform_io(ctx))?,
|
||||
Pnpm => runner.execute(*self, "pnpm", || node::run_pnpm_upgrade(ctx))?,
|
||||
Poetry => runner.execute(*self, "Poetry", || generic::run_poetry(ctx))?,
|
||||
Powershell => runner.execute(*self, "Powershell Modules Update", || generic::run_powershell(ctx))?,
|
||||
Protonup =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "protonup", || linux::run_protonup_update(ctx))?
|
||||
}
|
||||
Pyenv =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "pyenv", || unix::run_pyenv(ctx))?
|
||||
}
|
||||
Raco => runner.execute(*self, "raco", || generic::run_raco_update(ctx))?,
|
||||
Rcm =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "rcm", || unix::run_rcm(ctx))?
|
||||
}
|
||||
Remotes => {
|
||||
if let Some(topgrades) = ctx.config().remote_topgrades() {
|
||||
for remote_topgrade in topgrades
|
||||
.iter()
|
||||
.filter(|t| ctx.config().should_execute_remote(hostname(), t))
|
||||
{
|
||||
runner.execute(*self, format!("Remote ({remote_topgrade})"), || {
|
||||
crate::ssh::ssh_step(ctx, remote_topgrade)
|
||||
})?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Restarts =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "Restarts", || linux::run_needrestart(ctx))?
|
||||
}
|
||||
Rtcl => runner.execute(*self, "rtcl", || generic::run_rtcl(ctx))?,
|
||||
RubyGems => runner.execute(*self, "rubygems", || generic::run_rubygems(ctx))?,
|
||||
Rustup => runner.execute(*self, "rustup", || generic::run_rustup(ctx))?,
|
||||
Rye => runner.execute(*self, "rye", || generic::run_rye(ctx))?,
|
||||
Scoop =>
|
||||
{
|
||||
#[cfg(windows)]
|
||||
runner.execute(*self, "Scoop", || windows::run_scoop(ctx))?
|
||||
}
|
||||
Sdkman =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "SDKMAN!", || unix::run_sdkman(ctx))?
|
||||
}
|
||||
SelfUpdate => {
|
||||
#[cfg(feature = "self-update")]
|
||||
{
|
||||
if std::env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() && !ctx.config().no_self_update() {
|
||||
runner.execute(*self, "Self Update", || self_update::self_update(ctx))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Sheldon => runner.execute(*self, "sheldon", || generic::run_sheldon(ctx))?,
|
||||
Shell => {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
runner.execute(*self, "zr", || zsh::run_zr(ctx))?;
|
||||
runner.execute(*self, "antibody", || zsh::run_antibody(ctx))?;
|
||||
runner.execute(*self, "antidote", || zsh::run_antidote(ctx))?;
|
||||
runner.execute(*self, "antigen", || zsh::run_antigen(ctx))?;
|
||||
runner.execute(*self, "zgenom", || zsh::run_zgenom(ctx))?;
|
||||
runner.execute(*self, "zplug", || zsh::run_zplug(ctx))?;
|
||||
runner.execute(*self, "zinit", || zsh::run_zinit(ctx))?;
|
||||
runner.execute(*self, "zi", || zsh::run_zi(ctx))?;
|
||||
runner.execute(*self, "zim", || zsh::run_zim(ctx))?;
|
||||
runner.execute(*self, "oh-my-zsh", || zsh::run_oh_my_zsh(ctx))?;
|
||||
runner.execute(*self, "oh-my-bash", || unix::run_oh_my_bash(ctx))?;
|
||||
runner.execute(*self, "fisher", || unix::run_fisher(ctx))?;
|
||||
runner.execute(*self, "bash-it", || unix::run_bashit(ctx))?;
|
||||
runner.execute(*self, "oh-my-fish", || unix::run_oh_my_fish(ctx))?;
|
||||
runner.execute(*self, "fish-plug", || unix::run_fish_plug(ctx))?;
|
||||
runner.execute(*self, "fundle", || unix::run_fundle(ctx))?
|
||||
}
|
||||
}
|
||||
Snap =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "snap", || linux::run_snap(ctx))?
|
||||
}
|
||||
Sparkle =>
|
||||
{
|
||||
#[cfg(target_os = "macos")]
|
||||
runner.execute(*self, "Sparkle", || macos::run_sparkle(ctx))?
|
||||
}
|
||||
Spicetify => runner.execute(*self, "spicetify", || generic::spicetify_upgrade(ctx))?,
|
||||
Stack => runner.execute(*self, "stack", || generic::run_stack_update(ctx))?,
|
||||
Stew => runner.execute(*self, "stew", || generic::run_stew(ctx))?,
|
||||
System => {
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// NOTE: Due to breaking `nu` updates, `packer.nu` needs to be updated before `nu` gets updated
|
||||
// by other package managers.
|
||||
runner.execute(Shell, "packer.nu", || linux::run_packer_nu(ctx))?;
|
||||
|
||||
match ctx.distribution() {
|
||||
Ok(distribution) => {
|
||||
runner.execute(*self, "System update", || distribution.upgrade(ctx))?;
|
||||
}
|
||||
Err(e) => {
|
||||
println!("{}", t!("Error detecting current distribution: {error}", error = e));
|
||||
}
|
||||
}
|
||||
runner.execute(*self, "pihole", || linux::run_pihole_update(ctx))?;
|
||||
}
|
||||
#[cfg(windows)]
|
||||
runner.execute(*self, "Windows update", || windows::windows_update(ctx))?;
|
||||
#[cfg(target_os = "macos")]
|
||||
runner.execute(*self, "System update", || macos::upgrade_macos(ctx))?;
|
||||
#[cfg(target_os = "freebsd")]
|
||||
runner.execute(*self, "FreeBSD Upgrade", || freebsd::upgrade_freebsd(ctx))?;
|
||||
#[cfg(target_os = "openbsd")]
|
||||
runner.execute(*self, "OpenBSD Upgrade", || openbsd::upgrade_openbsd(ctx))?
|
||||
}
|
||||
Tldr => runner.execute(*self, "TLDR", || generic::run_tldr(ctx))?,
|
||||
Tlmgr => runner.execute(*self, "tlmgr", || generic::run_tlmgr_update(ctx))?,
|
||||
Tmux =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "tmux", || tmux::run_tpm(ctx))?
|
||||
}
|
||||
Toolbx =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "toolbx", || toolbx::run_toolbx(ctx))?
|
||||
}
|
||||
Typst => runner.execute(*self, "Typst", || generic::run_typst(ctx))?,
|
||||
Uv => runner.execute(*self, "uv", || generic::run_uv(ctx))?,
|
||||
Vagrant => {
|
||||
if ctx.config().should_run(Vagrant) {
|
||||
if let Ok(boxes) = vagrant::collect_boxes(ctx) {
|
||||
for vagrant_box in boxes {
|
||||
runner.execute(*self, format!("Vagrant ({})", vagrant_box.smart_name()), || {
|
||||
vagrant::topgrade_vagrant_box(ctx, &vagrant_box)
|
||||
})?;
|
||||
}
|
||||
}
|
||||
}
|
||||
runner.execute(*self, "Vagrant boxes", || vagrant::upgrade_vagrant_boxes(ctx))?;
|
||||
}
|
||||
Vcpkg => runner.execute(*self, "vcpkg", || generic::run_vcpkg_update(ctx))?,
|
||||
Vim => {
|
||||
runner.execute(*self, "vim", || vim::upgrade_vim(ctx))?;
|
||||
runner.execute(*self, "Neovim", || vim::upgrade_neovim(ctx))?;
|
||||
runner.execute(*self, "The Ultimate vimrc", || vim::upgrade_ultimate_vimrc(ctx))?;
|
||||
runner.execute(*self, "voom", || vim::run_voom(ctx))?
|
||||
}
|
||||
VoltaPackages => runner.execute(*self, "volta packages", || node::run_volta_packages_upgrade(ctx))?,
|
||||
Vscode => runner.execute(*self, "Visual Studio Code extensions", || {
|
||||
generic::run_vscode_extensions_update(ctx)
|
||||
})?,
|
||||
VscodeInsiders => runner.execute(*self, "Visual Studio Code Insiders extensions", || {
|
||||
generic::run_vscode_insiders_extensions_update(ctx)
|
||||
})?,
|
||||
Vscodium => runner.execute(*self, "VSCodium extensions", || {
|
||||
generic::run_vscodium_extensions_update(ctx)
|
||||
})?,
|
||||
VscodiumInsiders => runner.execute(*self, "VSCodium Insiders extensions", || {
|
||||
generic::run_vscodium_insiders_extensions_update(ctx)
|
||||
})?,
|
||||
Waydroid =>
|
||||
{
|
||||
#[cfg(target_os = "linux")]
|
||||
runner.execute(*self, "Waydroid", || linux::run_waydroid(ctx))?
|
||||
}
|
||||
Winget =>
|
||||
{
|
||||
#[cfg(windows)]
|
||||
runner.execute(*self, "Winget", || windows::run_winget(ctx))?
|
||||
}
|
||||
Wsl =>
|
||||
{
|
||||
#[cfg(windows)]
|
||||
runner.execute(*self, "WSL", || windows::run_wsl_topgrade(ctx))?
|
||||
}
|
||||
WslUpdate =>
|
||||
{
|
||||
#[cfg(windows)]
|
||||
runner.execute(*self, "Update WSL", || windows::update_wsl(ctx))?
|
||||
}
|
||||
Xcodes =>
|
||||
{
|
||||
#[cfg(target_os = "macos")]
|
||||
runner.execute(*self, "Xcodes", || macos::update_xcodes(ctx))?
|
||||
}
|
||||
Yadm =>
|
||||
{
|
||||
#[cfg(unix)]
|
||||
runner.execute(*self, "yadm", || unix::run_yadm(ctx))?
|
||||
}
|
||||
Yarn => runner.execute(*self, "yarn", || node::run_yarn_upgrade(ctx))?,
|
||||
Yazi => runner.execute(*self, "Yazi packages", || generic::run_yazi(ctx))?,
|
||||
Zigup => runner.execute(*self, "zigup", || generic::run_zigup(ctx))?,
|
||||
Zvm => runner.execute(*self, "ZVM", || generic::run_zvm(ctx))?,
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_lines)]
|
||||
pub(crate) fn default_steps() -> Vec<Step> {
|
||||
// For now, SelfRenamer and SelfUpdate aren't included as they're run before the other non-steps (pre-commands, sudo, etc.)
|
||||
|
||||
use Step::*;
|
||||
// Could probably have a smaller starting capacity, but this at least ensures only 2 allocations:
|
||||
// initial and shrink
|
||||
let mut steps = Vec::with_capacity(Step::COUNT);
|
||||
|
||||
// Not combined with other generic steps to preserve the order as it was in main.rs originally,
|
||||
// but this can be changed in the future.
|
||||
steps.push(Remotes);
|
||||
|
||||
#[cfg(windows)]
|
||||
steps.extend_from_slice(&[Wsl, WslUpdate, Chocolatey, Scoop, Winget, System, MicrosoftStore]);
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
steps.extend_from_slice(&[BrewFormula, BrewCask, Macports, Xcodes, Sparkle, Mas, System]);
|
||||
|
||||
#[cfg(target_os = "dragonfly")]
|
||||
steps.extend_from_slice(&[Pkg, Audit]);
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
steps.extend_from_slice(&[Pkg, System, Audit]);
|
||||
|
||||
#[cfg(target_os = "openbsd")]
|
||||
steps.extend_from_slice(&[Pkg, System]);
|
||||
|
||||
#[cfg(target_os = "android")]
|
||||
steps.push(Pkg);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
steps.extend_from_slice(&[
|
||||
System,
|
||||
ConfigUpdate,
|
||||
AM,
|
||||
AppMan,
|
||||
DebGet,
|
||||
Toolbx,
|
||||
Snap,
|
||||
Pacstall,
|
||||
Pacdef,
|
||||
Protonup,
|
||||
Distrobox,
|
||||
DkpPacman,
|
||||
Firmware,
|
||||
Restarts,
|
||||
Flatpak,
|
||||
BrewFormula,
|
||||
Lure,
|
||||
Waydroid,
|
||||
AutoCpufreq,
|
||||
CinnamonSpices,
|
||||
Mandb,
|
||||
Pkgfile,
|
||||
]);
|
||||
|
||||
#[cfg(unix)]
|
||||
steps.extend_from_slice(&[
|
||||
Yadm,
|
||||
Nix,
|
||||
NixHelper,
|
||||
Guix,
|
||||
HomeManager,
|
||||
Asdf,
|
||||
Mise,
|
||||
Pkgin,
|
||||
BunPackages,
|
||||
Shell,
|
||||
Tmux,
|
||||
Pearl,
|
||||
#[cfg(not(any(target_os = "macos", target_os = "android")))]
|
||||
GnomeShellExtensions,
|
||||
Pyenv,
|
||||
Sdkman,
|
||||
Rcm,
|
||||
Maza,
|
||||
Atuin,
|
||||
]);
|
||||
|
||||
#[cfg(not(any(
|
||||
target_os = "freebsd",
|
||||
target_os = "openbsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "dragonfly"
|
||||
)))]
|
||||
steps.push(Atom);
|
||||
|
||||
// The following update steps should be executed on all OSes.
|
||||
steps.extend_from_slice(&[
|
||||
Fossil,
|
||||
Elan,
|
||||
Rye,
|
||||
Rustup,
|
||||
Juliaup,
|
||||
Dotnet,
|
||||
Choosenim,
|
||||
Cargo,
|
||||
Flutter,
|
||||
Go,
|
||||
Emacs,
|
||||
Opam,
|
||||
Vcpkg,
|
||||
Pipx,
|
||||
Pipxu,
|
||||
Vscode,
|
||||
VscodeInsiders,
|
||||
Vscodium,
|
||||
VscodiumInsiders,
|
||||
Conda,
|
||||
Mamba,
|
||||
Pixi,
|
||||
Miktex,
|
||||
Pip3,
|
||||
PipReview,
|
||||
PipReviewLocal,
|
||||
Pipupgrade,
|
||||
Ghcup,
|
||||
Stack,
|
||||
Tldr,
|
||||
Tlmgr,
|
||||
Myrepos,
|
||||
Chezmoi,
|
||||
Jetpack,
|
||||
Vim,
|
||||
Kakoune,
|
||||
Helix,
|
||||
Node,
|
||||
Yarn,
|
||||
Pnpm,
|
||||
VoltaPackages,
|
||||
Containers,
|
||||
Deno,
|
||||
Composer,
|
||||
Krew,
|
||||
Helm,
|
||||
Gem,
|
||||
RubyGems,
|
||||
Julia,
|
||||
Haxelib,
|
||||
Sheldon,
|
||||
Stew,
|
||||
Rtcl,
|
||||
Bin,
|
||||
Gcloud,
|
||||
Micro,
|
||||
Raco,
|
||||
Spicetify,
|
||||
GithubCliExtensions,
|
||||
Bob,
|
||||
Certbot,
|
||||
GitRepos,
|
||||
ClamAvDb,
|
||||
PlatformioCore,
|
||||
Lensfun,
|
||||
Poetry,
|
||||
Uv,
|
||||
Zvm,
|
||||
Aqua,
|
||||
Bun,
|
||||
Zigup,
|
||||
JetbrainsToolbox,
|
||||
AndroidStudio,
|
||||
JetbrainsAqua,
|
||||
JetbrainsClion,
|
||||
JetbrainsDatagrip,
|
||||
JetbrainsDataspell,
|
||||
// JetBrains dotCover has no CLI
|
||||
// JetBrains dotMemory has no CLI
|
||||
// JetBrains dotPeek has no CLI
|
||||
// JetBrains dotTrace has no CLI
|
||||
// JetBrains Fleet has a different CLI without a `fleet update` command.
|
||||
JetbrainsGateway,
|
||||
JetbrainsGoland,
|
||||
JetbrainsIdea,
|
||||
JetbrainsMps,
|
||||
JetbrainsPhpstorm,
|
||||
JetbrainsPycharm,
|
||||
// JetBrains ReSharper has no CLI (it's a Visual Studio extension)
// JetBrains ReSharper C++ has no CLI (it's a Visual Studio extension)
|
||||
JetbrainsRider,
|
||||
JetbrainsRubymine,
|
||||
JetbrainsRustrover,
|
||||
// JetBrains Space Desktop does not have a CLI
|
||||
JetbrainsWebstorm,
|
||||
Yazi,
|
||||
Powershell,
|
||||
CustomCommands,
|
||||
Vagrant,
|
||||
Typst,
|
||||
]);
|
||||
|
||||
steps.shrink_to_fit();
|
||||
|
||||
steps
|
||||
}
|
||||
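`default_steps` pre-allocates for the worst case using `Step::COUNT` from strum's `EnumCount` derive and shrinks once the platform-specific pushes are done, so the vector allocates at most twice. A small stand-alone sketch of that trick; the enum here is a stand-in, not the real `Step`.

```rust
use strum::EnumCount;

#[derive(Debug, Clone, Copy, EnumCount)]
enum DemoStep {
    System,
    Cargo,
    Flatpak,
}

fn default_steps() -> Vec<DemoStep> {
    // One up-front allocation sized for every possible step...
    let mut steps = Vec::with_capacity(DemoStep::COUNT);

    steps.push(DemoStep::System);
    #[cfg(target_os = "linux")]
    steps.push(DemoStep::Flatpak);
    steps.push(DemoStep::Cargo);

    // ...and at most one more when shrinking to the steps actually added.
    steps.shrink_to_fit();
    steps
}

fn main() {
    println!("{:?} of {} possible steps", default_steps(), DemoStep::COUNT);
}
```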
@@ -6,11 +6,13 @@ use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use tracing::{debug, error, warn};
|
||||
use wildmatch::WildMatch;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::error::{self, TopgradeError};
|
||||
use crate::terminal::print_separator;
|
||||
use crate::{execution_context::ExecutionContext, utils::require};
|
||||
use rust_i18n::t;
|
||||
|
||||
// A string found in the output of docker for containers that weren't found in
|
||||
// the docker registry. We use this to gracefully handle and skip containers
|
||||
@@ -42,13 +44,30 @@ impl Container {
|
||||
impl Display for Container {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
// e.g., "`fedora:latest` for `linux/amd64`"
|
||||
write!(f, "`{}` for `{}`", self.repo_tag, self.platform)
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
t!(
|
||||
"`{repo_tag}` for `{platform}`",
|
||||
repo_tag = self.repo_tag,
|
||||
platform = self.platform
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a Vector of all containers, with Strings in the format
|
||||
/// "REGISTRY/[PATH/]CONTAINER_NAME:TAG"
|
||||
fn list_containers(crt: &Path) -> Result<Vec<Container>> {
|
||||
///
|
||||
/// Containers specified in `ignored_containers` will be filtered out.
|
||||
fn list_containers(crt: &Path, ignored_containers: Option<&Vec<String>>) -> Result<Vec<Container>> {
|
||||
let ignored_containers = ignored_containers.map(|patterns| {
|
||||
patterns
|
||||
.iter()
|
||||
.map(|pattern| WildMatch::new(pattern))
|
||||
.collect::<Vec<WildMatch>>()
|
||||
});
|
||||
|
||||
debug!(
|
||||
"Querying '{} image ls --format \"{{{{.Repository}}}}:{{{{.Tag}}}}/{{{{.ID}}}}\"' for containers",
|
||||
crt.display()
|
||||
@@ -83,6 +102,13 @@ fn list_containers(crt: &Path) -> Result<Vec<Container>> {
|
||||
assert_eq!(split_res.len(), 2);
|
||||
let (repo_tag, image_id) = (split_res[0], split_res[1]);
|
||||
|
||||
if let Some(ref ignored_containers) = ignored_containers {
|
||||
if ignored_containers.iter().any(|pattern| pattern.matches(repo_tag)) {
|
||||
debug!("Skipping ignored container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
debug!(
|
||||
"Querying '{} image inspect --format \"{{{{.Os}}}}/{{{{.Architecture}}}}\"' for container {}",
|
||||
crt.display(),
|
||||
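The new `ignored_containers` parameter compiles the configured patterns into `WildMatch` matchers once and drops any `repo:tag` that matches one of them before inspection. A stand-alone sketch of that filter, with made-up image names:

```rust
use wildmatch::WildMatch;

/// Drop any image whose `repo:tag` matches one of the ignore patterns.
fn filter_ignored(containers: Vec<String>, ignored_patterns: &[String]) -> Vec<String> {
    let matchers: Vec<WildMatch> = ignored_patterns.iter().map(|p| WildMatch::new(p)).collect();

    containers
        .into_iter()
        .filter(|repo_tag| !matchers.iter().any(|m| m.matches(repo_tag)))
        .collect()
}

fn main() {
    let containers = vec![
        "docker.io/library/alpine:latest".to_string(),
        "ghcr.io/example/tool:1.2".to_string(),
    ];
    let ignored = vec!["ghcr.io/*".to_string()];
    // Only the alpine image survives the filter.
    println!("{:?}", filter_ignored(containers, &ignored));
}
```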
@@ -103,24 +129,26 @@ fn list_containers(crt: &Path) -> Result<Vec<Container>> {
|
||||
}
|
||||
|
||||
pub fn run_containers(ctx: &ExecutionContext) -> Result<()> {
|
||||
// Prefer podman, fall back to docker if not present
|
||||
let crt = require("podman").or_else(|_| require("docker"))?;
|
||||
// Check what runtime is specified in the config
|
||||
let container_runtime = ctx.config().containers_runtime().to_string();
|
||||
let crt = require(container_runtime)?;
|
||||
debug!("Using container runtime '{}'", crt.display());
|
||||
|
||||
print_separator("Containers");
|
||||
print_separator(t!("Containers"));
|
||||
let mut success = true;
|
||||
let containers = list_containers(&crt).context("Failed to list Docker containers")?;
|
||||
let containers =
|
||||
list_containers(&crt, ctx.config().containers_ignored_tags()).context("Failed to list Docker containers")?;
|
||||
debug!("Containers to inspect: {:?}", containers);
|
||||
|
||||
for container in containers.iter() {
|
||||
for container in &containers {
|
||||
debug!("Pulling container '{}'", container);
|
||||
let args = vec![
|
||||
"pull",
|
||||
container.repo_tag.as_str(),
|
||||
"--platform",
|
||||
container.platform.as_str(),
|
||||
];
|
||||
let mut exec = ctx.run_type().execute(&crt);
|
||||
let mut args = vec!["pull", container.repo_tag.as_str()];
|
||||
if container.platform.as_str() != "/" {
|
||||
args.push("--platform");
|
||||
args.push(container.platform.as_str());
|
||||
}
|
||||
|
||||
let mut exec = ctx.execute(&crt);
|
||||
|
||||
if let Err(e) = exec.args(&args).status_checked() {
|
||||
error!("Pulling container '{}' failed: {}", container, e);
|
||||
@@ -149,12 +177,7 @@ pub fn run_containers(ctx: &ExecutionContext) -> Result<()> {
|
||||
if ctx.config().cleanup() {
|
||||
// Remove dangling images
|
||||
debug!("Removing dangling images");
|
||||
if let Err(e) = ctx
|
||||
.run_type()
|
||||
.execute(&crt)
|
||||
.args(["image", "prune", "-f"])
|
||||
.status_checked()
|
||||
{
|
||||
if let Err(e) = ctx.execute(&crt).args(["image", "prune", "-f"]).status_checked() {
|
||||
error!("Removing dangling images failed: {}", e);
|
||||
success = false;
|
||||
}
|
||||
|
||||
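With the runtime taken from the config, the pull loop above only passes `--platform` when the inspect step produced a real `os/arch` value; a bare `/` means the platform is unknown and the flag is omitted. A simplified sketch of that logic (the image name in `main` is illustrative; the real list comes from `image ls`):

```rust
use std::path::Path;
use std::process::Command;

/// Pull one image, adding `--platform` only when a real "os/arch" is known.
fn pull_image(crt: &Path, repo_tag: &str, platform: &str) -> std::io::Result<bool> {
    let mut args = vec!["pull", repo_tag];
    if platform != "/" {
        args.push("--platform");
        args.push(platform);
    }

    let status = Command::new(crt).args(&args).status()?;
    Ok(status.success())
}

fn main() -> std::io::Result<()> {
    // Assumes a podman binary on the PATH; docker works the same way.
    let crt = Path::new("podman");
    let ok = pull_image(crt, "docker.io/library/alpine:latest", "linux/amd64")?;
    println!("pull succeeded: {ok}");
    Ok(())
}
```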
@@ -1,9 +1,4 @@
(when (fboundp 'paradox-upgrade-packages)
(progn
(unless (boundp 'paradox-github-token)
(setq paradox-github-token t))
(paradox-upgrade-packages)
(princ
(if (get-buffer "*Paradox Report*")
(with-current-buffer "*Paradox Report*" (buffer-string))
"\nNothing to upgrade\n"))))
(when (featurep 'package)
(if (fboundp 'package-upgrade-all)
(package-upgrade-all nil)
(message "Your Emacs version doesn't support unattended packages upgrade")))

@@ -4,12 +4,13 @@ use std::path::{Path, PathBuf};
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use rust_i18n::t;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::step::Step;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{require, require_option, PathExt};
|
||||
use crate::Step;
|
||||
|
||||
const EMACS_UPGRADE: &str = include_str!("emacs.el");
|
||||
#[cfg(windows)]
|
||||
@@ -59,7 +60,7 @@ impl Emacs {
|
||||
fn update_doom(doom: &Path, ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator("Doom Emacs");
|
||||
|
||||
let mut command = ctx.run_type().execute(doom);
|
||||
let mut command = ctx.execute(doom);
|
||||
if ctx.config().yes(Step::Emacs) {
|
||||
command.arg("--force");
|
||||
}
|
||||
@@ -74,13 +75,16 @@ impl Emacs {
|
||||
if let Some(doom) = &self.doom {
|
||||
Emacs::update_doom(doom, ctx)?;
|
||||
}
|
||||
let init_file = require_option(self.directory.as_ref(), String::from("Emacs directory does not exist"))?
|
||||
let init_file = require_option(
|
||||
self.directory.as_ref(),
|
||||
t!("Emacs directory does not exist").to_string(),
|
||||
)?
|
||||
.join("init.el")
|
||||
.require()?;
|
||||
|
||||
print_separator("Emacs");
|
||||
|
||||
let mut command = ctx.run_type().execute(emacs);
|
||||
let mut command = ctx.execute(emacs);
|
||||
|
||||
command
|
||||
.args(["--batch", "--debug-init", "-l"])
|
||||
|
||||
src/steps/generic.rs (1379 lines): file diff suppressed because it is too large
src/steps/git.rs (480 lines)
@@ -3,10 +3,10 @@ use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, Output, Stdio};
|
||||
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::{eyre, Result};
|
||||
use console::style;
|
||||
use futures::stream::{iter, FuturesUnordered};
|
||||
use futures::StreamExt;
|
||||
use futures::stream::{iter, FuturesUnordered, StreamExt};
|
||||
use glob::{glob_with, MatchOptions};
|
||||
use tokio::process::Command as AsyncCommand;
|
||||
use tokio::runtime;
|
||||
@@ -14,127 +14,166 @@ use tracing::{debug, error};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::step::Step;
|
||||
use crate::steps::emacs::Emacs;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{which, PathExt};
|
||||
use crate::{error::SkipStep, terminal::print_warning};
|
||||
use crate::utils::{require, PathExt};
|
||||
use crate::{error::SkipStep, terminal::print_warning, HOME_DIR};
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use rust_i18n::t;
|
||||
|
||||
#[cfg(unix)]
|
||||
use crate::XDG_DIRS;
|
||||
|
||||
#[cfg(windows)]
|
||||
use crate::WINDOWS_DIRS;
|
||||
|
||||
pub fn run_git_pull(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut repos = RepoStep::try_new()?;
|
||||
let config = ctx.config();
|
||||
|
||||
// handle built-in repos
|
||||
if config.use_predefined_git_repos() {
|
||||
// should be executed on all the platforms
|
||||
{
|
||||
if config.should_run(Step::Emacs) {
|
||||
let emacs = Emacs::new();
|
||||
if !emacs.is_doom() {
|
||||
if let Some(directory) = emacs.directory() {
|
||||
repos.insert_if_repo(directory);
|
||||
}
|
||||
}
|
||||
repos.insert_if_repo(HOME_DIR.join(".doom.d"));
|
||||
}
|
||||
|
||||
if config.should_run(Step::Vim) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".vim"));
|
||||
repos.insert_if_repo(HOME_DIR.join(".config/nvim"));
|
||||
}
|
||||
|
||||
repos.insert_if_repo(HOME_DIR.join(".ideavimrc"));
|
||||
repos.insert_if_repo(HOME_DIR.join(".intellimacs"));
|
||||
|
||||
if config.should_run(Step::Rcm) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".dotfiles"));
|
||||
}
|
||||
|
||||
if let Some(powershell) = ctx.powershell() {
|
||||
if let Some(profile) = powershell.profile() {
|
||||
repos.insert_if_repo(profile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
repos.insert_if_repo(crate::steps::zsh::zshrc());
|
||||
if config.should_run(Step::Tmux) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".tmux"));
|
||||
}
|
||||
repos.insert_if_repo(HOME_DIR.join(".config/fish"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("openbox"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("bspwm"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("i3"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("sway"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
repos.insert_if_repo(
|
||||
WINDOWS_DIRS
|
||||
.cache_dir()
|
||||
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
|
||||
);
|
||||
|
||||
super::os::windows::insert_startup_scripts(&mut repos).ok();
|
||||
}
|
||||
}
|
||||
|
||||
// Handle user-defined repos
|
||||
if let Some(custom_git_repos) = config.git_repos() {
|
||||
for git_repo in custom_git_repos {
|
||||
repos.glob_insert(git_repo);
|
||||
}
|
||||
}
|
||||
|
||||
// Warn the user about the bad patterns.
|
||||
//
|
||||
// NOTE: this should be executed **before** skipping the Git step or the
|
||||
// user won't receive this warning in the cases where all the paths configured
|
||||
// are bad patterns.
|
||||
repos.bad_patterns.iter().for_each(|pattern| {
|
||||
print_warning(t!(
|
||||
"Path {pattern} did not contain any git repositories",
|
||||
pattern = pattern
|
||||
));
|
||||
});
|
||||
|
||||
if repos.is_repos_empty() {
|
||||
return Err(SkipStep(t!("No repositories to pull").to_string()).into());
|
||||
}
|
||||
|
||||
print_separator(t!("Git repositories"));
|
||||
|
||||
repos.pull_repos(ctx)
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
static PATH_PREFIX: &str = "\\\\?\\";
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Git {
|
||||
git: Option<PathBuf>,
|
||||
}
|
||||
|
||||
pub struct Repositories<'a> {
|
||||
git: &'a Git,
|
||||
repositories: HashSet<String>,
|
||||
pub struct RepoStep {
|
||||
git: PathBuf,
|
||||
repos: HashSet<PathBuf>,
|
||||
glob_match_options: MatchOptions,
|
||||
bad_patterns: Vec<String>,
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn output_checked_utf8(output: Output) -> Result<()> {
|
||||
if !(output.status.success()) {
|
||||
let stderr = String::from_utf8(output.stderr).unwrap();
|
||||
Err(eyre!(stderr))
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stderr = stderr.trim();
|
||||
Err(eyre!("{stderr}"))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
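The rewritten `output_checked_utf8` builds the error from stderr with `from_utf8_lossy` and trims it, so non-UTF-8 output from git can no longer panic the pull. A stand-alone version of the same check, using plain `String` errors instead of `eyre`:

```rust
use std::process::{Command, Output};

fn output_checked_utf8(output: Output) -> Result<(), String> {
    if output.status.success() {
        Ok(())
    } else {
        // Lossy conversion never panics, even on invalid UTF-8 in stderr.
        let stderr = String::from_utf8_lossy(&output.stderr);
        Err(stderr.trim().to_string())
    }
}

fn main() -> Result<(), String> {
    // Assumes `git` is installed and the current directory is a repository.
    let output = Command::new("git")
        .args(["rev-parse", "--show-toplevel"])
        .output()
        .map_err(|e| e.to_string())?;
    output_checked_utf8(output)
}
```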
|
||||
async fn pull_repository(repo: String, git: &Path, ctx: &ExecutionContext<'_>) -> Result<()> {
|
||||
let path = repo.to_string();
|
||||
let before_revision = get_head_revision(git, &repo);
|
||||
|
||||
println!("{} {}", style("Pulling").cyan().bold(), path);
|
||||
|
||||
let mut command = AsyncCommand::new(git);
|
||||
|
||||
command
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args(["pull", "--ff-only"]);
|
||||
|
||||
if let Some(extra_arguments) = ctx.config().git_arguments() {
|
||||
command.args(extra_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let pull_output = command.output().await?;
|
||||
let submodule_output = AsyncCommand::new(git)
|
||||
.args(["submodule", "update", "--recursive"])
|
||||
.current_dir(&repo)
|
||||
.stdin(Stdio::null())
|
||||
.output()
|
||||
.await?;
|
||||
let result = output_checked_utf8(pull_output).and_then(|_| output_checked_utf8(submodule_output));
|
||||
|
||||
if let Err(message) = &result {
|
||||
println!("{} pulling {}", style("Failed").red().bold(), &repo);
|
||||
print!("{message}");
|
||||
} else {
|
||||
let after_revision = get_head_revision(git, &repo);
|
||||
|
||||
match (&before_revision, &after_revision) {
|
||||
(Some(before), Some(after)) if before != after => {
|
||||
println!("{} {}:", style("Changed").yellow().bold(), &repo);
|
||||
|
||||
fn get_head_revision<P: AsRef<Path>>(git: &Path, repo: P) -> Option<String> {
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args([
|
||||
"--no-pager",
|
||||
"log",
|
||||
"--no-decorate",
|
||||
"--oneline",
|
||||
&format!("{before}..{after}"),
|
||||
])
|
||||
.status_checked()?;
|
||||
println!();
|
||||
}
|
||||
_ => {
|
||||
println!("{} {}", style("Up-to-date").green().bold(), &repo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result.map(|_| ())
|
||||
}
|
||||
|
||||
fn get_head_revision(git: &Path, repo: &str) -> Option<String> {
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(repo)
|
||||
.current_dir(repo.as_ref())
|
||||
.args(["rev-parse", "HEAD"])
|
||||
.output_checked_utf8()
|
||||
.map(|output| output.stdout.trim().to_string())
|
||||
.map_err(|e| {
|
||||
error!("Error getting revision for {}: {}", repo, e);
|
||||
error!("Error getting revision for {}: {e}", repo.as_ref().display(),);
|
||||
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
fn has_remotes(git: &Path, repo: &str) -> Option<bool> {
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(repo)
|
||||
.args(["remote", "show"])
|
||||
.output_checked_utf8()
|
||||
.map(|output| output.stdout.lines().count() > 0)
|
||||
.map_err(|e| {
|
||||
error!("Error getting remotes for {}: {}", repo, e);
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
impl RepoStep {
|
||||
/// Try to create a `RepoStep`, fail if `git` is not found.
|
||||
pub fn try_new() -> Result<Self> {
|
||||
let git = require("git")?;
|
||||
let mut glob_match_options = MatchOptions::new();
|
||||
|
||||
impl Git {
|
||||
pub fn new() -> Self {
|
||||
Self { git: which("git") }
|
||||
if cfg!(windows) {
|
||||
glob_match_options.case_sensitive = false;
|
||||
}
|
||||
|
||||
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<String> {
|
||||
Ok(Self {
|
||||
git,
|
||||
repos: HashSet::new(),
|
||||
bad_patterns: Vec::new(),
|
||||
glob_match_options,
|
||||
})
|
||||
}
|
||||
|
||||
/// Try to get the root of the repo specified in `path`.
|
||||
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<PathBuf> {
|
||||
match path.as_ref().canonicalize() {
|
||||
Ok(mut path) => {
|
||||
debug_assert!(path.exists());
|
||||
@@ -158,111 +197,59 @@ impl Git {
|
||||
path_string
|
||||
};
|
||||
|
||||
if let Some(git) = &self.git {
|
||||
let output = Command::new(git)
|
||||
let output = Command::new(&self.git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(path)
|
||||
.args(["rev-parse", "--show-toplevel"])
|
||||
.output_checked_utf8()
|
||||
.ok()
|
||||
.map(|output| output.stdout.trim().to_string());
|
||||
// trim the last newline char
|
||||
.map(|output| PathBuf::from(output.stdout.trim()));
|
||||
|
||||
return output;
|
||||
}
|
||||
Err(e) => {
|
||||
if e.kind() == io::ErrorKind::NotFound {
|
||||
debug!("{} does not exist", path.as_ref().display());
|
||||
} else {
|
||||
error!("Error looking for {}: {e}", path.as_ref().display());
|
||||
}
|
||||
}
|
||||
Err(e) => match e.kind() {
|
||||
io::ErrorKind::NotFound => debug!("{} does not exists", path.as_ref().display()),
|
||||
_ => error!("Error looking for {}: {}", path.as_ref().display(), e),
|
||||
},
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
pub fn multi_pull_step(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
|
||||
// Warn the user about the bad patterns.
|
||||
//
|
||||
// NOTE: this should be executed **before** skipping the Git step or the
|
||||
// user won't receive this warning in the cases where all the paths configured
|
||||
// are bad patterns.
|
||||
repositories
|
||||
.bad_patterns
|
||||
.iter()
|
||||
.for_each(|pattern| print_warning(format!("Path {pattern} did not contain any git repositories")));
|
||||
|
||||
if repositories.repositories.is_empty() {
|
||||
return Err(SkipStep(String::from("No repositories to pull")).into());
|
||||
}
|
||||
|
||||
print_separator("Git repositories");
|
||||
self.multi_pull(repositories, ctx)
|
||||
}
|
||||
|
||||
pub fn multi_pull(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
|
||||
let git = self.git.as_ref().unwrap();
|
||||
|
||||
if ctx.run_type().dry() {
|
||||
repositories
|
||||
.repositories
|
||||
.iter()
|
||||
.for_each(|repo| println!("Would pull {}", &repo));
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let futures_iterator = repositories
|
||||
.repositories
|
||||
.iter()
|
||||
.filter(|repo| match has_remotes(git, repo) {
|
||||
Some(false) => {
|
||||
println!(
|
||||
"{} {} because it has no remotes",
|
||||
style("Skipping").yellow().bold(),
|
||||
repo
|
||||
);
|
||||
false
|
||||
}
|
||||
_ => true, // repo has remotes or command to check for remotes has failed. proceed to pull anyway.
|
||||
})
|
||||
.map(|repo| pull_repository(repo.clone(), git, ctx));
|
||||
|
||||
let stream_of_futures = if let Some(limit) = ctx.config().git_concurrency_limit() {
|
||||
iter(futures_iterator).buffer_unordered(limit).boxed()
|
||||
} else {
|
||||
futures_iterator.collect::<FuturesUnordered<_>>().boxed()
|
||||
};
|
||||
|
||||
let basic_rt = runtime::Runtime::new()?;
|
||||
let results = basic_rt.block_on(async { stream_of_futures.collect::<Vec<Result<()>>>().await });
|
||||
|
||||
let error = results.into_iter().find(|r| r.is_err());
|
||||
error.unwrap_or(Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Repositories<'a> {
|
||||
pub fn new(git: &'a Git) -> Self {
|
||||
let mut glob_match_options = MatchOptions::new();
|
||||
|
||||
if cfg!(windows) {
|
||||
glob_match_options.case_sensitive = false;
|
||||
}
|
||||
|
||||
Self {
|
||||
git,
|
||||
repositories: HashSet::new(),
|
||||
bad_patterns: Vec::new(),
|
||||
glob_match_options,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if `path` is a git repo, if yes, add it to `self.repos`.
|
||||
///
|
||||
/// Return the check result.
|
||||
pub fn insert_if_repo<P: AsRef<Path>>(&mut self, path: P) -> bool {
|
||||
if let Some(repo) = self.git.get_repo_root(path) {
|
||||
self.repositories.insert(repo);
|
||||
if let Some(repo) = self.get_repo_root(path) {
|
||||
self.repos.insert(repo);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if `repo` has a remote.
|
||||
fn has_remotes<P: AsRef<Path>>(&self, repo: P) -> Option<bool> {
|
||||
let mut cmd = Command::new(&self.git);
|
||||
cmd.stdin(Stdio::null())
|
||||
.current_dir(repo.as_ref())
|
||||
.args(["remote", "show"]);
|
||||
|
||||
let res = cmd.output_checked_utf8();
|
||||
|
||||
res.map(|output| output.stdout.lines().count() > 0)
|
||||
.map_err(|e| {
|
||||
error!("Error getting remotes for {}: {e}", repo.as_ref().display());
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
/// Similar to `insert_if_repo`, with glob support.
|
||||
pub fn glob_insert(&mut self, pattern: &str) {
|
||||
if let Ok(glob) = glob_with(pattern, self.glob_match_options) {
|
||||
let mut last_git_repo: Option<PathBuf> = None;
|
||||
@@ -272,7 +259,7 @@ impl<'a> Repositories<'a> {
|
||||
if let Some(last_git_repo) = &last_git_repo {
|
||||
if path.is_descendant_of(last_git_repo) {
|
||||
debug!(
|
||||
"Skipping {} because it's a decendant of last known repo {}",
|
||||
"Skipping {} because it's a descendant of last known repo {}",
|
||||
path.display(),
|
||||
last_git_repo.display()
|
||||
);
|
||||
@@ -284,7 +271,7 @@ impl<'a> Repositories<'a> {
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Error in path {}", e);
|
||||
error!("Error in path {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -293,18 +280,141 @@ impl<'a> Repositories<'a> {
|
||||
self.bad_patterns.push(String::from(pattern));
|
||||
}
|
||||
} else {
|
||||
error!("Bad glob pattern: {}", pattern);
|
||||
error!("Bad glob pattern: {pattern}");
|
||||
}
|
||||
}
|
||||
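`glob_insert` expands each user-configured pattern with `glob_with`, matching case-insensitively on Windows, and skips paths that are descendants of a repository it has already recorded. A rough stand-alone sketch of that expansion; the descendant check here is a plain `starts_with`, not the project's `is_descendant_of` helper, and the pattern in `main` is illustrative.

```rust
use glob::{glob_with, MatchOptions};
use std::path::PathBuf;

/// Expand one glob pattern into candidate repository paths.
fn expand_pattern(pattern: &str) -> Vec<PathBuf> {
    let mut options = MatchOptions::new();
    if cfg!(windows) {
        options.case_sensitive = false;
    }

    let mut found = Vec::new();
    match glob_with(pattern, options) {
        Ok(paths) => {
            for entry in paths {
                match entry {
                    Ok(path) => {
                        // Skip anything nested inside the path we just recorded.
                        if found.last().map_or(false, |last: &PathBuf| path.starts_with(last)) {
                            continue;
                        }
                        found.push(path);
                    }
                    Err(e) => eprintln!("Error in path {e}"),
                }
            }
        }
        Err(_) => eprintln!("Bad glob pattern: {pattern}"),
    }
    found
}

fn main() {
    for repo in expand_pattern("/home/user/src/*") {
        println!("{}", repo.display());
    }
}
```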
|
||||
#[cfg(unix)]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.repositories.is_empty()
|
||||
/// True if `self.repos` is empty.
|
||||
pub fn is_repos_empty(&self) -> bool {
|
||||
self.repos.is_empty()
|
||||
}
|
||||
|
||||
/// Remove `path` from `self.repos`.
|
||||
///
|
||||
// `cfg(unix)` because it is only used in the oh-my-zsh step.
|
||||
#[cfg(unix)]
|
||||
pub fn remove(&mut self, path: &str) {
|
||||
let _removed = self.repositories.remove(path);
|
||||
pub fn remove<P: AsRef<Path>>(&mut self, path: P) {
|
||||
let _removed = self.repos.remove(path.as_ref());
|
||||
debug_assert!(_removed);
|
||||
}
|
||||
|
||||
/// Try to pull a repo.
|
||||
async fn pull_repo<P: AsRef<Path>>(&self, ctx: &ExecutionContext<'_>, repo: P) -> Result<()> {
|
||||
let before_revision = get_head_revision(&self.git, &repo);
|
||||
|
||||
if ctx.config().verbose() {
|
||||
println!("{} {}", style(t!("Pulling")).cyan().bold(), repo.as_ref().display());
|
||||
}
|
||||
|
||||
let mut command = AsyncCommand::new(&self.git);
|
||||
|
||||
command
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args(["pull", "--ff-only"]);
|
||||
|
||||
if let Some(extra_arguments) = ctx.config().git_arguments() {
|
||||
command.args(extra_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let pull_output = command.output().await?;
|
||||
let submodule_output = AsyncCommand::new(&self.git)
|
||||
.args(["submodule", "update", "--recursive"])
|
||||
.current_dir(&repo)
|
||||
.stdin(Stdio::null())
|
||||
.output()
|
||||
.await?;
|
||||
let result = output_checked_utf8(pull_output)
|
||||
.and_then(|()| output_checked_utf8(submodule_output))
|
||||
.wrap_err_with(|| format!("Failed to pull {}", repo.as_ref().display()));
|
||||
|
||||
if result.is_err() {
|
||||
println!(
|
||||
"{} {} {}",
|
||||
style(t!("Failed")).red().bold(),
|
||||
t!("pulling"),
|
||||
repo.as_ref().display()
|
||||
);
|
||||
} else {
|
||||
let after_revision = get_head_revision(&self.git, repo.as_ref());
|
||||
|
||||
match (&before_revision, &after_revision) {
|
||||
(Some(before), Some(after)) if before != after => {
|
||||
println!("{} {}", style(t!("Changed")).yellow().bold(), repo.as_ref().display());
|
||||
|
||||
Command::new(&self.git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args([
|
||||
"--no-pager",
|
||||
"log",
|
||||
"--no-decorate",
|
||||
"--oneline",
|
||||
&format!("{before}..{after}"),
|
||||
])
|
||||
.status_checked()?;
|
||||
println!();
|
||||
}
|
||||
_ => {
|
||||
if ctx.config().verbose() {
|
||||
println!("{} {}", style(t!("Up-to-date")).green().bold(), repo.as_ref().display());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Pull the repositories specified in `self.repos`.
|
||||
///
|
||||
/// # NOTE
|
||||
/// This function will create an async runtime and do the real job so the
|
||||
/// function itself is not async.
|
||||
fn pull_repos(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
if ctx.run_type().dry() {
|
||||
self.repos
|
||||
.iter()
|
||||
.for_each(|repo| println!("{}", t!("Would pull {repo}", repo = repo.display())));
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if !ctx.config().verbose() {
|
||||
println!(
|
||||
"\n{} {}\n",
|
||||
style(t!("Only")).green().bold(),
|
||||
t!("updated repositories will be shown...")
|
||||
);
|
||||
}
|
||||
|
||||
let futures_iterator = self
|
||||
.repos
|
||||
.iter()
|
||||
.filter(|repo| match self.has_remotes(repo) {
|
||||
Some(false) => {
|
||||
println!(
|
||||
"{} {} {}",
|
||||
style(t!("Skipping")).yellow().bold(),
|
||||
repo.display(),
|
||||
t!("because it has no remotes")
|
||||
);
|
||||
false
|
||||
}
|
||||
_ => true, // repo has remotes or command to check for remotes has failed. proceed to pull anyway.
|
||||
})
|
||||
.map(|repo| self.pull_repo(ctx, repo));
|
||||
|
||||
let stream_of_futures = if let Some(limit) = ctx.config().git_concurrency_limit() {
|
||||
iter(futures_iterator).buffer_unordered(limit).boxed()
|
||||
} else {
|
||||
futures_iterator.collect::<FuturesUnordered<_>>().boxed()
|
||||
};
|
||||
|
||||
let basic_rt = runtime::Runtime::new()?;
|
||||
let results = basic_rt.block_on(async { stream_of_futures.collect::<Vec<Result<()>>>().await });
|
||||
|
||||
let error = results.into_iter().find(std::result::Result::is_err);
|
||||
error.unwrap_or(Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
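`pull_repos` turns every repository into a future and, when `git_concurrency_limit` is set, caps how many run at once with `buffer_unordered`; otherwise everything is polled through a `FuturesUnordered`. A stand-alone sketch of that pattern with stub futures, assuming `tokio` and `futures` as dependencies (as the real step already has):

```rust
use futures::stream::{iter, FuturesUnordered, StreamExt};
use tokio::runtime;

/// Stand-in for the real async `git pull` of one repository.
async fn pull_one(repo: String) -> Result<(), String> {
    println!("pulling {repo}");
    Ok(())
}

fn pull_all(repos: Vec<String>, limit: Option<usize>) -> Result<(), String> {
    let futures_iterator = repos.into_iter().map(pull_one);

    let rt = runtime::Runtime::new().map_err(|e| e.to_string())?;
    let results: Vec<Result<(), String>> = rt.block_on(async {
        match limit {
            // At most `limit` pulls in flight at any time.
            Some(limit) => iter(futures_iterator).buffer_unordered(limit).collect::<Vec<_>>().await,
            // No limit: poll every pull concurrently.
            None => futures_iterator.collect::<FuturesUnordered<_>>().collect::<Vec<_>>().await,
        }
    });

    // Surface the first error, if any, like the original does.
    results.into_iter().find(|r| r.is_err()).unwrap_or(Ok(()))
}

fn main() {
    let repos = vec!["dotfiles".to_string(), "nvim-config".to_string()];
    pull_all(repos, Some(4)).unwrap();
}
```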
@@ -15,7 +15,7 @@ pub fn run_go_global_update(ctx: &ExecutionContext) -> Result<()> {

print_separator("go-global-update");

ctx.run_type().execute(go_global_update).status_checked()
ctx.execute(go_global_update).status_checked()
}

/// <https://github.com/nao1215/gup>
@@ -24,7 +24,7 @@ pub fn run_go_gup(ctx: &ExecutionContext) -> Result<()> {

print_separator("gup");

ctx.run_type().execute(gup).arg("update").status_checked()
ctx.execute(gup).arg("update").status_checked()
}

/// Get the path of a Go binary.

@@ -1,6 +1,7 @@
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::require;
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
|
||||
use crate::execution_context::ExecutionContext;
|
||||
|
||||
@@ -11,13 +12,10 @@ pub fn upgrade_kak_plug(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("Kakoune");
|
||||
|
||||
// TODO: Why supress output for this command?
|
||||
ctx.run_type()
|
||||
.execute(kak)
|
||||
.args(["-ui", "dummy", "-e", UPGRADE_KAK])
|
||||
.output()?;
|
||||
// TODO: Why suppress output for this command?
|
||||
ctx.execute(kak).args(["-ui", "dummy", "-e", UPGRADE_KAK]).output()?;
|
||||
|
||||
println!("Plugins upgraded");
|
||||
println!("{}", t!("Plugins upgraded"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -4,16 +4,16 @@ use std::os::unix::fs::MetadataExt;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
||||
use crate::HOME_DIR;
|
||||
use color_eyre::eyre::Result;
|
||||
#[cfg(target_os = "linux")]
|
||||
use nix::unistd::Uid;
|
||||
use rust_i18n::t;
|
||||
use semver::Version;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::terminal::{print_info, print_separator};
|
||||
use crate::utils::{require, PathExt};
|
||||
use crate::{error::SkipStep, execution_context::ExecutionContext};
|
||||
|
||||
@@ -86,20 +86,16 @@ impl NPM {
|
||||
.args(["--version"])
|
||||
.output_checked_utf8()
|
||||
.map(|s| s.stdout.trim().to_owned());
|
||||
Version::parse(&version_str?).map_err(|err| err.into())
|
||||
Version::parse(&version_str?).map_err(std::convert::Into::into)
|
||||
}
|
||||
|
||||
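The version probe above shells out to `npm --version` and parses the trimmed output with the `semver` crate; the tweak only swaps the closure for `std::convert::Into::into`. A stand-alone equivalent (assumes `npm` is on the PATH when run):

```rust
use semver::Version;
use std::process::Command;

/// Run `npm --version` and parse the result as a semantic version.
fn npm_version() -> Result<Version, Box<dyn std::error::Error>> {
    let output = Command::new("npm").arg("--version").output()?;
    let version_str = String::from_utf8_lossy(&output.stdout);
    Ok(Version::parse(version_str.trim())?)
}

fn main() {
    match npm_version() {
        Ok(v) => println!("npm {v}"),
        Err(e) => eprintln!("could not determine npm version: {e}"),
    }
}
```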
fn upgrade(&self, ctx: &ExecutionContext, use_sudo: bool) -> Result<()> {
|
||||
let args = ["update", self.global_location_arg()];
|
||||
if use_sudo {
|
||||
let sudo = require_option(ctx.sudo().clone(), REQUIRE_SUDO.to_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.arg(&self.command)
|
||||
.args(args)
|
||||
.status_checked()?;
|
||||
let sudo = ctx.require_sudo()?;
|
||||
sudo.execute(ctx, &self.command)?.args(args).status_checked()?;
|
||||
} else {
|
||||
ctx.run_type().execute(&self.command).args(args).status_checked()?;
|
||||
ctx.execute(&self.command).args(args).status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -121,15 +117,11 @@ impl NPM {
|
||||
|
||||
struct Yarn {
|
||||
command: PathBuf,
|
||||
yarn: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl Yarn {
|
||||
fn new(command: PathBuf) -> Self {
|
||||
Self {
|
||||
command,
|
||||
yarn: require("yarn").ok(),
|
||||
}
|
||||
Self { command }
|
||||
}
|
||||
|
||||
fn has_global_subcmd(&self) -> bool {
|
||||
@@ -156,14 +148,10 @@ impl Yarn {
|
||||
let args = ["global", "upgrade"];
|
||||
|
||||
if use_sudo {
|
||||
let sudo = require_option(ctx.sudo().clone(), REQUIRE_SUDO.to_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.arg(self.yarn.as_ref().unwrap_or(&self.command))
|
||||
.args(args)
|
||||
.status_checked()?;
|
||||
let sudo = ctx.require_sudo()?;
|
||||
sudo.execute(ctx, &self.command)?.args(args).status_checked()?;
|
||||
} else {
|
||||
ctx.run_type().execute(&self.command).args(args).status_checked()?;
|
||||
ctx.execute(&self.command).args(args).status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -183,6 +171,88 @@ impl Yarn {
|
||||
}
|
||||
}
|
||||
|
||||
struct Deno {
|
||||
command: PathBuf,
|
||||
}
|
||||
|
||||
impl Deno {
|
||||
fn new(command: PathBuf) -> Self {
|
||||
Self { command }
|
||||
}
|
||||
|
||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut args = vec![];
|
||||
|
||||
let version = ctx.config().deno_version();
|
||||
if let Some(version) = version {
|
||||
let bin_version = self.version()?;
|
||||
|
||||
if bin_version >= Version::new(2, 0, 0) {
|
||||
args.push(version);
|
||||
} else if bin_version >= Version::new(1, 6, 0) {
|
||||
match version {
|
||||
"stable" => { /* do nothing, as stable is the default channel to upgrade */ }
|
||||
"rc" => {
|
||||
return Err(SkipStep(
|
||||
"Deno (1.6.0-2.0.0) cannot be upgraded to a release candidate".to_string(),
|
||||
)
|
||||
.into());
|
||||
}
|
||||
"canary" => args.push("--canary"),
|
||||
_ => {
|
||||
if Version::parse(version).is_err() {
|
||||
return Err(SkipStep("Invalid Deno version".to_string()).into());
|
||||
}
|
||||
|
||||
args.push("--version");
|
||||
args.push(version);
|
||||
}
|
||||
}
|
||||
} else if bin_version >= Version::new(1, 0, 0) {
|
||||
match version {
|
||||
"stable" | "rc" | "canary" => {
|
||||
// Prior to v1.6.0, `deno upgrade` is not able fetch the latest tag version.
|
||||
return Err(
|
||||
SkipStep("Deno (1.0.0-1.6.0) cannot be upgraded to a named channel".to_string()).into(),
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
if Version::parse(version).is_err() {
|
||||
return Err(SkipStep("Invalid Deno version".to_string()).into());
|
||||
}
|
||||
|
||||
args.push("--version");
|
||||
args.push(version);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// v0.x cannot be upgraded with `deno upgrade` to v1.x or v2.x
|
||||
// nor can be upgraded to a specific version.
|
||||
return Err(SkipStep("Unsupported Deno version".to_string()).into());
|
||||
}
|
||||
}
|
||||
|
||||
ctx.execute(&self.command).arg("upgrade").args(args).status_checked()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get the version of Deno.
|
||||
///
|
||||
/// This function will return the version of Deno installed on the system.
|
||||
/// The version is parsed from the output of `deno -V`.
|
||||
///
|
||||
/// ```sh
|
||||
/// deno -V # deno 1.6.0
|
||||
/// ```
|
||||
fn version(&self) -> Result<Version> {
|
||||
let version_str = Command::new(&self.command)
|
||||
.args(["-V"])
|
||||
.output_checked_utf8()
|
||||
.map(|s| s.stdout.trim().to_owned().split_off(5)); // remove "deno " prefix
|
||||
Version::parse(&version_str?).map_err(std::convert::Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
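The new `Deno` helper above maps the configured `deno_version` onto `deno upgrade` arguments, with different rules depending on the installed release, and parses that release out of `deno -V`. A condensed, hypothetical sketch of just that decision (the 1.0-1.6 branch is folded in with 1.6+, the `semver` crate is assumed, and `upgrade_args` is not a real Topgrade function):

use semver::Version;

fn upgrade_args<'a>(installed: &Version, target: &'a str) -> Result<Vec<&'a str>, String> {
    if *installed >= Version::new(2, 0, 0) {
        // Deno 2 accepts the channel or version string directly
        Ok(vec![target])
    } else if *installed >= Version::new(1, 6, 0) {
        match target {
            "stable" => Ok(vec![]), // default channel, no extra args
            "canary" => Ok(vec!["--canary"]),
            "rc" => Err("Deno 1.6.0-2.0.0 cannot be upgraded to a release candidate".into()),
            v if Version::parse(v).is_ok() => Ok(vec!["--version", v]),
            _ => Err("invalid Deno version".into()),
        }
    } else {
        Err("unsupported Deno version".into())
    }
}

fn main() {
    // "deno 1.6.0" is what `deno -V` prints; strip the prefix before parsing
    let installed = Version::parse("deno 1.6.0".trim_start_matches("deno ")).unwrap();
    println!("{:?}", upgrade_args(&installed, "canary")); // Ok(["--canary"])
}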
#[cfg(target_os = "linux")]
fn should_use_sudo(npm: &NPM, ctx: &ExecutionContext) -> Result<bool> {
if npm.should_use_sudo()? {
@@ -214,7 +284,7 @@ fn should_use_sudo_yarn(yarn: &Yarn, ctx: &ExecutionContext) -> Result<bool> {
pub fn run_npm_upgrade(ctx: &ExecutionContext) -> Result<()> {
let npm = require("npm").map(|b| NPM::new(b, NPMVariant::Npm))?;

print_separator("Node Package Manager");
print_separator(t!("Node Package Manager"));

#[cfg(target_os = "linux")]
{
@@ -230,7 +300,7 @@ pub fn run_npm_upgrade(ctx: &ExecutionContext) -> Result<()> {
pub fn run_pnpm_upgrade(ctx: &ExecutionContext) -> Result<()> {
let pnpm = require("pnpm").map(|b| NPM::new(b, NPMVariant::Pnpm))?;

print_separator("Performant Node Package Manager");
print_separator(t!("Performant Node Package Manager"));

#[cfg(target_os = "linux")]
{
@@ -251,7 +321,7 @@ pub fn run_yarn_upgrade(ctx: &ExecutionContext) -> Result<()> {
return Ok(());
}

print_separator("Yarn Package Manager");
print_separator(t!("Yarn Package Manager"));

#[cfg(target_os = "linux")]
{
@@ -265,14 +335,55 @@ pub fn run_yarn_upgrade(ctx: &ExecutionContext) -> Result<()> {
}

pub fn deno_upgrade(ctx: &ExecutionContext) -> Result<()> {
let deno = require("deno")?;
let deno = require("deno").map(Deno::new)?;
let deno_dir = HOME_DIR.join(".deno");

if !deno.canonicalize()?.is_descendant_of(&deno_dir) {
let skip_reason = SkipStep("Deno installed outside of .deno directory".to_string());
if !deno.command.canonicalize()?.is_descendant_of(&deno_dir) {
let skip_reason = SkipStep(t!("Deno installed outside of .deno directory").to_string());
return Err(skip_reason.into());
}

print_separator("Deno");
ctx.run_type().execute(&deno).arg("upgrade").status_checked()
deno.upgrade(ctx)
}

/// There is no `volta upgrade` command, so we need to upgrade each package
pub fn run_volta_packages_upgrade(ctx: &ExecutionContext) -> Result<()> {
let volta = require("volta")?;

print_separator("Volta");

if ctx.run_type().dry() {
print_info(t!("Updating Volta packages..."));
return Ok(());
}

let list_output = ctx
.execute(&volta)
.args(["list", "--format=plain"])
.output_checked_utf8()?
.stdout;

let installed_packages: Vec<&str> = list_output
.lines()
.filter_map(|line| {
// format is 'kind package@version ...'
let mut parts = line.split_whitespace();
parts.next();
let package_part = parts.next()?;
let version_index = package_part.rfind('@').unwrap_or(package_part.len());
Some(package_part[..version_index].trim())
})
.collect();

if installed_packages.is_empty() {
print_info(t!("No packages installed with Volta"));
return Ok(());
}

for package in &installed_packages {
ctx.execute(&volta).args(["install", package]).status_checked()?;
}

Ok(())
}
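`run_volta_packages_upgrade` reinstalls every tool because Volta has no bulk upgrade command, so the interesting part is parsing `volta list --format=plain`. A small self-contained sketch of that parsing, run against a made-up listing (no Volta binary needed):

fn main() {
    // lines look like 'kind package@version ...' per the comment in the code above
    let list_output = "runtime node@20.11.1 (default)\npackage typescript@5.3.3 (default)\npackage cowsay@1.6.0 (default)";
    let installed: Vec<&str> = list_output
        .lines()
        .filter_map(|line| {
            // drop the kind, then cut the '@version' suffix off the second field
            let mut parts = line.split_whitespace();
            parts.next();
            let package_part = parts.next()?;
            let version_index = package_part.rfind('@').unwrap_or(package_part.len());
            Some(package_part[..version_index].trim())
        })
        .collect();
    assert_eq!(installed, ["node", "typescript", "cowsay"]);
}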
@@ -1,10 +1,10 @@
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::step::Step;
use crate::terminal::print_separator;
use crate::utils::require;
use crate::utils::which;
use crate::Step;
use color_eyre::eyre::Result;
use color_eyre::Result;

pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
//let pkg = require("pkg")?;
@@ -14,7 +14,7 @@ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {

let is_nala = pkg.ends_with("nala");

let mut command = ctx.run_type().execute(&pkg);
let mut command = ctx.execute(&pkg);
command.arg("upgrade");

if ctx.config().yes(Step::System) {
@@ -23,10 +23,10 @@ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
command.status_checked()?;

if !is_nala && ctx.config().cleanup() {
ctx.run_type().execute(&pkg).arg("clean").status_checked()?;
ctx.execute(&pkg).arg("clean").status_checked()?;

let apt = require("apt")?;
let mut command = ctx.run_type().execute(apt);
let mut command = ctx.execute(apt);
command.arg("autoremove");
if ctx.config().yes(Step::System) {
command.arg("-y");
@@ -3,15 +3,16 @@ use std::ffi::OsString;
use std::path::{Path, PathBuf};

use color_eyre::eyre;
use color_eyre::eyre::Result;
use color_eyre::eyre::{Context, Result};
use rust_i18n::t;
use walkdir::WalkDir;

use crate::command::CommandExt;
use crate::error::TopgradeError;
use crate::execution_context::ExecutionContext;
use crate::sudo::Sudo;
use crate::step::Step;
use crate::utils::which;
use crate::{config, Step};
use crate::{config, output_changed_message};

fn get_execution_path() -> OsString {
let mut path = OsString::from("/usr/bin:");
@@ -31,13 +32,12 @@ pub struct YayParu {

impl ArchPackageManager for YayParu {
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
if ctx.config().show_arch_news() {
ctx.run_type()
.execute(&self.executable)
ctx.execute(&self.executable)
.arg("-Pw")
.status_checked_with_codes(&[1, 0])?;
}

let mut command = ctx.run_type().execute(&self.executable);
let mut command = ctx.execute(&self.executable);

command
.arg("--pacman")
@@ -52,7 +52,7 @@ impl ArchPackageManager for YayParu {
command.status_checked()?;

if ctx.config().cleanup() {
let mut command = ctx.run_type().execute(&self.executable);
let mut command = ctx.execute(&self.executable);
command.arg("--pacman").arg(&self.pacman).arg("-Scc");
if ctx.config().yes(Step::System) {
command.arg("--noconfirm");
@@ -79,7 +79,7 @@ pub struct GarudaUpdate {

impl ArchPackageManager for GarudaUpdate {
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
let mut command = ctx.run_type().execute(&self.executable);
let mut command = ctx.execute(&self.executable);

command
.env("PATH", get_execution_path())
@@ -110,7 +110,7 @@ pub struct Trizen {

impl ArchPackageManager for Trizen {
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
let mut command = ctx.run_type().execute(&self.executable);
let mut command = ctx.execute(&self.executable);

command
.arg("-Syu")
@@ -123,7 +123,7 @@ impl ArchPackageManager for Trizen {
command.status_checked()?;

if ctx.config().cleanup() {
let mut command = ctx.run_type().execute(&self.executable);
let mut command = ctx.execute(&self.executable);
command.arg("-Sc");
if ctx.config().yes(Step::System) {
command.arg("--noconfirm");
@@ -144,25 +144,22 @@ impl Trizen {
}

pub struct Pacman {
sudo: Sudo,
executable: PathBuf,
}

impl ArchPackageManager for Pacman {
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
let mut command = ctx.run_type().execute(&self.sudo);
command
.arg(&self.executable)
.arg("-Syu")
.env("PATH", get_execution_path());
let sudo = ctx.require_sudo()?;
let mut command = sudo.execute(ctx, &self.executable)?;
command.arg("-Syu").env("PATH", get_execution_path());
if ctx.config().yes(Step::System) {
command.arg("--noconfirm");
}
command.status_checked()?;

if ctx.config().cleanup() {
let mut command = ctx.run_type().execute(&self.sudo);
command.arg(&self.executable).arg("-Scc");
let mut command = sudo.execute(ctx, &self.executable)?;
command.arg("-Scc");
if ctx.config().yes(Step::System) {
command.arg("--noconfirm");
}
@@ -174,10 +171,9 @@ impl ArchPackageManager for Pacman {
}

impl Pacman {
pub fn get(ctx: &ExecutionContext) -> Option<Self> {
pub fn get() -> Option<Self> {
Some(Self {
executable: which("powerpill").unwrap_or_else(|| PathBuf::from("pacman")),
sudo: ctx.sudo().to_owned()?,
})
}
}
@@ -196,7 +192,7 @@ impl Pikaur {

impl ArchPackageManager for Pikaur {
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
let mut command = ctx.run_type().execute(&self.executable);
let mut command = ctx.execute(&self.executable);

command
.arg("-Syu")
@@ -210,7 +206,7 @@ impl ArchPackageManager for Pikaur {
command.status_checked()?;

if ctx.config().cleanup() {
let mut command = ctx.run_type().execute(&self.executable);
let mut command = ctx.execute(&self.executable);
command.arg("-Sc");
if ctx.config().yes(Step::System) {
command.arg("--noconfirm");
@@ -235,7 +231,7 @@ impl Pamac {
}
impl ArchPackageManager for Pamac {
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
let mut command = ctx.run_type().execute(&self.executable);
let mut command = ctx.execute(&self.executable);

command
.arg("upgrade")
@@ -249,7 +245,7 @@ impl ArchPackageManager for Pamac {
command.status_checked()?;

if ctx.config().cleanup() {
let mut command = ctx.run_type().execute(&self.executable);
let mut command = ctx.execute(&self.executable);
command.arg("clean");
if ctx.config().yes(Step::System) {
command.arg("--no-confirm");
@@ -263,46 +259,67 @@ impl ArchPackageManager for Pamac {

pub struct Aura {
executable: PathBuf,
sudo: Sudo,
}

impl Aura {
fn get(ctx: &ExecutionContext) -> Option<Self> {
fn get() -> Option<Self> {
Some(Self {
executable: which("aura")?,
sudo: ctx.sudo().to_owned()?,
})
}
}

impl ArchPackageManager for Aura {
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
let sudo = which("sudo").unwrap_or_else(PathBuf::new);
let mut aur_update = ctx.run_type().execute(&sudo);
use semver::Version;

if sudo.ends_with("sudo") {
aur_update
.arg(&self.executable)
.arg("-Au")
let version_cmd_output = ctx.execute(&self.executable).arg("--version").output_checked_utf8()?;
// Output will be something like: "aura x.x.x\n"
let version_cmd_stdout = version_cmd_output.stdout;
let version_str = version_cmd_stdout.trim_start_matches("aura ").trim_end();
let version = Version::parse(version_str)
.wrap_err_with(|| output_changed_message!("aura --version", "invalid version"))?;

// Aura, since version 4.0.6, no longer needs sudo.
//
// https://github.com/fosskers/aura/releases/tag/v4.0.6
let version_no_sudo = Version::new(4, 0, 6);

if version >= version_no_sudo {
let mut cmd = ctx.execute(&self.executable);
cmd.arg("-Au")
.args(ctx.config().aura_aur_arguments().split_whitespace());
if ctx.config().yes(Step::System) {
aur_update.arg("--noconfirm");
cmd.arg("--noconfirm");
}
cmd.status_checked()?;

aur_update.status_checked()?;
} else {
println!("Aura requires sudo installed to work with AUR packages")
}

let mut pacman_update = ctx.run_type().execute(&self.sudo);
pacman_update
.arg(&self.executable)
.arg("-Syu")
let mut cmd = ctx.execute(&self.executable);
cmd.arg("-Syu")
.args(ctx.config().aura_pacman_arguments().split_whitespace());
if ctx.config().yes(Step::System) {
pacman_update.arg("--noconfirm");
cmd.arg("--noconfirm");
}
cmd.status_checked()?;
} else {
let sudo = ctx.require_sudo()?;

let mut cmd = sudo.execute(ctx, &self.executable)?;
cmd.arg("-Au")
.args(ctx.config().aura_aur_arguments().split_whitespace());
if ctx.config().yes(Step::System) {
cmd.arg("--noconfirm");
}
cmd.status_checked()?;

let mut cmd = sudo.execute(ctx, &self.executable)?;
cmd.arg("-Syu")
.args(ctx.config().aura_pacman_arguments().split_whitespace());
if ctx.config().yes(Step::System) {
cmd.arg("--noconfirm");
}
cmd.status_checked()?;
}
pacman_update.status_checked()?;

Ok(())
}
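The Aura step now probes `aura --version` and only routes through sudo for releases older than 4.0.6, which dropped the sudo requirement for AUR work. A condensed sketch of that version gate, assuming the `semver` crate (the sample output strings are illustrative):

use semver::Version;

fn needs_sudo(version_output: &str) -> Result<bool, semver::Error> {
    // `aura --version` prints something like "aura 4.0.5\n"
    let version = Version::parse(version_output.trim_start_matches("aura ").trim_end())?;
    // 4.0.6 is the first release that no longer needs sudo for AUR operations
    Ok(version < Version::new(4, 0, 6))
}

fn main() {
    assert!(needs_sudo("aura 4.0.5\n").unwrap());
    assert!(!needs_sudo("aura 4.0.6\n").unwrap());
}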
@@ -323,16 +340,16 @@ pub fn get_arch_package_manager(ctx: &ExecutionContext) -> Option<Box<dyn ArchPa
.or_else(|| Trizen::get().map(box_package_manager))
.or_else(|| Pikaur::get().map(box_package_manager))
.or_else(|| Pamac::get().map(box_package_manager))
.or_else(|| Pacman::get(ctx).map(box_package_manager))
.or_else(|| Aura::get(ctx).map(box_package_manager)),
.or_else(|| Pacman::get().map(box_package_manager))
.or_else(|| Aura::get().map(box_package_manager)),
config::ArchPackageManager::GarudaUpdate => GarudaUpdate::get().map(box_package_manager),
config::ArchPackageManager::Trizen => Trizen::get().map(box_package_manager),
config::ArchPackageManager::Paru => YayParu::get("paru", &pacman).map(box_package_manager),
config::ArchPackageManager::Yay => YayParu::get("yay", &pacman).map(box_package_manager),
config::ArchPackageManager::Pacman => Pacman::get(ctx).map(box_package_manager),
config::ArchPackageManager::Pacman => Pacman::get().map(box_package_manager),
config::ArchPackageManager::Pikaur => Pikaur::get().map(box_package_manager),
config::ArchPackageManager::Pamac => Pamac::get().map(box_package_manager),
config::ArchPackageManager::Aura => Aura::get(ctx).map(box_package_manager),
config::ArchPackageManager::Aura => Aura::get().map(box_package_manager),
}
}
@@ -355,7 +372,7 @@ pub fn show_pacnew() {
.peekable();

if iter.peek().is_some() {
println!("\nPacman backup configuration files found:");
println!("\n{}", t!("Pacman backup configuration files found:"));

for entry in iter {
println!("{}", entry.path().display());
@@ -1,15 +1,16 @@
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::step::Step;
use crate::terminal::print_separator;
use crate::utils::{require_option, REQUIRE_SUDO};
use color_eyre::eyre::Result;
use std::process::Command;
use rust_i18n::t;

pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("DragonFly BSD Packages");
let mut cmd = ctx.execute(sudo);
cmd.args(["/usr/local/sbin/pkg", "upgrade"]);
print_separator(t!("DragonFly BSD Packages"));

let sudo = ctx.require_sudo()?;
let mut cmd = sudo.execute(ctx, "/usr/local/sbin/pkg")?;
cmd.arg("upgrade");
if ctx.config().yes(Step::System) {
cmd.arg("-y");
}
@@ -17,10 +18,18 @@ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
}

pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
println!();
Command::new(sudo)
.args(["/usr/local/sbin/pkg", "audit", "-Fr"])
.status_checked()?;
print_separator(t!("DragonFly BSD Audit"));

let sudo = ctx.require_sudo()?;
sudo.execute(ctx, "/usr/local/sbin/pkg")?
.args(["audit", "-Fr"])
.status_checked_with(|status| {
if !status.success() {
println!(
"{}",
t!("The package audit was successful, but vulnerable packages still remain on the system")
);
}
Ok(())
})
}
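The BSD audit steps treat a non-zero exit from `pkg audit -Fr` as informational rather than fatal, because the command signals remaining vulnerable packages through its exit code. A rough standard-library illustration of that policy (the `sh -c "exit 1"` call is only a stand-in for the audit):

use std::process::Command;

fn main() -> std::io::Result<()> {
    // stand-in for `pkg audit -Fr`; a non-zero exit means "vulnerable packages found"
    let status = Command::new("sh").args(["-c", "exit 1"]).status()?;
    if !status.success() {
        // report, but do not turn the step into a failure
        println!("audit finished; vulnerable packages still remain on the system");
    }
    Ok(())
}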
@@ -1,27 +1,25 @@
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::step::Step;
use crate::terminal::print_separator;
use crate::utils::{require_option, REQUIRE_SUDO};
use crate::Step;
use color_eyre::eyre::Result;
use std::process::Command;
use color_eyre::Result;
use rust_i18n::t;

pub fn upgrade_freebsd(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("FreeBSD Update");
ctx.run_type()
.execute(sudo)
.args(["/usr/sbin/freebsd-update", "fetch", "install"])
print_separator(t!("FreeBSD Update"));

let sudo = ctx.require_sudo()?;
sudo.execute(ctx, "/usr/sbin/freebsd-update")?
.args(["fetch", "install"])
.status_checked()
}

pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("FreeBSD Packages");
print_separator(t!("FreeBSD Packages"));

let mut command = ctx.run_type().execute(sudo);

command.args(["/usr/sbin/pkg", "upgrade"]);
let sudo = ctx.require_sudo()?;
let mut command = sudo.execute(ctx, "/usr/sbin/pkg")?;
command.arg("upgrade");
if ctx.config().yes(Step::System) {
command.arg("-y");
}
@@ -29,10 +27,10 @@ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
}

pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
println!();
Command::new(sudo)
.args(["/usr/sbin/pkg", "audit", "-Fr"])
.status_checked()?;
Ok(())
print_separator(t!("FreeBSD Audit"));

let sudo = ctx.require_sudo()?;
sudo.execute(ctx, "/usr/sbin/pkg")?
.args(["audit", "-Fr"])
.status_checked()
}
File diff suppressed because it is too large
@@ -1,31 +1,28 @@
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::step::Step;
use crate::terminal::{print_separator, prompt_yesno};
use crate::utils::{require_option, REQUIRE_SUDO};
use crate::{utils::require, Step};
use crate::utils::require;
use color_eyre::eyre::Result;
use rust_i18n::t;
use std::collections::HashSet;
use std::fs;
use std::process::Command;
use tracing::debug;

pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {
require("port")?;
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
let port = require("port")?;

print_separator("MacPorts");
ctx.run_type()
.execute(sudo)
.args(["port", "selfupdate"])
.status_checked()?;
ctx.run_type()
.execute(sudo)
.args(["port", "-u", "upgrade", "outdated"])

let sudo = ctx.require_sudo()?;

sudo.execute(ctx, &port)?.arg("selfupdate").status_checked()?;
sudo.execute(ctx, &port)?
.args(["-u", "upgrade", "outdated"])
.status_checked()?;
if ctx.config().cleanup() {
ctx.run_type()
.execute(sudo)
.args(["port", "-N", "reclaim"])
.status_checked()?;
sudo.execute(ctx, &port)?.args(["-N", "reclaim"]).status_checked()?;
}

Ok(())
@@ -33,30 +30,30 @@ pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {

pub fn run_mas(ctx: &ExecutionContext) -> Result<()> {
let mas = require("mas")?;
print_separator("macOS App Store");
print_separator(t!("macOS App Store"));

ctx.run_type().execute(mas).arg("upgrade").status_checked()
ctx.execute(mas).arg("upgrade").status_checked()
}

pub fn upgrade_macos(ctx: &ExecutionContext) -> Result<()> {
print_separator("macOS system update");
print_separator(t!("macOS system update"));

let should_ask = !(ctx.config().yes(Step::System)) || (ctx.config().dry_run());
let should_ask = !(ctx.config().yes(Step::System) || ctx.config().dry_run());
if should_ask {
println!("Finding available software");
println!("{}", t!("Finding available software"));
if system_update_available()? {
let answer = prompt_yesno("A system update is available. Do you wish to install it?")?;
let answer = prompt_yesno(t!("A system update is available. Do you wish to install it?").as_ref())?;
if !answer {
return Ok(());
}
println!();
} else {
println!("No new software available.");
println!("{}", t!("No new software available."));
return Ok(());
}
}

let mut command = ctx.run_type().execute("softwareupdate");
let mut command = ctx.execute("softwareupdate");
command.args(["--install", "--all"]);

if should_ask {
@@ -85,7 +82,7 @@ pub fn run_sparkle(ctx: &ExecutionContext) -> Result<()> {
.arg(application.path())
.output_checked_utf8();
if probe.is_ok() {
let mut command = ctx.run_type().execute(&sparkle);
let mut command = ctx.execute(&sparkle);
command.args(["bundle", "--check-immediately", "--application"]);
command.arg(application.path());
command.status_checked()?;
@@ -93,3 +90,133 @@ pub fn run_sparkle(ctx: &ExecutionContext) -> Result<()> {
}
Ok(())
}

pub fn update_xcodes(ctx: &ExecutionContext) -> Result<()> {
let xcodes = require("xcodes")?;
print_separator("Xcodes");

let should_ask = !(ctx.config().yes(Step::Xcodes) || ctx.config().dry_run());

let releases = ctx.execute(&xcodes).args(["update"]).output_checked_utf8()?.stdout;

let releases_installed: Vec<String> = releases
.lines()
.filter(|r| r.contains("(Installed)"))
.map(String::from)
.collect();

if releases_installed.is_empty() {
println!("{}", t!("No Xcode releases installed."));
return Ok(());
}

let (installed_gm, installed_beta, installed_regular) =
releases_installed
.iter()
.fold((false, false, false), |(gm, beta, regular), release| {
(
gm || release.contains("GM") || release.contains("Release Candidate"),
beta || release.contains("Beta"),
regular
|| !(release.contains("GM")
|| release.contains("Release Candidate")
|| release.contains("Beta")),
)
});

let releases_gm = releases
.lines()
.filter(|&r| r.matches("GM").count() > 0 || r.matches("Release Candidate").count() > 0)
.map(String::from)
.collect();
let releases_beta = releases
.lines()
.filter(|&r| r.matches("Beta").count() > 0)
.map(String::from)
.collect();
let releases_regular = releases
.lines()
.filter(|&r| {
r.matches("GM").count() == 0
&& r.matches("Release Candidate").count() == 0
&& r.matches("Beta").count() == 0
})
.map(String::from)
.collect();

if installed_gm {
process_xcodes_releases(releases_gm, should_ask, ctx)?;
}
if installed_beta {
process_xcodes_releases(releases_beta, should_ask, ctx)?;
}
if installed_regular {
process_xcodes_releases(releases_regular, should_ask, ctx)?;
}

let releases_new = ctx.execute(&xcodes).args(["list"]).output_checked_utf8()?.stdout;

let releases_gm_new_installed: HashSet<_> = releases_new
.lines()
.filter(|release| {
release.contains("(Installed)") && (release.contains("GM") || release.contains("Release Candidate"))
})
.collect();
let releases_beta_new_installed: HashSet<_> = releases_new
.lines()
.filter(|release| release.contains("(Installed)") && release.contains("Beta"))
.collect();
let releases_regular_new_installed: HashSet<_> = releases_new
.lines()
.filter(|release| {
release.contains("(Installed)")
&& !(release.contains("GM") || release.contains("Release Candidate") || release.contains("Beta"))
})
.collect();

for releases_new_installed in [
releases_gm_new_installed,
releases_beta_new_installed,
releases_regular_new_installed,
] {
if should_ask && releases_new_installed.len() == 2 {
let answer_uninstall =
prompt_yesno(t!("Would you like to move the former Xcode release to the trash?").as_ref())?;
if answer_uninstall {
let _ = ctx
.execute(&xcodes)
.args([
"uninstall",
releases_new_installed.iter().next().copied().unwrap_or_default(),
])
.status_checked();
}
}
}

Ok(())
}
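`update_xcodes` classifies the installed entries from the `xcodes` listing into GM/Release Candidate, Beta, and regular releases with a single fold, and then only processes the channels that are actually installed. A toy version of that classification over a made-up listing (no `xcodes` binary required):

fn main() {
    let releases = "15.2 (Installed)\n15.3 Beta 2\n15.3 Release Candidate\n15.4";
    // same fold as above: one flag per channel, set when any installed release falls in it
    let (gm, beta, regular) = releases
        .lines()
        .filter(|r| r.contains("(Installed)"))
        .fold((false, false, false), |(gm, beta, regular), r| {
            let is_gm = r.contains("GM") || r.contains("Release Candidate");
            let is_beta = r.contains("Beta");
            (gm || is_gm, beta || is_beta, regular || !(is_gm || is_beta))
        });
    println!("gm={gm} beta={beta} regular={regular}"); // gm=false beta=false regular=true
}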
pub fn process_xcodes_releases(releases_filtered: Vec<String>, should_ask: bool, ctx: &ExecutionContext) -> Result<()> {
let xcodes = require("xcodes")?;

if releases_filtered.last().map_or(true, |s| !s.contains("(Installed)")) && !releases_filtered.is_empty() {
println!(
"{} {}",
t!("New Xcode release detected:"),
releases_filtered.last().cloned().unwrap_or_default()
);
if should_ask {
let answer_install = prompt_yesno(t!("Would you like to install it?").as_ref())?;
if answer_install {
let _ = ctx
.execute(xcodes)
.args(["install", &releases_filtered.last().cloned().unwrap_or_default()])
.status_checked();
}
println!();
}
}

Ok(())
}
@@ -1,23 +1,60 @@
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::terminal::print_separator;
use crate::utils::{require_option, REQUIRE_SUDO};
use color_eyre::eyre::Result;
use std::path::PathBuf;
use rust_i18n::t;
use std::fs;

fn is_openbsd_current(ctx: &ExecutionContext) -> Result<bool> {
let motd_content = fs::read_to_string("/etc/motd")?;
let is_current = ["-current", "-beta"].iter().any(|&s| motd_content.contains(s));
if ctx.config().dry_run() {
println!("{}", t!("Would check if OpenBSD is -current"));
Ok(is_current)
} else {
Ok(is_current)
}
}
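`is_openbsd_current` simply scans `/etc/motd`, which names the running release, for a `-current` or `-beta` marker. A standalone approximation that takes the text as a parameter, since the real file only exists on OpenBSD (the motd string below is invented):

fn is_current(motd: &str) -> bool {
    ["-current", "-beta"].iter().any(|&s| motd.contains(s))
}

fn main() {
    // on a real system this would come from fs::read_to_string("/etc/motd")
    let motd = "OpenBSD 7.6-current (GENERIC.MP) #202";
    assert!(is_current(motd));
}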
pub fn upgrade_openbsd(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("OpenBSD Update");
ctx.run_type()
.execute(sudo)
.args(&["/usr/sbin/sysupgrade", "-n"])
.status_checked()
print_separator(t!("OpenBSD Update"));

let sudo = ctx.require_sudo()?;

let is_current = is_openbsd_current(ctx)?;

if ctx.config().dry_run() {
println!("{}", t!("Would upgrade the OpenBSD system"));
return Ok(());
}

if is_current {
sudo.execute(ctx, "/usr/sbin/sysupgrade")?.arg("-sn").status_checked()
} else {
sudo.execute(ctx, "/usr/sbin/syspatch")?.status_checked()
}
}

pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("OpenBSD Packages");
ctx.run_type()
.execute(sudo)
.args(&["/usr/sbin/pkg_add", "-u"])
.status_checked()
print_separator(t!("OpenBSD Packages"));

let sudo = ctx.require_sudo()?;

let is_current = is_openbsd_current(ctx)?;

if ctx.config().dry_run() {
println!("{}", t!("Would upgrade OpenBSD packages"));
return Ok(());
}

if ctx.config().cleanup() {
sudo.execute(ctx, "/usr/sbin/pkg_delete")?.arg("-ac").status_checked()?;
}

let mut command = sudo.execute(ctx, "/usr/sbin/pkg_add")?;
command.arg("-u");
if is_current {
command.arg("-Dsnap");
}
command.status_checked()
}
23
src/steps/os/os_release/aurora
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Aurora"
|
||||
VERSION="latest-41.20250210.4 (Kinoite)"
|
||||
RELEASE_TYPE=stable
|
||||
ID=aurora
|
||||
ID_LIKE="fedora"
|
||||
VERSION_ID=41
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f41"
|
||||
PRETTY_NAME="Aurora (Version: latest-41.20250210.4 / FROM Fedora Kinoite 41)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:universal-blue:aurora:41"
|
||||
DEFAULT_HOSTNAME="aurora"
|
||||
HOME_URL="https://getaurora.dev/"
|
||||
DOCUMENTATION_URL="https://docs.getaurora.dev"
|
||||
SUPPORT_URL="https://github.com/ublue-os/aurora/issues/"
|
||||
BUG_REPORT_URL="https://github.com/ublue-os/aurora/issues/"
|
||||
SUPPORT_END=2025-12-15
|
||||
VARIANT="Kinoite"
|
||||
VARIANT_ID=aurora
|
||||
OSTREE_VERSION='latest-41.20250210.4'
|
||||
BUILD_ID="fc1570c"
|
||||
IMAGE_ID="aurora"
|
||||
25
src/steps/os/os_release/bazzite
Normal file
@@ -0,0 +1,25 @@
|
||||
NAME="Bazzite"
|
||||
VERSION="41.20250208.0 (Kinoite)"
|
||||
RELEASE_TYPE=stable
|
||||
ID=bazzite
|
||||
ID_LIKE="fedora"
|
||||
VERSION_ID=41
|
||||
VERSION_CODENAME="Holographic"
|
||||
PLATFORM_ID="platform:f41"
|
||||
PRETTY_NAME="Bazzite 41 (FROM Fedora Kinoite)"
|
||||
ANSI_COLOR="0;38;2;138;43;226"
|
||||
LOGO=bazzite-logo-icon
|
||||
CPE_NAME="cpe:/o:universal-blue:bazzite:41"
|
||||
DEFAULT_HOSTNAME="bazzite"
|
||||
HOME_URL="https://bazzite.gg"
|
||||
DOCUMENTATION_URL="https://docs.bazzite.gg"
|
||||
SUPPORT_URL="https://discord.bazzite.gg"
|
||||
BUG_REPORT_URL="https://github.com/ublue-os/bazzite/issues/"
|
||||
SUPPORT_END=2025-12-15
|
||||
VARIANT="Kinoite"
|
||||
VARIANT_ID=bazzite-nvidia-open
|
||||
OSTREE_VERSION='41.20250208.0'
|
||||
BUILD_ID="Stable (F41.20250208)"
|
||||
BOOTLOADER_NAME="Bazzite Stable (F41.20250208)"
|
||||
BUILD_ID="Stable (F41.20250208)"
|
||||
BOOTLOADER_NAME="Bazzite Stable (F41.20250208)"
|
||||
24
src/steps/os/os_release/bluefin
Normal file
@@ -0,0 +1,24 @@
|
||||
NAME="Bluefin"
|
||||
VERSION="41.20250216.1 (Silverblue)"
|
||||
RELEASE_TYPE=stable
|
||||
ID=bluefin
|
||||
ID_LIKE="fedora"
|
||||
VERSION_ID=41
|
||||
VERSION_CODENAME="Archaeopteryx"
|
||||
PLATFORM_ID="platform:f41"
|
||||
PRETTY_NAME="Bluefin (Version: 41.20250216.1 / FROM Fedora Silverblue 41)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:universal-blue:bluefin:41"
|
||||
DEFAULT_HOSTNAME="bluefin"
|
||||
HOME_URL="https://projectbluefin.io"
|
||||
DOCUMENTATION_URL="https://docs.projectbluefin.io"
|
||||
SUPPORT_URL="https://github.com/ublue-os/bluefin/issues/"
|
||||
BUG_REPORT_URL="https://github.com/ublue-os/bluefin/issues/"
|
||||
SUPPORT_END=2025-12-15
|
||||
VARIANT="Silverblue"
|
||||
VARIANT_ID=bluefin
|
||||
OSTREE_VERSION='41.20250216.1'
|
||||
BUILD_ID="185146a"
|
||||
IMAGE_ID="bluefin"
|
||||
IMAGE_VERSION="41.20250216.1"
|
||||
11
src/steps/os/os_release/cachyos
Normal file
@@ -0,0 +1,11 @@
|
||||
NAME="CachyOS Linux"
|
||||
PRETTY_NAME="CachyOS"
|
||||
ID=cachyos
|
||||
BUILD_ID=rolling
|
||||
ANSI_COLOR="38;2;23;147;209"
|
||||
HOME_URL="https://cachyos.org/"
|
||||
DOCUMENTATION_URL="https://wiki.cachyos.org/"
|
||||
SUPPORT_URL="https://discuss.cachyos.org/"
|
||||
BUG_REPORT_URL="https://github.com/cachyos"
|
||||
PRIVACY_POLICY_URL="https://terms.archlinux.org/docs/privacy-policy/"
|
||||
LOGO=cachyos
|
||||
@@ -13,4 +13,3 @@ CENTOS_MANTISBT_PROJECT="CentOS-7"
|
||||
CENTOS_MANTISBT_PROJECT_VERSION="7"
|
||||
REDHAT_SUPPORT_PRODUCT="centos"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION="7"
|
||||
|
||||
|
||||
23
src/steps/os/os_release/coreos
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="41.20250117.3.0 (CoreOS)"
|
||||
RELEASE_TYPE=stable
|
||||
ID=fedora
|
||||
VERSION_ID=41
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f41"
|
||||
PRETTY_NAME="Fedora CoreOS 41.20250117.3.0 (uCore)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:41"
|
||||
HOME_URL="https://getfedora.org/coreos/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-coreos/"
|
||||
SUPPORT_URL="https://github.com/coreos/fedora-coreos-tracker/"
|
||||
BUG_REPORT_URL="https://github.com/coreos/fedora-coreos-tracker/"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=41
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=41
|
||||
SUPPORT_END=2025-12-15
|
||||
VARIANT="CoreOS"
|
||||
VARIANT_ID=coreos
|
||||
OSTREE_VERSION='41.20250117.3.0'
|
||||
22
src/steps/os/os_release/fedoraiot
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39.20240415.0 (IoT Edition)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39.20240415.0 (IoT Edition)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
HOME_URL="https://fedoraproject.org/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora/f39/system-administrators-guide/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://bugzilla.redhat.com/"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-11-12
|
||||
VARIANT="IoT Edition"
|
||||
VARIANT_ID=iot
|
||||
OSTREE_VERSION='39.20240415.0'
|
||||
23
src/steps/os/os_release/fedorakinoite
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39.20240105.0 (Kinoite)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39.20240105.0 (Kinoite)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://kinoite.fedoraproject.org"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-kinoite/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://pagure.io/fedora-kde/SIG/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-11-12
|
||||
VARIANT="Kinoite"
|
||||
VARIANT_ID=kinoite
|
||||
OSTREE_VERSION='39.20240105.0'
|
||||
22
src/steps/os/os_release/fedoraonyx
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Onyx)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Onyx)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/onyx/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-onyx/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://bugzilla.redhat.com/"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Onyx"
|
||||
VARIANT_ID=onyx
|
||||
22
src/steps/os/os_release/fedorasericea
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Sericea)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Sericea)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/sericea/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Sericea"
|
||||
VARIANT_ID=sericea
|
||||
22
src/steps/os/os_release/fedorasilverblue
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Silverblue)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Silverblue)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://silverblue.fedoraproject.org"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-silverblue/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://github.com/fedora-silverblue/issue-tracker/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Silverblue"
|
||||
VARIANT_ID=silverblue
|
||||
23
src/steps/os/os_release/fedoraswayatomic
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="40.20240426.0 (Sway Atomic)"
|
||||
ID=fedora
|
||||
VERSION_ID=40
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f40"
|
||||
PRETTY_NAME="Fedora Linux 40.20240426.0 (Sway Atomic)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:40"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/atomic-desktops/sway/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=40
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=40
|
||||
SUPPORT_END=2025-05-13
|
||||
VARIANT="Sway Atomic"
|
||||
VARIANT_ID=sway-atomic
|
||||
OSTREE_VERSION='40.20240426.0'
|
||||
6
src/steps/os/os_release/funtoo
Normal file
@@ -0,0 +1,6 @@
|
||||
ID="funtoo"
|
||||
NAME="Funtoo"
|
||||
PRETTY_NAME="Funtoo Linux"
|
||||
ANSI_COLOR="0;34"
|
||||
HOME_URL="https://www.funtoo.org"
|
||||
BUG_REPORT_URL="https://bugs.funtoo.org"
|
||||
@@ -4,4 +4,3 @@ PRETTY_NAME="Manjaro ARM"
|
||||
ANSI_COLOR="1;32"
|
||||
HOME_URL="https://www.manjaro.org/"
|
||||
SUPPORT_URL="https://forum.manjaro.org/c/manjaro-arm/"
|
||||
|
||||
|
||||
8
src/steps/os/os_release/nilrt
Normal file
@@ -0,0 +1,8 @@
|
||||
ID=nilrt
|
||||
NAME="NI Linux Real-Time"
|
||||
VERSION="10.0 (kirkstone)"
|
||||
VERSION_ID=10.0
|
||||
PRETTY_NAME="NI Linux Real-Time 10.0 (kirkstone)"
|
||||
DISTRO_CODENAME="kirkstone"
|
||||
BUILD_ID="23.8.0f153-x64"
|
||||
VERSION_CODENAME="kirkstone"
|
||||
23
src/steps/os/os_release/nobara
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Nobara Linux"
|
||||
VERSION="39 (GNOME Edition)"
|
||||
ID=nobara
|
||||
ID_LIKE="rhel centos fedora"
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Nobara Linux 39 (GNOME Edition)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=nobara-logo-icon
|
||||
CPE_NAME="cpe:/o:nobaraproject:nobara:39"
|
||||
DEFAULT_HOSTNAME="nobara"
|
||||
HOME_URL="https://nobaraproject.org/"
|
||||
DOCUMENTATION_URL="https://www.nobaraproject.org/"
|
||||
SUPPORT_URL="https://www.nobaraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/gloriouseggroll/nobara-images"
|
||||
REDHAT_BUGZILLA_PRODUCT="Nobara"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Nobara"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="GNOME Edition"
|
||||
VARIANT_ID=gnome
|
||||
5
src/steps/os/os_release/wolfi
Normal file
@@ -0,0 +1,5 @@
|
||||
ID=wolfi
|
||||
NAME="Wolfi"
|
||||
PRETTY_NAME="Wolfi"
|
||||
VERSION_ID="20230201"
|
||||
HOME_URL="https://wolfi.dev"
|
||||
@@ -1,24 +1,38 @@
use std::fs;
use std::os::unix::fs::MetadataExt;
use std::path::PathBuf;
use std::process::Command;
use std::{env::var, path::Path};

use crate::command::CommandExt;
use crate::{Step, HOME_DIR};
use color_eyre::eyre::eyre;
use color_eyre::eyre::Context;
use color_eyre::eyre::Result;
use etcetera::BaseStrategy;
use home;
use ini::Ini;
use tracing::debug;
#[cfg(target_os = "linux")]
use nix::unistd::Uid;
use regex::Regex;
use rust_i18n::t;
use semver::Version;
use std::ffi::OsStr;
use std::fs;
use std::os::unix::fs::MetadataExt;
use std::path::Component;
use std::path::PathBuf;
use std::process::Command;
use std::sync::LazyLock;
use std::{env::var, path::Path};
use tracing::{debug, warn};

use crate::error::SkipStep;
use crate::command::CommandExt;
use crate::sudo::SudoExecuteOpts;
use crate::XDG_DIRS;
use crate::{output_changed_message, HOME_DIR};

#[cfg(target_os = "linux")]
use super::linux::Distribution;
use crate::error::{SkipStep, StepFailed};
use crate::execution_context::ExecutionContext;
#[cfg(any(target_os = "linux", target_os = "macos"))]
use crate::executor::Executor;
#[cfg(any(target_os = "linux", target_os = "macos"))]
use crate::executor::RunType;
use crate::step::Step;
use crate::terminal::print_separator;
use crate::utils::{require, require_option, PathExt, REQUIRE_SUDO};
use crate::utils::{require, PathExt};

#[cfg(any(target_os = "linux", target_os = "macos"))]
const INTEL_BREW: &str = "/usr/local/bin/brew";
@@ -63,19 +77,41 @@ impl BrewVariant {
}
}

fn execute(self, run_type: RunType) -> Executor {
/// Execute an "internal" brew command, i.e. one that should always be run
/// even when dry-running. Basically just a wrapper around [`Command::new`]
/// that uses `arch` to run using the correct architecture if needed.
#[cfg(target_os = "macos")]
fn execute_internal(self) -> Command {
match self {
BrewVariant::MacIntel if cfg!(target_arch = "aarch64") => {
let mut command = run_type.execute("arch");
let mut command = Command::new("arch");
command.arg("-x86_64").arg(self.binary_name());
command
}
BrewVariant::MacArm if cfg!(target_arch = "x86_64") => {
let mut command = run_type.execute("arch");
let mut command = Command::new("arch");
command.arg("-arm64e").arg(self.binary_name());
command
}
_ => run_type.execute(self.binary_name()),
_ => Command::new(self.binary_name()),
}
}

/// Execute a brew command. Uses `arch` to run using the correct
/// architecture on macOS if needed.
fn execute(self, ctx: &ExecutionContext) -> Executor {
match self {
BrewVariant::MacIntel if cfg!(target_arch = "aarch64") => {
let mut command = ctx.execute("arch");
command.arg("-x86_64").arg(self.binary_name());
command
}
BrewVariant::MacArm if cfg!(target_arch = "x86_64") => {
let mut command = ctx.execute("arch");
command.arg("-arm64e").arg(self.binary_name());
command
}
_ => ctx.execute(self.binary_name()),
}
}
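On macOS the `BrewVariant` wrappers above prepend `arch -x86_64` or `arch -arm64e` whenever the selected brew does not match the architecture the binary was built for. A rough standalone equivalent using only the standard library (the brew paths and flag choice are illustrative, not Topgrade's exact logic):

use std::process::Command;

fn brew_command(intel_brew: bool) -> Command {
    // When calling the Intel brew from an arm64 build, route it through
    // Rosetta with `arch -x86_64 <brew> ...`; otherwise invoke brew directly.
    if intel_brew && cfg!(target_arch = "aarch64") {
        let mut cmd = Command::new("arch");
        cmd.arg("-x86_64").arg("/usr/local/bin/brew");
        cmd
    } else {
        Command::new("/opt/homebrew/bin/brew")
    }
}

fn main() {
    let mut cmd = brew_command(true);
    cmd.arg("--version");
    println!("{cmd:?}");
}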
@@ -92,24 +128,23 @@ pub fn run_fisher(ctx: &ExecutionContext) -> Result<()> {
|
||||
.args(["-c", "type -t fisher"])
|
||||
.output_checked_utf8()
|
||||
.map(|_| ())
|
||||
.map_err(|_| SkipStep("`fisher` is not defined in `fish`".to_owned()))?;
|
||||
.map_err(|_| SkipStep(t!("`fisher` is not defined in `fish`").to_string()))?;
|
||||
|
||||
Command::new(&fish)
|
||||
.args(["-c", "echo \"$__fish_config_dir/fish_plugins\""])
|
||||
.output_checked_utf8()
|
||||
.and_then(|output| Path::new(&output.stdout.trim()).require().map(|_| ()))
|
||||
.map_err(|err| SkipStep(format!("`fish_plugins` path doesn't exist: {err}")))?;
|
||||
.map_err(|err| SkipStep(t!("`fish_plugins` path doesn't exist: {err}", err = err).to_string()))?;
|
||||
|
||||
Command::new(&fish)
|
||||
.args(["-c", "fish_update_completions"])
|
||||
.output_checked_utf8()
|
||||
.map(|_| ())
|
||||
.map_err(|_| SkipStep("`fish_update_completions` is not available".to_owned()))?;
|
||||
.map_err(|_| SkipStep(t!("`fish_update_completions` is not available").to_string()))?;
|
||||
|
||||
print_separator("Fisher");
|
||||
|
||||
let version_str = ctx
|
||||
.run_type()
|
||||
.execute(&fish)
|
||||
.args(["-c", "fisher --version"])
|
||||
.output_checked_utf8()?
|
||||
@@ -118,13 +153,10 @@ pub fn run_fisher(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
if version_str.starts_with("fisher version 3.") {
|
||||
// v3 - see https://github.com/topgrade-rs/topgrade/pull/37#issuecomment-1283844506
|
||||
ctx.run_type().execute(&fish).args(["-c", "fisher"]).status_checked()
|
||||
ctx.execute(&fish).args(["-c", "fisher"]).status_checked()
|
||||
} else {
|
||||
// v4
|
||||
ctx.run_type()
|
||||
.execute(&fish)
|
||||
.args(["-c", "fisher update"])
|
||||
.status_checked()
|
||||
ctx.execute(&fish).args(["-c", "fisher update"]).status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -133,8 +165,7 @@ pub fn run_bashit(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("Bash-it");
|
||||
|
||||
ctx.run_type()
|
||||
.execute("bash")
|
||||
ctx.execute("bash")
|
||||
.args(["-lic", &format!("bash-it update {}", ctx.config().bashit_branch())])
|
||||
.status_checked()
|
||||
}
|
||||
@@ -157,7 +188,7 @@ pub fn run_oh_my_bash(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut update_script = oh_my_bash;
|
||||
update_script.push_str("/tools/upgrade.sh");
|
||||
|
||||
ctx.run_type().execute("bash").arg(update_script).status_checked()
|
||||
ctx.execute("bash").arg(update_script).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_oh_my_fish(ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -166,24 +197,25 @@ pub fn run_oh_my_fish(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("oh-my-fish");
|
||||
|
||||
ctx.run_type().execute(fish).args(["-c", "omf update"]).status_checked()
|
||||
ctx.execute(fish).args(["-c", "omf update"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pkgin(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pkgin = require("pkgin")?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
|
||||
print_separator("Pkgin");
|
||||
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(&pkgin).arg("update");
|
||||
let sudo = ctx.require_sudo()?;
|
||||
|
||||
let mut command = sudo.execute(ctx, &pkgin)?;
|
||||
command.arg("update");
|
||||
if ctx.config().yes(Step::Pkgin) {
|
||||
command.arg("-y");
|
||||
}
|
||||
command.status_checked()?;
|
||||
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(&pkgin).arg("upgrade");
|
||||
let mut command = sudo.execute(ctx, &pkgin)?;
|
||||
command.arg("upgrade");
|
||||
if ctx.config().yes(Step::Pkgin) {
|
||||
command.arg("-y");
|
||||
}
|
||||
@@ -198,10 +230,7 @@ pub fn run_fish_plug(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("fish-plug");
|
||||
|
||||
ctx.run_type()
|
||||
.execute(fish)
|
||||
.args(["-c", "plug update"])
|
||||
.status_checked()
|
||||
ctx.execute(fish).args(["-c", "plug update"]).status_checked()
|
||||
}
|
||||
|
||||
/// Upgrades `fundle` and `fundle` plugins.
|
||||
@@ -215,8 +244,7 @@ pub fn run_fundle(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("fundle");
|
||||
|
||||
ctx.run_type()
|
||||
.execute(fish)
|
||||
ctx.execute(fish)
|
||||
.args(["-c", "fundle self-update && fundle update"])
|
||||
.status_checked()
|
||||
}
|
||||
@@ -224,9 +252,9 @@ pub fn run_fundle(ctx: &ExecutionContext) -> Result<()> {
|
||||
#[cfg(not(any(target_os = "android", target_os = "macos")))]
|
||||
pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
|
||||
let gdbus = require("gdbus")?;
|
||||
require_option(
|
||||
crate::utils::require_option(
|
||||
var("XDG_CURRENT_DESKTOP").ok().filter(|p| p.contains("GNOME")),
|
||||
"Desktop doest not appear to be gnome".to_string(),
|
||||
t!("Desktop does not appear to be GNOME").to_string(),
|
||||
)?;
|
||||
let output = Command::new("gdbus")
|
||||
.args([
|
||||
@@ -241,15 +269,14 @@ pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
|
||||
])
|
||||
.output_checked_utf8()?;
|
||||
|
||||
debug!("Checking for gnome extensions: {}", output);
|
||||
debug!("Checking for GNOME extensions: {}", output);
|
||||
if !output.stdout.contains("org.gnome.Shell.Extensions") {
|
||||
return Err(SkipStep(String::from("Gnome shell extensions are unregistered in DBus")).into());
|
||||
return Err(SkipStep(t!("GNOME shell extensions are unregistered in DBus").to_string()).into());
|
||||
}
|
||||
|
||||
print_separator("Gnome Shell extensions");
|
||||
print_separator(t!("GNOME Shell extensions"));
|
||||
|
||||
ctx.run_type()
|
||||
.execute(gdbus)
|
||||
ctx.execute(gdbus)
|
||||
.args([
|
||||
"call",
|
||||
"--session",
|
||||
@@ -263,6 +290,23 @@ pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
pub fn brew_linux_sudo_uid() -> Option<u32> {
let linuxbrew_directory = "/home/linuxbrew/.linuxbrew";
if let Ok(metadata) = std::fs::metadata(linuxbrew_directory) {
let owner_id = metadata.uid();
let current_id = Uid::effective();
// print debug these two values
debug!("linuxbrew_directory owner_id: {}, current_id: {}", owner_id, current_id);
return if owner_id == current_id.as_raw() {
None // no need for sudo if linuxbrew is owned by the current user
} else {
Some(owner_id) // otherwise use sudo to run brew as the owner
};
}
None
}
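`brew_linux_sudo_uid` decides whether brew needs sudo by comparing the owner of `/home/linuxbrew/.linuxbrew` with the effective uid of the current process. A minimal sketch of that probe, assuming the `libc` crate for `geteuid` (the helper name is hypothetical):

use std::os::unix::fs::MetadataExt;

fn brew_sudo_uid(prefix: &str) -> Option<u32> {
    let owner = std::fs::metadata(prefix).ok()?.uid();
    // SAFETY: geteuid() has no preconditions and cannot fail.
    let me = unsafe { libc::geteuid() };
    // Only escalate when someone else owns the prefix.
    (owner != me).then_some(owner)
}

fn main() {
    println!("{:?}", brew_sudo_uid("/home/linuxbrew/.linuxbrew"));
}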
#[cfg(any(target_os = "linux", target_os = "macos"))]
|
||||
pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()> {
|
||||
#[allow(unused_variables)]
|
||||
@@ -271,25 +315,50 @@ pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
if variant.is_path() && !BrewVariant::is_macos_custom(binary_name) {
|
||||
return Err(SkipStep("Not a custom brew for macOS".to_string()).into());
|
||||
return Err(SkipStep(t!("Not a custom brew for macOS").to_string()).into());
|
||||
}
|
||||
}
|
||||
|
||||
print_separator(variant.step_title());
|
||||
let run_type = ctx.run_type();
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
let sudo_uid = brew_linux_sudo_uid();
|
||||
// if brew is owned by another user, execute "sudo -Hu <uid> brew update"
|
||||
if let Some(user_id) = sudo_uid {
|
||||
let uid = nix::unistd::Uid::from_raw(user_id);
|
||||
let user = nix::unistd::User::from_uid(uid)
|
||||
.expect("failed to call getpwuid()")
|
||||
.expect("this user should exist");
|
||||
|
||||
variant.execute(run_type).arg("update").status_checked()?;
|
||||
variant
|
||||
.execute(run_type)
|
||||
.args(["upgrade", "--ignore-pinned", "--formula"])
|
||||
let sudo_as_user = t!("sudo as user '{user}'", user = user.name);
|
||||
print_separator(format!("{} ({})", variant.step_title(), sudo_as_user));
|
||||
|
||||
let sudo = ctx.require_sudo()?;
|
||||
sudo.execute_opts(ctx, &binary_name, SudoExecuteOpts::new().set_home().user(&user.name))?
|
||||
.current_dir("/tmp") // brew needs a writable current directory
|
||||
.arg("update")
|
||||
.status_checked()?;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
print_separator(variant.step_title());
|
||||
|
||||
variant.execute(ctx).arg("update").status_checked()?;
|
||||
|
||||
let mut command = variant.execute(ctx);
|
||||
command.args(["upgrade", "--formula"]);
|
||||
|
||||
if ctx.config().brew_fetch_head() {
|
||||
command.arg("--fetch-HEAD");
|
||||
}
|
||||
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
variant.execute(run_type).arg("cleanup").status_checked()?;
|
||||
variant.execute(ctx).arg("cleanup").status_checked()?;
|
||||
}
|
||||
|
||||
if ctx.config().brew_autoremove() {
|
||||
variant.execute(run_type).arg("autoremove").status_checked()?;
|
||||
variant.execute(ctx).arg("autoremove").status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
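When the prefix is owned by another user, the hunk above runs `brew update` through `sudo.execute_opts(...)` with `set_home()` and `user(...)`. Purely as an illustration of the flags that combination corresponds to, a plain `std::process::Command` stand-in (not the crate's sudo wrapper, and `user_name` is a placeholder) might look like:

```rust
use std::process::Command;

// Illustrative stand-in: run `brew update` as the Linuxbrew owner with HOME
// reset (`-H`), from a directory brew can write to.
fn brew_update_as(user_name: &str) -> std::io::Result<std::process::ExitStatus> {
    Command::new("sudo")
        .args(["-H", "-u", user_name, "brew", "update"])
        .current_dir("/tmp")
        .status()
}
```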
@@ -299,13 +368,12 @@ pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<
|
||||
pub fn run_brew_cask(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()> {
|
||||
let binary_name = require(variant.binary_name())?;
|
||||
if variant.is_path() && !BrewVariant::is_macos_custom(binary_name) {
|
||||
return Err(SkipStep("Not a custom brew for macOS".to_string()).into());
|
||||
return Err(SkipStep(t!("Not a custom brew for macOS").to_string()).into());
|
||||
}
|
||||
print_separator(format!("{} - Cask", variant.step_title()));
|
||||
let run_type = ctx.run_type();
|
||||
|
||||
let cask_upgrade_exists = variant
|
||||
.execute(RunType::Wet)
|
||||
.execute_internal()
|
||||
.args(["--repository", "buo/cask-upgrade"])
|
||||
.output_checked_utf8()
|
||||
.map(|p| Path::new(p.stdout.trim()).exists())?;
|
||||
@@ -322,12 +390,18 @@ pub fn run_brew_cask(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()>
|
||||
if ctx.config().brew_cask_greedy() {
|
||||
brew_args.push("--greedy");
|
||||
}
|
||||
if ctx.config().brew_greedy_latest() {
|
||||
brew_args.push("--greedy-latest");
|
||||
}
|
||||
if ctx.config().brew_greedy_auto_updates() {
|
||||
brew_args.push("--greedy-auto-updates");
|
||||
}
|
||||
}
|
||||
|
||||
variant.execute(run_type).args(&brew_args).status_checked()?;
|
||||
variant.execute(ctx).args(&brew_args).status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
variant.execute(run_type).arg("cleanup").status_checked()?;
|
||||
variant.execute(ctx).arg("cleanup").status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -336,8 +410,6 @@ pub fn run_brew_cask(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()>
pub fn run_guix(ctx: &ExecutionContext) -> Result<()> {
let guix = require("guix")?;

let run_type = ctx.run_type();

let output = Command::new(&guix).arg("pull").output_checked_utf8();
debug!("guix pull output: {:?}", output);
let should_upgrade = output.is_ok();
@@ -346,9 +418,9 @@ pub fn run_guix(ctx: &ExecutionContext) -> Result<()> {
print_separator("Guix");

if should_upgrade {
return run_type.execute(&guix).args(["package", "-u"]).status_checked();
return ctx.execute(&guix).args(["package", "-u"]).status_checked();
}
Err(SkipStep(String::from("Guix Pull Failed, Skipping")).into())
Err(SkipStep(t!("Guix Pull Failed, Skipping").to_string()).into())
}

pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -357,85 +429,374 @@ pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
|
||||
let nix_env = require("nix-env")?;
|
||||
// TODO: Is None possible here?
|
||||
let profile_path = match home::home_dir() {
|
||||
Some(home) => Path::new(&home).join(".nix-profile"),
|
||||
Some(home) => XDG_DIRS
|
||||
.state_dir()
|
||||
.map(|d| d.join("nix/profile"))
|
||||
.filter(|p| p.exists())
|
||||
.unwrap_or(Path::new(&home).join(".nix-profile")),
|
||||
None => Path::new("/nix/var/nix/profiles/per-user/default").into(),
|
||||
};
|
||||
debug!("nix profile: {:?}", profile_path);
|
||||
let manifest_json_path = profile_path.join("manifest.json");
|
||||
|
||||
let output = Command::new(&nix_env).args(["--query", "nix"]).output_checked_utf8();
|
||||
debug!("nix-env output: {:?}", output);
|
||||
let should_self_upgrade = output.is_ok();
|
||||
|
||||
print_separator("Nix");
|
||||
|
||||
let multi_user = fs::metadata(&nix)?.uid() == 0;
|
||||
debug!("Multi user nix: {}", multi_user);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
use super::linux::Distribution;
|
||||
|
||||
if let Ok(Distribution::NixOS) = Distribution::detect() {
|
||||
return Err(SkipStep(String::from("Nix on NixOS must be upgraded via nixos-rebuild switch")).into());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
if require("darwin-rebuild").is_ok() {
|
||||
return Err(SkipStep(String::from(
|
||||
"Nix-darwin on macOS must be upgraded via darwin-rebuild switch",
|
||||
))
|
||||
.into());
|
||||
return Err(
|
||||
SkipStep(t!("Nix-darwin on macOS must be upgraded via darwin-rebuild switch").to_string()).into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let run_type = ctx.run_type();
|
||||
ctx.execute(nix_channel).arg("--update").status_checked()?;
|
||||
|
||||
if should_self_upgrade {
|
||||
if multi_user {
|
||||
ctx.execute_elevated(&nix, true)?.arg("upgrade-nix").status_checked()?;
|
||||
let mut get_version_cmd = ctx.execute(&nix);
|
||||
get_version_cmd.arg("--version");
|
||||
let get_version_cmd_output = get_version_cmd.output_checked_utf8()?;
|
||||
let get_version_cmd_first_line_stdout = get_version_cmd_output
|
||||
.stdout
|
||||
.lines()
|
||||
.next()
|
||||
.ok_or_else(|| eyre!("`nix --version` output is empty"))?;
|
||||
|
||||
let is_lix = get_version_cmd_first_line_stdout.contains("Lix");
|
||||
|
||||
debug!(
|
||||
output=%get_version_cmd_output,
|
||||
?is_lix,
|
||||
"`nix --version` output"
|
||||
);
|
||||
|
||||
static NIX_VERSION_REGEX: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"^nix \([^)]*\) ([0-9.]+)").expect("Nix version regex always compiles"));
|
||||
|
||||
if get_version_cmd_first_line_stdout.is_empty() {
|
||||
return Err(eyre!("`nix --version` output was empty"));
|
||||
}
|
||||
|
||||
let captures = NIX_VERSION_REGEX
|
||||
.captures(get_version_cmd_first_line_stdout)
|
||||
.ok_or_else(|| eyre!(output_changed_message!("nix --version", "regex did not match")))?;
|
||||
let raw_version = &captures[1];
|
||||
|
||||
debug!("Raw Nix version: {raw_version}");
|
||||
|
||||
// Nix 2.29.0 outputs "2.29" instead of "2.29.0", so we need to add that if necessary.
|
||||
let corrected_raw_version = if raw_version.chars().filter(|&c| c == '.').count() == 1 {
|
||||
&format!("{raw_version}.0")
|
||||
} else {
|
||||
run_type.execute(&nix).arg("upgrade-nix").status_checked()?;
|
||||
}
|
||||
}
|
||||
raw_version
|
||||
};
|
||||
|
||||
run_type.execute(nix_channel).arg("--update").status_checked()?;
|
||||
debug!("Corrected raw Nix version: {corrected_raw_version}");
|
||||
|
||||
let version = Version::parse(corrected_raw_version)
|
||||
.wrap_err_with(|| output_changed_message!("nix --version", "Invalid version"))?;
|
||||
|
||||
debug!("Nix version: {:?}", version);
|
||||
|
||||
// Nix since 2.21.0 uses `--all --impure` rather than `.*` to upgrade all packages.
|
||||
// Lix is based on Nix 2.18, so it doesn't!
|
||||
let packages = if version >= Version::new(2, 21, 0) && !is_lix {
|
||||
vec!["--all", "--impure"]
|
||||
} else {
|
||||
vec![".*"]
|
||||
};
|
||||
|
||||
if Path::new(&manifest_json_path).exists() {
|
||||
run_type
|
||||
.execute(&nix)
|
||||
ctx.execute(nix)
|
||||
.args(nix_args())
|
||||
.arg("profile")
|
||||
.arg("upgrade")
|
||||
.arg(".*")
|
||||
.args(&packages)
|
||||
.arg("--verbose")
|
||||
.status_checked()
|
||||
} else {
|
||||
run_type.execute(&nix_env).arg("--upgrade").status_checked()
|
||||
let mut command = ctx.execute(nix_env);
|
||||
command.arg("--upgrade");
|
||||
if let Some(args) = ctx.config().nix_env_arguments() {
|
||||
command.args(args.split_whitespace());
|
||||
};
|
||||
command.status_checked()
|
||||
}
|
||||
}
|
||||
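The version handling above pads a two-component `nix --version` number before parsing, so that `2.29` becomes `2.29.0`, and then picks `--all --impure` only for Nix ≥ 2.21 that is not Lix. A hedged sketch of just the normalisation step, assuming the `Version` in the diff is `semver::Version`:

```rust
use semver::Version;

// Pad a missing patch component ("2.29" -> "2.29.0") before semver parsing,
// mirroring the correction described in the hunk above.
fn parse_nix_version(raw: &str) -> Result<Version, semver::Error> {
    let normalised = if raw.matches('.').count() == 1 {
        format!("{raw}.0")
    } else {
        raw.to_owned()
    };
    Version::parse(&normalised)
}
```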
|
||||
pub fn run_nix_self_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let nix = require("nix")?;
|
||||
|
||||
// Should we attempt to upgrade Nix with `nix upgrade-nix`?
|
||||
#[allow(unused_mut)]
|
||||
let mut should_self_upgrade = cfg!(target_os = "macos");
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// We can't use `nix upgrade-nix` on NixOS.
|
||||
if let Ok(Distribution::NixOS) = Distribution::detect() {
|
||||
should_self_upgrade = false;
|
||||
}
|
||||
}
|
||||
|
||||
if !should_self_upgrade {
|
||||
return Err(SkipStep(t!("`nix upgrade-nix` can only be used on macOS or non-NixOS Linux").to_string()).into());
|
||||
}
|
||||
|
||||
if nix_profile_dir(&nix)?.is_none() {
|
||||
return Err(
|
||||
SkipStep(t!("`nix upgrade-nix` cannot be run when Nix is installed in a profile").to_string()).into(),
|
||||
);
|
||||
}
|
||||
|
||||
print_separator(t!("Nix (self-upgrade)"));
|
||||
|
||||
let version_output = ctx.execute(&nix).arg("--version").output_checked_utf8()?;
|
||||
let version = version_output
|
||||
.stdout
|
||||
.lines()
|
||||
.next()
|
||||
.ok_or_else(|| eyre!("`nix --version` output is empty"))?;
|
||||
|
||||
let is_determinate_nix = version.contains("Determinate Nix");
|
||||
|
||||
debug!(
|
||||
output=%version_output,
|
||||
?is_determinate_nix,
|
||||
"`nix --version` output"
|
||||
);
|
||||
|
||||
if is_determinate_nix {
|
||||
let nixd = require("determinate-nixd");
|
||||
let nixd = match nixd {
|
||||
Err(_) => {
|
||||
println!("Found Determinate Nix, but could not find determinate-nixd");
|
||||
return Err(StepFailed.into());
|
||||
}
|
||||
Ok(nixd) => nixd,
|
||||
};
|
||||
|
||||
let sudo = ctx.require_sudo()?;
|
||||
return sudo
|
||||
.execute_opts(ctx, nixd, SudoExecuteOpts::new().login_shell())?
|
||||
.arg("upgrade")
|
||||
.status_checked();
|
||||
}
|
||||
|
||||
let multi_user = fs::metadata(&nix)?.uid() == 0;
|
||||
debug!("Multi user nix: {}", multi_user);
|
||||
|
||||
let nix_args = nix_args();
|
||||
if multi_user {
|
||||
let sudo = ctx.require_sudo()?;
|
||||
sudo.execute_opts(ctx, &nix, SudoExecuteOpts::new().login_shell())?
|
||||
.args(nix_args)
|
||||
.arg("upgrade-nix")
|
||||
.status_checked()
|
||||
} else {
|
||||
ctx.execute(&nix).args(nix_args).arg("upgrade-nix").status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
/// If we try to `nix upgrade-nix` but Nix is installed with `nix profile`, we'll get a `does not
|
||||
/// appear to be part of a Nix profile` error.
|
||||
///
|
||||
/// We duplicate some of the `nix` logic here to avoid this.
|
||||
/// See: <https://github.com/NixOS/nix/blob/f0180487a0e4c0091b46cb1469c44144f5400240/src/nix/upgrade-nix.cc#L102-L139>
|
||||
///
|
||||
/// See: <https://github.com/NixOS/nix/issues/5473>
|
||||
fn nix_profile_dir(nix: &Path) -> Result<Option<PathBuf>> {
|
||||
// NOTE: `nix` uses the location of the `nix-env` binary for this but we're using the `nix`
|
||||
// binary; should be the same.
|
||||
let nix_bin_dir = nix.parent();
|
||||
if nix_bin_dir.and_then(|p| p.file_name()) != Some(OsStr::new("bin")) {
|
||||
debug!("Nix is not installed in a `bin` directory: {nix_bin_dir:?}");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let nix_dir = nix_bin_dir
|
||||
.and_then(|bin_dir| bin_dir.parent())
|
||||
.ok_or_else(|| eyre!("Unable to find Nix install directory from Nix binary {nix:?}"))?;
|
||||
|
||||
debug!("Found Nix in {nix_dir:?}");
|
||||
|
||||
let mut profile_dir = nix_dir.to_path_buf();
|
||||
while profile_dir.is_symlink() {
|
||||
profile_dir = profile_dir
|
||||
.parent()
|
||||
.ok_or_else(|| eyre!("Path has no parent: {profile_dir:?}"))?
|
||||
.join(
|
||||
profile_dir
|
||||
.read_link()
|
||||
.wrap_err_with(|| format!("Failed to read symlink {profile_dir:?}"))?,
|
||||
);
|
||||
|
||||
// NOTE: `nix` uses a hand-rolled canonicalize function, Rust just uses `realpath`.
|
||||
if profile_dir
|
||||
.canonicalize()
|
||||
.wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?
|
||||
.components()
|
||||
.any(|component| component == Component::Normal(OsStr::new("profiles")))
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
debug!("Found Nix profile {profile_dir:?}");
|
||||
let user_env = profile_dir
|
||||
.canonicalize()
|
||||
.wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?;
|
||||
|
||||
Ok(
|
||||
if user_env
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.is_some_and(|name| name.ends_with("user-environment"))
|
||||
{
|
||||
Some(profile_dir)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns a directory from an environment variable, if and only if it is a directory which
/// contains a flake.nix
fn flake_dir(var: &'static str) -> Option<PathBuf> {
std::env::var_os(var)
.map(PathBuf::from)
.take_if(|x| std::fs::exists(x.join("flake.nix")).is_ok_and(|x| x))
}

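`flake_dir` above leans on `Option::take_if` and `std::fs::exists`, both comparatively recent standard-library additions. An equivalent sketch for older toolchains, with the caveat that it checks for a regular file where the original only checks bare existence:

```rust
use std::path::PathBuf;

// Keep the directory named by `var` only when it contains a flake.nix.
fn flake_dir_compat(var: &'static str) -> Option<PathBuf> {
    std::env::var_os(var)
        .map(PathBuf::from)
        .filter(|dir| dir.join("flake.nix").is_file())
}
```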
/// Update NixOS and home-manager through a flake using `nh`
|
||||
///
|
||||
/// See: https://github.com/viperML/nh
|
||||
pub fn run_nix_helper(ctx: &ExecutionContext) -> Result<()> {
|
||||
require("nix")?;
|
||||
let nix_helper = require("nh")?;
|
||||
|
||||
let fallback_flake_path = flake_dir("NH_FLAKE");
|
||||
let darwin_flake_path = flake_dir("NH_DARWIN_FLAKE");
|
||||
let home_flake_path = flake_dir("NH_HOME_FLAKE");
|
||||
let nixos_flake_path = flake_dir("NH_OS_FLAKE");
|
||||
|
||||
let all_flake_paths: Vec<_> = [
|
||||
fallback_flake_path.as_ref(),
|
||||
darwin_flake_path.as_ref(),
|
||||
home_flake_path.as_ref(),
|
||||
nixos_flake_path.as_ref(),
|
||||
]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect();
|
||||
|
||||
// if none of the paths exist AND contain a `flake.nix`, skip
|
||||
if all_flake_paths.is_empty() {
|
||||
if flake_dir("FLAKE").is_some() {
|
||||
warn!(
|
||||
"{}",
|
||||
t!("You have a flake inside of $FLAKE. This is deprecated for nh.")
|
||||
);
|
||||
}
|
||||
return Err(SkipStep(t!("nh cannot find any configured flakes").into()).into());
|
||||
}
|
||||
|
||||
let nh_switch = |ty: &'static str| -> Result<()> {
|
||||
print_separator(format!("nh {ty}"));
|
||||
|
||||
let mut cmd = ctx.execute(&nix_helper);
|
||||
cmd.arg(ty);
|
||||
cmd.arg("switch");
|
||||
cmd.arg("-u");
|
||||
|
||||
if !ctx.config().yes(Step::NixHelper) {
|
||||
cmd.arg("--ask");
|
||||
}
|
||||
cmd.status_checked()?;
|
||||
Ok(())
|
||||
};
|
||||
|
||||
// We assume that if the user has set these variables, we can throw an error if nh cannot find
|
||||
// a flake there. So we do not anymore perform an eval check to find out whether we should skip
|
||||
// or not.
|
||||
#[cfg(target_os = "macos")]
|
||||
if darwin_flake_path.is_some() || fallback_flake_path.is_some() {
|
||||
nh_switch("darwin")?;
|
||||
}
|
||||
|
||||
if home_flake_path.is_some() || fallback_flake_path.is_some() {
|
||||
nh_switch("home")?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
if matches!(Distribution::detect(), Ok(Distribution::NixOS))
|
||||
&& (nixos_flake_path.is_some() || fallback_flake_path.is_some())
|
||||
{
|
||||
nh_switch("os")?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn nix_args() -> [&'static str; 2] {
|
||||
["--extra-experimental-features", "nix-command"]
|
||||
}
|
||||
|
||||
pub fn run_yadm(ctx: &ExecutionContext) -> Result<()> {
let yadm = require("yadm")?;

print_separator("yadm");

ctx.run_type().execute(yadm).arg("pull").status_checked()
ctx.execute(yadm).arg("pull").status_checked()
}

pub fn run_asdf(ctx: &ExecutionContext) -> Result<()> {
|
||||
let asdf = require("asdf")?;
|
||||
|
||||
print_separator("asdf");
|
||||
ctx.run_type()
|
||||
.execute(&asdf)
|
||||
.arg("update")
|
||||
.status_checked_with_codes(&[42])?;
|
||||
|
||||
ctx.run_type()
|
||||
.execute(&asdf)
|
||||
.args(["plugin", "update", "--all"])
|
||||
.status_checked()
|
||||
// asdf (>= 0.15.0) won't support the self-update command
|
||||
//
|
||||
// https://github.com/topgrade-rs/topgrade/issues/1007
|
||||
let version_output = Command::new(&asdf).arg("version").output_checked_utf8()?;
|
||||
// Example output
|
||||
//
|
||||
// ```
|
||||
// $ asdf version
|
||||
// v0.15.0-31e8c93
|
||||
//
|
||||
// ```
|
||||
// ```
|
||||
// $ asdf version
|
||||
// v0.16.7
|
||||
// ```
|
||||
// ```
|
||||
// $ asdf version
|
||||
// 0.18.0 (revision unknown)
|
||||
// ```
|
||||
let version_stdout = version_output.stdout.trim();
|
||||
// trim the starting 'v'
|
||||
let mut remaining = version_stdout.trim_start_matches('v');
|
||||
// remove the hash or revision part if present
|
||||
if let Some(idx) = remaining.find(['-', ' ']) {
|
||||
remaining = &remaining[..idx];
|
||||
}
|
||||
let version =
|
||||
Version::parse(remaining).wrap_err_with(|| output_changed_message!("asdf version", "invalid version"))?;
|
||||
if version < Version::new(0, 15, 0) {
|
||||
ctx.execute(&asdf).arg("update").status_checked_with_codes(&[42])?;
|
||||
}
|
||||
|
||||
ctx.execute(&asdf).args(["plugin", "update", "--all"]).status_checked()
|
||||
}
|
||||
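The rewritten `run_asdf` has to cope with the three `asdf version` output shapes quoted in the comments above before deciding whether `asdf update` still exists (it was dropped in 0.15.0). A compact sketch of that normalisation, again assuming `semver::Version`:

```rust
use semver::Version;

// Accept "v0.15.0-31e8c93", "v0.16.7" and "0.18.0 (revision unknown)" alike:
// strip a leading 'v' and anything after the first '-' or space.
fn parse_asdf_version(stdout: &str) -> Result<Version, semver::Error> {
    let mut rest = stdout.trim().trim_start_matches('v');
    if let Some(idx) = rest.find(['-', ' ']) {
        rest = &rest[..idx];
    }
    Version::parse(rest)
}
```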
|
||||
pub fn run_mise(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mise = require("mise")?;
|
||||
|
||||
print_separator("mise");
|
||||
|
||||
ctx.execute(&mise).args(["plugins", "update"]).status_checked()?;
|
||||
|
||||
ctx.execute(&mise).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_home_manager(ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -443,7 +804,7 @@ pub fn run_home_manager(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("home-manager");
|
||||
|
||||
let mut cmd = ctx.run_type().execute(home_manager);
|
||||
let mut cmd = ctx.execute(home_manager);
|
||||
cmd.arg("switch");
|
||||
|
||||
if let Some(extra_args) = ctx.config().home_manager() {
|
||||
@@ -453,26 +814,39 @@ pub fn run_home_manager(ctx: &ExecutionContext) -> Result<()> {
|
||||
cmd.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_tldr(ctx: &ExecutionContext) -> Result<()> {
|
||||
let tldr = require("tldr")?;
|
||||
|
||||
print_separator("TLDR");
|
||||
ctx.run_type().execute(tldr).arg("--update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pearl(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pearl = require("pearl")?;
|
||||
print_separator("pearl");
|
||||
|
||||
ctx.run_type().execute(pearl).arg("update").status_checked()
|
||||
ctx.execute(pearl).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pyenv(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pyenv = require("pyenv")?;
|
||||
print_separator("pyenv");
|
||||
|
||||
let pyenv_dir = var("PYENV_ROOT").map_or_else(|_| HOME_DIR.join(".pyenv"), PathBuf::from);
|
||||
|
||||
if !pyenv_dir.exists() {
|
||||
return Err(SkipStep(t!("Pyenv is installed, but $PYENV_ROOT is not set correctly").to_string()).into());
|
||||
}
|
||||
|
||||
if !pyenv_dir.join(".git").exists() {
|
||||
return Err(SkipStep(t!("pyenv is not a git repository").to_string()).into());
|
||||
}
|
||||
|
||||
if !pyenv_dir.join("plugins").join("pyenv-update").exists() {
|
||||
return Err(SkipStep(t!("pyenv-update plugin is not installed").to_string()).into());
|
||||
}
|
||||
|
||||
ctx.execute(pyenv).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
|
||||
let bash = require("bash")?;
|
||||
|
||||
let sdkman_init_path = var("SDKMAN_DIR")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".sdkman"))
|
||||
.map_or_else(|_| HOME_DIR.join(".sdkman"), PathBuf::from)
|
||||
.join("bin")
|
||||
.join("sdkman-init.sh")
|
||||
.require()
|
||||
@@ -481,8 +855,7 @@ pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator("SDKMAN!");
|
||||
|
||||
let sdkman_config_path = var("SDKMAN_DIR")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".sdkman"))
|
||||
.map_or_else(|_| HOME_DIR.join(".sdkman"), PathBuf::from)
|
||||
.join("etc")
|
||||
.join("config")
|
||||
.require()?;
|
||||
@@ -495,34 +868,25 @@ pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
if selfupdate_enabled == "true" {
|
||||
let cmd_selfupdate = format!("source {} && sdk selfupdate", &sdkman_init_path);
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
ctx.execute(&bash)
|
||||
.args(["-c", cmd_selfupdate.as_str()])
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
let cmd_update = format!("source {} && sdk update", &sdkman_init_path);
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_update.as_str()])
|
||||
.status_checked()?;
|
||||
ctx.execute(&bash).args(["-c", cmd_update.as_str()]).status_checked()?;
|
||||
|
||||
let cmd_upgrade = format!("source {} && sdk upgrade", &sdkman_init_path);
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_upgrade.as_str()])
|
||||
.status_checked()?;
|
||||
ctx.execute(&bash).args(["-c", cmd_upgrade.as_str()]).status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
let cmd_flush_archives = format!("source {} && sdk flush archives", &sdkman_init_path);
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
ctx.execute(&bash)
|
||||
.args(["-c", cmd_flush_archives.as_str()])
|
||||
.status_checked()?;
|
||||
|
||||
let cmd_flush_temp = format!("source {} && sdk flush temp", &sdkman_init_path);
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
ctx.execute(&bash)
|
||||
.args(["-c", cmd_flush_temp.as_str()])
|
||||
.status_checked()?;
|
||||
}
|
||||
@@ -530,12 +894,20 @@ pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
Ok(())
}

pub fn run_bun(ctx: &ExecutionContext) -> Result<()> {
pub fn run_bun_packages(ctx: &ExecutionContext) -> Result<()> {
let bun = require("bun")?;

print_separator("Bun");
print_separator(t!("Bun Packages"));

ctx.run_type().execute(bun).arg("upgrade").status_checked()
let mut package_json: PathBuf = var("BUN_INSTALL").map_or_else(|_| HOME_DIR.join(".bun"), PathBuf::from);
package_json.push("install/global/package.json");

if !package_json.exists() {
println!("{}", t!("No global packages installed"));
return Ok(());
}

ctx.execute(bun).args(["-g", "update"]).status_checked()
}

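`run_bun_packages` above only updates global packages when Bun's global manifest exists. A sketch of that lookup, with `home_dir` standing in for the crate's `HOME_DIR`:

```rust
use std::path::{Path, PathBuf};

// Bun keeps globally installed packages under $BUN_INSTALL (default ~/.bun)
// in install/global/package.json.
fn bun_global_manifest(home_dir: &Path) -> PathBuf {
    std::env::var_os("BUN_INSTALL")
        .map(PathBuf::from)
        .unwrap_or_else(|| home_dir.join(".bun"))
        .join("install/global/package.json")
}
```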
/// Update dotfiles with `rcm(7)`.
|
||||
@@ -545,17 +917,27 @@ pub fn run_rcm(ctx: &ExecutionContext) -> Result<()> {
|
||||
let rcup = require("rcup")?;
|
||||
|
||||
print_separator("rcm");
|
||||
ctx.run_type().execute(rcup).arg("-v").status_checked()
|
||||
ctx.execute(rcup).arg("-v").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_maza(ctx: &ExecutionContext) -> Result<()> {
|
||||
let maza = require("maza")?;
|
||||
|
||||
print_separator("maza");
|
||||
ctx.run_type().execute(maza).arg("update").status_checked()
|
||||
ctx.execute(maza).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn reboot() -> Result<()> {
|
||||
print!("Rebooting...");
|
||||
Command::new("sudo").arg("reboot").status_checked()
|
||||
pub fn run_atuin(ctx: &ExecutionContext) -> Result<()> {
|
||||
let atuin = require("atuin-update")?;
|
||||
|
||||
print_separator("atuin");
|
||||
|
||||
ctx.execute(atuin).status_checked()
|
||||
}
|
||||
|
||||
pub fn reboot(ctx: &ExecutionContext) -> Result<()> {
|
||||
match ctx.sudo() {
|
||||
Some(sudo) => sudo.execute(ctx, "reboot")?.status_checked(),
|
||||
None => ctx.execute("reboot").status_checked(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,17 +1,18 @@
|
||||
use std::convert::TryFrom;
|
||||
use std::path::Path;
|
||||
use std::{ffi::OsStr, process::Command};
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use rust_i18n::t;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::config::UpdatesAutoReboot;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::step::Step;
|
||||
use crate::terminal::{print_separator, print_warning};
|
||||
use crate::utils::{require, which};
|
||||
use crate::{error::SkipStep, steps::git::Repositories};
|
||||
use crate::{powershell, Step};
|
||||
use crate::{error::SkipStep, steps::git::RepoStep};
|
||||
|
||||
pub fn run_chocolatey(ctx: &ExecutionContext) -> Result<()> {
|
||||
let choco = require("choco")?;
|
||||
@@ -19,15 +20,9 @@ pub fn run_chocolatey(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("Chocolatey");
|
||||
|
||||
let mut command = match ctx.sudo() {
|
||||
Some(sudo) => {
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(choco);
|
||||
command
|
||||
}
|
||||
None => ctx.run_type().execute(choco),
|
||||
};
|
||||
let sudo = ctx.require_sudo()?;
|
||||
|
||||
let mut command = sudo.execute(ctx, &choco)?;
|
||||
command.args(["upgrade", "all"]);
|
||||
|
||||
if yes {
|
||||
@@ -42,15 +37,23 @@ pub fn run_winget(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("winget");
|
||||
|
||||
if !ctx.config().enable_winget() {
|
||||
print_warning("Winget is disabled by default. Enable it by setting enable_winget=true in the [windows] section in the configuration.");
|
||||
return Err(SkipStep(String::from("Winget is disabled by default")).into());
|
||||
ctx.execute(&winget).args(["source", "update"]).status_checked()?;
|
||||
|
||||
let mut command = if ctx.config().winget_use_sudo() {
|
||||
let sudo = ctx.require_sudo()?;
|
||||
sudo.execute(ctx, &winget)?
|
||||
} else {
|
||||
ctx.execute(winget)
|
||||
};
|
||||
|
||||
let mut args = vec!["upgrade", "--all"];
|
||||
if ctx.config().winget_silent_install() {
|
||||
args.push("--silent");
|
||||
}
|
||||
|
||||
ctx.run_type()
|
||||
.execute(winget)
|
||||
.args(["upgrade", "--all"])
|
||||
.status_checked()
|
||||
command.args(args).status_checked()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_scoop(ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -58,26 +61,26 @@ pub fn run_scoop(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("Scoop");
|
||||
|
||||
ctx.run_type().execute(&scoop).args(["update"]).status_checked()?;
|
||||
ctx.run_type().execute(&scoop).args(["update", "*"]).status_checked()?;
|
||||
ctx.execute(&scoop).args(["update"]).status_checked()?;
|
||||
ctx.execute(&scoop).args(["update", "*"]).status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
ctx.run_type().execute(&scoop).args(["cleanup", "*"]).status_checked()?;
|
||||
ctx.execute(&scoop).args(["cleanup", "*"]).status_checked()?;
|
||||
ctx.execute(&scoop).args(["cache", "rm", "-a"]).status_checked()?
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_wsl(ctx: &ExecutionContext) -> Result<()> {
|
||||
if !is_wsl_installed()? {
|
||||
return Err(SkipStep("WSL not installed".to_string()).into());
|
||||
return Err(SkipStep(t!("WSL not installed").to_string()).into());
|
||||
}
|
||||
|
||||
let wsl = require("wsl")?;
|
||||
|
||||
print_separator("Update WSL");
|
||||
print_separator(t!("Update WSL"));
|
||||
|
||||
let mut wsl_command = ctx.run_type().execute(wsl);
|
||||
let mut wsl_command = ctx.execute(wsl);
|
||||
wsl_command.args(["--update"]);
|
||||
|
||||
if ctx.config().wsl_update_pre_release() {
|
||||
@@ -94,7 +97,7 @@ pub fn update_wsl(ctx: &ExecutionContext) -> Result<()> {
|
||||
/// Detect if WSL is installed or not.
|
||||
///
|
||||
/// For WSL, we cannot simply check if command `wsl` is installed as on newer
|
||||
/// versions of Windows (since windows 10 version 2004), this commmand is
|
||||
/// versions of Windows (since windows 10 version 2004), this command is
|
||||
/// installed by default.
|
||||
///
|
||||
/// If the command is installed and the user hasn't installed any Linux distros
|
||||
@@ -119,8 +122,8 @@ fn get_wsl_distributions(wsl: &Path) -> Result<Vec<String>> {
|
||||
let output = Command::new(wsl).args(["--list", "-q"]).output_checked_utf8()?.stdout;
|
||||
Ok(output
|
||||
.lines()
|
||||
.map(|x| x.replace(['\u{0}', '\r'], "").trim().to_owned())
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(|x| x.replace(['\u{0}', '\r'], ""))
|
||||
.collect())
|
||||
}
|
||||
|
||||
@@ -128,12 +131,45 @@ fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> R
|
||||
let topgrade = Command::new(wsl)
|
||||
.args(["-d", dist, "bash", "-lc", "which topgrade"])
|
||||
.output_checked_utf8()
|
||||
.map_err(|_| SkipStep(String::from("Could not find Topgrade installed in WSL")))?;
|
||||
.map_err(|_| SkipStep(t!("Could not find Topgrade installed in WSL").to_string()))?
|
||||
.stdout // The normal output from `which topgrade` appends a newline, so we trim it here.
|
||||
.trim_end()
|
||||
.to_owned();
|
||||
|
||||
let mut command = ctx.execute(wsl);
|
||||
|
||||
// The `arg` method automatically quotes its arguments.
|
||||
// This means we can't append additional arguments to `topgrade` in WSL
|
||||
// by calling `arg` successively.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// ```rust
|
||||
// command
|
||||
// .args(["-d", dist, "bash", "-lc"])
|
||||
// .arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade}"));
|
||||
// ```
|
||||
//
|
||||
// creates a command string like:
|
||||
// > `C:\WINDOWS\system32\wsl.EXE -d Ubuntu bash -lc 'TOPGRADE_PREFIX=Ubuntu exec /bin/topgrade'`
|
||||
//
|
||||
// Adding the following:
|
||||
//
|
||||
// ```rust
|
||||
// command.arg("-v");
|
||||
// ```
|
||||
//
|
||||
// appends the next argument like so:
|
||||
// > `C:\WINDOWS\system32\wsl.EXE -d Ubuntu bash -lc 'TOPGRADE_PREFIX=Ubuntu exec /bin/topgrade' -v`
|
||||
// which means `-v` isn't passed to `topgrade`.
|
||||
let mut args = String::new();
|
||||
if ctx.config().verbose() {
|
||||
args.push_str("-v");
|
||||
}
|
||||
|
||||
let mut command = ctx.run_type().execute(wsl);
|
||||
command
|
||||
.args(["-d", dist, "bash", "-c"])
|
||||
.arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade}"));
|
||||
.args(["-d", dist, "bash", "-lc"])
|
||||
.arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade} {args}"));
|
||||
|
||||
if ctx.config().yes(Step::Wsl) {
|
||||
command.arg("-y");
|
||||
@@ -144,7 +180,7 @@ fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> R
|
||||
|
||||
pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
if !is_wsl_installed()? {
|
||||
return Err(SkipStep("WSL not installed".to_string()).into());
|
||||
return Err(SkipStep(t!("WSL not installed").to_string()).into());
|
||||
}
|
||||
|
||||
let wsl = require("wsl")?;
|
||||
@@ -167,36 +203,77 @@ pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
if ran {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(SkipStep(String::from("Could not find Topgrade in any WSL disribution")).into())
|
||||
Err(SkipStep(t!("Could not find Topgrade in any WSL distribution").to_string()).into())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn windows_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = powershell::Powershell::windows_powershell();
|
||||
let powershell = ctx.require_powershell()?;
|
||||
|
||||
if powershell.supports_windows_update() {
|
||||
print_separator("Windows Update");
|
||||
return powershell.windows_update(ctx);
|
||||
print_separator(t!("Windows Update"));
|
||||
|
||||
if !powershell.has_module("PSWindowsUpdate") {
|
||||
print_warning(t!(
|
||||
"The PSWindowsUpdate PowerShell module isn't installed so Topgrade can't run Windows Update.\nInstall PSWindowsUpdate by running `Install-Module PSWindowsUpdate` in PowerShell."
|
||||
));
|
||||
|
||||
return Err(SkipStep(t!("PSWindowsUpdate is not installed").to_string()).into());
|
||||
}
|
||||
|
||||
let usoclient = require("UsoClient")?;
|
||||
let mut cmd = "Import-Module PSWindowsUpdate; Install-WindowsUpdate -Verbose".to_string();
|
||||
|
||||
print_separator("Windows Update");
|
||||
println!("Running Windows Update. Check the control panel for progress.");
|
||||
ctx.run_type()
|
||||
.execute(&usoclient)
|
||||
.arg("ScanInstallWait")
|
||||
.status_checked()?;
|
||||
ctx.run_type().execute(&usoclient).arg("StartInstall").status_checked()
|
||||
if ctx.config().accept_all_windows_updates() {
|
||||
cmd.push_str(" -AcceptAll");
|
||||
}
|
||||
|
||||
match ctx.config().windows_updates_auto_reboot() {
|
||||
UpdatesAutoReboot::Yes => cmd.push_str(" -AutoReboot"),
|
||||
UpdatesAutoReboot::No => cmd.push_str(" -IgnoreReboot"),
|
||||
UpdatesAutoReboot::Ask => (), // Prompting is the default for Install-WindowsUpdate
|
||||
}
|
||||
|
||||
powershell.build_command(ctx, &cmd, true)?.status_checked()
|
||||
}
|
||||
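The rewritten `windows_update` assembles a single PowerShell command string from the config instead of shelling out to `UsoClient`. A sketch of just that string building; the enum mirrors the diff's `UpdatesAutoReboot` values:

```rust
enum AutoReboot {
    Yes,
    No,
    Ask,
}

// Build the Install-WindowsUpdate invocation described above.
fn windows_update_command(accept_all: bool, reboot: AutoReboot) -> String {
    let mut cmd = String::from("Import-Module PSWindowsUpdate; Install-WindowsUpdate -Verbose");
    if accept_all {
        cmd.push_str(" -AcceptAll");
    }
    match reboot {
        AutoReboot::Yes => cmd.push_str(" -AutoReboot"),
        AutoReboot::No => cmd.push_str(" -IgnoreReboot"),
        AutoReboot::Ask => {} // Install-WindowsUpdate prompts by default
    }
    cmd
}
```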
|
||||
pub fn reboot() -> Result<()> {
|
||||
pub fn microsoft_store(ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = ctx.require_powershell()?;
|
||||
|
||||
print_separator(t!("Microsoft Store"));
|
||||
|
||||
println!("{}", t!("Scanning for updates..."));
|
||||
|
||||
// Scan for updates using the MDM UpdateScanMethod
|
||||
// This method is also available for non-MDM devices
|
||||
let cmd = r#"(Get-CimInstance -Namespace "Root\cimv2\mdm\dmmap" -ClassName "MDM_EnterpriseModernAppManagement_AppManagement01" | Invoke-CimMethod -MethodName UpdateScanMethod).ReturnValue"#;
|
||||
|
||||
powershell
|
||||
.build_command(ctx, cmd, true)?
|
||||
.output_checked_with_utf8(|output| {
|
||||
if !output.status.success() {
|
||||
return Err(());
|
||||
}
|
||||
let ret_val = output.stdout.trim();
|
||||
debug!("Command return value: {}", ret_val);
|
||||
if ret_val == "0" {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(())
|
||||
}
|
||||
})?;
|
||||
println!(
|
||||
"{}",
|
||||
t!("Success, Microsoft Store apps are being updated in the background")
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn reboot(ctx: &ExecutionContext) -> Result<()> {
|
||||
// If this works, it won't return, but if it doesn't work, it may return a useful error
|
||||
// message.
|
||||
Command::new("shutdown").args(["/R", "/T", "0"]).status_checked()
|
||||
ctx.execute("shutdown.exe").args(["/R", "/T", "0"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn insert_startup_scripts(git_repos: &mut Repositories) -> Result<()> {
|
||||
pub fn insert_startup_scripts(git_repos: &mut RepoStep) -> Result<()> {
|
||||
let startup_dir = crate::WINDOWS_DIRS
|
||||
.data_dir()
|
||||
.join("Microsoft\\Windows\\Start Menu\\Programs\\Startup");
|
||||
@@ -206,7 +283,7 @@ pub fn insert_startup_scripts(git_repos: &mut Repositories) -> Result<()> {
|
||||
if let Ok(lnk) = parselnk::Lnk::try_from(Path::new(&path)) {
|
||||
debug!("Startup link: {:?}", lnk);
|
||||
if let Some(path) = lnk.relative_path() {
|
||||
git_repos.insert_if_repo(&startup_dir.join(path));
|
||||
git_repos.insert_if_repo(startup_dir.join(path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,124 +1,159 @@
|
||||
#[cfg(windows)]
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
#[cfg(windows)]
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Result;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::terminal::{is_dumb, print_separator};
|
||||
use crate::utils::{require_option, which, PathExt};
|
||||
use crate::Step;
|
||||
use crate::terminal;
|
||||
use crate::utils::{which, PathExt};
|
||||
|
||||
pub struct Powershell {
|
||||
path: Option<PathBuf>,
|
||||
path: PathBuf,
|
||||
profile: Option<PathBuf>,
|
||||
is_pwsh: bool,
|
||||
}
|
||||
|
||||
impl Powershell {
|
||||
/// Returns a powershell instance.
|
||||
///
|
||||
/// If the powershell binary is not found, or the current terminal is dumb
|
||||
/// then the instance of this struct will skip all the powershell steps.
|
||||
pub fn new() -> Self {
|
||||
let path = which("pwsh").or_else(|| which("powershell")).filter(|_| !is_dumb());
|
||||
|
||||
let profile = path.as_ref().and_then(|path| {
|
||||
Command::new(path)
|
||||
.args(["-NoProfile", "-Command", "Split-Path $profile"])
|
||||
.output_checked_utf8()
|
||||
.map(|output| PathBuf::from(output.stdout.trim()))
|
||||
.and_then(|p| p.require())
|
||||
.ok()
|
||||
});
|
||||
|
||||
Powershell { path, profile }
|
||||
pub fn new() -> Option<Self> {
|
||||
if terminal::is_dumb() {
|
||||
return None;
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn windows_powershell() -> Self {
|
||||
Powershell {
|
||||
path: which("powershell").filter(|_| !is_dumb()),
|
||||
let (path, is_pwsh) = which("pwsh")
|
||||
.map(|p| (Some(p), true))
|
||||
.or_else(|| which("powershell").map(|p| (Some(p), false)))
|
||||
.unwrap_or((None, false));
|
||||
|
||||
path.map(|path| {
|
||||
let mut ret = Self {
|
||||
path,
|
||||
profile: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn has_module(powershell: &Path, command: &str) -> bool {
|
||||
Command::new(powershell)
|
||||
.args([
|
||||
"-NoProfile",
|
||||
"-Command",
|
||||
&format!("Get-Module -ListAvailable {command}"),
|
||||
])
|
||||
.output_checked_utf8()
|
||||
.map(|result| !result.stdout.is_empty())
|
||||
.unwrap_or(false)
|
||||
is_pwsh,
|
||||
};
|
||||
ret.set_profile();
|
||||
ret
|
||||
})
|
||||
}
|
||||
|
||||
pub fn profile(&self) -> Option<&PathBuf> {
|
||||
self.profile.as_ref()
|
||||
}
|
||||
|
||||
pub fn update_modules(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = require_option(self.path.as_ref(), String::from("Powershell is not installed"))?;
|
||||
|
||||
print_separator("Powershell Modules Update");
|
||||
|
||||
let mut cmd = vec!["Update-Module"];
|
||||
|
||||
if ctx.config().verbose() {
|
||||
cmd.push("-Verbose")
|
||||
fn set_profile(&mut self) {
|
||||
let profile = self
|
||||
.build_command_internal("Split-Path $PROFILE")
|
||||
.output_checked_utf8()
|
||||
.map(|output| output.stdout.trim().to_string())
|
||||
.and_then(|s| PathBuf::from(s).require())
|
||||
.ok();
|
||||
debug!("Found PowerShell profile: {:?}", profile);
|
||||
self.profile = profile;
|
||||
}
|
||||
|
||||
if ctx.config().yes(Step::Powershell) {
|
||||
cmd.push("-Force")
|
||||
pub fn is_pwsh(&self) -> bool {
|
||||
self.is_pwsh
|
||||
}
|
||||
|
||||
println!("Updating modules...");
|
||||
ctx.run_type()
|
||||
.execute(powershell)
|
||||
// This probably doesn't need `shell_words::join`.
|
||||
.args(["-NoProfile", "-Command", &cmd.join(" ")])
|
||||
.status_checked()
|
||||
/// Builds an "internal" powershell command
|
||||
pub fn build_command_internal(&self, cmd: &str) -> Command {
|
||||
let mut command = Command::new(&self.path);
|
||||
|
||||
command.args(["-NoProfile", "-Command"]);
|
||||
command.arg(cmd);
|
||||
|
||||
// If topgrade was run from pwsh, but we are trying to run powershell, then
|
||||
// the inherited PSModulePath breaks module imports
|
||||
if !self.is_pwsh {
|
||||
command.env_remove("PSModulePath");
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn supports_windows_update(&self) -> bool {
|
||||
self.path
|
||||
.as_ref()
|
||||
.map(|p| Self::has_module(p, "PSWindowsUpdate"))
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn windows_update(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = require_option(self.path.as_ref(), String::from("Powershell is not installed"))?;
|
||||
|
||||
debug_assert!(self.supports_windows_update());
|
||||
|
||||
let mut command = if let Some(sudo) = ctx.sudo() {
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(powershell);
|
||||
command
|
||||
}
|
||||
|
||||
/// Builds a "primary" powershell command (uses dry-run if required):
|
||||
/// {powershell} -NoProfile -Command {cmd}
|
||||
pub fn build_command<'a>(
|
||||
&self,
|
||||
ctx: &'a ExecutionContext,
|
||||
cmd: &str,
|
||||
use_sudo: bool,
|
||||
) -> Result<impl CommandExt + 'a> {
|
||||
let mut command = if use_sudo {
|
||||
let sudo = ctx.require_sudo()?;
|
||||
sudo.execute(ctx, &self.path)?
|
||||
} else {
|
||||
ctx.run_type().execute(powershell)
|
||||
ctx.execute(&self.path)
|
||||
};
|
||||
|
||||
command
|
||||
.args([
|
||||
"-NoProfile",
|
||||
"-Command",
|
||||
&format!(
|
||||
"Import-Module PSWindowsUpdate; Install-WindowsUpdate -MicrosoftUpdate {} -Verbose",
|
||||
if ctx.config().accept_all_windows_updates() {
|
||||
"-AcceptAll"
|
||||
} else {
|
||||
""
|
||||
#[cfg(windows)]
|
||||
{
|
||||
// Check execution policy and return early if it's not set correctly
|
||||
self.execution_policy_args_if_needed()?;
|
||||
}
|
||||
),
|
||||
])
|
||||
.status_checked()
|
||||
|
||||
command.args(["-NoProfile", "-Command"]);
|
||||
command.arg(cmd);
|
||||
|
||||
// If topgrade was run from pwsh, but we are trying to run powershell, then
|
||||
// the inherited PSModulePath breaks module imports
|
||||
if !self.is_pwsh {
|
||||
command.env_remove("PSModulePath");
|
||||
}
|
||||
|
||||
Ok(command)
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn execution_policy_args_if_needed(&self) -> Result<()> {
|
||||
if !self.is_execution_policy_set("RemoteSigned") {
|
||||
Err(eyre!(
|
||||
"PowerShell execution policy is too restrictive. \
|
||||
Please run 'Set-ExecutionPolicy RemoteSigned -Scope CurrentUser' in PowerShell \
|
||||
(or use Unrestricted/Bypass if you're sure about the security implications)"
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn is_execution_policy_set(&self, policy: &str) -> bool {
|
||||
// These policies are ordered from most restrictive to least restrictive
|
||||
let valid_policies = ["Restricted", "AllSigned", "RemoteSigned", "Unrestricted", "Bypass"];
|
||||
|
||||
// Find the index of our target policy
|
||||
let target_idx = valid_policies.iter().position(|&p| p == policy);
|
||||
|
||||
let current_policy = self
|
||||
.build_command_internal("Get-ExecutionPolicy")
|
||||
.output_checked_utf8()
|
||||
.map(|output| output.stdout.trim().to_string());
|
||||
|
||||
debug!("Found PowerShell ExecutionPolicy: {:?}", current_policy);
|
||||
|
||||
current_policy.is_ok_and(|current_policy| {
|
||||
// Find the index of the current policy
|
||||
let current_idx = valid_policies.iter().position(|&p| p == current_policy);
|
||||
|
||||
// Check if current policy exists and is at least as permissive as the target
|
||||
match (current_idx, target_idx) {
|
||||
(Some(current), Some(target)) => current >= target,
|
||||
_ => false,
|
||||
}
|
||||
})
|
||||
}
|
||||
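`is_execution_policy_set` above treats the policy list as an ordering and accepts any policy at least as permissive as the requested one. The comparison boils down to this sketch:

```rust
// Policies ranked from most to least restrictive; the check passes when the
// current policy sits at or beyond the required one in this order.
fn policy_is_at_least(current: &str, required: &str) -> bool {
    const ORDER: [&str; 5] = ["Restricted", "AllSigned", "RemoteSigned", "Unrestricted", "Bypass"];
    let pos = |p: &str| ORDER.iter().position(|candidate| *candidate == p);
    match (pos(current), pos(required)) {
        (Some(cur), Some(req)) => cur >= req,
        _ => false,
    }
}
```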
|
||||
#[cfg(windows)]
|
||||
pub fn has_module(&self, module_name: &str) -> bool {
|
||||
let cmd = format!("Get-Module -ListAvailable {}", module_name);
|
||||
|
||||
self.build_command_internal(&cmd)
|
||||
.output_checked()
|
||||
.map(|output| !output.stdout.trim_ascii().is_empty())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
|
||||
use crate::{
|
||||
command::CommandExt, error::SkipStep, execution_context::ExecutionContext, terminal::print_separator, utils,
|
||||
@@ -27,15 +28,15 @@ pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
|
||||
{
|
||||
prepare_async_ssh_command(&mut args);
|
||||
crate::tmux::run_command(ctx, hostname, &shell_words::join(args))?;
|
||||
Err(SkipStep(String::from("Remote Topgrade launched in Tmux")).into())
|
||||
Err(SkipStep(String::from(t!("Remote Topgrade launched in Tmux"))).into())
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
unreachable!("Tmux execution is only implemented in Unix");
|
||||
} else if ctx.config().open_remotes_in_new_terminal() && !ctx.run_type().dry() && cfg!(windows) {
|
||||
prepare_async_ssh_command(&mut args);
|
||||
ctx.run_type().execute("wt").args(&args).spawn()?;
|
||||
Err(SkipStep(String::from("Remote Topgrade launched in an external terminal")).into())
|
||||
ctx.execute("wt").args(&args).spawn()?;
|
||||
Err(SkipStep(String::from(t!("Remote Topgrade launched in an external terminal"))).into())
|
||||
} else {
|
||||
let mut args = vec!["-t", hostname];
|
||||
|
||||
@@ -47,8 +48,8 @@ pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
|
||||
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
|
||||
|
||||
print_separator(format!("Remote ({hostname})"));
|
||||
println!("Connecting to {hostname}...");
|
||||
println!("{}", t!("Connecting to {hostname}...", hostname = hostname));
|
||||
|
||||
ctx.run_type().execute(ssh).args(&args).status_checked()
|
||||
ctx.execute(ssh).args(&args).status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,13 +4,15 @@ use std::{fmt::Display, rc::Rc, str::FromStr};
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use regex::Regex;
|
||||
use rust_i18n::t;
|
||||
use strum::EnumString;
|
||||
use tracing::{debug, error};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::step::Step;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::{error::SkipStep, utils, Step};
|
||||
use crate::{error::SkipStep, utils};
|
||||
|
||||
#[derive(Debug, Copy, Clone, EnumString)]
|
||||
#[strum(serialize_all = "lowercase")]
|
||||
@@ -112,8 +114,7 @@ impl<'a> TemporaryPowerOn<'a> {
|
||||
BoxStatus::Running => unreachable!(),
|
||||
};
|
||||
|
||||
ctx.run_type()
|
||||
.execute(vagrant)
|
||||
ctx.execute(vagrant)
|
||||
.args([subcommand, &vagrant_box.name])
|
||||
.current_dir(vagrant_box.path.clone())
|
||||
.status_checked()?;
|
||||
@@ -125,7 +126,7 @@ impl<'a> TemporaryPowerOn<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Drop for TemporaryPowerOn<'a> {
|
||||
impl Drop for TemporaryPowerOn<'_> {
|
||||
fn drop(&mut self) {
|
||||
let subcommand = if self.ctx.config().vagrant_always_suspend().unwrap_or(false) {
|
||||
"suspend"
|
||||
@@ -139,7 +140,6 @@ impl<'a> Drop for TemporaryPowerOn<'a> {
|
||||
|
||||
println!();
|
||||
self.ctx
|
||||
.run_type()
|
||||
.execute(self.vagrant)
|
||||
.args([subcommand, &self.vagrant_box.name])
|
||||
.current_dir(self.vagrant_box.path.clone())
|
||||
@@ -151,14 +151,14 @@ impl<'a> Drop for TemporaryPowerOn<'a> {
|
||||
pub fn collect_boxes(ctx: &ExecutionContext) -> Result<Vec<VagrantBox>> {
|
||||
let directories = utils::require_option(
|
||||
ctx.config().vagrant_directories(),
|
||||
String::from("No Vagrant directories were specified in the configuration file"),
|
||||
String::from(t!("No Vagrant directories were specified in the configuration file")),
|
||||
)?;
|
||||
let vagrant = Vagrant {
|
||||
path: utils::require("vagrant")?,
|
||||
};
|
||||
|
||||
print_separator("Vagrant");
|
||||
println!("Collecting Vagrant boxes");
|
||||
println!("{}", t!("Collecting Vagrant boxes"));
|
||||
|
||||
let mut result = Vec::new();
|
||||
|
||||
@@ -179,25 +179,28 @@ pub fn topgrade_vagrant_box(ctx: &ExecutionContext, vagrant_box: &VagrantBox) ->
|
||||
path: utils::require("vagrant")?,
|
||||
};
|
||||
|
||||
let seperator = format!("Vagrant ({})", vagrant_box.smart_name());
|
||||
let separator = format!("Vagrant ({})", vagrant_box.smart_name());
|
||||
let mut _poweron = None;
|
||||
if !vagrant_box.initial_status.powered_on() {
|
||||
if !(ctx.config().vagrant_power_on().unwrap_or(true)) {
|
||||
return Err(SkipStep(format!("Skipping powered off box {vagrant_box}")).into());
|
||||
return Err(SkipStep(format!(
|
||||
"{}",
|
||||
t!("Skipping powered off box {vagrant_box}", vagrant_box = vagrant_box)
|
||||
))
|
||||
.into());
|
||||
} else {
|
||||
print_separator(seperator);
|
||||
print_separator(separator);
|
||||
_poweron = Some(vagrant.temporary_power_on(vagrant_box, ctx)?);
|
||||
}
|
||||
} else {
|
||||
print_separator(seperator);
|
||||
print_separator(separator);
|
||||
}
|
||||
let mut command = format!("env TOPGRADE_PREFIX={} topgrade", vagrant_box.smart_name());
|
||||
if ctx.config().yes(Step::Vagrant) {
|
||||
command.push_str(" -y");
|
||||
}
|
||||
|
||||
ctx.run_type()
|
||||
.execute(&vagrant.path)
|
||||
ctx.execute(&vagrant.path)
|
||||
.current_dir(&vagrant_box.path)
|
||||
.args(["ssh", "-c", &command])
|
||||
.status_checked()
|
||||
@@ -205,7 +208,7 @@ pub fn topgrade_vagrant_box(ctx: &ExecutionContext, vagrant_box: &VagrantBox) ->
|
||||
|
||||
pub fn upgrade_vagrant_boxes(ctx: &ExecutionContext) -> Result<()> {
|
||||
let vagrant = utils::require("vagrant")?;
|
||||
print_separator("Vagrant boxes");
|
||||
print_separator(t!("Vagrant boxes"));
|
||||
|
||||
let outdated = Command::new(&vagrant)
|
||||
.args(["box", "outdated", "--global"])
|
||||
@@ -217,7 +220,6 @@ pub fn upgrade_vagrant_boxes(ctx: &ExecutionContext) -> Result<()> {
|
||||
for ele in re.captures_iter(&outdated.stdout) {
|
||||
found = true;
|
||||
let _ = ctx
|
||||
.run_type()
|
||||
.execute(&vagrant)
|
||||
.args(["box", "update", "--box"])
|
||||
.arg(ele.get(1).unwrap().as_str())
|
||||
@@ -227,12 +229,9 @@ pub fn upgrade_vagrant_boxes(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
if !found {
|
||||
println!("No outdated boxes")
|
||||
println!("{}", t!("No outdated boxes"));
|
||||
} else {
|
||||
ctx.run_type()
|
||||
.execute(&vagrant)
|
||||
.args(["box", "prune"])
|
||||
.status_checked()?;
|
||||
ctx.execute(&vagrant).args(["box", "prune"]).status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -5,24 +5,50 @@ use std::process::Command;
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::config::TmuxConfig;
|
||||
use crate::config::TmuxSessionMode;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::HOME_DIR;
|
||||
use crate::{
|
||||
execution_context::ExecutionContext,
|
||||
utils::{which, PathExt},
|
||||
};
|
||||
use crate::{HOME_DIR, XDG_DIRS};
|
||||
|
||||
use rust_i18n::t;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::process::CommandExt as _;
|
||||
|
||||
// update_plugins path is relative to the TPM path
|
||||
const UPDATE_PLUGINS: &str = "bin/update_plugins";
|
||||
// Default TPM path relative to the TMux config directory
|
||||
const TPM_PATH: &str = "plugins/tpm";
|
||||
|
||||
pub fn run_tpm(ctx: &ExecutionContext) -> Result<()> {
|
||||
let tpm = HOME_DIR.join(".tmux/plugins/tpm/bin/update_plugins").require()?;
|
||||
let tpm = match env::var("TMUX_PLUGIN_MANAGER_PATH") {
|
||||
// Use `$TMUX_PLUGIN_MANAGER_PATH` if set,
|
||||
Ok(var) => PathBuf::from(var).join(UPDATE_PLUGINS),
|
||||
Err(_) => {
|
||||
// otherwise, use the default XDG location `~/.config/tmux`
|
||||
#[cfg(unix)]
|
||||
let xdg_path = XDG_DIRS.config_dir().join("tmux").join(TPM_PATH).join(UPDATE_PLUGINS);
|
||||
#[cfg(windows)]
|
||||
let xdg_path = HOME_DIR.join(".config/tmux").join(TPM_PATH).join(UPDATE_PLUGINS);
|
||||
if xdg_path.exists() {
|
||||
xdg_path
|
||||
} else {
|
||||
// or fallback on the standard default location `~/.tmux`.
|
||||
HOME_DIR.join(".tmux").join(TPM_PATH).join(UPDATE_PLUGINS)
|
||||
}
|
||||
}
|
||||
}
|
||||
.require()?;
|
||||
|
||||
print_separator("tmux plugins");
|
||||
|
||||
ctx.run_type().execute(tpm).arg("all").status_checked()
|
||||
ctx.execute(tpm).arg("all").status_checked()
|
||||
}
|
||||
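`run_tpm` now resolves the TPM update script from `$TMUX_PLUGIN_MANAGER_PATH`, then the XDG tmux config directory, then the classic `~/.tmux` location. A sketch of that precedence, with `config_dir` and `home_dir` standing in for the crate's `XDG_DIRS` and `HOME_DIR`:

```rust
use std::path::{Path, PathBuf};

const UPDATE_PLUGINS: &str = "bin/update_plugins";
const TPM_PATH: &str = "plugins/tpm";

// $TMUX_PLUGIN_MANAGER_PATH wins, then ~/.config/tmux/plugins/tpm, then ~/.tmux/plugins/tpm.
fn tpm_update_script(config_dir: &Path, home_dir: &Path) -> PathBuf {
    if let Some(var) = std::env::var_os("TMUX_PLUGIN_MANAGER_PATH") {
        return PathBuf::from(var).join(UPDATE_PLUGINS);
    }
    let xdg = config_dir.join("tmux").join(TPM_PATH).join(UPDATE_PLUGINS);
    if xdg.exists() {
        xdg
    } else {
        home_dir.join(".tmux").join(TPM_PATH).join(UPDATE_PLUGINS)
    }
}
```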
|
||||
struct Tmux {
|
||||
@@ -118,13 +144,13 @@ impl Tmux {
|
||||
.output_checked_utf8()?
|
||||
.stdout
|
||||
.lines()
|
||||
.map(|l| l.parse())
|
||||
.map(str::parse)
|
||||
.collect::<Result<Vec<usize>, _>>()
|
||||
.context("Failed to compute tmux windows")
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_in_tmux(args: Vec<String>) -> Result<()> {
|
||||
pub fn run_in_tmux(config: TmuxConfig) -> Result<()> {
|
||||
let command = {
|
||||
let mut command = vec![
|
||||
String::from("env"),
|
||||
@@ -137,39 +163,50 @@ pub fn run_in_tmux(args: Vec<String>) -> Result<()> {
|
||||
shell_words::join(command)
|
||||
};
|
||||
|
||||
let tmux = Tmux::new(args);
|
||||
let tmux = Tmux::new(config.args);
|
||||
|
||||
// Find an unused session and run `topgrade` in it with the current command's arguments.
|
||||
let session_name = "topgrade";
|
||||
let window_name = "topgrade";
|
||||
let session = tmux.new_unique_session(session_name, window_name, &command)?;
|
||||
|
||||
let is_inside_tmux = env::var("TMUX").is_ok();
|
||||
let err = match config.session_mode {
|
||||
TmuxSessionMode::AttachIfNotInSession => {
|
||||
if is_inside_tmux {
|
||||
// Only attach to the newly-created session if we're not currently in a tmux session.
|
||||
if env::var("TMUX").is_err() {
|
||||
let err = tmux.build().args(["attach-session", "-t", &session]).exec();
|
||||
Err(eyre!("{err}")).context("Failed to `execvp(3)` tmux")
|
||||
println!("{}", t!("Topgrade launched in a new tmux session"));
|
||||
return Ok(());
|
||||
} else {
|
||||
println!("Topgrade launched in a new tmux session");
|
||||
Ok(())
|
||||
tmux.build().args(["attach-session", "-t", &session]).exec()
|
||||
}
|
||||
}
|
||||
|
||||
TmuxSessionMode::AttachAlways => {
|
||||
if is_inside_tmux {
|
||||
tmux.build().args(["switch-client", "-t", &session]).exec()
|
||||
} else {
|
||||
tmux.build().args(["attach-session", "-t", &session]).exec()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Err(eyre!("{err}")).context("Failed to `execvp(3)` tmux")
|
||||
}
|
||||
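The new `TmuxSessionMode` handling above chooses between `attach-session` and `switch-client` depending on whether Topgrade is already running inside tmux. The decision table reduces to this sketch:

```rust
enum SessionMode {
    AttachIfNotInSession,
    AttachAlways,
}

// Returns the tmux subcommand to run, or None when the new session should be
// left in the background (AttachIfNotInSession while already inside tmux).
fn attach_args(mode: SessionMode, inside_tmux: bool, session: &str) -> Option<Vec<String>> {
    match (mode, inside_tmux) {
        (SessionMode::AttachIfNotInSession, true) => None,
        (_, true) => Some(vec!["switch-client".into(), "-t".into(), session.into()]),
        (_, false) => Some(vec!["attach-session".into(), "-t".into(), session.into()]),
    }
}
```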
|
||||
pub fn run_command(ctx: &ExecutionContext, window_name: &str, command: &str) -> Result<()> {
|
||||
let tmux = Tmux::new(ctx.config().tmux_arguments()?);
|
||||
let tmux = Tmux::new(ctx.config().tmux_config()?.args);
|
||||
|
||||
match ctx.get_tmux_session() {
|
||||
Some(session_name) => {
|
||||
if let Some(session_name) = ctx.get_tmux_session() {
|
||||
let indices = tmux.window_indices(&session_name)?;
|
||||
let last_window = indices
|
||||
.iter()
|
||||
.last()
|
||||
.ok_or_else(|| eyre!("tmux session {session_name} has no windows"))?;
|
||||
tmux.new_window(&session_name, &format!("{last_window}"), command)?;
|
||||
}
|
||||
None => {
|
||||
} else {
|
||||
let name = tmux.new_unique_session("topgrade", window_name, command)?;
|
||||
ctx.set_tmux_session(name);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
use color_eyre::eyre::Result;

use crate::command::CommandExt;
use crate::config::Step;
use crate::step::Step;
use crate::terminal::print_separator;
use crate::{execution_context::ExecutionContext, utils::require};
use std::path::Path;
@@ -59,7 +59,7 @@ pub fn run_toolbx(ctx: &ExecutionContext) -> Result<()> {
args.push("--yes");
}

ctx.run_type().execute(&toolbx).args(&args).status_checked()?;
ctx.execute(&toolbx).args(&args).status_checked()?;
}

Ok(())