Compare commits
190 Commits
SHA1 values of the 190 compared commits (author and date are not shown in this view):

0854f9c559, e4a068d808, 4c793b0df8, a021441135, 29c555c394, c33d396489, f6d2ba4dae, a88574204d, 9435bc4b7d, 27245cbd7b,
21751aa8a5, ad41948450, e32246f172, 25d3a816b4, 05b1a565e0, 7b2623ea3c, 983c5243ba, 1958fe1e5b, ca8558d9b4, 1b534800a9,
e91c00c9c0, a2375b4820, 2e0c8e9e17, dc0ddcf9f0, a1f3c86a39, 55f672eff7, 8ece0346d8, b1fe1d201a, 5010abdc22, e4441d5021,
5af0c6a7e5, b8da17106a, fdf40dbf43, f3b6530969, cbc5fc94f9, dceb697355, 07118fa0d2, 16e6db0def, 64d8f6d632, 180b5cba58,
bac416e907, cb674a1572, 960b14fa20, a9f57d4205, 13330b6950, 1ebcc9beee, 55e1bbf2b9, f2dfa1e475, fcd53e772a, 8b9d7ef8f3,
d8406a8cfe, 4a9ef581e5, a52db1f261, 8e16174ce7, c748bb5d7a, 3cc8f0d818, f96eeeda6b, d1d8904376, 3b329fe687, 9eb1b4ac9f,
c4c0bd7383, 1e9de5832d, f2b17cdd9d, 7bfd6c2439, 0e8d5f0266, 32add8f046, f661f00277, 2a1999fe20, 4d66431aad, 767f0d91f4,
a3428e3477, 614131b7bf, 9b0681f3b8, ecf8fb7a47, 04bfb45a97, d90ce30452, ab21600ca6, 728ea26204, 373cd3b3ae, f4e0258b09,
d50360a69a, 351922c81f, 9518f43866, 2c1ce3d4e6, 12116c3261, fbc84e8aa1, 6dab1e4f37, 650a143602, 9b6027fe78, 0e30e05ce8,
eea952fa78, 6071a1ee3b, a801b7b9f4, c6e3f0ae0a, a43b03d3db, 12b0fa57ad, d9e304f0ef, 842b92cca7, 485f0ec9c8, 5e3b5fc9a7,
7c63541cad, 238e089d74, 8991bc9f62, 7a3f3a8905, e4085e03eb, 4b0c366e5f, ea97240d09, 12de531abb, c3876ce3bf, cbbfc3a114,
ad2bfc9abd, 528461412e, 64db679390, 77a8b3b7d2, 7007e76ab5, 3c970063a9, b70830015e, b43f2c8b3a, c311da16f3, 37608a338c,
b07288e674, 707698faab, 2e70d132d0, 30c5b31e21, 77ff6cb714, ea13c51b7d, 3ed763b884, 10e1e170b7, ffa62afc66, f794329913,
f9a35c7661, ed496f3462, 6accdae232, 96efcc6c0d, bf72d7bb5a, dadffb1081, 78dc567226, 362ce4f4f9, ab35cd7b10, 15f4ad7cd1,
cbfb92041f, a506c67cac, 788e0412f6, 18b37ce3e3, a15e6748c7, c6d0539fd2, 3eb3867944, 810315b0e2, b461fc2536, 7e63977ba0,
78dec892cf, 9ea6628b5c, 465df2e9be, 61ef926849, 7fa38c593e, 41c6d1cd9a, cf3893dc49, a2fbe92a25, e1754707d8, cd380a53b3,
a8c29fd1a2, 6b871e7949, 1b5fdb6645, fe9d877cdf, 60e7aa8f03, 18e2d3e59c, d68fcb08b2, 1f6baefdc3, 71efce32c1, 3626c9cdc8,
a23b761304, 3fd27e4913, b3f152b716, df381f3a79, 2dec9db310, d50dc4c9f6, ed8b563f20, 2a73aa731d, 4dd1c13bd8, c1c9fe22df,
06a6b7a2eb, b814dd824f, ce234bdb59, 13a46a44a8, dc78b00c3c, 48ae4bf813, a50040e2d5, 2c9a56a8df, 021320b292, 9d3662c3ea
.github/ISSUE_TEMPLATE/bug_report.md (14 changes, vendored)

@@ -2,7 +2,7 @@
name: Bug report
about: Topgrade is misbehaving
title: ''
labels: 'bug'
labels: 'C-bug'
assignees: ''

---
@@ -46,6 +46,18 @@ If you know the possible cause of the issue, please tell us.
Execute the erroneous command directly to see if the problem persists
-->
- [ ] Yes
- [ ] No

## Did you run topgrade through `Remote Execution`

- [ ] Yes
- [ ] No

If yes, does the issue still occur when you run topgrade directly in your
remote host

- [ ] Yes
- [ ] No

## Configuration file (Optional)
<!--

.github/ISSUE_TEMPLATE/feature_request.md (14 changes, vendored)

@@ -2,16 +2,20 @@
name: Feature request
about: Can you please support...?
title: ''
labels: ''
labels: 'C-feature request'
assignees: ''

---

## I want to suggest a new step
### Which tool is this about? Where is its repository?
### Which operating systems are supported by this tool?
### What should Topgrade do to figure out if the tool needs to be invoked?
### Which exact commands should Topgrade run?

* Which tool is this about? Where is its repository?
* Which operating systems are supported by this tool?
* What should Topgrade do to figure out if the tool needs to be invoked?
* Which exact commands should Topgrade run?
* Does it have a `--dry-run` option? i.e., print what should be done and exit
* Does it need the user to confirm the execution? And does it provide a `--yes`
  option to skip this step?

## I want to suggest some general feature
Topgrade should...
.github/PULL_REQUEST_TEMPLATE.md (17 changes, vendored)

@@ -1,13 +1,18 @@
## Standards checklist:
## What does this PR do


## Standards checklist

- [ ] The PR title is descriptive.
- [ ] I have read `CONTRIBUTING.md`
- [ ] The code compiles (`cargo build`)
- [ ] The code passes rustfmt (`cargo fmt`)
- [ ] The code passes clippy (`cargo clippy`)
- [ ] The code passes tests (`cargo test`)
- [ ] *Optional:* I have tested the code myself
- [ ] I also tested that Topgrade skips the step where needed

## For new steps

- [ ] *Optional:* Topgrade skips this step where needed
- [ ] *Optional:* The `--dry-run` option works with this step
- [ ] *Optional:* The `--yes` option works with this step if it is supported by
  the underlying command

If you developed a feature or a bug fix for someone else and you do not have the
means to test it, please tag this person here.
.github/dependabot.yml (new file, 10 lines, vendored)

@@ -0,0 +1,10 @@
# Set update schedule for GitHub Actions

version: 2
updates:

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every week
      interval: "weekly"
Config file creation check workflow (file name not shown in this view):

@@ -1,4 +1,4 @@
name: Test Configuration File Creation
name: Check config file creation if not exists

on:
  pull_request:
@@ -12,10 +12,10 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4
      - run: |
          CONFIG_PATH=~/.config/topgrade.toml;
          if [ -f "$CONFIG_PATH" ]; then rm $CONFIG_PATH; fi
          cargo build;
          ./target/debug/topgrade --dry-run --only system;
          TOPGRADE_SKIP_BRKC_NOTIFY=true ./target/debug/topgrade --dry-run --only system;
          stat $CONFIG_PATH;
.github/workflows/check_i18n.yml (new file, 22 lines, vendored)

@@ -0,0 +1,22 @@
on:
  pull_request:
  push:
    branches:
      - main

name: Check i18n

jobs:
  check_locale:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install checker
        # Build it with the dev profile as this is faster and the checker still works
        run: |
          cargo install --git https://github.com/topgrade-rs/topgrade_i18n_locale_checker --profile dev

      - name: Run the checker
        run: topgrade_i18n_locale_checker --locale-file ./locales/app.yml --rust-src-to-check ./src
.github/workflows/check_security_vulnerability.yml (new file, 32 lines, vendored)

@@ -0,0 +1,32 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Check Security Vulnerability

on:
  pull_request:
  push:
    branches:
      - main

jobs:
  lint:
    name: DevSkim
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Run DevSkim scanner
        uses: microsoft/DevSkim-Action@v1

      - name: Upload DevSkim scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: devskim-results.sarif
Another workflow (file name not shown in this view):

@@ -8,7 +8,7 @@ jobs:
  prepare:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly-2022-08-03
@@ -7,23 +7,16 @@ on:
|
||||
name: CI
|
||||
|
||||
env:
|
||||
RUST_VER: '1.71.0'
|
||||
CROSS_VER: '0.2.5'
|
||||
CARGO_NET_RETRY: 3
|
||||
|
||||
jobs:
|
||||
fmt:
|
||||
name: Rustfmt
|
||||
runs-on: ubuntu-20.04
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: '${{ env.RUST_VER }}'
|
||||
components: rustfmt
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Run cargo fmt
|
||||
env:
|
||||
@@ -42,38 +35,36 @@ jobs:
|
||||
- target: x86_64-linux-android
|
||||
target_name: Android
|
||||
use_cross: true
|
||||
os: ubuntu-20.04
|
||||
os: ubuntu-latest
|
||||
|
||||
- target: x86_64-unknown-freebsd
|
||||
target_name: FreeBSD
|
||||
use_cross: true
|
||||
os: ubuntu-20.04
|
||||
os: ubuntu-latest
|
||||
|
||||
- target: x86_64-unknown-linux-gnu
|
||||
target_name: Linux
|
||||
os: ubuntu-20.04
|
||||
os: ubuntu-latest
|
||||
|
||||
- target: x86_64-apple-darwin
|
||||
target_name: macOS
|
||||
os: macos-11
|
||||
target_name: macOS-x86_64
|
||||
os: macos-13
|
||||
|
||||
- target: aarch64-apple-darwin
|
||||
target_name: macOS-aarch64
|
||||
os: macos-latest
|
||||
|
||||
- target: x86_64-unknown-netbsd
|
||||
target_name: NetBSD
|
||||
use_cross: true
|
||||
os: ubuntu-20.04
|
||||
os: ubuntu-latest
|
||||
|
||||
- target: x86_64-pc-windows-msvc
|
||||
target_name: Windows
|
||||
os: windows-2019
|
||||
os: windows-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: '${{ env.RUST_VER }}'
|
||||
components: clippy
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Rust Cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
@@ -84,8 +75,13 @@ jobs:
|
||||
if: matrix.use_cross == true
|
||||
run: curl -fL --retry 3 https://github.com/cross-rs/cross/releases/download/v${{ env.CROSS_VER }}/cross-x86_64-unknown-linux-musl.tar.gz | tar vxz -C /usr/local/bin
|
||||
|
||||
- name: Run cargo check
|
||||
- name: Run cargo/cross check
|
||||
run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} check --locked --target ${{ matrix.target }}
|
||||
|
||||
- name: Run cargo clippy
|
||||
- name: Run cargo/cross clippy
|
||||
run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} clippy --locked --target ${{ matrix.target }} --all-features -- -D warnings
|
||||
|
||||
- name: Run cargo test
|
||||
# ONLY run test with cargo
|
||||
if: matrix.use_cross == false
|
||||
run: cargo test --locked --target ${{ matrix.target }}
|
||||
59
.github/workflows/code-coverage.yml
vendored
59
.github/workflows/code-coverage.yml
vendored
@@ -1,59 +0,0 @@
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
name: Test with Code Coverage
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Test
|
||||
env:
|
||||
PROJECT_NAME_UNDERSCORE: topgrade
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUSTFLAGS: -Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort
|
||||
RUSTDOCFLAGS: -Cpanic=abort
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: nightly
|
||||
override: true
|
||||
- name: Cache dependencies
|
||||
uses: actions/cache@v2
|
||||
env:
|
||||
cache-name: cache-dependencies
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/.crates.toml
|
||||
~/.cargo/.crates2.json
|
||||
~/.cargo/bin
|
||||
~/.cargo/registry/index
|
||||
~/.cargo/registry/cache
|
||||
target
|
||||
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('Cargo.lock') }}
|
||||
- name: Generate test result and coverage report
|
||||
run: |
|
||||
cargo install cargo2junit grcov;
|
||||
cargo test $CARGO_OPTIONS -- -Z unstable-options --format json | cargo2junit > results.xml;
|
||||
zip -0 ccov.zip `find . \( -name "$PROJECT_NAME_UNDERSCORE*.gc*" \) -print`;
|
||||
grcov ccov.zip -s . -t lcov --llvm --ignore-not-existing --ignore "/*" --ignore "tests/*" -o lcov.info;
|
||||
- name: Upload test results
|
||||
uses: EnricoMi/publish-unit-test-result-action@v1
|
||||
with:
|
||||
check_name: Test Results
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: results.xml
|
||||
- name: Upload to CodeCov
|
||||
uses: codecov/codecov-action@v1
|
||||
with:
|
||||
# required for private repositories:
|
||||
# token: ${{ secrets.CODECOV_TOKEN }}
|
||||
files: ./lcov.info
|
||||
fail_ci_if_error: true
|
||||
88
.github/workflows/create_release_assets.yml
vendored
Normal file
88
.github/workflows/create_release_assets.yml
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
name: Publish release files for CD native environments
|
||||
|
||||
on:
|
||||
# workflow_run:
|
||||
# workflows: ["Check SemVer compliance"]
|
||||
# types:
|
||||
# - completed
|
||||
release:
|
||||
types: [ created ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform: [ ubuntu-latest, macos-latest, macos-13, windows-latest ]
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install cargo-deb
|
||||
run: cargo install cargo-deb
|
||||
if: ${{ matrix.platform == 'ubuntu-latest' }}
|
||||
shell: bash
|
||||
|
||||
- name: Check format
|
||||
run: cargo fmt --all -- --check
|
||||
|
||||
- name: Run clippy
|
||||
run: cargo clippy --all-targets --locked -- -D warnings
|
||||
|
||||
- name: Run clippy (All features)
|
||||
run: cargo clippy --all-targets --locked --all-features -- -D warnings
|
||||
|
||||
- name: Run tests
|
||||
run: cargo test
|
||||
|
||||
- name: Build in Release profile with all features enabled
|
||||
run: cargo build --release --all-features
|
||||
|
||||
- name: Rename Release (Unix)
|
||||
run: |
|
||||
cargo install default-target
|
||||
mkdir -p assets
|
||||
FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
|
||||
mv target/release/topgrade assets
|
||||
cd assets
|
||||
tar --format=ustar -czf $FILENAME.tar.gz topgrade
|
||||
rm topgrade
|
||||
ls .
|
||||
if: ${{ matrix.platform != 'windows-latest' }}
|
||||
shell: bash
|
||||
|
||||
- name: Build Debian-based system binary and create package
|
||||
# First remove the binary built by previous steps
|
||||
# because we don't want the auto-update feature,
|
||||
# then build the new binary without auto-updating.
|
||||
run: |
|
||||
rm -rf target/release
|
||||
cargo build --release
|
||||
cargo deb --no-build --no-strip
|
||||
if: ${{ matrix.platform == 'ubuntu-latest' }}
|
||||
shell: bash
|
||||
|
||||
- name: Move Debian-based system package
|
||||
run: |
|
||||
mkdir -p assets
|
||||
mv target/debian/*.deb assets
|
||||
if: ${{ matrix.platform == 'ubuntu-latest' }}
|
||||
shell: bash
|
||||
|
||||
- name: Rename Release (Windows)
|
||||
run: |
|
||||
cargo install default-target
|
||||
mkdir assets
|
||||
FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
|
||||
mv target/release/topgrade.exe assets/topgrade.exe
|
||||
cd assets
|
||||
powershell Compress-Archive -Path * -Destination ${FILENAME}.zip
|
||||
rm topgrade.exe
|
||||
ls .
|
||||
if: ${{ matrix.platform == 'windows-latest' }}
|
||||
shell: bash
|
||||
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: assets/*
|
||||
91
.github/workflows/create_release_assets_cross.yml
vendored
Normal file
91
.github/workflows/create_release_assets_cross.yml
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
name: Publish release files for non-cd-native environments
|
||||
|
||||
on:
|
||||
# workflow_run:
|
||||
# workflows: ["Check SemVer compliance"]
|
||||
# types:
|
||||
# - completed
|
||||
release:
|
||||
types: [ created ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target: [
|
||||
"aarch64-unknown-linux-gnu",
|
||||
"armv7-unknown-linux-gnueabihf",
|
||||
"x86_64-unknown-linux-musl",
|
||||
"aarch64-unknown-linux-musl",
|
||||
"x86_64-unknown-freebsd",
|
||||
]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install cargo-deb cross compilation dependencies
|
||||
run: sudo apt-get install libc6-arm64-cross libgcc-s1-arm64-cross
|
||||
if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' }}
|
||||
shell: bash
|
||||
|
||||
- name: Install cargo-deb
|
||||
run: cargo install cargo-deb
|
||||
if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' }}
|
||||
shell: bash
|
||||
|
||||
- name: install targets
|
||||
run: rustup target add ${{ matrix.target }}
|
||||
|
||||
- name: install cross
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cross@0.2.5
|
||||
|
||||
- name: Check format
|
||||
run: cross fmt --all -- --check
|
||||
|
||||
- name: Run clippy
|
||||
run: cross clippy --all-targets --locked --target ${{matrix.target}} -- -D warnings
|
||||
|
||||
- name: Run clippy (All features)
|
||||
run: cross clippy --locked --all-features --target ${{matrix.target}} -- -D warnings
|
||||
|
||||
- name: Run tests
|
||||
run: cross test --target ${{matrix.target}}
|
||||
|
||||
- name: Build in Release profile with all features enabled
|
||||
run: cross build --release --all-features --target ${{matrix.target}}
|
||||
|
||||
- name: Rename Release
|
||||
run: |
|
||||
mkdir -p assets
|
||||
FILENAME=topgrade-${{github.event.release.tag_name}}-${{matrix.target}}
|
||||
mv target/${{matrix.target}}/release/topgrade assets
|
||||
cd assets
|
||||
tar --format=ustar -czf $FILENAME.tar.gz topgrade
|
||||
rm topgrade
|
||||
ls .
|
||||
|
||||
- name: Build Debian-based system package without autoupdate feature
|
||||
# First remove the binary built by previous steps
|
||||
# because we don't want the auto-update feature,
|
||||
# then build the new binary without auto-updating.
|
||||
run: |
|
||||
rm -rf target/${{matrix.target}}
|
||||
cross build --release --target ${{matrix.target}}
|
||||
cargo deb --target=${{matrix.target}} --no-build --no-strip
|
||||
if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' }}
|
||||
shell: bash
|
||||
|
||||
- name: Move Debian-based system package
|
||||
run: |
|
||||
mkdir -p assets
|
||||
mv target/${{matrix.target}}/debian/*.deb assets
|
||||
if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' }}
|
||||
shell: bash
|
||||
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: assets/*
|
||||
70
.github/workflows/release-cross.yml
vendored
70
.github/workflows/release-cross.yml
vendored
@@ -1,70 +0,0 @@
|
||||
name: Publish release files for non-cd-native environments
|
||||
|
||||
on:
|
||||
# workflow_run:
|
||||
# workflows: ["Check SemVer compliance"]
|
||||
# types:
|
||||
# - completed
|
||||
release:
|
||||
types: [ created ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target: [ "aarch64-unknown-linux-gnu", "armv7-unknown-linux-gnueabihf", "x86_64-unknown-linux-musl", "aarch64-unknown-linux-musl", "x86_64-unknown-freebsd", ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
profile: minimal
|
||||
default: true
|
||||
override: true
|
||||
target: ${{ matrix.target }}
|
||||
components: rustfmt, clippy
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Check format
|
||||
with:
|
||||
use-cross: true
|
||||
command: fmt
|
||||
args: --all -- --check
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run clippy
|
||||
with:
|
||||
command: clippy
|
||||
use-cross: true
|
||||
args: --all-targets --locked --target ${{matrix.target}} -- -D warnings
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run clippy (All features)
|
||||
with:
|
||||
command: clippy
|
||||
use-cross: true
|
||||
args: --locked --all-features --target ${{matrix.target}} -- -D warnings
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run tests
|
||||
with:
|
||||
command: test
|
||||
use-cross: true
|
||||
args: --target ${{matrix.target}}
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Build
|
||||
with:
|
||||
command: build
|
||||
use-cross: true
|
||||
args: --release --all-features --target ${{matrix.target}}
|
||||
- name: Rename Release
|
||||
run: |
|
||||
mkdir assets
|
||||
FILENAME=topgrade-${{github.event.release.tag_name}}-${{matrix.target}}
|
||||
mv target/${{matrix.target}}/release/topgrade assets
|
||||
cd assets
|
||||
tar --format=ustar -czf $FILENAME.tar.gz topgrade
|
||||
rm topgrade
|
||||
ls .
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
files: assets/*
|
||||
77
.github/workflows/release.yml
vendored
77
.github/workflows/release.yml
vendored
@@ -1,77 +0,0 @@
|
||||
name: Publish release files for CD native environments
|
||||
|
||||
on:
|
||||
# workflow_run:
|
||||
# workflows: ["Check SemVer compliance"]
|
||||
# types:
|
||||
# - completed
|
||||
release:
|
||||
types: [ created ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform: [ ubuntu-latest, macos-latest, windows-latest ]
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
profile: minimal
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Check format
|
||||
with:
|
||||
command: fmt
|
||||
args: --all -- --check
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run clippy
|
||||
with:
|
||||
command: clippy
|
||||
args: --all-targets --locked -- -D warnings
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run clippy (All features)
|
||||
with:
|
||||
command: clippy
|
||||
args: --all-targets --locked --all-features -- -D warnings
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Run tests
|
||||
with:
|
||||
command: test
|
||||
- uses: actions-rs/cargo@v1.0.1
|
||||
name: Build
|
||||
with:
|
||||
command: build
|
||||
args: --release --all-features
|
||||
- name: Rename Release (Unix)
|
||||
run: |
|
||||
cargo install default-target
|
||||
mkdir assets
|
||||
FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
|
||||
mv target/release/topgrade assets
|
||||
cd assets
|
||||
tar --format=ustar -czf $FILENAME.tar.gz topgrade
|
||||
rm topgrade
|
||||
ls .
|
||||
if: ${{ matrix.platform != 'windows-latest' }}
|
||||
shell: bash
|
||||
- name: Rename Release (Windows)
|
||||
run: |
|
||||
cargo install default-target
|
||||
mkdir assets
|
||||
FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
|
||||
mv target/release/topgrade.exe assets/topgrade.exe
|
||||
cd assets
|
||||
powershell Compress-Archive -Path * -Destination ${FILENAME}.zip
|
||||
rm topgrade.exe
|
||||
ls .
|
||||
if: ${{ matrix.platform == 'windows-latest' }}
|
||||
shell: bash
|
||||
- name: Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
files: assets/*
|
||||
AUR publishing workflow hunk (file name not shown in this view):

@@ -14,8 +14,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Publish AUR package
        uses: ATiltedTree/create-aur-release@v1
        uses: aksh1618/update-aur-package@v1.0.5
        with:
          tag_version_prefix: v
          package_name: topgrade
          commit_username: "Thomas Schönauer"
          commit_email: t.schoenauer@hgs-wt.at
Crates.io publishing workflow hunks (file name not shown in this view):

@@ -12,7 +12,7 @@ jobs:
  prepare:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
@@ -21,7 +21,7 @@ jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: katyo/publish-crates@v1
      - uses: katyo/publish-crates@v2
        with:
          dry-run: true
          check-repo: ${{ github.event_name == 'push' }}
Homebrew formula bump workflow hunks (file name not shown in this view):

@@ -19,7 +19,7 @@ jobs:
        uses: Homebrew/actions/setup-homebrew@master
      - name: Cache Homebrew Bundler RubyGems
        id: cache
        uses: actions/cache@v1
        uses: actions/cache@v4
        with:
          path: ${{ steps.set-up-homebrew.outputs.gems-path }}
          key: ${{ runner.os }}-rubygems-${{ steps.set-up-homebrew.outputs.gems-hash }}
@@ -29,7 +29,8 @@ jobs:
        if: steps.cache.outputs.cache-hit != 'true'
        run: brew install-bundler-gems
      - name: Bump formulae
        uses: Homebrew/actions/bump-formulae@master
        uses: Homebrew/actions/bump-packages@master
        continue-on-error: true
        with:
          # Custom GitHub access token with only the 'public_repo' scope enabled
          token: ${{secrets.HOMEBREW_ACCESS_TOKEN}}
@@ -14,7 +14,7 @@ jobs:
|
||||
matrix:
|
||||
target: [x86_64, x86, aarch64]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build wheels
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
@@ -23,7 +23,7 @@ jobs:
|
||||
sccache: 'true'
|
||||
manylinux: auto
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
@@ -34,7 +34,7 @@ jobs:
|
||||
matrix:
|
||||
target: [x64, x86]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build wheels
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
@@ -42,7 +42,7 @@ jobs:
|
||||
args: --release --out dist
|
||||
sccache: 'true'
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
@@ -53,7 +53,7 @@ jobs:
|
||||
matrix:
|
||||
target: [x86_64, aarch64]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build wheels
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
@@ -61,7 +61,7 @@ jobs:
|
||||
args: --release --out dist
|
||||
sccache: 'true'
|
||||
- name: Upload wheels
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
@@ -69,14 +69,14 @@ jobs:
|
||||
sdist:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build sdist
|
||||
uses: PyO3/maturin-action@v1
|
||||
with:
|
||||
command: sdist
|
||||
args: --out dist
|
||||
- name: Upload sdist
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels
|
||||
path: dist
|
||||
@@ -87,7 +87,7 @@ jobs:
|
||||
if: "startsWith(github.ref, 'refs/tags/')"
|
||||
needs: [linux, windows, macos, sdist]
|
||||
steps:
|
||||
- uses: actions/download-artifact@v3
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: wheels
|
||||
- name: Publish to PyPI
|
||||
.github/workflows/release_to_winget.yml (new file, 13 lines, vendored)

@@ -0,0 +1,13 @@
name: Publish to WinGet
on:
  release:
    types: [released]
jobs:
  publish:
    runs-on: windows-latest
    steps:
      - uses: vedantmgoyal2009/winget-releaser@main
        with:
          identifier: topgrade-rs.topgrade
          max-versions-to-keep: 5 # keep only latest 5 versions
          token: ${{ secrets.WINGET_TOKEN }}
.gitignore (18 changes, vendored)

@@ -1,4 +1,20 @@
# JetBrains IDEs
.idea/

/target
# Visual Studio
.vs/

# Visual Studio Code
.vscode/

# Generic build outputs
/build

# Specific for some languages like Rust
/target

# LLVM profiling output
*.profraw

# Backup files for any .rs files in the project
**/*.rs.bk
38
.vscode/launch.json
vendored
38
.vscode/launch.json
vendored
@@ -1,38 +0,0 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Topgrade",
|
||||
"console": "integratedTerminal",
|
||||
"cargo": {
|
||||
"args": [
|
||||
"build",
|
||||
"--bin=topgrade-rs",
|
||||
"--package=topgrade-rs"
|
||||
],
|
||||
"filter": {
|
||||
"name": "topgrade-rs",
|
||||
"kind": "bin"
|
||||
}
|
||||
},
|
||||
"args": [
|
||||
"--only",
|
||||
"${input:step}",
|
||||
"-v"
|
||||
],
|
||||
"cwd": "${workspaceFolder}"
|
||||
},
|
||||
],
|
||||
"inputs": [
|
||||
{
|
||||
"type": "promptString",
|
||||
"id": "step",
|
||||
"description": "step name",
|
||||
}
|
||||
]
|
||||
}
|
||||
14
.vscode/tasks.json
vendored
14
.vscode/tasks.json
vendored
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"type": "cargo",
|
||||
"command": "clippy",
|
||||
"problemMatcher": [
|
||||
"$rustc"
|
||||
],
|
||||
"group": "test",
|
||||
"label": "rust: cargo clippy"
|
||||
}
|
||||
]
|
||||
}
|
||||
50
.vscode/topgrade.code-snippets
vendored
50
.vscode/topgrade.code-snippets
vendored
@@ -1,50 +0,0 @@
|
||||
{
|
||||
// Place your topgrade workspace snippets here. Each snippet is defined under a snippet name and has a scope, prefix, body and
|
||||
// description. Add comma separated ids of the languages where the snippet is applicable in the scope field. If scope
|
||||
// is left empty or omitted, the snippet gets applied to all languages. The prefix is what is
|
||||
// used to trigger the snippet and the body will be expanded and inserted. Possible variables are:
|
||||
// $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
|
||||
// Placeholders with the same ids are connected.
|
||||
// Example:
|
||||
// "Print to console": {
|
||||
// "scope": "javascript,typescript",
|
||||
// "prefix": "log",
|
||||
// "body": [
|
||||
// "console.log('$1');",
|
||||
// "$2"
|
||||
// ],
|
||||
// "description": "Log output to console"
|
||||
// }
|
||||
"Skip Step": {
|
||||
"scope": "rust",
|
||||
"prefix": "skipstep",
|
||||
"body": [
|
||||
"return Err(SkipStep(format!(\"$1\")).into());"
|
||||
]
|
||||
},
|
||||
"Step": {
|
||||
"scope": "rust",
|
||||
"prefix": "step",
|
||||
"body": [
|
||||
"pub fn $1(ctx: &ExecutionContext) -> Result<()> {",
|
||||
" $0",
|
||||
" Ok(())",
|
||||
"}"
|
||||
]
|
||||
},
|
||||
"Require Binary": {
|
||||
"scope": "rust",
|
||||
"prefix": "req",
|
||||
"description": "Require a binary to be installed",
|
||||
"body": [
|
||||
"let ${1:binary} = require(\"${1:binary}\")?;"
|
||||
]
|
||||
},
|
||||
"macos": {
|
||||
"scope": "rust",
|
||||
"prefix": "macos",
|
||||
"body": [
|
||||
"#[cfg(target_os = \"macos\")]"
|
||||
]
|
||||
}
|
||||
}
|
||||
BREAKINGCHANGES.md (new file, 6 lines)

@@ -0,0 +1,6 @@
# Containers step

* New default behavior: in previous versions, if you had both Docker and
  Podman installed, Podman was used by Topgrade. The default has now been
  changed to Docker. This can be overridden by setting the
  `containers.runtime` option in the configuration TOML to "podman".
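For reference, a minimal sketch of that override in `topgrade.toml`, based on the `[containers]` section of `config.example.toml` shown later in this diff (the key name and allowed values are taken from there):

```toml
[containers]
# Keep the previous behavior by explicitly selecting Podman.
# (default: "docker", allowed values: "docker", "podman")
runtime = "podman"
```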
BREAKINGCHANGES_dev.md (new file, 0 lines)
CONTRIBUTING hunks (file name not shown in this view):

@@ -1,6 +1,6 @@
## Contributing to `topgrade`

Thank you for your interest in contributing to `topgrade`!
We welcome and encourage contributions of all kinds, such as:

1. Issue reports or feature requests
@@ -13,9 +13,9 @@ for commit messages.
## Adding a new `step`

In `topgrade`'s terms, a package manager is called a `step`.
To add a new `step` to `topgrade`:

1. Add a new variant to
   [`enum Step`](https://github.com/topgrade-rs/topgrade/blob/cb7adc8ced8a77addf2cb051d18bba9f202ab866/src/config.rs#L100)

   ```rust
@@ -48,16 +48,16 @@ To add a new `step` to `topgrade`:

       // Invoke the new step to get things updated!
       ctx.run_type()
           .execute("xxx")
           .execute(xxx)
           .arg(/* args required by this step */)
           .status_checked()
   }
   ```

Such an update function would conventionally be named `run_xxx()`, where `xxx`
is the name of the new step, and it should take an argument of type
`&ExecutionContext`; this is adequate for most cases unless something extra is
needed (you can find some examples where extra arguments are needed
[here](https://github.com/topgrade-rs/topgrade/blob/7e48c5dedcfd5d0124bb9f39079a03e27ed23886/src/main.rs#L201-L219)).
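Putting these conventions together, a new step might look roughly like the sketch below. This is only an illustration: `xxx` is a placeholder tool name, and `require`, `ExecutionContext`, and `status_checked` are taken from the snippets elsewhere in this diff, so treat the exact signatures as assumptions rather than the crate's actual API.

```rust
// Hypothetical sketch of a conventional `run_xxx` step (not actual Topgrade source).
pub fn run_xxx(ctx: &ExecutionContext) -> Result<()> {
    // Skip the step cleanly if the underlying tool is not installed.
    let xxx = require("xxx")?;

    // Run the tool through the execution context so that --dry-run and
    // failure handling behave like every other step.
    ctx.run_type()
        .execute(xxx)
        .arg("update") // placeholder argument for illustration
        .status_checked()
}
```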
The update function would usually do three things:
@@ -90,8 +90,8 @@ To add a new `step` to `topgrade`:

## Modification to the configuration entries

If your PR has the configuration options
(in [`src/config.rs`](https://github.com/topgrade-rs/topgrade/blob/master/src/config.rs))
modified:

1. Adding new options
@@ -101,6 +101,21 @@ Be sure to apply your changes to
[`config.example.toml`](https://github.com/topgrade-rs/topgrade/blob/master/config.example.toml),
and have some basic documentation guiding users on how to use these options.

## Breaking changes

If your PR introduces a breaking change, document it in [`BREAKINGCHANGES_dev.md`][bc_dev];
it should be written in Markdown and wrapped at 80 columns, for example:

```md
1. The configuration location has been updated to x.

2. The step x has been removed.

3. ...
```

[bc_dev]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES_dev.md

## Before you submit your PR

Make sure your patch passes the following tests on your host:
@@ -133,5 +148,5 @@ Don't worry about other platforms, we have most of them covered in our CI.
```

If `xxx` respects the locale, the above code should work on an English system;
on a system that does not use English (e.g., one using Chinese), `"help"` may be
translated to `"帮助"`, and the above code won't work.
Cargo.lock (2439 changes, generated): file diff suppressed because it is too large.
Cargo.toml (55 changes)

@@ -5,9 +5,10 @@ categories = ["os"]
keywords = ["upgrade", "update"]
license = "GPL-3.0"
repository = "https://github.com/topgrade-rs/topgrade"
version = "12.0.2"
rust-version = "1.76.0"
version = "16.0.0"
authors = ["Roey Darwish Dror <roey.ghost@gmail.com>", "Thomas Schönauer <t.schoenauer@hgs-wt.at>"]
exclude = ["doc/screenshot.gif"]
exclude = ["doc/screenshot.gif", "BREAKINGCHANGES_dev.md"]
edition = "2021"

readme = "README.md"
@@ -22,26 +23,26 @@ path = "src/main.rs"
[dependencies]
home = "~0.5"
etcetera = "~0.8"
once_cell = "~1.17"
once_cell = "~1.19"
serde = { version = "~1.0", features = ["derive"] }
toml = "0.5"
which_crate = { version = "~4.1", package = "which" }
shellexpand = "~2.1"
clap = { version = "~3.1", features = ["cargo", "derive"] }
clap_complete = "~3.1"
clap_mangen = "~0.1"
walkdir = "~2.3"
toml = "0.8"
which_crate = { version = "~6.0", package = "which" }
shellexpand = "~3.1"
clap = { version = "~4.5", features = ["cargo", "derive"] }
clap_complete = "~4.5"
clap_mangen = "~0.2"
walkdir = "~2.5"
console = "~0.15"
lazy_static = "~1.4"
chrono = "~0.4"
glob = "~0.3"
strum = { version = "~0.24", features = ["derive"] }
strum = { version = "~0.26", features = ["derive"] }
thiserror = "~1.0"
tempfile = "~3.6"
tempfile = "~3.10"
cfg-if = "~1.0"
tokio = { version = "~1.18", features = ["process", "rt-multi-thread"] }
tokio = { version = "~1.38", features = ["process", "rt-multi-thread"] }
futures = "~0.3"
regex = "~1.7"
regex = "~1.10"
semver = "~1.0"
shell-words = "~1.1"
color-eyre = "~0.6"
@@ -49,25 +50,35 @@ tracing = { version = "~0.1", features = ["attributes", "log"] }
tracing-subscriber = { version = "~0.3", features = ["env-filter", "time"] }
merge = "~0.1"
regex-split = "~0.1"
notify-rust = "~4.8"
notify-rust = "~4.11"
wildmatch = "2.3.0"
rust-i18n = "3.0.1"
sys-locale = "0.3.1"

[package.metadata.generate-rpm]
assets = [{source = "target/release/topgrade", dest="/usr/bin/topgrade"}]
assets = [{ source = "target/release/topgrade", dest = "/usr/bin/topgrade" }]

[package.metadata.generate-rpm.requires]
git = "*"

[package.metadata.deb]
depends = "$auto,git"
name = "topgrade"
maintainer = "Chris Gelatt <kreeblah@gmail.com>"
copyright = "2024, Topgrade Team"
license-file = ["LICENSE", "0"]
depends = "$auto"
extended-description = "Keeping your system up to date usually involves invoking multiple package managers. This results in big, non-portable shell one-liners saved in your shell. To remedy this, Topgrade detects which tools you use and runs the appropriate commands to update them."
section = "utils"
priority = "optional"
default-features = true

[target.'cfg(unix)'.dependencies]
libc = "~0.2"
nix = "~0.24"
rust-ini = "~0.19"
self_update_crate = { version = "~0.30", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }
nix = { version = "~0.29", features = ["hostname", "signal", "user"] }
rust-ini = "~0.21"
self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }

[target.'cfg(windows)'.dependencies]
self_update_crate = { version = "~0.30", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
winapi = "~0.3"
parselnk = "~0.1"
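The `[package.metadata.deb]` and `[package.metadata.generate-rpm]` tables above are consumed by external cargo subcommands. Assuming `cargo-deb` (which the release workflows in this diff install) and the `cargo-generate-rpm` tool for the RPM metadata, local packaging would look roughly like this sketch:

```sh
# Build the binary first; both packagers read the metadata tables from Cargo.toml.
cargo build --release
cargo deb --no-build --no-strip   # uses [package.metadata.deb], as in the release workflow
cargo generate-rpm               # assumed tool for [package.metadata.generate-rpm]
```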
README.md (28 changes)

@@ -8,13 +8,9 @@
<a href="https://aur.archlinux.org/packages/topgrade"><img alt="AUR" src="https://img.shields.io/aur/version/topgrade.svg"></a>
<a href="https://formulae.brew.sh/formula/topgrade"><img alt="Homebrew" src="https://img.shields.io/homebrew/v/topgrade.svg"></a>

<img alt="Demo" src="doc/screenshot.gif" width="550px">
<img alt="Demo" src="doc/topgrade_demo.gif">
</div>

## Maintainers Wanted

I currently do not have enough time to maintain this project at the level it requires and deserves. For this reason I'm asking the community to help support the project and to work on resolving issues and creating new features. Thanks for all your help.

## Introduction

@@ -33,28 +29,32 @@ To remedy this, **Topgrade** detects which tools you use and runs the appropriat
- NixOS: [Nixpkgs](https://search.nixos.org/packages?show=topgrade)
- Void Linux: [XBPS](https://voidlinux.org/packages/?arch=x86_64&q=topgrade)
- macOS: [Homebrew](https://formulae.brew.sh/formula/topgrade) or [MacPorts](https://ports.macports.org/port/topgrade/)
- Windows: [Scoop](https://github.com/ScoopInstaller/Main/blob/master/bucket/topgrade.json)
- Windows: [Chocolatey][choco], [Scoop][scoop] or [Winget][winget]
- PyPi: [pip](https://pypi.org/project/topgrade/)

[choco]: https://community.chocolatey.org/packages/topgrade
[scoop]: https://scoop.sh/#/apps?q=topgrade
[winget]: https://winstall.app/apps/topgrade-rs.topgrade

Users of other systems can either use `cargo install` or the compiled binaries from the release page.
The compiled binaries contain a self-upgrading feature.
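For those platforms, the crates.io route mentioned above is simply the following (assuming a Rust toolchain is installed; the crate name matches the `name` in Cargo.toml):

```sh
cargo install topgrade
```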
> Currently, Topgrade requires Rust 1.65 or above. In general, Topgrade tracks
> the latest stable toolchain.

## Usage

Just run `topgrade`.

Visit the documentation at [topgrade-rs.github.io](https://topgrade-rs.github.io/) for more information.

> **Warning**
> Work in Progress

## Configuration

See `config.example.toml` for an example configuration file.

## Migration and Breaking Changes

Whenever there is a **breaking change**, the major version number will be bumped,
and we will document these changes in the release notes; please take a look at
them when updating to a major release.

> Got a question? Feel free to open an issue or discussion!

### Configuration Path

#### `CONFIG_DIR` on each platform
RELEASE_PROCEDURE.md (new file, 65 lines)

@@ -0,0 +1,65 @@
> This document lists the steps that lead to a successful release of Topgrade.

1. Open a PR that:

   > Here is an [Example PR](https://github.com/topgrade-rs/topgrade/pull/652)
   > that you can refer to.

   1. bumps the version number.

      > If there are breaking changes, the major version number should be increased.

   2. overwrites [`BREAKINGCHANGES`][breaking_changes] with
      [`BREAKINGCHANGES_dev`][breaking_changes_dev], and creates a new dev file:

      ```sh
      $ cd topgrade
      $ mv BREAKINGCHANGES_dev.md BREAKINGCHANGES.md
      $ touch BREAKINGCHANGES_dev.md
      ```

   [breaking_changes_dev]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES_dev.md
   [breaking_changes]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES.md

2. Check and merge that PR.

3. Go to the [release](https://github.com/topgrade-rs/topgrade/releases) page
   and click the [Draft a new release button](https://github.com/topgrade-rs/topgrade/releases/new).

4. Write the release notes.

   We usually use GitHub's [Automatically generated release notes][auto_gen_release_notes]
   functionality to generate the release notes, but you can write your own instead.

   [auto_gen_release_notes]: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes

5. Attach binaries.

   You don't need to do this yourself, as our CI will automatically do it for you:
   binaries for Linux, macOS and Windows will be created and attached.

   The CI will also publish the new binary to:

   1. AUR
   2. PyPi
   3. Homebrew (seems that this is not working correctly)
   4. Winget

6. Manually release it to Crates.io (the commands are collected in the block after this list).

   > Yeah, this is unfortunate, our CI won't do this for us. We should probably add one.

   1. `cd` to the Topgrade directory and make sure that it is the latest version
      (i.e., including the PR that bumps the version number).
   2. Set up your token with `cargo login`.
   3. Dry-run the publish with `cargo publish --dry-run`.
   4. If step 3 works, do the final release with `cargo publish`.

   > You can also take a look at the official tutorial [Publishing on crates.io][doc]
   >
   > [doc]: https://doc.rust-lang.org/cargo/reference/publishing.html
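For convenience, the crates.io commands from step 6 in one place (the same commands as above; run them from an up-to-date checkout that already contains the version bump):

```sh
cargo login               # paste your crates.io API token when prompted
cargo publish --dry-run   # verify that packaging and verification succeed
cargo publish             # the actual release
```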
Security policy hunk (file name not shown in this view):

@@ -6,6 +6,6 @@ We only support the latest major version and each subversion.

| Version | Supported |
| -------- | ------------------ |
| 10.0.x | :white_check_mark: |
| < 10.0 | :x: |
| 15.0.x | :white_check_mark: |
| < 15.0 | :x: |
@@ -2,168 +2,266 @@
|
||||
# [include] sections are processed in the order you write them
|
||||
# Files in $CONFIG_DIR/topgrade.d/ are automatically included before this file
|
||||
[include]
|
||||
#paths = ["/etc/topgrade.toml"]
|
||||
# paths = ["/etc/topgrade.toml"]
|
||||
|
||||
|
||||
[misc]
|
||||
# Don't ask for confirmations
|
||||
#assume_yes = true
|
||||
|
||||
# Disable specific steps - same options as the command line flag
|
||||
#disable = ["system", "emacs"]
|
||||
|
||||
# Ignore failures for these steps
|
||||
#ignore_failures = ["powershell"]
|
||||
|
||||
# Run specific steps - same options as the command line flag
|
||||
#only = ["system", "emacs"]
|
||||
|
||||
# Do not ask to retry failed steps (default: false)
|
||||
#no_retry = true
|
||||
|
||||
# Sudo command to be used
|
||||
#sudo_command = "sudo"
|
||||
|
||||
# Run `sudo -v` to cache credentials at the start of the run
|
||||
# This avoids a blocking password prompt in the middle of an unattended run
|
||||
#pre_sudo = false
|
||||
# (default: false)
|
||||
# pre_sudo = false
|
||||
|
||||
# Run inside tmux
|
||||
#run_in_tmux = true
|
||||
# Sudo command to be used
|
||||
# sudo_command = "sudo"
|
||||
|
||||
# Disable specific steps - same options as the command line flag
|
||||
# disable = ["system", "emacs"]
|
||||
|
||||
# Ignore failures for these steps
|
||||
# ignore_failures = ["powershell"]
|
||||
|
||||
# List of remote machines with Topgrade installed on them
|
||||
#remote_topgrades = ["toothless", "pi", "parnas"]
|
||||
|
||||
# Arguments to pass to SSH when upgrading remote systems
|
||||
#ssh_arguments = "-o ConnectTimeout=2"
|
||||
# remote_topgrades = ["toothless", "pi", "parnas"]
|
||||
|
||||
# Path to Topgrade executable on remote machines
|
||||
#remote_topgrade_path = ".cargo/bin/topgrade"
|
||||
# remote_topgrade_path = ".cargo/bin/topgrade"
|
||||
|
||||
# Arguments to pass to SSH when upgrading remote systems
|
||||
# ssh_arguments = "-o ConnectTimeout=2"
|
||||
|
||||
# Arguments to pass tmux when pulling Repositories
|
||||
#tmux_arguments = "-S /var/tmux.sock"
|
||||
# tmux_arguments = "-S /var/tmux.sock"
|
||||
|
||||
# Do not set the terminal title
|
||||
#set_title = false
|
||||
# Do not set the terminal title (default: true)
|
||||
# set_title = true
|
||||
|
||||
# Display the time in step titles
|
||||
# Display the time in step titles (default: true)
|
||||
# display_time = true
|
||||
|
||||
# Cleanup temporary or old files
|
||||
#cleanup = true
|
||||
# Don't ask for confirmations (no default value)
|
||||
# assume_yes = true
|
||||
|
||||
# Skip sending a notification at the end of a run
|
||||
#skip_notify = true
|
||||
# Do not ask to retry failed steps (default: false)
|
||||
# no_retry = true
|
||||
|
||||
# Whether to self update (this is ignored if the binary has been built without self update support, available also via setting the environment variable TOPGRADE_NO_SELF_UPGRADE)
|
||||
#no_self_update = true
|
||||
# Run inside tmux (default: false)
|
||||
# run_in_tmux = true
|
||||
|
||||
# Changes the way topgrade interacts with
|
||||
# the tmux session, creating the session
|
||||
# and only attaching to it if not inside tmux
|
||||
# (default: "attach_if_not_in_session", allowed values: "attach_if_not_in_session", "attach_always")
|
||||
# tmux_session_mode = "attach_if_not_in_session"
|
||||
|
||||
# Cleanup temporary or old files (default: false)
|
||||
# cleanup = true
|
||||
|
||||
# Send a notification for every step (default: false)
|
||||
# notify_each_step = false
|
||||
|
||||
# Skip sending a notification at the end of a run (default: false)
|
||||
# skip_notify = true
|
||||
|
||||
# The Bash-it branch to update (default: "stable")
|
||||
# bashit_branch = "stable"
|
||||
|
||||
# Run specific steps - same options as the command line flag
|
||||
# only = ["system", "emacs"]
|
||||
|
||||
# Whether to self update
|
||||
#
|
||||
# this will be ignored if the binary is built without self update support
|
||||
#
|
||||
# available also via setting the environment variable TOPGRADE_NO_SELF_UPGRADE)
|
||||
# no_self_update = true
|
||||
|
||||
# Extra tracing filter directives
|
||||
# These are prepended to the `--log-filter` argument
|
||||
# See: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives
|
||||
# log_filters = ["topgrade::command=debug", "warn"]
|
||||
|
||||
# Extra Home Manager arguments
|
||||
#home_manager_arguments = ["--flake", "file"]
|
||||
|
||||
# Commands to run before anything
|
||||
[pre_commands]
|
||||
#"Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
# "Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
|
||||
|
||||
# Commands to run after anything
|
||||
[post_commands]
|
||||
#"Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
# "Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
|
||||
|
||||
# Custom commands
|
||||
[commands]
|
||||
#"Python Environment" = "~/dev/.env/bin/pip install -i https://pypi.python.org/simple -U --upgrade-strategy eager jupyter"
|
||||
#"Custom command using interactive shell (unix)" = "-i vim_upgrade"
|
||||
# "Python Environment" = "~/dev/.env/bin/pip install -i https://pypi.python.org/simple -U --upgrade-strategy eager jupyter"
|
||||
# "Custom command using interactive shell (unix)" = "-i vim_upgrade"
|
||||
|
||||
|
||||
[python]
|
||||
#enable_pip_review = true ###disabled by default
|
||||
#enable_pip_review_local = true ###disabled by default
|
||||
#enable_pipupgrade = true ###disabled by default
|
||||
#pipupgrade_arguments = "-y -u --pip-path pip" ###disabled by default
|
||||
# enable_pip_review = true ###disabled by default
|
||||
# enable_pip_review_local = true ###disabled by default
|
||||
# enable_pipupgrade = true ###disabled by default
|
||||
# pipupgrade_arguments = "-y -u --pip-path pip" ###disabled by default
|
||||
|
||||
|
||||
[composer]
|
||||
#self_update = true
|
||||
# self_update = true
|
||||
|
||||
|
||||
[brew]
|
||||
#greedy_cask = true
|
||||
#autoremove = true
|
||||
# For the BrewCask step
|
||||
# If `Repo Cask Upgrade` exists, then use the `-a` option.
|
||||
# Otherwise, use the `--greedy` option.
|
||||
# greedy_cask = true
|
||||
|
||||
# For the BrewCask step
|
||||
# If `Repo Cask Upgrade` does not exist, then use the `--greedy_latest` option.
|
||||
# NOTE: the above entry `greedy_cask` contains this entry, though you can enable
|
||||
# both of them, they won't clash with each other.
|
||||
# greedy_latest = true
|
||||
|
||||
# For the BrewCask step
|
||||
# If `Repo Cask Upgrade` does not exist, then use the `--greedy_auto_updates` option.
|
||||
# NOTE: the above entry `greedy_cask` contains this entry, though you can enable
|
||||
# both of them, they won't clash with each other.
|
||||
# greedy_auto_updates = true
|
||||
|
||||
# For the BrewFormula step
|
||||
# Execute `brew autoremove` after the step.
|
||||
# autoremove = true
|
||||
|
||||
# For the BrewFormula step
|
||||
# Upgrade formulae built from the HEAD branch; `brew upgrade --fetch-HEAD`
|
||||
# fetch_head = true
|
||||
|
||||
|
||||
[linux]
|
||||
# Arch Package Manager to use. Allowed values: autodetect, aura, garuda_update, pacman, pamac, paru, pikaur, trizen, yay.
|
||||
#arch_package_manager = "pacman"
|
||||
# Arch Package Manager to use.
|
||||
# Allowed values:
|
||||
# autodetect, aura, garuda_update, pacman, pamac, paru, pikaur, trizen, yay
|
||||
# arch_package_manager = "pacman"
|
||||
|
||||
# Arguments to pass yay (or paru) when updating packages
|
||||
#yay_arguments = "--nodevel"
|
||||
# yay_arguments = "--nodevel"
|
||||
|
||||
# Arguments to pass dnf when updating packages
|
||||
#dnf_arguments = "--refresh"
|
||||
#aura_aur_arguments = "-kx"
|
||||
#aura_pacman_arguments = ""
|
||||
#garuda_update_arguments = ""
|
||||
#show_arch_news = true
|
||||
#trizen_arguments = "--devel"
|
||||
#pikaur_arguments = ""
|
||||
#pamac_arguments = "--no-devel"
|
||||
#enable_tlmgr = true
|
||||
#emerge_sync_flags = "-q"
|
||||
#emerge_update_flags = "-uDNa --with-bdeps=y world"
|
||||
#redhat_distro_sync = false
|
||||
#suse_dup = false
|
||||
#rpm_ostree = false
|
||||
#nix_arguments = "--flake"
|
||||
# dnf_arguments = "--refresh"
|
||||
|
||||
# aura_aur_arguments = "-kx"
|
||||
|
||||
# aura_pacman_arguments = ""
|
||||
# garuda_update_arguments = ""
|
||||
|
||||
# show_arch_news = true
|
||||
|
||||
# trizen_arguments = "--devel"
|
||||
|
||||
# pikaur_arguments = ""
|
||||
|
||||
# pamac_arguments = "--no-devel"
|
||||
|
||||
# enable_tlmgr = true
|
||||
|
||||
# emerge_sync_flags = "-q"
|
||||
|
||||
# emerge_update_flags = "-uDNa --with-bdeps=y world"
|
||||
|
||||
# redhat_distro_sync = false
|
||||
|
||||
# suse_dup = false
|
||||
|
||||
# rpm_ostree = false
|
||||
|
||||
# nix_arguments = "--flake"
|
||||
|
||||
# nix_env_arguments = "--prebuilt-only"
|
||||
|
||||
# Extra Home Manager arguments
|
||||
# home_manager_arguments = ["--flake", "file"]
|
||||
|
||||
|
||||
[git]
|
||||
#max_concurrency = 5
|
||||
# How many repos to pull at max in parallel
|
||||
# max_concurrency = 5
|
||||
|
||||
# Additional git repositories to pull
|
||||
#repos = [
|
||||
# "~/src/*/",
|
||||
# "~/.config/something"
|
||||
#]
|
||||
# repos = [
|
||||
# "~/src/*/",
|
||||
# "~/.config/something"
|
||||
# ]
|
||||
|
||||
# Don't pull the predefined git repos
|
||||
#pull_predefined = false
|
||||
# pull_predefined = false
|
||||
|
||||
# Arguments to pass Git when pulling Repositories
|
||||
#arguments = "--rebase --autostash"
|
||||
# arguments = "--rebase --autostash"
|
||||
|
||||
|
||||
[windows]
|
||||
# Manually select Windows updates
|
||||
#accept_all_updates = false
|
||||
#open_remotes_in_new_terminal = true
|
||||
#wsl_update_pre_release = true
|
||||
#wsl_update_use_web_download = true
|
||||
# accept_all_updates = false
|
||||
|
||||
# open_remotes_in_new_terminal = true
|
||||
|
||||
# wsl_update_pre_release = true
|
||||
|
||||
# wsl_update_use_web_download = true
|
||||
|
||||
# Causes Topgrade to rename itself during the run to allow package managers
|
||||
# to upgrade it. Use this only if you installed Topgrade by using a package
|
||||
# manager such as Scoop or Cargo
|
||||
#self_rename = true
|
||||
# self_rename = true
|
||||
|
||||
|
||||
[npm]
|
||||
# Use sudo if the NPM directory isn't owned by the current user
|
||||
#use_sudo = true
|
||||
# use_sudo = true
|
||||
|
||||
|
||||
[yarn]
|
||||
# Run `yarn global upgrade` with `sudo`
|
||||
#use_sudo = true
|
||||
# use_sudo = true
|
||||
|
||||
|
||||
[vim]
|
||||
# For `vim-plug`, execute `PlugUpdate!` instead of `PlugUpdate`
|
||||
#force_plug_update = true
|
||||
# force_plug_update = true
|
||||
|
||||
|
||||
[firmware]
|
||||
# Offer to update firmware; if false just check for and display available updates
|
||||
#upgrade = true
|
||||
# upgrade = true
|
||||
|
||||
|
||||
[vagrant]
|
||||
# Vagrant directories
|
||||
#directories = []
|
||||
# directories = []
|
||||
|
||||
# power on vagrant boxes if needed
|
||||
#power_on = true
|
||||
# power_on = true
|
||||
|
||||
# Always suspend vagrant boxes instead of powering off
|
||||
#always_suspend = true
|
||||
# always_suspend = true
|
||||
|
||||
|
||||
[flatpak]
|
||||
# Use sudo for updating the system-wide installation
|
||||
#use_sudo = true
|
||||
# use_sudo = true
|
||||
|
||||
|
||||
[distrobox]
|
||||
#use_root = false
|
||||
#containers = ["archlinux-latest"]
|
||||
# use_root = false
|
||||
|
||||
# containers = ["archlinux-latest"]
|
||||
[containers]
|
||||
# Specify the containers to ignore while updating (Wildcard supported)
|
||||
# ignored_containers = ["ghcr.io/rancher-sandbox/rancher-desktop/rdx-proxy:latest", "docker.io*"]
|
||||
# Specify the runtime to use for containers (default: "docker", allowed values: "docker", "podman")
|
||||
# runtime = "podman"
|
||||
|
||||
[lensfun]
|
||||
# If disabled, Topgrade invokes `lensfun-update-data` without root privilege,
# so the update will only be available to you. Otherwise, `sudo` is required,
# and the update will be installed system-wide, i.e., available to all users.
|
||||
# (default: false)
|
||||
# use_sudo = false
|
||||
|
||||
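The `[brew]` options documented above (`greedy_cask`, `greedy_latest`, `greedy_auto_updates`) map onto Homebrew's cask-upgrade flags. A minimal sketch, assuming that flag mapping (the BrewCask step itself is not shown in this section), built on the `Config` accessors added later in this diff:

// Sketch only: `brew_greedy_latest()` and `brew_greedy_auto_updates()` are the
// accessors added in src/config.rs below; the flag names are Homebrew's
// `brew upgrade --cask` flags (assumption, not confirmed by this diff).
fn extra_cask_flags(config: &Config) -> Vec<&'static str> {
    let mut flags = Vec::new();
    // `greedy_cask` (i.e. `--greedy`) already implies both of these flags,
    // which is why enabling them together does not clash.
    if config.brew_greedy_latest() {
        flags.push("--greedy-latest");
    }
    if config.brew_greedy_auto_updates() {
        flags.push("--greedy-auto-updates");
    }
    flags
}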
Binary file not shown. (Before: 718 KiB)
BIN
doc/topgrade_demo.gif
Normal file
Binary file not shown. (After: 4.1 MiB)
307
locales/app.yml
Normal file
@@ -0,0 +1,307 @@
|
||||
_version: 2
|
||||
|
||||
"Current system locale is {system_locale}":
|
||||
en: "Current system locale is %{system_locale}"
|
||||
"Dry running: {program_name} {arguments}":
|
||||
en: "Dry running: %{program_name} %{arguments}"
|
||||
"in {directory}":
|
||||
en: "in %{directory}"
|
||||
"Rebooting...":
|
||||
en: "Rebooting..."
|
||||
"Plugins upgraded":
|
||||
en: "Plugins upgraded"
|
||||
"Would self-update":
|
||||
en: "Would self-update"
|
||||
"Pulling":
|
||||
en: "Pulling"
|
||||
"No Breaking changes":
|
||||
en: "No Breaking changes"
|
||||
"Dropping you to shell. Fix what you need and then exit the shell.":
|
||||
en: "Dropping you to shell. Fix what you need and then exit the shell."
|
||||
"Topgrade launched in a new tmux session":
|
||||
en: "Topgrade launched in a new tmux session"
|
||||
'Topgrade upgraded to {version}:\n':
|
||||
en: 'Topgrade upgraded to %{version}:\n'
|
||||
"Topgrade is up-to-date":
|
||||
en: "Topgrade is up-to-date"
|
||||
"Updating modules...":
|
||||
en: "Updating modules..."
|
||||
"Powershell Modules Update":
|
||||
en: "Powershell Modules Update"
|
||||
"Powershell is not installed":
|
||||
en: "Powershell is not installed"
|
||||
"Error detecting current distribution: {error}":
|
||||
en: "Error detecting current distribution: %{error}"
|
||||
"Error: {error}":
|
||||
en: "Error: %{error}"
|
||||
"Failed":
|
||||
en: "Failed"
|
||||
"pulling":
|
||||
en: "pulling"
|
||||
"Changed":
|
||||
en: "Changed"
|
||||
"Up-to-date":
|
||||
en: "Up-to-date"
|
||||
"Self update":
|
||||
en: "Self update"
|
||||
|
||||
# The following 2 strings are used in the same sentence
|
||||
"Only":
|
||||
en: "Only"
|
||||
"updated repositories will be shown...":
|
||||
en: "updated repositories will be shown..."
|
||||
|
||||
"because it has no remotes":
|
||||
en: "because it has no remotes"
|
||||
"Skipping":
|
||||
en: "Skipping"
|
||||
"Aura(<0.4.6) requires sudo installed to work with AUR packages":
|
||||
en: "Aura(<0.4.6) requires sudo installed to work with AUR packages"
|
||||
"Pacman backup configuration files found:":
|
||||
en: "Pacman backup configuration files found:"
|
||||
"The package audit was successful, but vulnerable packages still remain on the system":
|
||||
en: "The package audit was successful, but vulnerable packages still remain on the system"
|
||||
"Syncing portage":
|
||||
en: "Syncing portage"
|
||||
"Finding available software":
|
||||
en: "Finding available software"
|
||||
"A system update is available. Do you wish to install it?":
|
||||
en: "A system update is available. Do you wish to install it?"
|
||||
"No new software available.":
|
||||
en: "No new software available."
|
||||
"No Xcode releases installed.":
|
||||
en: "No Xcode releases installed."
|
||||
"Would you like to move the former Xcode release to the trash?":
|
||||
en: "Would you like to move the former Xcode release to the trash?"
|
||||
"New Xcode release detected:":
|
||||
en: "New Xcode release detected:"
|
||||
"Would you like to install it?":
|
||||
en: "Would you like to install it?"
|
||||
"No global packages installed":
|
||||
en: "No global packages installed"
|
||||
"Remote Topgrade launched in Tmux":
|
||||
en: "Remote Topgrade launched in Tmux"
|
||||
"Remote Topgrade launched in an external terminal":
|
||||
en: "Remote Topgrade launched in an external terminal"
|
||||
"Collecting Vagrant boxes":
|
||||
en: "Collecting Vagrant boxes"
|
||||
"No Vagrant directories were specified in the configuration file":
|
||||
en: "No Vagrant directories were specified in the configuration file"
|
||||
"Vagrant boxes":
|
||||
en: "Vagrant boxes"
|
||||
"No outdated boxes":
|
||||
en: "No outdated boxes"
|
||||
"Summary":
|
||||
en: "Summary"
|
||||
"Topgrade finished with errors":
|
||||
en: "Topgrade finished with errors"
|
||||
"Topgrade finished successfully":
|
||||
en: "Topgrade finished successfully"
|
||||
"Topgrade {version_str} Breaking Changes":
|
||||
en: "Topgrade %{version_str} Breaking Changes"
|
||||
"Path {path} expanded to {expanded}":
|
||||
en: "Path %{path} expanded to %{expanded}"
|
||||
"Path {path} doesn't exist":
|
||||
en: "Path %{path} doesn't exist"
|
||||
"Cannot find {binary_name} in PATH":
|
||||
en: "Cannot find %{binary_name} in PATH"
|
||||
"Failed to get a UTF-8 encoded hostname":
|
||||
en: "Failed to get a UTF-8 encoded hostname"
|
||||
"Failed to get hostname: {err}":
|
||||
en: "Failed to get hostname: %{err}"
|
||||
"{python} is a Python 2, skip.":
|
||||
en: "%{python} is a Python 2, skip."
|
||||
"{python} is a Python shim, skip.":
|
||||
en: "%{python} is a Python shim, skip."
|
||||
"{key} failed:":
|
||||
en: "%{key} failed:"
|
||||
"{step_name} failed":
|
||||
en: "%{step_name} failed"
|
||||
"DragonFly BSD Packages":
|
||||
en: "DragonFly BSD Packages"
|
||||
"DragonFly BSD Audit":
|
||||
en: "DragonFly BSD Audit"
|
||||
"FreeBSD Update":
|
||||
en: "FreeBSD Update"
|
||||
"FreeBSD Packages":
|
||||
en: "FreeBSD Packages"
|
||||
"FreeBSD Audit":
|
||||
en: "FreeBSD Audit"
|
||||
"System update":
|
||||
en: "System update"
|
||||
"needrestart will be ran by the package manager":
|
||||
en: "needrestart will be ran by the package manager"
|
||||
"Check for needed restarts":
|
||||
en: "Check for needed restarts"
|
||||
"Should not run in WSL":
|
||||
en: "Should not run in WSL"
|
||||
"Firmware upgrades":
|
||||
en: "Firmware upgrades"
|
||||
"Flatpak System Packages":
|
||||
en: "Flatpak System Packages"
|
||||
"Snapd socket does not exist":
|
||||
en: "Snapd socket does not exist"
|
||||
"You need to specify at least one container":
|
||||
en: "You need to specify at least one container"
|
||||
"Skipped in --yes":
|
||||
en: "Skipped in --yes"
|
||||
"Configuration update":
|
||||
en: "Configuration update"
|
||||
"Going to execute `waydroid upgrade`, which would STOP the running container, is this ok?":
|
||||
en: "Going to execute `waydroid upgrade`, which would STOP the running container, is this ok?"
|
||||
"Skip the Waydroid step because the user don't want to proceed":
|
||||
en: "Skip the Waydroid step because the user don't want to proceed"
|
||||
"macOS App Store":
|
||||
en: "macOS App Store"
|
||||
"macOS system update":
|
||||
en: "macOS system update"
|
||||
"OpenBSD Update":
|
||||
en: "OpenBSD Update"
|
||||
"OpenBSD Packages":
|
||||
en: "OpenBSD Packages"
|
||||
"`fisher` is not defined in `fish`":
|
||||
en: "`fisher` is not defined in `fish`"
|
||||
"`fish_plugins` path doesn't exist: {err}":
|
||||
en: "`fish_plugins` path doesn't exist: %{err}"
|
||||
"`fish_update_completions` is not available":
|
||||
en: "`fish_update_completions` is not available"
|
||||
"Desktop doest not appear to be gnome":
|
||||
en: "Desktop doest not appear to be gnome"
|
||||
"Gnome shell extensions are unregistered in DBus":
|
||||
en: "Gnome shell extensions are unregistered in DBus"
|
||||
"Gnome Shell extensions":
|
||||
en: "Gnome Shell extensions"
|
||||
"Not a custom brew for macOS":
|
||||
en: "Not a custom brew for macOS"
|
||||
"Guix Pull Failed, Skipping":
|
||||
en: "Guix Pull Failed, Skipping"
|
||||
"Nix-darwin on macOS must be upgraded via darwin-rebuild switch":
|
||||
en: "Nix-darwin on macOS must be upgraded via darwin-rebuild switch"
|
||||
"`nix upgrade-nix` can only be used on macOS or non-NixOS Linux":
|
||||
en: "`nix upgrade-nix` can only be used on macOS or non-NixOS Linux"
|
||||
"`nix upgrade-nix` cannot be run when Nix is installed in a profile":
|
||||
en: "`nix upgrade-nix` cannot be run when Nix is installed in a profile"
|
||||
"Nix (self-upgrade)":
|
||||
en: "Nix (self-upgrade)"
|
||||
"Pyenv is installed, but $PYENV_ROOT is not set correctly":
|
||||
en: "Pyenv is installed, but $PYENV_ROOT is not set correctly"
|
||||
"pyenv is not a git repository":
|
||||
en: "pyenv is not a git repository"
|
||||
"Bun Packages":
|
||||
en: "Bun Packages"
|
||||
"WSL not installed":
|
||||
en: "WSL not installed"
|
||||
"Update WSL":
|
||||
en: "Update WSL"
|
||||
"Could not find Topgrade installed in WSL":
|
||||
en: "Could not find Topgrade installed in WSL"
|
||||
"Consider installing PSWindowsUpdate as the use of Windows Update via USOClient is not supported.":
|
||||
en: "Consider installing PSWindowsUpdate as the use of Windows Update via USOClient is not supported."
|
||||
"USOClient not supported.":
|
||||
en: "USOClient not supported."
|
||||
"Connecting to {hostname}...":
|
||||
en: "Connecting to %{hostname}..."
|
||||
"Skipping powered off box {vagrant_box}":
|
||||
en: "Skipping powered off box %{vagrant_box}"
|
||||
"`{repo_tag}` for `{platform}`":
|
||||
en: "`%{repo_tag}` for `%{platform}`"
|
||||
"Containers":
|
||||
en: "Containers"
|
||||
"Emacs directory does not exist":
|
||||
en: "Emacs directory does not exist"
|
||||
"Error getting the composer directory: {error}":
|
||||
en: "Error getting the composer directory: %{error}"
|
||||
"Composer directory {composer_home} isn't a descendant of the user's home directory":
|
||||
en: "Composer directory %{composer_home} isn't a descendant of the user's home directory"
|
||||
"Composer":
|
||||
en: "Composer"
|
||||
"Error running `dotnet tool list`. This is expected when a dotnet runtime is installed but no SDK.":
|
||||
en: "Error running `dotnet tool list`. This is expected when a dotnet runtime is installed but no SDK."
|
||||
"No dotnet global tools installed":
|
||||
en: "No dotnet global tools installed"
|
||||
"Racket Package Manager":
|
||||
en: "Racket Package Manager"
|
||||
"GH failed":
|
||||
en: "GH failed"
|
||||
"GitHub CLI Extensions":
|
||||
en: "GitHub CLI Extensions"
|
||||
"Julia Packages":
|
||||
en: "Julia Packages"
|
||||
"Update ClamAV Database(FreshClam)":
|
||||
en: "Update ClamAV Database(FreshClam)"
|
||||
"Path {pattern} did not contain any git repositories":
|
||||
en: "Path %{pattern} did not contain any git repositories"
|
||||
"No repositories to pull":
|
||||
en: "No repositories to pull"
|
||||
"Git repositories":
|
||||
en: "Git repositories"
|
||||
"Would pull {repo}":
|
||||
en: "Would pull %{repo}"
|
||||
"Node Package Manager":
|
||||
en: "Node Package Manager"
|
||||
"Performant Node Package Manager":
|
||||
en: "Performant Node Package Manager"
|
||||
"Yarn Package Manager":
|
||||
en: "Yarn Package Manager"
|
||||
"Deno installed outside of .deno directory":
|
||||
en: "Deno installed outside of .deno directory"
|
||||
"The Ultimate vimrc":
|
||||
en: "The Ultimate vimrc"
|
||||
"vim binary might be actually nvim":
|
||||
en: "vim binary might be actually nvim"
|
||||
"`{process}` failed: {exit_satus}":
|
||||
en: "`%{process}` failed: %{exit_satus}"
|
||||
"`{process}` failed: {exit_satus} with {output}":
|
||||
en: "`%{process}` failed: %{exit_satus} with %{output}"
|
||||
"Unknown Linux Distribution":
|
||||
en: "Unknown Linux Distribution"
|
||||
'File "/etc/os-release" does not exist or is empty':
|
||||
en: 'File "/etc/os-release" does not exist or is empty'
|
||||
"Failed getting the system package manager":
|
||||
en: "Failed getting the system package manager"
|
||||
"A step failed":
|
||||
en: "A step failed"
|
||||
"Dry running":
|
||||
en: "Dry running"
|
||||
"Topgrade Upgraded":
|
||||
en: "Topgrade Upgraded"
|
||||
"OK":
|
||||
en: "OK"
|
||||
"FAILED":
|
||||
en: "FAILED"
|
||||
"IGNORED":
|
||||
en: "IGNORED"
|
||||
"SKIPPED":
|
||||
en: "SKIPPED"
|
||||
|
||||
# 'Y' and 'N' have to stay the same characters. Eg for German the translation
|
||||
# would look sth like "(Y) Ja / (N) Nein"
|
||||
"(Y)es/(N)o":
|
||||
en: "(Y)es/(N)o"
|
||||
# 'y', 'N', 's', 'q' have to stay the same throughout all translations.
|
||||
# Eg German would look like "(y) Wiederholen / (N) Nein / (s) Konsole / (q) Beenden"
|
||||
"Retry? (y)es/(N)o/(s)hell/(q)uit":
|
||||
en: "Retry? (y)es/(N)o/(s)hell/(q)uit"
|
||||
# 'R', 'S', 'Q' have to stay the same throughout all translations. Eg German would look like "\n(R) Neustarten\n(S) Konsole\n(Q) Beenden"
|
||||
'\n(R)eboot\n(S)hell\n(Q)uit':
|
||||
en: '\n(R)eboot\n(S)hell\n(Q)uit'
|
||||
"Require sudo or counterpart but not found, skip":
|
||||
en: "Require sudo or counterpart but not found, skip"
|
||||
"sudo as user '{user}'":
|
||||
en: "sudo as user '%{user}'"
|
||||
"Updating aqua ...":
|
||||
en: "Updating aqua ..."
|
||||
"Updating aqua installed cli tools ...":
|
||||
en: "Updating aqua installed cli tools ..."
|
||||
"Updating Volta packages...":
|
||||
en: "Updating Volta packages..."
|
||||
"No packages installed with Volta":
|
||||
en: "No packages installed with Volta"
|
||||
"pyenv-update plugin is not installed":
|
||||
en: "pyenv-update plugin is not installed"
|
||||
"Respawning...":
|
||||
en: "Respawning..."
|
||||
"Could not find Topgrade in any WSL disribution":
|
||||
en: "Could not find Topgrade in any WSL disribution"
|
||||
"Windows Update":
|
||||
en: "Windows Update"
|
||||
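The locale file above uses rust-i18n's version-2 YAML format: each key maps to per-language translations, and `%{name}` placeholders are filled by named arguments to the `t!` macro. A minimal, self-contained sketch of how such an entry is looked up (the `i18n!` initialization shown here is an assumption; Topgrade's actual setup is not part of this section):

use rust_i18n::t;

// Load every file under locales/ (including app.yml) at compile time.
rust_i18n::i18n!("locales");

fn main() {
    // "%{hostname}" in app.yml is substituted with the named argument below.
    println!("{}", t!("Connecting to {hostname}...", hostname = "example.org"));
}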
2
rust-toolchain.toml
Normal file
@@ -0,0 +1,2 @@
[toolchain]
channel = "1.76.0"
171
src/breaking_changes.rs
Normal file
@@ -0,0 +1,171 @@
|
||||
//! Inform the users of the breaking changes introduced in this major release.
|
||||
//!
|
||||
//! Print the breaking changes and possibly a migration guide when:
|
||||
//! 1. The Topgrade being executed is a new major release
|
||||
//! 2. This is the first launch of that major release
|
||||
|
||||
use crate::terminal::print_separator;
|
||||
#[cfg(windows)]
|
||||
use crate::WINDOWS_DIRS;
|
||||
#[cfg(unix)]
|
||||
use crate::XDG_DIRS;
|
||||
use color_eyre::eyre::Result;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use rust_i18n::t;
|
||||
use std::{
|
||||
env::var,
|
||||
fs::{read_to_string, OpenOptions},
|
||||
io::Write,
|
||||
path::PathBuf,
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
/// Version string x.y.z
|
||||
static VERSION_STR: &str = env!("CARGO_PKG_VERSION");
|
||||
|
||||
/// Version info
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Version {
|
||||
_major: u64,
|
||||
minor: u64,
|
||||
patch: u64,
|
||||
}
|
||||
|
||||
impl FromStr for Version {
|
||||
type Err = std::convert::Infallible;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
const NOT_SEMVER: &str = "Topgrade version is not semantic";
|
||||
const NOT_NUMBER: &str = "Topgrade version is not dot-separated numbers";
|
||||
|
||||
let mut iter = s.split('.').take(3);
|
||||
let major = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
|
||||
let minor = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
|
||||
let patch = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
|
||||
|
||||
// They cannot be all 0s
|
||||
assert!(
|
||||
!(major == 0 && minor == 0 && patch == 0),
|
||||
"Version numbers cannot be all 0s"
|
||||
);
|
||||
|
||||
Ok(Self {
|
||||
_major: major,
|
||||
minor,
|
||||
patch,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Version {
|
||||
/// True if this version is a new major release.
|
||||
pub(crate) fn is_new_major_release(&self) -> bool {
|
||||
// We have already checked that they cannot all be zeros, so `self.major`
|
||||
// is guaranteed to be non-zero.
|
||||
self.minor == 0 && self.patch == 0
|
||||
}
|
||||
}
|
||||
|
||||
/// Topgrade's breaking changes
|
||||
///
|
||||
/// We store them in the compiled binary.
|
||||
pub(crate) static BREAKINGCHANGES: &str = include_str!("../BREAKINGCHANGES.md");
|
||||
|
||||
/// Return platform's data directory.
|
||||
fn data_dir() -> PathBuf {
|
||||
#[cfg(unix)]
|
||||
return XDG_DIRS.data_dir();
|
||||
|
||||
#[cfg(windows)]
|
||||
return WINDOWS_DIRS.data_dir();
|
||||
}
|
||||
|
||||
/// Return Topgrade's keep file path.
|
||||
///
|
||||
/// The keep file is a file under the data directory containing a major version
/// number. It is created on the first run and is used to check whether an execution
/// of Topgrade is the first run of a major release; for more details, see
/// `first_run_of_major_release()`.
|
||||
fn keep_file_path() -> PathBuf {
|
||||
let keep_file = "topgrade_keep";
|
||||
data_dir().join(keep_file)
|
||||
}
|
||||
|
||||
/// If environment variable `TOPGRADE_SKIP_BRKC_NOTIFY` is set to `true`, then
|
||||
/// we won't notify the user of the breaking changes.
|
||||
pub(crate) fn should_skip() -> bool {
|
||||
if let Ok(var) = var("TOPGRADE_SKIP_BRKC_NOTIFY") {
|
||||
return var.as_str() == "true";
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
/// True if this is the first execution of a major release.
|
||||
pub(crate) fn first_run_of_major_release() -> Result<bool> {
|
||||
let version = VERSION_STR.parse::<Version>().expect("should be a valid version");
|
||||
let keep_file = keep_file_path();
|
||||
|
||||
// disable this lint here as the current code has better readability
|
||||
#[allow(clippy::collapsible_if)]
|
||||
if version.is_new_major_release() {
|
||||
if !keep_file.exists() || read_to_string(&keep_file)? != VERSION_STR {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
/// Print breaking changes to the user.
|
||||
pub(crate) fn print_breaking_changes() {
|
||||
let header = format!(
|
||||
"{}",
|
||||
t!("Topgrade {version_str} Breaking Changes", version_str = VERSION_STR)
|
||||
);
|
||||
print_separator(header);
|
||||
let contents = if BREAKINGCHANGES.is_empty() {
|
||||
t!("No Breaking changes").to_string()
|
||||
} else {
|
||||
BREAKINGCHANGES.to_string()
|
||||
};
|
||||
println!("{contents}\n");
|
||||
}
|
||||
|
||||
/// This function will ONLY be executed when the user has confirmed the breaking
/// changes. Once confirmed, we write the keep file, which marks the first run
/// of this major release as finished.
|
||||
pub(crate) fn write_keep_file() -> Result<()> {
|
||||
std::fs::create_dir_all(data_dir())?;
|
||||
let keep_file = keep_file_path();
|
||||
|
||||
let mut file = OpenOptions::new()
|
||||
.create(true)
|
||||
.write(true)
|
||||
.truncate(true)
|
||||
.open(keep_file)?;
|
||||
let _ = file.write(VERSION_STR.as_bytes())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn is_new_major_release_works() {
|
||||
let first_major_release: Version = "1.0.0".parse().unwrap();
|
||||
let under_dev: Version = "0.1.0".parse().unwrap();
|
||||
|
||||
assert!(first_major_release.is_new_major_release());
|
||||
assert!(!under_dev.is_new_major_release());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "Version numbers cannot be all 0s")]
|
||||
fn invalid_version() {
|
||||
let all_0 = "0.0.0";
|
||||
all_0.parse::<Version>().unwrap();
|
||||
}
|
||||
}
|
||||
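Taken together, the module exposes a small first-run protocol: skip when `TOPGRADE_SKIP_BRKC_NOTIFY=true`, otherwise print the breaking changes on the first run of a major release and persist the keep file once the user confirms. A hedged sketch of how the pieces could be wired at startup; the confirmation prompt and the call site are assumptions, not shown in this diff:

use color_eyre::eyre::Result;

use crate::breaking_changes;

/// Hypothetical stand-in for Topgrade's real confirmation prompt.
fn user_confirmed_breaking_changes() -> bool {
    true
}

fn maybe_notify_breaking_changes() -> Result<()> {
    if breaking_changes::should_skip() {
        return Ok(());
    }
    if breaking_changes::first_run_of_major_release()? {
        breaking_changes::print_breaking_changes();
        if user_confirmed_breaking_changes() {
            // Record this major version so the notice is shown only once.
            breaking_changes::write_keep_file()?;
        }
    }
    Ok(())
}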
@@ -10,6 +10,8 @@ use color_eyre::eyre::Context;
|
||||
|
||||
use crate::error::TopgradeError;
|
||||
|
||||
use tracing::debug;
|
||||
|
||||
/// Like [`Output`], but UTF-8 decoded.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct Utf8Output {
|
||||
@@ -183,7 +185,7 @@ impl CommandExt for Command {
|
||||
let err = TopgradeError::ProcessFailedWithOutput(program, output.status, stderr.into_owned());
|
||||
|
||||
let ret = Err(err).with_context(|| message);
|
||||
tracing::debug!("Command failed: {ret:?}");
|
||||
debug!("Command failed: {ret:?}");
|
||||
ret
|
||||
}
|
||||
}
|
||||
@@ -203,7 +205,7 @@ impl CommandExt for Command {
|
||||
let (program, _) = get_program_and_args(self);
|
||||
let err = TopgradeError::ProcessFailed(program, status);
|
||||
let ret = Err(err).with_context(|| format!("Command failed: `{command}`"));
|
||||
tracing::debug!("Command failed: {ret:?}");
|
||||
debug!("Command failed: {ret:?}");
|
||||
ret
|
||||
}
|
||||
}
|
||||
@@ -239,6 +241,6 @@ fn format_program_and_args(cmd: &Command) -> String {
|
||||
|
||||
fn log(cmd: &Command) -> String {
|
||||
let command = format_program_and_args(cmd);
|
||||
tracing::debug!("Executing command `{command}`");
|
||||
debug!("Executing command `{command}`");
|
||||
command
|
||||
}
|
||||
|
||||
518
src/config.rs
@@ -5,9 +5,9 @@ use std::fs::{write, File};
|
||||
use std::io::Write;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use std::{env, fs};
|
||||
use std::{env, fmt, fs};
|
||||
|
||||
use clap::{ArgEnum, Parser};
|
||||
use clap::{Parser, ValueEnum};
|
||||
use clap_complete::Shell;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
@@ -15,19 +15,23 @@ use etcetera::base_strategy::BaseStrategy;
|
||||
use merge::Merge;
|
||||
use regex::Regex;
|
||||
use regex_split::RegexSplit;
|
||||
use rust_i18n::t;
|
||||
use serde::Deserialize;
|
||||
use strum::{EnumIter, EnumString, EnumVariantNames, IntoEnumIterator};
|
||||
use tracing::debug;
|
||||
use strum::{EnumIter, EnumString, IntoEnumIterator, VariantNames};
|
||||
use which_crate::which;
|
||||
|
||||
use super::utils::editor;
|
||||
use crate::command::CommandExt;
|
||||
use crate::sudo::SudoKind;
|
||||
use crate::utils::string_prepend_str;
|
||||
use tracing::{debug, error};
|
||||
|
||||
use super::utils::{editor, hostname};
|
||||
|
||||
// TODO: Add i18n to this. Tracking issue: https://github.com/topgrade-rs/topgrade/issues/859
|
||||
pub static EXAMPLE_CONFIG: &str = include_str!("../config.example.toml");
|
||||
|
||||
/// Topgrade's default log level.
|
||||
pub const DEFAULT_LOG_LEVEL: &str = "warn";
|
||||
|
||||
#[allow(unused_macros)]
|
||||
macro_rules! str_value {
|
||||
($section:ident, $value:ident) => {
|
||||
@@ -40,60 +44,9 @@ macro_rules! str_value {
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! check_deprecated {
|
||||
($config:expr, $old:ident, $section:ident, $new:ident) => {
|
||||
if $config.$old.is_some() {
|
||||
println!(concat!(
|
||||
"'",
|
||||
stringify!($old),
|
||||
"' configuration option is deprecated. Rename it to '",
|
||||
stringify!($new),
|
||||
"' and put it under the section [",
|
||||
stringify!($section),
|
||||
"]",
|
||||
));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Get a deprecated option moved from a section to another
|
||||
macro_rules! get_deprecated_moved_opt {
|
||||
($old_section:expr, $old:ident, $new_section:expr, $new:ident) => {{
|
||||
if let Some(old_section) = &$old_section {
|
||||
if old_section.$old.is_some() {
|
||||
return &old_section.$old;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(new_section) = &$new_section {
|
||||
return &new_section.$new;
|
||||
}
|
||||
|
||||
return &None;
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! get_deprecated_moved_or_default_to {
|
||||
($old_section:expr, $old:ident, $new_section:expr, $new:ident, $default_ret:ident) => {{
|
||||
if let Some(old_section) = &$old_section {
|
||||
if let Some(old) = old_section.$old {
|
||||
return old;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(new_section) = &$new_section {
|
||||
if let Some(new) = new_section.$new {
|
||||
return new;
|
||||
}
|
||||
}
|
||||
|
||||
return $default_ret;
|
||||
}};
|
||||
}
|
||||
|
||||
pub type Commands = BTreeMap<String, String>;
|
||||
|
||||
#[derive(ArgEnum, EnumString, EnumVariantNames, Debug, Clone, PartialEq, Eq, Deserialize, EnumIter, Copy)]
|
||||
#[derive(ValueEnum, EnumString, VariantNames, Debug, Clone, PartialEq, Eq, Deserialize, EnumIter, Copy)]
|
||||
#[clap(rename_all = "snake_case")]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[strum(serialize_all = "snake_case")]
|
||||
@@ -102,15 +55,21 @@ pub enum Step {
|
||||
AppMan,
|
||||
Asdf,
|
||||
Atom,
|
||||
Aqua,
|
||||
Audit,
|
||||
AutoCpufreq,
|
||||
Bin,
|
||||
Bob,
|
||||
BrewCask,
|
||||
BrewFormula,
|
||||
Bun,
|
||||
BunPackages,
|
||||
Cargo,
|
||||
Certbot,
|
||||
Chezmoi,
|
||||
Chocolatey,
|
||||
Choosenim,
|
||||
ClamAvDb,
|
||||
Composer,
|
||||
Conda,
|
||||
ConfigUpdate,
|
||||
@@ -121,6 +80,7 @@ pub enum Step {
|
||||
Distrobox,
|
||||
DkpPacman,
|
||||
Dotnet,
|
||||
Elan,
|
||||
Emacs,
|
||||
Firmware,
|
||||
Flatpak,
|
||||
@@ -143,11 +103,15 @@ pub enum Step {
|
||||
Kakoune,
|
||||
Helix,
|
||||
Krew,
|
||||
Lure,
|
||||
Lensfun,
|
||||
Macports,
|
||||
Mamba,
|
||||
Miktex,
|
||||
Mas,
|
||||
Maza,
|
||||
Micro,
|
||||
Mise,
|
||||
Myrepos,
|
||||
Nix,
|
||||
Node,
|
||||
@@ -160,11 +124,15 @@ pub enum Step {
|
||||
PipReviewLocal,
|
||||
Pipupgrade,
|
||||
Pipx,
|
||||
Pixi,
|
||||
Pkg,
|
||||
Pkgin,
|
||||
PlatformioCore,
|
||||
Pnpm,
|
||||
Poetry,
|
||||
Powershell,
|
||||
Protonup,
|
||||
Pyenv,
|
||||
Raco,
|
||||
Rcm,
|
||||
Remotes,
|
||||
@@ -172,8 +140,10 @@ pub enum Step {
|
||||
Rtcl,
|
||||
RubyGems,
|
||||
Rustup,
|
||||
Rye,
|
||||
Scoop,
|
||||
Sdkman,
|
||||
SelfUpdate,
|
||||
Sheldon,
|
||||
Shell,
|
||||
Snap,
|
||||
@@ -186,14 +156,20 @@ pub enum Step {
|
||||
Tlmgr,
|
||||
Tmux,
|
||||
Toolbx,
|
||||
Uv,
|
||||
Vagrant,
|
||||
Vcpkg,
|
||||
Vim,
|
||||
VoltaPackages,
|
||||
Vscode,
|
||||
Waydroid,
|
||||
Winget,
|
||||
Wsl,
|
||||
WslUpdate,
|
||||
Xcodes,
|
||||
Yadm,
|
||||
Yarn,
|
||||
Zvm,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default, Debug, Merge)]
|
||||
@@ -203,6 +179,14 @@ pub struct Include {
|
||||
paths: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default, Debug, Merge)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Containers {
|
||||
#[merge(strategy = crate::utils::merge_strategies::vec_prepend_opt)]
|
||||
ignored_containers: Option<Vec<String>>,
|
||||
runtime: Option<ContainerRuntime>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default, Debug, Merge)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Git {
|
||||
@@ -233,7 +217,6 @@ pub struct Windows {
|
||||
accept_all_updates: Option<bool>,
|
||||
self_rename: Option<bool>,
|
||||
open_remotes_in_new_terminal: Option<bool>,
|
||||
enable_winget: Option<bool>,
|
||||
wsl_update_pre_release: Option<bool>,
|
||||
wsl_update_use_web_download: Option<bool>,
|
||||
}
|
||||
@@ -289,7 +272,10 @@ pub struct Flatpak {
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Brew {
|
||||
greedy_cask: Option<bool>,
|
||||
greedy_latest: Option<bool>,
|
||||
greedy_auto_updates: Option<bool>,
|
||||
autoremove: Option<bool>,
|
||||
fetch_head: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Clone, Copy)]
|
||||
@@ -306,6 +292,22 @@ pub enum ArchPackageManager {
|
||||
Yay,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum ContainerRuntime {
|
||||
Docker,
|
||||
Podman,
|
||||
}
|
||||
|
||||
impl fmt::Display for ContainerRuntime {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
ContainerRuntime::Docker => write!(f, "docker"),
|
||||
ContainerRuntime::Podman => write!(f, "podman"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default, Debug, Merge)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Linux {
|
||||
@@ -338,6 +340,9 @@ pub struct Linux {
|
||||
#[merge(strategy = crate::utils::merge_strategies::string_append_opt)]
|
||||
nix_arguments: Option<String>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::string_append_opt)]
|
||||
nix_env_arguments: Option<String>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::string_append_opt)]
|
||||
apt_arguments: Option<String>,
|
||||
|
||||
@@ -375,11 +380,6 @@ pub struct Misc {
|
||||
|
||||
sudo_command: Option<SudoKind>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::vec_prepend_opt)]
|
||||
git_repos: Option<Vec<String>>,
|
||||
|
||||
predefined_git_repos: Option<bool>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::vec_prepend_opt)]
|
||||
disable: Option<Vec<Step>>,
|
||||
|
||||
@@ -394,9 +394,6 @@ pub struct Misc {
|
||||
#[merge(strategy = crate::utils::merge_strategies::string_append_opt)]
|
||||
ssh_arguments: Option<String>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::string_append_opt)]
|
||||
git_arguments: Option<String>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::string_append_opt)]
|
||||
tmux_arguments: Option<String>,
|
||||
|
||||
@@ -406,25 +403,16 @@ pub struct Misc {
|
||||
|
||||
assume_yes: Option<bool>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::string_append_opt)]
|
||||
yay_arguments: Option<String>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::string_append_opt)]
|
||||
aura_aur_arguments: Option<String>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::string_append_opt)]
|
||||
aura_pacman_arguments: Option<String>,
|
||||
|
||||
no_retry: Option<bool>,
|
||||
|
||||
run_in_tmux: Option<bool>,
|
||||
|
||||
tmux_session_mode: Option<TmuxSessionMode>,
|
||||
|
||||
cleanup: Option<bool>,
|
||||
|
||||
notify_each_step: Option<bool>,
|
||||
|
||||
accept_all_windows_updates: Option<bool>,
|
||||
|
||||
skip_notify: Option<bool>,
|
||||
|
||||
bashit_branch: Option<String>,
|
||||
@@ -433,6 +421,27 @@ pub struct Misc {
|
||||
only: Option<Vec<Step>>,
|
||||
|
||||
no_self_update: Option<bool>,
|
||||
|
||||
log_filters: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Deserialize, ValueEnum)]
|
||||
#[clap(rename_all = "snake_case")]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum TmuxSessionMode {
|
||||
AttachIfNotInSession,
|
||||
AttachAlways,
|
||||
}
|
||||
|
||||
pub struct TmuxConfig {
|
||||
pub args: Vec<String>,
|
||||
pub session_mode: TmuxSessionMode,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default, Debug, Merge)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Lensfun {
|
||||
use_sudo: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default, Debug, Merge)]
|
||||
@@ -469,6 +478,9 @@ pub struct ConfigFile {
|
||||
#[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
|
||||
git: Option<Git>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
|
||||
containers: Option<Containers>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
|
||||
windows: Option<Windows>,
|
||||
|
||||
@@ -492,6 +504,9 @@ pub struct ConfigFile {
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
|
||||
distrobox: Option<Distrobox>,
|
||||
|
||||
#[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
|
||||
lensfun: Option<Lensfun>,
|
||||
}
|
||||
|
||||
fn config_directory() -> PathBuf {
|
||||
@@ -517,7 +532,7 @@ impl ConfigFile {
|
||||
|
||||
let config_directory = config_directory();
|
||||
|
||||
let possible_config_paths = vec![
|
||||
let possible_config_paths = [
|
||||
config_directory.join("topgrade.toml"),
|
||||
config_directory.join("topgrade/topgrade.toml"),
|
||||
];
|
||||
@@ -526,7 +541,7 @@ impl ConfigFile {
|
||||
for path in possible_config_paths.iter() {
|
||||
if path.exists() {
|
||||
debug!("Configuration at {}", path.display());
|
||||
res.0 = path.clone();
|
||||
res.0.clone_from(path);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -535,7 +550,7 @@ impl ConfigFile {
|
||||
|
||||
// If no config file exists, create a default one in the config directory
|
||||
if !res.0.exists() && res.1.is_empty() {
|
||||
res.0 = possible_config_paths[0].clone();
|
||||
res.0.clone_from(&possible_config_paths[0]);
|
||||
debug!("No configuration exists");
|
||||
write(&res.0, EXAMPLE_CONFIG).map_err(|e| {
|
||||
debug!(
|
||||
@@ -558,7 +573,9 @@ impl ConfigFile {
|
||||
if dir_to_search.exists() {
|
||||
for entry in fs::read_dir(dir_to_search)? {
|
||||
let entry = entry?;
|
||||
if entry.file_type()?.is_file() {
|
||||
// Use `Path::is_file()` here to traverse symbolic links.
|
||||
// `DirEntry::file_type()` and `FileType::is_file()` will not traverse symbolic links.
|
||||
if entry.path().is_file() {
|
||||
debug!(
|
||||
"Found additional (directory) configuration file at {}",
|
||||
entry.path().display()
|
||||
@@ -591,13 +608,11 @@ impl ConfigFile {
|
||||
to read the include directory before returning the main config path
|
||||
*/
|
||||
for include in dir_include {
|
||||
let include_contents = fs::read_to_string(&include).map_err(|e| {
|
||||
tracing::error!("Unable to read {}", include.display());
|
||||
e
|
||||
let include_contents = fs::read_to_string(&include).inspect_err(|_| {
|
||||
error!("Unable to read {}", include.display());
|
||||
})?;
|
||||
let include_contents_parsed = toml::from_str(include_contents.as_str()).map_err(|e| {
|
||||
tracing::error!("Failed to deserialize {}", include.display());
|
||||
e
|
||||
let include_contents_parsed = toml::from_str(include_contents.as_str()).inspect_err(|_| {
|
||||
error!("Failed to deserialize {}", include.display());
|
||||
})?;
|
||||
|
||||
result.merge(include_contents_parsed);
|
||||
@@ -612,9 +627,8 @@ impl ConfigFile {
|
||||
return Ok(result);
|
||||
}
|
||||
|
||||
let mut contents_non_split = fs::read_to_string(&config_path).map_err(|e| {
|
||||
tracing::error!("Unable to read {}", config_path.display());
|
||||
e
|
||||
let mut contents_non_split = fs::read_to_string(&config_path).inspect_err(|_| {
|
||||
error!("Unable to read {}", config_path.display());
|
||||
})?;
|
||||
|
||||
Self::ensure_misc_is_present(&mut contents_non_split, &config_path);
|
||||
@@ -625,9 +639,8 @@ impl ConfigFile {
|
||||
let contents_split = regex_match_include.split_inclusive_left(contents_non_split.as_str());
|
||||
|
||||
for contents in contents_split {
|
||||
let config_file_include_only: ConfigFileIncludeOnly = toml::from_str(contents).map_err(|e| {
|
||||
tracing::error!("Failed to deserialize an include section of {}", config_path.display());
|
||||
e
|
||||
let config_file_include_only: ConfigFileIncludeOnly = toml::from_str(contents).inspect_err(|_| {
|
||||
error!("Failed to deserialize an include section of {}", config_path.display());
|
||||
})?;
|
||||
|
||||
if let Some(includes) = &config_file_include_only.include {
|
||||
@@ -639,51 +652,39 @@ impl ConfigFile {
|
||||
let include_contents = match fs::read_to_string(&include_path) {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
tracing::error!("Unable to read {}: {}", include_path.display(), e);
|
||||
error!("Unable to read {}: {e}", include_path.display(),);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
match toml::from_str::<Self>(&include_contents) {
|
||||
Ok(include_parsed) => result.merge(include_parsed),
|
||||
Err(e) => {
|
||||
tracing::error!("Failed to deserialize {}: {}", include_path.display(), e);
|
||||
error!("Failed to deserialize {}: {e}", include_path.display(),);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
debug!("Configuration include found: {}", include_path.display());
|
||||
}
|
||||
} else {
|
||||
debug!("No include paths found in {}", config_path.display());
|
||||
}
|
||||
}
|
||||
|
||||
match toml::from_str::<Self>(contents) {
|
||||
Ok(contents) => result.merge(contents),
|
||||
Err(e) => tracing::error!("Failed to deserialize {}: {}", config_path.display(), e),
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(misc) = &mut result.misc {
|
||||
if let Some(ref mut paths) = &mut misc.git_repos {
|
||||
for path in paths.iter_mut() {
|
||||
let expanded = shellexpand::tilde::<&str>(&path.as_ref()).into_owned();
|
||||
debug!("Path {} expanded to {}", path, expanded);
|
||||
*path = expanded;
|
||||
}
|
||||
Err(e) => error!("Failed to deserialize {}: {e}", config_path.display(),),
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(paths) = result.git.as_mut().and_then(|git| git.repos.as_mut()) {
|
||||
for path in paths.iter_mut() {
|
||||
let expanded = shellexpand::tilde::<&str>(&path.as_ref()).into_owned();
|
||||
debug!("Path {} expanded to {}", path, expanded);
|
||||
debug!(
|
||||
"{}",
|
||||
t!("Path {path} expanded to {expanded}", path = path, expanded = expanded)
|
||||
);
|
||||
*path = expanded;
|
||||
}
|
||||
}
|
||||
|
||||
debug!("Loaded configuration: {:?}", result);
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
@@ -716,104 +717,105 @@ impl ConfigFile {
|
||||
}
|
||||
|
||||
// Command line arguments
|
||||
// TODO: i18n of clap currently not easily possible. Waiting for https://github.com/clap-rs/clap/issues/380
|
||||
// Tracking issue for i18n: https://github.com/topgrade-rs/topgrade/issues/859
|
||||
#[derive(Parser, Debug)]
|
||||
#[clap(name = "Topgrade", version)]
|
||||
#[command(name = "topgrade", version)]
|
||||
pub struct CommandLineArgs {
|
||||
/// Edit the configuration file
|
||||
#[clap(long = "edit-config")]
|
||||
#[arg(long = "edit-config")]
|
||||
edit_config: bool,
|
||||
|
||||
/// Show config reference
|
||||
#[clap(long = "config-reference")]
|
||||
#[arg(long = "config-reference")]
|
||||
show_config_reference: bool,
|
||||
|
||||
/// Run inside tmux
|
||||
#[clap(short = 't', long = "tmux")]
|
||||
#[arg(short = 't', long = "tmux")]
|
||||
run_in_tmux: bool,
|
||||
|
||||
/// Cleanup temporary or old files
|
||||
#[clap(short = 'c', long = "cleanup")]
|
||||
#[arg(short = 'c', long = "cleanup")]
|
||||
cleanup: bool,
|
||||
|
||||
/// Print what would be done
|
||||
#[clap(short = 'n', long = "dry-run")]
|
||||
#[arg(short = 'n', long = "dry-run")]
|
||||
dry_run: bool,
|
||||
|
||||
/// Do not ask to retry failed steps
|
||||
#[clap(long = "no-retry")]
|
||||
#[arg(long = "no-retry")]
|
||||
no_retry: bool,
|
||||
|
||||
/// Do not perform upgrades for the given steps
|
||||
#[clap(long = "disable", value_name = "STEP", arg_enum, multiple_values = true)]
|
||||
#[arg(long = "disable", value_name = "STEP", value_enum, num_args = 1..)]
|
||||
disable: Vec<Step>,
|
||||
|
||||
/// Perform only the specified steps (experimental)
|
||||
#[clap(long = "only", value_name = "STEP", arg_enum, multiple_values = true)]
|
||||
/// Perform only the specified steps
|
||||
#[arg(long = "only", value_name = "STEP", value_enum, num_args = 1..)]
|
||||
only: Vec<Step>,
|
||||
|
||||
/// Run only specific custom commands
|
||||
#[clap(long = "custom-commands", value_name = "NAME", multiple_values = true)]
|
||||
#[arg(long = "custom-commands", value_name = "NAME", num_args = 1..)]
|
||||
custom_commands: Vec<String>,
|
||||
|
||||
/// Set environment variables
|
||||
#[clap(long = "env", value_name = "NAME=VALUE", multiple_values = true)]
|
||||
#[arg(long = "env", value_name = "NAME=VALUE", num_args = 1..)]
|
||||
env: Vec<String>,
|
||||
|
||||
/// Output debug logs. Alias for `--log-filter debug`.
|
||||
#[clap(short = 'v', long = "verbose")]
|
||||
#[arg(short = 'v', long = "verbose")]
|
||||
pub verbose: bool,
|
||||
|
||||
/// Prompt for a key before exiting
|
||||
#[clap(short = 'k', long = "keep")]
|
||||
#[arg(short = 'k', long = "keep")]
|
||||
keep_at_end: bool,
|
||||
|
||||
/// Skip sending a notification at the end of a run
|
||||
#[clap(long = "skip-notify")]
|
||||
#[arg(long = "skip-notify")]
|
||||
skip_notify: bool,
|
||||
|
||||
/// Say yes to package manager's prompt
|
||||
#[clap(
|
||||
#[arg(
|
||||
short = 'y',
|
||||
long = "yes",
|
||||
value_name = "STEP",
|
||||
arg_enum,
|
||||
multiple_values = true,
|
||||
min_values = 0
|
||||
value_enum,
|
||||
num_args = 0..,
|
||||
)]
|
||||
yes: Option<Vec<Step>>,
|
||||
|
||||
/// Don't pull the predefined git repos
|
||||
#[clap(long = "disable-predefined-git-repos")]
|
||||
#[arg(long = "disable-predefined-git-repos")]
|
||||
disable_predefined_git_repos: bool,
|
||||
|
||||
/// Alternative configuration file
|
||||
#[clap(long = "config", value_name = "PATH")]
|
||||
#[arg(long = "config", value_name = "PATH")]
|
||||
config: Option<PathBuf>,
|
||||
|
||||
/// A regular expression for restricting remote host execution
|
||||
#[clap(long = "remote-host-limit", value_name = "REGEX")]
|
||||
#[arg(long = "remote-host-limit", value_name = "REGEX")]
|
||||
remote_host_limit: Option<Regex>,
|
||||
|
||||
/// Show the reason for skipped steps
|
||||
#[clap(long = "show-skipped")]
|
||||
#[arg(long = "show-skipped")]
|
||||
show_skipped: bool,
|
||||
|
||||
/// Tracing filter directives.
|
||||
///
|
||||
/// See: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/struct.EnvFilter.html
|
||||
#[clap(long, default_value = "warn")]
|
||||
#[arg(long, default_value = DEFAULT_LOG_LEVEL)]
|
||||
pub log_filter: String,
|
||||
|
||||
/// Print completion script for the given shell and exit
|
||||
#[clap(long, arg_enum, hide = true)]
|
||||
#[arg(long, value_enum, hide = true)]
|
||||
pub gen_completion: Option<Shell>,
|
||||
|
||||
/// Print roff manpage and exit
|
||||
#[clap(long, hide = true)]
|
||||
#[arg(long, hide = true)]
|
||||
pub gen_manpage: bool,
|
||||
|
||||
/// Don't update Topgrade
|
||||
#[clap(long = "no-self-update")]
|
||||
#[arg(long = "no-self-update")]
|
||||
pub no_self_update: bool,
|
||||
}
|
||||
|
||||
@@ -830,12 +832,25 @@ impl CommandLineArgs {
|
||||
&self.env
|
||||
}
|
||||
|
||||
/// In Topgrade, filter directives come from 3 places:
|
||||
/// 1. CLI option `--log-filter`
|
||||
/// 2. Config file
|
||||
/// 3. `debug` if the `--verbose` option is present
|
||||
///
|
||||
/// Before loading the configuration file, we need our logger to work, so this
/// function returns only the directives coming from parts 1 and 3.
///
/// When the configuration file is loaded, `Config::tracing_filter_directives()`
/// will return all 3 parts.
|
||||
pub fn tracing_filter_directives(&self) -> String {
|
||||
let mut ret = self.log_filter.clone();
|
||||
if self.verbose {
|
||||
"debug".into()
|
||||
} else {
|
||||
self.log_filter.clone()
|
||||
ret.push(',');
|
||||
ret.push_str("debug");
|
||||
}
|
||||
|
||||
ret
|
||||
}
|
||||
}
|
||||
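An illustration of the combined directive string (not part of this diff): with `--log-filter "topgrade=info" --verbose`, the method above returns `topgrade=info,debug`, so the plain `debug` directive raises the default level while `topgrade=info` still scopes that target.

#[test]
fn verbose_appends_debug_directive() {
    // Mirrors the parse_from style used by the tests later in this file.
    let args = CommandLineArgs::parse_from(["topgrade", "--log-filter", "topgrade=info", "--verbose"]);
    assert_eq!(args.tracing_filter_directives(), "topgrade=info,debug");
}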
|
||||
@@ -844,6 +859,7 @@ impl CommandLineArgs {
|
||||
/// The struct holds the loaded configuration file, as well as the arguments parsed from the command line.
|
||||
/// Its provided methods decide the appropriate options based on combining the configuration file and the
|
||||
/// command line arguments.
|
||||
#[derive(Debug)]
|
||||
pub struct Config {
|
||||
opt: CommandLineArgs,
|
||||
config_file: ConfigFile,
|
||||
@@ -860,7 +876,7 @@ impl Config {
|
||||
ConfigFile::read(opt.config.clone()).unwrap_or_else(|e| {
|
||||
// Inform the user about errors when loading the configuration,
|
||||
// but fallback to the default config to at least attempt to do something
|
||||
tracing::error!("failed to load configuration: {}", e);
|
||||
error!("failed to load configuration: {e}");
|
||||
ConfigFile::default()
|
||||
})
|
||||
} else {
|
||||
@@ -868,14 +884,6 @@ impl Config {
|
||||
ConfigFile::default()
|
||||
};
|
||||
|
||||
if let Some(misc) = &config_file.misc {
|
||||
check_deprecated!(misc, git_arguments, git, arguments);
|
||||
check_deprecated!(misc, git_repos, git, repos);
|
||||
check_deprecated!(misc, predefined_git_repos, git, pull_predefined);
|
||||
check_deprecated!(misc, yay_arguments, linux, yay_arguments);
|
||||
check_deprecated!(misc, accept_all_windows_updates, windows, accept_all_updates);
|
||||
}
|
||||
|
||||
let allowed_steps = Self::allowed_steps(&opt, &config_file);
|
||||
|
||||
Ok(Self {
|
||||
@@ -906,8 +914,25 @@ impl Config {
|
||||
}
|
||||
|
||||
/// The list of additional git repositories to pull.
|
||||
pub fn git_repos(&self) -> &Option<Vec<String>> {
|
||||
get_deprecated_moved_opt!(&self.config_file.misc, git_repos, &self.config_file.git, repos)
|
||||
pub fn git_repos(&self) -> Option<&Vec<String>> {
|
||||
self.config_file.git.as_ref().and_then(|git| git.repos.as_ref())
|
||||
}
|
||||
|
||||
/// The list of docker/podman containers to ignore.
|
||||
pub fn containers_ignored_tags(&self) -> Option<&Vec<String>> {
|
||||
self.config_file
|
||||
.containers
|
||||
.as_ref()
|
||||
.and_then(|containers| containers.ignored_containers.as_ref())
|
||||
}
|
||||
|
||||
/// The preferred runtime for container updates (podman / docker).
|
||||
pub fn containers_runtime(&self) -> ContainerRuntime {
|
||||
self.config_file
|
||||
.containers
|
||||
.as_ref()
|
||||
.and_then(|containers| containers.runtime)
|
||||
.unwrap_or(ContainerRuntime::Docker) // defaults to a popular choice
|
||||
}
|
||||
|
||||
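As a usage sketch (not from this diff), a containers step could build its command from the accessor above; the subcommand and arguments here are assumptions, but the `Display` impl for `ContainerRuntime` guarantees `to_string()` yields either "docker" or "podman".

fn container_list_command(config: &Config) -> std::process::Command {
    // "docker" unless the config selects podman via `[containers] runtime`.
    let runtime = config.containers_runtime().to_string();
    let mut cmd = std::process::Command::new(runtime);
    // Both docker and podman accept this listing form (assumption).
    cmd.args(["image", "ls", "--format", "{{.Repository}}:{{.Tag}}"]);
    cmd
}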
/// Tell whether the specified step should run.
|
||||
@@ -966,6 +991,15 @@ impl Config {
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// The preferred way to run the new tmux session.
|
||||
fn tmux_session_mode(&self) -> TmuxSessionMode {
|
||||
self.config_file
|
||||
.misc
|
||||
.as_ref()
|
||||
.and_then(|misc| misc.tmux_session_mode)
|
||||
.unwrap_or(TmuxSessionMode::AttachIfNotInSession)
|
||||
}
|
||||
|
||||
/// Tell whether we should perform cleanup steps.
|
||||
pub fn cleanup(&self) -> bool {
|
||||
self.opt.cleanup
|
||||
@@ -1019,12 +1053,20 @@ impl Config {
|
||||
}
|
||||
|
||||
/// Extra Git arguments
|
||||
pub fn git_arguments(&self) -> &Option<String> {
|
||||
get_deprecated_moved_opt!(&self.config_file.misc, git_arguments, &self.config_file.git, arguments)
|
||||
pub fn git_arguments(&self) -> Option<&String> {
|
||||
self.config_file.git.as_ref().and_then(|git| git.arguments.as_ref())
|
||||
}
|
||||
|
||||
pub fn tmux_config(&self) -> Result<TmuxConfig> {
|
||||
let args = self.tmux_arguments()?;
|
||||
Ok(TmuxConfig {
|
||||
args,
|
||||
session_mode: self.tmux_session_mode(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Extra Tmux arguments
|
||||
pub fn tmux_arguments(&self) -> Result<Vec<String>> {
|
||||
fn tmux_arguments(&self) -> Result<Vec<String>> {
|
||||
let args = &self
|
||||
.config_file
|
||||
.misc
|
||||
@@ -1093,13 +1135,11 @@ impl Config {
|
||||
|
||||
/// Whether to accept all Windows updates
|
||||
pub fn accept_all_windows_updates(&self) -> bool {
|
||||
get_deprecated_moved_or_default_to!(
|
||||
&self.config_file.misc,
|
||||
accept_all_windows_updates,
|
||||
&self.config_file.windows,
|
||||
accept_all_updates,
|
||||
true
|
||||
)
|
||||
self.config_file
|
||||
.windows
|
||||
.as_ref()
|
||||
.and_then(|windows| windows.accept_all_updates)
|
||||
.unwrap_or(true)
|
||||
}
|
||||
|
||||
/// Whether to self rename the Topgrade executable during the run
|
||||
@@ -1138,6 +1178,24 @@ impl Config {
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Whether Brew cask should be greedy_latest
|
||||
pub fn brew_greedy_latest(&self) -> bool {
|
||||
self.config_file
|
||||
.brew
|
||||
.as_ref()
|
||||
.and_then(|c| c.greedy_latest)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Whether Brew cask should be auto_updates
|
||||
pub fn brew_greedy_auto_updates(&self) -> bool {
|
||||
self.config_file
|
||||
.brew
|
||||
.as_ref()
|
||||
.and_then(|c| c.greedy_auto_updates)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Whether Brew should autoremove
|
||||
pub fn brew_autoremove(&self) -> bool {
|
||||
self.config_file
|
||||
@@ -1147,6 +1205,15 @@ impl Config {
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Whether Brew should upgrade formulae built from the HEAD branch
|
||||
pub fn brew_fetch_head(&self) -> bool {
|
||||
self.config_file
|
||||
.brew
|
||||
.as_ref()
|
||||
.and_then(|c| c.fetch_head)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// Whether Composer should update itself
|
||||
pub fn composer_self_update(&self) -> bool {
|
||||
self.config_file
|
||||
@@ -1278,6 +1345,14 @@ impl Config {
|
||||
.and_then(|linux| linux.nix_arguments.as_deref())
|
||||
}
|
||||
|
||||
/// Extra nix-env arguments
|
||||
pub fn nix_env_arguments(&self) -> Option<&str> {
|
||||
self.config_file
|
||||
.linux
|
||||
.as_ref()
|
||||
.and_then(|linux| linux.nix_env_arguments.as_deref())
|
||||
}
|
||||
|
||||
/// Extra Home Manager arguments
|
||||
pub fn home_manager(&self) -> Option<&Vec<String>> {
|
||||
self.config_file
|
||||
@@ -1374,19 +1449,39 @@ impl Config {
|
||||
|
||||
pub fn use_predefined_git_repos(&self) -> bool {
|
||||
!self.opt.disable_predefined_git_repos
|
||||
&& get_deprecated_moved_or_default_to!(
|
||||
&self.config_file.misc,
|
||||
predefined_git_repos,
|
||||
&self.config_file.git,
|
||||
pull_predefined,
|
||||
true
|
||||
)
|
||||
&& self
|
||||
.config_file
|
||||
.git
|
||||
.as_ref()
|
||||
.and_then(|git| git.pull_predefined)
|
||||
.unwrap_or(true)
|
||||
}
|
||||
|
||||
pub fn verbose(&self) -> bool {
|
||||
self.opt.verbose
|
||||
}
|
||||
|
||||
/// After loading the config file, filter directives consist of 3 parts:
|
||||
///
|
||||
/// 1. directives from the configuration file
|
||||
/// 2. directives from the CLI option `--log-filter`
|
||||
/// 3. `debug`, which would be enabled if the `--verbose` option is present
|
||||
///
|
||||
/// An earlier directive will be overwritten if a directive with the same target
/// appears later.
|
||||
pub fn tracing_filter_directives(&self) -> String {
|
||||
let mut ret = String::new();
|
||||
if let Some(directives) = self.config_file.misc.as_ref().and_then(|m| m.log_filters.as_ref()) {
|
||||
ret.push_str(&directives.join(","));
|
||||
}
|
||||
ret.push(',');
|
||||
ret.push_str(&self.opt.log_filter);
|
||||
if self.verbose() {
|
||||
ret.push_str(",debug");
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn show_skipped(&self) -> bool {
|
||||
self.opt.show_skipped
|
||||
}
|
||||
@@ -1454,30 +1549,22 @@ impl Config {
|
||||
#[cfg(target_os = "linux")]
|
||||
str_value!(linux, emerge_update_flags);
|
||||
|
||||
pub fn should_execute_remote(&self, remote: &str) -> bool {
|
||||
if let Ok(hostname) = hostname() {
|
||||
if remote == hostname {
|
||||
pub fn should_execute_remote(&self, hostname: Result<String>, remote: &str) -> bool {
|
||||
let remote_host = remote.split_once('@').map_or(remote, |(_, host)| host);
|
||||
|
||||
if let Ok(hostname) = hostname {
|
||||
if remote_host == hostname {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(limit) = self.opt.remote_host_limit.as_ref() {
|
||||
return limit.is_match(remote);
|
||||
if let Some(limit) = &self.opt.remote_host_limit.as_ref() {
|
||||
return limit.is_match(remote_host);
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn enable_winget(&self) -> bool {
|
||||
return self
|
||||
.config_file
|
||||
.windows
|
||||
.as_ref()
|
||||
.and_then(|w| w.enable_winget)
|
||||
.unwrap_or(false);
|
||||
}
|
||||
|
||||
pub fn enable_pipupgrade(&self) -> bool {
|
||||
return self
|
||||
.config_file
|
||||
@@ -1525,11 +1612,21 @@ impl Config {
|
||||
|
||||
self.opt.custom_commands.iter().any(|s| s == name)
|
||||
}
|
||||
|
||||
pub fn lensfun_use_sudo(&self) -> bool {
|
||||
self.config_file
|
||||
.lensfun
|
||||
.as_ref()
|
||||
.and_then(|lensfun| lensfun.use_sudo)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::config::ConfigFile;
|
||||
|
||||
use crate::config::*;
|
||||
use color_eyre::eyre::eyre;
|
||||
|
||||
/// Test the default configuration in `config.example.toml` is valid.
|
||||
#[test]
|
||||
@@ -1538,4 +1635,51 @@ mod test {
|
||||
|
||||
assert!(toml::from_str::<ConfigFile>(str).is_ok());
|
||||
}
|
||||
|
||||
fn config() -> Config {
|
||||
Config {
|
||||
opt: CommandLineArgs::parse_from::<_, String>([]),
|
||||
config_file: ConfigFile::default(),
|
||||
allowed_steps: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_should_execute_remote_different_hostname() {
|
||||
assert!(config().should_execute_remote(Ok("hostname".to_string()), "remote_hostname"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_should_execute_remote_different_hostname_with_user() {
|
||||
assert!(config().should_execute_remote(Ok("hostname".to_string()), "user@remote_hostname"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_should_execute_remote_unknown_hostname() {
|
||||
assert!(config().should_execute_remote(Err(eyre!("failed to get hostname")), "remote_hostname"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_should_not_execute_remote_same_hostname() {
|
||||
assert!(!config().should_execute_remote(Ok("hostname".to_string()), "hostname"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_should_not_execute_remote_same_hostname_with_user() {
|
||||
assert!(!config().should_execute_remote(Ok("hostname".to_string()), "user@hostname"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_should_execute_remote_matching_limit() {
|
||||
let mut config = config();
|
||||
config.opt = CommandLineArgs::parse_from(["topgrade", "--remote-host-limit", "remote_hostname"]);
|
||||
assert!(config.should_execute_remote(Ok("hostname".to_string()), "user@remote_hostname"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_should_not_execute_remote_not_matching_limit() {
|
||||
let mut config = config();
|
||||
config.opt = CommandLineArgs::parse_from(["topgrade", "--remote-host-limit", "other_hostname"]);
|
||||
assert!(!config.should_execute_remote(Ok("hostname".to_string()), "user@remote_hostname"))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! SIGINT handling in Unix systems.
|
||||
use crate::ctrlc::interrupted::set_interrupted;
|
||||
use nix::sys::signal;
|
||||
use nix::sys::signal::{sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal};
|
||||
|
||||
/// Handle SIGINT. Set the interruption flag.
|
||||
extern "C" fn handle_sigint(_: i32) {
|
||||
@@ -10,12 +10,8 @@ extern "C" fn handle_sigint(_: i32) {
|
||||
/// Set the necessary signal handlers.
|
||||
/// The function panics on failure.
|
||||
pub fn set_handler() {
|
||||
let sig_action = signal::SigAction::new(
|
||||
signal::SigHandler::Handler(handle_sigint),
|
||||
signal::SaFlags::empty(),
|
||||
signal::SigSet::empty(),
|
||||
);
|
||||
let sig_action = SigAction::new(SigHandler::Handler(handle_sigint), SaFlags::empty(), SigSet::empty());
|
||||
unsafe {
|
||||
signal::sigaction(signal::SIGINT, &sig_action).unwrap();
|
||||
sigaction(Signal::SIGINT, &sig_action).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
//! A stub for Ctrl + C handling.
use crate::ctrlc::interrupted::set_interrupted;
use tracing::error;
use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE};
use winapi::um::consoleapi::SetConsoleCtrlHandler;
use winapi::um::wincon::CTRL_C_EVENT;
@@ -16,6 +17,6 @@ extern "system" fn handler(ctrl_type: DWORD) -> BOOL {

pub fn set_handler() {
if 0 == unsafe { SetConsoleCtrlHandler(Some(handler), TRUE) } {
tracing::error!("Cannot set a control C handler")
error!("Cannot set a control C handler")
}
}

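Both handlers above funnel into `ctrlc::interrupted::set_interrupted`, which is not part of this diff. A plausible minimal version of that shared flag (an assumption about the module, not its actual contents) is a process-wide atomic boolean that steps can poll between commands:

use std::sync::atomic::{AtomicBool, Ordering};

// Flag flipped by the SIGINT / console-control handlers shown above.
static INTERRUPTED: AtomicBool = AtomicBool::new(false);

pub fn set_interrupted() {
    INTERRUPTED.store(true, Ordering::SeqCst);
}

pub fn interrupted() -> bool {
    INTERRUPTED.load(Ordering::SeqCst)
}
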
81
src/error.rs
@@ -1,41 +1,98 @@
use std::process::ExitStatus;
use std::{fmt::Display, process::ExitStatus};

use rust_i18n::t;
use thiserror::Error;

#[derive(Error, Debug, PartialEq, Eq)]
pub enum TopgradeError {
#[error("`{0}` failed: {1}")]
ProcessFailed(String, ExitStatus),

#[error("`{0}` failed: {1}")]
ProcessFailedWithOutput(String, ExitStatus, String),

#[error("Unknown Linux Distribution")]
#[cfg(target_os = "linux")]
UnknownLinuxDistribution,

#[error("File \"/etc/os-release\" does not exist or is empty")]
#[cfg(target_os = "linux")]
EmptyOSReleaseFile,

#[error("Failed getting the system package manager")]
#[cfg(target_os = "linux")]
FailedGettingPackageManager,
}

impl Display for TopgradeError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
TopgradeError::ProcessFailed(process, exit_status) => {
write!(
f,
"{}",
t!(
"`{process}` failed: {exit_status}",
process = process,
exit_status = exit_status
)
)
}
TopgradeError::ProcessFailedWithOutput(process, exit_status, output) => {
write!(
f,
"{}",
t!(
"`{process}` failed: {exit_status} with {output}",
process = process,
exit_status = exit_status,
output = output
)
)
}
#[cfg(target_os = "linux")]
TopgradeError::UnknownLinuxDistribution => write!(f, "{}", t!("Unknown Linux Distribution")),
#[cfg(target_os = "linux")]
TopgradeError::EmptyOSReleaseFile => {
write!(f, "{}", t!("File \"/etc/os-release\" does not exist or is empty"))
}
#[cfg(target_os = "linux")]
TopgradeError::FailedGettingPackageManager => {
write!(f, "{}", t!("Failed getting the system package manager"))
}
}
}
}

#[derive(Error, Debug)]
#[error("A step failed")]
pub struct StepFailed;

#[derive(Error, Debug)]
#[error("Dry running")]
pub struct DryRun();
impl Display for StepFailed {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", t!("A step failed"))
}
}

#[derive(Error, Debug)]
pub struct DryRun();

impl Display for DryRun {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", t!("Dry running"))
}
}

#[derive(Error, Debug)]
#[error("{0}")]
pub struct SkipStep(pub String);

impl Display for SkipStep {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}

#[cfg(all(windows, feature = "self-update"))]
#[derive(Error, Debug)]
#[error("Topgrade Upgraded")]
pub struct Upgraded(pub ExitStatus);

#[cfg(all(windows, feature = "self-update"))]
impl Display for Upgraded {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", t!("Topgrade Upgraded"))
}
}

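The `Display` implementations above all route through `rust_i18n::t!`, where the English text doubles as the translation key and named arguments fill the `{placeholder}` slots, falling back to English when no locale file matches. A small usage sketch following the same call shape (the key below is illustrative, not one of topgrade's real keys):

use rust_i18n::t;
use std::process::ExitStatus;

// Assumes rust_i18n::i18n!("locales", fallback = "en") has been invoked at
// crate level, as main.rs does further down in this changeset.
fn describe_failure(process: &str, exit_status: ExitStatus) -> String {
    // Key text plus named arguments, exactly like the Display impls above.
    t!(
        "`{process}` failed: {exit_status}",
        process = process,
        exit_status = exit_status
    )
    .to_string()
}
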
@@ -1,8 +1,7 @@
#![allow(dead_code)]
use crate::executor::RunType;
use crate::git::Git;
use crate::sudo::Sudo;
use crate::utils::{require_option, REQUIRE_SUDO};
use crate::utils::{get_require_sudo_string, require_option};
use crate::{config::Config, executor::Executor};
use color_eyre::eyre::Result;
use std::env::var;
@@ -12,7 +11,6 @@ use std::sync::Mutex;
pub struct ExecutionContext<'a> {
run_type: RunType,
sudo: Option<Sudo>,
git: &'a Git,
config: &'a Config,
/// Name of a tmux session to execute commands in, if any.
/// This is used in `./steps/remote/ssh.rs`, where we want to run `topgrade` in a new
@@ -23,12 +21,11 @@ pub struct ExecutionContext<'a> {
}

impl<'a> ExecutionContext<'a> {
pub fn new(run_type: RunType, sudo: Option<Sudo>, git: &'a Git, config: &'a Config) -> Self {
pub fn new(run_type: RunType, sudo: Option<Sudo>, config: &'a Config) -> Self {
let under_ssh = var("SSH_CLIENT").is_ok() || var("SSH_TTY").is_ok();
Self {
run_type,
sudo,
git,
config,
tmux_session: Mutex::new(None),
under_ssh,
@@ -36,7 +33,7 @@ impl<'a> ExecutionContext<'a> {
}

pub fn execute_elevated(&self, command: &Path, interactive: bool) -> Result<Executor> {
let sudo = require_option(self.sudo.as_ref(), REQUIRE_SUDO.to_string())?;
let sudo = require_option(self.sudo.as_ref(), get_require_sudo_string())?;
Ok(sudo.execute_elevated(self, command, interactive))
}

@@ -44,10 +41,6 @@ impl<'a> ExecutionContext<'a> {
self.run_type
}

pub fn git(&self) -> &Git {
self.git
}

pub fn sudo(&self) -> &Option<Sudo> {
&self.sudo
}

@@ -4,6 +4,7 @@ use std::path::Path;
use std::process::{Child, Command, ExitStatus, Output};

use color_eyre::eyre::Result;
use rust_i18n::t;
use tracing::debug;

use crate::command::CommandExt;
@@ -209,17 +210,20 @@ pub struct DryCommand {
impl DryCommand {
fn dry_run(&self) {
print!(
"Dry running: {} {}",
self.program.to_string_lossy(),
shell_words::join(
self.args
.iter()
.map(|a| String::from(a.to_string_lossy()))
.collect::<Vec<String>>()
"{}",
t!(
"Dry running: {program_name} {arguments}",
program_name = self.program.to_string_lossy(),
arguments = shell_words::join(
self.args
.iter()
.map(|a| String::from(a.to_string_lossy()))
.collect::<Vec<String>>()
)
)
);
match &self.directory {
Some(dir) => println!(" in {}", dir.to_string_lossy()),
Some(dir) => println!(" {}", t!("in {directory}", directory = dir.to_string_lossy())),
None => println!(),
};
}
@@ -227,6 +231,7 @@ impl DryCommand {

/// The Result of spawn. Contains an actual `std::process::Child` if executed by a wet command.
pub enum ExecutorChild {
#[allow(unused)] // this type has not been used
Wet(Child),
Dry,
}

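The reworked dry-run printer keeps using `shell_words::join` to render the argument list; it quotes arguments that contain whitespace or other shell-special characters, so the printed command stays copy-pasteable. A standalone illustration (not part of the diff; it only needs the `shell_words` crate the executor already depends on):

fn main() {
    // Plain arguments pass through unchanged.
    assert_eq!(shell_words::join(["image", "prune", "-f"]), "image prune -f");

    // Arguments containing whitespace are quoted for the shell.
    assert_eq!(shell_words::join(["echo", "hello world"]), "echo 'hello world'");
}
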
237
src/main.rs
@@ -6,15 +6,19 @@ use std::path::PathBuf;
|
||||
use std::process::exit;
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::breaking_changes::{first_run_of_major_release, print_breaking_changes, should_skip, write_keep_file};
|
||||
use clap::CommandFactory;
|
||||
use clap::{crate_version, Parser};
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use console::Key;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
#[cfg(windows)]
|
||||
use etcetera::base_strategy::Windows;
|
||||
use etcetera::base_strategy::{BaseStrategy, Xdg};
|
||||
#[cfg(unix)]
|
||||
use etcetera::base_strategy::Xdg;
|
||||
use once_cell::sync::Lazy;
|
||||
use rust_i18n::{i18n, t};
|
||||
use tracing::debug;
|
||||
|
||||
use self::config::{CommandLineArgs, Config, Step};
|
||||
@@ -24,6 +28,9 @@ use self::error::Upgraded;
|
||||
use self::steps::{remote::*, *};
|
||||
use self::terminal::*;
|
||||
|
||||
use self::utils::{hostname, install_color_eyre, install_tracing, update_tracing};
|
||||
|
||||
mod breaking_changes;
|
||||
mod command;
|
||||
mod config;
|
||||
mod ctrlc;
|
||||
@@ -41,16 +48,38 @@ mod sudo;
|
||||
mod terminal;
|
||||
mod utils;
|
||||
|
||||
pub static HOME_DIR: Lazy<PathBuf> = Lazy::new(|| home::home_dir().expect("No home directory"));
|
||||
pub static XDG_DIRS: Lazy<Xdg> = Lazy::new(|| Xdg::new().expect("No home directory"));
|
||||
pub(crate) static HOME_DIR: Lazy<PathBuf> = Lazy::new(|| home::home_dir().expect("No home directory"));
|
||||
#[cfg(unix)]
|
||||
pub(crate) static XDG_DIRS: Lazy<Xdg> = Lazy::new(|| Xdg::new().expect("No home directory"));
|
||||
|
||||
#[cfg(windows)]
|
||||
pub static WINDOWS_DIRS: Lazy<Windows> = Lazy::new(|| Windows::new().expect("No home directory"));
|
||||
pub(crate) static WINDOWS_DIRS: Lazy<Windows> = Lazy::new(|| Windows::new().expect("No home directory"));
|
||||
|
||||
// Init and load the i18n files
|
||||
i18n!("locales", fallback = "en");
|
||||
|
||||
fn run() -> Result<()> {
|
||||
color_eyre::install()?;
|
||||
install_color_eyre()?;
|
||||
ctrlc::set_handler();
|
||||
|
||||
let opt = CommandLineArgs::parse();
|
||||
// Set up the logger with the filter directives from:
|
||||
// 1. CLI option `--log-filter`
|
||||
// 2. `debug` if the `--verbose` option is present
|
||||
// We do this because we need our logger to work while loading the
|
||||
// configuration file.
|
||||
//
|
||||
// When the configuration file is loaded, update the logger with the full
|
||||
// filter directives.
|
||||
//
|
||||
// For more info, see the comments in `CommandLineArgs::tracing_filter_directives()`
|
||||
// and `Config::tracing_filter_directives()`.
|
||||
let reload_handle = install_tracing(&opt.tracing_filter_directives())?;
|
||||
|
||||
// Get current system locale and set it as the default locale
|
||||
let system_locale = sys_locale::get_locale().unwrap_or("en".to_string());
|
||||
rust_i18n::set_locale(&system_locale);
|
||||
debug!("Current system locale is {system_locale}");
|
||||
|
||||
if let Some(shell) = opt.gen_completion {
|
||||
let cmd = &mut CommandLineArgs::command();
|
||||
@@ -64,8 +93,6 @@ fn run() -> Result<()> {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
install_tracing(&opt.tracing_filter_directives())?;
|
||||
|
||||
for env in opt.env_variables() {
|
||||
let mut splitted = env.split('=');
|
||||
let var = splitted.next().unwrap();
|
||||
@@ -84,6 +111,8 @@ fn run() -> Result<()> {
|
||||
}
|
||||
|
||||
let config = Config::load(opt)?;
|
||||
// Update the logger with the full filter directives.
|
||||
update_tracing(&reload_handle, &config.tracing_filter_directives())?;
|
||||
set_title(config.set_title());
|
||||
display_time(config.display_time());
|
||||
set_desktop_notifications(config.notify_each_step());
|
||||
@@ -92,18 +121,17 @@ fn run() -> Result<()> {
|
||||
debug!("OS: {}", env!("TARGET"));
|
||||
debug!("{:?}", std::env::args());
|
||||
debug!("Binary path: {:?}", std::env::current_exe());
|
||||
debug!("Self Update: {:?}", cfg!(feature = "self-update"));
|
||||
debug!("self-update Feature Enabled: {:?}", cfg!(feature = "self-update"));
|
||||
debug!("Configuration: {:?}", config);
|
||||
|
||||
if config.run_in_tmux() && env::var("TOPGRADE_INSIDE_TMUX").is_err() {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
tmux::run_in_tmux(config.tmux_arguments()?)?;
|
||||
tmux::run_in_tmux(config.tmux_config()?)?;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
let git = git::Git::new();
|
||||
let mut git_repos = git::Repositories::new(&git);
|
||||
let powershell = powershell::Powershell::new();
|
||||
let should_run_powershell = powershell.profile().is_some() && config.should_run(Step::Powershell);
|
||||
let emacs = emacs::Emacs::new();
|
||||
@@ -112,25 +140,34 @@ fn run() -> Result<()> {
|
||||
|
||||
let sudo = config.sudo_command().map_or_else(sudo::Sudo::detect, sudo::Sudo::new);
|
||||
let run_type = executor::RunType::new(config.dry_run());
|
||||
let ctx = execution_context::ExecutionContext::new(run_type, sudo, &git, &config);
|
||||
let ctx = execution_context::ExecutionContext::new(run_type, sudo, &config);
|
||||
let mut runner = runner::Runner::new(&ctx);
|
||||
|
||||
// If
|
||||
//
|
||||
// 1. the breaking changes notification should not be skipped
|
||||
// 2. this is the first execution of a major release
|
||||
//
|
||||
// inform user of breaking changes
|
||||
if !should_skip() && first_run_of_major_release()? {
|
||||
print_breaking_changes();
|
||||
|
||||
if prompt_yesno("Confirmed?")? {
|
||||
write_keep_file()?;
|
||||
} else {
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Self-Update step, this will execute only if:
|
||||
// 1. the `self-update` feature is enabled
|
||||
// 2. it is not disabled from configuration (env var/CLI opt/file)
|
||||
#[cfg(feature = "self-update")]
|
||||
{
|
||||
let config_self_upgrade = env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() && !config.no_self_update();
|
||||
let should_self_update = env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() && !config.no_self_update();
|
||||
|
||||
if !run_type.dry() && config_self_upgrade {
|
||||
let result = self_update::self_update();
|
||||
|
||||
if let Err(e) = &result {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
if e.downcast_ref::<Upgraded>().is_some() {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
print_warning(format!("Self update error: {e}"));
|
||||
}
|
||||
if should_self_update {
|
||||
runner.execute(Step::SelfUpdate, "Self Update", || self_update::self_update(&ctx))?;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -154,7 +191,7 @@ fn run() -> Result<()> {
|
||||
}
|
||||
|
||||
if let Some(topgrades) = config.remote_topgrades() {
|
||||
for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(t)) {
|
||||
for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(hostname(), t)) {
|
||||
runner.execute(Step::Remotes, format!("Remote ({remote_topgrade})"), || {
|
||||
ssh::ssh_step(&ctx, remote_topgrade)
|
||||
})?;
|
||||
@@ -182,20 +219,15 @@ fn run() -> Result<()> {
|
||||
runner.execute(Step::System, "System update", || distribution.upgrade(&ctx))?;
|
||||
}
|
||||
Err(e) => {
|
||||
println!("Error detecting current distribution: {e}");
|
||||
println!("{}", t!("Error detecting current distribution: {error}", error = e));
|
||||
}
|
||||
}
|
||||
runner.execute(Step::ConfigUpdate, "config-update", || linux::run_config_update(&ctx))?;
|
||||
|
||||
runner.execute(Step::BrewFormula, "Brew", || {
|
||||
unix::run_brew_formula(&ctx, unix::BrewVariant::Path)
|
||||
})?;
|
||||
|
||||
runner.execute(Step::AM, "am", || linux::run_am(&ctx))?;
|
||||
runner.execute(Step::AppMan, "appman", || linux::run_appman(&ctx))?;
|
||||
runner.execute(Step::DebGet, "deb-get", || linux::run_deb_get(&ctx))?;
|
||||
runner.execute(Step::Toolbx, "toolbx", || toolbx::run_toolbx(&ctx))?;
|
||||
runner.execute(Step::Flatpak, "Flatpak", || linux::run_flatpak(&ctx))?;
|
||||
runner.execute(Step::Snap, "snap", || linux::run_snap(&ctx))?;
|
||||
runner.execute(Step::Pacstall, "pacstall", || linux::run_pacstall(&ctx))?;
|
||||
runner.execute(Step::Pacdef, "pacdef", || linux::run_pacdef(&ctx))?;
|
||||
@@ -205,6 +237,14 @@ fn run() -> Result<()> {
|
||||
runner.execute(Step::System, "pihole", || linux::run_pihole_update(&ctx))?;
|
||||
runner.execute(Step::Firmware, "Firmware upgrades", || linux::run_fwupdmgr(&ctx))?;
|
||||
runner.execute(Step::Restarts, "Restarts", || linux::run_needrestart(&ctx))?;
|
||||
|
||||
runner.execute(Step::Flatpak, "Flatpak", || linux::run_flatpak(&ctx))?;
|
||||
runner.execute(Step::BrewFormula, "Brew", || {
|
||||
unix::run_brew_formula(&ctx, unix::BrewVariant::Path)
|
||||
})?;
|
||||
runner.execute(Step::Lure, "LURE", || linux::run_lure_update(&ctx))?;
|
||||
runner.execute(Step::Waydroid, "Waydroid", || linux::run_waydroid(&ctx))?;
|
||||
runner.execute(Step::AutoCpufreq, "auto-cpufreq", || linux::run_auto_cpufreq(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
@@ -228,6 +268,7 @@ fn run() -> Result<()> {
|
||||
unix::run_brew_cask(&ctx, unix::BrewVariant::Path)
|
||||
})?;
|
||||
runner.execute(Step::Macports, "MacPorts", || macos::run_macports(&ctx))?;
|
||||
runner.execute(Step::Xcodes, "Xcodes", || macos::update_xcodes(&ctx))?;
|
||||
runner.execute(Step::Sparkle, "Sparkle", || macos::run_sparkle(&ctx))?;
|
||||
runner.execute(Step::Mas, "App Store", || macos::run_mas(&ctx))?;
|
||||
runner.execute(Step::System, "System upgrade", || macos::upgrade_macos(&ctx))?;
|
||||
@@ -238,14 +279,14 @@ fn run() -> Result<()> {
|
||||
runner.execute(Step::Pkg, "DragonFly BSD Packages", || {
|
||||
dragonfly::upgrade_packages(&ctx)
|
||||
})?;
|
||||
dragonfly::audit_packages(&ctx)?;
|
||||
runner.execute(Step::Audit, "DragonFly Audit", || dragonfly::audit_packages(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "FreeBSD Packages", || freebsd::upgrade_packages(&ctx))?;
|
||||
runner.execute(Step::System, "FreeBSD Upgrade", || freebsd::upgrade_freebsd(&ctx))?;
|
||||
freebsd::audit_packages(&ctx)?;
|
||||
runner.execute(Step::Audit, "FreeBSD Audit", || freebsd::audit_packages(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "openbsd")]
|
||||
@@ -263,11 +304,13 @@ fn run() -> Result<()> {
|
||||
{
|
||||
runner.execute(Step::Yadm, "yadm", || unix::run_yadm(&ctx))?;
|
||||
runner.execute(Step::Nix, "nix", || unix::run_nix(&ctx))?;
|
||||
runner.execute(Step::Nix, "nix upgrade-nix", || unix::run_nix_self_upgrade(&ctx))?;
|
||||
runner.execute(Step::Guix, "guix", || unix::run_guix(&ctx))?;
|
||||
runner.execute(Step::HomeManager, "home-manager", || unix::run_home_manager(&ctx))?;
|
||||
runner.execute(Step::Asdf, "asdf", || unix::run_asdf(&ctx))?;
|
||||
runner.execute(Step::Mise, "mise", || unix::run_mise(&ctx))?;
|
||||
runner.execute(Step::Pkgin, "pkgin", || unix::run_pkgin(&ctx))?;
|
||||
runner.execute(Step::Bun, "bun", || unix::run_bun(&ctx))?;
|
||||
runner.execute(Step::BunPackages, "bun-packages", || unix::run_bun_packages(&ctx))?;
|
||||
runner.execute(Step::Shell, "zr", || zsh::run_zr(&ctx))?;
|
||||
runner.execute(Step::Shell, "antibody", || zsh::run_antibody(&ctx))?;
|
||||
runner.execute(Step::Shell, "antidote", || zsh::run_antidote(&ctx))?;
|
||||
@@ -291,6 +334,7 @@ fn run() -> Result<()> {
|
||||
runner.execute(Step::GnomeShellExtensions, "Gnome Shell Extensions", || {
|
||||
unix::upgrade_gnome_extensions(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Pyenv, "pyenv", || unix::run_pyenv(&ctx))?;
|
||||
runner.execute(Step::Sdkman, "SDKMAN!", || unix::run_sdkman(&ctx))?;
|
||||
runner.execute(Step::Rcm, "rcm", || unix::run_rcm(&ctx))?;
|
||||
runner.execute(Step::Maza, "maza", || unix::run_maza(&ctx))?;
|
||||
@@ -308,6 +352,8 @@ fn run() -> Result<()> {
|
||||
|
||||
// The following update function should be executed on all OSes.
|
||||
runner.execute(Step::Fossil, "fossil", || generic::run_fossil(&ctx))?;
|
||||
runner.execute(Step::Elan, "elan", || generic::run_elan(&ctx))?;
|
||||
runner.execute(Step::Rye, "rye", || generic::run_rye(&ctx))?;
|
||||
runner.execute(Step::Rustup, "rustup", || generic::run_rustup(&ctx))?;
|
||||
runner.execute(Step::Juliaup, "juliaup", || generic::run_juliaup(&ctx))?;
|
||||
runner.execute(Step::Dotnet, ".NET", || generic::run_dotnet_upgrade(&ctx))?;
|
||||
@@ -320,8 +366,13 @@ fn run() -> Result<()> {
|
||||
runner.execute(Step::Opam, "opam", || generic::run_opam_update(&ctx))?;
|
||||
runner.execute(Step::Vcpkg, "vcpkg", || generic::run_vcpkg_update(&ctx))?;
|
||||
runner.execute(Step::Pipx, "pipx", || generic::run_pipx_update(&ctx))?;
|
||||
runner.execute(Step::Vscode, "Visual Studio Code extensions", || {
|
||||
generic::run_vscode_extensions_update(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Conda, "conda", || generic::run_conda_update(&ctx))?;
|
||||
runner.execute(Step::Mamba, "mamba", || generic::run_mamba_update(&ctx))?;
|
||||
runner.execute(Step::Pixi, "pixi", || generic::run_pixi_update(&ctx))?;
|
||||
runner.execute(Step::Miktex, "miktex", || generic::run_miktex_packages_update(&ctx))?;
|
||||
runner.execute(Step::Pip3, "pip3", || generic::run_pip3_update(&ctx))?;
|
||||
runner.execute(Step::PipReview, "pip-review", || generic::run_pip_review_update(&ctx))?;
|
||||
runner.execute(Step::PipReviewLocal, "pip-review (local)", || {
|
||||
@@ -343,6 +394,9 @@ fn run() -> Result<()> {
|
||||
runner.execute(Step::Node, "npm", || node::run_npm_upgrade(&ctx))?;
|
||||
runner.execute(Step::Yarn, "yarn", || node::run_yarn_upgrade(&ctx))?;
|
||||
runner.execute(Step::Pnpm, "pnpm", || node::run_pnpm_upgrade(&ctx))?;
|
||||
runner.execute(Step::VoltaPackages, "volta packages", || {
|
||||
node::run_volta_packages_upgrade(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Containers, "Containers", || containers::run_containers(&ctx))?;
|
||||
runner.execute(Step::Deno, "deno", || node::deno_upgrade(&ctx))?;
|
||||
runner.execute(Step::Composer, "composer", || generic::run_composer_update(&ctx))?;
|
||||
@@ -364,67 +418,20 @@ fn run() -> Result<()> {
|
||||
generic::run_ghcli_extensions_upgrade(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Bob, "Bob", || generic::run_bob(&ctx))?;
|
||||
|
||||
if config.use_predefined_git_repos() {
|
||||
if config.should_run(Step::Emacs) {
|
||||
if !emacs.is_doom() {
|
||||
if let Some(directory) = emacs.directory() {
|
||||
git_repos.insert_if_repo(directory);
|
||||
}
|
||||
}
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".doom.d"));
|
||||
}
|
||||
|
||||
if config.should_run(Step::Vim) {
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".vim"));
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".config/nvim"));
|
||||
}
|
||||
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".ideavimrc"));
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".intellimacs"));
|
||||
|
||||
if config.should_run(Step::Rcm) {
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".dotfiles"));
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
git_repos.insert_if_repo(zsh::zshrc());
|
||||
if config.should_run(Step::Tmux) {
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".tmux"));
|
||||
}
|
||||
git_repos.insert_if_repo(HOME_DIR.join(".config/fish"));
|
||||
git_repos.insert_if_repo(XDG_DIRS.config_dir().join("openbox"));
|
||||
git_repos.insert_if_repo(XDG_DIRS.config_dir().join("bspwm"));
|
||||
git_repos.insert_if_repo(XDG_DIRS.config_dir().join("i3"));
|
||||
git_repos.insert_if_repo(XDG_DIRS.config_dir().join("sway"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
git_repos.insert_if_repo(
|
||||
WINDOWS_DIRS
|
||||
.cache_dir()
|
||||
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
|
||||
);
|
||||
|
||||
#[cfg(windows)]
|
||||
windows::insert_startup_scripts(&mut git_repos).ok();
|
||||
|
||||
if let Some(profile) = powershell.profile() {
|
||||
git_repos.insert_if_repo(profile);
|
||||
}
|
||||
}
|
||||
|
||||
if config.should_run(Step::GitRepos) {
|
||||
if let Some(custom_git_repos) = config.git_repos() {
|
||||
for git_repo in custom_git_repos {
|
||||
git_repos.glob_insert(git_repo);
|
||||
}
|
||||
}
|
||||
runner.execute(Step::GitRepos, "Git repositories", || {
|
||||
git.multi_pull_step(&git_repos, &ctx)
|
||||
})?;
|
||||
}
|
||||
runner.execute(Step::Certbot, "Certbot", || generic::run_certbot(&ctx))?;
|
||||
runner.execute(Step::GitRepos, "Git Repositories", || git::run_git_pull(&ctx))?;
|
||||
runner.execute(Step::ClamAvDb, "ClamAV Databases", || generic::run_freshclam(&ctx))?;
|
||||
runner.execute(Step::PlatformioCore, "PlatformIO Core", || {
|
||||
generic::run_platform_io(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Lensfun, "Lensfun's database update", || {
|
||||
generic::run_lensfun_update_data(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Poetry, "Poetry", || generic::run_poetry(&ctx))?;
|
||||
runner.execute(Step::Uv, "uv", || generic::run_uv(&ctx))?;
|
||||
runner.execute(Step::Zvm, "ZVM", || generic::run_zvm(&ctx))?;
|
||||
runner.execute(Step::Aqua, "aqua", || generic::run_aqua(&ctx))?;
|
||||
runner.execute(Step::Bun, "bun", || generic::run_bun(&ctx))?;
|
||||
|
||||
if should_run_powershell {
|
||||
runner.execute(Step::Powershell, "Powershell Modules Update", || {
|
||||
@@ -454,7 +461,7 @@ fn run() -> Result<()> {
|
||||
runner.execute(Step::Vagrant, "Vagrant boxes", || vagrant::upgrade_vagrant_boxes(&ctx))?;
|
||||
|
||||
if !runner.report().data().is_empty() {
|
||||
print_separator("Summary");
|
||||
print_separator(t!("Summary"));
|
||||
|
||||
for (key, result) in runner.report().data() {
|
||||
print_result(key, result);
|
||||
@@ -478,7 +485,7 @@ fn run() -> Result<()> {
|
||||
}
|
||||
|
||||
if config.keep_at_end() {
|
||||
print_info("\n(R)eboot\n(S)hell\n(Q)uit");
|
||||
print_info(t!("\n(R)eboot\n(S)hell\n(Q)uit"));
|
||||
loop {
|
||||
match get_key() {
|
||||
Ok(Key::Char('s')) | Ok(Key::Char('S')) => {
|
||||
@@ -500,10 +507,11 @@ fn run() -> Result<()> {
|
||||
|
||||
if !config.skip_notify() {
|
||||
notify_desktop(
|
||||
format!(
|
||||
"Topgrade finished {}",
|
||||
if failed { "with errors" } else { "successfully" }
|
||||
),
|
||||
if failed {
|
||||
t!("Topgrade finished with errors")
|
||||
} else {
|
||||
t!("Topgrade finished successfully")
|
||||
},
|
||||
Some(Duration::from_secs(10)),
|
||||
)
|
||||
}
|
||||
@@ -538,32 +546,9 @@ fn main() {
|
||||
// The `Debug` implementation of `eyre::Result` prints a multi-line
|
||||
// error message that includes all the 'causes' added with
|
||||
// `.with_context(...)` calls.
|
||||
println!("Error: {error:?}");
|
||||
println!("{}", t!("Error: {error}", error = format!("{:?}", error)));
|
||||
}
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn install_tracing(filter_directives: &str) -> Result<()> {
|
||||
use tracing_subscriber::fmt;
|
||||
use tracing_subscriber::fmt::format::FmtSpan;
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
let env_filter = EnvFilter::try_new(filter_directives)
|
||||
.or_else(|_| EnvFilter::try_from_default_env())
|
||||
.or_else(|_| EnvFilter::try_new("info"))?;
|
||||
|
||||
let fmt_layer = fmt::layer()
|
||||
.with_target(false)
|
||||
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
|
||||
.without_time();
|
||||
|
||||
let registry = tracing_subscriber::registry();
|
||||
|
||||
registry.with(env_filter).with(fmt_layer).init();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
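The removed `install_tracing` above built the subscriber once and called `init()`, which cannot be changed afterwards; the new flow returns a reload handle so `update_tracing` can tighten the filter once the configuration file has been parsed. A hedged sketch of that shape using `tracing_subscriber`'s reload layer (an approximation, not topgrade's actual `utils::install_tracing`/`update_tracing`; it assumes the `env-filter` feature is enabled):

// Sketch only: return type and helper names are assumptions.
use color_eyre::eyre::Result;
use tracing_subscriber::{fmt, layer::SubscriberExt, reload, util::SubscriberInitExt, EnvFilter, Registry};

fn install_tracing(directives: &str) -> Result<reload::Handle<EnvFilter, Registry>> {
    let filter = EnvFilter::try_new(directives).or_else(|_| EnvFilter::try_new("info"))?;
    // Wrap the filter in a reload layer and keep the handle around.
    let (filter, handle) = reload::Layer::new(filter);

    tracing_subscriber::registry()
        .with(filter)
        .with(fmt::layer().with_target(false).without_time())
        .init();

    Ok(handle)
}

fn update_tracing(handle: &reload::Handle<EnvFilter, Registry>, directives: &str) -> Result<()> {
    // Swap in the full filter directives after the configuration file is loaded.
    handle.reload(EnvFilter::try_new(directives)?)?;
    Ok(())
}

In the main.rs hunk above, the returned handle is kept as `reload_handle` and `update_tracing(&reload_handle, &config.tracing_filter_directives())` is called right after `Config::load`.
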
@@ -34,6 +34,14 @@ impl<'a> Runner<'a> {
let key = key.into();
debug!("Step {:?}", key);

// alter the `func` to put it in a span
let func = || {
let span =
tracing::span!(parent: tracing::Span::none(), tracing::Level::TRACE, "step", step = ?step, key = %key);
let _guard = span.enter();
func()
};

loop {
match func() {
Ok(()) => {

@@ -1,5 +1,3 @@
#![cfg(windows)]

use color_eyre::eyre::Result;
use std::{env::current_exe, fs, path::PathBuf};
use tracing::{debug, error};

@@ -3,7 +3,9 @@ use std::env;
|
||||
use std::os::unix::process::CommandExt as _;
|
||||
use std::process::Command;
|
||||
|
||||
use crate::config::Step;
|
||||
use color_eyre::eyre::{bail, Result};
|
||||
use rust_i18n::t;
|
||||
use self_update_crate::backends::github::Update;
|
||||
use self_update_crate::update::UpdateStatus;
|
||||
|
||||
@@ -11,52 +13,61 @@ use super::terminal::*;
|
||||
#[cfg(windows)]
|
||||
use crate::error::Upgraded;
|
||||
|
||||
pub fn self_update() -> Result<()> {
|
||||
print_separator("Self update");
|
||||
let current_exe = env::current_exe();
|
||||
use crate::execution_context::ExecutionContext;
|
||||
|
||||
let target = self_update_crate::get_target();
|
||||
let result = Update::configure()
|
||||
.repo_owner("topgrade-rs")
|
||||
.repo_name("topgrade")
|
||||
.target(target)
|
||||
.bin_name(if cfg!(windows) { "topgrade.exe" } else { "topgrade" })
|
||||
.show_output(false)
|
||||
.show_download_progress(true)
|
||||
.current_version(self_update_crate::cargo_crate_version!())
|
||||
.no_confirm(true)
|
||||
.build()?
|
||||
.update_extended()?;
|
||||
pub fn self_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator(t!("Self update"));
|
||||
|
||||
if let UpdateStatus::Updated(release) = &result {
|
||||
println!("\nTopgrade upgraded to {}:\n", release.version);
|
||||
if let Some(body) = &release.body {
|
||||
println!("{body}");
|
||||
}
|
||||
if ctx.run_type().dry() {
|
||||
println!("{}", t!("Would self-update"));
|
||||
Ok(())
|
||||
} else {
|
||||
println!("Topgrade is up-to-date");
|
||||
}
|
||||
let assume_yes = ctx.config().yes(Step::SelfUpdate);
|
||||
let current_exe = env::current_exe();
|
||||
|
||||
{
|
||||
if result.updated() {
|
||||
print_warning("Respawning...");
|
||||
let mut command = Command::new(current_exe?);
|
||||
command.args(env::args().skip(1)).env("TOPGRADE_NO_SELF_UPGRADE", "");
|
||||
let target = self_update_crate::get_target();
|
||||
let result = Update::configure()
|
||||
.repo_owner("topgrade-rs")
|
||||
.repo_name("topgrade")
|
||||
.target(target)
|
||||
.bin_name(if cfg!(windows) { "topgrade.exe" } else { "topgrade" })
|
||||
.show_output(true)
|
||||
.show_download_progress(true)
|
||||
.current_version(self_update_crate::cargo_crate_version!())
|
||||
.no_confirm(assume_yes)
|
||||
.build()?
|
||||
.update_extended()?;
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
let err = command.exec();
|
||||
bail!(err);
|
||||
if let UpdateStatus::Updated(release) = &result {
|
||||
println!("{}", t!("Topgrade upgraded to {version}:\n", version = release.version));
|
||||
if let Some(body) = &release.body {
|
||||
println!("{body}");
|
||||
}
|
||||
} else {
|
||||
println!("{}", t!("Topgrade is up-to-date"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let status = command.status()?;
|
||||
bail!(Upgraded(status));
|
||||
{
|
||||
if result.updated() {
|
||||
print_info(t!("Respawning..."));
|
||||
let mut command = Command::new(current_exe?);
|
||||
command.args(env::args().skip(1)).env("TOPGRADE_NO_SELF_UPGRADE", "");
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
let err = command.exec();
|
||||
bail!(err);
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let status = command.status()?;
|
||||
bail!(Upgraded(status));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,168 +1,196 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use tracing::{debug, error, warn};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::error::{self, TopgradeError};
|
||||
use crate::terminal::print_separator;
|
||||
use crate::{execution_context::ExecutionContext, utils::require};
|
||||
|
||||
// A string found in the output of docker for containers that weren't found in
|
||||
// the docker registry. We use this to gracefully handle and skip containers
|
||||
// that cannot be pulled, likely because they don't exist in the registry in
|
||||
// the first place. This happens e.g. when the user tags an image locally
|
||||
// themselves or when using docker-compose.
|
||||
const NONEXISTENT_REPO: &str = "repository does not exist";
|
||||
|
||||
/// Uniquely identifies a `Container`.
|
||||
#[derive(Debug)]
|
||||
struct Container {
|
||||
/// `Repository` and `Tag`
|
||||
///
|
||||
/// format: `Repository:Tag`, e.g., `nixos/nix:latest`.
|
||||
repo_tag: String,
|
||||
/// Platform
|
||||
///
|
||||
/// format: `OS/Architecture`, e.g., `linux/amd64`.
|
||||
platform: String,
|
||||
}
|
||||
|
||||
impl Container {
|
||||
/// Construct a new `Container`.
|
||||
fn new(repo_tag: String, platform: String) -> Self {
|
||||
Self { repo_tag, platform }
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Container {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
// e.g., "`fedora:latest` for `linux/amd64`"
|
||||
write!(f, "`{}` for `{}`", self.repo_tag, self.platform)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a Vector of all containers, with Strings in the format
|
||||
/// "REGISTRY/[PATH/]CONTAINER_NAME:TAG"
|
||||
fn list_containers(crt: &Path) -> Result<Vec<Container>> {
|
||||
debug!(
|
||||
"Querying '{} image ls --format \"{{{{.Repository}}}}:{{{{.Tag}}}}/{{{{.ID}}}}\"' for containers",
|
||||
crt.display()
|
||||
);
|
||||
let output = Command::new(crt)
|
||||
.args(["image", "ls", "--format", "{{.Repository}}:{{.Tag}} {{.ID}}"])
|
||||
.output_checked_with_utf8(|_| Ok(()))?;
|
||||
|
||||
let mut retval = vec![];
|
||||
for line in output.stdout.lines() {
|
||||
if line.starts_with("localhost") {
|
||||
// Don't know how to update self-built containers
|
||||
debug!("Skipping self-built container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.contains("<none>") {
|
||||
// Bogus/dangling container or intermediate layer
|
||||
debug!("Skipping bogus container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.starts_with("vsc-") {
|
||||
debug!("Skipping visual studio code dev container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
debug!("Using container '{}'", line);
|
||||
|
||||
// line is of format: `Repository:Tag ImageID`, e.g., `nixos/nix:latest d80fea9c32b4`
|
||||
let split_res = line.split(' ').collect::<Vec<&str>>();
|
||||
assert_eq!(split_res.len(), 2);
|
||||
let (repo_tag, image_id) = (split_res[0], split_res[1]);
|
||||
|
||||
debug!(
|
||||
"Querying '{} image inspect --format \"{{{{.Os}}}}/{{{{.Architecture}}}}\"' for container {}",
|
||||
crt.display(),
|
||||
image_id
|
||||
);
|
||||
let inspect_output = Command::new(crt)
|
||||
.args(["image", "inspect", image_id, "--format", "{{.Os}}/{{.Architecture}}"])
|
||||
.output_checked_with_utf8(|_| Ok(()))?;
|
||||
let mut platform = inspect_output.stdout;
|
||||
// truncate the tailing new line character
|
||||
platform.truncate(platform.len() - 1);
|
||||
assert!(platform.contains('/'));
|
||||
|
||||
retval.push(Container::new(repo_tag.to_string(), platform));
|
||||
}
|
||||
|
||||
Ok(retval)
|
||||
}
|
||||
|
||||
pub fn run_containers(ctx: &ExecutionContext) -> Result<()> {
|
||||
// Prefer podman, fall back to docker if not present
|
||||
let crt = require("podman").or_else(|_| require("docker"))?;
|
||||
debug!("Using container runtime '{}'", crt.display());
|
||||
|
||||
print_separator("Containers");
|
||||
let mut success = true;
|
||||
let containers = list_containers(&crt).context("Failed to list Docker containers")?;
|
||||
debug!("Containers to inspect: {:?}", containers);
|
||||
|
||||
for container in containers.iter() {
|
||||
debug!("Pulling container '{}'", container);
|
||||
let args = vec![
|
||||
"pull",
|
||||
container.repo_tag.as_str(),
|
||||
"--platform",
|
||||
container.platform.as_str(),
|
||||
];
|
||||
let mut exec = ctx.run_type().execute(&crt);
|
||||
|
||||
if let Err(e) = exec.args(&args).status_checked() {
|
||||
error!("Pulling container '{}' failed: {}", container, e);
|
||||
|
||||
// Find out if this is 'skippable'
|
||||
// This is necessary e.g. for docker, because unlike podman docker doesn't tell from
|
||||
// which repository a container originates (such as `docker.io`). This has the
|
||||
// practical consequence that all containers, whether self-built, created by
|
||||
// docker-compose or pulled from the docker hub, look exactly the same to us. We can
|
||||
// only find out what went wrong by manually parsing the output of the command...
|
||||
if match exec.output_checked_utf8() {
|
||||
Ok(s) => s.stdout.contains(NONEXISTENT_REPO) || s.stderr.contains(NONEXISTENT_REPO),
|
||||
Err(e) => match e.downcast_ref::<TopgradeError>() {
|
||||
Some(TopgradeError::ProcessFailedWithOutput(_, _, stderr)) => stderr.contains(NONEXISTENT_REPO),
|
||||
_ => false,
|
||||
},
|
||||
} {
|
||||
warn!("Skipping unknown container '{}'", container);
|
||||
continue;
|
||||
}
|
||||
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
// Remove dangling images
|
||||
debug!("Removing dangling images");
|
||||
if let Err(e) = ctx
|
||||
.run_type()
|
||||
.execute(&crt)
|
||||
.args(["image", "prune", "-f"])
|
||||
.status_checked()
|
||||
{
|
||||
error!("Removing dangling images failed: {}", e);
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
if success {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(eyre!(error::StepFailed))
|
||||
}
|
||||
}
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use tracing::{debug, error, warn};
|
||||
use wildmatch::WildMatch;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::error::{self, TopgradeError};
|
||||
use crate::terminal::print_separator;
|
||||
use crate::{execution_context::ExecutionContext, utils::require};
|
||||
use rust_i18n::t;
|
||||
|
||||
// A string found in the output of docker for containers that weren't found in
|
||||
// the docker registry. We use this to gracefully handle and skip containers
|
||||
// that cannot be pulled, likely because they don't exist in the registry in
|
||||
// the first place. This happens e.g. when the user tags an image locally
|
||||
// themselves or when using docker-compose.
|
||||
const NONEXISTENT_REPO: &str = "repository does not exist";
|
||||
|
||||
/// Uniquely identifies a `Container`.
|
||||
#[derive(Debug)]
|
||||
struct Container {
|
||||
/// `Repository` and `Tag`
|
||||
///
|
||||
/// format: `Repository:Tag`, e.g., `nixos/nix:latest`.
|
||||
repo_tag: String,
|
||||
/// Platform
|
||||
///
|
||||
/// format: `OS/Architecture`, e.g., `linux/amd64`.
|
||||
platform: String,
|
||||
}
|
||||
|
||||
impl Container {
|
||||
/// Construct a new `Container`.
|
||||
fn new(repo_tag: String, platform: String) -> Self {
|
||||
Self { repo_tag, platform }
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Container {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
// e.g., "`fedora:latest` for `linux/amd64`"
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
t!(
|
||||
"`{repo_tag}` for `{platform}`",
|
||||
repo_tag = self.repo_tag,
|
||||
platform = self.platform
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a Vector of all containers, with Strings in the format
|
||||
/// "REGISTRY/[PATH/]CONTAINER_NAME:TAG"
|
||||
///
|
||||
/// Containers specified in `ignored_containers` will be filtered out.
|
||||
fn list_containers(crt: &Path, ignored_containers: Option<&Vec<String>>) -> Result<Vec<Container>> {
|
||||
let ignored_containers = ignored_containers.map(|patterns| {
|
||||
patterns
|
||||
.iter()
|
||||
.map(|pattern| WildMatch::new(pattern))
|
||||
.collect::<Vec<WildMatch>>()
|
||||
});
|
||||
|
||||
debug!(
|
||||
"Querying '{} image ls --format \"{{{{.Repository}}}}:{{{{.Tag}}}}/{{{{.ID}}}}\"' for containers",
|
||||
crt.display()
|
||||
);
|
||||
let output = Command::new(crt)
|
||||
.args(["image", "ls", "--format", "{{.Repository}}:{{.Tag}} {{.ID}}"])
|
||||
.output_checked_with_utf8(|_| Ok(()))?;
|
||||
|
||||
let mut retval = vec![];
|
||||
for line in output.stdout.lines() {
|
||||
if line.starts_with("localhost") {
|
||||
// Don't know how to update self-built containers
|
||||
debug!("Skipping self-built container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.contains("<none>") {
|
||||
// Bogus/dangling container or intermediate layer
|
||||
debug!("Skipping bogus container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.starts_with("vsc-") {
|
||||
debug!("Skipping visual studio code dev container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
debug!("Using container '{}'", line);
|
||||
|
||||
// line is of format: `Repository:Tag ImageID`, e.g., `nixos/nix:latest d80fea9c32b4`
|
||||
let split_res = line.split(' ').collect::<Vec<&str>>();
|
||||
assert_eq!(split_res.len(), 2);
|
||||
let (repo_tag, image_id) = (split_res[0], split_res[1]);
|
||||
|
||||
if let Some(ref ignored_containers) = ignored_containers {
|
||||
if ignored_containers.iter().any(|pattern| pattern.matches(repo_tag)) {
|
||||
debug!("Skipping ignored container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
debug!(
|
||||
"Querying '{} image inspect --format \"{{{{.Os}}}}/{{{{.Architecture}}}}\"' for container {}",
|
||||
crt.display(),
|
||||
image_id
|
||||
);
|
||||
let inspect_output = Command::new(crt)
|
||||
.args(["image", "inspect", image_id, "--format", "{{.Os}}/{{.Architecture}}"])
|
||||
.output_checked_with_utf8(|_| Ok(()))?;
|
||||
let mut platform = inspect_output.stdout;
|
||||
// truncate the trailing newline character
|
||||
platform.truncate(platform.len() - 1);
|
||||
assert!(platform.contains('/'));
|
||||
|
||||
retval.push(Container::new(repo_tag.to_string(), platform));
|
||||
}
|
||||
|
||||
Ok(retval)
|
||||
}
|
||||
|
||||
pub fn run_containers(ctx: &ExecutionContext) -> Result<()> {
|
||||
// Check what runtime is specified in the config
|
||||
let container_runtime = ctx.config().containers_runtime().to_string();
|
||||
let crt = require(container_runtime)?;
|
||||
debug!("Using container runtime '{}'", crt.display());
|
||||
|
||||
print_separator(t!("Containers"));
|
||||
let mut success = true;
|
||||
let containers =
|
||||
list_containers(&crt, ctx.config().containers_ignored_tags()).context("Failed to list Docker containers")?;
|
||||
debug!("Containers to inspect: {:?}", containers);
|
||||
|
||||
for container in containers.iter() {
|
||||
debug!("Pulling container '{}'", container);
|
||||
let args = vec![
|
||||
"pull",
|
||||
container.repo_tag.as_str(),
|
||||
"--platform",
|
||||
container.platform.as_str(),
|
||||
];
|
||||
let mut exec = ctx.run_type().execute(&crt);
|
||||
|
||||
if let Err(e) = exec.args(&args).status_checked() {
|
||||
error!("Pulling container '{}' failed: {}", container, e);
|
||||
|
||||
// Find out if this is 'skippable'
|
||||
// This is necessary e.g. for docker, because unlike podman docker doesn't tell from
|
||||
// which repository a container originates (such as `docker.io`). This has the
|
||||
// practical consequence that all containers, whether self-built, created by
|
||||
// docker-compose or pulled from the docker hub, look exactly the same to us. We can
|
||||
// only find out what went wrong by manually parsing the output of the command...
|
||||
if match exec.output_checked_utf8() {
|
||||
Ok(s) => s.stdout.contains(NONEXISTENT_REPO) || s.stderr.contains(NONEXISTENT_REPO),
|
||||
Err(e) => match e.downcast_ref::<TopgradeError>() {
|
||||
Some(TopgradeError::ProcessFailedWithOutput(_, _, stderr)) => stderr.contains(NONEXISTENT_REPO),
|
||||
_ => false,
|
||||
},
|
||||
} {
|
||||
warn!("Skipping unknown container '{}'", container);
|
||||
continue;
|
||||
}
|
||||
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
// Remove dangling images
|
||||
debug!("Removing dangling images");
|
||||
if let Err(e) = ctx
|
||||
.run_type()
|
||||
.execute(&crt)
|
||||
.args(["image", "prune", "-f"])
|
||||
.status_checked()
|
||||
{
|
||||
error!("Removing dangling images failed: {}", e);
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
if success {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(eyre!(error::StepFailed))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ use std::path::{Path, PathBuf};
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use rust_i18n::t;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
@@ -74,9 +75,12 @@ impl Emacs {
|
||||
if let Some(doom) = &self.doom {
|
||||
Emacs::update_doom(doom, ctx)?;
|
||||
}
|
||||
let init_file = require_option(self.directory.as_ref(), String::from("Emacs directory does not exist"))?
|
||||
.join("init.el")
|
||||
.require()?;
|
||||
let init_file = require_option(
|
||||
self.directory.as_ref(),
|
||||
t!("Emacs directory does not exist").to_string(),
|
||||
)?
|
||||
.join("init.el")
|
||||
.require()?;
|
||||
|
||||
print_separator("Emacs");
|
||||
|
||||
|
||||
@@ -8,6 +8,8 @@ use std::{fs, io::Write};
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
use semver::Version;
|
||||
use tempfile::tempfile_in;
|
||||
use tracing::{debug, error};
|
||||
|
||||
@@ -15,7 +17,7 @@ use crate::command::{CommandExt, Utf8Output};
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::ExecutorOutput;
|
||||
use crate::terminal::{print_separator, shell};
|
||||
use crate::utils::{self, check_is_python_2_or_shim, require, require_option, which, PathExt, REQUIRE_SUDO};
|
||||
use crate::utils::{self, check_is_python_2_or_shim, get_require_sudo_string, require, require_option, which, PathExt};
|
||||
use crate::Step;
|
||||
use crate::HOME_DIR;
|
||||
use crate::{
|
||||
@@ -23,6 +25,18 @@ use crate::{
|
||||
terminal::print_warning,
|
||||
};
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
pub fn is_wsl() -> Result<bool> {
|
||||
let output = Command::new("uname").arg("-r").output_checked_utf8()?.stdout;
|
||||
debug!("Uname output: {}", output);
|
||||
Ok(output.contains("microsoft"))
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
pub fn is_wsl() -> Result<bool> {
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
pub fn run_cargo_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let cargo_dir = env::var_os("CARGO_HOME")
|
||||
.map(PathBuf::from)
|
||||
@@ -107,13 +121,17 @@ pub fn run_rubygems(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("RubyGems");
|
||||
let gem_path_str = gem.as_os_str();
|
||||
if gem_path_str.to_str().unwrap().contains("asdf") {
|
||||
if gem_path_str.to_str().unwrap().contains("asdf")
|
||||
|| gem_path_str.to_str().unwrap().contains("mise")
|
||||
|| gem_path_str.to_str().unwrap().contains(".rbenv")
|
||||
|| gem_path_str.to_str().unwrap().contains(".rvm")
|
||||
{
|
||||
ctx.run_type()
|
||||
.execute(gem)
|
||||
.args(["update", "--system"])
|
||||
.status_checked()?;
|
||||
} else {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
if !Path::new("/usr/lib/ruby/vendor_ruby/rubygems/defaults/operating_system.rb").exists() {
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
@@ -142,7 +160,7 @@ pub fn run_haxelib_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut command = if directory_writable {
|
||||
ctx.run_type().execute(&haxelib)
|
||||
} else {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut c = ctx.run_type().execute(sudo);
|
||||
c.arg(&haxelib);
|
||||
c
|
||||
@@ -207,6 +225,20 @@ pub fn run_apm(ctx: &ExecutionContext) -> Result<()> {
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_aqua(ctx: &ExecutionContext) -> Result<()> {
|
||||
let aqua = require("aqua")?;
|
||||
|
||||
print_separator("Aqua");
|
||||
if ctx.run_type().dry() {
|
||||
println!("{}", t!("Updating aqua ..."));
|
||||
println!("{}", t!("Updating aqua installed cli tools ..."));
|
||||
Ok(())
|
||||
} else {
|
||||
ctx.run_type().execute(&aqua).arg("update-aqua").status_checked()?;
|
||||
ctx.run_type().execute(&aqua).arg("update").status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_rustup(ctx: &ExecutionContext) -> Result<()> {
|
||||
let rustup = require("rustup")?;
|
||||
|
||||
@@ -214,6 +246,24 @@ pub fn run_rustup(ctx: &ExecutionContext) -> Result<()> {
|
||||
ctx.run_type().execute(rustup).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_rye(ctx: &ExecutionContext) -> Result<()> {
|
||||
let rye = require("rye")?;
|
||||
|
||||
print_separator("Rye");
|
||||
ctx.run_type().execute(rye).args(["self", "update"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_elan(ctx: &ExecutionContext) -> Result<()> {
|
||||
let elan = require("elan")?;
|
||||
|
||||
print_separator("elan");
|
||||
ctx.run_type()
|
||||
.execute(&elan)
|
||||
.args(["self", "update"])
|
||||
.status_checked()?;
|
||||
ctx.run_type().execute(&elan).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_juliaup(ctx: &ExecutionContext) -> Result<()> {
|
||||
let juliaup = require("juliaup")?;
|
||||
|
||||
@@ -287,7 +337,13 @@ pub fn run_opam_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator("OCaml Package Manager");
|
||||
|
||||
ctx.run_type().execute(&opam).arg("update").status_checked()?;
|
||||
ctx.run_type().execute(&opam).arg("upgrade").status_checked()?;
|
||||
|
||||
let mut command = ctx.run_type().execute(&opam);
|
||||
command.arg("upgrade");
|
||||
if ctx.config().yes(Step::Opam) {
|
||||
command.arg("--yes");
|
||||
}
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
ctx.run_type().execute(&opam).arg("clean").status_checked()?;
|
||||
@@ -309,7 +365,7 @@ pub fn run_vcpkg_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut command = if is_root_install {
|
||||
ctx.run_type().execute(&vcpkg)
|
||||
} else {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut c = ctx.run_type().execute(sudo);
|
||||
c.arg(&vcpkg);
|
||||
c
|
||||
@@ -318,17 +374,63 @@ pub fn run_vcpkg_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
command.args(["upgrade", "--no-dry-run"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_vscode_extensions_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
// Calling vscode in WSL may install a server instead of updating extensions (https://github.com/topgrade-rs/topgrade/issues/594#issuecomment-1782157367)
|
||||
if is_wsl()? {
|
||||
return Err(SkipStep(String::from("Should not run in WSL")).into());
|
||||
}
|
||||
|
||||
let vscode = require("code")?;
|
||||
|
||||
// Vscode has update command only since 1.86 version ("january 2024" update), disable the update for prior versions
|
||||
// Use command `code --version` which returns 3 lines: version, git commit, instruction set. We parse only the first one
|
||||
let version: Result<Version> = match Command::new(&vscode)
|
||||
.arg("--version")
|
||||
.output_checked_utf8()?
|
||||
.stdout
|
||||
.lines()
|
||||
.next()
|
||||
{
|
||||
Some(item) => Version::parse(item).map_err(|err| err.into()),
|
||||
_ => return Err(SkipStep(String::from("Cannot find vscode version")).into()),
|
||||
};
|
||||
|
||||
if !matches!(version, Ok(version) if version >= Version::new(1, 86, 0)) {
|
||||
return Err(SkipStep(String::from("Too old vscode version to have update extensions command")).into());
|
||||
}
|
||||
|
||||
print_separator("Visual Studio Code extensions");
|
||||
|
||||
ctx.run_type()
|
||||
.execute(vscode)
|
||||
.arg("--update-extensions")
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pipx_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pipx = require("pipx")?;
|
||||
print_separator("pipx");
|
||||
|
||||
ctx.run_type().execute(pipx).arg("upgrade-all").status_checked()
|
||||
let mut command_args = vec!["upgrade-all", "--include-injected"];
|
||||
|
||||
// pipx version 1.4.0 introduced a new command argument `pipx upgrade-all --quiet`
|
||||
// (see https://pipx.pypa.io/stable/docs/#pipx-upgrade-all)
|
||||
let version_str = Command::new(&pipx)
|
||||
.args(["--version"])
|
||||
.output_checked_utf8()
|
||||
.map(|s| s.stdout.trim().to_owned());
|
||||
let version = Version::parse(&version_str?);
|
||||
if matches!(version, Ok(version) if version >= Version::new(1, 4, 0)) {
|
||||
command_args.push("--quiet")
|
||||
}
|
||||
|
||||
ctx.run_type().execute(pipx).args(command_args).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_conda_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let conda = require("conda")?;
|
||||
|
||||
let output = Command::new("conda")
|
||||
let output = Command::new(&conda)
|
||||
.args(["config", "--show", "auto_activate_base"])
|
||||
.output_checked_utf8()?;
|
||||
debug!("Conda output: {}", output.stdout);
|
||||
@@ -346,17 +448,16 @@ pub fn run_conda_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
command.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pixi_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pixi = require("pixi")?;
|
||||
print_separator("Pixi");
|
||||
|
||||
ctx.run_type().execute(pixi).args(["self-update"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_mamba_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mamba = require("mamba")?;
|
||||
|
||||
let output = Command::new("mamba")
|
||||
.args(["config", "--show", "auto_activate_base"])
|
||||
.output_checked_utf8()?;
|
||||
debug!("Mamba output: {}", output.stdout);
|
||||
if output.stdout.contains("False") {
|
||||
return Err(SkipStep("auto_activate_base is set to False".to_string()).into());
|
||||
}
|
||||
|
||||
print_separator("Mamba");
|
||||
|
||||
let mut command = ctx.run_type().execute(mamba);
|
||||
@@ -367,6 +468,16 @@ pub fn run_mamba_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
command.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_miktex_packages_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let miktex = require("miktex")?;
|
||||
print_separator("miktex");
|
||||
|
||||
ctx.run_type()
|
||||
.execute(miktex)
|
||||
.args(["packages", "update"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pip3_update(ctx: &ExecutionContext) -> Result<()> {
    let py = require("python").and_then(check_is_python_2_or_shim);
    let py3 = require("python3").and_then(check_is_python_2_or_shim);
@@ -383,26 +494,59 @@ pub fn run_pip3_update(ctx: &ExecutionContext) -> Result<()> {
    Command::new(&python3)
        .args(["-m", "pip"])
        .output_checked_utf8()
        .map_err(|_| SkipStep("pip does not exists".to_string()))?;
        .map_err(|_| SkipStep("pip does not exist".to_string()))?;

    let check_externally_managed = "import sysconfig; from os import path; print('Y') if path.isfile(path.join(sysconfig.get_path('stdlib'), 'EXTERNALLY-MANAGED')) else print('N')";
    Command::new(&python3)
        .args(["-c", check_externally_managed])
    let check_extern_managed_script = "import sysconfig; from os import path; print('Y') if path.isfile(path.join(sysconfig.get_path('stdlib'), 'EXTERNALLY-MANAGED')) else print('N')";
    let output = Command::new(&python3)
        .args(["-c", check_extern_managed_script])
        .output_checked_utf8()?;
    let stdout = output.stdout.trim();
    let extern_managed = match stdout {
        "N" => false,
        "Y" => true,
        _ => unreachable!("unexpected output from `check_extern_managed_script`"),
    };

    let allow_break_sys_pkg = match Command::new(&python3)
        .args(["-m", "pip", "config", "get", "global.break-system-packages"])
        .output_checked_utf8()
        .map_err(|_| SkipStep("pip may be externally managed".to_string()))
        .and_then(|output| match output.stdout.trim() {
            "N" => Ok(()),
            "Y" => Err(SkipStep("pip is externally managed".to_string())),
            _ => {
                print_warning("Unexpected output when checking EXTERNALLY-MANAGED");
                print_warning(output.stdout.trim());
                Err(SkipStep("pip may be externally managed".to_string()))
            }
        })?;
    {
        Ok(output) => {
            let stdout = output.stdout.trim();
            stdout
                .parse::<bool>()
                .expect("unexpected output that is not `true` or `false`")
        }
        // it can fail because this key may not be set
        //
        // ```sh
        // $ pip --version
        // pip 23.0.1 from /usr/lib/python3/dist-packages/pip (python 3.11)
        //
        // $ pip config get global.break-system-packages
        // ERROR: No such key - global.break-system-packages
        //
        // $ echo $?
        // 1
        // ```
        Err(_) => false,
    };

    debug!("pip3 externally managed: {} ", extern_managed);
    debug!("pip3 global.break-system-packages: {}", allow_break_sys_pkg);

    // Even though pip3 is externally managed, we should still update it if
    // `global.break-system-packages` is true.
    if extern_managed && !allow_break_sys_pkg {
        return Err(SkipStep(
            "Skip pip3 update as it is externally managed and global.break-system-packages is not true".to_string(),
        )
        .into());
    }

    print_separator("pip3");
    if env::var("VIRTUAL_ENV").is_ok() {
        print_warning("This step is will be skipped when running inside a virtual environment");
        print_warning("This step is skipped when running inside a virtual environment");
        return Err(SkipStep("Does not run inside a virtual environment".to_string()).into());
    }

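The hunk above combines two independent signals: whether the interpreter's stdlib carries an `EXTERNALLY-MANAGED` marker, and whether pip's `global.break-system-packages` option is set. Read literally, the step is skipped only when the environment is externally managed and the override is not enabled. A small sketch of that truth table (hypothetical helper, not part of the diff):

```rust
/// Whether the pip3 step should be skipped, mirroring the logic above:
/// skip only when pip is externally managed and the user has not opted
/// into `global.break-system-packages`.
fn skip_pip3(extern_managed: bool, allow_break_sys_pkg: bool) -> bool {
    extern_managed && !allow_break_sys_pkg
}

fn main() {
    assert!(!skip_pip3(false, false)); // not externally managed: always update
    assert!(!skip_pip3(false, true));
    assert!(skip_pip3(true, false)); // managed, no override: skip
    assert!(!skip_pip3(true, true)); // managed, but override enabled: update
    println!("ok");
}
```
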
@@ -430,6 +574,7 @@ pub fn run_pip_review_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_pip_review_local_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pip_review = require("pip-review")?;
|
||||
|
||||
@@ -449,6 +594,7 @@ pub fn run_pip_review_local_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_pipupgrade_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pipupgrade = require("pipupgrade")?;
|
||||
|
||||
@@ -466,6 +612,7 @@ pub fn run_pipupgrade_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_stack_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
if require("ghcup").is_ok() {
|
||||
// `ghcup` is present and probably(?) being used to install `stack`.
|
||||
@@ -519,7 +666,7 @@ pub fn run_tlmgr_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut command = if directory_writable {
|
||||
ctx.run_type().execute(&tlmgr)
|
||||
} else {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut c = ctx.run_type().execute(sudo);
|
||||
c.arg(&tlmgr);
|
||||
c
|
||||
@@ -576,19 +723,22 @@ pub fn run_composer_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let composer_home = Command::new(&composer)
|
||||
.args(["global", "config", "--absolute", "--quiet", "home"])
|
||||
.output_checked_utf8()
|
||||
.map_err(|e| (SkipStep(format!("Error getting the composer directory: {e}"))))
|
||||
.map_err(|e| (SkipStep(t!("Error getting the composer directory: {error}", error = e).to_string())))
|
||||
.map(|s| PathBuf::from(s.stdout.trim()))?
|
||||
.require()?;
|
||||
|
||||
if !composer_home.is_descendant_of(&HOME_DIR) {
|
||||
return Err(SkipStep(format!(
|
||||
"Composer directory {} isn't a decandent of the user's home directory",
|
||||
composer_home.display()
|
||||
))
|
||||
return Err(SkipStep(
|
||||
t!(
|
||||
"Composer directory {composer_home} isn't a descendant of the user's home directory",
|
||||
composer_home = composer_home.display()
|
||||
)
|
||||
.to_string(),
|
||||
)
|
||||
.into());
|
||||
}
|
||||
|
||||
print_separator("Composer");
|
||||
print_separator(t!("Composer"));
|
||||
|
||||
if ctx.config().composer_self_update() {
|
||||
cfg_if::cfg_if! {
|
||||
@@ -600,7 +750,7 @@ pub fn run_composer_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
};
|
||||
|
||||
if has_update {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.arg(&composer)
|
||||
@@ -636,17 +786,23 @@ pub fn run_dotnet_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
.run_type()
|
||||
.execute(&dotnet)
|
||||
.args(["tool", "list", "--global"])
|
||||
// dotnet will print a greeting message on its first run, from this question:
|
||||
// https://stackoverflow.com/q/70493706/14092446
|
||||
// Setting `DOTNET_NOLOGO` to `true` should disable it
|
||||
.env("DOTNET_NOLOGO", "true")
|
||||
.output_checked_utf8()
|
||||
{
|
||||
Ok(output) => output,
|
||||
Err(_) => {
|
||||
return Err(SkipStep(String::from(
|
||||
"Error running `dotnet tool list`. This is expected when a dotnet runtime is installed but no SDK.",
|
||||
))
|
||||
.into())
|
||||
return Err(SkipStep(
|
||||
t!("Error running `dotnet tool list`. This is expected when a dotnet runtime is installed but no SDK.")
|
||||
.to_string(),
|
||||
)
|
||||
.into());
|
||||
}
|
||||
};
|
||||
|
||||
let mut in_header = true;
|
||||
let mut packages = output
|
||||
.stdout
|
||||
.lines()
|
||||
@@ -654,16 +810,22 @@ pub fn run_dotnet_upgrade(ctx: &ExecutionContext) -> Result<()> {
        //
        // Package Id Version Commands
        // -------------------------------------
        //
        // One thing to note is that .NET SDK respect locale, which means this
        // header can be printed in languages other than English, do NOT use it
        // to do any check.
        .skip(2)
        .skip_while(|line| {
            // The .NET SDK respects locale, so the header can be printed
            // in languages other than English. The separator should hopefully
            // always be at least 10 -'s long.
            if in_header && line.starts_with("----------") {
                in_header = false;
                true
            } else {
                in_header
            }
        })
        .filter(|line| !line.is_empty())
        .peekable();

    if packages.peek().is_none() {
        return Err(SkipStep(String::from("No dotnet global tools installed")).into());
        return Err(SkipStep(t!("No dotnet global tools installed").to_string()).into());
    }

    print_separator(".NET");
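
The `skip_while` closure above consumes the localized header of `dotnet tool list --global` by waiting for the dashed separator line rather than skipping a fixed number of lines. A self-contained sketch of the same idea over sample output (the sample text and helper name are illustrative, not taken from the diff):

```rust
/// Extract package names from `dotnet tool list --global`-style output by
/// skipping everything up to and including the dashed separator line.
fn parse_tool_list(output: &str) -> Vec<String> {
    let mut in_header = true;
    output
        .lines()
        .skip_while(|line| {
            if in_header && line.starts_with("----------") {
                in_header = false;
                true
            } else {
                in_header
            }
        })
        .filter(|line| !line.trim().is_empty())
        .filter_map(|line| line.split_whitespace().next().map(str::to_owned))
        .collect()
}

fn main() {
    // Header wording varies with the SDK locale; only the separator is relied on.
    let sample = "Package Id      Version      Commands\n\
                  -------------------------------------\n\
                  dotnet-ef       8.0.0        dotnet-ef\n";
    assert_eq!(parse_tool_list(sample), vec!["dotnet-ef".to_string()]);
    println!("ok");
}
```
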
@@ -674,27 +836,26 @@ pub fn run_dotnet_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
.execute(&dotnet)
|
||||
.args(["tool", "update", package_name, "--global"])
|
||||
.status_checked()
|
||||
.with_context(|| format!("Failed to update .NET package {package_name}"))?;
|
||||
.with_context(|| format!("Failed to update .NET package {:?}", package_name))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_helix_grammars(ctx: &ExecutionContext) -> Result<()> {
    require("helix")?;
    let helix = require("helix").or(require("hx"))?;

    print_separator("Helix");

    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
    ctx.run_type()
        .execute(sudo)
        .args(["helix", "--grammar", "fetch"])
        .execute(&helix)
        .args(["--grammar", "fetch"])
        .status_checked()
        .with_context(|| "Failed to download helix grammars!")?;

    ctx.run_type()
        .execute(sudo)
        .args(["helix", "--grammar", "build"])
        .execute(&helix)
        .args(["--grammar", "build"])
        .status_checked()
        .with_context(|| "Failed to build helix grammars!")?;

@@ -704,7 +865,7 @@ pub fn run_helix_grammars(ctx: &ExecutionContext) -> Result<()> {
|
||||
pub fn run_raco_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let raco = require("raco")?;
|
||||
|
||||
print_separator("Racket Package Manager");
|
||||
print_separator(t!("Racket Package Manager"));
|
||||
|
||||
ctx.run_type()
|
||||
.execute(raco)
|
||||
@@ -732,10 +893,10 @@ pub fn run_ghcli_extensions_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let result = Command::new(&gh).args(["extensions", "list"]).output_checked_utf8();
|
||||
if result.is_err() {
|
||||
debug!("GH result {:?}", result);
|
||||
return Err(SkipStep(String::from("GH failed")).into());
|
||||
return Err(SkipStep(t!("GH failed").to_string()).into());
|
||||
}
|
||||
|
||||
print_separator("GitHub CLI Extensions");
|
||||
print_separator(t!("GitHub CLI Extensions"));
|
||||
ctx.run_type()
|
||||
.execute(&gh)
|
||||
.args(["extension", "upgrade", "--all"])
|
||||
@@ -745,7 +906,7 @@ pub fn run_ghcli_extensions_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
pub fn update_julia_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let julia = require("julia")?;
|
||||
|
||||
print_separator("Julia Packages");
|
||||
print_separator(t!("Julia Packages"));
|
||||
|
||||
ctx.run_type()
|
||||
.execute(julia)
|
||||
@@ -762,7 +923,7 @@ pub fn run_helm_repo_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut success = true;
|
||||
let mut exec = ctx.run_type().execute(helm);
|
||||
if let Err(e) = exec.arg("repo").arg("update").status_checked() {
|
||||
error!("Updating repositories failed: {}", e);
|
||||
error!("Updating repositories failed: {e}");
|
||||
success = match exec.output_checked_utf8() {
|
||||
Ok(s) => s.stdout.contains(no_repo) || s.stderr.contains(no_repo),
|
||||
Err(e) => match e.downcast_ref::<TopgradeError>() {
|
||||
@@ -793,3 +954,113 @@ pub fn run_bob(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
ctx.run_type().execute(bob).args(["update", "--all"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_certbot(ctx: &ExecutionContext) -> Result<()> {
    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
    let certbot = require("certbot")?;

    print_separator("Certbot");

    let mut cmd = ctx.run_type().execute(sudo);
    cmd.arg(certbot);
    cmd.arg("renew");

    cmd.status_checked()
}

/// Run `$ freshclam` to update ClamAV signature database
///
/// doc: https://docs.clamav.net/manual/Usage/SignatureManagement.html#freshclam
pub fn run_freshclam(ctx: &ExecutionContext) -> Result<()> {
    let freshclam = require("freshclam")?;
    print_separator(t!("Update ClamAV Database(FreshClam)"));
    ctx.run_type().execute(freshclam).status_checked()
}

/// Involve `pio upgrade` to update PlatformIO core.
pub fn run_platform_io(ctx: &ExecutionContext) -> Result<()> {
    // We use the full path because by default the binary is not in `PATH`:
    // https://github.com/topgrade-rs/topgrade/issues/754#issuecomment-2020537559
    #[cfg(unix)]
    fn bin_path() -> PathBuf {
        HOME_DIR.join(".platformio/penv/bin/pio")
    }
    #[cfg(windows)]
    fn bin_path() -> PathBuf {
        HOME_DIR.join(".platformio/penv/Scripts/pio.exe")
    }

    let bin_path = require(bin_path())?;

    print_separator("PlatformIO Core");

    ctx.run_type().execute(bin_path).arg("upgrade").status_checked()
}

/// Run `lensfun-update-data` to update lensfun database.
///
/// `sudo` will be used if `use_sudo` configuration entry is set to true.
pub fn run_lensfun_update_data(ctx: &ExecutionContext) -> Result<()> {
    const SEPARATOR: &str = "Lensfun's database update";
    let lensfun_update_data = require("lensfun-update-data")?;
    const EXIT_CODE_WHEN_NO_UPDATE: i32 = 1;

    if ctx.config().lensfun_use_sudo() {
        let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
        print_separator(SEPARATOR);
        ctx.run_type()
            .execute(sudo)
            .arg(lensfun_update_data)
            // `lensfun-update-data` returns 1 when there is no update available
            // which should be considered success
            .status_checked_with_codes(&[EXIT_CODE_WHEN_NO_UPDATE])
    } else {
        print_separator(SEPARATOR);
        ctx.run_type()
            .execute(lensfun_update_data)
            .status_checked_with_codes(&[EXIT_CODE_WHEN_NO_UPDATE])
    }
}

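`status_checked_with_codes` is topgrade's way of treating `lensfun-update-data`'s exit code 1 ("no update available") as success. A rough standalone equivalent with plain `std::process` shows the same idea; the command used below is only an illustration, and this is not topgrade's actual implementation:

```rust
use std::process::Command;

/// Run a command and accept, in addition to 0, any exit code listed in `ok_codes`.
/// Sketch of the behaviour relied on above.
fn run_allowing_codes(cmd: &mut Command, ok_codes: &[i32]) -> std::io::Result<bool> {
    let status = cmd.status()?;
    Ok(status.success() || status.code().is_some_and(|c| ok_codes.contains(&c)))
}

fn main() -> std::io::Result<()> {
    // On Unix, `false` exits with 1, which we choose to accept here, mirroring
    // the "no update available" case of `lensfun-update-data`.
    let ok = run_allowing_codes(&mut Command::new("false"), &[1])?;
    println!("accepted: {ok}");
    Ok(())
}
```
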
pub fn run_poetry(ctx: &ExecutionContext) -> Result<()> {
|
||||
let poetry = require("poetry")?;
|
||||
print_separator("Poetry");
|
||||
ctx.run_type().execute(poetry).args(["self", "update"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_uv(ctx: &ExecutionContext) -> Result<()> {
    let uv_exec = require("uv")?;
    print_separator("uv");

    ctx.run_type()
        .execute(&uv_exec)
        .args(["self", "update"])
        .status_checked()
        .ok();

    // ignoring self-update errors, because they are likely due to uv's
    // installation being managed by another package manager, in which
    // case another step will handle the update.

    ctx.run_type()
        .execute(&uv_exec)
        .args(["tool", "upgrade", "--all"])
        .status_checked()
}

/// Involve `zvm upgrade` to update ZVM
|
||||
pub fn run_zvm(ctx: &ExecutionContext) -> Result<()> {
|
||||
let zvm = require("zvm")?;
|
||||
|
||||
print_separator("ZVM");
|
||||
|
||||
ctx.run_type().execute(zvm).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_bun(ctx: &ExecutionContext) -> Result<()> {
|
||||
let bun = require("bun")?;
|
||||
|
||||
print_separator("Bun");
|
||||
|
||||
ctx.run_type().execute(bun).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
510 src/steps/git.rs
@@ -3,138 +3,176 @@ use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, Output, Stdio};
|
||||
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::{eyre, Result};
|
||||
use console::style;
|
||||
use futures::stream::{iter, FuturesUnordered};
|
||||
use futures::StreamExt;
|
||||
use futures::stream::{iter, FuturesUnordered, StreamExt};
|
||||
use glob::{glob_with, MatchOptions};
|
||||
use tokio::process::Command as AsyncCommand;
|
||||
use tokio::runtime;
|
||||
use tracing::{debug, error};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::config::Step;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::steps::emacs::Emacs;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{which, PathExt};
|
||||
use crate::{error::SkipStep, terminal::print_warning};
|
||||
use crate::utils::{require, PathExt};
|
||||
use crate::{error::SkipStep, terminal::print_warning, HOME_DIR};
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use rust_i18n::t;
|
||||
|
||||
#[cfg(unix)]
|
||||
use crate::XDG_DIRS;
|
||||
|
||||
#[cfg(windows)]
|
||||
use crate::WINDOWS_DIRS;
|
||||
|
||||
pub fn run_git_pull(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut repos = RepoStep::try_new()?;
|
||||
let config = ctx.config();
|
||||
|
||||
// handle built-in repos
|
||||
if config.use_predefined_git_repos() {
|
||||
// should be executed on all the platforms
|
||||
{
|
||||
if config.should_run(Step::Emacs) {
|
||||
let emacs = Emacs::new();
|
||||
if !emacs.is_doom() {
|
||||
if let Some(directory) = emacs.directory() {
|
||||
repos.insert_if_repo(directory);
|
||||
}
|
||||
}
|
||||
repos.insert_if_repo(HOME_DIR.join(".doom.d"));
|
||||
}
|
||||
|
||||
if config.should_run(Step::Vim) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".vim"));
|
||||
repos.insert_if_repo(HOME_DIR.join(".config/nvim"));
|
||||
}
|
||||
|
||||
repos.insert_if_repo(HOME_DIR.join(".ideavimrc"));
|
||||
repos.insert_if_repo(HOME_DIR.join(".intellimacs"));
|
||||
|
||||
if config.should_run(Step::Rcm) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".dotfiles"));
|
||||
}
|
||||
|
||||
let powershell = crate::steps::powershell::Powershell::new();
|
||||
if let Some(profile) = powershell.profile() {
|
||||
repos.insert_if_repo(profile);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
repos.insert_if_repo(crate::steps::zsh::zshrc());
|
||||
if config.should_run(Step::Tmux) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".tmux"));
|
||||
}
|
||||
repos.insert_if_repo(HOME_DIR.join(".config/fish"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("openbox"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("bspwm"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("i3"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("sway"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
repos.insert_if_repo(
|
||||
WINDOWS_DIRS
|
||||
.cache_dir()
|
||||
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
|
||||
);
|
||||
|
||||
super::os::windows::insert_startup_scripts(&mut repos).ok();
|
||||
}
|
||||
}
|
||||
|
||||
// Handle user-defined repos
|
||||
if let Some(custom_git_repos) = config.git_repos() {
|
||||
for git_repo in custom_git_repos {
|
||||
repos.glob_insert(git_repo);
|
||||
}
|
||||
}
|
||||
|
||||
// Warn the user about the bad patterns.
|
||||
//
|
||||
// NOTE: this should be executed **before** skipping the Git step or the
|
||||
// user won't receive this warning in the cases where all the paths configured
|
||||
// are bad patterns.
|
||||
repos.bad_patterns.iter().for_each(|pattern| {
|
||||
print_warning(t!(
|
||||
"Path {pattern} did not contain any git repositories",
|
||||
pattern = pattern
|
||||
))
|
||||
});
|
||||
|
||||
if repos.is_repos_empty() {
|
||||
return Err(SkipStep(t!("No repositories to pull").to_string()).into());
|
||||
}
|
||||
|
||||
print_separator(t!("Git repositories"));
|
||||
|
||||
repos.pull_repos(ctx)
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
static PATH_PREFIX: &str = "\\\\?\\";
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Git {
|
||||
git: Option<PathBuf>,
|
||||
}
|
||||
|
||||
pub struct Repositories<'a> {
|
||||
git: &'a Git,
|
||||
repositories: HashSet<String>,
|
||||
pub struct RepoStep {
|
||||
git: PathBuf,
|
||||
repos: HashSet<PathBuf>,
|
||||
glob_match_options: MatchOptions,
|
||||
bad_patterns: Vec<String>,
|
||||
}
|
||||
|
||||
#[track_caller]
fn output_checked_utf8(output: Output) -> Result<()> {
    if !(output.status.success()) {
        let stderr = String::from_utf8(output.stderr).unwrap();
        Err(eyre!(stderr))
        let stderr = String::from_utf8_lossy(&output.stderr);
        let stderr = stderr.trim();
        Err(eyre!("{stderr}"))
    } else {
        Ok(())
    }
}

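Switching from `String::from_utf8(...).unwrap()` to `String::from_utf8_lossy` means a tool that writes non-UTF-8 bytes to stderr no longer panics the step; invalid sequences are replaced with U+FFFD instead. A quick standalone illustration of the difference:

```rust
fn main() {
    // 0xFF is not valid UTF-8, as can happen with locale-specific tool output.
    let stderr: Vec<u8> = vec![b'b', b'a', b'd', b' ', 0xFF];

    // Strict conversion fails (the old code would have unwrapped and panicked)...
    assert!(String::from_utf8(stderr.clone()).is_err());

    // ...while the lossy conversion degrades gracefully.
    let lossy = String::from_utf8_lossy(&stderr);
    assert_eq!(lossy, "bad \u{FFFD}");
    println!("{}", lossy.trim());
}
```
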
async fn pull_repository(repo: String, git: &Path, ctx: &ExecutionContext<'_>) -> Result<()> {
|
||||
let path = repo.to_string();
|
||||
let before_revision = get_head_revision(git, &repo);
|
||||
|
||||
println!("{} {}", style("Pulling").cyan().bold(), path);
|
||||
|
||||
let mut command = AsyncCommand::new(git);
|
||||
|
||||
command
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args(["pull", "--ff-only"]);
|
||||
|
||||
if let Some(extra_arguments) = ctx.config().git_arguments() {
|
||||
command.args(extra_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let pull_output = command.output().await?;
|
||||
let submodule_output = AsyncCommand::new(git)
|
||||
.args(["submodule", "update", "--recursive"])
|
||||
.current_dir(&repo)
|
||||
.stdin(Stdio::null())
|
||||
.output()
|
||||
.await?;
|
||||
let result = output_checked_utf8(pull_output).and_then(|_| output_checked_utf8(submodule_output));
|
||||
|
||||
if let Err(message) = &result {
|
||||
println!("{} pulling {}", style("Failed").red().bold(), &repo);
|
||||
print!("{message}");
|
||||
} else {
|
||||
let after_revision = get_head_revision(git, &repo);
|
||||
|
||||
match (&before_revision, &after_revision) {
|
||||
(Some(before), Some(after)) if before != after => {
|
||||
println!("{} {}:", style("Changed").yellow().bold(), &repo);
|
||||
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args([
|
||||
"--no-pager",
|
||||
"log",
|
||||
"--no-decorate",
|
||||
"--oneline",
|
||||
&format!("{before}..{after}"),
|
||||
])
|
||||
.status_checked()?;
|
||||
println!();
|
||||
}
|
||||
_ => {
|
||||
println!("{} {}", style("Up-to-date").green().bold(), &repo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result.map(|_| ())
|
||||
}
|
||||
|
||||
fn get_head_revision(git: &Path, repo: &str) -> Option<String> {
|
||||
fn get_head_revision<P: AsRef<Path>>(git: &Path, repo: P) -> Option<String> {
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(repo)
|
||||
.current_dir(repo.as_ref())
|
||||
.args(["rev-parse", "HEAD"])
|
||||
.output_checked_utf8()
|
||||
.map(|output| output.stdout.trim().to_string())
|
||||
.map_err(|e| {
|
||||
error!("Error getting revision for {}: {}", repo, e);
|
||||
error!("Error getting revision for {}: {e}", repo.as_ref().display(),);
|
||||
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
fn has_remotes(git: &Path, repo: &str) -> Option<bool> {
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(repo)
|
||||
.args(["remote", "show"])
|
||||
.output_checked_utf8()
|
||||
.map(|output| output.stdout.lines().count() > 0)
|
||||
.map_err(|e| {
|
||||
error!("Error getting remotes for {}: {}", repo, e);
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
impl RepoStep {
    /// Try to create a `RepoStep`, fail if `git` is not found.
    pub fn try_new() -> Result<Self> {
        let git = require("git")?;
        let mut glob_match_options = MatchOptions::new();

impl Git {
    pub fn new() -> Self {
        Self { git: which("git") }
        if cfg!(windows) {
            glob_match_options.case_sensitive = false;
        }

        Ok(Self {
            git,
            repos: HashSet::new(),
            bad_patterns: Vec::new(),
            glob_match_options,
        })
    }

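`RepoStep` deduplicates repositories by resolving every candidate path to its repository root (see `get_repo_root` just below) and storing the roots in a `HashSet<PathBuf>`. A standalone sketch of that probe-and-collect pattern using only the standard library; the paths are examples and the helper is a simplified stand-in, not the crate's implementation:

```rust
use std::collections::HashSet;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};

/// Resolve the repository root of `path` with `git rev-parse --show-toplevel`,
/// the same probe `RepoStep::get_repo_root` builds on.
fn repo_root(path: &Path) -> Option<PathBuf> {
    let output = Command::new("git")
        .stdin(Stdio::null())
        .current_dir(path)
        .args(["rev-parse", "--show-toplevel"])
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    Some(PathBuf::from(String::from_utf8_lossy(&output.stdout).trim()))
}

fn main() {
    // Collecting roots into a HashSet means the same repository reached through
    // different sub-paths is only registered (and later pulled) once.
    let mut repos: HashSet<PathBuf> = HashSet::new();
    for candidate in [".", "./src"] {
        if let Some(root) = repo_root(Path::new(candidate)) {
            repos.insert(root);
        }
    }
    println!("{} unique repositories", repos.len());
}
```
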
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<String> {
|
||||
/// Try to get the root of the repo specified in `path`.
|
||||
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<PathBuf> {
|
||||
match path.as_ref().canonicalize() {
|
||||
Ok(mut path) => {
|
||||
debug_assert!(path.exists());
|
||||
@@ -158,71 +196,210 @@ impl Git {
|
||||
path_string
|
||||
};
|
||||
|
||||
if let Some(git) = &self.git {
|
||||
let output = Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(path)
|
||||
.args(["rev-parse", "--show-toplevel"])
|
||||
.output_checked_utf8()
|
||||
.ok()
|
||||
.map(|output| output.stdout.trim().to_string());
|
||||
return output;
|
||||
}
|
||||
let output = Command::new(&self.git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(path)
|
||||
.args(["rev-parse", "--show-toplevel"])
|
||||
.output_checked_utf8()
|
||||
.ok()
|
||||
// trim the last newline char
|
||||
.map(|output| PathBuf::from(output.stdout.trim()));
|
||||
|
||||
return output;
|
||||
}
|
||||
Err(e) => match e.kind() {
|
||||
io::ErrorKind::NotFound => debug!("{} does not exists", path.as_ref().display()),
|
||||
_ => error!("Error looking for {}: {}", path.as_ref().display(), e),
|
||||
io::ErrorKind::NotFound => debug!("{} does not exist", path.as_ref().display()),
|
||||
_ => error!("Error looking for {}: {e}", path.as_ref().display(),),
|
||||
},
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
pub fn multi_pull_step(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
|
||||
// Warn the user about the bad patterns.
|
||||
//
|
||||
// NOTE: this should be executed **before** skipping the Git step or the
|
||||
// user won't receive this warning in the cases where all the paths configured
|
||||
// are bad patterns.
|
||||
repositories
|
||||
.bad_patterns
|
||||
.iter()
|
||||
.for_each(|pattern| print_warning(format!("Path {pattern} did not contain any git repositories")));
|
||||
|
||||
if repositories.repositories.is_empty() {
|
||||
return Err(SkipStep(String::from("No repositories to pull")).into());
|
||||
/// Check if `path` is a git repo, if yes, add it to `self.repos`.
|
||||
///
|
||||
/// Return the check result.
|
||||
pub fn insert_if_repo<P: AsRef<Path>>(&mut self, path: P) -> bool {
|
||||
if let Some(repo) = self.get_repo_root(path) {
|
||||
self.repos.insert(repo);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
||||
print_separator("Git repositories");
|
||||
self.multi_pull(repositories, ctx)
|
||||
}
|
||||
|
||||
pub fn multi_pull(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
|
||||
let git = self.git.as_ref().unwrap();
|
||||
/// Check if `repo` has a remote.
|
||||
fn has_remotes<P: AsRef<Path>>(&self, repo: P) -> Option<bool> {
|
||||
let mut cmd = Command::new(&self.git);
|
||||
cmd.stdin(Stdio::null())
|
||||
.current_dir(repo.as_ref())
|
||||
.args(["remote", "show"]);
|
||||
|
||||
let res = cmd.output_checked_utf8();
|
||||
|
||||
res.map(|output| output.stdout.lines().count() > 0)
|
||||
.map_err(|e| {
|
||||
error!("Error getting remotes for {}: {e}", repo.as_ref().display());
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
    /// Similar to `insert_if_repo`, with glob support.
    pub fn glob_insert(&mut self, pattern: &str) {
        if let Ok(glob) = glob_with(pattern, self.glob_match_options) {
            let mut last_git_repo: Option<PathBuf> = None;
            for entry in glob {
                match entry {
                    Ok(path) => {
                        if let Some(last_git_repo) = &last_git_repo {
                            if path.is_descendant_of(last_git_repo) {
                                debug!(
                                    "Skipping {} because it's a descendant of last known repo {}",
                                    path.display(),
                                    last_git_repo.display()
                                );
                                continue;
                            }
                        }
                        if self.insert_if_repo(&path) {
                            last_git_repo = Some(path);
                        }
                    }
                    Err(e) => {
                        error!("Error in path {e}");
                    }
                }
            }

            if last_git_repo.is_none() {
                self.bad_patterns.push(String::from(pattern));
            }
        } else {
            error!("Bad glob pattern: {pattern}");
        }
    }

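The descendant check in `glob_insert` exists because a pattern such as `~/src/*` can expand to paths inside a repository that was already registered; those matches are skipped rather than producing duplicate entries. A minimal standalone model of that skip rule, where a `starts_with`-based check stands in for topgrade's `PathExt::is_descendant_of` and the paths are invented for the example:

```rust
use std::path::{Path, PathBuf};

/// Stand-in for `PathExt::is_descendant_of`: treat prefix containment as descent.
fn is_descendant_of(path: &Path, ancestor: &Path) -> bool {
    path.starts_with(ancestor)
}

/// Keep only the first match of each repository, skipping later glob matches
/// that fall inside an already-registered repo root.
fn dedupe_matches(matches: &[&str]) -> Vec<PathBuf> {
    let mut kept: Vec<PathBuf> = Vec::new();
    let mut last_repo: Option<PathBuf> = None;
    for m in matches {
        let path = PathBuf::from(m);
        if let Some(last) = &last_repo {
            if is_descendant_of(&path, last) {
                continue; // same decision as the `continue` in glob_insert
            }
        }
        last_repo = Some(path.clone());
        kept.push(path);
    }
    kept
}

fn main() {
    let matches = ["/home/me/src/repo", "/home/me/src/repo/vendor", "/home/me/src/other"];
    let kept = dedupe_matches(&matches);
    assert_eq!(
        kept,
        vec![PathBuf::from("/home/me/src/repo"), PathBuf::from("/home/me/src/other")]
    );
    println!("{kept:?}");
}
```
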
/// True if `self.repos` is empty.
|
||||
pub fn is_repos_empty(&self) -> bool {
|
||||
self.repos.is_empty()
|
||||
}
|
||||
|
||||
/// Remove `path` from `self.repos`.
|
||||
///
|
||||
// `cfg(unix)` because it is only used in the oh-my-zsh step.
|
||||
#[cfg(unix)]
|
||||
pub fn remove<P: AsRef<Path>>(&mut self, path: P) {
|
||||
let _removed = self.repos.remove(path.as_ref());
|
||||
debug_assert!(_removed);
|
||||
}
|
||||
|
||||
/// Try to pull a repo.
|
||||
async fn pull_repo<P: AsRef<Path>>(&self, ctx: &ExecutionContext<'_>, repo: P) -> Result<()> {
|
||||
let before_revision = get_head_revision(&self.git, &repo);
|
||||
|
||||
if ctx.config().verbose() {
|
||||
println!("{} {}", style(t!("Pulling")).cyan().bold(), repo.as_ref().display());
|
||||
}
|
||||
|
||||
let mut command = AsyncCommand::new(&self.git);
|
||||
|
||||
command
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args(["pull", "--ff-only"]);
|
||||
|
||||
if let Some(extra_arguments) = ctx.config().git_arguments() {
|
||||
command.args(extra_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let pull_output = command.output().await?;
|
||||
let submodule_output = AsyncCommand::new(&self.git)
|
||||
.args(["submodule", "update", "--recursive"])
|
||||
.current_dir(&repo)
|
||||
.stdin(Stdio::null())
|
||||
.output()
|
||||
.await?;
|
||||
let result = output_checked_utf8(pull_output)
|
||||
.and_then(|_| output_checked_utf8(submodule_output))
|
||||
.wrap_err_with(|| format!("Failed to pull {}", repo.as_ref().display()));
|
||||
|
||||
if result.is_err() {
|
||||
println!(
|
||||
"{} {} {}",
|
||||
style(t!("Failed")).red().bold(),
|
||||
t!("pulling"),
|
||||
repo.as_ref().display()
|
||||
);
|
||||
} else {
|
||||
let after_revision = get_head_revision(&self.git, repo.as_ref());
|
||||
|
||||
match (&before_revision, &after_revision) {
|
||||
(Some(before), Some(after)) if before != after => {
|
||||
println!("{} {}", style(t!("Changed")).yellow().bold(), repo.as_ref().display());
|
||||
|
||||
Command::new(&self.git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args([
|
||||
"--no-pager",
|
||||
"log",
|
||||
"--no-decorate",
|
||||
"--oneline",
|
||||
&format!("{before}..{after}"),
|
||||
])
|
||||
.status_checked()?;
|
||||
println!();
|
||||
}
|
||||
_ => {
|
||||
if ctx.config().verbose() {
|
||||
println!("{} {}", style(t!("Up-to-date")).green().bold(), repo.as_ref().display());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result.map(|_| ())
|
||||
}
|
||||
|
||||
/// Pull the repositories specified in `self.repos`.
|
||||
///
|
||||
/// # NOTE
|
||||
/// This function will create an async runtime and do the real job so the
|
||||
/// function itself is not async.
|
||||
fn pull_repos(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
if ctx.run_type().dry() {
|
||||
repositories
|
||||
.repositories
|
||||
self.repos
|
||||
.iter()
|
||||
.for_each(|repo| println!("Would pull {}", &repo));
|
||||
.for_each(|repo| println!("{}", t!("Would pull {repo}", repo = repo.display())));
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let futures_iterator = repositories
|
||||
.repositories
|
||||
if !ctx.config().verbose() {
|
||||
println!(
|
||||
"\n{} {}\n",
|
||||
style(t!("Only")).green().bold(),
|
||||
t!("updated repositories will be shown...")
|
||||
);
|
||||
}
|
||||
|
||||
let futures_iterator = self
|
||||
.repos
|
||||
.iter()
|
||||
.filter(|repo| match has_remotes(git, repo) {
|
||||
.filter(|repo| match self.has_remotes(repo) {
|
||||
Some(false) => {
|
||||
println!(
|
||||
"{} {} because it has no remotes",
|
||||
style("Skipping").yellow().bold(),
|
||||
repo
|
||||
"{} {} {}",
|
||||
style(t!("Skipping")).yellow().bold(),
|
||||
repo.display(),
|
||||
t!("because it has no remotes")
|
||||
);
|
||||
false
|
||||
}
|
||||
_ => true, // repo has remotes or command to check for remotes has failed. proceed to pull anyway.
|
||||
})
|
||||
.map(|repo| pull_repository(repo.clone(), git, ctx));
|
||||
.map(|repo| self.pull_repo(ctx, repo));
|
||||
|
||||
let stream_of_futures = if let Some(limit) = ctx.config().git_concurrency_limit() {
|
||||
iter(futures_iterator).buffer_unordered(limit).boxed()
|
||||
@@ -237,74 +414,3 @@ impl Git {
|
||||
error.unwrap_or(Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Repositories<'a> {
|
||||
pub fn new(git: &'a Git) -> Self {
|
||||
let mut glob_match_options = MatchOptions::new();
|
||||
|
||||
if cfg!(windows) {
|
||||
glob_match_options.case_sensitive = false;
|
||||
}
|
||||
|
||||
Self {
|
||||
git,
|
||||
repositories: HashSet::new(),
|
||||
bad_patterns: Vec::new(),
|
||||
glob_match_options,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_if_repo<P: AsRef<Path>>(&mut self, path: P) -> bool {
|
||||
if let Some(repo) = self.git.get_repo_root(path) {
|
||||
self.repositories.insert(repo);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn glob_insert(&mut self, pattern: &str) {
|
||||
if let Ok(glob) = glob_with(pattern, self.glob_match_options) {
|
||||
let mut last_git_repo: Option<PathBuf> = None;
|
||||
for entry in glob {
|
||||
match entry {
|
||||
Ok(path) => {
|
||||
if let Some(last_git_repo) = &last_git_repo {
|
||||
if path.is_descendant_of(last_git_repo) {
|
||||
debug!(
|
||||
"Skipping {} because it's a decendant of last known repo {}",
|
||||
path.display(),
|
||||
last_git_repo.display()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if self.insert_if_repo(&path) {
|
||||
last_git_repo = Some(path);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Error in path {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if last_git_repo.is_none() {
|
||||
self.bad_patterns.push(String::from(pattern));
|
||||
}
|
||||
} else {
|
||||
error!("Bad glob pattern: {}", pattern);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.repositories.is_empty()
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub fn remove(&mut self, path: &str) {
|
||||
let _removed = self.repositories.remove(path);
|
||||
debug_assert!(_removed);
|
||||
}
|
||||
}
|
||||
|
||||
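`pull_repos` above fans the per-repository futures out through `buffer_unordered` when `git_concurrency_limit` is configured, so at most N pulls run at once while the rest wait. A small self-contained sketch of that pattern with `futures` and `tokio` (both already dependencies of this crate); the workload here is a dummy sleep rather than a real `git pull`:

```rust
use futures::stream::{iter, StreamExt};
use std::time::Duration;

async fn pull_one(repo: &str) {
    // Stand-in for the real `git pull`; just simulates some work.
    tokio::time::sleep(Duration::from_millis(50)).await;
    println!("pulled {repo}");
}

#[tokio::main]
async fn main() {
    let repos = ["a", "b", "c", "d", "e"];
    let limit = 2; // corresponds to the `git_concurrency_limit` setting

    // At most `limit` futures are polled concurrently; completion order is arbitrary.
    iter(repos.iter().map(|r| pull_one(r)))
        .buffer_unordered(limit)
        .collect::<Vec<()>>()
        .await;
}
```
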
@@ -1,6 +1,7 @@
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::require;
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
|
||||
use crate::execution_context::ExecutionContext;
|
||||
|
||||
@@ -17,7 +18,7 @@ pub fn upgrade_kak_plug(ctx: &ExecutionContext) -> Result<()> {
|
||||
.args(["-ui", "dummy", "-e", UPGRADE_KAK])
|
||||
.output()?;
|
||||
|
||||
println!("Plugins upgraded");
|
||||
println!("{}", t!("Plugins upgraded"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -4,16 +4,17 @@ use std::os::unix::fs::MetadataExt;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
||||
use crate::utils::{get_require_sudo_string, require_option};
|
||||
use crate::HOME_DIR;
|
||||
use color_eyre::eyre::Result;
|
||||
#[cfg(target_os = "linux")]
|
||||
use nix::unistd::Uid;
|
||||
use rust_i18n::t;
|
||||
use semver::Version;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::terminal::{print_info, print_separator};
|
||||
use crate::utils::{require, PathExt};
|
||||
use crate::{error::SkipStep, execution_context::ExecutionContext};
|
||||
|
||||
@@ -92,7 +93,7 @@ impl NPM {
|
||||
fn upgrade(&self, ctx: &ExecutionContext, use_sudo: bool) -> Result<()> {
|
||||
let args = ["update", self.global_location_arg()];
|
||||
if use_sudo {
|
||||
let sudo = require_option(ctx.sudo().clone(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().clone(), get_require_sudo_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.arg(&self.command)
|
||||
@@ -156,7 +157,7 @@ impl Yarn {
|
||||
let args = ["global", "upgrade"];
|
||||
|
||||
if use_sudo {
|
||||
let sudo = require_option(ctx.sudo().clone(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().clone(), get_require_sudo_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.arg(self.yarn.as_ref().unwrap_or(&self.command))
|
||||
@@ -214,7 +215,7 @@ fn should_use_sudo_yarn(yarn: &Yarn, ctx: &ExecutionContext) -> Result<bool> {
|
||||
pub fn run_npm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let npm = require("npm").map(|b| NPM::new(b, NPMVariant::Npm))?;
|
||||
|
||||
print_separator("Node Package Manager");
|
||||
print_separator(t!("Node Package Manager"));
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
@@ -230,7 +231,7 @@ pub fn run_npm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
pub fn run_pnpm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pnpm = require("pnpm").map(|b| NPM::new(b, NPMVariant::Pnpm))?;
|
||||
|
||||
print_separator("Performant Node Package Manager");
|
||||
print_separator(t!("Performant Node Package Manager"));
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
@@ -251,7 +252,7 @@ pub fn run_yarn_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
print_separator("Yarn Package Manager");
|
||||
print_separator(t!("Yarn Package Manager"));
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
@@ -269,10 +270,55 @@ pub fn deno_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let deno_dir = HOME_DIR.join(".deno");
|
||||
|
||||
if !deno.canonicalize()?.is_descendant_of(&deno_dir) {
|
||||
let skip_reason = SkipStep("Deno installed outside of .deno directory".to_string());
|
||||
let skip_reason = SkipStep(t!("Deno installed outside of .deno directory").to_string());
|
||||
return Err(skip_reason.into());
|
||||
}
|
||||
|
||||
print_separator("Deno");
|
||||
ctx.run_type().execute(&deno).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
/// There is no `volta upgrade` command, so we need to upgrade each package
|
||||
pub fn run_volta_packages_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let volta = require("volta")?;
|
||||
|
||||
print_separator("Volta");
|
||||
|
||||
if ctx.run_type().dry() {
|
||||
print_info(t!("Updating Volta packages..."));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let list_output = ctx
|
||||
.run_type()
|
||||
.execute(&volta)
|
||||
.args(["list", "--format=plain"])
|
||||
.output_checked_utf8()?
|
||||
.stdout;
|
||||
|
||||
let installed_packages: Vec<&str> = list_output
|
||||
.lines()
|
||||
.filter_map(|line| {
|
||||
// format is 'kind package@version ...'
|
||||
let mut parts = line.split_whitespace();
|
||||
parts.next();
|
||||
let package_part = parts.next()?;
|
||||
let version_index = package_part.rfind('@').unwrap_or(package_part.len());
|
||||
Some(package_part[..version_index].trim())
|
||||
})
|
||||
.collect();
|
||||
|
||||
if installed_packages.is_empty() {
|
||||
print_info(t!("No packages installed with Volta"));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
for package in installed_packages.iter() {
|
||||
ctx.run_type()
|
||||
.execute(&volta)
|
||||
.args(["install", package])
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
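The Volta step has no single upgrade command, so it re-installs each package reported by `volta list --format=plain`. The parsing above takes the second whitespace-separated field of each line and strips a trailing `@version`. A standalone sketch of that parsing over a small sample; the sample lines are illustrative, not captured output:

```rust
/// Extract bare package names from `volta list --format=plain` output,
/// mirroring the parsing in `run_volta_packages_upgrade`.
fn volta_packages(list_output: &str) -> Vec<&str> {
    list_output
        .lines()
        .filter_map(|line| {
            // Format is roughly "kind package@version ...".
            let mut parts = line.split_whitespace();
            parts.next()?; // drop the kind column
            let package = parts.next()?;
            let at = package.rfind('@').unwrap_or(package.len());
            Some(package[..at].trim())
        })
        .collect()
}

fn main() {
    let sample = "runtime node@20.11.1 (default)\npackage typescript@5.4.5 (default)\n";
    assert_eq!(volta_packages(sample), vec!["node", "typescript"]);
    println!("ok");
}
```
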
@@ -4,12 +4,13 @@ use std::path::{Path, PathBuf};
|
||||
|
||||
use color_eyre::eyre;
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::error::TopgradeError;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::sudo::Sudo;
|
||||
use crate::utils::require_option;
|
||||
use crate::utils::which;
|
||||
use crate::{config, Step};
|
||||
|
||||
@@ -144,13 +145,13 @@ impl Trizen {
|
||||
}
|
||||
|
||||
pub struct Pacman {
|
||||
sudo: Sudo,
|
||||
executable: PathBuf,
|
||||
}
|
||||
|
||||
impl ArchPackageManager for Pacman {
|
||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut command = ctx.run_type().execute(&self.sudo);
|
||||
let sudo = require_option(ctx.sudo().as_ref(), "sudo is required to run pacman".into())?;
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command
|
||||
.arg(&self.executable)
|
||||
.arg("-Syu")
|
||||
@@ -161,7 +162,7 @@ impl ArchPackageManager for Pacman {
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
let mut command = ctx.run_type().execute(&self.sudo);
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(&self.executable).arg("-Scc");
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.arg("--noconfirm");
|
||||
@@ -174,10 +175,9 @@ impl ArchPackageManager for Pacman {
|
||||
}
|
||||
|
||||
impl Pacman {
|
||||
pub fn get(ctx: &ExecutionContext) -> Option<Self> {
|
||||
pub fn get() -> Option<Self> {
|
||||
Some(Self {
|
||||
executable: which("powerpill").unwrap_or_else(|| PathBuf::from("pacman")),
|
||||
sudo: ctx.sudo().to_owned()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -263,47 +263,76 @@ impl ArchPackageManager for Pamac {
|
||||
|
||||
pub struct Aura {
|
||||
executable: PathBuf,
|
||||
sudo: Sudo,
|
||||
}
|
||||
|
||||
impl Aura {
|
||||
fn get(ctx: &ExecutionContext) -> Option<Self> {
|
||||
fn get() -> Option<Self> {
|
||||
Some(Self {
|
||||
executable: which("aura")?,
|
||||
sudo: ctx.sudo().to_owned()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ArchPackageManager for Aura {
|
||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = which("sudo").unwrap_or_else(PathBuf::new);
|
||||
let mut aur_update = ctx.run_type().execute(&sudo);
|
||||
use semver::Version;
|
||||
|
||||
if sudo.ends_with("sudo") {
|
||||
aur_update
|
||||
.arg(&self.executable)
|
||||
let version_cmd_output = ctx
|
||||
.run_type()
|
||||
.execute(&self.executable)
|
||||
.arg("--version")
|
||||
.output_checked_utf8()?;
|
||||
// Output will be something like: "aura x.x.x\n"
|
||||
let version_cmd_stdout = version_cmd_output.stdout;
|
||||
let version_str = version_cmd_stdout.trim_start_matches("aura ").trim_end();
|
||||
let version = Version::parse(version_str).expect("invalid version");
|
||||
|
||||
// Aura, since version 4.0.6, no longer needs sudo.
|
||||
//
|
||||
// https://github.com/fosskers/aura/releases/tag/v4.0.6
|
||||
let version_no_sudo = Version::new(4, 0, 6);
|
||||
|
||||
if version >= version_no_sudo {
|
||||
let mut cmd = ctx.run_type().execute(&self.executable);
|
||||
cmd.arg("-Au")
|
||||
.args(ctx.config().aura_aur_arguments().split_whitespace());
|
||||
if ctx.config().yes(Step::System) {
|
||||
cmd.arg("--noconfirm");
|
||||
}
|
||||
cmd.status_checked()?;
|
||||
|
||||
let mut cmd = ctx.run_type().execute(&self.executable);
|
||||
cmd.arg("-Syu")
|
||||
.args(ctx.config().aura_pacman_arguments().split_whitespace());
|
||||
if ctx.config().yes(Step::System) {
|
||||
cmd.arg("--noconfirm");
|
||||
}
|
||||
cmd.status_checked()?;
|
||||
} else {
|
||||
let sudo = crate::utils::require_option(
|
||||
ctx.sudo().as_ref(),
|
||||
t!("Aura(<0.4.6) requires sudo installed to work with AUR packages").to_string(),
|
||||
)?;
|
||||
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.arg(&self.executable)
|
||||
.arg("-Au")
|
||||
.args(ctx.config().aura_aur_arguments().split_whitespace());
|
||||
if ctx.config().yes(Step::System) {
|
||||
aur_update.arg("--noconfirm");
|
||||
cmd.arg("--noconfirm");
|
||||
}
|
||||
cmd.status_checked()?;
|
||||
|
||||
aur_update.status_checked()?;
|
||||
} else {
|
||||
println!("Aura requires sudo installed to work with AUR packages")
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.arg(&self.executable)
|
||||
.arg("-Syu")
|
||||
.args(ctx.config().aura_pacman_arguments().split_whitespace());
|
||||
if ctx.config().yes(Step::System) {
|
||||
cmd.arg("--noconfirm");
|
||||
}
|
||||
cmd.status_checked()?;
|
||||
}
|
||||
|
||||
let mut pacman_update = ctx.run_type().execute(&self.sudo);
|
||||
pacman_update
|
||||
.arg(&self.executable)
|
||||
.arg("-Syu")
|
||||
.args(ctx.config().aura_pacman_arguments().split_whitespace());
|
||||
if ctx.config().yes(Step::System) {
|
||||
pacman_update.arg("--noconfirm");
|
||||
}
|
||||
pacman_update.status_checked()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
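The Aura branch above now parses `aura --version` with the `semver` crate and only falls back to `sudo` for versions older than 4.0.6. A compact sketch of just that version gate; the input strings follow the `"aura x.x.x"` shape the code expects, and `semver` is already a dependency here:

```rust
use semver::Version;

/// Decide whether this Aura still needs sudo for AUR operations,
/// following the 4.0.6 cutoff used above.
fn aura_needs_sudo(version_output: &str) -> bool {
    // Expected shape: "aura 4.0.7\n"
    let version_str = version_output.trim_start_matches("aura ").trim_end();
    let version = Version::parse(version_str).expect("invalid version");
    version < Version::new(4, 0, 6)
}

fn main() {
    assert!(aura_needs_sudo("aura 3.2.9\n"));
    assert!(!aura_needs_sudo("aura 4.0.6\n"));
    assert!(!aura_needs_sudo("aura 4.1.0\n"));
    println!("ok");
}
```
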
@@ -323,16 +352,16 @@ pub fn get_arch_package_manager(ctx: &ExecutionContext) -> Option<Box<dyn ArchPa
|
||||
.or_else(|| Trizen::get().map(box_package_manager))
|
||||
.or_else(|| Pikaur::get().map(box_package_manager))
|
||||
.or_else(|| Pamac::get().map(box_package_manager))
|
||||
.or_else(|| Pacman::get(ctx).map(box_package_manager))
|
||||
.or_else(|| Aura::get(ctx).map(box_package_manager)),
|
||||
.or_else(|| Pacman::get().map(box_package_manager))
|
||||
.or_else(|| Aura::get().map(box_package_manager)),
|
||||
config::ArchPackageManager::GarudaUpdate => GarudaUpdate::get().map(box_package_manager),
|
||||
config::ArchPackageManager::Trizen => Trizen::get().map(box_package_manager),
|
||||
config::ArchPackageManager::Paru => YayParu::get("paru", &pacman).map(box_package_manager),
|
||||
config::ArchPackageManager::Yay => YayParu::get("yay", &pacman).map(box_package_manager),
|
||||
config::ArchPackageManager::Pacman => Pacman::get(ctx).map(box_package_manager),
|
||||
config::ArchPackageManager::Pacman => Pacman::get().map(box_package_manager),
|
||||
config::ArchPackageManager::Pikaur => Pikaur::get().map(box_package_manager),
|
||||
config::ArchPackageManager::Pamac => Pamac::get().map(box_package_manager),
|
||||
config::ArchPackageManager::Aura => Aura::get(ctx).map(box_package_manager),
|
||||
config::ArchPackageManager::Aura => Aura::get().map(box_package_manager),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -355,7 +384,7 @@ pub fn show_pacnew() {
|
||||
.peekable();
|
||||
|
||||
if iter.peek().is_some() {
|
||||
println!("\nPacman backup configuration files found:");
|
||||
println!("\n{}", t!("Pacman backup configuration files found:"));
|
||||
|
||||
for entry in iter {
|
||||
println!("{}", entry.path().display());
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
||||
use crate::utils::{get_require_sudo_string, require_option};
|
||||
use crate::Step;
|
||||
use color_eyre::eyre::Result;
|
||||
use std::process::Command;
|
||||
|
||||
pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
print_separator("DragonFly BSD Packages");
|
||||
let mut cmd = ctx.execute(sudo);
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
print_separator(t!("DragonFly BSD Packages"));
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.args(["/usr/local/sbin/pkg", "upgrade"]);
|
||||
if ctx.config().yes(Step::System) {
|
||||
cmd.arg("-y");
|
||||
@@ -17,10 +18,19 @@ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
println!();
|
||||
Command::new(sudo)
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
|
||||
print_separator(t!("DragonFly BSD Audit"));
|
||||
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
if !Command::new(sudo)
|
||||
.args(["/usr/local/sbin/pkg", "audit", "-Fr"])
|
||||
.status_checked()?;
|
||||
.status()?
|
||||
.success()
|
||||
{
|
||||
println!(t!(
|
||||
"The package audit was successful, but vulnerable packages still remain on the system"
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
||||
use crate::utils::{get_require_sudo_string, require_option};
|
||||
use crate::Step;
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
use std::process::Command;
|
||||
|
||||
pub fn upgrade_freebsd(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
print_separator("FreeBSD Update");
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
print_separator(t!("FreeBSD Update"));
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.args(["/usr/sbin/freebsd-update", "fetch", "install"])
|
||||
@@ -16,8 +17,8 @@ pub fn upgrade_freebsd(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
print_separator("FreeBSD Packages");
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
print_separator(t!("FreeBSD Packages"));
|
||||
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
|
||||
@@ -29,8 +30,10 @@ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
println!();
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
|
||||
print_separator(t!("FreeBSD Audit"));
|
||||
|
||||
Command::new(sudo)
|
||||
.args(["/usr/sbin/pkg", "audit", "-Fr"])
|
||||
.status_checked()?;
|
||||
|
||||
@@ -3,14 +3,16 @@ use std::process::Command;
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use ini::Ini;
|
||||
use rust_i18n::t;
|
||||
use tracing::{debug, warn};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::error::{SkipStep, TopgradeError};
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::steps::generic::is_wsl;
|
||||
use crate::steps::os::archlinux;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{require, require_option, which, PathExt, REQUIRE_SUDO};
|
||||
use crate::terminal::{print_separator, prompt_yesno};
|
||||
use crate::utils::{get_require_sudo_string, require, require_option, which, PathExt};
|
||||
use crate::{Step, HOME_DIR};
|
||||
|
||||
static OS_RELEASE_PATH: &str = "/etc/os-release";
|
||||
@@ -19,14 +21,17 @@ static OS_RELEASE_PATH: &str = "/etc/os-release";
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Distribution {
|
||||
Alpine,
|
||||
Wolfi,
|
||||
Arch,
|
||||
Bedrock,
|
||||
CentOS,
|
||||
Chimera,
|
||||
ClearLinux,
|
||||
Fedora,
|
||||
FedoraSilverblue,
|
||||
FedoraImmutable,
|
||||
Debian,
|
||||
Gentoo,
|
||||
NILRT,
|
||||
OpenMandriva,
|
||||
OpenSuseTumbleweed,
|
||||
PCLinuxOS,
|
||||
@@ -38,6 +43,7 @@ pub enum Distribution {
|
||||
Exherbo,
|
||||
NixOS,
|
||||
KDENeon,
|
||||
Nobara,
|
||||
}
|
||||
|
||||
impl Distribution {
|
||||
@@ -45,30 +51,35 @@ impl Distribution {
|
||||
let section = os_release.general_section();
|
||||
let id = section.get("ID");
|
||||
let name = section.get("NAME");
|
||||
let variant: Option<Vec<&str>> = section.get("VARIANT").map(|s| s.split_whitespace().collect());
|
||||
let variant = section.get("VARIANT");
|
||||
let id_like: Option<Vec<&str>> = section.get("ID_LIKE").map(|s| s.split_whitespace().collect());
|
||||
|
||||
Ok(match id {
|
||||
Some("alpine") => Distribution::Alpine,
|
||||
Some("chimera") => Distribution::Chimera,
|
||||
Some("wolfi") => Distribution::Wolfi,
|
||||
Some("centos") | Some("rhel") | Some("ol") => Distribution::CentOS,
|
||||
Some("clear-linux-os") => Distribution::ClearLinux,
|
||||
Some("fedora") | Some("nobara") => {
|
||||
Some("fedora") => {
|
||||
return if let Some(variant) = variant {
|
||||
if variant.contains(&"Silverblue") {
|
||||
Ok(Distribution::FedoraSilverblue)
|
||||
} else {
|
||||
Ok(Distribution::Fedora)
|
||||
match variant {
|
||||
"Silverblue" | "Kinoite" | "Sericea" | "Onyx" | "IoT Edition" | "Sway Atomic" => {
|
||||
Ok(Distribution::FedoraImmutable)
|
||||
}
|
||||
_ => Ok(Distribution::Fedora),
|
||||
}
|
||||
} else {
|
||||
Ok(Distribution::Fedora)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Some("nilrt") => Distribution::NILRT,
|
||||
Some("nobara") => Distribution::Nobara,
|
||||
Some("void") => Distribution::Void,
|
||||
Some("debian") | Some("pureos") | Some("Deepin") => Distribution::Debian,
|
||||
Some("debian") | Some("pureos") | Some("Deepin") | Some("linuxmint") => Distribution::Debian,
|
||||
Some("arch") | Some("manjaro-arm") | Some("garuda") | Some("artix") => Distribution::Arch,
|
||||
Some("solus") => Distribution::Solus,
|
||||
Some("gentoo") => Distribution::Gentoo,
|
||||
Some("gentoo") | Some("funtoo") => Distribution::Gentoo,
|
||||
Some("exherbo") => Distribution::Exherbo,
|
||||
Some("nixos") => Distribution::NixOS,
|
||||
Some("opensuse-microos") => Distribution::SuseMicro,
|
||||
@@ -125,13 +136,15 @@ impl Distribution {
|
||||
}
|
||||
|
||||
pub fn upgrade(self, ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator("System update");
|
||||
print_separator(t!("System update"));
|
||||
|
||||
match self {
|
||||
Distribution::Alpine => upgrade_alpine_linux(ctx),
|
||||
Distribution::Chimera => upgrade_chimera_linux(ctx),
|
||||
Distribution::Wolfi => upgrade_wolfi_linux(ctx),
|
||||
Distribution::Arch => archlinux::upgrade_arch_linux(ctx),
|
||||
Distribution::CentOS | Distribution::Fedora => upgrade_redhat(ctx),
|
||||
Distribution::FedoraSilverblue => upgrade_fedora_silverblue(ctx),
|
||||
Distribution::FedoraImmutable => upgrade_fedora_immutable(ctx),
|
||||
Distribution::ClearLinux => upgrade_clearlinux(ctx),
|
||||
Distribution::Debian => upgrade_debian(ctx),
|
||||
Distribution::Gentoo => upgrade_gentoo(ctx),
|
||||
@@ -147,6 +160,8 @@ impl Distribution {
|
||||
Distribution::Bedrock => update_bedrock(ctx),
|
||||
Distribution::OpenMandriva => upgrade_openmandriva(ctx),
|
||||
Distribution::PCLinuxOS => upgrade_pclinuxos(ctx),
|
||||
Distribution::Nobara => upgrade_nobara(ctx),
|
||||
Distribution::NILRT => upgrade_nilrt(ctx),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,7 +177,7 @@ impl Distribution {
|
||||
}
|
||||
|
||||
fn update_bedrock(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
|
||||
ctx.run_type().execute(sudo).args(["brl", "update"]);
|
||||
|
||||
@@ -173,7 +188,7 @@ fn update_bedrock(ctx: &ExecutionContext) -> Result<()> {
|
||||
debug!("Bedrock distribution {}", distribution);
|
||||
match distribution {
|
||||
"arch" => archlinux::upgrade_arch_linux(ctx)?,
|
||||
"debian" | "ubuntu" => upgrade_debian(ctx)?,
|
||||
"debian" | "ubuntu" | "linuxmint" => upgrade_debian(ctx)?,
|
||||
"centos" | "fedora" => upgrade_redhat(ctx)?,
|
||||
"bedrock" => upgrade_bedrock_strata(ctx)?,
|
||||
_ => {
|
||||
@@ -185,15 +200,25 @@ fn update_bedrock(ctx: &ExecutionContext) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn is_wsl() -> Result<bool> {
|
||||
let output = Command::new("uname").arg("-r").output_checked_utf8()?.stdout;
|
||||
debug!("Uname output: {}", output);
|
||||
Ok(output.contains("microsoft"))
|
||||
}
|
||||
|
||||
fn upgrade_alpine_linux(ctx: &ExecutionContext) -> Result<()> {
|
||||
let apk = require("apk")?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
|
||||
ctx.run_type().execute(sudo).arg(&apk).arg("update").status_checked()?;
|
||||
ctx.run_type().execute(sudo).arg(&apk).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
fn upgrade_chimera_linux(ctx: &ExecutionContext) -> Result<()> {
|
||||
let apk = require("apk")?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
|
||||
ctx.run_type().execute(sudo).arg(&apk).arg("update").status_checked()?;
|
||||
ctx.run_type().execute(sudo).arg(&apk).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
fn upgrade_wolfi_linux(ctx: &ExecutionContext) -> Result<()> {
|
||||
let apk = require("apk")?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
|
||||
ctx.run_type().execute(sudo).arg(&apk).arg("update").status_checked()?;
|
||||
ctx.run_type().execute(sudo).arg(&apk).arg("upgrade").status_checked()
|
||||
@@ -208,7 +233,7 @@ fn upgrade_redhat(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
};
|
||||
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command
|
||||
.arg(which("dnf").unwrap_or_else(|| Path::new("yum").to_path_buf()))
|
||||
@@ -230,7 +255,49 @@ fn upgrade_redhat(ctx: &ExecutionContext) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn upgrade_fedora_silverblue(ctx: &ExecutionContext) -> Result<()> {
|
||||
fn upgrade_nobara(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let pkg_manager = require("dnf")?;
|
||||
|
||||
let mut update_command = ctx.run_type().execute(sudo);
|
||||
update_command.arg(&pkg_manager);
|
||||
|
||||
if ctx.config().yes(Step::System) {
|
||||
update_command.arg("-y");
|
||||
}
|
||||
|
||||
update_command.arg("update");
|
||||
// See https://nobaraproject.org/docs/upgrade-troubleshooting/how-do-i-update-the-system/
|
||||
update_command.args([
|
||||
"rpmfusion-nonfree-release",
|
||||
"rpmfusion-free-release",
|
||||
"fedora-repos",
|
||||
"nobara-repos",
|
||||
]);
|
||||
update_command.arg("--refresh").status_checked()?;
|
||||
|
||||
let mut upgrade_command = ctx.run_type().execute(sudo);
|
||||
upgrade_command.arg(&pkg_manager);
|
||||
|
||||
if ctx.config().yes(Step::System) {
|
||||
upgrade_command.arg("-y");
|
||||
}
|
||||
|
||||
upgrade_command.arg("distro-sync");
|
||||
|
||||
upgrade_command.status_checked()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn upgrade_nilrt(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let opkg = require("opkg")?;
|
||||
|
||||
ctx.run_type().execute(sudo).arg(&opkg).arg("update").status_checked()?;
|
||||
ctx.run_type().execute(sudo).arg(&opkg).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
fn upgrade_fedora_immutable(ctx: &ExecutionContext) -> Result<()> {
|
||||
let ostree = require("rpm-ostree")?;
|
||||
let mut command = ctx.run_type().execute(ostree);
|
||||
command.arg("upgrade");
|
||||
@@ -239,14 +306,14 @@ fn upgrade_fedora_silverblue(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
fn upgrade_bedrock_strata(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
ctx.run_type().execute(sudo).args(["brl", "update"]).status_checked()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn upgrade_suse(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.args(["zypper", "refresh"])
|
||||
@@ -269,7 +336,7 @@ fn upgrade_suse(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
fn upgrade_opensuse_tumbleweed(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.args(["zypper", "refresh"])
|
||||
@@ -287,7 +354,7 @@ fn upgrade_opensuse_tumbleweed(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
fn upgrade_suse_micro(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.arg("transactional-update");
|
||||
if ctx.config().yes(Step::System) {
|
||||
@@ -300,10 +367,10 @@ fn upgrade_suse_micro(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
fn upgrade_openmandriva(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
|
||||
command.arg(&which("dnf").unwrap()).arg("upgrade");
|
||||
command.arg(which("dnf").unwrap()).arg("upgrade");
|
||||
|
||||
if let Some(args) = ctx.config().dnf_arguments() {
|
||||
command.args(args.split_whitespace());
|
||||
@@ -317,11 +384,12 @@ fn upgrade_openmandriva(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn upgrade_pclinuxos(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut command_update = ctx.run_type().execute(sudo);
|
||||
|
||||
command_update.arg(&which("apt-get").unwrap()).arg("update");
|
||||
command_update.arg(which("apt-get").unwrap()).arg("update");
|
||||
|
||||
if let Some(args) = ctx.config().dnf_arguments() {
|
||||
command_update.args(args.split_whitespace());
|
||||
@@ -334,7 +402,7 @@ fn upgrade_pclinuxos(ctx: &ExecutionContext) -> Result<()> {
|
||||
command_update.status_checked()?;
|
||||
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.arg(&which("apt-get").unwrap());
|
||||
cmd.arg(which("apt-get").unwrap());
|
||||
cmd.arg("dist-upgrade");
|
||||
if ctx.config().yes(Step::System) {
|
||||
cmd.arg("-y");
|
||||
@@ -365,7 +433,7 @@ fn upgrade_vanilla(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
fn upgrade_void(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.args(["xbps-install", "-Su", "xbps"]);
|
||||
if ctx.config().yes(Step::System) {
|
||||
@@ -386,7 +454,7 @@ fn upgrade_void(ctx: &ExecutionContext) -> Result<()> {
|
||||
fn upgrade_gentoo(ctx: &ExecutionContext) -> Result<()> {
|
||||
let run_type = ctx.run_type();
|
||||
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
if let Some(layman) = which("layman") {
|
||||
run_type
|
||||
.execute(sudo)
|
||||
@@ -395,17 +463,22 @@ fn upgrade_gentoo(ctx: &ExecutionContext) -> Result<()> {
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
println!("Syncing portage");
|
||||
run_type
|
||||
.execute(sudo)
|
||||
.args(["emerge", "--sync"])
|
||||
.args(
|
||||
ctx.config()
|
||||
.emerge_sync_flags()
|
||||
.map(|s| s.split_whitespace().collect())
|
||||
.unwrap_or_else(|| vec!["-q"]),
|
||||
)
|
||||
.status_checked()?;
|
||||
println!("{}", t!("Syncing portage"));
|
||||
if let Some(ego) = which("ego") {
|
||||
// The Funtoo team doesn't recommend running both ego sync and emerge --sync
|
||||
run_type.execute(sudo).arg(ego).arg("sync").status_checked()?;
|
||||
} else {
|
||||
run_type
|
||||
.execute(sudo)
|
||||
.args(["emerge", "--sync"])
|
||||
.args(
|
||||
ctx.config()
|
||||
.emerge_sync_flags()
|
||||
.map(|s| s.split_whitespace().collect())
|
||||
.unwrap_or_else(|| vec!["-q"]),
|
||||
)
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
if let Some(eix_update) = which("eix-update") {
|
||||
run_type.execute(sudo).arg(eix_update).status_checked()?;
|
||||
@@ -456,7 +529,7 @@ fn upgrade_debian(ctx: &ExecutionContext) -> Result<()> {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
if !is_nala {
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
@@ -510,7 +583,7 @@ pub fn run_deb_get(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
fn upgrade_solus(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.arg("eopkg");
|
||||
if ctx.config().yes(Step::System) {
|
||||
@@ -619,7 +692,7 @@ pub fn run_packer_nu(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
fn upgrade_clearlinux(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.args(["swupd", "update"]);
|
||||
if ctx.config().yes(Step::System) {
|
||||
@@ -631,7 +704,7 @@ fn upgrade_clearlinux(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
fn upgrade_exherbo(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
ctx.run_type().execute(sudo).args(["cave", "sync"]).status_checked()?;
|
||||
|
||||
ctx.run_type()
|
||||
@@ -660,7 +733,7 @@ fn upgrade_exherbo(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
fn upgrade_nixos(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.args(["/run/current-system/sw/bin/nixos-rebuild", "switch", "--upgrade"]);
|
||||
|
||||
@@ -686,7 +759,7 @@ fn upgrade_neon(ctx: &ExecutionContext) -> Result<()> {
|
||||
// seems rare
|
||||
// if that comes up we need to create a Distribution::PackageKit or some such
|
||||
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let pkcon = which("pkcon").unwrap();
|
||||
// pkcon ignores update with update and refresh provided together
|
||||
ctx.run_type()
|
||||
@@ -708,16 +781,54 @@ fn upgrade_neon(ctx: &ExecutionContext) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_needrestart(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
let needrestart = require("needrestart")?;
/// `needrestart` should be skipped if:
///
/// 1. This is a redhat-based distribution
/// 2. This is a debian-based distribution and it is using `nala` as the `apt`
/// alternative
fn should_skip_needrestart() -> Result<()> {
let distribution = Distribution::detect()?;
let msg = t!("needrestart will be ran by the package manager");

if distribution.redhat_based() {
return Err(SkipStep(String::from("needrestart will be ran by the package manager")).into());
return Err(SkipStep(String::from(msg)).into());
}

print_separator("Check for needed restarts");
if matches!(distribution, Distribution::Debian) {
let apt = which("apt-fast")
.or_else(|| {
if which("mist").is_some() {
Some(PathBuf::from("mist"))
} else {
None
}
})
.or_else(|| {
if Path::new("/usr/bin/nala").exists() {
Some(Path::new("/usr/bin/nala").to_path_buf())
} else {
None
}
})
.unwrap_or_else(|| PathBuf::from("apt-get"));

let is_nala = apt.ends_with("nala");

if is_nala {
return Err(SkipStep(String::from(msg)).into());
}
}

Ok(())
}

pub fn run_needrestart(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
let needrestart = require("needrestart")?;

should_skip_needrestart()?;

print_separator(t!("Check for needed restarts"));

ctx.run_type().execute(sudo).arg(needrestart).status_checked()?;

@@ -728,10 +839,10 @@ pub fn run_fwupdmgr(ctx: &ExecutionContext) -> Result<()> {
|
||||
let fwupdmgr = require("fwupdmgr")?;
|
||||
|
||||
if is_wsl()? {
|
||||
return Err(SkipStep(String::from("Should not run in WSL")).into());
|
||||
return Err(SkipStep(t!("Should not run in WSL").to_string()).into());
|
||||
}
|
||||
|
||||
print_separator("Firmware upgrades");
|
||||
print_separator(t!("Firmware upgrades"));
|
||||
|
||||
ctx.run_type()
|
||||
.execute(&fwupdmgr)
|
||||
@@ -753,7 +864,7 @@ pub fn run_fwupdmgr(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
pub fn run_flatpak(ctx: &ExecutionContext) -> Result<()> {
|
||||
let flatpak = require("flatpak")?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let cleanup = ctx.config().cleanup();
|
||||
let yes = ctx.config().yes(Step::Flatpak);
|
||||
let run_type = ctx.run_type();
|
||||
@@ -773,7 +884,7 @@ pub fn run_flatpak(ctx: &ExecutionContext) -> Result<()> {
|
||||
run_type.execute(&flatpak).args(&cleanup_args).status_checked()?;
|
||||
}
|
||||
|
||||
print_separator("Flatpak System Packages");
|
||||
print_separator(t!("Flatpak System Packages"));
|
||||
if ctx.config().flatpak_use_sudo() || std::env::var("SSH_CLIENT").is_ok() {
|
||||
let mut update_args = vec!["update", "--system"];
|
||||
if yes {
|
||||
@@ -814,11 +925,11 @@ pub fn run_flatpak(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn run_snap(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let snap = require("snap")?;
|
||||
|
||||
if !PathBuf::from("/var/snapd.socket").exists() && !PathBuf::from("/run/snapd.socket").exists() {
|
||||
return Err(SkipStep(String::from("Snapd socket does not exist")).into());
|
||||
return Err(SkipStep(t!("Snapd socket does not exist").to_string()).into());
|
||||
}
|
||||
print_separator("snap");
|
||||
|
||||
@@ -826,7 +937,7 @@ pub fn run_snap(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn run_pihole_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let pihole = require("pihole")?;
|
||||
Path::new("/opt/pihole/update.sh").require()?;
|
||||
|
||||
@@ -840,7 +951,12 @@ pub fn run_protonup_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("protonup");
|
||||
|
||||
ctx.run_type().execute(protonup).status_checked()?;
|
||||
let mut cmd = ctx.run_type().execute(protonup);
|
||||
if ctx.config().yes(Step::Protonup) {
|
||||
cmd.arg("--yes");
|
||||
}
|
||||
cmd.status_checked()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -855,7 +971,7 @@ pub fn run_distrobox_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
) {
|
||||
(r, Some(c)) => {
|
||||
if c.is_empty() {
|
||||
return Err(SkipStep("You need to specify at least one container".to_string()).into());
|
||||
return Err(SkipStep(t!("You need to specify at least one container").to_string()).into());
|
||||
}
|
||||
r.args(c)
|
||||
}
|
||||
@@ -870,7 +986,7 @@ pub fn run_distrobox_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn run_dkp_pacman_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
let dkp_pacman = require("dkp-pacman")?;
|
||||
|
||||
print_separator("Devkitpro pacman");
|
||||
@@ -893,26 +1009,103 @@ pub fn run_dkp_pacman_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn run_config_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
if ctx.config().yes(Step::ConfigUpdate) {
|
||||
return Err(SkipStep("Skipped in --yes".to_string()).into());
|
||||
return Err(SkipStep(t!("Skipped in --yes").to_string()).into());
|
||||
}
|
||||
|
||||
if let Ok(etc_update) = require("etc-update") {
|
||||
print_separator("Configuration update");
|
||||
print_separator(t!("Configuration update"));
|
||||
ctx.run_type().execute(sudo).arg(etc_update).status_checked()?;
|
||||
} else if let Ok(pacdiff) = require("pacdiff") {
|
||||
if std::env::var("DIFFPROG").is_err() {
|
||||
require("vim")?;
|
||||
}
|
||||
|
||||
print_separator("Configuration update");
|
||||
print_separator(t!("Configuration update"));
|
||||
ctx.execute_elevated(&pacdiff, false)?.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_lure_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let lure = require("lure")?;
|
||||
|
||||
print_separator("LURE");
|
||||
|
||||
let mut exe = ctx.run_type().execute(lure);
|
||||
|
||||
if ctx.config().yes(Step::Lure) {
|
||||
exe.args(["-i=false", "up"]);
|
||||
} else {
|
||||
exe.arg("up");
|
||||
}
|
||||
|
||||
exe.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_waydroid(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
let waydroid = require("waydroid")?;
let status = ctx.run_type().execute(&waydroid).arg("status").output_checked_utf8()?;
// example output of `waydroid status`:
//
// ```sh
// $ waydroid status
// Session: RUNNING
// Container: RUNNING
// Vendor type: MAINLINE
// IP address: 192.168.240.112
// Session user: w568w(1000)
// Wayland display: wayland-0
// ```
//
// ```sh
// $ waydroid status
// Session: STOPPED
// Vendor type: MAINLINE
// ```
let session = status
.stdout
.lines()
.find(|line| line.contains("Session:"))
.unwrap_or_else(|| panic!("the output of `waydroid status` should contain `Session:`"));
let is_container_running = session.contains("RUNNING");
let assume_yes = ctx.config().yes(Step::Waydroid);

print_separator("Waydroid");

if is_container_running && !assume_yes {
let update_allowed = prompt_yesno(&t!(
"Going to execute `waydroid upgrade`, which would STOP the running container, is this ok?"
))?;
if !update_allowed {
return Err(
SkipStep(t!("Skip the Waydroid step because the user don't want to proceed").to_string()).into(),
);
}
}
ctx.run_type()
.execute(sudo)
.arg(&waydroid)
.arg("upgrade")
.status_checked()
}

pub fn run_auto_cpufreq(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
let auto_cpu_freq = require("auto-cpufreq")?;

print_separator("auto-cpufreq");

ctx.run_type()
.execute(sudo)
.arg(auto_cpu_freq)
.arg("--update")
.status_checked()
}

#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -925,6 +1118,11 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_wolfi() {
|
||||
test_template(include_str!("os_release/wolfi"), Distribution::Wolfi);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_arch_linux() {
|
||||
test_template(include_str!("os_release/arch"), Distribution::Arch);
|
||||
@@ -976,6 +1174,22 @@ mod tests {
|
||||
test_template(include_str!("os_release/fedora"), Distribution::Fedora);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fedora_immutable() {
|
||||
test_template(
|
||||
include_str!("os_release/fedorasilverblue"),
|
||||
Distribution::FedoraImmutable,
|
||||
);
|
||||
test_template(include_str!("os_release/fedorakinoite"), Distribution::FedoraImmutable);
|
||||
test_template(include_str!("os_release/fedoraonyx"), Distribution::FedoraImmutable);
|
||||
test_template(include_str!("os_release/fedorasericea"), Distribution::FedoraImmutable);
|
||||
test_template(include_str!("os_release/fedoraiot"), Distribution::FedoraImmutable);
|
||||
test_template(
|
||||
include_str!("os_release/fedoraswayatomic"),
|
||||
Distribution::FedoraImmutable,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_manjaro() {
|
||||
test_template(include_str!("os_release/manjaro"), Distribution::Arch);
|
||||
@@ -991,6 +1205,11 @@ mod tests {
|
||||
test_template(include_str!("os_release/gentoo"), Distribution::Gentoo);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_funtoo() {
|
||||
test_template(include_str!("os_release/funtoo"), Distribution::Gentoo);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_exherbo() {
|
||||
test_template(include_str!("os_release/exherbo"), Distribution::Exherbo);
|
||||
@@ -1045,4 +1264,14 @@ mod tests {
|
||||
fn test_solus() {
|
||||
test_template(include_str!("os_release/solus"), Distribution::Solus);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_nobara() {
|
||||
test_template(include_str!("os_release/nobara"), Distribution::Nobara);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_nilrt() {
|
||||
test_template(include_str!("os_release/nilrt"), Distribution::NILRT);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,18 @@
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::terminal::{print_separator, prompt_yesno};
use crate::utils::{require_option, REQUIRE_SUDO};
use crate::utils::{get_require_sudo_string, require_option};
use crate::{utils::require, Step};
use color_eyre::eyre::Result;
use rust_i18n::t;
use std::collections::HashSet;
use std::fs;
use std::process::Command;
use tracing::debug;

pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {
require("port")?;
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;

print_separator("MacPorts");
ctx.run_type()
@@ -33,25 +35,25 @@ pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {

pub fn run_mas(ctx: &ExecutionContext) -> Result<()> {
let mas = require("mas")?;
print_separator("macOS App Store");
print_separator(t!("macOS App Store"));

ctx.run_type().execute(mas).arg("upgrade").status_checked()
}

pub fn upgrade_macos(ctx: &ExecutionContext) -> Result<()> {
print_separator("macOS system update");
print_separator(t!("macOS system update"));

let should_ask = !(ctx.config().yes(Step::System)) || (ctx.config().dry_run());
let should_ask = !(ctx.config().yes(Step::System) || ctx.config().dry_run());
if should_ask {
println!("Finding available software");
println!("{}", t!("Finding available software"));
if system_update_available()? {
let answer = prompt_yesno("A system update is available. Do you wish to install it?")?;
let answer = prompt_yesno(t!("A system update is available. Do you wish to install it?").as_ref())?;
if !answer {
return Ok(());
}
println!();
} else {
println!("No new software available.");
println!("{}", t!("No new software available."));
return Ok(());
}
}
@@ -93,3 +95,150 @@ pub fn run_sparkle(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_xcodes(ctx: &ExecutionContext) -> Result<()> {
|
||||
let xcodes = require("xcodes")?;
|
||||
print_separator("Xcodes");
|
||||
|
||||
let should_ask = !(ctx.config().yes(Step::Xcodes) || ctx.config().dry_run());
|
||||
|
||||
let releases = ctx
|
||||
.run_type()
|
||||
.execute(&xcodes)
|
||||
.args(["update"])
|
||||
.output_checked_utf8()?
|
||||
.stdout;
|
||||
|
||||
let releases_installed: Vec<String> = releases
|
||||
.lines()
|
||||
.filter(|r| r.contains("(Installed)"))
|
||||
.map(String::from)
|
||||
.collect();
|
||||
|
||||
if releases_installed.is_empty() {
|
||||
println!("{}", t!("No Xcode releases installed."));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let (installed_gm, installed_beta, installed_regular) =
|
||||
releases_installed
|
||||
.iter()
|
||||
.fold((false, false, false), |(gm, beta, regular), release| {
|
||||
(
|
||||
gm || release.contains("GM") || release.contains("Release Candidate"),
|
||||
beta || release.contains("Beta"),
|
||||
regular
|
||||
|| !(release.contains("GM")
|
||||
|| release.contains("Release Candidate")
|
||||
|| release.contains("Beta")),
|
||||
)
|
||||
});
|
||||
|
||||
let releases_gm = releases
|
||||
.lines()
|
||||
.filter(|&r| r.matches("GM").count() > 0 || r.matches("Release Candidate").count() > 0)
|
||||
.map(String::from)
|
||||
.collect();
|
||||
let releases_beta = releases
|
||||
.lines()
|
||||
.filter(|&r| r.matches("Beta").count() > 0)
|
||||
.map(String::from)
|
||||
.collect();
|
||||
let releases_regular = releases
|
||||
.lines()
|
||||
.filter(|&r| {
|
||||
r.matches("GM").count() == 0
|
||||
&& r.matches("Release Candidate").count() == 0
|
||||
&& r.matches("Beta").count() == 0
|
||||
})
|
||||
.map(String::from)
|
||||
.collect();
|
||||
|
||||
if installed_gm {
|
||||
process_xcodes_releases(releases_gm, should_ask, ctx)?;
|
||||
}
|
||||
if installed_beta {
|
||||
process_xcodes_releases(releases_beta, should_ask, ctx)?;
|
||||
}
|
||||
if installed_regular {
|
||||
process_xcodes_releases(releases_regular, should_ask, ctx)?;
|
||||
}
|
||||
|
||||
let releases_new = ctx
|
||||
.run_type()
|
||||
.execute(&xcodes)
|
||||
.args(["list"])
|
||||
.output_checked_utf8()?
|
||||
.stdout;
|
||||
|
||||
let releases_gm_new_installed: HashSet<_> = releases_new
|
||||
.lines()
|
||||
.filter(|release| {
|
||||
release.contains("(Installed)") && (release.contains("GM") || release.contains("Release Candidate"))
|
||||
})
|
||||
.collect();
|
||||
let releases_beta_new_installed: HashSet<_> = releases_new
|
||||
.lines()
|
||||
.filter(|release| release.contains("(Installed)") && release.contains("Beta"))
|
||||
.collect();
|
||||
let releases_regular_new_installed: HashSet<_> = releases_new
|
||||
.lines()
|
||||
.filter(|release| {
|
||||
release.contains("(Installed)")
|
||||
&& !(release.contains("GM") || release.contains("Release Candidate") || release.contains("Beta"))
|
||||
})
|
||||
.collect();
|
||||
|
||||
for releases_new_installed in [
|
||||
releases_gm_new_installed,
|
||||
releases_beta_new_installed,
|
||||
releases_regular_new_installed,
|
||||
] {
|
||||
if should_ask && releases_new_installed.len() == 2 {
|
||||
let answer_uninstall =
|
||||
prompt_yesno(t!("Would you like to move the former Xcode release to the trash?").as_ref())?;
|
||||
if answer_uninstall {
|
||||
let _ = ctx
|
||||
.run_type()
|
||||
.execute(&xcodes)
|
||||
.args([
|
||||
"uninstall",
|
||||
releases_new_installed.iter().next().cloned().unwrap_or_default(),
|
||||
])
|
||||
.status_checked();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn process_xcodes_releases(releases_filtered: Vec<String>, should_ask: bool, ctx: &ExecutionContext) -> Result<()> {
|
||||
let xcodes = require("xcodes")?;
|
||||
|
||||
if releases_filtered
|
||||
.last()
|
||||
.map(|s| !s.contains("(Installed)"))
|
||||
.unwrap_or(true)
|
||||
&& !releases_filtered.is_empty()
|
||||
{
|
||||
println!(
|
||||
"{} {}",
|
||||
t!("New Xcode release detected:"),
|
||||
releases_filtered.last().cloned().unwrap_or_default()
|
||||
);
|
||||
if should_ask {
|
||||
let answer_install = prompt_yesno(t!("Would you like to install it?").as_ref())?;
|
||||
if answer_install {
|
||||
let _ = ctx
|
||||
.run_type()
|
||||
.execute(xcodes)
|
||||
.args(["install", &releases_filtered.last().cloned().unwrap_or_default()])
|
||||
.status_checked();
|
||||
}
|
||||
println!();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,23 +1,33 @@
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::terminal::print_separator;
use crate::utils::{require_option, REQUIRE_SUDO};
use crate::utils::{get_require_sudo_string, require_option};
use color_eyre::eyre::Result;
use std::path::PathBuf;

pub fn upgrade_openbsd(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("OpenBSD Update");
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
print_separator(t!("OpenBSD Update"));
ctx.run_type()
.execute(sudo)
.args(&["/usr/sbin/sysupgrade", "-n"])
.args(["/usr/sbin/sysupgrade", "-n"])
.status_checked()
}

pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("OpenBSD Packages");
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
print_separator(t!("OpenBSD Packages"));

if ctx.config().cleanup() {
ctx.run_type()
.execute(sudo)
.args(["/usr/sbin/pkg_delete", "-ac"])
.status_checked()?;
}

ctx.run_type()
.execute(sudo)
.args(&["/usr/sbin/pkg_add", "-u"])
.status_checked()
.args(["/usr/sbin/pkg_add", "-u"])
.status_checked()?;

Ok(())
}

22
src/steps/os/os_release/fedoraiot
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39.20240415.0 (IoT Edition)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39.20240415.0 (IoT Edition)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
HOME_URL="https://fedoraproject.org/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora/f39/system-administrators-guide/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://bugzilla.redhat.com/"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-11-12
|
||||
VARIANT="IoT Edition"
|
||||
VARIANT_ID=iot
|
||||
OSTREE_VERSION='39.20240415.0'
|
||||
23
src/steps/os/os_release/fedorakinoite
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39.20240105.0 (Kinoite)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39.20240105.0 (Kinoite)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://kinoite.fedoraproject.org"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-kinoite/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://pagure.io/fedora-kde/SIG/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-11-12
|
||||
VARIANT="Kinoite"
|
||||
VARIANT_ID=kinoite
|
||||
OSTREE_VERSION='39.20240105.0'
|
||||
22
src/steps/os/os_release/fedoraonyx
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Onyx)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Onyx)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/onyx/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-onyx/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://bugzilla.redhat.com/"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Onyx"
|
||||
VARIANT_ID=onyx
|
||||
22
src/steps/os/os_release/fedorasericea
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Sericea)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Sericea)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/sericea/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Sericea"
|
||||
VARIANT_ID=sericea
|
||||
22
src/steps/os/os_release/fedorasilverblue
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Silverblue)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Silverblue)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://silverblue.fedoraproject.org"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-silverblue/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://github.com/fedora-silverblue/issue-tracker/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Silverblue"
|
||||
VARIANT_ID=silverblue
|
||||
23
src/steps/os/os_release/fedoraswayatomic
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="40.20240426.0 (Sway Atomic)"
|
||||
ID=fedora
|
||||
VERSION_ID=40
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f40"
|
||||
PRETTY_NAME="Fedora Linux 40.20240426.0 (Sway Atomic)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:40"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/atomic-desktops/sway/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=40
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=40
|
||||
SUPPORT_END=2025-05-13
|
||||
VARIANT="Sway Atomic"
|
||||
VARIANT_ID=sway-atomic
|
||||
OSTREE_VERSION='40.20240426.0'
|
||||
6
src/steps/os/os_release/funtoo
Normal file
@@ -0,0 +1,6 @@
ID="funtoo"
NAME="Funtoo"
PRETTY_NAME="Funtoo Linux"
ANSI_COLOR="0;34"
HOME_URL="https://www.funtoo.org"
BUG_REPORT_URL="https://bugs.funtoo.org"
8
src/steps/os/os_release/nilrt
Normal file
@@ -0,0 +1,8 @@
ID=nilrt
NAME="NI Linux Real-Time"
VERSION="10.0 (kirkstone)"
VERSION_ID=10.0
PRETTY_NAME="NI Linux Real-Time 10.0 (kirkstone)"
DISTRO_CODENAME="kirkstone"
BUILD_ID="23.8.0f153-x64"
VERSION_CODENAME="kirkstone"
23
src/steps/os/os_release/nobara
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Nobara Linux"
|
||||
VERSION="39 (GNOME Edition)"
|
||||
ID=nobara
|
||||
ID_LIKE="rhel centos fedora"
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Nobara Linux 39 (GNOME Edition)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=nobara-logo-icon
|
||||
CPE_NAME="cpe:/o:nobaraproject:nobara:39"
|
||||
DEFAULT_HOSTNAME="nobara"
|
||||
HOME_URL="https://nobaraproject.org/"
|
||||
DOCUMENTATION_URL="https://www.nobaraproject.org/"
|
||||
SUPPORT_URL="https://www.nobaraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/gloriouseggroll/nobara-images"
|
||||
REDHAT_BUGZILLA_PRODUCT="Nobara"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Nobara"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="GNOME Edition"
|
||||
VARIANT_ID=gnome
|
||||
5
src/steps/os/os_release/wolfi
Normal file
@@ -0,0 +1,5 @@
ID=wolfi
NAME="Wolfi"
PRETTY_NAME="Wolfi"
VERSION_ID="20230201"
HOME_URL="https://wolfi.dev"
@@ -1,16 +1,26 @@
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::os::unix::fs::MetadataExt;
|
||||
use std::path::Component;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use std::{env::var, path::Path};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::{Step, HOME_DIR};
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use home;
|
||||
use ini::Ini;
|
||||
#[cfg(target_os = "linux")]
|
||||
use nix::unistd::Uid;
|
||||
use rust_i18n::t;
|
||||
use semver::Version;
|
||||
use tracing::debug;
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
use super::linux::Distribution;
|
||||
use crate::error::SkipStep;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
#[cfg(any(target_os = "linux", target_os = "macos"))]
|
||||
@@ -18,7 +28,7 @@ use crate::executor::Executor;
|
||||
#[cfg(any(target_os = "linux", target_os = "macos"))]
|
||||
use crate::executor::RunType;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{require, require_option, PathExt, REQUIRE_SUDO};
|
||||
use crate::utils::{get_require_sudo_string, require, require_option, PathExt};
|
||||
|
||||
#[cfg(any(target_os = "linux", target_os = "macos"))]
|
||||
const INTEL_BREW: &str = "/usr/local/bin/brew";
|
||||
@@ -92,19 +102,19 @@ pub fn run_fisher(ctx: &ExecutionContext) -> Result<()> {
|
||||
.args(["-c", "type -t fisher"])
|
||||
.output_checked_utf8()
|
||||
.map(|_| ())
|
||||
.map_err(|_| SkipStep("`fisher` is not defined in `fish`".to_owned()))?;
|
||||
.map_err(|_| SkipStep(t!("`fisher` is not defined in `fish`").to_string()))?;
|
||||
|
||||
Command::new(&fish)
|
||||
.args(["-c", "echo \"$__fish_config_dir/fish_plugins\""])
|
||||
.output_checked_utf8()
|
||||
.and_then(|output| Path::new(&output.stdout.trim()).require().map(|_| ()))
|
||||
.map_err(|err| SkipStep(format!("`fish_plugins` path doesn't exist: {err}")))?;
|
||||
.map_err(|err| SkipStep(t!("`fish_plugins` path doesn't exist: {err}", err = err).to_string()))?;
|
||||
|
||||
Command::new(&fish)
|
||||
.args(["-c", "fish_update_completions"])
|
||||
.output_checked_utf8()
|
||||
.map(|_| ())
|
||||
.map_err(|_| SkipStep("`fish_update_completions` is not available".to_owned()))?;
|
||||
.map_err(|_| SkipStep(t!("`fish_update_completions` is not available").to_string()))?;
|
||||
|
||||
print_separator("Fisher");
|
||||
|
||||
@@ -171,7 +181,7 @@ pub fn run_oh_my_fish(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
pub fn run_pkgin(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pkgin = require("pkgin")?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
|
||||
print_separator("Pkgin");
|
||||
|
||||
@@ -226,7 +236,7 @@ pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
|
||||
let gdbus = require("gdbus")?;
|
||||
require_option(
|
||||
var("XDG_CURRENT_DESKTOP").ok().filter(|p| p.contains("GNOME")),
|
||||
"Desktop doest not appear to be gnome".to_string(),
|
||||
t!("Desktop doest not appear to be gnome").to_string(),
|
||||
)?;
|
||||
let output = Command::new("gdbus")
|
||||
.args([
|
||||
@@ -243,10 +253,10 @@ pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
debug!("Checking for gnome extensions: {}", output);
|
||||
if !output.stdout.contains("org.gnome.Shell.Extensions") {
|
||||
return Err(SkipStep(String::from("Gnome shell extensions are unregistered in DBus")).into());
|
||||
return Err(SkipStep(t!("Gnome shell extensions are unregistered in DBus").to_string()).into());
|
||||
}
|
||||
|
||||
print_separator("Gnome Shell extensions");
|
||||
print_separator(t!("Gnome Shell extensions"));
|
||||
|
||||
ctx.run_type()
|
||||
.execute(gdbus)
|
||||
@@ -263,6 +273,23 @@ pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
pub fn brew_linux_sudo_uid() -> Option<u32> {
let linuxbrew_directory = "/home/linuxbrew/.linuxbrew";
if let Ok(metadata) = std::fs::metadata(linuxbrew_directory) {
let owner_id = metadata.uid();
let current_id = Uid::effective();
// print debug these two values
debug!("linuxbrew_directory owner_id: {}, current_id: {}", owner_id, current_id);
return if owner_id == current_id.as_raw() {
None // no need for sudo if linuxbrew is owned by the current user
} else {
Some(owner_id) // otherwise use sudo to run brew as the owner
};
}
None
}

#[cfg(any(target_os = "linux", target_os = "macos"))]
pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()> {
#[allow(unused_variables)]
@@ -271,18 +298,50 @@ pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
if variant.is_path() && !BrewVariant::is_macos_custom(binary_name) {
|
||||
return Err(SkipStep("Not a custom brew for macOS".to_string()).into());
|
||||
return Err(SkipStep(t!("Not a custom brew for macOS").to_string()).into());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
let sudo_uid = brew_linux_sudo_uid();
|
||||
// if brew is owned by another user, execute "sudo -Hu <uid> brew update"
|
||||
if let Some(user_id) = sudo_uid {
|
||||
let uid = nix::unistd::Uid::from_raw(user_id);
|
||||
let user = nix::unistd::User::from_uid(uid)
|
||||
.expect("failed to call getpwuid()")
|
||||
.expect("this user should exist");
|
||||
|
||||
let sudo_as_user = t!("sudo as user '{user}'", user = user.name);
|
||||
print_separator(format!("{} ({})", variant.step_title(), sudo_as_user));
|
||||
|
||||
let sudo = crate::utils::require_option(ctx.sudo().as_ref(), crate::utils::get_require_sudo_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.current_dir("/tmp") // brew needs a writable current directory
|
||||
.args([
|
||||
"--set-home",
|
||||
&format!("--user={}", user.name),
|
||||
&format!("{}", binary_name.to_string_lossy()),
|
||||
"update",
|
||||
])
|
||||
.status_checked()?;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
print_separator(variant.step_title());
|
||||
let run_type = ctx.run_type();
|
||||
|
||||
variant.execute(run_type).arg("update").status_checked()?;
|
||||
variant
|
||||
.execute(run_type)
|
||||
.args(["upgrade", "--ignore-pinned", "--formula"])
|
||||
.status_checked()?;
|
||||
|
||||
let mut command = variant.execute(run_type);
|
||||
command.args(["upgrade", "--formula"]);
|
||||
|
||||
if ctx.config().brew_fetch_head() {
|
||||
command.arg("--fetch-HEAD");
|
||||
}
|
||||
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
variant.execute(run_type).arg("cleanup").status_checked()?;
|
||||
@@ -299,7 +358,7 @@ pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<
|
||||
pub fn run_brew_cask(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()> {
|
||||
let binary_name = require(variant.binary_name())?;
|
||||
if variant.is_path() && !BrewVariant::is_macos_custom(binary_name) {
|
||||
return Err(SkipStep("Not a custom brew for macOS".to_string()).into());
|
||||
return Err(SkipStep(t!("Not a custom brew for macOS").to_string()).into());
|
||||
}
|
||||
print_separator(format!("{} - Cask", variant.step_title()));
|
||||
let run_type = ctx.run_type();
|
||||
@@ -322,6 +381,12 @@ pub fn run_brew_cask(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()>
|
||||
if ctx.config().brew_cask_greedy() {
|
||||
brew_args.push("--greedy");
|
||||
}
|
||||
if ctx.config().brew_greedy_latest() {
|
||||
brew_args.push("--greedy-latest");
|
||||
}
|
||||
if ctx.config().brew_greedy_auto_updates() {
|
||||
brew_args.push("--greedy-auto-updates");
|
||||
}
|
||||
}
|
||||
|
||||
variant.execute(run_type).args(&brew_args).status_checked()?;
|
||||
@@ -348,7 +413,7 @@ pub fn run_guix(ctx: &ExecutionContext) -> Result<()> {
|
||||
if should_upgrade {
|
||||
return run_type.execute(&guix).args(["package", "-u"]).status_checked();
|
||||
}
|
||||
Err(SkipStep(String::from("Guix Pull Failed, Skipping")).into())
|
||||
Err(SkipStep(t!("Guix Pull Failed, Skipping").to_string()).into())
|
||||
}
|
||||
|
||||
pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -363,58 +428,175 @@ pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
|
||||
debug!("nix profile: {:?}", profile_path);
|
||||
let manifest_json_path = profile_path.join("manifest.json");
|
||||
|
||||
let output = Command::new(&nix_env).args(["--query", "nix"]).output_checked_utf8();
|
||||
debug!("nix-env output: {:?}", output);
|
||||
let should_self_upgrade = output.is_ok();
|
||||
|
||||
print_separator("Nix");
|
||||
|
||||
let multi_user = fs::metadata(&nix)?.uid() == 0;
|
||||
debug!("Multi user nix: {}", multi_user);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
use super::linux::Distribution;
|
||||
|
||||
if let Ok(Distribution::NixOS) = Distribution::detect() {
|
||||
return Err(SkipStep(String::from("Nix on NixOS must be upgraded via nixos-rebuild switch")).into());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
if require("darwin-rebuild").is_ok() {
|
||||
return Err(SkipStep(String::from(
|
||||
"Nix-darwin on macOS must be upgraded via darwin-rebuild switch",
|
||||
))
|
||||
.into());
|
||||
return Err(
|
||||
SkipStep(t!("Nix-darwin on macOS must be upgraded via darwin-rebuild switch").to_string()).into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let run_type = ctx.run_type();
|
||||
|
||||
if should_self_upgrade {
|
||||
if multi_user {
|
||||
ctx.execute_elevated(&nix, true)?.arg("upgrade-nix").status_checked()?;
|
||||
} else {
|
||||
run_type.execute(&nix).arg("upgrade-nix").status_checked()?;
|
||||
}
|
||||
}
|
||||
|
||||
run_type.execute(nix_channel).arg("--update").status_checked()?;
|
||||
|
||||
let mut get_version_cmd = ctx.run_type().execute(&nix);
|
||||
get_version_cmd.arg("--version");
|
||||
let get_version_cmd_output = get_version_cmd.output_checked_utf8()?;
|
||||
let get_version_cmd_first_line_stdout = get_version_cmd_output
|
||||
.stdout
|
||||
.lines()
|
||||
.next()
|
||||
.expect("nix --version gives an empty output");
|
||||
let splitted: Vec<&str> = get_version_cmd_first_line_stdout.split_whitespace().collect();
|
||||
let version = if splitted.len() >= 3 {
|
||||
Version::parse(splitted[2]).expect("invalid version")
|
||||
} else {
|
||||
panic!("nix --version output format changed, file an issue to Topgrade!")
|
||||
};
|
||||
|
||||
debug!("Nix version: {:?}", version);
|
||||
|
||||
// Nix since 2.21.0 uses `--all --impure` rather than `.*` to upgrade all packages
|
||||
let packages = if version >= Version::new(2, 21, 0) {
|
||||
vec!["--all", "--impure"]
|
||||
} else {
|
||||
vec![".*"]
|
||||
};
|
||||
|
||||
if Path::new(&manifest_json_path).exists() {
|
||||
run_type
|
||||
.execute(&nix)
|
||||
.execute(nix)
|
||||
.args(nix_args())
|
||||
.arg("profile")
|
||||
.arg("upgrade")
|
||||
.arg(".*")
|
||||
.args(&packages)
|
||||
.arg("--verbose")
|
||||
.status_checked()
|
||||
} else {
|
||||
run_type.execute(&nix_env).arg("--upgrade").status_checked()
|
||||
let mut command = run_type.execute(nix_env);
|
||||
command.arg("--upgrade");
|
||||
if let Some(args) = ctx.config().nix_env_arguments() {
|
||||
command.args(args.split_whitespace());
|
||||
};
|
||||
command.status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_nix_self_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let nix = require("nix")?;
|
||||
|
||||
// Should we attempt to upgrade Nix with `nix upgrade-nix`?
|
||||
#[allow(unused_mut)]
|
||||
let mut should_self_upgrade = cfg!(target_os = "macos");
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// We can't use `nix upgrade-nix` on NixOS.
|
||||
if let Ok(Distribution::NixOS) = Distribution::detect() {
|
||||
should_self_upgrade = false;
|
||||
}
|
||||
}
|
||||
|
||||
if !should_self_upgrade {
|
||||
return Err(SkipStep(t!("`nix upgrade-nix` can only be used on macOS or non-NixOS Linux").to_string()).into());
|
||||
}
|
||||
|
||||
if nix_profile_dir(&nix)?.is_none() {
|
||||
return Err(
|
||||
SkipStep(t!("`nix upgrade-nix` cannot be run when Nix is installed in a profile").to_string()).into(),
|
||||
);
|
||||
}
|
||||
|
||||
print_separator(t!("Nix (self-upgrade)"));
|
||||
|
||||
let multi_user = fs::metadata(&nix)?.uid() == 0;
|
||||
debug!("Multi user nix: {}", multi_user);
|
||||
|
||||
let nix_args = nix_args();
|
||||
if multi_user {
|
||||
ctx.execute_elevated(&nix, true)?
|
||||
.args(nix_args)
|
||||
.arg("upgrade-nix")
|
||||
.status_checked()
|
||||
} else {
|
||||
ctx.run_type()
|
||||
.execute(&nix)
|
||||
.args(nix_args)
|
||||
.arg("upgrade-nix")
|
||||
.status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
/// If we try to `nix upgrade-nix` but Nix is installed with `nix profile`, we'll get a `does not
|
||||
/// appear to be part of a Nix profile` error.
|
||||
///
|
||||
/// We duplicate some of the `nix` logic here to avoid this.
|
||||
/// See: <https://github.com/NixOS/nix/blob/f0180487a0e4c0091b46cb1469c44144f5400240/src/nix/upgrade-nix.cc#L102-L139>
|
||||
///
|
||||
/// See: <https://github.com/NixOS/nix/issues/5473>
|
||||
fn nix_profile_dir(nix: &Path) -> Result<Option<PathBuf>> {
|
||||
// NOTE: `nix` uses the location of the `nix-env` binary for this but we're using the `nix`
|
||||
// binary; should be the same.
|
||||
let nix_bin_dir = nix.parent();
|
||||
if nix_bin_dir.and_then(|p| p.file_name()) != Some(OsStr::new("bin")) {
|
||||
debug!("Nix is not installed in a `bin` directory: {nix_bin_dir:?}");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let nix_dir = nix_bin_dir
|
||||
.and_then(|bin_dir| bin_dir.parent())
|
||||
.ok_or_else(|| eyre!("Unable to find Nix install directory from Nix binary {nix:?}"))?;
|
||||
|
||||
debug!("Found Nix in {nix_dir:?}");
|
||||
|
||||
let mut profile_dir = nix_dir.to_path_buf();
|
||||
while profile_dir.is_symlink() {
|
||||
profile_dir = profile_dir
|
||||
.parent()
|
||||
.ok_or_else(|| eyre!("Path has no parent: {profile_dir:?}"))?
|
||||
.join(
|
||||
profile_dir
|
||||
.read_link()
|
||||
.wrap_err_with(|| format!("Failed to read symlink {profile_dir:?}"))?,
|
||||
);
|
||||
|
||||
// NOTE: `nix` uses a hand-rolled canonicalize function, Rust just uses `realpath`.
|
||||
if profile_dir
|
||||
.canonicalize()
|
||||
.wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?
|
||||
.components()
|
||||
.any(|component| component == Component::Normal(OsStr::new("profiles")))
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
debug!("Found Nix profile {profile_dir:?}");
|
||||
let user_env = profile_dir
|
||||
.canonicalize()
|
||||
.wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?;
|
||||
|
||||
Ok(
|
||||
if user_env
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.map(|name| name.ends_with("user-environment"))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
Some(profile_dir)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn nix_args() -> [&'static str; 2] {
|
||||
["--extra-experimental-features", "nix-command"]
|
||||
}
|
||||
|
||||
pub fn run_yadm(ctx: &ExecutionContext) -> Result<()> {
|
||||
let yadm = require("yadm")?;
|
||||
|
||||
@@ -438,6 +620,19 @@ pub fn run_asdf(ctx: &ExecutionContext) -> Result<()> {
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_mise(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mise = require("mise")?;
|
||||
|
||||
print_separator("mise");
|
||||
|
||||
ctx.run_type().execute(&mise).arg("upgrade").status_checked()?;
|
||||
|
||||
ctx.run_type()
|
||||
.execute(&mise)
|
||||
.args(["plugins", "update"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_home_manager(ctx: &ExecutionContext) -> Result<()> {
|
||||
let home_manager = require("home-manager")?;
|
||||
|
||||
@@ -467,6 +662,29 @@ pub fn run_pearl(ctx: &ExecutionContext) -> Result<()> {
|
||||
ctx.run_type().execute(pearl).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pyenv(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pyenv = require("pyenv")?;
|
||||
print_separator("pyenv");
|
||||
|
||||
let pyenv_dir = var("PYENV_ROOT")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".pyenv"));
|
||||
|
||||
if !pyenv_dir.exists() {
|
||||
return Err(SkipStep(t!("Pyenv is installed, but $PYENV_ROOT is not set correctly").to_string()).into());
|
||||
}
|
||||
|
||||
if !pyenv_dir.join(".git").exists() {
|
||||
return Err(SkipStep(t!("pyenv is not a git repository").to_string()).into());
|
||||
}
|
||||
|
||||
if !pyenv_dir.join("plugins").join("pyenv-update").exists() {
|
||||
return Err(SkipStep(t!("pyenv-update plugin is not installed").to_string()).into());
|
||||
}
|
||||
|
||||
ctx.run_type().execute(pyenv).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
|
||||
let bash = require("bash")?;
|
||||
|
||||
@@ -530,12 +748,22 @@ pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_bun(ctx: &ExecutionContext) -> Result<()> {
|
||||
pub fn run_bun_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let bun = require("bun")?;
|
||||
|
||||
print_separator("Bun");
|
||||
print_separator(t!("Bun Packages"));
|
||||
|
||||
ctx.run_type().execute(bun).arg("upgrade").status_checked()
|
||||
let mut package_json: PathBuf = var("BUN_INSTALL")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".bun"));
|
||||
package_json.push("install/global/package.json");
|
||||
|
||||
if !package_json.exists() {
|
||||
println!("{}", t!("No global packages installed"));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
ctx.run_type().execute(bun).args(["-g", "update"]).status_checked()
|
||||
}
|
||||
|
||||
/// Update dotfiles with `rcm(7)`.
|
||||
@@ -556,6 +784,7 @@ pub fn run_maza(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn reboot() -> Result<()> {
|
||||
print!("Rebooting...");
|
||||
print!("{}", t!("Rebooting..."));
|
||||
|
||||
Command::new("sudo").arg("reboot").status_checked()
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use std::convert::TryFrom;
|
||||
use std::path::Path;
|
||||
use std::{ffi::OsStr, process::Command};
|
||||
|
||||
@@ -10,8 +9,9 @@ use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::terminal::{print_separator, print_warning};
|
||||
use crate::utils::{require, which};
|
||||
use crate::{error::SkipStep, steps::git::Repositories};
|
||||
use crate::{error::SkipStep, steps::git::RepoStep};
|
||||
use crate::{powershell, Step};
|
||||
use rust_i18n::t;
|
||||
|
||||
pub fn run_chocolatey(ctx: &ExecutionContext) -> Result<()> {
|
||||
let choco = require("choco")?;
|
||||
@@ -42,11 +42,6 @@ pub fn run_winget(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("winget");
|
||||
|
||||
if !ctx.config().enable_winget() {
|
||||
print_warning("Winget is disabled by default. Enable it by setting enable_winget=true in the [windows] section in the configuration.");
|
||||
return Err(SkipStep(String::from("Winget is disabled by default")).into());
|
||||
}
|
||||
|
||||
ctx.run_type()
|
||||
.execute(winget)
|
||||
.args(["upgrade", "--all"])
|
||||
@@ -63,6 +58,10 @@ pub fn run_scoop(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
ctx.run_type().execute(&scoop).args(["cleanup", "*"]).status_checked()?;
|
||||
ctx.run_type()
|
||||
.execute(&scoop)
|
||||
.args(["cache", "rm", "-a"])
|
||||
.status_checked()?
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -70,12 +69,12 @@ pub fn run_scoop(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
pub fn update_wsl(ctx: &ExecutionContext) -> Result<()> {
|
||||
if !is_wsl_installed()? {
|
||||
return Err(SkipStep("WSL not installed".to_string()).into());
|
||||
return Err(SkipStep(t!("WSL not installed").to_string()).into());
|
||||
}
|
||||
|
||||
let wsl = require("wsl")?;
|
||||
|
||||
print_separator("Update WSL");
|
||||
print_separator(t!("Update WSL"));
|
||||
|
||||
let mut wsl_command = ctx.run_type().execute(wsl);
|
||||
wsl_command.args(["--update"]);
|
||||
@@ -128,12 +127,45 @@ fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> R
|
||||
let topgrade = Command::new(wsl)
|
||||
.args(["-d", dist, "bash", "-lc", "which topgrade"])
|
||||
.output_checked_utf8()
|
||||
.map_err(|_| SkipStep(String::from("Could not find Topgrade installed in WSL")))?;
|
||||
.map_err(|_| SkipStep(t!("Could not find Topgrade installed in WSL").to_string()))?
|
||||
.stdout // The normal output from `which topgrade` appends a newline, so we trim it here.
|
||||
.trim_end()
|
||||
.to_owned();
|
||||
|
||||
let mut command = ctx.run_type().execute(wsl);
|
||||
|
||||
// The `arg` method automatically quotes its arguments.
|
||||
// This means we can't append additional arguments to `topgrade` in WSL
|
||||
// by calling `arg` successively.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// ```rust
|
||||
// command
|
||||
// .args(["-d", dist, "bash", "-c"])
|
||||
// .arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade}"));
|
||||
// ```
|
||||
//
|
||||
// creates a command string like:
|
||||
// > `C:\WINDOWS\system32\wsl.EXE -d Ubuntu bash -c 'TOPGRADE_PREFIX=Ubuntu exec /bin/topgrade'`
|
||||
//
|
||||
// Adding the following:
|
||||
//
|
||||
// ```rust
|
||||
// command.arg("-v");
|
||||
// ```
|
||||
//
|
||||
// appends the next argument like so:
|
||||
// > `C:\WINDOWS\system32\wsl.EXE -d Ubuntu bash -c 'TOPGRADE_PREFIX=Ubuntu exec /bin/topgrade' -v`
|
||||
// which means `-v` isn't passed to `topgrade`.
|
||||
let mut args = String::new();
|
||||
if ctx.config().verbose() {
|
||||
args.push_str("-v");
|
||||
}
|
||||
|
||||
command
|
||||
.args(["-d", dist, "bash", "-c"])
|
||||
.arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade}"));
|
||||
.arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade} {args}"));
|
||||
|
||||
if ctx.config().yes(Step::Wsl) {
|
||||
command.arg("-y");
|
||||
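The comment above is the crux of this hunk: flags meant for `topgrade` inside WSL have to be folded into the single string handed to `bash -c`, because anything passed through a further `.arg()` call lands on `wsl.exe` instead. Below is a minimal, self-contained sketch of that rule using plain `std::process::Command` and made-up values rather than Topgrade's `Executor`:

```rust
use std::process::Command;

fn main() {
    // Hypothetical stand-ins for the values the real function gets from its context.
    let dist = "Ubuntu";
    let topgrade = "/usr/bin/topgrade";
    let verbose = true;

    // Accumulate the flags destined for the remote `topgrade` as plain text...
    let mut flags = String::new();
    if verbose {
        flags.push_str("-v");
    }

    // ...and splice them into the one argument that `bash -c` receives, giving
    // roughly: wsl -d Ubuntu bash -c 'TOPGRADE_PREFIX=Ubuntu exec /usr/bin/topgrade -v'
    let mut command = Command::new("wsl");
    command
        .args(["-d", dist, "bash", "-c"])
        .arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade} {flags}"));

    // Printing the command is enough to see the quoting; nothing is executed here.
    println!("{command:?}");
}
```

Appending `.arg("-v")` after this point would attach `-v` to `wsl.exe`, which is exactly the failure mode the comment describes.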
@@ -144,7 +176,7 @@ fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> R
|
||||
|
||||
pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
if !is_wsl_installed()? {
|
||||
return Err(SkipStep("WSL not installed".to_string()).into());
|
||||
return Err(SkipStep(t!("WSL not installed").to_string()).into());
|
||||
}
|
||||
|
||||
let wsl = require("wsl")?;
|
||||
@@ -167,27 +199,26 @@ pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
if ran {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(SkipStep(String::from("Could not find Topgrade in any WSL distribution")).into())
|
||||
Err(SkipStep(t!("Could not find Topgrade in any WSL distribution").to_string()).into())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn windows_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = powershell::Powershell::windows_powershell();
|
||||
|
||||
print_separator(t!("Windows Update"));
|
||||
|
||||
if powershell.supports_windows_update() {
|
||||
print_separator("Windows Update");
|
||||
return powershell.windows_update(ctx);
|
||||
println!("The installer will request to run as administrator, expect a prompt.");
|
||||
|
||||
powershell.windows_update(ctx)
|
||||
} else {
|
||||
print_warning(t!(
|
||||
"Consider installing PSWindowsUpdate as the use of Windows Update via USOClient is not supported."
|
||||
));
|
||||
|
||||
Err(SkipStep(t!("USOClient not supported.").to_string()).into())
|
||||
}
|
||||
|
||||
let usoclient = require("UsoClient")?;
|
||||
|
||||
print_separator("Windows Update");
|
||||
println!("Running Windows Update. Check the control panel for progress.");
|
||||
ctx.run_type()
|
||||
.execute(&usoclient)
|
||||
.arg("ScanInstallWait")
|
||||
.status_checked()?;
|
||||
ctx.run_type().execute(&usoclient).arg("StartInstall").status_checked()
|
||||
}
|
||||
|
||||
pub fn reboot() -> Result<()> {
|
||||
@@ -196,7 +227,7 @@ pub fn reboot() -> Result<()> {
|
||||
Command::new("shutdown").args(["/R", "/T", "0"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn insert_startup_scripts(git_repos: &mut Repositories) -> Result<()> {
|
||||
pub fn insert_startup_scripts(git_repos: &mut RepoStep) -> Result<()> {
|
||||
let startup_dir = crate::WINDOWS_DIRS
|
||||
.data_dir()
|
||||
.join("Microsoft\\Windows\\Start Menu\\Programs\\Startup");
|
||||
@@ -206,7 +237,7 @@ pub fn insert_startup_scripts(git_repos: &mut Repositories) -> Result<()> {
|
||||
if let Ok(lnk) = parselnk::Lnk::try_from(Path::new(&path)) {
|
||||
debug!("Startup link: {:?}", lnk);
|
||||
if let Some(path) = lnk.relative_path() {
|
||||
git_repos.insert_if_repo(&startup_dir.join(path));
|
||||
git_repos.insert_if_repo(startup_dir.join(path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
@@ -62,9 +63,9 @@ impl Powershell {
|
||||
}
|
||||
|
||||
pub fn update_modules(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = require_option(self.path.as_ref(), String::from("Powershell is not installed"))?;
|
||||
let powershell = require_option(self.path.as_ref(), t!("Powershell is not installed").to_string())?;
|
||||
|
||||
print_separator("Powershell Modules Update");
|
||||
print_separator(t!("Powershell Modules Update"));
|
||||
|
||||
let mut cmd = vec!["Update-Module"];
|
||||
|
||||
@@ -76,7 +77,7 @@ impl Powershell {
|
||||
cmd.push("-Force")
|
||||
}
|
||||
|
||||
println!("Updating modules...");
|
||||
println!("{}", t!("Updating modules..."));
|
||||
ctx.run_type()
|
||||
.execute(powershell)
|
||||
// This probably doesn't need `shell_words::join`.
|
||||
@@ -94,10 +95,18 @@ impl Powershell {
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn windows_update(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = require_option(self.path.as_ref(), String::from("Powershell is not installed"))?;
|
||||
let powershell = require_option(self.path.as_ref(), t!("Powershell is not installed").to_string())?;
|
||||
|
||||
debug_assert!(self.supports_windows_update());
|
||||
|
||||
let accept_all = if ctx.config().accept_all_windows_updates() {
|
||||
"-AcceptAll"
|
||||
} else {
|
||||
""
|
||||
};
|
||||
|
||||
let install_windowsupdate_verbose = "Install-WindowsUpdate -Verbose".to_string();
|
||||
|
||||
let mut command = if let Some(sudo) = ctx.sudo() {
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(powershell);
|
||||
@@ -107,18 +116,7 @@ impl Powershell {
|
||||
};
|
||||
|
||||
command
|
||||
.args([
|
||||
"-NoProfile",
|
||||
"-Command",
|
||||
&format!(
|
||||
"Import-Module PSWindowsUpdate; Install-WindowsUpdate -MicrosoftUpdate {} -Verbose",
|
||||
if ctx.config().accept_all_windows_updates() {
|
||||
"-AcceptAll"
|
||||
} else {
|
||||
""
|
||||
}
|
||||
),
|
||||
])
|
||||
.args(["-NoProfile", &install_windowsupdate_verbose, accept_all])
|
||||
.status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
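To make the refactor above easier to follow, here is a small, self-contained sketch of how the final PowerShell argument list comes together; `build_windows_update_args` is a hypothetical helper written for illustration, not a function in the codebase:

```rust
fn build_windows_update_args(accept_all: bool) -> Vec<String> {
    // Mirrors the hunk above: the cmdlet string is fixed up front and the
    // -AcceptAll switch collapses to an empty string when it is not wanted.
    let accept_all = if accept_all { "-AcceptAll" } else { "" };
    let install = "Install-WindowsUpdate -Verbose".to_string();
    vec!["-NoProfile".into(), install, accept_all.into()]
}

fn main() {
    assert_eq!(
        build_windows_update_args(true),
        ["-NoProfile", "Install-WindowsUpdate -Verbose", "-AcceptAll"]
    );
    // Note that when acceptance is off, an empty-string argument is still
    // passed along, exactly as in the refactored `.args([...])` call above.
    assert_eq!(
        build_windows_update_args(false),
        ["-NoProfile", "Install-WindowsUpdate -Verbose", ""]
    );
}
```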
@@ -1,54 +1,55 @@
|
||||
use color_eyre::eyre::Result;
|
||||
|
||||
use crate::{
|
||||
command::CommandExt, error::SkipStep, execution_context::ExecutionContext, terminal::print_separator, utils,
|
||||
};
|
||||
|
||||
fn prepare_async_ssh_command(args: &mut Vec<&str>) {
|
||||
args.insert(0, "ssh");
|
||||
args.push("--keep");
|
||||
}
|
||||
|
||||
pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
|
||||
let ssh = utils::require("ssh")?;
|
||||
|
||||
let topgrade = ctx.config().remote_topgrade_path();
|
||||
let mut args = vec!["-t", hostname];
|
||||
|
||||
if let Some(ssh_arguments) = ctx.config().ssh_arguments() {
|
||||
args.extend(ssh_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let env = format!("TOPGRADE_PREFIX={hostname}");
|
||||
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
|
||||
|
||||
if ctx.config().run_in_tmux() && !ctx.run_type().dry() {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
prepare_async_ssh_command(&mut args);
|
||||
crate::tmux::run_command(ctx, hostname, &shell_words::join(args))?;
|
||||
Err(SkipStep(String::from("Remote Topgrade launched in Tmux")).into())
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
unreachable!("Tmux execution is only implemented in Unix");
|
||||
} else if ctx.config().open_remotes_in_new_terminal() && !ctx.run_type().dry() && cfg!(windows) {
|
||||
prepare_async_ssh_command(&mut args);
|
||||
ctx.run_type().execute("wt").args(&args).spawn()?;
|
||||
Err(SkipStep(String::from("Remote Topgrade launched in an external terminal")).into())
|
||||
} else {
|
||||
let mut args = vec!["-t", hostname];
|
||||
|
||||
if let Some(ssh_arguments) = ctx.config().ssh_arguments() {
|
||||
args.extend(ssh_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let env = format!("TOPGRADE_PREFIX={hostname}");
|
||||
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
|
||||
|
||||
print_separator(format!("Remote ({hostname})"));
|
||||
println!("Connecting to {hostname}...");
|
||||
|
||||
ctx.run_type().execute(ssh).args(&args).status_checked()
|
||||
}
|
||||
}
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
|
||||
use crate::{
|
||||
command::CommandExt, error::SkipStep, execution_context::ExecutionContext, terminal::print_separator, utils,
|
||||
};
|
||||
|
||||
fn prepare_async_ssh_command(args: &mut Vec<&str>) {
|
||||
args.insert(0, "ssh");
|
||||
args.push("--keep");
|
||||
}
|
||||
|
||||
pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
|
||||
let ssh = utils::require("ssh")?;
|
||||
|
||||
let topgrade = ctx.config().remote_topgrade_path();
|
||||
let mut args = vec!["-t", hostname];
|
||||
|
||||
if let Some(ssh_arguments) = ctx.config().ssh_arguments() {
|
||||
args.extend(ssh_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let env = format!("TOPGRADE_PREFIX={hostname}");
|
||||
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
|
||||
|
||||
if ctx.config().run_in_tmux() && !ctx.run_type().dry() {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
prepare_async_ssh_command(&mut args);
|
||||
crate::tmux::run_command(ctx, hostname, &shell_words::join(args))?;
|
||||
Err(SkipStep(String::from(t!("Remote Topgrade launched in Tmux"))).into())
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
unreachable!("Tmux execution is only implemented in Unix");
|
||||
} else if ctx.config().open_remotes_in_new_terminal() && !ctx.run_type().dry() && cfg!(windows) {
|
||||
prepare_async_ssh_command(&mut args);
|
||||
ctx.run_type().execute("wt").args(&args).spawn()?;
|
||||
Err(SkipStep(String::from(t!("Remote Topgrade launched in an external terminal"))).into())
|
||||
} else {
|
||||
let mut args = vec!["-t", hostname];
|
||||
|
||||
if let Some(ssh_arguments) = ctx.config().ssh_arguments() {
|
||||
args.extend(ssh_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let env = format!("TOPGRADE_PREFIX={hostname}");
|
||||
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
|
||||
|
||||
print_separator(format!("Remote ({hostname})"));
|
||||
println!("{}", t!("Connecting to {hostname}...", hostname = hostname));
|
||||
|
||||
ctx.run_type().execute(ssh).args(&args).status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
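Since the rewritten `ssh_step` builds the same argument vector twice (once for the plain synchronous run and once more inside the tmux / external-terminal branches), a compact stand-alone restatement of that assembly may help. The host name and remote path below are made-up values; `prepare_async_ssh_command` is copied from the file above:

```rust
fn prepare_async_ssh_command(args: &mut Vec<&str>) {
    args.insert(0, "ssh");
    args.push("--keep");
}

fn main() {
    let hostname = "example-host"; // hypothetical host
    let topgrade = "topgrade"; // stand-in for remote_topgrade_path()
    let env = format!("TOPGRADE_PREFIX={hostname}");

    // Base vector: force a TTY, then run topgrade through the login shell.
    let mut args = vec!["-t", hostname];
    args.extend(["env", &env, "$SHELL", "-lc", topgrade]);

    // The async branches reuse the same vector, prefixed with "ssh" and
    // suffixed with "--keep" so the window stays open after completion.
    prepare_async_ssh_command(&mut args);
    assert_eq!(
        args,
        [
            "ssh", "-t", "example-host", "env", "TOPGRADE_PREFIX=example-host",
            "$SHELL", "-lc", "topgrade", "--keep"
        ]
    );
}
```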
@@ -4,6 +4,7 @@ use std::{fmt::Display, rc::Rc, str::FromStr};
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use regex::Regex;
|
||||
use rust_i18n::t;
|
||||
use strum::EnumString;
|
||||
use tracing::{debug, error};
|
||||
|
||||
@@ -151,14 +152,14 @@ impl<'a> Drop for TemporaryPowerOn<'a> {
|
||||
pub fn collect_boxes(ctx: &ExecutionContext) -> Result<Vec<VagrantBox>> {
|
||||
let directories = utils::require_option(
|
||||
ctx.config().vagrant_directories(),
|
||||
String::from("No Vagrant directories were specified in the configuration file"),
|
||||
String::from(t!("No Vagrant directories were specified in the configuration file")),
|
||||
)?;
|
||||
let vagrant = Vagrant {
|
||||
path: utils::require("vagrant")?,
|
||||
};
|
||||
|
||||
print_separator("Vagrant");
|
||||
println!("Collecting Vagrant boxes");
|
||||
println!("{}", t!("Collecting Vagrant boxes"));
|
||||
|
||||
let mut result = Vec::new();
|
||||
|
||||
@@ -183,7 +184,11 @@ pub fn topgrade_vagrant_box(ctx: &ExecutionContext, vagrant_box: &VagrantBox) ->
|
||||
let mut _poweron = None;
|
||||
if !vagrant_box.initial_status.powered_on() {
|
||||
if !(ctx.config().vagrant_power_on().unwrap_or(true)) {
|
||||
return Err(SkipStep(format!("Skipping powered off box {vagrant_box}")).into());
|
||||
return Err(SkipStep(format!(
|
||||
"{}",
|
||||
t!("Skipping powered off box {vagrant_box}", vagrant_box = vagrant_box)
|
||||
))
|
||||
.into());
|
||||
} else {
|
||||
print_separator(seperator);
|
||||
_poweron = Some(vagrant.temporary_power_on(vagrant_box, ctx)?);
|
||||
@@ -205,7 +210,7 @@ pub fn topgrade_vagrant_box(ctx: &ExecutionContext, vagrant_box: &VagrantBox) ->
|
||||
|
||||
pub fn upgrade_vagrant_boxes(ctx: &ExecutionContext) -> Result<()> {
|
||||
let vagrant = utils::require("vagrant")?;
|
||||
print_separator("Vagrant boxes");
|
||||
print_separator(t!("Vagrant boxes"));
|
||||
|
||||
let outdated = Command::new(&vagrant)
|
||||
.args(["box", "outdated", "--global"])
|
||||
@@ -227,7 +232,7 @@ pub fn upgrade_vagrant_boxes(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
if !found {
|
||||
println!("No outdated boxes")
|
||||
println!("{}", t!("No outdated boxes"))
|
||||
} else {
|
||||
ctx.run_type()
|
||||
.execute(&vagrant)
|
||||
|
||||
@@ -7,6 +7,8 @@ use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::config::TmuxConfig;
|
||||
use crate::config::TmuxSessionMode;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::HOME_DIR;
|
||||
use crate::{
|
||||
@@ -14,11 +16,19 @@ use crate::{
|
||||
utils::{which, PathExt},
|
||||
};
|
||||
|
||||
use rust_i18n::t;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::process::CommandExt as _;
|
||||
|
||||
pub fn run_tpm(ctx: &ExecutionContext) -> Result<()> {
|
||||
let tpm = HOME_DIR.join(".tmux/plugins/tpm/bin/update_plugins").require()?;
|
||||
let tpm = match env::var("TMUX_PLUGIN_MANAGER_PATH") {
|
||||
// If `TMUX_PLUGIN_MANAGER_PATH` is set, search for
|
||||
// `$TMUX_PLUGIN_MANAGER_PATH/bin/install_plugins/tpm/bin/update_plugins`
|
||||
Ok(var) => PathBuf::from(var).join("bin/install_plugins/tpm/bin/update_plugins"),
|
||||
// Otherwise, use the default location `~/.tmux/plugins/tpm/bin/update_plugins`
|
||||
Err(_) => HOME_DIR.join(".tmux/plugins/tpm/bin/update_plugins"),
|
||||
}
|
||||
.require()?;
|
||||
|
||||
print_separator("tmux plugins");
|
||||
|
||||
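A stand-alone sketch of the lookup order the two comments above describe; `home_dir` stands in for Topgrade's `HOME_DIR`, the joined sub-path is copied verbatim from the hunk, and the final `.require()?` existence check is left out:

```rust
use std::env;
use std::path::{Path, PathBuf};

fn tpm_update_script(home_dir: &Path) -> PathBuf {
    match env::var("TMUX_PLUGIN_MANAGER_PATH") {
        // Prefer the location advertised by tmux when the variable is set...
        Ok(var) => PathBuf::from(var).join("bin/install_plugins/tpm/bin/update_plugins"),
        // ...and fall back to the conventional ~/.tmux plugin path otherwise.
        Err(_) => home_dir.join(".tmux/plugins/tpm/bin/update_plugins"),
    }
}

fn main() {
    let home = PathBuf::from("/home/user"); // hypothetical home directory
    println!("{}", tpm_update_script(&home).display());
}
```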
@@ -124,7 +134,7 @@ impl Tmux {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_in_tmux(args: Vec<String>) -> Result<()> {
|
||||
pub fn run_in_tmux(config: TmuxConfig) -> Result<()> {
|
||||
let command = {
|
||||
let mut command = vec![
|
||||
String::from("env"),
|
||||
@@ -137,25 +147,39 @@ pub fn run_in_tmux(args: Vec<String>) -> Result<()> {
|
||||
shell_words::join(command)
|
||||
};
|
||||
|
||||
let tmux = Tmux::new(args);
|
||||
let tmux = Tmux::new(config.args);
|
||||
|
||||
// Find an unused session and run `topgrade` in it with the current command's arguments.
|
||||
let session_name = "topgrade";
|
||||
let window_name = "topgrade";
|
||||
let session = tmux.new_unique_session(session_name, window_name, &command)?;
|
||||
|
||||
// Only attach to the newly-created session if we're not currently in a tmux session.
|
||||
if env::var("TMUX").is_err() {
|
||||
let err = tmux.build().args(["attach-session", "-t", &session]).exec();
|
||||
Err(eyre!("{err}")).context("Failed to `execvp(3)` tmux")
|
||||
} else {
|
||||
println!("Topgrade launched in a new tmux session");
|
||||
Ok(())
|
||||
}
|
||||
let is_inside_tmux = env::var("TMUX").is_ok();
|
||||
let err = match config.session_mode {
|
||||
TmuxSessionMode::AttachIfNotInSession => {
|
||||
if is_inside_tmux {
|
||||
// Only attach to the newly-created session if we're not currently in a tmux session.
|
||||
println!("{}", t!("Topgrade launched in a new tmux session"));
|
||||
return Ok(());
|
||||
} else {
|
||||
tmux.build().args(["attach-client", "-t", &session]).exec()
|
||||
}
|
||||
}
|
||||
|
||||
TmuxSessionMode::AttachAlways => {
|
||||
if is_inside_tmux {
|
||||
tmux.build().args(["switch-client", "-t", &session]).exec()
|
||||
} else {
|
||||
tmux.build().args(["attach-client", "-t", &session]).exec()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Err(eyre!("{err}")).context("Failed to `execvp(3)` tmux")
|
||||
}
|
||||
|
||||
pub fn run_command(ctx: &ExecutionContext, window_name: &str, command: &str) -> Result<()> {
|
||||
let tmux = Tmux::new(ctx.config().tmux_arguments()?);
|
||||
let tmux = Tmux::new(ctx.config().tmux_config()?.args);
|
||||
|
||||
match ctx.get_tmux_session() {
|
||||
Some(session_name) => {
|
||||
|
||||
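The new `TmuxSessionMode` handling above boils down to a small decision table: whether to attach, switch the current client, or merely report the new session depends on the configured mode and on whether Topgrade is already running inside tmux. Below is a reduced model of just that decision; the subcommand strings ("attach-client", "switch-client") are the ones used in the hunk:

```rust
#[derive(Clone, Copy)]
enum TmuxSessionMode {
    AttachIfNotInSession,
    AttachAlways,
}

/// Returns the tmux subcommand to exec, or `None` when we only print a message.
fn attach_action(mode: TmuxSessionMode, inside_tmux: bool) -> Option<&'static str> {
    match (mode, inside_tmux) {
        // Already inside tmux and not forced to attach: just report the new session.
        (TmuxSessionMode::AttachIfNotInSession, true) => None,
        (TmuxSessionMode::AttachIfNotInSession, false) => Some("attach-client"),
        // AttachAlways: switch the running client, or attach from outside tmux.
        (TmuxSessionMode::AttachAlways, true) => Some("switch-client"),
        (TmuxSessionMode::AttachAlways, false) => Some("attach-client"),
    }
}

fn main() {
    assert_eq!(attach_action(TmuxSessionMode::AttachIfNotInSession, true), None);
    assert_eq!(attach_action(TmuxSessionMode::AttachAlways, true), Some("switch-client"));
    assert_eq!(attach_action(TmuxSessionMode::AttachAlways, false), Some("attach-client"));
}
```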
@@ -9,6 +9,11 @@ if exists(":AstroUpdate")
|
||||
quitall
|
||||
endif
|
||||
|
||||
if exists(":MasonUpdate")
|
||||
echo "MasonUpdate"
|
||||
MasonUpdate
|
||||
endif
|
||||
|
||||
if exists(":NeoBundleUpdate")
|
||||
echo "NeoBundle"
|
||||
NeoBundleUpdate
|
||||
|
||||
@@ -10,6 +10,7 @@ use crate::{
|
||||
execution_context::ExecutionContext,
|
||||
utils::{require, PathExt},
|
||||
};
|
||||
use rust_i18n::t;
|
||||
use std::path::PathBuf;
|
||||
use std::{
|
||||
io::{self, Write},
|
||||
@@ -57,14 +58,14 @@ fn upgrade(command: &mut Executor, ctx: &ExecutionContext) -> Result<()> {
|
||||
let status = output.status;
|
||||
|
||||
if !status.success() || ctx.config().verbose() {
|
||||
io::stdout().write(&output.stdout).ok();
|
||||
io::stderr().write(&output.stderr).ok();
|
||||
io::stdout().write_all(&output.stdout).ok();
|
||||
io::stderr().write_all(&output.stderr).ok();
|
||||
}
|
||||
|
||||
if !status.success() {
|
||||
return Err(TopgradeError::ProcessFailed(command.get_program(), status).into());
|
||||
} else {
|
||||
println!("Plugins upgraded")
|
||||
println!("{}", t!("Plugins upgraded"))
|
||||
}
|
||||
}
|
||||
|
||||
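The `write` → `write_all` change above is a genuine fix rather than a cosmetic one: `Write::write` is allowed to write only part of the buffer and merely reports how many bytes it consumed, while `write_all` keeps writing until the whole buffer is out or an error occurs. A tiny illustration:

```rust
use std::io::{self, Write};

fn main() -> io::Result<()> {
    let payload = b"plugin output\n";

    // Writes the entire buffer or returns an error; nothing can be silently dropped.
    io::stdout().write_all(payload)?;

    // May perform a partial write; the caller has to check the returned count
    // and loop, which the old code never did.
    let written = io::stdout().write(payload)?;
    assert!(written <= payload.len());

    Ok(())
}
```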
@@ -77,7 +78,7 @@ pub fn upgrade_ultimate_vimrc(ctx: &ExecutionContext) -> Result<()> {
|
||||
let python = require("python3")?;
|
||||
let update_plugins = config_dir.join("update_plugins.py").require()?;
|
||||
|
||||
print_separator("The Ultimate vimrc");
|
||||
print_separator(t!("The Ultimate vimrc"));
|
||||
|
||||
ctx.run_type()
|
||||
.execute(&git)
|
||||
@@ -108,7 +109,7 @@ pub fn upgrade_vim(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
let output = Command::new(&vim).arg("--version").output_checked_utf8()?;
|
||||
if !output.stdout.starts_with("VIM") {
|
||||
return Err(SkipStep(String::from("vim binary might be actually nvim")).into());
|
||||
return Err(SkipStep(t!("vim binary might be actually nvim").to_string()).into());
|
||||
}
|
||||
|
||||
let vimrc = vimrc()?;
|
||||
|
||||
@@ -8,10 +8,12 @@ use walkdir::WalkDir;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::git::Repositories;
|
||||
use crate::git::RepoStep;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{require, PathExt};
|
||||
use crate::HOME_DIR;
|
||||
use crate::XDG_DIRS;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
|
||||
pub fn run_zr(ctx: &ExecutionContext) -> Result<()> {
|
||||
let zsh = require("zsh")?;
|
||||
@@ -117,12 +119,12 @@ pub fn run_zinit(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
env::var("ZINIT_HOME")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".zinit"))
|
||||
.unwrap_or_else(|_| XDG_DIRS.data_dir().join("zinit"))
|
||||
.require()?;
|
||||
|
||||
print_separator("zinit");
|
||||
|
||||
let cmd = format!("source {} && zinit self-update && zinit update --all", zshrc.display(),);
|
||||
let cmd = format!("source {} && zinit self-update && zinit update --all", zshrc.display());
|
||||
ctx.run_type()
|
||||
.execute(zsh)
|
||||
.args(["-i", "-c", cmd.as_str()])
|
||||
@@ -137,7 +139,7 @@ pub fn run_zi(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("zi");
|
||||
|
||||
let cmd = format!("source {} && zi self-update && zi update --all", zshrc.display(),);
|
||||
let cmd = format!("source {} && zi self-update && zi update --all", zshrc.display());
|
||||
ctx.run_type().execute(zsh).args(["-i", "-c", &cmd]).status_checked()
|
||||
}
|
||||
|
||||
@@ -176,15 +178,16 @@ pub fn run_oh_my_zsh(ctx: &ExecutionContext) -> Result<()> {
|
||||
// children processes won't get it either, so we source the zshrc and set
|
||||
// the ZSH variable for topgrade here.
|
||||
if ctx.under_ssh() {
|
||||
let zshrc_path = zshrc().require()?;
|
||||
let output = Command::new("zsh")
|
||||
.args([
|
||||
"-c",
|
||||
// ` > /dev/null` is used in case the user's zshrc will have some stdout output.
|
||||
format!("source {} > /dev/null && echo $ZSH", zshrc_path.display()).as_str(),
|
||||
])
|
||||
.output_checked_utf8()?;
|
||||
env::set_var("ZSH", output.stdout.trim());
|
||||
let res_env_zsh = Command::new("zsh")
|
||||
.args(["-ic", "print -rn -- ${ZSH:?}"])
|
||||
.output_checked_utf8();
|
||||
|
||||
// this command will fail if `ZSH` is not set
|
||||
if let Ok(output) = res_env_zsh {
|
||||
let env_zsh = output.stdout;
|
||||
debug!("Oh-my-zsh: under SSH, setting ZSH={}", env_zsh);
|
||||
env::set_var("ZSH", env_zsh);
|
||||
}
|
||||
}
|
||||
|
||||
let oh_my_zsh = env::var("ZSH")
|
||||
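The rewritten SSH branch above leans on a zsh detail that is easy to miss: `${ZSH:?}` makes the interactive shell exit with an error when `ZSH` is unset, so the failure case never reaches `set_var`. Here is a reduced, stand-alone version of that probe, with plain `Option`-based error handling instead of `output_checked_utf8`:

```rust
use std::env;
use std::process::Command;

fn propagate_zsh_var() -> Option<()> {
    // Ask an interactive zsh for its $ZSH so children spawned later inherit it.
    let output = Command::new("zsh")
        .args(["-ic", "print -rn -- ${ZSH:?}"])
        .output()
        .ok()?;
    if !output.status.success() {
        return None; // ${ZSH:?} makes zsh fail when the variable is unset
    }
    let env_zsh = String::from_utf8(output.stdout).ok()?;
    env::set_var("ZSH", env_zsh.trim());
    Some(())
}

fn main() {
    if propagate_zsh_var().is_none() {
        eprintln!("ZSH is not set; leaving the environment untouched");
    }
}
```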
@@ -207,8 +210,7 @@ pub fn run_oh_my_zsh(ctx: &ExecutionContext) -> Result<()> {
|
||||
.unwrap_or_else(|e| {
|
||||
let default_path = oh_my_zsh.join("custom");
|
||||
debug!(
|
||||
"Running zsh returned {}. Using default path: {}",
|
||||
e,
|
||||
"Running zsh returned {e}. Using default path: {}",
|
||||
default_path.display()
|
||||
);
|
||||
default_path
|
||||
@@ -216,22 +218,17 @@ pub fn run_oh_my_zsh(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
debug!("oh-my-zsh custom dir: {}", custom_dir.display());
|
||||
|
||||
let mut custom_repos = Repositories::new(ctx.git());
|
||||
let mut custom_repos = RepoStep::try_new()?;
|
||||
|
||||
for entry in WalkDir::new(custom_dir).max_depth(2) {
|
||||
let entry = entry?;
|
||||
custom_repos.insert_if_repo(entry.path());
|
||||
}
|
||||
|
||||
custom_repos.remove(&oh_my_zsh.to_string_lossy());
|
||||
if !custom_repos.is_empty() {
|
||||
println!("Pulling custom plugins and themes");
|
||||
ctx.git().multi_pull(&custom_repos, ctx)?;
|
||||
}
|
||||
|
||||
custom_repos.remove(&oh_my_zsh);
|
||||
ctx.run_type()
|
||||
.execute("zsh")
|
||||
.arg(&oh_my_zsh.join("tools/upgrade.sh"))
|
||||
.arg(oh_my_zsh.join("tools/upgrade.sh"))
|
||||
// oh-my-zsh returns 80 when it is already updated and no changes pulled
|
||||
// in this update.
|
||||
// See this comment: https://github.com/r-darwish/topgrade/issues/569#issuecomment-736756731
|
||||
|
||||
16
src/sudo.rs
@@ -26,10 +26,10 @@ impl Sudo {
|
||||
pub fn detect() -> Option<Self> {
|
||||
which("doas")
|
||||
.map(|p| (p, SudoKind::Doas))
|
||||
.or_else(|| which("please").map(|p| (p, SudoKind::Please)))
|
||||
.or_else(|| which("sudo").map(|p| (p, SudoKind::Sudo)))
|
||||
.or_else(|| which("gsudo").map(|p| (p, SudoKind::Gsudo)))
|
||||
.or_else(|| which("pkexec").map(|p| (p, SudoKind::Pkexec)))
|
||||
.or_else(|| which("please").map(|p| (p, SudoKind::Please)))
|
||||
.map(|(path, kind)| Self { path, kind })
|
||||
}
|
||||
|
||||
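The only change in `detect` above is the position of `please` in the probe order: it is now tried last, after `pkexec`, instead of right after `doas`. A compact model of that ordering, where the closure stands in for `utils::which`:

```rust
#[derive(Debug, PartialEq, Clone, Copy)]
enum SudoKind { Doas, Sudo, Gsudo, Pkexec, Please }

// The first binary found on PATH wins; `please` is now the last resort.
fn detect(find: impl Fn(&str) -> bool) -> Option<SudoKind> {
    [
        ("doas", SudoKind::Doas),
        ("sudo", SudoKind::Sudo),
        ("gsudo", SudoKind::Gsudo),
        ("pkexec", SudoKind::Pkexec),
        ("please", SudoKind::Please),
    ]
    .into_iter()
    .find(|&(name, _)| find(name))
    .map(|(_, kind)| kind)
}

fn main() {
    // With both `please` and `sudo` installed, `sudo` is now preferred.
    let installed = |name: &str| matches!(name, "please" | "sudo");
    assert_eq!(detect(installed), Some(SudoKind::Sudo));
}
```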
@@ -55,12 +55,6 @@ impl Sudo {
|
||||
// See: https://man.openbsd.org/doas
|
||||
cmd.arg("echo");
|
||||
}
|
||||
SudoKind::Please => {
|
||||
// From `man please`
|
||||
// -w, --warm
|
||||
// Warm the access token and exit.
|
||||
cmd.arg("-w");
|
||||
}
|
||||
SudoKind::Sudo => {
|
||||
// From `man sudo` on macOS:
|
||||
// -v, --validate
|
||||
@@ -85,6 +79,12 @@ impl Sudo {
|
||||
// See: https://linux.die.net/man/1/pkexec
|
||||
cmd.arg("echo");
|
||||
}
|
||||
SudoKind::Please => {
|
||||
// From `man please`
|
||||
// -w, --warm
|
||||
// Warm the access token and exit.
|
||||
cmd.arg("-w");
|
||||
}
|
||||
}
|
||||
cmd.status_checked().wrap_err("Failed to elevate permissions")
|
||||
}
|
||||
@@ -112,10 +112,10 @@ impl Sudo {
|
||||
#[strum(serialize_all = "lowercase")]
|
||||
pub enum SudoKind {
|
||||
Doas,
|
||||
Please,
|
||||
Sudo,
|
||||
Gsudo,
|
||||
Pkexec,
|
||||
Please,
|
||||
}
|
||||
|
||||
impl AsRef<OsStr> for Sudo {
|
||||
|
||||
@@ -11,6 +11,7 @@ use color_eyre::eyre::Context;
|
||||
use console::{style, Key, Term};
|
||||
use lazy_static::lazy_static;
|
||||
use notify_rust::{Notification, Timeout};
|
||||
use rust_i18n::t;
|
||||
use tracing::{debug, error};
|
||||
#[cfg(windows)]
|
||||
use which_crate::which;
|
||||
@@ -144,7 +145,7 @@ impl Terminal {
|
||||
self.term
|
||||
.write_fmt(format_args!(
|
||||
"{} {}",
|
||||
style(format!("{key} failed:")).red().bold(),
|
||||
style(format!("{}", t!("{key} failed:", key = key))).red().bold(),
|
||||
message
|
||||
))
|
||||
.ok();
|
||||
@@ -174,10 +175,10 @@ impl Terminal {
|
||||
"{}: {}\n",
|
||||
key,
|
||||
match result {
|
||||
StepResult::Success => format!("{}", style("OK").bold().green()),
|
||||
StepResult::Failure => format!("{}", style("FAILED").bold().red()),
|
||||
StepResult::Ignored => format!("{}", style("IGNORED").bold().yellow()),
|
||||
StepResult::Skipped(reason) => format!("{}: {}", style("SKIPPED").bold().blue(), reason),
|
||||
StepResult::Success => format!("{}", style(t!("OK")).bold().green()),
|
||||
StepResult::Failure => format!("{}", style(t!("FAILED")).bold().red()),
|
||||
StepResult::Ignored => format!("{}", style(t!("IGNORED")).bold().yellow()),
|
||||
StepResult::Skipped(reason) => format!("{}: {}", style(t!("SKIPPED")).bold().blue(), reason),
|
||||
}
|
||||
))
|
||||
.ok();
|
||||
@@ -188,7 +189,7 @@ impl Terminal {
|
||||
self.term
|
||||
.write_fmt(format_args!(
|
||||
"{}",
|
||||
style(format!("{question} (y)es/(N)o",)).yellow().bold()
|
||||
style(format!("{question} {}", t!("(Y)es/(N)o"))).yellow().bold()
|
||||
))
|
||||
.ok();
|
||||
|
||||
@@ -207,14 +208,14 @@ impl Terminal {
|
||||
}
|
||||
|
||||
if self.set_title {
|
||||
self.term.set_title("Topgrade - Awaiting user");
|
||||
self.term.set_title(format!("Topgrade - {}", t!("Awaiting user")));
|
||||
}
|
||||
|
||||
if self.desktop_notification {
|
||||
self.notify_desktop(format!("{step_name} failed"), None);
|
||||
self.notify_desktop(format!("{}", t!("{step_name} failed", step_name = step_name)), None);
|
||||
}
|
||||
|
||||
let prompt_inner = style(format!("{}Retry? (y)es/(N)o/(s)hell/(q)uit", self.prefix))
|
||||
let prompt_inner = style(format!("{}{}", self.prefix, t!("Retry? (y)es/(N)o/(s)hell/(q)uit")))
|
||||
.yellow()
|
||||
.bold();
|
||||
|
||||
@@ -224,7 +225,10 @@ impl Terminal {
|
||||
match self.term.read_key() {
|
||||
Ok(Key::Char('y')) | Ok(Key::Char('Y')) => break Ok(true),
|
||||
Ok(Key::Char('s')) | Ok(Key::Char('S')) => {
|
||||
println!("\n\nDropping you to shell. Fix what you need and then exit the shell.\n");
|
||||
println!(
|
||||
"\n\n{}\n",
|
||||
t!("Dropping you to shell. Fix what you need and then exit the shell.")
|
||||
);
|
||||
if let Err(err) = run_shell().context("Failed to run shell") {
|
||||
self.term.write_fmt(format_args!("{err:?}\n{prompt_inner}")).ok();
|
||||
} else {
|
||||
|
||||
119
src/utils.rs
@@ -5,9 +5,17 @@ use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
|
||||
use tracing::{debug, error};
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::reload::{Handle, Layer};
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
use tracing_subscriber::{fmt, Registry};
|
||||
use tracing_subscriber::{registry, EnvFilter};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::config::DEFAULT_LOG_LEVEL;
|
||||
use crate::error::SkipStep;
|
||||
|
||||
pub trait PathExt
|
||||
@@ -44,7 +52,11 @@ where
|
||||
debug!("Path {:?} exists", self.as_ref());
|
||||
Ok(self)
|
||||
} else {
|
||||
Err(SkipStep(format!("Path {:?} doesn't exist", self.as_ref())).into())
|
||||
Err(SkipStep(format!(
|
||||
"{}",
|
||||
t!("Path {path} doesn't exist", path = format!("{:?}", self.as_ref()))
|
||||
))
|
||||
.into())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -85,9 +97,14 @@ pub fn require<T: AsRef<OsStr> + Debug>(binary_name: T) -> Result<PathBuf> {
|
||||
Ok(path)
|
||||
}
|
||||
Err(e) => match e {
|
||||
which_crate::Error::CannotFindBinaryPath => {
|
||||
Err(SkipStep(format!("Cannot find {:?} in PATH", &binary_name)).into())
|
||||
}
|
||||
which_crate::Error::CannotFindBinaryPath => Err(SkipStep(format!(
|
||||
"{}",
|
||||
t!(
|
||||
"Cannot find {binary_name} in PATH",
|
||||
binary_name = format!("{:?}", &binary_name)
|
||||
)
|
||||
))
|
||||
.into()),
|
||||
_ => {
|
||||
panic!("Detecting {:?} failed: {}", &binary_name, e);
|
||||
}
|
||||
@@ -111,44 +128,13 @@ pub fn string_prepend_str(string: &mut String, s: &str) {
|
||||
*string = new_string;
|
||||
}
|
||||
|
||||
/* sys-info-rs
|
||||
*
|
||||
* Copyright (c) 2015 Siyu Wang
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
#[cfg(target_family = "unix")]
|
||||
pub fn hostname() -> Result<String> {
|
||||
use std::ffi;
|
||||
extern crate libc;
|
||||
|
||||
unsafe {
|
||||
let buf_size = libc::sysconf(libc::_SC_HOST_NAME_MAX) as usize;
|
||||
let mut buf = Vec::<u8>::with_capacity(buf_size + 1);
|
||||
|
||||
if libc::gethostname(buf.as_mut_ptr() as *mut libc::c_char, buf_size) < 0 {
|
||||
return Err(SkipStep(format!("Failed to get hostname: {}", std::io::Error::last_os_error())).into());
|
||||
}
|
||||
let hostname_len = libc::strnlen(buf.as_ptr() as *const libc::c_char, buf_size);
|
||||
buf.set_len(hostname_len);
|
||||
|
||||
Ok(ffi::CString::new(buf).unwrap().into_string().unwrap())
|
||||
match nix::unistd::gethostname() {
|
||||
Ok(os_str) => Ok(os_str
|
||||
.into_string()
|
||||
.map_err(|_| SkipStep(t!("Failed to get a UTF-8 encoded hostname").into()))?),
|
||||
Err(e) => Err(e.into()),
|
||||
}
|
||||
}
|
||||
|
||||
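The rewrite above swaps the vendored `libc::gethostname` block (and its license header) for `nix`. A Unix-only, stand-alone sketch of the same call, assuming the buffer-less `nix::unistd::gethostname()` that the hunk itself uses; errors are reduced to plain strings instead of `SkipStep`:

```rust
fn hostname() -> Result<String, String> {
    // gethostname() returns an OsString; turn it into UTF-8 or report why not.
    nix::unistd::gethostname()
        .map_err(|e| format!("Failed to get hostname: {e}"))?
        .into_string()
        .map_err(|_| "Failed to get a UTF-8 encoded hostname".to_string())
}

fn main() {
    match hostname() {
        Ok(name) => println!("hostname: {name}"),
        Err(err) => eprintln!("{err}"),
    }
}
```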
@@ -156,7 +142,7 @@ pub fn hostname() -> Result<String> {
|
||||
pub fn hostname() -> Result<String> {
|
||||
Command::new("hostname")
|
||||
.output_checked_utf8()
|
||||
.map_err(|err| SkipStep(format!("Failed to get hostname: {err}")).into())
|
||||
.map_err(|err| SkipStep(t!("Failed to get hostname: {err}", err = err).to_string()).into())
|
||||
.map(|output| output.stdout.trim().to_owned())
|
||||
}
|
||||
|
||||
@@ -215,7 +201,9 @@ pub mod merge_strategies {
|
||||
|
||||
// Skip causes
|
||||
// TODO: Put them in a better place when we have more of them
|
||||
pub const REQUIRE_SUDO: &str = "Require sudo or counterpart but not found, skip";
|
||||
pub fn get_require_sudo_string() -> String {
|
||||
t!("Require sudo or counterpart but not found, skip").to_string()
|
||||
}
|
||||
|
||||
/// Return `Err(SkipStep)` if `python` is a Python 2 or shim.
|
||||
///
|
||||
@@ -242,12 +230,55 @@ pub fn check_is_python_2_or_shim(python: PathBuf) -> Result<PathBuf> {
|
||||
.parse::<u32>()
|
||||
.expect("Major version should be a valid number");
|
||||
if major_version == 2 {
|
||||
return Err(SkipStep(format!("{} is a Python 2, skip.", python.display())).into());
|
||||
return Err(SkipStep(t!("{python} is a Python 2, skip.", python = python.display()).to_string()).into());
|
||||
}
|
||||
} else {
|
||||
// No version number, is a shim
|
||||
return Err(SkipStep(format!("{} is a Python shim, skip.", python.display())).into());
|
||||
return Err(SkipStep(t!("{python} is a Python shim, skip.", python = python.display()).to_string()).into());
|
||||
}
|
||||
|
||||
Ok(python)
|
||||
}
|
||||
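The doc comment and the two `SkipStep` branches above encode a simple classification of `python --version`-style output. The helper below is a hypothetical, self-contained restatement of that rule (the real function also resolves and returns the interpreter path):

```rust
fn classify(version_output: &str) -> Result<(), String> {
    // Pull the major version out of text shaped like "Python 3.12.1".
    let major = version_output
        .trim()
        .strip_prefix("Python ")
        .and_then(|rest| rest.split('.').next())
        .and_then(|major| major.parse::<u32>().ok());

    match major {
        Some(2) => Err("is a Python 2, skip.".to_string()),
        Some(_) => Ok(()),
        // No parsable version number: most likely a shim.
        None => Err("is a Python shim, skip.".to_string()),
    }
}

fn main() {
    assert!(classify("Python 3.12.1").is_ok());
    assert!(classify("Python 2.7.18").is_err());
    assert!(classify("").is_err());
}
```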
|
||||
/// Set up the tracing logger
|
||||
///
|
||||
/// # Return value
|
||||
/// A reload handle will be returned so that we can change the log level at
|
||||
/// runtime.
|
||||
pub fn install_tracing(filter_directives: &str) -> Result<Handle<EnvFilter, Registry>> {
|
||||
let env_filter = EnvFilter::try_new(filter_directives)
|
||||
.or_else(|_| EnvFilter::try_from_default_env())
|
||||
.or_else(|_| EnvFilter::try_new(DEFAULT_LOG_LEVEL))?;
|
||||
|
||||
let fmt_layer = fmt::layer().with_target(false).without_time();
|
||||
|
||||
let (filter, reload_handle) = Layer::new(env_filter);
|
||||
|
||||
registry().with(filter).with(fmt_layer).init();
|
||||
|
||||
Ok(reload_handle)
|
||||
}
|
||||
|
||||
/// Update the tracing logger with new `filter_directives`.
|
||||
pub fn update_tracing(reload_handle: &Handle<EnvFilter, Registry>, filter_directives: &str) -> Result<()> {
|
||||
let new = EnvFilter::try_new(filter_directives)
|
||||
.or_else(|_| EnvFilter::try_from_default_env())
|
||||
.or_else(|_| EnvFilter::try_new(DEFAULT_LOG_LEVEL))?;
|
||||
reload_handle.modify(|old| *old = new)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
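The doc comments above explain why `install_tracing` hands back a reload handle. A short usage sketch, assuming it sits next to these helpers in the same module so that `install_tracing` and `update_tracing` are in scope:

```rust
use color_eyre::eyre::Result;

fn run() -> Result<()> {
    // Start at the level given on the command line (or the default).
    let reload_handle = install_tracing("info")?;
    tracing::debug!("not visible yet at the `info` level");

    // Later, e.g. after re-reading the configuration, loosen or tighten the filter.
    update_tracing(&reload_handle, "debug")?;
    tracing::debug!("now visible");

    Ok(())
}
```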
|
||||
/// Set up the error handler crate
|
||||
pub fn install_color_eyre() -> Result<()> {
|
||||
color_eyre::config::HookBuilder::new()
|
||||
// Don't display the backtrace reminder by default:
|
||||
// Backtrace omitted. Run with RUST_BACKTRACE=1 environment variable to display it.
|
||||
// Run with RUST_BACKTRACE=full to include source snippets.
|
||||
.display_env_section(false)
|
||||
// Display location information by default:
|
||||
// Location:
|
||||
// src/steps.rs:92
|
||||
.display_location_section(true)
|
||||
.install()
|
||||
}
|
||||
|
||||