Compare commits
246 Commits
Commit SHA1s:

1ebcc9beee, 55e1bbf2b9, f2dfa1e475, fcd53e772a, 8b9d7ef8f3, d8406a8cfe, 4a9ef581e5, a52db1f261, 8e16174ce7, c748bb5d7a,
3cc8f0d818, f96eeeda6b, d1d8904376, 3b329fe687, 9eb1b4ac9f, c4c0bd7383, 1e9de5832d, f2b17cdd9d, 7bfd6c2439, 0e8d5f0266,
32add8f046, f661f00277, 2a1999fe20, 4d66431aad, 767f0d91f4, a3428e3477, 614131b7bf, 9b0681f3b8, ecf8fb7a47, 04bfb45a97,
d90ce30452, ab21600ca6, 728ea26204, 373cd3b3ae, f4e0258b09, d50360a69a, 351922c81f, 9518f43866, 2c1ce3d4e6, 12116c3261,
fbc84e8aa1, 6dab1e4f37, 650a143602, 9b6027fe78, 0e30e05ce8, eea952fa78, 6071a1ee3b, a801b7b9f4, c6e3f0ae0a, a43b03d3db,
12b0fa57ad, d9e304f0ef, 842b92cca7, 485f0ec9c8, 5e3b5fc9a7, 7c63541cad, 238e089d74, 8991bc9f62, 7a3f3a8905, e4085e03eb,
4b0c366e5f, ea97240d09, 12de531abb, c3876ce3bf, cbbfc3a114, ad2bfc9abd, 528461412e, 64db679390, 77a8b3b7d2, 7007e76ab5,
3c970063a9, b70830015e, b43f2c8b3a, c311da16f3, 37608a338c, b07288e674, 707698faab, 2e70d132d0, 30c5b31e21, 77ff6cb714,
ea13c51b7d, 3ed763b884, 10e1e170b7, ffa62afc66, f794329913, f9a35c7661, ed496f3462, 6accdae232, 96efcc6c0d, bf72d7bb5a,
dadffb1081, 78dc567226, 362ce4f4f9, ab35cd7b10, 15f4ad7cd1, cbfb92041f, a506c67cac, 788e0412f6, 18b37ce3e3, a15e6748c7,
c6d0539fd2, 3eb3867944, 810315b0e2, b461fc2536, 7e63977ba0, 78dec892cf, 9ea6628b5c, 465df2e9be, 61ef926849, 7fa38c593e,
41c6d1cd9a, cf3893dc49, a2fbe92a25, e1754707d8, cd380a53b3, a8c29fd1a2, 6b871e7949, 1b5fdb6645, fe9d877cdf, 60e7aa8f03,
18e2d3e59c, d68fcb08b2, 1f6baefdc3, 71efce32c1, 3626c9cdc8, a23b761304, 3fd27e4913, b3f152b716, df381f3a79, 2dec9db310,
d50dc4c9f6, ed8b563f20, 2a73aa731d, 4dd1c13bd8, c1c9fe22df, 06a6b7a2eb, b814dd824f, ce234bdb59, 13a46a44a8, dc78b00c3c,
48ae4bf813, a50040e2d5, 2c9a56a8df, 021320b292, 9d3662c3ea, 8e580457a5, 5350658dab, 1ec0ac50a5, 635bfce198, 1307d2d7e8,
d21141fefe, 0ec0e5a9dd, 9415d7c61f, 42188af02b, e9581bcf15, 6afe4f51c6, f623746d6c, 1ce4d66e74, 3735d5c537, f3b1d2dfb3,
7f7d2633cd, afd95e3d5c, 8f72545894, d0d447deac, 53a8683788, 81491a8d03, 83504754ac, 2068c2c169, dbac121a90, b974938a33,
06cb88a1a1, a6195d284c, 5b8850e8a3, 57546a07fc, d7709490ce, 3e6c6e513b, 30858780cf, a7ddf4575a, 470231c9d1, 282e336ac4,
658829e4ff, a0ff565220, 7e48c5dedc, 03436b7f8f, 3f5eedb83d, 234ad4bdd7, c7923393be, d4548b2f9a, f6e8af186c, 58153635da,
5358509825, 1ab0232d96, 66860f1848, 625f823f46, 6263ab7e10, 7db991db9d, d75782892e, cb7adc8ced, 7c3ba80270, 76c39edc8b,
c20a300eea, de3902a9c9, 8bca671e9f, 54301a6a17, f06b7c0807, 43c02cf7a7, 3a1568e884, 14753a14e7, 227e8dcc8d, 97fd2b2718,
f30e36d7bb, d640bc66f5, a2331a2575, 26a2c3c266, ceafcba88f, d7182b5a6e, 93ec1172fe, 609477a373, 1d49af10a7, 327ed837c2,
d406e2aeab, 0991cc8a6f, ac6330fac8, 29f0d229d3, 3dd11f7b52, ddb1a021bb, 565aa405be, 907465f891, 250485c826, 3a3f22b4e5,
a3628d0d49, 462016e51e, 199b81183b, 342d7f7209, 9c2d121fc9, 7728819133, a5d5d987d2, fae5d80f0a, 2369e371be, e3b71b647f,
e224ea38b3, 8ec37bcd44, 6b7f6f4cc7, d767ef31a5, fcf776fe07, 58060dda09
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 83 changed lines)
@@ -2,32 +2,91 @@
name: Bug report
about: Topgrade is misbehaving
title: ''
labels: ''
labels: 'C-bug'
assignees: ''

---

<!-- If you're here to report about a "No asset found" error, please make sure that an hour has passed since the last release was made. -->
<!--
Thanks for taking the time to fill out this bug report!
Please make sure to
[search for existing issues](https://github.com/topgrade-rs/topgrade/issues)
before filing a new one!

## What did you expect to happen?
Questions labeled with `Optional` can be skipped.
-->

<!--
If you're here to report about a "No asset found" error, please make sure that
an hour has passed since the last release was made.
-->

## What actually happened?
## Erroneous Behavior
<!--
What actually happened?
-->

## Expected Behavior
<!--
Describe the expected behavior
-->

## Steps to reproduce
<!--
A minimal example to reproduce the issue
-->

## Possible Cause (Optional)
<!--
If you know the possible cause of the issue, please tell us.
-->

## Problem persists without calling from topgrade
<!--
Execute the erroneous command directly to see if the problem persists
-->
- [ ] Yes
- [ ] No

## Did you run topgrade through `Remote Execution`

- [ ] Yes
- [ ] No

If yes, does the issue still occur when you run topgrade directly in your
remote host

- [ ] Yes
- [ ] No

## Configuration file (Optional)
<!--
Paste your configuration file inside the code block if you think this issue is
related to configuration.
-->

```toml

```

## Additional Details
- Which operating system or Linux distribution are you using?
- How did you install Topgrade?
- Which version are you running? <!-- Check with `topgrade -V` -->
- Operating System/Version
  <!-- For example, Fedora Linux 38 -->

<!--
Run `topgrade --dry-run` to see which commands Topgrade is running.
If the command seems wrong and you know why please tell us so.
If the command seems fine try to run it yourself and tell us if you got a different result from Topgrade.
- Installation
<!--
How did you install topgrade: build from repo / crates.io (cargo install topgrade)
/ package manager (which one) / other (describe)
-->

- Topgrade version (`topgrade -V`)

## Verbose Output (`topgrade -v`)
<!--
Paste the verbose output into the pre-tags
-->

<details>
<!-- Paste the output of the problematic command with `-v` into the pre-tags -->
<pre>

</pre>

.github/ISSUE_TEMPLATE/feature_request.md (vendored, 14 changed lines)
@@ -2,16 +2,20 @@
name: Feature request
about: Can you please support...?
title: ''
labels: ''
labels: 'C-feature request'
assignees: ''

---

## I want to suggest a new step
### Which tool is this about? Where is its repository?
### Which operating systems are supported by this tool?
### What should Topgrade do to figure out if the tool needs to be invoked?
### Which exact commands should Topgrade run?

* Which tool is this about? Where is its repository?
* Which operating systems are supported by this tool?
* What should Topgrade do to figure out if the tool needs to be invoked?
* Which exact commands should Topgrade run?
* Does it have a `--dry-run` option? i.e., print what should be done and exit
* Does it need the user to confirm the execution? And does it provide a `--yes`
  option to skip this step?

## I want to suggest some general feature
Topgrade should...

.github/PULL_REQUEST_TEMPLATE.md (vendored, 18 changed lines)
@@ -1,12 +1,18 @@
## Standards checklist:
## What does this PR do


## Standards checklist

- [ ] The PR title is descriptive.
- [ ] The code compiles (`cargo build`)
- [ ] The code passes rustfmt (`cargo fmt`)
- [ ] The code passes clippy (`cargo clippy`)
- [ ] The code passes tests (`cargo test`)
- [ ] I have read `CONTRIBUTING.md`
- [ ] *Optional:* I have tested the code myself
- [ ] I also tested that Topgrade skips the step where needed

## For new steps

- [ ] *Optional:* Topgrade skips this step where needed
- [ ] *Optional:* The `--dry-run` option works with this step
- [ ] *Optional:* The `--yes` option works with this step if it is supported by
  the underlying command

If you developed a feature or a bug fix for someone else and you do not have the
means to test it, please tag this person here.

.github/dependabot.yml (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
# Set update schedule for GitHub Actions

version: 2
updates:

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every week
      interval: "weekly"

.github/workflows/check_config_creation_if_not_exists.yml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
name: Check config file creation if not exists

on:
  pull_request:

env:
  CARGO_TERM_COLOR: always


jobs:
  TestConfig:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - run: |
          CONFIG_PATH=~/.config/topgrade.toml;
          if [ -f "$CONFIG_PATH" ]; then rm $CONFIG_PATH; fi
          cargo build;
          TOPGRADE_SKIP_BRKC_NOTIFY=true ./target/debug/topgrade --dry-run --only system;
          stat $CONFIG_PATH;

.github/workflows/check_security_vulnerability.yml (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Check Security Vulnerability

on:
  pull_request:
  push:
    branches:
      - main

jobs:
  lint:
    name: DevSkim
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Run DevSkim scanner
        uses: microsoft/DevSkim-Action@v1

      - name: Upload DevSkim scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: devskim-results.sarif

@@ -8,7 +8,7 @@ jobs:
  prepare:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly-2022-08-03

@@ -7,17 +7,17 @@ on:
name: CI

env:
  RUST_VER: '1.60.0'
  CROSS_VER: '0.2.4'
  RUST_VER: 'stable'
  CROSS_VER: '0.2.5'
  CARGO_NET_RETRY: 3

jobs:
  fmt:
    name: Rustfmt
    runs-on: ubuntu-20.04
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        uses: actions/checkout@v4

      - name: Setup Rust
        uses: dtolnay/rust-toolchain@master
@@ -42,32 +42,36 @@ jobs:
      - target: x86_64-linux-android
        target_name: Android
        use_cross: true
        os: ubuntu-20.04
        os: ubuntu-latest

      - target: x86_64-unknown-freebsd
        target_name: FreeBSD
        use_cross: true
        os: ubuntu-20.04
        os: ubuntu-latest

      - target: x86_64-unknown-linux-gnu
        target_name: Linux
        os: ubuntu-20.04
        os: ubuntu-latest

      - target: x86_64-apple-darwin
        target_name: macOS
        os: macos-11
        target_name: macOS-x86_64
        os: macos-13

      - target: aarch64-apple-darwin
        target_name: macOS-aarch64
        os: macos-latest

      - target: x86_64-unknown-netbsd
        target_name: NetBSD
        use_cross: true
        os: ubuntu-20.04
        os: ubuntu-latest

      - target: x86_64-pc-windows-msvc
        target_name: Windows
        os: windows-2019
        os: windows-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        uses: actions/checkout@v4

      - name: Setup Rust
        uses: dtolnay/rust-toolchain@master
@@ -84,8 +88,13 @@ jobs:
        if: matrix.use_cross == true
        run: curl -fL --retry 3 https://github.com/cross-rs/cross/releases/download/v${{ env.CROSS_VER }}/cross-x86_64-unknown-linux-musl.tar.gz | tar vxz -C /usr/local/bin

      - name: Run cargo check
      - name: Run cargo/cross check
        run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} check --locked --target ${{ matrix.target }}

      - name: Run cargo clippy
      - name: Run cargo/cross clippy
        run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} clippy --locked --target ${{ matrix.target }} --all-features -- -D warnings

      - name: Run cargo test
        # ONLY run test with cargo
        if: matrix.use_cross == false
        run: cargo test --locked --target ${{ matrix.target }}

@@ -13,40 +13,31 @@
    strategy:
      fail-fast: false
      matrix:
        platform: [ ubuntu-latest, macos-latest, windows-latest ]
        platform: [ ubuntu-latest, macos-latest, macos-13, windows-latest ]
    runs-on: ${{ matrix.platform }}
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          override: true
      - uses: actions/checkout@v4

      - name: setup Rust
        uses: dtolnay/rust-toolchain@stable
        with:
          components: rustfmt, clippy
      - uses: actions-rs/cargo@v1.0.1
        name: Check format
        with:
          command: fmt
          args: --all -- --check
      - uses: actions-rs/cargo@v1.0.1
        name: Run clippy
        with:
          command: clippy
          args: --all-targets --locked -- -D warnings
      - uses: actions-rs/cargo@v1.0.1
        name: Run clippy (All features)
        with:
          command: clippy
          args: --all-targets --locked --all-features -- -D warnings
      - uses: actions-rs/cargo@v1.0.1
        name: Run tests
        with:
          command: test
      - uses: actions-rs/cargo@v1.0.1
        name: Build
        with:
          command: build
          args: --release --all-features

      - name: Check format
        run: cargo fmt --all -- --check

      - name: Run clippy
        run: cargo clippy --all-targets --locked -- -D warnings

      - name: Run clippy (All features)
        run: cargo clippy --all-targets --locked --all-features -- -D warnings

      - name: Run tests
        run: cargo test

      - name: Build in Release profile with all features enabled
        run: cargo build --release --all-features

      - name: Rename Release (Unix)
        run: |
          cargo install default-target
@@ -59,6 +50,7 @@
          ls .
        if: ${{ matrix.platform != 'windows-latest' }}
        shell: bash

      - name: Rename Release (Windows)
        run: |
          cargo install default-target
@@ -71,7 +63,8 @@
          ls .
        if: ${{ matrix.platform == 'windows-latest' }}
        shell: bash

      - name: Release
        uses: softprops/action-gh-release@v1
        uses: softprops/action-gh-release@v2
        with:
          files: assets/*

.github/workflows/create_release_assets_cross.yml (vendored, new file, 68 lines)
@@ -0,0 +1,68 @@
name: Publish release files for non-cd-native environments

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  release:
    types: [ created ]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        target: [
          "aarch64-unknown-linux-gnu",
          "armv7-unknown-linux-gnueabihf",
          "x86_64-unknown-linux-musl",
          "aarch64-unknown-linux-musl",
          "x86_64-unknown-freebsd",
        ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: setup Rust
        uses: dtolnay/rust-toolchain@stable
        with:
          components: rustfmt, clippy

      - name: install targets
        run: rustup target add ${{ matrix.target }}

      - name: install cross
        uses: taiki-e/install-action@v2
        with:
          tool: cross@0.2.5

      - name: Check format
        run: cross fmt --all -- --check

      - name: Run clippy
        run: cross clippy --all-targets --locked --target ${{matrix.target}} -- -D warnings

      - name: Run clippy (All features)
        run: cross clippy --locked --all-features --target ${{matrix.target}} -- -D warnings

      - name: Run tests
        run: cross test --target ${{matrix.target}}

      - name: Build in Release profile with all features enabled
        run: cross build --release --all-features --target ${{matrix.target}}

      - name: Rename Release
        run: |
          mkdir assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-${{matrix.target}}
          mv target/${{matrix.target}}/release/topgrade assets
          cd assets
          tar --format=ustar -czf $FILENAME.tar.gz topgrade
          rm topgrade
          ls .

      - name: Release
        uses: softprops/action-gh-release@v2
        with:
          files: assets/*

.github/workflows/release-cross.yml (vendored, deleted, 70 lines)
@@ -1,70 +0,0 @@
name: Publish release files for non-cd-native environments

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  release:
    types: [ created ]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        target: [ "aarch64-unknown-linux-gnu", "armv7-unknown-linux-gnueabihf", "x86_64-unknown-linux-musl", "aarch64-unknown-linux-musl", "x86_64-unknown-freebsd", ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          default: true
          override: true
          target: ${{ matrix.target }}
          components: rustfmt, clippy
      - uses: actions-rs/cargo@v1.0.1
        name: Check format
        with:
          use-cross: true
          command: fmt
          args: --all -- --check
      - uses: actions-rs/cargo@v1.0.1
        name: Run clippy
        with:
          command: clippy
          use-cross: true
          args: --all-targets --locked --target ${{matrix.target}} -- -D warnings
      - uses: actions-rs/cargo@v1.0.1
        name: Run clippy (All features)
        with:
          command: clippy
          use-cross: true
          args: --locked --all-features --target ${{matrix.target}} -- -D warnings
      - uses: actions-rs/cargo@v1.0.1
        name: Run tests
        with:
          command: test
          use-cross: true
          args: --target ${{matrix.target}}
      - uses: actions-rs/cargo@v1.0.1
        name: Build
        with:
          command: build
          use-cross: true
          args: --release --all-features --target ${{matrix.target}}
      - name: Rename Release
        run: |
          mkdir assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-${{matrix.target}}
          mv target/${{matrix.target}}/release/topgrade assets
          cd assets
          tar --format=ustar -czf $FILENAME.tar.gz topgrade
          rm topgrade
          ls .
      - name: Release
        uses: softprops/action-gh-release@v1
        with:
          files: assets/*

@@ -4,7 +4,7 @@ on:
  # types:
  #   - completed
  release:
    types: [published, edited]
    types: [published]

name: Publish to crates.io on release

@@ -12,7 +12,7 @@ jobs:
  prepare:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
@@ -21,7 +21,7 @@ jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: katyo/publish-crates@v1
      - uses: katyo/publish-crates@v2
        with:
          dry-run: true
          check-repo: ${{ github.event_name == 'push' }}
@@ -19,7 +19,7 @@ jobs:
        uses: Homebrew/actions/setup-homebrew@master
      - name: Cache Homebrew Bundler RubyGems
        id: cache
        uses: actions/cache@v1
        uses: actions/cache@v4
        with:
          path: ${{ steps.set-up-homebrew.outputs.gems-path }}
          key: ${{ runner.os }}-rubygems-${{ steps.set-up-homebrew.outputs.gems-hash }}
@@ -29,7 +29,8 @@ jobs:
        if: steps.cache.outputs.cache-hit != 'true'
        run: brew install-bundler-gems
      - name: Bump formulae
        uses: Homebrew/actions/bump-formulae@master
        uses: Homebrew/actions/bump-packages@master
        continue-on-error: true
        with:
          # Custom GitHub access token with only the 'public_repo' scope enabled
          token: ${{secrets.HOMEBREW_ACCESS_TOKEN}}

.github/workflows/release_to_pypi.yml (vendored, new file, 99 lines)
@@ -0,0 +1,99 @@
name: Update PyPi

on:
  release:
    types: [published]

permissions:
  contents: read

jobs:
  linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target: [x86_64, x86, aarch64]
    steps:
      - uses: actions/checkout@v4
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist
          sccache: 'true'
          manylinux: auto
      - name: Upload wheels
        uses: actions/upload-artifact@v4
        with:
          name: wheels
          path: dist

  windows:
    runs-on: windows-latest
    strategy:
      matrix:
        target: [x64, x86]
    steps:
      - uses: actions/checkout@v4
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist
          sccache: 'true'
      - name: Upload wheels
        uses: actions/upload-artifact@v4
        with:
          name: wheels
          path: dist

  macos:
    runs-on: macos-latest
    strategy:
      matrix:
        target: [x86_64, aarch64]
    steps:
      - uses: actions/checkout@v4
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist
          sccache: 'true'
      - name: Upload wheels
        uses: actions/upload-artifact@v4
        with:
          name: wheels
          path: dist

  sdist:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Build sdist
        uses: PyO3/maturin-action@v1
        with:
          command: sdist
          args: --out dist
      - name: Upload sdist
        uses: actions/upload-artifact@v4
        with:
          name: wheels
          path: dist

  release:
    name: Release
    runs-on: ubuntu-latest
    if: "startsWith(github.ref, 'refs/tags/')"
    needs: [linux, windows, macos, sdist]
    steps:
      - uses: actions/download-artifact@v4
        with:
          name: wheels
      - name: Publish to PyPI
        uses: PyO3/maturin-action@v1
        env:
          MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
        with:
          command: upload
          args: --skip-existing *

.github/workflows/release_to_winget.yml (vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
name: Publish to WinGet
on:
  release:
    types: [released]
jobs:
  publish:
    runs-on: windows-latest
    steps:
      - uses: vedantmgoyal2009/winget-releaser@v2
        with:
          identifier: topgrade-rs.topgrade
          max-versions-to-keep: 5 # keep only latest 5 versions
          token: ${{ secrets.WINGET_TOKEN }}

.github/workflows/test.yaml (vendored, deleted, 59 lines)
@@ -1,59 +0,0 @@
on:
  pull_request:
  push:
    branches:
      - main

env:
  CARGO_TERM_COLOR: always

name: Test with Code Coverage

jobs:
  test:
    name: Test
    env:
      PROJECT_NAME_UNDERSCORE: topgrade
      CARGO_INCREMENTAL: 0
      RUSTFLAGS: -Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort
      RUSTDOCFLAGS: -Cpanic=abort
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly
          override: true
      - name: Cache dependencies
        uses: actions/cache@v2
        env:
          cache-name: cache-dependencies
        with:
          path: |
            ~/.cargo/.crates.toml
            ~/.cargo/.crates2.json
            ~/.cargo/bin
            ~/.cargo/registry/index
            ~/.cargo/registry/cache
            target
          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('Cargo.lock') }}
      - name: Generate test result and coverage report
        run: |
          cargo install cargo2junit grcov;
          cargo test $CARGO_OPTIONS -- -Z unstable-options --format json | cargo2junit > results.xml;
          zip -0 ccov.zip `find . \( -name "$PROJECT_NAME_UNDERSCORE*.gc*" \) -print`;
          grcov ccov.zip -s . -t lcov --llvm --ignore-not-existing --ignore "/*" --ignore "tests/*" -o lcov.info;
      - name: Upload test results
        uses: EnricoMi/publish-unit-test-result-action@v1
        with:
          check_name: Test Results
          github_token: ${{ secrets.GITHUB_TOKEN }}
          files: results.xml
      - name: Upload to CodeCov
        uses: codecov/codecov-action@v1
        with:
          # required for private repositories:
          # token: ${{ secrets.CODECOV_TOKEN }}
          files: ./lcov.info
          fail_ci_if_error: true

.gitignore (vendored, 18 changed lines)
@@ -1,4 +1,20 @@
# JetBrains IDEs
.idea/

/target
# Visual Studio
.vs/

# Visual Studio Code
.vscode/

# Generic build outputs
/build

# Specific for some languages like Rust
/target

# LLVM profiling output
*.profraw

# Backup files for any .rs files in the project
**/*.rs.bk

.vscode/launch.json (vendored, deleted, 38 lines)
@@ -1,38 +0,0 @@
{
  // Use IntelliSense to learn about possible attributes.
  // Hover to view descriptions of existing attributes.
  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
  "version": "0.2.0",
  "configurations": [
    {
      "type": "lldb",
      "request": "launch",
      "name": "Topgrade",
      "console": "integratedTerminal",
      "cargo": {
        "args": [
          "build",
          "--bin=topgrade-rs",
          "--package=topgrade-rs"
        ],
        "filter": {
          "name": "topgrade-rs",
          "kind": "bin"
        }
      },
      "args": [
        "--only",
        "${input:step}",
        "-v"
      ],
      "cwd": "${workspaceFolder}"
    },
  ],
  "inputs": [
    {
      "type": "promptString",
      "id": "step",
      "description": "step name",
    }
  ]
}

.vscode/tasks.json (vendored, deleted, 14 lines)
@@ -1,14 +0,0 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "type": "cargo",
      "command": "clippy",
      "problemMatcher": [
        "$rustc"
      ],
      "group": "test",
      "label": "rust: cargo clippy"
    }
  ]
}

.vscode/topgrade.code-snippets (vendored, deleted, 50 lines)
@@ -1,50 +0,0 @@
{
  // Place your topgrade workspace snippets here. Each snippet is defined under a snippet name and has a scope, prefix, body and
  // description. Add comma separated ids of the languages where the snippet is applicable in the scope field. If scope
  // is left empty or omitted, the snippet gets applied to all languages. The prefix is what is
  // used to trigger the snippet and the body will be expanded and inserted. Possible variables are:
  // $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
  // Placeholders with the same ids are connected.
  // Example:
  // "Print to console": {
  //   "scope": "javascript,typescript",
  //   "prefix": "log",
  //   "body": [
  //     "console.log('$1');",
  //     "$2"
  //   ],
  //   "description": "Log output to console"
  // }
  "Skip Step": {
    "scope": "rust",
    "prefix": "skipstep",
    "body": [
      "return Err(SkipStep(format!(\"$1\")).into());"
    ]
  },
  "Step": {
    "scope": "rust",
    "prefix": "step",
    "body": [
      "pub fn $1(ctx: &ExecutionContext) -> Result<()> {",
      "    $0",
      "    Ok(())",
      "}"
    ]
  },
  "Require Binary": {
    "scope": "rust",
    "prefix": "req",
    "description": "Require a binary to be installed",
    "body": [
      "let ${1:binary} = require(\"${1:binary}\")?;"
    ]
  },
  "macos": {
    "scope": "rust",
    "prefix": "macos",
    "body": [
      "#[cfg(target_os = \"macos\")]"
    ]
  }
}

BREAKINGCHANGES.md (new file, 9 lines)
@@ -0,0 +1,9 @@
# Git: Pull Repos

1. The output of "Pulling <repository path>" has been moved behind the
   --verbose flag / [misc] configuration block.

# Configuration

1. The `enable_winget` configuration entry in the `windows` section has been
   removed; Winget no longer causes any issues, so it is now enabled by default.

BREAKINGCHANGES_dev.md (new file, empty)

CONTRIBUTING.md (new file, 152 lines)
@@ -0,0 +1,152 @@
## Contributing to `topgrade`

Thank you for your interest in contributing to `topgrade`!
We welcome and encourage contributions of all kinds, such as:

1. Issue reports or feature requests
2. Documentation improvements
3. Code (PR or PR Review)

Please follow the [Karma Runner guidelines](http://karma-runner.github.io/6.2/dev/git-commit-msg.html)
for commit messages.

## Adding a new `step`

In `topgrade`'s terminology, a package manager is called a `step`.
To add a new `step` to `topgrade`:

1. Add a new variant to
   [`enum Step`](https://github.com/topgrade-rs/topgrade/blob/cb7adc8ced8a77addf2cb051d18bba9f202ab866/src/config.rs#L100)

   ```rust
   pub enum Step {
       // Existing steps
       // ...

       // Your new step here!
       // You may want it to be sorted alphabetically because that looks great:)
       Xxx,
   }
   ```

2. Implement the update function

   You need to find the appropriate location for this update function: it should be
   a file under [`src/steps`](https://github.com/topgrade-rs/topgrade/tree/master/src/steps).
   The file names are self-explanatory; for example, `step`s related to `zsh` are
   placed in [`steps/zsh.rs`](https://github.com/topgrade-rs/topgrade/blob/master/src/steps/zsh.rs).

   Then implement the update function and put it in the file where it belongs.

   ```rust
   pub fn run_xxx(ctx: &ExecutionContext) -> Result<()> {
       // Check if this step is installed, if not, then this update will be skipped.
       let xxx = require("xxx")?;

       // Print the separator
       print_separator("xxx");

       // Invoke the new step to get things updated!
       ctx.run_type()
           .execute("xxx")
           .arg(/* args required by this step */)
           .status_checked()
   }
   ```

   Such an update function is conventionally named `run_xxx()`, where `xxx`
   is the name of the new step, and it should take an argument of type
   `&ExecutionContext`. This is adequate for most cases unless something extra is
   needed (you can find some examples where extra arguments are needed
   [here](https://github.com/topgrade-rs/topgrade/blob/7e48c5dedcfd5d0124bb9f39079a03e27ed23886/src/main.rs#L201-L219)).

   An update function usually does 3 things:
   1. Check if the step is installed
   2. Output the separator
   3. Invoke the step

   This is sufficient for most tools, but a complicated `step` may need some extra work.

3. Finally, invoke that update function in `main.rs`

   ```rust
   runner.execute(Step::Xxx, "xxx", || ItsModule::run_xxx(&ctx))?;
   ```

   We use [conditional compilation](https://doc.rust-lang.org/reference/conditional-compilation.html)
   to separate the steps; for example, for steps that are Linux-only, it goes
   like this:

   ```
   #[cfg(target_os = "linux")]
   {
       // Xxx is Linux-only
       runner.execute(Step::Xxx, "xxx", || ItsModule::run_xxx(&ctx))?;
   }
   ```

Congrats, you just added a new `step`:)

## Modification to the configuration entries

If your PR modifies the configuration options
(in [`src/config.rs`](https://github.com/topgrade-rs/topgrade/blob/master/src/config.rs)):

1. Adding new options
2. Changing the existing options

be sure to apply your changes to
[`config.example.toml`](https://github.com/topgrade-rs/topgrade/blob/master/config.example.toml)
and add some basic documentation guiding users on how to use these options,
as in the sketch below.

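For illustration only, a minimal sketch of what such an addition to `config.example.toml` could look like; the `[xxx]` section and the `xxx_arguments` key are placeholders standing in for whatever you actually add in `src/config.rs`, not real Topgrade options:

```toml
# Arguments to pass xxx when updating packages
# (placeholder section and key for a hypothetical new step)
[xxx]
# xxx_arguments = "--some-flag"
```
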
## Breaking changes

If your PR introduces a breaking change, document it in [`BREAKINGCHANGES_dev.md`][bc_dev].
It should be written in Markdown and wrapped at 80 columns, for example:

```md
1. The configuration location has been updated to x.

2. The step x has been removed.

3. ...
```

[bc_dev]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES_dev.md

## Before you submit your PR

Make sure your patch passes the following tests on your host:

```shell
$ cargo build
$ cargo fmt
$ cargo clippy
$ cargo test
```

Don't worry about other platforms, we have most of them covered in our CI.

## Some tips

1. Locale

   Some `step`s respect the locale, which means their output can be in a language
   other than English, so we should not check against it.

   For example, one may want to check if a tool works by doing this:

   ```rust
   let output = Command::new("xxx").arg("--help").output().unwrap();
   let stdout = from_utf8(&output.stdout).expect("Assume it is UTF-8 encoded");

   if stdout.contains("help") {
       // xxx works
   }
   ```

   If `xxx` respects the locale, the above code should work on an English system;
   on a system that does not use English (e.g., one that uses Chinese), `"help"` may be
   translated to `"帮助"`, and the above code won't work.

Cargo.lock (generated, 2548 changed lines): diff suppressed because it is too large.

Cargo.toml (48 changed lines)
@@ -4,11 +4,10 @@ description = "Upgrade all the things"
categories = ["os"]
keywords = ["upgrade", "update"]
license = "GPL-3.0"
# license-file = "LICENSE"
repository = "https://github.com/topgrade-rs/topgrade"
version = "10.3.1"
version = "15.0.0"
authors = ["Roey Darwish Dror <roey.ghost@gmail.com>", "Thomas Schönauer <t.schoenauer@hgs-wt.at>"]
exclude = ["doc/screenshot.gif"]
exclude = ["doc/screenshot.gif", "BREAKINGCHANGES_dev.md"]
edition = "2021"

readme = "README.md"
@@ -22,37 +21,39 @@ path = "src/main.rs"

[dependencies]
home = "~0.5"
directories = "~4.0"
etcetera = "~0.8"
once_cell = "~1.19"
serde = { version = "~1.0", features = ["derive"] }
toml = "0.5"
which_crate = { version = "~4.1", package = "which" }
shellexpand = "~2.1"
clap = { version = "~3.1", features = ["cargo", "derive"] }
clap_complete = "~3.1"
clap_mangen = "~0.1"
walkdir = "~2.3"
toml = "0.8"
which_crate = { version = "~6.0", package = "which" }
shellexpand = "~3.1"
clap = { version = "~4.5", features = ["cargo", "derive"] }
clap_complete = "~4.5"
clap_mangen = "~0.2"
walkdir = "~2.5"
console = "~0.15"
lazy_static = "~1.4"
chrono = "~0.4"
glob = "~0.3"
strum = { version = "~0.24", features = ["derive"] }
strum = { version = "~0.26", features = ["derive"] }
thiserror = "~1.0"
tempfile = "~3.2"
tempfile = "~3.10"
cfg-if = "~1.0"
tokio = { version = "~1.18", features = ["process", "rt-multi-thread"] }
tokio = { version = "~1.38", features = ["process", "rt-multi-thread"] }
futures = "~0.3"
regex = "~1.5"
regex = "~1.10"
semver = "~1.0"
shell-words = "~1.1"
color-eyre = "~0.6"
tracing = { version = "~0.1", features = ["attributes", "log"] }
tracing-subscriber = { version = "~0.3", features = ["env-filter", "time"] }

[target.'cfg(target_os = "macos")'.dependencies]
notify-rust = "~4.5"
merge = "~0.1"
regex-split = "~0.1"
notify-rust = "~4.11"
wildmatch = "2.3.0"

[package.metadata.generate-rpm]
assets = [{source = "target/release/topgrade", dest="/usr/bin/topgrade"}]
assets = [{ source = "target/release/topgrade", dest = "/usr/bin/topgrade" }]

[package.metadata.generate-rpm.requires]
git = "*"
@@ -61,13 +62,12 @@ git = "*"
depends = "$auto,git"

[target.'cfg(unix)'.dependencies]
libc = "~0.2"
nix = "~0.24"
rust-ini = "~0.18"
self_update_crate = { version = "~0.30", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }
nix = { version = "~0.29", features = ["hostname", "signal", "user"] }
rust-ini = "~0.21"
self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }

[target.'cfg(windows)'.dependencies]
self_update_crate = { version = "~0.30", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
winapi = "~0.3"
parselnk = "~0.1"

@@ -1,4 +0,0 @@
# Workaround for: https://github.com/cross-rs/cross/issues/1100
# TODO: Remove this file altogether once a new version of cross (after v0.2.4) is released.
[target.x86_64-unknown-freebsd.env]
passthrough = ["AR_x86_64_unknown_freebsd=x86_64-unknown-freebsd12-ar"]

README.md (54 changed lines)
@@ -8,9 +8,10 @@
<a href="https://aur.archlinux.org/packages/topgrade"><img alt="AUR" src="https://img.shields.io/aur/version/topgrade.svg"></a>
<a href="https://formulae.brew.sh/formula/topgrade"><img alt="Homebrew" src="https://img.shields.io/homebrew/v/topgrade.svg"></a>

<img alt="Demo" src="doc/screenshot.gif" width="550px">
<img alt="Demo" src="doc/topgrade_demo.gif">
</div>

## Introduction

> **Note**
@@ -28,31 +29,56 @@ To remedy this, **Topgrade** detects which tools you use and runs the appropriate
- NixOS: [Nixpkgs](https://search.nixos.org/packages?show=topgrade)
- Void Linux: [XBPS](https://voidlinux.org/packages/?arch=x86_64&q=topgrade)
- macOS: [Homebrew](https://formulae.brew.sh/formula/topgrade) or [MacPorts](https://ports.macports.org/port/topgrade/)
- Windows: [Scoop][scoop] or [Winget][winget]
- PyPi: [pip](https://pypi.org/project/topgrade/)

[scoop]: https://scoop.sh/#/apps?q=topgrade
[winget]: https://winstall.app/apps/topgrade-rs.topgrade

Users of other systems can either use `cargo install` or the compiled binaries from the release page.
The compiled binaries contain a self-upgrading feature.

Topgrade requires Rust 1.60 or above.
> Currently, Topgrade requires Rust 1.65 or above. In general, Topgrade tracks
> the latest stable toolchain.

## Usage

Just run `topgrade`.

Visit the documentation at [topgrade-rs.github.io](https://topgrade-rs.github.io/) for more information.

> **Warning**
> Work in Progress

## Customization
## Configuration

See `config.example.toml` for an example configuration file.

## Migration and Breaking Changes

Whenever there is a **breaking change**, the major version number will be bumped,
and we will document these changes in the release note, so please take a look at
it when updating to a major release.

> Got a question? Feel free to open an issue or discussion!

### Configuration Path

The configuration should be placed in the following paths depending on the operating system:
#### `CONFIG_DIR` on each platform
- **Windows**: `%APPDATA%`
- **macOS** and **other Unix systems**: `${XDG_CONFIG_HOME:-~/.config}`

- **Windows** - `%APPDATA%/topgrade.toml`
- **macOS** and **other Unix systems** - `${XDG_CONFIG_HOME:-~/.config}/topgrade.toml`
`topgrade` will look for the configuration file in the following places, in order of priority:

1. `CONFIG_DIR/topgrade.toml`
2. `CONFIG_DIR/topgrade/topgrade.toml`

If the file with higher priority is present, whether or not it is valid, the other configuration files will be ignored.

On the first run (when no configuration file exists), `topgrade` will create a configuration file at `CONFIG_DIR/topgrade.toml` for you.

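As a quick sketch only, a minimal `CONFIG_DIR/topgrade.toml` could look like the following; the option names are taken from the `[misc]` section of `config.example.toml`, and the chosen values are just examples:

```toml
[misc]
# Skip these steps on every run
disable = ["emacs"]

# Clean up temporary or old files after updating
cleanup = true
```
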
### Custom Commands

Custom commands can be defined in the config file which can be run before, during, or after the inbuilt commands, as required.
By default, the custom commands are run using a new shell according to the `$SHELL` environment variable on unix (falls back to `sh`) or `pwsh` on windows (falls back to `powershell`).

On unix, if you want to run your command using an interactive shell, for example to source your shell's rc files, you can add `-i` at the start of your custom command.
But note that this requires the command to exit the shell correctly or else the shell will hang indefinitely.

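A short sketch of custom command definitions, using the `[pre_commands]` and `[commands]` tables shown in `config.example.toml`; the "Emacs Snapshot" entry is taken from that example file, while the second command name and the `~/bin/update-scripts` path are made-up placeholders:

```toml
# Runs before the built-in steps
[pre_commands]
"Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"

# Runs as regular custom steps; `-i` requests an interactive shell (Unix only)
[commands]
"Update my scripts" = "-i ~/bin/update-scripts"
```
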
## Remote Execution

@@ -78,8 +104,8 @@ Just fork the repository and start coding.

### Contribution Guidelines

- Check if your code passes `cargo fmt` and `cargo clippy`.
- Check if your code is self explanatory, if not it should be documented by comments.
See [CONTRIBUTING.md](https://github.com/topgrade-rs/topgrade/blob/master/CONTRIBUTING.md)

## Roadmap

- [ ] Add a proper testing framework to the code base.

RELEASE_PROCEDURE.md (new file, 65 lines)
@@ -0,0 +1,65 @@
> This document lists the steps that lead to a successful release of Topgrade.

1. Open a PR that:

   > Here is an [Example PR](https://github.com/topgrade-rs/topgrade/pull/652)
   > that you can refer to.

   1. bumps the version number.

      > If there are breaking changes, the major version number should be increased.

   2. Overwrites [`BREAKINGCHANGES`][breaking_changes] with
      [`BREAKINGCHANGES_dev`][breaking_changes_dev], and creates a new dev file:

      ```sh
      $ cd topgrade
      $ cp BREAKINGCHANGES_dev.md BREAKINGCHANGES.md
      $ touch BREAKINGCHANGES_dev.md
      ```

   [breaking_changes_dev]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES_dev.md
   [breaking_changes]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES.md

2. Check and merge that PR.

3. Go to the [release](https://github.com/topgrade-rs/topgrade/releases) page
   and click the [Draft a new release button](https://github.com/topgrade-rs/topgrade/releases/new)

4. Write the release notes

   We usually use GitHub's [Automatically generated release notes][auto_gen_release_notes]
   functionality to generate release notes, but you can write your own instead.

   [auto_gen_release_notes]: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes

5. Attach binaries

   You don't need to do this as our CI will automatically do it for you:
   binaries for Linux, macOS and Windows will be created and attached.

   And the CI will publish the new binary to:

   1. AUR
   2. PyPi
   3. Homebrew (seems that this is not working correctly)
   4. Winget

6. Manually release it to Crates.io

   > Yeah, this is unfortunate, our CI won't do this for us. We should probably add one.

   1. `cd` to the Topgrade directory, make sure that it is the latest version
      (i.e., including the PR that bumps the version number).
   2. Set up your token with `cargo login`.
   3. Dry-run the publish: `cargo publish --dry-run`.
   4. If step 3 works, then do the final release: `cargo publish`.

   > You can also take a look at the official tutorial [Publishing on crates.io][doc]
   >
   > [doc]: https://doc.rust-lang.org/cargo/reference/publishing.html

@@ -1,130 +1,253 @@
|
||||
# Don't ask for confirmations
|
||||
#assume_yes = true
|
||||
# Include any additional configuration file(s)
|
||||
# [include] sections are processed in the order you write them
|
||||
# Files in $CONFIG_DIR/topgrade.d/ are automatically included before this file
|
||||
[include]
|
||||
# paths = ["/etc/topgrade.toml"]
|
||||
|
||||
|
||||
[misc]
|
||||
# Run `sudo -v` to cache credentials at the start of the run
|
||||
# This avoids a blocking password prompt in the middle of an unattended run
|
||||
# (default: false)
|
||||
# pre_sudo = false
|
||||
|
||||
# Sudo command to be used
|
||||
# sudo_command = "sudo"
|
||||
|
||||
# Disable specific steps - same options as the command line flag
|
||||
#disable = ["system", "emacs"]
|
||||
# disable = ["system", "emacs"]
|
||||
|
||||
# Ignore failures for these steps
|
||||
#ignore_failures = ["powershell"]
|
||||
|
||||
# Run specific steps - same options as the command line flag
|
||||
#only = ["system", "emacs"]
|
||||
|
||||
# Do not ask to retry failed steps (default: false)
|
||||
#no_retry = true
|
||||
|
||||
# Run `sudo -v` to cache credentials at the start of the run; this avoids a
|
||||
# blocking password prompt in the middle of a possibly-unattended run.
|
||||
#pre_sudo = false
|
||||
|
||||
# Run inside tmux
|
||||
#run_in_tmux = true
|
||||
# ignore_failures = ["powershell"]
|
||||
|
||||
# List of remote machines with Topgrade installed on them
|
||||
#remote_topgrades = ["toothless", "pi", "parnas"]
|
||||
|
||||
# Arguments to pass SSH when upgrading remote systems
|
||||
#ssh_arguments = "-o ConnectTimeout=2"
|
||||
# remote_topgrades = ["toothless", "pi", "parnas"]
|
||||
|
||||
# Path to Topgrade executable on remote machines
|
||||
#remote_topgrade_path = ".cargo/bin/topgrade"
|
||||
# remote_topgrade_path = ".cargo/bin/topgrade"
|
||||
|
||||
# Arguments to pass to SSH when upgrading remote systems
|
||||
# ssh_arguments = "-o ConnectTimeout=2"
|
||||
|
||||
# Arguments to pass tmux when pulling Repositories
|
||||
#tmux_arguments = "-S /var/tmux.sock"
|
||||
# tmux_arguments = "-S /var/tmux.sock"
|
||||
|
||||
# Do not set the terminal title
|
||||
#set_title = false
|
||||
# Do not set the terminal title (default: true)
|
||||
# set_title = true
|
||||
|
||||
# Display the time in step titles
|
||||
# Display the time in step titles (default: true)
|
||||
# display_time = true
|
||||
|
||||
# Cleanup temporary or old files
|
||||
#cleanup = true
|
||||
# Don't ask for confirmations (no default value)
|
||||
# assume_yes = true
|
||||
|
||||
# Skip sending a notification at the end of a run
|
||||
#skip_notify = true
|
||||
# Do not ask to retry failed steps (default: false)
|
||||
# no_retry = true
|
||||
|
||||
# Skip the preamble displayed when topgrade is run
|
||||
#display_preamble = false
|
||||
# Run inside tmux (default: false)
|
||||
# run_in_tmux = true
|
||||
|
||||
[git]
|
||||
#max_concurrency = 5
|
||||
# Additional git repositories to pull
|
||||
#repos = [
|
||||
# "~/src/*/",
|
||||
# "~/.config/something"
|
||||
#]
|
||||
# Cleanup temporary or old files (default: false)
|
||||
# cleanup = true
|
||||
|
||||
# Don't pull the predefined git repos
|
||||
#pull_predefined = false
|
||||
# Send a notification for every step (default: false)
|
||||
# notify_each_step = false
|
||||
|
||||
# Arguments to pass Git when pulling Repositories
|
||||
#arguments = "--rebase --autostash"
|
||||
# Skip sending a notification at the end of a run (default: false)
|
||||
# skip_notify = true
|
||||
|
||||
# The Bash-it branch to update (default: "stable")
|
||||
# bashit_branch = "stable"
|
||||
|
||||
# Run specific steps - same options as the command line flag
|
||||
# only = ["system", "emacs"]
|
||||
|
||||
# Whether to self update
|
||||
#
|
||||
# this will be ignored if the binary is built without self update support
|
||||
#
|
||||
# available also via setting the environment variable TOPGRADE_NO_SELF_UPGRADE)
|
||||
# no_self_update = true
|
||||
|
||||
# Extra tracing filter directives
|
||||
# These are prepended to the `--log-filter` argument
|
||||
# See: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives
|
||||
# log_filters = ["topgrade::command=debug", "warn"]
|
||||
|
||||
[composer]
|
||||
#self_update = true
|
||||
|
||||
# Commands to run before anything
|
||||
[pre_commands]
|
||||
#"Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
# "Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
|
||||
|
||||
# Commands to run after anything
|
||||
[post_commands]
|
||||
# "Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
|
||||
|
||||
# Custom commands
|
||||
[commands]
|
||||
#"Python Environment" = "~/dev/.env/bin/pip install -i https://pypi.python.org/simple -U --upgrade-strategy eager jupyter"
|
||||
# "Python Environment" = "~/dev/.env/bin/pip install -i https://pypi.python.org/simple -U --upgrade-strategy eager jupyter"
|
||||
# "Custom command using interactive shell (unix)" = "-i vim_upgrade"
|
||||
|
||||
[brew]
|
||||
#greedy_cask = true
|
||||
#autoremove = true
|
||||
|
||||
[linux]
|
||||
# Arch Package Manager to use. Allowed values: autodetect, aura, garuda_update, pacman, pamac, paru, pikaur, trizen, yay.
|
||||
#arch_package_manager = "pacman"
|
||||
# Arguments to pass yay (or paru) when updating packages
|
||||
#yay_arguments = "--nodevel"
|
||||
# Arguments to pass dnf when updating packages
|
||||
#dnf_arguments = "--refresh"
|
||||
#aura_aur_arguments = "-kx"
|
||||
#aura_pacman_arguments = ""
|
||||
#garuda_update_arguments = ""
|
||||
#show_arch_news = true
|
||||
#trizen_arguments = "--devel"
|
||||
#pikaur_arguments = ""
|
||||
#pamac_arguments = "--no-devel"
|
||||
#enable_tlmgr = true
|
||||
#emerge_sync_flags = "-q"
|
||||
#emerge_update_flags = "-uDNa --with-bdeps=y world"
|
||||
#redhat_distro_sync = false
|
||||
#rpm_ostree = false
|
||||
#nix_arguments = "--flake"
|
||||
|
||||
[python]
|
||||
#enable_pip_review = true ###disabled by default
|
||||
#enable_pipupgrade = true ###disabled by default
|
||||
# enable_pip_review = true ###disabled by default
|
||||
# enable_pip_review_local = true ###disabled by default
|
||||
# enable_pipupgrade = true ###disabled by default
|
||||
# pipupgrade_arguments = "-y -u --pip-path pip" ###disabled by default
|
||||
|
||||
|
||||
[composer]
# self_update = true

[brew]
# For the BrewCask step
# If `Repo Cask Upgrade` exists, then use the `-a` option.
# Otherwise, use the `--greedy` option.
# greedy_cask = true

# For the BrewCask step
# If `Repo Cask Upgrade` does not exist, then use the `--greedy_latest` option.
# NOTE: the `greedy_cask` entry above subsumes this one, though you can enable
# both of them; they won't clash with each other.
# greedy_latest = true

# For the BrewFormula step
# Execute `brew autoremove` after the step.
# autoremove = true

# For the BrewFormula step
# Upgrade formulae built from the HEAD branch; `brew upgrade --fetch-HEAD`
# fetch_head = true

[linux]
# Arch Package Manager to use.
# Allowed values:
# autodetect, aura, garuda_update, pacman, pamac, paru, pikaur, trizen, yay
# arch_package_manager = "pacman"

# Arguments to pass yay (or paru) when updating packages
# yay_arguments = "--nodevel"

# Arguments to pass dnf when updating packages
# dnf_arguments = "--refresh"

# aura_aur_arguments = "-kx"

# aura_pacman_arguments = ""
# garuda_update_arguments = ""

# show_arch_news = true

# trizen_arguments = "--devel"

# pikaur_arguments = ""

# pamac_arguments = "--no-devel"

# enable_tlmgr = true

# emerge_sync_flags = "-q"

# emerge_update_flags = "-uDNa --with-bdeps=y world"

# redhat_distro_sync = false

# suse_dup = false

# rpm_ostree = false

# nix_arguments = "--flake"

# nix_env_arguments = "--prebuilt-only"

# Extra Home Manager arguments
# home_manager_arguments = ["--flake", "file"]

[git]
# How many repos to pull at max in parallel
# max_concurrency = 5

# Additional git repositories to pull
# repos = [
#     "~/src/*/",
#     "~/.config/something"
# ]

# Don't pull the predefined git repos
# pull_predefined = false

# Arguments to pass Git when pulling Repositories
# arguments = "--rebase --autostash"

[windows]
# Manually select Windows updates
#accept_all_updates = false
#open_remotes_in_new_terminal = true
#wsl_update_pre_release = true
#wsl_update_use_web_download = true
# accept_all_updates = false

# open_remotes_in_new_terminal = true

# wsl_update_pre_release = true

# wsl_update_use_web_download = true

# Causes Topgrade to rename itself during the run to allow package managers
# to upgrade it. Use this only if you installed Topgrade by using a package
# manager such as Scoop or Cargo
#self_rename = true
# self_rename = true

[npm]
# Use sudo if the NPM directory isn't owned by the current user
#use_sudo = true
# use_sudo = true

[yarn]
# Run `yarn global upgrade` with `sudo`
# use_sudo = true

[vim]
# For `vim-plug`, execute `PlugUpdate!` instead of `PlugUpdate`
# force_plug_update = true

[firmware]
# Offer to update firmware; if false just check for and display available updates
#upgrade = true
# upgrade = true

[vagrant]
# Vagrant directories
# directories = []

# power on vagrant boxes if needed
# power_on = true

# Always suspend vagrant boxes instead of powering off
# always_suspend = true

[flatpak]
# Use sudo for updating the system-wide installation
#use_sudo = true
# use_sudo = true

[distrobox]
#use_root = false
#containers = ["archlinux-latest"]
# use_root = false

# containers = ["archlinux-latest"]

[containers]
# Specify the containers to ignore while updating (Wildcard supported)
# ignored_containers = ["ghcr.io/rancher-sandbox/rancher-desktop/rdx-proxy:latest", "docker.io*"]

[lensfun]
# If disabled, Topgrade invokes `lensfun-update-data` without root privilege,
# and the update is only available to your user. Otherwise, `sudo` is required
# and the update is installed system-wide, i.e., available to all users.
# (default: false)
# use_sudo = false
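
How a `use_sudo`-style switch typically plays out in the codebase: a rough, illustrative sketch only (the helper names and the option plumbing are assumptions, not the real config API), using the `execute_elevated` helper shown in the `execution_context.rs` hunk further down:

    // Illustrative sketch: run `lensfun-update-data` with or without sudo.
    fn run_lensfun(ctx: &ExecutionContext, use_sudo: bool) -> Result<()> {
        let tool = require("lensfun-update-data")?; // assumed PATH-lookup helper from utils
        if use_sudo {
            // installs the database system-wide
            ctx.execute_elevated(&tool, false)?.status_checked()
        } else {
            // updates only the current user's copy
            ctx.run_type().execute(&tool).status_checked()
        }
    }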
Binary file not shown.
Before: 718 KiB

BIN  doc/topgrade_demo.gif  Normal file
Binary file not shown.
After: 4.1 MiB

16  pyproject.toml  Normal file
@@ -0,0 +1,16 @@
[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"

[project]
name = "topgrade"
requires-python = ">=3.7"
classifiers = [
    "Programming Language :: Rust",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
]


[tool.maturin]
bindings = "bin"
167  src/breaking_changes.rs  Normal file
@@ -0,0 +1,167 @@
//! Inform the users of the breaking changes introduced in this major release.
//!
//! Print the breaking changes and possibly a migration guide when:
//! 1. The Topgrade being executed is a new major release
//! 2. This is the first launch of that major release

use crate::terminal::print_separator;
#[cfg(windows)]
use crate::WINDOWS_DIRS;
#[cfg(unix)]
use crate::XDG_DIRS;
use color_eyre::eyre::Result;
use etcetera::base_strategy::BaseStrategy;
use std::{
    env::var,
    fs::{read_to_string, OpenOptions},
    io::Write,
    path::PathBuf,
    str::FromStr,
};

/// Version string x.y.z
static VERSION_STR: &str = env!("CARGO_PKG_VERSION");

/// Version info
#[derive(Debug)]
pub(crate) struct Version {
    _major: u64,
    minor: u64,
    patch: u64,
}

impl FromStr for Version {
    type Err = std::convert::Infallible;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        const NOT_SEMVER: &str = "Topgrade version is not semantic";
        const NOT_NUMBER: &str = "Topgrade version is not dot-separated numbers";

        let mut iter = s.split('.').take(3);
        let major = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
        let minor = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
        let patch = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);

        // They cannot be all 0s
        assert!(
            !(major == 0 && minor == 0 && patch == 0),
            "Version numbers can not be all 0s"
        );

        Ok(Self {
            _major: major,
            minor,
            patch,
        })
    }
}

impl Version {
    /// True if this version is a new major release.
    pub(crate) fn is_new_major_release(&self) -> bool {
        // We have already checked that they cannot all be zeros, so `self.major`
        // is guaranteed to be non-zero.
        self.minor == 0 && self.patch == 0
    }
}

/// Topgrade's breaking changes
///
/// We store them in the compiled binary.
pub(crate) static BREAKINGCHANGES: &str = include_str!("../BREAKINGCHANGES.md");

/// Return platform's data directory.
fn data_dir() -> PathBuf {
    #[cfg(unix)]
    return XDG_DIRS.data_dir();

    #[cfg(windows)]
    return WINDOWS_DIRS.data_dir();
}

/// Return Topgrade's keep file path.
///
/// The keep file is a file under the data directory containing a major version
/// number. It is created on the first run and is used to check whether an execution
/// of Topgrade is the first run of a major release; for more details, see
/// `first_run_of_major_release()`.
fn keep_file_path() -> PathBuf {
    let keep_file = "topgrade_keep";
    data_dir().join(keep_file)
}

/// If environment variable `TOPGRADE_SKIP_BRKC_NOTIFY` is set to `true`, then
/// we won't notify the user of the breaking changes.
pub(crate) fn should_skip() -> bool {
    if let Ok(var) = var("TOPGRADE_SKIP_BRKC_NOTIFY") {
        return var.as_str() == "true";
    }

    false
}

/// True if this is the first execution of a major release.
pub(crate) fn first_run_of_major_release() -> Result<bool> {
    let version = VERSION_STR.parse::<Version>().expect("should be a valid version");
    let keep_file = keep_file_path();

    // disable this lint here as the current code has better readability
    #[allow(clippy::collapsible_if)]
    if version.is_new_major_release() {
        if !keep_file.exists() || read_to_string(&keep_file)? != VERSION_STR {
            return Ok(true);
        }
    }

    Ok(false)
}

/// Print breaking changes to the user.
pub(crate) fn print_breaking_changes() {
    let header = format!("Topgrade {VERSION_STR} Breaking Changes");
    print_separator(header);
    let contents = if BREAKINGCHANGES.is_empty() {
        "No Breaking changes"
    } else {
        BREAKINGCHANGES
    };
    println!("{contents}\n");
}

/// This function is ONLY executed when the user has confirmed the breaking
/// changes; once confirmed, we write the keep file, which marks the first run
/// of this major release as finished.
pub(crate) fn write_keep_file() -> Result<()> {
    std::fs::create_dir_all(data_dir())?;
    let keep_file = keep_file_path();

    let mut file = OpenOptions::new()
        .create(true)
        .write(true)
        .truncate(true)
        .open(keep_file)?;
    let _ = file.write(VERSION_STR.as_bytes())?;

    Ok(())
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn is_new_major_release_works() {
        let first_major_release: Version = "1.0.0".parse().unwrap();
        let under_dev: Version = "0.1.0".parse().unwrap();

        assert!(first_major_release.is_new_major_release());
        assert!(!under_dev.is_new_major_release());
    }

    #[test]
    #[should_panic(expected = "Version numbers can not be all 0s")]
    fn invalid_version() {
        let all_0 = "0.0.0";
        all_0.parse::<Version>().unwrap();
    }
}
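
For orientation, these helpers are wired together at startup; a minimal sketch of the intended flow, mirroring the call site added in src/main.rs later in this diff (`prompt_yesno` comes from the terminal module):

    if !should_skip() && first_run_of_major_release()? {
        print_breaking_changes();
        if prompt_yesno("Confirmed?")? {
            // the user acknowledged the breaking changes; record this major version
            write_keep_file()?;
        } else {
            std::process::exit(1);
        }
    }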
@@ -10,6 +10,8 @@ use color_eyre::eyre::Context;

use crate::error::TopgradeError;

use tracing::debug;

/// Like [`Output`], but UTF-8 decoded.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Utf8Output {
@@ -183,7 +185,7 @@ impl CommandExt for Command {
        let err = TopgradeError::ProcessFailedWithOutput(program, output.status, stderr.into_owned());

        let ret = Err(err).with_context(|| message);
        tracing::debug!("Command failed: {ret:?}");
        debug!("Command failed: {ret:?}");
        ret
    }
}
@@ -203,7 +205,7 @@ impl CommandExt for Command {
        let (program, _) = get_program_and_args(self);
        let err = TopgradeError::ProcessFailed(program, status);
        let ret = Err(err).with_context(|| format!("Command failed: `{command}`"));
        tracing::debug!("Command failed: {ret:?}");
        debug!("Command failed: {ret:?}");
        ret
    }
}
@@ -239,6 +241,6 @@ fn format_program_and_args(cmd: &Command) -> String {

fn log(cmd: &Command) -> String {
    let command = format_program_and_args(cmd);
    tracing::debug!("Executing command `{command}`");
    debug!("Executing command `{command}`");
    command
}
873  src/config.rs
File diff suppressed because it is too large.
@@ -1,6 +1,6 @@
//! SIGINT handling in Unix systems.
use crate::ctrlc::interrupted::set_interrupted;
use nix::sys::signal;
use nix::sys::signal::{sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal};

/// Handle SIGINT. Set the interruption flag.
extern "C" fn handle_sigint(_: i32) {
@@ -10,12 +10,8 @@ extern "C" fn handle_sigint(_: i32) {
/// Set the necessary signal handlers.
/// The function panics on failure.
pub fn set_handler() {
    let sig_action = signal::SigAction::new(
        signal::SigHandler::Handler(handle_sigint),
        signal::SaFlags::empty(),
        signal::SigSet::empty(),
    );
    let sig_action = SigAction::new(SigHandler::Handler(handle_sigint), SaFlags::empty(), SigSet::empty());
    unsafe {
        signal::sigaction(signal::SIGINT, &sig_action).unwrap();
        sigaction(Signal::SIGINT, &sig_action).unwrap();
    }
}
@@ -1,5 +1,6 @@
//! A stub for Ctrl + C handling.
use crate::ctrlc::interrupted::set_interrupted;
use tracing::error;
use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE};
use winapi::um::consoleapi::SetConsoleCtrlHandler;
use winapi::um::wincon::CTRL_C_EVENT;
@@ -16,6 +17,6 @@ extern "system" fn handler(ctrl_type: DWORD) -> BOOL {

pub fn set_handler() {
    if 0 == unsafe { SetConsoleCtrlHandler(Some(handler), TRUE) } {
        tracing::error!("Cannot set a control C handler")
        error!("Cannot set a control C handler")
    }
}
@@ -10,14 +10,14 @@ pub enum TopgradeError {
    #[error("`{0}` failed: {1}")]
    ProcessFailedWithOutput(String, ExitStatus, String),

    #[error("Sudo is required for this step")]
    #[allow(dead_code)]
    SudoRequired,

    #[error("Unknown Linux Distribution")]
    #[cfg(target_os = "linux")]
    UnknownLinuxDistribution,

    #[error("File \"/etc/os-release\" does not exist or is empty")]
    #[cfg(target_os = "linux")]
    EmptyOSReleaseFile,

    #[error("Failed getting the system package manager")]
    #[cfg(target_os = "linux")]
    FailedGettingPackageManager,
@@ -1,46 +1,39 @@
#![allow(dead_code)]
use crate::executor::RunType;
use crate::git::Git;
use crate::sudo::Sudo;
use crate::utils::require_option;
use crate::utils::{require_option, REQUIRE_SUDO};
use crate::{config::Config, executor::Executor};
use color_eyre::eyre::Result;
use directories::BaseDirs;
use std::env::var;
use std::path::Path;
use std::sync::Mutex;

pub struct ExecutionContext<'a> {
    run_type: RunType,
    sudo: Option<Sudo>,
    git: &'a Git,
    config: &'a Config,
    base_dirs: &'a BaseDirs,
    /// Name of a tmux session to execute commands in, if any.
    /// This is used in `./steps/remote/ssh.rs`, where we want to run `topgrade` in a new
    /// tmux window for each remote.
    tmux_session: Mutex<Option<String>>,
    /// True if topgrade is running under ssh.
    under_ssh: bool,
}

impl<'a> ExecutionContext<'a> {
    pub fn new(
        run_type: RunType,
        sudo: Option<Sudo>,
        git: &'a Git,
        config: &'a Config,
        base_dirs: &'a BaseDirs,
    ) -> Self {
    pub fn new(run_type: RunType, sudo: Option<Sudo>, config: &'a Config) -> Self {
        let under_ssh = var("SSH_CLIENT").is_ok() || var("SSH_TTY").is_ok();
        Self {
            run_type,
            sudo,
            git,
            config,
            base_dirs,
            tmux_session: Mutex::new(None),
            under_ssh,
        }
    }

    pub fn execute_elevated(&self, command: &Path, interactive: bool) -> Result<Executor> {
        let sudo = require_option(self.sudo.clone(), "Sudo is required for this operation".into())?;
        let sudo = require_option(self.sudo.as_ref(), REQUIRE_SUDO.to_string())?;
        Ok(sudo.execute_elevated(self, command, interactive))
    }

@@ -48,10 +41,6 @@ impl<'a> ExecutionContext<'a> {
        self.run_type
    }

    pub fn git(&self) -> &Git {
        self.git
    }

    pub fn sudo(&self) -> &Option<Sudo> {
        &self.sudo
    }
@@ -60,8 +49,8 @@ impl<'a> ExecutionContext<'a> {
        self.config
    }

    pub fn base_dirs(&self) -> &BaseDirs {
        self.base_dirs
    pub fn under_ssh(&self) -> bool {
        self.under_ssh
    }

    pub fn set_tmux_session(&self, session_name: String) {
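
For callers, the net effect of this hunk is a slimmer constructor; a short sketch of the new call shape, matching the src/main.rs hunk later in this diff (`run_type`, `sudo`, and `config` assumed to be in scope):

    // `git` and `base_dirs` are no longer passed in; `under_ssh` is detected
    // internally from SSH_CLIENT / SSH_TTY.
    let ctx = ExecutionContext::new(run_type, sudo, &config);
    if ctx.under_ssh() {
        // steps can adapt their behaviour when Topgrade itself runs over SSH
    }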
@@ -3,7 +3,6 @@ use std::ffi::{OsStr, OsString};
use std::path::Path;
use std::process::{Child, Command, ExitStatus, Output};

use color_eyre::eyre;
use color_eyre::eyre::Result;
use tracing::debug;

@@ -228,6 +227,7 @@ impl DryCommand {

/// The Result of spawn. Contains an actual `std::process::Child` if executed by a wet command.
pub enum ExecutorChild {
    #[allow(unused)] // this type has not been used
    Wet(Child),
    Dry,
}
@@ -238,7 +238,7 @@ impl CommandExt for Executor {
    // TODO: It might be nice to make `output_checked_with` return something that has a
    // variant for wet/dry runs.

    fn output_checked_with(&mut self, succeeded: impl Fn(&Output) -> Result<(), ()>) -> eyre::Result<Output> {
    fn output_checked_with(&mut self, succeeded: impl Fn(&Output) -> Result<(), ()>) -> Result<Output> {
        match self {
            Executor::Wet(c) => c.output_checked_with(succeeded),
            Executor::Dry(c) => {
@@ -248,7 +248,7 @@ impl CommandExt for Executor {
        }
    }

    fn status_checked_with(&mut self, succeeded: impl Fn(ExitStatus) -> Result<(), ()>) -> eyre::Result<()> {
    fn status_checked_with(&mut self, succeeded: impl Fn(ExitStatus) -> Result<(), ()>) -> Result<()> {
        match self {
            Executor::Wet(c) => c.status_checked_with(succeeded),
            Executor::Dry(c) => {
@@ -258,7 +258,7 @@ impl CommandExt for Executor {
        }
    }

    fn spawn_checked(&mut self) -> eyre::Result<Self::Child> {
    fn spawn_checked(&mut self) -> Result<Self::Child> {
        self.spawn()
    }
}
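
To make the `eyre::Result` cleanup above concrete, here is a hedged sketch of how a typical step drives an `Executor` through `CommandExt`; the step name, binary, and argument are hypothetical, and `require` is the PATH-lookup helper imported elsewhere in this diff (see containers.rs):

    // Hypothetical step: it honours --dry-run because ctx.run_type().execute()
    // hands back a dry or wet Executor.
    fn run_example_tool(ctx: &ExecutionContext) -> Result<()> {
        let tool = require("example-tool")?; // assumed helper from crate::utils
        print_separator("Example tool");
        ctx.run_type().execute(tool).args(["--upgrade"]).status_checked()
    }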
484  src/main.rs
@@ -2,14 +2,22 @@
|
||||
|
||||
use std::env;
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
use std::process::exit;
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::breaking_changes::{first_run_of_major_release, print_breaking_changes, should_skip, write_keep_file};
|
||||
use clap::CommandFactory;
|
||||
use clap::{crate_version, Parser};
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::{eyre, Result};
|
||||
use color_eyre::eyre::Result;
|
||||
use console::Key;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
#[cfg(windows)]
|
||||
use etcetera::base_strategy::Windows;
|
||||
#[cfg(unix)]
|
||||
use etcetera::base_strategy::Xdg;
|
||||
use once_cell::sync::Lazy;
|
||||
use tracing::debug;
|
||||
|
||||
use self::config::{CommandLineArgs, Config, Step};
|
||||
@@ -19,6 +27,9 @@ use self::error::Upgraded;
|
||||
use self::steps::{remote::*, *};
|
||||
use self::terminal::*;
|
||||
|
||||
use self::utils::{hostname, install_color_eyre, install_tracing, update_tracing};
|
||||
|
||||
mod breaking_changes;
|
||||
mod command;
|
||||
mod config;
|
||||
mod ctrlc;
|
||||
@@ -36,28 +47,43 @@ mod sudo;
|
||||
mod terminal;
|
||||
mod utils;
|
||||
|
||||
pub(crate) static HOME_DIR: Lazy<PathBuf> = Lazy::new(|| home::home_dir().expect("No home directory"));
|
||||
#[cfg(unix)]
|
||||
pub(crate) static XDG_DIRS: Lazy<Xdg> = Lazy::new(|| Xdg::new().expect("No home directory"));
|
||||
|
||||
#[cfg(windows)]
|
||||
pub(crate) static WINDOWS_DIRS: Lazy<Windows> = Lazy::new(|| Windows::new().expect("No home directory"));
|
||||
|
||||
fn run() -> Result<()> {
|
||||
color_eyre::install()?;
|
||||
install_color_eyre()?;
|
||||
ctrlc::set_handler();
|
||||
|
||||
let base_dirs = directories::BaseDirs::new().ok_or_else(|| eyre!("No base directories"))?;
|
||||
|
||||
let opt = CommandLineArgs::parse();
|
||||
// Set up the logger with the filter directives from:
|
||||
// 1. CLI option `--log-filter`
|
||||
// 2. `debug` if the `--verbose` option is present
|
||||
// We do this because we need our logger to work while loading the
|
||||
// configuration file.
|
||||
//
|
||||
// When the configuration file is loaded, update the logger with the full
|
||||
// filter directives.
|
||||
//
|
||||
// For more info, see the comments in `CommandLineArgs::tracing_filter_directives()`
|
||||
// and `Config::tracing_filter_directives()`.
|
||||
let reload_handle = install_tracing(&opt.tracing_filter_directives())?;
|
||||
|
||||
if let Some(shell) = opt.gen_completion {
|
||||
let cmd = &mut CommandLineArgs::command();
|
||||
clap_complete::generate(shell, cmd, clap::crate_name!(), &mut std::io::stdout());
|
||||
clap_complete::generate(shell, cmd, clap::crate_name!(), &mut io::stdout());
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if opt.gen_manpage {
|
||||
let man = clap_mangen::Man::new(CommandLineArgs::command());
|
||||
man.render(&mut std::io::stdout())?;
|
||||
man.render(&mut io::stdout())?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
install_tracing(&opt.tracing_filter_directives())?;
|
||||
|
||||
for env in opt.env_variables() {
|
||||
let mut splitted = env.split('=');
|
||||
let var = splitted.next().unwrap();
|
||||
@@ -66,32 +92,28 @@ fn run() -> Result<()> {
|
||||
}
|
||||
|
||||
if opt.edit_config() {
|
||||
Config::edit(&base_dirs)?;
|
||||
Config::edit()?;
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
if opt.show_config_reference() {
|
||||
print!("{}", crate::config::EXAMPLE_CONFIG);
|
||||
print!("{}", config::EXAMPLE_CONFIG);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let config = Config::load(&base_dirs, opt)?;
|
||||
terminal::set_title(config.set_title());
|
||||
terminal::display_time(config.display_time());
|
||||
terminal::set_desktop_notifications(config.notify_each_step());
|
||||
let config = Config::load(opt)?;
|
||||
// Update the logger with the full filter directives.
|
||||
update_tracing(&reload_handle, &config.tracing_filter_directives())?;
|
||||
set_title(config.set_title());
|
||||
display_time(config.display_time());
|
||||
set_desktop_notifications(config.notify_each_step());
|
||||
|
||||
debug!("Version: {}", crate_version!());
|
||||
debug!("OS: {}", env!("TARGET"));
|
||||
debug!("{:?}", std::env::args());
|
||||
debug!("Binary path: {:?}", std::env::current_exe());
|
||||
debug!("Self Update: {:?}", cfg!(feature = "self-update"));
|
||||
|
||||
if config.display_preamble() && !config.skip_notify() {
|
||||
print_warning("Due to a design issue with notify-send it could be that topgrade hangs when it's finished.
|
||||
If this is the case on your system add the --skip-notify flag to the topgrade command or set skip_notify = true in the config file.
|
||||
If you don't want this message to appear any longer set display_preamble = false in the config file.
|
||||
For more information about this issue see https://askubuntu.com/questions/110969/notify-send-ignores-timeout and https://github.com/topgrade-rs/topgrade/issues/288.");
|
||||
}
|
||||
debug!("self-update Feature Enabled: {:?}", cfg!(feature = "self-update"));
|
||||
debug!("Configuration: {:?}", config);
|
||||
|
||||
if config.run_in_tmux() && env::var("TOPGRADE_INSIDE_TMUX").is_err() {
|
||||
#[cfg(unix)]
|
||||
@@ -101,30 +123,42 @@ For more information about this issue see https://askubuntu.com/questions/110969
|
||||
}
|
||||
}
|
||||
|
||||
let git = git::Git::new();
|
||||
let mut git_repos = git::Repositories::new(&git);
|
||||
let powershell = powershell::Powershell::new();
|
||||
let should_run_powershell = powershell.profile().is_some() && config.should_run(Step::Powershell);
|
||||
let emacs = emacs::Emacs::new();
|
||||
#[cfg(target_os = "linux")]
|
||||
let distribution = linux::Distribution::detect();
|
||||
|
||||
let sudo = sudo::Sudo::detect();
|
||||
let sudo = config.sudo_command().map_or_else(sudo::Sudo::detect, sudo::Sudo::new);
|
||||
let run_type = executor::RunType::new(config.dry_run());
|
||||
|
||||
let ctx = execution_context::ExecutionContext::new(run_type, sudo, &git, &config, &base_dirs);
|
||||
|
||||
let ctx = execution_context::ExecutionContext::new(run_type, sudo, &config);
|
||||
let mut runner = runner::Runner::new(&ctx);
|
||||
|
||||
// If
//
// 1. the breaking changes notification should not be skipped
// 2. this is the first execution of a major release
//
// inform the user of the breaking changes
|
||||
if !should_skip() && first_run_of_major_release()? {
|
||||
print_breaking_changes();
|
||||
|
||||
if prompt_yesno("Confirmed?")? {
|
||||
write_keep_file()?;
|
||||
} else {
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Self-Update step, this will execute only if:
|
||||
// 1. the `self-update` feature is enabled
|
||||
// 2. it is not disabled from configuration (env var/CLI opt/file)
|
||||
#[cfg(feature = "self-update")]
|
||||
{
|
||||
if !run_type.dry() && env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() {
|
||||
let result = self_update::self_update();
|
||||
let should_self_update = env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() && !config.no_self_update();
|
||||
|
||||
if let Err(e) = &result {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
if e.downcast_ref::<Upgraded>().is_some() {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
print_warning(format!("Self update error: {e}"));
|
||||
}
|
||||
if should_self_update {
|
||||
runner.execute(Step::SelfUpdate, "Self Update", || self_update::self_update(&ctx))?;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -147,28 +181,30 @@ For more information about this issue see https://askubuntu.com/questions/110969
|
||||
}
|
||||
}
|
||||
|
||||
let powershell = powershell::Powershell::new();
|
||||
let should_run_powershell = powershell.profile().is_some() && config.should_run(Step::Powershell);
|
||||
|
||||
#[cfg(windows)]
|
||||
runner.execute(Step::Wsl, "WSL", || windows::run_wsl_topgrade(&ctx))?;
|
||||
|
||||
#[cfg(windows)]
|
||||
runner.execute(Step::WslUpdate, "WSL", || windows::update_wsl(&ctx))?;
|
||||
|
||||
if let Some(topgrades) = config.remote_topgrades() {
|
||||
for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(t)) {
|
||||
for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(hostname(), t)) {
|
||||
runner.execute(Step::Remotes, format!("Remote ({remote_topgrade})"), || {
|
||||
remote::ssh::ssh_step(&ctx, remote_topgrade)
|
||||
ssh::ssh_step(&ctx, remote_topgrade)
|
||||
})?;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
let distribution = linux::Distribution::detect();
|
||||
|
||||
#[cfg(target_os = r#"linux"#)]
|
||||
#[cfg(windows)]
|
||||
{
|
||||
runner.execute(Step::Wsl, "WSL", || windows::run_wsl_topgrade(&ctx))?;
|
||||
runner.execute(Step::WslUpdate, "WSL", || windows::update_wsl(&ctx))?;
|
||||
runner.execute(Step::Chocolatey, "Chocolatey", || windows::run_chocolatey(&ctx))?;
|
||||
runner.execute(Step::Scoop, "Scoop", || windows::run_scoop(&ctx))?;
|
||||
runner.execute(Step::Winget, "Winget", || windows::run_winget(&ctx))?;
|
||||
runner.execute(Step::System, "Windows update", || windows::windows_update(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// NOTE: Due to breaking `nu` updates, `packer.nu` needs to be updated before `nu` get updated
|
||||
// by other package managers.
|
||||
runner.execute(Step::Shell, "packer.nu", || linux::run_packer_nu(&ctx))?;
|
||||
|
||||
match &distribution {
|
||||
Ok(distribution) => {
|
||||
runner.execute(Step::System, "System update", || distribution.upgrade(&ctx))?;
|
||||
@@ -179,16 +215,27 @@ For more information about this issue see https://askubuntu.com/questions/110969
|
||||
}
|
||||
runner.execute(Step::ConfigUpdate, "config-update", || linux::run_config_update(&ctx))?;
|
||||
|
||||
runner.execute(Step::AM, "am", || linux::run_am(&ctx))?;
|
||||
runner.execute(Step::AppMan, "appman", || linux::run_appman(&ctx))?;
|
||||
runner.execute(Step::DebGet, "deb-get", || linux::run_deb_get(&ctx))?;
|
||||
runner.execute(Step::Toolbx, "toolbx", || toolbx::run_toolbx(&ctx))?;
|
||||
runner.execute(Step::Snap, "snap", || linux::run_snap(&ctx))?;
|
||||
runner.execute(Step::Pacstall, "pacstall", || linux::run_pacstall(&ctx))?;
|
||||
runner.execute(Step::Pacdef, "pacdef", || linux::run_pacdef(&ctx))?;
|
||||
runner.execute(Step::Protonup, "protonup", || linux::run_protonup_update(&ctx))?;
|
||||
runner.execute(Step::Distrobox, "distrobox", || linux::run_distrobox_update(&ctx))?;
|
||||
runner.execute(Step::DkpPacman, "dkp-pacman", || linux::run_dkp_pacman_update(&ctx))?;
|
||||
runner.execute(Step::System, "pihole", || linux::run_pihole_update(&ctx))?;
|
||||
runner.execute(Step::Firmware, "Firmware upgrades", || linux::run_fwupdmgr(&ctx))?;
|
||||
runner.execute(Step::Restarts, "Restarts", || linux::run_needrestart(&ctx))?;
|
||||
|
||||
runner.execute(Step::Flatpak, "Flatpak", || linux::run_flatpak(&ctx))?;
|
||||
runner.execute(Step::BrewFormula, "Brew", || {
|
||||
unix::run_brew_formula(&ctx, unix::BrewVariant::Path)
|
||||
})?;
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
runner.execute(Step::Chocolatey, "Chocolatey", || windows::run_chocolatey(&ctx))?;
|
||||
runner.execute(Step::Scoop, "Scoop", || windows::run_scoop(config.cleanup(), run_type))?;
|
||||
runner.execute(Step::Winget, "Winget", || windows::run_winget(&ctx))?;
|
||||
runner.execute(Step::Lure, "LURE", || linux::run_lure_update(&ctx))?;
|
||||
runner.execute(Step::Waydroid, "Waydroid", || linux::run_waydroid(&ctx))?;
|
||||
runner.execute(Step::AutoCpufreq, "auto-cpufreq", || linux::run_auto_cpufreq(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
@@ -212,133 +259,77 @@ For more information about this issue see https://askubuntu.com/questions/110969
|
||||
unix::run_brew_cask(&ctx, unix::BrewVariant::Path)
|
||||
})?;
|
||||
runner.execute(Step::Macports, "MacPorts", || macos::run_macports(&ctx))?;
|
||||
runner.execute(Step::Xcodes, "Xcodes", || macos::update_xcodes(&ctx))?;
|
||||
runner.execute(Step::Sparkle, "Sparkle", || macos::run_sparkle(&ctx))?;
|
||||
runner.execute(Step::Mas, "App Store", || macos::run_mas(&ctx))?;
|
||||
runner.execute(Step::System, "System upgrade", || macos::upgrade_macos(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "dragonfly")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "DragonFly BSD Packages", || {
|
||||
dragonfly::upgrade_packages(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Audit, "DragonFly Audit", || dragonfly::audit_packages(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "FreeBSD Packages", || freebsd::upgrade_packages(&ctx))?;
|
||||
runner.execute(Step::System, "FreeBSD Upgrade", || freebsd::upgrade_freebsd(&ctx))?;
|
||||
runner.execute(Step::Audit, "FreeBSD Audit", || freebsd::audit_packages(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "openbsd")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "OpenBSD Packages", || openbsd::upgrade_packages(&ctx))?;
|
||||
runner.execute(Step::System, "OpenBSD Upgrade", || openbsd::upgrade_openbsd(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "android")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "Termux Packages", || android::upgrade_packages(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
runner.execute(Step::Yadm, "yadm", || unix::run_yadm(&ctx))?;
|
||||
runner.execute(Step::Nix, "nix", || unix::run_nix(&ctx))?;
|
||||
runner.execute(Step::Nix, "nix upgrade-nix", || unix::run_nix_self_upgrade(&ctx))?;
|
||||
runner.execute(Step::Guix, "guix", || unix::run_guix(&ctx))?;
|
||||
|
||||
runner.execute(Step::HomeManager, "home-manager", || unix::run_home_manager(run_type))?;
|
||||
runner.execute(Step::Asdf, "asdf", || unix::run_asdf(run_type))?;
|
||||
runner.execute(Step::HomeManager, "home-manager", || unix::run_home_manager(&ctx))?;
|
||||
runner.execute(Step::Asdf, "asdf", || unix::run_asdf(&ctx))?;
|
||||
runner.execute(Step::Mise, "mise", || unix::run_mise(&ctx))?;
|
||||
runner.execute(Step::Pkgin, "pkgin", || unix::run_pkgin(&ctx))?;
|
||||
runner.execute(Step::Bun, "bun", || unix::run_bun(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "dragonfly")]
|
||||
runner.execute(Step::Pkg, "DragonFly BSD Packages", || {
|
||||
dragonfly::upgrade_packages(ctx.sudo().as_ref(), run_type)
|
||||
})?;
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
runner.execute(Step::Pkg, "FreeBSD Packages", || {
|
||||
freebsd::upgrade_packages(&ctx, ctx.sudo().as_ref(), run_type)
|
||||
})?;
|
||||
|
||||
#[cfg(target_os = "openbsd")]
|
||||
runner.execute(Step::Pkg, "OpenBSD Packages", || {
|
||||
openbsd::upgrade_packages(ctx.sudo().as_ref(), run_type)
|
||||
})?;
|
||||
|
||||
#[cfg(target_os = "android")]
|
||||
runner.execute(Step::Pkg, "Termux Packages", || android::upgrade_packages(&ctx))?;
|
||||
|
||||
let emacs = emacs::Emacs::new(&base_dirs);
|
||||
if config.use_predefined_git_repos() {
|
||||
if config.should_run(Step::Emacs) {
|
||||
if !emacs.is_doom() {
|
||||
if let Some(directory) = emacs.directory() {
|
||||
git_repos.insert_if_repo(directory);
|
||||
}
|
||||
}
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".doom.d"));
|
||||
}
|
||||
|
||||
if config.should_run(Step::Vim) {
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".vim"));
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".config/nvim"));
|
||||
}
|
||||
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".ideavimrc"));
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".intellimacs"));
|
||||
|
||||
if config.should_run(Step::Rcm) {
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".dotfiles"));
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
git_repos.insert_if_repo(zsh::zshrc(&base_dirs));
|
||||
if config.should_run(Step::Tmux) {
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".tmux"));
|
||||
}
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".config/fish"));
|
||||
git_repos.insert_if_repo(base_dirs.config_dir().join("openbox"));
|
||||
git_repos.insert_if_repo(base_dirs.config_dir().join("bspwm"));
|
||||
git_repos.insert_if_repo(base_dirs.config_dir().join("i3"));
|
||||
git_repos.insert_if_repo(base_dirs.config_dir().join("sway"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
git_repos.insert_if_repo(
|
||||
base_dirs
|
||||
.data_local_dir()
|
||||
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
|
||||
);
|
||||
|
||||
#[cfg(windows)]
|
||||
windows::insert_startup_scripts(&ctx, &mut git_repos).ok();
|
||||
|
||||
if let Some(profile) = powershell.profile() {
|
||||
git_repos.insert_if_repo(profile);
|
||||
}
|
||||
}
|
||||
|
||||
if config.should_run(Step::GitRepos) {
|
||||
if let Some(custom_git_repos) = config.git_repos() {
|
||||
for git_repo in custom_git_repos {
|
||||
git_repos.glob_insert(git_repo);
|
||||
}
|
||||
}
|
||||
runner.execute(Step::GitRepos, "Git repositories", || {
|
||||
git.multi_pull_step(&git_repos, &ctx)
|
||||
})?;
|
||||
}
|
||||
|
||||
if should_run_powershell {
|
||||
runner.execute(Step::Powershell, "Powershell Modules Update", || {
|
||||
powershell.update_modules(&ctx)
|
||||
})?;
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
runner.execute(Step::Shell, "zr", || zsh::run_zr(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "antibody", || zsh::run_antibody(run_type))?;
|
||||
runner.execute(Step::Shell, "antigen", || zsh::run_antigen(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "zgenom", || zsh::run_zgenom(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "zplug", || zsh::run_zplug(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "zinit", || zsh::run_zinit(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "zi", || zsh::run_zi(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "zim", || zsh::run_zim(&base_dirs, run_type))?;
|
||||
runner.execute(Step::BunPackages, "bun-packages", || unix::run_bun_packages(&ctx))?;
|
||||
runner.execute(Step::Shell, "zr", || zsh::run_zr(&ctx))?;
|
||||
runner.execute(Step::Shell, "antibody", || zsh::run_antibody(&ctx))?;
|
||||
runner.execute(Step::Shell, "antidote", || zsh::run_antidote(&ctx))?;
|
||||
runner.execute(Step::Shell, "antigen", || zsh::run_antigen(&ctx))?;
|
||||
runner.execute(Step::Shell, "zgenom", || zsh::run_zgenom(&ctx))?;
|
||||
runner.execute(Step::Shell, "zplug", || zsh::run_zplug(&ctx))?;
|
||||
runner.execute(Step::Shell, "zinit", || zsh::run_zinit(&ctx))?;
|
||||
runner.execute(Step::Shell, "zi", || zsh::run_zi(&ctx))?;
|
||||
runner.execute(Step::Shell, "zim", || zsh::run_zim(&ctx))?;
|
||||
runner.execute(Step::Shell, "oh-my-zsh", || zsh::run_oh_my_zsh(&ctx))?;
|
||||
runner.execute(Step::Shell, "fisher", || unix::run_fisher(run_type))?;
|
||||
runner.execute(Step::Shell, "oh-my-bash", || unix::run_oh_my_bash(&ctx))?;
|
||||
runner.execute(Step::Shell, "fisher", || unix::run_fisher(&ctx))?;
|
||||
runner.execute(Step::Shell, "bash-it", || unix::run_bashit(&ctx))?;
|
||||
runner.execute(Step::Shell, "oh-my-fish", || unix::run_oh_my_fish(&ctx))?;
|
||||
runner.execute(Step::Shell, "fish-plug", || unix::run_fish_plug(&ctx))?;
|
||||
runner.execute(Step::Shell, "fundle", || unix::run_fundle(&ctx))?;
|
||||
runner.execute(Step::Tmux, "tmux", || tmux::run_tpm(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Tldr, "TLDR", || unix::run_tldr(run_type))?;
|
||||
runner.execute(Step::Pearl, "pearl", || unix::run_pearl(run_type))?;
|
||||
runner.execute(Step::Tmux, "tmux", || tmux::run_tpm(&ctx))?;
|
||||
runner.execute(Step::Tldr, "TLDR", || unix::run_tldr(&ctx))?;
|
||||
runner.execute(Step::Pearl, "pearl", || unix::run_pearl(&ctx))?;
|
||||
#[cfg(not(any(target_os = "macos", target_os = "android")))]
|
||||
runner.execute(Step::GnomeShellExtensions, "Gnome Shell Extensions", || {
|
||||
unix::upgrade_gnome_extensions(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Sdkman, "SDKMAN!", || {
|
||||
unix::run_sdkman(&base_dirs, config.cleanup(), run_type)
|
||||
})?;
|
||||
runner.execute(Step::Pyenv, "pyenv", || unix::run_pyenv(&ctx))?;
|
||||
runner.execute(Step::Sdkman, "SDKMAN!", || unix::run_sdkman(&ctx))?;
|
||||
runner.execute(Step::Rcm, "rcm", || unix::run_rcm(&ctx))?;
|
||||
runner.execute(Step::Maza, "maza", || unix::run_maza(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(not(any(
|
||||
@@ -347,38 +338,48 @@ For more information about this issue see https://askubuntu.com/questions/110969
|
||||
target_os = "netbsd",
|
||||
target_os = "dragonfly"
|
||||
)))]
|
||||
runner.execute(Step::Atom, "apm", || generic::run_apm(run_type))?;
|
||||
runner.execute(Step::Fossil, "fossil", || generic::run_fossil(run_type))?;
|
||||
runner.execute(Step::Rustup, "rustup", || generic::run_rustup(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Juliaup, "juliaup", || generic::run_juliaup(&base_dirs, run_type))?;
|
||||
{
|
||||
runner.execute(Step::Atom, "apm", || generic::run_apm(&ctx))?;
|
||||
}
|
||||
|
||||
// The following update function should be executed on all OSes.
|
||||
runner.execute(Step::Fossil, "fossil", || generic::run_fossil(&ctx))?;
|
||||
runner.execute(Step::Elan, "elan", || generic::run_elan(&ctx))?;
|
||||
runner.execute(Step::Rye, "rye", || generic::run_rye(&ctx))?;
|
||||
runner.execute(Step::Rustup, "rustup", || generic::run_rustup(&ctx))?;
|
||||
runner.execute(Step::Juliaup, "juliaup", || generic::run_juliaup(&ctx))?;
|
||||
runner.execute(Step::Dotnet, ".NET", || generic::run_dotnet_upgrade(&ctx))?;
|
||||
runner.execute(Step::Choosenim, "choosenim", || generic::run_choosenim(&ctx))?;
|
||||
runner.execute(Step::Cargo, "cargo", || generic::run_cargo_update(&ctx))?;
|
||||
runner.execute(Step::Flutter, "Flutter", || generic::run_flutter_upgrade(run_type))?;
|
||||
runner.execute(Step::Go, "go-global-update", || go::run_go_global_update(run_type))?;
|
||||
runner.execute(Step::Go, "gup", || go::run_go_gup(run_type))?;
|
||||
runner.execute(Step::Flutter, "Flutter", || generic::run_flutter_upgrade(&ctx))?;
|
||||
runner.execute(Step::Go, "go-global-update", || go::run_go_global_update(&ctx))?;
|
||||
runner.execute(Step::Go, "gup", || go::run_go_gup(&ctx))?;
|
||||
runner.execute(Step::Emacs, "Emacs", || emacs.upgrade(&ctx))?;
|
||||
runner.execute(Step::Opam, "opam", || generic::run_opam_update(&ctx))?;
|
||||
runner.execute(Step::Vcpkg, "vcpkg", || generic::run_vcpkg_update(&ctx))?;
|
||||
runner.execute(Step::Pipx, "pipx", || generic::run_pipx_update(run_type))?;
|
||||
runner.execute(Step::Pipx, "pipx", || generic::run_pipx_update(&ctx))?;
|
||||
runner.execute(Step::Vscode, "Visual Studio Code extensions", || {
|
||||
generic::run_vscode_extensions_update(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Conda, "conda", || generic::run_conda_update(&ctx))?;
|
||||
runner.execute(Step::Pip3, "pip3", || generic::run_pip3_update(run_type))?;
|
||||
runner.execute(Step::Mamba, "mamba", || generic::run_mamba_update(&ctx))?;
|
||||
runner.execute(Step::Miktex, "miktex", || generic::run_miktex_packages_update(&ctx))?;
|
||||
runner.execute(Step::Pip3, "pip3", || generic::run_pip3_update(&ctx))?;
|
||||
runner.execute(Step::PipReview, "pip-review", || generic::run_pip_review_update(&ctx))?;
|
||||
runner.execute(Step::PipReviewLocal, "pip-review (local)", || {
|
||||
generic::run_pip_review_local_update(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Pipupgrade, "pipupgrade", || generic::run_pipupgrade_update(&ctx))?;
|
||||
runner.execute(Step::Ghcup, "ghcup", || generic::run_ghcup_update(run_type))?;
|
||||
runner.execute(Step::Stack, "stack", || generic::run_stack_update(run_type))?;
|
||||
runner.execute(Step::Ghcup, "ghcup", || generic::run_ghcup_update(&ctx))?;
|
||||
runner.execute(Step::Stack, "stack", || generic::run_stack_update(&ctx))?;
|
||||
runner.execute(Step::Tlmgr, "tlmgr", || generic::run_tlmgr_update(&ctx))?;
|
||||
runner.execute(Step::Myrepos, "myrepos", || {
|
||||
generic::run_myrepos_update(&base_dirs, run_type)
|
||||
})?;
|
||||
runner.execute(Step::Chezmoi, "chezmoi", || {
|
||||
generic::run_chezmoi_update(&base_dirs, run_type)
|
||||
})?;
|
||||
runner.execute(Step::Jetpack, "jetpack", || generic::run_jetpack(run_type))?;
|
||||
runner.execute(Step::Vim, "vim", || vim::upgrade_vim(&base_dirs, &ctx))?;
|
||||
runner.execute(Step::Vim, "Neovim", || vim::upgrade_neovim(&base_dirs, &ctx))?;
|
||||
runner.execute(Step::Myrepos, "myrepos", || generic::run_myrepos_update(&ctx))?;
|
||||
runner.execute(Step::Chezmoi, "chezmoi", || generic::run_chezmoi_update(&ctx))?;
|
||||
runner.execute(Step::Jetpack, "jetpack", || generic::run_jetpack(&ctx))?;
|
||||
runner.execute(Step::Vim, "vim", || vim::upgrade_vim(&ctx))?;
|
||||
runner.execute(Step::Vim, "Neovim", || vim::upgrade_neovim(&ctx))?;
|
||||
runner.execute(Step::Vim, "The Ultimate vimrc", || vim::upgrade_ultimate_vimrc(&ctx))?;
|
||||
runner.execute(Step::Vim, "voom", || vim::run_voom(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Vim, "voom", || vim::run_voom(&ctx))?;
|
||||
runner.execute(Step::Kakoune, "Kakoune", || kakoune::upgrade_kak_plug(&ctx))?;
|
||||
runner.execute(Step::Helix, "helix", || generic::run_helix_grammars(&ctx))?;
|
||||
runner.execute(Step::Node, "npm", || node::run_npm_upgrade(&ctx))?;
|
||||
@@ -387,37 +388,38 @@ For more information about this issue see https://askubuntu.com/questions/110969
|
||||
runner.execute(Step::Containers, "Containers", || containers::run_containers(&ctx))?;
|
||||
runner.execute(Step::Deno, "deno", || node::deno_upgrade(&ctx))?;
|
||||
runner.execute(Step::Composer, "composer", || generic::run_composer_update(&ctx))?;
|
||||
runner.execute(Step::Krew, "krew", || generic::run_krew_upgrade(run_type))?;
|
||||
runner.execute(Step::Helm, "helm", || generic::run_helm_repo_update(run_type))?;
|
||||
runner.execute(Step::Gem, "gem", || generic::run_gem(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Krew, "krew", || generic::run_krew_upgrade(&ctx))?;
|
||||
runner.execute(Step::Helm, "helm", || generic::run_helm_repo_update(&ctx))?;
|
||||
runner.execute(Step::Gem, "gem", || generic::run_gem(&ctx))?;
|
||||
runner.execute(Step::RubyGems, "rubygems", || generic::run_rubygems(&ctx))?;
|
||||
runner.execute(Step::Julia, "julia", || generic::update_julia_packages(&ctx))?;
|
||||
runner.execute(Step::Haxelib, "haxelib", || generic::run_haxelib_update(&ctx))?;
|
||||
runner.execute(Step::Sheldon, "sheldon", || generic::run_sheldon(&ctx))?;
|
||||
runner.execute(Step::Stew, "stew", || generic::run_stew(&ctx))?;
|
||||
runner.execute(Step::Rtcl, "rtcl", || generic::run_rtcl(&ctx))?;
|
||||
runner.execute(Step::Bin, "bin", || generic::bin_update(&ctx))?;
|
||||
runner.execute(Step::Gcloud, "gcloud", || {
|
||||
generic::run_gcloud_components_update(run_type)
|
||||
})?;
|
||||
runner.execute(Step::Micro, "micro", || generic::run_micro(run_type))?;
|
||||
runner.execute(Step::Raco, "raco", || generic::run_raco_update(run_type))?;
|
||||
runner.execute(Step::Gcloud, "gcloud", || generic::run_gcloud_components_update(&ctx))?;
|
||||
runner.execute(Step::Micro, "micro", || generic::run_micro(&ctx))?;
|
||||
runner.execute(Step::Raco, "raco", || generic::run_raco_update(&ctx))?;
|
||||
runner.execute(Step::Spicetify, "spicetify", || generic::spicetify_upgrade(&ctx))?;
|
||||
runner.execute(Step::GithubCliExtensions, "GitHub CLI Extensions", || {
|
||||
generic::run_ghcli_extensions_upgrade(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Bob, "Bob", || generic::run_bob(&ctx))?;
|
||||
runner.execute(Step::Certbot, "Certbot", || generic::run_certbot(&ctx))?;
|
||||
runner.execute(Step::GitRepos, "Git Repositories", || git::run_git_pull(&ctx))?;
|
||||
runner.execute(Step::ClamAvDb, "ClamAV Databases", || generic::run_freshclam(&ctx))?;
|
||||
runner.execute(Step::PlatformioCore, "PlatformIO Core", || {
|
||||
generic::run_platform_io(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Lensfun, "Lensfun's database update", || {
|
||||
generic::run_lensfun_update_data(&ctx)
|
||||
})?;
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
runner.execute(Step::AM, "am", || linux::update_am(&ctx))?;
|
||||
runner.execute(Step::DebGet, "deb-get", || linux::run_deb_get(&ctx))?;
|
||||
runner.execute(Step::Toolbx, "toolbx", || toolbx::run_toolbx(&ctx))?;
|
||||
runner.execute(Step::Flatpak, "Flatpak", || linux::flatpak_update(&ctx))?;
|
||||
runner.execute(Step::Snap, "snap", || linux::run_snap(ctx.sudo().as_ref(), run_type))?;
|
||||
runner.execute(Step::Pacstall, "pacstall", || linux::run_pacstall(&ctx))?;
|
||||
runner.execute(Step::Pacdef, "pacdef", || linux::run_pacdef(&ctx))?;
|
||||
runner.execute(Step::Protonup, "protonup", || linux::run_protonup_update(&ctx))?;
|
||||
runner.execute(Step::Distrobox, "distrobox", || linux::run_distrobox_update(&ctx))?;
|
||||
runner.execute(Step::DkpPacman, "dkp-pacman", || linux::run_dkp_pacman_update(&ctx))?;
|
||||
if should_run_powershell {
|
||||
runner.execute(Step::Powershell, "Powershell Modules Update", || {
|
||||
powershell.update_modules(&ctx)
|
||||
})?;
|
||||
}
|
||||
|
||||
if let Some(commands) = config.commands() {
|
||||
@@ -430,37 +432,6 @@ For more information about this issue see https://askubuntu.com/questions/110969
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
runner.execute(Step::System, "pihole", || {
|
||||
linux::run_pihole_update(ctx.sudo().as_ref(), run_type)
|
||||
})?;
|
||||
runner.execute(Step::Firmware, "Firmware upgrades", || linux::run_fwupdmgr(&ctx))?;
|
||||
runner.execute(Step::Restarts, "Restarts", || {
|
||||
linux::run_needrestart(ctx.sudo().as_ref(), run_type)
|
||||
})?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
runner.execute(Step::Sparkle, "Sparkle", || macos::run_sparkle(&ctx))?;
|
||||
runner.execute(Step::Mas, "App Store", || macos::run_mas(run_type))?;
|
||||
runner.execute(Step::System, "System upgrade", || macos::upgrade_macos(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
runner.execute(Step::System, "FreeBSD Upgrade", || {
|
||||
freebsd::upgrade_freebsd(ctx.sudo().as_ref(), run_type)
|
||||
})?;
|
||||
|
||||
#[cfg(target_os = "openbsd")]
|
||||
runner.execute(Step::System, "OpenBSD Upgrade", || {
|
||||
openbsd::upgrade_openbsd(ctx.sudo().as_ref(), run_type)
|
||||
})?;
|
||||
|
||||
#[cfg(windows)]
|
||||
runner.execute(Step::System, "Windows update", || windows::windows_update(&ctx))?;
|
||||
|
||||
if config.should_run(Step::Vagrant) {
|
||||
if let Ok(boxes) = vagrant::collect_boxes(&ctx) {
|
||||
for vagrant_box in boxes {
|
||||
@@ -485,12 +456,6 @@ For more information about this issue see https://askubuntu.com/questions/110969
|
||||
distribution.show_summary();
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
freebsd::audit_packages(ctx.sudo().as_ref()).ok();
|
||||
|
||||
#[cfg(target_os = "dragonfly")]
|
||||
dragonfly::audit_packages(ctx.sudo().as_ref()).ok();
|
||||
}
|
||||
|
||||
let mut post_command_failed = false;
|
||||
@@ -524,7 +489,7 @@ For more information about this issue see https://askubuntu.com/questions/110969
|
||||
let failed = post_command_failed || runner.report().data().iter().any(|(_, result)| result.failed());
|
||||
|
||||
if !config.skip_notify() {
|
||||
terminal::notify_desktop(
|
||||
notify_desktop(
|
||||
format!(
|
||||
"Topgrade finished {}",
|
||||
if failed { "with errors" } else { "successfully" }
|
||||
@@ -569,26 +534,3 @@ fn main() {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn install_tracing(filter_directives: &str) -> Result<()> {
|
||||
use tracing_subscriber::fmt;
|
||||
use tracing_subscriber::fmt::format::FmtSpan;
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
use tracing_subscriber::util::SubscriberInitExt;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
let env_filter = EnvFilter::try_new(filter_directives)
|
||||
.or_else(|_| EnvFilter::try_from_default_env())
|
||||
.or_else(|_| EnvFilter::try_new("info"))?;
|
||||
|
||||
let fmt_layer = fmt::layer()
|
||||
.with_target(false)
|
||||
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
|
||||
.without_time();
|
||||
|
||||
let registry = tracing_subscriber::registry();
|
||||
|
||||
registry.with(env_filter).with(fmt_layer).init();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -34,6 +34,14 @@ impl<'a> Runner<'a> {
|
||||
let key = key.into();
|
||||
debug!("Step {:?}", key);
|
||||
|
||||
// alter the `func` to put it in a span
|
||||
let func = || {
|
||||
let span =
|
||||
tracing::span!(parent: tracing::Span::none(), tracing::Level::TRACE, "step", step = ?step, key = %key);
|
||||
let _guard = span.enter();
|
||||
func()
|
||||
};
|
||||
|
||||
loop {
|
||||
match func() {
|
||||
Ok(()) => {
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#![cfg(windows)]
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use std::{env::current_exe, fs, path::PathBuf};
|
||||
use tracing::{debug, error};
|
||||
|
||||
@@ -3,6 +3,7 @@ use std::env;
|
||||
use std::os::unix::process::CommandExt as _;
|
||||
use std::process::Command;
|
||||
|
||||
use crate::config::Step;
|
||||
use color_eyre::eyre::{bail, Result};
|
||||
use self_update_crate::backends::github::Update;
|
||||
use self_update_crate::update::UpdateStatus;
|
||||
@@ -11,52 +12,61 @@ use super::terminal::*;
|
||||
#[cfg(windows)]
|
||||
use crate::error::Upgraded;
|
||||
|
||||
pub fn self_update() -> Result<()> {
|
||||
use crate::execution_context::ExecutionContext;
|
||||
|
||||
pub fn self_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator("Self update");
|
||||
let current_exe = env::current_exe();
|
||||
|
||||
let target = self_update_crate::get_target();
|
||||
let result = Update::configure()
|
||||
.repo_owner("topgrade-rs")
|
||||
.repo_name("topgrade")
|
||||
.target(target)
|
||||
.bin_name(if cfg!(windows) { "topgrade.exe" } else { "topgrade" })
|
||||
.show_output(false)
|
||||
.show_download_progress(true)
|
||||
.current_version(self_update_crate::cargo_crate_version!())
|
||||
.no_confirm(true)
|
||||
.build()?
|
||||
.update_extended()?;
|
||||
|
||||
if let UpdateStatus::Updated(release) = &result {
|
||||
println!("\nTopgrade upgraded to {}:\n", release.version);
|
||||
if let Some(body) = &release.body {
|
||||
println!("{body}");
|
||||
}
|
||||
if ctx.run_type().dry() {
|
||||
println!("Would self-update");
|
||||
Ok(())
|
||||
} else {
|
||||
println!("Topgrade is up-to-date");
|
||||
}
|
||||
let assume_yes = ctx.config().yes(Step::SelfUpdate);
|
||||
let current_exe = env::current_exe();
|
||||
|
||||
{
|
||||
if result.updated() {
|
||||
print_warning("Respawning...");
|
||||
let mut command = Command::new(current_exe?);
|
||||
command.args(env::args().skip(1)).env("TOPGRADE_NO_SELF_UPGRADE", "");
|
||||
let target = self_update_crate::get_target();
|
||||
let result = Update::configure()
|
||||
.repo_owner("topgrade-rs")
|
||||
.repo_name("topgrade")
|
||||
.target(target)
|
||||
.bin_name(if cfg!(windows) { "topgrade.exe" } else { "topgrade" })
|
||||
.show_output(true)
|
||||
.show_download_progress(true)
|
||||
.current_version(self_update_crate::cargo_crate_version!())
|
||||
.no_confirm(assume_yes)
|
||||
.build()?
|
||||
.update_extended()?;
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
let err = command.exec();
|
||||
bail!(err);
|
||||
if let UpdateStatus::Updated(release) = &result {
|
||||
println!("\nTopgrade upgraded to {}:\n", release.version);
|
||||
if let Some(body) = &release.body {
|
||||
println!("{body}");
|
||||
}
|
||||
} else {
|
||||
println!("Topgrade is up-to-date");
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let status = command.status()?;
|
||||
bail!(Upgraded(status));
|
||||
{
|
||||
if result.updated() {
|
||||
print_info("Respawning...");
|
||||
let mut command = Command::new(current_exe?);
|
||||
command.args(env::args().skip(1)).env("TOPGRADE_NO_SELF_UPGRADE", "");
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
let err = command.exec();
|
||||
bail!(err);
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let status = command.status()?;
|
||||
bail!(Upgraded(status));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
@@ -5,6 +6,7 @@ use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use tracing::{debug, error, warn};
|
||||
use wildmatch::WildMatch;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::error::{self, TopgradeError};
|
||||
@@ -18,15 +20,51 @@ use crate::{execution_context::ExecutionContext, utils::require};
|
||||
// themselves or when using docker-compose.
|
||||
const NONEXISTENT_REPO: &str = "repository does not exist";
|
||||
|
||||
/// Uniquely identifies a `Container`.
|
||||
#[derive(Debug)]
|
||||
struct Container {
|
||||
/// `Repository` and `Tag`
|
||||
///
|
||||
/// format: `Repository:Tag`, e.g., `nixos/nix:latest`.
|
||||
repo_tag: String,
|
||||
/// Platform
|
||||
///
|
||||
/// format: `OS/Architecture`, e.g., `linux/amd64`.
|
||||
platform: String,
|
||||
}
|
||||
|
||||
impl Container {
|
||||
/// Construct a new `Container`.
|
||||
fn new(repo_tag: String, platform: String) -> Self {
|
||||
Self { repo_tag, platform }
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Container {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
// e.g., "`fedora:latest` for `linux/amd64`"
|
||||
write!(f, "`{}` for `{}`", self.repo_tag, self.platform)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a Vector of all containers, with Strings in the format
|
||||
/// "REGISTRY/[PATH/]CONTAINER_NAME:TAG"
|
||||
fn list_containers(crt: &Path) -> Result<Vec<String>> {
|
||||
///
|
||||
/// Containers specified in `ignored_containers` will be filtered out.
|
||||
fn list_containers(crt: &Path, ignored_containers: Option<&Vec<String>>) -> Result<Vec<Container>> {
|
||||
let ignored_containers = ignored_containers.map(|patterns| {
|
||||
patterns
|
||||
.iter()
|
||||
.map(|pattern| WildMatch::new(pattern))
|
||||
.collect::<Vec<WildMatch>>()
|
||||
});
|
||||
|
||||
debug!(
|
||||
"Querying '{} image ls --format \"{{{{.Repository}}}}:{{{{.Tag}}}}\"' for containers",
|
||||
"Querying '{} image ls --format \"{{{{.Repository}}}}:{{{{.Tag}}}}/{{{{.ID}}}}\"' for containers",
|
||||
crt.display()
|
||||
);
|
||||
let output = Command::new(crt)
|
||||
.args(["image", "ls", "--format", "{{.Repository}}:{{.Tag}}"])
|
||||
.args(["image", "ls", "--format", "{{.Repository}}:{{.Tag}} {{.ID}}"])
|
||||
.output_checked_with_utf8(|_| Ok(()))?;
|
||||
|
||||
let mut retval = vec![];
|
||||
@@ -49,7 +87,33 @@ fn list_containers(crt: &Path) -> Result<Vec<String>> {
|
||||
}
|
||||
|
||||
debug!("Using container '{}'", line);
|
||||
retval.push(String::from(line));
|
||||
|
||||
// line is of format: `Repository:Tag ImageID`, e.g., `nixos/nix:latest d80fea9c32b4`
|
||||
let split_res = line.split(' ').collect::<Vec<&str>>();
|
||||
assert_eq!(split_res.len(), 2);
|
||||
let (repo_tag, image_id) = (split_res[0], split_res[1]);
|
||||
|
||||
if let Some(ref ignored_containers) = ignored_containers {
|
||||
if ignored_containers.iter().any(|pattern| pattern.matches(repo_tag)) {
|
||||
debug!("Skipping ignored container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
debug!(
|
||||
"Querying '{} image inspect --format \"{{{{.Os}}}}/{{{{.Architecture}}}}\"' for container {}",
|
||||
crt.display(),
|
||||
image_id
|
||||
);
|
||||
let inspect_output = Command::new(crt)
|
||||
.args(["image", "inspect", image_id, "--format", "{{.Os}}/{{.Architecture}}"])
|
||||
.output_checked_with_utf8(|_| Ok(()))?;
|
||||
let mut platform = inspect_output.stdout;
|
||||
// truncate the trailing newline character
|
||||
platform.truncate(platform.len() - 1);
|
||||
assert!(platform.contains('/'));
|
||||
|
||||
retval.push(Container::new(repo_tag.to_string(), platform));
|
||||
}
|
||||
|
||||
Ok(retval)
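
The new listing pairs each image's `Repository:Tag` with the `OS/Architecture` reported by `image inspect`, so the later pull can pass `--platform` explicitly. A minimal standalone sketch of that flow, assuming a `docker` binary on PATH and plain substring matching in place of the `WildMatch` patterns used above:

```rust
use std::process::Command;

// Hypothetical helper: returns (repo_tag, platform) pairs for all local images.
fn list_platforms(ignored: &[&str]) -> std::io::Result<Vec<(String, String)>> {
    let out = Command::new("docker")
        .args(["image", "ls", "--format", "{{.Repository}}:{{.Tag}} {{.ID}}"])
        .output()?;
    let stdout = String::from_utf8_lossy(&out.stdout);

    let mut containers = Vec::new();
    for line in stdout.lines() {
        // Each line looks like `nixos/nix:latest d80fea9c32b4`.
        let mut parts = line.split_whitespace();
        let (Some(repo_tag), Some(image_id)) = (parts.next(), parts.next()) else {
            continue;
        };
        if ignored.iter().any(|pattern| repo_tag.contains(pattern)) {
            continue; // mirrors the `ignored_containers` filter
        }
        // Ask the runtime for the image's platform, e.g. `linux/amd64`.
        let inspect = Command::new("docker")
            .args(["image", "inspect", image_id, "--format", "{{.Os}}/{{.Architecture}}"])
            .output()?;
        let platform = String::from_utf8_lossy(&inspect.stdout).trim().to_string();
        containers.push((repo_tag.to_string(), platform));
    }
    Ok(containers)
}
```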
|
||||
@@ -62,12 +126,18 @@ pub fn run_containers(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("Containers");
|
||||
let mut success = true;
|
||||
let containers = list_containers(&crt).context("Failed to list Docker containers")?;
|
||||
let containers =
|
||||
list_containers(&crt, ctx.config().containers_ignored_tags()).context("Failed to list Docker containers")?;
|
||||
debug!("Containers to inspect: {:?}", containers);
|
||||
|
||||
for container in containers.iter() {
|
||||
debug!("Pulling container '{}'", container);
|
||||
let args = vec!["pull", &container[..]];
|
||||
let args = vec![
|
||||
"pull",
|
||||
container.repo_tag.as_str(),
|
||||
"--platform",
|
||||
container.platform.as_str(),
|
||||
];
|
||||
let mut exec = ctx.run_type().execute(&crt);
|
||||
|
||||
if let Err(e) = exec.args(&args).status_checked() {
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
#[cfg(any(windows, target_os = "macos"))]
|
||||
#[cfg(windows)]
|
||||
use std::env;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use directories::BaseDirs;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
@@ -23,20 +23,12 @@ pub struct Emacs {
|
||||
}
|
||||
|
||||
impl Emacs {
|
||||
fn directory_path(base_dirs: &BaseDirs) -> Option<PathBuf> {
|
||||
fn directory_path() -> Option<PathBuf> {
|
||||
#[cfg(unix)]
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(target_os = "macos")] {
|
||||
let emacs_xdg_dir = env::var("XDG_CONFIG_HOME")
|
||||
.ok()
|
||||
.and_then(|config| PathBuf::from(config).join("emacs").if_exists())
|
||||
.or_else(|| base_dirs.home_dir().join(".config/emacs").if_exists());
|
||||
} else {
|
||||
let emacs_xdg_dir = base_dirs.config_dir().join("emacs").if_exists();
|
||||
}
|
||||
}
|
||||
#[cfg(unix)]
|
||||
return base_dirs.home_dir().join(".emacs.d").if_exists().or(emacs_xdg_dir);
|
||||
return {
|
||||
let emacs_xdg_dir = crate::XDG_DIRS.config_dir().join("emacs").if_exists();
|
||||
crate::HOME_DIR.join(".emacs.d").if_exists().or(emacs_xdg_dir)
|
||||
};
|
||||
|
||||
#[cfg(windows)]
|
||||
return env::var("HOME")
|
||||
@@ -47,11 +39,11 @@ impl Emacs {
|
||||
.if_exists()
|
||||
.or_else(|| PathBuf::from(&home).join(".config\\emacs").if_exists())
|
||||
})
|
||||
.or_else(|| base_dirs.data_dir().join(".emacs.d").if_exists());
|
||||
.or_else(|| crate::WINDOWS_DIRS.data_dir().join(".emacs.d").if_exists());
|
||||
}
|
||||
|
||||
pub fn new(base_dirs: &BaseDirs) -> Self {
|
||||
let directory = Emacs::directory_path(base_dirs);
|
||||
pub fn new() -> Self {
|
||||
let directory = Emacs::directory_path();
|
||||
let doom = directory.as_ref().and_then(|d| d.join(DOOM_PATH).if_exists());
|
||||
Self { directory, doom }
|
||||
}
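
The rewritten lookup goes through the process-wide `HOME_DIR`/`XDG_DIRS` statics instead of `BaseDirs`. Roughly the same resolution order with nothing but the standard library (a sketch that assumes `$HOME` is set and ignores the macOS and Windows branches):

```rust
use std::env;
use std::path::PathBuf;

fn emacs_directory() -> Option<PathBuf> {
    let home = PathBuf::from(env::var_os("HOME")?);
    let xdg_config = env::var_os("XDG_CONFIG_HOME")
        .map(PathBuf::from)
        .unwrap_or_else(|| home.join(".config"));
    // Prefer ~/.emacs.d, then fall back to the XDG config directory.
    [home.join(".emacs.d"), xdg_config.join("emacs")]
        .into_iter()
        .find(|p| p.exists())
}
```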
|
||||
|
||||
@@ -8,26 +8,40 @@ use std::{fs, io::Write};
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use directories::BaseDirs;
|
||||
use semver::Version;
|
||||
use tempfile::tempfile_in;
|
||||
use tracing::{debug, error};
|
||||
|
||||
use crate::command::{CommandExt, Utf8Output};
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::{ExecutorOutput, RunType};
|
||||
use crate::executor::ExecutorOutput;
|
||||
use crate::terminal::{print_separator, shell};
|
||||
use crate::utils::{self, require, require_option, which, PathExt};
|
||||
use crate::utils::{self, check_is_python_2_or_shim, require, require_option, which, PathExt, REQUIRE_SUDO};
|
||||
use crate::Step;
|
||||
use crate::HOME_DIR;
|
||||
use crate::{
|
||||
error::{SkipStep, StepFailed, TopgradeError},
|
||||
terminal::print_warning,
|
||||
};
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
pub fn is_wsl() -> Result<bool> {
|
||||
let output = Command::new("uname").arg("-r").output_checked_utf8()?.stdout;
|
||||
debug!("Uname output: {}", output);
|
||||
Ok(output.contains("microsoft"))
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
pub fn is_wsl() -> Result<bool> {
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
pub fn run_cargo_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let cargo_dir = env::var_os("CARGO_HOME")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|| ctx.base_dirs().home_dir().join(".cargo"))
|
||||
.unwrap_or_else(|| HOME_DIR.join(".cargo"))
|
||||
.require()?;
|
||||
utils::require("cargo").or_else(|_| {
|
||||
require("cargo").or_else(|_| {
|
||||
require_option(
|
||||
cargo_dir.join("bin/cargo").if_exists(),
|
||||
String::from("No cargo detected"),
|
||||
@@ -41,7 +55,7 @@ pub fn run_cargo_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
print_separator("Cargo");
|
||||
let cargo_update = utils::require("cargo-install-update")
|
||||
let cargo_update = require("cargo-install-update")
|
||||
.ok()
|
||||
.or_else(|| cargo_dir.join("bin/cargo-install-update").if_exists());
|
||||
let cargo_update = match cargo_update {
|
||||
@@ -56,23 +70,40 @@ pub fn run_cargo_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
ctx.run_type()
|
||||
.execute(cargo_update)
|
||||
.args(["install-update", "--git", "--all"])
|
||||
.status_checked()
|
||||
.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
let cargo_cache = require("cargo-cache")
|
||||
.ok()
|
||||
.or_else(|| cargo_dir.join("bin/cargo-cache").if_exists());
|
||||
match cargo_cache {
|
||||
Some(e) => {
|
||||
ctx.run_type().execute(e).args(["-a"]).status_checked()?;
|
||||
}
|
||||
None => {
|
||||
let message = String::from("cargo-cache isn't installed so Topgrade can't cleanup cargo packages.\nInstall cargo-cache by running `cargo install cargo-cache`");
|
||||
print_warning(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
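
Both `cargo-install-update` and the new `cargo-cache` fallback use the same lookup: prefer the binary on `PATH`, otherwise try the rustup-style `~/.cargo/bin`. A std-only sketch of that pattern (`find_cargo_tool` is a hypothetical stand-in for topgrade's `require`, and the `.exe` suffix needed on Windows is ignored):

```rust
use std::env;
use std::path::PathBuf;

fn find_cargo_tool(name: &str) -> Option<PathBuf> {
    // Prefer whatever is on PATH.
    if let Some(paths) = env::var_os("PATH") {
        for dir in env::split_paths(&paths) {
            let candidate = dir.join(name);
            if candidate.is_file() {
                return Some(candidate);
            }
        }
    }
    // Fall back to $CARGO_HOME/bin (default ~/.cargo/bin) for rustup-managed installs.
    let cargo_home = env::var_os("CARGO_HOME")
        .map(PathBuf::from)
        .or_else(|| env::var_os("HOME").map(|h| PathBuf::from(h).join(".cargo")))?;
    let candidate = cargo_home.join("bin").join(name);
    candidate.is_file().then_some(candidate)
}
```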
|
||||
|
||||
pub fn run_flutter_upgrade(run_type: RunType) -> Result<()> {
|
||||
let flutter = utils::require("flutter")?;
|
||||
pub fn run_flutter_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let flutter = require("flutter")?;
|
||||
|
||||
print_separator("Flutter");
|
||||
run_type.execute(flutter).arg("upgrade").status_checked()
|
||||
ctx.run_type().execute(flutter).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_gem(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
|
||||
let gem = utils::require("gem")?;
|
||||
base_dirs.home_dir().join(".gem").require()?;
|
||||
pub fn run_gem(ctx: &ExecutionContext) -> Result<()> {
|
||||
let gem = require("gem")?;
|
||||
HOME_DIR.join(".gem").require()?;
|
||||
|
||||
print_separator("Gems");
|
||||
|
||||
let mut command = run_type.execute(gem);
|
||||
let mut command = ctx.run_type().execute(gem);
|
||||
command.arg("update");
|
||||
|
||||
if env::var_os("RBENV_SHELL").is_none() {
|
||||
@@ -84,27 +115,36 @@ pub fn run_gem(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn run_rubygems(ctx: &ExecutionContext) -> Result<()> {
|
||||
ctx.base_dirs().home_dir().join(".gem").require()?;
|
||||
HOME_DIR.join(".gem").require()?;
|
||||
let gem = require("gem")?;
|
||||
|
||||
print_separator("RubyGems");
|
||||
if let Some(sudo) = &ctx.sudo() {
|
||||
if !std::path::Path::new("/usr/lib/ruby/vendor_ruby/rubygems/defaults/operating_system.rb").exists() {
|
||||
let gem_path_str = gem.as_os_str();
|
||||
if gem_path_str.to_str().unwrap().contains("asdf")
|
||||
|| gem_path_str.to_str().unwrap().contains(".rbenv")
|
||||
|| gem_path_str.to_str().unwrap().contains(".rvm")
|
||||
{
|
||||
ctx.run_type()
|
||||
.execute(gem)
|
||||
.args(["update", "--system"])
|
||||
.status_checked()?;
|
||||
} else {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
if !Path::new("/usr/lib/ruby/vendor_ruby/rubygems/defaults/operating_system.rb").exists() {
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.arg("-EH")
|
||||
.arg(require("gem")?)
|
||||
.arg(gem)
|
||||
.args(["update", "--system"])
|
||||
.status_checked()?;
|
||||
}
|
||||
} else {
|
||||
print_warning("No sudo detected. Skipping system upgrade");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
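
The RubyGems step now only escalates to `sudo -EH` when `gem` does not come from a user-level version manager. A small illustrative check of that decision (the paths are made up):

```rust
use std::path::Path;

// Version managers (asdf, rbenv, RVM) install gem under $HOME, so no sudo is needed there.
fn gem_needs_sudo(gem_path: &Path) -> bool {
    let path = gem_path.to_string_lossy();
    !["asdf", ".rbenv", ".rvm"].into_iter().any(|marker| path.contains(marker))
}

fn main() {
    assert!(!gem_needs_sudo(Path::new("/home/me/.rbenv/shims/gem")));
    assert!(gem_needs_sudo(Path::new("/usr/bin/gem")));
}
```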
|
||||
|
||||
pub fn run_haxelib_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let haxelib = utils::require("haxelib")?;
|
||||
let haxelib = require("haxelib")?;
|
||||
|
||||
let haxelib_dir =
|
||||
PathBuf::from(std::str::from_utf8(&Command::new(&haxelib).arg("config").output_checked()?.stdout)?.trim())
|
||||
@@ -118,9 +158,8 @@ pub fn run_haxelib_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut command = if directory_writable {
|
||||
ctx.run_type().execute(&haxelib)
|
||||
} else {
|
||||
let mut c = ctx
|
||||
.run_type()
|
||||
.execute(ctx.sudo().as_ref().ok_or(TopgradeError::SudoRequired)?);
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let mut c = ctx.run_type().execute(sudo);
|
||||
c.arg(&haxelib);
|
||||
c
|
||||
};
|
||||
@@ -129,7 +168,7 @@ pub fn run_haxelib_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn run_sheldon(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sheldon = utils::require("sheldon")?;
|
||||
let sheldon = require("sheldon")?;
|
||||
|
||||
print_separator("Sheldon");
|
||||
|
||||
@@ -139,20 +178,21 @@ pub fn run_sheldon(ctx: &ExecutionContext) -> Result<()> {
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_fossil(run_type: RunType) -> Result<()> {
|
||||
let fossil = utils::require("fossil")?;
|
||||
pub fn run_fossil(ctx: &ExecutionContext) -> Result<()> {
|
||||
let fossil = require("fossil")?;
|
||||
|
||||
print_separator("Fossil");
|
||||
|
||||
run_type.execute(fossil).args(["all", "sync"]).status_checked()
|
||||
ctx.run_type().execute(fossil).args(["all", "sync"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_micro(run_type: RunType) -> Result<()> {
|
||||
let micro = utils::require("micro")?;
|
||||
pub fn run_micro(ctx: &ExecutionContext) -> Result<()> {
|
||||
let micro = require("micro")?;
|
||||
|
||||
print_separator("micro");
|
||||
|
||||
let stdout = run_type
|
||||
let stdout = ctx
|
||||
.run_type()
|
||||
.execute(micro)
|
||||
.args(["-plugin", "update"])
|
||||
.output_checked_utf8()?
|
||||
@@ -172,43 +212,59 @@ pub fn run_micro(run_type: RunType) -> Result<()> {
|
||||
target_os = "netbsd",
|
||||
target_os = "dragonfly"
|
||||
)))]
|
||||
pub fn run_apm(run_type: RunType) -> Result<()> {
|
||||
let apm = utils::require("apm")?;
|
||||
pub fn run_apm(ctx: &ExecutionContext) -> Result<()> {
|
||||
let apm = require("apm")?;
|
||||
|
||||
print_separator("Atom Package Manager");
|
||||
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(apm)
|
||||
.args(["upgrade", "--confirm=false"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_rustup(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
|
||||
let rustup = utils::require("rustup")?;
|
||||
pub fn run_rustup(ctx: &ExecutionContext) -> Result<()> {
|
||||
let rustup = require("rustup")?;
|
||||
|
||||
print_separator("rustup");
|
||||
|
||||
if rustup.canonicalize()?.is_descendant_of(base_dirs.home_dir()) {
|
||||
run_type.execute(&rustup).args(["self", "update"]).status_checked()?;
|
||||
}
|
||||
|
||||
run_type.execute(&rustup).arg("update").status_checked()
|
||||
ctx.run_type().execute(rustup).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_juliaup(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
|
||||
let juliaup = utils::require("juliaup")?;
|
||||
pub fn run_rye(ctx: &ExecutionContext) -> Result<()> {
|
||||
let rye = require("rye")?;
|
||||
|
||||
print_separator("Rye");
|
||||
ctx.run_type().execute(rye).args(["self", "update"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_elan(ctx: &ExecutionContext) -> Result<()> {
|
||||
let elan = require("elan")?;
|
||||
|
||||
print_separator("elan");
|
||||
ctx.run_type()
|
||||
.execute(&elan)
|
||||
.args(["self", "update"])
|
||||
.status_checked()?;
|
||||
ctx.run_type().execute(&elan).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_juliaup(ctx: &ExecutionContext) -> Result<()> {
|
||||
let juliaup = require("juliaup")?;
|
||||
|
||||
print_separator("juliaup");
|
||||
|
||||
if juliaup.canonicalize()?.is_descendant_of(base_dirs.home_dir()) {
|
||||
run_type.execute(&juliaup).args(["self", "update"]).status_checked()?;
|
||||
if juliaup.canonicalize()?.is_descendant_of(&HOME_DIR) {
|
||||
ctx.run_type()
|
||||
.execute(&juliaup)
|
||||
.args(["self", "update"])
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
run_type.execute(&juliaup).arg("update").status_checked()
|
||||
ctx.run_type().execute(&juliaup).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_choosenim(ctx: &ExecutionContext) -> Result<()> {
|
||||
let choosenim = utils::require("choosenim")?;
|
||||
let choosenim = require("choosenim")?;
|
||||
|
||||
print_separator("choosenim");
|
||||
let run_type = ctx.run_type();
|
||||
@@ -217,39 +273,42 @@ pub fn run_choosenim(ctx: &ExecutionContext) -> Result<()> {
|
||||
run_type.execute(&choosenim).args(["update", "stable"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_krew_upgrade(run_type: RunType) -> Result<()> {
|
||||
let krew = utils::require("kubectl-krew")?;
|
||||
pub fn run_krew_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let krew = require("kubectl-krew")?;
|
||||
|
||||
print_separator("Krew");
|
||||
|
||||
run_type.execute(krew).args(["upgrade"]).status_checked()
|
||||
ctx.run_type().execute(krew).args(["upgrade"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_gcloud_components_update(run_type: RunType) -> Result<()> {
|
||||
let gcloud = utils::require("gcloud")?;
|
||||
pub fn run_gcloud_components_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let gcloud = require("gcloud")?;
|
||||
|
||||
if gcloud.starts_with("/snap") {
|
||||
Ok(())
|
||||
} else {
|
||||
print_separator("gcloud");
|
||||
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(gcloud)
|
||||
.args(["components", "update", "--quiet"])
|
||||
.status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_jetpack(run_type: RunType) -> Result<()> {
|
||||
let jetpack = utils::require("jetpack")?;
|
||||
pub fn run_jetpack(ctx: &ExecutionContext) -> Result<()> {
|
||||
let jetpack = require("jetpack")?;
|
||||
|
||||
print_separator("Jetpack");
|
||||
|
||||
run_type.execute(jetpack).args(["global", "update"]).status_checked()
|
||||
ctx.run_type()
|
||||
.execute(jetpack)
|
||||
.args(["global", "update"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_rtcl(ctx: &ExecutionContext) -> Result<()> {
|
||||
let rupdate = utils::require("rupdate")?;
|
||||
let rupdate = require("rupdate")?;
|
||||
|
||||
print_separator("rtcl");
|
||||
|
||||
@@ -257,12 +316,18 @@ pub fn run_rtcl(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn run_opam_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let opam = utils::require("opam")?;
|
||||
let opam = require("opam")?;
|
||||
|
||||
print_separator("OCaml Package Manager");
|
||||
|
||||
ctx.run_type().execute(&opam).arg("update").status_checked()?;
|
||||
ctx.run_type().execute(&opam).arg("upgrade").status_checked()?;
|
||||
|
||||
let mut command = ctx.run_type().execute(&opam);
|
||||
command.arg("upgrade");
|
||||
if ctx.config().yes(Step::Opam) {
|
||||
command.arg("--yes");
|
||||
}
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
ctx.run_type().execute(&opam).arg("clean").status_checked()?;
|
||||
@@ -272,7 +337,7 @@ pub fn run_opam_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn run_vcpkg_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let vcpkg = utils::require("vcpkg")?;
|
||||
let vcpkg = require("vcpkg")?;
|
||||
print_separator("vcpkg");
|
||||
|
||||
#[cfg(unix)]
|
||||
@@ -284,9 +349,8 @@ pub fn run_vcpkg_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut command = if is_root_install {
|
||||
ctx.run_type().execute(&vcpkg)
|
||||
} else {
|
||||
let mut c = ctx
|
||||
.run_type()
|
||||
.execute(ctx.sudo().as_ref().ok_or(TopgradeError::SudoRequired)?);
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let mut c = ctx.run_type().execute(sudo);
|
||||
c.arg(&vcpkg);
|
||||
c
|
||||
};
|
||||
@@ -294,17 +358,63 @@ pub fn run_vcpkg_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
command.args(["upgrade", "--no-dry-run"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pipx_update(run_type: RunType) -> Result<()> {
|
||||
let pipx = utils::require("pipx")?;
|
||||
pub fn run_vscode_extensions_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
// Calling vscode in WSL may install a server instead of updating extensions (https://github.com/topgrade-rs/topgrade/issues/594#issuecomment-1782157367)
|
||||
if is_wsl()? {
|
||||
return Err(SkipStep(String::from("Should not run in WSL")).into());
|
||||
}
|
||||
|
||||
let vscode = require("code")?;
|
||||
|
||||
// Vscode has update command only since 1.86 version ("january 2024" update), disable the update for prior versions
|
||||
// Use command `code --version` which returns 3 lines: version, git commit, instruction set. We parse only the first one
|
||||
let version: Result<Version> = match Command::new(&vscode)
|
||||
.arg("--version")
|
||||
.output_checked_utf8()?
|
||||
.stdout
|
||||
.lines()
|
||||
.next()
|
||||
{
|
||||
Some(item) => Version::parse(item).map_err(|err| err.into()),
|
||||
_ => return Err(SkipStep(String::from("Cannot find vscode version")).into()),
|
||||
};
|
||||
|
||||
if !matches!(version, Ok(version) if version >= Version::new(1, 86, 0)) {
|
||||
return Err(SkipStep(String::from("Too old vscode version to have update extensions command")).into());
|
||||
}
|
||||
|
||||
print_separator("Visual Studio Code extensions");
|
||||
|
||||
ctx.run_type()
|
||||
.execute(vscode)
|
||||
.arg("--update-extensions")
|
||||
.status_checked()
|
||||
}
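
`code --update-extensions` only exists since VS Code 1.86, so the step parses the first line of `code --version` and compares it with `semver`. A standalone sketch of that gate, assuming the `semver` crate and a `code` binary on PATH:

```rust
use std::process::Command;
use semver::Version;

fn supports_update_extensions() -> bool {
    // `code --version` prints three lines: version, commit hash, architecture.
    let Ok(output) = Command::new("code").arg("--version").output() else {
        return false;
    };
    let stdout = String::from_utf8_lossy(&output.stdout);
    stdout
        .lines()
        .next()
        .and_then(|first| Version::parse(first.trim()).ok())
        .map_or(false, |version| version >= Version::new(1, 86, 0))
}
```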
|
||||
|
||||
pub fn run_pipx_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pipx = require("pipx")?;
|
||||
print_separator("pipx");
|
||||
|
||||
run_type.execute(pipx).arg("upgrade-all").status_checked()
|
||||
let mut command_args = vec!["upgrade-all", "--include-injected"];
|
||||
|
||||
// pipx version 1.4.0 introduced a new command argument `pipx upgrade-all --quiet`
|
||||
// (see https://pipx.pypa.io/stable/docs/#pipx-upgrade-all)
|
||||
let version_str = Command::new(&pipx)
|
||||
.args(["--version"])
|
||||
.output_checked_utf8()
|
||||
.map(|s| s.stdout.trim().to_owned());
|
||||
let version = Version::parse(&version_str?);
|
||||
if matches!(version, Ok(version) if version >= Version::new(1, 4, 0)) {
|
||||
command_args.push("--quiet")
|
||||
}
|
||||
|
||||
ctx.run_type().execute(pipx).args(command_args).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_conda_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let conda = utils::require("conda")?;
|
||||
let conda = require("conda")?;
|
||||
|
||||
let output = Command::new("conda")
|
||||
let output = Command::new(&conda)
|
||||
.args(["config", "--show", "auto_activate_base"])
|
||||
.output_checked_utf8()?;
|
||||
debug!("Conda output: {}", output.stdout);
|
||||
@@ -314,26 +424,118 @@ pub fn run_conda_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("Conda");
|
||||
|
||||
let mut command = ctx.run_type().execute(conda);
|
||||
command.args(["update", "--all", "-n", "base"]);
|
||||
if ctx.config().yes(Step::Conda) {
|
||||
command.arg("--yes");
|
||||
}
|
||||
command.status_checked()
|
||||
}

pub fn run_mamba_update(ctx: &ExecutionContext) -> Result<()> {
    let mamba = require("mamba")?;

    let output = Command::new(&mamba)
        .args(["config", "--show", "auto_activate_base"])
        .output_checked_utf8()?;
    debug!("Mamba output: {}", output.stdout);
    if output.stdout.contains("False") {
        return Err(SkipStep("auto_activate_base is set to False".to_string()).into());
    }

    print_separator("Mamba");

    let mut command = ctx.run_type().execute(mamba);
    command.args(["update", "--all", "-n", "base"]);
    if ctx.config().yes(Step::Mamba) {
        command.arg("--yes");
    }
    command.status_checked()
}

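
Both the Conda and the new Mamba step share the same guard: the update is skipped when `auto_activate_base` is reported as `False`. A std-only sketch of that probe (the binary name is passed in because it differs between the two steps):

```rust
use std::process::Command;

fn base_env_active(binary: &str) -> std::io::Result<bool> {
    let output = Command::new(binary)
        .args(["config", "--show", "auto_activate_base"])
        .output()?;
    // Typical output: `auto_activate_base: True` (or `False`).
    Ok(!String::from_utf8_lossy(&output.stdout).contains("False"))
}
```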
pub fn run_miktex_packages_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let miktex = require("miktex")?;
|
||||
print_separator("miktex");
|
||||
|
||||
ctx.run_type()
|
||||
.execute(conda)
|
||||
.args(["update", "--all", "-y"])
|
||||
.execute(miktex)
|
||||
.args(["packages", "update"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pip3_update(run_type: RunType) -> Result<()> {
|
||||
let python3 = utils::require("python3")?;
|
||||
pub fn run_pip3_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let py = require("python").and_then(check_is_python_2_or_shim);
|
||||
let py3 = require("python3").and_then(check_is_python_2_or_shim);
|
||||
|
||||
let python3 = match (py, py3) {
|
||||
// prefer `python` if it is available and is a valid Python 3.
|
||||
(Ok(py), _) => py,
|
||||
(Err(_), Ok(py3)) => py3,
|
||||
(Err(py_err), Err(py3_err)) => {
|
||||
return Err(SkipStep(format!("Skip due to following reasons: {} {}", py_err, py3_err)).into());
|
||||
}
|
||||
};
|
||||
|
||||
Command::new(&python3)
|
||||
.args(["-m", "pip"])
|
||||
.output_checked_utf8()
|
||||
.map_err(|_| SkipStep("pip does not exists".to_string()))?;
|
||||
.map_err(|_| SkipStep("pip does not exist".to_string()))?;
|
||||
|
||||
let check_extern_managed_script = "import sysconfig; from os import path; print('Y') if path.isfile(path.join(sysconfig.get_path('stdlib'), 'EXTERNALLY-MANAGED')) else print('N')";
|
||||
let output = Command::new(&python3)
|
||||
.args(["-c", check_extern_managed_script])
|
||||
.output_checked_utf8()?;
|
||||
let stdout = output.stdout.trim();
|
||||
let extern_managed = match stdout {
|
||||
"N" => false,
|
||||
"Y" => true,
|
||||
_ => unreachable!("unexpected output from `check_extern_managed_script`"),
|
||||
};
|
||||
|
||||
let allow_break_sys_pkg = match Command::new(&python3)
|
||||
.args(["-m", "pip", "config", "get", "global.break-system-packages"])
|
||||
.output_checked_utf8()
|
||||
{
|
||||
Ok(output) => {
|
||||
let stdout = output.stdout.trim();
|
||||
stdout
|
||||
.parse::<bool>()
|
||||
.expect("unexpected output that is not `true` or `false`")
|
||||
}
|
||||
// it can fail because this key may not be set
|
||||
//
|
||||
// ```sh
|
||||
// $ pip --version
|
||||
// pip 23.0.1 from /usr/lib/python3/dist-packages/pip (python 3.11)
|
||||
//
|
||||
// $ pip config get global.break-system-packages
|
||||
// ERROR: No such key - global.break-system-packages
|
||||
//
|
||||
// $ echo $?
|
||||
// 1
|
||||
// ```
|
||||
Err(_) => false,
|
||||
};
|
||||
|
||||
debug!("pip3 externally managed: {} ", extern_managed);
|
||||
debug!("pip3 global.break-system-packages: {}", allow_break_sys_pkg);
|
||||
|
||||
// Even though pip3 is externally managed, we should still update it if
|
||||
// `global.break-system-packages` is true.
|
||||
if extern_managed && !allow_break_sys_pkg {
|
||||
return Err(SkipStep(
|
||||
"Skip pip3 update as it is externally managed and global.break-system-packages is not true".to_string(),
|
||||
)
|
||||
.into());
|
||||
}
|
||||
|
||||
print_separator("pip3");
|
||||
if std::env::var("VIRTUAL_ENV").is_ok() {
|
||||
print_warning("This step is will be skipped when running inside a virtual environment");
|
||||
if env::var("VIRTUAL_ENV").is_ok() {
|
||||
print_warning("This step is skipped when running inside a virtual environment");
|
||||
return Err(SkipStep("Does not run inside a virtual environment".to_string()).into());
|
||||
}
|
||||
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&python3)
|
||||
.args(["-m", "pip", "install", "--upgrade", "--user", "pip"])
|
||||
.status_checked()
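
The pip3 step now implements the PEP 668 rules: an interpreter is "externally managed" when `EXTERNALLY-MANAGED` exists in its stdlib directory, and the upgrade is still allowed if `global.break-system-packages` is set to true. A standalone sketch of that check, assuming `python3` on PATH:

```rust
use std::process::Command;

fn pip_upgrade_allowed() -> std::io::Result<bool> {
    // PEP 668 marker: EXTERNALLY-MANAGED in the stdlib directory.
    let marker = Command::new("python3")
        .args([
            "-c",
            "import sysconfig, os; print(os.path.isfile(os.path.join(sysconfig.get_path('stdlib'), 'EXTERNALLY-MANAGED')))",
        ])
        .output()?;
    let externally_managed = String::from_utf8_lossy(&marker.stdout).trim() == "True";

    // `pip config get` exits non-zero when the key is unset; treat that as false.
    let break_system_packages = Command::new("python3")
        .args(["-m", "pip", "config", "get", "global.break-system-packages"])
        .output()
        .map(|o| o.status.success() && String::from_utf8_lossy(&o.stdout).trim() == "true")
        .unwrap_or(false);

    Ok(!externally_managed || break_system_packages)
}
```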
|
||||
@@ -357,40 +559,64 @@ pub fn run_pip_review_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_pip_review_local_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pip_review = require("pip-review")?;
|
||||
|
||||
print_separator("pip-review (local)");
|
||||
|
||||
if !ctx.config().enable_pip_review_local() {
|
||||
print_warning(
|
||||
"Pip-review (local) is disabled by default. Enable it by setting enable_pip_review_local=true in the configuration.",
|
||||
);
|
||||
return Err(SkipStep(String::from("Pip-review (local) is disabled by default")).into());
|
||||
}
|
||||
ctx.run_type()
|
||||
.execute(pip_review)
|
||||
.arg("--local")
|
||||
.arg("--auto")
|
||||
.status_checked_with_codes(&[1])?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_pipupgrade_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pipupgrade = require("pipupgrade")?;
|
||||
|
||||
print_separator("Pipupgrade");
|
||||
if !ctx.config().enable_pip_review() {
|
||||
if !ctx.config().enable_pipupgrade() {
|
||||
print_warning(
|
||||
"Pipupgrade is disabled by default. Enable it by setting enable_pipupgrade=true in the configuration.",
|
||||
);
|
||||
return Err(SkipStep(String::from("Pipupgrade is disabled by default")).into());
|
||||
}
|
||||
ctx.run_type().execute(pipupgrade).status_checked()?;
|
||||
ctx.run_type()
|
||||
.execute(pipupgrade)
|
||||
.args(ctx.config().pipupgrade_arguments().split_whitespace())
|
||||
.status_checked()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_stack_update(run_type: RunType) -> Result<()> {
|
||||
if utils::require("ghcup").is_ok() {
|
||||
pub fn run_stack_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
if require("ghcup").is_ok() {
|
||||
// `ghcup` is present and probably(?) being used to install `stack`.
|
||||
// Don't upgrade `stack`, let `ghcup` handle it. Per `ghcup install stack`:
|
||||
// !!! Additionally, you should upgrade stack only through ghcup and not use 'stack upgrade' !!!
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let stack = utils::require("stack")?;
|
||||
let stack = require("stack")?;
|
||||
print_separator("stack");
|
||||
|
||||
run_type.execute(stack).arg("upgrade").status_checked()
|
||||
ctx.run_type().execute(stack).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_ghcup_update(run_type: RunType) -> Result<()> {
|
||||
let ghcup = utils::require("ghcup")?;
|
||||
pub fn run_ghcup_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let ghcup = require("ghcup")?;
|
||||
print_separator("ghcup");
|
||||
|
||||
run_type.execute(ghcup).arg("upgrade").status_checked()
|
||||
ctx.run_type().execute(ghcup).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_tlmgr_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -402,8 +628,8 @@ pub fn run_tlmgr_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
}
|
||||
|
||||
let tlmgr = utils::require("tlmgr")?;
|
||||
let kpsewhich = utils::require("kpsewhich")?;
|
||||
let tlmgr = require("tlmgr")?;
|
||||
let kpsewhich = require("kpsewhich")?;
|
||||
let tlmgr_directory = {
|
||||
let mut d = PathBuf::from(
|
||||
&Command::new(kpsewhich)
|
||||
@@ -425,9 +651,8 @@ pub fn run_tlmgr_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut command = if directory_writable {
|
||||
ctx.run_type().execute(&tlmgr)
|
||||
} else {
|
||||
let mut c = ctx
|
||||
.run_type()
|
||||
.execute(ctx.sudo().as_ref().ok_or(TopgradeError::SudoRequired)?);
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let mut c = ctx.run_type().execute(sudo);
|
||||
c.arg(&tlmgr);
|
||||
c
|
||||
};
|
||||
@@ -436,42 +661,50 @@ pub fn run_tlmgr_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
command.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_chezmoi_update(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
|
||||
let chezmoi = utils::require("chezmoi")?;
|
||||
base_dirs.home_dir().join(".local/share/chezmoi").require()?;
|
||||
pub fn run_chezmoi_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let chezmoi = require("chezmoi")?;
|
||||
HOME_DIR.join(".local/share/chezmoi").require()?;
|
||||
|
||||
print_separator("chezmoi");
|
||||
|
||||
run_type.execute(chezmoi).arg("update").status_checked()
|
||||
ctx.run_type().execute(chezmoi).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_myrepos_update(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
|
||||
let myrepos = utils::require("mr")?;
|
||||
base_dirs.home_dir().join(".mrconfig").require()?;
|
||||
pub fn run_myrepos_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let myrepos = require("mr")?;
|
||||
HOME_DIR.join(".mrconfig").require()?;
|
||||
|
||||
print_separator("myrepos");
|
||||
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&myrepos)
|
||||
.arg("--directory")
|
||||
.arg(base_dirs.home_dir())
|
||||
.arg(&*HOME_DIR)
|
||||
.arg("checkout")
|
||||
.status_checked()?;
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&myrepos)
|
||||
.arg("--directory")
|
||||
.arg(base_dirs.home_dir())
|
||||
.arg(&*HOME_DIR)
|
||||
.arg("update")
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_custom_command(name: &str, command: &str, ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator(name);
|
||||
ctx.run_type().execute(shell()).arg("-c").arg(command).status_checked()
|
||||
let mut exec = ctx.run_type().execute(shell());
|
||||
#[cfg(unix)]
|
||||
let command = if let Some(command) = command.strip_prefix("-i ") {
|
||||
exec.arg("-i");
|
||||
command
|
||||
} else {
|
||||
command
|
||||
};
|
||||
exec.arg("-c").arg(command).status_checked()
|
||||
}
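
Custom commands can now opt into an interactive shell by starting with `-i `; the prefix is stripped and the flag is forwarded before `-c`. A minimal sketch of that parsing (the shell name `sh` is an assumption):

```rust
use std::process::Command;

fn build_custom_command(command: &str) -> Command {
    let mut shell = Command::new("sh");
    // A leading "-i " means "run the rest in an interactive shell".
    let command = match command.strip_prefix("-i ") {
        Some(rest) => {
            shell.arg("-i");
            rest
        }
        None => command,
    };
    shell.arg("-c").arg(command);
    shell
}
```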
|
||||
|
||||
pub fn run_composer_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let composer = utils::require("composer")?;
|
||||
let composer = require("composer")?;
|
||||
let composer_home = Command::new(&composer)
|
||||
.args(["global", "config", "--absolute", "--quiet", "home"])
|
||||
.output_checked_utf8()
|
||||
@@ -479,9 +712,9 @@ pub fn run_composer_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
.map(|s| PathBuf::from(s.stdout.trim()))?
|
||||
.require()?;
|
||||
|
||||
if !composer_home.is_descendant_of(ctx.base_dirs().home_dir()) {
|
||||
if !composer_home.is_descendant_of(&HOME_DIR) {
|
||||
return Err(SkipStep(format!(
|
||||
"Composer directory {} isn't a decandent of the user's home directory",
|
||||
"Composer directory {} isn't a descendant of the user's home directory",
|
||||
composer_home.display()
|
||||
))
|
||||
.into());
|
||||
@@ -499,8 +732,9 @@ pub fn run_composer_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
};
|
||||
|
||||
if has_update {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
ctx.run_type()
|
||||
.execute(ctx.sudo().as_ref().unwrap())
|
||||
.execute(sudo)
|
||||
.arg(&composer)
|
||||
.arg("self-update")
|
||||
.status_checked()?;
|
||||
@@ -516,7 +750,7 @@ pub fn run_composer_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let output: Utf8Output = output.try_into()?;
|
||||
print!("{}\n{}", output.stdout, output.stderr);
|
||||
if output.stdout.contains("valet") || output.stderr.contains("valet") {
|
||||
if let Some(valet) = utils::which("valet") {
|
||||
if let Some(valet) = which("valet") {
|
||||
ctx.run_type().execute(valet).arg("install").status_checked()?;
|
||||
}
|
||||
}
|
||||
@@ -526,13 +760,18 @@ pub fn run_composer_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn run_dotnet_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let dotnet = utils::require("dotnet")?;
|
||||
let dotnet = require("dotnet")?;
|
||||
|
||||
//Skip when the `dotnet tool list` subcommand fails. (This is expected when a dotnet runtime is installed but no SDK.)
|
||||
// Skip when the `dotnet tool list` subcommand fails.
|
||||
// (This is expected when a dotnet runtime is installed but no SDK.)
|
||||
let output = match ctx
|
||||
.run_type()
|
||||
.execute(&dotnet)
|
||||
.args(["tool", "list", "--global"])
|
||||
// dotnet will print a greeting message on its first run, from this question:
|
||||
// https://stackoverflow.com/q/70493706/14092446
|
||||
// Setting `DOTNET_NOLOGO` to `true` should disable it
|
||||
.env("DOTNET_NOLOGO", "true")
|
||||
.output_checked_utf8()
|
||||
{
|
||||
Ok(output) => output,
|
||||
@@ -540,15 +779,31 @@ pub fn run_dotnet_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
return Err(SkipStep(String::from(
|
||||
"Error running `dotnet tool list`. This is expected when a dotnet runtime is installed but no SDK.",
|
||||
))
|
||||
.into())
|
||||
.into());
|
||||
}
|
||||
};
|
||||
|
||||
if !output.stdout.starts_with("Package Id") {
|
||||
return Err(SkipStep(String::from("dotnet did not output packages")).into());
|
||||
}
|
||||
|
||||
let mut packages = output.stdout.lines().skip(2).filter(|line| !line.is_empty()).peekable();
|
||||
let mut in_header = true;
|
||||
let mut packages = output
|
||||
.stdout
|
||||
.lines()
|
||||
// Skip the header:
|
||||
//
|
||||
// Package Id Version Commands
|
||||
// -------------------------------------
|
||||
.skip_while(|line| {
|
||||
// The .NET SDK respects locale, so the header can be printed
|
||||
// in languages other than English. The separator should hopefully
|
||||
// always be at least 10 -'s long.
|
||||
if in_header && line.starts_with("----------") {
|
||||
in_header = false;
|
||||
true
|
||||
} else {
|
||||
in_header
|
||||
}
|
||||
})
|
||||
.filter(|line| !line.is_empty())
|
||||
.peekable();
|
||||
|
||||
if packages.peek().is_none() {
|
||||
return Err(SkipStep(String::from("No dotnet global tools installed")).into());
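
Because the .NET SDK localizes the `dotnet tool list` header, the parser no longer skips a fixed two lines; it drops everything up to and including the dashed separator. A self-contained sketch of that `skip_while` trick, with a German-looking header as made-up test input:

```rust
fn package_lines(output: &str) -> Vec<&str> {
    let mut in_header = true;
    output
        .lines()
        .skip_while(|line| {
            if in_header && line.starts_with("----------") {
                in_header = false;
                true // also drop the separator row itself
            } else {
                in_header
            }
        })
        .filter(|line| !line.is_empty())
        .collect()
}

fn main() {
    let sample = "Paket-ID      Version  Befehle\n-------------------------------\ndotnet-ef     8.0.0    dotnet-ef\n";
    assert_eq!(package_lines(sample), vec!["dotnet-ef     8.0.0    dotnet-ef"]);
}
```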
|
||||
@@ -569,49 +824,53 @@ pub fn run_dotnet_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn run_helix_grammars(ctx: &ExecutionContext) -> Result<()> {
|
||||
utils::require("helix")?;
|
||||
let helix = require("helix").or(require("hx"))?;
|
||||
|
||||
print_separator("Helix");
|
||||
|
||||
ctx.run_type()
|
||||
.execute(ctx.sudo().as_ref().ok_or(TopgradeError::SudoRequired)?)
|
||||
.args(["helix", "--grammar", "fetch"])
|
||||
.execute(&helix)
|
||||
.args(["--grammar", "fetch"])
|
||||
.status_checked()
|
||||
.with_context(|| "Failed to download helix grammars!")?;
|
||||
|
||||
ctx.run_type()
|
||||
.execute(ctx.sudo().as_ref().ok_or(TopgradeError::SudoRequired)?)
|
||||
.args(["helix", "--grammar", "build"])
|
||||
.execute(&helix)
|
||||
.args(["--grammar", "build"])
|
||||
.status_checked()
|
||||
.with_context(|| "Failed to build helix grammars!")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_raco_update(run_type: RunType) -> Result<()> {
|
||||
let raco = utils::require("raco")?;
|
||||
pub fn run_raco_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let raco = require("raco")?;
|
||||
|
||||
print_separator("Racket Package Manager");
|
||||
|
||||
run_type.execute(raco).args(["pkg", "update", "--all"]).status_checked()
|
||||
ctx.run_type()
|
||||
.execute(raco)
|
||||
.args(["pkg", "update", "--all"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn bin_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let bin = utils::require("bin")?;
|
||||
let bin = require("bin")?;
|
||||
|
||||
print_separator("Bin");
|
||||
ctx.run_type().execute(bin).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn spicetify_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let spicetify = utils::require("spicetify")?;
|
||||
// As of 04-07-2023 NixOS packages Spicetify with the `spicetify-cli` binary name
|
||||
let spicetify = require("spicetify").or(require("spicetify-cli"))?;
|
||||
|
||||
print_separator("Spicetify");
|
||||
ctx.run_type().execute(spicetify).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_ghcli_extensions_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let gh = utils::require("gh")?;
|
||||
let gh = require("gh")?;
|
||||
let result = Command::new(&gh).args(["extensions", "list"]).output_checked_utf8();
|
||||
if result.is_err() {
|
||||
debug!("GH result {:?}", result);
|
||||
@@ -626,7 +885,7 @@ pub fn run_ghcli_extensions_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
pub fn update_julia_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let julia = utils::require("julia")?;
|
||||
let julia = require("julia")?;
|
||||
|
||||
print_separator("Julia Packages");
|
||||
|
||||
@@ -636,14 +895,14 @@ pub fn update_julia_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_helm_repo_update(run_type: RunType) -> Result<()> {
|
||||
let helm = utils::require("helm")?;
|
||||
pub fn run_helm_repo_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let helm = require("helm")?;
|
||||
|
||||
print_separator("Helm");
|
||||
|
||||
let no_repo = "no repositories found";
|
||||
let mut success = true;
|
||||
let mut exec = run_type.execute(helm);
|
||||
let mut exec = ctx.run_type().execute(helm);
|
||||
if let Err(e) = exec.arg("repo").arg("update").status_checked() {
|
||||
error!("Updating repositories failed: {}", e);
|
||||
success = match exec.output_checked_utf8() {
|
||||
@@ -661,3 +920,85 @@ pub fn run_helm_repo_update(run_type: RunType) -> Result<()> {
|
||||
Err(eyre!(StepFailed))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_stew(ctx: &ExecutionContext) -> Result<()> {
|
||||
let stew = require("stew")?;
|
||||
|
||||
print_separator("stew");
|
||||
ctx.run_type().execute(stew).args(["upgrade", "--all"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_bob(ctx: &ExecutionContext) -> Result<()> {
|
||||
let bob = require("bob")?;
|
||||
|
||||
print_separator("Bob");
|
||||
|
||||
ctx.run_type().execute(bob).args(["update", "--all"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_certbot(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
let certbot = require("certbot")?;
|
||||
|
||||
print_separator("Certbot");
|
||||
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.arg(certbot);
|
||||
cmd.arg("renew");
|
||||
|
||||
cmd.status_checked()
|
||||
}
|
||||
|
||||
/// Run `$ freshclam` to update ClamAV signature database
|
||||
///
|
||||
/// doc: https://docs.clamav.net/manual/Usage/SignatureManagement.html#freshclam
|
||||
pub fn run_freshclam(ctx: &ExecutionContext) -> Result<()> {
|
||||
let freshclam = require("freshclam")?;
|
||||
print_separator("Update ClamAV Database(FreshClam)");
|
||||
ctx.run_type().execute(freshclam).status_checked()
|
||||
}
|
||||
|
||||
/// Involve `pio upgrade` to update PlatformIO core.
|
||||
pub fn run_platform_io(ctx: &ExecutionContext) -> Result<()> {
|
||||
// We use the full path because by default the binary is not in `PATH`:
|
||||
// https://github.com/topgrade-rs/topgrade/issues/754#issuecomment-2020537559
|
||||
#[cfg(unix)]
|
||||
fn bin_path() -> PathBuf {
|
||||
HOME_DIR.join(".platformio/penv/bin/pio")
|
||||
}
|
||||
#[cfg(windows)]
|
||||
fn bin_path() -> PathBuf {
|
||||
HOME_DIR.join(".platformio/penv/Scripts/pio.exe")
|
||||
}
|
||||
|
||||
let bin_path = require(bin_path())?;
|
||||
|
||||
print_separator("PlatformIO Core");
|
||||
|
||||
ctx.run_type().execute(bin_path).arg("upgrade").status_checked()
|
||||
}

/// Run `lensfun-update-data` to update lensfun database.
///
/// `sudo` will be used if `use_sudo` configuration entry is set to true.
pub fn run_lensfun_update_data(ctx: &ExecutionContext) -> Result<()> {
    const SEPARATOR: &str = "Lensfun's database update";
    let lensfun_update_data = require("lensfun-update-data")?;
    const EXIT_CODE_WHEN_NO_UPDATE: i32 = 1;

    if ctx.config().lensfun_use_sudo() {
        let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
        print_separator(SEPARATOR);
        ctx.run_type()
            .execute(sudo)
            .arg(lensfun_update_data)
            // `lensfun-update-data` returns 1 when there is no update available
            // which should be considered success
            .status_checked_with_codes(&[EXIT_CODE_WHEN_NO_UPDATE])
    } else {
        print_separator(SEPARATOR);
        ctx.run_type()
            .execute(lensfun_update_data)
            .status_checked_with_codes(&[EXIT_CODE_WHEN_NO_UPDATE])
    }
}

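`lensfun-update-data` exits with code 1 when the database is already current, which the step treats as success via `status_checked_with_codes`. A std-only sketch of the same rule:

```rust
use std::process::Command;

fn run_lensfun_update() -> std::io::Result<bool> {
    const EXIT_CODE_WHEN_NO_UPDATE: i32 = 1;
    let status = Command::new("lensfun-update-data").status()?;
    // Success, or the documented "already up to date" exit code, both count as OK.
    Ok(status.success() || status.code() == Some(EXIT_CODE_WHEN_NO_UPDATE))
}
```
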
src/steps/git.rs (465 changed lines)
@@ -3,139 +3,173 @@ use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, Output, Stdio};
|
||||
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::{eyre, Result};
|
||||
use console::style;
|
||||
use futures::stream::{iter, FuturesUnordered};
|
||||
use futures::StreamExt;
|
||||
use futures::stream::{iter, FuturesUnordered, StreamExt};
|
||||
use glob::{glob_with, MatchOptions};
|
||||
use tokio::process::Command as AsyncCommand;
|
||||
use tokio::runtime;
|
||||
use tracing::{debug, error};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::config::Step;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::RunType;
|
||||
use crate::steps::emacs::Emacs;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{which, PathExt};
|
||||
use crate::{error::SkipStep, terminal::print_warning};
|
||||
use crate::utils::{require, PathExt};
|
||||
use crate::{error::SkipStep, terminal::print_warning, HOME_DIR};
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
|
||||
#[cfg(unix)]
|
||||
use crate::XDG_DIRS;
|
||||
|
||||
#[cfg(windows)]
|
||||
use crate::WINDOWS_DIRS;
|
||||
|
||||
pub fn run_git_pull(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut repos = RepoStep::try_new()?;
|
||||
let config = ctx.config();
|
||||
|
||||
// handle built-in repos
|
||||
if config.use_predefined_git_repos() {
|
||||
// should be executed on all the platforms
|
||||
{
|
||||
if config.should_run(Step::Emacs) {
|
||||
let emacs = Emacs::new();
|
||||
if !emacs.is_doom() {
|
||||
if let Some(directory) = emacs.directory() {
|
||||
repos.insert_if_repo(directory);
|
||||
}
|
||||
}
|
||||
repos.insert_if_repo(HOME_DIR.join(".doom.d"));
|
||||
}
|
||||
|
||||
if config.should_run(Step::Vim) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".vim"));
|
||||
repos.insert_if_repo(HOME_DIR.join(".config/nvim"));
|
||||
}
|
||||
|
||||
repos.insert_if_repo(HOME_DIR.join(".ideavimrc"));
|
||||
repos.insert_if_repo(HOME_DIR.join(".intellimacs"));
|
||||
|
||||
if config.should_run(Step::Rcm) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".dotfiles"));
|
||||
}
|
||||
|
||||
let powershell = crate::steps::powershell::Powershell::new();
|
||||
if let Some(profile) = powershell.profile() {
|
||||
repos.insert_if_repo(profile);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
repos.insert_if_repo(crate::steps::zsh::zshrc());
|
||||
if config.should_run(Step::Tmux) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".tmux"));
|
||||
}
|
||||
repos.insert_if_repo(HOME_DIR.join(".config/fish"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("openbox"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("bspwm"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("i3"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("sway"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
repos.insert_if_repo(
|
||||
WINDOWS_DIRS
|
||||
.cache_dir()
|
||||
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
|
||||
);
|
||||
|
||||
super::os::windows::insert_startup_scripts(&mut repos).ok();
|
||||
}
|
||||
}
|
||||
|
||||
// Handle user-defined repos
|
||||
if let Some(custom_git_repos) = config.git_repos() {
|
||||
for git_repo in custom_git_repos {
|
||||
repos.glob_insert(git_repo);
|
||||
}
|
||||
}
|
||||
|
||||
// Warn the user about the bad patterns.
|
||||
//
|
||||
// NOTE: this should be executed **before** skipping the Git step or the
|
||||
// user won't receive this warning in the cases where all the paths configured
|
||||
// are bad patterns.
|
||||
repos
|
||||
.bad_patterns
|
||||
.iter()
|
||||
.for_each(|pattern| print_warning(format!("Path {pattern} did not contain any git repositories")));
|
||||
|
||||
if repos.is_repos_empty() {
|
||||
return Err(SkipStep(String::from("No repositories to pull")).into());
|
||||
}
|
||||
|
||||
print_separator("Git repositories");
|
||||
|
||||
repos.pull_repos(ctx)
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
static PATH_PREFIX: &str = "\\\\?\\";
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Git {
|
||||
git: Option<PathBuf>,
|
||||
}
|
||||
|
||||
pub struct Repositories<'a> {
|
||||
git: &'a Git,
|
||||
repositories: HashSet<String>,
|
||||
pub struct RepoStep {
|
||||
git: PathBuf,
|
||||
repos: HashSet<PathBuf>,
|
||||
glob_match_options: MatchOptions,
|
||||
bad_patterns: Vec<String>,
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn output_checked_utf8(output: Output) -> Result<()> {
|
||||
if !(output.status.success()) {
|
||||
let stderr = String::from_utf8(output.stderr).unwrap();
|
||||
Err(eyre!(stderr))
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stderr = stderr.trim();
|
||||
Err(eyre!("{stderr}"))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
async fn pull_repository(repo: String, git: &Path, ctx: &ExecutionContext<'_>) -> Result<()> {
|
||||
let path = repo.to_string();
|
||||
let before_revision = get_head_revision(git, &repo);
|
||||
|
||||
println!("{} {}", style("Pulling").cyan().bold(), path);
|
||||
|
||||
let mut command = AsyncCommand::new(git);
|
||||
|
||||
command
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args(["pull", "--ff-only"]);
|
||||
|
||||
if let Some(extra_arguments) = ctx.config().git_arguments() {
|
||||
command.args(extra_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let pull_output = command.output().await?;
|
||||
let submodule_output = AsyncCommand::new(git)
|
||||
.args(["submodule", "update", "--recursive"])
|
||||
.current_dir(&repo)
|
||||
.stdin(Stdio::null())
|
||||
.output()
|
||||
.await?;
|
||||
let result = output_checked_utf8(pull_output).and_then(|_| output_checked_utf8(submodule_output));
|
||||
|
||||
if let Err(message) = &result {
|
||||
println!("{} pulling {}", style("Failed").red().bold(), &repo);
|
||||
print!("{message}");
|
||||
} else {
|
||||
let after_revision = get_head_revision(git, &repo);
|
||||
|
||||
match (&before_revision, &after_revision) {
|
||||
(Some(before), Some(after)) if before != after => {
|
||||
println!("{} {}:", style("Changed").yellow().bold(), &repo);
|
||||
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args([
|
||||
"--no-pager",
|
||||
"log",
|
||||
"--no-decorate",
|
||||
"--oneline",
|
||||
&format!("{before}..{after}"),
|
||||
])
|
||||
.status_checked()?;
|
||||
println!();
|
||||
}
|
||||
_ => {
|
||||
println!("{} {}", style("Up-to-date").green().bold(), &repo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result.map(|_| ())
|
||||
}
|
||||
|
||||
fn get_head_revision(git: &Path, repo: &str) -> Option<String> {
|
||||
fn get_head_revision<P: AsRef<Path>>(git: &Path, repo: P) -> Option<String> {
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(repo)
|
||||
.current_dir(repo.as_ref())
|
||||
.args(["rev-parse", "HEAD"])
|
||||
.output_checked_utf8()
|
||||
.map(|output| output.stdout.trim().to_string())
|
||||
.map_err(|e| {
|
||||
error!("Error getting revision for {}: {}", repo, e);
|
||||
error!("Error getting revision for {}: {}", repo.as_ref().display(), e);
|
||||
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
fn has_remotes(git: &Path, repo: &str) -> Option<bool> {
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(repo)
|
||||
.args(["remote", "show"])
|
||||
.output_checked_utf8()
|
||||
.map(|output| output.stdout.lines().count() > 0)
|
||||
.map_err(|e| {
|
||||
error!("Error getting remotes for {}: {}", repo, e);
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
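
The git step decides what to print and what to skip with two cheap probes: `git rev-parse HEAD` before and after the pull to detect changes, and `git remote show` to skip repositories with no remotes. A std-only sketch of both, assuming `git` on PATH:

```rust
use std::path::Path;
use std::process::Command;

fn head_revision(repo: &Path) -> Option<String> {
    let out = Command::new("git")
        .current_dir(repo)
        .args(["rev-parse", "HEAD"])
        .output()
        .ok()?;
    out.status
        .success()
        .then(|| String::from_utf8_lossy(&out.stdout).trim().to_string())
}

fn has_remotes(repo: &Path) -> bool {
    // `git remote show` lists one remote per line; no output means nothing to pull from.
    Command::new("git")
        .current_dir(repo)
        .args(["remote", "show"])
        .output()
        .map(|o| !o.stdout.is_empty())
        .unwrap_or(false)
}
```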
|
||||
impl RepoStep {
|
||||
/// Try to create a `RepoStep`, fail if `git` is not found.
|
||||
pub fn try_new() -> Result<Self> {
|
||||
let git = require("git")?;
|
||||
let mut glob_match_options = MatchOptions::new();
|
||||
|
||||
impl Git {
|
||||
pub fn new() -> Self {
|
||||
Self { git: which("git") }
|
||||
if cfg!(windows) {
|
||||
glob_match_options.case_sensitive = false;
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
git,
|
||||
repos: HashSet::new(),
|
||||
bad_patterns: Vec::new(),
|
||||
glob_match_options,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<String> {
|
||||
/// Try to get the root of the repo specified in `path`.
|
||||
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<PathBuf> {
|
||||
match path.as_ref().canonicalize() {
|
||||
Ok(mut path) => {
|
||||
debug_assert!(path.exists());
|
||||
@@ -159,105 +193,56 @@ impl Git {
|
||||
path_string
|
||||
};
|
||||
|
||||
if let Some(git) = &self.git {
|
||||
let output = Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(path)
|
||||
.args(["rev-parse", "--show-toplevel"])
|
||||
.output_checked_utf8()
|
||||
.ok()
|
||||
.map(|output| output.stdout.trim().to_string());
|
||||
return output;
|
||||
}
|
||||
let output = Command::new(&self.git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(path)
|
||||
.args(["rev-parse", "--show-toplevel"])
|
||||
.output_checked_utf8()
|
||||
.ok()
|
||||
// trim the last newline char
|
||||
.map(|output| PathBuf::from(output.stdout.trim()));
|
||||
|
||||
return output;
|
||||
}
|
||||
Err(e) => match e.kind() {
|
||||
io::ErrorKind::NotFound => debug!("{} does not exists", path.as_ref().display()),
|
||||
io::ErrorKind::NotFound => debug!("{} does not exist", path.as_ref().display()),
|
||||
_ => error!("Error looking for {}: {}", path.as_ref().display(), e),
|
||||
},
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
pub fn multi_pull_step(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
|
||||
if repositories.repositories.is_empty() {
|
||||
return Err(SkipStep(String::from("No repositories to pull")).into());
|
||||
}
|
||||
|
||||
print_separator("Git repositories");
|
||||
repositories
|
||||
.bad_patterns
|
||||
.iter()
|
||||
.for_each(|pattern| print_warning(format!("Path {pattern} did not contain any git repositories")));
|
||||
self.multi_pull(repositories, ctx)
|
||||
}
|
||||
|
||||
pub fn multi_pull(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
|
||||
let git = self.git.as_ref().unwrap();
|
||||
|
||||
if let RunType::Dry = ctx.run_type() {
|
||||
repositories
|
||||
.repositories
|
||||
.iter()
|
||||
.for_each(|repo| println!("Would pull {}", &repo));
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let futures_iterator = repositories
|
||||
.repositories
|
||||
.iter()
|
||||
.filter(|repo| match has_remotes(git, repo) {
|
||||
Some(false) => {
|
||||
println!(
|
||||
"{} {} because it has no remotes",
|
||||
style("Skipping").yellow().bold(),
|
||||
repo
|
||||
);
|
||||
false
|
||||
}
|
||||
_ => true, // repo has remotes or command to check for remotes has failed. proceed to pull anyway.
|
||||
})
|
||||
.map(|repo| pull_repository(repo.clone(), git, ctx));
|
||||
|
||||
let stream_of_futures = if let Some(limit) = ctx.config().git_concurrency_limit() {
|
||||
iter(futures_iterator).buffer_unordered(limit).boxed()
|
||||
} else {
|
||||
futures_iterator.collect::<FuturesUnordered<_>>().boxed()
|
||||
};
|
||||
|
||||
let basic_rt = runtime::Runtime::new()?;
|
||||
let results = basic_rt.block_on(async { stream_of_futures.collect::<Vec<Result<()>>>().await });
|
||||
|
||||
let error = results.into_iter().find(|r| r.is_err());
|
||||
error.unwrap_or(Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Repositories<'a> {
|
||||
pub fn new(git: &'a Git) -> Self {
|
||||
let mut glob_match_options = MatchOptions::new();
|
||||
|
||||
if cfg!(windows) {
|
||||
glob_match_options.case_sensitive = false;
|
||||
}
|
||||
|
||||
Self {
|
||||
git,
|
||||
repositories: HashSet::new(),
|
||||
bad_patterns: Vec::new(),
|
||||
glob_match_options,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if `path` is a git repo, if yes, add it to `self.repos`.
|
||||
///
|
||||
/// Return the check result.
|
||||
pub fn insert_if_repo<P: AsRef<Path>>(&mut self, path: P) -> bool {
|
||||
if let Some(repo) = self.git.get_repo_root(path) {
|
||||
self.repositories.insert(repo);
|
||||
if let Some(repo) = self.get_repo_root(path) {
|
||||
self.repos.insert(repo);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if `repo` has a remote.
|
||||
fn has_remotes<P: AsRef<Path>>(&self, repo: P) -> Option<bool> {
|
||||
let mut cmd = Command::new(&self.git);
|
||||
cmd.stdin(Stdio::null())
|
||||
.current_dir(repo.as_ref())
|
||||
.args(["remote", "show"]);
|
||||
|
||||
let res = cmd.output_checked_utf8();
|
||||
|
||||
res.map(|output| output.stdout.lines().count() > 0)
|
||||
.map_err(|e| {
|
||||
error!("Error getting remotes for {}: {}", repo.as_ref().display(), e);
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
/// Similar to `insert_if_repo`, with glob support.
|
||||
pub fn glob_insert(&mut self, pattern: &str) {
|
||||
if let Ok(glob) = glob_with(pattern, self.glob_match_options) {
|
||||
let mut last_git_repo: Option<PathBuf> = None;
|
||||
@@ -267,7 +252,7 @@ impl<'a> Repositories<'a> {
|
||||
if let Some(last_git_repo) = &last_git_repo {
|
||||
if path.is_descendant_of(last_git_repo) {
|
||||
debug!(
|
||||
"Skipping {} because it's a decendant of last known repo {}",
|
||||
"Skipping {} because it's a descendant of last known repo {}",
|
||||
path.display(),
|
||||
last_git_repo.display()
|
||||
);
|
||||
@@ -292,14 +277,130 @@ impl<'a> Repositories<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.repositories.is_empty()
|
||||
/// True if `self.repos` is empty.
|
||||
pub fn is_repos_empty(&self) -> bool {
|
||||
self.repos.is_empty()
|
||||
}
|
||||
|
||||
/// Remove `path` from `self.repos`.
|
||||
///
|
||||
// `cfg(unix)` because it is only used in the oh-my-zsh step.
|
||||
#[cfg(unix)]
|
||||
pub fn remove(&mut self, path: &str) {
|
||||
let _removed = self.repositories.remove(path);
|
||||
pub fn remove<P: AsRef<Path>>(&mut self, path: P) {
|
||||
let _removed = self.repos.remove(path.as_ref());
|
||||
debug_assert!(_removed);
|
||||
}
|
||||
|
||||
/// Try to pull a repo.
|
||||
async fn pull_repo<P: AsRef<Path>>(&self, ctx: &ExecutionContext<'_>, repo: P) -> Result<()> {
|
||||
let before_revision = get_head_revision(&self.git, &repo);
|
||||
|
||||
if ctx.config().verbose() {
|
||||
println!("{} {}", style("Pulling").cyan().bold(), repo.as_ref().display());
|
||||
}
|
||||
|
||||
let mut command = AsyncCommand::new(&self.git);
|
||||
|
||||
command
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args(["pull", "--ff-only"]);
|
||||
|
||||
if let Some(extra_arguments) = ctx.config().git_arguments() {
|
||||
command.args(extra_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let pull_output = command.output().await?;
|
||||
let submodule_output = AsyncCommand::new(&self.git)
|
||||
.args(["submodule", "update", "--recursive"])
|
||||
.current_dir(&repo)
|
||||
.stdin(Stdio::null())
|
||||
.output()
|
||||
.await?;
|
||||
let result = output_checked_utf8(pull_output)
|
||||
.and_then(|_| output_checked_utf8(submodule_output))
|
||||
.wrap_err_with(|| format!("Failed to pull {}", repo.as_ref().display()));
|
||||
|
||||
if result.is_err() {
|
||||
println!("{} pulling {}", style("Failed").red().bold(), repo.as_ref().display());
|
||||
} else {
|
||||
let after_revision = get_head_revision(&self.git, repo.as_ref());
|
||||
|
||||
match (&before_revision, &after_revision) {
|
||||
(Some(before), Some(after)) if before != after => {
|
||||
println!("{} {}", style("Changed").yellow().bold(), repo.as_ref().display());
|
||||
|
||||
Command::new(&self.git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args([
|
||||
"--no-pager",
|
||||
"log",
|
||||
"--no-decorate",
|
||||
"--oneline",
|
||||
&format!("{before}..{after}"),
|
||||
])
|
||||
.status_checked()?;
|
||||
println!();
|
||||
}
|
||||
_ => {
|
||||
if ctx.config().verbose() {
|
||||
println!("{} {}", style("Up-to-date").green().bold(), repo.as_ref().display());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result.map(|_| ())
|
||||
}
|
||||
|
||||
/// Pull the repositories specified in `self.repos`.
|
||||
///
|
||||
/// # NOTE
|
||||
/// This function will create an async runtime and do the real job so the
|
||||
/// function itself is not async.
|
||||
fn pull_repos(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
if ctx.run_type().dry() {
|
||||
self.repos
|
||||
.iter()
|
||||
.for_each(|repo| println!("Would pull {}", repo.display()));
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if !ctx.config().verbose() {
|
||||
println!(
|
||||
"\n{} updated repositories will be shown...\n",
|
||||
style("Only").green().bold()
|
||||
);
|
||||
}
|
||||
|
||||
let futures_iterator = self
|
||||
.repos
|
||||
.iter()
|
||||
.filter(|repo| match self.has_remotes(repo) {
|
||||
Some(false) => {
|
||||
println!(
|
||||
"{} {} because it has no remotes",
|
||||
style("Skipping").yellow().bold(),
|
||||
repo.display()
|
||||
);
|
||||
false
|
||||
}
|
||||
_ => true, // The repo has remotes, or the command to check for remotes failed; proceed to pull anyway.
|
||||
})
|
||||
.map(|repo| self.pull_repo(ctx, repo));
|
||||
|
||||
let stream_of_futures = if let Some(limit) = ctx.config().git_concurrency_limit() {
|
||||
iter(futures_iterator).buffer_unordered(limit).boxed()
|
||||
} else {
|
||||
futures_iterator.collect::<FuturesUnordered<_>>().boxed()
|
||||
};
|
||||
|
||||
let basic_rt = runtime::Runtime::new()?;
|
||||
let results = basic_rt.block_on(async { stream_of_futures.collect::<Vec<Result<()>>>().await });
|
||||
|
||||
let error = results.into_iter().find(|r| r.is_err());
|
||||
error.unwrap_or(Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
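The `pull_repos` hunk above blocks on a freshly created runtime and only bounds concurrency when `git_concurrency_limit` is configured (`buffer_unordered` with the limit, `FuturesUnordered` otherwise). The following is a minimal, self-contained sketch of that pattern using only the `futures` crate; the function names, the stand-in pull task, and the fixed limit of 2 are illustrative assumptions, not the project's API.

// Minimal sketch: drain a set of pull futures with an optional concurrency cap.
use futures::executor::block_on;
use futures::stream::{self, FuturesUnordered, StreamExt};

// Stand-in for the real `git pull`; always succeeds here.
async fn pull_one(repo: &str) -> Result<(), String> {
    println!("pulling {repo}");
    Ok(())
}

fn pull_all(repos: &[&str], limit: Option<usize>) -> Result<(), String> {
    let pulls = repos.iter().map(|repo| pull_one(repo));

    // Cap the number of in-flight pulls when a limit is configured,
    // otherwise let FuturesUnordered drive them all at once.
    let stream = match limit {
        Some(n) => stream::iter(pulls).buffer_unordered(n).boxed_local(),
        None => pulls.collect::<FuturesUnordered<_>>().boxed_local(),
    };

    let results = block_on(stream.collect::<Vec<Result<(), String>>>());

    // Surface the first error, if any pull failed.
    results.into_iter().find(|r| r.is_err()).unwrap_or(Ok(()))
}

fn main() {
    pull_all(&["repo-a", "repo-b", "repo-c"], Some(2)).unwrap();
}

Boxing both arms to the same stream type is what lets the limited and unlimited paths share the rest of the pipeline.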
@@ -4,27 +4,27 @@ use std::process::Command;
|
||||
use color_eyre::eyre::Result;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::executor::RunType;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils;
|
||||
use crate::utils::PathExt;
|
||||
|
||||
/// <https://github.com/Gelio/go-global-update>
|
||||
pub fn run_go_global_update(run_type: RunType) -> Result<()> {
|
||||
pub fn run_go_global_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let go_global_update = require_go_bin("go-global-update")?;
|
||||
|
||||
print_separator("go-global-update");
|
||||
|
||||
run_type.execute(go_global_update).status_checked()
|
||||
ctx.run_type().execute(go_global_update).status_checked()
|
||||
}
|
||||
|
||||
/// <https://github.com/nao1215/gup>
|
||||
pub fn run_go_gup(run_type: RunType) -> Result<()> {
|
||||
pub fn run_go_gup(ctx: &ExecutionContext) -> Result<()> {
|
||||
let gup = require_go_bin("gup")?;
|
||||
|
||||
print_separator("gup");
|
||||
|
||||
run_type.execute(gup).arg("update").status_checked()
|
||||
ctx.run_type().execute(gup).arg("update").status_checked()
|
||||
}
|
||||
|
||||
/// Get the path of a Go binary.
|
||||
|
||||
@@ -4,7 +4,8 @@ use std::os::unix::fs::MetadataExt;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
use crate::utils::require_option;
|
||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
||||
use crate::HOME_DIR;
|
||||
use color_eyre::eyre::Result;
|
||||
#[cfg(target_os = "linux")]
|
||||
use nix::unistd::Uid;
|
||||
@@ -91,7 +92,7 @@ impl NPM {
|
||||
fn upgrade(&self, ctx: &ExecutionContext, use_sudo: bool) -> Result<()> {
|
||||
let args = ["update", self.global_location_arg()];
|
||||
if use_sudo {
|
||||
let sudo = require_option(ctx.sudo().clone(), String::from("sudo is not installed"))?;
|
||||
let sudo = require_option(ctx.sudo().clone(), REQUIRE_SUDO.to_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.arg(&self.command)
|
||||
@@ -155,7 +156,7 @@ impl Yarn {
|
||||
let args = ["global", "upgrade"];
|
||||
|
||||
if use_sudo {
|
||||
let sudo = require_option(ctx.sudo().clone(), String::from("sudo is not installed"))?;
|
||||
let sudo = require_option(ctx.sudo().clone(), REQUIRE_SUDO.to_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.arg(self.yarn.as_ref().unwrap_or(&self.command))
|
||||
@@ -229,7 +230,7 @@ pub fn run_npm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
pub fn run_pnpm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pnpm = require("pnpm").map(|b| NPM::new(b, NPMVariant::Pnpm))?;
|
||||
|
||||
print_separator("Node Package Manager");
|
||||
print_separator("Performant Node Package Manager");
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
@@ -265,7 +266,7 @@ pub fn run_yarn_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
pub fn deno_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let deno = require("deno")?;
|
||||
let deno_dir = ctx.base_dirs().home_dir().join(".deno");
|
||||
let deno_dir = HOME_DIR.join(".deno");
|
||||
|
||||
if !deno.canonicalize()?.is_descendant_of(&deno_dir) {
|
||||
let skip_reason = SkipStep("Deno installed outside of .deno directory".to_string());
|
||||
|
||||
@@ -26,7 +26,7 @@ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
ctx.run_type().execute(&pkg).arg("clean").status_checked()?;
|
||||
|
||||
let apt = require("apt")?;
|
||||
let mut command = ctx.run_type().execute(&apt);
|
||||
let mut command = ctx.run_type().execute(apt);
|
||||
command.arg("autoremove");
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.arg("-y");
|
||||
|
||||
@@ -277,7 +277,7 @@ impl Aura {
|
||||
|
||||
impl ArchPackageManager for Aura {
|
||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = which("sudo").unwrap_or_else(PathBuf::new);
|
||||
let sudo = which("sudo").unwrap_or_default();
|
||||
let mut aur_update = ctx.run_type().execute(&sudo);
|
||||
|
||||
if sudo.ends_with("sudo") {
|
||||
|
||||
@@ -1,26 +1,34 @@
|
||||
use crate::command::CommandExt;
|
||||
use crate::executor::RunType;
|
||||
use crate::sudo::Sudo;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::require_option;
|
||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
||||
use crate::Step;
|
||||
use color_eyre::eyre::Result;
|
||||
use std::process::Command;
|
||||
|
||||
pub fn upgrade_packages(sudo: Option<&Sudo>, run_type: RunType) -> Result<()> {
|
||||
let sudo = require_option(sudo, String::from("No sudo detected"))?;
|
||||
pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
print_separator("DragonFly BSD Packages");
|
||||
run_type
|
||||
.execute(sudo)
|
||||
.args(["/usr/local/sbin/pkg", "upgrade"])
|
||||
.status_checked()
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.args(["/usr/local/sbin/pkg", "upgrade"]);
|
||||
if ctx.config().yes(Step::System) {
|
||||
cmd.arg("-y");
|
||||
}
|
||||
cmd.status_checked()
|
||||
}
|
||||
|
||||
pub fn audit_packages(sudo: Option<&Sudo>) -> Result<()> {
|
||||
if let Some(sudo) = sudo {
|
||||
println!();
|
||||
Command::new(sudo)
|
||||
.args(["/usr/local/sbin/pkg", "audit", "-Fr"])
|
||||
.status_checked()?;
|
||||
pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
|
||||
print_separator("DragonFly BSD Audit");
|
||||
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
if !Command::new(sudo)
|
||||
.args(["/usr/local/sbin/pkg", "audit", "-Fr"])
|
||||
.status()?
|
||||
.success()
|
||||
{
|
||||
println!("The package audit was successful, but vulnerable packages still remain on the system");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,27 +1,25 @@
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::RunType;
|
||||
use crate::sudo::Sudo;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::require_option;
|
||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
||||
use crate::Step;
|
||||
use color_eyre::eyre::Result;
|
||||
use std::process::Command;
|
||||
|
||||
pub fn upgrade_freebsd(sudo: Option<&Sudo>, run_type: RunType) -> Result<()> {
|
||||
let sudo = require_option(sudo, String::from("No sudo detected"))?;
|
||||
pub fn upgrade_freebsd(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
print_separator("FreeBSD Update");
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.args(["/usr/sbin/freebsd-update", "fetch", "install"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn upgrade_packages(ctx: &ExecutionContext, sudo: Option<&Sudo>, run_type: RunType) -> Result<()> {
|
||||
let sudo = require_option(sudo, String::from("No sudo detected"))?;
|
||||
pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
print_separator("FreeBSD Packages");
|
||||
|
||||
let mut command = run_type.execute(sudo);
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
|
||||
command.args(["/usr/sbin/pkg", "upgrade"]);
|
||||
if ctx.config().yes(Step::System) {
|
||||
@@ -30,12 +28,13 @@ pub fn upgrade_packages(ctx: &ExecutionContext, sudo: Option<&Sudo>, run_type: R
|
||||
command.status_checked()
|
||||
}
|
||||
|
||||
pub fn audit_packages(sudo: Option<&Sudo>) -> Result<()> {
|
||||
if let Some(sudo) = sudo {
|
||||
println!();
|
||||
Command::new(sudo)
|
||||
.args(["/usr/sbin/pkg", "audit", "-Fr"])
|
||||
.status_checked()?;
|
||||
}
|
||||
pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
|
||||
print_separator("FreeBSD Audit");
|
||||
|
||||
Command::new(sudo)
|
||||
.args(["/usr/sbin/pkg", "audit", "-Fr"])
|
||||
.status_checked()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,16 +1,18 @@
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::RunType;
|
||||
use crate::terminal::{print_separator, prompt_yesno};
|
||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
||||
use crate::{utils::require, Step};
|
||||
use color_eyre::eyre::Result;
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::process::Command;
|
||||
use tracing::debug;
|
||||
|
||||
pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {
|
||||
require("port")?;
|
||||
let sudo = ctx.sudo().as_ref().unwrap();
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
|
||||
print_separator("MacPorts");
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
@@ -30,17 +32,17 @@ pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_mas(run_type: RunType) -> Result<()> {
|
||||
pub fn run_mas(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mas = require("mas")?;
|
||||
print_separator("macOS App Store");
|
||||
|
||||
run_type.execute(mas).arg("upgrade").status_checked()
|
||||
ctx.run_type().execute(mas).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
pub fn upgrade_macos(ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator("macOS system update");
|
||||
|
||||
let should_ask = !(ctx.config().yes(Step::System)) || (ctx.config().dry_run());
|
||||
let should_ask = !(ctx.config().yes(Step::System) || ctx.config().dry_run());
|
||||
if should_ask {
|
||||
println!("Finding available software");
|
||||
if system_update_available()? {
|
||||
@@ -92,3 +94,148 @@ pub fn run_sparkle(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_xcodes(ctx: &ExecutionContext) -> Result<()> {
|
||||
let xcodes = require("xcodes")?;
|
||||
print_separator("Xcodes");
|
||||
|
||||
let should_ask = !(ctx.config().yes(Step::Xcodes) || ctx.config().dry_run());
|
||||
|
||||
let releases = ctx
|
||||
.run_type()
|
||||
.execute(&xcodes)
|
||||
.args(["update"])
|
||||
.output_checked_utf8()?
|
||||
.stdout;
|
||||
|
||||
let releases_installed: Vec<String> = releases
|
||||
.lines()
|
||||
.filter(|r| r.contains("(Installed)"))
|
||||
.map(String::from)
|
||||
.collect();
|
||||
|
||||
if releases_installed.is_empty() {
|
||||
println!("No Xcode releases installed.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let (installed_gm, installed_beta, installed_regular) =
|
||||
releases_installed
|
||||
.iter()
|
||||
.fold((false, false, false), |(gm, beta, regular), release| {
|
||||
(
|
||||
gm || release.contains("GM") || release.contains("Release Candidate"),
|
||||
beta || release.contains("Beta"),
|
||||
regular
|
||||
|| !(release.contains("GM")
|
||||
|| release.contains("Release Candidate")
|
||||
|| release.contains("Beta")),
|
||||
)
|
||||
});
|
||||
|
||||
let releases_gm = releases
|
||||
.lines()
|
||||
.filter(|&r| r.matches("GM").count() > 0 || r.matches("Release Candidate").count() > 0)
|
||||
.map(String::from)
|
||||
.collect();
|
||||
let releases_beta = releases
|
||||
.lines()
|
||||
.filter(|&r| r.matches("Beta").count() > 0)
|
||||
.map(String::from)
|
||||
.collect();
|
||||
let releases_regular = releases
|
||||
.lines()
|
||||
.filter(|&r| {
|
||||
r.matches("GM").count() == 0
|
||||
&& r.matches("Release Candidate").count() == 0
|
||||
&& r.matches("Beta").count() == 0
|
||||
})
|
||||
.map(String::from)
|
||||
.collect();
|
||||
|
||||
if installed_gm {
|
||||
process_xcodes_releases(releases_gm, should_ask, ctx)?;
|
||||
}
|
||||
if installed_beta {
|
||||
process_xcodes_releases(releases_beta, should_ask, ctx)?;
|
||||
}
|
||||
if installed_regular {
|
||||
process_xcodes_releases(releases_regular, should_ask, ctx)?;
|
||||
}
|
||||
|
||||
let releases_new = ctx
|
||||
.run_type()
|
||||
.execute(&xcodes)
|
||||
.args(["list"])
|
||||
.output_checked_utf8()?
|
||||
.stdout;
|
||||
|
||||
let releases_gm_new_installed: HashSet<_> = releases_new
|
||||
.lines()
|
||||
.filter(|release| {
|
||||
release.contains("(Installed)") && (release.contains("GM") || release.contains("Release Candidate"))
|
||||
})
|
||||
.collect();
|
||||
let releases_beta_new_installed: HashSet<_> = releases_new
|
||||
.lines()
|
||||
.filter(|release| release.contains("(Installed)") && release.contains("Beta"))
|
||||
.collect();
|
||||
let releases_regular_new_installed: HashSet<_> = releases_new
|
||||
.lines()
|
||||
.filter(|release| {
|
||||
release.contains("(Installed)")
|
||||
&& !(release.contains("GM") || release.contains("Release Candidate") || release.contains("Beta"))
|
||||
})
|
||||
.collect();
|
||||
|
||||
for releases_new_installed in [
|
||||
releases_gm_new_installed,
|
||||
releases_beta_new_installed,
|
||||
releases_regular_new_installed,
|
||||
] {
|
||||
if should_ask && releases_new_installed.len() == 2 {
|
||||
let answer_uninstall = prompt_yesno("Would you like to move the former Xcode release to the trash?")?;
|
||||
if answer_uninstall {
|
||||
let _ = ctx
|
||||
.run_type()
|
||||
.execute(&xcodes)
|
||||
.args([
|
||||
"uninstall",
|
||||
releases_new_installed.iter().next().cloned().unwrap_or_default(),
|
||||
])
|
||||
.status_checked();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn process_xcodes_releases(releases_filtered: Vec<String>, should_ask: bool, ctx: &ExecutionContext) -> Result<()> {
|
||||
let xcodes = require("xcodes")?;
|
||||
|
||||
if releases_filtered
|
||||
.last()
|
||||
.map(|s| !s.contains("(Installed)"))
|
||||
.unwrap_or(true)
|
||||
&& !releases_filtered.is_empty()
|
||||
{
|
||||
println!(
|
||||
"New Xcode release detected: {}",
|
||||
releases_filtered.last().cloned().unwrap_or_default()
|
||||
);
|
||||
if should_ask {
|
||||
let answer_install = prompt_yesno("Would you like to install it?")?;
|
||||
if answer_install {
|
||||
let _ = ctx
|
||||
.run_type()
|
||||
.execute(xcodes)
|
||||
.args(["install", &releases_filtered.last().cloned().unwrap_or_default()])
|
||||
.status_checked();
|
||||
}
|
||||
println!();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
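The fold inside `update_xcodes` above reduces the list of installed releases to three flags: whether a GM/Release Candidate, a Beta, or a regular build is present. Here is a standalone sketch of just that classification, with hypothetical sample strings; the helper name and inputs are illustrative, not taken from the project.

// Classify installed Xcode releases into (gm_or_rc, beta, regular) flags.
fn classify(installed: &[&str]) -> (bool, bool, bool) {
    installed
        .iter()
        .fold((false, false, false), |(gm, beta, regular), release| {
            let is_gm = release.contains("GM") || release.contains("Release Candidate");
            let is_beta = release.contains("Beta");
            // A flag, once set, stays set: OR the previous accumulator into the new one.
            (gm || is_gm, beta || is_beta, regular || !(is_gm || is_beta))
        })
}

fn main() {
    let (gm, beta, regular) = classify(&["15.0 (Installed)", "15.1 Beta 2 (Installed)"]);
    assert!(!gm && beta && regular);
    println!("gm: {gm}, beta: {beta}, regular: {regular}");
}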
@@ -1,23 +1,33 @@
|
||||
use crate::executor::RunType;
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::require_option;
|
||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
||||
use color_eyre::eyre::Result;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub fn upgrade_openbsd(sudo: Option<&PathBuf>, run_type: RunType) -> Result<()> {
|
||||
let sudo = require_option(sudo, String::from("No sudo detected"))?;
|
||||
pub fn upgrade_openbsd(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
print_separator("OpenBSD Update");
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.args(&["/usr/sbin/sysupgrade", "-n"])
|
||||
.args(["/usr/sbin/sysupgrade", "-n"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn upgrade_packages(sudo: Option<&PathBuf>, run_type: RunType) -> Result<()> {
|
||||
let sudo = require_option(sudo, String::from("No sudo detected"))?;
|
||||
pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
print_separator("OpenBSD Packages");
|
||||
run_type
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.args(["/usr/sbin/pkg_delete", "-ac"])
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.args(&["/usr/sbin/pkg_add", "-u"])
|
||||
.status_checked()
|
||||
.args(["/usr/sbin/pkg_add", "-u"])
|
||||
.status_checked()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
NAME="Anarchy Linux"
|
||||
PRETTY_NAME="Anarchy Linux"
|
||||
ID=anarchy
|
||||
ID_LIKE=anarchylinux
|
||||
ANSI_COLOR="0;36"
|
||||
HOME_URL="https://anarchylinux.org/"
|
||||
@@ -1,10 +0,0 @@
|
||||
NAME="Antergos Linux"
|
||||
VERSION="18.7-ISO-Rolling"
|
||||
ID="antergos"
|
||||
ID_LIKE="arch"
|
||||
PRETTY_NAME="Antergos Linux"
|
||||
CPE_NAME="cpe:/o:antergosproject:antergos:18.7"
|
||||
ANSI_COLOR="1;34;40"
|
||||
HOME_URL="antergos.com"
|
||||
SUPPORT_URL="forum.antergos.com"
|
||||
BUG_REPORT_URL="@antergos"
|
||||
8
src/steps/os/os_release/deepin
Normal file
@@ -0,0 +1,8 @@
|
||||
PRETTY_NAME="Deepin 20.9"
|
||||
NAME="Deepin"
|
||||
VERSION_ID="20.9"
|
||||
VERSION="20.9"
|
||||
VERSION_CODENAME="apricot"
|
||||
ID=Deepin
|
||||
HOME_URL="https://www.deepin.org/"
|
||||
BUG_REPORT_URL="https://bbs.deepin.org/"
|
||||
22
src/steps/os/os_release/fedoraiot
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39.20240415.0 (IoT Edition)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39.20240415.0 (IoT Edition)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
HOME_URL="https://fedoraproject.org/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora/f39/system-administrators-guide/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://bugzilla.redhat.com/"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-11-12
|
||||
VARIANT="IoT Edition"
|
||||
VARIANT_ID=iot
|
||||
OSTREE_VERSION='39.20240415.0'
|
||||
23
src/steps/os/os_release/fedorakinoite
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39.20240105.0 (Kinoite)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39.20240105.0 (Kinoite)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://kinoite.fedoraproject.org"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-kinoite/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://pagure.io/fedora-kde/SIG/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-11-12
|
||||
VARIANT="Kinoite"
|
||||
VARIANT_ID=kinoite
|
||||
OSTREE_VERSION='39.20240105.0'
|
||||
22
src/steps/os/os_release/fedoraonyx
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Onyx)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Onyx)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/onyx/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-onyx/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://bugzilla.redhat.com/"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Onyx"
|
||||
VARIANT_ID=onyx
|
||||
22
src/steps/os/os_release/fedorasericea
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Sericea)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Sericea)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/sericea/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Sericea"
|
||||
VARIANT_ID=sericea
|
||||
22
src/steps/os/os_release/fedorasilverblue
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Silverblue)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Silverblue)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://silverblue.fedoraproject.org"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-silverblue/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://github.com/fedora-silverblue/issue-tracker/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Silverblue"
|
||||
VARIANT_ID=silverblue
|
||||
23
src/steps/os/os_release/fedoraswayatomic
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="40.20240426.0 (Sway Atomic)"
|
||||
ID=fedora
|
||||
VERSION_ID=40
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f40"
|
||||
PRETTY_NAME="Fedora Linux 40.20240426.0 (Sway Atomic)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:40"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/atomic-desktops/sway/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=40
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=40
|
||||
SUPPORT_END=2025-05-13
|
||||
VARIANT="Sway Atomic"
|
||||
VARIANT_ID=sway-atomic
|
||||
OSTREE_VERSION='40.20240426.0'
|
||||
23
src/steps/os/os_release/nobara
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Nobara Linux"
|
||||
VERSION="39 (GNOME Edition)"
|
||||
ID=nobara
|
||||
ID_LIKE="rhel centos fedora"
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Nobara Linux 39 (GNOME Edition)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=nobara-logo-icon
|
||||
CPE_NAME="cpe:/o:nobaraproject:nobara:39"
|
||||
DEFAULT_HOSTNAME="nobara"
|
||||
HOME_URL="https://nobaraproject.org/"
|
||||
DOCUMENTATION_URL="https://www.nobaraproject.org/"
|
||||
SUPPORT_URL="https://www.nobaraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/gloriouseggroll/nobara-images"
|
||||
REDHAT_BUGZILLA_PRODUCT="Nobara"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Nobara"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="GNOME Edition"
|
||||
VARIANT_ID=gnome
|
||||
11
src/steps/os/os_release/solus
Normal file
@@ -0,0 +1,11 @@
|
||||
NAME="Solus"
|
||||
VERSION="4.4"
|
||||
ID="solus"
|
||||
VERSION_CODENAME=harmony
|
||||
VERSION_ID="4.4"
|
||||
PRETTY_NAME="Solus 4.4 Harmony"
|
||||
ANSI_COLOR="1;34"
|
||||
HOME_URL="https://getsol.us"
|
||||
SUPPORT_URL="https://help.getsol.us/docs/user/contributing/getting-involved"
|
||||
BUG_REPORT_URL="https://dev.getsol.us/"
|
||||
LOGO="distributor-logo-solus"
|
||||
12
src/steps/os/os_release/vanilla
Normal file
@@ -0,0 +1,12 @@
|
||||
PRETTY_NAME="VanillaOS 22.10 all"
|
||||
NAME="VanillaOS"
|
||||
VERSION_ID="22.10"
|
||||
VERSION="22.10 all"
|
||||
VERSION_CODENAME="kinetic"
|
||||
ID=ubuntu
|
||||
ID_LIKE=debian
|
||||
HOME_URL="https://github.com/vanilla-os"
|
||||
SUPPORT_URL="https://github.com/vanilla-os"
|
||||
BUG_REPORT_URL="https://github.com/vanilla-os"
|
||||
PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
|
||||
UBUNTU_CODENAME="kinetic"
|
||||
5
src/steps/os/os_release/wolfi
Normal file
@@ -0,0 +1,5 @@
|
||||
ID=wolfi
|
||||
NAME="Wolfi"
|
||||
PRETTY_NAME="Wolfi"
|
||||
VERSION_ID="20230201"
|
||||
HOME_URL="https://wolfi.dev"
|
||||
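The fixtures above are plain key=value files in os-release format; the distribution-detection code in this diff pulls in `ini::Ini`, which can read them as a sectionless INI document. Below is a hypothetical sketch of extracting the `ID` field from such a fixture; the helper name and the explicit quote trimming are assumptions rather than the project's actual parser.

use ini::Ini;

/// Hypothetical helper: read the `ID` field from an os-release style fixture.
fn distribution_id(contents: &str) -> Option<String> {
    let conf = Ini::load_from_str(contents).ok()?;
    // os-release has no `[section]` headers, so every key lands in the general section.
    conf.general_section()
        .get("ID")
        .map(|id| id.trim_matches('"').to_string())
}

fn main() {
    let fixture = "ID=wolfi\nNAME=\"Wolfi\"\nPRETTY_NAME=\"Wolfi\"\n";
    assert_eq!(distribution_id(fixture).as_deref(), Some("wolfi"));
    println!("detected distribution: wolfi");
}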
@@ -1,26 +1,30 @@
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::os::unix::fs::MetadataExt;
|
||||
use std::path::Component;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use std::{env, path::Path};
|
||||
use std::{env::var, path::Path};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::Step;
|
||||
use crate::{Step, HOME_DIR};
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use directories::BaseDirs;
|
||||
use home;
|
||||
use ini::Ini;
|
||||
use tracing::debug;
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
use super::linux::Distribution;
|
||||
use crate::error::SkipStep;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
#[cfg(any(target_os = "linux", target_os = "macos"))]
|
||||
use crate::executor::Executor;
|
||||
#[cfg(any(target_os = "linux", target_os = "macos"))]
|
||||
use crate::executor::RunType;
|
||||
use crate::terminal::print_separator;
|
||||
#[cfg(not(any(target_os = "android", target_os = "macos")))]
|
||||
use crate::utils::require_option;
|
||||
use crate::utils::{require, PathExt};
|
||||
use crate::utils::{require, require_option, PathExt, REQUIRE_SUDO};
|
||||
|
||||
#[cfg(any(target_os = "linux", target_os = "macos"))]
|
||||
const INTEL_BREW: &str = "/usr/local/bin/brew";
|
||||
@@ -87,7 +91,7 @@ impl BrewVariant {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_fisher(run_type: RunType) -> Result<()> {
|
||||
pub fn run_fisher(ctx: &ExecutionContext) -> Result<()> {
|
||||
let fish = require("fish")?;
|
||||
|
||||
Command::new(&fish)
|
||||
@@ -110,7 +114,8 @@ pub fn run_fisher(run_type: RunType) -> Result<()> {
|
||||
|
||||
print_separator("Fisher");
|
||||
|
||||
let version_str = run_type
|
||||
let version_str = ctx
|
||||
.run_type()
|
||||
.execute(&fish)
|
||||
.args(["-c", "fisher --version"])
|
||||
.output_checked_utf8()?
|
||||
@@ -119,15 +124,18 @@ pub fn run_fisher(run_type: RunType) -> Result<()> {
|
||||
|
||||
if version_str.starts_with("fisher version 3.") {
|
||||
// v3 - see https://github.com/topgrade-rs/topgrade/pull/37#issuecomment-1283844506
|
||||
run_type.execute(&fish).args(["-c", "fisher"]).status_checked()
|
||||
ctx.run_type().execute(&fish).args(["-c", "fisher"]).status_checked()
|
||||
} else {
|
||||
// v4
|
||||
run_type.execute(&fish).args(["-c", "fisher update"]).status_checked()
|
||||
ctx.run_type()
|
||||
.execute(&fish)
|
||||
.args(["-c", "fisher update"])
|
||||
.status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_bashit(ctx: &ExecutionContext) -> Result<()> {
|
||||
ctx.base_dirs().home_dir().join(".bash_it").require()?;
|
||||
HOME_DIR.join(".bash_it").require()?;
|
||||
|
||||
print_separator("Bash-it");
|
||||
|
||||
@@ -137,12 +145,30 @@ pub fn run_bashit(ctx: &ExecutionContext) -> Result<()> {
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_oh_my_bash(ctx: &ExecutionContext) -> Result<()> {
|
||||
require("bash")?;
|
||||
let oh_my_bash = var("OSH")
|
||||
// default to `~/.oh-my-bash`
|
||||
.unwrap_or(
|
||||
HOME_DIR
|
||||
.join(".oh-my-bash")
|
||||
.to_str()
|
||||
.expect("should be UTF-8 encoded")
|
||||
.to_string(),
|
||||
)
|
||||
.require()?;
|
||||
|
||||
print_separator("oh-my-bash");
|
||||
|
||||
let mut update_script = oh_my_bash;
|
||||
update_script.push_str("/tools/upgrade.sh");
|
||||
|
||||
ctx.run_type().execute("bash").arg(update_script).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_oh_my_fish(ctx: &ExecutionContext) -> Result<()> {
|
||||
let fish = require("fish")?;
|
||||
ctx.base_dirs()
|
||||
.home_dir()
|
||||
.join(".local/share/omf/pkg/omf/functions/omf.fish")
|
||||
.require()?;
|
||||
HOME_DIR.join(".local/share/omf/pkg/omf/functions/omf.fish").require()?;
|
||||
|
||||
print_separator("oh-my-fish");
|
||||
|
||||
@@ -151,17 +177,18 @@ pub fn run_oh_my_fish(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
pub fn run_pkgin(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pkgin = require("pkgin")?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
||||
|
||||
print_separator("Pkgin");
|
||||
|
||||
let mut command = ctx.run_type().execute(ctx.sudo().as_ref().unwrap());
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(&pkgin).arg("update");
|
||||
if ctx.config().yes(Step::Pkgin) {
|
||||
command.arg("-y");
|
||||
}
|
||||
command.status_checked()?;
|
||||
|
||||
let mut command = ctx.run_type().execute(ctx.sudo().as_ref().unwrap());
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(&pkgin).arg("upgrade");
|
||||
if ctx.config().yes(Step::Pkgin) {
|
||||
command.arg("-y");
|
||||
@@ -171,8 +198,7 @@ pub fn run_pkgin(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
pub fn run_fish_plug(ctx: &ExecutionContext) -> Result<()> {
|
||||
let fish = require("fish")?;
|
||||
ctx.base_dirs()
|
||||
.home_dir()
|
||||
HOME_DIR
|
||||
.join(".local/share/fish/plug/kidonng/fish-plug/functions/plug.fish")
|
||||
.require()?;
|
||||
|
||||
@@ -191,7 +217,7 @@ pub fn run_fish_plug(ctx: &ExecutionContext) -> Result<()> {
|
||||
/// See: <https://github.com/danhper/fundle>
|
||||
pub fn run_fundle(ctx: &ExecutionContext) -> Result<()> {
|
||||
let fish = require("fish")?;
|
||||
ctx.base_dirs().home_dir().join(".config/fish/fundle").require()?;
|
||||
HOME_DIR.join(".config/fish/fundle").require()?;
|
||||
|
||||
print_separator("fundle");
|
||||
|
||||
@@ -205,7 +231,7 @@ pub fn run_fundle(ctx: &ExecutionContext) -> Result<()> {
|
||||
pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
|
||||
let gdbus = require("gdbus")?;
|
||||
require_option(
|
||||
env::var("XDG_CURRENT_DESKTOP").ok().filter(|p| p.contains("GNOME")),
|
||||
var("XDG_CURRENT_DESKTOP").ok().filter(|p| p.contains("GNOME")),
|
||||
"Desktop doest not appear to be gnome".to_string(),
|
||||
)?;
|
||||
let output = Command::new("gdbus")
|
||||
@@ -259,10 +285,15 @@ pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<
|
||||
let run_type = ctx.run_type();
|
||||
|
||||
variant.execute(run_type).arg("update").status_checked()?;
|
||||
variant
|
||||
.execute(run_type)
|
||||
.args(["upgrade", "--ignore-pinned", "--formula"])
|
||||
.status_checked()?;
|
||||
|
||||
let mut command = variant.execute(run_type);
|
||||
command.args(["upgrade", "--formula"]);
|
||||
|
||||
if ctx.config().brew_fetch_head() {
|
||||
command.arg("--fetch-HEAD");
|
||||
}
|
||||
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
variant.execute(run_type).arg("cleanup").status_checked()?;
|
||||
@@ -302,6 +333,9 @@ pub fn run_brew_cask(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()>
|
||||
if ctx.config().brew_cask_greedy() {
|
||||
brew_args.push("--greedy");
|
||||
}
|
||||
if ctx.config().brew_greedy_latest() {
|
||||
brew_args.push("--greedy-latest");
|
||||
}
|
||||
}
|
||||
|
||||
variant.execute(run_type).args(&brew_args).status_checked()?;
|
||||
@@ -335,6 +369,7 @@ pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
|
||||
let nix = require("nix")?;
|
||||
let nix_channel = require("nix-channel")?;
|
||||
let nix_env = require("nix-env")?;
|
||||
// TODO: Is None possible here?
|
||||
let profile_path = match home::home_dir() {
|
||||
Some(home) => Path::new(&home).join(".nix-profile"),
|
||||
None => Path::new("/nix/var/nix/profiles/per-user/default").into(),
|
||||
@@ -342,27 +377,11 @@ pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
|
||||
debug!("nix profile: {:?}", profile_path);
|
||||
let manifest_json_path = profile_path.join("manifest.json");
|
||||
|
||||
let output = Command::new(&nix_env).args(["--query", "nix"]).output_checked_utf8();
|
||||
debug!("nix-env output: {:?}", output);
|
||||
let should_self_upgrade = output.is_ok();
|
||||
|
||||
print_separator("Nix");
|
||||
|
||||
let multi_user = fs::metadata(&nix)?.uid() == 0;
|
||||
debug!("Multi user nix: {}", multi_user);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
use super::linux::Distribution;
|
||||
|
||||
if let Ok(Distribution::NixOS) = Distribution::detect() {
|
||||
return Err(SkipStep(String::from("Nix on NixOS must be upgraded via nixos-rebuild switch")).into());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
if let Ok(..) = require("darwin-rebuild") {
|
||||
if require("darwin-rebuild").is_ok() {
|
||||
return Err(SkipStep(String::from(
|
||||
"Nix-darwin on macOS must be upgraded via darwin-rebuild switch",
|
||||
))
|
||||
@@ -371,29 +390,144 @@ pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
|
||||
}
|
||||
|
||||
let run_type = ctx.run_type();
|
||||
|
||||
if should_self_upgrade {
|
||||
if multi_user {
|
||||
ctx.execute_elevated(&nix, true)?.arg("upgrade-nix").status_checked()?;
|
||||
} else {
|
||||
run_type.execute(&nix).arg("upgrade-nix").status_checked()?;
|
||||
}
|
||||
}
|
||||
|
||||
run_type.execute(nix_channel).arg("--update").status_checked()?;
|
||||
|
||||
if std::path::Path::new(&manifest_json_path).exists() {
|
||||
if Path::new(&manifest_json_path).exists() {
|
||||
run_type
|
||||
.execute(&nix)
|
||||
.execute(nix)
|
||||
.args(nix_args())
|
||||
.arg("profile")
|
||||
.arg("upgrade")
|
||||
.arg(".*")
|
||||
.arg("--verbose")
|
||||
.status_checked()
|
||||
} else {
|
||||
run_type.execute(&nix_env).arg("--upgrade").status_checked()
|
||||
let mut command = run_type.execute(nix_env);
|
||||
command.arg("--upgrade");
|
||||
if let Some(args) = ctx.config().nix_env_arguments() {
|
||||
command.args(args.split_whitespace());
|
||||
};
|
||||
command.status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_nix_self_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let nix = require("nix")?;
|
||||
|
||||
// Should we attempt to upgrade Nix with `nix upgrade-nix`?
|
||||
#[allow(unused_mut)]
|
||||
let mut should_self_upgrade = cfg!(target_os = "macos");
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// We can't use `nix upgrade-nix` on NixOS.
|
||||
if let Ok(Distribution::NixOS) = Distribution::detect() {
|
||||
should_self_upgrade = false;
|
||||
}
|
||||
}
|
||||
|
||||
if !should_self_upgrade {
|
||||
return Err(SkipStep(String::from(
|
||||
"`nix upgrade-nix` can only be used on macOS or non-NixOS Linux",
|
||||
))
|
||||
.into());
|
||||
}
|
||||
|
||||
if nix_profile_dir(&nix)?.is_none() {
|
||||
return Err(SkipStep(String::from(
|
||||
"`nix upgrade-nix` cannot be run when Nix is installed in a profile",
|
||||
))
|
||||
.into());
|
||||
}
|
||||
|
||||
print_separator("Nix (self-upgrade)");
|
||||
|
||||
let multi_user = fs::metadata(&nix)?.uid() == 0;
|
||||
debug!("Multi user nix: {}", multi_user);
|
||||
|
||||
let nix_args = nix_args();
|
||||
if multi_user {
|
||||
ctx.execute_elevated(&nix, true)?
|
||||
.args(nix_args)
|
||||
.arg("upgrade-nix")
|
||||
.status_checked()
|
||||
} else {
|
||||
ctx.run_type()
|
||||
.execute(&nix)
|
||||
.args(nix_args)
|
||||
.arg("upgrade-nix")
|
||||
.status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
/// If we try to `nix upgrade-nix` but Nix is installed with `nix profile`, we'll get a `does not
|
||||
/// appear to be part of a Nix profile` error.
|
||||
///
|
||||
/// We duplicate some of the `nix` logic here to avoid this.
|
||||
/// See: <https://github.com/NixOS/nix/blob/f0180487a0e4c0091b46cb1469c44144f5400240/src/nix/upgrade-nix.cc#L102-L139>
|
||||
///
|
||||
/// See: <https://github.com/NixOS/nix/issues/5473>
|
||||
fn nix_profile_dir(nix: &Path) -> Result<Option<PathBuf>> {
|
||||
// NOTE: `nix` uses the location of the `nix-env` binary for this but we're using the `nix`
|
||||
// binary; should be the same.
|
||||
let nix_bin_dir = nix.parent();
|
||||
if nix_bin_dir.and_then(|p| p.file_name()) != Some(OsStr::new("bin")) {
|
||||
debug!("Nix is not installed in a `bin` directory: {nix_bin_dir:?}");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let nix_dir = nix_bin_dir
|
||||
.and_then(|bin_dir| bin_dir.parent())
|
||||
.ok_or_else(|| eyre!("Unable to find Nix install directory from Nix binary {nix:?}"))?;
|
||||
|
||||
debug!("Found Nix in {nix_dir:?}");
|
||||
|
||||
let mut profile_dir = nix_dir.to_path_buf();
|
||||
while profile_dir.is_symlink() {
|
||||
profile_dir = profile_dir
|
||||
.parent()
|
||||
.ok_or_else(|| eyre!("Path has no parent: {profile_dir:?}"))?
|
||||
.join(
|
||||
profile_dir
|
||||
.read_link()
|
||||
.wrap_err_with(|| format!("Failed to read symlink {profile_dir:?}"))?,
|
||||
);
|
||||
|
||||
// NOTE: `nix` uses a hand-rolled canonicalize function, Rust just uses `realpath`.
|
||||
if profile_dir
|
||||
.canonicalize()
|
||||
.wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?
|
||||
.components()
|
||||
.any(|component| component == Component::Normal(OsStr::new("profiles")))
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
debug!("Found Nix profile {profile_dir:?}");
|
||||
|
||||
let user_env = profile_dir
|
||||
.canonicalize()
|
||||
.wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?;
|
||||
|
||||
Ok(
|
||||
if user_env
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.map(|name| name.ends_with("user-environment"))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
Some(profile_dir)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn nix_args() -> [&'static str; 2] {
|
||||
["--extra-experimental-features", "nix-command"]
|
||||
}
|
||||
|
||||
pub fn run_yadm(ctx: &ExecutionContext) -> Result<()> {
|
||||
let yadm = require("yadm")?;
|
||||
|
||||
@@ -402,45 +536,88 @@ pub fn run_yadm(ctx: &ExecutionContext) -> Result<()> {
|
||||
ctx.run_type().execute(yadm).arg("pull").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_asdf(run_type: RunType) -> Result<()> {
|
||||
pub fn run_asdf(ctx: &ExecutionContext) -> Result<()> {
|
||||
let asdf = require("asdf")?;
|
||||
|
||||
print_separator("asdf");
|
||||
run_type.execute(&asdf).arg("update").status_checked_with_codes(&[42])?;
|
||||
ctx.run_type()
|
||||
.execute(&asdf)
|
||||
.arg("update")
|
||||
.status_checked_with_codes(&[42])?;
|
||||
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&asdf)
|
||||
.args(["plugin", "update", "--all"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_home_manager(run_type: RunType) -> Result<()> {
|
||||
pub fn run_mise(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mise = require("mise")?;
|
||||
|
||||
print_separator("mise");
|
||||
|
||||
ctx.run_type().execute(&mise).arg("upgrade").status_checked()?;
|
||||
|
||||
ctx.run_type()
|
||||
.execute(&mise)
|
||||
.args(["plugins", "update"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_home_manager(ctx: &ExecutionContext) -> Result<()> {
|
||||
let home_manager = require("home-manager")?;
|
||||
|
||||
print_separator("home-manager");
|
||||
run_type.execute(home_manager).arg("switch").status_checked()
|
||||
|
||||
let mut cmd = ctx.run_type().execute(home_manager);
|
||||
cmd.arg("switch");
|
||||
|
||||
if let Some(extra_args) = ctx.config().home_manager() {
|
||||
cmd.args(extra_args);
|
||||
}
|
||||
|
||||
cmd.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_tldr(run_type: RunType) -> Result<()> {
|
||||
pub fn run_tldr(ctx: &ExecutionContext) -> Result<()> {
|
||||
let tldr = require("tldr")?;
|
||||
|
||||
print_separator("TLDR");
|
||||
run_type.execute(tldr).arg("--update").status_checked()
|
||||
ctx.run_type().execute(tldr).arg("--update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pearl(run_type: RunType) -> Result<()> {
|
||||
pub fn run_pearl(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pearl = require("pearl")?;
|
||||
print_separator("pearl");
|
||||
|
||||
run_type.execute(pearl).arg("update").status_checked()
|
||||
ctx.run_type().execute(pearl).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_sdkman(base_dirs: &BaseDirs, cleanup: bool, run_type: RunType) -> Result<()> {
|
||||
pub fn run_pyenv(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pyenv = require("pyenv")?;
|
||||
print_separator("pyenv");
|
||||
|
||||
let pyenv_dir = var("PYENV_ROOT")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".pyenv"));
|
||||
|
||||
if !pyenv_dir.exists() {
|
||||
return Err(SkipStep("Pyenv is installed, but $PYENV_ROOT is not set correctly".to_string()).into());
|
||||
}
|
||||
|
||||
if !pyenv_dir.join(".git").exists() {
|
||||
return Err(SkipStep("pyenv is not a git repository".to_string()).into());
|
||||
}
|
||||
|
||||
ctx.run_type().execute(pyenv).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
|
||||
let bash = require("bash")?;
|
||||
|
||||
let sdkman_init_path = env::var("SDKMAN_DIR")
|
||||
let sdkman_init_path = var("SDKMAN_DIR")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| base_dirs.home_dir().join(".sdkman"))
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".sdkman"))
|
||||
.join("bin")
|
||||
.join("sdkman-init.sh")
|
||||
.require()
|
||||
@@ -448,9 +625,9 @@ pub fn run_sdkman(base_dirs: &BaseDirs, cleanup: bool, run_type: RunType) -> Res
|
||||
|
||||
print_separator("SDKMAN!");
|
||||
|
||||
let sdkman_config_path = env::var("SDKMAN_DIR")
|
||||
let sdkman_config_path = var("SDKMAN_DIR")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| base_dirs.home_dir().join(".sdkman"))
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".sdkman"))
|
||||
.join("etc")
|
||||
.join("config")
|
||||
.require()?;
|
||||
@@ -463,33 +640,33 @@ pub fn run_sdkman(base_dirs: &BaseDirs, cleanup: bool, run_type: RunType) -> Res
|
||||
|
||||
if selfupdate_enabled == "true" {
|
||||
let cmd_selfupdate = format!("source {} && sdk selfupdate", &sdkman_init_path);
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_selfupdate.as_str()])
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
let cmd_update = format!("source {} && sdk update", &sdkman_init_path);
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_update.as_str()])
|
||||
.status_checked()?;
|
||||
|
||||
let cmd_upgrade = format!("source {} && sdk upgrade", &sdkman_init_path);
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_upgrade.as_str()])
|
||||
.status_checked()?;
|
||||
|
||||
if cleanup {
|
||||
if ctx.config().cleanup() {
|
||||
let cmd_flush_archives = format!("source {} && sdk flush archives", &sdkman_init_path);
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_flush_archives.as_str()])
|
||||
.status_checked()?;
|
||||
|
||||
let cmd_flush_temp = format!("source {} && sdk flush temp", &sdkman_init_path);
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_flush_temp.as_str()])
|
||||
.status_checked()?;
|
||||
@@ -506,6 +683,24 @@ pub fn run_bun(ctx: &ExecutionContext) -> Result<()> {
|
||||
ctx.run_type().execute(bun).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_bun_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let bun = require("bun")?;
|
||||
|
||||
print_separator("Bun Packages");
|
||||
|
||||
let mut package_json: PathBuf = var("BUN_INSTALL")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".bun"));
|
||||
package_json.push("install/global/package.json");
|
||||
|
||||
if !package_json.exists() {
|
||||
println!("No global packages installed");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
ctx.run_type().execute(bun).args(["-g", "update"]).status_checked()
|
||||
}
|
||||
|
||||
/// Update dotfiles with `rcm(7)`.
|
||||
///
|
||||
/// See: <https://github.com/thoughtbot/rcm>
|
||||
@@ -516,6 +711,13 @@ pub fn run_rcm(ctx: &ExecutionContext) -> Result<()> {
|
||||
ctx.run_type().execute(rcup).arg("-v").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_maza(ctx: &ExecutionContext) -> Result<()> {
|
||||
let maza = require("maza")?;
|
||||
|
||||
print_separator("maza");
|
||||
ctx.run_type().execute(maza).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn reboot() -> Result<()> {
|
||||
print!("Rebooting...");
|
||||
Command::new("sudo").arg("reboot").status_checked()
|
||||
|
||||
@@ -1,16 +1,15 @@
|
||||
use std::convert::TryFrom;
|
||||
use std::path::Path;
|
||||
use std::{ffi::OsStr, process::Command};
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::RunType;
|
||||
use crate::terminal::{print_separator, print_warning};
|
||||
use crate::utils::require;
|
||||
use crate::{error::SkipStep, steps::git::Repositories};
|
||||
use crate::utils::{require, which};
|
||||
use crate::{error::SkipStep, steps::git::RepoStep};
|
||||
use crate::{powershell, Step};
|
||||
|
||||
pub fn run_chocolatey(ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -42,33 +41,32 @@ pub fn run_winget(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("winget");
|
||||
|
||||
if !ctx.config().enable_winget() {
|
||||
print_warning("Winget is disabled by default. Enable it by setting enable_winget=true in the [windows] section in the configuration.");
|
||||
return Err(SkipStep(String::from("Winget is disabled by default")).into());
|
||||
}
|
||||
|
||||
ctx.run_type()
|
||||
.execute(winget)
|
||||
.args(["upgrade", "--all"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_scoop(cleanup: bool, run_type: RunType) -> Result<()> {
|
||||
pub fn run_scoop(ctx: &ExecutionContext) -> Result<()> {
|
||||
let scoop = require("scoop")?;
|
||||
|
||||
print_separator("Scoop");
|
||||
|
||||
run_type.execute(&scoop).args(["update"]).status_checked()?;
|
||||
run_type.execute(&scoop).args(["update", "*"]).status_checked()?;
|
||||
ctx.run_type().execute(&scoop).args(["update"]).status_checked()?;
|
||||
ctx.run_type().execute(&scoop).args(["update", "*"]).status_checked()?;
|
||||
|
||||
if cleanup {
|
||||
run_type.execute(&scoop).args(["cleanup", "*"]).status_checked()?;
|
||||
if ctx.config().cleanup() {
|
||||
ctx.run_type().execute(&scoop).args(["cleanup", "*"]).status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_wsl(ctx: &ExecutionContext) -> Result<()> {
|
||||
if !is_wsl_installed()? {
|
||||
return Err(SkipStep("WSL not installed".to_string()).into());
|
||||
}
|
||||
|
||||
let wsl = require("wsl")?;
|
||||
|
||||
print_separator("Update WSL");
|
||||
@@ -87,6 +85,30 @@ pub fn update_wsl(ctx: &ExecutionContext) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Detect if WSL is installed or not.
|
||||
///
|
||||
/// For WSL, we cannot simply check if command `wsl` is installed as on newer
|
||||
/// versions of Windows (since Windows 10 version 2004), this command is
|
||||
/// installed by default.
|
||||
///
|
||||
/// If the command is installed and the user hasn't installed any Linux distros
|
||||
/// on it, command `wsl -l` would print a help message and exit with failure; we
|
||||
/// use this to check whether WSL is installed or not.
|
||||
fn is_wsl_installed() -> Result<bool> {
|
||||
if let Some(wsl) = which("wsl") {
|
||||
// Don't use `output_checked` as an execution failure log is not wanted
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let output = Command::new(wsl).arg("-l").output()?;
|
||||
let status = output.status;
|
||||
|
||||
if status.success() {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn get_wsl_distributions(wsl: &Path) -> Result<Vec<String>> {
|
||||
let output = Command::new(wsl).args(["--list", "-q"]).output_checked_utf8()?.stdout;
|
||||
Ok(output
|
||||
@@ -100,12 +122,45 @@ fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> R
|
||||
let topgrade = Command::new(wsl)
|
||||
.args(["-d", dist, "bash", "-lc", "which topgrade"])
|
||||
.output_checked_utf8()
|
||||
.map_err(|_| SkipStep(String::from("Could not find Topgrade installed in WSL")))?;
|
||||
.map_err(|_| SkipStep(String::from("Could not find Topgrade installed in WSL")))?
|
||||
.stdout // The normal output from `which topgrade` appends a newline, so we trim it here.
|
||||
.trim_end()
|
||||
.to_owned();
|
||||
|
||||
let mut command = ctx.run_type().execute(wsl);
|
||||
|
||||
// The `arg` method automatically quotes its arguments.
|
||||
// This means we can't append additional arguments to `topgrade` in WSL
|
||||
// by calling `arg` successively.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// ```rust
|
||||
// command
|
||||
// .args(["-d", dist, "bash", "-c"])
|
||||
// .arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade}"));
|
||||
// ```
|
||||
//
|
||||
// creates a command string like:
|
||||
// > `C:\WINDOWS\system32\wsl.EXE -d Ubuntu bash -c 'TOPGRADE_PREFIX=Ubuntu exec /bin/topgrade'`
|
||||
//
|
||||
// Adding the following:
|
||||
//
|
||||
// ```rust
|
||||
// command.arg("-v");
|
||||
// ```
|
||||
//
|
||||
// appends the next argument like so:
|
||||
// > `C:\WINDOWS\system32\wsl.EXE -d Ubuntu bash -c 'TOPGRADE_PREFIX=Ubuntu exec /bin/topgrade' -v`
|
||||
// which means `-v` isn't passed to `topgrade`.
|
||||
let mut args = String::new();
|
||||
if ctx.config().verbose() {
|
||||
args.push_str("-v");
|
||||
}
|
||||
|
||||
command
|
||||
.args(["-d", dist, "bash", "-c"])
|
||||
.arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade}"));
|
||||
.arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade} {args}"));
|
||||
|
||||
if ctx.config().yes(Step::Wsl) {
|
||||
command.arg("-y");
|
||||
@@ -115,6 +170,10 @@ fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> R
|
||||
}
|
||||
|
||||
pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
if !is_wsl_installed()? {
|
||||
return Err(SkipStep("WSL not installed".to_string()).into());
|
||||
}
|
||||
|
||||
let wsl = require("wsl")?;
|
||||
let wsl_distributions = get_wsl_distributions(&wsl)?;
|
||||
let mut ran = false;
|
||||
@@ -142,20 +201,17 @@ pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
pub fn windows_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = powershell::Powershell::windows_powershell();
|
||||
|
||||
if powershell.supports_windows_update() {
|
||||
print_separator("Windows Update");
|
||||
return powershell.windows_update(ctx);
|
||||
}
|
||||
|
||||
let usoclient = require("UsoClient")?;
|
||||
|
||||
print_separator("Windows Update");
|
||||
println!("Running Windows Update. Check the control panel for progress.");
|
||||
ctx.run_type()
|
||||
.execute(&usoclient)
|
||||
.arg("ScanInstallWait")
|
||||
.status_checked()?;
|
||||
ctx.run_type().execute(&usoclient).arg("StartInstall").status_checked()
|
||||
|
||||
if powershell.supports_windows_update() {
|
||||
powershell.windows_update(ctx)
|
||||
} else {
|
||||
print_warning(
|
||||
"Consider installing PSWindowsUpdate as the use of Windows Update via USOClient is not supported.",
|
||||
);
|
||||
|
||||
Err(SkipStep("USOClient not supported.".to_string()).into())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reboot() -> Result<()> {
|
||||
@@ -164,9 +220,8 @@ pub fn reboot() -> Result<()> {
|
||||
Command::new("shutdown").args(["/R", "/T", "0"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn insert_startup_scripts(ctx: &ExecutionContext, git_repos: &mut Repositories) -> Result<()> {
|
||||
let startup_dir = ctx
|
||||
.base_dirs()
|
||||
pub fn insert_startup_scripts(git_repos: &mut RepoStep) -> Result<()> {
|
||||
let startup_dir = crate::WINDOWS_DIRS
|
||||
.data_dir()
|
||||
.join("Microsoft\\Windows\\Start Menu\\Programs\\Startup");
|
||||
for entry in std::fs::read_dir(&startup_dir)?.flatten() {
|
||||
|
||||
@@ -111,7 +111,7 @@ impl Powershell {
|
||||
"-NoProfile",
|
||||
"-Command",
|
||||
&format!(
|
||||
"Import-Module PSWindowsUpdate; Install-WindowsUpdate -MicrosoftUpdate {} -Verbose",
|
||||
"Start-Process powershell -Verb runAs -ArgumentList 'Import-Module PSWindowsUpdate; Install-WindowsUpdate -MicrosoftUpdate {} -Verbose'",
|
||||
if ctx.config().accept_all_windows_updates() {
|
||||
"-AcceptAll"
|
||||
} else {
|
||||
|
||||
@@ -5,11 +5,10 @@ use std::process::Command;
use color_eyre::eyre::eyre;
use color_eyre::eyre::Context;
use color_eyre::eyre::Result;
use directories::BaseDirs;

use crate::command::CommandExt;
use crate::executor::RunType;
use crate::terminal::print_separator;
use crate::HOME_DIR;
use crate::{
execution_context::ExecutionContext,
utils::{which, PathExt},
@@ -18,15 +17,19 @@ use crate::{
#[cfg(unix)]
use std::os::unix::process::CommandExt as _;

pub fn run_tpm(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
let tpm = base_dirs
.home_dir()
.join(".tmux/plugins/tpm/bin/update_plugins")
.require()?;
pub fn run_tpm(ctx: &ExecutionContext) -> Result<()> {
let tpm = match env::var("TMUX_PLUGIN_MANAGER_PATH") {
// If `TMUX_PLUGIN_MANAGER_PATH` is set, search for
// `$TMUX_PLUGIN_MANAGER_PATH/bin/install_plugins/tpm/bin/update_plugins`
Ok(var) => PathBuf::from(var).join("bin/install_plugins/tpm/bin/update_plugins"),
// Otherwise, use the default location `~/.tmux/plugins/tpm/bin/update_plugins`
Err(_) => HOME_DIR.join(".tmux/plugins/tpm/bin/update_plugins"),
}
.require()?;

print_separator("tmux plugins");

run_type.execute(tpm).arg("all").status_checked()
ctx.run_type().execute(tpm).arg("all").status_checked()
}

struct Tmux {

@@ -52,6 +52,8 @@ pub fn run_toolbx(ctx: &ExecutionContext) -> Result<()> {
topgrade_path,
"--only",
"system",
"--no-self-update",
"--skip-notify",
];
if ctx.config().yes(Step::Toolbx) {
args.push("--yes");
@@ -1,3 +1,19 @@
" AstroUpdate calls a plugin manager - Lazy as of this writing. So we check for it before
" others. Add to init.lua:
" updater = {
" skip_prompts = true,
" },
if exists(":AstroUpdate")
echo "AstroUpdate"
AstroUpdate
quitall
endif

if exists(":MasonUpdate")
echo "MasonUpdate"
MasonUpdate
endif

if exists(":NeoBundleUpdate")
echo "NeoBundle"
NeoBundleUpdate
@@ -35,8 +51,7 @@ endif

if exists(":Lazy")
echo "Lazy Update"
autocmd User LazySync * quitall
Lazy sync
Lazy! sync | qa
endif

if exists(':PackerSync')
@@ -1,14 +1,15 @@
use crate::command::CommandExt;
use crate::error::{SkipStep, TopgradeError};
use crate::HOME_DIR;
use color_eyre::eyre::Result;
use etcetera::base_strategy::BaseStrategy;

use crate::executor::{Executor, ExecutorOutput, RunType};
use crate::executor::{Executor, ExecutorOutput};
use crate::terminal::print_separator;
use crate::{
execution_context::ExecutionContext,
utils::{require, PathExt},
};
use directories::BaseDirs;
use std::path::PathBuf;
use std::{
io::{self, Write},
@@ -18,22 +19,19 @@ use tracing::debug;

const UPGRADE_VIM: &str = include_str!("upgrade.vim");

pub fn vimrc(base_dirs: &BaseDirs) -> Result<PathBuf> {
base_dirs
.home_dir()
pub fn vimrc() -> Result<PathBuf> {
HOME_DIR
.join(".vimrc")
.require()
.or_else(|_| base_dirs.home_dir().join(".vim/vimrc").require())
.or_else(|_| HOME_DIR.join(".vim/vimrc").require())
}

fn nvimrc(base_dirs: &BaseDirs) -> Result<PathBuf> {
fn nvimrc() -> Result<PathBuf> {
#[cfg(unix)]
let base_dir =
// Bypass directories crate as nvim doesn't use the macOS-specific directories.
std::env::var_os("XDG_CONFIG_HOME").map_or_else(|| base_dirs.home_dir().join(".config"), PathBuf::from);
let base_dir = crate::XDG_DIRS.config_dir();

#[cfg(windows)]
let base_dir = base_dirs.cache_dir();
let base_dir = crate::WINDOWS_DIRS.cache_dir();

base_dir
.join("nvim/init.vim")
@@ -59,8 +57,8 @@ fn upgrade(command: &mut Executor, ctx: &ExecutionContext) -> Result<()> {
let status = output.status;

if !status.success() || ctx.config().verbose() {
io::stdout().write(&output.stdout).ok();
io::stderr().write(&output.stderr).ok();
io::stdout().write_all(&output.stdout).ok();
io::stderr().write_all(&output.stderr).ok();
}

if !status.success() {
@@ -74,7 +72,7 @@ fn upgrade(command: &mut Executor, ctx: &ExecutionContext) -> Result<()> {
}

pub fn upgrade_ultimate_vimrc(ctx: &ExecutionContext) -> Result<()> {
let config_dir = ctx.base_dirs().home_dir().join(".vim_runtime").require()?;
let config_dir = HOME_DIR.join(".vim_runtime").require()?;
let git = require("git")?;
let python = require("python3")?;
let update_plugins = config_dir.join("update_plugins.py").require()?;
@@ -105,7 +103,7 @@ pub fn upgrade_ultimate_vimrc(ctx: &ExecutionContext) -> Result<()> {
Ok(())
}

pub fn upgrade_vim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()> {
pub fn upgrade_vim(ctx: &ExecutionContext) -> Result<()> {
let vim = require("vim")?;

let output = Command::new(&vim).arg("--version").output_checked_utf8()?;
@@ -113,7 +111,7 @@ pub fn upgrade_vim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()> {
return Err(SkipStep(String::from("vim binary might be actually nvim")).into());
}

let vimrc = vimrc(base_dirs)?;
let vimrc = vimrc()?;

print_separator("Vim");
upgrade(
@@ -127,9 +125,9 @@ pub fn upgrade_vim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()> {
)
}

pub fn upgrade_neovim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()> {
pub fn upgrade_neovim(ctx: &ExecutionContext) -> Result<()> {
let nvim = require("nvim")?;
let nvimrc = nvimrc(base_dirs)?;
let nvimrc = nvimrc()?;

print_separator("Neovim");
upgrade(
@@ -143,10 +141,10 @@ pub fn upgrade_neovim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()
)
}

pub fn run_voom(_base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
pub fn run_voom(ctx: &ExecutionContext) -> Result<()> {
let voom = require("voom")?;

print_separator("voom");

run_type.execute(voom).arg("update").status_checked()
ctx.run_type().execute(voom).arg("update").status_checked()
}
148 src/steps/zsh.rs
@@ -1,118 +1,149 @@
use std::env;
use std::path::{Path, PathBuf};
use std::path::PathBuf;
use std::process::Command;

use color_eyre::eyre::Result;
use directories::BaseDirs;
use tracing::debug;
use walkdir::WalkDir;

use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::executor::RunType;
use crate::git::Repositories;
use crate::git::RepoStep;
use crate::terminal::print_separator;
use crate::utils::{require, PathExt};
use crate::HOME_DIR;
use crate::XDG_DIRS;
use etcetera::base_strategy::BaseStrategy;

pub fn run_zr(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
pub fn run_zr(ctx: &ExecutionContext) -> Result<()> {
let zsh = require("zsh")?;

require("zr")?;

print_separator("zr");

let cmd = format!("source {} && zr --update", zshrc(base_dirs).display());
run_type.execute(zsh).args(["-l", "-c", cmd.as_str()]).status_checked()
let cmd = format!("source {} && zr --update", zshrc().display());
ctx.run_type()
.execute(zsh)
.args(["-l", "-c", cmd.as_str()])
.status_checked()
}

pub fn zshrc(base_dirs: &BaseDirs) -> PathBuf {
fn zdotdir() -> PathBuf {
env::var("ZDOTDIR")
.map(|p| Path::new(&p).join(".zshrc"))
.unwrap_or_else(|_| base_dirs.home_dir().join(".zshrc"))
.map(PathBuf::from)
.unwrap_or_else(|_| HOME_DIR.clone())
}

pub fn run_antibody(run_type: RunType) -> Result<()> {
pub fn zshrc() -> PathBuf {
zdotdir().join(".zshrc")
}
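The two helpers above change the lookup strategy: zdotdir() honours ZDOTDIR and falls back to the home directory, and zshrc() appends .zshrc to whichever base was chosen. A tiny illustrative sketch (the paths are made-up examples, and it assumes the functions above are in scope):

fn zshrc_lookup_example() {
    // With ZDOTDIR set, the rc file is looked up inside it...
    std::env::set_var("ZDOTDIR", "/home/user/.config/zsh");
    assert_eq!(zshrc(), std::path::PathBuf::from("/home/user/.config/zsh/.zshrc"));

    // ...and with it unset, zshrc() falls back to HOME_DIR.join(".zshrc").
    std::env::remove_var("ZDOTDIR");
}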
pub fn run_antidote(ctx: &ExecutionContext) -> Result<()> {
let zsh = require("zsh")?;
let mut antidote = zdotdir().join(".antidote").require()?;
antidote.push("antidote.zsh");

print_separator("antidote");

ctx.run_type()
.execute(zsh)
.arg("-c")
.arg(format!("source {} && antidote update", antidote.display()))
.status_checked()
}

pub fn run_antibody(ctx: &ExecutionContext) -> Result<()> {
require("zsh")?;
let antibody = require("antibody")?;

print_separator("antibody");

run_type.execute(antibody).arg("update").status_checked()
ctx.run_type().execute(antibody).arg("update").status_checked()
}

pub fn run_antigen(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
pub fn run_antigen(ctx: &ExecutionContext) -> Result<()> {
let zsh = require("zsh")?;
let zshrc = zshrc(base_dirs).require()?;
let zshrc = zshrc().require()?;
env::var("ADOTDIR")
.map(PathBuf::from)
.unwrap_or_else(|_| base_dirs.home_dir().join("antigen.zsh"))
.unwrap_or_else(|_| HOME_DIR.join("antigen.zsh"))
.require()?;

print_separator("antigen");

let cmd = format!("source {} && (antigen selfupdate ; antigen update)", zshrc.display());
run_type.execute(zsh).args(["-l", "-c", cmd.as_str()]).status_checked()
ctx.run_type()
.execute(zsh)
.args(["-l", "-c", cmd.as_str()])
.status_checked()
}

pub fn run_zgenom(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
pub fn run_zgenom(ctx: &ExecutionContext) -> Result<()> {
let zsh = require("zsh")?;
let zshrc = zshrc(base_dirs).require()?;
let zshrc = zshrc().require()?;
env::var("ZGEN_SOURCE")
.map(PathBuf::from)
.unwrap_or_else(|_| base_dirs.home_dir().join(".zgenom"))
.unwrap_or_else(|_| HOME_DIR.join(".zgenom"))
.require()?;

print_separator("zgenom");

let cmd = format!("source {} && zgenom selfupdate && zgenom update", zshrc.display());
run_type.execute(zsh).args(["-l", "-c", cmd.as_str()]).status_checked()
ctx.run_type()
.execute(zsh)
.args(["-l", "-c", cmd.as_str()])
.status_checked()
}

pub fn run_zplug(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
pub fn run_zplug(ctx: &ExecutionContext) -> Result<()> {
let zsh = require("zsh")?;
zshrc(base_dirs).require()?;
zshrc().require()?;

env::var("ZPLUG_HOME")
.map(PathBuf::from)
.unwrap_or_else(|_| base_dirs.home_dir().join(".zplug"))
.unwrap_or_else(|_| HOME_DIR.join(".zplug"))
.require()?;

print_separator("zplug");

run_type
ctx.run_type()
.execute(zsh)
.args(["-i", "-c", "zplug update"])
.status_checked()
}

pub fn run_zinit(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
pub fn run_zinit(ctx: &ExecutionContext) -> Result<()> {
let zsh = require("zsh")?;
let zshrc = zshrc(base_dirs).require()?;
let zshrc = zshrc().require()?;

env::var("ZINIT_HOME")
.map(PathBuf::from)
.unwrap_or_else(|_| base_dirs.home_dir().join(".zinit"))
.unwrap_or_else(|_| XDG_DIRS.data_dir().join("zinit"))
.require()?;

print_separator("zinit");

let cmd = format!("source {} && zinit self-update && zinit update --all", zshrc.display(),);
run_type.execute(zsh).args(["-i", "-c", cmd.as_str()]).status_checked()
let cmd = format!("source {} && zinit self-update && zinit update --all", zshrc.display());
ctx.run_type()
.execute(zsh)
.args(["-i", "-c", cmd.as_str()])
.status_checked()
}

pub fn run_zi(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
pub fn run_zi(ctx: &ExecutionContext) -> Result<()> {
let zsh = require("zsh")?;
let zshrc = zshrc(base_dirs).require()?;
let zshrc = zshrc().require()?;

base_dirs.home_dir().join(".zi").require()?;
HOME_DIR.join(".zi").require()?;

print_separator("zi");

let cmd = format!("source {} && zi self-update && zi update --all", zshrc.display(),);
run_type.execute(zsh).args(["-i", "-c", &cmd]).status_checked()
let cmd = format!("source {} && zi self-update && zi update --all", zshrc.display());
ctx.run_type().execute(zsh).args(["-i", "-c", &cmd]).status_checked()
}

pub fn run_zim(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
pub fn run_zim(ctx: &ExecutionContext) -> Result<()> {
let zsh = require("zsh")?;
env::var("ZIM_HOME")
.or_else(|_| {
@@ -123,12 +154,12 @@ pub fn run_zim(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
.map(|o| o.stdout)
})
.map(PathBuf::from)
.unwrap_or_else(|_| base_dirs.home_dir().join(".zim"))
.unwrap_or_else(|_| HOME_DIR.join(".zim"))
.require()?;

print_separator("zim");

run_type
ctx.run_type()
.execute(zsh)
.args(["-i", "-c", "zimfw upgrade && zimfw update"])
.status_checked()
@@ -136,7 +167,34 @@ pub fn run_zim(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {

pub fn run_oh_my_zsh(ctx: &ExecutionContext) -> Result<()> {
require("zsh")?;
let oh_my_zsh = ctx.base_dirs().home_dir().join(".oh-my-zsh").require()?;

// When updating `oh-my-zsh` on a remote machine through topgrade, the
// following processes will be created:
//
// SSH -> ZSH -> ZSH ($SHELL) -> topgrade -> ZSH
//
// The first ZSH process, won't source zshrc (as it is a login shell),
// and thus it won't have the ZSH environment variable, as a result, the
// children processes won't get it either, so we source the zshrc and set
// the ZSH variable for topgrade here.
if ctx.under_ssh() {
let res_env_zsh = Command::new("zsh")
.args(["-ic", "print -rn -- ${ZSH:?}"])
.output_checked_utf8();

// this command will fail if `ZSH` is not set
if let Ok(output) = res_env_zsh {
let env_zsh = output.stdout;
debug!("Oh-my-zsh: under SSH, setting ZSH={}", env_zsh);
env::set_var("ZSH", env_zsh);
}
}

let oh_my_zsh = env::var("ZSH")
.map(PathBuf::from)
// default to `~/.oh-my-zsh`
.unwrap_or(HOME_DIR.join(".oh-my-zsh"))
.require()?;

print_separator("oh-my-zsh");

@@ -161,22 +219,20 @@ pub fn run_oh_my_zsh(ctx: &ExecutionContext) -> Result<()> {

debug!("oh-my-zsh custom dir: {}", custom_dir.display());

let mut custom_repos = Repositories::new(ctx.git());
let mut custom_repos = RepoStep::try_new()?;

for entry in WalkDir::new(custom_dir).max_depth(2) {
let entry = entry?;
custom_repos.insert_if_repo(entry.path());
}

custom_repos.remove(&oh_my_zsh.to_string_lossy());
if !custom_repos.is_empty() {
println!("Pulling custom plugins and themes");
ctx.git().multi_pull(&custom_repos, ctx)?;
}

custom_repos.remove(&oh_my_zsh);
ctx.run_type()
.execute("zsh")
.env("ZSH", &oh_my_zsh)
.arg(&oh_my_zsh.join("tools/upgrade.sh"))
// oh-my-zsh returns 80 when it is already updated and no changes pulled
// in this update.
// See this comment: https://github.com/r-darwish/topgrade/issues/569#issuecomment-736756731
// for more information.
.status_checked_with_codes(&[80])
}
21 src/sudo.rs
@@ -4,6 +4,8 @@ use std::path::PathBuf;

use color_eyre::eyre::Context;
use color_eyre::eyre::Result;
use serde::Deserialize;
use strum::AsRefStr;

use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
@@ -27,9 +29,15 @@ impl Sudo {
.or_else(|| which("sudo").map(|p| (p, SudoKind::Sudo)))
.or_else(|| which("gsudo").map(|p| (p, SudoKind::Gsudo)))
.or_else(|| which("pkexec").map(|p| (p, SudoKind::Pkexec)))
.or_else(|| which("please").map(|p| (p, SudoKind::Please)))
.map(|(path, kind)| Self { path, kind })
}

/// Create Sudo from SudoKind, if found in the system
pub fn new(kind: SudoKind) -> Option<Self> {
which(kind.as_ref()).map(|path| Self { path, kind })
}

/// Elevate permissions with `sudo`.
///
/// This helps prevent blocking `sudo` prompts from stopping the run in the middle of a
@@ -71,6 +79,12 @@ impl Sudo {
// See: https://linux.die.net/man/1/pkexec
cmd.arg("echo");
}
SudoKind::Please => {
// From `man please`
// -w, --warm
// Warm the access token and exit.
cmd.arg("-w");
}
}
cmd.status_checked().wrap_err("Failed to elevate permissions")
}
@@ -93,12 +107,15 @@ impl Sudo {
}
}

#[derive(Clone, Copy, Debug)]
enum SudoKind {
#[derive(Clone, Copy, Debug, Deserialize, AsRefStr)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
pub enum SudoKind {
Doas,
Sudo,
Gsudo,
Pkexec,
Please,
}

impl AsRef<OsStr> for Sudo {
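The new derives on SudoKind pair serde's lowercase rename (a config value such as "doas" deserializes straight into the enum) with strum's AsRefStr (the variant converts back into the binary name that which looks up). A minimal standalone sketch of that pattern, using a hypothetical Elevator enum and serde_json purely for brevity (strum is assumed to be built with its derive feature; none of these names are from the diff itself):

use serde::Deserialize;
use strum::AsRefStr;

#[derive(Clone, Copy, Debug, PartialEq, Deserialize, AsRefStr)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
enum Elevator {
    Doas,
    Sudo,
    Pkexec,
}

fn main() {
    // A lowercase string from a config file maps onto the enum...
    let kind: Elevator = serde_json::from_str("\"doas\"").unwrap();
    assert_eq!(kind, Elevator::Doas);

    // ...and the enum maps back to the executable name to search for on $PATH.
    let name: &str = kind.as_ref();
    assert_eq!(name, "doas");
}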
@@ -1,8 +1,6 @@
use std::cmp::{max, min};
use std::env;
use std::io::{self, Write};
#[cfg(target_os = "linux")]
use std::path::PathBuf;
use std::process::Command;
use std::sync::Mutex;
use std::time::Duration;
@@ -12,7 +10,6 @@ use color_eyre::eyre;
use color_eyre::eyre::Context;
use console::{style, Key, Term};
use lazy_static::lazy_static;
#[cfg(target_os = "macos")]
use notify_rust::{Notification, Timeout};
use tracing::{debug, error};
#[cfg(windows)]
@@ -20,10 +17,7 @@ use which_crate::which;

use crate::command::CommandExt;
use crate::report::StepResult;
#[cfg(target_os = "linux")]
use crate::terminal;
#[cfg(target_os = "linux")]
use crate::utils::which;

lazy_static! {
static ref TERMINAL: Mutex<Terminal> = Mutex::new(Terminal::new());
}
@@ -49,8 +43,6 @@ struct Terminal {
set_title: bool,
display_time: bool,
desktop_notification: bool,
#[cfg(target_os = "linux")]
notify_send: Option<PathBuf>,
}

impl Terminal {
@@ -65,8 +57,6 @@ impl Terminal {
set_title: true,
display_time: true,
desktop_notification: false,
#[cfg(target_os = "linux")]
notify_send: which("notify-send"),
}
}

@@ -82,35 +72,18 @@ impl Terminal {
self.display_time = display_time
}

#[allow(unused_variables)]
fn notify_desktop<P: AsRef<str>>(&self, message: P, timeout: Option<Duration>) {
debug!("Desktop notification: {}", message.as_ref());
cfg_if::cfg_if! {
if #[cfg(target_os = "macos")] {
let mut notification = Notification::new();
notification.summary("Topgrade")
.body(message.as_ref())
.appname("topgrade");
let mut notification = Notification::new();
notification
.summary("Topgrade")
.body(message.as_ref())
.appname("topgrade");

if let Some(timeout) = timeout {
notification.timeout(Timeout::Milliseconds(timeout.as_millis() as u32));
}
notification.show().ok();
} else if #[cfg(target_os = "linux")] {
if let Some(ns) = self.notify_send.as_ref() {
let mut command = Command::new(ns);
if let Some(timeout) = timeout {
command.arg("-t");
command.arg(format!("{}", timeout.as_millis()));
}
command.args(["-a", "Topgrade", "Topgrade"]);
command.arg(message.as_ref());
if let Err(err) = command.output_checked() {
terminal::print_warning("Sending notification failed with {err:?}");
}
}
}
if let Some(timeout) = timeout {
notification.timeout(Timeout::Milliseconds(timeout.as_millis() as u32));
}
notification.show().ok();
}

fn print_separator<P: AsRef<str>>(&mut self, message: P) {
202 src/utils.rs
@@ -1,11 +1,21 @@
use crate::error::SkipStep;
use color_eyre::eyre::Result;

use std::env;
use std::ffi::OsStr;
use std::fmt::Debug;
use std::path::{Path, PathBuf};
use std::process::Command;

use color_eyre::eyre::Result;

use tracing::{debug, error};
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::reload::{Handle, Layer};
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::{fmt, Registry};
use tracing_subscriber::{registry, EnvFilter};

use crate::command::CommandExt;
use crate::config::DEFAULT_LOG_LEVEL;
use crate::error::SkipStep;

pub trait PathExt
where
@@ -101,54 +111,162 @@ pub fn require_option<T>(option: Option<T>, cause: String) -> Result<T> {
}
}

/* sys-info-rs
*
* Copyright (c) 2015 Siyu Wang
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
pub fn string_prepend_str(string: &mut String, s: &str) {
let mut new_string = String::with_capacity(string.len() + s.len());
new_string.push_str(s);
new_string.push_str(string);
*string = new_string;
}
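For reference, a small usage sketch of the helper above (the values are hypothetical, and it assumes string_prepend_str is in scope):

fn main() {
    // Prepends in place: the prefix ends up in front of the existing contents.
    let mut cmd = String::from("topgrade --dry-run");
    string_prepend_str(&mut cmd, "sudo ");
    assert_eq!(cmd, "sudo topgrade --dry-run");
}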
#[cfg(target_family = "unix")]
pub fn hostname() -> Result<String> {
use std::ffi;
extern crate libc;

unsafe {
let buf_size = libc::sysconf(libc::_SC_HOST_NAME_MAX) as usize;
let mut buf = Vec::<u8>::with_capacity(buf_size + 1);

if libc::gethostname(buf.as_mut_ptr() as *mut libc::c_char, buf_size) < 0 {
return Err(SkipStep(format!("Failed to get hostname: {}", std::io::Error::last_os_error())).into());
}
let hostname_len = libc::strnlen(buf.as_ptr() as *const libc::c_char, buf_size);
buf.set_len(hostname_len);

Ok(ffi::CString::new(buf).unwrap().into_string().unwrap())
match nix::unistd::gethostname() {
Ok(os_str) => Ok(os_str
.into_string()
.map_err(|_| SkipStep("Failed to get a UTF-8 encoded hostname".into()))?),
Err(e) => Err(e.into()),
}
}

#[cfg(target_family = "windows")]
pub fn hostname() -> Result<String> {
use crate::command::CommandExt;
use std::process::Command;

Command::new("hostname")
.output_checked_utf8()
.map_err(|err| SkipStep(format!("Failed to get hostname: {err}")).into())
.map(|output| output.stdout.trim().to_owned())
}

pub mod merge_strategies {
use merge::Merge;

use crate::config::Commands;

/// Prepends right to left (both Option<Vec<T>>)
pub fn vec_prepend_opt<T>(left: &mut Option<Vec<T>>, right: Option<Vec<T>>) {
if let Some(left_vec) = left {
if let Some(mut right_vec) = right {
right_vec.append(left_vec);
let _ = std::mem::replace(left, Some(right_vec));
}
} else {
*left = right;
}
}

/// Appends an Option<String> to another Option<String>
pub fn string_append_opt(left: &mut Option<String>, right: Option<String>) {
if let Some(left_str) = left {
if let Some(right_str) = right {
left_str.push(' ');
left_str.push_str(&right_str);
}
} else {
*left = right;
}
}

pub fn inner_merge_opt<T>(left: &mut Option<T>, right: Option<T>)
where
T: Merge,
{
if let Some(ref mut left_inner) = left {
if let Some(right_inner) = right {
left_inner.merge(right_inner);
}
} else {
*left = right;
}
}

pub fn commands_merge_opt(left: &mut Option<Commands>, right: Option<Commands>) {
if let Some(ref mut left_inner) = left {
if let Some(right_inner) = right {
left_inner.extend(right_inner);
}
} else {
*left = right;
}
}
}
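A short sketch of how the Option-aware merge helpers above behave (assuming the merge_strategies module is in scope; the flag values are made up):

fn main() {
    // vec_prepend_opt: the right-hand list ends up in front of the left-hand one.
    let mut args = Some(vec!["--cleanup".to_string()]);
    merge_strategies::vec_prepend_opt(&mut args, Some(vec!["--verbose".to_string()]));
    assert_eq!(args, Some(vec!["--verbose".to_string(), "--cleanup".to_string()]));

    // string_append_opt: the right-hand string is appended after a space.
    let mut flags = Some("-l".to_string());
    merge_strategies::string_append_opt(&mut flags, Some("-c".to_string()));
    assert_eq!(flags, Some("-l -c".to_string()));

    // When the left side is None, both helpers simply take the right side.
    let mut empty: Option<String> = None;
    merge_strategies::string_append_opt(&mut empty, Some("-i".to_string()));
    assert_eq!(empty, Some("-i".to_string()));
}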
// Skip causes
// TODO: Put them in a better place when we have more of them
pub const REQUIRE_SUDO: &str = "Require sudo or counterpart but not found, skip";

/// Return `Err(SkipStep)` if `python` is a Python 2 or shim.
///
/// # Shim
/// On Windows, if you install `python` through `winget`, an actual `python`
/// is installed as well as a `python3` shim. Shim is invokable, but when you
/// execute it, the Microsoft App Store will be launched instead of a Python
/// shell.
///
/// We do this check through `python -V`, a shim will just give `Python` with
/// no version number.
pub fn check_is_python_2_or_shim(python: PathBuf) -> Result<PathBuf> {
let output = Command::new(&python).arg("-V").output_checked_utf8()?;
// "Python x.x.x\n"
let stdout = output.stdout;
// ["Python"] or ["Python", "x.x.x"], the newline char is trimmed.
let mut split = stdout.split_whitespace();

if let Some(version) = split.nth(1) {
let major_version = version
.split('.')
.next()
.expect("Should have a major version number")
.parse::<u32>()
.expect("Major version should be a valid number");
if major_version == 2 {
return Err(SkipStep(format!("{} is a Python 2, skip.", python.display())).into());
}
} else {
// No version number, is a shim
return Err(SkipStep(format!("{} is a Python shim, skip.", python.display())).into());
}

Ok(python)
}
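The doc comment above keys the check off the `python -V` output. A minimal sketch of the same split-on-whitespace idea on hypothetical outputs (the version numbers are examples only, and parsed_major_version is an illustrative helper, not part of the diff):

// A real interpreter prints "Python <major>.<minor>.<patch>"; the winget shim prints just "Python".
fn parsed_major_version(stdout: &str) -> Option<u32> {
    stdout
        .split_whitespace()
        .nth(1)? // None for the shim: there is no version token at all.
        .split('.')
        .next()?
        .parse()
        .ok()
}

fn main() {
    assert_eq!(parsed_major_version("Python 3.11.4\n"), Some(3));
    assert_eq!(parsed_major_version("Python 2.7.18\n"), Some(2)); // would be skipped as Python 2
    assert_eq!(parsed_major_version("Python\n"), None); // shim: skipped as well
}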
/// Set up the tracing logger
///
/// # Return value
/// A reload handle will be returned so that we can change the log level at
/// runtime.
pub fn install_tracing(filter_directives: &str) -> Result<Handle<EnvFilter, Registry>> {
let env_filter = EnvFilter::try_new(filter_directives)
.or_else(|_| EnvFilter::try_from_default_env())
.or_else(|_| EnvFilter::try_new(DEFAULT_LOG_LEVEL))?;

let fmt_layer = fmt::layer().with_target(false).without_time();

let (filter, reload_handle) = Layer::new(env_filter);

registry().with(filter).with(fmt_layer).init();

Ok(reload_handle)
}

/// Update the tracing logger with new `filter_directives`.
pub fn update_tracing(reload_handle: &Handle<EnvFilter, Registry>, filter_directives: &str) -> Result<()> {
let new = EnvFilter::try_new(filter_directives)
.or_else(|_| EnvFilter::try_from_default_env())
.or_else(|_| EnvFilter::try_new(DEFAULT_LOG_LEVEL))?;
reload_handle.modify(|old| *old = new)?;

Ok(())
}
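A minimal usage sketch for the two tracing helpers above (assuming they are in scope alongside the color_eyre and tracing crates): install the subscriber once at startup, keep the returned reload handle, and use it later to change the filter without restarting.

fn main() -> color_eyre::eyre::Result<()> {
    // Start with a quiet filter; the handle lets us loosen it later.
    let reload_handle = install_tracing("warn")?;
    tracing::debug!("not shown yet");

    // Later, e.g. after the configuration asks for more verbosity:
    update_tracing(&reload_handle, "debug")?;
    tracing::debug!("now visible");
    Ok(())
}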
/// Set up the error handler crate
pub fn install_color_eyre() -> Result<()> {
color_eyre::config::HookBuilder::new()
// Don't display the backtrace reminder by default:
// Backtrace omitted. Run with RUST_BACKTRACE=1 environment variable to display it.
// Run with RUST_BACKTRACE=full to include source snippets.
.display_env_section(false)
// Display location information by default:
// Location:
// src/steps.rs:92
.display_location_section(true)
.install()
}