Compare commits

..

261 Commits

Author SHA1 Message Date
SteveLauC
1ebcc9beee chore: prepare for v15.0.0 (#843) 2024-07-01 09:45:20 +08:00
SteveLauC
55e1bbf2b9 feat: new step Lensfun's database update (#839)
* feat: new step Lensfun's database update

* refactor: take 1 as a success exit code
2024-06-30 22:41:09 +08:00
SteveLauC
f2dfa1e475 fix: consider TMUX_PLUGIN_MANAGER_PATH when searching tpm binary (#835)
* fix: consider TMUX_PLUGIN_MANAGER_PATH when searching tpm binary

* fix: correct update_plugins path when env var is present
2024-06-30 19:17:30 +08:00
SteveLauC
fcd53e772a chore: collect --dry-run and --yes opts info in feature request template (#838)
chore: collect --dry-run and --yes opts info in feature request template
2024-06-30 14:17:45 +08:00
dependabot[bot]
8b9d7ef8f3 chore(deps): bump curve25519-dalek from 4.1.2 to 4.1.3 (#827)
Bumps [curve25519-dalek](https://github.com/dalek-cryptography/curve25519-dalek) from 4.1.2 to 4.1.3.
- [Release notes](https://github.com/dalek-cryptography/curve25519-dalek/releases)
- [Commits](https://github.com/dalek-cryptography/curve25519-dalek/compare/curve25519-4.1.2...curve25519-4.1.3)

---
updated-dependencies:
- dependency-name: curve25519-dalek
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-06-19 09:49:20 +08:00
SteveLauC
d8406a8cfe style: allow unused ExecutorChild (#829)
* style: allow unused ExecutorChild

* style: remove duplicate cfg on windows
2024-06-19 09:43:26 +08:00
SteveLauC
4a9ef581e5 chore: bump deps (#823) 2024-06-13 09:21:42 +08:00
Tamás Demeter-Haludka
a52db1f261 Run MasonUpdate as part of the vim updates (#821)
feat(vim): add mason update
2024-06-13 09:00:15 +08:00
Yaroslav Markin
8e16174ce7 fix(RubyGems): support no-sudo updating for rbenv and rvm (#820) 2024-06-06 19:37:06 +08:00
huajingyun
c748bb5d7a deps: bump libc from 0.2.153 to 0.2.155 (#818) 2024-05-28 09:23:10 +08:00
lachsdachs
3cc8f0d818 Add linux mint support (#817)
Update linux.rs
2024-05-26 16:26:11 +08:00
SteveLauC
f96eeeda6b chore: build binary for both macOS aarch64 and amd64 (#816) 2024-05-25 20:26:21 +08:00
SteveLauC
d1d8904376 ci: replace deprecated gh actions with alternatives (#814) 2024-05-25 19:29:17 +08:00
SteveLauC
3b329fe687 chore: update PR template (#815) 2024-05-25 17:35:46 +08:00
SteveLauC
9eb1b4ac9f ci: remove code coverage test & uniform file names (#811) 2024-05-24 09:02:05 +08:00
lachsdachs
c4c0bd7383 add upgrade stuff for bedrock linuxmint strata (#813) 2024-05-24 09:01:46 +08:00
alice
1e9de5832d feat: add support for chimera linux (#808)
since it also uses apk, the update/upgrade is identical to Alpine/Wolfi
2024-05-19 18:48:51 +08:00
dependabot[bot]
f2b17cdd9d chore(deps): bump mio from 0.8.10 to 0.8.11 (#729)
Bumps [mio](https://github.com/tokio-rs/mio) from 0.8.10 to 0.8.11.
- [Release notes](https://github.com/tokio-rs/mio/releases)
- [Changelog](https://github.com/tokio-rs/mio/blob/master/CHANGELOG.md)
- [Commits](https://github.com/tokio-rs/mio/compare/v0.8.10...v0.8.11)

---
updated-dependencies:
- dependency-name: mio
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-05-19 09:55:30 +08:00
dependabot[bot]
7bfd6c2439 chore(deps): bump h2 from 0.3.24 to 0.3.26 (#766)
Bumps [h2](https://github.com/hyperium/h2) from 0.3.24 to 0.3.26.
- [Release notes](https://github.com/hyperium/h2/releases)
- [Changelog](https://github.com/hyperium/h2/blob/v0.3.26/CHANGELOG.md)
- [Commits](https://github.com/hyperium/h2/compare/v0.3.24...v0.3.26)

---
updated-dependencies:
- dependency-name: h2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-05-19 09:55:05 +08:00
dependabot[bot]
0e8d5f0266 chore(deps): bump rustls from 0.21.10 to 0.21.12 (#804)
Bumps [rustls](https://github.com/rustls/rustls) from 0.21.10 to 0.21.12.
- [Release notes](https://github.com/rustls/rustls/releases)
- [Changelog](https://github.com/rustls/rustls/blob/main/CHANGELOG.md)
- [Commits](https://github.com/rustls/rustls/compare/v/0.21.10...v/0.21.12)

---
updated-dependencies:
- dependency-name: rustls
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-05-19 09:54:27 +08:00
Nils
32add8f046 Dependabot Updates (#802)
* chore(deps): bump actions/checkout from 3 to 4

Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v3...v4)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>

* chore(deps): bump github/codeql-action from 2 to 3

Bumps [github/codeql-action](https://github.com/github/codeql-action) from 2 to 3.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/github/codeql-action/compare/v2...v3)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>

* chore(deps): bump softprops/action-gh-release from 1 to 2

Bumps [softprops/action-gh-release](https://github.com/softprops/action-gh-release) from 1 to 2.
- [Release notes](https://github.com/softprops/action-gh-release/releases)
- [Changelog](https://github.com/softprops/action-gh-release/blob/master/CHANGELOG.md)
- [Commits](https://github.com/softprops/action-gh-release/compare/v1...v2)

---
updated-dependencies:
- dependency-name: softprops/action-gh-release
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-05-18 19:48:09 +08:00
SteveLauC
f661f00277 feat: support auto-cpufreq (#800) 2024-05-18 16:34:03 +08:00
Alok Singh
2a1999fe20 Add rye support (#799)
Rye is a new cargo-like package manager for Python by @mitsuhiko.
2024-05-13 20:52:13 +08:00
SteveLauC
4d66431aad fix: Fedora Sway Atomic should be recognized as FedoraImmutable (#795)
* fix: Fedora Sway Atomic should be recognized as FedoraImmutable

* style: fmt
2024-05-11 11:20:43 +08:00
SteveLauC
767f0d91f4 refactor: 2 clippy warnings (#789) 2024-05-06 20:37:55 +08:00
edi
a3428e3477 Always display windows update step (#781)
* always display windows update step

* remove extra comma

* i guess format wants the comma
2024-05-06 20:24:57 +08:00
David C
614131b7bf fix(os): detect Fedora IoT Edition as immutable Fedora variant (#774)
Without this change, it is detected as a regular Fedora variant and
updating fails because neither `dnf` nor `yum` is found.
2024-04-17 09:05:54 +08:00
Dan Sully
9b0681f3b8 Add config flag to toggle verbose Git repository output. (#763)
* Add config flag to toggle verbose Git repository output.

If `true`: the default, no change.

If `false`: Only show repositories that have been updated or have an error.

Minor tweak to output (removed colon) so that copy and paste for 'cd' is nicer.
2024-04-14 10:28:03 +08:00
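The verbosity toggle described above, sketched as it might appear in `topgrade.toml`. This is a hedged illustration only: the `[git]` section placement and the key name `verbose_output` are assumptions, so check `config.example.toml` for the actual option name.

```toml
# Hypothetical sketch -- the [git] placement and the `verbose_output` name are assumed.
[git]
# true (default): unchanged behavior, every repository is reported.
# false: only repositories that were updated or hit an error are shown.
verbose_output = false
```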
Andre Toerien
ecf8fb7a47 fix: better dotnet tool list header parsing (#772)
fix: better dotnet tool list header parsing
2024-04-14 09:10:08 +08:00
Andrew Barchuk
04bfb45a97 Fix local host detection for remotes with user (#755) 2024-04-08 19:43:32 +08:00
SteveLauC
d90ce30452 feat: support update PlatformIO Core (#759) 2024-04-07 11:03:33 +08:00
Ricardo Torres
ab21600ca6 feat: add support for mise (#757)
Add support for mise-en-place (or mise). Mise is a tool like asdf (already supported). https://mise.jdx.dev/
2024-03-30 18:40:16 +08:00
λP.(P izzy)
728ea26204 FIXES #708: add config directive for pkg_* cleanup on OpenBSD (#753)
FIXES #708: add config directive for pkg_* cleanup on OpenBSD
2024-03-26 11:07:39 +08:00
SteveLauC
373cd3b3ae fix: don't use Command::new(bin_name) as it won't work on Windows (#750) 2024-03-24 11:48:17 +08:00
SteveLauC
f4e0258b09 style: fix 2 clippy lint unless_vec & unused_io_amount (#751) 2024-03-24 11:24:39 +08:00
SteveLauC
d50360a69a feat: support update ClamAV databases (#747) 2024-03-19 14:10:47 +08:00
SteveLauC
351922c81f feat: put step logs in a span (#746) 2024-03-16 14:17:19 +08:00
Alok Singh
9518f43866 Add support for Lean 4's elan (#742) 2024-03-16 09:35:47 +08:00
SteveLauC
2c1ce3d4e6 refactor: make GitSteps a dedicated step (#737) 2024-03-09 17:57:33 +08:00
SteveLauC
12116c3261 fix: use env BUN_INSTALL to locate package.json (#734) 2024-03-07 14:12:16 +08:00
Gerald Chen
fbc84e8aa1 fix(pipx): adds --include-injected argument to pipx (#726) 2024-03-01 15:06:23 +08:00
Brent Monning
6dab1e4f37 feat: adds xcodes step (#643) 2024-03-01 07:58:24 +08:00
Lucas Parzianello
650a143602 Adds pyenv step (#724) 2024-02-27 09:25:18 +08:00
Nils
9b6027fe78 Update GitHub Actions workflow for Codecov integration (#718)
- Refine the testing matrix to include only stable and nightly versions of Rust
- Add 'fail_ci_if_error' option to Codecov step for stricter CI checks
- Ensure newline at end of file
2024-02-25 11:19:09 +08:00
Nils
0e30e05ce8 Add GitHub Actions Workflow for Build and Test (#717)
* "Add *.profraw files to .gitignore

*.profraw files are generated by LLVM's Clang compiler when using the -fprofile-instr-generate option for Profile Guided Optimization. These files contain raw profiling data and should not be version controlled."

* Remove redundant import of TryFrom trait

The TryFrom trait was being imported explicitly in src\steps\os\windows.rs, even though it's already part of the Rust prelude and automatically imported into every Rust program. This was causing a compiler warning. This commit comments out the redundant import to resolve the warning.

* Add GitHub Actions workflow for Rust build and test

This commit adds a new GitHub Actions workflow for building and testing the Rust project across multiple operating systems (Ubuntu, Windows, macOS) and Rust versions (stable, beta, nightly). It also includes caching for dependencies and build artifacts, and uploads code coverage reports to Codecov.

* Update Codecov action and add token for coverage report upload

This commit updates the version of the Codecov GitHub Action used to upload coverage reports from v4 to v4.0.1. It also adds a token from the repository secrets to authenticate the upload. This ensures secure and authorized communication with the Codecov service.

* "Fix misuse of --jobs flag in cargo test command"

* "Fix grcov command in GitHub Actions workflow

The grcov command was previously prefixed with './', which caused an error because grcov was not found in the current directory. This commit removes the './' prefix to call grcov from the global path, where it is installed."

* Update GitHub Actions workflow for cross-platform compatibility

This commit modifies the 'build-and-test.yml' GitHub Actions workflow to ensure it works correctly across different operating systems (Ubuntu, Windows, MacOS). The RUSTFLAGS environment variable is now set in a cross-platform compatible way. The workflow will run the build and test process on every pull request and push to the main branch, generate a coverage report, and upload it to Codecov.

* Changed workflow trigger event to 'workflow_run' completion of 'Build and test' workflow

* "Updated GitHub Actions workflow to correctly set environment variables for code coverage"

* Renamed build and test workflow

* Update GitHub Actions workflow trigger

Change the trigger of the 'Test with Code Coverage' workflow to run when the 'build-and-test' workflow is completed. This ensures that code coverage is only calculated after successful build and test runs.

* Update workflow_run trigger in code-coverage.yml

* Fix CODECOV_TOKEN in code-coverage.yml workflow

* Update code-coverage workflow to trigger on pull requests and pushes to main branch

* Update .gitignore file to exclude LLVM profiling output

* Add empty line at the end

* Remove unused import in windows.rs

* Update .github/workflows/build-and-test.yml

Co-authored-by: SteveLauC <stevelauc@outlook.com>

* Update .github/workflows/build-and-test.yml

Co-authored-by: SteveLauC <stevelauc@outlook.com>

* Remove code coverage workflow

---------

Co-authored-by: SteveLauC <stevelauc@outlook.com>
2024-02-25 10:35:56 +08:00
Nils
eea952fa78 Create devskim.yml to enable GitHub code scanning for this repository (#700) 2024-02-24 18:53:10 +08:00
SteveLauC
6071a1ee3b chore: git ignore more (#715) 2024-02-24 13:45:53 +08:00
SteveLauC
a801b7b9f4 chore: bump deps (#714) 2024-02-24 13:14:53 +08:00
SteveLauC
c6e3f0ae0a revert: revert 614 to remove the -p option (#713) 2024-02-24 11:26:41 +08:00
SteveLauC
a43b03d3db feat: also detect Helix step with bin name hx (#710) 2024-02-23 07:39:31 +08:00
Md Isfarul Haque
12b0fa57ad fix: fetch and build Helix grammar as a regular user (#698) 2024-02-23 07:26:08 +08:00
Nils
d9e304f0ef Add .vs to .gitignore (#706)
* Added .vs folder to .gitignore

* Adjust .vs to .vs/
2024-02-22 09:47:37 +08:00
dependabot[bot]
842b92cca7 chore(deps): bump actions/download-artifact from 3 to 4 (#704)
Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 3 to 4.
- [Release notes](https://github.com/actions/download-artifact/releases)
- [Commits](https://github.com/actions/download-artifact/compare/v3...v4)

---
updated-dependencies:
- dependency-name: actions/download-artifact
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-22 09:39:05 +08:00
dependabot[bot]
485f0ec9c8 chore(deps): bump EnricoMi/publish-unit-test-result-action from 1 to 2 (#705)
Bumps [EnricoMi/publish-unit-test-result-action](https://github.com/enricomi/publish-unit-test-result-action) from 1 to 2.
- [Release notes](https://github.com/enricomi/publish-unit-test-result-action/releases)
- [Commits](https://github.com/enricomi/publish-unit-test-result-action/compare/v1...v2)

---
updated-dependencies:
- dependency-name: EnricoMi/publish-unit-test-result-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-22 09:38:40 +08:00
λP.(P izzy)
5e3b5fc9a7 Fix OpenBSD Step failing to build with E0599 (#707)
* fix openbsd support failing with error E0599

* clean up a little formatting in src/os/openbsd.os
2024-02-21 21:10:34 +08:00
SteveLauC
7c63541cad fix: zinit default install location (#625) 2024-02-17 13:15:53 +08:00
SteveLauC
238e089d74 docs: document brew config entries[skip ci] (#696) 2024-02-17 13:14:39 +08:00
luciodaou
8991bc9f62 feat(brew): adds "greedy-latest" option to Brew (#636) 2024-02-17 11:45:57 +08:00
SteveLauC
7a3f3a8905 feat: support waydroid (#687) 2024-02-16 11:57:53 +08:00
dependabot[bot]
e4085e03eb chore(deps): bump actions/checkout from 2 to 4 (#688)
Bumps [actions/checkout](https://github.com/actions/checkout) from 2 to 4.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v2...v4)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-15 16:19:58 +08:00
dependabot[bot]
4b0c366e5f chore(deps): bump actions/upload-artifact from 3 to 4 (#689)
Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 3 to 4.
- [Release notes](https://github.com/actions/upload-artifact/releases)
- [Commits](https://github.com/actions/upload-artifact/compare/v3...v4)

---
updated-dependencies:
- dependency-name: actions/upload-artifact
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-15 16:19:18 +08:00
dependabot[bot]
ea97240d09 chore(deps): bump actions/cache from 1 to 4 (#690)
Bumps [actions/cache](https://github.com/actions/cache) from 1 to 4.
- [Release notes](https://github.com/actions/cache/releases)
- [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md)
- [Commits](https://github.com/actions/cache/compare/v1...v4)

---
updated-dependencies:
- dependency-name: actions/cache
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-15 16:18:47 +08:00
dependabot[bot]
12de531abb chore(deps): bump codecov/codecov-action from 1 to 4 (#691)
Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 1 to 4.
- [Release notes](https://github.com/codecov/codecov-action/releases)
- [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/codecov/codecov-action/compare/v1...v4)

---
updated-dependencies:
- dependency-name: codecov/codecov-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-15 16:18:24 +08:00
dependabot[bot]
c3876ce3bf chore(deps): bump katyo/publish-crates from 1 to 2 (#692)
Bumps [katyo/publish-crates](https://github.com/katyo/publish-crates) from 1 to 2.
- [Release notes](https://github.com/katyo/publish-crates/releases)
- [Commits](https://github.com/katyo/publish-crates/compare/v1...v2)

---
updated-dependencies:
- dependency-name: katyo/publish-crates
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-15 16:18:00 +08:00
SteveLauC
cbbfc3a114 docs: update install doc with Winget (#693) 2024-02-15 16:17:11 +08:00
Nils
ad2bfc9abd Keeping actions up to date with Dependabot (#685) 2024-02-15 16:04:51 +08:00
Nils
528461412e Publish new releases of topgrade to the Windows Package Manager with WinGet Releaser
Publish new releases of topgrade to the Windows Package Manager with WinGet Releaser (GitHub Action).
2024-02-15 16:04:11 +08:00
SteveLauC
64db679390 ci: add macOS aarch64 check (#680) 2024-02-06 16:28:01 +08:00
Wallunen
77a8b3b7d2 feat: add fetch_head configuration option into brew (#679) 2024-02-06 16:17:27 +08:00
Nils
7007e76ab5 Fix/winget (#670)
* cargo update

* Remove the check for 'winget_enable' set to 'true'. On my Windows 10 and 11 machines, there are no issues with Winget anymore. As far as I remember, it was disabled by default because it was buggy back then.

* remove print_warning

* Revert "cargo update"

This reverts commit 5f4e532bc1.

* Removed the `enable_winget = true` configuration as winget is now enabled by default.

* Removed the #[cfg(windows)] flag.

* Revised as Recommended

* Wrapping at 80
2024-02-03 09:09:47 +08:00
Andy Piper
3c970063a9 fix: correct typos in output (#677)
Corrects a grammatical issue and a typo in two of the step output messages.
2024-01-31 09:07:38 +08:00
SteveLauC
b70830015e docs: fix a wrong preposition[skip ci] (#676) 2024-01-30 11:06:32 +08:00
SteveLauC
b43f2c8b3a ci: run cargo test in ci (#674) 2024-01-29 10:36:30 +08:00
RJ Trujillo
c311da16f3 feat: Add support for Wolfi (#672)
* feat: Add support for Wolfi

This adds support for updating Wolfi via Topgrade

* chore(wolfi): Add os release info and unit test

* chore(wolfi): Don't check ID_LIKE as it is unique
2024-01-29 09:11:53 +08:00
Nils
37608a338c Fix/usoclient (#669)
* cargo update

* Implementing a check for Windows 11 and, if detected, skipping Windows Update via usoclient.exe. It is suggested to install PSWindowsUpdate.

* Revert "cargo update"

This reverts commit 43a4d321cf.

* Revert "Implementing a check for Windows 11 and, if detected, skipping Windows Update via usoclient.exe. It is suggested to install PSWindowsUpdate."

This reverts commit e1ef2e4bc5.

* Removed the usoclient step and added an error message.

* cargo fmt
2024-01-29 09:02:40 +08:00
Nils
b07288e674 Fix/pswindowsupdate (#671)
* cargo update

* An elevated PowerShell is required to run Install-WindowsUpdate on my system.

* Revert "cargo update"

This reverts commit fb58ce761a.
2024-01-29 09:01:38 +08:00
Nils
707698faab Update Cargo.lock (#673)
cargo update
2024-01-29 09:00:08 +08:00
SteveLauC
2e70d132d0 feat: certbot renew (#665) 2024-01-28 13:03:30 +08:00
Brent Monning
30c5b31e21 fix: softwareupdate under dry run (#668) 2024-01-27 14:57:10 +08:00
SteveLauC
77ff6cb714 feat: support wildcard in ignored_containers (#666) 2024-01-27 10:54:55 +08:00
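A rough sketch of what the wildcard support above allows. The option name `ignored_containers` comes from the commit title; the `[containers]` section and the glob patterns are assumptions for illustration.

```toml
# Sketch under assumptions: the section name and the image patterns are illustrative only.
[containers]
ignored_containers = [
    "docker.io/library/postgres:*",  # skip every tag of one image
    "ghcr.io/example-org/*",         # skip a whole (hypothetical) registry namespace
]
```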
SteveLauC
ea13c51b7d chore: release v14.0.1 (#662) 2024-01-25 15:40:52 +08:00
Cat Core
3ed763b884 Fix system updates for Nobara (#661)
* Fix system updates for Nobara

* fmt

* Add os-release test for Nobara

* Make requested changes

* cargo fmt
2024-01-24 19:29:20 +08:00
samhanic
10e1e170b7 fix vscode extensions update step (#650)
* fix vscode extensions update using the new update-extensions cli

* fix non-linux compilation
2024-01-24 10:32:00 +08:00
Sandro
ffa62afc66 Follow up to the follow up in #616 (#660) 2024-01-24 10:22:36 +08:00
SteveLauC
f794329913 feat: skip breaking changes notification with env var (#659)
* feat: skip breaking changes notification with env var

* ci: apply that env in ci
2024-01-23 14:50:35 +08:00
SteveLauC
f9a35c7661 docs: add doc on how to do a new release (#658) 2024-01-23 11:58:09 +08:00
SteveLauC
ed496f3462 chore: fix file name typo[skip ci] (#657)
chore: fix file name typo
2024-01-23 11:50:02 +08:00
Rui Chen
6accdae232 workflows(homebrew): replace Homebrew/actions/bump-formulae with Homebrew/actions/bump-packages (#656)
Signed-off-by: Rui Chen <rui@chenrui.dev>
2024-01-23 10:29:48 +08:00
SteveLauC
96efcc6c0d chore: release v14.0.0 (#652) 2024-01-22 11:13:33 +08:00
SteveLauC
bf72d7bb5a fix: oh-my-zsh step issue #646 (#647) 2024-01-22 09:18:27 +08:00
dependabot[bot]
dadffb1081 chore(deps): bump h2 from 0.3.22 to 0.3.24 (#645)
Bumps [h2](https://github.com/hyperium/h2) from 0.3.22 to 0.3.24.
- [Release notes](https://github.com/hyperium/h2/releases)
- [Changelog](https://github.com/hyperium/h2/blob/v0.3.24/CHANGELOG.md)
- [Commits](https://github.com/hyperium/h2/compare/v0.3.22...v0.3.24)

---
updated-dependencies:
- dependency-name: h2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-20 12:13:19 +08:00
Ned Wolpert
78dc567226 Added an Audit step for FreeBSD and DragonFly packages. (#640)
* Added an Audit step for FreeBSD and DragonFly.

Allows auditing of the packages to be disabled, since it is a breaking step.
The current behavior is the default: if the audit fails, topgrade stops. It can
be disabled in the [misc] section independently from other sections.
2024-01-08 09:40:01 +08:00
Mike Wood
362ce4f4f9 fix(os) consider Fedora Kinoite and other immutable distros to be the FedoraImmutable (#638)
* fix(os) consider Fedora Kinoite to be the Fedora Silverblue distribution

* fix(os) support additional Fedora immutable variants

Rename FedoraSilverblue Distribution to FedoraImmutable.  Add test cases for Onyx, Sericea and Silverblue.  Rename upgrade method to match distribution.

Fixes #637
2024-01-08 08:48:48 +08:00
Carrol Cox
ab35cd7b10 feat(pipx-update): add quiet flag for pipx upgrade-all on version 1.4.0+ (#635)
This commit introduces conditional logic to the `run_pipx_update` function that checks the installed version of pipx. If the version is 1.4.0 or higher, the `--quiet` argument is added to the `pipx upgrade-all` command to suppress non-critical output during the upgrade process, adhering to the new feature introduced in pipx 1.4.0 as per the documentation (https://pipx.pypa.io/stable/docs/#pipx-upgrade-all). This change aims to make the upgrade process less verbose and more manageable in automated scripts or CI/CD pipelines where log brevity is beneficial.
2023-12-31 11:38:39 +08:00
SteveLauC
15f4ad7cd1 refactor: update pip if extern managed and global.break-system-packages is true (#634)
refactor: update pip if extern managed and global.break-system-packages is true
2023-12-30 18:23:33 +08:00
Rebecca Turner
cbfb92041f Skip nix upgrade-nix when Nix is installed in a nix profile (#622)
Make `nix upgrade-nix` a separate step

Also check that Nix can be upgraded before running `nix upgrade-nix` to
work around a bug.

See: <https://github.com/NixOS/nix/issues/5473>
2023-12-21 08:55:32 +08:00
SteveLauC
a506c67cac fix: remove deprecated brew option '--ignore-pinned' (#629) 2023-12-19 17:09:32 +08:00
SteveLauC
788e0412f6 feat: inform users of breaking changes on first run (#619) 2023-12-03 09:52:35 +08:00
Nils
18b37ce3e3 Update config.example.toml (#621)
Added WinGet setting:
enable_winget = true
2023-11-26 08:06:17 +08:00
Jakob Fels
a15e6748c7 Add option to ignore containers to pull (#613) 2023-11-24 16:44:52 +08:00
SteveLauC
c6d0539fd2 chore(deps): bump all deps (#618) 2023-11-24 07:50:41 +08:00
LeSnake
3eb3867944 Bun packages fixes (#617)
* fix running with --only

* fix error when no packages installed
2023-11-23 06:36:00 +08:00
DomGlusk
810315b0e2 Make zinit and zi use parallel updates (#614)
* Update zsh.rs to make zinit and zi use parallel

* run cargo fmt

---------

Co-authored-by: Dominic Gluskin <rhinoarmyleader@gmail.com>
2023-11-22 11:18:41 +08:00
SteveLauC
b461fc2536 refactor: cleanup for #615 (#616) 2023-11-22 09:34:21 +08:00
Sam Vente
7e63977ba0 revert git pushing functionalities (#615) 2023-11-22 09:04:19 +08:00
SteveLauC
78dec892cf docs: migration and breaking changes (#606) 2023-11-12 11:43:58 +08:00
pacjo
9ea6628b5c docs: fix typo in config.example.toml (#603)
docs(config): fix typo (dfault -> default)
2023-11-10 10:32:15 +08:00
LeSnake
465df2e9be feat: add Bun packages step (#599) 2023-11-05 10:34:21 +08:00
SteveLauC
61ef926849 chore: update issue template label (#596) 2023-11-01 08:57:57 +08:00
SteveLauC
7fa38c593e fix: omz remote execution if ZSH is not present (#592) 2023-10-29 18:05:20 +08:00
SteveLauC
41c6d1cd9a chore: release v13.0.0 (#579) 2023-10-20 08:07:11 +08:00
dependabot[bot]
cf3893dc49 chore(deps): bump rustix from 0.37.20 to 0.37.25 (#586) 2023-10-19 08:38:28 +08:00
SteveLauC
a2fbe92a25 refactor: make SelfUpdate a step (#585) 2023-10-18 12:19:53 +08:00
SteveLauC
e1754707d8 refactor: remove legacy deprecated macros (#583) 2023-10-18 11:13:14 +08:00
SteveLauC
cd380a53b3 docs: new demo video (#584) 2023-10-18 09:33:37 +08:00
SteveLauC
a8c29fd1a2 fix: make logger work while loading config file (#581) 2023-10-17 11:19:47 +08:00
Sam Vente
6b871e7949 switch git push and pull order (#578) 2023-10-15 17:06:40 +08:00
SteveLauC
1b5fdb6645 fix: shellexpand git.pull_only_repos & git.push_only_repos (#576) 2023-10-13 18:54:42 +08:00
Sam Vente
fe9d877cdf Add support for pushing custom git repositories (#574) 2023-10-13 17:01:35 +08:00
SteveLauC
60e7aa8f03 fix: disable dotnet greeting msg with DOTNET_NOLOGO=true (#573) 2023-10-12 14:37:52 +08:00
SteveLauC
18e2d3e59c chore: always use the latest stable toolchain for CI (#571) 2023-10-11 09:46:36 +08:00
Mylloon
d68fcb08b2 fix: Support yes option for opam upgrade (#570) 2023-10-10 08:08:46 +08:00
Zach Crownover
1f6baefdc3 Fix builds and runs on DragonFly BSD (#569) 2023-10-08 08:13:26 +08:00
SteveLauC
71efce32c1 chore: bump CI toolchain to 1.73.0 (#567) 2023-10-06 12:05:44 +08:00
dependabot[bot]
3626c9cdc8 chore(deps): bump webpki from 0.22.1 to 0.22.2 (#564)
Bumps [webpki](https://github.com/briansmith/webpki) from 0.22.1 to 0.22.2.
- [Commits](https://github.com/briansmith/webpki/commits)

---
updated-dependencies:
- dependency-name: webpki
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-10-03 08:21:00 +08:00
SteveLauC
a23b761304 fix: --yes option for protonup (#560) 2023-09-30 16:44:20 +08:00
SteveLauC
3fd27e4913 chore: add the check for the --yes opt in PR template (#561) 2023-09-30 12:46:01 +08:00
Sohum
b3f152b716 feat(wsl): pass verbose to topgrade-in-wsl (#556)
Closes #521
2023-09-26 11:11:19 +08:00
PabloMarcendo
df381f3a79 feat: add option for nix-env arguments (#555) 2023-09-21 09:05:03 +08:00
dependabot[bot]
2dec9db310 chore(deps): bump webpki from 0.22.0 to 0.22.1 (#554)
Bumps [webpki](https://github.com/briansmith/webpki) from 0.22.0 to 0.22.1.
- [Commits](https://github.com/briansmith/webpki/commits)

---
updated-dependencies:
- dependency-name: webpki
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-20 16:04:27 +08:00
SteveLauC
d50dc4c9f6 chore: bump CI toolchain (#553) 2023-09-20 15:57:35 +08:00
SteveLauC
ed8b563f20 fix: remote oh-my-zsh env var export issue (#528)
* fix: fix remote oh-my-zsh env var export issue
2023-09-19 09:15:34 +08:00
Rebecca Turner
2a73aa731d Make error messages nicer (#551)
* Remove unhelpful information from errors

Before:

```
Git repositories failed:
   0: error: cannot pull with rebase: You have unstaged changes.
      error: Please commit or stash them.
   0:

Location:
   src/steps/git.rs:39

Backtrace omitted. Run with RUST_BACKTRACE=1 environment variable to display it.
Run with RUST_BACKTRACE=full to include source snippets.
```

After:

```
Git repositories failed:
   0: Failed to pull /Users/wiggles/.dotfiles
   1: error: cannot pull with rebase: You have unstaged changes.
      error: Please commit or stash them.

Location:
   src/steps/git.rs:39
```

* Improve git_repos errors

This removes the extra blank "0:" line at the end of the error, doesn't
print the error message twice, and provides the repo path in the error
message.
2023-09-19 09:09:58 +08:00
Rebecca Turner
4dd1c13bd8 fix: fix "Nix" step to use nix upgrade-nix in more situations (#550)
`nix upgrade-nix` can be used on any platform except NixOS where `nix`
is available.

Also use `nix profile upgrade --verbose` because the non-verbose mode
doesn't print anything on stdout.
2023-09-17 15:40:04 +08:00
Rebecca Turner
c1c9fe22df feat: allow setting misc.log_filters in config.toml (#552)
Allow setting `log_filters` in `config.toml`

This allows setting a list of `log_filters` in the `[misc]` section in
the `config.toml`. These filters are prepended to any filters listed
with `--log-filters`. Finally, `--verbose` can now be used with
`--log-filters`, and it will append `debug` to the list of filters
rather than replacing it entirely.
2023-09-17 15:04:46 +08:00
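A minimal sketch of the option described above. `log_filters` under `[misc]` is stated in the commit message; the directive values shown follow the common `target=level` filter syntax and are assumptions.

```toml
# `log_filters` lives in [misc] per the commit above; the filter directives
# below are assumed examples in the usual `target=level` form.
[misc]
log_filters = [
    "topgrade::steps::git=debug",
    "hyper=warn",
]
```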
SteveLauC
06a6b7a2eb fix: skip needrestart when using nala on debian-based distro (#548) 2023-09-14 18:15:45 +08:00
SteveLauC
b814dd824f chore: bump ci toolchain (#544) 2023-09-01 14:42:12 +08:00
dependabot[bot]
ce234bdb59 chore(deps): bump rustls-webpki from 0.100.1 to 0.100.2 (#542)
Bumps [rustls-webpki](https://github.com/rustls/webpki) from 0.100.1 to 0.100.2.
- [Release notes](https://github.com/rustls/webpki/releases)
- [Commits](https://github.com/rustls/webpki/compare/v/0.100.1...v/0.100.2)

---
updated-dependencies:
- dependency-name: rustls-webpki
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-08-23 09:29:05 +08:00
SteveLauC
13a46a44a8 refactor: deprioritize please-sudo (#541)
refactor: deprioritize please-sudo
2023-08-22 09:14:29 +08:00
SteveLauC
dc78b00c3c feat: support LURE (#537) 2023-08-15 08:36:20 +08:00
samhanic
48ae4bf813 feat: support Vscode packages updates (#536)
feat: support vscode extensions update
2023-08-14 09:22:26 +08:00
SteveLauC
a50040e2d5 chore: add check for dry-run opt in PR template (#538)
chore: add test for dry-run opt in PR template
2023-08-13 10:24:39 +08:00
samhanic
2c9a56a8df feat: support miktex packages update (#535) 2023-08-13 10:05:07 +08:00
Sujay R
021320b292 Prioritize sudo steps to prevent sudo timeout (#532) 2023-08-06 11:32:20 +08:00
SteveLauC
9d3662c3ea chore: add ssh-related questions in issue template (#523) 2023-07-29 09:20:39 +08:00
SteveLauC
8e580457a5 chore: release v12.0.2 (#518) 2023-07-25 14:22:14 +08:00
SteveLauC
5350658dab fix: WSL detection (#508)
* fix: WSL detection
2023-07-25 14:02:13 +08:00
SteveLauC
1ec0ac50a5 fix: fix Linux and DragonFlyBSD yes option (#513) 2023-07-25 08:37:03 +08:00
SteveLauC
635bfce198 feat: extra arguments for Home Manager (#507)
* feat: extra arguments for Home Manager
2023-07-24 13:07:55 +08:00
6543
1307d2d7e8 feat: better error message on wrong os-release file (#511)
* enhancement: better error message when os-release parsing fails
2023-07-24 08:27:13 +08:00
SteveLauC
d21141fefe chore: release v12.0.1 (#510) 2023-07-23 20:06:31 +08:00
SteveLauC
0ec0e5a9dd chore: bump ci toolchain and MSRV (#506)
* chore: bump ci toolchain and MSRV

* fix clippy on macOS
2023-07-19 10:54:34 +08:00
SteveLauC
9415d7c61f fix(oh-my-zsh): fix remote oh-my-zsh issue (#496)
* fix(oh-my-zsh): fix remote oh-my-zsh issue
2023-07-18 13:59:55 +08:00
SteveLauC
42188af02b CI: release to PyPI (#500) 2023-07-18 08:11:36 +08:00
signed-log
e9581bcf15 feat: add assume-yes to more Linux managers (#501)
* Add assume-yes options to most Linux managers

Add `assume-yes` to :
- SUSE (Micro) - TW (`zypper`)
- PCLinux OS (`apt`)
- Solus (`eopkg`)
- `pacdef`
- Clear Linux (`swupd`)
2023-07-17 15:47:13 +08:00
SteveLauC
6afe4f51c6 test: unit test for Solus (#504) 2023-07-17 13:31:46 +08:00
signed-log
f623746d6c Fix clippy warning about non_minimal_cfg (#505)
Fix clippy::non_minimal_cfg warning
2023-07-17 13:30:55 +08:00
signed-log
1ce4d66e74 Add assume-yes to DragonflyBSD (#502)
Add assume-yes to DragonflyBSD
2023-07-17 11:40:00 +08:00
har7an
3735d5c537 steps/toolbx: Don't self-update and don't send notifications (#503)
steps/toolbx: Don't send notification

after finishing execution in the toolbx step, and don't perform another
self-update (because the application will already have done that).
2023-07-17 09:08:44 +08:00
SteveLauC
f3b1d2dfb3 Merge pull request from GHSA-f2wx-xjfw-xjv6
chore: bump tempfile to ~3.6
2023-07-15 09:24:08 +08:00
Steve Lau
7f7d2633cd chore: bump tempfile to ~3.6 2023-07-15 09:17:47 +08:00
Harsh Shandilya
afd95e3d5c fix(generic): add alternate binary name for spicetify (#486) 2023-07-14 16:14:06 +00:00
SteveLauC
8f72545894 docs(config): document 4 missing sections in example config file (#485) 2023-07-14 16:13:44 +00:00
SteveLauC
d0d447deac fix: fix wrong path in oh-my-bash (#478) 2023-07-14 16:13:28 +00:00
SteveLauC
53a8683788 ci: separate code-coverage and test-config-creation (#488) 2023-07-14 16:12:53 +00:00
Janek
81491a8d03 docs: apply corrections in config.example.toml (#492)
* Fix Issues in config.example.toml

* Update config.example.toml
2023-07-14 16:12:32 +00:00
Janek
83504754ac docs: add Karma commit messages to CONTRIBUTING.md (#493)
Add Karma commit messages to CONTRIBUTING.md
2023-07-14 16:11:59 +00:00
Marcelo Duarte Trevisani
2068c2c169 Update only base conda env (#495) 2023-07-14 16:11:18 +00:00
SteveLauC
dbac121a90 refactor(config): move sudo_command to section misc (#484) 2023-07-01 13:58:39 +00:00
Thomas Schönauer
b974938a33 v12 Cargo files update (#441) 2023-06-27 10:02:27 +00:00
SteveLauC
06cb88a1a1 test: test for config file creation and default config file parsing (#459) 2023-06-23 09:04:05 +00:00
SteveLauC
a6195d284c feat: support Bob (#461) 2023-06-23 09:03:57 +00:00
SteveLauC
5b8850e8a3 chore: update bug report issue template (#474) 2023-06-23 09:03:29 +00:00
SteveLauC
57546a07fc fix(pip3): prefer python when available (#471) 2023-06-23 09:02:58 +00:00
slowsage
d7709490ce fix: Run AstroUpdate before Lazy sync (#473) 2023-06-23 09:01:55 +00:00
slowsage
3e6c6e513b fix: handle no topgrade.toml but files in topgrade.d (#460) 2023-06-13 14:17:27 +00:00
SteveLauC
30858780cf refactor: unify the behavior of the steps that require sudo (#467) 2023-06-13 14:15:57 +00:00
SteveLauC
a7ddf4575a fix: fix Mist (#466) 2023-06-05 06:38:14 +00:00
Thomas Schönauer
470231c9d1 Revert "fix: fix mist" (#465)
Revert "fix: fix mist (#464)"

This reverts commit 282e336ac4.
2023-06-03 21:22:23 +00:00
SteveLauC
282e336ac4 fix: fix mist (#464) 2023-06-03 21:20:57 +00:00
SteveLauC
658829e4ff refactor: make update fn take &ExecutionContext & put update fn together (#457) 2023-06-02 20:20:42 +00:00
SteveLauC
a0ff565220 docs: update CONTRIBUTING.md & config.example.toml (#458) 2023-06-01 11:02:39 +00:00
SteveLauC
7e48c5dedc fix: warn user about bad pattern paths before skipping step git (#456) 2023-06-01 07:16:01 +00:00
slowsage
03436b7f8f fix: Handle '# [include]'. Update default config (#450) 2023-06-01 07:15:49 +00:00
SteveLauC
3f5eedb83d fix: run AM without sudo (#454) 2023-05-31 07:01:45 +00:00
SteveLauC
234ad4bdd7 docs: add config-related CONTRIBUTING doc (#452) 2023-05-30 10:03:22 +00:00
slowsage
c7923393be fix: Write to correct config path when none exists. (#449) 2023-05-30 07:07:02 +00:00
slowsage
d4548b2f9a feat: Add arguments to pipupgrade and fix enable_pipupgrade check (#448) 2023-05-30 07:04:23 +00:00
SteveLauC
f6e8af186c feat: support Vanilla Linux (#447) 2023-05-29 11:45:11 +00:00
SteveLauC
58153635da refactor: remove Anarchy and Antergos as they are discontinued (#446) 2023-05-28 12:44:49 +00:00
SteveLauC
5358509825 fix: fix panic during container update (#445) 2023-05-27 14:12:45 +00:00
SteveLauC
1ab0232d96 feat: support deepin OS (#444) 2023-05-27 09:41:51 +00:00
SteveLauC
66860f1848 refactor: remove unnecessary qualification (#443) 2023-05-27 09:41:42 +00:00
SteveLauC
625f823f46 refactor: rename update fn name & some cleanup (#442) 2023-05-27 09:37:51 +00:00
Thomas Schönauer
6263ab7e10 Allow apt-get update to continue to apt-get upgrade with error code 100 (#440)
Allow apt-get update to continue with error code 100
2023-05-26 19:57:05 +00:00
Kevin Gavrois
7db991db9d Merge code for desktop notification between MacOS and Linux (#438) 2023-05-26 10:07:14 +02:00
SteveLauC
d75782892e docs: CONTRIBUTING.md (#439) 2023-05-26 09:34:20 +02:00
PolpOnline
cb7adc8ced Added ability to include directories as an extension of the config file (#421) 2023-05-25 12:22:11 +02:00
SteveLauC
7c3ba80270 fix: fix .NET language issue (#437)
Co-authored-by: Thomas Schönauer <37108907+DottoDev@users.noreply.github.com>
2023-05-25 09:24:53 +02:00
SteveLauC
76c39edc8b refactor: make all step functions take &ExecutionContext (#436) 2023-05-25 09:09:23 +02:00
SteveLauC
c20a300eea fix: use --platform opt when pulling containers (#435) 2023-05-23 08:47:47 +02:00
SteveLauC
de3902a9c9 fix: use env ZSH to compose oh-my-zsh install dir (#434) 2023-05-22 14:06:19 +02:00
SteveLauC
8bca671e9f fix: run deb-get without sudo (#430) 2023-05-20 19:35:17 +02:00
MonstrousOgre
54301a6a17 Adding local pip-review (#433) 2023-05-20 19:33:59 +02:00
Cat Core
f06b7c0807 Differentiate NPM and PNPM steps in name (#431) 2023-05-20 11:33:41 +02:00
SteveLauC
43c02cf7a7 feat: support maza (#427) 2023-05-17 19:18:03 +02:00
SteveLauC
3a1568e884 feat: support oh-my-bash (#425) 2023-05-17 19:17:37 +02:00
SteveLauC
14753a14e7 feat: support AppMan (#423) 2023-05-09 08:03:06 +02:00
Sourajyoti Basak
227e8dcc8d feat(shell): add packer.nu (#414)
* feat(shell): add `packer.nu`

* dependency update (#413)

* fix(main): move `packer.nu` step before linux package managers

---------

Co-authored-by: Thomas Schönauer <37108907+DottoDev@users.noreply.github.com>
2023-05-05 11:01:24 +02:00
signed-log
97fd2b2718 Make zypper dist-upgrade opt-in on SLE/Leap (#417)
Make zypper dist-upgrade opt-in on SLE/Leap

- Create a `suse_dup` config option
- Create a new `Distribution::OpenSuseTumbleweed` object along with `upgrade_opensuse_tumbleweed()`
    * The purpose of it is to ignore the config option on Tumbleweed as
      zypper `dup` is the only way to update a Tumbleweed
2023-05-05 10:24:01 +02:00
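For illustration, opting in to `zypper dist-upgrade` on SLE/Leap might look like this. `suse_dup` is the option name from the commit; placing it under `[linux]` is an assumption.

```toml
# `suse_dup` comes from the commit above; the [linux] section placement is assumed.
[linux]
suse_dup = true
```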
SteveLauC
f30e36d7bb feat: support stew (#422) 2023-05-05 10:17:42 +02:00
SteveLauC
d640bc66f5 docs: update README for alternative config path (#419) 2023-05-04 08:36:36 +00:00
PolpOnline
a2331a2575 Add the ability to have the config file in $XDG_CONFIG_HOME/topgrade/topgrade.toml (#418) 2023-05-03 19:53:52 +00:00
Thomas Schönauer
26a2c3c266 v11.0.2 version bump (#416)
* dependency update

* Cargo.toml version bump
2023-05-01 18:26:18 +00:00
Thomas Schönauer
ceafcba88f dependency update (#413) 2023-05-01 15:02:16 +00:00
Thomas Schönauer
d7182b5a6e v11.0.0 bump (#410) 2023-04-30 19:02:26 +00:00
Thomas Schönauer
93ec1172fe Update README.md 2023-04-30 18:58:22 +00:00
Thomas Schönauer
609477a373 Update README.md 2023-04-30 18:57:23 +00:00
Thomas Schönauer
1d49af10a7 Update README.md 2023-04-30 18:57:07 +00:00
Utkarsh Gupta
327ed837c2 Replace directories with home & etcetera (#407)
* Use global lazy HOME_DIR

* Remove unused base_dirs

* Use `etcetera` instead of `directories`

---------

Co-authored-by: Thomas Schönauer <37108907+DottoDev@users.noreply.github.com>
2023-04-30 18:32:13 +00:00
Thomas Schönauer
d406e2aeab Assume Fedora Silverblue based on os-release and not on existence of rpm-ostree (#393)
* Do not assume silverblue if rpm-ostree is available

* Fix typo

* Fix config error
2023-04-30 18:22:08 +00:00
dependabot[bot]
0991cc8a6f Bump enumflags2 from 0.7.5 to 0.7.7 (#408) 2023-04-24 20:37:00 +00:00
Brian Riccardi
ac6330fac8 Added support to 'mamba' (alternative to 'conda' with the exact same commands/interface) (#395) 2023-04-17 14:19:59 +00:00
dependabot[bot]
29f0d229d3 Bump h2 from 0.3.16 to 0.3.17 (#404) 2023-04-17 14:19:48 +00:00
Roey Darwish Dror
3dd11f7b52 No need to run self-update in Rustup (#403) 2023-04-05 12:42:47 +00:00
Roey Darwish Dror
ddb1a021bb Display the preamble in Linux only if notify-send is installed (#401) 2023-04-05 12:34:47 +00:00
PolpOnline
565aa405be Add no-self-update config and flag (#388) 2023-03-22 21:05:21 +00:00
Utkarsh Gupta
907465f891 run_custom_command: allow using interactive shell on unix (#383) 2023-03-17 16:28:58 +00:00
Trevor Sullivan
250485c826 Add Scoop manifest link for Windows installation (#384) 2023-03-15 07:40:31 +00:00
Thomas Schönauer
3a3f22b4e5 V10 3 2 bugfix + revert #347 (#382)
* Revert "run_custom_command: use interactive shell on unix (#347)"

This reverts commit d767ef31a5.

* v10-3-3 + revert of #347
2023-03-13 19:27:33 +00:00
Thomas de Queiroz Barros
a3628d0d49 Add sudo_command option (#379)
This allows the user to specify the preferred sudo command to be used
instead of the command chosen by Sudo::detect
2023-03-13 19:23:37 +00:00
Thomas Schönauer
462016e51e 10.3.2 patch (#378)
* 10.3.2 patch

* Clippy
2023-03-12 20:37:41 +00:00
Thomas Schönauer
199b81183b Update check-and-lint.yaml to use Rust version 1.68.0 2023-03-12 20:22:58 +00:00
Thomas Schönauer
342d7f7209 skip skip-notify warning on Win (#362) 2023-03-03 11:58:58 +00:00
Isaac Tay
9c2d121fc9 cargo: add cleanup step (using cargo-cache) (#371) 2023-03-03 11:58:15 +00:00
Roey Darwish Dror
7728819133 Support antidote (#368) 2023-02-26 21:45:43 +00:00
arctic-penguin
a5d5d987d2 pacdef: support new version 1.x (#364) 2023-02-23 22:01:53 +00:00
TGRCDev
fae5d80f0a pip3: Check for EXTERNALLY-MANAGED (PEP 668) (#367) 2023-02-23 22:01:26 +00:00
Thomas Schönauer
2369e371be apt: Recognise mist (#351) 2023-02-18 21:22:02 +00:00
Jason Stelzer
e3b71b647f Silence misleading warning on other platforms. (#353)

2023-02-07 17:21:15 +00:00
Guilherme Silva
e224ea38b3 CI: Update cross to v0.2.5 (#354) 2023-02-07 17:19:46 +00:00
Thomas Schönauer
8ec37bcd44 vim: Adds Astrovim support (#352) 2023-02-03 13:46:09 +00:00
Thomas Schönauer
6b7f6f4cc7 ruby_gems: Fixes asdf (#350) 2023-02-02 21:48:48 +00:00
Utkarsh Gupta
d767ef31a5 run_custom_command: use interactive shell on unix (#347) 2023-02-02 19:46:11 +00:00
Dan Sully
fcf776fe07 Add support for please (access elevation) (#310)
* Add support for please (access elevation)

Please is a sudo-like tool written in Rust.

https://gitlab.com/edneville/please

* Fixes code typo

---------

Co-authored-by: Thomas Schönauer <37108907+DottoDev@users.noreply.github.com>
Co-authored-by: Thomas Schönauer <t.schoenauer@hgs-wt.at>
2023-02-02 19:22:56 +00:00
edi
58060dda09 use documented way of updating (#344) 2023-01-31 22:19:01 +00:00
Thomas Schönauer
8cfc8d66be v10.3.1 patch (#342) 2023-01-30 21:24:06 +00:00
edi
9dcc8fdd0d (neo)vim: topgrade should only invoke plugin managers not plugins (#341)
* fix upgrade order of (n)vim plugins

* treesitter should use the synchronous cmd

* add lazy pkg manager for neovim

* fix lazy cmd

* change calls

* add autocmd, remove ts and coc

* fix vimscript err invalid range

---------

Co-authored-by: Thomas Schönauer <37108907+DottoDev@users.noreply.github.com>
2023-01-30 18:42:13 +00:00
Thomas Schönauer
828477b255 Update README.md 2023-01-30 18:41:48 +00:00
arctic-penguin
4eae1fedf7 fix ignored config display_preamble = false (#340)
Bug was introduced in f1e4009. Fixes #337.
2023-01-30 18:41:00 +00:00
Thomas Schönauer
1051e4cf47 AM fix + version bump (#335) 2023-01-29 21:53:26 +00:00
Thomas Schönauer
80a95cb404 Clippy (#331) 2023-01-29 19:31:37 +00:00
Thomas Schönauer
ab630cfbc6 v10.2.5 release (#330)
* Don't show desktop notification on error (if `skip_notify = true`) (#275)

* Use ─ (U+2500) to draw borders (#282)

* Adds Pclinuxos support (#283)

* Add Devkitpro Pacman support (#291)

* Added support for Neovim package manager lazy.nvim (#293)

* Added support for lazy.nvim

From https://github.com/folke/lazy.nvim
Authored-by: Jacob Lane Ledbetter <jledbetter460@gmail.com>

* Make garuda-update update AUR packages by default (#296)

* fix(#298): Don't throw error if no Helm repository found (#305)

* Skip .NET when `dotnet tool list` is not successful (#302)

* feat(pacstall): add `-y` flag variant (#312)

* Add openSUSE MicroOS support (#315)

* Adds notify-send timeout of 10s (#318)

* Don't run yum when rpm-ostree is available (#313)

* don't run yum when rpm-ostree is available

* Clippy fix

* rpm-ostree: set default value to true

* Fixes if loop error

* Fixes gem update --system requires sudo now (#317)

* Fixes gem update --system requires sudo now

* rubygem: Adds arg -EH to sudo

* Use fixed nala path instead of which(nala) (#314)

* Adds notify-send bug warning when topgrade is run (#324)

* Adds notify-send bug warning when topgrade is run

* fix typo + clippy

* notify-send warning respects skip_notify flag

* nix: Adds additional arguments support (#325)

* Adds pip-review and pipupgrade support (#316)

* Adds pip-review and pipupgrade support

* Python: fixes pip_review and pipupgrade

* v10.2.5 patch (#329)

* WSL: Adds new wsl --update flags (#327)

* wsl: Updates available flags

* Clippy fix

* Add WslUpdate runner

* wsl: Code Typo

* wsl: Code Typos

* wsl: Code Typos

* wsl: Code Typo

* Adds AM Package Manager (#328)

* Adds AM Package Manager

* Clippy fixes

* Cargo fmt

* Moves am to linux only in main file

---------

Co-authored-by: Guilherme Silva <626206+guihkx@users.noreply.github.com>
Co-authored-by: Gabriel Augendre <gabriel@augendre.info>
Co-authored-by: Cat Core <34719527+arthurbambou@users.noreply.github.com>
Co-authored-by: Hugo Haas <hugoh@hugoh.net>
Co-authored-by: Baptiste <32563450+BapRx@users.noreply.github.com>
Co-authored-by: bbx0 <39773919+bbx0@users.noreply.github.com>
Co-authored-by: Sourajyoti Basak <wiz28@protonmail.com>
2023-01-29 19:19:27 +00:00
edi
c13e14080c Add Lazy, a Neovim plugin manager (#326)
* fix upgrade order of (n)vim plugins

* treesitter should use the synchronous cmd

* add lazy pkg manager for neovim

* fix lazy cmd

---------

Co-authored-by: Thomas Schönauer <37108907+DottoDev@users.noreply.github.com>
2023-01-29 18:49:56 +00:00
edi
4abbee99cc fix upgrade order of (n)vim plugins (#322)
* fix upgrade order of (n)vim plugins

* treesitter should use the synchronous cmd

---------

Co-authored-by: Thomas Schönauer <37108907+DottoDev@users.noreply.github.com>
2023-01-27 21:41:48 +00:00
Giovanni Merlino
45d935eda3 Fix missing separator for Pkgin (pkgsrc) (#307)
* Fix missing separator for Pkgin (pkgsrc)

* Fix whitespace (giving issues with cargo fmt?)

---------

Co-authored-by: Thomas Schönauer <37108907+DottoDev@users.noreply.github.com>
2023-01-27 21:34:27 +00:00
pwygab
b4c5efde50 Fix small typo in warning when notify-send fails (#319)
Fix small typo
2023-01-27 20:52:20 +00:00
Roey Darwish Dror
cba9dc1c2c Fix windows build (#303) 2023-01-09 08:50:21 +01:00
dependabot[bot]
938647123c Bump tokio from 1.8.5 to 1.18.4 (#301) 2023-01-06 23:13:01 +00:00
Jacob Lane Ledbetter
9f24f6474e Add dnf config to config example (#292) 2022-12-30 22:52:04 +00:00
Thomas Schönauer
814e39644c fixes dotnet update + version bump 10.2.4 (#274) 2022-12-18 15:37:10 +00:00
86 changed files with 6612 additions and 2829 deletions


@@ -2,32 +2,91 @@
 name: Bug report
 about: Topgrade is misbehaving
 title: ''
-labels: ''
+labels: 'C-bug'
 assignees: ''
 ---
-<!-- If you're here to report about a "No asset found" error, please make sure that an hour has been passed since the last release was made. -->
-## What did you expect to happen?
-## What actually happened?
+<!--
+Thanks for taking the time to fill out this bug report!
+Please make sure to
+[search for existing issues](https://github.com/topgrade-rs/topgrade/issues)
+before filing a new one!
+Questions labeled with `Optional` can be skipped.
+-->
+<!--
+If you're here to report about a "No asset found" error, please make sure that
+an hour has been passed since the last release was made.
+-->
+## Erroneous Behavior
+<!--
+What actually happened?
+-->
+## Expected Behavior
+<!--
+Describe the expected behavior
+-->
+## Steps to reproduce
+<!--
+A minimal example to reproduce the issue
+-->
+## Possible Cause (Optional)
+<!--
+If you know the possible cause of the issue, please tell us.
+-->
+## Problem persists without calling from topgrade
+<!--
+Execute the erroneous command directly to see if the problem persists
+-->
+- [ ] Yes
+- [ ] No
+## Did you run topgrade through `Remote Execution`
+- [ ] Yes
+- [ ] No
+If yes, does the issue still occur when you run topgrade directlly in your
+remote host
+- [ ] Yes
+- [ ] No
+## Configuration file (Optional)
+<!--
+Paste your configuration file inside the code block if you think this issue is
+related to configuration.
+-->
+```toml
+```
 ## Additional Details
-- Which operating system or Linux distribution are you using?
-- How did you install Topgrade?
-- Which version are you running?
-<!--
-Run `topgrade --dry-run` to see which commands Topgrade is running.
-If the command seems wrong and you know why please tell us so.
-If the command seems fine try to run it yourself and tell us if you got a different result from Topgrade.
+- Operation System/Version
+<!-- For example, Fedora Linux 38 -->
+<!-- Check with `topgrade -V` -->
+- Installation
+<!--
+How did you install topgrade: build from repo / crates.io (cargo install topgrade)
+/ package manager (which one) / other (describe)
+-->
+- Topgrade version (`topgrade -V`)
+## Verbose Output (`topgrade -v`)
+<!--
+Paste the verbose output into the pre-tags
 -->
 <details>
+<!-- Paste the output of the problematic command with `-v` into the pre-tags -->
 <pre>
 </pre>


@@ -2,16 +2,20 @@
 name: Feature request
 about: Can you please support...?
 title: ''
-labels: ''
+labels: 'C-feature request'
 assignees: ''
 ---
 ## I want to suggest a new step
-### Which tool is this about? Where is its repository?
-### Which operating systems are supported by this tool?
-### What should Topgrade do to figure out if the tool needs to be invoked?
-### Which exact commands should Topgrade run?
+* Which tool is this about? Where is its repository?
+* Which operating systems are supported by this tool?
+* What should Topgrade do to figure out if the tool needs to be invoked?
+* Which exact commands should Topgrade run?
+* Does it have a `--dry-run` option? i.e., print what should be done and exit
+* Does it need the user to confirm the execution? And does it provide a `--yes`
+  option to skip this step?
 ## I want to suggest some general feature
 Topgrade should...


@@ -1,12 +1,18 @@
-## Standards checklist:
+## What does this PR do
+## Standards checklist
 - [ ] The PR title is descriptive.
-- [ ] The code compiles (`cargo build`)
+- [ ] I have read `CONTRIBUTING.md`
-- [ ] The code passes rustfmt (`cargo fmt`)
-- [ ] The code passes clippy (`cargo clippy`)
-- [ ] The code passes tests (`cargo test`)
 - [ ] *Optional:* I have tested the code myself
-- [ ] I also tested that Topgrade skips the step where needed
+## For new steps
+- [ ] *Optional:* Topgrade skips this step where needed
+- [ ] *Optional:* The `--dry-run` option works with this step
+- [ ] *Optional:* The `--yes` option works with this step if it is supported by
+  the underlying command
 If you developed a feature or a bug fix for someone else and you do not have the
 means to test it, please tag this person here.

.github/dependabot.yml (new file)

@@ -0,0 +1,10 @@
# Set update schedule for GitHub Actions
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every week
      interval: "weekly"


@@ -0,0 +1,21 @@
name: Check config file creation if not exists
on:
  pull_request:
env:
  CARGO_TERM_COLOR: always
jobs:
  TestConfig:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: |
          CONFIG_PATH=~/.config/topgrade.toml;
          if [ -f "$CONFIG_PATH" ]; then rm $CONFIG_PATH; fi
          cargo build;
          TOPGRADE_SKIP_BRKC_NOTIFY=true ./target/debug/topgrade --dry-run --only system;
          stat $CONFIG_PATH;


@@ -0,0 +1,32 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
name: Check Security Vulnerability
on:
  pull_request:
  push:
    branches:
      - main
jobs:
  lint:
    name: DevSkim
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Run DevSkim scanner
        uses: microsoft/DevSkim-Action@v1
      - name: Upload DevSkim scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: devskim-results.sarif


@@ -8,7 +8,7 @@ jobs:
   prepare:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - uses: actions-rs/toolchain@v1
         with:
           toolchain: nightly-2022-08-03


@@ -7,17 +7,17 @@ on:
 name: CI
 env:
-  RUST_VER: '1.60.0'
+  RUST_VER: 'stable'
-  CROSS_VER: '0.2.4'
+  CROSS_VER: '0.2.5'
   CARGO_NET_RETRY: 3
 jobs:
   fmt:
     name: Rustfmt
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Setup Rust
         uses: dtolnay/rust-toolchain@master
@@ -42,32 +42,36 @@
       - target: x86_64-linux-android
         target_name: Android
         use_cross: true
-        os: ubuntu-20.04
+        os: ubuntu-latest
       - target: x86_64-unknown-freebsd
         target_name: FreeBSD
         use_cross: true
-        os: ubuntu-20.04
+        os: ubuntu-latest
       - target: x86_64-unknown-linux-gnu
         target_name: Linux
-        os: ubuntu-20.04
+        os: ubuntu-latest
       - target: x86_64-apple-darwin
-        target_name: macOS
+        target_name: macOS-x86_64
-        os: macos-11
+        os: macos-13
+      - target: aarch64-apple-darwin
+        target_name: macOS-aarch64
+        os: macos-latest
       - target: x86_64-unknown-netbsd
         target_name: NetBSD
         use_cross: true
-        os: ubuntu-20.04
+        os: ubuntu-latest
       - target: x86_64-pc-windows-msvc
         target_name: Windows
-        os: windows-2019
+        os: windows-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Setup Rust
         uses: dtolnay/rust-toolchain@master
@@ -84,8 +88,13 @@
         if: matrix.use_cross == true
         run: curl -fL --retry 3 https://github.com/cross-rs/cross/releases/download/v${{ env.CROSS_VER }}/cross-x86_64-unknown-linux-musl.tar.gz | tar vxz -C /usr/local/bin
-      - name: Run cargo check
+      - name: Run cargo/cross check
         run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} check --locked --target ${{ matrix.target }}
-      - name: Run cargo clippy
+      - name: Run cargo/cross clippy
         run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} clippy --locked --target ${{ matrix.target }} --all-features -- -D warnings
+      - name: Run cargo test
+        # ONLY run test with cargo
+        if: matrix.use_cross == false
+        run: cargo test --locked --target ${{ matrix.target }}

View File

@@ -13,40 +13,31 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
platform: [ ubuntu-latest, macos-latest, windows-latest ] platform: [ ubuntu-latest, macos-latest, macos-13, windows-latest ]
runs-on: ${{ matrix.platform }} runs-on: ${{ matrix.platform }}
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1
with: - name: setup Rust
toolchain: stable uses: dtolnay/rust-toolchain@stable
profile: minimal with:
override: true
components: rustfmt, clippy components: rustfmt, clippy
- uses: actions-rs/cargo@v1.0.1
name: Check format - name: Check format
with: run: cargo fmt --all -- --check
command: fmt
args: --all -- --check - name: Run clippy
- uses: actions-rs/cargo@v1.0.1 run: cargo clippy --all-targets --locked -- -D warnings
name: Run clippy
with: - name: Run clippy (All features)
command: clippy run: cargo clippy --all-targets --locked --all-features -- -D warnings
args: --all-targets --locked -- -D warnings
- uses: actions-rs/cargo@v1.0.1 - name: Run tests
name: Run clippy (All features) run: cargo test
with:
command: clippy - name: Build in Release profile with all features enabled
args: --all-targets --locked --all-features -- -D warnings run: cargo build --release --all-features
- uses: actions-rs/cargo@v1.0.1
name: Run tests
with:
command: test
- uses: actions-rs/cargo@v1.0.1
name: Build
with:
command: build
args: --release --all-features
- name: Rename Release (Unix) - name: Rename Release (Unix)
run: | run: |
cargo install default-target cargo install default-target
@@ -59,6 +50,7 @@ jobs:
ls . ls .
if: ${{ matrix.platform != 'windows-latest' }} if: ${{ matrix.platform != 'windows-latest' }}
shell: bash shell: bash
- name: Rename Release (Windows) - name: Rename Release (Windows)
run: | run: |
cargo install default-target cargo install default-target
@@ -71,7 +63,8 @@ jobs:
ls . ls .
if: ${{ matrix.platform == 'windows-latest' }} if: ${{ matrix.platform == 'windows-latest' }}
shell: bash shell: bash
- name: Release - name: Release
uses: softprops/action-gh-release@v1 uses: softprops/action-gh-release@v2
with: with:
files: assets/* files: assets/*

View File

@@ -0,0 +1,68 @@
name: Publish release files for non-cd-native environments
on:
# workflow_run:
# workflows: ["Check SemVer compliance"]
# types:
# - completed
release:
types: [ created ]
jobs:
build:
strategy:
fail-fast: false
matrix:
target: [
"aarch64-unknown-linux-gnu",
"armv7-unknown-linux-gnueabihf",
"x86_64-unknown-linux-musl",
"aarch64-unknown-linux-musl",
"x86_64-unknown-freebsd",
]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: setup Rust
uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt, clippy
- name: install targets
run: rustup target add ${{ matrix.target }}
- name: install cross
uses: taiki-e/install-action@v2
with:
tool: cross@0.2.5
- name: Check format
run: cross fmt --all -- --check
- name: Run clippy
run: cross clippy --all-targets --locked --target ${{matrix.target}} -- -D warnings
- name: Run clippy (All features)
run: cross clippy --locked --all-features --target ${{matrix.target}} -- -D warnings
- name: Run tests
run: cross test --target ${{matrix.target}}
- name: Build in Release profile with all features enabled
run: cross build --release --all-features --target ${{matrix.target}}
- name: Rename Release
run: |
mkdir assets
FILENAME=topgrade-${{github.event.release.tag_name}}-${{matrix.target}}
mv target/${{matrix.target}}/release/topgrade assets
cd assets
tar --format=ustar -czf $FILENAME.tar.gz topgrade
rm topgrade
ls .
- name: Release
uses: softprops/action-gh-release@v2
with:
files: assets/*

View File

@@ -1,70 +0,0 @@
name: Publish release files for non-cd-native environments
on:
# workflow_run:
# workflows: ["Check SemVer compliance"]
# types:
# - completed
release:
types: [ created ]
jobs:
build:
strategy:
fail-fast: false
matrix:
target: [ "aarch64-unknown-linux-gnu", "armv7-unknown-linux-gnueabihf", "x86_64-unknown-linux-musl", "aarch64-unknown-linux-musl", "x86_64-unknown-freebsd", ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
default: true
override: true
target: ${{ matrix.target }}
components: rustfmt, clippy
- uses: actions-rs/cargo@v1.0.1
name: Check format
with:
use-cross: true
command: fmt
args: --all -- --check
- uses: actions-rs/cargo@v1.0.1
name: Run clippy
with:
command: clippy
use-cross: true
args: --all-targets --locked --target ${{matrix.target}} -- -D warnings
- uses: actions-rs/cargo@v1.0.1
name: Run clippy (All features)
with:
command: clippy
use-cross: true
args: --locked --all-features --target ${{matrix.target}} -- -D warnings
- uses: actions-rs/cargo@v1.0.1
name: Run tests
with:
command: test
use-cross: true
args: --target ${{matrix.target}}
- uses: actions-rs/cargo@v1.0.1
name: Build
with:
command: build
use-cross: true
args: --release --all-features --target ${{matrix.target}}
- name: Rename Release
run: |
mkdir assets
FILENAME=topgrade-${{github.event.release.tag_name}}-${{matrix.target}}
mv target/${{matrix.target}}/release/topgrade assets
cd assets
tar --format=ustar -czf $FILENAME.tar.gz topgrade
rm topgrade
ls .
- name: Release
uses: softprops/action-gh-release@v1
with:
files: assets/*

View File

@@ -4,7 +4,7 @@ on:
# types: # types:
# - completed # - completed
release: release:
types: [published, edited] types: [published]
name: Publish to crates.io on release name: Publish to crates.io on release
@@ -12,7 +12,7 @@ jobs:
prepare: prepare:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1 - uses: actions-rs/toolchain@v1
with: with:
toolchain: stable toolchain: stable
@@ -21,7 +21,7 @@ jobs:
publish: publish:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: katyo/publish-crates@v1 - uses: katyo/publish-crates@v2
with: with:
dry-run: true dry-run: true
check-repo: ${{ github.event_name == 'push' }} check-repo: ${{ github.event_name == 'push' }}

View File

@@ -19,7 +19,7 @@ jobs:
uses: Homebrew/actions/setup-homebrew@master uses: Homebrew/actions/setup-homebrew@master
- name: Cache Homebrew Bundler RubyGems - name: Cache Homebrew Bundler RubyGems
id: cache id: cache
uses: actions/cache@v1 uses: actions/cache@v4
with: with:
path: ${{ steps.set-up-homebrew.outputs.gems-path }} path: ${{ steps.set-up-homebrew.outputs.gems-path }}
key: ${{ runner.os }}-rubygems-${{ steps.set-up-homebrew.outputs.gems-hash }} key: ${{ runner.os }}-rubygems-${{ steps.set-up-homebrew.outputs.gems-hash }}
@@ -29,7 +29,8 @@ jobs:
if: steps.cache.outputs.cache-hit != 'true' if: steps.cache.outputs.cache-hit != 'true'
run: brew install-bundler-gems run: brew install-bundler-gems
- name: Bump formulae - name: Bump formulae
uses: Homebrew/actions/bump-formulae@master uses: Homebrew/actions/bump-packages@master
continue-on-error: true
with: with:
# Custom GitHub access token with only the 'public_repo' scope enabled # Custom GitHub access token with only the 'public_repo' scope enabled
token: ${{secrets.HOMEBREW_ACCESS_TOKEN}} token: ${{secrets.HOMEBREW_ACCESS_TOKEN}}

99
.github/workflows/release_to_pypi.yml vendored Normal file
View File

@@ -0,0 +1,99 @@
name: Update PyPi
on:
release:
types: [published]
permissions:
contents: read
jobs:
linux:
runs-on: ubuntu-latest
strategy:
matrix:
target: [x86_64, x86, aarch64]
steps:
- uses: actions/checkout@v4
- name: Build wheels
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
args: --release --out dist
sccache: 'true'
manylinux: auto
- name: Upload wheels
uses: actions/upload-artifact@v4
with:
name: wheels
path: dist
windows:
runs-on: windows-latest
strategy:
matrix:
target: [x64, x86]
steps:
- uses: actions/checkout@v4
- name: Build wheels
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
args: --release --out dist
sccache: 'true'
- name: Upload wheels
uses: actions/upload-artifact@v4
with:
name: wheels
path: dist
macos:
runs-on: macos-latest
strategy:
matrix:
target: [x86_64, aarch64]
steps:
- uses: actions/checkout@v4
- name: Build wheels
uses: PyO3/maturin-action@v1
with:
target: ${{ matrix.target }}
args: --release --out dist
sccache: 'true'
- name: Upload wheels
uses: actions/upload-artifact@v4
with:
name: wheels
path: dist
sdist:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Build sdist
uses: PyO3/maturin-action@v1
with:
command: sdist
args: --out dist
- name: Upload sdist
uses: actions/upload-artifact@v4
with:
name: wheels
path: dist
release:
name: Release
runs-on: ubuntu-latest
if: "startsWith(github.ref, 'refs/tags/')"
needs: [linux, windows, macos, sdist]
steps:
- uses: actions/download-artifact@v4
with:
name: wheels
- name: Publish to PyPI
uses: PyO3/maturin-action@v1
env:
MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
with:
command: upload
args: --skip-existing *

13
.github/workflows/release_to_winget.yml vendored Normal file
View File

@@ -0,0 +1,13 @@
name: Publish to WinGet
on:
release:
types: [released]
jobs:
publish:
runs-on: windows-latest
steps:
- uses: vedantmgoyal2009/winget-releaser@v2
with:
identifier: topgrade-rs.topgrade
max-versions-to-keep: 5 # keep only latest 5 versions
token: ${{ secrets.WINGET_TOKEN }}

View File

@@ -1,59 +0,0 @@
on:
pull_request:
push:
branches:
- main
env:
CARGO_TERM_COLOR: always
name: Test with Code Coverage
jobs:
test:
name: Test
env:
PROJECT_NAME_UNDERSCORE: topgrade
CARGO_INCREMENTAL: 0
RUSTFLAGS: -Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort
RUSTDOCFLAGS: -Cpanic=abort
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
override: true
- name: Cache dependencies
uses: actions/cache@v2
env:
cache-name: cache-dependencies
with:
path: |
~/.cargo/.crates.toml
~/.cargo/.crates2.json
~/.cargo/bin
~/.cargo/registry/index
~/.cargo/registry/cache
target
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('Cargo.lock') }}
- name: Generate test result and coverage report
run: |
cargo install cargo2junit grcov;
cargo test $CARGO_OPTIONS -- -Z unstable-options --format json | cargo2junit > results.xml;
zip -0 ccov.zip `find . \( -name "$PROJECT_NAME_UNDERSCORE*.gc*" \) -print`;
grcov ccov.zip -s . -t lcov --llvm --ignore-not-existing --ignore "/*" --ignore "tests/*" -o lcov.info;
- name: Upload test results
uses: EnricoMi/publish-unit-test-result-action@v1
with:
check_name: Test Results
github_token: ${{ secrets.GITHUB_TOKEN }}
files: results.xml
- name: Upload to CodeCov
uses: codecov/codecov-action@v1
with:
# required for private repositories:
# token: ${{ secrets.CODECOV_TOKEN }}
files: ./lcov.info
fail_ci_if_error: true

18
.gitignore vendored
View File

@@ -1,4 +1,20 @@
# JetBrains IDEs
.idea/
/target # Visual Studio
.vs/
# Visual Studio Code
.vscode/
# Generic build outputs
/build /build
# Specific for some languages like Rust
/target
# LLVM profiling output
*.profraw
# Backup files for any .rs files in the project
**/*.rs.bk **/*.rs.bk

38
.vscode/launch.json vendored
View File

@@ -1,38 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "Topgrade",
"console": "integratedTerminal",
"cargo": {
"args": [
"build",
"--bin=topgrade-rs",
"--package=topgrade-rs"
],
"filter": {
"name": "topgrade-rs",
"kind": "bin"
}
},
"args": [
"--only",
"${input:step}",
"-v"
],
"cwd": "${workspaceFolder}"
},
],
"inputs": [
{
"type": "promptString",
"id": "step",
"description": "step name",
}
]
}

14
.vscode/tasks.json vendored
View File

@@ -1,14 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"type": "cargo",
"command": "clippy",
"problemMatcher": [
"$rustc"
],
"group": "test",
"label": "rust: cargo clippy"
}
]
}

View File

@@ -1,50 +0,0 @@
{
// Place your topgrade workspace snippets here. Each snippet is defined under a snippet name and has a scope, prefix, body and
// description. Add comma separated ids of the languages where the snippet is applicable in the scope field. If scope
// is left empty or omitted, the snippet gets applied to all languages. The prefix is what is
// used to trigger the snippet and the body will be expanded and inserted. Possible variables are:
// $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
// Placeholders with the same ids are connected.
// Example:
// "Print to console": {
// "scope": "javascript,typescript",
// "prefix": "log",
// "body": [
// "console.log('$1');",
// "$2"
// ],
// "description": "Log output to console"
// }
"Skip Step": {
"scope": "rust",
"prefix": "skipstep",
"body": [
"return Err(SkipStep(format!(\"$1\")).into());"
]
},
"Step": {
"scope": "rust",
"prefix": "step",
"body": [
"pub fn $1(ctx: &ExecutionContext) -> Result<()> {",
" $0",
" Ok(())",
"}"
]
},
"Require Binary": {
"scope": "rust",
"prefix": "req",
"description": "Require a binary to be installed",
"body": [
"let ${1:binary} = require(\"${1:binary}\")?;"
]
},
"macos": {
"scope": "rust",
"prefix": "macos",
"body": [
"#[cfg(target_os = \"macos\")]"
]
}
}

9
BREAKINGCHANGES.md Normal file
View File

@@ -0,0 +1,9 @@
# Git: Pull Repos
1. The output of "Pulling <repository path>" has been moved behind the
--verbose flag / [misc] configuration block.
# Configuration
1. The `enable_winget` configuration entry in the `windows` section has been
removed because the step will not cause any issues and is now enabled by default.

0
BREAKINGCHANGES_dev.md Normal file
View File

152
CONTRIBUTING.md Normal file
View File

@@ -0,0 +1,152 @@
## Contributing to `topgrade`
Thank you for your interest in contributing to `topgrade`!
We welcome and encourage contributions of all kinds, such as:
1. Issue reports or feature requests
2. Documentation improvements
3. Code (PR or PR Review)
Please follow the [Karma Runner guidelines](http://karma-runner.github.io/6.2/dev/git-commit-msg.html)
for commit messages.
## Adding a new `step`
In `topgrade`'s terms, a package manager is called a `step`.
To add a new `step` to `topgrade`:
1. Add a new variant to
[`enum Step`](https://github.com/topgrade-rs/topgrade/blob/cb7adc8ced8a77addf2cb051d18bba9f202ab866/src/config.rs#L100)
```rust
pub enum Step {
// Existed steps
// ...
// Your new step here!
// You may want it to be sorted alphabetically because that looks great:)
Xxx,
}
```
2. Implement the update function
Find the appropriate location for this update function: it should live in
a file under [`src/steps`](https://github.com/topgrade-rs/topgrade/tree/master/src/steps).
The file names are self-explanatory; for example, `step`s related to `zsh` are
placed in [`steps/zsh.rs`](https://github.com/topgrade-rs/topgrade/blob/master/src/steps/zsh.rs).
Then implement the update function and put it in the file where it belongs.
```rust
pub fn run_xxx(ctx: &ExecutionContext) -> Result<()> {
// Check if this step is installed, if not, then this update will be skipped.
let xxx = require("xxx")?;
// Print the separator
print_separator("xxx");
// Invoke the new step to get things updated!
ctx.run_type()
.execute("xxx")
.arg(/* args required by this step */)
.status_checked()
}
```
Such an update function is conventionally named `run_xxx()`, where `xxx`
is the name of the new step, and it should take an argument of type
`&ExecutionContext`. This is adequate for most cases unless some extra stuff is
needed (you can find some examples where extra arguments are needed
[here](https://github.com/topgrade-rs/topgrade/blob/7e48c5dedcfd5d0124bb9f39079a03e27ed23886/src/main.rs#L201-L219)).
An update function usually does three things:
1. Check if the step is installed
2. Print the separator
3. Invoke the step
This is sufficient for most tools, but a complicated `step` may need some
extra work.
3. Finally, invoke that update function in `main.rs`
```rust
runner.execute(Step::Xxx, "xxx", || ItsModule::run_xxx(&ctx))?;
```
We use [conditional compilation](https://doc.rust-lang.org/reference/conditional-compilation.html)
to separate the steps; for example, for a step that is Linux-only, it goes
like this:
```
#[cfg(target_os = "linux")]
{
// Xxx is Linux-only
runner.execute(Step::Xxx, "xxx", || ItsModule::run_xxx(&ctx))?;
}
```
Congrats, you just added a new `step`:)
## Modification to the configuration entries
If your PR modifies the configuration options
(in [`src/config.rs`](https://github.com/topgrade-rs/topgrade/blob/master/src/config.rs)), i.e.:
1. Adds new options
2. Changes existing options
Be sure to apply your changes to
[`config.example.toml`](https://github.com/topgrade-rs/topgrade/blob/master/config.example.toml),
and add some basic documentation guiding users on how to use these options.
## Breaking changes
If your PR introduces a breaking change, document it in [`BREAKINGCHANGES_dev.md`][bc_dev];
it should be written in Markdown and wrapped at 80 columns, for example:
```md
1. The configuration location has been updated to x.
2. The step x has been removed.
3. ...
```
[bc_dev]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES_dev.md
## Before you submit your PR
Make sure your patch passes the following tests on your host:
```shell
$ cargo build
$ cargo fmt
$ cargo clippy
$ cargo test
```
Don't worry about other platforms; we have most of them covered in our CI.
## Some tips
1. Locale
Some `step`s respect the locale, which means their output can be in a language other
than English, so we should not run checks against it.
For example, one may want to check if a tool works by doing this:
```rust
let output = Command::new("xxx").arg("--help").output().unwrap();
let stdout = from_utf8(output.stdout).expect("Assume it is UTF-8 encoded");
if stdout.contains("help") {
// xxx works
}
```
If `xxx` respects the locale, then the above code works on an English system, but
on a system that does not use English (e.g., one that uses Chinese), that `"help"` may be
translated to `"帮助"`, and the above code won't work.
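A locale-independent alternative (a minimal sketch, reusing the placeholder `xxx` tool from the example above) is to rely on the exit status rather than on the human-readable output:
```rust
use std::process::Command;

// Sketch only: `xxx` is the hypothetical tool from the example above.
fn xxx_works() -> bool {
    Command::new("xxx")
        .arg("--help")
        .output()
        // Unlike the printed text, the exit status is not translated.
        .map(|output| output.status.success())
        .unwrap_or(false)
}

fn main() {
    println!("xxx works: {}", xxx_works());
}
```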

2582
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -4,11 +4,10 @@ description = "Upgrade all the things"
categories = ["os"] categories = ["os"]
keywords = ["upgrade", "update"] keywords = ["upgrade", "update"]
license = "GPL-3.0" license = "GPL-3.0"
# license-file = "LICENSE"
repository = "https://github.com/topgrade-rs/topgrade" repository = "https://github.com/topgrade-rs/topgrade"
version = "10.2.3" version = "15.0.0"
authors = ["Roey Darwish Dror <roey.ghost@gmail.com>", "Thomas Schönauer <t.schoenauer@hgs-wt.at>"] authors = ["Roey Darwish Dror <roey.ghost@gmail.com>", "Thomas Schönauer <t.schoenauer@hgs-wt.at>"]
exclude = ["doc/screenshot.gif"] exclude = ["doc/screenshot.gif", "BREAKINGCHANGES_dev.md"]
edition = "2021" edition = "2021"
readme = "README.md" readme = "README.md"
@@ -22,37 +21,39 @@ path = "src/main.rs"
[dependencies] [dependencies]
home = "~0.5" home = "~0.5"
directories = "~4.0" etcetera = "~0.8"
once_cell = "~1.19"
serde = { version = "~1.0", features = ["derive"] } serde = { version = "~1.0", features = ["derive"] }
toml = "0.5" toml = "0.8"
which_crate = { version = "~4.1", package = "which" } which_crate = { version = "~6.0", package = "which" }
shellexpand = "~2.1" shellexpand = "~3.1"
clap = { version = "~3.1", features = ["cargo", "derive"] } clap = { version = "~4.5", features = ["cargo", "derive"] }
clap_complete = "~3.1" clap_complete = "~4.5"
clap_mangen = "~0.1" clap_mangen = "~0.2"
walkdir = "~2.3" walkdir = "~2.5"
console = "~0.15" console = "~0.15"
lazy_static = "~1.4" lazy_static = "~1.4"
chrono = "~0.4" chrono = "~0.4"
glob = "~0.3" glob = "~0.3"
strum = { version = "~0.24", features = ["derive"] } strum = { version = "~0.26", features = ["derive"] }
thiserror = "~1.0" thiserror = "~1.0"
tempfile = "~3.2" tempfile = "~3.10"
cfg-if = "~1.0" cfg-if = "~1.0"
tokio = { version = "~1.8", features = ["process", "rt-multi-thread"] } tokio = { version = "~1.38", features = ["process", "rt-multi-thread"] }
futures = "~0.3" futures = "~0.3"
regex = "~1.5" regex = "~1.10"
semver = "~1.0" semver = "~1.0"
shell-words = "~1.1" shell-words = "~1.1"
color-eyre = "~0.6" color-eyre = "~0.6"
tracing = { version = "~0.1", features = ["attributes", "log"] } tracing = { version = "~0.1", features = ["attributes", "log"] }
tracing-subscriber = { version = "~0.3", features = ["env-filter", "time"] } tracing-subscriber = { version = "~0.3", features = ["env-filter", "time"] }
merge = "~0.1"
[target.'cfg(target_os = "macos")'.dependencies] regex-split = "~0.1"
notify-rust = "~4.5" notify-rust = "~4.11"
wildmatch = "2.3.0"
[package.metadata.generate-rpm] [package.metadata.generate-rpm]
assets = [{source = "target/release/topgrade", dest="/usr/bin/topgrade"}] assets = [{ source = "target/release/topgrade", dest = "/usr/bin/topgrade" }]
[package.metadata.generate-rpm.requires] [package.metadata.generate-rpm.requires]
git = "*" git = "*"
@@ -61,13 +62,12 @@ git = "*"
depends = "$auto,git" depends = "$auto,git"
[target.'cfg(unix)'.dependencies] [target.'cfg(unix)'.dependencies]
libc = "~0.2" nix = { version = "~0.29", features = ["hostname", "signal", "user"] }
nix = "~0.24" rust-ini = "~0.21"
rust-ini = "~0.18" self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }
self_update_crate = { version = "~0.30", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
self_update_crate = { version = "~0.30", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] } self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
winapi = "~0.3" winapi = "~0.3"
parselnk = "~0.1" parselnk = "~0.1"

View File

@@ -1,4 +0,0 @@
# Workaround for: https://github.com/cross-rs/cross/issues/1100
# TODO: Remove this file altogether once a new version of cross (after v0.2.4) is released.
[target.x86_64-unknown-freebsd.env]
passthrough = ["AR_x86_64_unknown_freebsd=x86_64-unknown-freebsd12-ar"]

View File

@@ -8,9 +8,10 @@
<a href="https://aur.archlinux.org/packages/topgrade"><img alt="AUR" src="https://img.shields.io/aur/version/topgrade.svg"></a> <a href="https://aur.archlinux.org/packages/topgrade"><img alt="AUR" src="https://img.shields.io/aur/version/topgrade.svg"></a>
<a href="https://formulae.brew.sh/formula/topgrade"><img alt="Homebrew" src="https://img.shields.io/homebrew/v/topgrade.svg"></a> <a href="https://formulae.brew.sh/formula/topgrade"><img alt="Homebrew" src="https://img.shields.io/homebrew/v/topgrade.svg"></a>
<img alt="Demo" src="doc/screenshot.gif" width="550px"> <img alt="Demo" src="doc/topgrade_demo.gif">
</div> </div>
## Introduction ## Introduction
> **Note** > **Note**
@@ -28,31 +29,56 @@ To remedy this, **Topgrade** detects which tools you use and runs the appropriat
- NixOS: [Nixpkgs](https://search.nixos.org/packages?show=topgrade) - NixOS: [Nixpkgs](https://search.nixos.org/packages?show=topgrade)
- Void Linux: [XBPS](https://voidlinux.org/packages/?arch=x86_64&q=topgrade) - Void Linux: [XBPS](https://voidlinux.org/packages/?arch=x86_64&q=topgrade)
- macOS: [Homebrew](https://formulae.brew.sh/formula/topgrade) or [MacPorts](https://ports.macports.org/port/topgrade/) - macOS: [Homebrew](https://formulae.brew.sh/formula/topgrade) or [MacPorts](https://ports.macports.org/port/topgrade/)
- Windows: [Scoop][scoop] or [Winget][winget]
- PyPi: [pip](https://pypi.org/project/topgrade/)
[scoop]: https://scoop.sh/#/apps?q=topgrade
[winget]: https://winstall.app/apps/topgrade-rs.topgrade
Other systems users can either use `cargo install` or the compiled binaries from the release page. Other systems users can either use `cargo install` or the compiled binaries from the release page.
The compiled binaries contain a self-upgrading feature. The compiled binaries contain a self-upgrading feature.
Topgrade requires Rust 1.60 or above. > Currently, Topgrade requires Rust 1.65 or above. In general, Topgrade tracks
> the latest stable toolchain.
## Usage ## Usage
Just run `topgrade`. Just run `topgrade`.
Visit the documentation at [topgrade-rs.github.io](https://topgrade-rs.github.io/) for more information. ## Configuration
> **Warning**
> Work in Progress
## Customization
See `config.example.toml` for an example configuration file. See `config.example.toml` for an example configuration file.
## Migration and Breaking Changes
Whenever there is a **breaking change**, the major version number will be bumped,
and we will document these changes in the release notes; please take a look at
them when updating to a major release.
> Got a question? Feel free to open an issue or discussion!
### Configuration Path ### Configuration Path
The configuration should be placed in the following paths depending on the operating system: #### `CONFIG_DIR` on each platform
- **Windows**: `%APPDATA%`
- **macOS** and **other Unix systems**: `${XDG_CONFIG_HOME:-~/.config}`
- **Windows** - `%APPDATA%/topgrade.toml` `topgrade` will look for the configuration file in the following places, in order of priority:
- **macOS** and **other Unix systems** - `${XDG_CONFIG_HOME:-~/.config}/topgrade.toml`
1. `CONFIG_DIR/topgrade.toml`
2. `CONFIG_DIR/topgrade/topgrade.toml`
If the file with higher priority is present, no matter whether it is valid or not, the other configuration files will be ignored.
On the first run (when no configuration file exists), `topgrade` will create a configuration file at `CONFIG_DIR/topgrade.toml` for you.
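To make the lookup order concrete, here is a minimal sketch (not Topgrade's actual code) of the resolution described above, where `config_dir` stands for the platform-specific `CONFIG_DIR`:
```rust
use std::path::{Path, PathBuf};

// Sketch: the higher-priority file wins as soon as it exists, valid or not.
fn config_path(config_dir: &Path) -> PathBuf {
    let primary = config_dir.join("topgrade.toml");
    if primary.exists() {
        return primary;
    }
    config_dir.join("topgrade").join("topgrade.toml")
}

fn main() {
    // e.g. `~/.config` on Unix or `%APPDATA%` on Windows
    let dir = PathBuf::from("/home/user/.config");
    println!("{}", config_path(&dir).display());
}
```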
### Custom Commands
Custom commands can be defined in the config file and can be run before, during, or after the inbuilt commands, as required.
By default, custom commands are run using a new shell according to the `$SHELL` environment variable on Unix (falling back to `sh`) or `pwsh` on Windows (falling back to `powershell`).
On Unix, if you want to run your command using an interactive shell, for example to source your shell's rc files, you can add `-i` at the start of your custom command (see the sketch below).
Note that this requires the command to exit the shell correctly, or else the shell will hang indefinitely.
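As a rough illustration (a sketch under assumptions, not Topgrade's actual implementation), dispatching a custom command on Unix could look like this, with a leading `-i` switching to an interactive shell:
```rust
use std::env;
use std::io;
use std::process::{Command, ExitStatus};

// Sketch only: run a custom command through the user's shell.
fn run_custom_command(cmd: &str) -> io::Result<ExitStatus> {
    // `$SHELL` on Unix, falling back to `sh`.
    let shell = env::var("SHELL").unwrap_or_else(|_| "sh".to_string());

    // A leading `-i` asks for an interactive shell so rc files are sourced.
    let (interactive, body) = match cmd.strip_prefix("-i ") {
        Some(rest) => (true, rest),
        None => (false, cmd),
    };

    let mut shell_cmd = Command::new(shell);
    if interactive {
        shell_cmd.arg("-i");
    }
    shell_cmd.arg("-c").arg(body).status()
}

fn main() -> io::Result<()> {
    let status = run_custom_command("-i echo hello from an interactive shell")?;
    println!("exit status: {status}");
    Ok(())
}
```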
## Remote Execution ## Remote Execution
@@ -78,9 +104,7 @@ Just fork the repository and start coding.
### Contribution Guidelines ### Contribution Guidelines
- Check if your code passes `cargo fmt` and `cargo clippy`. See [CONTRIBUTING.md](https://github.com/topgrade-rs/topgrade/blob/master/CONTRIBUTING.md)
- Check if your code is self explanatory, if not it should be documented by comments.
- Make a pull request to the `dev` branch for new features or to the `bug-fixes` branch for bug fixes.
## Roadmap ## Roadmap

65
RELEASE_PROCEDURE.md Normal file
View File

@@ -0,0 +1,65 @@
> This document lists the steps that lead to a successful release of Topgrade.
1. Open a PR that:
> Here is an [Example PR](https://github.com/topgrade-rs/topgrade/pull/652)
> that you can refer to.
1. bumps the version number.
> If there are breaking changes, the major version number should be increased.
2. Overwrite [`BREAKINGCHANGES`][breaking_changes] with
[`BREAKINGCHANGES_dev`][breaking_changes_dev], and create a new dev file:
```sh
$ cd topgrade
$ cp BREAKINGCHANGES_dev.md BREAKINGCHANGES.md
$ touch BREAKINGCHANGES_dev.md
```
[breaking_changes_dev]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES_dev.md
[breaking_changes]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES.md
2. Check and merge that PR.
3. Go to the [release](https://github.com/topgrade-rs/topgrade/releases) page
and click the [Draft a new release button](https://github.com/topgrade-rs/topgrade/releases/new)
4. Write the release notes
We usually use GitHub's [Automatically generated release notes][auto_gen_release_notes]
functionality to generate release notes, but you can write your own instead.
[auto_gen_release_notes]: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes
5. Attach binaries
You don't need to do this manually, as our CI will automatically do it for you:
binaries for Linux, macOS, and Windows will be created and attached.
And the CI will publish the new binary to:
1. AUR
2. PyPi
3. Homebrew (seems that this is not working correctly)
4. Winget
6. Manually release it to Crates.io
> Yeah, this is unfortunate; our CI won't do this for us. We should probably add one.
1. `cd` to the Topgrade directory and make sure that it is at the latest version
(i.e., including the PR that bumps the version number).
2. Set up your token with `cargo login`.
3. Dry-run the publish `cargo publish --dry-run`.
4. If step 3 works, then do the final release `cargo publish`.
> You can also take a look at the official tutorial [Publishing on crates.io][doc]
>
> [doc]: https://doc.rust-lang.org/cargo/reference/publishing.html

View File

@@ -1,117 +1,253 @@
# Don't ask for confirmations # Include any additional configuration file(s)
#assume_yes = true # [include] sections are processed in the order you write them
# Files in $CONFIG_DIR/topgrade.d/ are automatically included before this file
[include]
# paths = ["/etc/topgrade.toml"]
[misc]
# Run `sudo -v` to cache credentials at the start of the run
# This avoids a blocking password prompt in the middle of an unattended run
# (default: false)
# pre_sudo = false
# Sudo command to be used
# sudo_command = "sudo"
# Disable specific steps - same options as the command line flag # Disable specific steps - same options as the command line flag
#disable = ["system", "emacs"] # disable = ["system", "emacs"]
# Ignore failures for these steps # Ignore failures for these steps
#ignore_failures = ["powershell"] # ignore_failures = ["powershell"]
# Run specific steps - same options as the command line flag
#only = ["system", "emacs"]
# Do not ask to retry failed steps (default: false)
#no_retry = true
# Run `sudo -v` to cache credentials at the start of the run; this avoids a
# blocking password prompt in the middle of a possibly-unattended run.
#pre_sudo = false
# Run inside tmux
#run_in_tmux = true
# List of remote machines with Topgrade installed on them # List of remote machines with Topgrade installed on them
#remote_topgrades = ["toothless", "pi", "parnas"] # remote_topgrades = ["toothless", "pi", "parnas"]
# Arguments to pass SSH when upgrading remote systems
#ssh_arguments = "-o ConnectTimeout=2"
# Path to Topgrade executable on remote machines # Path to Topgrade executable on remote machines
#remote_topgrade_path = ".cargo/bin/topgrade" # remote_topgrade_path = ".cargo/bin/topgrade"
# Arguments to pass to SSH when upgrading remote systems
# ssh_arguments = "-o ConnectTimeout=2"
# Arguments to pass tmux when pulling Repositories # Arguments to pass tmux when pulling Repositories
#tmux_arguments = "-S /var/tmux.sock" # tmux_arguments = "-S /var/tmux.sock"
# Do not set the terminal title # Do not set the terminal title (default: true)
#set_title = false # set_title = true
# Display the time in step titles # Display the time in step titles (default: true)
# display_time = true # display_time = true
# Cleanup temporary or old files # Don't ask for confirmations (no default value)
#cleanup = true # assume_yes = true
# Skip sending a notification at the end of a run # Do not ask to retry failed steps (default: false)
#skip_notify = true # no_retry = true
[git] # Run inside tmux (default: false)
#max_concurrency = 5 # run_in_tmux = true
# Additional git repositories to pull
#repos = [
# "~/src/*/",
# "~/.config/something"
#]
# Don't pull the predefined git repos # Cleanup temporary or old files (default: false)
#pull_predefined = false # cleanup = true
# Arguments to pass Git when pulling Repositories # Send a notification for every step (default: false)
#arguments = "--rebase --autostash" # notify_each_step = false
# Skip sending a notification at the end of a run (default: false)
# skip_notify = true
# The Bash-it branch to update (default: "stable")
# bashit_branch = "stable"
# Run specific steps - same options as the command line flag
# only = ["system", "emacs"]
# Whether to self update
#
# this will be ignored if the binary is built without self update support
#
# available also via setting the environment variable TOPGRADE_NO_SELF_UPGRADE)
# no_self_update = true
# Extra tracing filter directives
# These are prepended to the `--log-filter` argument
# See: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives
# log_filters = ["topgrade::command=debug", "warn"]
[composer]
#self_update = true
# Commands to run before anything # Commands to run before anything
[pre_commands] [pre_commands]
#"Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak" # "Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
# Commands to run after anything
[post_commands]
# "Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
# Custom commands # Custom commands
[commands] [commands]
#"Python Environment" = "~/dev/.env/bin/pip install -i https://pypi.python.org/simple -U --upgrade-strategy eager jupyter" # "Python Environment" = "~/dev/.env/bin/pip install -i https://pypi.python.org/simple -U --upgrade-strategy eager jupyter"
# "Custom command using interactive shell (unix)" = "-i vim_upgrade"
[python]
# enable_pip_review = true ###disabled by default
# enable_pip_review_local = true ###disabled by default
# enable_pipupgrade = true ###disabled by default
# pipupgrade_arguments = "-y -u --pip-path pip" ###disabled by default
[composer]
# self_update = true
[brew] [brew]
#greedy_cask = true # For the BrewCask step
#autoremove = true # If `Repo Cask Upgrade` exists, then use the `-a` option.
# Otherwise, use the `--greedy` option.
# greedy_cask = true
# For the BrewCask step
# If `Repo Cask Upgrade` does not exist, then use the `--greedy_latest` option.
# NOTE: the above entry `greedy_cask` subsumes this entry; you can enable
# both of them, and they won't clash with each other.
# greedy_latest = true
# For the BrewFormula step
# Execute `brew autoremove` after the step.
# autoremove = true
# For the BrewFormula step
# Upgrade formulae built from the HEAD branch; `brew upgrade --fetch-HEAD`
# fetch_head = true
[linux] [linux]
# Arch Package Manager to use. Allowed values: autodetect, trizen, aura, paru, yay, pikaur, pacman, pamac. # Arch Package Manager to use.
#arch_package_manager = "pacman" # Allowed values:
# autodetect, aura, garuda_update, pacman, pamac, paru, pikaur, trizen, yay
# arch_package_manager = "pacman"
# Arguments to pass yay (or paru) when updating packages # Arguments to pass yay (or paru) when updating packages
#yay_arguments = "--nodevel" # yay_arguments = "--nodevel"
#aura_aur_arguments = "-kx"
#aura_pacman_arguments = "" # Arguments to pass dnf when updating packages
#show_arch_news = true # dnf_arguments = "--refresh"
#trizen_arguments = "--devel"
#pikaur_arguments = "" # aura_aur_arguments = "-kx"
#pamac_arguments = "--no-devel"
#enable_tlmgr = true # aura_pacman_arguments = ""
#emerge_sync_flags = "-q" # garuda_update_arguments = ""
#emerge_update_flags = "-uDNa --with-bdeps=y world"
#redhat_distro_sync = false # show_arch_news = true
#rpm_ostree = false
# trizen_arguments = "--devel"
# pikaur_arguments = ""
# pamac_arguments = "--no-devel"
# enable_tlmgr = true
# emerge_sync_flags = "-q"
# emerge_update_flags = "-uDNa --with-bdeps=y world"
# redhat_distro_sync = false
# suse_dup = false
# rpm_ostree = false
# nix_arguments = "--flake"
# nix_env_arguments = "--prebuilt-only"
# Extra Home Manager arguments
# home_manager_arguments = ["--flake", "file"]
[git]
# How many repos to pull at max in parallel
# max_concurrency = 5
# Additional git repositories to pull
# repos = [
# "~/src/*/",
# "~/.config/something"
# ]
# Don't pull the predefined git repos
# pull_predefined = false
# Arguments to pass Git when pulling Repositories
# arguments = "--rebase --autostash"
[windows] [windows]
# Manually select Windows updates # Manually select Windows updates
#accept_all_updates = false # accept_all_updates = false
#open_remotes_in_new_terminal = true
# open_remotes_in_new_terminal = true
# wsl_update_pre_release = true
# wsl_update_use_web_download = true
# Causes Topgrade to rename itself during the run to allow package managers # Causes Topgrade to rename itself during the run to allow package managers
# to upgrade it. Use this only if you installed Topgrade by using a package # to upgrade it. Use this only if you installed Topgrade by using a package
# manager such as Scoop or Cargo # manager such as Scoop or Cargo
#self_rename = true # self_rename = true
[npm] [npm]
# Use sudo if the NPM directory isn't owned by the current user # Use sudo if the NPM directory isn't owned by the current user
#use_sudo = true # use_sudo = true
[yarn]
# Run `yarn global upgrade` with `sudo`
# use_sudo = true
[vim]
# For `vim-plug`, execute `PlugUpdate!` instead of `PlugUpdate`
# force_plug_update = true
[firmware] [firmware]
# Offer to update firmware; if false just check for and display available updates # Offer to update firmware; if false just check for and display available updates
#upgrade = true # upgrade = true
[vagrant]
# Vagrant directories
# directories = []
# power on vagrant boxes if needed
# power_on = true
# Always suspend vagrant boxes instead of powering off
# always_suspend = true
[flatpak] [flatpak]
# Use sudo for updating the system-wide installation # Use sudo for updating the system-wide installation
#use_sudo = true # use_sudo = true
[distrobox] [distrobox]
#use_root = false # use_root = false
#containers = ["archlinux-latest"]
# containers = ["archlinux-latest"]
[containers]
# Specify the containers to ignore while updating (Wildcard supported)
# ignored_containers = ["ghcr.io/rancher-sandbox/rancher-desktop/rdx-proxy:latest", "docker.io*"]
[lensfun]
# If disabled, Topgrade invokes `lensfunupdatedata` without root privilege,
# and the update will only be available to you. Otherwise, `sudo` is required,
# and the update will be installed system-wide, i.e., available to all users.
# (default: false)
# use_sudo = false

Binary file not shown. (Before: 718 KiB)

BIN
doc/topgrade_demo.gif Normal file

Binary file not shown. (After: 4.1 MiB)

16
pyproject.toml Normal file
View File

@@ -0,0 +1,16 @@
[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"
[project]
name = "topgrade"
requires-python = ">=3.7"
classifiers = [
"Programming Language :: Rust",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
[tool.maturin]
bindings = "bin"

167
src/breaking_changes.rs Normal file
View File

@@ -0,0 +1,167 @@
//! Inform the users of the breaking changes introduced in this major release.
//!
//! Print the breaking changes and possibly a migration guide when:
//! 1. The Topgrade being executed is a new major release
//! 2. This is the first launch of that major release
use crate::terminal::print_separator;
#[cfg(windows)]
use crate::WINDOWS_DIRS;
#[cfg(unix)]
use crate::XDG_DIRS;
use color_eyre::eyre::Result;
use etcetera::base_strategy::BaseStrategy;
use std::{
env::var,
fs::{read_to_string, OpenOptions},
io::Write,
path::PathBuf,
str::FromStr,
};
/// Version string x.y.z
static VERSION_STR: &str = env!("CARGO_PKG_VERSION");
/// Version info
#[derive(Debug)]
pub(crate) struct Version {
_major: u64,
minor: u64,
patch: u64,
}
impl FromStr for Version {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> Result<Self, Self::Err> {
const NOT_SEMVER: &str = "Topgrade version is not semantic";
const NOT_NUMBER: &str = "Topgrade version is not dot-separated numbers";
let mut iter = s.split('.').take(3);
let major = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
let minor = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
let patch = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
// They cannot be all 0s
assert!(
!(major == 0 && minor == 0 && patch == 0),
"Version numbers can not be all 0s"
);
Ok(Self {
_major: major,
minor,
patch,
})
}
}
impl Version {
/// True if this version is a new major release.
pub(crate) fn is_new_major_release(&self) -> bool {
// We have already checked that they cannot all be zeros, so `self.major`
// is guaranteed to be non-zero.
self.minor == 0 && self.patch == 0
}
}
/// Topgrade's breaking changes
///
/// We store them in the compiled binary.
pub(crate) static BREAKINGCHANGES: &str = include_str!("../BREAKINGCHANGES.md");
/// Return platform's data directory.
fn data_dir() -> PathBuf {
#[cfg(unix)]
return XDG_DIRS.data_dir();
#[cfg(windows)]
return WINDOWS_DIRS.data_dir();
}
/// Return Topgrade's keep file path.
///
/// The keep file is a file under the data directory containing a major version
/// number. It will be created on the first run and is used to check whether an
/// execution of Topgrade is the first run of a major release; for more details,
/// see `first_run_of_major_release()`.
fn keep_file_path() -> PathBuf {
let keep_file = "topgrade_keep";
data_dir().join(keep_file)
}
/// If environment variable `TOPGRADE_SKIP_BRKC_NOTIFY` is set to `true`, then
/// we won't notify the user of the breaking changes.
pub(crate) fn should_skip() -> bool {
if let Ok(var) = var("TOPGRADE_SKIP_BRKC_NOTIFY") {
return var.as_str() == "true";
}
false
}
/// True if this is the first execution of a major release.
pub(crate) fn first_run_of_major_release() -> Result<bool> {
let version = VERSION_STR.parse::<Version>().expect("should be a valid version");
let keep_file = keep_file_path();
// disable this lint here as the current code has better readability
#[allow(clippy::collapsible_if)]
if version.is_new_major_release() {
if !keep_file.exists() || read_to_string(&keep_file)? != VERSION_STR {
return Ok(true);
}
}
Ok(false)
}
/// Print breaking changes to the user.
pub(crate) fn print_breaking_changes() {
let header = format!("Topgrade {VERSION_STR} Breaking Changes");
print_separator(header);
let contents = if BREAKINGCHANGES.is_empty() {
"No Breaking changes"
} else {
BREAKINGCHANGES
};
println!("{contents}\n");
}
/// This function will ONLY be executed when the user has confirmed the breaking
/// changes; once confirmed, we write the keep file, which means the first run
/// of this major release is finished.
pub(crate) fn write_keep_file() -> Result<()> {
std::fs::create_dir_all(data_dir())?;
let keep_file = keep_file_path();
let mut file = OpenOptions::new()
.create(true)
.write(true)
.truncate(true)
.open(keep_file)?;
let _ = file.write(VERSION_STR.as_bytes())?;
Ok(())
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn is_new_major_release_works() {
let first_major_release: Version = "1.0.0".parse().unwrap();
let under_dev: Version = "0.1.0".parse().unwrap();
assert!(first_major_release.is_new_major_release());
assert!(!under_dev.is_new_major_release());
}
#[test]
#[should_panic(expected = "Version numbers can not be all 0s")]
fn invalid_version() {
let all_0 = "0.0.0";
all_0.parse::<Version>().unwrap();
}
}
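A rough sketch of how these pieces could fit together (an assumed flow, not the actual call sites in `main.rs`):
```rust
// Hypothetical wiring; the function names mirror the ones defined above.
fn notify_breaking_changes() -> color_eyre::eyre::Result<()> {
    if should_skip() {
        return Ok(());
    }
    if first_run_of_major_release()? {
        print_breaking_changes();
        // ...wait for the user's confirmation here, then record that this
        // major release has been acknowledged:
        write_keep_file()?;
    }
    Ok(())
}
```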

View File

@@ -10,6 +10,8 @@ use color_eyre::eyre::Context;
use crate::error::TopgradeError; use crate::error::TopgradeError;
use tracing::debug;
/// Like [`Output`], but UTF-8 decoded. /// Like [`Output`], but UTF-8 decoded.
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub struct Utf8Output { pub struct Utf8Output {
@@ -183,7 +185,7 @@ impl CommandExt for Command {
let err = TopgradeError::ProcessFailedWithOutput(program, output.status, stderr.into_owned()); let err = TopgradeError::ProcessFailedWithOutput(program, output.status, stderr.into_owned());
let ret = Err(err).with_context(|| message); let ret = Err(err).with_context(|| message);
tracing::debug!("Command failed: {ret:?}"); debug!("Command failed: {ret:?}");
ret ret
} }
} }
@@ -203,7 +205,7 @@ impl CommandExt for Command {
let (program, _) = get_program_and_args(self); let (program, _) = get_program_and_args(self);
let err = TopgradeError::ProcessFailed(program, status); let err = TopgradeError::ProcessFailed(program, status);
let ret = Err(err).with_context(|| format!("Command failed: `{command}`")); let ret = Err(err).with_context(|| format!("Command failed: `{command}`"));
tracing::debug!("Command failed: {ret:?}"); debug!("Command failed: {ret:?}");
ret ret
} }
} }
@@ -239,6 +241,6 @@ fn format_program_and_args(cmd: &Command) -> String {
fn log(cmd: &Command) -> String { fn log(cmd: &Command) -> String {
let command = format_program_and_args(cmd); let command = format_program_and_args(cmd);
tracing::debug!("Executing command `{command}`"); debug!("Executing command `{command}`");
command command
} }

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
//! SIGINT handling in Unix systems. //! SIGINT handling in Unix systems.
use crate::ctrlc::interrupted::set_interrupted; use crate::ctrlc::interrupted::set_interrupted;
use nix::sys::signal; use nix::sys::signal::{sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal};
/// Handle SIGINT. Set the interruption flag. /// Handle SIGINT. Set the interruption flag.
extern "C" fn handle_sigint(_: i32) { extern "C" fn handle_sigint(_: i32) {
@@ -10,12 +10,8 @@ extern "C" fn handle_sigint(_: i32) {
/// Set the necessary signal handlers. /// Set the necessary signal handlers.
/// The function panics on failure. /// The function panics on failure.
pub fn set_handler() { pub fn set_handler() {
let sig_action = signal::SigAction::new( let sig_action = SigAction::new(SigHandler::Handler(handle_sigint), SaFlags::empty(), SigSet::empty());
signal::SigHandler::Handler(handle_sigint),
signal::SaFlags::empty(),
signal::SigSet::empty(),
);
unsafe { unsafe {
signal::sigaction(signal::SIGINT, &sig_action).unwrap(); sigaction(Signal::SIGINT, &sig_action).unwrap();
} }
} }

View File

@@ -1,5 +1,6 @@
//! A stub for Ctrl + C handling. //! A stub for Ctrl + C handling.
use crate::ctrlc::interrupted::set_interrupted; use crate::ctrlc::interrupted::set_interrupted;
use tracing::error;
use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE}; use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE};
use winapi::um::consoleapi::SetConsoleCtrlHandler; use winapi::um::consoleapi::SetConsoleCtrlHandler;
use winapi::um::wincon::CTRL_C_EVENT; use winapi::um::wincon::CTRL_C_EVENT;
@@ -16,6 +17,6 @@ extern "system" fn handler(ctrl_type: DWORD) -> BOOL {
pub fn set_handler() { pub fn set_handler() {
if 0 == unsafe { SetConsoleCtrlHandler(Some(handler), TRUE) } { if 0 == unsafe { SetConsoleCtrlHandler(Some(handler), TRUE) } {
tracing::error!("Cannot set a control C handler") error!("Cannot set a control C handler")
} }
} }

View File

@@ -10,14 +10,14 @@ pub enum TopgradeError {
#[error("`{0}` failed: {1}")] #[error("`{0}` failed: {1}")]
ProcessFailedWithOutput(String, ExitStatus, String), ProcessFailedWithOutput(String, ExitStatus, String),
#[error("Sudo is required for this step")]
#[allow(dead_code)]
SudoRequired,
#[error("Unknown Linux Distribution")] #[error("Unknown Linux Distribution")]
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
UnknownLinuxDistribution, UnknownLinuxDistribution,
#[error("File \"/etc/os-release\" does not exist or is empty")]
#[cfg(target_os = "linux")]
EmptyOSReleaseFile,
#[error("Failed getting the system package manager")] #[error("Failed getting the system package manager")]
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
FailedGettingPackageManager, FailedGettingPackageManager,

View File

@@ -1,46 +1,39 @@
#![allow(dead_code)] #![allow(dead_code)]
use crate::executor::RunType; use crate::executor::RunType;
use crate::git::Git;
use crate::sudo::Sudo; use crate::sudo::Sudo;
use crate::utils::require_option; use crate::utils::{require_option, REQUIRE_SUDO};
use crate::{config::Config, executor::Executor}; use crate::{config::Config, executor::Executor};
use color_eyre::eyre::Result; use color_eyre::eyre::Result;
use directories::BaseDirs; use std::env::var;
use std::path::Path; use std::path::Path;
use std::sync::Mutex; use std::sync::Mutex;
pub struct ExecutionContext<'a> { pub struct ExecutionContext<'a> {
run_type: RunType, run_type: RunType,
sudo: Option<Sudo>, sudo: Option<Sudo>,
git: &'a Git,
config: &'a Config, config: &'a Config,
base_dirs: &'a BaseDirs,
/// Name of a tmux session to execute commands in, if any. /// Name of a tmux session to execute commands in, if any.
/// This is used in `./steps/remote/ssh.rs`, where we want to run `topgrade` in a new /// This is used in `./steps/remote/ssh.rs`, where we want to run `topgrade` in a new
/// tmux window for each remote. /// tmux window for each remote.
tmux_session: Mutex<Option<String>>, tmux_session: Mutex<Option<String>>,
/// True if topgrade is running under ssh.
under_ssh: bool,
} }
impl<'a> ExecutionContext<'a> { impl<'a> ExecutionContext<'a> {
pub fn new( pub fn new(run_type: RunType, sudo: Option<Sudo>, config: &'a Config) -> Self {
run_type: RunType, let under_ssh = var("SSH_CLIENT").is_ok() || var("SSH_TTY").is_ok();
sudo: Option<Sudo>,
git: &'a Git,
config: &'a Config,
base_dirs: &'a BaseDirs,
) -> Self {
Self { Self {
run_type, run_type,
sudo, sudo,
git,
config, config,
base_dirs,
tmux_session: Mutex::new(None), tmux_session: Mutex::new(None),
under_ssh,
} }
} }
pub fn execute_elevated(&self, command: &Path, interactive: bool) -> Result<Executor> { pub fn execute_elevated(&self, command: &Path, interactive: bool) -> Result<Executor> {
let sudo = require_option(self.sudo.clone(), "Sudo is required for this operation".into())?; let sudo = require_option(self.sudo.as_ref(), REQUIRE_SUDO.to_string())?;
Ok(sudo.execute_elevated(self, command, interactive)) Ok(sudo.execute_elevated(self, command, interactive))
} }
@@ -48,10 +41,6 @@ impl<'a> ExecutionContext<'a> {
self.run_type self.run_type
} }
pub fn git(&self) -> &Git {
self.git
}
pub fn sudo(&self) -> &Option<Sudo> { pub fn sudo(&self) -> &Option<Sudo> {
&self.sudo &self.sudo
} }
@@ -60,8 +49,8 @@ impl<'a> ExecutionContext<'a> {
self.config self.config
} }
pub fn base_dirs(&self) -> &BaseDirs { pub fn under_ssh(&self) -> bool {
self.base_dirs self.under_ssh
} }
pub fn set_tmux_session(&self, session_name: String) { pub fn set_tmux_session(&self, session_name: String) {

View File

@@ -3,7 +3,6 @@ use std::ffi::{OsStr, OsString};
use std::path::Path; use std::path::Path;
use std::process::{Child, Command, ExitStatus, Output}; use std::process::{Child, Command, ExitStatus, Output};
use color_eyre::eyre;
use color_eyre::eyre::Result; use color_eyre::eyre::Result;
use tracing::debug; use tracing::debug;
@@ -228,6 +227,7 @@ impl DryCommand {
/// The Result of spawn. Contains an actual `std::process::Child` if executed by a wet command. /// The Result of spawn. Contains an actual `std::process::Child` if executed by a wet command.
pub enum ExecutorChild { pub enum ExecutorChild {
#[allow(unused)] // this type has not been used
Wet(Child), Wet(Child),
Dry, Dry,
} }
@@ -238,7 +238,7 @@ impl CommandExt for Executor {
// TODO: It might be nice to make `output_checked_with` return something that has a // TODO: It might be nice to make `output_checked_with` return something that has a
// variant for wet/dry runs. // variant for wet/dry runs.
fn output_checked_with(&mut self, succeeded: impl Fn(&Output) -> Result<(), ()>) -> eyre::Result<Output> { fn output_checked_with(&mut self, succeeded: impl Fn(&Output) -> Result<(), ()>) -> Result<Output> {
match self { match self {
Executor::Wet(c) => c.output_checked_with(succeeded), Executor::Wet(c) => c.output_checked_with(succeeded),
Executor::Dry(c) => { Executor::Dry(c) => {
@@ -248,7 +248,7 @@ impl CommandExt for Executor {
} }
} }
fn status_checked_with(&mut self, succeeded: impl Fn(ExitStatus) -> Result<(), ()>) -> eyre::Result<()> { fn status_checked_with(&mut self, succeeded: impl Fn(ExitStatus) -> Result<(), ()>) -> Result<()> {
match self { match self {
Executor::Wet(c) => c.status_checked_with(succeeded), Executor::Wet(c) => c.status_checked_with(succeeded),
Executor::Dry(c) => { Executor::Dry(c) => {
@@ -258,7 +258,7 @@ impl CommandExt for Executor {
} }
} }
fn spawn_checked(&mut self) -> eyre::Result<Self::Child> { fn spawn_checked(&mut self) -> Result<Self::Child> {
self.spawn() self.spawn()
} }
} }

View File

@@ -2,13 +2,22 @@
use std::env; use std::env;
use std::io; use std::io;
use std::path::PathBuf;
use std::process::exit; use std::process::exit;
use std::time::Duration;
use crate::breaking_changes::{first_run_of_major_release, print_breaking_changes, should_skip, write_keep_file};
use clap::CommandFactory; use clap::CommandFactory;
use clap::{crate_version, Parser}; use clap::{crate_version, Parser};
use color_eyre::eyre::Context; use color_eyre::eyre::Context;
use color_eyre::eyre::{eyre, Result}; use color_eyre::eyre::Result;
use console::Key; use console::Key;
use etcetera::base_strategy::BaseStrategy;
#[cfg(windows)]
use etcetera::base_strategy::Windows;
#[cfg(unix)]
use etcetera::base_strategy::Xdg;
use once_cell::sync::Lazy;
use tracing::debug; use tracing::debug;
use self::config::{CommandLineArgs, Config, Step}; use self::config::{CommandLineArgs, Config, Step};
@@ -18,6 +27,9 @@ use self::error::Upgraded;
use self::steps::{remote::*, *}; use self::steps::{remote::*, *};
use self::terminal::*; use self::terminal::*;
use self::utils::{hostname, install_color_eyre, install_tracing, update_tracing};
mod breaking_changes;
mod command; mod command;
mod config; mod config;
mod ctrlc; mod ctrlc;
@@ -35,28 +47,43 @@ mod sudo;
mod terminal; mod terminal;
mod utils; mod utils;
pub(crate) static HOME_DIR: Lazy<PathBuf> = Lazy::new(|| home::home_dir().expect("No home directory"));
#[cfg(unix)]
pub(crate) static XDG_DIRS: Lazy<Xdg> = Lazy::new(|| Xdg::new().expect("No home directory"));
#[cfg(windows)]
pub(crate) static WINDOWS_DIRS: Lazy<Windows> = Lazy::new(|| Windows::new().expect("No home directory"));
fn run() -> Result<()> { fn run() -> Result<()> {
color_eyre::install()?; install_color_eyre()?;
ctrlc::set_handler(); ctrlc::set_handler();
let base_dirs = directories::BaseDirs::new().ok_or_else(|| eyre!("No base directories"))?;
let opt = CommandLineArgs::parse(); let opt = CommandLineArgs::parse();
// Set up the logger with the filter directives from:
// 1. CLI option `--log-filter`
// 2. `debug` if the `--verbose` option is present
// We do this because we need our logger to work while loading the
// configuration file.
//
// When the configuration file is loaded, update the logger with the full
// filter directives.
//
// For more info, see the comments in `CommandLineArgs::tracing_filter_directives()`
// and `Config::tracing_filter_directives()`.
let reload_handle = install_tracing(&opt.tracing_filter_directives())?;
if let Some(shell) = opt.gen_completion { if let Some(shell) = opt.gen_completion {
let cmd = &mut CommandLineArgs::command(); let cmd = &mut CommandLineArgs::command();
clap_complete::generate(shell, cmd, clap::crate_name!(), &mut std::io::stdout()); clap_complete::generate(shell, cmd, clap::crate_name!(), &mut io::stdout());
return Ok(()); return Ok(());
} }
if opt.gen_manpage { if opt.gen_manpage {
let man = clap_mangen::Man::new(CommandLineArgs::command()); let man = clap_mangen::Man::new(CommandLineArgs::command());
man.render(&mut std::io::stdout())?; man.render(&mut io::stdout())?;
return Ok(()); return Ok(());
} }
install_tracing(&opt.tracing_filter_directives())?;
for env in opt.env_variables() { for env in opt.env_variables() {
let mut splitted = env.split('='); let mut splitted = env.split('=');
let var = splitted.next().unwrap(); let var = splitted.next().unwrap();
@@ -65,25 +92,28 @@ fn run() -> Result<()> {
} }
if opt.edit_config() { if opt.edit_config() {
Config::edit(&base_dirs)?; Config::edit()?;
return Ok(()); return Ok(());
}; };
if opt.show_config_reference() { if opt.show_config_reference() {
print!("{}", crate::config::EXAMPLE_CONFIG); print!("{}", config::EXAMPLE_CONFIG);
return Ok(()); return Ok(());
} }
let config = Config::load(&base_dirs, opt)?; let config = Config::load(opt)?;
terminal::set_title(config.set_title()); // Update the logger with the full filter directives.
terminal::display_time(config.display_time()); update_tracing(&reload_handle, &config.tracing_filter_directives())?;
terminal::set_desktop_notifications(config.notify_each_step()); set_title(config.set_title());
display_time(config.display_time());
set_desktop_notifications(config.notify_each_step());
debug!("Version: {}", crate_version!()); debug!("Version: {}", crate_version!());
debug!("OS: {}", env!("TARGET")); debug!("OS: {}", env!("TARGET"));
debug!("{:?}", std::env::args()); debug!("{:?}", std::env::args());
debug!("Binary path: {:?}", std::env::current_exe()); debug!("Binary path: {:?}", std::env::current_exe());
debug!("Self Update: {:?}", cfg!(feature = "self-update")); debug!("self-update Feature Enabled: {:?}", cfg!(feature = "self-update"));
debug!("Configuration: {:?}", config);
if config.run_in_tmux() && env::var("TOPGRADE_INSIDE_TMUX").is_err() { if config.run_in_tmux() && env::var("TOPGRADE_INSIDE_TMUX").is_err() {
#[cfg(unix)] #[cfg(unix)]
@@ -93,30 +123,42 @@ fn run() -> Result<()> {
} }
} }
let git = git::Git::new(); let powershell = powershell::Powershell::new();
let mut git_repos = git::Repositories::new(&git); let should_run_powershell = powershell.profile().is_some() && config.should_run(Step::Powershell);
let emacs = emacs::Emacs::new();
#[cfg(target_os = "linux")]
let distribution = linux::Distribution::detect();
let sudo = sudo::Sudo::detect(); let sudo = config.sudo_command().map_or_else(sudo::Sudo::detect, sudo::Sudo::new);
let run_type = executor::RunType::new(config.dry_run()); let run_type = executor::RunType::new(config.dry_run());
let ctx = execution_context::ExecutionContext::new(run_type, sudo, &config);
let ctx = execution_context::ExecutionContext::new(run_type, sudo, &git, &config, &base_dirs);
let mut runner = runner::Runner::new(&ctx); let mut runner = runner::Runner::new(&ctx);
// If
//
// 1. the breaking changes notification should not be skipped, and
// 2. this is the first execution of a major release,
//
// inform the user of the breaking changes.
if !should_skip() && first_run_of_major_release()? {
print_breaking_changes();
if prompt_yesno("Confirmed?")? {
write_keep_file()?;
} else {
exit(1);
}
}
// Self-update step; this will execute only if:
// 1. the `self-update` feature is enabled
// 2. it is not disabled via configuration (env var/CLI opt/file)
#[cfg(feature = "self-update")] #[cfg(feature = "self-update")]
{ {
if !run_type.dry() && env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() { let should_self_update = env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() && !config.no_self_update();
let result = self_update::self_update();
if let Err(e) = &result { if should_self_update {
#[cfg(windows)] runner.execute(Step::SelfUpdate, "Self Update", || self_update::self_update(&ctx))?;
{
if e.downcast_ref::<Upgraded>().is_some() {
return result;
}
}
print_warning(format!("Self update error: {}", e));
}
} }
} }
@@ -139,45 +181,61 @@ fn run() -> Result<()> {
} }
} }
let powershell = powershell::Powershell::new();
let should_run_powershell = powershell.profile().is_some() && config.should_run(Step::Powershell);
#[cfg(windows)]
runner.execute(Step::Wsl, "WSL", || windows::run_wsl_topgrade(&ctx))?;
if let Some(topgrades) = config.remote_topgrades() { if let Some(topgrades) = config.remote_topgrades() {
for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(t)) { for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(hostname(), t)) {
runner.execute(Step::Remotes, format!("Remote ({})", remote_topgrade), || { runner.execute(Step::Remotes, format!("Remote ({remote_topgrade})"), || {
remote::ssh::ssh_step(&ctx, remote_topgrade) ssh::ssh_step(&ctx, remote_topgrade)
})?; })?;
} }
} }
#[cfg(target_os = "linux")] #[cfg(windows)]
let distribution = linux::Distribution::detect();
#[cfg(target_os = r#"linux"#)]
{ {
runner.execute(Step::Wsl, "WSL", || windows::run_wsl_topgrade(&ctx))?;
runner.execute(Step::WslUpdate, "WSL", || windows::update_wsl(&ctx))?;
runner.execute(Step::Chocolatey, "Chocolatey", || windows::run_chocolatey(&ctx))?;
runner.execute(Step::Scoop, "Scoop", || windows::run_scoop(&ctx))?;
runner.execute(Step::Winget, "Winget", || windows::run_winget(&ctx))?;
runner.execute(Step::System, "Windows update", || windows::windows_update(&ctx))?;
}
#[cfg(target_os = "linux")]
{
// NOTE: Due to breaking `nu` updates, `packer.nu` needs to be updated before `nu` gets updated
// by other package managers.
runner.execute(Step::Shell, "packer.nu", || linux::run_packer_nu(&ctx))?;
match &distribution { match &distribution {
Ok(distribution) => { Ok(distribution) => {
runner.execute(Step::System, "System update", || distribution.upgrade(&ctx))?; runner.execute(Step::System, "System update", || distribution.upgrade(&ctx))?;
} }
Err(e) => { Err(e) => {
println!("Error detecting current distribution: {}", e); println!("Error detecting current distribution: {e}");
} }
} }
runner.execute(Step::ConfigUpdate, "config-update", || linux::run_config_update(&ctx))?; runner.execute(Step::ConfigUpdate, "config-update", || linux::run_config_update(&ctx))?;
runner.execute(Step::AM, "am", || linux::run_am(&ctx))?;
runner.execute(Step::AppMan, "appman", || linux::run_appman(&ctx))?;
runner.execute(Step::DebGet, "deb-get", || linux::run_deb_get(&ctx))?;
runner.execute(Step::Toolbx, "toolbx", || toolbx::run_toolbx(&ctx))?;
runner.execute(Step::Snap, "snap", || linux::run_snap(&ctx))?;
runner.execute(Step::Pacstall, "pacstall", || linux::run_pacstall(&ctx))?;
runner.execute(Step::Pacdef, "pacdef", || linux::run_pacdef(&ctx))?;
runner.execute(Step::Protonup, "protonup", || linux::run_protonup_update(&ctx))?;
runner.execute(Step::Distrobox, "distrobox", || linux::run_distrobox_update(&ctx))?;
runner.execute(Step::DkpPacman, "dkp-pacman", || linux::run_dkp_pacman_update(&ctx))?;
runner.execute(Step::System, "pihole", || linux::run_pihole_update(&ctx))?;
runner.execute(Step::Firmware, "Firmware upgrades", || linux::run_fwupdmgr(&ctx))?;
runner.execute(Step::Restarts, "Restarts", || linux::run_needrestart(&ctx))?;
runner.execute(Step::Flatpak, "Flatpak", || linux::run_flatpak(&ctx))?;
runner.execute(Step::BrewFormula, "Brew", || { runner.execute(Step::BrewFormula, "Brew", || {
unix::run_brew_formula(&ctx, unix::BrewVariant::Path) unix::run_brew_formula(&ctx, unix::BrewVariant::Path)
})?; })?;
} runner.execute(Step::Lure, "LURE", || linux::run_lure_update(&ctx))?;
runner.execute(Step::Waydroid, "Waydroid", || linux::run_waydroid(&ctx))?;
#[cfg(windows)] runner.execute(Step::AutoCpufreq, "auto-cpufreq", || linux::run_auto_cpufreq(&ctx))?;
{
runner.execute(Step::Chocolatey, "Chocolatey", || windows::run_chocolatey(&ctx))?;
runner.execute(Step::Scoop, "Scoop", || windows::run_scoop(config.cleanup(), run_type))?;
runner.execute(Step::Winget, "Winget", || windows::run_winget(&ctx))?;
} }
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
@@ -201,133 +259,77 @@ fn run() -> Result<()> {
unix::run_brew_cask(&ctx, unix::BrewVariant::Path) unix::run_brew_cask(&ctx, unix::BrewVariant::Path)
})?; })?;
runner.execute(Step::Macports, "MacPorts", || macos::run_macports(&ctx))?; runner.execute(Step::Macports, "MacPorts", || macos::run_macports(&ctx))?;
runner.execute(Step::Xcodes, "Xcodes", || macos::update_xcodes(&ctx))?;
runner.execute(Step::Sparkle, "Sparkle", || macos::run_sparkle(&ctx))?;
runner.execute(Step::Mas, "App Store", || macos::run_mas(&ctx))?;
runner.execute(Step::System, "System upgrade", || macos::upgrade_macos(&ctx))?;
}
#[cfg(target_os = "dragonfly")]
{
runner.execute(Step::Pkg, "DragonFly BSD Packages", || {
dragonfly::upgrade_packages(&ctx)
})?;
runner.execute(Step::Audit, "DragonFly Audit", || dragonfly::audit_packages(&ctx))?;
}
#[cfg(target_os = "freebsd")]
{
runner.execute(Step::Pkg, "FreeBSD Packages", || freebsd::upgrade_packages(&ctx))?;
runner.execute(Step::System, "FreeBSD Upgrade", || freebsd::upgrade_freebsd(&ctx))?;
runner.execute(Step::Audit, "FreeBSD Audit", || freebsd::audit_packages(&ctx))?;
}
#[cfg(target_os = "openbsd")]
{
runner.execute(Step::Pkg, "OpenBSD Packages", || openbsd::upgrade_packages(&ctx))?;
runner.execute(Step::System, "OpenBSD Upgrade", || openbsd::upgrade_openbsd(&ctx))?;
}
#[cfg(target_os = "android")]
{
runner.execute(Step::Pkg, "Termux Packages", || android::upgrade_packages(&ctx))?;
} }
#[cfg(unix)] #[cfg(unix)]
{ {
runner.execute(Step::Yadm, "yadm", || unix::run_yadm(&ctx))?; runner.execute(Step::Yadm, "yadm", || unix::run_yadm(&ctx))?;
runner.execute(Step::Nix, "nix", || unix::run_nix(&ctx))?; runner.execute(Step::Nix, "nix", || unix::run_nix(&ctx))?;
runner.execute(Step::Nix, "nix upgrade-nix", || unix::run_nix_self_upgrade(&ctx))?;
runner.execute(Step::Guix, "guix", || unix::run_guix(&ctx))?; runner.execute(Step::Guix, "guix", || unix::run_guix(&ctx))?;
runner.execute(Step::HomeManager, "home-manager", || unix::run_home_manager(&ctx))?;
runner.execute(Step::HomeManager, "home-manager", || unix::run_home_manager(run_type))?; runner.execute(Step::Asdf, "asdf", || unix::run_asdf(&ctx))?;
runner.execute(Step::Asdf, "asdf", || unix::run_asdf(run_type))?; runner.execute(Step::Mise, "mise", || unix::run_mise(&ctx))?;
runner.execute(Step::Pkgin, "pkgin", || unix::run_pkgin(&ctx))?; runner.execute(Step::Pkgin, "pkgin", || unix::run_pkgin(&ctx))?;
runner.execute(Step::Bun, "bun", || unix::run_bun(&ctx))?; runner.execute(Step::Bun, "bun", || unix::run_bun(&ctx))?;
} runner.execute(Step::BunPackages, "bun-packages", || unix::run_bun_packages(&ctx))?;
runner.execute(Step::Shell, "zr", || zsh::run_zr(&ctx))?;
#[cfg(target_os = "dragonfly")] runner.execute(Step::Shell, "antibody", || zsh::run_antibody(&ctx))?;
runner.execute(Step::Pkg, "DragonFly BSD Packages", || { runner.execute(Step::Shell, "antidote", || zsh::run_antidote(&ctx))?;
dragonfly::upgrade_packages(ctx.sudo().as_ref(), run_type) runner.execute(Step::Shell, "antigen", || zsh::run_antigen(&ctx))?;
})?; runner.execute(Step::Shell, "zgenom", || zsh::run_zgenom(&ctx))?;
runner.execute(Step::Shell, "zplug", || zsh::run_zplug(&ctx))?;
#[cfg(target_os = "freebsd")] runner.execute(Step::Shell, "zinit", || zsh::run_zinit(&ctx))?;
runner.execute(Step::Pkg, "FreeBSD Packages", || { runner.execute(Step::Shell, "zi", || zsh::run_zi(&ctx))?;
freebsd::upgrade_packages(&ctx, ctx.sudo().as_ref(), run_type) runner.execute(Step::Shell, "zim", || zsh::run_zim(&ctx))?;
})?;
#[cfg(target_os = "openbsd")]
runner.execute(Step::Pkg, "OpenBSD Packages", || {
openbsd::upgrade_packages(ctx.sudo().as_ref(), run_type)
})?;
#[cfg(target_os = "android")]
runner.execute(Step::Pkg, "Termux Packages", || android::upgrade_packages(&ctx))?;
let emacs = emacs::Emacs::new(&base_dirs);
if config.use_predefined_git_repos() {
if config.should_run(Step::Emacs) {
if !emacs.is_doom() {
if let Some(directory) = emacs.directory() {
git_repos.insert_if_repo(directory);
}
}
git_repos.insert_if_repo(base_dirs.home_dir().join(".doom.d"));
}
if config.should_run(Step::Vim) {
git_repos.insert_if_repo(base_dirs.home_dir().join(".vim"));
git_repos.insert_if_repo(base_dirs.home_dir().join(".config/nvim"));
}
git_repos.insert_if_repo(base_dirs.home_dir().join(".ideavimrc"));
git_repos.insert_if_repo(base_dirs.home_dir().join(".intellimacs"));
if config.should_run(Step::Rcm) {
git_repos.insert_if_repo(base_dirs.home_dir().join(".dotfiles"));
}
#[cfg(unix)]
{
git_repos.insert_if_repo(zsh::zshrc(&base_dirs));
if config.should_run(Step::Tmux) {
git_repos.insert_if_repo(base_dirs.home_dir().join(".tmux"));
}
git_repos.insert_if_repo(base_dirs.home_dir().join(".config/fish"));
git_repos.insert_if_repo(base_dirs.config_dir().join("openbox"));
git_repos.insert_if_repo(base_dirs.config_dir().join("bspwm"));
git_repos.insert_if_repo(base_dirs.config_dir().join("i3"));
git_repos.insert_if_repo(base_dirs.config_dir().join("sway"));
}
#[cfg(windows)]
git_repos.insert_if_repo(
base_dirs
.data_local_dir()
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
);
#[cfg(windows)]
windows::insert_startup_scripts(&ctx, &mut git_repos).ok();
if let Some(profile) = powershell.profile() {
git_repos.insert_if_repo(profile);
}
}
if config.should_run(Step::GitRepos) {
if let Some(custom_git_repos) = config.git_repos() {
for git_repo in custom_git_repos {
git_repos.glob_insert(git_repo);
}
}
runner.execute(Step::GitRepos, "Git repositories", || {
git.multi_pull_step(&git_repos, &ctx)
})?;
}
if should_run_powershell {
runner.execute(Step::Powershell, "Powershell Modules Update", || {
powershell.update_modules(&ctx)
})?;
}
#[cfg(unix)]
{
runner.execute(Step::Shell, "zr", || zsh::run_zr(&base_dirs, run_type))?;
runner.execute(Step::Shell, "antibody", || zsh::run_antibody(run_type))?;
runner.execute(Step::Shell, "antigen", || zsh::run_antigen(&base_dirs, run_type))?;
runner.execute(Step::Shell, "zgenom", || zsh::run_zgenom(&base_dirs, run_type))?;
runner.execute(Step::Shell, "zplug", || zsh::run_zplug(&base_dirs, run_type))?;
runner.execute(Step::Shell, "zinit", || zsh::run_zinit(&base_dirs, run_type))?;
runner.execute(Step::Shell, "zi", || zsh::run_zi(&base_dirs, run_type))?;
runner.execute(Step::Shell, "zim", || zsh::run_zim(&base_dirs, run_type))?;
runner.execute(Step::Shell, "oh-my-zsh", || zsh::run_oh_my_zsh(&ctx))?; runner.execute(Step::Shell, "oh-my-zsh", || zsh::run_oh_my_zsh(&ctx))?;
runner.execute(Step::Shell, "fisher", || unix::run_fisher(run_type))?; runner.execute(Step::Shell, "oh-my-bash", || unix::run_oh_my_bash(&ctx))?;
runner.execute(Step::Shell, "fisher", || unix::run_fisher(&ctx))?;
runner.execute(Step::Shell, "bash-it", || unix::run_bashit(&ctx))?; runner.execute(Step::Shell, "bash-it", || unix::run_bashit(&ctx))?;
runner.execute(Step::Shell, "oh-my-fish", || unix::run_oh_my_fish(&ctx))?; runner.execute(Step::Shell, "oh-my-fish", || unix::run_oh_my_fish(&ctx))?;
runner.execute(Step::Shell, "fish-plug", || unix::run_fish_plug(&ctx))?; runner.execute(Step::Shell, "fish-plug", || unix::run_fish_plug(&ctx))?;
runner.execute(Step::Shell, "fundle", || unix::run_fundle(&ctx))?; runner.execute(Step::Shell, "fundle", || unix::run_fundle(&ctx))?;
runner.execute(Step::Tmux, "tmux", || tmux::run_tpm(&base_dirs, run_type))?; runner.execute(Step::Tmux, "tmux", || tmux::run_tpm(&ctx))?;
runner.execute(Step::Tldr, "TLDR", || unix::run_tldr(run_type))?; runner.execute(Step::Tldr, "TLDR", || unix::run_tldr(&ctx))?;
runner.execute(Step::Pearl, "pearl", || unix::run_pearl(run_type))?; runner.execute(Step::Pearl, "pearl", || unix::run_pearl(&ctx))?;
#[cfg(not(any(target_os = "macos", target_os = "android")))] #[cfg(not(any(target_os = "macos", target_os = "android")))]
runner.execute(Step::GnomeShellExtensions, "Gnome Shell Extensions", || { runner.execute(Step::GnomeShellExtensions, "Gnome Shell Extensions", || {
unix::upgrade_gnome_extensions(&ctx) unix::upgrade_gnome_extensions(&ctx)
})?; })?;
runner.execute(Step::Sdkman, "SDKMAN!", || { runner.execute(Step::Pyenv, "pyenv", || unix::run_pyenv(&ctx))?;
unix::run_sdkman(&base_dirs, config.cleanup(), run_type) runner.execute(Step::Sdkman, "SDKMAN!", || unix::run_sdkman(&ctx))?;
})?;
runner.execute(Step::Rcm, "rcm", || unix::run_rcm(&ctx))?; runner.execute(Step::Rcm, "rcm", || unix::run_rcm(&ctx))?;
runner.execute(Step::Maza, "maza", || unix::run_maza(&ctx))?;
} }
#[cfg(not(any( #[cfg(not(any(
@@ -336,36 +338,48 @@ fn run() -> Result<()> {
target_os = "netbsd", target_os = "netbsd",
target_os = "dragonfly" target_os = "dragonfly"
)))] )))]
runner.execute(Step::Atom, "apm", || generic::run_apm(run_type))?; {
runner.execute(Step::Fossil, "fossil", || generic::run_fossil(run_type))?; runner.execute(Step::Atom, "apm", || generic::run_apm(&ctx))?;
runner.execute(Step::Rustup, "rustup", || generic::run_rustup(&base_dirs, run_type))?; }
runner.execute(Step::Juliaup, "juliaup", || generic::run_juliaup(&base_dirs, run_type))?;
// The following update function should be executed on all OSes.
runner.execute(Step::Fossil, "fossil", || generic::run_fossil(&ctx))?;
runner.execute(Step::Elan, "elan", || generic::run_elan(&ctx))?;
runner.execute(Step::Rye, "rye", || generic::run_rye(&ctx))?;
runner.execute(Step::Rustup, "rustup", || generic::run_rustup(&ctx))?;
runner.execute(Step::Juliaup, "juliaup", || generic::run_juliaup(&ctx))?;
runner.execute(Step::Dotnet, ".NET", || generic::run_dotnet_upgrade(&ctx))?; runner.execute(Step::Dotnet, ".NET", || generic::run_dotnet_upgrade(&ctx))?;
runner.execute(Step::Choosenim, "choosenim", || generic::run_choosenim(&ctx))?; runner.execute(Step::Choosenim, "choosenim", || generic::run_choosenim(&ctx))?;
runner.execute(Step::Cargo, "cargo", || generic::run_cargo_update(&ctx))?; runner.execute(Step::Cargo, "cargo", || generic::run_cargo_update(&ctx))?;
runner.execute(Step::Flutter, "Flutter", || generic::run_flutter_upgrade(run_type))?; runner.execute(Step::Flutter, "Flutter", || generic::run_flutter_upgrade(&ctx))?;
runner.execute(Step::Go, "go-global-update", || go::run_go_global_update(run_type))?; runner.execute(Step::Go, "go-global-update", || go::run_go_global_update(&ctx))?;
runner.execute(Step::Go, "gup", || go::run_go_gup(run_type))?; runner.execute(Step::Go, "gup", || go::run_go_gup(&ctx))?;
runner.execute(Step::Emacs, "Emacs", || emacs.upgrade(&ctx))?; runner.execute(Step::Emacs, "Emacs", || emacs.upgrade(&ctx))?;
runner.execute(Step::Opam, "opam", || generic::run_opam_update(&ctx))?; runner.execute(Step::Opam, "opam", || generic::run_opam_update(&ctx))?;
runner.execute(Step::Vcpkg, "vcpkg", || generic::run_vcpkg_update(&ctx))?; runner.execute(Step::Vcpkg, "vcpkg", || generic::run_vcpkg_update(&ctx))?;
runner.execute(Step::Pipx, "pipx", || generic::run_pipx_update(run_type))?; runner.execute(Step::Pipx, "pipx", || generic::run_pipx_update(&ctx))?;
runner.execute(Step::Vscode, "Visual Studio Code extensions", || {
generic::run_vscode_extensions_update(&ctx)
})?;
runner.execute(Step::Conda, "conda", || generic::run_conda_update(&ctx))?; runner.execute(Step::Conda, "conda", || generic::run_conda_update(&ctx))?;
runner.execute(Step::Pip3, "pip3", || generic::run_pip3_update(run_type))?; runner.execute(Step::Mamba, "mamba", || generic::run_mamba_update(&ctx))?;
runner.execute(Step::Ghcup, "ghcup", || generic::run_ghcup_update(run_type))?; runner.execute(Step::Miktex, "miktex", || generic::run_miktex_packages_update(&ctx))?;
runner.execute(Step::Stack, "stack", || generic::run_stack_update(run_type))?; runner.execute(Step::Pip3, "pip3", || generic::run_pip3_update(&ctx))?;
runner.execute(Step::PipReview, "pip-review", || generic::run_pip_review_update(&ctx))?;
runner.execute(Step::PipReviewLocal, "pip-review (local)", || {
generic::run_pip_review_local_update(&ctx)
})?;
runner.execute(Step::Pipupgrade, "pipupgrade", || generic::run_pipupgrade_update(&ctx))?;
runner.execute(Step::Ghcup, "ghcup", || generic::run_ghcup_update(&ctx))?;
runner.execute(Step::Stack, "stack", || generic::run_stack_update(&ctx))?;
runner.execute(Step::Tlmgr, "tlmgr", || generic::run_tlmgr_update(&ctx))?; runner.execute(Step::Tlmgr, "tlmgr", || generic::run_tlmgr_update(&ctx))?;
runner.execute(Step::Myrepos, "myrepos", || { runner.execute(Step::Myrepos, "myrepos", || generic::run_myrepos_update(&ctx))?;
generic::run_myrepos_update(&base_dirs, run_type) runner.execute(Step::Chezmoi, "chezmoi", || generic::run_chezmoi_update(&ctx))?;
})?; runner.execute(Step::Jetpack, "jetpack", || generic::run_jetpack(&ctx))?;
runner.execute(Step::Chezmoi, "chezmoi", || { runner.execute(Step::Vim, "vim", || vim::upgrade_vim(&ctx))?;
generic::run_chezmoi_update(&base_dirs, run_type) runner.execute(Step::Vim, "Neovim", || vim::upgrade_neovim(&ctx))?;
})?;
runner.execute(Step::Jetpack, "jetpack", || generic::run_jetpack(run_type))?;
runner.execute(Step::Vim, "vim", || vim::upgrade_vim(&base_dirs, &ctx))?;
runner.execute(Step::Vim, "Neovim", || vim::upgrade_neovim(&base_dirs, &ctx))?;
runner.execute(Step::Vim, "The Ultimate vimrc", || vim::upgrade_ultimate_vimrc(&ctx))?; runner.execute(Step::Vim, "The Ultimate vimrc", || vim::upgrade_ultimate_vimrc(&ctx))?;
runner.execute(Step::Vim, "voom", || vim::run_voom(&base_dirs, run_type))?; runner.execute(Step::Vim, "voom", || vim::run_voom(&ctx))?;
runner.execute(Step::Kakoune, "Kakoune", || kakoune::upgrade_kak_plug(&ctx))?; runner.execute(Step::Kakoune, "Kakoune", || kakoune::upgrade_kak_plug(&ctx))?;
runner.execute(Step::Helix, "helix", || generic::run_helix_grammars(&ctx))?; runner.execute(Step::Helix, "helix", || generic::run_helix_grammars(&ctx))?;
runner.execute(Step::Node, "npm", || node::run_npm_upgrade(&ctx))?; runner.execute(Step::Node, "npm", || node::run_npm_upgrade(&ctx))?;
@@ -374,37 +388,38 @@ fn run() -> Result<()> {
runner.execute(Step::Containers, "Containers", || containers::run_containers(&ctx))?; runner.execute(Step::Containers, "Containers", || containers::run_containers(&ctx))?;
runner.execute(Step::Deno, "deno", || node::deno_upgrade(&ctx))?; runner.execute(Step::Deno, "deno", || node::deno_upgrade(&ctx))?;
runner.execute(Step::Composer, "composer", || generic::run_composer_update(&ctx))?; runner.execute(Step::Composer, "composer", || generic::run_composer_update(&ctx))?;
runner.execute(Step::Krew, "krew", || generic::run_krew_upgrade(run_type))?; runner.execute(Step::Krew, "krew", || generic::run_krew_upgrade(&ctx))?;
runner.execute(Step::Helm, "helm", || generic::run_helm_repo_update(run_type))?; runner.execute(Step::Helm, "helm", || generic::run_helm_repo_update(&ctx))?;
runner.execute(Step::Gem, "gem", || generic::run_gem(&base_dirs, run_type))?; runner.execute(Step::Gem, "gem", || generic::run_gem(&ctx))?;
runner.execute(Step::RubyGems, "rubygems", || { runner.execute(Step::RubyGems, "rubygems", || generic::run_rubygems(&ctx))?;
generic::run_rubygems(&base_dirs, run_type)
})?;
runner.execute(Step::Julia, "julia", || generic::update_julia_packages(&ctx))?; runner.execute(Step::Julia, "julia", || generic::update_julia_packages(&ctx))?;
runner.execute(Step::Haxelib, "haxelib", || generic::run_haxelib_update(&ctx))?; runner.execute(Step::Haxelib, "haxelib", || generic::run_haxelib_update(&ctx))?;
runner.execute(Step::Sheldon, "sheldon", || generic::run_sheldon(&ctx))?; runner.execute(Step::Sheldon, "sheldon", || generic::run_sheldon(&ctx))?;
runner.execute(Step::Stew, "stew", || generic::run_stew(&ctx))?;
runner.execute(Step::Rtcl, "rtcl", || generic::run_rtcl(&ctx))?; runner.execute(Step::Rtcl, "rtcl", || generic::run_rtcl(&ctx))?;
runner.execute(Step::Bin, "bin", || generic::bin_update(&ctx))?; runner.execute(Step::Bin, "bin", || generic::bin_update(&ctx))?;
runner.execute(Step::Gcloud, "gcloud", || { runner.execute(Step::Gcloud, "gcloud", || generic::run_gcloud_components_update(&ctx))?;
generic::run_gcloud_components_update(run_type) runner.execute(Step::Micro, "micro", || generic::run_micro(&ctx))?;
})?; runner.execute(Step::Raco, "raco", || generic::run_raco_update(&ctx))?;
runner.execute(Step::Micro, "micro", || generic::run_micro(run_type))?;
runner.execute(Step::Raco, "raco", || generic::run_raco_update(run_type))?;
runner.execute(Step::Spicetify, "spicetify", || generic::spicetify_upgrade(&ctx))?; runner.execute(Step::Spicetify, "spicetify", || generic::spicetify_upgrade(&ctx))?;
runner.execute(Step::GithubCliExtensions, "GitHub CLI Extensions", || { runner.execute(Step::GithubCliExtensions, "GitHub CLI Extensions", || {
generic::run_ghcli_extensions_upgrade(&ctx) generic::run_ghcli_extensions_upgrade(&ctx)
})?; })?;
runner.execute(Step::Bob, "Bob", || generic::run_bob(&ctx))?;
runner.execute(Step::Certbot, "Certbot", || generic::run_certbot(&ctx))?;
runner.execute(Step::GitRepos, "Git Repositories", || git::run_git_pull(&ctx))?;
runner.execute(Step::ClamAvDb, "ClamAV Databases", || generic::run_freshclam(&ctx))?;
runner.execute(Step::PlatformioCore, "PlatformIO Core", || {
generic::run_platform_io(&ctx)
})?;
runner.execute(Step::Lensfun, "Lensfun's database update", || {
generic::run_lensfun_update_data(&ctx)
})?;
#[cfg(target_os = "linux")] if should_run_powershell {
{ runner.execute(Step::Powershell, "Powershell Modules Update", || {
runner.execute(Step::DebGet, "deb-get", || linux::run_deb_get(&ctx))?; powershell.update_modules(&ctx)
runner.execute(Step::Toolbx, "toolbx", || toolbx::run_toolbx(&ctx))?; })?;
runner.execute(Step::Flatpak, "Flatpak", || linux::flatpak_update(&ctx))?;
runner.execute(Step::Snap, "snap", || linux::run_snap(ctx.sudo().as_ref(), run_type))?;
runner.execute(Step::Pacstall, "pacstall", || linux::run_pacstall(&ctx))?;
runner.execute(Step::Pacdef, "pacdef", || linux::run_pacdef(&ctx))?;
runner.execute(Step::Protonup, "protonup", || linux::run_protonup_update(&ctx))?;
runner.execute(Step::Distrobox, "distrobox", || linux::run_distrobox_update(&ctx))?;
} }
if let Some(commands) = config.commands() { if let Some(commands) = config.commands() {
@@ -417,37 +432,6 @@ fn run() -> Result<()> {
} }
} }
#[cfg(target_os = "linux")]
{
runner.execute(Step::System, "pihole", || {
linux::run_pihole_update(ctx.sudo().as_ref(), run_type)
})?;
runner.execute(Step::Firmware, "Firmware upgrades", || linux::run_fwupdmgr(&ctx))?;
runner.execute(Step::Restarts, "Restarts", || {
linux::run_needrestart(ctx.sudo().as_ref(), run_type)
})?;
}
#[cfg(target_os = "macos")]
{
runner.execute(Step::Sparkle, "Sparkle", || macos::run_sparkle(&ctx))?;
runner.execute(Step::Mas, "App Store", || macos::run_mas(run_type))?;
runner.execute(Step::System, "System upgrade", || macos::upgrade_macos(&ctx))?;
}
#[cfg(target_os = "freebsd")]
runner.execute(Step::System, "FreeBSD Upgrade", || {
freebsd::upgrade_freebsd(ctx.sudo().as_ref(), run_type)
})?;
#[cfg(target_os = "openbsd")]
runner.execute(Step::System, "OpenBSD Upgrade", || {
openbsd::upgrade_openbsd(ctx.sudo().as_ref(), run_type)
})?;
#[cfg(windows)]
runner.execute(Step::System, "Windows update", || windows::windows_update(&ctx))?;
if config.should_run(Step::Vagrant) { if config.should_run(Step::Vagrant) {
if let Ok(boxes) = vagrant::collect_boxes(&ctx) { if let Ok(boxes) = vagrant::collect_boxes(&ctx) {
for vagrant_box in boxes { for vagrant_box in boxes {
@@ -472,12 +456,6 @@ fn run() -> Result<()> {
distribution.show_summary(); distribution.show_summary();
} }
} }
#[cfg(target_os = "freebsd")]
freebsd::audit_packages(ctx.sudo().as_ref()).ok();
#[cfg(target_os = "dragonfly")]
dragonfly::audit_packages(ctx.sudo().as_ref()).ok();
} }
let mut post_command_failed = false; let mut post_command_failed = false;
@@ -511,13 +489,13 @@ fn run() -> Result<()> {
let failed = post_command_failed || runner.report().data().iter().any(|(_, result)| result.failed()); let failed = post_command_failed || runner.report().data().iter().any(|(_, result)| result.failed());
if !config.skip_notify() { if !config.skip_notify() {
terminal::notify_desktop( notify_desktop(
format!( format!(
"Topgrade finished {}", "Topgrade finished {}",
if failed { "with errors" } else { "successfully" } if failed { "with errors" } else { "successfully" }
), ),
None, Some(Duration::from_secs(10)),
); )
} }
if failed { if failed {
@@ -550,32 +528,9 @@ fn main() {
// The `Debug` implementation of `eyre::Result` prints a multi-line // The `Debug` implementation of `eyre::Result` prints a multi-line
// error message that includes all the 'causes' added with // error message that includes all the 'causes' added with
// `.with_context(...)` calls. // `.with_context(...)` calls.
println!("Error: {:?}", error); println!("Error: {error:?}");
} }
exit(1); exit(1);
} }
} }
} }
pub fn install_tracing(filter_directives: &str) -> Result<()> {
use tracing_subscriber::fmt;
use tracing_subscriber::fmt::format::FmtSpan;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::EnvFilter;
let env_filter = EnvFilter::try_new(filter_directives)
.or_else(|_| EnvFilter::try_from_default_env())
.or_else(|_| EnvFilter::try_new("info"))?;
let fmt_layer = fmt::layer()
.with_target(false)
.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
.without_time();
let registry = tracing_subscriber::registry();
registry.with(env_filter).with(fmt_layer).init();
Ok(())
}
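This removed `install_tracing` built a one-shot subscriber; the new `install_tracing`/`update_tracing` pair imported from `utils` returns a reload handle so that `run()` can re-apply the filter directives once the configuration file has been loaded (see the comment above the `install_tracing` call earlier in this diff). A rough sketch of that reloadable-filter pattern with tracing-subscriber, assuming the `env-filter` feature; the bodies are illustrative, not the actual contents of `utils.rs`:

use tracing_subscriber::{fmt, layer::SubscriberExt, reload, util::SubscriberInitExt, EnvFilter};

type ReloadHandle = reload::Handle<EnvFilter, tracing_subscriber::Registry>;

fn install_tracing(directives: &str) -> Result<ReloadHandle, Box<dyn std::error::Error>> {
    // Fall back to `info` when the initial directives do not parse.
    let filter = EnvFilter::try_new(directives).or_else(|_| EnvFilter::try_new("info"))?;
    let (filter_layer, handle) = reload::Layer::new(filter);
    tracing_subscriber::registry()
        .with(filter_layer)
        .with(fmt::layer().with_target(false).without_time())
        .init();
    Ok(handle)
}

fn update_tracing(handle: &ReloadHandle, directives: &str) -> Result<(), Box<dyn std::error::Error>> {
    // Swap in the merged CLI + config-file directives after the config is loaded.
    handle.reload(EnvFilter::try_new(directives)?)?;
    Ok(())
}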


@@ -34,7 +34,7 @@ impl<'a> Report<'a> {
         if let Some((key, success)) = result {
             let key = key.into();
-            debug_assert!(!self.data.iter().any(|(k, _)| k == &key), "{} already reported", key);
+            debug_assert!(!self.data.iter().any(|(k, _)| k == &key), "{key} already reported");
             self.data.push((key, success));
         }
     }


@@ -34,6 +34,14 @@ impl<'a> Runner<'a> {
         let key = key.into();
         debug!("Step {:?}", key);
+        // alter the `func` to put it in a span
+        let func = || {
+            let span =
+                tracing::span!(parent: tracing::Span::none(), tracing::Level::TRACE, "step", step = ?step, key = %key);
+            let _guard = span.enter();
+            func()
+        };
         loop {
             match func() {
                 Ok(()) => {


@@ -1,5 +1,3 @@
-#![cfg(windows)]
 use color_eyre::eyre::Result;
 use std::{env::current_exe, fs, path::PathBuf};
 use tracing::{debug, error};


@@ -3,6 +3,7 @@ use std::env;
 use std::os::unix::process::CommandExt as _;
 use std::process::Command;
+use crate::config::Step;
 use color_eyre::eyre::{bail, Result};
 use self_update_crate::backends::github::Update;
 use self_update_crate::update::UpdateStatus;
@@ -11,52 +12,61 @@ use super::terminal::*;
#[cfg(windows)] #[cfg(windows)]
use crate::error::Upgraded; use crate::error::Upgraded;
pub fn self_update() -> Result<()> { use crate::execution_context::ExecutionContext;
pub fn self_update(ctx: &ExecutionContext) -> Result<()> {
print_separator("Self update"); print_separator("Self update");
let current_exe = env::current_exe();
let target = self_update_crate::get_target(); if ctx.run_type().dry() {
let result = Update::configure() println!("Would self-update");
.repo_owner("topgrade-rs") Ok(())
.repo_name("topgrade")
.target(target)
.bin_name(if cfg!(windows) { "topgrade.exe" } else { "topgrade" })
.show_output(false)
.show_download_progress(true)
.current_version(self_update_crate::cargo_crate_version!())
.no_confirm(true)
.build()?
.update_extended()?;
if let UpdateStatus::Updated(release) = &result {
println!("\nTopgrade upgraded to {}:\n", release.version);
if let Some(body) = &release.body {
println!("{}", body);
}
} else { } else {
println!("Topgrade is up-to-date"); let assume_yes = ctx.config().yes(Step::SelfUpdate);
} let current_exe = env::current_exe();
{ let target = self_update_crate::get_target();
if result.updated() { let result = Update::configure()
print_warning("Respawning..."); .repo_owner("topgrade-rs")
let mut command = Command::new(current_exe?); .repo_name("topgrade")
command.args(env::args().skip(1)).env("TOPGRADE_NO_SELF_UPGRADE", ""); .target(target)
.bin_name(if cfg!(windows) { "topgrade.exe" } else { "topgrade" })
.show_output(true)
.show_download_progress(true)
.current_version(self_update_crate::cargo_crate_version!())
.no_confirm(assume_yes)
.build()?
.update_extended()?;
#[cfg(unix)] if let UpdateStatus::Updated(release) = &result {
{ println!("\nTopgrade upgraded to {}:\n", release.version);
let err = command.exec(); if let Some(body) = &release.body {
bail!(err); println!("{body}");
} }
} else {
println!("Topgrade is up-to-date");
}
#[cfg(windows)] {
{ if result.updated() {
#[allow(clippy::disallowed_methods)] print_info("Respawning...");
let status = command.status()?; let mut command = Command::new(current_exe?);
bail!(Upgraded(status)); command.args(env::args().skip(1)).env("TOPGRADE_NO_SELF_UPGRADE", "");
#[cfg(unix)]
{
let err = command.exec();
bail!(err);
}
#[cfg(windows)]
{
#[allow(clippy::disallowed_methods)]
let status = command.status()?;
bail!(Upgraded(status));
}
} }
} }
}
Ok(()) Ok(())
}
} }


@@ -1,3 +1,4 @@
+use std::fmt::{Display, Formatter};
 use std::path::Path;
 use std::process::Command;
@@ -5,6 +6,7 @@ use color_eyre::eyre::eyre;
 use color_eyre::eyre::Context;
 use color_eyre::eyre::Result;
 use tracing::{debug, error, warn};
+use wildmatch::WildMatch;
 use crate::command::CommandExt;
 use crate::error::{self, TopgradeError};
@@ -18,15 +20,51 @@ use crate::{execution_context::ExecutionContext, utils::require};
// themselves or when using docker-compose. // themselves or when using docker-compose.
const NONEXISTENT_REPO: &str = "repository does not exist"; const NONEXISTENT_REPO: &str = "repository does not exist";
/// Uniquely identifies a `Container`.
#[derive(Debug)]
struct Container {
/// `Repository` and `Tag`
///
/// format: `Repository:Tag`, e.g., `nixos/nix:latest`.
repo_tag: String,
/// Platform
///
/// format: `OS/Architecture`, e.g., `linux/amd64`.
platform: String,
}
impl Container {
/// Construct a new `Container`.
fn new(repo_tag: String, platform: String) -> Self {
Self { repo_tag, platform }
}
}
impl Display for Container {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
// e.g., "`fedora:latest` for `linux/amd64`"
write!(f, "`{}` for `{}`", self.repo_tag, self.platform)
}
}
 /// Returns a Vector of all containers, with Strings in the format
 /// "REGISTRY/[PATH/]CONTAINER_NAME:TAG"
-fn list_containers(crt: &Path) -> Result<Vec<String>> {
+///
+/// Containers specified in `ignored_containers` will be filtered out.
+fn list_containers(crt: &Path, ignored_containers: Option<&Vec<String>>) -> Result<Vec<Container>> {
let ignored_containers = ignored_containers.map(|patterns| {
patterns
.iter()
.map(|pattern| WildMatch::new(pattern))
.collect::<Vec<WildMatch>>()
});
debug!( debug!(
"Querying '{} image ls --format \"{{{{.Repository}}}}:{{{{.Tag}}}}\"' for containers", "Querying '{} image ls --format \"{{{{.Repository}}}}:{{{{.Tag}}}}/{{{{.ID}}}}\"' for containers",
crt.display() crt.display()
); );
let output = Command::new(crt) let output = Command::new(crt)
.args(["image", "ls", "--format", "{{.Repository}}:{{.Tag}}"]) .args(["image", "ls", "--format", "{{.Repository}}:{{.Tag}} {{.ID}}"])
.output_checked_with_utf8(|_| Ok(()))?; .output_checked_with_utf8(|_| Ok(()))?;
let mut retval = vec![]; let mut retval = vec![];
@@ -49,7 +87,33 @@ fn list_containers(crt: &Path) -> Result<Vec<String>> {
} }
debug!("Using container '{}'", line); debug!("Using container '{}'", line);
retval.push(String::from(line));
// line is of format: `Repository:Tag ImageID`, e.g., `nixos/nix:latest d80fea9c32b4`
let split_res = line.split(' ').collect::<Vec<&str>>();
assert_eq!(split_res.len(), 2);
let (repo_tag, image_id) = (split_res[0], split_res[1]);
if let Some(ref ignored_containers) = ignored_containers {
if ignored_containers.iter().any(|pattern| pattern.matches(repo_tag)) {
debug!("Skipping ignored container '{}'", line);
continue;
}
}
debug!(
"Querying '{} image inspect --format \"{{{{.Os}}}}/{{{{.Architecture}}}}\"' for container {}",
crt.display(),
image_id
);
let inspect_output = Command::new(crt)
.args(["image", "inspect", image_id, "--format", "{{.Os}}/{{.Architecture}}"])
.output_checked_with_utf8(|_| Ok(()))?;
let mut platform = inspect_output.stdout;
// truncate the trailing newline character
platform.truncate(platform.len() - 1);
assert!(platform.contains('/'));
retval.push(Container::new(repo_tag.to_string(), platform));
} }
Ok(retval) Ok(retval)
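The new `ignored_containers` parameter comes from the `containers_ignored_tags` config option used in `run_containers` below: each configured pattern is compiled into a `WildMatch`, and any image whose `Repository:Tag` matches is skipped. A small illustration of how those glob-style patterns behave (the pattern strings here are made up for the example):

use wildmatch::WildMatch;

fn main() {
    // Hypothetical ignore list, e.g. taken from the user's Topgrade configuration.
    let ignored = ["ghcr.io/*", "*:devel"].map(WildMatch::new);

    for repo_tag in ["nixos/nix:latest", "ghcr.io/foo/bar:1.2", "fedora:devel"] {
        let skip = ignored.iter().any(|pattern| pattern.matches(repo_tag));
        println!("{repo_tag}: {}", if skip { "ignored" } else { "pull" });
    }
}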
@@ -62,12 +126,18 @@ pub fn run_containers(ctx: &ExecutionContext) -> Result<()> {
print_separator("Containers"); print_separator("Containers");
let mut success = true; let mut success = true;
let containers = list_containers(&crt).context("Failed to list Docker containers")?; let containers =
list_containers(&crt, ctx.config().containers_ignored_tags()).context("Failed to list Docker containers")?;
debug!("Containers to inspect: {:?}", containers); debug!("Containers to inspect: {:?}", containers);
for container in containers.iter() { for container in containers.iter() {
debug!("Pulling container '{}'", container); debug!("Pulling container '{}'", container);
let args = vec!["pull", &container[..]]; let args = vec![
"pull",
container.repo_tag.as_str(),
"--platform",
container.platform.as_str(),
];
let mut exec = ctx.run_type().execute(&crt); let mut exec = ctx.run_type().execute(&crt);
if let Err(e) = exec.args(&args).status_checked() { if let Err(e) = exec.args(&args).status_checked() {

View File

@@ -1,9 +1,9 @@
#[cfg(any(windows, target_os = "macos"))] #[cfg(windows)]
use std::env; use std::env;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use color_eyre::eyre::Result; use color_eyre::eyre::Result;
use directories::BaseDirs; use etcetera::base_strategy::BaseStrategy;
use crate::command::CommandExt; use crate::command::CommandExt;
use crate::execution_context::ExecutionContext; use crate::execution_context::ExecutionContext;
@@ -23,20 +23,12 @@ pub struct Emacs {
} }
impl Emacs { impl Emacs {
fn directory_path(base_dirs: &BaseDirs) -> Option<PathBuf> { fn directory_path() -> Option<PathBuf> {
#[cfg(unix)] #[cfg(unix)]
cfg_if::cfg_if! { return {
if #[cfg(target_os = "macos")] { let emacs_xdg_dir = crate::XDG_DIRS.config_dir().join("emacs").if_exists();
let emacs_xdg_dir = env::var("XDG_CONFIG_HOME") crate::HOME_DIR.join(".emacs.d").if_exists().or(emacs_xdg_dir)
.ok() };
.and_then(|config| PathBuf::from(config).join("emacs").if_exists())
.or_else(|| base_dirs.home_dir().join(".config/emacs").if_exists());
} else {
let emacs_xdg_dir = base_dirs.config_dir().join("emacs").if_exists();
}
}
#[cfg(unix)]
return base_dirs.home_dir().join(".emacs.d").if_exists().or(emacs_xdg_dir);
#[cfg(windows)] #[cfg(windows)]
return env::var("HOME") return env::var("HOME")
@@ -47,11 +39,11 @@ impl Emacs {
.if_exists() .if_exists()
.or_else(|| PathBuf::from(&home).join(".config\\emacs").if_exists()) .or_else(|| PathBuf::from(&home).join(".config\\emacs").if_exists())
}) })
.or_else(|| base_dirs.data_dir().join(".emacs.d").if_exists()); .or_else(|| crate::WINDOWS_DIRS.data_dir().join(".emacs.d").if_exists());
} }
pub fn new(base_dirs: &BaseDirs) -> Self { pub fn new() -> Self {
let directory = Emacs::directory_path(base_dirs); let directory = Emacs::directory_path();
let doom = directory.as_ref().and_then(|d| d.join(DOOM_PATH).if_exists()); let doom = directory.as_ref().and_then(|d| d.join(DOOM_PATH).if_exists());
Self { directory, doom } Self { directory, doom }
} }

File diff suppressed because it is too large


@@ -3,139 +3,173 @@ use std::io;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::process::{Command, Output, Stdio}; use std::process::{Command, Output, Stdio};
use color_eyre::eyre::Context;
use color_eyre::eyre::{eyre, Result}; use color_eyre::eyre::{eyre, Result};
use console::style; use console::style;
use futures::stream::{iter, FuturesUnordered}; use futures::stream::{iter, FuturesUnordered, StreamExt};
use futures::StreamExt;
use glob::{glob_with, MatchOptions}; use glob::{glob_with, MatchOptions};
use tokio::process::Command as AsyncCommand; use tokio::process::Command as AsyncCommand;
use tokio::runtime; use tokio::runtime;
use tracing::{debug, error}; use tracing::{debug, error};
use crate::command::CommandExt; use crate::command::CommandExt;
use crate::config::Step;
use crate::execution_context::ExecutionContext; use crate::execution_context::ExecutionContext;
use crate::executor::RunType; use crate::steps::emacs::Emacs;
use crate::terminal::print_separator; use crate::terminal::print_separator;
use crate::utils::{which, PathExt}; use crate::utils::{require, PathExt};
use crate::{error::SkipStep, terminal::print_warning}; use crate::{error::SkipStep, terminal::print_warning, HOME_DIR};
use etcetera::base_strategy::BaseStrategy;
#[cfg(unix)]
use crate::XDG_DIRS;
#[cfg(windows)]
use crate::WINDOWS_DIRS;
pub fn run_git_pull(ctx: &ExecutionContext) -> Result<()> {
let mut repos = RepoStep::try_new()?;
let config = ctx.config();
// handle built-in repos
if config.use_predefined_git_repos() {
// should be executed on all the platforms
{
if config.should_run(Step::Emacs) {
let emacs = Emacs::new();
if !emacs.is_doom() {
if let Some(directory) = emacs.directory() {
repos.insert_if_repo(directory);
}
}
repos.insert_if_repo(HOME_DIR.join(".doom.d"));
}
if config.should_run(Step::Vim) {
repos.insert_if_repo(HOME_DIR.join(".vim"));
repos.insert_if_repo(HOME_DIR.join(".config/nvim"));
}
repos.insert_if_repo(HOME_DIR.join(".ideavimrc"));
repos.insert_if_repo(HOME_DIR.join(".intellimacs"));
if config.should_run(Step::Rcm) {
repos.insert_if_repo(HOME_DIR.join(".dotfiles"));
}
let powershell = crate::steps::powershell::Powershell::new();
if let Some(profile) = powershell.profile() {
repos.insert_if_repo(profile);
}
}
#[cfg(unix)]
{
repos.insert_if_repo(crate::steps::zsh::zshrc());
if config.should_run(Step::Tmux) {
repos.insert_if_repo(HOME_DIR.join(".tmux"));
}
repos.insert_if_repo(HOME_DIR.join(".config/fish"));
repos.insert_if_repo(XDG_DIRS.config_dir().join("openbox"));
repos.insert_if_repo(XDG_DIRS.config_dir().join("bspwm"));
repos.insert_if_repo(XDG_DIRS.config_dir().join("i3"));
repos.insert_if_repo(XDG_DIRS.config_dir().join("sway"));
}
#[cfg(windows)]
{
repos.insert_if_repo(
WINDOWS_DIRS
.cache_dir()
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
);
super::os::windows::insert_startup_scripts(&mut repos).ok();
}
}
// Handle user-defined repos
if let Some(custom_git_repos) = config.git_repos() {
for git_repo in custom_git_repos {
repos.glob_insert(git_repo);
}
}
// Warn the user about the bad patterns.
//
// NOTE: this should be executed **before** skipping the Git step, or the
// user won't receive this warning in cases where all the configured paths
// are bad patterns.
repos
.bad_patterns
.iter()
.for_each(|pattern| print_warning(format!("Path {pattern} did not contain any git repositories")));
if repos.is_repos_empty() {
return Err(SkipStep(String::from("No repositories to pull")).into());
}
print_separator("Git repositories");
repos.pull_repos(ctx)
}
#[cfg(windows)] #[cfg(windows)]
static PATH_PREFIX: &str = "\\\\?\\"; static PATH_PREFIX: &str = "\\\\?\\";
#[derive(Debug)] pub struct RepoStep {
pub struct Git { git: PathBuf,
git: Option<PathBuf>, repos: HashSet<PathBuf>,
}
pub struct Repositories<'a> {
git: &'a Git,
repositories: HashSet<String>,
glob_match_options: MatchOptions, glob_match_options: MatchOptions,
bad_patterns: Vec<String>, bad_patterns: Vec<String>,
} }
#[track_caller]
fn output_checked_utf8(output: Output) -> Result<()> { fn output_checked_utf8(output: Output) -> Result<()> {
if !(output.status.success()) { if !(output.status.success()) {
let stderr = String::from_utf8(output.stderr).unwrap(); let stderr = String::from_utf8_lossy(&output.stderr);
Err(eyre!(stderr)) let stderr = stderr.trim();
Err(eyre!("{stderr}"))
} else { } else {
Ok(()) Ok(())
} }
} }
async fn pull_repository(repo: String, git: &Path, ctx: &ExecutionContext<'_>) -> Result<()> { fn get_head_revision<P: AsRef<Path>>(git: &Path, repo: P) -> Option<String> {
let path = repo.to_string();
let before_revision = get_head_revision(git, &repo);
println!("{} {}", style("Pulling").cyan().bold(), path);
let mut command = AsyncCommand::new(git);
command
.stdin(Stdio::null())
.current_dir(&repo)
.args(["pull", "--ff-only"]);
if let Some(extra_arguments) = ctx.config().git_arguments() {
command.args(extra_arguments.split_whitespace());
}
let pull_output = command.output().await?;
let submodule_output = AsyncCommand::new(git)
.args(["submodule", "update", "--recursive"])
.current_dir(&repo)
.stdin(Stdio::null())
.output()
.await?;
let result = output_checked_utf8(pull_output).and_then(|_| output_checked_utf8(submodule_output));
if let Err(message) = &result {
println!("{} pulling {}", style("Failed").red().bold(), &repo);
print!("{}", message);
} else {
let after_revision = get_head_revision(git, &repo);
match (&before_revision, &after_revision) {
(Some(before), Some(after)) if before != after => {
println!("{} {}:", style("Changed").yellow().bold(), &repo);
Command::new(git)
.stdin(Stdio::null())
.current_dir(&repo)
.args([
"--no-pager",
"log",
"--no-decorate",
"--oneline",
&format!("{}..{}", before, after),
])
.status_checked()?;
println!();
}
_ => {
println!("{} {}", style("Up-to-date").green().bold(), &repo);
}
}
}
result.map(|_| ())
}
fn get_head_revision(git: &Path, repo: &str) -> Option<String> {
Command::new(git) Command::new(git)
.stdin(Stdio::null()) .stdin(Stdio::null())
.current_dir(repo) .current_dir(repo.as_ref())
.args(["rev-parse", "HEAD"]) .args(["rev-parse", "HEAD"])
.output_checked_utf8() .output_checked_utf8()
.map(|output| output.stdout.trim().to_string()) .map(|output| output.stdout.trim().to_string())
.map_err(|e| { .map_err(|e| {
error!("Error getting revision for {}: {}", repo, e); error!("Error getting revision for {}: {}", repo.as_ref().display(), e);
e e
}) })
.ok() .ok()
} }
fn has_remotes(git: &Path, repo: &str) -> Option<bool> { impl RepoStep {
Command::new(git) /// Try to create a `RepoStep`, fail if `git` is not found.
.stdin(Stdio::null()) pub fn try_new() -> Result<Self> {
.current_dir(repo) let git = require("git")?;
.args(["remote", "show"]) let mut glob_match_options = MatchOptions::new();
.output_checked_utf8()
.map(|output| output.stdout.lines().count() > 0)
.map_err(|e| {
error!("Error getting remotes for {}: {}", repo, e);
e
})
.ok()
}
impl Git { if cfg!(windows) {
pub fn new() -> Self { glob_match_options.case_sensitive = false;
Self { git: which("git") } }
Ok(Self {
git,
repos: HashSet::new(),
bad_patterns: Vec::new(),
glob_match_options,
})
} }
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<String> { /// Try to get the root of the repo specified in `path`.
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<PathBuf> {
match path.as_ref().canonicalize() { match path.as_ref().canonicalize() {
Ok(mut path) => { Ok(mut path) => {
debug_assert!(path.exists()); debug_assert!(path.exists());
@@ -159,105 +193,56 @@ impl Git {
path_string path_string
}; };
if let Some(git) = &self.git { let output = Command::new(&self.git)
let output = Command::new(git) .stdin(Stdio::null())
.stdin(Stdio::null()) .current_dir(path)
.current_dir(path) .args(["rev-parse", "--show-toplevel"])
.args(["rev-parse", "--show-toplevel"]) .output_checked_utf8()
.output_checked_utf8() .ok()
.ok() // trim the last newline char
.map(|output| output.stdout.trim().to_string()); .map(|output| PathBuf::from(output.stdout.trim()));
return output;
} return output;
} }
Err(e) => match e.kind() { Err(e) => match e.kind() {
io::ErrorKind::NotFound => debug!("{} does not exists", path.as_ref().display()), io::ErrorKind::NotFound => debug!("{} does not exist", path.as_ref().display()),
_ => error!("Error looking for {}: {}", path.as_ref().display(), e), _ => error!("Error looking for {}: {}", path.as_ref().display(), e),
}, },
} }
None None
} }
pub fn multi_pull_step(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
if repositories.repositories.is_empty() {
return Err(SkipStep(String::from("No repositories to pull")).into());
}
print_separator("Git repositories");
repositories
.bad_patterns
.iter()
.for_each(|pattern| print_warning(format!("Path {} did not contain any git repositories", pattern)));
self.multi_pull(repositories, ctx)
}
pub fn multi_pull(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
let git = self.git.as_ref().unwrap();
if let RunType::Dry = ctx.run_type() {
repositories
.repositories
.iter()
.for_each(|repo| println!("Would pull {}", &repo));
return Ok(());
}
let futures_iterator = repositories
.repositories
.iter()
.filter(|repo| match has_remotes(git, repo) {
Some(false) => {
println!(
"{} {} because it has no remotes",
style("Skipping").yellow().bold(),
repo
);
false
}
_ => true, // repo has remotes or command to check for remotes has failed. proceed to pull anyway.
})
.map(|repo| pull_repository(repo.clone(), git, ctx));
let stream_of_futures = if let Some(limit) = ctx.config().git_concurrency_limit() {
iter(futures_iterator).buffer_unordered(limit).boxed()
} else {
futures_iterator.collect::<FuturesUnordered<_>>().boxed()
};
let basic_rt = runtime::Runtime::new()?;
let results = basic_rt.block_on(async { stream_of_futures.collect::<Vec<Result<()>>>().await });
let error = results.into_iter().find(|r| r.is_err());
error.unwrap_or(Ok(()))
}
}
impl<'a> Repositories<'a> {
pub fn new(git: &'a Git) -> Self {
let mut glob_match_options = MatchOptions::new();
if cfg!(windows) {
glob_match_options.case_sensitive = false;
}
Self {
git,
repositories: HashSet::new(),
bad_patterns: Vec::new(),
glob_match_options,
}
}
/// Check if `path` is a git repo, if yes, add it to `self.repos`.
///
/// Return the check result.
pub fn insert_if_repo<P: AsRef<Path>>(&mut self, path: P) -> bool { pub fn insert_if_repo<P: AsRef<Path>>(&mut self, path: P) -> bool {
if let Some(repo) = self.git.get_repo_root(path) { if let Some(repo) = self.get_repo_root(path) {
self.repositories.insert(repo); self.repos.insert(repo);
true true
} else { } else {
false false
} }
} }
/// Check if `repo` has a remote.
fn has_remotes<P: AsRef<Path>>(&self, repo: P) -> Option<bool> {
let mut cmd = Command::new(&self.git);
cmd.stdin(Stdio::null())
.current_dir(repo.as_ref())
.args(["remote", "show"]);
let res = cmd.output_checked_utf8();
res.map(|output| output.stdout.lines().count() > 0)
.map_err(|e| {
error!("Error getting remotes for {}: {}", repo.as_ref().display(), e);
e
})
.ok()
}
/// Similar to `insert_if_repo`, with glob support.
pub fn glob_insert(&mut self, pattern: &str) { pub fn glob_insert(&mut self, pattern: &str) {
if let Ok(glob) = glob_with(pattern, self.glob_match_options) { if let Ok(glob) = glob_with(pattern, self.glob_match_options) {
let mut last_git_repo: Option<PathBuf> = None; let mut last_git_repo: Option<PathBuf> = None;
@@ -267,7 +252,7 @@ impl<'a> Repositories<'a> {
if let Some(last_git_repo) = &last_git_repo { if let Some(last_git_repo) = &last_git_repo {
if path.is_descendant_of(last_git_repo) { if path.is_descendant_of(last_git_repo) {
debug!( debug!(
"Skipping {} because it's a decendant of last known repo {}", "Skipping {} because it's a descendant of last known repo {}",
path.display(), path.display(),
last_git_repo.display() last_git_repo.display()
); );
@@ -292,14 +277,130 @@ impl<'a> Repositories<'a> {
} }
} }
#[cfg(unix)] /// True if `self.repos` is empty.
pub fn is_empty(&self) -> bool { pub fn is_repos_empty(&self) -> bool {
self.repositories.is_empty() self.repos.is_empty()
} }
/// Remove `path` from `self.repos`.
///
// `cfg(unix)` because it is only used in the oh-my-zsh step.
#[cfg(unix)] #[cfg(unix)]
pub fn remove(&mut self, path: &str) { pub fn remove<P: AsRef<Path>>(&mut self, path: P) {
let _removed = self.repositories.remove(path); let _removed = self.repos.remove(path.as_ref());
debug_assert!(_removed); debug_assert!(_removed);
} }
/// Try to pull a repo.
async fn pull_repo<P: AsRef<Path>>(&self, ctx: &ExecutionContext<'_>, repo: P) -> Result<()> {
let before_revision = get_head_revision(&self.git, &repo);
if ctx.config().verbose() {
println!("{} {}", style("Pulling").cyan().bold(), repo.as_ref().display());
}
let mut command = AsyncCommand::new(&self.git);
command
.stdin(Stdio::null())
.current_dir(&repo)
.args(["pull", "--ff-only"]);
if let Some(extra_arguments) = ctx.config().git_arguments() {
command.args(extra_arguments.split_whitespace());
}
let pull_output = command.output().await?;
let submodule_output = AsyncCommand::new(&self.git)
.args(["submodule", "update", "--recursive"])
.current_dir(&repo)
.stdin(Stdio::null())
.output()
.await?;
let result = output_checked_utf8(pull_output)
.and_then(|_| output_checked_utf8(submodule_output))
.wrap_err_with(|| format!("Failed to pull {}", repo.as_ref().display()));
if result.is_err() {
println!("{} pulling {}", style("Failed").red().bold(), repo.as_ref().display());
} else {
let after_revision = get_head_revision(&self.git, repo.as_ref());
match (&before_revision, &after_revision) {
(Some(before), Some(after)) if before != after => {
println!("{} {}", style("Changed").yellow().bold(), repo.as_ref().display());
Command::new(&self.git)
.stdin(Stdio::null())
.current_dir(&repo)
.args([
"--no-pager",
"log",
"--no-decorate",
"--oneline",
&format!("{before}..{after}"),
])
.status_checked()?;
println!();
}
_ => {
if ctx.config().verbose() {
println!("{} {}", style("Up-to-date").green().bold(), repo.as_ref().display());
}
}
}
}
result.map(|_| ())
}
/// Pull the repositories specified in `self.repos`.
///
/// # NOTE
/// This function creates an async runtime internally and does the real work
/// there, so the function itself is not async.
fn pull_repos(&self, ctx: &ExecutionContext) -> Result<()> {
if ctx.run_type().dry() {
self.repos
.iter()
.for_each(|repo| println!("Would pull {}", repo.display()));
return Ok(());
}
if !ctx.config().verbose() {
println!(
"\n{} updated repositories will be shown...\n",
style("Only").green().bold()
);
}
let futures_iterator = self
.repos
.iter()
.filter(|repo| match self.has_remotes(repo) {
Some(false) => {
println!(
"{} {} because it has no remotes",
style("Skipping").yellow().bold(),
repo.display()
);
false
}
_ => true, // repo has remotes or command to check for remotes has failed. proceed to pull anyway.
})
.map(|repo| self.pull_repo(ctx, repo));
let stream_of_futures = if let Some(limit) = ctx.config().git_concurrency_limit() {
iter(futures_iterator).buffer_unordered(limit).boxed()
} else {
futures_iterator.collect::<FuturesUnordered<_>>().boxed()
};
let basic_rt = runtime::Runtime::new()?;
let results = basic_rt.block_on(async { stream_of_futures.collect::<Vec<Result<()>>>().await });
let error = results.into_iter().find(|r| r.is_err());
error.unwrap_or(Ok(()))
}
} }
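As the note above says, `pull_repos` fans the pulls out concurrently but keeps a synchronous signature by spinning up a Tokio runtime and blocking on the combined stream; `git_concurrency_limit` decides between a bounded `buffer_unordered` stream and an unbounded `FuturesUnordered`. A stripped-down sketch of that pattern, with a stand-in task instead of the real `git pull`:

use futures::stream::{iter, FuturesUnordered, StreamExt};
use tokio::runtime;

async fn pull(repo: &str) -> Result<(), String> {
    // Stand-in for the real `git pull --ff-only` + submodule update.
    println!("pulling {repo}");
    Ok(())
}

fn pull_all(repos: &[&str], limit: Option<usize>) -> Result<(), String> {
    let futures_iterator = repos.iter().map(|repo| pull(repo));

    // With a limit, at most `limit` pulls are in flight; otherwise all run at once.
    let stream = match limit {
        Some(limit) => iter(futures_iterator).buffer_unordered(limit).boxed(),
        None => futures_iterator.collect::<FuturesUnordered<_>>().boxed(),
    };

    // Blocking on a runtime here keeps the step's public API synchronous.
    let rt = runtime::Runtime::new().map_err(|e| e.to_string())?;
    let results = rt.block_on(async { stream.collect::<Vec<_>>().await });
    results.into_iter().find(|r| r.is_err()).unwrap_or(Ok(()))
}

fn main() -> Result<(), String> {
    pull_all(&["~/src/topgrade", "~/.dotfiles"], Some(2))
}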


@@ -4,27 +4,27 @@ use std::process::Command;
 use color_eyre::eyre::Result;
 use crate::command::CommandExt;
-use crate::executor::RunType;
+use crate::execution_context::ExecutionContext;
 use crate::terminal::print_separator;
 use crate::utils;
 use crate::utils::PathExt;
 /// <https://github.com/Gelio/go-global-update>
-pub fn run_go_global_update(run_type: RunType) -> Result<()> {
+pub fn run_go_global_update(ctx: &ExecutionContext) -> Result<()> {
     let go_global_update = require_go_bin("go-global-update")?;
     print_separator("go-global-update");
-    run_type.execute(go_global_update).status_checked()
+    ctx.run_type().execute(go_global_update).status_checked()
 }
 /// <https://github.com/nao1215/gup>
-pub fn run_go_gup(run_type: RunType) -> Result<()> {
+pub fn run_go_gup(ctx: &ExecutionContext) -> Result<()> {
     let gup = require_go_bin("gup")?;
     print_separator("gup");
-    run_type.execute(gup).arg("update").status_checked()
+    ctx.run_type().execute(gup).arg("update").status_checked()
 }
 /// Get the path of a Go binary.


@@ -4,7 +4,8 @@ use std::os::unix::fs::MetadataExt;
use std::path::PathBuf; use std::path::PathBuf;
use std::process::Command; use std::process::Command;
use crate::utils::require_option; use crate::utils::{require_option, REQUIRE_SUDO};
use crate::HOME_DIR;
use color_eyre::eyre::Result; use color_eyre::eyre::Result;
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
use nix::unistd::Uid; use nix::unistd::Uid;
@@ -91,7 +92,7 @@ impl NPM {
fn upgrade(&self, ctx: &ExecutionContext, use_sudo: bool) -> Result<()> { fn upgrade(&self, ctx: &ExecutionContext, use_sudo: bool) -> Result<()> {
let args = ["update", self.global_location_arg()]; let args = ["update", self.global_location_arg()];
if use_sudo { if use_sudo {
let sudo = require_option(ctx.sudo().clone(), String::from("sudo is not installed"))?; let sudo = require_option(ctx.sudo().clone(), REQUIRE_SUDO.to_string())?;
ctx.run_type() ctx.run_type()
.execute(sudo) .execute(sudo)
.arg(&self.command) .arg(&self.command)
@@ -155,7 +156,7 @@ impl Yarn {
let args = ["global", "upgrade"]; let args = ["global", "upgrade"];
if use_sudo { if use_sudo {
let sudo = require_option(ctx.sudo().clone(), String::from("sudo is not installed"))?; let sudo = require_option(ctx.sudo().clone(), REQUIRE_SUDO.to_string())?;
ctx.run_type() ctx.run_type()
.execute(sudo) .execute(sudo)
.arg(self.yarn.as_ref().unwrap_or(&self.command)) .arg(self.yarn.as_ref().unwrap_or(&self.command))
@@ -229,7 +230,7 @@ pub fn run_npm_upgrade(ctx: &ExecutionContext) -> Result<()> {
pub fn run_pnpm_upgrade(ctx: &ExecutionContext) -> Result<()> { pub fn run_pnpm_upgrade(ctx: &ExecutionContext) -> Result<()> {
let pnpm = require("pnpm").map(|b| NPM::new(b, NPMVariant::Pnpm))?; let pnpm = require("pnpm").map(|b| NPM::new(b, NPMVariant::Pnpm))?;
print_separator("Node Package Manager"); print_separator("Performant Node Package Manager");
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
{ {
@@ -265,7 +266,7 @@ pub fn run_yarn_upgrade(ctx: &ExecutionContext) -> Result<()> {
pub fn deno_upgrade(ctx: &ExecutionContext) -> Result<()> { pub fn deno_upgrade(ctx: &ExecutionContext) -> Result<()> {
let deno = require("deno")?; let deno = require("deno")?;
let deno_dir = ctx.base_dirs().home_dir().join(".deno"); let deno_dir = HOME_DIR.join(".deno");
if !deno.canonicalize()?.is_descendant_of(&deno_dir) { if !deno.canonicalize()?.is_descendant_of(&deno_dir) {
let skip_reason = SkipStep("Deno installed outside of .deno directory".to_string()); let skip_reason = SkipStep("Deno installed outside of .deno directory".to_string());


@@ -26,7 +26,7 @@ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
ctx.run_type().execute(&pkg).arg("clean").status_checked()?;
let apt = require("apt")?;
- let mut command = ctx.run_type().execute(&apt);
+ let mut command = ctx.run_type().execute(apt);
command.arg("autoremove");
if ctx.config().yes(Step::System) {
command.arg("-y");


@@ -80,8 +80,18 @@ pub struct GarudaUpdate {
impl ArchPackageManager for GarudaUpdate {
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
let mut command = ctx.run_type().execute(&self.executable);
- command.env("PATH", get_execution_path());
+ command
+ .env("PATH", get_execution_path())
+ .env("UPDATE_AUR", "1")
+ .env("SKIP_MIRRORLIST", "1");
+ if ctx.config().yes(Step::System) {
+ command.env("PACMAN_NOCONFIRM", "1");
+ }
+ command.args(ctx.config().garuda_update_arguments().split_whitespace());
command.status_checked()?;
Ok(())
}
}
@@ -267,7 +277,7 @@ impl Aura {
impl ArchPackageManager for Aura {
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
- let sudo = which("sudo").unwrap_or_else(PathBuf::new);
+ let sudo = which("sudo").unwrap_or_default();
let mut aur_update = ctx.run_type().execute(&sudo);
if sudo.ends_with("sudo") {


@@ -1,26 +1,34 @@
use crate::command::CommandExt;
- use crate::executor::RunType;
- use crate::sudo::Sudo;
+ use crate::execution_context::ExecutionContext;
use crate::terminal::print_separator;
- use crate::utils::require_option;
+ use crate::utils::{require_option, REQUIRE_SUDO};
+ use crate::Step;
use color_eyre::eyre::Result;
use std::process::Command;
- pub fn upgrade_packages(sudo: Option<&Sudo>, run_type: RunType) -> Result<()> {
- let sudo = require_option(sudo, String::from("No sudo detected"))?;
+ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
+ let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("DragonFly BSD Packages");
- run_type
- .execute(sudo)
- .args(["/usr/local/sbin/pkg", "upgrade"])
- .status_checked()
+ let mut cmd = ctx.run_type().execute(sudo);
+ cmd.args(["/usr/local/sbin/pkg", "upgrade"]);
+ if ctx.config().yes(Step::System) {
+ cmd.arg("-y");
+ }
+ cmd.status_checked()
}
- pub fn audit_packages(sudo: Option<&Sudo>) -> Result<()> {
- if let Some(sudo) = sudo {
- println!();
- Command::new(sudo)
- .args(["/usr/local/sbin/pkg", "audit", "-Fr"])
- .status_checked()?;
- }
+ pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
+ let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+ print_separator("DragonFly BSD Audit");
+ #[allow(clippy::disallowed_methods)]
+ if !Command::new(sudo)
+ .args(["/usr/local/sbin/pkg", "audit", "-Fr"])
+ .status()?
+ .success()
+ {
+ println!("The package audit was successful, but vulnerable packages still remain on the system");
+ }
Ok(())
}
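A side note on the rewritten audit step above: it calls `status()` instead of the checked helper because `pkg audit` conventionally exits non-zero when vulnerable packages are found, and that outcome should be reported rather than treated as a failed step. A small generic sketch of this pattern, with an illustrative program name and arguments:

use std::io;
use std::process::Command;

// Run an audit-style command whose non-zero exit code means "findings were
// reported", not "the command could not run".
fn run_audit(program: &str, args: &[&str]) -> io::Result<()> {
    let status = Command::new(program).args(args).status()?;
    if !status.success() {
        // Keep the overall step green, but surface the finding to the user.
        println!("audit reported findings (exit code: {:?})", status.code());
    }
    Ok(())
}

fn main() -> io::Result<()> {
    run_audit("pkg", &["audit", "-Fr"])
}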


@@ -1,27 +1,25 @@
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
- use crate::executor::RunType;
- use crate::sudo::Sudo;
use crate::terminal::print_separator;
- use crate::utils::require_option;
+ use crate::utils::{require_option, REQUIRE_SUDO};
use crate::Step;
use color_eyre::eyre::Result;
use std::process::Command;
- pub fn upgrade_freebsd(sudo: Option<&Sudo>, run_type: RunType) -> Result<()> {
- let sudo = require_option(sudo, String::from("No sudo detected"))?;
+ pub fn upgrade_freebsd(ctx: &ExecutionContext) -> Result<()> {
+ let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("FreeBSD Update");
- run_type
+ ctx.run_type()
.execute(sudo)
.args(["/usr/sbin/freebsd-update", "fetch", "install"])
.status_checked()
}
- pub fn upgrade_packages(ctx: &ExecutionContext, sudo: Option<&Sudo>, run_type: RunType) -> Result<()> {
- let sudo = require_option(sudo, String::from("No sudo detected"))?;
+ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
+ let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("FreeBSD Packages");
- let mut command = run_type.execute(sudo);
+ let mut command = ctx.run_type().execute(sudo);
command.args(["/usr/sbin/pkg", "upgrade"]);
if ctx.config().yes(Step::System) {
@@ -30,12 +28,13 @@ pub fn upgrade_packages(ctx: &ExecutionContext, sudo: Option<&Sudo>, run_type: R
command.status_checked()
}
- pub fn audit_packages(sudo: Option<&Sudo>) -> Result<()> {
- if let Some(sudo) = sudo {
- println!();
- Command::new(sudo)
- .args(["/usr/sbin/pkg", "audit", "-Fr"])
- .status_checked()?;
- }
+ pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
+ let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+ print_separator("FreeBSD Audit");
+ Command::new(sudo)
+ .args(["/usr/sbin/pkg", "audit", "-Fr"])
+ .status_checked()?;
Ok(())
}

File diff suppressed because it is too large.


@@ -1,16 +1,18 @@
use crate::command::CommandExt; use crate::command::CommandExt;
use crate::execution_context::ExecutionContext; use crate::execution_context::ExecutionContext;
use crate::executor::RunType;
use crate::terminal::{print_separator, prompt_yesno}; use crate::terminal::{print_separator, prompt_yesno};
use crate::utils::{require_option, REQUIRE_SUDO};
use crate::{utils::require, Step}; use crate::{utils::require, Step};
use color_eyre::eyre::Result; use color_eyre::eyre::Result;
use std::collections::HashSet;
use std::fs; use std::fs;
use std::process::Command; use std::process::Command;
use tracing::debug; use tracing::debug;
pub fn run_macports(ctx: &ExecutionContext) -> Result<()> { pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {
require("port")?; require("port")?;
let sudo = ctx.sudo().as_ref().unwrap(); let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("MacPorts"); print_separator("MacPorts");
ctx.run_type() ctx.run_type()
.execute(sudo) .execute(sudo)
@@ -30,17 +32,17 @@ pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {
Ok(()) Ok(())
} }
pub fn run_mas(run_type: RunType) -> Result<()> { pub fn run_mas(ctx: &ExecutionContext) -> Result<()> {
let mas = require("mas")?; let mas = require("mas")?;
print_separator("macOS App Store"); print_separator("macOS App Store");
run_type.execute(mas).arg("upgrade").status_checked() ctx.run_type().execute(mas).arg("upgrade").status_checked()
} }
pub fn upgrade_macos(ctx: &ExecutionContext) -> Result<()> { pub fn upgrade_macos(ctx: &ExecutionContext) -> Result<()> {
print_separator("macOS system update"); print_separator("macOS system update");
let should_ask = !(ctx.config().yes(Step::System)) || (ctx.config().dry_run()); let should_ask = !(ctx.config().yes(Step::System) || ctx.config().dry_run());
if should_ask { if should_ask {
println!("Finding available software"); println!("Finding available software");
if system_update_available()? { if system_update_available()? {
@@ -92,3 +94,148 @@ pub fn run_sparkle(ctx: &ExecutionContext) -> Result<()> {
} }
Ok(()) Ok(())
} }
pub fn update_xcodes(ctx: &ExecutionContext) -> Result<()> {
let xcodes = require("xcodes")?;
print_separator("Xcodes");
let should_ask = !(ctx.config().yes(Step::Xcodes) || ctx.config().dry_run());
let releases = ctx
.run_type()
.execute(&xcodes)
.args(["update"])
.output_checked_utf8()?
.stdout;
let releases_installed: Vec<String> = releases
.lines()
.filter(|r| r.contains("(Installed)"))
.map(String::from)
.collect();
if releases_installed.is_empty() {
println!("No Xcode releases installed.");
return Ok(());
}
let (installed_gm, installed_beta, installed_regular) =
releases_installed
.iter()
.fold((false, false, false), |(gm, beta, regular), release| {
(
gm || release.contains("GM") || release.contains("Release Candidate"),
beta || release.contains("Beta"),
regular
|| !(release.contains("GM")
|| release.contains("Release Candidate")
|| release.contains("Beta")),
)
});
let releases_gm = releases
.lines()
.filter(|&r| r.matches("GM").count() > 0 || r.matches("Release Candidate").count() > 0)
.map(String::from)
.collect();
let releases_beta = releases
.lines()
.filter(|&r| r.matches("Beta").count() > 0)
.map(String::from)
.collect();
let releases_regular = releases
.lines()
.filter(|&r| {
r.matches("GM").count() == 0
&& r.matches("Release Candidate").count() == 0
&& r.matches("Beta").count() == 0
})
.map(String::from)
.collect();
if installed_gm {
process_xcodes_releases(releases_gm, should_ask, ctx)?;
}
if installed_beta {
process_xcodes_releases(releases_beta, should_ask, ctx)?;
}
if installed_regular {
process_xcodes_releases(releases_regular, should_ask, ctx)?;
}
let releases_new = ctx
.run_type()
.execute(&xcodes)
.args(["list"])
.output_checked_utf8()?
.stdout;
let releases_gm_new_installed: HashSet<_> = releases_new
.lines()
.filter(|release| {
release.contains("(Installed)") && (release.contains("GM") || release.contains("Release Candidate"))
})
.collect();
let releases_beta_new_installed: HashSet<_> = releases_new
.lines()
.filter(|release| release.contains("(Installed)") && release.contains("Beta"))
.collect();
let releases_regular_new_installed: HashSet<_> = releases_new
.lines()
.filter(|release| {
release.contains("(Installed)")
&& !(release.contains("GM") || release.contains("Release Candidate") || release.contains("Beta"))
})
.collect();
for releases_new_installed in [
releases_gm_new_installed,
releases_beta_new_installed,
releases_regular_new_installed,
] {
if should_ask && releases_new_installed.len() == 2 {
let answer_uninstall = prompt_yesno("Would you like to move the former Xcode release to the trash?")?;
if answer_uninstall {
let _ = ctx
.run_type()
.execute(&xcodes)
.args([
"uninstall",
releases_new_installed.iter().next().cloned().unwrap_or_default(),
])
.status_checked();
}
}
}
Ok(())
}
pub fn process_xcodes_releases(releases_filtered: Vec<String>, should_ask: bool, ctx: &ExecutionContext) -> Result<()> {
let xcodes = require("xcodes")?;
if releases_filtered
.last()
.map(|s| !s.contains("(Installed)"))
.unwrap_or(true)
&& !releases_filtered.is_empty()
{
println!(
"New Xcode release detected: {}",
releases_filtered.last().cloned().unwrap_or_default()
);
if should_ask {
let answer_install = prompt_yesno("Would you like to install it?")?;
if answer_install {
let _ = ctx
.run_type()
.execute(xcodes)
.args(["install", &releases_filtered.last().cloned().unwrap_or_default()])
.status_checked();
}
println!();
}
}
Ok(())
}
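The grouping above keys off a few substrings in the `xcodes` output ("GM"/"Release Candidate", "Beta", everything else) so that each channel is tracked and updated separately. A compact sketch of the same classification is below; the sample lines are assumptions about what `xcodes list` prints, not captured output.

// Grouping used by the Xcodes step: GM/Release Candidate builds, Betas, and
// regular releases are handled as separate channels.
#[derive(Debug)]
enum Channel {
    GmOrReleaseCandidate,
    Beta,
    Regular,
}

fn channel(line: &str) -> Channel {
    if line.contains("GM") || line.contains("Release Candidate") {
        Channel::GmOrReleaseCandidate
    } else if line.contains("Beta") {
        Channel::Beta
    } else {
        Channel::Regular
    }
}

fn main() {
    for line in ["15.3 (Installed)", "16.0 Beta 2", "15.4 Release Candidate"] {
        println!("{line} -> {:?}", channel(line));
    }
}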


@@ -1,23 +1,33 @@
use crate::executor::RunType; use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::terminal::print_separator; use crate::terminal::print_separator;
use crate::utils::require_option; use crate::utils::{require_option, REQUIRE_SUDO};
use color_eyre::eyre::Result; use color_eyre::eyre::Result;
use std::path::PathBuf;
pub fn upgrade_openbsd(sudo: Option<&PathBuf>, run_type: RunType) -> Result<()> { pub fn upgrade_openbsd(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(sudo, String::from("No sudo detected"))?; let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("OpenBSD Update"); print_separator("OpenBSD Update");
run_type ctx.run_type()
.execute(sudo) .execute(sudo)
.args(&["/usr/sbin/sysupgrade", "-n"]) .args(["/usr/sbin/sysupgrade", "-n"])
.status_checked() .status_checked()
} }
pub fn upgrade_packages(sudo: Option<&PathBuf>, run_type: RunType) -> Result<()> { pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
let sudo = require_option(sudo, String::from("No sudo detected"))?; let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
print_separator("OpenBSD Packages"); print_separator("OpenBSD Packages");
run_type
if ctx.config().cleanup() {
ctx.run_type()
.execute(sudo)
.args(["/usr/sbin/pkg_delete", "-ac"])
.status_checked()?;
}
ctx.run_type()
.execute(sudo) .execute(sudo)
.args(&["/usr/sbin/pkg_add", "-u"]) .args(["/usr/sbin/pkg_add", "-u"])
.status_checked() .status_checked()?;
Ok(())
} }


@@ -1,6 +0,0 @@
NAME="Anarchy Linux"
PRETTY_NAME="Anarchy Linux"
ID=anarchy
ID_LIKE=anarchylinux
ANSI_COLOR="0;36"
HOME_URL="https://anarchylinux.org/"


@@ -1,10 +0,0 @@
NAME="Antergos Linux"
VERSION="18.7-ISO-Rolling"
ID="antergos"
ID_LIKE="arch"
PRETTY_NAME="Antergos Linux"
CPE_NAME="cpe:/o:antergosproject:antergos:18.7"
ANSI_COLOR="1;34;40"
HOME_URL="antergos.com"
SUPPORT_URL="forum.antergos.com"
BUG_REPORT_URL="@antergos"


@@ -0,0 +1,8 @@
PRETTY_NAME="Deepin 20.9"
NAME="Deepin"
VERSION_ID="20.9"
VERSION="20.9"
VERSION_CODENAME="apricot"
ID=Deepin
HOME_URL="https://www.deepin.org/"
BUG_REPORT_URL="https://bbs.deepin.org/"


@@ -0,0 +1,22 @@
NAME="Fedora Linux"
VERSION="39.20240415.0 (IoT Edition)"
ID=fedora
VERSION_ID=39
VERSION_CODENAME=""
PLATFORM_ID="platform:f39"
PRETTY_NAME="Fedora Linux 39.20240415.0 (IoT Edition)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
HOME_URL="https://fedoraproject.org/"
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora/f39/system-administrators-guide/"
SUPPORT_URL="https://ask.fedoraproject.org/"
BUG_REPORT_URL="https://bugzilla.redhat.com/"
REDHAT_BUGZILLA_PRODUCT="Fedora"
REDHAT_BUGZILLA_PRODUCT_VERSION=39
REDHAT_SUPPORT_PRODUCT="Fedora"
REDHAT_SUPPORT_PRODUCT_VERSION=39
SUPPORT_END=2024-11-12
VARIANT="IoT Edition"
VARIANT_ID=iot
OSTREE_VERSION='39.20240415.0'


@@ -0,0 +1,23 @@
NAME="Fedora Linux"
VERSION="39.20240105.0 (Kinoite)"
ID=fedora
VERSION_ID=39
VERSION_CODENAME=""
PLATFORM_ID="platform:f39"
PRETTY_NAME="Fedora Linux 39.20240105.0 (Kinoite)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
DEFAULT_HOSTNAME="fedora"
HOME_URL="https://kinoite.fedoraproject.org"
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-kinoite/"
SUPPORT_URL="https://ask.fedoraproject.org/"
BUG_REPORT_URL="https://pagure.io/fedora-kde/SIG/issues"
REDHAT_BUGZILLA_PRODUCT="Fedora"
REDHAT_BUGZILLA_PRODUCT_VERSION=39
REDHAT_SUPPORT_PRODUCT="Fedora"
REDHAT_SUPPORT_PRODUCT_VERSION=39
SUPPORT_END=2024-11-12
VARIANT="Kinoite"
VARIANT_ID=kinoite
OSTREE_VERSION='39.20240105.0'


@@ -0,0 +1,22 @@
NAME="Fedora Linux"
VERSION="39 (Onyx)"
ID=fedora
VERSION_ID=39
VERSION_CODENAME=""
PLATFORM_ID="platform:f39"
PRETTY_NAME="Fedora Linux 39 (Onyx)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
DEFAULT_HOSTNAME="fedora"
HOME_URL="https://fedoraproject.org/onyx/"
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-onyx/"
SUPPORT_URL="https://ask.fedoraproject.org/"
BUG_REPORT_URL="https://bugzilla.redhat.com/"
REDHAT_BUGZILLA_PRODUCT="Fedora"
REDHAT_BUGZILLA_PRODUCT_VERSION=39
REDHAT_SUPPORT_PRODUCT="Fedora"
REDHAT_SUPPORT_PRODUCT_VERSION=39
SUPPORT_END=2024-05-14
VARIANT="Onyx"
VARIANT_ID=onyx


@@ -0,0 +1,22 @@
NAME="Fedora Linux"
VERSION="39 (Sericea)"
ID=fedora
VERSION_ID=39
VERSION_CODENAME=""
PLATFORM_ID="platform:f39"
PRETTY_NAME="Fedora Linux 39 (Sericea)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
DEFAULT_HOSTNAME="fedora"
HOME_URL="https://fedoraproject.org/sericea/"
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
SUPPORT_URL="https://ask.fedoraproject.org/"
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
REDHAT_BUGZILLA_PRODUCT="Fedora"
REDHAT_BUGZILLA_PRODUCT_VERSION=39
REDHAT_SUPPORT_PRODUCT="Fedora"
REDHAT_SUPPORT_PRODUCT_VERSION=39
SUPPORT_END=2024-05-14
VARIANT="Sericea"
VARIANT_ID=sericea


@@ -0,0 +1,22 @@
NAME="Fedora Linux"
VERSION="39 (Silverblue)"
ID=fedora
VERSION_ID=39
VERSION_CODENAME=""
PLATFORM_ID="platform:f39"
PRETTY_NAME="Fedora Linux 39 (Silverblue)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
DEFAULT_HOSTNAME="fedora"
HOME_URL="https://silverblue.fedoraproject.org"
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-silverblue/"
SUPPORT_URL="https://ask.fedoraproject.org/"
BUG_REPORT_URL="https://github.com/fedora-silverblue/issue-tracker/issues"
REDHAT_BUGZILLA_PRODUCT="Fedora"
REDHAT_BUGZILLA_PRODUCT_VERSION=39
REDHAT_SUPPORT_PRODUCT="Fedora"
REDHAT_SUPPORT_PRODUCT_VERSION=39
SUPPORT_END=2024-05-14
VARIANT="Silverblue"
VARIANT_ID=silverblue


@@ -0,0 +1,23 @@
NAME="Fedora Linux"
VERSION="40.20240426.0 (Sway Atomic)"
ID=fedora
VERSION_ID=40
VERSION_CODENAME=""
PLATFORM_ID="platform:f40"
PRETTY_NAME="Fedora Linux 40.20240426.0 (Sway Atomic)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:fedoraproject:fedora:40"
DEFAULT_HOSTNAME="fedora"
HOME_URL="https://fedoraproject.org/atomic-desktops/sway/"
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
SUPPORT_URL="https://ask.fedoraproject.org/"
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
REDHAT_BUGZILLA_PRODUCT="Fedora"
REDHAT_BUGZILLA_PRODUCT_VERSION=40
REDHAT_SUPPORT_PRODUCT="Fedora"
REDHAT_SUPPORT_PRODUCT_VERSION=40
SUPPORT_END=2025-05-13
VARIANT="Sway Atomic"
VARIANT_ID=sway-atomic
OSTREE_VERSION='40.20240426.0'


@@ -0,0 +1,23 @@
NAME="Nobara Linux"
VERSION="39 (GNOME Edition)"
ID=nobara
ID_LIKE="rhel centos fedora"
VERSION_ID=39
VERSION_CODENAME=""
PLATFORM_ID="platform:f39"
PRETTY_NAME="Nobara Linux 39 (GNOME Edition)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=nobara-logo-icon
CPE_NAME="cpe:/o:nobaraproject:nobara:39"
DEFAULT_HOSTNAME="nobara"
HOME_URL="https://nobaraproject.org/"
DOCUMENTATION_URL="https://www.nobaraproject.org/"
SUPPORT_URL="https://www.nobaraproject.org/"
BUG_REPORT_URL="https://gitlab.com/gloriouseggroll/nobara-images"
REDHAT_BUGZILLA_PRODUCT="Nobara"
REDHAT_BUGZILLA_PRODUCT_VERSION=39
REDHAT_SUPPORT_PRODUCT="Nobara"
REDHAT_SUPPORT_PRODUCT_VERSION=39
SUPPORT_END=2024-05-14
VARIANT="GNOME Edition"
VARIANT_ID=gnome


@@ -0,0 +1,9 @@
NAME="PCLinuxOS"
VERSION="2022"
ID=pclinuxos
VERSION_ID=2022
ID_LIKE="mandriva"
PRETTY_NAME="PCLinuxOS 2022"
ANSI_COLOR="1;37"
HOME_URL="http://www.pclinuxos.com/"
SUPPORT_URL="http://www.pclinuxos.com/"


@@ -0,0 +1,11 @@
NAME="Solus"
VERSION="4.4"
ID="solus"
VERSION_CODENAME=harmony
VERSION_ID="4.4"
PRETTY_NAME="Solus 4.4 Harmony"
ANSI_COLOR="1;34"
HOME_URL="https://getsol.us"
SUPPORT_URL="https://help.getsol.us/docs/user/contributing/getting-involved"
BUG_REPORT_URL="https://dev.getsol.us/"
LOGO="distributor-logo-solus"


@@ -0,0 +1,12 @@
PRETTY_NAME="VanillaOS 22.10 all"
NAME="VanillaOS"
VERSION_ID="22.10"
VERSION="22.10 all"
VERSION_CODENAME="kinetic"
ID=ubuntu
ID_LIKE=debian
HOME_URL="https://github.com/vanilla-os"
SUPPORT_URL="https://github.com/vanilla-os"
BUG_REPORT_URL="https://github.com/vanilla-os"
PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
UBUNTU_CODENAME="kinetic"


@@ -0,0 +1,5 @@
ID=wolfi
NAME="Wolfi"
PRETTY_NAME="Wolfi"
VERSION_ID="20230201"
HOME_URL="https://wolfi.dev"


@@ -1,26 +1,30 @@
use std::ffi::OsStr;
use std::fs; use std::fs;
use std::os::unix::fs::MetadataExt; use std::os::unix::fs::MetadataExt;
use std::path::Component;
use std::path::PathBuf; use std::path::PathBuf;
use std::process::Command; use std::process::Command;
use std::{env, path::Path}; use std::{env::var, path::Path};
use crate::command::CommandExt; use crate::command::CommandExt;
use crate::Step; use crate::{Step, HOME_DIR};
use color_eyre::eyre::eyre;
use color_eyre::eyre::Context;
use color_eyre::eyre::Result; use color_eyre::eyre::Result;
use directories::BaseDirs;
use home; use home;
use ini::Ini; use ini::Ini;
use tracing::debug; use tracing::debug;
#[cfg(target_os = "linux")]
use super::linux::Distribution;
use crate::error::SkipStep; use crate::error::SkipStep;
use crate::execution_context::ExecutionContext; use crate::execution_context::ExecutionContext;
#[cfg(any(target_os = "linux", target_os = "macos"))] #[cfg(any(target_os = "linux", target_os = "macos"))]
use crate::executor::Executor; use crate::executor::Executor;
#[cfg(any(target_os = "linux", target_os = "macos"))]
use crate::executor::RunType; use crate::executor::RunType;
use crate::terminal::print_separator; use crate::terminal::print_separator;
#[cfg(not(any(target_os = "android", target_os = "macos")))] use crate::utils::{require, require_option, PathExt, REQUIRE_SUDO};
use crate::utils::require_option;
use crate::utils::{require, PathExt};
#[cfg(any(target_os = "linux", target_os = "macos"))] #[cfg(any(target_os = "linux", target_os = "macos"))]
const INTEL_BREW: &str = "/usr/local/bin/brew"; const INTEL_BREW: &str = "/usr/local/bin/brew";
@@ -87,7 +91,7 @@ impl BrewVariant {
} }
} }
pub fn run_fisher(run_type: RunType) -> Result<()> { pub fn run_fisher(ctx: &ExecutionContext) -> Result<()> {
let fish = require("fish")?; let fish = require("fish")?;
Command::new(&fish) Command::new(&fish)
@@ -110,7 +114,8 @@ pub fn run_fisher(run_type: RunType) -> Result<()> {
print_separator("Fisher"); print_separator("Fisher");
let version_str = run_type let version_str = ctx
.run_type()
.execute(&fish) .execute(&fish)
.args(["-c", "fisher --version"]) .args(["-c", "fisher --version"])
.output_checked_utf8()? .output_checked_utf8()?
@@ -119,15 +124,18 @@ pub fn run_fisher(run_type: RunType) -> Result<()> {
if version_str.starts_with("fisher version 3.") { if version_str.starts_with("fisher version 3.") {
// v3 - see https://github.com/topgrade-rs/topgrade/pull/37#issuecomment-1283844506 // v3 - see https://github.com/topgrade-rs/topgrade/pull/37#issuecomment-1283844506
run_type.execute(&fish).args(["-c", "fisher"]).status_checked() ctx.run_type().execute(&fish).args(["-c", "fisher"]).status_checked()
} else { } else {
// v4 // v4
run_type.execute(&fish).args(["-c", "fisher update"]).status_checked() ctx.run_type()
.execute(&fish)
.args(["-c", "fisher update"])
.status_checked()
} }
} }
pub fn run_bashit(ctx: &ExecutionContext) -> Result<()> { pub fn run_bashit(ctx: &ExecutionContext) -> Result<()> {
ctx.base_dirs().home_dir().join(".bash_it").require()?; HOME_DIR.join(".bash_it").require()?;
print_separator("Bash-it"); print_separator("Bash-it");
@@ -137,12 +145,30 @@ pub fn run_bashit(ctx: &ExecutionContext) -> Result<()> {
.status_checked() .status_checked()
} }
pub fn run_oh_my_bash(ctx: &ExecutionContext) -> Result<()> {
require("bash")?;
let oh_my_bash = var("OSH")
// default to `~/.oh-my-bash`
.unwrap_or(
HOME_DIR
.join(".oh-my-bash")
.to_str()
.expect("should be UTF-8 encoded")
.to_string(),
)
.require()?;
print_separator("oh-my-bash");
let mut update_script = oh_my_bash;
update_script.push_str("/tools/upgrade.sh");
ctx.run_type().execute("bash").arg(update_script).status_checked()
}
pub fn run_oh_my_fish(ctx: &ExecutionContext) -> Result<()> { pub fn run_oh_my_fish(ctx: &ExecutionContext) -> Result<()> {
let fish = require("fish")?; let fish = require("fish")?;
ctx.base_dirs() HOME_DIR.join(".local/share/omf/pkg/omf/functions/omf.fish").require()?;
.home_dir()
.join(".local/share/omf/pkg/omf/functions/omf.fish")
.require()?;
print_separator("oh-my-fish"); print_separator("oh-my-fish");
@@ -151,15 +177,18 @@ pub fn run_oh_my_fish(ctx: &ExecutionContext) -> Result<()> {
pub fn run_pkgin(ctx: &ExecutionContext) -> Result<()> { pub fn run_pkgin(ctx: &ExecutionContext) -> Result<()> {
let pkgin = require("pkgin")?; let pkgin = require("pkgin")?;
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
let mut command = ctx.run_type().execute(ctx.sudo().as_ref().unwrap()); print_separator("Pkgin");
let mut command = ctx.run_type().execute(sudo);
command.arg(&pkgin).arg("update"); command.arg(&pkgin).arg("update");
if ctx.config().yes(Step::Pkgin) { if ctx.config().yes(Step::Pkgin) {
command.arg("-y"); command.arg("-y");
} }
command.status_checked()?; command.status_checked()?;
let mut command = ctx.run_type().execute(ctx.sudo().as_ref().unwrap()); let mut command = ctx.run_type().execute(sudo);
command.arg(&pkgin).arg("upgrade"); command.arg(&pkgin).arg("upgrade");
if ctx.config().yes(Step::Pkgin) { if ctx.config().yes(Step::Pkgin) {
command.arg("-y"); command.arg("-y");
@@ -169,8 +198,7 @@ pub fn run_pkgin(ctx: &ExecutionContext) -> Result<()> {
pub fn run_fish_plug(ctx: &ExecutionContext) -> Result<()> { pub fn run_fish_plug(ctx: &ExecutionContext) -> Result<()> {
let fish = require("fish")?; let fish = require("fish")?;
ctx.base_dirs() HOME_DIR
.home_dir()
.join(".local/share/fish/plug/kidonng/fish-plug/functions/plug.fish") .join(".local/share/fish/plug/kidonng/fish-plug/functions/plug.fish")
.require()?; .require()?;
@@ -189,7 +217,7 @@ pub fn run_fish_plug(ctx: &ExecutionContext) -> Result<()> {
/// See: <https://github.com/danhper/fundle> /// See: <https://github.com/danhper/fundle>
pub fn run_fundle(ctx: &ExecutionContext) -> Result<()> { pub fn run_fundle(ctx: &ExecutionContext) -> Result<()> {
let fish = require("fish")?; let fish = require("fish")?;
ctx.base_dirs().home_dir().join(".config/fish/fundle").require()?; HOME_DIR.join(".config/fish/fundle").require()?;
print_separator("fundle"); print_separator("fundle");
@@ -203,7 +231,7 @@ pub fn run_fundle(ctx: &ExecutionContext) -> Result<()> {
pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> { pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
let gdbus = require("gdbus")?; let gdbus = require("gdbus")?;
require_option( require_option(
env::var("XDG_CURRENT_DESKTOP").ok().filter(|p| p.contains("GNOME")), var("XDG_CURRENT_DESKTOP").ok().filter(|p| p.contains("GNOME")),
"Desktop doest not appear to be gnome".to_string(), "Desktop doest not appear to be gnome".to_string(),
)?; )?;
let output = Command::new("gdbus") let output = Command::new("gdbus")
@@ -257,10 +285,15 @@ pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<
let run_type = ctx.run_type(); let run_type = ctx.run_type();
variant.execute(run_type).arg("update").status_checked()?; variant.execute(run_type).arg("update").status_checked()?;
variant
.execute(run_type) let mut command = variant.execute(run_type);
.args(["upgrade", "--ignore-pinned", "--formula"]) command.args(["upgrade", "--formula"]);
.status_checked()?;
if ctx.config().brew_fetch_head() {
command.arg("--fetch-HEAD");
}
command.status_checked()?;
if ctx.config().cleanup() { if ctx.config().cleanup() {
variant.execute(run_type).arg("cleanup").status_checked()?; variant.execute(run_type).arg("cleanup").status_checked()?;
@@ -300,6 +333,9 @@ pub fn run_brew_cask(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()>
if ctx.config().brew_cask_greedy() { if ctx.config().brew_cask_greedy() {
brew_args.push("--greedy"); brew_args.push("--greedy");
} }
if ctx.config().brew_greedy_latest() {
brew_args.push("--greedy-latest");
}
} }
variant.execute(run_type).args(&brew_args).status_checked()?; variant.execute(run_type).args(&brew_args).status_checked()?;
@@ -333,6 +369,7 @@ pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
let nix = require("nix")?; let nix = require("nix")?;
let nix_channel = require("nix-channel")?; let nix_channel = require("nix-channel")?;
let nix_env = require("nix-env")?; let nix_env = require("nix-env")?;
// TODO: Is None possible here?
let profile_path = match home::home_dir() { let profile_path = match home::home_dir() {
Some(home) => Path::new(&home).join(".nix-profile"), Some(home) => Path::new(&home).join(".nix-profile"),
None => Path::new("/nix/var/nix/profiles/per-user/default").into(), None => Path::new("/nix/var/nix/profiles/per-user/default").into(),
@@ -340,27 +377,11 @@ pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
debug!("nix profile: {:?}", profile_path); debug!("nix profile: {:?}", profile_path);
let manifest_json_path = profile_path.join("manifest.json"); let manifest_json_path = profile_path.join("manifest.json");
let output = Command::new(&nix_env).args(["--query", "nix"]).output_checked_utf8();
debug!("nix-env output: {:?}", output);
let should_self_upgrade = output.is_ok();
print_separator("Nix"); print_separator("Nix");
let multi_user = fs::metadata(&nix)?.uid() == 0;
debug!("Multi user nix: {}", multi_user);
#[cfg(target_os = "linux")]
{
use super::linux::Distribution;
if let Ok(Distribution::NixOS) = Distribution::detect() {
return Err(SkipStep(String::from("Nix on NixOS must be upgraded via nixos-rebuild switch")).into());
}
}
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
{ {
if let Ok(..) = require("darwin-rebuild") { if require("darwin-rebuild").is_ok() {
return Err(SkipStep(String::from( return Err(SkipStep(String::from(
"Nix-darwin on macOS must be upgraded via darwin-rebuild switch", "Nix-darwin on macOS must be upgraded via darwin-rebuild switch",
)) ))
@@ -369,29 +390,144 @@ pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
} }
let run_type = ctx.run_type(); let run_type = ctx.run_type();
if should_self_upgrade {
if multi_user {
ctx.execute_elevated(&nix, true)?.arg("upgrade-nix").status_checked()?;
} else {
run_type.execute(&nix).arg("upgrade-nix").status_checked()?;
}
}
run_type.execute(nix_channel).arg("--update").status_checked()?; run_type.execute(nix_channel).arg("--update").status_checked()?;
if std::path::Path::new(&manifest_json_path).exists() { if Path::new(&manifest_json_path).exists() {
run_type run_type
.execute(&nix) .execute(nix)
.args(nix_args())
.arg("profile") .arg("profile")
.arg("upgrade") .arg("upgrade")
.arg(".*") .arg(".*")
.arg("--verbose")
.status_checked() .status_checked()
} else { } else {
run_type.execute(&nix_env).arg("--upgrade").status_checked() let mut command = run_type.execute(nix_env);
command.arg("--upgrade");
if let Some(args) = ctx.config().nix_env_arguments() {
command.args(args.split_whitespace());
};
command.status_checked()
} }
} }
pub fn run_nix_self_upgrade(ctx: &ExecutionContext) -> Result<()> {
let nix = require("nix")?;
// Should we attempt to upgrade Nix with `nix upgrade-nix`?
#[allow(unused_mut)]
let mut should_self_upgrade = cfg!(target_os = "macos");
#[cfg(target_os = "linux")]
{
// We can't use `nix upgrade-nix` on NixOS.
if let Ok(Distribution::NixOS) = Distribution::detect() {
should_self_upgrade = false;
}
}
if !should_self_upgrade {
return Err(SkipStep(String::from(
"`nix upgrade-nix` can only be used on macOS or non-NixOS Linux",
))
.into());
}
if nix_profile_dir(&nix)?.is_none() {
return Err(SkipStep(String::from(
"`nix upgrade-nix` cannot be run when Nix is installed in a profile",
))
.into());
}
print_separator("Nix (self-upgrade)");
let multi_user = fs::metadata(&nix)?.uid() == 0;
debug!("Multi user nix: {}", multi_user);
let nix_args = nix_args();
if multi_user {
ctx.execute_elevated(&nix, true)?
.args(nix_args)
.arg("upgrade-nix")
.status_checked()
} else {
ctx.run_type()
.execute(&nix)
.args(nix_args)
.arg("upgrade-nix")
.status_checked()
}
}
/// If we try to `nix upgrade-nix` but Nix is installed with `nix profile`, we'll get a `does not
/// appear to be part of a Nix profile` error.
///
/// We duplicate some of the `nix` logic here to avoid this.
/// See: <https://github.com/NixOS/nix/blob/f0180487a0e4c0091b46cb1469c44144f5400240/src/nix/upgrade-nix.cc#L102-L139>
///
/// See: <https://github.com/NixOS/nix/issues/5473>
fn nix_profile_dir(nix: &Path) -> Result<Option<PathBuf>> {
// NOTE: `nix` uses the location of the `nix-env` binary for this but we're using the `nix`
// binary; should be the same.
let nix_bin_dir = nix.parent();
if nix_bin_dir.and_then(|p| p.file_name()) != Some(OsStr::new("bin")) {
debug!("Nix is not installed in a `bin` directory: {nix_bin_dir:?}");
return Ok(None);
}
let nix_dir = nix_bin_dir
.and_then(|bin_dir| bin_dir.parent())
.ok_or_else(|| eyre!("Unable to find Nix install directory from Nix binary {nix:?}"))?;
debug!("Found Nix in {nix_dir:?}");
let mut profile_dir = nix_dir.to_path_buf();
while profile_dir.is_symlink() {
profile_dir = profile_dir
.parent()
.ok_or_else(|| eyre!("Path has no parent: {profile_dir:?}"))?
.join(
profile_dir
.read_link()
.wrap_err_with(|| format!("Failed to read symlink {profile_dir:?}"))?,
);
// NOTE: `nix` uses a hand-rolled canonicalize function, Rust just uses `realpath`.
if profile_dir
.canonicalize()
.wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?
.components()
.any(|component| component == Component::Normal(OsStr::new("profiles")))
{
break;
}
}
debug!("Found Nix profile {profile_dir:?}");
let user_env = profile_dir
.canonicalize()
.wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?;
Ok(
if user_env
.file_name()
.and_then(|name| name.to_str())
.map(|name| name.ends_with("user-environment"))
.unwrap_or(false)
{
Some(profile_dir)
} else {
None
},
)
}
fn nix_args() -> [&'static str; 2] {
["--extra-experimental-features", "nix-command"]
}
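Both Nix code paths now prepend these flags ahead of their subcommand, so the resulting invocations look roughly like `nix --extra-experimental-features nix-command upgrade-nix` for the self-upgrade and `nix --extra-experimental-features nix-command profile upgrade '.*' --verbose` for profile-based installs; the exact argument list still depends on elevation and dry-run handling.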
pub fn run_yadm(ctx: &ExecutionContext) -> Result<()> { pub fn run_yadm(ctx: &ExecutionContext) -> Result<()> {
let yadm = require("yadm")?; let yadm = require("yadm")?;
@@ -400,45 +536,88 @@ pub fn run_yadm(ctx: &ExecutionContext) -> Result<()> {
ctx.run_type().execute(yadm).arg("pull").status_checked() ctx.run_type().execute(yadm).arg("pull").status_checked()
} }
pub fn run_asdf(run_type: RunType) -> Result<()> { pub fn run_asdf(ctx: &ExecutionContext) -> Result<()> {
let asdf = require("asdf")?; let asdf = require("asdf")?;
print_separator("asdf"); print_separator("asdf");
run_type.execute(&asdf).arg("update").status_checked_with_codes(&[42])?; ctx.run_type()
.execute(&asdf)
.arg("update")
.status_checked_with_codes(&[42])?;
run_type ctx.run_type()
.execute(&asdf) .execute(&asdf)
.args(["plugin", "update", "--all"]) .args(["plugin", "update", "--all"])
.status_checked() .status_checked()
} }
pub fn run_home_manager(run_type: RunType) -> Result<()> { pub fn run_mise(ctx: &ExecutionContext) -> Result<()> {
let mise = require("mise")?;
print_separator("mise");
ctx.run_type().execute(&mise).arg("upgrade").status_checked()?;
ctx.run_type()
.execute(&mise)
.args(["plugins", "update"])
.status_checked()
}
pub fn run_home_manager(ctx: &ExecutionContext) -> Result<()> {
let home_manager = require("home-manager")?; let home_manager = require("home-manager")?;
print_separator("home-manager"); print_separator("home-manager");
run_type.execute(home_manager).arg("switch").status_checked()
let mut cmd = ctx.run_type().execute(home_manager);
cmd.arg("switch");
if let Some(extra_args) = ctx.config().home_manager() {
cmd.args(extra_args);
}
cmd.status_checked()
} }
pub fn run_tldr(run_type: RunType) -> Result<()> { pub fn run_tldr(ctx: &ExecutionContext) -> Result<()> {
let tldr = require("tldr")?; let tldr = require("tldr")?;
print_separator("TLDR"); print_separator("TLDR");
run_type.execute(tldr).arg("--update").status_checked() ctx.run_type().execute(tldr).arg("--update").status_checked()
} }
pub fn run_pearl(run_type: RunType) -> Result<()> { pub fn run_pearl(ctx: &ExecutionContext) -> Result<()> {
let pearl = require("pearl")?; let pearl = require("pearl")?;
print_separator("pearl"); print_separator("pearl");
run_type.execute(pearl).arg("update").status_checked() ctx.run_type().execute(pearl).arg("update").status_checked()
} }
pub fn run_sdkman(base_dirs: &BaseDirs, cleanup: bool, run_type: RunType) -> Result<()> { pub fn run_pyenv(ctx: &ExecutionContext) -> Result<()> {
let pyenv = require("pyenv")?;
print_separator("pyenv");
let pyenv_dir = var("PYENV_ROOT")
.map(PathBuf::from)
.unwrap_or_else(|_| HOME_DIR.join(".pyenv"));
if !pyenv_dir.exists() {
return Err(SkipStep("Pyenv is installed, but $PYENV_ROOT is not set correctly".to_string()).into());
}
if !pyenv_dir.join(".git").exists() {
return Err(SkipStep("pyenv is not a git repository".to_string()).into());
}
ctx.run_type().execute(pyenv).arg("update").status_checked()
}
pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
let bash = require("bash")?; let bash = require("bash")?;
let sdkman_init_path = env::var("SDKMAN_DIR") let sdkman_init_path = var("SDKMAN_DIR")
.map(PathBuf::from) .map(PathBuf::from)
.unwrap_or_else(|_| base_dirs.home_dir().join(".sdkman")) .unwrap_or_else(|_| HOME_DIR.join(".sdkman"))
.join("bin") .join("bin")
.join("sdkman-init.sh") .join("sdkman-init.sh")
.require() .require()
@@ -446,9 +625,9 @@ pub fn run_sdkman(base_dirs: &BaseDirs, cleanup: bool, run_type: RunType) -> Res
print_separator("SDKMAN!"); print_separator("SDKMAN!");
let sdkman_config_path = env::var("SDKMAN_DIR") let sdkman_config_path = var("SDKMAN_DIR")
.map(PathBuf::from) .map(PathBuf::from)
.unwrap_or_else(|_| base_dirs.home_dir().join(".sdkman")) .unwrap_or_else(|_| HOME_DIR.join(".sdkman"))
.join("etc") .join("etc")
.join("config") .join("config")
.require()?; .require()?;
@@ -461,33 +640,33 @@ pub fn run_sdkman(base_dirs: &BaseDirs, cleanup: bool, run_type: RunType) -> Res
if selfupdate_enabled == "true" { if selfupdate_enabled == "true" {
let cmd_selfupdate = format!("source {} && sdk selfupdate", &sdkman_init_path); let cmd_selfupdate = format!("source {} && sdk selfupdate", &sdkman_init_path);
run_type ctx.run_type()
.execute(&bash) .execute(&bash)
.args(["-c", cmd_selfupdate.as_str()]) .args(["-c", cmd_selfupdate.as_str()])
.status_checked()?; .status_checked()?;
} }
let cmd_update = format!("source {} && sdk update", &sdkman_init_path); let cmd_update = format!("source {} && sdk update", &sdkman_init_path);
run_type ctx.run_type()
.execute(&bash) .execute(&bash)
.args(["-c", cmd_update.as_str()]) .args(["-c", cmd_update.as_str()])
.status_checked()?; .status_checked()?;
let cmd_upgrade = format!("source {} && sdk upgrade", &sdkman_init_path); let cmd_upgrade = format!("source {} && sdk upgrade", &sdkman_init_path);
run_type ctx.run_type()
.execute(&bash) .execute(&bash)
.args(["-c", cmd_upgrade.as_str()]) .args(["-c", cmd_upgrade.as_str()])
.status_checked()?; .status_checked()?;
if cleanup { if ctx.config().cleanup() {
let cmd_flush_archives = format!("source {} && sdk flush archives", &sdkman_init_path); let cmd_flush_archives = format!("source {} && sdk flush archives", &sdkman_init_path);
run_type ctx.run_type()
.execute(&bash) .execute(&bash)
.args(["-c", cmd_flush_archives.as_str()]) .args(["-c", cmd_flush_archives.as_str()])
.status_checked()?; .status_checked()?;
let cmd_flush_temp = format!("source {} && sdk flush temp", &sdkman_init_path); let cmd_flush_temp = format!("source {} && sdk flush temp", &sdkman_init_path);
run_type ctx.run_type()
.execute(&bash) .execute(&bash)
.args(["-c", cmd_flush_temp.as_str()]) .args(["-c", cmd_flush_temp.as_str()])
.status_checked()?; .status_checked()?;
@@ -504,6 +683,24 @@ pub fn run_bun(ctx: &ExecutionContext) -> Result<()> {
ctx.run_type().execute(bun).arg("upgrade").status_checked() ctx.run_type().execute(bun).arg("upgrade").status_checked()
} }
pub fn run_bun_packages(ctx: &ExecutionContext) -> Result<()> {
let bun = require("bun")?;
print_separator("Bun Packages");
let mut package_json: PathBuf = var("BUN_INSTALL")
.map(PathBuf::from)
.unwrap_or_else(|_| HOME_DIR.join(".bun"));
package_json.push("install/global/package.json");
if !package_json.exists() {
println!("No global packages installed");
return Ok(());
}
ctx.run_type().execute(bun).args(["-g", "update"]).status_checked()
}
/// Update dotfiles with `rcm(7)`. /// Update dotfiles with `rcm(7)`.
/// ///
/// See: <https://github.com/thoughtbot/rcm> /// See: <https://github.com/thoughtbot/rcm>
@@ -514,6 +711,13 @@ pub fn run_rcm(ctx: &ExecutionContext) -> Result<()> {
ctx.run_type().execute(rcup).arg("-v").status_checked() ctx.run_type().execute(rcup).arg("-v").status_checked()
} }
pub fn run_maza(ctx: &ExecutionContext) -> Result<()> {
let maza = require("maza")?;
print_separator("maza");
ctx.run_type().execute(maza).arg("update").status_checked()
}
pub fn reboot() -> Result<()> { pub fn reboot() -> Result<()> {
print!("Rebooting..."); print!("Rebooting...");
Command::new("sudo").arg("reboot").status_checked() Command::new("sudo").arg("reboot").status_checked()


@@ -1,16 +1,15 @@
use std::convert::TryFrom;
use std::path::Path; use std::path::Path;
use std::{ffi::OsStr, process::Command}; use std::{ffi::OsStr, process::Command};
use color_eyre::eyre::Result; use color_eyre::eyre::Result;
use etcetera::base_strategy::BaseStrategy;
use tracing::debug; use tracing::debug;
use crate::command::CommandExt; use crate::command::CommandExt;
use crate::execution_context::ExecutionContext; use crate::execution_context::ExecutionContext;
use crate::executor::RunType;
use crate::terminal::{print_separator, print_warning}; use crate::terminal::{print_separator, print_warning};
use crate::utils::require; use crate::utils::{require, which};
use crate::{error::SkipStep, steps::git::Repositories}; use crate::{error::SkipStep, steps::git::RepoStep};
use crate::{powershell, Step}; use crate::{powershell, Step};
pub fn run_chocolatey(ctx: &ExecutionContext) -> Result<()> { pub fn run_chocolatey(ctx: &ExecutionContext) -> Result<()> {
@@ -42,32 +41,74 @@ pub fn run_winget(ctx: &ExecutionContext) -> Result<()> {
print_separator("winget"); print_separator("winget");
if !ctx.config().enable_winget() {
print_warning("Winget is disabled by default. Enable it by setting enable_winget=true in the [windows] section in the configuration.");
return Err(SkipStep(String::from("Winget is disabled by default")).into());
}
ctx.run_type() ctx.run_type()
.execute(&winget) .execute(winget)
.args(["upgrade", "--all"]) .args(["upgrade", "--all"])
.status_checked() .status_checked()
} }
pub fn run_scoop(cleanup: bool, run_type: RunType) -> Result<()> { pub fn run_scoop(ctx: &ExecutionContext) -> Result<()> {
let scoop = require("scoop")?; let scoop = require("scoop")?;
print_separator("Scoop"); print_separator("Scoop");
run_type.execute(&scoop).args(["update"]).status_checked()?; ctx.run_type().execute(&scoop).args(["update"]).status_checked()?;
run_type.execute(&scoop).args(["update", "*"]).status_checked()?; ctx.run_type().execute(&scoop).args(["update", "*"]).status_checked()?;
if cleanup { if ctx.config().cleanup() {
run_type.execute(&scoop).args(["cleanup", "*"]).status_checked()?; ctx.run_type().execute(&scoop).args(["cleanup", "*"]).status_checked()?;
} }
Ok(()) Ok(())
} }
pub fn update_wsl(ctx: &ExecutionContext) -> Result<()> {
if !is_wsl_installed()? {
return Err(SkipStep("WSL not installed".to_string()).into());
}
let wsl = require("wsl")?;
print_separator("Update WSL");
let mut wsl_command = ctx.run_type().execute(wsl);
wsl_command.args(["--update"]);
if ctx.config().wsl_update_pre_release() {
wsl_command.args(["--pre-release"]);
}
if ctx.config().wsl_update_use_web_download() {
wsl_command.args(["--web-download"]);
}
wsl_command.status_checked()?;
Ok(())
}
/// Detect if WSL is installed or not.
///
/// For WSL, we cannot simply check if command `wsl` is installed as on newer
/// versions of Windows (since Windows 10 version 2004), this command is
/// installed by default.
///
/// If the command is installed and the user hasn't installed any Linux distros
/// on it, command `wsl -l` would print a help message and exit with failure; we
/// use this to check whether WSL is installed or not.
fn is_wsl_installed() -> Result<bool> {
if let Some(wsl) = which("wsl") {
// Don't use `output_checked` as an execution failure log is not wanted
#[allow(clippy::disallowed_methods)]
let output = Command::new(wsl).arg("-l").output()?;
let status = output.status;
if status.success() {
return Ok(true);
}
}
Ok(false)
}
fn get_wsl_distributions(wsl: &Path) -> Result<Vec<String>> { fn get_wsl_distributions(wsl: &Path) -> Result<Vec<String>> {
let output = Command::new(wsl).args(["--list", "-q"]).output_checked_utf8()?.stdout; let output = Command::new(wsl).args(["--list", "-q"]).output_checked_utf8()?.stdout;
Ok(output Ok(output
@@ -81,12 +122,45 @@ fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> R
let topgrade = Command::new(wsl) let topgrade = Command::new(wsl)
.args(["-d", dist, "bash", "-lc", "which topgrade"]) .args(["-d", dist, "bash", "-lc", "which topgrade"])
.output_checked_utf8() .output_checked_utf8()
.map_err(|_| SkipStep(String::from("Could not find Topgrade installed in WSL")))?; .map_err(|_| SkipStep(String::from("Could not find Topgrade installed in WSL")))?
.stdout // The normal output from `which topgrade` appends a newline, so we trim it here.
.trim_end()
.to_owned();
let mut command = ctx.run_type().execute(wsl); let mut command = ctx.run_type().execute(wsl);
// The `arg` method automatically quotes its arguments.
// This means we can't append additional arguments to `topgrade` in WSL
// by calling `arg` successively.
//
// For example:
//
// ```rust
// command
// .args(["-d", dist, "bash", "-c"])
// .arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade}"));
// ```
//
// creates a command string like:
// > `C:\WINDOWS\system32\wsl.EXE -d Ubuntu bash -c 'TOPGRADE_PREFIX=Ubuntu exec /bin/topgrade'`
//
// Adding the following:
//
// ```rust
// command.arg("-v");
// ```
//
// appends the next argument like so:
// > `C:\WINDOWS\system32\wsl.EXE -d Ubuntu bash -c 'TOPGRADE_PREFIX=Ubuntu exec /bin/topgrade' -v`
// which means `-v` isn't passed to `topgrade`.
let mut args = String::new();
if ctx.config().verbose() {
args.push_str("-v");
}
command command
.args(["-d", dist, "bash", "-c"]) .args(["-d", dist, "bash", "-c"])
.arg(format!("TOPGRADE_PREFIX={} exec {}", dist, topgrade)); .arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade} {args}"));
if ctx.config().yes(Step::Wsl) { if ctx.config().yes(Step::Wsl) {
command.arg("-y"); command.arg("-y");
@@ -96,6 +170,10 @@ fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> R
} }
pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> { pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
if !is_wsl_installed()? {
return Err(SkipStep("WSL not installed".to_string()).into());
}
let wsl = require("wsl")?; let wsl = require("wsl")?;
let wsl_distributions = get_wsl_distributions(&wsl)?; let wsl_distributions = get_wsl_distributions(&wsl)?;
let mut ran = false; let mut ran = false;
@@ -123,20 +201,17 @@ pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
pub fn windows_update(ctx: &ExecutionContext) -> Result<()> { pub fn windows_update(ctx: &ExecutionContext) -> Result<()> {
let powershell = powershell::Powershell::windows_powershell(); let powershell = powershell::Powershell::windows_powershell();
if powershell.supports_windows_update() {
print_separator("Windows Update");
return powershell.windows_update(ctx);
}
let usoclient = require("UsoClient")?;
print_separator("Windows Update"); print_separator("Windows Update");
println!("Running Windows Update. Check the control panel for progress.");
ctx.run_type() if powershell.supports_windows_update() {
.execute(&usoclient) powershell.windows_update(ctx)
.arg("ScanInstallWait") } else {
.status_checked()?; print_warning(
ctx.run_type().execute(&usoclient).arg("StartInstall").status_checked() "Consider installing PSWindowsUpdate as the use of Windows Update via USOClient is not supported.",
);
Err(SkipStep("USOClient not supported.".to_string()).into())
}
} }
pub fn reboot() -> Result<()> { pub fn reboot() -> Result<()> {
@@ -145,9 +220,8 @@ pub fn reboot() -> Result<()> {
Command::new("shutdown").args(["/R", "/T", "0"]).status_checked() Command::new("shutdown").args(["/R", "/T", "0"]).status_checked()
} }
pub fn insert_startup_scripts(ctx: &ExecutionContext, git_repos: &mut Repositories) -> Result<()> { pub fn insert_startup_scripts(git_repos: &mut RepoStep) -> Result<()> {
let startup_dir = ctx let startup_dir = crate::WINDOWS_DIRS
.base_dirs()
.data_dir() .data_dir()
.join("Microsoft\\Windows\\Start Menu\\Programs\\Startup"); .join("Microsoft\\Windows\\Start Menu\\Programs\\Startup");
for entry in std::fs::read_dir(&startup_dir)?.flatten() { for entry in std::fs::read_dir(&startup_dir)?.flatten() {


@@ -50,7 +50,7 @@ impl Powershell {
.args([
"-NoProfile",
"-Command",
- &format!("Get-Module -ListAvailable {}", command),
+ &format!("Get-Module -ListAvailable {command}"),
])
.output_checked_utf8()
.map(|result| !result.stdout.is_empty())
@@ -111,7 +111,7 @@ impl Powershell {
"-NoProfile", "-NoProfile",
"-Command", "-Command",
&format!( &format!(
"Import-Module PSWindowsUpdate; Install-WindowsUpdate -MicrosoftUpdate {} -Verbose", "Start-Process powershell -Verb runAs -ArgumentList 'Import-Module PSWindowsUpdate; Install-WindowsUpdate -MicrosoftUpdate {} -Verbose'",
if ctx.config().accept_all_windows_updates() { if ctx.config().accept_all_windows_updates() {
"-AcceptAll" "-AcceptAll"
} else { } else {


@@ -19,7 +19,7 @@ pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
args.extend(ssh_arguments.split_whitespace());
}
- let env = format!("TOPGRADE_PREFIX={}", hostname);
+ let env = format!("TOPGRADE_PREFIX={hostname}");
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
if ctx.config().run_in_tmux() && !ctx.run_type().dry() {
@@ -43,11 +43,11 @@ pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
args.extend(ssh_arguments.split_whitespace());
}
- let env = format!("TOPGRADE_PREFIX={}", hostname);
+ let env = format!("TOPGRADE_PREFIX={hostname}");
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
- print_separator(format!("Remote ({})", hostname));
- println!("Connecting to {}...", hostname);
+ print_separator(format!("Remote ({hostname})"));
+ println!("Connecting to {hostname}...");
ctx.run_type().execute(ssh).args(&args).status_checked()
} }


@@ -183,7 +183,7 @@ pub fn topgrade_vagrant_box(ctx: &ExecutionContext, vagrant_box: &VagrantBox) ->
let mut _poweron = None;
if !vagrant_box.initial_status.powered_on() {
if !(ctx.config().vagrant_power_on().unwrap_or(true)) {
- return Err(SkipStep(format!("Skipping powered off box {}", vagrant_box)).into());
+ return Err(SkipStep(format!("Skipping powered off box {vagrant_box}")).into());
} else {
print_separator(seperator);
_poweron = Some(vagrant.temporary_power_on(vagrant_box, ctx)?);


@@ -5,11 +5,10 @@ use std::process::Command;
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use color_eyre::eyre::Context; use color_eyre::eyre::Context;
use color_eyre::eyre::Result; use color_eyre::eyre::Result;
use directories::BaseDirs;
use crate::command::CommandExt; use crate::command::CommandExt;
use crate::executor::RunType;
use crate::terminal::print_separator; use crate::terminal::print_separator;
use crate::HOME_DIR;
use crate::{ use crate::{
execution_context::ExecutionContext, execution_context::ExecutionContext,
utils::{which, PathExt}, utils::{which, PathExt},
@@ -18,15 +17,19 @@ use crate::{
#[cfg(unix)] #[cfg(unix)]
use std::os::unix::process::CommandExt as _; use std::os::unix::process::CommandExt as _;
pub fn run_tpm(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> { pub fn run_tpm(ctx: &ExecutionContext) -> Result<()> {
let tpm = base_dirs let tpm = match env::var("TMUX_PLUGIN_MANAGER_PATH") {
.home_dir() // If `TMUX_PLUGIN_MANAGER_PATH` is set, search for
.join(".tmux/plugins/tpm/bin/update_plugins") // `$TMUX_PLUGIN_MANAGER_PATH/bin/install_plugins/tpm/bin/update_plugins`
.require()?; Ok(var) => PathBuf::from(var).join("bin/install_plugins/tpm/bin/update_plugins"),
// Otherwise, use the default location `~/.tmux/plugins/tpm/bin/update_plugins`
Err(_) => HOME_DIR.join(".tmux/plugins/tpm/bin/update_plugins"),
}
.require()?;
print_separator("tmux plugins"); print_separator("tmux plugins");
run_type.execute(tpm).arg("all").status_checked() ctx.run_type().execute(tpm).arg("all").status_checked()
} }
struct Tmux { struct Tmux {


@@ -42,7 +42,7 @@ pub fn run_toolbx(ctx: &ExecutionContext) -> Result<()> {
let topgrade_path = topgrade_path.to_str().unwrap();
for tb in toolboxes.iter() {
- let topgrade_prefix = format!("TOPGRADE_PREFIX='Toolbx {}'", tb);
+ let topgrade_prefix = format!("TOPGRADE_PREFIX='Toolbx {tb}'");
let mut args = vec![
"run",
"-c",
@@ -52,6 +52,8 @@ pub fn run_toolbx(ctx: &ExecutionContext) -> Result<()> {
topgrade_path, topgrade_path,
"--only", "--only",
"system", "system",
"--no-self-update",
"--skip-notify",
]; ];
if ctx.config().yes(Step::Toolbx) { if ctx.config().yes(Step::Toolbx) {
args.push("--yes"); args.push("--yes");

View File

@@ -1,3 +1,19 @@
+" AstroUpdate calls a plugin manager - Lazy as of this writing. So we check for it before
+" others. Add to init.lua:
+" updater = {
+" skip_prompts = true,
+" },
+if exists(":AstroUpdate")
+echo "AstroUpdate"
+AstroUpdate
+quitall
+endif
+if exists(":MasonUpdate")
+echo "MasonUpdate"
+MasonUpdate
+endif
 if exists(":NeoBundleUpdate")
 echo "NeoBundle"
 NeoBundleUpdate
@@ -33,23 +49,15 @@ if exists(":PaqUpdate")
 PaqUpdate
 endif
-if exists(":CocUpdateSync")
-echo "CocUpdateSync"
-CocUpdateSync
+if exists(":Lazy")
+echo "Lazy Update"
+Lazy! sync | qa
 endif
-" TODO: Should this be after `PackerSync`?
-" Not sure how to sequence this after Packer without doing something weird
-" with that `PackerComplete` autocommand.
-if exists(":TSUpdate")
-echo "TreeSitter Update"
-TSUpdate
-endif
 if exists(':PackerSync')
 echo "Packer"
 autocmd User PackerComplete quitall
 PackerSync
 else
 quitall
 endif

View File

@@ -1,14 +1,15 @@
 use crate::command::CommandExt;
 use crate::error::{SkipStep, TopgradeError};
+use crate::HOME_DIR;
 use color_eyre::eyre::Result;
+use etcetera::base_strategy::BaseStrategy;
-use crate::executor::{Executor, ExecutorOutput, RunType};
+use crate::executor::{Executor, ExecutorOutput};
 use crate::terminal::print_separator;
 use crate::{
 execution_context::ExecutionContext,
 utils::{require, PathExt},
 };
-use directories::BaseDirs;
 use std::path::PathBuf;
 use std::{
 io::{self, Write},
@@ -18,22 +19,19 @@ use tracing::debug;
 const UPGRADE_VIM: &str = include_str!("upgrade.vim");
-pub fn vimrc(base_dirs: &BaseDirs) -> Result<PathBuf> {
-base_dirs
-.home_dir()
+pub fn vimrc() -> Result<PathBuf> {
+HOME_DIR
 .join(".vimrc")
 .require()
-.or_else(|_| base_dirs.home_dir().join(".vim/vimrc").require())
+.or_else(|_| HOME_DIR.join(".vim/vimrc").require())
 }
-fn nvimrc(base_dirs: &BaseDirs) -> Result<PathBuf> {
+fn nvimrc() -> Result<PathBuf> {
 #[cfg(unix)]
-let base_dir =
-// Bypass directories crate as nvim doesn't use the macOS-specific directories.
-std::env::var_os("XDG_CONFIG_HOME").map_or_else(|| base_dirs.home_dir().join(".config"), PathBuf::from);
+let base_dir = crate::XDG_DIRS.config_dir();
 #[cfg(windows)]
-let base_dir = base_dirs.cache_dir();
+let base_dir = crate::WINDOWS_DIRS.cache_dir();
 base_dir
 .join("nvim/init.vim")
@@ -59,8 +57,8 @@ fn upgrade(command: &mut Executor, ctx: &ExecutionContext) -> Result<()> {
 let status = output.status;
 if !status.success() || ctx.config().verbose() {
-io::stdout().write(&output.stdout).ok();
-io::stderr().write(&output.stderr).ok();
+io::stdout().write_all(&output.stdout).ok();
+io::stderr().write_all(&output.stderr).ok();
 }
 if !status.success() {
@@ -74,7 +72,7 @@ fn upgrade(command: &mut Executor, ctx: &ExecutionContext) -> Result<()> {
 }
 pub fn upgrade_ultimate_vimrc(ctx: &ExecutionContext) -> Result<()> {
-let config_dir = ctx.base_dirs().home_dir().join(".vim_runtime").require()?;
+let config_dir = HOME_DIR.join(".vim_runtime").require()?;
 let git = require("git")?;
 let python = require("python3")?;
 let update_plugins = config_dir.join("update_plugins.py").require()?;
@@ -105,7 +103,7 @@ pub fn upgrade_ultimate_vimrc(ctx: &ExecutionContext) -> Result<()> {
 Ok(())
 }
-pub fn upgrade_vim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()> {
+pub fn upgrade_vim(ctx: &ExecutionContext) -> Result<()> {
 let vim = require("vim")?;
 let output = Command::new(&vim).arg("--version").output_checked_utf8()?;
@@ -113,7 +111,7 @@ pub fn upgrade_vim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()> {
 return Err(SkipStep(String::from("vim binary might be actually nvim")).into());
 }
-let vimrc = vimrc(base_dirs)?;
+let vimrc = vimrc()?;
 print_separator("Vim");
 upgrade(
@@ -127,9 +125,9 @@ pub fn upgrade_vim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()> {
 )
 }
-pub fn upgrade_neovim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()> {
+pub fn upgrade_neovim(ctx: &ExecutionContext) -> Result<()> {
 let nvim = require("nvim")?;
-let nvimrc = nvimrc(base_dirs)?;
+let nvimrc = nvimrc()?;
 print_separator("Neovim");
 upgrade(
@@ -143,10 +141,10 @@ pub fn upgrade_neovim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()
 )
 }
-pub fn run_voom(_base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
+pub fn run_voom(ctx: &ExecutionContext) -> Result<()> {
 let voom = require("voom")?;
 print_separator("voom");
-run_type.execute(voom).arg("update").status_checked()
+ctx.run_type().execute(voom).arg("update").status_checked()
 }
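The vim/neovim steps now resolve their config paths from crate-level directory statics instead of a `BaseDirs` parameter. As a rough standalone sketch of what the Unix `nvimrc()` resolution amounts to, where `config_dir` is a simplified, hypothetical stand-in for `crate::XDG_DIRS.config_dir()`:

```rust
use std::env;
use std::path::PathBuf;

// Simplified stand-in for crate::XDG_DIRS.config_dir():
// $XDG_CONFIG_HOME if set, otherwise ~/.config.
fn config_dir() -> PathBuf {
    env::var_os("XDG_CONFIG_HOME")
        .map(PathBuf::from)
        .unwrap_or_else(|| {
            let home = env::var_os("HOME").map(PathBuf::from).unwrap_or_default();
            home.join(".config")
        })
}

fn main() {
    // The init file the Neovim step looks for on Unix.
    println!("{}", config_dir().join("nvim/init.vim").display());
}
```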

View File

@@ -1,118 +1,149 @@
 use std::env;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
 use std::process::Command;
 use color_eyre::eyre::Result;
-use directories::BaseDirs;
 use tracing::debug;
 use walkdir::WalkDir;
 use crate::command::CommandExt;
 use crate::execution_context::ExecutionContext;
-use crate::executor::RunType;
-use crate::git::Repositories;
+use crate::git::RepoStep;
 use crate::terminal::print_separator;
 use crate::utils::{require, PathExt};
+use crate::HOME_DIR;
+use crate::XDG_DIRS;
+use etcetera::base_strategy::BaseStrategy;
-pub fn run_zr(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
+pub fn run_zr(ctx: &ExecutionContext) -> Result<()> {
 let zsh = require("zsh")?;
 require("zr")?;
 print_separator("zr");
-let cmd = format!("source {} && zr --update", zshrc(base_dirs).display());
+let cmd = format!("source {} && zr --update", zshrc().display());
-run_type.execute(zsh).args(["-l", "-c", cmd.as_str()]).status_checked()
+ctx.run_type()
+.execute(zsh)
+.args(["-l", "-c", cmd.as_str()])
+.status_checked()
 }
-pub fn zshrc(base_dirs: &BaseDirs) -> PathBuf {
+fn zdotdir() -> PathBuf {
 env::var("ZDOTDIR")
-.map(|p| Path::new(&p).join(".zshrc"))
-.unwrap_or_else(|_| base_dirs.home_dir().join(".zshrc"))
+.map(PathBuf::from)
+.unwrap_or_else(|_| HOME_DIR.clone())
 }
-pub fn run_antibody(run_type: RunType) -> Result<()> {
+pub fn zshrc() -> PathBuf {
+zdotdir().join(".zshrc")
+}
+pub fn run_antidote(ctx: &ExecutionContext) -> Result<()> {
+let zsh = require("zsh")?;
+let mut antidote = zdotdir().join(".antidote").require()?;
+antidote.push("antidote.zsh");
+print_separator("antidote");
+ctx.run_type()
+.execute(zsh)
+.arg("-c")
+.arg(format!("source {} && antidote update", antidote.display()))
+.status_checked()
+}
+pub fn run_antibody(ctx: &ExecutionContext) -> Result<()> {
 require("zsh")?;
 let antibody = require("antibody")?;
 print_separator("antibody");
-run_type.execute(antibody).arg("update").status_checked()
+ctx.run_type().execute(antibody).arg("update").status_checked()
 }
-pub fn run_antigen(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
+pub fn run_antigen(ctx: &ExecutionContext) -> Result<()> {
 let zsh = require("zsh")?;
-let zshrc = zshrc(base_dirs).require()?;
+let zshrc = zshrc().require()?;
 env::var("ADOTDIR")
 .map(PathBuf::from)
-.unwrap_or_else(|_| base_dirs.home_dir().join("antigen.zsh"))
+.unwrap_or_else(|_| HOME_DIR.join("antigen.zsh"))
 .require()?;
 print_separator("antigen");
 let cmd = format!("source {} && (antigen selfupdate ; antigen update)", zshrc.display());
-run_type.execute(zsh).args(["-l", "-c", cmd.as_str()]).status_checked()
+ctx.run_type()
+.execute(zsh)
+.args(["-l", "-c", cmd.as_str()])
+.status_checked()
 }
-pub fn run_zgenom(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
+pub fn run_zgenom(ctx: &ExecutionContext) -> Result<()> {
 let zsh = require("zsh")?;
-let zshrc = zshrc(base_dirs).require()?;
+let zshrc = zshrc().require()?;
 env::var("ZGEN_SOURCE")
 .map(PathBuf::from)
-.unwrap_or_else(|_| base_dirs.home_dir().join(".zgenom"))
+.unwrap_or_else(|_| HOME_DIR.join(".zgenom"))
 .require()?;
 print_separator("zgenom");
 let cmd = format!("source {} && zgenom selfupdate && zgenom update", zshrc.display());
-run_type.execute(zsh).args(["-l", "-c", cmd.as_str()]).status_checked()
+ctx.run_type()
+.execute(zsh)
+.args(["-l", "-c", cmd.as_str()])
+.status_checked()
 }
-pub fn run_zplug(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
+pub fn run_zplug(ctx: &ExecutionContext) -> Result<()> {
 let zsh = require("zsh")?;
-zshrc(base_dirs).require()?;
+zshrc().require()?;
 env::var("ZPLUG_HOME")
 .map(PathBuf::from)
-.unwrap_or_else(|_| base_dirs.home_dir().join(".zplug"))
+.unwrap_or_else(|_| HOME_DIR.join(".zplug"))
 .require()?;
 print_separator("zplug");
-run_type
+ctx.run_type()
 .execute(zsh)
 .args(["-i", "-c", "zplug update"])
 .status_checked()
 }
-pub fn run_zinit(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
+pub fn run_zinit(ctx: &ExecutionContext) -> Result<()> {
 let zsh = require("zsh")?;
-let zshrc = zshrc(base_dirs).require()?;
+let zshrc = zshrc().require()?;
 env::var("ZINIT_HOME")
 .map(PathBuf::from)
-.unwrap_or_else(|_| base_dirs.home_dir().join(".zinit"))
+.unwrap_or_else(|_| XDG_DIRS.data_dir().join("zinit"))
 .require()?;
 print_separator("zinit");
-let cmd = format!("source {} && zinit self-update && zinit update --all", zshrc.display(),);
+let cmd = format!("source {} && zinit self-update && zinit update --all", zshrc.display());
-run_type.execute(zsh).args(["-i", "-c", cmd.as_str()]).status_checked()
+ctx.run_type()
+.execute(zsh)
+.args(["-i", "-c", cmd.as_str()])
+.status_checked()
 }
-pub fn run_zi(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
+pub fn run_zi(ctx: &ExecutionContext) -> Result<()> {
 let zsh = require("zsh")?;
-let zshrc = zshrc(base_dirs).require()?;
-base_dirs.home_dir().join(".zi").require()?;
+let zshrc = zshrc().require()?;
+HOME_DIR.join(".zi").require()?;
 print_separator("zi");
-let cmd = format!("source {} && zi self-update && zi update --all", zshrc.display(),);
-run_type.execute(zsh).args(["-i", "-c", &cmd]).status_checked()
+let cmd = format!("source {} && zi self-update && zi update --all", zshrc.display());
+ctx.run_type().execute(zsh).args(["-i", "-c", &cmd]).status_checked()
 }
-pub fn run_zim(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
+pub fn run_zim(ctx: &ExecutionContext) -> Result<()> {
 let zsh = require("zsh")?;
 env::var("ZIM_HOME")
 .or_else(|_| {
@@ -123,12 +154,12 @@ pub fn run_zim(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
 .map(|o| o.stdout)
 })
 .map(PathBuf::from)
-.unwrap_or_else(|_| base_dirs.home_dir().join(".zim"))
+.unwrap_or_else(|_| HOME_DIR.join(".zim"))
 .require()?;
 print_separator("zim");
-run_type
+ctx.run_type()
 .execute(zsh)
 .args(["-i", "-c", "zimfw upgrade && zimfw update"])
 .status_checked()
@@ -136,7 +167,34 @@ pub fn run_zim(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
 pub fn run_oh_my_zsh(ctx: &ExecutionContext) -> Result<()> {
 require("zsh")?;
-let oh_my_zsh = ctx.base_dirs().home_dir().join(".oh-my-zsh").require()?;
+// When updating `oh-my-zsh` on a remote machine through topgrade, the
+// following processes will be created:
+//
+// SSH -> ZSH -> ZSH ($SHELL) -> topgrade -> ZSH
+//
+// The first ZSH process, won't source zshrc (as it is a login shell),
+// and thus it won't have the ZSH environment variable, as a result, the
+// children processes won't get it either, so we source the zshrc and set
+// the ZSH variable for topgrade here.
+if ctx.under_ssh() {
+let res_env_zsh = Command::new("zsh")
+.args(["-ic", "print -rn -- ${ZSH:?}"])
+.output_checked_utf8();
+// this command will fail if `ZSH` is not set
+if let Ok(output) = res_env_zsh {
+let env_zsh = output.stdout;
+debug!("Oh-my-zsh: under SSH, setting ZSH={}", env_zsh);
+env::set_var("ZSH", env_zsh);
+}
+}
+let oh_my_zsh = env::var("ZSH")
+.map(PathBuf::from)
+// default to `~/.oh-my-zsh`
+.unwrap_or(HOME_DIR.join(".oh-my-zsh"))
+.require()?;
 print_separator("oh-my-zsh");
@@ -161,22 +219,20 @@ pub fn run_oh_my_zsh(ctx: &ExecutionContext) -> Result<()> {
 debug!("oh-my-zsh custom dir: {}", custom_dir.display());
-let mut custom_repos = Repositories::new(ctx.git());
+let mut custom_repos = RepoStep::try_new()?;
 for entry in WalkDir::new(custom_dir).max_depth(2) {
 let entry = entry?;
 custom_repos.insert_if_repo(entry.path());
 }
-custom_repos.remove(&oh_my_zsh.to_string_lossy());
+custom_repos.remove(&oh_my_zsh);
-if !custom_repos.is_empty() {
-println!("Pulling custom plugins and themes");
-ctx.git().multi_pull(&custom_repos, ctx)?;
-}
 ctx.run_type()
 .execute("zsh")
+.env("ZSH", &oh_my_zsh)
 .arg(&oh_my_zsh.join("tools/upgrade.sh"))
+// oh-my-zsh returns 80 when it is already updated and no changes pulled
+// in this update.
+// See this comment: https://github.com/r-darwish/topgrade/issues/569#issuecomment-736756731
+// for more information.
 .status_checked_with_codes(&[80])
 }
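The recurring pattern in this file is an environment-variable override with a home-relative fallback. A minimal sketch of the new `zdotdir()`/`zshrc()` pair under that assumption, where `home_dir` is a simplified stand-in for the crate's `HOME_DIR` static:

```rust
use std::env;
use std::path::PathBuf;

// Simplified stand-in for the crate's HOME_DIR static.
fn home_dir() -> PathBuf {
    env::var_os("HOME").map(PathBuf::from).unwrap_or_default()
}

// ZDOTDIR, when set, is where zsh looks for its dotfiles; otherwise $HOME.
fn zdotdir() -> PathBuf {
    env::var("ZDOTDIR").map(PathBuf::from).unwrap_or_else(|_| home_dir())
}

fn zshrc() -> PathBuf {
    zdotdir().join(".zshrc")
}

fn main() {
    println!(".zshrc would be read from {}", zshrc().display());
}
```

Every plugin-manager step above (ADOTDIR, ZGEN_SOURCE, ZPLUG_HOME, ZINIT_HOME, ZIM_HOME) follows the same shape, only with a different variable and default directory.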

View File

@@ -4,6 +4,8 @@ use std::path::PathBuf;
 use color_eyre::eyre::Context;
 use color_eyre::eyre::Result;
+use serde::Deserialize;
+use strum::AsRefStr;
 use crate::command::CommandExt;
 use crate::execution_context::ExecutionContext;
@@ -27,9 +29,15 @@ impl Sudo {
 .or_else(|| which("sudo").map(|p| (p, SudoKind::Sudo)))
 .or_else(|| which("gsudo").map(|p| (p, SudoKind::Gsudo)))
 .or_else(|| which("pkexec").map(|p| (p, SudoKind::Pkexec)))
+.or_else(|| which("please").map(|p| (p, SudoKind::Please)))
 .map(|(path, kind)| Self { path, kind })
 }
+/// Create Sudo from SudoKind, if found in the system
+pub fn new(kind: SudoKind) -> Option<Self> {
+which(kind.as_ref()).map(|path| Self { path, kind })
+}
 /// Elevate permissions with `sudo`.
 ///
 /// This helps prevent blocking `sudo` prompts from stopping the run in the middle of a
@@ -71,6 +79,12 @@ impl Sudo {
 // See: https://linux.die.net/man/1/pkexec
 cmd.arg("echo");
 }
+SudoKind::Please => {
+// From `man please`
+// -w, --warm
+// Warm the access token and exit.
+cmd.arg("-w");
+}
 }
 cmd.status_checked().wrap_err("Failed to elevate permissions")
 }
@@ -93,12 +107,15 @@ impl Sudo {
 }
 }
-#[derive(Clone, Copy, Debug)]
-enum SudoKind {
+#[derive(Clone, Copy, Debug, Deserialize, AsRefStr)]
+#[serde(rename_all = "lowercase")]
+#[strum(serialize_all = "lowercase")]
+pub enum SudoKind {
 Doas,
 Sudo,
 Gsudo,
 Pkexec,
+Please,
 }
 impl AsRef<OsStr> for Sudo {
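The new derives are what tie the `please` support together: `AsRefStr` with lowercase serialization is what lets `Sudo::new` look the binary up via `which(kind.as_ref())`, and `Deserialize` presumably allows the kind to be named in the configuration. A small sketch with the same derives (assumes the serde and strum crates with their derive features enabled):

```rust
use serde::Deserialize;
use strum::AsRefStr;

#[derive(Clone, Copy, Debug, Deserialize, AsRefStr)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
pub enum SudoKind {
    Doas,
    Sudo,
    Gsudo,
    Pkexec,
    Please,
}

fn main() {
    // Prints "please" -- the same lowercase string a user would write in a
    // config file and the binary name looked up on $PATH.
    println!("{}", SudoKind::Please.as_ref());
}
```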

View File

@@ -1,8 +1,6 @@
 use std::cmp::{max, min};
 use std::env;
 use std::io::{self, Write};
-#[cfg(target_os = "linux")]
-use std::path::PathBuf;
 use std::process::Command;
 use std::sync::Mutex;
 use std::time::Duration;
@@ -12,7 +10,6 @@ use color_eyre::eyre;
 use color_eyre::eyre::Context;
 use console::{style, Key, Term};
 use lazy_static::lazy_static;
-#[cfg(target_os = "macos")]
 use notify_rust::{Notification, Timeout};
 use tracing::{debug, error};
 #[cfg(windows)]
@@ -20,10 +17,7 @@ use which_crate::which;
 use crate::command::CommandExt;
 use crate::report::StepResult;
-#[cfg(target_os = "linux")]
-use crate::terminal;
-#[cfg(target_os = "linux")]
-use crate::utils::which;
 lazy_static! {
 static ref TERMINAL: Mutex<Terminal> = Mutex::new(Terminal::new());
 }
@@ -49,8 +43,6 @@ struct Terminal {
 set_title: bool,
 display_time: bool,
 desktop_notification: bool,
-#[cfg(target_os = "linux")]
-notify_send: Option<PathBuf>,
 }
 impl Terminal {
@@ -60,13 +52,11 @@ impl Terminal {
 width: term.size_checked().map(|(_, w)| w),
 term,
 prefix: env::var("TOPGRADE_PREFIX")
-.map(|prefix| format!("({}) ", prefix))
+.map(|prefix| format!("({prefix}) "))
 .unwrap_or_else(|_| String::new()),
 set_title: true,
 display_time: true,
 desktop_notification: false,
-#[cfg(target_os = "linux")]
-notify_send: which("notify-send"),
 }
 }
@@ -82,35 +72,18 @@ impl Terminal {
 self.display_time = display_time
 }
-#[allow(unused_variables)]
 fn notify_desktop<P: AsRef<str>>(&self, message: P, timeout: Option<Duration>) {
 debug!("Desktop notification: {}", message.as_ref());
-cfg_if::cfg_if! {
-if #[cfg(target_os = "macos")] {
-let mut notification = Notification::new();
-notification.summary("Topgrade")
-.body(message.as_ref())
-.appname("topgrade");
+let mut notification = Notification::new();
+notification
+.summary("Topgrade")
+.body(message.as_ref())
+.appname("topgrade");
 if let Some(timeout) = timeout {
 notification.timeout(Timeout::Milliseconds(timeout.as_millis() as u32));
 }
-notification.show().ok();
-} else if #[cfg(target_os = "linux")] {
-if let Some(ns) = self.notify_send.as_ref() {
-let mut command = Command::new(ns);
-if let Some(timeout) = timeout {
-command.arg("-t");
-command.arg(format!("{}", timeout.as_millis()));
-}
-command.args(["-a", "Topgrade", "Topgrade"]);
-command.arg(message.as_ref());
-if let Err(err) = command.output_checked() {
-terminal::print_warning("Senfing notification failed with {err:?}");
-}
-}
-}
 }
+notification.show().ok();
 }
 fn print_separator<P: AsRef<str>>(&mut self, message: P) {
@@ -143,7 +116,7 @@ impl Terminal {
 .write_fmt(format_args!(
 "{}\n",
 style(format_args!(
-"\n―― {} {:^border$}",
+"\n── {} {:^border$}",
 message,
 "",
 border = max(
@@ -159,7 +132,7 @@ impl Terminal {
 .ok();
 }
 None => {
-self.term.write_fmt(format_args!("―― {} ――\n", message)).ok();
+self.term.write_fmt(format_args!("―― {message} ――\n")).ok();
 }
 }
 }
@@ -171,7 +144,7 @@ impl Terminal {
 self.term
 .write_fmt(format_args!(
 "{} {}",
-style(format!("{} failed:", key)).red().bold(),
+style(format!("{key} failed:")).red().bold(),
 message
 ))
 .ok();
@@ -215,7 +188,7 @@ impl Terminal {
 self.term
 .write_fmt(format_args!(
 "{}",
-style(format!("{} (y)es/(N)o", question,)).yellow().bold()
+style(format!("{question} (y)es/(N)o",)).yellow().bold()
 ))
 .ok();
@@ -237,13 +210,15 @@ impl Terminal {
 self.term.set_title("Topgrade - Awaiting user");
 }
-self.notify_desktop(format!("{} failed", step_name), None);
+if self.desktop_notification {
+self.notify_desktop(format!("{step_name} failed"), None);
+}
 let prompt_inner = style(format!("{}Retry? (y)es/(N)o/(s)hell/(q)uit", self.prefix))
 .yellow()
 .bold();
-self.term.write_fmt(format_args!("\n{}", prompt_inner)).ok();
+self.term.write_fmt(format_args!("\n{prompt_inner}")).ok();
 let answer = loop {
 match self.term.read_key() {
@@ -251,7 +226,7 @@ impl Terminal {
 Ok(Key::Char('s')) | Ok(Key::Char('S')) => {
 println!("\n\nDropping you to shell. Fix what you need and then exit the shell.\n");
 if let Err(err) = run_shell().context("Failed to run shell") {
-self.term.write_fmt(format_args!("{err:?}\n{}", prompt_inner)).ok();
+self.term.write_fmt(format_args!("{err:?}\n{prompt_inner}")).ok();
 } else {
 break Ok(true);
 }
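Notifications are now a single notify-rust call on every supported platform instead of a `cfg_if` split with a hand-rolled `notify-send` invocation on Linux. Roughly, the unified logic looks like this on its own (a sketch assuming the notify-rust crate):

```rust
use std::time::Duration;

use notify_rust::{Notification, Timeout};

// Send a desktop notification, ignoring failures: a missing notification
// daemon should never abort an upgrade run.
fn notify_desktop(message: &str, timeout: Option<Duration>) {
    let mut notification = Notification::new();
    notification
        .summary("Topgrade")
        .body(message)
        .appname("topgrade");
    if let Some(timeout) = timeout {
        notification.timeout(Timeout::Milliseconds(timeout.as_millis() as u32));
    }
    notification.show().ok();
}

fn main() {
    notify_desktop("Step finished", Some(Duration::from_secs(5)));
}
```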

View File

@@ -1,11 +1,21 @@
-use crate::error::SkipStep;
-use color_eyre::eyre::Result;
 use std::env;
 use std::ffi::OsStr;
 use std::fmt::Debug;
 use std::path::{Path, PathBuf};
+use std::process::Command;
+use color_eyre::eyre::Result;
 use tracing::{debug, error};
+use tracing_subscriber::layer::SubscriberExt;
+use tracing_subscriber::reload::{Handle, Layer};
+use tracing_subscriber::util::SubscriberInitExt;
+use tracing_subscriber::{fmt, Registry};
+use tracing_subscriber::{registry, EnvFilter};
+use crate::command::CommandExt;
+use crate::config::DEFAULT_LOG_LEVEL;
+use crate::error::SkipStep;
 pub trait PathExt
 where
@@ -101,54 +111,162 @@ pub fn require_option<T>(option: Option<T>, cause: String) -> Result<T> {
 }
 }
-/* sys-info-rs
-*
-* Copyright (c) 2015 Siyu Wang
-*
-* Permission is hereby granted, free of charge, to any person obtaining a copy
-* of this software and associated documentation files (the "Software"), to deal
-* in the Software without restriction, including without limitation the rights
-* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the Software is
-* furnished to do so, subject to the following conditions:
-*
-* The above copyright notice and this permission notice shall be included in all
-* copies or substantial portions of the Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-* SOFTWARE.
-*/
+pub fn string_prepend_str(string: &mut String, s: &str) {
+let mut new_string = String::with_capacity(string.len() + s.len());
+new_string.push_str(s);
+new_string.push_str(string);
+*string = new_string;
+}
 #[cfg(target_family = "unix")]
 pub fn hostname() -> Result<String> {
-use std::ffi;
-extern crate libc;
-unsafe {
-let buf_size = libc::sysconf(libc::_SC_HOST_NAME_MAX) as usize;
-let mut buf = Vec::<u8>::with_capacity(buf_size + 1);
-if libc::gethostname(buf.as_mut_ptr() as *mut libc::c_char, buf_size) < 0 {
-return Err(SkipStep(format!("Failed to get hostname: {}", std::io::Error::last_os_error())).into());
-}
-let hostname_len = libc::strnlen(buf.as_ptr() as *const libc::c_char, buf_size);
-buf.set_len(hostname_len);
-Ok(ffi::CString::new(buf).unwrap().into_string().unwrap())
+match nix::unistd::gethostname() {
+Ok(os_str) => Ok(os_str
+.into_string()
+.map_err(|_| SkipStep("Failed to get a UTF-8 encoded hostname".into()))?),
+Err(e) => Err(e.into()),
 }
 }
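The hand-written `libc::gethostname` block above is replaced by the nix crate, which owns the buffer handling. A compact standalone sketch of the same call (assumes a recent nix release with its `hostname` feature enabled):

```rust
// Unix-only sketch: fetch the hostname via nix and reject non-UTF-8 names.
#[cfg(unix)]
fn hostname() -> Option<String> {
    nix::unistd::gethostname()
        .ok()
        .and_then(|os_string| os_string.into_string().ok())
}

#[cfg(unix)]
fn main() {
    println!("{:?}", hostname());
}

#[cfg(not(unix))]
fn main() {}
```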
 #[cfg(target_family = "windows")]
 pub fn hostname() -> Result<String> {
-use crate::command::CommandExt;
-use std::process::Command;
 Command::new("hostname")
 .output_checked_utf8()
-.map_err(|err| SkipStep(format!("Failed to get hostname: {}", err)).into())
+.map_err(|err| SkipStep(format!("Failed to get hostname: {err}")).into())
 .map(|output| output.stdout.trim().to_owned())
 }
pub mod merge_strategies {
use merge::Merge;
use crate::config::Commands;
/// Prepends right to left (both Option<Vec<T>>)
pub fn vec_prepend_opt<T>(left: &mut Option<Vec<T>>, right: Option<Vec<T>>) {
if let Some(left_vec) = left {
if let Some(mut right_vec) = right {
right_vec.append(left_vec);
let _ = std::mem::replace(left, Some(right_vec));
}
} else {
*left = right;
}
}
/// Appends an Option<String> to another Option<String>
pub fn string_append_opt(left: &mut Option<String>, right: Option<String>) {
if let Some(left_str) = left {
if let Some(right_str) = right {
left_str.push(' ');
left_str.push_str(&right_str);
}
} else {
*left = right;
}
}
pub fn inner_merge_opt<T>(left: &mut Option<T>, right: Option<T>)
where
T: Merge,
{
if let Some(ref mut left_inner) = left {
if let Some(right_inner) = right {
left_inner.merge(right_inner);
}
} else {
*left = right;
}
}
pub fn commands_merge_opt(left: &mut Option<Commands>, right: Option<Commands>) {
if let Some(ref mut left_inner) = left {
if let Some(right_inner) = right {
left_inner.extend(right_inner);
}
} else {
*left = right;
}
}
}
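To make the prepend semantics concrete, here is a standalone sketch of `vec_prepend_opt` with a tiny usage check; the helper body mirrors the module above, while the surrounding `merge` attribute wiring is omitted and the sample values are purely illustrative.

```rust
// Same prepend semantics as merge_strategies::vec_prepend_opt, shown without
// the `merge` crate wiring: values from `right` end up ahead of `left`'s.
fn vec_prepend_opt<T>(left: &mut Option<Vec<T>>, right: Option<Vec<T>>) {
    if let Some(left_vec) = left {
        if let Some(mut right_vec) = right {
            right_vec.append(left_vec);
            let _ = std::mem::replace(left, Some(right_vec));
        }
    } else {
        *left = right;
    }
}

fn main() {
    let mut base = Some(vec!["from-config-a", "from-config-b"]);
    let overlay = Some(vec!["from-overlay"]);
    vec_prepend_opt(&mut base, overlay);
    assert_eq!(
        base,
        Some(vec!["from-overlay", "from-config-a", "from-config-b"])
    );
    println!("{base:?}");
}
```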
// Skip causes
// TODO: Put them in a better place when we have more of them
pub const REQUIRE_SUDO: &str = "Require sudo or counterpart but not found, skip";
/// Return `Err(SkipStep)` if `python` is a Python 2 or shim.
///
/// # Shim
/// On Windows, if you install `python` through `winget`, an actual `python`
/// is installed as well as a `python3` shim. Shim is invokable, but when you
/// execute it, the Microsoft App Store will be launched instead of a Python
/// shell.
///
/// We do this check through `python -V`, a shim will just give `Python` with
/// no version number.
pub fn check_is_python_2_or_shim(python: PathBuf) -> Result<PathBuf> {
let output = Command::new(&python).arg("-V").output_checked_utf8()?;
// "Python x.x.x\n"
let stdout = output.stdout;
// ["Python"] or ["Python", "x.x.x"], the newline char is trimmed.
let mut split = stdout.split_whitespace();
if let Some(version) = split.nth(1) {
let major_version = version
.split('.')
.next()
.expect("Should have a major version number")
.parse::<u32>()
.expect("Major version should be a valid number");
if major_version == 2 {
return Err(SkipStep(format!("{} is a Python 2, skip.", python.display())).into());
}
} else {
// No version number, is a shim
return Err(SkipStep(format!("{} is a Python shim, skip.", python.display())).into());
}
Ok(python)
}
/// Set up the tracing logger
///
/// # Return value
/// A reload handle will be returned so that we can change the log level at
/// runtime.
pub fn install_tracing(filter_directives: &str) -> Result<Handle<EnvFilter, Registry>> {
let env_filter = EnvFilter::try_new(filter_directives)
.or_else(|_| EnvFilter::try_from_default_env())
.or_else(|_| EnvFilter::try_new(DEFAULT_LOG_LEVEL))?;
let fmt_layer = fmt::layer().with_target(false).without_time();
let (filter, reload_handle) = Layer::new(env_filter);
registry().with(filter).with(fmt_layer).init();
Ok(reload_handle)
}
/// Update the tracing logger with new `filter_directives`.
pub fn update_tracing(reload_handle: &Handle<EnvFilter, Registry>, filter_directives: &str) -> Result<()> {
let new = EnvFilter::try_new(filter_directives)
.or_else(|_| EnvFilter::try_from_default_env())
.or_else(|_| EnvFilter::try_new(DEFAULT_LOG_LEVEL))?;
reload_handle.modify(|old| *old = new)?;
Ok(())
}
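A standalone sketch of the reload mechanism that `install_tracing`/`update_tracing` wrap, using tracing-subscriber directly (assumes the tracing and tracing-subscriber crates with the `env-filter` feature; the literal directives are only examples):

```rust
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::reload;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::{fmt, registry, EnvFilter};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Wrap the filter in a reload::Layer so it can be swapped at runtime.
    let (filter, reload_handle) = reload::Layer::new(EnvFilter::try_new("info")?);
    registry()
        .with(filter)
        .with(fmt::layer().with_target(false).without_time())
        .init();

    tracing::debug!("not visible at the initial level");

    // What update_tracing() amounts to: install a new filter in place.
    reload_handle.modify(|f| *f = EnvFilter::try_new("debug").unwrap())?;
    tracing::debug!("visible after the reload");
    Ok(())
}
```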
/// Set up the error handler crate
pub fn install_color_eyre() -> Result<()> {
color_eyre::config::HookBuilder::new()
// Don't display the backtrace reminder by default:
// Backtrace omitted. Run with RUST_BACKTRACE=1 environment variable to display it.
// Run with RUST_BACKTRACE=full to include source snippets.
.display_env_section(false)
// Display location information by default:
// Location:
// src/steps.rs:92
.display_location_section(true)
.install()
}