Compare commits
457 Commits
last-topgr...v16.0.2
[Commit table: Author | SHA1 | Date. 457 commits between `last-topgr` and `v16.0.2`, spanning 4df30c2587 through d7891c4754.]
.github/ISSUE_TEMPLATE/bug_report.md (83 changed lines)

@@ -2,32 +2,91 @@
name: Bug report
about: Topgrade is misbehaving
title: ''
labels: ''
labels: 'C-bug'
assignees: ''

---

<!-- If you're here to report a "No asset found" error, please make sure that an hour has passed since the last release was made. -->
<!--
Thanks for taking the time to fill out this bug report!
Please make sure to
[search for existing issues](https://github.com/topgrade-rs/topgrade/issues)
before filing a new one!

## What did you expect to happen?
Questions labeled with `Optional` can be skipped.
-->

<!--
If you're here to report a "No asset found" error, please make sure that
an hour has passed since the last release was made.
-->

## What actually happened?
## Erroneous Behavior
<!--
What actually happened?
-->

## Expected Behavior
<!--
Describe the expected behavior
-->

## Steps to reproduce
<!--
A minimal example to reproduce the issue
-->

## Possible Cause (Optional)
<!--
If you know the possible cause of the issue, please tell us.
-->

## Problem persists without calling from topgrade
<!--
Execute the erroneous command directly to see if the problem persists
-->
- [ ] Yes
- [ ] No

## Did you run topgrade through `Remote Execution`

- [ ] Yes
- [ ] No

If yes, does the issue still occur when you run topgrade directly in your
remote host

- [ ] Yes
- [ ] No

## Configuration file (Optional)
<!--
Paste your configuration file inside the code block if you think this issue is
related to configuration.
-->

```toml

```

## Additional Details
- Which operating system or Linux distribution are you using?
- How did you install Topgrade?
- Which version are you running? <!-- Check with `topgrade -V` -->
- Operating System/Version
  <!-- For example, Fedora Linux 38 -->

<!--
Run `topgrade --dry-run` to see which commands Topgrade is running.
If the command seems wrong and you know why please tell us so.
If the command seems fine try to run it yourself and tell us if you got a different result from Topgrade.
- Installation
<!--
How did you install topgrade: build from repo / crates.io (cargo install topgrade)
/ package manager (which one) / other (describe)
-->

- Topgrade version (`topgrade -V`)

## Verbose Output (`topgrade -v`)
<!--
Paste the verbose output into the pre-tags
-->

<details>
<!-- Paste the output of the problematic command with `-v` into the pre-tags -->
<pre>

</pre>
.github/ISSUE_TEMPLATE/config.yml (new file, 5 lines)

@@ -0,0 +1,5 @@
blank_issues_enabled: false
contact_links:
  - name: GitHub Discussions
    url: https://github.com/topgrade-rs/topgrade/discussions
    about: Please ask and answer questions here.
.github/ISSUE_TEMPLATE/feature_request.md (14 changed lines)

@@ -2,16 +2,20 @@
name: Feature request
about: Can you please support...?
title: ''
labels: ''
labels: 'C-feature request'
assignees: ''

---

## I want to suggest a new step
### Which tool is this about? Where is its repository?
### Which operating systems are supported by this tool?
### What should Topgrade do to figure out if the tool needs to be invoked?
### Which exact commands should Topgrade run?

* Which tool is this about? Where is its repository?
* Which operating systems are supported by this tool?
* What should Topgrade do to figure out if the tool needs to be invoked?
* Which exact commands should Topgrade run?
* Does it have a `--dry-run` option? i.e., print what should be done and exit
* Does it need the user to confirm the execution? And does it provide a `--yes`
  option to skip this step?

## I want to suggest some general feature
Topgrade should...
.github/PULL_REQUEST_TEMPLATE.md (21 changed lines)

@@ -1,12 +1,19 @@
## Standards checklist:
## What does this PR do

- [ ] The PR title is descriptive.
- [ ] The code compiles (`cargo build`)
- [ ] The code passes rustfmt (`cargo fmt`)
- [ ] The code passes clippy (`cargo clippy`)
- [ ] The code passes tests (`cargo test`)

## Standards checklist

- [ ] The PR title is descriptive
- [ ] I have read `CONTRIBUTING.md`
- [ ] *Optional:* I have tested the code myself
- [ ] I also tested that Topgrade skips the step where needed
- [ ] If this PR introduces new user-facing messages they are translated

## For new steps

- [ ] *Optional:* Topgrade skips this step where needed
- [ ] *Optional:* The `--dry-run` option works with this step
- [ ] *Optional:* The `--yes` option works with this step if it is supported by
  the underlying command

If you developed a feature or a bug fix for someone else and you do not have the
means to test it, please tag this person here.
.github/dependabot.yml (new file, 10 lines)

@@ -0,0 +1,10 @@
# Set update schedule for GitHub Actions

version: 2
updates:

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every week
      interval: "weekly"
.github/workflows/build-and-test.yml (deleted, 24 lines)

@@ -1,24 +0,0 @@
name: Cargo Build & Test

on:
  push:
  pull_request:

env:
  CARGO_TERM_COLOR: always

jobs:
  build_and_test:
    name: Rust project - latest
    runs-on: ubuntu-latest
    strategy:
      matrix:
        toolchain:
          - stable
          - beta
          - nightly
    steps:
      - uses: actions/checkout@v3
      - run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }}
      - run: cargo build --verbose
      - run: cargo test --verbose
.github/workflows/check-and-lint.yaml (deleted, 55 lines)

@@ -1,55 +0,0 @@
on:
  pull_request:
  push:
    branches:
      - main


name: Check and Lint

jobs:
  check:
    name: Check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - uses: actions-rs/cargo@v1
        with:
          command: check

  fmt:
    name: Rustfmt
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - run: rustup component add rustfmt
      - uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --all -- --check

  clippy:
    name: Clippy
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          components: clippy
          override: true
      - uses: actions-rs/clippy-check@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          args: --all-features
          name: Clippy Output
.github/workflows/check_config_creation_if_not_exists.yml (new file, 21 lines)

@@ -0,0 +1,21 @@
name: Check config file creation if not exists

on:
  pull_request:

env:
  CARGO_TERM_COLOR: always


jobs:
  TestConfig:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - run: |
          CONFIG_PATH=~/.config/topgrade.toml;
          if [ -f "$CONFIG_PATH" ]; then rm $CONFIG_PATH; fi
          cargo build;
          TOPGRADE_SKIP_BRKC_NOTIFY=true ./target/debug/topgrade --dry-run --only system;
          stat $CONFIG_PATH;
.github/workflows/check_i18n.yml (new file, 22 lines)

@@ -0,0 +1,22 @@
on:
  pull_request:
  push:
    branches:
      - main

name: Check i18n

jobs:
  check_locale:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install checker
        # Build it with the dev profile as this is faster and the checker still works
        run: |
          cargo install --git https://github.com/topgrade-rs/topgrade_i18n_locale_checker --profile dev

      - name: Run the checker
        run: topgrade_i18n_locale_checker --locale-file ./locales/app.yml --rust-src-to-check ./src
.github/workflows/check_security_vulnerability.yml (new file, 32 lines)

@@ -0,0 +1,32 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Check Security Vulnerability

on:
  pull_request:
  push:
    branches:
      - main

jobs:
  lint:
    name: DevSkim
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Run DevSkim scanner
        uses: microsoft/DevSkim-Action@v1

      - name: Upload DevSkim scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: devskim-results.sarif
@@ -2,16 +2,18 @@ on:
  release:
    types: [published, edited]

name: Check SemVer compliance and publish on release
name: Check SemVer compliance

jobs:
  prepare:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly-2022-08-03
          override: true
          components: rustfmt, clippy

  semver:
    runs-on: ubuntu-latest
@@ -23,12 +25,3 @@ jobs:
      - run: eval "current_version=$(grep -e '^version = .*$' Cargo.toml | cut -d ' ' -f 3)"
      - run: cargo semver | tee semver_out
      - run: (head -n 1 semver_out | grep "\-> $current_version") || (echo "versioning mismatch" && return 1)

  publish:
    steps:
      - uses: katyo/publish-crates@v1
        with:
          dry-run: true
          check-repo: ${{ github.event_name == 'push' }}
          registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
          ignore-unpublished-changes: true
.github/workflows/ci.yml (new file, 87 lines)

@@ -0,0 +1,87 @@
on:
  pull_request:
  push:
    branches:
      - main

name: CI

env:
  CROSS_VER: '0.2.5'
  CARGO_NET_RETRY: 3

jobs:
  fmt:
    name: Rustfmt
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Run cargo fmt
        env:
          TERM: xterm-256color
        run: |
          cargo fmt --all -- --check

  main:
    needs: fmt
    name: ${{ matrix.target_name }} (check, clippy)
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - target: x86_64-linux-android
            target_name: Android
            use_cross: true
            os: ubuntu-latest

          - target: x86_64-unknown-freebsd
            target_name: FreeBSD
            use_cross: true
            os: ubuntu-latest

          - target: x86_64-unknown-linux-gnu
            target_name: Linux
            os: ubuntu-latest

          - target: x86_64-apple-darwin
            target_name: macOS-x86_64
            os: macos-13

          - target: aarch64-apple-darwin
            target_name: macOS-aarch64
            os: macos-latest

          - target: x86_64-unknown-netbsd
            target_name: NetBSD
            use_cross: true
            os: ubuntu-latest

          - target: x86_64-pc-windows-msvc
            target_name: Windows
            os: windows-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Rust Cache
        uses: Swatinem/rust-cache@v2
        with:
          prefix-key: ${{ matrix.target }}

      - name: Setup cross
        if: matrix.use_cross == true
        run: curl -fL --retry 3 https://github.com/cross-rs/cross/releases/download/v${{ env.CROSS_VER }}/cross-x86_64-unknown-linux-musl.tar.gz | tar vxz -C /usr/local/bin

      - name: Run cargo/cross check
        run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} check --locked --target ${{ matrix.target }}

      - name: Run cargo/cross clippy
        run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} clippy --locked --target ${{ matrix.target }} --all-features -- -D warnings

      - name: Run cargo test
        # ONLY run test with cargo
        if: matrix.use_cross == false
        run: cargo test --locked --target ${{ matrix.target }}
.github/workflows/create_release_assets.yml (new file, 88 lines)

@@ -0,0 +1,88 @@
name: Publish release files for CD native environments

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  release:
    types: [ created ]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        platform: [ ubuntu-latest, macos-latest, macos-13, windows-latest ]
    runs-on: ${{ matrix.platform }}
    steps:
      - uses: actions/checkout@v4

      - name: Install cargo-deb
        run: cargo install cargo-deb
        if: ${{ matrix.platform == 'ubuntu-latest' }}
        shell: bash

      - name: Check format
        run: cargo fmt --all -- --check

      - name: Run clippy
        run: cargo clippy --all-targets --locked -- -D warnings

      - name: Run clippy (All features)
        run: cargo clippy --all-targets --locked --all-features -- -D warnings

      - name: Run tests
        run: cargo test

      - name: Build in Release profile with all features enabled
        run: cargo build --release --all-features

      - name: Rename Release (Unix)
        run: |
          cargo install default-target
          mkdir -p assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
          mv target/release/topgrade assets
          cd assets
          tar --format=ustar -czf $FILENAME.tar.gz topgrade
          rm topgrade
          ls .
        if: ${{ matrix.platform != 'windows-latest' }}
        shell: bash

      - name: Build Debian-based system binary and create package
        # First remove the binary built by previous steps
        # because we don't want the auto-update feature,
        # then build the new binary without auto-updating.
        run: |
          rm -rf target/release
          cargo build --release
          cargo deb --no-build --no-strip
        if: ${{ matrix.platform == 'ubuntu-latest' }}
        shell: bash

      - name: Move Debian-based system package
        run: |
          mkdir -p assets
          mv target/debian/*.deb assets
        if: ${{ matrix.platform == 'ubuntu-latest' }}
        shell: bash

      - name: Rename Release (Windows)
        run: |
          cargo install default-target
          mkdir assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
          mv target/release/topgrade.exe assets/topgrade.exe
          cd assets
          powershell Compress-Archive -Path * -Destination ${FILENAME}.zip
          rm topgrade.exe
          ls .
        if: ${{ matrix.platform == 'windows-latest' }}
        shell: bash

      - name: Release
        uses: softprops/action-gh-release@v2
        with:
          files: assets/*
.github/workflows/create_release_assets_cross.yml (new file, 91 lines)

@@ -0,0 +1,91 @@
name: Publish release files for non-cd-native environments

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  release:
    types: [ created ]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        target: [
          "aarch64-unknown-linux-gnu",
          "armv7-unknown-linux-gnueabihf",
          "x86_64-unknown-linux-musl",
          "aarch64-unknown-linux-musl",
          "x86_64-unknown-freebsd",
        ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install cargo-deb cross compilation dependencies
        run: sudo apt-get install libc6-arm64-cross libgcc-s1-arm64-cross
        if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' }}
        shell: bash

      - name: Install cargo-deb
        run: cargo install cargo-deb
        if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' }}
        shell: bash

      - name: install targets
        run: rustup target add ${{ matrix.target }}

      - name: install cross
        uses: taiki-e/install-action@v2
        with:
          tool: cross@0.2.5

      - name: Check format
        run: cross fmt --all -- --check

      - name: Run clippy
        run: cross clippy --all-targets --locked --target ${{matrix.target}} -- -D warnings

      - name: Run clippy (All features)
        run: cross clippy --locked --all-features --target ${{matrix.target}} -- -D warnings

      - name: Run tests
        run: cross test --target ${{matrix.target}}

      - name: Build in Release profile with all features enabled
        run: cross build --release --all-features --target ${{matrix.target}}

      - name: Rename Release
        run: |
          mkdir -p assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-${{matrix.target}}
          mv target/${{matrix.target}}/release/topgrade assets
          cd assets
          tar --format=ustar -czf $FILENAME.tar.gz topgrade
          rm topgrade
          ls .

      - name: Build Debian-based system package without autoupdate feature
        # First remove the binary built by previous steps
        # because we don't want the auto-update feature,
        # then build the new binary without auto-updating.
        run: |
          rm -rf target/${{matrix.target}}
          cross build --release --target ${{matrix.target}}
          cargo deb --target=${{matrix.target}} --no-build --no-strip
        if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' }}
        shell: bash

      - name: Move Debian-based system package
        run: |
          mkdir -p assets
          mv target/${{matrix.target}}/debian/*.deb assets
        if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' }}
        shell: bash

      - name: Release
        uses: softprops/action-gh-release@v2
        with:
          files: assets/*
.github/workflows/release-packaging.yaml (deleted, 28 lines)

@@ -1,28 +0,0 @@
on:
  workflow_dispatch:
  push:
    branches:
      - main

name: Release Packaging

jobs:
  release:
    name: Release Packaging
    env:
      PROJECT_NAME_UNDERSCORE: topgrade-rs
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - name: Release Build
        run: cargo build --release
      - name: 'Upload Artifact'
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.PROJECT_NAME_UNDERSCORE }}
          path: target/release/${{ env.PROJECT_NAME_UNDERSCORE }}
.github/workflows/release_to_aur.yml (new file, 31 lines)

@@ -0,0 +1,31 @@
name: Publish to AUR

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  push:
    tags:
      - "v*"

jobs:
  aur-publish:
    runs-on: ubuntu-latest
    steps:
      - name: Publish source AUR package
        uses: aksh1618/update-aur-package@v1.0.5
        with:
          tag_version_prefix: v
          package_name: topgrade
          commit_username: "Thomas Schönauer"
          commit_email: t.schoenauer@hgs-wt.at
          ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
      - name: Publish binary AUR package
        uses: aksh1618/update-aur-package@v1.0.5
        with:
          tag_version_prefix: v
          package_name: topgrade-bin
          commit_username: "Thomas Schönauer"
          commit_email: t.schoenauer@hgs-wt.at
          ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
.github/workflows/release_to_crates_io.yml (new file, 29 lines)

@@ -0,0 +1,29 @@
on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  release:
    types: [published]

name: Publish to crates.io on release

jobs:
  prepare:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: katyo/publish-crates@v2
        with:
          dry-run: true
          check-repo: ${{ github.event_name == 'push' }}
          registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
          ignore-unpublished-changes: true
.github/workflows/release_to_homebrew.yml (new file, 39 lines)

@@ -0,0 +1,39 @@
name: Publish to Homebrew

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  workflow_dispatch:
  push:
    tags:
      - "v*"

jobs:
  homebrew-publish:
    runs-on: ubuntu-latest
    steps:
      - name: Set up Homebrew
        id: set-up-homebrew
        uses: Homebrew/actions/setup-homebrew@master
      - name: Cache Homebrew Bundler RubyGems
        id: cache
        uses: actions/cache@v4
        with:
          path: ${{ steps.set-up-homebrew.outputs.gems-path }}
          key: ${{ runner.os }}-rubygems-${{ steps.set-up-homebrew.outputs.gems-hash }}
          restore-keys: ${{ runner.os }}-rubygems-

      - name: Install Homebrew Bundler RubyGems
        if: steps.cache.outputs.cache-hit != 'true'
        run: brew install-bundler-gems
      - name: Bump formulae
        uses: Homebrew/actions/bump-packages@master
        continue-on-error: true
        with:
          # Custom GitHub access token with only the 'public_repo' scope enabled
          token: ${{secrets.HOMEBREW_ACCESS_TOKEN}}
          # Bump only these formulae if outdated
          formulae: |
            topgrade
.github/workflows/release_to_pypi.yml (new file, 99 lines)

@@ -0,0 +1,99 @@
name: Update PyPi

on:
  release:
    types: [published]

permissions:
  contents: read

jobs:
  linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target: [x86_64, x86, aarch64]
    steps:
      - uses: actions/checkout@v4
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist
          sccache: 'true'
          manylinux: auto
      - name: Upload wheels
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  windows:
    runs-on: windows-latest
    strategy:
      matrix:
        target: [x64, x86]
    steps:
      - uses: actions/checkout@v4
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist
          sccache: 'true'
      - name: Upload wheels
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  macos:
    runs-on: macos-latest
    strategy:
      matrix:
        target: [x86_64, aarch64]
    steps:
      - uses: actions/checkout@v4
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist
          sccache: 'true'
      - name: Upload wheels
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  sdist:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Build sdist
        uses: PyO3/maturin-action@v1
        with:
          command: sdist
          args: --out dist
      - name: Upload sdist
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  release:
    name: Release
    runs-on: ubuntu-latest
    if: "startsWith(github.ref, 'refs/tags/')"
    needs: [linux, windows, macos, sdist]
    steps:
      - uses: actions/download-artifact@v3
        with:
          name: wheels
      - name: Publish to PyPI
        uses: PyO3/maturin-action@v1
        env:
          MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
        with:
          command: upload
          args: --skip-existing *
.github/workflows/release_to_winget.yml (new file, 13 lines)

@@ -0,0 +1,13 @@
name: Publish to WinGet
on:
  release:
    types: [released]
jobs:
  publish:
    runs-on: windows-latest
    steps:
      - uses: vedantmgoyal2009/winget-releaser@main
        with:
          identifier: topgrade-rs.topgrade
          max-versions-to-keep: 5 # keep only latest 5 versions
          token: ${{ secrets.WINGET_TOKEN }}
.github/workflows/release_win_osx.yml (deleted, 59 lines)

@@ -1,59 +0,0 @@
name: CD Win/MacOS Native

on:
  workflow_dispatch:
  release:
    types: [ created ]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        platform: [ macos-latest, windows-latest ]
    runs-on: ${{ matrix.platform }}
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          override: true
          components: rustfmt, clippy
      - uses: actions-rs/cargo@v1.0.1
        name: Run tests
        with:
          command: test
      - uses: actions-rs/cargo@v1.0.1
        name: Build
        with:
          command: build
          args: --release --all-features
      - name: Rename Release (Unix)
        run: |
          cargo install default-target
          mkdir assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
          mv target/release/topgrade assets
          cd assets
          tar --format=ustar -czf $FILENAME.tar.gz topgrade
          rm topgrade
          ls .
        if: ${{ matrix.platform != 'windows-latest' }}
        shell: bash
      - name: Rename Release (Windows)
        run: |
          cargo install default-target
          mkdir assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
          mv target/release/topgrade.exe assets/topgrade.exe
          cd assets
          powershell Compress-Archive -Path * -Destination ${FILENAME}.zip
          rm topgrade.exe
          ls .
        if: ${{ matrix.platform == 'windows-latest' }}
        shell: bash
      - name: Release
        uses: softprops/action-gh-release@v1
        with:
          files: assets/*
.github/workflows/rust-ubuntu.yml (deleted, 24 lines)

@@ -1,24 +0,0 @@
name: Rust

on:
  push:
    branches: [ "master", "dev" ]
  pull_request:
    branches: [ "master", "dev" ]

env:
  CARGO_TERM_COLOR: always

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - name: Build
        run: cargo build --verbose
      - name: Fmt
        run: cargo fmt --check --all
      - name: Run tests
        run: cargo test --verbose
.github/workflows/test.yaml (deleted, 57 lines)

@@ -1,57 +0,0 @@

on:
  pull_request:
  push:
    branches:
      - main

name: Test with Code Coverage

jobs:
  test:
    name: Test
    env:
      PROJECT_NAME_UNDERSCORE: topgrade
      CARGO_INCREMENTAL: 0
      RUSTFLAGS: -Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort
      RUSTDOCFLAGS: -Cpanic=abort
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly
          override: true
      - name: Cache dependencies
        uses: actions/cache@v2
        env:
          cache-name: cache-dependencies
        with:
          path: |
            ~/.cargo/.crates.toml
            ~/.cargo/.crates2.json
            ~/.cargo/bin
            ~/.cargo/registry/index
            ~/.cargo/registry/cache
            target
          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('Cargo.lock') }}
      - name: Generate test result and coverage report
        run: |
          cargo install cargo2junit grcov;
          cargo test $CARGO_OPTIONS -- -Z unstable-options --format json | cargo2junit > results.xml;
          zip -0 ccov.zip `find . \( -name "$PROJECT_NAME_UNDERSCORE*.gc*" \) -print`;
          grcov ccov.zip -s . -t lcov --llvm --ignore-not-existing --ignore "/*" --ignore "tests/*" -o lcov.info;
      - name: Upload test results
        uses: EnricoMi/publish-unit-test-result-action@v1
        with:
          check_name: Test Results
          github_token: ${{ secrets.GITHUB_TOKEN }}
          files: results.xml
      - name: Upload to CodeCov
        uses: codecov/codecov-action@v1
        with:
          # required for private repositories:
          # token: ${{ secrets.CODECOV_TOKEN }}
          files: ./lcov.info
          fail_ci_if_error: true
.gitignore (18 changed lines)

@@ -1,4 +1,20 @@
# JetBrains IDEs
.idea/

/target
# Visual Studio
.vs/

# Visual Studio Code
.vscode/

# Generic build outputs
/build

# Specific for some languages like Rust
/target

# LLVM profiling output
*.profraw

# Backup files for any .rs files in the project
**/*.rs.bk
.vscode/launch.json (deleted, 38 lines)

@@ -1,38 +0,0 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "type": "lldb",
            "request": "launch",
            "name": "Topgrade",
            "console": "integratedTerminal",
            "cargo": {
                "args": [
                    "build",
                    "--bin=topgrade",
                    "--package=topgrade"
                ],
                "filter": {
                    "name": "topgrade",
                    "kind": "bin"
                }
            },
            "args": [
                "--only",
                "${input:step}",
                "-v"
            ],
            "cwd": "${workspaceFolder}"
        },
    ],
    "inputs": [
        {
            "type": "promptString",
            "id": "step",
            "description": "step name",
        }
    ]
}
.vscode/tasks.json (deleted, 14 lines)

@@ -1,14 +0,0 @@
{
    "version": "2.0.0",
    "tasks": [
        {
            "type": "cargo",
            "command": "clippy",
            "problemMatcher": [
                "$rustc"
            ],
            "group": "test",
            "label": "rust: cargo clippy"
        }
    ]
}
.vscode/topgrade.code-snippets (deleted, 50 lines)

@@ -1,50 -0,0 @@
{
    // Place your topgrade workspace snippets here. Each snippet is defined under a snippet name and has a scope, prefix, body and
    // description. Add comma separated ids of the languages where the snippet is applicable in the scope field. If scope
    // is left empty or omitted, the snippet gets applied to all languages. The prefix is what is
    // used to trigger the snippet and the body will be expanded and inserted. Possible variables are:
    // $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
    // Placeholders with the same ids are connected.
    // Example:
    // "Print to console": {
    //     "scope": "javascript,typescript",
    //     "prefix": "log",
    //     "body": [
    //         "console.log('$1');",
    //         "$2"
    //     ],
    //     "description": "Log output to console"
    // }
    "Skip Step": {
        "scope": "rust",
        "prefix": "skipstep",
        "body": [
            "return Err(SkipStep(format!(\"$1\")).into());"
        ]
    },
    "Step": {
        "scope": "rust",
        "prefix": "step",
        "body": [
            "pub fn $1(ctx: &ExecutionContext) -> Result<()> {",
            "    $0",
            "    Ok(())",
            "}"
        ]
    },
    "Require Binary": {
        "scope": "rust",
        "prefix": "req",
        "description": "Require a binary to be installed",
        "body": [
            "let ${1:binary} = require(\"${1:binary}\")?;"
        ]
    },
    "macos": {
        "scope": "rust",
        "prefix": "macos",
        "body": [
            "#[cfg(target_os = \"macos\")]"
        ]
    }
}
BREAKINGCHANGES.md (new empty file)
CODE_OF_CONDUCT.md (new file, 128 lines)

@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
open an issue on GitHub .
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series
of actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
CONTRIBUTING.md (new file, 170 lines)

@@ -0,0 +1,170 @@
## Contributing to `topgrade`

Thank you for your interest in contributing to `topgrade`!
We welcome and encourage contributions of all kinds, such as:

1. Issue reports or feature requests
2. Documentation improvements
3. Code (PR or PR Review)

Please follow the [Karma Runner guidelines](http://karma-runner.github.io/6.2/dev/git-commit-msg.html)
for commit messages.

## Adding a new `step`

In `topgrade`'s terms, a package manager is called a `step`.
To add a new `step` to `topgrade`:

1. Add a new variant to
   [`enum Step`](https://github.com/topgrade-rs/topgrade/blob/cb7adc8ced8a77addf2cb051d18bba9f202ab866/src/config.rs#L100)

   ```rust
   pub enum Step {
       // Existing steps
       // ...

       // Your new step here!
       // You may want it to be sorted alphabetically because that looks great:)
       Xxx,
   }
   ```

2. Implement the update function

   You need to find the appropriate location for this update function: it should be
   a file under [`src/steps`](https://github.com/topgrade-rs/topgrade/tree/master/src/steps),
   and the file names are self-explanatory; for example, `step`s related to `zsh` are
   placed in [`steps/zsh.rs`](https://github.com/topgrade-rs/topgrade/blob/master/src/steps/zsh.rs).

   Then you implement the update function and put it in the file where it belongs.

   ```rust
   pub fn run_xxx(ctx: &ExecutionContext) -> Result<()> {
       // Check if this step is installed; if not, this update will be skipped.
       let xxx = require("xxx")?;

       // Print the separator
       print_separator("xxx");

       // Invoke the new step to get things updated!
       ctx.run_type()
           .execute(xxx)
           .arg(/* args required by this step */)
           .status_checked()
   }
   ```

   Such an update function is conventionally named `run_xxx()`, where `xxx`
   is the name of the new step, and it should take an argument of type
   `&ExecutionContext`; this is adequate for most cases unless some extra stuff is
   needed (you can find some examples where extra arguments are needed
   [here](https://github.com/topgrade-rs/topgrade/blob/7e48c5dedcfd5d0124bb9f39079a03e27ed23886/src/main.rs#L201-L219)).

   An update function usually does 3 things:
   1. Check if the step is installed
   2. Output the separator
   3. Invoke the step

   Still, this is sufficient for most tools, but you may need some extra stuff
   with a complicated `step`.

3. Finally, invoke that update function in `main.rs`

   ```rust
   runner.execute(Step::Xxx, "xxx", || ItsModule::run_xxx(&ctx))?;
   ```

   We use [conditional compilation](https://doc.rust-lang.org/reference/conditional-compilation.html)
   to separate the steps; for example, for steps that are Linux-only, it goes
   like this:

   ```
   #[cfg(target_os = "linux")]
   {
       // Xxx is Linux-only
       runner.execute(Step::Xxx, "xxx", || ItsModule::run_xxx(&ctx))?;
   }
   ```

Congrats, you just added a new `step`:)

## Modification to the configuration entries

If your PR modifies the configuration options
(in [`src/config.rs`](https://github.com/topgrade-rs/topgrade/blob/master/src/config.rs)):

1. Adding new options
2. Changing the existing options

Be sure to apply your changes to
[`config.example.toml`](https://github.com/topgrade-rs/topgrade/blob/master/config.example.toml),
and add some basic documentation guiding users on how to use these options.

## Breaking changes

If your PR introduces a breaking change, document it in [`BREAKINGCHANGES_dev.md`][bc_dev];
it should be written in Markdown and wrapped at 80 columns, for example:

```md
1. The configuration location has been updated to x.

2. The step x has been removed.

3. ...
```

[bc_dev]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES_dev.md

## Before you submit your PR

Make sure your patch passes the following tests on your host:

```shell
$ cargo build
$ cargo fmt
$ cargo clippy
$ cargo test
```

Don't worry about other platforms; we have most of them covered in our CI.

## I18n

If your PR introduces user-facing messages, we need to ensure they are translated.
Please add the translations to [`locales/app.yml`][app_yml]. Simple messages
without arguments (e.g., "hello world") can simply be translated directly
(tip: ChatGPT or similar LLMs are good at translation). If a message contains
arguments, e.g., "hello <NAME>", please follow this convention:

```yml
"hello {name}": # key
  en: "hello %{name}" # translation
```

Arguments in the key should be in the format `{argument_name}`, and they get
a preceding `%` when used in translations (see the sketch below).

[app_yml]: https://github.com/topgrade-rs/topgrade/blob/main/locales/app.yml
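For illustration only, here is a minimal Rust sketch of how a `%{name}` placeholder gets filled in at runtime. In the real project this lookup and substitution is handled by the rust-i18n crate, so `fill_placeholders` below is a hypothetical helper, not the project's API:

```rust
// Hypothetical helper, only to illustrate the %{name} placeholder convention.
// The actual substitution in topgrade is performed by rust-i18n.
fn fill_placeholders(translation: &str, args: &[(&str, &str)]) -> String {
    let mut out = translation.to_string();
    for (key, value) in args {
        // Translations mark arguments as %{key}, per the convention above.
        out = out.replace(&format!("%{{{}}}", key), value);
    }
    out
}

fn main() {
    // The `en` translation for the key "hello {name}" from the example above.
    let translation = "hello %{name}";
    assert_eq!(fill_placeholders(translation, &[("name", "Alice")]), "hello Alice");
}
```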
## Some tips
|
||||
|
||||
1. Locale
|
||||
|
||||
Some `step` respects locale, which means their output can be in language other
|
||||
than English, we should not do check on it.

For example, one may want to check whether a tool works by doing this:

```rust
use std::process::Command;
use std::str::from_utf8;

let output = Command::new("xxx").arg("--help").output().unwrap();
let stdout = from_utf8(&output.stdout).expect("Assume it is UTF-8 encoded");

if stdout.contains("help") {
    // xxx works
}
```

If `xxx` respects the locale, the above code works on an English system, but on
a system that uses another language, e.g., Chinese, `"help"` may be rendered as
`"帮助"`, and the check will fail.
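One locale-independent alternative (a minimal sketch, not necessarily how every
existing step does it) is to rely on the exit status instead of the output
text:

```rust
use std::process::Command;

// The check depends only on the exit status, so it works regardless of the
// language the tool prints its help text in. `xxx` is just a placeholder.
let works = Command::new("xxx")
    .arg("--help")
    .output()
    .map(|output| output.status.success())
    .unwrap_or(false);

if works {
    // xxx works
}
```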
Cargo.lock (generated, 2983 lines changed): file diff suppressed because it is too large.

Cargo.toml (103 lines changed):
@@ -1,13 +1,14 @@
|
||||
[package]
|
||||
name = "topgrade-rs"
|
||||
description = "Upgrade all the things, successor of topgrade"
|
||||
name = "topgrade"
|
||||
description = "Upgrade all the things"
|
||||
categories = ["os"]
|
||||
keywords = ["upgrade", "update"]
|
||||
license-file = "LICENSE"
|
||||
license = "GPL-3.0"
|
||||
repository = "https://github.com/topgrade-rs/topgrade"
|
||||
version = "10.0.1"
|
||||
rust-version = "1.76.0"
|
||||
version = "16.0.2"
|
||||
authors = ["Roey Darwish Dror <roey.ghost@gmail.com>", "Thomas Schönauer <t.schoenauer@hgs-wt.at>"]
|
||||
exclude = ["doc/screenshot.gif"]
|
||||
exclude = ["doc/screenshot.gif", "BREAKINGCHANGES_dev.md"]
|
||||
edition = "2021"
|
||||
|
||||
readme = "README.md"
|
||||
@@ -16,44 +17,70 @@ readme = "README.md"
|
||||
name = "topgrade"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
home = "0.5"
|
||||
directories = "4.0"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
toml = "0.5"
|
||||
which_crate = { version = "4.1", package = "which" }
|
||||
shellexpand = "2.1"
|
||||
clap = { version = "3.1", features = ["cargo", "derive"] }
|
||||
log = "0.4"
|
||||
walkdir = "2.3"
|
||||
console = "0.15"
|
||||
lazy_static = "1.4"
|
||||
chrono = "0.4"
|
||||
pretty_env_logger = "0.4"
|
||||
glob = "0.3"
|
||||
strum = { version = "0.24", features = ["derive"] }
|
||||
thiserror = "1.0"
|
||||
anyhow = "1.0"
|
||||
tempfile = "3.2"
|
||||
cfg-if = "1.0"
|
||||
tokio = { version = "1.5", features = ["process", "rt-multi-thread"] }
|
||||
futures = "0.3"
|
||||
regex = "1.5"
|
||||
sys-info = "0.9"
|
||||
semver = "1.0"
|
||||
##[lib]
|
||||
##name = "topgrade_lib"
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
notify-rust = "4.5"
|
||||
[dependencies]
|
||||
home = "~0.5"
|
||||
etcetera = "~0.8"
|
||||
once_cell = "~1.19"
|
||||
serde = { version = "~1.0", features = ["derive"] }
|
||||
toml = "0.8"
|
||||
which_crate = { version = "~6.0", package = "which" }
|
||||
shellexpand = "~3.1"
|
||||
clap = { version = "~4.5", features = ["cargo", "derive"] }
|
||||
clap_complete = "~4.5"
|
||||
clap_mangen = "~0.2"
|
||||
walkdir = "~2.5"
|
||||
console = "~0.15"
|
||||
lazy_static = "~1.4"
|
||||
chrono = "~0.4"
|
||||
glob = "~0.3"
|
||||
strum = { version = "~0.26", features = ["derive"] }
|
||||
thiserror = "~1.0"
|
||||
tempfile = "~3.10"
|
||||
cfg-if = "~1.0"
|
||||
tokio = { version = "~1.38", features = ["process", "rt-multi-thread"] }
|
||||
futures = "~0.3"
|
||||
regex = "~1.10"
|
||||
semver = "~1.0"
|
||||
shell-words = "~1.1"
|
||||
color-eyre = "~0.6"
|
||||
tracing = { version = "~0.1", features = ["attributes", "log"] }
|
||||
tracing-subscriber = { version = "~0.3", features = ["env-filter", "time"] }
|
||||
merge = "~0.1"
|
||||
regex-split = "~0.1"
|
||||
notify-rust = "~4.11"
|
||||
wildmatch = "2.3.0"
|
||||
rust-i18n = "3.0.1"
|
||||
sys-locale = "0.3.1"
|
||||
|
||||
[package.metadata.generate-rpm]
|
||||
assets = [{ source = "target/release/topgrade", dest = "/usr/bin/topgrade" }]
|
||||
|
||||
[package.metadata.generate-rpm.requires]
|
||||
git = "*"
|
||||
|
||||
[package.metadata.deb]
|
||||
name = "topgrade"
|
||||
maintainer = "Chris Gelatt <kreeblah@gmail.com>"
|
||||
copyright = "2024, Topgrade Team"
|
||||
license-file = ["LICENSE", "0"]
|
||||
depends = "$auto"
|
||||
extended-description = "Keeping your system up to date usually involves invoking multiple package managers. This results in big, non-portable shell one-liners saved in your shell. To remedy this, Topgrade detects which tools you use and runs the appropriate commands to update them."
|
||||
section = "utils"
|
||||
priority = "optional"
|
||||
default-features = true
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
nix = "0.24"
|
||||
rust-ini = "0.18"
|
||||
self_update_crate = { version = "0.30", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }
|
||||
nix = { version = "~0.29", features = ["hostname", "signal", "user"] }
|
||||
rust-ini = "~0.21"
|
||||
self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
self_update_crate = { version = "0.30", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
|
||||
winapi = "0.3"
|
||||
parselnk = "0.1"
|
||||
self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
|
||||
winapi = "~0.3"
|
||||
parselnk = "~0.1"
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
|
||||
README.md (126 lines changed):

@@ -1,59 +1,111 @@
|
||||

|
||||
<!---
|
||||

|
||||
[](https://crates.io/crates/topgrade)
|
||||
[](https://aur.archlinux.org/packages/topgrade/)
|
||||
 -->
|
||||
--->
|
||||
<div align="center">
|
||||
<h1>
|
||||
<img alt="Topgrade" src="doc/topgrade_transparent.png" width="850px">
|
||||
</h1>
|
||||
|
||||
<a href="https://github.com/topgrade-rs/topgrade/releases"><img alt="GitHub Release" src="https://img.shields.io/github/release/topgrade-rs/topgrade.svg"></a>
|
||||
<a href="https://crates.io/crates/topgrade"><img alt="crates.io" src="https://img.shields.io/crates/v/topgrade.svg"></a>
|
||||
<a href="https://aur.archlinux.org/packages/topgrade"><img alt="AUR" src="https://img.shields.io/aur/version/topgrade.svg"></a>
|
||||
<a href="https://formulae.brew.sh/formula/topgrade"><img alt="Homebrew" src="https://img.shields.io/homebrew/v/topgrade.svg"></a>
|
||||
|
||||

|
||||
<img alt="Demo" src="doc/topgrade_demo.gif">
|
||||
</div>
|
||||
|
||||
## Fork
|
||||
This is a fork of [topgrade by r-darwish](https://github.com/r-darwish/topgrade) to keep it maintained.
|
||||
|
||||
## Introduction
|
||||
|
||||
> **Note**
|
||||
> This is a fork of [topgrade by r-darwish](https://github.com/r-darwish/topgrade) to keep it maintained.
|
||||
|
||||
Keeping your system up to date usually involves invoking multiple package managers.
|
||||
This results in big, non-portable shell one-liners saved in your shell.
|
||||
To remedy this, _topgrade_ detects which tools you use and runs the appropriate commands to update them.
|
||||
To remedy this, **Topgrade** detects which tools you use and runs the appropriate commands to update them.
|
||||
|
||||
## Installation
|
||||
- Arch Linux: [AUR](https://aur.archlinux.org/packages/topgrade/) package.
|
||||
- NixOS: _topgrade_ package in `nixpkgs`.
|
||||
- macOS: [Homebrew](https://brew.sh/) or [MacPorts](https://www.macports.org/install.php).
|
||||
|
||||
Other systems users can either use `cargo install` or use the compiled binaries from the release page.
|
||||
[](https://repology.org/project/topgrade/versions)
|
||||
|
||||
- Arch Linux: [AUR](https://aur.archlinux.org/packages/topgrade)
|
||||
- NixOS: [Nixpkgs](https://search.nixos.org/packages?show=topgrade)
|
||||
- Void Linux: [XBPS](https://voidlinux.org/packages/?arch=x86_64&q=topgrade)
|
||||
- macOS: [Homebrew](https://formulae.brew.sh/formula/topgrade) or [MacPorts](https://ports.macports.org/port/topgrade/)
|
||||
- Windows: [Chocolatey][choco], [Scoop][scoop] or [Winget][winget]
|
||||
- PyPi: [pip](https://pypi.org/project/topgrade/)
|
||||
|
||||
[choco]: https://community.chocolatey.org/packages/topgrade
|
||||
[scoop]: https://scoop.sh/#/apps?q=topgrade
|
||||
[winget]: https://winstall.app/apps/topgrade-rs.topgrade
|
||||
|
||||
Other systems users can either use `cargo install` or the compiled binaries from the release page.
|
||||
The compiled binaries contain a self-upgrading feature.
|
||||
|
||||
Topgrade requires Rust 1.51 or above.
|
||||
|
||||
## Documentation [WIP]

You can visit the documentation at [topgrade-rs.github.io](https://topgrade-rs.github.io/).
|
||||
|
||||
## Usage
|
||||
Just run `topgrade`.
|
||||
See [the wiki](https://github.com/r-darwish/topgrade/wiki/Step-list) for the list of things Topgrade supports.
|
||||
|
||||
## Customization
|
||||
Just run `topgrade`.
|
||||
|
||||
## Configuration
|
||||
|
||||
See `config.example.toml` for an example configuration file.
|
||||
|
||||
### Configuration path
|
||||
## Migration and Breaking Changes
|
||||
|
||||
The configuration should be placed in the following paths depending by the operating system:
|
||||
Whenever there is a **breaking change**, the major version number will be bumped,
|
||||
and we will document these changes in the release note, please take a look at
|
||||
it when updated to a major release.
|
||||
|
||||
* **Windows** - `%APPDATA%/topgrade.toml`
|
||||
* **macOS** and **other Unix systems** - `${XDG_CONFIG_HOME:-~/.config}/topgrade.toml`
|
||||
> Got a question? Feel free to open an issue or discussion!
|
||||
|
||||
## Contribution
|
||||
### Problems or missing features?
|
||||
Open a new Issue describing your problem and if possible with a possible solution.
|
||||
### Missing a feature or found an unsupported tool/distro?
|
||||
Just let us know what you are missing by opening an issue.
|
||||
For tools please open an Issue describing the tool, which platforms it supports and if possible, give us an example of its usage.
|
||||
### Want to contribute to the code?
|
||||
Just fork the repository and start coding.
|
||||
### Configuration Path
|
||||
|
||||
#### `CONFIG_DIR` on each platform
|
||||
- **Windows**: `%APPDATA%`
|
||||
- **macOS** and **other Unix systems**: `${XDG_CONFIG_HOME:-~/.config}`
|
||||
|
||||
`topgrade` will look for the configuration file in the following places, in order of priority:
|
||||
|
||||
1. `CONFIG_DIR/topgrade.toml`
|
||||
2. `CONFIG_DIR/topgrade/topgrade.toml`
|
||||
|
||||
If the file with higher priority is present, whether it is valid or not, the other configuration files will be ignored.
|
||||
|
||||
On the first run (when no configuration file exists), `topgrade` will create a configuration file at `CONFIG_DIR/topgrade.toml` for you.
|
||||
|
||||
### Custom Commands
|
||||
|
||||
Custom commands can be defined in the config file which can be run before, during, or after the inbuilt commands, as required.
|
||||
By default, the custom commands are run using a new shell according to the `$SHELL` environment variable on unix (falls back to `sh`) or `pwsh` on windows (falls back to `powershell`).
|
||||
|
||||
On unix, if you want to run your command using an interactive shell, for example to source your shell's rc files, you can add `-i` at the start of your custom command.
|
||||
But note that this requires the command to exit the shell correctly or else the shell will hang indefinitely.
|
||||
|
||||
## Remote Execution
|
||||
|
||||
## Remote execution
|
||||
You can specify a key called `remote_topgrades` in the configuration file.
|
||||
This key should contain a list of hostnames that have topgrade installed on them.
|
||||
This key should contain a list of hostnames that have Topgrade installed on them.
|
||||
Topgrade will use `ssh` to run `topgrade` on remote hosts before acting locally.
|
||||
To limit the execution only to specific hosts use the `--remote-host-limit` parameter.
|
||||
|
||||
## Contribution
|
||||
|
||||
### Problems or missing features?
|
||||
|
||||
Open a new issue describing your problem and if possible provide a solution.
|
||||
|
||||
### Missing a feature or found an unsupported tool/distro?
|
||||
|
||||
Just let us know what you are missing by opening an issue.
|
||||
For tools, please open an issue describing the tool, which platforms it supports and if possible, give us an example of its usage.
|
||||
|
||||
### Want to contribute to the code?
|
||||
|
||||
Just fork the repository and start coding.
|
||||
|
||||
### Contribution Guidelines
|
||||
|
||||
See [CONTRIBUTING.md](https://github.com/topgrade-rs/topgrade/blob/master/CONTRIBUTING.md)
|
||||
|
||||
## Roadmap
|
||||
|
||||
- [ ] Add a proper testing framework to the code base.
|
||||
- [ ] Add unit tests for package managers.
|
||||
- [ ] Split up code into more maintainable parts, e.g. putting every Linux package manager in its own submodule of linux.rs.
|
||||
|
||||
RELEASE_PROCEDURE.md (new file, 69 lines):

@@ -0,0 +1,69 @@
|
||||
> This document lists the steps that lead to a successful release of Topgrade.
|
||||
|
||||
1. Open a PR that:
|
||||
|
||||
> Here is an [Example PR](https://github.com/topgrade-rs/topgrade/pull/652)
|
||||
> that you can refer to.
|
||||
|
||||
1. bumps the version number.
|
||||
|
||||
> If there are breaking changes, the major version number should be increased.
|
||||
|
||||
2. If the major version number gets bumped, update [SECURITY.md][SECURITY_file_link].
|
||||
|
||||
[SECURITY_file_link]: https://github.com/topgrade-rs/topgrade/blob/main/SECURITY.md
|
||||
|
||||
3. Overwrite [`BREAKINGCHANGES`][breaking_changes] with
|
||||
[`BREAKINGCHANGES_dev`][breaking_changes_dev], and create a new dev file:
|
||||
|
||||
```sh
|
||||
$ cd topgrade
|
||||
$ mv BREAKINGCHANGES_dev.md BREAKINGCHANGES.md
|
||||
$ touch BREAKINGCHANGES_dev.md
|
||||
```
|
||||
|
||||
[breaking_changes_dev]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES_dev.md
|
||||
[breaking_changes]: https://github.com/topgrade-rs/topgrade/blob/main/BREAKINGCHANGES.md
|
||||
|
||||
2. Check and merge that PR.
|
||||
|
||||
3. Go to the [release](https://github.com/topgrade-rs/topgrade/releases) page
|
||||
and click the [Draft a new release button](https://github.com/topgrade-rs/topgrade/releases/new)
|
||||
|
||||
4. Write the release notes
|
||||
|
||||
We usually use GitHub's [Automatically generated release notes][auto_gen_release_notes]
|
||||
functionality to generate release notes, but you can write your own instead.
|
||||
|
||||
[auto_gen_release_notes]: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes
|
||||
|
||||
5. Attaching binaries
|
||||
|
||||
You don't need to do this as our CI will automatically do it for you,
|
||||
binaries for Linux, macOS and Windows will be created and attached.
|
||||
|
||||
And the CI will publish the new binary to:
|
||||
|
||||
1. AUR
|
||||
2. PyPi
|
||||
3. Homebrew (seems that this is not working correctly)
|
||||
4. Winget
|
||||
|
||||
6. Manually release it to Crates.io
|
||||
|
||||
> Yeah, this is unfortunate; our CI won't do this for us. We should probably add one.
|
||||
|
||||
1. `cd` to the Topgrade directory, make sure that it is the latest version
|
||||
(i.e., including the PR that bumps the version number).
|
||||
2. Set up your token with `cargo login`.
|
||||
3. Dry-run the publish `cargo publish --dry-run`.
|
||||
4. If step 3 works, then do the final release `cargo publish`.
|
||||
|
||||
> You can also take a look at the official tutorial [Publishing on crates.io][doc]
|
||||
>
|
||||
> [doc]: https://doc.rust-lang.org/cargo/reference/publishing.html
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
SECURITY.md (new file, 11 lines):

@@ -0,0 +1,11 @@
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
We only support the latest major version and its subversions.
|
||||
|
||||
| Version | Supported |
|
||||
| -------- | ------------------ |
|
||||
| 16.0.x | :white_check_mark: |
|
||||
| < 16.0 | :x: |
|
||||
|
||||
clippy.toml (new file, 5 lines):

@@ -0,0 +1,5 @@
|
||||
disallowed-methods = [
|
||||
{ path = "std::process::Command::output", reason = "Use `output_checked[_with][_utf8]`" },
|
||||
{ path = "std::process::Command::spawn", reason = "Use `spawn_checked`" },
|
||||
{ path = "std::process::Command::status", reason = "Use `status_checked`" },
|
||||
]
|
||||
config.example.toml:

@@ -1,109 +1,292 @@
|
||||
# Don't ask for confirmations
|
||||
#assume_yes = true
|
||||
# Include any additional configuration file(s)
|
||||
# [include] sections are processed in the order you write them
|
||||
# Files in $CONFIG_DIR/topgrade.d/ are automatically included before this file
|
||||
[include]
|
||||
# paths = ["/etc/topgrade.toml"]
|
||||
|
||||
|
||||
[misc]
|
||||
# Run `sudo -v` to cache credentials at the start of the run
|
||||
# This avoids a blocking password prompt in the middle of an unattended run
|
||||
# (default: false)
|
||||
# pre_sudo = false
|
||||
|
||||
# Sudo command to be used
|
||||
# sudo_command = "sudo"
|
||||
|
||||
# Disable specific steps - same options as the command line flag
|
||||
#disable = ["system", "emacs"]
|
||||
# disable = ["system", "emacs"]
|
||||
|
||||
# Ignore failures for these steps
|
||||
#ignore_failures = ["powershell"]
|
||||
|
||||
# Run specific steps - same options as the command line flag
|
||||
#only = ["system", "emacs"]
|
||||
|
||||
# Do not ask to retry failed steps (default: false)
|
||||
#no_retry = true
|
||||
|
||||
# Run inside tmux
|
||||
#run_in_tmux = true
|
||||
# ignore_failures = ["powershell"]
|
||||
|
||||
# List of remote machines with Topgrade installed on them
|
||||
#remote_topgrades = ["toothless", "pi", "parnas"]
|
||||
|
||||
# Arguments to pass SSH when upgrading remote systems
|
||||
#ssh_arguments = "-o ConnectTimeout=2"
|
||||
# remote_topgrades = ["toothless", "pi", "parnas"]
|
||||
|
||||
# Path to Topgrade executable on remote machines
|
||||
#remote_topgrade_path = ".cargo/bin/topgrade"
|
||||
# remote_topgrade_path = ".cargo/bin/topgrade"
|
||||
|
||||
# Arguments to pass to SSH when upgrading remote systems
|
||||
# ssh_arguments = "-o ConnectTimeout=2"
|
||||
|
||||
# Arguments to pass tmux when pulling Repositories
|
||||
#tmux_arguments = "-S /var/tmux.sock"
|
||||
# tmux_arguments = "-S /var/tmux.sock"
|
||||
|
||||
# Do not set the terminal title
|
||||
#set_title = false
|
||||
# Do not set the terminal title (default: true)
|
||||
# set_title = true
|
||||
|
||||
# Display the time in step titles
|
||||
# Display the time in step titles (default: true)
|
||||
# display_time = true
|
||||
|
||||
# Cleanup temporary or old files
|
||||
#cleanup = true
|
||||
# Don't ask for confirmations (no default value)
|
||||
# assume_yes = true
|
||||
|
||||
# Skip sending a notification at the end of a run
|
||||
#skip_notify = true
|
||||
# Do not ask to retry failed steps (default: false)
|
||||
# no_retry = true
|
||||
|
||||
[git]
|
||||
#max_concurrency = 5
|
||||
# Additional git repositories to pull
|
||||
#repos = [
|
||||
# "~/src/*/",
|
||||
# "~/.config/something"
|
||||
#]
|
||||
# Run inside tmux (default: false)
|
||||
# run_in_tmux = true
|
||||
|
||||
# Don't pull the predefined git repos
|
||||
#pull_predefined = false
|
||||
# Changes the way topgrade interacts with
|
||||
# the tmux session, creating the session
|
||||
# and only attaching to it if not inside tmux
|
||||
# (default: "attach_if_not_in_session", allowed values: "attach_if_not_in_session", "attach_always")
|
||||
# tmux_session_mode = "attach_if_not_in_session"
|
||||
|
||||
# Arguments to pass Git when pulling Repositories
|
||||
#arguments = "--rebase --autostash"
|
||||
# Cleanup temporary or old files (default: false)
|
||||
# cleanup = true
|
||||
|
||||
# Send a notification for every step (default: false)
|
||||
# notify_each_step = false
|
||||
|
||||
# Skip sending a notification at the end of a run (default: false)
|
||||
# skip_notify = true
|
||||
|
||||
# The Bash-it branch to update (default: "stable")
|
||||
# bashit_branch = "stable"
|
||||
|
||||
# Run specific steps - same options as the command line flag
|
||||
# only = ["system", "emacs"]
|
||||
|
||||
# Whether to self update
|
||||
#
|
||||
# this will be ignored if the binary is built without self update support
|
||||
#
|
||||
# available also via setting the environment variable TOPGRADE_NO_SELF_UPGRADE)
|
||||
# no_self_update = true
|
||||
|
||||
# Extra tracing filter directives
|
||||
# These are prepended to the `--log-filter` argument
|
||||
# See: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives
|
||||
# log_filters = ["topgrade::command=debug", "warn"]
|
||||
|
||||
[composer]
|
||||
#self_update = true
|
||||
|
||||
# Commands to run before anything
|
||||
[pre_commands]
|
||||
#"Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
# "Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
|
||||
|
||||
# Commands to run after anything
|
||||
[post_commands]
|
||||
# "Emacs Snapshot" = "rm -rf ~/.emacs.d/elpa.bak && cp -rl ~/.emacs.d/elpa ~/.emacs.d/elpa.bak"
|
||||
|
||||
|
||||
# Custom commands
|
||||
[commands]
|
||||
#"Python Environment" = "~/dev/.env/bin/pip install -i https://pypi.python.org/simple -U --upgrade-strategy eager jupyter"
|
||||
# "Python Environment" = "~/dev/.env/bin/pip install -i https://pypi.python.org/simple -U --upgrade-strategy eager jupyter"
|
||||
# "Custom command using interactive shell (unix)" = "-i vim_upgrade"
|
||||
|
||||
|
||||
[python]
|
||||
# enable_pip_review = true ###disabled by default
|
||||
# enable_pip_review_local = true ###disabled by default
|
||||
# enable_pipupgrade = true ###disabled by default
|
||||
# pipupgrade_arguments = "-y -u --pip-path pip" ###disabled by default
|
||||
|
||||
# For the poetry step, by default, Topgrade skips its update if poetry is not
|
||||
# installed with the official script. This configuration entry forces Topgrade
|
||||
# to run the update in this case.
|
||||
#
|
||||
# (default: false)
|
||||
# poetry_force_self_update = true
|
||||
|
||||
|
||||
[composer]
|
||||
# self_update = true
|
||||
|
||||
|
||||
[brew]
|
||||
#greedy_cask = true
|
||||
#autoremove = true
|
||||
# For the BrewCask step
|
||||
# If `Repo Cask Upgrade` exists, then use the `-a` option.
|
||||
# Otherwise, use the `--greedy` option.
|
||||
# greedy_cask = true
|
||||
|
||||
# For the BrewCask step
|
||||
# If `Repo Cask Upgrade` does not exist, then use the `--greedy_latest` option.
|
||||
# NOTE: the above entry `greedy_cask` contains this entry, though you can enable
|
||||
# both of them, they won't clash with each other.
|
||||
# greedy_latest = true
|
||||
|
||||
# For the BrewCask step
|
||||
# If `Repo Cask Upgrade` does not exist, then use the `--greedy_auto_updates` option.
|
||||
# NOTE: the above entry `greedy_cask` contains this entry, though you can enable
|
||||
# both of them, they won't clash with each other.
|
||||
# greedy_auto_updates = true
|
||||
|
||||
# For the BrewFormula step
|
||||
# Execute `brew autoremove` after the step.
|
||||
# autoremove = true
|
||||
|
||||
# For the BrewFormula step
|
||||
# Upgrade formulae built from the HEAD branch; `brew upgrade --fetch-HEAD`
|
||||
# fetch_head = true
|
||||
|
||||
|
||||
[linux]
|
||||
# Arch Package Manager to use. Allowed values: autodetect, trizen, aura, paru, yay, pikaur, pacman, pamac.
|
||||
#arch_package_manager = "pacman"
|
||||
# Arch Package Manager to use.
|
||||
# Allowed values:
|
||||
# autodetect, aura, garuda_update, pacman, pamac, paru, pikaur, trizen, yay
|
||||
# arch_package_manager = "pacman"
|
||||
|
||||
# Arguments to pass yay (or paru) when updating packages
|
||||
#yay_arguments = "--nodevel"
|
||||
#aura_aur_arguments = "-kx"
|
||||
#aura_pacman_arguments = ""
|
||||
#show_arch_news = true
|
||||
#trizen_arguments = "--devel"
|
||||
#pikaur_arguments = ""
|
||||
#pamac_arguments = "--no-devel"
|
||||
#enable_tlmgr = true
|
||||
#emerge_sync_flags = "-q"
|
||||
#emerge_update_flags = "-uDNa --with-bdeps=y world"
|
||||
#redhat_distro_sync = false
|
||||
#rpm_ostree = false
|
||||
# yay_arguments = "--nodevel"
|
||||
|
||||
# Arguments to pass dnf when updating packages
|
||||
# dnf_arguments = "--refresh"
|
||||
|
||||
# aura_aur_arguments = "-kx"
|
||||
|
||||
# aura_pacman_arguments = ""
|
||||
# garuda_update_arguments = ""
|
||||
|
||||
# show_arch_news = true
|
||||
|
||||
# trizen_arguments = "--devel"
|
||||
|
||||
# pikaur_arguments = ""
|
||||
|
||||
# pamac_arguments = "--no-devel"
|
||||
|
||||
# enable_tlmgr = true
|
||||
|
||||
# emerge_sync_flags = "-q"
|
||||
|
||||
# emerge_update_flags = "-uDNa --with-bdeps=y world"
|
||||
|
||||
# redhat_distro_sync = false
|
||||
|
||||
# suse_dup = false
|
||||
|
||||
# rpm_ostree = false
|
||||
|
||||
# For Fedora/CentOS/RHEL Atomic variants, if `bootc` is available and this configuration entry is set to true, use
|
||||
# it to do the update - will also supersede rpm-ostree if enabled
|
||||
# (default: false)
|
||||
# bootc = false
|
||||
|
||||
# nix_arguments = "--flake"
|
||||
|
||||
# nix_env_arguments = "--prebuilt-only"
|
||||
|
||||
# Extra Home Manager arguments
|
||||
# home_manager_arguments = ["--flake", "file"]
|
||||
|
||||
|
||||
[git]
|
||||
# How many repos to pull at max in parallel
|
||||
# max_concurrency = 5
|
||||
|
||||
# Additional git repositories to pull
|
||||
# repos = [
|
||||
# "~/src/*/",
|
||||
# "~/.config/something"
|
||||
# ]
|
||||
|
||||
# Don't pull the predefined git repos
|
||||
# pull_predefined = false
|
||||
|
||||
# Arguments to pass Git when pulling Repositories
|
||||
# arguments = "--rebase --autostash"
|
||||
|
||||
|
||||
[windows]
|
||||
# Manually select Windows updates
|
||||
#accept_all_updates = false
|
||||
#open_remotes_in_new_terminal = true
|
||||
# accept_all_updates = false
|
||||
|
||||
# open_remotes_in_new_terminal = true
|
||||
|
||||
# wsl_update_pre_release = true
|
||||
|
||||
# wsl_update_use_web_download = true
|
||||
|
||||
# Causes Topgrade to rename itself during the run to allow package managers
|
||||
# to upgrade it. Use this only if you installed Topgrade by using a package
|
||||
# manager such as Scoop or Cargo
|
||||
#self_rename = true
|
||||
# self_rename = true
|
||||
|
||||
|
||||
[npm]
|
||||
# Use sudo if the NPM directory isn't owned by the current user
|
||||
#use_sudo = true
|
||||
# use_sudo = true
|
||||
|
||||
|
||||
[yarn]
|
||||
# Run `yarn global upgrade` with `sudo`
|
||||
# use_sudo = true
|
||||
|
||||
|
||||
[deno]
|
||||
# Upgrade deno executable to the given version.
|
||||
# version = "stable"
|
||||
|
||||
|
||||
[vim]
|
||||
# For `vim-plug`, execute `PlugUpdate!` instead of `PlugUpdate`
|
||||
# force_plug_update = true
|
||||
|
||||
|
||||
[firmware]
|
||||
# Offer to update firmware; if false just check for and display available updates
|
||||
#upgrade = true
|
||||
# upgrade = true
|
||||
|
||||
|
||||
[vagrant]
|
||||
# Vagrant directories
|
||||
# directories = []
|
||||
|
||||
# power on vagrant boxes if needed
|
||||
# power_on = true
|
||||
|
||||
# Always suspend vagrant boxes instead of powering off
|
||||
# always_suspend = true
|
||||
|
||||
|
||||
[flatpak]
|
||||
# Use sudo for updating the system-wide installation
|
||||
#use_sudo = true
|
||||
# use_sudo = true
|
||||
|
||||
|
||||
[distrobox]
|
||||
# use_root = false
|
||||
|
||||
# containers = ["archlinux-latest"]
|
||||
[containers]
|
||||
# Specify the containers to ignore while updating (Wildcard supported)
|
||||
# ignored_containers = ["ghcr.io/rancher-sandbox/rancher-desktop/rdx-proxy:latest", "docker.io*"]
|
||||
# Specify the runtime to use for containers (default: "docker", allowed values: "docker", "podman")
|
||||
# runtime = "podman"
|
||||
|
||||
[lensfun]
|
||||
# If disabled, Topgrade invokes `lensfun-update-data` without root privilege;
# then the update will only be available to you. Otherwise, `sudo` is required,
|
||||
# and the update will be installed system-wide, i.e., available to all users.
|
||||
# (default: false)
|
||||
# use_sudo = false
|
||||
|
||||
[julia]
|
||||
# If disabled, Topgrade invokes julia with the --startup-file=no CLI option.
|
||||
#
|
||||
# This may be desirable to avoid loading outdated packages with "using" directives
|
||||
# in the startup file, which might cause the update run to fail.
|
||||
# (default: true)
|
||||
# startup_file = true
|
||||
|
||||
(Binary image not shown; before: 718 KiB)

doc/topgrade_demo.gif (new binary file, 4.1 MiB; not shown)

doc/topgrade_transparent.png (new binary file, 34 KiB; not shown)
locales/app.yml (new file, 795 lines):

@@ -0,0 +1,795 @@
|
||||
_version: 2
|
||||
|
||||
"Current system locale is {system_locale}":
|
||||
en: "Current system locale is %{system_locale}"
|
||||
es: "La configuración regional del sistema es %{system_locale}"
|
||||
fr: "Le paramètre linguistique actuel du système est %{system_locale}"
|
||||
zh_TW: "目前語言為 %{system_locale}"
|
||||
"Dry running: {program_name} {arguments}":
|
||||
en: "Dry running: %{program_name} %{arguments}"
|
||||
es: "Simulando: %{program_name} %{arguments}"
|
||||
fr: "Simulation : %{program_name} %{arguments}"
|
||||
zh_TW: "正在模擬 %{program_name} %{arguments} 的執行過程"
|
||||
"in {directory}":
|
||||
en: "in %{directory}"
|
||||
es: "en %{directory}"
|
||||
fr: "dans %{directory}"
|
||||
zh_TW: "在 %{directory}"
|
||||
"Rebooting...":
|
||||
en: "Rebooting..."
|
||||
es: "Reiniciando..."
|
||||
fr: "Redémarrage..."
|
||||
zh_TW: "正在重新啟動..."
|
||||
"Plugins upgraded":
|
||||
en: "Plugins upgraded"
|
||||
es: "Plugins actualizados"
|
||||
fr: "Plugins mis à jour"
|
||||
zh_TW: "已更新所有擴充功能"
|
||||
"Would self-update":
|
||||
en: "Would self-update"
|
||||
es: "Se actualizara automáticamente"
|
||||
fr: "Se mettrait à jour lui-même"
|
||||
zh_TW: "將自行更新"
|
||||
"Pulling":
|
||||
en: "Pulling"
|
||||
es: "Extrayendo"
|
||||
fr: "Récupération"
|
||||
zh_TW: "正在拉取"
|
||||
"No Breaking changes":
|
||||
en: "No Breaking changes"
|
||||
es: "Sin Cambios Importantes"
|
||||
fr: "Pas de changement cassant"
|
||||
zh_TW: "無重大更改"
|
||||
"Dropping you to shell. Fix what you need and then exit the shell.":
|
||||
en: "Dropping you to shell. Fix what you need and then exit the shell."
|
||||
es: "Cambiando al shell. Arregla lo que necesites y luego sal del shell."
|
||||
fr: "Ouverture d'un shell. Réparez ce dont vous avez besoin et quittez le shell."
|
||||
zh_TW: "已切換到終端殼層。修復完畢後請退出殼層以繼續。"
|
||||
"Topgrade launched in a new tmux session":
|
||||
en: "Topgrade launched in a new tmux session"
|
||||
es: "Topgrade lanzado en una nueva sesión tmux"
|
||||
fr: "Topgrade lancé dans une nouvelle session tmux"
|
||||
zh_TW: "Topgrade 已啟動新 tmux 程序"
|
||||
'Topgrade upgraded to {version}:\n':
|
||||
en: 'Topgrade upgraded to %{version}:\n'
|
||||
es: 'Topgrade actualizado a %{version}:\n'
|
||||
fr: 'Topgrade mis à jour vers %{version}:\n'
|
||||
zh_TW: '已將 Topgrade 更新至 %{version}:\n'
|
||||
"Topgrade is up-to-date":
|
||||
en: "Topgrade is up-to-date"
|
||||
es: "Topgrade está actualizado"
|
||||
fr: "Topgrade est à jour"
|
||||
zh_TW: "Topgrade 為最新版本"
|
||||
"Updating modules...":
|
||||
en: "Updating modules..."
|
||||
es: "Actualizando módulos..."
|
||||
fr: "Mise à jour des modules..."
|
||||
zh_TW: "正在更新模組..."
|
||||
"Powershell Modules Update":
|
||||
en: "Powershell Modules Update"
|
||||
es: "Actualización de módulos Powershell"
|
||||
fr: "Mise à jour des modules Powershell"
|
||||
zh_TW: "Powershell 模組更新"
|
||||
"Powershell is not installed":
|
||||
en: "Powershell is not installed"
|
||||
es: "Powershell no está instalado"
|
||||
fr: "Powershell n'est pas installé"
|
||||
zh_TW: "未安裝 Powershell"
|
||||
"Error detecting current distribution: {error}":
|
||||
en: "Error detecting current distribution: %{error}"
|
||||
es: "Error al detectar la distribución actual: %{error}"
|
||||
fr: "Erreur lors de la détection de la distribution acutelle: %{error}"
|
||||
zh_TW: "無法偵測作業系統:%{error}"
|
||||
"Error: {error}":
|
||||
en: "Error: %{error}"
|
||||
es: "Error: %{error}"
|
||||
fr: "Erreur: %{error}"
|
||||
zh_TW: "錯誤:%{error}"
|
||||
"Failed":
|
||||
en: "Failed"
|
||||
es: "Fallido"
|
||||
fr: "Échec"
|
||||
zh_TW: "失敗"
|
||||
"pulling":
|
||||
en: "pulling"
|
||||
es: "extracción"
|
||||
fr: "récupérer"
|
||||
zh_TW: "正在拉取"
|
||||
"Changed":
|
||||
en: "Changed"
|
||||
es: "Cambiado"
|
||||
fr: "Modifié"
|
||||
zh_TW: "已更改"
|
||||
"Up-to-date":
|
||||
en: "Up-to-date"
|
||||
es: "Actualizado"
|
||||
fr: "À jour"
|
||||
zh_TW: "已為最新版本"
|
||||
"Self update":
|
||||
en: "Self update"
|
||||
es: "Autoactualización"
|
||||
fr: "Auto mise à jour"
|
||||
zh_TW: "自行更新"
|
||||
|
||||
# The following 2 strings are used in the same sentence
|
||||
# i.e. *Only* updated repositories will be shown...
|
||||
# Note that the text *Only* is highlighted in green
|
||||
|
||||
"Only":
|
||||
en: "Only"
|
||||
es: "Solo"
|
||||
fr: "Seulement"
|
||||
zh_TW: "將僅"
|
||||
"updated repositories will be shown...":
|
||||
en: "updated repositories will be shown..."
|
||||
es: "se mostrarán los repositorios actualizados..."
|
||||
fr: "les dépôts mis à jour seront affichés..."
|
||||
zh_TW: "顯示被更新的 git 來源..."
|
||||
|
||||
"because it has no remotes":
|
||||
en: "because it has no remotes"
|
||||
es: "porque no tiene fuentes remotas"
|
||||
fr: "parce qu'il n'a aucun dépôt distant"
|
||||
zh_TW: "因為其沒有遠端來源"
|
||||
"Skipping":
|
||||
en: "Skipping"
|
||||
es: "Omitiendo"
|
||||
fr: "Ignoré"
|
||||
zh_TW: "正在略過"
|
||||
"Aura(<0.4.6) requires sudo installed to work with AUR packages":
|
||||
en: "Aura(<0.4.6) requires sudo installed to work with AUR packages"
|
||||
es: "Aura(<0.4.6) requiere tener sudo instalado para funcionar con paquetes AUR"
|
||||
fr: "Aura(<0.4.6) nécessite sudo pour fonctionner avec les paquets AUR"
|
||||
zh_TW: "Aura(<0.4.6)依賴 sudo 安裝 AUR 套件"
|
||||
"Pacman backup configuration files found:":
|
||||
en: "Pacman backup configuration files found:"
|
||||
es: "Archivos de respaldo de Pacman encontrados:"
|
||||
fr: "Fichiers de configuration de sauvegarde de Pacman trouvés :"
|
||||
zh_TW: "找到 Pacman 設定備份檔:"
|
||||
"The package audit was successful, but vulnerable packages still remain on the system":
|
||||
en: "The package audit was successful, but vulnerable packages still remain on the system"
|
||||
es: "La auditoría del paquete fue exitosa, pero aún quedan paquetes vulnerables en el sistema"
|
||||
fr: "L'audit des paquets a réussi, mais des paquets vulnérables restent toujours sur le système"
|
||||
zh_TW: "雖然套件檢測成功,但系統仍然包含危險套件"
|
||||
"Syncing portage":
|
||||
en: "Syncing portage"
|
||||
es: "Sincronizando portage"
|
||||
fr: "Synchronisation du portage"
|
||||
zh_TW: "正在同步 portage"
|
||||
"Finding available software":
|
||||
en: "Finding available software"
|
||||
es: "Buscando software disponible"
|
||||
fr: "Recherche de logiciels disponible"
|
||||
zh_TW: "正在尋找軟體"
|
||||
"A system update is available. Do you wish to install it?":
|
||||
en: "A system update is available. Do you wish to install it?"
|
||||
es: "Hay una actualización del sistema disponible. ¿Desea instalarla?"
|
||||
fr: "Une mise à jour du système est disponible. Voulez-vous l'installer ?"
|
||||
zh_TW: "系統更新已就緒。是否現在安裝?"
|
||||
"No new software available.":
|
||||
en: "No new software available."
|
||||
es: "No hay ningún software nuevo disponible."
|
||||
fr: "Aucun nouveau logiciel disponible."
|
||||
zh_TW: "沒有新軟體。"
|
||||
"No Xcode releases installed.":
|
||||
en: "No Xcode releases installed."
|
||||
es: "No hay versiones de Xcode instaladas."
|
||||
fr: "Aucune version de Xcode installée."
|
||||
zh_TW: "尚未安裝 Xcode 發行版。"
|
||||
"Would you like to move the former Xcode release to the trash?":
|
||||
en: "Would you like to move the former Xcode release to the trash?"
|
||||
es: "¿Le gustaría mover la versión anterior de Xcode a la papelera?"
|
||||
fr: "Voulez-vous déplacer la précédente version de Xcode à la corbeille ?"
|
||||
zh_TW: "是否將舊版 Xcode 移至垃圾桶?"
|
||||
"New Xcode release detected:":
|
||||
en: "New Xcode release detected:"
|
||||
es: "Nueva versión de Xcode detectada:"
|
||||
fr: "Nouvelle version de Xcode détectée :"
|
||||
zh_TW: "有新的 Xcode 版本:"
|
||||
"Would you like to install it?":
|
||||
en: "Would you like to install it?"
|
||||
es: "¿Le gustaría instalarlo?"
|
||||
fr: "Voulez-vous l'installer ?"
|
||||
zh_TW: "是否現在安裝?"
|
||||
"No global packages installed":
|
||||
en: "No global packages installed"
|
||||
es: "No hay paquetes globales instalados"
|
||||
fr: "Aucun paquet global n'est installé"
|
||||
zh_TW: "尚未安裝全域套件"
|
||||
"Remote Topgrade launched in Tmux":
|
||||
en: "Remote Topgrade launched in Tmux"
|
||||
es: "Topgrade remoto lanzado en Tmux"
|
||||
fr: "Topgrade distant lancé dans Tmux"
|
||||
zh_TW: "已在 Tmux 中啟動遠端 Topgrade 程序"
|
||||
"Remote Topgrade launched in an external terminal":
|
||||
en: "Remote Topgrade launched in an external terminal"
|
||||
es: "Topgrade remoto iniciado en una terminal externa"
|
||||
fr: "Topgrade distant lancé dans un terminal externe"
|
||||
zh_TW: "已在新終端機視窗中啟動遠端 Topgrade"
|
||||
"Collecting Vagrant boxes":
|
||||
en: "Collecting Vagrant boxes"
|
||||
es: "Recolectando cajas Vagrant"
|
||||
fr: "Collecte des boîtes Vagrant"
|
||||
zh_TW: "正在蒐集 Vagrant 容器"
|
||||
"No Vagrant directories were specified in the configuration file":
|
||||
en: "No Vagrant directories were specified in the configuration file"
|
||||
es: "No se especificaron directorios Vagrant en el archivo de configuración"
|
||||
fr: "Aucun répertoire Vagrant n'est spécifié dans le fichier de configuration"
|
||||
zh_TW: "尚未在設定檔中指定 Vagrant 資料夾"
|
||||
"Vagrant boxes":
|
||||
en: "Vagrant boxes"
|
||||
es: "Cajas Vagrant"
|
||||
fr: "Boîtes Vagrant"
|
||||
zh_TW: "Vagrant 容器"
|
||||
"No outdated boxes":
|
||||
en: "No outdated boxes"
|
||||
es: "Sin cajas obsoletas"
|
||||
fr: "Aucune boîte obsolète"
|
||||
zh_TW: "未有需要更新的容器"
|
||||
"Summary":
|
||||
en: "Summary"
|
||||
es: "Resumen"
|
||||
fr: "Résumé"
|
||||
zh_TW: "結果"
|
||||
"Topgrade finished with errors":
|
||||
en: "Topgrade finished with errors"
|
||||
es: "Topgrade finalizado con errores"
|
||||
fr: "Topgrade terminé avec des erreurs"
|
||||
zh_TW: "Topgrade 執行部分成功"
|
||||
"Topgrade finished successfully":
|
||||
en: "Topgrade finished successfully"
|
||||
es: "Topgrade finalizó exitosamente"
|
||||
fr: "Topgrade terminé avec succès"
|
||||
zh_TW: "Topgrade 執行成功"
|
||||
"Topgrade {version_str} Breaking Changes":
|
||||
en: "Topgrade %{version_str} Breaking Changes"
|
||||
es: "Topgrade %{version_str} Cambios Importantes"
|
||||
fr: "Topgrade %{version_str} Changements Cassants"
|
||||
zh_TW: "Topgrade %{version_str} 重大更改"
|
||||
"Path {path} expanded to {expanded}":
|
||||
en: "Path %{path} expanded to %{expanded}"
|
||||
es: "Ruta %{path} expandida a %{expanded}"
|
||||
fr: "Le chemin %{path} a été transformé en %{expanded}"
|
||||
zh_TW: "已擴展 %{path} 至 %{expanded}"
|
||||
"Path {path} doesn't exist":
|
||||
en: "Path %{path} doesn't exist"
|
||||
es: "La ruta %{path} no existe"
|
||||
fr: "Le chemin %{path} n'existe pas"
|
||||
zh_TW: "路徑 %{path} 不存在"
|
||||
"Cannot find {binary_name} in PATH":
|
||||
en: "Cannot find %{binary_name} in PATH"
|
||||
es: "No se pudo encontrar %{binary_name} en PATH"
|
||||
fr: "Impossible de trouver %{binary_name} dans le PATH"
|
||||
zh_TW: "在 $PATH 中找不到 %{binary_name} 執行檔"
|
||||
"Failed to get a UTF-8 encoded hostname":
|
||||
en: "Failed to get a UTF-8 encoded hostname"
|
||||
es: "Error al obtener un nombre de host codificado en UTF-8"
|
||||
fr: "Échec de l'obtention d'un nom d'hôte encodé en UTF-8"
|
||||
zh_TW: "無法取得 UTF-8 編碼的主機名稱"
|
||||
"Failed to get hostname: {err}":
|
||||
en: "Failed to get hostname: %{err}"
|
||||
es: "Error al obtener el nombre del host: %{err}"
|
||||
fr: "Échec de l'obtention d'un nom d'hôte: %{err}"
|
||||
zh_TW: "無法取得主機名稱:%{err}"
|
||||
"{python} is a Python 2, skip.":
|
||||
en: "%{python} is a Python 2, skip."
|
||||
es: "%{python} es Python 2, omitiendo."
|
||||
fr: "%{python} est un Python 2, ignoré."
|
||||
zh_TW: "%{python} 是 Python 2,略過。"
|
||||
"{python} is a Python shim, skip.":
|
||||
en: "%{python} is a Python shim, skip."
|
||||
es: "%{python} es una corrección de Python, omitiendo."
|
||||
fr: "%{python} est un shim Python, ignoré."
|
||||
zh_TW: "%{python} 是 Python shim,略過。"
|
||||
"{key} failed:":
|
||||
en: "%{key} failed:"
|
||||
es: "%{key} ha fallado:"
|
||||
fr: "%{key} a échoué :"
|
||||
zh_TW: "%{key} 失敗:"
|
||||
"{step_name} failed":
|
||||
en: "%{step_name} failed"
|
||||
es: "%{step_name} fallido"
|
||||
fr: "%{step_name} a échoué"
|
||||
zh_TW: "%{step_name} 失敗"
|
||||
"DragonFly BSD Packages":
|
||||
en: "DragonFly BSD Packages"
|
||||
es: "Paquetes BSD de DragonFly"
|
||||
fr: "Paquets DragonFly BSD"
|
||||
zh_TW: "DragonFly BSD 套件"
|
||||
"DragonFly BSD Audit":
|
||||
en: "DragonFly BSD Audit"
|
||||
es: "Auditoría de DragonFly BSD"
|
||||
fr: "Audit de DragonFly BSD"
|
||||
zh_TW: "DragonFly BSD 紀錄"
|
||||
"FreeBSD Update":
|
||||
en: "FreeBSD Update"
|
||||
es: "Actualización de FreeBSD"
|
||||
fr: "Mise à jour de FreeBSD"
|
||||
zh_TW: "FreeBSD 更新"
|
||||
"FreeBSD Packages":
|
||||
en: "FreeBSD Packages"
|
||||
es: "Paquetes FreeBSD"
|
||||
fr: "Paquets FreeBSD"
|
||||
zh_TW: "FreeBSD 套件"
|
||||
"FreeBSD Audit":
|
||||
en: "FreeBSD Audit"
|
||||
es: "Auditoría FreeBSD"
|
||||
fr: "Audit de FreeBSD"
|
||||
zh_TW: "FreeBSD 紀錄"
|
||||
"System update":
|
||||
en: "System update"
|
||||
es: "Actualización del sistema"
|
||||
fr: "Mise à jour du système"
|
||||
zh_TW: "系統更新"
|
||||
"needrestart will be ran by the package manager":
|
||||
en: "needrestart will be ran by the package manager"
|
||||
es: "needrestart será ejecutado por el administrador de paquetes"
|
||||
fr: "needrestart sera exécuté par le gestionnaire de paquets"
|
||||
zh_TW: "needrestart 將被套件管理員執行"
|
||||
"Check for needed restarts":
|
||||
en: "Check for needed restarts"
|
||||
es: "Comprobando si es necesario reiniciar el sistema"
|
||||
fr: "Vérification des redémarrages nécessaires"
|
||||
zh_TW: "正在檢查是否需要重新啟動系統"
|
||||
"Should not run in WSL":
|
||||
en: "Should not run in WSL"
|
||||
es: "No se debe ejecutar en WSL"
|
||||
fr: "Ne doit pas être exécuté dans WSL"
|
||||
zh_TW: "不該在 WSL 中執行"
|
||||
"Firmware upgrades":
|
||||
en: "Firmware upgrades"
|
||||
es: "Actualizaciones de firmware"
|
||||
fr: "Mises à jour du firmware"
|
||||
zh_TW: "韌體更新"
|
||||
"Flatpak System Packages":
|
||||
en: "Flatpak System Packages"
|
||||
es: "Paquetes del sistema Flatpak"
|
||||
fr: "Paquets système Flatpak"
|
||||
zh_TW: "Flatpak 系統套件"
|
||||
"Snapd socket does not exist":
|
||||
en: "Snapd socket does not exist"
|
||||
es: "El socket Snapd no existe"
|
||||
fr: "Le socket Snapd n'existe pas"
|
||||
zh_TW: "找不到 Snapd 程序"
|
||||
"You need to specify at least one container":
|
||||
en: "You need to specify at least one container"
|
||||
es: "Necesita especificar al menos un contenedor"
|
||||
fr: "Vous devez spécifier au moins un conteneur"
|
||||
zh_TW: "必須指定至少一個容器"
|
||||
"Skipped in --yes":
|
||||
en: "Skipped in --yes"
|
||||
es: "Omitido por --yes"
|
||||
fr: "Ignoré avec --yes"
|
||||
zh_TW: "指定 --yes,略過"
|
||||
"Configuration update":
|
||||
en: "Configuration update"
|
||||
es: "Actualización de configuración"
|
||||
fr: "Mise à jour de la configuration"
|
||||
zh_TW: "設定更新"
|
||||
"Going to execute `waydroid upgrade`, which would STOP the running container, is this ok?":
|
||||
en: "Going to execute `waydroid upgrade`, which would STOP the running container, is this ok?"
|
||||
es: "Se ejecutará `waydroid update`, lo que DETENDRÁ el contenedor en ejecución. ¿Está bien?"
|
||||
fr: "`waydroid upgrade` va s'exécuter, ce qui ARRÊTERA le conteneur en cours d'exécution, est-ce ok ?"
|
||||
zh_TW: "將略過 `waydroid upgrade`,並且「停止」執行容器。是否繼續?"
|
||||
"Skip the Waydroid step because the user don't want to proceed":
|
||||
en: "Skip the Waydroid step because the user don't want to proceed"
|
||||
es: "Omitiendo el paso de Waydroid debido a que el usuario no quiere continuar"
|
||||
fr: "Passer l'étape Waydroid car l'utilisateur ne souhaite pas l'exécuter"
|
||||
zh_TW: "使用者指定略過 Waydroid 程序"
|
||||
"macOS App Store":
|
||||
en: "macOS App Store"
|
||||
es: "Tienda de aplicaciones macOS"
|
||||
fr: "macOS App Store"
|
||||
zh_TW: "macOS App Store"
|
||||
"macOS system update":
|
||||
en: "macOS system update"
|
||||
es: "Actualización del sistema macOS"
|
||||
fr: "Mise à jour du système macOS"
|
||||
zh_TW: "macOS 系統更新"
|
||||
"OpenBSD Update":
|
||||
en: "OpenBSD Update"
|
||||
es: "Actualización de OpenBSD"
|
||||
fr: "Mise à jour d'OpenBSD"
|
||||
zh_TW: "OpenBSD 更新"
|
||||
"OpenBSD Packages":
|
||||
en: "OpenBSD Packages"
|
||||
es: "Paquetes OpenBSD"
|
||||
fr: "Paquets OpenBSD"
|
||||
zh_TW: "OpenBSD 套件"
|
||||
"`fisher` is not defined in `fish`":
|
||||
en: "`fisher` is not defined in `fish`"
|
||||
es: "`fisher` no está definido en `fish`"
|
||||
fr: "`fisher` n'est pas reconnu dans `fish`"
|
||||
zh_TW: "`fisher` 未在 `fish` 中指定"
|
||||
"`fish_plugins` path doesn't exist: {err}":
|
||||
en: "`fish_plugins` path doesn't exist: %{err}"
|
||||
es: "La ruta `fish_plugins` no existe: %{err}"
|
||||
fr: "Le chemin `fish_plugins` n'existe pas : %{err}"
|
||||
zh_TW: "不存在 `fish_plugins` 路徑:%{err}"
|
||||
"`fish_update_completions` is not available":
|
||||
en: "`fish_update_completions` is not available"
|
||||
es: "`fish_update_completions` no está disponible"
|
||||
fr: "`fish_update_completions` n'est pas disponible"
|
||||
zh_TW: "無法使用 `fish_update_completions`"
|
||||
"Desktop doest not appear to be gnome":
|
||||
en: "Desktop doest not appear to be gnome"
|
||||
es: "El escritorio no parece ser Gnome"
|
||||
fr: "Le bureau ne semble pas être Gnome"
|
||||
zh_TW: "桌面環境不是 Gnome"
|
||||
"Gnome shell extensions are unregistered in DBus":
|
||||
en: "Gnome shell extensions are unregistered in DBus"
|
||||
es: "Las extensiones de Gnome Shell no están registradas en DBus"
|
||||
fr: "Les extensions de Gnome Shell ne sont pas enregistrées dans DBus"
|
||||
zh_TW: "Gnome Shell 擴充功能在 DBus 中未被註冊"
|
||||
"Gnome Shell extensions":
|
||||
en: "Gnome Shell extensions"
|
||||
es: "Extensiones de Gnome Shell"
|
||||
fr: "Extensions de Gnome Shell"
|
||||
zh_TW: "Gnome Shell 擴充功能"
|
||||
"Not a custom brew for macOS":
|
||||
en: "Not a custom brew for macOS"
|
||||
es: "No es un brew personalizado para macOS"
|
||||
fr: "Pas une version de brew personnalisée pour macOS"
|
||||
zh_TW: "不是專門的 macOS brew"
|
||||
"Guix Pull Failed, Skipping":
|
||||
en: "Guix Pull Failed, Skipping"
|
||||
es: "Guix Pull Fallido, omitiendo"
|
||||
fr: "Échec de Guix Pull, ignoré"
|
||||
zh_TW: "Guix 拉取失敗,略過"
|
||||
"Nix-darwin on macOS must be upgraded via darwin-rebuild switch":
|
||||
en: "Nix-darwin on macOS must be upgraded via darwin-rebuild switch"
|
||||
es: "Nix-darwin en macOS debe actualizarse mediante el interruptor darwin-rebuild"
|
||||
fr: "Nix-darwin sur macOS doit être mis à niveau via l'option darwin-rebuild"
|
||||
zh_TW: "Nix-darwin 在 macOS 上必須使用 darwin-rebuild 更新"
|
||||
"`nix upgrade-nix` can only be used on macOS or non-NixOS Linux":
|
||||
en: "`nix upgrade-nix` can only be used on macOS or non-NixOS Linux"
|
||||
es: "`nix update-nix` solo puede usarse en macOS o Linux que no sea NixOS"
|
||||
fr: "`nix upgrade-nix` ne peut être utilisée que sur macOS ou Linux non-NixOS"
|
||||
zh_TW: "`nix upgrade-nix` 僅能在 macOS 或非 NixOS 的 Linux 上使用"
|
||||
"`nix upgrade-nix` cannot be run when Nix is installed in a profile":
|
||||
en: "`nix upgrade-nix` cannot be run when Nix is installed in a profile"
|
||||
es: "`nix Upgrade-nix` no puede ejecutarse cuando Nix está instalado en un perfil"
|
||||
fr: "`nix upgrade-nix` ne peut pas être exécutée lorsque Nix est installé dans un profil"
|
||||
zh_TW: "`nix upgrade-nix` 無法在已安裝 Nix 使用者環境的系統上使用"
|
||||
"Nix (self-upgrade)":
|
||||
en: "Nix (self-upgrade)"
|
||||
es: "Nix (autoactualización)"
|
||||
fr: "Nix (auto mise à niveau)"
|
||||
zh_TW: "Nix(自行更新)"
|
||||
"Pyenv is installed, but $PYENV_ROOT is not set correctly":
|
||||
en: "Pyenv is installed, but $PYENV_ROOT is not set correctly"
|
||||
es: "Pyenv está instalado, pero $PYENV_ROOT no está configurado correctamente"
|
||||
fr: "Pyenv est installé, mais $PYENV_ROOT n'est pas défini correctement"
|
||||
zh_TW: "已安裝 Pyenv 但尚未正確設定 $PYENV_ROOT"
|
||||
"pyenv is not a git repository":
|
||||
en: "pyenv is not a git repository"
|
||||
es: "pyenv no es un repositorio git"
|
||||
fr: "pyenv n'est pas un dépôt Git"
|
||||
zh_TW: "pyenv 不是 git 來源"
|
||||
"Bun Packages":
|
||||
en: "Bun Packages"
|
||||
es: "Paquetes Bun"
|
||||
fr: "Paquets Bun"
|
||||
zh_TW: "Bun 套件"
|
||||
"WSL not installed":
|
||||
en: "WSL not installed"
|
||||
es: "WSL no instalado"
|
||||
fr: "WSL n'est pas installé"
|
||||
zh_TW: "未安裝 WSL"
|
||||
"Update WSL":
|
||||
en: "Update WSL"
|
||||
es: "Actualizando WSL"
|
||||
fr: "Mise à jour du WSL"
|
||||
zh_TW: "更新 WSL"
|
||||
"Could not find Topgrade installed in WSL":
|
||||
en: "Could not find Topgrade installed in WSL"
|
||||
es: "Topgrade no se ha instalado dentro de WSL"
|
||||
fr: "Impossible de trouver Topgrade installé dans WSL"
|
||||
zh_TW: "尚未在 WSL 內安裝 Topgrade"
|
||||
"Consider installing PSWindowsUpdate as the use of Windows Update via USOClient is not supported.":
|
||||
en: "Consider installing PSWindowsUpdate as the use of Windows Update via USOClient is not supported."
|
||||
es: "Considere instalar PSWindowsUpdate ya que no se admite el uso de Windows Update a través de USOClient."
|
||||
fr: "Envisagez d'installer PSWindowsUpdate car l'utilisation de Windows Update via USOClient n'est pas prise en charge."
|
||||
zh_TW: "目前不支援使用 USOClient 管理 Windows 更新。建議安裝 PSWindowsUpdate。"
|
||||
"USOClient not supported.":
|
||||
en: "USOClient not supported."
|
||||
es: "USOClient no es admitido."
|
||||
fr: "USOClient n'est pas pris en charge."
|
||||
zh_TW: "不支援 USOClient。"
|
||||
"Connecting to {hostname}...":
|
||||
en: "Connecting to %{hostname}..."
|
||||
es: "Conectándose a %{hostname}..."
|
||||
fr: "Connexion à %{hostname}..."
|
||||
zh_TW: "正在連接 %{hostname}..."
|
||||
"Skipping powered off box {vagrant_box}":
|
||||
en: "Skipping powered off box %{vagrant_box}"
|
||||
es: "Omitiendo el contenedor apagado %{vagrant_box}"
|
||||
fr: "Ingorer la boîte éteinte %{vagrant_box}"
|
||||
zh_TW: "正在略過已關機的容器 %{vagrant_box}"
|
||||
"`{repo_tag}` for `{platform}`":
|
||||
en: "`%{repo_tag}` for `%{platform}`"
|
||||
es: "`%{repo_tag}` para `%{platform}`"
|
||||
fr: "`%{repo_tag}` pour `%{platform}`"
|
||||
zh_TW: "`%{repo_tag}` 給 `%{platform}`"
|
||||
"Containers":
|
||||
en: "Containers"
|
||||
es: "Contenedores"
|
||||
fr: "Conteneurs"
|
||||
zh_TW: "容器"
|
||||
"Emacs directory does not exist":
|
||||
en: "Emacs directory does not exist"
|
||||
es: "El directorio Emacs no existe"
|
||||
fr: "Le répertoire Emacs n'existe pas"
|
||||
zh_TW: "找不到 Emacs 資料夾"
|
||||
"Error getting the composer directory: {error}":
|
||||
en: "Error getting the composer directory: %{error}"
|
||||
es: "Error al obtener el directorio de composer: %{error}"
|
||||
fr: "Erreur lors de la récupération du répertoire de Composer : %{error}"
|
||||
zh_TW: "無法取得 composer 資料夾:%{error}"
|
||||
"Composer directory {composer_home} isn't a descendant of the user's home directory":
|
||||
en: "Composer directory %{composer_home} isn't a descendant of the user's home directory"
|
||||
es: "El directorio de composer %{composer_home} no es descendiente del directorio de inicio del usuario"
|
||||
fr: "Le répertoire de Composer %{composer_home} n'est pas un descendant du répertoire home de l'utilisateur"
|
||||
zh_TW: "Composer 資料夾 %{composer_home} 不在家目錄下"
|
||||
"Composer":
|
||||
en: "Composer"
|
||||
es: "Composer"
|
||||
fr: "Composer"
|
||||
zh_TW: "Composer"
|
||||
"Error running `dotnet tool list`. This is expected when a dotnet runtime is installed but no SDK.":
|
||||
en: "Error running `dotnet tool list`. This is expected when a dotnet runtime is installed but no SDK."
|
||||
es: "Error al ejecutar `dotnet tool list`. Esto es lo esperado cuando se instala un entorno de ejecución dotnet pero no un SDK."
|
||||
fr: "Erreur lors de l'exécution de `dotnet tool list`. Ce comportement est attendu lorsque le runtime dotnet est installé mais pas le SDK."
|
||||
zh_TW: "執行 `dotnet tool list` 失敗。已安裝 dotnet 執行環境但未安裝 SDK"
|
||||
"No dotnet global tools installed":
|
||||
en: "No dotnet global tools installed"
|
||||
es: "No hay herramientas globales dotnet instaladas"
|
||||
fr: "Aucun outil global dotnet installé"
|
||||
zh_TW: "尚未安裝全域 dotnet 工具"
|
||||
"Racket Package Manager":
|
||||
en: "Racket Package Manager"
|
||||
es: "Administrador de paquetes Racket"
|
||||
fr: "Gestionnaire de paquets Racket"
|
||||
zh_TW: "Racket 套件管理員"
|
||||
"GH failed":
|
||||
en: "GH failed"
|
||||
es: "GH fallido"
|
||||
fr: "Échec de GH"
|
||||
zh_TW: "GH 失敗"
|
||||
"GitHub CLI Extensions":
|
||||
en: "GitHub CLI Extensions"
|
||||
es: "Extensiones de GitHub CLI"
|
||||
fr: "Extensions de Github CLI"
|
||||
zh_TW: "GitHub CLI 擴充功能"
|
||||
"Julia Packages":
|
||||
en: "Julia Packages"
|
||||
es: "Paquetes Julia"
|
||||
fr: "Paquets Julia"
|
||||
zh_TW: "Julia 套件"
|
||||
"Update ClamAV Database(FreshClam)":
|
||||
en: "Update ClamAV Database(FreshClam)"
|
||||
es: "Actualizando base de datos ClamAV (FreshClam)"
|
||||
fr: "Mise à jour de la base de données ClamAV (FreshClam)"
|
||||
zh_TW: "更新 ClamAV 資料庫(FreshClam)"
|
||||
"Path {pattern} did not contain any git repositories":
|
||||
en: "Path %{pattern} did not contain any git repositories"
|
||||
es: "La ruta %{pattern} no contenía ningún repositorio git"
|
||||
fr: "Le chemin %{pattern} ne contenait aucun dépôt Git"
|
||||
zh_TW: "路徑 %{pattern} 中沒有任何 git 來源"
|
||||
"No repositories to pull":
|
||||
en: "No repositories to pull"
|
||||
es: "No hay repositorios que extraer"
|
||||
fr: "Aucun dépôt à récupérer"
|
||||
zh_TW: "沒有來源可以拉取"
|
||||
"Git repositories":
|
||||
en: "Git repositories"
|
||||
es: "Repositorios Git"
|
||||
fr: "Dépôts Git"
|
||||
zh_TW: "Git 來源"
|
||||
"Would pull {repo}":
|
||||
en: "Would pull %{repo}"
|
||||
es: "Extrayendo %{repo}"
|
||||
fr: "Tirerait %{repo}"
|
||||
zh_TW: "拉取 %{repo}"
|
||||
|
||||
# aka npm
|
||||
"Node Package Manager":
|
||||
en: "Node Package Manager"
|
||||
es: "Node Package Manager (npm)"
|
||||
fr: "Gestionnaire de paquets Node (npm)"
|
||||
zh_TW: "Node 套件管理員(npm)"
|
||||
# aka pnpm
|
||||
"Performant Node Package Manager":
|
||||
en: "Performant Node Package Manager"
|
||||
es: "Performant Node Package Manager (pnpm)"
|
||||
fr: "Gestionnaire de paquets Node performant (pnpm)"
|
||||
zh_TW: "效能 Node 套件管理員(pnpm)"
|
||||
"Yarn Package Manager":
|
||||
en: "Yarn Package Manager"
|
||||
es: "Administrador de paquetes Yarn"
|
||||
fr: "Gestionnaire de paquets Yarn"
|
||||
zh_TW: "Yarn 套件管理員"
|
||||
"Deno installed outside of .deno directory":
|
||||
en: "Deno installed outside of .deno directory"
|
||||
es: "Deno está instalado fuera del directorio .deno"
|
||||
fr: "Deno est installé en dehors du répertoire .deno"
|
||||
zh_TW: "Deno 安裝在 .deno 資料夾外"
|
||||
"The Ultimate vimrc":
|
||||
en: "The Ultimate vimrc"
|
||||
es: "El vimrc definitivo (The Ultimate vimrc)"
|
||||
fr: "The Ultimate vimrc"
|
||||
zh_TW: "終極 vimrc(The Ultimate vimrc)"
|
||||
"vim binary might be actually nvim":
|
||||
en: "vim binary might be actually nvim"
|
||||
es: "el binario vim puede ser nvim"
|
||||
fr: "Le binaire vim pourrait être en réalité nvim"
|
||||
zh_TW: "vim 執行檔可能為 nvim"
|
||||
"`{process}` failed: {exit_status}":
|
||||
en: "`%{process}` failed: %{exit_status}"
|
||||
es: "`%{process}` falló: %{exit_status}"
|
||||
fr: "`%{process}` a échoué : %{exit_status}"
|
||||
zh_TW: "`%{process}` 失敗:%{exit_status}"
|
||||
"`{process}` failed: {exit_status} with {output}":
|
||||
en: "`%{process}` failed: %{exit_status} with %{output}"
|
||||
es: "`%{process}` falló: %{exit_status} con %{output}"
|
||||
fr: "`%{process}` a échoué : %{exit_status} avec %{output}"
|
||||
zh_TW: "`%{process}` 失敗:%{exit_status} 伴隨 %{output}"
|
||||
"Unknown Linux Distribution":
|
||||
en: "Unknown Linux Distribution"
|
||||
es: "Distribución de Linux desconocida"
|
||||
fr: "Distribution Linux inconnue"
|
||||
zh_TW: "未知 Linux"
|
||||
'File "/etc/os-release" does not exist or is empty':
|
||||
en: 'File "/etc/os-release" does not exist or is empty'
|
||||
es: 'El archivo "/etc/os-release" no existe o está vacío'
|
||||
fr: "Le fichier \"/etc/os-release\" n'existe pas ou est vide"
|
||||
zh_TW: '「/etc/os-release」不存在或為空'
|
||||
"Failed getting the system package manager":
|
||||
en: "Failed getting the system package manager"
|
||||
es: "Error al obtener el administrador de paquetes del sistema"
|
||||
fr: "Échec de l'obtention du gestionnaire de paquets système"
|
||||
zh_TW: "偵測系統套件管理員失敗"
|
||||
"A step failed":
|
||||
en: "A step failed"
|
||||
es: "Un paso fallido"
|
||||
fr: "Une étape a échouée"
|
||||
zh_TW: "某步驟執行失敗"
|
||||
"Dry running":
|
||||
en: "Dry running"
|
||||
es: "Simulando"
|
||||
fr: "Simulation"
|
||||
zh_TW: "模擬執行"
|
||||
"Topgrade Upgraded":
|
||||
en: "Topgrade Upgraded"
|
||||
es: "Topgrade Actualizado"
|
||||
fr: "Topgrade mis à jour"
|
||||
zh_TW: "已更新 Topgrade"
|
||||
|
||||
# Summary texts
|
||||
"OK":
|
||||
en: "OK"
|
||||
es: "OK"
|
||||
fr: "OK"
|
||||
zh_TW: "成功"
|
||||
"FAILED":
|
||||
en: "FAILED"
|
||||
es: "FALLIDO"
|
||||
fr: "ÉCHEC"
|
||||
zh_TW: "失敗"
|
||||
"IGNORED":
|
||||
en: "IGNORED"
|
||||
es: "IGNORADO"
|
||||
fr: "IGNORÉ"
|
||||
zh_TW: "忽略"
|
||||
"SKIPPED":
|
||||
en: "SKIPPED"
|
||||
es: "OMITIDO"
|
||||
fr: "PASSÉ"
|
||||
zh_TW: "略過"
|
||||
|
||||
# 'Y' and 'N' have to stay the same characters. Eg for German the translation
|
||||
# would look sth like "(Y) Ja / (N) Nein"
|
||||
"(Y)es/(N)o":
|
||||
en: "(Y)es/(N)o"
|
||||
es: "(Y) Si / (N) No"
|
||||
fr: "(Y) Oui / (N) Non"
|
||||
zh_TW: "(Y)是/(N)否"
|
||||
|
||||
# 'y', 'N', 's', 'q' have to stay the same throughout all translations.
|
||||
# Eg German would look like "(y) Wiederholen / (N) Nein / (s) Konsole / (q) Beenden"
|
||||
"Retry? (y)es/(N)o/(s)hell/(q)uit":
|
||||
en: "Retry? (y)es/(N)o/(s)hell/(q)uit"
|
||||
es: "¿Reintentar? (y) Si / (N) No / (s) Shell / (q) Salir"
|
||||
fr: "Réessayer ? (y) Oui / (N) Non / (s) Shell / (q) Quitter"
|
||||
zh_TW: "再試一次? (y)是/(N)否/(s)殼層/(q)退出"
|
||||
# 'R', 'S', 'Q' have to stay the same throughout all translations. Eg German would look like "\n(R) Neustarten\n(S) Konsole\n(Q) Beenden"
|
||||
'\n(R)eboot\n(S)hell\n(Q)uit':
|
||||
en: '\n(R)eboot\n(S)hell\n(Q)uit'
|
||||
es: "\n(R) Reiniciar\n(S) Shell\n(Q) Salir"
|
||||
fr: '\n(R) Redémarrer\n(S) Shell\n(Q) Quitter'
|
||||
zh_TW: '\n(R)重新啟動\n(S)殼層\n(Q)退出'
|
||||
"Require sudo or counterpart but not found, skip":
|
||||
en: "Require sudo or counterpart but not found, skip"
|
||||
es: "Se requiere sudo o su equivalente pero no ha sido encontrado, omitiendo"
|
||||
fr: "Nécessite sudo ou un équivalent mais n'a pas été trouvé, passé"
|
||||
zh_TW: "找不到權限管理程式(sudo 等),略過"
|
||||
"sudo as user '{user}'":
|
||||
en: "sudo as user '%{user}'"
|
||||
es: "sudo como usuario '%{user}'"
|
||||
fr: "sudo en tant qu'utilisateur '%{user}'"
|
||||
zh_TW: "sudo 以使用者 '%{user}'"
|
||||
"Updating aqua ...":
|
||||
en: "Updating aqua ..."
|
||||
es: "Actualizando aqua..."
|
||||
fr: "Mise à jour d'aqua..."
|
||||
zh_TW: "正在更新 aqua..."
|
||||
"Updating aqua installed cli tools ...":
|
||||
en: "Updating aqua installed cli tools ..."
|
||||
es: "Actualizando las herramientas CLI instaladas en aqua ..."
|
||||
fr: "Mise à jour des outils cli installés d'aqua..."
|
||||
zh_TW: "正在更新 aqua 安裝的命令行介面工具..."
|
||||
"Updating Volta packages...":
|
||||
en: "Updating Volta packages..."
|
||||
es: "Actualizando paquetes Volta..."
|
||||
fr: "Mise à jour des paquets Volta..."
|
||||
zh_TW: "正在更新 Volta 套件..."
|
||||
"No packages installed with Volta":
|
||||
en: "No packages installed with Volta"
|
||||
es: "No hay paquetes instalados con Volta"
|
||||
fr: "Aucun paquet installé avec Volta"
|
||||
zh_TW: "沒有任何 Volta 套件"
|
||||
"pyenv-update plugin is not installed":
|
||||
en: "pyenv-update plugin is not installed"
|
||||
es: "El plugin pyenv-update no está instalado"
|
||||
fr: "Le plugin pyenv-update n'est pas installé"
|
||||
zh_TW: "尚未安裝 pyenv-update 擴充功能"
|
||||
"Respawning...":
|
||||
en: "Respawning..."
|
||||
es: "Reapareciendo..."
|
||||
fr: "Relancement..."
|
||||
zh_TW: "正在重新生成..."
|
||||
"Could not find Topgrade in any WSL disribution":
|
||||
en: "Could not find Topgrade in any WSL disribution"
|
||||
es: "No se pudo encontrar Topgrade en ninguna distribución WSL"
|
||||
fr: "Impossible de trouver Topgrade dans aucune distribution WSL"
|
||||
zh_TW: "在所有 WSL 中找不到 Topgrade"
|
||||
"Windows Update":
|
||||
en: "Windows Update"
|
||||
es: "Actualización de Windows"
|
||||
fr: "Mise à jour de Windows"
|
||||
zh_TW: "Windows 更新"
|
||||
"Would check if OpenBSD is -current":
|
||||
en: "Would check if OpenBSD is -current"
|
||||
es: "Comprobaría si OpenBSD está en -current"
|
||||
fr: "Vérifierait si OpenBSD est à -curent"
|
||||
zh_TW: "會檢查 OpenBSD 是否為 -current"
|
||||
"Would upgrade the OpenBSD system":
|
||||
en: "Would upgrade the OpenBSD system"
|
||||
es: "Actualizaría el sistema OpenBSD"
|
||||
fr: "Mettrait à jour le système OpenBSD"
|
||||
zh_TW: "會升級 OpenBSD 系統"
|
||||
"Would upgrade OpenBSD packages":
|
||||
en: "Would upgrade OpenBSD packages"
|
||||
es: "Actualizaría los paquetes de OpenBSD"
|
||||
fr: "Mettrait à jour les paquets OpenBSD"
|
||||
zh_TW: "會升級 OpenBSD 套件"
|
||||
"Microsoft Store":
|
||||
en: "Microsoft Store"
|
||||
es: "Tienda de Microsoft"
|
||||
fr: "Microsoft Store"
|
||||
zh_TW: "Microsoft Store"
|
||||
"Scanning for updates...":
|
||||
en: "Scanning for updates..."
|
||||
es: "Buscando actualizaciones..."
|
||||
fr: "Recherche de mises à jour..."
|
||||
zh_TW: "正在掃描更新..."
|
||||
"Success, Microsoft Store apps are being updated in the background":
|
||||
en: "Success, Microsoft Store apps are being updated in the background"
|
||||
es: "Éxito, las aplicaciones de Microsoft Store se están actualizando en segundo plano"
|
||||
fr: "Succès, les applications du Microsoft Store sont en cours de mise à jour en arrière plan"
|
||||
zh_TW: "成功,Microsoft Store 應用程式正在後台更新"
|
||||
"Unable to update Microsoft Store apps, manual intervention is required":
|
||||
en: "Unable to update Microsoft Store apps, manual intervention is required"
|
||||
es: "No se pueden actualizar las aplicaciones de Microsoft Store, se requiere intervención manual"
|
||||
fr: "Impossible de mettre à jour les applications du Microsoft Store, une intervention manuelle est nécessaire"
|
||||
zh_TW: "無法更新 Microsoft Store 應用,需手動幹預"
|
||||
16
pyproject.toml
Normal file
16
pyproject.toml
Normal file
@@ -0,0 +1,16 @@
|
||||
[build-system]
|
||||
requires = ["maturin>=1.0,<2.0"]
|
||||
build-backend = "maturin"
|
||||
|
||||
[project]
|
||||
name = "topgrade"
|
||||
requires-python = ">=3.7"
|
||||
classifiers = [
|
||||
"Programming Language :: Rust",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Programming Language :: Python :: Implementation :: PyPy",
|
||||
]
|
||||
|
||||
|
||||
[tool.maturin]
|
||||
bindings = "bin"
|
||||
2
rust-toolchain.toml
Normal file
2
rust-toolchain.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[toolchain]
|
||||
channel = "1.76.0"
|
||||
171
src/breaking_changes.rs
Normal file
171
src/breaking_changes.rs
Normal file
@@ -0,0 +1,171 @@
|
||||
//! Inform the users of the breaking changes introduced in this major release.
|
||||
//!
|
||||
//! Print the breaking changes and possibly a migration guide when:
|
||||
//! 1. The Topgrade being executed is a new major release
|
||||
//! 2. This is the first launch of that major release
|
||||
|
||||
use crate::terminal::print_separator;
|
||||
#[cfg(windows)]
|
||||
use crate::WINDOWS_DIRS;
|
||||
#[cfg(unix)]
|
||||
use crate::XDG_DIRS;
|
||||
use color_eyre::eyre::Result;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use rust_i18n::t;
|
||||
use std::{
|
||||
env::var,
|
||||
fs::{read_to_string, OpenOptions},
|
||||
io::Write,
|
||||
path::PathBuf,
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
/// Version string x.y.z
|
||||
static VERSION_STR: &str = env!("CARGO_PKG_VERSION");
|
||||
|
||||
/// Version info
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Version {
|
||||
_major: u64,
|
||||
minor: u64,
|
||||
patch: u64,
|
||||
}
|
||||
|
||||
impl FromStr for Version {
|
||||
type Err = std::convert::Infallible;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
const NOT_SEMVER: &str = "Topgrade version is not semantic";
|
||||
const NOT_NUMBER: &str = "Topgrade version is not dot-separated numbers";
|
||||
|
||||
let mut iter = s.split('.').take(3);
|
||||
let major = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
|
||||
let minor = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
|
||||
let patch = iter.next().expect(NOT_SEMVER).parse().expect(NOT_NUMBER);
|
||||
|
||||
// They cannot be all 0s
|
||||
assert!(
|
||||
!(major == 0 && minor == 0 && patch == 0),
|
||||
"Version numbers cannot be all 0s"
|
||||
);
|
||||
|
||||
Ok(Self {
|
||||
_major: major,
|
||||
minor,
|
||||
patch,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Version {
|
||||
/// True if this version is a new major release.
|
||||
pub(crate) fn is_new_major_release(&self) -> bool {
|
||||
// We have already checked that they cannot all be zeros, so `self.major`
|
||||
// is guaranteed to be non-zero.
|
||||
self.minor == 0 && self.patch == 0
|
||||
}
|
||||
}
|
||||
|
||||
/// Topgrade's breaking changes
|
||||
///
|
||||
/// We store them in the compiled binary.
|
||||
pub(crate) static BREAKINGCHANGES: &str = include_str!("../BREAKINGCHANGES.md");
|
||||
|
||||
/// Return platform's data directory.
|
||||
fn data_dir() -> PathBuf {
|
||||
#[cfg(unix)]
|
||||
return XDG_DIRS.data_dir();
|
||||
|
||||
#[cfg(windows)]
|
||||
return WINDOWS_DIRS.data_dir();
|
||||
}
|
||||
|
||||
/// Return Topgrade's keep file path.
|
||||
///
|
||||
/// keep file is a file under the data directory containing a major version
|
||||
/// number, it will be created on first run and is used to check if an execution
|
||||
/// of Topgrade is the first run of a major release, for more details, see
|
||||
/// `first_run_of_major_release()`.
|
||||
fn keep_file_path() -> PathBuf {
|
||||
let keep_file = "topgrade_keep";
|
||||
data_dir().join(keep_file)
|
||||
}
|
||||
|
||||
/// If environment variable `TOPGRADE_SKIP_BRKC_NOTIFY` is set to `true`, then
|
||||
/// we won't notify the user of the breaking changes.
|
||||
pub(crate) fn should_skip() -> bool {
|
||||
if let Ok(var) = var("TOPGRADE_SKIP_BRKC_NOTIFY") {
|
||||
return var.as_str() == "true";
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
/// True if this is the first execution of a major release.
|
||||
pub(crate) fn first_run_of_major_release() -> Result<bool> {
|
||||
let version = VERSION_STR.parse::<Version>().expect("should be a valid version");
|
||||
let keep_file = keep_file_path();
|
||||
|
||||
// disable this lint here as the current code has better readability
|
||||
#[allow(clippy::collapsible_if)]
|
||||
if version.is_new_major_release() {
|
||||
if !keep_file.exists() || read_to_string(&keep_file)? != VERSION_STR {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
/// Print breaking changes to the user.
|
||||
pub(crate) fn print_breaking_changes() {
|
||||
let header = format!(
|
||||
"{}",
|
||||
t!("Topgrade {version_str} Breaking Changes", version_str = VERSION_STR)
|
||||
);
|
||||
print_separator(header);
|
||||
let contents = if BREAKINGCHANGES.is_empty() {
|
||||
t!("No Breaking changes").to_string()
|
||||
} else {
|
||||
BREAKINGCHANGES.to_string()
|
||||
};
|
||||
println!("{contents}\n");
|
||||
}
|
||||
|
||||
/// This function will be ONLY executed when the user has confirmed the breaking
|
||||
/// changes, once confirmed, we write the keep file, which means the first run
|
||||
/// of this major release is finished.
|
||||
pub(crate) fn write_keep_file() -> Result<()> {
|
||||
std::fs::create_dir_all(data_dir())?;
|
||||
let keep_file = keep_file_path();
|
||||
|
||||
let mut file = OpenOptions::new()
|
||||
.create(true)
|
||||
.write(true)
|
||||
.truncate(true)
|
||||
.open(keep_file)?;
|
||||
let _ = file.write(VERSION_STR.as_bytes())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn is_new_major_release_works() {
|
||||
let first_major_release: Version = "1.0.0".parse().unwrap();
|
||||
let under_dev: Version = "0.1.0".parse().unwrap();
|
||||
|
||||
assert!(first_major_release.is_new_major_release());
|
||||
assert!(!under_dev.is_new_major_release());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "Version numbers cannot be all 0s")]
|
||||
fn invalid_version() {
|
||||
let all_0 = "0.0.0";
|
||||
all_0.parse::<Version>().unwrap();
|
||||
}
|
||||
}
|
||||
246
src/command.rs
Normal file
246
src/command.rs
Normal file
@@ -0,0 +1,246 @@
|
||||
//! Utilities for running commands and providing user-friendly error messages.
|
||||
|
||||
use std::fmt::Display;
|
||||
use std::process::Child;
|
||||
use std::process::{Command, ExitStatus, Output};
|
||||
|
||||
use color_eyre::eyre;
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
|
||||
use crate::error::TopgradeError;
|
||||
|
||||
use tracing::debug;
|
||||
|
||||
/// Like [`Output`], but UTF-8 decoded.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct Utf8Output {
|
||||
pub status: ExitStatus,
|
||||
pub stdout: String,
|
||||
pub stderr: String,
|
||||
}
|
||||
|
||||
impl TryFrom<Output> for Utf8Output {
|
||||
type Error = eyre::Error;
|
||||
|
||||
fn try_from(Output { status, stdout, stderr }: Output) -> Result<Self, Self::Error> {
|
||||
let stdout = String::from_utf8(stdout).map_err(|err| {
|
||||
eyre!(
|
||||
"Stdout contained invalid UTF-8: {}",
|
||||
String::from_utf8_lossy(err.as_bytes())
|
||||
)
|
||||
})?;
|
||||
let stderr = String::from_utf8(stderr).map_err(|err| {
|
||||
eyre!(
|
||||
"Stderr contained invalid UTF-8: {}",
|
||||
String::from_utf8_lossy(err.as_bytes())
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok(Utf8Output { status, stdout, stderr })
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&Output> for Utf8Output {
|
||||
type Error = eyre::Error;
|
||||
|
||||
fn try_from(Output { status, stdout, stderr }: &Output) -> Result<Self, Self::Error> {
|
||||
let stdout = String::from_utf8(stdout.to_vec()).map_err(|err| {
|
||||
eyre!(
|
||||
"Stdout contained invalid UTF-8: {}",
|
||||
String::from_utf8_lossy(err.as_bytes())
|
||||
)
|
||||
})?;
|
||||
let stderr = String::from_utf8(stderr.to_vec()).map_err(|err| {
|
||||
eyre!(
|
||||
"Stderr contained invalid UTF-8: {}",
|
||||
String::from_utf8_lossy(err.as_bytes())
|
||||
)
|
||||
})?;
|
||||
let status = *status;
|
||||
|
||||
Ok(Utf8Output { status, stdout, stderr })
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Utf8Output {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.stdout)
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension trait for [`Command`], adding helpers to gather output while checking the exit
|
||||
/// status.
|
||||
///
|
||||
/// These also give us significantly better error messages, which include:
|
||||
///
|
||||
/// 1. The command and arguments that were executed, escaped with familiar `sh` syntax.
|
||||
/// 2. The exit status of the command or the signal that killed it.
|
||||
/// 3. If we were capturing the output of the command, rather than forwarding it to the user's
|
||||
/// stdout/stderr, the error message includes the command's stdout and stderr output.
|
||||
///
|
||||
/// Additionally, executing commands with these methods will log the command at debug-level,
|
||||
/// useful when gathering error reports.
|
||||
pub trait CommandExt {
|
||||
type Child;
|
||||
|
||||
/// Like [`Command::output`], but checks the exit status and provides nice error messages.
|
||||
///
|
||||
/// Returns an `Err` if the command failed to execute or returned a non-zero exit code.
|
||||
#[track_caller]
|
||||
fn output_checked(&mut self) -> eyre::Result<Output> {
|
||||
self.output_checked_with(|output: &Output| if output.status.success() { Ok(()) } else { Err(()) })
|
||||
}
|
||||
|
||||
/// Like [`output_checked`], but also decodes Stdout and Stderr as UTF-8.
|
||||
///
|
||||
/// Returns an `Err` if the command failed to execute, returned a non-zero exit code, or if the
|
||||
/// output contains invalid UTF-8.
|
||||
#[track_caller]
|
||||
fn output_checked_utf8(&mut self) -> eyre::Result<Utf8Output> {
|
||||
let output = self.output_checked()?;
|
||||
output.try_into()
|
||||
}
|
||||
|
||||
/// Like [`output_checked`] but a closure determines if the command failed instead of
|
||||
/// [`ExitStatus::success`].
|
||||
///
|
||||
/// Returns an `Err` if the command failed to execute or if `succeeded` returns an `Err`.
|
||||
/// (This lets the caller substitute their own notion of "success" instead of assuming
|
||||
/// non-zero exit codes indicate success.)
|
||||
#[track_caller]
|
||||
fn output_checked_with(&mut self, succeeded: impl Fn(&Output) -> Result<(), ()>) -> eyre::Result<Output>;
|
||||
|
||||
/// Like [`output_checked_with`], but also decodes Stdout and Stderr as UTF-8.
|
||||
///
|
||||
/// Returns an `Err` if the command failed to execute, if `succeeded` returns an `Err`, or if
|
||||
/// the output contains invalid UTF-8.
|
||||
#[track_caller]
|
||||
fn output_checked_with_utf8(
|
||||
&mut self,
|
||||
succeeded: impl Fn(&Utf8Output) -> Result<(), ()>,
|
||||
) -> eyre::Result<Utf8Output> {
|
||||
// This decodes the Stdout and Stderr as UTF-8 twice...
|
||||
let output =
|
||||
self.output_checked_with(|output| output.try_into().map_err(|_| ()).and_then(|o| succeeded(&o)))?;
|
||||
output.try_into()
|
||||
}
|
||||
|
||||
/// Like [`Command::status`], but gives a nice error message if the status is unsuccessful
|
||||
/// rather than returning the [`ExitStatus`].
|
||||
///
|
||||
/// Returns `Ok` if the command executes successfully, returns `Err` if the command fails to
|
||||
/// execute or returns a non-zero exit code.
|
||||
#[track_caller]
|
||||
fn status_checked(&mut self) -> eyre::Result<()> {
|
||||
self.status_checked_with(|status| if status.success() { Ok(()) } else { Err(()) })
|
||||
}
|
||||
|
||||
/// Like [`status_checked`], but gives a nice error message if the status is unsuccessful
|
||||
/// rather than returning the [`ExitStatus`].
|
||||
///
|
||||
/// Returns `Ok` if the command executes successfully, returns `Err` if the command fails to
|
||||
/// execute or if `succeeded` returns an `Err`.
|
||||
/// (This lets the caller substitute their own notion of "success" instead of assuming
|
||||
/// non-zero exit codes indicate success.)
|
||||
#[track_caller]
|
||||
fn status_checked_with(&mut self, succeeded: impl Fn(ExitStatus) -> Result<(), ()>) -> eyre::Result<()>;
|
||||
|
||||
/// Like [`Command::spawn`], but gives a nice error message if the command fails to
|
||||
/// execute.
|
||||
#[track_caller]
|
||||
fn spawn_checked(&mut self) -> eyre::Result<Self::Child>;
|
||||
}
|
||||
|
||||
impl CommandExt for Command {
|
||||
type Child = Child;
|
||||
|
||||
fn output_checked_with(&mut self, succeeded: impl Fn(&Output) -> Result<(), ()>) -> eyre::Result<Output> {
|
||||
let command = log(self);
|
||||
|
||||
// This is where we implement `output_checked`, which is what we prefer to use instead of
|
||||
// `output`, so we allow `Command::output` here.
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let output = self
|
||||
.output()
|
||||
.with_context(|| format!("Failed to execute `{command}`"))?;
|
||||
|
||||
if succeeded(&output).is_ok() {
|
||||
Ok(output)
|
||||
} else {
|
||||
let mut message = format!("Command failed: `{command}`");
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
let stdout_trimmed = stdout.trim();
|
||||
if !stdout_trimmed.is_empty() {
|
||||
message.push_str(&format!("\n\nStdout:\n{stdout_trimmed}"));
|
||||
}
|
||||
let stderr_trimmed = stderr.trim();
|
||||
if !stderr_trimmed.is_empty() {
|
||||
message.push_str(&format!("\n\nStderr:\n{stderr_trimmed}"));
|
||||
}
|
||||
|
||||
let (program, _) = get_program_and_args(self);
|
||||
let err = TopgradeError::ProcessFailedWithOutput(program, output.status, stderr.into_owned());
|
||||
|
||||
let ret = Err(err).with_context(|| message);
|
||||
debug!("Command failed: {ret:?}");
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
fn status_checked_with(&mut self, succeeded: impl Fn(ExitStatus) -> Result<(), ()>) -> eyre::Result<()> {
|
||||
let command = log(self);
|
||||
let message = format!("Failed to execute `{command}`");
|
||||
|
||||
// This is where we implement `status_checked`, which is what we prefer to use instead of
|
||||
// `status`, so we allow `Command::status` here.
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let status = self.status().with_context(|| message.clone())?;
|
||||
|
||||
if succeeded(status).is_ok() {
|
||||
Ok(())
|
||||
} else {
|
||||
let (program, _) = get_program_and_args(self);
|
||||
let err = TopgradeError::ProcessFailed(program, status);
|
||||
let ret = Err(err).with_context(|| format!("Command failed: `{command}`"));
|
||||
debug!("Command failed: {ret:?}");
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
fn spawn_checked(&mut self) -> eyre::Result<Self::Child> {
|
||||
let command = log(self);
|
||||
let message = format!("Failed to execute `{command}`");
|
||||
|
||||
// This is where we implement `spawn_checked`, which is what we prefer to use instead of
|
||||
// `spawn`, so we allow `Command::spawn` here.
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
{
|
||||
self.spawn().with_context(|| message.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_program_and_args(cmd: &Command) -> (String, String) {
|
||||
// We're not doing anything weird with commands that are invalid UTF-8 so this is fine.
|
||||
let program = cmd.get_program().to_string_lossy().into_owned();
|
||||
let args = shell_words::join(cmd.get_args().map(|arg| arg.to_string_lossy()));
|
||||
(program, args)
|
||||
}
|
||||
|
||||
fn format_program_and_args(cmd: &Command) -> String {
|
||||
let (program, args) = get_program_and_args(cmd);
|
||||
if args.is_empty() {
|
||||
program
|
||||
} else {
|
||||
format!("{program} {args}")
|
||||
}
|
||||
}
|
||||
|
||||
fn log(cmd: &Command) -> String {
|
||||
let command = format_program_and_args(cmd);
|
||||
debug!("Executing command `{command}`");
|
||||
command
|
||||
}
|
||||
1178
src/config.rs
1178
src/config.rs
File diff suppressed because it is too large
Load Diff
@@ -1,10 +1,7 @@
|
||||
use lazy_static::lazy_static;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
|
||||
lazy_static! {
|
||||
/// A global variable telling whether the application has been interrupted.
|
||||
static ref INTERRUPTED: AtomicBool = AtomicBool::new(false);
|
||||
}
|
||||
/// A global variable telling whether the application has been interrupted.
|
||||
static INTERRUPTED: AtomicBool = AtomicBool::new(false);
|
||||
|
||||
/// Tells whether the program has been interrupted
|
||||
pub fn interrupted() -> bool {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! SIGINT handling in Unix systems.
|
||||
use crate::ctrlc::interrupted::set_interrupted;
|
||||
use nix::sys::signal;
|
||||
use nix::sys::signal::{sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal};
|
||||
|
||||
/// Handle SIGINT. Set the interruption flag.
|
||||
extern "C" fn handle_sigint(_: i32) {
|
||||
@@ -10,12 +10,8 @@ extern "C" fn handle_sigint(_: i32) {
|
||||
/// Set the necessary signal handlers.
|
||||
/// The function panics on failure.
|
||||
pub fn set_handler() {
|
||||
let sig_action = signal::SigAction::new(
|
||||
signal::SigHandler::Handler(handle_sigint),
|
||||
signal::SaFlags::empty(),
|
||||
signal::SigSet::empty(),
|
||||
);
|
||||
let sig_action = SigAction::new(SigHandler::Handler(handle_sigint), SaFlags::empty(), SigSet::empty());
|
||||
unsafe {
|
||||
signal::sigaction(signal::SIGINT, &sig_action).unwrap();
|
||||
sigaction(Signal::SIGINT, &sig_action).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
//! A stub for Ctrl + C handling.
|
||||
use crate::ctrlc::interrupted::set_interrupted;
|
||||
use tracing::error;
|
||||
use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE};
|
||||
use winapi::um::consoleapi::SetConsoleCtrlHandler;
|
||||
use winapi::um::wincon::CTRL_C_EVENT;
|
||||
@@ -16,6 +17,6 @@ extern "system" fn handler(ctrl_type: DWORD) -> BOOL {
|
||||
|
||||
pub fn set_handler() {
|
||||
if 0 == unsafe { SetConsoleCtrlHandler(Some(handler), TRUE) } {
|
||||
log::error!("Cannot set a control C handler")
|
||||
error!("Cannot set a control C handler")
|
||||
}
|
||||
}
|
||||
|
||||
91
src/error.rs
91
src/error.rs
@@ -1,41 +1,98 @@
|
||||
use std::process::ExitStatus;
|
||||
use std::{fmt::Display, process::ExitStatus};
|
||||
|
||||
use rust_i18n::t;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug, PartialEq, Eq)]
|
||||
pub enum TopgradeError {
|
||||
#[error("{0}")]
|
||||
ProcessFailed(ExitStatus),
|
||||
ProcessFailed(String, ExitStatus),
|
||||
|
||||
#[error("{0}: {1}")]
|
||||
ProcessFailedWithOutput(ExitStatus, String),
|
||||
ProcessFailedWithOutput(String, ExitStatus, String),
|
||||
|
||||
#[error("Sudo is required for this step")]
|
||||
#[allow(dead_code)]
|
||||
SudoRequired,
|
||||
|
||||
#[error("Unknown Linux Distribution")]
|
||||
#[cfg(target_os = "linux")]
|
||||
UnknownLinuxDistribution,
|
||||
|
||||
#[error("Failed getting the system package manager")]
|
||||
#[cfg(target_os = "linux")]
|
||||
EmptyOSReleaseFile,
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
FailedGettingPackageManager,
|
||||
}
|
||||
|
||||
impl Display for TopgradeError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
TopgradeError::ProcessFailed(process, exit_status) => {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
t!(
|
||||
"`{process}` failed: {exit_status}",
|
||||
process = process,
|
||||
exit_status = exit_status
|
||||
)
|
||||
)
|
||||
}
|
||||
TopgradeError::ProcessFailedWithOutput(process, exit_status, output) => {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
t!(
|
||||
"`{process}` failed: {exit_status} with {output}",
|
||||
process = process,
|
||||
exit_status = exit_status,
|
||||
output = output
|
||||
)
|
||||
)
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
TopgradeError::UnknownLinuxDistribution => write!(f, "{}", t!("Unknown Linux Distribution")),
|
||||
#[cfg(target_os = "linux")]
|
||||
TopgradeError::EmptyOSReleaseFile => {
|
||||
write!(f, "{}", t!("File \"/etc/os-release\" does not exist or is empty"))
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
TopgradeError::FailedGettingPackageManager => {
|
||||
write!(f, "{}", t!("Failed getting the system package manager"))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
#[error("A step failed")]
|
||||
pub struct StepFailed;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
#[error("Dry running")]
|
||||
pub struct DryRun();
|
||||
impl Display for StepFailed {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", t!("A step failed"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub struct DryRun();
|
||||
|
||||
impl Display for DryRun {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", t!("Dry running"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
#[error("{0}")]
|
||||
pub struct SkipStep(pub String);
|
||||
|
||||
impl Display for SkipStep {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(windows, feature = "self-update"))]
|
||||
#[derive(Error, Debug)]
|
||||
#[error("Topgrade Upgraded")]
|
||||
pub struct Upgraded(pub ExitStatus);
|
||||
|
||||
#[cfg(all(windows, feature = "self-update"))]
|
||||
impl Display for Upgraded {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", t!("Topgrade Upgraded"))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,70 +1,63 @@
|
||||
#![allow(dead_code)]
|
||||
use crate::executor::RunType;
|
||||
use crate::git::Git;
|
||||
use crate::utils::require_option;
|
||||
use crate::sudo::Sudo;
|
||||
use crate::utils::{get_require_sudo_string, require_option};
|
||||
use crate::{config::Config, executor::Executor};
|
||||
use anyhow::Result;
|
||||
use directories::BaseDirs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use color_eyre::eyre::Result;
|
||||
use std::env::var;
|
||||
use std::path::Path;
|
||||
use std::sync::Mutex;
|
||||
|
||||
pub struct ExecutionContext<'a> {
|
||||
run_type: RunType,
|
||||
sudo: &'a Option<PathBuf>,
|
||||
git: &'a Git,
|
||||
sudo: Option<Sudo>,
|
||||
config: &'a Config,
|
||||
base_dirs: &'a BaseDirs,
|
||||
/// Name of a tmux session to execute commands in, if any.
|
||||
/// This is used in `./steps/remote/ssh.rs`, where we want to run `topgrade` in a new
|
||||
/// tmux window for each remote.
|
||||
tmux_session: Mutex<Option<String>>,
|
||||
/// True if topgrade is running under ssh.
|
||||
under_ssh: bool,
|
||||
}
|
||||
|
||||
impl<'a> ExecutionContext<'a> {
|
||||
pub fn new(
|
||||
run_type: RunType,
|
||||
sudo: &'a Option<PathBuf>,
|
||||
git: &'a Git,
|
||||
config: &'a Config,
|
||||
base_dirs: &'a BaseDirs,
|
||||
) -> ExecutionContext<'a> {
|
||||
ExecutionContext {
|
||||
pub fn new(run_type: RunType, sudo: Option<Sudo>, config: &'a Config) -> Self {
|
||||
let under_ssh = var("SSH_CLIENT").is_ok() || var("SSH_TTY").is_ok();
|
||||
Self {
|
||||
run_type,
|
||||
sudo,
|
||||
git,
|
||||
config,
|
||||
base_dirs,
|
||||
tmux_session: Mutex::new(None),
|
||||
under_ssh,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn execute_elevated(&self, command: &Path, interactive: bool) -> Result<Executor> {
|
||||
let sudo = require_option(self.sudo.clone(), "Sudo is required for this operation".into())?;
|
||||
let mut cmd = self.run_type.execute(&sudo);
|
||||
|
||||
if sudo.ends_with("sudo") {
|
||||
cmd.arg("--preserve-env=DIFFPROG");
|
||||
}
|
||||
|
||||
if interactive {
|
||||
cmd.arg("-i");
|
||||
}
|
||||
|
||||
cmd.arg(command);
|
||||
Ok(cmd)
|
||||
let sudo = require_option(self.sudo.as_ref(), get_require_sudo_string())?;
|
||||
Ok(sudo.execute_elevated(self, command, interactive))
|
||||
}
|
||||
|
||||
pub fn run_type(&self) -> RunType {
|
||||
self.run_type
|
||||
}
|
||||
|
||||
pub fn git(&self) -> &Git {
|
||||
self.git
|
||||
}
|
||||
|
||||
pub fn sudo(&self) -> &Option<PathBuf> {
|
||||
self.sudo
|
||||
pub fn sudo(&self) -> &Option<Sudo> {
|
||||
&self.sudo
|
||||
}
|
||||
|
||||
pub fn config(&self) -> &Config {
|
||||
self.config
|
||||
}
|
||||
|
||||
pub fn base_dirs(&self) -> &BaseDirs {
|
||||
self.base_dirs
|
||||
pub fn under_ssh(&self) -> bool {
|
||||
self.under_ssh
|
||||
}
|
||||
|
||||
pub fn set_tmux_session(&self, session_name: String) {
|
||||
self.tmux_session.lock().unwrap().replace(session_name);
|
||||
}
|
||||
|
||||
pub fn get_tmux_session(&self) -> Option<String> {
|
||||
self.tmux_session.lock().unwrap().clone()
|
||||
}
|
||||
}
|
||||
|
||||
168
src/executor.rs
168
src/executor.rs
@@ -1,11 +1,14 @@
|
||||
//! Utilities for command execution
|
||||
use crate::error::{DryRun, TopgradeError};
|
||||
use crate::utils::{Check, CheckWithCodes};
|
||||
use anyhow::Result;
|
||||
use log::{debug, trace};
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::path::Path;
|
||||
use std::process::{Child, Command, ExitStatus};
|
||||
use std::process::{Child, Command, ExitStatus, Output};
|
||||
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::error::DryRun;
|
||||
|
||||
/// An enum telling whether Topgrade should perform dry runs or actually perform the steps.
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
@@ -56,6 +59,16 @@ pub enum Executor {
|
||||
}
|
||||
|
||||
impl Executor {
|
||||
/// Get the name of the program being run.
|
||||
///
|
||||
/// Will give weird results for non-UTF-8 programs; see `to_string_lossy()`.
|
||||
pub fn get_program(&self) -> String {
|
||||
match self {
|
||||
Executor::Wet(c) => c.get_program().to_string_lossy().into_owned(),
|
||||
Executor::Dry(c) => c.program.to_string_lossy().into_owned(),
|
||||
}
|
||||
}
|
||||
|
||||
/// See `std::process::Command::arg`
|
||||
pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Executor {
|
||||
match self {
|
||||
@@ -139,7 +152,7 @@ impl Executor {
|
||||
let result = match self {
|
||||
Executor::Wet(c) => {
|
||||
debug!("Running {:?}", c);
|
||||
c.spawn().map(ExecutorChild::Wet)?
|
||||
c.spawn_checked().map(ExecutorChild::Wet)?
|
||||
}
|
||||
Executor::Dry(c) => {
|
||||
c.dry_run();
|
||||
@@ -153,7 +166,7 @@ impl Executor {
|
||||
/// See `std::process::Command::output`
|
||||
pub fn output(&mut self) -> Result<ExecutorOutput> {
|
||||
match self {
|
||||
Executor::Wet(c) => Ok(ExecutorOutput::Wet(c.output()?)),
|
||||
Executor::Wet(c) => Ok(ExecutorOutput::Wet(c.output_checked()?)),
|
||||
Executor::Dry(c) => {
|
||||
c.dry_run();
|
||||
Ok(ExecutorOutput::Dry)
|
||||
@@ -161,23 +174,28 @@ impl Executor {
|
||||
}
|
||||
}
|
||||
|
||||
/// A convinence method for `spawn().wait().check()`.
|
||||
/// Returns an error if something went wrong during the execution or if the
|
||||
/// process exited with failure.
|
||||
pub fn check_run(&mut self) -> Result<()> {
|
||||
self.spawn()?.wait()?.check()
|
||||
}
|
||||
|
||||
/// An extension of `check_run` that allows you to set a sequence of codes
|
||||
/// An extension of `status_checked` that allows you to set a sequence of codes
|
||||
/// that can indicate success of a script
|
||||
#[allow(dead_code)]
|
||||
pub fn check_run_with_codes(&mut self, codes: &[i32]) -> Result<()> {
|
||||
self.spawn()?.wait()?.check_with_codes(codes)
|
||||
pub fn status_checked_with_codes(&mut self, codes: &[i32]) -> Result<()> {
|
||||
match self {
|
||||
Executor::Wet(c) => c.status_checked_with(|status| {
|
||||
if status.success() || status.code().as_ref().map(|c| codes.contains(c)).unwrap_or(false) {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(())
|
||||
}
|
||||
}),
|
||||
Executor::Dry(c) => {
|
||||
c.dry_run();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub enum ExecutorOutput {
|
||||
Wet(std::process::Output),
|
||||
Wet(Output),
|
||||
Dry,
|
||||
}
|
||||
|
||||
@@ -192,16 +210,20 @@ pub struct DryCommand {
|
||||
impl DryCommand {
|
||||
fn dry_run(&self) {
|
||||
print!(
|
||||
"Dry running: {} {}",
|
||||
self.program.to_string_lossy(),
|
||||
self.args
|
||||
.iter()
|
||||
.map(|a| String::from(a.to_string_lossy()))
|
||||
.collect::<Vec<String>>()
|
||||
.join(" ")
|
||||
"{}",
|
||||
t!(
|
||||
"Dry running: {program_name} {arguments}",
|
||||
program_name = self.program.to_string_lossy(),
|
||||
arguments = shell_words::join(
|
||||
self.args
|
||||
.iter()
|
||||
.map(|a| String::from(a.to_string_lossy()))
|
||||
.collect::<Vec<String>>()
|
||||
)
|
||||
)
|
||||
);
|
||||
match &self.directory {
|
||||
Some(dir) => println!(" in {}", dir.to_string_lossy()),
|
||||
Some(dir) => println!(" {}", t!("in {directory}", directory = dir.to_string_lossy())),
|
||||
None => println!(),
|
||||
};
|
||||
}
|
||||
@@ -209,82 +231,38 @@ impl DryCommand {
|
||||
|
||||
/// The Result of spawn. Contains an actual `std::process::Child` if executed by a wet command.
|
||||
pub enum ExecutorChild {
|
||||
#[allow(unused)] // this type has not been used
|
||||
Wet(Child),
|
||||
Dry,
|
||||
}
|
||||
|
||||
impl ExecutorChild {
|
||||
/// See `std::process::Child::wait`
|
||||
pub fn wait(&mut self) -> Result<ExecutorExitStatus> {
|
||||
let result = match self {
|
||||
ExecutorChild::Wet(c) => c.wait().map(ExecutorExitStatus::Wet)?,
|
||||
ExecutorChild::Dry => ExecutorExitStatus::Dry,
|
||||
};
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
/// The Result of wait. Contains an actual `std::process::ExitStatus` if executed by a wet command.
|
||||
pub enum ExecutorExitStatus {
|
||||
Wet(ExitStatus),
|
||||
Dry,
|
||||
}
|
||||
|
||||
impl CheckWithCodes for ExecutorExitStatus {
|
||||
fn check_with_codes(self, codes: &[i32]) -> Result<()> {
|
||||
match self {
|
||||
ExecutorExitStatus::Wet(e) => e.check_with_codes(codes),
|
||||
ExecutorExitStatus::Dry => Ok(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension methods for `std::process::Command`
|
||||
pub trait CommandExt {
|
||||
/// Run the command, wait for it to complete, check the return code and decode the output as UTF-8.
|
||||
fn check_output(&mut self) -> Result<String>;
|
||||
fn string_output(&mut self) -> Result<String>;
|
||||
}
|
||||
|
||||
impl CommandExt for Command {
|
||||
fn check_output(&mut self) -> Result<String> {
|
||||
let output = self.output()?;
|
||||
trace!("Output of {:?}: {:?}", self, output);
|
||||
let status = output.status;
|
||||
if !status.success() {
|
||||
let stderr = String::from_utf8(output.stderr).unwrap_or_default();
|
||||
return Err(TopgradeError::ProcessFailedWithOutput(status, stderr).into());
|
||||
}
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
}
|
||||
|
||||
fn string_output(&mut self) -> Result<String> {
|
||||
let output = self.output()?;
|
||||
trace!("Output of {:?}: {:?}", self, output);
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
}
|
||||
}
|
||||
|
||||
impl CommandExt for Executor {
|
||||
fn check_output(&mut self) -> Result<String> {
|
||||
let output = match self.output()? {
|
||||
ExecutorOutput::Wet(output) => output,
|
||||
ExecutorOutput::Dry => return Err(DryRun().into()),
|
||||
};
|
||||
let status = output.status;
|
||||
if !status.success() {
|
||||
let stderr = String::from_utf8(output.stderr).unwrap_or_default();
|
||||
return Err(TopgradeError::ProcessFailedWithOutput(status, stderr).into());
|
||||
type Child = ExecutorChild;
|
||||
|
||||
// TODO: It might be nice to make `output_checked_with` return something that has a
|
||||
// variant for wet/dry runs.
|
||||
|
||||
fn output_checked_with(&mut self, succeeded: impl Fn(&Output) -> Result<(), ()>) -> Result<Output> {
|
||||
match self {
|
||||
Executor::Wet(c) => c.output_checked_with(succeeded),
|
||||
Executor::Dry(c) => {
|
||||
c.dry_run();
|
||||
Err(DryRun().into())
|
||||
}
|
||||
}
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
}
|
||||
|
||||
fn string_output(&mut self) -> Result<String> {
|
||||
let output = match self.output()? {
|
||||
ExecutorOutput::Wet(output) => output,
|
||||
ExecutorOutput::Dry => return Err(DryRun().into()),
|
||||
};
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
fn status_checked_with(&mut self, succeeded: impl Fn(ExitStatus) -> Result<(), ()>) -> Result<()> {
|
||||
match self {
|
||||
Executor::Wet(c) => c.status_checked_with(succeeded),
|
||||
Executor::Dry(c) => {
|
||||
c.dry_run();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn spawn_checked(&mut self) -> Result<Self::Child> {
|
||||
self.spawn()
|
||||
}
|
||||
}
|
||||
|
||||
2
src/lib.rs
Normal file
2
src/lib.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
//pub mod steps;
|
||||
//pub mod utils;
|
||||
525
src/main.rs
525
src/main.rs
@@ -2,14 +2,24 @@
|
||||
|
||||
use std::env;
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
use std::process::exit;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use crate::breaking_changes::{first_run_of_major_release, print_breaking_changes, should_skip, write_keep_file};
|
||||
use clap::CommandFactory;
|
||||
use clap::{crate_version, Parser};
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use console::Key;
|
||||
use log::debug;
|
||||
use log::LevelFilter;
|
||||
use pretty_env_logger::formatted_timed_builder;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
#[cfg(windows)]
|
||||
use etcetera::base_strategy::Windows;
|
||||
#[cfg(unix)]
|
||||
use etcetera::base_strategy::Xdg;
|
||||
use once_cell::sync::Lazy;
|
||||
use rust_i18n::{i18n, t};
|
||||
use tracing::debug;
|
||||
|
||||
use self::config::{CommandLineArgs, Config, Step};
|
||||
use self::error::StepFailed;
|
||||
@@ -18,6 +28,10 @@ use self::error::Upgraded;
|
||||
use self::steps::{remote::*, *};
|
||||
use self::terminal::*;
|
||||
|
||||
use self::utils::{hostname, install_color_eyre, install_tracing, update_tracing};
|
||||
|
||||
mod breaking_changes;
|
||||
mod command;
|
||||
mod config;
|
||||
mod ctrlc;
|
||||
mod error;
|
||||
@@ -30,15 +44,54 @@ mod self_renamer;
|
||||
#[cfg(feature = "self-update")]
|
||||
mod self_update;
|
||||
mod steps;
|
||||
mod sudo;
|
||||
mod terminal;
|
||||
mod utils;
|
||||
|
||||
pub(crate) static HOME_DIR: Lazy<PathBuf> = Lazy::new(|| home::home_dir().expect("No home directory"));
|
||||
#[cfg(unix)]
|
||||
pub(crate) static XDG_DIRS: Lazy<Xdg> = Lazy::new(|| Xdg::new().expect("No home directory"));
|
||||
|
||||
#[cfg(windows)]
|
||||
pub(crate) static WINDOWS_DIRS: Lazy<Windows> = Lazy::new(|| Windows::new().expect("No home directory"));
|
||||
|
||||
// Init and load the i18n files
|
||||
i18n!("locales", fallback = "en");
|
||||
|
||||
fn run() -> Result<()> {
|
||||
install_color_eyre()?;
|
||||
ctrlc::set_handler();
|
||||
|
||||
let base_dirs = directories::BaseDirs::new().ok_or_else(|| anyhow!("No base directories"))?;
|
||||
|
||||
let opt = CommandLineArgs::parse();
|
||||
// Set up the logger with the filter directives from:
|
||||
// 1. CLI option `--log-filter`
|
||||
// 2. `debug` if the `--verbose` option is present
|
||||
// We do this because we need our logger to work while loading the
|
||||
// configuration file.
|
||||
//
|
||||
// When the configuration file is loaded, update the logger with the full
|
||||
// filter directives.
|
||||
//
|
||||
// For more info, see the comments in `CommandLineArgs::tracing_filter_directives()`
|
||||
// and `Config::tracing_filter_directives()`.
|
||||
let reload_handle = install_tracing(&opt.tracing_filter_directives())?;
|
||||
|
||||
// Get current system locale and set it as the default locale
|
||||
let system_locale = sys_locale::get_locale().unwrap_or("en".to_string());
|
||||
rust_i18n::set_locale(&system_locale);
|
||||
debug!("Current system locale is {system_locale}");
|
||||
|
||||
if let Some(shell) = opt.gen_completion {
|
||||
let cmd = &mut CommandLineArgs::command();
|
||||
clap_complete::generate(shell, cmd, clap::crate_name!(), &mut io::stdout());
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if opt.gen_manpage {
|
||||
let man = clap_mangen::Man::new(CommandLineArgs::command());
|
||||
man.render(&mut io::stdout())?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
for env in opt.env_variables() {
|
||||
let mut splitted = env.split('=');
|
||||
@@ -47,66 +100,74 @@ fn run() -> Result<()> {
|
||||
env::set_var(var, value);
|
||||
}
|
||||
|
||||
let mut builder = formatted_timed_builder();
|
||||
|
||||
if opt.verbose {
|
||||
builder.filter(Some("topgrade"), LevelFilter::Trace);
|
||||
}
|
||||
|
||||
builder.init();
|
||||
|
||||
if opt.edit_config() {
|
||||
Config::edit(&base_dirs)?;
|
||||
Config::edit()?;
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
if opt.show_config_reference() {
|
||||
print!("{}", crate::config::EXAMPLE_CONFIG);
|
||||
print!("{}", config::EXAMPLE_CONFIG);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let config = Config::load(&base_dirs, opt)?;
|
||||
terminal::set_title(config.set_title());
|
||||
terminal::display_time(config.display_time());
|
||||
terminal::set_desktop_notifications(config.notify_each_step());
|
||||
let config = Config::load(opt)?;
|
||||
// Update the logger with the full filter directives.
|
||||
update_tracing(&reload_handle, &config.tracing_filter_directives())?;
|
||||
set_title(config.set_title());
|
||||
display_time(config.display_time());
|
||||
set_desktop_notifications(config.notify_each_step());
|
||||
|
||||
debug!("Version: {}", crate_version!());
|
||||
debug!("OS: {}", env!("TARGET"));
|
||||
debug!("{:?}", std::env::args());
|
||||
debug!("Binary path: {:?}", std::env::current_exe());
|
||||
debug!("Self Update: {:?}", cfg!(feature = "self-update"));
|
||||
debug!("self-update Feature Enabled: {:?}", cfg!(feature = "self-update"));
|
||||
debug!("Configuration: {:?}", config);
|
||||
|
||||
if config.run_in_tmux() && env::var("TOPGRADE_INSIDE_TMUX").is_err() {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
tmux::run_in_tmux(config.tmux_arguments());
|
||||
tmux::run_in_tmux(config.tmux_config()?)?;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
let git = git::Git::new();
|
||||
let mut git_repos = git::Repositories::new(&git);
|
||||
let powershell = powershell::Powershell::new();
|
||||
let should_run_powershell = powershell.profile().is_some() && config.should_run(Step::Powershell);
|
||||
let emacs = emacs::Emacs::new();
|
||||
#[cfg(target_os = "linux")]
|
||||
let distribution = linux::Distribution::detect();
|
||||
|
||||
let sudo = utils::sudo();
|
||||
let sudo = config.sudo_command().map_or_else(sudo::Sudo::detect, sudo::Sudo::new);
|
||||
let run_type = executor::RunType::new(config.dry_run());
|
||||
|
||||
let ctx = execution_context::ExecutionContext::new(run_type, &sudo, &git, &config, &base_dirs);
|
||||
|
||||
let ctx = execution_context::ExecutionContext::new(run_type, sudo, &config);
|
||||
let mut runner = runner::Runner::new(&ctx);
|
||||
|
||||
// If
|
||||
//
|
||||
// 1. the breaking changes notification shouldnot be skipped
|
||||
// 2. this is the first execution of a major release
|
||||
//
|
||||
// inform user of breaking changes
|
||||
if !should_skip() && first_run_of_major_release()? {
|
||||
print_breaking_changes();
|
||||
|
||||
if prompt_yesno("Confirmed?")? {
|
||||
write_keep_file()?;
|
||||
} else {
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Self-Update step, this will execute only if:
|
||||
// 1. the `self-update` feature is enabled
|
||||
// 2. it is not disabled from configuration (env var/CLI opt/file)
|
||||
#[cfg(feature = "self-update")]
|
||||
{
|
||||
if !run_type.dry() && env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() {
|
||||
let result = self_update::self_update();
|
||||
let should_self_update = env::var("TOPGRADE_NO_SELF_UPGRADE").is_err() && !config.no_self_update();
|
||||
|
||||
if let Err(e) = &result {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
if e.downcast_ref::<Upgraded>().is_some() {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
print_warning(format!("Self update error: {}", e));
|
||||
}
|
||||
if should_self_update {
|
||||
runner.execute(Step::SelfUpdate, "Self Update", || self_update::self_update(&ctx))?;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -123,45 +184,70 @@ fn run() -> Result<()> {
|
||||
}
|
||||
}
|
||||
|
||||
let powershell = powershell::Powershell::new();
|
||||
let should_run_powershell = powershell.profile().is_some() && config.should_run(Step::Powershell);
|
||||
|
||||
#[cfg(windows)]
|
||||
runner.execute(Step::Wsl, "WSL", || windows::run_wsl_topgrade(&ctx))?;
|
||||
if config.pre_sudo() {
|
||||
if let Some(sudo) = ctx.sudo() {
|
||||
sudo.elevate(&ctx)?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(topgrades) = config.remote_topgrades() {
|
||||
for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(t)) {
|
||||
runner.execute(Step::Remotes, format!("Remote ({})", remote_topgrade), || {
|
||||
remote::ssh::ssh_step(&ctx, remote_topgrade)
|
||||
for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(hostname(), t)) {
|
||||
runner.execute(Step::Remotes, format!("Remote ({remote_topgrade})"), || {
|
||||
ssh::ssh_step(&ctx, remote_topgrade)
|
||||
})?;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
let distribution = linux::Distribution::detect();
|
||||
|
||||
#[cfg(target_os = r#"linux"#)]
|
||||
#[cfg(windows)]
|
||||
{
|
||||
runner.execute(Step::Wsl, "WSL", || windows::run_wsl_topgrade(&ctx))?;
|
||||
runner.execute(Step::WslUpdate, "WSL", || windows::update_wsl(&ctx))?;
|
||||
runner.execute(Step::Chocolatey, "Chocolatey", || windows::run_chocolatey(&ctx))?;
|
||||
runner.execute(Step::Scoop, "Scoop", || windows::run_scoop(&ctx))?;
|
||||
runner.execute(Step::Winget, "Winget", || windows::run_winget(&ctx))?;
|
||||
runner.execute(Step::System, "Windows update", || windows::windows_update(&ctx))?;
|
||||
runner.execute(Step::MicrosoftStore, "Microsoft Store", || {
|
||||
windows::microsoft_store(&ctx)
|
||||
})?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// NOTE: Due to breaking `nu` updates, `packer.nu` needs to be updated before `nu` get updated
|
||||
// by other package managers.
|
||||
runner.execute(Step::Shell, "packer.nu", || linux::run_packer_nu(&ctx))?;
|
||||
|
||||
match &distribution {
|
||||
Ok(distribution) => {
|
||||
runner.execute(Step::System, "System update", || distribution.upgrade(&ctx))?;
|
||||
}
|
||||
Err(e) => {
|
||||
println!("Error detecting current distribution: {}", e);
|
||||
println!("{}", t!("Error detecting current distribution: {error}", error = e));
|
||||
}
|
||||
}
|
||||
runner.execute(Step::ConfigUpdate, "config-update", || linux::run_config_update(&ctx))?;
|
||||
|
||||
runner.execute(Step::AM, "am", || linux::run_am(&ctx))?;
|
||||
runner.execute(Step::AppMan, "appman", || linux::run_appman(&ctx))?;
|
||||
runner.execute(Step::DebGet, "deb-get", || linux::run_deb_get(&ctx))?;
|
||||
runner.execute(Step::Toolbx, "toolbx", || toolbx::run_toolbx(&ctx))?;
|
||||
runner.execute(Step::Snap, "snap", || linux::run_snap(&ctx))?;
|
||||
runner.execute(Step::Pacstall, "pacstall", || linux::run_pacstall(&ctx))?;
|
||||
runner.execute(Step::Pacdef, "pacdef", || linux::run_pacdef(&ctx))?;
|
||||
runner.execute(Step::Protonup, "protonup", || linux::run_protonup_update(&ctx))?;
|
||||
runner.execute(Step::Distrobox, "distrobox", || linux::run_distrobox_update(&ctx))?;
|
||||
runner.execute(Step::DkpPacman, "dkp-pacman", || linux::run_dkp_pacman_update(&ctx))?;
|
||||
runner.execute(Step::System, "pihole", || linux::run_pihole_update(&ctx))?;
|
||||
runner.execute(Step::Firmware, "Firmware upgrades", || linux::run_fwupdmgr(&ctx))?;
|
||||
runner.execute(Step::Restarts, "Restarts", || linux::run_needrestart(&ctx))?;
|
||||
|
||||
runner.execute(Step::Flatpak, "Flatpak", || linux::run_flatpak(&ctx))?;
|
||||
runner.execute(Step::BrewFormula, "Brew", || {
|
||||
unix::run_brew_formula(&ctx, unix::BrewVariant::Path)
|
||||
})?;
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
runner.execute(Step::Chocolatey, "Chocolatey", || windows::run_chocolatey(&ctx))?;
|
||||
runner.execute(Step::Scoop, "Scoop", || windows::run_scoop(config.cleanup(), run_type))?;
|
||||
runner.execute(Step::Winget, "Winget", || windows::run_winget(&ctx))?;
|
||||
runner.execute(Step::Lure, "LURE", || linux::run_lure_update(&ctx))?;
|
||||
runner.execute(Step::Waydroid, "Waydroid", || linux::run_waydroid(&ctx))?;
|
||||
runner.execute(Step::AutoCpufreq, "auto-cpufreq", || linux::run_auto_cpufreq(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
@@ -185,122 +271,76 @@ fn run() -> Result<()> {
|
||||
unix::run_brew_cask(&ctx, unix::BrewVariant::Path)
|
||||
})?;
|
||||
runner.execute(Step::Macports, "MacPorts", || macos::run_macports(&ctx))?;
|
||||
runner.execute(Step::Xcodes, "Xcodes", || macos::update_xcodes(&ctx))?;
|
||||
runner.execute(Step::Sparkle, "Sparkle", || macos::run_sparkle(&ctx))?;
|
||||
runner.execute(Step::Mas, "App Store", || macos::run_mas(&ctx))?;
|
||||
runner.execute(Step::System, "System upgrade", || macos::upgrade_macos(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "dragonfly")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "DragonFly BSD Packages", || {
|
||||
dragonfly::upgrade_packages(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Audit, "DragonFly Audit", || dragonfly::audit_packages(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "FreeBSD Packages", || freebsd::upgrade_packages(&ctx))?;
|
||||
runner.execute(Step::System, "FreeBSD Upgrade", || freebsd::upgrade_freebsd(&ctx))?;
|
||||
runner.execute(Step::Audit, "FreeBSD Audit", || freebsd::audit_packages(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "openbsd")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "OpenBSD Packages", || openbsd::upgrade_packages(&ctx))?;
|
||||
runner.execute(Step::System, "OpenBSD Upgrade", || openbsd::upgrade_openbsd(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "android")]
|
||||
{
|
||||
runner.execute(Step::Pkg, "Termux Packages", || android::upgrade_packages(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
runner.execute(Step::Yadm, "yadm", || unix::run_yadm(&ctx))?;
|
||||
runner.execute(Step::Nix, "nix", || unix::run_nix(&ctx))?;
|
||||
runner.execute(Step::Nix, "nix upgrade-nix", || unix::run_nix_self_upgrade(&ctx))?;
|
||||
runner.execute(Step::Guix, "guix", || unix::run_guix(&ctx))?;
|
||||
|
||||
runner.execute(Step::HomeManager, "home-manager", || unix::run_home_manager(run_type))?;
|
||||
runner.execute(Step::Asdf, "asdf", || unix::run_asdf(run_type))?;
|
||||
runner.execute(Step::HomeManager, "home-manager", || unix::run_home_manager(&ctx))?;
|
||||
runner.execute(Step::Asdf, "asdf", || unix::run_asdf(&ctx))?;
|
||||
runner.execute(Step::Mise, "mise", || unix::run_mise(&ctx))?;
|
||||
runner.execute(Step::Pkgin, "pkgin", || unix::run_pkgin(&ctx))?;
|
||||
runner.execute(Step::Bun, "bun", || unix::run_bun(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "dragonfly")]
|
||||
runner.execute(Step::Pkg, "DragonFly BSD Packages", || {
|
||||
dragonfly::upgrade_packages(sudo.as_ref(), run_type)
|
||||
})?;
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
runner.execute(Step::Pkg, "FreeBSD Packages", || {
|
||||
freebsd::upgrade_packages(sudo.as_ref(), run_type)
|
||||
})?;
|
||||
|
||||
#[cfg(target_os = "android")]
|
||||
runner.execute(Step::Pkg, "Termux Packages", || android::upgrade_packages(&ctx))?;
|
||||
|
||||
let emacs = emacs::Emacs::new(&base_dirs);
|
||||
if config.use_predefined_git_repos() {
|
||||
if config.should_run(Step::Emacs) {
|
||||
if !emacs.is_doom() {
|
||||
if let Some(directory) = emacs.directory() {
|
||||
git_repos.insert_if_repo(directory);
|
||||
}
|
||||
}
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".doom.d"));
|
||||
}
|
||||
|
||||
if config.should_run(Step::Vim) {
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".vim"));
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".config/nvim"));
|
||||
}
|
||||
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".ideavimrc"));
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".intellimacs"));
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
git_repos.insert_if_repo(zsh::zshrc(&base_dirs));
|
||||
if config.should_run(Step::Tmux) {
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".tmux"));
|
||||
}
|
||||
git_repos.insert_if_repo(base_dirs.home_dir().join(".config/fish"));
|
||||
git_repos.insert_if_repo(base_dirs.config_dir().join("openbox"));
|
||||
git_repos.insert_if_repo(base_dirs.config_dir().join("bspwm"));
|
||||
git_repos.insert_if_repo(base_dirs.config_dir().join("i3"));
|
||||
git_repos.insert_if_repo(base_dirs.config_dir().join("sway"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
git_repos.insert_if_repo(
|
||||
base_dirs
|
||||
.data_local_dir()
|
||||
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
|
||||
);
|
||||
|
||||
#[cfg(windows)]
|
||||
windows::insert_startup_scripts(&ctx, &mut git_repos).ok();
|
||||
|
||||
if let Some(profile) = powershell.profile() {
|
||||
git_repos.insert_if_repo(profile);
|
||||
}
|
||||
}
|
||||
|
||||
if config.should_run(Step::GitRepos) {
|
||||
if let Some(custom_git_repos) = config.git_repos() {
|
||||
for git_repo in custom_git_repos {
|
||||
git_repos.glob_insert(git_repo);
|
||||
}
|
||||
}
|
||||
runner.execute(Step::GitRepos, "Git repositories", || {
|
||||
git.multi_pull_step(&git_repos, &ctx)
|
||||
})?;
|
||||
}
|
||||
|
||||
if should_run_powershell {
|
||||
runner.execute(Step::Powershell, "Powershell Modules Update", || {
|
||||
powershell.update_modules(&ctx)
|
||||
})?;
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
runner.execute(Step::Shell, "zr", || zsh::run_zr(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "antibody", || zsh::run_antibody(run_type))?;
|
||||
runner.execute(Step::Shell, "antigen", || zsh::run_antigen(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "zgenom", || zsh::run_zgenom(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "zplug", || zsh::run_zplug(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "zinit", || zsh::run_zinit(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "zi", || zsh::run_zi(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "zim", || zsh::run_zim(&base_dirs, run_type))?;
|
||||
runner.execute(Step::BunPackages, "bun-packages", || unix::run_bun_packages(&ctx))?;
|
||||
runner.execute(Step::Shell, "zr", || zsh::run_zr(&ctx))?;
|
||||
runner.execute(Step::Shell, "antibody", || zsh::run_antibody(&ctx))?;
|
||||
runner.execute(Step::Shell, "antidote", || zsh::run_antidote(&ctx))?;
|
||||
runner.execute(Step::Shell, "antigen", || zsh::run_antigen(&ctx))?;
|
||||
runner.execute(Step::Shell, "zgenom", || zsh::run_zgenom(&ctx))?;
|
||||
runner.execute(Step::Shell, "zplug", || zsh::run_zplug(&ctx))?;
|
||||
runner.execute(Step::Shell, "zinit", || zsh::run_zinit(&ctx))?;
|
||||
runner.execute(Step::Shell, "zi", || zsh::run_zi(&ctx))?;
|
||||
runner.execute(Step::Shell, "zim", || zsh::run_zim(&ctx))?;
|
||||
runner.execute(Step::Shell, "oh-my-zsh", || zsh::run_oh_my_zsh(&ctx))?;
|
||||
runner.execute(Step::Shell, "fisher", || unix::run_fisher(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Shell, "oh-my-bash", || unix::run_oh_my_bash(&ctx))?;
|
||||
runner.execute(Step::Shell, "fisher", || unix::run_fisher(&ctx))?;
|
||||
runner.execute(Step::Shell, "bash-it", || unix::run_bashit(&ctx))?;
|
||||
runner.execute(Step::Shell, "oh-my-fish", || unix::run_oh_my_fish(&ctx))?;
|
||||
runner.execute(Step::Shell, "fish-plug", || unix::run_fish_plug(&ctx))?;
|
||||
runner.execute(Step::Tmux, "tmux", || tmux::run_tpm(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Tldr, "TLDR", || unix::run_tldr(run_type))?;
|
||||
runner.execute(Step::Pearl, "pearl", || unix::run_pearl(run_type))?;
|
||||
runner.execute(Step::Shell, "fundle", || unix::run_fundle(&ctx))?;
|
||||
runner.execute(Step::Tmux, "tmux", || tmux::run_tpm(&ctx))?;
|
||||
runner.execute(Step::Tldr, "TLDR", || unix::run_tldr(&ctx))?;
|
||||
runner.execute(Step::Pearl, "pearl", || unix::run_pearl(&ctx))?;
|
||||
#[cfg(not(any(target_os = "macos", target_os = "android")))]
|
||||
runner.execute(Step::GnomeShellExtensions, "Gnome Shell Extensions", || {
|
||||
unix::upgrade_gnome_extensions(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Sdkman, "SDKMAN!", || {
|
||||
unix::run_sdkman(&base_dirs, config.cleanup(), run_type)
|
||||
})?;
|
||||
runner.execute(Step::Pyenv, "pyenv", || unix::run_pyenv(&ctx))?;
|
||||
runner.execute(Step::Sdkman, "SDKMAN!", || unix::run_sdkman(&ctx))?;
|
||||
runner.execute(Step::Rcm, "rcm", || unix::run_rcm(&ctx))?;
|
||||
runner.execute(Step::Maza, "maza", || unix::run_maza(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(not(any(
|
||||
@@ -309,65 +349,97 @@ fn run() -> Result<()> {
|
||||
target_os = "netbsd",
|
||||
target_os = "dragonfly"
|
||||
)))]
|
||||
runner.execute(Step::Atom, "apm", || generic::run_apm(run_type))?;
|
||||
runner.execute(Step::Fossil, "fossil", || generic::run_fossil(run_type))?;
|
||||
runner.execute(Step::Rustup, "rustup", || generic::run_rustup(&base_dirs, run_type))?;
|
||||
{
|
||||
runner.execute(Step::Atom, "apm", || generic::run_apm(&ctx))?;
|
||||
}
|
||||
|
||||
// The following update function should be executed on all OSes.
|
||||
runner.execute(Step::Fossil, "fossil", || generic::run_fossil(&ctx))?;
|
||||
runner.execute(Step::Elan, "elan", || generic::run_elan(&ctx))?;
|
||||
runner.execute(Step::Rye, "rye", || generic::run_rye(&ctx))?;
|
||||
runner.execute(Step::Rustup, "rustup", || generic::run_rustup(&ctx))?;
|
||||
runner.execute(Step::Juliaup, "juliaup", || generic::run_juliaup(&ctx))?;
|
||||
runner.execute(Step::Dotnet, ".NET", || generic::run_dotnet_upgrade(&ctx))?;
|
||||
runner.execute(Step::Choosenim, "choosenim", || generic::run_choosenim(&ctx))?;
|
||||
runner.execute(Step::Cargo, "cargo", || generic::run_cargo_update(&ctx))?;
|
||||
runner.execute(Step::Flutter, "Flutter", || generic::run_flutter_upgrade(run_type))?;
|
||||
runner.execute(Step::Go, "Go", || generic::run_go(run_type))?;
|
||||
runner.execute(Step::Flutter, "Flutter", || generic::run_flutter_upgrade(&ctx))?;
|
||||
runner.execute(Step::Go, "go-global-update", || go::run_go_global_update(&ctx))?;
|
||||
runner.execute(Step::Go, "gup", || go::run_go_gup(&ctx))?;
|
||||
runner.execute(Step::Emacs, "Emacs", || emacs.upgrade(&ctx))?;
|
||||
runner.execute(Step::Opam, "opam", || generic::run_opam_update(&ctx))?;
|
||||
runner.execute(Step::Vcpkg, "vcpkg", || generic::run_vcpkg_update(run_type))?;
|
||||
runner.execute(Step::Pipx, "pipx", || generic::run_pipx_update(run_type))?;
|
||||
runner.execute(Step::Vcpkg, "vcpkg", || generic::run_vcpkg_update(&ctx))?;
|
||||
runner.execute(Step::Pipx, "pipx", || generic::run_pipx_update(&ctx))?;
|
||||
runner.execute(Step::Vscode, "Visual Studio Code extensions", || {
|
||||
generic::run_vscode_extensions_update(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Conda, "conda", || generic::run_conda_update(&ctx))?;
|
||||
runner.execute(Step::Pip3, "pip3", || generic::run_pip3_update(run_type))?;
|
||||
runner.execute(Step::Stack, "stack", || generic::run_stack_update(run_type))?;
|
||||
runner.execute(Step::Mamba, "mamba", || generic::run_mamba_update(&ctx))?;
|
||||
runner.execute(Step::Pixi, "pixi", || generic::run_pixi_update(&ctx))?;
|
||||
runner.execute(Step::Miktex, "miktex", || generic::run_miktex_packages_update(&ctx))?;
|
||||
runner.execute(Step::Pip3, "pip3", || generic::run_pip3_update(&ctx))?;
|
||||
runner.execute(Step::PipReview, "pip-review", || generic::run_pip_review_update(&ctx))?;
|
||||
runner.execute(Step::PipReviewLocal, "pip-review (local)", || {
|
||||
generic::run_pip_review_local_update(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Pipupgrade, "pipupgrade", || generic::run_pipupgrade_update(&ctx))?;
|
||||
runner.execute(Step::Ghcup, "ghcup", || generic::run_ghcup_update(&ctx))?;
|
||||
runner.execute(Step::Stack, "stack", || generic::run_stack_update(&ctx))?;
|
||||
runner.execute(Step::Tlmgr, "tlmgr", || generic::run_tlmgr_update(&ctx))?;
|
||||
runner.execute(Step::Myrepos, "myrepos", || {
|
||||
generic::run_myrepos_update(&base_dirs, run_type)
|
||||
})?;
|
||||
runner.execute(Step::Chezmoi, "chezmoi", || {
|
||||
generic::run_chezmoi_update(&base_dirs, run_type)
|
||||
})?;
|
||||
runner.execute(Step::Jetpack, "jetpack", || generic::run_jetpack(run_type))?;
|
||||
runner.execute(Step::Vim, "vim", || vim::upgrade_vim(&base_dirs, &ctx))?;
|
||||
runner.execute(Step::Vim, "Neovim", || vim::upgrade_neovim(&base_dirs, &ctx))?;
|
||||
runner.execute(Step::Myrepos, "myrepos", || generic::run_myrepos_update(&ctx))?;
|
||||
runner.execute(Step::Chezmoi, "chezmoi", || generic::run_chezmoi_update(&ctx))?;
|
||||
runner.execute(Step::Jetpack, "jetpack", || generic::run_jetpack(&ctx))?;
|
||||
runner.execute(Step::Vim, "vim", || vim::upgrade_vim(&ctx))?;
|
||||
runner.execute(Step::Vim, "Neovim", || vim::upgrade_neovim(&ctx))?;
|
||||
runner.execute(Step::Vim, "The Ultimate vimrc", || vim::upgrade_ultimate_vimrc(&ctx))?;
|
||||
runner.execute(Step::Vim, "voom", || vim::run_voom(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Vim, "voom", || vim::run_voom(&ctx))?;
|
||||
runner.execute(Step::Kakoune, "Kakoune", || kakoune::upgrade_kak_plug(&ctx))?;
|
||||
runner.execute(Step::Helix, "helix", || generic::run_helix_grammars(&ctx))?;
|
||||
runner.execute(Step::Node, "npm", || node::run_npm_upgrade(&ctx))?;
|
||||
runner.execute(Step::Node, "yarn", || node::run_yarn_upgrade(&ctx))?;
|
||||
runner.execute(Step::Yarn, "yarn", || node::run_yarn_upgrade(&ctx))?;
|
||||
runner.execute(Step::Pnpm, "pnpm", || node::run_pnpm_upgrade(&ctx))?;
|
||||
runner.execute(Step::VoltaPackages, "volta packages", || {
|
||||
node::run_volta_packages_upgrade(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Containers, "Containers", || containers::run_containers(&ctx))?;
|
||||
runner.execute(Step::Deno, "deno", || node::deno_upgrade(&ctx))?;
|
||||
runner.execute(Step::Composer, "composer", || generic::run_composer_update(&ctx))?;
|
||||
runner.execute(Step::Krew, "krew", || generic::run_krew_upgrade(run_type))?;
|
||||
runner.execute(Step::Gem, "gem", || generic::run_gem(&base_dirs, run_type))?;
|
||||
runner.execute(Step::Krew, "krew", || generic::run_krew_upgrade(&ctx))?;
|
||||
runner.execute(Step::Helm, "helm", || generic::run_helm_repo_update(&ctx))?;
|
||||
runner.execute(Step::Gem, "gem", || generic::run_gem(&ctx))?;
|
||||
runner.execute(Step::RubyGems, "rubygems", || generic::run_rubygems(&ctx))?;
|
||||
runner.execute(Step::Julia, "julia", || generic::update_julia_packages(&ctx))?;
|
||||
runner.execute(Step::Haxelib, "haxelib", || generic::run_haxelib_update(&ctx))?;
|
||||
runner.execute(Step::Sheldon, "sheldon", || generic::run_sheldon(&ctx))?;
|
||||
runner.execute(Step::Stew, "stew", || generic::run_stew(&ctx))?;
|
||||
runner.execute(Step::Rtcl, "rtcl", || generic::run_rtcl(&ctx))?;
|
||||
runner.execute(Step::Bin, "bin", || generic::bin_update(&ctx))?;
|
||||
runner.execute(Step::Gcloud, "gcloud", || {
|
||||
generic::run_gcloud_components_update(run_type)
|
||||
})?;
|
||||
runner.execute(Step::Micro, "micro", || generic::run_micro(run_type))?;
|
||||
runner.execute(Step::Raco, "raco", || generic::run_raco_update(run_type))?;
|
||||
runner.execute(Step::Gcloud, "gcloud", || generic::run_gcloud_components_update(&ctx))?;
|
||||
runner.execute(Step::Micro, "micro", || generic::run_micro(&ctx))?;
|
||||
runner.execute(Step::Raco, "raco", || generic::run_raco_update(&ctx))?;
|
||||
runner.execute(Step::Spicetify, "spicetify", || generic::spicetify_upgrade(&ctx))?;
|
||||
runner.execute(Step::GithubCliExtensions, "GitHub CLI Extensions", || {
|
||||
generic::run_ghcli_extensions_upgrade(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Bob, "Bob", || generic::run_bob(&ctx))?;
|
||||
runner.execute(Step::Certbot, "Certbot", || generic::run_certbot(&ctx))?;
|
||||
runner.execute(Step::GitRepos, "Git Repositories", || git::run_git_pull(&ctx))?;
|
||||
runner.execute(Step::ClamAvDb, "ClamAV Databases", || generic::run_freshclam(&ctx))?;
|
||||
runner.execute(Step::PlatformioCore, "PlatformIO Core", || {
|
||||
generic::run_platform_io(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Lensfun, "Lensfun's database update", || {
|
||||
generic::run_lensfun_update_data(&ctx)
|
||||
})?;
|
||||
runner.execute(Step::Poetry, "Poetry", || generic::run_poetry(&ctx))?;
|
||||
runner.execute(Step::Uv, "uv", || generic::run_uv(&ctx))?;
|
||||
runner.execute(Step::Zvm, "ZVM", || generic::run_zvm(&ctx))?;
|
||||
runner.execute(Step::Aqua, "aqua", || generic::run_aqua(&ctx))?;
|
||||
runner.execute(Step::Bun, "bun", || generic::run_bun(&ctx))?;
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
runner.execute(Step::DebGet, "deb-get", || linux::run_deb_get(&ctx))?;
|
||||
runner.execute(Step::Toolbx, "toolbx", || toolbx::run_toolbx(&ctx))?;
|
||||
runner.execute(Step::Flatpak, "Flatpak", || linux::flatpak_update(&ctx))?;
|
||||
runner.execute(Step::Snap, "snap", || linux::run_snap(sudo.as_ref(), run_type))?;
|
||||
runner.execute(Step::Pacstall, "pacstall", || linux::run_pacstall(&ctx))?;
|
||||
runner.execute(Step::Pacdef, "pacdef", || linux::run_pacdef(&ctx))?;
|
||||
runner.execute(Step::Protonup, "protonup", || linux::run_protonup_update(&ctx))?;
|
||||
if should_run_powershell {
|
||||
runner.execute(Step::Powershell, "Powershell Modules Update", || {
|
||||
powershell.update_modules(&ctx)
|
||||
})?;
|
||||
}
|
||||
|
||||
if let Some(commands) = config.commands() {
|
||||
@@ -380,32 +452,6 @@ fn run() -> Result<()> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
runner.execute(Step::System, "pihole", || {
|
||||
linux::run_pihole_update(sudo.as_ref(), run_type)
|
||||
})?;
|
||||
runner.execute(Step::Firmware, "Firmware upgrades", || linux::run_fwupdmgr(&ctx))?;
|
||||
runner.execute(Step::Restarts, "Restarts", || {
|
||||
linux::run_needrestart(sudo.as_ref(), run_type)
|
||||
})?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
runner.execute(Step::Sparkle, "Sparkle", || macos::run_sparkle(&ctx))?;
|
||||
runner.execute(Step::Mas, "App Store", || macos::run_mas(run_type))?;
|
||||
runner.execute(Step::System, "System upgrade", || macos::upgrade_macos(&ctx))?;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
runner.execute(Step::System, "FreeBSD Upgrade", || {
|
||||
freebsd::upgrade_freebsd(sudo.as_ref(), run_type)
|
||||
})?;
|
||||
|
||||
#[cfg(windows)]
|
||||
runner.execute(Step::System, "Windows update", || windows::windows_update(&ctx))?;
|
||||
|
||||
if config.should_run(Step::Vagrant) {
|
||||
if let Ok(boxes) = vagrant::collect_boxes(&ctx) {
|
||||
for vagrant_box in boxes {
|
||||
@@ -418,7 +464,7 @@ fn run() -> Result<()> {
|
||||
runner.execute(Step::Vagrant, "Vagrant boxes", || vagrant::upgrade_vagrant_boxes(&ctx))?;
|
||||
|
||||
if !runner.report().data().is_empty() {
|
||||
print_separator("Summary");
|
||||
print_separator(t!("Summary"));
|
||||
|
||||
for (key, result) in runner.report().data() {
|
||||
print_result(key, result);
|
||||
@@ -430,12 +476,6 @@ fn run() -> Result<()> {
|
||||
distribution.show_summary();
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "freebsd")]
|
||||
freebsd::audit_packages(&sudo).ok();
|
||||
|
||||
#[cfg(target_os = "dragonfly")]
|
||||
dragonfly::audit_packages(&sudo).ok();
|
||||
}
|
||||
|
||||
let mut post_command_failed = false;
|
||||
@@ -448,14 +488,14 @@ fn run() -> Result<()> {
|
||||
}
|
||||
|
||||
if config.keep_at_end() {
|
||||
print_info("\n(R)eboot\n(S)hell\n(Q)uit");
|
||||
print_info(t!("\n(R)eboot\n(S)hell\n(Q)uit"));
|
||||
loop {
|
||||
match get_key() {
|
||||
Ok(Key::Char('s')) | Ok(Key::Char('S')) => {
|
||||
run_shell();
|
||||
run_shell().context("Failed to execute shell")?;
|
||||
}
|
||||
Ok(Key::Char('r')) | Ok(Key::Char('R')) => {
|
||||
reboot();
|
||||
reboot().context("Failed to reboot")?;
|
||||
}
|
||||
Ok(Key::Char('q')) | Ok(Key::Char('Q')) => (),
|
||||
_ => {
|
||||
@@ -469,13 +509,14 @@ fn run() -> Result<()> {
|
||||
let failed = post_command_failed || runner.report().data().iter().any(|(_, result)| result.failed());
|
||||
|
||||
if !config.skip_notify() {
|
||||
terminal::notify_desktop(
|
||||
format!(
|
||||
"Topgrade finished {}",
|
||||
if failed { "with errors" } else { "successfully" }
|
||||
),
|
||||
None,
|
||||
);
|
||||
notify_desktop(
|
||||
if failed {
|
||||
t!("Topgrade finished with errors")
|
||||
} else {
|
||||
t!("Topgrade finished successfully")
|
||||
},
|
||||
Some(Duration::from_secs(10)),
|
||||
)
|
||||
}
|
||||
|
||||
if failed {
|
||||
@@ -486,9 +527,6 @@ fn run() -> Result<()> {
|
||||
}
|
||||
|
||||
fn main() {
|
||||
print_info("Due to r-darwish giving this project access to the original topgrade crate, this crate will no longer receive any updates beyond the 10.0.1 update. To install the supported version, please run the following command: \n");
|
||||
println!("cargo uninstall topgrade-rs");
|
||||
println!("cargo install topgrade");
|
||||
match run() {
|
||||
Ok(()) => {
|
||||
exit(0);
|
||||
@@ -508,7 +546,10 @@ fn main() {
|
||||
.is_some());
|
||||
|
||||
if !skip_print {
|
||||
println!("Error: {}", error);
|
||||
// The `Debug` implementation of `eyre::Result` prints a multi-line
|
||||
// error message that includes all the 'causes' added with
|
||||
// `.with_context(...)` calls.
|
||||
println!("{}", t!("Error: {error}", error = format!("{:?}", error)));
|
||||
}
|
||||
exit(1);
|
||||
}
|
||||
|
||||
@@ -34,7 +34,7 @@ impl<'a> Report<'a> {
        if let Some((key, success)) = result {
            let key = key.into();

            debug_assert!(!self.data.iter().any(|(k, _)| k == &key), "{} already reported", key);
            debug_assert!(!self.data.iter().any(|(k, _)| k == &key), "{key} already reported");
            self.data.push((key, success));
        }
    }

@@ -2,11 +2,12 @@ use crate::ctrlc;
use crate::error::{DryRun, SkipStep};
use crate::execution_context::ExecutionContext;
use crate::report::{Report, StepResult};
use crate::terminal::print_error;
use crate::{config::Step, terminal::should_retry};
use anyhow::Result;
use log::debug;
use color_eyre::eyre::Result;
use std::borrow::Cow;
use std::fmt::Debug;
use tracing::debug;

pub struct Runner<'a> {
    ctx: &'a ExecutionContext<'a>,
@@ -33,6 +34,14 @@ impl<'a> Runner<'a> {
        let key = key.into();
        debug!("Step {:?}", key);

        // alter the `func` to put it in a span
        let func = || {
            let span =
                tracing::span!(parent: tracing::Span::none(), tracing::Level::TRACE, "step", step = ?step, key = %key);
            let _guard = span.enter();
            func()
        };

        loop {
            match func() {
                Ok(()) => {
@@ -55,7 +64,12 @@ impl<'a> Runner<'a> {

            let ignore_failure = self.ctx.config().ignore_failure(step);
            let should_ask = interrupted || !(self.ctx.config().no_retry() || ignore_failure);
            let should_retry = should_ask && should_retry(interrupted, key.as_ref())?;
            let should_retry = if should_ask {
                print_error(&key, format!("{e:?}"));
                should_retry(interrupted, key.as_ref())?
            } else {
                false
            };

            if !should_retry {
                self.report.push_result(Some((

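The span-wrapping trick above is a general pattern: a closure is wrapped so every event it emits is attached to a step-scoped `tracing` span detached from the caller's span. A standalone sketch of the same idea (only the `tracing` crate is assumed; names are illustrative):

```rust
use tracing::{span, Level};

/// Run `work` inside a fresh, detached span so every event it emits
/// carries the step name, regardless of the caller's current span.
fn run_in_step_span<T>(step_name: &str, work: impl FnOnce() -> T) -> T {
    // `tracing::Span::none()` detaches the new span from any active parent.
    let span = span!(parent: tracing::Span::none(), Level::TRACE, "step", step = %step_name);
    let _guard = span.enter();
    work()
}
```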
@@ -1,8 +1,6 @@
#![cfg(windows)]

use anyhow::Result;
use log::{debug, error};
use color_eyre::eyre::Result;
use std::{env::current_exe, fs, path::PathBuf};
use tracing::{debug, error};

pub struct SelfRenamer {
    exe_path: PathBuf,
@@ -31,7 +29,7 @@ impl Drop for SelfRenamer {
        }

        match fs::rename(&self.temp_path, &self.exe_path) {
            Ok(_) => debug!("Moved topgrade back from {:?} to {:?}", self.temp_path, self.exe_path),
            Ok(_) => debug!("Moved Topgrade back from {:?} to {:?}", self.temp_path, self.exe_path),
            Err(e) => error!(
                "Could not move Topgrade from {} back to {}: {}",
                self.temp_path.display(),

@@ -1,59 +1,73 @@
|
||||
use std::env;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::process::CommandExt as _;
|
||||
use std::process::Command;
|
||||
|
||||
use crate::config::Step;
|
||||
use color_eyre::eyre::{bail, Result};
|
||||
use rust_i18n::t;
|
||||
use self_update_crate::backends::github::Update;
|
||||
use self_update_crate::update::UpdateStatus;
|
||||
|
||||
use super::terminal::*;
|
||||
#[cfg(windows)]
|
||||
use crate::error::Upgraded;
|
||||
use anyhow::{bail, Result};
|
||||
use self_update_crate::backends::github::Update;
|
||||
use self_update_crate::update::UpdateStatus;
|
||||
use std::env;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::process::CommandExt;
|
||||
use std::process::Command;
|
||||
|
||||
pub fn self_update() -> Result<()> {
|
||||
print_separator("Self update");
|
||||
let current_exe = env::current_exe();
|
||||
use crate::execution_context::ExecutionContext;
|
||||
|
||||
let target = self_update_crate::get_target();
|
||||
let result = Update::configure()
|
||||
.repo_owner("r-darwish")
|
||||
.repo_name("topgrade")
|
||||
.target(target)
|
||||
.bin_name(if cfg!(windows) { "topgrade.exe" } else { "topgrade" })
|
||||
.show_output(false)
|
||||
.show_download_progress(true)
|
||||
.current_version(self_update_crate::cargo_crate_version!())
|
||||
.no_confirm(true)
|
||||
.build()?
|
||||
.update_extended()?;
|
||||
pub fn self_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator(t!("Self update"));
|
||||
|
||||
if let UpdateStatus::Updated(release) = &result {
|
||||
println!("\nTopgrade upgraded to {}:\n", release.version);
|
||||
if let Some(body) = &release.body {
|
||||
println!("{}", body);
|
||||
}
|
||||
if ctx.run_type().dry() {
|
||||
println!("{}", t!("Would self-update"));
|
||||
Ok(())
|
||||
} else {
|
||||
println!("Topgrade is up-to-date");
|
||||
}
|
||||
let assume_yes = ctx.config().yes(Step::SelfUpdate);
|
||||
let current_exe = env::current_exe();
|
||||
|
||||
{
|
||||
if result.updated() {
|
||||
print_warning("Respawning...");
|
||||
let mut command = Command::new(current_exe?);
|
||||
command.args(env::args().skip(1)).env("TOPGRADE_NO_SELF_UPGRADE", "");
|
||||
let target = self_update_crate::get_target();
|
||||
let result = Update::configure()
|
||||
.repo_owner("topgrade-rs")
|
||||
.repo_name("topgrade")
|
||||
.target(target)
|
||||
.bin_name(if cfg!(windows) { "topgrade.exe" } else { "topgrade" })
|
||||
.show_output(true)
|
||||
.show_download_progress(true)
|
||||
.current_version(self_update_crate::cargo_crate_version!())
|
||||
.no_confirm(assume_yes)
|
||||
.build()?
|
||||
.update_extended()?;
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
let err = command.exec();
|
||||
bail!(err);
|
||||
if let UpdateStatus::Updated(release) = &result {
|
||||
println!("{}", t!("Topgrade upgraded to {version}:\n", version = release.version));
|
||||
if let Some(body) = &release.body {
|
||||
println!("{body}");
|
||||
}
|
||||
} else {
|
||||
println!("{}", t!("Topgrade is up-to-date"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let status = command.spawn().and_then(|mut c| c.wait())?;
|
||||
bail!(Upgraded(status));
|
||||
{
|
||||
if result.updated() {
|
||||
print_info(t!("Respawning..."));
|
||||
let mut command = Command::new(current_exe?);
|
||||
command.args(env::args().skip(1)).env("TOPGRADE_NO_SELF_UPGRADE", "");
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
let err = command.exec();
|
||||
bail!(err);
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let status = command.status()?;
|
||||
bail!(Upgraded(status));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,109 +1,196 @@
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::error::{self, TopgradeError};
|
||||
use crate::executor::CommandExt;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::{execution_context::ExecutionContext, utils::require};
|
||||
use log::{debug, error, warn};
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
// A string found in the output of docker for containers that weren't found in
|
||||
// the docker registry. We use this to gracefully handle and skip containers
|
||||
// that cannot be pulled, likely because they don't exist in the registry in
|
||||
// the first place. This happens e.g. when the user tags an image locally
|
||||
// themselves or when using docker-compose.
|
||||
const NONEXISTENT_REPO: &str = "repository does not exist";
|
||||
|
||||
/// Returns a Vector of all containers, with Strings in the format
|
||||
/// "REGISTRY/[PATH/]CONTAINER_NAME:TAG"
|
||||
fn list_containers(crt: &Path) -> Result<Vec<String>> {
|
||||
debug!(
|
||||
"Querying '{} image ls --format \"{{{{.Repository}}}}:{{{{.Tag}}}}\"' for containers",
|
||||
crt.display()
|
||||
);
|
||||
let output = Command::new(crt)
|
||||
.args(["image", "ls", "--format", "{{.Repository}}:{{.Tag}}"])
|
||||
.output()?;
|
||||
let output_str = String::from_utf8(output.stdout)?;
|
||||
|
||||
let mut retval = vec![];
|
||||
for line in output_str.lines() {
|
||||
if line.starts_with("localhost") {
|
||||
// Don't know how to update self-built containers
|
||||
debug!("Skipping self-built container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.contains("<none>") {
|
||||
// Bogus/dangling container or intermediate layer
|
||||
debug!("Skipping bogus container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.starts_with("vsc-") {
|
||||
debug!("Skipping visual studio code dev container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
debug!("Using container '{}'", line);
|
||||
retval.push(String::from(line));
|
||||
}
|
||||
|
||||
Ok(retval)
|
||||
}
|
||||
|
||||
pub fn run_containers(ctx: &ExecutionContext) -> Result<()> {
|
||||
// Prefer podman, fall back to docker if not present
|
||||
let crt = require("podman").or_else(|_| require("docker"))?;
|
||||
debug!("Using container runtime '{}'", crt.display());
|
||||
|
||||
print_separator("Containers");
|
||||
let mut success = true;
|
||||
let containers = list_containers(&crt)?;
|
||||
debug!("Containers to inspect: {:?}", containers);
|
||||
|
||||
for container in containers.iter() {
|
||||
debug!("Pulling container '{}'", container);
|
||||
let args = vec!["pull", &container[..]];
|
||||
let mut exec = ctx.run_type().execute(&crt);
|
||||
|
||||
if let Err(e) = exec.args(&args).check_run() {
|
||||
error!("Pulling container '{}' failed: {}", container, e);
|
||||
|
||||
// Find out if this is 'skippable'
|
||||
// This is necessary e.g. for docker, because unlike podman docker doesn't tell from
|
||||
// which repository a container originates (such as `docker.io`). This has the
|
||||
// practical consequence that all containers, whether self-built, created by
|
||||
// docker-compose or pulled from the docker hub, look exactly the same to us. We can
|
||||
// only find out what went wrong by manually parsing the output of the command...
|
||||
if match exec.check_output() {
|
||||
Ok(s) => s.contains(NONEXISTENT_REPO),
|
||||
Err(e) => match e.downcast_ref::<TopgradeError>() {
|
||||
Some(TopgradeError::ProcessFailedWithOutput(_, stderr)) => stderr.contains(NONEXISTENT_REPO),
|
||||
_ => false,
|
||||
},
|
||||
} {
|
||||
warn!("Skipping unknown container '{}'", container);
|
||||
continue;
|
||||
}
|
||||
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
// Remove dangling images
|
||||
debug!("Removing dangling images");
|
||||
if let Err(e) = ctx.run_type().execute(&crt).args(["image", "prune", "-f"]).check_run() {
|
||||
error!("Removing dangling images failed: {}", e);
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
if success {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow::anyhow!(error::StepFailed))
|
||||
}
|
||||
}
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
use tracing::{debug, error, warn};
|
||||
use wildmatch::WildMatch;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::error::{self, TopgradeError};
|
||||
use crate::terminal::print_separator;
|
||||
use crate::{execution_context::ExecutionContext, utils::require};
|
||||
use rust_i18n::t;
|
||||
|
||||
// A string found in the output of docker for containers that weren't found in
|
||||
// the docker registry. We use this to gracefully handle and skip containers
|
||||
// that cannot be pulled, likely because they don't exist in the registry in
|
||||
// the first place. This happens e.g. when the user tags an image locally
|
||||
// themselves or when using docker-compose.
|
||||
const NONEXISTENT_REPO: &str = "repository does not exist";
|
||||
|
||||
/// Uniquely identifies a `Container`.
|
||||
#[derive(Debug)]
|
||||
struct Container {
|
||||
/// `Repository` and `Tag`
|
||||
///
|
||||
/// format: `Repository:Tag`, e.g., `nixos/nix:latest`.
|
||||
repo_tag: String,
|
||||
/// Platform
|
||||
///
|
||||
/// format: `OS/Architecture`, e.g., `linux/amd64`.
|
||||
platform: String,
|
||||
}
|
||||
|
||||
impl Container {
|
||||
/// Construct a new `Container`.
|
||||
fn new(repo_tag: String, platform: String) -> Self {
|
||||
Self { repo_tag, platform }
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Container {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
// e.g., "`fedora:latest` for `linux/amd64`"
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
t!(
|
||||
"`{repo_tag}` for `{platform}`",
|
||||
repo_tag = self.repo_tag,
|
||||
platform = self.platform
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a Vector of all containers, with Strings in the format
|
||||
/// "REGISTRY/[PATH/]CONTAINER_NAME:TAG"
|
||||
///
|
||||
/// Containers specified in `ignored_containers` will be filtered out.
|
||||
fn list_containers(crt: &Path, ignored_containers: Option<&Vec<String>>) -> Result<Vec<Container>> {
|
||||
let ignored_containers = ignored_containers.map(|patterns| {
|
||||
patterns
|
||||
.iter()
|
||||
.map(|pattern| WildMatch::new(pattern))
|
||||
.collect::<Vec<WildMatch>>()
|
||||
});
|
||||
|
||||
debug!(
|
||||
"Querying '{} image ls --format \"{{{{.Repository}}}}:{{{{.Tag}}}}/{{{{.ID}}}}\"' for containers",
|
||||
crt.display()
|
||||
);
|
||||
let output = Command::new(crt)
|
||||
.args(["image", "ls", "--format", "{{.Repository}}:{{.Tag}} {{.ID}}"])
|
||||
.output_checked_with_utf8(|_| Ok(()))?;
|
||||
|
||||
let mut retval = vec![];
|
||||
for line in output.stdout.lines() {
|
||||
if line.starts_with("localhost") {
|
||||
// Don't know how to update self-built containers
|
||||
debug!("Skipping self-built container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.contains("<none>") {
|
||||
// Bogus/dangling container or intermediate layer
|
||||
debug!("Skipping bogus container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
if line.starts_with("vsc-") {
|
||||
debug!("Skipping visual studio code dev container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
|
||||
debug!("Using container '{}'", line);
|
||||
|
||||
// line is of format: `Repository:Tag ImageID`, e.g., `nixos/nix:latest d80fea9c32b4`
|
||||
let split_res = line.split(' ').collect::<Vec<&str>>();
|
||||
assert_eq!(split_res.len(), 2);
|
||||
let (repo_tag, image_id) = (split_res[0], split_res[1]);
|
||||
|
||||
if let Some(ref ignored_containers) = ignored_containers {
|
||||
if ignored_containers.iter().any(|pattern| pattern.matches(repo_tag)) {
|
||||
debug!("Skipping ignored container '{}'", line);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
debug!(
|
||||
"Querying '{} image inspect --format \"{{{{.Os}}}}/{{{{.Architecture}}}}\"' for container {}",
|
||||
crt.display(),
|
||||
image_id
|
||||
);
|
||||
let inspect_output = Command::new(crt)
|
||||
.args(["image", "inspect", image_id, "--format", "{{.Os}}/{{.Architecture}}"])
|
||||
.output_checked_with_utf8(|_| Ok(()))?;
|
||||
let mut platform = inspect_output.stdout;
|
||||
// truncate the tailing new line character
|
||||
platform.truncate(platform.len() - 1);
|
||||
assert!(platform.contains('/'));
|
||||
|
||||
retval.push(Container::new(repo_tag.to_string(), platform));
|
||||
}
|
||||
|
||||
Ok(retval)
|
||||
}
|
||||
|
||||
pub fn run_containers(ctx: &ExecutionContext) -> Result<()> {
|
||||
// Check what runtime is specified in the config
|
||||
let container_runtime = ctx.config().containers_runtime().to_string();
|
||||
let crt = require(container_runtime)?;
|
||||
debug!("Using container runtime '{}'", crt.display());
|
||||
|
||||
print_separator(t!("Containers"));
|
||||
let mut success = true;
|
||||
let containers =
|
||||
list_containers(&crt, ctx.config().containers_ignored_tags()).context("Failed to list Docker containers")?;
|
||||
debug!("Containers to inspect: {:?}", containers);
|
||||
|
||||
for container in containers.iter() {
|
||||
debug!("Pulling container '{}'", container);
|
||||
let args = vec![
|
||||
"pull",
|
||||
container.repo_tag.as_str(),
|
||||
"--platform",
|
||||
container.platform.as_str(),
|
||||
];
|
||||
let mut exec = ctx.run_type().execute(&crt);
|
||||
|
||||
if let Err(e) = exec.args(&args).status_checked() {
|
||||
error!("Pulling container '{}' failed: {}", container, e);
|
||||
|
||||
// Find out if this is 'skippable'
|
||||
// This is necessary e.g. for docker, because unlike podman docker doesn't tell from
|
||||
// which repository a container originates (such as `docker.io`). This has the
|
||||
// practical consequence that all containers, whether self-built, created by
|
||||
// docker-compose or pulled from the docker hub, look exactly the same to us. We can
|
||||
// only find out what went wrong by manually parsing the output of the command...
|
||||
if match exec.output_checked_utf8() {
|
||||
Ok(s) => s.stdout.contains(NONEXISTENT_REPO) || s.stderr.contains(NONEXISTENT_REPO),
|
||||
Err(e) => match e.downcast_ref::<TopgradeError>() {
|
||||
Some(TopgradeError::ProcessFailedWithOutput(_, _, stderr)) => stderr.contains(NONEXISTENT_REPO),
|
||||
_ => false,
|
||||
},
|
||||
} {
|
||||
warn!("Skipping unknown container '{}'", container);
|
||||
continue;
|
||||
}
|
||||
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
// Remove dangling images
|
||||
debug!("Removing dangling images");
|
||||
if let Err(e) = ctx
|
||||
.run_type()
|
||||
.execute(&crt)
|
||||
.args(["image", "prune", "-f"])
|
||||
.status_checked()
|
||||
{
|
||||
error!("Removing dangling images failed: {}", e);
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
if success {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(eyre!(error::StepFailed))
|
||||
}
|
||||
}
|
||||
|
||||
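The `containers_ignored_tags` handling above filters `repo:tag` strings against user-supplied wildcard patterns via the `wildmatch` crate, whose `WildMatch::new` / `matches` API is the one used in the diff. A self-contained sketch of that filtering step (the image and pattern strings are examples):

```rust
use wildmatch::WildMatch;

/// Drop every image whose `repo:tag` matches one of the ignore patterns.
fn filter_ignored(images: Vec<String>, ignored: &[String]) -> Vec<String> {
    let patterns: Vec<WildMatch> = ignored.iter().map(|p| WildMatch::new(p)).collect();
    images
        .into_iter()
        .filter(|image| !patterns.iter().any(|pattern| pattern.matches(image)))
        .collect()
}

fn main() {
    let images = vec![
        "docker.io/library/nginx:latest".to_string(),
        "ghcr.io/foo/bar:dev".to_string(),
    ];
    let ignored = vec!["*nginx*".to_string()];
    // Only the ghcr.io image survives the filter.
    assert_eq!(filter_ignored(images, &ignored).len(), 1);
}
```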
@@ -1,10 +1,12 @@
|
||||
#[cfg(any(windows, target_os = "macos"))]
|
||||
#[cfg(windows)]
|
||||
use std::env;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use anyhow::Result;
|
||||
use directories::BaseDirs;
|
||||
use color_eyre::eyre::Result;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use rust_i18n::t;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{require, require_option, PathExt};
|
||||
@@ -22,20 +24,12 @@ pub struct Emacs {
|
||||
}
|
||||
|
||||
impl Emacs {
|
||||
fn directory_path(base_dirs: &BaseDirs) -> Option<PathBuf> {
|
||||
fn directory_path() -> Option<PathBuf> {
|
||||
#[cfg(unix)]
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(target_os = "macos")] {
|
||||
let emacs_xdg_dir = env::var("XDG_CONFIG_HOME")
|
||||
.ok()
|
||||
.and_then(|config| PathBuf::from(config).join("emacs").if_exists())
|
||||
.or_else(|| base_dirs.home_dir().join(".config/emacs").if_exists());
|
||||
} else {
|
||||
let emacs_xdg_dir = base_dirs.config_dir().join("emacs").if_exists();
|
||||
}
|
||||
}
|
||||
#[cfg(unix)]
|
||||
return base_dirs.home_dir().join(".emacs.d").if_exists().or(emacs_xdg_dir);
|
||||
return {
|
||||
let emacs_xdg_dir = crate::XDG_DIRS.config_dir().join("emacs").if_exists();
|
||||
crate::HOME_DIR.join(".emacs.d").if_exists().or(emacs_xdg_dir)
|
||||
};
|
||||
|
||||
#[cfg(windows)]
|
||||
return env::var("HOME")
|
||||
@@ -46,11 +40,11 @@ impl Emacs {
|
||||
.if_exists()
|
||||
.or_else(|| PathBuf::from(&home).join(".config\\emacs").if_exists())
|
||||
})
|
||||
.or_else(|| base_dirs.data_dir().join(".emacs.d").if_exists());
|
||||
.or_else(|| crate::WINDOWS_DIRS.data_dir().join(".emacs.d").if_exists());
|
||||
}
|
||||
|
||||
pub fn new(base_dirs: &BaseDirs) -> Self {
|
||||
let directory = Emacs::directory_path(base_dirs);
|
||||
pub fn new() -> Self {
|
||||
let directory = Emacs::directory_path();
|
||||
let doom = directory.as_ref().and_then(|d| d.join(DOOM_PATH).if_exists());
|
||||
Self { directory, doom }
|
||||
}
|
||||
@@ -73,7 +67,7 @@ impl Emacs {
|
||||
|
||||
command.args(["upgrade"]);
|
||||
|
||||
command.check_run()
|
||||
command.status_checked()
|
||||
}
|
||||
|
||||
pub fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -81,13 +75,16 @@ impl Emacs {
|
||||
if let Some(doom) = &self.doom {
|
||||
Emacs::update_doom(doom, ctx)?;
|
||||
}
|
||||
let init_file = require_option(self.directory.as_ref(), String::from("Emacs directory does not exist"))?
|
||||
.join("init.el")
|
||||
.require()?;
|
||||
let init_file = require_option(
|
||||
self.directory.as_ref(),
|
||||
t!("Emacs directory does not exist").to_string(),
|
||||
)?
|
||||
.join("init.el")
|
||||
.require()?;
|
||||
|
||||
print_separator("Emacs");
|
||||
|
||||
let mut command = ctx.run_type().execute(&emacs);
|
||||
let mut command = ctx.run_type().execute(emacs);
|
||||
|
||||
command
|
||||
.args(["--batch", "--debug-init", "-l"])
|
||||
@@ -105,6 +102,6 @@ impl Emacs {
|
||||
#[cfg(not(unix))]
|
||||
command.arg(EMACS_UPGRADE);
|
||||
|
||||
command.check_run()
|
||||
command.status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
src/steps/generic.rs (1022 lines changed) — file diff suppressed because it is too large.
src/steps/git.rs (523 lines changed)
@@ -3,141 +3,176 @@ use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, Output, Stdio};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::{eyre, Result};
|
||||
use console::style;
|
||||
use futures::stream::{iter, FuturesUnordered};
|
||||
use futures::StreamExt;
|
||||
use futures::stream::{iter, FuturesUnordered, StreamExt};
|
||||
use glob::{glob_with, MatchOptions};
|
||||
use log::{debug, error};
|
||||
use tokio::process::Command as AsyncCommand;
|
||||
use tokio::runtime;
|
||||
use tracing::{debug, error};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::config::Step;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::{CommandExt, RunType};
|
||||
use crate::steps::emacs::Emacs;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::utils::{which, PathExt};
|
||||
use crate::{error::SkipStep, terminal::print_warning};
|
||||
use crate::utils::{require, PathExt};
|
||||
use crate::{error::SkipStep, terminal::print_warning, HOME_DIR};
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use rust_i18n::t;
|
||||
|
||||
#[cfg(unix)]
|
||||
use crate::XDG_DIRS;
|
||||
|
||||
#[cfg(windows)]
|
||||
use crate::WINDOWS_DIRS;
|
||||
|
||||
pub fn run_git_pull(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut repos = RepoStep::try_new()?;
|
||||
let config = ctx.config();
|
||||
|
||||
// handle built-in repos
|
||||
if config.use_predefined_git_repos() {
|
||||
// should be executed on all the platforms
|
||||
{
|
||||
if config.should_run(Step::Emacs) {
|
||||
let emacs = Emacs::new();
|
||||
if !emacs.is_doom() {
|
||||
if let Some(directory) = emacs.directory() {
|
||||
repos.insert_if_repo(directory);
|
||||
}
|
||||
}
|
||||
repos.insert_if_repo(HOME_DIR.join(".doom.d"));
|
||||
}
|
||||
|
||||
if config.should_run(Step::Vim) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".vim"));
|
||||
repos.insert_if_repo(HOME_DIR.join(".config/nvim"));
|
||||
}
|
||||
|
||||
repos.insert_if_repo(HOME_DIR.join(".ideavimrc"));
|
||||
repos.insert_if_repo(HOME_DIR.join(".intellimacs"));
|
||||
|
||||
if config.should_run(Step::Rcm) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".dotfiles"));
|
||||
}
|
||||
|
||||
let powershell = crate::steps::powershell::Powershell::new();
|
||||
if let Some(profile) = powershell.profile() {
|
||||
repos.insert_if_repo(profile);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
repos.insert_if_repo(crate::steps::zsh::zshrc());
|
||||
if config.should_run(Step::Tmux) {
|
||||
repos.insert_if_repo(HOME_DIR.join(".tmux"));
|
||||
}
|
||||
repos.insert_if_repo(HOME_DIR.join(".config/fish"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("openbox"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("bspwm"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("i3"));
|
||||
repos.insert_if_repo(XDG_DIRS.config_dir().join("sway"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
repos.insert_if_repo(
|
||||
WINDOWS_DIRS
|
||||
.cache_dir()
|
||||
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
|
||||
);
|
||||
|
||||
super::os::windows::insert_startup_scripts(&mut repos).ok();
|
||||
}
|
||||
}
|
||||
|
||||
// Handle user-defined repos
|
||||
if let Some(custom_git_repos) = config.git_repos() {
|
||||
for git_repo in custom_git_repos {
|
||||
repos.glob_insert(git_repo);
|
||||
}
|
||||
}
|
||||
|
||||
// Warn the user about the bad patterns.
|
||||
//
|
||||
// NOTE: this should be executed **before** skipping the Git step or the
|
||||
// user won't receive this warning in the cases where all the paths configured
|
||||
// are bad patterns.
|
||||
repos.bad_patterns.iter().for_each(|pattern| {
|
||||
print_warning(t!(
|
||||
"Path {pattern} did not contain any git repositories",
|
||||
pattern = pattern
|
||||
))
|
||||
});
|
||||
|
||||
if repos.is_repos_empty() {
|
||||
return Err(SkipStep(t!("No repositories to pull").to_string()).into());
|
||||
}
|
||||
|
||||
print_separator(t!("Git repositories"));
|
||||
|
||||
repos.pull_repos(ctx)
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
static PATH_PREFIX: &str = "\\\\?\\";
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Git {
|
||||
git: Option<PathBuf>,
|
||||
}
|
||||
|
||||
pub struct Repositories<'a> {
|
||||
git: &'a Git,
|
||||
repositories: HashSet<String>,
|
||||
pub struct RepoStep {
|
||||
git: PathBuf,
|
||||
repos: HashSet<PathBuf>,
|
||||
glob_match_options: MatchOptions,
|
||||
bad_patterns: Vec<String>,
|
||||
}
|
||||
|
||||
fn check_output(output: Output) -> Result<()> {
|
||||
#[track_caller]
|
||||
fn output_checked_utf8(output: Output) -> Result<()> {
|
||||
if !(output.status.success()) {
|
||||
let stderr = String::from_utf8(output.stderr).unwrap();
|
||||
Err(anyhow!(stderr))
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stderr = stderr.trim();
|
||||
Err(eyre!("{stderr}"))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
async fn pull_repository(repo: String, git: &Path, ctx: &ExecutionContext<'_>) -> Result<()> {
|
||||
let path = repo.to_string();
|
||||
let before_revision = get_head_revision(git, &repo);
|
||||
|
||||
println!("{} {}", style("Pulling").cyan().bold(), path);
|
||||
|
||||
let mut command = AsyncCommand::new(git);
|
||||
|
||||
command
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args(["pull", "--ff-only"]);
|
||||
|
||||
if let Some(extra_arguments) = ctx.config().git_arguments() {
|
||||
command.args(extra_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let pull_output = command.output().await?;
|
||||
let submodule_output = AsyncCommand::new(git)
|
||||
.args(["submodule", "update", "--recursive"])
|
||||
.current_dir(&repo)
|
||||
.stdin(Stdio::null())
|
||||
.output()
|
||||
.await?;
|
||||
let result = check_output(pull_output).and_then(|_| check_output(submodule_output));
|
||||
|
||||
if let Err(message) = &result {
|
||||
println!("{} pulling {}", style("Failed").red().bold(), &repo);
|
||||
print!("{}", message);
|
||||
} else {
|
||||
let after_revision = get_head_revision(git, &repo);
|
||||
|
||||
match (&before_revision, &after_revision) {
|
||||
(Some(before), Some(after)) if before != after => {
|
||||
println!("{} {}:", style("Changed").yellow().bold(), &repo);
|
||||
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args([
|
||||
"--no-pager",
|
||||
"log",
|
||||
"--no-decorate",
|
||||
"--oneline",
|
||||
&format!("{}..{}", before, after),
|
||||
])
|
||||
.spawn()
|
||||
.unwrap()
|
||||
.wait()
|
||||
.unwrap();
|
||||
println!();
|
||||
}
|
||||
_ => {
|
||||
println!("{} {}", style("Up-to-date").green().bold(), &repo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result.map(|_| ())
|
||||
}
|
||||
|
||||
fn get_head_revision(git: &Path, repo: &str) -> Option<String> {
|
||||
fn get_head_revision<P: AsRef<Path>>(git: &Path, repo: P) -> Option<String> {
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(repo)
|
||||
.current_dir(repo.as_ref())
|
||||
.args(["rev-parse", "HEAD"])
|
||||
.check_output()
|
||||
.map(|output| output.trim().to_string())
|
||||
.output_checked_utf8()
|
||||
.map(|output| output.stdout.trim().to_string())
|
||||
.map_err(|e| {
|
||||
error!("Error getting revision for {}: {}", repo, e);
|
||||
error!("Error getting revision for {}: {e}", repo.as_ref().display(),);
|
||||
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
fn has_remotes(git: &Path, repo: &str) -> Option<bool> {
|
||||
Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(repo)
|
||||
.args(["remote", "show"])
|
||||
.check_output()
|
||||
.map(|output| output.lines().count() > 0)
|
||||
.map_err(|e| {
|
||||
error!("Error getting remotes for {}: {}", repo, e);
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
impl RepoStep {
|
||||
/// Try to create a `RepoStep`, fail if `git` is not found.
|
||||
pub fn try_new() -> Result<Self> {
|
||||
let git = require("git")?;
|
||||
let mut glob_match_options = MatchOptions::new();
|
||||
|
||||
impl Git {
|
||||
pub fn new() -> Self {
|
||||
Self { git: which("git") }
|
||||
if cfg!(windows) {
|
||||
glob_match_options.case_sensitive = false;
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
git,
|
||||
repos: HashSet::new(),
|
||||
bad_patterns: Vec::new(),
|
||||
glob_match_options,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<String> {
|
||||
/// Try to get the root of the repo specified in `path`.
|
||||
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<PathBuf> {
|
||||
match path.as_ref().canonicalize() {
|
||||
Ok(mut path) => {
|
||||
debug_assert!(path.exists());
|
||||
@@ -161,65 +196,210 @@ impl Git {
|
||||
path_string
|
||||
};
|
||||
|
||||
if let Some(git) = &self.git {
|
||||
let output = Command::new(git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(path)
|
||||
.args(["rev-parse", "--show-toplevel"])
|
||||
.check_output()
|
||||
.ok()
|
||||
.map(|output| output.trim().to_string());
|
||||
return output;
|
||||
}
|
||||
let output = Command::new(&self.git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(path)
|
||||
.args(["rev-parse", "--show-toplevel"])
|
||||
.output_checked_utf8()
|
||||
.ok()
|
||||
// trim the last newline char
|
||||
.map(|output| PathBuf::from(output.stdout.trim()));
|
||||
|
||||
return output;
|
||||
}
|
||||
Err(e) => match e.kind() {
|
||||
io::ErrorKind::NotFound => debug!("{} does not exists", path.as_ref().display()),
|
||||
_ => error!("Error looking for {}: {}", path.as_ref().display(), e),
|
||||
io::ErrorKind::NotFound => debug!("{} does not exist", path.as_ref().display()),
|
||||
_ => error!("Error looking for {}: {e}", path.as_ref().display(),),
|
||||
},
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
pub fn multi_pull_step(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
|
||||
if repositories.repositories.is_empty() {
|
||||
return Err(SkipStep(String::from("No repositories to pull")).into());
|
||||
}
|
||||
|
||||
print_separator("Git repositories");
|
||||
repositories
|
||||
.bad_patterns
|
||||
.iter()
|
||||
.for_each(|pattern| print_warning(format!("Path {} did not contain any git repositories", pattern)));
|
||||
self.multi_pull(repositories, ctx)
|
||||
/// Check if `path` is a git repo, if yes, add it to `self.repos`.
|
||||
///
|
||||
/// Return the check result.
|
||||
pub fn insert_if_repo<P: AsRef<Path>>(&mut self, path: P) -> bool {
|
||||
if let Some(repo) = self.get_repo_root(path) {
|
||||
self.repos.insert(repo);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn multi_pull(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
|
||||
let git = self.git.as_ref().unwrap();
|
||||
/// Check if `repo` has a remote.
|
||||
fn has_remotes<P: AsRef<Path>>(&self, repo: P) -> Option<bool> {
|
||||
let mut cmd = Command::new(&self.git);
|
||||
cmd.stdin(Stdio::null())
|
||||
.current_dir(repo.as_ref())
|
||||
.args(["remote", "show"]);
|
||||
|
||||
if let RunType::Dry = ctx.run_type() {
|
||||
repositories
|
||||
.repositories
|
||||
let res = cmd.output_checked_utf8();
|
||||
|
||||
res.map(|output| output.stdout.lines().count() > 0)
|
||||
.map_err(|e| {
|
||||
error!("Error getting remotes for {}: {e}", repo.as_ref().display());
|
||||
e
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
/// Similar to `insert_if_repo`, with glob support.
|
||||
pub fn glob_insert(&mut self, pattern: &str) {
|
||||
if let Ok(glob) = glob_with(pattern, self.glob_match_options) {
|
||||
let mut last_git_repo: Option<PathBuf> = None;
|
||||
for entry in glob {
|
||||
match entry {
|
||||
Ok(path) => {
|
||||
if let Some(last_git_repo) = &last_git_repo {
|
||||
if path.is_descendant_of(last_git_repo) {
|
||||
debug!(
|
||||
"Skipping {} because it's a descendant of last known repo {}",
|
||||
path.display(),
|
||||
last_git_repo.display()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if self.insert_if_repo(&path) {
|
||||
last_git_repo = Some(path);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Error in path {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if last_git_repo.is_none() {
|
||||
self.bad_patterns.push(String::from(pattern));
|
||||
}
|
||||
} else {
|
||||
error!("Bad glob pattern: {pattern}");
|
||||
}
|
||||
}
|
||||
|
||||
/// True if `self.repos` is empty.
|
||||
pub fn is_repos_empty(&self) -> bool {
|
||||
self.repos.is_empty()
|
||||
}
|
||||
|
||||
/// Remove `path` from `self.repos`.
|
||||
///
|
||||
// `cfg(unix)` because it is only used in the oh-my-zsh step.
|
||||
#[cfg(unix)]
|
||||
pub fn remove<P: AsRef<Path>>(&mut self, path: P) {
|
||||
let _removed = self.repos.remove(path.as_ref());
|
||||
debug_assert!(_removed);
|
||||
}
|
||||
|
||||
/// Try to pull a repo.
|
||||
async fn pull_repo<P: AsRef<Path>>(&self, ctx: &ExecutionContext<'_>, repo: P) -> Result<()> {
|
||||
let before_revision = get_head_revision(&self.git, &repo);
|
||||
|
||||
if ctx.config().verbose() {
|
||||
println!("{} {}", style(t!("Pulling")).cyan().bold(), repo.as_ref().display());
|
||||
}
|
||||
|
||||
let mut command = AsyncCommand::new(&self.git);
|
||||
|
||||
command
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args(["pull", "--ff-only"]);
|
||||
|
||||
if let Some(extra_arguments) = ctx.config().git_arguments() {
|
||||
command.args(extra_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let pull_output = command.output().await?;
|
||||
let submodule_output = AsyncCommand::new(&self.git)
|
||||
.args(["submodule", "update", "--recursive"])
|
||||
.current_dir(&repo)
|
||||
.stdin(Stdio::null())
|
||||
.output()
|
||||
.await?;
|
||||
let result = output_checked_utf8(pull_output)
|
||||
.and_then(|_| output_checked_utf8(submodule_output))
|
||||
.wrap_err_with(|| format!("Failed to pull {}", repo.as_ref().display()));
|
||||
|
||||
if result.is_err() {
|
||||
println!(
|
||||
"{} {} {}",
|
||||
style(t!("Failed")).red().bold(),
|
||||
t!("pulling"),
|
||||
repo.as_ref().display()
|
||||
);
|
||||
} else {
|
||||
let after_revision = get_head_revision(&self.git, repo.as_ref());
|
||||
|
||||
match (&before_revision, &after_revision) {
|
||||
(Some(before), Some(after)) if before != after => {
|
||||
println!("{} {}", style(t!("Changed")).yellow().bold(), repo.as_ref().display());
|
||||
|
||||
Command::new(&self.git)
|
||||
.stdin(Stdio::null())
|
||||
.current_dir(&repo)
|
||||
.args([
|
||||
"--no-pager",
|
||||
"log",
|
||||
"--no-decorate",
|
||||
"--oneline",
|
||||
&format!("{before}..{after}"),
|
||||
])
|
||||
.status_checked()?;
|
||||
println!();
|
||||
}
|
||||
_ => {
|
||||
if ctx.config().verbose() {
|
||||
println!("{} {}", style(t!("Up-to-date")).green().bold(), repo.as_ref().display());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result.map(|_| ())
|
||||
}
|
||||
|
||||
/// Pull the repositories specified in `self.repos`.
|
||||
///
|
||||
/// # NOTE
|
||||
/// This function will create an async runtime and do the real job so the
|
||||
/// function itself is not async.
|
||||
fn pull_repos(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
if ctx.run_type().dry() {
|
||||
self.repos
|
||||
.iter()
|
||||
.for_each(|repo| println!("Would pull {}", &repo));
|
||||
.for_each(|repo| println!("{}", t!("Would pull {repo}", repo = repo.display())));
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let futures_iterator = repositories
|
||||
.repositories
|
||||
if !ctx.config().verbose() {
|
||||
println!(
|
||||
"\n{} {}\n",
|
||||
style(t!("Only")).green().bold(),
|
||||
t!("updated repositories will be shown...")
|
||||
);
|
||||
}
|
||||
|
||||
let futures_iterator = self
|
||||
.repos
|
||||
.iter()
|
||||
.filter(|repo| match has_remotes(git, repo) {
|
||||
.filter(|repo| match self.has_remotes(repo) {
|
||||
Some(false) => {
|
||||
println!(
|
||||
"{} {} because it has no remotes",
|
||||
style("Skipping").yellow().bold(),
|
||||
repo
|
||||
"{} {} {}",
|
||||
style(t!("Skipping")).yellow().bold(),
|
||||
repo.display(),
|
||||
t!("because it has no remotes")
|
||||
);
|
||||
false
|
||||
}
|
||||
_ => true, // repo has remotes or command to check for remotes has failed. proceed to pull anyway.
|
||||
})
|
||||
.map(|repo| pull_repository(repo.clone(), git, ctx));
|
||||
.map(|repo| self.pull_repo(ctx, repo));
|
||||
|
||||
let stream_of_futures = if let Some(limit) = ctx.config().git_concurrency_limit() {
|
||||
iter(futures_iterator).buffer_unordered(limit).boxed()
|
||||
@@ -234,74 +414,3 @@ impl Git {
|
||||
error.unwrap_or(Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Repositories<'a> {
|
||||
pub fn new(git: &'a Git) -> Self {
|
||||
let mut glob_match_options = MatchOptions::new();
|
||||
|
||||
if cfg!(windows) {
|
||||
glob_match_options.case_sensitive = false;
|
||||
}
|
||||
|
||||
Self {
|
||||
git,
|
||||
repositories: HashSet::new(),
|
||||
bad_patterns: Vec::new(),
|
||||
glob_match_options,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_if_repo<P: AsRef<Path>>(&mut self, path: P) -> bool {
|
||||
if let Some(repo) = self.git.get_repo_root(path) {
|
||||
self.repositories.insert(repo);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn glob_insert(&mut self, pattern: &str) {
|
||||
if let Ok(glob) = glob_with(pattern, self.glob_match_options) {
|
||||
let mut last_git_repo: Option<PathBuf> = None;
|
||||
for entry in glob {
|
||||
match entry {
|
||||
Ok(path) => {
|
||||
if let Some(last_git_repo) = &last_git_repo {
|
||||
if path.is_descendant_of(last_git_repo) {
|
||||
debug!(
|
||||
"Skipping {} because it's a decendant of last known repo {}",
|
||||
path.display(),
|
||||
last_git_repo.display()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if self.insert_if_repo(&path) {
|
||||
last_git_repo = Some(path);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Error in path {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if last_git_repo.is_none() {
|
||||
self.bad_patterns.push(String::from(pattern));
|
||||
}
|
||||
} else {
|
||||
error!("Bad glob pattern: {}", pattern);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.repositories.is_empty()
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub fn remove(&mut self, path: &str) {
|
||||
let _removed = self.repositories.remove(path);
|
||||
debug_assert!(_removed);
|
||||
}
|
||||
}
|
||||
|
||||
src/steps/go.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
use std::path::PathBuf;
use std::process::Command;

use color_eyre::eyre::Result;

use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::terminal::print_separator;
use crate::utils;
use crate::utils::PathExt;

/// <https://github.com/Gelio/go-global-update>
pub fn run_go_global_update(ctx: &ExecutionContext) -> Result<()> {
    let go_global_update = require_go_bin("go-global-update")?;

    print_separator("go-global-update");

    ctx.run_type().execute(go_global_update).status_checked()
}

/// <https://github.com/nao1215/gup>
pub fn run_go_gup(ctx: &ExecutionContext) -> Result<()> {
    let gup = require_go_bin("gup")?;

    print_separator("gup");

    ctx.run_type().execute(gup).arg("update").status_checked()
}

/// Get the path of a Go binary.
fn require_go_bin(name: &str) -> Result<PathBuf> {
    utils::require(name).or_else(|_| {
        let go = utils::require("go")?;
        // TODO: Does this work? `go help gopath` says that:
        // > The GOPATH environment variable lists places to look for Go code.
        // > On Unix, the value is a colon-separated string.
        // > On Windows, the value is a semicolon-separated string.
        // > On Plan 9, the value is a list.
        // Should we also fall back to the env variable?
        let gopath_output = Command::new(go).args(["env", "GOPATH"]).output_checked_utf8()?;
        let gopath = gopath_output.stdout.trim();

        PathBuf::from(gopath).join("bin").join(name).require()
    })
}
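// Editor's note: a minimal sketch, not part of this diff, of the GOPATH env-var
// fallback the TODO above asks about. The helper name `gopath_from_env` is
// hypothetical; it only takes the first entry, since GOPATH may list several
// directories separated by the platform's path separator:
//
//     fn gopath_from_env() -> Option<PathBuf> {
//         let raw = std::env::var_os("GOPATH")?;
//         // `std::env::split_paths` handles the ':' vs ';' separator per platform.
//         std::env::split_paths(&raw).next()
//     }
//
// `go env GOPATH` would remain the authoritative source; this would only be
// consulted when invoking `go` itself fails.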
@@ -1,10 +1,9 @@
use crate::error::TopgradeError;
use crate::terminal::print_separator;
use crate::utils::require;
use anyhow::Result;
use color_eyre::eyre::Result;
use rust_i18n::t;

use crate::execution_context::ExecutionContext;
use crate::executor::ExecutorOutput;

const UPGRADE_KAK: &str = include_str!("upgrade.kak");

@@ -13,19 +12,13 @@ pub fn upgrade_kak_plug(ctx: &ExecutionContext) -> Result<()> {

    print_separator("Kakoune");

    let mut command = ctx.run_type().execute(&kak);
    command.args(["-ui", "dummy", "-e", UPGRADE_KAK]);
    // TODO: Why suppress output for this command?
    ctx.run_type()
        .execute(kak)
        .args(["-ui", "dummy", "-e", UPGRADE_KAK])
        .output()?;

    let output = command.output()?;

    if let ExecutorOutput::Wet(output) = output {
        let status = output.status;
        if !status.success() {
            return Err(TopgradeError::ProcessFailed(status).into());
        } else {
            println!("Plugins upgraded")
        }
    }
    println!("{}", t!("Plugins upgraded"));

    Ok(())
}

@@ -2,6 +2,7 @@ pub mod containers;
pub mod emacs;
pub mod generic;
pub mod git;
pub mod go;
pub mod kakoune;
pub mod node;
pub mod os;

@@ -1,66 +1,106 @@
|
||||
#![allow(unused_imports)]
|
||||
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::prelude::MetadataExt;
|
||||
use std::fmt::Display;
|
||||
#[cfg(target_os = "linux")]
|
||||
use std::os::unix::fs::MetadataExt;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
use anyhow::Result;
|
||||
use directories::BaseDirs;
|
||||
use log::debug;
|
||||
#[cfg(unix)]
|
||||
use crate::utils::{get_require_sudo_string, require_option};
|
||||
use crate::HOME_DIR;
|
||||
use color_eyre::eyre::Result;
|
||||
#[cfg(target_os = "linux")]
|
||||
use nix::unistd::Uid;
|
||||
use rust_i18n::t;
|
||||
use semver::Version;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::executor::{CommandExt, RunType};
|
||||
use crate::terminal::print_separator;
|
||||
use crate::command::CommandExt;
|
||||
use crate::terminal::{print_info, print_separator};
|
||||
use crate::utils::{require, PathExt};
|
||||
use crate::{error::SkipStep, execution_context::ExecutionContext};
|
||||
|
||||
enum NPMVariant {
|
||||
Npm,
|
||||
Pnpm,
|
||||
}
|
||||
|
||||
impl NPMVariant {
|
||||
const fn short_name(&self) -> &str {
|
||||
match self {
|
||||
NPMVariant::Npm => "npm",
|
||||
NPMVariant::Pnpm => "pnpm",
|
||||
}
|
||||
}
|
||||
|
||||
const fn is_npm(&self) -> bool {
|
||||
matches!(self, NPMVariant::Npm)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for NPMVariant {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str(self.short_name())
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::upper_case_acronyms)]
|
||||
struct NPM {
|
||||
command: PathBuf,
|
||||
variant: NPMVariant,
|
||||
}
|
||||
|
||||
impl NPM {
|
||||
fn new(command: PathBuf) -> Self {
|
||||
Self { command }
|
||||
fn new(command: PathBuf, variant: NPMVariant) -> Self {
|
||||
Self { command, variant }
|
||||
}
|
||||
|
||||
/// Is this “NPM” with a version of at least 8.11.0?
|
||||
fn is_npm_8(&self) -> bool {
|
||||
let v = self.version();
|
||||
|
||||
self.variant.is_npm() && matches!(v, Ok(v) if v >= Version::new(8, 11, 0))
|
||||
}
|
||||
|
||||
/// Get the most suitable “global location” argument
|
||||
/// of this NPM instance.
|
||||
///
|
||||
/// If the “NPM” version is at least 8.11.0, we use
|
||||
/// `--location=global`; otherwise, use `-g`.
|
||||
fn global_location_arg(&self) -> &str {
|
||||
if self.is_npm_8() {
|
||||
"--location=global"
|
||||
} else {
|
||||
"-g"
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
fn root(&self) -> Result<PathBuf> {
|
||||
let version = self.version()?;
|
||||
let args = if version < Version::new(8, 11, 0) {
|
||||
["root", "-g"]
|
||||
} else {
|
||||
["root", "--location=global"]
|
||||
};
|
||||
let args = ["root", self.global_location_arg()];
|
||||
Command::new(&self.command)
|
||||
.args(args)
|
||||
.check_output()
|
||||
.map(|s| PathBuf::from(s.trim()))
|
||||
.output_checked_utf8()
|
||||
.map(|s| PathBuf::from(s.stdout.trim()))
|
||||
}
|
||||
|
||||
fn version(&self) -> Result<Version> {
|
||||
let version_str = Command::new(&self.command)
|
||||
.args(["--version"])
|
||||
.check_output()
|
||||
.map(|s| s.trim().to_owned());
|
||||
.output_checked_utf8()
|
||||
.map(|s| s.stdout.trim().to_owned());
|
||||
Version::parse(&version_str?).map_err(|err| err.into())
|
||||
}
|
||||
|
||||
fn upgrade(&self, run_type: RunType, use_sudo: bool) -> Result<()> {
|
||||
print_separator("Node Package Manager");
|
||||
let version = self.version()?;
|
||||
let args = if version < Version::new(8, 11, 0) {
|
||||
["update", "-g"]
|
||||
} else {
|
||||
["update", "--location=global"]
|
||||
};
|
||||
fn upgrade(&self, ctx: &ExecutionContext, use_sudo: bool) -> Result<()> {
|
||||
let args = ["update", self.global_location_arg()];
|
||||
if use_sudo {
|
||||
run_type.execute("sudo").args(args).check_run()?;
|
||||
let sudo = require_option(ctx.sudo().clone(), get_require_sudo_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.arg(&self.command)
|
||||
.args(args)
|
||||
.status_checked()?;
|
||||
} else {
|
||||
run_type.execute(&self.command).args(args).check_run()?;
|
||||
ctx.run_type().execute(&self.command).args(args).status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -70,7 +110,7 @@ impl NPM {
|
||||
pub fn should_use_sudo(&self) -> Result<bool> {
|
||||
let npm_root = self.root()?;
|
||||
if !npm_root.exists() {
|
||||
return Err(SkipStep(format!("NPM root at {} doesn't exist", npm_root.display(),)).into());
|
||||
return Err(SkipStep(format!("{} root at {} doesn't exist", self.variant, npm_root.display())).into());
|
||||
}
|
||||
|
||||
let metadata = std::fs::metadata(&npm_root)?;
|
||||
@@ -93,27 +133,38 @@ impl Yarn {
|
||||
}
|
||||
}
|
||||
|
||||
fn has_global_subcmd(&self) -> bool {
|
||||
// Get the version of Yarn. After Yarn 2.x (berry),
|
||||
// “yarn global” has been replaced with “yarn dlx”.
|
||||
//
|
||||
// As “yarn dlx” doesn't need to “upgrade”, we
|
||||
// ignore the whole task if Yarn is 2.x or above.
|
||||
let version = Command::new(&self.command).args(["--version"]).output_checked_utf8();
|
||||
|
||||
matches!(version, Ok(ver) if ver.stdout.starts_with('1') || ver.stdout.starts_with('0'))
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
fn root(&self) -> Result<PathBuf> {
|
||||
let args = ["global", "dir"];
|
||||
Command::new(&self.command)
|
||||
.args(args)
|
||||
.check_output()
|
||||
.map(|s| PathBuf::from(s.trim()))
|
||||
.output_checked_utf8()
|
||||
.map(|s| PathBuf::from(s.stdout.trim()))
|
||||
}
|
||||
|
||||
fn upgrade(&self, run_type: RunType, use_sudo: bool) -> Result<()> {
|
||||
print_separator("Yarn Package Manager");
|
||||
fn upgrade(&self, ctx: &ExecutionContext, use_sudo: bool) -> Result<()> {
|
||||
let args = ["global", "upgrade"];
|
||||
|
||||
if use_sudo {
|
||||
run_type
|
||||
.execute("sudo")
|
||||
let sudo = require_option(ctx.sudo().clone(), get_require_sudo_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.arg(self.yarn.as_ref().unwrap_or(&self.command))
|
||||
.args(args)
|
||||
.check_run()?;
|
||||
.status_checked()?;
|
||||
} else {
|
||||
run_type.execute(&self.command).args(args).check_run()?;
|
||||
ctx.run_type().execute(&self.command).args(args).status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -123,7 +174,7 @@ impl Yarn {
|
||||
pub fn should_use_sudo(&self) -> Result<bool> {
|
||||
let yarn_root = self.root()?;
|
||||
if !yarn_root.exists() {
|
||||
return Err(SkipStep(format!("NPM root at {} doesn't exist", yarn_root.display(),)).into());
|
||||
return Err(SkipStep(format!("Yarn root at {} doesn't exist", yarn_root.display(),)).into());
|
||||
}
|
||||
|
||||
let metadata = std::fs::metadata(&yarn_root)?;
|
||||
@@ -133,6 +184,92 @@ impl Yarn {
|
||||
}
|
||||
}
|
||||
|
||||
struct Deno {
|
||||
command: PathBuf,
|
||||
}
|
||||
|
||||
impl Deno {
|
||||
fn new(command: PathBuf) -> Self {
|
||||
Self { command }
|
||||
}
|
||||
|
||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut args = vec![];
|
||||
|
||||
let version = ctx.config().deno_version();
|
||||
if let Some(version) = version {
|
||||
let bin_version = self.version()?;
|
||||
|
||||
if bin_version >= Version::new(2, 0, 0) {
|
||||
args.push(version);
|
||||
} else if bin_version >= Version::new(1, 6, 0) {
|
||||
match version {
|
||||
"stable" => { /* do nothing, as stable is the default channel to upgrade */ }
|
||||
"rc" => {
|
||||
return Err(SkipStep(
|
||||
"Deno (1.6.0-2.0.0) cannot be upgraded to a release candidate".to_string(),
|
||||
)
|
||||
.into());
|
||||
}
|
||||
"canary" => args.push("--canary"),
|
||||
_ => {
|
||||
if Version::parse(version).is_err() {
|
||||
return Err(SkipStep("Invalid Deno version".to_string()).into());
|
||||
}
|
||||
|
||||
args.push("--version");
|
||||
args.push(version);
|
||||
}
|
||||
}
|
||||
} else if bin_version >= Version::new(1, 0, 0) {
|
||||
match version {
|
||||
"stable" | "rc" | "canary" => {
|
||||
// Prior to v1.6.0, `deno upgrade` is not able to fetch the latest tag version.
|
||||
return Err(
|
||||
SkipStep("Deno (1.0.0-1.6.0) cannot be upgraded to a named channel".to_string()).into(),
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
if Version::parse(version).is_err() {
|
||||
return Err(SkipStep("Invalid Deno version".to_string()).into());
|
||||
}
|
||||
|
||||
args.push("--version");
|
||||
args.push(version);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// v0.x cannot be upgraded with `deno upgrade` to v1.x or v2.x
|
||||
// nor can it be upgraded to a specific version.
|
||||
return Err(SkipStep("Unsupported Deno version".to_string()).into());
|
||||
}
|
||||
}
|
||||
|
||||
ctx.run_type()
|
||||
.execute(&self.command)
|
||||
.arg("upgrade")
|
||||
.args(args)
|
||||
.status_checked()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get the version of Deno.
|
||||
///
|
||||
/// This function will return the version of Deno installed on the system.
|
||||
/// The version is parsed from the output of `deno -V`.
|
||||
///
|
||||
/// ```sh
|
||||
/// deno -V # deno 1.6.0
|
||||
/// ```
|
||||
fn version(&self) -> Result<Version> {
|
||||
let version_str = Command::new(&self.command)
|
||||
.args(["-V"])
|
||||
.output_checked_utf8()
|
||||
.map(|s| s.stdout.trim().to_owned().split_off(5)); // remove "deno " prefix
|
||||
Version::parse(&version_str?).map_err(|err| err.into())
|
||||
}
|
||||
}
|
||||
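// Editor's note: a hedged alternative sketch, not part of this diff, that avoids
// the hard-coded index in `split_off(5)` above. The helper name
// `parse_deno_version` is hypothetical; it assumes the same "deno <version>"
// output shape:
//
//     fn parse_deno_version(stdout: &str) -> Option<semver::Version> {
//         let raw = stdout.trim().strip_prefix("deno ")?;
//         semver::Version::parse(raw).ok()
//     }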
|
||||
#[cfg(target_os = "linux")]
|
||||
fn should_use_sudo(npm: &NPM, ctx: &ExecutionContext) -> Result<bool> {
|
||||
if npm.should_use_sudo()? {
|
||||
@@ -162,42 +299,112 @@ fn should_use_sudo_yarn(yarn: &Yarn, ctx: &ExecutionContext) -> Result<bool> {
|
||||
}
|
||||
|
||||
pub fn run_npm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let npm = require("pnpm").or_else(|_| require("npm")).map(NPM::new)?;
|
||||
let npm = require("npm").map(|b| NPM::new(b, NPMVariant::Npm))?;
|
||||
|
||||
print_separator(t!("Node Package Manager"));
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
npm.upgrade(ctx.run_type(), should_use_sudo(&npm, ctx)?)
|
||||
npm.upgrade(ctx, should_use_sudo(&npm, ctx)?)
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
{
|
||||
npm.upgrade(ctx.run_type(), false)
|
||||
npm.upgrade(ctx, false)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_pnpm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pnpm = require("pnpm").map(|b| NPM::new(b, NPMVariant::Pnpm))?;
|
||||
|
||||
print_separator(t!("Performant Node Package Manager"));
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
pnpm.upgrade(ctx, should_use_sudo(&pnpm, ctx)?)
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
{
|
||||
pnpm.upgrade(ctx, false)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run_yarn_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let yarn = require("yarn").map(Yarn::new)?;
|
||||
|
||||
if !yarn.has_global_subcmd() {
|
||||
debug!("Yarn is 2.x or above, skipping global upgrade");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
print_separator(t!("Yarn Package Manager"));
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
yarn.upgrade(ctx.run_type(), should_use_sudo_yarn(&yarn, ctx)?)
|
||||
yarn.upgrade(ctx, should_use_sudo_yarn(&yarn, ctx)?)
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
{
|
||||
yarn.upgrade(ctx.run_type(), false)
|
||||
yarn.upgrade(ctx, false)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deno_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let deno = require("deno")?;
|
||||
let deno_dir = ctx.base_dirs().home_dir().join(".deno");
|
||||
let deno = require("deno").map(Deno::new)?;
|
||||
let deno_dir = HOME_DIR.join(".deno");
|
||||
|
||||
if !deno.canonicalize()?.is_descendant_of(&deno_dir) {
|
||||
let skip_reason = SkipStep("Deno installed outside of .deno directory".to_string());
|
||||
if !deno.command.canonicalize()?.is_descendant_of(&deno_dir) {
|
||||
let skip_reason = SkipStep(t!("Deno installed outside of .deno directory").to_string());
|
||||
return Err(skip_reason.into());
|
||||
}
|
||||
|
||||
print_separator("Deno");
|
||||
ctx.run_type().execute(&deno).arg("upgrade").check_run()
|
||||
deno.upgrade(ctx)
|
||||
}
|
||||
|
||||
/// There is no `volta upgrade` command, so we need to upgrade each package
|
||||
pub fn run_volta_packages_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let volta = require("volta")?;
|
||||
|
||||
print_separator("Volta");
|
||||
|
||||
if ctx.run_type().dry() {
|
||||
print_info(t!("Updating Volta packages..."));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let list_output = ctx
|
||||
.run_type()
|
||||
.execute(&volta)
|
||||
.args(["list", "--format=plain"])
|
||||
.output_checked_utf8()?
|
||||
.stdout;
|
||||
|
||||
let installed_packages: Vec<&str> = list_output
|
||||
.lines()
|
||||
.filter_map(|line| {
|
||||
// format is 'kind package@version ...'
|
||||
let mut parts = line.split_whitespace();
|
||||
parts.next();
|
||||
let package_part = parts.next()?;
|
||||
let version_index = package_part.rfind('@').unwrap_or(package_part.len());
|
||||
Some(package_part[..version_index].trim())
|
||||
})
|
||||
.collect();
|
||||
|
||||
if installed_packages.is_empty() {
|
||||
print_info(t!("No packages installed with Volta"));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
for package in installed_packages.iter() {
|
||||
ctx.run_type()
|
||||
.execute(&volta)
|
||||
.args(["install", package])
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,31 +1,37 @@
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::terminal::print_separator;
use crate::utils::require;
use crate::utils::which;
use crate::Step;
use anyhow::Result;
use color_eyre::eyre::Result;

pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
    let pkg = require("pkg")?;
    //let pkg = require("pkg")?;
    let pkg = which("nala").or_else(|| which("pkg")).unwrap();

    print_separator("Termux Packages");

    let is_nala = pkg.ends_with("nala");

    let mut command = ctx.run_type().execute(&pkg);
    command.arg("upgrade");

    if ctx.config().yes(Step::System) {
        command.arg("-y");
    }
    command.check_run()?;
    command.status_checked()?;

    if ctx.config().cleanup() {
        ctx.run_type().execute(&pkg).arg("clean").check_run()?;
    if !is_nala && ctx.config().cleanup() {
        ctx.run_type().execute(&pkg).arg("clean").status_checked()?;

        let apt = require("apt")?;
        let mut command = ctx.run_type().execute(&apt);
        let mut command = ctx.run_type().execute(apt);
        command.arg("autoremove");
        if ctx.config().yes(Step::System) {
            command.arg("-y");
        }
        command.check_run()?;
        command.status_checked()?;
    }

    Ok(())

@@ -1,13 +1,16 @@
|
||||
use std::env::var_os;
|
||||
use std::ffi::OsString;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
|
||||
use anyhow::Result;
|
||||
use color_eyre::eyre;
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::error::TopgradeError;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::utils::require_option;
|
||||
use crate::utils::which;
|
||||
use crate::{config, Step};
|
||||
|
||||
@@ -29,11 +32,10 @@ pub struct YayParu {
|
||||
impl ArchPackageManager for YayParu {
|
||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
if ctx.config().show_arch_news() {
|
||||
Command::new(&self.executable)
|
||||
ctx.run_type()
|
||||
.execute(&self.executable)
|
||||
.arg("-Pw")
|
||||
.spawn()
|
||||
.and_then(|mut p| p.wait())
|
||||
.ok();
|
||||
.status_checked_with_codes(&[1, 0])?;
|
||||
}
|
||||
|
||||
let mut command = ctx.run_type().execute(&self.executable);
|
||||
@@ -48,7 +50,7 @@ impl ArchPackageManager for YayParu {
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.arg("--noconfirm");
|
||||
}
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
let mut command = ctx.run_type().execute(&self.executable);
|
||||
@@ -56,7 +58,7 @@ impl ArchPackageManager for YayParu {
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.arg("--noconfirm");
|
||||
}
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -72,6 +74,37 @@ impl YayParu {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct GarudaUpdate {
|
||||
executable: PathBuf,
|
||||
}
|
||||
|
||||
impl ArchPackageManager for GarudaUpdate {
|
||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut command = ctx.run_type().execute(&self.executable);
|
||||
|
||||
command
|
||||
.env("PATH", get_execution_path())
|
||||
.env("UPDATE_AUR", "1")
|
||||
.env("SKIP_MIRRORLIST", "1");
|
||||
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.env("PACMAN_NOCONFIRM", "1");
|
||||
}
|
||||
command.args(ctx.config().garuda_update_arguments().split_whitespace());
|
||||
command.status_checked()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl GarudaUpdate {
|
||||
fn get() -> Option<Self> {
|
||||
Some(Self {
|
||||
executable: which("garuda-update")?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Trizen {
|
||||
executable: PathBuf,
|
||||
}
|
||||
@@ -88,7 +121,7 @@ impl ArchPackageManager for Trizen {
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.arg("--noconfirm");
|
||||
}
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
let mut command = ctx.run_type().execute(&self.executable);
|
||||
@@ -96,7 +129,7 @@ impl ArchPackageManager for Trizen {
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.arg("--noconfirm");
|
||||
}
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -112,13 +145,13 @@ impl Trizen {
|
||||
}
|
||||
|
||||
pub struct Pacman {
|
||||
sudo: PathBuf,
|
||||
executable: PathBuf,
|
||||
}
|
||||
|
||||
impl ArchPackageManager for Pacman {
|
||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let mut command = ctx.run_type().execute(&self.sudo);
|
||||
let sudo = require_option(ctx.sudo().as_ref(), "sudo is required to run pacman".into())?;
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command
|
||||
.arg(&self.executable)
|
||||
.arg("-Syu")
|
||||
@@ -126,15 +159,15 @@ impl ArchPackageManager for Pacman {
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.arg("--noconfirm");
|
||||
}
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
let mut command = ctx.run_type().execute(&self.sudo);
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(&self.executable).arg("-Scc");
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.arg("--noconfirm");
|
||||
}
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -142,10 +175,9 @@ impl ArchPackageManager for Pacman {
|
||||
}
|
||||
|
||||
impl Pacman {
|
||||
pub fn get(ctx: &ExecutionContext) -> Option<Self> {
|
||||
pub fn get() -> Option<Self> {
|
||||
Some(Self {
|
||||
executable: which("powerpill").unwrap_or_else(|| PathBuf::from("pacman")),
|
||||
sudo: ctx.sudo().to_owned()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -175,7 +207,7 @@ impl ArchPackageManager for Pikaur {
|
||||
command.arg("--noconfirm");
|
||||
}
|
||||
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
let mut command = ctx.run_type().execute(&self.executable);
|
||||
@@ -183,7 +215,7 @@ impl ArchPackageManager for Pikaur {
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.arg("--noconfirm");
|
||||
}
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -214,7 +246,7 @@ impl ArchPackageManager for Pamac {
|
||||
command.arg("--no-confirm");
|
||||
}
|
||||
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
let mut command = ctx.run_type().execute(&self.executable);
|
||||
@@ -222,7 +254,7 @@ impl ArchPackageManager for Pamac {
|
||||
if ctx.config().yes(Step::System) {
|
||||
command.arg("--no-confirm");
|
||||
}
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -231,47 +263,76 @@ impl ArchPackageManager for Pamac {
|
||||
|
||||
pub struct Aura {
|
||||
executable: PathBuf,
|
||||
sudo: PathBuf,
|
||||
}
|
||||
|
||||
impl Aura {
|
||||
fn get(ctx: &ExecutionContext) -> Option<Self> {
|
||||
fn get() -> Option<Self> {
|
||||
Some(Self {
|
||||
executable: which("aura")?,
|
||||
sudo: ctx.sudo().to_owned()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ArchPackageManager for Aura {
|
||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let sudo = which("sudo").unwrap_or(PathBuf::new());
|
||||
let mut aur_update = ctx.run_type().execute(&sudo);
|
||||
use semver::Version;
|
||||
|
||||
if sudo.ends_with("sudo") {
|
||||
aur_update
|
||||
.arg(&self.executable)
|
||||
let version_cmd_output = ctx
|
||||
.run_type()
|
||||
.execute(&self.executable)
|
||||
.arg("--version")
|
||||
.output_checked_utf8()?;
|
||||
// Output will be something like: "aura x.x.x\n"
|
||||
let version_cmd_stdout = version_cmd_output.stdout;
|
||||
let version_str = version_cmd_stdout.trim_start_matches("aura ").trim_end();
|
||||
let version = Version::parse(version_str).expect("invalid version");
|
||||
|
||||
// Aura, since version 4.0.6, no longer needs sudo.
|
||||
//
|
||||
// https://github.com/fosskers/aura/releases/tag/v4.0.6
|
||||
let version_no_sudo = Version::new(4, 0, 6);
|
||||
|
||||
if version >= version_no_sudo {
|
||||
let mut cmd = ctx.run_type().execute(&self.executable);
|
||||
cmd.arg("-Au")
|
||||
.args(ctx.config().aura_aur_arguments().split_whitespace());
|
||||
if ctx.config().yes(Step::System) {
|
||||
cmd.arg("--noconfirm");
|
||||
}
|
||||
cmd.status_checked()?;
|
||||
|
||||
let mut cmd = ctx.run_type().execute(&self.executable);
|
||||
cmd.arg("-Syu")
|
||||
.args(ctx.config().aura_pacman_arguments().split_whitespace());
|
||||
if ctx.config().yes(Step::System) {
|
||||
cmd.arg("--noconfirm");
|
||||
}
|
||||
cmd.status_checked()?;
|
||||
} else {
|
||||
let sudo = crate::utils::require_option(
|
||||
ctx.sudo().as_ref(),
|
||||
t!("Aura(<0.4.6) requires sudo installed to work with AUR packages").to_string(),
|
||||
)?;
|
||||
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.arg(&self.executable)
|
||||
.arg("-Au")
|
||||
.args(ctx.config().aura_aur_arguments().split_whitespace());
|
||||
if ctx.config().yes(Step::System) {
|
||||
aur_update.arg("--noconfirm");
|
||||
cmd.arg("--noconfirm");
|
||||
}
|
||||
cmd.status_checked()?;
|
||||
|
||||
aur_update.check_run()?;
|
||||
} else {
|
||||
println!("Aura requires sudo installed to work with AUR packages")
|
||||
let mut cmd = ctx.run_type().execute(sudo);
|
||||
cmd.arg(&self.executable)
|
||||
.arg("-Syu")
|
||||
.args(ctx.config().aura_pacman_arguments().split_whitespace());
|
||||
if ctx.config().yes(Step::System) {
|
||||
cmd.arg("--noconfirm");
|
||||
}
|
||||
cmd.status_checked()?;
|
||||
}
|
||||
|
||||
let mut pacman_update = ctx.run_type().execute(&self.sudo);
|
||||
pacman_update
|
||||
.arg(&self.executable)
|
||||
.arg("-Syu")
|
||||
.args(ctx.config().aura_pacman_arguments().split_whitespace());
|
||||
if ctx.config().yes(Step::System) {
|
||||
pacman_update.arg("--noconfirm");
|
||||
}
|
||||
pacman_update.check_run()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -284,27 +345,29 @@ pub fn get_arch_package_manager(ctx: &ExecutionContext) -> Option<Box<dyn ArchPa
|
||||
let pacman = which("powerpill").unwrap_or_else(|| PathBuf::from("pacman"));
|
||||
|
||||
match ctx.config().arch_package_manager() {
|
||||
config::ArchPackageManager::Autodetect => YayParu::get("paru", &pacman)
|
||||
config::ArchPackageManager::Autodetect => GarudaUpdate::get()
|
||||
.map(box_package_manager)
|
||||
.or_else(|| YayParu::get("paru", &pacman).map(box_package_manager))
|
||||
.or_else(|| YayParu::get("yay", &pacman).map(box_package_manager))
|
||||
.or_else(|| Trizen::get().map(box_package_manager))
|
||||
.or_else(|| Pikaur::get().map(box_package_manager))
|
||||
.or_else(|| Pamac::get().map(box_package_manager))
|
||||
.or_else(|| Pacman::get(ctx).map(box_package_manager))
|
||||
.or_else(|| Aura::get(ctx).map(box_package_manager)),
|
||||
.or_else(|| Pacman::get().map(box_package_manager))
|
||||
.or_else(|| Aura::get().map(box_package_manager)),
|
||||
config::ArchPackageManager::GarudaUpdate => GarudaUpdate::get().map(box_package_manager),
|
||||
config::ArchPackageManager::Trizen => Trizen::get().map(box_package_manager),
|
||||
config::ArchPackageManager::Paru => YayParu::get("paru", &pacman).map(box_package_manager),
|
||||
config::ArchPackageManager::Yay => YayParu::get("yay", &pacman).map(box_package_manager),
|
||||
config::ArchPackageManager::Pacman => Pacman::get(ctx).map(box_package_manager),
|
||||
config::ArchPackageManager::Pacman => Pacman::get().map(box_package_manager),
|
||||
config::ArchPackageManager::Pikaur => Pikaur::get().map(box_package_manager),
|
||||
config::ArchPackageManager::Pamac => Pamac::get().map(box_package_manager),
|
||||
config::ArchPackageManager::Aura => Aura::get(ctx).map(box_package_manager),
|
||||
config::ArchPackageManager::Aura => Aura::get().map(box_package_manager),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn upgrade_arch_linux(ctx: &ExecutionContext) -> Result<()> {
|
||||
let package_manager =
|
||||
get_arch_package_manager(ctx).ok_or_else(|| anyhow::Error::from(TopgradeError::FailedGettingPackageManager))?;
|
||||
get_arch_package_manager(ctx).ok_or_else(|| eyre::Report::from(TopgradeError::FailedGettingPackageManager))?;
|
||||
package_manager.upgrade(ctx)
|
||||
}
|
||||
|
||||
@@ -321,7 +384,7 @@ pub fn show_pacnew() {
|
||||
.peekable();
|
||||
|
||||
if iter.peek().is_some() {
|
||||
println!("\nPacman backup configuration files found:");
|
||||
println!("\n{}", t!("Pacman backup configuration files found:"));
|
||||
|
||||
for entry in iter {
|
||||
println!("{}", entry.path().display());
|
||||
|
||||
@@ -1,26 +1,36 @@
use crate::executor::RunType;
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::terminal::print_separator;
use crate::utils::require_option;
use anyhow::Result;
use std::path::PathBuf;
use crate::utils::{get_require_sudo_string, require_option};
use crate::Step;
use color_eyre::eyre::Result;
use std::process::Command;

pub fn upgrade_packages(sudo: Option<&PathBuf>, run_type: RunType) -> Result<()> {
    let sudo = require_option(sudo, String::from("No sudo detected"))?;
    print_separator("DrgaonFly BSD Packages");
    run_type
        .execute(sudo)
        .args(&["/usr/local/sbin/pkg", "upgrade"])
        .check_run()
pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
    print_separator(t!("DragonFly BSD Packages"));
    let mut cmd = ctx.run_type().execute(sudo);
    cmd.args(["/usr/local/sbin/pkg", "upgrade"]);
    if ctx.config().yes(Step::System) {
        cmd.arg("-y");
    }
    cmd.status_checked()
}

pub fn audit_packages(sudo: &Option<PathBuf>) -> Result<()> {
    if let Some(sudo) = sudo {
        println!();
        Command::new(sudo)
            .args(&["/usr/local/sbin/pkg", "audit", "-Fr"])
            .spawn()?
            .wait()?;
pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;

    print_separator(t!("DragonFly BSD Audit"));

    #[allow(clippy::disallowed_methods)]
    if !Command::new(sudo)
        .args(["/usr/local/sbin/pkg", "audit", "-Fr"])
        .status()?
        .success()
    {
        println!(t!(
            "The package audit was successful, but vulnerable packages still remain on the system"
        ));
    }
    Ok(())
}

@@ -1,32 +1,41 @@
use crate::executor::RunType;
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::terminal::print_separator;
use crate::utils::require_option;
use anyhow::Result;
use std::path::PathBuf;
use crate::utils::{get_require_sudo_string, require_option};
use crate::Step;
use color_eyre::eyre::Result;
use rust_i18n::t;
use std::process::Command;

pub fn upgrade_freebsd(sudo: Option<&PathBuf>, run_type: RunType) -> Result<()> {
    let sudo = require_option(sudo, String::from("No sudo detected"))?;
    print_separator("FreeBSD Update");
    run_type
pub fn upgrade_freebsd(ctx: &ExecutionContext) -> Result<()> {
    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
    print_separator(t!("FreeBSD Update"));
    ctx.run_type()
        .execute(sudo)
        .args(&["/usr/sbin/freebsd-update", "fetch", "install"])
        .check_run()
        .args(["/usr/sbin/freebsd-update", "fetch", "install"])
        .status_checked()
}

pub fn upgrade_packages(sudo: Option<&PathBuf>, run_type: RunType) -> Result<()> {
    let sudo = require_option(sudo, String::from("No sudo detected"))?;
    print_separator("FreeBSD Packages");
    run_type.execute(sudo).args(&["/usr/sbin/pkg", "upgrade"]).check_run()
}
pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
    print_separator(t!("FreeBSD Packages"));

pub fn audit_packages(sudo: &Option<PathBuf>) -> Result<()> {
    if let Some(sudo) = sudo {
        println!();
        Command::new(sudo)
            .args(&["/usr/sbin/pkg", "audit", "-Fr"])
            .spawn()?
            .wait()?;
    let mut command = ctx.run_type().execute(sudo);

    command.args(["/usr/sbin/pkg", "upgrade"]);
    if ctx.config().yes(Step::System) {
        command.arg("-y");
    }
    command.status_checked()
}

pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;

    print_separator(t!("FreeBSD Audit"));

    Command::new(sudo)
        .args(["/usr/sbin/pkg", "audit", "-Fr"])
        .status_checked()?;
    Ok(())
}

File diff suppressed because it is too large
@@ -1,77 +1,79 @@
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::{CommandExt, RunType};
|
||||
use crate::terminal::{print_separator, prompt_yesno};
|
||||
use crate::{error::TopgradeError, utils::require, Step};
|
||||
use anyhow::Result;
|
||||
use log::debug;
|
||||
use crate::utils::{get_require_sudo_string, require_option};
|
||||
use crate::{utils::require, Step};
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::process::Command;
|
||||
use tracing::debug;
|
||||
|
||||
pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {
|
||||
require("port")?;
|
||||
let sudo = ctx.sudo().as_ref().unwrap();
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
|
||||
print_separator("MacPorts");
|
||||
ctx.run_type().execute(sudo).args(&["port", "selfupdate"]).check_run()?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.args(&["port", "-u", "upgrade", "outdated"])
|
||||
.check_run()?;
|
||||
.args(["port", "selfupdate"])
|
||||
.status_checked()?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.args(["port", "-u", "upgrade", "outdated"])
|
||||
.status_checked()?;
|
||||
if ctx.config().cleanup() {
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.args(&["port", "-N", "reclaim"])
|
||||
.check_run()?;
|
||||
.args(["port", "-N", "reclaim"])
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_mas(run_type: RunType) -> Result<()> {
|
||||
pub fn run_mas(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mas = require("mas")?;
|
||||
print_separator("macOS App Store");
|
||||
print_separator(t!("macOS App Store"));
|
||||
|
||||
run_type.execute(mas).arg("upgrade").check_run()
|
||||
ctx.run_type().execute(mas).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
pub fn upgrade_macos(ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator("macOS system update");
|
||||
print_separator(t!("macOS system update"));
|
||||
|
||||
let should_ask = !(ctx.config().yes(Step::System)) || (ctx.config().dry_run());
|
||||
let should_ask = !(ctx.config().yes(Step::System) || ctx.config().dry_run());
|
||||
if should_ask {
|
||||
println!("Finding available software");
|
||||
println!("{}", t!("Finding available software"));
|
||||
if system_update_available()? {
|
||||
let answer = prompt_yesno("A system update is available. Do you wish to install it?")?;
|
||||
let answer = prompt_yesno(t!("A system update is available. Do you wish to install it?").as_ref())?;
|
||||
if !answer {
|
||||
return Ok(());
|
||||
}
|
||||
println!();
|
||||
} else {
|
||||
println!("No new software available.");
|
||||
println!("{}", t!("No new software available."));
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
let mut command = ctx.run_type().execute("softwareupdate");
|
||||
command.args(&["--install", "--all"]);
|
||||
command.args(["--install", "--all"]);
|
||||
|
||||
if should_ask {
|
||||
command.arg("--no-scan");
|
||||
}
|
||||
|
||||
command.check_run()
|
||||
command.status_checked()
|
||||
}
|
||||
|
||||
fn system_update_available() -> Result<bool> {
|
||||
let output = Command::new("softwareupdate").arg("--list").output()?;
|
||||
let output = Command::new("softwareupdate").arg("--list").output_checked_utf8()?;
|
||||
|
||||
debug!("{:?}", output);
|
||||
|
||||
let status = output.status;
|
||||
if !status.success() {
|
||||
return Err(TopgradeError::ProcessFailed(status).into());
|
||||
}
|
||||
let string_output = String::from_utf8(output.stderr)?;
|
||||
debug!("{:?}", string_output);
|
||||
Ok(!string_output.contains("No new software available"))
|
||||
Ok(!output.stderr.contains("No new software available"))
|
||||
}
|
||||
|
||||
pub fn run_sparkle(ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -81,15 +83,162 @@ pub fn run_sparkle(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
for application in (fs::read_dir("/Applications")?).flatten() {
|
||||
let probe = Command::new(&sparkle)
|
||||
.args(&["--probe", "--application"])
|
||||
.args(["--probe", "--application"])
|
||||
.arg(application.path())
|
||||
.check_output();
|
||||
.output_checked_utf8();
|
||||
if probe.is_ok() {
|
||||
let mut command = ctx.run_type().execute(&sparkle);
|
||||
command.args(&["bundle", "--check-immediately", "--application"]);
|
||||
command.args(["bundle", "--check-immediately", "--application"]);
|
||||
command.arg(application.path());
|
||||
command.spawn()?.wait()?;
|
||||
command.status_checked()?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_xcodes(ctx: &ExecutionContext) -> Result<()> {
|
||||
let xcodes = require("xcodes")?;
|
||||
print_separator("Xcodes");
|
||||
|
||||
let should_ask = !(ctx.config().yes(Step::Xcodes) || ctx.config().dry_run());
|
||||
|
||||
let releases = ctx
|
||||
.run_type()
|
||||
.execute(&xcodes)
|
||||
.args(["update"])
|
||||
.output_checked_utf8()?
|
||||
.stdout;
|
||||
|
||||
let releases_installed: Vec<String> = releases
|
||||
.lines()
|
||||
.filter(|r| r.contains("(Installed)"))
|
||||
.map(String::from)
|
||||
.collect();
|
||||
|
||||
if releases_installed.is_empty() {
|
||||
println!("{}", t!("No Xcode releases installed."));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let (installed_gm, installed_beta, installed_regular) =
|
||||
releases_installed
|
||||
.iter()
|
||||
.fold((false, false, false), |(gm, beta, regular), release| {
|
||||
(
|
||||
gm || release.contains("GM") || release.contains("Release Candidate"),
|
||||
beta || release.contains("Beta"),
|
||||
regular
|
||||
|| !(release.contains("GM")
|
||||
|| release.contains("Release Candidate")
|
||||
|| release.contains("Beta")),
|
||||
)
|
||||
});
|
||||
|
||||
let releases_gm = releases
|
||||
.lines()
|
||||
.filter(|&r| r.matches("GM").count() > 0 || r.matches("Release Candidate").count() > 0)
|
||||
.map(String::from)
|
||||
.collect();
|
||||
let releases_beta = releases
|
||||
.lines()
|
||||
.filter(|&r| r.matches("Beta").count() > 0)
|
||||
.map(String::from)
|
||||
.collect();
|
||||
let releases_regular = releases
|
||||
.lines()
|
||||
.filter(|&r| {
|
||||
r.matches("GM").count() == 0
|
||||
&& r.matches("Release Candidate").count() == 0
|
||||
&& r.matches("Beta").count() == 0
|
||||
})
|
||||
.map(String::from)
|
||||
.collect();
|
||||
|
||||
if installed_gm {
|
||||
process_xcodes_releases(releases_gm, should_ask, ctx)?;
|
||||
}
|
||||
if installed_beta {
|
||||
process_xcodes_releases(releases_beta, should_ask, ctx)?;
|
||||
}
|
||||
if installed_regular {
|
||||
process_xcodes_releases(releases_regular, should_ask, ctx)?;
|
||||
}
|
||||
|
||||
let releases_new = ctx
|
||||
.run_type()
|
||||
.execute(&xcodes)
|
||||
.args(["list"])
|
||||
.output_checked_utf8()?
|
||||
.stdout;
|
||||
|
||||
let releases_gm_new_installed: HashSet<_> = releases_new
|
||||
.lines()
|
||||
.filter(|release| {
|
||||
release.contains("(Installed)") && (release.contains("GM") || release.contains("Release Candidate"))
|
||||
})
|
||||
.collect();
|
||||
let releases_beta_new_installed: HashSet<_> = releases_new
|
||||
.lines()
|
||||
.filter(|release| release.contains("(Installed)") && release.contains("Beta"))
|
||||
.collect();
|
||||
let releases_regular_new_installed: HashSet<_> = releases_new
|
||||
.lines()
|
||||
.filter(|release| {
|
||||
release.contains("(Installed)")
|
||||
&& !(release.contains("GM") || release.contains("Release Candidate") || release.contains("Beta"))
|
||||
})
|
||||
.collect();
|
||||
|
||||
for releases_new_installed in [
|
||||
releases_gm_new_installed,
|
||||
releases_beta_new_installed,
|
||||
releases_regular_new_installed,
|
||||
] {
|
||||
if should_ask && releases_new_installed.len() == 2 {
|
||||
let answer_uninstall =
|
||||
prompt_yesno(t!("Would you like to move the former Xcode release to the trash?").as_ref())?;
|
||||
if answer_uninstall {
|
||||
let _ = ctx
|
||||
.run_type()
|
||||
.execute(&xcodes)
|
||||
.args([
|
||||
"uninstall",
|
||||
releases_new_installed.iter().next().cloned().unwrap_or_default(),
|
||||
])
|
||||
.status_checked();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn process_xcodes_releases(releases_filtered: Vec<String>, should_ask: bool, ctx: &ExecutionContext) -> Result<()> {
|
||||
let xcodes = require("xcodes")?;
|
||||
|
||||
if releases_filtered
|
||||
.last()
|
||||
.map(|s| !s.contains("(Installed)"))
|
||||
.unwrap_or(true)
|
||||
&& !releases_filtered.is_empty()
|
||||
{
|
||||
println!(
|
||||
"{} {}",
|
||||
t!("New Xcode release detected:"),
|
||||
releases_filtered.last().cloned().unwrap_or_default()
|
||||
);
|
||||
if should_ask {
|
||||
let answer_install = prompt_yesno(t!("Would you like to install it?").as_ref())?;
|
||||
if answer_install {
|
||||
let _ = ctx
|
||||
.run_type()
|
||||
.execute(xcodes)
|
||||
.args(["install", &releases_filtered.last().cloned().unwrap_or_default()])
|
||||
.status_checked();
|
||||
}
|
||||
println!();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -10,6 +10,8 @@ pub mod freebsd;
pub mod linux;
#[cfg(target_os = "macos")]
pub mod macos;
#[cfg(target_os = "openbsd")]
pub mod openbsd;
#[cfg(unix)]
pub mod unix;
#[cfg(target_os = "windows")]

65
src/steps/os/openbsd.rs
Normal file
@@ -0,0 +1,65 @@
use crate::command::CommandExt;
use crate::execution_context::ExecutionContext;
use crate::terminal::print_separator;
use crate::utils::{get_require_sudo_string, require_option};
use color_eyre::eyre::Result;
use rust_i18n::t;
use std::fs;

fn is_openbsd_current(ctx: &ExecutionContext) -> Result<bool> {
    let motd_content = fs::read_to_string("/etc/motd")?;
    let is_current = motd_content.contains("-current");
    if ctx.config().dry_run() {
        println!("{}", t!("Would check if OpenBSD is -current"));
        Ok(is_current)
    } else {
        Ok(is_current)
    }
}

pub fn upgrade_openbsd(ctx: &ExecutionContext) -> Result<()> {
    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
    print_separator(t!("OpenBSD Update"));

    let is_current = is_openbsd_current(ctx)?;

    if ctx.config().dry_run() {
        println!("{}", t!("Would upgrade the OpenBSD system"));
        return Ok(());
    }

    let mut args = vec!["/usr/sbin/sysupgrade", "-n"];
    if is_current {
        args.push("-s");
    }

    ctx.run_type().execute(sudo).args(&args).status_checked()
}

pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
    print_separator(t!("OpenBSD Packages"));

    let is_current = is_openbsd_current(ctx)?;

    if ctx.config().dry_run() {
        println!("{}", t!("Would upgrade OpenBSD packages"));
        return Ok(());
    }

    if ctx.config().cleanup() {
        ctx.run_type()
            .execute(sudo)
            .args(["/usr/sbin/pkg_delete", "-ac"])
            .status_checked()?;
    }

    let mut args = vec!["/usr/sbin/pkg_add", "-u"];
    if is_current {
        args.push("-Dsnap");
    }

    ctx.run_type().execute(sudo).args(&args).status_checked()?;

    Ok(())
}

@@ -1,6 +0,0 @@
|
||||
NAME="Anarchy Linux"
|
||||
PRETTY_NAME="Anarchy Linux"
|
||||
ID=anarchy
|
||||
ID_LIKE=anarchylinux
|
||||
ANSI_COLOR="0;36"
|
||||
HOME_URL="https://anarchylinux.org/"
|
||||
@@ -1,10 +0,0 @@
|
||||
NAME="Antergos Linux"
|
||||
VERSION="18.7-ISO-Rolling"
|
||||
ID="antergos"
|
||||
ID_LIKE="arch"
|
||||
PRETTY_NAME="Antergos Linux"
|
||||
CPE_NAME="cpe:/o:antergosproject:antergos:18.7"
|
||||
ANSI_COLOR="1;34;40"
|
||||
HOME_URL="antergos.com"
|
||||
SUPPORT_URL="forum.antergos.com"
|
||||
BUG_REPORT_URL="@antergos"
|
||||
@@ -1,8 +1,9 @@
|
||||
PRETTY_NAME="Debian GNU/Linux 8 (jessie)"
|
||||
PRETTY_NAME="Debian GNU/Linux 11 (bullseye)"
|
||||
NAME="Debian GNU/Linux"
|
||||
VERSION_ID="8"
|
||||
VERSION="8 (jessie)"
|
||||
VERSION_ID="11"
|
||||
VERSION="11 (bullseye)"
|
||||
VERSION_CODENAME=bullseye
|
||||
ID=debian
|
||||
HOME_URL="http://www.debian.org/"
|
||||
SUPPORT_URL="http://www.debian.org/support"
|
||||
HOME_URL="https://www.debian.org/"
|
||||
SUPPORT_URL="https://www.debian.org/support"
|
||||
BUG_REPORT_URL="https://bugs.debian.org/"
|
||||
|
||||
8
src/steps/os/os_release/deepin
Normal file
@@ -0,0 +1,8 @@
|
||||
PRETTY_NAME="Deepin 20.9"
|
||||
NAME="Deepin"
|
||||
VERSION_ID="20.9"
|
||||
VERSION="20.9"
|
||||
VERSION_CODENAME="apricot"
|
||||
ID=Deepin
|
||||
HOME_URL="https://www.deepin.org/"
|
||||
BUG_REPORT_URL="https://bbs.deepin.org/"
|
||||
22
src/steps/os/os_release/fedoraiot
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39.20240415.0 (IoT Edition)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39.20240415.0 (IoT Edition)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
HOME_URL="https://fedoraproject.org/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora/f39/system-administrators-guide/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://bugzilla.redhat.com/"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-11-12
|
||||
VARIANT="IoT Edition"
|
||||
VARIANT_ID=iot
|
||||
OSTREE_VERSION='39.20240415.0'
|
||||
23
src/steps/os/os_release/fedorakinoite
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39.20240105.0 (Kinoite)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39.20240105.0 (Kinoite)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://kinoite.fedoraproject.org"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-kinoite/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://pagure.io/fedora-kde/SIG/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-11-12
|
||||
VARIANT="Kinoite"
|
||||
VARIANT_ID=kinoite
|
||||
OSTREE_VERSION='39.20240105.0'
|
||||
22
src/steps/os/os_release/fedoraonyx
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Onyx)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Onyx)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/onyx/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-onyx/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://bugzilla.redhat.com/"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Onyx"
|
||||
VARIANT_ID=onyx
|
||||
22
src/steps/os/os_release/fedorasericea
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Sericea)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Sericea)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/sericea/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Sericea"
|
||||
VARIANT_ID=sericea
|
||||
22
src/steps/os/os_release/fedorasilverblue
Normal file
@@ -0,0 +1,22 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="39 (Silverblue)"
|
||||
ID=fedora
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Fedora Linux 39 (Silverblue)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://silverblue.fedoraproject.org"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-silverblue/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://github.com/fedora-silverblue/issue-tracker/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="Silverblue"
|
||||
VARIANT_ID=silverblue
|
||||
23
src/steps/os/os_release/fedoraswayatomic
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Fedora Linux"
|
||||
VERSION="40.20240426.0 (Sway Atomic)"
|
||||
ID=fedora
|
||||
VERSION_ID=40
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f40"
|
||||
PRETTY_NAME="Fedora Linux 40.20240426.0 (Sway Atomic)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=fedora-logo-icon
|
||||
CPE_NAME="cpe:/o:fedoraproject:fedora:40"
|
||||
DEFAULT_HOSTNAME="fedora"
|
||||
HOME_URL="https://fedoraproject.org/atomic-desktops/sway/"
|
||||
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
|
||||
SUPPORT_URL="https://ask.fedoraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
|
||||
REDHAT_BUGZILLA_PRODUCT="Fedora"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=40
|
||||
REDHAT_SUPPORT_PRODUCT="Fedora"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=40
|
||||
SUPPORT_END=2025-05-13
|
||||
VARIANT="Sway Atomic"
|
||||
VARIANT_ID=sway-atomic
|
||||
OSTREE_VERSION='40.20240426.0'
|
||||
6
src/steps/os/os_release/funtoo
Normal file
@@ -0,0 +1,6 @@
|
||||
ID="funtoo"
|
||||
NAME="Funtoo"
|
||||
PRETTY_NAME="Funtoo Linux"
|
||||
ANSI_COLOR="0;34"
|
||||
HOME_URL="https://www.funtoo.org"
|
||||
BUG_REPORT_URL="https://bugs.funtoo.org"
|
||||
8
src/steps/os/os_release/nilrt
Normal file
@@ -0,0 +1,8 @@
|
||||
ID=nilrt
|
||||
NAME="NI Linux Real-Time"
|
||||
VERSION="10.0 (kirkstone)"
|
||||
VERSION_ID=10.0
|
||||
PRETTY_NAME="NI Linux Real-Time 10.0 (kirkstone)"
|
||||
DISTRO_CODENAME="kirkstone"
|
||||
BUILD_ID="23.8.0f153-x64"
|
||||
VERSION_CODENAME="kirkstone"
|
||||
23
src/steps/os/os_release/nobara
Normal file
@@ -0,0 +1,23 @@
|
||||
NAME="Nobara Linux"
|
||||
VERSION="39 (GNOME Edition)"
|
||||
ID=nobara
|
||||
ID_LIKE="rhel centos fedora"
|
||||
VERSION_ID=39
|
||||
VERSION_CODENAME=""
|
||||
PLATFORM_ID="platform:f39"
|
||||
PRETTY_NAME="Nobara Linux 39 (GNOME Edition)"
|
||||
ANSI_COLOR="0;38;2;60;110;180"
|
||||
LOGO=nobara-logo-icon
|
||||
CPE_NAME="cpe:/o:nobaraproject:nobara:39"
|
||||
DEFAULT_HOSTNAME="nobara"
|
||||
HOME_URL="https://nobaraproject.org/"
|
||||
DOCUMENTATION_URL="https://www.nobaraproject.org/"
|
||||
SUPPORT_URL="https://www.nobaraproject.org/"
|
||||
BUG_REPORT_URL="https://gitlab.com/gloriouseggroll/nobara-images"
|
||||
REDHAT_BUGZILLA_PRODUCT="Nobara"
|
||||
REDHAT_BUGZILLA_PRODUCT_VERSION=39
|
||||
REDHAT_SUPPORT_PRODUCT="Nobara"
|
||||
REDHAT_SUPPORT_PRODUCT_VERSION=39
|
||||
SUPPORT_END=2024-05-14
|
||||
VARIANT="GNOME Edition"
|
||||
VARIANT_ID=gnome
|
||||
9
src/steps/os/os_release/pclinuxos
Normal file
@@ -0,0 +1,9 @@
|
||||
NAME="PCLinuxOS"
|
||||
VERSION="2022"
|
||||
ID=pclinuxos
|
||||
VERSION_ID=2022
|
||||
ID_LIKE="mandriva"
|
||||
PRETTY_NAME="PCLinuxOS 2022"
|
||||
ANSI_COLOR="1;37"
|
||||
HOME_URL="http://www.pclinuxos.com/"
|
||||
SUPPORT_URL="http://www.pclinuxos.com/"
|
||||
11
src/steps/os/os_release/solus
Normal file
@@ -0,0 +1,11 @@
|
||||
NAME="Solus"
|
||||
VERSION="4.4"
|
||||
ID="solus"
|
||||
VERSION_CODENAME=harmony
|
||||
VERSION_ID="4.4"
|
||||
PRETTY_NAME="Solus 4.4 Harmony"
|
||||
ANSI_COLOR="1;34"
|
||||
HOME_URL="https://getsol.us"
|
||||
SUPPORT_URL="https://help.getsol.us/docs/user/contributing/getting-involved"
|
||||
BUG_REPORT_URL="https://dev.getsol.us/"
|
||||
LOGO="distributor-logo-solus"
|
||||
12
src/steps/os/os_release/vanilla
Normal file
@@ -0,0 +1,12 @@
|
||||
PRETTY_NAME="VanillaOS 22.10 all"
|
||||
NAME="VanillaOS"
|
||||
VERSION_ID="22.10"
|
||||
VERSION="22.10 all"
|
||||
VERSION_CODENAME="kinetic"
|
||||
ID=ubuntu
|
||||
ID_LIKE=debian
|
||||
HOME_URL="https://github.com/vanilla-os"
|
||||
SUPPORT_URL="https://github.com/vanilla-os"
|
||||
BUG_REPORT_URL="https://github.com/vanilla-os"
|
||||
PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
|
||||
UBUNTU_CODENAME="kinetic"
|
||||
5
src/steps/os/os_release/wolfi
Normal file
@@ -0,0 +1,5 @@
|
||||
ID=wolfi
|
||||
NAME="Wolfi"
|
||||
PRETTY_NAME="Wolfi"
|
||||
VERSION_ID="20230201"
|
||||
HOME_URL="https://wolfi.dev"
|
||||
@@ -1,33 +1,53 @@
use crate::error::{SkipStep, TopgradeError};
use crate::execution_context::ExecutionContext;
use crate::executor::{CommandExt, Executor, ExecutorExitStatus, RunType};
use crate::terminal::print_separator;
#[cfg(not(target_os = "macos"))]
use crate::utils::require_option;
use crate::utils::{require, PathExt};
use crate::Step;
use anyhow::Result;
use directories::BaseDirs;
use home;
use ini::Ini;
use log::debug;
use std::ffi::OsStr;
use std::fs;
use std::os::unix::fs::MetadataExt;
use std::path::Component;
use std::path::PathBuf;
use std::process::Command;
use std::{env, path::Path};
use std::{env::var, path::Path};

use crate::command::CommandExt;
use crate::{Step, HOME_DIR};
use color_eyre::eyre::eyre;
use color_eyre::eyre::Context;
use color_eyre::eyre::Result;
use home;
use ini::Ini;
use lazy_static::lazy_static;
#[cfg(target_os = "linux")]
use nix::unistd::Uid;
use regex::Regex;
use rust_i18n::t;
use semver::Version;
use tracing::debug;

#[cfg(target_os = "linux")]
use super::linux::Distribution;
use crate::error::SkipStep;
use crate::execution_context::ExecutionContext;
#[cfg(any(target_os = "linux", target_os = "macos"))]
use crate::executor::Executor;
#[cfg(any(target_os = "linux", target_os = "macos"))]
use crate::executor::RunType;
use crate::terminal::print_separator;
use crate::utils::{get_require_sudo_string, require, require_option, PathExt};

#[cfg(any(target_os = "linux", target_os = "macos"))]
const INTEL_BREW: &str = "/usr/local/bin/brew";

#[cfg(any(target_os = "linux", target_os = "macos"))]
const ARM_BREW: &str = "/opt/homebrew/bin/brew";

#[derive(Copy, Clone, Debug)]
#[allow(dead_code)]
#[cfg(any(target_os = "linux", target_os = "macos"))]
pub enum BrewVariant {
    Path,
    MacIntel,
    MacArm,
}

#[cfg(any(target_os = "linux", target_os = "macos"))]
impl BrewVariant {
    fn binary_name(self) -> &'static str {
        match self {
@@ -77,92 +97,148 @@ impl BrewVariant {
    }
}

pub fn run_fisher(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
pub fn run_fisher(ctx: &ExecutionContext) -> Result<()> {
    let fish = require("fish")?;

    if env::var("fisher_path").is_err() {
        base_dirs
            .home_dir()
            .join(".config/fish/functions/fisher.fish")
            .require()?;
    }
    Command::new(&fish)
        .args(["-c", "type -t fisher"])
        .output_checked_utf8()
        .map(|_| ())
        .map_err(|_| SkipStep(t!("`fisher` is not defined in `fish`").to_string()))?;

    Command::new(&fish)
        .args(["-c", "echo \"$__fish_config_dir/fish_plugins\""])
        .output_checked_utf8()
        .and_then(|output| Path::new(&output.stdout.trim()).require().map(|_| ()))
        .map_err(|err| SkipStep(t!("`fish_plugins` path doesn't exist: {err}", err = err).to_string()))?;

    Command::new(&fish)
        .args(["-c", "fish_update_completions"])
        .output_checked_utf8()
        .map(|_| ())
        .map_err(|_| SkipStep(t!("`fish_update_completions` is not available").to_string()))?;

    print_separator("Fisher");

    let version_str = run_type
    let version_str = ctx
        .run_type()
        .execute(&fish)
        .args(["-c", "fisher --version"])
        .check_output()?;
        .output_checked_utf8()?
        .stdout;
    debug!("Fisher version: {}", version_str);

    if version_str.starts_with("fisher version 3.") {
        // v3 - see https://github.com/topgrade-rs/topgrade/pull/37#issuecomment-1283844506
        run_type.execute(&fish).args(["-c", "fisher"]).check_run()
        ctx.run_type().execute(&fish).args(["-c", "fisher"]).status_checked()
    } else {
        // v4
        run_type.execute(&fish).args(["-c", "fisher update"]).check_run()
        ctx.run_type()
            .execute(&fish)
            .args(["-c", "fisher update"])
            .status_checked()
    }
}
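The version check above distinguishes Fisher 3.x, which updates plugins by re-running plain `fisher`, from later releases, which use the dedicated `fisher update` subcommand. A minimal sketch of just that branch; the helper name is illustrative and not part of the codebase:

// Hypothetical helper: picks the fish arguments used above based on `fisher --version` output.
fn fisher_update_args(version_line: &str) -> &'static [&'static str] {
    // Fisher v3 updates plugins by re-running `fisher`; v4 and later use `fisher update`.
    if version_line.starts_with("fisher version 3.") {
        &["-c", "fisher"]
    } else {
        &["-c", "fisher update"]
    }
}

// fisher_update_args("fisher version 3.1.2") == ["-c", "fisher"]
// Anything else falls through to ["-c", "fisher update"].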
|
||||
|
||||
pub fn run_bashit(ctx: &ExecutionContext) -> Result<()> {
|
||||
ctx.base_dirs().home_dir().join(".bash_it").require()?;
|
||||
HOME_DIR.join(".bash_it").require()?;
|
||||
|
||||
print_separator("Bash-it");
|
||||
|
||||
ctx.run_type()
|
||||
.execute("bash")
|
||||
.args(["-lic", &format!("bash-it update {}", ctx.config().bashit_branch())])
|
||||
.check_run()
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_oh_my_bash(ctx: &ExecutionContext) -> Result<()> {
|
||||
require("bash")?;
|
||||
let oh_my_bash = var("OSH")
|
||||
// default to `~/.oh-my-bash`
|
||||
.unwrap_or(
|
||||
HOME_DIR
|
||||
.join(".oh-my-bash")
|
||||
.to_str()
|
||||
.expect("should be UTF-8 encoded")
|
||||
.to_string(),
|
||||
)
|
||||
.require()?;
|
||||
|
||||
print_separator("oh-my-bash");
|
||||
|
||||
let mut update_script = oh_my_bash;
|
||||
update_script.push_str("/tools/upgrade.sh");
|
||||
|
||||
ctx.run_type().execute("bash").arg(update_script).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_oh_my_fish(ctx: &ExecutionContext) -> Result<()> {
|
||||
let fish = require("fish")?;
|
||||
ctx.base_dirs()
|
||||
.home_dir()
|
||||
.join(".local/share/omf/pkg/omf/functions/omf.fish")
|
||||
.require()?;
|
||||
HOME_DIR.join(".local/share/omf/pkg/omf/functions/omf.fish").require()?;
|
||||
|
||||
print_separator("oh-my-fish");
|
||||
|
||||
ctx.run_type().execute(&fish).args(["-c", "omf update"]).check_run()
|
||||
ctx.run_type().execute(fish).args(["-c", "omf update"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pkgin(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pkgin = require("pkgin")?;
|
||||
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||
|
||||
let mut command = ctx.run_type().execute(ctx.sudo().as_ref().unwrap());
|
||||
print_separator("Pkgin");
|
||||
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(&pkgin).arg("update");
|
||||
if ctx.config().yes(Step::Pkgin) {
|
||||
command.arg("-y");
|
||||
}
|
||||
command.check_run()?;
|
||||
command.status_checked()?;
|
||||
|
||||
let mut command = ctx.run_type().execute(ctx.sudo().as_ref().unwrap());
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(&pkgin).arg("upgrade");
|
||||
if ctx.config().yes(Step::Pkgin) {
|
||||
command.arg("-y");
|
||||
}
|
||||
command.check_run()
|
||||
command.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_fish_plug(ctx: &ExecutionContext) -> Result<()> {
|
||||
let fish = require("fish")?;
|
||||
ctx.base_dirs()
|
||||
.home_dir()
|
||||
HOME_DIR
|
||||
.join(".local/share/fish/plug/kidonng/fish-plug/functions/plug.fish")
|
||||
.require()?;
|
||||
|
||||
print_separator("fish-plug");
|
||||
|
||||
ctx.run_type().execute(&fish).args(["-c", "plug update"]).check_run()
|
||||
ctx.run_type()
|
||||
.execute(fish)
|
||||
.args(["-c", "plug update"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
/// Upgrades `fundle` and `fundle` plugins.
|
||||
///
|
||||
/// `fundle` is a package manager for the Fish shell.
|
||||
///
|
||||
/// See: <https://github.com/danhper/fundle>
|
||||
pub fn run_fundle(ctx: &ExecutionContext) -> Result<()> {
|
||||
let fish = require("fish")?;
|
||||
HOME_DIR.join(".config/fish/fundle").require()?;
|
||||
|
||||
print_separator("fundle");
|
||||
|
||||
ctx.run_type()
|
||||
.execute(fish)
|
||||
.args(["-c", "fundle self-update && fundle update"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
#[cfg(not(any(target_os = "android", target_os = "macos")))]
|
||||
pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
|
||||
let gdbus = require("gdbus")?;
|
||||
require_option(
|
||||
env::var("XDG_CURRENT_DESKTOP").ok().filter(|p| p.contains("GNOME")),
|
||||
"Desktop doest not appear to be gnome".to_string(),
|
||||
var("XDG_CURRENT_DESKTOP").ok().filter(|p| p.contains("GNOME")),
|
||||
t!("Desktop doest not appear to be gnome").to_string(),
|
||||
)?;
|
||||
let output = Command::new("gdbus")
|
||||
.args([
|
||||
@@ -175,14 +251,14 @@ pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
|
||||
"--method",
|
||||
"org.freedesktop.DBus.ListActivatableNames",
|
||||
])
|
||||
.check_output()?;
|
||||
.output_checked_utf8()?;
|
||||
|
||||
debug!("Checking for gnome extensions: {}", output);
|
||||
if !output.contains("org.gnome.Shell.Extensions") {
|
||||
return Err(SkipStep(String::from("Gnome shell extensions are unregistered in DBus")).into());
|
||||
if !output.stdout.contains("org.gnome.Shell.Extensions") {
|
||||
return Err(SkipStep(t!("Gnome shell extensions are unregistered in DBus").to_string()).into());
|
||||
}
|
||||
|
||||
print_separator("Gnome Shell extensions");
|
||||
print_separator(t!("Gnome Shell extensions"));
|
||||
|
||||
ctx.run_type()
|
||||
.execute(gdbus)
|
||||
@@ -196,9 +272,27 @@ pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
|
||||
"--method",
|
||||
"org.gnome.Shell.Extensions.CheckForUpdates",
|
||||
])
|
||||
.check_run()
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
pub fn brew_linux_sudo_uid() -> Option<u32> {
|
||||
let linuxbrew_directory = "/home/linuxbrew/.linuxbrew";
|
||||
if let Ok(metadata) = std::fs::metadata(linuxbrew_directory) {
|
||||
let owner_id = metadata.uid();
|
||||
let current_id = Uid::effective();
|
||||
// debug-print both values
|
||||
debug!("linuxbrew_directory owner_id: {}, current_id: {}", owner_id, current_id);
|
||||
return if owner_id == current_id.as_raw() {
|
||||
None // no need for sudo if linuxbrew is owned by the current user
|
||||
} else {
|
||||
Some(owner_id) // otherwise use sudo to run brew as the owner
|
||||
};
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(any(target_os = "linux", target_os = "macos"))]
|
||||
pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()> {
|
||||
#[allow(unused_variables)]
|
||||
let binary_name = require(variant.binary_name())?;
|
||||
@@ -206,25 +300,57 @@ pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
if variant.is_path() && !BrewVariant::is_macos_custom(binary_name) {
|
||||
return Err(SkipStep("Not a custom brew for macOS".to_string()).into());
|
||||
return Err(SkipStep(t!("Not a custom brew for macOS").to_string()).into());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
let sudo_uid = brew_linux_sudo_uid();
|
||||
// if brew is owned by another user, execute "sudo -Hu <uid> brew update"
|
||||
if let Some(user_id) = sudo_uid {
|
||||
let uid = nix::unistd::Uid::from_raw(user_id);
|
||||
let user = nix::unistd::User::from_uid(uid)
|
||||
.expect("failed to call getpwuid()")
|
||||
.expect("this user should exist");
|
||||
|
||||
let sudo_as_user = t!("sudo as user '{user}'", user = user.name);
|
||||
print_separator(format!("{} ({})", variant.step_title(), sudo_as_user));
|
||||
|
||||
let sudo = crate::utils::require_option(ctx.sudo().as_ref(), crate::utils::get_require_sudo_string())?;
|
||||
ctx.run_type()
|
||||
.execute(sudo)
|
||||
.current_dir("/tmp") // brew needs a writable current directory
|
||||
.args([
|
||||
"--set-home",
|
||||
&format!("--user={}", user.name),
|
||||
&format!("{}", binary_name.to_string_lossy()),
|
||||
"update",
|
||||
])
|
||||
.status_checked()?;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
print_separator(variant.step_title());
|
||||
let run_type = ctx.run_type();
|
||||
|
||||
variant.execute(run_type).arg("update").check_run()?;
|
||||
variant
|
||||
.execute(run_type)
|
||||
.args(["upgrade", "--ignore-pinned", "--formula"])
|
||||
.check_run()?;
|
||||
variant.execute(run_type).arg("update").status_checked()?;
|
||||
|
||||
let mut command = variant.execute(run_type);
|
||||
command.args(["upgrade", "--formula"]);
|
||||
|
||||
if ctx.config().brew_fetch_head() {
|
||||
command.arg("--fetch-HEAD");
|
||||
}
|
||||
|
||||
command.status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
variant.execute(run_type).arg("cleanup").check_run()?;
|
||||
variant.execute(run_type).arg("cleanup").status_checked()?;
|
||||
}
|
||||
|
||||
if ctx.config().brew_autoremove() {
|
||||
variant.execute(run_type).arg("autoremove").check_run()?;
|
||||
variant.execute(run_type).arg("autoremove").status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -234,35 +360,41 @@ pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<
|
||||
pub fn run_brew_cask(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()> {
|
||||
let binary_name = require(variant.binary_name())?;
|
||||
if variant.is_path() && !BrewVariant::is_macos_custom(binary_name) {
|
||||
return Err(SkipStep("Not a custom brew for macOS".to_string()).into());
|
||||
return Err(SkipStep(t!("Not a custom brew for macOS").to_string()).into());
|
||||
}
|
||||
print_separator(format!("{} - Cask", variant.step_title()));
|
||||
let run_type = ctx.run_type();
|
||||
|
||||
let cask_upgrade_exists = variant
|
||||
.execute(RunType::Wet)
|
||||
.args(&["--repository", "buo/cask-upgrade"])
|
||||
.check_output()
|
||||
.map(|p| Path::new(p.trim()).exists())?;
|
||||
.args(["--repository", "buo/cask-upgrade"])
|
||||
.output_checked_utf8()
|
||||
.map(|p| Path::new(p.stdout.trim()).exists())?;
|
||||
|
||||
let mut brew_args = vec![];
|
||||
|
||||
if cask_upgrade_exists {
|
||||
brew_args.extend(&["cu", "-y"]);
|
||||
brew_args.extend(["cu", "-y"]);
|
||||
if ctx.config().brew_cask_greedy() {
|
||||
brew_args.push("-a");
|
||||
}
|
||||
} else {
|
||||
brew_args.extend(&["upgrade", "--cask"]);
|
||||
brew_args.extend(["upgrade", "--cask"]);
|
||||
if ctx.config().brew_cask_greedy() {
|
||||
brew_args.push("--greedy");
|
||||
}
|
||||
if ctx.config().brew_greedy_latest() {
|
||||
brew_args.push("--greedy-latest");
|
||||
}
|
||||
if ctx.config().brew_greedy_auto_updates() {
|
||||
brew_args.push("--greedy-auto-updates");
|
||||
}
|
||||
}
|
||||
|
||||
variant.execute(run_type).args(&brew_args).check_run()?;
|
||||
variant.execute(run_type).args(&brew_args).status_checked()?;
|
||||
|
||||
if ctx.config().cleanup() {
|
||||
variant.execute(run_type).arg("cleanup").check_run()?;
|
||||
variant.execute(run_type).arg("cleanup").status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -273,7 +405,7 @@ pub fn run_guix(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
let run_type = ctx.run_type();
|
||||
|
||||
let output = Command::new(&guix).arg("pull").check_output();
|
||||
let output = Command::new(&guix).arg("pull").output_checked_utf8();
|
||||
debug!("guix pull output: {:?}", output);
|
||||
let should_upgrade = output.is_ok();
|
||||
debug!("Can Upgrade Guix: {:?}", should_upgrade);
|
||||
@@ -281,15 +413,16 @@ pub fn run_guix(ctx: &ExecutionContext) -> Result<()> {
|
||||
print_separator("Guix");
|
||||
|
||||
if should_upgrade {
|
||||
return run_type.execute(&guix).args(["package", "-u"]).check_run();
|
||||
return run_type.execute(&guix).args(["package", "-u"]).status_checked();
|
||||
}
|
||||
Err(SkipStep(String::from("Guix Pull Failed, Skipping")).into())
|
||||
Err(SkipStep(t!("Guix Pull Failed, Skipping").to_string()).into())
|
||||
}
|
||||
|
||||
pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
|
||||
let nix = require("nix")?;
|
||||
let nix_channel = require("nix-channel")?;
|
||||
let nix_env = require("nix-env")?;
|
||||
// TODO: Is None possible here?
|
||||
let profile_path = match home::home_dir() {
|
||||
Some(home) => Path::new(&home).join(".nix-profile"),
|
||||
None => Path::new("/nix/var/nix/profiles/per-user/default").into(),
|
||||
@@ -297,56 +430,199 @@ pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
|
||||
debug!("nix profile: {:?}", profile_path);
|
||||
let manifest_json_path = profile_path.join("manifest.json");
|
||||
|
||||
let output = Command::new(&nix_env).args(["--query", "nix"]).check_output();
|
||||
debug!("nix-env output: {:?}", output);
|
||||
let should_self_upgrade = output.is_ok();
|
||||
|
||||
print_separator("Nix");
|
||||
|
||||
let multi_user = fs::metadata(&nix)?.uid() == 0;
|
||||
debug!("Multi user nix: {}", multi_user);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
use super::linux::Distribution;
|
||||
|
||||
if let Ok(Distribution::NixOS) = Distribution::detect() {
|
||||
return Err(SkipStep(String::from("Nix on NixOS must be upgraded via nixos-rebuild switch")).into());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
if let Ok(..) = require("darwin-rebuild") {
|
||||
return Err(SkipStep(String::from(
|
||||
"Nix-darwin on macOS must be upgraded via darwin-rebuild switch",
|
||||
))
|
||||
.into());
|
||||
if require("darwin-rebuild").is_ok() {
|
||||
return Err(
|
||||
SkipStep(t!("Nix-darwin on macOS must be upgraded via darwin-rebuild switch").to_string()).into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let run_type = ctx.run_type();
|
||||
run_type.execute(nix_channel).arg("--update").status_checked()?;
|
||||
|
||||
if should_self_upgrade {
|
||||
if multi_user {
|
||||
ctx.execute_elevated(&nix, true)?.arg("upgrade-nix").check_run()?;
|
||||
} else {
|
||||
run_type.execute(&nix).arg("upgrade-nix").check_run()?;
|
||||
let mut get_version_cmd = ctx.run_type().execute(&nix);
|
||||
get_version_cmd.arg("--version");
|
||||
let get_version_cmd_output = get_version_cmd.output_checked_utf8()?;
|
||||
let get_version_cmd_first_line_stdout = get_version_cmd_output
|
||||
.stdout
|
||||
.lines()
|
||||
.next()
|
||||
.ok_or_else(|| eyre!("`nix --version` output is empty"))?;
|
||||
|
||||
let is_lix = get_version_cmd_first_line_stdout.contains("Lix");
|
||||
|
||||
debug!(
|
||||
output=%get_version_cmd_output,
|
||||
?is_lix,
|
||||
"`nix --version` output"
|
||||
);
|
||||
|
||||
lazy_static! {
|
||||
static ref NIX_VERSION_REGEX: Regex =
|
||||
Regex::new(r#"^nix \([^)]*\) ([0-9.]+)"#).expect("Nix version regex always compiles");
|
||||
}
|
||||
|
||||
if get_version_cmd_first_line_stdout.is_empty() {
|
||||
return Err(eyre!("`nix --version` output was empty"));
|
||||
}
|
||||
|
||||
let captures = NIX_VERSION_REGEX.captures(get_version_cmd_first_line_stdout);
|
||||
let raw_version = match &captures {
|
||||
None => {
|
||||
return Err(eyre!(
|
||||
"`nix --version` output was weird: {get_version_cmd_first_line_stdout:?}\n\
|
||||
If the `nix --version` output format changed, please file an issue to Topgrade"
|
||||
));
|
||||
}
|
||||
Some(captures) => &captures[1],
|
||||
};
|
||||
|
||||
let version =
|
||||
Version::parse(raw_version).wrap_err_with(|| format!("Unable to parse Nix version: {raw_version:?}"))?;
|
||||
|
||||
debug!("Nix version: {:?}", version);
|
||||
|
||||
// Nix since 2.21.0 uses `--all --impure` rather than `.*` to upgrade all packages.
|
||||
// Lix is based on Nix 2.18, so it doesn't!
|
||||
let packages = if version >= Version::new(2, 21, 0) && !is_lix {
|
||||
vec!["--all", "--impure"]
|
||||
} else {
|
||||
vec![".*"]
|
||||
};
|
||||
|
||||
if Path::new(&manifest_json_path).exists() {
|
||||
run_type
|
||||
.execute(nix)
|
||||
.args(nix_args())
|
||||
.arg("profile")
|
||||
.arg("upgrade")
|
||||
.args(&packages)
|
||||
.arg("--verbose")
|
||||
.status_checked()
|
||||
} else {
|
||||
let mut command = run_type.execute(nix_env);
|
||||
command.arg("--upgrade");
|
||||
if let Some(args) = ctx.config().nix_env_arguments() {
|
||||
command.args(args.split_whitespace());
|
||||
};
|
||||
command.status_checked()
|
||||
}
|
||||
}
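A rough sketch of the `nix --version` handling used above, pulled into one free function so the regex, the semver comparison, and the Lix special case can be read together. The helper and the sample strings in the comments are illustrative only, not part of the diff:

use regex::Regex;
use semver::Version;

// Hypothetical helper mirroring the logic above.
fn nix_upgrade_packages_args(version_first_line: &str) -> Option<Vec<&'static str>> {
    // Same pattern as NIX_VERSION_REGEX: "nix (Nix) 2.21.1" captures "2.21.1".
    let re = Regex::new(r"^nix \([^)]*\) ([0-9.]+)").expect("Nix version regex always compiles");
    let caps = re.captures(version_first_line)?;
    let version = Version::parse(&caps[1]).ok()?;
    let is_lix = version_first_line.contains("Lix");
    // Nix 2.21.0 and later upgrade everything with `--all --impure`; older Nix and Lix use `.*`.
    Some(if version >= Version::new(2, 21, 0) && !is_lix {
        vec!["--all", "--impure"]
    } else {
        vec![".*"]
    })
}

// nix_upgrade_packages_args("nix (Nix) 2.21.1") == Some(vec!["--all", "--impure"])
// nix_upgrade_packages_args("nix (Nix) 2.18.1") == Some(vec![".*"])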
|
||||
|
||||
pub fn run_nix_self_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
let nix = require("nix")?;
|
||||
|
||||
// Should we attempt to upgrade Nix with `nix upgrade-nix`?
|
||||
#[allow(unused_mut)]
|
||||
let mut should_self_upgrade = cfg!(target_os = "macos");
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// We can't use `nix upgrade-nix` on NixOS.
|
||||
if let Ok(Distribution::NixOS) = Distribution::detect() {
|
||||
should_self_upgrade = false;
|
||||
}
|
||||
}
|
||||
|
||||
run_type.execute(&nix_channel).arg("--update").check_run()?;
|
||||
|
||||
if std::path::Path::new(&manifest_json_path).exists() {
|
||||
run_type
|
||||
.execute(&nix)
|
||||
.arg("profile")
|
||||
.arg("upgrade")
|
||||
.arg(".*")
|
||||
.check_run()
|
||||
} else {
|
||||
run_type.execute(&nix_env).arg("--upgrade").check_run()
|
||||
if !should_self_upgrade {
|
||||
return Err(SkipStep(t!("`nix upgrade-nix` can only be used on macOS or non-NixOS Linux").to_string()).into());
|
||||
}
|
||||
|
||||
if nix_profile_dir(&nix)?.is_none() {
|
||||
return Err(
|
||||
SkipStep(t!("`nix upgrade-nix` cannot be run when Nix is installed in a profile").to_string()).into(),
|
||||
);
|
||||
}
|
||||
|
||||
print_separator(t!("Nix (self-upgrade)"));
|
||||
|
||||
let multi_user = fs::metadata(&nix)?.uid() == 0;
|
||||
debug!("Multi user nix: {}", multi_user);
|
||||
|
||||
let nix_args = nix_args();
|
||||
if multi_user {
|
||||
ctx.execute_elevated(&nix, true)?
|
||||
.args(nix_args)
|
||||
.arg("upgrade-nix")
|
||||
.status_checked()
|
||||
} else {
|
||||
ctx.run_type()
|
||||
.execute(&nix)
|
||||
.args(nix_args)
|
||||
.arg("upgrade-nix")
|
||||
.status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
/// If we try to `nix upgrade-nix` but Nix is installed with `nix profile`, we'll get a `does not
|
||||
/// appear to be part of a Nix profile` error.
|
||||
///
|
||||
/// We duplicate some of the `nix` logic here to avoid this.
|
||||
/// See: <https://github.com/NixOS/nix/blob/f0180487a0e4c0091b46cb1469c44144f5400240/src/nix/upgrade-nix.cc#L102-L139>
|
||||
///
|
||||
/// See: <https://github.com/NixOS/nix/issues/5473>
|
||||
fn nix_profile_dir(nix: &Path) -> Result<Option<PathBuf>> {
|
||||
// NOTE: `nix` uses the location of the `nix-env` binary for this but we're using the `nix`
|
||||
// binary; should be the same.
|
||||
let nix_bin_dir = nix.parent();
|
||||
if nix_bin_dir.and_then(|p| p.file_name()) != Some(OsStr::new("bin")) {
|
||||
debug!("Nix is not installed in a `bin` directory: {nix_bin_dir:?}");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let nix_dir = nix_bin_dir
|
||||
.and_then(|bin_dir| bin_dir.parent())
|
||||
.ok_or_else(|| eyre!("Unable to find Nix install directory from Nix binary {nix:?}"))?;
|
||||
|
||||
debug!("Found Nix in {nix_dir:?}");
|
||||
|
||||
let mut profile_dir = nix_dir.to_path_buf();
|
||||
while profile_dir.is_symlink() {
|
||||
profile_dir = profile_dir
|
||||
.parent()
|
||||
.ok_or_else(|| eyre!("Path has no parent: {profile_dir:?}"))?
|
||||
.join(
|
||||
profile_dir
|
||||
.read_link()
|
||||
.wrap_err_with(|| format!("Failed to read symlink {profile_dir:?}"))?,
|
||||
);
|
||||
|
||||
// NOTE: `nix` uses a hand-rolled canonicalize function, Rust just uses `realpath`.
|
||||
if profile_dir
|
||||
.canonicalize()
|
||||
.wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?
|
||||
.components()
|
||||
.any(|component| component == Component::Normal(OsStr::new("profiles")))
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
debug!("Found Nix profile {profile_dir:?}");
|
||||
let user_env = profile_dir
|
||||
.canonicalize()
|
||||
.wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?;
|
||||
|
||||
Ok(
|
||||
if user_env
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.map(|name| name.ends_with("user-environment"))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
Some(profile_dir)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn nix_args() -> [&'static str; 2] {
|
||||
["--extra-experimental-features", "nix-command"]
|
||||
}
|
||||
|
||||
pub fn run_yadm(ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -354,50 +630,95 @@ pub fn run_yadm(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("yadm");
|
||||
|
||||
ctx.run_type().execute(&yadm).arg("pull").check_run()
|
||||
ctx.run_type().execute(yadm).arg("pull").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_asdf(run_type: RunType) -> Result<()> {
|
||||
pub fn run_asdf(ctx: &ExecutionContext) -> Result<()> {
|
||||
let asdf = require("asdf")?;
|
||||
|
||||
print_separator("asdf");
|
||||
let exit_status = run_type.execute(&asdf).arg("update").spawn()?.wait()?;
|
||||
ctx.run_type()
|
||||
.execute(&asdf)
|
||||
.arg("update")
|
||||
.status_checked_with_codes(&[42])?;
|
||||
|
||||
if let ExecutorExitStatus::Wet(e) = exit_status {
|
||||
if !(e.success() || e.code().map(|c| c == 42).unwrap_or(false)) {
|
||||
return Err(TopgradeError::ProcessFailed(e).into());
|
||||
}
|
||||
}
|
||||
run_type.execute(&asdf).args(["plugin", "update", "--all"]).check_run()
|
||||
ctx.run_type()
|
||||
.execute(&asdf)
|
||||
.args(["plugin", "update", "--all"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_home_manager(run_type: RunType) -> Result<()> {
|
||||
pub fn run_mise(ctx: &ExecutionContext) -> Result<()> {
|
||||
let mise = require("mise")?;
|
||||
|
||||
print_separator("mise");
|
||||
|
||||
ctx.run_type()
|
||||
.execute(&mise)
|
||||
.args(["plugins", "update"])
|
||||
.status_checked()?;
|
||||
|
||||
ctx.run_type().execute(&mise).arg("upgrade").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_home_manager(ctx: &ExecutionContext) -> Result<()> {
|
||||
let home_manager = require("home-manager")?;
|
||||
|
||||
print_separator("home-manager");
|
||||
run_type.execute(&home_manager).arg("switch").check_run()
|
||||
|
||||
let mut cmd = ctx.run_type().execute(home_manager);
|
||||
cmd.arg("switch");
|
||||
|
||||
if let Some(extra_args) = ctx.config().home_manager() {
|
||||
cmd.args(extra_args);
|
||||
}
|
||||
|
||||
cmd.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_tldr(run_type: RunType) -> Result<()> {
|
||||
pub fn run_tldr(ctx: &ExecutionContext) -> Result<()> {
|
||||
let tldr = require("tldr")?;
|
||||
|
||||
print_separator("TLDR");
|
||||
run_type.execute(&tldr).arg("--update").check_run()
|
||||
ctx.run_type().execute(tldr).arg("--update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_pearl(run_type: RunType) -> Result<()> {
|
||||
pub fn run_pearl(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pearl = require("pearl")?;
|
||||
print_separator("pearl");
|
||||
|
||||
run_type.execute(&pearl).arg("update").check_run()
|
||||
ctx.run_type().execute(pearl).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_sdkman(base_dirs: &BaseDirs, cleanup: bool, run_type: RunType) -> Result<()> {
|
||||
pub fn run_pyenv(ctx: &ExecutionContext) -> Result<()> {
|
||||
let pyenv = require("pyenv")?;
|
||||
print_separator("pyenv");
|
||||
|
||||
let pyenv_dir = var("PYENV_ROOT")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".pyenv"));
|
||||
|
||||
if !pyenv_dir.exists() {
|
||||
return Err(SkipStep(t!("Pyenv is installed, but $PYENV_ROOT is not set correctly").to_string()).into());
|
||||
}
|
||||
|
||||
if !pyenv_dir.join(".git").exists() {
|
||||
return Err(SkipStep(t!("pyenv is not a git repository").to_string()).into());
|
||||
}
|
||||
|
||||
if !pyenv_dir.join("plugins").join("pyenv-update").exists() {
|
||||
return Err(SkipStep(t!("pyenv-update plugin is not installed").to_string()).into());
|
||||
}
|
||||
|
||||
ctx.run_type().execute(pyenv).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
|
||||
let bash = require("bash")?;
|
||||
|
||||
let sdkman_init_path = env::var("SDKMAN_DIR")
|
||||
let sdkman_init_path = var("SDKMAN_DIR")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| base_dirs.home_dir().join(".sdkman"))
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".sdkman"))
|
||||
.join("bin")
|
||||
.join("sdkman-init.sh")
|
||||
.require()
|
||||
@@ -405,9 +726,9 @@ pub fn run_sdkman(base_dirs: &BaseDirs, cleanup: bool, run_type: RunType) -> Res
|
||||
|
||||
print_separator("SDKMAN!");
|
||||
|
||||
let sdkman_config_path = env::var("SDKMAN_DIR")
|
||||
let sdkman_config_path = var("SDKMAN_DIR")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| base_dirs.home_dir().join(".sdkman"))
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".sdkman"))
|
||||
.join("etc")
|
||||
.join("config")
|
||||
.require()?;
|
||||
@@ -420,44 +741,78 @@ pub fn run_sdkman(base_dirs: &BaseDirs, cleanup: bool, run_type: RunType) -> Res
|
||||
|
||||
if selfupdate_enabled == "true" {
|
||||
let cmd_selfupdate = format!("source {} && sdk selfupdate", &sdkman_init_path);
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_selfupdate.as_str()])
|
||||
.check_run()?;
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
let cmd_update = format!("source {} && sdk update", &sdkman_init_path);
|
||||
run_type.execute(&bash).args(["-c", cmd_update.as_str()]).check_run()?;
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_update.as_str()])
|
||||
.status_checked()?;
|
||||
|
||||
let cmd_upgrade = format!("source {} && sdk upgrade", &sdkman_init_path);
|
||||
run_type.execute(&bash).args(["-c", cmd_upgrade.as_str()]).check_run()?;
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_upgrade.as_str()])
|
||||
.status_checked()?;
|
||||
|
||||
if cleanup {
|
||||
if ctx.config().cleanup() {
|
||||
let cmd_flush_archives = format!("source {} && sdk flush archives", &sdkman_init_path);
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_flush_archives.as_str()])
|
||||
.check_run()?;
|
||||
.status_checked()?;
|
||||
|
||||
let cmd_flush_temp = format!("source {} && sdk flush temp", &sdkman_init_path);
|
||||
run_type
|
||||
ctx.run_type()
|
||||
.execute(&bash)
|
||||
.args(["-c", cmd_flush_temp.as_str()])
|
||||
.check_run()?;
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_bun(ctx: &ExecutionContext) -> Result<()> {
|
||||
pub fn run_bun_packages(ctx: &ExecutionContext) -> Result<()> {
|
||||
let bun = require("bun")?;
|
||||
|
||||
print_separator("Bun");
|
||||
print_separator(t!("Bun Packages"));
|
||||
|
||||
ctx.run_type().execute(&bun).arg("upgrade").check_run()
|
||||
let mut package_json: PathBuf = var("BUN_INSTALL")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| HOME_DIR.join(".bun"));
|
||||
package_json.push("install/global/package.json");
|
||||
|
||||
if !package_json.exists() {
|
||||
println!("{}", t!("No global packages installed"));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
ctx.run_type().execute(bun).args(["-g", "update"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn reboot() {
|
||||
print!("Rebooting...");
|
||||
Command::new("sudo").arg("reboot").spawn().unwrap().wait().unwrap();
|
||||
/// Update dotfiles with `rcm(7)`.
|
||||
///
|
||||
/// See: <https://github.com/thoughtbot/rcm>
|
||||
pub fn run_rcm(ctx: &ExecutionContext) -> Result<()> {
|
||||
let rcup = require("rcup")?;
|
||||
|
||||
print_separator("rcm");
|
||||
ctx.run_type().execute(rcup).arg("-v").status_checked()
|
||||
}
|
||||
|
||||
pub fn run_maza(ctx: &ExecutionContext) -> Result<()> {
|
||||
let maza = require("maza")?;
|
||||
|
||||
print_separator("maza");
|
||||
ctx.run_type().execute(maza).arg("update").status_checked()
|
||||
}
|
||||
|
||||
pub fn reboot() -> Result<()> {
|
||||
print!("{}", t!("Rebooting..."));
|
||||
|
||||
Command::new("sudo").arg("reboot").status_checked()
|
||||
}
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
use std::convert::TryFrom;
|
||||
use std::path::Path;
|
||||
use std::{ffi::OsStr, process::Command};
|
||||
|
||||
use anyhow::Result;
|
||||
use log::debug;
|
||||
use color_eyre::eyre::Result;
|
||||
use etcetera::base_strategy::BaseStrategy;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::{CommandExt, RunType};
|
||||
use crate::terminal::{print_separator, print_warning};
|
||||
use crate::utils::require;
|
||||
use crate::{error::SkipStep, steps::git::Repositories};
|
||||
use crate::utils::{require, which};
|
||||
use crate::{error::SkipStep, steps::git::RepoStep};
|
||||
use crate::{powershell, Step};
|
||||
use rust_i18n::t;
|
||||
|
||||
pub fn run_chocolatey(ctx: &ExecutionContext) -> Result<()> {
|
||||
let choco = require("choco")?;
|
||||
@@ -18,23 +19,22 @@ pub fn run_chocolatey(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("Chocolatey");
|
||||
|
||||
let mut cmd = &choco;
|
||||
let mut args = vec!["upgrade", "all"];
|
||||
let mut command = match ctx.sudo() {
|
||||
Some(sudo) => {
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(choco);
|
||||
command
|
||||
}
|
||||
None => ctx.run_type().execute(choco),
|
||||
};
|
||||
|
||||
if let Some(sudo) = ctx.sudo() {
|
||||
cmd = sudo;
|
||||
args.insert(0, "choco");
|
||||
}
|
||||
|
||||
let mut command = ctx.run_type().execute(&cmd);
|
||||
|
||||
command.args(&args);
|
||||
command.args(["upgrade", "all"]);
|
||||
|
||||
if yes {
|
||||
command.arg("--yes");
|
||||
}
|
||||
|
||||
command.check_run()
|
||||
command.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_winget(ctx: &ExecutionContext) -> Result<()> {
|
||||
@@ -42,57 +42,143 @@ pub fn run_winget(ctx: &ExecutionContext) -> Result<()> {
|
||||
|
||||
print_separator("winget");
|
||||
|
||||
if !ctx.config().enable_winget() {
|
||||
print_warning("Winget is disabled by default. Enable it by setting enable_winget=true in the [windows] section in the configuration.");
|
||||
return Err(SkipStep(String::from("Winget is disabled by default")).into());
|
||||
}
|
||||
|
||||
ctx.run_type().execute(&winget).args(&["upgrade", "--all"]).check_run()
|
||||
ctx.run_type()
|
||||
.execute(winget)
|
||||
.args(["upgrade", "--all"])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn run_scoop(cleanup: bool, run_type: RunType) -> Result<()> {
|
||||
pub fn run_scoop(ctx: &ExecutionContext) -> Result<()> {
|
||||
let scoop = require("scoop")?;
|
||||
|
||||
print_separator("Scoop");
|
||||
|
||||
run_type.execute(&scoop).args(&["update"]).check_run()?;
|
||||
run_type.execute(&scoop).args(&["update", "*"]).check_run()?;
|
||||
ctx.run_type().execute(&scoop).args(["update"]).status_checked()?;
|
||||
ctx.run_type().execute(&scoop).args(["update", "*"]).status_checked()?;
|
||||
|
||||
if cleanup {
|
||||
run_type.execute(&scoop).args(&["cleanup", "*"]).check_run()?;
|
||||
if ctx.config().cleanup() {
|
||||
ctx.run_type().execute(&scoop).args(["cleanup", "*"]).status_checked()?;
|
||||
ctx.run_type()
|
||||
.execute(&scoop)
|
||||
.args(["cache", "rm", "-a"])
|
||||
.status_checked()?
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_wsl(ctx: &ExecutionContext) -> Result<()> {
|
||||
if !is_wsl_installed()? {
|
||||
return Err(SkipStep(t!("WSL not installed").to_string()).into());
|
||||
}
|
||||
|
||||
let wsl = require("wsl")?;
|
||||
|
||||
print_separator(t!("Update WSL"));
|
||||
|
||||
let mut wsl_command = ctx.run_type().execute(wsl);
|
||||
wsl_command.args(["--update"]);
|
||||
|
||||
if ctx.config().wsl_update_pre_release() {
|
||||
wsl_command.args(["--pre-release"]);
|
||||
}
|
||||
|
||||
if ctx.config().wsl_update_use_web_download() {
|
||||
wsl_command.args(["--web-download"]);
|
||||
}
|
||||
wsl_command.status_checked()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Detect if WSL is installed or not.
///
/// For WSL, we cannot simply check whether the `wsl` command is installed, as on
/// newer versions of Windows (since Windows 10 version 2004) this command is
/// installed by default.
///
/// If the command is installed but the user hasn't installed any Linux distros
/// on it, `wsl -l` prints a help message and exits with failure; we use this to
/// check whether WSL is installed or not.
|
||||
fn is_wsl_installed() -> Result<bool> {
|
||||
if let Some(wsl) = which("wsl") {
|
||||
// Don't use `output_checked` as an execution failure log is not wanted
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let output = Command::new(wsl).arg("-l").output()?;
|
||||
let status = output.status;
|
||||
|
||||
if status.success() {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn get_wsl_distributions(wsl: &Path) -> Result<Vec<String>> {
|
||||
let output = Command::new(wsl).args(&["--list", "-q"]).check_output()?;
|
||||
let output = Command::new(wsl).args(["--list", "-q"]).output_checked_utf8()?.stdout;
|
||||
Ok(output
|
||||
.lines()
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(|x| x.replace('\u{0}', "").replace('\r', ""))
|
||||
.map(|x| x.replace(['\u{0}', '\r'], ""))
|
||||
.collect())
|
||||
}
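The `replace(['\u{0}', '\r'], "")` above exists because captured `wsl.exe` output is typically UTF-16 text read back as UTF-8, which leaves interleaved NUL bytes and carriage returns in every name. A small self-contained sketch of the same clean-up; the helper name and the sample string are made up:

// Hypothetical helper applying the same filtering as get_wsl_distributions above.
fn clean_wsl_list(raw: &str) -> Vec<String> {
    raw.lines()
        .filter(|s| !s.is_empty())
        // Drop the interleaved NULs left over from UTF-16 and any stray carriage returns.
        .map(|line| line.replace(['\u{0}', '\r'], ""))
        .collect()
}

// clean_wsl_list("U\u{0}b\u{0}u\u{0}n\u{0}t\u{0}u\u{0}\r\n") == vec!["Ubuntu"]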
|
||||
|
||||
fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> Result<()> {
|
||||
let topgrade = Command::new(&wsl)
|
||||
.args(&["-d", dist, "bash", "-lc", "which topgrade"])
|
||||
.check_output()
|
||||
.map_err(|_| SkipStep(String::from("Could not find Topgrade installed in WSL")))?;
|
||||
let topgrade = Command::new(wsl)
|
||||
.args(["-d", dist, "bash", "-lc", "which topgrade"])
|
||||
.output_checked_utf8()
|
||||
.map_err(|_| SkipStep(t!("Could not find Topgrade installed in WSL").to_string()))?
|
||||
.stdout // The normal output from `which topgrade` appends a newline, so we trim it here.
|
||||
.trim_end()
|
||||
.to_owned();
|
||||
|
||||
let mut command = ctx.run_type().execute(wsl);
|
||||
|
||||
// The `arg` method automatically quotes its arguments.
|
||||
// This means we can't append additional arguments to `topgrade` in WSL
|
||||
// by calling `arg` successively.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// ```rust
|
||||
// command
|
||||
// .args(["-d", dist, "bash", "-c"])
|
||||
// .arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade}"));
|
||||
// ```
|
||||
//
|
||||
// creates a command string like:
|
||||
// > `C:\WINDOWS\system32\wsl.EXE -d Ubuntu bash -c 'TOPGRADE_PREFIX=Ubuntu exec /bin/topgrade'`
|
||||
//
|
||||
// Adding the following:
|
||||
//
|
||||
// ```rust
|
||||
// command.arg("-v");
|
||||
// ```
|
||||
//
|
||||
// appends the next argument like so:
|
||||
// > `C:\WINDOWS\system32\wsl.EXE -d Ubuntu bash -c 'TOPGRADE_PREFIX=Ubuntu exec /bin/topgrade' -v`
|
||||
// which means `-v` isn't passed to `topgrade`.
|
||||
let mut args = String::new();
|
||||
if ctx.config().verbose() {
|
||||
args.push_str("-v");
|
||||
}
|
||||
|
||||
let mut command = ctx.run_type().execute(&wsl);
|
||||
command
|
||||
.args(&["-d", dist, "bash", "-c"])
|
||||
.arg(format!("TOPGRADE_PREFIX={} exec {}", dist, topgrade));
|
||||
.args(["-d", dist, "bash", "-c"])
|
||||
.arg(format!("TOPGRADE_PREFIX={dist} exec {topgrade} {args}"));
|
||||
|
||||
if ctx.config().yes(Step::Wsl) {
|
||||
command.arg("-y");
|
||||
}
|
||||
|
||||
command.check_run()
|
||||
command.status_checked()
|
||||
}
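The long comment above boils down to one constraint: because `arg` quotes each argument separately, every extra Topgrade flag has to be folded into the single `bash -c` string. A hedged, self-contained sketch of that composition; the helper and sample values are hypothetical:

// Hypothetical helper showing the single-string invocation described above.
fn wsl_invocation(dist: &str, topgrade: &str, verbose: bool) -> Vec<String> {
    let args = if verbose { "-v" } else { "" };
    vec![
        "-d".to_string(),
        dist.to_string(),
        "bash".to_string(),
        "-c".to_string(),
        // All Topgrade flags ride inside this one argument.
        format!("TOPGRADE_PREFIX={dist} exec {topgrade} {args}"),
    ]
}

// wsl_invocation("Ubuntu", "/usr/bin/topgrade", true) ends with the single argument
// "TOPGRADE_PREFIX=Ubuntu exec /usr/bin/topgrade -v"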
|
||||
|
||||
pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
if !is_wsl_installed()? {
|
||||
return Err(SkipStep(t!("WSL not installed").to_string()).into());
|
||||
}
|
||||
|
||||
let wsl = require("wsl")?;
|
||||
let wsl_distributions = get_wsl_distributions(&wsl)?;
|
||||
let mut ran = false;
|
||||
@@ -113,33 +199,44 @@ pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||
if ran {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(SkipStep(String::from("Could not find Topgrade in any WSL disribution")).into())
|
||||
Err(SkipStep(t!("Could not find Topgrade in any WSL disribution").to_string()).into())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn windows_update(ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = powershell::Powershell::windows_powershell();
|
||||
|
||||
print_separator(t!("Windows Update"));
|
||||
|
||||
if powershell.supports_windows_update() {
|
||||
print_separator("Windows Update");
|
||||
return powershell.windows_update(ctx);
|
||||
println!("The installer will request to run as administrator, expect a prompt.");
|
||||
|
||||
powershell.windows_update(ctx)
|
||||
} else {
|
||||
print_warning(t!(
|
||||
"Consider installing PSWindowsUpdate as the use of Windows Update via USOClient is not supported."
|
||||
));
|
||||
|
||||
Err(SkipStep(t!("USOClient not supported.").to_string()).into())
|
||||
}
|
||||
|
||||
let usoclient = require("UsoClient")?;
|
||||
|
||||
print_separator("Windows Update");
|
||||
println!("Running Windows Update. Check the control panel for progress.");
|
||||
ctx.run_type().execute(&usoclient).arg("ScanInstallWait").check_run()?;
|
||||
ctx.run_type().execute(&usoclient).arg("StartInstall").check_run()
|
||||
}
|
||||
|
||||
pub fn reboot() {
|
||||
Command::new("shutdown").args(&["/R", "/T", "0"]).spawn().ok();
|
||||
pub fn microsoft_store(ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = powershell::Powershell::windows_powershell();
|
||||
|
||||
print_separator(t!("Microsoft Store"));
|
||||
|
||||
powershell.microsoft_store(ctx)
|
||||
}
|
||||
|
||||
pub fn insert_startup_scripts(ctx: &ExecutionContext, git_repos: &mut Repositories) -> Result<()> {
|
||||
let startup_dir = ctx
|
||||
.base_dirs()
|
||||
pub fn reboot() -> Result<()> {
|
||||
// If this works, it won't return, but if it doesn't work, it may return a useful error
|
||||
// message.
|
||||
Command::new("shutdown").args(["/R", "/T", "0"]).status_checked()
|
||||
}
|
||||
|
||||
pub fn insert_startup_scripts(git_repos: &mut RepoStep) -> Result<()> {
|
||||
let startup_dir = crate::WINDOWS_DIRS
|
||||
.data_dir()
|
||||
.join("Microsoft\\Windows\\Start Menu\\Programs\\Startup");
|
||||
for entry in std::fs::read_dir(&startup_dir)?.flatten() {
|
||||
@@ -148,7 +245,7 @@ pub fn insert_startup_scripts(ctx: &ExecutionContext, git_repos: &mut Repositori
|
||||
if let Ok(lnk) = parselnk::Lnk::try_from(Path::new(&path)) {
|
||||
debug!("Startup link: {:?}", lnk);
|
||||
if let Some(path) = lnk.relative_path() {
|
||||
git_repos.insert_if_repo(&startup_dir.join(path));
|
||||
git_repos.insert_if_repo(startup_dir.join(path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,10 +3,11 @@ use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
use anyhow::Result;
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::CommandExt;
|
||||
use crate::terminal::{is_dumb, print_separator};
|
||||
use crate::utils::{require_option, which, PathExt};
|
||||
use crate::Step;
|
||||
@@ -27,8 +28,8 @@ impl Powershell {
|
||||
let profile = path.as_ref().and_then(|path| {
|
||||
Command::new(path)
|
||||
.args(["-NoProfile", "-Command", "Split-Path $profile"])
|
||||
.check_output()
|
||||
.map(|output| PathBuf::from(output.trim()))
|
||||
.output_checked_utf8()
|
||||
.map(|output| PathBuf::from(output.stdout.trim()))
|
||||
.and_then(|p| p.require())
|
||||
.ok()
|
||||
});
|
||||
@@ -46,14 +47,14 @@ impl Powershell {
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn has_module(powershell: &Path, command: &str) -> bool {
|
||||
Command::new(&powershell)
|
||||
.args(&[
|
||||
Command::new(powershell)
|
||||
.args([
|
||||
"-NoProfile",
|
||||
"-Command",
|
||||
&format!("Get-Module -ListAvailable {}", command),
|
||||
&format!("Get-Module -ListAvailable {command}"),
|
||||
])
|
||||
.check_output()
|
||||
.map(|result| !result.is_empty())
|
||||
.output_checked_utf8()
|
||||
.map(|result| !result.stdout.is_empty())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
@@ -62,9 +63,9 @@ impl Powershell {
|
||||
}
|
||||
|
||||
pub fn update_modules(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = require_option(self.path.as_ref(), String::from("Powershell is not installed"))?;
|
||||
let powershell = require_option(self.path.as_ref(), t!("Powershell is not installed").to_string())?;
|
||||
|
||||
print_separator("Powershell Modules Update");
|
||||
print_separator(t!("Powershell Modules Update"));
|
||||
|
||||
let mut cmd = vec!["Update-Module"];
|
||||
|
||||
@@ -76,11 +77,12 @@ impl Powershell {
|
||||
cmd.push("-Force")
|
||||
}
|
||||
|
||||
println!("Updating modules...");
|
||||
println!("{}", t!("Updating modules..."));
|
||||
ctx.run_type()
|
||||
.execute(powershell)
|
||||
// This probably doesn't need `shell_words::join`.
|
||||
.args(["-NoProfile", "-Command", &cmd.join(" ")])
|
||||
.check_run()
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
@@ -93,31 +95,67 @@ impl Powershell {
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn windows_update(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = require_option(self.path.as_ref(), String::from("Powershell is not installed"))?;
|
||||
let powershell = require_option(self.path.as_ref(), t!("Powershell is not installed").to_string())?;
|
||||
|
||||
debug_assert!(self.supports_windows_update());
|
||||
|
||||
let accept_all = if ctx.config().accept_all_windows_updates() {
|
||||
"-AcceptAll"
|
||||
} else {
|
||||
""
|
||||
};
|
||||
|
||||
let install_windowsupdate_verbose = "Install-WindowsUpdate -Verbose".to_string();
|
||||
|
||||
let mut command = if let Some(sudo) = ctx.sudo() {
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(&powershell);
|
||||
command.arg(powershell);
|
||||
command
|
||||
} else {
|
||||
ctx.run_type().execute(&powershell)
|
||||
ctx.run_type().execute(powershell)
|
||||
};
|
||||
|
||||
command
|
||||
.args(&[
|
||||
"-NoProfile",
|
||||
"-Command",
|
||||
&format!(
|
||||
"Import-Module PSWindowsUpdate; Install-WindowsUpdate -MicrosoftUpdate {} -Verbose",
|
||||
if ctx.config().accept_all_windows_updates() {
|
||||
"-AcceptAll"
|
||||
} else {
|
||||
""
|
||||
}
|
||||
),
|
||||
])
|
||||
.check_run()
|
||||
.args(["-NoProfile", &install_windowsupdate_verbose, accept_all])
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn microsoft_store(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||
let powershell = require_option(self.path.as_ref(), t!("Powershell is not installed").to_string())?;
|
||||
|
||||
let mut command = if let Some(sudo) = ctx.sudo() {
|
||||
let mut command = ctx.run_type().execute(sudo);
|
||||
command.arg(powershell);
|
||||
command
|
||||
} else {
|
||||
ctx.run_type().execute(powershell)
|
||||
};
|
||||
|
||||
println!("{}", t!("Scanning for updates..."));
|
||||
|
||||
// Scan for updates using the MDM UpdateScanMethod
|
||||
// This method is also available for non-MDM devices
|
||||
let update_command = "(Get-CimInstance -Namespace \"Root\\cimv2\\mdm\\dmmap\" -ClassName \"MDM_EnterpriseModernAppManagement_AppManagement01\" | Invoke-CimMethod -MethodName UpdateScanMethod).ReturnValue";
|
||||
|
||||
command.args(["-NoProfile", update_command]);
|
||||
|
||||
command
|
||||
.output_checked_with_utf8(|output| {
|
||||
if output.stdout.trim() == "0" {
|
||||
println!(
|
||||
"{}",
|
||||
t!("Success, Microsoft Store apps are being updated in the background")
|
||||
);
|
||||
Ok(())
|
||||
} else {
|
||||
println!(
|
||||
"{}",
|
||||
t!("Unable to update Microsoft Store apps, manual intervention is required")
|
||||
);
|
||||
Err(())
|
||||
}
|
||||
})
|
||||
.map(|_| ())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,52 +1,55 @@
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::{error::SkipStep, execution_context::ExecutionContext, terminal::print_separator, utils};
|
||||
|
||||
fn prepare_async_ssh_command(args: &mut Vec<&str>) {
|
||||
args.insert(0, "ssh");
|
||||
args.push("--keep");
|
||||
}
|
||||
|
||||
pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
|
||||
let ssh = utils::require("ssh")?;
|
||||
|
||||
let topgrade = ctx.config().remote_topgrade_path();
|
||||
let mut args = vec!["-t", hostname];
|
||||
|
||||
if let Some(ssh_arguments) = ctx.config().ssh_arguments() {
|
||||
args.extend(ssh_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let env = format!("TOPGRADE_PREFIX={}", hostname);
|
||||
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
|
||||
|
||||
if ctx.config().run_in_tmux() && !ctx.run_type().dry() {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
prepare_async_ssh_command(&mut args);
|
||||
crate::tmux::run_command(ctx, &args.join(" "))?;
|
||||
Err(SkipStep(String::from("Remote Topgrade launched in Tmux")).into())
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
unreachable!("Tmux execution is only implemented in Unix");
|
||||
} else if ctx.config().open_remotes_in_new_terminal() && !ctx.run_type().dry() && cfg!(windows) {
|
||||
prepare_async_ssh_command(&mut args);
|
||||
ctx.run_type().execute("wt").args(&args).spawn()?;
|
||||
Err(SkipStep(String::from("Remote Topgrade launched in an external terminal")).into())
|
||||
} else {
|
||||
let mut args = vec!["-t", hostname];
|
||||
|
||||
if let Some(ssh_arguments) = ctx.config().ssh_arguments() {
|
||||
args.extend(ssh_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let env = format!("TOPGRADE_PREFIX={}", hostname);
|
||||
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
|
||||
|
||||
print_separator(format!("Remote ({})", hostname));
|
||||
println!("Connecting to {}...", hostname);
|
||||
|
||||
ctx.run_type().execute(&ssh).args(&args).check_run()
|
||||
}
|
||||
}
|
||||
use color_eyre::eyre::Result;
|
||||
use rust_i18n::t;
|
||||
|
||||
use crate::{
|
||||
command::CommandExt, error::SkipStep, execution_context::ExecutionContext, terminal::print_separator, utils,
|
||||
};
|
||||
|
||||
fn prepare_async_ssh_command(args: &mut Vec<&str>) {
|
||||
args.insert(0, "ssh");
|
||||
args.push("--keep");
|
||||
}
|
||||
|
||||
pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
|
||||
let ssh = utils::require("ssh")?;
|
||||
|
||||
let topgrade = ctx.config().remote_topgrade_path();
|
||||
let mut args = vec!["-t", hostname];
|
||||
|
||||
if let Some(ssh_arguments) = ctx.config().ssh_arguments() {
|
||||
args.extend(ssh_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let env = format!("TOPGRADE_PREFIX={hostname}");
|
||||
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
|
||||
|
||||
if ctx.config().run_in_tmux() && !ctx.run_type().dry() {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
prepare_async_ssh_command(&mut args);
|
||||
crate::tmux::run_command(ctx, hostname, &shell_words::join(args))?;
|
||||
Err(SkipStep(String::from(t!("Remote Topgrade launched in Tmux"))).into())
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
unreachable!("Tmux execution is only implemented in Unix");
|
||||
} else if ctx.config().open_remotes_in_new_terminal() && !ctx.run_type().dry() && cfg!(windows) {
|
||||
prepare_async_ssh_command(&mut args);
|
||||
ctx.run_type().execute("wt").args(&args).spawn()?;
|
||||
Err(SkipStep(String::from(t!("Remote Topgrade launched in an external terminal"))).into())
|
||||
} else {
|
||||
let mut args = vec!["-t", hostname];
|
||||
|
||||
if let Some(ssh_arguments) = ctx.config().ssh_arguments() {
|
||||
args.extend(ssh_arguments.split_whitespace());
|
||||
}
|
||||
|
||||
let env = format!("TOPGRADE_PREFIX={hostname}");
|
||||
args.extend(["env", &env, "$SHELL", "-lc", topgrade]);
|
||||
|
||||
print_separator(format!("Remote ({hostname})"));
|
||||
println!("{}", t!("Connecting to {hostname}...", hostname = hostname));
|
||||
|
||||
ctx.run_type().execute(ssh).args(&args).status_checked()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,13 +2,14 @@ use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use std::{fmt::Display, rc::Rc, str::FromStr};
|
||||
|
||||
use anyhow::Result;
|
||||
use log::{debug, error};
|
||||
use color_eyre::eyre::Result;
|
||||
use regex::Regex;
|
||||
use rust_i18n::t;
|
||||
use strum::EnumString;
|
||||
use tracing::{debug, error};
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::execution_context::ExecutionContext;
|
||||
use crate::executor::CommandExt;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::{error::SkipStep, utils, Step};
|
||||
|
||||
@@ -61,10 +62,11 @@ impl Vagrant {
|
||||
let output = Command::new(&self.path)
|
||||
.arg("status")
|
||||
.current_dir(directory)
|
||||
.check_output()?;
|
||||
.output_checked_utf8()?;
|
||||
debug!("Vagrant output in {}: {}", directory, output);
|
||||
|
||||
let boxes = output
|
||||
.stdout
|
||||
.split('\n')
|
||||
.skip(2)
|
||||
.take_while(|line| !(line.is_empty() || line.starts_with('\r')))
|
||||
@@ -115,7 +117,7 @@ impl<'a> TemporaryPowerOn<'a> {
|
||||
.execute(vagrant)
|
||||
.args([subcommand, &vagrant_box.name])
|
||||
.current_dir(vagrant_box.path.clone())
|
||||
.check_run()?;
|
||||
.status_checked()?;
|
||||
Ok(TemporaryPowerOn {
|
||||
vagrant,
|
||||
vagrant_box,
|
||||
@@ -124,7 +126,7 @@ impl<'a> TemporaryPowerOn<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Drop for TemporaryPowerOn<'a> {
|
||||
impl Drop for TemporaryPowerOn<'_> {
|
||||
fn drop(&mut self) {
|
||||
let subcommand = if self.ctx.config().vagrant_always_suspend().unwrap_or(false) {
|
||||
"suspend"
|
||||
@@ -142,7 +144,7 @@ impl<'a> Drop for TemporaryPowerOn<'a> {
|
||||
.execute(self.vagrant)
|
||||
.args([subcommand, &self.vagrant_box.name])
|
||||
.current_dir(self.vagrant_box.path.clone())
|
||||
.check_run()
|
||||
.status_checked()
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
@@ -150,14 +152,14 @@ impl<'a> Drop for TemporaryPowerOn<'a> {
|
||||
pub fn collect_boxes(ctx: &ExecutionContext) -> Result<Vec<VagrantBox>> {
|
||||
let directories = utils::require_option(
|
||||
ctx.config().vagrant_directories(),
|
||||
String::from("No Vagrant directories were specified in the configuration file"),
|
||||
String::from(t!("No Vagrant directories were specified in the configuration file")),
|
||||
)?;
|
||||
let vagrant = Vagrant {
|
||||
path: utils::require("vagrant")?,
|
||||
};
|
||||
|
||||
print_separator("Vagrant");
|
||||
println!("Collecting Vagrant boxes");
|
||||
println!("{}", t!("Collecting Vagrant boxes"));
|
||||
|
||||
let mut result = Vec::new();
|
||||
|
||||
@@ -182,7 +184,11 @@ pub fn topgrade_vagrant_box(ctx: &ExecutionContext, vagrant_box: &VagrantBox) ->
|
||||
let mut _poweron = None;
|
||||
if !vagrant_box.initial_status.powered_on() {
|
||||
if !(ctx.config().vagrant_power_on().unwrap_or(true)) {
|
||||
return Err(SkipStep(format!("Skipping powered off box {}", vagrant_box)).into());
|
||||
return Err(SkipStep(format!(
|
||||
"{}",
|
||||
t!("Skipping powered off box {vagrant_box}", vagrant_box = vagrant_box)
|
||||
))
|
||||
.into());
|
||||
} else {
|
||||
print_separator(seperator);
|
||||
_poweron = Some(vagrant.temporary_power_on(vagrant_box, ctx)?);
|
||||
@@ -199,21 +205,21 @@ pub fn topgrade_vagrant_box(ctx: &ExecutionContext, vagrant_box: &VagrantBox) ->
|
||||
.execute(&vagrant.path)
|
||||
.current_dir(&vagrant_box.path)
|
||||
.args(["ssh", "-c", &command])
|
||||
.check_run()
|
||||
.status_checked()
|
||||
}
|
||||
|
||||
pub fn upgrade_vagrant_boxes(ctx: &ExecutionContext) -> Result<()> {
|
||||
let vagrant = utils::require("vagrant")?;
|
||||
print_separator("Vagrant boxes");
|
||||
print_separator(t!("Vagrant boxes"));
|
||||
|
||||
let outdated = Command::new(&vagrant)
|
||||
.args(["box", "outdated", "--global"])
|
||||
.check_output()?;
|
||||
.output_checked_utf8()?;
|
||||
|
||||
let re = Regex::new(r"\* '(.*?)' for '(.*?)' is outdated").unwrap();
|
||||
|
||||
let mut found = false;
|
||||
for ele in re.captures_iter(&outdated) {
|
||||
for ele in re.captures_iter(&outdated.stdout) {
|
||||
found = true;
|
||||
let _ = ctx
|
||||
.run_type()
|
||||
@@ -222,13 +228,16 @@ pub fn upgrade_vagrant_boxes(ctx: &ExecutionContext) -> Result<()> {
|
||||
.arg(ele.get(1).unwrap().as_str())
|
||||
.arg("--provider")
|
||||
.arg(ele.get(2).unwrap().as_str())
|
||||
.check_run();
|
||||
.status_checked();
|
||||
}
|
||||
|
||||
if !found {
|
||||
println!("No outdated boxes")
|
||||
println!("{}", t!("No outdated boxes"))
|
||||
} else {
|
||||
ctx.run_type().execute(&vagrant).args(["box", "prune"]).check_run()?;
|
||||
ctx.run_type()
|
||||
.execute(&vagrant)
|
||||
.args(["box", "prune"])
|
||||
.status_checked()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -1,26 +1,38 @@
|
||||
use crate::executor::RunType;
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
use color_eyre::eyre::eyre;
|
||||
use color_eyre::eyre::Context;
|
||||
use color_eyre::eyre::Result;
|
||||
|
||||
use crate::command::CommandExt;
|
||||
use crate::config::TmuxConfig;
|
||||
use crate::config::TmuxSessionMode;
|
||||
use crate::terminal::print_separator;
|
||||
use crate::HOME_DIR;
|
||||
use crate::{
|
||||
execution_context::ExecutionContext,
|
||||
utils::{which, Check, PathExt},
|
||||
utils::{which, PathExt},
|
||||
};
|
||||
use anyhow::Result;
|
||||
use directories::BaseDirs;
|
||||
use std::env;
|
||||
use std::io;
|
||||
use std::os::unix::process::CommandExt;
|
||||
use std::path::PathBuf;
|
||||
use std::process::{exit, Command};
|
||||
|
||||
pub fn run_tpm(base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
|
||||
let tpm = base_dirs
|
||||
.home_dir()
|
||||
.join(".tmux/plugins/tpm/bin/update_plugins")
|
||||
.require()?;
|
||||
use rust_i18n::t;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::process::CommandExt as _;
|
||||
|
||||
pub fn run_tpm(ctx: &ExecutionContext) -> Result<()> {
|
||||
let tpm = match env::var("TMUX_PLUGIN_MANAGER_PATH") {
|
||||
// If `TMUX_PLUGIN_MANAGER_PATH` is set, search for
|
||||
// `$TMUX_PLUGIN_MANAGER_PATH/bin/install_plugins/tpm/bin/update_plugins`
|
||||
Ok(var) => PathBuf::from(var).join("bin/install_plugins/tpm/bin/update_plugins"),
|
||||
// Otherwise, use the default location `~/.tmux/plugins/tpm/bin/update_plugins`
|
||||
Err(_) => HOME_DIR.join(".tmux/plugins/tpm/bin/update_plugins"),
|
||||
}
|
||||
.require()?;
|
||||
|
||||
print_separator("tmux plugins");
|
||||
|
||||
run_type.execute(&tpm).arg("all").check_run()
|
||||
ctx.run_type().execute(tpm).arg("all").status_checked()
|
||||
}

struct Tmux {
@@ -29,12 +41,10 @@ struct Tmux {
}

impl Tmux {
    fn new(args: &Option<String>) -> Self {
    fn new(args: Vec<String>) -> Self {
        Self {
            tmux: which("tmux").expect("Could not find tmux"),
            args: args
                .as_ref()
                .map(|args| args.split_whitespace().map(String::from).collect()),
            args: if args.is_empty() { None } else { Some(args) },
        }
    }

@@ -46,73 +56,144 @@ impl Tmux {
        command
    }

    fn has_session(&self, session_name: &str) -> Result<bool, io::Error> {
    fn has_session(&self, session_name: &str) -> Result<bool> {
        Ok(self
            .build()
            .args(["has-session", "-t", session_name])
            .output()?
            .output_checked_with(|_| Ok(()))?
            .status
            .success())
    }

    fn new_session(&self, session_name: &str) -> Result<bool, io::Error> {
        Ok(self
    /// Create a new tmux session with the given name, running the given command.
    /// The command is passed to `sh` (see "shell-command arguments are sh(1) commands" in the
    /// `tmux(1)` man page).
    fn new_session(&self, session_name: &str, window_name: &str, command: &str) -> Result<()> {
        let _ = self
            .build()
            .args(["new-session", "-d", "-s", session_name, "-n", "dummy"])
            .spawn()?
            .wait()?
            .success())
            // `-d`: initial size comes from the global `default-size` option (instead
            // of passing `-x` and `-y` arguments.
            // (What do those even do?)
            // `-s`: session name
            // `-n`: window name (always `topgrade`)
            .args(["new-session", "-d", "-s", session_name, "-n", window_name, command])
            .output_checked()?;
        Ok(())
    }

    fn run_in_session(&self, command: &str) -> Result<()> {
        self.build()
            .args(["new-window", "-t", "topgrade", command])
            .spawn()?
            .wait()?
            .check()?;
    /// Like [`new_session`] but it appends a digit to the session name (if necessary) to
    /// avoid duplicate session names.
    ///
    /// The session name is returned.
    fn new_unique_session(&self, session_name: &str, window_name: &str, command: &str) -> Result<String> {
        let mut session = session_name.to_owned();
        for i in 1.. {
            if !self
                .has_session(&session)
                .context("Error determining if a tmux session exists")?
            {
                self.new_session(&session, window_name, command)
                    .context("Error running Topgrade in tmux")?;
                return Ok(session);
            }
            session = format!("{session_name}-{i}");
        }
        unreachable!()
    }

        Ok(())
    /// Create a new window in the given tmux session, running the given command.
    fn new_window(&self, session_name: &str, window_name: &str, command: &str) -> Result<()> {
        self.build()
            // `-d`: initial size comes from the global `default-size` option (instead
            // of passing `-x` and `-y` arguments.
            // (What do those even do?)
            // `-s`: session name
            // `-n`: window name
            .args([
                "new-window",
                "-a",
                "-t",
                &format!("{session_name}:{window_name}"),
                "-n",
                window_name,
                command,
            ])
            .env_remove("TMUX")
            .status_checked()
    }

    fn window_indices(&self, session_name: &str) -> Result<Vec<usize>> {
        self.build()
            .args(["list-windows", "-F", "#{window_index}", "-t", session_name])
            .output_checked_utf8()?
            .stdout
            .lines()
            .map(|l| l.parse())
            .collect::<Result<Vec<usize>, _>>()
            .context("Failed to compute tmux windows")
    }
}

pub fn run_in_tmux(args: &Option<String>) -> ! {
pub fn run_in_tmux(config: TmuxConfig) -> Result<()> {
    let command = {
        let mut command = vec![
            String::from("env"),
            String::from("TOPGRADE_KEEP_END=1"),
            String::from("TOPGRADE_INSIDE_TMUX=1"),
        ];
        // TODO: Should we use `topgrade` instead of the first argument here, which may be
        // a local path?
        command.extend(env::args());
        command.join(" ")
        shell_words::join(command)
    };

    let tmux = Tmux::new(args);
    let tmux = Tmux::new(config.args);

    if !tmux.has_session("topgrade").expect("Error detecting a tmux session") {
        tmux.new_session("topgrade").expect("Error creating a tmux session");
    }
    // Find an unused session and run `topgrade` in it with the current command's arguments.
    let session_name = "topgrade";
    let window_name = "topgrade";
    let session = tmux.new_unique_session(session_name, window_name, &command)?;

    tmux.run_in_session(&command).expect("Error running topgrade in tmux");
    tmux.build()
        .args(["kill-window", "-t", "topgrade:dummy"])
        .output()
        .expect("Error killing the dummy tmux window");
    let is_inside_tmux = env::var("TMUX").is_ok();
    let err = match config.session_mode {
        TmuxSessionMode::AttachIfNotInSession => {
            if is_inside_tmux {
                // Only attach to the newly-created session if we're not currently in a tmux session.
                println!("{}", t!("Topgrade launched in a new tmux session"));
                return Ok(());
            } else {
                tmux.build().args(["attach-session", "-t", &session]).exec()
            }
        }

    if env::var("TMUX").is_err() {
        let err = tmux.build().args(["attach", "-t", "topgrade"]).exec();
        panic!("{:?}", err);
    } else {
        println!("Topgrade launched in a new tmux session");
        exit(0);
    }
        TmuxSessionMode::AttachAlways => {
            if is_inside_tmux {
                tmux.build().args(["switch-client", "-t", &session]).exec()
            } else {
                tmux.build().args(["attach-session", "-t", &session]).exec()
            }
        }
    };

    Err(eyre!("{err}")).context("Failed to `execvp(3)` tmux")
}
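Switching from `command.join(" ")` to `shell_words::join(command)` matters because the joined string is later handed to tmux as a single `sh` command; naive joining breaks arguments that contain whitespace. A small sketch of the difference, assuming the `shell-words` crate; the example arguments are made up:

```rust
// Sketch contrasting plain joining with shell_words::join. The exact quoting
// style is up to the crate; the point is that arguments with whitespace
// survive the round trip through `sh`.
fn main() {
    let args = vec![
        "env".to_string(),
        "TOPGRADE_INSIDE_TMUX=1".to_string(),
        "/usr/bin/topgrade".to_string(),
        "--config".to_string(),
        "/home/me/my config.toml".to_string(), // contains a space
    ];

    // Naive: the space in the last argument splits it into two words again.
    println!("{}", args.join(" "));

    // shell_words::join quotes/escapes as needed so `sh -c` sees one argument.
    println!("{}", shell_words::join(&args));
}
```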

pub fn run_command(ctx: &ExecutionContext, command: &str) -> Result<()> {
    Tmux::new(ctx.config().tmux_arguments())
        .build()
        .args(["new-window", "-a", "-t", "topgrade:1", command])
        .env_remove("TMUX")
        .spawn()?
        .wait()?
        .check()
pub fn run_command(ctx: &ExecutionContext, window_name: &str, command: &str) -> Result<()> {
    let tmux = Tmux::new(ctx.config().tmux_config()?.args);

    match ctx.get_tmux_session() {
        Some(session_name) => {
            let indices = tmux.window_indices(&session_name)?;
            let last_window = indices
                .iter()
                .last()
                .ok_or_else(|| eyre!("tmux session {session_name} has no windows"))?;
            tmux.new_window(&session_name, &format!("{last_window}"), command)?;
        }
        None => {
            let name = tmux.new_unique_session("topgrade", window_name, command)?;
            ctx.set_tmux_session(name);
        }
    }
    Ok(())
}
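For reference, the session-probing logic above maps onto plain tmux CLI calls: `tmux has-session -t <name>` exits 0 when the session exists, and `tmux new-session -d -s <name> -n <window> <command>` creates a detached session running the command. Here is a standalone sketch of the "find an unused session name" loop using only `std::process::Command`; the helper is hypothetical, not Topgrade's `Tmux` type.

```rust
// Standalone sketch (std only). `tmux has-session -t <name>` exits 0 when the
// session exists, which is what the Tmux::has_session helper above relies on.
use std::process::Command;

fn unique_session_name(base: &str) -> std::io::Result<String> {
    let mut name = base.to_owned();
    for i in 1.. {
        let exists = Command::new("tmux")
            .args(["has-session", "-t", name.as_str()])
            .output()? // capture output; only the exit code matters here
            .status
            .success();
        if !exists {
            return Ok(name);
        }
        name = format!("{base}-{i}");
    }
    unreachable!()
}

fn main() -> std::io::Result<()> {
    println!("free session name: {}", unique_session_name("topgrade")?);
    Ok(())
}
```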

@@ -1,17 +1,20 @@
use anyhow::Result;
use color_eyre::eyre::Result;

use crate::command::CommandExt;
use crate::config::Step;
use crate::terminal::print_separator;
use crate::{execution_context::ExecutionContext, utils::require};
use log::debug;
use std::path::Path;
use std::{path::PathBuf, process::Command};
use tracing::debug;

fn list_toolboxes(toolbx: &Path) -> Result<Vec<String>> {
    let output = Command::new(toolbx).args(["list", "--containers"]).output()?;
    let output_str = String::from_utf8(output.stdout)?;
    let output = Command::new(toolbx)
        .args(["list", "--containers"])
        .output_checked_utf8()?;

    let proc: Vec<String> = output_str
    let proc: Vec<String> = output
        .stdout
        .lines()
        // Skip the first line since that contains only status information
        .skip(1)
@@ -33,13 +36,13 @@ pub fn run_toolbx(ctx: &ExecutionContext) -> Result<()> {
    debug!("Toolboxes to inspect: {:?}", toolboxes);

    let mut topgrade_path = PathBuf::from("/run/host");
    // Path of the running topgrade executable
    // Path of the running Topgrade executable
    // Skip 1 to eliminate the path root, otherwise push overwrites the path
    topgrade_path.push(std::env::current_exe()?.components().skip(1).collect::<PathBuf>());
    let topgrade_path = topgrade_path.to_str().unwrap();

    for tb in toolboxes.iter() {
        let topgrade_prefix = format!("TOPGRADE_PREFIX='Toolbx {}'", tb);
        let topgrade_prefix = format!("TOPGRADE_PREFIX='Toolbx {tb}'");
        let mut args = vec![
            "run",
            "-c",
@@ -49,12 +52,14 @@ pub fn run_toolbx(ctx: &ExecutionContext) -> Result<()> {
            topgrade_path,
            "--only",
            "system",
            "--no-self-update",
            "--skip-notify",
        ];
        if ctx.config().yes(Step::Toolbx) {
            args.push("--yes");
        }

        let _output = ctx.run_type().execute(&toolbx).args(&args).check_run();
        ctx.run_type().execute(&toolbx).args(&args).status_checked()?;
    }

    Ok(())
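`list_toolboxes` skips the header row of `toolbox list --containers` before collecting container names. A sketch against illustrative output; note that taking the second whitespace-separated column as the name is a guess, since the hunk above is cut off before the column handling.

```rust
// Sketch of the header-skipping parse against made-up output. The column
// handling is an assumption, not copied from Topgrade.
fn main() {
    let sample = "\
CONTAINER ID  CONTAINER NAME     CREATED      STATUS   IMAGE NAME
a1b2c3d4e5f6  fedora-toolbox-39  2 weeks ago  running  registry.fedoraproject.org/fedora-toolbox:39";

    let names: Vec<&str> = sample
        .lines()
        .skip(1) // first line is the header
        .filter_map(|line| line.split_whitespace().nth(1))
        .collect();

    println!("{names:?}"); // ["fedora-toolbox-39"]
}
```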

@@ -1,3 +1,19 @@
" AstroUpdate calls a plugin manager - Lazy as of this writing. So we check for it before
" others. Add to init.lua:
" updater = {
" skip_prompts = true,
" },
if exists(":AstroUpdate")
    echo "AstroUpdate"
    AstroUpdate
    quitall
endif

if exists(":MasonUpdate")
    echo "MasonUpdate"
    MasonUpdate
endif

if exists(":NeoBundleUpdate")
    echo "NeoBundle"
    NeoBundleUpdate
@@ -18,11 +34,6 @@ if exists(":PlugUpgrade")
    endif
endif

if exists(":PackerUpdate")
    echo "Packer"
    PackerSync
endif

if exists("*dein#update()")
    echo "dein#update()"
    call dein#update()
@@ -38,9 +49,15 @@ if exists(":PaqUpdate")
    PaqUpdate
endif

if exists(":CocUpdateSync")
    echo "CocUpdateSync"
    CocUpdateSync
if exists(":Lazy")
    echo "Lazy Update"
    Lazy! sync | qa
endif

quitall
if exists(':PackerSync')
    echo "Packer"
    autocmd User PackerComplete quitall
    PackerSync
else
    quitall
endif

@@ -1,38 +1,38 @@
use crate::command::CommandExt;
use crate::error::{SkipStep, TopgradeError};
use anyhow::Result;
use crate::HOME_DIR;
use color_eyre::eyre::Result;
use etcetera::base_strategy::BaseStrategy;

use crate::executor::{CommandExt, ExecutorOutput, RunType};
use crate::executor::{Executor, ExecutorOutput};
use crate::terminal::print_separator;
use crate::{
    execution_context::ExecutionContext,
    utils::{require, PathExt},
};
use directories::BaseDirs;
use log::debug;
use std::path::{Path, PathBuf};
use rust_i18n::t;
use std::path::PathBuf;
use std::{
    io::{self, Write},
    process::Command,
};
use tracing::debug;

const UPGRADE_VIM: &str = include_str!("upgrade.vim");

pub fn vimrc(base_dirs: &BaseDirs) -> Result<PathBuf> {
    base_dirs
        .home_dir()
pub fn vimrc() -> Result<PathBuf> {
    HOME_DIR
        .join(".vimrc")
        .require()
        .or_else(|_| base_dirs.home_dir().join(".vim/vimrc").require())
        .or_else(|_| HOME_DIR.join(".vim/vimrc").require())
}

fn nvimrc(base_dirs: &BaseDirs) -> Result<PathBuf> {
fn nvimrc() -> Result<PathBuf> {
    #[cfg(unix)]
    let base_dir =
        // Bypass directories crate as nvim doesn't use the macOS-specific directories.
        std::env::var_os("XDG_CONFIG_HOME").map_or_else(|| base_dirs.home_dir().join(".config"), PathBuf::from);
    let base_dir = crate::XDG_DIRS.config_dir();

    #[cfg(windows)]
    let base_dir = base_dirs.cache_dir();
    let base_dir = crate::WINDOWS_DIRS.cache_dir();

    base_dir
        .join("nvim/init.vim")
@@ -40,19 +40,14 @@ fn nvimrc(base_dirs: &BaseDirs) -> Result<PathBuf> {
        .or_else(|_| base_dir.join("nvim/init.lua").require())
}

fn upgrade(vim: &Path, vimrc: &Path, ctx: &ExecutionContext) -> Result<()> {
fn upgrade_script() -> Result<tempfile::NamedTempFile> {
    let mut tempfile = tempfile::NamedTempFile::new()?;
    tempfile.write_all(UPGRADE_VIM.replace('\r', "").as_bytes())?;
    debug!("Wrote vim script to {:?}", tempfile.path());
    Ok(tempfile)
}
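`upgrade_script` writes the bundled `upgrade.vim` into a `tempfile::NamedTempFile` and returns the handle so the file stays on disk while vim or nvim sources it via `-S`. A minimal sketch of that pattern outside Topgrade; the vim flags mirror the call sites below, but the snippet itself is illustrative.

```rust
// Minimal sketch of the tempfile pattern used by upgrade_script(): write a
// script, keep the NamedTempFile alive so the file isn't deleted, and hand
// its path to another process. Assumes the `tempfile` crate.
use std::io::Write;
use std::process::Command;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut script = tempfile::NamedTempFile::new()?;
    script.write_all(b"echo 'hello from a temp script'\nquitall\n")?;

    // The file is removed when `script` is dropped, so keep it in scope until
    // the child process has finished.
    let status = Command::new("vim")
        .args(["-u", "NONE", "-nNesS"])
        .arg(script.path())
        .status()?;
    println!("vim exited with {status}");
    Ok(())
}
```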

    let mut command = ctx.run_type().execute(vim);

    command
        .args(["-u"])
        .arg(vimrc)
        .args(["-U", "NONE", "-V1", "-nNesS"])
        .arg(tempfile.path());

fn upgrade(command: &mut Executor, ctx: &ExecutionContext) -> Result<()> {
    if ctx.config().force_vim_plug_update() {
        command.env("TOPGRADE_FORCE_PLUGUPDATE", "true");
    }
@@ -63,14 +58,14 @@ fn upgrade(vim: &Path, vimrc: &Path, ctx: &ExecutionContext) -> Result<()> {
        let status = output.status;

        if !status.success() || ctx.config().verbose() {
            io::stdout().write(&output.stdout).ok();
            io::stderr().write(&output.stderr).ok();
            io::stdout().write_all(&output.stdout).ok();
            io::stderr().write_all(&output.stderr).ok();
        }

        if !status.success() {
            return Err(TopgradeError::ProcessFailed(status).into());
            return Err(TopgradeError::ProcessFailed(command.get_program(), status).into());
        } else {
            println!("Plugins upgraded")
            println!("{}", t!("Plugins upgraded"))
        }
    }

@@ -78,63 +73,79 @@ fn upgrade(vim: &Path, vimrc: &Path, ctx: &ExecutionContext) -> Result<()> {
}

pub fn upgrade_ultimate_vimrc(ctx: &ExecutionContext) -> Result<()> {
    let config_dir = ctx.base_dirs().home_dir().join(".vim_runtime").require()?;
    let config_dir = HOME_DIR.join(".vim_runtime").require()?;
    let git = require("git")?;
    let python = require("python3")?;
    let update_plugins = config_dir.join("update_plugins.py").require()?;

    print_separator("The Ultimate vimrc");
    print_separator(t!("The Ultimate vimrc"));

    ctx.run_type()
        .execute(&git)
        .current_dir(&config_dir)
        .args(["reset", "--hard"])
        .check_run()?;
        .status_checked()?;
    ctx.run_type()
        .execute(&git)
        .current_dir(&config_dir)
        .args(["clean", "-d", "--force"])
        .check_run()?;
        .status_checked()?;
    ctx.run_type()
        .execute(&git)
        .current_dir(&config_dir)
        .args(["pull", "--rebase"])
        .check_run()?;
        .status_checked()?;
    ctx.run_type()
        .execute(python)
        .current_dir(config_dir)
        .arg(update_plugins)
        .check_run()?;
        .status_checked()?;

    Ok(())
}

pub fn upgrade_vim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()> {
pub fn upgrade_vim(ctx: &ExecutionContext) -> Result<()> {
    let vim = require("vim")?;

    let output = Command::new(&vim).arg("--version").check_output()?;
    if !output.starts_with("VIM") {
        return Err(SkipStep(String::from("vim binary might by actually nvim")).into());
    let output = Command::new(&vim).arg("--version").output_checked_utf8()?;
    if !output.stdout.starts_with("VIM") {
        return Err(SkipStep(t!("vim binary might be actually nvim").to_string()).into());
    }

    let vimrc = vimrc(base_dirs)?;
    let vimrc = vimrc()?;

    print_separator("Vim");
    upgrade(&vim, &vimrc, ctx)
    upgrade(
        ctx.run_type()
            .execute(&vim)
            .args(["-u"])
            .arg(vimrc)
            .args(["-U", "NONE", "-V1", "-nNesS"])
            .arg(upgrade_script()?.path()),
        ctx,
    )
}

pub fn upgrade_neovim(base_dirs: &BaseDirs, ctx: &ExecutionContext) -> Result<()> {
pub fn upgrade_neovim(ctx: &ExecutionContext) -> Result<()> {
    let nvim = require("nvim")?;
    let nvimrc = nvimrc(base_dirs)?;
    let nvimrc = nvimrc()?;

    print_separator("Neovim");
    upgrade(&nvim, &nvimrc, ctx)
    upgrade(
        ctx.run_type()
            .execute(nvim)
            .args(["-u"])
            .arg(nvimrc)
            .args(["--headless", "-V1", "-nS"])
            .arg(upgrade_script()?.path()),
        ctx,
    )
}

pub fn run_voom(_base_dirs: &BaseDirs, run_type: RunType) -> Result<()> {
pub fn run_voom(ctx: &ExecutionContext) -> Result<()> {
    let voom = require("voom")?;

    print_separator("voom");

    run_type.execute(voom).arg("update").check_run()
    ctx.run_type().execute(voom).arg("update").status_checked()
}

Some files were not shown because too many files have changed in this diff.