Compare commits

184 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 4488f3d5d3 | |
| | 5a7958d20e | |
| | 481a942b76 | |
| | a601d8429d | |
| | a4a2d52a6d | |
| | 47fa3ba7de | |
| | e6bb6709b3 | |
| | c421742c4f | |
| | 1312cc8f6e | |
| | ed37763d30 | |
| | 583bbf65e2 | |
| | 5770a5caa7 | |
| | 722903fec3 | |
| | 30f1c3c1b4 | |
| | ef7d146282 | |
| | 20667a23d3 | |
| | 26f05827ae | |
| | b1ffe7d553 | |
| | 368a060529 | |
| | b40bffb1f2 | |
| | 488ae149f7 | |
| | fa3e4726b7 | |
| | 66a12cc8bf | |
| | 3e0c21e981 | |
| | da270ae7d9 | |
| | 4624f11ba5 | |
| | 224bb96a98 | |
| | 9a6fe8eea9 | |
| | aebc035ec0 | |
| | bd348c328e | |
| | c5f2d7b473 | |
| | dc9d8d55f2 | |
| | b172ba7f03 | |
| | 8227890808 | |
| | a0963fe3fc | |
| | 4df30c2587 | |
| | 305a5fbcae | |
| | 4f4dcbb643 | |
| | 202897ba35 | |
| | 444689c899 | |
| | 98ec13f8db | |
| | 39f76a3a71 | |
| | f181a795a6 | |
| | ea2f3e07e9 | |
| | 8aad6eae0d | |
| | e86e5fe3e7 | |
| | 2c2569c4f8 | |
| | 9ffdc9649e | |
| | a5d4f2eec9 | |
| | a5df40e01d | |
| | 0573fc97c6 | |
| | 1ae95f41a1 | |
| | 8a7af2e14d | |
| | c36da89933 | |
| | bbb84c2ee7 | |
| | 36fd4b13c0 | |
| | 49327000fc | |
| | 9c25cd7426 | |
| | 9767e4169c | |
| | 0854f9c559 | |
| | e4a068d808 | |
| | 4c793b0df8 | |
| | a021441135 | |
| | 29c555c394 | |
| | c33d396489 | |
| | f6d2ba4dae | |
| | a88574204d | |
| | 9435bc4b7d | |
| | 27245cbd7b | |
| | 21751aa8a5 | |
| | ad41948450 | |
| | e32246f172 | |
| | 25d3a816b4 | |
| | 05b1a565e0 | |
| | 7b2623ea3c | |
| | 983c5243ba | |
| | 1958fe1e5b | |
| | ca8558d9b4 | |
| | 1b534800a9 | |
| | e91c00c9c0 | |
| | a2375b4820 | |
| | 2e0c8e9e17 | |
| | dc0ddcf9f0 | |
| | a1f3c86a39 | |
| | 55f672eff7 | |
| | 8ece0346d8 | |
| | b1fe1d201a | |
| | 5010abdc22 | |
| | e4441d5021 | |
| | 5af0c6a7e5 | |
| | b8da17106a | |
| | fdf40dbf43 | |
| | f3b6530969 | |
| | cbc5fc94f9 | |
| | dceb697355 | |
| | 07118fa0d2 | |
| | 16e6db0def | |
| | 64d8f6d632 | |
| | 180b5cba58 | |
| | bac416e907 | |
| | cb674a1572 | |
| | 960b14fa20 | |
| | a9f57d4205 | |
| | 13330b6950 | |
| | 1ebcc9beee | |
| | 55e1bbf2b9 | |
| | f2dfa1e475 | |
| | fcd53e772a | |
| | 8b9d7ef8f3 | |
| | d8406a8cfe | |
| | 4a9ef581e5 | |
| | a52db1f261 | |
| | 8e16174ce7 | |
| | c748bb5d7a | |
| | 3cc8f0d818 | |
| | f96eeeda6b | |
| | d1d8904376 | |
| | 3b329fe687 | |
| | 9eb1b4ac9f | |
| | c4c0bd7383 | |
| | 1e9de5832d | |
| | f2b17cdd9d | |
| | 7bfd6c2439 | |
| | 0e8d5f0266 | |
| | 32add8f046 | |
| | f661f00277 | |
| | 2a1999fe20 | |
| | 4d66431aad | |
| | 767f0d91f4 | |
| | a3428e3477 | |
| | 614131b7bf | |
| | 9b0681f3b8 | |
| | ecf8fb7a47 | |
| | 04bfb45a97 | |
| | d90ce30452 | |
| | ab21600ca6 | |
| | 728ea26204 | |
| | 373cd3b3ae | |
| | f4e0258b09 | |
| | d50360a69a | |
| | 351922c81f | |
| | 9518f43866 | |
| | 2c1ce3d4e6 | |
| | 12116c3261 | |
| | fbc84e8aa1 | |
| | 6dab1e4f37 | |
| | 650a143602 | |
| | 9b6027fe78 | |
| | 0e30e05ce8 | |
| | eea952fa78 | |
| | 6071a1ee3b | |
| | a801b7b9f4 | |
| | c6e3f0ae0a | |
| | a43b03d3db | |
| | 12b0fa57ad | |
| | d9e304f0ef | |
| | 842b92cca7 | |
| | 485f0ec9c8 | |
| | 5e3b5fc9a7 | |
| | 7c63541cad | |
| | 238e089d74 | |
| | 8991bc9f62 | |
| | 7a3f3a8905 | |
| | e4085e03eb | |
| | 4b0c366e5f | |
| | ea97240d09 | |
| | 12de531abb | |
| | c3876ce3bf | |
| | cbbfc3a114 | |
| | ad2bfc9abd | |
| | 528461412e | |
| | 64db679390 | |
| | 77a8b3b7d2 | |
| | 7007e76ab5 | |
| | 3c970063a9 | |
| | b70830015e | |
| | b43f2c8b3a | |
| | c311da16f3 | |
| | 37608a338c | |
| | b07288e674 | |
| | 707698faab | |
| | 2e70d132d0 | |
| | 30c5b31e21 | |
| | 77ff6cb714 | |
`.github/ISSUE_TEMPLATE/feature_request.md` (12 changes):

```diff
@@ -8,10 +8,14 @@ assignees: ''
 ---
 
 ## I want to suggest a new step
-### Which tool is this about? Where is its repository?
-### Which operating systems are supported by this tool?
-### What should Topgrade do to figure out if the tool needs to be invoked?
-### Which exact commands should Topgrade run?
+* Which tool is this about? Where is its repository?
+* Which operating systems are supported by this tool?
+* What should Topgrade do to figure out if the tool needs to be invoked?
+* Which exact commands should Topgrade run?
+* Does it have a `--dry-run` option? i.e., print what should be done and exit
+* Does it need the user to confirm the execution? And does it provide a `--yes`
+  option to skip this step?
 
 ## I want to suggest some general feature
 Topgrade should...
```
`.github/PULL_REQUEST_TEMPLATE.md` (13 changes):

```diff
@@ -1,14 +1,15 @@
-## Standards checklist:
+## What does this PR do
 
-- [ ] The PR title is descriptive.
+## Standards checklist
+
+- [ ] The PR title is descriptive
 - [ ] I have read `CONTRIBUTING.md`
-- [ ] The code compiles (`cargo build`)
-- [ ] The code passes rustfmt (`cargo fmt`)
-- [ ] The code passes clippy (`cargo clippy`)
-- [ ] The code passes tests (`cargo test`)
 - [ ] *Optional:* I have tested the code myself
+- [ ] If this PR introduces new user-facing messages they are translated
 
 ## For new steps
 
 - [ ] *Optional:* Topgrade skips this step where needed
 - [ ] *Optional:* The `--dry-run` option works with this step
 - [ ] *Optional:* The `--yes` option works with this step if it is supported by
```
New file `.github/dependabot.yml` (10 lines):

```yml
# Set update schedule for GitHub Actions

version: 2
updates:

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every week
      interval: "weekly"
```
(file path not shown)

```diff
@@ -1,4 +1,4 @@
-name: Test Configuration File Creation
+name: Check config file creation if not exists
 
 on:
   pull_request:
@@ -12,7 +12,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - run: |
           CONFIG_PATH=~/.config/topgrade.toml;
           if [ -f "$CONFIG_PATH" ]; then rm $CONFIG_PATH; fi
```
New file `.github/workflows/check_i18n.yml` (22 lines):

```yml
on:
  pull_request:
  push:
    branches:
      - main

name: Check i18n

jobs:
  check_locale:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install checker
        # Build it with the dev profile as this is faster and the checker still works
        run: |
          cargo install --git https://github.com/topgrade-rs/topgrade_i18n_locale_checker --profile dev

      - name: Run the checker
        run: topgrade_i18n_locale_checker --locale-file ./locales/app.yml --rust-src-to-check ./src
```
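The same check can be run locally before opening a pull request. A minimal sketch using the two commands from the workflow above (the checker's repository URL and CLI flags are copied verbatim from it):

```sh
# Install the locale checker; the dev profile builds faster and is sufficient here
cargo install --git https://github.com/topgrade-rs/topgrade_i18n_locale_checker --profile dev

# Verify that the messages used in the Rust sources and the locale file agree
topgrade_i18n_locale_checker --locale-file ./locales/app.yml --rust-src-to-check ./src
```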
New file `.github/workflows/check_security_vulnerability.yml` (32 lines):

```yml
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Check Security Vulnerability

on:
  pull_request:
  push:
    branches:
      - main

jobs:
  lint:
    name: DevSkim
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Run DevSkim scanner
        uses: microsoft/DevSkim-Action@v1

      - name: Upload DevSkim scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: devskim-results.sarif
```
(file path not shown)

```diff
@@ -8,7 +8,7 @@ jobs:
   prepare:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - uses: actions-rs/toolchain@v1
         with:
          toolchain: nightly-2022-08-03
```
(file path not shown)

```diff
@@ -7,23 +7,16 @@ on:
 name: CI
 
 env:
-  RUST_VER: 'stable'
   CROSS_VER: '0.2.5'
   CARGO_NET_RETRY: 3
 
 jobs:
   fmt:
     name: Rustfmt
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
-
-      - name: Setup Rust
-        uses: dtolnay/rust-toolchain@master
-        with:
-          toolchain: '${{ env.RUST_VER }}'
-          components: rustfmt
+        uses: actions/checkout@v4
 
       - name: Run cargo fmt
         env:
@@ -42,38 +35,36 @@ jobs:
          - target: x86_64-linux-android
            target_name: Android
            use_cross: true
-            os: ubuntu-20.04
+            os: ubuntu-latest
 
          - target: x86_64-unknown-freebsd
            target_name: FreeBSD
            use_cross: true
-            os: ubuntu-20.04
+            os: ubuntu-latest
 
          - target: x86_64-unknown-linux-gnu
            target_name: Linux
-            os: ubuntu-20.04
+            os: ubuntu-latest
 
          - target: x86_64-apple-darwin
-            target_name: macOS
-            os: macos-11
+            target_name: macOS-x86_64
+            os: macos-13
+
+          - target: aarch64-apple-darwin
+            target_name: macOS-aarch64
+            os: macos-latest
 
          - target: x86_64-unknown-netbsd
            target_name: NetBSD
            use_cross: true
-            os: ubuntu-20.04
+            os: ubuntu-latest
 
          - target: x86_64-pc-windows-msvc
            target_name: Windows
-            os: windows-2019
+            os: windows-latest
    steps:
      - name: Checkout code
-        uses: actions/checkout@v3
-
-      - name: Setup Rust
-        uses: dtolnay/rust-toolchain@master
-        with:
-          toolchain: '${{ env.RUST_VER }}'
-          components: clippy
+        uses: actions/checkout@v4
 
      - name: Setup Rust Cache
        uses: Swatinem/rust-cache@v2
@@ -84,8 +75,13 @@ jobs:
        if: matrix.use_cross == true
        run: curl -fL --retry 3 https://github.com/cross-rs/cross/releases/download/v${{ env.CROSS_VER }}/cross-x86_64-unknown-linux-musl.tar.gz | tar vxz -C /usr/local/bin
 
-      - name: Run cargo check
+      - name: Run cargo/cross check
        run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} check --locked --target ${{ matrix.target }}
 
-      - name: Run cargo clippy
+      - name: Run cargo/cross clippy
        run: ${{ matrix.use_cross == true && 'cross' || 'cargo' }} clippy --locked --target ${{ matrix.target }} --all-features -- -D warnings
+
+      - name: Run cargo test
+        # ONLY run test with cargo
+        if: matrix.use_cross == false
+        run: cargo test --locked --target ${{ matrix.target }}
```
Deleted file `.github/workflows/code-coverage.yml` (59 lines):

```yml
on:
  pull_request:
  push:
    branches:
      - main

env:
  CARGO_TERM_COLOR: always

name: Test with Code Coverage

jobs:
  test:
    name: Test
    env:
      PROJECT_NAME_UNDERSCORE: topgrade
      CARGO_INCREMENTAL: 0
      RUSTFLAGS: -Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort
      RUSTDOCFLAGS: -Cpanic=abort
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly
          override: true
      - name: Cache dependencies
        uses: actions/cache@v2
        env:
          cache-name: cache-dependencies
        with:
          path: |
            ~/.cargo/.crates.toml
            ~/.cargo/.crates2.json
            ~/.cargo/bin
            ~/.cargo/registry/index
            ~/.cargo/registry/cache
            target
          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('Cargo.lock') }}
      - name: Generate test result and coverage report
        run: |
          cargo install cargo2junit grcov;
          cargo test $CARGO_OPTIONS -- -Z unstable-options --format json | cargo2junit > results.xml;
          zip -0 ccov.zip `find . \( -name "$PROJECT_NAME_UNDERSCORE*.gc*" \) -print`;
          grcov ccov.zip -s . -t lcov --llvm --ignore-not-existing --ignore "/*" --ignore "tests/*" -o lcov.info;
      - name: Upload test results
        uses: EnricoMi/publish-unit-test-result-action@v1
        with:
          check_name: Test Results
          github_token: ${{ secrets.GITHUB_TOKEN }}
          files: results.xml
      - name: Upload to CodeCov
        uses: codecov/codecov-action@v1
        with:
          # required for private repositories:
          # token: ${{ secrets.CODECOV_TOKEN }}
          files: ./lcov.info
          fail_ci_if_error: true
```
New file `.github/workflows/create_release_assets.yml` (88 lines):

```yml
name: Publish release files for CD native environments

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  release:
    types: [ created ]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        platform: [ ubuntu-latest, macos-latest, macos-13, windows-latest ]
    runs-on: ${{ matrix.platform }}
    steps:
      - uses: actions/checkout@v4

      - name: Install cargo-deb
        run: cargo install cargo-deb
        if: ${{ matrix.platform == 'ubuntu-latest' }}
        shell: bash

      - name: Check format
        run: cargo fmt --all -- --check

      - name: Run clippy
        run: cargo clippy --all-targets --locked -- -D warnings

      - name: Run clippy (All features)
        run: cargo clippy --all-targets --locked --all-features -- -D warnings

      - name: Run tests
        run: cargo test

      - name: Build in Release profile with all features enabled
        run: cargo build --release --all-features

      - name: Rename Release (Unix)
        run: |
          cargo install default-target
          mkdir -p assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
          mv target/release/topgrade assets
          cd assets
          tar --format=ustar -czf $FILENAME.tar.gz topgrade
          rm topgrade
          ls .
        if: ${{ matrix.platform != 'windows-latest' }}
        shell: bash

      - name: Build Debian-based system binary and create package
        # First remove the binary built by previous steps
        # because we don't want the auto-update feature,
        # then build the new binary without auto-updating.
        run: |
          rm -rf target/release
          cargo build --release
          cargo deb --no-build --no-strip
        if: ${{ matrix.platform == 'ubuntu-latest' }}
        shell: bash

      - name: Move Debian-based system package
        run: |
          mkdir -p assets
          mv target/debian/*.deb assets
        if: ${{ matrix.platform == 'ubuntu-latest' }}
        shell: bash

      - name: Rename Release (Windows)
        run: |
          cargo install default-target
          mkdir assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
          mv target/release/topgrade.exe assets/topgrade.exe
          cd assets
          powershell Compress-Archive -Path * -Destination ${FILENAME}.zip
          rm topgrade.exe
          ls .
        if: ${{ matrix.platform == 'windows-latest' }}
        shell: bash

      - name: Release
        uses: softprops/action-gh-release@v2
        with:
          files: assets/*
```
New file `.github/workflows/create_release_assets_cross.yml` (97 lines):

```yml
name: Publish release files for non-cd-native environments

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  release:
    types: [created]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        target:
          [
            "aarch64-unknown-linux-gnu",
            "armv7-unknown-linux-gnueabihf",
            "x86_64-unknown-linux-musl",
            "aarch64-unknown-linux-musl",
            "x86_64-unknown-freebsd",
          ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install cargo-deb cross compilation dependencies
        run: sudo apt-get install libc6-arm64-cross libgcc-s1-arm64-cross
        if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' }}
        shell: bash

      - name: Install cargo-deb cross compilation dependencies for armv7
        run: sudo apt-get install libc6-armhf-cross libgcc-s1-armhf-cross
        if: ${{ matrix.target == 'armv7-unknown-linux-gnueabihf' }}
        shell: bash

      - name: Install cargo-deb
        run: cargo install cargo-deb
        if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'armv7-unknown-linux-gnueabihf' }}
        shell: bash

      - name: install targets
        run: rustup target add ${{ matrix.target }}

      - name: install cross
        uses: taiki-e/install-action@v2
        with:
          tool: cross@0.2.5

      - name: Check format
        run: cross fmt --all -- --check

      - name: Run clippy
        run: cross clippy --all-targets --locked --target ${{matrix.target}} -- -D warnings

      - name: Run clippy (All features)
        run: cross clippy --locked --all-features --target ${{matrix.target}} -- -D warnings

      - name: Run tests
        run: cross test --target ${{matrix.target}}

      - name: Build in Release profile with all features enabled
        run: cross build --release --all-features --target ${{matrix.target}}

      - name: Rename Release
        run: |
          mkdir -p assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-${{matrix.target}}
          mv target/${{matrix.target}}/release/topgrade assets
          cd assets
          tar --format=ustar -czf $FILENAME.tar.gz topgrade
          rm topgrade
          ls .

      - name: Build Debian-based system package without autoupdate feature
        # First remove the binary built by previous steps
        # because we don't want the auto-update feature,
        # then build the new binary without auto-updating.
        run: |
          rm -rf target/${{matrix.target}}
          cross build --release --target ${{matrix.target}}
          cargo deb --target=${{matrix.target}} --no-build --no-strip
        if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'armv7-unknown-linux-gnueabihf' }}
        shell: bash

      - name: Move Debian-based system package
        run: |
          mkdir -p assets
          mv target/${{matrix.target}}/debian/*.deb assets
        if: ${{ matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'armv7-unknown-linux-gnueabihf' }}
        shell: bash

      - name: Release
        uses: softprops/action-gh-release@v2
        with:
          files: assets/*
```
Deleted file `.github/workflows/release-cross.yml` (70 lines):

```yml
name: Publish release files for non-cd-native environments

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  release:
    types: [ created ]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        target: [ "aarch64-unknown-linux-gnu", "armv7-unknown-linux-gnueabihf", "x86_64-unknown-linux-musl", "aarch64-unknown-linux-musl", "x86_64-unknown-freebsd", ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          default: true
          override: true
          target: ${{ matrix.target }}
          components: rustfmt, clippy
      - uses: actions-rs/cargo@v1.0.1
        name: Check format
        with:
          use-cross: true
          command: fmt
          args: --all -- --check
      - uses: actions-rs/cargo@v1.0.1
        name: Run clippy
        with:
          command: clippy
          use-cross: true
          args: --all-targets --locked --target ${{matrix.target}} -- -D warnings
      - uses: actions-rs/cargo@v1.0.1
        name: Run clippy (All features)
        with:
          command: clippy
          use-cross: true
          args: --locked --all-features --target ${{matrix.target}} -- -D warnings
      - uses: actions-rs/cargo@v1.0.1
        name: Run tests
        with:
          command: test
          use-cross: true
          args: --target ${{matrix.target}}
      - uses: actions-rs/cargo@v1.0.1
        name: Build
        with:
          command: build
          use-cross: true
          args: --release --all-features --target ${{matrix.target}}
      - name: Rename Release
        run: |
          mkdir assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-${{matrix.target}}
          mv target/${{matrix.target}}/release/topgrade assets
          cd assets
          tar --format=ustar -czf $FILENAME.tar.gz topgrade
          rm topgrade
          ls .
      - name: Release
        uses: softprops/action-gh-release@v1
        with:
          files: assets/*
```
Deleted file `.github/workflows/release.yml` (77 lines):

```yml
name: Publish release files for CD native environments

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  release:
    types: [ created ]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        platform: [ ubuntu-latest, macos-latest, windows-latest ]
    runs-on: ${{ matrix.platform }}
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          override: true
          components: rustfmt, clippy
      - uses: actions-rs/cargo@v1.0.1
        name: Check format
        with:
          command: fmt
          args: --all -- --check
      - uses: actions-rs/cargo@v1.0.1
        name: Run clippy
        with:
          command: clippy
          args: --all-targets --locked -- -D warnings
      - uses: actions-rs/cargo@v1.0.1
        name: Run clippy (All features)
        with:
          command: clippy
          args: --all-targets --locked --all-features -- -D warnings
      - uses: actions-rs/cargo@v1.0.1
        name: Run tests
        with:
          command: test
      - uses: actions-rs/cargo@v1.0.1
        name: Build
        with:
          command: build
          args: --release --all-features
      - name: Rename Release (Unix)
        run: |
          cargo install default-target
          mkdir assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
          mv target/release/topgrade assets
          cd assets
          tar --format=ustar -czf $FILENAME.tar.gz topgrade
          rm topgrade
          ls .
        if: ${{ matrix.platform != 'windows-latest' }}
        shell: bash
      - name: Rename Release (Windows)
        run: |
          cargo install default-target
          mkdir assets
          FILENAME=topgrade-${{github.event.release.tag_name}}-$(default-target)
          mv target/release/topgrade.exe assets/topgrade.exe
          cd assets
          powershell Compress-Archive -Path * -Destination ${FILENAME}.zip
          rm topgrade.exe
          ls .
        if: ${{ matrix.platform == 'windows-latest' }}
        shell: bash
      - name: Release
        uses: softprops/action-gh-release@v1
        with:
          files: assets/*
```
New file `.github/workflows/release_to_aur.yml` (31 lines):

```yml
name: Publish to AUR

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  push:
    tags:
      - "v*"

jobs:
  aur-publish:
    runs-on: ubuntu-latest
    steps:
      - name: Publish source AUR package
        uses: aksh1618/update-aur-package@v1.0.5
        with:
          tag_version_prefix: v
          package_name: topgrade
          commit_username: "Thomas Schönauer"
          commit_email: t.schoenauer@hgs-wt.at
          ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
      - name: Publish binary AUR package
        uses: aksh1618/update-aur-package@v1.0.5
        with:
          tag_version_prefix: v
          package_name: topgrade-bin
          commit_username: "Thomas Schönauer"
          commit_email: t.schoenauer@hgs-wt.at
          ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
```
(file path not shown)

```diff
@@ -12,7 +12,7 @@ jobs:
   prepare:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
@@ -21,7 +21,7 @@ jobs:
   publish:
     runs-on: ubuntu-latest
    steps:
-      - uses: katyo/publish-crates@v1
+      - uses: katyo/publish-crates@v2
        with:
          dry-run: true
          check-repo: ${{ github.event_name == 'push' }}
```
(file path not shown)

```diff
@@ -19,7 +19,7 @@ jobs:
        uses: Homebrew/actions/setup-homebrew@master
      - name: Cache Homebrew Bundler RubyGems
        id: cache
-        uses: actions/cache@v1
+        uses: actions/cache@v4
        with:
          path: ${{ steps.set-up-homebrew.outputs.gems-path }}
          key: ${{ runner.os }}-rubygems-${{ steps.set-up-homebrew.outputs.gems-hash }}
```
(file path not shown)

```diff
@@ -14,7 +14,7 @@ jobs:
      matrix:
        target: [x86_64, x86, aarch64]
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
@@ -34,7 +34,7 @@ jobs:
      matrix:
        target: [x64, x86]
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
@@ -53,7 +53,7 @@ jobs:
      matrix:
        target: [x86_64, aarch64]
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
@@ -69,7 +69,7 @@ jobs:
  sdist:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Build sdist
        uses: PyO3/maturin-action@v1
        with:
```
New file `.github/workflows/release_to_winget.yml` (13 lines):

```yml
name: Publish to WinGet
on:
  release:
    types: [released]
jobs:
  publish:
    runs-on: windows-latest
    steps:
      - uses: vedantmgoyal2009/winget-releaser@main
        with:
          identifier: topgrade-rs.topgrade
          max-versions-to-keep: 5 # keep only latest 5 versions
          token: ${{ secrets.WINGET_TOKEN }}
```
Deleted file `.github/workflows/update_aur.yml` (22 lines):

```yml
name: Publish to AUR

on:
  # workflow_run:
  #   workflows: ["Check SemVer compliance"]
  #   types:
  #     - completed
  push:
    tags:
      - "v*"

jobs:
  aur-publish:
    runs-on: ubuntu-latest
    steps:
      - name: Publish AUR package
        uses: ATiltedTree/create-aur-release@v1
        with:
          package_name: topgrade
          commit_username: "Thomas Schönauer"
          commit_email: t.schoenauer@hgs-wt.at
          ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
```
`.gitignore` (18 changes):

```diff
@@ -1,4 +1,20 @@
-/target
+# JetBrains IDEs
+.idea/
+
+# Visual Studio
+.vs/
+
+# Visual Studio Code
+.vscode/
+
+# Generic build outputs
 /build
 
+# Specific for some languages like Rust
+/target
+
+# LLVM profiling output
+*.profraw
+
+# Backup files for any .rs files in the project
 **/*.rs.bk
```
Deleted file `.vscode/launch.json` (38 lines):

```json
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "type": "lldb",
            "request": "launch",
            "name": "Topgrade",
            "console": "integratedTerminal",
            "cargo": {
                "args": [
                    "build",
                    "--bin=topgrade-rs",
                    "--package=topgrade-rs"
                ],
                "filter": {
                    "name": "topgrade-rs",
                    "kind": "bin"
                }
            },
            "args": [
                "--only",
                "${input:step}",
                "-v"
            ],
            "cwd": "${workspaceFolder}"
        },
    ],
    "inputs": [
        {
            "type": "promptString",
            "id": "step",
            "description": "step name",
        }
    ]
}
```
Deleted file `.vscode/tasks.json` (14 lines):

```json
{
    "version": "2.0.0",
    "tasks": [
        {
            "type": "cargo",
            "command": "clippy",
            "problemMatcher": [
                "$rustc"
            ],
            "group": "test",
            "label": "rust: cargo clippy"
        }
    ]
}
```
Deleted file `.vscode/topgrade.code-snippets` (50 lines):

```json
{
    // Place your topgrade workspace snippets here. Each snippet is defined under a snippet name and has a scope, prefix, body and
    // description. Add comma separated ids of the languages where the snippet is applicable in the scope field. If scope
    // is left empty or omitted, the snippet gets applied to all languages. The prefix is what is
    // used to trigger the snippet and the body will be expanded and inserted. Possible variables are:
    // $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
    // Placeholders with the same ids are connected.
    // Example:
    // "Print to console": {
    //     "scope": "javascript,typescript",
    //     "prefix": "log",
    //     "body": [
    //         "console.log('$1');",
    //         "$2"
    //     ],
    //     "description": "Log output to console"
    // }
    "Skip Step": {
        "scope": "rust",
        "prefix": "skipstep",
        "body": [
            "return Err(SkipStep(format!(\"$1\")).into());"
        ]
    },
    "Step": {
        "scope": "rust",
        "prefix": "step",
        "body": [
            "pub fn $1(ctx: &ExecutionContext) -> Result<()> {",
            "    $0",
            "    Ok(())",
            "}"
        ]
    },
    "Require Binary": {
        "scope": "rust",
        "prefix": "req",
        "description": "Require a binary to be installed",
        "body": [
            "let ${1:binary} = require(\"${1:binary}\")?;"
        ]
    },
    "macos": {
        "scope": "rust",
        "prefix": "macos",
        "body": [
            "#[cfg(target_os = \"macos\")]"
        ]
    }
}
```
(file path not shown)

````diff
@@ -1,12 +0,0 @@
-1. In 13.0.0, we introduced a new feature, pushing git repos, now this feature
-   has been removed as some users are not satisfied with it.
-
-   For configuration entries, the following ones are gone:
-
-   ```toml
-   [git]
-   pull_only_repos = []
-   push_only_repos = []
-   pull_arguments = ""
-   push_arguments = ""
-   ```
````
(file path not shown)

````diff
@@ -48,7 +48,7 @@ To add a new `step` to `topgrade`:
 
     // Invoke the new step to get things updated!
     ctx.run_type()
-        .execute("xxx")
+        .execute(xxx)
        .arg(/* args required by this step */)
        .status_checked()
 }
@@ -104,7 +104,7 @@ and have some basic documentations guiding user how to use these options.
 ## Breaking changes
 
 If your PR introduces a breaking change, document it in [`BREAKINGCHANGES_dev.md`][bc_dev],
-it should be written in Markdown and wrapped in 80, for example:
+it should be written in Markdown and wrapped at 80, for example:
 
 ```md
 1. The configuration location has been updated to x.
@@ -129,6 +129,24 @@ $ cargo test
 
 Don't worry about other platforms, we have most of them covered in our CI.
 
+## I18n
+
+If your PR introduces user-facing messages, we need to ensure they are translated.
+Please add the translations to [`locales/app.yml`][app_yml]. For simple messages
+without arguments (e.g., "hello world"), we can simply translate them according
+(Tip: ChatGPT or similar LLMs is good at translation). If a message contains
+arguments, e.g., "hello <NAME>", please follow this convention:
+
+```yml
+"hello {name}": # key
+  en: "hello %{name}" # translation
+```
+
+Arguments in the key should be in format `{argument_name}`, and they will have
+a preceeding `%` when used in translations.
+
+[app_yml]: https://github.com/topgrade-rs/topgrade/blob/main/locales/app.yml
+
 ## Some tips
 
 1. Locale
````
`Cargo.lock` (generated, 2207 changes): file diff suppressed because it is too large.
`Cargo.toml` (43 changes):

```diff
@@ -5,7 +5,8 @@ categories = ["os"]
 keywords = ["upgrade", "update"]
 license = "GPL-3.0"
 repository = "https://github.com/topgrade-rs/topgrade"
-version = "14.0.1"
+rust-version = "1.84.1"
+version = "16.0.3"
 authors = ["Roey Darwish Dror <roey.ghost@gmail.com>", "Thomas Schönauer <t.schoenauer@hgs-wt.at>"]
 exclude = ["doc/screenshot.gif", "BREAKINGCHANGES_dev.md"]
 edition = "2021"
@@ -22,24 +23,24 @@ path = "src/main.rs"
 [dependencies]
 home = "~0.5"
 etcetera = "~0.8"
-once_cell = "~1.18"
+once_cell = "~1.19"
 serde = { version = "~1.0", features = ["derive"] }
 toml = "0.8"
-which_crate = { version = "~4.1", package = "which" }
+which_crate = { version = "~6.0", package = "which" }
 shellexpand = "~3.1"
-clap = { version = "~4.4", features = ["cargo", "derive"] }
-clap_complete = "~4.4"
+clap = { version = "~4.5", features = ["cargo", "derive"] }
+clap_complete = "~4.5"
 clap_mangen = "~0.2"
-walkdir = "~2.4"
+walkdir = "~2.5"
 console = "~0.15"
 lazy_static = "~1.4"
 chrono = "~0.4"
 glob = "~0.3"
-strum = { version = "~0.24", features = ["derive"] }
+strum = { version = "~0.26", features = ["derive"] }
 thiserror = "~1.0"
-tempfile = "~3.8"
+tempfile = "~3.10"
 cfg-if = "~1.0"
-tokio = { version = "~1.34", features = ["process", "rt-multi-thread"] }
+tokio = { version = "~1.38", features = ["process", "rt-multi-thread"] }
 futures = "~0.3"
 regex = "~1.10"
 semver = "~1.0"
@@ -49,7 +50,11 @@ tracing = { version = "~0.1", features = ["attributes", "log"] }
 tracing-subscriber = { version = "~0.3", features = ["env-filter", "time"] }
 merge = "~0.1"
 regex-split = "~0.1"
-notify-rust = "~4.10"
+notify-rust = "~4.11"
+wildmatch = "2.3.0"
+rust-i18n = "3.0.1"
+sys-locale = "0.3.1"
+jetbrains-toolbox-updater = "1.1.0"
 
 [package.metadata.generate-rpm]
 assets = [{ source = "target/release/topgrade", dest = "/usr/bin/topgrade" }]
@@ -58,15 +63,23 @@ assets = [{ source = "target/release/topgrade", dest = "/usr/bin/topgrade" }]
 git = "*"
 
 [package.metadata.deb]
-depends = "$auto,git"
+name = "topgrade"
+maintainer = "Chris Gelatt <kreeblah@gmail.com>"
+copyright = "2024, Topgrade Team"
+license-file = ["LICENSE", "0"]
+depends = "$auto"
+extended-description = "Keeping your system up to date usually involves invoking multiple package managers. This results in big, non-portable shell one-liners saved in your shell. To remedy this, Topgrade detects which tools you use and runs the appropriate commands to update them."
+section = "utils"
+priority = "optional"
+default-features = true
 
 [target.'cfg(unix)'.dependencies]
-nix = { version = "~0.27", features = ["hostname", "signal", "user"] }
-rust-ini = "~0.19"
-self_update_crate = { version = "~0.30", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }
+nix = { version = "~0.29", features = ["hostname", "signal", "user"] }
+rust-ini = "~0.21"
+self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-tar", "compression-flate2", "rustls"] }
 
 [target.'cfg(windows)'.dependencies]
-self_update_crate = { version = "~0.30", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
+self_update_crate = { version = "~0.40", default-features = false, optional = true, package = "self_update", features = ["archive-zip", "compression-zip-deflate", "rustls"] }
 winapi = "~0.3"
 parselnk = "~0.1"
```
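The new `[package.metadata.deb]` table is what `cargo-deb` reads when the release workflows package the Debian artifact. A rough sketch of the same flow from `create_release_assets.yml`, run locally (assuming `cargo-deb` is installed from crates.io, as the workflow does):

```sh
# Packaging helper used by the release workflow
cargo install cargo-deb

# Build the release binary first (the workflow builds it without the auto-update
# feature), then package the already-built binary using the
# [package.metadata.deb] settings from Cargo.toml
cargo build --release
cargo deb --no-build --no-strip

# The resulting package lands under target/debian/
ls target/debian/*.deb
```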
`README.md` (10 changes):

```diff
@@ -29,15 +29,17 @@ To remedy this, **Topgrade** detects which tools you use and runs the appropriat
 - NixOS: [Nixpkgs](https://search.nixos.org/packages?show=topgrade)
 - Void Linux: [XBPS](https://voidlinux.org/packages/?arch=x86_64&q=topgrade)
 - macOS: [Homebrew](https://formulae.brew.sh/formula/topgrade) or [MacPorts](https://ports.macports.org/port/topgrade/)
-- Windows: [Scoop](https://github.com/ScoopInstaller/Main/blob/master/bucket/topgrade.json)
+- Windows: [Chocolatey][choco], [Scoop][scoop] or [Winget][winget]
 - PyPi: [pip](https://pypi.org/project/topgrade/)
+- Fedora: [Copr](https://copr.fedorainfracloud.org/coprs/lilay/topgrade/)
+
+[choco]: https://community.chocolatey.org/packages/topgrade
+[scoop]: https://scoop.sh/#/apps?q=topgrade
+[winget]: https://winstall.app/apps/topgrade-rs.topgrade
 
 Other systems users can either use `cargo install` or the compiled binaries from the release page.
 The compiled binaries contain a self-upgrading feature.
 
-> Currently, Topgrade requires Rust 1.65 or above. In general, Topgrade tracks
-> the latest stable toolchain.
-
 ## Usage
 
 Just run `topgrade`.
```
(file path not shown)

````diff
@@ -9,12 +9,16 @@
 
 > If there are breaking changes, the major version number should be increased.
 
-2. Overwrite [`BREAKINGCHANGES`][breaking_changes] with
+2. If the major versioin number gets bumped, update [SECURITY.md][SECURITY_file_link].
+
+   [SECURITY_file_link]: https://github.com/topgrade-rs/topgrade/blob/main/SECURITY.md
+
+3. Overwrite [`BREAKINGCHANGES`][breaking_changes] with
    [`BREAKINGCHANGES_dev`][breaking_changes_dev], and create a new dev file:
 
    ```sh'
    $ cd topgrade
-   $ cp BREAKINGCHANGES_dev.md BREAKINGCHANGES.md
+   $ mv BREAKINGCHANGES_dev.md BREAKINGCHANGES.md
    $ touch BREAKINGCHANGES_dev.md
    ```
 
@@ -43,6 +47,7 @@
 1. AUR
 2. PyPi
 3. Homebrew (seems that this is not working correctly)
+4. Winget
 
 6. Manually release it to Crates.io
````
(file path not shown)

```diff
@@ -6,6 +6,6 @@ We only support the latest major version and each subversion.
 
 | Version  | Supported          |
 | -------- | ------------------ |
-| 10.0.x   | :white_check_mark: |
-| < 10.0   | :x:                |
+| 16.0.x   | :white_check_mark: |
+| < 16.0   | :x:                |
```
(file path not shown)

```diff
@@ -47,6 +47,12 @@
 # Run inside tmux (default: false)
 # run_in_tmux = true
 
+# Changes the way topgrade interacts with
+# the tmux session, creating the session
+# and only attaching to it if not inside tmux
+# (default: "attach_if_not_in_session", allowed values: "attach_if_not_in_session", "attach_always")
+# tmux_session_mode = "attach_if_not_in_session"
+
 # Cleanup temporary or old files (default: false)
 # cleanup = true
 
@@ -97,15 +103,44 @@
 # enable_pipupgrade = true ###disabled by default
 # pipupgrade_arguments = "-y -u --pip-path pip" ###disabled by default
 
+# For the poetry step, by default, Topgrade skips its update if poetry is not
+# installed with the official script. This configuration entry forces Topgrade
+# to run the update in this case.
+#
+# (default: false)
+# poetry_force_self_update = true
+
 
 [composer]
 # self_update = true
 
 
 [brew]
+# For the BrewCask step
+# If `Repo Cask Upgrade` exists, then use the `-a` option.
+# Otherwise, use the `--greedy` option.
 # greedy_cask = true
+
+# For the BrewCask step
+# If `Repo Cask Upgrade` does not exist, then use the `--greedy_latest` option.
+# NOTE: the above entry `greedy_cask` contains this entry, though you can enable
+# both of them, they won't clash with each other.
+# greedy_latest = true
+
+# For the BrewCask step
+# If `Repo Cask Upgrade` does not exist, then use the `--greedy_auto_updates` option.
+# NOTE: the above entry `greedy_cask` contains this entry, though you can enable
+# both of them, they won't clash with each other.
+# greedy_auto_updates = true
+
+# For the BrewFormula step
+# Execute `brew autoremove` after the step.
 # autoremove = true
 
+# For the BrewFormula step
+# Upgrade formulae built from the HEAD branch; `brew upgrade --fetch-HEAD`
+# fetch_head = true
+
 
 [linux]
 # Arch Package Manager to use.
@@ -144,6 +179,11 @@
 
 # rpm_ostree = false
 
+# For Fedora/CentOS/RHEL Atomic variants, if `bootc` is available and this configuration entry is set to true, use
+# it to do the update - Will also supercede rpm-ostree if enabled
+# (default: false)
+# bootc = false
+
 # nix_arguments = "--flake"
 
 # nix_env_arguments = "--prebuilt-only"
@@ -179,14 +219,15 @@
 
 # wsl_update_use_web_download = true
 
+# The default for winget_install_silently is true,
+# this example turns off silent install.
+# winget_install_silently = false
+
 # Causes Topgrade to rename itself during the run to allow package managers
 # to upgrade it. Use this only if you installed Topgrade by using a package
 # manager such as Scoop or Cargo
 # self_rename = true
 
-# Enable WinGet upgrade
-# enable_winget = true
-
 
 [npm]
 # Use sudo if the NPM directory isn't owned by the current user
@@ -198,6 +239,11 @@
 # use_sudo = true
 
 
+[deno]
+# Upgrade deno executable to the given version.
+# version = "stable"
+
+
 [vim]
 # For `vim-plug`, execute `PlugUpdate!` instead of `PlugUpdate`
 # force_plug_update = true
@@ -229,4 +275,53 @@
 
 # containers = ["archlinux-latest"]
 [containers]
-# ignored_containers = ["ghcr.io/rancher-sandbox/rancher-desktop/rdx-proxy:latest"]
+# Specify the containers to ignore while updating (Wildcard supported)
+# ignored_containers = ["ghcr.io/rancher-sandbox/rancher-desktop/rdx-proxy:latest", "docker.io*"]
+# Specify the runtime to use for containers (default: "docker", allowed values: "docker", "podman")
+# runtime = "podman"
+
+[lensfun]
+# If disabled, Topgrade invokes `lensfun‑update‑data` without root priviledge,
+# then the update will be only available to you. Otherwise, `sudo` is required,
+# and the update will be installed system-wide, i.e., available to all users.
+# (default: false)
+# use_sudo = false
+
+[julia]
+# If disabled, Topgrade invokes julia with the --startup-file=no CLI option.
+#
+# This may be desirable to avoid loading outdated packages with "using" directives
+# in the startup file, which might cause the update run to fail.
+# (default: true)
+# startup_file = true
+
+[zigup]
+# Version strings passed to zigup.
+# These may be pinned versions such as "0.13.0" or branches such as "master".
+# Each one will be updated in its own zigup invocation.
+# (default: ["master"])
+# target_versions = ["master", "0.13.0"]
+
+# Specifies the directory that the zig files will be installed to.
+# If defined, passed with the --install-dir command line flag.
+# If not defined, zigup will use its default behaviour.
+# (default: not defined)
+# install_dir = "~/.zig"
+
+# Specifies the path of the symlink which will be set to point at the default compiler version.
+# If defined, passed with the --path-link command line flag.
+# If not defined, zigup will use its default behaviour.
+# This is not meaningful if set_default is not enabled.
+# (default: not defined)
+# path_link = "~/.bin/zig"
+
+# If enabled, run `zigup clean` after updating all versions.
+# If enabled, each updated version above will be marked with `zigup keep`.
+# (default: false)
+# cleanup = false
+
+[vscode]
+# If this is set and is a non-empty string, it specifies the profile the
+# extensions should be updated for.
+# (default: this won't be set by default)
+# profile = ""
```
New file `locales/app.yml` (1290 lines): file diff suppressed because it is too large.
New file `rust-toolchain.toml` (2 lines):

```toml
[toolchain]
channel = "1.84.1"
```
@@ -11,6 +11,7 @@ use crate::WINDOWS_DIRS;
 use crate::XDG_DIRS;
 use color_eyre::eyre::Result;
 use etcetera::base_strategy::BaseStrategy;
+use rust_i18n::t;
 use std::{
     env::var,
     fs::{read_to_string, OpenOptions},
@@ -45,7 +46,7 @@ impl FromStr for Version {
         // They cannot be all 0s
         assert!(
             !(major == 0 && minor == 0 && patch == 0),
-            "Version numbers can not be all 0s"
+            "Version numbers cannot be all 0s"
         );
 
         Ok(Self {
@@ -118,12 +119,15 @@ pub(crate) fn first_run_of_major_release() -> Result<bool> {
 
 /// Print breaking changes to the user.
 pub(crate) fn print_breaking_changes() {
-    let header = format!("Topgrade {VERSION_STR} Breaking Changes");
+    let header = format!(
+        "{}",
+        t!("Topgrade {version_str} Breaking Changes", version_str = VERSION_STR)
+    );
     print_separator(header);
     let contents = if BREAKINGCHANGES.is_empty() {
-        "No Breaking changes"
+        t!("No Breaking changes").to_string()
     } else {
-        BREAKINGCHANGES
+        BREAKINGCHANGES.to_string()
     };
     println!("{contents}\n");
 }
@@ -159,7 +163,7 @@ mod test {
     }
 
     #[test]
-    #[should_panic(expected = "Version numbers can not be all 0s")]
+    #[should_panic(expected = "Version numbers cannot be all 0s")]
     fn invalid_version() {
         let all_0 = "0.0.0";
         all_0.parse::<Version>().unwrap();
@@ -45,13 +45,13 @@ impl TryFrom<&Output> for Utf8Output {
     type Error = eyre::Error;
 
     fn try_from(Output { status, stdout, stderr }: &Output) -> Result<Self, Self::Error> {
-        let stdout = String::from_utf8(stdout.to_vec()).map_err(|err| {
+        let stdout = String::from_utf8(stdout.clone()).map_err(|err| {
             eyre!(
                 "Stdout contained invalid UTF-8: {}",
                 String::from_utf8_lossy(err.as_bytes())
             )
         })?;
-        let stderr = String::from_utf8(stderr.to_vec()).map_err(|err| {
+        let stderr = String::from_utf8(stderr.clone()).map_err(|err| {
             eyre!(
                 "Stderr contained invalid UTF-8: {}",
                 String::from_utf8_lossy(err.as_bytes())
@@ -149,6 +149,7 @@ pub trait CommandExt {
     /// Like [`Command::spawn`], but gives a nice error message if the command fails to
     /// execute.
     #[track_caller]
+    #[allow(dead_code)]
     fn spawn_checked(&mut self) -> eyre::Result<Self::Child>;
 }
 
src/config.rs (441 changed lines)
@@ -5,7 +5,7 @@ use std::fs::{write, File};
 use std::io::Write;
 use std::path::{Path, PathBuf};
 use std::process::Command;
-use std::{env, fs};
+use std::{env, fmt, fs};
 
 use clap::{Parser, ValueEnum};
 use clap_complete::Shell;
@@ -15,16 +15,18 @@ use etcetera::base_strategy::BaseStrategy;
 use merge::Merge;
 use regex::Regex;
 use regex_split::RegexSplit;
+use rust_i18n::t;
 use serde::Deserialize;
-use strum::{EnumIter, EnumString, EnumVariantNames, IntoEnumIterator};
+use strum::{EnumIter, EnumString, IntoEnumIterator, VariantNames};
 use which_crate::which;
 
 use super::utils::editor;
 use crate::command::CommandExt;
 use crate::sudo::SudoKind;
-use crate::utils::{hostname, string_prepend_str};
+use crate::utils::string_prepend_str;
 use tracing::{debug, error};
 
+// TODO: Add i18n to this. Tracking issue: https://github.com/topgrade-rs/topgrade/issues/859
 pub static EXAMPLE_CONFIG: &str = include_str!("../config.example.toml");
 
 /// Topgrade's default log level.
@@ -44,7 +46,7 @@ macro_rules! str_value {
 
 pub type Commands = BTreeMap<String, String>;
 
-#[derive(ValueEnum, EnumString, EnumVariantNames, Debug, Clone, PartialEq, Eq, Deserialize, EnumIter, Copy)]
+#[derive(ValueEnum, EnumString, VariantNames, Debug, Clone, PartialEq, Eq, Deserialize, EnumIter, Copy)]
 #[clap(rename_all = "snake_case")]
 #[serde(rename_all = "snake_case")]
 #[strum(serialize_all = "snake_case")]
@@ -53,7 +55,9 @@ pub enum Step {
     AppMan,
     Asdf,
     Atom,
+    Aqua,
     Audit,
+    AutoCpufreq,
     Bin,
     Bob,
     BrewCask,
@@ -61,9 +65,12 @@ pub enum Step {
     Bun,
     BunPackages,
     Cargo,
+    Certbot,
     Chezmoi,
     Chocolatey,
     Choosenim,
+    CinnamonSpices,
+    ClamAvDb,
     Composer,
     Conda,
     ConfigUpdate,
@@ -74,6 +81,7 @@ pub enum Step {
     Distrobox,
     DkpPacman,
     Dotnet,
+    Elan,
     Emacs,
     Firmware,
     Flatpak,
@@ -90,6 +98,7 @@ pub enum Step {
     Haxelib,
     Helm,
     HomeManager,
+    JetBrainsToolbox,
     Jetpack,
     Julia,
     Juliaup,
@@ -97,12 +106,15 @@ pub enum Step {
     Helix,
     Krew,
     Lure,
+    Lensfun,
     Macports,
     Mamba,
     Miktex,
     Mas,
     Maza,
     Micro,
+    MicrosoftStore,
+    Mise,
     Myrepos,
     Nix,
     Node,
@@ -115,11 +127,16 @@ pub enum Step {
     PipReviewLocal,
     Pipupgrade,
     Pipx,
+    Pipxu,
+    Pixi,
     Pkg,
     Pkgin,
+    PlatformioCore,
     Pnpm,
+    Poetry,
     Powershell,
     Protonup,
+    Pyenv,
     Raco,
     Rcm,
     Remotes,
@@ -127,6 +144,7 @@ pub enum Step {
     Rtcl,
     RubyGems,
     Rustup,
+    Rye,
     Scoop,
     Sdkman,
     SelfUpdate,
@@ -142,15 +160,22 @@ pub enum Step {
     Tlmgr,
     Tmux,
     Toolbx,
+    Uv,
     Vagrant,
     Vcpkg,
     Vim,
+    VoltaPackages,
     Vscode,
+    Vscodium,
+    Waydroid,
     Winget,
     Wsl,
     WslUpdate,
+    Xcodes,
     Yadm,
     Yarn,
+    Zigup,
+    Zvm,
 }
 
 #[derive(Deserialize, Default, Debug, Merge)]
@@ -165,6 +190,7 @@ pub struct Include {
 pub struct Containers {
     #[merge(strategy = crate::utils::merge_strategies::vec_prepend_opt)]
     ignored_containers: Option<Vec<String>>,
+    runtime: Option<ContainerRuntime>,
 }
 
 #[derive(Deserialize, Default, Debug, Merge)]
@@ -197,9 +223,9 @@ pub struct Windows {
     accept_all_updates: Option<bool>,
     self_rename: Option<bool>,
     open_remotes_in_new_terminal: Option<bool>,
-    enable_winget: Option<bool>,
     wsl_update_pre_release: Option<bool>,
     wsl_update_use_web_download: Option<bool>,
+    winget_silent_install: Option<bool>,
 }
 
 #[derive(Deserialize, Default, Debug, Merge)]
@@ -209,6 +235,7 @@ pub struct Python {
     enable_pip_review_local: Option<bool>,
     enable_pipupgrade: Option<bool>,
     pipupgrade_arguments: Option<String>,
+    poetry_force_self_update: Option<bool>,
 }
 
 #[derive(Deserialize, Default, Debug, Merge)]
@@ -235,6 +262,13 @@ pub struct NPM {
     use_sudo: Option<bool>,
 }
 
+#[derive(Deserialize, Default, Debug, Merge)]
+#[serde(deny_unknown_fields)]
+#[allow(clippy::upper_case_acronyms)]
+pub struct Deno {
+    version: Option<String>,
+}
+
 #[derive(Deserialize, Default, Debug, Merge)]
 #[serde(deny_unknown_fields)]
 #[allow(clippy::upper_case_acronyms)]
@@ -253,7 +287,10 @@ pub struct Flatpak {
 #[serde(deny_unknown_fields)]
 pub struct Brew {
     greedy_cask: Option<bool>,
+    greedy_latest: Option<bool>,
+    greedy_auto_updates: Option<bool>,
     autoremove: Option<bool>,
+    fetch_head: Option<bool>,
 }
 
 #[derive(Debug, Deserialize, Clone, Copy)]
@@ -270,6 +307,22 @@ pub enum ArchPackageManager {
     Yay,
 }
 
+#[derive(Clone, Copy, Debug, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum ContainerRuntime {
+    Docker,
+    Podman,
+}
+
+impl fmt::Display for ContainerRuntime {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            ContainerRuntime::Docker => write!(f, "docker"),
+            ContainerRuntime::Podman => write!(f, "podman"),
+        }
+    }
+}
+
 #[derive(Deserialize, Default, Debug, Merge)]
 #[serde(deny_unknown_fields)]
 pub struct Linux {
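The new `ContainerRuntime` enum pairs a snake_case serde representation with a hand-written `Display`, so the value written in topgrade.toml and the name handed to the container CLI stay in sync. The sketch below is a standalone illustration of that round trip, using simplified copies of the types from the diff (it assumes the `serde` and `toml` crates; it is not code from the PR):

```rust
use serde::Deserialize;
use std::fmt;

// Simplified copy of the enum added in src/config.rs.
#[derive(Clone, Copy, Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
enum ContainerRuntime {
    Docker,
    Podman,
}

impl fmt::Display for ContainerRuntime {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ContainerRuntime::Docker => write!(f, "docker"),
            ContainerRuntime::Podman => write!(f, "podman"),
        }
    }
}

#[derive(Deserialize)]
struct Containers {
    runtime: Option<ContainerRuntime>,
}

fn main() {
    // `rename_all = "snake_case"` lets serde accept the lowercase TOML value...
    let cfg: Containers = toml::from_str(r#"runtime = "podman""#).unwrap();
    // ...and Display turns the variant back into the CLI-friendly name.
    assert_eq!(cfg.runtime.unwrap().to_string(), "podman");
}
```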
@@ -312,6 +365,7 @@ pub struct Linux {
     redhat_distro_sync: Option<bool>,
     suse_dup: Option<bool>,
     rpm_ostree: Option<bool>,
+    bootc: Option<bool>,
 
     #[merge(strategy = crate::utils::merge_strategies::string_append_opt)]
     emerge_sync_flags: Option<String>,
@@ -369,6 +423,8 @@ pub struct Misc {
 
     run_in_tmux: Option<bool>,
 
+    tmux_session_mode: Option<TmuxSessionMode>,
+
     cleanup: Option<bool>,
 
     notify_each_step: Option<bool>,
@@ -385,6 +441,46 @@ pub struct Misc {
     log_filters: Option<Vec<String>>,
 }
 
+#[derive(Clone, Copy, Debug, Deserialize, ValueEnum)]
+#[clap(rename_all = "snake_case")]
+#[serde(rename_all = "snake_case")]
+pub enum TmuxSessionMode {
+    AttachIfNotInSession,
+    AttachAlways,
+}
+
+pub struct TmuxConfig {
+    pub args: Vec<String>,
+    pub session_mode: TmuxSessionMode,
+}
+
+#[derive(Deserialize, Default, Debug, Merge)]
+#[serde(deny_unknown_fields)]
+pub struct Lensfun {
+    use_sudo: Option<bool>,
+}
+
+#[derive(Deserialize, Default, Debug, Merge)]
+#[serde(deny_unknown_fields)]
+pub struct JuliaConfig {
+    startup_file: Option<bool>,
+}
+
+#[derive(Deserialize, Default, Debug, Merge)]
+#[serde(deny_unknown_fields)]
+pub struct Zigup {
+    target_versions: Option<Vec<String>>,
+    install_dir: Option<String>,
+    path_link: Option<String>,
+    cleanup: Option<bool>,
+}
+
+#[derive(Deserialize, Default, Debug, Merge)]
+#[serde(deny_unknown_fields)]
+pub struct VscodeConfig {
+    profile: Option<String>,
+}
+
 #[derive(Deserialize, Default, Debug, Merge)]
 #[serde(deny_unknown_fields)]
 /// Configuration file
@@ -431,6 +527,9 @@ pub struct ConfigFile {
     #[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
     yarn: Option<Yarn>,
 
+    #[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
+    deno: Option<Deno>,
+
     #[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
     vim: Option<Vim>,
 
@@ -445,6 +544,18 @@ pub struct ConfigFile {
 
     #[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
     distrobox: Option<Distrobox>,
+
+    #[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
+    lensfun: Option<Lensfun>,
+
+    #[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
+    julia: Option<JuliaConfig>,
+
+    #[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
+    zigup: Option<Zigup>,
+
+    #[merge(strategy = crate::utils::merge_strategies::inner_merge_opt)]
+    vscode: Option<VscodeConfig>,
 }
 
 fn config_directory() -> PathBuf {
@@ -470,16 +581,16 @@ impl ConfigFile {
 
         let config_directory = config_directory();
 
-        let possible_config_paths = vec![
+        let possible_config_paths = [
             config_directory.join("topgrade.toml"),
             config_directory.join("topgrade/topgrade.toml"),
         ];
 
         // Search for the main config file
-        for path in possible_config_paths.iter() {
+        for path in &possible_config_paths {
             if path.exists() {
                 debug!("Configuration at {}", path.display());
-                res.0 = path.clone();
+                res.0.clone_from(path);
                 break;
             }
         }
@@ -488,7 +599,7 @@ impl ConfigFile {
 
         // If no config file exists, create a default one in the config directory
         if !res.0.exists() && res.1.is_empty() {
-            res.0 = possible_config_paths[0].clone();
+            res.0.clone_from(&possible_config_paths[0]);
             debug!("No configuration exists");
             write(&res.0, EXAMPLE_CONFIG).map_err(|e| {
                 debug!(
@@ -511,7 +622,9 @@ impl ConfigFile {
         if dir_to_search.exists() {
             for entry in fs::read_dir(dir_to_search)? {
                 let entry = entry?;
-                if entry.file_type()?.is_file() {
+                // Use `Path::is_file()` here to traverse symbolic links.
+                // `DirEntry::file_type()` and `FileType::is_file()` will not traverse symbolic links.
+                if entry.path().is_file() {
                     debug!(
                         "Found additional (directory) configuration file at {}",
                         entry.path().display()
@@ -544,13 +657,11 @@ impl ConfigFile {
            to read the include directory before returning the main config path
         */
         for include in dir_include {
-            let include_contents = fs::read_to_string(&include).map_err(|e| {
+            let include_contents = fs::read_to_string(&include).inspect_err(|_| {
                 error!("Unable to read {}", include.display());
-                e
             })?;
-            let include_contents_parsed = toml::from_str(include_contents.as_str()).map_err(|e| {
+            let include_contents_parsed = toml::from_str(include_contents.as_str()).inspect_err(|_| {
                 error!("Failed to deserialize {}", include.display());
-                e
             })?;
 
             result.merge(include_contents_parsed);
@@ -565,9 +676,8 @@ impl ConfigFile {
             return Ok(result);
         }
 
-        let mut contents_non_split = fs::read_to_string(&config_path).map_err(|e| {
+        let mut contents_non_split = fs::read_to_string(&config_path).inspect_err(|_| {
             error!("Unable to read {}", config_path.display());
-            e
         })?;
 
         Self::ensure_misc_is_present(&mut contents_non_split, &config_path);
@@ -578,9 +688,8 @@ impl ConfigFile {
         let contents_split = regex_match_include.split_inclusive_left(contents_non_split.as_str());
 
         for contents in contents_split {
-            let config_file_include_only: ConfigFileIncludeOnly = toml::from_str(contents).map_err(|e| {
+            let config_file_include_only: ConfigFileIncludeOnly = toml::from_str(contents).inspect_err(|_| {
                 error!("Failed to deserialize an include section of {}", config_path.display());
-                e
             })?;
 
             if let Some(includes) = &config_file_include_only.include {
@@ -592,14 +701,14 @@ impl ConfigFile {
                     let include_contents = match fs::read_to_string(&include_path) {
                         Ok(c) => c,
                         Err(e) => {
-                            error!("Unable to read {}: {}", include_path.display(), e);
+                            error!("Unable to read {}: {e}", include_path.display(),);
                             continue;
                         }
                     };
                     match toml::from_str::<Self>(&include_contents) {
                         Ok(include_parsed) => result.merge(include_parsed),
                         Err(e) => {
-                            error!("Failed to deserialize {}: {}", include_path.display(), e);
+                            error!("Failed to deserialize {}: {e}", include_path.display(),);
                             continue;
                         }
                     };
@@ -609,14 +718,17 @@ impl ConfigFile {
 
             match toml::from_str::<Self>(contents) {
                 Ok(contents) => result.merge(contents),
-                Err(e) => error!("Failed to deserialize {}: {}", config_path.display(), e),
+                Err(e) => error!("Failed to deserialize {}: {e}", config_path.display(),),
             }
         }
 
         if let Some(paths) = result.git.as_mut().and_then(|git| git.repos.as_mut()) {
             for path in paths.iter_mut() {
                 let expanded = shellexpand::tilde::<&str>(&path.as_ref()).into_owned();
-                debug!("Path {} expanded to {}", path, expanded);
+                debug!(
+                    "{}",
+                    t!("Path {path} expanded to {expanded}", path = path, expanded = expanded)
+                );
                 *path = expanded;
             }
         }
@@ -654,63 +766,65 @@ impl ConfigFile {
 }
 
 // Command line arguments
+// TODO: i18n of clap currently not easily possible. Waiting for https://github.com/clap-rs/clap/issues/380
+// Tracking issue for i18n: https://github.com/topgrade-rs/topgrade/issues/859
 #[derive(Parser, Debug)]
-#[clap(name = "Topgrade", version)]
+#[command(name = "topgrade", version)]
 pub struct CommandLineArgs {
     /// Edit the configuration file
-    #[clap(long = "edit-config")]
+    #[arg(long = "edit-config")]
     edit_config: bool,
 
     /// Show config reference
-    #[clap(long = "config-reference")]
+    #[arg(long = "config-reference")]
     show_config_reference: bool,
 
     /// Run inside tmux
-    #[clap(short = 't', long = "tmux")]
+    #[arg(short = 't', long = "tmux")]
     run_in_tmux: bool,
 
     /// Cleanup temporary or old files
-    #[clap(short = 'c', long = "cleanup")]
+    #[arg(short = 'c', long = "cleanup")]
     cleanup: bool,
 
     /// Print what would be done
-    #[clap(short = 'n', long = "dry-run")]
+    #[arg(short = 'n', long = "dry-run")]
     dry_run: bool,
 
     /// Do not ask to retry failed steps
-    #[clap(long = "no-retry")]
+    #[arg(long = "no-retry")]
     no_retry: bool,
 
     /// Do not perform upgrades for the given steps
-    #[clap(long = "disable", value_name = "STEP", value_enum, num_args = 1..)]
+    #[arg(long = "disable", value_name = "STEP", value_enum, num_args = 1..)]
     disable: Vec<Step>,
 
-    /// Perform only the specified steps (experimental)
-    #[clap(long = "only", value_name = "STEP", value_enum, num_args = 1..)]
+    /// Perform only the specified steps
+    #[arg(long = "only", value_name = "STEP", value_enum, num_args = 1..)]
     only: Vec<Step>,
 
     /// Run only specific custom commands
-    #[clap(long = "custom-commands", value_name = "NAME", num_args = 1..)]
+    #[arg(long = "custom-commands", value_name = "NAME", num_args = 1..)]
     custom_commands: Vec<String>,
 
     /// Set environment variables
-    #[clap(long = "env", value_name = "NAME=VALUE", num_args = 1..)]
+    #[arg(long = "env", value_name = "NAME=VALUE", num_args = 1..)]
     env: Vec<String>,
 
     /// Output debug logs. Alias for `--log-filter debug`.
-    #[clap(short = 'v', long = "verbose")]
+    #[arg(short = 'v', long = "verbose")]
     pub verbose: bool,
 
     /// Prompt for a key before exiting
-    #[clap(short = 'k', long = "keep")]
+    #[arg(short = 'k', long = "keep")]
     keep_at_end: bool,
 
     /// Skip sending a notification at the end of a run
-    #[clap(long = "skip-notify")]
+    #[arg(long = "skip-notify")]
     skip_notify: bool,
 
     /// Say yes to package manager's prompt
-    #[clap(
+    #[arg(
         short = 'y',
         long = "yes",
         value_name = "STEP",
@@ -720,37 +834,37 @@ pub struct CommandLineArgs {
     yes: Option<Vec<Step>>,
 
     /// Don't pull the predefined git repos
-    #[clap(long = "disable-predefined-git-repos")]
+    #[arg(long = "disable-predefined-git-repos")]
     disable_predefined_git_repos: bool,
 
     /// Alternative configuration file
-    #[clap(long = "config", value_name = "PATH")]
+    #[arg(long = "config", value_name = "PATH")]
     config: Option<PathBuf>,
 
     /// A regular expression for restricting remote host execution
-    #[clap(long = "remote-host-limit", value_name = "REGEX")]
+    #[arg(long = "remote-host-limit", value_name = "REGEX")]
     remote_host_limit: Option<Regex>,
 
     /// Show the reason for skipped steps
-    #[clap(long = "show-skipped")]
+    #[arg(long = "show-skipped")]
     show_skipped: bool,
 
     /// Tracing filter directives.
     ///
-    /// See: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/struct.EnvFilter.html
-    #[clap(long, default_value = DEFAULT_LOG_LEVEL)]
+    /// See: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives
+    #[arg(long, default_value = DEFAULT_LOG_LEVEL)]
     pub log_filter: String,
 
     /// Print completion script for the given shell and exit
-    #[clap(long, value_enum, hide = true)]
+    #[arg(long, value_enum, hide = true)]
     pub gen_completion: Option<Shell>,
 
     /// Print roff manpage and exit
-    #[clap(long, hide = true)]
+    #[arg(long, hide = true)]
     pub gen_manpage: bool,
 
     /// Don't update Topgrade
-    #[clap(long = "no-self-update")]
+    #[arg(long = "no-self-update")]
     pub no_self_update: bool,
 }
 
@@ -811,7 +925,7 @@ impl Config {
             ConfigFile::read(opt.config.clone()).unwrap_or_else(|e| {
                 // Inform the user about errors when loading the configuration,
                 // but fallback to the default config to at least attempt to do something
-                error!("failed to load configuration: {}", e);
+                error!("failed to load configuration: {e}");
                 ConfigFile::default()
             })
         } else {
@@ -861,6 +975,15 @@ impl Config {
             .and_then(|containers| containers.ignored_containers.as_ref())
     }
 
+    /// The preferred runtime for container updates (podman / docker).
+    pub fn containers_runtime(&self) -> ContainerRuntime {
+        self.config_file
+            .containers
+            .as_ref()
+            .and_then(|containers| containers.runtime)
+            .unwrap_or(ContainerRuntime::Docker) // defaults to a popular choice
+    }
+
     /// Tell whether the specified step should run.
     ///
     /// If the step appears either in the `--disable` command line argument
@@ -917,6 +1040,15 @@ impl Config {
             .unwrap_or(false)
     }
 
+    /// The preferred way to run the new tmux session.
+    fn tmux_session_mode(&self) -> TmuxSessionMode {
+        self.config_file
+            .misc
+            .as_ref()
+            .and_then(|misc| misc.tmux_session_mode)
+            .unwrap_or(TmuxSessionMode::AttachIfNotInSession)
+    }
+
     /// Tell whether we should perform cleanup steps.
     pub fn cleanup(&self) -> bool {
         self.opt.cleanup
@@ -974,8 +1106,16 @@ impl Config {
         self.config_file.git.as_ref().and_then(|git| git.arguments.as_ref())
     }
 
+    pub fn tmux_config(&self) -> Result<TmuxConfig> {
+        let args = self.tmux_arguments()?;
+        Ok(TmuxConfig {
+            args,
+            session_mode: self.tmux_session_mode(),
+        })
+    }
+
     /// Extra Tmux arguments
-    pub fn tmux_arguments(&self) -> Result<Vec<String>> {
+    fn tmux_arguments(&self) -> Result<Vec<String>> {
         let args = &self
             .config_file
             .misc
@@ -1087,6 +1227,24 @@ impl Config {
             .unwrap_or(false)
     }
 
+    /// Whether Brew cask should be greedy_latest
+    pub fn brew_greedy_latest(&self) -> bool {
+        self.config_file
+            .brew
+            .as_ref()
+            .and_then(|c| c.greedy_latest)
+            .unwrap_or(false)
+    }
+
+    /// Whether Brew cask should be auto_updates
+    pub fn brew_greedy_auto_updates(&self) -> bool {
+        self.config_file
+            .brew
+            .as_ref()
+            .and_then(|c| c.greedy_auto_updates)
+            .unwrap_or(false)
+    }
+
     /// Whether Brew should autoremove
     pub fn brew_autoremove(&self) -> bool {
         self.config_file
@@ -1096,6 +1254,15 @@ impl Config {
             .unwrap_or(false)
     }
 
+    /// Whether Brew should upgrade formulae built from the HEAD branch
+    pub fn brew_fetch_head(&self) -> bool {
+        self.config_file
+            .brew
+            .as_ref()
+            .and_then(|c| c.fetch_head)
+            .unwrap_or(false)
+    }
+
     /// Whether Composer should update itself
     pub fn composer_self_update(&self) -> bool {
         self.config_file
@@ -1319,14 +1486,22 @@ impl Config {
             .unwrap_or(false)
     }
 
+    /// Use bootc in *when bootc is detected* (default: false)
+    pub fn bootc(&self) -> bool {
+        self.config_file
+            .linux
+            .as_ref()
+            .and_then(|linux| linux.bootc)
+            .unwrap_or(false)
+    }
+
     /// Determine if we should ignore failures for this step
     pub fn ignore_failure(&self, step: Step) -> bool {
         self.config_file
             .misc
             .as_ref()
             .and_then(|misc| misc.ignore_failures.as_ref())
-            .map(|v| v.contains(&step))
-            .unwrap_or(false)
+            .is_some_and(|v| v.contains(&step))
     }
 
     pub fn use_predefined_git_repos(&self) -> bool {
@@ -1376,6 +1551,14 @@ impl Config {
             .unwrap_or(false)
     }
 
+    pub fn winget_silent_install(&self) -> bool {
+        self.config_file
+            .windows
+            .as_ref()
+            .and_then(|windows| windows.winget_silent_install)
+            .unwrap_or(true)
+    }
+
     pub fn sudo_command(&self) -> Option<SudoKind> {
         self.config_file.misc.as_ref().and_then(|misc| misc.sudo_command)
     }
@@ -1407,6 +1590,10 @@ impl Config {
             .unwrap_or(false)
     }
 
+    pub fn deno_version(&self) -> Option<&str> {
+        self.config_file.deno.as_ref().and_then(|deno| deno.version.as_deref())
+    }
+
     #[cfg(target_os = "linux")]
     pub fn firmware_upgrade(&self) -> bool {
         self.config_file
@@ -1431,37 +1618,28 @@ impl Config {
     #[cfg(target_os = "linux")]
     str_value!(linux, emerge_update_flags);
 
-    pub fn should_execute_remote(&self, remote: &str) -> bool {
-        if let Ok(hostname) = hostname() {
-            if remote == hostname {
+    pub fn should_execute_remote(&self, hostname: Result<String>, remote: &str) -> bool {
+        let remote_host = remote.split_once('@').map_or(remote, |(_, host)| host);
+
+        if let Ok(hostname) = hostname {
+            if remote_host == hostname {
                 return false;
             }
         }
 
-        if let Some(limit) = self.opt.remote_host_limit.as_ref() {
-            return limit.is_match(remote);
+        if let Some(limit) = &self.opt.remote_host_limit.as_ref() {
+            return limit.is_match(remote_host);
         }
 
         true
     }
 
-    #[cfg(windows)]
-    pub fn enable_winget(&self) -> bool {
-        return self
-            .config_file
-            .windows
-            .as_ref()
-            .and_then(|w| w.enable_winget)
-            .unwrap_or(false);
-    }
-
     pub fn enable_pipupgrade(&self) -> bool {
-        return self
-            .config_file
+        self.config_file
             .python
             .as_ref()
            .and_then(|python| python.enable_pipupgrade)
-            .unwrap_or(false);
+            .unwrap_or(false)
     }
     pub fn pipupgrade_arguments(&self) -> &str {
         self.config_file
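The reworked `should_execute_remote` now strips an optional `user@` prefix before comparing the remote against the local hostname or the `--remote-host-limit` regex. A self-contained sketch of just that host-extraction step (the helper name `remote_host` is mine, not from the PR):

```rust
// Keep everything after the first '@' if there is one, otherwise the whole string.
fn remote_host(remote: &str) -> &str {
    remote.split_once('@').map_or(remote, |(_, host)| host)
}

fn main() {
    assert_eq!(remote_host("user@remote_hostname"), "remote_hostname");
    assert_eq!(remote_host("remote_hostname"), "remote_hostname");
}
```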
@@ -1471,20 +1649,25 @@ impl Config {
             .unwrap_or("")
     }
     pub fn enable_pip_review(&self) -> bool {
-        return self
-            .config_file
+        self.config_file
             .python
             .as_ref()
             .and_then(|python| python.enable_pip_review)
-            .unwrap_or(false);
+            .unwrap_or(false)
     }
     pub fn enable_pip_review_local(&self) -> bool {
-        return self
-            .config_file
+        self.config_file
             .python
             .as_ref()
             .and_then(|python| python.enable_pip_review_local)
-            .unwrap_or(false);
+            .unwrap_or(false)
+    }
+    pub fn poetry_force_self_update(&self) -> bool {
+        self.config_file
+            .python
+            .as_ref()
+            .and_then(|python| python.poetry_force_self_update)
+            .unwrap_or(false)
     }
 
     pub fn display_time(&self) -> bool {
@@ -1502,11 +1685,70 @@ impl Config {
 
         self.opt.custom_commands.iter().any(|s| s == name)
     }
+
+    pub fn lensfun_use_sudo(&self) -> bool {
+        self.config_file
+            .lensfun
+            .as_ref()
+            .and_then(|lensfun| lensfun.use_sudo)
+            .unwrap_or(false)
+    }
+
+    pub fn julia_use_startup_file(&self) -> bool {
+        self.config_file
+            .julia
+            .as_ref()
+            .and_then(|julia| julia.startup_file)
+            .unwrap_or(true)
+    }
+
+    pub fn zigup_target_versions(&self) -> Vec<String> {
+        self.config_file
+            .zigup
+            .as_ref()
+            .and_then(|zigup| zigup.target_versions.clone())
+            .unwrap_or(vec!["master".to_owned()])
+    }
+
+    pub fn zigup_install_dir(&self) -> Option<&str> {
+        self.config_file
+            .zigup
+            .as_ref()
+            .and_then(|zigup| zigup.install_dir.as_deref())
+    }
+
+    pub fn zigup_path_link(&self) -> Option<&str> {
+        self.config_file
+            .zigup
+            .as_ref()
+            .and_then(|zigup| zigup.path_link.as_deref())
+    }
+
+    pub fn zigup_cleanup(&self) -> bool {
+        self.config_file
+            .zigup
+            .as_ref()
+            .and_then(|zigup| zigup.cleanup)
+            .unwrap_or(false)
+    }
+
+    pub fn vscode_profile(&self) -> Option<&str> {
+        let vscode_cfg = self.config_file.vscode.as_ref()?;
+        let profile = vscode_cfg.profile.as_ref()?;
+
+        if profile.is_empty() {
+            None
+        } else {
+            Some(profile.as_str())
+        }
+    }
 }
 
 #[cfg(test)]
 mod test {
-    use crate::config::ConfigFile;
+    use crate::config::*;
+    use color_eyre::eyre::eyre;
 
     /// Test the default configuration in `config.example.toml` is valid.
     #[test]
@@ -1515,4 +1757,51 @@ mod test {
 
         assert!(toml::from_str::<ConfigFile>(str).is_ok());
     }
+
+    fn config() -> Config {
+        Config {
+            opt: CommandLineArgs::parse_from::<_, String>([]),
+            config_file: ConfigFile::default(),
+            allowed_steps: Vec::new(),
+        }
+    }
+
+    #[test]
+    fn test_should_execute_remote_different_hostname() {
+        assert!(config().should_execute_remote(Ok("hostname".to_string()), "remote_hostname"));
+    }
+
+    #[test]
+    fn test_should_execute_remote_different_hostname_with_user() {
+        assert!(config().should_execute_remote(Ok("hostname".to_string()), "user@remote_hostname"));
+    }
+
+    #[test]
+    fn test_should_execute_remote_unknown_hostname() {
+        assert!(config().should_execute_remote(Err(eyre!("failed to get hostname")), "remote_hostname"));
+    }
+
+    #[test]
+    fn test_should_not_execute_remote_same_hostname() {
+        assert!(!config().should_execute_remote(Ok("hostname".to_string()), "hostname"));
+    }
+
+    #[test]
+    fn test_should_not_execute_remote_same_hostname_with_user() {
+        assert!(!config().should_execute_remote(Ok("hostname".to_string()), "user@hostname"));
+    }
+
+    #[test]
+    fn test_should_execute_remote_matching_limit() {
+        let mut config = config();
+        config.opt = CommandLineArgs::parse_from(["topgrade", "--remote-host-limit", "remote_hostname"]);
+        assert!(config.should_execute_remote(Ok("hostname".to_string()), "user@remote_hostname"));
+    }
+
+    #[test]
+    fn test_should_not_execute_remote_not_matching_limit() {
+        let mut config = config();
+        config.opt = CommandLineArgs::parse_from(["topgrade", "--remote-host-limit", "other_hostname"]);
+        assert!(!config.should_execute_remote(Ok("hostname".to_string()), "user@remote_hostname"));
+    }
 }
@@ -11,9 +11,9 @@ pub fn interrupted() -> bool {
 /// Clears the interrupted flag
 pub fn unset_interrupted() {
     debug_assert!(INTERRUPTED.load(Ordering::SeqCst));
-    INTERRUPTED.store(false, Ordering::SeqCst)
+    INTERRUPTED.store(false, Ordering::SeqCst);
 }
 
 pub fn set_interrupted() {
-    INTERRUPTED.store(true, Ordering::SeqCst)
+    INTERRUPTED.store(true, Ordering::SeqCst);
 }

@@ -4,7 +4,7 @@ use nix::sys::signal::{sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal
 
 /// Handle SIGINT. Set the interruption flag.
 extern "C" fn handle_sigint(_: i32) {
-    set_interrupted()
+    set_interrupted();
 }
 
 /// Set the necessary signal handlers.
src/error.rs (81 changed lines)
@@ -1,41 +1,98 @@
-use std::process::ExitStatus;
+use std::{fmt::Display, process::ExitStatus};
 
+use rust_i18n::t;
 use thiserror::Error;
 
 #[derive(Error, Debug, PartialEq, Eq)]
 pub enum TopgradeError {
-    #[error("`{0}` failed: {1}")]
     ProcessFailed(String, ExitStatus),
 
-    #[error("`{0}` failed: {1}")]
     ProcessFailedWithOutput(String, ExitStatus, String),
 
-    #[error("Unknown Linux Distribution")]
     #[cfg(target_os = "linux")]
     UnknownLinuxDistribution,
 
-    #[error("File \"/etc/os-release\" does not exist or is empty")]
     #[cfg(target_os = "linux")]
     EmptyOSReleaseFile,
 
-    #[error("Failed getting the system package manager")]
     #[cfg(target_os = "linux")]
     FailedGettingPackageManager,
 }
+
+impl Display for TopgradeError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            TopgradeError::ProcessFailed(process, exit_status) => {
+                write!(
+                    f,
+                    "{}",
+                    t!(
+                        "`{process}` failed: {exit_status}",
+                        process = process,
+                        exit_status = exit_status
+                    )
+                )
+            }
+            TopgradeError::ProcessFailedWithOutput(process, exit_status, output) => {
+                write!(
+                    f,
+                    "{}",
+                    t!(
+                        "`{process}` failed: {exit_status} with {output}",
+                        process = process,
+                        exit_status = exit_status,
+                        output = output
+                    )
+                )
+            }
+            #[cfg(target_os = "linux")]
+            TopgradeError::UnknownLinuxDistribution => write!(f, "{}", t!("Unknown Linux Distribution")),
+            #[cfg(target_os = "linux")]
+            TopgradeError::EmptyOSReleaseFile => {
+                write!(f, "{}", t!("File \"/etc/os-release\" does not exist or is empty"))
+            }
+            #[cfg(target_os = "linux")]
+            TopgradeError::FailedGettingPackageManager => {
+                write!(f, "{}", t!("Failed getting the system package manager"))
+            }
+        }
+    }
+}
 
 #[derive(Error, Debug)]
-#[error("A step failed")]
 pub struct StepFailed;
 
-#[derive(Error, Debug)]
-#[error("Dry running")]
-pub struct DryRun();
+impl Display for StepFailed {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", t!("A step failed"))
+    }
+}
+
+#[derive(Error, Debug)]
+pub struct DryRun();
+
+impl Display for DryRun {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", t!("Dry running"))
+    }
+}
 
 #[derive(Error, Debug)]
-#[error("{0}")]
 pub struct SkipStep(pub String);
 
+impl Display for SkipStep {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.0)
+    }
+}
+
 #[cfg(all(windows, feature = "self-update"))]
 #[derive(Error, Debug)]
-#[error("Topgrade Upgraded")]
 pub struct Upgraded(pub ExitStatus);
+
+#[cfg(all(windows, feature = "self-update"))]
+impl Display for Upgraded {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", t!("Topgrade Upgraded"))
+    }
+}
@@ -1,8 +1,7 @@
 #![allow(dead_code)]
 use crate::executor::RunType;
-use crate::git::Git;
 use crate::sudo::Sudo;
-use crate::utils::{require_option, REQUIRE_SUDO};
+use crate::utils::{get_require_sudo_string, require_option};
 use crate::{config::Config, executor::Executor};
 use color_eyre::eyre::Result;
 use std::env::var;
@@ -12,7 +11,6 @@ use std::sync::Mutex;
 pub struct ExecutionContext<'a> {
     run_type: RunType,
     sudo: Option<Sudo>,
-    git: &'a Git,
     config: &'a Config,
     /// Name of a tmux session to execute commands in, if any.
     /// This is used in `./steps/remote/ssh.rs`, where we want to run `topgrade` in a new
@@ -23,12 +21,11 @@ pub struct ExecutionContext<'a> {
 }
 
 impl<'a> ExecutionContext<'a> {
-    pub fn new(run_type: RunType, sudo: Option<Sudo>, git: &'a Git, config: &'a Config) -> Self {
+    pub fn new(run_type: RunType, sudo: Option<Sudo>, config: &'a Config) -> Self {
         let under_ssh = var("SSH_CLIENT").is_ok() || var("SSH_TTY").is_ok();
         Self {
             run_type,
             sudo,
-            git,
             config,
             tmux_session: Mutex::new(None),
             under_ssh,
@@ -36,7 +33,7 @@ impl<'a> ExecutionContext<'a> {
     }
 
     pub fn execute_elevated(&self, command: &Path, interactive: bool) -> Result<Executor> {
-        let sudo = require_option(self.sudo.as_ref(), REQUIRE_SUDO.to_string())?;
+        let sudo = require_option(self.sudo.as_ref(), get_require_sudo_string())?;
         Ok(sudo.execute_elevated(self, command, interactive))
     }
 
@@ -44,10 +41,6 @@ impl<'a> ExecutionContext<'a> {
         self.run_type
     }
 
-    pub fn git(&self) -> &Git {
-        self.git
-    }
-
     pub fn sudo(&self) -> &Option<Sudo> {
         &self.sudo
     }
@@ -4,6 +4,7 @@ use std::path::Path;
 use std::process::{Child, Command, ExitStatus, Output};
 
 use color_eyre::eyre::Result;
+use rust_i18n::t;
 use tracing::debug;
 
 use crate::command::CommandExt;
@@ -151,7 +152,10 @@ impl Executor {
         let result = match self {
             Executor::Wet(c) => {
                 debug!("Running {:?}", c);
-                c.spawn_checked().map(ExecutorChild::Wet)?
+                // We should use `spawn()` here rather than `spawn_checked()` since
+                // their semantics and behaviors are different.
+                #[allow(clippy::disallowed_methods)]
+                c.spawn().map(ExecutorChild::Wet)?
             }
             Executor::Dry(c) => {
                 c.dry_run();
@@ -165,7 +169,12 @@ impl Executor {
     /// See `std::process::Command::output`
     pub fn output(&mut self) -> Result<ExecutorOutput> {
         match self {
-            Executor::Wet(c) => Ok(ExecutorOutput::Wet(c.output_checked()?)),
+            Executor::Wet(c) => {
+                // We should use `output()` here rather than `output_checked()` since
+                // their semantics and behaviors are different.
+                #[allow(clippy::disallowed_methods)]
+                Ok(ExecutorOutput::Wet(c.output()?))
+            }
             Executor::Dry(c) => {
                 c.dry_run();
                 Ok(ExecutorOutput::Dry)
@@ -179,7 +188,7 @@ impl Executor {
     pub fn status_checked_with_codes(&mut self, codes: &[i32]) -> Result<()> {
         match self {
             Executor::Wet(c) => c.status_checked_with(|status| {
-                if status.success() || status.code().as_ref().map(|c| codes.contains(c)).unwrap_or(false) {
+                if status.success() || status.code().as_ref().is_some_and(|c| codes.contains(c)) {
                     Ok(())
                 } else {
                     Err(())
@@ -209,17 +218,20 @@ pub struct DryCommand {
 impl DryCommand {
     fn dry_run(&self) {
         print!(
-            "Dry running: {} {}",
-            self.program.to_string_lossy(),
-            shell_words::join(
+            "{}",
+            t!(
+                "Dry running: {program_name} {arguments}",
+                program_name = self.program.to_string_lossy(),
+                arguments = shell_words::join(
                     self.args
                         .iter()
                         .map(|a| String::from(a.to_string_lossy()))
                         .collect::<Vec<String>>()
                 )
+            )
         );
         match &self.directory {
-            Some(dir) => println!(" in {}", dir.to_string_lossy()),
+            Some(dir) => println!(" {}", t!("in {directory}", directory = dir.to_string_lossy())),
             None => println!(),
         };
     }
@@ -227,6 +239,7 @@ impl DryCommand {
 
 /// The Result of spawn. Contains an actual `std::process::Child` if executed by a wet command.
 pub enum ExecutorChild {
+    #[allow(unused)] // this type has not been used
     Wet(Child),
     Dry,
 }
147
src/main.rs
147
src/main.rs
@@ -18,16 +18,19 @@ use etcetera::base_strategy::Windows;
 #[cfg(unix)]
 use etcetera::base_strategy::Xdg;
 use once_cell::sync::Lazy;
+use rust_i18n::{i18n, t};
 use tracing::debug;

 use self::config::{CommandLineArgs, Config, Step};
 use self::error::StepFailed;
 #[cfg(all(windows, feature = "self-update"))]
 use self::error::Upgraded;
+#[allow(clippy::wildcard_imports)]
 use self::steps::{remote::*, *};
+#[allow(clippy::wildcard_imports)]
 use self::terminal::*;

-use self::utils::{install_color_eyre, install_tracing, update_tracing};
+use self::utils::{hostname, install_color_eyre, install_tracing, update_tracing};

 mod breaking_changes;
 mod command;

@@ -50,9 +53,14 @@ mod utils;
 pub(crate) static HOME_DIR: Lazy<PathBuf> = Lazy::new(|| home::home_dir().expect("No home directory"));
 #[cfg(unix)]
 pub(crate) static XDG_DIRS: Lazy<Xdg> = Lazy::new(|| Xdg::new().expect("No home directory"));

 #[cfg(windows)]
 pub(crate) static WINDOWS_DIRS: Lazy<Windows> = Lazy::new(|| Windows::new().expect("No home directory"));

+// Init and load the i18n files
+i18n!("locales", fallback = "en");
+
+#[allow(clippy::too_many_lines)]
 fn run() -> Result<()> {
     install_color_eyre()?;
     ctrlc::set_handler();
@@ -71,6 +79,11 @@ fn run() -> Result<()> {
     // and `Config::tracing_filter_directives()`.
     let reload_handle = install_tracing(&opt.tracing_filter_directives())?;

+    // Get current system locale and set it as the default locale
+    let system_locale = sys_locale::get_locale().unwrap_or("en".to_string());
+    rust_i18n::set_locale(&system_locale);
+    debug!("Current system locale is {system_locale}");
+
     if let Some(shell) = opt.gen_completion {
         let cmd = &mut CommandLineArgs::command();
         clap_complete::generate(shell, cmd, clap::crate_name!(), &mut io::stdout());
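The i18n wiring added here is easiest to read as three pieces: `i18n!` loads the locale files at compile time, `set_locale` picks the runtime locale from the OS, and `t!` looks up a key with named arguments. A minimal sketch of the same pattern, assuming a `locales/` directory with an `en` translation exists and the `rust-i18n` and `sys-locale` crates are dependencies (the translation key and argument name are illustrative, not Topgrade's actual keys):

```rust
use rust_i18n::{i18n, t};

// Loads ./locales/* at compile time; "en" is the fallback locale.
i18n!("locales", fallback = "en");

fn main() {
    // Pick the OS locale if available, otherwise stay on the fallback.
    let locale = sys_locale::get_locale().unwrap_or_else(|| "en".to_string());
    rust_i18n::set_locale(&locale);

    // Named arguments are interpolated into the translated template.
    println!("{}", t!("greeting", name = "Topgrade"));
}
```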
@@ -117,13 +130,11 @@ fn run() -> Result<()> {
     if config.run_in_tmux() && env::var("TOPGRADE_INSIDE_TMUX").is_err() {
         #[cfg(unix)]
         {
-            tmux::run_in_tmux(config.tmux_arguments()?)?;
+            tmux::run_in_tmux(config.tmux_config()?)?;
             return Ok(());
         }
     }

-    let git = git::Git::new();
-    let mut git_repos = git::Repositories::new(&git);
     let powershell = powershell::Powershell::new();
     let should_run_powershell = powershell.profile().is_some() && config.should_run(Step::Powershell);
     let emacs = emacs::Emacs::new();

@@ -132,7 +143,7 @@ fn run() -> Result<()> {

     let sudo = config.sudo_command().map_or_else(sudo::Sudo::detect, sudo::Sudo::new);
     let run_type = executor::RunType::new(config.dry_run());
-    let ctx = execution_context::ExecutionContext::new(run_type, sudo, &git, &config);
+    let ctx = execution_context::ExecutionContext::new(run_type, sudo, &config);
     let mut runner = runner::Runner::new(&ctx);

     // If
@@ -183,7 +194,7 @@ fn run() -> Result<()> {
     }

     if let Some(topgrades) = config.remote_topgrades() {
-        for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(t)) {
+        for remote_topgrade in topgrades.iter().filter(|t| config.should_execute_remote(hostname(), t)) {
             runner.execute(Step::Remotes, format!("Remote ({remote_topgrade})"), || {
                 ssh::ssh_step(&ctx, remote_topgrade)
             })?;

@@ -198,6 +209,9 @@ fn run() -> Result<()> {
         runner.execute(Step::Scoop, "Scoop", || windows::run_scoop(&ctx))?;
         runner.execute(Step::Winget, "Winget", || windows::run_winget(&ctx))?;
         runner.execute(Step::System, "Windows update", || windows::windows_update(&ctx))?;
+        runner.execute(Step::MicrosoftStore, "Microsoft Store", || {
+            windows::microsoft_store(&ctx)
+        })?;
     }

     #[cfg(target_os = "linux")]

@@ -211,7 +225,7 @@ fn run() -> Result<()> {
                 runner.execute(Step::System, "System update", || distribution.upgrade(&ctx))?;
             }
             Err(e) => {
-                println!("Error detecting current distribution: {e}");
+                println!("{}", t!("Error detecting current distribution: {error}", error = e));
             }
         }
         runner.execute(Step::ConfigUpdate, "config-update", || linux::run_config_update(&ctx))?;

@@ -235,6 +249,11 @@ fn run() -> Result<()> {
             unix::run_brew_formula(&ctx, unix::BrewVariant::Path)
         })?;
         runner.execute(Step::Lure, "LURE", || linux::run_lure_update(&ctx))?;
+        runner.execute(Step::Waydroid, "Waydroid", || linux::run_waydroid(&ctx))?;
+        runner.execute(Step::AutoCpufreq, "auto-cpufreq", || linux::run_auto_cpufreq(&ctx))?;
+        runner.execute(Step::CinnamonSpices, "Cinnamon spices", || {
+            linux::run_cinnamon_spices_updater(&ctx)
+        })?;
     }

     #[cfg(target_os = "macos")]

@@ -258,6 +277,7 @@ fn run() -> Result<()> {
         unix::run_brew_cask(&ctx, unix::BrewVariant::Path)
     })?;
     runner.execute(Step::Macports, "MacPorts", || macos::run_macports(&ctx))?;
+    runner.execute(Step::Xcodes, "Xcodes", || macos::update_xcodes(&ctx))?;
     runner.execute(Step::Sparkle, "Sparkle", || macos::run_sparkle(&ctx))?;
     runner.execute(Step::Mas, "App Store", || macos::run_mas(&ctx))?;
     runner.execute(Step::System, "System upgrade", || macos::upgrade_macos(&ctx))?;

@@ -297,8 +317,8 @@ fn run() -> Result<()> {
     runner.execute(Step::Guix, "guix", || unix::run_guix(&ctx))?;
     runner.execute(Step::HomeManager, "home-manager", || unix::run_home_manager(&ctx))?;
     runner.execute(Step::Asdf, "asdf", || unix::run_asdf(&ctx))?;
+    runner.execute(Step::Mise, "mise", || unix::run_mise(&ctx))?;
     runner.execute(Step::Pkgin, "pkgin", || unix::run_pkgin(&ctx))?;
-    runner.execute(Step::Bun, "bun", || unix::run_bun(&ctx))?;
     runner.execute(Step::BunPackages, "bun-packages", || unix::run_bun_packages(&ctx))?;
     runner.execute(Step::Shell, "zr", || zsh::run_zr(&ctx))?;
     runner.execute(Step::Shell, "antibody", || zsh::run_antibody(&ctx))?;

@@ -323,6 +343,7 @@ fn run() -> Result<()> {
     runner.execute(Step::GnomeShellExtensions, "Gnome Shell Extensions", || {
         unix::upgrade_gnome_extensions(&ctx)
     })?;
+    runner.execute(Step::Pyenv, "pyenv", || unix::run_pyenv(&ctx))?;
     runner.execute(Step::Sdkman, "SDKMAN!", || unix::run_sdkman(&ctx))?;
     runner.execute(Step::Rcm, "rcm", || unix::run_rcm(&ctx))?;
     runner.execute(Step::Maza, "maza", || unix::run_maza(&ctx))?;

@@ -340,6 +361,8 @@ fn run() -> Result<()> {

     // The following update function should be executed on all OSes.
     runner.execute(Step::Fossil, "fossil", || generic::run_fossil(&ctx))?;
+    runner.execute(Step::Elan, "elan", || generic::run_elan(&ctx))?;
+    runner.execute(Step::Rye, "rye", || generic::run_rye(&ctx))?;
     runner.execute(Step::Rustup, "rustup", || generic::run_rustup(&ctx))?;
     runner.execute(Step::Juliaup, "juliaup", || generic::run_juliaup(&ctx))?;
     runner.execute(Step::Dotnet, ".NET", || generic::run_dotnet_upgrade(&ctx))?;

@@ -352,11 +375,16 @@ fn run() -> Result<()> {
     runner.execute(Step::Opam, "opam", || generic::run_opam_update(&ctx))?;
     runner.execute(Step::Vcpkg, "vcpkg", || generic::run_vcpkg_update(&ctx))?;
     runner.execute(Step::Pipx, "pipx", || generic::run_pipx_update(&ctx))?;
+    runner.execute(Step::Pipxu, "pipxu", || generic::run_pipxu_update(&ctx))?;
     runner.execute(Step::Vscode, "Visual Studio Code extensions", || {
         generic::run_vscode_extensions_update(&ctx)
     })?;
+    runner.execute(Step::Vscodium, "VSCodium extensions", || {
+        generic::run_vscodium_extensions_update(&ctx)
+    })?;
     runner.execute(Step::Conda, "conda", || generic::run_conda_update(&ctx))?;
     runner.execute(Step::Mamba, "mamba", || generic::run_mamba_update(&ctx))?;
+    runner.execute(Step::Pixi, "pixi", || generic::run_pixi_update(&ctx))?;
     runner.execute(Step::Miktex, "miktex", || generic::run_miktex_packages_update(&ctx))?;
     runner.execute(Step::Pip3, "pip3", || generic::run_pip3_update(&ctx))?;
     runner.execute(Step::PipReview, "pip-review", || generic::run_pip_review_update(&ctx))?;

@@ -379,6 +407,9 @@ fn run() -> Result<()> {
     runner.execute(Step::Node, "npm", || node::run_npm_upgrade(&ctx))?;
     runner.execute(Step::Yarn, "yarn", || node::run_yarn_upgrade(&ctx))?;
     runner.execute(Step::Pnpm, "pnpm", || node::run_pnpm_upgrade(&ctx))?;
+    runner.execute(Step::VoltaPackages, "volta packages", || {
+        node::run_volta_packages_upgrade(&ctx)
+    })?;
     runner.execute(Step::Containers, "Containers", || containers::run_containers(&ctx))?;
     runner.execute(Step::Deno, "deno", || node::deno_upgrade(&ctx))?;
     runner.execute(Step::Composer, "composer", || generic::run_composer_update(&ctx))?;
@@ -400,67 +431,24 @@ fn run() -> Result<()> {
         generic::run_ghcli_extensions_upgrade(&ctx)
     })?;
     runner.execute(Step::Bob, "Bob", || generic::run_bob(&ctx))?;
-    if config.use_predefined_git_repos() {
-        if config.should_run(Step::Emacs) {
-            if !emacs.is_doom() {
-                if let Some(directory) = emacs.directory() {
-                    git_repos.insert_if_repo(directory);
-                }
-            }
-            git_repos.insert_if_repo(HOME_DIR.join(".doom.d"));
-        }
-
-        if config.should_run(Step::Vim) {
-            git_repos.insert_if_repo(HOME_DIR.join(".vim"));
-            git_repos.insert_if_repo(HOME_DIR.join(".config/nvim"));
-        }
-
-        git_repos.insert_if_repo(HOME_DIR.join(".ideavimrc"));
-        git_repos.insert_if_repo(HOME_DIR.join(".intellimacs"));
-
-        if config.should_run(Step::Rcm) {
-            git_repos.insert_if_repo(HOME_DIR.join(".dotfiles"));
-        }
-
-        #[cfg(unix)]
-        {
-            git_repos.insert_if_repo(zsh::zshrc());
-            if config.should_run(Step::Tmux) {
-                git_repos.insert_if_repo(HOME_DIR.join(".tmux"));
-            }
-            git_repos.insert_if_repo(HOME_DIR.join(".config/fish"));
-            git_repos.insert_if_repo(XDG_DIRS.config_dir().join("openbox"));
-            git_repos.insert_if_repo(XDG_DIRS.config_dir().join("bspwm"));
-            git_repos.insert_if_repo(XDG_DIRS.config_dir().join("i3"));
-            git_repos.insert_if_repo(XDG_DIRS.config_dir().join("sway"));
-        }
-
-        #[cfg(windows)]
-        git_repos.insert_if_repo(
-            WINDOWS_DIRS
-                .cache_dir()
-                .join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
-        );
-
-        #[cfg(windows)]
-        windows::insert_startup_scripts(&mut git_repos).ok();
-
-        if let Some(profile) = powershell.profile() {
-            git_repos.insert_if_repo(profile);
-        }
-    }
-
-    if config.should_run(Step::GitRepos) {
-        if let Some(custom_git_repos) = config.git_repos() {
-            for git_repo in custom_git_repos {
-                git_repos.glob_insert(git_repo);
-            }
-        }
-        runner.execute(Step::GitRepos, "Git repositories", || {
-            git.multi_pull_step(&git_repos, &ctx)
+    runner.execute(Step::Certbot, "Certbot", || generic::run_certbot(&ctx))?;
+    runner.execute(Step::GitRepos, "Git Repositories", || git::run_git_pull(&ctx))?;
+    runner.execute(Step::ClamAvDb, "ClamAV Databases", || generic::run_freshclam(&ctx))?;
+    runner.execute(Step::PlatformioCore, "PlatformIO Core", || {
+        generic::run_platform_io(&ctx)
+    })?;
+    runner.execute(Step::Lensfun, "Lensfun's database update", || {
+        generic::run_lensfun_update_data(&ctx)
+    })?;
+    runner.execute(Step::Poetry, "Poetry", || generic::run_poetry(&ctx))?;
+    runner.execute(Step::Uv, "uv", || generic::run_uv(&ctx))?;
+    runner.execute(Step::Zvm, "ZVM", || generic::run_zvm(&ctx))?;
+    runner.execute(Step::Aqua, "aqua", || generic::run_aqua(&ctx))?;
+    runner.execute(Step::Bun, "bun", || generic::run_bun(&ctx))?;
+    runner.execute(Step::Zigup, "zigup", || generic::run_zigup(&ctx))?;
+    runner.execute(Step::JetBrainsToolbox, "JetBrains Toolbox", || {
+        generic::run_jetbrains_toolbox(&ctx)
     })?;
-    }

     if should_run_powershell {
         runner.execute(Step::Powershell, "Powershell Modules Update", || {
@@ -490,7 +478,7 @@ fn run() -> Result<()> {
     runner.execute(Step::Vagrant, "Vagrant boxes", || vagrant::upgrade_vagrant_boxes(&ctx))?;

     if !runner.report().data().is_empty() {
-        print_separator("Summary");
+        print_separator(t!("Summary"));

         for (key, result) in runner.report().data() {
             print_result(key, result);

@@ -514,16 +502,16 @@ fn run() -> Result<()> {
     }

     if config.keep_at_end() {
-        print_info("\n(R)eboot\n(S)hell\n(Q)uit");
+        print_info(t!("\n(R)eboot\n(S)hell\n(Q)uit"));
         loop {
             match get_key() {
-                Ok(Key::Char('s')) | Ok(Key::Char('S')) => {
+                Ok(Key::Char('s' | 'S')) => {
                     run_shell().context("Failed to execute shell")?;
                 }
-                Ok(Key::Char('r')) | Ok(Key::Char('R')) => {
+                Ok(Key::Char('r' | 'R')) => {
                     reboot().context("Failed to reboot")?;
                 }
-                Ok(Key::Char('q')) | Ok(Key::Char('Q')) => (),
+                Ok(Key::Char('q' | 'Q')) => (),
                 _ => {
                     continue;
                 }
@@ -536,12 +524,13 @@ fn run() -> Result<()> {

     if !config.skip_notify() {
         notify_desktop(
-            format!(
-                "Topgrade finished {}",
-                if failed { "with errors" } else { "successfully" }
-            ),
+            if failed {
+                t!("Topgrade finished with errors")
+            } else {
+                t!("Topgrade finished successfully")
+            },
             Some(Duration::from_secs(10)),
-        )
+        );
     }

     if failed {

@@ -574,7 +563,7 @@ fn main() {
         // The `Debug` implementation of `eyre::Result` prints a multi-line
         // error message that includes all the 'causes' added with
         // `.with_context(...)` calls.
-        println!("Error: {error:?}");
+        println!("{}", t!("Error: {error}", error = format!("{:?}", error)));
     }
     exit(1);
 }
@@ -34,6 +34,14 @@ impl<'a> Runner<'a> {
         let key = key.into();
         debug!("Step {:?}", key);

+        // alter the `func` to put it in a span
+        let func = || {
+            let span =
+                tracing::span!(parent: tracing::Span::none(), tracing::Level::TRACE, "step", step = ?step, key = %key);
+            let _guard = span.enter();
+            func()
+        };
+
         loop {
             match func() {
                 Ok(()) => {
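The new wrapper shadows `func` with a closure that opens a dedicated tracing span before calling the original closure, so everything the step logs is attributed to that step. A minimal, standalone sketch of the same shadowing trick (using the `tracing` and `tracing-subscriber` crates; the span and field names here are illustrative):

```rust
use tracing::Level;

fn main() {
    tracing_subscriber::fmt().with_max_level(Level::TRACE).init();

    let key = "rustup";
    let func = || tracing::trace!("running the step body");

    // Shadow `func`: the new closure captures the previous binding, enters a
    // span, holds the guard for the duration of the call, then delegates.
    let func = || {
        let span = tracing::span!(parent: tracing::Span::none(), Level::TRACE, "step", key = %key);
        let _guard = span.enter();
        func()
    };

    func(); // the trace line is emitted inside the "step" span
}
```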
@@ -1,5 +1,3 @@
-#![cfg(windows)]
-
 use color_eyre::eyre::Result;
 use std::{env::current_exe, fs, path::PathBuf};
 use tracing::{debug, error};
@@ -5,20 +5,21 @@ use std::process::Command;

 use crate::config::Step;
 use color_eyre::eyre::{bail, Result};
+use rust_i18n::t;
 use self_update_crate::backends::github::Update;
 use self_update_crate::update::UpdateStatus;

-use super::terminal::*;
+use super::terminal::{print_info, print_separator};
 #[cfg(windows)]
 use crate::error::Upgraded;

 use crate::execution_context::ExecutionContext;

 pub fn self_update(ctx: &ExecutionContext) -> Result<()> {
-    print_separator("Self update");
+    print_separator(t!("Self update"));

     if ctx.run_type().dry() {
-        println!("Would self-update");
+        println!("{}", t!("Would self-update"));
         Ok(())
     } else {
         let assume_yes = ctx.config().yes(Step::SelfUpdate);

@@ -38,17 +39,17 @@ pub fn self_update(ctx: &ExecutionContext) -> Result<()> {
             .update_extended()?;

         if let UpdateStatus::Updated(release) = &result {
-            println!("\nTopgrade upgraded to {}:\n", release.version);
+            println!("{}", t!("Topgrade upgraded to {version}:\n", version = release.version));
             if let Some(body) = &release.body {
                 println!("{body}");
             }
         } else {
-            println!("Topgrade is up-to-date");
+            println!("{}", t!("Topgrade is up-to-date"));
         }

         {
             if result.updated() {
-                print_info("Respawning...");
+                print_info(t!("Respawning..."));
                 let mut command = Command::new(current_exe?);
                 command.args(env::args().skip(1)).env("TOPGRADE_NO_SELF_UPGRADE", "");
@@ -6,11 +6,13 @@ use color_eyre::eyre::eyre;
 use color_eyre::eyre::Context;
 use color_eyre::eyre::Result;
 use tracing::{debug, error, warn};
+use wildmatch::WildMatch;

 use crate::command::CommandExt;
 use crate::error::{self, TopgradeError};
 use crate::terminal::print_separator;
 use crate::{execution_context::ExecutionContext, utils::require};
+use rust_i18n::t;

 // A string found in the output of docker for containers that weren't found in
 // the docker registry. We use this to gracefully handle and skip containers
@@ -42,7 +44,15 @@ impl Container {
 impl Display for Container {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         // e.g., "`fedora:latest` for `linux/amd64`"
-        write!(f, "`{}` for `{}`", self.repo_tag, self.platform)
+        write!(
+            f,
+            "{}",
+            t!(
+                "`{repo_tag}` for `{platform}`",
+                repo_tag = self.repo_tag,
+                platform = self.platform
+            )
+        )
     }
 }

@@ -51,6 +61,13 @@ impl Display for Container {
 ///
 /// Containers specified in `ignored_containers` will be filtered out.
 fn list_containers(crt: &Path, ignored_containers: Option<&Vec<String>>) -> Result<Vec<Container>> {
+    let ignored_containers = ignored_containers.map(|patterns| {
+        patterns
+            .iter()
+            .map(|pattern| WildMatch::new(pattern))
+            .collect::<Vec<WildMatch>>()
+    });
+
     debug!(
         "Querying '{} image ls --format \"{{{{.Repository}}}}:{{{{.Tag}}}}/{{{{.ID}}}}\"' for containers",
         crt.display()

@@ -85,11 +102,8 @@ fn list_containers(crt: &Path, ignored_containers: Option<&Vec<String>>) -> Resu
         assert_eq!(split_res.len(), 2);
         let (repo_tag, image_id) = (split_res[0], split_res[1]);

-        if let Some(ignored_containers) = ignored_containers {
-            if ignored_containers
-                .iter()
-                .any(|ignored_container| repo_tag.eq(ignored_container))
-            {
+        if let Some(ref ignored_containers) = ignored_containers {
+            if ignored_containers.iter().any(|pattern| pattern.matches(repo_tag)) {
                 debug!("Skipping ignored container '{}'", line);
                 continue;
             }
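Pre-compiling the ignore list into `WildMatch` matchers turns the previous exact string comparison into glob-style matching against each repo tag. A small standalone sketch of that filtering idea (requires the `wildmatch` crate; the patterns and tags below are made up for illustration):

```rust
use wildmatch::WildMatch;

fn main() {
    // Patterns as they might appear in a config file.
    let ignored = ["ghcr.io/*", "*:nightly"];
    let matchers: Vec<WildMatch> = ignored.iter().map(|p| WildMatch::new(p)).collect();

    let repo_tags = [
        "docker.io/library/alpine:latest",
        "ghcr.io/acme/tool:1.2",
        "docker.io/library/rust:nightly",
    ];

    for tag in repo_tags {
        if matchers.iter().any(|m| m.matches(tag)) {
            println!("skipping ignored container '{tag}'");
        } else {
            println!("would pull '{tag}'");
        }
    }
}
```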
@@ -115,17 +129,18 @@ fn list_containers(crt: &Path, ignored_containers: Option<&Vec<String>>) -> Resu
 }

 pub fn run_containers(ctx: &ExecutionContext) -> Result<()> {
-    // Prefer podman, fall back to docker if not present
-    let crt = require("podman").or_else(|_| require("docker"))?;
+    // Check what runtime is specified in the config
+    let container_runtime = ctx.config().containers_runtime().to_string();
+    let crt = require(container_runtime)?;
     debug!("Using container runtime '{}'", crt.display());

-    print_separator("Containers");
+    print_separator(t!("Containers"));
     let mut success = true;
     let containers =
         list_containers(&crt, ctx.config().containers_ignored_tags()).context("Failed to list Docker containers")?;
     debug!("Containers to inspect: {:?}", containers);

-    for container in containers.iter() {
+    for container in &containers {
         debug!("Pulling container '{}'", container);
         let args = vec![
             "pull",
@@ -1,9 +1,4 @@
-(when (fboundp 'paradox-upgrade-packages)
-  (progn
-    (unless (boundp 'paradox-github-token)
-      (setq paradox-github-token t))
-    (paradox-upgrade-packages)
-    (princ
-     (if (get-buffer "*Paradox Report*")
-         (with-current-buffer "*Paradox Report*" (buffer-string))
-       "\nNothing to upgrade\n"))))
+(when (featurep 'package)
+  (if (fboundp 'package-upgrade-all)
+      (package-upgrade-all nil)
+    (message "Your Emacs version doesn't support unattended packages upgrade")))
@@ -4,6 +4,7 @@ use std::path::{Path, PathBuf};

 use color_eyre::eyre::Result;
 use etcetera::base_strategy::BaseStrategy;
+use rust_i18n::t;

 use crate::command::CommandExt;
 use crate::execution_context::ExecutionContext;

@@ -74,7 +75,10 @@ impl Emacs {
         if let Some(doom) = &self.doom {
             Emacs::update_doom(doom, ctx)?;
         }
-        let init_file = require_option(self.directory.as_ref(), String::from("Emacs directory does not exist"))?
+        let init_file = require_option(
+            self.directory.as_ref(),
+            t!("Emacs directory does not exist").to_string(),
+        )?
             .join("init.el")
             .require()?;

@@ -1,5 +1,6 @@
 #![allow(unused_imports)]

+use std::ffi::{OsStr, OsString};
 use std::path::PathBuf;
 use std::process::Command;
 use std::{env, path::Path};

@@ -8,6 +9,10 @@ use std::{fs, io::Write};
 use color_eyre::eyre::eyre;
 use color_eyre::eyre::Context;
 use color_eyre::eyre::Result;
+use jetbrains_toolbox_updater::{find_jetbrains_toolbox, update_jetbrains_toolbox, FindError};
+use lazy_static::lazy_static;
+use regex::bytes::Regex;
+use rust_i18n::t;
 use semver::Version;
 use tempfile::tempfile_in;
 use tracing::{debug, error};

@@ -16,7 +21,7 @@ use crate::command::{CommandExt, Utf8Output};
 use crate::execution_context::ExecutionContext;
 use crate::executor::ExecutorOutput;
 use crate::terminal::{print_separator, shell};
-use crate::utils::{self, check_is_python_2_or_shim, require, require_option, which, PathExt, REQUIRE_SUDO};
+use crate::utils::{self, check_is_python_2_or_shim, get_require_sudo_string, require, require_option, which, PathExt};
 use crate::Step;
 use crate::HOME_DIR;
 use crate::{
@@ -38,8 +43,7 @@ pub fn is_wsl() -> Result<bool> {

 pub fn run_cargo_update(ctx: &ExecutionContext) -> Result<()> {
     let cargo_dir = env::var_os("CARGO_HOME")
-        .map(PathBuf::from)
-        .unwrap_or_else(|| HOME_DIR.join(".cargo"))
+        .map_or_else(|| HOME_DIR.join(".cargo"), PathBuf::from)
         .require()?;
     require("cargo").or_else(|_| {
         require_option(

@@ -58,13 +62,11 @@ pub fn run_cargo_update(ctx: &ExecutionContext) -> Result<()> {
     let cargo_update = require("cargo-install-update")
         .ok()
         .or_else(|| cargo_dir.join("bin/cargo-install-update").if_exists());
-    let cargo_update = match cargo_update {
-        Some(e) => e,
-        None => {
+    let Some(cargo_update) = cargo_update else {
         let message = String::from("cargo-update isn't installed so Topgrade can't upgrade cargo packages.\nInstall cargo-update by running `cargo install cargo-update`");
         print_warning(&message);
         return Err(SkipStep(message).into());
-        }
     };

     ctx.run_type()
@@ -76,16 +78,13 @@ pub fn run_cargo_update(ctx: &ExecutionContext) -> Result<()> {
     let cargo_cache = require("cargo-cache")
         .ok()
         .or_else(|| cargo_dir.join("bin/cargo-cache").if_exists());
-    match cargo_cache {
-        Some(e) => {
+    if let Some(e) = cargo_cache {
         ctx.run_type().execute(e).args(["-a"]).status_checked()?;
-        }
-        None => {
+    } else {
         let message = String::from("cargo-cache isn't installed so Topgrade can't cleanup cargo packages.\nInstall cargo-cache by running `cargo install cargo-cache`");
         print_warning(message);
     }
     }
-    }

     Ok(())
 }
@@ -120,13 +119,17 @@ pub fn run_rubygems(ctx: &ExecutionContext) -> Result<()> {

     print_separator("RubyGems");
     let gem_path_str = gem.as_os_str();
-    if gem_path_str.to_str().unwrap().contains("asdf") {
+    if gem_path_str.to_str().unwrap().contains("asdf")
+        || gem_path_str.to_str().unwrap().contains("mise")
+        || gem_path_str.to_str().unwrap().contains(".rbenv")
+        || gem_path_str.to_str().unwrap().contains(".rvm")
+    {
         ctx.run_type()
             .execute(gem)
             .args(["update", "--system"])
             .status_checked()?;
     } else {
-        let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+        let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
         if !Path::new("/usr/lib/ruby/vendor_ruby/rubygems/defaults/operating_system.rb").exists() {
             ctx.run_type()
                 .execute(sudo)

@@ -155,7 +158,7 @@ pub fn run_haxelib_update(ctx: &ExecutionContext) -> Result<()> {
     let mut command = if directory_writable {
         ctx.run_type().execute(&haxelib)
     } else {
-        let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+        let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
         let mut c = ctx.run_type().execute(sudo);
         c.arg(&haxelib);
         c
@@ -220,6 +223,27 @@ pub fn run_apm(ctx: &ExecutionContext) -> Result<()> {
         .status_checked()
 }

+pub fn run_aqua(ctx: &ExecutionContext) -> Result<()> {
+    let aqua = require("aqua")?;
+
+    // Check if `aqua --help` mentions "aqua". JetBrains aqua does not, aqua CLI does.
+    let output = ctx.run_type().execute(&aqua).arg("--help").output_checked()?;
+
+    if !String::from_utf8(output.stdout)?.contains("aqua") {
+        return Err(SkipStep("Command aqua probably points to JetBrains Aqua".to_string()).into());
+    }
+
+    print_separator("Aqua");
+    if ctx.run_type().dry() {
+        println!("{}", t!("Updating aqua ..."));
+        println!("{}", t!("Updating aqua installed cli tools ..."));
+        Ok(())
+    } else {
+        ctx.run_type().execute(&aqua).arg("update-aqua").status_checked()?;
+        ctx.run_type().execute(&aqua).arg("update").status_checked()
+    }
+}
+
 pub fn run_rustup(ctx: &ExecutionContext) -> Result<()> {
     let rustup = require("rustup")?;

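The `--help` probe in the new `run_aqua` exists because two unrelated tools can end up on `PATH` as `aqua` (the aqua CLI and a JetBrains IDE launcher), so the step sniffs the help text before running anything. A minimal standalone sketch of that kind of same-name binary disambiguation, using only the standard library (the marker string is taken from the diff, everything else is simplified):

```rust
use std::process::Command;

fn looks_like_aqua_cli(program: &str) -> std::io::Result<bool> {
    // Run `<program> --help` and check whether the help text mentions the
    // expected tool name; an unrelated binary with the same name usually won't.
    let output = Command::new(program).arg("--help").output()?;
    let help = String::from_utf8_lossy(&output.stdout);
    Ok(help.contains("aqua"))
}

fn main() -> std::io::Result<()> {
    if looks_like_aqua_cli("aqua")? {
        println!("aqua CLI detected, the update step would run");
    } else {
        println!("`aqua` seems to be a different program, skipping");
    }
    Ok(())
}
```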
@@ -227,6 +251,49 @@ pub fn run_rustup(ctx: &ExecutionContext) -> Result<()> {
     ctx.run_type().execute(rustup).arg("update").status_checked()
 }

+pub fn run_rye(ctx: &ExecutionContext) -> Result<()> {
+    let rye = require("rye")?;
+
+    print_separator("Rye");
+    ctx.run_type().execute(rye).args(["self", "update"]).status_checked()
+}
+
+pub fn run_elan(ctx: &ExecutionContext) -> Result<()> {
+    let elan = require("elan")?;
+
+    print_separator("elan");
+
+    let disabled_error_msg = "self-update is disabled";
+    let executor_output = ctx.run_type().execute(&elan).args(["self", "update"]).output()?;
+    match executor_output {
+        ExecutorOutput::Wet(command_output) => {
+            if command_output.status.success() {
+                // Flush the captured output
+                std::io::stdout().lock().write_all(&command_output.stdout).unwrap();
+                std::io::stderr().lock().write_all(&command_output.stderr).unwrap();
+            } else {
+                let stderr_as_str = std::str::from_utf8(&command_output.stderr).unwrap();
+                if stderr_as_str.contains(disabled_error_msg) {
+                    // `elan` is externally managed, we cannot do the update. Users
+                    // won't see any error message because Topgrade captures them
+                    // all.
+                } else {
+                    // `elan` is NOT externally managed, `elan self update` can
+                    // be performed, but the invocation failed, so we report the
+                    // error to the user and error out.
+                    std::io::stdout().lock().write_all(&command_output.stdout).unwrap();
+                    std::io::stderr().lock().write_all(&command_output.stderr).unwrap();
+
+                    return Err(StepFailed.into());
+                }
+            }
+        }
+        ExecutorOutput::Dry => { /* nothing needed because in a dry run */ }
+    }
+
+    ctx.run_type().execute(&elan).arg("update").status_checked()
+}
+
 pub fn run_juliaup(ctx: &ExecutionContext) -> Result<()> {
     let juliaup = require("juliaup")?;

@@ -239,7 +306,13 @@ pub fn run_juliaup(ctx: &ExecutionContext) -> Result<()> {
             .status_checked()?;
     }

-    ctx.run_type().execute(&juliaup).arg("update").status_checked()
+    ctx.run_type().execute(&juliaup).arg("update").status_checked()?;
+
+    if ctx.config().cleanup() {
+        ctx.run_type().execute(&juliaup).arg("gc").status_checked()?;
+    }
+
+    Ok(())
 }

 pub fn run_choosenim(ctx: &ExecutionContext) -> Result<()> {
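The `run_elan` step above hinges on capturing the command's output and scanning stderr for a marker string, so that "self-update is intentionally disabled" can be told apart from a real failure. A small standalone sketch of that decision, with `std::process` standing in for Topgrade's executor (the marker string and command come from the diff, the surrounding types are simplified):

```rust
use std::io::Write;
use std::process::Command;

fn main() -> std::io::Result<()> {
    let disabled_marker = "self-update is disabled";

    // Capture instead of inheriting stdio so we can inspect stderr afterwards.
    let output = Command::new("elan").args(["self", "update"]).output()?;

    if output.status.success() {
        // Success: just forward what the tool printed.
        std::io::stdout().lock().write_all(&output.stdout)?;
        std::io::stderr().lock().write_all(&output.stderr)?;
    } else if String::from_utf8_lossy(&output.stderr).contains(disabled_marker) {
        // Externally managed install (e.g. a distro package): silently skip.
        println!("elan self-update is managed elsewhere, skipping");
    } else {
        // A genuine failure: surface the captured output and report an error.
        std::io::stdout().lock().write_all(&output.stdout)?;
        std::io::stderr().lock().write_all(&output.stderr)?;
        return Err(std::io::Error::new(std::io::ErrorKind::Other, "elan self update failed"));
    }
    Ok(())
}
```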
@@ -328,7 +401,7 @@ pub fn run_vcpkg_update(ctx: &ExecutionContext) -> Result<()> {
     let mut command = if is_root_install {
         ctx.run_type().execute(&vcpkg)
     } else {
-        let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+        let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
         let mut c = ctx.run_type().execute(sudo);
         c.arg(&vcpkg);
         c
@@ -337,6 +410,48 @@ pub fn run_vcpkg_update(ctx: &ExecutionContext) -> Result<()> {
     command.args(["upgrade", "--no-dry-run"]).status_checked()
 }

+/// Make VSCodium a separate step because:
+///
+/// 1. Users could use both VSCode and VSCodium
+/// 2. Just in case, VSCodium could have incompatible changes with VSCode
+pub fn run_vscodium_extensions_update(ctx: &ExecutionContext) -> Result<()> {
+    // Calling vscodoe in WSL may install a server instead of updating extensions (https://github.com/topgrade-rs/topgrade/issues/594#issuecomment-1782157367)
+    if is_wsl()? {
+        return Err(SkipStep(String::from("Should not run in WSL")).into());
+    }
+
+    let vscodium = require("codium")?;
+
+    // VSCode has update command only since 1.86 version ("january 2024" update), disable the update for prior versions
+    // Use command `code --version` which returns 3 lines: version, git commit, instruction set. We parse only the first one
+    //
+    // This should apply to VSCodium as well.
+    let version: Result<Version> = match Command::new(&vscodium)
+        .arg("--version")
+        .output_checked_utf8()?
+        .stdout
+        .lines()
+        .next()
+    {
+        Some(item) => Version::parse(item).map_err(std::convert::Into::into),
+        _ => return Err(SkipStep(String::from("Cannot find vscodium version")).into()),
+    };
+
+    if !matches!(version, Ok(version) if version >= Version::new(1, 86, 0)) {
+        return Err(SkipStep(String::from(
+            "Too old vscodium version to have update extensions command",
+        ))
+        .into());
+    }
+
+    print_separator("VSCodium extensions");
+
+    ctx.run_type()
+        .execute(vscodium)
+        .arg("--update-extensions")
+        .status_checked()
+}
+
 pub fn run_vscode_extensions_update(ctx: &ExecutionContext) -> Result<()> {
     // Calling vscode in WSL may install a server instead of updating extensions (https://github.com/topgrade-rs/topgrade/issues/594#issuecomment-1782157367)
     if is_wsl()? {
@@ -347,14 +462,14 @@ pub fn run_vscode_extensions_update(ctx: &ExecutionContext) -> Result<()> {

     // Vscode has update command only since 1.86 version ("january 2024" update), disable the update for prior versions
     // Use command `code --version` which returns 3 lines: version, git commit, instruction set. We parse only the first one
-    let version: Result<Version> = match Command::new("code")
+    let version: Result<Version> = match Command::new(&vscode)
         .arg("--version")
         .output_checked_utf8()?
         .stdout
         .lines()
         .next()
     {
-        Some(item) => Version::parse(item).map_err(|err| err.into()),
+        Some(item) => Version::parse(item).map_err(std::convert::Into::into),
         _ => return Err(SkipStep(String::from("Cannot find vscode version")).into()),
     };

@@ -364,36 +479,55 @@ pub fn run_vscode_extensions_update(ctx: &ExecutionContext) -> Result<()> {

     print_separator("Visual Studio Code extensions");

+    if let Some(profile) = ctx.config().vscode_profile() {
+        ctx.run_type()
+            .execute(vscode)
+            .arg("--profile")
+            .arg(profile)
+            .arg("--update-extensions")
+            .status_checked()
+    } else {
         ctx.run_type()
             .execute(vscode)
             .arg("--update-extensions")
             .status_checked()
+    }
 }

 pub fn run_pipx_update(ctx: &ExecutionContext) -> Result<()> {
     let pipx = require("pipx")?;
     print_separator("pipx");

-    let mut command_args = vec!["upgrade-all"];
+    let mut command_args = vec!["upgrade-all", "--include-injected"];

     // pipx version 1.4.0 introduced a new command argument `pipx upgrade-all --quiet`
     // (see https://pipx.pypa.io/stable/docs/#pipx-upgrade-all)
-    let version_str = Command::new("pipx")
+    let version_str = Command::new(&pipx)
         .args(["--version"])
         .output_checked_utf8()
         .map(|s| s.stdout.trim().to_owned());
     let version = Version::parse(&version_str?);
     if matches!(version, Ok(version) if version >= Version::new(1, 4, 0)) {
-        command_args.push("--quiet")
+        command_args.push("--quiet");
     }

     ctx.run_type().execute(pipx).args(command_args).status_checked()
 }
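The pipx change keeps the existing pattern of probing the tool's own version and only appending flags the installed release understands (`--quiet` arrived in pipx 1.4.0, per the comment above). A standalone sketch of that version gate using the `semver` crate; the command invocation and output parsing are simplified assumptions:

```rust
use semver::Version;
use std::process::Command;

fn main() {
    let mut args = vec!["upgrade-all", "--include-injected"];

    // Ask pipx for its version, e.g. "1.4.3", and parse it as semver.
    let version = Command::new("pipx")
        .arg("--version")
        .output()
        .ok()
        .and_then(|o| String::from_utf8(o.stdout).ok())
        .and_then(|s| Version::parse(s.trim()).ok());

    // Only add `--quiet` when the installed pipx is new enough to accept it.
    if matches!(version, Some(v) if v >= Version::new(1, 4, 0)) {
        args.push("--quiet");
    }

    println!("would run: pipx {}", args.join(" "));
}
```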
+pub fn run_pipxu_update(ctx: &ExecutionContext) -> Result<()> {
+    let pipxu = require("pipxu")?;
+    print_separator("pipxu");
+
+    ctx.run_type()
+        .execute(pipxu)
+        .args(["upgrade", "--all"])
+        .status_checked()
+}
+
 pub fn run_conda_update(ctx: &ExecutionContext) -> Result<()> {
     let conda = require("conda")?;

-    let output = Command::new("conda")
+    let output = Command::new(&conda)
         .args(["config", "--show", "auto_activate_base"])
         .output_checked_utf8()?;
     debug!("Conda output: {}", output.stdout);
@@ -403,33 +537,69 @@ pub fn run_conda_update(ctx: &ExecutionContext) -> Result<()> {

     print_separator("Conda");

-    let mut command = ctx.run_type().execute(conda);
+    let mut command = ctx.run_type().execute(&conda);
     command.args(["update", "--all", "-n", "base"]);
     if ctx.config().yes(Step::Conda) {
         command.arg("--yes");
     }
-    command.status_checked()
+    command.status_checked()?;
+
+    if ctx.config().cleanup() {
+        let mut command = ctx.run_type().execute(conda);
+        command.args(["clean", "--all"]);
+        if ctx.config().yes(Step::Conda) {
+            command.arg("--yes");
+        }
+        command.status_checked()?;
+    }
+
+    Ok(())
+}
+
+pub fn run_pixi_update(ctx: &ExecutionContext) -> Result<()> {
+    let pixi = require("pixi")?;
+    print_separator("Pixi");
+
+    // Check if `pixi --help` mentions self-update, if yes, self-update must be enabled.
+    // pixi self-update --help works regardless of whether the feature is enabled.
+    let output = ctx.run_type().execute(&pixi).arg("--help").output_checked()?;
+
+    if String::from_utf8(output.stdout)?.contains("self-update") {
+        ctx.run_type()
+            .execute(&pixi)
+            .args(["self-update"])
+            .status_checked()
+            .ok();
+    }
+
+    ctx.run_type()
+        .execute(&pixi)
+        .args(["global", "update"])
+        .status_checked()
 }

 pub fn run_mamba_update(ctx: &ExecutionContext) -> Result<()> {
     let mamba = require("mamba")?;

-    let output = Command::new("mamba")
-        .args(["config", "--show", "auto_activate_base"])
-        .output_checked_utf8()?;
-    debug!("Mamba output: {}", output.stdout);
-    if output.stdout.contains("False") {
-        return Err(SkipStep("auto_activate_base is set to False".to_string()).into());
-    }

     print_separator("Mamba");

-    let mut command = ctx.run_type().execute(mamba);
+    let mut command = ctx.run_type().execute(&mamba);
     command.args(["update", "--all", "-n", "base"]);
     if ctx.config().yes(Step::Mamba) {
         command.arg("--yes");
     }
-    command.status_checked()
+    command.status_checked()?;
+
+    if ctx.config().cleanup() {
+        let mut command = ctx.run_type().execute(&mamba);
+        command.args(["clean", "--all"]);
+        if ctx.config().yes(Step::Mamba) {
+            command.arg("--yes");
+        }
+        command.status_checked()?;
+    }
+
+    Ok(())
 }

 pub fn run_miktex_packages_update(ctx: &ExecutionContext) -> Result<()> {
@@ -451,7 +621,7 @@ pub fn run_pip3_update(ctx: &ExecutionContext) -> Result<()> {
         (Ok(py), _) => py,
         (Err(_), Ok(py3)) => py3,
         (Err(py_err), Err(py3_err)) => {
-            return Err(SkipStep(format!("Skip due to following reasons: {} {}", py_err, py3_err)).into());
+            return Err(SkipStep(format!("Skip due to following reasons: {py_err} {py3_err}")).into());
         }
     };

@@ -510,7 +680,7 @@ pub fn run_pip3_update(ctx: &ExecutionContext) -> Result<()> {

     print_separator("pip3");
     if env::var("VIRTUAL_ENV").is_ok() {
-        print_warning("This step is will be skipped when running inside a virtual environment");
+        print_warning("This step is skipped when running inside a virtual environment");
         return Err(SkipStep("Does not run inside a virtual environment".to_string()).into());
     }

@@ -538,6 +708,7 @@ pub fn run_pip_review_update(ctx: &ExecutionContext) -> Result<()> {

     Ok(())
 }

 pub fn run_pip_review_local_update(ctx: &ExecutionContext) -> Result<()> {
     let pip_review = require("pip-review")?;

@@ -557,6 +728,7 @@ pub fn run_pip_review_local_update(ctx: &ExecutionContext) -> Result<()> {

     Ok(())
 }

 pub fn run_pipupgrade_update(ctx: &ExecutionContext) -> Result<()> {
     let pipupgrade = require("pipupgrade")?;

@@ -574,6 +746,7 @@ pub fn run_pipupgrade_update(ctx: &ExecutionContext) -> Result<()> {

     Ok(())
 }

 pub fn run_stack_update(ctx: &ExecutionContext) -> Result<()> {
     if require("ghcup").is_ok() {
         // `ghcup` is present and probably(?) being used to install `stack`.
@@ -627,7 +800,7 @@ pub fn run_tlmgr_update(ctx: &ExecutionContext) -> Result<()> {
     let mut command = if directory_writable {
         ctx.run_type().execute(&tlmgr)
     } else {
-        let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+        let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
         let mut c = ctx.run_type().execute(sudo);
         c.arg(&tlmgr);
         c
@@ -684,19 +857,22 @@ pub fn run_composer_update(ctx: &ExecutionContext) -> Result<()> {
     let composer_home = Command::new(&composer)
         .args(["global", "config", "--absolute", "--quiet", "home"])
         .output_checked_utf8()
-        .map_err(|e| (SkipStep(format!("Error getting the composer directory: {e}"))))
+        .map_err(|e| (SkipStep(t!("Error getting the composer directory: {error}", error = e).to_string())))
         .map(|s| PathBuf::from(s.stdout.trim()))?
         .require()?;
 
     if !composer_home.is_descendant_of(&HOME_DIR) {
-        return Err(SkipStep(format!(
-            "Composer directory {} isn't a decandent of the user's home directory",
-            composer_home.display()
-        ))
+        return Err(SkipStep(
+            t!(
+                "Composer directory {composer_home} isn't a descendant of the user's home directory",
+                composer_home = composer_home.display()
+            )
+            .to_string(),
+        )
         .into());
     }
 
-    print_separator("Composer");
+    print_separator(t!("Composer"));
 
     if ctx.config().composer_self_update() {
         cfg_if::cfg_if! {
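The guard in this hunk skips the Composer step when its home directory does not live under the user's home. A rough sketch of such a descendant check using only `std` (the project itself uses its `PathExt::is_descendant_of` helper; relying on `canonicalize` here is an assumption):

```rust
use std::io;
use std::path::Path;

/// Returns true when `path` resolves to a location underneath `ancestor`.
fn is_descendant_of(path: &Path, ancestor: &Path) -> io::Result<bool> {
    // Canonicalize both sides so symlinks and `..` components do not
    // defeat the prefix comparison.
    let path = path.canonicalize()?;
    let ancestor = ancestor.canonicalize()?;
    Ok(path.starts_with(ancestor))
}
```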
@@ -708,7 +884,7 @@ pub fn run_composer_update(ctx: &ExecutionContext) -> Result<()> {
     };
 
     if has_update {
-        let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+        let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
         ctx.run_type()
             .execute(sudo)
             .arg(&composer)
@@ -752,13 +928,15 @@ pub fn run_dotnet_upgrade(ctx: &ExecutionContext) -> Result<()> {
     {
         Ok(output) => output,
         Err(_) => {
-            return Err(SkipStep(String::from(
-                "Error running `dotnet tool list`. This is expected when a dotnet runtime is installed but no SDK.",
-            ))
-            .into())
+            return Err(SkipStep(
+                t!("Error running `dotnet tool list`. This is expected when a dotnet runtime is installed but no SDK.")
+                    .to_string(),
+            )
+            .into());
         }
     };
 
+    let mut in_header = true;
     let mut packages = output
         .stdout
         .lines()
@@ -766,16 +944,22 @@ pub fn run_dotnet_upgrade(ctx: &ExecutionContext) -> Result<()> {
         //
         // Package Id      Version      Commands
         // -------------------------------------
-        //
-        // One thing to note is that .NET SDK respect locale, which means this
-        // header can be printed in languages other than English, do NOT use it
-        // to do any check.
-        .skip(2)
+        .skip_while(|line| {
+            // The .NET SDK respects locale, so the header can be printed
+            // in languages other than English. The separator should hopefully
+            // always be at least 10 -'s long.
+            if in_header && line.starts_with("----------") {
+                in_header = false;
+                true
+            } else {
+                in_header
+            }
+        })
         .filter(|line| !line.is_empty())
         .peekable();
 
     if packages.peek().is_none() {
-        return Err(SkipStep(String::from("No dotnet global tools installed")).into());
+        return Err(SkipStep(t!("No dotnet global tools installed").to_string()).into());
     }
 
     print_separator(".NET");
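The rewritten iterator above replaces a hard-coded `.skip(2)` with a stateful `skip_while`: the two header lines of `dotnet tool list` are locale dependent, while the dashed separator underneath them is not. A self-contained sketch of the same filtering over a captured output string (the sample output in the trailing comment is illustrative):

```rust
/// Drop everything up to and including the dashed separator line, then
/// keep the remaining non-empty package lines.
fn package_lines(output: &str) -> Vec<&str> {
    let mut in_header = true;
    output
        .lines()
        .skip_while(|line| {
            if in_header && line.starts_with("----------") {
                in_header = false;
                true // skip the separator line itself as well
            } else {
                in_header
            }
        })
        .filter(|line| !line.is_empty())
        .collect()
}

// package_lines("Package Id      Version      Commands\n----------------\ncake.tool  2.3.0  dotnet-cake\n")
// yields ["cake.tool  2.3.0  dotnet-cake"].
```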
@@ -786,27 +970,26 @@ pub fn run_dotnet_upgrade(ctx: &ExecutionContext) -> Result<()> {
             .execute(&dotnet)
             .args(["tool", "update", package_name, "--global"])
             .status_checked()
-            .with_context(|| format!("Failed to update .NET package {package_name}"))?;
+            .with_context(|| format!("Failed to update .NET package {package_name:?}"))?;
     }
 
     Ok(())
 }
 
 pub fn run_helix_grammars(ctx: &ExecutionContext) -> Result<()> {
-    require("helix")?;
+    let helix = require("helix").or(require("hx"))?;
 
     print_separator("Helix");
 
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
     ctx.run_type()
-        .execute(sudo)
-        .args(["helix", "--grammar", "fetch"])
+        .execute(&helix)
+        .args(["--grammar", "fetch"])
         .status_checked()
         .with_context(|| "Failed to download helix grammars!")?;
 
     ctx.run_type()
-        .execute(sudo)
-        .args(["helix", "--grammar", "build"])
+        .execute(&helix)
+        .args(["--grammar", "build"])
         .status_checked()
         .with_context(|| "Failed to build helix grammars!")?;
 
|
|||||||
pub fn run_raco_update(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_raco_update(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let raco = require("raco")?;
|
let raco = require("raco")?;
|
||||||
|
|
||||||
print_separator("Racket Package Manager");
|
print_separator(t!("Racket Package Manager"));
|
||||||
|
|
||||||
ctx.run_type()
|
ctx.run_type()
|
||||||
.execute(raco)
|
.execute(raco)
|
||||||
@@ -844,10 +1027,10 @@ pub fn run_ghcli_extensions_upgrade(ctx: &ExecutionContext) -> Result<()> {
     let result = Command::new(&gh).args(["extensions", "list"]).output_checked_utf8();
     if result.is_err() {
         debug!("GH result {:?}", result);
-        return Err(SkipStep(String::from("GH failed")).into());
+        return Err(SkipStep(t!("GH failed").to_string()).into());
     }
 
-    print_separator("GitHub CLI Extensions");
+    print_separator(t!("GitHub CLI Extensions"));
     ctx.run_type()
         .execute(&gh)
         .args(["extension", "upgrade", "--all"])
@@ -857,12 +1040,17 @@ pub fn run_ghcli_extensions_upgrade(ctx: &ExecutionContext) -> Result<()> {
 pub fn update_julia_packages(ctx: &ExecutionContext) -> Result<()> {
     let julia = require("julia")?;
 
-    print_separator("Julia Packages");
+    print_separator(t!("Julia Packages"));
 
-    ctx.run_type()
-        .execute(julia)
-        .args(["-e", "using Pkg; Pkg.update()"])
-        .status_checked()
+    let mut executor = ctx.run_type().execute(julia);
+
+    executor.arg(if ctx.config().julia_use_startup_file() {
+        "--startup-file=yes"
+    } else {
+        "--startup-file=no"
+    });
+
+    executor.args(["-e", "using Pkg; Pkg.update()"]).status_checked()
 }
 
 pub fn run_helm_repo_update(ctx: &ExecutionContext) -> Result<()> {
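For the Julia change above, the net effect is that the `--startup-file` switch is decided before the `Pkg.update()` expression is appended. A small sketch of the resulting command line with `std::process::Command` (a plain boolean stands in for the `julia_use_startup_file` config lookup):

```rust
use std::process::Command;

/// Assemble the `julia` invocation used to update installed packages.
fn julia_update_command(use_startup_file: bool) -> Command {
    let mut cmd = Command::new("julia");
    cmd.arg(if use_startup_file {
        "--startup-file=yes"
    } else {
        "--startup-file=no"
    });
    cmd.args(["-e", "using Pkg; Pkg.update()"]);
    cmd
}
```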
@@ -874,7 +1062,7 @@ pub fn run_helm_repo_update(ctx: &ExecutionContext) -> Result<()> {
     let mut success = true;
     let mut exec = ctx.run_type().execute(helm);
     if let Err(e) = exec.arg("repo").arg("update").status_checked() {
-        error!("Updating repositories failed: {}", e);
+        error!("Updating repositories failed: {e}");
         success = match exec.output_checked_utf8() {
             Ok(s) => s.stdout.contains(no_repo) || s.stderr.contains(no_repo),
             Err(e) => match e.downcast_ref::<TopgradeError>() {
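In the helm hunk, a failed `helm repo update` is only treated as fatal when its output does not mention the "no repositories" condition. A hedged sketch of that fallback check (the exact marker string and the process plumbing are assumptions; topgrade inspects the output through its own command wrappers):

```rust
use std::io;
use std::process::Command;

/// Run `helm repo update`, tolerating the failure that occurs when no
/// repositories are configured. Returns Ok(true) on success or tolerated
/// failure, Ok(false) when the failure looks like a real error.
fn helm_repo_update() -> io::Result<bool> {
    // Assumed marker; the real code keeps it in a `no_repo` variable.
    let no_repo = "no repositories found";
    let output = Command::new("helm").args(["repo", "update"]).output()?;
    if output.status.success() {
        return Ok(true);
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    Ok(stdout.contains(no_repo) || stderr.contains(no_repo))
}
```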
@@ -905,3 +1093,307 @@ pub fn run_bob(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
|
|
||||||
ctx.run_type().execute(bob).args(["update", "--all"]).status_checked()
|
ctx.run_type().execute(bob).args(["update", "--all"]).status_checked()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn run_certbot(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
|
let certbot = require("certbot")?;
|
||||||
|
|
||||||
|
print_separator("Certbot");
|
||||||
|
|
||||||
|
let mut cmd = ctx.run_type().execute(sudo);
|
||||||
|
cmd.arg(certbot);
|
||||||
|
cmd.arg("renew");
|
||||||
|
|
||||||
|
cmd.status_checked()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run `$ freshclam` to update ClamAV signature database
|
||||||
|
///
|
||||||
|
/// doc: https://docs.clamav.net/manual/Usage/SignatureManagement.html#freshclam
|
||||||
|
pub fn run_freshclam(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let freshclam = require("freshclam")?;
|
||||||
|
print_separator(t!("Update ClamAV Database(FreshClam)"));
|
||||||
|
ctx.run_type().execute(freshclam).status_checked()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Invoke `pio upgrade` to update PlatformIO Core.
|
||||||
|
pub fn run_platform_io(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
// We use the full path because by default the binary is not in `PATH`:
|
||||||
|
// https://github.com/topgrade-rs/topgrade/issues/754#issuecomment-2020537559
|
||||||
|
#[cfg(unix)]
|
||||||
|
fn bin_path() -> PathBuf {
|
||||||
|
HOME_DIR.join(".platformio/penv/bin/pio")
|
||||||
|
}
|
||||||
|
#[cfg(windows)]
|
||||||
|
fn bin_path() -> PathBuf {
|
||||||
|
HOME_DIR.join(".platformio/penv/Scripts/pio.exe")
|
||||||
|
}
|
||||||
|
|
||||||
|
let bin_path = require(bin_path())?;
|
||||||
|
|
||||||
|
print_separator("PlatformIO Core");
|
||||||
|
|
||||||
|
ctx.run_type().execute(bin_path).arg("upgrade").status_checked()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run `lensfun-update-data` to update lensfun database.
|
||||||
|
///
|
||||||
|
/// `sudo` will be used if `use_sudo` configuration entry is set to true.
|
||||||
|
pub fn run_lensfun_update_data(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
const SEPARATOR: &str = "Lensfun's database update";
|
||||||
|
let lensfun_update_data = require("lensfun-update-data")?;
|
||||||
|
const EXIT_CODE_WHEN_NO_UPDATE: i32 = 1;
|
||||||
|
|
||||||
|
if ctx.config().lensfun_use_sudo() {
|
||||||
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
|
print_separator(SEPARATOR);
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(sudo)
|
||||||
|
.arg(lensfun_update_data)
|
||||||
|
// `lensfun-update-data` returns 1 when there is no update available
|
||||||
|
// which should be considered success
|
||||||
|
.status_checked_with_codes(&[EXIT_CODE_WHEN_NO_UPDATE])
|
||||||
|
} else {
|
||||||
|
print_separator(SEPARATOR);
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(lensfun_update_data)
|
||||||
|
.status_checked_with_codes(&[EXIT_CODE_WHEN_NO_UPDATE])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_poetry(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let poetry = require("poetry")?;
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
fn get_interpreter(poetry: &PathBuf) -> Result<(PathBuf, Option<OsString>)> {
|
||||||
|
// Parse the standard Unix shebang line: #!interpreter [optional-arg]
|
||||||
|
// Spaces and tabs on either side of interpreter are ignored.
|
||||||
|
|
||||||
|
use std::os::unix::ffi::OsStrExt;
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref SHEBANG_REGEX: Regex = Regex::new(r"^#![ \t]*([^ \t\n]+)(?:[ \t]+([^\n]+)?)?").unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
let script = fs::read(poetry)?;
|
||||||
|
if let Some(c) = SHEBANG_REGEX.captures(&script) {
|
||||||
|
let interpreter = OsStr::from_bytes(&c[1]).into();
|
||||||
|
let args = c.get(2).map(|args| OsStr::from_bytes(args.as_bytes()).into());
|
||||||
|
return Ok((interpreter, args));
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(eyre!("Could not find shebang"))
|
||||||
|
}
|
||||||
|
#[cfg(windows)]
|
||||||
|
fn get_interpreter(poetry: &PathBuf) -> Result<(PathBuf, Option<OsString>)> {
|
||||||
|
// Parse the shebang line from scripts using https://bitbucket.org/vinay.sajip/simple_launcher,
|
||||||
|
// such as those created by pip. In contrast to Unix shebang lines, interpreter paths can
|
||||||
|
// contain spaces, if they are double-quoted.
|
||||||
|
|
||||||
|
use std::str;
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref SHEBANG_REGEX: Regex =
|
||||||
|
Regex::new(r#"^#![ \t]*(?:"([^"\n]+)"|([^" \t\n]+))(?:[ \t]+([^\n]+)?)?"#).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
let data = fs::read(poetry)?;
|
||||||
|
|
||||||
|
let pos = match data.windows(4).rposition(|b| b == b"PK\x05\x06") {
|
||||||
|
Some(i) => i,
|
||||||
|
None => return Err(eyre!("Not a ZIP archive")),
|
||||||
|
};
|
||||||
|
|
||||||
|
let cdr_size = match data.get(pos + 12..pos + 16) {
|
||||||
|
Some(b) => u32::from_le_bytes(b.try_into().unwrap()) as usize,
|
||||||
|
None => return Err(eyre!("Invalid CDR size")),
|
||||||
|
};
|
||||||
|
let cdr_offset = match data.get(pos + 16..pos + 20) {
|
||||||
|
Some(b) => u32::from_le_bytes(b.try_into().unwrap()) as usize,
|
||||||
|
None => return Err(eyre!("Invalid CDR offset")),
|
||||||
|
};
|
||||||
|
if pos < cdr_size + cdr_offset {
|
||||||
|
return Err(eyre!("Invalid ZIP archive"));
|
||||||
|
}
|
||||||
|
let arc_pos = pos - cdr_size - cdr_offset;
|
||||||
|
match data[..arc_pos].windows(2).rposition(|b| b == b"#!") {
|
||||||
|
Some(l) => {
|
||||||
|
let line = &data[l..arc_pos - 1];
|
||||||
|
if let Some(c) = SHEBANG_REGEX.captures(line) {
|
||||||
|
let interpreter = c.get(1).or_else(|| c.get(2)).unwrap();
|
||||||
|
// shebang line should be valid utf8
|
||||||
|
let interpreter = str::from_utf8(interpreter.as_bytes())?.into();
|
||||||
|
let args = match c.get(3) {
|
||||||
|
Some(args) => Some(str::from_utf8(args.as_bytes())?.into()),
|
||||||
|
None => None,
|
||||||
|
};
|
||||||
|
Ok((interpreter, args))
|
||||||
|
} else {
|
||||||
|
Err(eyre!("Invalid shebang line"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => Err(eyre!("Could not find shebang")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ctx.config().poetry_force_self_update() {
|
||||||
|
debug!("forcing poetry self update");
|
||||||
|
} else {
|
||||||
|
let (interp, interp_args) = get_interpreter(&poetry)
|
||||||
|
.map_err(|e| SkipStep(format!("Could not find interpreter for {}: {}", poetry.display(), e)))?;
|
||||||
|
debug!("poetry interpreter: {:?}, args: {:?}", interp, interp_args);
|
||||||
|
|
||||||
|
let check_official_install_script =
|
||||||
|
"import sys; from os import path; print('Y') if path.isfile(path.join(sys.prefix, 'poetry_env')) else print('N')";
|
||||||
|
let mut command = Command::new(&interp);
|
||||||
|
if let Some(args) = interp_args {
|
||||||
|
command.arg(args);
|
||||||
|
}
|
||||||
|
let output = command
|
||||||
|
.args(["-c", check_official_install_script])
|
||||||
|
.output_checked_utf8()?;
|
||||||
|
let stdout = output.stdout.trim();
|
||||||
|
let official_install = match stdout {
|
||||||
|
"N" => false,
|
||||||
|
"Y" => true,
|
||||||
|
_ => unreachable!("unexpected output from `check_official_install_script`"),
|
||||||
|
};
|
||||||
|
|
||||||
|
debug!("poetry is official install: {}", official_install);
|
||||||
|
|
||||||
|
if !official_install {
|
||||||
|
return Err(SkipStep("Not installed with the official script".to_string()).into());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
print_separator("Poetry");
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(&poetry)
|
||||||
|
.args(["self", "update"])
|
||||||
|
.status_checked()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_uv(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let uv_exec = require("uv")?;
|
||||||
|
print_separator("uv");
|
||||||
|
|
||||||
|
// try uv self --help first - if it succeeds, we call uv self update
|
||||||
|
let result = ctx
|
||||||
|
.run_type()
|
||||||
|
.execute(&uv_exec)
|
||||||
|
.args(["self", "--help"])
|
||||||
|
.output_checked();
|
||||||
|
|
||||||
|
if result.is_ok() {
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(&uv_exec)
|
||||||
|
.args(["self", "update"])
|
||||||
|
.status_checked()?;
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(&uv_exec)
|
||||||
|
.args(["tool", "upgrade", "--all"])
|
||||||
|
.status_checked()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Invoke `zvm upgrade` to update ZVM.
|
||||||
|
pub fn run_zvm(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let zvm = require("zvm")?;
|
||||||
|
|
||||||
|
print_separator("ZVM");
|
||||||
|
|
||||||
|
ctx.run_type().execute(zvm).arg("upgrade").status_checked()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_bun(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let bun = require("bun")?;
|
||||||
|
|
||||||
|
print_separator("Bun");
|
||||||
|
|
||||||
|
ctx.run_type().execute(bun).arg("upgrade").status_checked()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_zigup(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let zigup = require("zigup")?;
|
||||||
|
let config = ctx.config();
|
||||||
|
|
||||||
|
print_separator("zigup");
|
||||||
|
|
||||||
|
let mut path_args = Vec::new();
|
||||||
|
if let Some(path) = config.zigup_path_link() {
|
||||||
|
path_args.push("--path-link".to_owned());
|
||||||
|
path_args.push(shellexpand::tilde(path).into_owned());
|
||||||
|
}
|
||||||
|
if let Some(path) = config.zigup_install_dir() {
|
||||||
|
path_args.push("--install-dir".to_owned());
|
||||||
|
path_args.push(shellexpand::tilde(path).into_owned());
|
||||||
|
}
|
||||||
|
|
||||||
|
for zig_version in config.zigup_target_versions() {
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(&zigup)
|
||||||
|
.args(&path_args)
|
||||||
|
.arg("fetch")
|
||||||
|
.arg(&zig_version)
|
||||||
|
.status_checked()?;
|
||||||
|
|
||||||
|
if config.zigup_cleanup() {
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(&zigup)
|
||||||
|
.args(&path_args)
|
||||||
|
.arg("keep")
|
||||||
|
.arg(&zig_version)
|
||||||
|
.status_checked()?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if config.zigup_cleanup() {
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(zigup)
|
||||||
|
.args(&path_args)
|
||||||
|
.arg("clean")
|
||||||
|
.status_checked()?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_jetbrains_toolbox(_ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let installation = find_jetbrains_toolbox();
|
||||||
|
match installation {
|
||||||
|
Err(FindError::NotFound) => {
|
||||||
|
// Skip
|
||||||
|
Err(SkipStep(format!("{}", t!("No JetBrains Toolbox installation found"))).into())
|
||||||
|
}
|
||||||
|
Err(FindError::UnsupportedOS(os)) => {
|
||||||
|
// Skip
|
||||||
|
Err(SkipStep(format!("{}", t!("Unsupported operating system {os}", os = os))).into())
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
// Unexpected error
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
t!("jetbrains-toolbox-updater encountered an unexpected error during finding:")
|
||||||
|
);
|
||||||
|
println!("{e:?}");
|
||||||
|
Err(StepFailed.into())
|
||||||
|
}
|
||||||
|
Ok(installation) => {
|
||||||
|
print_separator("JetBrains Toolbox");
|
||||||
|
|
||||||
|
match update_jetbrains_toolbox(installation) {
|
||||||
|
Err(e) => {
|
||||||
|
// Unexpected error
|
||||||
|
println!(
|
||||||
|
"{}",
|
||||||
|
t!("jetbrains-toolbox-updater encountered an unexpected error during updating:")
|
||||||
|
);
|
||||||
|
println!("{e:?}");
|
||||||
|
Err(StepFailed.into())
|
||||||
|
}
|
||||||
|
Ok(()) => Ok(()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
src/steps/git.rs (475 changes)
@@ -6,30 +6,123 @@ use std::process::{Command, Output, Stdio};
|
|||||||
use color_eyre::eyre::Context;
|
use color_eyre::eyre::Context;
|
||||||
use color_eyre::eyre::{eyre, Result};
|
use color_eyre::eyre::{eyre, Result};
|
||||||
use console::style;
|
use console::style;
|
||||||
use futures::stream::{iter, FuturesUnordered};
|
use futures::stream::{iter, FuturesUnordered, StreamExt};
|
||||||
use futures::StreamExt;
|
|
||||||
use glob::{glob_with, MatchOptions};
|
use glob::{glob_with, MatchOptions};
|
||||||
use tokio::process::Command as AsyncCommand;
|
use tokio::process::Command as AsyncCommand;
|
||||||
use tokio::runtime;
|
use tokio::runtime;
|
||||||
use tracing::{debug, error};
|
use tracing::{debug, error};
|
||||||
|
|
||||||
use crate::command::CommandExt;
|
use crate::command::CommandExt;
|
||||||
|
use crate::config::Step;
|
||||||
use crate::execution_context::ExecutionContext;
|
use crate::execution_context::ExecutionContext;
|
||||||
|
use crate::steps::emacs::Emacs;
|
||||||
use crate::terminal::print_separator;
|
use crate::terminal::print_separator;
|
||||||
use crate::utils::{which, PathExt};
|
use crate::utils::{require, PathExt};
|
||||||
use crate::{error::SkipStep, terminal::print_warning};
|
use crate::{error::SkipStep, terminal::print_warning, HOME_DIR};
|
||||||
|
use etcetera::base_strategy::BaseStrategy;
|
||||||
|
use rust_i18n::t;
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
use crate::XDG_DIRS;
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
use crate::WINDOWS_DIRS;
|
||||||
|
|
||||||
|
pub fn run_git_pull(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let mut repos = RepoStep::try_new()?;
|
||||||
|
let config = ctx.config();
|
||||||
|
|
||||||
|
// handle built-in repos
|
||||||
|
if config.use_predefined_git_repos() {
|
||||||
|
// should be executed on all the platforms
|
||||||
|
{
|
||||||
|
if config.should_run(Step::Emacs) {
|
||||||
|
let emacs = Emacs::new();
|
||||||
|
if !emacs.is_doom() {
|
||||||
|
if let Some(directory) = emacs.directory() {
|
||||||
|
repos.insert_if_repo(directory);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
repos.insert_if_repo(HOME_DIR.join(".doom.d"));
|
||||||
|
}
|
||||||
|
|
||||||
|
if config.should_run(Step::Vim) {
|
||||||
|
repos.insert_if_repo(HOME_DIR.join(".vim"));
|
||||||
|
repos.insert_if_repo(HOME_DIR.join(".config/nvim"));
|
||||||
|
}
|
||||||
|
|
||||||
|
repos.insert_if_repo(HOME_DIR.join(".ideavimrc"));
|
||||||
|
repos.insert_if_repo(HOME_DIR.join(".intellimacs"));
|
||||||
|
|
||||||
|
if config.should_run(Step::Rcm) {
|
||||||
|
repos.insert_if_repo(HOME_DIR.join(".dotfiles"));
|
||||||
|
}
|
||||||
|
|
||||||
|
let powershell = crate::steps::powershell::Powershell::new();
|
||||||
|
if let Some(profile) = powershell.profile() {
|
||||||
|
repos.insert_if_repo(profile);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
{
|
||||||
|
repos.insert_if_repo(crate::steps::zsh::zshrc());
|
||||||
|
if config.should_run(Step::Tmux) {
|
||||||
|
repos.insert_if_repo(HOME_DIR.join(".tmux"));
|
||||||
|
}
|
||||||
|
repos.insert_if_repo(HOME_DIR.join(".config/fish"));
|
||||||
|
repos.insert_if_repo(XDG_DIRS.config_dir().join("openbox"));
|
||||||
|
repos.insert_if_repo(XDG_DIRS.config_dir().join("bspwm"));
|
||||||
|
repos.insert_if_repo(XDG_DIRS.config_dir().join("i3"));
|
||||||
|
repos.insert_if_repo(XDG_DIRS.config_dir().join("sway"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
{
|
||||||
|
repos.insert_if_repo(
|
||||||
|
WINDOWS_DIRS
|
||||||
|
.cache_dir()
|
||||||
|
.join("Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState"),
|
||||||
|
);
|
||||||
|
|
||||||
|
super::os::windows::insert_startup_scripts(&mut repos).ok();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle user-defined repos
|
||||||
|
if let Some(custom_git_repos) = config.git_repos() {
|
||||||
|
for git_repo in custom_git_repos {
|
||||||
|
repos.glob_insert(git_repo);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Warn the user about the bad patterns.
|
||||||
|
//
|
||||||
|
// NOTE: this should be executed **before** skipping the Git step or the
|
||||||
|
// user won't receive this warning in the cases where all the paths configured
|
||||||
|
// are bad patterns.
|
||||||
|
repos.bad_patterns.iter().for_each(|pattern| {
|
||||||
|
print_warning(t!(
|
||||||
|
"Path {pattern} did not contain any git repositories",
|
||||||
|
pattern = pattern
|
||||||
|
));
|
||||||
|
});
|
||||||
|
|
||||||
|
if repos.is_repos_empty() {
|
||||||
|
return Err(SkipStep(t!("No repositories to pull").to_string()).into());
|
||||||
|
}
|
||||||
|
|
||||||
|
print_separator(t!("Git repositories"));
|
||||||
|
|
||||||
|
repos.pull_repos(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
static PATH_PREFIX: &str = "\\\\?\\";
|
static PATH_PREFIX: &str = "\\\\?\\";
|
||||||
|
|
||||||
#[derive(Debug)]
|
pub struct RepoStep {
|
||||||
pub struct Git {
|
git: PathBuf,
|
||||||
git: Option<PathBuf>,
|
repos: HashSet<PathBuf>,
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Repositories<'a> {
|
|
||||||
git: &'a Git,
|
|
||||||
repositories: HashSet<String>,
|
|
||||||
glob_match_options: MatchOptions,
|
glob_match_options: MatchOptions,
|
||||||
bad_patterns: Vec<String>,
|
bad_patterns: Vec<String>,
|
||||||
}
|
}
|
||||||
@@ -45,100 +138,41 @@ fn output_checked_utf8(output: Output) -> Result<()> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn pull_repository(repo: String, git: &Path, ctx: &ExecutionContext<'_>) -> Result<()> {
|
fn get_head_revision<P: AsRef<Path>>(git: &Path, repo: P) -> Option<String> {
|
||||||
let path = repo.to_string();
|
|
||||||
let before_revision = get_head_revision(git, &repo);
|
|
||||||
|
|
||||||
println!("{} {}", style("Pulling").cyan().bold(), path);
|
|
||||||
|
|
||||||
let mut command = AsyncCommand::new(git);
|
|
||||||
|
|
||||||
command
|
|
||||||
.stdin(Stdio::null())
|
|
||||||
.current_dir(&repo)
|
|
||||||
.args(["pull", "--ff-only"]);
|
|
||||||
|
|
||||||
if let Some(extra_arguments) = ctx.config().git_arguments() {
|
|
||||||
command.args(extra_arguments.split_whitespace());
|
|
||||||
}
|
|
||||||
|
|
||||||
let pull_output = command.output().await?;
|
|
||||||
let submodule_output = AsyncCommand::new(git)
|
|
||||||
.args(["submodule", "update", "--recursive"])
|
|
||||||
.current_dir(&repo)
|
|
||||||
.stdin(Stdio::null())
|
|
||||||
.output()
|
|
||||||
.await?;
|
|
||||||
let result = output_checked_utf8(pull_output)
|
|
||||||
.and_then(|_| output_checked_utf8(submodule_output))
|
|
||||||
.wrap_err_with(|| format!("Failed to pull {repo}"));
|
|
||||||
|
|
||||||
if result.is_err() {
|
|
||||||
println!("{} pulling {}", style("Failed").red().bold(), &repo);
|
|
||||||
} else {
|
|
||||||
let after_revision = get_head_revision(git, &repo);
|
|
||||||
|
|
||||||
match (&before_revision, &after_revision) {
|
|
||||||
(Some(before), Some(after)) if before != after => {
|
|
||||||
println!("{} {}:", style("Changed").yellow().bold(), &repo);
|
|
||||||
|
|
||||||
Command::new(git)
|
Command::new(git)
|
||||||
.stdin(Stdio::null())
|
.stdin(Stdio::null())
|
||||||
.current_dir(&repo)
|
.current_dir(repo.as_ref())
|
||||||
.args([
|
|
||||||
"--no-pager",
|
|
||||||
"log",
|
|
||||||
"--no-decorate",
|
|
||||||
"--oneline",
|
|
||||||
&format!("{before}..{after}"),
|
|
||||||
])
|
|
||||||
.status_checked()?;
|
|
||||||
println!();
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
println!("{} {}", style("Up-to-date").green().bold(), &repo);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
result.map(|_| ())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_head_revision(git: &Path, repo: &str) -> Option<String> {
|
|
||||||
Command::new(git)
|
|
||||||
.stdin(Stdio::null())
|
|
||||||
.current_dir(repo)
|
|
||||||
.args(["rev-parse", "HEAD"])
|
.args(["rev-parse", "HEAD"])
|
||||||
.output_checked_utf8()
|
.output_checked_utf8()
|
||||||
.map(|output| output.stdout.trim().to_string())
|
.map(|output| output.stdout.trim().to_string())
|
||||||
.map_err(|e| {
|
.map_err(|e| {
|
||||||
error!("Error getting revision for {}: {}", repo, e);
|
error!("Error getting revision for {}: {e}", repo.as_ref().display(),);
|
||||||
|
|
||||||
e
|
e
|
||||||
})
|
})
|
||||||
.ok()
|
.ok()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn has_remotes(git: &Path, repo: &str) -> Option<bool> {
|
impl RepoStep {
|
||||||
Command::new(git)
|
/// Try to create a `RepoStep`, fail if `git` is not found.
|
||||||
.stdin(Stdio::null())
|
pub fn try_new() -> Result<Self> {
|
||||||
.current_dir(repo)
|
let git = require("git")?;
|
||||||
.args(["remote", "show"])
|
let mut glob_match_options = MatchOptions::new();
|
||||||
.output_checked_utf8()
|
|
||||||
.map(|output| output.stdout.lines().count() > 0)
|
|
||||||
.map_err(|e| {
|
|
||||||
error!("Error getting remotes for {}: {}", repo, e);
|
|
||||||
e
|
|
||||||
})
|
|
||||||
.ok()
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Git {
|
if cfg!(windows) {
|
||||||
pub fn new() -> Self {
|
glob_match_options.case_sensitive = false;
|
||||||
Self { git: which("git") }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<String> {
|
Ok(Self {
|
||||||
|
git,
|
||||||
|
repos: HashSet::new(),
|
||||||
|
bad_patterns: Vec::new(),
|
||||||
|
glob_match_options,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Try to get the root of the repo specified in `path`.
|
||||||
|
pub fn get_repo_root<P: AsRef<Path>>(&self, path: P) -> Option<PathBuf> {
|
||||||
match path.as_ref().canonicalize() {
|
match path.as_ref().canonicalize() {
|
||||||
Ok(mut path) => {
|
Ok(mut path) => {
|
||||||
debug_assert!(path.exists());
|
debug_assert!(path.exists());
|
||||||
@@ -162,111 +196,59 @@ impl Git {
|
|||||||
path_string
|
path_string
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(git) = &self.git {
|
let output = Command::new(&self.git)
|
||||||
let output = Command::new(git)
|
|
||||||
.stdin(Stdio::null())
|
.stdin(Stdio::null())
|
||||||
.current_dir(path)
|
.current_dir(path)
|
||||||
.args(["rev-parse", "--show-toplevel"])
|
.args(["rev-parse", "--show-toplevel"])
|
||||||
.output_checked_utf8()
|
.output_checked_utf8()
|
||||||
.ok()
|
.ok()
|
||||||
.map(|output| output.stdout.trim().to_string());
|
// trim the last newline char
|
||||||
|
.map(|output| PathBuf::from(output.stdout.trim()));
|
||||||
|
|
||||||
return output;
|
return output;
|
||||||
}
|
}
|
||||||
|
Err(e) => {
|
||||||
|
if e.kind() == io::ErrorKind::NotFound {
|
||||||
|
debug!("{} does not exist", path.as_ref().display());
|
||||||
|
} else {
|
||||||
|
error!("Error looking for {}: {e}", path.as_ref().display());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
Err(e) => match e.kind() {
|
|
||||||
io::ErrorKind::NotFound => debug!("{} does not exist", path.as_ref().display()),
|
|
||||||
_ => error!("Error looking for {}: {}", path.as_ref().display(), e),
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
pub fn multi_pull_step(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
|
|
||||||
// Warn the user about the bad patterns.
|
|
||||||
//
|
|
||||||
// NOTE: this should be executed **before** skipping the Git step or the
|
|
||||||
// user won't receive this warning in the cases where all the paths configured
|
|
||||||
// are bad patterns.
|
|
||||||
repositories
|
|
||||||
.bad_patterns
|
|
||||||
.iter()
|
|
||||||
.for_each(|pattern| print_warning(format!("Path {pattern} did not contain any git repositories")));
|
|
||||||
|
|
||||||
if repositories.repositories.is_empty() {
|
|
||||||
return Err(SkipStep(String::from("No repositories to pull")).into());
|
|
||||||
}
|
|
||||||
|
|
||||||
print_separator("Git repositories");
|
|
||||||
self.multi_pull(repositories, ctx)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn multi_pull(&self, repositories: &Repositories, ctx: &ExecutionContext) -> Result<()> {
|
|
||||||
let git = self.git.as_ref().unwrap();
|
|
||||||
|
|
||||||
if ctx.run_type().dry() {
|
|
||||||
repositories
|
|
||||||
.repositories
|
|
||||||
.iter()
|
|
||||||
.for_each(|repo| println!("Would pull {}", &repo));
|
|
||||||
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
let futures_iterator = repositories
|
|
||||||
.repositories
|
|
||||||
.iter()
|
|
||||||
.filter(|repo| match has_remotes(git, repo) {
|
|
||||||
Some(false) => {
|
|
||||||
println!(
|
|
||||||
"{} {} because it has no remotes",
|
|
||||||
style("Skipping").yellow().bold(),
|
|
||||||
repo
|
|
||||||
);
|
|
||||||
false
|
|
||||||
}
|
|
||||||
_ => true, // repo has remotes or command to check for remotes has failed. proceed to pull anyway.
|
|
||||||
})
|
|
||||||
.map(|repo| pull_repository(repo.clone(), git, ctx));
|
|
||||||
|
|
||||||
let stream_of_futures = if let Some(limit) = ctx.config().git_concurrency_limit() {
|
|
||||||
iter(futures_iterator).buffer_unordered(limit).boxed()
|
|
||||||
} else {
|
|
||||||
futures_iterator.collect::<FuturesUnordered<_>>().boxed()
|
|
||||||
};
|
|
||||||
|
|
||||||
let basic_rt = runtime::Runtime::new()?;
|
|
||||||
let results = basic_rt.block_on(async { stream_of_futures.collect::<Vec<Result<()>>>().await });
|
|
||||||
|
|
||||||
let error = results.into_iter().find(|r| r.is_err());
|
|
||||||
error.unwrap_or(Ok(()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Repositories<'a> {
|
|
||||||
pub fn new(git: &'a Git) -> Self {
|
|
||||||
let mut glob_match_options = MatchOptions::new();
|
|
||||||
|
|
||||||
if cfg!(windows) {
|
|
||||||
glob_match_options.case_sensitive = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
Self {
|
|
||||||
git,
|
|
||||||
repositories: HashSet::new(),
|
|
||||||
bad_patterns: Vec::new(),
|
|
||||||
glob_match_options,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
/// Check if `path` is a git repo, if yes, add it to `self.repos`.
|
||||||
|
///
|
||||||
|
/// Return the check result.
|
||||||
pub fn insert_if_repo<P: AsRef<Path>>(&mut self, path: P) -> bool {
|
pub fn insert_if_repo<P: AsRef<Path>>(&mut self, path: P) -> bool {
|
||||||
if let Some(repo) = self.git.get_repo_root(path) {
|
if let Some(repo) = self.get_repo_root(path) {
|
||||||
self.repositories.insert(repo);
|
self.repos.insert(repo);
|
||||||
true
|
true
|
||||||
} else {
|
} else {
|
||||||
false
|
false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Check if `repo` has a remote.
|
||||||
|
fn has_remotes<P: AsRef<Path>>(&self, repo: P) -> Option<bool> {
|
||||||
|
let mut cmd = Command::new(&self.git);
|
||||||
|
cmd.stdin(Stdio::null())
|
||||||
|
.current_dir(repo.as_ref())
|
||||||
|
.args(["remote", "show"]);
|
||||||
|
|
||||||
|
let res = cmd.output_checked_utf8();
|
||||||
|
|
||||||
|
res.map(|output| output.stdout.lines().count() > 0)
|
||||||
|
.map_err(|e| {
|
||||||
|
error!("Error getting remotes for {}: {e}", repo.as_ref().display());
|
||||||
|
e
|
||||||
|
})
|
||||||
|
.ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Similar to `insert_if_repo`, with glob support.
|
||||||
pub fn glob_insert(&mut self, pattern: &str) {
|
pub fn glob_insert(&mut self, pattern: &str) {
|
||||||
if let Ok(glob) = glob_with(pattern, self.glob_match_options) {
|
if let Ok(glob) = glob_with(pattern, self.glob_match_options) {
|
||||||
let mut last_git_repo: Option<PathBuf> = None;
|
let mut last_git_repo: Option<PathBuf> = None;
|
||||||
@@ -276,7 +258,7 @@ impl<'a> Repositories<'a> {
|
|||||||
if let Some(last_git_repo) = &last_git_repo {
|
if let Some(last_git_repo) = &last_git_repo {
|
||||||
if path.is_descendant_of(last_git_repo) {
|
if path.is_descendant_of(last_git_repo) {
|
||||||
debug!(
|
debug!(
|
||||||
"Skipping {} because it's a decendant of last known repo {}",
|
"Skipping {} because it's a descendant of last known repo {}",
|
||||||
path.display(),
|
path.display(),
|
||||||
last_git_repo.display()
|
last_git_repo.display()
|
||||||
);
|
);
|
||||||
@@ -288,7 +270,7 @@ impl<'a> Repositories<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
error!("Error in path {}", e);
|
error!("Error in path {e}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -297,20 +279,141 @@ impl<'a> Repositories<'a> {
|
|||||||
self.bad_patterns.push(String::from(pattern));
|
self.bad_patterns.push(String::from(pattern));
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
error!("Bad glob pattern: {}", pattern);
|
error!("Bad glob pattern: {pattern}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(unix)]
|
/// True if `self.repos` is empty.
|
||||||
pub fn is_empty(&self) -> bool {
|
pub fn is_repos_empty(&self) -> bool {
|
||||||
self.repositories.is_empty()
|
self.repos.is_empty()
|
||||||
}
|
}
|
||||||
|
|
||||||
// The following 2 functions are `#[cfg(unix)]` because they are only used in
|
/// Remove `path` from `self.repos`.
|
||||||
// the `oh-my-zsh` step, which is UNIX-only.
|
///
|
||||||
|
// `cfg(unix)` because it is only used in the oh-my-zsh step.
|
||||||
#[cfg(unix)]
|
#[cfg(unix)]
|
||||||
pub fn remove(&mut self, path: &str) {
|
pub fn remove<P: AsRef<Path>>(&mut self, path: P) {
|
||||||
let _removed = self.repositories.remove(path);
|
let _removed = self.repos.remove(path.as_ref());
|
||||||
debug_assert!(_removed);
|
debug_assert!(_removed);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Try to pull a repo.
|
||||||
|
async fn pull_repo<P: AsRef<Path>>(&self, ctx: &ExecutionContext<'_>, repo: P) -> Result<()> {
|
||||||
|
let before_revision = get_head_revision(&self.git, &repo);
|
||||||
|
|
||||||
|
if ctx.config().verbose() {
|
||||||
|
println!("{} {}", style(t!("Pulling")).cyan().bold(), repo.as_ref().display());
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut command = AsyncCommand::new(&self.git);
|
||||||
|
|
||||||
|
command
|
||||||
|
.stdin(Stdio::null())
|
||||||
|
.current_dir(&repo)
|
||||||
|
.args(["pull", "--ff-only"]);
|
||||||
|
|
||||||
|
if let Some(extra_arguments) = ctx.config().git_arguments() {
|
||||||
|
command.args(extra_arguments.split_whitespace());
|
||||||
|
}
|
||||||
|
|
||||||
|
let pull_output = command.output().await?;
|
||||||
|
let submodule_output = AsyncCommand::new(&self.git)
|
||||||
|
.args(["submodule", "update", "--recursive"])
|
||||||
|
.current_dir(&repo)
|
||||||
|
.stdin(Stdio::null())
|
||||||
|
.output()
|
||||||
|
.await?;
|
||||||
|
let result = output_checked_utf8(pull_output)
|
||||||
|
.and_then(|()| output_checked_utf8(submodule_output))
|
||||||
|
.wrap_err_with(|| format!("Failed to pull {}", repo.as_ref().display()));
|
||||||
|
|
||||||
|
if result.is_err() {
|
||||||
|
println!(
|
||||||
|
"{} {} {}",
|
||||||
|
style(t!("Failed")).red().bold(),
|
||||||
|
t!("pulling"),
|
||||||
|
repo.as_ref().display()
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
let after_revision = get_head_revision(&self.git, repo.as_ref());
|
||||||
|
|
||||||
|
match (&before_revision, &after_revision) {
|
||||||
|
(Some(before), Some(after)) if before != after => {
|
||||||
|
println!("{} {}", style(t!("Changed")).yellow().bold(), repo.as_ref().display());
|
||||||
|
|
||||||
|
Command::new(&self.git)
|
||||||
|
.stdin(Stdio::null())
|
||||||
|
.current_dir(&repo)
|
||||||
|
.args([
|
||||||
|
"--no-pager",
|
||||||
|
"log",
|
||||||
|
"--no-decorate",
|
||||||
|
"--oneline",
|
||||||
|
&format!("{before}..{after}"),
|
||||||
|
])
|
||||||
|
.status_checked()?;
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
if ctx.config().verbose() {
|
||||||
|
println!("{} {}", style(t!("Up-to-date")).green().bold(), repo.as_ref().display());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Pull the repositories specified in `self.repos`.
|
||||||
|
///
|
||||||
|
/// # NOTE
|
||||||
|
/// This function will create an async runtime and do the real job so the
|
||||||
|
/// function itself is not async.
|
||||||
|
fn pull_repos(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
if ctx.run_type().dry() {
|
||||||
|
self.repos
|
||||||
|
.iter()
|
||||||
|
.for_each(|repo| println!("{}", t!("Would pull {repo}", repo = repo.display())));
|
||||||
|
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
if !ctx.config().verbose() {
|
||||||
|
println!(
|
||||||
|
"\n{} {}\n",
|
||||||
|
style(t!("Only")).green().bold(),
|
||||||
|
t!("updated repositories will be shown...")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let futures_iterator = self
|
||||||
|
.repos
|
||||||
|
.iter()
|
||||||
|
.filter(|repo| match self.has_remotes(repo) {
|
||||||
|
Some(false) => {
|
||||||
|
println!(
|
||||||
|
"{} {} {}",
|
||||||
|
style(t!("Skipping")).yellow().bold(),
|
||||||
|
repo.display(),
|
||||||
|
t!("because it has no remotes")
|
||||||
|
);
|
||||||
|
false
|
||||||
|
}
|
||||||
|
_ => true, // repo has remotes or command to check for remotes has failed. proceed to pull anyway.
|
||||||
|
})
|
||||||
|
.map(|repo| self.pull_repo(ctx, repo));
|
||||||
|
|
||||||
|
let stream_of_futures = if let Some(limit) = ctx.config().git_concurrency_limit() {
|
||||||
|
iter(futures_iterator).buffer_unordered(limit).boxed()
|
||||||
|
} else {
|
||||||
|
futures_iterator.collect::<FuturesUnordered<_>>().boxed()
|
||||||
|
};
|
||||||
|
|
||||||
|
let basic_rt = runtime::Runtime::new()?;
|
||||||
|
let results = basic_rt.block_on(async { stream_of_futures.collect::<Vec<Result<()>>>().await });
|
||||||
|
|
||||||
|
let error = results.into_iter().find(std::result::Result::is_err);
|
||||||
|
error.unwrap_or(Ok(()))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
 use crate::terminal::print_separator;
 use crate::utils::require;
 use color_eyre::eyre::Result;
+use rust_i18n::t;
 
 use crate::execution_context::ExecutionContext;
 
@@ -17,7 +18,7 @@ pub fn upgrade_kak_plug(ctx: &ExecutionContext) -> Result<()> {
         .args(["-ui", "dummy", "-e", UPGRADE_KAK])
         .output()?;
 
-    println!("Plugins upgraded");
+    println!("{}", t!("Plugins upgraded"));
 
     Ok(())
 }
|||||||
@@ -4,16 +4,17 @@ use std::os::unix::fs::MetadataExt;
 use std::path::PathBuf;
 use std::process::Command;
 
-use crate::utils::{require_option, REQUIRE_SUDO};
+use crate::utils::{get_require_sudo_string, require_option};
 use crate::HOME_DIR;
 use color_eyre::eyre::Result;
 #[cfg(target_os = "linux")]
 use nix::unistd::Uid;
+use rust_i18n::t;
 use semver::Version;
 use tracing::debug;
 
 use crate::command::CommandExt;
-use crate::terminal::print_separator;
+use crate::terminal::{print_info, print_separator};
 use crate::utils::{require, PathExt};
 use crate::{error::SkipStep, execution_context::ExecutionContext};
 
@@ -86,13 +87,13 @@ impl NPM {
             .args(["--version"])
             .output_checked_utf8()
             .map(|s| s.stdout.trim().to_owned());
-        Version::parse(&version_str?).map_err(|err| err.into())
+        Version::parse(&version_str?).map_err(std::convert::Into::into)
     }
 
     fn upgrade(&self, ctx: &ExecutionContext, use_sudo: bool) -> Result<()> {
         let args = ["update", self.global_location_arg()];
         if use_sudo {
-            let sudo = require_option(ctx.sudo().clone(), REQUIRE_SUDO.to_string())?;
+            let sudo = require_option(ctx.sudo().clone(), get_require_sudo_string())?;
             ctx.run_type()
                 .execute(sudo)
                 .arg(&self.command)
@@ -156,7 +157,7 @@ impl Yarn {
         let args = ["global", "upgrade"];
 
         if use_sudo {
-            let sudo = require_option(ctx.sudo().clone(), REQUIRE_SUDO.to_string())?;
+            let sudo = require_option(ctx.sudo().clone(), get_require_sudo_string())?;
             ctx.run_type()
                 .execute(sudo)
                 .arg(self.yarn.as_ref().unwrap_or(&self.command))
@@ -183,6 +184,92 @@ impl Yarn {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct Deno {
|
||||||
|
command: PathBuf,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Deno {
|
||||||
|
fn new(command: PathBuf) -> Self {
|
||||||
|
Self { command }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let mut args = vec![];
|
||||||
|
|
||||||
|
let version = ctx.config().deno_version();
|
||||||
|
if let Some(version) = version {
|
||||||
|
let bin_version = self.version()?;
|
||||||
|
|
||||||
|
if bin_version >= Version::new(2, 0, 0) {
|
||||||
|
args.push(version);
|
||||||
|
} else if bin_version >= Version::new(1, 6, 0) {
|
||||||
|
match version {
|
||||||
|
"stable" => { /* do nothing, as stable is the default channel to upgrade */ }
|
||||||
|
"rc" => {
|
||||||
|
return Err(SkipStep(
|
||||||
|
"Deno (1.6.0-2.0.0) cannot be upgraded to a release candidate".to_string(),
|
||||||
|
)
|
||||||
|
.into());
|
||||||
|
}
|
||||||
|
"canary" => args.push("--canary"),
|
||||||
|
_ => {
|
||||||
|
if Version::parse(version).is_err() {
|
||||||
|
return Err(SkipStep("Invalid Deno version".to_string()).into());
|
||||||
|
}
|
||||||
|
|
||||||
|
args.push("--version");
|
||||||
|
args.push(version);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if bin_version >= Version::new(1, 0, 0) {
|
||||||
|
match version {
|
||||||
|
"stable" | "rc" | "canary" => {
|
||||||
|
// Prior to v1.6.0, `deno upgrade` is not able to fetch the latest tag version.
|
||||||
|
return Err(
|
||||||
|
SkipStep("Deno (1.0.0-1.6.0) cannot be upgraded to a named channel".to_string()).into(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
if Version::parse(version).is_err() {
|
||||||
|
return Err(SkipStep("Invalid Deno version".to_string()).into());
|
||||||
|
}
|
||||||
|
|
||||||
|
args.push("--version");
|
||||||
|
args.push(version);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// v0.x cannot be upgraded with `deno upgrade` to v1.x or v2.x
|
||||||
|
// nor can be upgraded to a specific version.
|
||||||
|
return Err(SkipStep("Unsupported Deno version".to_string()).into());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(&self.command)
|
||||||
|
.arg("upgrade")
|
||||||
|
.args(args)
|
||||||
|
.status_checked()?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the version of Deno.
|
||||||
|
///
|
||||||
|
/// This function will return the version of Deno installed on the system.
|
||||||
|
/// The version is parsed from the output of `deno -V`.
|
||||||
|
///
|
||||||
|
/// ```sh
|
||||||
|
/// deno -V # deno 1.6.0
|
||||||
|
/// ```
|
||||||
|
fn version(&self) -> Result<Version> {
|
||||||
|
let version_str = Command::new(&self.command)
|
||||||
|
.args(["-V"])
|
||||||
|
.output_checked_utf8()
|
||||||
|
.map(|s| s.stdout.trim().to_owned().split_off(5)); // remove "deno " prefix
|
||||||
|
Version::parse(&version_str?).map_err(std::convert::Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(target_os = "linux")]
|
#[cfg(target_os = "linux")]
|
||||||
fn should_use_sudo(npm: &NPM, ctx: &ExecutionContext) -> Result<bool> {
|
fn should_use_sudo(npm: &NPM, ctx: &ExecutionContext) -> Result<bool> {
|
||||||
if npm.should_use_sudo()? {
|
if npm.should_use_sudo()? {
|
||||||
@@ -214,7 +301,7 @@ fn should_use_sudo_yarn(yarn: &Yarn, ctx: &ExecutionContext) -> Result<bool> {
|
|||||||
pub fn run_npm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_npm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let npm = require("npm").map(|b| NPM::new(b, NPMVariant::Npm))?;
|
let npm = require("npm").map(|b| NPM::new(b, NPMVariant::Npm))?;
|
||||||
|
|
||||||
print_separator("Node Package Manager");
|
print_separator(t!("Node Package Manager"));
|
||||||
|
|
||||||
#[cfg(target_os = "linux")]
|
#[cfg(target_os = "linux")]
|
||||||
{
|
{
|
||||||
@@ -230,7 +317,7 @@ pub fn run_npm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
pub fn run_pnpm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_pnpm_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let pnpm = require("pnpm").map(|b| NPM::new(b, NPMVariant::Pnpm))?;
|
let pnpm = require("pnpm").map(|b| NPM::new(b, NPMVariant::Pnpm))?;
|
||||||
|
|
||||||
print_separator("Performant Node Package Manager");
|
print_separator(t!("Performant Node Package Manager"));
|
||||||
|
|
||||||
#[cfg(target_os = "linux")]
|
#[cfg(target_os = "linux")]
|
||||||
{
|
{
|
||||||
@@ -251,7 +338,7 @@ pub fn run_yarn_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
print_separator("Yarn Package Manager");
|
print_separator(t!("Yarn Package Manager"));
|
||||||
|
|
||||||
#[cfg(target_os = "linux")]
|
#[cfg(target_os = "linux")]
|
||||||
{
|
{
|
||||||
@@ -265,14 +352,59 @@ pub fn run_yarn_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn deno_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
pub fn deno_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let deno = require("deno")?;
|
let deno = require("deno").map(Deno::new)?;
|
||||||
let deno_dir = HOME_DIR.join(".deno");
|
let deno_dir = HOME_DIR.join(".deno");
|
||||||
|
|
||||||
if !deno.canonicalize()?.is_descendant_of(&deno_dir) {
|
if !deno.command.canonicalize()?.is_descendant_of(&deno_dir) {
|
||||||
let skip_reason = SkipStep("Deno installed outside of .deno directory".to_string());
|
let skip_reason = SkipStep(t!("Deno installed outside of .deno directory").to_string());
|
||||||
return Err(skip_reason.into());
|
return Err(skip_reason.into());
|
||||||
}
|
}
|
||||||
|
|
||||||
print_separator("Deno");
|
print_separator("Deno");
|
||||||
ctx.run_type().execute(&deno).arg("upgrade").status_checked()
|
deno.upgrade(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// There is no `volta upgrade` command, so we need to upgrade each package
|
||||||
|
pub fn run_volta_packages_upgrade(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let volta = require("volta")?;
|
||||||
|
|
||||||
|
print_separator("Volta");
|
||||||
|
|
||||||
|
if ctx.run_type().dry() {
|
||||||
|
print_info(t!("Updating Volta packages..."));
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let list_output = ctx
|
||||||
|
.run_type()
|
||||||
|
.execute(&volta)
|
||||||
|
.args(["list", "--format=plain"])
|
||||||
|
.output_checked_utf8()?
|
||||||
|
.stdout;
|
||||||
|
|
||||||
|
let installed_packages: Vec<&str> = list_output
|
||||||
|
.lines()
|
||||||
|
.filter_map(|line| {
|
||||||
|
// format is 'kind package@version ...'
|
||||||
|
let mut parts = line.split_whitespace();
|
||||||
|
parts.next();
|
||||||
|
let package_part = parts.next()?;
|
||||||
|
let version_index = package_part.rfind('@').unwrap_or(package_part.len());
|
||||||
|
Some(package_part[..version_index].trim())
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if installed_packages.is_empty() {
|
||||||
|
print_info(t!("No packages installed with Volta"));
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
for package in &installed_packages {
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(&volta)
|
||||||
|
.args(["install", package])
|
||||||
|
.status_checked()?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,12 +4,13 @@ use std::path::{Path, PathBuf};
|
|||||||
|
|
||||||
use color_eyre::eyre;
|
use color_eyre::eyre;
|
||||||
use color_eyre::eyre::Result;
|
use color_eyre::eyre::Result;
|
||||||
|
use rust_i18n::t;
|
||||||
use walkdir::WalkDir;
|
use walkdir::WalkDir;
|
||||||
|
|
||||||
use crate::command::CommandExt;
|
use crate::command::CommandExt;
|
||||||
use crate::error::TopgradeError;
|
use crate::error::TopgradeError;
|
||||||
use crate::execution_context::ExecutionContext;
|
use crate::execution_context::ExecutionContext;
|
||||||
use crate::sudo::Sudo;
|
use crate::utils::require_option;
|
||||||
use crate::utils::which;
|
use crate::utils::which;
|
||||||
use crate::{config, Step};
|
use crate::{config, Step};
|
||||||
|
|
||||||
@@ -144,13 +145,13 @@ impl Trizen {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub struct Pacman {
|
pub struct Pacman {
|
||||||
sudo: Sudo,
|
|
||||||
executable: PathBuf,
|
executable: PathBuf,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ArchPackageManager for Pacman {
|
impl ArchPackageManager for Pacman {
|
||||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||||
let mut command = ctx.run_type().execute(&self.sudo);
|
let sudo = require_option(ctx.sudo().as_ref(), "sudo is required to run pacman".into())?;
|
||||||
|
let mut command = ctx.run_type().execute(sudo);
|
||||||
command
|
command
|
||||||
.arg(&self.executable)
|
.arg(&self.executable)
|
||||||
.arg("-Syu")
|
.arg("-Syu")
|
||||||
@@ -161,7 +162,7 @@ impl ArchPackageManager for Pacman {
|
|||||||
command.status_checked()?;
|
command.status_checked()?;
|
||||||
|
|
||||||
if ctx.config().cleanup() {
|
if ctx.config().cleanup() {
|
||||||
let mut command = ctx.run_type().execute(&self.sudo);
|
let mut command = ctx.run_type().execute(sudo);
|
||||||
command.arg(&self.executable).arg("-Scc");
|
command.arg(&self.executable).arg("-Scc");
|
||||||
if ctx.config().yes(Step::System) {
|
if ctx.config().yes(Step::System) {
|
||||||
command.arg("--noconfirm");
|
command.arg("--noconfirm");
|
||||||
@@ -174,10 +175,9 @@ impl ArchPackageManager for Pacman {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Pacman {
|
impl Pacman {
|
||||||
pub fn get(ctx: &ExecutionContext) -> Option<Self> {
|
pub fn get() -> Option<Self> {
|
||||||
Some(Self {
|
Some(Self {
|
||||||
executable: which("powerpill").unwrap_or_else(|| PathBuf::from("pacman")),
|
executable: which("powerpill").unwrap_or_else(|| PathBuf::from("pacman")),
|
||||||
sudo: ctx.sudo().to_owned()?,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -263,46 +263,75 @@ impl ArchPackageManager for Pamac {
|
|||||||
|
|
||||||
pub struct Aura {
|
pub struct Aura {
|
||||||
executable: PathBuf,
|
executable: PathBuf,
|
||||||
sudo: Sudo,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Aura {
|
impl Aura {
|
||||||
fn get(ctx: &ExecutionContext) -> Option<Self> {
|
fn get() -> Option<Self> {
|
||||||
Some(Self {
|
Some(Self {
|
||||||
executable: which("aura")?,
|
executable: which("aura")?,
|
||||||
sudo: ctx.sudo().to_owned()?,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ArchPackageManager for Aura {
|
impl ArchPackageManager for Aura {
|
||||||
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade(&self, ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = which("sudo").unwrap_or_default();
|
use semver::Version;
|
||||||
let mut aur_update = ctx.run_type().execute(&sudo);
|
|
||||||
|
|
||||||
if sudo.ends_with("sudo") {
|
let version_cmd_output = ctx
|
||||||
aur_update
|
.run_type()
|
||||||
.arg(&self.executable)
|
.execute(&self.executable)
|
||||||
|
.arg("--version")
|
||||||
|
.output_checked_utf8()?;
|
||||||
|
// Output will be something like: "aura x.x.x\n"
|
||||||
|
let version_cmd_stdout = version_cmd_output.stdout;
|
||||||
|
let version_str = version_cmd_stdout.trim_start_matches("aura ").trim_end();
|
||||||
|
let version = Version::parse(version_str).expect("invalid version");
|
||||||
|
|
||||||
|
// Aura, since version 4.0.6, no longer needs sudo.
|
||||||
|
//
|
||||||
|
// https://github.com/fosskers/aura/releases/tag/v4.0.6
|
||||||
|
let version_no_sudo = Version::new(4, 0, 6);
|
||||||
|
|
||||||
|
if version >= version_no_sudo {
|
||||||
|
let mut cmd = ctx.run_type().execute(&self.executable);
|
||||||
|
cmd.arg("-Au")
|
||||||
|
.args(ctx.config().aura_aur_arguments().split_whitespace());
|
||||||
|
if ctx.config().yes(Step::System) {
|
||||||
|
cmd.arg("--noconfirm");
|
||||||
|
}
|
||||||
|
cmd.status_checked()?;
|
||||||
|
|
||||||
|
let mut cmd = ctx.run_type().execute(&self.executable);
|
||||||
|
cmd.arg("-Syu")
|
||||||
|
.args(ctx.config().aura_pacman_arguments().split_whitespace());
|
||||||
|
if ctx.config().yes(Step::System) {
|
||||||
|
cmd.arg("--noconfirm");
|
||||||
|
}
|
||||||
|
cmd.status_checked()?;
|
||||||
|
} else {
|
||||||
|
let sudo = crate::utils::require_option(
|
||||||
|
ctx.sudo().as_ref(),
|
||||||
|
t!("Aura(<0.4.6) requires sudo installed to work with AUR packages").to_string(),
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let mut cmd = ctx.run_type().execute(sudo);
|
||||||
|
cmd.arg(&self.executable)
|
||||||
.arg("-Au")
|
.arg("-Au")
|
||||||
.args(ctx.config().aura_aur_arguments().split_whitespace());
|
.args(ctx.config().aura_aur_arguments().split_whitespace());
|
||||||
if ctx.config().yes(Step::System) {
|
if ctx.config().yes(Step::System) {
|
||||||
aur_update.arg("--noconfirm");
|
cmd.arg("--noconfirm");
|
||||||
}
|
}
|
||||||
|
cmd.status_checked()?;
|
||||||
|
|
||||||
aur_update.status_checked()?;
|
let mut cmd = ctx.run_type().execute(sudo);
|
||||||
} else {
|
cmd.arg(&self.executable)
|
||||||
println!("Aura requires sudo installed to work with AUR packages")
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut pacman_update = ctx.run_type().execute(&self.sudo);
|
|
||||||
pacman_update
|
|
||||||
.arg(&self.executable)
|
|
||||||
.arg("-Syu")
|
.arg("-Syu")
|
||||||
.args(ctx.config().aura_pacman_arguments().split_whitespace());
|
.args(ctx.config().aura_pacman_arguments().split_whitespace());
|
||||||
if ctx.config().yes(Step::System) {
|
if ctx.config().yes(Step::System) {
|
||||||
pacman_update.arg("--noconfirm");
|
cmd.arg("--noconfirm");
|
||||||
|
}
|
||||||
|
cmd.status_checked()?;
|
||||||
}
|
}
|
||||||
pacman_update.status_checked()?;
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
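The hunk above decides at runtime whether Aura still needs to be wrapped in sudo by parsing `aura --version` and comparing the result against 4.0.6 with the `semver` crate. A minimal, self-contained sketch of that version gate (the sample output strings and the `needs_sudo` helper are illustrative, not part of the diff):

```rust
use semver::Version;

/// Returns true when this Aura build still has to be run through sudo,
/// i.e. when it is older than 4.0.6 (illustrative helper, not topgrade API).
fn needs_sudo(version_output: &str) -> bool {
    // `aura --version` prints something like "aura 4.0.5\n".
    let version_str = version_output.trim_start_matches("aura ").trim_end();
    let version = Version::parse(version_str).expect("invalid version");
    version < Version::new(4, 0, 6)
}

fn main() {
    assert!(needs_sudo("aura 4.0.5\n"));
    assert!(!needs_sudo("aura 4.1.0\n"));
}
```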
@@ -323,16 +352,16 @@ pub fn get_arch_package_manager(ctx: &ExecutionContext) -> Option<Box<dyn ArchPa
             .or_else(|| Trizen::get().map(box_package_manager))
             .or_else(|| Pikaur::get().map(box_package_manager))
             .or_else(|| Pamac::get().map(box_package_manager))
-            .or_else(|| Pacman::get(ctx).map(box_package_manager))
-            .or_else(|| Aura::get(ctx).map(box_package_manager)),
+            .or_else(|| Pacman::get().map(box_package_manager))
+            .or_else(|| Aura::get().map(box_package_manager)),
         config::ArchPackageManager::GarudaUpdate => GarudaUpdate::get().map(box_package_manager),
         config::ArchPackageManager::Trizen => Trizen::get().map(box_package_manager),
         config::ArchPackageManager::Paru => YayParu::get("paru", &pacman).map(box_package_manager),
         config::ArchPackageManager::Yay => YayParu::get("yay", &pacman).map(box_package_manager),
-        config::ArchPackageManager::Pacman => Pacman::get(ctx).map(box_package_manager),
+        config::ArchPackageManager::Pacman => Pacman::get().map(box_package_manager),
         config::ArchPackageManager::Pikaur => Pikaur::get().map(box_package_manager),
         config::ArchPackageManager::Pamac => Pamac::get().map(box_package_manager),
-        config::ArchPackageManager::Aura => Aura::get(ctx).map(box_package_manager),
+        config::ArchPackageManager::Aura => Aura::get().map(box_package_manager),
     }
 }

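The autodetect arm above relies on `Option::or_else` so that only the first available helper is actually constructed. A rough standalone sketch of the same lazy fallback pattern (the `which` stand-in and the `first_available` helper are made up for illustration, not copied from the codebase):

```rust
use std::path::PathBuf;

// Stand-in for topgrade's `which` util: locate a binary on PATH, if any.
fn which(name: &str) -> Option<PathBuf> {
    std::env::split_paths(&std::env::var_os("PATH")?)
        .map(|dir| dir.join(name))
        .find(|candidate| candidate.is_file())
}

// Try the preferred tool first and fall back lazily, mirroring the
// `Pacman::get().or_else(|| Aura::get())` chain in the diff.
fn first_available() -> Option<PathBuf> {
    which("paru")
        .or_else(|| which("yay"))
        .or_else(|| which("pacman"))
}

fn main() {
    match first_available() {
        Some(path) => println!("using {}", path.display()),
        None => println!("no Arch package manager found"),
    }
}
```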
@@ -355,7 +384,7 @@ pub fn show_pacnew() {
         .peekable();

     if iter.peek().is_some() {
-        println!("\nPacman backup configuration files found:");
+        println!("\n{}", t!("Pacman backup configuration files found:"));

         for entry in iter {
             println!("{}", entry.path().display());
@@ -1,14 +1,14 @@
 use crate::command::CommandExt;
 use crate::execution_context::ExecutionContext;
 use crate::terminal::print_separator;
-use crate::utils::{require_option, REQUIRE_SUDO};
+use crate::utils::{get_require_sudo_string, require_option};
 use crate::Step;
 use color_eyre::eyre::Result;
 use std::process::Command;

 pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
-    print_separator("DragonFly BSD Packages");
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
+    print_separator(t!("DragonFly BSD Packages"));
     let mut cmd = ctx.run_type().execute(sudo);
     cmd.args(["/usr/local/sbin/pkg", "upgrade"]);
     if ctx.config().yes(Step::System) {
@@ -18,9 +18,9 @@ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
 }

 pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;

-    print_separator("DragonFly BSD Audit");
+    print_separator(t!("DragonFly BSD Audit"));

     #[allow(clippy::disallowed_methods)]
     if !Command::new(sudo)
@@ -28,7 +28,9 @@ pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
         .status()?
         .success()
     {
-        println!("The package audit was successful, but vulnerable packages still remain on the system");
+        println!(t!(
+            "The package audit was successful, but vulnerable packages still remain on the system"
+        ));
     }
     Ok(())
 }
@@ -1,14 +1,15 @@
 use crate::command::CommandExt;
 use crate::execution_context::ExecutionContext;
 use crate::terminal::print_separator;
-use crate::utils::{require_option, REQUIRE_SUDO};
+use crate::utils::{get_require_sudo_string, require_option};
 use crate::Step;
 use color_eyre::eyre::Result;
+use rust_i18n::t;
 use std::process::Command;

 pub fn upgrade_freebsd(ctx: &ExecutionContext) -> Result<()> {
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
-    print_separator("FreeBSD Update");
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
+    print_separator(t!("FreeBSD Update"));
     ctx.run_type()
         .execute(sudo)
         .args(["/usr/sbin/freebsd-update", "fetch", "install"])
@@ -16,8 +17,8 @@ pub fn upgrade_freebsd(ctx: &ExecutionContext) -> Result<()> {
 }

 pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
-    print_separator("FreeBSD Packages");
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
+    print_separator(t!("FreeBSD Packages"));

     let mut command = ctx.run_type().execute(sudo);

@@ -29,9 +30,9 @@ pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
 }

 pub fn audit_packages(ctx: &ExecutionContext) -> Result<()> {
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;

-    print_separator("FreeBSD Audit");
+    print_separator(t!("FreeBSD Audit"));

     Command::new(sudo)
         .args(["/usr/sbin/pkg", "audit", "-Fr"])
@@ -3,6 +3,7 @@ use std::process::Command;

 use color_eyre::eyre::Result;
 use ini::Ini;
+use rust_i18n::t;
 use tracing::{debug, warn};

 use crate::command::CommandExt;
@@ -10,8 +11,8 @@ use crate::error::{SkipStep, TopgradeError};
 use crate::execution_context::ExecutionContext;
 use crate::steps::generic::is_wsl;
 use crate::steps::os::archlinux;
-use crate::terminal::print_separator;
-use crate::utils::{require, require_option, which, PathExt, REQUIRE_SUDO};
+use crate::terminal::{print_separator, prompt_yesno};
+use crate::utils::{get_require_sudo_string, require, require_option, which, PathExt};
 use crate::{Step, HOME_DIR};

 static OS_RELEASE_PATH: &str = "/etc/os-release";
@@ -20,14 +21,17 @@ static OS_RELEASE_PATH: &str = "/etc/os-release";
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub enum Distribution {
     Alpine,
+    Wolfi,
     Arch,
     Bedrock,
     CentOS,
+    Chimera,
     ClearLinux,
     Fedora,
     FedoraImmutable,
     Debian,
     Gentoo,
+    NILRT,
     OpenMandriva,
     OpenSuseTumbleweed,
     PCLinuxOS,
@@ -47,35 +51,23 @@ impl Distribution {
         let section = os_release.general_section();
         let id = section.get("ID");
         let name = section.get("NAME");
-        let variant: Option<Vec<&str>> = section.get("VARIANT").map(|s| s.split_whitespace().collect());
+        let variant = section.get("VARIANT");
         let id_like: Option<Vec<&str>> = section.get("ID_LIKE").map(|s| s.split_whitespace().collect());

         Ok(match id {
             Some("alpine") => Distribution::Alpine,
+            Some("chimera") => Distribution::Chimera,
+            Some("wolfi") => Distribution::Wolfi,
             Some("centos") | Some("rhel") | Some("ol") => Distribution::CentOS,
             Some("clear-linux-os") => Distribution::ClearLinux,
-            Some("fedora") => {
-                return if let Some(variant) = variant {
-                    if variant.contains(&"Silverblue")
-                        || variant.contains(&"Kinoite")
-                        || variant.contains(&"Sericea")
-                        || variant.contains(&"Onyx")
-                    {
-                        Ok(Distribution::FedoraImmutable)
-                    } else {
-                        Ok(Distribution::Fedora)
-                    }
-                } else {
-                    Ok(Distribution::Fedora)
-                };
-            }
-
+            Some("fedora") => Distribution::match_fedora_variant(&variant),
+            Some("nilrt") => Distribution::NILRT,
             Some("nobara") => Distribution::Nobara,
             Some("void") => Distribution::Void,
-            Some("debian") | Some("pureos") | Some("Deepin") => Distribution::Debian,
+            Some("debian") | Some("pureos") | Some("Deepin") | Some("linuxmint") => Distribution::Debian,
             Some("arch") | Some("manjaro-arm") | Some("garuda") | Some("artix") => Distribution::Arch,
             Some("solus") => Distribution::Solus,
-            Some("gentoo") => Distribution::Gentoo,
+            Some("gentoo") | Some("funtoo") => Distribution::Gentoo,
             Some("exherbo") => Distribution::Exherbo,
             Some("nixos") => Distribution::NixOS,
             Some("opensuse-microos") => Distribution::SuseMicro,
@@ -105,7 +97,7 @@ impl Distribution {
             } else if id_like.contains(&"alpine") {
                 return Ok(Distribution::Alpine);
             } else if id_like.contains(&"fedora") {
-                return Ok(Distribution::Fedora);
+                return Ok(Distribution::match_fedora_variant(&variant));
             }
         }
         return Err(TopgradeError::UnknownLinuxDistribution.into());
@@ -113,6 +105,15 @@ impl Distribution {
         })
     }

+    fn match_fedora_variant(variant: &Option<&str>) -> Self {
+        if let Some("Silverblue" | "Kinoite" | "Sericea" | "Onyx" | "IoT Edition" | "Sway Atomic" | "CoreOS") = variant
+        {
+            Distribution::FedoraImmutable
+        } else {
+            Distribution::Fedora
+        }
+    }
+
     pub fn detect() -> Result<Self> {
         if PathBuf::from("/bedrock").exists() {
             return Ok(Distribution::Bedrock);
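`match_fedora_variant` above keys off the raw `VARIANT` field of `/etc/os-release`, which the surrounding code reads with the `ini` crate. A small sketch of that lookup on a hard-coded sample (the sample content and the `immutable` flag are illustrative; the real code loads `OS_RELEASE_PATH`):

```rust
use ini::Ini;

fn main() {
    // Illustrative os-release snippet; the real code parses /etc/os-release.
    let sample = "ID=fedora\nVARIANT=Silverblue\nID_LIKE=fedora\n";
    let os_release = Ini::load_from_str(sample).expect("valid ini");
    let section = os_release.general_section();

    let id = section.get("ID");
    let variant = section.get("VARIANT");

    // Mirrors the variant gate in the diff: atomic variants are treated as
    // FedoraImmutable, everything else as plain Fedora.
    let immutable = matches!(variant, Some("Silverblue" | "Kinoite" | "Sericea" | "Onyx"));
    println!("id = {id:?}, variant = {variant:?}, immutable = {immutable}");
}
```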
@@ -132,10 +133,12 @@
     }

     pub fn upgrade(self, ctx: &ExecutionContext) -> Result<()> {
-        print_separator("System update");
+        print_separator(t!("System update"));

         match self {
             Distribution::Alpine => upgrade_alpine_linux(ctx),
+            Distribution::Chimera => upgrade_chimera_linux(ctx),
+            Distribution::Wolfi => upgrade_wolfi_linux(ctx),
             Distribution::Arch => archlinux::upgrade_arch_linux(ctx),
             Distribution::CentOS | Distribution::Fedora => upgrade_redhat(ctx),
             Distribution::FedoraImmutable => upgrade_fedora_immutable(ctx),
@@ -155,6 +158,7 @@ impl Distribution {
             Distribution::OpenMandriva => upgrade_openmandriva(ctx),
             Distribution::PCLinuxOS => upgrade_pclinuxos(ctx),
             Distribution::Nobara => upgrade_nobara(ctx),
+            Distribution::NILRT => upgrade_nilrt(ctx),
         }
     }

@@ -170,7 +174,7 @@ impl Distribution {
 }

 fn update_bedrock(ctx: &ExecutionContext) -> Result<()> {
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;

     ctx.run_type().execute(sudo).args(["brl", "update"]);

@@ -181,7 +185,7 @@ fn update_bedrock(ctx: &ExecutionContext) -> Result<()> {
         debug!("Bedrock distribution {}", distribution);
         match distribution {
             "arch" => archlinux::upgrade_arch_linux(ctx)?,
-            "debian" | "ubuntu" => upgrade_debian(ctx)?,
+            "debian" | "ubuntu" | "linuxmint" => upgrade_debian(ctx)?,
             "centos" | "fedora" => upgrade_redhat(ctx)?,
             "bedrock" => upgrade_bedrock_strata(ctx)?,
             _ => {
@@ -195,13 +199,36 @@ fn update_bedrock(ctx: &ExecutionContext) -> Result<()> {

 fn upgrade_alpine_linux(ctx: &ExecutionContext) -> Result<()> {
     let apk = require("apk")?;
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;

+    ctx.run_type().execute(sudo).arg(&apk).arg("update").status_checked()?;
+    ctx.run_type().execute(sudo).arg(&apk).arg("upgrade").status_checked()
+}
+
+fn upgrade_chimera_linux(ctx: &ExecutionContext) -> Result<()> {
+    let apk = require("apk")?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
+
+    ctx.run_type().execute(sudo).arg(&apk).arg("update").status_checked()?;
+    ctx.run_type().execute(sudo).arg(&apk).arg("upgrade").status_checked()
+}
+
+fn upgrade_wolfi_linux(ctx: &ExecutionContext) -> Result<()> {
+    let apk = require("apk")?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
+
     ctx.run_type().execute(sudo).arg(&apk).arg("update").status_checked()?;
     ctx.run_type().execute(sudo).arg(&apk).arg("upgrade").status_checked()
 }

 fn upgrade_redhat(ctx: &ExecutionContext) -> Result<()> {
+    if let Some(bootc) = which("bootc") {
+        if ctx.config().bootc() {
+            let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
+            return ctx.run_type().execute(sudo).arg(&bootc).arg("upgrade").status_checked();
+        }
+    }
+
     if let Some(ostree) = which("rpm-ostree") {
         if ctx.config().rpm_ostree() {
             let mut command = ctx.run_type().execute(ostree);
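Alpine, Chimera, and Wolfi all end up running the same two apk invocations through sudo; only the distribution detection differs. A condensed sketch of that shared flow as one hypothetical helper (`run_apk_upgrade` does not exist in the codebase; it just restates the pattern with plain `std::process::Command`):

```rust
use std::process::Command;

// Hypothetical condensation of upgrade_alpine_linux / upgrade_chimera_linux /
// upgrade_wolfi_linux: refresh the index, then upgrade everything via sudo.
fn run_apk_upgrade(sudo: &str, apk: &str) -> std::io::Result<()> {
    for subcommand in ["update", "upgrade"] {
        let status = Command::new(sudo).arg(apk).arg(subcommand).status()?;
        if !status.success() {
            return Err(std::io::Error::new(
                std::io::ErrorKind::Other,
                format!("apk {subcommand} failed"),
            ));
        }
    }
    Ok(())
}

fn main() -> std::io::Result<()> {
    run_apk_upgrade("sudo", "apk")
}
```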
@@ -210,7 +237,7 @@ fn upgrade_redhat(ctx: &ExecutionContext) -> Result<()> {
             }
         };

-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
     let mut command = ctx.run_type().execute(sudo);
     command
         .arg(which("dnf").unwrap_or_else(|| Path::new("yum").to_path_buf()))
@@ -233,7 +260,7 @@ fn upgrade_redhat(ctx: &ExecutionContext) -> Result<()> {
 }

 fn upgrade_nobara(ctx: &ExecutionContext) -> Result<()> {
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
     let pkg_manager = require("dnf")?;

     let mut update_command = ctx.run_type().execute(sudo);
@@ -266,7 +293,22 @@ fn upgrade_nobara(ctx: &ExecutionContext) -> Result<()> {
     Ok(())
 }

+fn upgrade_nilrt(ctx: &ExecutionContext) -> Result<()> {
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
+    let opkg = require("opkg")?;
+
+    ctx.run_type().execute(sudo).arg(&opkg).arg("update").status_checked()?;
+    ctx.run_type().execute(sudo).arg(&opkg).arg("upgrade").status_checked()
+}
+
 fn upgrade_fedora_immutable(ctx: &ExecutionContext) -> Result<()> {
+    if let Some(bootc) = which("bootc") {
+        if ctx.config().bootc() {
+            let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
+            return ctx.run_type().execute(sudo).arg(&bootc).arg("upgrade").status_checked();
+        }
+    }
+
     let ostree = require("rpm-ostree")?;
     let mut command = ctx.run_type().execute(ostree);
     command.arg("upgrade");
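Both `upgrade_redhat` and `upgrade_fedora_immutable` now check for `bootc` first and only fall back to `rpm-ostree` or plain dnf/yum when the config flag is off or the binary is absent. A schematic of that preference order (the enum and function names below are illustrative, not topgrade API):

```rust
#[derive(Debug, PartialEq)]
enum RedHatBackend {
    Bootc,
    RpmOstree,
    Dnf,
}

// Illustrative restatement of the selection logic added in the diff:
// prefer bootc when it is installed *and* enabled in the config, then
// rpm-ostree under the same rule, otherwise plain dnf/yum.
fn pick_backend(
    bootc_installed: bool,
    bootc_enabled: bool,
    rpm_ostree_installed: bool,
    rpm_ostree_enabled: bool,
) -> RedHatBackend {
    if bootc_installed && bootc_enabled {
        RedHatBackend::Bootc
    } else if rpm_ostree_installed && rpm_ostree_enabled {
        RedHatBackend::RpmOstree
    } else {
        RedHatBackend::Dnf
    }
}

fn main() {
    assert_eq!(pick_backend(true, true, true, true), RedHatBackend::Bootc);
    assert_eq!(pick_backend(true, false, true, true), RedHatBackend::RpmOstree);
    assert_eq!(pick_backend(false, false, false, false), RedHatBackend::Dnf);
}
```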
@@ -275,14 +317,14 @@ fn upgrade_fedora_immutable(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_bedrock_strata(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_bedrock_strata(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
ctx.run_type().execute(sudo).args(["brl", "update"]).status_checked()?;
|
ctx.run_type().execute(sudo).args(["brl", "update"]).status_checked()?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_suse(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_suse(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
ctx.run_type()
|
ctx.run_type()
|
||||||
.execute(sudo)
|
.execute(sudo)
|
||||||
.args(["zypper", "refresh"])
|
.args(["zypper", "refresh"])
|
||||||
@@ -305,7 +347,7 @@ fn upgrade_suse(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_opensuse_tumbleweed(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_opensuse_tumbleweed(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
ctx.run_type()
|
ctx.run_type()
|
||||||
.execute(sudo)
|
.execute(sudo)
|
||||||
.args(["zypper", "refresh"])
|
.args(["zypper", "refresh"])
|
||||||
@@ -323,7 +365,7 @@ fn upgrade_opensuse_tumbleweed(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_suse_micro(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_suse_micro(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let mut cmd = ctx.run_type().execute(sudo);
|
let mut cmd = ctx.run_type().execute(sudo);
|
||||||
cmd.arg("transactional-update");
|
cmd.arg("transactional-update");
|
||||||
if ctx.config().yes(Step::System) {
|
if ctx.config().yes(Step::System) {
|
||||||
@@ -336,10 +378,10 @@ fn upgrade_suse_micro(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_openmandriva(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_openmandriva(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let mut command = ctx.run_type().execute(sudo);
|
let mut command = ctx.run_type().execute(sudo);
|
||||||
|
|
||||||
command.arg(&which("dnf").unwrap()).arg("upgrade");
|
command.arg(which("dnf").unwrap()).arg("upgrade");
|
||||||
|
|
||||||
if let Some(args) = ctx.config().dnf_arguments() {
|
if let Some(args) = ctx.config().dnf_arguments() {
|
||||||
command.args(args.split_whitespace());
|
command.args(args.split_whitespace());
|
||||||
@@ -355,10 +397,10 @@ fn upgrade_openmandriva(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_pclinuxos(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_pclinuxos(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let mut command_update = ctx.run_type().execute(sudo);
|
let mut command_update = ctx.run_type().execute(sudo);
|
||||||
|
|
||||||
command_update.arg(&which("apt-get").unwrap()).arg("update");
|
command_update.arg(which("apt-get").unwrap()).arg("update");
|
||||||
|
|
||||||
if let Some(args) = ctx.config().dnf_arguments() {
|
if let Some(args) = ctx.config().dnf_arguments() {
|
||||||
command_update.args(args.split_whitespace());
|
command_update.args(args.split_whitespace());
|
||||||
@@ -371,7 +413,7 @@ fn upgrade_pclinuxos(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
command_update.status_checked()?;
|
command_update.status_checked()?;
|
||||||
|
|
||||||
let mut cmd = ctx.run_type().execute(sudo);
|
let mut cmd = ctx.run_type().execute(sudo);
|
||||||
cmd.arg(&which("apt-get").unwrap());
|
cmd.arg(which("apt-get").unwrap());
|
||||||
cmd.arg("dist-upgrade");
|
cmd.arg("dist-upgrade");
|
||||||
if ctx.config().yes(Step::System) {
|
if ctx.config().yes(Step::System) {
|
||||||
cmd.arg("-y");
|
cmd.arg("-y");
|
||||||
@@ -402,7 +444,7 @@ fn upgrade_vanilla(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_void(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_void(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let mut command = ctx.run_type().execute(sudo);
|
let mut command = ctx.run_type().execute(sudo);
|
||||||
command.args(["xbps-install", "-Su", "xbps"]);
|
command.args(["xbps-install", "-Su", "xbps"]);
|
||||||
if ctx.config().yes(Step::System) {
|
if ctx.config().yes(Step::System) {
|
||||||
@@ -423,7 +465,7 @@ fn upgrade_void(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
fn upgrade_gentoo(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_gentoo(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let run_type = ctx.run_type();
|
let run_type = ctx.run_type();
|
||||||
|
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
if let Some(layman) = which("layman") {
|
if let Some(layman) = which("layman") {
|
||||||
run_type
|
run_type
|
||||||
.execute(sudo)
|
.execute(sudo)
|
||||||
@@ -432,7 +474,11 @@ fn upgrade_gentoo(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
.status_checked()?;
|
.status_checked()?;
|
||||||
}
|
}
|
||||||
|
|
||||||
println!("Syncing portage");
|
println!("{}", t!("Syncing portage"));
|
||||||
|
if let Some(ego) = which("ego") {
|
||||||
|
// The Funtoo team doesn't reccomend running both ego sync and emerge --sync
|
||||||
|
run_type.execute(sudo).arg(ego).arg("sync").status_checked()?;
|
||||||
|
} else {
|
||||||
run_type
|
run_type
|
||||||
.execute(sudo)
|
.execute(sudo)
|
||||||
.args(["emerge", "--sync"])
|
.args(["emerge", "--sync"])
|
||||||
@@ -443,6 +489,7 @@ fn upgrade_gentoo(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
.unwrap_or_else(|| vec!["-q"]),
|
.unwrap_or_else(|| vec!["-q"]),
|
||||||
)
|
)
|
||||||
.status_checked()?;
|
.status_checked()?;
|
||||||
|
}
|
||||||
|
|
||||||
if let Some(eix_update) = which("eix-update") {
|
if let Some(eix_update) = which("eix-update") {
|
||||||
run_type.execute(sudo).arg(eix_update).status_checked()?;
|
run_type.execute(sudo).arg(eix_update).status_checked()?;
|
||||||
@@ -493,7 +540,7 @@ fn upgrade_debian(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
if !is_nala {
|
if !is_nala {
|
||||||
ctx.run_type()
|
ctx.run_type()
|
||||||
.execute(sudo)
|
.execute(sudo)
|
||||||
@@ -547,7 +594,7 @@ pub fn run_deb_get(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_solus(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_solus(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let mut cmd = ctx.run_type().execute(sudo);
|
let mut cmd = ctx.run_type().execute(sudo);
|
||||||
cmd.arg("eopkg");
|
cmd.arg("eopkg");
|
||||||
if ctx.config().yes(Step::System) {
|
if ctx.config().yes(Step::System) {
|
||||||
@@ -656,7 +703,7 @@ pub fn run_packer_nu(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_clearlinux(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_clearlinux(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let mut cmd = ctx.run_type().execute(sudo);
|
let mut cmd = ctx.run_type().execute(sudo);
|
||||||
cmd.args(["swupd", "update"]);
|
cmd.args(["swupd", "update"]);
|
||||||
if ctx.config().yes(Step::System) {
|
if ctx.config().yes(Step::System) {
|
||||||
@@ -668,7 +715,7 @@ fn upgrade_clearlinux(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_exherbo(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_exherbo(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
ctx.run_type().execute(sudo).args(["cave", "sync"]).status_checked()?;
|
ctx.run_type().execute(sudo).args(["cave", "sync"]).status_checked()?;
|
||||||
|
|
||||||
ctx.run_type()
|
ctx.run_type()
|
||||||
@@ -697,7 +744,7 @@ fn upgrade_exherbo(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn upgrade_nixos(ctx: &ExecutionContext) -> Result<()> {
|
fn upgrade_nixos(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let mut command = ctx.run_type().execute(sudo);
|
let mut command = ctx.run_type().execute(sudo);
|
||||||
command.args(["/run/current-system/sw/bin/nixos-rebuild", "switch", "--upgrade"]);
|
command.args(["/run/current-system/sw/bin/nixos-rebuild", "switch", "--upgrade"]);
|
||||||
|
|
||||||
@@ -723,7 +770,7 @@ fn upgrade_neon(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
// seems rare
|
// seems rare
|
||||||
// if that comes up we need to create a Distribution::PackageKit or some such
|
// if that comes up we need to create a Distribution::PackageKit or some such
|
||||||
|
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let pkcon = which("pkcon").unwrap();
|
let pkcon = which("pkcon").unwrap();
|
||||||
// pkcon ignores update with update and refresh provided together
|
// pkcon ignores update with update and refresh provided together
|
||||||
ctx.run_type()
|
ctx.run_type()
|
||||||
@@ -752,7 +799,7 @@ fn upgrade_neon(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
/// alternative
|
/// alternative
|
||||||
fn should_skip_needrestart() -> Result<()> {
|
fn should_skip_needrestart() -> Result<()> {
|
||||||
let distribution = Distribution::detect()?;
|
let distribution = Distribution::detect()?;
|
||||||
let msg = "needrestart will be ran by the package manager";
|
let msg = t!("needrestart will be ran by the package manager");
|
||||||
|
|
||||||
if distribution.redhat_based() {
|
if distribution.redhat_based() {
|
||||||
return Err(SkipStep(String::from(msg)).into());
|
return Err(SkipStep(String::from(msg)).into());
|
||||||
@@ -787,12 +834,12 @@ fn should_skip_needrestart() -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn run_needrestart(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_needrestart(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let needrestart = require("needrestart")?;
|
let needrestart = require("needrestart")?;
|
||||||
|
|
||||||
should_skip_needrestart()?;
|
should_skip_needrestart()?;
|
||||||
|
|
||||||
print_separator("Check for needed restarts");
|
print_separator(t!("Check for needed restarts"));
|
||||||
|
|
||||||
ctx.run_type().execute(sudo).arg(needrestart).status_checked()?;
|
ctx.run_type().execute(sudo).arg(needrestart).status_checked()?;
|
||||||
|
|
||||||
@@ -803,10 +850,10 @@ pub fn run_fwupdmgr(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
let fwupdmgr = require("fwupdmgr")?;
|
let fwupdmgr = require("fwupdmgr")?;
|
||||||
|
|
||||||
if is_wsl()? {
|
if is_wsl()? {
|
||||||
return Err(SkipStep(String::from("Should not run in WSL")).into());
|
return Err(SkipStep(t!("Should not run in WSL").to_string()).into());
|
||||||
}
|
}
|
||||||
|
|
||||||
print_separator("Firmware upgrades");
|
print_separator(t!("Firmware upgrades"));
|
||||||
|
|
||||||
ctx.run_type()
|
ctx.run_type()
|
||||||
.execute(&fwupdmgr)
|
.execute(&fwupdmgr)
|
||||||
@@ -828,7 +875,7 @@ pub fn run_fwupdmgr(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
|
|
||||||
pub fn run_flatpak(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_flatpak(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let flatpak = require("flatpak")?;
|
let flatpak = require("flatpak")?;
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let cleanup = ctx.config().cleanup();
|
let cleanup = ctx.config().cleanup();
|
||||||
let yes = ctx.config().yes(Step::Flatpak);
|
let yes = ctx.config().yes(Step::Flatpak);
|
||||||
let run_type = ctx.run_type();
|
let run_type = ctx.run_type();
|
||||||
@@ -848,7 +895,7 @@ pub fn run_flatpak(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
run_type.execute(&flatpak).args(&cleanup_args).status_checked()?;
|
run_type.execute(&flatpak).args(&cleanup_args).status_checked()?;
|
||||||
}
|
}
|
||||||
|
|
||||||
print_separator("Flatpak System Packages");
|
print_separator(t!("Flatpak System Packages"));
|
||||||
if ctx.config().flatpak_use_sudo() || std::env::var("SSH_CLIENT").is_ok() {
|
if ctx.config().flatpak_use_sudo() || std::env::var("SSH_CLIENT").is_ok() {
|
||||||
let mut update_args = vec!["update", "--system"];
|
let mut update_args = vec!["update", "--system"];
|
||||||
if yes {
|
if yes {
|
||||||
@@ -889,11 +936,11 @@ pub fn run_flatpak(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn run_snap(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_snap(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let snap = require("snap")?;
|
let snap = require("snap")?;
|
||||||
|
|
||||||
if !PathBuf::from("/var/snapd.socket").exists() && !PathBuf::from("/run/snapd.socket").exists() {
|
if !PathBuf::from("/var/snapd.socket").exists() && !PathBuf::from("/run/snapd.socket").exists() {
|
||||||
return Err(SkipStep(String::from("Snapd socket does not exist")).into());
|
return Err(SkipStep(t!("Snapd socket does not exist").to_string()).into());
|
||||||
}
|
}
|
||||||
print_separator("snap");
|
print_separator("snap");
|
||||||
|
|
||||||
@@ -901,7 +948,7 @@ pub fn run_snap(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn run_pihole_update(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_pihole_update(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let pihole = require("pihole")?;
|
let pihole = require("pihole")?;
|
||||||
Path::new("/opt/pihole/update.sh").require()?;
|
Path::new("/opt/pihole/update.sh").require()?;
|
||||||
|
|
||||||
@@ -935,7 +982,7 @@ pub fn run_distrobox_update(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
) {
|
) {
|
||||||
(r, Some(c)) => {
|
(r, Some(c)) => {
|
||||||
if c.is_empty() {
|
if c.is_empty() {
|
||||||
return Err(SkipStep("You need to specify at least one container".to_string()).into());
|
return Err(SkipStep(t!("You need to specify at least one container").to_string()).into());
|
||||||
}
|
}
|
||||||
r.args(c)
|
r.args(c)
|
||||||
}
|
}
|
||||||
@@ -950,7 +997,7 @@ pub fn run_distrobox_update(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn run_dkp_pacman_update(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_dkp_pacman_update(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
let dkp_pacman = require("dkp-pacman")?;
|
let dkp_pacman = require("dkp-pacman")?;
|
||||||
|
|
||||||
print_separator("Devkitpro pacman");
|
print_separator("Devkitpro pacman");
|
||||||
@@ -973,20 +1020,20 @@ pub fn run_dkp_pacman_update(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn run_config_update(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_config_update(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
if ctx.config().yes(Step::ConfigUpdate) {
|
if ctx.config().yes(Step::ConfigUpdate) {
|
||||||
return Err(SkipStep("Skipped in --yes".to_string()).into());
|
return Err(SkipStep(t!("Skipped in --yes").to_string()).into());
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Ok(etc_update) = require("etc-update") {
|
if let Ok(etc_update) = require("etc-update") {
|
||||||
print_separator("Configuration update");
|
print_separator(t!("Configuration update"));
|
||||||
ctx.run_type().execute(sudo).arg(etc_update).status_checked()?;
|
ctx.run_type().execute(sudo).arg(etc_update).status_checked()?;
|
||||||
} else if let Ok(pacdiff) = require("pacdiff") {
|
} else if let Ok(pacdiff) = require("pacdiff") {
|
||||||
if std::env::var("DIFFPROG").is_err() {
|
if std::env::var("DIFFPROG").is_err() {
|
||||||
require("vim")?;
|
require("vim")?;
|
||||||
}
|
}
|
||||||
|
|
||||||
print_separator("Configuration update");
|
print_separator(t!("Configuration update"));
|
||||||
ctx.execute_elevated(&pacdiff, false)?.status_checked()?;
|
ctx.execute_elevated(&pacdiff, false)?.status_checked()?;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1009,6 +1056,78 @@ pub fn run_lure_update(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
exe.status_checked()
|
exe.status_checked()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn run_waydroid(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
|
let waydroid = require("waydroid")?;
|
||||||
|
let status = ctx.run_type().execute(&waydroid).arg("status").output_checked_utf8()?;
|
||||||
|
// example output of `waydroid status`:
|
||||||
|
//
|
||||||
|
// ```sh
|
||||||
|
// $ waydroid status
|
||||||
|
// Session: RUNNING
|
||||||
|
// Container: RUNNING
|
||||||
|
// Vendor type: MAINLINE
|
||||||
|
// IP address: 192.168.240.112
|
||||||
|
// Session user: w568w(1000)
|
||||||
|
// Wayland display: wayland-0
|
||||||
|
// ```
|
||||||
|
//
|
||||||
|
// ```sh
|
||||||
|
// $ waydroid status
|
||||||
|
// Session: STOPPED
|
||||||
|
// Vendor type: MAINLINE
|
||||||
|
// ```
|
||||||
|
let session = status
|
||||||
|
.stdout
|
||||||
|
.lines()
|
||||||
|
.find(|line| line.contains("Session:"))
|
||||||
|
.unwrap_or_else(|| panic!("the output of `waydroid status` should contain `Session:`"));
|
||||||
|
let is_container_running = session.contains("RUNNING");
|
||||||
|
let assume_yes = ctx.config().yes(Step::Waydroid);
|
||||||
|
|
||||||
|
print_separator("Waydroid");
|
||||||
|
|
||||||
|
if is_container_running && !assume_yes {
|
||||||
|
let update_allowed = prompt_yesno(&t!(
|
||||||
|
"Going to execute `waydroid upgrade`, which would STOP the running container, is this ok?"
|
||||||
|
))?;
|
||||||
|
if !update_allowed {
|
||||||
|
return Err(
|
||||||
|
SkipStep(t!("Skip the Waydroid step because the user don't want to proceed").to_string()).into(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(sudo)
|
||||||
|
.arg(&waydroid)
|
||||||
|
.arg("upgrade")
|
||||||
|
.status_checked()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_auto_cpufreq(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
|
let auto_cpu_freq = require("auto-cpufreq")?;
|
||||||
|
|
||||||
|
print_separator("auto-cpufreq");
|
||||||
|
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(sudo)
|
||||||
|
.arg(auto_cpu_freq)
|
||||||
|
.arg("--update")
|
||||||
|
.status_checked()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_cinnamon_spices_updater(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let cinnamon_spice_updater = require("cinnamon-spice-updater")?;
|
||||||
|
|
||||||
|
print_separator("Cinnamon spices");
|
||||||
|
|
||||||
|
ctx.run_type()
|
||||||
|
.execute(cinnamon_spice_updater)
|
||||||
|
.arg("--update-all")
|
||||||
|
.status_checked()
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
@@ -1021,6 +1140,11 @@ mod tests {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_wolfi() {
|
||||||
|
test_template(include_str!("os_release/wolfi"), Distribution::Wolfi);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_arch_linux() {
|
fn test_arch_linux() {
|
||||||
test_template(include_str!("os_release/arch"), Distribution::Arch);
|
test_template(include_str!("os_release/arch"), Distribution::Arch);
|
||||||
@@ -1081,6 +1205,11 @@ mod tests {
|
|||||||
test_template(include_str!("os_release/fedorakinoite"), Distribution::FedoraImmutable);
|
test_template(include_str!("os_release/fedorakinoite"), Distribution::FedoraImmutable);
|
||||||
test_template(include_str!("os_release/fedoraonyx"), Distribution::FedoraImmutable);
|
test_template(include_str!("os_release/fedoraonyx"), Distribution::FedoraImmutable);
|
||||||
test_template(include_str!("os_release/fedorasericea"), Distribution::FedoraImmutable);
|
test_template(include_str!("os_release/fedorasericea"), Distribution::FedoraImmutable);
|
||||||
|
test_template(include_str!("os_release/fedoraiot"), Distribution::FedoraImmutable);
|
||||||
|
test_template(
|
||||||
|
include_str!("os_release/fedoraswayatomic"),
|
||||||
|
Distribution::FedoraImmutable,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -1098,6 +1227,11 @@ mod tests {
|
|||||||
test_template(include_str!("os_release/gentoo"), Distribution::Gentoo);
|
test_template(include_str!("os_release/gentoo"), Distribution::Gentoo);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_funtoo() {
|
||||||
|
test_template(include_str!("os_release/funtoo"), Distribution::Gentoo);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_exherbo() {
|
fn test_exherbo() {
|
||||||
test_template(include_str!("os_release/exherbo"), Distribution::Exherbo);
|
test_template(include_str!("os_release/exherbo"), Distribution::Exherbo);
|
||||||
@@ -1157,4 +1291,29 @@ mod tests {
|
|||||||
fn test_nobara() {
|
fn test_nobara() {
|
||||||
test_template(include_str!("os_release/nobara"), Distribution::Nobara);
|
test_template(include_str!("os_release/nobara"), Distribution::Nobara);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_nilrt() {
|
||||||
|
test_template(include_str!("os_release/nilrt"), Distribution::NILRT);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_coreos() {
|
||||||
|
test_template(include_str!("os_release/coreos"), Distribution::FedoraImmutable);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_aurora() {
|
||||||
|
test_template(include_str!("os_release/aurora"), Distribution::FedoraImmutable);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_bluefin() {
|
||||||
|
test_template(include_str!("os_release/bluefin"), Distribution::FedoraImmutable);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_bazzite() {
|
||||||
|
test_template(include_str!("os_release/bazzite"), Distribution::FedoraImmutable);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,16 +1,18 @@
|
|||||||
use crate::command::CommandExt;
|
use crate::command::CommandExt;
|
||||||
use crate::execution_context::ExecutionContext;
|
use crate::execution_context::ExecutionContext;
|
||||||
use crate::terminal::{print_separator, prompt_yesno};
|
use crate::terminal::{print_separator, prompt_yesno};
|
||||||
use crate::utils::{require_option, REQUIRE_SUDO};
|
use crate::utils::{get_require_sudo_string, require_option};
|
||||||
use crate::{utils::require, Step};
|
use crate::{utils::require, Step};
|
||||||
use color_eyre::eyre::Result;
|
use color_eyre::eyre::Result;
|
||||||
|
use rust_i18n::t;
|
||||||
|
use std::collections::HashSet;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
use tracing::debug;
|
use tracing::debug;
|
||||||
|
|
||||||
pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {
|
||||||
require("port")?;
|
require("port")?;
|
||||||
let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
|
let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
|
||||||
|
|
||||||
print_separator("MacPorts");
|
print_separator("MacPorts");
|
||||||
ctx.run_type()
|
ctx.run_type()
|
||||||
@@ -33,25 +35,25 @@ pub fn run_macports(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
|
|
||||||
pub fn run_mas(ctx: &ExecutionContext) -> Result<()> {
|
pub fn run_mas(ctx: &ExecutionContext) -> Result<()> {
|
||||||
let mas = require("mas")?;
|
let mas = require("mas")?;
|
||||||
print_separator("macOS App Store");
|
print_separator(t!("macOS App Store"));
|
||||||
|
|
||||||
ctx.run_type().execute(mas).arg("upgrade").status_checked()
|
ctx.run_type().execute(mas).arg("upgrade").status_checked()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn upgrade_macos(ctx: &ExecutionContext) -> Result<()> {
|
pub fn upgrade_macos(ctx: &ExecutionContext) -> Result<()> {
|
||||||
print_separator("macOS system update");
|
print_separator(t!("macOS system update"));
|
||||||
|
|
||||||
let should_ask = !(ctx.config().yes(Step::System)) || (ctx.config().dry_run());
|
let should_ask = !(ctx.config().yes(Step::System) || ctx.config().dry_run());
|
||||||
if should_ask {
|
if should_ask {
|
||||||
println!("Finding available software");
|
println!("{}", t!("Finding available software"));
|
||||||
if system_update_available()? {
|
if system_update_available()? {
|
||||||
let answer = prompt_yesno("A system update is available. Do you wish to install it?")?;
|
let answer = prompt_yesno(t!("A system update is available. Do you wish to install it?").as_ref())?;
|
||||||
if !answer {
|
if !answer {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
println!();
|
println!();
|
||||||
} else {
|
} else {
|
||||||
println!("No new software available.");
|
println!("{}", t!("No new software available."));
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -93,3 +95,145 @@ pub fn run_sparkle(ctx: &ExecutionContext) -> Result<()> {
|
|||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn update_xcodes(ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let xcodes = require("xcodes")?;
|
||||||
|
print_separator("Xcodes");
|
||||||
|
|
||||||
|
let should_ask = !(ctx.config().yes(Step::Xcodes) || ctx.config().dry_run());
|
||||||
|
|
||||||
|
let releases = ctx
|
||||||
|
.run_type()
|
||||||
|
.execute(&xcodes)
|
||||||
|
.args(["update"])
|
||||||
|
.output_checked_utf8()?
|
||||||
|
.stdout;
|
||||||
|
|
||||||
|
let releases_installed: Vec<String> = releases
|
||||||
|
.lines()
|
||||||
|
.filter(|r| r.contains("(Installed)"))
|
||||||
|
.map(String::from)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if releases_installed.is_empty() {
|
||||||
|
println!("{}", t!("No Xcode releases installed."));
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let (installed_gm, installed_beta, installed_regular) =
|
||||||
|
releases_installed
|
||||||
|
.iter()
|
||||||
|
.fold((false, false, false), |(gm, beta, regular), release| {
|
||||||
|
(
|
||||||
|
gm || release.contains("GM") || release.contains("Release Candidate"),
|
||||||
|
beta || release.contains("Beta"),
|
||||||
|
regular
|
||||||
|
|| !(release.contains("GM")
|
||||||
|
|| release.contains("Release Candidate")
|
||||||
|
|| release.contains("Beta")),
|
||||||
|
)
|
||||||
|
});
|
||||||
|
|
||||||
|
let releases_gm = releases
|
||||||
|
.lines()
|
||||||
|
.filter(|&r| r.matches("GM").count() > 0 || r.matches("Release Candidate").count() > 0)
|
||||||
|
.map(String::from)
|
||||||
|
.collect();
|
||||||
|
let releases_beta = releases
|
||||||
|
.lines()
|
||||||
|
.filter(|&r| r.matches("Beta").count() > 0)
|
||||||
|
.map(String::from)
|
||||||
|
.collect();
|
||||||
|
let releases_regular = releases
|
||||||
|
.lines()
|
||||||
|
.filter(|&r| {
|
||||||
|
r.matches("GM").count() == 0
|
||||||
|
&& r.matches("Release Candidate").count() == 0
|
||||||
|
&& r.matches("Beta").count() == 0
|
||||||
|
})
|
||||||
|
.map(String::from)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if installed_gm {
|
||||||
|
process_xcodes_releases(releases_gm, should_ask, ctx)?;
|
||||||
|
}
|
||||||
|
if installed_beta {
|
||||||
|
process_xcodes_releases(releases_beta, should_ask, ctx)?;
|
||||||
|
}
|
||||||
|
if installed_regular {
|
||||||
|
process_xcodes_releases(releases_regular, should_ask, ctx)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let releases_new = ctx
|
||||||
|
.run_type()
|
||||||
|
.execute(&xcodes)
|
||||||
|
.args(["list"])
|
||||||
|
.output_checked_utf8()?
|
||||||
|
.stdout;
|
||||||
|
|
||||||
|
let releases_gm_new_installed: HashSet<_> = releases_new
|
||||||
|
.lines()
|
||||||
|
.filter(|release| {
|
||||||
|
release.contains("(Installed)") && (release.contains("GM") || release.contains("Release Candidate"))
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
let releases_beta_new_installed: HashSet<_> = releases_new
|
||||||
|
.lines()
|
||||||
|
.filter(|release| release.contains("(Installed)") && release.contains("Beta"))
|
||||||
|
.collect();
|
||||||
|
let releases_regular_new_installed: HashSet<_> = releases_new
|
||||||
|
.lines()
|
||||||
|
.filter(|release| {
|
||||||
|
release.contains("(Installed)")
|
||||||
|
&& !(release.contains("GM") || release.contains("Release Candidate") || release.contains("Beta"))
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
for releases_new_installed in [
|
||||||
|
releases_gm_new_installed,
|
||||||
|
releases_beta_new_installed,
|
||||||
|
releases_regular_new_installed,
|
||||||
|
] {
|
||||||
|
if should_ask && releases_new_installed.len() == 2 {
|
||||||
|
let answer_uninstall =
|
||||||
|
prompt_yesno(t!("Would you like to move the former Xcode release to the trash?").as_ref())?;
|
||||||
|
if answer_uninstall {
|
||||||
|
let _ = ctx
|
||||||
|
.run_type()
|
||||||
|
.execute(&xcodes)
|
||||||
|
.args([
|
||||||
|
"uninstall",
|
||||||
|
releases_new_installed.iter().next().copied().unwrap_or_default(),
|
||||||
|
])
|
||||||
|
.status_checked();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn process_xcodes_releases(releases_filtered: Vec<String>, should_ask: bool, ctx: &ExecutionContext) -> Result<()> {
|
||||||
|
let xcodes = require("xcodes")?;
|
||||||
|
|
||||||
|
if releases_filtered.last().map_or(true, |s| !s.contains("(Installed)")) && !releases_filtered.is_empty() {
|
||||||
|
println!(
|
||||||
|
"{} {}",
|
||||||
|
t!("New Xcode release detected:"),
|
||||||
|
releases_filtered.last().cloned().unwrap_or_default()
|
||||||
|
);
|
||||||
|
if should_ask {
|
||||||
|
let answer_install = prompt_yesno(t!("Would you like to install it?").as_ref())?;
|
||||||
|
if answer_install {
|
||||||
|
let _ = ctx
|
||||||
|
.run_type()
|
||||||
|
.execute(xcodes)
|
||||||
|
.args(["install", &releases_filtered.last().cloned().unwrap_or_default()])
|
||||||
|
.status_checked();
|
||||||
|
}
|
||||||
|
println!();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
@@ -1,23 +1,66 @@
+use crate::command::CommandExt;
 use crate::execution_context::ExecutionContext;
 use crate::terminal::print_separator;
-use crate::utils::{require_option, REQUIRE_SUDO};
+use crate::utils::{get_require_sudo_string, require_option};
 use color_eyre::eyre::Result;
-use std::path::PathBuf;
+use rust_i18n::t;
+use std::fs;
+
+fn is_openbsd_current(ctx: &ExecutionContext) -> Result<bool> {
+    let motd_content = fs::read_to_string("/etc/motd")?;
+    let is_current = ["-current", "-beta"].iter().any(|&s| motd_content.contains(s));
+    if ctx.config().dry_run() {
+        println!("{}", t!("Would check if OpenBSD is -current"));
+        Ok(is_current)
+    } else {
+        Ok(is_current)
+    }
+}

 pub fn upgrade_openbsd(ctx: &ExecutionContext) -> Result<()> {
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
-    print_separator("OpenBSD Update");
+    print_separator(t!("OpenBSD Update"));
-    ctx.run_type()
-        .execute(sudo)
+    let is_current = is_openbsd_current(ctx)?;
-        .args(&["/usr/sbin/sysupgrade", "-n"])
-        .status_checked()
+    if ctx.config().dry_run() {
+        println!("{}", t!("Would upgrade the OpenBSD system"));
+        return Ok(());
+    }
+
+    let args = if is_current {
+        vec!["/usr/sbin/sysupgrade", "-sn"]
+    } else {
+        vec!["/usr/sbin/syspatch"]
+    };
+
+    ctx.run_type().execute(sudo).args(&args).status_checked()
 }

 pub fn upgrade_packages(ctx: &ExecutionContext) -> Result<()> {
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;
-    print_separator("OpenBSD Packages");
+    print_separator(t!("OpenBSD Packages"));
+
+    let is_current = is_openbsd_current(ctx)?;
+
+    if ctx.config().dry_run() {
+        println!("{}", t!("Would upgrade OpenBSD packages"));
+        return Ok(());
+    }
+
+    if ctx.config().cleanup() {
     ctx.run_type()
         .execute(sudo)
-        .args(&["/usr/sbin/pkg_add", "-u"])
+        .args(["/usr/sbin/pkg_delete", "-ac"])
-        .status_checked()
+        .status_checked()?;
+    }
+
+    let mut args = vec!["/usr/sbin/pkg_add", "-u"];
+    if is_current {
+        args.push("-Dsnap");
+    }
+
+    ctx.run_type().execute(sudo).args(&args).status_checked()?;
+
+    Ok(())
 }
23  src/steps/os/os_release/aurora  Normal file
@@ -0,0 +1,23 @@
NAME="Aurora"
VERSION="latest-41.20250210.4 (Kinoite)"
RELEASE_TYPE=stable
ID=aurora
ID_LIKE="fedora"
VERSION_ID=41
VERSION_CODENAME=""
PLATFORM_ID="platform:f41"
PRETTY_NAME="Aurora (Version: latest-41.20250210.4 / FROM Fedora Kinoite 41)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:universal-blue:aurora:41"
DEFAULT_HOSTNAME="aurora"
HOME_URL="https://getaurora.dev/"
DOCUMENTATION_URL="https://docs.getaurora.dev"
SUPPORT_URL="https://github.com/ublue-os/aurora/issues/"
BUG_REPORT_URL="https://github.com/ublue-os/aurora/issues/"
SUPPORT_END=2025-12-15
VARIANT="Kinoite"
VARIANT_ID=aurora
OSTREE_VERSION='latest-41.20250210.4'
BUILD_ID="fc1570c"
IMAGE_ID="aurora"

25  src/steps/os/os_release/bazzite  Normal file
@@ -0,0 +1,25 @@
NAME="Bazzite"
VERSION="41.20250208.0 (Kinoite)"
RELEASE_TYPE=stable
ID=bazzite
ID_LIKE="fedora"
VERSION_ID=41
VERSION_CODENAME="Holographic"
PLATFORM_ID="platform:f41"
PRETTY_NAME="Bazzite 41 (FROM Fedora Kinoite)"
ANSI_COLOR="0;38;2;138;43;226"
LOGO=bazzite-logo-icon
CPE_NAME="cpe:/o:universal-blue:bazzite:41"
DEFAULT_HOSTNAME="bazzite"
HOME_URL="https://bazzite.gg"
DOCUMENTATION_URL="https://docs.bazzite.gg"
SUPPORT_URL="https://discord.bazzite.gg"
BUG_REPORT_URL="https://github.com/ublue-os/bazzite/issues/"
SUPPORT_END=2025-12-15
VARIANT="Kinoite"
VARIANT_ID=bazzite-nvidia-open
OSTREE_VERSION='41.20250208.0'
BUILD_ID="Stable (F41.20250208)"
BOOTLOADER_NAME="Bazzite Stable (F41.20250208)"
BUILD_ID="Stable (F41.20250208)"
BOOTLOADER_NAME="Bazzite Stable (F41.20250208)"

24  src/steps/os/os_release/bluefin  Normal file
@@ -0,0 +1,24 @@
NAME="Bluefin"
VERSION="41.20250216.1 (Silverblue)"
RELEASE_TYPE=stable
ID=bluefin
ID_LIKE="fedora"
VERSION_ID=41
VERSION_CODENAME="Archaeopteryx"
PLATFORM_ID="platform:f41"
PRETTY_NAME="Bluefin (Version: 41.20250216.1 / FROM Fedora Silverblue 41)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:universal-blue:bluefin:41"
DEFAULT_HOSTNAME="bluefin"
HOME_URL="https://projectbluefin.io"
DOCUMENTATION_URL="https://docs.projectbluefin.io"
SUPPORT_URL="https://github.com/ublue-os/bluefin/issues/"
BUG_REPORT_URL="https://github.com/ublue-os/bluefin/issues/"
SUPPORT_END=2025-12-15
VARIANT="Silverblue"
VARIANT_ID=bluefin
OSTREE_VERSION='41.20250216.1'
BUILD_ID="185146a"
IMAGE_ID="bluefin"
IMAGE_VERSION="41.20250216.1"

23  src/steps/os/os_release/coreos  Normal file
@@ -0,0 +1,23 @@
NAME="Fedora Linux"
VERSION="41.20250117.3.0 (CoreOS)"
RELEASE_TYPE=stable
ID=fedora
VERSION_ID=41
VERSION_CODENAME=""
PLATFORM_ID="platform:f41"
PRETTY_NAME="Fedora CoreOS 41.20250117.3.0 (uCore)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:fedoraproject:fedora:41"
HOME_URL="https://getfedora.org/coreos/"
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-coreos/"
SUPPORT_URL="https://github.com/coreos/fedora-coreos-tracker/"
BUG_REPORT_URL="https://github.com/coreos/fedora-coreos-tracker/"
REDHAT_BUGZILLA_PRODUCT="Fedora"
REDHAT_BUGZILLA_PRODUCT_VERSION=41
REDHAT_SUPPORT_PRODUCT="Fedora"
REDHAT_SUPPORT_PRODUCT_VERSION=41
SUPPORT_END=2025-12-15
VARIANT="CoreOS"
VARIANT_ID=coreos
OSTREE_VERSION='41.20250117.3.0'

22  src/steps/os/os_release/fedoraiot  Normal file
@@ -0,0 +1,22 @@
NAME="Fedora Linux"
VERSION="39.20240415.0 (IoT Edition)"
ID=fedora
VERSION_ID=39
VERSION_CODENAME=""
PLATFORM_ID="platform:f39"
PRETTY_NAME="Fedora Linux 39.20240415.0 (IoT Edition)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:fedoraproject:fedora:39"
HOME_URL="https://fedoraproject.org/"
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora/f39/system-administrators-guide/"
SUPPORT_URL="https://ask.fedoraproject.org/"
BUG_REPORT_URL="https://bugzilla.redhat.com/"
REDHAT_BUGZILLA_PRODUCT="Fedora"
REDHAT_BUGZILLA_PRODUCT_VERSION=39
REDHAT_SUPPORT_PRODUCT="Fedora"
REDHAT_SUPPORT_PRODUCT_VERSION=39
SUPPORT_END=2024-11-12
VARIANT="IoT Edition"
VARIANT_ID=iot
OSTREE_VERSION='39.20240415.0'

23  src/steps/os/os_release/fedoraswayatomic  Normal file
@@ -0,0 +1,23 @@
NAME="Fedora Linux"
VERSION="40.20240426.0 (Sway Atomic)"
ID=fedora
VERSION_ID=40
VERSION_CODENAME=""
PLATFORM_ID="platform:f40"
PRETTY_NAME="Fedora Linux 40.20240426.0 (Sway Atomic)"
ANSI_COLOR="0;38;2;60;110;180"
LOGO=fedora-logo-icon
CPE_NAME="cpe:/o:fedoraproject:fedora:40"
DEFAULT_HOSTNAME="fedora"
HOME_URL="https://fedoraproject.org/atomic-desktops/sway/"
DOCUMENTATION_URL="https://docs.fedoraproject.org/en-US/fedora-sericea/"
SUPPORT_URL="https://ask.fedoraproject.org/"
BUG_REPORT_URL="https://gitlab.com/fedora/sigs/sway/SIG/-/issues"
REDHAT_BUGZILLA_PRODUCT="Fedora"
REDHAT_BUGZILLA_PRODUCT_VERSION=40
REDHAT_SUPPORT_PRODUCT="Fedora"
REDHAT_SUPPORT_PRODUCT_VERSION=40
SUPPORT_END=2025-05-13
VARIANT="Sway Atomic"
VARIANT_ID=sway-atomic
OSTREE_VERSION='40.20240426.0'

6  src/steps/os/os_release/funtoo  Normal file
@@ -0,0 +1,6 @@
ID="funtoo"
NAME="Funtoo"
PRETTY_NAME="Funtoo Linux"
ANSI_COLOR="0;34"
HOME_URL="https://www.funtoo.org"
BUG_REPORT_URL="https://bugs.funtoo.org"

8  src/steps/os/os_release/nilrt  Normal file
@@ -0,0 +1,8 @@
ID=nilrt
NAME="NI Linux Real-Time"
VERSION="10.0 (kirkstone)"
VERSION_ID=10.0
PRETTY_NAME="NI Linux Real-Time 10.0 (kirkstone)"
DISTRO_CODENAME="kirkstone"
BUILD_ID="23.8.0f153-x64"
VERSION_CODENAME="kirkstone"

5  src/steps/os/os_release/wolfi  Normal file
@@ -0,0 +1,5 @@
ID=wolfi
NAME="Wolfi"
PRETTY_NAME="Wolfi"
VERSION_ID="20230201"
HOME_URL="https://wolfi.dev"
@@ -13,6 +13,12 @@ use color_eyre::eyre::Context;
 use color_eyre::eyre::Result;
 use home;
 use ini::Ini;
+use lazy_static::lazy_static;
+#[cfg(target_os = "linux")]
+use nix::unistd::Uid;
+use regex::Regex;
+use rust_i18n::t;
+use semver::Version;
 use tracing::debug;

 #[cfg(target_os = "linux")]
@@ -24,7 +30,7 @@ use crate::executor::Executor;
 #[cfg(any(target_os = "linux", target_os = "macos"))]
 use crate::executor::RunType;
 use crate::terminal::print_separator;
-use crate::utils::{require, require_option, PathExt, REQUIRE_SUDO};
+use crate::utils::{get_require_sudo_string, require, require_option, PathExt};

 #[cfg(any(target_os = "linux", target_os = "macos"))]
 const INTEL_BREW: &str = "/usr/local/bin/brew";
@@ -98,19 +104,19 @@ pub fn run_fisher(ctx: &ExecutionContext) -> Result<()> {
         .args(["-c", "type -t fisher"])
         .output_checked_utf8()
         .map(|_| ())
-        .map_err(|_| SkipStep("`fisher` is not defined in `fish`".to_owned()))?;
+        .map_err(|_| SkipStep(t!("`fisher` is not defined in `fish`").to_string()))?;

     Command::new(&fish)
         .args(["-c", "echo \"$__fish_config_dir/fish_plugins\""])
         .output_checked_utf8()
         .and_then(|output| Path::new(&output.stdout.trim()).require().map(|_| ()))
-        .map_err(|err| SkipStep(format!("`fish_plugins` path doesn't exist: {err}")))?;
+        .map_err(|err| SkipStep(t!("`fish_plugins` path doesn't exist: {err}", err = err).to_string()))?;

     Command::new(&fish)
         .args(["-c", "fish_update_completions"])
         .output_checked_utf8()
         .map(|_| ())
-        .map_err(|_| SkipStep("`fish_update_completions` is not available".to_owned()))?;
+        .map_err(|_| SkipStep(t!("`fish_update_completions` is not available").to_string()))?;

     print_separator("Fisher");

@@ -177,7 +183,7 @@ pub fn run_oh_my_fish(ctx: &ExecutionContext) -> Result<()> {

 pub fn run_pkgin(ctx: &ExecutionContext) -> Result<()> {
     let pkgin = require("pkgin")?;
-    let sudo = require_option(ctx.sudo().as_ref(), REQUIRE_SUDO.to_string())?;
+    let sudo = require_option(ctx.sudo().as_ref(), get_require_sudo_string())?;

     print_separator("Pkgin");

@@ -232,7 +238,7 @@ pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
     let gdbus = require("gdbus")?;
     require_option(
         var("XDG_CURRENT_DESKTOP").ok().filter(|p| p.contains("GNOME")),
-        "Desktop doest not appear to be gnome".to_string(),
+        t!("Desktop doest not appear to be gnome").to_string(),
     )?;
     let output = Command::new("gdbus")
         .args([
@@ -249,10 +255,10 @@ pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {

     debug!("Checking for gnome extensions: {}", output);
     if !output.stdout.contains("org.gnome.Shell.Extensions") {
-        return Err(SkipStep(String::from("Gnome shell extensions are unregistered in DBus")).into());
+        return Err(SkipStep(t!("Gnome shell extensions are unregistered in DBus").to_string()).into());
     }

-    print_separator("Gnome Shell extensions");
+    print_separator(t!("Gnome Shell extensions"));

     ctx.run_type()
         .execute(gdbus)
@@ -269,6 +275,23 @@ pub fn upgrade_gnome_extensions(ctx: &ExecutionContext) -> Result<()> {
         .status_checked()
 }

+#[cfg(target_os = "linux")]
+pub fn brew_linux_sudo_uid() -> Option<u32> {
+    let linuxbrew_directory = "/home/linuxbrew/.linuxbrew";
+    if let Ok(metadata) = std::fs::metadata(linuxbrew_directory) {
+        let owner_id = metadata.uid();
+        let current_id = Uid::effective();
+        // print debug these two values
+        debug!("linuxbrew_directory owner_id: {}, current_id: {}", owner_id, current_id);
+        return if owner_id == current_id.as_raw() {
+            None // no need for sudo if linuxbrew is owned by the current user
+        } else {
+            Some(owner_id) // otherwise use sudo to run brew as the owner
+        };
+    }
+    None
+}
+
 #[cfg(any(target_os = "linux", target_os = "macos"))]
 pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()> {
     #[allow(unused_variables)]
@@ -277,18 +300,50 @@ pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<
     #[cfg(target_os = "macos")]
     {
         if variant.is_path() && !BrewVariant::is_macos_custom(binary_name) {
-            return Err(SkipStep("Not a custom brew for macOS".to_string()).into());
+            return Err(SkipStep(t!("Not a custom brew for macOS").to_string()).into());
         }
     }

+    #[cfg(target_os = "linux")]
+    {
+        let sudo_uid = brew_linux_sudo_uid();
+        // if brew is owned by another user, execute "sudo -Hu <uid> brew update"
+        if let Some(user_id) = sudo_uid {
+            let uid = nix::unistd::Uid::from_raw(user_id);
+            let user = nix::unistd::User::from_uid(uid)
+                .expect("failed to call getpwuid()")
+                .expect("this user should exist");
+
+            let sudo_as_user = t!("sudo as user '{user}'", user = user.name);
+            print_separator(format!("{} ({})", variant.step_title(), sudo_as_user));
+
+            let sudo = crate::utils::require_option(ctx.sudo().as_ref(), crate::utils::get_require_sudo_string())?;
+            ctx.run_type()
+                .execute(sudo)
+                .current_dir("/tmp") // brew needs a writable current directory
+                .args([
+                    "--set-home",
+                    &format!("--user={}", user.name),
+                    &format!("{}", binary_name.to_string_lossy()),
+                    "update",
+                ])
+                .status_checked()?;
+            return Ok(());
+        }
+    }
     print_separator(variant.step_title());
     let run_type = ctx.run_type();

     variant.execute(run_type).arg("update").status_checked()?;
-    variant
-        .execute(run_type)
+    let mut command = variant.execute(run_type);
-        .args(["upgrade", "--formula"])
+    command.args(["upgrade", "--formula"]);
-        .status_checked()?;
+    if ctx.config().brew_fetch_head() {
+        command.arg("--fetch-HEAD");
+    }
+
+    command.status_checked()?;

     if ctx.config().cleanup() {
         variant.execute(run_type).arg("cleanup").status_checked()?;
@@ -305,7 +360,7 @@ pub fn run_brew_formula(ctx: &ExecutionContext, variant: BrewVariant) -> Result<
 pub fn run_brew_cask(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()> {
     let binary_name = require(variant.binary_name())?;
     if variant.is_path() && !BrewVariant::is_macos_custom(binary_name) {
-        return Err(SkipStep("Not a custom brew for macOS".to_string()).into());
+        return Err(SkipStep(t!("Not a custom brew for macOS").to_string()).into());
     }
     print_separator(format!("{} - Cask", variant.step_title()));
     let run_type = ctx.run_type();
@@ -328,6 +383,12 @@ pub fn run_brew_cask(ctx: &ExecutionContext, variant: BrewVariant) -> Result<()>
         if ctx.config().brew_cask_greedy() {
             brew_args.push("--greedy");
         }
+        if ctx.config().brew_greedy_latest() {
+            brew_args.push("--greedy-latest");
+        }
+        if ctx.config().brew_greedy_auto_updates() {
+            brew_args.push("--greedy-auto-updates");
+        }
     }

     variant.execute(run_type).args(&brew_args).status_checked()?;
@@ -354,7 +415,7 @@ pub fn run_guix(ctx: &ExecutionContext) -> Result<()> {
     if should_upgrade {
         return run_type.execute(&guix).args(["package", "-u"]).status_checked();
     }
-    Err(SkipStep(String::from("Guix Pull Failed, Skipping")).into())
+    Err(SkipStep(t!("Guix Pull Failed, Skipping").to_string()).into())
 }

 pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
@@ -374,23 +435,72 @@ pub fn run_nix(ctx: &ExecutionContext) -> Result<()> {
     #[cfg(target_os = "macos")]
     {
         if require("darwin-rebuild").is_ok() {
-            return Err(SkipStep(String::from(
-                "Nix-darwin on macOS must be upgraded via darwin-rebuild switch",
-            ))
-            .into());
+            return Err(
+                SkipStep(t!("Nix-darwin on macOS must be upgraded via darwin-rebuild switch").to_string()).into(),
+            );
         }
     }

     let run_type = ctx.run_type();
     run_type.execute(nix_channel).arg("--update").status_checked()?;
+
+    let mut get_version_cmd = ctx.run_type().execute(&nix);
+    get_version_cmd.arg("--version");
+    let get_version_cmd_output = get_version_cmd.output_checked_utf8()?;
+    let get_version_cmd_first_line_stdout = get_version_cmd_output
+        .stdout
+        .lines()
+        .next()
+        .ok_or_else(|| eyre!("`nix --version` output is empty"))?;
+
+    let is_lix = get_version_cmd_first_line_stdout.contains("Lix");
+
+    debug!(
+        output=%get_version_cmd_output,
+        ?is_lix,
+        "`nix --version` output"
+    );
+
+    lazy_static! {
+        static ref NIX_VERSION_REGEX: Regex =
+            Regex::new(r"^nix \([^)]*\) ([0-9.]+)").expect("Nix version regex always compiles");
+    }
+
+    if get_version_cmd_first_line_stdout.is_empty() {
+        return Err(eyre!("`nix --version` output was empty"));
+    }
+
+    let captures = NIX_VERSION_REGEX.captures(get_version_cmd_first_line_stdout);
+    let raw_version = match &captures {
+        None => {
+            return Err(eyre!(
+                "`nix --version` output was weird: {get_version_cmd_first_line_stdout:?}\n\
+                If the `nix --version` output format changed, please file an issue to Topgrade"
+            ));
+        }
+        Some(captures) => &captures[1],
+    };
+
+    let version =
+        Version::parse(raw_version).wrap_err_with(|| format!("Unable to parse Nix version: {raw_version:?}"))?;
+
+    debug!("Nix version: {:?}", version);
+
+    // Nix since 2.21.0 uses `--all --impure` rather than `.*` to upgrade all packages.
+    // Lix is based on Nix 2.18, so it doesn't!
+    let packages = if version >= Version::new(2, 21, 0) && !is_lix {
+        vec!["--all", "--impure"]
+    } else {
+        vec![".*"]
+    };
+
     if Path::new(&manifest_json_path).exists() {
         run_type
             .execute(nix)
             .args(nix_args())
             .arg("profile")
             .arg("upgrade")
-            .arg(".*")
+            .args(&packages)
             .arg("--verbose")
             .status_checked()
     } else {
@@ -419,20 +529,16 @@ pub fn run_nix_self_upgrade(ctx: &ExecutionContext) -> Result<()> {
     }

     if !should_self_upgrade {
-        return Err(SkipStep(String::from(
-            "`nix upgrade-nix` can only be used on macOS or non-NixOS Linux",
-        ))
-        .into());
+        return Err(SkipStep(t!("`nix upgrade-nix` can only be used on macOS or non-NixOS Linux").to_string()).into());
     }

     if nix_profile_dir(&nix)?.is_none() {
-        return Err(SkipStep(String::from(
-            "`nix upgrade-nix` cannot be run when Nix is installed in a profile",
-        ))
-        .into());
+        return Err(
+            SkipStep(t!("`nix upgrade-nix` cannot be run when Nix is installed in a profile").to_string()).into(),
+        );
     }

-    print_separator("Nix (self-upgrade)");
+    print_separator(t!("Nix (self-upgrade)"));

     let multi_user = fs::metadata(&nix)?.uid() == 0;
     debug!("Multi user nix: {}", multi_user);
@@ -497,7 +603,6 @@ fn nix_profile_dir(nix: &Path) -> Result<Option<PathBuf>> {
     }

     debug!("Found Nix profile {profile_dir:?}");
-
     let user_env = profile_dir
         .canonicalize()
         .wrap_err_with(|| format!("Failed to canonicalize {profile_dir:?}"))?;
@@ -506,8 +611,7 @@ fn nix_profile_dir(nix: &Path) -> Result<Option<PathBuf>> {
     if user_env
         .file_name()
         .and_then(|name| name.to_str())
-        .map(|name| name.ends_with("user-environment"))
-        .unwrap_or(false)
+        .is_some_and(|name| name.ends_with("user-environment"))
     {
         Some(profile_dir)
     } else {
@@ -532,10 +636,33 @@ pub fn run_asdf(ctx: &ExecutionContext) -> Result<()> {
     let asdf = require("asdf")?;

     print_separator("asdf");

+    // asdf (>= 0.15.0) won't support the self-update command
+    //
+    // https://github.com/topgrade-rs/topgrade/issues/1007
+    let version_output = Command::new(&asdf).arg("version").output_checked_utf8()?;
+    // Example output
+    //
+    // ```
+    // $ asdf version
+    // v0.15.0-31e8c93
+    //
+    // ```
+    let version_stdout = version_output.stdout.trim();
+    // trim the starting 'v'
+    let mut remaining = version_stdout.trim_start_matches('v');
+    let idx = remaining
+        .find('-')
+        .expect("the output of `asdf version` changed, please file an issue to Topgrade");
+    // remove the hash part
+    remaining = &remaining[..idx];
+    let version = Version::parse(remaining).expect("should be a valid version");
+    if version < Version::new(0, 15, 0) {
     ctx.run_type()
         .execute(&asdf)
         .arg("update")
         .status_checked_with_codes(&[42])?;
+    }
+
     ctx.run_type()
         .execute(&asdf)
@@ -543,6 +670,19 @@ pub fn run_asdf(ctx: &ExecutionContext) -> Result<()> {
         .status_checked()
 }

+pub fn run_mise(ctx: &ExecutionContext) -> Result<()> {
+    let mise = require("mise")?;
+
+    print_separator("mise");
+
+    ctx.run_type()
+        .execute(&mise)
+        .args(["plugins", "update"])
+        .status_checked()?;
+
+    ctx.run_type().execute(&mise).arg("upgrade").status_checked()
+}
+
 pub fn run_home_manager(ctx: &ExecutionContext) -> Result<()> {
     let home_manager = require("home-manager")?;

@@ -572,12 +712,32 @@ pub fn run_pearl(ctx: &ExecutionContext) -> Result<()> {
     ctx.run_type().execute(pearl).arg("update").status_checked()
 }

+pub fn run_pyenv(ctx: &ExecutionContext) -> Result<()> {
+    let pyenv = require("pyenv")?;
+    print_separator("pyenv");
+
+    let pyenv_dir = var("PYENV_ROOT").map_or_else(|_| HOME_DIR.join(".pyenv"), PathBuf::from);
+
+    if !pyenv_dir.exists() {
+        return Err(SkipStep(t!("Pyenv is installed, but $PYENV_ROOT is not set correctly").to_string()).into());
+    }
+
+    if !pyenv_dir.join(".git").exists() {
+        return Err(SkipStep(t!("pyenv is not a git repository").to_string()).into());
+    }
+
+    if !pyenv_dir.join("plugins").join("pyenv-update").exists() {
+        return Err(SkipStep(t!("pyenv-update plugin is not installed").to_string()).into());
+    }
+
+    ctx.run_type().execute(pyenv).arg("update").status_checked()
+}
+
 pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
     let bash = require("bash")?;

     let sdkman_init_path = var("SDKMAN_DIR")
-        .map(PathBuf::from)
-        .unwrap_or_else(|_| HOME_DIR.join(".sdkman"))
+        .map_or_else(|_| HOME_DIR.join(".sdkman"), PathBuf::from)
         .join("bin")
         .join("sdkman-init.sh")
         .require()
@@ -586,8 +746,7 @@ pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
     print_separator("SDKMAN!");

     let sdkman_config_path = var("SDKMAN_DIR")
-        .map(PathBuf::from)
-        .unwrap_or_else(|_| HOME_DIR.join(".sdkman"))
+        .map_or_else(|_| HOME_DIR.join(".sdkman"), PathBuf::from)
         .join("etc")
         .join("config")
         .require()?;
@@ -635,21 +794,16 @@ pub fn run_sdkman(ctx: &ExecutionContext) -> Result<()> {
     Ok(())
 }

-pub fn run_bun(ctx: &ExecutionContext) -> Result<()> {
-    let bun = require("bun")?;
-
-    print_separator("Bun");
-
-    ctx.run_type().execute(bun).arg("upgrade").status_checked()
-}
-
 pub fn run_bun_packages(ctx: &ExecutionContext) -> Result<()> {
     let bun = require("bun")?;

-    print_separator("Bun Packages");
+    print_separator(t!("Bun Packages"));

-    if !HOME_DIR.join(".bun/install/global/package.json").exists() {
+    let mut package_json: PathBuf = var("BUN_INSTALL").map_or_else(|_| HOME_DIR.join(".bun"), PathBuf::from);
-        println!("No global packages installed");
+    package_json.push("install/global/package.json");
+
+    if !package_json.exists() {
+        println!("{}", t!("No global packages installed"));
         return Ok(());
     }

@@ -674,6 +828,7 @@ pub fn run_maza(ctx: &ExecutionContext) -> Result<()> {
 }

 pub fn reboot() -> Result<()> {
-    print!("Rebooting...");
+    print!("{}", t!("Rebooting..."));

     Command::new("sudo").arg("reboot").status_checked()
 }
@@ -1,4 +1,3 @@
-use std::convert::TryFrom;
 use std::path::Path;
 use std::{ffi::OsStr, process::Command};

@@ -10,8 +9,9 @@ use crate::command::CommandExt;
 use crate::execution_context::ExecutionContext;
 use crate::terminal::{print_separator, print_warning};
 use crate::utils::{require, which};
-use crate::{error::SkipStep, steps::git::Repositories};
+use crate::{error::SkipStep, steps::git::RepoStep};
 use crate::{powershell, Step};
+use rust_i18n::t;

 pub fn run_chocolatey(ctx: &ExecutionContext) -> Result<()> {
     let choco = require("choco")?;
@@ -42,15 +42,12 @@ pub fn run_winget(ctx: &ExecutionContext) -> Result<()> {

     print_separator("winget");

-    if !ctx.config().enable_winget() {
+    let mut args = vec!["upgrade", "--all"];
-        print_warning("Winget is disabled by default. Enable it by setting enable_winget=true in the [windows] section in the configuration.");
+    if ctx.config().winget_silent_install() {
-        return Err(SkipStep(String::from("Winget is disabled by default")).into());
+        args.push("--silent");
     }

-    ctx.run_type()
+    ctx.run_type().execute(winget).args(args).status_checked()
-        .execute(winget)
-        .args(["upgrade", "--all"])
-        .status_checked()
 }

 pub fn run_scoop(ctx: &ExecutionContext) -> Result<()> {
@@ -63,6 +60,10 @@ pub fn run_scoop(ctx: &ExecutionContext) -> Result<()> {

     if ctx.config().cleanup() {
         ctx.run_type().execute(&scoop).args(["cleanup", "*"]).status_checked()?;
+        ctx.run_type()
+            .execute(&scoop)
+            .args(["cache", "rm", "-a"])
+            .status_checked()?
     }

     Ok(())
@@ -70,12 +71,12 @@ pub fn run_scoop(ctx: &ExecutionContext) -> Result<()> {

 pub fn update_wsl(ctx: &ExecutionContext) -> Result<()> {
     if !is_wsl_installed()? {
-        return Err(SkipStep("WSL not installed".to_string()).into());
+        return Err(SkipStep(t!("WSL not installed").to_string()).into());
     }

     let wsl = require("wsl")?;

-    print_separator("Update WSL");
+    print_separator(t!("Update WSL"));

     let mut wsl_command = ctx.run_type().execute(wsl);
     wsl_command.args(["--update"]);
@@ -128,7 +129,7 @@ fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> R
     let topgrade = Command::new(wsl)
         .args(["-d", dist, "bash", "-lc", "which topgrade"])
         .output_checked_utf8()
-        .map_err(|_| SkipStep(String::from("Could not find Topgrade installed in WSL")))?
+        .map_err(|_| SkipStep(t!("Could not find Topgrade installed in WSL").to_string()))?
         .stdout // The normal output from `which topgrade` appends a newline, so we trim it here.
         .trim_end()
         .to_owned();
@@ -177,7 +178,7 @@ fn upgrade_wsl_distribution(wsl: &Path, dist: &str, ctx: &ExecutionContext) -> R

 pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
     if !is_wsl_installed()? {
-        return Err(SkipStep("WSL not installed".to_string()).into());
+        return Err(SkipStep(t!("WSL not installed").to_string()).into());
     }

     let wsl = require("wsl")?;
@@ -200,27 +201,34 @@ pub fn run_wsl_topgrade(ctx: &ExecutionContext) -> Result<()> {
     if ran {
         Ok(())
     } else {
-        Err(SkipStep(String::from("Could not find Topgrade in any WSL disribution")).into())
+        Err(SkipStep(t!("Could not find Topgrade in any WSL disribution").to_string()).into())
     }
 }

 pub fn windows_update(ctx: &ExecutionContext) -> Result<()> {
     let powershell = powershell::Powershell::windows_powershell();

+    print_separator(t!("Windows Update"));
+
     if powershell.supports_windows_update() {
-        print_separator("Windows Update");
+        println!("The installer will request to run as administrator, expect a prompt.");
-        return powershell.windows_update(ctx);
+
+        powershell.windows_update(ctx)
+    } else {
+        print_warning(t!(
+            "Consider installing PSWindowsUpdate as the use of Windows Update via USOClient is not supported."
+        ));
+
+        Err(SkipStep(t!("USOClient not supported.").to_string()).into())
     }
+}

-    let usoclient = require("UsoClient")?;
+pub fn microsoft_store(ctx: &ExecutionContext) -> Result<()> {
+    let powershell = powershell::Powershell::windows_powershell();

-    print_separator("Windows Update");
+    print_separator(t!("Microsoft Store"));
-    println!("Running Windows Update. Check the control panel for progress.");
-    ctx.run_type()
+    powershell.microsoft_store(ctx)
-        .execute(&usoclient)
-        .arg("ScanInstallWait")
-        .status_checked()?;
-    ctx.run_type().execute(&usoclient).arg("StartInstall").status_checked()
 }

 pub fn reboot() -> Result<()> {
@@ -229,7 +237,7 @@ pub fn reboot() -> Result<()> {
     Command::new("shutdown").args(["/R", "/T", "0"]).status_checked()
 }

-pub fn insert_startup_scripts(git_repos: &mut Repositories) -> Result<()> {
+pub fn insert_startup_scripts(git_repos: &mut RepoStep) -> Result<()> {
     let startup_dir = crate::WINDOWS_DIRS
         .data_dir()
         .join("Microsoft\\Windows\\Start Menu\\Programs\\Startup");
@@ -239,7 +247,7 @@ pub fn insert_startup_scripts(git_repos: &mut Repositories) -> Result<()> {
         if let Ok(lnk) = parselnk::Lnk::try_from(Path::new(&path)) {
             debug!("Startup link: {:?}", lnk);
             if let Some(path) = lnk.relative_path() {
-                git_repos.insert_if_repo(&startup_dir.join(path));
+                git_repos.insert_if_repo(startup_dir.join(path));
             }
         }
     }
@@ -4,11 +4,12 @@ use std::path::PathBuf;
 use std::process::Command;

 use color_eyre::eyre::Result;
+use rust_i18n::t;

 use crate::command::CommandExt;
 use crate::execution_context::ExecutionContext;
 use crate::terminal::{is_dumb, print_separator};
-use crate::utils::{require_option, which, PathExt};
+use crate::utils::{require_option, which};
 use crate::Step;

 pub struct Powershell {
@@ -29,7 +30,7 @@ impl Powershell {
             .args(["-NoProfile", "-Command", "Split-Path $profile"])
             .output_checked_utf8()
             .map(|output| PathBuf::from(output.stdout.trim()))
-            .and_then(|p| p.require())
+            .and_then(super::super::utils::PathExt::require)
             .ok()
     });

@@ -62,21 +63,21 @@
     }

     pub fn update_modules(&self, ctx: &ExecutionContext) -> Result<()> {
-        let powershell = require_option(self.path.as_ref(), String::from("Powershell is not installed"))?;
+        let powershell = require_option(self.path.as_ref(), t!("Powershell is not installed").to_string())?;

-        print_separator("Powershell Modules Update");
+        print_separator(t!("Powershell Modules Update"));

         let mut cmd = vec!["Update-Module"];

         if ctx.config().verbose() {
-            cmd.push("-Verbose")
+            cmd.push("-Verbose");
         }

         if ctx.config().yes(Step::Powershell) {
-            cmd.push("-Force")
+            cmd.push("-Force");
         }

-        println!("Updating modules...");
+        println!("{}", t!("Updating modules..."));
         ctx.run_type()
             .execute(powershell)
             // This probably doesn't need `shell_words::join`.
@@ -94,10 +95,18 @@

     #[cfg(windows)]
     pub fn windows_update(&self, ctx: &ExecutionContext) -> Result<()> {
-        let powershell = require_option(self.path.as_ref(), String::from("Powershell is not installed"))?;
+        let powershell = require_option(self.path.as_ref(), t!("Powershell is not installed").to_string())?;

         debug_assert!(self.supports_windows_update());

+        let accept_all = if ctx.config().accept_all_windows_updates() {
+            "-AcceptAll"
+        } else {
+            ""
+        };
+
+        let install_windowsupdate_verbose = "Install-WindowsUpdate -Verbose".to_string();
+
         let mut command = if let Some(sudo) = ctx.sudo() {
             let mut command = ctx.run_type().execute(sudo);
             command.arg(powershell);
@@ -107,18 +116,46 @@
         };

         command
-            .args([
+            .args(["-NoProfile", &install_windowsupdate_verbose, accept_all])
-                "-NoProfile",
-                "-Command",
-                &format!(
-                    "Import-Module PSWindowsUpdate; Install-WindowsUpdate -MicrosoftUpdate {} -Verbose",
-                    if ctx.config().accept_all_windows_updates() {
-                        "-AcceptAll"
-                    } else {
-                        ""
-                    }
-                ),
-            ])
             .status_checked()
     }

+    #[cfg(windows)]
+    pub fn microsoft_store(&self, ctx: &ExecutionContext) -> Result<()> {
+        let powershell = require_option(self.path.as_ref(), t!("Powershell is not installed").to_string())?;
+
+        let mut command = if let Some(sudo) = ctx.sudo() {
+            let mut command = ctx.run_type().execute(sudo);
+            command.arg(powershell);
+            command
+        } else {
+            ctx.run_type().execute(powershell)
+        };
+
+        println!("{}", t!("Scanning for updates..."));
+
+        // Scan for updates using the MDM UpdateScanMethod
+        // This method is also available for non-MDM devices
+        let update_command = "(Get-CimInstance -Namespace \"Root\\cimv2\\mdm\\dmmap\" -ClassName \"MDM_EnterpriseModernAppManagement_AppManagement01\" | Invoke-CimMethod -MethodName UpdateScanMethod).ReturnValue";
+
+        command.args(["-NoProfile", update_command]);
+
+        command
+            .output_checked_with_utf8(|output| {
+                if output.stdout.trim() == "0" {
+                    println!(
+                        "{}",
+                        t!("Success, Microsoft Store apps are being updated in the background")
+                    );
+                    Ok(())
+                } else {
+                    println!(
+                        "{}",
+                        t!("Unable to update Microsoft Store apps, manual intervention is required")
+                    );
+                    Err(())
+                }
+            })
+            .map(|_| ())
+    }
 }
@@ -1,4 +1,5 @@
 use color_eyre::eyre::Result;
+use rust_i18n::t;

 use crate::{
     command::CommandExt, error::SkipStep, execution_context::ExecutionContext, terminal::print_separator, utils,
@@ -27,7 +28,7 @@ pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
     {
         prepare_async_ssh_command(&mut args);
         crate::tmux::run_command(ctx, hostname, &shell_words::join(args))?;
-        Err(SkipStep(String::from("Remote Topgrade launched in Tmux")).into())
+        Err(SkipStep(String::from(t!("Remote Topgrade launched in Tmux"))).into())
     }

     #[cfg(not(unix))]
@@ -35,7 +36,7 @@ pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
     } else if ctx.config().open_remotes_in_new_terminal() && !ctx.run_type().dry() && cfg!(windows) {
         prepare_async_ssh_command(&mut args);
         ctx.run_type().execute("wt").args(&args).spawn()?;
-        Err(SkipStep(String::from("Remote Topgrade launched in an external terminal")).into())
+        Err(SkipStep(String::from(t!("Remote Topgrade launched in an external terminal"))).into())
     } else {
         let mut args = vec!["-t", hostname];

@@ -47,7 +48,7 @@ pub fn ssh_step(ctx: &ExecutionContext, hostname: &str) -> Result<()> {
         args.extend(["env", &env, "$SHELL", "-lc", topgrade]);

         print_separator(format!("Remote ({hostname})"));
-        println!("Connecting to {hostname}...");
+        println!("{}", t!("Connecting to {hostname}...", hostname = hostname));

         ctx.run_type().execute(ssh).args(&args).status_checked()
     }
@@ -4,6 +4,7 @@ use std::{fmt::Display, rc::Rc, str::FromStr};

 use color_eyre::eyre::Result;
 use regex::Regex;
+use rust_i18n::t;
 use strum::EnumString;
 use tracing::{debug, error};

@@ -125,7 +126,7 @@ impl<'a> TemporaryPowerOn<'a> {
     }
 }

-impl<'a> Drop for TemporaryPowerOn<'a> {
+impl Drop for TemporaryPowerOn<'_> {
     fn drop(&mut self) {
         let subcommand = if self.ctx.config().vagrant_always_suspend().unwrap_or(false) {
             "suspend"
@@ -151,14 +152,14 @@
 pub fn collect_boxes(ctx: &ExecutionContext) -> Result<Vec<VagrantBox>> {
     let directories = utils::require_option(
         ctx.config().vagrant_directories(),
-        String::from("No Vagrant directories were specified in the configuration file"),
+        String::from(t!("No Vagrant directories were specified in the configuration file")),
     )?;
     let vagrant = Vagrant {
         path: utils::require("vagrant")?,
     };

     print_separator("Vagrant");
-    println!("Collecting Vagrant boxes");
+    println!("{}", t!("Collecting Vagrant boxes"));

     let mut result = Vec::new();

@@ -183,7 +184,11 @@ pub fn topgrade_vagrant_box(ctx: &ExecutionContext, vagrant_box: &VagrantBox) ->
     let mut _poweron = None;
     if !vagrant_box.initial_status.powered_on() {
         if !(ctx.config().vagrant_power_on().unwrap_or(true)) {
-            return Err(SkipStep(format!("Skipping powered off box {vagrant_box}")).into());
+            return Err(SkipStep(format!(
+                "{}",
+                t!("Skipping powered off box {vagrant_box}", vagrant_box = vagrant_box)
+            ))
+            .into());
         } else {
             print_separator(seperator);
             _poweron = Some(vagrant.temporary_power_on(vagrant_box, ctx)?);
@@ -205,7 +210,7 @@ pub fn topgrade_vagrant_box(ctx: &ExecutionContext, vagrant_box: &VagrantBox) ->

 pub fn upgrade_vagrant_boxes(ctx: &ExecutionContext) -> Result<()> {
     let vagrant = utils::require("vagrant")?;
-    print_separator("Vagrant boxes");
+    print_separator(t!("Vagrant boxes"));

     let outdated = Command::new(&vagrant)
         .args(["box", "outdated", "--global"])
@@ -227,7 +232,7 @@ pub fn upgrade_vagrant_boxes(ctx: &ExecutionContext) -> Result<()> {
     }

     if !found {
-        println!("No outdated boxes")
+        println!("{}", t!("No outdated boxes"));
     } else {
         ctx.run_type()
             .execute(&vagrant)
@@ -7,6 +7,8 @@ use color_eyre::eyre::Context;
 use color_eyre::eyre::Result;

 use crate::command::CommandExt;
+use crate::config::TmuxConfig;
+use crate::config::TmuxSessionMode;
 use crate::terminal::print_separator;
 use crate::HOME_DIR;
 use crate::{
@@ -14,11 +16,19 @@
     utils::{which, PathExt},
 };

+use rust_i18n::t;
 #[cfg(unix)]
 use std::os::unix::process::CommandExt as _;

 pub fn run_tpm(ctx: &ExecutionContext) -> Result<()> {
-    let tpm = HOME_DIR.join(".tmux/plugins/tpm/bin/update_plugins").require()?;
+    let tpm = match env::var("TMUX_PLUGIN_MANAGER_PATH") {
+        // If `TMUX_PLUGIN_MANAGER_PATH` is set, search for
+        // `$TMUX_PLUGIN_MANAGER_PATH/bin/install_plugins/tpm/bin/update_plugins`
+        Ok(var) => PathBuf::from(var).join("bin/install_plugins/tpm/bin/update_plugins"),
+        // Otherwise, use the default location `~/.tmux/plugins/tpm/bin/update_plugins`
+        Err(_) => HOME_DIR.join(".tmux/plugins/tpm/bin/update_plugins"),
+    }
+    .require()?;

     print_separator("tmux plugins");

@@ -118,13 +128,13 @@ impl Tmux {
             .output_checked_utf8()?
             .stdout
             .lines()
-            .map(|l| l.parse())
+            .map(str::parse)
             .collect::<Result<Vec<usize>, _>>()
             .context("Failed to compute tmux windows")
     }
 }

-pub fn run_in_tmux(args: Vec<String>) -> Result<()> {
+pub fn run_in_tmux(config: TmuxConfig) -> Result<()> {
     let command = {
         let mut command = vec![
             String::from("env"),
@@ -137,39 +147,50 @@
         shell_words::join(command)
     };

-    let tmux = Tmux::new(args);
+    let tmux = Tmux::new(config.args);

     // Find an unused session and run `topgrade` in it with the current command's arguments.
     let session_name = "topgrade";
     let window_name = "topgrade";
     let session = tmux.new_unique_session(session_name, window_name, &command)?;

+    let is_inside_tmux = env::var("TMUX").is_ok();
+    let err = match config.session_mode {
+        TmuxSessionMode::AttachIfNotInSession => {
+            if is_inside_tmux {
     // Only attach to the newly-created session if we're not currently in a tmux session.
-    if env::var("TMUX").is_err() {
+                println!("{}", t!("Topgrade launched in a new tmux session"));
-        let err = tmux.build().args(["attach-session", "-t", &session]).exec();
+                return Ok(());
-        Err(eyre!("{err}")).context("Failed to `execvp(3)` tmux")
     } else {
-        println!("Topgrade launched in a new tmux session");
+                tmux.build().args(["attach-session", "-t", &session]).exec()
-        Ok(())
     }
+        }
+
+        TmuxSessionMode::AttachAlways => {
+            if is_inside_tmux {
+                tmux.build().args(["switch-client", "-t", &session]).exec()
+            } else {
+                tmux.build().args(["attach-session", "-t", &session]).exec()
+            }
+        }
+    };
+
+    Err(eyre!("{err}")).context("Failed to `execvp(3)` tmux")
 }

 pub fn run_command(ctx: &ExecutionContext, window_name: &str, command: &str) -> Result<()> {
-    let tmux = Tmux::new(ctx.config().tmux_arguments()?);
+    let tmux = Tmux::new(ctx.config().tmux_config()?.args);

-    match ctx.get_tmux_session() {
+    if let Some(session_name) = ctx.get_tmux_session() {
-        Some(session_name) => {
         let indices = tmux.window_indices(&session_name)?;
         let last_window = indices
             .iter()
             .last()
             .ok_or_else(|| eyre!("tmux session {session_name} has no windows"))?;
         tmux.new_window(&session_name, &format!("{last_window}"), command)?;
-        }
+    } else {
-        None => {
         let name = tmux.new_unique_session("topgrade", window_name, command)?;
         ctx.set_tmux_session(name);
     }
-    }
     Ok(())
 }
@@ -9,6 +9,11 @@ if exists(":AstroUpdate")
   quitall
 endif

+if exists(":MasonUpdate")
+  echo "MasonUpdate"
+  MasonUpdate
+endif
+
 if exists(":NeoBundleUpdate")
   echo "NeoBundle"
   NeoBundleUpdate
|||||||
@@ -10,6 +10,7 @@ use crate::{
     execution_context::ExecutionContext,
     utils::{require, PathExt},
 };
+use rust_i18n::t;
 use std::path::PathBuf;
 use std::{
     io::{self, Write},

@@ -57,14 +58,14 @@ fn upgrade(command: &mut Executor, ctx: &ExecutionContext) -> Result<()> {
     let status = output.status;
 
     if !status.success() || ctx.config().verbose() {
-        io::stdout().write(&output.stdout).ok();
-        io::stderr().write(&output.stderr).ok();
+        io::stdout().write_all(&output.stdout).ok();
+        io::stderr().write_all(&output.stderr).ok();
     }
 
     if !status.success() {
         return Err(TopgradeError::ProcessFailed(command.get_program(), status).into());
     } else {
-        println!("Plugins upgraded")
+        println!("{}", t!("Plugins upgraded"));
     }
 }
 
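The switch from `write` to `write_all` matters because `Write::write` may perform a partial write and merely report how many bytes it consumed, whereas `write_all` loops until the whole buffer is flushed. A small self-contained illustration of the difference; the buffer contents are made up for the example:

```rust
use std::io::{self, Write};

fn main() -> io::Result<()> {
    let payload = b"captured output from a plugin manager\n";

    // `write` may stop early; the returned count must be checked by the caller.
    let written = io::stdout().write(payload)?;
    assert!(written <= payload.len());

    // `write_all` keeps calling `write` until every byte is out, or an error occurs.
    io::stderr().write_all(payload)?;
    Ok(())
}
```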
@@ -77,7 +78,7 @@ pub fn upgrade_ultimate_vimrc(ctx: &ExecutionContext) -> Result<()> {
     let python = require("python3")?;
     let update_plugins = config_dir.join("update_plugins.py").require()?;
 
-    print_separator("The Ultimate vimrc");
+    print_separator(t!("The Ultimate vimrc"));
 
     ctx.run_type()
         .execute(&git)

@@ -108,7 +109,7 @@ pub fn upgrade_vim(ctx: &ExecutionContext) -> Result<()> {
 
     let output = Command::new(&vim).arg("--version").output_checked_utf8()?;
     if !output.stdout.starts_with("VIM") {
-        return Err(SkipStep(String::from("vim binary might be actually nvim")).into());
+        return Err(SkipStep(t!("vim binary might be actually nvim").to_string()).into());
     }
 
     let vimrc = vimrc()?;
@@ -8,10 +8,12 @@ use walkdir::WalkDir;
 
 use crate::command::CommandExt;
 use crate::execution_context::ExecutionContext;
-use crate::git::Repositories;
+use crate::git::RepoStep;
 use crate::terminal::print_separator;
 use crate::utils::{require, PathExt};
 use crate::HOME_DIR;
+use crate::XDG_DIRS;
+use etcetera::base_strategy::BaseStrategy;
 
 pub fn run_zr(ctx: &ExecutionContext) -> Result<()> {
     let zsh = require("zsh")?;

@@ -28,9 +30,7 @@ pub fn run_zr(ctx: &ExecutionContext) -> Result<()> {
 }
 
 fn zdotdir() -> PathBuf {
-    env::var("ZDOTDIR")
-        .map(PathBuf::from)
-        .unwrap_or_else(|_| HOME_DIR.clone())
+    env::var("ZDOTDIR").map_or_else(|_| HOME_DIR.clone(), PathBuf::from)
 }
 
 pub fn zshrc() -> PathBuf {
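Several hunks in this file collapse a `.map(...).unwrap_or_else(...)` chain into a single `Result::map_or_else(default_fn, map_fn)` call; the first closure handles the error case (the environment variable being unset) and the second maps the success value. A minimal sketch of the equivalence, with a made-up fallback path:

```rust
use std::env;
use std::path::PathBuf;

fn main() {
    let fallback = PathBuf::from("/home/example"); // illustrative default only

    // Two-step form: map the Ok value, then supply a default for the Err case.
    let a = env::var("ZDOTDIR")
        .map(PathBuf::from)
        .unwrap_or_else(|_| fallback.clone());

    // One-step form; note that the default closure comes first in map_or_else.
    let b = env::var("ZDOTDIR").map_or_else(|_| fallback.clone(), PathBuf::from);

    assert_eq!(a, b);
}
```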
@@ -64,8 +64,7 @@ pub fn run_antigen(ctx: &ExecutionContext) -> Result<()> {
     let zsh = require("zsh")?;
     let zshrc = zshrc().require()?;
     env::var("ADOTDIR")
-        .map(PathBuf::from)
-        .unwrap_or_else(|_| HOME_DIR.join("antigen.zsh"))
+        .map_or_else(|_| HOME_DIR.join("antigen.zsh"), PathBuf::from)
         .require()?;
 
     print_separator("antigen");

@@ -81,8 +80,7 @@ pub fn run_zgenom(ctx: &ExecutionContext) -> Result<()> {
     let zsh = require("zsh")?;
     let zshrc = zshrc().require()?;
     env::var("ZGEN_SOURCE")
-        .map(PathBuf::from)
-        .unwrap_or_else(|_| HOME_DIR.join(".zgenom"))
+        .map_or_else(|_| HOME_DIR.join(".zgenom"), PathBuf::from)
         .require()?;
 
     print_separator("zgenom");

@@ -99,8 +97,7 @@ pub fn run_zplug(ctx: &ExecutionContext) -> Result<()> {
     zshrc().require()?;
 
     env::var("ZPLUG_HOME")
-        .map(PathBuf::from)
-        .unwrap_or_else(|_| HOME_DIR.join(".zplug"))
+        .map_or_else(|_| HOME_DIR.join(".zplug"), PathBuf::from)
         .require()?;
 
     print_separator("zplug");

@@ -116,16 +113,12 @@ pub fn run_zinit(ctx: &ExecutionContext) -> Result<()> {
     let zshrc = zshrc().require()?;
 
     env::var("ZINIT_HOME")
-        .map(PathBuf::from)
-        .unwrap_or_else(|_| HOME_DIR.join(".zinit"))
+        .map_or_else(|_| XDG_DIRS.data_dir().join("zinit"), PathBuf::from)
        .require()?;
 
     print_separator("zinit");
 
-    let cmd = format!(
-        "source {} && zinit self-update && zinit update --all -p",
-        zshrc.display(),
-    );
+    let cmd = format!("source {} && zinit self-update && zinit update --all", zshrc.display());
     ctx.run_type()
         .execute(zsh)
         .args(["-i", "-c", cmd.as_str()])

@@ -140,7 +133,7 @@ pub fn run_zi(ctx: &ExecutionContext) -> Result<()> {
 
     print_separator("zi");
 
-    let cmd = format!("source {} && zi self-update && zi update --all -p", zshrc.display(),);
+    let cmd = format!("source {} && zi self-update && zi update --all", zshrc.display());
     ctx.run_type().execute(zsh).args(["-i", "-c", &cmd]).status_checked()
 }
 

@@ -154,8 +147,7 @@ pub fn run_zim(ctx: &ExecutionContext) -> Result<()> {
             .output_checked_utf8()
             .map(|o| o.stdout)
     })
-    .map(PathBuf::from)
-    .unwrap_or_else(|_| HOME_DIR.join(".zim"))
+    .map_or_else(|_| HOME_DIR.join(".zim"), PathBuf::from)
     .require()?;
 
     print_separator("zim");

@@ -211,8 +203,7 @@ pub fn run_oh_my_zsh(ctx: &ExecutionContext) -> Result<()> {
         .unwrap_or_else(|e| {
             let default_path = oh_my_zsh.join("custom");
             debug!(
-                "Running zsh returned {}. Using default path: {}",
-                e,
+                "Running zsh returned {e}. Using default path: {}",
                 default_path.display()
             );
             default_path

@@ -220,22 +211,17 @@ pub fn run_oh_my_zsh(ctx: &ExecutionContext) -> Result<()> {
 
     debug!("oh-my-zsh custom dir: {}", custom_dir.display());
 
-    let mut custom_repos = Repositories::new(ctx.git());
+    let mut custom_repos = RepoStep::try_new()?;
 
     for entry in WalkDir::new(custom_dir).max_depth(2) {
         let entry = entry?;
         custom_repos.insert_if_repo(entry.path());
     }
 
-    custom_repos.remove(&oh_my_zsh.to_string_lossy());
-    if !custom_repos.is_empty() {
-        println!("Pulling custom plugins and themes");
-        ctx.git().multi_pull(&custom_repos, ctx)?;
-    }
+    custom_repos.remove(&oh_my_zsh);
 
     ctx.run_type()
         .execute("zsh")
-        .arg(&oh_my_zsh.join("tools/upgrade.sh"))
+        .arg(oh_my_zsh.join("tools/upgrade.sh"))
         // oh-my-zsh returns 80 when it is already updated and no changes pulled
         // in this update.
         // See this comment: https://github.com/r-darwish/topgrade/issues/569#issuecomment-736756731
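The custom-plugin scan above walks at most two directory levels below oh-my-zsh's `custom/` directory; `walkdir`'s `max_depth` keeps the traversal from descending into the cloned repositories themselves. A small hedged sketch of that traversal, with an illustrative path and a plain `.git` check instead of topgrade's repository step:

```rust
use std::path::PathBuf;
use walkdir::WalkDir;

fn main() {
    // Illustrative location; topgrade derives this from ZSH_CUSTOM or a default.
    let custom_dir = PathBuf::from("/home/example/.oh-my-zsh/custom");

    // max_depth(2) visits custom/ (depth 0), its children such as plugins/ and
    // themes/ (depth 1), and their children (depth 2), where plugin repos live.
    for entry in WalkDir::new(&custom_dir).max_depth(2).into_iter().flatten() {
        if entry.path().join(".git").exists() {
            println!("found repo: {}", entry.path().display());
        }
    }
}
```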
src/sudo.rs (31 changed lines)
@@ -22,13 +22,28 @@ pub struct Sudo {
 }
 
 impl Sudo {
+    /// Get the `sudo` binary or the `gsudo` binary in the case of `gsudo`
+    /// masquerading as the `sudo` binary.
+    fn determine_sudo_variant(sudo_p: PathBuf) -> (PathBuf, SudoKind) {
+        match which("gsudo") {
+            Some(gsudo_p) => {
+                match std::fs::canonicalize(&gsudo_p).unwrap() == std::fs::canonicalize(&sudo_p).unwrap() {
+                    true => (gsudo_p, SudoKind::Gsudo),
+                    false => (sudo_p, SudoKind::Sudo),
+                }
+            }
+            None => (sudo_p, SudoKind::Sudo),
+        }
+    }
+
     /// Get the `sudo` binary for this platform.
     pub fn detect() -> Option<Self> {
         which("doas")
             .map(|p| (p, SudoKind::Doas))
-            .or_else(|| which("sudo").map(|p| (p, SudoKind::Sudo)))
+            .or_else(|| which("sudo").map(Self::determine_sudo_variant))
             .or_else(|| which("gsudo").map(|p| (p, SudoKind::Gsudo)))
             .or_else(|| which("pkexec").map(|p| (p, SudoKind::Pkexec)))
+            .or_else(|| which("run0").map(|p| (p, SudoKind::Run0)))
             .or_else(|| which("please").map(|p| (p, SudoKind::Please)))
             .map(|(path, kind)| Self { path, kind })
     }
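The new `determine_sudo_variant` helper relies on `std::fs::canonicalize` resolving symlinks, so a `sudo` that is really a link or shim pointing at `gsudo` compares equal to the `gsudo` path. A reduced sketch of that check, using made-up paths and the standard library only rather than topgrade's `which` wrapper:

```rust
use std::fs;
use std::path::Path;

/// Returns true when both paths resolve to the same underlying binary,
/// for example when `sudo` is just a symlink pointing at `gsudo`.
fn same_binary(a: &Path, b: &Path) -> std::io::Result<bool> {
    // canonicalize() follows symlinks and normalizes the path, so two different
    // spellings of the same file compare equal afterwards.
    Ok(fs::canonicalize(a)? == fs::canonicalize(b)?)
}

fn main() {
    // Illustrative paths; in the real code these come from `which`.
    let sudo = Path::new("/usr/bin/sudo");
    let gsudo = Path::new("/usr/bin/gsudo");
    println!("sudo is gsudo in disguise: {}", same_binary(sudo, gsudo).unwrap_or(false));
}
```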
@@ -65,9 +80,11 @@ impl Sudo {
                 cmd.arg("-v");
             }
             SudoKind::Gsudo => {
-                // Shows current user, cache and console status.
+                // `gsudo` doesn't have anything like `sudo -v` to cache credentials,
+                // so we just execute a dummy `echo` command so we have something
+                // unobtrusive to run.
                 // See: https://gerardog.github.io/gsudo/docs/usage
-                cmd.arg("status");
+                cmd.arg("echo");
             }
             SudoKind::Pkexec => {
                 // I don't think this does anything; `pkexec` usually asks for

@@ -79,6 +96,13 @@ impl Sudo {
                 // See: https://linux.die.net/man/1/pkexec
                 cmd.arg("echo");
             }
+            SudoKind::Run0 => {
+                // `run0` uses polkit for authentication
+                // and thus has the same issues as `pkexec`.
+                //
+                // See: https://www.freedesktop.org/software/systemd/man/devel/run0.html
+                cmd.arg("echo");
+            }
             SudoKind::Please => {
                 // From `man please`
                 // -w, --warm

@@ -115,6 +139,7 @@ pub enum SudoKind {
     Sudo,
     Gsudo,
     Pkexec,
+    Run0,
     Please,
 }
 
@@ -11,6 +11,7 @@ use color_eyre::eyre::Context;
 use console::{style, Key, Term};
 use lazy_static::lazy_static;
 use notify_rust::{Notification, Timeout};
+use rust_i18n::t;
 use tracing::{debug, error};
 #[cfg(windows)]
 use which_crate::which;

@@ -51,9 +52,7 @@ impl Terminal {
         Self {
             width: term.size_checked().map(|(_, w)| w),
             term,
-            prefix: env::var("TOPGRADE_PREFIX")
-                .map(|prefix| format!("({prefix}) "))
-                .unwrap_or_else(|_| String::new()),
+            prefix: env::var("TOPGRADE_PREFIX").map_or_else(|_| String::new(), |prefix| format!("({prefix}) ")),
             set_title: true,
             display_time: true,
             desktop_notification: false,

@@ -61,15 +60,15 @@ impl Terminal {
     }
 
     fn set_desktop_notifications(&mut self, desktop_notifications: bool) {
-        self.desktop_notification = desktop_notifications
+        self.desktop_notification = desktop_notifications;
     }
 
     fn set_title(&mut self, set_title: bool) {
-        self.set_title = set_title
+        self.set_title = set_title;
     }
 
     fn display_time(&mut self, display_time: bool) {
-        self.display_time = display_time
+        self.display_time = display_time;
     }
 
     fn notify_desktop<P: AsRef<str>>(&self, message: P, timeout: Option<Duration>) {

@@ -144,7 +143,7 @@ impl Terminal {
         self.term
             .write_fmt(format_args!(
                 "{} {}",
-                style(format!("{key} failed:")).red().bold(),
+                style(format!("{}", t!("{key} failed:", key = key))).red().bold(),
                 message
             ))
             .ok();
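Most of the string changes in these hunks wrap user-facing text in `rust_i18n`'s `t!` macro, which looks a message up in the locale files and substitutes named arguments. A hedged, standalone sketch of how that macro is typically wired up; the locale directory and message keys are invented for the example and are not taken from topgrade's actual locale layout:

```rust
use rust_i18n::t;

// Loads translations from ./locales at compile time (the path is an assumption here).
rust_i18n::i18n!("locales");

fn main() {
    rust_i18n::set_locale("en");

    // Plain lookup of a message key.
    println!("{}", t!("Plugins upgraded"));

    // Named interpolation, mirroring `t!("{key} failed:", key = key)` in the diff.
    let step = "vim";
    println!("{}", t!("{step_name} failed", step_name = step));
}
```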
@@ -174,10 +173,10 @@ impl Terminal {
                 "{}: {}\n",
                 key,
                 match result {
-                    StepResult::Success => format!("{}", style("OK").bold().green()),
-                    StepResult::Failure => format!("{}", style("FAILED").bold().red()),
-                    StepResult::Ignored => format!("{}", style("IGNORED").bold().yellow()),
-                    StepResult::Skipped(reason) => format!("{}: {}", style("SKIPPED").bold().blue(), reason),
+                    StepResult::Success => format!("{}", style(t!("OK")).bold().green()),
+                    StepResult::Failure => format!("{}", style(t!("FAILED")).bold().red()),
+                    StepResult::Ignored => format!("{}", style(t!("IGNORED")).bold().yellow()),
+                    StepResult::Skipped(reason) => format!("{}: {}", style(t!("SKIPPED")).bold().blue(), reason),
                 }
             ))
             .ok();

@@ -188,7 +187,7 @@ impl Terminal {
         self.term
             .write_fmt(format_args!(
                 "{}",
-                style(format!("{question} (y)es/(N)o",)).yellow().bold()
+                style(format!("{question} {}", t!("(Y)es/(N)o"))).yellow().bold()
             ))
             .ok();
 

@@ -207,14 +206,14 @@ impl Terminal {
         }
 
         if self.set_title {
-            self.term.set_title("Topgrade - Awaiting user");
+            self.term.set_title(format!("Topgrade - {}", t!("Awaiting user")));
         }
 
         if self.desktop_notification {
-            self.notify_desktop(format!("{step_name} failed"), None);
+            self.notify_desktop(format!("{}", t!("{step_name} failed", step_name = step_name)), None);
         }
 
-        let prompt_inner = style(format!("{}Retry? (y)es/(N)o/(s)hell/(q)uit", self.prefix))
+        let prompt_inner = style(format!("{}{}", self.prefix, t!("Retry? (y)es/(N)o/(s)hell/(q)uit")))
             .yellow()
             .bold();
 

@@ -222,21 +221,24 @@ impl Terminal {
 
         let answer = loop {
             match self.term.read_key() {
-                Ok(Key::Char('y')) | Ok(Key::Char('Y')) => break Ok(true),
-                Ok(Key::Char('s')) | Ok(Key::Char('S')) => {
-                    println!("\n\nDropping you to shell. Fix what you need and then exit the shell.\n");
+                Ok(Key::Char('y' | 'Y')) => break Ok(true),
+                Ok(Key::Char('s' | 'S')) => {
+                    println!(
+                        "\n\n{}\n",
+                        t!("Dropping you to shell. Fix what you need and then exit the shell.")
+                    );
                     if let Err(err) = run_shell().context("Failed to run shell") {
                         self.term.write_fmt(format_args!("{err:?}\n{prompt_inner}")).ok();
                     } else {
                         break Ok(true);
                     }
                 }
-                Ok(Key::Char('n')) | Ok(Key::Char('N')) | Ok(Key::Enter) => break Ok(false),
+                Ok(Key::Char('n' | 'N') | Key::Enter) => break Ok(false),
                 Err(e) => {
                     error!("Error reading from terminal: {}", e);
                     break Ok(false);
                 }
-                Ok(Key::Char('q')) | Ok(Key::Char('Q')) => {
+                Ok(Key::Char('q' | 'Q')) => {
                     return Err(io::Error::from(io::ErrorKind::Interrupted)).context("Quit from user input")
                 }
                 _ => (),
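The key-handling arms above are tidied with nested or-patterns, which let `'y' | 'Y'` sit inside `Key::Char(..)` and even combine with `Key::Enter` in a single arm instead of repeating `Ok(...)` for every alternative (stabilized in Rust 1.53). A tiny self-contained illustration with a stand-in `Key` enum:

```rust
#[derive(Debug)]
enum Key {
    Char(char),
    Enter,
}

fn classify(key: std::io::Result<Key>) -> &'static str {
    match key {
        // Nested or-pattern: one arm covers both spellings of the character.
        Ok(Key::Char('y' | 'Y')) => "yes",
        // Alternatives can also mix variants of the outer enum.
        Ok(Key::Char('n' | 'N') | Key::Enter) => "no",
        Ok(_) => "other",
        Err(_) => "error",
    }
}

fn main() {
    println!("{}", classify(Ok(Key::Char('Y')))); // yes
    println!("{}", classify(Ok(Key::Enter)));     // no
}
```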
@@ -264,26 +266,26 @@ pub fn should_retry(interrupted: bool, step_name: &str) -> eyre::Result<bool> {
 }
 
 pub fn print_separator<P: AsRef<str>>(message: P) {
-    TERMINAL.lock().unwrap().print_separator(message)
+    TERMINAL.lock().unwrap().print_separator(message);
 }
 
 #[allow(dead_code)]
 pub fn print_error<P: AsRef<str>, Q: AsRef<str>>(key: Q, message: P) {
-    TERMINAL.lock().unwrap().print_error(key, message)
+    TERMINAL.lock().unwrap().print_error(key, message);
 }
 
 #[allow(dead_code)]
 pub fn print_warning<P: AsRef<str>>(message: P) {
-    TERMINAL.lock().unwrap().print_warning(message)
+    TERMINAL.lock().unwrap().print_warning(message);
 }
 
 #[allow(dead_code)]
 pub fn print_info<P: AsRef<str>>(message: P) {
-    TERMINAL.lock().unwrap().print_info(message)
+    TERMINAL.lock().unwrap().print_info(message);
 }
 
 pub fn print_result<P: AsRef<str>>(key: P, result: &StepResult) {
-    TERMINAL.lock().unwrap().print_result(key, result)
+    TERMINAL.lock().unwrap().print_result(key, result);
 }
 
 /// Tells whether the terminal is dumb.

@@ -312,7 +314,7 @@ pub fn prompt_yesno(question: &str) -> Result<bool, io::Error> {
 }
 
 pub fn notify_desktop<P: AsRef<str>>(message: P, timeout: Option<Duration>) {
-    TERMINAL.lock().unwrap().notify_desktop(message, timeout)
+    TERMINAL.lock().unwrap().notify_desktop(message, timeout);
 }
 
 pub fn display_time(display_time: bool) {
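These free functions all delegate to a single process-wide `Terminal` guarded by a mutex (declared elsewhere with `lazy_static`), which is why each body is just a lock-and-call; the trailing semicolons added here are what clippy's `semicolon_if_nothing_returned` lint asks for. A reduced sketch of the pattern with a stand-in struct rather than topgrade's real `Terminal`:

```rust
use lazy_static::lazy_static;
use std::sync::Mutex;

// Stand-in for the real Terminal state; the actual struct carries much more.
struct Terminal {
    prefix: String,
}

impl Terminal {
    fn print_warning<P: AsRef<str>>(&self, message: P) {
        println!("{}WARNING: {}", self.prefix, message.as_ref());
    }
}

lazy_static! {
    // One shared instance for the whole process, like topgrade's TERMINAL.
    static ref TERMINAL: Mutex<Terminal> = Mutex::new(Terminal { prefix: String::new() });
}

// Free-function wrapper: lock the global and forward the call.
pub fn print_warning<P: AsRef<str>>(message: P) {
    TERMINAL.lock().unwrap().print_warning(message);
}

fn main() {
    print_warning("disk almost full");
}
```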
src/utils.rs (38 changed lines)
@@ -5,9 +5,9 @@ use std::path::{Path, PathBuf};
 use std::process::Command;
 
 use color_eyre::eyre::Result;
+use rust_i18n::t;
 
 use tracing::{debug, error};
-use tracing_subscriber::fmt::format::FmtSpan;
 use tracing_subscriber::layer::SubscriberExt;
 use tracing_subscriber::reload::{Handle, Layer};
 use tracing_subscriber::util::SubscriberInitExt;

@@ -52,7 +52,11 @@ where
             debug!("Path {:?} exists", self.as_ref());
             Ok(self)
         } else {
-            Err(SkipStep(format!("Path {:?} doesn't exist", self.as_ref())).into())
+            Err(SkipStep(format!(
+                "{}",
+                t!("Path {path} doesn't exist", path = format!("{:?}", self.as_ref()))
+            ))
+            .into())
         }
     }
 }

@@ -82,7 +86,7 @@ pub fn editor() -> Vec<String> {
     env::var("EDITOR")
         .unwrap_or_else(|_| String::from(if cfg!(windows) { "notepad" } else { "vi" }))
         .split_whitespace()
-        .map(|s| s.to_owned())
+        .map(std::borrow::ToOwned::to_owned)
        .collect()
 }

@@ -93,9 +97,14 @@ pub fn require<T: AsRef<OsStr> + Debug>(binary_name: T) -> Result<PathBuf> {
             Ok(path)
         }
         Err(e) => match e {
-            which_crate::Error::CannotFindBinaryPath => {
-                Err(SkipStep(format!("Cannot find {:?} in PATH", &binary_name)).into())
-            }
+            which_crate::Error::CannotFindBinaryPath => Err(SkipStep(format!(
+                "{}",
+                t!(
+                    "Cannot find {binary_name} in PATH",
+                    binary_name = format!("{:?}", &binary_name)
+                )
+            ))
+            .into()),
             _ => {
                 panic!("Detecting {:?} failed: {}", &binary_name, e);
             }
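`require` turns a missing binary into a skip rather than a hard failure by matching on `which`'s `CannotFindBinaryPath` error. A reduced sketch of that decision, using the `which` crate directly and a plain error string in place of topgrade's `SkipStep`:

```rust
use std::path::PathBuf;

fn require(binary_name: &str) -> Result<PathBuf, String> {
    match which::which(binary_name) {
        Ok(path) => Ok(path),
        // A binary that simply isn't installed just skips the step...
        Err(which::Error::CannotFindBinaryPath) => {
            Err(format!("Cannot find {binary_name:?} in PATH"))
        }
        // ...while anything else (broken PATH, permissions) is a real failure.
        Err(e) => panic!("Detecting {binary_name:?} failed: {e}"),
    }
}

fn main() {
    match require("zsh") {
        Ok(path) => println!("zsh at {}", path.display()),
        Err(skip_reason) => println!("skipped: {skip_reason}"),
    }
}
```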
@@ -124,7 +133,7 @@ pub fn hostname() -> Result<String> {
     match nix::unistd::gethostname() {
         Ok(os_str) => Ok(os_str
             .into_string()
-            .map_err(|_| SkipStep("Failed to get a UTF-8 encoded hostname".into()))?),
+            .map_err(|_| SkipStep(t!("Failed to get a UTF-8 encoded hostname").into()))?),
         Err(e) => Err(e.into()),
     }
 }

@@ -133,7 +142,7 @@ pub fn hostname() -> Result<String> {
 pub fn hostname() -> Result<String> {
     Command::new("hostname")
         .output_checked_utf8()
-        .map_err(|err| SkipStep(format!("Failed to get hostname: {err}")).into())
+        .map_err(|err| SkipStep(t!("Failed to get hostname: {err}", err = err).to_string()).into())
         .map(|output| output.stdout.trim().to_owned())
 }
 

@@ -192,7 +201,9 @@ pub mod merge_strategies {
 
 // Skip causes
 // TODO: Put them in a better place when we have more of them
-pub const REQUIRE_SUDO: &str = "Require sudo or counterpart but not found, skip";
+pub fn get_require_sudo_string() -> String {
+    t!("Require sudo or counterpart but not found, skip").to_string()
+}
 
 /// Return `Err(SkipStep)` if `python` is a Python 2 or shim.
 ///

@@ -219,11 +230,11 @@ pub fn check_is_python_2_or_shim(python: PathBuf) -> Result<PathBuf> {
             .parse::<u32>()
             .expect("Major version should be a valid number");
         if major_version == 2 {
-            return Err(SkipStep(format!("{} is a Python 2, skip.", python.display())).into());
+            return Err(SkipStep(t!("{python} is a Python 2, skip.", python = python.display()).to_string()).into());
         }
     } else {
         // No version number, is a shim
-        return Err(SkipStep(format!("{} is a Python shim, skip.", python.display())).into());
+        return Err(SkipStep(t!("{python} is a Python shim, skip.", python = python.display()).to_string()).into());
     }
 
     Ok(python)
@@ -239,10 +250,7 @@ pub fn install_tracing(filter_directives: &str) -> Result<Handle<EnvFilter, Regi
         .or_else(|_| EnvFilter::try_from_default_env())
         .or_else(|_| EnvFilter::try_new(DEFAULT_LOG_LEVEL))?;
 
-    let fmt_layer = fmt::layer()
-        .with_target(false)
-        .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
-        .without_time();
+    let fmt_layer = fmt::layer().with_target(false).without_time();
 
     let (filter, reload_handle) = Layer::new(env_filter);
 
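The `(filter, reload_handle)` pair in this last hunk comes from `tracing_subscriber`'s `reload` layer, which wraps a filter so it can be swapped at runtime after the subscriber has been installed. A hedged sketch of that wiring outside topgrade; the directive strings are illustrative:

```rust
use tracing_subscriber::{fmt, layer::SubscriberExt, reload, util::SubscriberInitExt, EnvFilter};

fn main() {
    // Start with a filter built from a directive string.
    let env_filter = EnvFilter::try_new("info").expect("valid directives");

    // reload::Layer::new returns the layer plus a handle for later updates.
    let (filter, reload_handle) = reload::Layer::new(env_filter);

    let fmt_layer = fmt::layer().with_target(false).without_time();

    tracing_subscriber::registry().with(filter).with(fmt_layer).init();

    tracing::info!("visible at the initial level");

    // Later, the handle can tighten or relax the filter without re-initializing.
    reload_handle
        .modify(|f| *f = EnvFilter::try_new("debug").expect("valid directives"))
        .ok();

    tracing::debug!("visible after the reload");
}
```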