New upstream version 1.52.1+dfsg1

Ximin Luo 2021-05-14 18:42:13 +01:00
parent 36d6ef2b3b
commit f20569fa03
4531 changed files with 455795 additions and 7 deletions


@ -1,3 +1,24 @@
Version 1.52.1 (2021-05-10)
============================
This release disables incremental compilation, unless the user has explicitly
opted in via the newly added RUSTC_FORCE_INCREMENTAL=1 environment variable.
This is due to the widespread, and frequently occurring, breakage encountered by
Rust users due to newly enabled incremental verification in 1.52.0. Notably,
Rust users **should** upgrade to 1.52.0 or 1.52.1: the bugs that are detected by
newly added incremental verification are still present in past stable versions,
and are not yet fixed on any channel. These bugs can lead to miscompilation of
Rust binaries.
These problems only affect incremental builds, so release builds with Cargo
should not be affected unless the user has explicitly opted into incremental.
Debug and check builds are affected.
See [84970] for more details.
[84970]: https://github.com/rust-lang/rust/issues/84970
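For users who understand the risks and still want incremental compilation for debug and
check builds, a minimal sketch of opting back in (the project being built is arbitrary):

```bash
# One-off: honor incremental compilation for this invocation only
RUSTC_FORCE_INCREMENTAL=1 cargo build

# Or opt in for the whole shell session
export RUSTC_FORCE_INCREMENTAL=1
cargo check
```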
Version 1.52.0 (2021-05-06)
============================


@ -590,7 +590,19 @@ fn incremental_verify_ich<CTX, K, V: Debug>(
let old_hash = tcx.dep_graph().fingerprint_of(dep_node_index);
-    assert!(new_hash == old_hash, "found unstable fingerprints for {:?}: {:?}", dep_node, result);
+    if new_hash != old_hash {
+        let run_cmd = if let Some(crate_name) = &tcx.sess().opts.crate_name {
+            format!("`cargo clean -p {}` or `cargo clean`", crate_name)
+        } else {
+            "`cargo clean`".to_string()
+        };
+        tcx.sess().struct_err(&format!("internal compiler error: encountered incremental compilation error with {:?}", dep_node))
+            .help(&format!("This is a known issue with the compiler. Run {} to allow your project to compile", run_cmd))
+            .note(&format!("Please follow the instructions below to create a bug report with the provided information"))
+            .note(&format!("See <https://github.com/rust-lang/rust/issues/84970> for more information."))
+            .emit();
+        panic!("Found unstable fingerprints for {:?}: {:?}", dep_node, result);
+    }
}
fn force_query_with_job<C, CTX>(
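The hunk above replaces the hard assertion with a user-facing diagnostic that names a recovery
command before panicking. If you hit this error, the suggested workaround boils down to clearing
the stale incremental state and rebuilding; the crate name below is illustrative:

```bash
# Remove incremental artifacts for the affected crate (or everything with `cargo clean`)
cargo clean -p my_crate
cargo build
```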


@ -1885,7 +1885,12 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
check_thread_count(&debugging_opts, error_format);
-    let incremental = cg.incremental.as_ref().map(PathBuf::from);
+    let incremental =
+        if std::env::var_os("RUSTC_FORCE_INCREMENTAL").map(|v| v == "1").unwrap_or(false) {
+            cg.incremental.as_ref().map(PathBuf::from)
+        } else {
+            None
+        };
if debugging_opts.profile && incremental.is_some() {
early_error(


@ -1 +1 @@
-88f19c6dab716c6281af7602e30f413e809c5974
+9bc8c42bb2f19e745a63f3445f1ac248fb015e53


@ -161,6 +161,9 @@ to save information after compiling a crate to be reused when recompiling the
crate, improving re-compile times. This takes a path to a directory where
incremental files will be stored.
Note that this option currently does not take effect unless
`RUSTC_FORCE_INCREMENTAL=1` is set in the environment.
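For example, on 1.52.1 an invocation along these lines is needed for the incremental cache to
actually be written and reused (the cache directory and source file are illustrative):

```bash
RUSTC_FORCE_INCREMENTAL=1 rustc -C incremental=./incremental-cache main.rs
```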
## inline-threshold
This option lets you set the default threshold for inlining a function. It


@ -0,0 +1,7 @@
[alias]
uitest = "test --test compile-test"
dev = "run --target-dir clippy_dev/target --package clippy_dev --bin clippy_dev --manifest-path clippy_dev/Cargo.toml --"
lintcheck = "run --target-dir lintcheck/target --package lintcheck --bin lintcheck --manifest-path lintcheck/Cargo.toml -- "
[build]
rustflags = ["-Zunstable-options"]
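With these aliases in place, the usual development commands can be run from the repository root
roughly as follows (the `clippy_dev` subcommand shown is just one example):

```bash
cargo uitest        # run the compile-test based UI test suite
cargo dev fmt       # invoke the clippy_dev tool, here for formatting
cargo lintcheck     # run lintcheck against its configured crate list
```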


@ -0,0 +1,21 @@
# EditorConfig helps developers define and maintain consistent
# coding styles between different editors and IDEs
# editorconfig.org
root = true
[*]
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
indent_style = space
indent_size = 4
[*.md]
# double whitespace at end of line
# denotes a line break in Markdown
trim_trailing_whitespace = false
[*.yml]
indent_size = 2


@ -0,0 +1,4 @@
---
name: Blank Issue
about: Create a blank issue.
---


@ -0,0 +1,47 @@
---
name: Bug Report
about: Create a bug report for Clippy
labels: C-bug
---
<!--
Thank you for filing a bug report! 🐛 Please provide a short summary of the bug,
along with any information you feel relevant to replicating the bug.
-->
I tried this code:
```rust
<code>
```
I expected to see this happen: *explanation*
Instead, this happened: *explanation*
### Meta
- `cargo clippy -V`: e.g. clippy 0.0.212 (f455e46 2020-06-20)
- `rustc -Vv`:
```
rustc 1.46.0-nightly (f455e46ea 2020-06-20)
binary: rustc
commit-hash: f455e46eae1a227d735091091144601b467e1565
commit-date: 2020-06-20
host: x86_64-unknown-linux-gnu
release: 1.46.0-nightly
LLVM version: 10.0
```
<!--
Include a backtrace in the code block by setting `RUST_BACKTRACE=1` in your
environment. E.g. `RUST_BACKTRACE=1 cargo clippy`.
-->
<details><summary>Backtrace</summary>
<p>
```
<backtrace>
```
</p>
</details>


@ -0,0 +1,5 @@
blank_issues_enabled: true
contact_links:
- name: Rust Programming Language Forum
url: https://users.rust-lang.org
about: Please ask and answer questions about Rust here.


@ -0,0 +1,35 @@
---
name: Bug Report (False Negative)
about: Create a bug report about missing warnings from a lint
labels: C-bug, I-false-negative
---
<!--
Thank you for filing a bug report! 🐛 Please provide a short summary of the bug,
along with any information you feel relevant to replicating the bug.
-->
Lint name:
I tried this code:
```rust
<code>
```
I expected to see this happen: *explanation*
Instead, this happened: *explanation*
### Meta
- `cargo clippy -V`: e.g. clippy 0.0.212 (f455e46 2020-06-20)
- `rustc -Vv`:
```
rustc 1.46.0-nightly (f455e46ea 2020-06-20)
binary: rustc
commit-hash: f455e46eae1a227d735091091144601b467e1565
commit-date: 2020-06-20
host: x86_64-unknown-linux-gnu
release: 1.46.0-nightly
LLVM version: 10.0
```


@ -0,0 +1,35 @@
---
name: Bug Report (False Positive)
about: Create a bug report about a wrongly emitted lint warning
labels: C-bug, I-false-positive
---
<!--
Thank you for filing a bug report! 🐛 Please provide a short summary of the bug,
along with any information you feel relevant to replicating the bug.
-->
Lint name:
I tried this code:
```rust
<code>
```
I expected to see this happen: *explanation*
Instead, this happened: *explanation*
### Meta
- `cargo clippy -V`: e.g. clippy 0.0.212 (f455e46 2020-06-20)
- `rustc -Vv`:
```
rustc 1.46.0-nightly (f455e46ea 2020-06-20)
binary: rustc
commit-hash: f455e46eae1a227d735091091144601b467e1565
commit-date: 2020-06-20
host: x86_64-unknown-linux-gnu
release: 1.46.0-nightly
LLVM version: 10.0
```


@ -0,0 +1,53 @@
---
name: Internal Compiler Error
about: Create a report for an internal compiler error in Clippy.
labels: C-bug, I-ICE
---
<!--
Thank you for finding an Internal Compiler Error! 🧊 If possible, try to provide
a minimal verifiable example. You can read "Rust Bug Minimization Patterns" for
how to create smaller examples.
http://blog.pnkfx.org/blog/2019/11/18/rust-bug-minimization-patterns/
-->
### Code
```rust
<code>
```
### Meta
- `cargo clippy -V`: e.g. clippy 0.0.212 (f455e46 2020-06-20)
- `rustc -Vv`:
```
rustc 1.46.0-nightly (f455e46ea 2020-06-20)
binary: rustc
commit-hash: f455e46eae1a227d735091091144601b467e1565
commit-date: 2020-06-20
host: x86_64-unknown-linux-gnu
release: 1.46.0-nightly
LLVM version: 10.0
```
### Error output
```
<output>
```
<!--
Include a backtrace in the code block by setting `RUST_BACKTRACE=1` in your
environment. E.g. `RUST_BACKTRACE=1 cargo clippy`.
-->
<details><summary>Backtrace</summary>
<p>
```
<backtrace>
```
</p>
</details>


@ -0,0 +1,35 @@
---
name: New lint suggestion
about: Suggest a new Clippy lint.
labels: A-lint
---
### What it does
*What does this lint do?*
### Categories (optional)
- Kind: *See <https://github.com/rust-lang/rust-clippy/blob/master/README.md#clippy> for a list of lint kinds*
*What is the advantage of the recommended code over the original code?*
For example:
- Remove bounds checking inserted by ...
- Remove the need for duplicating/storing/typo ...
### Drawbacks
None.
### Example
```rust
<code>
```
Could be written as:
```rust
<code>
```


@ -0,0 +1,32 @@
Thank you for making Clippy better!
We're collecting our changelog from pull request descriptions.
If your PR only includes internal changes, you can just write
`changelog: none`. Otherwise, please write a short comment
explaining your change.
If your PR fixes an issue, you can add "fixes #issue_number" into this
PR description. This way the issue will be automatically closed when
your PR is merged.
If you added a new lint, here's a checklist for things that will be
checked during review or continuous integration.
- \[ ] Followed [lint naming conventions][lint_naming]
- \[ ] Added passing UI tests (including committed `.stderr` file)
- \[ ] `cargo test` passes locally
- \[ ] Executed `cargo dev update_lints`
- \[ ] Added lint documentation
- \[ ] Run `cargo dev fmt`
[lint_naming]: https://rust-lang.github.io/rfcs/0344-conventions-galore.html#lints
Note that you can skip the above if you are just opening a WIP PR in
order to get feedback.
Delete this line and everything above before opening your PR.
---
*Please write a short comment explaining your change (or "none" for internal only changes)*
changelog:

src/tools/clippy/.github/deploy.sh

@ -0,0 +1,66 @@
#!/bin/bash
set -ex
echo "Removing the current docs for master"
rm -rf out/master/ || exit 0
echo "Making the docs for master"
mkdir out/master/
cp util/gh-pages/index.html out/master
python3 ./util/export.py out/master/lints.json
if [[ -n $TAG_NAME ]]; then
echo "Save the doc for the current tag ($TAG_NAME) and point stable/ to it"
cp -r out/master "out/$TAG_NAME"
rm -f out/stable
ln -s "$TAG_NAME" out/stable
fi
if [[ $BETA = "true" ]]; then
echo "Update documentation for the beta release"
cp -r out/master/* out/beta
fi
# Generate version index that is shown as root index page
cp util/gh-pages/versions.html out/index.html
echo "Making the versions.json file"
python3 ./util/versions.py out
cd out
# Now let's go have some fun with the cloned repo
git config user.name "GHA CI"
git config user.email "gha@ci.invalid"
if [[ -n $TAG_NAME ]]; then
# track files, so that the following check works
git add --intent-to-add "$TAG_NAME"
if git diff --exit-code --quiet -- $TAG_NAME/; then
echo "No changes to the output on this push; exiting."
exit 0
fi
# Add the new dir
git add "$TAG_NAME"
# Update the symlink
git add stable
# Update versions file
git add versions.json
git commit -m "Add documentation for ${TAG_NAME} release: ${SHA}"
elif [[ $BETA = "true" ]]; then
if git diff --exit-code --quiet -- beta/; then
echo "No changes to the output on this push; exiting."
exit 0
fi
git add beta
git commit -m "Automatic deploy to GitHub Pages (beta): ${SHA}"
else
if git diff --exit-code --quiet; then
echo "No changes to the output on this push; exiting."
exit 0
fi
git add .
git commit -m "Automatic deploy to GitHub Pages: ${SHA}"
fi
git push "$SSH_REPO" "$TARGET_BRANCH"

src/tools/clippy/.github/driver.sh

@ -0,0 +1,39 @@
#!/bin/bash
set -ex
# Check sysroot handling
sysroot=$(./target/debug/clippy-driver --print sysroot)
test "$sysroot" = "$(rustc --print sysroot)"
if [[ ${OS} == "Windows" ]]; then
desired_sysroot=C:/tmp
else
desired_sysroot=/tmp
fi
sysroot=$(./target/debug/clippy-driver --sysroot $desired_sysroot --print sysroot)
test "$sysroot" = $desired_sysroot
sysroot=$(SYSROOT=$desired_sysroot ./target/debug/clippy-driver --print sysroot)
test "$sysroot" = $desired_sysroot
# Make sure this isn't set - clippy-driver should cope without it
unset CARGO_MANIFEST_DIR
# Run a lint and make sure it produces the expected output. It's also expected to exit with code 1
# FIXME: How to match the clippy invocation in compile-test.rs?
./target/debug/clippy-driver -Dwarnings -Aunused -Zui-testing --emit metadata --crate-type bin tests/ui/double_neg.rs 2>double_neg.stderr && exit 1
sed -e "s,tests/ui,\$DIR," -e "/= help/d" double_neg.stderr >normalized.stderr
diff -u normalized.stderr tests/ui/double_neg.stderr
# make sure "clippy-driver --rustc --arg" and "rustc --arg" behave the same
SYSROOT=$(rustc --print sysroot)
diff -u <(LD_LIBRARY_PATH=${SYSROOT}/lib ./target/debug/clippy-driver --rustc --version --verbose) <(rustc --version --verbose)
echo "fn main() {}" >target/driver_test.rs
# we can't run 2 rustcs on the same file at the same time
CLIPPY=$(LD_LIBRARY_PATH=${SYSROOT}/lib ./target/debug/clippy-driver ./target/driver_test.rs --rustc)
RUSTC=$(rustc ./target/driver_test.rs)
diff -u <($CLIPPY) <($RUSTC)
# TODO: CLIPPY_CONF_DIR / CARGO_MANIFEST_DIR
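The CI workflows run this script after a debug build, with `OS` and `LD_LIBRARY_PATH` prepared
first; a rough local equivalent on Linux (assuming the pinned nightly toolchain is installed)
would be:

```bash
cargo build
export LD_LIBRARY_PATH="$(rustc --print sysroot)/lib"
OS=Linux bash .github/driver.sh
```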


@ -0,0 +1,87 @@
name: Clippy Test
on:
push:
# Ignore bors branches, since they are covered by `clippy_bors.yml`
branches-ignore:
- auto
- try
# Don't run Clippy tests when only text files were modified
paths-ignore:
- 'COPYRIGHT'
- 'LICENSE-*'
- '**.md'
- '**.txt'
pull_request:
# Don't run Clippy tests when only text files were modified
paths-ignore:
- 'COPYRIGHT'
- 'LICENSE-*'
- '**.md'
- '**.txt'
env:
RUST_BACKTRACE: 1
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
NO_FMT_TEST: 1
jobs:
base:
# NOTE: If you modify this job, make sure you copy the changes to clippy_bors.yml
runs-on: ubuntu-latest
steps:
# Setup
- uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
with:
github_token: "${{ secrets.github_token }}"
- name: Checkout
uses: actions/checkout@v2.3.3
- name: Install toolchain
run: rustup show active-toolchain
# Run
- name: Set LD_LIBRARY_PATH (Linux)
run: |
SYSROOT=$(rustc --print sysroot)
echo "LD_LIBRARY_PATH=${SYSROOT}/lib${LD_LIBRARY_PATH+:${LD_LIBRARY_PATH}}" >> $GITHUB_ENV
- name: Build
run: cargo build --features deny-warnings,internal-lints
- name: Test
run: cargo test --features deny-warnings,internal-lints
- name: Test clippy_lints
run: cargo test --features deny-warnings,internal-lints
working-directory: clippy_lints
- name: Test rustc_tools_util
run: cargo test --features deny-warnings
working-directory: rustc_tools_util
- name: Test clippy_dev
run: cargo test --features deny-warnings
working-directory: clippy_dev
- name: Test cargo-clippy
run: ../target/debug/cargo-clippy
working-directory: clippy_workspace_tests
- name: Test cargo-clippy --fix
run: ../target/debug/cargo-clippy clippy --fix -Zunstable-options
working-directory: clippy_workspace_tests
- name: Test clippy-driver
run: bash .github/driver.sh
env:
OS: ${{ runner.os }}
- name: Test cargo dev new lint
run: |
cargo dev new_lint --name new_early_pass --pass early
cargo dev new_lint --name new_late_pass --pass late
cargo check
git reset --hard HEAD


@ -0,0 +1,267 @@
name: Clippy Test (bors)
on:
push:
branches:
- auto
- try
env:
RUST_BACKTRACE: 1
CARGO_TARGET_DIR: '${{ github.workspace }}/target'
NO_FMT_TEST: 1
defaults:
run:
shell: bash
jobs:
changelog:
runs-on: ubuntu-latest
steps:
- uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
with:
github_token: "${{ secrets.github_token }}"
- name: Checkout
uses: actions/checkout@v2.3.3
with:
ref: ${{ github.ref }}
# Run
- name: Check Changelog
run: |
MESSAGE=$(git log --format=%B -n 1)
PR=$(echo "$MESSAGE" | grep -o "#[0-9]*" | head -1 | sed -e 's/^#//')
output=$(curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -s "https://api.github.com/repos/rust-lang/rust-clippy/pulls/$PR" | \
python -c "import sys, json; print(json.load(sys.stdin)['body'])" | \
grep "^changelog: " | \
sed "s/changelog: //g")
if [[ -z "$output" ]]; then
echo "ERROR: PR body must contain 'changelog: ...'"
exit 1
elif [[ "$output" = "none" ]]; then
echo "WARNING: changelog is 'none'"
fi
env:
PYTHONIOENCODING: 'utf-8'
base:
needs: changelog
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
host: [x86_64-unknown-linux-gnu, i686-unknown-linux-gnu, x86_64-apple-darwin, x86_64-pc-windows-msvc]
exclude:
- os: ubuntu-latest
host: x86_64-apple-darwin
- os: ubuntu-latest
host: x86_64-pc-windows-msvc
- os: macos-latest
host: x86_64-unknown-linux-gnu
- os: macos-latest
host: i686-unknown-linux-gnu
- os: macos-latest
host: x86_64-pc-windows-msvc
- os: windows-latest
host: x86_64-unknown-linux-gnu
- os: windows-latest
host: i686-unknown-linux-gnu
- os: windows-latest
host: x86_64-apple-darwin
runs-on: ${{ matrix.os }}
# NOTE: If you modify this job, make sure you copy the changes to clippy.yml
steps:
# Setup
- uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
with:
github_token: "${{ secrets.github_token }}"
- name: Install dependencies (Linux-i686)
run: |
sudo dpkg --add-architecture i386
sudo apt-get update
sudo apt-get install gcc-multilib libssl-dev:i386 libgit2-dev:i386
if: matrix.host == 'i686-unknown-linux-gnu'
- name: Checkout
uses: actions/checkout@v2.3.3
- name: Install toolchain
run: rustup show active-toolchain
# Run
- name: Set LD_LIBRARY_PATH (Linux)
if: runner.os == 'Linux'
run: |
SYSROOT=$(rustc --print sysroot)
echo "LD_LIBRARY_PATH=${SYSROOT}/lib${LD_LIBRARY_PATH+:${LD_LIBRARY_PATH}}" >> $GITHUB_ENV
- name: Link rustc dylib (MacOS)
if: runner.os == 'macOS'
run: |
SYSROOT=$(rustc --print sysroot)
sudo mkdir -p /usr/local/lib
sudo find "${SYSROOT}/lib" -maxdepth 1 -name '*dylib' -exec ln -s {} /usr/local/lib \;
- name: Set PATH (Windows)
if: runner.os == 'Windows'
run: |
SYSROOT=$(rustc --print sysroot)
echo "$SYSROOT/bin" >> $GITHUB_PATH
- name: Build
run: cargo build --features deny-warnings,internal-lints
- name: Test
run: cargo test --features deny-warnings,internal-lints
- name: Test clippy_lints
run: cargo test --features deny-warnings,internal-lints
working-directory: clippy_lints
- name: Test rustc_tools_util
run: cargo test --features deny-warnings
working-directory: rustc_tools_util
- name: Test clippy_dev
run: cargo test --features deny-warnings
working-directory: clippy_dev
- name: Test cargo-clippy
run: ../target/debug/cargo-clippy
working-directory: clippy_workspace_tests
- name: Test cargo-clippy --fix
run: ../target/debug/cargo-clippy clippy --fix -Zunstable-options
working-directory: clippy_workspace_tests
- name: Test clippy-driver
run: bash .github/driver.sh
env:
OS: ${{ runner.os }}
- name: Test cargo dev new lint
run: |
cargo dev new_lint --name new_early_pass --pass early
cargo dev new_lint --name new_late_pass --pass late
cargo check
git reset --hard HEAD
integration_build:
needs: changelog
runs-on: ubuntu-latest
steps:
# Setup
- uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
with:
github_token: "${{ secrets.github_token }}"
- name: Checkout
uses: actions/checkout@v2.3.3
- name: Install toolchain
run: rustup show active-toolchain
# Run
- name: Build Integration Test
run: cargo test --test integration --features integration --no-run
# Upload
- name: Extract Binaries
run: |
DIR=$CARGO_TARGET_DIR/debug
rm $DIR/deps/integration-*.d
mv $DIR/deps/integration-* $DIR/integration
find $DIR ! -executable -o -type d ! -path $DIR | xargs rm -rf
rm -rf $CARGO_TARGET_DIR/release
- name: Upload Binaries
uses: actions/upload-artifact@v1
with:
name: target
path: target
integration:
needs: integration_build
strategy:
fail-fast: false
max-parallel: 6
matrix:
integration:
- 'rust-lang/cargo'
# FIXME: re-enable once fmt_macros is renamed in RLS
# - 'rust-lang/rls'
- 'rust-lang/chalk'
- 'rust-lang/rustfmt'
- 'Marwes/combine'
- 'Geal/nom'
- 'rust-lang/stdarch'
- 'serde-rs/serde'
# FIXME: chrono currently cannot be compiled with `--all-targets`
# - 'chronotope/chrono'
- 'hyperium/hyper'
- 'rust-random/rand'
- 'rust-lang/futures-rs'
- 'rust-itertools/itertools'
- 'rust-lang-nursery/failure'
- 'rust-lang/log'
runs-on: ubuntu-latest
steps:
# Setup
- uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
with:
github_token: "${{ secrets.github_token }}"
- name: Checkout
uses: actions/checkout@v2.3.3
- name: Install toolchain
run: rustup show active-toolchain
# Download
- name: Download target dir
uses: actions/download-artifact@v1
with:
name: target
path: target
- name: Make Binaries Executable
run: chmod +x $CARGO_TARGET_DIR/debug/*
# Run
- name: Test ${{ matrix.integration }}
run: |
RUSTUP_TOOLCHAIN="$(rustup show active-toolchain | grep -o -E "nightly-[0-9]{4}-[0-9]{2}-[0-9]{2}")" \
$CARGO_TARGET_DIR/debug/integration
env:
INTEGRATION: ${{ matrix.integration }}
# These jobs don't actually test anything; they're only used to tell
# bors the build completed, as there is no practical way to detect when a
# workflow is successful by listening to webhooks only.
#
# ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB!
end-success:
name: bors test finished
if: github.event.pusher.name == 'bors' && success()
runs-on: ubuntu-latest
needs: [changelog, base, integration_build, integration]
steps:
- name: Mark the job as successful
run: exit 0
end-failure:
name: bors test finished
if: github.event.pusher.name == 'bors' && (failure() || cancelled())
runs-on: ubuntu-latest
needs: [changelog, base, integration_build, integration]
steps:
- name: Mark the job as a failure
run: exit 1
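To reproduce a single integration run locally, instead of going through the artifact
upload/download used above, something along these lines should work (the repository choice is
illustrative; it builds and runs the same `integration` test with the `integration` feature):

```bash
INTEGRATION=rust-lang/log cargo test --test integration --features integration
```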


@ -0,0 +1,78 @@
name: Clippy Dev Test
on:
push:
branches:
- auto
- try
pull_request:
# Only run on paths that get checked by the clippy_dev tool
paths:
- 'CHANGELOG.md'
- 'README.md'
- '**.stderr'
- '**.rs'
env:
RUST_BACKTRACE: 1
jobs:
clippy_dev:
runs-on: ubuntu-latest
steps:
# Setup
- name: Checkout
uses: actions/checkout@v2.3.3
- name: remove toolchain file
run: rm rust-toolchain
- name: rust-toolchain
uses: actions-rs/toolchain@v1.0.6
with:
toolchain: nightly
target: x86_64-unknown-linux-gnu
profile: minimal
components: rustfmt
default: true
# Run
- name: Build
run: cargo build --features deny-warnings
working-directory: clippy_dev
- name: Test limit_stderr_length
run: cargo dev limit_stderr_length
- name: Test update_lints
run: cargo dev update_lints --check
- name: Test fmt
run: cargo dev fmt --check
# These jobs don't actually test anything; they're only used to tell
# bors the build completed, as there is no practical way to detect when a
# workflow is successful by listening to webhooks only.
#
# ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB!
end-success:
name: bors dev test finished
if: github.event.pusher.name == 'bors' && success()
runs-on: ubuntu-latest
needs: [clippy_dev]
steps:
- name: Mark the job as successful
run: exit 0
end-failure:
name: bors dev test finished
if: github.event.pusher.name == 'bors' && (failure() || cancelled())
runs-on: ubuntu-latest
needs: [clippy_dev]
steps:
- name: Mark the job as a failure
run: exit 1


@ -0,0 +1,51 @@
name: Deploy
on:
push:
branches:
- master
- beta
tags:
- rust-1.**
env:
TARGET_BRANCH: 'gh-pages'
SHA: '${{ github.sha }}'
SSH_REPO: 'git@github.com:${{ github.repository }}.git'
jobs:
deploy:
runs-on: ubuntu-latest
if: github.repository == 'rust-lang/rust-clippy'
steps:
# Setup
- name: Checkout
uses: actions/checkout@v2.3.3
- name: Checkout
uses: actions/checkout@v2.3.3
with:
ref: ${{ env.TARGET_BRANCH }}
path: 'out'
# Run
- name: Set tag name
if: startswith(github.ref, 'refs/tags/')
run: |
TAG=$(basename ${{ github.ref }})
echo "TAG_NAME=$TAG" >> $GITHUB_ENV
- name: Set beta to true
if: github.ref == 'refs/heads/beta'
run: echo "BETA=true" >> $GITHUB_ENV
- name: Use scripts and templates from master branch
run: |
git fetch --no-tags --prune --depth=1 origin master
git checkout origin/master -- .github/deploy.sh util/gh-pages/ util/*.py
- name: Deploy
run: |
eval "$(ssh-agent -s)"
ssh-add - <<< "${{ secrets.DEPLOY_KEY }}"
bash .github/deploy.sh


@ -0,0 +1,55 @@
name: Remark
on:
push:
branches:
- auto
- try
pull_request:
paths:
- '**.md'
jobs:
remark:
runs-on: ubuntu-latest
steps:
# Setup
- name: Checkout
uses: actions/checkout@v2.3.3
- name: Setup Node.js
uses: actions/setup-node@v1.4.4
- name: Install remark
run: npm install remark-cli remark-lint remark-lint-maximum-line-length remark-preset-lint-recommended
# Run
- name: Check *.md files
run: git ls-files -z '*.md' | xargs -0 -n 1 -I {} ./node_modules/.bin/remark {} -u lint -f > /dev/null
# These jobs don't actually test anything; they're only used to tell
# bors the build completed, as there is no practical way to detect when a
# workflow is successful by listening to webhooks only.
#
# ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB!
end-success:
name: bors remark test finished
if: github.event.pusher.name == 'bors' && success()
runs-on: ubuntu-latest
needs: [remark]
steps:
- name: Mark the job as successful
run: exit 0
end-failure:
name: bors remark test finished
if: github.event.pusher.name == 'bors' && (failure() || cancelled())
runs-on: ubuntu-latest
needs: [remark]
steps:
- name: Mark the job as a failure
run: exit 1


@ -0,0 +1,12 @@
{
"plugins": [
"remark-preset-lint-recommended",
["remark-lint-list-item-indent", false],
["remark-lint-no-literal-urls", false],
["remark-lint-no-shortcut-reference-link", false],
["remark-lint-maximum-line-length", 120]
],
"settings": {
"commonmark": true
}
}

File diff suppressed because it is too large.


@ -0,0 +1,70 @@
# The Rust Code of Conduct
A version of this document [can be found online](https://www.rust-lang.org/conduct.html).
## Conduct
**Contact**: [rust-mods@rust-lang.org](mailto:rust-mods@rust-lang.org)
* We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience,
gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
religion, nationality, or other similar characteristic.
* On IRC, please avoid using overtly sexual nicknames or other nicknames that might detract from a friendly, safe and
welcoming environment for all.
* Please be kind and courteous. There's no need to be mean or rude.
* Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and
numerous costs. There is seldom a right answer.
* Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and
see how it works.
* We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We
interpret the term "harassment" as including the definition in the <a href="http://citizencodeofconduct.org/">Citizen
Code of Conduct</a>; if you have any lack of clarity about what might be included in that concept, please read their
definition. In particular, we don't tolerate behavior that excludes people in socially marginalized groups.
* Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or
made uncomfortable by a community member, please contact one of the channel ops or any of the [Rust moderation
team][mod_team] immediately. Whether you're a regular contributor or a newcomer, we care about making this community a
safe place for you and we've got your back.
* Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
## Moderation
These are the policies for upholding our community's standards of conduct. If you feel that a thread needs moderation,
please contact the [Rust moderation team][mod_team].
1. Remarks that violate the Rust standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks,
are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
3. Moderators will first respond to such remarks with a warning.
4. If the warning is unheeded, the user will be "kicked," i.e., kicked out of the communication channel to cool off.
5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended
party a genuine apology.
7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a
different moderator, **in private**. Complaints about bans in-channel are not allowed.
8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate
situation, they should expect less leeway than others.
In the Rust community we strive to go the extra step to look out for each other. Don't just aim to be technically
unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly
if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can
drive people away from the community entirely.
And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was
they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good
there was something you could've communicated better — remember that it's your responsibility to make your fellow
Rustaceans comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about
cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their
trust.
The enforcement policies listed above apply to all official Rust venues; including official IRC channels (#rust,
#rust-internals, #rust-tools, #rust-libs, #rustc, #rust-beginners, #rust-docs, #rust-community, #rust-lang, and #cargo);
GitHub repositories under rust-lang, rust-lang-nursery, and rust-lang-deprecated; and all forums under rust-lang.org
(users.rust-lang.org, internals.rust-lang.org). For other projects adopting the Rust Code of Conduct, please contact the
maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider
explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
*Adapted from the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the
[Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).*
[mod_team]: https://www.rust-lang.org/team.html#Moderation-team


@ -0,0 +1,359 @@
# Contributing to Clippy
Hello fellow Rustacean! Great to see your interest in compiler internals and lints!
**First**: if you're unsure or afraid of _anything_, just ask or submit the issue or pull request anyway. You won't be
yelled at for giving it your best effort. The worst that can happen is that you'll be politely asked to change
something. We appreciate any sort of contributions, and don't want a wall of rules to get in the way of that.
Clippy welcomes contributions from everyone. There are many ways to contribute to Clippy and the following document
explains how you can contribute and how to get started. If you have any questions about contributing or need help with
anything, feel free to ask questions on issues or visit the `#clippy` stream on [Zulip].
All contributors are expected to follow the [Rust Code of Conduct].
- [Contributing to Clippy](#contributing-to-clippy)
- [Getting started](#getting-started)
- [High level approach](#high-level-approach)
- [Finding something to fix/improve](#finding-something-to-fiximprove)
- [Writing code](#writing-code)
- [Getting code-completion for rustc internals to work](#getting-code-completion-for-rustc-internals-to-work)
- [How Clippy works](#how-clippy-works)
- [Syncing changes between Clippy and `rust-lang/rust`](#syncing-changes-between-clippy-and-rust-langrust)
- [Patching git-subtree to work with big repos](#patching-git-subtree-to-work-with-big-repos)
- [Performing the sync from `rust-lang/rust` to Clippy](#performing-the-sync-from-rust-langrust-to-clippy)
- [Performing the sync from Clippy to `rust-lang/rust`](#performing-the-sync-from-clippy-to-rust-langrust)
- [Defining remotes](#defining-remotes)
- [Issue and PR triage](#issue-and-pr-triage)
- [Bors and Homu](#bors-and-homu)
- [Contributions](#contributions)
[Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/clippy
[Rust Code of Conduct]: https://www.rust-lang.org/policies/code-of-conduct
## Getting started
**Note: If this is your first time contributing to Clippy, you should
first read the [Basics docs](doc/basics.md).**
### High level approach
1. Find something to fix/improve
2. Change code (likely some file in `clippy_lints/src/`)
3. Follow the instructions in the [Basics docs](doc/basics.md) to get set up
4. Run `cargo test` in the root directory and wiggle code until it passes
5. Open a PR (also can be done after 2. if you run into problems)
### Finding something to fix/improve
All issues on Clippy are mentored; if you want help, simply ask @Manishearth, @flip1995, @phansch
or @llogiq directly by mentioning them in the issue or over on [Zulip]. This list may be out of date.
All currently active mentors can be found [here](https://github.com/rust-lang/highfive/blob/master/highfive/configs/rust-lang/rust-clippy.json#L3).
Some issues are easier than others. The [`good-first-issue`] label can be used to find the easy
issues. You can use `@rustbot claim` to assign the issue to yourself.
There are also some abandoned PRs, marked with [`S-inactive-closed`].
Pretty often these PRs are nearly completed and just need some extra steps
(formatting, addressing review comments, ...) to be merged. If you want to
complete such a PR, please leave a comment in the PR and open a new one based
on it.
Issues marked [`T-AST`] involve simple matching of the syntax tree structure,
and are generally easier than [`T-middle`] issues, which involve types
and resolved paths.
[`T-AST`] issues will generally need you to match against a predefined syntax structure.
To figure out how this syntax structure is encoded in the AST, it is recommended to run
`rustc -Z ast-json` on an example of the structure and compare with the [nodes in the AST docs].
Usually the lint will end up being a nested series of matches and ifs, [like so][deep-nesting].
But we can make it nest-less by using the [if_chain] macro, [like this][nest-less].
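A quick way to follow the `rustc -Z ast-json` suggestion above is to dump the AST of a tiny
snippet and compare it with the AST docs (the example file is illustrative; `-Z` flags require a
nightly toolchain):

```bash
cat > example.rs <<'EOF'
fn main() {
    if true {
    } else if false {
    }
}
EOF
rustc +nightly -Z ast-json example.rs
```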
[`E-medium`] issues are generally pretty easy too, though it's recommended you work on a [`good-first-issue`]
first. Sometimes they are only somewhat involved code-wise, but not difficult per se.
Note that [`E-medium`] issues may require some knowledge of Clippy internals or some
debugging to find the actual problem behind the issue.
[`T-middle`] issues can be more involved and require verifying types. The [`ty`] module contains a
lot of methods that are useful, though one of the most useful would be `expr_ty` (gives the type of
an AST expression). `match_def_path()` in Clippy's `utils` module can also be useful.
[`good-first-issue`]: https://github.com/rust-lang/rust-clippy/labels/good-first-issue
[`S-inactive-closed`]: https://github.com/rust-lang/rust-clippy/pulls?q=is%3Aclosed+label%3AS-inactive-closed
[`T-AST`]: https://github.com/rust-lang/rust-clippy/labels/T-AST
[`T-middle`]: https://github.com/rust-lang/rust-clippy/labels/T-middle
[`E-medium`]: https://github.com/rust-lang/rust-clippy/labels/E-medium
[`ty`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty
[nodes in the AST docs]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast/ast/
[deep-nesting]: https://github.com/rust-lang/rust-clippy/blob/557f6848bd5b7183f55c1e1522a326e9e1df6030/clippy_lints/src/mem_forget.rs#L29-L43
[if_chain]: https://docs.rs/if_chain/*/if_chain
[nest-less]: https://github.com/rust-lang/rust-clippy/blob/557f6848bd5b7183f55c1e1522a326e9e1df6030/clippy_lints/src/bit_mask.rs#L124-L150
## Writing code
Have a look at the [docs for writing lints][adding_lints] for more details.
If you want to add a new lint or change existing ones apart from bugfixing, it's
also a good idea to give the [stability guarantees][rfc_stability] and
[lint categories][rfc_lint_cats] sections of the [Clippy 1.0 RFC][clippy_rfc] a
quick read.
[adding_lints]: https://github.com/rust-lang/rust-clippy/blob/master/doc/adding_lints.md
[clippy_rfc]: https://github.com/rust-lang/rfcs/blob/master/text/2476-clippy-uno.md
[rfc_stability]: https://github.com/rust-lang/rfcs/blob/master/text/2476-clippy-uno.md#stability-guarantees
[rfc_lint_cats]: https://github.com/rust-lang/rfcs/blob/master/text/2476-clippy-uno.md#lint-audit-and-categories
## Getting code-completion for rustc internals to work
Unfortunately, [`rust-analyzer`][ra_homepage] does not (yet?) understand how Clippy uses compiler internals
via `extern crate`, and it also needs to be able to read the source files of the rustc compiler, which are not
available via a `rustup` component at the time of writing.
To work around this, you need to have a copy of the [rustc-repo][rustc_repo] available which can be obtained via
`git clone https://github.com/rust-lang/rust/`.
Then you can run a `cargo dev` command to automatically make Clippy use the rustc-repo via path-dependencies
which rust-analyzer will be able to understand.
Run `cargo dev ra_setup --repo-path <repo-path>` where `<repo-path>` is an absolute path to the rustc repo
you just cloned.
The command will add path-dependencies pointing towards rustc-crates inside the rustc repo to
Clippy's `Cargo.toml`s and should allow rust-analyzer to understand most of the types that Clippy uses.
Just make sure to remove the dependencies again before finally making a pull request!
[ra_homepage]: https://rust-analyzer.github.io/
[rustc_repo]: https://github.com/rust-lang/rust/
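Putting the steps above together, a typical setup session looks roughly like this (the clone
path is illustrative):

```bash
# One-time: get a local copy of the rustc sources
git clone https://github.com/rust-lang/rust/ ~/code/rust

# From the Clippy repository root: point Clippy's Cargo.toml files at the rustc crates
cargo dev ra_setup --repo-path ~/code/rust

# Remember to revert these path dependencies before opening a pull request
```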
## How Clippy works
[`clippy_lints/src/lib.rs`][lint_crate_entry] imports all the different lint modules and registers them in the [`LintStore`].
For example, the [`else_if_without_else`][else_if_without_else] lint is registered like this:
```rust
// ./clippy_lints/src/lib.rs
// ...
pub mod else_if_without_else;
// ...
pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: &Conf) {
// ...
store.register_early_pass(|| box else_if_without_else::ElseIfWithoutElse);
// ...
store.register_group(true, "clippy::restriction", Some("clippy_restriction"), vec![
// ...
LintId::of(&else_if_without_else::ELSE_IF_WITHOUT_ELSE),
// ...
]);
}
```
The [`rustc_lint::LintStore`][`LintStore`] provides two methods to register lints:
[register_early_pass][reg_early_pass] and [register_late_pass][reg_late_pass]. Both take an object
that implements an [`EarlyLintPass`][early_lint_pass] or [`LateLintPass`][late_lint_pass] respectively. This is done in
every single lint. It's worth noting that the majority of `clippy_lints/src/lib.rs` is autogenerated by `cargo dev
update_lints`. When you are writing your own lint, you can use that script to save you some time.
```rust
// ./clippy_lints/src/else_if_without_else.rs
use rustc_lint::{EarlyLintPass, EarlyContext};
// ...
pub struct ElseIfWithoutElse;
// ...
impl EarlyLintPass for ElseIfWithoutElse {
// ... the functions needed to make the lint work
}
```
The difference between `EarlyLintPass` and `LateLintPass` is that the methods of the `EarlyLintPass` trait only provide
AST information. The methods of the `LateLintPass` trait are executed after type checking and contain type information
via the `LateContext` parameter.
That's why the `else_if_without_else` example uses the `register_early_pass` function: the
[actual lint logic][else_if_without_else] does not depend on any type information.
[lint_crate_entry]: https://github.com/rust-lang/rust-clippy/blob/master/clippy_lints/src/lib.rs
[else_if_without_else]: https://github.com/rust-lang/rust-clippy/blob/4253aa7137cb7378acc96133c787e49a345c2b3c/clippy_lints/src/else_if_without_else.rs
[`LintStore`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LintStore.html
[reg_early_pass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LintStore.html#method.register_early_pass
[reg_late_pass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LintStore.html#method.register_late_pass
[early_lint_pass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/trait.EarlyLintPass.html
[late_lint_pass]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/trait.LateLintPass.html
## Syncing changes between Clippy and [`rust-lang/rust`]
Clippy currently gets built with a pinned nightly version.
In the `rust-lang/rust` repository, where rustc resides, there's a copy of Clippy
that compiler hackers modify from time to time to adapt to changes in the unstable
API of the compiler.
We need to sync these changes back to this repository periodically, and the changes
made to this repository in the meantime also need to be synced to the `rust-lang/rust` repository.
To avoid flooding the `rust-lang/rust` PR queue, this two-way sync process is done
on a bi-weekly basis if there are no urgent changes. This is done starting on the day of
the Rust stable release and then every other week. That way we guarantee that we keep
this repo up to date with the latest compiler API, and every feature in Clippy is available
for 2 weeks in nightly before it can get to beta. For reference, the first sync
following this cadence was performed on 2020-08-27.
This process is described in detail in the following sections. For general information
about `subtree`s in the Rust repository see [Rust's `CONTRIBUTING.md`][subtree].
### Patching git-subtree to work with big repos
Currently there's a bug in `git-subtree` that prevents it from working properly
with the [`rust-lang/rust`] repo. There's an open PR to fix that, but it's stale.
Before continuing with the following steps, we need to manually apply that fix to
our local copy of `git-subtree`.
You can get the patched version of `git-subtree` from [here][gitgitgadget-pr].
Put this file under `/usr/lib/git-core` (taking a backup of the previous file)
and make sure it has the proper permissions:
```bash
sudo cp --backup /path/to/patched/git-subtree.sh /usr/lib/git-core/git-subtree
sudo chmod --reference=/usr/lib/git-core/git-subtree~ /usr/lib/git-core/git-subtree
sudo chown --reference=/usr/lib/git-core/git-subtree~ /usr/lib/git-core/git-subtree
```
_Note:_ The first time running `git subtree push` a cache has to be built. This
involves going through the complete Clippy history once. For this you have to
increase the stack limit, which you can do with `ulimit -s 60000`.
Make sure to run the `ulimit` command from the same session in which you call `git subtree`.
_Note:_ If you are a Debian user, `dash` is the shell used by default for scripts instead of `sh`.
This shell has a hardcoded recursion limit set to 1000. In order to make this process work,
you need to force the script to run `bash` instead. You can do this by editing the first
line of the `git-subtree` script and changing `sh` to `bash`.
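If you prefer not to edit the file by hand, the shebang change described above can be applied
with a one-liner such as the following (assuming the `/usr/lib/git-core` location used earlier;
the `sed` expression is a no-op if the first line is not exactly `#!/bin/sh`):

```bash
sudo sed -i '1s|^#!/bin/sh$|#!/bin/bash|' /usr/lib/git-core/git-subtree
```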
### Performing the sync from [`rust-lang/rust`] to Clippy
Here is a TL;DR version of the sync process (all of the following commands have
to be run inside the `rust` directory):
1. Clone the [`rust-lang/rust`] repository or make sure it is up to date.
2. Checkout the commit from the latest available nightly. You can get it using `rustup check`.
3. Sync the changes to the rust-copy of Clippy to your Clippy fork:
```bash
# Make sure to change `your-github-name` to your github name in the following command
git subtree push -P src/tools/clippy git@github.com:your-github-name/rust-clippy sync-from-rust
```
_Note:_ This will directly push to the remote repository. You can also push
to your local copy by replacing the remote address with `/path/to/rust-clippy`
directory.
_Note:_ Most of the time you have to create a merge commit in the
`rust-clippy` repo (this has to be done in the Clippy repo, not in the
rust-copy of Clippy):
```bash
git fetch origin && git fetch upstream
git checkout sync-from-rust
git merge upstream/master
```
4. Open a PR to `rust-lang/rust-clippy` and wait for it to get merged (to
accelerate the process ping the `@rust-lang/clippy` team in your PR and/or
~~annoy~~ ask them in the [Zulip] stream.)
### Performing the sync from Clippy to [`rust-lang/rust`]
All of the following commands have to be run inside the `rust` directory.
1. Make sure Clippy itself is up-to-date by following the steps outlined in the previous
section if necessary.
2. Sync the `rust-lang/rust-clippy` master to the rust-copy of Clippy:
```bash
git checkout -b sync-from-clippy
git subtree pull -P src/tools/clippy https://github.com/rust-lang/rust-clippy master
```
3. Open a PR to [`rust-lang/rust`]
### Defining remotes
You may want to define remotes, so you don't have to type out the remote
addresses on every sync. You can do this with the following commands (these
commands still have to be run inside the `rust` directory):
```bash
# Set clippy-upstream remote for pulls
$ git remote add clippy-upstream https://github.com/rust-lang/rust-clippy
# Make sure to not push to the upstream repo
$ git remote set-url --push clippy-upstream DISABLED
# Set clippy-origin remote to your fork for pushes
$ git remote add clippy-origin git@github.com:your-github-name/rust-clippy
# Set a local remote
$ git remote add clippy-local /path/to/rust-clippy
```
You can then sync with the remote names from above, e.g.:
```bash
$ git subtree push -P src/tools/clippy clippy-local sync-from-rust
```
[gitgitgadget-pr]: https://github.com/gitgitgadget/git/pull/493
[subtree]: https://rustc-dev-guide.rust-lang.org/contributing.html#external-dependencies-subtree
[`rust-lang/rust`]: https://github.com/rust-lang/rust
## Issue and PR triage
Clippy is following the [Rust triage procedure][triage] for issues and pull
requests.
However, we are a smaller project with all contributors being volunteers
currently. Between writing new lints, fixing issues, reviewing pull requests and
responding to issues there may not always be enough time to stay on top of it
all.
Our highest priority is fixing [crashes][l-crash] and [bugs][l-bug], for example
an ICE in a popular crate that many other crates depend on. We don't
want Clippy to crash on your code and we want it to be as reliable as the
suggestions from Rust compiler errors.
We have prioritization labels and a sync-blocker label, which are described below.
- [P-low][p-low]: Requires attention (fix/response/evaluation) by a team member but isn't urgent.
- [P-medium][p-medium]: Should be addressed by a team member before the next sync.
- [P-high][p-high]: Should be immediately addressed and will require an out-of-cycle sync or a backport.
- [L-sync-blocker][l-sync-blocker]: An issue that "blocks" a sync.
Or rather: before the sync this should be addressed,
e.g. by removing a lint again, so it doesn't hit beta/stable.
## Bors and Homu
We use a bot powered by [Homu][homu] to help automate testing and landing of pull
requests in Clippy. The bot's username is @bors.
You can find the Clippy bors queue [here][homu_queue].
If you have @bors permissions, you can find an overview of the available
commands [here][homu_instructions].
[triage]: https://forge.rust-lang.org/release/triage-procedure.html
[l-crash]: https://github.com/rust-lang/rust-clippy/labels/L-crash
[l-bug]: https://github.com/rust-lang/rust-clippy/labels/L-bug
[p-low]: https://github.com/rust-lang/rust-clippy/labels/P-low
[p-medium]: https://github.com/rust-lang/rust-clippy/labels/P-medium
[p-high]: https://github.com/rust-lang/rust-clippy/labels/P-high
[l-sync-blocker]: https://github.com/rust-lang/rust-clippy/labels/L-sync-blocker
[homu]: https://github.com/rust-lang/homu
[homu_instructions]: https://bors.rust-lang.org/
[homu_queue]: https://bors.rust-lang.org/queue/clippy
## Contributions
Contributions to Clippy should be made in the form of GitHub pull requests. Each pull request will
be reviewed by a core contributor (someone with permission to land patches) and either landed in the
main tree or given feedback for changes that would be required.
All code in this repository is under the [Apache-2.0] or the [MIT] license.
<!-- adapted from https://github.com/servo/servo/blob/master/CONTRIBUTING.md -->
[Apache-2.0]: https://www.apache.org/licenses/LICENSE-2.0
[MIT]: https://opensource.org/licenses/MIT


@ -0,0 +1,7 @@
Copyright 2014-2020 The Rust Project Developers
Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
<LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
option. All files in the project carrying such notice may not be
copied, modified, or distributed except according to those terms.


@ -0,0 +1,56 @@
[package]
name = "clippy"
version = "0.1.52"
authors = ["The Rust Clippy Developers"]
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"
license = "MIT OR Apache-2.0"
keywords = ["clippy", "lint", "plugin"]
categories = ["development-tools", "development-tools::cargo-plugins"]
build = "build.rs"
edition = "2018"
publish = false
[[bin]]
name = "cargo-clippy"
test = false
path = "src/main.rs"
[[bin]]
name = "clippy-driver"
path = "src/driver.rs"
[dependencies]
# begin automatic update
clippy_lints = { version = "0.1.50", path = "clippy_lints" }
# end automatic update
semver = "0.11"
rustc_tools_util = { version = "0.2.0", path = "rustc_tools_util" }
tempfile = { version = "3.1.0", optional = true }
[dev-dependencies]
cargo_metadata = "0.12"
compiletest_rs = { version = "0.6.0", features = ["tmp"] }
tester = "0.9"
clippy-mini-macro-test = { version = "0.2", path = "mini-macro" }
serde = { version = "1.0", features = ["derive"] }
derive-new = "0.5"
regex = "1.4"
# A noop dependency that changes in the Rust repository; it's a bit of a hack.
# See the `src/tools/rustc-workspace-hack/README.md` file in `rust-lang/rust`
# for more information.
rustc-workspace-hack = "1.0.0"
[build-dependencies]
rustc_tools_util = { version = "0.2.0", path = "rustc_tools_util" }
[features]
deny-warnings = []
integration = ["tempfile"]
internal-lints = ["clippy_lints/internal-lints"]
[package.metadata.rust-analyzer]
# This package uses #[feature(rustc_private)]
rustc_private = true


@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2014-2020 The Rust Project Developers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,27 @@
MIT License
Copyright (c) 2014-2020 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

245 src/tools/clippy/README.md Normal file
View File

@@ -0,0 +1,245 @@
# Clippy
[![Clippy Test](https://github.com/rust-lang/rust-clippy/workflows/Clippy%20Test/badge.svg?branch=auto&event=push)](https://github.com/rust-lang/rust-clippy/actions?query=workflow%3A%22Clippy+Test%22+event%3Apush+branch%3Aauto)
[![License: MIT OR Apache-2.0](https://img.shields.io/crates/l/clippy.svg)](#license)
A collection of lints to catch common mistakes and improve your [Rust](https://github.com/rust-lang/rust) code.
[There are over 400 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html)
Lints are divided into categories, each with a default [lint level](https://doc.rust-lang.org/rustc/lints/levels.html).
You can choose how much Clippy is supposed to ~~annoy~~ help you by changing the lint level by category.
| Category | Description | Default level |
| --------------------- | ----------------------------------------------------------------------- | ------------- |
| `clippy::all` | all lints that are on by default (correctness, style, complexity, perf) | **warn/deny** |
| `clippy::correctness` | code that is outright wrong or very useless | **deny** |
| `clippy::style` | code that should be written in a more idiomatic way | **warn** |
| `clippy::complexity` | code that does something simple but in a complex way | **warn** |
| `clippy::perf` | code that can be written to run faster | **warn** |
| `clippy::pedantic` | lints which are rather strict or might have false positives | allow |
| `clippy::nursery` | new lints that are still under development | allow |
| `clippy::cargo` | lints for the cargo manifest | allow |
More to come, please [file an issue](https://github.com/rust-lang/rust-clippy/issues) if you have ideas!
The [lint list](https://rust-lang.github.io/rust-clippy/master/index.html) also contains "restriction lints", which are
for things which are usually not considered "bad", but may be useful to turn on in specific cases. These should be used
very selectively, if at all.
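As an illustrative sketch (using `clippy::unwrap_used` purely as an example of a restriction lint), enabling one looks like any other lint attribute:

```rust
// Crate root. `clippy::unwrap_used` is only an example of a restriction lint;
// enable the specific restriction lints that make sense for your project.
#![warn(clippy::unwrap_used)]

fn main() {
    let value: Option<u32> = Some(1);
    // With the lint enabled, Clippy flags this `unwrap()` call.
    println!("{}", value.unwrap());
}
```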
Table of contents:
* [Usage instructions](#usage)
* [Configuration](#configuration)
* [Contributing](#contributing)
* [License](#license)
## Usage
Below are instructions on how to use Clippy as a cargo subcommand, as a `rustc` replacement
in projects that do not use cargo, or in Travis CI.
### As a cargo subcommand (`cargo clippy`)
One way to use Clippy is to install it through rustup as a cargo subcommand.
#### Step 1: Install rustup
You can install [rustup](https://rustup.rs/) on supported platforms. This will help
us install Clippy and its dependencies.
If you already have rustup installed, update to ensure you have the latest
rustup and compiler:
```terminal
rustup update
```
#### Step 2: Install Clippy
Once you have rustup and the latest stable release (at least Rust 1.29) installed, run the following command:
```terminal
rustup component add clippy
```
If it says that it can't find the `clippy` component, please run `rustup self update`.
#### Step 3: Run Clippy
Now you can run Clippy by invoking the following command:
```terminal
cargo clippy
```
#### Automatically applying Clippy suggestions
Clippy can automatically apply some lint suggestions.
Note that this is still experimental and only supported on the nightly channel:
```terminal
cargo clippy --fix -Z unstable-options
```
#### Workspaces
All the usual workspace options should work with Clippy. For example, the following command
will run Clippy on the `example` crate:
```terminal
cargo clippy -p example
```
As with `cargo check`, this includes dependencies that are members of the workspace, like path dependencies.
If you want to run Clippy **only** on the given crate, use the `--no-deps` option like this:
```terminal
cargo clippy -p example -- --no-deps
```
### As a rustc replacement (`clippy-driver`)
Clippy can also be used in projects that do not use cargo. To do so, you will need to replace
your `rustc` compilation commands with `clippy-driver`. For example, if your project runs:
```terminal
rustc --edition 2018 -Cpanic=abort foo.rs
```
Then, to enable Clippy, you will need to call:
```terminal
clippy-driver --edition 2018 -Cpanic=abort foo.rs
```
Note that `rustc` will still run, i.e. it will still emit the output files it normally does.
### Travis CI
You can add Clippy to Travis CI in the same way you use it locally:
```yml
language: rust
rust:
- stable
- beta
before_script:
- rustup component add clippy
script:
- cargo clippy
# if you want the build job to fail when encountering warnings, use
- cargo clippy -- -D warnings
# in order to also check tests and non-default crate features, use
- cargo clippy --all-targets --all-features -- -D warnings
- cargo test
# etc.
```
Note that adding `-D warnings` will cause your build to fail if **any** warnings are found in your code.
That includes warnings found by rustc (e.g. `dead_code`). If you want to avoid this and only cause
an error for Clippy warnings, use `#![deny(clippy::all)]` in your code or `-D clippy::all` on the command
line. (You can swap `clippy::all` with the specific lint category you are targeting.)
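As a minimal sketch of the in-code approach (the crate contents here are placeholders), denying `clippy::all` at the crate root turns Clippy's default lints into errors without touching rustc's own warnings:

```rust
// Crate root (main.rs). Only Clippy's default lint groups become hard errors;
// rustc lints such as `dead_code` remain ordinary warnings.
#![deny(clippy::all)]

fn main() {
    println!("CI now fails on any default Clippy lint in this crate.");
}
```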
## Configuration
Some lints can be configured in a TOML file named `clippy.toml` or `.clippy.toml`. It contains a basic
`variable = value` mapping, e.g.
```toml
blacklisted-names = ["toto", "tata", "titi"]
cognitive-complexity-threshold = 30
```
See the [list of lints](https://rust-lang.github.io/rust-clippy/master/index.html) for more information about which
lints can be configured and the meaning of the variables.
To deactivate the “for further information visit *lint-link*” message, you can
define the `CLIPPY_DISABLE_DOCS_LINKS` environment variable.
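For example, assuming a Unix-like shell, this could look like:

```terminal
CLIPPY_DISABLE_DOCS_LINKS=1 cargo clippy
```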
### Allowing/denying lints
You can add options to your code to `allow`/`warn`/`deny` Clippy lints:
* the whole set of `Warn` lints using the `clippy` lint group (`#![deny(clippy::all)]`)
* all lints using both the `clippy` and `clippy::pedantic` lint groups (`#![deny(clippy::all)]`,
`#![deny(clippy::pedantic)]`). Note that `clippy::pedantic` contains some very aggressive
lints prone to false positives.
* only some lints (`#![deny(clippy::single_match, clippy::box_vec)]`, etc.)
* `allow`/`warn`/`deny` can be limited to a single function or module using `#[allow(...)]`, etc.
Note: `allow` means to suppress the lint for your code. With `warn` the lint
will only emit a warning when it triggers for your code, while with `deny` it
will emit an error. An error causes Clippy to exit with an error code, which is
useful in scripts such as CI/CD pipelines.
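Here is a minimal sketch of mixing these levels (the function and lint choices are placeholders picked for illustration):

```rust
// Deny every default Clippy lint for the whole crate...
#![deny(clippy::all)]

// ...but allow one specific lint in a function where the pattern is intentional.
#[allow(clippy::single_match)]
fn handle(input: Option<u32>) {
    match input {
        Some(n) => println!("got {}", n),
        None => {}
    }
}

fn main() {
    handle(Some(3));
}
```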
If you do not want to include your lint levels in your code, you can globally
enable/disable lints by passing extra flags to Clippy during the run:
To allow `lint_name`, run
```terminal
cargo clippy -- -A clippy::lint_name
```
And to warn on `lint_name`, run
```terminal
cargo clippy -- -W clippy::lint_name
```
This also works with lint groups. For example, you
can run Clippy with warnings for the whole `clippy::pedantic` group enabled:
```terminal
cargo clippy -- -W clippy::pedantic
```
If you care only about a single lint, you can allow all others and then explicitly warn on
the lint(s) you are interested in:
```terminal
cargo clippy -- -A clippy::all -W clippy::useless_format -W clippy::...
```
### Specifying the minimum supported Rust version
Projects that intend to support old versions of Rust can disable lints pertaining to newer features by
specifying the minimum supported Rust version (MSRV) in the clippy configuration file.
```toml
msrv = "1.30.0"
```
The MSRV can also be specified as an inner attribute, like below.
```rust
#![feature(custom_inner_attributes)]
#![clippy::msrv = "1.30.0"]
fn main() {
...
}
```
You can also omit the patch version when specifying the MSRV, so `msrv = 1.30`
is equivalent to `msrv = 1.30.0`.
Note: `custom_inner_attributes` is an unstable feature, so it has to be enabled explicitly.
Lints that recognize this configuration option can be found [here](https://rust-lang.github.io/rust-clippy/master/index.html#msrv).
## Contributing
If you want to contribute to Clippy, you can find more information in [CONTRIBUTING.md](https://github.com/rust-lang/rust-clippy/blob/master/CONTRIBUTING.md).
## License
Copyright 2014-2020 The Rust Project Developers
Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
[https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)> or the MIT license
<LICENSE-MIT or [https://opensource.org/licenses/MIT](https://opensource.org/licenses/MIT)>, at your
option. Files in the project may not be
copied, modified, or distributed except according to those terms.

19 src/tools/clippy/build.rs Normal file
View File

@@ -0,0 +1,19 @@
fn main() {
// Forward the profile to the main compilation
println!("cargo:rustc-env=PROFILE={}", std::env::var("PROFILE").unwrap());
// Don't rebuild even if nothing changed
println!("cargo:rerun-if-changed=build.rs");
// forward git repo hashes we build at
println!(
"cargo:rustc-env=GIT_HASH={}",
rustc_tools_util::get_commit_hash().unwrap_or_default()
);
println!(
"cargo:rustc-env=COMMIT_DATE={}",
rustc_tools_util::get_commit_date().unwrap_or_default()
);
println!(
"cargo:rustc-env=RUSTC_RELEASE_CHANNEL={}",
rustc_tools_util::get_channel().unwrap_or_default()
);
}

View File

@@ -0,0 +1,17 @@
[package]
name = "clippy_dev"
version = "0.0.1"
authors = ["The Rust Clippy Developers"]
edition = "2018"
[dependencies]
bytecount = "0.6"
clap = "2.33"
itertools = "0.9"
opener = "0.4"
regex = "1"
shell-escape = "0.1"
walkdir = "2"
[features]
deny-warnings = []

View File

@@ -0,0 +1,103 @@
//! `bless` updates the reference files in the repo with changed output files
//! from the last test run.
use std::env;
use std::ffi::OsStr;
use std::fs;
use std::lazy::SyncLazy;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
use crate::clippy_project_root;
// NOTE: this is duplicated with tests/cargo/mod.rs. What to do?
pub static CARGO_TARGET_DIR: SyncLazy<PathBuf> = SyncLazy::new(|| match env::var_os("CARGO_TARGET_DIR") {
Some(v) => v.into(),
None => env::current_dir().unwrap().join("target"),
});
static CLIPPY_BUILD_TIME: SyncLazy<Option<std::time::SystemTime>> = SyncLazy::new(|| {
let profile = env::var("PROFILE").unwrap_or_else(|_| "debug".to_string());
let mut path = PathBuf::from(&**CARGO_TARGET_DIR);
path.push(profile);
path.push("cargo-clippy");
fs::metadata(path).ok()?.modified().ok()
});
/// # Panics
///
/// Panics if the path to a test file is broken
pub fn bless(ignore_timestamp: bool) {
let test_suite_dirs = [
clippy_project_root().join("tests").join("ui"),
clippy_project_root().join("tests").join("ui-internal"),
clippy_project_root().join("tests").join("ui-toml"),
clippy_project_root().join("tests").join("ui-cargo"),
];
for test_suite_dir in &test_suite_dirs {
WalkDir::new(test_suite_dir)
.into_iter()
.filter_map(Result::ok)
.filter(|f| f.path().extension() == Some(OsStr::new("rs")))
.for_each(|f| {
let test_name = f.path().strip_prefix(test_suite_dir).unwrap();
for &ext in &["stdout", "stderr", "fixed"] {
let test_name_ext = format!("stage-id.{}", ext);
update_reference_file(
f.path().with_extension(ext),
test_name.with_extension(test_name_ext),
ignore_timestamp,
);
}
});
}
}
fn update_reference_file(reference_file_path: PathBuf, test_name: PathBuf, ignore_timestamp: bool) {
let test_output_path = build_dir().join(test_name);
let relative_reference_file_path = reference_file_path.strip_prefix(clippy_project_root()).unwrap();
// If compiletest did not write any changes during the test run,
// we don't have to update anything
if !test_output_path.exists() {
return;
}
// If the test output was not updated since the last clippy build, it may be outdated
if !ignore_timestamp && !updated_since_clippy_build(&test_output_path).unwrap_or(true) {
return;
}
let test_output_file = fs::read(&test_output_path).expect("Unable to read test output file");
let reference_file = fs::read(&reference_file_path).unwrap_or_default();
if test_output_file != reference_file {
// If a test run caused an output file to change, update the reference file
println!("updating {}", &relative_reference_file_path.display());
fs::copy(test_output_path, &reference_file_path).expect("Could not update reference file");
// We need to re-read the file now because it was potentially updated from copying
let reference_file = fs::read(&reference_file_path).unwrap_or_default();
if reference_file.is_empty() {
// If we copied over an empty output file, we remove the now empty reference file
println!("removing {}", &relative_reference_file_path.display());
fs::remove_file(reference_file_path).expect("Could not remove reference file");
}
}
}
fn updated_since_clippy_build(path: &Path) -> Option<bool> {
let clippy_build_time = (*CLIPPY_BUILD_TIME)?;
let modified = fs::metadata(path).ok()?.modified().ok()?;
Some(modified >= clippy_build_time)
}
fn build_dir() -> PathBuf {
let profile = env::var("PROFILE").unwrap_or_else(|_| "debug".to_string());
let mut path = PathBuf::new();
path.push(CARGO_TARGET_DIR.clone());
path.push(profile);
path.push("test_build_base");
path
}

View File

@@ -0,0 +1,202 @@
use crate::clippy_project_root;
use shell_escape::escape;
use std::ffi::OsStr;
use std::path::Path;
use std::process::{self, Command};
use std::{fs, io};
use walkdir::WalkDir;
#[derive(Debug)]
pub enum CliError {
CommandFailed(String, String),
IoError(io::Error),
RustfmtNotInstalled,
WalkDirError(walkdir::Error),
RaSetupActive,
}
impl From<io::Error> for CliError {
fn from(error: io::Error) -> Self {
Self::IoError(error)
}
}
impl From<walkdir::Error> for CliError {
fn from(error: walkdir::Error) -> Self {
Self::WalkDirError(error)
}
}
struct FmtContext {
check: bool,
verbose: bool,
}
// the "main" function of cargo dev fmt
pub fn run(check: bool, verbose: bool) {
fn try_run(context: &FmtContext) -> Result<bool, CliError> {
let mut success = true;
let project_root = clippy_project_root();
// if we added a local rustc repo as path dependency to clippy for rust analyzer, we do NOT want to
// format because rustfmt would also format the entire rustc repo as it is a local
// dependency
if fs::read_to_string(project_root.join("Cargo.toml"))
.expect("Failed to read clippy Cargo.toml")
.contains(&"[target.'cfg(NOT_A_PLATFORM)'.dependencies]")
{
return Err(CliError::RaSetupActive);
}
rustfmt_test(context)?;
success &= cargo_fmt(context, project_root.as_path())?;
success &= cargo_fmt(context, &project_root.join("clippy_dev"))?;
success &= cargo_fmt(context, &project_root.join("rustc_tools_util"))?;
success &= cargo_fmt(context, &project_root.join("lintcheck"))?;
for entry in WalkDir::new(project_root.join("tests")) {
let entry = entry?;
let path = entry.path();
if path.extension() != Some("rs".as_ref())
|| entry.file_name() == "ice-3891.rs"
// Avoid rustfmt bug rust-lang/rustfmt#1873
|| cfg!(windows) && entry.file_name() == "implicit_hasher.rs"
{
continue;
}
success &= rustfmt(context, &path)?;
}
Ok(success)
}
fn output_err(err: CliError) {
match err {
CliError::CommandFailed(command, stderr) => {
eprintln!("error: A command failed! `{}`\nstderr: {}", command, stderr);
},
CliError::IoError(err) => {
eprintln!("error: {}", err);
},
CliError::RustfmtNotInstalled => {
eprintln!("error: rustfmt nightly is not installed.");
},
CliError::WalkDirError(err) => {
eprintln!("error: {}", err);
},
CliError::RaSetupActive => {
eprintln!(
"error: a local rustc repo is enabled as path dependency via `cargo dev ra_setup`.
Not formatting because that would format the local repo as well!
Please revert the changes to Cargo.tomls first."
);
},
}
}
let context = FmtContext { check, verbose };
let result = try_run(&context);
let code = match result {
Ok(true) => 0,
Ok(false) => {
eprintln!();
eprintln!("Formatting check failed.");
eprintln!("Run `cargo dev fmt` to update formatting.");
1
},
Err(err) => {
output_err(err);
1
},
};
process::exit(code);
}
fn format_command(program: impl AsRef<OsStr>, dir: impl AsRef<Path>, args: &[impl AsRef<OsStr>]) -> String {
let arg_display: Vec<_> = args.iter().map(|a| escape(a.as_ref().to_string_lossy())).collect();
format!(
"cd {} && {} {}",
escape(dir.as_ref().to_string_lossy()),
escape(program.as_ref().to_string_lossy()),
arg_display.join(" ")
)
}
fn exec(
context: &FmtContext,
program: impl AsRef<OsStr>,
dir: impl AsRef<Path>,
args: &[impl AsRef<OsStr>],
) -> Result<bool, CliError> {
if context.verbose {
println!("{}", format_command(&program, &dir, args));
}
let child = Command::new(&program).current_dir(&dir).args(args.iter()).spawn()?;
let output = child.wait_with_output()?;
let success = output.status.success();
if !context.check && !success {
let stderr = std::str::from_utf8(&output.stderr).unwrap_or("");
return Err(CliError::CommandFailed(
format_command(&program, &dir, args),
String::from(stderr),
));
}
Ok(success)
}
fn cargo_fmt(context: &FmtContext, path: &Path) -> Result<bool, CliError> {
let mut args = vec!["+nightly", "fmt", "--all"];
if context.check {
args.push("--");
args.push("--check");
}
let success = exec(context, "cargo", path, &args)?;
Ok(success)
}
fn rustfmt_test(context: &FmtContext) -> Result<(), CliError> {
let program = "rustfmt";
let dir = std::env::current_dir()?;
let args = &["+nightly", "--version"];
if context.verbose {
println!("{}", format_command(&program, &dir, args));
}
let output = Command::new(&program).current_dir(&dir).args(args.iter()).output()?;
if output.status.success() {
Ok(())
} else if std::str::from_utf8(&output.stderr)
.unwrap_or("")
.starts_with("error: 'rustfmt' is not installed")
{
Err(CliError::RustfmtNotInstalled)
} else {
Err(CliError::CommandFailed(
format_command(&program, &dir, args),
std::str::from_utf8(&output.stderr).unwrap_or("").to_string(),
))
}
}
fn rustfmt(context: &FmtContext, path: &Path) -> Result<bool, CliError> {
let mut args = vec!["+nightly".as_ref(), path.as_os_str()];
if context.check {
args.push("--check".as_ref());
}
let success = exec(context, "rustfmt", std::env::current_dir()?, &args)?;
if !success {
eprintln!("rustfmt failed on {}", path.display());
}
Ok(success)
}

View File

@@ -0,0 +1,558 @@
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
#![feature(once_cell)]
use itertools::Itertools;
use regex::Regex;
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fs;
use std::lazy::SyncLazy;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
pub mod bless;
pub mod fmt;
pub mod new_lint;
pub mod ra_setup;
pub mod serve;
pub mod stderr_length_check;
pub mod update_lints;
static DEC_CLIPPY_LINT_RE: SyncLazy<Regex> = SyncLazy::new(|| {
Regex::new(
r#"(?x)
declare_clippy_lint!\s*[\{(]
(?:\s+///.*)*
\s+pub\s+(?P<name>[A-Z_][A-Z_0-9]*)\s*,\s*
(?P<cat>[a-z_]+)\s*,\s*
"(?P<desc>(?:[^"\\]+|\\(?s).(?-s))*)"\s*[})]
"#,
)
.unwrap()
});
static DEC_DEPRECATED_LINT_RE: SyncLazy<Regex> = SyncLazy::new(|| {
Regex::new(
r#"(?x)
declare_deprecated_lint!\s*[{(]\s*
(?:\s+///.*)*
\s+pub\s+(?P<name>[A-Z_][A-Z_0-9]*)\s*,\s*
"(?P<desc>(?:[^"\\]+|\\(?s).(?-s))*)"\s*[})]
"#,
)
.unwrap()
});
static NL_ESCAPE_RE: SyncLazy<Regex> = SyncLazy::new(|| Regex::new(r#"\\\n\s*"#).unwrap());
pub static DOCS_LINK: &str = "https://rust-lang.github.io/rust-clippy/master/index.html";
/// Lint data parsed from the Clippy source code.
#[derive(Clone, PartialEq, Debug)]
pub struct Lint {
pub name: String,
pub group: String,
pub desc: String,
pub deprecation: Option<String>,
pub module: String,
}
impl Lint {
#[must_use]
pub fn new(name: &str, group: &str, desc: &str, deprecation: Option<&str>, module: &str) -> Self {
Self {
name: name.to_lowercase(),
group: group.to_string(),
desc: NL_ESCAPE_RE.replace(&desc.replace("\\\"", "\""), "").to_string(),
deprecation: deprecation.map(ToString::to_string),
module: module.to_string(),
}
}
/// Returns all non-deprecated lints and non-internal lints
#[must_use]
pub fn usable_lints(lints: &[Self]) -> Vec<Self> {
lints
.iter()
.filter(|l| l.deprecation.is_none() && !l.group.starts_with("internal"))
.cloned()
.collect()
}
/// Returns all internal lints (not `internal_warn` lints)
#[must_use]
pub fn internal_lints(lints: &[Self]) -> Vec<Self> {
lints.iter().filter(|l| l.group == "internal").cloned().collect()
}
/// Returns all deprecated lints
#[must_use]
pub fn deprecated_lints(lints: &[Self]) -> Vec<Self> {
lints.iter().filter(|l| l.deprecation.is_some()).cloned().collect()
}
/// Returns the lints in a `HashMap`, grouped by the different lint groups
#[must_use]
pub fn by_lint_group(lints: impl Iterator<Item = Self>) -> HashMap<String, Vec<Self>> {
lints.map(|lint| (lint.group.to_string(), lint)).into_group_map()
}
}
/// Generates the Vec items for `register_lint_group` calls in `clippy_lints/src/lib.rs`.
#[must_use]
pub fn gen_lint_group_list<'a>(lints: impl Iterator<Item = &'a Lint>) -> Vec<String> {
lints
.map(|l| format!(" LintId::of(&{}::{}),", l.module, l.name.to_uppercase()))
.sorted()
.collect::<Vec<String>>()
}
/// Generates the `pub mod module_name` list in `clippy_lints/src/lib.rs`.
#[must_use]
pub fn gen_modules_list<'a>(lints: impl Iterator<Item = &'a Lint>) -> Vec<String> {
lints
.map(|l| &l.module)
.unique()
.map(|module| format!("mod {};", module))
.sorted()
.collect::<Vec<String>>()
}
/// Generates the list of lint links at the bottom of the CHANGELOG
#[must_use]
pub fn gen_changelog_lint_list<'a>(lints: impl Iterator<Item = &'a Lint>) -> Vec<String> {
lints
.sorted_by_key(|l| &l.name)
.map(|l| format!("[`{}`]: {}#{}", l.name, DOCS_LINK, l.name))
.collect()
}
/// Generates the `register_removed` code in `./clippy_lints/src/lib.rs`.
#[must_use]
pub fn gen_deprecated<'a>(lints: impl Iterator<Item = &'a Lint>) -> Vec<String> {
lints
.flat_map(|l| {
l.deprecation
.clone()
.map(|depr_text| {
vec![
" store.register_removed(".to_string(),
format!(" \"clippy::{}\",", l.name),
format!(" \"{}\",", depr_text),
" );".to_string(),
]
})
.expect("only deprecated lints should be passed")
})
.collect::<Vec<String>>()
}
#[must_use]
pub fn gen_register_lint_list<'a>(
internal_lints: impl Iterator<Item = &'a Lint>,
usable_lints: impl Iterator<Item = &'a Lint>,
) -> Vec<String> {
let header = " store.register_lints(&[".to_string();
let footer = " ]);".to_string();
let internal_lints = internal_lints
.sorted_by_key(|l| format!(" &{}::{},", l.module, l.name.to_uppercase()))
.map(|l| {
format!(
" #[cfg(feature = \"internal-lints\")]\n &{}::{},",
l.module,
l.name.to_uppercase()
)
});
let other_lints = usable_lints
.sorted_by_key(|l| format!(" &{}::{},", l.module, l.name.to_uppercase()))
.map(|l| format!(" &{}::{},", l.module, l.name.to_uppercase()))
.sorted();
let mut lint_list = vec![header];
lint_list.extend(internal_lints);
lint_list.extend(other_lints);
lint_list.push(footer);
lint_list
}
/// Gathers all files in `clippy_lints/src` and collects all lints defined inside them
pub fn gather_all() -> impl Iterator<Item = Lint> {
lint_files().flat_map(|f| gather_from_file(&f))
}
fn gather_from_file(dir_entry: &walkdir::DirEntry) -> impl Iterator<Item = Lint> {
let content = fs::read_to_string(dir_entry.path()).unwrap();
let path = dir_entry.path();
let filename = path.file_stem().unwrap();
let path_buf = path.with_file_name(filename);
let mut rel_path = path_buf
.strip_prefix(clippy_project_root().join("clippy_lints/src"))
.expect("only files in `clippy_lints/src` should be looked at");
// If the lints are stored in mod.rs, we get the module name from
// the containing directory:
if filename == "mod" {
rel_path = rel_path.parent().unwrap();
}
let module = rel_path
.components()
.map(|c| c.as_os_str().to_str().unwrap())
.collect::<Vec<_>>()
.join("::");
parse_contents(&content, &module)
}
fn parse_contents(content: &str, module: &str) -> impl Iterator<Item = Lint> {
let lints = DEC_CLIPPY_LINT_RE
.captures_iter(content)
.map(|m| Lint::new(&m["name"], &m["cat"], &m["desc"], None, module));
let deprecated = DEC_DEPRECATED_LINT_RE
.captures_iter(content)
.map(|m| Lint::new(&m["name"], "Deprecated", &m["desc"], Some(&m["desc"]), module));
// Removing the `.collect::<Vec<Lint>>().into_iter()` causes some lifetime issues due to the map
lints.chain(deprecated).collect::<Vec<Lint>>().into_iter()
}
/// Collects all .rs files in the `clippy_lints/src` directory
fn lint_files() -> impl Iterator<Item = walkdir::DirEntry> {
// We use `WalkDir` instead of `fs::read_dir` here in order to recurse into subdirectories.
// Otherwise we would not collect all the lints, for example in `clippy_lints/src/methods/`.
let path = clippy_project_root().join("clippy_lints/src");
WalkDir::new(path)
.into_iter()
.filter_map(Result::ok)
.filter(|f| f.path().extension() == Some(OsStr::new("rs")))
}
/// Whether a file has had its text changed or not
#[derive(PartialEq, Debug)]
pub struct FileChange {
pub changed: bool,
pub new_lines: String,
}
/// Replaces a region in a file delimited by two lines matching regexes.
///
/// `path` is the relative path to the file on which you want to perform the replacement.
///
/// See `replace_region_in_text` for documentation of the other options.
///
/// # Panics
///
/// Panics if the path could not be read from or written to
pub fn replace_region_in_file<F>(
path: &Path,
start: &str,
end: &str,
replace_start: bool,
write_back: bool,
replacements: F,
) -> FileChange
where
F: FnOnce() -> Vec<String>,
{
let contents = fs::read_to_string(path).unwrap_or_else(|e| panic!("Cannot read from {}: {}", path.display(), e));
let file_change = replace_region_in_text(&contents, start, end, replace_start, replacements);
if write_back {
if let Err(e) = fs::write(path, file_change.new_lines.as_bytes()) {
panic!("Cannot write to {}: {}", path.display(), e);
}
}
file_change
}
/// Replaces a region in a text delimited by two lines matching regexes.
///
/// * `text` is the input text on which you want to perform the replacement
/// * `start` is a `&str` that describes the delimiter line before the region you want to replace.
/// As the `&str` will be converted to a `Regex`, this can contain regex syntax, too.
/// * `end` is a `&str` that describes the delimiter line until where the replacement should happen.
/// As the `&str` will be converted to a `Regex`, this can contain regex syntax, too.
/// * If `replace_start` is true, the `start` delimiter line is replaced as well. The `end`
/// delimiter line is never replaced.
/// * `replacements` is a closure that has to return a `Vec<String>` which contains the new text.
///
/// If you want to perform the replacement on files instead of already parsed text,
/// use `replace_region_in_file`.
///
/// # Example
///
/// ```
/// let the_text = "replace_start\nsome text\nthat will be replaced\nreplace_end";
/// let result =
/// clippy_dev::replace_region_in_text(the_text, "replace_start", "replace_end", false, || {
/// vec!["a different".to_string(), "text".to_string()]
/// })
/// .new_lines;
/// assert_eq!("replace_start\na different\ntext\nreplace_end", result);
/// ```
///
/// # Panics
///
/// Panics if start or end is not valid regex
pub fn replace_region_in_text<F>(text: &str, start: &str, end: &str, replace_start: bool, replacements: F) -> FileChange
where
F: FnOnce() -> Vec<String>,
{
let replace_it = replacements();
let mut in_old_region = false;
let mut found = false;
let mut new_lines = vec![];
let start = Regex::new(start).unwrap();
let end = Regex::new(end).unwrap();
for line in text.lines() {
if in_old_region {
if end.is_match(line) {
in_old_region = false;
new_lines.extend(replace_it.clone());
new_lines.push(line.to_string());
}
} else if start.is_match(line) {
if !replace_start {
new_lines.push(line.to_string());
}
in_old_region = true;
found = true;
} else {
new_lines.push(line.to_string());
}
}
if !found {
// This happens if the provided regex in `clippy_dev/src/main.rs` does not match in the
// given text or file. Most likely this is an error on the programmer's side and the Regex
// is incorrect.
eprintln!("error: regex \n{:?}\ndoesn't match. You may have to update it.", start);
std::process::exit(1);
}
let mut new_lines = new_lines.join("\n");
if text.ends_with('\n') {
new_lines.push('\n');
}
let changed = new_lines != text;
FileChange { changed, new_lines }
}
/// Returns the path to the Clippy project directory
///
/// # Panics
///
/// Panics if the current directory could not be retrieved, there was an error reading any of the
/// Cargo.toml files, or no ancestor directory turns out to be the Clippy root directory
#[must_use]
pub fn clippy_project_root() -> PathBuf {
let current_dir = std::env::current_dir().unwrap();
for path in current_dir.ancestors() {
let result = std::fs::read_to_string(path.join("Cargo.toml"));
if let Err(err) = &result {
if err.kind() == std::io::ErrorKind::NotFound {
continue;
}
}
let content = result.unwrap();
if content.contains("[package]\nname = \"clippy\"") {
return path.to_path_buf();
}
}
panic!("error: Can't determine root of project. Please run inside a Clippy working dir.");
}
#[test]
fn test_parse_contents() {
let result: Vec<Lint> = parse_contents(
r#"
declare_clippy_lint! {
pub PTR_ARG,
style,
"really long \
text"
}
declare_clippy_lint!{
pub DOC_MARKDOWN,
pedantic,
"single line"
}
/// some doc comment
declare_deprecated_lint! {
pub SHOULD_ASSERT_EQ,
"`assert!()` will be more flexible with RFC 2011"
}
"#,
"module_name",
)
.collect();
let expected = vec![
Lint::new("ptr_arg", "style", "really long text", None, "module_name"),
Lint::new("doc_markdown", "pedantic", "single line", None, "module_name"),
Lint::new(
"should_assert_eq",
"Deprecated",
"`assert!()` will be more flexible with RFC 2011",
Some("`assert!()` will be more flexible with RFC 2011"),
"module_name",
),
];
assert_eq!(expected, result);
}
#[test]
fn test_replace_region() {
let text = "\nabc\n123\n789\ndef\nghi";
let expected = FileChange {
changed: true,
new_lines: "\nabc\nhello world\ndef\nghi".to_string(),
};
let result = replace_region_in_text(text, r#"^\s*abc$"#, r#"^\s*def"#, false, || {
vec!["hello world".to_string()]
});
assert_eq!(expected, result);
}
#[test]
fn test_replace_region_with_start() {
let text = "\nabc\n123\n789\ndef\nghi";
let expected = FileChange {
changed: true,
new_lines: "\nhello world\ndef\nghi".to_string(),
};
let result = replace_region_in_text(text, r#"^\s*abc$"#, r#"^\s*def"#, true, || {
vec!["hello world".to_string()]
});
assert_eq!(expected, result);
}
#[test]
fn test_replace_region_no_changes() {
let text = "123\n456\n789";
let expected = FileChange {
changed: false,
new_lines: "123\n456\n789".to_string(),
};
let result = replace_region_in_text(text, r#"^\s*123$"#, r#"^\s*456"#, false, Vec::new);
assert_eq!(expected, result);
}
#[test]
fn test_usable_lints() {
let lints = vec![
Lint::new("should_assert_eq", "Deprecated", "abc", Some("Reason"), "module_name"),
Lint::new("should_assert_eq2", "Not Deprecated", "abc", None, "module_name"),
Lint::new("should_assert_eq2", "internal", "abc", None, "module_name"),
Lint::new("should_assert_eq2", "internal_style", "abc", None, "module_name"),
];
let expected = vec![Lint::new(
"should_assert_eq2",
"Not Deprecated",
"abc",
None,
"module_name",
)];
assert_eq!(expected, Lint::usable_lints(&lints));
}
#[test]
fn test_by_lint_group() {
let lints = vec![
Lint::new("should_assert_eq", "group1", "abc", None, "module_name"),
Lint::new("should_assert_eq2", "group2", "abc", None, "module_name"),
Lint::new("incorrect_match", "group1", "abc", None, "module_name"),
];
let mut expected: HashMap<String, Vec<Lint>> = HashMap::new();
expected.insert(
"group1".to_string(),
vec![
Lint::new("should_assert_eq", "group1", "abc", None, "module_name"),
Lint::new("incorrect_match", "group1", "abc", None, "module_name"),
],
);
expected.insert(
"group2".to_string(),
vec![Lint::new("should_assert_eq2", "group2", "abc", None, "module_name")],
);
assert_eq!(expected, Lint::by_lint_group(lints.into_iter()));
}
#[test]
fn test_gen_changelog_lint_list() {
let lints = vec![
Lint::new("should_assert_eq", "group1", "abc", None, "module_name"),
Lint::new("should_assert_eq2", "group2", "abc", None, "module_name"),
];
let expected = vec![
format!("[`should_assert_eq`]: {}#should_assert_eq", DOCS_LINK.to_string()),
format!("[`should_assert_eq2`]: {}#should_assert_eq2", DOCS_LINK.to_string()),
];
assert_eq!(expected, gen_changelog_lint_list(lints.iter()));
}
#[test]
fn test_gen_deprecated() {
let lints = vec![
Lint::new(
"should_assert_eq",
"group1",
"abc",
Some("has been superseded by should_assert_eq2"),
"module_name",
),
Lint::new(
"another_deprecated",
"group2",
"abc",
Some("will be removed"),
"module_name",
),
];
let expected: Vec<String> = vec![
" store.register_removed(",
" \"clippy::should_assert_eq\",",
" \"has been superseded by should_assert_eq2\",",
" );",
" store.register_removed(",
" \"clippy::another_deprecated\",",
" \"will be removed\",",
" );",
]
.into_iter()
.map(String::from)
.collect();
assert_eq!(expected, gen_deprecated(lints.iter()));
}
#[test]
#[should_panic]
fn test_gen_deprecated_fail() {
let lints = vec![Lint::new("should_assert_eq2", "group2", "abc", None, "module_name")];
let _deprecated_lints = gen_deprecated(lints.iter());
}
#[test]
fn test_gen_modules_list() {
let lints = vec![
Lint::new("should_assert_eq", "group1", "abc", None, "module_name"),
Lint::new("incorrect_stuff", "group3", "abc", None, "another_module"),
];
let expected = vec!["mod another_module;".to_string(), "mod module_name;".to_string()];
assert_eq!(expected, gen_modules_list(lints.iter()));
}
#[test]
fn test_gen_lint_group_list() {
let lints = vec![
Lint::new("abc", "group1", "abc", None, "module_name"),
Lint::new("should_assert_eq", "group1", "abc", None, "module_name"),
Lint::new("internal", "internal_style", "abc", None, "module_name"),
];
let expected = vec![
" LintId::of(&module_name::ABC),".to_string(),
" LintId::of(&module_name::INTERNAL),".to_string(),
" LintId::of(&module_name::SHOULD_ASSERT_EQ),".to_string(),
];
assert_eq!(expected, gen_lint_group_list(lints.iter()));
}

View File

@@ -0,0 +1,167 @@
#![cfg_attr(feature = "deny-warnings", deny(warnings))]
use clap::{App, Arg, ArgMatches, SubCommand};
use clippy_dev::{bless, fmt, new_lint, ra_setup, serve, stderr_length_check, update_lints};
fn main() {
let matches = get_clap_config();
match matches.subcommand() {
("bless", Some(matches)) => {
bless::bless(matches.is_present("ignore-timestamp"));
},
("fmt", Some(matches)) => {
fmt::run(matches.is_present("check"), matches.is_present("verbose"));
},
("update_lints", Some(matches)) => {
if matches.is_present("print-only") {
update_lints::print_lints();
} else if matches.is_present("check") {
update_lints::run(update_lints::UpdateMode::Check);
} else {
update_lints::run(update_lints::UpdateMode::Change);
}
},
("new_lint", Some(matches)) => {
match new_lint::create(
matches.value_of("pass"),
matches.value_of("name"),
matches.value_of("category"),
) {
Ok(_) => update_lints::run(update_lints::UpdateMode::Change),
Err(e) => eprintln!("Unable to create lint: {}", e),
}
},
("limit_stderr_length", _) => {
stderr_length_check::check();
},
("ra_setup", Some(matches)) => ra_setup::run(matches.value_of("rustc-repo-path")),
("serve", Some(matches)) => {
let port = matches.value_of("port").unwrap().parse().unwrap();
let lint = matches.value_of("lint");
serve::run(port, lint);
},
_ => {},
}
}
fn get_clap_config<'a>() -> ArgMatches<'a> {
App::new("Clippy developer tooling")
.subcommand(
SubCommand::with_name("bless")
.about("bless the test output changes")
.arg(
Arg::with_name("ignore-timestamp")
.long("ignore-timestamp")
.help("Include files updated before clippy was built"),
),
)
.subcommand(
SubCommand::with_name("fmt")
.about("Run rustfmt on all projects and tests")
.arg(
Arg::with_name("check")
.long("check")
.help("Use the rustfmt --check option"),
)
.arg(
Arg::with_name("verbose")
.short("v")
.long("verbose")
.help("Echo commands run"),
),
)
.subcommand(
SubCommand::with_name("update_lints")
.about("Updates lint registration and information from the source code")
.long_about(
"Makes sure that:\n \
* the lint count in README.md is correct\n \
* the changelog contains markdown link references at the bottom\n \
* all lint groups include the correct lints\n \
* lint modules in `clippy_lints/*` are visible in `src/lib.rs` via `pub mod`\n \
* all lints are registered in the lint store",
)
.arg(Arg::with_name("print-only").long("print-only").help(
"Print a table of lints to STDOUT. \
This does not include deprecated and internal lints. \
(Does not modify any files)",
))
.arg(
Arg::with_name("check")
.long("check")
.help("Checks that `cargo dev update_lints` has been run. Used on CI."),
),
)
.subcommand(
SubCommand::with_name("new_lint")
.about("Create new lint and run `cargo dev update_lints`")
.arg(
Arg::with_name("pass")
.short("p")
.long("pass")
.help("Specify whether the lint runs during the early or late pass")
.takes_value(true)
.possible_values(&["early", "late"])
.required(true),
)
.arg(
Arg::with_name("name")
.short("n")
.long("name")
.help("Name of the new lint in snake case, ex: fn_too_long")
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name("category")
.short("c")
.long("category")
.help("What category the lint belongs to")
.default_value("nursery")
.possible_values(&[
"style",
"correctness",
"complexity",
"perf",
"pedantic",
"restriction",
"cargo",
"nursery",
"internal",
"internal_warn",
])
.takes_value(true),
),
)
.subcommand(
SubCommand::with_name("limit_stderr_length")
.about("Ensures that stderr files do not grow longer than a certain amount of lines."),
)
.subcommand(
SubCommand::with_name("ra_setup")
.about("Alter dependencies so rust-analyzer can find rustc internals")
.arg(
Arg::with_name("rustc-repo-path")
.long("repo-path")
.short("r")
.help("The path to a rustc repo that will be used for setting the dependencies")
.takes_value(true)
.value_name("path")
.required(true),
),
)
.subcommand(
SubCommand::with_name("serve")
.about("Launch a local 'ALL the Clippy Lints' website in a browser")
.arg(
Arg::with_name("port")
.long("port")
.short("p")
.help("Local port for the http server")
.default_value("8000")
.validator_os(serve::validate_port),
)
.arg(Arg::with_name("lint").help("Which lint's page to load initially (optional)")),
)
.get_matches()
}

View File

@@ -0,0 +1,219 @@
use crate::clippy_project_root;
use std::fs::{self, OpenOptions};
use std::io::prelude::*;
use std::io::{self, ErrorKind};
use std::path::{Path, PathBuf};
struct LintData<'a> {
pass: &'a str,
name: &'a str,
category: &'a str,
project_root: PathBuf,
}
trait Context {
fn context<C: AsRef<str>>(self, text: C) -> Self;
}
impl<T> Context for io::Result<T> {
fn context<C: AsRef<str>>(self, text: C) -> Self {
match self {
Ok(t) => Ok(t),
Err(e) => {
let message = format!("{}: {}", text.as_ref(), e);
Err(io::Error::new(ErrorKind::Other, message))
},
}
}
}
/// Creates the files required to implement and test a new lint and runs `update_lints`.
///
/// # Errors
///
/// This function errors out if the files couldn't be created or written to.
pub fn create(pass: Option<&str>, lint_name: Option<&str>, category: Option<&str>) -> io::Result<()> {
let lint = LintData {
pass: pass.expect("`pass` argument is validated by clap"),
name: lint_name.expect("`name` argument is validated by clap"),
category: category.expect("`category` argument is validated by clap"),
project_root: clippy_project_root(),
};
create_lint(&lint).context("Unable to create lint implementation")?;
create_test(&lint).context("Unable to create a test for the new lint")
}
fn create_lint(lint: &LintData) -> io::Result<()> {
let (pass_type, pass_lifetimes, pass_import, context_import) = match lint.pass {
"early" => ("EarlyLintPass", "", "use rustc_ast::ast::*;", "EarlyContext"),
"late" => ("LateLintPass", "<'_>", "use rustc_hir::*;", "LateContext"),
_ => {
unreachable!("`pass_type` should only ever be `early` or `late`!");
},
};
let camel_case_name = to_camel_case(lint.name);
let lint_contents = get_lint_file_contents(
pass_type,
pass_lifetimes,
lint.name,
&camel_case_name,
lint.category,
pass_import,
context_import,
);
let lint_path = format!("clippy_lints/src/{}.rs", lint.name);
write_file(lint.project_root.join(&lint_path), lint_contents.as_bytes())
}
fn create_test(lint: &LintData) -> io::Result<()> {
fn create_project_layout<P: Into<PathBuf>>(lint_name: &str, location: P, case: &str, hint: &str) -> io::Result<()> {
let mut path = location.into().join(case);
fs::create_dir(&path)?;
write_file(path.join("Cargo.toml"), get_manifest_contents(lint_name, hint))?;
path.push("src");
fs::create_dir(&path)?;
let header = format!("// compile-flags: --crate-name={}", lint_name);
write_file(path.join("main.rs"), get_test_file_contents(lint_name, Some(&header)))?;
Ok(())
}
if lint.category == "cargo" {
let relative_test_dir = format!("tests/ui-cargo/{}", lint.name);
let test_dir = lint.project_root.join(relative_test_dir);
fs::create_dir(&test_dir)?;
create_project_layout(lint.name, &test_dir, "fail", "Content that triggers the lint goes here")?;
create_project_layout(lint.name, &test_dir, "pass", "This file should not trigger the lint")
} else {
let test_path = format!("tests/ui/{}.rs", lint.name);
let test_contents = get_test_file_contents(lint.name, None);
write_file(lint.project_root.join(test_path), test_contents)
}
}
fn write_file<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> io::Result<()> {
fn inner(path: &Path, contents: &[u8]) -> io::Result<()> {
OpenOptions::new()
.write(true)
.create_new(true)
.open(path)?
.write_all(contents)
}
inner(path.as_ref(), contents.as_ref()).context(format!("writing to file: {}", path.as_ref().display()))
}
fn to_camel_case(name: &str) -> String {
name.split('_')
.map(|s| {
if s.is_empty() {
String::from("")
} else {
[&s[0..1].to_uppercase(), &s[1..]].concat()
}
})
.collect()
}
fn get_test_file_contents(lint_name: &str, header_commands: Option<&str>) -> String {
let mut contents = format!(
"#![warn(clippy::{})]
fn main() {{
// test code goes here
}}
",
lint_name
);
if let Some(header) = header_commands {
contents = format!("{}\n{}", header, contents);
}
contents
}
fn get_manifest_contents(lint_name: &str, hint: &str) -> String {
format!(
r#"
# {}
[package]
name = "{}"
version = "0.1.0"
publish = false
[workspace]
"#,
hint, lint_name
)
}
fn get_lint_file_contents(
pass_type: &str,
pass_lifetimes: &str,
lint_name: &str,
camel_case_name: &str,
category: &str,
pass_import: &str,
context_import: &str,
) -> String {
format!(
"use rustc_lint::{{{type}, {context_import}}};
use rustc_session::{{declare_lint_pass, declare_tool_lint}};
{pass_import}
declare_clippy_lint! {{
/// **What it does:**
///
/// **Why is this bad?**
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust
/// // example code where clippy issues a warning
/// ```
/// Use instead:
/// ```rust
/// // example code which does not raise clippy warning
/// ```
pub {name_upper},
{category},
\"default lint description\"
}}
declare_lint_pass!({name_camel} => [{name_upper}]);
impl {type}{lifetimes} for {name_camel} {{}}
",
type=pass_type,
lifetimes=pass_lifetimes,
name_upper=lint_name.to_uppercase(),
name_camel=camel_case_name,
category=category,
pass_import=pass_import,
context_import=context_import
)
}
#[test]
fn test_camel_case() {
let s = "a_lint";
let s2 = to_camel_case(s);
assert_eq!(s2, "ALint");
let name = "a_really_long_new_lint";
let name2 = to_camel_case(name);
assert_eq!(name2, "AReallyLongNewLint");
let name3 = "lint__name";
let name4 = to_camel_case(name3);
assert_eq!(name4, "LintName");
}

View File

@@ -0,0 +1,103 @@
use std::fs;
use std::fs::File;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
// This module takes an absolute path to a rustc repo and alters the dependencies to point towards
// the respective rustc subcrates instead of using extern crate xyz.
// This allows rust analyzer to analyze rustc internals and show proper information inside clippy
// code. See https://github.com/rust-analyzer/rust-analyzer/issues/3517 and https://github.com/rust-lang/rust-clippy/issues/5514 for details
/// # Panics
///
/// Panics if `rustc_path` does not lead to a rustc repo or the files could not be read
pub fn run(rustc_path: Option<&str>) {
// we can unwrap here because the arg is required by clap
let rustc_path = PathBuf::from(rustc_path.unwrap())
.canonicalize()
.expect("failed to get the absolute repo path");
assert!(rustc_path.is_dir(), "path is not a directory");
let rustc_source_basedir = rustc_path.join("compiler");
assert!(
rustc_source_basedir.is_dir(),
"are you sure the path leads to a rustc repo?"
);
let clippy_root_manifest = fs::read_to_string("Cargo.toml").expect("failed to read ./Cargo.toml");
let clippy_root_lib_rs = fs::read_to_string("src/driver.rs").expect("failed to read ./src/driver.rs");
inject_deps_into_manifest(
&rustc_source_basedir,
"Cargo.toml",
&clippy_root_manifest,
&clippy_root_lib_rs,
)
.expect("Failed to inject deps into ./Cargo.toml");
let clippy_lints_manifest =
fs::read_to_string("clippy_lints/Cargo.toml").expect("failed to read ./clippy_lints/Cargo.toml");
let clippy_lints_lib_rs =
fs::read_to_string("clippy_lints/src/lib.rs").expect("failed to read ./clippy_lints/src/lib.rs");
inject_deps_into_manifest(
&rustc_source_basedir,
"clippy_lints/Cargo.toml",
&clippy_lints_manifest,
&clippy_lints_lib_rs,
)
.expect("Failed to inject deps into ./clippy_lints/Cargo.toml");
}
fn inject_deps_into_manifest(
rustc_source_dir: &Path,
manifest_path: &str,
cargo_toml: &str,
lib_rs: &str,
) -> std::io::Result<()> {
// do not inject deps if we have already done so
if cargo_toml.contains("[target.'cfg(NOT_A_PLATFORM)'.dependencies]") {
eprintln!(
"cargo dev ra_setup: warning: deps already found inside {}, doing nothing.",
manifest_path
);
return Ok(());
}
let extern_crates = lib_rs
.lines()
// get the deps
.filter(|line| line.starts_with("extern crate"))
// we have something like "extern crate foo;", we only care about the "foo"
// ↓ ↓
// extern crate rustc_middle;
.map(|s| &s[13..(s.len() - 1)]);
let new_deps = extern_crates.map(|dep| {
// format the dependencies that are going to be put inside the Cargo.toml
format!(
"{dep} = {{ path = \"{source_path}/{dep}\" }}\n",
dep = dep,
source_path = rustc_source_dir.display()
)
});
// format a new [dependencies]-block with the new deps we need to inject
let mut all_deps = String::from("[target.'cfg(NOT_A_PLATFORM)'.dependencies]\n");
new_deps.for_each(|dep_line| {
all_deps.push_str(&dep_line);
});
all_deps.push_str("\n[dependencies]\n");
// replace "[dependencies]" with
// [dependencies]
// dep1 = { path = ... }
// dep2 = { path = ... }
// etc
let new_manifest = cargo_toml.replacen("[dependencies]\n", &all_deps, 1);
// println!("{}", new_manifest);
let mut file = File::create(manifest_path)?;
file.write_all(new_manifest.as_bytes())?;
println!("Dependency paths injected: {}", manifest_path);
Ok(())
}

View File

@@ -0,0 +1,67 @@
use std::ffi::{OsStr, OsString};
use std::path::Path;
use std::process::Command;
use std::thread;
use std::time::{Duration, SystemTime};
/// # Panics
///
/// Panics if the python commands could not be spawned
pub fn run(port: u16, lint: Option<&str>) -> ! {
let mut url = Some(match lint {
None => format!("http://localhost:{}", port),
Some(lint) => format!("http://localhost:{}/#{}", port, lint),
});
loop {
if mtime("util/gh-pages/lints.json") < mtime("clippy_lints/src") {
Command::new("python3")
.arg("util/export.py")
.spawn()
.unwrap()
.wait()
.unwrap();
}
if let Some(url) = url.take() {
thread::spawn(move || {
Command::new("python3")
.arg("-m")
.arg("http.server")
.arg(port.to_string())
.current_dir("util/gh-pages")
.spawn()
.unwrap();
// Give some time for python to start
thread::sleep(Duration::from_millis(500));
// Launch browser after first export.py has completed and http.server is up
let _result = opener::open(url);
});
}
thread::sleep(Duration::from_millis(1000));
}
}
fn mtime(path: impl AsRef<Path>) -> SystemTime {
let path = path.as_ref();
if path.is_dir() {
path.read_dir()
.into_iter()
.flatten()
.flatten()
.map(|entry| mtime(&entry.path()))
.max()
.unwrap_or(SystemTime::UNIX_EPOCH)
} else {
path.metadata()
.and_then(|metadata| metadata.modified())
.unwrap_or(SystemTime::UNIX_EPOCH)
}
}
#[allow(clippy::missing_errors_doc)]
pub fn validate_port(arg: &OsStr) -> Result<(), OsString> {
match arg.to_string_lossy().parse::<u16>() {
Ok(_port) => Ok(()),
Err(err) => Err(OsString::from(err.to_string())),
}
}

View File

@@ -0,0 +1,51 @@
use crate::clippy_project_root;
use std::ffi::OsStr;
use std::fs;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
// The maximum length allowed for stderr files.
//
// We limit this because small files are easier to deal with than bigger files.
const LENGTH_LIMIT: usize = 200;
pub fn check() {
let exceeding_files: Vec<_> = exceeding_stderr_files();
if !exceeding_files.is_empty() {
eprintln!("Error: stderr files exceeding limit of {} lines:", LENGTH_LIMIT);
for (path, count) in exceeding_files {
println!("{}: {}", path.display(), count);
}
std::process::exit(1);
}
}
fn exceeding_stderr_files() -> Vec<(PathBuf, usize)> {
// We use `WalkDir` instead of `fs::read_dir` here in order to recurse into subdirectories.
WalkDir::new(clippy_project_root().join("tests/ui"))
.into_iter()
.filter_map(Result::ok)
.filter(|f| !f.file_type().is_dir())
.filter_map(|e| {
let p = e.into_path();
let count = count_linenumbers(&p);
if p.extension() == Some(OsStr::new("stderr")) && count > LENGTH_LIMIT {
Some((p, count))
} else {
None
}
})
.collect()
}
#[must_use]
fn count_linenumbers(filepath: &Path) -> usize {
match fs::read(filepath) {
Ok(content) => bytecount::count(&content, b'\n'),
Err(e) => {
eprintln!("Failed to read file: {}", e);
0
},
}
}

View File

@@ -0,0 +1,149 @@
use crate::{
gather_all, gen_changelog_lint_list, gen_deprecated, gen_lint_group_list, gen_modules_list, gen_register_lint_list,
replace_region_in_file, Lint, DOCS_LINK,
};
use std::path::Path;
#[derive(Clone, Copy, PartialEq)]
pub enum UpdateMode {
Check,
Change,
}
#[allow(clippy::too_many_lines)]
pub fn run(update_mode: UpdateMode) {
let lint_list: Vec<Lint> = gather_all().collect();
let internal_lints = Lint::internal_lints(&lint_list);
let deprecated_lints = Lint::deprecated_lints(&lint_list);
let usable_lints = Lint::usable_lints(&lint_list);
let mut sorted_usable_lints = usable_lints.clone();
sorted_usable_lints.sort_by_key(|lint| lint.name.clone());
let usable_lint_count = round_to_fifty(usable_lints.len());
let mut file_change = false;
file_change |= replace_region_in_file(
Path::new("README.md"),
&format!(
r#"\[There are over \d+ lints included in this crate!\]\({}\)"#,
DOCS_LINK
),
"",
true,
update_mode == UpdateMode::Change,
|| {
vec![format!(
"[There are over {} lints included in this crate!]({})",
usable_lint_count, DOCS_LINK
)]
},
)
.changed;
file_change |= replace_region_in_file(
Path::new("CHANGELOG.md"),
"<!-- begin autogenerated links to lint list -->",
"<!-- end autogenerated links to lint list -->",
false,
update_mode == UpdateMode::Change,
|| gen_changelog_lint_list(usable_lints.iter().chain(deprecated_lints.iter())),
)
.changed;
file_change |= replace_region_in_file(
Path::new("clippy_lints/src/lib.rs"),
"begin deprecated lints",
"end deprecated lints",
false,
update_mode == UpdateMode::Change,
|| gen_deprecated(deprecated_lints.iter()),
)
.changed;
file_change |= replace_region_in_file(
Path::new("clippy_lints/src/lib.rs"),
"begin register lints",
"end register lints",
false,
update_mode == UpdateMode::Change,
|| gen_register_lint_list(internal_lints.iter(), usable_lints.iter()),
)
.changed;
file_change |= replace_region_in_file(
Path::new("clippy_lints/src/lib.rs"),
"begin lints modules",
"end lints modules",
false,
update_mode == UpdateMode::Change,
|| gen_modules_list(usable_lints.iter()),
)
.changed;
// Generate lists of lints in the clippy::all lint group
file_change |= replace_region_in_file(
Path::new("clippy_lints/src/lib.rs"),
r#"store.register_group\(true, "clippy::all""#,
r#"\]\);"#,
false,
update_mode == UpdateMode::Change,
|| {
// clippy::all should only include the following lint groups:
let all_group_lints = usable_lints.iter().filter(|l| {
l.group == "correctness" || l.group == "style" || l.group == "complexity" || l.group == "perf"
});
gen_lint_group_list(all_group_lints)
},
)
.changed;
// Generate the list of lints for all other lint groups
for (lint_group, lints) in Lint::by_lint_group(usable_lints.into_iter().chain(internal_lints)) {
file_change |= replace_region_in_file(
Path::new("clippy_lints/src/lib.rs"),
&format!("store.register_group\\(true, \"clippy::{}\"", lint_group),
r#"\]\);"#,
false,
update_mode == UpdateMode::Change,
|| gen_lint_group_list(lints.iter()),
)
.changed;
}
if update_mode == UpdateMode::Check && file_change {
println!(
"Not all lints defined properly. \
Please run `cargo dev update_lints` to make sure all lints are defined properly."
);
std::process::exit(1);
}
}
pub fn print_lints() {
let lint_list: Vec<Lint> = gather_all().collect();
let usable_lints = Lint::usable_lints(&lint_list);
let usable_lint_count = usable_lints.len();
let grouped_by_lint_group = Lint::by_lint_group(usable_lints.into_iter());
for (lint_group, mut lints) in grouped_by_lint_group {
if lint_group == "Deprecated" {
continue;
}
println!("\n## {}", lint_group);
lints.sort_by_key(|l| l.name.clone());
for lint in lints {
println!("* [{}]({}#{}) ({})", lint.name, DOCS_LINK, lint.name, lint.desc);
}
}
println!("there are {} lints", usable_lint_count);
}
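/// Rounds `count` down to the nearest multiple of fifty, e.g. `123` becomes `100`.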
fn round_to_fifty(count: usize) -> usize {
count / 50 * 50
}

View File

@ -0,0 +1,17 @@
[package]
name = "clippy_dummy" # rename to clippy before publishing
version = "0.0.303"
authors = ["The Rust Clippy Developers"]
edition = "2018"
readme = "crates-readme.md"
description = "A bunch of helpful lints to avoid common pitfalls in Rust."
build = 'build.rs'
repository = "https://github.com/rust-lang/rust-clippy"
license = "MIT OR Apache-2.0"
keywords = ["clippy", "lint", "plugin"]
categories = ["development-tools", "development-tools::cargo-plugins"]
[build-dependencies]
term = "0.6"

View File

@ -0,0 +1,6 @@
This is a dummy crate to publish to crates.io. It primarily exists to ensure
that folks trying to install clippy from crates.io get redirected to the
`rustup` technique.
Before publishing, be sure to rename `clippy_dummy` to `clippy` in `Cargo.toml`;
it has a different name to avoid workspace issues.

View File

@ -0,0 +1,42 @@
use term::color::{GREEN, RED, WHITE};
use term::{Attr, Error, Result};
fn main() {
if foo().is_err() {
eprintln!(
"error: Clippy is no longer available via crates.io\n\n\
help: please run `rustup component add clippy` instead"
);
}
std::process::exit(1);
}
fn foo() -> Result<()> {
let mut t = term::stderr().ok_or(Error::NotSupported)?;
t.attr(Attr::Bold)?;
t.fg(RED)?;
write!(t, "\nerror: ")?;
t.reset()?;
t.fg(WHITE)?;
writeln!(t, "Clippy is no longer available via crates.io\n")?;
t.attr(Attr::Bold)?;
t.fg(GREEN)?;
write!(t, "help: ")?;
t.reset()?;
t.fg(WHITE)?;
write!(t, "please run `")?;
t.attr(Attr::Bold)?;
write!(t, "rustup component add clippy")?;
t.reset()?;
t.fg(WHITE)?;
writeln!(t, "` instead")?;
t.reset()?;
Ok(())
}

View File

@ -0,0 +1,9 @@
Installing clippy via crates.io is deprecated. Please use the following:
```terminal
rustup component add clippy
```
on Rust 1.29 or later. You may need to run `rustup self update` if it complains about a missing clippy binary.
See [the homepage](https://github.com/rust-lang/rust-clippy/#clippy) for more information.

View File

@ -0,0 +1,3 @@
fn main() {
panic!("This shouldn't even compile")
}

View File

@ -0,0 +1,41 @@
[package]
name = "clippy_lints"
# begin automatic update
version = "0.1.52"
# end automatic update
authors = ["The Rust Clippy Developers"]
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"
license = "MIT OR Apache-2.0"
keywords = ["clippy", "lint", "plugin"]
edition = "2018"
[dependencies]
cargo_metadata = "0.12"
clippy_utils = { path = "../clippy_utils" }
if_chain = "1.0.0"
itertools = "0.9"
pulldown-cmark = { version = "0.8", default-features = false }
quine-mc_cluskey = "0.2.2"
regex-syntax = "0.6"
serde = { version = "1.0", features = ["derive"] }
smallvec = { version = "1", features = ["union"] }
toml = "0.5.3"
unicode-normalization = "0.1"
semver = "0.11"
rustc-semver = "1.1.0"
# NOTE: cargo requires serde feat in its url dep
# see <https://github.com/rust-lang/rust/pull/63587#issuecomment-522343864>
url = { version = "2.1.0", features = ["serde"] }
quote = "1"
syn = { version = "1", features = ["full"] }
[features]
deny-warnings = []
# build clippy with internal lints enabled, off by default
internal-lints = ["clippy_utils/internal-lints"]
[package.metadata.rust-analyzer]
# This crate uses #[feature(rustc_private)]
rustc_private = true

View File

@ -0,0 +1 @@
This crate contains Clippy lints. For the main crate, check [GitHub](https://github.com/rust-lang/rust-clippy).

View File

@ -0,0 +1,117 @@
use crate::utils::span_lint;
use rustc_ast::ast::{FloatTy, LitFloatType, LitKind};
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::symbol;
use std::f64::consts as f64;
declare_clippy_lint! {
/// **What it does:** Checks for floating point literals that approximate
/// constants which are defined in
/// [`std::f32::consts`](https://doc.rust-lang.org/stable/std/f32/consts/#constants)
/// or
/// [`std::f64::consts`](https://doc.rust-lang.org/stable/std/f64/consts/#constants),
/// respectively, suggesting to use the predefined constant.
///
/// **Why is this bad?** Usually, the definition in the standard library is more
/// precise than what people come up with. If you find that your definition is
/// actually more precise, please [file a Rust
/// issue](https://github.com/rust-lang/rust/issues).
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let x = 3.14;
/// let y = 1_f64 / x;
/// ```
/// Use predefined constants instead:
/// ```rust
/// let x = std::f32::consts::PI;
/// let y = std::f64::consts::FRAC_1_PI;
/// ```
pub APPROX_CONSTANT,
correctness,
"the approximate of a known float constant (in `std::fXX::consts`)"
}
// Tuples are of the form (constant, name, min_digits)
const KNOWN_CONSTS: [(f64, &str, usize); 18] = [
(f64::E, "E", 4),
(f64::FRAC_1_PI, "FRAC_1_PI", 4),
(f64::FRAC_1_SQRT_2, "FRAC_1_SQRT_2", 5),
(f64::FRAC_2_PI, "FRAC_2_PI", 5),
(f64::FRAC_2_SQRT_PI, "FRAC_2_SQRT_PI", 5),
(f64::FRAC_PI_2, "FRAC_PI_2", 5),
(f64::FRAC_PI_3, "FRAC_PI_3", 5),
(f64::FRAC_PI_4, "FRAC_PI_4", 5),
(f64::FRAC_PI_6, "FRAC_PI_6", 5),
(f64::FRAC_PI_8, "FRAC_PI_8", 5),
(f64::LN_10, "LN_10", 5),
(f64::LN_2, "LN_2", 5),
(f64::LOG10_E, "LOG10_E", 5),
(f64::LOG2_E, "LOG2_E", 5),
(f64::LOG2_10, "LOG2_10", 5),
(f64::LOG10_2, "LOG10_2", 5),
(f64::PI, "PI", 3),
(f64::SQRT_2, "SQRT_2", 5),
];
declare_lint_pass!(ApproxConstant => [APPROX_CONSTANT]);
impl<'tcx> LateLintPass<'tcx> for ApproxConstant {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
if let ExprKind::Lit(lit) = &e.kind {
check_lit(cx, &lit.node, e);
}
}
}
fn check_lit(cx: &LateContext<'_>, lit: &LitKind, e: &Expr<'_>) {
match *lit {
LitKind::Float(s, LitFloatType::Suffixed(fty)) => match fty {
FloatTy::F32 => check_known_consts(cx, e, s, "f32"),
FloatTy::F64 => check_known_consts(cx, e, s, "f64"),
},
LitKind::Float(s, LitFloatType::Unsuffixed) => check_known_consts(cx, e, s, "f{32, 64}"),
_ => (),
}
}
fn check_known_consts(cx: &LateContext<'_>, e: &Expr<'_>, s: symbol::Symbol, module: &str) {
let s = s.as_str();
if s.parse::<f64>().is_ok() {
for &(constant, name, min_digits) in &KNOWN_CONSTS {
if is_approx_const(constant, &s, min_digits) {
span_lint(
cx,
APPROX_CONSTANT,
e.span,
&format!(
"approximate value of `{}::consts::{}` found. \
Consider using it directly",
module, &name
),
);
return;
}
}
}
}
/// Returns `false` if the number of significant figures in `value` is
/// less than `min_digits`; otherwise, returns `true` if `value` is equal
/// to `constant`, rounded to the number of digits present in `value`.
#[must_use]
fn is_approx_const(constant: f64, value: &str, min_digits: usize) -> bool {
if value.len() <= min_digits {
false
} else if constant.to_string().starts_with(value) {
// The value is a truncated constant
true
} else {
let round_const = format!("{:.*}", value.len() - 2, constant);
value == round_const
}
}
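// A minimal illustrative sketch of how `is_approx_const` classifies literals, assuming the
// `KNOWN_CONSTS` entry for `PI` (`min_digits` = 3); the test names are illustrative only.
#[cfg(test)]
mod approx_const_sketch {
    use super::is_approx_const;

    #[test]
    fn classifies_truncated_and_rounded_literals() {
        // "3.14" is a prefix of `PI.to_string()`, so it counts as a truncated constant.
        assert!(is_approx_const(std::f64::consts::PI, "3.14", 3));
        // "3.1416" is not a prefix, but PI formatted to `value.len() - 2 = 4` decimal
        // places is exactly "3.1416", so the rounded comparison matches.
        assert!(is_approx_const(std::f64::consts::PI, "3.1416", 3));
        // "3.1" does not exceed `min_digits`, so it is rejected outright.
        assert!(!is_approx_const(std::f64::consts::PI, "3.1", 3));
    }
}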

View File

@ -0,0 +1,168 @@
use crate::consts::constant_simple;
use crate::utils::span_lint;
use rustc_hir as hir;
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Span;
declare_clippy_lint! {
/// **What it does:** Checks for integer arithmetic operations which could overflow or panic.
///
/// Specifically, checks for any operators (`+`, `-`, `*`, `<<`, etc) which are capable
/// of overflowing according to the [Rust
/// Reference](https://doc.rust-lang.org/reference/expressions/operator-expr.html#overflow),
/// or which can panic (`/`, `%`). No bounds analysis or sophisticated reasoning is
/// attempted.
///
/// **Why is this bad?** Integer overflow will trigger a panic in debug builds or will wrap in
/// release mode. Division by zero will cause a panic in either mode. In some applications one
/// wants explicitly checked, wrapping or saturating arithmetic.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # let a = 0;
/// a + 1;
/// ```
pub INTEGER_ARITHMETIC,
restriction,
"any integer arithmetic expression which could overflow or panic"
}
declare_clippy_lint! {
/// **What it does:** Checks for float arithmetic.
///
/// **Why is this bad?** For some embedded systems or kernel development, it
/// can be useful to rule out floating-point numbers.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # let a = 0.0;
/// a + 1.0;
/// ```
pub FLOAT_ARITHMETIC,
restriction,
"any floating-point arithmetic statement"
}
#[derive(Copy, Clone, Default)]
pub struct Arithmetic {
expr_span: Option<Span>,
/// This field is used to check whether expressions are constants, such as in enum discriminants
/// and consts
const_span: Option<Span>,
}
impl_lint_pass!(Arithmetic => [INTEGER_ARITHMETIC, FLOAT_ARITHMETIC]);
impl<'tcx> LateLintPass<'tcx> for Arithmetic {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
if self.expr_span.is_some() {
return;
}
if let Some(span) = self.const_span {
if span.contains(expr.span) {
return;
}
}
match &expr.kind {
hir::ExprKind::Binary(op, l, r) | hir::ExprKind::AssignOp(op, l, r) => {
match op.node {
hir::BinOpKind::And
| hir::BinOpKind::Or
| hir::BinOpKind::BitAnd
| hir::BinOpKind::BitOr
| hir::BinOpKind::BitXor
| hir::BinOpKind::Eq
| hir::BinOpKind::Lt
| hir::BinOpKind::Le
| hir::BinOpKind::Ne
| hir::BinOpKind::Ge
| hir::BinOpKind::Gt => return,
_ => (),
}
let (l_ty, r_ty) = (cx.typeck_results().expr_ty(l), cx.typeck_results().expr_ty(r));
if l_ty.peel_refs().is_integral() && r_ty.peel_refs().is_integral() {
match op.node {
hir::BinOpKind::Div | hir::BinOpKind::Rem => match &r.kind {
hir::ExprKind::Lit(_lit) => (),
hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
if let hir::ExprKind::Lit(lit) = &expr.kind {
if let rustc_ast::ast::LitKind::Int(1, _) = lit.node {
span_lint(cx, INTEGER_ARITHMETIC, expr.span, "integer arithmetic detected");
self.expr_span = Some(expr.span);
}
}
},
_ => {
span_lint(cx, INTEGER_ARITHMETIC, expr.span, "integer arithmetic detected");
self.expr_span = Some(expr.span);
},
},
_ => {
span_lint(cx, INTEGER_ARITHMETIC, expr.span, "integer arithmetic detected");
self.expr_span = Some(expr.span);
},
}
                } else if l_ty.peel_refs().is_floating_point() && r_ty.peel_refs().is_floating_point() {
span_lint(cx, FLOAT_ARITHMETIC, expr.span, "floating-point arithmetic detected");
self.expr_span = Some(expr.span);
}
},
hir::ExprKind::Unary(hir::UnOp::Neg, arg) => {
let ty = cx.typeck_results().expr_ty(arg);
if constant_simple(cx, cx.typeck_results(), expr).is_none() {
if ty.is_integral() {
span_lint(cx, INTEGER_ARITHMETIC, expr.span, "integer arithmetic detected");
self.expr_span = Some(expr.span);
} else if ty.is_floating_point() {
span_lint(cx, FLOAT_ARITHMETIC, expr.span, "floating-point arithmetic detected");
self.expr_span = Some(expr.span);
}
}
},
_ => (),
}
}
fn check_expr_post(&mut self, _: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
if Some(expr.span) == self.expr_span {
self.expr_span = None;
}
}
fn check_body(&mut self, cx: &LateContext<'_>, body: &hir::Body<'_>) {
let body_owner = cx.tcx.hir().body_owner(body.id());
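        // Remember the spans of constant bodies (`const`s, `static`s, enum discriminants) so
        // that `check_expr` above can skip arithmetic that is evaluated at compile time.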
match cx.tcx.hir().body_owner_kind(body_owner) {
hir::BodyOwnerKind::Static(_) | hir::BodyOwnerKind::Const => {
let body_span = cx.tcx.hir().span(body_owner);
if let Some(span) = self.const_span {
if span.contains(body_span) {
return;
}
}
self.const_span = Some(body_span);
},
hir::BodyOwnerKind::Fn | hir::BodyOwnerKind::Closure => (),
}
}
fn check_body_post(&mut self, cx: &LateContext<'_>, body: &hir::Body<'_>) {
let body_owner = cx.tcx.hir().body_owner(body.id());
let body_span = cx.tcx.hir().span(body_owner);
if let Some(span) = self.const_span {
if span.contains(body_span) {
return;
}
}
self.const_span = None;
}
}

View File

@ -0,0 +1,66 @@
use rustc_ast::ast::{Expr, ExprKind};
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use crate::utils::span_lint_and_help;
declare_clippy_lint! {
/// **What it does:** Checks for usage of `as` conversions.
///
/// Note that this lint is specialized in linting *every single* use of `as`
/// regardless of whether good alternatives exist or not.
/// If you want more precise lints for `as`, please consider using these separate lints:
/// `unnecessary_cast`, `cast_lossless/possible_truncation/possible_wrap/precision_loss/sign_loss`,
/// `fn_to_numeric_cast(_with_truncation)`, `char_lit_as_u8`, `ref_to_mut` and `ptr_as_ptr`.
    /// There is a good explanation of why this lint should work in this way and how it is useful
/// [in this issue](https://github.com/rust-lang/rust-clippy/issues/5122).
///
/// **Why is this bad?** `as` conversions will perform many kinds of
/// conversions, including silently lossy conversions and dangerous coercions.
/// There are cases when it makes sense to use `as`, so the lint is
/// Allow by default.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// let a: u32;
/// ...
/// f(a as u16);
/// ```
///
/// Usually better represents the semantics you expect:
/// ```rust,ignore
/// f(a.try_into()?);
/// ```
/// or
/// ```rust,ignore
/// f(a.try_into().expect("Unexpected u16 overflow in f"));
/// ```
///
pub AS_CONVERSIONS,
restriction,
"using a potentially dangerous silent `as` conversion"
}
declare_lint_pass!(AsConversions => [AS_CONVERSIONS]);
impl EarlyLintPass for AsConversions {
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
if in_external_macro(cx.sess(), expr.span) {
return;
}
if let ExprKind::Cast(_, _) = expr.kind {
span_lint_and_help(
cx,
AS_CONVERSIONS,
expr.span,
"using a potentially dangerous silent `as` conversion",
None,
"consider using a safe wrapper for this conversion",
);
}
}
}

View File

@ -0,0 +1,125 @@
use std::fmt;
use crate::utils::span_lint_and_help;
use rustc_ast::ast::{Expr, ExprKind, InlineAsmOptions};
use rustc_lint::{EarlyContext, EarlyLintPass, Lint};
use rustc_session::{declare_lint_pass, declare_tool_lint};
#[derive(Clone, Copy, PartialEq, Eq)]
enum AsmStyle {
Intel,
Att,
}
impl fmt::Display for AsmStyle {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AsmStyle::Intel => f.write_str("Intel"),
AsmStyle::Att => f.write_str("AT&T"),
}
}
}
impl std::ops::Not for AsmStyle {
type Output = AsmStyle;
fn not(self) -> AsmStyle {
match self {
AsmStyle::Intel => AsmStyle::Att,
AsmStyle::Att => AsmStyle::Intel,
}
}
}
fn check_expr_asm_syntax(lint: &'static Lint, cx: &EarlyContext<'_>, expr: &Expr, check_for: AsmStyle) {
if let ExprKind::InlineAsm(ref inline_asm) = expr.kind {
let style = if inline_asm.options.contains(InlineAsmOptions::ATT_SYNTAX) {
AsmStyle::Att
} else {
AsmStyle::Intel
};
if style == check_for {
span_lint_and_help(
cx,
lint,
expr.span,
&format!("{} x86 assembly syntax used", style),
None,
&format!("use {} x86 assembly syntax", !style),
);
}
}
}
declare_clippy_lint! {
/// **What it does:** Checks for usage of Intel x86 assembly syntax.
///
/// **Why is this bad?** The lint has been enabled to indicate a preference
/// for AT&T x86 assembly syntax.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust,no_run
/// # #![feature(asm)]
/// # unsafe { let ptr = "".as_ptr();
/// asm!("lea {}, [{}]", lateout(reg) _, in(reg) ptr);
/// # }
/// ```
/// Use instead:
/// ```rust,no_run
/// # #![feature(asm)]
/// # unsafe { let ptr = "".as_ptr();
/// asm!("lea ({}), {}", in(reg) ptr, lateout(reg) _, options(att_syntax));
/// # }
/// ```
pub INLINE_ASM_X86_INTEL_SYNTAX,
restriction,
"prefer AT&T x86 assembly syntax"
}
declare_lint_pass!(InlineAsmX86IntelSyntax => [INLINE_ASM_X86_INTEL_SYNTAX]);
impl EarlyLintPass for InlineAsmX86IntelSyntax {
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
check_expr_asm_syntax(Self::get_lints()[0], cx, expr, AsmStyle::Intel);
}
}
declare_clippy_lint! {
/// **What it does:** Checks for usage of AT&T x86 assembly syntax.
///
/// **Why is this bad?** The lint has been enabled to indicate a preference
/// for Intel x86 assembly syntax.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust,no_run
/// # #![feature(asm)]
/// # unsafe { let ptr = "".as_ptr();
/// asm!("lea ({}), {}", in(reg) ptr, lateout(reg) _, options(att_syntax));
/// # }
/// ```
/// Use instead:
/// ```rust,no_run
/// # #![feature(asm)]
/// # unsafe { let ptr = "".as_ptr();
/// asm!("lea {}, [{}]", lateout(reg) _, in(reg) ptr);
/// # }
/// ```
pub INLINE_ASM_X86_ATT_SYNTAX,
restriction,
"prefer Intel x86 assembly syntax"
}
declare_lint_pass!(InlineAsmX86AttSyntax => [INLINE_ASM_X86_ATT_SYNTAX]);
impl EarlyLintPass for InlineAsmX86AttSyntax {
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
check_expr_asm_syntax(Self::get_lints()[0], cx, expr, AsmStyle::Att);
}
}

View File

@ -0,0 +1,144 @@
use crate::consts::{constant, Constant};
use crate::utils::{is_direct_expn_of, is_expn_of, match_panic_call, snippet_opt, span_lint_and_help};
use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks for `assert!(true)` and `assert!(false)` calls.
///
/// **Why is this bad?** Will be optimized out by the compiler or should probably be replaced by a
/// `panic!()` or `unreachable!()`
///
/// **Known problems:** None
///
/// **Example:**
/// ```rust,ignore
/// assert!(false)
/// assert!(true)
/// const B: bool = false;
/// assert!(B)
/// ```
pub ASSERTIONS_ON_CONSTANTS,
style,
"`assert!(true)` / `assert!(false)` will be optimized out by the compiler, and should probably be replaced by a `panic!()` or `unreachable!()`"
}
declare_lint_pass!(AssertionsOnConstants => [ASSERTIONS_ON_CONSTANTS]);
impl<'tcx> LateLintPass<'tcx> for AssertionsOnConstants {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
let lint_true = |is_debug: bool| {
span_lint_and_help(
cx,
ASSERTIONS_ON_CONSTANTS,
e.span,
if is_debug {
"`debug_assert!(true)` will be optimized out by the compiler"
} else {
"`assert!(true)` will be optimized out by the compiler"
},
None,
"remove it",
);
};
let lint_false_without_message = || {
span_lint_and_help(
cx,
ASSERTIONS_ON_CONSTANTS,
e.span,
"`assert!(false)` should probably be replaced",
None,
"use `panic!()` or `unreachable!()`",
);
};
let lint_false_with_message = |panic_message: String| {
span_lint_and_help(
cx,
ASSERTIONS_ON_CONSTANTS,
e.span,
&format!("`assert!(false, {})` should probably be replaced", panic_message),
None,
&format!("use `panic!({})` or `unreachable!({})`", panic_message, panic_message),
)
};
if let Some(debug_assert_span) = is_expn_of(e.span, "debug_assert") {
if debug_assert_span.from_expansion() {
return;
}
if_chain! {
if let ExprKind::Unary(_, ref lit) = e.kind;
if let Some((Constant::Bool(is_true), _)) = constant(cx, cx.typeck_results(), lit);
if is_true;
then {
lint_true(true);
}
};
} else if let Some(assert_span) = is_direct_expn_of(e.span, "assert") {
if assert_span.from_expansion() {
return;
}
if let Some(assert_match) = match_assert_with_message(&cx, e) {
match assert_match {
// matched assert but not message
AssertKind::WithoutMessage(false) => lint_false_without_message(),
AssertKind::WithoutMessage(true) | AssertKind::WithMessage(_, true) => lint_true(false),
AssertKind::WithMessage(panic_message, false) => lint_false_with_message(panic_message),
};
}
}
}
}
/// Result of calling `match_assert_with_message`.
enum AssertKind {
WithMessage(String, bool),
WithoutMessage(bool),
}
/// Check if the expression matches
///
/// ```rust,ignore
/// if !c {
/// {
/// ::std::rt::begin_panic(message, _)
/// }
/// }
/// ```
///
/// where `message` is any expression and `c` is a constant bool.
fn match_assert_with_message<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<AssertKind> {
if_chain! {
if let ExprKind::If(ref cond, ref then, _) = expr.kind;
if let ExprKind::Unary(UnOp::Not, ref expr) = cond.kind;
// bind the first argument of the `assert!` macro
if let Some((Constant::Bool(is_true), _)) = constant(cx, cx.typeck_results(), expr);
// block
if let ExprKind::Block(ref block, _) = then.kind;
if block.stmts.is_empty();
if let Some(block_expr) = &block.expr;
// inner block is optional. unwrap it if it exists, or use the expression as is otherwise.
if let Some(begin_panic_call) = match block_expr.kind {
ExprKind::Block(ref inner_block, _) => &inner_block.expr,
_ => &block.expr,
};
// function call
if let Some(args) = match_panic_call(cx, begin_panic_call);
if args.len() == 1;
// bind the second argument of the `assert!` macro if it exists
if let panic_message = snippet_opt(cx, args[0].span);
// second argument of begin_panic is irrelevant
// as is the second match arm
then {
// an empty message occurs when it was generated by the macro
// (and not passed by the user)
return panic_message
.filter(|msg| !msg.is_empty())
.map(|msg| AssertKind::WithMessage(msg, is_true))
.or(Some(AssertKind::WithoutMessage(is_true)));
}
}
None
}

View File

@ -0,0 +1,263 @@
use crate::utils::{
eq_expr_value, get_trait_def_id, implements_trait, snippet_opt, span_lint_and_then, trait_ref_of_method,
};
use crate::utils::{higher, sugg};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::intravisit::{walk_expr, NestedVisitorMap, Visitor};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::map::Map;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks for `a = a op b` or `a = b commutative_op a`
/// patterns.
///
/// **Why is this bad?** These can be written as the shorter `a op= b`.
///
/// **Known problems:** While forbidden by the spec, `OpAssign` traits may have
/// implementations that differ from the regular `Op` impl.
///
/// **Example:**
/// ```rust
/// let mut a = 5;
/// let b = 0;
/// // ...
/// // Bad
/// a = a + b;
///
/// // Good
/// a += b;
/// ```
pub ASSIGN_OP_PATTERN,
style,
"assigning the result of an operation on a variable to that same variable"
}
declare_clippy_lint! {
/// **What it does:** Checks for `a op= a op b` or `a op= b op a` patterns.
///
/// **Why is this bad?** Most likely these are bugs where one meant to write `a
/// op= b`.
///
/// **Known problems:** Clippy cannot know for sure if `a op= a op b` should have
/// been `a = a op a op b` or `a = a op b`/`a op= b`. Therefore, it suggests both.
/// If `a op= a op b` is really the correct behaviour it should be
/// written as `a = a op a op b` as it's less confusing.
///
/// **Example:**
/// ```rust
/// let mut a = 5;
/// let b = 2;
/// // ...
/// a += a + b;
/// ```
pub MISREFACTORED_ASSIGN_OP,
complexity,
"having a variable on both sides of an assign op"
}
declare_lint_pass!(AssignOps => [ASSIGN_OP_PATTERN, MISREFACTORED_ASSIGN_OP]);
impl<'tcx> LateLintPass<'tcx> for AssignOps {
#[allow(clippy::too_many_lines)]
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
match &expr.kind {
hir::ExprKind::AssignOp(op, lhs, rhs) => {
if let hir::ExprKind::Binary(binop, l, r) = &rhs.kind {
if op.node != binop.node {
return;
}
// lhs op= l op r
if eq_expr_value(cx, lhs, l) {
lint_misrefactored_assign_op(cx, expr, *op, rhs, lhs, r);
}
// lhs op= l commutative_op r
if is_commutative(op.node) && eq_expr_value(cx, lhs, r) {
lint_misrefactored_assign_op(cx, expr, *op, rhs, lhs, l);
}
}
},
hir::ExprKind::Assign(assignee, e, _) => {
if let hir::ExprKind::Binary(op, l, r) = &e.kind {
let lint = |assignee: &hir::Expr<'_>, rhs: &hir::Expr<'_>| {
let ty = cx.typeck_results().expr_ty(assignee);
let rty = cx.typeck_results().expr_ty(rhs);
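                        // Expands to a `match` on the binary operator: for each supported operator
                        // it looks up the corresponding `*Assign` trait in `core::ops`, bails out if
                        // we are inside that trait's own impl, and otherwise reports whether `$ty`
                        // implements the trait for `$rty`.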
macro_rules! ops {
($op:expr,
$cx:expr,
$ty:expr,
$rty:expr,
$($trait_name:ident),+) => {
match $op {
$(hir::BinOpKind::$trait_name => {
let [krate, module] = crate::utils::paths::OPS_MODULE;
let path: [&str; 3] = [krate, module, concat!(stringify!($trait_name), "Assign")];
let trait_id = if let Some(trait_id) = get_trait_def_id($cx, &path) {
trait_id
} else {
return; // useless if the trait doesn't exist
};
// check that we are not inside an `impl AssignOp` of this exact operation
let parent_fn = cx.tcx.hir().get_parent_item(e.hir_id);
if_chain! {
if let Some(trait_ref) = trait_ref_of_method(cx, parent_fn);
if trait_ref.path.res.def_id() == trait_id;
then { return; }
}
implements_trait($cx, $ty, trait_id, &[$rty])
},)*
_ => false,
}
}
}
if ops!(
op.node,
cx,
ty,
rty.into(),
Add,
Sub,
Mul,
Div,
Rem,
And,
Or,
BitAnd,
BitOr,
BitXor,
Shr,
Shl
) {
span_lint_and_then(
cx,
ASSIGN_OP_PATTERN,
expr.span,
"manual implementation of an assign operation",
|diag| {
if let (Some(snip_a), Some(snip_r)) =
(snippet_opt(cx, assignee.span), snippet_opt(cx, rhs.span))
{
diag.span_suggestion(
expr.span,
"replace it with",
format!("{} {}= {}", snip_a, op.node.as_str(), snip_r),
Applicability::MachineApplicable,
);
}
},
);
}
};
let mut visitor = ExprVisitor {
assignee,
counter: 0,
cx,
};
walk_expr(&mut visitor, e);
if visitor.counter == 1 {
// a = a op b
if eq_expr_value(cx, assignee, l) {
lint(assignee, r);
}
// a = b commutative_op a
                        // Limited to primitive types, as these ops are known to be commutative
if eq_expr_value(cx, assignee, r) && cx.typeck_results().expr_ty(assignee).is_primitive_ty() {
match op.node {
hir::BinOpKind::Add
| hir::BinOpKind::Mul
| hir::BinOpKind::And
| hir::BinOpKind::Or
| hir::BinOpKind::BitXor
| hir::BinOpKind::BitAnd
| hir::BinOpKind::BitOr => {
lint(assignee, l);
},
_ => {},
}
}
}
}
},
_ => {},
}
}
}
fn lint_misrefactored_assign_op(
cx: &LateContext<'_>,
expr: &hir::Expr<'_>,
op: hir::BinOp,
rhs: &hir::Expr<'_>,
assignee: &hir::Expr<'_>,
rhs_other: &hir::Expr<'_>,
) {
span_lint_and_then(
cx,
MISREFACTORED_ASSIGN_OP,
expr.span,
"variable appears on both sides of an assignment operation",
|diag| {
if let (Some(snip_a), Some(snip_r)) = (snippet_opt(cx, assignee.span), snippet_opt(cx, rhs_other.span)) {
let a = &sugg::Sugg::hir(cx, assignee, "..");
let r = &sugg::Sugg::hir(cx, rhs, "..");
let long = format!("{} = {}", snip_a, sugg::make_binop(higher::binop(op.node), a, r));
diag.span_suggestion(
expr.span,
&format!(
"did you mean `{} = {} {} {}` or `{}`? Consider replacing it with",
snip_a,
snip_a,
op.node.as_str(),
snip_r,
long
),
format!("{} {}= {}", snip_a, op.node.as_str(), snip_r),
Applicability::MaybeIncorrect,
);
diag.span_suggestion(
expr.span,
"or",
long,
Applicability::MaybeIncorrect, // snippet
);
}
},
);
}
#[must_use]
fn is_commutative(op: hir::BinOpKind) -> bool {
use rustc_hir::BinOpKind::{
Add, And, BitAnd, BitOr, BitXor, Div, Eq, Ge, Gt, Le, Lt, Mul, Ne, Or, Rem, Shl, Shr, Sub,
};
match op {
Add | Mul | And | Or | BitXor | BitAnd | BitOr | Eq | Ne => true,
Sub | Div | Rem | Shl | Shr | Lt | Le | Ge | Gt => false,
}
}
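/// Counts how many times `assignee` occurs in the visited expression; `ASSIGN_OP_PATTERN`
/// only fires when it occurs exactly once.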
struct ExprVisitor<'a, 'tcx> {
assignee: &'a hir::Expr<'a>,
counter: u8,
cx: &'a LateContext<'tcx>,
}
impl<'a, 'tcx> Visitor<'tcx> for ExprVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
if eq_expr_value(self.cx, self.assignee, expr) {
self.counter += 1;
}
walk_expr(self, expr);
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
}

View File

@ -0,0 +1,85 @@
use crate::utils::{implements_trait, snippet, span_lint_and_then};
use rustc_errors::Applicability;
use rustc_hir::{AsyncGeneratorKind, Body, BodyId, ExprKind, GeneratorKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks for async blocks that yield values of types
/// that can themselves be awaited.
///
/// **Why is this bad?** An await is likely missing.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust
/// async fn foo() {}
///
/// fn bar() {
/// let x = async {
/// foo()
/// };
/// }
/// ```
/// Use instead:
/// ```rust
/// async fn foo() {}
///
/// fn bar() {
/// let x = async {
/// foo().await
/// };
/// }
/// ```
pub ASYNC_YIELDS_ASYNC,
correctness,
"async blocks that return a type that can be awaited"
}
declare_lint_pass!(AsyncYieldsAsync => [ASYNC_YIELDS_ASYNC]);
impl<'tcx> LateLintPass<'tcx> for AsyncYieldsAsync {
fn check_body(&mut self, cx: &LateContext<'tcx>, body: &'tcx Body<'_>) {
use AsyncGeneratorKind::{Block, Closure};
        // For functions with explicitly defined return types, don't warn.
// XXXkhuey maybe we should?
if let Some(GeneratorKind::Async(Block | Closure)) = body.generator_kind {
if let Some(future_trait_def_id) = cx.tcx.lang_items().future_trait() {
let body_id = BodyId {
hir_id: body.value.hir_id,
};
let typeck_results = cx.tcx.typeck_body(body_id);
let expr_ty = typeck_results.expr_ty(&body.value);
if implements_trait(cx, expr_ty, future_trait_def_id, &[]) {
let return_expr_span = match &body.value.kind {
// XXXkhuey there has to be a better way.
ExprKind::Block(block, _) => block.expr.map(|e| e.span),
ExprKind::Path(QPath::Resolved(_, path)) => Some(path.span),
_ => None,
};
if let Some(return_expr_span) = return_expr_span {
span_lint_and_then(
cx,
ASYNC_YIELDS_ASYNC,
return_expr_span,
"an async construct yields a type which is itself awaitable",
|db| {
db.span_label(body.value.span, "outer async construct");
db.span_label(return_expr_span, "awaitable value not awaited");
db.span_suggestion(
return_expr_span,
"consider awaiting this value",
format!("{}.await", snippet(cx, return_expr_span, "..")),
Applicability::MaybeIncorrect,
);
},
);
}
}
}
}
}
}

View File

@ -0,0 +1,229 @@
use crate::utils::{match_def_path, span_lint_and_help};
use if_chain::if_chain;
use rustc_hir::def_id::DefId;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks for usage of invalid atomic
/// ordering in atomic loads/stores/exchanges/updates and
/// memory fences.
///
/// **Why is this bad?** Using an invalid atomic ordering
/// will cause a panic at run-time.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,no_run
/// # use std::sync::atomic::{self, AtomicU8, Ordering};
///
/// let x = AtomicU8::new(0);
///
/// // Bad: `Release` and `AcqRel` cannot be used for `load`.
/// let _ = x.load(Ordering::Release);
/// let _ = x.load(Ordering::AcqRel);
///
/// // Bad: `Acquire` and `AcqRel` cannot be used for `store`.
/// x.store(1, Ordering::Acquire);
/// x.store(2, Ordering::AcqRel);
///
/// // Bad: `Relaxed` cannot be used as a fence's ordering.
/// atomic::fence(Ordering::Relaxed);
/// atomic::compiler_fence(Ordering::Relaxed);
///
/// // Bad: `Release` and `AcqRel` are both always invalid
/// // for the failure ordering (the last arg).
/// let _ = x.compare_exchange(1, 2, Ordering::SeqCst, Ordering::Release);
/// let _ = x.compare_exchange_weak(2, 3, Ordering::AcqRel, Ordering::AcqRel);
///
/// // Bad: The failure ordering is not allowed to be
/// // stronger than the success order, and `SeqCst` is
/// // stronger than `Relaxed`.
/// let _ = x.fetch_update(Ordering::Relaxed, Ordering::SeqCst, |val| Some(val + val));
/// ```
pub INVALID_ATOMIC_ORDERING,
correctness,
"usage of invalid atomic ordering in atomic operations and memory fences"
}
declare_lint_pass!(AtomicOrdering => [INVALID_ATOMIC_ORDERING]);
const ATOMIC_TYPES: [&str; 12] = [
"AtomicBool",
"AtomicI8",
"AtomicI16",
"AtomicI32",
"AtomicI64",
"AtomicIsize",
"AtomicPtr",
"AtomicU8",
"AtomicU16",
"AtomicU32",
"AtomicU64",
"AtomicUsize",
];
fn type_is_atomic(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
if let ty::Adt(&ty::AdtDef { did, .. }, _) = cx.typeck_results().expr_ty(expr).kind() {
ATOMIC_TYPES
.iter()
.any(|ty| match_def_path(cx, did, &["core", "sync", "atomic", ty]))
} else {
false
}
}
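/// Returns `true` if `did` is one of the given variants of `core::sync::atomic::Ordering`.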
fn match_ordering_def_path(cx: &LateContext<'_>, did: DefId, orderings: &[&str]) -> bool {
orderings
.iter()
.any(|ordering| match_def_path(cx, did, &["core", "sync", "atomic", "Ordering", ordering]))
}
fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) {
if_chain! {
if let ExprKind::MethodCall(ref method_path, _, args, _) = &expr.kind;
let method = method_path.ident.name.as_str();
if type_is_atomic(cx, &args[0]);
if method == "load" || method == "store";
let ordering_arg = if method == "load" { &args[1] } else { &args[2] };
if let ExprKind::Path(ref ordering_qpath) = ordering_arg.kind;
if let Some(ordering_def_id) = cx.qpath_res(ordering_qpath, ordering_arg.hir_id).opt_def_id();
then {
if method == "load" &&
match_ordering_def_path(cx, ordering_def_id, &["Release", "AcqRel"]) {
span_lint_and_help(
cx,
INVALID_ATOMIC_ORDERING,
ordering_arg.span,
"atomic loads cannot have `Release` and `AcqRel` ordering",
None,
"consider using ordering modes `Acquire`, `SeqCst` or `Relaxed`"
);
} else if method == "store" &&
match_ordering_def_path(cx, ordering_def_id, &["Acquire", "AcqRel"]) {
span_lint_and_help(
cx,
INVALID_ATOMIC_ORDERING,
ordering_arg.span,
"atomic stores cannot have `Acquire` and `AcqRel` ordering",
None,
"consider using ordering modes `Release`, `SeqCst` or `Relaxed`"
);
}
}
}
}
fn check_memory_fence(cx: &LateContext<'_>, expr: &Expr<'_>) {
if_chain! {
if let ExprKind::Call(ref func, ref args) = expr.kind;
if let ExprKind::Path(ref func_qpath) = func.kind;
if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
if ["fence", "compiler_fence"]
.iter()
.any(|func| match_def_path(cx, def_id, &["core", "sync", "atomic", func]));
if let ExprKind::Path(ref ordering_qpath) = &args[0].kind;
if let Some(ordering_def_id) = cx.qpath_res(ordering_qpath, args[0].hir_id).opt_def_id();
if match_ordering_def_path(cx, ordering_def_id, &["Relaxed"]);
then {
span_lint_and_help(
cx,
INVALID_ATOMIC_ORDERING,
args[0].span,
"memory fences cannot have `Relaxed` ordering",
None,
"consider using ordering modes `Acquire`, `Release`, `AcqRel` or `SeqCst`"
);
}
}
}
fn opt_ordering_defid(cx: &LateContext<'_>, ord_arg: &Expr<'_>) -> Option<DefId> {
if let ExprKind::Path(ref ord_qpath) = ord_arg.kind {
cx.qpath_res(ord_qpath, ord_arg.hir_id).opt_def_id()
} else {
None
}
}
fn check_atomic_compare_exchange(cx: &LateContext<'_>, expr: &Expr<'_>) {
if_chain! {
if let ExprKind::MethodCall(ref method_path, _, args, _) = &expr.kind;
let method = method_path.ident.name.as_str();
if type_is_atomic(cx, &args[0]);
if method == "compare_exchange" || method == "compare_exchange_weak" || method == "fetch_update";
let (success_order_arg, failure_order_arg) = if method == "fetch_update" {
(&args[1], &args[2])
} else {
(&args[3], &args[4])
};
if let Some(fail_ordering_def_id) = opt_ordering_defid(cx, failure_order_arg);
then {
// Helper type holding on to some checking and error reporting data. Has
// - (success ordering name,
// - list of failure orderings forbidden by the success order,
// - suggestion message)
type OrdLintInfo = (&'static str, &'static [&'static str], &'static str);
let relaxed: OrdLintInfo = ("Relaxed", &["SeqCst", "Acquire"], "ordering mode `Relaxed`");
let acquire: OrdLintInfo = ("Acquire", &["SeqCst"], "ordering modes `Acquire` or `Relaxed`");
let seq_cst: OrdLintInfo = ("SeqCst", &[], "ordering modes `Acquire`, `SeqCst` or `Relaxed`");
let release = ("Release", relaxed.1, relaxed.2);
let acqrel = ("AcqRel", acquire.1, acquire.2);
let search = [relaxed, acquire, seq_cst, release, acqrel];
let success_lint_info = opt_ordering_defid(cx, success_order_arg)
.and_then(|success_ord_def_id| -> Option<OrdLintInfo> {
search
.iter()
.find(|(ordering, ..)| {
match_def_path(cx, success_ord_def_id,
&["core", "sync", "atomic", "Ordering", ordering])
})
.copied()
});
if match_ordering_def_path(cx, fail_ordering_def_id, &["Release", "AcqRel"]) {
                // If we don't know what the success ordering is, use what we'd suggest
// if it were maximally permissive.
let suggested = success_lint_info.unwrap_or(seq_cst).2;
span_lint_and_help(
cx,
INVALID_ATOMIC_ORDERING,
failure_order_arg.span,
&format!(
"{}'s failure ordering may not be `Release` or `AcqRel`",
method,
),
None,
&format!("consider using {} instead", suggested),
);
} else if let Some((success_ord_name, bad_ords_given_success, suggested)) = success_lint_info {
if match_ordering_def_path(cx, fail_ordering_def_id, bad_ords_given_success) {
span_lint_and_help(
cx,
INVALID_ATOMIC_ORDERING,
failure_order_arg.span,
&format!(
"{}'s failure ordering may not be stronger than the success ordering of `{}`",
method,
success_ord_name,
),
None,
&format!("consider using {} instead", suggested),
);
}
}
}
}
}
impl<'tcx> LateLintPass<'tcx> for AtomicOrdering {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
check_atomic_load_store(cx, expr);
check_memory_fence(cx, expr);
check_atomic_compare_exchange(cx, expr);
}
}

View File

@ -0,0 +1,650 @@
//! checks for attributes
use crate::utils::{
first_line_of_span, is_present_in_source, match_panic_def_id, snippet_opt, span_lint, span_lint_and_help,
span_lint_and_sugg, span_lint_and_then, without_block_comments,
};
use if_chain::if_chain;
use rustc_ast::{AttrKind, AttrStyle, Attribute, Lit, LitKind, MetaItemKind, NestedMetaItem};
use rustc_errors::Applicability;
use rustc_hir::{
Block, Expr, ExprKind, ImplItem, ImplItemKind, Item, ItemKind, StmtKind, TraitFn, TraitItem, TraitItemKind,
};
use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use rustc_span::sym;
use rustc_span::symbol::{Symbol, SymbolStr};
use semver::Version;
static UNIX_SYSTEMS: &[&str] = &[
"android",
"dragonfly",
"emscripten",
"freebsd",
"fuchsia",
"haiku",
"illumos",
"ios",
"l4re",
"linux",
"macos",
"netbsd",
"openbsd",
"redox",
"solaris",
"vxworks",
];
// NOTE: windows is excluded from the list because it's also a valid target family.
static NON_UNIX_SYSTEMS: &[&str] = &["hermit", "none", "wasi"];
declare_clippy_lint! {
/// **What it does:** Checks for items annotated with `#[inline(always)]`,
/// unless the annotated function is empty or simply panics.
///
/// **Why is this bad?** While there are valid uses of this annotation (and once
/// you know when to use it, by all means `allow` this lint), it's a common
/// newbie-mistake to pepper one's code with it.
///
/// As a rule of thumb, before slapping `#[inline(always)]` on a function,
/// measure if that additional function call really affects your runtime profile
/// sufficiently to make up for the increase in compile time.
///
/// **Known problems:** False positives, big time. This lint is meant to be
/// deactivated by everyone doing serious performance work. This means having
/// done the measurement.
///
/// **Example:**
/// ```ignore
/// #[inline(always)]
/// fn not_quite_hot_code(..) { ... }
/// ```
pub INLINE_ALWAYS,
pedantic,
"use of `#[inline(always)]`"
}
declare_clippy_lint! {
/// **What it does:** Checks for `extern crate` and `use` items annotated with
/// lint attributes.
///
/// This lint permits `#[allow(unused_imports)]`, `#[allow(deprecated)]`,
/// `#[allow(unreachable_pub)]`, `#[allow(clippy::wildcard_imports)]` and
/// `#[allow(clippy::enum_glob_use)]` on `use` items and `#[allow(unused_imports)]` on
/// `extern crate` items with a `#[macro_use]` attribute.
///
/// **Why is this bad?** Lint attributes have no effect on crate imports. Most
/// likely a `!` was forgotten.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```ignore
/// // Bad
/// #[deny(dead_code)]
/// extern crate foo;
/// #[forbid(dead_code)]
/// use foo::bar;
///
/// // Ok
/// #[allow(unused_imports)]
/// use foo::baz;
/// #[allow(unused_imports)]
/// #[macro_use]
/// extern crate baz;
/// ```
pub USELESS_ATTRIBUTE,
correctness,
"use of lint attributes on `extern crate` items"
}
declare_clippy_lint! {
/// **What it does:** Checks for `#[deprecated]` annotations with a `since`
/// field that is not a valid semantic version.
///
/// **Why is this bad?** For checking the version of the deprecation, it must be
/// a valid semver. Failing that, the contained information is useless.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// #[deprecated(since = "forever")]
/// fn something_else() { /* ... */ }
/// ```
pub DEPRECATED_SEMVER,
correctness,
"use of `#[deprecated(since = \"x\")]` where x is not semver"
}
declare_clippy_lint! {
/// **What it does:** Checks for empty lines after outer attributes
///
/// **Why is this bad?**
/// Most likely the attribute was meant to be an inner attribute using a '!'.
/// If it was meant to be an outer attribute, then the following item
/// should not be separated by empty lines.
///
/// **Known problems:** Can cause false positives.
///
    /// From the clippy side it's difficult to detect empty lines between an attribute and the
/// following item because empty lines and comments are not part of the AST. The parsing
/// currently works for basic cases but is not perfect.
///
/// **Example:**
/// ```rust
/// // Good (as inner attribute)
/// #![allow(dead_code)]
///
/// fn this_is_fine() { }
///
/// // Bad
/// #[allow(dead_code)]
///
/// fn not_quite_good_code() { }
///
/// // Good (as outer attribute)
/// #[allow(dead_code)]
/// fn this_is_fine_too() { }
/// ```
pub EMPTY_LINE_AFTER_OUTER_ATTR,
nursery,
"empty line after outer attribute"
}
declare_clippy_lint! {
/// **What it does:** Checks for `warn`/`deny`/`forbid` attributes targeting the whole clippy::restriction category.
///
    /// **Why is this bad?** Restriction lints are sometimes in conflict with other lints or even go against idiomatic Rust.
/// These lints should only be enabled on a lint-by-lint basis and with careful consideration.
///
/// **Known problems:** None.
///
/// **Example:**
/// Bad:
/// ```rust
/// #![deny(clippy::restriction)]
/// ```
///
/// Good:
/// ```rust
/// #![deny(clippy::as_conversions)]
/// ```
pub BLANKET_CLIPPY_RESTRICTION_LINTS,
style,
"enabling the complete restriction group"
}
declare_clippy_lint! {
/// **What it does:** Checks for `#[cfg_attr(rustfmt, rustfmt_skip)]` and suggests to replace it
/// with `#[rustfmt::skip]`.
///
/// **Why is this bad?** Since tool_attributes ([rust-lang/rust#44690](https://github.com/rust-lang/rust/issues/44690))
/// are stable now, they should be used instead of the old `cfg_attr(rustfmt)` attributes.
///
/// **Known problems:** This lint doesn't detect crate level inner attributes, because they get
/// processed before the PreExpansionPass lints get executed. See
/// [#3123](https://github.com/rust-lang/rust-clippy/pull/3123#issuecomment-422321765)
///
/// **Example:**
///
/// Bad:
/// ```rust
/// #[cfg_attr(rustfmt, rustfmt_skip)]
/// fn main() { }
/// ```
///
/// Good:
/// ```rust
/// #[rustfmt::skip]
/// fn main() { }
/// ```
pub DEPRECATED_CFG_ATTR,
complexity,
"usage of `cfg_attr(rustfmt)` instead of tool attributes"
}
declare_clippy_lint! {
/// **What it does:** Checks for cfg attributes having operating systems used in target family position.
///
/// **Why is this bad?** The configuration option will not be recognised and the related item will not be included
/// by the conditional compilation engine.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// Bad:
/// ```rust
/// #[cfg(linux)]
/// fn conditional() { }
/// ```
///
/// Good:
/// ```rust
/// #[cfg(target_os = "linux")]
/// fn conditional() { }
/// ```
///
/// Or:
/// ```rust
/// #[cfg(unix)]
/// fn conditional() { }
/// ```
/// Check the [Rust Reference](https://doc.rust-lang.org/reference/conditional-compilation.html#target_os) for more details.
pub MISMATCHED_TARGET_OS,
correctness,
"usage of `cfg(operating_system)` instead of `cfg(target_os = \"operating_system\")`"
}
declare_lint_pass!(Attributes => [
INLINE_ALWAYS,
DEPRECATED_SEMVER,
USELESS_ATTRIBUTE,
BLANKET_CLIPPY_RESTRICTION_LINTS,
]);
impl<'tcx> LateLintPass<'tcx> for Attributes {
fn check_attribute(&mut self, cx: &LateContext<'tcx>, attr: &'tcx Attribute) {
if let Some(items) = &attr.meta_item_list() {
if let Some(ident) = attr.ident() {
let ident = &*ident.as_str();
match ident {
"allow" | "warn" | "deny" | "forbid" => {
check_clippy_lint_names(cx, ident, items);
},
_ => {},
}
if items.is_empty() || !attr.has_name(sym::deprecated) {
return;
}
for item in items {
if_chain! {
if let NestedMetaItem::MetaItem(mi) = &item;
if let MetaItemKind::NameValue(lit) = &mi.kind;
if mi.has_name(sym::since);
then {
check_semver(cx, item.span(), lit);
}
}
}
}
}
}
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
let attrs = cx.tcx.hir().attrs(item.hir_id());
if is_relevant_item(cx, item) {
check_attrs(cx, item.span, item.ident.name, attrs)
}
match item.kind {
ItemKind::ExternCrate(..) | ItemKind::Use(..) => {
let skip_unused_imports = attrs.iter().any(|attr| attr.has_name(sym::macro_use));
for attr in attrs {
if in_external_macro(cx.sess(), attr.span) {
return;
}
if let Some(lint_list) = &attr.meta_item_list() {
if let Some(ident) = attr.ident() {
match &*ident.as_str() {
"allow" | "warn" | "deny" | "forbid" => {
// permit `unused_imports`, `deprecated`, `unreachable_pub`,
// `clippy::wildcard_imports`, and `clippy::enum_glob_use` for `use` items
// and `unused_imports` for `extern crate` items with `macro_use`
for lint in lint_list {
match item.kind {
ItemKind::Use(..) => {
if is_word(lint, sym!(unused_imports))
|| is_word(lint, sym::deprecated)
|| is_word(lint, sym!(unreachable_pub))
|| is_word(lint, sym!(unused))
|| extract_clippy_lint(lint)
.map_or(false, |s| s == "wildcard_imports")
|| extract_clippy_lint(lint).map_or(false, |s| s == "enum_glob_use")
{
return;
}
},
ItemKind::ExternCrate(..) => {
if is_word(lint, sym!(unused_imports)) && skip_unused_imports {
return;
}
if is_word(lint, sym!(unused_extern_crates)) {
return;
}
},
_ => {},
}
}
let line_span = first_line_of_span(cx, attr.span);
if let Some(mut sugg) = snippet_opt(cx, line_span) {
if sugg.contains("#[") {
span_lint_and_then(
cx,
USELESS_ATTRIBUTE,
line_span,
"useless lint attribute",
|diag| {
sugg = sugg.replacen("#[", "#![", 1);
diag.span_suggestion(
line_span,
"if you just forgot a `!`, use",
sugg,
Applicability::MaybeIncorrect,
);
},
);
}
}
},
_ => {},
}
}
}
}
},
_ => {},
}
}
fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'_>) {
if is_relevant_impl(cx, item) {
check_attrs(cx, item.span, item.ident.name, cx.tcx.hir().attrs(item.hir_id()))
}
}
fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'_>) {
if is_relevant_trait(cx, item) {
check_attrs(cx, item.span, item.ident.name, cx.tcx.hir().attrs(item.hir_id()))
}
}
}
/// Returns the lint name if it is a Clippy lint.
fn extract_clippy_lint(lint: &NestedMetaItem) -> Option<SymbolStr> {
if_chain! {
if let Some(meta_item) = lint.meta_item();
if meta_item.path.segments.len() > 1;
if let tool_name = meta_item.path.segments[0].ident;
if tool_name.name == sym::clippy;
let lint_name = meta_item.path.segments.last().unwrap().ident.name;
then {
return Some(lint_name.as_str());
}
}
None
}
fn check_clippy_lint_names(cx: &LateContext<'_>, ident: &str, items: &[NestedMetaItem]) {
for lint in items {
if let Some(lint_name) = extract_clippy_lint(lint) {
if lint_name == "restriction" && ident != "allow" {
span_lint_and_help(
cx,
BLANKET_CLIPPY_RESTRICTION_LINTS,
lint.span(),
"restriction lints are not meant to be all enabled",
None,
"try enabling only the lints you really need",
);
}
}
}
}
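/// Whether an item is "relevant" for the attribute checks: functions whose body is empty or
/// simply panics are skipped (see `INLINE_ALWAYS`).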
fn is_relevant_item(cx: &LateContext<'_>, item: &Item<'_>) -> bool {
if let ItemKind::Fn(_, _, eid) = item.kind {
is_relevant_expr(cx, cx.tcx.typeck_body(eid), &cx.tcx.hir().body(eid).value)
} else {
true
}
}
fn is_relevant_impl(cx: &LateContext<'_>, item: &ImplItem<'_>) -> bool {
match item.kind {
ImplItemKind::Fn(_, eid) => is_relevant_expr(cx, cx.tcx.typeck_body(eid), &cx.tcx.hir().body(eid).value),
_ => false,
}
}
fn is_relevant_trait(cx: &LateContext<'_>, item: &TraitItem<'_>) -> bool {
match item.kind {
TraitItemKind::Fn(_, TraitFn::Required(_)) => true,
TraitItemKind::Fn(_, TraitFn::Provided(eid)) => {
is_relevant_expr(cx, cx.tcx.typeck_body(eid), &cx.tcx.hir().body(eid).value)
},
_ => false,
}
}
fn is_relevant_block(cx: &LateContext<'_>, typeck_results: &ty::TypeckResults<'_>, block: &Block<'_>) -> bool {
block.stmts.first().map_or(
block
.expr
.as_ref()
.map_or(false, |e| is_relevant_expr(cx, typeck_results, e)),
|stmt| match &stmt.kind {
StmtKind::Local(_) => true,
StmtKind::Expr(expr) | StmtKind::Semi(expr) => is_relevant_expr(cx, typeck_results, expr),
_ => false,
},
)
}
fn is_relevant_expr(cx: &LateContext<'_>, typeck_results: &ty::TypeckResults<'_>, expr: &Expr<'_>) -> bool {
match &expr.kind {
ExprKind::Block(block, _) => is_relevant_block(cx, typeck_results, block),
ExprKind::Ret(Some(e)) => is_relevant_expr(cx, typeck_results, e),
ExprKind::Ret(None) | ExprKind::Break(_, None) => false,
ExprKind::Call(path_expr, _) => {
if let ExprKind::Path(qpath) = &path_expr.kind {
typeck_results
.qpath_res(qpath, path_expr.hir_id)
.opt_def_id()
.map_or(true, |fun_id| !match_panic_def_id(cx, fun_id))
} else {
true
}
},
_ => true,
}
}
fn check_attrs(cx: &LateContext<'_>, span: Span, name: Symbol, attrs: &[Attribute]) {
if span.from_expansion() {
return;
}
for attr in attrs {
if let Some(values) = attr.meta_item_list() {
if values.len() != 1 || !attr.has_name(sym::inline) {
continue;
}
if is_word(&values[0], sym::always) {
span_lint(
cx,
INLINE_ALWAYS,
attr.span,
&format!(
"you have declared `#[inline(always)]` on `{}`. This is usually a bad idea",
name
),
);
}
}
}
}
fn check_semver(cx: &LateContext<'_>, span: Span, lit: &Lit) {
if let LitKind::Str(is, _) = lit.kind {
if Version::parse(&is.as_str()).is_ok() {
return;
}
}
span_lint(
cx,
DEPRECATED_SEMVER,
span,
"the since field must contain a semver-compliant version",
);
}
fn is_word(nmi: &NestedMetaItem, expected: Symbol) -> bool {
if let NestedMetaItem::MetaItem(mi) = &nmi {
mi.is_word() && mi.has_name(expected)
} else {
false
}
}
declare_lint_pass!(EarlyAttributes => [
DEPRECATED_CFG_ATTR,
MISMATCHED_TARGET_OS,
EMPTY_LINE_AFTER_OUTER_ATTR,
]);
impl EarlyLintPass for EarlyAttributes {
fn check_item(&mut self, cx: &EarlyContext<'_>, item: &rustc_ast::Item) {
check_empty_line_after_outer_attr(cx, item);
}
fn check_attribute(&mut self, cx: &EarlyContext<'_>, attr: &Attribute) {
check_deprecated_cfg_attr(cx, attr);
check_mismatched_target_os(cx, attr);
}
}
fn check_empty_line_after_outer_attr(cx: &EarlyContext<'_>, item: &rustc_ast::Item) {
for attr in &item.attrs {
let attr_item = if let AttrKind::Normal(ref attr, _) = attr.kind {
attr
} else {
return;
};
if attr.style == AttrStyle::Outer {
if attr_item.args.inner_tokens().is_empty() || !is_present_in_source(cx, attr.span) {
return;
}
let begin_of_attr_to_item = Span::new(attr.span.lo(), item.span.lo(), item.span.ctxt());
let end_of_attr_to_item = Span::new(attr.span.hi(), item.span.lo(), item.span.ctxt());
if let Some(snippet) = snippet_opt(cx, end_of_attr_to_item) {
let lines = snippet.split('\n').collect::<Vec<_>>();
let lines = without_block_comments(lines);
if lines.iter().filter(|l| l.trim().is_empty()).count() > 2 {
span_lint(
cx,
EMPTY_LINE_AFTER_OUTER_ATTR,
begin_of_attr_to_item,
"found an empty line after an outer attribute. \
Perhaps you forgot to add a `!` to make it an inner attribute?",
);
}
}
}
}
}
fn check_deprecated_cfg_attr(cx: &EarlyContext<'_>, attr: &Attribute) {
if_chain! {
// check cfg_attr
if attr.has_name(sym::cfg_attr);
if let Some(items) = attr.meta_item_list();
if items.len() == 2;
// check for `rustfmt`
if let Some(feature_item) = items[0].meta_item();
if feature_item.has_name(sym::rustfmt);
// check for `rustfmt_skip` and `rustfmt::skip`
if let Some(skip_item) = &items[1].meta_item();
if skip_item.has_name(sym!(rustfmt_skip)) ||
skip_item.path.segments.last().expect("empty path in attribute").ident.name == sym!(skip);
// Only lint outer attributes, because custom inner attributes are unstable
// Tracking issue: https://github.com/rust-lang/rust/issues/54726
if let AttrStyle::Outer = attr.style;
then {
span_lint_and_sugg(
cx,
DEPRECATED_CFG_ATTR,
attr.span,
"`cfg_attr` is deprecated for rustfmt and got replaced by tool attributes",
"use",
"#[rustfmt::skip]".to_string(),
Applicability::MachineApplicable,
);
}
}
}
fn check_mismatched_target_os(cx: &EarlyContext<'_>, attr: &Attribute) {
fn find_os(name: &str) -> Option<&'static str> {
UNIX_SYSTEMS
.iter()
.chain(NON_UNIX_SYSTEMS.iter())
.find(|&&os| os == name)
.copied()
}
fn is_unix(name: &str) -> bool {
UNIX_SYSTEMS.iter().any(|&os| os == name)
}
fn find_mismatched_target_os(items: &[NestedMetaItem]) -> Vec<(&str, Span)> {
let mut mismatched = Vec::new();
for item in items {
if let NestedMetaItem::MetaItem(meta) = item {
match &meta.kind {
MetaItemKind::List(list) => {
mismatched.extend(find_mismatched_target_os(&list));
},
MetaItemKind::Word => {
if_chain! {
if let Some(ident) = meta.ident();
if let Some(os) = find_os(&*ident.name.as_str());
then {
mismatched.push((os, ident.span));
}
}
},
_ => {},
}
}
}
mismatched
}
if_chain! {
if attr.has_name(sym::cfg);
if let Some(list) = attr.meta_item_list();
let mismatched = find_mismatched_target_os(&list);
if !mismatched.is_empty();
then {
let mess = "operating system used in target family position";
span_lint_and_then(cx, MISMATCHED_TARGET_OS, attr.span, &mess, |diag| {
// Avoid showing the unix suggestion multiple times in case
// we have more than one mismatch for unix-like systems
let mut unix_suggested = false;
for (os, span) in mismatched {
let sugg = format!("target_os = \"{}\"", os);
diag.span_suggestion(span, "try", sugg, Applicability::MaybeIncorrect);
if !unix_suggested && is_unix(os) {
diag.help("did you mean `unix`?");
unix_suggested = true;
}
}
});
}
}
}

View File

@ -0,0 +1,148 @@
use crate::utils::{match_def_path, paths, span_lint_and_note};
use rustc_hir::def_id::DefId;
use rustc_hir::{AsyncGeneratorKind, Body, BodyId, GeneratorKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::GeneratorInteriorTypeCause;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::Span;
declare_clippy_lint! {
/// **What it does:** Checks for calls to await while holding a
/// non-async-aware MutexGuard.
///
/// **Why is this bad?** The Mutex types found in std::sync and parking_lot
/// are not designed to operate in an async context across await points.
///
    /// There are two potential solutions. One is to use an async-aware Mutex
/// type. Many asynchronous foundation crates provide such a Mutex type. The
/// other solution is to ensure the mutex is unlocked before calling await,
/// either by introducing a scope or an explicit call to Drop::drop.
///
/// **Known problems:** Will report false positive for explicitly dropped guards ([#6446](https://github.com/rust-lang/rust-clippy/issues/6446)).
///
/// **Example:**
///
/// ```rust,ignore
/// use std::sync::Mutex;
///
/// async fn foo(x: &Mutex<u32>) {
/// let guard = x.lock().unwrap();
/// *guard += 1;
/// bar.await;
/// }
/// ```
///
/// Use instead:
/// ```rust,ignore
/// use std::sync::Mutex;
///
/// async fn foo(x: &Mutex<u32>) {
/// {
/// let guard = x.lock().unwrap();
/// *guard += 1;
/// }
/// bar.await;
/// }
/// ```
pub AWAIT_HOLDING_LOCK,
pedantic,
"Inside an async function, holding a MutexGuard while calling await"
}
declare_clippy_lint! {
/// **What it does:** Checks for calls to await while holding a
/// `RefCell` `Ref` or `RefMut`.
///
/// **Why is this bad?** `RefCell` refs only check for exclusive mutable access
/// at runtime. Holding onto a `RefCell` ref across an `await` suspension point
/// risks panics from a mutable ref shared while other refs are outstanding.
///
/// **Known problems:** Will report false positives for explicitly dropped refs ([#6353](https://github.com/rust-lang/rust-clippy/issues/6353)).
///
/// **Example:**
///
/// ```rust,ignore
/// use std::cell::RefCell;
///
/// async fn foo(x: &RefCell<u32>) {
/// let mut y = x.borrow_mut();
/// *y += 1;
/// bar.await;
/// }
/// ```
///
/// Use instead:
/// ```rust,ignore
/// use std::cell::RefCell;
///
/// async fn foo(x: &RefCell<u32>) {
/// {
/// let mut y = x.borrow_mut();
/// *y += 1;
/// }
/// bar.await;
/// }
/// ```
pub AWAIT_HOLDING_REFCELL_REF,
pedantic,
"Inside an async function, holding a RefCell ref while calling await"
}
declare_lint_pass!(AwaitHolding => [AWAIT_HOLDING_LOCK, AWAIT_HOLDING_REFCELL_REF]);
impl LateLintPass<'_> for AwaitHolding {
fn check_body(&mut self, cx: &LateContext<'_>, body: &'_ Body<'_>) {
use AsyncGeneratorKind::{Block, Closure, Fn};
if let Some(GeneratorKind::Async(Block | Closure | Fn)) = body.generator_kind {
let body_id = BodyId {
hir_id: body.value.hir_id,
};
let typeck_results = cx.tcx.typeck_body(body_id);
check_interior_types(
cx,
&typeck_results.generator_interior_types.as_ref().skip_binder(),
body.value.span,
);
}
}
}
fn check_interior_types(cx: &LateContext<'_>, ty_causes: &[GeneratorInteriorTypeCause<'_>], span: Span) {
for ty_cause in ty_causes {
if let rustc_middle::ty::Adt(adt, _) = ty_cause.ty.kind() {
if is_mutex_guard(cx, adt.did) {
span_lint_and_note(
cx,
AWAIT_HOLDING_LOCK,
ty_cause.span,
"this MutexGuard is held across an 'await' point. Consider using an async-aware Mutex type or ensuring the MutexGuard is dropped before calling await",
ty_cause.scope_span.or(Some(span)),
"these are all the await points this lock is held through",
);
}
if is_refcell_ref(cx, adt.did) {
span_lint_and_note(
cx,
AWAIT_HOLDING_REFCELL_REF,
ty_cause.span,
"this RefCell Ref is held across an 'await' point. Consider ensuring the Ref is dropped before calling await",
ty_cause.scope_span.or(Some(span)),
"these are all the await points this ref is held through",
);
}
}
}
}
fn is_mutex_guard(cx: &LateContext<'_>, def_id: DefId) -> bool {
match_def_path(cx, def_id, &paths::MUTEX_GUARD)
|| match_def_path(cx, def_id, &paths::RWLOCK_READ_GUARD)
|| match_def_path(cx, def_id, &paths::RWLOCK_WRITE_GUARD)
|| match_def_path(cx, def_id, &paths::PARKING_LOT_MUTEX_GUARD)
|| match_def_path(cx, def_id, &paths::PARKING_LOT_RWLOCK_READ_GUARD)
|| match_def_path(cx, def_id, &paths::PARKING_LOT_RWLOCK_WRITE_GUARD)
}
fn is_refcell_ref(cx: &LateContext<'_>, def_id: DefId) -> bool {
match_def_path(cx, def_id, &paths::REFCELL_REF) || match_def_path(cx, def_id, &paths::REFCELL_REFMUT)
}

View File

@@ -0,0 +1,326 @@
use crate::consts::{constant, Constant};
use crate::utils::sugg::Sugg;
use crate::utils::{span_lint, span_lint_and_then};
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Span;
declare_clippy_lint! {
/// **What it does:** Checks for incompatible bit masks in comparisons.
///
/// The formula for detecting if an expression of the type `_ <bit_op> m
/// <cmp_op> c` (where `<bit_op>` is one of {`&`, `|`} and `<cmp_op>` is one of
/// {`==`, `!=`, `<`, `>=`, `>`, `<=`}) can be determined from the following
/// table:
///
/// |Comparison |Bit Op|Example |is always|Formula |
/// |------------|------|------------|---------|----------------------|
/// |`==` or `!=`| `&` |`x & 2 == 3`|`false` |`c & m != c` |
/// |`<` or `>=`| `&` |`x & 2 < 3` |`true` |`m < c` |
/// |`>` or `<=`| `&` |`x & 1 > 1` |`false` |`m <= c` |
/// |`==` or `!=`| `\|` |`x \| 1 == 0`|`false` |`c \| m != c` |
/// |`<` or `>=`| `\|` |`x \| 1 < 1` |`false` |`m >= c` |
/// |`<=` or `>` | `\|` |`x \| 1 > 0` |`true` |`m > c` |
///
/// **Why is this bad?** If the bits that the comparison cares about are always
/// set to zero or one by the bit mask, the comparison is constant `true` or
/// `false` (depending on mask, compared value, and operators).
///
/// So the code is actively misleading, and the only reason someone would write
/// this intentionally is to win an underhanded Rust contest or create a
/// test-case for this lint.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # let x = 1;
/// if (x & 1 == 2) { }
/// ```
pub BAD_BIT_MASK,
correctness,
"expressions of the form `_ & mask == select` that will only ever return `true` or `false`"
}
declare_clippy_lint! {
/// **What it does:** Checks for bit masks in comparisons which can be removed
/// without changing the outcome. The basic structure can be seen in the
/// following table:
///
/// |Comparison| Bit Op |Example |equals |
/// |----------|---------|-----------|-------|
/// |`>` / `<=`|`\|` / `^`|`x \| 2 > 3`|`x > 3`|
/// |`<` / `>=`|`\|` / `^`|`x ^ 1 < 4`|`x < 4`|
///
/// **Why is this bad?** Not as bad as [`bad_bit_mask`](#bad_bit_mask),
/// but still a bit misleading, because the bit mask is ineffective.
///
/// **Known problems:** False negatives: This lint will only match instances
/// where we have figured out the math (which is for a power-of-two compared
/// value). This means things like `x | 1 >= 7` (which would be better written
/// as `x >= 6`) will not be reported (but bit masks like this are fairly
/// uncommon).
///
/// **Example:**
/// ```rust
/// # let x = 1;
/// if (x | 1 > 3) { }
/// ```
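///
/// Since the mask has no effect on the outcome, the check above could just as
/// well be written as:
/// ```rust
/// # let x = 1;
/// if x > 3 { }
/// ```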
pub INEFFECTIVE_BIT_MASK,
correctness,
"expressions where a bit mask will be rendered useless by a comparison, e.g., `(x | 1) > 2`"
}
declare_clippy_lint! {
/// **What it does:** Checks for bit masks that can be replaced by a call
/// to `trailing_zeros`
///
/// **Why is this bad?** `x.trailing_zeros() >= 4` is much clearer than `x & 15
/// == 0`
///
/// **Known problems:** LLVM generates better code for `x & 15 == 0` on x86
///
/// **Example:**
/// ```rust
/// # let x = 1;
/// if x & 0b1111 == 0 { }
/// ```
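///
/// The same check written with `trailing_zeros`, which is the form this lint
/// suggests:
/// ```rust
/// # let x = 1;
/// if x.trailing_zeros() >= 4 { }
/// ```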
pub VERBOSE_BIT_MASK,
pedantic,
"expressions where a bit mask is less readable than the corresponding method call"
}
#[derive(Copy, Clone)]
pub struct BitMask {
verbose_bit_mask_threshold: u64,
}
impl BitMask {
#[must_use]
pub fn new(verbose_bit_mask_threshold: u64) -> Self {
Self {
verbose_bit_mask_threshold,
}
}
}
impl_lint_pass!(BitMask => [BAD_BIT_MASK, INEFFECTIVE_BIT_MASK, VERBOSE_BIT_MASK]);
impl<'tcx> LateLintPass<'tcx> for BitMask {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
if let ExprKind::Binary(cmp, left, right) = &e.kind {
if cmp.node.is_comparison() {
if let Some(cmp_opt) = fetch_int_literal(cx, right) {
check_compare(cx, left, cmp.node, cmp_opt, e.span)
} else if let Some(cmp_val) = fetch_int_literal(cx, left) {
check_compare(cx, right, invert_cmp(cmp.node), cmp_val, e.span)
}
}
}
if_chain! {
if let ExprKind::Binary(op, left, right) = &e.kind;
if BinOpKind::Eq == op.node;
if let ExprKind::Binary(op1, left1, right1) = &left.kind;
if BinOpKind::BitAnd == op1.node;
if let ExprKind::Lit(lit) = &right1.kind;
if let LitKind::Int(n, _) = lit.node;
if let ExprKind::Lit(lit1) = &right.kind;
if let LitKind::Int(0, _) = lit1.node;
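// all zero bits of `n` are leading zeros, i.e. `n` is a contiguous mask of low bits (2^k - 1)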
if n.leading_zeros() == n.count_zeros();
if n > u128::from(self.verbose_bit_mask_threshold);
then {
span_lint_and_then(cx,
VERBOSE_BIT_MASK,
e.span,
"bit mask could be simplified with a call to `trailing_zeros`",
|diag| {
let sugg = Sugg::hir(cx, left1, "...").maybe_par();
diag.span_suggestion(
e.span,
"try",
format!("{}.trailing_zeros() >= {}", sugg, n.count_ones()),
Applicability::MaybeIncorrect,
);
});
}
}
}
}
#[must_use]
fn invert_cmp(cmp: BinOpKind) -> BinOpKind {
match cmp {
BinOpKind::Eq => BinOpKind::Eq,
BinOpKind::Ne => BinOpKind::Ne,
BinOpKind::Lt => BinOpKind::Gt,
BinOpKind::Gt => BinOpKind::Lt,
BinOpKind::Le => BinOpKind::Ge,
BinOpKind::Ge => BinOpKind::Le,
_ => BinOpKind::Or, // Dummy
}
}
fn check_compare(cx: &LateContext<'_>, bit_op: &Expr<'_>, cmp_op: BinOpKind, cmp_value: u128, span: Span) {
if let ExprKind::Binary(op, left, right) = &bit_op.kind {
if op.node != BinOpKind::BitAnd && op.node != BinOpKind::BitOr {
return;
}
fetch_int_literal(cx, right)
.or_else(|| fetch_int_literal(cx, left))
.map_or((), |mask| check_bit_mask(cx, op.node, cmp_op, mask, cmp_value, span))
}
}
#[allow(clippy::too_many_lines)]
fn check_bit_mask(
cx: &LateContext<'_>,
bit_op: BinOpKind,
cmp_op: BinOpKind,
mask_value: u128,
cmp_value: u128,
span: Span,
) {
match cmp_op {
BinOpKind::Eq | BinOpKind::Ne => match bit_op {
BinOpKind::BitAnd => {
if mask_value & cmp_value != cmp_value {
if cmp_value != 0 {
span_lint(
cx,
BAD_BIT_MASK,
span,
&format!(
"incompatible bit mask: `_ & {}` can never be equal to `{}`",
mask_value, cmp_value
),
);
}
} else if mask_value == 0 {
span_lint(cx, BAD_BIT_MASK, span, "&-masking with zero");
}
},
BinOpKind::BitOr => {
if mask_value | cmp_value != cmp_value {
span_lint(
cx,
BAD_BIT_MASK,
span,
&format!(
"incompatible bit mask: `_ | {}` can never be equal to `{}`",
mask_value, cmp_value
),
);
}
},
_ => (),
},
BinOpKind::Lt | BinOpKind::Ge => match bit_op {
BinOpKind::BitAnd => {
if mask_value < cmp_value {
span_lint(
cx,
BAD_BIT_MASK,
span,
&format!(
"incompatible bit mask: `_ & {}` will always be lower than `{}`",
mask_value, cmp_value
),
);
} else if mask_value == 0 {
span_lint(cx, BAD_BIT_MASK, span, "&-masking with zero");
}
},
BinOpKind::BitOr => {
if mask_value >= cmp_value {
span_lint(
cx,
BAD_BIT_MASK,
span,
&format!(
"incompatible bit mask: `_ | {}` will never be lower than `{}`",
mask_value, cmp_value
),
);
} else {
check_ineffective_lt(cx, span, mask_value, cmp_value, "|");
}
},
BinOpKind::BitXor => check_ineffective_lt(cx, span, mask_value, cmp_value, "^"),
_ => (),
},
BinOpKind::Le | BinOpKind::Gt => match bit_op {
BinOpKind::BitAnd => {
if mask_value <= cmp_value {
span_lint(
cx,
BAD_BIT_MASK,
span,
&format!(
"incompatible bit mask: `_ & {}` will never be higher than `{}`",
mask_value, cmp_value
),
);
} else if mask_value == 0 {
span_lint(cx, BAD_BIT_MASK, span, "&-masking with zero");
}
},
BinOpKind::BitOr => {
if mask_value > cmp_value {
span_lint(
cx,
BAD_BIT_MASK,
span,
&format!(
"incompatible bit mask: `_ | {}` will always be higher than `{}`",
mask_value, cmp_value
),
);
} else {
check_ineffective_gt(cx, span, mask_value, cmp_value, "|");
}
},
BinOpKind::BitXor => check_ineffective_gt(cx, span, mask_value, cmp_value, "^"),
_ => (),
},
_ => (),
}
}
fn check_ineffective_lt(cx: &LateContext<'_>, span: Span, m: u128, c: u128, op: &str) {
if c.is_power_of_two() && m < c {
span_lint(
cx,
INEFFECTIVE_BIT_MASK,
span,
&format!(
"ineffective bit mask: `x {} {}` compared to `{}`, is the same as x compared directly",
op, m, c
),
);
}
}
fn check_ineffective_gt(cx: &LateContext<'_>, span: Span, m: u128, c: u128, op: &str) {
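// `c + 1` being a power of two means `c` is a contiguous low-bit mask; or-ing or xor-ing in
// bits that all lie below that boundary (`m <= c`) cannot change whether the result exceeds `c`.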
if (c + 1).is_power_of_two() && m <= c {
span_lint(
cx,
INEFFECTIVE_BIT_MASK,
span,
&format!(
"ineffective bit mask: `x {} {}` compared to `{}`, is the same as x compared directly",
op, m, c
),
);
}
}
fn fetch_int_literal(cx: &LateContext<'_>, lit: &Expr<'_>) -> Option<u128> {
match constant(cx, cx.typeck_results(), lit)?.0 {
Constant::Int(n) => Some(n),
_ => None,
}
}

View File

@@ -0,0 +1,51 @@
use crate::utils::span_lint;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::{Pat, PatKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
declare_clippy_lint! {
/// **What it does:** Checks for usage of blacklisted names for variables, such
/// as `foo`.
///
/// **Why is this bad?** These names are usually placeholder names and should be
/// avoided.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let foo = 3.14;
/// ```
pub BLACKLISTED_NAME,
style,
"usage of a blacklisted/placeholder name"
}
#[derive(Clone, Debug)]
pub struct BlacklistedName {
blacklist: FxHashSet<String>,
}
impl BlacklistedName {
pub fn new(blacklist: FxHashSet<String>) -> Self {
Self { blacklist }
}
}
impl_lint_pass!(BlacklistedName => [BLACKLISTED_NAME]);
impl<'tcx> LateLintPass<'tcx> for BlacklistedName {
fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>) {
if let PatKind::Binding(.., ident, _) = pat.kind {
if self.blacklist.contains(&ident.name.to_string()) {
span_lint(
cx,
BLACKLISTED_NAME,
ident.span,
&format!("use of a blacklisted/placeholder name `{}`", ident.name),
);
}
}
}
}

View File

@@ -0,0 +1,160 @@
use crate::utils::{
differing_macro_contexts, get_parent_expr, get_trait_def_id, implements_trait, paths,
snippet_block_with_applicability, span_lint, span_lint_and_sugg,
};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, NestedVisitorMap, Visitor};
use rustc_hir::{BlockCheckMode, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks for `if` conditions that use blocks containing an
/// expression, statements or conditions that use closures with blocks.
///
/// **Why is this bad?** Style, using blocks in the condition makes it hard to read.
///
/// **Known problems:** None.
///
/// **Examples:**
/// ```rust
/// // Bad
/// if { true } { /* ... */ }
///
/// // Good
/// if true { /* ... */ }
/// ```
///
/// or
///
/// ```rust
/// # fn somefunc() -> bool { true };
/// // Bad
/// if { let x = somefunc(); x } { /* ... */ }
///
/// // Good
/// let res = { let x = somefunc(); x };
/// if res { /* ... */ }
/// ```
pub BLOCKS_IN_IF_CONDITIONS,
style,
"useless or complex blocks that can be eliminated in conditions"
}
declare_lint_pass!(BlocksInIfConditions => [BLOCKS_IN_IF_CONDITIONS]);
struct ExVisitor<'a, 'tcx> {
found_block: Option<&'tcx Expr<'tcx>>,
cx: &'a LateContext<'tcx>,
}
impl<'a, 'tcx> Visitor<'tcx> for ExVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
if let ExprKind::Closure(_, _, eid, _, _) = expr.kind {
// do not lint if the closure is called using an iterator (see #1141)
if_chain! {
if let Some(parent) = get_parent_expr(self.cx, expr);
if let ExprKind::MethodCall(_, _, args, _) = parent.kind;
let caller = self.cx.typeck_results().expr_ty(&args[0]);
if let Some(iter_id) = get_trait_def_id(self.cx, &paths::ITERATOR);
if implements_trait(self.cx, caller, iter_id, &[]);
then {
return;
}
}
let body = self.cx.tcx.hir().body(eid);
let ex = &body.value;
if matches!(ex.kind, ExprKind::Block(_, _)) && !body.value.span.from_expansion() {
self.found_block = Some(ex);
return;
}
}
walk_expr(self, expr);
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
}
const BRACED_EXPR_MESSAGE: &str = "omit braces around single expression condition";
const COMPLEX_BLOCK_MESSAGE: &str = "in an `if` condition, avoid complex blocks or closures with blocks; \
instead, move the block or closure higher and bind it with a `let`";
impl<'tcx> LateLintPass<'tcx> for BlocksInIfConditions {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if in_external_macro(cx.sess(), expr.span) {
return;
}
if let ExprKind::If(cond, _, _) = &expr.kind {
if let ExprKind::Block(block, _) = &cond.kind {
if block.rules == BlockCheckMode::DefaultBlock {
if block.stmts.is_empty() {
if let Some(ex) = &block.expr {
// don't dig into the expression here, just suggest that they remove
// the block
if expr.span.from_expansion() || differing_macro_contexts(expr.span, ex.span) {
return;
}
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
BLOCKS_IN_IF_CONDITIONS,
cond.span,
BRACED_EXPR_MESSAGE,
"try",
format!(
"{}",
snippet_block_with_applicability(
cx,
ex.span,
"..",
Some(expr.span),
&mut applicability
)
),
applicability,
);
}
} else {
let span = block.expr.as_ref().map_or_else(|| block.stmts[0].span, |e| e.span);
if span.from_expansion() || differing_macro_contexts(expr.span, span) {
return;
}
// move block higher
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
BLOCKS_IN_IF_CONDITIONS,
expr.span.with_hi(cond.span.hi()),
COMPLEX_BLOCK_MESSAGE,
"try",
format!(
"let res = {}; if res",
snippet_block_with_applicability(
cx,
block.span,
"..",
Some(expr.span),
&mut applicability
),
),
applicability,
);
}
}
} else {
let mut visitor = ExVisitor { found_block: None, cx };
walk_expr(&mut visitor, cond);
if let Some(block) = visitor.found_block {
span_lint(cx, BLOCKS_IN_IF_CONDITIONS, block.span, COMPLEX_BLOCK_MESSAGE);
}
}
}
}
}

View File

@@ -0,0 +1,504 @@
use crate::utils::{
eq_expr_value, get_trait_def_id, implements_trait, in_macro, is_type_diagnostic_item, paths, snippet_opt,
span_lint_and_sugg, span_lint_and_then,
};
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_expr, FnKind, NestedVisitorMap, Visitor};
use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, HirId, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::map::Map;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use rustc_span::sym;
declare_clippy_lint! {
/// **What it does:** Checks for boolean expressions that can be written more
/// concisely.
///
/// **Why is this bad?** Readability of boolean expressions suffers from
/// unnecessary duplication.
///
/// **Known problems:** Ignores short circuiting behavior of `||` and
/// `&&`. Ignores `|`, `&` and `^`.
///
/// **Example:**
/// ```ignore
/// if a && true // should be: if a
/// if !(a == b) // should be: if a != b
/// ```
pub NONMINIMAL_BOOL,
complexity,
"boolean expressions that can be written more concisely"
}
declare_clippy_lint! {
/// **What it does:** Checks for boolean expressions that contain terminals that
/// can be eliminated.
///
/// **Why is this bad?** This is most likely a logic bug.
///
/// **Known problems:** Ignores short circuiting behavior.
///
/// **Example:**
/// ```ignore
/// if a && b || a { ... }
/// ```
/// The `b` is unnecessary, the expression is equivalent to `if a`.
pub LOGIC_BUG,
correctness,
"boolean expressions that contain terminals which can be eliminated"
}
// For each pairs, both orders are considered.
const METHODS_WITH_NEGATION: [(&str, &str); 2] = [("is_some", "is_none"), ("is_err", "is_ok")];
declare_lint_pass!(NonminimalBool => [NONMINIMAL_BOOL, LOGIC_BUG]);
impl<'tcx> LateLintPass<'tcx> for NonminimalBool {
fn check_fn(
&mut self,
cx: &LateContext<'tcx>,
_: FnKind<'tcx>,
_: &'tcx FnDecl<'_>,
body: &'tcx Body<'_>,
_: Span,
_: HirId,
) {
NonminimalBoolVisitor { cx }.visit_body(body)
}
}
struct NonminimalBoolVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
}
use quine_mc_cluskey::Bool;
struct Hir2Qmm<'a, 'tcx, 'v> {
terminals: Vec<&'v Expr<'v>>,
cx: &'a LateContext<'tcx>,
}
impl<'a, 'tcx, 'v> Hir2Qmm<'a, 'tcx, 'v> {
fn extract(&mut self, op: BinOpKind, a: &[&'v Expr<'_>], mut v: Vec<Bool>) -> Result<Vec<Bool>, String> {
for a in a {
if let ExprKind::Binary(binop, lhs, rhs) = &a.kind {
if binop.node == op {
v = self.extract(op, &[lhs, rhs], v)?;
continue;
}
}
v.push(self.run(a)?);
}
Ok(v)
}
fn run(&mut self, e: &'v Expr<'_>) -> Result<Bool, String> {
fn negate(bin_op_kind: BinOpKind) -> Option<BinOpKind> {
match bin_op_kind {
BinOpKind::Eq => Some(BinOpKind::Ne),
BinOpKind::Ne => Some(BinOpKind::Eq),
BinOpKind::Gt => Some(BinOpKind::Le),
BinOpKind::Ge => Some(BinOpKind::Lt),
BinOpKind::Lt => Some(BinOpKind::Ge),
BinOpKind::Le => Some(BinOpKind::Gt),
_ => None,
}
}
// prevent folding of `cfg!` macros and the like
if !e.span.from_expansion() {
match &e.kind {
ExprKind::Unary(UnOp::Not, inner) => return Ok(Bool::Not(box self.run(inner)?)),
ExprKind::Binary(binop, lhs, rhs) => match &binop.node {
BinOpKind::Or => {
return Ok(Bool::Or(self.extract(BinOpKind::Or, &[lhs, rhs], Vec::new())?));
},
BinOpKind::And => {
return Ok(Bool::And(self.extract(BinOpKind::And, &[lhs, rhs], Vec::new())?));
},
_ => (),
},
ExprKind::Lit(lit) => match lit.node {
LitKind::Bool(true) => return Ok(Bool::True),
LitKind::Bool(false) => return Ok(Bool::False),
_ => (),
},
_ => (),
}
}
for (n, expr) in self.terminals.iter().enumerate() {
if eq_expr_value(self.cx, e, expr) {
#[allow(clippy::cast_possible_truncation)]
return Ok(Bool::Term(n as u8));
}
if_chain! {
if let ExprKind::Binary(e_binop, e_lhs, e_rhs) = &e.kind;
if implements_ord(self.cx, e_lhs);
if let ExprKind::Binary(expr_binop, expr_lhs, expr_rhs) = &expr.kind;
if negate(e_binop.node) == Some(expr_binop.node);
if eq_expr_value(self.cx, e_lhs, expr_lhs);
if eq_expr_value(self.cx, e_rhs, expr_rhs);
then {
#[allow(clippy::cast_possible_truncation)]
return Ok(Bool::Not(Box::new(Bool::Term(n as u8))));
}
}
}
let n = self.terminals.len();
self.terminals.push(e);
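// `Bool::Term` indexes terminals with a `u8` and the stats arrays below have 32 slots,
// so give up once there are more than 32 distinct terminals.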
if n < 32 {
#[allow(clippy::cast_possible_truncation)]
Ok(Bool::Term(n as u8))
} else {
Err("too many literals".to_owned())
}
}
}
struct SuggestContext<'a, 'tcx, 'v> {
terminals: &'v [&'v Expr<'v>],
cx: &'a LateContext<'tcx>,
output: String,
}
impl<'a, 'tcx, 'v> SuggestContext<'a, 'tcx, 'v> {
fn recurse(&mut self, suggestion: &Bool) -> Option<()> {
use quine_mc_cluskey::Bool::{And, False, Not, Or, Term, True};
match suggestion {
True => {
self.output.push_str("true");
},
False => {
self.output.push_str("false");
},
Not(inner) => match **inner {
And(_) | Or(_) => {
self.output.push('!');
self.output.push('(');
self.recurse(inner);
self.output.push(')');
},
Term(n) => {
let terminal = self.terminals[n as usize];
if let Some(str) = simplify_not(self.cx, terminal) {
self.output.push_str(&str)
} else {
self.output.push('!');
let snip = snippet_opt(self.cx, terminal.span)?;
self.output.push_str(&snip);
}
},
True | False | Not(_) => {
self.output.push('!');
self.recurse(inner)?;
},
},
And(v) => {
for (index, inner) in v.iter().enumerate() {
if index > 0 {
self.output.push_str(" && ");
}
if let Or(_) = *inner {
self.output.push('(');
self.recurse(inner);
self.output.push(')');
} else {
self.recurse(inner);
}
}
},
Or(v) => {
for (index, inner) in v.iter().rev().enumerate() {
if index > 0 {
self.output.push_str(" || ");
}
self.recurse(inner);
}
},
&Term(n) => {
let snip = snippet_opt(self.cx, self.terminals[n as usize].span)?;
self.output.push_str(&snip);
},
}
Some(())
}
}
fn simplify_not(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<String> {
match &expr.kind {
ExprKind::Binary(binop, lhs, rhs) => {
if !implements_ord(cx, lhs) {
return None;
}
match binop.node {
BinOpKind::Eq => Some(" != "),
BinOpKind::Ne => Some(" == "),
BinOpKind::Lt => Some(" >= "),
BinOpKind::Gt => Some(" <= "),
BinOpKind::Le => Some(" > "),
BinOpKind::Ge => Some(" < "),
_ => None,
}
.and_then(|op| {
Some(format!(
"{}{}{}",
snippet_opt(cx, lhs.span)?,
op,
snippet_opt(cx, rhs.span)?
))
})
},
ExprKind::MethodCall(path, _, args, _) if args.len() == 1 => {
let type_of_receiver = cx.typeck_results().expr_ty(&args[0]);
if !is_type_diagnostic_item(cx, type_of_receiver, sym::option_type)
&& !is_type_diagnostic_item(cx, type_of_receiver, sym::result_type)
{
return None;
}
METHODS_WITH_NEGATION
.iter()
.cloned()
.flat_map(|(a, b)| vec![(a, b), (b, a)])
.find(|&(a, _)| {
let path: &str = &path.ident.name.as_str();
a == path
})
.and_then(|(_, neg_method)| Some(format!("{}.{}()", snippet_opt(cx, args[0].span)?, neg_method)))
},
_ => None,
}
}
fn suggest(cx: &LateContext<'_>, suggestion: &Bool, terminals: &[&Expr<'_>]) -> String {
let mut suggest_context = SuggestContext {
terminals,
cx,
output: String::new(),
};
suggest_context.recurse(suggestion);
suggest_context.output
}
fn simple_negate(b: Bool) -> Bool {
use quine_mc_cluskey::Bool::{And, False, Not, Or, Term, True};
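// Push the negation inwards using De Morgan's laws:
// `!(a && b) == !a || !b` and `!(a || b) == !a && !b`.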
match b {
True => False,
False => True,
t @ Term(_) => Not(Box::new(t)),
And(mut v) => {
for el in &mut v {
*el = simple_negate(::std::mem::replace(el, True));
}
Or(v)
},
Or(mut v) => {
for el in &mut v {
*el = simple_negate(::std::mem::replace(el, True));
}
And(v)
},
Not(inner) => *inner,
}
}
#[derive(Default)]
struct Stats {
terminals: [usize; 32],
negations: usize,
ops: usize,
}
fn terminal_stats(b: &Bool) -> Stats {
fn recurse(b: &Bool, stats: &mut Stats) {
match b {
True | False => stats.ops += 1,
Not(inner) => {
match **inner {
And(_) | Or(_) => stats.ops += 1, // brackets are also operations
_ => stats.negations += 1,
}
recurse(inner, stats);
},
And(v) | Or(v) => {
stats.ops += v.len() - 1;
for inner in v {
recurse(inner, stats);
}
},
&Term(n) => stats.terminals[n as usize] += 1,
}
}
use quine_mc_cluskey::Bool::{And, False, Not, Or, Term, True};
let mut stats = Stats::default();
recurse(b, &mut stats);
stats
}
impl<'a, 'tcx> NonminimalBoolVisitor<'a, 'tcx> {
fn bool_expr(&self, e: &'tcx Expr<'_>) {
let mut h2q = Hir2Qmm {
terminals: Vec::new(),
cx: self.cx,
};
if let Ok(expr) = h2q.run(e) {
if h2q.terminals.len() > 8 {
// QMC has exponentially slow behavior as the number of terminals increases
// 8 is reasonable, it takes approximately 0.2 seconds.
// See #825
return;
}
let stats = terminal_stats(&expr);
let mut simplified = expr.simplify();
for simple in Bool::Not(Box::new(expr)).simplify() {
match simple {
Bool::Not(_) | Bool::True | Bool::False => {},
_ => simplified.push(Bool::Not(Box::new(simple.clone()))),
}
let simple_negated = simple_negate(simple);
if simplified.iter().any(|s| *s == simple_negated) {
continue;
}
simplified.push(simple_negated);
}
let mut improvements = Vec::with_capacity(simplified.len());
'simplified: for suggestion in &simplified {
let simplified_stats = terminal_stats(suggestion);
let mut improvement = false;
for i in 0..32 {
// ignore any "simplifications" that end up requiring a terminal more often
// than in the original expression
if stats.terminals[i] < simplified_stats.terminals[i] {
continue 'simplified;
}
if stats.terminals[i] != 0 && simplified_stats.terminals[i] == 0 {
span_lint_and_then(
self.cx,
LOGIC_BUG,
e.span,
"this boolean expression contains a logic bug",
|diag| {
diag.span_help(
h2q.terminals[i].span,
"this expression can be optimized out by applying boolean operations to the \
outer expression",
);
diag.span_suggestion(
e.span,
"it would look like the following",
suggest(self.cx, suggestion, &h2q.terminals),
// nonminimal_bool can produce minimal but
// not human readable expressions (#3141)
Applicability::Unspecified,
);
},
);
// don't also lint `NONMINIMAL_BOOL`
return;
}
// if the number of occurrences of a terminal decreases or any of the stats
// decreases while none increases
improvement |= (stats.terminals[i] > simplified_stats.terminals[i])
|| (stats.negations > simplified_stats.negations && stats.ops == simplified_stats.ops)
|| (stats.ops > simplified_stats.ops && stats.negations == simplified_stats.negations);
}
if improvement {
improvements.push(suggestion);
}
}
let nonminimal_bool_lint = |suggestions: Vec<_>| {
span_lint_and_then(
self.cx,
NONMINIMAL_BOOL,
e.span,
"this boolean expression can be simplified",
|diag| {
diag.span_suggestions(
e.span,
"try",
suggestions.into_iter(),
// nonminimal_bool can produce minimal but
// not human readable expressions (#3141)
Applicability::Unspecified,
);
},
);
};
if improvements.is_empty() {
let mut visitor = NotSimplificationVisitor { cx: self.cx };
visitor.visit_expr(e);
} else {
nonminimal_bool_lint(
improvements
.into_iter()
.map(|suggestion| suggest(self.cx, suggestion, &h2q.terminals))
.collect(),
);
}
}
}
}
impl<'a, 'tcx> Visitor<'tcx> for NonminimalBoolVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
if in_macro(e.span) {
return;
}
match &e.kind {
ExprKind::Binary(binop, _, _) if binop.node == BinOpKind::Or || binop.node == BinOpKind::And => {
self.bool_expr(e)
},
ExprKind::Unary(UnOp::Not, inner) => {
if self.cx.typeck_results().node_types()[inner.hir_id].is_bool() {
self.bool_expr(e);
} else {
walk_expr(self, e);
}
},
_ => walk_expr(self, e),
}
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
}
fn implements_ord<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> bool {
let ty = cx.typeck_results().expr_ty(expr);
get_trait_def_id(cx, &paths::ORD).map_or(false, |id| implements_trait(cx, ty, id, &[]))
}
struct NotSimplificationVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
}
impl<'a, 'tcx> Visitor<'tcx> for NotSimplificationVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
if let ExprKind::Unary(UnOp::Not, inner) = &expr.kind {
if let Some(suggestion) = simplify_not(self.cx, inner) {
span_lint_and_sugg(
self.cx,
NONMINIMAL_BOOL,
expr.span,
"this boolean expression can be simplified",
"try",
suggestion,
Applicability::MachineApplicable,
);
}
}
walk_expr(self, expr);
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
}

View File

@@ -0,0 +1,117 @@
use crate::utils::{
contains_name, get_pat_name, match_type, paths, single_segment_path, snippet_with_applicability, span_lint_and_sugg,
};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, UintTy};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;
use rustc_span::Symbol;
declare_clippy_lint! {
/// **What it does:** Checks for naive byte counts
///
/// **Why is this bad?** The [`bytecount`](https://crates.io/crates/bytecount)
/// crate has methods to count your bytes faster, especially for large slices.
///
/// **Known problems:** If you have predominantly small slices, the
/// `bytecount::count(..)` method may actually be slower. However, if you can
/// ensure that less than 2³²-1 matches arise, the `naive_count_32(..)` can be
/// faster in those cases.
///
/// **Example:**
///
/// ```rust
/// # let vec = vec![1_u8];
/// &vec.iter().filter(|x| **x == 0u8).count(); // use bytecount::count instead
/// ```
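///
/// A possible replacement (sketch; assumes the `bytecount` crate is available
/// as a dependency):
/// ```rust,ignore
/// # let vec = vec![1_u8];
/// bytecount::count(&vec, 0u8);
/// ```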
pub NAIVE_BYTECOUNT,
pedantic,
"use of naive `<slice>.filter(|&x| x == y).count()` to count byte values"
}
declare_lint_pass!(ByteCount => [NAIVE_BYTECOUNT]);
impl<'tcx> LateLintPass<'tcx> for ByteCount {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
if_chain! {
if let ExprKind::MethodCall(ref count, _, ref count_args, _) = expr.kind;
if count.ident.name == sym!(count);
if count_args.len() == 1;
if let ExprKind::MethodCall(ref filter, _, ref filter_args, _) = count_args[0].kind;
if filter.ident.name == sym!(filter);
if filter_args.len() == 2;
if let ExprKind::Closure(_, _, body_id, _, _) = filter_args[1].kind;
then {
let body = cx.tcx.hir().body(body_id);
if_chain! {
if body.params.len() == 1;
if let Some(argname) = get_pat_name(&body.params[0].pat);
if let ExprKind::Binary(ref op, ref l, ref r) = body.value.kind;
if op.node == BinOpKind::Eq;
if match_type(cx,
cx.typeck_results().expr_ty(&filter_args[0]).peel_refs(),
&paths::SLICE_ITER);
then {
let needle = match get_path_name(l) {
Some(name) if check_arg(name, argname, r) => r,
_ => match get_path_name(r) {
Some(name) if check_arg(name, argname, l) => l,
_ => { return; }
}
};
if ty::Uint(UintTy::U8) != *cx.typeck_results().expr_ty(needle).peel_refs().kind() {
return;
}
let haystack = if let ExprKind::MethodCall(ref path, _, ref args, _) =
filter_args[0].kind {
let p = path.ident.name;
if (p == sym::iter || p == sym!(iter_mut)) && args.len() == 1 {
&args[0]
} else {
&filter_args[0]
}
} else {
&filter_args[0]
};
let mut applicability = Applicability::MaybeIncorrect;
span_lint_and_sugg(
cx,
NAIVE_BYTECOUNT,
expr.span,
"you appear to be counting bytes the naive way",
"consider using the bytecount crate",
format!("bytecount::count({}, {})",
snippet_with_applicability(cx, haystack.span, "..", &mut applicability),
snippet_with_applicability(cx, needle.span, "..", &mut applicability)),
applicability,
);
}
};
}
};
}
}
fn check_arg(name: Symbol, arg: Symbol, needle: &Expr<'_>) -> bool {
name == arg && !contains_name(name, needle)
}
fn get_path_name(expr: &Expr<'_>) -> Option<Symbol> {
match expr.kind {
ExprKind::Box(ref e) | ExprKind::AddrOf(BorrowKind::Ref, _, ref e) | ExprKind::Unary(UnOp::Deref, ref e) => {
get_path_name(e)
},
ExprKind::Block(ref b, _) => {
if b.stmts.is_empty() {
b.expr.as_ref().and_then(|p| get_path_name(p))
} else {
None
}
},
ExprKind::Path(ref qpath) => single_segment_path(qpath).map(|ps| ps.ident.name),
_ => None,
}
}

View File

@@ -0,0 +1,129 @@
//! lint on missing cargo common metadata
use std::path::PathBuf;
use crate::utils::{run_lints, span_lint};
use rustc_hir::{hir_id::CRATE_HIR_ID, Crate};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::DUMMY_SP;
declare_clippy_lint! {
/// **What it does:** Checks to see if all common metadata is defined in
/// `Cargo.toml`. See: https://rust-lang-nursery.github.io/api-guidelines/documentation.html#cargotoml-includes-all-common-metadata-c-metadata
///
/// **Why is this bad?** It will be more difficult for users to discover the
/// purpose of the crate, and key information related to it.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```toml
/// # This `Cargo.toml` is missing an authors field:
/// [package]
/// name = "clippy"
/// version = "0.0.212"
/// description = "A bunch of helpful lints to avoid common pitfalls in Rust"
/// repository = "https://github.com/rust-lang/rust-clippy"
/// readme = "README.md"
/// license = "MIT OR Apache-2.0"
/// keywords = ["clippy", "lint", "plugin"]
/// categories = ["development-tools", "development-tools::cargo-plugins"]
/// ```
///
/// Should include an authors field like:
///
/// ```toml
/// # This `Cargo.toml` includes all common metadata
/// [package]
/// name = "clippy"
/// version = "0.0.212"
/// authors = ["Someone <someone@rust-lang.org>"]
/// description = "A bunch of helpful lints to avoid common pitfalls in Rust"
/// repository = "https://github.com/rust-lang/rust-clippy"
/// readme = "README.md"
/// license = "MIT OR Apache-2.0"
/// keywords = ["clippy", "lint", "plugin"]
/// categories = ["development-tools", "development-tools::cargo-plugins"]
/// ```
pub CARGO_COMMON_METADATA,
cargo,
"common metadata is defined in `Cargo.toml`"
}
#[derive(Copy, Clone, Debug)]
pub struct CargoCommonMetadata {
ignore_publish: bool,
}
impl CargoCommonMetadata {
pub fn new(ignore_publish: bool) -> Self {
Self { ignore_publish }
}
}
impl_lint_pass!(CargoCommonMetadata => [
CARGO_COMMON_METADATA
]);
fn missing_warning(cx: &LateContext<'_>, package: &cargo_metadata::Package, field: &str) {
let message = format!("package `{}` is missing `{}` metadata", package.name, field);
span_lint(cx, CARGO_COMMON_METADATA, DUMMY_SP, &message);
}
fn is_empty_str(value: &Option<String>) -> bool {
value.as_ref().map_or(true, String::is_empty)
}
fn is_empty_path(value: &Option<PathBuf>) -> bool {
value.as_ref().and_then(|x| x.to_str()).map_or(true, str::is_empty)
}
fn is_empty_vec(value: &[String]) -> bool {
// This works because empty iterators return true
value.iter().all(String::is_empty)
}
impl LateLintPass<'_> for CargoCommonMetadata {
fn check_crate(&mut self, cx: &LateContext<'_>, _: &Crate<'_>) {
if !run_lints(cx, &[CARGO_COMMON_METADATA], CRATE_HIR_ID) {
return;
}
let metadata = unwrap_cargo_metadata!(cx, CARGO_COMMON_METADATA, false);
for package in metadata.packages {
// only run the lint if publish is `None` (`publish = true` or skipped entirely)
// or if the vector isn't empty (`publish = ["something"]`)
if package.publish.as_ref().filter(|publish| publish.is_empty()).is_none() || self.ignore_publish {
if is_empty_vec(&package.authors) {
missing_warning(cx, &package, "package.authors");
}
if is_empty_str(&package.description) {
missing_warning(cx, &package, "package.description");
}
if is_empty_str(&package.license) && is_empty_path(&package.license_file) {
missing_warning(cx, &package, "either package.license or package.license_file");
}
if is_empty_str(&package.repository) {
missing_warning(cx, &package, "package.repository");
}
if is_empty_path(&package.readme) {
missing_warning(cx, &package, "package.readme");
}
if is_empty_vec(&package.keywords) {
missing_warning(cx, &package, "package.keywords");
}
if is_empty_vec(&package.categories) {
missing_warning(cx, &package, "package.categories");
}
}
}
}
}

View File

@@ -0,0 +1,85 @@
use crate::utils::span_lint_and_help;
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_hir::{Expr, ExprKind, PathSegment};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::{source_map::Spanned, symbol::sym, Span};
declare_clippy_lint! {
/// **What it does:**
/// Checks for calls to `ends_with` with possible file extensions
/// and suggests to use a case-insensitive approach instead.
///
/// **Why is this bad?**
/// `ends_with` is case-sensitive and may not detect files with a valid extension.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust
/// fn is_rust_file(filename: &str) -> bool {
/// filename.ends_with(".rs")
/// }
/// ```
/// Use instead:
/// ```rust
/// fn is_rust_file(filename: &str) -> bool {
/// filename.rsplit('.').next().map(|ext| ext.eq_ignore_ascii_case("rs")) == Some(true)
/// }
/// ```
pub CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS,
pedantic,
"Checks for calls to ends_with with case-sensitive file extensions"
}
declare_lint_pass!(CaseSensitiveFileExtensionComparisons => [CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS]);
fn check_case_sensitive_file_extension_comparison(ctx: &LateContext<'_>, expr: &Expr<'_>) -> Option<Span> {
if_chain! {
if let ExprKind::MethodCall(PathSegment { ident, .. }, _, [obj, extension, ..], span) = expr.kind;
if ident.as_str() == "ends_with";
if let ExprKind::Lit(Spanned { node: LitKind::Str(ext_literal, ..), ..}) = extension.kind;
if (2..=6).contains(&ext_literal.as_str().len());
if ext_literal.as_str().starts_with('.');
if ext_literal.as_str().chars().skip(1).all(|c| c.is_uppercase() || c.is_digit(10))
|| ext_literal.as_str().chars().skip(1).all(|c| c.is_lowercase() || c.is_digit(10));
then {
let mut ty = ctx.typeck_results().expr_ty(obj);
ty = match ty.kind() {
ty::Ref(_, ty, ..) => ty,
_ => ty
};
match ty.kind() {
ty::Str => {
return Some(span);
},
ty::Adt(&ty::AdtDef { did, .. }, _) => {
if ctx.tcx.is_diagnostic_item(sym::string_type, did) {
return Some(span);
}
},
_ => { return None; }
}
}
}
None
}
impl LateLintPass<'tcx> for CaseSensitiveFileExtensionComparisons {
fn check_expr(&mut self, ctx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
if let Some(span) = check_case_sensitive_file_extension_comparison(ctx, expr) {
span_lint_and_help(
ctx,
CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS,
span,
"case-sensitive file extension comparison",
None,
"consider using a case-insensitive comparison instead",
);
}
}
}

View File

@@ -0,0 +1,87 @@
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, FloatTy, Ty};
use crate::utils::{in_constant, is_isize_or_usize, snippet_opt, span_lint_and_sugg};
use super::{utils, CAST_LOSSLESS};
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
if !should_lint(cx, expr, cast_from, cast_to) {
return;
}
// The suggestion is to use a function call, so if the original expression
// has parens on the outside, they are no longer needed.
let mut applicability = Applicability::MachineApplicable;
let opt = snippet_opt(cx, cast_op.span);
let sugg = opt.as_ref().map_or_else(
|| {
applicability = Applicability::HasPlaceholders;
".."
},
|snip| {
if should_strip_parens(cast_op, snip) {
&snip[1..snip.len() - 1]
} else {
snip.as_str()
}
},
);
span_lint_and_sugg(
cx,
CAST_LOSSLESS,
expr.span,
&format!(
"casting `{}` to `{}` may become silently lossy if you later change the type",
cast_from, cast_to
),
"try",
format!("{}::from({})", cast_to, sugg),
applicability,
);
}
fn should_lint(cx: &LateContext<'_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) -> bool {
// Do not suggest using From in consts/statics until it is valid to do so (see #2267).
if in_constant(cx, expr.hir_id) {
return false;
}
match (cast_from.is_integral(), cast_to.is_integral()) {
(true, true) => {
let cast_signed_to_unsigned = cast_from.is_signed() && !cast_to.is_signed();
let from_nbits = utils::int_ty_to_nbits(cast_from, cx.tcx);
let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
!is_isize_or_usize(cast_from)
&& !is_isize_or_usize(cast_to)
&& from_nbits < to_nbits
&& !cast_signed_to_unsigned
},
(true, false) => {
let from_nbits = utils::int_ty_to_nbits(cast_from, cx.tcx);
let to_nbits = if let ty::Float(FloatTy::F32) = cast_to.kind() {
32
} else {
64
};
from_nbits < to_nbits
},
(_, _) => {
matches!(cast_from.kind(), ty::Float(FloatTy::F32)) && matches!(cast_to.kind(), ty::Float(FloatTy::F64))
},
}
}
fn should_strip_parens(cast_expr: &Expr<'_>, snip: &str) -> bool {
if let ExprKind::Binary(_, _, _) = cast_expr.kind {
if snip.starts_with('(') && snip.ends_with(')') {
return true;
}
}
false
}

View File

@ -0,0 +1,54 @@
use rustc_hir::Expr;
use rustc_lint::LateContext;
use rustc_middle::ty::{self, FloatTy, Ty};
use crate::utils::{is_isize_or_usize, span_lint};
use super::{utils, CAST_POSSIBLE_TRUNCATION};
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
let msg = match (cast_from.is_integral(), cast_to.is_integral()) {
(true, true) => {
let from_nbits = utils::int_ty_to_nbits(cast_from, cx.tcx);
let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
let (should_lint, suffix) = match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
(true, true) | (false, false) => (to_nbits < from_nbits, ""),
(true, false) => (
to_nbits <= 32,
if to_nbits == 32 {
" on targets with 64-bit wide pointers"
} else {
""
},
),
(false, true) => (from_nbits == 64, " on targets with 32-bit wide pointers"),
};
if !should_lint {
return;
}
format!(
"casting `{}` to `{}` may truncate the value{}",
cast_from, cast_to, suffix,
)
},
(false, true) => {
format!("casting `{}` to `{}` may truncate the value", cast_from, cast_to)
},
(_, _) => {
if matches!(cast_from.kind(), &ty::Float(FloatTy::F64))
&& matches!(cast_to.kind(), &ty::Float(FloatTy::F32))
{
"casting `f64` to `f32` may truncate the value".to_string()
} else {
return;
}
},
};
span_lint(cx, CAST_POSSIBLE_TRUNCATION, expr.span, &msg);
}

View File

@@ -0,0 +1,44 @@
use rustc_hir::Expr;
use rustc_lint::LateContext;
use rustc_middle::ty::Ty;
use crate::utils::{is_isize_or_usize, span_lint};
use super::{utils, CAST_POSSIBLE_WRAP};
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
if !(cast_from.is_integral() && cast_to.is_integral()) {
return;
}
let arch_64_suffix = " on targets with 64-bit wide pointers";
let arch_32_suffix = " on targets with 32-bit wide pointers";
let cast_unsigned_to_signed = !cast_from.is_signed() && cast_to.is_signed();
let from_nbits = utils::int_ty_to_nbits(cast_from, cx.tcx);
let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
let (should_lint, suffix) = match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
(true, true) | (false, false) => (to_nbits == from_nbits && cast_unsigned_to_signed, ""),
(true, false) => (to_nbits <= 32 && cast_unsigned_to_signed, arch_32_suffix),
(false, true) => (
cast_unsigned_to_signed,
if from_nbits == 64 {
arch_64_suffix
} else {
arch_32_suffix
},
),
};
if should_lint {
span_lint(
cx,
CAST_POSSIBLE_WRAP,
expr.span,
&format!(
"casting `{}` to `{}` may wrap around the value{}",
cast_from, cast_to, suffix,
),
);
}
}

View File

@@ -0,0 +1,51 @@
use rustc_hir::Expr;
use rustc_lint::LateContext;
use rustc_middle::ty::{self, FloatTy, Ty};
use crate::utils::{is_isize_or_usize, span_lint};
use super::{utils, CAST_PRECISION_LOSS};
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
if !cast_from.is_integral() || cast_to.is_integral() {
return;
}
let from_nbits = utils::int_ty_to_nbits(cast_from, cx.tcx);
let to_nbits = if let ty::Float(FloatTy::F32) = cast_to.kind() {
32
} else {
64
};
if !(is_isize_or_usize(cast_from) || from_nbits >= to_nbits) {
return;
}
let cast_to_f64 = to_nbits == 64;
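// IEEE 754: `f64` has 52 explicit mantissa bits, `f32` has 23.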
let mantissa_nbits = if cast_to_f64 { 52 } else { 23 };
let arch_dependent = is_isize_or_usize(cast_from) && cast_to_f64;
let arch_dependent_str = "on targets with 64-bit wide pointers ";
let from_nbits_str = if arch_dependent {
"64".to_owned()
} else if is_isize_or_usize(cast_from) {
"32 or 64".to_owned()
} else {
utils::int_ty_to_nbits(cast_from, cx.tcx).to_string()
};
span_lint(
cx,
CAST_PRECISION_LOSS,
expr.span,
&format!(
"casting `{0}` to `{1}` causes a loss of precision {2}(`{0}` is {3} bits wide, \
but `{1}`'s mantissa is only {4} bits wide)",
cast_from,
if cast_to_f64 { "f64" } else { "f32" },
if arch_dependent { arch_dependent_str } else { "" },
from_nbits_str,
mantissa_nbits
),
);
}

View File

@@ -0,0 +1,81 @@
use rustc_hir::{Expr, ExprKind, GenericArg};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
use rustc_span::symbol::sym;
use rustc_target::abi::LayoutOf;
use if_chain::if_chain;
use crate::utils::{is_hir_ty_cfg_dependant, span_lint};
use super::CAST_PTR_ALIGNMENT;
pub(super) fn check(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if let ExprKind::Cast(ref cast_expr, cast_to) = expr.kind {
if is_hir_ty_cfg_dependant(cx, cast_to) {
return;
}
let (cast_from, cast_to) = (
cx.typeck_results().expr_ty(cast_expr),
cx.typeck_results().expr_ty(expr),
);
lint_cast_ptr_alignment(cx, expr, cast_from, cast_to);
} else if let ExprKind::MethodCall(method_path, _, args, _) = expr.kind {
if_chain! {
if method_path.ident.name == sym!(cast);
if let Some(generic_args) = method_path.args;
if let [GenericArg::Type(cast_to)] = generic_args.args;
// There probably is no obvious reason to do this, just to be consistent with `as` cases.
if !is_hir_ty_cfg_dependant(cx, cast_to);
then {
let (cast_from, cast_to) =
(cx.typeck_results().expr_ty(&args[0]), cx.typeck_results().expr_ty(expr));
lint_cast_ptr_alignment(cx, expr, cast_from, cast_to);
}
}
}
}
fn lint_cast_ptr_alignment<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>, cast_from: Ty<'tcx>, cast_to: Ty<'tcx>) {
if_chain! {
if let ty::RawPtr(from_ptr_ty) = &cast_from.kind();
if let ty::RawPtr(to_ptr_ty) = &cast_to.kind();
if let Ok(from_layout) = cx.layout_of(from_ptr_ty.ty);
if let Ok(to_layout) = cx.layout_of(to_ptr_ty.ty);
if from_layout.align.abi < to_layout.align.abi;
// with c_void, we inherently need to trust the user
if !is_c_void(cx, from_ptr_ty.ty);
// when casting from a ZST, we don't know enough to properly lint
if !from_layout.is_zst();
then {
span_lint(
cx,
CAST_PTR_ALIGNMENT,
expr.span,
&format!(
"casting from `{}` to a more-strictly-aligned pointer (`{}`) ({} < {} bytes)",
cast_from,
cast_to,
from_layout.align.abi.bytes(),
to_layout.align.abi.bytes(),
),
);
}
}
}
/// Check if the given type is either `core::ffi::c_void` or
/// one of the platform specific `libc::<platform>::c_void` of libc.
fn is_c_void(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
if let ty::Adt(adt, _) = ty.kind() {
let names = cx.get_def_path(adt.did);
if names.is_empty() {
return false;
}
if names[0] == sym::libc || names[0] == sym::core && *names.last().unwrap() == sym!(c_void) {
return true;
}
}
false
}

View File

@ -0,0 +1,28 @@
use rustc_hir::{Expr, ExprKind, MutTy, Mutability, TyKind, UnOp};
use rustc_lint::LateContext;
use rustc_middle::ty;
use if_chain::if_chain;
use crate::utils::span_lint;
use super::CAST_REF_TO_MUT;
pub(super) fn check(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
if let ExprKind::Unary(UnOp::Deref, e) = &expr.kind;
if let ExprKind::Cast(e, t) = &e.kind;
if let TyKind::Ptr(MutTy { mutbl: Mutability::Mut, .. }) = t.kind;
if let ExprKind::Cast(e, t) = &e.kind;
if let TyKind::Ptr(MutTy { mutbl: Mutability::Not, .. }) = t.kind;
if let ty::Ref(..) = cx.typeck_results().node_type(e.hir_id).kind();
then {
span_lint(
cx,
CAST_REF_TO_MUT,
expr.span,
"casting `&T` to `&mut T` may cause undefined behavior, consider instead using an `UnsafeCell`",
);
}
}
}

View File

@ -0,0 +1,70 @@
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
use if_chain::if_chain;
use crate::consts::{constant, Constant};
use crate::utils::{method_chain_args, sext, span_lint};
use super::CAST_SIGN_LOSS;
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
if should_lint(cx, cast_op, cast_from, cast_to) {
span_lint(
cx,
CAST_SIGN_LOSS,
expr.span,
&format!(
"casting `{}` to `{}` may lose the sign of the value",
cast_from, cast_to
),
);
}
}
fn should_lint(cx: &LateContext<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) -> bool {
match (cast_from.is_integral(), cast_to.is_integral()) {
(true, true) => {
if !cast_from.is_signed() || cast_to.is_signed() {
return false;
}
// Don't lint for positive constants.
let const_val = constant(cx, &cx.typeck_results(), cast_op);
if_chain! {
if let Some((Constant::Int(n), _)) = const_val;
if let ty::Int(ity) = *cast_from.kind();
if sext(cx.tcx, n, ity) >= 0;
then {
return false;
}
}
// Don't lint for the result of methods that always return non-negative values.
if let ExprKind::MethodCall(ref path, _, _, _) = cast_op.kind {
let mut method_name = path.ident.name.as_str();
let allowed_methods = ["abs", "checked_abs", "rem_euclid", "checked_rem_euclid"];
if_chain! {
if method_name == "unwrap";
if let Some(arglist) = method_chain_args(cast_op, &["unwrap"]);
if let ExprKind::MethodCall(ref inner_path, _, _, _) = &arglist[0][0].kind;
then {
method_name = inner_path.ident.name.as_str();
}
}
if allowed_methods.iter().any(|&name| method_name == name) {
return false;
}
}
true
},
(false, true) => !cast_to.is_signed(),
(_, _) => false,
}
}

View File

@ -0,0 +1,42 @@
use rustc_ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, UintTy};
use if_chain::if_chain;
use crate::utils::{snippet_with_applicability, span_lint_and_then};
use super::CHAR_LIT_AS_U8;
pub(super) fn check(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
if let ExprKind::Cast(e, _) = &expr.kind;
if let ExprKind::Lit(l) = &e.kind;
if let LitKind::Char(c) = l.node;
if ty::Uint(UintTy::U8) == *cx.typeck_results().expr_ty(expr).kind();
then {
let mut applicability = Applicability::MachineApplicable;
let snippet = snippet_with_applicability(cx, e.span, "'x'", &mut applicability);
span_lint_and_then(
cx,
CHAR_LIT_AS_U8,
expr.span,
"casting a character literal to `u8` truncates",
|diag| {
diag.note("`char` is four bytes wide, but `u8` is a single byte");
if c.is_ascii() {
diag.span_suggestion(
expr.span,
"use a byte literal instead",
format!("b{}", snippet),
applicability,
);
}
});
}
}
}

View File

@ -0,0 +1,37 @@
use rustc_errors::Applicability;
use rustc_hir::Expr;
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty, UintTy};
use crate::utils::{snippet_with_applicability, span_lint_and_sugg};
use super::{utils, FN_TO_NUMERIC_CAST};
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
// We only want to check casts to `ty::Uint` or `ty::Int`
match cast_to.kind() {
ty::Uint(_) | ty::Int(..) => { /* continue on */ },
_ => return,
}
match cast_from.kind() {
ty::FnDef(..) | ty::FnPtr(_) => {
let mut applicability = Applicability::MaybeIncorrect;
let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability);
let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
if (to_nbits >= cx.tcx.data_layout.pointer_size.bits()) && (*cast_to.kind() != ty::Uint(UintTy::Usize)) {
span_lint_and_sugg(
cx,
FN_TO_NUMERIC_CAST,
expr.span,
&format!("casting function pointer `{}` to `{}`", from_snippet, cast_to),
"try",
format!("{} as usize", from_snippet),
applicability,
);
}
},
_ => {},
}
}

View File

@ -0,0 +1,39 @@
use rustc_errors::Applicability;
use rustc_hir::Expr;
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
use crate::utils::{snippet_with_applicability, span_lint_and_sugg};
use super::{utils, FN_TO_NUMERIC_CAST_WITH_TRUNCATION};
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
// We only want to check casts to `ty::Uint` or `ty::Int`
match cast_to.kind() {
ty::Uint(_) | ty::Int(..) => { /* continue on */ },
_ => return,
}
match cast_from.kind() {
ty::FnDef(..) | ty::FnPtr(_) => {
let mut applicability = Applicability::MaybeIncorrect;
let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability);
let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
if to_nbits < cx.tcx.data_layout.pointer_size.bits() {
span_lint_and_sugg(
cx,
FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
expr.span,
&format!(
"casting function pointer `{}` to `{}`, which truncates the value",
from_snippet, cast_to
),
"try",
format!("{} as usize", from_snippet),
applicability,
);
}
},
_ => {},
}
}

View File

@ -0,0 +1,407 @@
mod cast_lossless;
mod cast_possible_truncation;
mod cast_possible_wrap;
mod cast_precision_loss;
mod cast_ptr_alignment;
mod cast_ref_to_mut;
mod cast_sign_loss;
mod char_lit_as_u8;
mod fn_to_numeric_cast;
mod fn_to_numeric_cast_with_truncation;
mod ptr_as_ptr;
mod unnecessary_cast;
mod utils;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_semver::RustcVersion;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use crate::utils::is_hir_ty_cfg_dependant;
declare_clippy_lint! {
/// **What it does:** Checks for casts from any numerical to a float type where
/// the receiving type cannot store all values from the original type without
/// rounding errors. This possible rounding is to be expected, so this lint is
/// `Allow` by default.
///
/// Basically, this warns on casting any integer with 32 or more bits to `f32`
/// or any 64-bit integer to `f64`.
///
/// **Why is this bad?** It's not bad at all. But in some applications it can be
/// helpful to know where precision loss can take place. This lint can help find
/// those places in the code.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let x = u64::MAX;
/// x as f64;
/// ```
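///
/// The loss is observable: distinct integers can collapse to the same float.
/// ```rust
/// // 2^24 + 1 is not representable in `f32`, so it rounds to 2^24
/// assert_eq!(16_777_217_u32 as f32, 16_777_216_u32 as f32);
/// ```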
pub CAST_PRECISION_LOSS,
pedantic,
"casts that cause loss of precision, e.g., `x as f32` where `x: u64`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts from a signed to an unsigned numerical
/// type. In this case, negative values wrap around to large positive values,
/// which can be quite surprising in practice. However, as the cast works as
/// defined, this lint is `Allow` by default.
///
/// **Why is this bad?** Possibly surprising results. You can activate this lint
/// as a one-time check to see where numerical wrapping can arise.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let y: i8 = -1;
/// y as u128; // will return 340282366920938463463374607431768211455
/// ```
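///
/// A checked alternative that surfaces the problem instead of wrapping
/// (sketch using the standard `TryFrom`):
/// ```rust
/// use std::convert::TryFrom;
/// let y: i8 = -1;
/// assert!(u128::try_from(y).is_err());
/// ```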
pub CAST_SIGN_LOSS,
pedantic,
"casts from signed types to unsigned types, e.g., `x as u32` where `x: i32`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts between numerical types that may
/// truncate large values. This is expected behavior, so the cast is `Allow` by
/// default.
///
/// **Why is this bad?** In some problem domains, it is good practice to avoid
/// truncation. This lint can be activated to help assess where additional
/// checks could be beneficial.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// fn as_u8(x: u64) -> u8 {
/// x as u8
/// }
/// ```
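///
/// If truncation is not acceptable, a checked conversion can be used instead
/// (sketch using the standard `TryFrom`; the function name is illustrative):
/// ```rust
/// use std::convert::TryFrom;
/// fn checked_as_u8(x: u64) -> Option<u8> {
/// u8::try_from(x).ok()
/// }
/// ```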
pub CAST_POSSIBLE_TRUNCATION,
pedantic,
"casts that may cause truncation of the value, e.g., `x as u8` where `x: u32`, or `x as i32` where `x: f32`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts from an unsigned type to a signed type of
/// the same size. Performing such a cast is a 'no-op' for the compiler,
/// i.e., nothing is changed at the bit level, and the binary representation of
/// the value is reinterpreted. This can cause wrapping if the value is too big
/// for the target signed type. However, the cast works as defined, so this lint
/// is `Allow` by default.
///
/// **Why is this bad?** While such a cast is not bad in itself, the results can
/// be surprising when this is not the intended behavior, as demonstrated by the
/// example below.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// u32::MAX as i32; // will yield a value of `-1`
/// ```
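///
/// A checked conversion that reports the overflow instead of wrapping (sketch
/// using the standard `TryFrom`):
/// ```rust
/// use std::convert::TryFrom;
/// assert!(i32::try_from(u32::MAX).is_err());
/// ```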
pub CAST_POSSIBLE_WRAP,
pedantic,
"casts that may cause wrapping around the value, e.g., `x as i32` where `x: u32` and `x > i32::MAX`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts between numerical types that may
/// be replaced by safe conversion functions.
///
/// **Why is this bad?** Rust's `as` keyword will perform many kinds of
/// conversions, including silently lossy conversions. Conversion functions such
/// as `i32::from` will only perform lossless conversions. Using the conversion
/// functions prevents conversions from turning into silent lossy conversions if
/// the types of the input expressions ever change, and makes it easier for
/// people reading the code to know that the conversion is lossless.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// fn as_u64(x: u8) -> u64 {
/// x as u64
/// }
/// ```
///
/// Using `::from` would look like this:
///
/// ```rust
/// fn as_u64(x: u8) -> u64 {
/// u64::from(x)
/// }
/// ```
pub CAST_LOSSLESS,
pedantic,
"casts using `as` that are known to be lossless, e.g., `x as u64` where `x: u8`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts to the same type, casts of int literals to integer types
/// and casts of float literals to float types.
///
/// **Why is this bad?** It's just unnecessary.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let _ = 2i32 as i32;
/// let _ = 0.5 as f32;
/// ```
///
/// Better:
///
/// ```rust
/// let _ = 2_i32;
/// let _ = 0.5_f32;
/// ```
pub UNNECESSARY_CAST,
complexity,
"cast to the same type, e.g., `x as i32` where `x: i32`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts, using `as` or `pointer::cast`,
/// from a less-strictly-aligned pointer to a more-strictly-aligned pointer
///
/// **Why is this bad?** Dereferencing the resulting pointer may be undefined
/// behavior.
///
/// **Known problems:** Using `std::ptr::read_unaligned` and `std::ptr::write_unaligned` or similar
/// on the resulting pointer is fine. The lint is over-zealous: casts with manual alignment checks or casts like
/// u64 -> u8 -> u16 can be fine. Miri is able to do a more in-depth analysis.
///
/// **Example:**
/// ```rust
/// let _ = (&1u8 as *const u8) as *const u16;
/// let _ = (&mut 1u8 as *mut u8) as *mut u16;
///
/// (&1u8 as *const u8).cast::<u16>();
/// (&mut 1u8 as *mut u8).cast::<u16>();
/// ```
pub CAST_PTR_ALIGNMENT,
pedantic,
"cast from a pointer to a more-strictly-aligned pointer"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts of function pointers to something other than usize
///
/// **Why is this bad?**
/// Casting a function pointer to anything other than `usize`/`isize` is not portable across
/// architectures, because you end up losing bits if the target type is too small, or you end up with a
/// bunch of extra bits that waste space and add more instructions to the final binary than
/// strictly necessary for the problem.
///
/// Casting to isize also doesn't make sense since there are no signed addresses.
///
/// **Example**
///
/// ```rust
/// // Bad
/// fn fun() -> i32 { 1 }
/// let a = fun as i64;
///
/// // Good
/// fn fun2() -> i32 { 1 }
/// let a = fun2 as usize;
/// ```
pub FN_TO_NUMERIC_CAST,
style,
"casting a function pointer to a numeric type other than usize"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts of a function pointer to a numeric type not wide enough to
/// store the address.
///
/// **Why is this bad?**
/// Such a cast discards some bits of the function's address. If this is intended, it would be more
/// clearly expressed by casting to usize first, then casting the usize to the intended type (with
/// a comment) to perform the truncation.
///
/// **Example**
///
/// ```rust
/// // Bad
/// fn fn1() -> i16 {
/// 1
/// };
/// let _ = fn1 as i32;
///
/// // Better: Cast to usize first, then comment with the reason for the truncation
/// fn fn2() -> i16 {
/// 1
/// };
/// let fn_ptr = fn2 as usize;
/// let fn_ptr_truncated = fn_ptr as i32;
/// ```
pub FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
style,
"casting a function pointer to a numeric type not wide enough to store the address"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts of `&T` to `&mut T` anywhere in the code.
///
/// **Why is this bad?** It's basically guaranteed to be undefined behavior.
/// `UnsafeCell` is the only way to obtain aliasable data that is considered
/// mutable.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// fn x(r: &i32) {
/// unsafe {
/// *(r as *const _ as *mut _) += 1;
/// }
/// }
/// ```
///
/// Instead consider using interior mutability types.
///
/// ```rust
/// use std::cell::UnsafeCell;
///
/// fn x(r: &UnsafeCell<i32>) {
/// unsafe {
/// *r.get() += 1;
/// }
/// }
/// ```
pub CAST_REF_TO_MUT,
correctness,
"a cast of reference to a mutable pointer"
}
declare_clippy_lint! {
/// **What it does:** Checks for expressions where a character literal is cast
/// to `u8` and suggests using a byte literal instead.
///
/// **Why is this bad?** In general, casting values to smaller types is
/// error-prone and should be avoided where possible. In the particular case of
/// converting a character literal to u8, it is easy to avoid by just using a
/// byte literal instead. As an added bonus, `b'a'` is even slightly shorter
/// than `'a' as u8`.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// 'x' as u8
/// ```
///
/// A better version, using the byte literal:
///
/// ```rust,ignore
/// b'x'
/// ```
pub CHAR_LIT_AS_U8,
complexity,
"casting a character literal to `u8` truncates"
}
declare_clippy_lint! {
/// **What it does:**
/// Checks for `as` casts between raw pointers without changing their mutability,
/// namely `*const T` to `*const U` and `*mut T` to `*mut U`.
///
/// **Why is this bad?**
/// Though `as` casts between raw pointers are not terrible, `pointer::cast` is safer because
/// it cannot accidentally change the pointer's mutability nor cast the pointer to other types like `usize`.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust
/// let ptr: *const u32 = &42_u32;
/// let mut_ptr: *mut u32 = &mut 42_u32;
/// let _ = ptr as *const i32;
/// let _ = mut_ptr as *mut i32;
/// ```
/// Use instead:
/// ```rust
/// let ptr: *const u32 = &42_u32;
/// let mut_ptr: *mut u32 = &mut 42_u32;
/// let _ = ptr.cast::<i32>();
/// let _ = mut_ptr.cast::<i32>();
/// ```
pub PTR_AS_PTR,
pedantic,
"casting using `as` from and to raw pointers that doesn't change its mutability, where `pointer::cast` could take the place of `as`"
}
pub struct Casts {
msrv: Option<RustcVersion>,
}
impl Casts {
#[must_use]
pub fn new(msrv: Option<RustcVersion>) -> Self {
Self { msrv }
}
}
impl_lint_pass!(Casts => [
CAST_PRECISION_LOSS,
CAST_SIGN_LOSS,
CAST_POSSIBLE_TRUNCATION,
CAST_POSSIBLE_WRAP,
CAST_LOSSLESS,
CAST_REF_TO_MUT,
CAST_PTR_ALIGNMENT,
UNNECESSARY_CAST,
FN_TO_NUMERIC_CAST,
FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
CHAR_LIT_AS_U8,
PTR_AS_PTR,
]);
impl<'tcx> LateLintPass<'tcx> for Casts {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if expr.span.from_expansion() {
return;
}
if let ExprKind::Cast(ref cast_expr, cast_to) = expr.kind {
if is_hir_ty_cfg_dependant(cx, cast_to) {
return;
}
let (cast_from, cast_to) = (
cx.typeck_results().expr_ty(cast_expr),
cx.typeck_results().expr_ty(expr),
);
if unnecessary_cast::check(cx, expr, cast_expr, cast_from, cast_to) {
return;
}
fn_to_numeric_cast::check(cx, expr, cast_expr, cast_from, cast_to);
fn_to_numeric_cast_with_truncation::check(cx, expr, cast_expr, cast_from, cast_to);
if cast_from.is_numeric() && cast_to.is_numeric() && !in_external_macro(cx.sess(), expr.span) {
cast_possible_truncation::check(cx, expr, cast_from, cast_to);
cast_possible_wrap::check(cx, expr, cast_from, cast_to);
cast_precision_loss::check(cx, expr, cast_from, cast_to);
cast_lossless::check(cx, expr, cast_expr, cast_from, cast_to);
cast_sign_loss::check(cx, expr, cast_expr, cast_from, cast_to);
}
}
cast_ref_to_mut::check(cx, expr);
cast_ptr_alignment::check(cx, expr);
char_lit_as_u8::check(cx, expr);
ptr_as_ptr::check(cx, expr, &self.msrv);
}
extract_msrv_attr!(LateContext);
}


@ -0,0 +1,52 @@
use std::borrow::Cow;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, Mutability, TyKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, TypeAndMut};
use rustc_semver::RustcVersion;
use if_chain::if_chain;
use crate::utils::sugg::Sugg;
use crate::utils::{meets_msrv, span_lint_and_sugg};
use super::PTR_AS_PTR;
const PTR_AS_PTR_MSRV: RustcVersion = RustcVersion::new(1, 38, 0);
pub(super) fn check(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, msrv: &Option<RustcVersion>) {
if !meets_msrv(msrv.as_ref(), &PTR_AS_PTR_MSRV) {
return;
}
if_chain! {
if let ExprKind::Cast(cast_expr, cast_to_hir_ty) = expr.kind;
let (cast_from, cast_to) = (cx.typeck_results().expr_ty(cast_expr), cx.typeck_results().expr_ty(expr));
if let ty::RawPtr(TypeAndMut { mutbl: from_mutbl, .. }) = cast_from.kind();
if let ty::RawPtr(TypeAndMut { ty: to_pointee_ty, mutbl: to_mutbl }) = cast_to.kind();
if matches!((from_mutbl, to_mutbl),
(Mutability::Not, Mutability::Not) | (Mutability::Mut, Mutability::Mut));
// The `U` in `pointer::cast` has to be `Sized`
// as explained here: https://github.com/rust-lang/rust/issues/60602.
if to_pointee_ty.is_sized(cx.tcx.at(expr.span), cx.param_env);
then {
let mut applicability = Applicability::MachineApplicable;
let cast_expr_sugg = Sugg::hir_with_applicability(cx, cast_expr, "_", &mut applicability);
let turbofish = match &cast_to_hir_ty.kind {
TyKind::Infer => Cow::Borrowed(""),
TyKind::Ptr(mut_ty) if matches!(mut_ty.ty.kind, TyKind::Infer) => Cow::Borrowed(""),
_ => Cow::Owned(format!("::<{}>", to_pointee_ty)),
};
span_lint_and_sugg(
cx,
PTR_AS_PTR,
expr.span,
"`as` casting between raw pointers without changing its mutability",
"try `pointer::cast`, a safer alternative",
format!("{}.cast{}()", cast_expr_sugg.maybe_par(), turbofish),
applicability,
);
}
}
}
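
A minimal standalone sketch (not taken from the vendored sources) of the rewrite `PTR_AS_PTR` suggests; the turbofish handling mirrors the `TyKind::Infer` check above.

```rust
fn main() {
    let x = 42_u32;
    let ptr: *const u32 = &x;

    // Lint target: an `as` cast between raw pointers with unchanged mutability.
    let _a = ptr as *const i32;
    // Suggested form: `pointer::cast` with a turbofish, because the target
    // pointee type is spelled out in the cast.
    let _b = ptr.cast::<i32>();

    // With an inferred pointee (`*const _`), the suggestion drops the
    // turbofish and lets type inference fill it in.
    let _c: *const i32 = ptr as *const _;
    let _d: *const i32 = ptr.cast();
}
```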


@ -0,0 +1,106 @@
use rustc_ast::{LitFloatType, LitIntType, LitKind};
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, Lit, UnOp};
use rustc_lint::{LateContext, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{self, FloatTy, InferTy, Ty};
use if_chain::if_chain;
use crate::utils::{numeric_literal::NumericLiteral, snippet_opt, span_lint, span_lint_and_sugg};
use super::UNNECESSARY_CAST;
pub(super) fn check(
cx: &LateContext<'_>,
expr: &Expr<'_>,
cast_expr: &Expr<'_>,
cast_from: Ty<'_>,
cast_to: Ty<'_>,
) -> bool {
if let Some(lit) = get_numeric_literal(cast_expr) {
let literal_str = snippet_opt(cx, cast_expr.span).unwrap_or_default();
if_chain! {
if let LitKind::Int(n, _) = lit.node;
if let Some(src) = snippet_opt(cx, lit.span);
if cast_to.is_floating_point();
if let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit.node);
let from_nbits = 128 - n.leading_zeros();
let to_nbits = fp_ty_mantissa_nbits(cast_to);
if from_nbits != 0 && to_nbits != 0 && from_nbits <= to_nbits && num_lit.is_decimal();
then {
let literal_str = if is_unary_neg(cast_expr) { format!("-{}", num_lit.integer) } else { num_lit.integer.into() };
lint_unnecessary_cast(cx, expr, &literal_str, cast_from, cast_to);
return true
}
}
match lit.node {
LitKind::Int(_, LitIntType::Unsuffixed) if cast_to.is_integral() => {
lint_unnecessary_cast(cx, expr, &literal_str, cast_from, cast_to);
},
LitKind::Float(_, LitFloatType::Unsuffixed) if cast_to.is_floating_point() => {
lint_unnecessary_cast(cx, expr, &literal_str, cast_from, cast_to);
},
LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::Float(_, LitFloatType::Unsuffixed) => {},
_ => {
if cast_from.kind() == cast_to.kind() && !in_external_macro(cx.sess(), expr.span) {
span_lint(
cx,
UNNECESSARY_CAST,
expr.span,
&format!(
"casting to the same type is unnecessary (`{}` -> `{}`)",
cast_from, cast_to
),
);
return true;
}
},
}
}
false
}
fn lint_unnecessary_cast(cx: &LateContext<'_>, expr: &Expr<'_>, literal_str: &str, cast_from: Ty<'_>, cast_to: Ty<'_>) {
let literal_kind_name = if cast_from.is_integral() { "integer" } else { "float" };
span_lint_and_sugg(
cx,
UNNECESSARY_CAST,
expr.span,
&format!("casting {} literal to `{}` is unnecessary", literal_kind_name, cast_to),
"try",
format!("{}_{}", literal_str.trim_end_matches('.'), cast_to),
Applicability::MachineApplicable,
);
}
fn get_numeric_literal<'e>(expr: &'e Expr<'e>) -> Option<&'e Lit> {
match expr.kind {
ExprKind::Lit(ref lit) => Some(lit),
ExprKind::Unary(UnOp::Neg, e) => {
if let ExprKind::Lit(ref lit) = e.kind {
Some(lit)
} else {
None
}
},
_ => None,
}
}
/// Returns the mantissa width in bits of a floating-point type.
/// Returns 0 if the type is not a floating-point type.
fn fp_ty_mantissa_nbits(typ: Ty<'_>) -> u32 {
match typ.kind() {
ty::Float(FloatTy::F32) => 23,
ty::Float(FloatTy::F64) | ty::Infer(InferTy::FloatVar(_)) => 52,
_ => 0,
}
}
fn is_unary_neg(expr: &Expr<'_>) -> bool {
matches!(expr.kind, ExprKind::Unary(UnOp::Neg, _))
}
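
To make the literal-to-float branch concrete, here is a self-contained sketch (illustration only; `fits_in_mantissa` is a hypothetical helper) of the `from_nbits <= to_nbits` comparison: an integer literal cast is treated as unnecessary only when its significant bits fit in the target float's mantissa (23 bits for `f32`, 52 for `f64`).

```rust
fn fits_in_mantissa(n: u128, mantissa_bits: u32) -> bool {
    // Same idea as `from_nbits <= to_nbits` above: count significant bits.
    let needed = 128 - n.leading_zeros();
    needed != 0 && needed <= mantissa_bits
}

fn main() {
    // `100 as f32` is flagged: 100 needs 7 bits, f32 has a 23-bit mantissa.
    assert!(fits_in_mantissa(100, 23));
    // `16_777_217 as f32` is left alone: it needs 25 bits and would round.
    assert!(!fits_in_mantissa(16_777_217, 23));
    // The same literal cast to f64 is flagged again (52-bit mantissa).
    assert!(fits_in_mantissa(16_777_217, 52));
}
```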


@ -0,0 +1,25 @@
use rustc_middle::ty::{self, IntTy, Ty, TyCtxt, UintTy};
/// Returns the size in bits of an integral type.
/// Will return 0 if the type is not an int or uint variant
pub(super) fn int_ty_to_nbits(typ: Ty<'_>, tcx: TyCtxt<'_>) -> u64 {
match typ.kind() {
ty::Int(i) => match i {
IntTy::Isize => tcx.data_layout.pointer_size.bits(),
IntTy::I8 => 8,
IntTy::I16 => 16,
IntTy::I32 => 32,
IntTy::I64 => 64,
IntTy::I128 => 128,
},
ty::Uint(i) => match i {
UintTy::Usize => tcx.data_layout.pointer_size.bits(),
UintTy::U8 => 8,
UintTy::U16 => 16,
UintTy::U32 => 32,
UintTy::U64 => 64,
UintTy::U128 => 128,
},
_ => 0,
}
}


@ -0,0 +1,360 @@
//! lint on manually implemented checked conversions that could be transformed into `try_from`
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::{BinOp, BinOpKind, Expr, ExprKind, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_semver::RustcVersion;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use crate::utils::{meets_msrv, snippet_with_applicability, span_lint_and_sugg, SpanlessEq};
const CHECKED_CONVERSIONS_MSRV: RustcVersion = RustcVersion::new(1, 34, 0);
declare_clippy_lint! {
/// **What it does:** Checks for explicit bounds checking when casting.
///
/// **Why is this bad?** Reduces the readability of statements & is error prone.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # let foo: u32 = 5;
/// # let _ =
/// foo <= i32::MAX as u32
/// # ;
/// ```
///
/// Could be written:
///
/// ```rust
/// # use std::convert::TryFrom;
/// # let foo = 1;
/// # let _ =
/// i32::try_from(foo).is_ok()
/// # ;
/// ```
pub CHECKED_CONVERSIONS,
pedantic,
"`try_from` could replace manual bounds checking when casting"
}
pub struct CheckedConversions {
msrv: Option<RustcVersion>,
}
impl CheckedConversions {
#[must_use]
pub fn new(msrv: Option<RustcVersion>) -> Self {
Self { msrv }
}
}
impl_lint_pass!(CheckedConversions => [CHECKED_CONVERSIONS]);
impl<'tcx> LateLintPass<'tcx> for CheckedConversions {
fn check_expr(&mut self, cx: &LateContext<'_>, item: &Expr<'_>) {
if !meets_msrv(self.msrv.as_ref(), &CHECKED_CONVERSIONS_MSRV) {
return;
}
let result = if_chain! {
if !in_external_macro(cx.sess(), item.span);
if let ExprKind::Binary(op, ref left, ref right) = &item.kind;
then {
match op.node {
BinOpKind::Ge | BinOpKind::Le => single_check(item),
BinOpKind::And => double_check(cx, left, right),
_ => None,
}
} else {
None
}
};
if let Some(cv) = result {
if let Some(to_type) = cv.to_type {
let mut applicability = Applicability::MachineApplicable;
let snippet = snippet_with_applicability(cx, cv.expr_to_cast.span, "_", &mut applicability);
span_lint_and_sugg(
cx,
CHECKED_CONVERSIONS,
item.span,
"checked cast can be simplified",
"try",
format!("{}::try_from({}).is_ok()", to_type, snippet),
applicability,
);
}
}
}
extract_msrv_attr!(LateContext);
}
/// Searches for a single upper-bound check on a conversion from an unsigned type.
/// TODO: handle the case signed -> larger unsigned, which only needs `x >= 0`.
fn single_check<'tcx>(expr: &'tcx Expr<'tcx>) -> Option<Conversion<'tcx>> {
check_upper_bound(expr).filter(|cv| cv.cvt == ConversionType::FromUnsigned)
}
/// Searches for a combination of upper & lower bound checks
fn double_check<'a>(cx: &LateContext<'_>, left: &'a Expr<'_>, right: &'a Expr<'_>) -> Option<Conversion<'a>> {
let upper_lower = |l, r| {
let upper = check_upper_bound(l);
let lower = check_lower_bound(r);
upper.zip(lower).and_then(|(l, r)| l.combine(r, cx))
};
upper_lower(left, right).or_else(|| upper_lower(right, left))
}
/// Contains the result of a tried conversion check
#[derive(Clone, Debug)]
struct Conversion<'a> {
cvt: ConversionType,
expr_to_cast: &'a Expr<'a>,
to_type: Option<&'a str>,
}
/// The kind of conversion that is checked
#[derive(Copy, Clone, Debug, PartialEq)]
enum ConversionType {
SignedToUnsigned,
SignedToSigned,
FromUnsigned,
}
impl<'a> Conversion<'a> {
/// Combine multiple conversions if they are compatible
pub fn combine(self, other: Self, cx: &LateContext<'_>) -> Option<Conversion<'a>> {
if self.is_compatible(&other, cx) {
// Prefer a Conversion that contains a type-constraint
Some(if self.to_type.is_some() { self } else { other })
} else {
None
}
}
/// Checks if two conversions are compatible
/// same type of conversion, same 'castee' and same 'to type'
pub fn is_compatible(&self, other: &Self, cx: &LateContext<'_>) -> bool {
(self.cvt == other.cvt)
&& (SpanlessEq::new(cx).eq_expr(self.expr_to_cast, other.expr_to_cast))
&& (self.has_compatible_to_type(other))
}
/// Checks if the to-type is the same (if there is a type constraint)
fn has_compatible_to_type(&self, other: &Self) -> bool {
match (self.to_type, other.to_type) {
(Some(l), Some(r)) => l == r,
_ => true,
}
}
/// Try to construct a new conversion if the conversion type is valid
fn try_new(expr_to_cast: &'a Expr<'_>, from_type: &str, to_type: &'a str) -> Option<Conversion<'a>> {
ConversionType::try_new(from_type, to_type).map(|cvt| Conversion {
cvt,
expr_to_cast,
to_type: Some(to_type),
})
}
/// Construct a new conversion without type constraint
fn new_any(expr_to_cast: &'a Expr<'_>) -> Conversion<'a> {
Conversion {
cvt: ConversionType::SignedToUnsigned,
expr_to_cast,
to_type: None,
}
}
}
impl ConversionType {
/// Creates a conversion type if the type is allowed & conversion is valid
#[must_use]
fn try_new(from: &str, to: &str) -> Option<Self> {
if UINTS.contains(&from) {
Some(Self::FromUnsigned)
} else if SINTS.contains(&from) {
if UINTS.contains(&to) {
Some(Self::SignedToUnsigned)
} else if SINTS.contains(&to) {
Some(Self::SignedToSigned)
} else {
None
}
} else {
None
}
}
}
/// Check for `expr <= (to_type::MAX as from_type)`
fn check_upper_bound<'tcx>(expr: &'tcx Expr<'tcx>) -> Option<Conversion<'tcx>> {
if_chain! {
if let ExprKind::Binary(ref op, ref left, ref right) = &expr.kind;
if let Some((candidate, check)) = normalize_le_ge(op, left, right);
if let Some((from, to)) = get_types_from_cast(check, INTS, "max_value", "MAX");
then {
Conversion::try_new(candidate, from, to)
} else {
None
}
}
}
/// Check for `expr >= 0|(to_type::MIN as from_type)`
fn check_lower_bound<'tcx>(expr: &'tcx Expr<'tcx>) -> Option<Conversion<'tcx>> {
fn check_function<'a>(candidate: &'a Expr<'a>, check: &'a Expr<'a>) -> Option<Conversion<'a>> {
(check_lower_bound_zero(candidate, check)).or_else(|| (check_lower_bound_min(candidate, check)))
}
// First of all, we need a binary expression containing the expression & the cast
if let ExprKind::Binary(ref op, ref left, ref right) = &expr.kind {
normalize_le_ge(op, right, left).and_then(|(l, r)| check_function(l, r))
} else {
None
}
}
/// Check for `expr >= 0`
fn check_lower_bound_zero<'a>(candidate: &'a Expr<'_>, check: &'a Expr<'_>) -> Option<Conversion<'a>> {
if_chain! {
if let ExprKind::Lit(ref lit) = &check.kind;
if let LitKind::Int(0, _) = &lit.node;
then {
Some(Conversion::new_any(candidate))
} else {
None
}
}
}
/// Check for `expr >= (to_type::MIN as from_type)`
fn check_lower_bound_min<'a>(candidate: &'a Expr<'_>, check: &'a Expr<'_>) -> Option<Conversion<'a>> {
if let Some((from, to)) = get_types_from_cast(check, SINTS, "min_value", "MIN") {
Conversion::try_new(candidate, from, to)
} else {
None
}
}
/// Tries to extract the from- and to-type from a cast expression
fn get_types_from_cast<'a>(
expr: &'a Expr<'_>,
types: &'a [&str],
func: &'a str,
assoc_const: &'a str,
) -> Option<(&'a str, &'a str)> {
// `to_type::max_value() as from_type`
// or `to_type::MAX as from_type`
let call_from_cast: Option<(&Expr<'_>, &str)> = if_chain! {
// to_type::max_value(), from_type
if let ExprKind::Cast(ref limit, ref from_type) = &expr.kind;
if let TyKind::Path(ref from_type_path) = &from_type.kind;
if let Some(from_sym) = int_ty_to_sym(from_type_path);
then {
Some((limit, from_sym))
} else {
None
}
};
// `from_type::from(to_type::max_value())`
let limit_from: Option<(&Expr<'_>, &str)> = call_from_cast.or_else(|| {
if_chain! {
// `from_type::from, to_type::max_value()`
if let ExprKind::Call(ref from_func, ref args) = &expr.kind;
// `to_type::max_value()`
if args.len() == 1;
if let limit = &args[0];
// `from_type::from`
if let ExprKind::Path(ref path) = &from_func.kind;
if let Some(from_sym) = get_implementing_type(path, INTS, "from");
then {
Some((limit, from_sym))
} else {
None
}
}
});
if let Some((limit, from_type)) = limit_from {
match limit.kind {
// `from_type::from(_)`
ExprKind::Call(path, _) => {
if let ExprKind::Path(ref path) = path.kind {
// `to_type`
if let Some(to_type) = get_implementing_type(path, types, func) {
return Some((from_type, to_type));
}
}
},
// `to_type::MAX`
ExprKind::Path(ref path) => {
if let Some(to_type) = get_implementing_type(path, types, assoc_const) {
return Some((from_type, to_type));
}
},
_ => {},
}
};
None
}
/// Gets the type which implements the called function
fn get_implementing_type<'a>(path: &QPath<'_>, candidates: &'a [&str], function: &str) -> Option<&'a str> {
if_chain! {
if let QPath::TypeRelative(ref ty, ref path) = &path;
if path.ident.name.as_str() == function;
if let TyKind::Path(QPath::Resolved(None, ref tp)) = &ty.kind;
if let [int] = &*tp.segments;
let name = &int.ident.name.as_str();
then {
candidates.iter().find(|c| name == *c).cloned()
} else {
None
}
}
}
/// Gets the type as a string, if it is a supported integer
fn int_ty_to_sym<'tcx>(path: &QPath<'_>) -> Option<&'tcx str> {
if_chain! {
if let QPath::Resolved(_, ref path) = *path;
if let [ty] = &*path.segments;
let name = &ty.ident.name.as_str();
then {
INTS.iter().find(|c| name == *c).cloned()
} else {
None
}
}
}
/// Returns the expressions reordered as if they were `expr1 <= expr2`
fn normalize_le_ge<'a>(op: &BinOp, left: &'a Expr<'a>, right: &'a Expr<'a>) -> Option<(&'a Expr<'a>, &'a Expr<'a>)> {
match op.node {
BinOpKind::Le => Some((left, right)),
BinOpKind::Ge => Some((right, left)),
_ => None,
}
}
// Constants
const UINTS: &[&str] = &["u8", "u16", "u32", "u64", "usize"];
const SINTS: &[&str] = &["i8", "i16", "i32", "i64", "isize"];
const INTS: &[&str] = &["u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", "isize"];
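
A standalone illustration (not from the vendored sources) of the two shapes the pass recognizes: a two-sided manual bound check on a signed value (the `double_check` path) and a single upper-bound check on an unsigned value (the `single_check` path), both replaceable by `try_from(..).is_ok()`.

```rust
use std::convert::TryFrom; // only needed on editions before 2021

fn main() {
    let x: i64 = 300;
    let u: u32 = 70_000;

    // Manual upper & lower bound check: what `double_check` matches.
    let fits_manually = x <= i16::MAX as i64 && x >= i16::MIN as i64;
    // The suggested replacement.
    let fits = i16::try_from(x).is_ok();
    assert_eq!(fits_manually, fits);

    // Single upper-bound check, accepted because `u` is unsigned (`single_check`).
    let fits_manually = u <= u16::MAX as u32;
    let fits = u16::try_from(u).is_ok();
    assert_eq!(fits_manually, fits);
}
```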


@ -0,0 +1,166 @@
//! calculate cognitive complexity and warn about overly complex functions
use rustc_ast::ast::Attribute;
use rustc_hir::intravisit::{walk_expr, FnKind, NestedVisitorMap, Visitor};
use rustc_hir::{Body, Expr, ExprKind, FnDecl, HirId};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::map::Map;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Span;
use rustc_span::{sym, BytePos};
use crate::utils::{is_type_diagnostic_item, snippet_opt, span_lint_and_help, LimitStack};
declare_clippy_lint! {
/// **What it does:** Checks for methods with high cognitive complexity.
///
/// **Why is this bad?** Methods of high cognitive complexity tend to be hard to
/// both read and maintain. Also LLVM will tend to optimize small methods better.
///
/// **Known problems:** Sometimes it's hard to find a way to reduce the
/// complexity.
///
/// **Example:** No. You'll see it when you get the warning.
pub COGNITIVE_COMPLEXITY,
nursery,
"functions that should be split up into multiple functions"
}
pub struct CognitiveComplexity {
limit: LimitStack,
}
impl CognitiveComplexity {
#[must_use]
pub fn new(limit: u64) -> Self {
Self {
limit: LimitStack::new(limit),
}
}
}
impl_lint_pass!(CognitiveComplexity => [COGNITIVE_COMPLEXITY]);
impl CognitiveComplexity {
#[allow(clippy::cast_possible_truncation)]
fn check<'tcx>(
&mut self,
cx: &LateContext<'tcx>,
kind: FnKind<'tcx>,
decl: &'tcx FnDecl<'_>,
body: &'tcx Body<'_>,
body_span: Span,
) {
if body_span.from_expansion() {
return;
}
let expr = &body.value;
let mut helper = CcHelper { cc: 1, returns: 0 };
helper.visit_expr(expr);
let CcHelper { cc, returns } = helper;
let ret_ty = cx.typeck_results().node_type(expr.hir_id);
let ret_adjust = if is_type_diagnostic_item(cx, ret_ty, sym::result_type) {
returns
} else {
#[allow(clippy::integer_division)]
(returns / 2)
};
let mut rust_cc = cc;
// prevent degenerate cases where unreachable code contains `return` statements
if rust_cc >= ret_adjust {
rust_cc -= ret_adjust;
}
if rust_cc > self.limit.limit() {
let fn_span = match kind {
FnKind::ItemFn(ident, _, _, _) | FnKind::Method(ident, _, _) => ident.span,
FnKind::Closure => {
let header_span = body_span.with_hi(decl.output.span().lo());
let pos = snippet_opt(cx, header_span).and_then(|snip| {
let low_offset = snip.find('|')?;
let high_offset = 1 + snip.get(low_offset + 1..)?.find('|')?;
let low = header_span.lo() + BytePos(low_offset as u32);
let high = low + BytePos(high_offset as u32 + 1);
Some((low, high))
});
if let Some((low, high)) = pos {
Span::new(low, high, header_span.ctxt())
} else {
return;
}
},
};
span_lint_and_help(
cx,
COGNITIVE_COMPLEXITY,
fn_span,
&format!(
"the function has a cognitive complexity of ({}/{})",
rust_cc,
self.limit.limit()
),
None,
"you could split it up into multiple smaller functions",
);
}
}
}
impl<'tcx> LateLintPass<'tcx> for CognitiveComplexity {
fn check_fn(
&mut self,
cx: &LateContext<'tcx>,
kind: FnKind<'tcx>,
decl: &'tcx FnDecl<'_>,
body: &'tcx Body<'_>,
span: Span,
hir_id: HirId,
) {
let def_id = cx.tcx.hir().local_def_id(hir_id);
if !cx.tcx.has_attr(def_id.to_def_id(), sym::test) {
self.check(cx, kind, decl, body, span);
}
}
fn enter_lint_attrs(&mut self, cx: &LateContext<'tcx>, attrs: &'tcx [Attribute]) {
self.limit.push_attrs(cx.sess(), attrs, "cognitive_complexity");
}
fn exit_lint_attrs(&mut self, cx: &LateContext<'tcx>, attrs: &'tcx [Attribute]) {
self.limit.pop_attrs(cx.sess(), attrs, "cognitive_complexity");
}
}
struct CcHelper {
cc: u64,
returns: u64,
}
impl<'tcx> Visitor<'tcx> for CcHelper {
type Map = Map<'tcx>;
fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
walk_expr(self, e);
match e.kind {
ExprKind::If(_, _, _) => {
self.cc += 1;
},
ExprKind::Match(_, ref arms, _) => {
if arms.len() > 1 {
self.cc += 1;
}
self.cc += arms.iter().filter(|arm| arm.guard.is_some()).count() as u64;
},
ExprKind::Ret(_) => self.returns += 1,
_ => {},
}
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
}
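
Since the lint's example section is intentionally empty, here is a rough standalone walk-through (the numbers are an informal reading of `CcHelper`, not compiler output): start at 1, add 1 per `if`, add 1 per `match` with more than one arm plus 1 per arm guard, then subtract the return adjustment (the full return count for `Result`-returning functions, half of it otherwise).

```rust
fn classify(n: i32) -> &'static str {
    // base complexity: 1
    if n < 0 {
        // +1 for the `if`
        return "negative"; // counted towards the return adjustment
    }
    match n {
        // +1 for a match with more than one arm
        0 => "zero",
        x if x % 2 == 0 => "even", // +1 for the arm guard
        _ => "odd",
    }
}

fn main() {
    // cc = 1 + 1 (if) + 1 (match) + 1 (guard) = 4; returns = 1, and since the
    // function does not return a `Result`, the adjustment is 1 / 2 = 0,
    // leaving a final score of 4, well below the default limit.
    println!("{} {} {}", classify(-3), classify(4), classify(7));
}
```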


@ -0,0 +1,183 @@
//! Checks for if expressions that contain only an if expression.
//!
//! For example, the lint would catch:
//!
//! ```rust,ignore
//! if x {
//! if y {
//! println!("Hello world");
//! }
//! }
//! ```
//!
//! This lint is **warn** by default
use if_chain::if_chain;
use rustc_ast::ast;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use crate::utils::sugg::Sugg;
use crate::utils::{snippet_block, snippet_block_with_applicability, span_lint_and_sugg, span_lint_and_then};
use rustc_errors::Applicability;
declare_clippy_lint! {
/// **What it does:** Checks for nested `if` statements which can be collapsed
/// by `&&`-combining their conditions.
///
/// **Why is this bad?** Each `if`-statement adds one level of nesting, which
/// makes code look more complex than it really is.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// if x {
/// if y {
/// …
/// }
/// }
///
/// ```
///
/// Should be written:
///
/// ```rust,ignore
/// if x && y {
/// …
/// }
/// ```
pub COLLAPSIBLE_IF,
style,
"nested `if`s that can be collapsed (e.g., `if x { if y { ... } }`)"
}
declare_clippy_lint! {
/// **What it does:** Checks for collapsible `else { if ... }` expressions
/// that can be collapsed to `else if ...`.
///
/// **Why is this bad?** Each `if`-statement adds one level of nesting, which
/// makes code look more complex than it really is.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
///
/// if x {
/// …
/// } else {
/// if y {
/// …
/// }
/// }
/// ```
///
/// Should be written:
///
/// ```rust,ignore
/// if x {
/// …
/// } else if y {
/// …
/// }
/// ```
pub COLLAPSIBLE_ELSE_IF,
style,
"nested `else`-`if` expressions that can be collapsed (e.g., `else { if x { ... } }`)"
}
declare_lint_pass!(CollapsibleIf => [COLLAPSIBLE_IF, COLLAPSIBLE_ELSE_IF]);
impl EarlyLintPass for CollapsibleIf {
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
if !expr.span.from_expansion() {
check_if(cx, expr)
}
}
}
fn check_if(cx: &EarlyContext<'_>, expr: &ast::Expr) {
if let ast::ExprKind::If(check, then, else_) = &expr.kind {
if let Some(else_) = else_ {
check_collapsible_maybe_if_let(cx, else_);
} else if let ast::ExprKind::Let(..) = check.kind {
// Prevent triggering on `if let a = b { if c { .. } }`.
} else {
check_collapsible_no_if_let(cx, expr, check, then);
}
}
}
fn block_starts_with_comment(cx: &EarlyContext<'_>, expr: &ast::Block) -> bool {
// We trim all opening braces and whitespace, then check whether the remaining text starts with a comment.
let trimmed_block_text = snippet_block(cx, expr.span, "..", None)
.trim_start_matches(|c: char| c.is_whitespace() || c == '{')
.to_owned();
trimmed_block_text.starts_with("//") || trimmed_block_text.starts_with("/*")
}
fn check_collapsible_maybe_if_let(cx: &EarlyContext<'_>, else_: &ast::Expr) {
if_chain! {
if let ast::ExprKind::Block(ref block, _) = else_.kind;
if !block_starts_with_comment(cx, block);
if let Some(else_) = expr_block(block);
if else_.attrs.is_empty();
if !else_.span.from_expansion();
if let ast::ExprKind::If(..) = else_.kind;
then {
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
COLLAPSIBLE_ELSE_IF,
block.span,
"this `else { if .. }` block can be collapsed",
"collapse nested if block",
snippet_block_with_applicability(cx, else_.span, "..", Some(block.span), &mut applicability).into_owned(),
applicability,
);
}
}
}
fn check_collapsible_no_if_let(cx: &EarlyContext<'_>, expr: &ast::Expr, check: &ast::Expr, then: &ast::Block) {
if_chain! {
if !block_starts_with_comment(cx, then);
if let Some(inner) = expr_block(then);
if inner.attrs.is_empty();
if let ast::ExprKind::If(ref check_inner, ref content, None) = inner.kind;
// Prevent triggering on `if c { if let a = b { .. } }`.
if !matches!(check_inner.kind, ast::ExprKind::Let(..));
if expr.span.ctxt() == inner.span.ctxt();
then {
span_lint_and_then(cx, COLLAPSIBLE_IF, expr.span, "this `if` statement can be collapsed", |diag| {
let lhs = Sugg::ast(cx, check, "..");
let rhs = Sugg::ast(cx, check_inner, "..");
diag.span_suggestion(
expr.span,
"collapse nested if block",
format!(
"if {} {}",
lhs.and(&rhs),
snippet_block(cx, content.span, "..", Some(expr.span)),
),
Applicability::MachineApplicable, // snippet
);
});
}
}
}
/// If the block contains only one expression, return it.
fn expr_block(block: &ast::Block) -> Option<&ast::Expr> {
let mut it = block.stmts.iter();
if let (Some(stmt), None) = (it.next(), it.next()) {
match stmt.kind {
ast::StmtKind::Expr(ref expr) | ast::StmtKind::Semi(ref expr) => Some(expr),
_ => None,
}
} else {
None
}
}
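
One wrinkle worth a standalone example (illustration only): `block_starts_with_comment` keeps the lint quiet when the block being collapsed opens with a comment, so the suggestion never silently displaces it.

```rust
fn main() {
    let (x, y) = (true, true);

    // Collapsible: suggested as `if x && y { .. }`.
    if x {
        if y {
            println!("both");
        }
    }

    // Not collapsed: the outer `then` block starts with a comment before the
    // nested `if`, and merging the conditions would displace it.
    if x {
        // explain why y matters here
        if y {
            println!("both, with context");
        }
    }
}
```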


@ -0,0 +1,188 @@
use crate::utils::visitors::LocalUsedVisitor;
use crate::utils::{path_to_local, span_lint_and_then, SpanlessEq};
use if_chain::if_chain;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
use rustc_hir::{Arm, Expr, ExprKind, Guard, HirId, Pat, PatKind, QPath, StmtKind, UnOp};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{DefIdTree, TyCtxt, TypeckResults};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::{MultiSpan, Span};
declare_clippy_lint! {
/// **What it does:** Finds nested `match` or `if let` expressions where the patterns may be "collapsed" together
/// without adding any branches.
///
/// Note that this lint is not intended to find _all_ cases where nested match patterns can be merged, but only
/// cases where merging would most likely make the code more readable.
///
/// **Why is this bad?** It is unnecessarily verbose and complex.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust
/// fn func(opt: Option<Result<u64, String>>) {
/// let n = match opt {
/// Some(n) => match n {
/// Ok(n) => n,
/// _ => return,
/// }
/// None => return,
/// };
/// }
/// ```
/// Use instead:
/// ```rust
/// fn func(opt: Option<Result<u64, String>>) {
/// let n = match opt {
/// Some(Ok(n)) => n,
/// _ => return,
/// };
/// }
/// ```
pub COLLAPSIBLE_MATCH,
style,
"Nested `match` or `if let` expressions where the patterns may be \"collapsed\" together."
}
declare_lint_pass!(CollapsibleMatch => [COLLAPSIBLE_MATCH]);
impl<'tcx> LateLintPass<'tcx> for CollapsibleMatch {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'tcx>) {
if let ExprKind::Match(_expr, arms, _source) = expr.kind {
if let Some(wild_arm) = arms.iter().rfind(|arm| arm_is_wild_like(arm, cx.tcx)) {
for arm in arms {
check_arm(arm, wild_arm, cx);
}
}
}
}
}
fn check_arm<'tcx>(arm: &Arm<'tcx>, wild_outer_arm: &Arm<'tcx>, cx: &LateContext<'tcx>) {
if_chain! {
let expr = strip_singleton_blocks(arm.body);
if let ExprKind::Match(expr_in, arms_inner, _) = expr.kind;
// the outer arm pattern and the inner match must come from the same syntax context
if expr_in.span.ctxt() == arm.pat.span.ctxt();
// the inner match must have exactly two arms for this lint to apply
if arms_inner.len() == 2;
// no if guards on the inner match
if arms_inner.iter().all(|arm| arm.guard.is_none());
// match expression must be a local binding
// match <local> { .. }
if let Some(binding_id) = path_to_local(strip_ref_operators(expr_in, cx.typeck_results()));
// one of the branches must be "wild-like"
if let Some(wild_inner_arm_idx) = arms_inner.iter().rposition(|arm_inner| arm_is_wild_like(arm_inner, cx.tcx));
let (wild_inner_arm, non_wild_inner_arm) =
(&arms_inner[wild_inner_arm_idx], &arms_inner[1 - wild_inner_arm_idx]);
if !pat_contains_or(non_wild_inner_arm.pat);
// the binding must come from the pattern of the containing match arm
// ..<local>.. => match <local> { .. }
if let Some(binding_span) = find_pat_binding(arm.pat, binding_id);
// the "wild-like" branches must be equal
if SpanlessEq::new(cx).eq_expr(wild_inner_arm.body, wild_outer_arm.body);
// the binding must not be used in the if guard
let mut used_visitor = LocalUsedVisitor::new(cx, binding_id);
if match arm.guard {
None => true,
Some(Guard::If(expr) | Guard::IfLet(_, expr)) => !used_visitor.check_expr(expr),
};
// ...or anywhere in the inner match
if !arms_inner.iter().any(|arm| used_visitor.check_arm(arm));
then {
span_lint_and_then(
cx,
COLLAPSIBLE_MATCH,
expr.span,
"unnecessary nested match",
|diag| {
let mut help_span = MultiSpan::from_spans(vec![binding_span, non_wild_inner_arm.pat.span]);
help_span.push_span_label(binding_span, "replace this binding".into());
help_span.push_span_label(non_wild_inner_arm.pat.span, "with this pattern".into());
diag.span_help(help_span, "the outer pattern can be modified to include the inner pattern");
},
);
}
}
}
fn strip_singleton_blocks<'hir>(mut expr: &'hir Expr<'hir>) -> &'hir Expr<'hir> {
while let ExprKind::Block(block, _) = expr.kind {
match (block.stmts, block.expr) {
([stmt], None) => match stmt.kind {
StmtKind::Expr(e) | StmtKind::Semi(e) => expr = e,
_ => break,
},
([], Some(e)) => expr = e,
_ => break,
}
}
expr
}
/// A "wild-like" pattern is a binding, a wildcard (`_`), or `None`.
/// For this lint to apply, both the outer and inner match expressions
/// must have "wild-like" branches that can be combined.
fn arm_is_wild_like(arm: &Arm<'_>, tcx: TyCtxt<'_>) -> bool {
if arm.guard.is_some() {
return false;
}
match arm.pat.kind {
PatKind::Binding(..) | PatKind::Wild => true,
PatKind::Path(QPath::Resolved(None, path)) if is_none_ctor(path.res, tcx) => true,
_ => false,
}
}
fn find_pat_binding(pat: &Pat<'_>, hir_id: HirId) -> Option<Span> {
let mut span = None;
pat.walk_short(|p| match &p.kind {
// ignore OR patterns
PatKind::Or(_) => false,
PatKind::Binding(_bm, _, _ident, _) => {
let found = p.hir_id == hir_id;
if found {
span = Some(p.span);
}
!found
},
_ => true,
});
span
}
fn pat_contains_or(pat: &Pat<'_>) -> bool {
let mut result = false;
pat.walk(|p| {
let is_or = matches!(p.kind, PatKind::Or(_));
result |= is_or;
!is_or
});
result
}
fn is_none_ctor(res: Res, tcx: TyCtxt<'_>) -> bool {
if let Some(none_id) = tcx.lang_items().option_none_variant() {
if let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Const), id) = res {
if let Some(variant_id) = tcx.parent(id) {
return variant_id == none_id;
}
}
}
false
}
/// Removes `AddrOf` operators (`&`) or deref operators (`*`), but only if a reference type is
/// dereferenced. An overloaded deref such as `Vec` to slice would not be removed.
fn strip_ref_operators<'hir>(mut expr: &'hir Expr<'hir>, typeck_results: &TypeckResults<'_>) -> &'hir Expr<'hir> {
loop {
match expr.kind {
ExprKind::AddrOf(_, _, e) => expr = e,
ExprKind::Unary(UnOp::Deref, e) if typeck_results.expr_ty(e).is_ref() => expr = e,
_ => break,
}
}
expr
}
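
A standalone illustration (not from the vendored sources) of two of the checks above: the first match collapses, while the second keeps its nesting because the inner match carries an arm guard, which the `arms_inner.iter().all(|arm| arm.guard.is_none())` condition rejects.

```rust
fn collapsible(opt: Option<Result<u64, String>>) -> u64 {
    // Lints: could be written as `Some(Ok(n)) => n, _ => 0`.
    match opt {
        Some(n) => match n {
            Ok(n) => n,
            _ => 0,
        },
        None => 0,
    }
}

fn not_collapsible(opt: Option<u64>) -> u64 {
    // Does not lint: the inner match has an arm guard.
    match opt {
        Some(n) => match n {
            m if m > 10 => m,
            _ => 0,
        },
        None => 0,
    }
}

fn main() {
    assert_eq!(collapsible(Some(Ok(7))), 7);
    assert_eq!(not_collapsible(Some(3)), 0);
}
```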


@ -0,0 +1,127 @@
use crate::utils::{
get_trait_def_id, if_sequence, implements_trait, parent_node_is_if_expr, paths, span_lint_and_help, SpanlessEq,
};
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks comparison chains written with `if` that can be
/// rewritten with `match` and `cmp`.
///
/// **Why is this bad?** `if` is not guaranteed to be exhaustive and conditionals can get
/// repetitive.
///
/// **Known problems:** The match statement may be slower due to the compiler
/// not inlining the call to cmp. See issue [#5354](https://github.com/rust-lang/rust-clippy/issues/5354)
///
/// **Example:**
/// ```rust,ignore
/// # fn a() {}
/// # fn b() {}
/// # fn c() {}
/// fn f(x: u8, y: u8) {
/// if x > y {
/// a()
/// } else if x < y {
/// b()
/// } else {
/// c()
/// }
/// }
/// ```
///
/// Could be written:
///
/// ```rust,ignore
/// use std::cmp::Ordering;
/// # fn a() {}
/// # fn b() {}
/// # fn c() {}
/// fn f(x: u8, y: u8) {
/// match x.cmp(&y) {
/// Ordering::Greater => a(),
/// Ordering::Less => b(),
/// Ordering::Equal => c()
/// }
/// }
/// ```
pub COMPARISON_CHAIN,
style,
"`if`s that can be rewritten with `match` and `cmp`"
}
declare_lint_pass!(ComparisonChain => [COMPARISON_CHAIN]);
impl<'tcx> LateLintPass<'tcx> for ComparisonChain {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if expr.span.from_expansion() {
return;
}
// We only care about the top-most `if` in the chain
if parent_node_is_if_expr(expr, cx) {
return;
}
// Check that there exists at least one explicit else condition
let (conds, _) = if_sequence(expr);
if conds.len() < 2 {
return;
}
for cond in conds.windows(2) {
if let (
&ExprKind::Binary(ref kind1, ref lhs1, ref rhs1),
&ExprKind::Binary(ref kind2, ref lhs2, ref rhs2),
) = (&cond[0].kind, &cond[1].kind)
{
if !kind_is_cmp(kind1.node) || !kind_is_cmp(kind2.node) {
return;
}
// Check that both sets of operands are equal
let mut spanless_eq = SpanlessEq::new(cx);
let same_fixed_operands = spanless_eq.eq_expr(lhs1, lhs2) && spanless_eq.eq_expr(rhs1, rhs2);
let same_transposed_operands = spanless_eq.eq_expr(lhs1, rhs2) && spanless_eq.eq_expr(rhs1, lhs2);
if !same_fixed_operands && !same_transposed_operands {
return;
}
// Check that if the operation is the same, either it's not `==` or the operands are transposed
if kind1.node == kind2.node {
if kind1.node == BinOpKind::Eq {
return;
}
if !same_transposed_operands {
return;
}
}
// Check that the type being compared implements `core::cmp::Ord`
let ty = cx.typeck_results().expr_ty(lhs1);
let is_ord = get_trait_def_id(cx, &paths::ORD).map_or(false, |id| implements_trait(cx, ty, id, &[]));
if !is_ord {
return;
}
} else {
// We only care about comparison chains
return;
}
}
span_lint_and_help(
cx,
COMPARISON_CHAIN,
expr.span,
"`if` chain can be rewritten with `match`",
None,
"consider rewriting the `if` chain to use `cmp` and `match`",
)
}
}
fn kind_is_cmp(kind: BinOpKind) -> bool {
matches!(kind, BinOpKind::Lt | BinOpKind::Gt | BinOpKind::Eq)
}
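
A brief self-contained example (illustration only) of the transposed-operand case handled above: the chain still qualifies when the second comparison swaps the operands instead of flipping the operator, since a single `x.cmp(&y)` covers both.

```rust
use std::cmp::Ordering;

fn describe(x: u8, y: u8) -> &'static str {
    // Comparison chain with transposed operands in the second branch;
    // `same_transposed_operands` above recognizes this shape too.
    if x > y {
        "greater"
    } else if y > x {
        "less"
    } else {
        "equal"
    }
}

fn describe_with_match(x: u8, y: u8) -> &'static str {
    // The rewrite the lint points toward.
    match x.cmp(&y) {
        Ordering::Greater => "greater",
        Ordering::Less => "less",
        Ordering::Equal => "equal",
    }
}

fn main() {
    assert_eq!(describe(3, 1), describe_with_match(3, 1));
    assert_eq!(describe(1, 3), describe_with_match(1, 3));
    assert_eq!(describe(2, 2), describe_with_match(2, 2));
}
```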


@ -0,0 +1 @@
pub use clippy_utils::consts::*;


@ -0,0 +1,211 @@
use crate::utils::{eq_expr_value, in_macro, search_same, SpanlessEq, SpanlessHash};
use crate::utils::{get_parent_expr, if_sequence, span_lint_and_note};
use rustc_hir::{Block, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks for consecutive `if`s with the same condition.
///
/// **Why is this bad?** This is probably a copy & paste error.
///
/// **Known problems:** Hopefully none.
///
/// **Example:**
/// ```ignore
/// if a == b {
/// …
/// } else if a == b {
/// …
/// }
/// ```
///
/// Note that this lint ignores all conditions with a function call as it could
/// have side effects:
///
/// ```ignore
/// if foo() {
/// …
/// } else if foo() { // not linted
/// …
/// }
/// ```
pub IFS_SAME_COND,
correctness,
"consecutive `if`s with the same condition"
}
declare_clippy_lint! {
/// **What it does:** Checks for consecutive `if`s with the same function call.
///
/// **Why is this bad?** This is probably a copy & paste error.
/// Despite the fact that the function can have side effects and `if` works as
/// intended, such an approach is implicit and can be considered a "code smell".
///
/// **Known problems:** Hopefully none.
///
/// **Example:**
/// ```ignore
/// if foo() == bar {
/// …
/// } else if foo() == bar {
/// …
/// }
/// ```
///
/// This probably should be:
/// ```ignore
/// if foo() == bar {
/// …
/// } else if foo() == baz {
/// …
/// }
/// ```
///
/// or, if the original code was not a typo and the called function mutates some state,
/// consider moving the mutation out of the `if` condition to avoid similarity to
/// a copy & paste error:
///
/// ```ignore
/// let first = foo();
/// if first == bar {
/// …
/// } else {
/// let second = foo();
/// if second == bar {
/// …
/// }
/// }
/// ```
pub SAME_FUNCTIONS_IN_IF_CONDITION,
pedantic,
"consecutive `if`s with the same function call"
}
declare_clippy_lint! {
/// **What it does:** Checks for `if/else` where the *then* part
/// and the *else* part have the same body.
///
/// **Why is this bad?** This is probably a copy & paste error.
///
/// **Known problems:** Hopefully none.
///
/// **Example:**
/// ```ignore
/// let foo = if … {
/// 42
/// } else {
/// 42
/// };
/// ```
pub IF_SAME_THEN_ELSE,
correctness,
"`if` with the same `then` and `else` blocks"
}
declare_lint_pass!(CopyAndPaste => [IFS_SAME_COND, SAME_FUNCTIONS_IN_IF_CONDITION, IF_SAME_THEN_ELSE]);
impl<'tcx> LateLintPass<'tcx> for CopyAndPaste {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if !expr.span.from_expansion() {
// skip ifs directly in else, it will be checked in the parent if
if let Some(&Expr {
kind: ExprKind::If(_, _, Some(ref else_expr)),
..
}) = get_parent_expr(cx, expr)
{
if else_expr.hir_id == expr.hir_id {
return;
}
}
let (conds, blocks) = if_sequence(expr);
lint_same_then_else(cx, &blocks);
lint_same_cond(cx, &conds);
lint_same_fns_in_if_cond(cx, &conds);
}
}
}
/// Implementation of `IF_SAME_THEN_ELSE`.
fn lint_same_then_else(cx: &LateContext<'_>, blocks: &[&Block<'_>]) {
let eq: &dyn Fn(&&Block<'_>, &&Block<'_>) -> bool =
&|&lhs, &rhs| -> bool { SpanlessEq::new(cx).eq_block(lhs, rhs) };
if let Some((i, j)) = search_same_sequenced(blocks, eq) {
span_lint_and_note(
cx,
IF_SAME_THEN_ELSE,
j.span,
"this `if` has identical blocks",
Some(i.span),
"same as this",
);
}
}
/// Implementation of `IFS_SAME_COND`.
fn lint_same_cond(cx: &LateContext<'_>, conds: &[&Expr<'_>]) {
let hash: &dyn Fn(&&Expr<'_>) -> u64 = &|expr| -> u64 {
let mut h = SpanlessHash::new(cx);
h.hash_expr(expr);
h.finish()
};
let eq: &dyn Fn(&&Expr<'_>, &&Expr<'_>) -> bool = &|&lhs, &rhs| -> bool { eq_expr_value(cx, lhs, rhs) };
for (i, j) in search_same(conds, hash, eq) {
span_lint_and_note(
cx,
IFS_SAME_COND,
j.span,
"this `if` has the same condition as a previous `if`",
Some(i.span),
"same as this",
);
}
}
/// Implementation of `SAME_FUNCTIONS_IN_IF_CONDITION`.
fn lint_same_fns_in_if_cond(cx: &LateContext<'_>, conds: &[&Expr<'_>]) {
let hash: &dyn Fn(&&Expr<'_>) -> u64 = &|expr| -> u64 {
let mut h = SpanlessHash::new(cx);
h.hash_expr(expr);
h.finish()
};
let eq: &dyn Fn(&&Expr<'_>, &&Expr<'_>) -> bool = &|&lhs, &rhs| -> bool {
// Do not lint if any expr originates from a macro
if in_macro(lhs.span) || in_macro(rhs.span) {
return false;
}
// Do not emit the warning if `IFS_SAME_COND` already produced it.
if eq_expr_value(cx, lhs, rhs) {
return false;
}
SpanlessEq::new(cx).eq_expr(lhs, rhs)
};
for (i, j) in search_same(conds, hash, eq) {
span_lint_and_note(
cx,
SAME_FUNCTIONS_IN_IF_CONDITION,
j.span,
"this `if` has the same function call as a previous `if`",
Some(i.span),
"same as this",
);
}
}
fn search_same_sequenced<T, Eq>(exprs: &[T], eq: Eq) -> Option<(&T, &T)>
where
Eq: Fn(&T, &T) -> bool,
{
for win in exprs.windows(2) {
if eq(&win[0], &win[1]) {
return Some((&win[0], &win[1]));
}
}
None
}
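
A small illustration (not from the vendored sources) of the difference between the two search strategies: identical blocks are reported only when they are adjacent in the chain (`search_same_sequenced`), while identical conditions are hashed and compared across the whole chain (`search_same`).

```rust
fn main() {
    let (a, b) = (1, 2);

    // IFS_SAME_COND: the first and third conditions are identical, and the
    // hash-based search pairs them even though they are not adjacent.
    if a == b {
        println!("first");
    } else if a < b {
        println!("second");
    } else if a == b {
        println!("third");
    }

    // IF_SAME_THEN_ELSE: the two identical blocks are adjacent, which is the
    // only shape `search_same_sequenced` reports.
    let _x = if a < b { 42 } else { 42 };
}
```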


@ -0,0 +1,55 @@
use crate::utils::{is_copy, match_path, paths, span_lint_and_note};
use rustc_hir::{Impl, Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks for types that implement `Copy` as well as
/// `Iterator`.
///
/// **Why is this bad?** Implicit copies can be confusing when working with
/// iterator combinators.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// #[derive(Copy, Clone)]
/// struct Countdown(u8);
///
/// impl Iterator for Countdown {
/// // ...
/// }
///
/// let a: Vec<_> = my_iterator.take(1).collect();
/// let b: Vec<_> = my_iterator.collect();
/// ```
pub COPY_ITERATOR,
pedantic,
"implementing `Iterator` on a `Copy` type"
}
declare_lint_pass!(CopyIterator => [COPY_ITERATOR]);
impl<'tcx> LateLintPass<'tcx> for CopyIterator {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
if let ItemKind::Impl(Impl {
of_trait: Some(ref trait_ref),
..
}) = item.kind
{
let ty = cx.tcx.type_of(item.def_id);
if is_copy(cx, ty) && match_path(&trait_ref.path, &paths::ITERATOR) {
span_lint_and_note(
cx,
COPY_ITERATOR,
item.span,
"you are implementing `Iterator` on a `Copy` type",
None,
"consider implementing `IntoIterator` instead",
);
}
}
}
}


@ -0,0 +1,51 @@
use crate::utils::{match_def_path, paths, snippet, span_lint_and_sugg};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks usage of `std::fs::create_dir` and suggests using `std::fs::create_dir_all` instead.
///
/// **Why is this bad?** Sometimes `std::fs::create_dir` is mistakenly chosen over `std::fs::create_dir_all`.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust
/// std::fs::create_dir("foo");
/// ```
/// Use instead:
/// ```rust
/// std::fs::create_dir_all("foo");
/// ```
pub CREATE_DIR,
restriction,
"calling `std::fs::create_dir` instead of `std::fs::create_dir_all`"
}
declare_lint_pass!(CreateDir => [CREATE_DIR]);
impl LateLintPass<'_> for CreateDir {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
if_chain! {
if let ExprKind::Call(ref func, ref args) = expr.kind;
if let ExprKind::Path(ref path) = func.kind;
if let Some(def_id) = cx.qpath_res(path, func.hir_id).opt_def_id();
if match_def_path(cx, def_id, &paths::STD_FS_CREATE_DIR);
then {
span_lint_and_sugg(
cx,
CREATE_DIR,
expr.span,
"calling `std::fs::create_dir` where there may be a better way",
"consider calling `std::fs::create_dir_all` instead",
format!("create_dir_all({})", snippet(cx, args[0].span, "..")),
Applicability::MaybeIncorrect,
)
}
}
}
}


@ -0,0 +1,65 @@
use crate::utils::{snippet_opt, span_lint_and_help, span_lint_and_sugg};
use rustc_ast::ast;
use rustc_ast::tokenstream::TokenStream;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
declare_clippy_lint! {
/// **What it does:** Checks for usage of the `dbg!()` macro.
///
/// **Why is this bad?** The `dbg!` macro is intended as a debugging tool. It
/// should not be in version control.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// // Bad
/// dbg!(true)
///
/// // Good
/// true
/// ```
pub DBG_MACRO,
restriction,
"`dbg!` macro is intended as a debugging tool"
}
declare_lint_pass!(DbgMacro => [DBG_MACRO]);
impl EarlyLintPass for DbgMacro {
fn check_mac(&mut self, cx: &EarlyContext<'_>, mac: &ast::MacCall) {
if mac.path == sym!(dbg) {
if let Some(sugg) = tts_span(mac.args.inner_tokens()).and_then(|span| snippet_opt(cx, span)) {
span_lint_and_sugg(
cx,
DBG_MACRO,
mac.span(),
"`dbg!` macro is intended as a debugging tool",
"ensure to avoid having uses of it in version control",
sugg,
Applicability::MaybeIncorrect,
);
} else {
span_lint_and_help(
cx,
DBG_MACRO,
mac.span(),
"`dbg!` macro is intended as a debugging tool",
None,
"ensure to avoid having uses of it in version control",
);
}
}
}
}
// Get the span enclosing the entire token stream.
fn tts_span(tts: TokenStream) -> Option<Span> {
let mut cursor = tts.into_trees();
let first = cursor.next()?.span();
let span = cursor.last().map_or(first, |tree| first.to(tree.span()));
Some(span)
}


@ -0,0 +1,262 @@
use crate::utils::{
any_parent_is_automatically_derived, contains_name, match_def_path, paths, snippet_with_macro_callsite,
};
use crate::utils::{span_lint_and_note, span_lint_and_sugg};
use if_chain::if_chain;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::{Block, Expr, ExprKind, PatKind, QPath, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::Span;
declare_clippy_lint! {
/// **What it does:** Checks for literal calls to `Default::default()`.
///
/// **Why is this bad?** It's clearer to the reader to use the name of the type whose default is
/// being taken than the generic `Default`.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// // Bad
/// let s: String = Default::default();
///
/// // Good
/// let s = String::default();
/// ```
pub DEFAULT_TRAIT_ACCESS,
pedantic,
"checks for literal calls to `Default::default()`"
}
declare_clippy_lint! {
/// **What it does:** Checks for immediate reassignment of fields initialized
/// with Default::default().
///
/// **Why is this bad?** It's more idiomatic to use the [functional update syntax](https://doc.rust-lang.org/reference/expressions/struct-expr.html#functional-update-syntax).
///
/// **Known problems:** Assignments to patterns that are of tuple type are not linted.
///
/// **Example:**
/// Bad:
/// ```
/// # #[derive(Default)]
/// # struct A { i: i32 }
/// let mut a: A = Default::default();
/// a.i = 42;
/// ```
/// Use instead:
/// ```
/// # #[derive(Default)]
/// # struct A { i: i32 }
/// let a = A {
/// i: 42,
/// .. Default::default()
/// };
/// ```
pub FIELD_REASSIGN_WITH_DEFAULT,
style,
"binding initialized with Default should have its fields set in the initializer"
}
#[derive(Default)]
pub struct Default {
// Spans linted by `field_reassign_with_default`.
reassigned_linted: FxHashSet<Span>,
}
impl_lint_pass!(Default => [DEFAULT_TRAIT_ACCESS, FIELD_REASSIGN_WITH_DEFAULT]);
impl LateLintPass<'_> for Default {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
// Avoid cases already linted by `field_reassign_with_default`
if !self.reassigned_linted.contains(&expr.span);
if let ExprKind::Call(ref path, ..) = expr.kind;
if !any_parent_is_automatically_derived(cx.tcx, expr.hir_id);
if let ExprKind::Path(ref qpath) = path.kind;
if let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id();
if match_def_path(cx, def_id, &paths::DEFAULT_TRAIT_METHOD);
// Detect and ignore <Foo as Default>::default() because these calls do explicitly name the type.
if let QPath::Resolved(None, _path) = qpath;
then {
let expr_ty = cx.typeck_results().expr_ty(expr);
if let ty::Adt(def, ..) = expr_ty.kind() {
// TODO: Work out a way to put "whatever the imported way of referencing
// this type in this file" rather than a fully-qualified type.
let replacement = format!("{}::default()", cx.tcx.def_path_str(def.did));
span_lint_and_sugg(
cx,
DEFAULT_TRAIT_ACCESS,
expr.span,
&format!("calling `{}` is more clear than this expression", replacement),
"try",
replacement,
Applicability::Unspecified, // First resolve the TODO above
);
}
}
}
}
fn check_block<'tcx>(&mut self, cx: &LateContext<'tcx>, block: &Block<'tcx>) {
// start from the `let mut _ = _::default();` and look at all the following
// statements, see if they re-assign the fields of the binding
let stmts_head = match block.stmts {
// Skip the last statement since there cannot possibly be any following statements that re-assign fields.
[head @ .., _] if !head.is_empty() => head,
_ => return,
};
for (stmt_idx, stmt) in stmts_head.iter().enumerate() {
// find all binding statements like `let mut _ = T::default()` where `T::default()` is the
// `default` method of the `Default` trait, and store statement index in current block being
// checked and the name of the bound variable
let (local, variant, binding_name, binding_type, span) = if_chain! {
// only take `let ...` statements
if let StmtKind::Local(local) = stmt.kind;
if let Some(expr) = local.init;
if !any_parent_is_automatically_derived(cx.tcx, expr.hir_id);
if !in_external_macro(cx.tcx.sess, expr.span);
// only take bindings to identifiers
if let PatKind::Binding(_, binding_id, ident, _) = local.pat.kind;
// only when assigning `... = Default::default()`
if is_expr_default(expr, cx);
let binding_type = cx.typeck_results().node_type(binding_id);
if let Some(adt) = binding_type.ty_adt_def();
if adt.is_struct();
let variant = adt.non_enum_variant();
if adt.did.is_local() || !variant.is_field_list_non_exhaustive();
let module_did = cx.tcx.parent_module(stmt.hir_id).to_def_id();
if variant
.fields
.iter()
.all(|field| field.vis.is_accessible_from(module_did, cx.tcx));
then {
(local, variant, ident.name, binding_type, expr.span)
} else {
continue;
}
};
// find all later statements where the fields of the binding set via
// `Default::default()` get reassigned, unless the reassignment refers to the original binding
let mut first_assign = None;
let mut assigned_fields = Vec::new();
let mut cancel_lint = false;
for consecutive_statement in &block.stmts[stmt_idx + 1..] {
// find out if and which field was set by this `consecutive_statement`
if let Some((field_ident, assign_rhs)) = field_reassigned_by_stmt(consecutive_statement, binding_name) {
// interrupt and cancel lint if assign_rhs references the original binding
if contains_name(binding_name, assign_rhs) {
cancel_lint = true;
break;
}
// if the field was previously assigned, replace the assignment, otherwise insert the assignment
if let Some(prev) = assigned_fields
.iter_mut()
.find(|(field_name, _)| field_name == &field_ident.name)
{
*prev = (field_ident.name, assign_rhs);
} else {
assigned_fields.push((field_ident.name, assign_rhs));
}
// also set first instance of error for help message
if first_assign.is_none() {
first_assign = Some(consecutive_statement);
}
}
// interrupt if no field was assigned, since we only want to look at consecutive statements
else {
break;
}
}
// if there are incorrectly assigned fields, do a span_lint_and_note to suggest
// construction using `Ty { fields, ..Default::default() }`
if !assigned_fields.is_empty() && !cancel_lint {
// if not all fields of the struct are assigned, add `..Default::default()` to the suggestion.
let ext_with_default = !variant
.fields
.iter()
.all(|field| assigned_fields.iter().any(|(a, _)| a == &field.ident.name));
let field_list = assigned_fields
.into_iter()
.map(|(field, rhs)| {
// extract and store the assigned value for help message
let value_snippet = snippet_with_macro_callsite(cx, rhs.span, "..");
format!("{}: {}", field, value_snippet)
})
.collect::<Vec<String>>()
.join(", ");
let sugg = if ext_with_default {
if field_list.is_empty() {
format!("{}::default()", binding_type)
} else {
format!("{} {{ {}, ..Default::default() }}", binding_type, field_list)
}
} else {
format!("{} {{ {} }}", binding_type, field_list)
};
// span lint once per statement that binds default
span_lint_and_note(
cx,
FIELD_REASSIGN_WITH_DEFAULT,
first_assign.unwrap().span,
"field assignment outside of initializer for an instance created with Default::default()",
Some(local.span),
&format!(
"consider initializing the variable with `{}` and removing relevant reassignments",
sugg
),
);
self.reassigned_linted.insert(span);
}
}
}
}
/// Checks if the given expression is the `default` method belonging to the `Default` trait.
fn is_expr_default<'tcx>(expr: &'tcx Expr<'tcx>, cx: &LateContext<'tcx>) -> bool {
if_chain! {
if let ExprKind::Call(ref fn_expr, _) = &expr.kind;
if let ExprKind::Path(qpath) = &fn_expr.kind;
if let Res::Def(_, def_id) = cx.qpath_res(qpath, fn_expr.hir_id);
then {
// right hand side of assignment is `Default::default`
match_def_path(cx, def_id, &paths::DEFAULT_TRAIT_METHOD)
} else {
false
}
}
}
/// Returns the reassigned field and the assigning expression (right-hand side of assign).
fn field_reassigned_by_stmt<'tcx>(this: &Stmt<'tcx>, binding_name: Symbol) -> Option<(Ident, &'tcx Expr<'tcx>)> {
if_chain! {
// only take assignments
if let StmtKind::Semi(ref later_expr) = this.kind;
if let ExprKind::Assign(ref assign_lhs, ref assign_rhs, _) = later_expr.kind;
// only take assignments to fields where the left-hand side field is a field of
// the same binding as the previous statement
if let ExprKind::Field(ref binding, field_ident) = assign_lhs.kind;
if let ExprKind::Path(QPath::Resolved(_, path)) = binding.kind;
if let Some(second_binding_name) = path.segments.last();
if second_binding_name.ident.name == binding_name;
then {
Some((field_ident, assign_rhs))
} else {
None
}
}
}

View File

@ -0,0 +1,236 @@
use rustc_ast::ast::{LitFloatType, LitIntType, LitKind};
use rustc_errors::Applicability;
use rustc_hir::{
intravisit::{walk_expr, walk_stmt, NestedVisitorMap, Visitor},
Body, Expr, ExprKind, HirId, Lit, Stmt, StmtKind,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::{
hir::map::Map,
ty::{self, FloatTy, IntTy, PolyFnSig, Ty},
};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use if_chain::if_chain;
use crate::utils::{snippet, span_lint_and_sugg};
declare_clippy_lint! {
/// **What it does:** Checks for usage of unconstrained numeric literals which may cause default numeric fallback in type
/// inference.
///
/// Default numeric fallback means that if numeric types have not yet been bound to concrete
/// types at the end of type inference, then integer type is bound to `i32`, and similarly
/// floating type is bound to `f64`.
///
/// See [RFC0212](https://github.com/rust-lang/rfcs/blob/master/text/0212-restore-int-fallback.md) for more information about the fallback.
///
/// **Why is this bad?** For those who are very careful about types, default numeric fallback
/// can be a pitfall that causes unexpected runtime behavior.
///
/// **Known problems:** This lint can only be allowed at the function level or above.
///
/// **Example:**
/// ```rust
/// let i = 10;
/// let f = 1.23;
/// ```
///
/// Use instead:
/// ```rust
/// let i = 10i32;
/// let f = 1.23f64;
/// ```
pub DEFAULT_NUMERIC_FALLBACK,
restriction,
"usage of unconstrained numeric literals which may cause default numeric fallback."
}
declare_lint_pass!(DefaultNumericFallback => [DEFAULT_NUMERIC_FALLBACK]);
impl LateLintPass<'_> for DefaultNumericFallback {
fn check_body(&mut self, cx: &LateContext<'tcx>, body: &'tcx Body<'_>) {
let mut visitor = NumericFallbackVisitor::new(cx);
visitor.visit_body(body);
}
}
struct NumericFallbackVisitor<'a, 'tcx> {
/// Stack of type bounds for the expressions being visited. The top element holds the bound for the current expression.
ty_bounds: Vec<TyBound<'tcx>>,
cx: &'a LateContext<'tcx>,
}
impl<'a, 'tcx> NumericFallbackVisitor<'a, 'tcx> {
fn new(cx: &'a LateContext<'tcx>) -> Self {
Self {
ty_bounds: vec![TyBound::Nothing],
cx,
}
}
/// Check whether a passed literal has potential to cause fallback or not.
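/// For illustration (hypothetical snippet): `let x = 22;` leaves `x` unconstrained, so it
/// falls back to `i32` and the suggestion built below would be `let x = 22_i32;`.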
fn check_lit(&self, lit: &Lit, lit_ty: Ty<'tcx>) {
if_chain! {
if let Some(ty_bound) = self.ty_bounds.last();
if matches!(lit.node,
LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::Float(_, LitFloatType::Unsuffixed));
if !ty_bound.is_integral();
then {
let suffix = match lit_ty.kind() {
ty::Int(IntTy::I32) => "i32",
ty::Float(FloatTy::F64) => "f64",
// Default numeric fallback never results in other types.
_ => return,
};
let sugg = format!("{}_{}", snippet(self.cx, lit.span, ""), suffix);
span_lint_and_sugg(
self.cx,
DEFAULT_NUMERIC_FALLBACK,
lit.span,
"default numeric fallback might occur",
"consider adding suffix",
sugg,
Applicability::MaybeIncorrect,
);
}
}
}
}
impl<'a, 'tcx> Visitor<'tcx> for NumericFallbackVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
#[allow(clippy::too_many_lines)]
fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
match &expr.kind {
ExprKind::Call(func, args) => {
if let Some(fn_sig) = fn_sig_opt(self.cx, func.hir_id) {
for (expr, bound) in args.iter().zip(fn_sig.skip_binder().inputs().iter()) {
// Push found arg type, then visit arg.
self.ty_bounds.push(TyBound::Ty(bound));
self.visit_expr(expr);
self.ty_bounds.pop();
}
return;
}
},
ExprKind::MethodCall(_, _, args, _) => {
if let Some(def_id) = self.cx.typeck_results().type_dependent_def_id(expr.hir_id) {
let fn_sig = self.cx.tcx.fn_sig(def_id).skip_binder();
for (expr, bound) in args.iter().zip(fn_sig.inputs().iter()) {
self.ty_bounds.push(TyBound::Ty(bound));
self.visit_expr(expr);
self.ty_bounds.pop();
}
return;
}
},
ExprKind::Struct(_, fields, base) => {
if_chain! {
let ty = self.cx.typeck_results().expr_ty(expr);
if let Some(adt_def) = ty.ty_adt_def();
if adt_def.is_struct();
if let Some(variant) = adt_def.variants.iter().next();
then {
let fields_def = &variant.fields;
// Push field type then visit each field expr.
for field in fields.iter() {
let bound =
fields_def
.iter()
.find_map(|f_def| {
if f_def.ident == field.ident
{ Some(self.cx.tcx.type_of(f_def.did)) }
else { None }
});
self.ty_bounds.push(bound.into());
self.visit_expr(field.expr);
self.ty_bounds.pop();
}
// Visit base with no bound.
if let Some(base) = base {
self.ty_bounds.push(TyBound::Nothing);
self.visit_expr(base);
self.ty_bounds.pop();
}
return;
}
}
},
ExprKind::Lit(lit) => {
let ty = self.cx.typeck_results().expr_ty(expr);
self.check_lit(lit, ty);
return;
},
_ => {},
}
walk_expr(self, expr);
}
fn visit_stmt(&mut self, stmt: &'tcx Stmt<'_>) {
match stmt.kind {
StmtKind::Local(local) => {
if local.ty.is_some() {
self.ty_bounds.push(TyBound::Any)
} else {
self.ty_bounds.push(TyBound::Nothing)
}
},
_ => self.ty_bounds.push(TyBound::Nothing),
}
walk_stmt(self, stmt);
self.ty_bounds.pop();
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
}
fn fn_sig_opt<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option<PolyFnSig<'tcx>> {
let node_ty = cx.typeck_results().node_type_opt(hir_id)?;
// We can't use `TyS::fn_sig` because it automatically applies generic substitutions; this may result in false negatives.
match node_ty.kind() {
ty::FnDef(def_id, _) => Some(cx.tcx.fn_sig(*def_id)),
ty::FnPtr(fn_sig) => Some(*fn_sig),
_ => None,
}
}
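/// Type bound imposed on an expression by its context: `Ty` is a concrete expected type,
/// `Any` means some bound exists but is not tracked (e.g. an explicit type annotation),
/// which suppresses the lint, and `Nothing` means the expression is unconstrained.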
#[derive(Debug, Clone, Copy)]
enum TyBound<'tcx> {
Any,
Ty(Ty<'tcx>),
Nothing,
}
impl<'tcx> TyBound<'tcx> {
fn is_integral(self) -> bool {
match self {
TyBound::Any => true,
TyBound::Ty(t) => t.is_integral(),
TyBound::Nothing => false,
}
}
}
impl<'tcx> From<Option<Ty<'tcx>>> for TyBound<'tcx> {
fn from(v: Option<Ty<'tcx>>) -> Self {
match v {
Some(t) => TyBound::Ty(t),
None => TyBound::Nothing,
}
}
}

View File

@ -0,0 +1,190 @@
macro_rules! declare_deprecated_lint {
(pub $name: ident, $_reason: expr) => {
declare_lint!(pub $name, Allow, "deprecated lint")
}
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This used to check for `assert!(a == b)` and recommend
/// replacement with `assert_eq!(a, b)`, but this is no longer needed after RFC 2011.
pub SHOULD_ASSERT_EQ,
"`assert!()` will be more flexible with RFC 2011"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This used to check for `Vec::extend`, which was slower than
/// `Vec::extend_from_slice`. Thanks to specialization, this is no longer true.
pub EXTEND_FROM_SLICE,
"`.extend_from_slice(_)` is a faster way to extend a Vec by a slice"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** `Range::step_by(0)` used to be linted since it's
/// an infinite iterator, which is better expressed by `iter::repeat`,
/// but that method has been removed in favor of `Iterator::step_by`, which panics
/// if given a step of zero
pub RANGE_STEP_BY_ZERO,
"`iterator.step_by(0)` panics nowadays"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This used to check for `Vec::as_slice`, which was unstable with good
/// stable alternatives. `Vec::as_slice` has now been stabilized.
pub UNSTABLE_AS_SLICE,
"`Vec::as_slice` has been stabilized in 1.7"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This used to check for `Vec::as_mut_slice`, which was unstable with good
/// stable alternatives. `Vec::as_mut_slice` has now been stabilized.
pub UNSTABLE_AS_MUT_SLICE,
"`Vec::as_mut_slice` has been stabilized in 1.7"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint should never have applied to non-pointer types, as transmuting
/// between non-pointer types of differing alignment is well-defined behavior (it's semantically
/// equivalent to a memcpy). This lint has thus been refactored into two separate lints:
/// cast_ptr_alignment and transmute_ptr_to_ptr.
pub MISALIGNED_TRANSMUTE,
"this lint has been split into cast_ptr_alignment and transmute_ptr_to_ptr"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint is too subjective, not having a good reason for being in clippy.
/// Additionally, compound assignment operators may be overloaded separately from their non-assigning
/// counterparts, so this lint may suggest a change in behavior or the code may not compile.
pub ASSIGN_OPS,
"using compound assignment operators (e.g., `+=`) is harmless"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** The original rule only linted `if let`. After it was
/// extended to also cover `match`, the `if let` name was no longer suitable,
/// so this lint was deprecated.
pub IF_LET_REDUNDANT_PATTERN_MATCHING,
"this lint has been changed to redundant_pattern_matching"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint used to suggest replacing `let mut vec =
/// Vec::with_capacity(n); vec.set_len(n);` with `let vec = vec![0; n];`. The
/// replacement has very different performance characteristics so the lint is
/// deprecated.
pub UNSAFE_VECTOR_INITIALIZATION,
"the replacement suggested by this lint had substantially different behavior"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint has been superseded by the warn-by-default
/// `invalid_value` rustc lint.
pub INVALID_REF,
"superseded by rustc lint `invalid_value`"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint has been superseded by #[must_use] in rustc.
pub UNUSED_COLLECT,
"`collect` has been marked as #[must_use] in rustc and that covers all cases of this lint"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint has been uplifted to rustc and is now called
/// `array_into_iter`.
pub INTO_ITER_ON_ARRAY,
"this lint has been uplifted to rustc and is now called `array_into_iter`"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint has been uplifted to rustc and is now called
/// `unused_labels`.
pub UNUSED_LABEL,
"this lint has been uplifted to rustc and is now called `unused_labels`"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** Associated-constants are now preferred.
pub REPLACE_CONSTS,
"associated-constants `MIN`/`MAX` of integers are preferred to `{min,max}_value()` and module constants"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** The regex! macro does not exist anymore.
pub REGEX_MACRO,
"the regex! macro has been removed from the regex crate in 2018"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint has been uplifted to rustc and is now called
/// `drop_bounds`.
pub DROP_BOUNDS,
"this lint has been uplifted to rustc and is now called `drop_bounds`"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint has been uplifted to rustc and is now called
/// `temporary_cstring_as_ptr`.
pub TEMPORARY_CSTRING_AS_PTR,
"this lint has been uplifted to rustc and is now called `temporary_cstring_as_ptr`"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint has been uplifted to rustc and is now called
/// `panic_fmt`.
pub PANIC_PARAMS,
"this lint has been uplifted to rustc and is now called `panic_fmt`"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint has been integrated into the `unknown_lints`
/// rustc lint.
pub UNKNOWN_CLIPPY_LINTS,
"this lint has been integrated into the `unknown_lints` rustc lint"
}
declare_deprecated_lint! {
/// **What it does:** Nothing. This lint has been deprecated.
///
/// **Deprecation reason:** This lint has been replaced by `manual_find_map`, a
/// more specific lint.
pub FIND_MAP,
"this lint has been replaced by `manual_find_map`, a more specific lint"
}

View File

@ -0,0 +1,109 @@
use crate::utils::{get_parent_expr, implements_trait, snippet, span_lint_and_sugg};
use if_chain::if_chain;
use rustc_ast::util::parser::{ExprPrecedence, PREC_POSTFIX, PREC_PREFIX};
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
declare_clippy_lint! {
/// **What it does:** Checks for explicit `deref()` or `deref_mut()` method calls.
///
/// **Why is this bad?** Dereferencing by `&*x` or `&mut *x` is clearer and more concise,
/// when not part of a method chain.
///
/// **Example:**
/// ```rust
/// use std::ops::Deref;
/// let a: &mut String = &mut String::from("foo");
/// let b: &str = a.deref();
/// ```
/// Could be written as:
/// ```rust
/// let a: &mut String = &mut String::from("foo");
/// let b = &*a;
/// ```
///
/// This lint excludes
/// ```rust,ignore
/// let _ = d.unwrap().deref();
/// ```
pub EXPLICIT_DEREF_METHODS,
pedantic,
"Explicit use of deref or deref_mut method while not in a method chain."
}
declare_lint_pass!(Dereferencing => [
EXPLICIT_DEREF_METHODS
]);
impl<'tcx> LateLintPass<'tcx> for Dereferencing {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
if !expr.span.from_expansion();
if let ExprKind::MethodCall(ref method_name, _, ref args, _) = &expr.kind;
if args.len() == 1;
then {
if let Some(parent_expr) = get_parent_expr(cx, expr) {
// Check if we have the whole call chain here
if let ExprKind::MethodCall(..) = parent_expr.kind {
return;
}
// Check for Expr that we don't want to be linted
let precedence = parent_expr.precedence();
match precedence {
// Lint a Call is ok though
ExprPrecedence::Call | ExprPrecedence::AddrOf => (),
_ => {
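// Skip other parent expressions in the unary-to-postfix precedence range (e.g. unary
// operators, `?`, indexing), presumably because a bare `&*x` replacement would need
// extra parentheses there.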
if precedence.order() >= PREC_PREFIX && precedence.order() <= PREC_POSTFIX {
return;
}
}
}
}
let name = method_name.ident.as_str();
lint_deref(cx, &*name, &args[0], args[0].span, expr.span);
}
}
}
}
fn lint_deref(cx: &LateContext<'_>, method_name: &str, call_expr: &Expr<'_>, var_span: Span, expr_span: Span) {
match method_name {
"deref" => {
let impls_deref_trait = cx.tcx.lang_items().deref_trait().map_or(false, |id| {
implements_trait(cx, cx.typeck_results().expr_ty(&call_expr), id, &[])
});
if impls_deref_trait {
span_lint_and_sugg(
cx,
EXPLICIT_DEREF_METHODS,
expr_span,
"explicit deref method call",
"try this",
format!("&*{}", &snippet(cx, var_span, "..")),
Applicability::MachineApplicable,
);
}
},
"deref_mut" => {
let impls_deref_mut_trait = cx.tcx.lang_items().deref_mut_trait().map_or(false, |id| {
implements_trait(cx, cx.typeck_results().expr_ty(&call_expr), id, &[])
});
if impls_deref_mut_trait {
span_lint_and_sugg(
cx,
EXPLICIT_DEREF_METHODS,
expr_span,
"explicit deref_mut method call",
"try this",
format!("&mut *{}", &snippet(cx, var_span, "..")),
Applicability::MachineApplicable,
);
}
},
_ => (),
}
}

View File

@ -0,0 +1,429 @@
use crate::utils::paths;
use crate::utils::{
get_trait_def_id, is_allowed, is_automatically_derived, is_copy, match_def_path, span_lint_and_help,
span_lint_and_note, span_lint_and_then,
};
use if_chain::if_chain;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{walk_expr, walk_fn, walk_item, FnKind, NestedVisitorMap, Visitor};
use rustc_hir::{
BlockCheckMode, BodyId, Expr, ExprKind, FnDecl, HirId, Impl, Item, ItemKind, TraitRef, UnsafeSource, Unsafety,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::map::Map;
use rustc_middle::ty::{self, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
declare_clippy_lint! {
/// **What it does:** Checks for deriving `Hash` but implementing `PartialEq`
/// explicitly or vice versa.
///
/// **Why is this bad?** The implementation of these traits must agree (for
/// example for use with `HashMap`), so it's probably a bad idea to use a
/// default-generated `Hash` implementation with an explicitly defined
/// `PartialEq`. In particular, the following must hold for any type:
///
/// ```text
/// k1 == k2 ⇒ hash(k1) == hash(k2)
/// ```
///
/// **Known problems:** None.
///
/// **Example:**
/// ```ignore
/// #[derive(Hash)]
/// struct Foo;
///
/// impl PartialEq for Foo {
/// ...
/// }
/// ```
pub DERIVE_HASH_XOR_EQ,
correctness,
"deriving `Hash` but implementing `PartialEq` explicitly"
}
declare_clippy_lint! {
/// **What it does:** Checks for deriving `Ord` but implementing `PartialOrd`
/// explicitly or vice versa.
///
/// **Why is this bad?** The implementation of these traits must agree (for
/// example for use with `sort`), so it's probably a bad idea to use a
/// default-generated `Ord` implementation with an explicitly defined
/// `PartialOrd`. In particular, the following must hold for any type
/// implementing `Ord`:
///
/// ```text
/// k1.cmp(&k2) == k1.partial_cmp(&k2).unwrap()
/// ```
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust,ignore
/// #[derive(Ord, PartialEq, Eq)]
/// struct Foo;
///
/// impl PartialOrd for Foo {
/// ...
/// }
/// ```
/// Use instead:
/// ```rust,ignore
/// #[derive(PartialEq, Eq)]
/// struct Foo;
///
/// impl PartialOrd for Foo {
/// fn partial_cmp(&self, other: &Foo) -> Option<Ordering> {
/// Some(self.cmp(other))
/// }
/// }
///
/// impl Ord for Foo {
/// ...
/// }
/// ```
/// or, if you don't need a custom ordering:
/// ```rust,ignore
/// #[derive(Ord, PartialOrd, PartialEq, Eq)]
/// struct Foo;
/// ```
pub DERIVE_ORD_XOR_PARTIAL_ORD,
correctness,
"deriving `Ord` but implementing `PartialOrd` explicitly"
}
declare_clippy_lint! {
/// **What it does:** Checks for explicit `Clone` implementations for `Copy`
/// types.
///
/// **Why is this bad?** To avoid surprising behaviour, these traits should
/// agree and the behaviour of `Copy` cannot be overridden. In almost all
/// situations a `Copy` type should have a `Clone` implementation that does
/// nothing more than copy the object, which is what `#[derive(Copy, Clone)]`
/// gets you.
///
/// **Known problems:** Bounds of generic types are sometimes wrong: https://github.com/rust-lang/rust/issues/26925
///
/// **Example:**
/// ```rust,ignore
/// #[derive(Copy)]
/// struct Foo;
///
/// impl Clone for Foo {
/// // ..
/// }
/// ```
pub EXPL_IMPL_CLONE_ON_COPY,
pedantic,
"implementing `Clone` explicitly on `Copy` types"
}
declare_clippy_lint! {
/// **What it does:** Checks for deriving `serde::Deserialize` on a type that
/// has methods using `unsafe`.
///
/// **Why is this bad?** Deriving `serde::Deserialize` will create a constructor
/// that may violate invariants held by another constructor.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust,ignore
/// use serde::Deserialize;
///
/// #[derive(Deserialize)]
/// pub struct Foo {
/// // ..
/// }
///
/// impl Foo {
/// pub fn new() -> Self {
/// // setup here ..
/// }
///
/// pub unsafe fn parts() -> (&str, &str) {
/// // assumes invariants hold
/// }
/// }
/// ```
pub UNSAFE_DERIVE_DESERIALIZE,
pedantic,
"deriving `serde::Deserialize` on a type that has methods using `unsafe`"
}
declare_lint_pass!(Derive => [
EXPL_IMPL_CLONE_ON_COPY,
DERIVE_HASH_XOR_EQ,
DERIVE_ORD_XOR_PARTIAL_ORD,
UNSAFE_DERIVE_DESERIALIZE
]);
impl<'tcx> LateLintPass<'tcx> for Derive {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
if let ItemKind::Impl(Impl {
of_trait: Some(ref trait_ref),
..
}) = item.kind
{
let ty = cx.tcx.type_of(item.def_id);
let attrs = cx.tcx.hir().attrs(item.hir_id());
let is_automatically_derived = is_automatically_derived(attrs);
check_hash_peq(cx, item.span, trait_ref, ty, is_automatically_derived);
check_ord_partial_ord(cx, item.span, trait_ref, ty, is_automatically_derived);
if is_automatically_derived {
check_unsafe_derive_deserialize(cx, item, trait_ref, ty);
} else {
check_copy_clone(cx, item, trait_ref, ty);
}
}
}
}
/// Implementation of the `DERIVE_HASH_XOR_EQ` lint.
fn check_hash_peq<'tcx>(
cx: &LateContext<'tcx>,
span: Span,
trait_ref: &TraitRef<'_>,
ty: Ty<'tcx>,
hash_is_automatically_derived: bool,
) {
if_chain! {
if let Some(peq_trait_def_id) = cx.tcx.lang_items().eq_trait();
if let Some(def_id) = trait_ref.trait_def_id();
if match_def_path(cx, def_id, &paths::HASH);
then {
// Look for the PartialEq implementations for `ty`
cx.tcx.for_each_relevant_impl(peq_trait_def_id, ty, |impl_id| {
let peq_is_automatically_derived = is_automatically_derived(&cx.tcx.get_attrs(impl_id));
if peq_is_automatically_derived == hash_is_automatically_derived {
return;
}
let trait_ref = cx.tcx.impl_trait_ref(impl_id).expect("must be a trait implementation");
// Only care about `impl PartialEq<Foo> for Foo`
// For `impl PartialEq<B> for A`, input_types is [A, B]
if trait_ref.substs.type_at(1) == ty {
let mess = if peq_is_automatically_derived {
"you are implementing `Hash` explicitly but have derived `PartialEq`"
} else {
"you are deriving `Hash` but have implemented `PartialEq` explicitly"
};
span_lint_and_then(
cx,
DERIVE_HASH_XOR_EQ,
span,
mess,
|diag| {
if let Some(local_def_id) = impl_id.as_local() {
let hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_def_id);
diag.span_note(
cx.tcx.hir().span(hir_id),
"`PartialEq` implemented here"
);
}
}
);
}
});
}
}
}
/// Implementation of the `DERIVE_ORD_XOR_PARTIAL_ORD` lint.
fn check_ord_partial_ord<'tcx>(
cx: &LateContext<'tcx>,
span: Span,
trait_ref: &TraitRef<'_>,
ty: Ty<'tcx>,
ord_is_automatically_derived: bool,
) {
if_chain! {
if let Some(ord_trait_def_id) = get_trait_def_id(cx, &paths::ORD);
if let Some(partial_ord_trait_def_id) = cx.tcx.lang_items().partial_ord_trait();
if let Some(def_id) = &trait_ref.trait_def_id();
if *def_id == ord_trait_def_id;
then {
// Look for the PartialOrd implementations for `ty`
cx.tcx.for_each_relevant_impl(partial_ord_trait_def_id, ty, |impl_id| {
let partial_ord_is_automatically_derived = is_automatically_derived(&cx.tcx.get_attrs(impl_id));
if partial_ord_is_automatically_derived == ord_is_automatically_derived {
return;
}
let trait_ref = cx.tcx.impl_trait_ref(impl_id).expect("must be a trait implementation");
// Only care about `impl PartialOrd<Foo> for Foo`
// For `impl PartialOrd<B> for A`, input_types is [A, B]
if trait_ref.substs.type_at(1) == ty {
let mess = if partial_ord_is_automatically_derived {
"you are implementing `Ord` explicitly but have derived `PartialOrd`"
} else {
"you are deriving `Ord` but have implemented `PartialOrd` explicitly"
};
span_lint_and_then(
cx,
DERIVE_ORD_XOR_PARTIAL_ORD,
span,
mess,
|diag| {
if let Some(local_def_id) = impl_id.as_local() {
let hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_def_id);
diag.span_note(
cx.tcx.hir().span(hir_id),
"`PartialOrd` implemented here"
);
}
}
);
}
});
}
}
}
/// Implementation of the `EXPL_IMPL_CLONE_ON_COPY` lint.
fn check_copy_clone<'tcx>(cx: &LateContext<'tcx>, item: &Item<'_>, trait_ref: &TraitRef<'_>, ty: Ty<'tcx>) {
if cx
.tcx
.lang_items()
.clone_trait()
.map_or(false, |id| Some(id) == trait_ref.trait_def_id())
{
if !is_copy(cx, ty) {
return;
}
match *ty.kind() {
ty::Adt(def, _) if def.is_union() => return,
// Some types are not Clone by default but could be cloned “by hand” if necessary
ty::Adt(def, substs) => {
for variant in &def.variants {
for field in &variant.fields {
if let ty::FnDef(..) = field.ty(cx.tcx, substs).kind() {
return;
}
}
for subst in substs {
if let ty::subst::GenericArgKind::Type(subst) = subst.unpack() {
if let ty::Param(_) = subst.kind() {
return;
}
}
}
}
},
_ => (),
}
span_lint_and_note(
cx,
EXPL_IMPL_CLONE_ON_COPY,
item.span,
"you are implementing `Clone` explicitly on a `Copy` type",
Some(item.span),
"consider deriving `Clone` or removing `Copy`",
);
}
}
/// Implementation of the `UNSAFE_DERIVE_DESERIALIZE` lint.
fn check_unsafe_derive_deserialize<'tcx>(
cx: &LateContext<'tcx>,
item: &Item<'_>,
trait_ref: &TraitRef<'_>,
ty: Ty<'tcx>,
) {
fn item_from_def_id<'tcx>(cx: &LateContext<'tcx>, def_id: DefId) -> &'tcx Item<'tcx> {
let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
cx.tcx.hir().expect_item(hir_id)
}
fn has_unsafe<'tcx>(cx: &LateContext<'tcx>, item: &'tcx Item<'_>) -> bool {
let mut visitor = UnsafeVisitor { cx, has_unsafe: false };
walk_item(&mut visitor, item);
visitor.has_unsafe
}
if_chain! {
if let Some(trait_def_id) = trait_ref.trait_def_id();
if match_def_path(cx, trait_def_id, &paths::SERDE_DESERIALIZE);
if let ty::Adt(def, _) = ty.kind();
if let Some(local_def_id) = def.did.as_local();
let adt_hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_def_id);
if !is_allowed(cx, UNSAFE_DERIVE_DESERIALIZE, adt_hir_id);
if cx.tcx.inherent_impls(def.did)
.iter()
.map(|imp_did| item_from_def_id(cx, *imp_did))
.any(|imp| has_unsafe(cx, imp));
then {
span_lint_and_help(
cx,
UNSAFE_DERIVE_DESERIALIZE,
item.span,
"you are deriving `serde::Deserialize` on a type that has methods using `unsafe`",
None,
"consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html"
);
}
}
}
struct UnsafeVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
has_unsafe: bool,
}
impl<'tcx> Visitor<'tcx> for UnsafeVisitor<'_, 'tcx> {
type Map = Map<'tcx>;
fn visit_fn(&mut self, kind: FnKind<'tcx>, decl: &'tcx FnDecl<'_>, body_id: BodyId, span: Span, id: HirId) {
if self.has_unsafe {
return;
}
if_chain! {
if let Some(header) = kind.header();
if let Unsafety::Unsafe = header.unsafety;
then {
self.has_unsafe = true;
}
}
walk_fn(self, kind, decl, body_id, span, id);
}
fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
if self.has_unsafe {
return;
}
if let ExprKind::Block(block, _) = expr.kind {
match block.rules {
BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided)
| BlockCheckMode::PushUnsafeBlock(UnsafeSource::UserProvided)
| BlockCheckMode::PopUnsafeBlock(UnsafeSource::UserProvided) => {
self.has_unsafe = true;
},
_ => {},
}
}
walk_expr(self, expr);
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::All(self.cx.tcx.hir())
}
}

View File

@ -0,0 +1,89 @@
use crate::utils::{fn_def_id, span_lint};
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::Expr;
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::Symbol;
declare_clippy_lint! {
/// **What it does:** Denies the configured methods and functions in clippy.toml
///
/// **Why is this bad?** Some methods are undesirable in certain contexts,
/// and it's beneficial to lint for them as needed.
///
/// **Known problems:** Currently, you must write each function as a
/// fully-qualified path. This lint doesn't support aliases or reexported
/// names; be aware that many types in `std` are actually reexports.
///
/// For example, if you want to disallow `Duration::as_secs`, your clippy.toml
/// configuration would look like
/// `disallowed-methods = ["core::time::Duration::as_secs"]` and not
/// `disallowed-methods = ["std::time::Duration::as_secs"]` as you might expect.
///
/// **Example:**
///
/// An example clippy.toml configuration:
/// ```toml
/// # clippy.toml
/// disallowed-methods = ["alloc::vec::Vec::leak", "std::time::Instant::now"]
/// ```
///
/// ```rust,ignore
/// // Example code where clippy issues a warning
/// let xs = vec![1, 2, 3, 4];
/// xs.leak(); // Vec::leak is disallowed in the config.
///
/// let _now = Instant::now(); // Instant::now is disallowed in the config.
/// ```
///
/// Use instead:
/// ```rust,ignore
/// // Example code which does not raise clippy warning
/// let mut xs = Vec::new(); // Vec::new is _not_ disallowed in the config.
/// xs.push(123); // Vec::push is _not_ disallowed in the config.
/// ```
pub DISALLOWED_METHOD,
nursery,
"use of a disallowed method call"
}
#[derive(Clone, Debug)]
pub struct DisallowedMethod {
disallowed: FxHashSet<Vec<Symbol>>,
}
impl DisallowedMethod {
pub fn new(disallowed: &FxHashSet<String>) -> Self {
Self {
disallowed: disallowed
.iter()
.map(|s| s.split("::").map(|seg| Symbol::intern(seg)).collect::<Vec<_>>())
.collect(),
}
}
}
impl_lint_pass!(DisallowedMethod => [DISALLOWED_METHOD]);
impl<'tcx> LateLintPass<'tcx> for DisallowedMethod {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if let Some(def_id) = fn_def_id(cx, expr) {
let func_path = cx.get_def_path(def_id);
if self.disallowed.contains(&func_path) {
let func_path_string = func_path
.into_iter()
.map(Symbol::to_ident_string)
.collect::<Vec<_>>()
.join("::");
span_lint(
cx,
DISALLOWED_METHOD,
expr.span,
&format!("use of a disallowed method `{}`", func_path_string),
);
}
}
}
}

View File

@ -0,0 +1,741 @@
use crate::utils::{
implements_trait, is_entrypoint_fn, is_expn_of, is_type_diagnostic_item, match_panic_def_id, method_chain_args,
return_ty, span_lint, span_lint_and_note,
};
use if_chain::if_chain;
use itertools::Itertools;
use rustc_ast::ast::{Async, AttrKind, Attribute, FnKind, FnRetTy, ItemKind};
use rustc_ast::token::CommentKind;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::Lrc;
use rustc_errors::emitter::EmitterWriter;
use rustc_errors::Handler;
use rustc_hir as hir;
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_hir::{Expr, ExprKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
use rustc_parse::maybe_new_parser_from_source_str;
use rustc_parse::parser::ForceCollect;
use rustc_session::parse::ParseSess;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::edition::Edition;
use rustc_span::source_map::{BytePos, FilePathMapping, MultiSpan, SourceMap, Span};
use rustc_span::{sym, FileName, Pos};
use std::io;
use std::ops::Range;
use url::Url;
declare_clippy_lint! {
/// **What it does:** Checks for the presence of `_`, `::` or camel-case words
/// outside ticks in documentation.
///
/// **Why is this bad?** *Rustdoc* supports markdown formatting; `_`, `::` and
/// camel-case probably indicate some code which should be included between
/// ticks. `_` can also be used for emphasis in markdown; this lint tries to
/// account for that.
///
/// **Known problems:** Lots of bad docs won't be fixed: what the lint checks
/// for is limited, and there are still false positives.
///
/// In addition, when writing documentation comments, including `[]` brackets
/// inside a link text would trip the parser. Therefore, documenting a link with
/// `[`SmallVec<[T; INLINE_CAPACITY]>`]` and then [`SmallVec<[T; INLINE_CAPACITY]>`]: SmallVec
/// would fail.
///
/// **Examples:**
/// ```rust
/// /// Do something with the foo_bar parameter. See also
/// /// that::other::module::foo.
/// // ^ `foo_bar` and `that::other::module::foo` should be ticked.
/// fn doit(foo_bar: usize) {}
/// ```
///
/// ```rust
/// // Link text with `[]` brackets should be written as following:
/// /// Consume the array and return the inner
/// /// [`SmallVec<[T; INLINE_CAPACITY]>`][SmallVec].
/// /// [SmallVec]: SmallVec
/// fn main() {}
/// ```
pub DOC_MARKDOWN,
pedantic,
"presence of `_`, `::` or camel-case outside backticks in documentation"
}
declare_clippy_lint! {
/// **What it does:** Checks for the doc comments of publicly visible
/// unsafe functions and warns if there is no `# Safety` section.
///
/// **Why is this bad?** Unsafe functions should document their safety
/// preconditions, so that users can be sure they are using them safely.
///
/// **Known problems:** None.
///
/// **Examples:**
/// ```rust
///# type Universe = ();
/// /// This function should really be documented
/// pub unsafe fn start_apocalypse(u: &mut Universe) {
/// unimplemented!();
/// }
/// ```
///
/// At least write a line about safety:
///
/// ```rust
///# type Universe = ();
/// /// # Safety
/// ///
/// /// This function should not be called before the horsemen are ready.
/// pub unsafe fn start_apocalypse(u: &mut Universe) {
/// unimplemented!();
/// }
/// ```
pub MISSING_SAFETY_DOC,
style,
"`pub unsafe fn` without `# Safety` docs"
}
declare_clippy_lint! {
/// **What it does:** Checks the doc comments of publicly visible functions that
/// return a `Result` type and warns if there is no `# Errors` section.
///
/// **Why is this bad?** Documenting the type of errors that can be returned from a
/// function can help callers write code to handle the errors appropriately.
///
/// **Known problems:** None.
///
/// **Examples:**
///
/// Since the following function returns a `Result` it has an `# Errors` section in
/// its doc comment:
///
/// ```rust
///# use std::io;
/// /// # Errors
/// ///
/// /// Will return `Err` if `filename` does not exist or the user does not have
/// /// permission to read it.
/// pub fn read(filename: String) -> io::Result<String> {
/// unimplemented!();
/// }
/// ```
pub MISSING_ERRORS_DOC,
pedantic,
"`pub fn` returns `Result` without `# Errors` in doc comment"
}
declare_clippy_lint! {
/// **What it does:** Checks the doc comments of publicly visible functions that
/// may panic and warns if there is no `# Panics` section.
///
/// **Why is this bad?** Documenting the scenarios in which panicking occurs
/// can help callers who do not want to panic to avoid those situations.
///
/// **Known problems:** None.
///
/// **Examples:**
///
/// Since the following function may panic it has a `# Panics` section in
/// its doc comment:
///
/// ```rust
/// /// # Panics
/// ///
/// /// Will panic if y is 0
/// pub fn divide_by(x: i32, y: i32) -> i32 {
/// if y == 0 {
/// panic!("Cannot divide by 0")
/// } else {
/// x / y
/// }
/// }
/// ```
pub MISSING_PANICS_DOC,
pedantic,
"`pub fn` may panic without `# Panics` in doc comment"
}
declare_clippy_lint! {
/// **What it does:** Checks for `fn main() { .. }` in doctests
///
/// **Why is this bad?** The test can be shorter (and likely more readable)
/// if the `fn main()` is left implicit.
///
/// **Known problems:** None.
///
/// **Examples:**
/// ``````rust
/// /// An example of a doctest with a `main()` function
/// ///
/// /// # Examples
/// ///
/// /// ```
/// /// fn main() {
/// /// // this needs not be in an `fn`
/// /// }
/// /// ```
/// fn needless_main() {
/// unimplemented!();
/// }
/// ``````
pub NEEDLESS_DOCTEST_MAIN,
style,
"presence of `fn main() {` in code examples"
}
#[allow(clippy::module_name_repetitions)]
#[derive(Clone)]
pub struct DocMarkdown {
valid_idents: FxHashSet<String>,
in_trait_impl: bool,
}
impl DocMarkdown {
pub fn new(valid_idents: FxHashSet<String>) -> Self {
Self {
valid_idents,
in_trait_impl: false,
}
}
}
impl_lint_pass!(DocMarkdown =>
[DOC_MARKDOWN, MISSING_SAFETY_DOC, MISSING_ERRORS_DOC, MISSING_PANICS_DOC, NEEDLESS_DOCTEST_MAIN]
);
impl<'tcx> LateLintPass<'tcx> for DocMarkdown {
fn check_crate(&mut self, cx: &LateContext<'tcx>, _: &'tcx hir::Crate<'_>) {
let attrs = cx.tcx.hir().attrs(hir::CRATE_HIR_ID);
check_attrs(cx, &self.valid_idents, attrs);
}
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
let attrs = cx.tcx.hir().attrs(item.hir_id());
let headers = check_attrs(cx, &self.valid_idents, attrs);
match item.kind {
hir::ItemKind::Fn(ref sig, _, body_id) => {
if !(is_entrypoint_fn(cx, item.def_id.to_def_id()) || in_external_macro(cx.tcx.sess, item.span)) {
let body = cx.tcx.hir().body(body_id);
let mut fpu = FindPanicUnwrap {
cx,
typeck_results: cx.tcx.typeck(item.def_id),
panic_span: None,
};
fpu.visit_expr(&body.value);
lint_for_missing_headers(
cx,
item.hir_id(),
item.span,
sig,
headers,
Some(body_id),
fpu.panic_span,
);
}
},
hir::ItemKind::Impl(ref impl_) => {
self.in_trait_impl = impl_.of_trait.is_some();
},
_ => {},
}
}
fn check_item_post(&mut self, _cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) {
if let hir::ItemKind::Impl { .. } = item.kind {
self.in_trait_impl = false;
}
}
fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
let attrs = cx.tcx.hir().attrs(item.hir_id());
let headers = check_attrs(cx, &self.valid_idents, attrs);
if let hir::TraitItemKind::Fn(ref sig, ..) = item.kind {
if !in_external_macro(cx.tcx.sess, item.span) {
lint_for_missing_headers(cx, item.hir_id(), item.span, sig, headers, None, None);
}
}
}
fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) {
let attrs = cx.tcx.hir().attrs(item.hir_id());
let headers = check_attrs(cx, &self.valid_idents, attrs);
if self.in_trait_impl || in_external_macro(cx.tcx.sess, item.span) {
return;
}
if let hir::ImplItemKind::Fn(ref sig, body_id) = item.kind {
let body = cx.tcx.hir().body(body_id);
let mut fpu = FindPanicUnwrap {
cx,
typeck_results: cx.tcx.typeck(item.def_id),
panic_span: None,
};
fpu.visit_expr(&body.value);
lint_for_missing_headers(
cx,
item.hir_id(),
item.span,
sig,
headers,
Some(body_id),
fpu.panic_span,
);
}
}
}
fn lint_for_missing_headers<'tcx>(
cx: &LateContext<'tcx>,
hir_id: hir::HirId,
span: impl Into<MultiSpan> + Copy,
sig: &hir::FnSig<'_>,
headers: DocHeaders,
body_id: Option<hir::BodyId>,
panic_span: Option<Span>,
) {
if !cx.access_levels.is_exported(hir_id) {
return; // Private functions do not require doc comments
}
if !headers.safety && sig.header.unsafety == hir::Unsafety::Unsafe {
span_lint(
cx,
MISSING_SAFETY_DOC,
span,
"unsafe function's docs miss `# Safety` section",
);
}
if !headers.panics && panic_span.is_some() {
span_lint_and_note(
cx,
MISSING_PANICS_DOC,
span,
"docs for function which may panic missing `# Panics` section",
panic_span,
"first possible panic found here",
);
}
if !headers.errors {
if is_type_diagnostic_item(cx, return_ty(cx, hir_id), sym::result_type) {
span_lint(
cx,
MISSING_ERRORS_DOC,
span,
"docs for function returning `Result` missing `# Errors` section",
);
} else {
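// For `async fn`s the declared return type is an `impl Future`; look through the
// opaque type and its generator to find the eventual output type and check
// whether that is a `Result`.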
if_chain! {
if let Some(body_id) = body_id;
if let Some(future) = cx.tcx.lang_items().future_trait();
let typeck = cx.tcx.typeck_body(body_id);
let body = cx.tcx.hir().body(body_id);
let ret_ty = typeck.expr_ty(&body.value);
if implements_trait(cx, ret_ty, future, &[]);
if let ty::Opaque(_, subs) = ret_ty.kind();
if let Some(gen) = subs.types().next();
if let ty::Generator(_, subs, _) = gen.kind();
if is_type_diagnostic_item(cx, subs.as_generator().return_ty(), sym::result_type);
then {
span_lint(
cx,
MISSING_ERRORS_DOC,
span,
"docs for function returning `Result` missing `# Errors` section",
);
}
}
}
}
}
/// Cleanup documentation decoration.
///
/// We can't use `rustc_ast::attr::AttributeMethods::with_desugared_doc` or
/// `rustc_ast::parse::lexer::comments::strip_doc_comment_decoration` because we
/// need to keep track of
/// the spans, but this function is inspired by the latter.
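///
/// For block comments, the leading `*` decoration on each line is replaced with a
/// space (rather than removed) so that the recorded span offsets stay valid.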
#[allow(clippy::cast_possible_truncation)]
#[must_use]
pub fn strip_doc_comment_decoration(doc: &str, comment_kind: CommentKind, span: Span) -> (String, Vec<(usize, Span)>) {
// one-line comments lose their prefix
if comment_kind == CommentKind::Line {
let mut doc = doc.to_owned();
doc.push('\n');
let len = doc.len();
// +3 skips the opening delimiter
return (doc, vec![(len, span.with_lo(span.lo() + BytePos(3)))]);
}
let mut sizes = vec![];
let mut contains_initial_stars = false;
for line in doc.lines() {
let offset = line.as_ptr() as usize - doc.as_ptr() as usize;
debug_assert_eq!(offset as u32 as usize, offset);
contains_initial_stars |= line.trim_start().starts_with('*');
// +1 adds the newline, +3 skips the opening delimiter
sizes.push((line.len() + 1, span.with_lo(span.lo() + BytePos(3 + offset as u32))));
}
if !contains_initial_stars {
return (doc.to_string(), sizes);
}
// remove the initial '*'s if any
let mut no_stars = String::with_capacity(doc.len());
for line in doc.lines() {
let mut chars = line.chars();
while let Some(c) = chars.next() {
if c.is_whitespace() {
no_stars.push(c);
} else {
no_stars.push(if c == '*' { ' ' } else { c });
break;
}
}
no_stars.push_str(chars.as_str());
no_stars.push('\n');
}
(no_stars, sizes)
}
#[derive(Copy, Clone)]
struct DocHeaders {
safety: bool,
errors: bool,
panics: bool,
}
fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs: &'a [Attribute]) -> DocHeaders {
let mut doc = String::new();
let mut spans = vec![];
for attr in attrs {
if let AttrKind::DocComment(comment_kind, comment) = attr.kind {
let (comment, current_spans) = strip_doc_comment_decoration(&comment.as_str(), comment_kind, attr.span);
spans.extend_from_slice(&current_spans);
doc.push_str(&comment);
} else if attr.has_name(sym::doc) {
// ignore mix of sugared and non-sugared doc
// don't trigger the safety or errors check
return DocHeaders {
safety: true,
errors: true,
panics: true,
};
}
}
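// Convert the per-line lengths collected above into cumulative start offsets into `doc`,
// so that byte positions in the concatenated string can be mapped back to source spans.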
let mut current = 0;
for &mut (ref mut offset, _) in &mut spans {
let offset_copy = *offset;
*offset = current;
current += offset_copy;
}
if doc.is_empty() {
return DocHeaders {
safety: false,
errors: false,
panics: false,
};
}
let parser = pulldown_cmark::Parser::new(&doc).into_offset_iter();
// Iterate over all `Events` and combine consecutive events into one
let events = parser.coalesce(|previous, current| {
use pulldown_cmark::Event::Text;
let previous_range = previous.1;
let current_range = current.1;
match (previous.0, current.0) {
(Text(previous), Text(current)) => {
let mut previous = previous.to_string();
previous.push_str(&current);
Ok((Text(previous.into()), previous_range))
},
(previous, current) => Err(((previous, previous_range), (current, current_range))),
}
});
check_doc(cx, valid_idents, events, &spans)
}
const RUST_CODE: &[&str] = &["rust", "no_run", "should_panic", "compile_fail"];
fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize>)>>(
cx: &LateContext<'_>,
valid_idents: &FxHashSet<String>,
events: Events,
spans: &[(usize, Span)],
) -> DocHeaders {
// true if a safety header was found
use pulldown_cmark::CodeBlockKind;
use pulldown_cmark::Event::{
Code, End, FootnoteReference, HardBreak, Html, Rule, SoftBreak, Start, TaskListMarker, Text,
};
use pulldown_cmark::Tag::{CodeBlock, Heading, Link};
let mut headers = DocHeaders {
safety: false,
errors: false,
panics: false,
};
let mut in_code = false;
let mut in_link = None;
let mut in_heading = false;
let mut is_rust = false;
let mut edition = None;
for (event, range) in events {
match event {
Start(CodeBlock(ref kind)) => {
in_code = true;
if let CodeBlockKind::Fenced(lang) = kind {
for item in lang.split(',') {
if item == "ignore" {
is_rust = false;
break;
}
if let Some(stripped) = item.strip_prefix("edition") {
is_rust = true;
edition = stripped.parse::<Edition>().ok();
} else if item.is_empty() || RUST_CODE.contains(&item) {
is_rust = true;
}
}
}
},
End(CodeBlock(_)) => {
in_code = false;
is_rust = false;
},
Start(Link(_, url, _)) => in_link = Some(url),
End(Link(..)) => in_link = None,
Start(Heading(_)) => in_heading = true,
End(Heading(_)) => in_heading = false,
Start(_tag) | End(_tag) => (), // We don't care about other tags
Html(_html) => (), // HTML is weird, just ignore it
SoftBreak | HardBreak | TaskListMarker(_) | Code(_) | Rule => (),
FootnoteReference(text) | Text(text) => {
if Some(&text) == in_link.as_ref() {
// Probably a link of the form `<http://example.com>`
// Which are represented as a link to "http://example.com" with
// text "http://example.com" by pulldown-cmark
continue;
}
headers.safety |= in_heading && text.trim() == "Safety";
headers.errors |= in_heading && text.trim() == "Errors";
headers.panics |= in_heading && text.trim() == "Panics";
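// Find the doc fragment whose cumulative offset contains `range.start`; on `Err`,
// the entry just before the insertion point is the containing fragment.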
let index = match spans.binary_search_by(|c| c.0.cmp(&range.start)) {
Ok(o) => o,
Err(e) => e - 1,
};
let (begin, span) = spans[index];
if in_code {
if is_rust {
let edition = edition.unwrap_or_else(|| cx.tcx.sess.edition());
check_code(cx, &text, edition, span);
}
} else {
// Adjust for the beginning of the current `Event`
let span = span.with_lo(span.lo() + BytePos::from_usize(range.start - begin));
check_text(cx, valid_idents, &text, span);
}
},
}
}
headers
}
fn check_code(cx: &LateContext<'_>, text: &str, edition: Edition, span: Span) {
fn has_needless_main(code: &str, edition: Edition) -> bool {
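// Parse the doctest body with a throw-away parser session (errors are swallowed) and
// report `true` only when a non-empty, non-async `fn main` returning `()` is the sole
// relevant item; any other function or item matched below disables the lint.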
rustc_driver::catch_fatal_errors(|| {
rustc_span::with_session_globals(edition, || {
let filename = FileName::anon_source_code(code);
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let emitter = EmitterWriter::new(box io::sink(), None, false, false, false, None, false);
let handler = Handler::with_emitter(false, None, box emitter);
let sess = ParseSess::with_span_handler(handler, sm);
let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code.into()) {
Ok(p) => p,
Err(errs) => {
for mut err in errs {
err.cancel();
}
return false;
},
};
let mut relevant_main_found = false;
loop {
match parser.parse_item(ForceCollect::No) {
Ok(Some(item)) => match &item.kind {
// Tests with one of these items are ignored
ItemKind::Static(..)
| ItemKind::Const(..)
| ItemKind::ExternCrate(..)
| ItemKind::ForeignMod(..) => return false,
// We found a main function ...
ItemKind::Fn(box FnKind(_, sig, _, Some(block))) if item.ident.name == sym::main => {
let is_async = matches!(sig.header.asyncness, Async::Yes { .. });
let returns_nothing = match &sig.decl.output {
FnRetTy::Default(..) => true,
FnRetTy::Ty(ty) if ty.kind.is_unit() => true,
_ => false,
};
if returns_nothing && !is_async && !block.stmts.is_empty() {
// This main function should be linted, but only if there are no other functions
relevant_main_found = true;
} else {
// This main function should not be linted, we're done
return false;
}
},
// Another function was found; this case is ignored too
ItemKind::Fn(..) => return false,
_ => {},
},
Ok(None) => break,
Err(mut e) => {
e.cancel();
return false;
},
}
}
relevant_main_found
})
})
.ok()
.unwrap_or_default()
}
if has_needless_main(text, edition) {
span_lint(cx, NEEDLESS_DOCTEST_MAIN, span, "needless `fn main` in doctest");
}
}
fn check_text(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, text: &str, span: Span) {
for word in text.split(|c: char| c.is_whitespace() || c == '\'') {
// Trim punctuation as in `some comment (see foo::bar).`
// ^^
// Or even as in `_foo bar_` which is emphasized.
let word = word.trim_matches(|c: char| !c.is_alphanumeric());
if valid_idents.contains(word) {
continue;
}
// Adjust for the current word
let offset = word.as_ptr() as usize - text.as_ptr() as usize;
let span = Span::new(
span.lo() + BytePos::from_usize(offset),
span.lo() + BytePos::from_usize(offset + word.len()),
span.ctxt(),
);
check_word(cx, word, span);
}
}
fn check_word(cx: &LateContext<'_>, word: &str, span: Span) {
/// Checks if a string is camel-case, i.e., contains at least two uppercase
/// letters (`Clippy` is ok) and one lower-case letter (`NASA` is ok).
/// Plurals are also excluded (`IDs` is ok).
fn is_camel_case(s: &str) -> bool {
if s.starts_with(|c: char| c.is_digit(10)) {
return false;
}
let s = s.strip_suffix('s').unwrap_or(s);
s.chars().all(char::is_alphanumeric)
&& s.chars().filter(|&c| c.is_uppercase()).take(2).count() > 1
&& s.chars().filter(|&c| c.is_lowercase()).take(1).count() > 0
}
fn has_underscore(s: &str) -> bool {
s != "_" && !s.contains("\\_") && s.contains('_')
}
fn has_hyphen(s: &str) -> bool {
s != "-" && s.contains('-')
}
if let Ok(url) = Url::parse(word) {
// try to get around the fact that `foo::bar` parses as a valid URL
if !url.cannot_be_a_base() {
span_lint(
cx,
DOC_MARKDOWN,
span,
"you should put bare URLs between `<`/`>` or make a proper Markdown link",
);
return;
}
}
// We assume that mixed-case words are not meant to be put inside backticks. (Issue #2343)
if has_underscore(word) && has_hyphen(word) {
return;
}
if has_underscore(word) || word.contains("::") || is_camel_case(word) {
span_lint(
cx,
DOC_MARKDOWN,
span,
&format!("you should put `{}` between ticks in the documentation", word),
);
}
}
struct FindPanicUnwrap<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
panic_span: Option<Span>,
typeck_results: &'tcx ty::TypeckResults<'tcx>,
}
impl<'a, 'tcx> Visitor<'tcx> for FindPanicUnwrap<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
if self.panic_span.is_some() {
return;
}
// check for `begin_panic`
if_chain! {
if let ExprKind::Call(ref func_expr, _) = expr.kind;
if let ExprKind::Path(QPath::Resolved(_, ref path)) = func_expr.kind;
if let Some(path_def_id) = path.res.opt_def_id();
if match_panic_def_id(self.cx, path_def_id);
if is_expn_of(expr.span, "unreachable").is_none();
then {
self.panic_span = Some(expr.span);
}
}
// check for `unwrap`
if let Some(arglists) = method_chain_args(expr, &["unwrap"]) {
let receiver_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
if is_type_diagnostic_item(self.cx, receiver_ty, sym::option_type)
|| is_type_diagnostic_item(self.cx, receiver_ty, sym::result_type)
{
self.panic_span = Some(expr.span);
}
}
// and check sub-expressions
intravisit::walk_expr(self, expr);
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
}

View File

@ -0,0 +1,94 @@
//! Lint on unnecessary double comparisons, such as `x == y || x < y`, which can be simplified to a single comparison.
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use crate::utils::{eq_expr_value, snippet_with_applicability, span_lint_and_sugg};
declare_clippy_lint! {
/// **What it does:** Checks for double comparisons that could be simplified to a single expression.
///
///
/// **Why is this bad?** Readability.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # let x = 1;
/// # let y = 2;
/// if x == y || x < y {}
/// ```
///
/// Could be written as:
///
/// ```rust
/// # let x = 1;
/// # let y = 2;
/// if x <= y {}
/// ```
pub DOUBLE_COMPARISONS,
complexity,
"unnecessary double comparisons that can be simplified"
}
declare_lint_pass!(DoubleComparisons => [DOUBLE_COMPARISONS]);
impl<'tcx> DoubleComparisons {
#[allow(clippy::similar_names)]
fn check_binop(cx: &LateContext<'tcx>, op: BinOpKind, lhs: &'tcx Expr<'_>, rhs: &'tcx Expr<'_>, span: Span) {
let (lkind, llhs, lrhs, rkind, rlhs, rrhs) = match (&lhs.kind, &rhs.kind) {
(ExprKind::Binary(lb, llhs, lrhs), ExprKind::Binary(rb, rlhs, rrhs)) => {
(lb.node, llhs, lrhs, rb.node, rlhs, rrhs)
},
_ => return,
};
if !(eq_expr_value(cx, &llhs, &rlhs) && eq_expr_value(cx, &lrhs, &rrhs)) {
return;
}
macro_rules! lint_double_comparison {
($op:tt) => {{
let mut applicability = Applicability::MachineApplicable;
let lhs_str = snippet_with_applicability(cx, llhs.span, "", &mut applicability);
let rhs_str = snippet_with_applicability(cx, lrhs.span, "", &mut applicability);
let sugg = format!("{} {} {}", lhs_str, stringify!($op), rhs_str);
span_lint_and_sugg(
cx,
DOUBLE_COMPARISONS,
span,
"this binary expression can be simplified",
"try",
sugg,
applicability,
);
}};
}
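// Map the (outer, left, right) operator combination onto the single equivalent
// comparison, e.g. `x == y || x < y` simplifies to `x <= y`.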
#[rustfmt::skip]
match (op, lkind, rkind) {
(BinOpKind::Or, BinOpKind::Eq, BinOpKind::Lt) | (BinOpKind::Or, BinOpKind::Lt, BinOpKind::Eq) => {
lint_double_comparison!(<=)
},
(BinOpKind::Or, BinOpKind::Eq, BinOpKind::Gt) | (BinOpKind::Or, BinOpKind::Gt, BinOpKind::Eq) => {
lint_double_comparison!(>=)
},
(BinOpKind::Or, BinOpKind::Lt, BinOpKind::Gt) | (BinOpKind::Or, BinOpKind::Gt, BinOpKind::Lt) => {
lint_double_comparison!(!=)
},
(BinOpKind::And, BinOpKind::Le, BinOpKind::Ge) | (BinOpKind::And, BinOpKind::Ge, BinOpKind::Le) => {
lint_double_comparison!(==)
},
_ => (),
};
}
}
impl<'tcx> LateLintPass<'tcx> for DoubleComparisons {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if let ExprKind::Binary(ref kind, ref lhs, ref rhs) = expr.kind {
Self::check_binop(cx, kind.node, lhs, rhs, expr.span);
}
}
}

View File

@ -0,0 +1,76 @@
use crate::utils::span_lint;
use rustc_ast::ast::{Expr, ExprKind};
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks for unnecessary double parentheses.
///
/// **Why is this bad?** This makes code harder to read and might indicate a
/// mistake.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// // Bad
/// fn simple_double_parens() -> i32 {
/// ((0))
/// }
///
/// // Good
/// fn simple_no_parens() -> i32 {
/// 0
/// }
///
/// // or
///
/// # fn foo(bar: usize) {}
/// // Bad
/// foo((0));
///
/// // Good
/// foo(0);
/// ```
pub DOUBLE_PARENS,
complexity,
"Warn on unnecessary double parentheses"
}
declare_lint_pass!(DoubleParens => [DOUBLE_PARENS]);
impl EarlyLintPass for DoubleParens {
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
if expr.span.from_expansion() {
return;
}
let msg: &str = "consider removing unnecessary double parentheses";
match expr.kind {
ExprKind::Paren(ref in_paren) => match in_paren.kind {
ExprKind::Paren(_) | ExprKind::Tup(_) => {
span_lint(cx, DOUBLE_PARENS, expr.span, &msg);
},
_ => {},
},
ExprKind::Call(_, ref params) => {
if params.len() == 1 {
let param = &params[0];
if let ExprKind::Paren(_) = param.kind {
span_lint(cx, DOUBLE_PARENS, param.span, &msg);
}
}
},
ExprKind::MethodCall(_, ref params, _) => {
if params.len() == 2 {
let param = &params[1];
if let ExprKind::Paren(_) = param.kind {
span_lint(cx, DOUBLE_PARENS, param.span, &msg);
}
}
},
_ => {},
}
}
}

View File

@ -0,0 +1,160 @@
use crate::utils::{is_copy, match_def_path, paths, span_lint_and_note};
use if_chain::if_chain;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks for calls to `std::mem::drop` with a reference
/// instead of an owned value.
///
/// **Why is this bad?** Calling `drop` on a reference will only drop the
/// reference itself, which is a no-op. It will not call the `drop` method (from
/// the `Drop` trait implementation) on the underlying referenced value, which
/// is likely what was intended.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```ignore
/// let mut lock_guard = mutex.lock();
/// std::mem::drop(&lock_guard) // Should have been drop(lock_guard), mutex
/// // still locked
/// operation_that_requires_mutex_to_be_unlocked();
/// ```
pub DROP_REF,
correctness,
"calls to `std::mem::drop` with a reference instead of an owned value"
}
declare_clippy_lint! {
/// **What it does:** Checks for calls to `std::mem::forget` with a reference
/// instead of an owned value.
///
/// **Why is this bad?** Calling `forget` on a reference will only forget the
/// reference itself, which is a no-op. It will not forget the underlying
/// referenced value, which is likely what was intended.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let x = Box::new(1);
/// std::mem::forget(&x) // Should have been forget(x), x will still be dropped
/// ```
pub FORGET_REF,
correctness,
"calls to `std::mem::forget` with a reference instead of an owned value"
}
declare_clippy_lint! {
/// **What it does:** Checks for calls to `std::mem::drop` with a value
/// that derives the Copy trait
///
/// **Why is this bad?** Calling `std::mem::drop` [does nothing for types that
/// implement Copy](https://doc.rust-lang.org/std/mem/fn.drop.html), since the
/// value will be copied and moved into the function on invocation.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let x: i32 = 42; // i32 implements Copy
/// std::mem::drop(x) // A copy of x is passed to the function, leaving the
/// // original unaffected
/// ```
pub DROP_COPY,
correctness,
"calls to `std::mem::drop` with a value that implements Copy"
}
declare_clippy_lint! {
/// **What it does:** Checks for calls to `std::mem::forget` with a value that
/// derives the Copy trait
///
/// **Why is this bad?** Calling `std::mem::forget` [does nothing for types that
/// implement Copy](https://doc.rust-lang.org/std/mem/fn.drop.html) since the
/// value will be copied and moved into the function on invocation.
///
/// An alternative, but also valid, explanation is that Copy types do not
/// implement the Drop trait, which means they have no destructors. Without a
/// destructor, there is nothing for `std::mem::forget` to ignore.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let x: i32 = 42; // i32 implements Copy
/// std::mem::forget(x) // A copy of x is passed to the function, leaving the
/// // original unaffected
/// ```
pub FORGET_COPY,
correctness,
"calls to `std::mem::forget` with a value that implements Copy"
}
const DROP_REF_SUMMARY: &str = "calls to `std::mem::drop` with a reference instead of an owned value. \
Dropping a reference does nothing";
const FORGET_REF_SUMMARY: &str = "calls to `std::mem::forget` with a reference instead of an owned value. \
Forgetting a reference does nothing";
const DROP_COPY_SUMMARY: &str = "calls to `std::mem::drop` with a value that implements `Copy`. \
Dropping a copy leaves the original intact";
const FORGET_COPY_SUMMARY: &str = "calls to `std::mem::forget` with a value that implements `Copy`. \
Forgetting a copy leaves the original intact";
declare_lint_pass!(DropForgetRef => [DROP_REF, FORGET_REF, DROP_COPY, FORGET_COPY]);
impl<'tcx> LateLintPass<'tcx> for DropForgetRef {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
if let ExprKind::Call(ref path, ref args) = expr.kind;
if let ExprKind::Path(ref qpath) = path.kind;
if args.len() == 1;
if let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id();
then {
let lint;
let msg;
let arg = &args[0];
let arg_ty = cx.typeck_results().expr_ty(arg);
if let ty::Ref(..) = arg_ty.kind() {
if match_def_path(cx, def_id, &paths::DROP) {
lint = DROP_REF;
msg = DROP_REF_SUMMARY.to_string();
} else if match_def_path(cx, def_id, &paths::MEM_FORGET) {
lint = FORGET_REF;
msg = FORGET_REF_SUMMARY.to_string();
} else {
return;
}
span_lint_and_note(cx,
lint,
expr.span,
&msg,
Some(arg.span),
&format!("argument has type `{}`", arg_ty));
} else if is_copy(cx, arg_ty) {
if match_def_path(cx, def_id, &paths::DROP) {
lint = DROP_COPY;
msg = DROP_COPY_SUMMARY.to_string();
} else if match_def_path(cx, def_id, &paths::MEM_FORGET) {
lint = FORGET_COPY;
msg = FORGET_COPY_SUMMARY.to_string();
} else {
return;
}
span_lint_and_note(cx,
lint,
expr.span,
&msg,
Some(arg.span),
&format!("argument has type {}", arg_ty));
}
}
}
}
}
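To make the failure modes concrete, a small self-contained sketch (illustrative only; the values and names are invented) of code the reference and Copy branches above would flag:

```rust
use std::mem;

fn main() {
    // DROP_REF / FORGET_REF territory: only the reference is consumed,
    // so the underlying value is still live afterwards.
    let s = String::from("still alive");
    mem::drop(&s);
    println!("{}", s);

    // DROP_COPY / FORGET_COPY territory: a copy is passed in,
    // so the original value is unaffected.
    let x: i32 = 42;
    mem::forget(x);
    println!("{}", x);
}
```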

Some files were not shown because too many files have changed in this diff.