Mirror of https://git.proxmox.com/git/rustc (synced 2026-01-24 12:37:31 +00:00)

commit 064997fbe6 (parent 923072b801)
New upstream version 1.64.0+dfsg1

.cargo/config.toml (new file, 11 lines)

@@ -0,0 +1,11 @@
[source.crates-io]
replace-with = "vendored-sources"

[source."https://github.com/bjorn3/rust-ar.git"]
git = "https://github.com/bjorn3/rust-ar.git"
branch = "do_not_remove_cg_clif_ranlib"
replace-with = "vendored-sources"

[source.vendored-sources]
directory = "vendor"

Cargo.lock (generated, 467 lines changed; file diff suppressed because it is too large)

Cargo.toml (14 lines changed)

@@ -1,7 +1,5 @@
[workspace]
default-members = ["src/bootstrap"]
members = [
"src/bootstrap",
"compiler/rustc",
"library/std",
"library/test",
@@ -44,6 +42,7 @@ exclude = [
"build",
"compiler/rustc_codegen_cranelift",
"compiler/rustc_codegen_gcc",
"src/bootstrap",
"src/test/rustdoc-gui",
# HACK(eddyb) This hardcodes the fact that our CI uses `/checkout/obj`.
"obj",
@@ -61,7 +60,7 @@ exclude = [
# verify that this is the case. This requires, however, that the crate is built
# without overflow checks and debug assertions. Forcefully disable debug
# assertions and overflow checks here which should ensure that even if these
# assertions are enabled for libstd we won't enable then for compiler_builtins
# assertions are enabled for libstd we won't enable them for compiler_builtins
# which should ensure we still link everything correctly.
debug-assertions = false
overflow-checks = false
@@ -97,15 +96,6 @@ gimli.debug = 0
miniz_oxide.debug = 0
object.debug = 0

# The only package that ever uses debug builds is bootstrap.
# We care a lot about bootstrap's compile times, so don't include debug info for
# dependencies, only bootstrap itself.
[profile.dev]
debug = 0
[profile.dev.package]
# Only use debuginfo=1 to further reduce compile times.
bootstrap.debug = 1

# We want the RLS to use the version of Cargo that we've got vendored in this
# repository to ensure that the same exact version of Cargo is used by both the
# RLS and the Cargo binary itself. The RLS depends on Cargo as a git repository

@@ -233,7 +233,7 @@ Snapshot binaries are currently built and tested on several platforms:
| Platform / Architecture                    | x86  | x86_64 |
|--------------------------------------------|------|--------|
| Windows (7, 8, 10, ...)                    | ✓    | ✓      |
| Linux (kernel 2.6.32, glibc 2.11 or later) | ✓    | ✓      |
| Linux (kernel 3.2, glibc 2.17 or later)    | ✓    | ✓      |
| macOS (10.7 Lion or later)                 | (\*) | ✓      |

(\*): Apple dropped support for running 32-bit binaries starting from macOS 10.15 and iOS 11.

RELEASES.md (151 lines changed)

@@ -1,3 +1,144 @@
Version 1.64.0 (2022-09-22)
===========================

Language
--------
- [Unions with mutable references or tuples of allowed types are now allowed](https://github.com/rust-lang/rust/pull/97995/)
- It is now considered valid to deallocate memory pointed to by a shared reference `&T` [if every byte in `T` is inside an `UnsafeCell`](https://github.com/rust-lang/rust/pull/98017/)
- Unused tuple struct fields are now warned against in an allow-by-default lint, [`unused_tuple_struct_fields`](https://github.com/rust-lang/rust/pull/95977/), similar to the existing warning for unused struct fields. This lint will become warn-by-default in the future. (A small opt-in example follows this list.)
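
Since the new lint is allow-by-default, it only fires when opted into. A minimal sketch (the struct name here is made up for illustration):

```rust
#![warn(unused_tuple_struct_fields)] // allow-by-default in 1.64; opt in to see it fire

struct Meters(f64); // constructed below, but the field itself is never read

fn main() {
    let _m = Meters(1.0); // warns: field `0` is never read
}
```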

Compiler
--------
- [Add Nintendo Switch as tier 3 target](https://github.com/rust-lang/rust/pull/88991/)
  - Refer to Rust's [platform support page][platform-support-doc] for more
    information on Rust's tiered platform support.
- [Only compile `#[used]` as llvm.compiler.used for ELF targets](https://github.com/rust-lang/rust/pull/93718/)
- [Add the `--diagnostic-width` compiler flag to define the terminal width.](https://github.com/rust-lang/rust/pull/95635/)
- [Add support for link-flavor `rust-lld` for iOS, tvOS and watchOS](https://github.com/rust-lang/rust/pull/98771/)

Libraries
---------
- [Remove restrictions on compare-exchange memory ordering.](https://github.com/rust-lang/rust/pull/98383/)
- You can now `write!` or `writeln!` into an `OsString`: [Implement `fmt::Write` for `OsString`](https://github.com/rust-lang/rust/pull/97915/) (see the sketch after this list)
- [Make RwLockReadGuard covariant](https://github.com/rust-lang/rust/pull/96820/)
- [Implement `FusedIterator` for `std::net::[Into]Incoming`](https://github.com/rust-lang/rust/pull/97300/)
- [`impl<T: AsRawFd> AsRawFd for {Arc,Box}<T>`](https://github.com/rust-lang/rust/pull/97437/)
- [`ptr::copy` and `ptr::swap` are doing untyped copies](https://github.com/rust-lang/rust/pull/97712/)
- [Add cgroupv1 support to `available_parallelism`](https://github.com/rust-lang/rust/pull/97925/)
- [Mitigate many incorrect uses of `mem::uninitialized`](https://github.com/rust-lang/rust/pull/99182/)
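
Two of the library changes above are easy to show in a few lines. A minimal sketch (the file name and values are made up):

```rust
use std::ffi::OsString;
use std::fmt::Write as _; // `OsString: fmt::Write` is new in 1.64
use std::thread;

fn main() -> std::fmt::Result {
    // `write!`/`writeln!` can now target an `OsString` directly.
    let mut name = OsString::new();
    write!(name, "job-{}.log", 42)?;

    // `available_parallelism` now also takes cgroup v1 quotas into account on Linux.
    let workers = thread::available_parallelism().map(|n| n.get()).unwrap_or(1);
    writeln!(name, " ({workers} workers)")?;
    Ok(())
}
```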

Stabilized APIs
---------------

- [`future::IntoFuture`](https://doc.rust-lang.org/stable/std/future/trait.IntoFuture.html)
- [`future::poll_fn`](https://doc.rust-lang.org/stable/std/future/fn.poll_fn.html) (see the sketch after this list)
- [`task::ready!`](https://doc.rust-lang.org/stable/std/task/macro.ready.html)
- [`num::NonZero*::checked_mul`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroUsize.html#method.checked_mul)
- [`num::NonZero*::checked_pow`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroUsize.html#method.checked_pow)
- [`num::NonZero*::saturating_mul`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroUsize.html#method.saturating_mul)
- [`num::NonZero*::saturating_pow`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroUsize.html#method.saturating_pow)
- [`num::NonZeroI*::abs`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroIsize.html#method.abs)
- [`num::NonZeroI*::checked_abs`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroIsize.html#method.checked_abs)
- [`num::NonZeroI*::overflowing_abs`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroIsize.html#method.overflowing_abs)
- [`num::NonZeroI*::saturating_abs`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroIsize.html#method.saturating_abs)
- [`num::NonZeroI*::unsigned_abs`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroIsize.html#method.unsigned_abs)
- [`num::NonZeroI*::wrapping_abs`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroIsize.html#method.wrapping_abs)
- [`num::NonZeroU*::checked_add`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroUsize.html#method.checked_add)
- [`num::NonZeroU*::checked_next_power_of_two`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroUsize.html#method.checked_next_power_of_two)
- [`num::NonZeroU*::saturating_add`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroUsize.html#method.saturating_add)
- [`os::unix::process::CommandExt::process_group`](https://doc.rust-lang.org/stable/std/os/unix/process/trait.CommandExt.html#tymethod.process_group)
- [`os::windows::fs::FileTypeExt::is_symlink_dir`](https://doc.rust-lang.org/stable/std/os/windows/fs/trait.FileTypeExt.html#tymethod.is_symlink_dir)
- [`os::windows::fs::FileTypeExt::is_symlink_file`](https://doc.rust-lang.org/stable/std/os/windows/fs/trait.FileTypeExt.html#tymethod.is_symlink_file)
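
A short sketch of two of the newly stabilized items, `future::poll_fn` and the checked arithmetic on the `NonZero*` types (function and variable names are made up):

```rust
use std::future::poll_fn;
use std::num::NonZeroU8;
use std::task::Poll;

async fn answer() -> u32 {
    // Build a future directly from a poll closure.
    poll_fn(|_cx| Poll::Ready(42)).await
}

fn main() {
    // Constructing the future is enough for this sketch; any executor could drive it.
    let _fut = answer();

    // Checked arithmetic on NonZero types keeps the non-zero invariant
    // while still reporting overflow.
    let big = NonZeroU8::new(200).unwrap();
    let two = NonZeroU8::new(2).unwrap();
    assert_eq!(big.checked_mul(two), None); // 200 * 2 overflows u8
}
```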

These types were previously stable in `std::ffi`, but are now also available in `core` and `alloc`:

- [`core::ffi::CStr`](https://doc.rust-lang.org/stable/core/ffi/struct.CStr.html)
- [`core::ffi::FromBytesWithNulError`](https://doc.rust-lang.org/stable/core/ffi/struct.FromBytesWithNulError.html)
- [`alloc::ffi::CString`](https://doc.rust-lang.org/stable/alloc/ffi/struct.CString.html)
- [`alloc::ffi::FromVecWithNulError`](https://doc.rust-lang.org/stable/alloc/ffi/struct.FromVecWithNulError.html)
- [`alloc::ffi::IntoStringError`](https://doc.rust-lang.org/stable/alloc/ffi/struct.IntoStringError.html)
- [`alloc::ffi::NulError`](https://doc.rust-lang.org/stable/alloc/ffi/struct.NulError.html)

These types were previously stable in `std::os::raw`, but are now also available in `core::ffi` and `std::ffi` (a `no_std`-friendly sketch follows this list):

- [`ffi::c_char`](https://doc.rust-lang.org/stable/std/ffi/type.c_char.html)
- [`ffi::c_double`](https://doc.rust-lang.org/stable/std/ffi/type.c_double.html)
- [`ffi::c_float`](https://doc.rust-lang.org/stable/std/ffi/type.c_float.html)
- [`ffi::c_int`](https://doc.rust-lang.org/stable/std/ffi/type.c_int.html)
- [`ffi::c_long`](https://doc.rust-lang.org/stable/std/ffi/type.c_long.html)
- [`ffi::c_longlong`](https://doc.rust-lang.org/stable/std/ffi/type.c_longlong.html)
- [`ffi::c_schar`](https://doc.rust-lang.org/stable/std/ffi/type.c_schar.html)
- [`ffi::c_short`](https://doc.rust-lang.org/stable/std/ffi/type.c_short.html)
- [`ffi::c_uchar`](https://doc.rust-lang.org/stable/std/ffi/type.c_uchar.html)
- [`ffi::c_uint`](https://doc.rust-lang.org/stable/std/ffi/type.c_uint.html)
- [`ffi::c_ulong`](https://doc.rust-lang.org/stable/std/ffi/type.c_ulong.html)
- [`ffi::c_ulonglong`](https://doc.rust-lang.org/stable/std/ffi/type.c_ulonglong.html)
- [`ffi::c_ushort`](https://doc.rust-lang.org/stable/std/ffi/type.c_ushort.html)
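
Because `CStr` and the raw C type aliases now live in `core::ffi`, they can be used without `std`. A minimal sketch (the helper name is made up):

```rust
use core::ffi::{c_char, CStr}; // previously only reachable through `std`

/// Length of a NUL-terminated C string; only needs `core`,
/// so the same code also works in `#![no_std]` crates.
///
/// Safety: `ptr` must be non-null and point to a valid, NUL-terminated string.
unsafe fn c_strlen(ptr: *const c_char) -> usize {
    CStr::from_ptr(ptr).to_bytes().len()
}

fn main() {
    let s = b"hi\0";
    assert_eq!(unsafe { c_strlen(s.as_ptr() as *const c_char) }, 2);
}
```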

These APIs are now usable in const contexts:

- [`slice::from_raw_parts`](https://doc.rust-lang.org/stable/core/slice/fn.from_raw_parts.html) (see the `const fn` sketch below)
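
A small sketch of what a const-stable `slice::from_raw_parts` enables (the wrapper function is hypothetical):

```rust
use core::slice;

/// Reassemble a byte slice from its raw parts inside a `const fn`,
/// which is possible now that `slice::from_raw_parts` is const-stable.
///
/// Safety: `ptr` and `len` must describe a valid allocation that lives for `'a`.
const unsafe fn rebuild<'a>(ptr: *const u8, len: usize) -> &'a [u8] {
    slice::from_raw_parts(ptr, len)
}

fn main() {
    let data = [1u8, 2, 3];
    assert_eq!(unsafe { rebuild(data.as_ptr(), 2) }, &[1, 2]);
}
```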

Cargo
-----
- [Packages can now inherit settings from the workspace so that the settings
  can be centralized in one place.](https://github.com/rust-lang/cargo/pull/10859) See
  [`workspace.package`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#the-workspacepackage-table)
  and
  [`workspace.dependencies`](https://doc.rust-lang.org/nightly/cargo/reference/workspaces.html#the-workspacedependencies-table)
  for more details on how to define these common settings.
- [Cargo commands can now accept multiple `--target` flags to build for
  multiple targets at once](https://github.com/rust-lang/cargo/pull/10766), and the
  [`build.target`](https://doc.rust-lang.org/nightly/cargo/reference/config.html#buildtarget)
  config option may now take an array of multiple targets.
- [The `--jobs` argument can now take a negative number to count backwards from
  the max CPUs.](https://github.com/rust-lang/cargo/pull/10844)
- [`cargo add` will now update `Cargo.lock`.](https://github.com/rust-lang/cargo/pull/10902)
- [Added](https://github.com/rust-lang/cargo/pull/10838) the
  [`--crate-type`](https://doc.rust-lang.org/nightly/cargo/commands/cargo-rustc.html#option-cargo-rustc---crate-type)
  flag to `cargo rustc` to override the crate type.
- [Significantly improved the performance fetching git dependencies from GitHub
  when using a hash in the `rev` field.](https://github.com/rust-lang/cargo/pull/10079)

Misc
----
- [The `rust-analyzer` rustup component is now available on the stable channel.](https://github.com/rust-lang/rust/pull/98640/)

Compatibility Notes
-------------------
- The minimum required versions for all `-linux-gnu` targets are now at least kernel 3.2 and glibc 2.17, for targets that previously supported older versions: [Increase the minimum linux-gnu versions](https://github.com/rust-lang/rust/pull/95026/)
- [Network primitives are now implemented with the ideal Rust layout, not the C system layout](https://github.com/rust-lang/rust/pull/78802/). This can cause problems when transmuting the types.
- [Add assertion that `transmute_copy`'s `U` is not larger than `T`](https://github.com/rust-lang/rust/pull/98839/) (illustrated in the sketch after this list)
- [A soundness bug in `BTreeMap` was fixed](https://github.com/rust-lang/rust/pull/99413/) that allowed data it was borrowing to be dropped before the container.
- [The Drop behavior of C-like enums cast to ints has changed](https://github.com/rust-lang/rust/pull/96862/). These are already discouraged by a compiler warning.
- [Relate late-bound closure lifetimes to parent fn in NLL](https://github.com/rust-lang/rust/pull/98835/)
- [Errors at const-eval time are now in future incompatibility reports](https://github.com/rust-lang/rust/pull/97743/)
- On the `thumbv6m-none-eabi` target, some incorrect `asm!` statements were erroneously accepted if they used the high registers (r8 to r14) as an input/output operand. [This is no longer accepted](https://github.com/rust-lang/rust/pull/99155/).
- [`impl Trait` was accidentally accepted as the associated type value of return-position `impl Trait`](https://github.com/rust-lang/rust/pull/97346/), without fulfilling all the trait bounds of that associated type, as long as the hidden type satisfies said bounds. This has been fixed.
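
To illustrate the `transmute_copy` note above, a sketch (the variable names are made up):

```rust
use std::mem;

fn main() {
    let x: u64 = 0x1122_3344_5566_7788;
    // Reading a smaller value out of a larger one is still fine.
    let _low: u32 = unsafe { mem::transmute_copy(&x) };

    // Going the other way (here u32 -> u64) now trips the new size assertion
    // instead of reading past the end of the source:
    // let _big: u64 = unsafe { mem::transmute_copy(&0u32) };
}
```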

Internal Changes
----------------

These changes do not affect any public interfaces of Rust, but they represent
significant improvements to the performance or internals of rustc and related
tools.

- Windows builds now use profile-guided optimization, providing 10-20% improvements to compiler performance: [Utilize PGO for windows x64 rustc dist builds](https://github.com/rust-lang/rust/pull/96978/)
- [Stop keeping metadata in memory before writing it to disk](https://github.com/rust-lang/rust/pull/96544/)
- [compiletest: strip debuginfo by default for mode=ui](https://github.com/rust-lang/rust/pull/98140/)
- Many improvements to generated code for derives, including performance improvements:
  - [Don't use match-destructuring for derived ops on structs.](https://github.com/rust-lang/rust/pull/98446/)
  - [Many small deriving cleanups](https://github.com/rust-lang/rust/pull/98741/)
  - [More derive output improvements](https://github.com/rust-lang/rust/pull/98758/)
  - [Clarify deriving code](https://github.com/rust-lang/rust/pull/98915/)
  - [Final derive output improvements](https://github.com/rust-lang/rust/pull/99046/)
  - [Stop injecting `#[allow(unused_qualifications)]` in generated `derive` implementations](https://github.com/rust-lang/rust/pull/99485/)
  - [Improve `derive(Debug)`](https://github.com/rust-lang/rust/pull/98190/)
- [Bump to clap 3](https://github.com/rust-lang/rust/pull/98213/)
- [fully move dropck to mir](https://github.com/rust-lang/rust/pull/98641/)
- [Optimize `Vec::insert` for the case where `index == len`.](https://github.com/rust-lang/rust/pull/98755/)
- [Convert rust-analyzer to an in-tree tool](https://github.com/rust-lang/rust/pull/99603/)

Version 1.63.0 (2022-08-11)
==========================

@@ -27,6 +168,7 @@ Libraries
- [Extend `ptr::null` and `null_mut` to all thin (including extern) types.][94954]
- [`impl Read and Write for VecDeque<u8>`.][95632]
- [STD support for the Nintendo 3DS.][95897]
- [Use rounding in float to Duration conversion methods.][96051]
- [Make write/print macros eagerly drop temporaries.][96455]
- [Implement internal traits that enable `[OsStr]::join`.][96881]
- [Implement `Hash` for `core::alloc::Layout`.][97034]
@@ -99,6 +241,8 @@ Compatibility Notes

- [`#[link]` attributes are now checked more strictly,][96885] which may introduce
  errors for invalid attribute arguments that were previously ignored.
- [Rounding is now used when converting a float to a `Duration`.][96051] The converted
  duration can differ slightly from what it was.

Internal Changes
----------------
@@ -118,6 +262,7 @@ and related tools.
[95818]: https://github.com/rust-lang/rust/pull/95818/
[95897]: https://github.com/rust-lang/rust/pull/95897/
[95953]: https://github.com/rust-lang/rust/pull/95953/
[96051]: https://github.com/rust-lang/rust/pull/96051/
[96296]: https://github.com/rust-lang/rust/pull/96296/
[96455]: https://github.com/rust-lang/rust/pull/96455/
[96737]: https://github.com/rust-lang/rust/pull/96737/
@@ -213,6 +358,7 @@ Language
- [Fix constants not getting dropped if part of a diverging expression][94775]
- [Support unit struct/enum variant in destructuring assignment][95380]
- [Remove mutable_borrow_reservation_conflict lint and allow the code pattern][96268]
- [`const` functions may now specify `extern "C"` or `extern "Rust"`][95346]

Compiler
--------
@@ -302,6 +448,7 @@ and related tools.
[94872]: https://github.com/rust-lang/rust/pull/94872/
[95006]: https://github.com/rust-lang/rust/pull/95006/
[95035]: https://github.com/rust-lang/rust/pull/95035/
[95346]: https://github.com/rust-lang/rust/pull/95346/
[95372]: https://github.com/rust-lang/rust/pull/95372/
[95380]: https://github.com/rust-lang/rust/pull/95380/
[95431]: https://github.com/rust-lang/rust/pull/95431/
@@ -1438,7 +1585,7 @@ Compatibility Notes
- [Mixing Option and Result via `?` is no longer permitted in closures for inferred types.][86831]
- [Previously unsound code is no longer permitted where different constructors in branches
  could require different lifetimes.][85574]
- As previously mentioned the [`std::arch` instrinsics now uses stricter const checking][83278]
- As previously mentioned the [`std::arch` intrinsics now uses stricter const checking][83278]
  than before and may reject some previously accepted code.
- [`i128` multiplication on Cortex M0+ platforms currently unconditionally causes overflow
  when compiled with `codegen-units = 1`.][86063]
@@ -2516,7 +2663,7 @@ Compatibility Notes
- [Fixed a regression parsing `{} && false` in tail expressions.][74650]
- [Added changes to how proc-macros are expanded in `macro_rules!` that should
  help to preserve more span information.][73084] These changes may cause
  compiliation errors if your macro was unhygenic or didn't correctly handle
  compilation errors if your macro was unhygenic or didn't correctly handle
  `Delimiter::None`.
- [Moved support for the CloudABI target to tier 3.][75568]
- [`linux-gnu` targets now require minimum kernel 2.6.32 and glibc 2.11.][74163]

@@ -5,6 +5,9 @@ fn main() {
let target_env = env::var("CARGO_CFG_TARGET_ENV");
if Ok("windows") == target_os.as_deref() && Ok("msvc") == target_env.as_deref() {
set_windows_exe_options();
} else {
// Avoid rerunning the build script every time.
println!("cargo:rerun-if-changed=build.rs");
}
}

@@ -5,4 +5,4 @@ edition = "2021"

[dependencies]
bitflags = "1.2.1"
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }

@@ -30,7 +30,7 @@ pub type DoubleDouble = DoubleFloat<ieee::Double>;
// FIXME: Implement all operations in DoubleDouble, and delete these
// semantics.
// FIXME(eddyb) This shouldn't need to be `pub`, it's only used in bounds.
pub struct FallbackS<F>(F);
pub struct FallbackS<F>(#[allow(unused)] F);
type Fallback<F> = ieee::IeeeFloat<FallbackS<F>>;
impl<F: Float> ieee::Semantics for FallbackS<F> {
// Forbid any conversion to/from bits.
@@ -45,7 +45,7 @@ impl<F: Float> ieee::Semantics for FallbackS<F> {
// truncate the mantissa. The result of that second conversion
// may be inexact, but should never underflow.
// FIXME(eddyb) This shouldn't need to be `pub`, it's only used in bounds.
pub struct FallbackExtendedS<F>(F);
pub struct FallbackExtendedS<F>(#[allow(unused)] F);
type FallbackExtended<F> = ieee::IeeeFloat<FallbackExtendedS<F>>;
impl<F: Float> ieee::Semantics for FallbackExtendedS<F> {
// Forbid any conversion to/from bits.

@@ -4,4 +4,4 @@ version = "0.0.0"
edition = "2021"

[dependencies]
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }

@@ -19,6 +19,7 @@
#![feature(rustc_attrs)]
#![cfg_attr(test, feature(test))]
#![feature(strict_provenance)]
#![feature(ptr_const_cast)]

use smallvec::SmallVec;

@@ -27,7 +28,7 @@ use std::cell::{Cell, RefCell};
use std::cmp;
use std::marker::{PhantomData, Send};
use std::mem::{self, MaybeUninit};
use std::ptr;
use std::ptr::{self, NonNull};
use std::slice;

#[inline(never)]
@@ -55,15 +56,24 @@ pub struct TypedArena<T> {

struct ArenaChunk<T = u8> {
/// The raw storage for the arena chunk.
storage: Box<[MaybeUninit<T>]>,
storage: NonNull<[MaybeUninit<T>]>,
/// The number of valid entries in the chunk.
entries: usize,
}

unsafe impl<#[may_dangle] T> Drop for ArenaChunk<T> {
fn drop(&mut self) {
unsafe { Box::from_raw(self.storage.as_mut()) };
}
}

impl<T> ArenaChunk<T> {
#[inline]
unsafe fn new(capacity: usize) -> ArenaChunk<T> {
ArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
ArenaChunk {
storage: NonNull::new(Box::into_raw(Box::new_uninit_slice(capacity))).unwrap(),
entries: 0,
}
}

/// Destroys this arena chunk.
@@ -72,14 +82,15 @@ impl<T> ArenaChunk<T> {
// The branch on needs_drop() is an -O1 performance optimization.
// Without the branch, dropping TypedArena<u8> takes linear time.
if mem::needs_drop::<T>() {
ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
let slice = &mut *(self.storage.as_mut());
ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
}
}

// Returns a pointer to the first allocated object.
#[inline]
fn start(&mut self) -> *mut T {
MaybeUninit::slice_as_mut_ptr(&mut self.storage)
self.storage.as_ptr() as *mut T
}

// Returns a pointer to the end of the allocated space.
@@ -90,7 +101,7 @@ impl<T> ArenaChunk<T> {
// A pointer as large as possible for zero-sized elements.
ptr::invalid_mut(!0)
} else {
self.start().add(self.storage.len())
self.start().add((*self.storage.as_ptr()).len())
}
}
}
@@ -274,7 +285,7 @@ impl<T> TypedArena<T> {
// If the previous chunk's len is less than HUGE_PAGE
// bytes, then this chunk will be least double the previous
// chunk's size.
new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / elem_size / 2);
new_cap *= 2;
} else {
new_cap = PAGE / elem_size;
@@ -382,7 +393,7 @@ impl DroplessArena {
// If the previous chunk's len is less than HUGE_PAGE
// bytes, then this chunk will be least double the previous
// chunk's size.
new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / 2);
new_cap *= 2;
} else {
new_cap = PAGE;
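
The arena hunks above replace the chunk's owning `Box` with a raw `NonNull` plus an explicit `Drop`. A standalone sketch of that ownership pattern using only the standard library (the type and method names here are made up, not the rustc ones):

```rust
use std::mem::MaybeUninit;
use std::ptr::NonNull;

/// A chunk that owns its storage through a raw pointer and frees it in Drop
/// by reconstituting the Box it was created from.
struct RawChunk<T> {
    storage: NonNull<[MaybeUninit<T>]>,
}

impl<T> RawChunk<T> {
    fn new(capacity: usize) -> RawChunk<T> {
        let boxed: Box<[MaybeUninit<T>]> =
            (0..capacity).map(|_| MaybeUninit::uninit()).collect();
        RawChunk { storage: NonNull::new(Box::into_raw(boxed)).unwrap() }
    }

    fn capacity(&self) -> usize {
        // Only the fat pointer's length metadata is read here.
        unsafe { (*self.storage.as_ptr()).len() }
    }
}

impl<T> Drop for RawChunk<T> {
    fn drop(&mut self) {
        // Reclaim the allocation. The real ArenaChunk additionally uses
        // `#[may_dangle]` so drop check does not force `T` to strictly
        // outlive the chunk.
        unsafe { drop(Box::from_raw(self.storage.as_ptr())) };
    }
}

fn main() {
    let chunk: RawChunk<u64> = RawChunk::new(8);
    assert_eq!(chunk.capacity(), 8);
}
```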

@@ -79,7 +79,11 @@ fn test_arena_alloc_nested() {
#[test]
pub fn test_copy() {
let arena = TypedArena::default();
for _ in 0..100000 {
#[cfg(not(miri))]
const N: usize = 100000;
#[cfg(miri)]
const N: usize = 1000;
for _ in 0..N {
arena.alloc(Point { x: 1, y: 2, z: 3 });
}
}
@@ -106,7 +110,11 @@ struct Noncopy {
#[test]
pub fn test_noncopy() {
let arena = TypedArena::default();
for _ in 0..100000 {
#[cfg(not(miri))]
const N: usize = 100000;
#[cfg(miri)]
const N: usize = 1000;
for _ in 0..N {
arena.alloc(Noncopy { string: "hello world".to_string(), array: vec![1, 2, 3, 4, 5] });
}
}
@@ -114,7 +122,11 @@ pub fn test_noncopy() {
#[test]
pub fn test_typed_arena_zero_sized() {
let arena = TypedArena::default();
for _ in 0..100000 {
#[cfg(not(miri))]
const N: usize = 100000;
#[cfg(miri)]
const N: usize = 1000;
for _ in 0..N {
arena.alloc(());
}
}
@@ -124,7 +136,11 @@ pub fn test_typed_arena_clear() {
let mut arena = TypedArena::default();
for _ in 0..10 {
arena.clear();
for _ in 0..10000 {
#[cfg(not(miri))]
const N: usize = 10000;
#[cfg(miri)]
const N: usize = 100;
for _ in 0..N {
arena.alloc(Point { x: 1, y: 2, z: 3 });
}
}

@@ -14,5 +14,5 @@ rustc_data_structures = { path = "../rustc_data_structures" }
rustc_index = { path = "../rustc_index" }
rustc_lexer = { path = "../rustc_lexer" }
rustc_macros = { path = "../rustc_macros" }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
bitflags = "1.2.1"

@@ -64,7 +64,7 @@ impl fmt::Debug for Label {

/// A "Lifetime" is an annotation of the scope in which variable
/// can be used, e.g. `'a` in `&'a i32`.
#[derive(Clone, Encodable, Decodable, Copy)]
#[derive(Clone, Encodable, Decodable, Copy, PartialEq, Eq)]
pub struct Lifetime {
pub id: NodeId,
pub ident: Ident,
@@ -1111,10 +1111,6 @@ pub struct Expr {
pub tokens: Option<LazyTokenStream>,
}

// `Expr` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(Expr, 104);

impl Expr {
/// Returns `true` if this expression would be valid somewhere that expects a value;
/// for example, an `if` condition.
@@ -1390,7 +1386,7 @@ pub enum ExprKind {
/// A closure (e.g., `move |a, b, c| a + b + c`).
///
/// The final span is the span of the argument block `|...|`.
Closure(CaptureBy, Async, Movability, P<FnDecl>, P<Expr>, Span),
Closure(ClosureBinder, CaptureBy, Async, Movability, P<FnDecl>, P<Expr>, Span),
/// A block (`'label: { ... }`).
Block(P<Block>, Option<Label>),
/// An async block (`async move { ... }`).
@@ -1518,6 +1514,31 @@ pub enum Movability {
Movable,
}

/// Closure lifetime binder, `for<'a, 'b>` in `for<'a, 'b> |_: &'a (), _: &'b ()|`.
#[derive(Clone, Encodable, Decodable, Debug)]
pub enum ClosureBinder {
/// The binder is not present, all closure lifetimes are inferred.
NotPresent,
/// The binder is present.
For {
/// Span of the whole `for<>` clause
///
/// ```text
/// for<'a, 'b> |_: &'a (), _: &'b ()| { ... }
/// ^^^^^^^^^^^ -- this
/// ```
span: Span,

/// Lifetimes in the `for<>` closure
///
/// ```text
/// for<'a, 'b> |_: &'a (), _: &'b ()| { ... }
/// ^^^^^^ -- this
/// ```
generic_params: P<[GenericParam]>,
},
}

/// Represents a macro invocation. The `path` indicates which macro
/// is being invoked, and the `args` are arguments passed to it.
#[derive(Clone, Encodable, Decodable, Debug)]
@@ -2036,6 +2057,14 @@ impl TyKind {
pub fn is_unit(&self) -> bool {
matches!(self, TyKind::Tup(tys) if tys.is_empty())
}

pub fn is_simple_path(&self) -> Option<Symbol> {
if let TyKind::Path(None, Path { segments, .. }) = &self && segments.len() == 1 {
Some(segments[0].ident.name)
} else {
None
}
}
}

/// Syntax used to declare a trait object.
@@ -2667,13 +2696,16 @@ impl Item {
#[derive(Clone, Copy, Encodable, Decodable, Debug)]
pub enum Extern {
None,
Implicit,
Explicit(StrLit),
Implicit(Span),
Explicit(StrLit, Span),
}

impl Extern {
pub fn from_abi(abi: Option<StrLit>) -> Extern {
abi.map_or(Extern::Implicit, Extern::Explicit)
pub fn from_abi(abi: Option<StrLit>, span: Span) -> Extern {
match abi {
Some(name) => Extern::Explicit(name, span),
None => Extern::Implicit(span),
}
}
}

@@ -2847,9 +2879,6 @@ pub enum ItemKind {
MacroDef(MacroDef),
}

#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(ItemKind, 112);

impl ItemKind {
pub fn article(&self) -> &str {
use ItemKind::*;
@@ -2921,9 +2950,6 @@ pub enum AssocItemKind {
MacCall(MacCall),
}

#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(AssocItemKind, 72);

impl AssocItemKind {
pub fn defaultness(&self) -> Defaultness {
match *self {
@@ -2973,9 +2999,6 @@ pub enum ForeignItemKind {
MacCall(MacCall),
}

#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(ForeignItemKind, 72);

impl From<ForeignItemKind> for ItemKind {
fn from(foreign_item_kind: ForeignItemKind) -> ItemKind {
match foreign_item_kind {
@@ -3002,3 +3025,27 @@ impl TryFrom<ItemKind> for ForeignItemKind {
}

pub type ForeignItem = Item<ForeignItemKind>;

// Some nodes are used a lot. Make sure they don't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
mod size_asserts {
use super::*;
// These are in alphabetical order, which is easy to maintain.
rustc_data_structures::static_assert_size!(AssocItemKind, 72);
rustc_data_structures::static_assert_size!(Attribute, 152);
rustc_data_structures::static_assert_size!(Block, 48);
rustc_data_structures::static_assert_size!(Expr, 104);
rustc_data_structures::static_assert_size!(Fn, 192);
rustc_data_structures::static_assert_size!(ForeignItemKind, 72);
rustc_data_structures::static_assert_size!(GenericBound, 88);
rustc_data_structures::static_assert_size!(Generics, 72);
rustc_data_structures::static_assert_size!(Impl, 200);
rustc_data_structures::static_assert_size!(Item, 200);
rustc_data_structures::static_assert_size!(ItemKind, 112);
rustc_data_structures::static_assert_size!(Lit, 48);
rustc_data_structures::static_assert_size!(Pat, 120);
rustc_data_structures::static_assert_size!(Path, 40);
rustc_data_structures::static_assert_size!(PathSegment, 24);
rustc_data_structures::static_assert_size!(Stmt, 32);
rustc_data_structures::static_assert_size!(Ty, 96);
}

@@ -8,7 +8,7 @@ use crate::ast::{Path, PathSegment};
use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter, Token};
use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
use crate::tokenstream::{DelimSpan, Spacing, TokenTree, TreeAndSpacing};
use crate::tokenstream::{DelimSpan, Spacing, TokenTree};
use crate::tokenstream::{LazyTokenStream, TokenStream};
use crate::util::comments;

@@ -388,7 +388,7 @@ pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {
}

impl MetaItem {
fn token_trees_and_spacings(&self) -> Vec<TreeAndSpacing> {
fn token_trees(&self) -> Vec<TokenTree> {
let mut idents = vec![];
let mut last_pos = BytePos(0_u32);
for (i, segment) in self.path.segments.iter().enumerate() {
@@ -396,12 +396,12 @@ impl MetaItem {
if !is_first {
let mod_sep_span =
Span::new(last_pos, segment.ident.span.lo(), segment.ident.span.ctxt(), None);
idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
idents.push(TokenTree::token_alone(token::ModSep, mod_sep_span));
}
idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into());
idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident), Spacing::Alone));
last_pos = segment.ident.span.hi();
}
idents.extend(self.kind.token_trees_and_spacings(self.span));
idents.extend(self.kind.token_trees(self.span));
idents
}

@@ -411,12 +411,13 @@ impl MetaItem {
{
// FIXME: Share code with `parse_path`.
let path = match tokens.next().map(TokenTree::uninterpolate) {
Some(TokenTree::Token(Token {
kind: kind @ (token::Ident(..) | token::ModSep),
span,
})) => 'arm: {
Some(TokenTree::Token(
Token { kind: kind @ (token::Ident(..) | token::ModSep), span },
_,
)) => 'arm: {
let mut segments = if let token::Ident(name, _) = kind {
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek()
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }, _)) =
tokens.peek()
{
tokens.next();
vec![PathSegment::from_ident(Ident::new(name, span))]
@@ -427,14 +428,15 @@ impl MetaItem {
vec![PathSegment::path_root(span)]
};
loop {
if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) =
if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) =
tokens.next().map(TokenTree::uninterpolate)
{
segments.push(PathSegment::from_ident(Ident::new(name, span)));
} else {
return None;
}
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek()
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }, _)) =
tokens.peek()
{
tokens.next();
} else {
@@ -444,7 +446,7 @@ impl MetaItem {
let span = span.with_hi(segments.last().unwrap().ident.span.hi());
Path { span, segments, tokens: None }
}
Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match *nt {
token::Nonterminal::NtMeta(ref item) => return item.meta(item.path.span),
token::Nonterminal::NtPath(ref path) => (**path).clone(),
_ => return None,
@@ -491,9 +493,9 @@ impl MetaItemKind {
let mut tts = Vec::new();
for (i, item) in list.iter().enumerate() {
if i > 0 {
tts.push(TokenTree::token(token::Comma, span).into());
tts.push(TokenTree::token_alone(token::Comma, span));
}
tts.extend(item.token_trees_and_spacings())
tts.extend(item.token_trees())
}
MacArgs::Delimited(
DelimSpan::from_single(span),
@@ -504,31 +506,28 @@ impl MetaItemKind {
}
}

fn token_trees_and_spacings(&self, span: Span) -> Vec<TreeAndSpacing> {
fn token_trees(&self, span: Span) -> Vec<TokenTree> {
match *self {
MetaItemKind::Word => vec![],
MetaItemKind::NameValue(ref lit) => {
vec![
TokenTree::token(token::Eq, span).into(),
TokenTree::Token(lit.to_token()).into(),
TokenTree::token_alone(token::Eq, span),
TokenTree::Token(lit.to_token(), Spacing::Alone),
]
}
MetaItemKind::List(ref list) => {
let mut tokens = Vec::new();
for (i, item) in list.iter().enumerate() {
if i > 0 {
tokens.push(TokenTree::token(token::Comma, span).into());
tokens.push(TokenTree::token_alone(token::Comma, span));
}
tokens.extend(item.token_trees_and_spacings())
tokens.extend(item.token_trees())
}
vec![
TokenTree::Delimited(
DelimSpan::from_single(span),
Delimiter::Parenthesis,
TokenStream::new(tokens),
)
.into(),
]
vec![TokenTree::Delimited(
DelimSpan::from_single(span),
Delimiter::Parenthesis,
TokenStream::new(tokens),
)]
}
}
}
@@ -540,7 +539,7 @@ impl MetaItemKind {
let item = NestedMetaItem::from_tokens(&mut tokens)?;
result.push(item);
match tokens.next() {
None | Some(TokenTree::Token(Token { kind: token::Comma, .. })) => {}
None | Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) => {}
_ => return None,
}
}
@@ -554,7 +553,7 @@ impl MetaItemKind {
Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
MetaItemKind::name_value_from_tokens(&mut inner_tokens.into_trees())
}
Some(TokenTree::Token(token)) => {
Some(TokenTree::Token(token, _)) => {
Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
}
_ => None,
@@ -586,7 +585,7 @@ impl MetaItemKind {
MetaItemKind::list_from_tokens(inner_tokens)
}
Some(TokenTree::Delimited(..)) => None,
Some(TokenTree::Token(Token { kind: token::Eq, .. })) => {
Some(TokenTree::Token(Token { kind: token::Eq, .. }, _)) => {
tokens.next();
MetaItemKind::name_value_from_tokens(tokens)
}
@@ -603,10 +602,12 @@ impl NestedMetaItem {
}
}

fn token_trees_and_spacings(&self) -> Vec<TreeAndSpacing> {
fn token_trees(&self) -> Vec<TokenTree> {
match *self {
NestedMetaItem::MetaItem(ref item) => item.token_trees_and_spacings(),
NestedMetaItem::Literal(ref lit) => vec![TokenTree::Token(lit.to_token()).into()],
NestedMetaItem::MetaItem(ref item) => item.token_trees(),
NestedMetaItem::Literal(ref lit) => {
vec![TokenTree::Token(lit.to_token(), Spacing::Alone)]
}
}
}

@@ -615,7 +616,7 @@ impl NestedMetaItem {
I: Iterator<Item = TokenTree>,
{
match tokens.peek() {
Some(TokenTree::Token(token))
Some(TokenTree::Token(token, _))
if let Ok(lit) = Lit::from_token(token) =>
{
tokens.next();

@@ -125,6 +125,10 @@ pub trait MutVisitor: Sized {
noop_visit_asyncness(a, self);
}

fn visit_closure_binder(&mut self, b: &mut ClosureBinder) {
noop_visit_closure_binder(b, self);
}

fn visit_block(&mut self, b: &mut P<Block>) {
noop_visit_block(b, self);
}
@@ -671,7 +675,7 @@ pub fn visit_attr_annotated_tt<T: MutVisitor>(tt: &mut AttrAnnotatedTokenTree, v
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
pub fn visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
match tt {
TokenTree::Token(token) => {
TokenTree::Token(token, _) => {
visit_token(token, vis);
}
TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
@@ -686,7 +690,7 @@ pub fn visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
pub fn visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T) {
if T::VISIT_TOKENS && !tts.is_empty() {
let tts = Lrc::make_mut(tts);
visit_vec(tts, |(tree, _is_joint)| visit_tt(tree, vis));
visit_vec(tts, |tree| visit_tt(tree, vis));
}
}

@@ -825,6 +829,17 @@ pub fn visit_constness<T: MutVisitor>(constness: &mut Const, vis: &mut T) {
}
}

pub fn noop_visit_closure_binder<T: MutVisitor>(binder: &mut ClosureBinder, vis: &mut T) {
match binder {
ClosureBinder::NotPresent => {}
ClosureBinder::For { span: _, generic_params } => {
let mut vec = std::mem::take(generic_params).into_vec();
vec.flat_map_in_place(|param| vis.flat_map_generic_param(param));
*generic_params = P::from_vec(vec);
}
}
}

pub fn noop_visit_asyncness<T: MutVisitor>(asyncness: &mut Async, vis: &mut T) {
match asyncness {
Async::Yes { span: _, closure_id, return_impl_trait_id } => {
@@ -1336,7 +1351,8 @@ pub fn noop_visit_expr<T: MutVisitor>(
vis.visit_expr(expr);
arms.flat_map_in_place(|arm| vis.flat_map_arm(arm));
}
ExprKind::Closure(_capture_by, asyncness, _movability, decl, body, span) => {
ExprKind::Closure(binder, _capture_by, asyncness, _movability, decl, body, span) => {
vis.visit_closure_binder(binder);
vis.visit_asyncness(asyncness);
vis.visit_fn_decl(decl);
vis.visit_expr(body);

@@ -42,11 +42,15 @@ use std::{fmt, iter};
#[derive(Debug, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub enum TokenTree {
/// A single token.
Token(Token),
Token(Token, Spacing),
/// A delimited sequence of token trees.
Delimited(DelimSpan, Delimiter, TokenStream),
}

// This type is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(TokenTree, 32);

// Ensure all fields of `TokenTree` is `Send` and `Sync`.
#[cfg(parallel_compiler)]
fn _dummy()
@@ -62,7 +66,7 @@ impl TokenTree {
/// Checks if this `TokenTree` is equal to the other, regardless of span information.
pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
match (self, other) {
(TokenTree::Token(token), TokenTree::Token(token2)) => token.kind == token2.kind,
(TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
(TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
delim == delim2 && tts.eq_unspanned(&tts2)
}
@@ -73,7 +77,7 @@ impl TokenTree {
/// Retrieves the `TokenTree`'s span.
pub fn span(&self) -> Span {
match self {
TokenTree::Token(token) => token.span,
TokenTree::Token(token, _) => token.span,
TokenTree::Delimited(sp, ..) => sp.entire(),
}
}
@@ -81,18 +85,26 @@ impl TokenTree {
/// Modify the `TokenTree`'s span in-place.
pub fn set_span(&mut self, span: Span) {
match self {
TokenTree::Token(token) => token.span = span,
TokenTree::Token(token, _) => token.span = span,
TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span),
}
}

pub fn token(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span))
// Create a `TokenTree::Token` with alone spacing.
pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span), Spacing::Alone)
}

// Create a `TokenTree::Token` with joint spacing.
pub fn token_joint(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span), Spacing::Joint)
}

pub fn uninterpolate(self) -> TokenTree {
match self {
TokenTree::Token(token) => TokenTree::Token(token.uninterpolate().into_owned()),
TokenTree::Token(token, spacing) => {
TokenTree::Token(token.uninterpolate().into_owned(), spacing)
}
tt => tt,
}
}
@@ -194,13 +206,12 @@ impl AttrAnnotatedTokenStream {
.iter()
.flat_map(|tree| match &tree.0 {
AttrAnnotatedTokenTree::Token(inner) => {
smallvec![(TokenTree::Token(inner.clone()), tree.1)].into_iter()
smallvec![TokenTree::Token(inner.clone(), tree.1)].into_iter()
}
AttrAnnotatedTokenTree::Delimited(span, delim, stream) => {
smallvec![TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),]
.into_iter()
}
AttrAnnotatedTokenTree::Delimited(span, delim, stream) => smallvec![(
TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),
tree.1,
)]
.into_iter(),
AttrAnnotatedTokenTree::Attributes(data) => {
let mut outer_attrs = Vec::new();
let mut inner_attrs = Vec::new();
@@ -226,7 +237,7 @@ impl AttrAnnotatedTokenStream {
if !inner_attrs.is_empty() {
let mut found = false;
// Check the last two trees (to account for a trailing semi)
for (tree, _) in target_tokens.iter_mut().rev().take(2) {
for tree in target_tokens.iter_mut().rev().take(2) {
if let TokenTree::Delimited(span, delim, delim_tokens) = tree {
// Inner attributes are only supported on extern blocks, functions, impls,
// and modules. All of these have their inner attributes placed at
@@ -299,15 +310,13 @@ pub struct AttributesData {
/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for
/// backwards compatibility.
#[derive(Clone, Debug, Default, Encodable, Decodable)]
pub struct TokenStream(pub(crate) Lrc<Vec<TreeAndSpacing>>);

pub type TreeAndSpacing = (TokenTree, Spacing);
pub struct TokenStream(pub(crate) Lrc<Vec<TokenTree>>);

// `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(TokenStream, 8);

#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable)]
#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub enum Spacing {
Alone,
Joint,
@@ -323,10 +332,10 @@ impl TokenStream {
while let Some((pos, ts)) = iter.next() {
if let Some((_, next)) = iter.peek() {
let sp = match (&ts, &next) {
(_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
(_, TokenTree::Token(Token { kind: token::Comma, .. }, _)) => continue,
(
(TokenTree::Token(token_left), Spacing::Alone),
(TokenTree::Token(token_right), _),
TokenTree::Token(token_left, Spacing::Alone),
TokenTree::Token(token_right, _),
) if ((token_left.is_ident() && !token_left.is_reserved_ident())
|| token_left.is_lit())
&& ((token_right.is_ident() && !token_right.is_reserved_ident())
@@ -334,11 +343,11 @@ impl TokenStream {
{
token_left.span
}
((TokenTree::Delimited(sp, ..), Spacing::Alone), _) => sp.entire(),
(TokenTree::Delimited(sp, ..), _) => sp.entire(),
_ => continue,
};
let sp = sp.shrink_to_hi();
let comma = (TokenTree::token(token::Comma, sp), Spacing::Alone);
let comma = TokenTree::token_alone(token::Comma, sp);
suggestion = Some((pos, comma, sp));
}
}
@@ -360,21 +369,9 @@ impl From<(AttrAnnotatedTokenTree, Spacing)> for AttrAnnotatedTokenStream {
}
}

impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
TokenStream::new(vec![(tree, Spacing::Alone)])
}
}

impl From<TokenTree> for TreeAndSpacing {
fn from(tree: TokenTree) -> TreeAndSpacing {
(tree, Spacing::Alone)
}
}

impl iter::FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndSpacing>>())
TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
}
}

@@ -387,7 +384,7 @@ impl PartialEq<TokenStream> for TokenStream {
}

impl TokenStream {
pub fn new(streams: Vec<TreeAndSpacing>) -> TokenStream {
pub fn new(streams: Vec<TokenTree>) -> TokenStream {
TokenStream(Lrc::new(streams))
}

@@ -420,13 +417,7 @@ impl TokenStream {
}

pub fn map_enumerated<F: FnMut(usize, &TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
TokenStream(Lrc::new(
self.0
.iter()
.enumerate()
.map(|(i, (tree, is_joint))| (f(i, tree), *is_joint))
.collect(),
))
TokenStream(Lrc::new(self.0.iter().enumerate().map(|(i, tree)| f(i, tree)).collect()))
}

fn opt_from_ast(node: &(impl HasAttrs + HasTokens)) -> Option<TokenStream> {
@@ -444,6 +435,21 @@ impl TokenStream {
Some(attr_annotated.to_tokenstream())
}

// Create a token stream containing a single token with alone spacing.
pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
TokenStream::new(vec![TokenTree::token_alone(kind, span)])
}

// Create a token stream containing a single token with joint spacing.
pub fn token_joint(kind: TokenKind, span: Span) -> TokenStream {
TokenStream::new(vec![TokenTree::token_joint(kind, span)])
}

// Create a token stream containing a single `Delimited`.
pub fn delimited(span: DelimSpan, delim: Delimiter, tts: TokenStream) -> TokenStream {
TokenStream::new(vec![TokenTree::Delimited(span, delim, tts)])
}

pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> TokenStream {
TokenStream::opt_from_ast(node)
.unwrap_or_else(|| panic!("missing tokens for node at {:?}: {:?}", node.span(), node))
@@ -452,16 +458,16 @@ impl TokenStream {
pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
match nt {
Nonterminal::NtIdent(ident, is_raw) => {
TokenTree::token(token::Ident(ident.name, *is_raw), ident.span).into()
TokenStream::token_alone(token::Ident(ident.name, *is_raw), ident.span)
}
Nonterminal::NtLifetime(ident) => {
TokenTree::token(token::Lifetime(ident.name), ident.span).into()
TokenStream::token_alone(token::Lifetime(ident.name), ident.span)
}
Nonterminal::NtItem(item) => TokenStream::from_ast(item),
Nonterminal::NtBlock(block) => TokenStream::from_ast(block),
Nonterminal::NtStmt(stmt) if let StmtKind::Empty = stmt.kind => {
// FIXME: Properly collect tokens for empty statements.
TokenTree::token(token::Semi, stmt.span).into()
TokenStream::token_alone(token::Semi, stmt.span)
}
Nonterminal::NtStmt(stmt) => TokenStream::from_ast(stmt),
Nonterminal::NtPat(pat) => TokenStream::from_ast(pat),
@@ -473,23 +479,23 @@ impl TokenStream {
}
}

fn flatten_token(token: &Token) -> TokenTree {
fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree {
match &token.kind {
token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = **nt => {
TokenTree::token(token::Ident(ident.name, is_raw), ident.span)
TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing)
}
token::Interpolated(nt) => TokenTree::Delimited(
DelimSpan::from_single(token.span),
Delimiter::Invisible,
TokenStream::from_nonterminal_ast(&nt).flattened(),
),
_ => TokenTree::Token(token.clone()),
_ => TokenTree::Token(token.clone(), spacing),
}
}

fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
match tree {
TokenTree::Token(token) => TokenStream::flatten_token(token),
TokenTree::Token(token, spacing) => TokenStream::flatten_token(token, *spacing),
TokenTree::Delimited(span, delim, tts) => {
TokenTree::Delimited(*span, *delim, tts.flattened())
}
@@ -500,7 +506,7 @@ impl TokenStream {
pub fn flattened(&self) -> TokenStream {
fn can_skip(stream: &TokenStream) -> bool {
stream.trees().all(|tree| match tree {
TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
TokenTree::Token(token, _) => !matches!(token.kind, token::Interpolated(_)),
TokenTree::Delimited(_, _, inner) => can_skip(inner),
})
}
@@ -522,8 +528,8 @@ impl TokenStreamBuilder {
TokenStreamBuilder(SmallVec::new())
}

pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
self.0.push(stream.into());
pub fn push(&mut self, stream: TokenStream) {
self.0.push(stream);
}

pub fn build(self) -> TokenStream {
@@ -564,14 +570,14 @@ impl TokenStreamBuilder {
// `stream` is not empty and the first tree within it is a
// token tree, and (c) the two tokens can be glued
// together...
if let Some((TokenTree::Token(last_tok), Spacing::Joint)) = res_vec_mut.last()
&& let Some((TokenTree::Token(tok), spacing)) = stream.0.first()
if let Some(TokenTree::Token(last_tok, Spacing::Joint)) = res_vec_mut.last()
&& let Some(TokenTree::Token(tok, spacing)) = stream.0.first()
&& let Some(glued_tok) = last_tok.glue(&tok)
{
// ...then overwrite the last token tree in
// `res_vec_mut` with the glued token, and skip the
// first token tree from `stream`.
*res_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);
*res_vec_mut.last_mut().unwrap() = TokenTree::Token(glued_tok, *spacing);
res_vec_mut.extend(stream_iter.skip(1));
} else {
// Append all of `stream`.
@@ -597,16 +603,8 @@ impl<'t> CursorRef<'t> {
CursorRef { stream, index: 0 }
}

#[inline]
fn next_with_spacing(&mut self) -> Option<&'t TreeAndSpacing> {
self.stream.0.get(self.index).map(|tree| {
self.index += 1;
tree
})
}

pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
self.stream.0.get(self.index + n)
}
}

@@ -614,7 +612,10 @@ impl<'t> Iterator for CursorRef<'t> {
type Item = &'t TokenTree;

fn next(&mut self) -> Option<&'t TokenTree> {
self.next_with_spacing().map(|(tree, _)| tree)
self.stream.0.get(self.index).map(|tree| {
self.index += 1;
tree
})
}
}

@@ -630,7 +631,10 @@ impl Iterator for Cursor {
type Item = TokenTree;

fn next(&mut self) -> Option<TokenTree> {
self.next_with_spacing().map(|(tree, _)| tree)
self.stream.0.get(self.index).map(|tree| {
self.index += 1;
tree.clone()
})
}
}

@@ -640,15 +644,7 @@ impl Cursor {
}

#[inline]
pub fn next_with_spacing(&mut self) -> Option<TreeAndSpacing> {
self.stream.0.get(self.index).map(|tree| {
self.index += 1;
tree.clone()
})
}

#[inline]
pub fn next_with_spacing_ref(&mut self) -> Option<&TreeAndSpacing> {
pub fn next_ref(&mut self) -> Option<&TokenTree> {
self.stream.0.get(self.index).map(|tree| {
self.index += 1;
tree
@@ -656,7 +652,7 @@ impl Cursor {
}

pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
self.stream.0.get(self.index + n)
}
}

@@ -194,7 +194,7 @@ pub fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comme
}

for token in rustc_lexer::tokenize(&text[pos..]) {
let token_text = &text[pos..pos + token.len];
let token_text = &text[pos..pos + token.len as usize];
match token.kind {
rustc_lexer::TokenKind::Whitespace => {
if let Some(mut idx) = token_text.find('\n') {
@@ -211,8 +211,10 @@ pub fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comme
}
rustc_lexer::TokenKind::BlockComment { doc_style, .. } => {
if doc_style.is_none() {
let code_to_the_right =
!matches!(text[pos + token.len..].chars().next(), Some('\r' | '\n'));
let code_to_the_right = !matches!(
text[pos + token.len as usize..].chars().next(),
Some('\r' | '\n')
);
let style = match (code_to_the_left, code_to_the_right) {
(_, true) => CommentStyle::Mixed,
(false, false) => CommentStyle::Isolated,
@@ -246,7 +248,7 @@ pub fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comme
code_to_the_left = true;
}
}
pos += token.len;
pos += token.len as usize;
}

comments

@@ -15,7 +15,7 @@

use crate::ast::*;

use rustc_span::symbol::{Ident, Symbol};
use rustc_span::symbol::Ident;
use rustc_span::Span;

#[derive(Copy, Clone, Debug, PartialEq)]
@@ -56,14 +56,14 @@ pub enum FnKind<'a> {
Fn(FnCtxt, Ident, &'a FnSig, &'a Visibility, &'a Generics, Option<&'a Block>),

/// E.g., `|x, y| body`.
Closure(&'a FnDecl, &'a Expr),
Closure(&'a ClosureBinder, &'a FnDecl, &'a Expr),
}

impl<'a> FnKind<'a> {
pub fn header(&self) -> Option<&'a FnHeader> {
match *self {
FnKind::Fn(_, _, sig, _, _, _) => Some(&sig.header),
FnKind::Closure(_, _) => None,
FnKind::Closure(_, _, _) => None,
}
}

@@ -77,7 +77,7 @@ impl<'a> FnKind<'a> {
pub fn decl(&self) -> &'a FnDecl {
match self {
FnKind::Fn(_, _, sig, _, _, _) => &sig.decl,
FnKind::Closure(decl, _) => decl,
FnKind::Closure(_, decl, _) => decl,
}
}

@@ -109,12 +109,7 @@ pub enum LifetimeCtxt {
/// to monitor future changes to `Visitor` in case a new method with a
/// new default implementation gets introduced.)
pub trait Visitor<'ast>: Sized {
fn visit_name(&mut self, _span: Span, _name: Symbol) {
// Nothing to do.
}
fn visit_ident(&mut self, ident: Ident) {
walk_ident(self, ident);
}
fn visit_ident(&mut self, _ident: Ident) {}
fn visit_foreign_item(&mut self, i: &'ast ForeignItem) {
walk_foreign_item(self, i)
}
@@ -155,6 +150,9 @@ pub trait Visitor<'ast>: Sized {
fn visit_generics(&mut self, g: &'ast Generics) {
walk_generics(self, g)
}
fn visit_closure_binder(&mut self, b: &'ast ClosureBinder) {
walk_closure_binder(self, b)
}
fn visit_where_predicate(&mut self, p: &'ast WherePredicate) {
walk_where_predicate(self, p)
}
@@ -264,10 +262,6 @@ macro_rules! walk_list {
}
}

pub fn walk_ident<'a, V: Visitor<'a>>(visitor: &mut V, ident: Ident) {
visitor.visit_name(ident.span, ident.name);
}

pub fn walk_crate<'a, V: Visitor<'a>>(visitor: &mut V, krate: &'a Crate) {
walk_list!(visitor, visit_item, &krate.items);
walk_list!(visitor, visit_attribute, &krate.attrs);
|
||||
@ -312,11 +306,7 @@ pub fn walk_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a Item) {
|
||||
visitor.visit_vis(&item.vis);
|
||||
visitor.visit_ident(item.ident);
|
||||
match item.kind {
|
||||
ItemKind::ExternCrate(orig_name) => {
|
||||
if let Some(orig_name) = orig_name {
|
||||
visitor.visit_name(item.span, orig_name);
|
||||
}
|
||||
}
|
||||
ItemKind::ExternCrate(_) => {}
|
||||
ItemKind::Use(ref use_tree) => visitor.visit_use_tree(use_tree, item.id, false),
|
||||
ItemKind::Static(ref typ, _, ref expr) | ItemKind::Const(_, ref typ, ref expr) => {
|
||||
visitor.visit_ty(typ);
|
||||
@ -636,6 +626,15 @@ pub fn walk_generics<'a, V: Visitor<'a>>(visitor: &mut V, generics: &'a Generics
|
||||
walk_list!(visitor, visit_where_predicate, &generics.where_clause.predicates);
|
||||
}
|
||||
|
||||
pub fn walk_closure_binder<'a, V: Visitor<'a>>(visitor: &mut V, binder: &'a ClosureBinder) {
|
||||
match binder {
|
||||
ClosureBinder::NotPresent => {}
|
||||
ClosureBinder::For { generic_params, span: _ } => {
|
||||
walk_list!(visitor, visit_generic_param, generic_params)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn walk_where_predicate<'a, V: Visitor<'a>>(visitor: &mut V, predicate: &'a WherePredicate) {
|
||||
match *predicate {
|
||||
WherePredicate::BoundPredicate(WhereBoundPredicate {
|
||||
@ -682,7 +681,8 @@ pub fn walk_fn<'a, V: Visitor<'a>>(visitor: &mut V, kind: FnKind<'a>, _span: Spa
|
||||
walk_fn_decl(visitor, &sig.decl);
|
||||
walk_list!(visitor, visit_block, body);
|
||||
}
|
||||
FnKind::Closure(decl, body) => {
|
||||
FnKind::Closure(binder, decl, body) => {
|
||||
visitor.visit_closure_binder(binder);
|
||||
walk_fn_decl(visitor, decl);
|
||||
visitor.visit_expr(body);
|
||||
}
|
||||
@ -856,8 +856,8 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
|
||||
visitor.visit_expr(subexpression);
|
||||
walk_list!(visitor, visit_arm, arms);
|
||||
}
|
||||
ExprKind::Closure(_, _, _, ref decl, ref body, _decl_span) => {
|
||||
visitor.visit_fn(FnKind::Closure(decl, body), expression.span, expression.id)
|
||||
ExprKind::Closure(ref binder, _, _, _, ref decl, ref body, _decl_span) => {
|
||||
visitor.visit_fn(FnKind::Closure(binder, decl, body), expression.span, expression.id)
|
||||
}
|
||||
ExprKind::Block(ref block, ref opt_label) => {
|
||||
walk_list!(visitor, visit_label, opt_label);
|
||||
|
||||
@ -20,4 +20,4 @@ rustc_span = { path = "../rustc_span" }
|
||||
rustc_errors = { path = "../rustc_errors" }
|
||||
rustc_session = { path = "../rustc_session" }
|
||||
rustc_ast = { path = "../rustc_ast" }
|
||||
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
|
||||
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
|
||||
|
||||
@ -4,8 +4,7 @@ use super::LoweringContext;
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::*;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::stable_set::FxHashSet;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
||||
use rustc_errors::struct_span_err;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
@ -24,10 +23,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
) -> &'hir hir::InlineAsm<'hir> {
|
||||
// Rustdoc needs to support asm! from foreign architectures: don't try
|
||||
// lowering the register constraints in this case.
|
||||
let asm_arch = if self.sess.opts.actually_rustdoc { None } else { self.sess.asm_arch };
|
||||
if asm_arch.is_none() && !self.sess.opts.actually_rustdoc {
|
||||
struct_span_err!(self.sess, sp, E0472, "inline assembly is unsupported on this target")
|
||||
.emit();
|
||||
let asm_arch =
|
||||
if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
|
||||
if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
|
||||
struct_span_err!(
|
||||
self.tcx.sess,
|
||||
sp,
|
||||
E0472,
|
||||
"inline assembly is unsupported on this target"
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
if let Some(asm_arch) = asm_arch {
|
||||
// Inline assembly is currently only stable for these architectures.
|
||||
@ -40,9 +45,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
| asm::InlineAsmArch::RiscV32
|
||||
| asm::InlineAsmArch::RiscV64
|
||||
);
|
||||
if !is_stable && !self.sess.features_untracked().asm_experimental_arch {
|
||||
if !is_stable && !self.tcx.features().asm_experimental_arch {
|
||||
feature_err(
|
||||
&self.sess.parse_sess,
|
||||
&self.tcx.sess.parse_sess,
|
||||
sym::asm_experimental_arch,
|
||||
sp,
|
||||
"inline assembly is not stable yet on this architecture",
|
||||
@ -52,17 +57,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
}
|
||||
if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
|
||||
&& !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
|
||||
&& !self.sess.opts.actually_rustdoc
|
||||
&& !self.tcx.sess.opts.actually_rustdoc
|
||||
{
|
||||
self.sess
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(sp, "the `att_syntax` option is only supported on x86")
|
||||
.emit();
|
||||
}
|
||||
if asm.options.contains(InlineAsmOptions::MAY_UNWIND)
|
||||
&& !self.sess.features_untracked().asm_unwind
|
||||
{
|
||||
if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind {
|
||||
feature_err(
|
||||
&self.sess.parse_sess,
|
||||
&self.tcx.sess.parse_sess,
|
||||
sym::asm_unwind,
|
||||
sp,
|
||||
"the `may_unwind` option is unstable",
|
||||
@ -73,12 +77,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
let mut clobber_abis = FxHashMap::default();
|
||||
if let Some(asm_arch) = asm_arch {
|
||||
for (abi_name, abi_span) in &asm.clobber_abis {
|
||||
match asm::InlineAsmClobberAbi::parse(asm_arch, &self.sess.target, *abi_name) {
|
||||
match asm::InlineAsmClobberAbi::parse(asm_arch, &self.tcx.sess.target, *abi_name) {
|
||||
Ok(abi) => {
|
||||
// If the abi was already in the list, emit an error
|
||||
match clobber_abis.get(&abi) {
|
||||
Some((prev_name, prev_sp)) => {
|
||||
let mut err = self.sess.struct_span_err(
|
||||
let mut err = self.tcx.sess.struct_span_err(
|
||||
*abi_span,
|
||||
&format!("`{}` ABI specified multiple times", prev_name),
|
||||
);
|
||||
@ -86,7 +90,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
|
||||
// Multiple different abi names may actually be the same ABI
|
||||
// If the specified ABIs are not the same name, alert the user that they resolve to the same ABI
|
||||
let source_map = self.sess.source_map();
|
||||
let source_map = self.tcx.sess.source_map();
|
||||
if source_map.span_to_snippet(*prev_sp)
|
||||
!= source_map.span_to_snippet(*abi_span)
|
||||
{
|
||||
@ -101,7 +105,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
}
|
||||
}
|
||||
Err(&[]) => {
|
||||
self.sess
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(
|
||||
*abi_span,
|
||||
"`clobber_abi` is not supported on this target",
|
||||
@ -109,8 +114,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
.emit();
|
||||
}
|
||||
Err(supported_abis) => {
|
||||
let mut err =
|
||||
self.sess.struct_span_err(*abi_span, "invalid ABI for `clobber_abi`");
|
||||
let mut err = self
|
||||
.tcx
|
||||
.sess
|
||||
.struct_span_err(*abi_span, "invalid ABI for `clobber_abi`");
|
||||
let mut abis = format!("`{}`", supported_abis[0]);
|
||||
for m in &supported_abis[1..] {
|
||||
let _ = write!(abis, ", `{}`", m);
|
||||
@ -128,7 +135,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
// Lower operands to HIR. We use dummy register classes if an error
|
||||
// occurs during lowering because we still need to be able to produce a
|
||||
// valid HIR.
|
||||
let sess = self.sess;
|
||||
let sess = self.tcx.sess;
|
||||
let mut operands: Vec<_> = asm
|
||||
.operands
|
||||
.iter()
|
||||
@ -137,7 +144,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
InlineAsmRegOrRegClass::Reg(s) => {
|
||||
asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
|
||||
asm::InlineAsmReg::parse(asm_arch, s).unwrap_or_else(|e| {
|
||||
let msg = format!("invalid register `{}`: {}", s.as_str(), e);
|
||||
let msg = format!("invalid register `{}`: {}", s, e);
|
||||
sess.struct_span_err(*op_sp, &msg).emit();
|
||||
asm::InlineAsmReg::Err
|
||||
})
|
||||
@ -148,7 +155,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
InlineAsmRegOrRegClass::RegClass(s) => {
|
||||
asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
|
||||
asm::InlineAsmRegClass::parse(asm_arch, s).unwrap_or_else(|e| {
|
||||
let msg = format!("invalid register class `{}`: {}", s.as_str(), e);
|
||||
let msg = format!("invalid register class `{}`: {}", s, e);
|
||||
sess.struct_span_err(*op_sp, &msg).emit();
|
||||
asm::InlineAsmRegClass::Err
|
||||
})
|
||||
@ -184,9 +191,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
}
|
||||
}
|
||||
InlineAsmOperand::Const { ref anon_const } => {
|
||||
if !self.sess.features_untracked().asm_const {
|
||||
if !self.tcx.features().asm_const {
|
||||
feature_err(
|
||||
&self.sess.parse_sess,
|
||||
&sess.parse_sess,
|
||||
sym::asm_const,
|
||||
*op_sp,
|
||||
"const operands for inline assembly are unstable",
|
||||
@ -198,9 +205,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
}
|
||||
}
|
||||
InlineAsmOperand::Sym { ref sym } => {
|
||||
if !self.sess.features_untracked().asm_sym {
|
||||
if !self.tcx.features().asm_sym {
|
||||
feature_err(
|
||||
&self.sess.parse_sess,
|
||||
&sess.parse_sess,
|
||||
sym::asm_sym,
|
||||
*op_sp,
|
||||
"sym operands for inline assembly are unstable",
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
use crate::{ImplTraitContext, ImplTraitPosition, LoweringContext};
|
||||
use rustc_ast::{AttrVec, Block, BlockCheckMode, Expr, Local, LocalKind, Stmt, StmtKind};
|
||||
use rustc_ast::{Block, BlockCheckMode, Local, LocalKind, Stmt, StmtKind};
|
||||
use rustc_hir as hir;
|
||||
use rustc_session::parse::feature_err;
|
||||
use rustc_span::{sym, DesugaringKind};
|
||||
use rustc_span::sym;
|
||||
|
||||
use smallvec::SmallVec;
|
||||
|
||||
@ -36,21 +36,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
match s.kind {
|
||||
StmtKind::Local(ref local) => {
|
||||
let hir_id = self.lower_node_id(s.id);
|
||||
match &local.kind {
|
||||
LocalKind::InitElse(init, els) => {
|
||||
let e = self.lower_let_else(hir_id, local, init, els, tail);
|
||||
expr = Some(e);
|
||||
// remaining statements are in let-else expression
|
||||
break;
|
||||
}
|
||||
_ => {
|
||||
let local = self.lower_local(local);
|
||||
self.alias_attrs(hir_id, local.hir_id);
|
||||
let kind = hir::StmtKind::Local(local);
|
||||
let span = self.lower_span(s.span);
|
||||
stmts.push(hir::Stmt { hir_id, kind, span });
|
||||
}
|
||||
}
|
||||
let local = self.lower_local(local);
|
||||
self.alias_attrs(hir_id, local.hir_id);
|
||||
let kind = hir::StmtKind::Local(local);
|
||||
let span = self.lower_span(s.span);
|
||||
stmts.push(hir::Stmt { hir_id, kind, span });
|
||||
}
|
||||
StmtKind::Item(ref it) => {
|
||||
stmts.extend(self.lower_item_ref(it).into_iter().enumerate().map(
|
||||
@ -101,10 +91,24 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
let init = l.kind.init().map(|init| self.lower_expr(init));
|
||||
let hir_id = self.lower_node_id(l.id);
|
||||
let pat = self.lower_pat(&l.pat);
|
||||
let els = if let LocalKind::InitElse(_, els) = &l.kind {
|
||||
if !self.tcx.features().let_else {
|
||||
feature_err(
|
||||
&self.tcx.sess.parse_sess,
|
||||
sym::let_else,
|
||||
l.span,
|
||||
"`let...else` statements are unstable",
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
Some(self.lower_block(els, false))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let span = self.lower_span(l.span);
|
||||
let source = hir::LocalSource::Normal;
|
||||
self.lower_attrs(hir_id, &l.attrs);
|
||||
self.arena.alloc(hir::Local { hir_id, ty, pat, init, span, source })
|
||||
self.arena.alloc(hir::Local { hir_id, ty, pat, init, els, span, source })
|
||||
}
|
||||
|
||||
fn lower_block_check_mode(&mut self, b: &BlockCheckMode) -> hir::BlockCheckMode {
|
||||
@ -115,59 +119,4 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_let_else(
|
||||
&mut self,
|
||||
stmt_hir_id: hir::HirId,
|
||||
local: &Local,
|
||||
init: &Expr,
|
||||
els: &Block,
|
||||
tail: &[Stmt],
|
||||
) -> &'hir hir::Expr<'hir> {
|
||||
let ty = local
|
||||
.ty
|
||||
.as_ref()
|
||||
.map(|t| self.lower_ty(t, ImplTraitContext::Disallowed(ImplTraitPosition::Variable)));
|
||||
let span = self.lower_span(local.span);
|
||||
let span = self.mark_span_with_reason(DesugaringKind::LetElse, span, None);
|
||||
let init = self.lower_expr(init);
|
||||
let local_hir_id = self.lower_node_id(local.id);
|
||||
self.lower_attrs(local_hir_id, &local.attrs);
|
||||
let let_expr = {
|
||||
let lex = self.arena.alloc(hir::Let {
|
||||
hir_id: local_hir_id,
|
||||
pat: self.lower_pat(&local.pat),
|
||||
ty,
|
||||
init,
|
||||
span,
|
||||
});
|
||||
self.arena.alloc(self.expr(span, hir::ExprKind::Let(lex), AttrVec::new()))
|
||||
};
|
||||
let then_expr = {
|
||||
let (stmts, expr) = self.lower_stmts(tail);
|
||||
let block = self.block_all(span, stmts, expr);
|
||||
self.arena.alloc(self.expr_block(block, AttrVec::new()))
|
||||
};
|
||||
let else_expr = {
|
||||
let block = self.lower_block(els, false);
|
||||
self.arena.alloc(self.expr_block(block, AttrVec::new()))
|
||||
};
|
||||
self.alias_attrs(let_expr.hir_id, local_hir_id);
|
||||
self.alias_attrs(else_expr.hir_id, local_hir_id);
|
||||
let if_expr = self.arena.alloc(hir::Expr {
|
||||
hir_id: stmt_hir_id,
|
||||
span,
|
||||
kind: hir::ExprKind::If(let_expr, then_expr, Some(else_expr)),
|
||||
});
|
||||
if !self.sess.features_untracked().let_else {
|
||||
feature_err(
|
||||
&self.sess.parse_sess,
|
||||
sym::let_else,
|
||||
local.span,
|
||||
"`let...else` statements are unstable",
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
if_expr
|
||||
}
|
||||
}
|
||||
|
||||
@ -46,7 +46,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let hir_id = self.lower_node_id(e.id);
|
||||
return hir::Expr { hir_id, kind, span: self.lower_span(e.span) };
|
||||
} else {
|
||||
self.sess
|
||||
self.tcx.sess
|
||||
.struct_span_err(
|
||||
e.span,
|
||||
"#[rustc_box] requires precisely one argument \
|
||||
@ -155,6 +155,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
self.lower_expr_await(span, expr)
|
||||
}
|
||||
ExprKind::Closure(
|
||||
ref binder,
|
||||
capture_clause,
|
||||
asyncness,
|
||||
movability,
|
||||
@ -164,6 +165,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
) => {
|
||||
if let Async::Yes { closure_id, .. } = asyncness {
|
||||
self.lower_expr_async_closure(
|
||||
binder,
|
||||
capture_clause,
|
||||
e.id,
|
||||
closure_id,
|
||||
@ -173,6 +175,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
)
|
||||
} else {
|
||||
self.lower_expr_closure(
|
||||
binder,
|
||||
capture_clause,
|
||||
e.id,
|
||||
movability,
|
||||
@ -207,8 +210,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
|
||||
}
|
||||
ExprKind::Underscore => {
|
||||
self.sess
|
||||
.struct_span_err(
|
||||
self.tcx
|
||||
.sess.struct_span_err(
|
||||
e.span,
|
||||
"in expressions, `_` can only be used on the left-hand side of an assignment",
|
||||
)
|
||||
@ -245,7 +248,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let rest = match &se.rest {
|
||||
StructRest::Base(e) => Some(self.lower_expr(e)),
|
||||
StructRest::Rest(sp) => {
|
||||
self.sess
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(*sp, "base expression required after `..`")
|
||||
.span_label(*sp, "add a base expression here")
|
||||
.emit();
|
||||
@ -474,7 +478,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
} else {
|
||||
let try_span = this.mark_span_with_reason(
|
||||
DesugaringKind::TryBlock,
|
||||
this.sess.source_map().end_point(body.span),
|
||||
this.tcx.sess.source_map().end_point(body.span),
|
||||
this.allow_try_trait.clone(),
|
||||
);
|
||||
|
||||
@ -604,13 +608,18 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
});
|
||||
|
||||
// `static |_task_context| -> <ret_ty> { body }`:
|
||||
let generator_kind = hir::ExprKind::Closure {
|
||||
capture_clause,
|
||||
bound_generic_params: &[],
|
||||
fn_decl,
|
||||
body,
|
||||
fn_decl_span: self.lower_span(span),
|
||||
movability: Some(hir::Movability::Static),
|
||||
let generator_kind = {
|
||||
let c = self.arena.alloc(hir::Closure {
|
||||
binder: hir::ClosureBinder::Default,
|
||||
capture_clause,
|
||||
bound_generic_params: &[],
|
||||
fn_decl,
|
||||
body,
|
||||
fn_decl_span: self.lower_span(span),
|
||||
movability: Some(hir::Movability::Static),
|
||||
});
|
||||
|
||||
hir::ExprKind::Closure(c)
|
||||
};
|
||||
let generator = hir::Expr {
|
||||
hir_id: self.lower_node_id(closure_node_id),
|
||||
@ -653,7 +662,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
Some(hir::GeneratorKind::Async(_)) => {}
|
||||
Some(hir::GeneratorKind::Gen) | None => {
|
||||
let mut err = struct_span_err!(
|
||||
self.sess,
|
||||
self.tcx.sess,
|
||||
dot_await_span,
|
||||
E0728,
|
||||
"`await` is only allowed inside `async` functions and blocks"
|
||||
@ -830,6 +839,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
|
||||
fn lower_expr_closure(
|
||||
&mut self,
|
||||
binder: &ClosureBinder,
|
||||
capture_clause: CaptureBy,
|
||||
closure_id: NodeId,
|
||||
movability: Movability,
|
||||
@ -837,7 +847,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
body: &Expr,
|
||||
fn_decl_span: Span,
|
||||
) -> hir::ExprKind<'hir> {
|
||||
let (body, generator_option) = self.with_new_scopes(move |this| {
|
||||
let (binder_clause, generic_params) = self.lower_closure_binder(binder);
|
||||
|
||||
let (body_id, generator_option) = self.with_new_scopes(move |this| {
|
||||
let prev = this.current_item;
|
||||
this.current_item = Some(fn_decl_span);
|
||||
let mut generator_kind = None;
|
||||
@ -852,19 +864,21 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
(body_id, generator_option)
|
||||
});
|
||||
|
||||
self.with_lifetime_binder(closure_id, &[], |this, bound_generic_params| {
|
||||
// Lower outside new scope to preserve `is_in_loop_condition`.
|
||||
let fn_decl = this.lower_fn_decl(decl, None, FnDeclKind::Closure, None);
|
||||
let bound_generic_params = self.lower_lifetime_binder(closure_id, generic_params);
|
||||
// Lower outside new scope to preserve `is_in_loop_condition`.
|
||||
let fn_decl = self.lower_fn_decl(decl, None, FnDeclKind::Closure, None);
|
||||
|
||||
hir::ExprKind::Closure {
|
||||
capture_clause,
|
||||
bound_generic_params,
|
||||
fn_decl,
|
||||
body,
|
||||
fn_decl_span: this.lower_span(fn_decl_span),
|
||||
movability: generator_option,
|
||||
}
|
||||
})
|
||||
let c = self.arena.alloc(hir::Closure {
|
||||
binder: binder_clause,
|
||||
capture_clause,
|
||||
bound_generic_params,
|
||||
fn_decl,
|
||||
body: body_id,
|
||||
fn_decl_span: self.lower_span(fn_decl_span),
|
||||
movability: generator_option,
|
||||
});
|
||||
|
||||
hir::ExprKind::Closure(c)
|
||||
}
|
||||
|
||||
fn generator_movability_for_fn(
|
||||
@ -878,7 +892,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
Some(hir::GeneratorKind::Gen) => {
|
||||
if decl.inputs.len() > 1 {
|
||||
struct_span_err!(
|
||||
self.sess,
|
||||
self.tcx.sess,
|
||||
fn_decl_span,
|
||||
E0628,
|
||||
"too many parameters for a generator (expected 0 or 1 parameters)"
|
||||
@ -892,16 +906,37 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
}
|
||||
None => {
|
||||
if movability == Movability::Static {
|
||||
struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
|
||||
.emit();
|
||||
struct_span_err!(
|
||||
self.tcx.sess,
|
||||
fn_decl_span,
|
||||
E0697,
|
||||
"closures cannot be static"
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_closure_binder<'c>(
|
||||
&mut self,
|
||||
binder: &'c ClosureBinder,
|
||||
) -> (hir::ClosureBinder, &'c [GenericParam]) {
|
||||
let (binder, params) = match binder {
|
||||
ClosureBinder::NotPresent => (hir::ClosureBinder::Default, &[][..]),
|
||||
&ClosureBinder::For { span, ref generic_params } => {
|
||||
let span = self.lower_span(span);
|
||||
(hir::ClosureBinder::For { span }, &**generic_params)
|
||||
}
|
||||
};
|
||||
|
||||
(binder, params)
|
||||
}
|
||||
|
||||
fn lower_expr_async_closure(
|
||||
&mut self,
|
||||
binder: &ClosureBinder,
|
||||
capture_clause: CaptureBy,
|
||||
closure_id: NodeId,
|
||||
inner_closure_id: NodeId,
|
||||
@ -909,6 +944,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
body: &Expr,
|
||||
fn_decl_span: Span,
|
||||
) -> hir::ExprKind<'hir> {
|
||||
if let &ClosureBinder::For { span, .. } = binder {
|
||||
self.tcx.sess.span_err(
|
||||
span,
|
||||
"`for<...>` binders on `async` closures are not currently supported",
|
||||
);
|
||||
}
|
||||
|
||||
let (binder_clause, generic_params) = self.lower_closure_binder(binder);
|
||||
|
||||
let outer_decl =
|
||||
FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
|
||||
|
||||
@ -916,7 +960,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
// FIXME(cramertj): allow `async` non-`move` closures with arguments.
|
||||
if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
|
||||
struct_span_err!(
|
||||
this.sess,
|
||||
this.tcx.sess,
|
||||
fn_decl_span,
|
||||
E0708,
|
||||
"`async` non-`move` closures with parameters are not currently supported",
|
||||
@ -946,21 +990,23 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
body_id
|
||||
});
|
||||
|
||||
self.with_lifetime_binder(closure_id, &[], |this, bound_generic_params| {
|
||||
// We need to lower the declaration outside the new scope, because we
|
||||
// have to conserve the state of being inside a loop condition for the
|
||||
// closure argument types.
|
||||
let fn_decl = this.lower_fn_decl(&outer_decl, None, FnDeclKind::Closure, None);
|
||||
let bound_generic_params = self.lower_lifetime_binder(closure_id, generic_params);
|
||||
|
||||
hir::ExprKind::Closure {
|
||||
capture_clause,
|
||||
bound_generic_params,
|
||||
fn_decl,
|
||||
body,
|
||||
fn_decl_span: this.lower_span(fn_decl_span),
|
||||
movability: None,
|
||||
}
|
||||
})
|
||||
// We need to lower the declaration outside the new scope, because we
|
||||
// have to conserve the state of being inside a loop condition for the
|
||||
// closure argument types.
|
||||
let fn_decl = self.lower_fn_decl(&outer_decl, None, FnDeclKind::Closure, None);
|
||||
|
||||
let c = self.arena.alloc(hir::Closure {
|
||||
binder: binder_clause,
|
||||
capture_clause,
|
||||
bound_generic_params,
|
||||
fn_decl,
|
||||
body,
|
||||
fn_decl_span: self.lower_span(fn_decl_span),
|
||||
movability: None,
|
||||
});
|
||||
hir::ExprKind::Closure(c)
|
||||
}
|
||||
|
||||
/// Destructure the LHS of complex assignments.
|
||||
@ -1163,7 +1209,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
);
|
||||
let fields_omitted = match &se.rest {
|
||||
StructRest::Base(e) => {
|
||||
self.sess
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(
|
||||
e.span,
|
||||
"functional record updates are not allowed in destructuring \
|
||||
@ -1371,7 +1418,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
Some(hir::GeneratorKind::Gen) => {}
|
||||
Some(hir::GeneratorKind::Async(_)) => {
|
||||
struct_span_err!(
|
||||
self.sess,
|
||||
self.tcx.sess,
|
||||
span,
|
||||
E0727,
|
||||
"`async` generators are not yet supported"
|
||||
@ -1516,7 +1563,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
span,
|
||||
self.allow_try_trait.clone(),
|
||||
);
|
||||
let try_span = self.sess.source_map().end_point(span);
|
||||
let try_span = self.tcx.sess.source_map().end_point(span);
|
||||
let try_span = self.mark_span_with_reason(
|
||||
DesugaringKind::QuestionMark,
|
||||
try_span,
|
||||
|
||||
@ -192,9 +192,7 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
|
||||
}
|
||||
|
||||
fn visit_pat(&mut self, pat: &'hir Pat<'hir>) {
|
||||
let node =
|
||||
if let PatKind::Binding(..) = pat.kind { Node::Binding(pat) } else { Node::Pat(pat) };
|
||||
self.insert(pat.span, pat.hir_id, node);
|
||||
self.insert(pat.span, pat.hir_id, Node::Pat(pat));
|
||||
|
||||
self.with_parent(pat.hir_id, |this| {
|
||||
intravisit::walk_pat(this, pat);
|
||||
@ -325,7 +323,7 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
|
||||
fn visit_trait_item_ref(&mut self, ii: &'hir TraitItemRef) {
|
||||
// Do not visit the duplicate information in TraitItemRef. We want to
|
||||
// map the actual nodes, not the duplicate ones in the *Ref.
|
||||
let TraitItemRef { id, ident: _, kind: _, span: _, defaultness: _ } = *ii;
|
||||
let TraitItemRef { id, ident: _, kind: _, span: _ } = *ii;
|
||||
|
||||
self.visit_nested_trait_item(id);
|
||||
}
|
||||
@ -333,8 +331,7 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
|
||||
fn visit_impl_item_ref(&mut self, ii: &'hir ImplItemRef) {
|
||||
// Do not visit the duplicate information in ImplItemRef. We want to
|
||||
// map the actual nodes, not the duplicate ones in the *Ref.
|
||||
let ImplItemRef { id, ident: _, kind: _, span: _, defaultness: _, trait_item_def_id: _ } =
|
||||
*ii;
|
||||
let ImplItemRef { id, ident: _, kind: _, span: _, trait_item_def_id: _ } = *ii;
|
||||
|
||||
self.visit_nested_impl_item(id);
|
||||
}
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
use super::ResolverAstLoweringExt;
|
||||
use super::{AstOwner, ImplTraitContext, ImplTraitPosition};
|
||||
use super::{LoweringContext, ParamMode};
|
||||
use crate::{Arena, FnDeclKind};
|
||||
use super::{FnDeclKind, LoweringContext, ParamMode};
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::visit::AssocCtxt;
|
||||
@ -12,12 +11,9 @@ use rustc_errors::struct_span_err;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID};
|
||||
use rustc_hir::definitions::Definitions;
|
||||
use rustc_hir::PredicateOrigin;
|
||||
use rustc_index::vec::{Idx, IndexVec};
|
||||
use rustc_middle::ty::{ResolverAstLowering, ResolverOutputs};
|
||||
use rustc_session::cstore::CrateStoreDyn;
|
||||
use rustc_session::Session;
|
||||
use rustc_middle::ty::{DefIdTree, ResolverAstLowering, TyCtxt};
|
||||
use rustc_span::source_map::DesugaringKind;
|
||||
use rustc_span::symbol::{kw, sym, Ident};
|
||||
use rustc_span::Span;
|
||||
@ -27,12 +23,8 @@ use smallvec::{smallvec, SmallVec};
|
||||
use std::iter;
|
||||
|
||||
pub(super) struct ItemLowerer<'a, 'hir> {
|
||||
pub(super) sess: &'a Session,
|
||||
pub(super) definitions: &'a mut Definitions,
|
||||
pub(super) cstore: &'a CrateStoreDyn,
|
||||
pub(super) resolutions: &'a ResolverOutputs,
|
||||
pub(super) tcx: TyCtxt<'hir>,
|
||||
pub(super) resolver: &'a mut ResolverAstLowering,
|
||||
pub(super) arena: &'hir Arena<'hir>,
|
||||
pub(super) ast_index: &'a IndexVec<LocalDefId, AstOwner<'a>>,
|
||||
pub(super) owners: &'a mut IndexVec<LocalDefId, hir::MaybeOwner<&'hir hir::OwnerInfo<'hir>>>,
|
||||
}
|
||||
@ -65,12 +57,9 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
|
||||
) {
|
||||
let mut lctx = LoweringContext {
|
||||
// Pseudo-globals.
|
||||
sess: &self.sess,
|
||||
definitions: self.definitions,
|
||||
cstore: self.cstore,
|
||||
resolutions: self.resolutions,
|
||||
tcx: self.tcx,
|
||||
resolver: self.resolver,
|
||||
arena: self.arena,
|
||||
arena: self.tcx.hir_arena,
|
||||
|
||||
// HirId handling.
|
||||
bodies: Vec::new(),
|
||||
@ -91,7 +80,6 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
|
||||
generator_kind: None,
|
||||
task_context: None,
|
||||
current_item: None,
|
||||
captured_lifetimes: None,
|
||||
impl_trait_defs: Vec::new(),
|
||||
impl_trait_bounds: Vec::new(),
|
||||
allow_try_trait: Some([sym::try_trait_v2, sym::yeet_desugar_details][..].into()),
|
||||
@ -144,12 +132,7 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
|
||||
fn lower_assoc_item(&mut self, item: &AssocItem, ctxt: AssocCtxt) {
|
||||
let def_id = self.resolver.node_id_to_def_id[&item.id];
|
||||
|
||||
let parent_id = {
|
||||
let parent = self.definitions.def_key(def_id).parent;
|
||||
let local_def_index = parent.unwrap();
|
||||
LocalDefId { local_def_index }
|
||||
};
|
||||
|
||||
let parent_id = self.tcx.local_parent(def_id);
|
||||
let parent_hir = self.lower_node(parent_id).unwrap();
|
||||
self.with_lctx(item.id, |lctx| {
|
||||
// Evaluate with the lifetimes in `params` in-scope.
|
||||
@ -771,17 +754,17 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let hir_id = self.lower_node_id(i.id);
|
||||
let trait_item_def_id = hir_id.expect_owner();
|
||||
|
||||
let (generics, kind) = match i.kind {
|
||||
let (generics, kind, has_default) = match i.kind {
|
||||
AssocItemKind::Const(_, ref ty, ref default) => {
|
||||
let ty = self.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type));
|
||||
let body = default.as_ref().map(|x| self.lower_const_body(i.span, Some(x)));
|
||||
(hir::Generics::empty(), hir::TraitItemKind::Const(ty, body))
|
||||
(hir::Generics::empty(), hir::TraitItemKind::Const(ty, body), body.is_some())
|
||||
}
|
||||
AssocItemKind::Fn(box Fn { ref sig, ref generics, body: None, .. }) => {
|
||||
let names = self.lower_fn_params_to_names(&sig.decl);
|
||||
let (generics, sig) =
|
||||
self.lower_method_sig(generics, sig, i.id, FnDeclKind::Trait, None);
|
||||
(generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(names)))
|
||||
(generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(names)), false)
|
||||
}
|
||||
AssocItemKind::Fn(box Fn { ref sig, ref generics, body: Some(ref body), .. }) => {
|
||||
let asyncness = sig.header.asyncness;
|
||||
@ -794,7 +777,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
FnDeclKind::Trait,
|
||||
asyncness.opt_return_id(),
|
||||
);
|
||||
(generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)))
|
||||
(generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)), true)
|
||||
}
|
||||
AssocItemKind::TyAlias(box TyAlias {
|
||||
ref generics,
|
||||
@ -805,7 +788,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
}) => {
|
||||
let mut generics = generics.clone();
|
||||
add_ty_alias_where_clause(&mut generics, where_clauses, false);
|
||||
self.lower_generics(
|
||||
let (generics, kind) = self.lower_generics(
|
||||
&generics,
|
||||
i.id,
|
||||
ImplTraitContext::Disallowed(ImplTraitPosition::Generic),
|
||||
@ -821,7 +804,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
ty,
|
||||
)
|
||||
},
|
||||
)
|
||||
);
|
||||
(generics, kind, ty.is_some())
|
||||
}
|
||||
AssocItemKind::MacCall(..) => panic!("macro item shouldn't exist at this point"),
|
||||
};
|
||||
@ -833,28 +817,25 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
generics,
|
||||
kind,
|
||||
span: self.lower_span(i.span),
|
||||
defaultness: hir::Defaultness::Default { has_value: has_default },
|
||||
};
|
||||
self.arena.alloc(item)
|
||||
}
|
||||
|
||||
fn lower_trait_item_ref(&mut self, i: &AssocItem) -> hir::TraitItemRef {
|
||||
let (kind, has_default) = match &i.kind {
|
||||
AssocItemKind::Const(_, _, default) => (hir::AssocItemKind::Const, default.is_some()),
|
||||
AssocItemKind::TyAlias(box TyAlias { ty, .. }) => {
|
||||
(hir::AssocItemKind::Type, ty.is_some())
|
||||
}
|
||||
AssocItemKind::Fn(box Fn { sig, body, .. }) => {
|
||||
(hir::AssocItemKind::Fn { has_self: sig.decl.has_self() }, body.is_some())
|
||||
let kind = match &i.kind {
|
||||
AssocItemKind::Const(..) => hir::AssocItemKind::Const,
|
||||
AssocItemKind::TyAlias(..) => hir::AssocItemKind::Type,
|
||||
AssocItemKind::Fn(box Fn { sig, .. }) => {
|
||||
hir::AssocItemKind::Fn { has_self: sig.decl.has_self() }
|
||||
}
|
||||
AssocItemKind::MacCall(..) => unimplemented!(),
|
||||
};
|
||||
let id = hir::TraitItemId { def_id: self.local_def_id(i.id) };
|
||||
let defaultness = hir::Defaultness::Default { has_value: has_default };
|
||||
hir::TraitItemRef {
|
||||
id,
|
||||
ident: self.lower_ident(i.ident),
|
||||
span: self.lower_span(i.span),
|
||||
defaultness,
|
||||
kind,
|
||||
}
|
||||
}
|
||||
@ -865,6 +846,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
}
|
||||
|
||||
fn lower_impl_item(&mut self, i: &AssocItem) -> &'hir hir::ImplItem<'hir> {
|
||||
// Since `default impl` is not yet implemented, this is always true in impls.
|
||||
let has_value = true;
|
||||
let (defaultness, _) = self.lower_defaultness(i.kind.defaultness(), has_value);
|
||||
|
||||
let (generics, kind) = match &i.kind {
|
||||
AssocItemKind::Const(_, ty, expr) => {
|
||||
let ty = self.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type));
|
||||
@ -919,19 +904,16 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
kind,
|
||||
vis_span: self.lower_span(i.vis.span),
|
||||
span: self.lower_span(i.span),
|
||||
defaultness,
|
||||
};
|
||||
self.arena.alloc(item)
|
||||
}
|
||||
|
||||
fn lower_impl_item_ref(&mut self, i: &AssocItem) -> hir::ImplItemRef {
|
||||
// Since `default impl` is not yet implemented, this is always true in impls.
|
||||
let has_value = true;
|
||||
let (defaultness, _) = self.lower_defaultness(i.kind.defaultness(), has_value);
|
||||
hir::ImplItemRef {
|
||||
id: hir::ImplItemId { def_id: self.local_def_id(i.id) },
|
||||
ident: self.lower_ident(i.ident),
|
||||
span: self.lower_span(i.span),
|
||||
defaultness,
|
||||
kind: match &i.kind {
|
||||
AssocItemKind::Const(..) => hir::AssocItemKind::Const,
|
||||
AssocItemKind::TyAlias(..) => hir::AssocItemKind::Type,
|
||||
@ -1272,13 +1254,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
pub(super) fn lower_extern(&mut self, ext: Extern) -> abi::Abi {
|
||||
match ext {
|
||||
Extern::None => abi::Abi::Rust,
|
||||
Extern::Implicit => abi::Abi::FALLBACK,
|
||||
Extern::Explicit(abi) => self.lower_abi(abi),
|
||||
Extern::Implicit(_) => abi::Abi::FALLBACK,
|
||||
Extern::Explicit(abi, _) => self.lower_abi(abi),
|
||||
}
|
||||
}
|
||||
|
||||
fn error_on_invalid_abi(&self, abi: StrLit) {
|
||||
struct_span_err!(self.sess, abi.span, E0703, "invalid ABI: found `{}`", abi.symbol)
|
||||
struct_span_err!(self.tcx.sess, abi.span, E0703, "invalid ABI: found `{}`", abi.symbol)
|
||||
.span_label(abi.span, "invalid ABI")
|
||||
.help(&format!("valid ABIs: {}", abi::all_names().join(", ")))
|
||||
.emit();
|
||||
@ -1367,12 +1349,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
|
||||
let mut predicates: SmallVec<[hir::WherePredicate<'hir>; 4]> = SmallVec::new();
|
||||
predicates.extend(generics.params.iter().filter_map(|param| {
|
||||
let bounds = self.lower_param_bounds(¶m.bounds, itctx);
|
||||
self.lower_generic_bound_predicate(
|
||||
param.ident,
|
||||
param.id,
|
||||
¶m.kind,
|
||||
bounds,
|
||||
¶m.bounds,
|
||||
itctx,
|
||||
PredicateOrigin::GenericParam,
|
||||
)
|
||||
}));
|
||||
@ -1420,13 +1402,17 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
ident: Ident,
|
||||
id: NodeId,
|
||||
kind: &GenericParamKind,
|
||||
bounds: &'hir [hir::GenericBound<'hir>],
|
||||
bounds: &[GenericBound],
|
||||
itctx: ImplTraitContext,
|
||||
origin: PredicateOrigin,
|
||||
) -> Option<hir::WherePredicate<'hir>> {
|
||||
// Do not create a clause if we do not have anything inside it.
|
||||
if bounds.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let bounds = self.lower_param_bounds(bounds, itctx);
|
||||
|
||||
let ident = self.lower_ident(ident);
|
||||
let param_span = ident.span;
|
||||
let span = bounds
|
||||
@ -1467,11 +1453,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
GenericParamKind::Lifetime => {
|
||||
let ident_span = self.lower_span(ident.span);
|
||||
let ident = self.lower_ident(ident);
|
||||
let res = self.resolver.get_lifetime_res(id).unwrap_or_else(|| {
|
||||
panic!("Missing resolution for lifetime {:?} at {:?}", id, ident.span)
|
||||
});
|
||||
let lt_id = self.next_node_id();
|
||||
let lifetime = self.new_named_lifetime_with_res(lt_id, ident_span, ident, res);
|
||||
let lifetime = self.new_named_lifetime(id, lt_id, ident_span, ident);
|
||||
Some(hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
|
||||
lifetime,
|
||||
span,
|
||||
|
||||
File diff suppressed because it is too large
115
compiler/rustc_ast_lowering/src/lifetime_collector.rs
Normal file
@ -0,0 +1,115 @@
|
||||
use super::ResolverAstLoweringExt;
|
||||
use rustc_ast::visit::{self, BoundKind, LifetimeCtxt, Visitor};
|
||||
use rustc_ast::{
|
||||
FnRetTy, GenericBounds, Lifetime, NodeId, PathSegment, PolyTraitRef, TraitBoundModifier, Ty,
|
||||
TyKind,
|
||||
};
|
||||
use rustc_hir::def::LifetimeRes;
|
||||
use rustc_middle::span_bug;
|
||||
use rustc_middle::ty::ResolverAstLowering;
|
||||
use rustc_span::symbol::{kw, Ident};
|
||||
use rustc_span::Span;
|
||||
|
||||
struct LifetimeCollectVisitor<'ast> {
|
||||
resolver: &'ast ResolverAstLowering,
|
||||
current_binders: Vec<NodeId>,
|
||||
collected_lifetimes: Vec<Lifetime>,
|
||||
}
|
||||
|
||||
impl<'ast> LifetimeCollectVisitor<'ast> {
|
||||
fn new(resolver: &'ast ResolverAstLowering) -> Self {
|
||||
Self { resolver, current_binders: Vec::new(), collected_lifetimes: Vec::new() }
|
||||
}
|
||||
|
||||
fn record_lifetime_use(&mut self, lifetime: Lifetime) {
|
||||
match self.resolver.get_lifetime_res(lifetime.id).unwrap_or(LifetimeRes::Error) {
|
||||
LifetimeRes::Param { binder, .. } | LifetimeRes::Fresh { binder, .. } => {
|
||||
if !self.current_binders.contains(&binder) {
|
||||
if !self.collected_lifetimes.contains(&lifetime) {
|
||||
self.collected_lifetimes.push(lifetime);
|
||||
}
|
||||
}
|
||||
}
|
||||
LifetimeRes::Static | LifetimeRes::Error => {
|
||||
if !self.collected_lifetimes.contains(&lifetime) {
|
||||
self.collected_lifetimes.push(lifetime);
|
||||
}
|
||||
}
|
||||
LifetimeRes::Infer => {}
|
||||
res => {
|
||||
let bug_msg = format!(
|
||||
"Unexpected lifetime resolution {:?} for {:?} at {:?}",
|
||||
res, lifetime.ident, lifetime.ident.span
|
||||
);
|
||||
span_bug!(lifetime.ident.span, "{}", bug_msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// This collects lifetimes that are elided, for nodes like `Foo<T>` where there are no explicit
/// lifetime nodes. It is equivalent to having "pseudo" nodes introduced for each of the node ids
/// in the list start..end.
|
||||
fn record_elided_anchor(&mut self, node_id: NodeId, span: Span) {
|
||||
if let Some(LifetimeRes::ElidedAnchor { start, end }) =
|
||||
self.resolver.get_lifetime_res(node_id)
|
||||
{
|
||||
for i in start..end {
|
||||
let lifetime = Lifetime { id: i, ident: Ident::new(kw::UnderscoreLifetime, span) };
|
||||
self.record_lifetime_use(lifetime);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'ast> Visitor<'ast> for LifetimeCollectVisitor<'ast> {
|
||||
fn visit_lifetime(&mut self, lifetime: &'ast Lifetime, _: LifetimeCtxt) {
|
||||
self.record_lifetime_use(*lifetime);
|
||||
}
|
||||
|
||||
fn visit_path_segment(&mut self, path_span: Span, path_segment: &'ast PathSegment) {
|
||||
self.record_elided_anchor(path_segment.id, path_span);
|
||||
visit::walk_path_segment(self, path_span, path_segment);
|
||||
}
|
||||
|
||||
fn visit_poly_trait_ref(&mut self, t: &'ast PolyTraitRef, m: &'ast TraitBoundModifier) {
|
||||
self.current_binders.push(t.trait_ref.ref_id);
|
||||
|
||||
visit::walk_poly_trait_ref(self, t, m);
|
||||
|
||||
self.current_binders.pop();
|
||||
}
|
||||
|
||||
fn visit_ty(&mut self, t: &'ast Ty) {
|
||||
match t.kind {
|
||||
TyKind::BareFn(_) => {
|
||||
self.current_binders.push(t.id);
|
||||
visit::walk_ty(self, t);
|
||||
self.current_binders.pop();
|
||||
}
|
||||
TyKind::Rptr(None, _) => {
|
||||
self.record_elided_anchor(t.id, t.span);
|
||||
visit::walk_ty(self, t);
|
||||
}
|
||||
_ => {
|
||||
visit::walk_ty(self, t);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn lifetimes_in_ret_ty(resolver: &ResolverAstLowering, ret_ty: &FnRetTy) -> Vec<Lifetime> {
|
||||
let mut visitor = LifetimeCollectVisitor::new(resolver);
|
||||
visitor.visit_fn_ret_ty(ret_ty);
|
||||
visitor.collected_lifetimes
|
||||
}
|
||||
|
||||
pub fn lifetimes_in_bounds(
|
||||
resolver: &ResolverAstLowering,
|
||||
bounds: &GenericBounds,
|
||||
) -> Vec<Lifetime> {
|
||||
let mut visitor = LifetimeCollectVisitor::new(resolver);
|
||||
for bound in bounds {
|
||||
visitor.visit_param_bound(bound, BoundKind::Bound);
|
||||
}
|
||||
visitor.collected_lifetimes
|
||||
}
|
||||
@ -133,7 +133,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
|
||||
// We should've returned in the for loop above.
|
||||
|
||||
self.sess.diagnostic().span_bug(
|
||||
self.diagnostic().span_bug(
|
||||
p.span,
|
||||
&format!(
|
||||
"lower_qpath: no final extension segment in {}..{}",
|
||||
@ -193,7 +193,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
GenericArgs::Parenthesized(ref data) => match parenthesized_generic_args {
|
||||
ParenthesizedGenericArgs::Ok => self.lower_parenthesized_parameter_data(data),
|
||||
ParenthesizedGenericArgs::Err => {
|
||||
let mut err = struct_span_err!(self.sess, data.span, E0214, "{}", msg);
|
||||
let mut err = struct_span_err!(self.tcx.sess, data.span, E0214, "{}", msg);
|
||||
err.span_label(data.span, "only `Fn` traits may use parentheses");
|
||||
// Suggest replacing parentheses with angle brackets `Trait(params...)` to `Trait<params...>`
|
||||
if !data.inputs.is_empty() {
|
||||
|
||||
@ -13,7 +13,9 @@ use rustc_ast::walk_list;
|
||||
use rustc_ast::*;
|
||||
use rustc_ast_pretty::pprust::{self, State};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_errors::{error_code, pluralize, struct_span_err, Applicability};
|
||||
use rustc_errors::{
|
||||
error_code, pluralize, struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed,
|
||||
};
|
||||
use rustc_parse::validate_attr;
|
||||
use rustc_session::lint::builtin::{
|
||||
DEPRECATED_WHERE_CLAUSE_LOCATION, MISSING_ABI, PATTERNS_IN_FNS_WITHOUT_BODY,
|
||||
@ -476,6 +478,17 @@ impl<'a> AstValidator<'a> {
|
||||
}
|
||||
|
||||
fn error_item_without_body(&self, sp: Span, ctx: &str, msg: &str, sugg: &str) {
|
||||
self.error_item_without_body_with_help(sp, ctx, msg, sugg, |_| ());
|
||||
}
|
||||
|
||||
fn error_item_without_body_with_help(
|
||||
&self,
|
||||
sp: Span,
|
||||
ctx: &str,
|
||||
msg: &str,
|
||||
sugg: &str,
|
||||
help: impl FnOnce(&mut DiagnosticBuilder<'_, ErrorGuaranteed>),
|
||||
) {
|
||||
let source_map = self.session.source_map();
|
||||
let end = source_map.end_point(sp);
|
||||
let replace_span = if source_map.span_to_snippet(end).map(|s| s == ";").unwrap_or(false) {
|
||||
@ -483,15 +496,15 @@ impl<'a> AstValidator<'a> {
|
||||
} else {
|
||||
sp.shrink_to_hi()
|
||||
};
|
||||
self.err_handler()
|
||||
.struct_span_err(sp, msg)
|
||||
.span_suggestion(
|
||||
replace_span,
|
||||
&format!("provide a definition for the {}", ctx),
|
||||
sugg,
|
||||
Applicability::HasPlaceholders,
|
||||
)
|
||||
.emit();
|
||||
let mut err = self.err_handler().struct_span_err(sp, msg);
|
||||
err.span_suggestion(
|
||||
replace_span,
|
||||
&format!("provide a definition for the {}", ctx),
|
||||
sugg,
|
||||
Applicability::HasPlaceholders,
|
||||
);
|
||||
help(&mut err);
|
||||
err.emit();
|
||||
}
|
||||
|
||||
fn check_impl_item_provided<T>(&self, sp: Span, body: &Option<T>, ctx: &str, sugg: &str) {
|
||||
@ -630,7 +643,8 @@ impl<'a> AstValidator<'a> {
|
||||
match (fk.ctxt(), fk.header()) {
|
||||
(Some(FnCtxt::Foreign), _) => return,
|
||||
(Some(FnCtxt::Free), Some(header)) => match header.ext {
|
||||
Extern::Explicit(StrLit { symbol_unescaped: sym::C, .. }) | Extern::Implicit
|
||||
Extern::Explicit(StrLit { symbol_unescaped: sym::C, .. }, _)
|
||||
| Extern::Implicit(_)
|
||||
if matches!(header.unsafety, Unsafe::Yes(_)) =>
|
||||
{
|
||||
return;
|
||||
@ -842,7 +856,7 @@ impl<'a> AstValidator<'a> {
|
||||
.emit();
|
||||
});
|
||||
self.check_late_bound_lifetime_defs(&bfty.generic_params);
|
||||
if let Extern::Implicit = bfty.ext {
|
||||
if let Extern::Implicit(_) = bfty.ext {
|
||||
let sig_span = self.session.source_map().next_point(ty.span.shrink_to_lo());
|
||||
self.maybe_lint_missing_abi(sig_span, ty.id);
|
||||
}
|
||||
@ -1190,8 +1204,38 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
||||
|
||||
if body.is_none() {
|
||||
let msg = "free function without a body";
|
||||
self.error_item_without_body(item.span, "function", msg, " { <body> }");
|
||||
let ext = sig.header.ext;
|
||||
|
||||
let f = |e: &mut DiagnosticBuilder<'_, _>| {
|
||||
if let Extern::Implicit(start_span) | Extern::Explicit(_, start_span) = &ext
|
||||
{
|
||||
let start_suggestion = if let Extern::Explicit(abi, _) = ext {
|
||||
format!("extern \"{}\" {{", abi.symbol_unescaped)
|
||||
} else {
|
||||
"extern {".to_owned()
|
||||
};
|
||||
|
||||
let end_suggestion = " }".to_owned();
|
||||
let end_span = item.span.shrink_to_hi();
|
||||
|
||||
e
|
||||
.multipart_suggestion(
|
||||
"if you meant to declare an externally defined function, use an `extern` block",
|
||||
vec![(*start_span, start_suggestion), (end_span, end_suggestion)],
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
self.error_item_without_body_with_help(
|
||||
item.span,
|
||||
"function",
|
||||
msg,
|
||||
" { <body> }",
|
||||
f,
|
||||
);
|
||||
}
|
||||
|
||||
self.visit_vis(&item.vis);
|
||||
self.visit_ident(item.ident);
|
||||
let kind =
|
||||
@ -1238,7 +1282,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
||||
self.visit_vis(&item.vis);
|
||||
self.visit_ident(item.ident);
|
||||
self.visit_generics(generics);
|
||||
self.with_banned_tilde_const(|this| {
|
||||
self.with_tilde_const_allowed(|this| {
|
||||
walk_list!(this, visit_param_bound, bounds, BoundKind::SuperTraits)
|
||||
});
|
||||
walk_list!(self, visit_assoc_item, items, AssocCtxt::Trait);
|
||||
@ -1553,10 +1597,14 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
|
||||
.emit();
|
||||
}
|
||||
|
||||
if let FnKind::Closure(ClosureBinder::For { generic_params, .. }, ..) = fk {
|
||||
self.check_late_bound_lifetime_defs(generic_params);
|
||||
}
|
||||
|
||||
if let FnKind::Fn(
|
||||
_,
|
||||
_,
|
||||
FnSig { span: sig_span, header: FnHeader { ext: Extern::Implicit, .. }, .. },
|
||||
FnSig { span: sig_span, header: FnHeader { ext: Extern::Implicit(_), .. }, .. },
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
|
||||
@ -2,7 +2,7 @@ use rustc_ast as ast;
|
||||
use rustc_ast::visit::{self, AssocCtxt, FnCtxt, FnKind, Visitor};
|
||||
use rustc_ast::{AssocConstraint, AssocConstraintKind, NodeId};
|
||||
use rustc_ast::{PatKind, RangeEnd, VariantData};
|
||||
use rustc_errors::struct_span_err;
|
||||
use rustc_errors::{struct_span_err, Applicability};
|
||||
use rustc_feature::{AttributeGate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
|
||||
use rustc_feature::{Features, GateIssue};
|
||||
use rustc_session::parse::{feature_err, feature_err_issue};
|
||||
@ -62,9 +62,9 @@ impl<'a> PostExpansionVisitor<'a> {
|
||||
let ast::StrLit { symbol_unescaped, span, .. } = abi;
|
||||
|
||||
if let ast::Const::Yes(_) = constness {
|
||||
match symbol_unescaped.as_str() {
|
||||
match symbol_unescaped {
|
||||
// Stable
|
||||
"Rust" | "C" => {}
|
||||
sym::Rust | sym::C => {}
|
||||
abi => gate_feature_post!(
|
||||
&self,
|
||||
const_extern_fn,
|
||||
@ -274,16 +274,18 @@ impl<'a> PostExpansionVisitor<'a> {
|
||||
);
|
||||
}
|
||||
abi => {
|
||||
self.sess.parse_sess.span_diagnostic.delay_span_bug(
|
||||
span,
|
||||
&format!("unrecognized ABI not caught in lowering: {}", abi),
|
||||
);
|
||||
if self.sess.opts.pretty.map_or(true, |ppm| ppm.needs_hir()) {
|
||||
self.sess.parse_sess.span_diagnostic.delay_span_bug(
|
||||
span,
|
||||
&format!("unrecognized ABI not caught in lowering: {}", abi),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn check_extern(&self, ext: ast::Extern, constness: ast::Const) {
|
||||
if let ast::Extern::Explicit(abi) = ext {
|
||||
if let ast::Extern::Explicit(abi, _) = ext {
|
||||
self.check_abi(abi, constness);
|
||||
}
|
||||
}
|
||||
@ -402,8 +404,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
|
||||
gate_feature_post!(self, rustdoc_internals, attr.span, msg);
|
||||
}
|
||||
|
||||
if nested_meta.has_name(sym::tuple_variadic) {
|
||||
let msg = "`#[doc(tuple_variadic)]` is meant for internal use only";
|
||||
if nested_meta.has_name(sym::fake_variadic) {
|
||||
let msg = "`#[doc(fake_variadic)]` is meant for internal use only";
|
||||
gate_feature_post!(self, rustdoc_internals, attr.span, msg);
|
||||
}
|
||||
}
|
||||
@ -575,6 +577,32 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_stmt(&mut self, stmt: &'a ast::Stmt) {
|
||||
if let ast::StmtKind::Semi(expr) = &stmt.kind
|
||||
&& let ast::ExprKind::Assign(lhs, _, _) = &expr.kind
|
||||
&& let ast::ExprKind::Type(..) = lhs.kind
|
||||
&& self.sess.parse_sess.span_diagnostic.err_count() == 0
|
||||
&& !self.features.type_ascription
|
||||
&& !lhs.span.allows_unstable(sym::type_ascription)
|
||||
{
|
||||
// When we encounter a statement of the form `foo: Ty = val;`, this will emit a type
|
||||
// ascription error, but the likely intention was to write a `let` statement. (#78907).
|
||||
feature_err_issue(
|
||||
&self.sess.parse_sess,
|
||||
sym::type_ascription,
|
||||
lhs.span,
|
||||
GateIssue::Language,
|
||||
"type ascription is experimental",
|
||||
).span_suggestion_verbose(
|
||||
lhs.span.shrink_to_lo(),
|
||||
"you might have meant to introduce a new binding",
|
||||
"let ".to_string(),
|
||||
Applicability::MachineApplicable,
|
||||
).emit();
|
||||
}
|
||||
visit::walk_stmt(self, stmt);
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, e: &'a ast::Expr) {
match e.kind {
ast::ExprKind::Box(_) => {
@ -744,6 +772,11 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) {
"async closures are unstable",
"to use an async block, remove the `||`: `async {`"
);
gate_all!(
closure_lifetime_binder,
"`for<...>` binders for closures are experimental",
"consider removing `for<...>`"
);
gate_all!(more_qualified_paths, "usage of qualified paths in this context is experimental");
gate_all!(generators, "yield syntax is experimental");
gate_all!(raw_ref_op, "raw address of syntax is experimental");
@ -789,8 +822,6 @@ fn maybe_stage_features(sess: &Session, krate: &ast::Crate) {
// checks if `#![feature]` has been used to enable any lang feature
// does not check the same for lib features unless there's at least one
// declared lang feature
use rustc_errors::Applicability;

if !sess.opts.unstable_features.is_nightly_build() {
let lang_features = &sess.features_untracked().declared_lang_features;
if lang_features.len() == 0 {

@ -16,9 +16,8 @@ impl NodeCounter {
}

impl<'ast> Visitor<'ast> for NodeCounter {
fn visit_ident(&mut self, ident: Ident) {
fn visit_ident(&mut self, _ident: Ident) {
self.count += 1;
walk_ident(self, ident);
}
fn visit_foreign_item(&mut self, i: &ForeignItem) {
self.count += 1;

@ -145,7 +145,7 @@ pub fn print_crate<'a>(
/// This makes printed token streams look slightly nicer,
/// and also addresses some specific regressions described in #63896 and #73345.
fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool {
if let TokenTree::Token(token) = prev {
if let TokenTree::Token(token, _) = prev {
if matches!(token.kind, token::Dot | token::Dollar) {
return false;
}
@ -154,12 +154,12 @@ fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool {
}
}
match tt {
TokenTree::Token(token) => !matches!(token.kind, token::Comma | token::Not | token::Dot),
TokenTree::Token(token, _) => !matches!(token.kind, token::Comma | token::Not | token::Dot),
TokenTree::Delimited(_, Delimiter::Parenthesis, _) => {
!matches!(prev, TokenTree::Token(Token { kind: token::Ident(..), .. }))
!matches!(prev, TokenTree::Token(Token { kind: token::Ident(..), .. }, _))
}
TokenTree::Delimited(_, Delimiter::Bracket, _) => {
!matches!(prev, TokenTree::Token(Token { kind: token::Pound, .. }))
!matches!(prev, TokenTree::Token(Token { kind: token::Pound, .. }, _))
}
TokenTree::Delimited(..) => true,
}
@ -377,7 +377,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere

fn print_string(&mut self, st: &str, style: ast::StrStyle) {
let st = match style {
ast::StrStyle::Cooked => (format!("\"{}\"", st.escape_debug())),
ast::StrStyle::Cooked => format!("\"{}\"", st.escape_debug()),
ast::StrStyle::Raw(n) => {
format!("r{delim}\"{string}\"{delim}", delim = "#".repeat(n as usize), string = st)
}
@ -526,7 +526,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
/// expression arguments as expressions). It can be done! I think.
fn print_tt(&mut self, tt: &TokenTree, convert_dollar_crate: bool) {
match tt {
TokenTree::Token(token) => {
TokenTree::Token(token, _) => {
let token_str = self.token_to_string_ext(&token, convert_dollar_crate);
self.word(token_str);
if let token::DocComment(..) = token.kind {
@ -1734,10 +1734,10 @@ impl<'a> State<'a> {

match header.ext {
ast::Extern::None => {}
ast::Extern::Implicit => {
ast::Extern::Implicit(_) => {
self.word_nbsp("extern");
}
ast::Extern::Explicit(abi) => {
ast::Extern::Explicit(abi, _) => {
self.word_nbsp("extern");
self.print_literal(&abi.as_lit());
self.nbsp();

@ -389,6 +389,7 @@ impl<'a> State<'a> {
self.bclose(expr.span, empty);
}
ast::ExprKind::Closure(
ref binder,
capture_clause,
asyncness,
movability,
@ -396,6 +397,7 @@ impl<'a> State<'a> {
ref body,
_,
) => {
self.print_closure_binder(binder);
self.print_movability(movability);
self.print_asyncness(asyncness);
self.print_capture_clause(capture_clause);
@ -594,6 +596,15 @@ impl<'a> State<'a> {
self.end(); // Close enclosing cbox.
}

fn print_closure_binder(&mut self, binder: &ast::ClosureBinder) {
match binder {
ast::ClosureBinder::NotPresent => {}
ast::ClosureBinder::For { generic_params, .. } => {
self.print_formal_generic_params(&generic_params)
}
}
}

fn print_movability(&mut self, movability: ast::Movability) {
match movability {
ast::Movability::Static => self.word_space("static"),

@ -135,9 +135,42 @@ impl ConstStability {
#[derive(Encodable, Decodable, PartialEq, Copy, Clone, Debug, Eq, Hash)]
#[derive(HashStable_Generic)]
pub enum StabilityLevel {
// Reason for the current stability level and the relevant rust-lang issue
Unstable { reason: Option<Symbol>, issue: Option<NonZeroU32>, is_soft: bool },
Stable { since: Symbol },
/// `#[unstable]`
Unstable {
/// Reason for the current stability level.
reason: UnstableReason,
/// Relevant `rust-lang/rust` issue.
issue: Option<NonZeroU32>,
is_soft: bool,
/// If part of a feature is stabilized and a new feature is added for the remaining parts,
/// then the `implied_by` attribute is used to indicate which now-stable feature previously
/// contained a item.
///
/// ```pseudo-Rust
/// #[unstable(feature = "foo", issue = "...")]
/// fn foo() {}
/// #[unstable(feature = "foo", issue = "...")]
/// fn foobar() {}
/// ```
///
/// ...becomes...
///
/// ```pseudo-Rust
/// #[stable(feature = "foo", since = "1.XX.X")]
/// fn foo() {}
/// #[unstable(feature = "foobar", issue = "...", implied_by = "foo")]
/// fn foobar() {}
/// ```
implied_by: Option<Symbol>,
},
/// `#[stable]`
Stable {
/// Rust release which stabilized this feature.
since: Symbol,
/// Is this item allowed to be referred to on stable, despite being contained in unstable
/// modules?
allowed_through_unstable_modules: bool,
},
}

impl StabilityLevel {
@ -149,6 +182,32 @@ impl StabilityLevel {
}
}

#[derive(Encodable, Decodable, PartialEq, Copy, Clone, Debug, Eq, Hash)]
#[derive(HashStable_Generic)]
pub enum UnstableReason {
None,
Default,
Some(Symbol),
}

impl UnstableReason {
fn from_opt_reason(reason: Option<Symbol>) -> Self {
// UnstableReason::Default constructed manually
match reason {
Some(r) => Self::Some(r),
None => Self::None,
}
}

pub fn to_opt_reason(&self) -> Option<Symbol> {
match self {
Self::None => None,
Self::Default => Some(sym::unstable_location_reason_default),
Self::Some(r) => Some(*r),
}
}
}

/// Collects stability info from all stability attributes in `attrs`.
/// Returns `None` if no stability attributes are found.
pub fn find_stability(
@ -172,6 +231,7 @@ where
let mut stab: Option<(Stability, Span)> = None;
let mut const_stab: Option<(ConstStability, Span)> = None;
let mut promotable = false;
let mut allowed_through_unstable_modules = false;

let diagnostic = &sess.parse_sess.span_diagnostic;

@ -182,6 +242,7 @@ where
sym::unstable,
sym::stable,
sym::rustc_promotable,
sym::rustc_allowed_through_unstable_modules,
]
.iter()
.any(|&s| attr.has_name(s))
@ -193,6 +254,8 @@ where

if attr.has_name(sym::rustc_promotable) {
promotable = true;
} else if attr.has_name(sym::rustc_allowed_through_unstable_modules) {
allowed_through_unstable_modules = true;
}
// attributes with data
else if let Some(MetaItem { kind: MetaItemKind::List(ref metas), .. }) = meta {
@ -239,6 +302,7 @@ where
let mut issue = None;
let mut issue_num = None;
let mut is_soft = false;
let mut implied_by = None;
for meta in metas {
let Some(mi) = meta.meta_item() else {
handle_errors(
@ -304,6 +368,11 @@ where
}
is_soft = true;
}
sym::implied_by => {
if !get(mi, &mut implied_by) {
continue 'outer;
}
}
_ => {
handle_errors(
&sess.parse_sess,
@ -328,7 +397,12 @@ where
);
continue;
}
let level = Unstable { reason, issue: issue_num, is_soft };
let level = Unstable {
reason: UnstableReason::from_opt_reason(reason),
issue: issue_num,
is_soft,
implied_by,
};
if sym::unstable == meta_name {
stab = Some((Stability { level, feature }, attr.span));
} else {
@ -387,7 +461,7 @@ where
meta.span(),
AttrError::UnknownMetaItem(
pprust::path_to_string(&mi.path),
&["since", "note"],
&["feature", "since"],
),
);
continue 'outer;
@ -406,7 +480,7 @@ where

match (feature, since) {
(Some(feature), Some(since)) => {
let level = Stable { since };
let level = Stable { since, allowed_through_unstable_modules: false };
if sym::stable == meta_name {
stab = Some((Stability { level, feature }, attr.span));
} else {
@ -447,6 +521,27 @@ where
}
}

if allowed_through_unstable_modules {
if let Some((
Stability {
level: StabilityLevel::Stable { ref mut allowed_through_unstable_modules, .. },
..
},
_,
)) = stab
{
*allowed_through_unstable_modules = true;
} else {
struct_span_err!(
diagnostic,
item_sp,
E0789,
"`rustc_allowed_through_unstable_modules` attribute must be paired with a `stable` attribute"
)
.emit();
}
}

(stab, const_stab)
}

@ -856,7 +951,6 @@ pub enum ReprAttr {
ReprSimd,
ReprTransparent,
ReprAlign(u32),
ReprNoNiche,
}

#[derive(Eq, PartialEq, Debug, Copy, Clone)]
@ -904,7 +998,6 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
sym::packed => Some(ReprPacked(1)),
sym::simd => Some(ReprSimd),
sym::transparent => Some(ReprTransparent),
sym::no_niche => Some(ReprNoNiche),
sym::align => {
let mut err = struct_span_err!(
diagnostic,
@ -943,7 +1036,7 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
Ok(literal) => acc.push(ReprPacked(literal)),
Err(message) => literal_error = Some(message),
};
} else if matches!(name, sym::C | sym::simd | sym::transparent | sym::no_niche)
} else if matches!(name, sym::C | sym::simd | sym::transparent)
|| int_type_of_word(name).is_some()
{
recognised = true;
@ -1001,7 +1094,7 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
} else {
if matches!(
meta_item.name_or_empty(),
sym::C | sym::simd | sym::transparent | sym::no_niche
sym::C | sym::simd | sym::transparent
) || int_type_of_word(meta_item.name_or_empty()).is_some()
{
recognised = true;
@ -1039,7 +1132,7 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
.emit();
} else if matches!(
meta_item.name_or_empty(),
sym::C | sym::simd | sym::transparent | sym::no_niche
sym::C | sym::simd | sym::transparent
) || int_type_of_word(meta_item.name_or_empty()).is_some()
{
recognised = true;

@ -11,7 +11,7 @@ either = "1.5.0"
|
||||
itertools = "0.10.1"
|
||||
tracing = "0.1"
|
||||
polonius-engine = "0.13.0"
|
||||
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
|
||||
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
|
||||
rustc_data_structures = { path = "../rustc_data_structures" }
|
||||
rustc_errors = { path = "../rustc_errors" }
|
||||
rustc_graphviz = { path = "../rustc_graphviz" }
|
||||
@ -19,6 +19,7 @@ rustc_hir = { path = "../rustc_hir" }
|
||||
rustc_index = { path = "../rustc_index" }
|
||||
rustc_infer = { path = "../rustc_infer" }
|
||||
rustc_lexer = { path = "../rustc_lexer" }
|
||||
rustc_macros = { path = "../rustc_macros" }
|
||||
rustc_middle = { path = "../rustc_middle" }
|
||||
rustc_const_eval = { path = "../rustc_const_eval" }
|
||||
rustc_mir_dataflow = { path = "../rustc_mir_dataflow" }
|
||||
|
||||
@ -92,9 +92,9 @@ impl LocalsStateAtExit {
|
||||
struct HasStorageDead(BitSet<Local>);
|
||||
|
||||
impl<'tcx> Visitor<'tcx> for HasStorageDead {
|
||||
fn visit_local(&mut self, local: &Local, ctx: PlaceContext, _: Location) {
|
||||
fn visit_local(&mut self, local: Local, ctx: PlaceContext, _: Location) {
|
||||
if ctx == PlaceContext::NonUse(NonUseContext::StorageDead) {
|
||||
self.0.insert(*local);
|
||||
self.0.insert(local);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -223,7 +223,7 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
|
||||
self.super_assign(assigned_place, rvalue, location)
|
||||
}
|
||||
|
||||
fn visit_local(&mut self, temp: &Local, context: PlaceContext, location: Location) {
|
||||
fn visit_local(&mut self, temp: Local, context: PlaceContext, location: Location) {
|
||||
if !context.is_use() {
|
||||
return;
|
||||
}
|
||||
@ -232,7 +232,7 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
|
||||
// check whether we (earlier) saw a 2-phase borrow like
|
||||
//
|
||||
// TMP = &mut place
|
||||
if let Some(&borrow_index) = self.pending_activations.get(temp) {
|
||||
if let Some(&borrow_index) = self.pending_activations.get(&temp) {
|
||||
let borrow_data = &mut self.location_map[borrow_index.as_usize()];
|
||||
|
||||
// Watch out: the use of TMP in the borrow itself
|
||||
|
||||
@ -1,4 +1,6 @@
|
||||
use rustc_errors::{struct_span_err, DiagnosticBuilder, DiagnosticId, ErrorGuaranteed, MultiSpan};
|
||||
use rustc_errors::{
|
||||
struct_span_err, DiagnosticBuilder, DiagnosticId, DiagnosticMessage, ErrorGuaranteed, MultiSpan,
|
||||
};
|
||||
use rustc_middle::ty::{self, Ty, TyCtxt};
|
||||
use rustc_span::Span;
|
||||
|
||||
@ -31,22 +33,6 @@ impl<'cx, 'tcx> crate::MirBorrowckCtxt<'cx, 'tcx> {
|
||||
err
|
||||
}
|
||||
|
||||
pub(crate) fn cannot_act_on_uninitialized_variable(
|
||||
&self,
|
||||
span: Span,
|
||||
verb: &str,
|
||||
desc: &str,
|
||||
) -> DiagnosticBuilder<'cx, ErrorGuaranteed> {
|
||||
struct_span_err!(
|
||||
self,
|
||||
span,
|
||||
E0381,
|
||||
"{} of possibly-uninitialized variable: `{}`",
|
||||
verb,
|
||||
desc,
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn cannot_mutably_borrow_multiply(
|
||||
&self,
|
||||
new_loan_span: Span,
|
||||
@ -173,8 +159,7 @@ impl<'cx, 'tcx> crate::MirBorrowckCtxt<'cx, 'tcx> {
|
||||
self,
|
||||
new_loan_span,
|
||||
E0501,
|
||||
"cannot borrow {}{} as {} because previous closure \
|
||||
requires unique access",
|
||||
"cannot borrow {}{} as {} because previous closure requires unique access",
|
||||
desc_new,
|
||||
opt_via,
|
||||
kind_new,
|
||||
@ -451,9 +436,8 @@ impl<'cx, 'tcx> crate::MirBorrowckCtxt<'cx, 'tcx> {
|
||||
self,
|
||||
closure_span,
|
||||
E0373,
|
||||
"{} may outlive the current function, \
|
||||
but it borrows {}, \
|
||||
which is owned by the current function",
|
||||
"{} may outlive the current function, but it borrows {}, which is owned by the current \
|
||||
function",
|
||||
closure_kind,
|
||||
borrowed_path,
|
||||
);
|
||||
@ -476,10 +460,11 @@ impl<'cx, 'tcx> crate::MirBorrowckCtxt<'cx, 'tcx> {
|
||||
struct_span_err!(self, span, E0716, "temporary value dropped while borrowed",)
|
||||
}
|
||||
|
||||
fn struct_span_err_with_code<S: Into<MultiSpan>>(
|
||||
#[rustc_lint_diagnostics]
|
||||
pub(crate) fn struct_span_err_with_code<S: Into<MultiSpan>>(
|
||||
&self,
|
||||
sp: S,
|
||||
msg: &str,
|
||||
msg: impl Into<DiagnosticMessage>,
|
||||
code: DiagnosticId,
|
||||
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
|
||||
self.infcx.tcx.sess.struct_span_err_with_code(sp, msg, code)
|
||||
|
||||
@ -5,8 +5,8 @@ use rustc_middle::mir::{
|
||||
BasicBlock, BasicBlockData, Body, Local, Location, Place, PlaceRef, ProjectionElem, Rvalue,
|
||||
SourceInfo, Statement, StatementKind, Terminator, TerminatorKind, UserTypeProjection,
|
||||
};
|
||||
use rustc_middle::ty::fold::TypeFoldable;
|
||||
use rustc_middle::ty::subst::SubstsRef;
|
||||
use rustc_middle::ty::visit::TypeVisitable;
|
||||
use rustc_middle::ty::{self, RegionVid, Ty};
|
||||
|
||||
use crate::{
|
||||
@ -149,7 +149,7 @@ impl<'cg, 'cx, 'tcx> Visitor<'tcx> for ConstraintGeneration<'cg, 'cx, 'tcx> {
|
||||
fn visit_ascribe_user_ty(
|
||||
&mut self,
|
||||
_place: &Place<'tcx>,
|
||||
_variance: &ty::Variance,
|
||||
_variance: ty::Variance,
|
||||
_user_ty: &UserTypeProjection,
|
||||
_location: Location,
|
||||
) {
|
||||
@ -163,7 +163,7 @@ impl<'cx, 'cg, 'tcx> ConstraintGeneration<'cx, 'cg, 'tcx> {
|
||||
/// `location`.
|
||||
fn add_regular_live_constraint<T>(&mut self, live_ty: T, location: Location)
|
||||
where
|
||||
T: TypeFoldable<'tcx>,
|
||||
T: TypeVisitable<'tcx>,
|
||||
{
|
||||
debug!("add_regular_live_constraint(live_ty={:?}, location={:?})", live_ty, location);
|
||||
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
use rustc_hir::def_id::LocalDefId;
|
||||
use rustc_index::vec::IndexVec;
|
||||
use rustc_infer::infer::TyCtxtInferExt;
|
||||
use rustc_infer::infer::{DefiningAnchor, TyCtxtInferExt};
|
||||
use rustc_middle::mir::Body;
|
||||
use rustc_middle::ty::{self, TyCtxt};
|
||||
|
||||
@ -31,7 +31,7 @@ pub fn get_body_with_borrowck_facts<'tcx>(
|
||||
def: ty::WithOptConstParam<LocalDefId>,
|
||||
) -> BodyWithBorrowckFacts<'tcx> {
|
||||
let (input_body, promoted) = tcx.mir_promoted(def);
|
||||
tcx.infer_ctxt().with_opaque_type_inference(def.did).enter(|infcx| {
|
||||
tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bind(def.did)).enter(|infcx| {
|
||||
let input_body: &Body<'_> = &input_body.borrow();
|
||||
let promoted: &IndexVec<_, _> = &promoted.borrow();
|
||||
*super::do_mir_borrowck(&infcx, input_body, promoted, true).1.unwrap()
|
||||
|
||||
@ -19,6 +19,9 @@ use std::fmt;
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::region_infer::values::RegionElement;
|
||||
use crate::session_diagnostics::HigherRankedErrorCause;
|
||||
use crate::session_diagnostics::HigherRankedLifetimeError;
|
||||
use crate::session_diagnostics::HigherRankedSubtypeError;
|
||||
use crate::MirBorrowckCtxt;
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -69,7 +72,7 @@ impl<'tcx> UniverseInfo<'tcx> {
|
||||
// up in the existing UI tests. Consider investigating this
|
||||
// some more.
|
||||
mbcx.buffer_error(
|
||||
mbcx.infcx.tcx.sess.struct_span_err(cause.span, "higher-ranked subtype error"),
|
||||
mbcx.infcx.tcx.sess.create_err(HigherRankedSubtypeError { span: cause.span }),
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -216,9 +219,12 @@ impl<'tcx> TypeOpInfo<'tcx> for PredicateQuery<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
span: Span,
|
||||
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
|
||||
let mut err = tcx.sess.struct_span_err(span, "higher-ranked lifetime error");
|
||||
err.note(&format!("could not prove {}", self.canonical_query.value.value.predicate));
|
||||
err
|
||||
tcx.sess.create_err(HigherRankedLifetimeError {
|
||||
cause: Some(HigherRankedErrorCause::CouldNotProve {
|
||||
predicate: self.canonical_query.value.value.predicate.to_string(),
|
||||
}),
|
||||
span,
|
||||
})
|
||||
}
|
||||
|
||||
fn base_universe(&self) -> ty::UniverseIndex {
|
||||
@ -263,9 +269,12 @@ where
|
||||
tcx: TyCtxt<'tcx>,
|
||||
span: Span,
|
||||
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
|
||||
let mut err = tcx.sess.struct_span_err(span, "higher-ranked lifetime error");
|
||||
err.note(&format!("could not normalize `{}`", self.canonical_query.value.value.value));
|
||||
err
|
||||
tcx.sess.create_err(HigherRankedLifetimeError {
|
||||
cause: Some(HigherRankedErrorCause::CouldNotNormalize {
|
||||
value: self.canonical_query.value.value.value.to_string(),
|
||||
}),
|
||||
span,
|
||||
})
|
||||
}
|
||||
|
||||
fn base_universe(&self) -> ty::UniverseIndex {
|
||||
@ -326,7 +335,7 @@ impl<'tcx> TypeOpInfo<'tcx> for AscribeUserTypeQuery<'tcx> {
|
||||
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
|
||||
// FIXME: This error message isn't great, but it doesn't show up in the existing UI tests,
|
||||
// and is only the fallback when the nice error fails. Consider improving this some more.
|
||||
tcx.sess.struct_span_err(span, "higher-ranked lifetime error")
|
||||
tcx.sess.create_err(HigherRankedLifetimeError { cause: None, span })
|
||||
}
|
||||
|
||||
fn base_universe(&self) -> ty::UniverseIndex {
|
||||
@ -366,7 +375,7 @@ impl<'tcx> TypeOpInfo<'tcx> for crate::type_check::InstantiateOpaqueType<'tcx> {
|
||||
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
|
||||
// FIXME: This error message isn't great, but it doesn't show up in the existing UI tests,
|
||||
// and is only the fallback when the nice error fails. Consider improving this some more.
|
||||
tcx.sess.struct_span_err(span, "higher-ranked lifetime error for opaque type!")
|
||||
tcx.sess.create_err(HigherRankedLifetimeError { cause: None, span })
|
||||
}
|
||||
|
||||
fn base_universe(&self) -> ty::UniverseIndex {
|
||||
|
||||
@ -2,9 +2,11 @@ use either::Either;
|
||||
use rustc_const_eval::util::CallKind;
|
||||
use rustc_data_structures::captures::Captures;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, MultiSpan};
|
||||
use rustc_errors::{
|
||||
struct_span_err, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, MultiSpan,
|
||||
};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_hir::intravisit::{walk_block, walk_expr, Visitor};
|
||||
use rustc_hir::{AsyncGeneratorKind, GeneratorKind};
|
||||
use rustc_infer::infer::TyCtxtInferExt;
|
||||
use rustc_infer::traits::ObligationCause;
|
||||
@ -14,12 +16,12 @@ use rustc_middle::mir::{
|
||||
FakeReadCause, LocalDecl, LocalInfo, LocalKind, Location, Operand, Place, PlaceRef,
|
||||
ProjectionElem, Rvalue, Statement, StatementKind, Terminator, TerminatorKind, VarBindingForm,
|
||||
};
|
||||
use rustc_middle::ty::{
|
||||
self, subst::Subst, suggest_constraining_type_params, EarlyBinder, PredicateKind, Ty,
|
||||
};
|
||||
use rustc_middle::ty::{self, subst::Subst, suggest_constraining_type_params, PredicateKind, Ty};
|
||||
use rustc_mir_dataflow::move_paths::{InitKind, MoveOutIndex, MovePathIndex};
|
||||
use rustc_span::def_id::LocalDefId;
|
||||
use rustc_span::hygiene::DesugaringKind;
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::{BytePos, Span};
|
||||
use rustc_span::{BytePos, Span, Symbol};
|
||||
use rustc_trait_selection::infer::InferCtxtExt;
|
||||
use rustc_trait_selection::traits::TraitEngineExt as _;
|
||||
|
||||
@ -35,7 +37,7 @@ use crate::{
|
||||
|
||||
use super::{
|
||||
explain_borrow::{BorrowExplanation, LaterUseKind},
|
||||
IncludingDowncast, RegionName, RegionNameSource, UseSpans,
|
||||
DescribePlaceOpt, RegionName, RegionNameSource, UseSpans,
|
||||
};
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -94,32 +96,20 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
return;
|
||||
}
|
||||
|
||||
let item_msg =
|
||||
match self.describe_place_with_options(used_place, IncludingDowncast(true)) {
|
||||
Some(name) => format!("`{}`", name),
|
||||
None => "value".to_owned(),
|
||||
};
|
||||
let mut err = self.cannot_act_on_uninitialized_variable(
|
||||
let err = self.report_use_of_uninitialized(
|
||||
mpi,
|
||||
used_place,
|
||||
moved_place,
|
||||
desired_action,
|
||||
span,
|
||||
desired_action.as_noun(),
|
||||
&self
|
||||
.describe_place_with_options(moved_place, IncludingDowncast(true))
|
||||
.unwrap_or_else(|| "_".to_owned()),
|
||||
use_spans,
|
||||
);
|
||||
err.span_label(span, format!("use of possibly-uninitialized {}", item_msg));
|
||||
|
||||
use_spans.var_span_label_path_only(
|
||||
&mut err,
|
||||
format!("{} occurs due to use{}", desired_action.as_noun(), use_spans.describe()),
|
||||
);
|
||||
|
||||
self.buffer_error(err);
|
||||
} else {
|
||||
if let Some((reported_place, _)) = self.has_move_error(&move_out_indices) {
|
||||
if self.prefixes(*reported_place, PrefixSet::All).any(|p| p == used_place) {
|
||||
debug!(
|
||||
"report_use_of_moved_or_uninitialized place: error suppressed \
|
||||
mois={:?}",
|
||||
"report_use_of_moved_or_uninitialized place: error suppressed mois={:?}",
|
||||
move_out_indices
|
||||
);
|
||||
return;
|
||||
@ -145,7 +135,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
span,
|
||||
desired_action.as_noun(),
|
||||
partially_str,
|
||||
self.describe_place_with_options(moved_place, IncludingDowncast(true)),
|
||||
self.describe_place_with_options(
|
||||
moved_place,
|
||||
DescribePlaceOpt { including_downcast: true, including_tuple_field: true },
|
||||
),
|
||||
);
|
||||
|
||||
let reinit_spans = maybe_reinitialized_locations
|
||||
@ -282,8 +275,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
let opt_name =
|
||||
self.describe_place_with_options(place.as_ref(), IncludingDowncast(true));
|
||||
let opt_name = self.describe_place_with_options(
|
||||
place.as_ref(),
|
||||
DescribePlaceOpt { including_downcast: true, including_tuple_field: true },
|
||||
);
|
||||
let note_msg = match opt_name {
|
||||
Some(ref name) => format!("`{}`", name),
|
||||
None => "value".to_owned(),
|
||||
@ -317,7 +312,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
));
|
||||
|
||||
// Check first whether the source is accessible (issue #87060)
|
||||
if self.infcx.tcx.sess.source_map().span_to_snippet(deref_target).is_ok() {
|
||||
if self.infcx.tcx.sess.source_map().is_span_accessible(deref_target) {
|
||||
err.span_note(deref_target, "deref defined here");
|
||||
}
|
||||
}
|
||||
@ -326,6 +321,134 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn report_use_of_uninitialized(
|
||||
&self,
|
||||
mpi: MovePathIndex,
|
||||
used_place: PlaceRef<'tcx>,
|
||||
moved_place: PlaceRef<'tcx>,
|
||||
desired_action: InitializationRequiringAction,
|
||||
span: Span,
|
||||
use_spans: UseSpans<'tcx>,
|
||||
) -> DiagnosticBuilder<'cx, ErrorGuaranteed> {
|
||||
// We need all statements in the body where the binding was assigned to to later find all
|
||||
// the branching code paths where the binding *wasn't* assigned to.
|
||||
let inits = &self.move_data.init_path_map[mpi];
|
||||
let move_path = &self.move_data.move_paths[mpi];
|
||||
let decl_span = self.body.local_decls[move_path.place.local].source_info.span;
|
||||
let mut spans = vec![];
|
||||
for init_idx in inits {
|
||||
let init = &self.move_data.inits[*init_idx];
|
||||
let span = init.span(&self.body);
|
||||
if !span.is_dummy() {
|
||||
spans.push(span);
|
||||
}
|
||||
}
|
||||
|
||||
let (name, desc) = match self.describe_place_with_options(
|
||||
moved_place,
|
||||
DescribePlaceOpt { including_downcast: true, including_tuple_field: true },
|
||||
) {
|
||||
Some(name) => (format!("`{name}`"), format!("`{name}` ")),
|
||||
None => ("the variable".to_string(), String::new()),
|
||||
};
|
||||
let path = match self.describe_place_with_options(
|
||||
used_place,
|
||||
DescribePlaceOpt { including_downcast: true, including_tuple_field: true },
|
||||
) {
|
||||
Some(name) => format!("`{name}`"),
|
||||
None => "value".to_string(),
|
||||
};
|
||||
|
||||
// We use the statements were the binding was initialized, and inspect the HIR to look
|
||||
// for the branching codepaths that aren't covered, to point at them.
|
||||
let map = self.infcx.tcx.hir();
|
||||
let body_id = map.body_owned_by(self.mir_def_id());
|
||||
let body = map.body(body_id);
|
||||
|
||||
let mut visitor = ConditionVisitor { spans: &spans, name: &name, errors: vec![] };
|
||||
visitor.visit_body(&body);
|
||||
|
||||
let isnt_initialized = if let InitializationRequiringAction::PartialAssignment
|
||||
| InitializationRequiringAction::Assignment = desired_action
|
||||
{
|
||||
// The same error is emitted for bindings that are *sometimes* initialized and the ones
|
||||
// that are *partially* initialized by assigning to a field of an uninitialized
|
||||
// binding. We differentiate between them for more accurate wording here.
|
||||
"isn't fully initialized"
|
||||
} else if spans
|
||||
.iter()
|
||||
.filter(|i| {
|
||||
// We filter these to avoid misleading wording in cases like the following,
|
||||
// where `x` has an `init`, but it is in the same place we're looking at:
|
||||
// ```
|
||||
// let x;
|
||||
// x += 1;
|
||||
// ```
|
||||
!i.contains(span)
|
||||
// We filter these to avoid incorrect main message on `match-cfg-fake-edges.rs`
|
||||
&& !visitor
|
||||
.errors
|
||||
.iter()
|
||||
.map(|(sp, _)| *sp)
|
||||
.any(|sp| span < sp && !sp.contains(span))
|
||||
})
|
||||
.count()
|
||||
== 0
|
||||
{
|
||||
"isn't initialized"
|
||||
} else {
|
||||
"is possibly-uninitialized"
|
||||
};
|
||||
|
||||
let used = desired_action.as_general_verb_in_past_tense();
|
||||
let mut err =
|
||||
struct_span_err!(self, span, E0381, "{used} binding {desc}{isnt_initialized}");
|
||||
use_spans.var_span_label_path_only(
|
||||
&mut err,
|
||||
format!("{} occurs due to use{}", desired_action.as_noun(), use_spans.describe()),
|
||||
);
|
||||
|
||||
if let InitializationRequiringAction::PartialAssignment
|
||||
| InitializationRequiringAction::Assignment = desired_action
|
||||
{
|
||||
err.help(
|
||||
"partial initialization isn't supported, fully initialize the binding with a \
|
||||
default value and mutate it, or use `std::mem::MaybeUninit`",
|
||||
);
|
||||
}
|
||||
err.span_label(span, format!("{path} {used} here but it {isnt_initialized}"));
|
||||
|
||||
let mut shown = false;
|
||||
for (sp, label) in visitor.errors {
|
||||
if sp < span && !sp.overlaps(span) {
|
||||
// When we have a case like `match-cfg-fake-edges.rs`, we don't want to mention
|
||||
// match arms coming after the primary span because they aren't relevant:
|
||||
// ```
|
||||
// let x;
|
||||
// match y {
|
||||
// _ if { x = 2; true } => {}
|
||||
// _ if {
|
||||
// x; //~ ERROR
|
||||
// false
|
||||
// } => {}
|
||||
// _ => {} // We don't want to point to this.
|
||||
// };
|
||||
// ```
|
||||
err.span_label(sp, &label);
|
||||
shown = true;
|
||||
}
|
||||
}
|
||||
if !shown {
|
||||
for sp in &spans {
|
||||
if *sp < span && !sp.overlaps(span) {
|
||||
err.span_label(*sp, "binding initialized here in some conditions");
|
||||
}
|
||||
}
|
||||
}
|
||||
err.span_label(decl_span, "binding declared here but left uninitialized");
|
||||
err
|
||||
}
|
||||
|
||||
fn suggest_borrow_fn_like(
|
||||
&self,
|
||||
err: &mut DiagnosticBuilder<'tcx, ErrorGuaranteed>,
|
||||
@ -336,23 +459,24 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
let tcx = self.infcx.tcx;
|
||||
|
||||
// Find out if the predicates show that the type is a Fn or FnMut
|
||||
let find_fn_kind_from_did = |predicates: &[(ty::Predicate<'tcx>, Span)], substs| {
|
||||
predicates.iter().find_map(|(pred, _)| {
|
||||
let pred = if let Some(substs) = substs {
|
||||
EarlyBinder(*pred).subst(tcx, substs).kind().skip_binder()
|
||||
} else {
|
||||
pred.kind().skip_binder()
|
||||
};
|
||||
if let ty::PredicateKind::Trait(pred) = pred && pred.self_ty() == ty {
|
||||
let find_fn_kind_from_did =
|
||||
|predicates: ty::EarlyBinder<&[(ty::Predicate<'tcx>, Span)]>, substs| {
|
||||
predicates.0.iter().find_map(|(pred, _)| {
|
||||
let pred = if let Some(substs) = substs {
|
||||
predicates.rebind(*pred).subst(tcx, substs).kind().skip_binder()
|
||||
} else {
|
||||
pred.kind().skip_binder()
|
||||
};
|
||||
if let ty::PredicateKind::Trait(pred) = pred && pred.self_ty() == ty {
|
||||
if Some(pred.def_id()) == tcx.lang_items().fn_trait() {
|
||||
return Some(hir::Mutability::Not);
|
||||
} else if Some(pred.def_id()) == tcx.lang_items().fn_mut_trait() {
|
||||
return Some(hir::Mutability::Mut);
|
||||
}
|
||||
}
|
||||
None
|
||||
})
|
||||
};
|
||||
None
|
||||
})
|
||||
};
|
||||
|
||||
// If the type is opaque/param/closure, and it is Fn or FnMut, let's suggest (mutably)
|
||||
// borrowing the type, since `&mut F: FnMut` iff `F: FnMut` and similarly for `Fn`.
|
||||
@ -360,11 +484,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
// borrowed variants in a function body when we see a move error.
|
||||
let borrow_level = match ty.kind() {
|
||||
ty::Param(_) => find_fn_kind_from_did(
|
||||
tcx.explicit_predicates_of(self.mir_def_id().to_def_id()).predicates,
|
||||
tcx.bound_explicit_predicates_of(self.mir_def_id().to_def_id())
|
||||
.map_bound(|p| p.predicates),
|
||||
None,
|
||||
),
|
||||
ty::Opaque(did, substs) => {
|
||||
find_fn_kind_from_did(tcx.explicit_item_bounds(*did), Some(*substs))
|
||||
find_fn_kind_from_did(tcx.bound_explicit_item_bounds(*did), Some(*substs))
|
||||
}
|
||||
ty::Closure(_, substs) => match substs.as_closure().kind() {
|
||||
ty::ClosureKind::Fn => Some(hir::Mutability::Not),
|
||||
@ -1111,8 +1236,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
from_closure: false,
|
||||
region_name:
|
||||
RegionName {
|
||||
source:
|
||||
RegionNameSource::AnonRegionFromUpvar(upvar_span, ref upvar_name),
|
||||
source: RegionNameSource::AnonRegionFromUpvar(upvar_span, upvar_name),
|
||||
..
|
||||
},
|
||||
span,
|
||||
@ -1384,7 +1508,70 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
| BorrowExplanation::UsedLaterInLoop(..)
|
||||
| BorrowExplanation::UsedLaterWhenDropped { .. } => {
|
||||
// Only give this note and suggestion if it could be relevant.
|
||||
err.note("consider using a `let` binding to create a longer lived value");
|
||||
let sm = self.infcx.tcx.sess.source_map();
|
||||
let mut suggested = false;
|
||||
let msg = "consider using a `let` binding to create a longer lived value";
|
||||
|
||||
/// We check that there's a single level of block nesting to ensure always correct
|
||||
/// suggestions. If we don't, then we only provide a free-form message to avoid
|
||||
/// misleading users in cases like `src/test/ui/nll/borrowed-temporary-error.rs`.
|
||||
/// We could expand the analysis to suggest hoising all of the relevant parts of
|
||||
/// the users' code to make the code compile, but that could be too much.
|
||||
struct NestedStatementVisitor {
|
||||
span: Span,
|
||||
current: usize,
|
||||
found: usize,
|
||||
}
|
||||
|
||||
impl<'tcx> Visitor<'tcx> for NestedStatementVisitor {
|
||||
fn visit_block(&mut self, block: &hir::Block<'tcx>) {
|
||||
self.current += 1;
|
||||
walk_block(self, block);
|
||||
self.current -= 1;
|
||||
}
|
||||
fn visit_expr(&mut self, expr: &hir::Expr<'tcx>) {
|
||||
if self.span == expr.span {
|
||||
self.found = self.current;
|
||||
}
|
||||
walk_expr(self, expr);
|
||||
}
|
||||
}
|
||||
let source_info = self.body.source_info(location);
|
||||
if let Some(scope) = self.body.source_scopes.get(source_info.scope)
|
||||
&& let ClearCrossCrate::Set(scope_data) = &scope.local_data
|
||||
&& let Some(node) = self.infcx.tcx.hir().find(scope_data.lint_root)
|
||||
&& let Some(id) = node.body_id()
|
||||
&& let hir::ExprKind::Block(block, _) = self.infcx.tcx.hir().body(id).value.kind
|
||||
{
|
||||
for stmt in block.stmts {
|
||||
let mut visitor = NestedStatementVisitor {
|
||||
span: proper_span,
|
||||
current: 0,
|
||||
found: 0,
|
||||
};
|
||||
visitor.visit_stmt(stmt);
|
||||
if visitor.found == 0
|
||||
&& stmt.span.contains(proper_span)
|
||||
&& let Some(p) = sm.span_to_margin(stmt.span)
|
||||
&& let Ok(s) = sm.span_to_snippet(proper_span)
|
||||
{
|
||||
let addition = format!("let binding = {};\n{}", s, " ".repeat(p));
|
||||
err.multipart_suggestion_verbose(
|
||||
msg,
|
||||
vec![
|
||||
(stmt.span.shrink_to_lo(), addition),
|
||||
(proper_span, "binding".to_string()),
|
||||
],
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
suggested = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if !suggested {
|
||||
err.note(msg);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -1482,21 +1669,18 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
let return_ty = tcx.erase_regions(return_ty);
|
||||
|
||||
// to avoid panics
|
||||
if let Some(iter_trait) = tcx.get_diagnostic_item(sym::Iterator) {
|
||||
if self
|
||||
if let Some(iter_trait) = tcx.get_diagnostic_item(sym::Iterator)
|
||||
&& self
|
||||
.infcx
|
||||
.type_implements_trait(iter_trait, return_ty, ty_params, self.param_env)
|
||||
.must_apply_modulo_regions()
|
||||
{
|
||||
if let Ok(snippet) = tcx.sess.source_map().span_to_snippet(return_span) {
|
||||
err.span_suggestion_hidden(
|
||||
return_span,
|
||||
"use `.collect()` to allocate the iterator",
|
||||
format!("{snippet}.collect::<Vec<_>>()"),
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
}
|
||||
{
|
||||
err.span_suggestion_hidden(
|
||||
return_span.shrink_to_hi(),
|
||||
"use `.collect()` to allocate the iterator",
|
||||
".collect::<Vec<_>>()",
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1586,7 +1770,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
borrow_span: Span,
|
||||
name: &Option<String>,
|
||||
upvar_span: Span,
|
||||
upvar_name: &str,
|
||||
upvar_name: Symbol,
|
||||
escape_span: Span,
|
||||
) -> DiagnosticBuilder<'cx, ErrorGuaranteed> {
|
||||
let tcx = self.infcx.tcx;
|
||||
@ -1628,7 +1812,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
location: Location,
|
||||
) -> impl Iterator<Item = Location> + Captures<'tcx> + 'a {
|
||||
if location.statement_index == 0 {
|
||||
let predecessors = body.predecessors()[location.block].to_vec();
|
||||
let predecessors = body.basic_blocks.predecessors()[location.block].to_vec();
|
||||
Either::Left(predecessors.into_iter().map(move |bb| body.terminator_loc(bb)))
|
||||
} else {
|
||||
Either::Right(std::iter::once(Location {
|
||||
@ -2049,7 +2233,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
let ty = self.infcx.tcx.type_of(self.mir_def_id());
|
||||
match ty.kind() {
|
||||
ty::FnDef(_, _) | ty::FnPtr(_) => self.annotate_fn_sig(
|
||||
self.mir_def_id().to_def_id(),
|
||||
self.mir_def_id(),
|
||||
self.infcx.tcx.fn_sig(self.mir_def_id()),
|
||||
),
|
||||
_ => None,
|
||||
@ -2093,8 +2277,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
// Check if our `target` was captured by a closure.
|
||||
if let Rvalue::Aggregate(
|
||||
box AggregateKind::Closure(def_id, substs),
|
||||
operands,
|
||||
) = rvalue
|
||||
ref operands,
|
||||
) = *rvalue
|
||||
{
|
||||
for operand in operands {
|
||||
let (Operand::Copy(assigned_from) | Operand::Move(assigned_from)) = operand else {
|
||||
@ -2118,7 +2302,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
// into a place then we should annotate the closure in
|
||||
// case it ends up being assigned into the return place.
|
||||
annotated_closure =
|
||||
self.annotate_fn_sig(*def_id, substs.as_closure().sig());
|
||||
self.annotate_fn_sig(def_id, substs.as_closure().sig());
|
||||
debug!(
|
||||
"annotate_argument_and_return_for_borrow: \
|
||||
annotated_closure={:?} assigned_from_local={:?} \
|
||||
@ -2240,12 +2424,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
/// references.
|
||||
fn annotate_fn_sig(
|
||||
&self,
|
||||
did: DefId,
|
||||
did: LocalDefId,
|
||||
sig: ty::PolyFnSig<'tcx>,
|
||||
) -> Option<AnnotatedBorrowFnSignature<'tcx>> {
|
||||
debug!("annotate_fn_sig: did={:?} sig={:?}", did, sig);
|
||||
let is_closure = self.infcx.tcx.is_closure(did);
|
||||
let fn_hir_id = self.infcx.tcx.hir().local_def_id_to_hir_id(did.as_local()?);
|
||||
let is_closure = self.infcx.tcx.is_closure(did.to_def_id());
|
||||
let fn_hir_id = self.infcx.tcx.hir().local_def_id_to_hir_id(did);
|
||||
let fn_decl = self.infcx.tcx.hir().fn_decl_by_hir_id(fn_hir_id)?;
|
||||
|
||||
// We need to work out which arguments to highlight. We do this by looking
|
||||
@ -2448,3 +2632,142 @@ impl<'tcx> AnnotatedBorrowFnSignature<'tcx> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Detect whether one of the provided spans is a statement nested within the top-most visited expr
|
||||
struct ReferencedStatementsVisitor<'a>(&'a [Span], bool);
|
||||
|
||||
impl<'a, 'v> Visitor<'v> for ReferencedStatementsVisitor<'a> {
|
||||
fn visit_stmt(&mut self, s: &'v hir::Stmt<'v>) {
|
||||
match s.kind {
|
||||
hir::StmtKind::Semi(expr) if self.0.contains(&expr.span) => {
|
||||
self.1 = true;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Given a set of spans representing statements initializing the relevant binding, visit all the
|
||||
/// function expressions looking for branching code paths that *do not* initialize the binding.
|
||||
struct ConditionVisitor<'b> {
|
||||
spans: &'b [Span],
|
||||
name: &'b str,
|
||||
errors: Vec<(Span, String)>,
|
||||
}
|
||||
|
||||
impl<'b, 'v> Visitor<'v> for ConditionVisitor<'b> {
|
||||
fn visit_expr(&mut self, ex: &'v hir::Expr<'v>) {
|
||||
match ex.kind {
|
||||
hir::ExprKind::If(cond, body, None) => {
|
||||
// `if` expressions with no `else` that initialize the binding might be missing an
|
||||
// `else` arm.
|
||||
let mut v = ReferencedStatementsVisitor(self.spans, false);
|
||||
v.visit_expr(body);
|
||||
if v.1 {
|
||||
self.errors.push((
|
||||
cond.span,
|
||||
format!(
|
||||
"if this `if` condition is `false`, {} is not initialized",
|
||||
self.name,
|
||||
),
|
||||
));
|
||||
self.errors.push((
|
||||
ex.span.shrink_to_hi(),
|
||||
format!("an `else` arm might be missing here, initializing {}", self.name),
|
||||
));
|
||||
}
|
||||
}
|
||||
hir::ExprKind::If(cond, body, Some(other)) => {
|
||||
// `if` expressions where the binding is only initialized in one of the two arms
|
||||
// might be missing a binding initialization.
|
||||
let mut a = ReferencedStatementsVisitor(self.spans, false);
|
||||
a.visit_expr(body);
|
||||
let mut b = ReferencedStatementsVisitor(self.spans, false);
|
||||
b.visit_expr(other);
|
||||
match (a.1, b.1) {
|
||||
(true, true) | (false, false) => {}
|
||||
(true, false) => {
|
||||
if other.span.is_desugaring(DesugaringKind::WhileLoop) {
|
||||
self.errors.push((
|
||||
cond.span,
|
||||
format!(
|
||||
"if this condition isn't met and the `while` loop runs 0 \
|
||||
times, {} is not initialized",
|
||||
self.name
|
||||
),
|
||||
));
|
||||
} else {
|
||||
self.errors.push((
|
||||
body.span.shrink_to_hi().until(other.span),
|
||||
format!(
|
||||
"if the `if` condition is `false` and this `else` arm is \
|
||||
executed, {} is not initialized",
|
||||
self.name
|
||||
),
|
||||
));
|
||||
}
|
||||
}
|
||||
(false, true) => {
|
||||
self.errors.push((
|
||||
cond.span,
|
||||
format!(
|
||||
"if this condition is `true`, {} is not initialized",
|
||||
self.name
|
||||
),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
hir::ExprKind::Match(e, arms, loop_desugar) => {
|
||||
// If the binding is initialized in one of the match arms, then the other match
|
||||
// arms might be missing an initialization.
|
||||
let results: Vec<bool> = arms
|
||||
.iter()
|
||||
.map(|arm| {
|
||||
let mut v = ReferencedStatementsVisitor(self.spans, false);
|
||||
v.visit_arm(arm);
|
||||
v.1
|
||||
})
|
||||
.collect();
|
||||
if results.iter().any(|x| *x) && !results.iter().all(|x| *x) {
|
||||
for (arm, seen) in arms.iter().zip(results) {
|
||||
if !seen {
|
||||
if loop_desugar == hir::MatchSource::ForLoopDesugar {
|
||||
self.errors.push((
|
||||
e.span,
|
||||
format!(
|
||||
"if the `for` loop runs 0 times, {} is not initialized",
|
||||
self.name
|
||||
),
|
||||
));
|
||||
} else if let Some(guard) = &arm.guard {
|
||||
self.errors.push((
|
||||
arm.pat.span.to(guard.body().span),
|
||||
format!(
|
||||
"if this pattern and condition are matched, {} is not \
|
||||
initialized",
|
||||
self.name
|
||||
),
|
||||
));
|
||||
} else {
|
||||
self.errors.push((
|
||||
arm.pat.span,
|
||||
format!(
|
||||
"if this pattern is matched, {} is not initialized",
|
||||
self.name
|
||||
),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// FIXME: should we also account for binops, particularly `&&` and `||`? `try` should
|
||||
// also be accounted for. For now it is fine, as if we don't find *any* relevant
|
||||
// branching code paths, we point at the places where the binding *is* initialized for
|
||||
// *some* context.
|
||||
_ => {}
|
||||
}
|
||||
walk_expr(self, ex);
|
||||
}
|
||||
}
|
||||
|
||||
@ -12,7 +12,7 @@ use rustc_middle::mir::{
|
||||
};
|
||||
use rustc_middle::ty::adjustment::PointerCast;
|
||||
use rustc_middle::ty::{self, RegionVid, TyCtxt};
|
||||
use rustc_span::symbol::Symbol;
|
||||
use rustc_span::symbol::{kw, Symbol};
|
||||
use rustc_span::{sym, DesugaringKind, Span};
|
||||
|
||||
use crate::region_infer::BlameConstraint;
|
||||
@ -282,7 +282,7 @@ impl<'tcx> BorrowExplanation<'tcx> {
|
||||
) {
|
||||
if let ConstraintCategory::OpaqueType = category {
|
||||
let suggestable_name =
|
||||
if region_name.was_named() { region_name.to_string() } else { "'_".to_string() };
|
||||
if region_name.was_named() { region_name.name } else { kw::UnderscoreLifetime };
|
||||
|
||||
let msg = format!(
|
||||
"you can add a bound to the {}to make it last less than `'static` and match `{}`",
|
||||
|
||||
@ -18,8 +18,8 @@ struct AllLocalUsesVisitor {
|
||||
}
|
||||
|
||||
impl<'tcx> Visitor<'tcx> for AllLocalUsesVisitor {
|
||||
fn visit_local(&mut self, local: &Local, _context: PlaceContext, location: Location) {
|
||||
if *local == self.for_local {
|
||||
fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
|
||||
if local == self.for_local {
|
||||
self.uses.insert(location);
|
||||
}
|
||||
}
|
||||
|
||||
@ -106,7 +106,7 @@ enum DefUseResult {
|
||||
}
|
||||
|
||||
impl<'cx, 'tcx> Visitor<'tcx> for DefUseVisitor<'cx, 'tcx> {
|
||||
fn visit_local(&mut self, &local: &Local, context: PlaceContext, _: Location) {
|
||||
fn visit_local(&mut self, local: Local, context: PlaceContext, _: Location) {
|
||||
let local_ty = self.body.local_decls[local].ty;
|
||||
|
||||
let mut found_it = false;
|
||||
|
||||
@ -4,10 +4,10 @@ use itertools::Itertools;
|
||||
use rustc_const_eval::util::{call_kind, CallDesugaringKind};
|
||||
use rustc_errors::{Applicability, Diagnostic};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::Namespace;
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_hir::def::{CtorKind, Namespace};
|
||||
use rustc_hir::GeneratorKind;
|
||||
use rustc_infer::infer::TyCtxtInferExt;
|
||||
use rustc_middle::mir::tcx::PlaceTy;
|
||||
use rustc_middle::mir::{
|
||||
AggregateKind, Constant, FakeReadCause, Field, Local, LocalInfo, LocalKind, Location, Operand,
|
||||
Place, PlaceRef, ProjectionElem, Rvalue, Statement, StatementKind, Terminator, TerminatorKind,
|
||||
@ -15,7 +15,8 @@ use rustc_middle::mir::{
|
||||
use rustc_middle::ty::print::Print;
|
||||
use rustc_middle::ty::{self, DefIdTree, Instance, Ty, TyCtxt};
|
||||
use rustc_mir_dataflow::move_paths::{InitLocation, LookupResult};
|
||||
use rustc_span::{symbol::sym, Span, DUMMY_SP};
|
||||
use rustc_span::def_id::LocalDefId;
|
||||
use rustc_span::{symbol::sym, Span, Symbol, DUMMY_SP};
|
||||
use rustc_target::abi::VariantIdx;
|
||||
use rustc_trait_selection::traits::type_known_to_meet_bound_modulo_regions;
|
||||
|
||||
@ -41,9 +42,16 @@ pub(crate) use outlives_suggestion::OutlivesSuggestionBuilder;
|
||||
pub(crate) use region_errors::{ErrorConstraintInfo, RegionErrorKind, RegionErrors};
|
||||
pub(crate) use region_name::{RegionName, RegionNameSource};
|
||||
pub(crate) use rustc_const_eval::util::CallKind;
|
||||
use rustc_middle::mir::tcx::PlaceTy;
|
||||
|
||||
pub(super) struct IncludingDowncast(pub(super) bool);
|
||||
pub(super) struct DescribePlaceOpt {
|
||||
pub including_downcast: bool,
|
||||
|
||||
/// Enable/Disable tuple fields.
|
||||
/// For example `x` tuple. if it's `true` `x.0`. Otherwise `x`
|
||||
pub including_tuple_field: bool,
|
||||
}
|
||||
|
||||
pub(super) struct IncludingTupleField(pub(super) bool);
|
||||
|
||||
impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
/// Adds a suggestion when a closure is invoked twice with a moved variable or when a closure
|
||||
@ -164,7 +172,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
/// End-user visible description of `place` if one can be found.
|
||||
/// If the place is a temporary for instance, `None` will be returned.
|
||||
pub(super) fn describe_place(&self, place_ref: PlaceRef<'tcx>) -> Option<String> {
|
||||
self.describe_place_with_options(place_ref, IncludingDowncast(false))
|
||||
self.describe_place_with_options(
|
||||
place_ref,
|
||||
DescribePlaceOpt { including_downcast: false, including_tuple_field: true },
|
||||
)
|
||||
}
|
||||
|
||||
/// End-user visible description of `place` if one can be found. If the place is a temporary
|
||||
@ -174,7 +185,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
pub(super) fn describe_place_with_options(
|
||||
&self,
|
||||
place: PlaceRef<'tcx>,
|
||||
including_downcast: IncludingDowncast,
|
||||
opt: DescribePlaceOpt,
|
||||
) -> Option<String> {
|
||||
let local = place.local;
|
||||
let mut autoderef_index = None;
|
||||
@ -224,7 +235,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
}
|
||||
}
|
||||
}
|
||||
ProjectionElem::Downcast(..) if including_downcast.0 => return None,
|
||||
ProjectionElem::Downcast(..) if opt.including_downcast => return None,
|
||||
ProjectionElem::Downcast(..) => (),
|
||||
ProjectionElem::Field(field, _ty) => {
|
||||
// FIXME(project-rfc_2229#36): print capture precisely here.
|
||||
@ -238,9 +249,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
let field_name = self.describe_field(
|
||||
PlaceRef { local, projection: place.projection.split_at(index).0 },
|
||||
*field,
|
||||
IncludingTupleField(opt.including_tuple_field),
|
||||
);
|
||||
buf.push('.');
|
||||
buf.push_str(&field_name);
|
||||
if let Some(field_name_str) = field_name {
|
||||
buf.push('.');
|
||||
buf.push_str(&field_name_str);
|
||||
}
|
||||
}
|
||||
}
|
||||
ProjectionElem::Index(index) => {
|
||||
@ -261,6 +275,18 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
ok.ok().map(|_| buf)
|
||||
}
|
||||
|
||||
fn describe_name(&self, place: PlaceRef<'tcx>) -> Option<Symbol> {
|
||||
for elem in place.projection.into_iter() {
|
||||
match elem {
|
||||
ProjectionElem::Downcast(Some(name), _) => {
|
||||
return Some(*name);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Appends end-user visible description of the `local` place to `buf`. If `local` doesn't have
|
||||
/// a name, or its name was generated by the compiler, then `Err` is returned
|
||||
fn append_local_to_string(&self, local: Local, buf: &mut String) -> Result<(), ()> {
|
||||
@ -275,7 +301,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
}
|
||||
|
||||
/// End-user visible description of the `field`nth field of `base`
|
||||
fn describe_field(&self, place: PlaceRef<'tcx>, field: Field) -> String {
|
||||
fn describe_field(
|
||||
&self,
|
||||
place: PlaceRef<'tcx>,
|
||||
field: Field,
|
||||
including_tuple_field: IncludingTupleField,
|
||||
) -> Option<String> {
|
||||
let place_ty = match place {
|
||||
PlaceRef { local, projection: [] } => PlaceTy::from_ty(self.body.local_decls[local].ty),
|
||||
PlaceRef { local, projection: [proj_base @ .., elem] } => match elem {
|
||||
@ -289,7 +320,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
ProjectionElem::Field(_, field_type) => PlaceTy::from_ty(*field_type),
|
||||
},
|
||||
};
|
||||
self.describe_field_from_ty(place_ty.ty, field, place_ty.variant_index)
|
||||
self.describe_field_from_ty(
|
||||
place_ty.ty,
|
||||
field,
|
||||
place_ty.variant_index,
|
||||
including_tuple_field,
|
||||
)
|
||||
}
|
||||
|
||||
/// End-user visible description of the `field_index`nth field of `ty`
|
||||
@ -298,10 +334,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
ty: Ty<'_>,
|
||||
field: Field,
|
||||
variant_index: Option<VariantIdx>,
|
||||
) -> String {
|
||||
including_tuple_field: IncludingTupleField,
|
||||
) -> Option<String> {
|
||||
if ty.is_box() {
|
||||
// If the type is a box, the field is described from the boxed type
|
||||
self.describe_field_from_ty(ty.boxed_ty(), field, variant_index)
|
||||
self.describe_field_from_ty(ty.boxed_ty(), field, variant_index, including_tuple_field)
|
||||
} else {
|
||||
match *ty.kind() {
|
||||
ty::Adt(def, _) => {
|
||||
@ -311,30 +348,34 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
} else {
|
||||
def.non_enum_variant()
|
||||
};
|
||||
variant.fields[field.index()].name.to_string()
|
||||
if !including_tuple_field.0 && variant.ctor_kind == CtorKind::Fn {
|
||||
return None;
|
||||
}
|
||||
Some(variant.fields[field.index()].name.to_string())
|
||||
}
|
||||
ty::Tuple(_) => field.index().to_string(),
|
||||
ty::Tuple(_) => Some(field.index().to_string()),
|
||||
ty::Ref(_, ty, _) | ty::RawPtr(ty::TypeAndMut { ty, .. }) => {
|
||||
self.describe_field_from_ty(ty, field, variant_index)
|
||||
self.describe_field_from_ty(ty, field, variant_index, including_tuple_field)
|
||||
}
|
||||
ty::Array(ty, _) | ty::Slice(ty) => {
|
||||
self.describe_field_from_ty(ty, field, variant_index)
|
||||
self.describe_field_from_ty(ty, field, variant_index, including_tuple_field)
|
||||
}
|
||||
ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => {
|
||||
// We won't be borrowck'ing here if the closure came from another crate,
|
||||
// so it's safe to call `expect_local`.
|
||||
//
|
||||
// We know the field exists so it's safe to call operator[] and `unwrap` here.
|
||||
let def_id = def_id.expect_local();
|
||||
let var_id = self
|
||||
.infcx
|
||||
.tcx
|
||||
.typeck(def_id.expect_local())
|
||||
.typeck(def_id)
|
||||
.closure_min_captures_flattened(def_id)
|
||||
.nth(field.index())
|
||||
.unwrap()
|
||||
.get_root_variable();
|
||||
|
||||
self.infcx.tcx.hir().name(var_id).to_string()
|
||||
Some(self.infcx.tcx.hir().name(var_id).to_string())
|
||||
}
|
||||
_ => {
|
||||
// Might need a revision when the fields in trait RFC is implemented
|
||||
@ -597,16 +638,16 @@ impl UseSpans<'_> {
|
||||
}
|
||||
|
||||
/// Describe the span associated with a use of a place.
|
||||
pub(super) fn describe(&self) -> String {
|
||||
pub(super) fn describe(&self) -> &str {
|
||||
match *self {
|
||||
UseSpans::ClosureUse { generator_kind, .. } => {
|
||||
if generator_kind.is_some() {
|
||||
" in generator".to_string()
|
||||
" in generator"
|
||||
} else {
|
||||
" in closure".to_string()
|
||||
" in closure"
|
||||
}
|
||||
}
|
||||
_ => String::new(),
|
||||
_ => "",
|
||||
}
|
||||
}
|
||||
|
||||
@ -715,12 +756,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
|
||||
debug!("move_spans: moved_place={:?} location={:?} stmt={:?}", moved_place, location, stmt);
|
||||
if let StatementKind::Assign(box (_, Rvalue::Aggregate(ref kind, ref places))) = stmt.kind {
|
||||
match kind {
|
||||
box AggregateKind::Closure(def_id, _)
|
||||
| box AggregateKind::Generator(def_id, _, _) => {
|
||||
match **kind {
|
||||
AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) => {
|
||||
debug!("move_spans: def_id={:?} places={:?}", def_id, places);
|
||||
if let Some((args_span, generator_kind, capture_kind_span, path_span)) =
|
||||
self.closure_span(*def_id, moved_place, places)
|
||||
self.closure_span(def_id, moved_place, places)
|
||||
{
|
||||
return ClosureUse {
|
||||
generator_kind,
|
||||
@ -812,12 +852,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
return FnSelfUse {
|
||||
var_span: stmt.source_info.span,
|
||||
fn_call_span: *fn_span,
|
||||
fn_span: self
|
||||
.infcx
|
||||
.tcx
|
||||
.sess
|
||||
.source_map()
|
||||
.guess_head_span(self.infcx.tcx.def_span(method_did)),
|
||||
fn_span: self.infcx.tcx.def_span(method_did),
|
||||
kind,
|
||||
};
|
||||
}
|
||||
@ -852,7 +887,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
if let StatementKind::Assign(box (_, Rvalue::Aggregate(ref kind, ref places))) =
|
||||
stmt.kind
|
||||
{
|
||||
let (def_id, is_generator) = match kind {
|
||||
let (&def_id, is_generator) = match kind {
|
||||
box AggregateKind::Closure(def_id, _) => (def_id, false),
|
||||
box AggregateKind::Generator(def_id, _, _) => (def_id, true),
|
||||
_ => continue,
|
||||
@ -863,7 +898,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
def_id, is_generator, places
|
||||
);
|
||||
if let Some((args_span, generator_kind, capture_kind_span, path_span)) =
|
||||
self.closure_span(*def_id, Place::from(target).as_ref(), places)
|
||||
self.closure_span(def_id, Place::from(target).as_ref(), places)
|
||||
{
|
||||
return ClosureUse { generator_kind, args_span, capture_kind_span, path_span };
|
||||
} else {
|
||||
@ -884,7 +919,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
/// The second span is the location the use resulting in the captured path of the capture
|
||||
fn closure_span(
|
||||
&self,
|
||||
def_id: DefId,
|
||||
def_id: LocalDefId,
|
||||
target_place: PlaceRef<'tcx>,
|
||||
places: &[Operand<'tcx>],
|
||||
) -> Option<(Span, Option<GeneratorKind>, Span, Span)> {
|
||||
@ -892,28 +927,23 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
"closure_span: def_id={:?} target_place={:?} places={:?}",
|
||||
def_id, target_place, places
|
||||
);
|
||||
let local_did = def_id.as_local()?;
|
||||
let hir_id = self.infcx.tcx.hir().local_def_id_to_hir_id(local_did);
|
||||
let hir_id = self.infcx.tcx.hir().local_def_id_to_hir_id(def_id);
|
||||
let expr = &self.infcx.tcx.hir().expect_expr(hir_id).kind;
|
||||
debug!("closure_span: hir_id={:?} expr={:?}", hir_id, expr);
|
||||
if let hir::ExprKind::Closure { body, fn_decl_span, .. } = expr {
|
||||
for (captured_place, place) in self
|
||||
.infcx
|
||||
.tcx
|
||||
.typeck(def_id.expect_local())
|
||||
.closure_min_captures_flattened(def_id)
|
||||
.zip(places)
|
||||
if let hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, .. }) = expr {
|
||||
for (captured_place, place) in
|
||||
self.infcx.tcx.typeck(def_id).closure_min_captures_flattened(def_id).zip(places)
|
||||
{
|
||||
match place {
|
||||
Operand::Copy(place) | Operand::Move(place)
|
||||
if target_place == place.as_ref() =>
|
||||
{
|
||||
debug!("closure_span: found captured local {:?}", place);
|
||||
let body = self.infcx.tcx.hir().body(*body);
|
||||
let body = self.infcx.tcx.hir().body(body);
|
||||
let generator_kind = body.generator_kind();
|
||||
|
||||
return Some((
*fn_decl_span,
fn_decl_span,
generator_kind,
captured_place.get_capture_kind_span(self.infcx.tcx),
captured_place.get_path_span(self.infcx.tcx),
@ -980,14 +1010,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
if self.fn_self_span_reported.insert(fn_span) {
err.span_note(
// Check whether the source is accessible
if self
.infcx
.tcx
.sess
.source_map()
.span_to_snippet(self_arg.span)
.is_ok()
{
if self.infcx.tcx.sess.source_map().is_span_accessible(self_arg.span) {
self_arg.span
} else {
fn_call_span

@ -4,9 +4,9 @@ use rustc_middle::ty;
use rustc_mir_dataflow::move_paths::{
IllegalMoveOrigin, IllegalMoveOriginKind, LookupResult, MoveError, MovePathIndex,
};
use rustc_span::{sym, Span};
use rustc_span::Span;

use crate::diagnostics::UseSpans;
use crate::diagnostics::{DescribePlaceOpt, UseSpans};
use crate::prefixes::PrefixSet;
use crate::MirBorrowckCtxt;

@ -218,29 +218,13 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
|
||||
fn report(&mut self, error: GroupedMoveError<'tcx>) {
|
||||
let (mut err, err_span) = {
|
||||
let (span, use_spans, original_path, kind, has_complex_bindings): (
|
||||
Span,
|
||||
Option<UseSpans<'tcx>>,
|
||||
Place<'tcx>,
|
||||
&IllegalMoveOriginKind<'_>,
|
||||
bool,
|
||||
) = match error {
|
||||
GroupedMoveError::MovesFromPlace {
|
||||
span,
|
||||
original_path,
|
||||
ref kind,
|
||||
ref binds_to,
|
||||
..
|
||||
let (span, use_spans, original_path, kind) = match error {
|
||||
GroupedMoveError::MovesFromPlace { span, original_path, ref kind, .. }
|
||||
| GroupedMoveError::MovesFromValue { span, original_path, ref kind, .. } => {
|
||||
(span, None, original_path, kind)
|
||||
}
|
||||
| GroupedMoveError::MovesFromValue {
|
||||
span,
|
||||
original_path,
|
||||
ref kind,
|
||||
ref binds_to,
|
||||
..
|
||||
} => (span, None, original_path, kind, !binds_to.is_empty()),
|
||||
GroupedMoveError::OtherIllegalMove { use_spans, original_path, ref kind } => {
|
||||
(use_spans.args_or_use(), Some(use_spans), original_path, kind, false)
|
||||
(use_spans.args_or_use(), Some(use_spans), original_path, kind)
|
||||
}
|
||||
};
|
||||
debug!(
|
||||
@ -259,7 +243,6 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
target_place,
|
||||
span,
|
||||
use_spans,
|
||||
has_complex_bindings,
|
||||
),
|
||||
&IllegalMoveOriginKind::InteriorOfTypeWithDestructor { container_ty: ty } => {
|
||||
self.cannot_move_out_of_interior_of_drop(span, ty)
|
||||
@ -302,7 +285,6 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
deref_target_place: Place<'tcx>,
|
||||
span: Span,
|
||||
use_spans: Option<UseSpans<'tcx>>,
|
||||
has_complex_bindings: bool,
|
||||
) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
|
||||
// Inspect the type of the content behind the
|
||||
// borrow to provide feedback about why this
|
||||
@ -386,41 +368,38 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
}
|
||||
_ => {
|
||||
let source = self.borrowed_content_source(deref_base);
|
||||
match (self.describe_place(move_place.as_ref()), source.describe_for_named_place())
|
||||
{
|
||||
(Some(place_desc), Some(source_desc)) => self.cannot_move_out_of(
|
||||
let move_place_ref = move_place.as_ref();
|
||||
match (
|
||||
self.describe_place_with_options(
|
||||
move_place_ref,
|
||||
DescribePlaceOpt {
|
||||
including_downcast: false,
|
||||
including_tuple_field: false,
|
||||
},
|
||||
),
|
||||
self.describe_name(move_place_ref),
|
||||
source.describe_for_named_place(),
|
||||
) {
|
||||
(Some(place_desc), Some(name), Some(source_desc)) => self.cannot_move_out_of(
|
||||
span,
|
||||
&format!("`{place_desc}` as enum variant `{name}` which is behind a {source_desc}"),
|
||||
),
|
||||
(Some(place_desc), Some(name), None) => self.cannot_move_out_of(
|
||||
span,
|
||||
&format!("`{place_desc}` as enum variant `{name}`"),
|
||||
),
|
||||
(Some(place_desc), _, Some(source_desc)) => self.cannot_move_out_of(
|
||||
span,
|
||||
&format!("`{place_desc}` which is behind a {source_desc}"),
|
||||
),
|
||||
(_, _) => self.cannot_move_out_of(
|
||||
(_, _, _) => self.cannot_move_out_of(
|
||||
span,
|
||||
&source.describe_for_unnamed_place(self.infcx.tcx),
|
||||
),
|
||||
}
|
||||
}
|
||||
};
|
||||
let ty = move_place.ty(self.body, self.infcx.tcx).ty;
|
||||
let def_id = match *ty.kind() {
|
||||
ty::Adt(self_def, _) => self_def.did(),
|
||||
ty::Foreign(def_id)
|
||||
| ty::FnDef(def_id, _)
|
||||
| ty::Closure(def_id, _)
|
||||
| ty::Generator(def_id, ..)
|
||||
| ty::Opaque(def_id, _) => def_id,
|
||||
_ => return err,
|
||||
};
|
||||
let diag_name = self.infcx.tcx.get_diagnostic_name(def_id);
|
||||
if matches!(diag_name, Some(sym::Option | sym::Result))
|
||||
&& use_spans.map_or(true, |v| !v.for_closure())
|
||||
&& !has_complex_bindings
|
||||
{
|
||||
err.span_suggestion_verbose(
|
||||
span.shrink_to_hi(),
|
||||
&format!("consider borrowing the `{}`'s content", diag_name.unwrap()),
|
||||
".as_ref()",
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
} else if let Some(use_spans) = use_spans {
|
||||
if let Some(use_spans) = use_spans {
|
||||
self.explain_captures(
|
||||
&mut err, span, span, use_spans, move_place, None, "", "", "", false, true,
|
||||
);
|
||||
|
||||
@ -343,7 +343,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
);
let tcx = self.infcx.tcx;
if let ty::Closure(id, _) = *the_place_err.ty(self.body, tcx).ty.kind() {
self.show_mutating_upvar(tcx, id, the_place_err, &mut err);
self.show_mutating_upvar(tcx, id.expect_local(), the_place_err, &mut err);
}
}

@ -362,7 +362,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
|
||||
let upvar_hir_id = captured_place.get_root_variable();
|
||||
|
||||
if let Some(Node::Binding(pat)) = self.infcx.tcx.hir().find(upvar_hir_id)
|
||||
if let Some(Node::Pat(pat)) = self.infcx.tcx.hir().find(upvar_hir_id)
|
||||
&& let hir::PatKind::Binding(
|
||||
hir::BindingAnnotation::Unannotated,
|
||||
_,
|
||||
@ -382,7 +382,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
if let ty::Ref(_, ty, Mutability::Mut) = the_place_err.ty(self.body, tcx).ty.kind()
|
||||
&& let ty::Closure(id, _) = *ty.kind()
|
||||
{
|
||||
self.show_mutating_upvar(tcx, id, the_place_err, &mut err);
|
||||
self.show_mutating_upvar(tcx, id.expect_local(), the_place_err, &mut err);
|
||||
}
|
||||
}
|
||||
|
||||
@ -434,8 +434,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
|
||||
match self.local_names[local] {
|
||||
Some(name) if !local_decl.from_compiler_desugaring() => {
|
||||
let label = match local_decl.local_info.as_ref().unwrap() {
|
||||
box LocalInfo::User(ClearCrossCrate::Set(
|
||||
let label = match local_decl.local_info.as_deref().unwrap() {
|
||||
LocalInfo::User(ClearCrossCrate::Set(
|
||||
mir::BindingForm::ImplicitSelf(_),
|
||||
)) => {
|
||||
let (span, suggestion) =
|
||||
@ -443,7 +443,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
Some((true, span, suggestion))
|
||||
}
|
||||
|
||||
box LocalInfo::User(ClearCrossCrate::Set(mir::BindingForm::Var(
|
||||
LocalInfo::User(ClearCrossCrate::Set(mir::BindingForm::Var(
|
||||
mir::VarBindingForm {
|
||||
binding_mode: ty::BindingMode::BindByValue(_),
|
||||
opt_ty_info,
|
||||
@ -473,20 +473,15 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
// on for loops, RHS points to the iterator part
|
||||
Some(DesugaringKind::ForLoop) => {
|
||||
self.suggest_similar_mut_method_for_for_loop(&mut err);
|
||||
Some((
|
||||
false,
|
||||
opt_assignment_rhs_span.unwrap(),
|
||||
format!(
|
||||
"this iterator yields `{SIGIL}` {DESC}s",
|
||||
SIGIL = pointer_sigil,
|
||||
DESC = pointer_desc
|
||||
),
|
||||
))
|
||||
err.span_label(opt_assignment_rhs_span.unwrap(), format!(
|
||||
"this iterator yields `{pointer_sigil}` {pointer_desc}s",
|
||||
));
|
||||
None
|
||||
}
|
||||
// don't create labels for compiler-generated spans
|
||||
Some(_) => None,
|
||||
None => {
|
||||
let (span, suggestion) = if name != kw::SelfLower {
|
||||
let label = if name != kw::SelfLower {
|
||||
suggest_ampmut(
|
||||
self.infcx.tcx,
|
||||
local_decl,
|
||||
@ -501,7 +496,11 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
..
|
||||
}),
|
||||
))) => {
|
||||
suggest_ampmut_self(self.infcx.tcx, local_decl)
|
||||
let (span, sugg) = suggest_ampmut_self(
|
||||
self.infcx.tcx,
|
||||
local_decl,
|
||||
);
|
||||
(true, span, sugg)
|
||||
}
|
||||
// explicit self (eg `self: &'a Self`)
|
||||
_ => suggest_ampmut(
|
||||
@ -512,12 +511,12 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
),
|
||||
}
|
||||
};
|
||||
Some((true, span, suggestion))
|
||||
Some(label)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
box LocalInfo::User(ClearCrossCrate::Set(mir::BindingForm::Var(
|
||||
LocalInfo::User(ClearCrossCrate::Set(mir::BindingForm::Var(
|
||||
mir::VarBindingForm {
|
||||
binding_mode: ty::BindingMode::BindByReference(_),
|
||||
..
|
||||
@ -528,7 +527,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
.map(|replacement| (true, pattern_span, replacement))
|
||||
}
|
||||
|
||||
box LocalInfo::User(ClearCrossCrate::Clear) => {
|
||||
LocalInfo::User(ClearCrossCrate::Clear) => {
|
||||
bug!("saw cleared local state")
|
||||
}
|
||||
|
||||
@ -559,7 +558,12 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
Some((false, err_label_span, message)) => {
|
||||
err.span_label(err_label_span, &message);
|
||||
err.span_label(
|
||||
err_label_span,
|
||||
&format!(
|
||||
"consider changing this binding's type to be: `{message}`"
|
||||
),
|
||||
);
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
@ -681,11 +685,10 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
fn show_mutating_upvar(
|
||||
&self,
|
||||
tcx: TyCtxt<'_>,
|
||||
id: hir::def_id::DefId,
|
||||
closure_local_def_id: hir::def_id::LocalDefId,
|
||||
the_place_err: PlaceRef<'tcx>,
|
||||
err: &mut Diagnostic,
|
||||
) {
|
||||
let closure_local_def_id = id.expect_local();
|
||||
let tables = tcx.typeck(closure_local_def_id);
|
||||
let closure_hir_id = tcx.hir().local_def_id_to_hir_id(closure_local_def_id);
|
||||
if let Some((span, closure_kind_origin)) =
|
||||
@ -695,7 +698,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
let upvar = ty::place_to_string_for_capture(tcx, closure_kind_origin);
|
||||
let root_hir_id = upvar_id.var_path.hir_id;
|
||||
// we have an origin for this closure kind starting at this root variable so it's safe to unwrap here
|
||||
let captured_places = tables.closure_min_captures[&id].get(&root_hir_id).unwrap();
|
||||
let captured_places =
|
||||
tables.closure_min_captures[&closure_local_def_id].get(&root_hir_id).unwrap();
|
||||
|
||||
let origin_projection = closure_kind_origin
|
||||
.projections
|
||||
@ -849,7 +853,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
let closure_id = self.mir_hir_id();
|
||||
let fn_call_id = hir.get_parent_node(closure_id);
|
||||
let node = hir.get(fn_call_id);
|
||||
let item_id = hir.enclosing_body_owner(fn_call_id);
|
||||
let def_id = hir.enclosing_body_owner(fn_call_id);
|
||||
let mut look_at_return = true;
|
||||
// If we can detect the expression to be an `fn` call where the closure was an argument,
|
||||
// we point at the `fn` definition argument...
|
||||
@ -857,10 +861,9 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
let arg_pos = args
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter(|(_, arg)| arg.span == self.body.span)
|
||||
.filter(|(_, arg)| arg.hir_id == closure_id)
|
||||
.map(|(pos, _)| pos)
|
||||
.next();
|
||||
let def_id = hir.local_def_id(item_id);
|
||||
let tables = self.infcx.tcx.typeck(def_id);
|
||||
if let Some(ty::FnDef(def_id, _)) =
|
||||
tables.node_type_opt(func.hir_id).as_ref().map(|ty| ty.kind())
|
||||
@ -899,9 +902,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
if let Some(span) = arg {
|
||||
err.span_label(span, "change this to accept `FnMut` instead of `Fn`");
|
||||
err.span_label(func.span, "expects `Fn` instead of `FnMut`");
|
||||
if self.infcx.tcx.sess.source_map().is_multiline(self.body.span) {
|
||||
err.span_label(self.body.span, "in this closure");
|
||||
}
|
||||
err.span_label(self.body.span, "in this closure");
|
||||
look_at_return = false;
|
||||
}
|
||||
}
|
||||
@ -1004,7 +1005,7 @@ fn suggest_ampmut<'tcx>(
|
||||
local_decl: &mir::LocalDecl<'tcx>,
|
||||
opt_assignment_rhs_span: Option<Span>,
|
||||
opt_ty_info: Option<Span>,
|
||||
) -> (Span, String) {
|
||||
) -> (bool, Span, String) {
|
||||
if let Some(assignment_rhs_span) = opt_assignment_rhs_span
|
||||
&& let Ok(src) = tcx.sess.source_map().span_to_snippet(assignment_rhs_span)
|
||||
{
|
||||
@ -1028,24 +1029,24 @@ fn suggest_ampmut<'tcx>(
|
||||
let lt_name = &src[1..ws_pos];
|
||||
let ty = src[ws_pos..].trim_start();
|
||||
if !is_mutbl(ty) {
|
||||
return (assignment_rhs_span, format!("&{lt_name} mut {ty}"));
|
||||
return (true, assignment_rhs_span, format!("&{lt_name} mut {ty}"));
|
||||
}
|
||||
} else if let Some(stripped) = src.strip_prefix('&') {
|
||||
let stripped = stripped.trim_start();
|
||||
if !is_mutbl(stripped) {
|
||||
return (assignment_rhs_span, format!("&mut {stripped}"));
|
||||
return (true, assignment_rhs_span, format!("&mut {stripped}"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let highlight_span = match opt_ty_info {
|
||||
let (suggestability, highlight_span) = match opt_ty_info {
|
||||
// if this is a variable binding with an explicit type,
|
||||
// try to highlight that for the suggestion.
|
||||
Some(ty_span) => ty_span,
|
||||
Some(ty_span) => (true, ty_span),
|
||||
|
||||
// otherwise, just highlight the span associated with
|
||||
// the (MIR) LocalDecl.
|
||||
None => local_decl.source_info.span,
|
||||
None => (false, local_decl.source_info.span),
|
||||
};
|
||||
|
||||
if let Ok(src) = tcx.sess.source_map().span_to_snippet(highlight_span)
|
||||
@ -1053,12 +1054,13 @@ fn suggest_ampmut<'tcx>(
|
||||
{
|
||||
let lt_name = &src[1..ws_pos];
|
||||
let ty = &src[ws_pos..];
|
||||
return (highlight_span, format!("&{} mut{}", lt_name, ty));
|
||||
return (true, highlight_span, format!("&{} mut{}", lt_name, ty));
|
||||
}
|
||||
|
||||
let ty_mut = local_decl.ty.builtin_deref(true).unwrap();
|
||||
assert_eq!(ty_mut.mutbl, hir::Mutability::Not);
|
||||
(
|
||||
suggestability,
|
||||
highlight_span,
|
||||
if local_decl.ty.is_region_ptr() {
|
||||
format!("&mut {}", ty_mut.ty)
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
//! Error reporting machinery for lifetime errors.
|
||||
|
||||
use rustc_data_structures::stable_set::FxHashSet;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, MultiSpan};
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_hir::intravisit::Visitor;
|
||||
@ -19,11 +19,11 @@ use rustc_middle::ty::subst::InternalSubsts;
|
||||
use rustc_middle::ty::Region;
|
||||
use rustc_middle::ty::TypeVisitor;
|
||||
use rustc_middle::ty::{self, RegionVid, Ty};
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::symbol::Ident;
|
||||
use rustc_span::symbol::{kw, sym, Ident};
|
||||
use rustc_span::Span;
|
||||
|
||||
use crate::borrowck_errors;
|
||||
use crate::session_diagnostics::GenericDoesNotLiveLongEnough;
|
||||
|
||||
use super::{OutlivesSuggestionBuilder, RegionName};
|
||||
use crate::region_infer::BlameConstraint;
|
||||
@ -78,6 +78,8 @@ pub(crate) enum RegionErrorKind<'tcx> {
|
||||
span: Span,
|
||||
/// The hidden type.
|
||||
hidden_ty: Ty<'tcx>,
|
||||
/// The opaque type.
|
||||
key: ty::OpaqueTypeKey<'tcx>,
|
||||
/// The unexpected region.
|
||||
member_region: ty::Region<'tcx>,
|
||||
},
|
||||
@ -180,11 +182,11 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
let generic_ty = type_test.generic_kind.to_ty(self.infcx.tcx);
|
||||
let origin = RelateParamBound(type_test_span, generic_ty, None);
|
||||
self.buffer_error(self.infcx.construct_generic_bound_failure(
|
||||
self.body.source.def_id().expect_local(),
|
||||
type_test_span,
|
||||
Some(origin),
|
||||
type_test.generic_kind,
|
||||
lower_bound_region,
|
||||
self.body.source.def_id().as_local(),
|
||||
));
|
||||
} else {
|
||||
// FIXME. We should handle this case better. It
|
||||
@ -196,21 +198,25 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
// to report it; we could probably handle it by
|
||||
// iterating over the universal regions and reporting
|
||||
// an error that multiple bounds are required.
|
||||
self.buffer_error(self.infcx.tcx.sess.struct_span_err(
|
||||
type_test_span,
|
||||
&format!("`{}` does not live long enough", type_test.generic_kind),
|
||||
self.buffer_error(self.infcx.tcx.sess.create_err(
|
||||
GenericDoesNotLiveLongEnough {
|
||||
kind: type_test.generic_kind.to_string(),
|
||||
span: type_test_span,
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
RegionErrorKind::UnexpectedHiddenRegion { span, hidden_ty, member_region } => {
|
||||
RegionErrorKind::UnexpectedHiddenRegion { span, hidden_ty, key, member_region } => {
|
||||
let named_ty = self.regioncx.name_regions(self.infcx.tcx, hidden_ty);
|
||||
let named_key = self.regioncx.name_regions(self.infcx.tcx, key);
|
||||
let named_region = self.regioncx.name_regions(self.infcx.tcx, member_region);
|
||||
self.buffer_error(unexpected_hidden_region_diagnostic(
|
||||
self.infcx.tcx,
|
||||
span,
|
||||
named_ty,
|
||||
named_region,
|
||||
named_key,
|
||||
));
|
||||
}
|
||||
|
||||
@ -755,7 +761,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
return;
|
||||
};
|
||||
|
||||
let lifetime = if f.has_name() { fr_name.to_string() } else { "'_".to_string() };
|
||||
let lifetime = if f.has_name() { fr_name.name } else { kw::UnderscoreLifetime };
|
||||
|
||||
let arg = match param.param.pat.simple_ident() {
|
||||
Some(simple_ident) => format!("argument `{}`", simple_ident),
|
||||
@ -767,7 +773,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
self.infcx.tcx,
|
||||
diag,
|
||||
fn_returns,
|
||||
lifetime,
|
||||
lifetime.to_string(),
|
||||
Some(arg),
|
||||
captures,
|
||||
Some((param.param_ty_span, param.param_ty.to_string())),
|
||||
@ -848,13 +854,11 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
debug!("trait spans found: {:?}", traits);
|
||||
for span in &traits {
|
||||
let mut multi_span: MultiSpan = vec![*span].into();
|
||||
multi_span.push_span_label(
|
||||
*span,
|
||||
"this has an implicit `'static` lifetime requirement".to_string(),
|
||||
);
|
||||
multi_span
|
||||
.push_span_label(*span, "this has an implicit `'static` lifetime requirement");
|
||||
multi_span.push_span_label(
|
||||
ident.span,
|
||||
"calling this method introduces the `impl`'s 'static` requirement".to_string(),
|
||||
"calling this method introduces the `impl`'s 'static` requirement",
|
||||
);
|
||||
err.span_note(multi_span, "the used `impl` has a `'static` requirement");
|
||||
err.span_suggestion_verbose(
|
||||
|
||||
@ -34,13 +34,13 @@ pub(crate) enum RegionNameSource {
|
||||
/// The `'static` region.
|
||||
Static,
|
||||
/// The free region corresponding to the environment of a closure.
|
||||
SynthesizedFreeEnvRegion(Span, String),
|
||||
SynthesizedFreeEnvRegion(Span, &'static str),
|
||||
/// The region corresponding to an argument.
|
||||
AnonRegionFromArgument(RegionNameHighlight),
|
||||
/// The region corresponding to a closure upvar.
|
||||
AnonRegionFromUpvar(Span, String),
|
||||
AnonRegionFromUpvar(Span, Symbol),
|
||||
/// The region corresponding to the return type of a closure.
|
||||
AnonRegionFromOutput(RegionNameHighlight, String),
|
||||
AnonRegionFromOutput(RegionNameHighlight, &'static str),
|
||||
/// The region from a type yielded by a generator.
|
||||
AnonRegionFromYieldTy(Span, String),
|
||||
/// An anonymous region from an async fn.
|
||||
@ -110,7 +110,7 @@ impl RegionName {
|
||||
}
|
||||
RegionNameSource::SynthesizedFreeEnvRegion(span, note) => {
|
||||
diag.span_label(*span, format!("lifetime `{self}` represents this closure's body"));
|
||||
diag.note(note);
|
||||
diag.note(*note);
|
||||
}
|
||||
RegionNameSource::AnonRegionFromArgument(RegionNameHighlight::CannotMatchHirTy(
|
||||
span,
|
||||
@ -189,7 +189,7 @@ impl Display for RegionName {
|
||||
|
||||
impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
pub(crate) fn mir_def_id(&self) -> hir::def_id::LocalDefId {
|
||||
self.body.source.def_id().as_local().unwrap()
|
||||
self.body.source.def_id().expect_local()
|
||||
}
|
||||
|
||||
pub(crate) fn mir_hir_id(&self) -> hir::HirId {
|
||||
@ -325,7 +325,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
// Can't have BrEnv in functions, constants or generators.
|
||||
bug!("BrEnv outside of closure.");
|
||||
};
|
||||
let hir::ExprKind::Closure { fn_decl_span, .. }
|
||||
let hir::ExprKind::Closure(&hir::Closure { fn_decl_span, .. })
|
||||
= tcx.hir().expect_expr(self.mir_hir_id()).kind
|
||||
else {
|
||||
bug!("Closure is not defined by a closure expr");
|
||||
@ -350,10 +350,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
|
||||
Some(RegionName {
|
||||
name: region_name,
|
||||
source: RegionNameSource::SynthesizedFreeEnvRegion(
|
||||
fn_decl_span,
|
||||
note.to_string(),
|
||||
),
|
||||
source: RegionNameSource::SynthesizedFreeEnvRegion(fn_decl_span, note),
|
||||
})
|
||||
}
|
||||
|
||||
@ -592,8 +589,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
|
||||
hir::LifetimeName::Param(_, hir::ParamName::Fresh)
|
||||
| hir::LifetimeName::ImplicitObjectLifetimeDefault
|
||||
| hir::LifetimeName::Implicit
|
||||
| hir::LifetimeName::Underscore => {
|
||||
| hir::LifetimeName::Infer => {
|
||||
// In this case, the user left off the lifetime; so
|
||||
// they wrote something like:
|
||||
//
|
||||
@ -678,7 +674,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
|
||||
Some(RegionName {
|
||||
name: region_name,
|
||||
source: RegionNameSource::AnonRegionFromUpvar(upvar_span, upvar_name.to_string()),
|
||||
source: RegionNameSource::AnonRegionFromUpvar(upvar_span, upvar_name),
|
||||
})
|
||||
}
|
||||
|
||||
@ -701,16 +697,16 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
|
||||
let (return_span, mir_description, hir_ty) = match hir.get(mir_hir_id) {
|
||||
hir::Node::Expr(hir::Expr {
|
||||
kind: hir::ExprKind::Closure { fn_decl, body, fn_decl_span, .. },
|
||||
kind: hir::ExprKind::Closure(&hir::Closure { fn_decl, body, fn_decl_span, .. }),
|
||||
..
|
||||
}) => {
|
||||
let (mut span, mut hir_ty) = match fn_decl.output {
|
||||
hir::FnRetTy::DefaultReturn(_) => {
|
||||
(tcx.sess.source_map().end_point(*fn_decl_span), None)
|
||||
(tcx.sess.source_map().end_point(fn_decl_span), None)
|
||||
}
|
||||
hir::FnRetTy::Return(hir_ty) => (fn_decl.output.span(), Some(hir_ty)),
|
||||
};
|
||||
let mir_description = match hir.body(*body).generator_kind {
|
||||
let mir_description = match hir.body(body).generator_kind {
|
||||
Some(hir::GeneratorKind::Async(gen)) => match gen {
|
||||
hir::AsyncGeneratorKind::Block => " of async block",
|
||||
hir::AsyncGeneratorKind::Closure => " of async closure",
|
||||
@ -756,7 +752,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
|
||||
Some(RegionName {
|
||||
name: self.synthesize_region_name(),
|
||||
source: RegionNameSource::AnonRegionFromOutput(highlight, mir_description.to_string()),
|
||||
source: RegionNameSource::AnonRegionFromOutput(highlight, mir_description),
|
||||
})
|
||||
}
|
||||
|
||||
@ -841,9 +837,9 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
|
||||
let yield_span = match tcx.hir().get(self.mir_hir_id()) {
|
||||
hir::Node::Expr(hir::Expr {
|
||||
kind: hir::ExprKind::Closure { fn_decl_span, .. },
|
||||
kind: hir::ExprKind::Closure(&hir::Closure { fn_decl_span, .. }),
|
||||
..
|
||||
}) => (tcx.sess.source_map().end_point(*fn_decl_span)),
|
||||
}) => tcx.sess.source_map().end_point(fn_decl_span),
|
||||
_ => self.body.span,
|
||||
};
|
||||
|
||||
@ -879,7 +875,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
|
||||
}
|
||||
|
||||
let mut found = false;
|
||||
tcx.fold_regions(tcx.type_of(body_parent_did), &mut true, |r: ty::Region<'tcx>, _| {
|
||||
tcx.fold_regions(tcx.type_of(body_parent_did), |r: ty::Region<'tcx>, _| {
|
||||
if *r == ty::ReEarlyBound(region) {
|
||||
found = true;
|
||||
}
|
||||
|
||||
@ -40,7 +40,7 @@ pub(crate) trait AllFactsExt {
|
||||
impl AllFactsExt for AllFacts {
|
||||
/// Return
|
||||
fn enabled(tcx: TyCtxt<'_>) -> bool {
|
||||
tcx.sess.opts.debugging_opts.nll_facts || tcx.sess.opts.debugging_opts.polonius
|
||||
tcx.sess.opts.unstable_opts.nll_facts || tcx.sess.opts.unstable_opts.polonius
|
||||
}
|
||||
|
||||
fn write_to_dir(
|
||||
|
||||
@ -26,7 +26,7 @@ pub(super) fn generate_invalidates<'tcx>(
|
||||
|
||||
if let Some(all_facts) = all_facts {
|
||||
let _prof_timer = tcx.prof.generic_activity("polonius_fact_generation");
|
||||
let dominators = body.dominators();
|
||||
let dominators = body.basic_blocks.dominators();
|
||||
let mut ig = InvalidationGenerator {
|
||||
all_facts,
|
||||
borrow_set,
|
||||
@ -289,6 +289,10 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
|
||||
| Rvalue::ShallowInitBox(ref operand, _ /*ty*/) => {
|
||||
self.consume_operand(location, operand)
|
||||
}
|
||||
Rvalue::CopyForDeref(ref place) => {
|
||||
let op = &Operand::Copy(*place);
|
||||
self.consume_operand(location, op);
|
||||
}
|
||||
|
||||
Rvalue::Len(place) | Rvalue::Discriminant(place) => {
|
||||
let af = match *rvalue {
|
||||
|
||||
@ -6,6 +6,7 @@
#![feature(let_else)]
#![feature(min_specialization)]
#![feature(never_type)]
#![feature(rustc_attrs)]
#![feature(stmt_expr_attributes)]
#![feature(trusted_step)]
#![feature(try_blocks)]
@ -23,7 +24,7 @@ use rustc_hir as hir;
|
||||
use rustc_hir::def_id::LocalDefId;
|
||||
use rustc_index::bit_set::ChunkedBitSet;
|
||||
use rustc_index::vec::IndexVec;
|
||||
use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
|
||||
use rustc_infer::infer::{DefiningAnchor, InferCtxt, TyCtxtInferExt};
|
||||
use rustc_middle::mir::{
|
||||
traversal, Body, ClearCrossCrate, Local, Location, Mutability, Operand, Place, PlaceElem,
|
||||
PlaceRef, VarDebugInfoContents,
|
||||
@ -75,6 +76,7 @@ mod places_conflict;
|
||||
mod prefixes;
|
||||
mod region_infer;
|
||||
mod renumber;
|
||||
mod session_diagnostics;
|
||||
mod type_check;
|
||||
mod universal_regions;
|
||||
mod used_muts;
|
||||
@ -128,11 +130,14 @@ fn mir_borrowck<'tcx>(
|
||||
debug!("run query mir_borrowck: {}", tcx.def_path_str(def.did.to_def_id()));
|
||||
let hir_owner = tcx.hir().local_def_id_to_hir_id(def.did).owner;
|
||||
|
||||
let opt_closure_req = tcx.infer_ctxt().with_opaque_type_inference(hir_owner).enter(|infcx| {
|
||||
let input_body: &Body<'_> = &input_body.borrow();
|
||||
let promoted: &IndexVec<_, _> = &promoted.borrow();
|
||||
do_mir_borrowck(&infcx, input_body, promoted, false).0
|
||||
});
|
||||
let opt_closure_req = tcx
|
||||
.infer_ctxt()
|
||||
.with_opaque_type_inference(DefiningAnchor::Bind(hir_owner))
|
||||
.enter(|infcx| {
|
||||
let input_body: &Body<'_> = &input_body.borrow();
|
||||
let promoted: &IndexVec<_, _> = &promoted.borrow();
|
||||
do_mir_borrowck(&infcx, input_body, promoted, false).0
|
||||
});
|
||||
debug!("mir_borrowck done");
|
||||
|
||||
tcx.arena.alloc(opt_closure_req)
|
||||
@ -184,7 +189,7 @@ fn do_mir_borrowck<'a, 'tcx>(
|
||||
errors.set_tainted_by_errors();
|
||||
}
|
||||
let upvars: Vec<_> = tables
|
||||
.closure_min_captures_flattened(def.did.to_def_id())
|
||||
.closure_min_captures_flattened(def.did)
|
||||
.map(|captured_place| {
|
||||
let capture = captured_place.info.capture_kind;
|
||||
let by_ref = match capture {
|
||||
@ -210,7 +215,7 @@ fn do_mir_borrowck<'a, 'tcx>(
|
||||
|
||||
let (move_data, move_errors): (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>) =
|
||||
match MoveData::gather_moves(&body, tcx, param_env) {
|
||||
Ok(move_data) => (move_data, Vec::new()),
|
||||
Ok((_, move_data)) => (move_data, Vec::new()),
|
||||
Err((move_data, move_errors)) => (move_data, move_errors),
|
||||
};
|
||||
let promoted_errors = promoted
|
||||
@ -229,7 +234,7 @@ fn do_mir_borrowck<'a, 'tcx>(
|
||||
let borrow_set =
|
||||
Rc::new(BorrowSet::build(tcx, body, locals_are_invalidated_at_exit, &mdpe.move_data));
|
||||
|
||||
let use_polonius = return_body_with_facts || infcx.tcx.sess.opts.debugging_opts.polonius;
|
||||
let use_polonius = return_body_with_facts || infcx.tcx.sess.opts.unstable_opts.polonius;
|
||||
|
||||
// Compute non-lexical lifetimes.
|
||||
let nll::NllOutput {
|
||||
@ -332,7 +337,7 @@ fn do_mir_borrowck<'a, 'tcx>(
|
||||
};
|
||||
}
|
||||
|
||||
let dominators = body.dominators();
|
||||
let dominators = body.basic_blocks.dominators();
|
||||
|
||||
let mut mbcx = MirBorrowckCtxt {
|
||||
infcx,
|
||||
@ -905,6 +910,16 @@ impl InitializationRequiringAction {
|
||||
InitializationRequiringAction::PartialAssignment => "partially assigned",
|
||||
}
|
||||
}
|
||||
|
||||
fn as_general_verb_in_past_tense(self) -> &'static str {
|
||||
match self {
|
||||
InitializationRequiringAction::Borrow
|
||||
| InitializationRequiringAction::MatchOn
|
||||
| InitializationRequiringAction::Use => "used",
|
||||
InitializationRequiringAction::Assignment => "assigned",
|
||||
InitializationRequiringAction::PartialAssignment => "partially assigned",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
@ -1224,6 +1239,23 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
| Rvalue::ShallowInitBox(ref operand, _ /*ty*/) => {
|
||||
self.consume_operand(location, (operand, span), flow_state)
|
||||
}
|
||||
Rvalue::CopyForDeref(place) => {
|
||||
self.access_place(
|
||||
location,
|
||||
(place, span),
|
||||
(Deep, Read(ReadKind::Copy)),
|
||||
LocalMutationIsAllowed::No,
|
||||
flow_state,
|
||||
);
|
||||
|
||||
// Finally, check if path was already moved.
|
||||
self.check_if_path_or_subpath_is_moved(
|
||||
location,
|
||||
InitializationRequiringAction::Use,
|
||||
(place.as_ref(), span),
|
||||
flow_state,
|
||||
);
|
||||
}
|
||||
|
||||
Rvalue::Len(place) | Rvalue::Discriminant(place) => {
|
||||
let af = match *rvalue {
|
||||
@ -1263,7 +1295,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
match **aggregate_kind {
|
||||
AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) => {
|
||||
let BorrowCheckResult { used_mut_upvars, .. } =
|
||||
self.infcx.tcx.mir_borrowck(def_id.expect_local());
|
||||
self.infcx.tcx.mir_borrowck(def_id);
|
||||
debug!("{:?} used_mut_upvars={:?}", def_id, used_mut_upvars);
|
||||
for field in used_mut_upvars {
|
||||
self.propagate_closure_used_mut_upvar(&operands[field.index()]);
|
||||
|
||||
@ -38,6 +38,8 @@ pub(crate) struct NllMemberConstraint<'tcx> {
|
||||
/// The hidden type in which `R0` appears. (Used in error reporting.)
|
||||
pub(crate) hidden_ty: Ty<'tcx>,
|
||||
|
||||
pub(crate) key: ty::OpaqueTypeKey<'tcx>,
|
||||
|
||||
/// The region `R0`.
|
||||
pub(crate) member_region_vid: ty::RegionVid,
|
||||
|
||||
@ -90,6 +92,7 @@ impl<'tcx> MemberConstraintSet<'tcx, ty::RegionVid> {
|
||||
member_region_vid,
|
||||
definition_span: m_c.definition_span,
|
||||
hidden_ty: m_c.hidden_ty,
|
||||
key: m_c.key,
|
||||
start_index,
|
||||
end_index,
|
||||
});
|
||||
|
||||
@ -1,7 +1,7 @@
//! The entry point of the NLL borrow checker.

use rustc_data_structures::vec_map::VecMap;
use rustc_hir::def_id::DefId;
use rustc_hir::def_id::LocalDefId;
use rustc_index::vec::IndexVec;
use rustc_infer::infer::InferCtxt;
use rustc_middle::mir::{create_dump_file, dump_enabled, dump_mir, PassWhere};
@ -44,7 +44,7 @@ pub type PoloniusOutput = Output<RustcFacts>;
|
||||
/// closure requirements to propagate, and any generated errors.
|
||||
pub(crate) struct NllOutput<'tcx> {
|
||||
pub regioncx: RegionInferenceContext<'tcx>,
|
||||
pub opaque_type_values: VecMap<DefId, OpaqueHiddenType<'tcx>>,
|
||||
pub opaque_type_values: VecMap<LocalDefId, OpaqueHiddenType<'tcx>>,
|
||||
pub polonius_input: Option<Box<AllFacts>>,
|
||||
pub polonius_output: Option<Rc<PoloniusOutput>>,
|
||||
pub opt_closure_req: Option<ClosureRegionRequirements<'tcx>>,
|
||||
@ -278,9 +278,9 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
|
||||
|
||||
// Dump facts if requested.
|
||||
let polonius_output = all_facts.as_ref().and_then(|all_facts| {
|
||||
if infcx.tcx.sess.opts.debugging_opts.nll_facts {
|
||||
if infcx.tcx.sess.opts.unstable_opts.nll_facts {
|
||||
let def_path = infcx.tcx.def_path(def_id);
|
||||
let dir_path = PathBuf::from(&infcx.tcx.sess.opts.debugging_opts.nll_facts_dir)
|
||||
let dir_path = PathBuf::from(&infcx.tcx.sess.opts.unstable_opts.nll_facts_dir)
|
||||
.join(def_path.to_filename_friendly_no_crate());
|
||||
all_facts.write_to_dir(dir_path, location_table).unwrap();
|
||||
}
|
||||
@ -373,7 +373,7 @@ pub(super) fn dump_annotation<'a, 'tcx>(
|
||||
body: &Body<'tcx>,
|
||||
regioncx: &RegionInferenceContext<'tcx>,
|
||||
closure_region_requirements: &Option<ClosureRegionRequirements<'_>>,
|
||||
opaque_type_values: &VecMap<DefId, OpaqueHiddenType<'tcx>>,
|
||||
opaque_type_values: &VecMap<LocalDefId, OpaqueHiddenType<'tcx>>,
|
||||
errors: &mut crate::error::BorrowckErrors<'tcx>,
|
||||
) {
|
||||
let tcx = infcx.tcx;
|
||||
|
||||
@ -19,7 +19,9 @@ use rustc_middle::mir::{
|
||||
};
|
||||
use rustc_middle::traits::ObligationCause;
|
||||
use rustc_middle::traits::ObligationCauseCode;
|
||||
use rustc_middle::ty::{self, subst::SubstsRef, RegionVid, Ty, TyCtxt, TypeFoldable};
|
||||
use rustc_middle::ty::{
|
||||
self, subst::SubstsRef, RegionVid, Ty, TyCtxt, TypeFoldable, TypeVisitable,
|
||||
};
|
||||
use rustc_span::Span;
|
||||
|
||||
use crate::{
|
||||
@ -493,8 +495,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
NllRegionVariableOrigin::RootEmptyRegion
|
||||
| NllRegionVariableOrigin::Existential { .. } => {
|
||||
NllRegionVariableOrigin::Existential { .. } => {
|
||||
// For existential, regions, nothing to do.
|
||||
}
|
||||
}
|
||||
@ -588,7 +589,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
// In Polonius mode, the errors about missing universal region relations are in the output
|
||||
// and need to be emitted or propagated. Otherwise, we need to check whether the
|
||||
// constraints were too strong, and if so, emit or propagate those errors.
|
||||
if infcx.tcx.sess.opts.debugging_opts.polonius {
|
||||
if infcx.tcx.sess.opts.unstable_opts.polonius {
|
||||
self.check_polonius_subset_errors(
|
||||
body,
|
||||
outlives_requirements.as_mut(),
|
||||
@ -1037,7 +1038,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
) -> Option<ClosureOutlivesSubject<'tcx>> {
|
||||
let tcx = infcx.tcx;
|
||||
|
||||
let ty = tcx.fold_regions(ty, &mut false, |r, _depth| {
|
||||
let ty = tcx.fold_regions(ty, |r, _depth| {
|
||||
let region_vid = self.to_region_vid(r);
|
||||
|
||||
// The challenge if this. We have some region variable `r`
|
||||
@ -1317,7 +1318,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
where
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
tcx.fold_regions(value, &mut false, |r, _db| {
|
||||
tcx.fold_regions(value, |r, _db| {
|
||||
let vid = self.to_region_vid(r);
|
||||
let scc = self.constraint_sccs.scc(vid);
|
||||
let repr = self.scc_representatives[scc];
|
||||
@ -1436,8 +1437,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
self.check_bound_universal_region(fr, placeholder, errors_buffer);
|
||||
}
|
||||
|
||||
NllRegionVariableOrigin::RootEmptyRegion
|
||||
| NllRegionVariableOrigin::Existential { .. } => {
|
||||
NllRegionVariableOrigin::Existential { .. } => {
|
||||
// nothing to check here
|
||||
}
|
||||
}
|
||||
@ -1539,8 +1539,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
self.check_bound_universal_region(fr, placeholder, errors_buffer);
|
||||
}
|
||||
|
||||
NllRegionVariableOrigin::RootEmptyRegion
|
||||
| NllRegionVariableOrigin::Existential { .. } => {
|
||||
NllRegionVariableOrigin::Existential { .. } => {
|
||||
// nothing to check here
|
||||
}
|
||||
}
|
||||
@ -1764,6 +1763,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
errors_buffer.push(RegionErrorKind::UnexpectedHiddenRegion {
|
||||
span: m_c.definition_span,
|
||||
hidden_ty: m_c.hidden_ty,
|
||||
key: m_c.key,
|
||||
member_region,
|
||||
});
|
||||
}
|
||||
@ -1814,9 +1814,9 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
universe1.cannot_name(placeholder.universe)
|
||||
}
|
||||
|
||||
NllRegionVariableOrigin::RootEmptyRegion
|
||||
| NllRegionVariableOrigin::FreeRegion
|
||||
| NllRegionVariableOrigin::Existential { .. } => false,
|
||||
NllRegionVariableOrigin::FreeRegion | NllRegionVariableOrigin::Existential { .. } => {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2178,8 +2178,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
let blame_source = match from_region_origin {
|
||||
NllRegionVariableOrigin::FreeRegion
|
||||
| NllRegionVariableOrigin::Existential { from_forall: false } => true,
|
||||
NllRegionVariableOrigin::RootEmptyRegion
|
||||
| NllRegionVariableOrigin::Placeholder(_)
|
||||
NllRegionVariableOrigin::Placeholder(_)
|
||||
| NllRegionVariableOrigin::Existential { from_forall: true } => false,
|
||||
};
|
||||
|
||||
|
||||
@ -1,9 +1,20 @@
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::vec_map::VecMap;
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_hir::def_id::LocalDefId;
|
||||
use rustc_hir::OpaqueTyOrigin;
|
||||
use rustc_infer::infer::InferCtxt;
|
||||
use rustc_middle::ty::{self, OpaqueHiddenType, OpaqueTypeKey, TyCtxt, TypeFoldable};
|
||||
use rustc_trait_selection::opaque_types::InferCtxtExt;
|
||||
use rustc_infer::infer::error_reporting::unexpected_hidden_region_diagnostic;
|
||||
use rustc_infer::infer::TyCtxtInferExt as _;
|
||||
use rustc_infer::infer::{DefiningAnchor, InferCtxt};
|
||||
use rustc_infer::traits::{Obligation, ObligationCause, TraitEngine};
|
||||
use rustc_middle::ty::fold::{TypeFolder, TypeSuperFoldable};
|
||||
use rustc_middle::ty::subst::{GenericArg, GenericArgKind, InternalSubsts};
|
||||
use rustc_middle::ty::visit::TypeVisitable;
|
||||
use rustc_middle::ty::{
|
||||
self, OpaqueHiddenType, OpaqueTypeKey, ToPredicate, Ty, TyCtxt, TypeFoldable,
|
||||
};
|
||||
use rustc_span::Span;
|
||||
use rustc_trait_selection::traits::error_reporting::InferCtxtExt as _;
|
||||
use rustc_trait_selection::traits::TraitEngineExt as _;
|
||||
|
||||
use super::RegionInferenceContext;
|
||||
|
||||
@ -52,14 +63,14 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
&self,
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
opaque_ty_decls: VecMap<OpaqueTypeKey<'tcx>, (OpaqueHiddenType<'tcx>, OpaqueTyOrigin)>,
|
||||
) -> VecMap<DefId, OpaqueHiddenType<'tcx>> {
|
||||
let mut result: VecMap<DefId, OpaqueHiddenType<'tcx>> = VecMap::new();
|
||||
) -> VecMap<LocalDefId, OpaqueHiddenType<'tcx>> {
|
||||
let mut result: VecMap<LocalDefId, OpaqueHiddenType<'tcx>> = VecMap::new();
|
||||
for (opaque_type_key, (concrete_type, origin)) in opaque_ty_decls {
|
||||
let substs = opaque_type_key.substs;
|
||||
debug!(?concrete_type, ?substs);
|
||||
|
||||
let mut subst_regions = vec![self.universal_regions.fr_static];
|
||||
let universal_substs = infcx.tcx.fold_regions(substs, &mut false, |region, _| {
|
||||
let universal_substs = infcx.tcx.fold_regions(substs, |region, _| {
|
||||
if let ty::RePlaceholder(..) = region.kind() {
|
||||
// Higher kinded regions don't need remapping, they don't refer to anything outside of this the substs.
|
||||
return region;
|
||||
@ -91,7 +102,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
subst_regions.dedup();
|
||||
|
||||
let universal_concrete_type =
|
||||
infcx.tcx.fold_regions(concrete_type, &mut false, |region, _| match *region {
|
||||
infcx.tcx.fold_regions(concrete_type, |region, _| match *region {
|
||||
ty::ReVar(vid) => subst_regions
|
||||
.iter()
|
||||
.find(|ur_vid| self.eval_equal(vid, **ur_vid))
|
||||
@ -146,7 +157,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
where
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
tcx.fold_regions(ty, &mut false, |region, _| match *region {
|
||||
tcx.fold_regions(ty, |region, _| match *region {
|
||||
ty::ReVar(vid) => {
|
||||
// Find something that we can name
|
||||
let upper_bound = self.approx_universal_upper_bound(vid);
|
||||
@ -171,3 +182,481 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub trait InferCtxtExt<'tcx> {
|
||||
fn infer_opaque_definition_from_instantiation(
|
||||
&self,
|
||||
opaque_type_key: OpaqueTypeKey<'tcx>,
|
||||
instantiated_ty: OpaqueHiddenType<'tcx>,
|
||||
origin: OpaqueTyOrigin,
|
||||
) -> Ty<'tcx>;
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
/// Given the fully resolved, instantiated type for an opaque
|
||||
/// type, i.e., the value of an inference variable like C1 or C2
|
||||
/// (*), computes the "definition type" for an opaque type
|
||||
/// definition -- that is, the inferred value of `Foo1<'x>` or
|
||||
/// `Foo2<'x>` that we would conceptually use in its definition:
|
||||
/// ```ignore (illustrative)
|
||||
/// type Foo1<'x> = impl Bar<'x> = AAA; // <-- this type AAA
|
||||
/// type Foo2<'x> = impl Bar<'x> = BBB; // <-- or this type BBB
|
||||
/// fn foo<'a, 'b>(..) -> (Foo1<'a>, Foo2<'b>) { .. }
|
||||
/// ```
|
||||
/// Note that these values are defined in terms of a distinct set of
|
||||
/// generic parameters (`'x` instead of `'a`) from C1 or C2. The main
|
||||
/// purpose of this function is to do that translation.
|
||||
///
|
||||
/// (*) C1 and C2 were introduced in the comments on
|
||||
/// `register_member_constraints`. Read that comment for more context.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// - `def_id`, the `impl Trait` type
|
||||
/// - `substs`, the substs used to instantiate this opaque type
|
||||
/// - `instantiated_ty`, the inferred type C1 -- fully resolved, lifted version of
|
||||
/// `opaque_defn.concrete_ty`
|
||||
#[instrument(level = "debug", skip(self))]
|
||||
fn infer_opaque_definition_from_instantiation(
|
||||
&self,
|
||||
opaque_type_key: OpaqueTypeKey<'tcx>,
|
||||
instantiated_ty: OpaqueHiddenType<'tcx>,
|
||||
origin: OpaqueTyOrigin,
|
||||
) -> Ty<'tcx> {
|
||||
if self.is_tainted_by_errors() {
|
||||
return self.tcx.ty_error();
|
||||
}
|
||||
|
||||
let OpaqueTypeKey { def_id, substs } = opaque_type_key;
|
||||
|
||||
// Use substs to build up a reverse map from regions to their
|
||||
// identity mappings. This is necessary because of `impl
|
||||
// Trait` lifetimes are computed by replacing existing
|
||||
// lifetimes with 'static and remapping only those used in the
|
||||
// `impl Trait` return type, resulting in the parameters
|
||||
// shifting.
|
||||
let id_substs = InternalSubsts::identity_for_item(self.tcx, def_id.to_def_id());
|
||||
debug!(?id_substs);
|
||||
let map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>> =
|
||||
substs.iter().enumerate().map(|(index, subst)| (subst, id_substs[index])).collect();
|
||||
debug!("map = {:#?}", map);
|
||||
|
||||
// Convert the type from the function into a type valid outside
|
||||
// the function, by replacing invalid regions with 'static,
|
||||
// after producing an error for each of them.
|
||||
let definition_ty = instantiated_ty.ty.fold_with(&mut ReverseMapper::new(
|
||||
self.tcx,
|
||||
opaque_type_key,
|
||||
map,
|
||||
instantiated_ty.ty,
|
||||
instantiated_ty.span,
|
||||
));
|
||||
debug!(?definition_ty);
|
||||
|
||||
if !check_opaque_type_parameter_valid(
|
||||
self.tcx,
|
||||
opaque_type_key,
|
||||
origin,
|
||||
instantiated_ty.span,
|
||||
) {
|
||||
return self.tcx.ty_error();
|
||||
}
|
||||
|
||||
// Only check this for TAIT. RPIT already supports `src/test/ui/impl-trait/nested-return-type2.rs`
|
||||
// on stable and we'd break that.
|
||||
if let OpaqueTyOrigin::TyAlias = origin {
|
||||
// This logic duplicates most of `check_opaque_meets_bounds`.
|
||||
// FIXME(oli-obk): Also do region checks here and then consider removing `check_opaque_meets_bounds` entirely.
|
||||
let param_env = self.tcx.param_env(def_id);
|
||||
let body_id = self.tcx.local_def_id_to_hir_id(def_id);
|
||||
// HACK This bubble is required for this tests to pass:
|
||||
// type-alias-impl-trait/issue-67844-nested-opaque.rs
|
||||
self.tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bubble).enter(
|
||||
move |infcx| {
|
||||
// Require the hidden type to be well-formed with only the generics of the opaque type.
|
||||
// Defining use functions may have more bounds than the opaque type, which is ok, as long as the
|
||||
// hidden type is well formed even without those bounds.
|
||||
let predicate =
|
||||
ty::Binder::dummy(ty::PredicateKind::WellFormed(definition_ty.into()))
|
||||
.to_predicate(infcx.tcx);
|
||||
let mut fulfillment_cx = <dyn TraitEngine<'tcx>>::new(infcx.tcx);
|
||||
|
||||
// Require that the hidden type actually fulfills all the bounds of the opaque type, even without
|
||||
// the bounds that the function supplies.
|
||||
match infcx.register_hidden_type(
|
||||
OpaqueTypeKey { def_id, substs: id_substs },
|
||||
ObligationCause::misc(instantiated_ty.span, body_id),
|
||||
param_env,
|
||||
definition_ty,
|
||||
origin,
|
||||
) {
|
||||
Ok(infer_ok) => {
|
||||
for obligation in infer_ok.obligations {
|
||||
fulfillment_cx.register_predicate_obligation(&infcx, obligation);
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
infcx
|
||||
.report_mismatched_types(
|
||||
&ObligationCause::misc(instantiated_ty.span, body_id),
|
||||
self.tcx.mk_opaque(def_id.to_def_id(), id_substs),
|
||||
definition_ty,
|
||||
err,
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
}
|
||||
|
||||
fulfillment_cx.register_predicate_obligation(
|
||||
&infcx,
|
||||
Obligation::misc(instantiated_ty.span, body_id, param_env, predicate),
|
||||
);
|
||||
|
||||
// Check that all obligations are satisfied by the implementation's
|
||||
// version.
|
||||
let errors = fulfillment_cx.select_all_or_error(&infcx);
|
||||
|
||||
// This is still required for many(half of the tests in ui/type-alias-impl-trait)
|
||||
// tests to pass
|
||||
let _ = infcx.inner.borrow_mut().opaque_type_storage.take_opaque_types();
|
||||
|
||||
if errors.is_empty() {
|
||||
definition_ty
|
||||
} else {
|
||||
infcx.report_fulfillment_errors(&errors, None, false);
|
||||
self.tcx.ty_error()
|
||||
}
|
||||
},
|
||||
)
|
||||
} else {
|
||||
definition_ty
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn check_opaque_type_parameter_valid(
|
||||
tcx: TyCtxt<'_>,
|
||||
opaque_type_key: OpaqueTypeKey<'_>,
|
||||
origin: OpaqueTyOrigin,
|
||||
span: Span,
|
||||
) -> bool {
|
||||
match origin {
|
||||
// No need to check return position impl trait (RPIT)
|
||||
// because for type and const parameters they are correct
|
||||
// by construction: we convert
|
||||
//
|
||||
// fn foo<P0..Pn>() -> impl Trait
|
||||
//
|
||||
// into
|
||||
//
|
||||
// type Foo<P0...Pn>
|
||||
// fn foo<P0..Pn>() -> Foo<P0...Pn>.
|
||||
//
|
||||
// For lifetime parameters we convert
|
||||
//
|
||||
// fn foo<'l0..'ln>() -> impl Trait<'l0..'lm>
|
||||
//
|
||||
// into
|
||||
//
|
||||
// type foo::<'p0..'pn>::Foo<'q0..'qm>
|
||||
// fn foo<l0..'ln>() -> foo::<'static..'static>::Foo<'l0..'lm>.
|
||||
//
|
||||
// which would error here on all of the `'static` args.
|
||||
OpaqueTyOrigin::FnReturn(..) | OpaqueTyOrigin::AsyncFn(..) => return true,
|
||||
// Check these
|
||||
OpaqueTyOrigin::TyAlias => {}
|
||||
}
|
||||
let opaque_generics = tcx.generics_of(opaque_type_key.def_id);
|
||||
let mut seen_params: FxHashMap<_, Vec<_>> = FxHashMap::default();
|
||||
for (i, arg) in opaque_type_key.substs.iter().enumerate() {
|
||||
let arg_is_param = match arg.unpack() {
|
||||
GenericArgKind::Type(ty) => matches!(ty.kind(), ty::Param(_)),
|
||||
GenericArgKind::Lifetime(lt) if lt.is_static() => {
|
||||
tcx.sess
|
||||
.struct_span_err(span, "non-defining opaque type use in defining scope")
|
||||
.span_label(
|
||||
tcx.def_span(opaque_generics.param_at(i, tcx).def_id),
|
||||
"cannot use static lifetime; use a bound lifetime \
|
||||
instead or remove the lifetime parameter from the \
|
||||
opaque type",
|
||||
)
|
||||
.emit();
|
||||
return false;
|
||||
}
|
||||
GenericArgKind::Lifetime(lt) => {
|
||||
matches!(*lt, ty::ReEarlyBound(_) | ty::ReFree(_))
|
||||
}
|
||||
GenericArgKind::Const(ct) => matches!(ct.kind(), ty::ConstKind::Param(_)),
|
||||
};
|
||||
|
||||
if arg_is_param {
|
||||
seen_params.entry(arg).or_default().push(i);
|
||||
} else {
|
||||
// Prevent `fn foo() -> Foo<u32>` from being defining.
|
||||
let opaque_param = opaque_generics.param_at(i, tcx);
|
||||
tcx.sess
|
||||
.struct_span_err(span, "non-defining opaque type use in defining scope")
|
||||
.span_note(
|
||||
tcx.def_span(opaque_param.def_id),
|
||||
&format!(
|
||||
"used non-generic {} `{}` for generic parameter",
|
||||
opaque_param.kind.descr(),
|
||||
arg,
|
||||
),
|
||||
)
|
||||
.emit();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
for (_, indices) in seen_params {
|
||||
if indices.len() > 1 {
|
||||
let descr = opaque_generics.param_at(indices[0], tcx).kind.descr();
|
||||
let spans: Vec<_> = indices
|
||||
.into_iter()
|
||||
.map(|i| tcx.def_span(opaque_generics.param_at(i, tcx).def_id))
|
||||
.collect();
|
||||
tcx.sess
|
||||
.struct_span_err(span, "non-defining opaque type use in defining scope")
|
||||
.span_note(spans, &format!("{} used multiple times", descr))
|
||||
.emit();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
struct ReverseMapper<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
|
||||
key: ty::OpaqueTypeKey<'tcx>,
|
||||
map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>>,
|
||||
map_missing_regions_to_empty: bool,
|
||||
|
||||
/// initially `Some`, set to `None` once error has been reported
|
||||
hidden_ty: Option<Ty<'tcx>>,
|
||||
|
||||
/// Span of function being checked.
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl<'tcx> ReverseMapper<'tcx> {
|
||||
fn new(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
key: ty::OpaqueTypeKey<'tcx>,
|
||||
map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>>,
|
||||
hidden_ty: Ty<'tcx>,
|
||||
span: Span,
|
||||
) -> Self {
|
||||
Self {
|
||||
tcx,
|
||||
key,
|
||||
map,
|
||||
map_missing_regions_to_empty: false,
|
||||
hidden_ty: Some(hidden_ty),
|
||||
span,
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_kind_mapping_missing_regions_to_empty(
|
||||
&mut self,
|
||||
kind: GenericArg<'tcx>,
|
||||
) -> GenericArg<'tcx> {
|
||||
assert!(!self.map_missing_regions_to_empty);
|
||||
self.map_missing_regions_to_empty = true;
|
||||
let kind = kind.fold_with(self);
|
||||
self.map_missing_regions_to_empty = false;
|
||||
kind
|
||||
}
|
||||
|
||||
fn fold_kind_normally(&mut self, kind: GenericArg<'tcx>) -> GenericArg<'tcx> {
|
||||
assert!(!self.map_missing_regions_to_empty);
|
||||
kind.fold_with(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFolder<'tcx> for ReverseMapper<'tcx> {
|
||||
fn tcx(&self) -> TyCtxt<'tcx> {
|
||||
self.tcx
|
||||
}
|
||||
|
||||
#[instrument(skip(self), level = "debug")]
|
||||
fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
|
||||
match *r {
|
||||
// Ignore bound regions and `'static` regions that appear in the
|
||||
// type, we only need to remap regions that reference lifetimes
|
||||
// from the function declaration.
|
||||
// This would ignore `'r` in a type like `for<'r> fn(&'r u32)`.
|
||||
ty::ReLateBound(..) | ty::ReStatic => return r,
|
||||
|
||||
// If regions have been erased (by writeback), don't try to unerase
|
||||
// them.
|
||||
ty::ReErased => return r,
|
||||
|
||||
// The regions that we expect from borrow checking.
|
||||
ty::ReEarlyBound(_) | ty::ReFree(_) | ty::ReEmpty(ty::UniverseIndex::ROOT) => {}
|
||||
|
||||
ty::ReEmpty(_) | ty::RePlaceholder(_) | ty::ReVar(_) => {
|
||||
// All of the regions in the type should either have been
|
||||
// erased by writeback, or mapped back to named regions by
|
||||
// borrow checking.
|
||||
bug!("unexpected region kind in opaque type: {:?}", r);
|
||||
}
|
||||
}
|
||||
|
||||
let generics = self.tcx().generics_of(self.key.def_id);
|
||||
match self.map.get(&r.into()).map(|k| k.unpack()) {
|
||||
Some(GenericArgKind::Lifetime(r1)) => r1,
|
||||
Some(u) => panic!("region mapped to unexpected kind: {:?}", u),
|
||||
None if self.map_missing_regions_to_empty => self.tcx.lifetimes.re_root_empty,
|
||||
None if generics.parent.is_some() => {
|
||||
if let Some(hidden_ty) = self.hidden_ty.take() {
|
||||
unexpected_hidden_region_diagnostic(
|
||||
self.tcx,
|
||||
self.tcx.def_span(self.key.def_id),
|
||||
hidden_ty,
|
||||
r,
|
||||
self.key,
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
self.tcx.lifetimes.re_root_empty
|
||||
}
|
||||
None => {
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(self.span, "non-defining opaque type use in defining scope")
|
||||
.span_label(
|
||||
self.span,
|
||||
format!(
|
||||
"lifetime `{}` is part of concrete type but not used in \
|
||||
parameter list of the `impl Trait` type alias",
|
||||
r
|
||||
),
|
||||
)
|
||||
.emit();
|
||||
|
||||
self.tcx().lifetimes.re_static
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
|
||||
match *ty.kind() {
|
||||
ty::Closure(def_id, substs) => {
|
||||
// I am a horrible monster and I pray for death. When
|
||||
// we encounter a closure here, it is always a closure
|
||||
// from within the function that we are currently
|
||||
// type-checking -- one that is now being encapsulated
|
||||
// in an opaque type. Ideally, we would
|
||||
// go through the types/lifetimes that it references
|
||||
// and treat them just like we would any other type,
|
||||
// which means we would error out if we find any
|
||||
// reference to a type/region that is not in the
|
||||
// "reverse map".
|
||||
//
|
||||
// **However,** in the case of closures, there is a
|
||||
// somewhat subtle (read: hacky) consideration. The
|
||||
// problem is that our closure types currently include
|
||||
// all the lifetime parameters declared on the
|
||||
// enclosing function, even if they are unused by the
|
||||
// closure itself. We can't readily filter them out,
|
||||
// so here we replace those values with `'empty`. This
|
||||
// can't really make a difference to the rest of the
|
||||
// compiler; those regions are ignored for the
|
||||
// outlives relation, and hence don't affect trait
|
||||
// selection or auto traits, and they are erased
|
||||
// during codegen.
|
||||
|
||||
let generics = self.tcx.generics_of(def_id);
|
||||
let substs = self.tcx.mk_substs(substs.iter().enumerate().map(|(index, kind)| {
|
||||
if index < generics.parent_count {
|
||||
// Accommodate missing regions in the parent kinds...
|
||||
self.fold_kind_mapping_missing_regions_to_empty(kind)
|
||||
} else {
|
||||
// ...but not elsewhere.
|
||||
self.fold_kind_normally(kind)
|
||||
}
|
||||
}));
|
||||
|
||||
self.tcx.mk_closure(def_id, substs)
|
||||
}
|
||||
|
||||
ty::Generator(def_id, substs, movability) => {
|
||||
let generics = self.tcx.generics_of(def_id);
|
||||
let substs = self.tcx.mk_substs(substs.iter().enumerate().map(|(index, kind)| {
|
||||
if index < generics.parent_count {
|
||||
// Accommodate missing regions in the parent kinds...
|
||||
self.fold_kind_mapping_missing_regions_to_empty(kind)
|
||||
} else {
|
||||
// ...but not elsewhere.
|
||||
self.fold_kind_normally(kind)
|
||||
}
|
||||
}));
|
||||
|
||||
self.tcx.mk_generator(def_id, substs, movability)
|
||||
}
|
||||
|
||||
ty::Param(param) => {
|
||||
// Look it up in the substitution list.
|
||||
match self.map.get(&ty.into()).map(|k| k.unpack()) {
|
||||
// Found it in the substitution list; replace with the parameter from the
|
||||
// opaque type.
|
||||
Some(GenericArgKind::Type(t1)) => t1,
|
||||
Some(u) => panic!("type mapped to unexpected kind: {:?}", u),
|
||||
None => {
|
||||
debug!(?param, ?self.map);
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(
|
||||
self.span,
|
||||
&format!(
|
||||
"type parameter `{}` is part of concrete type but not \
|
||||
used in parameter list for the `impl Trait` type alias",
|
||||
ty
|
||||
),
|
||||
)
|
||||
.emit();
|
||||
|
||||
self.tcx().ty_error()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ => ty.super_fold_with(self),
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
|
||||
trace!("checking const {:?}", ct);
|
||||
// Find a const parameter
|
||||
match ct.kind() {
|
||||
ty::ConstKind::Param(..) => {
|
||||
// Look it up in the substitution list.
|
||||
match self.map.get(&ct.into()).map(|k| k.unpack()) {
|
||||
// Found it in the substitution list, replace with the parameter from the
|
||||
// opaque type.
|
||||
Some(GenericArgKind::Const(c1)) => c1,
|
||||
Some(u) => panic!("const mapped to unexpected kind: {:?}", u),
|
||||
None => {
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(
|
||||
self.span,
|
||||
&format!(
|
||||
"const parameter `{}` is part of concrete type but not \
|
||||
used in parameter list for the `impl Trait` type alias",
|
||||
ct
|
||||
),
|
||||
)
|
||||
.emit();
|
||||
|
||||
self.tcx().const_error(ct.ty())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_ => ct,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -31,7 +31,7 @@ pub fn renumber_regions<'tcx, T>(infcx: &InferCtxt<'_, 'tcx>, value: T) -> T
|
||||
where
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
infcx.tcx.fold_regions(value, &mut false, |_region, _depth| {
|
||||
infcx.tcx.fold_regions(value, |_region, _depth| {
|
||||
let origin = NllRegionVariableOrigin::Existential { from_forall: false };
|
||||
infcx.next_nll_region_var(origin)
|
||||
})
|
||||
|
||||
44
compiler/rustc_borrowck/src/session_diagnostics.rs
Normal file
44
compiler/rustc_borrowck/src/session_diagnostics.rs
Normal file
@ -0,0 +1,44 @@
use rustc_macros::{SessionDiagnostic, SessionSubdiagnostic};
use rustc_middle::ty::Ty;
use rustc_span::Span;

#[derive(SessionDiagnostic)]
#[error(borrowck::move_unsized, code = "E0161")]
pub(crate) struct MoveUnsized<'tcx> {
pub ty: Ty<'tcx>,
#[primary_span]
#[label]
pub span: Span,
}

#[derive(SessionDiagnostic)]
#[error(borrowck::higher_ranked_lifetime_error)]
pub(crate) struct HigherRankedLifetimeError {
#[subdiagnostic]
pub cause: Option<HigherRankedErrorCause>,
#[primary_span]
pub span: Span,
}

#[derive(SessionSubdiagnostic)]
pub(crate) enum HigherRankedErrorCause {
#[note(borrowck::could_not_prove)]
CouldNotProve { predicate: String },
#[note(borrowck::could_not_normalize)]
CouldNotNormalize { value: String },
}

#[derive(SessionDiagnostic)]
#[error(borrowck::higher_ranked_subtype_error)]
pub(crate) struct HigherRankedSubtypeError {
#[primary_span]
pub span: Span,
}

#[derive(SessionDiagnostic)]
#[error(borrowck::generic_does_not_live_long_enough)]
pub(crate) struct GenericDoesNotLiveLongEnough {
pub kind: String,
#[primary_span]
pub span: Span,
}

@ -39,7 +39,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
|
||||
let TypeOpOutput { output, constraints, error_info } = op.fully_perform(self.infcx)?;
|
||||
|
||||
if let Some(data) = &constraints {
|
||||
if let Some(data) = constraints {
|
||||
self.push_region_constraints(locations, category, data);
|
||||
}
|
||||
|
||||
|
||||
@ -6,7 +6,7 @@ use rustc_infer::infer::region_constraints::{GenericKind, VerifyBound};
|
||||
use rustc_infer::infer::{self, InferCtxt, SubregionOrigin};
|
||||
use rustc_middle::mir::ConstraintCategory;
|
||||
use rustc_middle::ty::subst::GenericArgKind;
|
||||
use rustc_middle::ty::TypeFoldable;
|
||||
use rustc_middle::ty::TypeVisitable;
|
||||
use rustc_middle::ty::{self, TyCtxt};
|
||||
use rustc_span::{Span, DUMMY_SP};
|
||||
|
||||
@ -22,8 +22,18 @@ pub(crate) struct ConstraintConversion<'a, 'tcx> {
|
||||
infcx: &'a InferCtxt<'a, 'tcx>,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
universal_regions: &'a UniversalRegions<'tcx>,
|
||||
/// Each RBP `GK: 'a` is assumed to be true. These encode
/// relationships like `T: 'a` that are added via implicit bounds
/// or the `param_env`.
///
/// Each region here is guaranteed to be a key in the `indices`
/// map. We use the "original" regions (i.e., the keys from the
/// map, and not the values) because the code in
/// `process_registered_region_obligations` has some special-cased
/// logic expecting to see (e.g.) `ReStatic`, and if we supplied
/// our special inference variable there, we would mess that up.
region_bound_pairs: &'a RegionBoundPairs<'tcx>,
implicit_region_bound: Option<ty::Region<'tcx>>,
implicit_region_bound: ty::Region<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
locations: Locations,
|
||||
span: Span,
|
||||
@ -36,7 +46,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
|
||||
infcx: &'a InferCtxt<'a, 'tcx>,
|
||||
universal_regions: &'a UniversalRegions<'tcx>,
|
||||
region_bound_pairs: &'a RegionBoundPairs<'tcx>,
|
||||
implicit_region_bound: Option<ty::Region<'tcx>>,
|
||||
implicit_region_bound: ty::Region<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
locations: Locations,
|
||||
span: Span,
|
||||
@ -108,7 +118,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
|
||||
// create new region variables, which can't be done later when
|
||||
// verifying these bounds.
|
||||
if t1.has_placeholders() {
|
||||
t1 = tcx.fold_regions(t1, &mut false, |r, _| match *r {
|
||||
t1 = tcx.fold_regions(t1, |r, _| match *r {
|
||||
ty::RePlaceholder(placeholder) => {
|
||||
self.constraints.placeholder_region(self.infcx, placeholder)
|
||||
}
|
||||
@ -120,7 +130,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
|
||||
&mut *self,
|
||||
tcx,
|
||||
region_bound_pairs,
|
||||
implicit_region_bound,
|
||||
Some(implicit_region_bound),
|
||||
param_env,
|
||||
)
|
||||
.type_must_outlive(origin, t1, r2);
|
||||
|
||||
@ -2,6 +2,7 @@ use rustc_data_structures::frozen::Frozen;
|
||||
use rustc_data_structures::transitive_relation::TransitiveRelation;
|
||||
use rustc_infer::infer::canonical::QueryRegionConstraints;
|
||||
use rustc_infer::infer::outlives;
|
||||
use rustc_infer::infer::outlives::env::RegionBoundPairs;
|
||||
use rustc_infer::infer::region_constraints::GenericKind;
|
||||
use rustc_infer::infer::InferCtxt;
|
||||
use rustc_middle::mir::ConstraintCategory;
|
||||
@ -34,18 +35,6 @@ pub(crate) struct UniversalRegionRelations<'tcx> {
|
||||
inverse_outlives: TransitiveRelation<RegionVid>,
|
||||
}
|
||||
|
||||
/// Each RBP `('a, GK)` indicates that `GK: 'a` can be assumed to
/// be true. These encode relationships like `T: 'a` that are
/// added via implicit bounds.
///
/// Each region here is guaranteed to be a key in the `indices`
/// map. We use the "original" regions (i.e., the keys from the
/// map, and not the values) because the code in
/// `process_registered_region_obligations` has some special-cased
/// logic expecting to see (e.g.) `ReStatic`, and if we supplied
/// our special inference variable there, we would mess that up.
type RegionBoundPairs<'tcx> = Vec<(ty::Region<'tcx>, GenericKind<'tcx>)>;
|
||||
|
||||
/// As part of computing the free region relations, we also have to
|
||||
/// normalize the input-output types, which we then need later. So we
|
||||
/// return those. This vector consists of first the input types and
|
||||
@ -61,7 +50,7 @@ pub(crate) struct CreateResult<'tcx> {
|
||||
pub(crate) fn create<'tcx>(
|
||||
infcx: &InferCtxt<'_, 'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
implicit_region_bound: Option<ty::Region<'tcx>>,
|
||||
implicit_region_bound: ty::Region<'tcx>,
|
||||
universal_regions: &Rc<UniversalRegions<'tcx>>,
|
||||
constraints: &mut MirTypeckRegionConstraints<'tcx>,
|
||||
) -> CreateResult<'tcx> {
|
||||
@ -71,7 +60,7 @@ pub(crate) fn create<'tcx>(
|
||||
implicit_region_bound,
|
||||
constraints,
|
||||
universal_regions: universal_regions.clone(),
|
||||
region_bound_pairs: Vec::new(),
|
||||
region_bound_pairs: Default::default(),
|
||||
relations: UniversalRegionRelations {
|
||||
universal_regions: universal_regions.clone(),
|
||||
outlives: Default::default(),
|
||||
@ -223,7 +212,7 @@ struct UniversalRegionRelationsBuilder<'this, 'tcx> {
|
||||
infcx: &'this InferCtxt<'this, 'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
universal_regions: Rc<UniversalRegions<'tcx>>,
|
||||
implicit_region_bound: Option<ty::Region<'tcx>>,
|
||||
implicit_region_bound: ty::Region<'tcx>,
|
||||
constraints: &'this mut MirTypeckRegionConstraints<'tcx>,
|
||||
|
||||
// outputs:
|
||||
@ -335,7 +324,7 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
|
||||
/// the same time, compute and add any implied bounds that come
|
||||
/// from this local.
|
||||
#[instrument(level = "debug", skip(self))]
|
||||
fn add_implied_bounds(&mut self, ty: Ty<'tcx>) -> Option<Rc<QueryRegionConstraints<'tcx>>> {
|
||||
fn add_implied_bounds(&mut self, ty: Ty<'tcx>) -> Option<&'tcx QueryRegionConstraints<'tcx>> {
|
||||
let TypeOpOutput { output: bounds, constraints, .. } = self
|
||||
.param_env
|
||||
.and(type_op::implied_outlives_bounds::ImpliedOutlivesBounds { ty })
|
||||
@ -371,11 +360,13 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
|
||||
}
|
||||
|
||||
OutlivesBound::RegionSubParam(r_a, param_b) => {
|
||||
self.region_bound_pairs.push((r_a, GenericKind::Param(param_b)));
|
||||
self.region_bound_pairs
|
||||
.insert(ty::OutlivesPredicate(GenericKind::Param(param_b), r_a));
|
||||
}
|
||||
|
||||
OutlivesBound::RegionSubProjection(r_a, projection_b) => {
|
||||
self.region_bound_pairs.push((r_a, GenericKind::Projection(projection_b)));
|
||||
self.region_bound_pairs
|
||||
.insert(ty::OutlivesPredicate(GenericKind::Projection(projection_b), r_a));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -225,12 +225,12 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
|
||||
debug!("{:?} normalized to {:?}", t, norm_ty);
|
||||
|
||||
for data in constraints.into_iter().collect::<Vec<_>>() {
|
||||
for data in constraints {
|
||||
ConstraintConversion::new(
|
||||
self.infcx,
|
||||
&self.borrowck_context.universal_regions,
|
||||
&self.region_bound_pairs,
|
||||
Some(self.implicit_region_bound),
|
||||
self.implicit_region_bound,
|
||||
self.param_env,
|
||||
Locations::All(DUMMY_SP),
|
||||
DUMMY_SP,
|
||||
|
||||
@ -157,7 +157,7 @@ impl LocalUseMapBuild<'_> {
|
||||
}
|
||||
|
||||
impl Visitor<'_> for LocalUseMapBuild<'_> {
|
||||
fn visit_local(&mut self, &local: &Local, context: PlaceContext, location: Location) {
|
||||
fn visit_local(&mut self, local: Local, context: PlaceContext, location: Location) {
|
||||
if self.locals_with_use_data[local] {
|
||||
match def_use::categorize(context) {
|
||||
Some(DefUse::Def) => self.insert_def(local, location),
|
||||
|
||||
@ -1,11 +1,11 @@
|
||||
use itertools::{Either, Itertools};
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_middle::mir::{Body, Local};
|
||||
use rustc_middle::ty::{RegionVid, TyCtxt};
|
||||
use std::rc::Rc;
|
||||
|
||||
use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
|
||||
use rustc_mir_dataflow::move_paths::MoveData;
|
||||
use rustc_mir_dataflow::ResultsCursor;
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::{
|
||||
constraints::OutlivesConstraintSet,
|
||||
@ -46,7 +46,8 @@ pub(super) fn generate<'mir, 'tcx>(
|
||||
&typeck.borrowck_context.universal_regions,
|
||||
&typeck.borrowck_context.constraints.outlives_constraints,
|
||||
);
|
||||
let live_locals = compute_live_locals(typeck.tcx(), &free_regions, &body);
|
||||
let (relevant_live_locals, boring_locals) =
|
||||
compute_relevant_live_locals(typeck.tcx(), &free_regions, &body);
|
||||
let facts_enabled = use_polonius || AllFacts::enabled(typeck.tcx());
|
||||
|
||||
let polonius_drop_used = if facts_enabled {
|
||||
@ -57,48 +58,44 @@ pub(super) fn generate<'mir, 'tcx>(
|
||||
None
|
||||
};
|
||||
|
||||
if !live_locals.is_empty() || facts_enabled {
|
||||
trace::trace(
|
||||
typeck,
|
||||
body,
|
||||
elements,
|
||||
flow_inits,
|
||||
move_data,
|
||||
live_locals,
|
||||
polonius_drop_used,
|
||||
);
|
||||
}
|
||||
trace::trace(
|
||||
typeck,
|
||||
body,
|
||||
elements,
|
||||
flow_inits,
|
||||
move_data,
|
||||
relevant_live_locals,
|
||||
boring_locals,
|
||||
polonius_drop_used,
|
||||
);
|
||||
}
|
||||
|
||||
// The purpose of `compute_live_locals` is to define the subset of `Local`
// The purpose of `compute_relevant_live_locals` is to define the subset of `Local`
// variables for which we need to do a liveness computation. We only need
// to compute whether a variable `X` is live if that variable contains
// some region `R` in its type where `R` is not known to outlive a free
// region (i.e., where `R` may be valid for just a subset of the fn body).
fn compute_live_locals<'tcx>(
fn compute_relevant_live_locals<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
free_regions: &FxHashSet<RegionVid>,
|
||||
body: &Body<'tcx>,
|
||||
) -> Vec<Local> {
|
||||
let live_locals: Vec<Local> = body
|
||||
.local_decls
|
||||
.iter_enumerated()
|
||||
.filter_map(|(local, local_decl)| {
|
||||
) -> (Vec<Local>, Vec<Local>) {
|
||||
let (boring_locals, relevant_live_locals): (Vec<_>, Vec<_>) =
|
||||
body.local_decls.iter_enumerated().partition_map(|(local, local_decl)| {
|
||||
if tcx.all_free_regions_meet(&local_decl.ty, |r| {
|
||||
free_regions.contains(&r.to_region_vid())
|
||||
}) {
|
||||
None
|
||||
Either::Left(local)
|
||||
} else {
|
||||
Some(local)
|
||||
Either::Right(local)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
});
|
||||
|
||||
debug!("{} total variables", body.local_decls.len());
|
||||
debug!("{} variables need liveness", live_locals.len());
|
||||
debug!("{} variables need liveness", relevant_live_locals.len());
|
||||
debug!("{} regions outlive free regions", free_regions.len());
|
||||
|
||||
live_locals
|
||||
(relevant_live_locals, boring_locals)
|
||||
}
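
The rename above also switches from `filter_map` plus `collect` to itertools' `partition_map`, so a single pass over the locals yields both the relevant and the boring set. A minimal standalone sketch of that pattern (plain integers rather than compiler types; requires the `itertools` crate):

use itertools::{Either, Itertools};

fn main() {
    // One pass, two outputs: Left values land in the first vector,
    // Right values in the second, mirroring the boring/relevant split.
    let (evens, odds): (Vec<u32>, Vec<u32>) = (0u32..10)
        .partition_map(|n| if n % 2 == 0 { Either::Left(n) } else { Either::Right(n) });
    assert_eq!(evens, vec![0, 2, 4, 6, 8]);
    assert_eq!(odds, vec![1, 3, 5, 7, 9]);
}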
|
||||
|
||||
/// Computes all regions that are (currently) known to outlive free
|
||||
|
||||
@ -54,7 +54,7 @@ impl UseFactsExtractor<'_, '_> {
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Visitor<'tcx> for UseFactsExtractor<'a, 'tcx> {
|
||||
fn visit_local(&mut self, &local: &Local, context: PlaceContext, location: Location) {
|
||||
fn visit_local(&mut self, local: Local, context: PlaceContext, location: Location) {
|
||||
match def_use::categorize(context) {
|
||||
Some(DefUse::Def) => self.insert_def(local, location),
|
||||
Some(DefUse::Use) => self.insert_use(local, location),
|
||||
|
||||
@ -3,7 +3,7 @@ use rustc_index::bit_set::HybridBitSet;
|
||||
use rustc_index::interval::IntervalSet;
|
||||
use rustc_infer::infer::canonical::QueryRegionConstraints;
|
||||
use rustc_middle::mir::{BasicBlock, Body, ConstraintCategory, Local, Location};
|
||||
use rustc_middle::ty::{Ty, TypeFoldable};
|
||||
use rustc_middle::ty::{Ty, TypeVisitable};
|
||||
use rustc_trait_selection::traits::query::dropck_outlives::DropckOutlivesResult;
|
||||
use rustc_trait_selection::traits::query::type_op::outlives::DropckOutlives;
|
||||
use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput};
|
||||
@ -41,12 +41,13 @@ pub(super) fn trace<'mir, 'tcx>(
|
||||
elements: &Rc<RegionValueElements>,
|
||||
flow_inits: &mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
|
||||
move_data: &MoveData<'tcx>,
|
||||
live_locals: Vec<Local>,
|
||||
relevant_live_locals: Vec<Local>,
|
||||
boring_locals: Vec<Local>,
|
||||
polonius_drop_used: Option<Vec<(Local, Location)>>,
|
||||
) {
|
||||
debug!("trace()");
|
||||
|
||||
let local_use_map = &LocalUseMap::build(&live_locals, elements, body);
|
||||
let local_use_map = &LocalUseMap::build(&relevant_live_locals, elements, body);
|
||||
|
||||
let cx = LivenessContext {
|
||||
typeck,
|
||||
@ -61,10 +62,12 @@ pub(super) fn trace<'mir, 'tcx>(
|
||||
let mut results = LivenessResults::new(cx);
|
||||
|
||||
if let Some(drop_used) = polonius_drop_used {
|
||||
results.add_extra_drop_facts(drop_used, live_locals.iter().copied().collect())
|
||||
results.add_extra_drop_facts(drop_used, relevant_live_locals.iter().copied().collect())
|
||||
}
|
||||
|
||||
results.compute_for_all_locals(live_locals);
|
||||
results.compute_for_all_locals(relevant_live_locals);
|
||||
|
||||
results.dropck_boring_locals(boring_locals);
|
||||
}
|
||||
|
||||
/// Contextual state for the type-liveness generator.
|
||||
@ -95,7 +98,7 @@ struct LivenessContext<'me, 'typeck, 'flow, 'tcx> {
|
||||
|
||||
struct DropData<'tcx> {
|
||||
dropck_result: DropckOutlivesResult<'tcx>,
|
||||
region_constraint_data: Option<Rc<QueryRegionConstraints<'tcx>>>,
|
||||
region_constraint_data: Option<&'tcx QueryRegionConstraints<'tcx>>,
|
||||
}
|
||||
|
||||
struct LivenessResults<'me, 'typeck, 'flow, 'tcx> {
|
||||
@ -133,8 +136,8 @@ impl<'me, 'typeck, 'flow, 'tcx> LivenessResults<'me, 'typeck, 'flow, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn compute_for_all_locals(&mut self, live_locals: Vec<Local>) {
|
||||
for local in live_locals {
|
||||
fn compute_for_all_locals(&mut self, relevant_live_locals: Vec<Local>) {
|
||||
for local in relevant_live_locals {
|
||||
self.reset_local_state();
|
||||
self.add_defs_for(local);
|
||||
self.compute_use_live_points_for(local);
|
||||
@ -157,6 +160,24 @@ impl<'me, 'typeck, 'flow, 'tcx> LivenessResults<'me, 'typeck, 'flow, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
// Runs dropck for locals whose liveness isn't relevant. This is
|
||||
// necessary to eagerly detect unbound recursion during drop glue computation.
|
||||
fn dropck_boring_locals(&mut self, boring_locals: Vec<Local>) {
|
||||
for local in boring_locals {
|
||||
let local_ty = self.cx.body.local_decls[local].ty;
|
||||
let drop_data = self.cx.drop_data.entry(local_ty).or_insert_with({
|
||||
let typeck = &mut self.cx.typeck;
|
||||
move || LivenessContext::compute_drop_data(typeck, local_ty)
|
||||
});
|
||||
|
||||
drop_data.dropck_result.report_overflows(
|
||||
self.cx.typeck.infcx.tcx,
|
||||
self.cx.body.local_decls[local].source_info.span,
|
||||
local_ty,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Add extra drop facts needed for Polonius.
|
||||
///
|
||||
/// Add facts for all locals with free regions, since regions may outlive
|
||||
@ -164,12 +185,12 @@ impl<'me, 'typeck, 'flow, 'tcx> LivenessResults<'me, 'typeck, 'flow, 'tcx> {
|
||||
fn add_extra_drop_facts(
|
||||
&mut self,
|
||||
drop_used: Vec<(Local, Location)>,
|
||||
live_locals: FxHashSet<Local>,
|
||||
relevant_live_locals: FxHashSet<Local>,
|
||||
) {
|
||||
let locations = IntervalSet::new(self.cx.elements.num_points());
|
||||
|
||||
for (local, location) in drop_used {
|
||||
if !live_locals.contains(&local) {
|
||||
if !relevant_live_locals.contains(&local) {
|
||||
let local_ty = self.cx.body.local_decls[local].ty;
|
||||
if local_ty.has_free_regions() {
|
||||
self.cx.add_drop_live_facts_for(local, local_ty, &[location], &locations);
|
||||
@ -237,7 +258,7 @@ impl<'me, 'typeck, 'flow, 'tcx> LivenessResults<'me, 'typeck, 'flow, 'tcx> {
|
||||
|
||||
let block = self.cx.elements.to_location(block_start).block;
|
||||
self.stack.extend(
|
||||
self.cx.body.predecessors()[block]
|
||||
self.cx.body.basic_blocks.predecessors()[block]
|
||||
.iter()
|
||||
.map(|&pred_bb| self.cx.body.terminator_loc(pred_bb))
|
||||
.map(|pred_loc| self.cx.elements.point_from_location(pred_loc)),
|
||||
@ -333,7 +354,7 @@ impl<'me, 'typeck, 'flow, 'tcx> LivenessResults<'me, 'typeck, 'flow, 'tcx> {
|
||||
}
|
||||
|
||||
let body = self.cx.body;
|
||||
for &pred_block in body.predecessors()[block].iter() {
|
||||
for &pred_block in body.basic_blocks.predecessors()[block].iter() {
|
||||
debug!("compute_drop_live_points_for_block: pred_block = {:?}", pred_block,);
|
||||
|
||||
// Check whether the variable is (at least partially)
|
||||
@ -456,7 +477,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
|
||||
/// points `live_at`.
|
||||
fn add_use_live_facts_for(
|
||||
&mut self,
|
||||
value: impl TypeFoldable<'tcx>,
|
||||
value: impl TypeVisitable<'tcx>,
|
||||
live_at: &IntervalSet<PointIndex>,
|
||||
) {
|
||||
debug!("add_use_live_facts_for(value={:?})", value);
|
||||
@ -521,7 +542,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
|
||||
fn make_all_regions_live(
|
||||
elements: &RegionValueElements,
|
||||
typeck: &mut TypeChecker<'_, 'tcx>,
|
||||
value: impl TypeFoldable<'tcx>,
|
||||
value: impl TypeVisitable<'tcx>,
|
||||
live_at: &IntervalSet<PointIndex>,
|
||||
) {
|
||||
debug!("make_all_regions_live(value={:?})", value);
|
||||
|
||||
@ -9,7 +9,6 @@ use hir::OpaqueTyOrigin;
|
||||
use rustc_data_structures::frozen::Frozen;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
||||
use rustc_data_structures::vec_map::VecMap;
|
||||
use rustc_errors::struct_span_err;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::DefKind;
|
||||
use rustc_hir::def_id::LocalDefId;
|
||||
@ -28,8 +27,8 @@ use rustc_middle::mir::AssertKind;
|
||||
use rustc_middle::mir::*;
|
||||
use rustc_middle::ty::adjustment::PointerCast;
|
||||
use rustc_middle::ty::cast::CastTy;
|
||||
use rustc_middle::ty::fold::TypeFoldable;
|
||||
use rustc_middle::ty::subst::{GenericArgKind, SubstsRef, UserSubsts};
|
||||
use rustc_middle::ty::visit::TypeVisitable;
|
||||
use rustc_middle::ty::{
|
||||
self, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, OpaqueHiddenType,
|
||||
OpaqueTypeKey, RegionVid, ToPredicate, Ty, TyCtxt, UserType, UserTypeAnnotationIndex,
|
||||
@ -48,6 +47,7 @@ use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
|
||||
use rustc_mir_dataflow::move_paths::MoveData;
|
||||
use rustc_mir_dataflow::ResultsCursor;
|
||||
|
||||
use crate::session_diagnostics::MoveUnsized;
|
||||
use crate::{
|
||||
borrow_set::BorrowSet,
|
||||
constraints::{OutlivesConstraint, OutlivesConstraintSet},
|
||||
@ -157,7 +157,7 @@ pub(crate) fn type_check<'mir, 'tcx>(
|
||||
} = free_region_relations::create(
|
||||
infcx,
|
||||
param_env,
|
||||
Some(implicit_region_bound),
|
||||
implicit_region_bound,
|
||||
universal_regions,
|
||||
&mut constraints,
|
||||
);
|
||||
@ -333,9 +333,9 @@ struct TypeVerifier<'a, 'b, 'tcx> {
|
||||
}
|
||||
|
||||
impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
||||
fn visit_span(&mut self, span: &Span) {
|
||||
fn visit_span(&mut self, span: Span) {
|
||||
if !span.is_dummy() {
|
||||
self.last_span = *span;
|
||||
self.last_span = span;
|
||||
}
|
||||
}
|
||||
|
||||
@ -1142,7 +1142,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
self.infcx,
|
||||
self.borrowck_context.universal_regions,
|
||||
self.region_bound_pairs,
|
||||
Some(self.implicit_region_bound),
|
||||
self.implicit_region_bound,
|
||||
self.param_env,
|
||||
locations,
|
||||
locations.span(self.body),
|
||||
@ -1780,19 +1780,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
// slot or local, so to find all unsized rvalues it is enough
|
||||
// to check all temps, return slots and locals.
|
||||
if self.reported_errors.replace((ty, span)).is_none() {
|
||||
let mut diag = struct_span_err!(
|
||||
self.tcx().sess,
|
||||
span,
|
||||
E0161,
|
||||
"cannot move a value of type {0}: the size of {0} \
|
||||
cannot be statically determined",
|
||||
ty
|
||||
);
|
||||
|
||||
// While this is located in `nll::typeck` this error is not
|
||||
// an NLL error, it's a required check to prevent creation
|
||||
// of unsized rvalues in a call expression.
|
||||
diag.emit();
|
||||
self.tcx().sess.emit_err(MoveUnsized { ty, span });
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1856,14 +1847,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
let tcx = self.tcx();
|
||||
let def_id = uv.def.def_id_for_type_of();
|
||||
if tcx.def_kind(def_id) == DefKind::InlineConst {
|
||||
let predicates = self.prove_closure_bounds(
|
||||
tcx,
|
||||
def_id.expect_local(),
|
||||
uv.substs,
|
||||
location,
|
||||
);
|
||||
let def_id = def_id.expect_local();
|
||||
let predicates =
|
||||
self.prove_closure_bounds(tcx, def_id, uv.substs, location);
|
||||
self.normalize_and_prove_instantiated_predicates(
|
||||
def_id,
|
||||
def_id.to_def_id(),
|
||||
predicates,
|
||||
location.to_locations(),
|
||||
);
|
||||
@ -2278,6 +2266,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
Rvalue::Use(operand) | Rvalue::UnaryOp(_, operand) => {
|
||||
self.check_operand(operand, location);
|
||||
}
|
||||
Rvalue::CopyForDeref(place) => {
|
||||
let op = &Operand::Copy(*place);
|
||||
self.check_operand(op, location);
|
||||
}
|
||||
|
||||
Rvalue::BinaryOp(_, box (left, right))
|
||||
| Rvalue::CheckedBinaryOp(_, box (left, right)) => {
|
||||
@ -2308,6 +2300,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
| Rvalue::BinaryOp(..)
|
||||
| Rvalue::CheckedBinaryOp(..)
|
||||
| Rvalue::NullaryOp(..)
|
||||
| Rvalue::CopyForDeref(..)
|
||||
| Rvalue::UnaryOp(..)
|
||||
| Rvalue::Discriminant(..) => None,
|
||||
|
||||
@ -2518,9 +2511,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
aggregate_kind, location
|
||||
);
|
||||
|
||||
let (def_id, instantiated_predicates) = match aggregate_kind {
|
||||
let (def_id, instantiated_predicates) = match *aggregate_kind {
|
||||
AggregateKind::Adt(adt_did, _, substs, _, _) => {
|
||||
(*adt_did, tcx.predicates_of(*adt_did).instantiate(tcx, substs))
|
||||
(adt_did, tcx.predicates_of(adt_did).instantiate(tcx, substs))
|
||||
}
|
||||
|
||||
// For closures, we have some **extra requirements** we
|
||||
@ -2545,7 +2538,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
// clauses on the struct.
|
||||
AggregateKind::Closure(def_id, substs)
|
||||
| AggregateKind::Generator(def_id, substs, _) => {
|
||||
(*def_id, self.prove_closure_bounds(tcx, def_id.expect_local(), substs, location))
|
||||
(def_id.to_def_id(), self.prove_closure_bounds(tcx, def_id, substs, location))
|
||||
}
|
||||
|
||||
AggregateKind::Array(_) | AggregateKind::Tuple => {
|
||||
@ -2626,6 +2619,34 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
);
|
||||
}
|
||||
|
||||
// Now equate closure substs to regions inherited from `typeck_root_def_id`. Fixes #98589.
|
||||
let typeck_root_def_id = tcx.typeck_root_def_id(self.body.source.def_id());
|
||||
let typeck_root_substs = ty::InternalSubsts::identity_for_item(tcx, typeck_root_def_id);
|
||||
|
||||
let parent_substs = match tcx.def_kind(def_id) {
|
||||
DefKind::Closure => substs.as_closure().parent_substs(),
|
||||
DefKind::Generator => substs.as_generator().parent_substs(),
|
||||
DefKind::InlineConst => substs.as_inline_const().parent_substs(),
|
||||
other => bug!("unexpected item {:?}", other),
|
||||
};
|
||||
let parent_substs = tcx.mk_substs(parent_substs.iter());
|
||||
|
||||
assert_eq!(typeck_root_substs.len(), parent_substs.len());
|
||||
if let Err(_) = self.eq_substs(
|
||||
typeck_root_substs,
|
||||
parent_substs,
|
||||
location.to_locations(),
|
||||
ConstraintCategory::BoringNoLocation,
|
||||
) {
|
||||
span_mirbug!(
|
||||
self,
|
||||
def_id,
|
||||
"could not relate closure to parent {:?} != {:?}",
|
||||
typeck_root_substs,
|
||||
parent_substs
|
||||
);
|
||||
}
|
||||
|
||||
tcx.predicates_of(def_id).instantiate(tcx, substs)
|
||||
}
|
||||
|
||||
|
||||
@ -38,6 +38,23 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||
.relate(a, b)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Add sufficient constraints to ensure `a == b`. See also [Self::relate_types].
|
||||
pub(super) fn eq_substs(
|
||||
&mut self,
|
||||
a: ty::SubstsRef<'tcx>,
|
||||
b: ty::SubstsRef<'tcx>,
|
||||
locations: Locations,
|
||||
category: ConstraintCategory<'tcx>,
|
||||
) -> Fallible<()> {
|
||||
TypeRelating::new(
|
||||
self.infcx,
|
||||
NllTypeRelatingDelegate::new(self, locations, category, UniverseInfo::other()),
|
||||
ty::Variance::Invariant,
|
||||
)
|
||||
.relate(a, b)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
struct NllTypeRelatingDelegate<'me, 'bccx, 'tcx> {
|
||||
|
||||
@ -503,7 +503,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
|
||||
|
||||
let root_empty = self
|
||||
.infcx
|
||||
.next_nll_region_var(NllRegionVariableOrigin::RootEmptyRegion)
|
||||
.next_nll_region_var(NllRegionVariableOrigin::Existential { from_forall: true })
|
||||
.to_region_vid();
|
||||
|
||||
UniversalRegions {
|
||||
@ -725,7 +725,7 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'cx, 'tcx> {
|
||||
where
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
self.tcx.fold_regions(value, &mut false, |_region, _depth| self.next_nll_region_var(origin))
|
||||
self.tcx.fold_regions(value, |_region, _depth| self.next_nll_region_var(origin))
|
||||
}
|
||||
|
||||
#[instrument(level = "debug", skip(self, indices))]
|
||||
@ -817,9 +817,7 @@ impl<'tcx> UniversalRegionIndices<'tcx> {
|
||||
where
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
tcx.fold_regions(value, &mut false, |region, _| {
|
||||
tcx.mk_region(ty::ReVar(self.to_region_vid(region)))
|
||||
})
|
||||
tcx.fold_regions(value, |region, _| tcx.mk_region(ty::ReVar(self.to_region_vid(region))))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -91,8 +91,8 @@ impl<'visit, 'cx, 'tcx> Visitor<'tcx> for GatherUsedMutsVisitor<'visit, 'cx, 'tc
|
||||
self.super_statement(statement, location);
|
||||
}
|
||||
|
||||
fn visit_local(&mut self, local: &Local, place_context: PlaceContext, location: Location) {
|
||||
if place_context.is_place_assignment() && self.temporary_used_locals.contains(local) {
|
||||
fn visit_local(&mut self, local: Local, place_context: PlaceContext, location: Location) {
|
||||
if place_context.is_place_assignment() && self.temporary_used_locals.contains(&local) {
|
||||
// Propagate the Local assigned at this Location as a used mutable local variable
|
||||
for moi in &self.mbcx.move_data.loc_map[location] {
|
||||
let mpi = &self.mbcx.move_data.moves[*moi].path;
|
||||
|
||||
@ -20,7 +20,7 @@ rustc_macros = { path = "../rustc_macros" }
|
||||
rustc_parse = { path = "../rustc_parse" }
|
||||
rustc_target = { path = "../rustc_target" }
|
||||
rustc_session = { path = "../rustc_session" }
|
||||
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
|
||||
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
|
||||
rustc_ast = { path = "../rustc_ast" }
|
||||
rustc_expand = { path = "../rustc_expand" }
|
||||
rustc_span = { path = "../rustc_span" }
|
||||
|
||||
@ -410,12 +410,12 @@ fn parse_options<'a>(
|
||||
try_set_option(p, args, sym::noreturn, ast::InlineAsmOptions::NORETURN);
|
||||
} else if !is_global_asm && p.eat_keyword(sym::nostack) {
|
||||
try_set_option(p, args, sym::nostack, ast::InlineAsmOptions::NOSTACK);
|
||||
} else if !is_global_asm && p.eat_keyword(sym::may_unwind) {
|
||||
try_set_option(p, args, kw::Raw, ast::InlineAsmOptions::MAY_UNWIND);
|
||||
} else if p.eat_keyword(sym::att_syntax) {
|
||||
try_set_option(p, args, sym::att_syntax, ast::InlineAsmOptions::ATT_SYNTAX);
|
||||
} else if p.eat_keyword(kw::Raw) {
|
||||
try_set_option(p, args, kw::Raw, ast::InlineAsmOptions::RAW);
|
||||
} else if p.eat_keyword(sym::may_unwind) {
|
||||
try_set_option(p, args, kw::Raw, ast::InlineAsmOptions::MAY_UNWIND);
|
||||
} else {
|
||||
return p.unexpected();
|
||||
}
|
||||
@ -534,8 +534,8 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
|
||||
|
||||
let mut template_strs = Vec::with_capacity(args.templates.len());
|
||||
|
||||
for template_expr in args.templates.into_iter() {
|
||||
if !template.is_empty() {
|
||||
for (i, template_expr) in args.templates.into_iter().enumerate() {
|
||||
if i != 0 {
|
||||
template.push(ast::InlineAsmTemplatePiece::String("\n".to_string()));
|
||||
}
|
||||
|
||||
@ -702,11 +702,12 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
|
||||
Some(idx)
|
||||
}
|
||||
}
|
||||
parse::ArgumentNamed(name, span) => {
|
||||
parse::ArgumentNamed(name) => {
|
||||
match args.named_args.get(&Symbol::intern(name)) {
|
||||
Some(&idx) => Some(idx),
|
||||
None => {
|
||||
let msg = format!("there is no argument named `{}`", name);
|
||||
let span = arg.position_span;
|
||||
ecx.struct_span_err(
|
||||
template_span
|
||||
.from_inner(InnerSpan::new(span.start, span.end)),
|
||||
|
||||
@ -1,11 +1,10 @@
|
||||
use crate::assert::expr_if_not;
|
||||
use rustc_ast::{
|
||||
attr,
|
||||
ptr::P,
|
||||
token,
|
||||
tokenstream::{DelimSpan, TokenStream, TokenTree},
|
||||
BorrowKind, Expr, ExprKind, ItemKind, MacArgs, MacCall, MacDelimiter, Mutability, Path,
|
||||
PathSegment, Stmt, StructRest, UseTree, UseTreeKind, DUMMY_NODE_ID,
|
||||
BinOpKind, BorrowKind, Expr, ExprKind, ItemKind, MacArgs, MacCall, MacDelimiter, Mutability,
|
||||
Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID,
|
||||
};
|
||||
use rustc_ast_pretty::pprust;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
@ -16,11 +15,19 @@ use rustc_span::{
|
||||
};
|
||||
|
||||
pub(super) struct Context<'cx, 'a> {
|
||||
// An optimization.
|
||||
//
|
||||
// Elements that aren't consumed (PartialEq, PartialOrd, ...) can be copied **after** the
|
||||
// `assert!` expression fails rather than copied on-the-fly.
|
||||
best_case_captures: Vec<Stmt>,
|
||||
// Top-level `let captureN = Capture::new()` statements
|
||||
capture_decls: Vec<Capture>,
|
||||
cx: &'cx ExtCtxt<'a>,
|
||||
// Formatting string used for debugging
|
||||
fmt_string: String,
|
||||
// If the current expression being visited consumes itself. Used to construct
|
||||
// `best_case_captures`.
|
||||
is_consumed: bool,
|
||||
// Top-level `let __local_bindN = &expr` statements
|
||||
local_bind_decls: Vec<Stmt>,
|
||||
// Used to avoid capturing duplicated paths
|
||||
@ -36,9 +43,11 @@ pub(super) struct Context<'cx, 'a> {
|
||||
impl<'cx, 'a> Context<'cx, 'a> {
|
||||
pub(super) fn new(cx: &'cx ExtCtxt<'a>, span: Span) -> Self {
|
||||
Self {
|
||||
best_case_captures: <_>::default(),
|
||||
capture_decls: <_>::default(),
|
||||
cx,
|
||||
fmt_string: <_>::default(),
|
||||
is_consumed: true,
|
||||
local_bind_decls: <_>::default(),
|
||||
paths: <_>::default(),
|
||||
span,
|
||||
@ -69,14 +78,22 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
||||
self.manage_cond_expr(&mut cond_expr);
|
||||
let initial_imports = self.build_initial_imports();
|
||||
let panic = self.build_panic(&expr_str, panic_path);
|
||||
let cond_expr_with_unlikely = self.build_unlikely(cond_expr);
|
||||
|
||||
let Self { capture_decls, cx, local_bind_decls, span, .. } = self;
|
||||
let Self { best_case_captures, capture_decls, cx, local_bind_decls, span, .. } = self;
|
||||
|
||||
let mut assert_then_stmts = Vec::with_capacity(2);
|
||||
assert_then_stmts.extend(best_case_captures);
|
||||
assert_then_stmts.push(self.cx.stmt_expr(panic));
|
||||
let assert_then = self.cx.block(span, assert_then_stmts);
|
||||
|
||||
let mut stmts = Vec::with_capacity(4);
|
||||
stmts.push(initial_imports);
|
||||
stmts.extend(capture_decls.into_iter().map(|c| c.decl));
|
||||
stmts.extend(local_bind_decls);
|
||||
stmts.push(cx.stmt_expr(expr_if_not(cx, span, cond_expr, panic, None)));
|
||||
stmts.push(
|
||||
cx.stmt_expr(cx.expr(span, ExprKind::If(cond_expr_with_unlikely, assert_then, None))),
|
||||
);
|
||||
cx.expr_block(cx.block(span, stmts))
|
||||
}
|
||||
|
||||
@ -115,6 +132,16 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
||||
)
|
||||
}
|
||||
|
||||
/// Takes the conditional expression of `assert!` and then wraps it inside `unlikely`
|
||||
fn build_unlikely(&self, cond_expr: P<Expr>) -> P<Expr> {
|
||||
let unlikely_path = self.cx.std_path(&[sym::intrinsics, sym::unlikely]);
|
||||
self.cx.expr_call(
|
||||
self.span,
|
||||
self.cx.expr_path(self.cx.path(self.span, unlikely_path)),
|
||||
vec![self.cx.expr(self.span, ExprKind::Unary(UnOp::Not, cond_expr))],
|
||||
)
|
||||
}
|
||||
|
||||
/// The necessary custom `panic!(...)` expression.
|
||||
///
|
||||
/// panic!(
|
||||
@ -125,7 +152,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
||||
fn build_panic(&self, expr_str: &str, panic_path: Path) -> P<Expr> {
|
||||
let escaped_expr_str = escape_to_fmt(expr_str);
|
||||
let initial = [
|
||||
TokenTree::token(
|
||||
TokenTree::token_alone(
|
||||
token::Literal(token::Lit {
|
||||
kind: token::LitKind::Str,
|
||||
symbol: Symbol::intern(&if self.fmt_string.is_empty() {
|
||||
@ -140,12 +167,12 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
||||
}),
|
||||
self.span,
|
||||
),
|
||||
TokenTree::token(token::Comma, self.span),
|
||||
TokenTree::token_alone(token::Comma, self.span),
|
||||
];
|
||||
let captures = self.capture_decls.iter().flat_map(|cap| {
|
||||
[
|
||||
TokenTree::token(token::Ident(cap.ident.name, false), cap.ident.span),
|
||||
TokenTree::token(token::Comma, self.span),
|
||||
TokenTree::token_alone(token::Ident(cap.ident.name, false), cap.ident.span),
|
||||
TokenTree::token_alone(token::Comma, self.span),
|
||||
]
|
||||
});
|
||||
self.cx.expr(
|
||||
@ -167,17 +194,39 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
||||
/// See [Self::manage_initial_capture] and [Self::manage_try_capture]
|
||||
fn manage_cond_expr(&mut self, expr: &mut P<Expr>) {
|
||||
match (*expr).kind {
|
||||
ExprKind::AddrOf(_, _, ref mut local_expr) => {
|
||||
self.manage_cond_expr(local_expr);
|
||||
ExprKind::AddrOf(_, mutability, ref mut local_expr) => {
|
||||
self.with_is_consumed_management(
|
||||
matches!(mutability, Mutability::Mut),
|
||||
|this| this.manage_cond_expr(local_expr)
|
||||
);
|
||||
}
|
||||
ExprKind::Array(ref mut local_exprs) => {
|
||||
for local_expr in local_exprs {
|
||||
self.manage_cond_expr(local_expr);
|
||||
}
|
||||
}
|
||||
ExprKind::Binary(_, ref mut lhs, ref mut rhs) => {
|
||||
self.manage_cond_expr(lhs);
|
||||
self.manage_cond_expr(rhs);
|
||||
ExprKind::Binary(ref op, ref mut lhs, ref mut rhs) => {
|
||||
self.with_is_consumed_management(
|
||||
matches!(
|
||||
op.node,
|
||||
BinOpKind::Add
|
||||
| BinOpKind::And
|
||||
| BinOpKind::BitAnd
|
||||
| BinOpKind::BitOr
|
||||
| BinOpKind::BitXor
|
||||
| BinOpKind::Div
|
||||
| BinOpKind::Mul
|
||||
| BinOpKind::Or
|
||||
| BinOpKind::Rem
|
||||
| BinOpKind::Shl
|
||||
| BinOpKind::Shr
|
||||
| BinOpKind::Sub
|
||||
),
|
||||
|this| {
|
||||
this.manage_cond_expr(lhs);
|
||||
this.manage_cond_expr(rhs);
|
||||
}
|
||||
);
|
||||
}
|
||||
ExprKind::Call(_, ref mut local_exprs) => {
|
||||
for local_expr in local_exprs {
|
||||
@ -228,8 +277,11 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
||||
self.manage_cond_expr(local_expr);
|
||||
}
|
||||
}
|
||||
ExprKind::Unary(_, ref mut local_expr) => {
|
||||
self.manage_cond_expr(local_expr);
|
||||
ExprKind::Unary(un_op, ref mut local_expr) => {
|
||||
self.with_is_consumed_management(
|
||||
matches!(un_op, UnOp::Neg | UnOp::Not),
|
||||
|this| this.manage_cond_expr(local_expr)
|
||||
);
|
||||
}
|
||||
// Expressions that are not worth or can not be captured.
|
||||
//
|
||||
@ -242,7 +294,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
||||
| ExprKind::Block(_, _)
|
||||
| ExprKind::Box(_)
|
||||
| ExprKind::Break(_, _)
|
||||
| ExprKind::Closure(_, _, _, _, _, _)
|
||||
| ExprKind::Closure(_, _, _, _, _, _, _)
|
||||
| ExprKind::ConstBlock(_)
|
||||
| ExprKind::Continue(_)
|
||||
| ExprKind::Err
|
||||
@ -337,9 +389,23 @@ impl<'cx, 'a> Context<'cx, 'a> {
|
||||
))
|
||||
.add_trailing_semicolon();
|
||||
let local_bind_path = self.cx.expr_path(Path::from_ident(local_bind));
|
||||
let ret = self.cx.stmt_expr(local_bind_path);
|
||||
let block = self.cx.expr_block(self.cx.block(self.span, vec![try_capture_call, ret]));
|
||||
*expr = self.cx.expr_deref(self.span, block);
|
||||
let rslt = if self.is_consumed {
|
||||
let ret = self.cx.stmt_expr(local_bind_path);
|
||||
self.cx.expr_block(self.cx.block(self.span, vec![try_capture_call, ret]))
|
||||
} else {
|
||||
self.best_case_captures.push(try_capture_call);
|
||||
local_bind_path
|
||||
};
|
||||
*expr = self.cx.expr_deref(self.span, rslt);
|
||||
}
|
||||
|
||||
// Calls `f` with the internal `is_consumed` set to `curr_is_consumed` and then
|
||||
// sets the internal `is_consumed` back to its original value.
|
||||
fn with_is_consumed_management(&mut self, curr_is_consumed: bool, f: impl FnOnce(&mut Self)) {
|
||||
let prev_is_consumed = self.is_consumed;
|
||||
self.is_consumed = curr_is_consumed;
|
||||
f(self);
|
||||
self.is_consumed = prev_is_consumed;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -36,7 +36,7 @@ pub fn expand_cfg(
|
||||
}
|
||||
|
||||
#[derive(SessionDiagnostic)]
|
||||
#[error(slug = "builtin-macros-requires-cfg-pattern")]
|
||||
#[error(builtin_macros::requires_cfg_pattern)]
|
||||
struct RequiresCfgPattern {
|
||||
#[primary_span]
|
||||
#[label]
|
||||
@ -44,7 +44,7 @@ struct RequiresCfgPattern {
|
||||
}
|
||||
|
||||
#[derive(SessionDiagnostic)]
|
||||
#[error(slug = "builtin-macros-expected-one-cfg-pattern")]
|
||||
#[error(builtin_macros::expected_one_cfg_pattern)]
|
||||
struct OneCfgPattern {
|
||||
#[primary_span]
|
||||
span: Span,
|
||||
|
||||
@ -20,14 +20,14 @@ pub fn expand_concat_idents<'cx>(
|
||||
for (i, e) in tts.into_trees().enumerate() {
|
||||
if i & 1 == 1 {
|
||||
match e {
|
||||
TokenTree::Token(Token { kind: token::Comma, .. }) => {}
|
||||
TokenTree::Token(Token { kind: token::Comma, .. }, _) => {}
|
||||
_ => {
|
||||
cx.span_err(sp, "concat_idents! expecting comma");
|
||||
return DummyResult::any(sp);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if let TokenTree::Token(token) = e {
|
||||
if let TokenTree::Token(token, _) = e {
|
||||
if let Some((ident, _)) = token.ident() {
|
||||
res_str.push_str(ident.name.as_str());
|
||||
continue;
|
||||
|
||||
@ -19,7 +19,6 @@ pub fn expand_deriving_copy(
|
||||
path: path_std!(marker::Copy),
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: true,
|
||||
methods: Vec::new(),
|
||||
associated_types: Vec::new(),
|
||||
|
||||
@ -2,10 +2,10 @@ use crate::deriving::generic::ty::*;
|
||||
use crate::deriving::generic::*;
|
||||
use crate::deriving::path_std;
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::{self as ast, Expr, GenericArg, Generics, ItemKind, MetaItem, VariantData};
|
||||
use rustc_ast::{self as ast, Generics, ItemKind, MetaItem, VariantData};
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_expand::base::{Annotatable, ExtCtxt};
|
||||
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
||||
use rustc_span::symbol::{kw, sym, Ident};
|
||||
use rustc_span::Span;
|
||||
|
||||
pub fn expand_deriving_clone(
|
||||
@ -15,23 +15,22 @@ pub fn expand_deriving_clone(
|
||||
item: &Annotatable,
|
||||
push: &mut dyn FnMut(Annotatable),
|
||||
) {
|
||||
// check if we can use a short form
// The simple form is `fn clone(&self) -> Self { *self }`, possibly with
// some additional `AssertParamIsClone` assertions.
//
// the short form is `fn clone(&self) -> Self { *self }`
//
// we can use the short form if:
// - the item is Copy (unfortunately, all we can check is whether it's also deriving Copy)
// - there are no generic parameters (after specialization this limitation can be removed)
// if we used the short form with generics, we'd have to bound the generics with
// Clone + Copy, and then there'd be no Clone impl at all if the user fills in something
// that is Clone but not Copy. and until specialization we can't write both impls.
// - the item is a union with Copy fields
// Unions with generic parameters still can derive Clone because they require Copy
// for deriving, Clone alone is not enough.
// Wherever Clone is implemented for fields is irrelevant so we don't assert it.
// We can use the simple form if either of the following are true.
// - The type derives Copy and there are no generic parameters. (If we
// used the simple form with generics, we'd have to bound the generics
// with Clone + Copy, and then there'd be no Clone impl at all if the
// user fills in something that is Clone but not Copy. After
// specialization we can remove this no-generics limitation.)
// - The item is a union. (Unions with generic parameters still can derive
// Clone because they require Copy for deriving, Clone alone is not
// enough. Whether Clone is implemented for fields is irrelevant so we
// don't assert it.)
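// (Illustrative aside: outside the compiler, the simple form is just what
// one would write by hand for any `Copy` type, e.g.
//     #[derive(Copy)]
//     struct Point { x: u8, y: u8 }
//     impl Clone for Point { fn clone(&self) -> Self { *self } }
// because `*self` copies the value instead of moving out of it.)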
|
||||
let bounds;
|
||||
let substructure;
|
||||
let is_shallow;
|
||||
let is_simple;
|
||||
match *item {
|
||||
Annotatable::Item(ref annitem) => match annitem.kind {
|
||||
ItemKind::Struct(_, Generics { ref params, .. })
|
||||
@ -44,30 +43,25 @@ pub fn expand_deriving_clone(
|
||||
.any(|param| matches!(param.kind, ast::GenericParamKind::Type { .. }))
|
||||
{
|
||||
bounds = vec![];
|
||||
is_shallow = true;
|
||||
is_simple = true;
|
||||
substructure = combine_substructure(Box::new(|c, s, sub| {
|
||||
cs_clone_shallow("Clone", c, s, sub, false)
|
||||
cs_clone_simple("Clone", c, s, sub, false)
|
||||
}));
|
||||
} else {
|
||||
bounds = vec![];
|
||||
is_shallow = false;
|
||||
is_simple = false;
|
||||
substructure =
|
||||
combine_substructure(Box::new(|c, s, sub| cs_clone("Clone", c, s, sub)));
|
||||
}
|
||||
}
|
||||
ItemKind::Union(..) => {
|
||||
bounds = vec![Literal(path_std!(marker::Copy))];
|
||||
is_shallow = true;
|
||||
bounds = vec![Path(path_std!(marker::Copy))];
|
||||
is_simple = true;
|
||||
substructure = combine_substructure(Box::new(|c, s, sub| {
|
||||
cs_clone_shallow("Clone", c, s, sub, true)
|
||||
cs_clone_simple("Clone", c, s, sub, true)
|
||||
}));
|
||||
}
|
||||
_ => {
|
||||
bounds = vec![];
|
||||
is_shallow = false;
|
||||
substructure =
|
||||
combine_substructure(Box::new(|c, s, sub| cs_clone("Clone", c, s, sub)));
|
||||
}
|
||||
_ => cx.span_bug(span, "`#[derive(Clone)]` on wrong item kind"),
|
||||
},
|
||||
|
||||
_ => cx.span_bug(span, "`#[derive(Clone)]` on trait item or impl item"),
|
||||
@ -81,80 +75,80 @@ pub fn expand_deriving_clone(
|
||||
path: path_std!(clone::Clone),
|
||||
additional_bounds: bounds,
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: true,
|
||||
methods: vec![MethodDef {
|
||||
name: sym::clone,
|
||||
generics: Bounds::empty(),
|
||||
explicit_self: borrowed_explicit_self(),
|
||||
args: Vec::new(),
|
||||
explicit_self: true,
|
||||
nonself_args: Vec::new(),
|
||||
ret_ty: Self_,
|
||||
attributes: attrs,
|
||||
is_unsafe: false,
|
||||
unify_fieldless_variants: false,
|
||||
combine_substructure: substructure,
|
||||
}],
|
||||
associated_types: Vec::new(),
|
||||
};
|
||||
|
||||
trait_def.expand_ext(cx, mitem, item, push, is_shallow)
|
||||
trait_def.expand_ext(cx, mitem, item, push, is_simple)
|
||||
}
|
||||
|
||||
fn cs_clone_shallow(
|
||||
fn cs_clone_simple(
|
||||
name: &str,
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
trait_span: Span,
|
||||
substr: &Substructure<'_>,
|
||||
is_union: bool,
|
||||
) -> P<Expr> {
|
||||
fn assert_ty_bounds(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
stmts: &mut Vec<ast::Stmt>,
|
||||
ty: P<ast::Ty>,
|
||||
span: Span,
|
||||
helper_name: &str,
|
||||
) {
|
||||
// Generate statement `let _: helper_name<ty>;`,
|
||||
// set the expn ID so we can use the unstable struct.
|
||||
let span = cx.with_def_site_ctxt(span);
|
||||
let assert_path = cx.path_all(
|
||||
span,
|
||||
true,
|
||||
cx.std_path(&[sym::clone, Symbol::intern(helper_name)]),
|
||||
vec![GenericArg::Type(ty)],
|
||||
);
|
||||
stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path)));
|
||||
}
|
||||
fn process_variant(cx: &mut ExtCtxt<'_>, stmts: &mut Vec<ast::Stmt>, variant: &VariantData) {
|
||||
for field in variant.fields() {
|
||||
// let _: AssertParamIsClone<FieldTy>;
|
||||
assert_ty_bounds(cx, stmts, field.ty.clone(), field.span, "AssertParamIsClone");
|
||||
}
|
||||
}
|
||||
|
||||
) -> BlockOrExpr {
|
||||
let mut stmts = Vec::new();
|
||||
let mut seen_type_names = FxHashSet::default();
|
||||
let mut process_variant = |variant: &VariantData| {
|
||||
for field in variant.fields() {
|
||||
// This basic redundancy checking only prevents duplication of
|
||||
// assertions like `AssertParamIsClone<Foo>` where the type is a
|
||||
// simple name. That's enough to get a lot of cases, though.
|
||||
if let Some(name) = field.ty.kind.is_simple_path() && !seen_type_names.insert(name) {
|
||||
// Already produced an assertion for this type.
|
||||
} else {
|
||||
// let _: AssertParamIsClone<FieldTy>;
|
||||
super::assert_ty_bounds(
|
||||
cx,
|
||||
&mut stmts,
|
||||
field.ty.clone(),
|
||||
field.span,
|
||||
&[sym::clone, sym::AssertParamIsClone],
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if is_union {
|
||||
// Just a single assertion for unions, that the union impls `Copy`.
|
||||
// let _: AssertParamIsCopy<Self>;
|
||||
let self_ty = cx.ty_path(cx.path_ident(trait_span, Ident::with_dummy_span(kw::SelfUpper)));
|
||||
assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy");
|
||||
super::assert_ty_bounds(
|
||||
cx,
|
||||
&mut stmts,
|
||||
self_ty,
|
||||
trait_span,
|
||||
&[sym::clone, sym::AssertParamIsCopy],
|
||||
);
|
||||
} else {
|
||||
match *substr.fields {
|
||||
StaticStruct(vdata, ..) => {
|
||||
process_variant(cx, &mut stmts, vdata);
|
||||
process_variant(vdata);
|
||||
}
|
||||
StaticEnum(enum_def, ..) => {
|
||||
for variant in &enum_def.variants {
|
||||
process_variant(cx, &mut stmts, &variant.data);
|
||||
process_variant(&variant.data);
|
||||
}
|
||||
}
|
||||
_ => cx.span_bug(
|
||||
trait_span,
|
||||
&format!("unexpected substructure in shallow `derive({})`", name),
|
||||
&format!("unexpected substructure in simple `derive({})`", name),
|
||||
),
|
||||
}
|
||||
}
|
||||
stmts.push(cx.stmt_expr(cx.expr_deref(trait_span, cx.expr_self(trait_span))));
|
||||
cx.expr_block(cx.block(trait_span, stmts))
|
||||
BlockOrExpr::new_mixed(stmts, Some(cx.expr_deref(trait_span, cx.expr_self(trait_span))))
|
||||
}
|
||||
|
||||
fn cs_clone(
|
||||
@ -162,12 +156,12 @@ fn cs_clone(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
trait_span: Span,
|
||||
substr: &Substructure<'_>,
|
||||
) -> P<Expr> {
|
||||
) -> BlockOrExpr {
|
||||
let ctor_path;
|
||||
let all_fields;
|
||||
let fn_path = cx.std_path(&[sym::clone, sym::Clone, sym::clone]);
|
||||
let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo<'_>| {
|
||||
let args = vec![cx.expr_addr_of(field.span, field.self_.clone())];
|
||||
let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo| {
|
||||
let args = vec![field.self_expr.clone()];
|
||||
cx.expr_call_global(field.span, fn_path.clone(), args)
|
||||
};
|
||||
|
||||
@ -183,15 +177,13 @@ fn cs_clone(
|
||||
all_fields = af;
|
||||
vdata = &variant.data;
|
||||
}
|
||||
EnumNonMatchingCollapsed(..) => {
|
||||
cx.span_bug(trait_span, &format!("non-matching enum variants in `derive({})`", name,))
|
||||
}
|
||||
EnumTag(..) => cx.span_bug(trait_span, &format!("enum tags in `derive({})`", name,)),
|
||||
StaticEnum(..) | StaticStruct(..) => {
|
||||
cx.span_bug(trait_span, &format!("associated function in `derive({})`", name))
|
||||
}
|
||||
}
|
||||
|
||||
match *vdata {
|
||||
let expr = match *vdata {
|
||||
VariantData::Struct(..) => {
|
||||
let fields = all_fields
|
||||
.iter()
|
||||
@ -215,5 +207,6 @@ fn cs_clone(
|
||||
cx.expr_call(trait_span, path, subcalls)
|
||||
}
|
||||
VariantData::Unit(..) => cx.expr_path(ctor_path),
|
||||
}
|
||||
};
|
||||
BlockOrExpr::new_expr(expr)
|
||||
}
|
||||
|
||||
@ -2,10 +2,10 @@ use crate::deriving::generic::ty::*;
|
||||
use crate::deriving::generic::*;
|
||||
use crate::deriving::path_std;
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::{self as ast, Expr, GenericArg, MetaItem};
|
||||
use rustc_ast::{self as ast, MetaItem};
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_expand::base::{Annotatable, ExtCtxt};
|
||||
use rustc_span::symbol::{sym, Ident, Symbol};
|
||||
use rustc_span::symbol::{sym, Ident};
|
||||
use rustc_span::Span;
|
||||
|
||||
pub fn expand_deriving_eq(
|
||||
@ -27,16 +27,14 @@ pub fn expand_deriving_eq(
|
||||
path: path_std!(cmp::Eq),
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: true,
|
||||
methods: vec![MethodDef {
|
||||
name: sym::assert_receiver_is_total_eq,
|
||||
generics: Bounds::empty(),
|
||||
explicit_self: borrowed_explicit_self(),
|
||||
args: vec![],
|
||||
ret_ty: nil_ty(),
|
||||
explicit_self: true,
|
||||
nonself_args: vec![],
|
||||
ret_ty: Unit,
|
||||
attributes: attrs,
|
||||
is_unsafe: false,
|
||||
unify_fieldless_variants: true,
|
||||
combine_substructure: combine_substructure(Box::new(|a, b, c| {
|
||||
cs_total_eq_assert(a, b, c)
|
||||
@ -54,47 +52,39 @@ fn cs_total_eq_assert(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
trait_span: Span,
|
||||
substr: &Substructure<'_>,
|
||||
) -> P<Expr> {
|
||||
fn assert_ty_bounds(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
stmts: &mut Vec<ast::Stmt>,
|
||||
ty: P<ast::Ty>,
|
||||
span: Span,
|
||||
helper_name: &str,
|
||||
) {
|
||||
// Generate statement `let _: helper_name<ty>;`,
|
||||
// set the expn ID so we can use the unstable struct.
|
||||
let span = cx.with_def_site_ctxt(span);
|
||||
let assert_path = cx.path_all(
|
||||
span,
|
||||
true,
|
||||
cx.std_path(&[sym::cmp, Symbol::intern(helper_name)]),
|
||||
vec![GenericArg::Type(ty)],
|
||||
);
|
||||
stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path)));
|
||||
}
|
||||
fn process_variant(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
stmts: &mut Vec<ast::Stmt>,
|
||||
variant: &ast::VariantData,
|
||||
) {
|
||||
for field in variant.fields() {
|
||||
// let _: AssertParamIsEq<FieldTy>;
|
||||
assert_ty_bounds(cx, stmts, field.ty.clone(), field.span, "AssertParamIsEq");
|
||||
}
|
||||
}
|
||||
|
||||
) -> BlockOrExpr {
|
||||
let mut stmts = Vec::new();
|
||||
let mut seen_type_names = FxHashSet::default();
|
||||
let mut process_variant = |variant: &ast::VariantData| {
|
||||
for field in variant.fields() {
|
||||
// This basic redundancy checking only prevents duplication of
|
||||
// assertions like `AssertParamIsEq<Foo>` where the type is a
|
||||
// simple name. That's enough to get a lot of cases, though.
|
||||
if let Some(name) = field.ty.kind.is_simple_path() && !seen_type_names.insert(name) {
|
||||
// Already produced an assertion for this type.
|
||||
} else {
|
||||
// let _: AssertParamIsEq<FieldTy>;
|
||||
super::assert_ty_bounds(
|
||||
cx,
|
||||
&mut stmts,
|
||||
field.ty.clone(),
|
||||
field.span,
|
||||
&[sym::cmp, sym::AssertParamIsEq],
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
match *substr.fields {
|
||||
StaticStruct(vdata, ..) => {
|
||||
process_variant(cx, &mut stmts, vdata);
|
||||
process_variant(vdata);
|
||||
}
|
||||
StaticEnum(enum_def, ..) => {
|
||||
for variant in &enum_def.variants {
|
||||
process_variant(cx, &mut stmts, &variant.data);
|
||||
process_variant(&variant.data);
|
||||
}
|
||||
}
|
||||
_ => cx.span_bug(trait_span, "unexpected substructure in `derive(Eq)`"),
|
||||
}
|
||||
cx.expr_block(cx.block(trait_span, stmts))
|
||||
BlockOrExpr::new_stmts(stmts)
|
||||
}
|
||||
|
||||
@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*;
|
||||
use crate::deriving::generic::*;
|
||||
use crate::deriving::path_std;
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::{self as ast, Expr, MetaItem};
|
||||
use rustc_ast::MetaItem;
|
||||
use rustc_expand::base::{Annotatable, ExtCtxt};
|
||||
use rustc_span::symbol::{sym, Ident};
|
||||
use rustc_span::Span;
|
||||
@ -23,16 +22,14 @@ pub fn expand_deriving_ord(
|
||||
path: path_std!(cmp::Ord),
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: false,
|
||||
methods: vec![MethodDef {
|
||||
name: sym::cmp,
|
||||
generics: Bounds::empty(),
|
||||
explicit_self: borrowed_explicit_self(),
|
||||
args: vec![(borrowed_self(), sym::other)],
|
||||
ret_ty: Literal(path_std!(cmp::Ordering)),
|
||||
explicit_self: true,
|
||||
nonself_args: vec![(self_ref(), sym::other)],
|
||||
ret_ty: Path(path_std!(cmp::Ordering)),
|
||||
attributes: attrs,
|
||||
is_unsafe: false,
|
||||
unify_fieldless_variants: true,
|
||||
combine_substructure: combine_substructure(Box::new(|a, b, c| cs_cmp(a, b, c))),
|
||||
}],
|
||||
@ -42,72 +39,41 @@ pub fn expand_deriving_ord(
|
||||
trait_def.expand(cx, mitem, item, push)
|
||||
}
|
||||
|
||||
pub fn ordering_collapsed(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
span: Span,
|
||||
self_arg_tags: &[Ident],
|
||||
) -> P<ast::Expr> {
|
||||
let lft = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[0]));
|
||||
let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1]));
|
||||
let fn_cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]);
|
||||
cx.expr_call_global(span, fn_cmp_path, vec![lft, rgt])
|
||||
}
|
||||
|
||||
pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
|
||||
pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
|
||||
let test_id = Ident::new(sym::cmp, span);
|
||||
let equals_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal]));
|
||||
|
||||
let equal_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal]));
|
||||
let cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]);
|
||||
|
||||
// Builds:
|
||||
//
|
||||
// match ::std::cmp::Ord::cmp(&self_field1, &other_field1) {
|
||||
// ::std::cmp::Ordering::Equal =>
|
||||
// match ::std::cmp::Ord::cmp(&self_field2, &other_field2) {
|
||||
// ::std::cmp::Ordering::Equal => {
|
||||
// ...
|
||||
// match ::core::cmp::Ord::cmp(&self.x, &other.x) {
|
||||
// ::std::cmp::Ordering::Equal =>
|
||||
// ::core::cmp::Ord::cmp(&self.y, &other.y),
|
||||
// cmp => cmp,
|
||||
// }
|
||||
// cmp => cmp
|
||||
// },
|
||||
// cmp => cmp
|
||||
// }
|
||||
//
|
||||
cs_fold(
|
||||
let expr = cs_fold(
|
||||
// foldr nests the if-elses correctly, leaving the first field
|
||||
// as the outermost one, and the last as the innermost.
|
||||
false,
|
||||
|cx, span, old, self_f, other_fs| {
|
||||
// match new {
|
||||
// ::std::cmp::Ordering::Equal => old,
|
||||
// cmp => cmp
|
||||
// }
|
||||
|
||||
let new = {
|
||||
let [other_f] = other_fs else {
|
||||
cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`");
|
||||
};
|
||||
|
||||
let args =
|
||||
vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())];
|
||||
|
||||
cx.expr_call_global(span, cmp_path.clone(), args)
|
||||
};
|
||||
|
||||
let eq_arm = cx.arm(span, cx.pat_path(span, equals_path.clone()), old);
|
||||
let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id));
|
||||
|
||||
cx.expr_match(span, new, vec![eq_arm, neq_arm])
|
||||
},
|
||||
cx.expr_path(equals_path.clone()),
|
||||
Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| {
|
||||
if self_args.len() != 2 {
|
||||
cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`")
|
||||
} else {
|
||||
ordering_collapsed(cx, span, tag_tuple)
|
||||
}
|
||||
}),
|
||||
cx,
|
||||
span,
|
||||
substr,
|
||||
)
|
||||
|cx, fold| match fold {
|
||||
CsFold::Single(field) => {
|
||||
let [other_expr] = &field.other_selflike_exprs[..] else {
|
||||
cx.span_bug(field.span, "not exactly 2 arguments in `derive(Ord)`");
|
||||
};
|
||||
let args = vec![field.self_expr.clone(), other_expr.clone()];
|
||||
cx.expr_call_global(field.span, cmp_path.clone(), args)
|
||||
}
|
||||
CsFold::Combine(span, expr1, expr2) => {
|
||||
let eq_arm = cx.arm(span, cx.pat_path(span, equal_path.clone()), expr1);
|
||||
let neq_arm =
|
||||
cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id));
|
||||
cx.expr_match(span, expr2, vec![eq_arm, neq_arm])
|
||||
}
|
||||
CsFold::Fieldless => cx.expr_path(equal_path.clone()),
|
||||
},
|
||||
);
|
||||
BlockOrExpr::new_expr(expr)
|
||||
}
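For readers following the `// Builds:` comment above, this is a hand-written equivalent of the expression `cs_cmp` folds together for a two-field struct. It only illustrates the shape of the expansion; the real output uses fully qualified `::core::cmp` paths and def-site spans.

```rust
use std::cmp::Ordering;

struct Point {
    x: i32,
    y: i32,
}

impl Point {
    // Same nested-match shape as the comment above: compare the first field,
    // and only fall through to the next field on `Equal`.
    fn cmp_like_derived_ord(&self, other: &Self) -> Ordering {
        match Ord::cmp(&self.x, &other.x) {
            Ordering::Equal => Ord::cmp(&self.y, &other.y),
            cmp => cmp,
        }
    }
}
```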
|
||||
|
||||
@ -3,7 +3,7 @@ use crate::deriving::generic::*;
|
||||
use crate::deriving::{path_local, path_std};
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::{BinOpKind, Expr, MetaItem};
|
||||
use rustc_ast::{BinOpKind, BorrowKind, Expr, ExprKind, MetaItem, Mutability};
|
||||
use rustc_expand::base::{Annotatable, ExtCtxt};
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::Span;
|
||||
@ -15,8 +15,6 @@ pub fn expand_deriving_partial_eq(
|
||||
item: &Annotatable,
|
||||
push: &mut dyn FnMut(Annotatable),
|
||||
) {
|
||||
// structures are equal if all fields are equal, and non equal, if
|
||||
// any fields are not equal or if the enum variants are different
|
||||
fn cs_op(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
span: Span,
|
||||
@ -24,41 +22,44 @@ pub fn expand_deriving_partial_eq(
|
||||
op: BinOpKind,
|
||||
combiner: BinOpKind,
|
||||
base: bool,
|
||||
) -> P<Expr> {
|
||||
let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| {
|
||||
let [other_f] = other_fs else {
|
||||
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`");
|
||||
};
|
||||
|
||||
cx.expr_binary(span, op, self_f, other_f.clone())
|
||||
};
|
||||
|
||||
cs_fold1(
|
||||
) -> BlockOrExpr {
|
||||
let expr = cs_fold(
|
||||
true, // use foldl
|
||||
|cx, span, subexpr, self_f, other_fs| {
|
||||
let eq = op(cx, span, self_f, other_fs);
|
||||
cx.expr_binary(span, combiner, subexpr, eq)
|
||||
},
|
||||
|cx, args| {
|
||||
match args {
|
||||
Some((span, self_f, other_fs)) => {
|
||||
// Special-case the base case to generate cleaner code.
|
||||
op(cx, span, self_f, other_fs)
|
||||
}
|
||||
None => cx.expr_bool(span, base),
|
||||
}
|
||||
},
|
||||
Box::new(|cx, span, _, _| cx.expr_bool(span, !base)),
|
||||
cx,
|
||||
span,
|
||||
substr,
|
||||
)
|
||||
|cx, fold| match fold {
|
||||
CsFold::Single(field) => {
|
||||
let [other_expr] = &field.other_selflike_exprs[..] else {
|
||||
cx.span_bug(field.span, "not exactly 2 arguments in `derive(PartialEq)`");
|
||||
};
|
||||
|
||||
// We received `&T` arguments. Convert them to `T` by
|
||||
// stripping `&` or adding `*`. This isn't necessary for
|
||||
// type checking, but it results in much better error
|
||||
// messages if something goes wrong.
|
||||
let convert = |expr: &P<Expr>| {
|
||||
if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, inner) =
|
||||
&expr.kind
|
||||
{
|
||||
inner.clone()
|
||||
} else {
|
||||
cx.expr_deref(field.span, expr.clone())
|
||||
}
|
||||
};
|
||||
cx.expr_binary(field.span, op, convert(&field.self_expr), convert(other_expr))
|
||||
}
|
||||
CsFold::Combine(span, expr1, expr2) => cx.expr_binary(span, combiner, expr1, expr2),
|
||||
CsFold::Fieldless => cx.expr_bool(span, base),
|
||||
},
|
||||
);
|
||||
BlockOrExpr::new_expr(expr)
|
||||
}
|
||||
|
||||
fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
|
||||
fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
|
||||
cs_op(cx, span, substr, BinOpKind::Eq, BinOpKind::And, true)
|
||||
}
|
||||
fn cs_ne(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
|
||||
fn cs_ne(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
|
||||
cs_op(cx, span, substr, BinOpKind::Ne, BinOpKind::Or, false)
|
||||
}
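A rough, hand-written analogue of what `cs_op` folds together for a two-field struct: field comparisons joined with the `combiner` (`&&` for `eq`, `||` for `ne`), with the `base` literal covering the fieldless case. The deref trick in the `convert` closure above means the generated code compares `T` values rather than `&T`, purely so type errors point at the field type.

```rust
struct Pair {
    a: u32,
    b: String,
}

// Illustration only: the derive emits the same chain of binary operators, but
// through `::core` paths and with unified fieldless variants handled separately.
impl PartialEq for Pair {
    fn eq(&self, other: &Self) -> bool {
        self.a == other.a && self.b == other.b
    }
    fn ne(&self, other: &Self) -> bool {
        self.a != other.a || self.b != other.b
    }
}
```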
|
||||
|
||||
@ -69,11 +70,10 @@ pub fn expand_deriving_partial_eq(
|
||||
MethodDef {
|
||||
name: $name,
|
||||
generics: Bounds::empty(),
|
||||
explicit_self: borrowed_explicit_self(),
|
||||
args: vec![(borrowed_self(), sym::other)],
|
||||
ret_ty: Literal(path_local!(bool)),
|
||||
explicit_self: true,
|
||||
nonself_args: vec![(self_ref(), sym::other)],
|
||||
ret_ty: Path(path_local!(bool)),
|
||||
attributes: attrs,
|
||||
is_unsafe: false,
|
||||
unify_fieldless_variants: true,
|
||||
combine_substructure: combine_substructure(Box::new(|a, b, c| $f(a, b, c))),
|
||||
}
|
||||
@ -102,7 +102,6 @@ pub fn expand_deriving_partial_eq(
|
||||
path: path_std!(cmp::PartialEq),
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: false,
|
||||
methods,
|
||||
associated_types: Vec::new(),
|
||||
|
||||
@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*;
|
||||
use crate::deriving::generic::*;
|
||||
use crate::deriving::{path_std, pathvec_std};
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::{Expr, MetaItem};
|
||||
use rustc_ast::MetaItem;
|
||||
use rustc_expand::base::{Annotatable, ExtCtxt};
|
||||
use rustc_span::symbol::{sym, Ident};
|
||||
use rustc_span::Span;
|
||||
@ -15,13 +14,9 @@ pub fn expand_deriving_partial_ord(
|
||||
item: &Annotatable,
|
||||
push: &mut dyn FnMut(Annotatable),
|
||||
) {
|
||||
let ordering_ty = Literal(path_std!(cmp::Ordering));
|
||||
let ret_ty = Literal(Path::new_(
|
||||
pathvec_std!(option::Option),
|
||||
None,
|
||||
vec![Box::new(ordering_ty)],
|
||||
PathKind::Std,
|
||||
));
|
||||
let ordering_ty = Path(path_std!(cmp::Ordering));
|
||||
let ret_ty =
|
||||
Path(Path::new_(pathvec_std!(option::Option), vec![Box::new(ordering_ty)], PathKind::Std));
|
||||
|
||||
let inline = cx.meta_word(span, sym::inline);
|
||||
let attrs = vec![cx.attribute(inline)];
|
||||
@ -29,11 +24,10 @@ pub fn expand_deriving_partial_ord(
|
||||
let partial_cmp_def = MethodDef {
|
||||
name: sym::partial_cmp,
|
||||
generics: Bounds::empty(),
|
||||
explicit_self: borrowed_explicit_self(),
|
||||
args: vec![(borrowed_self(), sym::other)],
|
||||
explicit_self: true,
|
||||
nonself_args: vec![(self_ref(), sym::other)],
|
||||
ret_ty,
|
||||
attributes: attrs,
|
||||
is_unsafe: false,
|
||||
unify_fieldless_variants: true,
|
||||
combine_substructure: combine_substructure(Box::new(|cx, span, substr| {
|
||||
cs_partial_cmp(cx, span, substr)
|
||||
@ -46,7 +40,6 @@ pub fn expand_deriving_partial_ord(
|
||||
path: path_std!(cmp::PartialOrd),
|
||||
additional_bounds: vec![],
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: false,
|
||||
methods: vec![partial_cmp_def],
|
||||
associated_types: Vec::new(),
|
||||
@ -54,67 +47,42 @@ pub fn expand_deriving_partial_ord(
|
||||
trait_def.expand(cx, mitem, item, push)
|
||||
}
|
||||
|
||||
pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
|
||||
pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
|
||||
let test_id = Ident::new(sym::cmp, span);
|
||||
let ordering = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal]));
|
||||
let ordering_expr = cx.expr_path(ordering.clone());
|
||||
let equals_expr = cx.expr_some(span, ordering_expr);
|
||||
|
||||
let equal_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal]));
|
||||
let partial_cmp_path = cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]);
|
||||
|
||||
// Builds:
|
||||
//
|
||||
// match ::std::cmp::PartialOrd::partial_cmp(&self_field1, &other_field1) {
|
||||
// ::std::option::Option::Some(::std::cmp::Ordering::Equal) =>
|
||||
// match ::std::cmp::PartialOrd::partial_cmp(&self_field2, &other_field2) {
|
||||
// ::std::option::Option::Some(::std::cmp::Ordering::Equal) => {
|
||||
// ...
|
||||
// match ::core::cmp::PartialOrd::partial_cmp(&self.x, &other.x) {
|
||||
// ::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
|
||||
// ::core::cmp::PartialOrd::partial_cmp(&self.y, &other.y),
|
||||
// cmp => cmp,
|
||||
// }
|
||||
// cmp => cmp
|
||||
// },
|
||||
// cmp => cmp
|
||||
// }
|
||||
//
|
||||
cs_fold(
|
||||
let expr = cs_fold(
|
||||
// foldr nests the if-elses correctly, leaving the first field
|
||||
// as the outermost one, and the last as the innermost.
|
||||
false,
|
||||
|cx, span, old, self_f, other_fs| {
|
||||
// match new {
|
||||
// Some(::std::cmp::Ordering::Equal) => old,
|
||||
// cmp => cmp
|
||||
// }
|
||||
|
||||
let new = {
|
||||
let [other_f] = other_fs else {
|
||||
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`");
|
||||
};
|
||||
|
||||
let args =
|
||||
vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())];
|
||||
|
||||
cx.expr_call_global(span, partial_cmp_path.clone(), args)
|
||||
};
|
||||
|
||||
let eq_arm = cx.arm(span, cx.pat_some(span, cx.pat_path(span, ordering.clone())), old);
|
||||
let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id));
|
||||
|
||||
cx.expr_match(span, new, vec![eq_arm, neq_arm])
|
||||
},
|
||||
equals_expr,
|
||||
Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| {
|
||||
if self_args.len() != 2 {
|
||||
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`")
|
||||
} else {
|
||||
let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0]));
|
||||
let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1]));
|
||||
let fn_partial_cmp_path =
|
||||
cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]);
|
||||
cx.expr_call_global(span, fn_partial_cmp_path, vec![lft, rgt])
|
||||
}
|
||||
}),
|
||||
cx,
|
||||
span,
|
||||
substr,
|
||||
)
|
||||
|cx, fold| match fold {
|
||||
CsFold::Single(field) => {
|
||||
let [other_expr] = &field.other_selflike_exprs[..] else {
|
||||
cx.span_bug(field.span, "not exactly 2 arguments in `derive(Ord)`");
|
||||
};
|
||||
let args = vec![field.self_expr.clone(), other_expr.clone()];
|
||||
cx.expr_call_global(field.span, partial_cmp_path.clone(), args)
|
||||
}
|
||||
CsFold::Combine(span, expr1, expr2) => {
|
||||
let eq_arm =
|
||||
cx.arm(span, cx.pat_some(span, cx.pat_path(span, equal_path.clone())), expr1);
|
||||
let neq_arm =
|
||||
cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id));
|
||||
cx.expr_match(span, expr2, vec![eq_arm, neq_arm])
|
||||
}
|
||||
CsFold::Fieldless => cx.expr_some(span, cx.expr_path(equal_path.clone())),
|
||||
},
|
||||
);
|
||||
BlockOrExpr::new_expr(expr)
|
||||
}
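As with `Ord` above, a hand-written equivalent of the folded expression for a two-field struct, showing why `Some(Equal)` is the only arm that continues to the next field. This illustrates the shape only; the real expansion uses `::core` paths.

```rust
use std::cmp::Ordering;

struct Point {
    x: f64,
    y: f64,
}

impl Point {
    // Same shape as the `// Builds:` comment above for `derive(PartialOrd)`.
    fn partial_cmp_like_derived(&self, other: &Self) -> Option<Ordering> {
        match PartialOrd::partial_cmp(&self.x, &other.x) {
            Some(Ordering::Equal) => PartialOrd::partial_cmp(&self.y, &other.y),
            cmp => cmp,
        }
    }
}
```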
|
||||
|
||||
@ -2,15 +2,10 @@ use crate::deriving::generic::ty::*;
|
||||
use crate::deriving::generic::*;
|
||||
use crate::deriving::path_std;
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::{self as ast, Expr, LocalKind, MetaItem};
|
||||
use rustc_ast::{self as ast, MetaItem};
|
||||
use rustc_expand::base::{Annotatable, ExtCtxt};
|
||||
use rustc_span::symbol::{sym, Ident};
|
||||
use rustc_span::{Span, DUMMY_SP};
|
||||
|
||||
fn make_mut_borrow(cx: &mut ExtCtxt<'_>, sp: Span, expr: P<Expr>) -> P<Expr> {
|
||||
cx.expr(sp, ast::ExprKind::AddrOf(ast::BorrowKind::Ref, ast::Mutability::Mut, expr))
|
||||
}
|
||||
use rustc_span::symbol::{sym, Ident, Symbol};
|
||||
use rustc_span::Span;
|
||||
|
||||
pub fn expand_deriving_debug(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
@ -20,8 +15,7 @@ pub fn expand_deriving_debug(
|
||||
push: &mut dyn FnMut(Annotatable),
|
||||
) {
|
||||
// &mut ::std::fmt::Formatter
|
||||
let fmtr =
|
||||
Ptr(Box::new(Literal(path_std!(fmt::Formatter))), Borrowed(None, ast::Mutability::Mut));
|
||||
let fmtr = Ref(Box::new(Path(path_std!(fmt::Formatter))), ast::Mutability::Mut);
|
||||
|
||||
let trait_def = TraitDef {
|
||||
span,
|
||||
@ -29,16 +23,14 @@ pub fn expand_deriving_debug(
|
||||
path: path_std!(fmt::Debug),
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: false,
|
||||
methods: vec![MethodDef {
|
||||
name: sym::fmt,
|
||||
generics: Bounds::empty(),
|
||||
explicit_self: borrowed_explicit_self(),
|
||||
args: vec![(fmtr, sym::f)],
|
||||
ret_ty: Literal(path_std!(fmt::Result)),
|
||||
explicit_self: true,
|
||||
nonself_args: vec![(fmtr, sym::f)],
|
||||
ret_ty: Path(path_std!(fmt::Result)),
|
||||
attributes: Vec::new(),
|
||||
is_unsafe: false,
|
||||
unify_fieldless_variants: false,
|
||||
combine_substructure: combine_substructure(Box::new(|a, b, c| {
|
||||
show_substructure(a, b, c)
|
||||
@ -49,15 +41,11 @@ pub fn expand_deriving_debug(
|
||||
trait_def.expand(cx, mitem, item, push)
|
||||
}
|
||||
|
||||
/// We use the debug builders to do the heavy lifting here
|
||||
fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
|
||||
// build fmt.debug_struct(<name>).field(<fieldname>, &<fieldval>)....build()
|
||||
// or fmt.debug_tuple(<name>).field(&<fieldval>)....build()
|
||||
// based on the "shape".
|
||||
fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
|
||||
let (ident, vdata, fields) = match substr.fields {
|
||||
Struct(vdata, fields) => (substr.type_ident, *vdata, fields),
|
||||
EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields),
|
||||
EnumNonMatchingCollapsed(..) | StaticStruct(..) | StaticEnum(..) => {
|
||||
EnumTag(..) | StaticStruct(..) | StaticEnum(..) => {
|
||||
cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`")
|
||||
}
|
||||
};
|
||||
@ -65,95 +53,129 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
|
||||
// We want to make sure we have the ctxt set so that we can use unstable methods
|
||||
let span = cx.with_def_site_ctxt(span);
|
||||
let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked));
|
||||
let fmt = substr.nonself_args[0].clone();
|
||||
let fmt = substr.nonselflike_args[0].clone();
|
||||
|
||||
// Special fast path for unit variants. In the common case of an enum that is entirely unit
|
||||
// variants (i.e. a C-like enum), this fast path allows LLVM to eliminate the entire switch in
|
||||
// favor of a lookup table.
|
||||
if let ast::VariantData::Unit(..) = vdata {
|
||||
// Struct and tuples are similar enough that we use the same code for both,
|
||||
// with some extra pieces for structs due to the field names.
|
||||
let (is_struct, args_per_field) = match vdata {
|
||||
ast::VariantData::Unit(..) => {
|
||||
// Special fast path for unit variants.
|
||||
assert!(fields.is_empty());
|
||||
(false, 0)
|
||||
}
|
||||
ast::VariantData::Tuple(..) => (false, 1),
|
||||
ast::VariantData::Struct(..) => (true, 2),
|
||||
};
|
||||
|
||||
// The number of fields that can be handled without an array.
|
||||
const CUTOFF: usize = 5;
|
||||
|
||||
if fields.is_empty() {
|
||||
// Special case for no fields.
|
||||
let fn_path_write_str = cx.std_path(&[sym::fmt, sym::Formatter, sym::write_str]);
|
||||
let expr = cx.expr_call_global(span, fn_path_write_str, vec![fmt, name]);
|
||||
let stmts = vec![cx.stmt_expr(expr)];
|
||||
let block = cx.block(span, stmts);
|
||||
return cx.expr_block(block);
|
||||
}
|
||||
BlockOrExpr::new_expr(expr)
|
||||
} else if fields.len() <= CUTOFF {
|
||||
// Few enough fields that we can use a specific-length method.
|
||||
let debug = if is_struct {
|
||||
format!("debug_struct_field{}_finish", fields.len())
|
||||
} else {
|
||||
format!("debug_tuple_field{}_finish", fields.len())
|
||||
};
|
||||
let fn_path_debug = cx.std_path(&[sym::fmt, sym::Formatter, Symbol::intern(&debug)]);
|
||||
|
||||
let builder = Ident::new(sym::debug_trait_builder, span);
|
||||
let builder_expr = cx.expr_ident(span, builder);
|
||||
|
||||
let mut stmts = Vec::with_capacity(fields.len() + 2);
|
||||
let fn_path_finish;
|
||||
match vdata {
|
||||
ast::VariantData::Unit(..) => {
|
||||
cx.span_bug(span, "unit variants should have been handled above");
|
||||
}
|
||||
ast::VariantData::Tuple(..) => {
|
||||
// tuple struct/"normal" variant
|
||||
let fn_path_debug_tuple = cx.std_path(&[sym::fmt, sym::Formatter, sym::debug_tuple]);
|
||||
let expr = cx.expr_call_global(span, fn_path_debug_tuple, vec![fmt, name]);
|
||||
let expr = make_mut_borrow(cx, span, expr);
|
||||
stmts.push(cx.stmt_let(span, false, builder, expr));
|
||||
|
||||
for field in fields {
|
||||
// Use double indirection to make sure this works for unsized types
|
||||
let field = cx.expr_addr_of(field.span, field.self_.clone());
|
||||
let field = cx.expr_addr_of(field.span, field);
|
||||
|
||||
let fn_path_field = cx.std_path(&[sym::fmt, sym::DebugTuple, sym::field]);
|
||||
let expr =
|
||||
cx.expr_call_global(span, fn_path_field, vec![builder_expr.clone(), field]);
|
||||
|
||||
// Use `let _ = expr;` to avoid triggering the
|
||||
// unused_results lint.
|
||||
stmts.push(stmt_let_underscore(cx, span, expr));
|
||||
}
|
||||
|
||||
fn_path_finish = cx.std_path(&[sym::fmt, sym::DebugTuple, sym::finish]);
|
||||
}
|
||||
ast::VariantData::Struct(..) => {
|
||||
// normal struct/struct variant
|
||||
let fn_path_debug_struct = cx.std_path(&[sym::fmt, sym::Formatter, sym::debug_struct]);
|
||||
let expr = cx.expr_call_global(span, fn_path_debug_struct, vec![fmt, name]);
|
||||
let expr = make_mut_borrow(cx, span, expr);
|
||||
stmts.push(cx.stmt_let(DUMMY_SP, false, builder, expr));
|
||||
|
||||
for field in fields {
|
||||
let mut args = Vec::with_capacity(2 + fields.len() * args_per_field);
|
||||
args.extend([fmt, name]);
|
||||
for i in 0..fields.len() {
|
||||
let field = &fields[i];
|
||||
if is_struct {
|
||||
let name = cx.expr_lit(
|
||||
field.span,
|
||||
ast::LitKind::Str(field.name.unwrap().name, ast::StrStyle::Cooked),
|
||||
);
|
||||
|
||||
// Use double indirection to make sure this works for unsized types
|
||||
let fn_path_field = cx.std_path(&[sym::fmt, sym::DebugStruct, sym::field]);
|
||||
let field = cx.expr_addr_of(field.span, field.self_.clone());
|
||||
let field = cx.expr_addr_of(field.span, field);
|
||||
let expr = cx.expr_call_global(
|
||||
span,
|
||||
fn_path_field,
|
||||
vec![builder_expr.clone(), name, field],
|
||||
);
|
||||
stmts.push(stmt_let_underscore(cx, span, expr));
|
||||
args.push(name);
|
||||
}
|
||||
fn_path_finish = cx.std_path(&[sym::fmt, sym::DebugStruct, sym::finish]);
|
||||
// Use an extra indirection to make sure this works for unsized types.
|
||||
let field = cx.expr_addr_of(field.span, field.self_expr.clone());
|
||||
args.push(field);
|
||||
}
|
||||
let expr = cx.expr_call_global(span, fn_path_debug, args);
|
||||
BlockOrExpr::new_expr(expr)
|
||||
} else {
|
||||
// Enough fields that we must use the any-length method.
|
||||
let mut name_exprs = Vec::with_capacity(fields.len());
|
||||
let mut value_exprs = Vec::with_capacity(fields.len());
|
||||
|
||||
for field in fields {
|
||||
if is_struct {
|
||||
name_exprs.push(cx.expr_lit(
|
||||
field.span,
|
||||
ast::LitKind::Str(field.name.unwrap().name, ast::StrStyle::Cooked),
|
||||
));
|
||||
}
|
||||
|
||||
// Use an extra indirection to make sure this works for unsized types.
|
||||
let field = cx.expr_addr_of(field.span, field.self_expr.clone());
|
||||
value_exprs.push(field);
|
||||
}
|
||||
|
||||
// `let names: &'static _ = &["field1", "field2"];`
|
||||
let names_let = if is_struct {
|
||||
let lt_static = Some(cx.lifetime_static(span));
|
||||
let ty_static_ref =
|
||||
cx.ty_rptr(span, cx.ty_infer(span), lt_static, ast::Mutability::Not);
|
||||
Some(cx.stmt_let_ty(
|
||||
span,
|
||||
false,
|
||||
Ident::new(sym::names, span),
|
||||
Some(ty_static_ref),
|
||||
cx.expr_array_ref(span, name_exprs),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// `let values: &[&dyn Debug] = &[&&self.field1, &&self.field2];`
|
||||
let path_debug = cx.path_global(span, cx.std_path(&[sym::fmt, sym::Debug]));
|
||||
let ty_dyn_debug = cx.ty(
|
||||
span,
|
||||
ast::TyKind::TraitObject(vec![cx.trait_bound(path_debug)], ast::TraitObjectSyntax::Dyn),
|
||||
);
|
||||
let ty_slice = cx.ty(
|
||||
span,
|
||||
ast::TyKind::Slice(cx.ty_rptr(span, ty_dyn_debug, None, ast::Mutability::Not)),
|
||||
);
|
||||
let values_let = cx.stmt_let_ty(
|
||||
span,
|
||||
false,
|
||||
Ident::new(sym::values, span),
|
||||
Some(cx.ty_rptr(span, ty_slice, None, ast::Mutability::Not)),
|
||||
cx.expr_array_ref(span, value_exprs),
|
||||
);
|
||||
|
||||
// `fmt::Formatter::debug_struct_fields_finish(fmt, name, names, values)` or
|
||||
// `fmt::Formatter::debug_tuple_fields_finish(fmt, name, values)`
|
||||
let sym_debug = if is_struct {
|
||||
sym::debug_struct_fields_finish
|
||||
} else {
|
||||
sym::debug_tuple_fields_finish
|
||||
};
|
||||
let fn_path_debug_internal = cx.std_path(&[sym::fmt, sym::Formatter, sym_debug]);
|
||||
|
||||
let mut args = Vec::with_capacity(4);
|
||||
args.push(fmt);
|
||||
args.push(name);
|
||||
if is_struct {
|
||||
args.push(cx.expr_ident(span, Ident::new(sym::names, span)));
|
||||
}
|
||||
args.push(cx.expr_ident(span, Ident::new(sym::values, span)));
|
||||
let expr = cx.expr_call_global(span, fn_path_debug_internal, args);
|
||||
|
||||
let mut stmts = Vec::with_capacity(3);
|
||||
if is_struct {
|
||||
stmts.push(names_let.unwrap());
|
||||
}
|
||||
stmts.push(values_let);
|
||||
BlockOrExpr::new_mixed(stmts, Some(expr))
|
||||
}
|
||||
|
||||
let expr = cx.expr_call_global(span, fn_path_finish, vec![builder_expr]);
|
||||
|
||||
stmts.push(cx.stmt_expr(expr));
|
||||
let block = cx.block(span, stmts);
|
||||
cx.expr_block(block)
|
||||
}
|
||||
|
||||
fn stmt_let_underscore(cx: &mut ExtCtxt<'_>, sp: Span, expr: P<ast::Expr>) -> ast::Stmt {
|
||||
let local = P(ast::Local {
|
||||
pat: cx.pat_wild(sp),
|
||||
ty: None,
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
kind: LocalKind::Init(expr),
|
||||
span: sp,
|
||||
attrs: ast::AttrVec::new(),
|
||||
tokens: None,
|
||||
});
|
||||
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp }
|
||||
}
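Behaviourally, the three code paths above (no fields, up to `CUTOFF` fields, any number of fields) all print the same thing as the long-standing builder-based expansion. The sketch below uses the stable public builder API rather than the internal `debug_struct_field2_finish`-style helpers the derive now calls, so it is an equivalent illustration, not the literal output.

```rust
use std::fmt;

struct Point {
    x: i32,
    y: i32,
}

// What `#[derive(Debug)]` on `Point` prints, written with the public API.
impl fmt::Debug for Point {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Point")
            .field("x", &self.x)
            .field("y", &self.y)
            .finish()
    }
}
```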
|
||||
|
||||
@ -23,40 +23,32 @@ pub fn expand_deriving_rustc_decodable(
|
||||
let trait_def = TraitDef {
|
||||
span,
|
||||
attributes: Vec::new(),
|
||||
path: Path::new_(vec![krate, sym::Decodable], None, vec![], PathKind::Global),
|
||||
path: Path::new_(vec![krate, sym::Decodable], vec![], PathKind::Global),
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: false,
|
||||
methods: vec![MethodDef {
|
||||
name: sym::decode,
|
||||
generics: Bounds {
|
||||
bounds: vec![(
|
||||
typaram,
|
||||
vec![Path::new_(vec![krate, sym::Decoder], None, vec![], PathKind::Global)],
|
||||
vec![Path::new_(vec![krate, sym::Decoder], vec![], PathKind::Global)],
|
||||
)],
|
||||
},
|
||||
explicit_self: None,
|
||||
args: vec![(
|
||||
Ptr(Box::new(Literal(Path::new_local(typaram))), Borrowed(None, Mutability::Mut)),
|
||||
explicit_self: false,
|
||||
nonself_args: vec![(
|
||||
Ref(Box::new(Path(Path::new_local(typaram))), Mutability::Mut),
|
||||
sym::d,
|
||||
)],
|
||||
ret_ty: Literal(Path::new_(
|
||||
ret_ty: Path(Path::new_(
|
||||
pathvec_std!(result::Result),
|
||||
None,
|
||||
vec![
|
||||
Box::new(Self_),
|
||||
Box::new(Literal(Path::new_(
|
||||
vec![typaram, sym::Error],
|
||||
None,
|
||||
vec![],
|
||||
PathKind::Local,
|
||||
))),
|
||||
Box::new(Path(Path::new_(vec![typaram, sym::Error], vec![], PathKind::Local))),
|
||||
],
|
||||
PathKind::Std,
|
||||
)),
|
||||
attributes: Vec::new(),
|
||||
is_unsafe: false,
|
||||
unify_fieldless_variants: false,
|
||||
combine_substructure: combine_substructure(Box::new(|a, b, c| {
|
||||
decodable_substructure(a, b, c, krate)
|
||||
@ -73,8 +65,8 @@ fn decodable_substructure(
|
||||
trait_span: Span,
|
||||
substr: &Substructure<'_>,
|
||||
krate: Symbol,
|
||||
) -> P<Expr> {
|
||||
let decoder = substr.nonself_args[0].clone();
|
||||
) -> BlockOrExpr {
|
||||
let decoder = substr.nonselflike_args[0].clone();
|
||||
let recurse = vec![
|
||||
Ident::new(krate, trait_span),
|
||||
Ident::new(sym::Decodable, trait_span),
|
||||
@ -85,7 +77,7 @@ fn decodable_substructure(
|
||||
let blkarg = Ident::new(sym::_d, trait_span);
|
||||
let blkdecoder = cx.expr_ident(trait_span, blkarg);
|
||||
|
||||
match *substr.fields {
|
||||
let expr = match *substr.fields {
|
||||
StaticStruct(_, ref summary) => {
|
||||
let nfields = match *summary {
|
||||
Unnamed(ref fields, _) => fields.len(),
|
||||
@ -162,14 +154,13 @@ fn decodable_substructure(
|
||||
cx.expr_match(trait_span, cx.expr_ident(trait_span, variant), arms),
|
||||
);
|
||||
let lambda = cx.lambda(trait_span, vec![blkarg, variant], result);
|
||||
let variant_vec = cx.expr_vec(trait_span, variants);
|
||||
let variant_vec = cx.expr_addr_of(trait_span, variant_vec);
|
||||
let variant_array_ref = cx.expr_array_ref(trait_span, variants);
|
||||
let fn_read_enum_variant_path: Vec<_> =
|
||||
cx.def_site_path(&[sym::rustc_serialize, sym::Decoder, sym::read_enum_variant]);
|
||||
let result = cx.expr_call_global(
|
||||
trait_span,
|
||||
fn_read_enum_variant_path,
|
||||
vec![blkdecoder, variant_vec, lambda],
|
||||
vec![blkdecoder, variant_array_ref, lambda],
|
||||
);
|
||||
let fn_read_enum_path: Vec<_> =
|
||||
cx.def_site_path(&[sym::rustc_serialize, sym::Decoder, sym::read_enum]);
|
||||
@ -185,7 +176,8 @@ fn decodable_substructure(
|
||||
)
|
||||
}
|
||||
_ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)"),
|
||||
}
|
||||
};
|
||||
BlockOrExpr::new_expr(expr)
|
||||
}
|
||||
|
||||
/// Creates a decoder for a single enum variant/struct:
|
||||
|
||||
@ -1,11 +1,10 @@
|
||||
use crate::deriving::generic::ty::*;
|
||||
use crate::deriving::generic::*;
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::walk_list;
|
||||
use rustc_ast::EnumDef;
|
||||
use rustc_ast::VariantData;
|
||||
use rustc_ast::{Expr, MetaItem};
|
||||
use rustc_errors::Applicability;
|
||||
use rustc_expand::base::{Annotatable, DummyResult, ExtCtxt};
|
||||
use rustc_span::symbol::Ident;
|
||||
@ -16,7 +15,7 @@ use smallvec::SmallVec;
|
||||
pub fn expand_deriving_default(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
span: Span,
|
||||
mitem: &MetaItem,
|
||||
mitem: &ast::MetaItem,
|
||||
item: &Annotatable,
|
||||
push: &mut dyn FnMut(Annotatable),
|
||||
) {
|
||||
@ -30,16 +29,14 @@ pub fn expand_deriving_default(
|
||||
path: Path::new(vec![kw::Default, sym::Default]),
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: false,
|
||||
methods: vec![MethodDef {
|
||||
name: kw::Default,
|
||||
generics: Bounds::empty(),
|
||||
explicit_self: None,
|
||||
args: Vec::new(),
|
||||
explicit_self: false,
|
||||
nonself_args: Vec::new(),
|
||||
ret_ty: Self_,
|
||||
attributes: attrs,
|
||||
is_unsafe: false,
|
||||
unify_fieldless_variants: false,
|
||||
combine_substructure: combine_substructure(Box::new(|cx, trait_span, substr| {
|
||||
match substr.fields {
|
||||
@ -61,12 +58,12 @@ fn default_struct_substructure(
|
||||
trait_span: Span,
|
||||
substr: &Substructure<'_>,
|
||||
summary: &StaticFields,
|
||||
) -> P<Expr> {
|
||||
) -> BlockOrExpr {
|
||||
// Note that `kw::Default` is "default" and `sym::Default` is "Default"!
|
||||
let default_ident = cx.std_path(&[kw::Default, sym::Default, kw::Default]);
|
||||
let default_call = |span| cx.expr_call_global(span, default_ident.clone(), Vec::new());
|
||||
|
||||
match summary {
|
||||
let expr = match summary {
|
||||
Unnamed(ref fields, is_tuple) => {
|
||||
if !is_tuple {
|
||||
cx.expr_ident(trait_span, substr.type_ident)
|
||||
@ -82,31 +79,27 @@ fn default_struct_substructure(
|
||||
.collect();
|
||||
cx.expr_struct_ident(trait_span, substr.type_ident, default_fields)
|
||||
}
|
||||
}
|
||||
};
|
||||
BlockOrExpr::new_expr(expr)
|
||||
}
|
||||
|
||||
fn default_enum_substructure(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
trait_span: Span,
|
||||
enum_def: &EnumDef,
|
||||
) -> P<Expr> {
|
||||
let Ok(default_variant) = extract_default_variant(cx, enum_def, trait_span) else {
|
||||
return DummyResult::raw_expr(trait_span, true);
|
||||
) -> BlockOrExpr {
|
||||
let expr = if let Ok(default_variant) = extract_default_variant(cx, enum_def, trait_span)
|
||||
&& let Ok(_) = validate_default_attribute(cx, default_variant)
|
||||
{
|
||||
// We now know there is exactly one unit variant with exactly one `#[default]` attribute.
|
||||
cx.expr_path(cx.path(
|
||||
default_variant.span,
|
||||
vec![Ident::new(kw::SelfUpper, default_variant.span), default_variant.ident],
|
||||
))
|
||||
} else {
|
||||
DummyResult::raw_expr(trait_span, true)
|
||||
};
|
||||
|
||||
// At this point, we know that there is exactly one variant with a `#[default]` attribute. The
|
||||
// attribute hasn't yet been validated.
|
||||
|
||||
if let Err(()) = validate_default_attribute(cx, default_variant) {
|
||||
return DummyResult::raw_expr(trait_span, true);
|
||||
}
|
||||
|
||||
// We now know there is exactly one unit variant with exactly one `#[default]` attribute.
|
||||
|
||||
cx.expr_path(cx.path(
|
||||
default_variant.span,
|
||||
vec![Ident::new(kw::SelfUpper, default_variant.span), default_variant.ident],
|
||||
))
|
||||
BlockOrExpr::new_expr(expr)
|
||||
}
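For reference, the input shape this function accepts is exactly one unit variant marked with a single `#[default]` attribute (stable since Rust 1.62), which the expansion turns into a plain `Self::Variant` path.

```rust
// Accepted input...
#[derive(Default)]
enum Mode {
    #[default]
    Read,
    Write,
}

// ...and the hand-written equivalent of what the derive produces:
//
// impl Default for Mode {
//     fn default() -> Self {
//         Self::Read
//     }
// }
```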
|
||||
|
||||
fn extract_default_variant<'a>(
|
||||
|
||||
@ -89,8 +89,7 @@ use crate::deriving::generic::ty::*;
|
||||
use crate::deriving::generic::*;
|
||||
use crate::deriving::pathvec_std;
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::{Expr, ExprKind, MetaItem, Mutability};
|
||||
use rustc_ast::{ExprKind, MetaItem, Mutability};
|
||||
use rustc_expand::base::{Annotatable, ExtCtxt};
|
||||
use rustc_span::symbol::{sym, Ident, Symbol};
|
||||
use rustc_span::Span;
|
||||
@ -108,40 +107,32 @@ pub fn expand_deriving_rustc_encodable(
|
||||
let trait_def = TraitDef {
|
||||
span,
|
||||
attributes: Vec::new(),
|
||||
path: Path::new_(vec![krate, sym::Encodable], None, vec![], PathKind::Global),
|
||||
path: Path::new_(vec![krate, sym::Encodable], vec![], PathKind::Global),
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: false,
|
||||
methods: vec![MethodDef {
|
||||
name: sym::encode,
|
||||
generics: Bounds {
|
||||
bounds: vec![(
|
||||
typaram,
|
||||
vec![Path::new_(vec![krate, sym::Encoder], None, vec![], PathKind::Global)],
|
||||
vec![Path::new_(vec![krate, sym::Encoder], vec![], PathKind::Global)],
|
||||
)],
|
||||
},
|
||||
explicit_self: borrowed_explicit_self(),
|
||||
args: vec![(
|
||||
Ptr(Box::new(Literal(Path::new_local(typaram))), Borrowed(None, Mutability::Mut)),
|
||||
explicit_self: true,
|
||||
nonself_args: vec![(
|
||||
Ref(Box::new(Path(Path::new_local(typaram))), Mutability::Mut),
|
||||
sym::s,
|
||||
)],
|
||||
ret_ty: Literal(Path::new_(
|
||||
ret_ty: Path(Path::new_(
|
||||
pathvec_std!(result::Result),
|
||||
None,
|
||||
vec![
|
||||
Box::new(Tuple(Vec::new())),
|
||||
Box::new(Literal(Path::new_(
|
||||
vec![typaram, sym::Error],
|
||||
None,
|
||||
vec![],
|
||||
PathKind::Local,
|
||||
))),
|
||||
Box::new(Unit),
|
||||
Box::new(Path(Path::new_(vec![typaram, sym::Error], vec![], PathKind::Local))),
|
||||
],
|
||||
PathKind::Std,
|
||||
)),
|
||||
attributes: Vec::new(),
|
||||
is_unsafe: false,
|
||||
unify_fieldless_variants: false,
|
||||
combine_substructure: combine_substructure(Box::new(|a, b, c| {
|
||||
encodable_substructure(a, b, c, krate)
|
||||
@ -158,8 +149,8 @@ fn encodable_substructure(
|
||||
trait_span: Span,
|
||||
substr: &Substructure<'_>,
|
||||
krate: Symbol,
|
||||
) -> P<Expr> {
|
||||
let encoder = substr.nonself_args[0].clone();
|
||||
) -> BlockOrExpr {
|
||||
let encoder = substr.nonselflike_args[0].clone();
|
||||
// throw an underscore in front to suppress unused variable warnings
|
||||
let blkarg = Ident::new(sym::_e, trait_span);
|
||||
let blkencoder = cx.expr_ident(trait_span, blkarg);
|
||||
@ -177,12 +168,12 @@ fn encodable_substructure(
|
||||
let fn_emit_struct_field_path =
|
||||
cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_struct_field]);
|
||||
let mut stmts = Vec::new();
|
||||
for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() {
|
||||
for (i, &FieldInfo { name, ref self_expr, span, .. }) in fields.iter().enumerate() {
|
||||
let name = match name {
|
||||
Some(id) => id.name,
|
||||
None => Symbol::intern(&format!("_field{}", i)),
|
||||
};
|
||||
let self_ref = cx.expr_addr_of(span, self_.clone());
|
||||
let self_ref = cx.expr_addr_of(span, self_expr.clone());
|
||||
let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]);
|
||||
let lambda = cx.lambda1(span, enc, blkarg);
|
||||
let call = cx.expr_call_global(
|
||||
@ -219,7 +210,7 @@ fn encodable_substructure(
|
||||
let fn_emit_struct_path =
|
||||
cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_struct]);
|
||||
|
||||
cx.expr_call_global(
|
||||
let expr = cx.expr_call_global(
|
||||
trait_span,
|
||||
fn_emit_struct_path,
|
||||
vec![
|
||||
@ -228,7 +219,8 @@ fn encodable_substructure(
|
||||
cx.expr_usize(trait_span, fields.len()),
|
||||
blk,
|
||||
],
|
||||
)
|
||||
);
|
||||
BlockOrExpr::new_expr(expr)
|
||||
}
|
||||
|
||||
EnumMatching(idx, _, variant, ref fields) => {
|
||||
@ -245,8 +237,8 @@ fn encodable_substructure(
|
||||
let mut stmts = Vec::new();
|
||||
if !fields.is_empty() {
|
||||
let last = fields.len() - 1;
|
||||
for (i, &FieldInfo { ref self_, span, .. }) in fields.iter().enumerate() {
|
||||
let self_ref = cx.expr_addr_of(span, self_.clone());
|
||||
for (i, &FieldInfo { ref self_expr, span, .. }) in fields.iter().enumerate() {
|
||||
let self_ref = cx.expr_addr_of(span, self_expr.clone());
|
||||
let enc =
|
||||
cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]);
|
||||
let lambda = cx.lambda1(span, enc, blkarg);
|
||||
@ -290,12 +282,12 @@ fn encodable_substructure(
|
||||
let blk = cx.lambda1(trait_span, call, blkarg);
|
||||
let fn_emit_enum_path: Vec<_> =
|
||||
cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_enum]);
|
||||
let ret = cx.expr_call_global(
|
||||
let expr = cx.expr_call_global(
|
||||
trait_span,
|
||||
fn_emit_enum_path,
|
||||
vec![encoder, cx.expr_str(trait_span, substr.type_ident.name), blk],
|
||||
);
|
||||
cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)]))
|
||||
BlockOrExpr::new_mixed(vec![me], Some(expr))
|
||||
}
|
||||
|
||||
_ => cx.bug("expected Struct or EnumMatching in derive(Encodable)"),
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,7 +1,6 @@
|
||||
//! A mini version of ast::Ty, which is easier to use, and features an explicit `Self` type to use
|
||||
//! when specifying impls to be derived.
|
||||
|
||||
pub use PtrTy::*;
|
||||
pub use Ty::*;
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
@ -11,22 +10,11 @@ use rustc_span::source_map::{respan, DUMMY_SP};
|
||||
use rustc_span::symbol::{kw, Ident, Symbol};
|
||||
use rustc_span::Span;
|
||||
|
||||
/// The types of pointers
|
||||
#[derive(Clone)]
|
||||
pub enum PtrTy {
|
||||
/// &'lifetime mut
|
||||
Borrowed(Option<Ident>, ast::Mutability),
|
||||
/// *mut
|
||||
#[allow(dead_code)]
|
||||
Raw(ast::Mutability),
|
||||
}
|
||||
|
||||
/// A path, e.g., `::std::option::Option::<i32>` (global). Has support
|
||||
/// for type parameters and a lifetime.
|
||||
/// for type parameters.
|
||||
#[derive(Clone)]
|
||||
pub struct Path {
|
||||
path: Vec<Symbol>,
|
||||
lifetime: Option<Ident>,
|
||||
params: Vec<Box<Ty>>,
|
||||
kind: PathKind,
|
||||
}
|
||||
@ -40,18 +28,13 @@ pub enum PathKind {
|
||||
|
||||
impl Path {
|
||||
pub fn new(path: Vec<Symbol>) -> Path {
|
||||
Path::new_(path, None, Vec::new(), PathKind::Std)
|
||||
Path::new_(path, Vec::new(), PathKind::Std)
|
||||
}
|
||||
pub fn new_local(path: Symbol) -> Path {
|
||||
Path::new_(vec![path], None, Vec::new(), PathKind::Local)
|
||||
Path::new_(vec![path], Vec::new(), PathKind::Local)
|
||||
}
|
||||
pub fn new_(
|
||||
path: Vec<Symbol>,
|
||||
lifetime: Option<Ident>,
|
||||
params: Vec<Box<Ty>>,
|
||||
kind: PathKind,
|
||||
) -> Path {
|
||||
Path { path, lifetime, params, kind }
|
||||
pub fn new_(path: Vec<Symbol>, params: Vec<Box<Ty>>, kind: PathKind) -> Path {
|
||||
Path { path, params, kind }
|
||||
}
|
||||
|
||||
pub fn to_ty(
|
||||
@ -71,10 +54,8 @@ impl Path {
|
||||
self_generics: &Generics,
|
||||
) -> ast::Path {
|
||||
let mut idents = self.path.iter().map(|s| Ident::new(*s, span)).collect();
|
||||
let lt = mk_lifetimes(cx, span, &self.lifetime);
|
||||
let tys = self.params.iter().map(|t| t.to_ty(cx, span, self_ty, self_generics));
|
||||
let params =
|
||||
lt.into_iter().map(GenericArg::Lifetime).chain(tys.map(GenericArg::Type)).collect();
|
||||
let params = tys.map(GenericArg::Type).collect();
|
||||
|
||||
match self.kind {
|
||||
PathKind::Global => cx.path_all(span, true, idents, params),
|
||||
@ -92,40 +73,17 @@ impl Path {
|
||||
#[derive(Clone)]
|
||||
pub enum Ty {
|
||||
Self_,
|
||||
/// &/Box/ Ty
|
||||
Ptr(Box<Ty>, PtrTy),
|
||||
/// A reference.
|
||||
Ref(Box<Ty>, ast::Mutability),
|
||||
/// `mod::mod::Type<[lifetime], [Params...]>`, including a plain type
|
||||
/// parameter, and things like `i32`
|
||||
Literal(Path),
|
||||
/// includes unit
|
||||
Tuple(Vec<Ty>),
|
||||
Path(Path),
|
||||
/// For () return types.
|
||||
Unit,
|
||||
}
|
||||
|
||||
pub fn borrowed_ptrty() -> PtrTy {
|
||||
Borrowed(None, ast::Mutability::Not)
|
||||
}
|
||||
pub fn borrowed(ty: Box<Ty>) -> Ty {
|
||||
Ptr(ty, borrowed_ptrty())
|
||||
}
|
||||
|
||||
pub fn borrowed_explicit_self() -> Option<Option<PtrTy>> {
|
||||
Some(Some(borrowed_ptrty()))
|
||||
}
|
||||
|
||||
pub fn borrowed_self() -> Ty {
|
||||
borrowed(Box::new(Self_))
|
||||
}
|
||||
|
||||
pub fn nil_ty() -> Ty {
|
||||
Tuple(Vec::new())
|
||||
}
|
||||
|
||||
fn mk_lifetime(cx: &ExtCtxt<'_>, span: Span, lt: &Option<Ident>) -> Option<ast::Lifetime> {
|
||||
lt.map(|ident| cx.lifetime(span, ident))
|
||||
}
|
||||
|
||||
fn mk_lifetimes(cx: &ExtCtxt<'_>, span: Span, lt: &Option<Ident>) -> Vec<ast::Lifetime> {
|
||||
mk_lifetime(cx, span, lt).into_iter().collect()
|
||||
pub fn self_ref() -> Ty {
|
||||
Ref(Box::new(Self_), ast::Mutability::Not)
|
||||
}
|
||||
|
||||
impl Ty {
|
||||
@ -136,23 +94,15 @@ impl Ty {
|
||||
self_ty: Ident,
|
||||
self_generics: &Generics,
|
||||
) -> P<ast::Ty> {
|
||||
match *self {
|
||||
Ptr(ref ty, ref ptr) => {
|
||||
match self {
|
||||
Ref(ty, mutbl) => {
|
||||
let raw_ty = ty.to_ty(cx, span, self_ty, self_generics);
|
||||
match *ptr {
|
||||
Borrowed(ref lt, mutbl) => {
|
||||
let lt = mk_lifetime(cx, span, lt);
|
||||
cx.ty_rptr(span, raw_ty, lt, mutbl)
|
||||
}
|
||||
Raw(mutbl) => cx.ty_ptr(span, raw_ty, mutbl),
|
||||
}
|
||||
cx.ty_rptr(span, raw_ty, None, *mutbl)
|
||||
}
|
||||
Literal(ref p) => p.to_ty(cx, span, self_ty, self_generics),
|
||||
Path(p) => p.to_ty(cx, span, self_ty, self_generics),
|
||||
Self_ => cx.ty_path(self.to_path(cx, span, self_ty, self_generics)),
|
||||
Tuple(ref fields) => {
|
||||
let ty = ast::TyKind::Tup(
|
||||
fields.iter().map(|f| f.to_ty(cx, span, self_ty, self_generics)).collect(),
|
||||
);
|
||||
Unit => {
|
||||
let ty = ast::TyKind::Tup(vec![]);
|
||||
cx.ty(span, ty)
|
||||
}
|
||||
}
|
||||
@ -185,9 +135,9 @@ impl Ty {
|
||||
|
||||
cx.path_all(span, false, vec![self_ty], params)
|
||||
}
|
||||
Literal(ref p) => p.to_path(cx, span, self_ty, generics),
|
||||
Ptr(..) => cx.span_bug(span, "pointer in a path in generic `derive`"),
|
||||
Tuple(..) => cx.span_bug(span, "tuple in a path in generic `derive`"),
|
||||
Path(ref p) => p.to_path(cx, span, self_ty, generics),
|
||||
Ref(..) => cx.span_bug(span, "ref in a path in generic `derive`"),
|
||||
Unit => cx.span_bug(span, "unit in a path in generic `derive`"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -245,28 +195,9 @@ impl Bounds {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_explicit_self(
|
||||
cx: &ExtCtxt<'_>,
|
||||
span: Span,
|
||||
self_ptr: &Option<PtrTy>,
|
||||
) -> (P<Expr>, ast::ExplicitSelf) {
|
||||
// this constructs a fresh `self` path
|
||||
pub fn get_explicit_self(cx: &ExtCtxt<'_>, span: Span) -> (P<Expr>, ast::ExplicitSelf) {
|
||||
// This constructs a fresh `self` path.
|
||||
let self_path = cx.expr_self(span);
|
||||
match *self_ptr {
|
||||
None => (self_path, respan(span, SelfKind::Value(ast::Mutability::Not))),
|
||||
Some(ref ptr) => {
|
||||
let self_ty = respan(
|
||||
span,
|
||||
match *ptr {
|
||||
Borrowed(ref lt, mutbl) => {
|
||||
let lt = lt.map(|s| cx.lifetime(span, s));
|
||||
SelfKind::Region(lt, mutbl)
|
||||
}
|
||||
Raw(_) => cx.span_bug(span, "attempted to use *self in deriving definition"),
|
||||
},
|
||||
);
|
||||
let self_expr = cx.expr_deref(span, self_path);
|
||||
(self_expr, self_ty)
|
||||
}
|
||||
}
|
||||
let self_ty = respan(span, SelfKind::Region(None, ast::Mutability::Not));
|
||||
(self_path, self_ty)
|
||||
}
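The slimmed-down `Ty` above keeps only what the derives actually need: `Self_`, `&T`/`&mut T`, plain paths, and `()`. Below is a standalone mock (using `String` in place of interned symbols and `bool` in place of `ast::Mutability`) of how a signature such as `fn cmp(&self, other: &Self) -> cmp::Ordering` is described with it; the names here are illustrative, not the real API.

```rust
// Standalone mock of the reduced type-description enum.
#[derive(Debug)]
enum Ty {
    SelfTy,             // `Self_` above
    Ref(Box<Ty>, bool), // `Ref(Box<Ty>, ast::Mutability)` above
    Path(String),       // `Path(Path)` above
    Unit,               // `Unit` above, for `()` return types
}

// Counterpart of `self_ref()` above: the `&Self` taken by `cmp`, `eq`, `hash`, ...
fn self_ref() -> Ty {
    Ty::Ref(Box::new(Ty::SelfTy), false)
}

fn main() {
    // `fn cmp(&self, other: &Self) -> cmp::Ordering`, described as data:
    let nonself_arg = ("other", self_ref());
    let ret_ty = Ty::Path("cmp::Ordering".to_string());
    println!("{:?} -> {:?}", nonself_arg, ret_ty);
}
```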
|
||||
|
||||
@ -1,9 +1,8 @@
|
||||
use crate::deriving::generic::ty::*;
|
||||
use crate::deriving::generic::*;
|
||||
use crate::deriving::{self, path_std, pathvec_std};
|
||||
use crate::deriving::{path_std, pathvec_std};
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::{Expr, MetaItem, Mutability};
|
||||
use rustc_ast::{MetaItem, Mutability};
|
||||
use rustc_expand::base::{Annotatable, ExtCtxt};
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::Span;
|
||||
@ -15,7 +14,7 @@ pub fn expand_deriving_hash(
|
||||
item: &Annotatable,
|
||||
push: &mut dyn FnMut(Annotatable),
|
||||
) {
|
||||
let path = Path::new_(pathvec_std!(hash::Hash), None, vec![], PathKind::Std);
|
||||
let path = Path::new_(pathvec_std!(hash::Hash), vec![], PathKind::Std);
|
||||
|
||||
let typaram = sym::__H;
|
||||
|
||||
@ -26,16 +25,14 @@ pub fn expand_deriving_hash(
|
||||
path,
|
||||
additional_bounds: Vec::new(),
|
||||
generics: Bounds::empty(),
|
||||
is_unsafe: false,
|
||||
supports_unions: false,
|
||||
methods: vec![MethodDef {
|
||||
name: sym::hash,
|
||||
generics: Bounds { bounds: vec![(typaram, vec![path_std!(hash::Hasher)])] },
|
||||
explicit_self: borrowed_explicit_self(),
|
||||
args: vec![(Ptr(Box::new(Literal(arg)), Borrowed(None, Mutability::Mut)), sym::state)],
|
||||
ret_ty: nil_ty(),
|
||||
explicit_self: true,
|
||||
nonself_args: vec![(Ref(Box::new(Path(arg)), Mutability::Mut), sym::state)],
|
||||
ret_ty: Unit,
|
||||
attributes: vec![],
|
||||
is_unsafe: false,
|
||||
unify_fieldless_variants: true,
|
||||
combine_substructure: combine_substructure(Box::new(|a, b, c| {
|
||||
hash_substructure(a, b, c)
|
||||
@ -47,42 +44,37 @@ pub fn expand_deriving_hash(
|
||||
hash_trait_def.expand(cx, mitem, item, push);
|
||||
}
|
||||
|
||||
fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P<Expr> {
|
||||
let [state_expr] = substr.nonself_args else {
|
||||
fn hash_substructure(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
trait_span: Span,
|
||||
substr: &Substructure<'_>,
|
||||
) -> BlockOrExpr {
|
||||
let [state_expr] = substr.nonselflike_args else {
|
||||
cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`");
|
||||
};
|
||||
let call_hash = |span, thing_expr| {
|
||||
let call_hash = |span, expr| {
|
||||
let hash_path = {
|
||||
let strs = cx.std_path(&[sym::hash, sym::Hash, sym::hash]);
|
||||
|
||||
cx.expr_path(cx.path_global(span, strs))
|
||||
};
|
||||
let ref_thing = cx.expr_addr_of(span, thing_expr);
|
||||
let expr = cx.expr_call(span, hash_path, vec![ref_thing, state_expr.clone()]);
|
||||
let expr = cx.expr_call(span, hash_path, vec![expr, state_expr.clone()]);
|
||||
cx.stmt_expr(expr)
|
||||
};
|
||||
let mut stmts = Vec::new();
|
||||
|
||||
let fields = match substr.fields {
|
||||
Struct(_, fs) | EnumMatching(_, 1, .., fs) => fs,
|
||||
EnumMatching(.., fs) => {
|
||||
let variant_value = deriving::call_intrinsic(
|
||||
cx,
|
||||
trait_span,
|
||||
sym::discriminant_value,
|
||||
vec![cx.expr_self(trait_span)],
|
||||
);
|
||||
|
||||
stmts.push(call_hash(trait_span, variant_value));
|
||||
|
||||
fs
|
||||
let (stmts, match_expr) = match substr.fields {
|
||||
Struct(_, fields) | EnumMatching(.., fields) => {
|
||||
let stmts =
|
||||
fields.iter().map(|field| call_hash(field.span, field.self_expr.clone())).collect();
|
||||
(stmts, None)
|
||||
}
|
||||
EnumTag(tag_field, match_expr) => {
|
||||
assert!(tag_field.other_selflike_exprs.is_empty());
|
||||
let stmts = vec![call_hash(tag_field.span, tag_field.self_expr.clone())];
|
||||
(stmts, match_expr.clone())
|
||||
}
|
||||
_ => cx.span_bug(trait_span, "impossible substructure in `derive(Hash)`"),
|
||||
};
|
||||
|
||||
stmts.extend(
|
||||
fields.iter().map(|FieldInfo { ref self_, span, .. }| call_hash(*span, self_.clone())),
|
||||
);
|
||||
|
||||
cx.expr_block(cx.block(trait_span, stmts))
|
||||
BlockOrExpr::new_mixed(stmts, match_expr)
|
||||
}
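A hand-written analogue of the `Hash` expansion for an enum: hash something identifying the variant first (the `EnumTag` field above; the example uses `mem::discriminant`, which differs in detail from hashing the raw tag value), then the fields of the matching variant.

```rust
use std::hash::{Hash, Hasher};

enum Shape {
    Circle(u32),
    Square { side: u32 },
}

impl Hash for Shape {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Corresponds to the `call_hash(tag_field.span, ...)` statement above.
        std::mem::discriminant(self).hash(state);
        // Corresponds to the per-field `call_hash` statements.
        match self {
            Shape::Circle(radius) => radius.hash(state),
            Shape::Square { side } => side.hash(state),
        }
    }
}
```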
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::{Impl, ItemKind, MetaItem};
|
||||
use rustc_ast::{GenericArg, Impl, ItemKind, MetaItem};
|
||||
use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, MultiItemModifier};
|
||||
use rustc_span::symbol::{sym, Ident, Symbol};
|
||||
use rustc_span::Span;
|
||||
@ -193,3 +193,16 @@ fn inject_impl_of_structural_trait(
|
||||
|
||||
push(Annotatable::Item(newitem));
|
||||
}
|
||||
|
||||
fn assert_ty_bounds(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
stmts: &mut Vec<ast::Stmt>,
|
||||
ty: P<ast::Ty>,
|
||||
span: Span,
|
||||
assert_path: &[Symbol],
|
||||
) {
|
||||
// Generate statement `let _: assert_path<ty>;`.
|
||||
let span = cx.with_def_site_ctxt(span);
|
||||
let assert_path = cx.path_all(span, true, cx.std_path(assert_path), vec![GenericArg::Type(ty)]);
|
||||
stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path)));
|
||||
}
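For context, this is the kind of statement `assert_ty_bounds` emits for `derive(Eq)`, written out with a stand-in for the unstable `core::cmp::AssertParamIsEq` marker. The duplicate-type filtering shown at the top of this diff means repeated field types produce only one assertion.

```rust
use std::marker::PhantomData;

// Stand-in for the internal `::core::cmp::AssertParamIsEq` marker type: it
// only type-checks when `T: Eq`, so one `let _: ...;` statement per field type
// turns "field does not implement Eq" into a targeted error.
struct AssertParamIsEq<T: Eq + ?Sized> {
    _marker: PhantomData<T>,
}

struct Foo {
    a: u32,
    b: String,
    c: u32, // same simple path as `a`, so no second `u32` assertion is emitted
}

// Roughly what the generated assertions boil down to for `Foo`
// (the real method is hidden on the `Eq` impl and uses def-site hygiene).
fn assert_fields_are_total_eq(_: &Foo) {
    let _: AssertParamIsEq<u32>;
    let _: AssertParamIsEq<String>;
}
```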
|
||||
|
||||
@ -11,9 +11,12 @@ use rustc_errors::{pluralize, Applicability, MultiSpan, PResult};
|
||||
use rustc_expand::base::{self, *};
|
||||
use rustc_parse_format as parse;
|
||||
use rustc_span::symbol::{sym, Ident, Symbol};
|
||||
use rustc_span::{InnerSpan, Span};
|
||||
use rustc_span::{BytePos, InnerSpan, Span};
|
||||
use smallvec::SmallVec;
|
||||
|
||||
use rustc_lint_defs::builtin::NAMED_ARGUMENTS_USED_POSITIONALLY;
|
||||
use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiagnostics, LintId};
|
||||
use rustc_parse_format::Count;
|
||||
use std::borrow::Cow;
|
||||
use std::collections::hash_map::Entry;
|
||||
|
||||
@ -29,6 +32,165 @@ enum Position {
|
||||
Named(Symbol, InnerSpan),
|
||||
}
|
||||
|
||||
/// Indicates how a positional named argument (i.e. a named argument which is used by position
/// instead of by name) is used in a format string.
/// * `Arg` is the actual argument to print
/// * `Width` is the width format argument
/// * `Precision` is the precision format argument
/// Example: `{Arg:Width$.Precision$}`
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
enum PositionalNamedArgType {
|
||||
Arg,
|
||||
Width,
|
||||
Precision,
|
||||
}
|
||||
|
||||
/// Contains information necessary to create a lint for a positional named argument
|
||||
#[derive(Debug)]
|
||||
struct PositionalNamedArg {
|
||||
ty: PositionalNamedArgType,
|
||||
/// The piece of the format string using this argument (multiple pieces can use the same argument)
cur_piece: usize,
/// The `InnerSpan` in the format string to be replaced with the named argument
|
||||
/// This will be None when the position is implicit
|
||||
inner_span_to_replace: Option<rustc_parse_format::InnerSpan>,
|
||||
/// The name to use instead of the position
|
||||
replacement: Symbol,
|
||||
/// The span for the positional named argument (so the lint can point a message to it)
|
||||
positional_named_arg_span: Span,
|
||||
has_formatting: bool,
|
||||
}
|
||||
|
||||
impl PositionalNamedArg {
|
||||
/// Determines:
|
||||
/// 1) span to be replaced with the name of the named argument and
|
||||
/// 2) span to be underlined for error messages
|
||||
fn get_positional_arg_spans(&self, cx: &Context<'_, '_>) -> (Option<Span>, Option<Span>) {
|
||||
if let Some(inner_span) = &self.inner_span_to_replace {
|
||||
let span =
|
||||
cx.fmtsp.from_inner(InnerSpan { start: inner_span.start, end: inner_span.end });
|
||||
(Some(span), Some(span))
|
||||
} else if self.ty == PositionalNamedArgType::Arg {
|
||||
// In the case of a named argument whose position is implicit, if the argument *has*
|
||||
// formatting, there will not be a span to replace. Instead, we insert the name after
|
||||
// the `{`, which will be the first character of arg_span. If the argument does *not*
|
||||
// have formatting, there may or may not be a span to replace. This is because
|
||||
// whitespace is allowed in arguments without formatting (such as `format!("{ }", 1);`)
|
||||
// but is not allowed in arguments with formatting (an error will be generated in cases
|
||||
// like `format!("{ :1.1}", 1.0f32);`).
|
||||
// For the message span, if there is formatting, we want to use the opening `{` and the
|
||||
// next character, which will be the `:` indicating the start of formatting. If there is
|
||||
// not any formatting, we want to underline the entire span.
|
||||
cx.arg_spans.get(self.cur_piece).map_or((None, None), |arg_span| {
|
||||
if self.has_formatting {
|
||||
(
|
||||
Some(arg_span.with_lo(arg_span.lo() + BytePos(1)).shrink_to_lo()),
|
||||
Some(arg_span.with_hi(arg_span.lo() + BytePos(2))),
|
||||
)
|
||||
} else {
|
||||
let replace_start = arg_span.lo() + BytePos(1);
|
||||
let replace_end = arg_span.hi() - BytePos(1);
|
||||
let to_replace = arg_span.with_lo(replace_start).with_hi(replace_end);
|
||||
(Some(to_replace), Some(*arg_span))
|
||||
}
|
||||
})
|
||||
} else {
|
||||
(None, None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Encapsulates all the named arguments that have been used positionally
|
||||
#[derive(Debug)]
|
||||
struct PositionalNamedArgsLint {
|
||||
positional_named_args: Vec<PositionalNamedArg>,
|
||||
}
|
||||
|
||||
impl PositionalNamedArgsLint {
|
||||
/// For a given positional argument, check if the index is for a named argument.
|
||||
///
|
||||
/// Since positional arguments are required to come before named arguments, if the positional
|
||||
/// index is greater than or equal to the start of named arguments, we know it's a named
|
||||
/// argument used positionally.
|
||||
///
|
||||
/// Example:
|
||||
/// println!("{} {} {2}", 0, a=1, b=2);
|
||||
///
|
||||
/// In this case, the first piece (`{}`) would be ArgumentImplicitlyIs with an index of 0. The
|
||||
/// total number of arguments is 3 and the number of named arguments is 2, so the start of named
|
||||
/// arguments is index 1. Therefore, the index of 0 is okay.
|
||||
///
|
||||
/// The second piece (`{}`) would be ArgumentImplicitlyIs with an index of 1, which is the start
|
||||
/// of named arguments, and so we should add a lint to use the named argument `a`.
|
||||
///
|
||||
/// The third piece (`{2}`) would be ArgumentIs with an index of 2, which is greater than the
|
||||
/// start of named arguments, and so we should add a lint to use the named argument `b`.
|
||||
///
|
||||
/// This same check also works for width and precision formatting when either or both are
|
||||
/// CountIsParam, which contains an index into the arguments.
|
||||
fn maybe_add_positional_named_arg(
|
||||
&mut self,
|
||||
current_positional_arg: usize,
|
||||
total_args_length: usize,
|
||||
format_argument_index: usize,
|
||||
ty: PositionalNamedArgType,
|
||||
cur_piece: usize,
|
||||
inner_span_to_replace: Option<rustc_parse_format::InnerSpan>,
|
||||
names: &FxHashMap<Symbol, (usize, Span)>,
|
||||
has_formatting: bool,
|
||||
) {
|
||||
let start_of_named_args = total_args_length - names.len();
|
||||
if current_positional_arg >= start_of_named_args {
|
||||
self.maybe_push(
|
||||
format_argument_index,
|
||||
ty,
|
||||
cur_piece,
|
||||
inner_span_to_replace,
|
||||
names,
|
||||
has_formatting,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Try constructing a PositionalNamedArg struct and pushing it into the vec of positional
|
||||
/// named arguments. If a named arg associated with `format_argument_index` cannot be found,
|
||||
/// a new item will not be added as the lint cannot be emitted in this case.
|
||||
fn maybe_push(
|
||||
&mut self,
|
||||
format_argument_index: usize,
|
||||
ty: PositionalNamedArgType,
|
||||
cur_piece: usize,
|
||||
inner_span_to_replace: Option<rustc_parse_format::InnerSpan>,
|
||||
names: &FxHashMap<Symbol, (usize, Span)>,
|
||||
has_formatting: bool,
|
||||
) {
|
||||
let named_arg = names
|
||||
.iter()
|
||||
.find(|&(_, &(index, _))| index == format_argument_index)
|
||||
.map(|found| found.clone());
|
||||
|
||||
if let Some((&replacement, &(_, positional_named_arg_span))) = named_arg {
|
||||
// In FormatSpec, `precision_span` starts at the leading `.`, which we want to keep in
|
||||
// the lint suggestion, so increment `start` by 1 when `PositionalArgumentType` is
|
||||
// `Precision`.
|
||||
let inner_span_to_replace = if ty == PositionalNamedArgType::Precision {
|
||||
inner_span_to_replace
|
||||
.map(|is| rustc_parse_format::InnerSpan { start: is.start + 1, end: is.end })
|
||||
} else {
|
||||
inner_span_to_replace
|
||||
};
|
||||
self.positional_named_args.push(PositionalNamedArg {
|
||||
ty,
|
||||
cur_piece,
|
||||
inner_span_to_replace,
|
||||
replacement,
|
||||
positional_named_arg_span,
|
||||
has_formatting,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
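Worked through on the doc-comment example `println!("{} {} {2}", 0, a=1, b=2)`: three arguments, two of them named, so named arguments start at index 1 and any piece referring to index 1 or 2 gets the lint. A standalone version of the index test (`current_positional_arg >= total_args_length - names.len()`):

```rust
// Mirrors the check in `maybe_add_positional_named_arg` above.
fn named_arg_used_positionally(index: usize, total_args: usize, named_args: usize) -> bool {
    let start_of_named_args = total_args - named_args;
    index >= start_of_named_args
}

fn main() {
    // println!("{} {} {2}", 0, a = 1, b = 2);
    assert!(!named_arg_used_positionally(0, 3, 2)); // first `{}`: real positional arg
    assert!(named_arg_used_positionally(1, 3, 2));  // second `{}`: should be `{a}`
    assert!(named_arg_used_positionally(2, 3, 2));  // `{2}`: should be `{b}`
}
```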
|
||||
|
||||
struct Context<'a, 'b> {
|
||||
ecx: &'a mut ExtCtxt<'b>,
|
||||
/// The macro's call site. References to unstable formatting internals must
|
||||
@ -57,7 +219,7 @@ struct Context<'a, 'b> {
|
||||
/// Unique format specs seen for each argument.
|
||||
arg_unique_types: Vec<Vec<ArgumentType>>,
|
||||
/// Map from named arguments to their resolved indices.
|
||||
names: FxHashMap<Symbol, usize>,
|
||||
names: FxHashMap<Symbol, (usize, Span)>,
|
||||
|
||||
/// The latest consecutive literal strings, or empty if there weren't any.
|
||||
literal: String,
|
||||
@ -115,6 +277,12 @@ struct Context<'a, 'b> {
|
||||
|
||||
/// Whether this format string came from a string literal, as opposed to a macro.
|
||||
is_literal: bool,
|
||||
unused_names_lint: PositionalNamedArgsLint,
|
||||
}
|
||||
|
||||
pub struct FormatArg {
|
||||
expr: P<ast::Expr>,
|
||||
named: bool,
|
||||
}
|
||||
|
||||
/// Parses the arguments from the given list of tokens, returning the diagnostic
|
||||
@ -130,9 +298,9 @@ fn parse_args<'a>(
|
||||
ecx: &mut ExtCtxt<'a>,
|
||||
sp: Span,
|
||||
tts: TokenStream,
|
||||
) -> PResult<'a, (P<ast::Expr>, Vec<P<ast::Expr>>, FxHashMap<Symbol, usize>)> {
|
||||
let mut args = Vec::<P<ast::Expr>>::new();
|
||||
let mut names = FxHashMap::<Symbol, usize>::default();
|
||||
) -> PResult<'a, (P<ast::Expr>, Vec<FormatArg>, FxHashMap<Symbol, (usize, Span)>)> {
|
||||
let mut args = Vec::<FormatArg>::new();
|
||||
let mut names = FxHashMap::<Symbol, (usize, Span)>::default();
|
||||
|
||||
let mut p = ecx.new_parser_from_tts(tts);
|
||||
|
||||
@ -197,9 +365,9 @@ fn parse_args<'a>(
|
||||
p.bump();
|
||||
p.expect(&token::Eq)?;
|
||||
let e = p.parse_expr()?;
|
||||
if let Some(prev) = names.get(&ident.name) {
|
||||
if let Some((prev, _)) = names.get(&ident.name) {
|
||||
ecx.struct_span_err(e.span, &format!("duplicate argument named `{}`", ident))
|
||||
.span_label(args[*prev].span, "previously here")
|
||||
.span_label(args[*prev].expr.span, "previously here")
|
||||
.span_label(e.span, "duplicate argument")
|
||||
.emit();
|
||||
continue;
|
||||
@ -210,8 +378,8 @@ fn parse_args<'a>(
|
||||
// if the input is valid, we can simply append to the positional
|
||||
// args. And remember the names.
|
||||
let slot = args.len();
|
||||
names.insert(ident.name, slot);
|
||||
args.push(e);
|
||||
names.insert(ident.name, (slot, ident.span));
|
||||
args.push(FormatArg { expr: e, named: true });
|
||||
}
|
||||
_ => {
|
||||
let e = p.parse_expr()?;
|
||||
@ -222,11 +390,11 @@ fn parse_args<'a>(
|
||||
);
|
||||
err.span_label(e.span, "positional arguments must be before named arguments");
|
||||
for pos in names.values() {
|
||||
err.span_label(args[*pos].span, "named argument");
|
||||
err.span_label(args[pos.0].expr.span, "named argument");
|
||||
}
|
||||
err.emit();
|
||||
}
|
||||
args.push(e);
|
||||
args.push(FormatArg { expr: e, named: false });
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -239,15 +407,16 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
self.args.len() - self.num_captured_args
|
||||
}
|
||||
|
||||
fn resolve_name_inplace(&self, p: &mut parse::Piece<'_>) {
|
||||
fn resolve_name_inplace(&mut self, p: &mut parse::Piece<'_>) {
|
||||
// NOTE: the `unwrap_or` branch is needed in case of invalid format
|
||||
// arguments, e.g., `format_args!("{foo}")`.
|
||||
let lookup = |s: &str| *self.names.get(&Symbol::intern(s)).unwrap_or(&0);
|
||||
let lookup =
|
||||
|s: &str| self.names.get(&Symbol::intern(s)).unwrap_or(&(0, Span::default())).0;
|
||||
|
||||
match *p {
|
||||
parse::String(_) => {}
|
||||
parse::NextArgument(ref mut arg) => {
|
||||
if let parse::ArgumentNamed(s, _) = arg.position {
|
||||
if let parse::ArgumentNamed(s) = arg.position {
|
||||
arg.position = parse::ArgumentIs(lookup(s));
|
||||
}
|
||||
if let parse::CountIsName(s, _) = arg.format.width {
|
||||
@ -269,15 +438,54 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
parse::NextArgument(ref arg) => {
|
||||
// width/precision first, if they have implicit positional
|
||||
// parameters it makes more sense to consume them first.
|
||||
self.verify_count(arg.format.width);
|
||||
self.verify_count(arg.format.precision);
|
||||
self.verify_count(
|
||||
arg.format.width,
|
||||
&arg.format.width_span,
|
||||
PositionalNamedArgType::Width,
|
||||
);
|
||||
self.verify_count(
|
||||
arg.format.precision,
|
||||
&arg.format.precision_span,
|
||||
PositionalNamedArgType::Precision,
|
||||
);
|
||||
|
||||
let has_precision = arg.format.precision != Count::CountImplied;
|
||||
let has_width = arg.format.width != Count::CountImplied;
|
||||
|
||||
// argument second, if it's an implicit positional parameter
|
||||
// it's written second, so it should come after width/precision.
|
||||
let pos = match arg.position {
|
||||
parse::ArgumentIs(i) | parse::ArgumentImplicitlyIs(i) => Exact(i),
|
||||
parse::ArgumentNamed(s, span) => {
|
||||
Named(Symbol::intern(s), InnerSpan::new(span.start, span.end))
|
||||
parse::ArgumentIs(i) => {
|
||||
self.unused_names_lint.maybe_add_positional_named_arg(
|
||||
i,
|
||||
self.args.len(),
|
||||
i,
|
||||
PositionalNamedArgType::Arg,
|
||||
self.curpiece,
|
||||
Some(arg.position_span),
|
||||
&self.names,
|
||||
has_precision || has_width,
|
||||
);
|
||||
|
||||
Exact(i)
|
||||
}
|
||||
parse::ArgumentImplicitlyIs(i) => {
|
||||
self.unused_names_lint.maybe_add_positional_named_arg(
|
||||
i,
|
||||
self.args.len(),
|
||||
i,
|
||||
PositionalNamedArgType::Arg,
|
||||
self.curpiece,
|
||||
None,
|
||||
&self.names,
|
||||
has_precision || has_width,
|
||||
);
|
||||
Exact(i)
|
||||
}
|
||||
parse::ArgumentNamed(s) => {
|
||||
let symbol = Symbol::intern(s);
|
||||
let span = arg.position_span;
|
||||
Named(symbol, InnerSpan::new(span.start, span.end))
|
||||
}
|
||||
};
|
||||
|
||||
@ -345,10 +553,25 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
}
|
||||
}
|
||||
|
||||
fn verify_count(&mut self, c: parse::Count<'_>) {
|
||||
fn verify_count(
|
||||
&mut self,
|
||||
c: parse::Count<'_>,
|
||||
inner_span: &Option<rustc_parse_format::InnerSpan>,
|
||||
named_arg_type: PositionalNamedArgType,
|
||||
) {
|
||||
match c {
|
||||
parse::CountImplied | parse::CountIs(..) => {}
|
||||
parse::CountIsParam(i) => {
|
||||
self.unused_names_lint.maybe_add_positional_named_arg(
|
||||
i,
|
||||
self.args.len(),
|
||||
i,
|
||||
named_arg_type,
|
||||
self.curpiece,
|
||||
*inner_span,
|
||||
&self.names,
|
||||
true,
|
||||
);
|
||||
self.verify_arg_type(Exact(i), Count);
|
||||
}
|
||||
parse::CountIsName(s, span) => {
|
||||
@ -481,7 +704,7 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
if let Some(span) = fmt.width_span {
|
||||
let span = self.fmtsp.from_inner(InnerSpan::new(span.start, span.end));
|
||||
match fmt.width {
|
||||
parse::CountIsParam(pos) if pos > self.num_args() => {
|
||||
parse::CountIsParam(pos) if pos >= self.num_args() => {
|
||||
e.span_label(
|
||||
span,
|
||||
&format!(
|
||||
@ -548,7 +771,7 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
match self.names.get(&name) {
|
||||
Some(&idx) => {
|
||||
// Treat as positional arg.
|
||||
self.verify_arg_type(Capture(idx), ty)
|
||||
self.verify_arg_type(Capture(idx.0), ty)
|
||||
}
|
||||
None => {
|
||||
// For the moment capturing variables from format strings expanded from macros is
|
||||
@ -565,7 +788,7 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
};
|
||||
self.num_captured_args += 1;
|
||||
self.args.push(self.ecx.expr_ident(span, Ident::new(name, span)));
|
||||
self.names.insert(name, idx);
|
||||
self.names.insert(name, (idx, span));
|
||||
self.verify_arg_type(Capture(idx), ty)
|
||||
} else {
|
||||
let msg = format!("there is no argument named `{}`", name);
|
||||
@ -669,7 +892,7 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
// Build the position
|
||||
let pos = {
|
||||
match arg.position {
|
||||
parse::ArgumentIs(i) | parse::ArgumentImplicitlyIs(i) => {
|
||||
parse::ArgumentIs(i, ..) | parse::ArgumentImplicitlyIs(i) => {
|
||||
// Map to index in final generated argument array
|
||||
// in case of multiple types specified
|
||||
let arg_idx = match arg_index_consumed.get_mut(i) {
|
||||
@ -699,6 +922,7 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
self.curarg += 1;
|
||||
parse::ArgumentIs(i)
|
||||
},
|
||||
position_span: arg.position_span,
|
||||
format: parse::FormatSpec {
|
||||
fill: arg.format.fill,
|
||||
align: parse::AlignUnknown,
|
||||
@ -776,7 +1000,7 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
|
||||
// First, build up the static array which will become our precompiled
|
||||
// format "string"
|
||||
let pieces = self.ecx.expr_vec_slice(self.fmtsp, self.str_pieces);
|
||||
let pieces = self.ecx.expr_array_ref(self.fmtsp, self.str_pieces);
|
||||
|
||||
// We need to construct a &[ArgumentV1] to pass into the fmt::Arguments
|
||||
// constructor. In general the expressions in this slice might be
|
||||
@ -849,7 +1073,7 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
fmt_args.push(Context::format_arg(self.ecx, self.macsp, span, arg_ty, arg));
|
||||
}
|
||||
|
||||
let args_array = self.ecx.expr_vec(self.macsp, fmt_args);
|
||||
let args_array = self.ecx.expr_array(self.macsp, fmt_args);
|
||||
let args_slice = self.ecx.expr_addr_of(
|
||||
self.macsp,
|
||||
if no_need_for_match {
|
||||
@ -879,7 +1103,7 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||
} else {
|
||||
// Build up the static array which will store our precompiled
|
||||
// nonstandard placeholders, if there are any.
|
||||
let fmt = self.ecx.expr_vec_slice(self.macsp, self.pieces);
|
||||
let fmt = self.ecx.expr_array_ref(self.macsp, self.pieces);
|
||||
|
||||
let path = self.ecx.std_path(&[sym::fmt, sym::UnsafeArg, sym::new]);
|
||||
let unsafe_arg = self.ecx.expr_call_global(self.macsp, path, Vec::new());
|
||||
@ -967,14 +1191,36 @@ pub fn expand_format_args_nl<'cx>(
|
||||
expand_format_args_impl(ecx, sp, tts, true)
|
||||
}
|
||||
|
||||
fn create_lints_for_named_arguments_used_positionally(cx: &mut Context<'_, '_>) {
|
||||
for named_arg in &cx.unused_names_lint.positional_named_args {
|
||||
let (position_sp_to_replace, position_sp_for_msg) = named_arg.get_positional_arg_spans(cx);
|
||||
|
||||
let msg = format!("named argument `{}` is not used by name", named_arg.replacement);
|
||||
|
||||
cx.ecx.buffered_early_lint.push(BufferedEarlyLint {
|
||||
span: MultiSpan::from_span(named_arg.positional_named_arg_span),
|
||||
msg: msg.clone(),
|
||||
node_id: ast::CRATE_NODE_ID,
|
||||
lint_id: LintId::of(&NAMED_ARGUMENTS_USED_POSITIONALLY),
|
||||
diagnostic: BuiltinLintDiagnostics::NamedArgumentUsedPositionally {
|
||||
position_sp_to_replace,
|
||||
position_sp_for_msg,
|
||||
named_arg_sp: named_arg.positional_named_arg_span,
|
||||
named_arg_name: named_arg.replacement.to_string(),
|
||||
is_formatting_arg: named_arg.ty != PositionalNamedArgType::Arg,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Take the various parts of `format_args!(efmt, args..., name=names...)`
|
||||
/// and construct the appropriate formatting expression.
|
||||
pub fn expand_preparsed_format_args(
|
||||
ecx: &mut ExtCtxt<'_>,
|
||||
sp: Span,
|
||||
efmt: P<ast::Expr>,
|
||||
args: Vec<P<ast::Expr>>,
|
||||
names: FxHashMap<Symbol, usize>,
|
||||
args: Vec<FormatArg>,
|
||||
names: FxHashMap<Symbol, (usize, Span)>,
|
||||
append_newline: bool,
|
||||
) -> P<ast::Expr> {
|
||||
// NOTE: this verbose way of initializing `Vec<Vec<ArgumentType>>` is because
|
||||
@ -1063,6 +1309,25 @@ pub fn expand_preparsed_format_args(
|
||||
e.span_label(fmt_span.from_inner(InnerSpan::new(span.start, span.end)), label);
|
||||
}
|
||||
}
|
||||
if err.should_be_replaced_with_positional_argument {
|
||||
let captured_arg_span =
|
||||
fmt_span.from_inner(InnerSpan::new(err.span.start, err.span.end));
|
||||
let positional_args = args.iter().filter(|arg| !arg.named).collect::<Vec<_>>();
|
||||
if let Ok(arg) = ecx.source_map().span_to_snippet(captured_arg_span) {
|
||||
let span = match positional_args.last() {
|
||||
Some(arg) => arg.expr.span,
|
||||
None => fmt_sp,
|
||||
};
|
||||
e.multipart_suggestion_verbose(
|
||||
"consider using a positional formatting argument instead",
|
||||
vec![
|
||||
(captured_arg_span, positional_args.len().to_string()),
|
||||
(span.shrink_to_hi(), format!(", {}", arg)),
|
||||
],
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
}
|
||||
e.emit();
|
||||
return DummyResult::raw_expr(sp, true);
|
||||
}
|
||||
@ -1073,11 +1338,11 @@ pub fn expand_preparsed_format_args(
|
||||
.map(|span| fmt_span.from_inner(InnerSpan::new(span.start, span.end)))
|
||||
.collect();
|
||||
|
||||
let named_pos: FxHashSet<usize> = names.values().cloned().collect();
|
||||
let named_pos: FxHashSet<usize> = names.values().cloned().map(|(i, _)| i).collect();
|
||||
|
||||
let mut cx = Context {
|
||||
ecx,
|
||||
args,
|
||||
args: args.into_iter().map(|arg| arg.expr).collect(),
|
||||
num_captured_args: 0,
|
||||
arg_types,
|
||||
arg_unique_types,
|
||||
@ -1099,6 +1364,7 @@ pub fn expand_preparsed_format_args(
|
||||
arg_spans,
|
||||
arg_with_formatting: Vec::new(),
|
||||
is_literal: parser.is_literal,
|
||||
unused_names_lint: PositionalNamedArgsLint { positional_named_args: vec![] },
|
||||
};
|
||||
|
||||
// This needs to happen *after* the Parser has consumed all pieces to create all the spans
|
||||
@ -1113,7 +1379,7 @@ pub fn expand_preparsed_format_args(
|
||||
|
||||
let numbered_position_args = pieces.iter().any(|arg: &parse::Piece<'_>| match *arg {
|
||||
parse::String(_) => false,
|
||||
parse::NextArgument(arg) => matches!(arg.position, parse::Position::ArgumentIs(_)),
|
||||
parse::NextArgument(arg) => matches!(arg.position, parse::Position::ArgumentIs(..)),
|
||||
});
|
||||
|
||||
cx.build_index_map();
|
||||
@ -1265,6 +1531,10 @@ pub fn expand_preparsed_format_args(
|
||||
}
|
||||
|
||||
diag.emit();
|
||||
} else if cx.invalid_refs.is_empty() && cx.ecx.sess.err_count() == 0 {
|
||||
// Only check for unused named argument names if there are no other errors to avoid causing
|
||||
// too much noise in output errors, such as when a named argument is entirely unused.
|
||||
create_lints_for_named_arguments_used_positionally(&mut cx);
|
||||
}
|
||||
|
||||
cx.into_expr()
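As a side note (not part of the diff): the Width/Precision paths handled by `verify_count` above correspond to format strings where a count refers to a named argument by position. A minimal sketch of the precision case and the by-name form the suggestion rewrites it to:

fn main() {
    // `.1$` takes the precision from argument index 1, which is the named
    // argument `prec`, so this hits the Precision case of the
    // `named_arguments_used_positionally` lint...
    println!("{:.1$}", 1.23456789_f64, prec = 3);
    // ...and the by-name form the emitted suggestion points at:
    println!("{:.prec$}", 1.23456789_f64, prec = 3);
}
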
|
||||
|
||||
@ -317,7 +317,7 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P<ast::Item> {
|
||||
proc_macro_ty_method_path(cx, custom_derive),
|
||||
vec![
|
||||
cx.expr_str(span, cd.trait_name),
|
||||
cx.expr_vec_slice(
|
||||
cx.expr_array_ref(
|
||||
span,
|
||||
cd.attrs.iter().map(|&s| cx.expr_str(span, s)).collect::<Vec<_>>(),
|
||||
),
|
||||
@ -362,7 +362,7 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P<ast::Item> {
|
||||
ast::Mutability::Not,
|
||||
),
|
||||
ast::Mutability::Not,
|
||||
cx.expr_vec_slice(span, decls),
|
||||
cx.expr_array_ref(span, decls),
|
||||
)
|
||||
.map(|mut i| {
|
||||
let attr = cx.meta_word(span, sym::rustc_proc_macro_decls);
|
||||
|
||||
@ -51,7 +51,7 @@ pub fn inject(sess: &Session, resolver: &mut dyn ResolverExpand, krate: &mut ast
|
||||
let test_runner = get_test_runner(sess, span_diagnostic, &krate);
|
||||
|
||||
if sess.opts.test {
|
||||
let panic_strategy = match (panic_strategy, sess.opts.debugging_opts.panic_abort_tests) {
|
||||
let panic_strategy = match (panic_strategy, sess.opts.unstable_opts.panic_abort_tests) {
|
||||
(PanicStrategy::Abort, true) => PanicStrategy::Abort,
|
||||
(PanicStrategy::Abort, false) => {
|
||||
if panic_strategy == platform_panic_strategy {
|
||||
@ -352,7 +352,7 @@ fn mk_tests_slice(cx: &TestCtxt<'_>, sp: Span) -> P<ast::Expr> {
|
||||
debug!("building test vector from {} tests", cx.test_cases.len());
|
||||
let ecx = &cx.ext_cx;
|
||||
|
||||
ecx.expr_vec_slice(
|
||||
ecx.expr_array_ref(
|
||||
sp,
|
||||
cx.test_cases
|
||||
.iter()
|
||||
|
||||
@ -11,8 +11,8 @@ pub fn expand_trace_macros(
let mut cursor = tt.into_trees();
let mut err = false;
let value = match &cursor.next() {
Some(TokenTree::Token(token)) if token.is_keyword(kw::True) => true,
Some(TokenTree::Token(token)) if token.is_keyword(kw::False) => false,
Some(TokenTree::Token(token, _)) if token.is_keyword(kw::True) => true,
Some(TokenTree::Token(token, _)) if token.is_keyword(kw::False) => false,
_ => {
err = true;
false
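For context (not part of the diff), the `true`/`false` keyword token matched above comes from user code such as this nightly-only sketch:

#![feature(trace_macros)]

fn main() {
    trace_macros!(true); // the keyword token parsed by expand_trace_macros
    let v = vec![1, 2, 3]; // each step of `vec!`'s expansion is now traced
    trace_macros!(false);
    assert_eq!(v.len(), 3);
}
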
|
||||
|
||||
@ -1,10 +1,9 @@
{
// source for rustc_* is not included in the rust-src component; disable the errors about this
"rust-analyzer.diagnostics.disabled": ["unresolved-extern-crate", "unresolved-macro-call"],
"rust-analyzer.assist.importGranularity": "module",
"rust-analyzer.assist.importEnforceGranularity": true,
"rust-analyzer.assist.importPrefix": "crate",
"rust-analyzer.cargo.runBuildScripts": true,
"rust-analyzer.imports.granularity.enforce": true,
"rust-analyzer.imports.granularity.group": "module",
"rust-analyzer.imports.prefix": "crate",
"rust-analyzer.cargo.features": ["unstable-features"],
"rust-analyzer.linkedProjects": [
"./Cargo.toml",
|
||||
|
||||
147 compiler/rustc_codegen_cranelift/Cargo.lock generated
@ -2,6 +2,17 @@
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
"once_cell",
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.56"
|
||||
@ -25,6 +36,12 @@ version = "1.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
version = "1.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
@ -33,56 +50,57 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-bforest"
|
||||
version = "0.83.0"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ed44413e7e2fe3260d0ed73e6956ab188b69c10ee92b892e401e0f4f6808c68b"
|
||||
checksum = "749d0d6022c9038dccf480bdde2a38d435937335bf2bb0f14e815d94517cdce8"
|
||||
dependencies = [
|
||||
"cranelift-entity",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-codegen"
|
||||
version = "0.83.0"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b5d83f0f26bf213f971f45589d17e5b65e4861f9ed22392b0cbb6eaa5bd329c"
|
||||
checksum = "e94370cc7b37bf652ccd8bb8f09bd900997f7ccf97520edfc75554bb5c4abbea"
|
||||
dependencies = [
|
||||
"cranelift-bforest",
|
||||
"cranelift-codegen-meta",
|
||||
"cranelift-codegen-shared",
|
||||
"cranelift-entity",
|
||||
"cranelift-isle",
|
||||
"gimli",
|
||||
"log",
|
||||
"regalloc",
|
||||
"regalloc2",
|
||||
"smallvec",
|
||||
"target-lexicon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-codegen-meta"
|
||||
version = "0.83.0"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6800dc386177df6ecc5a32680607ed8ba1fa0d31a2a59c8c61fbf44826b8191d"
|
||||
checksum = "e0a3cea8fdab90e44018c5b9a1dfd460d8ee265ac354337150222a354628bdb6"
|
||||
dependencies = [
|
||||
"cranelift-codegen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-codegen-shared"
|
||||
version = "0.83.0"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c961f85070985ebc8fcdb81b838a5cf842294d1e6ed4852446161c7e246fd455"
|
||||
checksum = "5ac72f76f2698598951ab26d8c96eaa854810e693e7dd52523958b5909fde6b2"
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-entity"
|
||||
version = "0.83.0"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2347b2b8d1d5429213668f2a8e36c85ee3c73984a2f6a79007e365d3e575e7ed"
|
||||
checksum = "09eaeacfcd2356fe0e66b295e8f9d59fdd1ac3ace53ba50de14d628ec902f72d"
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-frontend"
|
||||
version = "0.83.0"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4cbcdbf7bed29e363568b778649b69dabc3d727256d5d25236096ef693757654"
|
||||
checksum = "dba69c9980d5ffd62c18a2bde927855fcd7c8dc92f29feaf8636052662cbd99c"
|
||||
dependencies = [
|
||||
"cranelift-codegen",
|
||||
"log",
|
||||
@ -91,10 +109,16 @@ dependencies = [
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-jit"
|
||||
version = "0.83.0"
|
||||
name = "cranelift-isle"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c769d4e0d76f59c8b2a3bf0477d89ee149bb0731b53fbb245ee081d49063095"
|
||||
checksum = "d2920dc1e05cac40304456ed3301fde2c09bd6a9b0210bcfa2f101398d628d5b"
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-jit"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1c3c5ed067f2c81577e431f3039148a9c187b33cc79e0d1731fede27d801ec56"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"cranelift-codegen",
|
||||
@ -110,9 +134,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-module"
|
||||
version = "0.83.0"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0ab57d399a2401074bb0cc40b3031e420f3d66d46ec0cf21eeae53ac04bd73e2"
|
||||
checksum = "eee6784303bf9af235237a4885f7417e09a35df896d38ea969a0081064b3ede4"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"cranelift-codegen",
|
||||
@ -120,9 +144,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-native"
|
||||
version = "0.83.0"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8f4cdf93552e5ceb2e3c042829ebb4de4378492705f769eadc6a7c6c5251624c"
|
||||
checksum = "f04dfa45f9b2a6f587c564d6b63388e00cd6589d2df6ea2758cf79e1a13285e6"
|
||||
dependencies = [
|
||||
"cranelift-codegen",
|
||||
"libc",
|
||||
@ -131,9 +155,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cranelift-object"
|
||||
version = "0.83.0"
|
||||
version = "0.85.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf8e65f4839c26e6237fc0744911d79b0a2ac5e76b4e4eebd14db2b8d849fd31"
|
||||
checksum = "0bf38b2c505db749276793116c0cb30bd096206c7810e471677a453134881881"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"cranelift-codegen",
|
||||
@ -152,6 +176,26 @@ dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fxhash"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.2.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"wasi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gimli"
|
||||
version = "0.26.1"
|
||||
@ -161,6 +205,15 @@ dependencies = [
|
||||
"indexmap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.11.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.12.3"
|
||||
@ -174,14 +227,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"hashbrown",
|
||||
"hashbrown 0.12.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.119"
|
||||
version = "0.2.126"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4"
|
||||
checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
@ -219,11 +272,12 @@ checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
|
||||
|
||||
[[package]]
|
||||
name = "object"
|
||||
version = "0.27.1"
|
||||
version = "0.28.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9"
|
||||
checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424"
|
||||
dependencies = [
|
||||
"crc32fast",
|
||||
"hashbrown 0.11.2",
|
||||
"indexmap",
|
||||
"memchr",
|
||||
]
|
||||
@ -235,13 +289,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
|
||||
|
||||
[[package]]
|
||||
name = "regalloc"
|
||||
version = "0.0.34"
|
||||
name = "regalloc2"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "62446b1d3ebf980bdc68837700af1d77b37bc430e524bf95319c6eada2a4cc02"
|
||||
checksum = "4a8d23b35d7177df3b9d31ed8a9ab4bf625c668be77a319d4f5efd4a5257701c"
|
||||
dependencies = [
|
||||
"fxhash",
|
||||
"log",
|
||||
"rustc-hash",
|
||||
"slice-group-by",
|
||||
"smallvec",
|
||||
]
|
||||
|
||||
@ -257,12 +312,6 @@ dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc-hash"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
||||
|
||||
[[package]]
|
||||
name = "rustc_codegen_cranelift"
|
||||
version = "0.1.0"
|
||||
@ -284,10 +333,16 @@ dependencies = [
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.8.0"
|
||||
name = "slice-group-by"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
|
||||
checksum = "03b634d87b960ab1a38c4fe143b508576f075e7c978bfad18217645ebfdfa2ec"
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cc88c725d61fc6c3132893370cac4a0200e3fedf5da8331c570664b1987f5ca2"
|
||||
|
||||
[[package]]
|
||||
name = "target-lexicon"
|
||||
@ -295,6 +350,18 @@ version = "0.12.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d7fa7e55043acb85fca6b3c01485a2eeb6b69c5d21002e273c79e465f43b7ac1"
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
version = "0.9.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.10.2+wasi-snapshot-preview1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
|
||||
@ -8,21 +8,21 @@ crate-type = ["dylib"]
|
||||
|
||||
[dependencies]
|
||||
# These have to be in sync with each other
|
||||
cranelift-codegen = { version = "0.83.0", features = ["unwind", "all-arch"] }
|
||||
cranelift-frontend = "0.83.0"
|
||||
cranelift-module = "0.83.0"
|
||||
cranelift-native = "0.83.0"
|
||||
cranelift-jit = { version = "0.83.0", optional = true }
|
||||
cranelift-object = "0.83.0"
|
||||
cranelift-codegen = { version = "0.85.3", features = ["unwind", "all-arch"] }
|
||||
cranelift-frontend = "0.85.3"
|
||||
cranelift-module = "0.85.3"
|
||||
cranelift-native = "0.85.3"
|
||||
cranelift-jit = { version = "0.85.3", optional = true }
|
||||
cranelift-object = "0.85.3"
|
||||
target-lexicon = "0.12.0"
|
||||
gimli = { version = "0.26.0", default-features = false, features = ["write"]}
|
||||
object = { version = "0.27.0", default-features = false, features = ["std", "read_core", "write", "archive", "coff", "elf", "macho", "pe"] }
|
||||
object = { version = "0.28.0", default-features = false, features = ["std", "read_core", "write", "archive", "coff", "elf", "macho", "pe"] }
|
||||
|
||||
ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "do_not_remove_cg_clif_ranlib" }
|
||||
indexmap = "1.9.1"
|
||||
libloading = { version = "0.6.0", optional = true }
|
||||
once_cell = "1.10.0"
|
||||
smallvec = "1.6.1"
|
||||
smallvec = "1.8.1"
|
||||
|
||||
[patch.crates-io]
|
||||
# Uncomment to use local checkout of cranelift
|
||||
|
||||
@ -56,9 +56,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "compiler_builtins"
|
||||
version = "0.1.72"
|
||||
version = "0.1.75"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "afdbb35d279238cf77f0c9e8d90ad50d6c7bff476ab342baafa29440f0f10bff"
|
||||
checksum = "c6e3183e88f659a862835db8f4b67dbeed3d93e44dd4927eef78edb1c149d784"
|
||||
dependencies = [
|
||||
"rustc-std-workspace-core",
|
||||
]
|
||||
@ -112,9 +112,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.12.1"
|
||||
version = "0.12.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3"
|
||||
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
|
||||
dependencies = [
|
||||
"compiler_builtins",
|
||||
"rustc-std-workspace-alloc",
|
||||
@ -123,20 +123,21 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.2.0"
|
||||
version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1ab7905ea95c6d9af62940f9d7dd9596d54c334ae2c15300c482051292d5637f"
|
||||
checksum = "7668753748e445859e4e373c3d41117235d9feed578392f5a3a73efdc751ca4a"
|
||||
dependencies = [
|
||||
"compiler_builtins",
|
||||
"libc",
|
||||
"rustc-std-workspace-alloc",
|
||||
"rustc-std-workspace-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.125"
|
||||
version = "0.2.126"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5916d2ae698f6de9bfb891ad7a8d65c09d232dc58cc4ac433c7da3b2fd84bc2b"
|
||||
checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
|
||||
dependencies = [
|
||||
"rustc-std-workspace-core",
|
||||
]
|
||||
|
||||
@ -205,7 +205,7 @@ fn build_clif_sysroot_for_triple(
|
||||
{
|
||||
let entry = entry.unwrap();
|
||||
if let Some(ext) = entry.path().extension() {
|
||||
if ext == "rmeta" || ext == "d" || ext == "dSYM" {
|
||||
if ext == "rmeta" || ext == "d" || ext == "dSYM" || ext == "clif" {
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
|
||||
@ -6,7 +6,6 @@
|
||||
// Regression test for issue #91827.
|
||||
|
||||
#![feature(const_ptr_offset_from)]
|
||||
#![feature(const_slice_from_raw_parts)]
|
||||
#![feature(extern_types)]
|
||||
|
||||
use std::ptr::addr_of;
|
||||
|
||||
@ -458,7 +458,7 @@ pub trait FnMut<Args>: FnOnce<Args> {
|
||||
|
||||
#[lang = "panic"]
|
||||
#[track_caller]
|
||||
pub fn panic(_msg: &str) -> ! {
|
||||
pub fn panic(_msg: &'static str) -> ! {
|
||||
unsafe {
|
||||
libc::puts("Panicking\n\0" as *const str as *const i8);
|
||||
intrinsics::abort();
|
||||
@ -497,7 +497,7 @@ pub trait Deref {
|
||||
#[repr(transparent)]
|
||||
#[rustc_layout_scalar_valid_range_start(1)]
|
||||
#[rustc_nonnull_optimization_guaranteed]
|
||||
pub struct NonNull<T: ?Sized>(pub *mut T);
|
||||
pub struct NonNull<T: ?Sized>(pub *const T);
|
||||
|
||||
impl<T: ?Sized, U: ?Sized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
|
||||
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
|
||||
@ -521,7 +521,7 @@ impl<T: ?Sized> Drop for Box<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Deref for Box<T> {
|
||||
impl<T: ?Sized> Deref for Box<T> {
|
||||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
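Not part of the patch, but to illustrate why the mini_core `Deref` impl is relaxed to `T: ?Sized` here: with that bound, boxes of unsized types still dereference. A minimal sketch using std's `Box` (not the mini_core one above):

fn len_of(s: Box<str>) -> usize {
    // Goes through Deref<Target = str>; this only type-checks because the
    // impl covers unsized `T`.
    s.len()
}

fn main() {
    let b: Box<dyn std::fmt::Debug> = Box::new(42);
    // Deref coercion from Box<dyn Debug> to &dyn Debug.
    println!("{:?}", &*b);
    assert_eq!(len_of("hello".into()), 5);
}
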
|
||||
|
||||
@ -124,6 +124,23 @@ fn call_return_u128_pair() {
|
||||
return_u128_pair();
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
pub struct bool_11 {
|
||||
field0: bool,
|
||||
field1: bool,
|
||||
field2: bool,
|
||||
field3: bool,
|
||||
field4: bool,
|
||||
field5: bool,
|
||||
field6: bool,
|
||||
field7: bool,
|
||||
field8: bool,
|
||||
field9: bool,
|
||||
field10: bool,
|
||||
}
|
||||
|
||||
extern "C" fn bool_struct_in_11(arg0: bool_11) {}
|
||||
|
||||
#[allow(unreachable_code)] // FIXME false positive
|
||||
fn main() {
|
||||
take_unique(Unique {
|
||||
@ -134,6 +151,20 @@ fn main() {
|
||||
|
||||
call_return_u128_pair();
|
||||
|
||||
bool_struct_in_11(bool_11 {
|
||||
field0: true,
|
||||
field1: true,
|
||||
field2: true,
|
||||
field3: true,
|
||||
field4: true,
|
||||
field5: true,
|
||||
field6: true,
|
||||
field7: true,
|
||||
field8: true,
|
||||
field9: true,
|
||||
field10: true,
|
||||
});
|
||||
|
||||
let slice = &[0, 1] as &[i32];
|
||||
let slice_ptr = slice as *const [i32] as *const i32;
|
||||
|
||||
@ -299,6 +330,17 @@ fn main() {
|
||||
static REF1: &u8 = &42;
|
||||
static REF2: &u8 = REF1;
|
||||
assert_eq!(*REF1, *REF2);
|
||||
|
||||
extern "C" {
|
||||
type A;
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let x: &A = unsafe { &*(1usize as *const A) };
|
||||
|
||||
assert_eq!(unsafe { intrinsics::size_of_val(x) }, 0);
|
||||
assert_eq!(unsafe { intrinsics::min_align_of_val(x) }, 1);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(not(jit), target_arch = "x86_64", target_os = "linux"))]
|
||||
|
||||